code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.storage.anormdb
import com.twitter.zipkin.storage.Storage
import com.twitter.zipkin.common._
import com.twitter.zipkin.common.Annotation
import com.twitter.zipkin.common.BinaryAnnotation
import com.twitter.zipkin.util.Util
import com.twitter.util.{Duration, Future}
import anorm._
import anorm.SqlParser._
import java.nio.ByteBuffer
import java.sql.Connection
/**
* Retrieve and store span information.
*
* This is one of two places where Zipkin interacts directly with the database,
* the other one being AnormIndex.
*
* NOTE: We're ignoring TTL for now since unlike Cassandra and Redis, SQL
* databases don't have that built in and it shouldn't be a big deal for most
* sites. Several methods in this class deal with TTL and we just assume that
* all spans will live forever.
*/
case class AnormStorage(db: DB, openCon: Option[Connection] = None) extends Storage {
  // Database connection object: reuse the caller-supplied connection when one
  // is given, otherwise open a dedicated connection from the DB helper.
  private implicit val conn = openCon match {
    case None => db.getConnection()
    case Some(con) => con
  }

  /**
   * Close the storage by closing the underlying database connection.
   */
  def close() { conn.close() }

  /**
   * Store the span in the underlying storage for later retrieval.
   *
   * Writes one row to zipkin_spans, one row per annotation and one row per
   * binary annotation. The work happens synchronously on the calling thread;
   * the returned future is only a completion marker.
   *
   * @return a future for the operation
   */
  def storeSpan(span: Span): Future[Unit] = {
    // created_ts mirrors the timestamp of the span's earliest annotation, if any.
    val createdTs: Option[Long] = span.firstAnnotation.map(_.timestamp)

    SQL(
      """INSERT INTO zipkin_spans
        |  (span_id, parent_id, trace_id, span_name, debug, duration, created_ts)
        |VALUES
        |  ({span_id}, {parent_id}, {trace_id}, {span_name}, {debug}, {duration}, {created_ts})
      """.stripMargin)
      .on("span_id" -> span.id)
      .on("parent_id" -> span.parentId)
      .on("trace_id" -> span.traceId)
      .on("span_name" -> span.name)
      .on("debug" -> (if (span.debug) 1 else 0))
      .on("duration" -> span.duration)
      .on("created_ts" -> createdTs)
      .execute()

    span.annotations.foreach(a =>
      SQL(
        """INSERT INTO zipkin_annotations
          |  (span_id, trace_id, span_name, service_name, value, ipv4, port,
          |  a_timestamp, duration)
          |VALUES
          |  ({span_id}, {trace_id}, {span_name}, {service_name}, {value},
          |  {ipv4}, {port}, {timestamp}, {duration})
        """.stripMargin)
        .on("span_id" -> span.id)
        .on("trace_id" -> span.traceId)
        .on("span_name" -> span.name)
        .on("service_name" -> a.serviceName)
        .on("value" -> a.value)
        .on("ipv4" -> a.host.map(_.ipv4))
        .on("port" -> a.host.map(_.port))
        .on("timestamp" -> a.timestamp)
        .on("duration" -> a.duration)
        .execute()
    )

    span.binaryAnnotations.foreach(b =>
      SQL(
        """INSERT INTO zipkin_binary_annotations
          |  (span_id, trace_id, span_name, service_name, key, value,
          |  annotation_type_value, ipv4, port)
          |VALUES
          |  ({span_id}, {trace_id}, {span_name}, {service_name}, {key}, {value},
          |  {annotation_type_value}, {ipv4}, {port})
        """.stripMargin)
        .on("span_id" -> span.id)
        .on("trace_id" -> span.traceId)
        .on("span_name" -> span.name)
        .on("service_name" -> b.host.map(_.serviceName).getOrElse("Unknown service name")) // from Annotation
        .on("key" -> b.key)
        .on("value" -> Util.getArrayFromBuffer(b.value))
        .on("annotation_type_value" -> b.annotationType.value)
        .on("ipv4" -> b.host.map(_.ipv4))
        // Bug fix: the port column previously received the IPv4 address
        // (b.host.map(_.ipv4)) instead of the port.
        .on("port" -> b.host.map(_.port))
        .execute()
    )

    Future.Unit
  }

  /**
   * Set the ttl of a trace. Used to store a particular trace longer than the
   * default. It must be oh so interesting!
   *
   * No-op here: SQL storage keeps spans forever (see class note on TTL).
   */
  def setTimeToLive(traceId: Long, ttl: Duration): Future[Unit] = {
    Future.Unit
  }

  /**
   * Get the time to live for a specific trace.
   * If there are multiple ttl entries for one trace, pick the lowest one.
   *
   * Always Duration.Top here: SQL storage keeps spans forever.
   */
  def getTimeToLive(traceId: Long): Future[Duration] = {
    Future.value(Duration.Top)
  }

  /**
   * Finds traces that have been stored from a list of trace IDs
   *
   * @param traceIds a List of trace IDs
   * @return a Set of those trace IDs from the list which are stored
   */
  def tracesExist(traceIds: Seq[Long]): Future[Set[Long]] = {
    Future {
      // traceIds are Longs, so interpolating them into the IN clause is safe.
      SQL(
        "SELECT trace_id FROM zipkin_spans WHERE trace_id IN (%s)".format(traceIds.mkString(","))
      ).as(long("trace_id") *).toSet
    }
  }

  /**
   * Get the available trace information from the storage system.
   * Spans in trace should be sorted by the first annotation timestamp
   * in that span. First event should be first in the spans list.
   */
  def getSpansByTraceIds(traceIds: Seq[Long]): Future[Seq[Seq[Span]]] = {
    val traceIdsString: String = traceIds.mkString(",")

    val spans: List[DBSpan] =
      SQL(
        """SELECT span_id, parent_id, trace_id, span_name, debug
          |FROM zipkin_spans
          |WHERE trace_id IN (%s)
        """.stripMargin.format(traceIdsString))
        .as((long("span_id") ~ get[Option[Long]]("parent_id") ~
          long("trace_id") ~ str("span_name") ~ int("debug") map {
          case a~b~c~d~e => DBSpan(a, b, c, d, e > 0)
        }) *)

    val annos: List[DBAnnotation] =
      SQL(
        """SELECT span_id, trace_id, service_name, value, ipv4, port, a_timestamp, duration
          |FROM zipkin_annotations
          |WHERE trace_id IN (%s)
        """.stripMargin.format(traceIdsString))
        .as((long("span_id") ~ long("trace_id") ~ str("service_name") ~ str("value") ~
          get[Option[Int]]("ipv4") ~ get[Option[Int]]("port") ~
          long("a_timestamp") ~ get[Option[Long]]("duration") map {
          case a~b~c~d~e~f~g~h => DBAnnotation(a, b, c, d, e, f, g, h)
        }) *)

    val binAnnos: List[DBBinaryAnnotation] =
      SQL(
        """SELECT span_id, trace_id, service_name, key, value, annotation_type_value,
          |  ipv4, port
          |FROM zipkin_binary_annotations
          |WHERE trace_id IN (%s)
        """.stripMargin.format(traceIdsString))
        .as((long("span_id") ~ long("trace_id") ~ str("service_name") ~ str("key") ~
          db.bytes("value") ~ int("annotation_type_value") ~
          get[Option[Int]]("ipv4") ~ get[Option[Int]]("port") map {
          case a~b~c~d~e~f~g~h => DBBinaryAnnotation(a, b, c, d, e, f, g, h)
        }) *)

    val results: Seq[Seq[Span]] = traceIds.map { traceId =>
      val traceSpans = spans.filter(_.traceId == traceId).map { span =>
        // Bug fix: annotations must be matched to their own span. The previous
        // filter only compared trace ids, so every span in a trace received
        // every annotation of that trace.
        val spanAnnos = annos
          .filter(a => a.traceId == span.traceId && a.spanId == span.spanId)
          .map { anno =>
            val host: Option[Endpoint] = (anno.ipv4, anno.port) match {
              case (Some(ipv4), Some(port)) => Some(Endpoint(ipv4, port.toShort, anno.serviceName))
              case _ => None
            }
            val duration: Option[Duration] = anno.duration.map(Duration.fromNanoseconds)
            Annotation(anno.timestamp, anno.value, host, duration)
          }
        // Bug fix: same span-id matching for binary annotations.
        val spanBinAnnos = binAnnos
          .filter(b => b.traceId == span.traceId && b.spanId == span.spanId)
          .map { binAnno =>
            val host: Option[Endpoint] = (binAnno.ipv4, binAnno.port) match {
              case (Some(ipv4), Some(port)) => Some(Endpoint(ipv4, port.toShort, binAnno.serviceName))
              case _ => None
            }
            val value = ByteBuffer.wrap(binAnno.value)
            val annotationType = AnnotationType.fromInt(binAnno.annotationTypeValue)
            BinaryAnnotation(binAnno.key, value, annotationType, host)
          }
        Span(traceId, span.spanName, span.spanId, span.parentId, spanAnnos, spanBinAnnos, span.debug)
      }
      // The documented contract requires spans ordered by their first
      // annotation timestamp; spans without annotations sort last.
      traceSpans.sortBy(_.firstAnnotation.map(_.timestamp).getOrElse(Long.MaxValue))
    }

    Future {
      results.filter(!_.isEmpty)
    }
  }

  // NOTE(review): `head` throws if no trace was found for this id; callers
  // apparently rely on at least one stored trace being returned.
  def getSpansByTraceId(traceId: Long): Future[Seq[Span]] = {
    getSpansByTraceIds(Seq(traceId)).map {
      _.head
    }
  }

  /**
   * How long do we store the data before we delete it? In seconds.
   * Effectively forever for SQL storage.
   */
  def getDataTimeToLive: Int = {
    Int.MaxValue
  }

  // Thin row holders mirroring the table columns, used by the anorm parsers.
  case class DBSpan(spanId: Long, parentId: Option[Long], traceId: Long, spanName: String, debug: Boolean)
  case class DBAnnotation(spanId: Long, traceId: Long, serviceName: String, value: String, ipv4: Option[Int], port: Option[Int], timestamp: Long, duration: Option[Long])
  case class DBBinaryAnnotation(spanId: Long, traceId: Long, serviceName: String, key: String, value: Array[Byte], annotationTypeValue: Int, ipv4: Option[Int], port: Option[Int])
}
| newsky/zipkin | zipkin-anormdb/src/main/scala/com/twitter/zipkin/storage/anormdb/AnormStorage.scala | Scala | apache-2.0 | 9,083 |
package com.xiaoguangchen.spa
import org.scalatest.FunSpec
import scala.Predef._
import scala.Tuple3
import scala.Some
import java.text.SimpleDateFormat
import java.util.Date
class PostgresTest extends BaseTest with FunSpec {
if (database == PostgreSQL) {
// Basic DDL/DML round trips against a live Postgres instance.
describe("Update test") {
val qm = QueryManager(open = getConnection)
// Drops, recreates and re-detects the spa_test table via pg_tables.
it("test update ") {
val table = "spa_test"
val selectTableSql = sql" select count(*) from pg_tables where schemaname='public' " +
sql" and tablename = $table"
val count = qm.selectQuery(selectTableSql).toSingle[Long]
if (count.get > 0) {
qm.updateQuery(sql" drop table spa_test ").executeUpdate
}
val count2 = qm.selectQuery(selectTableSql).toSingle[Long]
assert(count2.get === 0)
val createTableSql = sql"create table if not exists spa_test(x Integer)"
qm.updateQuery(createTableSql).executeUpdate
val count1 = qm.selectQuery(selectTableSql).toSingle[Long]
assert(count1.isDefined)
assert(count1.get >= 1) //in case of multi-thread tests
}
// NOTE(review): this is a nested `describe` without an `it`, so its body runs
// once at suite registration time rather than as a reported test case —
// confirm that is intentional.
describe(" test return auto generated key is not supported ") {
// note: Postgres will append RETURNING to the origin SQL if the "Statement.RETURN_GENERATED_KEYS" is used regardless the select, create delete or update
// Postgres SQL doesn't has good support for returning generated keys
// If we specified RETURN_GENERATED_KEYS, the Posgres will return all the inserted columns and values in the generatedKeys resultset
// and there is no distinction on which column is generated or non-generated, making it hard to dynamically determine
// the generated value. The only way is to tell SPA the auto-generated column name, which is a bit difficult to do in general
// so I decide to not support return generated Key for postgres
val table = "spa_test2"
qm.transaction() {
implicit trans =>
val selectTableSql = sql" select count(*) from pg_tables where schemaname='public' " +
sql" and tablename = $table"
val count = qm.selectQuery(selectTableSql).toSingle[Long]
if (count.get > 0) {
// NOTE(review): assumes the sql interpolator splices $table literally here;
// a bound parameter is not legal in a DROP TABLE statement — confirm.
qm.updateQuery(sql" drop table $table" ).executeUpdate
}
val count2 = qm.selectQuery(selectTableSql).toSingle[Long]
assert(count2.get === 0)
//qm.updateQuery(sql" drop table spa_test ").executeUpdate
val createTableSql = sql"create table if not exists spa_test2 ( id SERIAL, x Integer)"
qm.updateQuery(createTableSql).executeUpdate
}
// NOTE(review): the assertions below (1 then 2) presume executeUpdate yields
// the SERIAL id rather than the JDBC row count — verify against spa's API.
val id1 = qm.updateQuery(sql" INSERT INTO spa_test2(x) values (3) ").executeUpdate
println("id1 = " + id1)
assert(id1 === 1)
val id2 = qm.updateQuery(sql" INSERT INTO spa_test2(x) values (4) ").executeUpdate
println("id2 = " + id2)
assert(id2 === 2)
}
}
// Batch-insert behaviour followed by verification reads, all against the
// "test" table.
describe("batch test ") {
  it(" batch update then followed by select query") {
    val qm = QueryManager(open = getConnection)
    val table = "test"
    val selectTableSql = sql" select count(*) from pg_tables where schemaname='public' " +
      sql" and tablename = $table"
    val count = qm.selectQuery(selectTableSql).toSingle[Long]
    if (count.get > 0) {
      qm.updateQuery(sql" drop table test ").executeUpdate
    }
    // Bug fix: the table was created as "spa_test" while every insert/select
    // below targets "test"; create the table actually used.
    val createTableSql = sql"create table if not exists test(x Integer, y Integer)"
    qm.updateQuery(createTableSql).executeUpdate
    val prefixSql = sql"insert into test (x, y) values(?, ?) "
    val q = qm.batchUpdateQuery(prefixSql)
    //index is zero-based
    val size = 10
    for (i <- 0 until size) {
      val pa = Map(0 -> i, 1 -> i * 20) // x is 0, y is 1
      q.addBatch(pa)
    }
    q.executeBatchUpdate
    val z = qm.selectQuery(sql"select x,y from test").toList[(Int, Int)]
    assert(z.size === size)
    assert(z(0) ===(0, 0))
    assert(z(1) ===(1, 20))
    assert(z(size - 1) ===(size - 1, 20 * (size - 1)))
  }

  it("batch test using different data types return tuple3") {
    val qm = QueryManager(open = getConnection)
    val table = "test"
    val selectTableSql = sql" select count(*) from pg_tables where schemaname='public' " +
      sql" and tablename = $table"
    val count = qm.selectQuery(selectTableSql).toSingle[Long]
    if (count.get > 0) {
      qm.updateQuery(sql" drop table test ").executeUpdate
    }
    // Bug fix: same table-name mismatch as above ("spa_test" vs "test").
    val createTableSql = sql"create table if not exists test(x DECIMAL(16), y DECIMAL(32), z varchar(30))"
    qm.updateQuery(createTableSql).executeUpdate
    val prefixSql = sql"insert into test (x, y, z) values(?, ?, ?) "
    val q = qm.batchUpdateQuery(prefixSql)
    //index is zero-based
    val size = 10
    for (i <- 0 until size) {
      val pa = Map(0 -> i, 1 -> i * 20, 2 -> s"value of $i") // x is 0, y is 1, z is 2
      q.addBatch(pa)
    }
    q.executeBatchUpdate
    val z = qm.selectQuery(sql"select x,y, z from test").toList[(Long, Long, String)]
    assert(z.size === size)
    // (A tautological assertion comparing a literal tuple to itself was removed.)
    assert(z(0)._1 === 0L)
    assert(z(0)._2 === 0L)
    assert(z(0)._3 === "value of 0")
    assert(z(1).isInstanceOf[(Long, Long, String)])
    assert(z(1) ===(1, 20, "value of 1"))
    assert(z(2) ===(2, 40, "value of 2"))
    assert(z(size - 1) ===(size - 1, 20 * (size - 1), s"value of ${size - 1}"))
  }
}
// Read-path coverage: row mapping via annotations, iterators, custom
// extractors, and scalar/tuple result types.
describe("test select query ") {
// Maps rows to the Coffee case class using its constructor annotations.
it("constructor with Annotation") {
val qm = QueryManager(open = getConnection, true)
val coffees = prepareCoffee(qm)
val results = qm.selectQuery(sql" select * from COFFEES ").toList[Coffee]
assert(results.size === coffees.size)
}
// Streams rows through a caller-supplied iterator consumer.
it("test WithIterator ") {
val qm = QueryManager(open = getConnection)
val coffees = prepareCoffee(qm)
// use Column Annotation on the parameters of the constructor
val q = qm.selectQuery(sql" select * from COFFEES ")
val results = q.withIterator {
// NOTE(review): this lambda parameter shadows FunSpec's `it` — rename for clarity.
it: Iterator[Option[Coffee]] =>
it.foldLeft(List[Coffee]())((acc, a) => a.get :: acc).reverse
}
assert(results.size === coffees.size)
}
// Maps rows manually with a RowExtractor instead of reflection.
it("test row extractor ") {
val qm = QueryManager(open = getConnection)
val coffees = prepareCoffee(qm)
val rowProcessor = new RowExtractor[CoffeePrice] {
def extractRow(oneRowCols: Map[ColumnMetadata, Any]): CoffeePrice = {
//there should only two columns
assert(oneRowCols.size == 2)
val colValues = oneRowCols.map(a => a._1.colLabel -> a._2)
val name = colValues.get("COF_NAME").getOrElse("NA").toString
// NOTE(review): PRICE is declared DECIMAL(10,4); this cast presumes the
// driver/spa delivers it as Int here — confirm, BigDecimal would throw.
val price = colValues.get("PRICE").getOrElse(0).asInstanceOf[Int].toDouble
CoffeePrice(name, price)
}
}
// use Column Annotation on the parameters of the constructor
val results = qm.selectQuery(sql" select COF_NAME, PRICE from COFFEES ", Some(rowProcessor)).toList[CoffeePrice]
assert(results.size === coffees.size)
}
// Scalar conversions for constant selects, plus a DATE round trip.
it("test select query simple data types with mySQL syntax") {
{
val qm = QueryManager(open = getConnection)
val longValue = qm.selectQuery(sql" select 1 ").toSingle[Long]
assert(longValue.get === 1L)
val dblValue = qm.selectQuery(sql"select 1.0 ").toSingle[Double]
assert(dblValue.get === 1.0)
val flValue = qm.selectQuery(sql"select 1.0 ").toSingle[Float]
assert(flValue.get === 1.0)
val intValue = qm.selectQuery(sql"select 1 ").toSingle[Int]
assert(intValue.get === 1)
val bigDecimalValue = qm.selectQuery(sql"select 1.0 ").toSingle[BigDecimal]
assert(bigDecimalValue.get === BigDecimal(1.0))
val stValue = qm.selectQuery(sql"select 'string' ").toSingle[String]
assert(stValue.get === "string")
val formatter = new SimpleDateFormat("yyyy-MM-dd")
val dtValue = qm.selectQuery(sql"select now() ").toSingle[Date]
assert(formatter.format(dtValue.get) == formatter.format(new Date()))
val table = "testdate"
val selectTableSql = sql" select count(*) from pg_tables where schemaname='public' " +
sql" and tablename = $table"
val count = qm.selectQuery(selectTableSql).toSingle[Long]
if (count.get > 0) {
qm.updateQuery(sql" drop table testdate ").executeUpdate
}
val createDateTableSql = sql"create table if not exists testdate(dt DATE)"
qm.updateQuery(createDateTableSql).executeUpdate
val today = new Date()
qm.updateQuery(sql"INSERT INTO testdate(dt) values ($today) ").executeUpdate
val dt = qm.selectQuery(sql"select dt from testdate where dt = $today ").toSingle[Date]
assert(formatter.format(today) == formatter.format(dt.get))
}
}
// Tuple materialisation from Tuple2 up to Tuple9.
it("test select query with tuple data types") {
{
val qm = QueryManager(open = getConnection)
val tuple2Value = qm.selectQuery(sql" select 1, '2' ").toSingle[(Int, String)]
assert(tuple2Value.get ===(1, "2"))
val date = new Date()
val formatter = new SimpleDateFormat("yyyy-MM-dd")
val tuple3Value = qm.selectQuery(sql"select 1.0 as A, '2' as B, now() as C ").toSingle[(Double, String, Date)]
assert(tuple3Value != None)
val (x, y, z) = tuple3Value.get
assert((x, y) ===(1.0, "2"))
assert(formatter.format(z) === formatter.format(date))
val tuple4Value = qm.selectQuery(sql" select 1,2,3,4 ").toSingle[(Int, Int, Int, Int)]
assert(tuple4Value.get ===(1, 2, 3, 4))
val tuple5Value = qm.selectQuery(sql" select 1,2,3,4,5 ").toSingle[(Int, Int, Int, Int, Int)]
assert(tuple5Value.get ===(1, 2, 3, 4, 5))
val tuple6Value = qm.selectQuery(sql" select 1,2,3,4,5,6 ").toSingle[(Int, Int, Int, Int, Int, Int)]
assert(tuple6Value.get ===(1, 2, 3, 4, 5, 6))
val tuple7Value = qm.selectQuery(sql" select 1,2,3,4,5,6,7 ").toSingle[(Int, Int, Int, Int, Int, Int, Int)]
assert(tuple7Value.get ===(1, 2, 3, 4, 5, 6, 7))
val tuple8Value = qm.selectQuery(sql" select 1,2,3,4,5,6,7,8 ").toSingle[(Int, Int, Int, Int, Int, Int, Int, Int)]
assert(tuple8Value.get ===(1, 2, 3, 4, 5, 6, 7, 8))
val tuple9Value = qm.selectQuery(sql" select 1,2,3,4,5,6,7,8,9 ").toSingle[(Int, Int, Int, Int, Int, Int, Int, Int, Int)]
assert(tuple9Value.get ===(1, 2, 3, 4, 5, 6, 7, 8, 9))
}
}
}
// Commit and rollback semantics of qm.transaction().
describe("transaction Test") {
  // DDL inside a transaction block: table is visible after the block commits.
  it("test transaction ") {
    val qm = QueryManager(open = getConnection)
    qm.transaction() {
      implicit trans =>
        val table = "testdate"
        val selectTableSql = sql" select count(*) from pg_tables where schemaname='public' " +
          sql" and tablename = $table"
        val count = qm.selectQuery(selectTableSql).toSingle[Long]
        if (count.get > 0) {
          qm.updateQuery(sql" drop table testdate ").executeUpdate
        }
        val count2 = qm.selectQuery(selectTableSql).toSingle[Long]
        assert(count2.get == 0)
        val createTableSql = sql"create table if not exists testdate(x INTEGER)"
        qm.updateQuery(createTableSql).executeUpdate
        val count3 = qm.selectQuery(selectTableSql).toSingle[Long]
        assert(count3.get > 0)
    }
  }

  // An exception thrown inside the transaction must roll the update back.
  it(" update transaction roll-back") {
    val qm = QueryManager(open = getConnection)
    val table = "test"
    val selectTableSql = sql" select count(*) from pg_tables where schemaname='public' " +
      sql" and tablename = $table"
    val count = qm.selectQuery(selectTableSql).toSingle[Long]
    if (count.get > 0) {
      qm.updateQuery(sql" drop table test ").executeUpdate
    }
    // Bug fix: this test dropped/checked table "test" but created "spa_test"
    // and then queried both names; it now uses "test" consistently.
    val createTableSql = sql"create table if not exists test(x INTEGER)"
    qm.updateQuery(createTableSql).executeUpdate
    qm.updateQuery(sql"INSERT INTO test(x) values (1) ").executeUpdate
    val xvalue = qm.selectQuery(sql"select x from test").toSingle[Int]
    assert(xvalue == Some(1))
    intercept[ExecuteQueryException] {
      qm.transaction() {
        implicit trans =>
          //update first
          qm.updateQuery(sql"update test set x = 2").executeUpdate
          //then throw exception after update
          throw new ExecuteQueryException("see if I can rollback")
      }
    }
    println("now trying to select again")
    // The failed transaction must not have persisted the update.
    val xvalue2 = qm.selectQuery(sql"select x from test").toSingle[Int]
    println(" xvalue2= " + xvalue2)
    assert(xvalue2 === Some(1))
    // A clean transaction commits normally.
    qm.transaction() {
      implicit trans =>
        qm.updateQuery(sql"update test set x = 2").executeUpdate
    }
    val xvalue3 = qm.selectQuery(sql"select x from test").toSingle[Int]
    assert(xvalue3 === Some(2))
  }
}
/*
describe("Random Tests") {
it ("temp test") {
val qm = QueryManager(open = getConnection)
val r = qm.selectQuery(sql"select * FROM data_mining.election92 where county = 'Carroll' LIMIT 10", Some(new RecordRowExtractor())).toList
println("r = " + r)
}
}
*/
/**
 * Drops, recreates and repopulates the COFFEES fixture table.
 *
 * @param qm the query manager used for all statements
 * @return the list of Coffee rows that were inserted
 */
def prepareCoffee(qm: QueryManager): List[Coffee] = {
  println(" select table ")
  val table = "coffees"
  val selectTableSql = sql" select count(*) from pg_tables where schemaname='public' " +
    sql" and tablename = $table"
  val count = qm.selectQuery(selectTableSql).toSingle[Long]
  if (count.get > 0) {
    println(" drop table ")
    qm.updateQuery(sql" drop table COFFEES ").executeUpdate
    println(" drop table done")
  }
  println(" create table ")
  // Bug fix: the statement previously repeated "if not exists" twice, which
  // is invalid SQL and is rejected by Postgres.
  val createTableSql = sql"create table if not exists coffees(COF_NAME varchar(30), SUP_ID INTEGER, PRICE DECIMAL(10,4))"
  qm.updateQuery(createTableSql).executeUpdate
  println(" create table if not exists done ")
  val coffees = List(Coffee("Colombian", 101, 7.99), Coffee("Colombian Decaf", 101, 8.99), Coffee("French Roast Decaf", 49, 9.99))
  println(" insert coffees ")
  // Insert all fixture rows in a single transaction.
  qm.transaction() {
    implicit trans =>
      for (c <- coffees) {
        qm.updateQuery(sql"insert into COFFEES (COF_NAME, SUP_ID, PRICE) values (${c.name}, ${c.supId}, ${c.price} )")
          .executeUpdate
      }
  }
  println(" return coffees ")
  coffees
}
}
}
/**
 * Extracts an entire row as (column label -> rendered value) pairs,
 * ordered by column position; SQL NULLs are rendered as "NA".
 */
class RecordRowExtractor() extends RowExtractor[Seq[(String, String)]] {
  def extractRow(oneRowCols: Map[ColumnMetadata, Any]): Seq[(String, String)] = {
    val orderedColumns = oneRowCols.toList.sortBy { case (meta, _) => meta.colPos }
    orderedColumns.map { case (meta, raw) =>
      val rendered = if (raw == null) "NA" else raw.toString
      meta.colLabel -> rendered
    }
  }
}
| chesterxgchen/spa | src/test/scala/com/xiaoguangchen/spa/PostgresTest.scala | Scala | apache-2.0 | 15,385 |
package net.tobysullivan.shorturl
/**
 * Persistence interface for per-hash lookup statistics.
 */
trait StatsStore {
  /** Records one additional lookup of the given hash. */
  def incrementHashLookupCount(hash: Int): Unit

  /** @return the number of lookups recorded so far for the given hash */
  def getHashLookupCount(hash: Int): Int
}
package scala.reflect
package generic
/**
 * Abstract factory for compiler names, split into the term and the type
 * namespace. Concrete subclasses supply the Name representation.
 */
trait Names {
// Abstract name representation; non-null reference type.
type Name >: Null <: AnyRef
// Create a term name from a slice of a char or byte array, or from a string.
def newTermName(cs: Array[Char], offset: Int, len: Int): Name
def newTermName(cs: Array[Byte], offset: Int, len: Int): Name
def newTermName(s: String): Name
// Convert an existing name into the term namespace.
def mkTermName(name: Name): Name
// Create a type name from a slice of a char or byte array, or from a string.
def newTypeName(cs: Array[Char], offset: Int, len: Int): Name
def newTypeName(cs: Array[Byte], offset: Int, len: Int): Name
def newTypeName(s: String): Name
// Convert an existing name into the type namespace.
def mkTypeName(name: Name): Name
}
| cran/rkafkajars | java/scala/reflect/generic/Names.scala | Scala | apache-2.0 | 493 |
/*
* Copyright 2013 Maurício Linhares
*
* Maurício Linhares licenses this file to you under the Apache License,
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.github.mauricio.async.db.postgresql.util
import com.github.mauricio.async.db.postgresql.exceptions.InvalidArrayException
import com.github.mauricio.async.db.util.Log
import scala.collection.mutable
import scala.collection.mutable.StringBuilder
object ArrayStreamingParser {
val log = Log.getByName(ArrayStreamingParser.getClass.getName)
/**
 * Streams a Postgres array literal (e.g. {a,"b,c",NULL}) through the
 * delegate, firing arrayStarted/arrayEnded, elementFound and
 * nullElementFound events. Throws InvalidArrayException when the braces
 * are unbalanced. Backslash escapes the following character.
 */
def parse(content: String, delegate: ArrayStreamingParserDelegate) = {
var index = 0
// True when the previous character was a backslash: the next char is literal.
var escaping = false
// True while inside a double-quoted element.
var quoted = false
// Accumulates the current element's characters; null when no element is open.
var currentElement: StringBuilder = null
// Brace counters used for the final balance check.
var opens = 0
var closes = 0
while (index < content.size) {
val char = content.charAt(index)
if (escaping) {
currentElement.append(char)
escaping = false
} else {
char match {
// Unquoted '{' opens a (possibly nested) array.
case '{' if !quoted => {
delegate.arrayStarted
opens += 1
}
// Unquoted '}' flushes any pending element, then closes the array.
case '}' if !quoted => {
if (currentElement != null) {
sendElementEvent(currentElement, quoted, delegate)
currentElement = null
}
delegate.arrayEnded
closes += 1
}
// A quote either opens a quoted element or closes it; a closing quote
// flushes the element while `quoted` is still true, so it is never
// treated as a NULL token.
case '"' => {
if (quoted) {
sendElementEvent(currentElement, quoted, delegate)
currentElement = null
quoted = false
} else {
quoted = true
currentElement = new mutable.StringBuilder()
}
}
// Unquoted comma terminates the current (unquoted) element, if any.
case ',' if !quoted => {
if (currentElement != null) {
sendElementEvent(currentElement, quoted, delegate)
}
currentElement = null
}
// Backslash escapes the next character (both inside and outside quotes).
case '\\' => {
escaping = true
}
// Any other character is accumulated, opening an unquoted element lazily.
case _ => {
if (currentElement == null) {
currentElement = new mutable.StringBuilder()
}
currentElement.append(char)
}
}
}
index += 1
}
if (opens != closes) {
throw new InvalidArrayException(
"This array is unbalanced %s".format(content)
)
}
}
/**
 * Emits a completed element to the delegate, translating the unquoted
 * NULL token (case-insensitive) into a null-element event. Quoted
 * elements are always reported verbatim, even when they spell "NULL".
 */
def sendElementEvent(
    builder: mutable.StringBuilder,
    quoted: Boolean,
    delegate: ArrayStreamingParserDelegate
): Unit = {
  val text = builder.toString()
  if (quoted || !"NULL".equalsIgnoreCase(text)) {
    delegate.elementFound(text)
  } else {
    delegate.nullElementFound
  }
}
}
| dripower/postgresql-async | postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/ArrayStreamingParser.scala | Scala | apache-2.0 | 3,085 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn.mkldnn
import com.intel.analytics.bigdl.mkl.{DataType, Memory}
import com.intel.analytics.bigdl.dllib.nn.mkldnn.Phase.{InferencePhase, TrainingPhase}
import com.intel.analytics.bigdl.numeric.NumericFloat
import com.intel.analytics.bigdl.dllib.tensor.{DnnTensor, Tensor}
import com.intel.analytics.bigdl.dllib.utils.{BigDLSpecHelper, T}
import org.apache.commons.lang3.SerializationUtils
// Unit tests for the MKL-DNN CAddTable layer (element-wise addition of the
// entries of an input table).
class CAddTableSpec extends BigDLSpecHelper {
// Forward must add the two reordered copies of the input produced by the
// ConcatTable; backward must fan the gradient out to both branches.
"CAddTable" should "be correct" in {
val layer = CAddTable()
val model = Sequential()
val concat = ConcatTable()
concat.add(ReorderMemory.create(HeapData(Array(2, 2), Memory.Format.nc),
NativeData(Array(2, 2), Memory.Format.nc), HeapData(Array(2, 2), Memory.Format.nc),
NativeData(Array(2, 2), Memory.Format.nc)))
concat.add(ReorderMemory.create(HeapData(Array(2, 2), Memory.Format.nc),
NativeData(Array(2, 2), Memory.Format.nc), HeapData(Array(2, 2), Memory.Format.nc),
NativeData(Array(2, 2), Memory.Format.nc)))
model.add(concat)
model.add(layer)
model.add(ReorderMemory.create(NativeData(Array(2, 2), Memory.Format.nc),
HeapData(Array(2, 2), Memory.Format.nc), NativeData(Array(2, 2), Memory.Format.nc),
HeapData(Array(2, 2), Memory.Format.nc)))
model.compile(Phase.TrainingPhase, Array(HeapData(Array(2, 2), Memory.Format.nc)))
model.forward(Tensor[Float](T(T(1, 2), T(3, 4)))) should be(Tensor[Float](T(
T(2, 4),
T(6, 8)
)))
val dnnGrad = model.backward(Tensor[Float](T(T(1, 2), T(3, 4))), T(
Tensor[Float](T(
T(4, 5),
T(6, 7)
))
)).asInstanceOf[Tensor[Float]]
// Copy out of native (DNN) memory into a heap tensor before comparing.
val heapGrad = Tensor[Float](2, 2)
heapGrad.copy(dnnGrad)
heapGrad should be (
Tensor[Float](T(T(8, 10), T(12, 14)))
)
}
// A java-serialized clone must produce the same forward output and gradients
// after its (non-serializable) runtime and primitives are re-initialized.
"caddtable with java serialization" should "work correctly" in {
implicit object Owner extends MemoryOwner {
}
val shape = Array(2, 3, 4, 4)
val _1 = Tensor(shape).rand(-1, 1)
val _2 = Tensor(shape).rand(-1, 1)
val input1 = DnnTensor(shape).copy(_1)
val input2 = DnnTensor(shape).copy(_2)
val cat = CAddTable()
cat.setRuntime(new MklDnnRuntime)
cat.initFwdPrimitives(Array(
HeapData(shape, Memory.Format.nchw),
HeapData(shape, Memory.Format.nchw)), TrainingPhase)
cat.initBwdPrimitives(Array(
HeapData(shape, Memory.Format.nchw),
HeapData(shape, Memory.Format.nchw)), TrainingPhase)
cat.forward(T(input1, input2))
val cloned = SerializationUtils.clone(cat)
cloned.setRuntime(new MklDnnRuntime)
cloned.initFwdPrimitives(Array(
HeapData(shape, Memory.Format.nchw),
HeapData(shape, Memory.Format.nchw)), TrainingPhase)
cloned.initBwdPrimitives(Array(
HeapData(shape, Memory.Format.nchw),
HeapData(shape, Memory.Format.nchw)), TrainingPhase)
cloned.forward(T(input1, input2))
Tools.dense(cat.output) should be (Tools.dense(cloned.output))
val gradOutput = Tensor(shape).rand(-1, 1)
cat.backward(T(input1, input2), gradOutput)
cloned.backward(T(input1, input2), gradOutput)
Tools.dense(cat.gradInput.toTable(1)) should be (Tools.dense(cloned.gradInput.toTable(1)))
Tools.dense(cat.gradInput.toTable(2)) should be (Tools.dense(cloned.gradInput.toTable(2)))
Owner.releaseResources()
}
// Quantized (u8) path: builds the same add graph with int8 native data.
// NOTE(review): this test only runs forward and prints; it has no assertion.
"CAddTable u8" should "be correct" in {
val shape = Array(4, 3, 5, 5)
val model = Sequential()
val concat = ConcatTable()
val cadd = CAddTable()
model.add(Input(shape, Memory.Format.nchw))
model.add(concat).add(cadd)
val input = Tensor[Float](shape).rand(0, 1)
val nativeData1 = NativeData(shape, Memory.Format.nhwc, DataType.U8)
val nativeData2 = NativeData(shape, Memory.Format.nhwc, DataType.U8)
// Scale both branches identically so quantization maps [0, max] onto [0, 255].
nativeData1.setMask(0)
nativeData1.setScales(Array(255.0f / input.clone().abs().max()))
nativeData2.setMask(0)
nativeData2.setScales(Array(255.0f / input.clone().abs().max()))
concat.add(ReorderMemory(nativeData1))
concat.add(ReorderMemory(nativeData2))
model.add(ReorderMemory(HeapData(shape, Memory.Format.nchw)))
model.evaluate()
model.compile(InferencePhase)
model.forward(input)
val seq2 = Sequential()
.add(Input(shape, Memory.Format.nchw))
.add(ConcatTable()
.add(ReorderMemory(NativeData(shape, Memory.Format.nhwc)))
.add(ReorderMemory(NativeData(shape, Memory.Format.nchw))))
.add(CAddTable())
.add(ReorderMemory(HeapData(shape, Memory.Format.nchw)))
seq2.evaluate()
seq2.compile(InferencePhase)
seq2.forward(input)
println()
}
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/mkldnn/CAddTableSpec.scala | Scala | apache-2.0 | 5,247 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.stream.sql
import org.apache.flink.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.plan.logical._
import org.apache.flink.table.runtime.utils.JavaUserDefinedAggFunctions.WeightedAvgWithMerge
import org.apache.flink.table.utils.TableTestUtil._
import org.apache.flink.table.utils.{StreamTableTestUtil, TableTestBase}
import org.junit.Test
class GroupWindowTest extends TableTestBase {
private val streamUtil: StreamTableTestUtil = streamTestUtil()
streamUtil.addTable[(Int, String, Long)](
"MyTable", 'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)
/** Verifies the logical plan for a TUMBLE window with the TUMBLE_START/TUMBLE_END auxiliary functions. */
@Test
def testTumbleFunction(): Unit = {
  streamUtil.tableEnv.registerFunction("weightedAvg", new WeightedAvgWithMerge)

  val sql =
    "SELECT " +
      " COUNT(*), weightedAvg(c, a) AS wAvg, " +
      " TUMBLE_START(rowtime, INTERVAL '15' MINUTE), " +
      " TUMBLE_END(rowtime, INTERVAL '15' MINUTE)" +
      "FROM MyTable " +
      "GROUP BY TUMBLE(rowtime, INTERVAL '15' MINUTE)"

  // Expected: calc projecting window attributes over a tumbling group window
  // aggregate fed by a projection of the needed fields.
  val expected =
    unaryNode(
      "DataStreamCalc",
      unaryNode(
        "DataStreamGroupWindowAggregate",
        unaryNode(
          "DataStreamCalc",
          streamTableNode(0),
          term("select", "rowtime", "c", "a")
        ),
        term("window", TumblingGroupWindow('w$, 'rowtime, 900000.millis)),
        term("select",
          "COUNT(*) AS EXPR$0",
          "weightedAvg(c, a) AS wAvg",
          "start('w$) AS w$start",
          "end('w$) AS w$end",
          "rowtime('w$) AS w$rowtime",
          "proctime('w$) AS w$proctime")
      ),
      term("select", "EXPR$0", "wAvg", "w$start AS EXPR$2", "w$end AS EXPR$3")
    )
  streamUtil.verifySql(sql, expected)
}
/** Verifies the logical plan for a HOP (sliding) window with HOP_START/HOP_END on processing time. */
@Test
def testHoppingFunction(): Unit = {
  streamUtil.tableEnv.registerFunction("weightedAvg", new WeightedAvgWithMerge)

  val sql =
    "SELECT COUNT(*), weightedAvg(c, a) AS wAvg, " +
      " HOP_START(proctime, INTERVAL '15' MINUTE, INTERVAL '1' HOUR), " +
      " HOP_END(proctime, INTERVAL '15' MINUTE, INTERVAL '1' HOUR) " +
      "FROM MyTable " +
      "GROUP BY HOP(proctime, INTERVAL '15' MINUTE, INTERVAL '1' HOUR)"

  // Expected: 1 h sliding window advancing every 15 min on proctime.
  val expected =
    unaryNode(
      "DataStreamCalc",
      unaryNode(
        "DataStreamGroupWindowAggregate",
        unaryNode(
          "DataStreamCalc",
          streamTableNode(0),
          term("select", "proctime", "c", "a")
        ),
        term("window", SlidingGroupWindow('w$, 'proctime, 3600000.millis, 900000.millis)),
        term("select",
          "COUNT(*) AS EXPR$0",
          "weightedAvg(c, a) AS wAvg",
          "start('w$) AS w$start",
          "end('w$) AS w$end",
          "proctime('w$) AS w$proctime")
      ),
      term("select", "EXPR$0", "wAvg", "w$start AS EXPR$2", "w$end AS EXPR$3")
    )
  streamUtil.verifySql(sql, expected)
}
@Test
def testSessionFunction() = {
streamUtil.tableEnv.registerFunction("weightedAvg", new WeightedAvgWithMerge)
val sql =
"SELECT " +
" COUNT(*), weightedAvg(c, a) AS wAvg, " +
" SESSION_START(proctime, INTERVAL '15' MINUTE), " +
" SESSION_END(proctime, INTERVAL '15' MINUTE) " +
"FROM MyTable " +
"GROUP BY SESSION(proctime, INTERVAL '15' MINUTE)"
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "proctime", "c", "a")
),
term("window", SessionGroupWindow('w$, 'proctime, 900000.millis)),
term("select",
"COUNT(*) AS EXPR$0",
"weightedAvg(c, a) AS wAvg",
"start('w$) AS w$start",
"end('w$) AS w$end",
"proctime('w$) AS w$proctime")
),
term("select", "EXPR$0", "wAvg", "w$start AS EXPR$2", "w$end AS EXPR$3")
)
streamUtil.verifySql(sql, expected)
}
@Test
def testExpressionOnWindowAuxFunction() = {
val sql =
"SELECT " +
" COUNT(*), " +
" TUMBLE_END(rowtime, INTERVAL '15' MINUTE) + INTERVAL '1' MINUTE " +
"FROM MyTable " +
"GROUP BY TUMBLE(rowtime, INTERVAL '15' MINUTE)"
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "rowtime")
),
term("window", TumblingGroupWindow('w$, 'rowtime, 900000.millis)),
term("select",
"COUNT(*) AS EXPR$0",
"start('w$) AS w$start",
"end('w$) AS w$end",
"rowtime('w$) AS w$rowtime",
"proctime('w$) AS w$proctime")
),
term("select", "EXPR$0", "+(w$end, 60000) AS EXPR$1")
)
streamUtil.verifySql(sql, expected)
}
@Test
def testExpressionOnWindowHavingFunction() = {
val sql =
"SELECT " +
" COUNT(*), " +
" HOP_START(rowtime, INTERVAL '15' MINUTE, INTERVAL '1' MINUTE) " +
"FROM MyTable " +
"GROUP BY HOP(rowtime, INTERVAL '15' MINUTE, INTERVAL '1' MINUTE) " +
"HAVING " +
" SUM(a) > 0 AND " +
" QUARTER(HOP_START(rowtime, INTERVAL '15' MINUTE, INTERVAL '1' MINUTE)) = 1"
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "rowtime, a")
),
term("window", SlidingGroupWindow('w$, 'rowtime, 60000.millis, 900000.millis)),
term("select",
"COUNT(*) AS EXPR$0",
"SUM(a) AS $f1",
"start('w$) AS w$start",
"end('w$) AS w$end",
"rowtime('w$) AS w$rowtime",
"proctime('w$) AS w$proctime")
),
term("select", "EXPR$0", "w$start AS EXPR$1"),
term("where",
"AND(>($f1, 0), " +
"=(EXTRACT(FLAG(QUARTER), w$start), 1))")
)
streamUtil.verifySql(sql, expected)
}
@Test
def testMultiWindowSqlWithAggregation() = {
val sql =
s"""SELECT
TUMBLE_ROWTIME(zzzzz, INTERVAL '0.004' SECOND),
TUMBLE_END(zzzzz, INTERVAL '0.004' SECOND),
COUNT(`a`) AS `a`
FROM (
SELECT
COUNT(`a`) AS `a`,
TUMBLE_ROWTIME(rowtime, INTERVAL '0.002' SECOND) AS `zzzzz`
FROM MyTable
GROUP BY TUMBLE(rowtime, INTERVAL '0.002' SECOND)
)
GROUP BY TUMBLE(zzzzz, INTERVAL '0.004' SECOND)"""
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "rowtime, a")
),
term("window", TumblingGroupWindow('w$, 'rowtime, 2.millis)),
term("select",
"COUNT(a) AS a",
"start('w$) AS w$start",
"end('w$) AS w$end",
"rowtime('w$) AS w$rowtime",
"proctime('w$) AS w$proctime")
),
term("select", "a", "w$rowtime AS zzzzz")
),
term("window", TumblingGroupWindow('w$, 'zzzzz, 4.millis)),
term("select",
"COUNT(*) AS a",
"start('w$) AS w$start",
"end('w$) AS w$end",
"rowtime('w$) AS w$rowtime",
"proctime('w$) AS w$proctime")
),
term("select", "w$rowtime AS EXPR$0", "w$end AS EXPR$1", "a")
)
streamUtil.verifySql(sql, expected)
}
@Test
def testDecomposableAggFunctions() = {
val sql =
"SELECT " +
" VAR_POP(c), VAR_SAMP(c), STDDEV_POP(c), STDDEV_SAMP(c), " +
" TUMBLE_START(rowtime, INTERVAL '15' MINUTE), " +
" TUMBLE_END(rowtime, INTERVAL '15' MINUTE)" +
"FROM MyTable " +
"GROUP BY TUMBLE(rowtime, INTERVAL '15' MINUTE)"
val expected =
unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "rowtime", "c",
"*(c, c) AS $f2", "*(c, c) AS $f3", "*(c, c) AS $f4", "*(c, c) AS $f5")
),
term("window", TumblingGroupWindow('w$, 'rowtime, 900000.millis)),
term("select",
"SUM($f2) AS $f0",
"SUM(c) AS $f1",
"COUNT(c) AS $f2",
"SUM($f3) AS $f3",
"SUM($f4) AS $f4",
"SUM($f5) AS $f5",
"start('w$) AS w$start",
"end('w$) AS w$end",
"rowtime('w$) AS w$rowtime",
"proctime('w$) AS w$proctime")
),
term("select",
"CAST(/(-($f0, /(*($f1, $f1), $f2)), $f2)) AS EXPR$0",
"CAST(/(-($f3, /(*($f1, $f1), $f2)), CASE(=($f2, 1), null, -($f2, 1)))) AS EXPR$1",
"CAST(POWER(/(-($f4, /(*($f1, $f1), $f2)), $f2), 0.5)) AS EXPR$2",
"CAST(POWER(/(-($f5, /(*($f1, $f1), $f2)), CASE(=($f2, 1), null, -($f2, 1))), 0.5)) " +
"AS EXPR$3",
"w$start AS EXPR$4",
"w$end AS EXPR$5")
)
streamUtil.verifySql(sql, expected)
}
}
| mylog00/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/stream/sql/GroupWindowTest.scala | Scala | apache-2.0 | 10,462 |
package sri.web.examples.routerexample
import sri.core._
import sri.web.all._
import sri.web.examples.Button
import sri.web.router
import sri.web.router.{WebDynamicPage, WebRouterComponent}
import sri.web.styles.WebStyleSheet
import sri.web.vdom.htmltags._
import scala.scalajs.js
import scala.scalajs.js.annotation.ScalaJSDefined
import scala.scalajs.js.{UndefOr => U, undefined}
object ItemsLeftNav {

  /**
   * Left-hand navigation column listing items 1 to 9. Clicking an entry
   * navigates to the dynamic details page for that item; the entry matching
   * the current route's placeholder is highlighted.
   */
  @ScalaJSDefined
  class Component extends WebRouterComponent[Unit, Unit] {

    def render() = {
      div(style = styles.container)(
        // `1 until 10` is already a Seq of 1..9; the previous `.toSeq` was redundant.
        (1 until 10).map(i => getItem(i.toString, ItemsRouteModule.Details))
      )
    }

    /**
     * Renders one menu entry.
     *
     * @param id   item identifier; also used as the React key and compared
     *             against the current route's placeholder for highlighting
     * @param page dynamic route target for the item's details view
     */
    def getItem(id: String, page: WebDynamicPage[Int]) = {
      Button(style = styles.menuItem(id == currentRoute.placeholder.getOrElse("")),
        key = id,
        onPress = () => navigateToDynamic(page, id))(
        span()(s"Item $id")
      )
    }
  }

  object styles extends WebStyleSheet {

    // Fixed-width flex column with a grey right border separating the menu
    // from the content area.
    val container = style(width := 190,
      paddingTop := 40,
      display.flex,
      flexDirection.column,
      border := 0,
      borderRightWidth := "1px",
      borderStyle := "solid",
      borderRightColor := "grey")

    /** Menu entry style; the selected entry gets a grey background. */
    def menuItem(selected: Boolean) = {
      val bg = if (selected) "grey" else "transparent"
      style(height := 40,
        justifyContent.center,
        display.flex,
        flexDirection.column,
        cursor.pointer,
        backgroundColor := bg,
        alignItems.center)
    }
  }

  // Inject the router context so WebRouterComponent members
  // (currentRoute, navigateToDynamic) are available to the component.
  js.constructorOf[Component].contextTypes = router.routerContextTypes

  def apply(key: js.UndefOr[String] = js.undefined, ref: js.Function1[Component, Unit] = null) = makeElementNoProps[Component](key = key, ref = ref)
}
| chandu0101/sri | web-examples/src/main/scala/sri/web/examples/routerexample/ItemsLeftNav.scala | Scala | apache-2.0 | 1,696 |
package blanky.utils
import org.scalatest._
import spray.routing.HttpService
import spray.testkit.ScalatestRouteTest
/**
 * Verifies that routes wrapped in the [[CorsSupport]] `cors` directive answer
 * preflight OPTIONS requests and attach the expected CORS response headers to
 * ordinary GET/POST responses.
 */
class CorsSupportSpec extends FlatSpec
  with ScalatestRouteTest
  with HttpService
  with CorsSupport
  with Matchers {

  def actorRefFactory = system // Connect the service API to the test ActorSystem

  // Minimal route under test: GET and POST on /test, both wrapped in `cors`.
  val testRoute = path("test") {
    cors {
      get {
        complete((200, "'CORS it works!"))
      } ~
        post {
          complete((200, "'CORS I'll update that!"))
        }
    }
  }

  // The wrapped route still serves its normal GET/POST responses.
  "A CORS route" should "work" in {
    Get("/test") ~> testRoute ~> check {
      status.intValue should be(200)
      responseAs[String] should be("'CORS it works!")
    }
    Post("/test") ~> testRoute ~> check {
      status.intValue should be(200)
      responseAs[String] should be("'CORS I'll update that!")
    }
  }

  // Preflight: OPTIONS must succeed and advertise exactly the methods the
  // route supports (plus OPTIONS itself), not unsupported ones.
  it should "respond to OPTIONS requests properly" in {
    Options("/test") ~> testRoute ~> check {
      status.intValue should be(200)
      header("Access-Control-Allow-Headers").isDefined should be(true)
      header("Access-Control-Max-Age").isDefined should be(true)
      val allowMethods = header("Access-Control-Allow-Methods").get.value.split(", ")
      Array("OPTIONS", "POST", "GET") foreach {
        allowMethods should contain(_)
      }
      Array("PUT", "DELETE") foreach {
        allowMethods should not contain (_)
      }
    }
  }

  // Every (non-preflight) response must carry Access-Control-Allow-Origin.
  it should "respond to all requests with the Access-Control-Allow-Origin header" in {
    Get("/test") ~> testRoute ~> check {
      header("Access-Control-Allow-Origin").isDefined should be(true)
    }
    Post("/test") ~> testRoute ~> check {
      header("Access-Control-Allow-Origin").isDefined should be(true)
    }
  }
}
} | vadim-shb/blanky | server/src/test/scala/blanky/utils/CorsSupportSpec.scala | Scala | mit | 1,726 |
package com.lunatic.mlx.kddcup99.mllib.thresholds
import com.lunatic.mlx.kddcup99.mllib.metadata.{Prediction, KMeansXMD}
import org.apache.spark.rdd.RDD
/**
 * Threshold generator that assigns the same fixed anomaly threshold to every
 * cluster of the model; initially used as a comparison baseline.
 */
case class ConstantThreshold(kMeansXMD: KMeansXMD, threshold: Double) extends AnomalyThresholdsGenerator {

  /** Returns one copy of `threshold` per cluster centre; `predictions` is not consulted. */
  def generateThresholds(predictions: RDD[Prediction]): Array[Double] =
    Array.fill(kMeansXMD.model.clusterCenters.length)(threshold)
}
| tupol/sparx-mllib | src/main/scala/com/lunatic/mlx/kddcup99/mllib/thresholds/ConstantThreshold.scala | Scala | apache-2.0 | 447 |
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.actor.dungeon
import scala.collection.immutable
import akka.actor.{ InvalidActorNameException, ChildStats, ChildRestartStats, ChildNameReserved, ActorRef }
import akka.dispatch.sysmsg.{ EarliestFirstSystemMessageList, SystemMessageList, LatestFirstSystemMessageList, SystemMessage }
import akka.util.Collections.{ EmptyImmutableSeq, PartialImmutableValuesIterable }
/**
 * INTERNAL API
 *
 * Immutable view of an actor's children. Every mutating operation returns a
 * new container; concrete implementations live in the companion object.
 */
private[akka] trait ChildrenContainer {

  /** Returns a container that additionally maps `name` to `stats`. */
  def add(name: String, stats: ChildRestartStats): ChildrenContainer

  /** Returns a container without the entry for `child`'s name. */
  def remove(child: ActorRef): ChildrenContainer

  /** Looks up the stats (or name reservation) registered under `name`. */
  def getByName(name: String): Option[ChildStats]

  /** Looks up restart stats for the given actor reference. */
  def getByRef(actor: ActorRef): Option[ChildRestartStats]

  /** All child actor references currently registered (reservations excluded). */
  def children: immutable.Iterable[ActorRef]

  /** Restart stats of all registered children (reservations excluded). */
  def stats: immutable.Iterable[ChildRestartStats]

  /** Marks `actor` as scheduled for termination. */
  def shallDie(actor: ActorRef): ChildrenContainer

  // reserve that name or throw an exception
  def reserve(name: String): ChildrenContainer

  // cancel a reservation
  def unreserve(name: String): ChildrenContainer

  /** True while this cell is shutting down (see Termination reason). */
  def isTerminating: Boolean = false

  /** True when no restart/creation/termination is in progress. */
  def isNormal: Boolean = true
}
/**
 * INTERNAL API
 *
 * This object holds the classes performing the logic of managing the children
 * of an actor, hence they are intimately tied to ActorCell.
 */
private[akka] object ChildrenContainer {

  // Reasons for which a cell's message processing may be suspended; the
  // handling logic lives in ActorCell.scala.
  sealed trait SuspendReason
  case object UserRequest extends SuspendReason
  // careful with those system messages, all handling to be taking place in ActorCell.scala!
  final case class Recreation(cause: Throwable) extends SuspendReason with WaitingForChildren
  final case class Creation() extends SuspendReason with WaitingForChildren
  case object Termination extends SuspendReason

  // Lazy view over the stats map's values that yields only ChildRestartStats
  // entries, silently skipping ChildNameReserved placeholders.
  class ChildRestartsIterable(stats: immutable.MapLike[_, ChildStats, _]) extends PartialImmutableValuesIterable[ChildStats, ChildRestartStats] {
    override final def apply(c: ChildStats) = c.asInstanceOf[ChildRestartStats]
    override final def isDefinedAt(c: ChildStats) = c.isInstanceOf[ChildRestartStats]
    override final def valuesIterator = stats.valuesIterator
  }

  // Same as above, but projecting out the child ActorRef of each entry.
  class ChildrenIterable(stats: immutable.MapLike[_, ChildStats, _]) extends PartialImmutableValuesIterable[ChildStats, ActorRef] {
    override final def apply(c: ChildStats) = c.asInstanceOf[ChildRestartStats].child
    override final def isDefinedAt(c: ChildStats) = c.isInstanceOf[ChildRestartStats]
    override final def valuesIterator = stats.valuesIterator
  }

  trait WaitingForChildren

  // Shared behavior for containers holding no children; add/reserve switch
  // over to a NormalChildrenContainer.
  trait EmptyChildrenContainer extends ChildrenContainer {
    val emptyStats = immutable.TreeMap.empty[String, ChildStats]
    override def add(name: String, stats: ChildRestartStats): ChildrenContainer = new NormalChildrenContainer(emptyStats.updated(name, stats))
    override def remove(child: ActorRef): ChildrenContainer = this
    override def getByName(name: String): Option[ChildRestartStats] = None
    override def getByRef(actor: ActorRef): Option[ChildRestartStats] = None
    override def children: immutable.Iterable[ActorRef] = EmptyImmutableSeq
    override def stats: immutable.Iterable[ChildRestartStats] = EmptyImmutableSeq
    override def shallDie(actor: ActorRef): ChildrenContainer = this
    override def reserve(name: String): ChildrenContainer = new NormalChildrenContainer(emptyStats.updated(name, ChildNameReserved))
    override def unreserve(name: String): ChildrenContainer = this
  }

  /**
   * This is the empty container, shared among all leaf actors.
   */
  object EmptyChildrenContainer extends EmptyChildrenContainer {
    override def toString = "no children"
  }

  /**
   * This is the empty container which is installed after the last child has
   * terminated while stopping; it is necessary to distinguish from the normal
   * empty state while calling handleChildTerminated() for the last time.
   */
  object TerminatedChildrenContainer extends EmptyChildrenContainer {
    // a terminated cell accepts no new children or reservations
    override def add(name: String, stats: ChildRestartStats): ChildrenContainer = this
    override def reserve(name: String): ChildrenContainer =
      throw new IllegalStateException("cannot reserve actor name '" + name + "': already terminated")
    override def isTerminating: Boolean = true
    override def isNormal: Boolean = false

    override def toString = "terminated"
  }

  /**
   * Normal children container: we do have at least one child, but none of our
   * children are currently terminating (which is the time period between
   * calling context.stop(child) and processing the ChildTerminated() system
   * message).
   */
  class NormalChildrenContainer(val c: immutable.TreeMap[String, ChildStats]) extends ChildrenContainer {

    override def add(name: String, stats: ChildRestartStats): ChildrenContainer = new NormalChildrenContainer(c.updated(name, stats))

    override def remove(child: ActorRef): ChildrenContainer = NormalChildrenContainer(c - child.path.name)

    override def getByName(name: String): Option[ChildStats] = c.get(name)

    // Only a hit when the entry is real restart stats AND its stored child
    // equals the given actor reference.
    override def getByRef(actor: ActorRef): Option[ChildRestartStats] = c.get(actor.path.name) match {
      case c @ Some(crs: ChildRestartStats) if (crs.child == actor) ⇒ c.asInstanceOf[Option[ChildRestartStats]]
      case _ ⇒ None
    }

    override def children: immutable.Iterable[ActorRef] =
      if (c.isEmpty) EmptyImmutableSeq else new ChildrenIterable(c)

    override def stats: immutable.Iterable[ChildRestartStats] =
      if (c.isEmpty) EmptyImmutableSeq else new ChildRestartsIterable(c)

    // First stop request transitions into the terminating representation.
    override def shallDie(actor: ActorRef): ChildrenContainer = TerminatingChildrenContainer(c, Set(actor), UserRequest)

    override def reserve(name: String): ChildrenContainer =
      if (c contains name)
        throw new InvalidActorNameException(s"actor name [$name] is not unique!")
      else new NormalChildrenContainer(c.updated(name, ChildNameReserved))

    // Only removes the entry if it is still just a reservation.
    override def unreserve(name: String): ChildrenContainer = c.get(name) match {
      case Some(ChildNameReserved) ⇒ NormalChildrenContainer(c - name)
      case _ ⇒ this
    }

    // avoid rendering huge strings for cells with many children
    override def toString =
      if (c.size > 20) c.size + " children"
      else c.mkString("children:\\n ", "\\n ", "")
  }

  object NormalChildrenContainer {
    // collapse back to the shared empty container when the last entry is removed
    def apply(c: immutable.TreeMap[String, ChildStats]): ChildrenContainer =
      if (c.isEmpty) EmptyChildrenContainer
      else new NormalChildrenContainer(c)
  }

  /**
   * Waiting state: there are outstanding termination requests (i.e. context.stop(child)
   * was called but the corresponding ChildTerminated() system message has not yet been
   * processed). There could be no specific reason (UserRequested), we could be Restarting
   * or Terminating.
   *
   * Removing the last child which was supposed to be terminating will return a different
   * type of container, depending on whether or not children are left and whether or not
   * the reason was “Terminating”.
   */
  final case class TerminatingChildrenContainer(c: immutable.TreeMap[String, ChildStats], toDie: Set[ActorRef], reason: SuspendReason)
    extends ChildrenContainer {

    override def add(name: String, stats: ChildRestartStats): ChildrenContainer = copy(c.updated(name, stats))

    // When the last pending child is gone, the `reason` decides the successor
    // container (fully terminated vs. back to normal).
    override def remove(child: ActorRef): ChildrenContainer = {
      val t = toDie - child
      if (t.isEmpty) reason match {
        case Termination ⇒ TerminatedChildrenContainer
        case _ ⇒ NormalChildrenContainer(c - child.path.name)
      }
      else copy(c - child.path.name, t)
    }

    override def getByName(name: String): Option[ChildStats] = c.get(name)

    // Only a hit when the stored child equals the given actor reference.
    override def getByRef(actor: ActorRef): Option[ChildRestartStats] = c.get(actor.path.name) match {
      case c @ Some(crs: ChildRestartStats) if (crs.child == actor) ⇒ c.asInstanceOf[Option[ChildRestartStats]]
      case _ ⇒ None
    }

    override def children: immutable.Iterable[ActorRef] =
      if (c.isEmpty) EmptyImmutableSeq else new ChildrenIterable(c)

    override def stats: immutable.Iterable[ChildRestartStats] =
      if (c.isEmpty) EmptyImmutableSeq else new ChildRestartsIterable(c)

    override def shallDie(actor: ActorRef): ChildrenContainer = copy(toDie = toDie + actor)

    override def reserve(name: String): ChildrenContainer = reason match {
      case Termination ⇒ throw new IllegalStateException("cannot reserve actor name '" + name + "': terminating")
      case _ ⇒
        if (c contains name)
          throw new InvalidActorNameException(s"actor name [$name] is not unique!")
        else copy(c = c.updated(name, ChildNameReserved))
    }

    // Only removes the entry if it is still just a reservation.
    override def unreserve(name: String): ChildrenContainer = c.get(name) match {
      case Some(ChildNameReserved) ⇒ copy(c = c - name)
      case _ ⇒ this
    }

    override def isTerminating: Boolean = reason == Termination
    override def isNormal: Boolean = reason == UserRequest

    override def toString =
      if (c.size > 20) c.size + " children"
      else c.mkString("children (" + toDie.size + " terminating):\\n ", "\\n ", "\\n") + toDie
  }
}
| jmnarloch/akka.js | akka-js-actor/jvm/src/main/scala/akka/actor/dungeon/ChildrenContainer.scala | Scala | bsd-3-clause | 9,095 |
package views.json.api.Tag
import play.api.libs.json.{JsValue,Json}
import com.overviewdocs.models.Tag
object index {

  /** Renders the given tags as a JSON array, delegating each element to `show`. */
  def apply(tags: Seq[Tag]): JsValue =
    Json.toJson(tags.map(tag => show(tag)))
}
| overview/overview-server | web/app/views/api/Tag/index.json.scala | Scala | agpl-3.0 | 239 |
package net.ruippeixotog.scalafbp.component.stream
import akka.actor.Props
import spray.json.JsValue
import spray.json.DefaultJsonProtocol._
import net.ruippeixotog.scalafbp.component._
/**
 * FBP component that pairs the two input streams element-wise: the n-th
 * output tuple combines the n-th element of `in1` with the n-th of `in2`.
 */
case object Zip extends Component {
  val name = "stream/Zip"
  val description = "Combines elements from two streams in pairs"
  val icon = Some("dropbox")

  // Two input ports carrying arbitrary JSON values.
  val in1Port = InPort[JsValue]("in1", "The first stream")
  val in2Port = InPort[JsValue]("in2", "The second stream")
  val inPorts = List(in1Port, in2Port)

  val outPort = OutPort[(JsValue, JsValue)]("out", "The zipped stream")
  val outPorts = List(outPort)

  val instanceProps = Props(new ComponentActor(this) {
    // Combine the two inputs pairwise, stop this actor once the zipped
    // stream completes, and forward the tuples to the output port.
    in1Port.stream.zip(in2Port.stream)
      .doOnCompleted(context.stop(self))
      .pipeTo(outPort)
  })
}
| ruippeixotog/scalafbp | components/stream/src/main/scala/net/ruippeixotog/scalafbp/component/stream/Zip.scala | Scala | mit | 775 |
package com.eevolution.context.dictionary.domain.api.service
import com.eevolution.context.dictionary.api
import com.eevolution.context.dictionary.domain.model.PackageExport
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 10/11/17.
*/
/**
* Package Export Service
*/
trait PackageExportService extends api.Service[PackageExport, Int] {
  // Marker trait: no operations beyond the generic service contract inherited
  // from api.Service[PackageExport, Int] (Int being the entity identifier type).
}
| adempiere/ADReactiveSystem | dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/api/service/PackageExportService.scala | Scala | gpl-3.0 | 1,233 |
package net.benchmark.akka.http.world
import akka.http.scaladsl.server.Directives.{complete, path}
import de.heikoseeberger.akkahttpcirce.ErrorAccumulatingCirceSupport._
class DbRoute(wr: WorldRepository) {

  /** Draws a uniformly distributed id in the range [1, 10000]. */
  private def randomId(): Int = {
    val rng = java.util.concurrent.ThreadLocalRandom.current()
    1 + rng.nextInt(10000)
  }

  /** Route for path "db": completes with a single randomly chosen world row. */
  def route() = {
    path("db") {
      complete(wr.require(randomId()))
    }
  }
}
| sumeetchhetri/FrameworkBenchmarks | frameworks/Scala/akka-http/akka-http-slick-postgres/src/main/scala/net/benchmark/akka/http/world/DbRoute.scala | Scala | bsd-3-clause | 399 |
/*
* Copyright (C) 2016 DANS - Data Archiving and Networked Services (info@dans.knaw.nl)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.multideposit.actions
import better.files.File
import cats.data.ValidatedNec
import cats.instances.list._
import cats.syntax.either._
import cats.syntax.traverse._
import javax.naming.directory.Attributes
import nl.knaw.dans.easy.multideposit.FfprobeRunner.FfprobeError
import nl.knaw.dans.easy.multideposit.PathExplorer.StagingPathExplorer
import nl.knaw.dans.easy.multideposit._
import nl.knaw.dans.easy.multideposit.model.{ AVFileMetadata, Deposit, DepositId, DepositorUserId }
import nl.knaw.dans.lib.logging.DebugEnhancedLogging
/**
 * Preconditions checked before a deposit is staged: the staging directories
 * must not exist yet, all A/V files must be probe-able by ffprobe, and the
 * depositor must be a known, active LDAP user.
 */
class ValidatePreconditions(ldap: Ldap, ffprobe: FfprobeRunner) extends DebugEnhancedLogging {

  // TODO refactor to Validated

  /** Runs all precondition checks for `deposit`, failing on the first violation. */
  def validateDeposit(deposit: Deposit)(implicit stage: StagingPathExplorer): FailFast[Unit] = {
    val id = deposit.depositId
    logger.debug(s"validating deposit $id")
    for {
      _ <- checkDirectoriesDoNotExist(id)(stage.stagingDir(id), stage.stagingBagDir(id), stage.stagingBagMetadataDir(id))
      _ <- checkAudioVideoNotCorrupt(deposit)
      _ <- checkDepositorUserId(deposit)
    } yield ()
  }

  /** Fails with an ActionError if any of the given staging directories already exists. */
  def checkDirectoriesDoNotExist(depositId: DepositId)(directories: File*): FailFast[Unit] = {
    logger.debug(s"check directories don't exist yet: ${ directories.mkString("[", ", ", "]") }")
    directories.find(_.exists)
      .map(file => ActionError(s"The deposit for dataset $depositId already exists in $file.").asLeft)
      .getOrElse(().asRight)
  }

  /**
   * Probes every A/V file of the deposit with ffprobe. All failures are
   * accumulated (ValidatedNec) and reported together in a single InvalidInput.
   */
  def checkAudioVideoNotCorrupt(deposit: Deposit): Either[InvalidInput, Unit] = {
    logger.debug("check that A/V files can be successfully probed by ffprobe")
    deposit.files.collect { case fmd: AVFileMetadata => fmd.filepath }
      .toList
      .traverse[ValidatedNec[FfprobeError, *], Unit](ffprobe.run(_).toValidatedNec)
      .leftMap(errors => {
        // one " - File: <path>, exit code: <code>" line per failed probe
        val ffProbeErrors = errors.toNonEmptyList.toList
          .map { case FfprobeError(t, e, _) => s" - File: $t, exit code: $e" }
          .mkString("\n")
        InvalidInput(deposit.row, "Possibly found corrupt A/V files. Ffprobe failed when probing the following files:\\n" + ffProbeErrors)
      })
      .map(_ => ())
      .toEither
  }

  /**
   * Looks up the depositor in LDAP: exactly one entry must exist, and that
   * entry must pass [[validateDepositorUserId]].
   */
  def checkDepositorUserId(deposit: Deposit): FailFast[Unit] = {
    logger.debug("check that the depositor is an active user")
    val depositorUserId = deposit.depositorUserId
    ldap.query(depositorUserId)(validateDepositorUserId(deposit.row, depositorUserId))
      .flatMap {
        case Seq() => InvalidInput(deposit.row, s"depositorUserId '$depositorUserId' is unknown").asLeft
        case Seq(head) => head.asRight
        case _ => ActionError(s"There appear to be multiple users with id '$depositorUserId'").asLeft
      }
      // `head` is itself the FailFast result of the validation callback; flatten it
      .flatMap(identity)
  }

  /**
   * Validates a single LDAP entry: the `uid` attribute must match exactly
   * (spelling/case) and `dansState` must be ACTIVE.
   */
  def validateDepositorUserId(row: Int, depositorUserId: DepositorUserId)(attrs: Attributes): FailFast[Unit] = {
    lazy val activeState = Option(attrs.get("dansState")).exists(_.get().toString == "ACTIVE")
    Option(attrs.get("uid"))
      .map(_.get().toString)
      .map {
        case `depositorUserId` if activeState => ().asRight
        case `depositorUserId` => InvalidInput(row, s"The depositor '$depositorUserId' is not an active user").asLeft
        case mismatch => InvalidInput(row, s"Depositor '$depositorUserId' does not exist in LDAP. We've found '$mismatch', which is slightly different. Please check for upper/lowercase spelling mistakes.").asLeft
      }
      .getOrElse {
        InvalidInput(row, s"Depositor '$depositorUserId' does not exist in LDAP.").asLeft
      }
  }
}
| DANS-KNAW/easy-process-sip | src/main/scala/nl.knaw.dans.easy.multideposit/actions/ValidatePreconditions.scala | Scala | apache-2.0 | 4,190 |
/*
* ============= Ryft-Customized BSD License ============
* Copyright (c) 2015, Ryft Systems, Inc.
* All rights reserved.
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software must display the following acknowledgement:
* This product includes software developed by Ryft Systems, Inc.
* 4. Neither the name of Ryft Systems, Inc. nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY RYFT SYSTEMS, INC. ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL RYFT SYSTEMS, INC. BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* ============
*/
package com.ryft.spark.connector.domain
import spray.json._
/** Result wrapper holding the number of matches found by a search. */
case class RDDCount (matches: Long)

/** spray-json protocol supplying the JSON (de)serializer for [[RDDCount]]. */
object RDDCountProtocol extends DefaultJsonProtocol {
  // single-field format derived from the RDDCount case-class constructor
  implicit val rddCountFormat = jsonFormat1(RDDCount)
}
| getryft/spark-ryft-connector | spark-ryft-connector/src/main/scala/com/ryft/spark/connector/domain/RDDCount.scala | Scala | bsd-3-clause | 2,002 |
package com.eclipsesource.schema
import com.eclipsesource.schema.drafts.{Version4, Version7}
import com.eclipsesource.schema.test.JsonSpec
import org.specs2.mutable.Specification
/**
 * Runs the "additionalProperties" validation test cases (loaded via
 * [[JsonSpec.validate]]) against the schema validator for both draft-4 and
 * draft-7 of JSON Schema.
 */
class AdditionalPropertiesSpec extends Specification with JsonSpec {

  "additionalProperties draft4" in {
    import Version4._
    implicit val validator: SchemaValidator = SchemaValidator(Some(Version4))
    validate("additionalProperties", "draft4")
  }

  "additionalProperties draft7" in {
    import Version7._
    implicit val validator: SchemaValidator = SchemaValidator(Some(Version7))
    validate("additionalProperties", "draft7")
  }
}
| eclipsesource/play-json-schema-validator | src/test/scala/com/eclipsesource/schema/AdditionalPropertiesSpec.scala | Scala | apache-2.0 | 630 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate.converter
import org.fusesource.scalate.support.Text
import util.matching.Regex.Match
import org.fusesource.scalate.util.Log
/**
 * Translates JSP Expression Language (EL) snippets into roughly equivalent
 * Scala expressions via a series of textual rewrites.
 */
object ExpressionLanguage {

  // EL word operators and their Scala equivalents
  protected val operators = Map("eq" -> "==", "ne" -> "!=",
    "gt" -> ">", "ge" -> ">=",
    "lt" -> "<", "le" -> "<=",
    "not" -> "!")
  protected val notEmptyRegex = """(\\s|^)(not\\s+empty)\\s(.+)""".r
  protected val emptyRegex = """(\\s|^)(empty)\\s(.+)""".r
  protected val lengthRegex = """fn:length\\((.+)\\)""".r

  /**
   * Converts one EL expression to Scala source text. The rewrites are purely
   * textual: indexing brackets, quotes, empty-checks, word operators,
   * property access (`foo.bar` -> `foo.getBar`) and `fn:length`.
   */
  def asScala(el: String): String = {
    // lets switch the EL style indexing to Scala parens and switch single quotes to doubles
    var text = el.replace('[', '(').
      replace(']', ')').
      replace('\\'', '\\"')

    // no leading space when the match starts at the very beginning of the text
    def space(m: Match): String = if (m.start == 0) "" else " "

    // "not empty xxx" => "!(xxx isEmpty)"
    text = notEmptyRegex.replaceAllIn(text, { m => space(m) + "!(" + m.subgroups.last + " isEmpty)" })

    // "empty whatever" => "whatever isEmpty"
    text = emptyRegex.replaceAllIn(text, { m => space(m) + m.subgroups.last + " isEmpty" })

    // replace EL operators (word operators surrounded by whitespace)
    for ((a, b) <- operators) {
      text = text.replaceAll("(\\\\s|^)" + a + "\\\\s", " " + b + " ")
    }

    // foo.bar => foo.getBar
    // NOTE(review): the first dot-separated segment is kept verbatim (the
    // receiver); every later segment starting with an identifier character is
    // turned into a JavaBean getter call.
    var first = true
    text = text.split('.').map(s =>
      if (!first && s.length > 0 && s(0).isUnicodeIdentifierStart) {
        "get" + s.capitalize
      } else {
        first = false
        s
      }).mkString(".")

    // fn:length(foo) => foo.size
    text = lengthRegex.replaceAllIn(text, { m => m.subgroups.last + ".size" })

    text
  }
}
/**
 * Mutable text sink that tracks an indentation level: every line written via
 * `println(String)` is prefixed with `indentText` once per current level.
 */
trait IndentWriter {

  /** Buffer receiving all output; replaced wholesale by [[reset]]. */
  var out = new StringBuilder

  /** Current nesting depth applied at the start of each printed line. */
  var indentLevel: Int = 0

  /** Text emitted once per indent level. */
  var indentText: String = " "

  /** Drops everything written so far. */
  def reset(): Unit = {
    out = new StringBuilder
  }

  /** Evaluates `op` with the indent level one deeper, restores it, and returns the result. */
  def indent[T](op: => T): T = {
    indentLevel += 1
    val result = op
    indentLevel -= 1
    result
  }

  /** Writes the indentation prefix, then `line` and a line terminator; returns `this` for chaining. */
  def println(line: String): this.type = {
    var remaining = indentLevel
    while (remaining > 0) {
      print(indentText)
      remaining -= 1
    }
    print(line)
    println()
    this
  }

  /** Appends `value` verbatim to the buffer. */
  def print(value: AnyRef): Unit = {
    out.append(value)
  }

  /** Emits the line terminator. */
  def println(): Unit = print("\\n")

  /** Everything written so far. */
  def text = out.toString
}
object JspConverter extends Log
class JspConverter extends IndentWriter {
import JspConverter._
var coreLibraryPrefix: String = "c"
var whenCount = 0
def convert(jsp: String): String = {
reset
val parser = new JspParser
val result = parser.parsePage(jsp)
convert(result)
text
}
def convert(list: List[PageFragment]): Unit = {
for (r <- list) {
convert(r)
}
}
def convert(fragment: PageFragment): Unit = fragment match {
case e: Element => transform(e)
case _ => print(fragment.toString)
}
def transform(e: Element): Unit = {
e match {
// core JSTL library
case Element(QualifiedName(coreLibraryPrefix, name), attributes, body) =>
name match {
case "choose" =>
whenCount = 0
convert(body)
print("#end")
case "forEach" =>
val varExp = e.attributeMap.getOrElse("var", textExpression("i"))
print("#for(" + asUnquotedParam(varExp) + " <- ")
e.attributeMap.get("items") match {
case Some(exp) =>
print(asParam(exp) + ")")
case _ =>
val begin = e.attribute("begin")
val end = e.attribute("end")
print(asUnquotedParam(begin) + ".to(" + asUnquotedParam(end))
e.attributeMap.get("step") match {
case Some(step) => print(", " + asUnquotedParam(step))
case _ =>
}
print("))")
}
convert(body)
print("#end")
case "if" =>
val exp = e.attribute("test")
print("#if(" + asParam(exp) + ")")
convert(body)
print("#end")
case "otherwise" =>
print("#else")
convert(body)
case "out" =>
val exp = e.attribute("value")
print("${")
e.attributeMap.get("escapeXml") match {
case Some(TextExpression(Text("true"))) => print("escape(" + asParam(exp) + ")")
case Some(TextExpression(Text("false"))) => print("unescape(" + asParam(exp) + ")")
case Some(e) => print("value(" + asParam(exp) + ", " + asUnquotedParam(e) + ")")
case _ => print(asParam(exp))
}
print("}")
case "set" =>
val exp = e.attribute("value")
val name = e.attribute("var")
print("#{ var " + asUnquotedParam(name) + " = " + asParam(exp) + " }#")
case "when" =>
val exp = e.attribute("test")
print("#" + (if (whenCount == 0) "if" else "elseif") + "(" + asParam(exp) + ")")
whenCount += 1
convert(body)
case "url" =>
val exp = e.attribute("value")
print("${uri(" + asParam(exp) + ")}")
case _ =>
warn("No converter available for tag <" + coreLibraryPrefix + ":" + name + ">: " + e)
print(e)
}
case _ => print(e)
}
}
/**
 * Emits a self-closing XML element for `e`, rendering each attribute value
 * through asParam so embedded expressions are converted too.
 */
def print(e: Element): Unit = {
print("<" + e.qualifiedName)
for (a <- e.attributes) {
print(" " + a.name + "=\\"" + asParam(a.value) + "\\"")
}
print("/>")
}
protected def textExpression(s: String) = TextExpression(Text(s))
/**
 * Returns the text of an expression rendered as an unquoted method parameter
 * (e.g. a numeric literal or identifier).
 */
protected def asUnquotedParam(exp: Expression): String = exp.asUnquotedParam
/**
 * Returns the text of an expression rendered as a regular method parameter.
 */
protected def asParam(exp: Expression): String = exp.asParam
/**
 * Returns the text of an expression rendered back as JSP source text.
 * (Doc previously copy-pasted from asParam.)
 */
protected def asJsp(exp: Expression): String = exp.asJsp
}
| maslovalex/scalate | scalate-jsp-converter/src/main/scala/org/fusesource/scalate/converter/JspConverter.scala | Scala | apache-2.0 | 6,621 |
/*
* Copyright 2017 Sumo Logic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ws.epigraph.java.service
import ws.epigraph.java.NewlineStringInterpolator.{NewlineHelper, i}
import ws.epigraph.java.{ObjectGen, ObjectGenContext}
import ws.epigraph.java.service.ServiceObjectGenerators.gen
import ws.epigraph.schema.operations.{CustomOperationDeclaration, HttpMethod}
/**
* @author <a href="mailto:konstantin.sobolev@gmail.com">Konstantin Sobolev</a>
*/
class CustomOperationDeclarationGen(od: CustomOperationDeclaration)
extends ObjectGen[CustomOperationDeclaration](od) {
// Renders the constructor call for a CustomOperationDeclaration. The template's
// whitespace is part of the generated source, so it must not be reformatted.
override protected def generateObject(o: String, ctx: ObjectGenContext): String =
/*@formatter:off*/sn"""\\
new $o(
${generateHttpMethod(od.method(), ctx)},
${gen(od.name(), ctx)},
${i(gen(od.annotations(), ctx))},
${i(gen(od.path(), ctx))},
${i(gen(od.inputProjection(), ctx))},
${i(gen(od.outputProjection(), ctx))},
${gen(od.location(), ctx)}
)"""/*@formatter:on*/
// Emits a qualified reference such as "HttpMethod.GET", registering the import via ctx.use.
// NOTE(review): the match covers GET/POST/PUT/DELETE only; any other HttpMethod value
// would raise a MatchError — confirm HttpMethod has no further constants.
private def generateHttpMethod(m: HttpMethod, ctx: ObjectGenContext): String =
ctx.use(classOf[HttpMethod].getName) + "." +
(m match {
case HttpMethod.GET => "GET"
case HttpMethod.POST => "POST"
case HttpMethod.PUT => "PUT"
case HttpMethod.DELETE => "DELETE"
})
}
| SumoLogic/epigraph | java/codegen/src/main/scala/ws/epigraph/java/service/CustomOperationDeclarationGen.scala | Scala | apache-2.0 | 1,790 |
package argonaut
import monocle.Prism
import monocle.macros.{GenPrism, GenIso}
object CursorOpMonocle extends CursorOpMonocles
/** Monocle optics over argonaut's CursorOp ADT. */
trait CursorOpMonocles {

  /** Prism focusing on the Reattempt operation. */
  val reattempt: Prism[CursorOp, Unit] =
    GenPrism[CursorOp, Reattempt.type] composeIso GenIso.unit[Reattempt.type]

  /** Prism focusing on El operations, exposing the element and its success flag. */
  val el: Prism[CursorOp, (CursorOpElement, Boolean)] =
    Prism[CursorOp, (CursorOpElement, Boolean)] {
      case El(op, success) => Some((op, success))
      case Reattempt       => None
    } {
      case (op, success) => El(op, success)
    }
}
| jedws/argonaut | argonaut-monocle/src/main/scala/argonaut/CursorOpMonocle.scala | Scala | bsd-3-clause | 497 |
import org.scalatest.{Matchers, FunSuite}
/** @version 1.1.0 */
class ProteinTranslationTest extends FunSuite with Matchers {

  // The first exercise test is active; all remaining tests are pending, as in the
  // standard exercism progression.
  test("Methionine RNA sequence") {
    ProteinTranslation.proteins("AUG") should be(Seq("Methionine"))
  }

  // (test name, RNA strand, expected protein sequence) for every pending test,
  // registered below in this exact order.
  private val pendingCases: Seq[(String, String, Seq[String])] = Seq(
    ("Phenylalanine RNA sequence 1", "UUU", Seq("Phenylalanine")),
    ("Phenylalanine RNA sequence 2", "UUC", Seq("Phenylalanine")),
    ("Leucine RNA sequence 1", "UUA", Seq("Leucine")),
    ("Leucine RNA sequence 2", "UUG", Seq("Leucine")),
    ("Serine RNA sequence 1", "UCU", Seq("Serine")),
    ("Serine RNA sequence 2", "UCC", Seq("Serine")),
    ("Serine RNA sequence 3", "UCA", Seq("Serine")),
    ("Serine RNA sequence 4", "UCG", Seq("Serine")),
    ("Tyrosine RNA sequence 1", "UAU", Seq("Tyrosine")),
    ("Tyrosine RNA sequence 2", "UAC", Seq("Tyrosine")),
    ("Cysteine RNA sequence 1", "UGU", Seq("Cysteine")),
    ("Cysteine RNA sequence 2", "UGC", Seq("Cysteine")),
    ("Tryptophan RNA sequence", "UGG", Seq("Tryptophan")),
    ("STOP codon RNA sequence 1", "UAA", Seq()),
    ("STOP codon RNA sequence 2", "UAG", Seq()),
    ("STOP codon RNA sequence 3", "UGA", Seq()),
    ("Translate RNA strand into correct protein list", "AUGUUUUGG",
      Seq("Methionine", "Phenylalanine", "Tryptophan")),
    ("Translation stops if STOP codon at beginning of sequence", "UAGUGG", Seq()),
    ("Translation stops if STOP codon at end of two-codon sequence", "UGGUAG", Seq("Tryptophan")),
    ("Translation stops if STOP codon at end of three-codon sequence", "AUGUUUUAA",
      Seq("Methionine", "Phenylalanine")),
    ("Translation stops if STOP codon in middle of three-codon sequence", "UGGUAGUGG",
      Seq("Tryptophan")),
    ("Translation stops if STOP codon in middle of six-codon sequence", "UGGUGUUAUUAAUGGUUU",
      Seq("Tryptophan", "Cysteine", "Tyrosine"))
  )

  for ((name, strand, expected) <- pendingCases) {
    test(name) {
      pending
      ProteinTranslation.proteins(strand) should be(expected)
    }
  }
}
| ricemery/xscala | exercises/protein-translation/src/test/scala/ProteinTranslationTest.scala | Scala | mit | 3,180 |
/* Copyright 2015 UniCredit S.p.A.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package unicredit
import java.io.File
import org.scalajs.core.ir._
import Trees._
import Types._
import org.scalajs.core.tools.io._
object IrPatcherPlugin {

  /**
   * Patches a compiled Scala.js IR file in place with members taken from a "hack"
   * IR file of the same class.
   *
   * Methods present only in the hack class are appended to the target class, and
   * fields present in both take their mutability flag from the hack class. The
   * rewritten class info and tree are then serialized back over the original file.
   *
   * @param file     the .sjsir file to rewrite
   * @param hackFile the .sjsir file providing the replacement members
   */
  def patchHackedFile(file: File, hackFile: File): Unit = {
    val (_, classDef) = FileVirtualScalaJSIRFile(file).infoAndTree
    val (_, hackClassDef) = FileVirtualScalaJSIRFile(hackFile).infoAndTree

    // Methods defined in the hack class but absent from the target class.
    val newMethods =
      hackClassDef.defs filter {
        case MethodDef(_, hackIdent, _, _, _) =>
          !classDef.defs.exists {
            case MethodDef(_, ident, _, _, _) => ident equals hackIdent
            case _ => false
          }
        case _ => false
      }

    // Existing members, with field mutability overridden where the hack class
    // declares a field of the same name.
    val hackDefs =
      classDef.defs map { memberDef =>
        implicit val pos = memberDef.pos
        memberDef match {
          case FieldDef(ident, tpe, mutable) =>
            val overriddenMutability = hackClassDef.defs collectFirst {
              case FieldDef(hackIdent, _, mut) if hackIdent equals ident => mut
            }
            FieldDef(ident, tpe, overriddenMutability.getOrElse(mutable))
          case other => other
        }
      }

    val newClassDef = classDef.copy(defs = hackDefs ++ newMethods)(
      classDef.optimizerHints)(classDef.pos)
    val newClassInfo = Infos.generateClassInfo(newClassDef)

    // Serialize info + tree back over the original file, always closing the stream.
    val outputStream = WritableFileVirtualBinaryFile(file).outputStream
    try {
      InfoSerializers.serialize(outputStream, newClassInfo)
      Serializers.serialize(outputStream, newClassDef)
    } finally {
      outputStream.close()
    }
  }

  /**
   * Recursively mirrors the hack tree onto `base`: directories are walked in
   * parallel, and every .sjsir file that exists in both trees is patched.
   */
  def hackAllUnder(base: File, hack: File): Unit = {
    if (hack.isDirectory) {
      hack.listFiles.foreach(f => hackAllUnder(new File(base.getAbsolutePath, f.getName), f))
    } else if (hack.getAbsolutePath.endsWith(".sjsir") && hack.exists && base.exists) {
      patchHackedFile(base, hack)
    }
  }

  /**
   * Entry point: reads the hack-classes directory path from `configFile`
   * (the file's whole content, platform default charset) and patches everything
   * under `classDir` with it.
   */
  def patchThis(classDir: File, configFile: File): Unit = {
    import java.nio.file.Files.readAllBytes
    import java.nio.file.Paths.get
    val hackClassDir = new File(new String(readAllBytes(get(configFile.getAbsolutePath))))
    hackAllUnder(classDir, hackClassDir)
  }
}
| unicredit/scalajs-ir-patcher | src/main/scala/eu/unicredit/IrPatcherPlugin.scala | Scala | apache-2.0 | 3,403 |
package com.szadowsz.gospel.core
import com.szadowsz.gospel.core.data.Struct
import org.junit.runner.RunWith
import org.scalatest.FunSpec
import org.scalatest.junit.JUnitRunner
/**
* Created on 18/02/2017.
*/
@RunWith(classOf[JUnitRunner])
class AllSolutionsSpec extends FunSpec with BaseEngineSpec {

  override protected def init(): PrologEngine = new PrologEngine()

  /** Solves `goal` and expects success. */
  private def assertSolvable(goal: String): Unit =
    prolog.solve(goal).isSuccess shouldBe true

  /** Solves `goal` and expects failure. */
  private def assertNotSolvable(goal: String): Unit =
    prolog.solve(goal).isSuccess shouldBe false

  /**
   * Solves `goal`, expects success, and checks the textual binding of `varName`
   * with anonymous variables normalised via replaceUnderscore.
   */
  private def assertBinding(goal: String, varName: String, expected: String): Unit = {
    val solution = prolog.solve(goal)
    solution.isSuccess shouldBe true
    replaceUnderscore(solution.getVarValue(varName).toString) shouldBe expected
  }

  /** Solves `goal` and expects a raised exception, optionally with an exact message. */
  private def assertRaises(goal: String, expectedMessage: Option[String]): Unit = {
    val ex = getExceptionListener
    prolog.addExceptionListener(ex)
    prolog.solve(goal)
    ex.exFound shouldBe true
    expectedMessage.foreach(message => ex.exMsg shouldBe message)
  }

  describe("findall/3") {
    it("should pass simple test #1") { assertSolvable("findall(X, (X=1;Y=2), S).") }
    it("should pass simple test #2") { assertSolvable("findall(X+Y, (X=1), S).") }
    it("should pass simple test #3") { assertSolvable("findall(X, fail, L).") }
    it("should pass simple test #4") { assertSolvable("findall(X, (X=1;X=1), S).") }
    it("should pass simple test #5") { assertSolvable("findall(X, (X=1;X=2), [X,Y]).") }
    it("should pass this negative test") { assertNotSolvable("findall(X, (X=2; X=1), [1, 2]).") }
    it("should pass variable test #1") { assertBinding("findall(X, (X=1;Y=2), S).", "S", "[1,_]") }
    it("should pass variable test #2") { assertBinding("findall(X+Y, (X=1), S).", "S", "['+'(1,_)]") }
    it("should pass variable test #3") { assertBinding("findall(X, fail, L).", "L", "[]") }
    it("should pass variable test #4") { assertBinding("findall(X, (X=1;X=1), S).", "S", "[1,1]") }
    it("should pass variable test #5") {
      // Both template variables are bound via the output list, so they are
      // checked directly rather than through assertBinding.
      val solution = prolog.solve("findall(X, (X=1;X=2), [X,Y]).")
      solution.isSuccess shouldBe true
      solution.getVarValue("X").toString shouldBe "1"
      solution.getVarValue("Y").toString shouldBe "2"
    }
    it("should pass variable test #6") { assertBinding("findall(X, (X=1;X=2), S).", "S", "[1,2]") }
    it("should pass exception test #1") {
      assertRaises("findall(X,Goal,S).",
        Some("Instantiation error in argument 2 of all_solutions_predicates_guard(Template_e1,Goal_e1,Instances_e1)"))
    }
    it("should pass exception test #2") {
      assertRaises("findall(X,4,S).",
        Some("Type error in argument 2 of all_solutions_predicates_guard(Template_e1,4,Instances_e1)"))
    }
  }

  describe("bagof/3") {
    it("should pass simple test #1") { assertSolvable("bagof(X, (X=1;Y=2), S).") }
    it("should pass simple test #2") { assertSolvable("bagof(X,(X=1;X=2), X).") }
    it("should pass simple test #3") { assertSolvable("bagof(X,(X=Y;X=Z), S1).") }
    it("should pass simple test #4") { assertSolvable("bagof(1,(Y=1;Y=2), L).") }
    it("should pass simple test #5") { assertSolvable("bagof(f(X,Y), (X=a;Y=b), L1).") }
    it("should pass simple test #6") { assertSolvable("bagof(X, Y^((X=1,Y=1);(X=2;Y=2)), L2).") }
    it("should pass simple test #7") { assertSolvable("bagof(X, Y^((X=1;Y=1);(X=2;Y=2)), L3).") }
    it("should pass simple test #8") { assertSolvable("bagof(X, Y^((X=1;Y=2);X=3), Si1).") }
    it("should pass simple test #9") { assertSolvable("bagof(X, (X=Y;X=Z;Y=1), S3).") }
    it("should pass this negative test") { assertNotSolvable("bagof(X,fail,S2).") }
    it("should pass variable test #1") { assertBinding("bagof(X,(X=1;X=2), S).", "S", "[1,2]") }
    it("should pass variable test #2") { assertBinding("bagof(X,(X=1;X=2), X).", "X", "[1,2]") }
    it("should pass variable test #3") { assertBinding("bagof(X,(X=Y;X=Z), S1).", "S1", "[Y,Z]") }
    it("should pass variable test #4") { assertBinding("bagof(1,(Y=1;Y=2), L).", "L", "[1]") }
    it("should pass variable test #5") { assertBinding("bagof(f(X,Y), (X=a;Y=b), L1).", "L1", "[f(a,_),f(_,b)]") }
    it("should pass variable test #6") { assertBinding("bagof(X, Y^((X=1,Y=1);(X=2;Y=2)), L2).", "L2", "[1,2,_]") }
    it("should pass variable test #7") { assertBinding("bagof(X, Y^((X=1;Y=1);(X=2;Y=2)), L3).", "L3", "[1,_,2,_]") }
    it("should pass variable test #8") { assertBinding("bagof(X, Y^((X=1;Y=2);X=3), Si1).", "Si1", "[1,_,3]") }
    it("should pass variable test #9") { assertBinding("bagof(X, (X=Y;X=Z;Y=1), S3).", "S3", "[Y,Z]") }
    it("should pass exception test #1") {
      // Only the presence of an exception is asserted here; the message check
      // was commented out in the original spec.
      assertRaises("bagof(X,Y^Z,L).", None)
    }
    it("should pass exception test #2") {
      assertRaises("bagof(X,1,L).",
        Some("Type error in argument 2 of all_solutions_predicates_guard(Template_e1,1,Instances_e1)"))
    }
    it("should pass exception test #3") {
      assertRaises("bagof(X,4,S).",
        Some("Type error in argument 2 of all_solutions_predicates_guard(Template_e1,4,Instances_e1)"))
    }
  }

  describe("setof/3") {
    it("should pass simple test #1") { assertSolvable("setof(X,(X=1;X=2),S).") }
    it("should pass simple test #2") { assertSolvable("setof(X,(X=1;X=2),X).") }
    it("should pass simple test #3") { assertSolvable("setof(X,(X=2;X=1),S).") }
    it("should pass simple test #4") { assertSolvable("setof(X,(X=2;X=2),S).") }
    it("should pass simple test #5") { assertSolvable("setof(X,(X=Y;X=Z),S).") }
    it("should pass simple test #6") { assertSolvable("setof(1,(Y=2;Y=1),L).") }
    it("should pass simple test #7") { assertSolvable("setof(f(X,Y),(X=a;Y=b),L).") }
    it("should pass simple test #8") { assertSolvable("setof(X,Y^((X=1,Y=1);(X=2,Y=2)),S).") }
    it("should pass simple test #9") { assertSolvable("setof(X,Y^((X=1;Y=1);(X=2,Y=2)),S).") }
    it("should pass simple test #10") { assertSolvable("setof(X,Y^((X=1,Y=1);X=3),S).") }
    it("should pass simple test #11") { assertSolvable("setof(X,(X=Y;X=Z;Y=1),S).") }
    it("should pass simple test #12") {
      prolog.setTheory(new Theory("a(1,f(_)). a(2,f(_))."))
      assertSolvable("setof(X,a(X,Y),L).")
    }
    it("should pass simple test #13") { assertSolvable("setof(X,member(X,[f(U,b),f(V,c)]),L).") }
    it("should pass simple test #14") { assertSolvable("setof(X,member(X,[f(b,U),f(c,V)]),[f(b,a),f(c,a)]).") }
    it("should pass simple test #15") { assertSolvable("setof(X,member(X,[V,U,f(U),f(V)]),L).") }
    it("should pass simple test #16") { assertSolvable("setof(X,member(X,[V,U,f(U),f(V)]),[a,b,f(a),f(b)]).") }
    it("should pass negative test #1") { assertNotSolvable("setof(X,fail,S).") }
    it("should pass negative test #2") { assertNotSolvable("setof(X,member(X,[V,U,f(U),f(V)]),[a,b,f(b),f(a)]).") }
    it("should pass variable test #1") { assertBinding("setof(X,(X=1;X=2),S).", "S", "[1,2]") }
    it("should pass variable test #2") { assertBinding("setof(X,(X=1;X=2),X).", "X", "[1,2]") }
    it("should pass variable test #3") { assertBinding("setof(X,(X=2;X=1),S).", "S", "[1,2]") }
    it("should pass variable test #4") { assertBinding("setof(X,(X=2;X=2),S).", "S", "[2]") }
    it("should pass variable test #5") { assertBinding("setof(X,(X=Y;X=Z),S).", "S", "[Y,Z]") }
    it("should pass variable test #6") { assertBinding("setof(1,(Y=2;Y=1),L).", "L", "[1]") }
    it("should pass variable test #7") { assertBinding("setof(f(X,Y),(X=a;Y=b),L).", "L", "[f(_,b),f(a,_)]") }
    it("should pass variable test #8") { assertBinding("setof(X,Y^((X=1,Y=1);(X=2,Y=2)),S).", "S", "[1,2]") }
    it("should pass variable test #9") { assertBinding("setof(X,Y^((X=1;Y=1);(X=2,Y=2)),S).", "S", "[_,1,2]") }
    it("should pass variable test #10") { assertBinding("setof(X,Y^((X=1,Y=1);X=3),S).", "S", "[1,3]") }
    it("should pass variable test #11") { assertBinding("setof(X,(X=Y;X=Z;Y=1),S).", "S", "[Y,Z]") }
    it("should pass variable test #12") {
      prolog.setTheory(new Theory("a(1,f(_)). a(2,f(_))."))
      assertBinding("setof(X,a(X,Y),L).", "L", "[1,2]")
    }
    it("should pass variable test #13") { assertBinding("setof(X,member(X,[f(U,b),f(V,c)]),L).", "L", "[f(U,b),f(V,c)]") }
    it("should pass variable test #14") { assertBinding("setof(X,member(X,[V,U,f(U),f(V)]),L).", "L", "[V,U,f(V),f(U)]") }
    it("should pass variable test #15") { assertBinding("setof(X,member(X,[V,U,f(U),f(V)]),[a,b,f(a),f(b)]).", "V", "a") }
  }
}
| zakski/project-soisceal | gospel-core/src/test/scala/com/szadowsz/gospel/core/AllSolutionsSpec.scala | Scala | lgpl-3.0 | 15,751 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.aliyun.datahub
import com.aliyun.datahub.model.RecordEntry
import org.apache.spark.sql.catalyst.expressions.UnsafeRow
import org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
 * Converts a Datahub RecordEntry into a Spark UnsafeRow following the given schema.
 *
 * Decimal precision and scale come from the `decimal.precision` / `decimal.scale`
 * source options; they are read lazily, i.e. only when the schema actually
 * contains a decimal column.
 */
class DatahubRecordToUnsafeRowConverter(
    schema: StructType,
    sourceOptions: Map[String, String]) {

  private val rowWriter = new UnsafeRowWriter(schema.fields.length)
  private lazy val precision = sourceOptions("decimal.precision").toInt
  private lazy val scale = sourceOptions("decimal.scale").toInt

  // NOTE: project/topic/shardId are not used by the conversion itself but are
  // kept for interface compatibility with callers.
  def toUnsafeRow(
      record: RecordEntry,
      project: String,
      topic: String,
      shardId: String): UnsafeRow = {
    rowWriter.reset()
    rowWriter.zeroOutNullBytes()
    schema.fields.zipWithIndex.foreach { case (field, idx) =>
      field.dataType match {
        case LongType =>
          val v = record.getBigint(field.name)
          if (v == null) rowWriter.setNullAt(idx) else rowWriter.write(idx, v)
        case BooleanType =>
          val v = record.getBoolean(field.name)
          if (v == null) rowWriter.setNullAt(idx) else rowWriter.write(idx, v)
        case _: DecimalType =>
          val v = record.getDecimal(field.name)
          if (v == null) rowWriter.setNullAt(idx)
          else rowWriter.write(idx, Decimal(v, precision, scale), precision, scale)
        case DoubleType =>
          val v = record.getDouble(field.name)
          if (v == null) rowWriter.setNullAt(idx) else rowWriter.write(idx, v)
        case TimestampType =>
          val v = record.getTimeStampAsMs(field.name)
          if (v == null) rowWriter.setNullAt(idx)
          else rowWriter.write(idx, DateTimeUtils.fromJavaTimestamp(new java.sql.Timestamp(v)))
        case _ =>
          // Any other declared type falls back to a string read.
          val v = record.getString(field.name)
          if (v == null) rowWriter.setNullAt(idx) else rowWriter.write(idx, UTF8String.fromString(v))
      }
    }
    rowWriter.getRow
  }
}
| aliyun/aliyun-emapreduce-sdk | emr-datahub/src/main/scala/org/apache/spark/sql/aliyun/datahub/DatahubRecordToUnsafeRowConverter.scala | Scala | artistic-2.0 | 3,248 |
package com.emotioncity.soriento
import com.orientechnologies.orient.core.db.OPartitionedDatabasePool
import com.orientechnologies.orient.core.db.document.{ODatabaseDocument, ODatabaseDocumentTx}
import com.tinkerpop.blueprints.impls.orient.OrientGraph
/*
* Copyright (c) 2014 Dmitriy Parenskiy aka stream (dimparf@gmail.com)
*/
/**
 * Mixin providing a shared OrientDB document-database connection pool and an
 * eagerly acquired implicit connection.
 *
 * NOTE(review): the connection URL and root credentials are hard-coded here;
 * consider externalizing them to configuration.
 */
trait OrientDbSupport {
  // Pooled factory for document-database connections.
  val oDatabaseDocumentPool =
    new OPartitionedDatabasePool("remote:localhost/emotiongraph", "root", "poweron")
  // One connection acquired from the pool, made implicit for DB-aware APIs.
  implicit val orientDb = oDatabaseDocumentPool.acquire()
}
| dimparf/Soriento | src/main/scala/com/emotioncity/soriento/OrientDbSupport.scala | Scala | apache-2.0 | 531 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs10x.helpers
import uk.gov.hmrc.ct.accounts.frs10x.retriever.Frs10xAccountsBoxRetriever
import uk.gov.hmrc.ct.accounts.retriever.AccountsBoxRetriever
import uk.gov.hmrc.ct.accounts.{AC3, AC4}
import uk.gov.hmrc.ct.box.ValidatableBox.OptionalIntIdBox
import uk.gov.hmrc.ct.box.retriever.FilingAttributesBoxValueRetriever
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, CtValidation, ValidatableBox}
import uk.gov.hmrc.ct.validation.TurnoverValidation
/**
 * Shared validation for the COVID profit-and-loss amount: when a positive value
 * is entered it is validated against either the turnover box (full accounts) or
 * the gross profit/loss box (abridged filings).
 */
trait CovidProfitAndLossValidationHelper[T <: AccountsBoxRetriever] extends ValidatableBox[T with FilingAttributesBoxValueRetriever] with TurnoverValidation {
  self: OptionalIntIdBox =>

  type BoxRetriever = T with FilingAttributesBoxValueRetriever
  type Frs10xBoxRetriever = Frs10xAccountsBoxRetriever with FilingAttributesBoxValueRetriever
  type Box = CtBoxIdentifier with CtOptionalInteger

  /** The entered amount, if any. */
  val value: Option[Int]

  val ac12Id: String = "AC12"
  val ac16Id: String = "AC16"

  /** Accounting period start date (AC3). */
  val accountsStart: AccountsBoxRetriever => AC3 = _.ac3()

  /** Accounting period end date (AC4). */
  val accountEnd: AccountsBoxRetriever => AC4 = _.ac4()

  /** Turnover box, used for non-abridged filings. */
  val turnover: T => Box

  /** Gross profit/loss box, used for abridged filings. */
  val grossProfitOrLoss: T => Box

  /** Box-specific validation applied once a positive value is present. */
  def validateBox(boxRetriever: BoxRetriever): PartialFunction[Box, Set[CtValidation]]

  /** No-op for absent/non-positive values; otherwise validates the relevant box. */
  def validateTurnoverOrGrossProfitOrLoss(boxRetriever: BoxRetriever): Set[CtValidation] =
    if (value.getOrElse(0) <= 0) validationSuccess
    else validateBox(boxRetriever)(getCorrectBox(boxRetriever))

  /** Chooses gross profit/loss for abridged filings, turnover otherwise. */
  def getCorrectBox(boxRetriever: BoxRetriever): Box =
    if (boxRetriever.abridgedFiling().value) grossProfitOrLoss(boxRetriever)
    else turnover(boxRetriever)

  def shortenedValidateHmrcTurnover(boxRetriever: BoxRetriever, box: Box, boxId: String, minimumAmount: Option[Int] = None): Set[CtValidation] =
    validateHmrcTurnover(boxRetriever, accountsStart, accountEnd, errorSuffix = s".hmrc.turnover.$boxId", secondaryIncome = box.orZero, minimumAmount)

  def shortenedValidateCohoTurnover(boxRetriever: BoxRetriever, box: Box, boxId: String): Set[CtValidation] =
    validateCoHoTurnover(boxRetriever, accountsStart, accountEnd, secondaryIncome = box.orZero, errorSuffix = s".coho.turnover.$boxId")
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs10x/helpers/CovidProfitAndLossValidationHelper.scala | Scala | apache-2.0 | 3,067 |
package ch.wsl.box.client
import ch.wsl.box.client.mocks.Values
import ch.wsl.box.client.utils.TestHooks
import ch.wsl.box.model.shared.{EntityKind, JSONID, JSONKeyValue}
import org.scalajs.dom.document
import org.scalajs.dom.raw.{Event, HTMLElement, HTMLInputElement}
import scala.concurrent.Future
/**
 * End-to-end check that a conditional form field appears only while the
 * conditioner field holds the triggering value, and disappears again when it
 * does not. The steps in the for-comprehension are strictly sequential.
 */
class ConditionalFieldTest extends TestBase {
"conditional field" should "work" in {
// Looks up the conditional field's DOM node (null when absent).
def conditionalField() = document.getElementsByClassName(TestHooks.formField(values.conditionalField)).item(0)
// True when at least one element with the conditional field's hook class exists.
// NOTE(review): name contains a typo ("condidional") — left as-is to avoid a code change.
def condidionalVisible() = document.getElementsByClassName(TestHooks.formField(values.conditionalField)).length > 0
for {
_ <- Main.setupUI()
_ <- Context.services.clientSession.login("test", "test")
_ <- waitLoggedIn
// Navigate to the test form for record id=1.
_ <- Future {
Context.applicationInstance.goTo(EntityFormState(EntityKind.FORM.kind, values.testFormName, "true", Some(JSONID(Vector(JSONKeyValue("id", "1"))).asString), false))
}
_ <- waitElement(() => document.querySelector(s".${TestHooks.formField(values.conditionerField)}"),s".${TestHooks.formField(values.conditionerField)}")
conditioner = document.querySelector(s".${TestHooks.formField(values.conditionerField)}").asInstanceOf[HTMLInputElement]
// Initially hidden; typing the triggering value must reveal the conditional field.
_ <- Future {
assert(!condidionalVisible())
conditioner.value = values.conditionalValue
conditioner.onchange(new Event("change"))
}
_ <- waitElement(conditionalField,"conditional field")
// Visible now; changing to a non-matching value must hide it again.
_ <- Future {
assert(condidionalVisible())
conditioner.value = "something else"
conditioner.onchange(new Event("change"))
}
_ <- waitNotElement(conditionalField,"should be null")
_ <- Future {
assert(!condidionalVisible())
}
} yield succeed
}
}
} | Insubric/box | client/src/test/scala/ch/wsl/box/client/ConditionalFieldTest.scala | Scala | apache-2.0 | 1,841 |
package debop4s.data.slick.examples
import debop4s.data.slick.AbstractSlickFunSuite
import debop4s.data.slick.SlickExampleDatabase._
import debop4s.data.slick.SlickExampleDatabase.driver.simple._
import scala.util.Try
/**
* 컬럼 기본값 설정 테스트
* @author sunghyouk.bae@gmail.com
*/
/**
 * Verifies that column-level defaults (O.Default) are applied when the column
 * is omitted from an insert.
 */
class ColumnDefaultFunSuite extends AbstractSlickFunSuite {

  class ColumnDefaults(tag: Tag) extends Table[(Int, String, Option[Boolean])](tag, "column_default") {
    def id = column[Int]("id")
    def a = column[String]("a", O.Default("foo"), O.Length(128, true))
    def b = column[Option[Boolean]]("b", O.Default(Some(true)))
    def * = (id, a, b)
  }
  lazy val columnDefaults = TableQuery[ColumnDefaults]

  test("default column") {
    withSession { implicit session =>
      // Best-effort drop: ignore the failure when the table does not exist yet.
      Try(columnDefaults.ddl.drop)
      columnDefaults.ddl.create

      // Insert only the id; "a" and "b" must fall back to their declared defaults.
      columnDefaults.map(_.id) += 42
      columnDefaults.run shouldEqual List((42, "foo", Some(true)))
    }
  }
}
| debop/debop4s | debop4s-data-slick/src/test/scala/debop4s/data/slick/examples/ColumnDefaultFunSuite.scala | Scala | apache-2.0 | 1,092 |
package org.airpnp.upnp
import org.airpnp.http.Response._
import java.io.StringWriter
import java.net.InetSocketAddress
import scala.xml.MinimizeMode
import scala.xml.XML
import org.airpnp.Util
import org.airpnp.http.Request
import org.airpnp.http.Response
import org.airpnp.http.RouteHandler
import org.airpnp.http.RoutingHttpServer
import org.fest.assertions.Assertions.assertThat
import org.testng.annotations.AfterClass
import org.testng.annotations.BeforeClass
import org.testng.annotations.Test
import org.airpnp.TraceLogging
/**
 * Exercises SoapClient against an embedded HTTP server that simulates plain
 * POST, chunked replies, the M-POST fallback and SOAP faults.
 */
class SoapClientTest extends TraceLogging {
  var port: Int = 0
  var server: RoutingHttpServer = null

  /** Boots the embedded SOAP test server on a free local port. */
  @BeforeClass def startServer() = {
    port = Util.findPort()
    val address = new InetSocketAddress("localhost", port)
    server = new SoapClientTest.SoapServer(address)
    server.start
  }

  /** Stops the embedded server without a shutdown delay. */
  @AfterClass def stopServer() = {
    server.stop(0)
  }

  private def getUrl(path: String) = "http://localhost:" + port + path

  /** Sends the canonical "Baz" test message to the given path and returns the reply. */
  private def sendBaz(path: String) =
    new SoapClient().sendMessage(getUrl(path), new SoapMessage("someType", "Baz"))

  @Test def shouldHandleRegularPost() {
    assertThat(sendBaz("/post").getName).isEqualTo("BazReply")
  }

  @Test def shouldFallbackToMPost() {
    assertThat(sendBaz("/mpost").getName).isEqualTo("BazReply")
  }

  @Test(expectedExceptions = Array(classOf[SoapError]))
  def shouldParseAndThrowSoapError() {
    // The /err route always answers with a SOAP fault, which must surface as SoapError.
    sendBaz("/err")
  }

  @Test def shouldSupportChunkedTransferEncoding() {
    assertThat(sendBaz("/chunked").getName).isEqualTo("BazReply")
  }
}
/** Companion holding the embedded test server and its per-route handlers. */
object SoapClientTest {

  /** Routes for each SOAP behavior under test: plain POST, chunked, M-POST, fault. */
  private class SoapServer(private val addr: InetSocketAddress) extends RoutingHttpServer(addr) {
    addRoute("/post", new PostHandler())
    addRoute("/chunked", new ChunkedHandler())
    addRoute("/mpost", new MPostHandler())
    addRoute("/err", new ErrHandler())
  }

  /** Echoes back a "<name>Reply" message using chunked transfer encoding. */
  private class ChunkedHandler extends RouteHandler {
    override def handlePOST(request: Request, response: Response) {
      val msg = SoapMessage.parse(request.getInputStream)
      val reply = new SoapMessage(msg.getServiceType, msg.getName + "Reply")
      response.respond(withText(reply.toString).andContentType("text/xml").andIsChunked())
    }
  }

  /** Replies with "<name>Reply" when SOAPACTION matches the message; 400 otherwise. */
  private class PostHandler extends RouteHandler {
    override def handlePOST(request: Request, response: Response) {
      val msg = SoapMessage.parse(request.getInputStream)
      request.getHeader("SOAPACTION").headOption match {
        case Some(x) if x == msg.getSoapAction => {
          val reply = new SoapMessage(msg.getServiceType, msg.getName + "Reply")
          response.respond(withText(reply.toString).andContentType("text/xml"))
        }
        case _ => response.respond(withText("Incorrect SOAPACTION header").andStatusCode(400))
      }
    }
  }

  /**
   * Rejects plain POST with 405 so the client falls back to M-POST, then
   * validates the mandatory-extension headers (01-SOAPACTION and MAN).
   */
  private class MPostHandler extends RouteHandler {
    override def handlePOST(request: Request, response: Response) {
      response.respond(withText("Use M-POST").andStatusCode(405))
    }
    override def handleUnknown(request: Request, response: Response) {
      val msg = SoapMessage.parse(request.getInputStream)
      request.getMethod match {
        case "M-POST" => {
          request.getHeader("01-SOAPACTION").headOption match {
            case Some(x) if x == msg.getSoapAction => request.getHeader("MAN").headOption match {
              // MAN must carry the SOAP envelope namespace bound to extension prefix 01.
              case Some(y) if y == "\\"http://schemas.xmlsoap.org/soap/envelope/\\"; ns=01" => {
                val reply = new SoapMessage(msg.getServiceType, msg.getName + "Reply")
                response.respond(withText(reply.toString).andContentType("text/xml"))
              }
              case _ => response.respond(withText("Incorrect MAN header").andStatusCode(400))
            }
            case _ => response.respond(withText("Incorrect 01-SOAPACTION header").andStatusCode(400))
          }
        }
        case _ => super.handleUnknown(request, response)
      }
    }
  }

  /** Always responds with HTTP 500 carrying a SOAP fault body. */
  private class ErrHandler extends RouteHandler {
    override def handlePOST(request: Request, response: Response) {
      val str = createSoapError(123, "Some error").xml
      response.respond(withUtf8Text(str).andStatusCode(500).andContentType("text/xml"))
    }
  }
}
| provegard/ScAirPnp | src/test/scala/org/airpnp/upnp/SoapClientTest.scala | Scala | mit | 4,535 |
package org.bitcoins.rpc.auth
/**
* Created by chris on 5/2/17.
*/
/** Authentication details, plus config location, for talking to a bitcoind RPC server. */
sealed trait AuthCredentials {

  /** The directory where our bitcoin.conf file is located */
  def datadir: String

  /** rpcusername field in our bitcoin.conf file */
  def username: String

  /** rpcpassword field in our bitcoin.conf file */
  def password: String
}

object AuthCredentials {

  private case class AuthCredentialsImpl(username: String, password: String, datadir: String) extends AuthCredentials

  /** Credentials pointing at the default data directory, `$HOME/.bitcoin`. */
  def apply(username: String, password: String): AuthCredentials =
    apply(username, password, defaultDataDir)

  /** Credentials with an explicitly chosen data directory. */
  def apply(username: String, password: String, datadir: String): AuthCredentials =
    AuthCredentialsImpl(username, password, datadir)

  // Computed per call so changes to the `user.home` system property are picked up.
  private def defaultDataDir: String = System.getProperty("user.home") + "/.bitcoin"
}
| bitcoin-s/bitcoin-s-rpc-client | src/main/scala/org/bitcoins/rpc/auth/AuthCredentials.scala | Scala | mit | 827 |
// Databricks notebook source
// MAGIC %md
// MAGIC
// MAGIC # [SDS-2.2, Scalable Data Science](https://lamastex.github.io/scalable-data-science/sds/2/2/)
// COMMAND ----------
// MAGIC %md
// MAGIC Archived YouTube video of this live unedited lab-lecture:
// MAGIC
// MAGIC [](https://www.youtube.com/embed/CTcIJcgk87s?start=0&end=2216&autoplay=1)
// COMMAND ----------
// MAGIC %md
// MAGIC # Article Topics in Retweet Network
// MAGIC ## Student Project
// MAGIC by [Li Caldeira Balkeståhl](https://www.linkedin.com/in/li-caldeira-balkest%C3%A5hl-9b839412b/) and [Mariama Jaiteh](https://www.linkedin.com/in/mariama-jaiteh-a97ab373/)
// MAGIC
// MAGIC ### Idea: Do topic modelling on web articles linked from twitter and see what topics are being tweeted by whom
// MAGIC
// MAGIC The aim of the project is to apply topic modelling on web articles linked in tweets obtained from Twitter. Since each article (and its topics) is linked to a user, we hope to highlight the topics spread by every user in their Twitter network.
// MAGIC
// MAGIC
// MAGIC ## How to run the notebook
// MAGIC The repos contains the scala source code and databricks notebook of our project.
// MAGIC
// MAGIC ## Steps
// MAGIC #### 1. Get list of URLs from twitter
// MAGIC #### 2. Get the article content
// MAGIC #### 3. Extract features, clean data
// MAGIC #### 4. Topic modelling
// MAGIC #### 5. Connect to tweet network
// COMMAND ----------
// MAGIC %md
// MAGIC ##Note
// MAGIC This projects uses research data collected from twitter by Raazesh Sainudiin and collaborators. This data is not freely available.
// MAGIC
// MAGIC If you provide your own list of URL's, in an apache spark DataFrame (as org.apache.spark.sql.Dataset[org.apache.spark.sql.Row] = [URL: string]), you should be able to run the full notebook by changing the appropriate variables
// COMMAND ----------
// MAGIC %md
// MAGIC ## 1. Get list of URLs from twitter
// MAGIC
// MAGIC * UK election twitter data (experiment designed by Raazesh Sainudiin and Joakim Johansson)
// MAGIC * Filter on only those that have URL
// MAGIC * We experiment on only a sample of the data by taking a 3% random subsample of the distinct URLs
// MAGIC * For illustration purposes, this notebook includes a smaller testsample of only 13 URL's
// COMMAND ----------
//the original location of our dataset
val retweetUrlNetwork = spark.read.parquet("/datasets/MEP/GB/RetweetUrlNetworkAsParquetDF")
val ratioNoURL=retweetUrlNetwork.filter($"URL"==="").count().toFloat / retweetUrlNetwork.count().toFloat
val retweetWithUrl = retweetUrlNetwork.filter($"URL"=!="")
val numURLs=retweetWithUrl.cache.count
// COMMAND ----------
//change here for your own dataset
val distinctURLs = retweetWithUrl.select("URL").distinct().cache
val numdistinctURS=distinctURLs.count()
val sampleOfdistinctURls = distinctURLs.sample(false,0.03,12345L)
val num3persample = sampleOfdistinctURls.count()
val testsample = distinctURLs.sample(false,0.00006,12345L)
testsample.count
// COMMAND ----------
// MAGIC %md
// MAGIC ## 2. Get the article content
// MAGIC
// MAGIC * Inspiration (and code) from Mastering Spark for Data Science by Andrew Morgan, Antoine Amend, Matthew Hallet, David George
// MAGIC * Code for URL extension
// MAGIC * Example on how to use Goose
// MAGIC * Goose web scraper (library package com.syncthemall : goose-2.1.25)
// MAGIC * Fetches html content, returns cleaned version of the text and main image (we did not use the image)
// MAGIC * For 1% sample took about 30 min
// MAGIC * Several options could make this quicker, for example filter uninteresting domains before fetching, shorter timeouts...
// MAGIC * Used DataFrames and userdefinedfunction, then flattening the resulting dataframe
// COMMAND ----------
// MAGIC %run ./999_03_StudentProject_ArticleTopicInRTNetwork_webScraping
// COMMAND ----------
val test_scraped = getContent(testsample).cache
// COMMAND ----------
display(test_scraped)
// COMMAND ----------
// MAGIC %md
// MAGIC As can be seen, this is quite slow, it already takes 15 seconds for only 13 URL's. For a 1% sample of our dataset (about 2500 URL's), it took about 20 minutes.
// MAGIC
// MAGIC We incrementally build of 3% sample and saved the DataFrame as a parquet file
// COMMAND ----------
// MAGIC %md
// MAGIC ## 3. Extract features, clean data
// MAGIC
// MAGIC * Scraped dataframe saved as parquet for easy access
// MAGIC * Clean out stuff that is not articles
// MAGIC * youtube, vimeo have videos, twitter has tweets, not articles
// MAGIC * exceptions during scraping will give status "not found"
// MAGIC * but also some "found" whith just "null" as body (we only noticed this later)
// MAGIC * Transform the texts into feature vectors to use with SparkML - we use the built in transformers and estimators of sparkML
// MAGIC * First tokenize the text into words - use RegexTokenizer
// MAGIC * Then remove stopwords, words that don't carry meaning - use StopWordsRemover
// MAGIC * Then transform the array of words into a feature vector (a vector of counts over the vocabulary, the vocabulary is generated from our corpus, then the feature vectors are created from the vocabulary)
// MAGIC * use CountVectorizer, creates a vector of counts over the vocabulary (in one case scaled by TF-IDF approach)
// MAGIC * use HashingTF, first hash the terms, then create a frequency vector over the hashed terms (scaled by TF-IDF approach)
// MAGIC
// COMMAND ----------
//read the parquet file with our 3% sample, it's a DataFrame with the URL, content (body), etc
val scraped_readfromfile= spark.read.parquet("/datasets/Projects_Li_Mariama/scraped_new3percent")
//filter out the articles which gave exceptions
val found_articles=scraped_readfromfile.filter($"status"=!="not found")
//filter out videos and twitter
val filtered_articles=found_articles.filter(($"domain"=!="www.youtube.com" && $"domain"=!="twitter.com" && $"domain"=!="vimeo.com" )).cache
// COMMAND ----------
// MAGIC %md
// MAGIC ## Extract the features
// COMMAND ----------
// MAGIC %md
// MAGIC ###Note
// MAGIC We use a list of stopwords saved as a text file, you can provide your own or use the default
// COMMAND ----------
// Feature pipeline: tokenize article bodies, drop stopwords, then build term-count vectors.
import org.apache.spark.ml.feature.{RegexTokenizer,StopWordsRemover}
import org.apache.spark.sql.functions._
// Tokenize article bodies (split on runs of non-word characters).
val regexTokenizer = new RegexTokenizer()
.setInputCol("body")
.setOutputCol("tokens")
.setPattern("\\\\W")
.setMinTokenLength(2) // Filter away tokens with length < 2
val wordsData = regexTokenizer.transform(filtered_articles)
// Obtain the stopwords from a text file; collected to the driver as an Array[String].
val stopwords = sc.textFile("/tmp/stopwords").collect()
val remover = new StopWordsRemover()
.setStopWords(stopwords) // This parameter is optional (a default English list exists)
.setInputCol("tokens")
.setOutputCol("words")
// Create new DataFrame with Stopwords removed
val filtered = remover.transform(wordsData)
import org.apache.spark.ml.feature.CountVectorizer
// Turn each word array into a sparse vector of term counts over a learned vocabulary.
val vectorizer = new CountVectorizer()
.setInputCol("words")
.setOutputCol("features")
.setVocabSize(20000) //the maximum size of the vocabulary
.setMinDF(5) // the minimum number of different documents a term must appear in to be included in the vocabulary.
.fit(filtered)
val countVectors = vectorizer.transform(filtered)
// COMMAND ----------
// MAGIC %md
// MAGIC ## 4. Topic modelling - usupervised learning
// MAGIC
// MAGIC We want to find topics in our text collection by using unsupervised learning
// MAGIC
// MAGIC * Latent Dirichlet allocation (LDA)
// MAGIC * http://www.cs.columbia.edu/~blei/papers/Blei2012.pdf
// MAGIC * Models the documents as coming from one or several topics (distributions over words)
// MAGIC * Fit method fits the topics
// MAGIC * Transform method tells how much of each document is from each topic
// MAGIC * K-means
// MAGIC * Finds clusters by calculating distances between points
// MAGIC * https://en.wikipedia.org/wiki/K-means_clustering#Algorithms
// COMMAND ----------
// MAGIC %md
// MAGIC #LDA results
// COMMAND ----------
//running the LDA: fit a 10-topic model on the term-count vectors (10 iterations)
import org.apache.spark.ml.clustering.LDA
val numTopics=10
val lda = new LDA()
.setK(numTopics)
.setMaxIter(10)
val model = lda.fit(countVectors)
// COMMAND ----------
// MAGIC %md
// MAGIC Access the topics from the fit, translate the word index to words (strings) and get a dataframe of topics described by the most probable words
// COMMAND ----------
val topicIndices = model.describeTopics(maxTermsPerTopic = 8)
val vocabList = vectorizer.vocabulary
val topics= topicIndices.map(
row => ( row.getAs[Int](0),
row.getAs[Seq[Int]]("termIndices").map(vocabList(_)).zip(row.getAs[Seq[Double]]("termWeights"))))
.withColumnRenamed("_1","topic").withColumnRenamed("_2","description")
//can use display to see the topics
// COMMAND ----------
//transform the topics dataframe into a suitable format for the visualization with D3 (D3 code copied from course notebook)
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
import org.apache.spark.sql._
//explode makes several rows for each element of "description"
val exploded_topics=topics.withColumn("description", explode($"description"))
//function to flatten the dataframe
// Flattens nested StructType columns into top-level columns whose names are the
// dotted field paths (e.g. "description._1"), so the frame serializes to flat JSON.
implicit class DataFrameFlattener(df: DataFrame) {
  def flattenSchema: DataFrame = {
    df.select(flatten(Nil, df.schema): _*)
  }
  // Recursively walks the schema, accumulating the field path; each leaf becomes a
  // column selected by its backtick-quoted path and aliased with the dotted name.
  protected def flatten(path: Seq[String], schema: DataType): Seq[Column] = schema match {
    case s: StructType => s.fields.flatMap(f => flatten(path :+ f.name, f.dataType))
    case other => col(path.map(n => s"`$n`").mkString(".")).as(path.mkString(".")) :: Nil //original
  }
}
val flat_topics=exploded_topics.flattenSchema.withColumnRenamed("description._1","term").withColumnRenamed("description._2","probability")
// Create JSON data to be passed to D3 visualization
val rawJson = flat_topics.toJSON.collect().mkString(",\\n")
// COMMAND ----------
displayHTML(s"""
<!DOCTYPE html>
<meta charset="utf-8">
<style>
circle {
fill: rgb(31, 119, 180);
fill-opacity: 0.5;
stroke: rgb(31, 119, 180);
stroke-width: 1px;
}
.leaf circle {
fill: #ff7f0e;
fill-opacity: 1;
}
text {
font: 14px sans-serif;
}
</style>
<body>
<script src="https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js"></script>
<script>
var json = {
"name": "data",
"children": [
{
"name": "topics",
"children": [
${rawJson}
]
}
]
};
var r = 1200,
format = d3.format(",d"),
fill = d3.scale.category20c();
var bubble = d3.layout.pack()
.sort(null)
.size([r, r])
.padding(1.5);
var vis = d3.select("body").append("svg")
.attr("width", r)
.attr("height", r)
.attr("class", "bubble");
var node = vis.selectAll("g.node")
.data(bubble.nodes(classes(json))
.filter(function(d) { return !d.children; }))
.enter().append("g")
.attr("class", "node")
.attr("transform", function(d) { return "translate(" + d.x + "," + d.y + ")"; })
color = d3.scale.category20();
node.append("title")
.text(function(d) { return d.className + ": " + format(d.value); });
node.append("circle")
.attr("r", function(d) { return d.r; })
.style("fill", function(d) {return color(d.topicName);});
var text = node.append("text")
.attr("text-anchor", "middle")
.attr("dy", ".3em")
.text(function(d) { return d.className.substring(0, d.r / 3)});
text.append("tspan")
.attr("dy", "1.2em")
.attr("x", 0)
.text(function(d) {return Math.ceil(d.value * 10000) /10000; });
// Returns a flattened hierarchy containing all leaf nodes under the root.
function classes(root) {
var classes = [];
function recurse(term, node) {
if (node.children) node.children.forEach(function(child) { recurse(node.term, child); });
else classes.push({topicName: node.topic, className: node.term, value: node.probability});
}
recurse(null, root);
return {children: classes};
}
</script>
""")
// COMMAND ----------
// MAGIC %md
// MAGIC ## Need to clean better
// MAGIC
// MAGIC * remove other languages
// MAGIC * remove non-articles
// MAGIC * for example messages about cookies, about javascript, etc
// MAGIC
// MAGIC For lack of time, we just cleaned the data by filtering out things we found that were not articles or that were other languages. For a code that works well with other document collections, more time should be put into finding suitable, general filters that do not remove the interesting articles
// COMMAND ----------
val articles_clean=wordsData.filter($"body"=!="null") //our error handling from before means we have the string null for articles that threw exceptions (but we don't want them)
.filter(not($"body".contains("We've noticed that you are using an ad blocker."))) //apparently we have some of these adblock messages
.filter(not($"body"==="What term do you want to search?")) //we also have some of these...
.filter(not($"body".contains("uses cookies"))) //lots of sites use cookies, some get as our articles...
.filter(not($"body".contains("continuing to browse")))//more cookie stuff
.filter(not($"body".contains(" das "))) //get rid of most german
.filter(not($"body".contains(" det "))) //get rid of 7 swedish/danish
.filter(not($"body".contains(" il "))) //get rid of italian and some more french
.filter(not($"body".contains(" les "))) //some more french
.filter(not($"body".contains(" nie "))) //one polish? article
.filter(not($"body".contains(" Capitol Hill Publishing Corp"))) //"The contents of this site are ©2017 Capitol Hill Publishing Corp., a subsidiary of News Communications, Inc."
.filter(not($"body".contains("You seem to be using an unsupported browser"))) //two flickr things
.filter(not($"body".contains("Spotify Web Player")))//This browser doesn't support Spotify Web Player. Switch browsers or download Spotify for your desktop.
.filter(not($"body".contains("See more of "))) //see moreof xxx on facebook
.filter(not($"body".contains("enable JavaScript")))
.filter(not($"body".contains("enable Javascript")))
.filter(not($"body".contains("JavaScript isn't")))
.filter(not($"body".contains("JavaScript seems")))
.filter(not($"body".contains("Javascript functionality")))
.filter(not($"body".contains("requires Javascript to be enabled")))
.filter(not($"body".contains("To use this site")))
.filter(not($"body".contains("For the best experience, please turn JavaScript on"))) //ted talks
.filter(not($"body".contains("Sorry, this Silva Article is not viewable.")))//1 article
.filter(not($"body".contains(" ist ")))//2 more german
.filter(not($"body".contains(" che "))) //1 more italian
.filter(not($"body".contains(" que "))) //1 more italian
.filter(not($"body".contains("for this version"))) // (or arXiv:1706.09254v2 arXiv:1706.09254v2 [cs.CL] for this version)
.filter(not($"body".contains("Contact Us"))) //3 contact forms?
.filter(not($"body".contains("Terms and conditions apply to all"))) //channel 5 competiontion winners?
.filter(not($"body".contains("Terms and Conditions"))) //some terms and conditions
.filter(not($"body".contains("If you followed a valid link")))
// COMMAND ----------
articles_clean.count()
// COMMAND ----------
// MAGIC %md
// MAGIC ### New stopwords
// MAGIC * remove more words that don't say much about the topics
// COMMAND ----------
//if we need to add stopwords
//from the words in the topics just identify said, mr, etc as not saying anything about the topic
val add_stopwords = Array("said","mr","new","people","just","year","like","told")
// Combine newly identified stopwords to our exising list of stopwords
val new_stopwords = stopwords.union(add_stopwords)
// COMMAND ----------
// Set params for StopWordsRemover
val remover = new StopWordsRemover()
.setStopWords(new_stopwords) // This parameter is optional
.setInputCol("tokens")
.setOutputCol("words")
// Create new DF with Stopwords removed
val filtered = remover.transform(articles_clean)
// Set params for CountVectorizer
val vectorizer = new CountVectorizer()
.setInputCol("words")
.setOutputCol("features")
//.setVocabSize(20000) //why limit the vocabulary in this way?
.setMinDF(3) // the minimum number of different documents a term must appear in to be included in the vocabulary.
.fit(filtered)
// get our countvector
val countVectors = vectorizer.transform(filtered)
// COMMAND ----------
val numTopics_new=7
val lda = new LDA()
//.setOptimizer(new OnlineLDAOptimizer().setMiniBatchFraction(0.8))
//.setFeaturesCol("features")
.setK(numTopics_new)
.setMaxIter(50)
val model = lda.fit(countVectors)
// COMMAND ----------
// Describe topics.
println("The topics described by their top-weighted terms:")
val topicIndices = model.describeTopics(maxTermsPerTopic = 5) //how to go from this DF to one whith the words for each term insted of just the indices? without going to rdd...
//topicIndices.show(false)
//topicIndices.printSchema()
val vocabList = vectorizer.vocabulary
val topics= topicIndices.map(
row => ( row.getAs[Int](0),
row.getAs[Seq[Int]]("termIndices").map(vocabList(_)).zip(row.getAs[Seq[Double]]("termWeights"))))
.withColumnRenamed("_1","topic").withColumnRenamed("_2","description")
//topics.show(false)
//topics.printSchema()
topics.collect.foreach{ row=>
println("Topic " + row.getAs[Int](0))
row.getSeq[Int](1). //does not check the seq type, so I don't need to make it correct since i don't know how to...
foreach(println)
println("=========")
}
//explode makes several rows for each element of "description"
val exploded_topics=topics.withColumn("description", explode($"description"))
//the function to flatten the dataframe
// NOTE(review): verbatim duplicate of the DataFrameFlattener helper defined earlier
// in this notebook — consider keeping a single definition in a shared setup cell.
// Flattens nested StructType columns into top-level columns named by their dotted paths.
implicit class DataFrameFlattener(df: DataFrame) {
  def flattenSchema: DataFrame = {
    df.select(flatten(Nil, df.schema): _*)
  }
  // Recursive walk over the schema: leaves become backtick-quoted selections
  // aliased with the dotted field path.
  protected def flatten(path: Seq[String], schema: DataType): Seq[Column] = schema match {
    case s: StructType => s.fields.flatMap(f => flatten(path :+ f.name, f.dataType))
    case other => col(path.map(n => s"`$n`").mkString(".")).as(path.mkString(".")) :: Nil //original
  }
}
val flat_topics=exploded_topics.flattenSchema.withColumnRenamed("description._1","term").withColumnRenamed("description._2","probability")
// Create JSON data to be passed to D3 visualization
val rawJson = flat_topics.toJSON.collect().mkString(",\\n")
// COMMAND ----------
displayHTML(s"""
<!DOCTYPE html>
<meta charset="utf-8">
<style>
circle {
fill: rgb(31, 119, 180);
fill-opacity: 0.5;
stroke: rgb(31, 119, 180);
stroke-width: 1px;
}
.leaf circle {
fill: #ff7f0e;
fill-opacity: 1;
}
text {
font: 14px sans-serif;
}
</style>
<body>
<script src="https://cdnjs.cloudflare.com/ajax/libs/d3/3.5.5/d3.min.js"></script>
<script>
var json = {
"name": "data",
"children": [
{
"name": "topics",
"children": [
${rawJson}
]
}
]
};
var r = 1200,
format = d3.format(",d"),
fill = d3.scale.category20c();
var bubble = d3.layout.pack()
.sort(null)
.size([r, r])
.padding(1.5);
var vis = d3.select("body").append("svg")
.attr("width", r)
.attr("height", r)
.attr("class", "bubble");
var node = vis.selectAll("g.node")
.data(bubble.nodes(classes(json))
.filter(function(d) { return !d.children; }))
.enter().append("g")
.attr("class", "node")
.attr("transform", function(d) { return "translate(" + d.x + "," + d.y + ")"; })
color = d3.scale.category20();
node.append("title")
.text(function(d) { return d.className + ": " + format(d.value); });
node.append("circle")
.attr("r", function(d) { return d.r; })
.style("fill", function(d) {return color(d.topicName);});
var text = node.append("text")
.attr("text-anchor", "middle")
.attr("dy", ".3em")
.text(function(d) { return d.className.substring(0, d.r / 3)});
text.append("tspan")
.attr("dy", "1.2em")
.attr("x", 0)
.text(function(d) {return Math.ceil(d.value * 10000) /10000; });
// Returns a flattened hierarchy containing all leaf nodes under the root.
function classes(root) {
var classes = [];
function recurse(term, node) {
if (node.children) node.children.forEach(function(child) { recurse(node.term, child); });
else classes.push({topicName: node.topic, className: node.term, value: node.probability});
}
recurse(null, root);
return {children: classes};
}
</script>
""")
// COMMAND ----------
// MAGIC %md
// MAGIC ### Conclusion for LDA topics
// MAGIC Some clear topics, but room for improvement.
// MAGIC
// MAGIC For example, should investigate:
// MAGIC
// MAGIC * Different LDA parameters (number of topics, number of iterations...)
// MAGIC * Different minimization procedure
// MAGIC * More stopwords
// MAGIC * Other ways of getting the feature vector (like using TF-IDF)
// COMMAND ----------
// MAGIC %md
// MAGIC ### Access topic probabilities for our dataset
// COMMAND ----------
import org.apache.spark.ml.linalg.{Vector}
//do the LDA transformation on our text dataframe: adds a "topicDistribution" vector column
val transformed2 = model.transform(countVectors)
//define userdefinedfunction to get the probability for the topic we want, in this example topic 0
val getVector0 = udf((v:Vector) => v.apply(0))
//define userdefinedfunction to get the most probable topic (argmax of the distribution) for each text
val getMostProbableTopic=udf((v:Vector) => v.argmax)
//transform the dataframe to add the new columns
// NOTE(review): "blabla" is a placeholder name — consider renaming (e.g. articlesWithTopics).
val blabla=transformed2.withColumn("topic0probability", getVector0($"topicDistribution")).withColumn("Topic",getMostProbableTopic($"topicDistribution"))
// COMMAND ----------
//Ex: look at the titles of the articles with a high percentage of topic 0
display(blabla.select($"title",$"topic0probability").filter($"topic0probability">0.9))
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC # K-means clustering of the articles extracted
// COMMAND ----------
// Some pre-cleaning of the dataframe - mainly to remove unwanted domain names
// !! (this will be merge lately with the previous filtering)
val filtered_articles4Kmeans=found_articles
.filter(($"domain"=!="www.youtube.com"
&& $"domain"=!="twitter.com"
&& $"domain"=!="vimeo.com"
&& $"domain" =!= "www.facebook.com"
&& $"domain"=!="thehill.com"
&& $"domain" =!= "www.mixcloud.com"))
.filter($"body"=!="null")//our error handling from before means we have the string null for articles that threw exceptions (but we don't want them)
.filter(not($"body"===""))
.filter(not(length(col("body")) <= 200)) // count the number of characters
.filter(not($"body"==="What term do you want to search?")) //we also have some of these...
.filter(not($"body".contains("We've noticed that you are using an ad blocker.")))
.filter(not($"body".contains("é"))) //we also have some of these...
.filter(not($"body".contains("è")))
.filter(not($"body".contains("à")))
.filter(not($"body".contains("á")))
.filter(not($"body".contains("í")))
.filter(not($"body".contains("ì")))
.filter(not($"body".contains("ò")))
.filter(not($"body".contains(" das ")))
.filter(not($"body".contains("ö")))
.filter(not($"body".contains("We use cookies ")))
.filter(not($"body".contains("This site is marked private by its owner."))) // for wordpress
.filter(not($"body".contains("We’ve noticed that JavaScript is ")))
.filter(not($"body".contains("You seem to be using an unsupported browser")))
.filter(not($"body".contains("Spotify Web Player")))
.filter(not($"body".contains("We know that sometimes it’s easier for us to come to you with the news.")))
.filter(not($"body".contains("enable JavaScript")))
.filter(not($"body".contains("enable Javascript")))
.filter(not($"body".contains("JavaScript isn't")))
.filter(not($"body".contains("JavaScript seems")))
.filter(not($"body".contains("Javascript functionality")))
.filter(not($"body".contains("requires Javascript to be enabled")))
.filter(not($"body".contains("To use this site")))
.filter(not($"body".contains("For the best experience, please turn JavaScript on"))) //ted talks
.filter(not($"body".contains("for this version"))) // (or arXiv:1706.09254v2 arXiv:1706.09254v2 [cs.CL] for this version)
.filter(not($"body".contains("Contact Us"))) //3 contact forms?
.filter(not($"body".contains("Terms and conditions apply to all"))) //channel 5 competiontion winners?
.filter(not($"body".contains("Terms and Conditions"))) //some terms and conditions
.filter(not($"body".contains("If you followed a valid link")))
.filter(not($"body".contains("turn off your ad blocker")))
//two URL columns
//.join(retweetUrlNetwork.drop("Domain"), retweetUrlNetwork("URL") === found_articles("URL"), "inner")
//.join(retweetUrlNetwork.drop("Domain"),Seq("URL)) // gives a inner join with no duplicated colums
.cache
// COMMAND ----------
// Featurize using the CountVectorizer object
// NOTE(review): this cell reads `wordsDataf`, which is only defined in the NEXT cell —
// it works only when the notebook cells are executed out of order. TODO: reorder cells.
val CountV = new CountVectorizer()
// each cell corresponds to an array of words
.setInputCol("words")
.setOutputCol("feature_wordfreq")
.setMinDF(5)
.fit(wordsDataf)
// transform the dataframe by adding a new column with the feature vectors
val tfcv = CountV.transform(wordsDataf)

// COMMAND ----------

// Remove stopwords
// Create new DF with Stopwords removed
// NOTE(review): `wordsData2` is not defined anywhere in this notebook — presumably the
// tokenized form of `filtered_articles4Kmeans`; confirm before re-running from scratch.
val wordsDataf = remover.transform(wordsData2)
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC ### 1.a. K-means using vectors of term frequencies.
// MAGIC
// MAGIC As previously shown, we use CountVectorizer to convert the article body into a vector of term frequency.
// COMMAND ----------
// MAGIC %md
// MAGIC Run K-means using the vectorial representation of the text as input (features).
// MAGIC
// MAGIC We decided to generate 10 clusters (K=10); the articles will be clustered around these cluster centers using pairwise distances.
// COMMAND ----------
// When the filtering is completed and unified, we will use this assignation
//val tfcv = countVectors
// Building the K-Means model: 10 clusters over the raw term-frequency vectors.
import org.apache.spark.ml.clustering.KMeans
val model_tfcv = new KMeans()
.setTol(0.01) // convergence tolerance on cluster-center movement
.setK(10) // number of clusters
.setMaxIter(1000) // number of iterative runs
.setFeaturesCol("feature_wordfreq")
//.setFeaturesCol("features") // When the filtering is unified, we will use this
.setPredictionCol("predictions_tfcv")
.setSeed(123456) // fixed seed so cluster assignments are reproducible; can be removed
.fit(tfcv)
// transform the filtered dataframe and the predicted cluster (column "predictions_tfcv")
val K_modeling_TFCV = model_tfcv.transform(tfcv)
// COMMAND ----------
// MAGIC %md
// MAGIC ### The K-means clusters center
// COMMAND ----------
// Look at cluster centers
println("Cluster Centers: ")
model_tfcv.clusterCenters.foreach(println)
// COMMAND ----------
// Sorting of clusters by population size
val cluster_TFCV_size = K_modeling_TFCV
.groupBy("predictions_tfcv")
.count()
.sort("predictions_tfcv")
.sort(desc("count"))
// COMMAND ----------
// Size of the cluster
display(cluster_TFCV_size)
// COMMAND ----------
//LDA topic clusters for comparison. Note, the LDA only had 7 topics
display(blabla.groupBy("Topic").count().sort("Topic").sort(desc("count")))
// COMMAND ----------
// Selection of one cluster, for example
display(K_modeling_TFCV
.filter($"predictions_tfcv" === 8)
.select("predictions_tfcv","title","body"))
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC By looking at some articles from each cluster:
// MAGIC
// MAGIC * Cluster 0: Diverse
// MAGIC * Cluster 8: Taxes, Care, People, Politics -- Diverse
// MAGIC * Cluster 9: about Trump, Trump+Russia, US politics
// MAGIC * Cluster 3: Ecology, Climate change
// MAGIC Some clusters are singletons.
// COMMAND ----------
// MAGIC %md
// MAGIC ### 1.b. K-means with TF-IDF
// MAGIC Can we improve the clustering with TF-IDF ?
// MAGIC
// MAGIC We will apply the Inverse Document Frequency (IDF) to evaluate the importance of each word.
// MAGIC Using frequency vectors (TF), we will estimate the importance of the words in a corpus
// COMMAND ----------
import org.apache.spark.ml.feature.{IDF,Normalizer}
// Creating an IDF estimator over the CountVectorizer term frequencies
val idf_cv = new IDF()
.setInputCol("feature_wordfreq") // to replace by "features" when filtering is unified
.setOutputCol("features_cv")
// Generate an IDF model from the corpus
val idfModel2 = idf_cv.fit(tfcv)
// and scale the feature vector by inverse document frequency
val rescaledData2 = idfModel2.transform(tfcv)
// normalize each TF-IDF vector to unit L1 norm (p = 1.0)
val normalizer2 = new Normalizer()
.setInputCol("features_cv")
.setOutputCol("normFeatures_cv")
.setP(1.0)
val sparseVectorCVTF_IDF = normalizer2.transform(rescaledData2)
// COMMAND ----------
// Building a new K-Means model using term frequencies (from CountVectorizer) scaled by IDF
// K-Means modeling is done as explained above
val model_tfcv_idf = new KMeans()
.setTol(0.01) // convergence tolerance
.setK(10) // number of clusters
.setMaxIter(1000) // maximum number of iterations
.setFeaturesCol("normFeatures_cv")
.setPredictionCol("predictions_tfcv_idf")
.setSeed(123456) // fixed seed for reproducibility
.fit(sparseVectorCVTF_IDF)
val K_modeling_TFCV_IDF = model_tfcv_idf.transform(sparseVectorCVTF_IDF)
// COMMAND ----------
// Cluster population sizes for the TF-IDF model, largest first.
// The intermediate .sort("predictions_tfcv_idf") was redundant — a later
// Spark sort replaces the earlier ordering — so only the descending sort
// by count takes effect and is kept.
val cluster_TFCV_IDF_size = K_modeling_TFCV_IDF
.groupBy("predictions_tfcv_idf")
.count()
.sort(desc("count"))
// COMMAND ----------
// Cluster population sizes for the TF-IDF based model
display(cluster_TFCV_IDF_size)
// COMMAND ----------
// Inspect one cluster (cluster 5) of the TF-IDF model
display(K_modeling_TFCV_IDF
.filter($"predictions_tfcv_idf" === 5)
.select("predictions_tfcv_idf","title", "body"))
// COMMAND ----------
// MAGIC %md
// MAGIC By looking at some articles from each cluster:
// MAGIC
// MAGIC * Cluster 0: Diverse
// MAGIC * Cluster 5: US related (politics, economics)
// COMMAND ----------
// MAGIC %md
// MAGIC ### 2. K-means using TF-IDF with HashingTF
// MAGIC
// MAGIC Instead of CountVectorizer, use a HashingTF on the texts (after tokenization and removal of stopwords)
// MAGIC
// MAGIC For this case, we use a Hash vector = a fixed-sized feature vector that characterizes the article body ("a set of terms")
// COMMAND ----------
//Generate hashing (fixed-size term-frequency) vectors from the tokenized, stopword-filtered text
import org.apache.spark.ml.feature.{HashingTF}
val hashingTF = new HashingTF()
.setInputCol("words")
.setOutputCol("rawFeatures")
.setNumFeatures(18000) // number of features in the vector. We chose a value greater than the vocabulary size
val featurizedData = hashingTF.transform(wordsDataf)
// Scale the hashed term frequencies by inverse document frequency
val idf = new IDF()
.setInputCol("rawFeatures")
.setOutputCol("features")
val idfModel = idf.fit(featurizedData)
val rescaledData = idfModel.transform(featurizedData)
// Normalize each TF-IDF vector to unit L1 norm
val normalizer = new Normalizer()
.setInputCol("features")
.setOutputCol("normFeatures")
.setP(1.0)
val sparseVectorDF = normalizer.transform(rescaledData)
// COMMAND ----------
// K-Means on the hashed TF-IDF features; K = 15 clusters this time
val model_TFIDF = new KMeans()
.setTol(0.01)
.setK(15)
.setMaxIter(1000)
.setFeaturesCol("normFeatures")
.setPredictionCol("predictions")
.setSeed(123456)
.fit(sparseVectorDF)
val K_modeling_sparseVectorDF = model_TFIDF.transform(sparseVectorDF)
// COMMAND ----------
// Cluster population sizes for the hashing-TF model, largest first.
// The intermediate .sort("predictions") was redundant: a subsequent Spark
// sort replaces the previous ordering, so only the descending count sort
// is kept.
val cluster_sparseVectorDF_size = K_modeling_sparseVectorDF
.groupBy("predictions")
.count()
.sort(desc("count"))
// COMMAND ----------
// Cluster population sizes for the hashing-TF model
display(cluster_sparseVectorDF_size)
// COMMAND ----------
// Example of cluster (cluster 10)
display(K_modeling_sparseVectorDF
.filter($"predictions" ===10)
.select("predictions","title","body")
)
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC - Cluster 0: Diverse
// MAGIC - Cluster 10: London attacks, Manchester
// MAGIC - Cluster 5: Trump, US politics, Trump-Russia
// COMMAND ----------
// MAGIC %md
// MAGIC ### Conclusions about the K-Means clustering:
// MAGIC
// MAGIC - The clustering needs to be improved:
// MAGIC   * a big cluster with diverse articles suggests that the clustering has some flaws.
// MAGIC - Some considerations have to be made regarding:
// MAGIC - article body size,
// MAGIC - feature vectors (count vector vs. hashing features vector)
// MAGIC - ideal number of clusters,
// MAGIC - number of iterations, ...
// MAGIC * few meaningful clusters are obtained
// MAGIC - Using normalization and IDF improves the clustering output.
// COMMAND ----------
// MAGIC %md
// MAGIC ## 5. Connect to tweet network
// MAGIC
// MAGIC * Which is the most probable topic (or topics) for each tweet with an article?
// MAGIC * We did not have time for this
// COMMAND ----------
//join our LDA results (topic assignment) with the K-means results, keyed on the article columns
val joinedDF=blabla.select($"URL",$"title",$"body",$"domain",$"Topic").join(K_modeling_TFCV_IDF.drop($"tokens").drop($"words").drop($"description")
.drop($"keywords").drop($"status").drop($"feature_wordfreq")
.drop($"features_cv").drop($"normFeatures_cv"), Seq("URL","title","body","domain"))
// COMMAND ----------
//join with the tweet dataframe (for example, includes original post user ID as OPostUserSNinRT)
val newjoinedDF= joinedDF.join(retweetWithUrl.drop("Domain"), Seq("URL"), "inner")
// COMMAND ----------
//What are the topics in the articles tweeted by Jeremy Corbyn?
//We only have 1 in our small sample
display(newjoinedDF.filter($"OPostUserSNinRT"==="jeremycorbyn").select($"title",$"body",$"Topic", $"predictions_tfcv_idf",$"OPostUserSNinRT"))
// COMMAND ----------
// MAGIC %md
// MAGIC
// MAGIC ### Future work:
// MAGIC
// MAGIC - Do a better filtering of the urls without clear body article | raazesh-sainudiin/scalable-data-science | db/2/2/999_03_StudentProject_ArticleTopicInRTNetwork.scala | Scala | unlicense | 33,957 |
package info.hupel.isabelle.cli
import java.net.URLClassLoader
import java.nio.file._
import org.apache.commons.io.FileUtils
import scala.concurrent._
import scala.concurrent.duration.Duration
import monix.execution.{Cancelable, CancelableFuture}
import monix.execution.Scheduler.Implicits.global
import org.log4s._
import cats.instances.either._
import cats.instances.list._
import cats.syntax.traverse._
import coursier.{Dependency, Module}
import coursier.util.Parse
import info.hupel.isabelle.Platform
import info.hupel.isabelle.api._
import info.hupel.isabelle.setup._
object Main {
  private lazy val logger = getLogger
  // Entry point: parses CLI options, resolves Maven artifacts, prepares an
  // Isabelle environment and dispatches to the requested sub-command.
  def main(args: Array[String]): Unit = Options.parse(args.toList) { (options, rest) =>
    options.check()
    // Use our own class loader as parent only in "internal" mode; otherwise
    // the resource class loader is fully isolated (null parent = bootstrap).
    val parentClassLoader =
      if (options.internal) getClass.getClassLoader else null
    // --afp only makes sense for stable Isabelle versions, because the AFP
    // artifact is published per stable version identifier.
    val afp =
      if (options.afp)
        options.version match {
          case Version.Devel(_) =>
            Options.usageAndExit("Option conflict: devel version and --afp are mutually exclusive")
          case Version.Stable(identifier) =>
            Set(Dependency(Module(s"${BuildInfo.organization}.afp", s"afp-$identifier"), "2.0.+"))
        }
      else
        Set()
    // Fetch the user-requested modules (plus AFP, if any) via coursier;
    // parse failures abort immediately.
    val classpath = options.fetch.traverse(Parse.moduleVersion(_, BuildInfo.scalaBinaryVersion)) match {
      case Right(Nil) if !options.afp => Future.successful { Nil }
      case Right(modules) => Artifacts.fetch(Options.platform, modules.map { case (mod, v) => Dependency(mod, v) }.toSet ++ afp, options.offline)
      case Left(error) => sys.error(s"could not parse dependency: $error")
    }
    val resourceClassLoader = classpath map { files =>
      new URLClassLoader(files.map(_.toUri.toURL).toArray, parentClassLoader)
    }
    logger.info(s"Dumping resources to ${options.resourcePath} ...")
    // Start from a clean resource directory so stale files never leak in.
    if (Files.exists(options.resourcePath))
      FileUtils.cleanDirectory(options.resourcePath.toFile)
    val components = resourceClassLoader map { classLoader =>
      Resources.dumpIsabelleResources(options.resourcePath, classLoader) match {
        case Right(resources) =>
          resources.component :: options.component
        case Left(Resources.Absent) =>
          // Not fatal: proceed with only the user-specified components.
          logger.warn("No resources on classpath")
          options.component
        case Left(error) =>
          sys.error(error.explain)
      }
    }
    val platform = Platform.guess.getOrElse(sys.error(Setup.UnknownPlatform.explain))
    // Lazily resolved so no detection/installation happens unless a command
    // actually needs the environment: either auto-detect (installing when
    // absent and not offline) or use the explicitly given --home.
    lazy val setup = options.home match {
      case None =>
        Setup.detect(platform, options.version, options.update) match {
          case Right(setup) => setup
          case Left(Setup.Absent) if !options.offline => Setup.install(platform, options.version).fold(sys error _.explain, identity)
          case Left(e) => sys.error(e.explain)
        }
      case Some(home) =>
        Setup(home, Options.platform, options.version)
    }
    logger.info(s"Using ${options.configuration}")
    // Let the prover use all available cores.
    val updates = List(
      OptionKey.Integer("threads").set(Runtime.getRuntime.availableProcessors)
    )
    val resolver = Resolver.Classpath orElse new Resolver.Maven(options.offline)
    lazy val bundle = for {
      cs <- CancelableFuture(components, Cancelable.empty)
      env <- setup.makeEnvironment(resolver, options.userPath, cs, updates)
    } yield Bundle(env, setup, options.configuration)
    // Dispatch: the first remaining argument selects the sub-command; with no
    // command we still force the bundle so setup errors surface.
    val app = rest match {
      case cmd :: rest =>
        Options.commands.get(cmd) match {
          case None =>
            Options.usageAndExit(s"no such command `$cmd`")
          case Some(cmd) =>
            bundle.flatMap(cmd.cancelableRun(_, rest))
        }
      case _ => bundle.map(_ => ())
    }
    // Cancel the (possibly long-running) command when the JVM shuts down
    // (e.g. on Ctrl-C) so external processes are not left dangling.
    Runtime.getRuntime().addShutdownHook(new Thread() {
      override def run(): Unit = {
        logger.info("Shutting down ...")
        app.cancel()
      }
    })
    Await.result(app, Duration.Inf)
  }
}
| larsrh/libisabelle | modules/cli/src/main/scala/Main.scala | Scala | apache-2.0 | 3,879 |
package org.bitcoins.gui.util
import javafx.beans.value.ObservableValue
import org.bitcoins.commons.jsonmodels.ExplorerEnv
import org.bitcoins.core.config.BitcoinNetwork
import org.bitcoins.core.protocol.BlockTimeStamp
import org.bitcoins.core.protocol.tlv.{
ContractInfoTLV,
ContractInfoV0TLV,
ContractInfoV1TLV,
OracleAnnouncementTLV
}
import org.bitcoins.crypto.SchnorrPublicKey
import org.bitcoins.gui.{GUI, GlobalData}
import scalafx.beans.property.StringProperty
import scalafx.scene.control.{Button, TextField, Tooltip}
import scalafx.scene.image.{Image, ImageView}
import scalafx.scene.input.{KeyCode, KeyCodeCombination, KeyCombination}
import scalafx.scene.layout.{Priority, Region}
import scalafx.scene.{Parent, Scene}
import scalafx.stage.FileChooser.ExtensionFilter
import scalafx.stage.{FileChooser, Stage}
import java.awt.Toolkit.getDefaultToolkit
import java.awt.datatransfer.StringSelection
import java.io.File
import java.nio.file.Files
import java.text.NumberFormat
import java.time.format.{DateTimeFormatter, FormatStyle}
import java.time.{Instant, ZoneOffset}
import scala.util.Properties
import scala.util.matching.Regex
/** Miscellaneous helpers shared by the GUI: numeric input validation,
  * clipboard access, file dialogs, icons, window construction and block
  * explorer URLs. */
object GUIUtil {

  // Accepts an optional leading '-' and digits with ',' group separators.
  val numericRegex: Regex = "-?([1-9,][0-9,]*)?".r
  val numberFormatter: NumberFormat = java.text.NumberFormat.getIntegerInstance

  /** Restricts `textField` to numeric input by stripping any characters that
    * do not match [[numericRegex]] whenever the text changes. */
  def setNumericInput(textField: TextField): Unit = {
    textField.text.addListener {
      (_: ObservableValue[_ <: String], _: String, newVal: String) =>
        if (!newVal.matches(numericRegex.regex))
          textField.setText(newVal.replaceAll(numericRegex.regex, ""))
    }
  }

  /** Formats a block timestamp as a medium-style date string in UTC. */
  def epochToDateString(epoch: BlockTimeStamp): String = {
    val long = epoch.toUInt32.toLong
    val instant = Instant.ofEpochSecond(long)
    epochToDateString(instant)
  }

  /** Formats an [[Instant]] as a medium-style date string in UTC. */
  def epochToDateString(instant: Instant): String = {
    val utc = instant.atOffset(ZoneOffset.UTC)
    DateTimeFormatter
      .ofLocalizedDate(FormatStyle.MEDIUM)
      .format(utc)
  }

  /** Places `str` on the system clipboard. */
  def setStringToClipboard(str: String): Unit = {
    val clipboard = getDefaultToolkit.getSystemClipboard
    val sel = new StringSelection(str)
    clipboard.setContents(sel, sel)
  }

  // fileChooser persists so initialDirectory can update across interactions
  private lazy val fileChooser = new FileChooser() {
    extensionFilters.addAll(txtExtensionFilter, allExtensionFilter)
    selectedExtensionFilter = txtExtensionFilter
    initialDirectory = new File(Properties.userHome)
  }

  private lazy val txtExtensionFilter =
    new ExtensionFilter("Text Files", "*.txt")
  private lazy val allExtensionFilter = new ExtensionFilter("All Files", "*")

  /** Shows a save dialog pre-filled with `filename`.
    *
    * When the user picks a file, writes `bytesOpt` (if present) to it and then
    * invokes `handleFileOpt` (if present) with the chosen file. Does nothing
    * when the dialog is cancelled.
    */
  def showSaveDialog(
      filename: String,
      bytesOpt: Option[String],
      handleFileOpt: Option[File => Unit]): Unit = {
    fileChooser.initialFileName = filename
    val chosenFileOpt = Option(fileChooser.showSaveDialog(null))
    chosenFileOpt match {
      case Some(chosenFile) =>
        // Remember last-used directory
        fileChooser.initialDirectory = chosenFile.getParentFile
        bytesOpt match {
          case Some(bytes) => Files.write(chosenFile.toPath, bytes.getBytes)
          case None => // There was nothing sent in to write out
        }
        handleFileOpt match {
          case Some(handleFile) => handleFile(chosenFile)
          case None => // No callback defined
        }
      case None => // User canceled in dialog
    }
  }

  /** Shows an open dialog, passing the chosen file (if any) to `handleFile`.
    * @return the chosen file, or None when the dialog was cancelled */
  def showOpenDialog(handleFile: File => Unit): Option[File] = {
    val chosenFileOpt = Option(fileChooser.showOpenDialog(null))
    chosenFileOpt match {
      case Some(chosenFile) =>
        // Remember last-used directory
        fileChooser.initialDirectory = chosenFile.getParentFile
        handleFile(chosenFile)
      case None => // User canceled in dialog
    }
    chosenFileOpt
  }

  /** "Browse..." button that opens the file-open dialog. */
  def getFileChooserButton(handleFile: File => Unit): Button = new Button(
    "Browse...") {
    onAction = _ => {
      val _ = GUIUtil.showOpenDialog(handleFile)
    }
  }

  /** "Browse..." button that opens the file-save dialog. */
  def getFileSaveButton(
      filename: String,
      bytes: Option[String],
      handleFile: Option[File => Unit]): Button = new Button("Browse...") {
    onAction = _ => {
      val _ = GUIUtil.showSaveDialog(filename, bytes, handleFile)
    }
  }

  /** Button that copies `property`'s value to the clipboard; disabled while
    * the property is empty. */
  def getCopyToClipboardButton(property: StringProperty): Button = new Button {
    styleClass ++= Vector("icon-button", "copy-button")
    disable <== property.isEmpty
    onAction = _ => {
      setStringToClipboard(property.value)
    }
    tooltip = Tooltip("Copy to Clipboard.")
    tooltip.value.setShowDelay(new javafx.util.Duration(100))
  }

  // Both status icons share identical 16x16 sizing; build them in one place
  // instead of duplicating the ImageView setup (previous code repeated it).
  private def sizedIcon(resourcePath: String): ImageView = {
    val imageView = new ImageView(new Image(resourcePath))
    imageView.fitHeight = 16
    imageView.fitWidth = 16
    imageView
  }

  /** 16x16 green check-mark icon. */
  def getGreenCheck(): ImageView = sizedIcon("/icons/green-check.png")

  /** 16x16 red X icon. */
  def getRedX(): ImageView = sizedIcon("/icons/red-x.png")

  /** Spacer region that grows horizontally to fill available space. */
  def getHSpacer(): Region = new Region { hgrow = Priority.Always }

  /** Spacer region that grows vertically to fill available space. */
  def getVSpacer(): Region = new Region { vgrow = Priority.Always }

  /** Builds a [[Stage]] with the given title and size hosting `rootView`,
    * wiring the platform-conventional close shortcuts (Cmd/Ctrl-W on
    * mac/linux, Alt-F4 on windows/linux). */
  def getWindow(
      windowTitle: String,
      width: Double,
      height: Double,
      rootView: Parent): Stage = {
    val windowScene = new Scene(width, height) {
      root = rootView
      stylesheets = GlobalData.currentStyleSheets
    }
    val stage = new Stage() {
      title = windowTitle
      scene = windowScene
      // Icon?
    }
    if (Properties.isMac || Properties.isLinux) {
      windowScene.accelerators.put(
        new KeyCodeCombination(KeyCode.W, KeyCombination.ShortcutDown),
        () => stage.close())
    }
    if (Properties.isWin || Properties.isLinux) {
      windowScene.accelerators.put(
        new KeyCodeCombination(KeyCode.F4, KeyCombination.AltDown),
        () => stage.close())
    }
    stage
  }

  /** URL of the announcement page on the block explorer for this network. */
  def getAnnouncementUrl(
      network: BitcoinNetwork,
      primaryOracle: OracleAnnouncementTLV): String = {
    val baseUrl =
      ExplorerEnv.fromBitcoinNetwork(network).siteUrl
    s"${baseUrl}announcement/${primaryOracle.sha256.hex}"
  }

  /** Opens `url` with the host-services document handler (default browser). */
  def openUrl(url: String): Unit = {
    GUI.hostServices.showDocument(url)
  }

  val logo = new Image("/icons/bitcoin-s.png")
  val logoTestnet = new Image("/icons/bitcoin-s-testnet.png")
  val logoSignet = new Image("/icons/bitcoin-s-signet.png")
  val logoRegtest = new Image("/icons/bitcoin-s-regtest.png")

  /** Extracts the (oracle public key, event id) pair of the first
    * announcement of the first oracle found in `contractInfo`. */
  def getOraclePubKeyEventId(
      contractInfo: ContractInfoTLV): (SchnorrPublicKey, String) = {
    contractInfo match {
      case ContractInfoV0TLV(_, _, oracleInfo) =>
        (oracleInfo.announcements.head.publicKey,
         oracleInfo.announcements.head.eventTLV.eventId)
      case ContractInfoV1TLV(_, contractOraclePairs) =>
        (contractOraclePairs.head._2.announcements.head.publicKey,
         contractOraclePairs.head._2.announcements.head.eventTLV.eventId)
    }
  }
}
| bitcoin-s/bitcoin-s | app/gui/src/main/scala/org/bitcoins/gui/util/GUIUtil.scala | Scala | mit | 6,925 |
/**
* Copyright (C) 2012 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.webapp
import java.util.{Map ⇒ JMap}
import javax.servlet.http.HttpSession
import org.orbeon.oxf.common.OXFException
import org.orbeon.oxf.pipeline.InitUtils
import org.orbeon.oxf.pipeline.api.ExternalContext
import org.orbeon.oxf.pipeline.api.ExternalContext.Session
// External context which only exposes the web app, without request or response
// Session is None when called from init()/destroy()/contextInitialized()/contextDestroyed()
// Session is Some(_) when called from sessionCreated()/sessionDestroyed()
class WebAppExternalContext(webAppContext: WebAppContext, httpSession: Option[HttpSession] = None) extends ExternalContext {
    // Return null if we were not provided with a session. This allows detecting whether the session is available or not.
    private lazy val session: Session = httpSession map (new SessionImpl(_)) orNull
    // NOTE(review): the `create` flag is deliberately ignored — sessions cannot
    // be created from this restricted context (no request is available).
    def getSession(create: Boolean) = session
    def getWebAppContext = webAppContext
    // No request/response exists outside a request cycle, hence the nulls and
    // empty logger strings below.
    def getNativeRequest = null
    def getNativeResponse = null
    def getStartLoggerString = ""
    def getEndLoggerString = ""
    def getRequest = null
    def getResponse = null
    def getRequestDispatcher(path: String, isContextRelative: Boolean): ExternalContext.RequestDispatcher = null
    // Thin adapter exposing an HttpSession through the ExternalContext.Session interface.
    private class SessionImpl(private val httpSession: HttpSession) extends ExternalContext.Session {
        // Lazily created wrapper around the HttpSession attributes; null until first access.
        private var sessionAttributesMap: JMap[String, AnyRef] = _
        def getCreationTime = httpSession.getCreationTime
        def getId = httpSession.getId
        def getLastAccessedTime = httpSession.getLastAccessedTime
        def getMaxInactiveInterval = httpSession.getMaxInactiveInterval
        def invalidate() = httpSession.invalidate()
        def isNew = httpSession.isNew
        def setMaxInactiveInterval(interval: Int) = httpSession.setMaxInactiveInterval(interval)
        def getAttributesMap = {
            if (sessionAttributesMap eq null)
                sessionAttributesMap = new InitUtils.SessionMap(httpSession)
            sessionAttributesMap
        }
        def getAttributesMap(scope: Int) = {
            // Only the application scope is supported in a Servlet environment.
            // NOTE(review): the message below duplicates the word "scope".
            if (scope != Session.APPLICATION_SCOPE)
                throw new OXFException("Invalid session scope scope: only the application scope is allowed in Servlets")
            getAttributesMap
        }
        def addListener(sessionListener: ExternalContext.Session.SessionListener) =
            throw new UnsupportedOperationException
        def removeListener(sessionListener: ExternalContext.Session.SessionListener) =
            throw new UnsupportedOperationException
    }
}
| ajw625/orbeon-forms | src/main/scala/org/orbeon/oxf/webapp/WebAppExternalContext.scala | Scala | lgpl-2.1 | 3,390 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.sql.Timestamp
import scala.math.Ordering
import org.apache.spark.SparkFunSuite
import org.apache.spark.metrics.source.CodegenMetrics
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.expressions.objects._
import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, DateTimeUtils}
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.LA
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.ThreadUtils
/**
* Additional tests for code generation.
*/
class CodeGenerationSuite extends SparkFunSuite with ExpressionEvalHelper {
  // Code generation must be safe to invoke concurrently from many threads.
  test("multithreaded eval") {
    import scala.concurrent._
    import ExecutionContext.Implicits.global
    import scala.concurrent.duration._
    val futures = (1 to 20).map { _ =>
      Future {
        GeneratePredicate.generate(EqualTo(Literal(1), Literal(1)))
        GenerateMutableProjection.generate(EqualTo(Literal(1), Literal(1)) :: Nil)
        GenerateOrdering.generate(Add(Literal(1), Literal(1)).asc :: Nil)
      }
    }
    futures.foreach(ThreadUtils.awaitResult(_, 10.seconds))
  }
  // Compiling generated code must update the codegen metrics counters.
  test("metrics are recorded on compile") {
    val startCount1 = CodegenMetrics.METRIC_COMPILATION_TIME.getCount()
    val startCount2 = CodegenMetrics.METRIC_SOURCE_CODE_SIZE.getCount()
    val startCount3 = CodegenMetrics.METRIC_GENERATED_CLASS_BYTECODE_SIZE.getCount()
    val startCount4 = CodegenMetrics.METRIC_GENERATED_METHOD_BYTECODE_SIZE.getCount()
    GenerateOrdering.generate(Add(Literal(123), Literal(1)).asc :: Nil)
    assert(CodegenMetrics.METRIC_COMPILATION_TIME.getCount() == startCount1 + 1)
    assert(CodegenMetrics.METRIC_SOURCE_CODE_SIZE.getCount() == startCount2 + 1)
    assert(CodegenMetrics.METRIC_GENERATED_CLASS_BYTECODE_SIZE.getCount() > startCount3)
    assert(CodegenMetrics.METRIC_GENERATED_METHOD_BYTECODE_SIZE.getCount() > startCount4)
  }
  // Projections over thousands of expressions must be split so no generated
  // method exceeds the JVM's 64KB bytecode limit.
  test("SPARK-8443: split wide projections into blocks due to JVM code size limit") {
    val length = 5000
    val expressions = List.fill(length)(EqualTo(Literal(1), Literal(1)))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericInternalRow(length)).toSeq(expressions.map(_.dataType))
    val expected = Seq.fill(length)(true)
    if (actual != expected) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  // A CASE WHEN with many wide branches must still compile and evaluate.
  test("SPARK-13242: case-when expression with large number of branches (or cases)") {
    val cases = 500
    val clauses = 20
    // Generate an individual case: a `clauses`-way OR condition mapping to literal n
    def generateCase(n: Int): (Expression, Expression) = {
      val condition = (1 to clauses)
        .map(c => EqualTo(BoundReference(0, StringType, false), Literal(s"$c:$n")))
        .reduceLeft[Expression]((l, r) => Or(l, r))
      (condition, Literal(n))
    }
    val expression = CaseWhen((1 to cases).map(generateCase))
    val plan = GenerateMutableProjection.generate(Seq(expression))
    val input = new GenericInternalRow(Array[Any](UTF8String.fromString(s"$clauses:$cases")))
    val actual = plan(input).toSeq(Seq(expression.dataType))
    assert(actual.head == cases)
  }
  // Deeply nested expressions inside an If must be split into blocks.
  test("SPARK-22543: split large if expressions into blocks due to JVM code size limit") {
    var strExpr: Expression = Literal("abc")
    for (_ <- 1 to 150) {
      strExpr = StringDecode(Encode(strExpr, "utf-8"), "utf-8")
    }
    val expressions = Seq(If(EqualTo(strExpr, strExpr), strExpr, strExpr))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(null).toSeq(expressions.map(_.dataType))
    assert(actual.length == 1)
    val expected = UTF8String.fromString("abc")
    if (!checkResult(actual.head, expected, expressions.head)) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  // CreateArray over thousands of elements must be split into blocks.
  test("SPARK-14793: split wide array creation into blocks due to JVM code size limit") {
    val length = 5000
    val expressions = Seq(CreateArray(List.fill(length)(EqualTo(Literal(1), Literal(1)))))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericInternalRow(length)).toSeq(expressions.map(_.dataType))
    assert(actual.length == 1)
    val expected = UnsafeArrayData.fromPrimitiveArray(Array.fill(length)(true))
    if (!checkResult(actual.head, expected, expressions.head)) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  // CreateMap with thousands of entries must be split into blocks.
  test("SPARK-14793: split wide map creation into blocks due to JVM code size limit") {
    val length = 5000
    val expressions = Seq(CreateMap(
      List.fill(length)(EqualTo(Literal(1), Literal(1))).zipWithIndex.flatMap {
        case (expr, i) => Seq(Literal(i), expr)
      }))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericInternalRow(length)).toSeq(expressions.map(_.dataType))
    assert(actual.length == 1)
    val expected = ArrayBasedMapData((0 until length).toArray, Array.fill(length)(true))
    if (!checkResult(actual.head, expected, expressions.head)) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  // CreateStruct with thousands of fields must be split into blocks.
  test("SPARK-14793: split wide struct creation into blocks due to JVM code size limit") {
    val length = 5000
    val expressions = Seq(CreateStruct(List.fill(length)(EqualTo(Literal(1), Literal(1)))))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericInternalRow(length)).toSeq(expressions.map(_.dataType))
    val expected = Seq(InternalRow(Seq.fill(length)(true): _*))
    if (!checkResult(actual, expected, expressions.head)) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  // CreateNamedStruct with thousands of fields must be split into blocks.
  test("SPARK-14793: split wide named struct creation into blocks due to JVM code size limit") {
    val length = 5000
    val expressions = Seq(CreateNamedStruct(
      List.fill(length)(EqualTo(Literal(1), Literal(1))).flatMap {
        expr => Seq(Literal(expr.toString), expr)
      }))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericInternalRow(length)).toSeq(expressions.map(_.dataType))
    assert(actual.length == 1)
    val expected = InternalRow(Seq.fill(length)(true): _*)
    if (!checkResult(actual.head, expected, expressions.head)) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  // Creating a wide external Row must be split into blocks.
  test("SPARK-14224: split wide external row creation into blocks due to JVM code size limit") {
    val length = 5000
    val schema = StructType(Seq.fill(length)(StructField("int", IntegerType)))
    val expressions = Seq(CreateExternalRow(Seq.fill(length)(Literal(1)), schema))
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericInternalRow(length)).toSeq(expressions.map(_.dataType))
    val expected = Seq(Row.fromSeq(Seq.fill(length)(1)))
    if (actual != expected) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  // Many timezone-conversion expressions must not overflow the generated constructor.
  test("SPARK-17702: split wide constructor into blocks due to JVM code size limit") {
    val length = 5000
    val expressions = Seq.fill(length) {
      ToUTCTimestamp(
        Literal.create(Timestamp.valueOf("2015-07-24 00:00:00"), TimestampType),
        Literal.create(LA.getId, StringType))
    }
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericInternalRow(length)).toSeq(expressions.map(_.dataType))
    val expected = Seq.fill(length)(
      DateTimeUtils.fromJavaTimestamp(Timestamp.valueOf("2015-07-24 07:00:00")))
    if (actual != expected) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  // Split expressions must be grouped into one method per generated nested class.
  test("SPARK-22226: group splitted expressions into one method per nested class") {
    val length = 10000
    val expressions = Seq.fill(length) {
      ToUTCTimestamp(
        Literal.create(Timestamp.valueOf("2017-10-10 00:00:00"), TimestampType),
        Literal.create(LA.getId, StringType))
    }
    val plan = GenerateMutableProjection.generate(expressions)
    val actual = plan(new GenericInternalRow(length)).toSeq(expressions.map(_.dataType))
    val expected = Seq.fill(length)(
      DateTimeUtils.fromJavaTimestamp(Timestamp.valueOf("2017-10-10 07:00:00")))
    if (actual != expected) {
      fail(s"Incorrect Evaluation: expressions: $expressions, actual: $actual, expected: $expected")
    }
  }
  // Round-trips a nested row through UnsafeProjection and SafeProjection and
  // checks the safe copy is independent of later mutation of the unsafe row.
  test("test generated safe and unsafe projection") {
    val schema = new StructType(Array(
      StructField("a", StringType, true),
      StructField("b", IntegerType, true),
      StructField("c", new StructType(Array(
        StructField("aa", StringType, true),
        StructField("bb", IntegerType, true)
      )), true),
      StructField("d", new StructType(Array(
        StructField("a", new StructType(Array(
          StructField("b", StringType, true),
          StructField("", IntegerType, true)
        )), true)
      )), true)
    ))
    val row = Row("a", 1, Row("b", 2), Row(Row("c", 3)))
    val lit = Literal.create(row, schema)
    val internalRow = lit.value.asInstanceOf[InternalRow]
    val unsafeProj = UnsafeProjection.create(schema)
    val unsafeRow: UnsafeRow = unsafeProj(internalRow)
    assert(unsafeRow.getUTF8String(0) === UTF8String.fromString("a"))
    assert(unsafeRow.getInt(1) === 1)
    assert(unsafeRow.getStruct(2, 2).getUTF8String(0) === UTF8String.fromString("b"))
    assert(unsafeRow.getStruct(2, 2).getInt(1) === 2)
    assert(unsafeRow.getStruct(3, 1).getStruct(0, 2).getUTF8String(0) ===
      UTF8String.fromString("c"))
    assert(unsafeRow.getStruct(3, 1).getStruct(0, 2).getInt(1) === 3)
    val fromUnsafe = SafeProjection.create(schema)
    val internalRow2 = fromUnsafe(unsafeRow)
    assert(internalRow === internalRow2)
    // update unsafeRow should not affect internalRow2
    unsafeRow.setInt(1, 10)
    unsafeRow.getStruct(2, 2).setInt(1, 10)
    unsafeRow.getStruct(3, 1).getStruct(0, 2).setInt(1, 4)
    assert(internalRow === internalRow2)
  }
  // "*/" appearing in data must not terminate generated comment blocks.
  test("*/ in the data") {
    // When */ appears in a comment block (i.e. in /**/), code gen will break.
    // So, in Expression and CodegenFallback, we escape */ to \*\/.
    checkEvaluation(
      EqualTo(BoundReference(0, StringType, false), Literal.create("*/", StringType)),
      true,
      InternalRow(UTF8String.fromString("*/")))
  }
  // "\u" appearing in data must not be interpreted as a unicode escape.
  test("\\u in the data") {
    // When \ u appears in a comment block (i.e. in /**/), code gen will break.
    // So, in Expression and CodegenFallback, we escape \ u to \\u.
    checkEvaluation(
      EqualTo(BoundReference(0, StringType, false), Literal.create("\\u", StringType)),
      true,
      InternalRow(UTF8String.fromString("\\u")))
  }
  // Literals containing comment terminators or (invalid) unicode escapes must
  // be escaped so the generated Java source still compiles.
  test("check compilation error doesn't occur caused by specific literal") {
    // The end of comment (*/) should be escaped.
    GenerateUnsafeProjection.generate(
      Literal.create("*/Compilation error occurs/*", StringType) :: Nil)
    // `\u002A` is `*` and `\u002F` is `/`
    // so if the end of comment consists of those characters in queries, we need to escape them.
    GenerateUnsafeProjection.generate(
      Literal.create("\\u002A/Compilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\u002A/Compilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\u002a/Compilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\u002a/Compilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("*\\u002FCompilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("*\\\\u002FCompilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("*\\002fCompilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("*\\\\002fCompilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\002A\\002FCompilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\002A\\002FCompilation error occurs/*", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\002A\\\\002FCompilation error occurs/*", StringType) :: Nil)
    // \ u002X is an invalid unicode literal so it should be escaped.
    GenerateUnsafeProjection.generate(
      Literal.create("\\u002X/Compilation error occurs", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\u002X/Compilation error occurs", StringType) :: Nil)
    // \ u001 is an invalid unicode literal so it should be escaped.
    GenerateUnsafeProjection.generate(
      Literal.create("\\u001/Compilation error occurs", StringType) :: Nil)
    GenerateUnsafeProjection.generate(
      Literal.create("\\\\u001/Compilation error occurs", StringType) :: Nil)
  }
  // Field names containing quotes must be escaped by GetExternalRowField.
  test("SPARK-17160: field names are properly escaped by GetExternalRowField") {
    val inputObject = BoundReference(0, ObjectType(classOf[Row]), nullable = true)
    GenerateUnsafeProjection.generate(
      ValidateExternalType(
        GetExternalRowField(inputObject, index = 0, fieldName = "\"quote"), IntegerType) :: Nil)
  }
  // Quotes inside AssertTrue's generated error message must be escaped too.
  test("SPARK-17160: field names are properly escaped by AssertTrue") {
    GenerateUnsafeProjection.generate(AssertTrue(Cast(Literal("\""), BooleanType)).child :: Nil)
  }
  // CSE must not hoist a subexpression out of the conditional branch that guards it.
  test("should not apply common subexpression elimination on conditional expressions") {
    val row = InternalRow(null)
    val bound = BoundReference(0, IntegerType, true)
    val assertNotNull = AssertNotNull(bound)
    val expr = If(IsNull(bound), Literal(1), Add(assertNotNull, assertNotNull))
    val projection = GenerateUnsafeProjection.generate(
      Seq(expr), subexpressionEliminationEnabled = true)
    // should not throw exception
    projection(row)
  }
  // splitExpressions over 10000 columns must stay under the 64KB method limit.
  test("SPARK-22226: splitExpressions should not generate codes beyond 64KB") {
    val colNumber = 10000
    val attrs = (1 to colNumber).map(colIndex => AttributeReference(s"_$colIndex", IntegerType)())
    val lit = Literal(1000)
    val exprs = attrs.flatMap { a =>
      Seq(If(lit < a, lit, a), sqrt(a))
    }
    UnsafeProjection.create(exprs, attrs)
  }
test("SPARK-22543: split large predicates into blocks due to JVM code size limit") {
val length = 600
val input = new GenericInternalRow(length)
val utf8Str = UTF8String.fromString(s"abc")
for (i <- 0 until length) {
input.update(i, utf8Str)
}
var exprOr: Expression = Literal(false)
for (i <- 0 until length) {
exprOr = Or(EqualTo(BoundReference(i, StringType, true), Literal(s"c$i")), exprOr)
}
val planOr = GenerateMutableProjection.generate(Seq(exprOr))
val actualOr = planOr(input).toSeq(Seq(exprOr.dataType))
assert(actualOr.length == 1)
val expectedOr = false
if (!checkResult(actualOr.head, expectedOr, exprOr)) {
fail(s"Incorrect Evaluation: expressions: $exprOr, actual: $actualOr, expected: $expectedOr")
}
var exprAnd: Expression = Literal(true)
for (i <- 0 until length) {
exprAnd = And(EqualTo(BoundReference(i, StringType, true), Literal(s"c$i")), exprAnd)
}
val planAnd = GenerateMutableProjection.generate(Seq(exprAnd))
val actualAnd = planAnd(input).toSeq(Seq(exprAnd.dataType))
assert(actualAnd.length == 1)
val expectedAnd = false
if (!checkResult(actualAnd.head, expectedAnd, exprAnd)) {
fail(
s"Incorrect Evaluation: expressions: $exprAnd, actual: $actualAnd, expected: $expectedAnd")
}
}
  // The three tests below pin SPARK-22696/22716: generated code should not
  // consume class-level (inlined global) mutable state slots unnecessarily.
  test("SPARK-22696: CreateExternalRow should not use global variables") {
    val ctx = new CodegenContext
    val schema = new StructType().add("a", IntegerType).add("b", StringType)
    CreateExternalRow(Seq(Literal(1), Literal("x")), schema).genCode(ctx)
    assert(ctx.inlinedMutableStates.isEmpty)
  }

  test("SPARK-22696: InitializeJavaBean should not use global variables") {
    val ctx = new CodegenContext
    InitializeJavaBean(Literal.fromObject(new java.util.LinkedList[Int]),
      Map("add" -> Literal(1))).genCode(ctx)
    assert(ctx.inlinedMutableStates.isEmpty)
  }

  test("SPARK-22716: addReferenceObj should not add mutable states") {
    val ctx = new CodegenContext
    val foo = new Object()
    ctx.addReferenceObj("foo", foo)
    assert(ctx.inlinedMutableStates.isEmpty)
  }

  // Mutable states beyond the inlining threshold must be compacted into arrays.
  test("SPARK-18016: define mutable states by using an array") {
    val ctx1 = new CodegenContext
    for (i <- 1 to CodeGenerator.OUTER_CLASS_VARIABLES_THRESHOLD + 10) {
      ctx1.addMutableState(CodeGenerator.JAVA_INT, "i", v => s"$v = $i;")
    }
    assert(ctx1.inlinedMutableStates.size == CodeGenerator.OUTER_CLASS_VARIABLES_THRESHOLD)
    // When the number of primitive type mutable states is over the threshold, others are
    // allocated into an array
    assert(ctx1.arrayCompactedMutableStates(CodeGenerator.JAVA_INT).arrayNames.size == 1)
    assert(ctx1.mutableStateInitCode.size == CodeGenerator.OUTER_CLASS_VARIABLES_THRESHOLD + 10)

    val ctx2 = new CodegenContext
    for (i <- 1 to CodeGenerator.MUTABLESTATEARRAY_SIZE_LIMIT + 10) {
      ctx2.addMutableState("InternalRow[]", "r", v => s"$v = new InternalRow[$i];")
    }
    // When the number of non-primitive type mutable states is over the threshold, others are
    // allocated into a new array
    assert(ctx2.inlinedMutableStates.isEmpty)
    assert(ctx2.arrayCompactedMutableStates("InternalRow[]").arrayNames.size == 2)
    assert(ctx2.arrayCompactedMutableStates("InternalRow[]").getCurrentIndex == 10)
    assert(ctx2.mutableStateInitCode.size == CodeGenerator.MUTABLESTATEARRAY_SIZE_LIMIT + 10)
  }
test("SPARK-22750: addImmutableStateIfNotExists") {
val ctx = new CodegenContext
val mutableState1 = "field1"
val mutableState2 = "field2"
ctx.addImmutableStateIfNotExists("int", mutableState1)
ctx.addImmutableStateIfNotExists("int", mutableState1)
ctx.addImmutableStateIfNotExists("String", mutableState2)
ctx.addImmutableStateIfNotExists("int", mutableState1)
ctx.addImmutableStateIfNotExists("String", mutableState2)
assert(ctx.inlinedMutableStates.length == 2)
}
test("SPARK-23628: calculateParamLength should compute properly the param length") {
assert(CodeGenerator.calculateParamLength(Seq.range(0, 100).map(Literal(_))) == 101)
assert(CodeGenerator.calculateParamLength(
Seq.range(0, 100).map(x => Literal(x.toLong))) == 201)
}
  // withSubExprEliminationExprs must be purely scoped: the substitution map it
  // installs is visible only inside the callback, and the previous map is
  // restored afterwards.
  test("SPARK-23760: CodegenContext.withSubExprEliminationExprs should save/restore correctly") {
    val ref = BoundReference(0, IntegerType, true)
    val add1 = Add(ref, ref)
    val add2 = Add(add1, add1)
    // Placeholder elimination state used as the temporary substitution target.
    val dummy = SubExprEliminationState(
      ExprCode(EmptyBlock,
        JavaCode.variable("dummy", BooleanType),
        JavaCode.variable("dummy", BooleanType)))

    // raw testing of basic functionality
    {
      val ctx = new CodegenContext
      val e = ref.genCode(ctx)
      // before
      ctx.subExprEliminationExprs += ref -> SubExprEliminationState(
        ExprCode(EmptyBlock, e.isNull, e.value))
      assert(ctx.subExprEliminationExprs.contains(ref))
      // call withSubExprEliminationExprs
      ctx.withSubExprEliminationExprs(Map(add1 -> dummy)) {
        assert(ctx.subExprEliminationExprs.contains(add1))
        assert(!ctx.subExprEliminationExprs.contains(ref))
        Seq.empty
      }
      // after
      assert(ctx.subExprEliminationExprs.nonEmpty)
      assert(ctx.subExprEliminationExprs.contains(ref))
      assert(!ctx.subExprEliminationExprs.contains(add1))
    }

    // emulate an actual codegen workload
    {
      val ctx = new CodegenContext
      // before
      ctx.generateExpressions(Seq(add2, add1), doSubexpressionElimination = true) // trigger CSE
      assert(ctx.subExprEliminationExprs.contains(add1))
      // call withSubExprEliminationExprs
      ctx.withSubExprEliminationExprs(Map(ref -> dummy)) {
        assert(ctx.subExprEliminationExprs.contains(ref))
        assert(!ctx.subExprEliminationExprs.contains(add1))
        Seq.empty
      }
      // after
      assert(ctx.subExprEliminationExprs.nonEmpty)
      assert(ctx.subExprEliminationExprs.contains(add1))
      assert(!ctx.subExprEliminationExprs.contains(ref))
    }
  }
test("SPARK-23986: freshName can generate duplicated names") {
val ctx = new CodegenContext
val names1 = ctx.freshName("myName1") :: ctx.freshName("myName1") ::
ctx.freshName("myName11") :: Nil
assert(names1.distinct.length == 3)
val names2 = ctx.freshName("a") :: ctx.freshName("a") ::
ctx.freshName("a_1") :: ctx.freshName("a_0") :: Nil
assert(names2.distinct.length == 4)
}
  test("SPARK-25113: should log when there exists generated methods above HugeMethodLimit") {
    val appender = new LogAppender("huge method limit")
    withLogAppender(appender, loggerNames = Seq(classOf[CodeGenerator[_, _]].getName)) {
      val x = 42
      // HugeCodeIntExpression (defined below) emits a method body above the
      // huge-method bytecode threshold, which should trigger the warning.
      val expr = HugeCodeIntExpression(x)
      val proj = GenerateUnsafeProjection.generate(Seq(expr))
      val actual = proj(null)
      assert(actual.getInt(0) == x)
    }
    assert(appender.loggingEvents
      .exists(_.getRenderedMessage().contains("Generated method too long")))
  }

  test("SPARK-28916: subexpression elimination can cause 64kb code limit on UnsafeProjection") {
    val numOfExprs = 10000
    // Each Add appears twice so common-subexpression elimination kicks in.
    val exprs = (0 to numOfExprs).flatMap(colIndex =>
      Seq(Add(BoundReference(colIndex, DoubleType, true),
        BoundReference(numOfExprs + colIndex, DoubleType, true)),
        Add(BoundReference(colIndex, DoubleType, true),
          BoundReference(numOfExprs + colIndex, DoubleType, true))))
    // these should not fail to compile due to 64K limit
    GenerateUnsafeProjection.generate(exprs, true)
    GenerateMutableProjection.generate(exprs, true)
  }

  test("SPARK-32624: Use CodeGenerator.typeName() to fix byte[] compile issue") {
    val ctx = new CodegenContext
    val bytes = new Array[Byte](3)
    val refTerm = ctx.addReferenceObj("bytes", bytes)
    // The cast must be spelled "byte[]", not the JVM-internal name "[B".
    assert(refTerm == "((byte[]) references[0] /* bytes */)")
  }

  test("SPARK-32624: CodegenContext.addReferenceObj should work for nested Scala class") {
    // emulate TypeUtils.getInterpretedOrdering(StringType)
    val ctx = new CodegenContext
    val comparator = implicitly[Ordering[UTF8String]]
    val refTerm = ctx.addReferenceObj("comparator", comparator)
    // Expecting result:
    // "((scala.math.LowPriorityOrderingImplicits$$anon$3) references[0] /* comparator */)"
    // Using lenient assertions to be resilient to anonymous class numbering changes
    assert(!refTerm.contains("null"))
    assert(refTerm.contains("scala.math.LowPriorityOrderingImplicits$$anon$"))
  }

  // TODO (SPARK-35579): Fix this bug in janino or work around it in Spark.
  ignore("SPARK-35578: final local variable bug in janino") {
    val code =
      """
        |public Object generate(Object[] references) {
        |  return new MyClass(references == null);
        |}
        |
        |class MyClass {
        |  private boolean b1;
        |
        |  public MyClass(boolean b1) {
        |    this.b1 = b1;
        |  }
        |
        |  public UnsafeRow apply(InternalRow i) {
        |    final int value_0;
        |    // The bug still exist if the if condition is 'true'. Here we use a variable
        |    // to make the test more robust, in case the compiler can eliminate the else branch.
        |    if (b1) {
        |    } else {
        |      int field_0 = 1;
        |    }
        |    // The second if-else is necessary to trigger the bug.
        |    if (b1) {
        |    } else {
        |      // The bug disappear if it's an int variable.
        |      long field_1 = 2;
        |    }
        |    value_0 = 1;
        |
        |    // The second final variable is necessary to trigger the bug.
        |    final int value_2;
        |    if (b1) {
        |    } else {
        |      int field_2 = 3;
        |    }
        |    value_2 = 2;
        |
        |    return null;
        |  }
        |}
        |""".stripMargin
    CodeGenerator.compile(new CodeAndComment(code, Map.empty))
  }
}
/**
 * Test helper: a leaf expression whose generated evaluation body is
 * deliberately larger than the JVM "huge method" bytecode threshold, used to
 * exercise the generated-method-size logging path (SPARK-25113).
 */
case class HugeCodeIntExpression(value: Int) extends LeafExpression {
  override def nullable: Boolean = true
  override def dataType: DataType = IntegerType
  // Interpreted path: simply return the wrapped constant.
  override def eval(input: InternalRow): Any = value
  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
    // Assuming HugeMethodLimit to be 8000
    val HugeMethodLimit = CodeGenerator.DEFAULT_JVM_HUGE_METHOD_LIMIT
    // A single "int dummyN = 0;" will be at least 2 bytes of bytecode:
    // 0: iconst_0
    // 1: istore_1
    // and it'll become bigger as the number of local variables increases.
    // So 4000 such dummy local variable definitions are sufficient to bump the bytecode size
    // of a generated method to above 8000 bytes.
    val hugeCode = (0 until (HugeMethodLimit / 2)).map(i => s"int dummy$i = 0;").mkString("\n")
    val code =
      code"""{
        | $hugeCode
        |}
        |boolean ${ev.isNull} = false;
        |int ${ev.value} = $value;
      """.stripMargin
    ev.copy(code = code)
  }
}
| wangmiao1981/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala | Scala | apache-2.0 | 26,672 |
import GraphGen.State
import Nodes.{Proto3, Proto2, ProtoSyntax}
import org.scalacheck.{Arbitrary, Gen}
/**
* Created by thesamet on 9/28/14.
*/
object GenTypes {
  /**
   * A field type that the schema generator can assign to a protobuf field.
   * `packable` tracks whether the type may carry [packed=true] when repeated
   * (true for the numeric scalars and enum references below, false for
   * string/bytes/message/map); `isMap` marks map<_, _> entry types.
   */
  sealed trait ProtoType {
    def packable: Boolean
    def isMap: Boolean
  }

  /** A protobuf scalar type plus a generator of valid literal values for it. */
  case class Primitive(
      name: String,
      genValue: Gen[String],
      packable: Boolean = true,
      isMap: Boolean = false
  ) extends ProtoType {
    override def toString = s"Primitive($name)"
  }

  // Raw value generators shared by the Primitive definitions below; the
  // unsigned variants are restricted to the non-negative range.
  private val genInt32 = Arbitrary.arbitrary[Int]
  private val genInt64 = Arbitrary.arbitrary[Long]
  private val genUInt64 = Gen.chooseNum[Long](0, Long.MaxValue)
  private val genUInt32 = Gen.chooseNum[Int](0, Int.MaxValue)
// Simple version, since the one at TextFormatUtils is only in runtimne.
private def escapeBytes(raw: Seq[Byte]): String = {
val builder = new StringBuilder
builder.append('"')
raw.map {
case b if b == '\\"'.toByte => builder.append("\\\\\\"")
case b if b == '\\''.toByte => builder.append("\\\\\\'")
case b if b == '\\\\'.toByte => builder.append("\\\\\\\\")
case b if b >= 0x20 => builder.append(b.toChar)
case b =>
builder.append('\\\\')
builder.append((48 + ((b >>> 6) & 3)).toChar)
builder.append((48 + ((b >>> 3) & 7)).toChar)
builder.append((48 + (b & 7)).toChar)
}
builder.append('"')
builder.result()
}
  // One Primitive per protobuf scalar type, each wired to a generator that
  // produces a syntactically valid literal for that type.
  val ProtoSint32 = Primitive("sint32", genInt32.map(_.toString))
  val ProtoUint32 = Primitive("uint32", genUInt32.map(_.toString))
  val ProtoInt32 = Primitive("int32", genInt32.map(_.toString))
  val ProtoFixed32 = Primitive("fixed32", genUInt32.map(_.toString))
  val ProtoSfixed32 = Primitive("sfixed32", genInt32.map(_.toString))
  val ProtoSint64 = Primitive("sint64", genInt64.map(_.toString))
  val ProtoUint64 = Primitive("uint64", genUInt64.map(_.toString))
  val ProtoInt64 = Primitive("int64", genInt64.map(_.toString))
  val ProtoFixed64 = Primitive("fixed64", genUInt64.map(_.toString))
  val ProtoSfixed64 = Primitive("sfixed64", genInt64.map(_.toString))
  val ProtoDouble = Primitive("double", Arbitrary.arbitrary[Double].map(_.toString))
  val ProtoFloat = Primitive("float", Arbitrary.arbitrary[Float].map(_.toString))
  val ProtoBool = Primitive("bool", Arbitrary.arbitrary[Boolean].map(_.toString))
  // string/bytes literals are rendered through escapeBytes and are not packable.
  val ProtoString = Primitive(
    "string",
    Arbitrary.arbitrary[String].map(_.getBytes("UTF-8").toSeq).map(escapeBytes),
    packable = false
  )
  val ProtoBytes =
    Primitive("bytes", Gen.listOf(Arbitrary.arbitrary[Byte]).map(escapeBytes), packable = false)

  /** A reference (by generator-assigned id) to a previously declared message. */
  case class MessageReference(id: Int) extends ProtoType {
    def packable = false
    def isMap = false
  }

  /** A reference (by generator-assigned id) to a previously declared enum. */
  case class EnumReference(id: Int) extends ProtoType {
    def packable = true
    def isMap = false
  }

  /** A map<keyType, valueType> field type. */
  case class MapType(keyType: ProtoType, valueType: ProtoType) extends ProtoType {
    def packable = false
    def isMap = true
  }
def generatePrimitive =
Gen.oneOf(
ProtoSint32,
ProtoUint32,
ProtoInt32,
ProtoFixed32,
ProtoSfixed32,
ProtoSint64,
ProtoUint64,
ProtoInt64,
ProtoFixed64,
ProtoSfixed64,
ProtoDouble,
ProtoFloat,
ProtoBool,
ProtoString,
ProtoBytes
)
def genProto3EnumReference(state: State) =
Gen.oneOf(state.proto3EnumIds).map(EnumReference)
def generateMapKey: Gen[ProtoType] =
Gen.oneOf(
ProtoSint32,
ProtoUint32,
ProtoInt32,
ProtoFixed32,
ProtoSfixed32,
ProtoSint64,
ProtoUint64,
ProtoInt64,
ProtoFixed64,
ProtoSfixed64,
ProtoBool,
ProtoString
)
  /** Field labels as they appear in .proto sources. */
  object FieldModifier extends Enumeration {
    val OPTIONAL = Value("optional")
    val REQUIRED = Value("required")
    val REPEATED = Value("repeated")
  }

  /** A field's label together with its [packed=true] option. */
  case class FieldOptions(modifier: FieldModifier.Value, isPacked: Boolean)

  // `required` is only offered when the caller allows it (proto2, acyclic —
  // see genOptionsForField below).
  def genFieldModifier(allowRequired: Boolean): Gen[FieldModifier.Value] =
    if (allowRequired)
      Gen.oneOf(FieldModifier.OPTIONAL, FieldModifier.REQUIRED, FieldModifier.REPEATED)
    else Gen.oneOf(FieldModifier.OPTIONAL, FieldModifier.REPEATED)
  // For enums and messages we choose a type that was either declared before or is nested within
  // the current message, so that each generated file depends only on files declared before it.
  def genFieldType(
      state: State,
      syntax: ProtoSyntax,
      allowMaps: Boolean = true,
      allowCurrentMessage: Boolean = true,
      enumMustHaveZeroDefined: Boolean = false
  ): Gen[ProtoType] = {
    // Primitives dominate (weight 5); message/enum/map references are mixed in
    // with weight 1 each, but only when eligible candidates exist.
    val baseFreq = List((5, generatePrimitive))

    val withMessages =
      if (state._nextMessageId > 0 && allowCurrentMessage)
        (1, Gen.chooseNum(0, state._nextMessageId - 1).map(MessageReference)) :: baseFreq
      else if (!allowCurrentMessage && state.currentFileInitialMessageId > 0)
        (1, Gen.chooseNum(0, state.currentFileInitialMessageId - 1).map(MessageReference)) :: baseFreq
      else baseFreq

    val withEnums = syntax match {
      case Proto2 =>
        if (enumMustHaveZeroDefined && state.enumsWithZeroDefined.nonEmpty) {
          (1, Gen.oneOf(state.enumsWithZeroDefined).map(EnumReference)) :: withMessages
        } else if (!enumMustHaveZeroDefined && state._nextEnumId > 0) {
          (1, Gen.chooseNum(0, state._nextEnumId - 1).map(EnumReference)) :: withMessages
        } else withMessages
      case Proto3 =>
        // Proto3 messages may only reference proto3 enums (whose first value
        // is always zero), never proto2 enums.
        if (state.proto3EnumIds.nonEmpty)
          (1, genProto3EnumReference(state)) :: withMessages
        else withMessages
    }

    val withMaps =
      if (!allowMaps)
        withEnums
      else (1, genMapType(state, syntax)) :: withEnums

    Gen.frequency(withMaps: _*)
  }
def genMapType(state: State, syntax: ProtoSyntax): Gen[MapType] =
for {
keyType <- generateMapKey
valueType <- genFieldType(
state,
syntax,
allowMaps = false,
// until https://github.com/google/protobuf/issues/355 is fixed.
allowCurrentMessage = false,
enumMustHaveZeroDefined = true
)
} yield MapType(keyType, valueType)
  // We allow 'required' only for messages with lower ids. This ensures no cycles of required
  // fields.
  def genOptionsForField(
      messageId: Int,
      fieldType: ProtoType,
      protoSyntax: ProtoSyntax,
      inOneof: Boolean
  ): Gen[FieldOptions] =
    // Fields inside a oneof carry no explicit label and are never packed.
    if (inOneof) Gen.const(FieldOptions(FieldModifier.OPTIONAL, isPacked = false))
    else
      fieldType match {
        case MessageReference(id) =>
          genFieldModifier(allowRequired = protoSyntax.isProto2 && id < messageId)
            .map(mod => FieldOptions(mod, isPacked = false))
        // Map fields are implicitly repeated and never packed.
        case MapType(_, _) => Gen.const(FieldOptions(FieldModifier.REPEATED, isPacked = false))
        case _ =>
          for {
            mod <- genFieldModifier(allowRequired = protoSyntax.isProto2)
            // [packed=true] only applies to repeated packable scalars/enums.
            packed <- if (fieldType.packable && mod == FieldModifier.REPEATED)
              Gen.oneOf(true, false)
            else Gen.const(false)
          } yield FieldOptions(mod, isPacked = packed)
      }
}
| dotty-staging/ScalaPB | proptest/src/test/scala/GenTypes.scala | Scala | apache-2.0 | 7,178 |
/*
* scala-swing (https://www.scala-lang.org)
*
* Copyright EPFL, Lightbend, Inc., contributors
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.swing
import scala.collection.{Iterator, mutable}
/**
 * Default partial implementation for buffer adapters: subclasses supply the
 * primitive operations (`length`, `apply`, `insert`, `remove(idx)`, `addOne`)
 * and this class derives the remaining Buffer operations from them.
 */
abstract class BufferWrapper[A] extends mutable.Buffer[A] {
  type MoreElem[+B] = IterableOnce[B]

  /** Empties the buffer by removing the front element `length` times. */
  override def clear(): Unit = for (_ <- 0 until length) remove(0)

  /** Replaces the element at `n` by removing it and inserting the new value. */
  override def update(n: Int, a: A): Unit = {
    remove(n)
    insert(n, a)
  }

  override def iterator: Iterator[A] = Iterator.range(0, length).map(apply)

  override def prepend(elem: A): this.type = { insert(0, elem); this }

  override def insertAll(idx: Int, elems: MoreElem[A]): Unit = {
    var i = idx
    for (el <- elems.iterator) {
      insert(i, el)
      i += 1
    }
  }

  /**
   * Removes `count` consecutive elements starting at index `idx`.
   *
   * Each removal shifts the remaining elements down, so we repeatedly remove
   * at the *same* index. (The previous implementation removed at `idx + n`,
   * which skipped every other element of the range and could run past the
   * end of the buffer.)
   */
  override def remove(idx: Int, count: Int): Unit = {
    require(count >= 0)
    var n = 0
    while (n < count) {
      remove(idx)
      n += 1
    }
  }

  /** Replaces `replaced` elements at `from` with the contents of `patch`. */
  override def patchInPlace(from: Int, patch: MoreElem[A], replaced: Int): this.type = {
    if (replaced > 0) {
      remove(from, replaced)
    }
    insertAll(from, patch)
    this
  }
}
| scala/scala-swing | src/main/scala-2.13+/scala/swing/BufferWrapper.scala | Scala | apache-2.0 | 1,344 |
import com.typesafe.sbt.less.Import.LessKeys
import com.typesafe.sbt.web
import com.typesafe.sbt.web.SbtWeb
import org.sbtidea.SbtIdeaPlugin
import sbt._
import Keys._
import org.scalajs.sbtplugin.ScalaJSPlugin
import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._
object Build extends sbt.Build {
import web.Import._
  // sbt task: expands every *.template.html under the assets source directory
  // into two concrete pages — <base>-dev.html wired to the fastOptJS output
  // and <base>.html wired to the fullOptJS output — filling in the css / less
  // / jslib / analytics placeholder comments.
  val handleLootyHtml = Def.task {
    // Stylesheets that ship pre-built (not compiled from less).
    val css = """
    <link rel="stylesheet" href="jslib/slickgrid/css/smoothness/jquery-ui-1.8.16.custom.css"/>
    <link rel="stylesheet" href="jslib/slickgrid/slick.grid.css"/>
    <link rel="stylesheet" href="jslib/font-awesome-4.2.0/css/font-awesome.css"/>
    <link rel="stylesheet" href="jslib/select2/select2.css">
    """
    // Third-party scripts loaded before the Scala.js bundle.
    val jslib = """
    <script type="text/javascript" src="jslib/jailed/0.2.0/jailed.js"></script>
    <script type="text/javascript" src="jslib/d3.js"></script>
    <script type="text/javascript" src="jslib/jquery.js"></script>
    <script type="text/javascript" src="jslib/jquery-migrate.js"></script>
    <script type="text/javascript" src="jslib/react/0.12.1/react-with-addons.js"></script>
    <script type="text/javascript" src="jslib/signals.js"></script>
    <script type="text/javascript" src="jslib/crossroads.js"></script>
    <script type="text/javascript" src="jslib/hasher.js"></script>
    <script type="text/javascript" src="jslib/esprima.js"></script>
    <script type="text/javascript" src="jslib/vm.js"></script>
    <script type="text/javascript" src="jslib/select2/select2.js"></script>
    <script type="text/javascript" src="jslib/filesaver.js"></script>
    <script type="text/javascript" src="jslib/slickgrid/lib/jquery.event.drag-2.0.min.js"></script>
    <script type="text/javascript" src="jslib/slickgrid/lib/jquery.event.drop-2.0.min.js"></script>
    <script type="text/javascript" src="jslib/slickgrid/slick.core.js"></script>
    <script type="text/javascript" src="jslib/slickgrid/slick.grid.js"></script>
    <script type="text/javascript" src="jslib/slickgrid/slick.dataview.js"></script>
    <script type="text/javascript" src="jslib/theme-switcher.js"></script>
    """
    val analytics = """
    <script type="text/javascript" src="jslib/startup.js"></script>
    """
    //Copy over the html files, while filling in the template sections
    // Relative paths of the css files that the less compiler will emit,
    // derived from the less sources (partials excluded by the filters).
    val lessFiles = {
      val i = (includeFilter in(Assets, LessKeys.less)).value
      val x = (excludeFilter in(Assets, LessKeys.less)).value
      val srcDir = (sourceDirectory in Assets).value
      val lessSrcs = (sourceDirectory in Assets).value ** (i -- x)
      val f = lessSrcs.pair(relativeTo(srcDir))
      //path delimiter fix for windows builds in css paths
      //.replace("\\\\","/")
      f.map(_._2).map(_.replace(".less", ".css").replace("\\\\","/"))
    }
    val less = lessFiles.sorted.map(f => s"""<link rel="stylesheet" href="$f"/>""").mkString("\\n")
    val htmlSrcDir: File = (sourceDirectory in Assets).value
    val htmlSrcs: PathFinder = htmlSrcDir * "*.template.html"
    val outDir: File = WebKeys.webTarget.value / "public" / "main"
    val outMappings = htmlSrcs pair Path.rebase(htmlSrcDir, outDir)
    outMappings.flatMap {
      case (in, out0) =>
        val content = IO.read(in)
        val outDir = out0.getParentFile
        // Extract the template's base name; the second regex is a fallback for
        // Windows-style path separators.
        val regex = """.*/([^/]+)\\.template\\.html""".r
        val regexWindows = """.*\\\\([^\\\\]+)\\.template\\.html""".r
        // NOTE(review): basename is never reassigned after this match — could be a val.
        var basename = regex.findFirstMatchIn(in.getPath) match {
          case Some(mat) =>
            mat.group(1)
          case None =>
            // fallback for windows case let it just blow up if get fails here
            regexWindows.findFirstMatchIn(in.getPath).getOrElse(sys.error(s"cannot resolve path ${in.getPath}")).group(1)
        }
        //Make a dev version and a release version
        val devFile = outDir / (basename + "-dev.html")
        val releaseFile = outDir / (basename + ".html")
        val devScripts = """<script type="text/javascript" src="looty-fastopt.js"></script>""".stripMargin
        val releaseScripts = """<script type="text/javascript" src="looty-opt.js"></script>"""
        // Substitutes each placeholder comment in the template with the
        // corresponding fragment; only the scalajs script tag differs between
        // the dev and release variants.
        def addAll(sjsScripts: String): String = {
          var res = content
          res = res.replace("<!-- insert scalajs -->", sjsScripts)
          res = res.replace("<!-- insert css -->", css)
          res = res.replace("<!-- insert less -->", less)
          res = res.replace("<!-- insert jslib -->", jslib)
          res = res.replace("<!-- insert analytics -->", analytics)
          res
        }
        val devOut = addAll(sjsScripts = devScripts)
        val releaseOut = addAll(sjsScripts = releaseScripts)
        IO.write(devFile, devOut.getBytes("UTF-8"))
        IO.write(releaseFile, releaseOut.getBytes("UTF-8"))
        List(devFile, releaseFile)
    }
    Seq[File](outMappings.map(_._1): _*)
  }
  // Scala.js linking tasks whose output directory is redirected below.
  lazy val sjsTasks = List(fastOptJS, fullOptJS)

  lazy val cgtaOpenVersion = "0.2.2"
  lazy val sVersion = "2.11.5"

  // Where the linked javascript lands (served as a public web asset).
  lazy val sjsOutDir = Def.settingKey[File]("directory for javascript files output by scalajs")

  lazy val looty: Project = Project("looty", file("looty"))
    .enablePlugins(ScalaJSPlugin, SbtWeb, play.twirl.sbt.SbtTwirl)
    .settings(net.virtualvoid.sbt.graph.Plugin.graphSettings : _*)
    // Compiler configuration.
    .settings(
      autoCompilerPlugins := true,
      scalacOptions += "-deprecation",
      scalacOptions += "-unchecked",
      scalacOptions += "-feature",
      scalacOptions += "-language:implicitConversions",
      scalacOptions += "-language:existentials",
      scalacOptions += "-language:higherKinds",
      scalaVersion := sVersion)
    // Test framework (otest) wiring.
    .settings(
      libraryDependencies += "biz.cgta" %%%! "otest-sjs" % "0.2.1",
      testFrameworks := Seq(new TestFramework("cgta.otest.runner.OtestSbtFramework")),
      scalaJSStage in Test := FastOptStage)
    .settings(
      libraryDependencies ++= Seq(
        "org.scala-js" %%% "scalajs-dom" % "0.8.0",
        "org.scala-lang.modules" %% "scala-async" % "0.9.2",
        "org.scala-js" %%% "scala-parser-combinators" % "1.0.2",
        "biz.cgta" %%% "oscala-sjs" % cgtaOpenVersion,
        "biz.cgta" %%% "serland-sjs" % cgtaOpenVersion,
        "biz.cgta" %%% "cenum-sjs" % cgtaOpenVersion,
        "be.doeraene" %%% "scalajs-jquery" % "0.8.0" exclude ("org.webjars", "jquery"),
        "com.github.japgolly.scalajs-react" %%% "core" % "0.8.2"
      )
    )
    // Less compilation: files starting with '_' are partials, not compiled directly.
    .settings(
      includeFilter in(Assets, LessKeys.less) := "*.less",
      excludeFilter in(Assets, LessKeys.less) := "_*.less"
    )
    .settings(SbtIdeaPlugin.ideaBasePackage := Some("looty"))
    // Regenerate the html entry pages as part of asset generation.
    .settings(sourceGenerators in Assets <+= handleLootyHtml)
    .settings(sourceDirectories in(Compile, play.twirl.sbt.Import.TwirlKeys.compileTemplates) := (unmanagedSourceDirectories in Compile).value)
    .settings(sjsOutDir := WebKeys.webTarget.value / "public" / "main")
    // Build web assets before either linking task, and emit the linked
    // javascript straight into the public asset directory.
    .settings((fastOptJS in(Compile)) <<= (fastOptJS in(Compile)).dependsOn(WebKeys.assets in Assets))
    .settings((fullOptJS in(Compile)) <<= (fullOptJS in(Compile)).dependsOn(WebKeys.assets in Assets))
    .settings(sjsTasks.map(t => crossTarget in(Compile, t) := sjsOutDir.value): _*)

  lazy val root = Project("root", file("."))
    .aggregate(looty)
    .settings(scalaVersion := sVersion)
    .enablePlugins(SbtWeb)

  // Placeholder for shared library constants.
  object Libs {
  }
}
| benjaminjackman/looty | project/Build.scala | Scala | gpl-2.0 | 7,179 |
package org.bitcoins.core.p2p
import org.bitcoins.testkit.core.gen.p2p.DataMessageGenerator
import org.bitcoins.testkit.util.BitcoinSUnitTest
import org.bitcoins.core.crypto.DoubleSha256Digest
import org.bitcoins.testkit.core.gen.CryptoGenerators
import org.bitcoins.testkit.Implicits._
/** Unit tests for the P2P `getheaders` message wrapper. */
class GetHeadersMessageTest extends BitcoinSUnitTest {

  it must "have serialization symmetry" in {
    forAll(DataMessageGenerator.getHeaderMessages) { headerMsg =>
      // Round-trip through the hex encoding and back.
      val roundTripped = GetHeadersMessage(headerMsg.hex)
      assert(roundTripped == headerMsg)
    }
  }

  it must "be constructable from just hashes" in {
    forAll(DataMessageGenerator.getHeaderDefaultProtocolMessage) { getHeader =>
      val rebuilt = GetHeadersMessage(getHeader.hashes, getHeader.hashStop)
      assert(rebuilt == getHeader)
    }
  }

  it must "be constructable without a stop" in {
    // A fresh random digest on every invocation.
    def getHash(): DoubleSha256Digest =
      CryptoGenerators.doubleSha256Digest.sampleSome

    // Omitting the stop hash must default it to the all-zero digest.
    val msg = GetHeadersMessage(List.fill(10)(getHash()))
    assert(msg.hashStop == DoubleSha256Digest.empty)

    val hash = getHash()
    val otherMsg = GetHeadersMessage(hash)
    assert(otherMsg == GetHeadersMessage(Vector(hash)))
  }

  it must "have a meaningful toString" in {
    forAll(DataMessageGenerator.getHeaderMessages) { message =>
      val rendered = message.toString()
      assert(rendered.length() < 300)
    }
  }
}
| bitcoin-s/bitcoin-s-core | core-test/src/test/scala/org/bitcoins/core/p2p/GetHeadersMessageTest.scala | Scala | mit | 1,313 |
package models
import scalikejdbc._
import skinny.orm.{Alias, SkinnyNoIdCRUDMapper}
/** Geographic coordinates for a station (one row per station). */
case class StationGeo(stationId: Long, latitude: Double, longitude: Double) {
  /** Persists this record via the companion mapper. */
  def save()(implicit session: DBSession) = StationGeo.create(this)
}

object StationGeo extends SkinnyNoIdCRUDMapper[StationGeo] {
  override val defaultAlias: Alias[StationGeo] = createAlias("sg")

  override def extract(rs: WrappedResultSet, n: ResultName[StationGeo]): StationGeo = autoConstruct(rs, n)

  // Symbol("...") instead of the single-quote symbol literal, which is
  // deprecated in Scala 2.13; the runtime Symbol values are identical.
  def create(sg: StationGeo)(implicit session: DBSession) =
    createWithAttributes(
      Symbol("stationId") -> sg.stationId,
      Symbol("latitude") -> sg.latitude,
      Symbol("longitude") -> sg.longitude)
}
| ponkotuy/train-stamp-rally | app/models/StationGeo.scala | Scala | apache-2.0 | 641 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.types
import org.apache.flink.api.common.typeutils._
import org.apache.flink.core.memory.{DataInputView, DataOutputView}
import org.apache.flink.types.Row
/**
 * Flink TypeSerializer for [[CRow]]: delegates the Row payload to
 * `rowSerializer` and appends one boolean encoding the change flag
 * (accumulate vs. retract).
 */
class CRowSerializer(val rowSerializer: TypeSerializer[Row]) extends TypeSerializer[CRow] {

  override def isImmutableType: Boolean = false

  override def duplicate(): TypeSerializer[CRow] = new CRowSerializer(rowSerializer.duplicate())

  override def createInstance(): CRow = new CRow(rowSerializer.createInstance(), true)

  override def copy(from: CRow): CRow = new CRow(rowSerializer.copy(from.row), from.change)

  // Reusing copy: writes into `reuse` instead of allocating a new CRow.
  override def copy(from: CRow, reuse: CRow): CRow = {
    rowSerializer.copy(from.row, reuse.row)
    reuse.change = from.change
    reuse
  }

  // -1 signals a variable-length record (the nested Row is variable-length).
  override def getLength: Int = -1

  // Wire format: serialized Row followed by a single boolean change flag.
  override def serialize(record: CRow, target: DataOutputView): Unit = {
    rowSerializer.serialize(record.row, target)
    target.writeBoolean(record.change)
  }

  override def deserialize(source: DataInputView): CRow = {
    val row = rowSerializer.deserialize(source)
    val change = source.readBoolean()
    new CRow(row, change)
  }

  override def deserialize(reuse: CRow, source: DataInputView): CRow = {
    rowSerializer.deserialize(reuse.row, source)
    reuse.change = source.readBoolean()
    reuse
  }

  // Stream-to-stream copy without materializing a CRow.
  override def copy(source: DataInputView, target: DataOutputView): Unit = {
    rowSerializer.copy(source, target)
    target.writeBoolean(source.readBoolean())
  }

  override def canEqual(obj: Any): Boolean = obj.isInstanceOf[CRowSerializer]

  // Two CRowSerializers are equal iff their nested Row serializers are equal.
  override def equals(obj: Any): Boolean = {
    if (canEqual(obj)) {
      val other = obj.asInstanceOf[CRowSerializer]
      rowSerializer.equals(other.rowSerializer)
    } else {
      false
    }
  }

  // Derived from the nested serializer only, so equal serializers hash equally.
  override def hashCode: Int = rowSerializer.hashCode() * 13

  // --------------------------------------------------------------------------------------------
  // Serializer configuration snapshotting & compatibility
  // --------------------------------------------------------------------------------------------

  override def snapshotConfiguration(): TypeSerializerConfigSnapshot = {
    new CRowSerializer.CRowSerializerConfigSnapshot(rowSerializer)
  }

  // Compatible iff the nested Row serializer is compatible with the snapshot;
  // otherwise requests migration, reusing a convert deserializer when offered.
  override def ensureCompatibility(
      configSnapshot: TypeSerializerConfigSnapshot): CompatibilityResult[CRow] = {

    configSnapshot match {
      case crowSerializerConfigSnapshot: CRowSerializer.CRowSerializerConfigSnapshot =>
        val compatResult = CompatibilityUtil.resolveCompatibilityResult(
          crowSerializerConfigSnapshot.getSingleNestedSerializerAndConfig.f0,
          classOf[UnloadableDummyTypeSerializer[_]],
          crowSerializerConfigSnapshot.getSingleNestedSerializerAndConfig.f1,
          rowSerializer)

        if (compatResult.isRequiresMigration) {
          if (compatResult.getConvertDeserializer != null) {
            CompatibilityResult.requiresMigration(
              new CRowSerializer(
                new TypeDeserializerAdapter(compatResult.getConvertDeserializer))
            )
          } else {
            CompatibilityResult.requiresMigration()
          }
        } else {
          CompatibilityResult.compatible()
        }

      case _ => CompatibilityResult.requiresMigration()
    }
  }
}
object CRowSerializer {

  /**
   * Config snapshot for [[CRowSerializer]]; the nested Row serializer's
   * snapshot is stored via the composite base class.
   */
  class CRowSerializerConfigSnapshot(
      private val rowSerializer: TypeSerializer[Row])
    extends CompositeTypeSerializerConfigSnapshot(rowSerializer) {

    /** This empty nullary constructor is required for deserializing the configuration. */
    def this() = this(null)

    override def getVersion: Int = CRowSerializerConfigSnapshot.VERSION
  }

  object CRowSerializerConfigSnapshot {
    // Bump when the snapshot's serialized layout changes.
    val VERSION = 1
  }
}
| WangTaoTheTonic/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/runtime/types/CRowSerializer.scala | Scala | apache-2.0 | 4,544 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import org.apache.spark.annotation.Experimental
import org.apache.spark.{TaskContext, Logging}
import org.apache.spark.partial.BoundedDouble
import org.apache.spark.partial.MeanEvaluator
import org.apache.spark.partial.PartialResult
import org.apache.spark.partial.SumEvaluator
import org.apache.spark.util.StatCounter
/**
* Extra functions available on RDDs of Doubles through an implicit conversion.
* Import `org.apache.spark.SparkContext._` at the top of your program to use these functions.
*/
class DoubleRDDFunctions(self: RDD[Double]) extends Logging with Serializable {

  /** Add up the elements in this RDD. NOTE: reduce throws on an empty RDD. */
  def sum(): Double = {
    self.reduce(_ + _)
  }

  /**
   * Return a [[org.apache.spark.util.StatCounter]] object that captures the mean, variance and
   * count of the RDD's elements in one operation.
   */
  def stats(): StatCounter = {
    self.mapPartitions(nums => Iterator(StatCounter(nums))).reduce((a, b) => a.merge(b))
  }

  /** Compute the mean of this RDD's elements. (Triggers a full stats() pass.) */
  def mean(): Double = stats().mean

  /** Compute the variance of this RDD's elements. (Triggers a full stats() pass.) */
  def variance(): Double = stats().variance

  /** Compute the standard deviation of this RDD's elements. */
  def stdev(): Double = stats().stdev

  /**
   * Compute the sample standard deviation of this RDD's elements (which corrects for bias in
   * estimating the standard deviation by dividing by N-1 instead of N).
   */
  def sampleStdev(): Double = stats().sampleStdev

  /**
   * Compute the sample variance of this RDD's elements (which corrects for bias in
   * estimating the variance by dividing by N-1 instead of N).
   */
  def sampleVariance(): Double = stats().sampleVariance

  /**
   * :: Experimental ::
   * Approximate operation to return the mean within a timeout.
   */
  @Experimental
  def meanApprox(timeout: Long, confidence: Double = 0.95): PartialResult[BoundedDouble] = {
    val processPartition = (ctx: TaskContext, ns: Iterator[Double]) => StatCounter(ns)
    val evaluator = new MeanEvaluator(self.partitions.size, confidence)
    self.context.runApproximateJob(self, processPartition, evaluator, timeout)
  }

  /**
   * :: Experimental ::
   * Approximate operation to return the sum within a timeout.
   */
  @Experimental
  def sumApprox(timeout: Long, confidence: Double = 0.95): PartialResult[BoundedDouble] = {
    val processPartition = (ctx: TaskContext, ns: Iterator[Double]) => StatCounter(ns)
    val evaluator = new SumEvaluator(self.partitions.size, confidence)
    self.context.runApproximateJob(self, processPartition, evaluator, timeout)
  }

  /**
   * Compute a histogram of the data using bucketCount number of buckets evenly
   * spaced between the minimum and maximum of the RDD. For example if the min
   * value is 0 and the max is 100 and there are two buckets the resulting
   * buckets will be [0, 50) [50, 100]. bucketCount must be at least 1
   * If the RDD contains infinity, NaN throws an exception
   * If the elements in RDD do not vary (max == min) always returns a single bucket.
   */
  def histogram(bucketCount: Int): Pair[Array[Double], Array[Long]] = {
    // Compute the minimum and the maximum.
    // NOTE: relies on the (deprecated) auto-tupling of the two seed values
    // into the fold's Pair accumulator; first component tracks max, second min.
    val (max: Double, min: Double) = self.mapPartitions { items =>
      Iterator(items.foldRight(Double.NegativeInfinity,
        Double.PositiveInfinity)((e: Double, x: Pair[Double, Double]) =>
        (x._1.max(e), x._2.min(e))))
    }.reduce { (maxmin1, maxmin2) =>
      (maxmin1._1.max(maxmin2._1), maxmin1._2.min(maxmin2._2))
    }
    // An empty RDD leaves the infinities in place, so this also rejects empty input.
    if (min.isNaN || max.isNaN || max.isInfinity || min.isInfinity ) {
      throw new UnsupportedOperationException(
        "Histogram on either an empty RDD or RDD containing +/-infinity or NaN")
    }
    val increment = (max-min)/bucketCount.toDouble
    val range = if (increment != 0) {
      // NOTE(review): floating-point stepping may accumulate rounding error in
      // the generated bucket boundaries for large bucketCount — confirm acceptable.
      Range.Double.inclusive(min, max, increment)
    } else {
      // All elements identical: collapse to a single [min, min] bucket.
      List(min, min)
    }
    val buckets = range.toArray
    (buckets, histogram(buckets, true))
  }

  /**
   * Compute a histogram using the provided buckets. The buckets are all open
   * to the left except for the last which is closed
   * e.g. for the array
   * [1, 10, 20, 50] the buckets are [1, 10) [10, 20) [20, 50]
   * e.g 1<=x<10 , 10<=x<20, 20<=x<50
   * And on the input of 1 and 50 we would have a histogram of 1, 0, 0
   *
   * Note: if your histogram is evenly spaced (e.g. [0, 10, 20, 30]) this can be switched
   * from an O(log n) insertion to O(1) per element. (where n = # buckets) if you set evenBuckets
   * to true.
   * buckets must be sorted and not contain any duplicates.
   * buckets array must be at least two elements
   * All NaN entries are treated the same. If you have a NaN bucket it must be
   * the maximum value of the last position and all NaN entries will be counted
   * in that bucket.
   */
  def histogram(buckets: Array[Double], evenBuckets: Boolean = false): Array[Long] = {
    if (buckets.length < 2) {
      throw new IllegalArgumentException("buckets array must have at least two elements")
    }
    // The histogramPartition function computes the partial histogram for a given
    // partition. The provided bucketFunction determines which bucket in the array
    // to increment or returns None if there is no bucket. This is done so we can
    // specialize for uniformly distributed buckets and save the O(log n) binary
    // search cost.
    def histogramPartition(bucketFunction: (Double) => Option[Int])(iter: Iterator[Double]):
        Iterator[Array[Long]] = {
      val counters = new Array[Long](buckets.length - 1)
      while (iter.hasNext) {
        bucketFunction(iter.next()) match {
          case Some(x: Int) => {counters(x) += 1}
          // Out-of-range (or NaN) values are simply dropped.
          case _ => {}
        }
      }
      Iterator(counters)
    }
    // Merge the counters (element-wise sum into a1, which is reused).
    def mergeCounters(a1: Array[Long], a2: Array[Long]): Array[Long] = {
      a1.indices.foreach(i => a1(i) += a2(i))
      a1
    }
    // Basic bucket function. This works using Java's built in Array
    // binary search. Takes log(size(buckets))
    def basicBucketFunction(e: Double): Option[Int] = {
      val location = java.util.Arrays.binarySearch(buckets, e)
      if (location < 0) {
        // If the location is less than 0 then the insertion point in the array
        // to keep it sorted is -location-1
        val insertionPoint = -location-1
        // If we have to insert before the first element or after the last one
        // its out of bounds.
        // We do this rather than buckets.lengthCompare(insertionPoint)
        // because Array[Double] fails to override it (for now).
        if (insertionPoint > 0 && insertionPoint < buckets.length) {
          Some(insertionPoint-1)
        } else {
          None
        }
      } else if (location < buckets.length - 1) {
        // Exact match on a bucket boundary: counts toward the bucket it opens.
        Some(location)
      } else {
        // Exact match to the last element (the closed right edge of the final bucket).
        Some(location - 1)
      }
    }
    // Determine the bucket function in constant time. Requires that buckets are evenly spaced
    def fastBucketFunction(min: Double, increment: Double, count: Int)(e: Double): Option[Int] = {
      // Fail fast on NaN input: NaN never belongs to any bucket.
      if (e.isNaN()) {
        return None
      }
      val bucketNumber = (e - min)/(increment)
      // We do this rather than buckets.lengthCompare(bucketNumber)
      // because Array[Double] fails to override it (for now).
      if (bucketNumber > count || bucketNumber < 0) {
        None
      } else {
        // .min(count - 1) folds e == max into the closed last bucket.
        Some(bucketNumber.toInt.min(count - 1))
      }
    }
    // Decide which bucket function to pass to histogramPartition. We decide here
    // rather than having a general function so that the decision need only be made
    // once rather than once per shard
    val bucketFunction = if (evenBuckets) {
      fastBucketFunction(buckets(0), buckets(1)-buckets(0), buckets.length-1) _
    } else {
      basicBucketFunction _
    }
    self.mapPartitions(histogramPartition(bucketFunction)).reduce(mergeCounters)
  }
}
| zhangjunfang/eclipse-dir | spark/core/src/main/scala/org/apache/spark/rdd/DoubleRDDFunctions.scala | Scala | bsd-2-clause | 8,898 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.kernel.protocol.v5.content
// External libraries
import play.api.libs.json._
// Internal libraries
import com.ibm.spark.kernel.protocol.v5._
// Kernel protocol message carrying the result of an inspect request.
// ename/evalue/traceback are only populated for error replies.
case class InspectReply(
  status: String,
  data: Data,
  metadata: Metadata,
  ename: Option[String],
  evalue: Option[String],
  traceback: Option[List[String]]
) extends KernelMessageContent {
  /** Serializes this reply to its JSON wire representation. */
  override def content : String = {
    val asJson = Json.toJson(this)(InspectReply.inspectReplyOkWrites)
    asJson.toString
  }
}
object InspectReply extends TypeString {
  // Play-JSON (de)serializers derived by macro from the case class shape.
  implicit val inspectReplyOkReads = Json.reads[InspectReply]
  implicit val inspectReplyOkWrites = Json.writes[InspectReply]

  /**
   * Returns the type string associated with this object.
   *
   * @return The type as a string
   */
  override def toTypeString: String = "inspect_reply"
}
| codeaudit/spark-kernel | protocol/src/main/scala/com/ibm/spark/kernel/protocol/v5/content/InspectReply.scala | Scala | apache-2.0 | 1,394 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.eagle.stream.pipeline.parser
import com.typesafe.config.Config
import scala.collection.JavaConversions.mapAsScalaMap
import scala.collection.mutable
// Base class of all typed schema attributes; `name` identifies the attribute.
class Field(name:String) extends Serializable{
  /** The attribute's name. */
  def getName:String = name
}
// Concrete attribute types; DatetimeField additionally carries the expected
// date/time format pattern.
case class StringField(name:String) extends Field(name)
case class LongField(name:String) extends Field(name)
case class IntegerField(name:String) extends Field(name)
case class BooleanField(name:String) extends Field(name)
case class FloatField(name:String) extends Field(name)
case class DoubleField(name:String) extends Field(name)
case class DatetimeField(name:String,format:String) extends Field(name)
object Field{
  // Factory helpers. Public members now declare explicit result types instead
  // of relying on inference (best practice for public API surface).
  def string(name:String): StringField = StringField(name)
  def long(name:String): LongField = LongField(name)
  def integer(name:String): IntegerField = IntegerField(name)
  def boolean(name:String): BooleanField = BooleanField(name)
  def float(name:String): FloatField = FloatField(name)
  def double(name:String): DoubleField = DoubleField(name)
  def datetime(name:String)(format:String): DatetimeField = DatetimeField(name,format)

  /**
   * Builds a [[Field]] from a simple type name.
   *
   * NOTE(review): "datetime" is not accepted here — presumably because it also
   * requires a format string that a bare type name cannot carry; confirm with callers.
   *
   * @throws UnsupportedOperationException for unrecognized type names
   */
  def apply(name:String,typeName:String):Field = typeName match {
    case "string" => string(name)
    case "long" => long(name)
    case "integer" => integer(name)
    case "boolean" => boolean(name)
    case "float" => float(name)
    case "double" => double(name)
    case _ => throw new UnsupportedOperationException(s"""Unknown attribute type $typeName for attribute "$name"""")
  }
}
case class Schema(attributes:Seq[Field]) extends Serializable{

  /**
   * Finds the attribute with the given name, or None if absent / attributes is null.
   *
   * Fix: the previous implementation compared names with `eq` (reference
   * equality on AnyRef), so lookups failed for equal-but-distinct String
   * instances (e.g. names read from config). Value equality (`==`) is used now.
   */
  def getAttribute(attributeName:String):Option[Field]={
    if(attributes != null){
      attributes.find(_.getName == attributeName)
    }else None
  }

  /** Index of the named attribute, or -1 when absent (or attributes is null). */
  def indexOfAttribute(attributeName:String):Int = {
    if(attributes != null){
      attributes.indexWhere(_.getName == attributeName)
    } else -1
  }

  /**
   * Index of the named attribute (still -1 when the name is absent).
   *
   * @throws IllegalArgumentException only when the schema has no attribute list at all
   */
  @throws[IllegalArgumentException]
  def indexOfAttributeOrException(attributeName:String):Int = {
    if(attributes != null){
      attributes.indexWhere(_.getName == attributeName)
    } else throw new IllegalArgumentException(s"Attribute [$attributeName] is not found in stream $this")
  }
}
object Schema{

  /**
   * Parses a schema out of an attribute-name -> type-name map.
   * Only simple (String) type names are supported; nested maps are rejected.
   */
  def parse(map:Map[String,AnyRef]):Schema = {
    new Schema(map.keys.map {attributeName =>
      map(attributeName) match{
        case simpleType:String => Field(attributeName,simpleType)
        // Wildcard element types: the previous `java.util.Map[String,AnyRef]`
        // pattern was unchecked anyway due to type erasure.
        case complexType:java.util.Map[_,_] => throw new IllegalStateException(s"ComplexType attribute definition is not supported yet [$attributeName : $complexType] ")
        case otherType@_ => throw new IllegalStateException(s"Illegal attribute definition $attributeName : $otherType")
      }
    }.toSeq)
  }

  /**
   * @param attributes support string, symbol, Attribute and so on.
   * @return
   */
  def build(attributes:Seq[AnyRef]):Schema = {
    new Schema(attributes.map{
      // Destructure the tuple directly. The previous `t: (String, AnyRef)`
      // pattern was erasure-unchecked: a tuple whose first element was not a
      // String matched anyway and blew up later with a ClassCastException.
      // Such input now falls through to the explicit error case instead.
      case a @ (name: String, value) =>
        value match {
          case v:String => Field(name,v)
          case v:Symbol => Field(name,v.name)
          case _ => throw new UnsupportedOperationException(s"Illegal attribute definition $a")
        }
      case t:Field => t
      case a => throw new UnsupportedOperationException(s"Illegal attribute definition $a")
    })
  }
}
// Raised when a pipeline references a stream that has no definition.
private[pipeline] class StreamUndefinedException(message:String = "stream is not defined",throwable: Throwable = null) extends Exception(message,throwable)
private[pipeline] class SchemaSet {

  // schemaId -> Schema. Mutable map; no synchronization, so not thread-safe.
  private val processorSchemaCache = mutable.Map[String,Schema]()

  /**
   * Registers a schema under `schemaId`.
   *
   * Fix: the multi-line error message now calls `.stripMargin`, so the leading
   * `|` margin characters no longer leak into the exception text.
   *
   * @throws IllegalArgumentException if the id is already defined
   */
  def set(schemaId:String,schema:Schema):Unit = {
    if(processorSchemaCache.contains(schemaId)) throw new IllegalArgumentException(
      s"""
         |Failed to define schema for $schemaId as $schema,
         |because it has been defined as ${processorSchemaCache(schemaId)},
         |please call updateSchema(processorId,schema) instead
      """.stripMargin)
    processorSchemaCache.put(schemaId,schema)
  }

  /** Looks up a previously registered schema by id. */
  def get(schemaId:String):Option[Schema] = processorSchemaCache.get(schemaId)
}
private[pipeline] object SchemaSet{
  /** An empty, mutable schema registry. */
  def empty() = new SchemaSet()

  /**
   * For example:
   *
   * <code>
   * {
   *   metricStream {
   *     metric: string
   *     value: double
   *     timestamp: long
   *   }
   * }
   * </code>
   * @param schemaConfig
   * @return
   */
  def parse(schemaConfig:Map[String,AnyRef]):SchemaSet = {
    val schemas = new SchemaSet()
    schemaConfig.foreach(entry =>{
      // NOTE: `.toMap` on the java.util.HashMap relies on the (deprecated)
      // mapAsScalaMap implicit imported at the top of the file.
      schemas.set(entry._1,Schema.parse(entry._2.asInstanceOf[java.util.HashMap[String,AnyRef]].toMap))
    })
    schemas
  }

  /** Parses a SchemaSet straight from a Typesafe Config root object. */
  def parse(config:Config):SchemaSet = parse(config.root().unwrapped().asInstanceOf[java.util.HashMap[String,AnyRef]].toMap)
}
| pkuwm/incubator-eagle | eagle-core/eagle-data-process/eagle-stream-pipeline/src/main/scala/org/apache/eagle/stream/pipeline/parser/Schema.scala | Scala | apache-2.0 | 5,463 |
// Declares `foo` as a def macro whose implementation lives in Impls.foo
// (macro implementations must be compiled in a separate, earlier run).
object Macros {
  def foo: Unit = macro Impls.foo
}
object Test extends App {
Macros.foo
} | lampepfl/dotty | tests/disabled/macro/run/macro-term-declared-in-object/Macros_Test_2.scala | Scala | apache-2.0 | 93 |
package net.kemuridama.kafcon.protocol
import net.kemuridama.kafcon.model.ClusterResponseData
// JSON (de)serialization support for ClusterResponseData. Mixes in the
// protocols for every nested type so their implicit formats are in scope
// when jsonFormat8 derives the top-level format.
trait ClusterResponseDataJsonProtocol
  extends JsonProtocol
  with BrokerJsonProtocol
  with TopicJsonProtocol
  with ConnectionStateJsonProtocol {
  // jsonFormat8 matches ClusterResponseData's 8 constructor fields.
  implicit val clusterResponseDataFormat = jsonFormat8(ClusterResponseData)
}
| kemuridama/kafcon | src/main/scala/net/kemuridama/kafcon/protocol/ClusterResponseDataJsonProtocol.scala | Scala | mit | 325 |
package challenge
import scala.io.BufferedSource
import scalaz.Scalaz._
object Easy191 {

  /** Entry point: expects the path to a Project Gutenberg e-book as args(0). */
  def main(args: Array[String]): Unit = {
    println("Word frequencies:")
    word_freq(args(0)).foreach(println)
  }

  /** Opens the book at `src`. The caller is responsible for closing the source. */
  def readBook(src: String): BufferedSource = io.Source.fromFile(src)

  /**
   * Extracts the body of a Project Gutenberg e-book: the text between the
   * "*** START OF THE PROJECT GUTENBERG EBOOK ... ***" and
   * "*** END OF THE PROJECT GUTENBERG EBOOK ... ***" markers. Matching is
   * case-insensitive (both pattern and content are lowercased), and the
   * returned body is lowercase.
   *
   * Fixes: (1) the previous regexes used doubled backslashes inside
   * triple-quoted strings — which do not process escapes — so `\\*` matched
   * "zero or more backslashes" instead of a literal asterisk; (2) the file
   * handle was never closed.
   */
  def extractContent(src: String): String = {
    val extractRegex =
      ("""(?s)\*\*\* ?START OF THE PROJECT GUTENBERG EBOOK .* ?\*\*\*""" +
        """(.*)""" +
        """\*\*\* ?END OF THE PROJECT GUTENBERG EBOOK .* ?\*\*\*""")
        .toLowerCase.r
    val book = readBook(src)
    try {
      extractRegex.findAllIn(book.mkString.toLowerCase).matchData
        .toList(0).subgroups(0)
    } finally {
      book.close()
    }
  }

  /**
   * Counts word occurrences in the book's body, returning (word, count) pairs
   * sorted by descending count, ties broken alphabetically. Words include
   * hyphenated forms, possessives, and dotted acronyms.
   */
  def word_freq(src: String): List[(String, Int)] = {
    val wordRegex = """[^\W_]\.([^\W_]\.)+|[^\W_]+(-[^\W_]+)*'?s?""".r
    val contents = extractContent(src)
    val words = wordRegex.findAllIn(contents).toList
    // Plain groupBy instead of scalaz's foldMap: no third-party dependency needed.
    words.groupBy(identity).map { case (word, occs) => (word, occs.size) }
      .toList.sortBy(elem => (-elem._2, elem._1))
  }
}
| nichwn/dailyprogrammer-scala | src/main/scala/challenge/Easy191.scala | Scala | mit | 961 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster.mesos
import java.io.File
import java.util.{Collections, Date, List => JList}
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import org.apache.mesos.{Scheduler, SchedulerDriver}
import org.apache.mesos.Protos.{TaskState => MesosTaskState, _}
import org.apache.mesos.Protos.Environment.Variable
import org.apache.mesos.Protos.TaskStatus.Reason
import org.apache.spark.{SecurityManager, SparkConf, SparkException, TaskState}
import org.apache.spark.deploy.mesos.{MesosDriverDescription, config}
import org.apache.spark.deploy.rest.{CreateSubmissionResponse, KillSubmissionResponse, SubmissionStatusResponse}
import org.apache.spark.metrics.MetricsSystem
import org.apache.spark.security.{ConfigSecurity, VaultHelper}
import org.apache.spark.util.Utils
/**
* Tracks the current state of a Mesos Task that runs a Spark driver.
* @param driverDescription Submitted driver description from
* [[org.apache.spark.deploy.rest.mesos.MesosRestServer]]
* @param taskId Mesos TaskID generated for the task
* @param slaveId Slave ID that the task is assigned to
* @param mesosTaskStatus The last known task status update.
* @param startDate The date the task was launched
* @param finishDate The date the task finished
* @param frameworkId Mesos framework ID the task registers with
*/
private[spark] class MesosClusterSubmissionState(
    val driverDescription: MesosDriverDescription,
    val taskId: TaskID,
    val slaveId: SlaveID,
    var mesosTaskStatus: Option[TaskStatus],  // mutable: updated on status callbacks
    var startDate: Date,
    var finishDate: Option[Date],             // None while the driver is still running
    val frameworkId: String)
  extends Serializable {

  /** Shallow copy; the mutable fields' current values are carried over as-is. */
  def copy(): MesosClusterSubmissionState = {
    new MesosClusterSubmissionState(
      driverDescription, taskId, slaveId, mesosTaskStatus, startDate, finishDate, frameworkId)
  }
}
/**
* Tracks the retry state of a driver, which includes the next time it should be scheduled
* and necessary information to do exponential backoff.
* This class is not thread-safe, and we expect the caller to handle synchronizing state.
*
* @param lastFailureStatus Last Task status when it failed.
* @param retries Number of times it has been retried.
* @param nextRetry Time at which it should be retried next
* @param waitTime The amount of time driver is scheduled to wait until next retry.
*/
private[spark] class MesosClusterRetryState(
    val lastFailureStatus: TaskStatus,
    val retries: Int,       // number of retries attempted so far
    val nextRetry: Date,    // earliest time the next retry may be scheduled
    val waitTime: Int) extends Serializable {
  /** Copy with identical field values (all fields here are immutable). */
  def copy(): MesosClusterRetryState =
    new MesosClusterRetryState(lastFailureStatus, retries, nextRetry, waitTime)
}
/**
* The full state of the cluster scheduler, currently being used for displaying
* information on the UI.
*
* @param frameworkId Mesos Framework id for the cluster scheduler.
* @param masterUrl The Mesos master url
* @param queuedDrivers All drivers queued to be launched
* @param launchedDrivers All launched or running drivers
* @param finishedDrivers All terminated drivers
* @param pendingRetryDrivers All drivers pending to be retried
*/
// Immutable snapshot of the cluster scheduler's state, used by the web UI.
private[spark] class MesosClusterSchedulerState(
    val frameworkId: String,
    val masterUrl: Option[String],  // None until registered with a Mesos master
    val queuedDrivers: Iterable[MesosDriverDescription],
    val launchedDrivers: Iterable[MesosClusterSubmissionState],
    val finishedDrivers: Iterable[MesosClusterSubmissionState],
    val pendingRetryDrivers: Iterable[MesosDriverDescription])
/**
* The full state of a Mesos driver, that is being used to display driver information on the UI.
*/
// UI view of a single driver: a state label (e.g. "QUEUED", "RUNNING"), its
// description, and — when launched — its submission state.
private[spark] class MesosDriverState(
    val state: String,
    val description: MesosDriverDescription,
    val submissionState: Option[MesosClusterSubmissionState] = None)
/**
* A Mesos scheduler that is responsible for launching submitted Spark drivers in cluster mode
* as Mesos tasks in a Mesos cluster.
* All drivers are launched asynchronously by the framework, which will eventually be launched
* by one of the slaves in the cluster. The results of the driver will be stored in slave's task
* sandbox which is accessible by visiting the Mesos UI.
* This scheduler supports recovery by persisting all its state and performs task reconciliation
* on recover, which gets all the latest state for all the drivers from Mesos master.
*/
private[spark] class MesosClusterScheduler(
engineFactory: MesosClusterPersistenceEngineFactory,
conf: SparkConf)
extends Scheduler with MesosSchedulerUtils {
var frameworkUrl: String = _
private val metricsSystem =
MetricsSystem.createMetricsSystem("mesos_cluster", conf, new SecurityManager(conf))
private val master = conf.get("spark.master")
private val appName = conf.get("spark.app.name")
private val queuedCapacity = conf.getInt("spark.mesos.maxDrivers", 200)
private val retainedDrivers = conf.getInt("spark.mesos.retainedDrivers", 200)
private val maxRetryWaitTime = conf.getInt("spark.mesos.cluster.retry.wait.max", 60) // 1 minute
private val useFetchCache = conf.getBoolean("spark.mesos.fetchCache.enable", false)
private val schedulerState = engineFactory.createEngine("scheduler")
private val stateLock = new Object()
private val finishedDrivers =
new mutable.ArrayBuffer[MesosClusterSubmissionState](retainedDrivers)
private var frameworkId: String = null
// Holds all the launched drivers and current launch state, keyed by driver id.
private val launchedDrivers = new mutable.HashMap[String, MesosClusterSubmissionState]()
// Holds a map of driver id to expected slave id that is passed to Mesos for reconciliation.
// All drivers that are loaded after failover are added here, as we need get the latest
// state of the tasks from Mesos.
private val pendingRecover = new mutable.HashMap[String, SlaveID]()
// Stores all the submitted drivers that hasn't been launched.
private val queuedDrivers = new ArrayBuffer[MesosDriverDescription]()
// All supervised drivers that are waiting to retry after termination.
private val pendingRetryDrivers = new ArrayBuffer[MesosDriverDescription]()
private val queuedDriversState = engineFactory.createEngine("driverQueue")
private val launchedDriversState = engineFactory.createEngine("launchedDrivers")
private val pendingRetryDriversState = engineFactory.createEngine("retryList")
// Flag to mark if the scheduler is ready to be called, which is until the scheduler
// is registered with Mesos master.
@volatile protected var ready = false
private var masterInfo: Option[MasterInfo] = None
private var schedulerDriver: SchedulerDriver = _
def submitDriver(desc: MesosDriverDescription): CreateSubmissionResponse = {
val c = new CreateSubmissionResponse
if (!ready) {
c.success = false
c.message = "Scheduler is not ready to take requests"
return c
}
stateLock.synchronized {
if (isQueueFull()) {
c.success = false
c.message = "Already reached maximum submission size"
return c
}
c.submissionId = desc.submissionId
c.success = true
addDriverToQueue(desc)
}
c
}
def killDriver(submissionId: String): KillSubmissionResponse = {
val k = new KillSubmissionResponse
if (!ready) {
k.success = false
k.message = "Scheduler is not ready to take requests"
return k
}
k.submissionId = submissionId
stateLock.synchronized {
// We look for the requested driver in the following places:
// 1. Check if submission is running or launched.
// 2. Check if it's still queued.
// 3. Check if it's in the retry list.
// 4. Check if it has already completed.
if (launchedDrivers.contains(submissionId)) {
val task = launchedDrivers(submissionId)
schedulerDriver.killTask(task.taskId)
k.success = true
k.message = "Killing running driver"
} else if (removeFromQueuedDrivers(submissionId)) {
k.success = true
k.message = "Removed driver while it's still pending"
} else if (removeFromPendingRetryDrivers(submissionId)) {
k.success = true
k.message = "Removed driver while it's being retried"
} else if (finishedDrivers.exists(_.driverDescription.submissionId.equals(submissionId))) {
k.success = false
k.message = "Driver already terminated"
} else {
k.success = false
k.message = "Cannot find driver"
}
}
k
}
def getDriverStatus(submissionId: String): SubmissionStatusResponse = {
val s = new SubmissionStatusResponse
if (!ready) {
s.success = false
s.message = "Scheduler is not ready to take requests"
return s
}
s.submissionId = submissionId
stateLock.synchronized {
if (queuedDrivers.exists(_.submissionId.equals(submissionId))) {
s.success = true
s.driverState = "QUEUED"
} else if (launchedDrivers.contains(submissionId)) {
s.success = true
s.driverState = "RUNNING"
launchedDrivers(submissionId).mesosTaskStatus.foreach(state => s.message = state.toString)
} else if (finishedDrivers.exists(_.driverDescription.submissionId.equals(submissionId))) {
s.success = true
s.driverState = "FINISHED"
finishedDrivers
.find(d => d.driverDescription.submissionId.equals(submissionId)).get.mesosTaskStatus
.foreach(state => s.message = state.toString)
} else if (pendingRetryDrivers.exists(_.submissionId.equals(submissionId))) {
val status = pendingRetryDrivers.find(_.submissionId.equals(submissionId))
.get.retryState.get.lastFailureStatus
s.success = true
s.driverState = "RETRYING"
s.message = status.toString
} else {
s.success = false
s.driverState = "NOT_FOUND"
}
}
s
}
/**
* Gets the driver state to be displayed on the Web UI.
*/
def getDriverState(submissionId: String): Option[MesosDriverState] = {
stateLock.synchronized {
queuedDrivers.find(_.submissionId.equals(submissionId))
.map(d => new MesosDriverState("QUEUED", d))
.orElse(launchedDrivers.get(submissionId)
.map(d => new MesosDriverState("RUNNING", d.driverDescription, Some(d))))
.orElse(finishedDrivers.find(_.driverDescription.submissionId.equals(submissionId))
.map(d => new MesosDriverState("FINISHED", d.driverDescription, Some(d))))
.orElse(pendingRetryDrivers.find(_.submissionId.equals(submissionId))
.map(d => new MesosDriverState("RETRYING", d)))
}
}
private def isQueueFull(): Boolean = launchedDrivers.size >= queuedCapacity
/**
* Recover scheduler state that is persisted.
* We still need to do task reconciliation to be up to date of the latest task states
* as it might have changed while the scheduler is failing over.
*/
private def recoverState(): Unit = {
stateLock.synchronized {
launchedDriversState.fetchAll[MesosClusterSubmissionState]().foreach { state =>
launchedDrivers(state.taskId.getValue) = state
pendingRecover(state.taskId.getValue) = state.slaveId
}
queuedDriversState.fetchAll[MesosDriverDescription]().foreach(d => queuedDrivers += d)
// There is potential timing issue where a queued driver might have been launched
// but the scheduler shuts down before the queued driver was able to be removed
// from the queue. We try to mitigate this issue by walking through all queued drivers
// and remove if they're already launched.
queuedDrivers
.filter(d => launchedDrivers.contains(d.submissionId))
.foreach(d => removeFromQueuedDrivers(d.submissionId))
pendingRetryDriversState.fetchAll[MesosDriverDescription]()
.foreach(s => pendingRetryDrivers += s)
// TODO: Consider storing finished drivers so we can show them on the UI after
// failover. For now we clear the history on each recovery.
finishedDrivers.clear()
}
}
/**
* Starts the cluster scheduler and wait until the scheduler is registered.
* This also marks the scheduler to be ready for requests.
*/
def start(): Unit = {
// TODO: Implement leader election to make sure only one framework running in the cluster.
val fwId = schedulerState.fetch[String]("frameworkId")
fwId.foreach { id =>
frameworkId = id
}
recoverState()
metricsSystem.registerSource(new MesosClusterSchedulerSource(this))
metricsSystem.start()
val driver = createSchedulerDriver(
master,
MesosClusterScheduler.this,
Utils.getCurrentUserName(),
appName,
conf,
Some(frameworkUrl),
Some(true),
Some(Integer.MAX_VALUE),
fwId)
startScheduler(driver)
ready = true
}
def stop(): Unit = {
ready = false
metricsSystem.report()
metricsSystem.stop()
schedulerDriver.stop(true)
}
override def registered(
driver: SchedulerDriver,
newFrameworkId: FrameworkID,
masterInfo: MasterInfo): Unit = {
logInfo("Registered as framework ID " + newFrameworkId.getValue)
if (newFrameworkId.getValue != frameworkId) {
frameworkId = newFrameworkId.getValue
schedulerState.persist("frameworkId", frameworkId)
}
markRegistered()
stateLock.synchronized {
this.masterInfo = Some(masterInfo)
this.schedulerDriver = driver
if (!pendingRecover.isEmpty) {
// Start task reconciliation if we need to recover.
val statuses = pendingRecover.collect {
case (taskId, slaveId) =>
val newStatus = TaskStatus.newBuilder()
.setTaskId(TaskID.newBuilder().setValue(taskId).build())
.setSlaveId(slaveId)
.setState(MesosTaskState.TASK_STAGING)
.build()
launchedDrivers.get(taskId).map(_.mesosTaskStatus.getOrElse(newStatus))
.getOrElse(newStatus)
}
// TODO: Page the status updates to avoid trying to reconcile
// a large amount of tasks at once.
driver.reconcileTasks(statuses.toSeq.asJava)
}
}
}
private def getDriverExecutorURI(desc: MesosDriverDescription): Option[String] = {
desc.conf.getOption("spark.executor.uri")
.orElse(desc.command.environment.get("SPARK_EXECUTOR_URI"))
}
private def getDriverFrameworkID(desc: MesosDriverDescription): String = {
val retries = desc.retryState.map { d => s"-retry-${d.retries.toString}" }.getOrElse("")
s"${frameworkId}-${desc.submissionId}${retries}"
}
private def adjust[A, B](m: collection.Map[A, B], k: A, default: B)(f: B => B) = {
m.updated(k, f(m.getOrElse(k, default)))
}
private def getDriverEnvironment(desc: MesosDriverDescription): Environment = {
// TODO(mgummelt): Don't do this here. This should be passed as a --conf
val commandEnv = adjust(desc.command.environment, "SPARK_SUBMIT_OPTS", "")(
v => s"$v -Dspark.mesos.driver.frameworkId=${getDriverFrameworkID(desc)}"
)
val driverExecCommand = getDriverCommandValue(desc)
val env = desc.conf.getAllWithPrefix("spark.mesos.driverEnv.") ++ commandEnv ++
Map("SPARK_DRIVER_COMMAND" -> driverExecCommand)
val envBuilder = Environment.newBuilder()
env.foreach { case (k, v) =>
envBuilder.addVariables(Variable.newBuilder().setName(k).setValue(v))
}
if (desc.conf.getOption("spark.mesos.driver.docker.network.name").isDefined) {
envBuilder.addVariables(Variable.newBuilder()
.setName("SPARK_VIRTUAL_USER_NETWORK").setValue("true"))
}
envBuilder.build()
}
/**
 * Collects every URI the Mesos fetcher must stage into the driver sandbox:
 * dispatcher-level and submission-level "spark.mesos.uris", submitted py-files,
 * the application jar, and (when present) the executor URI.
 */
private def getDriverUris(desc: MesosDriverDescription): List[CommandInfo.URI] = {
  val confUris = List(
    conf.getOption("spark.mesos.uris"),
    desc.conf.getOption("spark.mesos.uris"),
    desc.conf.getOption("spark.submit.pyFiles"))
    .flatten
    .flatMap(_.split(",").map(_.trim))
  val jarUrl = desc.jarUrl.stripPrefix("file:").stripPrefix("local:")
  val allUris = (jarUrl :: confUris) ++ getDriverExecutorURI(desc).toList
  allUris.map { uri =>
    CommandInfo.URI.newBuilder().setValue(uri.trim()).setCache(useFetchCache).build()
  }
}
/**
 * Computes the shell command used to launch the driver via spark-submit.
 *
 * Fix over the previous revision: the first branch of the executable selection
 * tested `executorUri.isDefined` (duplicating the second branch, which was
 * therefore unreachable) and returned a (String, String) tuple instead of a
 * String, so the tuple's toString was interpolated into the final command.
 * The docker case now tests `dockerDefined` and yields only the executable path.
 */
private def getDriverCommandValue(desc: MesosDriverDescription): String = {
  val dockerDefined = desc.conf.contains("spark.mesos.executor.docker.image")
  val executorUri = getDriverExecutorURI(desc)
  // Gets the path to run spark-submit, and the path to the Mesos sandbox.
  val sandboxPath = if (dockerDefined) "/mnt/mesos/sandbox"
  else if (executorUri.isDefined) ".."
  else "."
  val executable = if (dockerDefined) {
    // Application jar is automatically downloaded in the mounted sandbox by Mesos,
    // and the path to the mounted volume is stored in the $MESOS_SANDBOX env variable.
    "./bin/spark-submit"
  } else if (executorUri.isDefined) {
    val folderBasename = executorUri.get.split('/').last.split('.').head
    val entries = conf.getOption("spark.executor.extraLibraryPath")
      .map(path => Seq(path) ++ desc.command.libraryPathEntries)
      .getOrElse(desc.command.libraryPathEntries)
    val prefixEnv = if (!entries.isEmpty) Utils.libraryPathEnvPrefix(entries) else ""
    // Sandbox path points to the parent folder as we chdir into the folderBasename.
    s"cd $folderBasename*; $prefixEnv bin/spark-submit"
  } else {
    val executorSparkHome = desc.conf.getOption("spark.mesos.executor.home")
      .orElse(conf.getOption("spark.home"))
      .orElse(Option(System.getenv("SPARK_HOME")))
      .getOrElse {
        throw new SparkException("Executor Spark home `spark.mesos.executor.home` is not set!")
      }
    new File(executorSparkHome, "./bin/spark-submit").getPath
  }
  val cmdOptions = generateCmdOption(desc, sandboxPath).mkString(" ")
  val primaryResource = new File(sandboxPath, desc.jarUrl.split("/").last).toString()
  val appArguments = desc.command.arguments.mkString(" ")
  // TODO: replace this behaviour, some properties could contain double quotes
  s"$executable $cmdOptions $primaryResource $appArguments".replaceAll("\"", "")
}
/** Assembles the full CommandInfo (env, fetch URIs, non-shell exec) for a driver. */
private def buildDriverCommand(desc: MesosDriverDescription): CommandInfo = {
  CommandInfo.newBuilder()
    .setEnvironment(getDriverEnvironment(desc))
    .addAllUris(getDriverUris(desc).asJava)
    .setShell(false)
    .build()
}
/**
 * Builds the spark-submit command-line options for a driver: name/master/cores/
 * memory, the main class (absent for python apps), executor memory and cores,
 * rewritten py-files paths, and all remaining conf entries as --conf pairs.
 */
private def generateCmdOption(desc: MesosDriverDescription, sandboxPath: String): Seq[String] = {
  val baseOptions = Seq(
    "--name", desc.conf.get("spark.app.name"),
    "--master", s"mesos://${conf.get("spark.master")}",
    "--driver-cores", desc.cores.toString,
    "--driver-memory", s"${desc.mem}M")
  // Assume empty main class means we're running python
  val classOption =
    if (desc.command.mainClass == "") Seq.empty[String]
    else Seq("--class", desc.command.mainClass)
  val executorMemOption = desc.conf.getOption("spark.executor.memory")
    .toSeq.flatMap(v => Seq("--executor-memory", v))
  val maxCoresOption = desc.conf.getOption("spark.cores.max")
    .toSeq.flatMap(v => Seq("--total-executor-cores", v))
  // py-files are fetched into the sandbox, so rewrite each path to its sandbox location.
  val pyFilesOption = desc.conf.getOption("spark.submit.pyFiles").toSeq.flatMap { pyFiles =>
    val formattedFiles = pyFiles.split(",")
      .map { path => new File(sandboxPath, path.split("/").last).toString() }
      .mkString(",")
    Seq("--py-files", formattedFiles)
  }
  // --conf
  val replicatedOptionsBlacklist = Set(
    "spark.jars", // Avoids duplicate classes in classpath
    "spark.submit.deployMode", // this would be set to `cluster`, but we need client
    "spark.master", // this contains the address of the dispatcher, not master
    "spark.shuffle.service.host" // Prevent to propagate any shuffle service host
  )
  val defaultConf = conf.getAllWithPrefix("spark.mesos.dispatcher.driverDefault.").toMap
  val driverConf = desc.conf.getAll
    .filter { case (key, _) => !replicatedOptionsBlacklist.contains(key) }
    .toMap
  // Submission-level conf overrides dispatcher defaults on key collision.
  val confOptions = (defaultConf ++ driverConf).toSeq.flatMap { case (key, value) =>
    Seq("--conf", s""""$key=${shellEscape(value)}"""".stripMargin)
  }
  baseOptions ++ classOption ++ executorMemOption ++ maxCoresOption ++
    pyFilesOption ++ confOptions
}
/**
 * Escape args for Unix-like shells, unless already quoted by the user.
 * Based on: http://www.gnu.org/software/bash/manual/html_node/Double-Quotes.html
 * and http://www.grymoire.com/Unix/Quote.html
 *
 * Values already wrapped entirely in single or double quotes pass through
 * untouched; values containing shell metacharacters are double-quoted with the
 * characters special inside double quotes back-slashed; anything else is
 * returned as-is.
 *
 * @param value argument
 * @return escaped argument
 */
private[scheduler] def shellEscape(value: String): String = {
  // Matches a value fully wrapped in one pair of quotes (either kind).
  val WrappedInQuotes = """^(".+"|'.+')$""".r
  // Matches any value containing at least one shell metacharacter.
  val ShellSpecialChars = (""".*([ '<>&|\\?\\*;!#\\\\(\\)"$`]).*""").r
  value match {
    case WrappedInQuotes(c) => value // The user quoted his args, don't touch it!
    case ShellSpecialChars(c) => "\\"" + value.replaceAll("""(["`\\$\\\\])""", """\\\\$1""") + "\\""
    case _: String => value // Don't touch harmless strings
  }
}
/**
 * Mutable wrapper over a Mesos offer tracking the resources not yet consumed
 * by drivers scheduled against it in the current round.
 */
private class ResourceOffer(
    val offer: Offer,
    var remainingResources: JList[Resource],
    var attributes: JList[Attribute]) {
  override def toString(): String = {
    "Offer id: " + offer.getId + ", resources: " + remainingResources
  }
}
/**
 * Builds the Mesos TaskInfo that launches a driver, carving the requested
 * cpu/mem out of the given offer (the offer's remaining resources are mutated
 * so subsequent drivers only see the leftovers).
 */
private def createTaskInfo(desc: MesosDriverDescription, offer: ResourceOffer): TaskInfo = {
  val taskId = TaskID.newBuilder().setValue(desc.submissionId).build()
  // First take the cpus, then take the memory from what remains.
  val (afterCpus, cpuResourcesToUse) =
    partitionResources(offer.remainingResources, "cpus", desc.cores)
  val (afterMem, memResourcesToUse) =
    partitionResources(afterCpus.asJava, "mem", desc.mem)
  offer.remainingResources = afterMem.asJava
  val appName = desc.conf.get("spark.app.name")
  val builder = TaskInfo.newBuilder()
    .setTaskId(taskId)
    .setName(s"Driver for ${appName}")
    .setSlaveId(offer.offer.getSlaveId)
    .setCommand(buildDriverCommand(desc))
    .addAllResources(cpuResourcesToUse.asJava)
    .addAllResources(memResourcesToUse.asJava)
  builder.setContainer(MesosSchedulerBackendUtil.containerInfo(desc.conf))
  builder.build
}
/**
 * This method takes all the possible candidates and attempts to schedule them with Mesos offers.
 * Every time a new task is scheduled, the afterLaunchCallback is called to perform post scheduled
 * logic on each task.
 *
 * Fix over the previous revision: the "Finding offer to launch driver" trace
 * message was logged twice per candidate; the redundant second line is removed.
 */
private def scheduleTasks(
    candidates: Seq[MesosDriverDescription],
    afterLaunchCallback: (String) => Boolean,
    currentOffers: List[ResourceOffer],
    tasks: mutable.HashMap[OfferID, ArrayBuffer[TaskInfo]]): Unit = {
  for (submission <- candidates) {
    val driverCpu = submission.cores
    val driverMem = submission.mem
    val driverConstraints =
      parseConstraintString(submission.conf.get(config.DRIVER_CONSTRAINTS))
    logTrace(s"Finding offer to launch driver with cpu: $driverCpu, mem: $driverMem, " +
      s"driverConstraints: $driverConstraints")
    // Pick the first offer with enough cpu/mem whose attributes satisfy the constraints.
    val offerOption = currentOffers.find { offer =>
      getResource(offer.remainingResources, "cpus") >= driverCpu &&
        getResource(offer.remainingResources, "mem") >= driverMem &&
        matchesAttributeRequirements(driverConstraints, toAttributeMap(offer.attributes))
    }
    if (offerOption.isEmpty) {
      logDebug(s"Unable to find offer to launch driver id: ${submission.submissionId}, " +
        s"cpu: $driverCpu, mem: $driverMem")
    } else {
      val offer = offerOption.get
      val queuedTasks = tasks.getOrElseUpdate(offer.offer.getId, new ArrayBuffer[TaskInfo])
      try {
        val task = createTaskInfo(submission, offer)
        queuedTasks += task
        logTrace(s"Using offer ${offer.offer.getId.getValue} to launch driver " +
          submission.submissionId)
        // Record the launch both in memory and in the persistence engine so a
        // dispatcher restart can recover it.
        val newState = new MesosClusterSubmissionState(
          submission,
          task.getTaskId,
          offer.offer.getSlaveId,
          None,
          new Date(),
          None,
          getDriverFrameworkID(submission))
        launchedDrivers(submission.submissionId) = newState
        launchedDriversState.persist(submission.submissionId, newState)
        afterLaunchCallback(submission.submissionId)
      } catch {
        case e: SparkException =>
          // Treat the driver as finished (failed) rather than leaving it queued forever.
          afterLaunchCallback(submission.submissionId)
          finishedDrivers += new MesosClusterSubmissionState(submission, TaskID.newBuilder().
            setValue(submission.submissionId).build(), SlaveID.newBuilder().setValue("").
            build(), None, null, None, getDriverFrameworkID(submission))
          logError(s"Failed to launch the driver with id: ${submission.submissionId}, " +
            s"cpu: $driverCpu, mem: $driverMem, reason: ${e.getMessage}")
      }
    }
  }
}
/**
 * Mesos callback delivering resource offers.
 *
 * Under stateLock, schedules retry-eligible supervised drivers first, then the
 * queued drivers; afterwards launches the accumulated tasks and declines every
 * offer that ended up unused.
 */
override def resourceOffers(driver: SchedulerDriver, offers: JList[Offer]): Unit = {
  logTrace(s"Received offers from Mesos: \\n${offers.asScala.mkString("\\n")}")
  val tasks = new mutable.HashMap[OfferID, ArrayBuffer[TaskInfo]]()
  val currentTime = new Date()
  // Wrap each offer so scheduling can consume resources incrementally.
  val currentOffers = offers.asScala.map {
    offer => new ResourceOffer(offer, offer.getResourcesList, offer.getAttributesList)
  }.toList
  stateLock.synchronized {
    // We first schedule all the supervised drivers that are ready to retry.
    // This list will be empty if none of the drivers are marked as supervise.
    // NOTE(review): d.retryState.get assumes every pending-retry driver carries
    // retry state -- confirm the invariant where drivers enter pendingRetryDrivers.
    val driversToRetry = pendingRetryDrivers.filter { d =>
      d.retryState.get.nextRetry.before(currentTime)
    }
    scheduleTasks(
      copyBuffer(driversToRetry),
      removeFromPendingRetryDrivers,
      currentOffers,
      tasks)
    // Then we walk through the queued drivers and try to schedule them.
    scheduleTasks(
      copyBuffer(queuedDrivers),
      removeFromQueuedDrivers,
      currentOffers,
      tasks)
  }
  // Launch everything that was matched to an offer in this round.
  tasks.foreach { case (offerId, taskInfos) =>
    driver.launchTasks(Collections.singleton(offerId), taskInfos.asJava)
  }
  // Decline untouched offers for a configurable duration to reduce churn.
  for (offer <- currentOffers if !tasks.contains(offer.offer.getId)) {
    declineOffer(driver, offer.offer, None, Some(getRejectOfferDuration(conf)))
  }
}
/** Returns a shallow copy of the buffer, so callers can iterate without holding stateLock. */
private def copyBuffer(
    buffer: ArrayBuffer[MesosDriverDescription]): ArrayBuffer[MesosDriverDescription] = {
  val copied = new ArrayBuffer[MesosDriverDescription](buffer.size)
  copied ++= buffer
  copied
}
/** Snapshot of the scheduler's state (queued, launched, finished, pending-retry drivers). */
def getSchedulerState(): MesosClusterSchedulerState = {
  stateLock.synchronized {
    val masterUrl = masterInfo.map(m => s"http://${m.getIp}:${m.getPort}")
    new MesosClusterSchedulerState(
      frameworkId,
      masterUrl,
      copyBuffer(queuedDrivers),
      launchedDrivers.values.map(_.copy()).toList,
      finishedDrivers.map(_.copy()).toList,
      copyBuffer(pendingRetryDrivers))
  }
}
// Intentionally a no-op: rescinded offers require no cleanup here.
override def offerRescinded(driver: SchedulerDriver, offerId: OfferID): Unit = {}
// Intentionally a no-op: nothing to tear down when the master connection drops.
override def disconnected(driver: SchedulerDriver): Unit = {}
// Mesos callback after a master failover; only logs the event.
// NOTE(review): no task reconciliation is triggered here -- confirm that is intended.
override def reregistered(driver: SchedulerDriver, masterInfo: MasterInfo): Unit = {
  logInfo(s"Framework re-registered with master ${masterInfo.getId}")
}
// Intentionally a no-op: driver loss is handled via task status updates instead.
override def slaveLost(driver: SchedulerDriver, slaveId: SlaveID): Unit = {}
/** Mesos callback for unrecoverable framework errors: log and mark the scheduler errored. */
override def error(driver: SchedulerDriver, error: String): Unit = {
  logError(s"Error received: $error")
  markErr()
}
/**
 * Check if the task state is a recoverable state that we can relaunch the task.
 * Task state like TASK_ERROR are not relaunchable state since it wasn't able
 * to be validated by Mesos.
 */
private def shouldRelaunch(state: MesosTaskState): Boolean = {
  state match {
    case MesosTaskState.TASK_FAILED | MesosTaskState.TASK_LOST => true
    case _ => false
  }
}
/**
 * Mesos callback invoked on every task status change.
 *
 * Under stateLock: drops stale reconciliation updates, relaunches supervised
 * drivers whose task ended in a recoverable state (with exponential retry
 * backoff, refreshing Vault credentials when configured), and moves finished
 * drivers into the bounded finishedDrivers list.
 *
 * Fix over the previous revision: the unused local `val vaultURI` has been
 * removed; behaviour is otherwise unchanged.
 */
override def statusUpdate(driver: SchedulerDriver, status: TaskStatus): Unit = {
  val taskId = status.getTaskId.getValue
  logInfo(s"Received status update: taskId=${taskId}" +
    s" state=${status.getState}" +
    s" message=${status.getMessage}" +
    s" reason=${status.getReason}");
  stateLock.synchronized {
    if (launchedDrivers.contains(taskId)) {
      if (status.getReason == Reason.REASON_RECONCILIATION &&
        !pendingRecover.contains(taskId)) {
        // Task has already received update and no longer requires reconciliation.
        return
      }
      val state = launchedDrivers(taskId)
      // Check if the driver is supervise enabled and can be relaunched.
      if (state.driverDescription.supervise && shouldRelaunch(status.getState)) {
        removeFromLaunchedDrivers(taskId)
        state.finishDate = Some(new Date())
        val retryState: Option[MesosClusterRetryState] = state.driverDescription.retryState
        // Exponential backoff: double the wait each retry, capped at maxRetryWaitTime.
        val (retries, waitTimeSec) = retryState
          .map { rs => (rs.retries + 1, Math.min(maxRetryWaitTime, rs.waitTime * 2)) }
          .getOrElse{ (1, 1) }
        val nextRetry = new Date(new Date().getTime + waitTimeSec * 1000L)
        var sparkProperties = state.driverDescription.conf.getAll.toMap
        // When Vault is configured, mint fresh app-role credentials for the relaunch.
        // NOTE(review): sparkProperties("spark.secret.vault.role") throws if the key
        // is absent while vaultURI is set -- confirm the property is always present.
        if (ConfigSecurity.vaultURI.isDefined)
        {
          val role = sparkProperties("spark.secret.vault.role")
          val driverSecretId =
            VaultHelper.getSecretIdFromVault(role).get
          val driverRoleId =
            VaultHelper.getRoleIdFromVault(role).get
          sparkProperties = sparkProperties.updated("spark.secret.roleID", driverRoleId)
            .updated("spark.secret.secretID", driverSecretId)
        }
        else logDebug("No Vault information provided skipping new approle generation")
        val newDriverDescription = state.driverDescription.copy(
          retryState = Some(new MesosClusterRetryState(status, retries, nextRetry, waitTimeSec)))
        addDriverToPending(newDriverDescription, taskId);
      } else if (TaskState.isFinished(mesosToTaskState(status.getState))) {
        removeFromLaunchedDrivers(taskId)
        state.finishDate = Some(new Date())
        // Keep at most retainedDrivers finished entries; drop the oldest ~10%.
        if (finishedDrivers.size >= retainedDrivers) {
          val toRemove = math.max(retainedDrivers / 10, 1)
          finishedDrivers.trimStart(toRemove)
        }
        finishedDrivers += state
      }
      state.mesosTaskStatus = Option(status)
    } else {
      logError(s"Unable to find driver $taskId in status update")
    }
  }
}
// Intentionally a no-op: this scheduler exchanges no custom framework messages.
override def frameworkMessage(
    driver: SchedulerDriver,
    executorId: ExecutorID,
    slaveId: SlaveID,
    message: Array[Byte]): Unit = {}
// Intentionally a no-op: driver failures are handled through statusUpdate.
override def executorLost(
    driver: SchedulerDriver,
    executorId: ExecutorID,
    slaveId: SlaveID,
    status: Int): Unit = {}
/** Removes a queued driver by submission id (memory + persisted state); true when found. */
private def removeFromQueuedDrivers(id: String): Boolean = {
  val index = queuedDrivers.indexWhere(_.submissionId == id)
  if (index == -1) {
    false
  } else {
    queuedDrivers.remove(index)
    queuedDriversState.expunge(id)
    true
  }
}
/** Removes a launched driver by id (memory + persisted state); true when found. */
private def removeFromLaunchedDrivers(id: String): Boolean = {
  launchedDrivers.remove(id) match {
    case Some(_) =>
      launchedDriversState.expunge(id)
      true
    case None =>
      false
  }
}
/** Removes a pending-retry driver by submission id (memory + persisted state); true when found. */
private def removeFromPendingRetryDrivers(id: String): Boolean = {
  val index = pendingRetryDrivers.indexWhere(_.submissionId == id)
  if (index == -1) {
    false
  } else {
    pendingRetryDrivers.remove(index)
    pendingRetryDriversState.expunge(id)
    true
  }
}
// Current sizes of the scheduler's driver collections.
def getQueuedDriversSize: Int = queuedDrivers.size
def getLaunchedDriversSize: Int = launchedDrivers.size
def getPendingRetryDriversSize: Int = pendingRetryDrivers.size
// Persists the submission first, then enqueues it in memory, then asks Mesos
// for fresh offers so it can be scheduled promptly.
private def addDriverToQueue(desc: MesosDriverDescription): Unit = {
  queuedDriversState.persist(desc.submissionId, desc)
  queuedDrivers += desc
  revive()
}
// Queues a driver for a supervised relaunch, persisting it for crash recovery.
// NOTE(review): persisted under taskId, while removeFromPendingRetryDrivers
// expunges by submissionId -- confirm the two always coincide (createTaskInfo
// builds the task id from submissionId, so they should).
private def addDriverToPending(desc: MesosDriverDescription, taskId: String) = {
  pendingRetryDriversState.persist(taskId, desc)
  pendingRetryDrivers += desc
  revive()
}
// Asks Mesos to resume sending resource offers so newly added work gets scheduled.
private def revive(): Unit = {
  logInfo("Reviving Offers.")
  schedulerDriver.reviveOffers()
}
}
| jlopezmalla/spark | resource-managers/mesos/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosClusterScheduler.scala | Scala | apache-2.0 | 33,110 |
package com.codahale.jerkson.deser
import org.codehaus.jackson.map.annotate.JsonCachable
import org.codehaus.jackson.`type`.JavaType
import org.codehaus.jackson.map.{DeserializationContext, JsonDeserializer}
import scala.collection.mutable
import org.codehaus.jackson.{JsonToken, JsonParser}
/**
 * Jackson deserializer producing a mutable.LinkedHashMap[String, Object],
 * preserving the JSON object's field order. Each value is delegated to the
 * supplied valueDeserializer.
 */
@JsonCachable
class MutableLinkedHashMapDeserializer(valueType: JavaType,
                                       valueDeserializer: JsonDeserializer[Object]) extends JsonDeserializer[Object] {
  def deserialize(jp: JsonParser, ctxt: DeserializationContext) = {
    val entries = mutable.LinkedHashMap.newBuilder[String, Object]
    // Step past the object opener when the parser is positioned on it.
    if (jp.getCurrentToken == JsonToken.START_OBJECT) {
      jp.nextToken()
    }
    // Consume FIELD_NAME/value pairs until the closing brace.
    while (jp.getCurrentToken != JsonToken.END_OBJECT) {
      val fieldName = jp.getCurrentName
      jp.nextToken()
      entries += (fieldName -> valueDeserializer.deserialize(jp, ctxt))
      jp.nextToken()
    }
    entries.result()
  }
}
| cphylabs/jerkson-old | src/main/scala/com/codahale/jerkson/deser/MutableLinkedHashMapDeserializer.scala | Scala | mit | 942 |
package models.services
import models.Stash
import org.scalatest.concurrent.{ PatienceConfiguration, ScalaFutures }
import org.scalatest.time.{ Seconds, Span }
import org.scalatestplus.play.PlaySpec
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.modules.reactivemongo.ReactiveMongoApi
import reactivemongo.api.gridfs.GridFS
import reactivemongo.api.{ DefaultDB, MongoConnection, MongoDriver }
import reactivemongo.play.json.JSONSerializationPack
import scala.concurrent.Future
/**
 * Integration tests for StashStore against a local MongoDB instance
 * (database "test-collection" on localhost); a running mongod is required.
 *
 * Created by dylangrald on 11/4/16.
 */
class StashStoreSpec extends PlaySpec with ScalaFutures {
  // Minimal ReactiveMongoApi wired to localhost; members the store never uses
  // are deliberately left unimplemented (???).
  val reactiveMongoApi = new ReactiveMongoApi {
    override def driver: MongoDriver = new MongoDriver()
    override def gridFS: GridFS[JSONSerializationPack.type] = ???
    override def connection: MongoConnection = driver.connection(List("localhost"))
    override def database: Future[DefaultDB] = connection.database("test-collection")
    override def asyncGridFS: Future[GridFS[JSONSerializationPack.type]] = ???
    @scala.deprecated("Use [[database]]")
    override def db: DefaultDB = ???
  }
  // System under test, sharing one live database across all examples.
  val stashStore = new StashStore(reactiveMongoApi)
  // Common 5-second timeout applied to every futureValue call.
  val patienceConfiguration = PatienceConfiguration.Timeout(Span(5, Seconds))
  "StashStore.addStash" should {
    "add an input PointLocation stash correctly" in {
      val inputPointStash = SomeRandom.pointLocationStash()
      val savedStash = stashStore.addStash(inputPointStash)
      savedStash.futureValue(patienceConfiguration) mustEqual inputPointStash
    }
    "add an input LineLocation stash correctly" in {
      val inputLineStash = SomeRandom.lineLocationStash()
      val savedStash = stashStore.addStash(inputLineStash)
      savedStash.futureValue(patienceConfiguration) mustEqual inputLineStash
    }
    "add an input PolygonLocation stash correctly" in {
      val inputPolygonStash = SomeRandom.polygonLocationStash()
      val savedStash = stashStore.addStash(inputPolygonStash)
      savedStash.futureValue(patienceConfiguration) mustEqual inputPolygonStash
    }
  }
  "StashStore.getStashes" should {
    // Uses intersect so documents left over from other runs don't fail the test.
    "return stashes that were added" in {
      val inputPointStash = SomeRandom.pointLocationStash()
      val inputLineStash = SomeRandom.lineLocationStash()
      val inputPolygonStash = SomeRandom.polygonLocationStash()
      val allNewStashes = List(inputPointStash, inputLineStash, inputPolygonStash)
      val savedStash1 = stashStore.addStash(inputPointStash)
      whenReady(savedStash1) { saved1 =>
        val savedStash2 = stashStore.addStash(inputLineStash)
        whenReady(savedStash2) { saved2 =>
          val savedStash3 = stashStore.addStash(inputPolygonStash)
          whenReady(savedStash3) { saved3 =>
            val allStashes = stashStore.getStashes()
            allStashes.futureValue(patienceConfiguration).intersect(allNewStashes) mustEqual allNewStashes
          }
        }
      }
    }
  }
  "StashController.getStash(id)" should {
    "return stash when it exists" in {
      val newStash = SomeRandom.lineLocationStash()
      val savedStashFuture = stashStore.addStash(newStash)
      whenReady(savedStashFuture) { saved =>
        val retrievedStashFuture = stashStore.getStash(newStash._id)
        retrievedStashFuture.futureValue(patienceConfiguration).get mustEqual newStash
      }
    }
    "return None when it does not exist" in {
      val id = SomeRandom.uuidString()
      val getStashFuture = stashStore.getStash(id)
      getStashFuture.futureValue(patienceConfiguration) mustEqual None
    }
  }
  "StashStore.deleteStash" should {
    "delete the stash with the specified id" in {
      val stash = SomeRandom.stash(SomeRandom.lineLocation())
      val addStashFuture = stashStore.addStash(stash)
      whenReady(addStashFuture) { addedStash =>
        val deleteStashFuture = stashStore.deleteStash(addedStash._id)
        whenReady(deleteStashFuture) { deletedStashResult =>
          val getAllStashesFuture = stashStore.getStashes()
          getAllStashesFuture.futureValue(patienceConfiguration).contains(stash) mustEqual false
        }
      }
    }
  }
  "StashStore.updateStash" should {
    // Rewrites every mutable field of an existing stash and verifies persistence.
    "update the stash with the new fields" in {
      val stashId = SomeRandom.uuidString()
      val originalStash = Stash(stashId, SomeRandom.uuidString(), SomeRandom.string(), SomeRandom.lineLocation())
      val updatedStashName = SomeRandom.string()
      val updatedStashLocation = SomeRandom.pointLocation()
      val updatedUserId = SomeRandom.uuidString()
      val updatedStash = Stash(stashId, updatedUserId, updatedStashName, updatedStashLocation)
      val savedOriginalStashFuture = stashStore.addStash(originalStash)
      whenReady(savedOriginalStashFuture) { addedStash =>
        val updatedStashFuture = stashStore.updateStash(updatedStash)
        updatedStashFuture.futureValue(patienceConfiguration) mustEqual updatedStash
        val allStashesFuture = stashStore.getStashes()
        whenReady(allStashesFuture) { allStashes =>
          allStashes.find(s => s._id.equals(stashId)).get mustEqual updatedStash
        }
      }
    }
  }
}
| dgrald/Locals-Only | test/models/services/StashStoreSpec.scala | Scala | apache-2.0 | 5,145 |
package coulomb.cats
import cats._
import cats.implicits._
import cats.kernel.laws.discipline._
import cats.laws.discipline._
import cats.data.NonEmptyList
import coulomb._
import coulomb.cats.implicits._
import coulomb.si._
import coulomb.siprefix._
/**
 * Law and behaviour checks for the cats type-class instances (Eq, Order,
 * Functor, Traverse, CommutativeMonad, Monoid) provided for coulomb Quantity.
 */
final class CatsSuite extends munit.DisciplineSuite {
  import coulomb.scalacheck.ArbQuantity._
  type MetersPerSecond = Meter %/ Second
  type KilometersPerSecond = (Kilo %* Meter) %/ Second
  // The same speed expressed three ways: 1000 m/s via the alias, via the raw
  // unit expression, and as 1 km/s; dq has an incompatible dimension (mass).
  val aq: Quantity[Double, MetersPerSecond] = 1000.0.withUnit[MetersPerSecond]
  val bq: Quantity[Double, MetersPerSecond] = 1000.0.withUnit[Meter %/ Second]
  val cq: Quantity[Int, KilometersPerSecond] = 1.withUnit[(Kilo %* Meter) %/ Second]
  val dq: Quantity[Double, Kilogram] = 1.0.withUnit[Kilogram]
  test("eq") {
    implicit val eqdms = implicitly[Eq[Quantity[Double, Meter %/ Second]]]
    // Same units and value types
    assert(eqdms.eqv(aq, bq))
    // Convertible units and value types
    assert(eqdms.eqv(aq, cq))
    // Incompatible units won't compile
    assert(compileErrors("eqdms.eqv(aq, dq)").nonEmpty)
  }
  test("order") {
    // Same units and value types
    val aq2: Quantity[Double, MetersPerSecond] = 900.0.withUnit[MetersPerSecond]
    // Sorting a NonEmptyList always uses cats' Order
    assert(NonEmptyList.of(aq, aq2).sorted === NonEmptyList.of(aq2, aq))
    // NOTE(review): a Double literal (0.9) declared as Quantity[Int, ...] --
    // presumably relies on coulomb's implicit value conversion; verify.
    val cq2: Quantity[Int, KilometersPerSecond] = 0.9.withUnit[KilometersPerSecond]
    // You can sort equivalent units if you specify the type which will auto convert
    assert(NonEmptyList.of[Quantity[Double, MetersPerSecond]](aq, cq2).sorted === NonEmptyList.of[Quantity[Double, MetersPerSecond]](cq2, aq))
    // This can't compile, the type of the list has a mix of all the units
    assert(compileErrors("NonEmptyList.of(aq, cq2).sorted").nonEmpty)
    // This can't compile, the quantitys are not comparable
    assert(compileErrors("NonEmptyList.of(aq, dq).sorted").nonEmpty)
  }
  // Discipline law suites for each instance.
  checkAll("EqTest", EqTests[Quantity[Int, MetersPerSecond]].eqv)
  checkAll("OrderTest", OrderTests[Quantity[Int, MetersPerSecond]].order)
  checkAll("FunctorTest", FunctorTests[Quantity[*, MetersPerSecond]].functor[Int, Double, Int])
  checkAll("TraverseTest", TraverseTests[Quantity[*, KilometersPerSecond]].traverse[Int, Int, Int, Int, Option, Option])
  checkAll("CommutativeMonadTest", CommutativeMonadTests[Quantity[*, KilometersPerSecond]].commutativeMonad[Int, Int, Int])
  checkAll("MonoidTest", MonoidTests[Quantity[Double, KilometersPerSecond]].monoid)
}
| erikerlandson/coulomb | coulomb-tests/shared/src/test/scala/coulomb/cats/CatsSuite.scala | Scala | apache-2.0 | 2,496 |
package todos
import suzaku.app.UIBase
import suzaku.platform.Transport
import suzaku.platform.web.{DOMUIManager, WebPlatform}
// Browser-side UI entry point for the Todos demo application.
class TodosUI(transport: Transport) extends UIBase(transport) {
  // Suzaku platform backing this UI (web/DOM).
  override val platform = WebPlatform

  // Registers the stock web widget implementations with the DOM UI manager.
  // NOTE(review): the cast assumes the widget manager is a DOMUIManager on this
  // platform -- confirm against UIBase's construction.
  override protected def main(): Unit = {
    suzaku.platform.web.widget.registerWidgets(widgetManager.asInstanceOf[DOMUIManager])
  }
}
| suzaku-io/suzaku | webdemo/src/main/scala/todos/TodosUI.scala | Scala | apache-2.0 | 369 |
/*******************************************************************************
* Copyright 2013 Simon Todd <simon@sltodd.co.uk>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package uk.co.sltodd.geneticakka
/**
 * Message asking the Population to add the given chromosomes to the population,
 * and start the algorithm.
 *
 * @param backup the saved population state to inject.
 */
case class InjectPopulation(backup : PopulationBackup)
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.example.MLPipeline
import com.intel.analytics.bigdl.nn.{ClassNLLCriterion, Linear, LogSoftMax, Sequential}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric.NumericFloat
import com.intel.analytics.bigdl.utils.Engine
import org.apache.spark.SparkContext
import org.apache.spark.ml.DLClassifier
import org.apache.spark.sql.SQLContext
/**
 * Logistic Regression with BigDL layers and DLClassifier.
 *
 * Trains a two-input, two-class model (Linear + LogSoftMax optimised under a
 * ClassNLLCriterion) on a tiny hard-coded dataset and prints the predictions.
 *
 * Fix over the previous revision: the SparkContext is now stopped before the
 * program exits instead of being leaked.
 */
object DLClassifierLogisticRegression {
  def main(args: Array[String]): Unit = {
    // Run on a single local core; Engine.createSparkConf adds BigDL conf keys.
    val conf = Engine.createSparkConf()
      .setAppName("DLClassifierLogisticRegression")
      .setMaster("local[1]")
    val sc = new SparkContext(conf)
    val sqlContext = SQLContext.getOrCreate(sc)
    Engine.init
    val model = Sequential().add(Linear(2, 2)).add(LogSoftMax())
    val criterion = ClassNLLCriterion()
    val estimator = new DLClassifier(model, criterion, Array(2))
      .setBatchSize(4)
      .setMaxEpoch(10)
    // Labels are 1-based, as ClassNLLCriterion expects.
    val data = sc.parallelize(Seq(
      (Array(0.0, 1.0), 1.0),
      (Array(1.0, 0.0), 2.0),
      (Array(0.0, 1.0), 1.0),
      (Array(1.0, 0.0), 2.0)))
    val df = sqlContext.createDataFrame(data).toDF("features", "label")
    val dlModel = estimator.fit(df)
    dlModel.transform(df).show(false)
    // Release Spark resources before exiting (previously missing).
    sc.stop()
  }
}
| jenniew/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/example/MLPipeline/DLClassifierLogisticRegression.scala | Scala | apache-2.0 | 1,887 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.random
import org.apache.commons.math3.special.Gamma
import org.apache.spark.SparkFunSuite
import org.apache.spark.util.StatCounter
// TODO update tests to use TestingUtils for floating point comparison after PR 1367 is merged
/**
 * Statistical sanity tests for the mllib random data generators: each generator
 * is checked for seed determinism / copy independence (apiChecks) and for
 * sample mean and standard deviation close to the distribution's theoretical
 * values (distributionChecks).
 */
class RandomDataGeneratorSuite extends SparkFunSuite {
  // Verifies the RandomDataGenerator contract: same seed => same sequence,
  // copy() => an independent RNG, same seed on the copy => same sequence.
  def apiChecks(gen: RandomDataGenerator[Double]) {
    // resetting seed should generate the same sequence of random numbers
    gen.setSeed(42L)
    val array1 = (0 until 1000).map(_ => gen.nextValue())
    gen.setSeed(42L)
    val array2 = (0 until 1000).map(_ => gen.nextValue())
    assert(array1.equals(array2))
    // copy() should contain a different instance of the rng
    // i.e. setting different seeds for different instances produces different sequences of
    // random numbers.
    val gen2 = gen.copy()
    gen.setSeed(0L)
    val array3 = (0 until 1000).map(_ => gen.nextValue())
    gen2.setSeed(1L)
    val array4 = (0 until 1000).map(_ => gen2.nextValue())
    // Compare arrays instead of elements since individual elements can coincide by chance but the
    // sequences should differ given two different seeds.
    assert(!array3.equals(array4))
    // test that setting the same seed in the copied instance produces the same sequence of numbers
    gen.setSeed(0L)
    val array5 = (0 until 1000).map(_ => gen.nextValue())
    gen2.setSeed(0L)
    val array6 = (0 until 1000).map(_ => gen2.nextValue())
    assert(array5.equals(array6))
  }
  // Draws 100k samples per seed and checks sample mean/stddev against the
  // expected values within an absolute tolerance of epsilon.
  def distributionChecks(gen: RandomDataGenerator[Double],
      mean: Double = 0.0,
      stddev: Double = 1.0,
      epsilon: Double = 0.01) {
    for (seed <- 0 until 5) {
      gen.setSeed(seed.toLong)
      val sample = (0 until 100000).map { _ => gen.nextValue()}
      val stats = new StatCounter(sample)
      assert(math.abs(stats.mean - mean) < epsilon)
      assert(math.abs(stats.stdev - stddev) < epsilon)
    }
  }
  test("UniformGenerator") {
    val uniform = new UniformGenerator()
    apiChecks(uniform)
    // Stddev of uniform distribution = (ub - lb) / math.sqrt(12)
    distributionChecks(uniform, 0.5, 1 / math.sqrt(12))
  }
  test("StandardNormalGenerator") {
    val normal = new StandardNormalGenerator()
    apiChecks(normal)
    distributionChecks(normal, 0.0, 1.0)
  }
  test("LogNormalGenerator") {
    List((0.0, 1.0), (0.0, 2.0), (2.0, 1.0), (2.0, 2.0)).foreach {
      case (mean: Double, vari: Double) =>
        val normal = new LogNormalGenerator(mean, math.sqrt(vari))
        apiChecks(normal)
        // mean of log normal = e^(mean + var / 2)
        val expectedMean = math.exp(mean + 0.5 * vari)
        // variance of log normal = (e^var - 1) * e^(2 * mean + var)
        val expectedStd = math.sqrt((math.exp(vari) - 1.0) * math.exp(2.0 * mean + vari))
        // since sampling error increases with variance, let's set
        // the absolute tolerance as a percentage
        val epsilon = 0.05 * expectedStd * expectedStd
        distributionChecks(normal, expectedMean, expectedStd, epsilon)
    }
  }
  test("PoissonGenerator") {
    // mean = 0.0 will not pass the API checks since 0.0 is always deterministically produced.
    for (mean <- List(1.0, 5.0, 100.0)) {
      val poisson = new PoissonGenerator(mean)
      apiChecks(poisson)
      distributionChecks(poisson, mean, math.sqrt(mean), 0.1)
    }
  }
  test("ExponentialGenerator") {
    // mean = 0.0 will not pass the API checks since 0.0 is always deterministically produced.
    for (mean <- List(2.0, 5.0, 10.0, 50.0, 100.0)) {
      val exponential = new ExponentialGenerator(mean)
      apiChecks(exponential)
      // var of exp = lambda^-2 = (1.0 / mean)^-2 = mean^2
      // since sampling error increases with variance, let's set
      // the absolute tolerance as a percentage
      val epsilon = 0.05 * mean * mean
      distributionChecks(exponential, mean, mean, epsilon)
    }
  }
  test("GammaGenerator") {
    // mean = 0.0 will not pass the API checks since 0.0 is always deterministically produced.
    List((1.0, 2.0), (2.0, 2.0), (3.0, 2.0), (5.0, 1.0), (9.0, 0.5)).foreach {
      case (shape: Double, scale: Double) =>
        val gamma = new GammaGenerator(shape, scale)
        apiChecks(gamma)
        // mean of gamma = shape * scale
        val expectedMean = shape * scale
        // var of gamma = shape * scale^2
        val expectedStd = math.sqrt(shape * scale * scale)
        distributionChecks(gamma, expectedMean, expectedStd, 0.1)
    }
  }
  test("WeibullGenerator") {
    List((1.0, 2.0), (2.0, 3.0), (2.5, 3.5), (10.4, 2.222)).foreach {
      case (alpha: Double, beta: Double) =>
        val weibull = new WeibullGenerator(alpha, beta)
        apiChecks(weibull)
        // Weibull moments via the gamma function: E[X] = beta * Gamma(1 + 1/alpha).
        val expectedMean = math.exp(Gamma.logGamma(1 + (1 / alpha))) * beta
        val expectedVariance = math.exp(
          Gamma.logGamma(1 + (2 / alpha))) * beta * beta - expectedMean * expectedMean
        val expectedStd = math.sqrt(expectedVariance)
        distributionChecks(weibull, expectedMean, expectedStd, 0.1)
    }
  }
}
| wangyixiaohuihui/spark2-annotation | mllib/src/test/scala/org/apache/spark/mllib/random/RandomDataGeneratorSuite.scala | Scala | apache-2.0 | 6,016 |
package approximation
import approximation.arraygrid._
/**
* Created by Даниил on 14.03.2017.
*/
/**
 * Strategy for one grid direction (orthogonal vs radial geometry): builds the
 * coefficient matrix for the sweep and supplies the geometric factors used in
 * heat-flow expressions. Semantics below are inferred from the Ortho/Radial
 * implementations -- confirm against the arraygrid helpers.
 */
sealed trait TypeDir{
  // Precomputed coefficient matrix for interior nodes rangeX between boundaries
  // leftX/rightX; sigma is passed through to the grid builder.
  def preDef(leftX: Double, rangeX: Array[Double], rightX: Double, sigma: Double): Array[Array[Double]]
  // Converts a heat-flow value to this geometry's form (x0..x3: boundary node coords).
  def toastHeatFlow(heatFlow: Double, x0: Double, x1: Double, x3: Double): Double
  // Pair (cell measure, coefficient toward the upper neighbour) at the lower boundary.
  def lowerHeatFlow(lowX: Double, midX: Double, uppX: Double): (Double, Double)
  // Pair (cell measure, coefficient toward the lower neighbour) at the upper boundary.
  def upperHeatFlow(lowX: Double, midX: Double, uppX: Double): (Double, Double)
  // Coefficient multiplying the temperature difference between two adjacent nodes.
  def heatFlowCoefs(lowX: Double, uppX: Double): Double
  // Counterpart of heatFlowCoefs used for the analytical solution.
  def analyticalCoefs(lowX: Double, uppX: Double): Double
}
/** Orthogonal (Cartesian) one-dimensional direction. */
class Ortho extends TypeDir {

  /** Builds the orthogonal-grid coefficient matrix via `arraygrid`. */
  override def preDef(leftX: Double, rangeX: Array[Double], rightX: Double, sigma: Double) =
    arraygrid.makeOrthogonalMatrix(leftX, rangeX, rightX, sigma)

  /** Not defined for the orthogonal direction. */
  override def toastHeatFlow(heatFlow: Double, x0: Double, x1: Double, x3: Double) = ???

  /** Cell height (`ah`) paired with 1 / spacing toward the upper neighbour. */
  override def lowerHeatFlow(lowX: Double, midX: Double, uppX: Double): (Double, Double) =
    (ah(lowX, midX, uppX), 1.0 / dh(midX, uppX))

  /** Cell height (`ah`) paired with 1 / spacing toward the lower neighbour. */
  override def upperHeatFlow(lowX: Double, midX: Double, uppX: Double): (Double, Double) =
    (ah(lowX, midX, uppX), 1.0 / dh(lowX, midX))

  /** Reciprocal of the distance between the two coordinates. */
  override def heatFlowCoefs(lowX: Double, uppX: Double): Double =
    1.0 / (uppX - lowX)

  /** In the orthogonal case the analytical coefficient equals the numeric one. */
  override def analyticalCoefs(lowX: Double, uppX: Double): Double =
    heatFlowCoefs(lowX, uppX)
}
/**
 * Radial (cylindrical) one-dimensional direction: cell measures are ring
 * volumes and the flow coefficients carry the 2*Pi circumference factor.
 */
class Radial extends TypeDir {

  /** Builds the radial-grid coefficient matrix via `arraygrid`. */
  override def preDef(leftX: Double, rangeX: Array[Double], rightX: Double, sigma: Double) = {
    arraygrid.makeRadialMatrix(leftX, rangeX, rightX, sigma)
  }

  /**
   * Scales a heat flow by 1 / (2 * Pi).
   *
   * NOTE(review): the original implementation also computed
   * `arraygrid.vol(r0, r1, r2)` and silently discarded the result. That dead
   * computation has been removed here. If the ring volume was actually meant
   * to enter the result (e.g. `heatFlow * volume / (2 * Pi)`), that is a
   * latent bug — confirm with the author.
   */
  override def toastHeatFlow(heatFlow: Double,
                             r0: Double, r1: Double, r2: Double): Double = {
    heatFlow / (math.Pi * 2.0)
  }

  /** Ring volume of the cell and the flow coefficient toward the upper neighbour. */
  override def lowerHeatFlow(lowR: Double, midR: Double, uppR: Double): (Double, Double) = {
    val volume = vol(lowR, midR, uppR)
    val c = rAtHalf(midR, uppR) / dh(midR, uppR)
    (volume, c)
  }

  /** Ring volume of the cell and the flow coefficient toward the lower neighbour. */
  override def upperHeatFlow(lowR: Double, midR: Double, uppR: Double): (Double, Double) = {
    val volume = vol(lowR, midR, uppR)
    val a = rAtHalf(lowR, midR) / dh(lowR, midR)
    (volume, a)
  }

  /** Mid-radius over the radial gap, times the full 2*Pi circumference. */
  override def heatFlowCoefs(lowX: Double, uppX: Double): Double = {
    val rAtHalf = (lowX + uppX) / 2.0
    rAtHalf / (uppX - lowX) * math.Pi * 2.0
  }

  /** Coefficients for both faces of the cell centred at `midX`. */
  def heatFlowCoefs(lowX: Double,
                    midX: Double,
                    uppX: Double): (Double, Double) = {
    (heatFlowCoefs(lowX, midX), heatFlowCoefs(midX, uppX))
  }

  /** Analytical (logarithmic) radial coefficient between the two radii. */
  override def analyticalCoefs(lowX: Double, uppX: Double): Double = {
    math.log(uppX / lowX) / (math.Pi * 2)
  }
}
/**
 * Angular direction — not yet implemented.
 *
 * NOTE(review): the bare `???` in the class body executes as part of the
 * constructor, so `new Angular` throws scala.NotImplementedError immediately.
 * If the class is meant to be constructible before the methods are
 * implemented, that statement should be removed and only the stubs kept.
 */
class Angular extends TypeDir{
??? //TODO implement angular direction based with use of arraygrid.makeOrthogonalMatrix method
// All members below are unimplemented stubs; each throws NotImplementedError.
override def preDef(leftX: Double,
rangeX: Array[Double],
rightX: Double,
sigma: Double): Array[Array[Double]] = ???
override def toastHeatFlow(heatFlow: Double, x0: Double, x1: Double, x3: Double) = ???
override def lowerHeatFlow(lowX: Double, midX: Double, uppX: Double) : (Double, Double) = ???
override def heatFlowCoefs(lowX: Double, uppX: Double): Double = {
???
}
override def analyticalCoefs(lowX: Double, uppX: Double) = ???
override def upperHeatFlow(lowX: Double, midX: Double, uppX: Double): (Double, Double) = ???
}
| daniil-timofeev/gridsplines | approximation/src/main/scala/approximation/TypeDir.scala | Scala | apache-2.0 | 3,480 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.executor
import java.io.{File, NotSerializableException}
import java.lang.Thread.UncaughtExceptionHandler
import java.lang.management.ManagementFactory
import java.net.{URI, URL}
import java.nio.ByteBuffer
import java.util.Properties
import java.util.concurrent._
import javax.annotation.concurrent.GuardedBy
import scala.collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer, HashMap, Map}
import scala.concurrent.duration._
import scala.util.control.NonFatal
import com.google.common.util.concurrent.ThreadFactoryBuilder
import org.apache.spark._
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.memory.{SparkOutOfMemoryError, TaskMemoryManager}
import org.apache.spark.rpc.RpcTimeout
import org.apache.spark.scheduler._
import org.apache.spark.shuffle.FetchFailedException
import org.apache.spark.storage.{StorageLevel, TaskResultBlockId}
import org.apache.spark.util._
import org.apache.spark.util.io.ChunkedByteBuffer
/**
* Spark executor, backed by a threadpool to run tasks.
*
* This can be used with Mesos, YARN, and the standalone scheduler.
* An internal RPC interface is used for communication with the driver,
* except in the case of Mesos fine-grained mode.
*/
private[spark] class Executor(
executorId: String,
executorHostname: String,
env: SparkEnv,
userClassPath: Seq[URL] = Nil,
isLocal: Boolean = false,
uncaughtExceptionHandler: UncaughtExceptionHandler = new SparkUncaughtExceptionHandler)
extends Logging {
logInfo(s"Starting executor ID $executorId on host $executorHostname")
// Application dependencies (added through SparkContext) that we've fetched so far on this node.
// Each map holds the master's timestamp for the version of that file or JAR we got.
private val currentFiles: HashMap[String, Long] = new HashMap[String, Long]()
private val currentJars: HashMap[String, Long] = new HashMap[String, Long]()
private val EMPTY_BYTE_BUFFER = ByteBuffer.wrap(new Array[Byte](0))
private val conf = env.conf
// No ip or host:port - just hostname
Utils.checkHost(executorHostname)
// must not have port specified.
assert (0 == Utils.parseHostPort(executorHostname)._2)
// Make sure the local hostname we report matches the cluster scheduler's name for this host
Utils.setCustomHostname(executorHostname)
if (!isLocal) {
// Setup an uncaught exception handler for non-local mode.
// Make any thread terminations due to uncaught exceptions kill the entire
// executor process to avoid surprising stalls.
Thread.setDefaultUncaughtExceptionHandler(uncaughtExceptionHandler)
}
// Start worker thread pool
private val threadPool = {
val threadFactory = new ThreadFactoryBuilder()
.setDaemon(true)
.setNameFormat("Executor task launch worker-%d")
.setThreadFactory(new ThreadFactory {
override def newThread(r: Runnable): Thread =
// Use UninterruptibleThread to run tasks so that we can allow running codes without being
// interrupted by `Thread.interrupt()`. Some issues, such as KAFKA-1894, HADOOP-10622,
// will hang forever if some methods are interrupted.
new UninterruptibleThread(r, "unused") // thread name will be set by ThreadFactoryBuilder
})
.build()
Executors.newCachedThreadPool(threadFactory).asInstanceOf[ThreadPoolExecutor]
}
private val executorSource = new ExecutorSource(threadPool, executorId)
// Pool used for threads that supervise task killing / cancellation
private val taskReaperPool = ThreadUtils.newDaemonCachedThreadPool("Task reaper")
// For tasks which are in the process of being killed, this map holds the most recently created
// TaskReaper. All accesses to this map should be synchronized on the map itself (this isn't
// a ConcurrentHashMap because we use the synchronization for purposes other than simply guarding
// the integrity of the map's internal state). The purpose of this map is to prevent the creation
// of a separate TaskReaper for every killTask() of a given task. Instead, this map allows us to
// track whether an existing TaskReaper fulfills the role of a TaskReaper that we would otherwise
// create. The map key is a task id.
private val taskReaperForTask: HashMap[Long, TaskReaper] = HashMap[Long, TaskReaper]()
if (!isLocal) {
env.blockManager.initialize(conf.getAppId)
env.metricsSystem.registerSource(executorSource)
env.metricsSystem.registerSource(env.blockManager.shuffleMetricsSource)
}
// Whether to load classes in user jars before those in Spark jars
private val userClassPathFirst = conf.get(EXECUTOR_USER_CLASS_PATH_FIRST)
// Whether to monitor killed / interrupted tasks
private val taskReaperEnabled = conf.get(TASK_REAPER_ENABLED)
// Create our ClassLoader
// do this after SparkEnv creation so can access the SecurityManager
private val urlClassLoader = createClassLoader()
private val replClassLoader = addReplClassLoaderIfNeeded(urlClassLoader)
// Set the classloader for serializer
env.serializer.setDefaultClassLoader(replClassLoader)
// SPARK-21928. SerializerManager's internal instance of Kryo might get used in netty threads
// for fetching remote cached RDD blocks, so need to make sure it uses the right classloader too.
env.serializerManager.setDefaultClassLoader(replClassLoader)
private val executorPlugins: Seq[ExecutorPlugin] = {
val pluginNames = conf.get(EXECUTOR_PLUGINS)
if (pluginNames.nonEmpty) {
logDebug(s"Initializing the following plugins: ${pluginNames.mkString(", ")}")
// Plugins need to load using a class loader that includes the executor's user classpath
val pluginList: Seq[ExecutorPlugin] =
Utils.withContextClassLoader(replClassLoader) {
val plugins = Utils.loadExtensions(classOf[ExecutorPlugin], pluginNames, conf)
plugins.foreach { plugin =>
plugin.init()
logDebug(s"Successfully loaded plugin " + plugin.getClass().getCanonicalName())
}
plugins
}
logDebug("Finished initializing plugins")
pluginList
} else {
Nil
}
}
// Max size of direct result. If task result is bigger than this, we use the block manager
// to send the result back.
private val maxDirectResultSize = Math.min(
conf.get(TASK_MAX_DIRECT_RESULT_SIZE),
RpcUtils.maxMessageSizeBytes(conf))
private val maxResultSize = conf.get(MAX_RESULT_SIZE)
// Maintains the list of running tasks.
private val runningTasks = new ConcurrentHashMap[Long, TaskRunner]
/**
* When an executor is unable to send heartbeats to the driver more than `HEARTBEAT_MAX_FAILURES`
* times, it should kill itself. The default value is 60. It means we will retry to send
* heartbeats about 10 minutes because the heartbeat interval is 10s.
*/
private val HEARTBEAT_MAX_FAILURES = conf.get(EXECUTOR_HEARTBEAT_MAX_FAILURES)
/**
* Whether to drop empty accumulators from heartbeats sent to the driver. Including the empty
* accumulators (that satisfy isZero) can make the size of the heartbeat message very large.
*/
private val HEARTBEAT_DROP_ZEROES = conf.get(EXECUTOR_HEARTBEAT_DROP_ZERO_ACCUMULATOR_UPDATES)
/**
* Interval to send heartbeats, in milliseconds
*/
private val HEARTBEAT_INTERVAL_MS = conf.get(EXECUTOR_HEARTBEAT_INTERVAL)
// Executor for the heartbeat task.
private val heartbeater = new Heartbeater(
env.memoryManager,
() => Executor.this.reportHeartBeat(),
"executor-heartbeater",
HEARTBEAT_INTERVAL_MS)
// must be initialized before running startDriverHeartbeat()
private val heartbeatReceiverRef =
RpcUtils.makeDriverRef(HeartbeatReceiver.ENDPOINT_NAME, conf, env.rpcEnv)
/**
* Count the failure times of heartbeat. It should only be accessed in the heartbeat thread. Each
* successful heartbeat will reset it to 0.
*/
private var heartbeatFailures = 0
heartbeater.start()
private[executor] def numRunningTasks: Int = runningTasks.size()
/**
 * Wraps the task description in a [[TaskRunner]], registers it in the
 * running-task map, and hands it to the worker thread pool for execution.
 */
def launchTask(context: ExecutorBackend, taskDescription: TaskDescription): Unit = {
  val runner = new TaskRunner(context, taskDescription)
  runningTasks.put(taskDescription.taskId, runner)
  threadPool.execute(runner)
}
/**
 * Kills the task with the given id if it is still running. When the task
 * reaper is enabled, the kill is delegated to a TaskReaper that also monitors
 * the task until it exits; otherwise the TaskRunner is killed directly.
 */
def killTask(taskId: Long, interruptThread: Boolean, reason: String): Unit = {
val taskRunner = runningTasks.get(taskId)
if (taskRunner != null) {
if (taskReaperEnabled) {
val maybeNewTaskReaper: Option[TaskReaper] = taskReaperForTask.synchronized {
// Create a new reaper only if none exists yet, or if this request escalates
// a previous non-interrupting kill to an interrupting one.
val shouldCreateReaper = taskReaperForTask.get(taskId) match {
case None => true
case Some(existingReaper) => interruptThread && !existingReaper.interruptThread
}
if (shouldCreateReaper) {
val taskReaper = new TaskReaper(
taskRunner, interruptThread = interruptThread, reason = reason)
taskReaperForTask(taskId) = taskReaper
Some(taskReaper)
} else {
None
}
}
// Execute the TaskReaper from outside of the synchronized block.
maybeNewTaskReaper.foreach(taskReaperPool.execute)
} else {
taskRunner.kill(interruptThread = interruptThread, reason = reason)
}
}
}
/**
 * Function to kill the running tasks in an executor.
 * This can be called by executor back-ends to kill the
 * tasks instead of taking the JVM down.
 * @param interruptThread whether to interrupt the task thread
 */
def killAllTasks(interruptThread: Boolean, reason: String) : Unit = {
  // Route every currently running task id through killTask so the
  // task-reaper bookkeeping stays consistent.
  for (taskId <- runningTasks.keys().asScala) {
    killTask(taskId, interruptThread = interruptThread, reason = reason)
  }
}
/**
 * Shuts this executor down: flushes metrics, stops the heartbeater thread,
 * shuts down the task thread pool, notifies plugins, and (in non-local mode)
 * stops the SparkEnv. The ordering of these steps is deliberate.
 */
def stop(): Unit = {
env.metricsSystem.report()
try {
heartbeater.stop()
} catch {
// Best effort: a heartbeater failure must not abort the rest of shutdown.
case NonFatal(e) =>
logWarning("Unable to stop heartbeater", e)
}
threadPool.shutdown()
// Notify plugins that executor is shutting down so they can terminate cleanly
Utils.withContextClassLoader(replClassLoader) {
executorPlugins.foreach { plugin =>
try {
plugin.shutdown()
} catch {
// A failing plugin must not prevent the remaining plugins from shutting down.
case e: Exception =>
logWarning("Plugin " + plugin.getClass().getCanonicalName() + " shutdown failed", e)
}
}
}
if (!isLocal) {
env.stop()
}
}
/** Returns the total time, across all collectors, this JVM has spent in garbage collection. */
private def computeTotalGcTime(): Long = {
  ManagementFactory.getGarbageCollectorMXBeans.asScala
    .foldLeft(0L)((acc, bean) => acc + bean.getCollectionTime)
}
/**
 * Runnable that deserializes and executes a single task, reports its status
 * and metrics back to the driver via the ExecutorBackend, and supports
 * cooperative killing. One TaskRunner is created per launched task and run
 * on the executor's worker thread pool.
 */
class TaskRunner(
execBackend: ExecutorBackend,
private val taskDescription: TaskDescription)
extends Runnable {
val taskId = taskDescription.taskId
val threadName = s"Executor task launch worker for task $taskId"
private val taskName = taskDescription.name
/** If specified, this task has been killed and this option contains the reason. */
@volatile private var reasonIfKilled: Option[String] = None
@volatile private var threadId: Long = -1
def getThreadId: Long = threadId
/** Whether this task has been finished. */
@GuardedBy("TaskRunner.this")
private var finished = false
def isFinished: Boolean = synchronized { finished }
/** How much the JVM process has spent in GC when the task starts to run. */
@volatile var startGCTime: Long = _
/**
* The task to run. This will be set in run() by deserializing the task binary coming
* from the driver. Once it is set, it will never be changed.
*/
@volatile var task: Task[Any] = _
// Records the kill reason and forwards the kill to the task (if already
// deserialized and not yet finished). Safe to call from any thread.
def kill(interruptThread: Boolean, reason: String): Unit = {
logInfo(s"Executor is trying to kill $taskName (TID $taskId), reason: $reason")
reasonIfKilled = Some(reason)
if (task != null) {
synchronized {
if (!finished) {
task.kill(interruptThread, reason)
}
}
}
}
/**
* Set the finished flag to true and clear the current thread's interrupt status
*/
private def setTaskFinishedAndClearInterruptStatus(): Unit = synchronized {
this.finished = true
// SPARK-14234 - Reset the interrupted status of the thread to avoid the
// ClosedByInterruptException during execBackend.statusUpdate which causes
// Executor to crash
Thread.interrupted()
// Notify any waiting TaskReapers. Generally there will only be one reaper per task but there
// is a rare corner-case where one task can have two reapers in case cancel(interrupt=False)
// is followed by cancel(interrupt=True). Thus we use notifyAll() to avoid a lost wakeup:
notifyAll()
}
/**
* Utility function to:
* 1. Report executor runtime and JVM gc time if possible
* 2. Collect accumulator updates
* 3. Set the finished flag to true and clear current thread's interrupt status
*/
private def collectAccumulatorsAndResetStatusOnFailure(taskStartTime: Long) = {
// Report executor runtime and JVM gc time
Option(task).foreach(t => {
t.metrics.setExecutorRunTime(System.currentTimeMillis() - taskStartTime)
t.metrics.setJvmGCTime(computeTotalGcTime() - startGCTime)
})
// Collect latest accumulator values to report back to the driver
val accums: Seq[AccumulatorV2[_, _]] =
Option(task).map(_.collectAccumulatorUpdates(taskFailed = true)).getOrElse(Seq.empty)
val accUpdates = accums.map(acc => acc.toInfo(Some(acc.value), None))
setTaskFinishedAndClearInterruptStatus()
(accums, accUpdates)
}
// Full task lifecycle: deserialize, run, serialize the result (directly or
// via the block manager depending on size), report metrics, and translate
// every failure mode into the appropriate status update for the driver.
override def run(): Unit = {
threadId = Thread.currentThread.getId
Thread.currentThread.setName(threadName)
val threadMXBean = ManagementFactory.getThreadMXBean
val taskMemoryManager = new TaskMemoryManager(env.memoryManager, taskId)
val deserializeStartTime = System.currentTimeMillis()
val deserializeStartCpuTime = if (threadMXBean.isCurrentThreadCpuTimeSupported) {
threadMXBean.getCurrentThreadCpuTime
} else 0L
Thread.currentThread.setContextClassLoader(replClassLoader)
val ser = env.closureSerializer.newInstance()
logInfo(s"Running $taskName (TID $taskId)")
// Tell the driver we started before doing any potentially slow work.
execBackend.statusUpdate(taskId, TaskState.RUNNING, EMPTY_BYTE_BUFFER)
var taskStartTime: Long = 0
var taskStartCpu: Long = 0
startGCTime = computeTotalGcTime()
try {
// Must be set before updateDependencies() is called, in case fetching dependencies
// requires access to properties contained within (e.g. for access control).
Executor.taskDeserializationProps.set(taskDescription.properties)
updateDependencies(taskDescription.addedFiles, taskDescription.addedJars)
task = ser.deserialize[Task[Any]](
taskDescription.serializedTask, Thread.currentThread.getContextClassLoader)
task.localProperties = taskDescription.properties
task.setTaskMemoryManager(taskMemoryManager)
// If this task has been killed before we deserialized it, let's quit now. Otherwise,
// continue executing the task.
val killReason = reasonIfKilled
if (killReason.isDefined) {
// Throw an exception rather than returning, because returning within a try{} block
// causes a NonLocalReturnControl exception to be thrown. The NonLocalReturnControl
// exception will be caught by the catch block, leading to an incorrect ExceptionFailure
// for the task.
throw new TaskKilledException(killReason.get)
}
// The purpose of updating the epoch here is to invalidate executor map output status cache
// in case FetchFailures have occurred. In local mode `env.mapOutputTracker` will be
// MapOutputTrackerMaster and its cache invalidation is not based on epoch numbers so
// we don't need to make any special calls here.
if (!isLocal) {
logDebug("Task " + taskId + "'s epoch is " + task.epoch)
env.mapOutputTracker.asInstanceOf[MapOutputTrackerWorker].updateEpoch(task.epoch)
}
// Run the actual task and measure its runtime.
taskStartTime = System.currentTimeMillis()
taskStartCpu = if (threadMXBean.isCurrentThreadCpuTimeSupported) {
threadMXBean.getCurrentThreadCpuTime
} else 0L
var threwException = true
val value = Utils.tryWithSafeFinally {
val res = task.run(
taskAttemptId = taskId,
attemptNumber = taskDescription.attemptNumber,
metricsSystem = env.metricsSystem)
threwException = false
res
} {
// Whether the task succeeded or not, release its block locks and memory,
// and surface leaks (as errors or warnings depending on configuration).
val releasedLocks = env.blockManager.releaseAllLocksForTask(taskId)
val freedMemory = taskMemoryManager.cleanUpAllAllocatedMemory()
if (freedMemory > 0 && !threwException) {
val errMsg = s"Managed memory leak detected; size = $freedMemory bytes, TID = $taskId"
if (conf.get(UNSAFE_EXCEPTION_ON_MEMORY_LEAK)) {
throw new SparkException(errMsg)
} else {
logWarning(errMsg)
}
}
if (releasedLocks.nonEmpty && !threwException) {
val errMsg =
s"${releasedLocks.size} block locks were not released by TID = $taskId:\\n" +
releasedLocks.mkString("[", ", ", "]")
if (conf.get(STORAGE_EXCEPTION_PIN_LEAK)) {
throw new SparkException(errMsg)
} else {
logInfo(errMsg)
}
}
}
task.context.fetchFailed.foreach { fetchFailure =>
// uh-oh. it appears the user code has caught the fetch-failure without throwing any
// other exceptions. Its *possible* this is what the user meant to do (though highly
// unlikely). So we will log an error and keep going.
logError(s"TID ${taskId} completed successfully though internally it encountered " +
s"unrecoverable fetch failures! Most likely this means user code is incorrectly " +
s"swallowing Spark's internal ${classOf[FetchFailedException]}", fetchFailure)
}
val taskFinish = System.currentTimeMillis()
val taskFinishCpu = if (threadMXBean.isCurrentThreadCpuTimeSupported) {
threadMXBean.getCurrentThreadCpuTime
} else 0L
// If the task has been killed, let's fail it.
task.context.killTaskIfInterrupted()
val resultSer = env.serializer.newInstance()
val beforeSerialization = System.currentTimeMillis()
val valueBytes = resultSer.serialize(value)
val afterSerialization = System.currentTimeMillis()
// Deserialization happens in two parts: first, we deserialize a Task object, which
// includes the Partition. Second, Task.run() deserializes the RDD and function to be run.
task.metrics.setExecutorDeserializeTime(
(taskStartTime - deserializeStartTime) + task.executorDeserializeTime)
task.metrics.setExecutorDeserializeCpuTime(
(taskStartCpu - deserializeStartCpuTime) + task.executorDeserializeCpuTime)
// We need to subtract Task.run()'s deserialization time to avoid double-counting
task.metrics.setExecutorRunTime((taskFinish - taskStartTime) - task.executorDeserializeTime)
task.metrics.setExecutorCpuTime(
(taskFinishCpu - taskStartCpu) - task.executorDeserializeCpuTime)
task.metrics.setJvmGCTime(computeTotalGcTime() - startGCTime)
task.metrics.setResultSerializationTime(afterSerialization - beforeSerialization)
// Expose task metrics using the Dropwizard metrics system.
// Update task metrics counters
executorSource.METRIC_CPU_TIME.inc(task.metrics.executorCpuTime)
executorSource.METRIC_RUN_TIME.inc(task.metrics.executorRunTime)
executorSource.METRIC_JVM_GC_TIME.inc(task.metrics.jvmGCTime)
executorSource.METRIC_DESERIALIZE_TIME.inc(task.metrics.executorDeserializeTime)
executorSource.METRIC_DESERIALIZE_CPU_TIME.inc(task.metrics.executorDeserializeCpuTime)
executorSource.METRIC_RESULT_SERIALIZE_TIME.inc(task.metrics.resultSerializationTime)
executorSource.METRIC_SHUFFLE_FETCH_WAIT_TIME
.inc(task.metrics.shuffleReadMetrics.fetchWaitTime)
executorSource.METRIC_SHUFFLE_WRITE_TIME.inc(task.metrics.shuffleWriteMetrics.writeTime)
executorSource.METRIC_SHUFFLE_TOTAL_BYTES_READ
.inc(task.metrics.shuffleReadMetrics.totalBytesRead)
executorSource.METRIC_SHUFFLE_REMOTE_BYTES_READ
.inc(task.metrics.shuffleReadMetrics.remoteBytesRead)
executorSource.METRIC_SHUFFLE_REMOTE_BYTES_READ_TO_DISK
.inc(task.metrics.shuffleReadMetrics.remoteBytesReadToDisk)
executorSource.METRIC_SHUFFLE_LOCAL_BYTES_READ
.inc(task.metrics.shuffleReadMetrics.localBytesRead)
executorSource.METRIC_SHUFFLE_RECORDS_READ
.inc(task.metrics.shuffleReadMetrics.recordsRead)
executorSource.METRIC_SHUFFLE_REMOTE_BLOCKS_FETCHED
.inc(task.metrics.shuffleReadMetrics.remoteBlocksFetched)
executorSource.METRIC_SHUFFLE_LOCAL_BLOCKS_FETCHED
.inc(task.metrics.shuffleReadMetrics.localBlocksFetched)
executorSource.METRIC_SHUFFLE_BYTES_WRITTEN
.inc(task.metrics.shuffleWriteMetrics.bytesWritten)
executorSource.METRIC_SHUFFLE_RECORDS_WRITTEN
.inc(task.metrics.shuffleWriteMetrics.recordsWritten)
executorSource.METRIC_INPUT_BYTES_READ
.inc(task.metrics.inputMetrics.bytesRead)
executorSource.METRIC_INPUT_RECORDS_READ
.inc(task.metrics.inputMetrics.recordsRead)
executorSource.METRIC_OUTPUT_BYTES_WRITTEN
.inc(task.metrics.outputMetrics.bytesWritten)
executorSource.METRIC_OUTPUT_RECORDS_WRITTEN
.inc(task.metrics.outputMetrics.recordsWritten)
executorSource.METRIC_RESULT_SIZE.inc(task.metrics.resultSize)
executorSource.METRIC_DISK_BYTES_SPILLED.inc(task.metrics.diskBytesSpilled)
executorSource.METRIC_MEMORY_BYTES_SPILLED.inc(task.metrics.memoryBytesSpilled)
// Note: accumulator updates must be collected after TaskMetrics is updated
val accumUpdates = task.collectAccumulatorUpdates()
// TODO: do not serialize value twice
val directResult = new DirectTaskResult(valueBytes, accumUpdates)
val serializedDirectResult = ser.serialize(directResult)
val resultSize = serializedDirectResult.limit()
// directSend = sending directly back to the driver
val serializedResult: ByteBuffer = {
if (maxResultSize > 0 && resultSize > maxResultSize) {
// Over the hard cap: drop the result and send only its metadata.
logWarning(s"Finished $taskName (TID $taskId). Result is larger than maxResultSize " +
s"(${Utils.bytesToString(resultSize)} > ${Utils.bytesToString(maxResultSize)}), " +
s"dropping it.")
ser.serialize(new IndirectTaskResult[Any](TaskResultBlockId(taskId), resultSize))
} else if (resultSize > maxDirectResultSize) {
// Too big for an RPC message: park the bytes in the block manager and
// send the driver a reference instead.
val blockId = TaskResultBlockId(taskId)
env.blockManager.putBytes(
blockId,
new ChunkedByteBuffer(serializedDirectResult.duplicate()),
StorageLevel.MEMORY_AND_DISK_SER)
logInfo(
s"Finished $taskName (TID $taskId). $resultSize bytes result sent via BlockManager)")
ser.serialize(new IndirectTaskResult[Any](blockId, resultSize))
} else {
logInfo(s"Finished $taskName (TID $taskId). $resultSize bytes result sent to driver")
serializedDirectResult
}
}
setTaskFinishedAndClearInterruptStatus()
execBackend.statusUpdate(taskId, TaskState.FINISHED, serializedResult)
} catch {
case t: TaskKilledException =>
logInfo(s"Executor killed $taskName (TID $taskId), reason: ${t.reason}")
val (accums, accUpdates) = collectAccumulatorsAndResetStatusOnFailure(taskStartTime)
val serializedTK = ser.serialize(TaskKilled(t.reason, accUpdates, accums))
execBackend.statusUpdate(taskId, TaskState.KILLED, serializedTK)
case _: InterruptedException | NonFatal(_) if
task != null && task.reasonIfKilled.isDefined =>
val killReason = task.reasonIfKilled.getOrElse("unknown reason")
logInfo(s"Executor interrupted and killed $taskName (TID $taskId), reason: $killReason")
val (accums, accUpdates) = collectAccumulatorsAndResetStatusOnFailure(taskStartTime)
val serializedTK = ser.serialize(TaskKilled(killReason, accUpdates, accums))
execBackend.statusUpdate(taskId, TaskState.KILLED, serializedTK)
case t: Throwable if hasFetchFailure && !Utils.isFatalError(t) =>
val reason = task.context.fetchFailed.get.toTaskFailedReason
if (!t.isInstanceOf[FetchFailedException]) {
// there was a fetch failure in the task, but some user code wrapped that exception
// and threw something else. Regardless, we treat it as a fetch failure.
val fetchFailedCls = classOf[FetchFailedException].getName
logWarning(s"TID ${taskId} encountered a ${fetchFailedCls} and " +
s"failed, but the ${fetchFailedCls} was hidden by another " +
s"exception. Spark is handling this like a fetch failure and ignoring the " +
s"other exception: $t")
}
setTaskFinishedAndClearInterruptStatus()
execBackend.statusUpdate(taskId, TaskState.FAILED, ser.serialize(reason))
case CausedBy(cDE: CommitDeniedException) =>
val reason = cDE.toTaskCommitDeniedReason
setTaskFinishedAndClearInterruptStatus()
execBackend.statusUpdate(taskId, TaskState.KILLED, ser.serialize(reason))
case t: Throwable =>
// Attempt to exit cleanly by informing the driver of our failure.
// If anything goes wrong (or this was a fatal exception), we will delegate to
// the default uncaught exception handler, which will terminate the Executor.
logError(s"Exception in $taskName (TID $taskId)", t)
// SPARK-20904: Do not report failure to driver if it happened during shut down. Because
// libraries may set up shutdown hooks that race with running tasks during shutdown,
// spurious failures may occur and can result in improper accounting in the driver (e.g.
// the task failure would not be ignored if the shutdown happened because of preemption,
// instead of an app issue).
if (!ShutdownHookManager.inShutdown()) {
val (accums, accUpdates) = collectAccumulatorsAndResetStatusOnFailure(taskStartTime)
val serializedTaskEndReason = {
try {
ser.serialize(new ExceptionFailure(t, accUpdates).withAccums(accums))
} catch {
case _: NotSerializableException =>
// t is not serializable so just send the stacktrace
ser.serialize(new ExceptionFailure(t, accUpdates, false).withAccums(accums))
}
}
setTaskFinishedAndClearInterruptStatus()
execBackend.statusUpdate(taskId, TaskState.FAILED, serializedTaskEndReason)
} else {
logInfo("Not reporting error to driver during JVM shutdown.")
}
// Don't forcibly exit unless the exception was inherently fatal, to avoid
// stopping other tasks unnecessarily.
if (!t.isInstanceOf[SparkOutOfMemoryError] && Utils.isFatalError(t)) {
uncaughtExceptionHandler.uncaughtException(Thread.currentThread(), t)
}
} finally {
runningTasks.remove(taskId)
}
}
// True when the task recorded a fetch failure in its context (even if user
// code swallowed the exception itself).
private def hasFetchFailure: Boolean = {
task != null && task.context != null && task.context.fetchFailed.isDefined
}
}
/**
* Supervises the killing / cancellation of a task by sending the interrupted flag, optionally
* sending a Thread.interrupt(), and monitoring the task until it finishes.
*
* Spark's current task cancellation / task killing mechanism is "best effort" because some tasks
* may not be interruptable or may not respond to their "killed" flags being set. If a significant
* fraction of a cluster's task slots are occupied by tasks that have been marked as killed but
* remain running then this can lead to a situation where new jobs and tasks are starved of
* resources that are being used by these zombie tasks.
*
* The TaskReaper was introduced in SPARK-18761 as a mechanism to monitor and clean up zombie
* tasks. For backwards-compatibility / backportability this component is disabled by default
* and must be explicitly enabled by setting `spark.task.reaper.enabled=true`.
*
* A TaskReaper is created for a particular task when that task is killed / cancelled. Typically
* a task will have only one TaskReaper, but it's possible for a task to have up to two reapers
* in case kill is called twice with different values for the `interrupt` parameter.
*
* Once created, a TaskReaper will run until its supervised task has finished running. If the
* TaskReaper has not been configured to kill the JVM after a timeout (i.e. if
* `spark.task.reaper.killTimeout < 0`) then this implies that the TaskReaper may run indefinitely
* if the supervised task never exits.
*/
private class TaskReaper(
taskRunner: TaskRunner,
val interruptThread: Boolean,
val reason: String)
extends Runnable {
private[this] val taskId: Long = taskRunner.taskId
private[this] val killPollingIntervalMs: Long = conf.get(TASK_REAPER_POLLING_INTERVAL)
private[this] val killTimeoutMs: Long = conf.get(TASK_REAPER_KILL_TIMEOUT)
private[this] val takeThreadDump: Boolean = conf.get(TASK_REAPER_THREAD_DUMP)
// Kills the supervised task once, then polls (wait/notify on the TaskRunner)
// until it finishes or the optional kill timeout elapses; on timeout it
// either logs (local mode) or throws so the executor JVM exits.
override def run(): Unit = {
val startTimeMs = System.currentTimeMillis()
def elapsedTimeMs = System.currentTimeMillis() - startTimeMs
def timeoutExceeded(): Boolean = killTimeoutMs > 0 && elapsedTimeMs > killTimeoutMs
try {
// Only attempt to kill the task once. If interruptThread = false then a second kill
// attempt would be a no-op and if interruptThread = true then it may not be safe or
// effective to interrupt multiple times:
taskRunner.kill(interruptThread = interruptThread, reason = reason)
// Monitor the killed task until it exits. The synchronization logic here is complicated
// because we don't want to synchronize on the taskRunner while possibly taking a thread
// dump, but we also need to be careful to avoid races between checking whether the task
// has finished and wait()ing for it to finish.
var finished: Boolean = false
while (!finished && !timeoutExceeded()) {
taskRunner.synchronized {
// We need to synchronize on the TaskRunner while checking whether the task has
// finished in order to avoid a race where the task is marked as finished right after
// we check and before we call wait().
if (taskRunner.isFinished) {
finished = true
} else {
taskRunner.wait(killPollingIntervalMs)
}
}
if (taskRunner.isFinished) {
finished = true
} else {
logWarning(s"Killed task $taskId is still running after $elapsedTimeMs ms")
if (takeThreadDump) {
try {
Utils.getThreadDumpForThread(taskRunner.getThreadId).foreach { thread =>
if (thread.threadName == taskRunner.threadName) {
logWarning(s"Thread dump from task $taskId:\\n${thread.stackTrace}")
}
}
} catch {
// Thread dumps are best effort; never let them kill the reaper itself.
case NonFatal(e) =>
logWarning("Exception thrown while obtaining thread dump: ", e)
}
}
}
}
if (!taskRunner.isFinished && timeoutExceeded()) {
if (isLocal) {
logError(s"Killed task $taskId could not be stopped within $killTimeoutMs ms; " +
"not killing JVM because we are running in local mode.")
} else {
// In non-local-mode, the exception thrown here will bubble up to the uncaught exception
// handler and cause the executor JVM to exit.
throw new SparkException(
s"Killing executor JVM because killed task $taskId could not be stopped within " +
s"$killTimeoutMs ms.")
}
}
} finally {
// Clean up entries in the taskReaperForTask map.
taskReaperForTask.synchronized {
taskReaperForTask.get(taskId).foreach { taskReaperInMap =>
if (taskReaperInMap eq this) {
taskReaperForTask.remove(taskId)
} else {
// This must have been a TaskReaper where interruptThread == false where a subsequent
// killTask() call for the same task had interruptThread == true and overwrote the
// map entry.
}
}
}
}
}
}
/**
 * Create a ClassLoader for use in tasks, adding any JARs specified by the user or any classes
 * created by the interpreter to the search path.
 *
 * @return a mutable class loader whose search path covers the user class path plus every
 *         jar currently registered in `currentJars` (assumed already fetched locally)
 */
private def createClassLoader(): MutableURLClassLoader = {
  // Register every user-classpath jar under its file name, all stamped with "now".
  val registrationTime = System.currentTimeMillis()
  for (url <- userClassPath) {
    val jarName = url.getPath().split("/").last
    currentJars(jarName) = registrationTime
  }
  val parentLoader = Utils.getContextOrSparkClassLoader
  // Build the search path: user classpath entries plus every registered jar,
  // each resolved by its bare file name in the working directory.
  val fetchedJarUrls = currentJars.keySet.map { uri =>
    new File(uri.split("/").last).toURI.toURL
  }
  val urls = userClassPath.toArray ++ fetchedJarUrls
  // Child-first mode lets user classes shadow Spark's own; otherwise parent-first.
  if (userClassPathFirst) new ChildFirstURLClassLoader(urls, parentLoader)
  else new MutableURLClassLoader(urls, parentLoader)
}
/**
 * If the REPL is in use, add another ClassLoader that will read
 * new classes defined by the REPL as the user types code
 */
private def addReplClassLoaderIfNeeded(parent: ClassLoader): ClassLoader = {
// "spark.repl.class.uri" is only set when a REPL is driving the application;
// when absent we simply keep the parent loader.
val classUri = conf.get("spark.repl.class.uri", null)
if (classUri != null) {
logInfo("Using REPL class URI: " + classUri)
try {
val _userClassPathFirst: java.lang.Boolean = userClassPathFirst
// Instantiated reflectively — presumably because ExecutorClassLoader lives in the
// separate repl module and is not a compile-time dependency of this one (TODO confirm).
val klass = Utils.classForName("org.apache.spark.repl.ExecutorClassLoader")
.asInstanceOf[Class[_ <: ClassLoader]]
val constructor = klass.getConstructor(classOf[SparkConf], classOf[SparkEnv],
classOf[String], classOf[ClassLoader], classOf[Boolean])
constructor.newInstance(conf, env, classUri, parent, _userClassPathFirst)
} catch {
case _: ClassNotFoundException =>
logError("Could not find org.apache.spark.repl.ExecutorClassLoader on classpath!")
System.exit(1)
// Unreachable after System.exit(1); present only to satisfy the result type.
null
}
} else {
parent
}
}
/**
 * Download any missing dependencies if we receive a new set of files and JARs from the
 * SparkContext. Also adds any new JARs we fetched to the class loader.
 */
private def updateDependencies(newFiles: Map[String, Long], newJars: Map[String, Long]) {
// lazy: only build the Hadoop configuration if at least one fetch actually happens.
lazy val hadoopConf = SparkHadoopUtil.get.newConfiguration(conf)
// Guard currentFiles/currentJars and the class-loader mutation against concurrent calls.
synchronized {
// Fetch missing dependencies
// A file is stale when we have never seen it (-1L default) or its timestamp advanced.
for ((name, timestamp) <- newFiles if currentFiles.getOrElse(name, -1L) < timestamp) {
logInfo("Fetching " + name + " with timestamp " + timestamp)
// Fetch file with useCache mode, close cache for local mode.
Utils.fetchFile(name, new File(SparkFiles.getRootDirectory()), conf,
env.securityManager, hadoopConf, timestamp, useCache = !isLocal)
currentFiles(name) = timestamp
}
for ((name, timestamp) <- newJars) {
val localName = new URI(name).getPath.split("/").last
// Jars may be keyed by full URI or by bare file name; accept either as "seen".
val currentTimeStamp = currentJars.get(name)
.orElse(currentJars.get(localName))
.getOrElse(-1L)
if (currentTimeStamp < timestamp) {
logInfo("Fetching " + name + " with timestamp " + timestamp)
// Fetch file with useCache mode, close cache for local mode.
Utils.fetchFile(name, new File(SparkFiles.getRootDirectory()), conf,
env.securityManager, hadoopConf, timestamp, useCache = !isLocal)
currentJars(name) = timestamp
// Add it to our class loader
val url = new File(SparkFiles.getRootDirectory(), localName).toURI.toURL
// Guard against re-adding the same URL on a timestamp-only refresh.
if (!urlClassLoader.getURLs().contains(url)) {
logInfo("Adding " + url + " to class loader")
urlClassLoader.addURL(url)
}
}
}
}
}
/** Reports heartbeat and metrics for active tasks to the driver. */
private def reportHeartBeat(): Unit = {
// list of (task id, accumUpdates) to send back to the driver
val accumUpdates = new ArrayBuffer[(Long, Seq[AccumulatorV2[_, _]])]()
val curGCTime = computeTotalGcTime()
// get executor level memory metrics
val executorUpdates = heartbeater.getCurrentMetrics()
for (taskRunner <- runningTasks.values().asScala) {
// task may still be null while a runner is starting up; skip those.
if (taskRunner.task != null) {
taskRunner.task.metrics.mergeShuffleReadMetrics()
// GC time attributed to this task = total GC time minus GC time at task start.
taskRunner.task.metrics.setJvmGCTime(curGCTime - taskRunner.startGCTime)
val accumulatorsToReport =
if (HEARTBEAT_DROP_ZEROES) {
// Shrink the message by omitting accumulators that are still at zero.
taskRunner.task.metrics.accumulators().filterNot(_.isZero)
} else {
taskRunner.task.metrics.accumulators()
}
accumUpdates += ((taskRunner.taskId, accumulatorsToReport))
}
}
val message = Heartbeat(executorId, accumUpdates.toArray, env.blockManager.blockManagerId,
executorUpdates)
try {
val response = heartbeatReceiverRef.askSync[HeartbeatResponse](
message, new RpcTimeout(HEARTBEAT_INTERVAL_MS.millis, EXECUTOR_HEARTBEAT_INTERVAL.key))
if (response.reregisterBlockManager) {
logInfo("Told to re-register on heartbeat")
env.blockManager.reregister()
}
// Any successful round trip resets the consecutive-failure counter.
heartbeatFailures = 0
} catch {
case NonFatal(e) =>
logWarning("Issue communicating with driver in heartbeater", e)
heartbeatFailures += 1
// Too many consecutive failures means the driver is likely gone; exit the executor.
if (heartbeatFailures >= HEARTBEAT_MAX_FAILURES) {
logError(s"Exit as unable to send heartbeats to driver " +
s"more than $HEARTBEAT_MAX_FAILURES times")
System.exit(ExecutorExitCode.HEARTBEAT_FAILURE)
}
}
}
}
}
private[spark] object Executor {
// This is reserved for internal use by components that need to read task properties before a
// task is fully deserialized. When possible, the TaskContext.getLocalProperty call should be
// used instead.
// ThreadLocal: each task-runner thread sees only the properties of its own task.
val taskDeserializationProps: ThreadLocal[Properties] = new ThreadLocal[Properties]
}
| hhbyyh/spark | core/src/main/scala/org/apache/spark/executor/Executor.scala | Scala | apache-2.0 | 40,266 |
package org.mbari.biauv.integration
import java.io.{FileInputStream, File}
import java.util.regex.Pattern
import java.nio.charset.Charset
import java.nio.channels.FileChannel
import java.nio.{MappedByteBuffer, CharBuffer, ByteOrder}
import org.slf4j.LoggerFactory
/**
* This object does the actual reading of a file
*
* @author Brian Schlining
* @since Sep 7, 2010
*/
object LogRecordReader {

  private val log = LoggerFactory.getLogger(getClass)

  /**
   * Reads a file and returns the records in the log file (including all data).
   *
   * The file layout is an ASCII header describing the record types, followed by
   * little-endian binary data interleaved one value per record per row.
   *
   * @param file The file to parse
   * @return A List of all records in the log file, with their data populated
   */
  def read(file: File): List[LogRecord] = {
    val startTime = System.nanoTime
    log.debug("Reading " + file.getCanonicalPath)
    val fileInputStream = new FileInputStream(file)
    try {
      val fileChannel = fileInputStream.getChannel
      // Map the whole file into memory; the binary section is little-endian.
      val mappedByteBuffer = fileChannel.map(FileChannel.MapMode.READ_ONLY, 0, fileChannel.size())
      mappedByteBuffer.order(ByteOrder.LITTLE_ENDIAN)
      // Parse the ASCII header to learn the record layout and where binary data starts.
      val (records, position) = readHeader(mappedByteBuffer)
      log.debug("Found " + records.size + " data types")
      records.foreach { println(_) }
      log.debug("Reading binary data, starting at byte " + position)
      mappedByteBuffer.position(position)

      // Reads one value for a record using the record's standardized format tag.
      def readValue(logRecord: LogRecord) = {
        logRecord.format match {
          case "Float" => mappedByteBuffer.getFloat()
          case "Int" => mappedByteBuffer.getInt()
          case "Short" => mappedByteBuffer.getShort()
          case "Double" => mappedByteBuffer.getDouble()
        }
      }

      // Read binary rows until the buffer underflows at end-of-data. Values are
      // prepended (O(1)) and each list is reversed afterwards.
      var i = 0
      // FIX: guard against an empty record list — previously the while(true) loop
      // never read a byte and therefore never terminated.
      if (records.nonEmpty) {
        try {
          while (true) {
            records.foreach { r =>
              r.data = readValue(r) :: r.data
            }
            i = i + 1
          }
        }
        catch {
          // FIX: was `case _: Throwable`, which also swallowed fatal errors.
          // BufferUnderflowException is the expected end-of-data signal.
          case _: java.nio.BufferUnderflowException => log.debug("Done. Found " + i + " records")
        }
      }
      records.foreach { r => r.data = r.data.reverse }
      // FIX: the elapsed-time log line was commented out and its String.format call
      // was broken; log it directly instead.
      val elapsedTime = (System.nanoTime - startTime) / 1000D / 1000D / 1000D
      log.debug("Elapsed time is " + elapsedTime + " seconds")
      records
    }
    finally {
      // FIX: the stream (and its channel) was never closed before.
      fileInputStream.close()
    }
  }

  /**
   * Parses the ASCII header into something useful.
   *
   * @param mappedByteBuffer The buffer of the memory mapped log file
   * @return A tuple with a list of LogRecords that have not yet been populated with data
   *         as well as the byte offset to start reading the data with
   */
  private def readHeader(mappedByteBuffer: MappedByteBuffer): (List[LogRecord], Int) = {
    val charsetDecoder = Charset.forName("ISO-8859-15").newDecoder()
    val charBuffer = charsetDecoder.decode(mappedByteBuffer)
    var records: List[LogRecord] = Nil
    // Match one header line at a time, terminator included so bytes can be counted.
    val linePattern = Pattern.compile(".*\r?\n")
    val lineMatcher = linePattern.matcher(charBuffer)
    var continue = lineMatcher.find()
    var numberOfBytes = 0
    while (continue) {
      val line = lineMatcher.group().toString // The current line
      // NOTE(review): byte count uses the platform default charset; presumably fine
      // because headers are ASCII — confirm for non-ASCII header content.
      numberOfBytes = numberOfBytes + line.getBytes.length
      log.debug("--- Parsing: " + line.replace('\r', ' ').replace('\n', ' '))
      val parts = line.split(" ")
      // The header ends at the line whose second token starts with "begin".
      continue = !parts(1).startsWith("begin")
      if (continue) {
        try {
          val otherParts = line.split(",")
          records = newLogRecord(parts(1), parts(2), otherParts(1), otherParts(2)) :: records
        }
        catch {
          // FIX: narrowed from Throwable so fatal errors are no longer swallowed;
          // malformed lines (missing tokens) are still skipped with a debug note.
          case scala.util.control.NonFatal(_) => log.debug("!!! Invalid line")
        }
        continue = lineMatcher.find()
      }
    }
    (records.reverse, numberOfBytes)
  }

  /**
   * Factory method that standardizes some fields of the log records.
   *
   * @param format    the raw type token from the header (e.g. "float", "integer")
   * @param shortName the record's short identifier
   * @param longName  the record's descriptive name
   * @param units     the raw units string from the header
   */
  private def newLogRecord(format: String, shortName: String, longName: String, units: String): LogRecord = {
    // Map the header's type token onto the tag used by readValue above;
    // anything unrecognized is read as a Double.
    val f = format match {
      case "float" => "Float"
      case "integer" => "Int"
      case "short" => "Short"
      case _ => "Double"
    }
    // The "time" record always carries epoch-seconds units regardless of the header.
    val u = shortName match {
      case "time" => "seconds since 1970-01-01 00:00:00Z"
      case _ => units.replaceAll("[\r\n]", "").trim
    }
    new LogRecord(f, shortName, longName, u)
  }
}
package is.hail.expr.ir
import is.hail.types.virtual._
import is.hail.utils._
object Binds {
  /** Returns true iff child `i` of `x` introduces an eval binding named `v`. */
  def apply(x: IR, v: String, i: Int): Boolean =
    Bindings(x, i).exists { case (name, _) => name == v }
}
// Eval-scope bindings that node `x` introduces for its `i`-th child. Returns the
// (name -> type) pairs visible in that child and nothing for children that gain
// no new bindings. Aggregation/scan-scope bindings are handled separately by
// AggBindings/ScanBindings below.
object Bindings {
// Shared empty result to avoid allocating on the (common) no-bindings path.
private val empty: Array[(String, Type)] = Array()
def apply(x: BaseIR, i: Int): Iterable[(String, Type)] = x match {
case Let(name, value, _) => if (i == 1) Array(name -> value.typ) else empty
// TailLoop's body (child index == args.length) sees every loop argument plus a
// binding for the loop itself (its state-tuple/result type).
case TailLoop(name, args, body) => if (i == args.length)
args.map { case (name, ir) => name -> ir.typ } :+
name -> TTuple(TTuple(args.map(_._2.typ): _*), body.typ) else empty
case StreamMap(a, name, _) => if (i == 1) Array(name -> coerce[TStream](a.typ).elementType) else empty
case StreamZip(as, names, _, _) => if (i == as.length) names.zip(as.map(a => coerce[TStream](a.typ).elementType)) else empty
case StreamZipJoin(as, key, curKey, curVals, _) =>
val eltType = coerce[TStruct](coerce[TStream](as.head.typ).elementType)
if (i == as.length)
Array(curKey -> eltType.typeAfterSelectNames(key),
curVals -> TArray(eltType))
else
empty
case StreamFor(a, name, _) => if (i == 1) Array(name -> coerce[TStream](a.typ).elementType) else empty
case StreamFlatMap(a, name, _) => if (i == 1) Array(name -> coerce[TStream](a.typ).elementType) else empty
case StreamFilter(a, name, _) => if (i == 1) Array(name -> coerce[TStream](a.typ).elementType) else empty
case StreamFold(a, zero, accumName, valueName, _) => if (i == 2) Array(accumName -> zero.typ, valueName -> coerce[TStream](a.typ).elementType) else empty
// StreamFold2 children: [stream, accum inits..., seq ops..., result]; seq ops see
// the element plus all accumulators, the result sees only the accumulators.
case StreamFold2(a, accum, valueName, seq, result) =>
if (i <= accum.length)
empty
else if (i < 2 * accum.length + 1)
Array((valueName, coerce[TStream](a.typ).elementType)) ++ accum.map { case (name, value) => (name, value.typ) }
else
accum.map { case (name, value) => (name, value.typ) }
case RunAggScan(a, name, _, _, _, _) => if (i == 2 || i == 3) Array(name -> coerce[TStream](a.typ).elementType) else empty
case StreamScan(a, zero, accumName, valueName, _) => if (i == 2) Array(accumName -> zero.typ, valueName -> coerce[TStream](a.typ).elementType) else empty
case StreamAggScan(a, name, _) => if (i == 1) FastIndexedSeq(name -> a.typ.asInstanceOf[TStream].elementType) else empty
case StreamJoinRightDistinct(ll, rr, _, _, l, r, _, _) => if (i == 2) Array(l -> coerce[TStream](ll.typ).elementType, r -> coerce[TStream](rr.typ).elementType) else empty
// The comparison lambda sees both elements being compared, same element type.
case ArraySort(a, left, right, _) => if (i == 1) Array(left -> coerce[TStream](a.typ).elementType, right -> coerce[TStream](a.typ).elementType) else empty
case AggArrayPerElement(a, _, indexName, _, _, _) => if (i == 1) FastIndexedSeq(indexName -> TInt32) else empty
case NDArrayMap(nd, name, _) => if (i == 1) Array(name -> coerce[TNDArray](nd.typ).elementType) else empty
case NDArrayMap2(l, r, lName, rName, _) => if (i == 2) Array(lName -> coerce[TNDArray](l.typ).elementType, rName -> coerce[TNDArray](r.typ).elementType) else empty
case CollectDistributedArray(contexts, globals, cname, gname, _, _) => if (i == 2) Array(cname -> coerce[TStream](contexts.typ).elementType, gname -> globals.typ) else empty
// Relational nodes: the env exposed to the value-IR child comes from the child
// relation's type (row/col/entry/global environments).
case TableAggregate(child, _) => if (i == 1) child.typ.globalEnv.m else empty
case MatrixAggregate(child, _) => if (i == 1) child.typ.globalEnv.m else empty
case TableFilter(child, _) => if (i == 1) child.typ.rowEnv.m else empty
case TableMapGlobals(child, _) => if (i == 1) child.typ.globalEnv.m else empty
case TableMapRows(child, _) => if (i == 1) child.typ.rowEnv.m else empty
case TableAggregateByKey(child, _) => if (i == 1) child.typ.globalEnv.m else empty
case TableKeyByAndAggregate(child, _, _, _, _) => if (i == 1) child.typ.globalEnv.m else if (i == 2) child.typ.rowEnv.m else empty
case TableMapPartitions(child, g, p, _) => if (i == 1) Array(g -> child.typ.globalType, p -> TStream(child.typ.rowType)) else empty
case MatrixMapRows(child, _) => if (i == 1) child.typ.rowEnv.bind("n_cols", TInt32).m else empty
case MatrixFilterRows(child, _) => if (i == 1) child.typ.rowEnv.m else empty
case MatrixMapCols(child, _, _) => if (i == 1) child.typ.colEnv.bind("n_rows", TInt64).m else empty
case MatrixFilterCols(child, _) => if (i == 1) child.typ.colEnv.m else empty
case MatrixMapEntries(child, _) => if (i == 1) child.typ.entryEnv.m else empty
case MatrixFilterEntries(child, _) => if (i == 1) child.typ.entryEnv.m else empty
case MatrixMapGlobals(child, _) => if (i == 1) child.typ.globalEnv.m else empty
case MatrixAggregateColsByKey(child, _, _) => if (i == 1) child.typ.rowEnv.m else if (i == 2) child.typ.globalEnv.m else empty
case MatrixAggregateRowsByKey(child, _, _) => if (i == 1) child.typ.colEnv.m else if (i == 2) child.typ.globalEnv.m else empty
case BlockMatrixMap(_, eltName, _, _) => if (i == 1) Array(eltName -> TFloat64) else empty
case BlockMatrixMap2(_, _, lName, rName, _, _) => if (i == 2) Array(lName -> TFloat64, rName -> TFloat64) else empty
case x@ShuffleWith(_, _, _, _, name, _, _) =>
if (i == 0 || i == 1) Array(name -> x.shuffleType) else empty
case _ => empty
}
}
// Aggregation-scope bindings that node `x` introduces for its `i`-th child.
// Result semantics: None => the child has no agg scope at all; Some(seq) => the
// child has an agg scope containing exactly `seq` new bindings (possibly none).
object AggBindings {
def apply(x: BaseIR, i: Int, parent: BindingEnv[_]): Option[Iterable[(String, Type)]] = {
// Wraps bindings while asserting the parent actually carries an agg scope —
// a None parent here indicates an inconsistency in the traversal.
def wrapped(bindings: Iterable[(String, Type)]): Option[Iterable[(String, Type)]] = {
if (parent.agg.isEmpty)
throw new RuntimeException(s"aggEnv was None for child $i of $x")
Some(bindings)
}
// "Inherit the parent's agg scope, adding nothing": Some(empty) iff parent has one.
def base: Option[Iterable[(String, Type)]] = parent.agg.map(_ => FastIndexedSeq())
x match {
// The `false` flag on AggLet/AggFilter/... selects the agg (not scan) variants.
case AggLet(name, value, _, false) => if (i == 1) wrapped(FastIndexedSeq(name -> value.typ)) else None
case AggFilter(_, _, false) => if (i == 0) None else base
case AggGroupBy(_, _, false) => if (i == 0) None else base
case AggExplode(a, name, _, false) => if (i == 1) wrapped(FastIndexedSeq(name -> a.typ.asInstanceOf[TIterable].elementType)) else None
case AggArrayPerElement(a, elementName, _, _, _, false) => if (i == 1) wrapped(FastIndexedSeq(elementName -> a.typ.asInstanceOf[TIterable].elementType)) else if (i == 2) base else None
case StreamAgg(a, name, _) => if (i == 1) Some(FastIndexedSeq(name -> a.typ.asInstanceOf[TIterable].elementType)) else base
case TableAggregate(child, _) => if (i == 1) Some(child.typ.rowEnv.m) else None
case MatrixAggregate(child, _) => if (i == 1) Some(child.typ.entryEnv.m) else None
case RelationalLet(_, _, _) => None
case CollectDistributedArray(_, _, _, _, _, _) if (i == 2) => None
case _: ApplyAggOp => None
// NOTE(review): the Table*/Matrix* cases below this generic `_: IR` case appear
// to be reachable only because those node classes are not subtypes of IR —
// confirm against the BaseIR hierarchy before reordering.
case _: IR => base
case TableAggregateByKey(child, _) => if (i == 1) Some(child.typ.rowEnv.m) else None
case TableKeyByAndAggregate(child, _, _, _, _) => if (i == 1) Some(child.typ.rowEnv.m) else None
case _: TableIR => None
case MatrixMapRows(child, _) => if (i == 1) Some(child.typ.entryEnv.m) else None
case MatrixMapCols(child, _, _) => if (i == 1) Some(child.typ.entryEnv.m) else None
case MatrixAggregateColsByKey(child, _, _) => if (i == 1) Some(child.typ.entryEnv.m) else if (i == 2) Some(child.typ.colEnv.m) else None
case MatrixAggregateRowsByKey(child, _, _) => if (i == 1) Some(child.typ.entryEnv.m) else if (i == 2) Some(child.typ.rowEnv.m) else None
case _: MatrixIR => None
case _: BlockMatrixIR => None
}
}
}
// Scan-scope bindings that node `x` introduces for its `i`-th child — the scan
// mirror of AggBindings above: None => no scan scope for that child; Some(seq)
// => a scan scope with exactly `seq` new bindings.
object ScanBindings {
def apply(x: BaseIR, i: Int, parent: BindingEnv[_]): Option[Iterable[(String, Type)]] = {
// Asserts the parent carries a scan scope before adding bindings to it.
def wrapped(bindings: Iterable[(String, Type)]): Option[Iterable[(String, Type)]] = {
if (parent.scan.isEmpty)
throw new RuntimeException(s"scanEnv was None for child $i of $x")
Some(bindings)
}
// "Inherit the parent's scan scope, adding nothing."
def base: Option[Iterable[(String, Type)]] = parent.scan.map(_ => FastIndexedSeq())
x match {
// The `true` flag selects the scan variants of AggLet/AggFilter/etc.
case AggLet(name, value, _, true) => if (i == 1) wrapped(FastIndexedSeq(name -> value.typ)) else None
case AggFilter(_, _, true) => if (i == 0) None else base
case AggGroupBy(_, _, true) => if (i == 0) None else base
case AggExplode(a, name, _, true) => if (i == 1) wrapped(FastIndexedSeq(name -> a.typ.asInstanceOf[TIterable].elementType)) else None
case AggArrayPerElement(a, elementName, _, _, _, true) => if (i == 1) wrapped(FastIndexedSeq(elementName -> a.typ.asInstanceOf[TIterable].elementType)) else if (i == 2) base else None
case StreamAggScan(a, name, _) => if (i == 1) Some(FastIndexedSeq(name -> a.typ.asInstanceOf[TIterable].elementType)) else base
case TableAggregate(_, _) => None
case MatrixAggregate(_, _) => None
case RelationalLet(_, _, _) => None
case CollectDistributedArray(_, _, _, _, _, _) if (i == 2) => None
case _: ApplyScanOp => None
// NOTE(review): as in AggBindings, the Table*/Matrix* cases below are matched
// only because those node classes are not subtypes of IR — confirm.
case _: IR => base
case TableMapRows(child, _) => if (i == 1) Some(child.typ.rowEnv.m) else None
case _: TableIR => None
case MatrixMapRows(child, _) => if (i == 1) Some(child.typ.rowEnv.m) else None
case MatrixMapCols(child, _, _) => if (i == 1) Some(child.typ.colEnv.m) else None
case _: MatrixIR => None
case _: BlockMatrixIR => None
}
}
}
object RelationalBindings {
  // Shared empty result for the common no-bindings path.
  private val empty: Array[(String, Type)] = Array()

  /**
   * Relational (cross-stage) bindings that node `x` introduces for its `i`-th
   * child. Only the body (child 1) of the four RelationalLet* node kinds ever
   * gains a binding; every other child sees nothing new.
   */
  def apply(x: BaseIR, i: Int): Iterable[(String, Type)] =
    if (i != 1)
      empty
    else
      x match {
        case RelationalLet(name, value, _) => FastIndexedSeq(name -> value.typ)
        case RelationalLetTable(name, value, _) => FastIndexedSeq(name -> value.typ)
        case RelationalLetMatrixTable(name, value, _) => FastIndexedSeq(name -> value.typ)
        case RelationalLetBlockMatrix(name, value, _) => FastIndexedSeq(name -> value.typ)
        case _ => empty
      }
}
object NewBindings {
  /**
   * Collects every binding newly introduced for child `i` of `x` — eval, agg,
   * scan, and relational — into a single BindingEnv of types.
   */
  def apply(x: BaseIR, i: Int, parent: BindingEnv[_]): BindingEnv[Type] = {
    val evalEnv = Env.fromSeq(Bindings(x, i))
    val aggEnv = AggBindings(x, i, parent).map(Env.fromSeq(_))
    val scanEnv = ScanBindings(x, i, parent).map(Env.fromSeq(_))
    val relationalEnv = Env.fromSeq(RelationalBindings(x, i))
    BindingEnv(evalEnv, agg = aggEnv, scan = scanEnv, relational = relationalEnv)
  }
}
// Restricts/reshapes the parent's environment into the environment a child sees
// *before* the child's own new bindings are added (those come from NewBindings).
// Handles scope promotion (agg/scan bodies), scope erasure (distributed and
// relational boundaries), and scope creation (StreamAgg/StreamAggScan).
object ChildEnvWithoutBindings {
def apply[T](ir: BaseIR, i: Int, env: BindingEnv[T]): BindingEnv[T] = {
ir match {
// StreamAgg's body aggregates over the eval scope; StreamAggScan scans over it.
case StreamAgg(_, _, _) => if (i == 1) BindingEnv(eval = env.eval, agg = Some(env.eval), scan = env.scan.map(_ => Env.empty), relational = env.relational) else env
case StreamAggScan(_, _, _) => if (i == 1) BindingEnv(eval = env.eval, agg = env.agg.map(_ => Env.empty), scan = Some(env.eval), relational = env.relational) else env
// Init args (i < init.length) lose the agg/scan scope; seq args are promoted into it.
case ApplyAggOp(init, _, _) => if (i < init.length) env.copy(agg = None) else env.promoteAgg
case ApplyScanOp(init, _, _) => if (i < init.length) env.copy(scan = None) else env.promoteScan
// The distributed body (child 2) only keeps relational bindings; everything
// else must arrive via the explicit context/globals children.
case CollectDistributedArray(_, _, _, _, _, _) => if (i == 2) BindingEnv(relational = env.relational) else env
case MatrixAggregate(_, _) => if (i == 0) BindingEnv(relational = env.relational) else BindingEnv(Env.empty, agg = Some(Env.empty), relational = env.relational)
case TableAggregate(_, _) => if (i == 0) BindingEnv(relational = env.relational) else BindingEnv(Env.empty, agg = Some(Env.empty), relational = env.relational)
case RelationalLet(_, _, _) => if (i == 0) BindingEnv(relational = env.relational) else env.copy(agg = None, scan = None)
// LiftMeOut isolates its child: scopes survive but all their bindings are dropped.
case LiftMeOut(_) => BindingEnv(Env.empty[T], env.agg.map(_ => Env.empty), env.scan.map(_ => Env.empty), relational = env.relational)
case _: IR => if (UsesAggEnv(ir, i)) env.promoteAgg else if (UsesScanEnv(ir, i)) env.promoteScan else env
// Non-IR (Table/Matrix/BlockMatrix) parents: children start from empty scopes,
// keeping only the relational bindings and the agg/scan scope *presence*.
case x => BindingEnv(
agg = AggBindings(x, i, env).map(_ => Env.empty),
scan = ScanBindings(x, i, env).map(_ => Env.empty),
relational = env.relational)
}
}
}
object ChildBindings {
  /**
   * Full binding environment for child `i` of `ir`: the parent env reshaped for
   * the child, merged with the bindings the child newly introduces.
   */
  def apply(ir: BaseIR, i: Int, baseEnv: BindingEnv[Type]): BindingEnv[Type] = {
    val scoped = ChildEnvWithoutBindings(ir, i, baseEnv)
    scoped.merge(NewBindings(ir, i, scoped))
  }

  /** Like `apply`, but maps each new binding through `f` before merging. */
  def transformed[T](ir: BaseIR, i: Int, baseEnv: BindingEnv[T], f: (String, Type) => T): BindingEnv[T] = {
    val scoped = ChildEnvWithoutBindings(ir, i, baseEnv)
    scoped.merge(NewBindings(ir, i, scoped).mapValuesWithKey(f))
  }
}
| cseed/hail | hail/src/main/scala/is/hail/expr/ir/Binds.scala | Scala | mit | 12,426 |
// Driver for the macro test: prints the result of each of the four foo variants
// for the same input. The suffixes (UU/TU/UT/TT) presumably encode the
// typed/untyped combinations of macro implementation and usage — defined in the
// companion Macros source, not visible here; confirm there.
object Test extends App {
println(Macros.fooUU(2))
println(Macros.fooTU(2))
println(Macros.fooUT(2))
println(Macros.fooTT(2))
}
| scala/scala | test/files/run/macro-impl-relaxed/Test_2.scala | Scala | apache-2.0 | 136 |
/*
* Distributed as part of Scalala, a linear algebra library.
*
* Copyright (C) 2008- Daniel Ramage
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110 USA
*/
package scalala;
package generic;
import org.scalacheck._
import org.scalatest._;
import org.scalatest.junit._;
import org.scalatest.prop._;
import org.junit.runner.RunWith
@RunWith(classOf[JUnitRunner])
// Verifies the `:=` assign-into operator (from operators.Implicits) on arrays:
// element values are copied from the right-hand side into the existing
// left-hand array, without replacing or aliasing the underlying objects.
class CanAssignIntoTest extends FunSuite with Checkers {
import operators.Implicits._;
test("Array[Int]") {
val a = Array(0,0,0);
val aRef = a;
val b = Array(1,2,3);
val bRef = b;
a := b;
// Values copied...
assert(a.toList === b.toList);
// ...but the arrays themselves stay distinct objects,
assert(!(a eq b));
// and both original references still point at their original arrays.
assert((a eq aRef));
assert((b eq bRef));
}
test("Array[Array[Int]]") {
val a = Array(Array(0,0),Array(0,0),Array(0,0));
val aRef = a;
val b = Array(Array(1,2),Array(3,4),Array(5,6));
val bRef = b;
a := b;
assert(a.map(_.toList).toList === b.map(_.toList).toList);
assert(!(a eq b));
// Deep copy: no inner row of `a` may alias the corresponding row of `b`.
assert((a zip b).forall(tup => !(tup._1 eq tup._2)));
assert((a eq aRef));
assert((b eq bRef));
}
}
| scalala/Scalala | src/test/scala/scalala/generic/CanAssignIntoTest.scala | Scala | lgpl-2.1 | 1,766 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package main.scala
import scala.collection.mutable.{ListBuffer, Queue}
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming._
// Release-audit smoke test: feeds three RDDs through a queue-backed DStream and
// checks that all three arrive with their expected counts. Exits non-zero on
// failure so the audit harness can detect it.
object SparkStreamingExample {
def main(args: Array[String]) {
// SPARK_AUDIT_MASTER lets the audit harness point this app at a specific master.
val conf = sys.env.get("SPARK_AUDIT_MASTER") match {
case Some(master) => new SparkConf().setAppName("Simple Streaming App").setMaster(master)
case None => new SparkConf().setAppName("Simple Streaming App")
}
val ssc = new StreamingContext(conf, Seconds(1))
val seen = ListBuffer[RDD[Int]]()
val rdd1 = ssc.sparkContext.makeRDD(1 to 100, 10)
val rdd2 = ssc.sparkContext.makeRDD(1 to 1000, 10)
val rdd3 = ssc.sparkContext.makeRDD(1 to 10000, 10)
val queue = Queue(rdd1, rdd2, rdd3)
val stream = ssc.queueStream(queue)
// Collect every micro-batch RDD so counts can be verified after the run.
stream.foreachRDD(rdd => seen += rdd)
ssc.start()
// Fixed sleep gives the 1-second batches time to drain the three-RDD queue.
Thread.sleep(5000)
// Minimal assertion helper: print the message and exit non-zero on failure.
def test(f: => Boolean, failureMsg: String) = {
if (!f) {
println(failureMsg)
System.exit(-1)
}
}
// Empty batches (queue already drained) produce count 0 and are filtered out.
val rddCounts = seen.map(rdd => rdd.count()).filter(_ > 0)
test(rddCounts.length == 3, "Did not collect three RDD's from stream")
test(rddCounts.toSet == Set(100, 1000, 10000), "Did not find expected streams")
println("Test succeeded")
ssc.stop()
}
}
// scalastyle:on println
| gioenn/xSpark | dev/audit-release/sbt_app_streaming/src/main/scala/StreamingApp.scala | Scala | apache-2.0 | 2,206 |
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO2
package scalapb.options
// NOTE(review): ScalaPB-generated protobuf message (see "Do not edit!" header).
// Comments below are explanatory only; regenerate from the .proto to change code.
@SerialVersionUID(0L)
final case class PreprocessorOutput(
optionsByFile: _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, scalapb.options.ScalaPbOptions] = _root_.scala.collection.immutable.Map.empty,
unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[PreprocessorOutput] {
@transient
// 0 is the "not yet computed" sentinel; real sizes are stored offset by +1 below
// so that a genuinely zero-byte serialized size is still cached.
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
optionsByFile.foreach { __item =>
val __value = scalapb.options.PreprocessorOutput._typemapper_optionsByFile.toBase(__item)
__size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
__size += unknownFields.serializedSize
__size
}
override def serializedSize: _root_.scala.Int = {
var __size = __serializedSizeMemoized
if (__size == 0) {
__size = __computeSerializedSize() + 1
__serializedSizeMemoized = __size
}
__size - 1
}
def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
// Each map entry is written as a length-delimited message with field number 1.
optionsByFile.foreach { __v =>
val __m = scalapb.options.PreprocessorOutput._typemapper_optionsByFile.toBase(__v)
_output__.writeTag(1, 2)
_output__.writeUInt32NoTag(__m.serializedSize)
__m.writeTo(_output__)
};
unknownFields.writeTo(_output__)
}
def clearOptionsByFile = copy(optionsByFile = _root_.scala.collection.immutable.Map.empty)
def addOptionsByFile(__vs: (_root_.scala.Predef.String, scalapb.options.ScalaPbOptions) *): PreprocessorOutput = addAllOptionsByFile(__vs)
def addAllOptionsByFile(__vs: Iterable[(_root_.scala.Predef.String, scalapb.options.ScalaPbOptions)]): PreprocessorOutput = copy(optionsByFile = optionsByFile ++ __vs)
def withOptionsByFile(__v: _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, scalapb.options.ScalaPbOptions]): PreprocessorOutput = copy(optionsByFile = __v)
def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
(__fieldNumber: @_root_.scala.unchecked) match {
case 1 => optionsByFile.iterator.map(scalapb.options.PreprocessorOutput._typemapper_optionsByFile.toBase(_)).toSeq
}
}
def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
_root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
(__field.number: @_root_.scala.unchecked) match {
case 1 => _root_.scalapb.descriptors.PRepeated(optionsByFile.iterator.map(scalapb.options.PreprocessorOutput._typemapper_optionsByFile.toBase(_).toPMessage).toVector)
}
}
def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
def companion: scalapb.options.PreprocessorOutput.type = scalapb.options.PreprocessorOutput
// @@protoc_insertion_point(GeneratedMessage[scalapb.PreprocessorOutput])
}
object PreprocessorOutput extends scalapb.GeneratedMessageCompanion[scalapb.options.PreprocessorOutput] {
implicit def messageCompanion: scalapb.GeneratedMessageCompanion[scalapb.options.PreprocessorOutput] = this
def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): scalapb.options.PreprocessorOutput = {
val __optionsByFile: _root_.scala.collection.mutable.Builder[(_root_.scala.Predef.String, scalapb.options.ScalaPbOptions), _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, scalapb.options.ScalaPbOptions]] = _root_.scala.collection.immutable.Map.newBuilder[_root_.scala.Predef.String, scalapb.options.ScalaPbOptions]
var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
var _done__ = false
while (!_done__) {
val _tag__ = _input__.readTag()
_tag__ match {
case 0 => _done__ = true
case 10 =>
__optionsByFile += scalapb.options.PreprocessorOutput._typemapper_optionsByFile.toCustom(_root_.scalapb.LiteParser.readMessage[scalapb.options.PreprocessorOutput.OptionsByFileEntry](_input__))
case tag =>
if (_unknownFields__ == null) {
_unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
}
_unknownFields__.parseField(tag, _input__)
}
}
scalapb.options.PreprocessorOutput(
optionsByFile = __optionsByFile.result(),
unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
)
}
implicit def messageReads: _root_.scalapb.descriptors.Reads[scalapb.options.PreprocessorOutput] = _root_.scalapb.descriptors.Reads{
case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
_root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
scalapb.options.PreprocessorOutput(
optionsByFile = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).map(_.as[_root_.scala.Seq[scalapb.options.PreprocessorOutput.OptionsByFileEntry]]).getOrElse(_root_.scala.Seq.empty).iterator.map(scalapb.options.PreprocessorOutput._typemapper_optionsByFile.toCustom(_)).toMap
)
case _ => throw new RuntimeException("Expected PMessage")
}
def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = ScalapbProto.javaDescriptor.getMessageTypes().get(8)
def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = ScalapbProto.scalaDescriptor.messages(8)
def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
(__number: @_root_.scala.unchecked) match {
case 1 => __out = scalapb.options.PreprocessorOutput.OptionsByFileEntry
}
__out
}
lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] =
Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]](
_root_.scalapb.options.PreprocessorOutput.OptionsByFileEntry
)
def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
lazy val defaultInstance = scalapb.options.PreprocessorOutput(
optionsByFile = _root_.scala.collection.immutable.Map.empty
)
  // ScalaPB-generated synthetic map-entry message (key: file name, value: its ScalaPbOptions).
  // Do not edit by hand; regenerate instead.
  @SerialVersionUID(0L)
  final case class OptionsByFileEntry(
      key: _root_.scala.Option[_root_.scala.Predef.String] = _root_.scala.None,
      value: _root_.scala.Option[scalapb.options.ScalaPbOptions] = _root_.scala.None,
      unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
      ) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[OptionsByFileEntry] {
      @transient
      private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
      private[this] def __computeSerializedSize(): _root_.scala.Int = {
        var __size = 0
        if (key.isDefined) {
          val __value = key.get
          __size += _root_.com.google.protobuf.CodedOutputStream.computeStringSize(1, __value)
        };
        if (value.isDefined) {
          val __value = value.get
          __size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
        };
        __size += unknownFields.serializedSize
        __size
      }
      // Memoizes the size shifted by +1 so that 0 can mean "not yet computed".
      override def serializedSize: _root_.scala.Int = {
        var __size = __serializedSizeMemoized
        if (__size == 0) {
          __size = __computeSerializedSize() + 1
          __serializedSizeMemoized = __size
        }
        __size - 1
      }
      // Writes fields in field-number order on the protobuf wire format.
      def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
        key.foreach { __v =>
          val __m = __v
          _output__.writeString(1, __m)
        };
        value.foreach { __v =>
          val __m = __v
          _output__.writeTag(2, 2)
          _output__.writeUInt32NoTag(__m.serializedSize)
          __m.writeTo(_output__)
        };
        unknownFields.writeTo(_output__)
      }
      def getKey: _root_.scala.Predef.String = key.getOrElse("")
      def clearKey: OptionsByFileEntry = copy(key = _root_.scala.None)
      def withKey(__v: _root_.scala.Predef.String): OptionsByFileEntry = copy(key = Option(__v))
      def getValue: scalapb.options.ScalaPbOptions = value.getOrElse(scalapb.options.ScalaPbOptions.defaultInstance)
      def clearValue: OptionsByFileEntry = copy(value = _root_.scala.None)
      def withValue(__v: scalapb.options.ScalaPbOptions): OptionsByFileEntry = copy(value = Option(__v))
      def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
      def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
      def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
        (__fieldNumber: @_root_.scala.unchecked) match {
          case 1 => key.orNull
          case 2 => value.orNull
        }
      }
      def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
        _root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
        (__field.number: @_root_.scala.unchecked) match {
          case 1 => key.map(_root_.scalapb.descriptors.PString(_)).getOrElse(_root_.scalapb.descriptors.PEmpty)
          case 2 => value.map(_.toPMessage).getOrElse(_root_.scalapb.descriptors.PEmpty)
        }
      }
      def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
      def companion: scalapb.options.PreprocessorOutput.OptionsByFileEntry.type = scalapb.options.PreprocessorOutput.OptionsByFileEntry
      // @@protoc_insertion_point(GeneratedMessage[scalapb.PreprocessorOutput.OptionsByFileEntry])
  }
  // ScalaPB-generated companion for the map-entry message. Do not edit by hand.
  object OptionsByFileEntry extends scalapb.GeneratedMessageCompanion[scalapb.options.PreprocessorOutput.OptionsByFileEntry] {
    implicit def messageCompanion: scalapb.GeneratedMessageCompanion[scalapb.options.PreprocessorOutput.OptionsByFileEntry] = this
    // Wire-format parser: tag 10 = field 1 (key, length-delimited string),
    // tag 18 = field 2 (value, embedded message); tag 0 terminates the stream.
    def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): scalapb.options.PreprocessorOutput.OptionsByFileEntry = {
      var __key: _root_.scala.Option[_root_.scala.Predef.String] = _root_.scala.None
      var __value: _root_.scala.Option[scalapb.options.ScalaPbOptions] = _root_.scala.None
      var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
      var _done__ = false
      while (!_done__) {
        val _tag__ = _input__.readTag()
        _tag__ match {
          case 0 => _done__ = true
          case 10 =>
            __key = Option(_input__.readStringRequireUtf8())
          case 18 =>
            __value = Option(__value.fold(_root_.scalapb.LiteParser.readMessage[scalapb.options.ScalaPbOptions](_input__))(_root_.scalapb.LiteParser.readMessage(_input__, _)))
          case tag =>
            if (_unknownFields__ == null) {
              _unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
            }
            _unknownFields__.parseField(tag, _input__)
        }
      }
      scalapb.options.PreprocessorOutput.OptionsByFileEntry(
          key = __key,
          value = __value,
          unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
      )
    }
    implicit def messageReads: _root_.scalapb.descriptors.Reads[scalapb.options.PreprocessorOutput.OptionsByFileEntry] = _root_.scalapb.descriptors.Reads{
      case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
        _root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
        scalapb.options.PreprocessorOutput.OptionsByFileEntry(
          key = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Predef.String]]),
          value = __fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).flatMap(_.as[_root_.scala.Option[scalapb.options.ScalaPbOptions]])
        )
      case _ => throw new RuntimeException("Expected PMessage")
    }
    // This entry is the first (index 0) nested type of PreprocessorOutput.
    def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = scalapb.options.PreprocessorOutput.javaDescriptor.getNestedTypes().get(0)
    def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = scalapb.options.PreprocessorOutput.scalaDescriptor.nestedMessages(0)
    def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
      var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
      (__number: @_root_.scala.unchecked) match {
        case 2 => __out = scalapb.options.ScalaPbOptions
      }
      __out
    }
    lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
    def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
    lazy val defaultInstance = scalapb.options.PreprocessorOutput.OptionsByFileEntry(
      key = _root_.scala.None,
      value = _root_.scala.None
    )
    implicit class OptionsByFileEntryLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, scalapb.options.PreprocessorOutput.OptionsByFileEntry]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, scalapb.options.PreprocessorOutput.OptionsByFileEntry](_l) {
      def key: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Predef.String] = field(_.getKey)((c_, f_) => c_.copy(key = Option(f_)))
      def optionalKey: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Predef.String]] = field(_.key)((c_, f_) => c_.copy(key = f_))
      def value: _root_.scalapb.lenses.Lens[UpperPB, scalapb.options.ScalaPbOptions] = field(_.getValue)((c_, f_) => c_.copy(value = Option(f_)))
      def optionalValue: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[scalapb.options.ScalaPbOptions]] = field(_.value)((c_, f_) => c_.copy(value = f_))
    }
    final val KEY_FIELD_NUMBER = 1
    final val VALUE_FIELD_NUMBER = 2
    // Maps the synthetic entry message to a plain (String, ScalaPbOptions) pair,
    // which is what lets PreprocessorOutput expose a Scala Map.
    @transient
    implicit val keyValueMapper: _root_.scalapb.TypeMapper[scalapb.options.PreprocessorOutput.OptionsByFileEntry, (_root_.scala.Predef.String, scalapb.options.ScalaPbOptions)] =
      _root_.scalapb.TypeMapper[scalapb.options.PreprocessorOutput.OptionsByFileEntry, (_root_.scala.Predef.String, scalapb.options.ScalaPbOptions)](__m => (__m.getKey, __m.getValue))(__p => scalapb.options.PreprocessorOutput.OptionsByFileEntry(Some(__p._1), Some(__p._2)))
    def of(
      key: _root_.scala.Option[_root_.scala.Predef.String],
      value: _root_.scala.Option[scalapb.options.ScalaPbOptions]
    ): _root_.scalapb.options.PreprocessorOutput.OptionsByFileEntry = _root_.scalapb.options.PreprocessorOutput.OptionsByFileEntry(
      key,
      value
    )
    // @@protoc_insertion_point(GeneratedMessageCompanion[scalapb.PreprocessorOutput.OptionsByFileEntry])
  }
  // ScalaPB-generated lens for functional updates of PreprocessorOutput fields.
  implicit class PreprocessorOutputLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, scalapb.options.PreprocessorOutput]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, scalapb.options.PreprocessorOutput](_l) {
    def optionsByFile: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, scalapb.options.ScalaPbOptions]] = field(_.optionsByFile)((c_, f_) => c_.copy(optionsByFile = f_))
  }
  final val OPTIONS_BY_FILE_FIELD_NUMBER = 1
  // Resolved from OptionsByFileEntry.keyValueMapper; converts entry messages to Map pairs.
  @transient
  private[options] val _typemapper_optionsByFile: _root_.scalapb.TypeMapper[scalapb.options.PreprocessorOutput.OptionsByFileEntry, (_root_.scala.Predef.String, scalapb.options.ScalaPbOptions)] = implicitly[_root_.scalapb.TypeMapper[scalapb.options.PreprocessorOutput.OptionsByFileEntry, (_root_.scala.Predef.String, scalapb.options.ScalaPbOptions)]]
  def of(
    optionsByFile: _root_.scala.collection.immutable.Map[_root_.scala.Predef.String, scalapb.options.ScalaPbOptions]
  ): _root_.scalapb.options.PreprocessorOutput = _root_.scalapb.options.PreprocessorOutput(
    optionsByFile
  )
  // @@protoc_insertion_point(GeneratedMessageCompanion[scalapb.PreprocessorOutput])
}
| scalapb/ScalaPB | scalapb-runtime/src/main/scala/scalapb/options/PreprocessorOutput.scala | Scala | apache-2.0 | 16,801 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.contrib.scalaz
import slamdata.Predef._
import scalaz._, Scalaz._
import scalaz.Liskov._
/** A version of MonadError that doesn't extend Monad to avoid ambiguous implicits
* in the presence of multiple "mtl" constraints.
*/
trait MonadError_[F[_], E] {
  /** Lifts the error `e` into a failed `F[A]`. */
  def raiseError[A](e: E): F[A]

  /** Recovers from a failed `fa` by running `f` on the error. */
  def handleError[A](fa: F[A])(f: E => F[A]): F[A]

  /** Materializes success or failure as a disjunction, so the resulting
    * effect always "succeeds": errors appear on the left, values on the right.
    */
  def attempt[A](fa: F[A])(implicit F: Applicative[F]): F[E \/ A] =
    handleError(fa.map(a => a.right[E]))(e => e.left[A].point[F])

  /** Ensures `f` is sequenced after `fa`, whether the latter succeeded or not.
    *
    * Useful for releasing resources that may have been acquired in order to
    * produce `fa`. The original error, if any, is re-raised afterwards.
    */
  def ensuring[A](fa: F[A])(f: Option[E] => F[Unit])(implicit F: Monad[F]): F[A] =
    attempt(fa).flatMap(_.fold(
      e => f(some(e)) *> raiseError[A](e),
      a => f(none) as a))

  /** Recovers with a pure value for errors covered by `pf`; others propagate. */
  def handle[A](fa: F[A])(pf: PartialFunction[E, A])(implicit F: Applicative[F]): F[A] =
    handleWith(fa)(pf.andThen(a => a.point[F]))

  /** Recovers with an effect for errors covered by `pf`; others are re-raised. */
  def handleWith[A](fa: F[A])(pf: PartialFunction[E, F[A]]): F[A] =
    handleError(fa)(e => pf.applyOrElse(e, (err: E) => raiseError[A](err)))

  /** Collapses an inner disjunction: lefts are raised as errors, rights returned. */
  def unattempt[A](fa: F[E \/ A])(implicit F: Monad[F]): F[A] =
    fa.flatMap(d => d.fold(e => raiseError[A](e), a => a.point[F]))
}
object MonadError_ extends MonadError_Instances {
  /** Summons the `MonadError_` instance for `F`/`E` from implicit scope. */
  def apply[F[_], E](implicit ev: MonadError_[F, E]): MonadError_[F, E] = ev
}
sealed abstract class MonadError_Instances extends MonadError_Instances0 {
  // Lifts a MonadError_ on F into Kleisli[F, R, ?] by threading the environment
  // `r` through both the primary action and the recovery action.
  implicit def kleisliMonadError_[F[_], E, R](implicit F: MonadError_[F, E]): MonadError_[Kleisli[F, R, ?], E] =
    new MonadError_[Kleisli[F, R, ?], E] {
      def raiseError[A](e: E) =
        Kleisli(_ => F.raiseError(e))
      def handleError[A](fa: Kleisli[F, R, A])(f: E => Kleisli[F, R, A]) =
        Kleisli(r => F.handleError(fa.run(r))(e => f(e).run(r)))
    }
  // WriterT instance: a raised error carries an empty log (mzero[W]).
  implicit def writerTMonadError_[F[_]: Functor, W: Monoid, E](implicit E: MonadError_[F, E]): MonadError_[WriterT[F, W, ?], E] =
    new MonadError_[WriterT[F, W, ?], E] {
      def raiseError[A](e: E) =
        WriterT(E.raiseError[A](e) strengthL mzero[W])
      def handleError[A](fa: WriterT[F, W, A])(f: E => WriterT[F, W, A]) =
        WriterT(E.handleError(fa.run)(e => f(e).run))
    }
  // Handles the *inner* error type E1 of the base effect, leaving the
  // EitherT's own error channel E2 untouched.
  implicit def eitherTInnerMonadError_[F[_]: Functor, E1, E2](implicit E: MonadError_[F, E1]): MonadError_[EitherT[F, E2, ?], E1] =
    new MonadError_[EitherT[F, E2, ?], E1] {
      def raiseError[A](e: E1) =
        EitherT(E.raiseError[A](e) map (_.right[E2]))
      def handleError[A](fa: EitherT[F, E2, A])(f: E1 => EitherT[F, E2, A]) =
        EitherT(E.handleError(fa.run)(e1 => f(e1).run))
    }
  // StateT instance: recovery re-runs the handler from the *initial* state `s`,
  // discarding any state transitions the failed action may have attempted.
  implicit def stateTMonadError_[F[_]: Monad, E, S](implicit F: MonadError_[F, E]): MonadError_[StateT[F, S, ?], E] =
    new MonadError_[StateT[F, S, ?], E] {
      def handleError[A](fa: StateT[F, S, A])(f: E => StateT[F, S, A]) =
        StateT(s => F.handleError(fa.run(s))(f(_).run(s)))
      def raiseError[A](e: E) =
        StateT(_ => F.raiseError[(S, A)](e))
    }
}
// Lowest-priority instances: any full scalaz MonadError can stand in for MonadError_.
sealed abstract class MonadError_Instances0 {
  // Simple delegation; lives at lower priority so the transformer-specific
  // instances above are preferred when both apply.
  implicit def monadErrorNoMonad[F[_], E](implicit F: MonadError[F, E]): MonadError_[F, E] =
    new MonadError_[F, E] {
      def raiseError[A](e: E): F[A] = F.raiseError(e)
      def handleError[A](fa: F[A])(f: E => F[A]): F[A] = F.handleError(fa)(f)
    }
}
// Enrichment syntax: exposes the MonadError_ combinators as methods on any F[A]
// for which a MonadError_[F, E] instance is in scope. Pure delegation to F0.
final class MonadError_Ops[F[_], E, A] private[scalaz] (self: F[A])(implicit F0: MonadError_[F, E]) {
  final def handleError(f: E => F[A]): F[A] =
    F0.handleError(self)(f)
  def attempt(implicit F: Applicative[F]): F[E \/ A] =
    F0.attempt(self)
  def ensuring(f: Option[E] => F[Unit])(implicit F: Monad[F]): F[A] =
    F0.ensuring(self)(f)
  def handle(pf: PartialFunction[E, A])(implicit F: Applicative[F]): F[A] =
    F0.handle(self)(pf)
  def handleWith(pf: PartialFunction[E, F[A]]): F[A] =
    F0.handleWith(self)(pf)
  // The Liskov evidence `A <~< (E \/ B)` lets this work on any F[A] whose
  // element type is known to be a disjunction, without an explicit cast.
  def unattempt[B](implicit ev: A <~< (E \/ B), M: Monad[F]): F[B] =
    F0.unattempt(self.map(ev(_)))
}
| drostron/quasar | foundation/src/main/scala/quasar/contrib/scalaz/MonadError_.scala | Scala | apache-2.0 | 4,569 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.path
import scala.collection.mutable.ListBuffer
import org.scalatest.funspec.PathAnyFunSpec
/** A simple bounded LIFO stack backed by a [[ListBuffer]]; the top of the
  * stack is kept at index 0 of the buffer.
  */
class Stack[T] {
  /** Maximum number of elements the stack can hold. */
  val MAX = 10
  private val buf = new ListBuffer[T]

  /** Pushes `o` onto the top of the stack.
    * @throws IllegalStateException if the stack is full
    */
  def push(o: T): Unit = {
    if (full)
      throw new IllegalStateException("can't push onto a full stack")
    buf.prepend(o)
  }

  /** Removes and returns the top element.
    * @throws IllegalStateException if the stack is empty
    */
  def pop(): T = {
    if (empty)
      throw new IllegalStateException("can't pop an empty stack")
    buf.remove(0)
  }

  /** Returns the top element without removing it.
    * @throws IllegalStateException if the stack is empty
    */
  def peek: T = {
    if (empty)
      // Fixed copy-paste error: the message previously said "pop".
      throw new IllegalStateException("can't peek an empty stack")
    buf(0)
  }

  def full: Boolean = buf.size == MAX
  def empty: Boolean = buf.size == 0
  def size: Int = buf.size
  override def toString = buf.mkString("Stack(", ", ", ")")
}
// Shared-behavior functions for path.FunSpec suites: each method registers a
// group of tests parameterized by a stack factory (`newStack` is by-name so a
// fresh stack is produced per test in a path spec).
trait StackBehaviors { this: PathAnyFunSpec =>
  /** Registers the tests that any non-empty stack must pass. */
  def nonEmptyStack(newStack: => Stack[Int], lastItemAdded: Int): Unit = {
    it("should be non-empty") {
      assert(!newStack.empty)
    }
    it("should return the top item on peek") {
      assert(newStack.peek === lastItemAdded)
    }
    it("should not remove the top item on peek") {
      val stack = newStack
      val size = stack.size
      assert(stack.peek === lastItemAdded)
      assert(stack.size === size)
    }
    it("should remove the top item on pop") {
      val stack = newStack
      val size = stack.size
      assert(stack.pop() === lastItemAdded)
      assert(stack.size === size - 1)
    }
  }
  /** Registers the tests that any non-full stack must pass. */
  def nonFullStack(newStack: => Stack[Int]): Unit = {
    it("should not be full") {
      assert(!newStack.full)
    }
    it("should add to the top on push") {
      val stack = newStack
      val size = stack.size
      stack.push(7)
      assert(stack.size === size + 1)
      assert(stack.peek === 7)
    }
  }
}
// Path-style spec: in a PathAnyFunSpec the suite is re-instantiated per leaf
// test, so the side-effecting pushes inside each `describe` apply only along
// that test's path. Restructuring the statement order would change behavior.
class StackSpec extends PathAnyFunSpec with StackBehaviors {
  //SCALATESTJS,NATIVE-ONLY override def newInstance = new StackSpec
  val lastValuePushed = 9
  describe("A Stack") {
    val stack = new Stack[Int]
    describe("(when empty)") {
      it("should be empty") {
        info("hi there from should be empty")
        assert(stack.empty)
      }
      it("should complain on peek") {
        intercept[IllegalStateException] {
          stack.peek
        }
      }
      it("should complain on pop") {
        intercept[IllegalStateException] {
          stack.pop()
        }
      }
    }
    describe("(with one item)") {
      stack.push(9)
      it("should be non-empty, DUDE!") {
        assert(!stack.empty)
      }
      it("should do something else") {
      }
      it should behave like nonEmptyStack(stack, lastValuePushed)
      it should behave like nonFullStack(stack)
    }
    describe("(with one item less than capacity)") {
      // Pushes 1..9, leaving the top at 9 == lastValuePushed.
      for (i <- 1 to 9)
        stack.push(i)
      it should behave like nonEmptyStack(stack, lastValuePushed)
      it should behave like nonFullStack(stack)
    }
    describe("(full)") {
      // Pushes MAX items (0..MAX-1), so the top is MAX-1 == 9.
      for (i <- 0 until stack.MAX)
        stack.push(i)
      it("should be full") {
        assert(stack.full)
      }
      it should behave like nonEmptyStack(stack, lastValuePushed)
      it("should complain on a push") {
        withClue("stack was: " + stack) {
          intercept[IllegalStateException] {
            stack.push(10)
          }
        }
      }
    }
  }
}
| scalatest/scalatest | jvm/scalatest-test/src/test/scala/org/scalatest/path/StackSpec.scala | Scala | apache-2.0 | 3,938 |
/*
* Copyright (C)2011 Gerald de Jong - GNU General Public License v3
* please see the "license" in this distribution for details.
*
* Contact: "Gerald de Jong" <geralddejong@gmail.com>
*/
package eu.beautifulcode.eig
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.{WordSpec}
// Vector-math spec for Arrow. NOTE(review): the suite mutates the shared
// a1/a2/cross fields across tests, so test order matters here.
class ArrowSpec extends WordSpec with ShouldMatchers {
  // Tolerance for floating-point comparisons.
  val TINY = 0.00000001
  val a1 = new Arrow(1, 2, 0)
  val a2 = new Arrow(3, 2, 0)
  val cross: Arrow = new Arrow
  "an arrow" should {
    "have a proper cross product" in {
      cross := a1
      cross **= a2
      cross.x should be(0)
      cross.y should be(0)
      // (1,2,0) x (3,2,0) = (0, 0, 1*2 - 2*3) = (0, 0, -4)
      cross.z should be(-4.0 plusOrMinus TINY)
      (a1 ** a2).z should be(-4.0 plusOrMinus TINY)
    }
    "have a proper dot product" in {
      // (1,2,0) . (3,2,0) = 3 + 4 = 7
      a1 * a2 should be(7.0 plusOrMinus TINY)
    }
    "give the right distance" in {
      a1 ~~ a2 should be(2.0 plusOrMinus TINY)
      (a1 + a2).y should be(4.0 plusOrMinus TINY)
    }
    "handle addition subtraction and assignment" in {
      a1 += a2;
      assert(a1.x == 4);
      a1.x should be(4)
      a1 -= a2;
      a1.x should be(1)
      a1 ~ a2 should be(4)
      a1.quadrance should be(1 * 1 + 2 * 2)
      a1 *= 2
      a1.quadrance should be(2 * 2 + 4 * 4)
      a2.length should be(math.sqrt(2 * 2 + 3 * 3) plusOrMinus TINY)
      a1 := a2;
      a1.length should be(math.sqrt(2 * 2 + 3 * 3) plusOrMinus TINY)
      // NOTE(review): `!=` here appears to be Arrow's custom "set length to"
      // operator (length becomes 3.0 on the next line) — confirm against Arrow.
      a1 != 3.0
      a1.length should be(3.0 plusOrMinus TINY)
    }
    "do capping properly when the size is big" in {
      val a = new Arrow
      a.x = 10.0
      // cap returns the pre-cap length and rescales the arrow to the cap.
      val c = a.cap(1.0)
      c should be (10.0)
      a.length should be (1.0)
    }
    "do capping properly when nothing has to be done" in {
      val a = new Arrow
      a.x = 1.0
      // Below the cap: cap returns 0.0 and leaves the length unchanged.
      val c = a.cap(2.0)
      c should be (0.0)
      a.length should be (1.0)
    }
  }
} | geralddejong/Tensegrity | src/test/scala/eu/beautifulcode/eig/ArrowSpec.scala | Scala | gpl-3.0 | 1,856 |
package github.joestein.util
import org.apache.log4j.Logger;
/**
* LogHelper is a trait you can mix in to provide easy log4j logging
* for your scala classes.
**/
trait LogHelper {
    // Logger is keyed by the name of the concrete class mixing in this trait.
    val loggerName = this.getClass.getName
    lazy val logger = Logger.getLogger(loggerName)

    /** Logs `msg` at DEBUG level. The by-name parameter means the message
      * string is only built when DEBUG logging is actually enabled.
      */
    def debug(msg: => String): Unit = {
        if (logger.isDebugEnabled())
            logger.debug(msg)
    }

    /** Logs `msg` at INFO level, building the message only if INFO is enabled. */
    def info(msg: => String): Unit = {
        if (logger.isInfoEnabled())
            logger.info(msg)
    }

    /** Logs `msg` at ERROR level. */
    def error(msg: => String): Unit = {
        logger.error(msg)
    }

    /** Logs `msg` at FATAL level. */
    def fatal(msg: => String): Unit = {
        logger.fatal(msg)
    }

    /** Logs `msg` at WARN level. */
    def warn(msg: => String): Unit = {
        logger.warn(msg)
    }
} | joestein/skeletor | src/main/scala/util/Logging.scala | Scala | mit | 607 |
import akka.actor.{ActorSystem, PoisonPill, Props}
import akka.testkit.{ImplicitSender, TestFSMRef, TestKit, TestProbe}
import checkout.{Cancelled, CheckoutFSM, CheckoutMessages, SelectingDelivery}
import org.scalatest.concurrent.Eventually
import org.scalatest.{BeforeAndAfterAll, FlatSpecLike, Matchers}
import shop.Utils.generateID
import scala.concurrent.duration._
// Integration-style tests for CheckoutFSM using akka-testkit. The FSM is
// queried via a GetStateName message and answers with its current state.
// NOTE(review): the Thread.sleep calls make these tests timing-sensitive.
class CheckoutFSMTest extends TestKit(ActorSystem("CheckoutFSMTest"))
  with FlatSpecLike
  with Matchers
  with BeforeAndAfterAll
  with Eventually
  with ImplicitSender {
  val customer = TestProbe()
  val cart = TestProbe()
  override def afterAll {
    TestKit.shutdownActorSystem(system)
  }
  "Checkout" should "start in SelectingDelivery" in {
    val id = "checkout-1"
    val checkout = system.actorOf(Props(new CheckoutFSM(cart.ref, customer.ref, id)))
    checkout ! CheckoutMessages.GetStateName
    expectMsg(SelectingDelivery)
  }
  "Checkout" should "be cancellable after creation" in {
    val id = "checkout-2"
    val checkout = system.actorOf(Props(new CheckoutFSM(cart.ref, customer.ref, id)))
    checkout ! CheckoutMessages.Cancelled
    checkout ! CheckoutMessages.GetStateName
    expectMsg(Cancelled)
  }
  "Checkout" should "expire and goto Cancelled " in {
    val id = "checkout-3"
    // Sleep past the 100ms expiration so the FSM's timer fires before we query.
    val checkout = system.actorOf(Props(new CheckoutFSM(cart.ref, customer.ref, id, checkoutExpirationTime = 100.millis)))
    Thread.sleep(100)
    checkout ! CheckoutMessages.GetStateName
    expectMsg(Cancelled)
  }
  "Checkout" should "should restore timers" in {
    val id = "checkout-4"
    val checkout = system.actorOf(Props(new CheckoutFSM(cart.ref, customer.ref, id, checkoutExpirationTime = 1.seconds)))
    checkout ! CheckoutMessages.GetStateName
    expectMsg(SelectingDelivery)
    // Kill the actor, then recreate it with the same id: the expiration timer
    // should be restored (persisted), so after 1.5s it must be Cancelled.
    checkout ! PoisonPill
    val checkout2 = system.actorOf(Props(new CheckoutFSM(cart.ref, customer.ref, id, checkoutExpirationTime = 1.seconds)))
    Thread.sleep(1500)
    checkout2 ! CheckoutMessages.GetStateName
    expectMsg(Cancelled)
  }
}
// "Checkout" should "go to SelectingPaymentMethod after delivery method selection" in {
// val checkout = TestFSMRef(new CheckoutFSM(cart, customer))
// checkout ! CheckoutMessages.DeliveryMethodSelected("deliveryMethod")
// checkout.stateName shouldBe SelectingPaymentMethod
// checkout.stateData shouldBe CheckoutParameters("deliveryMethod")
// }
//
// "Checkout" should "be cancellable after delivery method selection" in {
// val checkout = TestFSMRef(new CheckoutFSM(cart, customer))
// checkout ! CheckoutMessages.DeliveryMethodSelected("deliveryMethod")
// checkout.stateName shouldBe SelectingPaymentMethod
//
// checkout ! CheckoutMessages.Cancelled
// checkout.stateName shouldBe Cancelled
// }
//
// "Checkout" should "expire and goto Cancelled after delivery method selection" in {
// val checkout = TestFSMRef(new CheckoutFSM(cart, customer, checkoutExpirationTime = 100 millis))
// checkout ! CheckoutMessages.DeliveryMethodSelected("deliveryMethod")
// checkout.stateName shouldBe SelectingPaymentMethod
// eventually {
// checkout.stateName shouldBe Cancelled
// }
// }
//
// "Checkout" should "go to ProcessingPayment after payment selection" in {
// val checkout = TestFSMRef(new CheckoutFSM(cart, customer))
// checkout ! CheckoutMessages.DeliveryMethodSelected("deliveryMethod")
// checkout ! CheckoutMessages.PaymentSelected("payment")
// checkout.stateName shouldBe ProcessingPayment
// val state = checkout.stateData.asInstanceOf[CheckoutParametersWithPaymentService]
// state.delivery shouldBe "deliveryMethod"
// state.payment shouldBe "payment"
// }
//
// "Checkout" should "be cancellable after payment method selection" in {
// val checkout = TestFSMRef(new CheckoutFSM(cart, customer))
// checkout ! CheckoutMessages.DeliveryMethodSelected("deliveryMethod")
// checkout ! CheckoutMessages.PaymentSelected("payment")
// checkout ! CheckoutMessages.Cancelled
// checkout.stateName shouldBe Cancelled
// }
//
// "Checkout" should "expire and goto Cancelled after payment method selection" in {
// val checkout = TestFSMRef(new CheckoutFSM(cart, customer, paymentExpirationTime = 100 millis))
// checkout ! CheckoutMessages.DeliveryMethodSelected("deliveryMethod")
// checkout ! CheckoutMessages.PaymentSelected("payment")
//
// checkout.stateName shouldBe ProcessingPayment
// eventually {
// checkout.stateName shouldBe Cancelled
// }
// }
//
// "Checkout" should "be closeable" in {
// val checkout = TestFSMRef(new CheckoutFSM(cart, customer))
// checkout ! CheckoutMessages.DeliveryMethodSelected("deliveryMethod")
// checkout ! CheckoutMessages.PaymentSelected("payment")
// checkout ! CheckoutMessages.PaymentReceived
// checkout.stateName shouldBe Closed
// }
//
// "Checkout" should "should send CheckoutClosed to cart" in {
// val cart = TestProbe()
// val checkout = TestFSMRef(new CheckoutFSM(cart.ref, customer))
// checkout ! CheckoutMessages.DeliveryMethodSelected("deliveryMethod")
// checkout ! CheckoutMessages.PaymentSelected("payment")
// checkout ! CheckoutMessages.PaymentReceived
// cart.expectMsg(500 millis, CartMessages.CheckoutClosed)
// }
| apisarek/reactivescala-course | src/test/scala/CheckoutFSMTest.scala | Scala | mit | 5,305 |
package org.kepteasy.common
/** Generic failure notification carrying a human-readable reason. */
case class Error(message: String)
/** Positive acknowledgement for the entity identified by `id`. */
case class Acknowledge(id: String)
| acflorea/akka-persistence-event-sourcing | src/main/scala/org/kepteasy/common/Messages.scala | Scala | apache-2.0 | 99 |
package pl.touk.nussknacker.ui.security.api
/** An authenticated user's identity: stable `id`, display `username`,
  * and the set of role names granted to the user. */
case class AuthenticatedUser(id: String, username: String, roles: Set[String])
| TouK/nussknacker | security/src/main/scala/pl/touk/nussknacker/ui/security/api/AuthenticatedUser.scala | Scala | apache-2.0 | 124 |
/*
Copyright © 2011 William D. Back
This file is part of SudokuSolver.
SudokuSolver is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
SudokuSolver is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Scarab. If not, see <http://www.gnu.org/licenses/>.
*/
package com.thebackfamily.sudoku
import java.util.Date
import scala.io.Source
/**
* The SudokuSolver is a tool that solves sudoku puzzles using a possible variety of approaches.
* There are three ways to enter the puzzle to solve:
* 1. As a single line with nine sets of nine entries or spaces separated by a comma.
* 2. As a 9x9 grid in a file.
* 3. From the command line prompt, one line at a time of nine entries.
* @author Bill Back
*/
object SudokuSolver extends App {
  // Raw puzzle text, gathered either from a file (args(0)) or interactively.
  val puzzleInput = new StringBuilder
  if (args.length > 0) { // args(0) should be the name of a file holding the puzzle.
    val src = Source.fromFile(args(0))
    puzzleInput.append (src.mkString)
  }
  else { // prompt for the nine puzzle rows on the command line.
    println ("Enter each line of the puzzle with ',' separating the values. Empty values can be blank spaces or zeros.")
    val stdinLines = Source.stdin.getLines
    // BUG FIX: the range was (1 to 0), which is empty, so interactive entry
    // never read any input; it also slurped all of stdin at once via mkString.
    // Read exactly one row per prompt instead, separating rows with newlines
    // so the accumulated text matches the file-based format.
    (1 to 9) foreach { cnt =>
      println ("Line " + cnt + ": ")
      puzzleInput.append(stdinLines.next()).append('\n')
    }
  }
  println ("Puzzle to solve:\n" + puzzleInput.toString)
  val puzzle = puzzleInput.toString

  // Track start time so we can report the solve duration.
  val start = new Date().getTime

  val solver = new BruteForceSolver
  //val solver = new RandomSolver
  solver.showStatus = true

  println( "Solving puzzle using " + solver.name + " solver.")
  val solution = solver.solve(puzzle)
  val itWasSolved = Verify(solution)

  // Show whether the puzzle was solved and the time taken.
  println ((if (itWasSolved) "solved" else "not solved") + " in " + (new Date().getTime - start) + " milliseconds")
  println ("Solution:\n")
  println(solution.toString)
} | billdback/Sudoku-Solver | src/main/scala/com/thebackfamily/sudoku/SudokuSolver.scala | Scala | gpl-3.0 | 2,338 |
/*
* Copyright 2014 Intelix Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hq.flows.core
import common.Fail
import play.api.libs.json.JsValue
import scalaz.\/
/** Factory that knows how to build a `T` from a JSON configuration fragment. */
trait BuilderFromConfig[T] {
  /** Key used to select this builder from configuration. */
  def configId: String
  /** Attempts to build a `T` from `props`; a `Fail` on the left explains why it could not. */
  def build(props: JsValue): \/[Fail,T]
}
| mglukh/ehub | modules/core/src/main/scala/hq/flows/core/BuilderFromConfig.scala | Scala | apache-2.0 | 792 |
package slick.test.lifted
import org.junit.Test
import org.junit.Assert._
/** Test case for the listing of constraints in table definitions */
@deprecated("Using deprecated .simple API", "3.0")
class TableConstraintsTest {
  /** Verifies that the foreign keys, primary keys and indexes declared on a
    * table definition are reported sorted by constraint name. */
  @Test def testConstraintsOrdering {
    import slick.driver.H2Driver.simple._

    class Foo(tag: Tag) extends Table[Long](tag, "foo_table") {
      def id = column[Long]("id")
      override def * = id
    }
    val foos = TableQuery[Foo]

    class Bar(tag: Tag) extends Table[Long](tag, "bar_table") {
      def id = column[Long]("id")
      override def * = id
    }
    // Was `var` — never reassigned, so `val` (consistent with `foos` above).
    val bars = TableQuery[Bar]

    // Table with two indexes, two primary keys and two foreign keys, declared
    // deliberately out of name order so the sorting assertions are meaningful.
    class T(tag: Tag) extends Table[(Int, String, Long, Long)](tag, "t_table") {
      def id = column[Int]("id")
      def idIndex = index("idx_id", id)
      def code = column[String]("code")
      def codeIndex = index("idx_code", code)
      def pk1 = primaryKey("pk1_t", (id, code))
      def pk2 = primaryKey("pk2_t", (id))
      def fooId = column[Long]("foo_id")
      def foo = foreignKey("fk_foo", fooId, foos)(_.id)
      def fooIdndex = index("idx_foo_id", fooId)
      def barId = column[Long]("bar_id")
      def bar = foreignKey("fk_bar", barId, bars)(_.id)
      def barIdIndex = index("idx_bar_id", barId)
      override def * = (id, code, fooId, barId)
    }
    val ts = TableQuery[T]

    val foreignKeys = ts.baseTableRow.foreignKeys.map(_.name)
    assertTrue("foreignKeys should be sorted by name", foreignKeys == Seq("fk_bar", "fk_foo"))
    val primaryKeys = ts.baseTableRow.primaryKeys.map(_.name)
    assertTrue("primaryKeys should be sorted by name", primaryKeys == Seq("pk1_t", "pk2_t"))
    val indexes = ts.baseTableRow.indexes.map(_.name)
    assertTrue("indexes should be sorted by name", indexes == Seq("idx_bar_id", "idx_code", "idx_foo_id", "idx_id"))
  }
}
| easel/slick | slick-testkit/src/test/scala/slick/test/lifted/TableConstraintsTest.scala | Scala | bsd-2-clause | 1,858 |
package io.youi.http.content
import java.io.File
import io.youi.http.Headers
import io.youi.net.ContentType
/** Multipart form-data content made up of keyed [[FormData]] groups. */
case class FormDataContent(data: List[FormData]) extends Content {
  // Form data has no fixed length or modification time.
  override def length: Long = -1
  override def lastModified: Long = -1
  override def contentType: ContentType = ContentType.`multipart/form-data`
  // Content type and last-modified are fixed for form data; setters are no-ops.
  override def withContentType(contentType: ContentType): Content = this
  override def withLastModified(lastModified: Long): Content = this

  // First entry recorded under `key`, if any group matches that key.
  private def firstEntry(key: String): Option[FormDataEntry] =
    data.collectFirst { case fd if fd.key == key => fd.entries.head }

  def fileOption(key: String): Option[FileEntry] = firstEntry(key).map(_.asInstanceOf[FileEntry])
  def stringOption(key: String): Option[StringEntry] = firstEntry(key).map(_.asInstanceOf[StringEntry])

  def file(key: String): FileEntry = fileOption(key) match {
    case Some(entry) => entry
    case None => throw new RuntimeException(s"Not found: $key in $this.")
  }
  def string(key: String): StringEntry = stringOption(key) match {
    case Some(entry) => entry
    case None => throw new RuntimeException(s"Not found: $key in $this.")
  }

  def withFile(key: String, fileName: String, file: File, headers: Headers = Headers.empty): FormDataContent =
    withEntry(key, FileEntry(fileName, file, headers))

  def withString(key: String, value: String, headers: Headers = Headers.empty): FormDataContent =
    withEntry(key, StringEntry(value, headers))

  /** Appends `entry` to the group for `key`, creating the group if needed;
    * the (possibly new) group is moved to the end of the data list. */
  def withEntry(key: String, entry: FormDataEntry): FormDataContent = {
    val existing = data.find(_.key == key).getOrElse(FormData(key, Nil))
    val merged = existing.copy(entries = existing.entries :+ entry)
    copy(data = data.filterNot(_.key == key) :+ merged)
  }

  override def toString: String = data.map(_.key).mkString("FormDataContent(", ", ", ")")
  override def asString: String = toString
} | outr/youi | core/shared/src/main/scala/io/youi/http/content/FormDataContent.scala | Scala | mit | 1,754 |
package mgoeminne.scalaggplot.geom
import mgoeminne.scalaggplot.position.Position
import mgoeminne.scalaggplot.stat.Statistic
import mgoeminne.scalaggplot._
import org.saddle.Frame
/**
* Blank, draws nothing.
*
* The blank geom draws nothing, but can be a useful way of ensuring common scales between different plots.
*
* == Examples ==
*
* TODO
*
*
* @param mapping The aesthetic mapping, usually constructed with [[aes.aes]] or [[aes.string]].
* Only needs to be set at the layer level if you are overriding the plot defaults.
* @param data A layer specific dataset - only needed if you want to override the plot defaults.
* @param stat The statistical transformation to use on the data for this layer.
* @param position The position adjustment to use for overlappling points on this layer
* @tparam T
*/
case class blank[T]( mapping: Option[(Seq[Numeric[T]], Seq[Numeric[T]])] = None,
                     data: Option[Frame[Any,Any,T]] = None,
                     stat: Statistic = blankUtil.defaultStat,
                     position: Position = blankUtil.defaultPos) extends Geom
// Defaults referenced by [[blank]]'s constructor: the identity statistical
// transformation and the identity position adjustment (i.e. no-ops).
private object blankUtil
{
  val defaultStat = stat.identity
  val defaultPos = position.identity
}
| mgoeminne/scala-ggplot | src/main/scala/mgoeminne/scalaggplot/geom/blank.scala | Scala | lgpl-3.0 | 1,243 |
package sds
import java.io.{IOException, InputStream}
import scala.annotation.tailrec
import sds.classfile.{MemberInfo, ClassfileStream => Stream}
import sds.classfile.attribute.{AttributeInfo => Attribute}
import sds.classfile.constant_pool.{
ConstantInfo => CInfo,
ConstantInfoAdapter => Adapter,
Utf8Info => Utf8
}
/**
 * Reads the binary layout of a JVM classfile from a stream or a file path and
 * populates the mutable [[Classfile]] held in `classfile`.
 *
 * IO failures during construction are logged via `printStackTrace` and swallowed,
 * leaving `classfile` partially populated.
 */
class ClassfileReader {
    val classfile: Classfile = new Classfile()

    /** Parses from an already-open input stream. */
    def this(data: InputStream) {
        this()
        try {
            read(Stream(data))
        } catch {
            case e: IOException => e.printStackTrace()
        }
    }

    /** Parses the classfile at the given file path. */
    def this(fileName: String) {
        this()
        try {
            read(Stream(fileName))
        } catch {
            case e: IOException => e.printStackTrace()
        }
    }

    /**
     * Reads the sections in classfile order: magic, versions, constant pool,
     * access flags, this/super class, interfaces, fields, methods, attributes.
     * Every `data.int`/`data.short` call advances the stream, so statement order
     * here is load-bearing — do not reorder.
     */
    private def read(data: Stream): Unit = {
        classfile.magic = data.int
        classfile.minor = data.short
        classfile.major = data.short
        // The read count is one larger than the number of actual pool entries.
        classfile.pool = readConstantPool(0, new Array[CInfo](data.short - 1), data)
        classfile.access = data.short
        classfile.thisClass = data.short
        classfile.superClass = data.short
        classfile.interfaces = (0 until data.short).map((_: Int) => data.short).toArray
        // Resolves an attribute's name through its constant-pool Utf8 entry, then
        // delegates to the attribute factory.
        lazy val genAttr: ((Stream, Array[CInfo]) => (Attribute)) = (_data: Stream, _pool: Array[CInfo]) => {
            val name: Int = _data.short
            val utf8: Utf8 = _pool(name - 1).asInstanceOf[Utf8]
            Attribute(utf8.value, _data, _pool)
        }
        lazy val genMember: ((Int) => (MemberInfo)) = (_: Int) => new MemberInfo(data, classfile.pool, genAttr)
        classfile.fields = (0 until data.short).map(genMember).toArray
        classfile.methods = (0 until data.short).map(genMember).toArray
        classfile.attributes = (0 until data.short).map((_: Int) => genAttr(data, classfile.pool)).toArray
        data.close()
    }

    /**
     * Fills `pool` from the stream, tag by tag. Long/double constants consume two
     * pool slots: the slot after them is filled with an [[Adapter]] placeholder
     * and the index advances by 2.
     */
    @tailrec
    private def readConstantPool(i: Int, pool: Array[CInfo], data: Stream): Array[CInfo] = {
        if(i >= pool.length) {
            return pool
        }
        val tag: Int = data.byte
        pool(i) = CInfo(tag, data)
        tag match {
            case CInfo.LONG|CInfo.DOUBLE =>
                pool(i + 1) = new Adapter()
                readConstantPool(i + 2, pool, data)
            case _ => readConstantPool(i + 1, pool, data)
        }
    }
} | g1144146/sds_for_scala | src/main/scala/sds/ClassfileReader.scala | Scala | apache-2.0 | 2,361 |
package core.authorization
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.{RejectionHandler, Rejection}
import core.ErrorWrapper
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import core.CommonJsonProtocol._
/** Rejection raised when the authenticated user lacks permission for a route. */
case class PermissionRejection(message: String) extends Rejection

/** Mixin converting [[PermissionRejection]]s into HTTP 403 JSON error responses. */
trait WithPermissionRejections {

  implicit def permissionRejectionHandlers =
    RejectionHandler.newBuilder()
      .handle { case PermissionRejection(msg) ⇒
        complete(Forbidden, ErrorWrapper("forbidden", msg)) }
      .result()
}
| piobab/akka-http-rest-api | src/main/scala/core/authorization/WithPermissionRejections.scala | Scala | mit | 616 |
package cache
import play.api.cache.SyncCacheApi
/**
 * Mixin that memoises successful (`Right`) results in a Play cache and tracks
 * every key it has written so those keys can later be evicted selectively.
 *
 * NOTE(review): `knownCacheKeys` is a plain `var Set` — assumed single-threaded
 * access; confirm before using from concurrent request handlers.
 */
trait CacheResult {

  val cache: SyncCacheApi

  var knownCacheKeys: Set[String]

  /** Returns the cached value for `key` when present; otherwise evaluates `block`,
    * caching (and remembering the key of) a successful result. Failures are passed
    * through untouched and never cached. */
  def cachePositiveResult[E, T](key: String)(block: => Either[E, T])(implicit ev: scala.reflect.ClassTag[T]) =
    cache.get[T](key) match {
      case Some(hit) =>
        Right(hit)
      case None =>
        block match {
          case Right(value) =>
            cache.set(key, value)
            knownCacheKeys += key
            Right(value)
          case failure =>
            failure
        }
    }

  /** Evicts every key this trait has ever written, then forgets them all. */
  def removeAllCachedResults() {
    knownCacheKeys.foreach(key => cache.remove(key))
    knownCacheKeys = Set()
  }

  /** Evicts a single key and stops tracking it. */
  def removeCached(key: String) {
    cache.remove(key)
    knownCacheKeys -= key
  }

  /** Evicts every tracked key matching `predicate`; `filter` produces a snapshot,
    * so mutating `knownCacheKeys` during the traversal is safe. */
  def removeCachedByPredicate(predicate: String => Boolean) {
    knownCacheKeys.filter(predicate).foreach(removeCached)
  }

  /** Signals an operation that this cached implementation cannot support. */
  def notSupported: Nothing = throw new UnsupportedOperationException("Operation not supported in cached implementation")
} | digitalinteraction/intake24 | ApiPlayServer/app/cache/CacheResult.scala | Scala | apache-2.0 | 1,060 |
package io.mewbase.rest.http4s
import cats.effect.Effect
import io.mewbase.bson.BsonArray
import io.mewbase.rest.RestServiceAction
import io.mewbase.rest.http4s.BsonEntityCodec._
import org.http4s.Response
import org.http4s.dsl.Http4sDsl
import scala.language.higherKinds
/**
 * Visitor turning each [[RestServiceAction]] into an http4s [[Response]] inside
 * the effect `F`. Each action's `perform()` is deferred with `Effect.suspend` so
 * the work runs when the effect is evaluated, not when the route is built.
 */
class Http4sRestServiceActionVisitor[F[_]: Effect] extends RestServiceAction.Visitor[F[Response[F]]] with Http4sDsl[F] {

  // Fetches one document and returns it as the 200 body.
  // NOTE(review): `futureDocument.get()` looks like a blocking wait on a Java
  // future — confirm this is acceptable inside the effect.
  override def visit(retrieveSingleDocument: RestServiceAction.RetrieveSingleDocument): F[Response[F]] =
    implicitly[Effect[F]].suspend {
      val futureDocument = retrieveSingleDocument.perform()
      val document = futureDocument.get()
      Ok(document)
    }

  // Fire-and-acknowledge: the command's result is discarded, only 200 is returned.
  override def visit(executeCommand: RestServiceAction.ExecuteCommand): F[Response[F]] =
    implicitly[Effect[F]].suspend {
      executeCommand.perform()
      Ok()
    }

  // Returns the ids as a BSON array body.
  override def visit(listDocumentIds: RestServiceAction.ListDocumentIds): F[Response[F]] =
    implicitly[Effect[F]].suspend {
      val documentIds = listDocumentIds.perform()
      Ok(BsonArray.from(documentIds))
    }

  // Returns the binder names as a BSON array body.
  override def visit(listBinders: RestServiceAction.ListBinders): F[Response[F]] =
    implicitly[Effect[F]].suspend {
      val binders = listBinders.perform()
      Ok(BsonArray.from(binders))
    }

  // 200 with the query result when present, 404 when the query is unknown.
  override def visit(runQuery: RestServiceAction.RunQuery): F[Response[F]] =
    implicitly[Effect[F]].suspend {
      val result = runQuery.perform()
      if (result.isPresent)
        Ok(result.get())
      else
        NotFound("Query not found")
    }

  // Exposes service metrics as the 200 body.
  override def visit(getMetrics: RestServiceAction.GetMetrics): F[Response[F]] =
    implicitly[Effect[F]].suspend {
      Ok(getMetrics.perform())
    }
} | Tesco/mewbase | mewbase-rest-http4s/src/main/scala/io.mewbase.rest.http4s/Http4sRestServiceActionVisitor.scala | Scala | mit | 1,678 |
package controllers
import java.io.File
import actors.{GetState, ShiftsActor, ShiftsMessageParser}
import akka.actor.{ActorRef, ActorSystem, Props}
import akka.pattern._
import akka.testkit.{ImplicitSender, TestKit}
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
import drt.shared.MilliDate
import org.joda.time.format.DateTimeFormat
import org.specs2.mutable.{After, Specification}
import scala.collection.JavaConversions._
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.Try
/**
 * Specs2 test scaffold providing an ActorSystem whose akka-persistence journal
 * is a LevelDB directory at `dbLocation`. After each example the system is shut
 * down and the journal files are deleted so examples do not see each other's state.
 */
abstract class AkkaTestkitSpecs2Support(dbLocation: String) extends TestKit(ActorSystem("testActorSystem", ConfigFactory.parseMap(Map(
  "akka.persistence.journal.plugin" -> "akka.persistence.journal.leveldb",
  "akka.persistence.no-snapshot-store.class" -> "akka.persistence.snapshot.NoSnapshotStore",
  "akka.persistence.journal.leveldb.dir" -> dbLocation,
  "akka.persistence.snapshot-store.plugin" -> "akka.persistence.snapshot-store.local"
)).withFallback(ConfigFactory.load(getClass.getResource("/application.conf").getPath.toString))))
  with After
  with ImplicitSender {

  // specs2 teardown hook: stop the system, then wipe the journal directory.
  def after = {
    shutDownActorSystem
    new File(dbLocation).listFiles().map(_.delete())
  }

  def shutDownActorSystem = {
    //TODO figure out how to wait for the actor to finish saving rather than this nasty timer.
    Thread.sleep(200)
    Await.ready(system.terminate(), 2 second)
    Await.ready(system.whenTerminated, 2 second)
  }
}
/**
 * Specs for [[ShiftsActor]] persistence behaviour and for the pure helpers in
 * [[ShiftsMessageParser]]. Examples run sequentially because each one starts and
 * stops an actor system over the same LevelDB directory ("target/test").
 */
class ShiftsActorSpec extends Specification {
  sequential

  private def shiftsActor(system: ActorSystem) = {
    val actor = system.actorOf(Props(classOf[ShiftsActor]), "shiftsactor")
    actor
  }

  implicit val timeout: Timeout = Timeout(5 seconds)

  // A fresh persistent-journal testkit, extended with helpers for driving the actor.
  def getTestKit = {
    new AkkaTestkitSpecs2Support("target/test") {
      def getActor = shiftsActor(system)

      // Synchronously asks the actor for its current shifts string.
      def getState(actor: ActorRef) = {
        Await.result(actor ? GetState, 1 second)
      }

      def getStateAndShutdown(actor: ActorRef) = {
        val s = getState(actor)
        shutDownActorSystem
        s
      }
    }
  }

  "ShiftsActor" should {
    "return the message it that was set if only one message is sent" in {
      val testKit2 = getTestKit
      val actor = testKit2.getActor
      actor ! "shift name, T1, 20/01/17, 10:00, 20:00, 9"

      val result = testKit2.getStateAndShutdown(actor)

      result === "shift name, T1, 20/01/17, 10:00, 20:00, 9"
    }

    "return the most recent message if more than one message is sent" in {
      val testKit2 = getTestKit
      val actor = testKit2.getActor
      actor ! "shift name, T1, 20/01/17, 10:00, 20:00, 9"
      actor ! "another name, T1, 20/01/17, 10:00, 20:00, 9"

      val result = testKit2.getStateAndShutdown(actor)

      result === "another name, T1, 20/01/17, 10:00, 20:00, 9"
    }

    // Persistence round-trip: state must survive a full actor-system restart.
    "restore the most recent message sent after a restart" in {
      val testKit1 = getTestKit
      val actor = testKit1.getActor
      actor ! "shift name, T1, 20/01/17, 10:00, 20:00, 9"
      actor ! "another name, T1, 20/01/17, 10:00, 20:00, 9"
      testKit1.shutDownActorSystem

      val testKit2 = getTestKit
      val actor2 = testKit2.getActor
      val result = testKit2.getStateAndShutdown(actor2)

      result === "another name, T1, 20/01/17, 10:00, 20:00, 9"
    }

    "return recent message if a message is sent after a restart" in {
      val testKit1 = getTestKit
      val actor = testKit1.getActor
      actor ! "shift name, T1, 20/01/17, 10:00, 20:00, 9"
      actor ! "another name, T1, 20/01/17, 10:00, 20:00, 9"
      testKit1.shutDownActorSystem

      val testKit2 = getTestKit
      val actor2 = testKit2.getActor
      actor2 ! "third name, T1, 20/01/17, 10:00, 20:00, 9"

      val result = testKit2.getStateAndShutdown(actor2)

      result === "third name, T1, 20/01/17, 10:00, 20:00, 9"
    }

    // 1484906400000L is 2017-01-20T10:00 in the parser's timezone.
    "convert date and time into a timestamp" in {
      Some(1484906400000L) === ShiftsMessageParser.dateAndTimeToMillis("20/01/17", "10:00")
    }

    "return None if the date is incorrectly formatted" in {
      None === ShiftsMessageParser.dateAndTimeToMillis("adfgdfgdfg7", "10:00")
    }

    "get start and end date millis from the startDate, endTime and startTime when endTime is later than startTime" in {
      val result = ShiftsMessageParser.startAndEndTimestamps("20/01/17", "10:00", "11:00")

      result === (Some(1484906400000L), Some(1484910000000L))
    }

    // An end time earlier than the start time is treated as rolling over to the next day.
    "get start and end date millis from the startDate, endTime and startTime when endTime is earlier than startTime" in {
      val result = ShiftsMessageParser.startAndEndTimestamps("20/01/17", "10:00", "09:00")

      result === (Some(1484906400000L), Some(1484989200000L))
    }

    "get start and end date millis from the startDate, endTime and startTime given invalid data" in {
      val result = ShiftsMessageParser.startAndEndTimestamps("jkhsdfjhdsf", "10:00", "09:00")

      result === (None, None)
    }

    "convert timestamp to dateString" in {
      val timestamp = 1484906400000L

      ShiftsMessageParser.dateString(timestamp) === "20/01/17"
    }

    "convert timestamp to timeString" in {
      val timestamp = 1484906400000L

      ShiftsMessageParser.timeString(timestamp) === "10:00"
    }

    // NOTE(review): this "should" block is nested inside the "ShiftsActor" block
    // above — presumably intentional grouping, but confirm it was not meant to be
    // a sibling top-level block.
    "ShiftsPersistenceApi" should {
      "allow setting and getting of shift data" in new AkkaTestkitSpecs2Support("target/test") {
        val shiftPersistenceApi = new ShiftPersistence {
          override implicit val timeout: Timeout = Timeout(5 seconds)

          val actorSystem = system
        }

        shiftPersistenceApi.saveShifts("shift name, T1, 20/01/17, 10:00, 20:00, 9")

        awaitAssert({
          val resultFuture = shiftPersistenceApi.getShifts()
          val result = Await.result(resultFuture, 1 seconds)
          assert("shift name, T1, 20/01/17, 10:00, 20:00, 9" == result)
        }, 2 seconds)
      }
    }
  }
}
| somanythings/drt-scalajs-spa-exploration | server/src/test/scala/controllers/ShiftsActorSpec.scala | Scala | apache-2.0 | 5,868 |
package spinoco.protocol.http.header
import spinoco.protocol.common.codec._
import spinoco.protocol.http.header.value.HeaderCodecDefinition
import scala.concurrent.duration._
/**
*
* @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Access-Control-Max-Age
*/
/** Header value: how long (as a duration) a CORS preflight response may be cached. */
sealed case class `Access-Control-Max-Age`(value: FiniteDuration) extends DefaultHeader

object `Access-Control-Max-Age` { val codec =
  // Wire format is whole seconds rendered as a decimal string; sub-second
  // precision is truncated on encode.
  HeaderCodecDefinition[`Access-Control-Max-Age`](intAsString.xmap ( i => `Access-Control-Max-Age`(i.seconds),_.value.toSeconds.toInt))
}
| Spinoco/protocol | http/src/main/scala/spinoco/protocol/http/header/Access-Control-Max-Age.scala | Scala | mit | 558 |
package com.locallexing.kernel
/**
 * Companion holding the data structures shared by the Earley recogniser:
 * item/symbol records, the "core item" abstraction (a dotted rule), and the
 * [[Earley.Kernel]] that precomputes all core items for a grammar.
 */
object Earley {

    /** One recognised occurrence of a symbol: input span plus the parameter
      * value passed in and the value produced. */
    final case class SymbolData(val firstIndexIncl : Int, val lastIndexExcl : Int, val input : Domain.V, val output : Domain.V) {
        def span : (Int, Int) = (firstIndexIncl, lastIndexExcl)
    }

    /** An Earley item: a core (dotted rule) plus the concrete data for the
      * left-hand side and for every symbol already consumed. */
    final case class Item(val coreItemId : Int, val data : SymbolData, val children : Vector[SymbolData]) extends Domain.Environment
    {
        def param : Domain.V = data.input
        def origin : Int = data.firstIndexIncl
        // Index 0 refers to the item's own input parameter; 1..n to the children.
        def inputAt(index : Int) : Domain.V = {
            if (index == 0) data.input else children(index - 1).input
        }
        def outputAt(index : Int) : Domain.V = {
            children(index - 1).output
        }
        override def toString : String = {
            val p = if (param == Domain.V.NIL) "" else "{" + param + "}"
            "Earley.Item[coreItemId="+coreItemId+p+", origin="+origin+"]"
        }
    }

    // Debug tracing hook; output is disabled.
    def debug(s : String) {
        //println("debug: " + s)
    }

    /** A dotted grammar rule. Implementations precompute prediction and
      * dot-advancement so the parsing loops only do table lookups. */
    trait CoreItem {
        def nextSymbol : Option[Grammar.Symbol]
        def dot : Int
        def nonterminal : Grammar.NS
        // Returns None when the rule's guard rejects the item.
        def mkItem(data : SymbolData, children : Vector[SymbolData]) : Option[Item]
        // the following functions all fail if there is no appropriate next symbol
        def predictedCoreItems : Vector[CoreItem]
        def nextCoreItem : CoreItem
        def nextSymbolParam(item : Earley.Item) : Domain.V
        def ruleIndex : Int
    }

    /** Precomputed core-item tables for one grammar. Core item ids are assigned
      * densely per (nonterminal, rule, dot) so advancing the dot is `id + 1`. */
    final class Kernel[CHAR](val grammar : Grammar[CHAR]) {

        import Grammar._

        // Builds id <-> (nonterminal, rule, dot) maps and the CoreItem table.
        private def computeCoreItems() : (Map[Int, (Int, Int, Int)], Map[(Int, Int, Int), Int], Map[Int, CoreItem]) = {
            var coreItemIds : Map[Int, (Int, Int, Int)] = Map()
            var coreItemIdsRev : Map[(Int, Int, Int), Int] = Map()
            var coreItems : Map[Int, CoreItem] = Map()
            var nextId = 0
            var n = 0
            for (nonterminal <- grammar.nonterminals) {
                var r = 0
                for (rule <- nonterminal.rules) {
                    // Ids for consecutive dot positions of one rule are consecutive,
                    // which is what makes nextCoreItemId = coreItemId + 1 below valid.
                    for (dot <- 0 to rule.rhs.size) {
                        coreItemIds = coreItemIds + (nextId -> (n, r, dot))
                        coreItemIdsRev = coreItemIdsRev + ((n, r, dot) -> nextId)
                        nextId += 1
                    }
                    r += 1
                }
                n += 1
            }
            for ((coreItemId, (n, r, dot)) <- coreItemIds) {
                val ns = NS(n)
                val nonterminal = grammar.get(ns)
                val rule = nonterminal.rules(r)
                // At the end of the rule there is no next symbol and the "next
                // expression" is the rule's output expression.
                val (nextSymbol, nextExpr) =
                    if (dot == rule.rhs.size) (None, rule.out)
                    else (Some(rule.rhs(dot)._1), rule.rhs(dot)._2)
                var predicted : Vector[Int] = Vector()
                var nextCoreItemId : Int = -1
                nextSymbol match {
                    case Some(nextns : NS) =>
                        // Predict every rule of the upcoming nonterminal at dot 0.
                        val nextnonterminal = grammar.get(nextns)
                        for (nextr <- 0 until nextnonterminal.rules.size) {
                            val predictedId = coreItemIdsRev((nextns.index, nextr, 0))
                            predicted = predicted :+ predictedId
                        }
                        nextCoreItemId = coreItemId + 1
                    case Some(nextts : TS) =>
                        nextCoreItemId = coreItemId + 1
                    case None =>
                }
                val coreItem = CI(coreItemId, ns, r, dot, nextSymbol, nextExpr, rule.guard, predicted, nextCoreItemId)
                coreItems = coreItems + (coreItemId -> coreItem)
            }
            (coreItemIds, coreItemIdsRev, coreItems)
        }

        private val (coreItemIds, coreItemIdsRev, coreItems) = computeCoreItems()

        private case class CI(id : Int, nonterminal : NS, ruleIndex : Int, dot : Int, nextSymbol : Option[Symbol], nextExpr : Domain.Expr, guard : Domain.Expr,
            predictedCoreItemIds : Vector[Int], nextCoreItemId : Int) extends CoreItem
        {
            lazy val predictedCoreItems : Vector[CoreItem] = predictedCoreItemIds.map(id => coreItems(id))
            lazy val nextCoreItem : CoreItem = coreItems(nextCoreItemId)
            def nextSymbolParam(item : Earley.Item) : Domain.V = Domain.Expr.eval(item, nextExpr)
            def mkItem(data : SymbolData, children : Vector[SymbolData]) : Option[Item] = {
                val item = Earley.Item(id, data, children)
                // The guard is only evaluated for freshly-predicted items (no children).
                if (children.size == 0) {
                    Domain.Expr.eval(item, guard) match {
                        case Domain.V.BOOL(allow) =>
                            if (!allow) return None
                        case _ => throw new RuntimeException("internal error in mkItem")
                    }
                }
                nextSymbol match {
                    case None =>
                        // Completed item: evaluate the rule's output expression now.
                        val output = Domain.Expr.eval(item, nextExpr)
                        Some(Earley.Item(id, SymbolData(data.firstIndexIncl, data.lastIndexExcl, data.input, output), children))
                    case _ => Some(item)
                }
            }
        }

        def coreItem(coreItemId : Int) : CoreItem = coreItems(coreItemId)

        def coreItemOf(item : Earley.Item) : CoreItem = coreItem(item.coreItemId)

        def numCoreItems : Int = coreItems.size

        // By convention the grammar's start symbol is nonterminal 0.
        val startNonterminal = Grammar.NS(0)
    }
}
/**
 * Parameterised Earley recogniser/parser over a precomputed [[Earley.Kernel]].
 *
 * `Bins(k)` holds the set of items whose recognised span ends at input position k.
 * Recognition runs Predict/Complete/Scan to a fixpoint per position ("Pi"),
 * interleaved with lexing ("Tokens") — this interleaving is what supports
 * local lexing, where token choice can depend on parser state.
 */
final class Earley[CHAR](kernel : Earley.Kernel[CHAR]) {

    import Earley._
    import Grammar._

    type Bin = Set[Item]
    type Bins = Array[Bin]

    /** Seeds bin 0 with a dot-0 item for every rule of the start nonterminal. */
    def Init() : Bin = {
        debug("Init")
        var bin : Bin = Set()
        val numCoreItems = kernel.numCoreItems
        for (coreItemId <- 0 until numCoreItems) {
            val coreItem = kernel.coreItem(coreItemId)
            if (coreItem.dot == 0 && coreItem.nonterminal == kernel.startNonterminal) {
                // mkItem may reject the seed if the rule's guard fails.
                coreItem.mkItem(SymbolData(0, 0, Domain.V.NIL, null), Vector()) match {
                    case Some(item) => bin = bin + item
                    case None =>
                }
            }
        }
        bin
    }

    // Debug-trace helper.
    def binSize(bins : Bins, k : Int) : String = {
        ", binsize = " + bins(k).size
    }

    /** Earley prediction at position k; returns whether bin k grew. */
    def Predict(bins : Bins, k : Int) : Boolean = {
        debug("predict " + k + binSize(bins, k))
        var bin = bins(k)
        val oldSize = bin.size
        for (item <- bin) {
            val coreItem = kernel.coreItemOf(item)
            coreItem.nextSymbol match {
                case Some(ns : NS) =>
                    // Predicted items start with an empty span [k, k) and carry the
                    // parameter computed from the predicting item.
                    val param = coreItem.nextSymbolParam(item)
                    for (predictedCoreItem <- coreItem.predictedCoreItems) {
                        predictedCoreItem.mkItem(SymbolData(k, k, param, null), Vector()) match {
                            case Some(item) => bin = bin + item
                            case None =>
                        }
                    }
                case _ =>
            }
        }
        bins(k) = bin
        bin.size != oldSize
    }

    /** Earley completion at position k; returns whether bin k grew. */
    def Complete(bins : Bins, k : Int) : Boolean = {
        debug("complete " + k + binSize(bins, k))
        var bin = bins(k)
        val oldSize = bin.size
        for (item <- bin) {
            val coreItem = kernel.coreItemOf(item)
            coreItem.nextSymbol match {
                case None =>
                    // `item` is finished; advance every item in its origin bin that
                    // was waiting on this nonterminal with a matching parameter.
                    val nextSymbol = Some(coreItem.nonterminal)
                    val param = item.param
                    for (srcItem <- bins(item.origin)) {
                        val srcCoreItem = kernel.coreItemOf(srcItem)
                        if (srcCoreItem.nextSymbol == nextSymbol) {
                            if (srcCoreItem.nextSymbolParam(srcItem) == param) {
                                val nextCoreItem = srcCoreItem.nextCoreItem
                                val data = srcItem.data
                                nextCoreItem.mkItem(SymbolData(data.firstIndexIncl, k, data.input, data.output), srcItem.children :+ item.data) match {
                                    case Some(item) => bin = bin + item
                                    case None => throw new RuntimeException("internal error")
                                }
                            }
                        }
                    }
                case _ =>
            }
        }
        bins(k) = bin
        bin.size != oldSize
    }

    /**
     * Lexes at position k every (terminal, parameter) pair some item is waiting
     * on, then lets the grammar's selector arbitrate between the old and the
     * newly found token set.
     */
    def Tokens(input : Input[CHAR], bins : Bins, prevTokens : Tokens, k : Int) : Tokens = {
        var tokens = prevTokens
        val bin = bins(k)
        val grammar = kernel.grammar
        for (item <- bin) {
            val coreItem = kernel.coreItemOf(item)
            coreItem.nextSymbol match {
                case Some(ts : TS) =>
                    val param = coreItem.nextSymbolParam(item)
                    val x = (ts, param)
                    // Only lex pairs not already attempted in this round.
                    tokens.get(x) match {
                        case None =>
                            grammar.get(ts).lexer.lex(input, k, param) match {
                                case Some(result) =>
                                    tokens = tokens + (x -> result)
                                case None =>
                            }
                        case _ =>
                    }
                case _ =>
            }
        }
        tokens = grammar.selector.select(input, k, prevTokens, tokens)
        debug("found tokens at " + k + ":" + tokens)
        tokens
    }

    /** Earley scan: consumes selected tokens, placing advanced items into the
      * bin at k + tokenLength. Returns whether bin k itself changed. */
    def Scan(bins : Bins, tokens : Tokens, k : Int) : Boolean = {
        debug("scan " + k + binSize(bins, k))
        val bin = bins(k)
        val oldSize = bin.size
        for (item <- bin) {
            val coreItem = kernel.coreItemOf(item)
            coreItem.nextSymbol match {
                case Some(ts : TS) =>
                    val param = coreItem.nextSymbolParam(item)
                    val x = (ts, param)
                    tokens.get(x) match {
                        case None =>
                        case Some((len, out)) =>
                            val nextCoreItem = coreItem.nextCoreItem
                            val child = SymbolData(k, k + len, param, out)
                            val data = item.data
                            // .get is safe here: guards only apply to child-less items.
                            val nextItem = nextCoreItem.mkItem(SymbolData(data.firstIndexIncl, k + len, data.input, data.output), item.children :+ child).get
                            bins(k + len) = bins(k + len) + nextItem
                    }
                case _ =>
            }
        }
        oldSize != bins(k).size
    }

    /** Runs Predict/Complete/Scan to a fixpoint on bin k. Returns true iff any
      * pass changed something (the final, stabilising pass is a no-op). */
    def Pi(bins : Bins, tokens : Tokens, k : Int) : Boolean = {
        var changed = false
        var oldChanged = false
        do {
            oldChanged = changed
            changed = false
            if (Predict(bins, k)) changed = true
            if (Complete(bins, k)) changed = true
            if (Scan(bins, tokens, k)) changed = true
        } while (changed)
        oldChanged
    }

    /** Alternates lexing and Pi at position k until neither yields progress. */
    def computeBin(input : Input[CHAR], bins : Bins, k : Int) {
        var tokens : Tokens = Map()
        Pi(bins, tokens, k)
        while (true) {
            tokens = Tokens(input, bins, tokens, k)
            if (!Pi(bins, tokens, k)) return
        }
    }

    /** True iff the final bin contains a completed start-nonterminal item spanning
      * from position 0. */
    def wasRecognized(bin : Bin) : Boolean = {
        for (item <- bin) {
            if (item.origin == 0) {
                val coreItem = kernel.coreItemOf(item)
                if (coreItem.nonterminal == kernel.startNonterminal && coreItem.nextSymbol == None)
                    return true
            }
        }
        false
    }

    /** Recognises `input`. On success returns Left(bins) for later parse-tree
      * construction; on failure Right(k), where k is the last position with any
      * items — i.e. roughly how far parsing got. */
    def recognize(input : Input[CHAR]) : Either[Array[Bin], Int] = {
        val inputSize = input.size
        val bins : Bins = new Array(inputSize + 1)
        for (k <- 0 to inputSize) bins(k) = Set()
        bins(0) = Init()
        for (k <- 0 to inputSize) computeBin(input, bins, k)
        val recognized = wasRecognized(bins(inputSize))
        if (!recognized) {
            var k = inputSize
            var foundNonemptyBin = false
            while (k >= 0 && !foundNonemptyBin) {
                if (bins(k).size > 0)
                    foundNonemptyBin = true
                else k -= 1
            }
            Right(k)
        } else {
            Left(bins)
        }
    }

    /** Memoised, cycle-safe construction of (possibly ambiguous) parse trees
      * from the recogniser's bins. `null` is used internally to mark a subtree
      * currently being visited (cycle) and is filtered out of ambiguity lists. */
    private class ParseTreeConstruction(bins : Array[Bin]) {
        import scala.collection.mutable.{Map => MutableMap, Set => MutableSet}
        private val cache : MutableMap[(Grammar.NS, Domain.V, Domain.V, Int, Int), ParseTree] = MutableMap()
        private val visiting : MutableSet[(Grammar.NS, Domain.V, Domain.V, Int, Int)] = MutableSet()
        def getParseTree(nonterminal : Grammar.NS, param : Domain.V, result : Domain.V,
            startPosition : Int, endPosition : Int) : ParseTree =
        {
            val key = (nonterminal, param, result, startPosition, endPosition)
            cache.get(key) match {
                case None =>
                    // Re-entering a key we are already constructing means a cyclic
                    // derivation; cut it off by returning null.
                    if (visiting(key)) return null
                    visiting += key
                    val r = constructParseTree(nonterminal, param, result, startPosition, endPosition)
                    cache += (key -> r)
                    visiting -= key
                    r
                case Some(r) =>
                    r
            }
        }
        /** Constructs the parse tree using the information obtained from the recognition phase. This assumes that there actually exists at least one parse tree.
          * @param startPosition the start position (inclusive)
          * @param endPosition the end position (exclusive)
          */
        def constructParseTree(nonterminal : Grammar.NS, param : Domain.V, result : Domain.V,
            startPosition : Int, endPosition : Int) : ParseTree =
        {
            import ParseTree._
            val grammar = kernel.grammar
            val bin = bins(endPosition)
            // Collect every completed item matching the requested signature.
            var foundItems : List[Item] = List()
            for (item <- bin) {
                val coreItem = kernel.coreItemOf(item)
                if (coreItem.nonterminal == nonterminal && coreItem.nextSymbol == None &&
                    item.data.input == param && item.data.output == result && item.origin == startPosition)
                {
                    foundItems = item :: foundItems
                }
            }
            // Builds one candidate tree; returns null when a child hits a cycle.
            def mkTree(foundItem : Item) : NonterminalNode = {
                val coreItem = kernel.coreItemOf(foundItem)
                val rule = grammar.get(coreItem.nonterminal).rules(coreItem.ruleIndex)
                var subtrees = new Array[ParseTree](rule.rhs.size)
                var hasAmbiguities = false
                for (i <- 0 until subtrees.size) {
                    val symbol = rule.rhs(i)._1
                    val child = foundItem.children(i)
                    symbol match {
                        case ts : TS =>
                            subtrees(i) = TerminalNode(ts, child.span, child.input, child.output)
                        case ns : NS =>
                            subtrees(i) = getParseTree(ns, child.input, child.output, child.firstIndexIncl, child.lastIndexExcl)
                            if (subtrees(i) == null) return null
                    }
                    hasAmbiguities = hasAmbiguities || subtrees(i).hasAmbiguities
                }
                NonterminalNode(coreItem.nonterminal, coreItem.ruleIndex, foundItem.data.span, subtrees.toVector,
                    foundItem.data.input, foundItem.data.output)
            }
            foundItems match {
                case List() => throw new RuntimeException("cannot construct parse tree for " +
                    nonterminal + " from " + startPosition + " to " + endPosition)
                case List(foundItem) => mkTree(foundItem)
                case _ =>
                    // Several derivations: wrap survivors in an AmbiguousNode.
                    val trees = foundItems.map(mkTree _).toVector.filter(t => t != null)
                    val node = trees.head
                    if (trees.size == 1) node
                    else AmbiguousNode(node.symbol, node.span, trees, node.input, node.output)
            }
        }
    }

    /** Full pipeline: recognise, then build the parse tree; Right(k) on failure
      * with k the furthest position reached. */
    def parse(input : Input[CHAR]) : Either[ParseTree, Int] = {
        recognize(input) match {
            case Left(bins) =>
                val ptc = new ParseTreeConstruction(bins)
                Left(ptc.getParseTree(kernel.startNonterminal, Domain.V.NIL, Domain.V.NIL, 0, input.size))
            case Right(k) =>
                Right(k)
        }
    }
} | proofpeer/local-lexing-prototype | shared/src/main/scala/com/locallexing/kernel/Earley.scala | Scala | mit | 14,046 |
package com.twitter.finagle.http2.transport.client
import com.twitter.concurrent.AsyncQueue
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.transport.QueueTransport
import com.twitter.finagle.{Stack, Status}
import com.twitter.util.{Await, Future}
import io.netty.handler.codec.http._
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.funsuite.AnyFunSuite
/**
 * Tests for Http2UpgradingTransport: a read must observe the HTTP/1 -> HTTP/2
 * upgrade outcome (successful, rejected, or aborted) before the response is
 * surfaced to the caller.
 */
class Http2UpgradingTransportTest extends AnyFunSuite with MockitoSugar {

  /** Fresh queue-backed transport plumbing, one instance per test. */
  class Ctx {
    val (writeq, readq) = (new AsyncQueue[Any](), new AsyncQueue[Any]())
    val transport = new QueueTransport[Any, Any](writeq, readq)
    val ref = new RefTransport(transport)

    def http1Status: Status = Status.Open

    val upgradingTransport = new Http2UpgradingTransport(
      transport,
      ref,
      Stack.Params.empty
    )
  }

  val fullRequest = new DefaultFullHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "twitter.com")
  val fullResponse = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK)

  // Everything here should resolve quickly; 5 seconds is just a safety margin.
  def await[A](f: Future[A]): A = Await.result(f, 5.seconds)

  test("upgrades properly") {
    val ctx = new Ctx
    import ctx._

    upgradingTransport.write(fullRequest)
    assert(await(writeq.poll) == fullRequest)

    val readF = upgradingTransport.read()
    assert(!readF.isDefined)

    // On a successful upgrade, subsequent reads must come from the new
    // (HTTP/2) transport, not the original read queue.
    val newReadQueue = new AsyncQueue[Any]()
    val newTransport = new QueueTransport[Any, Any](writeq, newReadQueue)
    assert(readq.offer(Http2UpgradingTransport.UpgradeSuccessful(newTransport)))
    assert(!readF.isDefined)

    assert(newReadQueue.offer(fullResponse))
    assert(await(readF) == fullResponse)
  }

  test("can reject an upgrade") {
    val ctx = new Ctx
    import ctx._

    upgradingTransport.write(fullRequest)
    assert(await(writeq.poll) == fullRequest)

    val readF = upgradingTransport.read()
    assert(!readF.isDefined)

    // A rejected upgrade keeps reads flowing through the original transport.
    assert(readq.offer(Http2UpgradingTransport.UpgradeRejected))
    assert(!readF.isDefined)

    assert(readq.offer(fullResponse))
    assert(await(readF) == fullResponse)
  }

  test("honors aborted upgrade dispatches") {
    val ctx = new Ctx
    import ctx._

    // A non-full (chunked) request message cannot carry an upgrade.
    val partialRequest = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "twitter.com")
    upgradingTransport.write(partialRequest)
    assert(await(writeq.poll) == partialRequest)

    val readF = upgradingTransport.read()
    assert(!readF.isDefined)

    assert(readq.offer(Http2UpgradingTransport.UpgradeAborted))
    assert(!readF.isDefined)

    assert(readq.offer(fullResponse))
    assert(await(readF) == fullResponse)
  }
}
| twitter/finagle | finagle-http2/src/test/scala/com/twitter/finagle/http2/transport/client/Http2UpgradingTransportTest.scala | Scala | apache-2.0 | 2,604 |
/*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.flaminem.flamy.model.core
import com.flaminem.flamy.model.collection.immutable.TableInfoCollection
import com.flaminem.flamy.model.files.FileIndex
/**
 * A [[Model]] backed by a full [[TableInfoCollection]] together with the
 * [[FileIndex]] it was derived from. Mixing in [[ModelComposition]] makes it
 * composable with other complete models.
 */
class CompleteModel (
  override val tables: TableInfoCollection,
  override val fileIndex: FileIndex
) extends Model with ModelComposition[CompleteModel]{

  /** Case-class-style copy: returns a new model, defaulting to current fields. */
  override def copy(
    tables: TableInfoCollection = tables,
    fileIndex: FileIndex = fileIndex
  ): CompleteModel = {
    new CompleteModel(tables, fileIndex)
  }
}
| flaminem/flamy | src/main/scala/com/flaminem/flamy/model/core/CompleteModel.scala | Scala | apache-2.0 | 1,098 |
/*
* Copyright 2006 - 2013
* Stefan Balev <stefan.balev@graphstream-project.org>
* Julien Baudry <julien.baudry@graphstream-project.org>
* Antoine Dutot <antoine.dutot@graphstream-project.org>
* Yoann Pigné <yoann.pigne@graphstream-project.org>
* Guilhelm Savin <guilhelm.savin@graphstream-project.org>
*
* This file is part of GraphStream <http://graphstream-project.org>.
*
* GraphStream is a library whose purpose is to handle static or dynamic
* graph, create them from scratch, file or any source and display them.
*
* This program is free software distributed under the terms of two licenses, the
* CeCILL-C license that fits European law, and the GNU Lesser General Public
* License. You can use, modify and/ or redistribute the software under the terms
* of the CeCILL-C license as circulated by CEA, CNRS and INRIA at the following
* URL <http://www.cecill.info> or under the terms of the GNU LGPL as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* The fact that you are presently reading this means that you have had
* knowledge of the CeCILL-C and LGPL licenses and that you accept their terms.
*/
package org.graphstream.ui.j2dviewer.renderer.test
import java.awt.GridLayout
import javax.swing.JFrame
import _root_.org.graphstream.graph._
import _root_.org.graphstream.graph.implementations._
import _root_.org.graphstream.stream.thread._
import _root_.org.graphstream.ui.swingViewer._
import _root_.org.graphstream.ui.swingViewer.Viewer.ThreadingModel
import _root_.org.graphstream.algorithm.generator._
/** Entry point: selects the J2D renderer, then runs the two-viewer demo frame. */
object TestTwoViewersInOneFrame {
	def main(args: Array[String]): Unit = {
		// The renderer property must be set before any viewer is created.
		System.setProperty("gs.ui.renderer", "org.graphstream.ui.j2dviewer.J2DGraphRenderer")
		val demo = new TestTwoViewersInOneFrame
		demo.test
	}
}
/**
 * Demo frame showing two independent graph viewers side by side, each rendering
 * its own copy of a generated Dorogovtsev-Mendes graph with a different style.
 */
class TestTwoViewersInOneFrame extends JFrame {

	def test() {
		val graph1 = new MultiGraph("g1")
		val graph2 = new MultiGraph("g2")
		// Each viewer consumes its graph through a thread proxy pipe, so the
		// viewers run decoupled from the graph-mutating thread.
		val viewer1 = new Viewer(new ThreadProxyPipe(graph1))
		val viewer2 = new Viewer(new ThreadProxyPipe(graph2))

		graph1.addAttribute("ui.stylesheet", styleSheet1)
		graph2.addAttribute("ui.stylesheet", styleSheet2)
		viewer1.addView(new DefaultView(viewer1, "view1", Viewer.newGraphRenderer))
		viewer2.addView(new DefaultView(viewer2, "view2", Viewer.newGraphRenderer))
		viewer1.enableAutoLayout
		viewer2.enableAutoLayout

		// The same generator feeds both graphs, so the two views show the same topology.
		val gen = new DorogovtsevMendesGenerator

		gen.addSink(graph1)
		gen.addSink(graph2)
		gen.begin
		for(i <- 0 until 100)
			gen.nextEvents
		gen.end

		gen.removeSink(graph1)
		gen.removeSink(graph2)

//		graph1.addNode("A")
//		graph1.addNode("B")
//		graph1.addNode("C")
//		graph1.addEdge("AB", "A", "B", true)
//		graph1.addEdge("BC", "B", "C", true)
//		graph1.addEdge("CA", "C", "A", true)
//		graph2.addNode("A")
//		graph2.addNode("B")
//		graph2.addNode("C")
//		graph2.addEdge("AB", "A", "B", true)
//		graph2.addEdge("BC", "B", "C", true)
//		graph2.addEdge("CA", "C", "A", true)

		setLayout(new GridLayout(1, 2))
		//add(new JButton("Button"))
		add(viewer1.getView("view1"))
		add(viewer2.getView("view2"))
		setSize(800, 600)
		setVisible(true)
		setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
	}

	// Red nodes on the left view, blue nodes on the right view.
	protected val styleSheet1 =
		"graph { padding: 40px; }" +
		"node { fill-color: red; stroke-mode: plain; stroke-color: black; }";

	protected val styleSheet2 =
		"graph { padding: 40px; }" +
		"node { fill-color: blue; stroke-mode: plain; stroke-color: black; }";
} | prismsoul/gedgraph | sources/prismsoul.genealogy.gedgraph/gs-ui/org/graphstream/ui/j2dviewer/renderer/test/TestTwoViewersInOneFrame.scala | Scala | gpl-2.0 | 3,924 |
package com.aesireanempire.eplus.blocks
import com.aesireanempire.eplus.EnchantingPlusMod
import com.aesireanempire.eplus.blocks.entities.TileEntityAdvEnchantmentTable
import net.minecraftforge.fml.common.registry.GameRegistry
import net.minecraft.init.{Blocks, Items}
import net.minecraft.item.ItemStack
/** Registration hub for this mod's blocks, tile entities, and their recipes. */
object EplusBlocks {

    /**
     * Creates any block objects that need to be initialized.
     * Registers the advanced enchantment table block and its tile entity;
     * must run during the mod's pre-init phase.
     */
    def preInit() = {
        GameRegistry.registerBlock(BlockAdvEnchantmentTable, BlockAdvEnchantmentTable.NAME)
        GameRegistry.registerTileEntity(classOf[TileEntityAdvEnchantmentTable], EnchantingPlusMod.MODID + ":" + BlockAdvEnchantmentTable.NAME)
    }

    /**
     * Registers all recipes needed for blocks.
     * The `asInstanceOf[Character]` casts box each Char key — presumably because
     * the recipe API takes java.lang.Object varargs; confirm before changing.
     */
    def init() = {
        GameRegistry.addShapedRecipe(
            new ItemStack(BlockAdvEnchantmentTable),
            // Shape: gold/book/gold, obsidian/table/obsidian, gold/ender-eye/gold.
            "gbg", "oto", "geg",
            'g'.asInstanceOf[Character], Items.gold_ingot, 'b'.asInstanceOf[Character], Items.writable_book,
            'o'.asInstanceOf[Character], Blocks.obsidian, 't'.asInstanceOf[Character], Blocks.enchanting_table,
            'e'.asInstanceOf[Character], Items.ender_eye
        )
    }
}
| darkhax/EnchantingPlus-Scala | src/main/scala/com/aesireanempire/eplus/blocks/EplusBlocks.scala | Scala | lgpl-3.0 | 1,175 |
/*
* Copyright 2001-2014 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic.anyvals
import org.scalactic.Resources
import scala.quoted._
/**
* Trait providing assertion methods that can be called at compile time from macros
* to validate literals in source code.
*
* <p>
* The intent of <code>CompileTimeAssertions</code> is to make it easier to create
* <code>AnyVal</code>s that restrict the values of types for which Scala supports
* literals: <code>Int</code>, <code>Long</code>, <code>Float</code>, <code>Double</code>, <code>Char</code>,
* and <code>String</code>. For example, if you are using odd integers in many places
* in your code, you might have validity checks scattered throughout your code. Here's
* an example of a method that both requires an odd <code>Int</code> is passed (as a
* <em>precondition</em>, and ensures an odd * <code>Int</code> is returned (as
* a <em>postcondition</em>):
* </p>
*
* <pre class="stHighlight">
* def nextOdd(i: Int): Int = {
* def isOdd(x: Int): Boolean = x.abs % 2 == 1
* require(isOdd(i))
* (i + 2) ensuring (isOdd(_))
* }
* </pre>
*
* <p>
* In either the precondition or postcondition check fails, an exception will
* be thrown at runtime. If you have many methods like this you may want to
* create a type to represent an odd <code>Int</code>, so that the checking
* for validity errors is isolated in just one place. By using an <code>AnyVal</code>
* you can avoid boxing the <code>Int</code>, which may be more efficient.
* This might look like:
* </p>
*
* <pre class="stHighlight">
* final class OddInt private (val value: Int) extends AnyVal {
* override def toString: String = s"OddInt($value)"
* }
*
* object OddInt {
* def apply(value: Int): OddInt = {
* require(value.abs % 2 == 1)
* new OddInt(value)
* }
* }
* </pre>
*
* <p>
* An <code>AnyVal</code> cannot have any constructor code, so to ensure that
* any <code>Int</code> passed to the <code>OddInt</code> constructor is actually
* odd, the constructor must be private. That way the only way to construct a
* new <code>OddInt</code> is via the <code>apply</code> factory method in the
* <code>OddInt</code> companion object, which can require that the value be
* odd. This design eliminates the need for placing <code>require</code> and
* <code>ensuring</code> clauses anywhere else that odd <code>Int</code>s are
* needed, because the type promises the constraint. The <code>nextOdd</code>
* method could, therefore, be rewritten as:
* </p>
*
* <pre class="stHighlight">
* def nextOdd(oi: OddInt): OddInt = OddInt(oi.value + 2)
* </pre>
*
* <p>
* Using the compile-time assertions provided by this trait, you can construct
* a factory method implemented via a macro that causes a compile failure
* if <code>OddInt.apply</code> is passed anything besides an odd
* <code>Int</code> literal. Class <code>OddInt</code> would look exactly the
* same as before:
* </p>
*
* <pre class="stHighlight">
* final class OddInt private (val value: Int) extends AnyVal {
* override def toString: String = s"OddInt($value)"
* }
* </pre>
*
* <p>
* In the companion object, however, the <code>apply</code> method would
* be implemented in terms of a macro. Because the <code>apply</code> method
* will only work with literals, you'll need a second method that can work
* an any expression of type <code>Int</code>. We recommend a <code>from</code> method
* that returns an <code>Option[OddInt]</code> that returns <code>Some[OddInt}</code> if the passed <code>Int</code> is odd,
* else returns <code>None</code>, and an <code>ensuringValid</code> method that returns an <code>OddInt</code>
* if the passed <code>Int</code> is valid, else throws <code>AssertionError</code>.
* </p>
*
* <pre class="stHighlight">
* object OddInt {
*
* // The from factory method validates at run time
* def from(value: Int): Option[OddInt] =
* if (OddIntMacro.isValid(value)) Some(new OddInt(value)) else None
*
* // The ensuringValid factory method validates at run time, but throws
* // an AssertionError if invalid
* def ensuringValid(value: Int): OddInt =
* if (OddIntMacro.isValid(value)) new OddInt(value) else {
* throw new AssertionError(s"$value was not a valid OddInt")
* }
*
* // The apply factory method validates at compile time
* import scala.language.experimental.macros
* def apply(value: Int): OddInt = macro OddIntMacro.apply
* }
* </pre>
*
* <p>
* The <code>apply</code> method refers to a macro implementation method in class
* <code>PosIntMacro</code>. The macro implementation of any such method can look
* very similar to this one. The only changes you'd need to make is the
* <code>isValid</code> method implementation and the text of the error messages.
* </p>
*
* <pre class="stHighlight">
* import org.scalactic.anyvals.CompileTimeAssertions
* import reflect.macros.Context
*
* object OddIntMacro extends CompileTimeAssertions {
*
* // Validation method used at both compile- and run-time
* def isValid(i: Int): Boolean = i.abs % 2 == 1
*
* // Apply macro that performs a compile-time assertion
* def apply(c: Context)(value: c.Expr[Int]): c.Expr[OddInt] = {
*
* // Prepare potential compiler error messages
* val notValidMsg = "OddInt.apply can only be invoked on odd Int literals, like OddInt(3)."
* val notLiteralMsg = "OddInt.apply can only be invoked on Int literals, like " +
* "OddInt(3). Please use OddInt.from instead."
*
* // Validate via a compile-time assertion
* ensureValidIntLiteral(c)(value, notValidMsg, notLiteralMsg)(isValid)
*
* // Validated, so rewrite the apply call to a from call
* c.universe.reify { OddInt.ensuringValid(value.splice) }
* }
* }
* </pre>
*
* <p>
* The <code>isValid</code> method just takes the underlying type and returns <code>true</code> if it is valid,
* else <code>false</code>. This method is placed here so the same valiation code can be used both in
* the <code>from</code> method at runtime and the <code>apply</code> macro at compile time. The <code>apply</code>
* actually does just two things. It calls a <code>ensureValidIntLiteral</code>, performing a compile-time assertion
* that value passed to <code>apply</code> is an <code>Int</code> literal that is valid (in this case, odd).
* If the assertion fails, <code>ensureValidIntLiteral</code> will complete abruptly with an exception that will
* contain an appropriate error message (one of the two you passed in) and cause a compiler error with that message.
* If the assertion succeeds, <code>ensureValidIntLiteral</code> will just return normally. The next line of code
* will then execute. This line of code must construct an AST (abstract syntax tree) of code that will replace
* the <code>OddInt.apply</code> invocation. We invoke the other factory method that either returns an <code>OddInt</code>
* or throws an <code>AssertionError</code>, since we've proven at compile time that the call will succeed.
* </p>
*
 * <p>
 * Note: the example above shows the historical Scala 2 macro API (<code>Context</code>,
 * <code>reify</code>). This implementation targets Scala 3 and uses the
 * <code>scala.quoted</code> API (<code>Expr</code>, <code>Quotes</code>, and the
 * reflection module) instead; the overall pattern — validate the literal at
 * compile time, then delegate to a runtime factory — is unchanged.
 * </p>
*/
trait CompileTimeAssertions {

  // Shared implementation behind every ensureValid*Literal method below.
  // The expression is inspected at macro-expansion time: if it is a literal,
  // its constant is rendered with toString, parsed back to the expected type
  // with `convert`, and checked with `isValid`. A literal that fails validation
  // is reported with `notValidMsg`; a non-literal expression is reported with
  // `notLiteralMsg`. Errors are positioned at the offending expression, so they
  // surface as compiler errors at the call site.
  private def ensureValidLiteral[T](
    value: Expr[T],
    notValidMsg: String,
    notLiteralMsg: String
  )(convert: String => T)(isValid: T => Boolean)(using Quotes): Unit = {
    import quotes.reflect._
    value.asTerm.underlyingArgument match {
      case Literal(const) =>
        if (!isValid(convert(const.value.toString)))
          report.error(notValidMsg, value.asTerm.pos)
      case _ =>
        report.error(notLiteralMsg, value.asTerm.pos)
    }
  }

  /**
   * Ensures a given expression of type <code>Int</code> is a literal with a valid value according to a given validation function.
   *
   * <p>
   * If the expression is a literal whose value satisfies <code>isValid</code>, this method returns normally. Otherwise a
   * compiler error is reported at the expression's position: <code>notLiteralMsg</code> if the expression is not a literal,
   * or <code>notValidMsg</code> if it is a literal that fails validation.
   * </p>
   *
   * @param value the <code>Int</code> expression to validate
   * @param notValidMsg a <code>String</code> message to report if the expression is a literal, but not valid
   * @param notLiteralMsg a <code>String</code> message to report if the expression is not a literal
   * @param isValid a function used to validate a literal value parsed from the given expression
   */
  def ensureValidIntLiteral(value: Expr[Int], notValidMsg: String, notLiteralMsg: String)(isValid: Int => Boolean)(using Quotes): Unit =
    ensureValidLiteral(value, notValidMsg, notLiteralMsg)(_.toInt)(isValid)

  /**
   * Ensures a given expression of type <code>Long</code> is a literal with a valid value according to a given validation function.
   *
   * <p>
   * If the expression is a literal whose value satisfies <code>isValid</code>, this method returns normally. Otherwise a
   * compiler error is reported at the expression's position: <code>notLiteralMsg</code> if the expression is not a literal,
   * or <code>notValidMsg</code> if it is a literal that fails validation.
   * </p>
   *
   * @param value the <code>Long</code> expression to validate
   * @param notValidMsg a <code>String</code> message to report if the expression is a literal, but not valid
   * @param notLiteralMsg a <code>String</code> message to report if the expression is not a literal
   * @param isValid a function used to validate a literal value parsed from the given expression
   */
  def ensureValidLongLiteral(value: Expr[Long], notValidMsg: String, notLiteralMsg: String)(isValid: Long => Boolean)(using Quotes): Unit =
    ensureValidLiteral(value, notValidMsg, notLiteralMsg)(_.toLong)(isValid)

  /**
   * Ensures a given expression of type <code>Float</code> is a literal with a valid value according to a given validation function.
   *
   * <p>
   * If the expression is a literal whose value satisfies <code>isValid</code>, this method returns normally. Otherwise a
   * compiler error is reported at the expression's position: <code>notLiteralMsg</code> if the expression is not a literal,
   * or <code>notValidMsg</code> if it is a literal that fails validation.
   * </p>
   *
   * @param value the <code>Float</code> expression to validate
   * @param notValidMsg a <code>String</code> message to report if the expression is a literal, but not valid
   * @param notLiteralMsg a <code>String</code> message to report if the expression is not a literal
   * @param isValid a function used to validate a literal value parsed from the given expression
   */
  def ensureValidFloatLiteral(value: Expr[Float], notValidMsg: String, notLiteralMsg: String)(isValid: Float => Boolean)(using Quotes): Unit =
    ensureValidLiteral(value, notValidMsg, notLiteralMsg)(_.toFloat)(isValid)

  /**
   * Ensures a given expression of type <code>Double</code> is a literal with a valid value according to a given validation function.
   *
   * <p>
   * If the expression is a literal whose value satisfies <code>isValid</code>, this method returns normally. Otherwise a
   * compiler error is reported at the expression's position: <code>notLiteralMsg</code> if the expression is not a literal,
   * or <code>notValidMsg</code> if it is a literal that fails validation.
   * </p>
   *
   * @param value the <code>Double</code> expression to validate
   * @param notValidMsg a <code>String</code> message to report if the expression is a literal, but not valid
   * @param notLiteralMsg a <code>String</code> message to report if the expression is not a literal
   * @param isValid a function used to validate a literal value parsed from the given expression
   */
  def ensureValidDoubleLiteral(value: Expr[Double], notValidMsg: String, notLiteralMsg: String)(isValid: Double => Boolean)(using Quotes): Unit =
    ensureValidLiteral(value, notValidMsg, notLiteralMsg)(_.toDouble)(isValid)

  /**
   * Ensures a given expression of type <code>String</code> is a literal with a valid value according to a given validation function.
   *
   * <p>
   * If the expression is a literal whose value satisfies <code>isValid</code>, this method returns normally. Otherwise a
   * compiler error is reported at the expression's position: <code>notLiteralMsg</code> if the expression is not a literal,
   * or <code>notValidMsg</code> if it is a literal that fails validation.
   * </p>
   *
   * @param value the <code>String</code> expression to validate
   * @param notValidMsg a <code>String</code> message to report if the expression is a literal, but not valid
   * @param notLiteralMsg a <code>String</code> message to report if the expression is not a literal
   * @param isValid a function used to validate a literal value parsed from the given expression
   */
  def ensureValidStringLiteral(value: Expr[String], notValidMsg: String, notLiteralMsg: String)(isValid: String => Boolean)(using Quotes): Unit =
    ensureValidLiteral(value, notValidMsg, notLiteralMsg)((s: String) => s)(isValid)

  /**
   * Ensures a given expression of type <code>Char</code> is a literal with a valid value according to a given validation function.
   *
   * <p>
   * If the expression is a literal whose value satisfies <code>isValid</code>, this method returns normally. Otherwise a
   * compiler error is reported at the expression's position: <code>notLiteralMsg</code> if the expression is not a literal,
   * or <code>notValidMsg</code> if it is a literal that fails validation.
   * </p>
   *
   * @param value the <code>Char</code> expression to validate
   * @param notValidMsg a <code>String</code> message to report if the expression is a literal, but not valid
   * @param notLiteralMsg a <code>String</code> message to report if the expression is not a literal
   * @param isValid a function used to validate a literal value parsed from the given expression
   */
  def ensureValidCharLiteral(value: Expr[Char], notValidMsg: String, notLiteralMsg: String)(isValid: Char => Boolean)(using Quotes): Unit =
    // A char constant renders as a one-character string; head recovers the Char.
    ensureValidLiteral(value, notValidMsg, notLiteralMsg)(_.head)(isValid)
}
/**
 * Companion object that facilitates the importing of <code>CompileTimeAssertions</code> members as
 * an alternative to mixing in the trait.
 *
 * <p>
 * Callers can write <code>import CompileTimeAssertions._</code> to use the
 * <code>ensureValid*Literal</code> methods without extending the trait.
 * </p>
 */
object CompileTimeAssertions extends CompileTimeAssertions
| scalatest/scalatest | dotty/scalactic/src/main/scala/org/scalactic/anyvals/CompileTimeAssertions.scala | Scala | apache-2.0 | 20,475 |
/**
* GENERATED CODE - DO NOT EDIT!
* Generated by foursquare.thrift.codegen v3.0.0-M15
*
*/
package io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors
import io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.java_thrift_descriptors.{
JavaAnnotation,
JavaAnnotationMeta,
JavaAnnotationMutable,
JavaAnnotationRaw,
JavaBaseType,
JavaBaseTypeMeta,
JavaBaseTypeMutable,
JavaBaseTypeRaw,
JavaConst,
JavaConstMeta,
JavaConstMutable,
JavaConstRaw,
JavaContainerType,
JavaContainerTypeMeta,
JavaContainerTypeMutable,
JavaContainerTypeRaw,
JavaEnum,
JavaEnumElement,
JavaEnumElementMeta,
JavaEnumElementMutable,
JavaEnumElementRaw,
JavaEnumMeta,
JavaEnumMutable,
JavaEnumRaw,
JavaException,
JavaExceptionMeta,
JavaExceptionMutable,
JavaExceptionRaw,
JavaField,
JavaFieldMeta,
JavaFieldMutable,
JavaFieldRaw,
JavaFunction,
JavaFunctionMeta,
JavaFunctionMutable,
JavaFunctionRaw,
JavaInclude,
JavaIncludeMeta,
JavaIncludeMutable,
JavaIncludeRaw,
JavaListType,
JavaListTypeMeta,
JavaListTypeMutable,
JavaListTypeRaw,
JavaMapType,
JavaMapTypeMeta,
JavaMapTypeMutable,
JavaMapTypeRaw,
JavaNamespace,
JavaNamespaceMeta,
JavaNamespaceMutable,
JavaNamespaceRaw,
JavaProgram,
JavaProgramMeta,
JavaProgramMutable,
JavaProgramRaw,
JavaRequiredness,
JavaService,
JavaServiceMeta,
JavaServiceMutable,
JavaServiceRaw,
JavaSetType,
JavaSetTypeMeta,
JavaSetTypeMutable,
JavaSetTypeRaw,
JavaSimpleBaseType,
JavaSimpleContainerType,
JavaSimpleContainerTypeMeta,
JavaSimpleContainerTypeMutable,
JavaSimpleContainerTypeRaw,
JavaSimpleType,
JavaSimpleTypeMeta,
JavaSimpleTypeMutable,
JavaSimpleTypeRaw,
JavaStruct,
JavaStructMeta,
JavaStructMutable,
JavaStructRaw,
JavaType,
JavaTypeMeta,
JavaTypeMutable,
JavaTypeRaw,
JavaTypeRegistry,
JavaTypeRegistryMeta,
JavaTypeRegistryMutable,
JavaTypeRegistryRaw,
JavaTypedef,
JavaTypedefMeta,
JavaTypedefMutable,
JavaTypedefRaw,
JavaTyperef,
JavaTyperefMeta,
JavaTyperefMutable,
JavaTyperefRaw,
JavaUnion,
JavaUnionMeta,
JavaUnionMutable,
JavaUnionRaw
}
/**
 * Generated Spindle enum type mirroring the Thrift <code>SimpleBaseType</code> enum.
 *
 * NOTE(review): this file carries a "GENERATED CODE - DO NOT EDIT!" header;
 * changes belong in the code generator, not here.
 */
sealed abstract class SimpleBaseType private (
override val id: Int,
override val name: String,
override val stringValue: String
) extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Enum[SimpleBaseType]
with JavaSimpleBaseType
with org.apache.thrift.TEnum {
// Renders as the wire string value rather than the class name.
override def toString: String = stringValue
override def getValue: Int = id
override def meta = SimpleBaseType
}
/**
 * Generated companion for [[SimpleBaseType]]: the enum instances, lookup
 * helpers keyed by id/name/string value, and implicit Ordering/Comparator.
 * (Generated file — do not hand-edit; see header.)
 */
object SimpleBaseType extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.EnumMeta[SimpleBaseType] {
object BOOL extends SimpleBaseType(0, "BOOL", "BOOL")
object BYTE extends SimpleBaseType(1, "BYTE", "BYTE")
object I16 extends SimpleBaseType(2, "I16", "I16")
object I32 extends SimpleBaseType(3, "I32", "I32")
object I64 extends SimpleBaseType(4, "I64", "I64")
object DOUBLE extends SimpleBaseType(5, "DOUBLE", "DOUBLE")
object STRING extends SimpleBaseType(6, "STRING", "STRING")
object BINARY extends SimpleBaseType(7, "BINARY", "BINARY")
// Preserves an unrecognized wire value (an Int id or a String name) instead of
// failing: id falls back to -1 and stringValue to "?" for the representation
// that was not supplied.
final case class UnknownWireValue(val v: Any)
extends SimpleBaseType(
if (v.isInstanceOf[Int]) v.asInstanceOf[Int] else -1,
"?",
if (v.isInstanceOf[String]) v.asInstanceOf[String] else "?"
)
override val values: Vector[SimpleBaseType] =
Vector(
BOOL,
BYTE,
I16,
I32,
I64,
DOUBLE,
STRING,
BINARY
)
// Lookup helpers: the *OrNull variants return null for an unknown input; the
// *OrUnknown variants wrap it in UnknownWireValue instead.
override def findByIdOrNull(id: Int): SimpleBaseType = id match {
case 0 => BOOL
case 1 => BYTE
case 2 => I16
case 3 => I32
case 4 => I64
case 5 => DOUBLE
case 6 => STRING
case 7 => BINARY
case _ => null
}
override def findByIdOrUnknown(id: Int): SimpleBaseType = findByIdOrNull(id) match {
case null => new UnknownWireValue(id)
case x: SimpleBaseType => x
}
override def findByNameOrNull(name: String): SimpleBaseType = name match {
case "BOOL" => BOOL
case "BYTE" => BYTE
case "I16" => I16
case "I32" => I32
case "I64" => I64
case "DOUBLE" => DOUBLE
case "STRING" => STRING
case "BINARY" => BINARY
case _ => null
}
override def findByStringValueOrNull(v: String): SimpleBaseType = v match {
case "BOOL" => SimpleBaseType.BOOL
case "BYTE" => SimpleBaseType.BYTE
case "I16" => SimpleBaseType.I16
case "I32" => SimpleBaseType.I32
case "I64" => SimpleBaseType.I64
case "DOUBLE" => SimpleBaseType.DOUBLE
case "STRING" => SimpleBaseType.STRING
case "BINARY" => SimpleBaseType.BINARY
case _ => null
}
override def findByStringValueOrUnknown(v: String): SimpleBaseType = findByStringValueOrNull(v) match {
case null => new UnknownWireValue(v)
case x: SimpleBaseType => x
}
// Ordering/Comparator instances both delegate to the enum's own compare.
implicit val __$ordering: _root_.scala.math.Ordering[SimpleBaseType] = {
new _root_.scala.math.Ordering[SimpleBaseType] {
override def compare(x: SimpleBaseType, y: SimpleBaseType): Int = x.compare(y)
}
}
implicit val __$comparator: _root_.java.util.Comparator[SimpleBaseType] = {
new _root_.java.util.Comparator[SimpleBaseType] {
override def compare(x: SimpleBaseType, y: SimpleBaseType): Int = x.compare(y)
}
}
}
/**
 * Generated Spindle enum type mirroring the Thrift <code>Requiredness</code>
 * enum (REQUIRED/OPTIONAL). Generated file — do not hand-edit; see header.
 */
sealed abstract class Requiredness private (
override val id: Int,
override val name: String,
override val stringValue: String
) extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Enum[Requiredness]
with JavaRequiredness
with org.apache.thrift.TEnum {
// Renders as the wire string value rather than the class name.
override def toString: String = stringValue
override def getValue: Int = id
override def meta = Requiredness
}
/**
 * Generated companion for [[Requiredness]]: the two enum instances, lookup
 * helpers, and implicit Ordering/Comparator. (Generated file — do not hand-edit.)
 */
object Requiredness extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.EnumMeta[Requiredness] {
object REQUIRED extends Requiredness(0, "REQUIRED", "REQUIRED")
object OPTIONAL extends Requiredness(1, "OPTIONAL", "OPTIONAL")
// Preserves an unrecognized wire value (Int id or String name) rather than failing.
final case class UnknownWireValue(val v: Any)
extends Requiredness(
if (v.isInstanceOf[Int]) v.asInstanceOf[Int] else -1,
"?",
if (v.isInstanceOf[String]) v.asInstanceOf[String] else "?"
)
override val values: Vector[Requiredness] =
Vector(
REQUIRED,
OPTIONAL
)
// Lookup helpers: *OrNull return null for unknown input; *OrUnknown wrap it.
override def findByIdOrNull(id: Int): Requiredness = id match {
case 0 => REQUIRED
case 1 => OPTIONAL
case _ => null
}
override def findByIdOrUnknown(id: Int): Requiredness = findByIdOrNull(id) match {
case null => new UnknownWireValue(id)
case x: Requiredness => x
}
override def findByNameOrNull(name: String): Requiredness = name match {
case "REQUIRED" => REQUIRED
case "OPTIONAL" => OPTIONAL
case _ => null
}
override def findByStringValueOrNull(v: String): Requiredness = v match {
case "REQUIRED" => Requiredness.REQUIRED
case "OPTIONAL" => Requiredness.OPTIONAL
case _ => null
}
override def findByStringValueOrUnknown(v: String): Requiredness = findByStringValueOrNull(v) match {
case null => new UnknownWireValue(v)
case x: Requiredness => x
}
// Ordering/Comparator instances both delegate to the enum's own compare.
implicit val __$ordering: _root_.scala.math.Ordering[Requiredness] = {
new _root_.scala.math.Ordering[Requiredness] {
override def compare(x: Requiredness, y: Requiredness): Int = x.compare(y)
}
}
implicit val __$comparator: _root_.java.util.Comparator[Requiredness] = {
new _root_.java.util.Comparator[Requiredness] {
override def compare(x: Requiredness, y: Requiredness): Int = x.compare(y)
}
}
}
/**
 * Generated companion for the Thrift <code>Include</code> struct: a
 * phantom-typed builder plus implicit Ordering/Comparator.
 * (Generated file — do not hand-edit; see header.)
 */
object Include extends IncludeMeta {
object Builder {
// Phantom-type "evidence" markers: Builder[State] accumulates HasPath once
// path(...) is called; result() is only callable when State proves all
// required fields were set (enforced via the <:< evidence below).
sealed trait HasPath
sealed trait MaybeSpecified
sealed class Specified extends MaybeSpecified
sealed class Unspecified extends MaybeSpecified
type HasAll = HasPath
type AllSpecified = Builder[HasAll]
type AllUnspecified = Builder[Any]
}
class Builder[+State] private[Include] (private var obj: RawInclude) {
def path(v: String): Include.Builder[State with Builder.HasPath] = {
obj.path_=(v)
this.asInstanceOf[Include.Builder[State with Builder.HasPath]]
}
// Hands out the underlying mutable record exactly once: obj is nulled so a
// second call throws instead of aliasing the already-released record.
def resultMutable()(implicit ev0: State <:< Builder.HasPath): MutableInclude = {
if (obj != null) {
val ret = obj
obj = null
ret
} else {
throw new IllegalStateException("Include.Builder.result invoked multiple times. Use a new Builder.")
}
}
def result()(implicit ev0: State <:< Builder.HasPath): Include = resultMutable()(ev0)
}
def newBuilder: Include.Builder.AllUnspecified = new Builder(Include.createRawRecord)
implicit val companionProvider: IncludeCompanionProvider = new IncludeCompanionProvider
// Ordering/Comparator instances both delegate to Include.compare.
implicit val __$ordering: _root_.scala.math.Ordering[Include] = {
new _root_.scala.math.Ordering[Include] {
override def compare(x: Include, y: Include): Int = x.compare(y)
}
}
implicit val __$comparator: _root_.java.util.Comparator[Include] = {
new _root_.java.util.Comparator[Include] {
override def compare(x: Include, y: Include): Int = x.compare(y)
}
}
}
/**
 * Generated metadata/record-provider for the <code>Include</code> struct:
 * Thrift wire descriptors, field-id tables, record factories, and the Spindle
 * field descriptor for the single optional <code>path</code> field.
 * (Generated file — do not hand-edit; see header.)
 */
class IncludeMeta
extends JavaIncludeMeta[Include, RawInclude, IncludeMeta]
with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Include] {
override def recordName: String = "Include"
// Thrift descriptors.
val INCLUDE_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Include")
// Field #1 "path": STRING on the wire, no field-level annotations.
val PATH_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"path",
org.apache.thrift.protocol.TType.STRING,
1, {
java.util.Collections.emptyMap[String, String]
}
)
// Sentinel descriptor (id -1, VOID) used for unrecognized fields.
val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
"path" -> PATH_FDESC
)
object _Fields {
case object path extends _Fields(1, "path")
}
sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
def getThriftFieldId: Short = id
def getFieldName: String = name
}
val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
1.toShort -> _Fields.path
)
// Record factories: both typed and untyped creation funnel into RawInclude.
override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
createRawRecord
override def createRecord: Include = createRawRecord
override def createRawRecord: RawInclude = new RawInclude
override def untypedIfInstanceFrom(
x: AnyRef
): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
override def ifInstanceFrom(x: AnyRef): Option[Include] = {
if (x.isInstanceOf[Include]) Some(x.asInstanceOf[Include]) else None
}
override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
// Spindle Descriptors.
// Field descriptor wiring getter/setter/unsetter for "path" onto RawInclude.
val path =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Include, IncludeMeta](
name = "path",
longName = "path",
id = 1,
annotations = Map(),
owner = this,
getter = _.pathOption,
setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Include], v: String) => {
r.asInstanceOf[RawInclude].path_=(v)
},
unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Include]) => {
r.asInstanceOf[RawInclude].pathUnset()
},
manifest = manifest[String]
)
override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
fields
override val fields
: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Include, IncludeMeta]] =
Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Include, IncludeMeta]](
path
)
// Convenience factory: builds a record with the path field set.
def apply(
path: String
): Include = {
val ret = this.createRawRecord
ret.path_=(path)
ret
}
}
/**
 * Generated glue exposing the <code>Include</code> companion object through the
 * Spindle <code>CompanionProvider</code> type class. (Generated file — do not hand-edit.)
 */
class IncludeCompanionProvider
extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Include] {
type CompanionT = IncludeMeta
override def provide: IncludeMeta = Include
}
/**
 * Generated read-only interface for the Thrift <code>Include</code> struct
 * (single optional <code>path</code> field), with comparison, copy, and
 * builder conversion helpers. (Generated file — do not hand-edit; see header.)
 */
trait Include
extends JavaInclude[
Include,
RawInclude,
IncludeMeta
]
with org.apache.thrift.TBase[Include, Include._Fields] {
override def meta: IncludeMeta
// Ordering: a null argument sorts first (returns 1); otherwise unset-before-set
// on pathIsSet, then by the path values themselves.
override def compare(that: Include): Int = {
var cmp: Int = 0
if (that == null) {
1
} else if ({
cmp = this.pathIsSet.compareTo(that.pathIsSet)
cmp != 0
}) cmp
else if (this.pathIsSet && {
cmp = this.pathOrNull.compareTo(that.pathOrNull)
cmp != 0
}) cmp
else 0
}
override def <(that: Include): Boolean = { this.compare(that) < 0 }
override def >(that: Include): Boolean = { this.compare(that) > 0 }
override def <=(that: Include): Boolean = { this.compare(that) <= 0 }
override def >=(that: Include): Boolean = { this.compare(that) >= 0 }
override def compareTo(that: Include): Int = compare(that)
def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
def deepCopy(): Include
def copy(
path: String = pathOrNull
): Include
// Always produces a fresh mutable record, copying the set fields over.
def mutableCopy(): MutableInclude = {
val ret = Include.createRawRecord
if (pathIsSet) ret.path_=(pathOrNull)
ret
}
/** Returns a pointer to a Mutable version of this record.
*
* If the underlying implementation is mutable, `this` will be returned.
* If the underlying implementation is immutable, a mutable copy will be returned.
*
* After mutating the instance returned by this method, the original instance
* (on which `mutable` was called) will be in an undefined state. It may or may
* not have been modified, depending on whether it was immutable or not.
*
* This is included as an optimization for when we want access to a Mutable record
* but don't want to pay the cost of copying every time.
*/
def mutable: MutableInclude
// Starts a builder in the AllSpecified state, pre-populated from this record.
def toBuilder(): Include.Builder.AllSpecified = {
val ret = new Include.Builder(Include.createRawRecord)
if (pathIsSet) ret.path(pathOrNull)
ret
}
def mergeCopy(that: Include): Include
}
/** Write interface for [[Include]]: adds a setter/unsetter per field and
 *  in-place merge. `mutable` is the identity because this record already is
 *  mutable.
 */
trait MutableInclude
  extends Include
  with JavaIncludeMutable[
    Include,
    RawInclude,
    IncludeMeta
  ] {
  def path_=(x: String): Unit
  def pathUnset(): Unit
  def merge(that: Include): Unit
  def copy(
    path: String = pathOrNull
  ): MutableInclude
  override def mutable: MutableInclude = this
}
/** Concrete mutable backing implementation of [[Include]].
 *
 *  Field presence is tracked by nullness of the backing var: `path` is "set"
 *  iff `_path` is non-null. Unknown wire fields seen during read() are kept
 *  (when RuntimeHelpers.preserveUnknownFields allows) so write() can
 *  re-serialize them.
 *
 *  Changes vs. generated original: `field_header` is a `val` (it was a `var`
 *  that was never reassigned), and `read`/`clear`/`setFieldValue` use explicit
 *  `: Unit =` instead of deprecated procedure syntax, matching `write`.
 */
final class RawInclude
  extends JavaIncludeRaw[
    Include,
    RawInclude,
    IncludeMeta
  ]
  with MutableInclude {
  override def meta: IncludeMeta = Include
  // fields
  // Field #1 - path
  private var _path: String = null // Underlying type: String
  override def path: String = pathOrThrow
  override def path_=(x: String): Unit = { _path = x }
  override def pathOption: Option[String] = if (pathIsSet) Some(_path) else None
  override def pathOrNull: String = _path
  override def pathOrThrow: String =
    if (pathIsSet) _path else throw new java.lang.NullPointerException("field path of Include missing")
  override def pathIsSet: Boolean = _path != null
  override def pathUnset(): Unit = { _path = null }
  // end fields
  // Unknown fields captured during read(), newest first (prepended).
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  /** Serializes every set field (and preserved unknown fields) to `oprot`. */
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Include.INCLUDE_SDESC)
    if (pathIsSet) {
      oprot.writeFieldBegin(Include.PATH_FDESC)
      oprot.writeString(_path)
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      // unknownFields was built by prepending; reverse to restore arrival order.
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  /** Populates this record from `iprot`, skipping mistyped fields and
   *  retaining unrecognized ones when the runtime is configured to.
   */
  override def read(iprot: org.apache.thrift.protocol.TProtocol): Unit = {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (and registers) the UnknownFields bucket for this read.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      val field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Include.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // path
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _path = iprot.readString()
            } else {
              // Wrong wire type for this id: skip the value rather than fail.
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure Include".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  /** Copies fields from `that` only where unset here (existing values win). */
  override def merge(that: Include): Unit = {
    if (that.pathIsSet && !this.pathIsSet) {
      this.path_=(that.pathOrNull)
    }
  }
  override def mergeCopy(that: Include): Include = {
    val ret = Include.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Include => this.equals(o)
    case _ => false
  }
  /** Field-by-field equality: each field either unset in both or equal. */
  def equals(that: Include): Boolean = {
    that != null &&
    (if (this.pathIsSet) (that.pathIsSet && this.pathOrNull == that.pathOrNull) else !that.pathIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (pathIsSet) hasher.append(_path.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (pathIsSet) ret = pathOrNull :: ret
    ret.reverse
  }
  /** Unsets every field and discards preserved unknown fields. */
  override def clear(): Unit = {
    pathUnset()
    unknownFields = Nil
  }
  def fieldForId(id: Int): Include._Fields = id match {
    case 1 => Include._Fields.path
    case _ => null
  }
  def isSet(field: Include._Fields): Boolean = field match {
    case Include._Fields.path => pathIsSet
    case _ => false
  }
  def getFieldValue(field: Include._Fields): AnyRef = field match {
    case Include._Fields.path => pathOrNull.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  def setFieldValue(field: Include._Fields, value: AnyRef): Unit = {
    field match {
      case Include._Fields.path => path_=(value.asInstanceOf[String])
      case _ =>
    }
  }
  /** Deep copy via a binary-protocol round trip through an in-memory buffer. */
  override def deepCopy(): RawInclude = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Include.createRawRecord
    ret.read(prot)
    ret
  }
  override def copy(
    path: String = pathOrNull
  ): RawInclude = {
    val ret = new RawInclude
    if (path != null) ret.path_=(path)
    ret
  }
  /** Human-readable rendering via the string protocol. */
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
object Namespace extends NamespaceMeta {
  object Builder {
    // Phantom-type markers recording which fields have been supplied.
    sealed trait HasLanguage
    sealed trait HasName
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasLanguage with HasName
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  /** Type-state builder for [[Namespace]]: `result` only compiles once both
   *  `language` and `name` have been provided, and the backing record may be
   *  claimed exactly once.
   */
  class Builder[+State] private[Namespace] (private var obj: RawNamespace) {
    def language(v: String): Namespace.Builder[State with Builder.HasLanguage] = {
      obj.language_=(v)
      this.asInstanceOf[Namespace.Builder[State with Builder.HasLanguage]]
    }
    def name(v: String): Namespace.Builder[State with Builder.HasName] = {
      obj.name_=(v)
      this.asInstanceOf[Namespace.Builder[State with Builder.HasName]]
    }
    def resultMutable()(
      implicit ev0: State <:< Builder.HasLanguage,
      ev1: State <:< Builder.HasName
    ): MutableNamespace = {
      // Guard: the backing record is handed out once, then nulled.
      if (obj == null) {
        throw new IllegalStateException("Namespace.Builder.result invoked multiple times. Use a new Builder.")
      }
      val built = obj
      obj = null
      built
    }
    def result()(implicit ev0: State <:< Builder.HasLanguage, ev1: State <:< Builder.HasName): Namespace =
      resultMutable()(ev0, ev1)
  }
  def newBuilder: Namespace.Builder.AllUnspecified = new Builder(Namespace.createRawRecord)
  implicit val companionProvider: NamespaceCompanionProvider = new NamespaceCompanionProvider
  // Both instances simply delegate to Namespace.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[Namespace] =
    new _root_.scala.math.Ordering[Namespace] {
      override def compare(x: Namespace, y: Namespace): Int = x.compare(y)
    }
  implicit val __$comparator: _root_.java.util.Comparator[Namespace] =
    new _root_.java.util.Comparator[Namespace] {
      override def compare(x: Namespace, y: Namespace): Int = x.compare(y)
    }
}
/** Metadata companion for the Namespace record: Thrift wire descriptors,
 *  Spindle field descriptors, and record factory methods.
 */
class NamespaceMeta
  extends JavaNamespaceMeta[Namespace, RawNamespace, NamespaceMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Namespace] {
  override def recordName: String = "Namespace"
  // Thrift descriptors.
  val NAMESPACE_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Namespace")
  val LANGUAGE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "language",
      org.apache.thrift.protocol.TType.STRING,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val NAME_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "name",
      org.apache.thrift.protocol.TType.STRING,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Used by name-based protocols to recover field id/type from the wire name.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "language" -> LANGUAGE_FDESC,
    "name" -> NAME_FDESC
  )
  object _Fields {
    case object language extends _Fields(1, "language")
    case object name extends _Fields(2, "name")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.language,
    2.toShort -> _Fields.name
  )
  // Record factories: all creation funnels through createRawRecord.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: Namespace = createRawRecord
  override def createRawRecord: RawNamespace = new RawNamespace
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[Namespace] = {
    if (x.isInstanceOf[Namespace]) Some(x.asInstanceOf[Namespace]) else None
  }
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
  // Spindle Descriptors.
  // Descriptor for optional field #1 ("language"); raw accessors cast to RawNamespace.
  val language =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[
      String,
      Namespace,
      NamespaceMeta
    ](
      name = "language",
      longName = "language",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.languageOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Namespace], v: String) => {
        r.asInstanceOf[RawNamespace].language_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Namespace]) => {
        r.asInstanceOf[RawNamespace].languageUnset()
      },
      manifest = manifest[String]
    )
  // Descriptor for optional field #2 ("name").
  val name =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[
      String,
      Namespace,
      NamespaceMeta
    ](
      name = "name",
      longName = "name",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.nameOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Namespace], v: String) => {
        r.asInstanceOf[RawNamespace].name_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Namespace]) => {
        r.asInstanceOf[RawNamespace].nameUnset()
      },
      manifest = manifest[String]
    )
  // Type-erased view of `fields` for untyped-descriptor callers.
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  // All field descriptors, in field-id order.
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Namespace, NamespaceMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Namespace, NamespaceMeta]](
      language,
      name
    )
  // Convenience factory setting both fields.
  def apply(
    language: String,
    name: String
  ): Namespace = {
    val ret = this.createRawRecord
    ret.language_=(language)
    ret.name_=(name)
    ret
  }
}
/** Exposes the Namespace companion (its meta) through the generic
 *  CompanionProvider machinery; bound as an implicit in `object Namespace`.
 */
class NamespaceCompanionProvider
  extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Namespace] {
  type CompanionT = NamespaceMeta
  override def provide: NamespaceMeta = Namespace
}
/** Read-only interface for the generated Namespace record (optional string
 *  fields `language` and `name`). Concrete state lives in RawNamespace.
 */
trait Namespace
  extends JavaNamespace[
    Namespace,
    RawNamespace,
    NamespaceMeta
  ]
  with org.apache.thrift.TBase[Namespace, Namespace._Fields] {
  override def meta: NamespaceMeta
  /** Orders field by field in id order: presence flag first (unset before
   *  set), then value; `language` before `name`. A null `that` compares as
   *  smaller than any record (returns 1).
   */
  override def compare(that: Namespace): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.languageIsSet.compareTo(that.languageIsSet)
      cmp != 0
    }) cmp
    else if (this.languageIsSet && {
      cmp = this.languageOrNull.compareTo(that.languageOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.nameIsSet.compareTo(that.nameIsSet)
      cmp != 0
    }) cmp
    else if (this.nameIsSet && {
      cmp = this.nameOrNull.compareTo(that.nameOrNull)
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: Namespace): Boolean = { this.compare(that) < 0 }
  override def >(that: Namespace): Boolean = { this.compare(that) > 0 }
  override def <=(that: Namespace): Boolean = { this.compare(that) <= 0 }
  override def >=(that: Namespace): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: Namespace): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): Namespace
  def copy(
    language: String = languageOrNull,
    name: String = nameOrNull
  ): Namespace
  /** Builds a fresh mutable record carrying over every set field. */
  def mutableCopy(): MutableNamespace = {
    val ret = Namespace.createRawRecord
    if (languageIsSet) ret.language_=(languageOrNull)
    if (nameIsSet) ret.name_=(nameOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
   *
   * If the underlying implementation is mutable, `this` will be returned.
   * If the underlying implementation is immutable, a mutable copy will be returned.
   *
   * After mutating the instance returned by this method, the original instance
   * (on which `mutable` was called) will be in an undefined state. It may or may
   * not have been modified, depending on whether it was immutable or not.
   *
   * This is included as an optimization for when we want access to a Mutable record
   * but don't want to pay the cost of copying every time.
   */
  def mutable: MutableNamespace
  /** Starts a Builder pre-populated with this record's set fields. */
  def toBuilder(): Namespace.Builder.AllSpecified = {
    val ret = new Namespace.Builder(Namespace.createRawRecord)
    if (languageIsSet) ret.language(languageOrNull)
    if (nameIsSet) ret.name(nameOrNull)
    ret
  }
  def mergeCopy(that: Namespace): Namespace
}
/** Write interface for [[Namespace]]: adds a setter/unsetter per field and
 *  in-place merge. `mutable` is the identity because this record already is
 *  mutable.
 */
trait MutableNamespace
  extends Namespace
  with JavaNamespaceMutable[
    Namespace,
    RawNamespace,
    NamespaceMeta
  ] {
  def language_=(x: String): Unit
  def languageUnset(): Unit
  def name_=(x: String): Unit
  def nameUnset(): Unit
  def merge(that: Namespace): Unit
  def copy(
    language: String = languageOrNull,
    name: String = nameOrNull
  ): MutableNamespace
  override def mutable: MutableNamespace = this
}
/** Concrete mutable backing implementation of [[Namespace]].
 *
 *  Field presence is tracked by nullness of the backing vars: a field is
 *  "set" iff its underlying reference is non-null. Unknown wire fields seen
 *  during read() are retained (when RuntimeHelpers.preserveUnknownFields
 *  allows) so write() can re-serialize them.
 */
final class RawNamespace
  extends JavaNamespaceRaw[
    Namespace,
    RawNamespace,
    NamespaceMeta
  ]
  with MutableNamespace {
  override def meta: NamespaceMeta = Namespace
  // fields
  // Field #1 - language
  private var _language: String = null // Underlying type: String
  override def language: String = languageOrThrow
  override def language_=(x: String): Unit = { _language = x }
  override def languageOption: Option[String] = if (languageIsSet) Some(_language) else None
  override def languageOrNull: String = _language
  override def languageOrThrow: String =
    if (languageIsSet) _language else throw new java.lang.NullPointerException("field language of Namespace missing")
  override def languageIsSet: Boolean = _language != null
  override def languageUnset(): Unit = { _language = null }
  // Field #2 - name
  private var _name: String = null // Underlying type: String
  override def name: String = nameOrThrow
  override def name_=(x: String): Unit = { _name = x }
  override def nameOption: Option[String] = if (nameIsSet) Some(_name) else None
  override def nameOrNull: String = _name
  override def nameOrThrow: String =
    if (nameIsSet) _name else throw new java.lang.NullPointerException("field name of Namespace missing")
  override def nameIsSet: Boolean = _name != null
  override def nameUnset(): Unit = { _name = null }
  // end fields
  // Unknown fields captured during read(), newest first (prepended).
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  // Serializes every set field (and preserved unknown fields) to the protocol.
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Namespace.NAMESPACE_SDESC)
    if (languageIsSet) {
      oprot.writeFieldBegin(Namespace.LANGUAGE_FDESC)
      oprot.writeString(_language)
      oprot.writeFieldEnd()
    }
    if (nameIsSet) {
      oprot.writeFieldBegin(Namespace.NAME_FDESC)
      oprot.writeString(_name)
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      // unknownFields was built by prepending; reverse to restore arrival order.
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  // Populates this record from the protocol, skipping mistyped fields and
  // retaining unrecognized ones when the runtime is configured to.
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (and registers) the UnknownFields bucket for this read.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Namespace.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // language
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _language = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // name
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _name = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure Namespace".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  // Copies fields from `that` only where unset here (existing values win).
  override def merge(that: Namespace): Unit = {
    if (that.languageIsSet && !this.languageIsSet) {
      this.language_=(that.languageOrNull)
    }
    if (that.nameIsSet && !this.nameIsSet) {
      this.name_=(that.nameOrNull)
    }
  }
  override def mergeCopy(that: Namespace): Namespace = {
    val ret = Namespace.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Namespace => this.equals(o)
    case _ => false
  }
  // Field-by-field equality: each field either unset in both or equal.
  def equals(that: Namespace): Boolean = {
    that != null &&
    (if (this.languageIsSet) (that.languageIsSet && this.languageOrNull == that.languageOrNull)
     else !that.languageIsSet) &&
    (if (this.nameIsSet) (that.nameIsSet && this.nameOrNull == that.nameOrNull) else !that.nameIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (languageIsSet) hasher.append(_language.##)
    if (nameIsSet) hasher.append(_name.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (languageIsSet) ret = languageOrNull :: ret
    if (nameIsSet) ret = nameOrNull :: ret
    ret.reverse
  }
  // Unsets every field and discards preserved unknown fields.
  override def clear() {
    languageUnset()
    nameUnset()
    unknownFields = Nil
  }
  def fieldForId(id: Int): Namespace._Fields = id match {
    case 1 => Namespace._Fields.language
    case 2 => Namespace._Fields.name
    case _ => null
  }
  def isSet(field: Namespace._Fields): Boolean = field match {
    case Namespace._Fields.language => languageIsSet
    case Namespace._Fields.name => nameIsSet
    case _ => false
  }
  def getFieldValue(field: Namespace._Fields): AnyRef = field match {
    case Namespace._Fields.language => languageOrNull.asInstanceOf[AnyRef]
    case Namespace._Fields.name => nameOrNull.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  def setFieldValue(field: Namespace._Fields, value: AnyRef) {
    field match {
      case Namespace._Fields.language => language_=(value.asInstanceOf[String])
      case Namespace._Fields.name => name_=(value.asInstanceOf[String])
      case _ =>
    }
  }
  // Deep copy via a binary-protocol round trip through an in-memory buffer.
  override def deepCopy(): RawNamespace = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Namespace.createRawRecord
    ret.read(prot)
    ret
  }
  override def copy(
    language: String = languageOrNull,
    name: String = nameOrNull
  ): RawNamespace = {
    val ret = new RawNamespace
    if (language != null) ret.language_=(language)
    if (name != null) ret.name_=(name)
    ret
  }
  // Human-readable rendering via the string protocol.
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
object Annotation extends AnnotationMeta {
  object Builder {
    // Phantom-type markers recording which fields have been supplied.
    sealed trait HasKey
    sealed trait HasValue
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasKey with HasValue
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  /** Type-state builder for [[Annotation]]: `result` only compiles once both
   *  `key` and `value` have been provided, and the backing record may be
   *  claimed exactly once.
   */
  class Builder[+State] private[Annotation] (private var obj: RawAnnotation) {
    def key(v: String): Annotation.Builder[State with Builder.HasKey] = {
      obj.key_=(v)
      this.asInstanceOf[Annotation.Builder[State with Builder.HasKey]]
    }
    def value(v: String): Annotation.Builder[State with Builder.HasValue] = {
      obj.value_=(v)
      this.asInstanceOf[Annotation.Builder[State with Builder.HasValue]]
    }
    def resultMutable()(implicit ev0: State <:< Builder.HasKey, ev1: State <:< Builder.HasValue): MutableAnnotation = {
      // Guard: the backing record is handed out once, then nulled.
      if (obj == null) {
        throw new IllegalStateException("Annotation.Builder.result invoked multiple times. Use a new Builder.")
      }
      val built = obj
      obj = null
      built
    }
    def result()(implicit ev0: State <:< Builder.HasKey, ev1: State <:< Builder.HasValue): Annotation =
      resultMutable()(ev0, ev1)
  }
  def newBuilder: Annotation.Builder.AllUnspecified = new Builder(Annotation.createRawRecord)
  implicit val companionProvider: AnnotationCompanionProvider = new AnnotationCompanionProvider
  // Both instances simply delegate to Annotation.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[Annotation] =
    new _root_.scala.math.Ordering[Annotation] {
      override def compare(x: Annotation, y: Annotation): Int = x.compare(y)
    }
  implicit val __$comparator: _root_.java.util.Comparator[Annotation] =
    new _root_.java.util.Comparator[Annotation] {
      override def compare(x: Annotation, y: Annotation): Int = x.compare(y)
    }
}
/** Metadata companion for the Annotation record: Thrift wire descriptors,
 *  Spindle field descriptors, and record factory methods.
 */
class AnnotationMeta
  extends JavaAnnotationMeta[Annotation, RawAnnotation, AnnotationMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Annotation] {
  override def recordName: String = "Annotation"
  // Thrift descriptors.
  val ANNOTATION_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Annotation")
  val KEY_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "key",
      org.apache.thrift.protocol.TType.STRING,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val VALUE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "value",
      org.apache.thrift.protocol.TType.STRING,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Used by name-based protocols to recover field id/type from the wire name.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "key" -> KEY_FDESC,
    "value" -> VALUE_FDESC
  )
  object _Fields {
    case object key extends _Fields(1, "key")
    case object value extends _Fields(2, "value")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.key,
    2.toShort -> _Fields.value
  )
  // Record factories: all creation funnels through createRawRecord.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: Annotation = createRawRecord
  override def createRawRecord: RawAnnotation = new RawAnnotation
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[Annotation] = {
    if (x.isInstanceOf[Annotation]) Some(x.asInstanceOf[Annotation]) else None
  }
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
  // Spindle Descriptors.
  // Descriptor for optional field #1 ("key"); raw accessors cast to RawAnnotation.
  val key =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[
      String,
      Annotation,
      AnnotationMeta
    ](
      name = "key",
      longName = "key",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.keyOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Annotation], v: String) => {
        r.asInstanceOf[RawAnnotation].key_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Annotation]) => {
        r.asInstanceOf[RawAnnotation].keyUnset()
      },
      manifest = manifest[String]
    )
  // Descriptor for optional field #2 ("value").
  val value =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[
      String,
      Annotation,
      AnnotationMeta
    ](
      name = "value",
      longName = "value",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.valueOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Annotation], v: String) => {
        r.asInstanceOf[RawAnnotation].value_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Annotation]) => {
        r.asInstanceOf[RawAnnotation].valueUnset()
      },
      manifest = manifest[String]
    )
  // Type-erased view of `fields` for untyped-descriptor callers.
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  // All field descriptors, in field-id order.
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Annotation, AnnotationMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Annotation, AnnotationMeta]](
      key,
      value
    )
  // Convenience factory setting both fields.
  def apply(
    key: String,
    value: String
  ): Annotation = {
    val ret = this.createRawRecord
    ret.key_=(key)
    ret.value_=(value)
    ret
  }
}
/** Exposes the Annotation companion (its meta) through the generic
 *  CompanionProvider machinery; bound as an implicit in `object Annotation`.
 */
class AnnotationCompanionProvider
  extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Annotation] {
  type CompanionT = AnnotationMeta
  override def provide: AnnotationMeta = Annotation
}
/** Read-only interface for the generated Annotation record (optional string
 *  fields `key` and `value`). Concrete state lives in RawAnnotation.
 */
trait Annotation
  extends JavaAnnotation[
    Annotation,
    RawAnnotation,
    AnnotationMeta
  ]
  with org.apache.thrift.TBase[Annotation, Annotation._Fields] {
  override def meta: AnnotationMeta
  /** Orders field by field in id order: presence flag first (unset before
   *  set), then value; `key` before `value`. A null `that` compares as
   *  smaller than any record (returns 1).
   */
  override def compare(that: Annotation): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.keyIsSet.compareTo(that.keyIsSet)
      cmp != 0
    }) cmp
    else if (this.keyIsSet && {
      cmp = this.keyOrNull.compareTo(that.keyOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.valueIsSet.compareTo(that.valueIsSet)
      cmp != 0
    }) cmp
    else if (this.valueIsSet && {
      cmp = this.valueOrNull.compareTo(that.valueOrNull)
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: Annotation): Boolean = { this.compare(that) < 0 }
  override def >(that: Annotation): Boolean = { this.compare(that) > 0 }
  override def <=(that: Annotation): Boolean = { this.compare(that) <= 0 }
  override def >=(that: Annotation): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: Annotation): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): Annotation
  def copy(
    key: String = keyOrNull,
    value: String = valueOrNull
  ): Annotation
  /** Builds a fresh mutable record carrying over every set field. */
  def mutableCopy(): MutableAnnotation = {
    val ret = Annotation.createRawRecord
    if (keyIsSet) ret.key_=(keyOrNull)
    if (valueIsSet) ret.value_=(valueOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
   *
   * If the underlying implementation is mutable, `this` will be returned.
   * If the underlying implementation is immutable, a mutable copy will be returned.
   *
   * After mutating the instance returned by this method, the original instance
   * (on which `mutable` was called) will be in an undefined state. It may or may
   * not have been modified, depending on whether it was immutable or not.
   *
   * This is included as an optimization for when we want access to a Mutable record
   * but don't want to pay the cost of copying every time.
   */
  def mutable: MutableAnnotation
  /** Starts a Builder pre-populated with this record's set fields. */
  def toBuilder(): Annotation.Builder.AllSpecified = {
    val ret = new Annotation.Builder(Annotation.createRawRecord)
    if (keyIsSet) ret.key(keyOrNull)
    if (valueIsSet) ret.value(valueOrNull)
    ret
  }
  def mergeCopy(that: Annotation): Annotation
}
/** Write interface for [[Annotation]]: adds a setter/unsetter per field and
 *  in-place merge. `mutable` is the identity because this record already is
 *  mutable.
 */
trait MutableAnnotation
  extends Annotation
  with JavaAnnotationMutable[
    Annotation,
    RawAnnotation,
    AnnotationMeta
  ] {
  def key_=(x: String): Unit
  def keyUnset(): Unit
  def value_=(x: String): Unit
  def valueUnset(): Unit
  def merge(that: Annotation): Unit
  def copy(
    key: String = keyOrNull,
    value: String = valueOrNull
  ): MutableAnnotation
  override def mutable: MutableAnnotation = this
}
/** Concrete, mutable implementation of the [[Annotation]] Thrift record.
  *
  * Field presence is tracked with a null sentinel (a field is "set" iff its
  * backing var is non-null), so setting a field to null is equivalent to
  * unsetting it. Unknown fields encountered during [[read]] may be preserved
  * and re-emitted by [[write]] (see RuntimeHelpers.preserveUnknownFields).
  */
final class RawAnnotation
  extends JavaAnnotationRaw[
    Annotation,
    RawAnnotation,
    AnnotationMeta
  ]
  with MutableAnnotation {
  override def meta: AnnotationMeta = Annotation
  // fields
  // Field #1 - key
  private var _key: String = null // Underlying type: String
  override def key: String = keyOrThrow
  override def key_=(x: String): Unit = { _key = x }
  override def keyOption: Option[String] = if (keyIsSet) Some(_key) else None
  override def keyOrNull: String = _key
  override def keyOrThrow: String =
    if (keyIsSet) _key else throw new java.lang.NullPointerException("field key of Annotation missing")
  override def keyIsSet: Boolean = _key != null
  override def keyUnset(): Unit = { _key = null }
  // Field #2 - value
  private var _value: String = null // Underlying type: String
  override def value: String = valueOrThrow
  override def value_=(x: String): Unit = { _value = x }
  override def valueOption: Option[String] = if (valueIsSet) Some(_value) else None
  override def valueOrNull: String = _value
  override def valueOrThrow: String =
    if (valueIsSet) _value else throw new java.lang.NullPointerException("field value of Annotation missing")
  override def valueIsSet: Boolean = _value != null
  override def valueUnset(): Unit = { _value = null }
  // end fields
  // Unknown fields captured by read(), newest first; write() replays them in original order.
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  /** Serializes only the set fields (plus any preserved unknown fields) to `oprot`. */
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Annotation.ANNOTATION_SDESC)
    if (keyIsSet) {
      oprot.writeFieldBegin(Annotation.KEY_FDESC)
      oprot.writeString(_key)
      oprot.writeFieldEnd()
    }
    if (valueIsSet) {
      oprot.writeFieldBegin(Annotation.VALUE_FDESC)
      oprot.writeString(_value)
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  /** Deserializes fields from `iprot` into this record; unrecognized fields are
    * either preserved or skipped depending on the runtime configuration.
    */
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (at most once per read) the container for unknown fields.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Annotation.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // key
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _key = iprot.readString()
            } else {
              // Wire type mismatch: skip rather than fail, per Thrift convention.
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // value
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _value = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          // Wrap to identify which field failed during deserialization.
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure Annotation".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  /** Copies each field from `that` only when it is set there and unset here
    * (i.e. this record's values win on conflict).
    */
  override def merge(that: Annotation): Unit = {
    if (that.keyIsSet && !this.keyIsSet) {
      this.key_=(that.keyOrNull)
    }
    if (that.valueIsSet && !this.valueIsSet) {
      this.value_=(that.valueOrNull)
    }
  }
  /** Non-destructive merge: builds a fresh record from this, then overlays `that`. */
  override def mergeCopy(that: Annotation): Annotation = {
    val ret = Annotation.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Annotation => this.equals(o)
    case _ => false
  }
  // Two records are equal when each field has the same set-ness and, if set, the same value.
  def equals(that: Annotation): Boolean = {
    that != null &&
    (if (this.keyIsSet) (that.keyIsSet && this.keyOrNull == that.keyOrNull) else !that.keyIsSet) &&
    (if (this.valueIsSet) (that.valueIsSet && this.valueOrNull == that.valueOrNull) else !that.valueIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (keyIsSet) hasher.append(_key.##)
    if (valueIsSet) hasher.append(_value.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (keyIsSet) ret = keyOrNull :: ret
    if (valueIsSet) ret = valueOrNull :: ret
    ret.reverse
  }
  // Unsets every field and drops any preserved unknown fields.
  override def clear() {
    keyUnset()
    valueUnset()
    unknownFields = Nil
  }
  // TBase support: maps a Thrift field id to its _Fields value (null if unknown).
  def fieldForId(id: Int): Annotation._Fields = id match {
    case 1 => Annotation._Fields.key
    case 2 => Annotation._Fields.value
    case _ => null
  }
  def isSet(field: Annotation._Fields): Boolean = field match {
    case Annotation._Fields.key => keyIsSet
    case Annotation._Fields.value => valueIsSet
    case _ => false
  }
  def getFieldValue(field: Annotation._Fields): AnyRef = field match {
    case Annotation._Fields.key => keyOrNull.asInstanceOf[AnyRef]
    case Annotation._Fields.value => valueOrNull.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  def setFieldValue(field: Annotation._Fields, value: AnyRef) {
    field match {
      case Annotation._Fields.key => key_=(value.asInstanceOf[String])
      case Annotation._Fields.value => value_=(value.asInstanceOf[String])
      case _ =>
    }
  }
  /** Deep copy via an in-memory binary serialization round trip. */
  override def deepCopy(): RawAnnotation = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Annotation.createRawRecord
    ret.read(prot)
    ret
  }
  // Note: a null argument leaves the corresponding field unset in the copy.
  override def copy(
    key: String = keyOrNull,
    value: String = valueOrNull
  ): RawAnnotation = {
    val ret = new RawAnnotation
    if (key != null) ret.key_=(key)
    if (value != null) ret.value_=(value)
    ret
  }
  // Human-readable rendering via the string protocol (not the wire format).
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion object for [[BaseType]] records.
  *
  * Hosts the type-safe [[Builder]] (which tracks at the type level whether the
  * required `simpleBaseType` field has been supplied) plus implicit ordering,
  * comparator, and companion-provider instances.
  */
object BaseType extends BaseTypeMeta {
  object Builder {
    // Phantom-type markers: a Builder's State accumulates these traits as
    // required fields are set, so result() only compiles once all are present.
    sealed trait HasSimpleBaseType
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasSimpleBaseType
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[BaseType] (private var obj: RawBaseType) {
    // Setting the required field upgrades the builder's State type.
    def simpleBaseType(
      v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType
    ): BaseType.Builder[State with Builder.HasSimpleBaseType] = {
      obj.simpleBaseType_=(v)
      this.asInstanceOf[BaseType.Builder[State with Builder.HasSimpleBaseType]]
    }
    def __annotations(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
    ): BaseType.Builder[State] = {
      obj.__annotations_=(v)
      this
    }
    // Option overload: None unsets the field.
    def __annotations(
      vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    ): BaseType.Builder[State] = {
      vOpt match {
        case Some(v) => obj.__annotations_=(v)
        case None => obj.annotationsUnset()
      }
      this
    }
    // Single-use: hands out the record being built and nulls the internal
    // reference so a second call fails fast instead of aliasing state.
    def resultMutable()(implicit ev0: State <:< Builder.HasSimpleBaseType): MutableBaseType = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("BaseType.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasSimpleBaseType): BaseType = resultMutable()(ev0)
  }
  def newBuilder: BaseType.Builder.AllUnspecified = new Builder(BaseType.createRawRecord)
  implicit val companionProvider: BaseTypeCompanionProvider = new BaseTypeCompanionProvider
  // Ordering/Comparator both delegate to BaseType.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[BaseType] = {
    new _root_.scala.math.Ordering[BaseType] {
      override def compare(x: BaseType, y: BaseType): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[BaseType] = {
    new _root_.java.util.Comparator[BaseType] {
      override def compare(x: BaseType, y: BaseType): Int = x.compare(y)
    }
  }
}
/** Metadata for the [[BaseType]] record: Thrift struct/field descriptors,
  * wire-name and id lookup tables, record factories, and Spindle field
  * descriptors with raw getter/setter/unsetter hooks.
  */
class BaseTypeMeta
    extends JavaBaseTypeMeta[BaseType, RawBaseType, BaseTypeMeta]
    with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[BaseType] {
  override def recordName: String = "BaseType"
  // Thrift descriptors.
  val BASETYPE_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("BaseType")
  // Field #1: simpleBaseType (enum, serialized as I32).
  val SIMPLEBASETYPE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "simpleBaseType",
      org.apache.thrift.protocol.TType.I32,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  // Field #99: annotations (list of Annotation structs).
  val ANNOTATIONS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "annotations",
      org.apache.thrift.protocol.TType.LIST,
      99, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Name-based lookup for protocols (e.g. JSON/BSON) that serialize field names.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "simpleBaseType" -> SIMPLEBASETYPE_FDESC,
    "annotations" -> ANNOTATIONS_FDESC
  )
  object _Fields {
    case object simpleBaseType extends _Fields(1, "simpleBaseType")
    case object __annotations extends _Fields(99, "annotations")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.simpleBaseType,
    99.toShort -> _Fields.__annotations
  )
  // Record factories: all produce a fresh RawBaseType.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: BaseType = createRawRecord
  override def createRawRecord: RawBaseType = new RawBaseType
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  // Safe downcast helper: Some(x) only when x is a BaseType.
  override def ifInstanceFrom(x: AnyRef): Option[BaseType] = {
    if (x.isInstanceOf[BaseType]) Some(x.asInstanceOf[BaseType]) else None
  }
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
  // Spindle Descriptors.
  val simpleBaseType =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType,
      BaseType,
      BaseTypeMeta
    ](
      name = "simpleBaseType",
      longName = "simpleBaseType",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.simpleBaseTypeOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[BaseType],
        v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType
      ) => { r.asInstanceOf[RawBaseType].simpleBaseType_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[BaseType]) => {
        r.asInstanceOf[RawBaseType].simpleBaseTypeUnset()
      },
      manifest = manifest[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType]
    )
  val __annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
    ], BaseType, BaseTypeMeta](
      name = "annotations",
      longName = "annotations",
      id = 99,
      annotations = Map(),
      owner = this,
      getter = _.annotationsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[BaseType],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
      ) => { r.asInstanceOf[RawBaseType].__annotations_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[BaseType]) => {
        r.asInstanceOf[RawBaseType].annotationsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, BaseType, BaseTypeMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, BaseType, BaseTypeMeta]](
      simpleBaseType,
      __annotations
    )
  // Convenience constructor that sets both fields.
  def apply(
    simpleBaseType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): BaseType = {
    val ret = this.createRawRecord
    ret.simpleBaseType_=(simpleBaseType)
    ret.__annotations_=(__annotations)
    ret
  }
}
/** Exposes the [[BaseType]] companion (its [[BaseTypeMeta]]) through the
  * spindle runtime's generic CompanionProvider lookup mechanism.
  */
class BaseTypeCompanionProvider
    extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[BaseType] {
  override type CompanionT = BaseTypeMeta
  // The BaseType singleton doubles as the meta/companion instance.
  override def provide: CompanionT = BaseType
}
/** Read-only interface for the BaseType Thrift record (fields: simpleBaseType
  * #1, annotations #99). Provides ordering, builder conversion, and copy/merge
  * hooks implemented by [[RawBaseType]].
  */
trait BaseType
  extends JavaBaseType[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    BaseType,
    RawBaseType,
    BaseTypeMeta
  ]
  with org.apache.thrift.TBase[BaseType, BaseType._Fields] {
  override def meta: BaseTypeMeta
  /** Field-by-field comparison in id order: unset sorts before set, then
    * values are compared. A null `that` sorts before any record.
    */
  override def compare(that: BaseType): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.simpleBaseTypeIsSet.compareTo(that.simpleBaseTypeIsSet)
      cmp != 0
    }) cmp
    else if (this.simpleBaseTypeIsSet && {
      cmp = this.simpleBaseTypeOrNull.compareTo(that.simpleBaseTypeOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.annotationsIsSet.compareTo(that.annotationsIsSet)
      cmp != 0
    }) cmp
    else if (this.annotationsIsSet && {
      // Lists are compared via Thrift's helper, which needs java.util.List.
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.__annotations).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.__annotations).asJava
      )
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: BaseType): Boolean = { this.compare(that) < 0 }
  override def >(that: BaseType): Boolean = { this.compare(that) > 0 }
  override def <=(that: BaseType): Boolean = { this.compare(that) <= 0 }
  override def >=(that: BaseType): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: BaseType): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): BaseType
  def copy(
    simpleBaseType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType = simpleBaseTypeOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): BaseType
  // Builds a fresh mutable record carrying only the fields set on this one.
  def mutableCopy(): MutableBaseType = {
    val ret = BaseType.createRawRecord
    if (simpleBaseTypeIsSet) ret.simpleBaseType_=(simpleBaseTypeOrNull)
    if (annotationsIsSet) ret.__annotations_=(annotationsOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned.
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableBaseType
  // Seeds a Builder with this record's currently-set fields.
  def toBuilder(): BaseType.Builder.AllSpecified = {
    val ret = new BaseType.Builder(BaseType.createRawRecord)
    if (simpleBaseTypeIsSet) ret.simpleBaseType(simpleBaseTypeOrNull)
    if (annotationsIsSet) ret.__annotations(annotationsOrNull)
    ret
  }
  def mergeCopy(that: BaseType): BaseType
}
/** Mutable view of a [[BaseType]] record: adds setters/unsetters and in-place
  * merge. [[RawBaseType]] is the concrete implementation.
  */
trait MutableBaseType
  extends BaseType
  with JavaBaseTypeMutable[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    BaseType,
    RawBaseType,
    BaseTypeMeta
  ] {
  // Field #1 (simpleBaseType): setter and unsetter.
  def simpleBaseType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType): Unit
  def simpleBaseTypeUnset(): Unit
  // Field #99 (annotations): setter and unsetter.
  def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit
  def annotationsUnset(): Unit
  // In-place merge of fields from `that`; see RawBaseType.merge for semantics.
  def merge(that: BaseType): Unit
  // Copy with optional field overrides; defaults keep the current values.
  def copy(
    simpleBaseType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType = simpleBaseTypeOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableBaseType
  // This record is already mutable, so no defensive copy is needed.
  override def mutable: MutableBaseType = this
}
/** Concrete, mutable implementation of the [[BaseType]] Thrift record.
  *
  * Field presence uses a null sentinel (set iff the backing var is non-null).
  * Unknown fields encountered during [[read]] may be preserved and replayed by
  * [[write]] (see RuntimeHelpers.preserveUnknownFields).
  */
final class RawBaseType
  extends JavaBaseTypeRaw[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    BaseType,
    RawBaseType,
    BaseTypeMeta
  ]
  with MutableBaseType {
  override def meta: BaseTypeMeta = BaseType
  // fields
  // Field #1 - simpleBaseType
  private var _simpleBaseType
    : io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType = null // Underlying type: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType
  override def simpleBaseType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType =
    simpleBaseTypeOrThrow
  override def simpleBaseType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType): Unit = {
    _simpleBaseType = x
  }
  override def simpleBaseTypeOption
    : Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType] =
    if (simpleBaseTypeIsSet) Some(_simpleBaseType) else None
  override def simpleBaseTypeOrNull: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType =
    _simpleBaseType
  override def simpleBaseTypeOrThrow: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType =
    if (simpleBaseTypeIsSet) _simpleBaseType
    else throw new java.lang.NullPointerException("field simpleBaseType of BaseType missing")
  override def simpleBaseTypeIsSet: Boolean = _simpleBaseType != null
  override def simpleBaseTypeUnset(): Unit = { _simpleBaseType = null }
  // Field #99 - annotations
  private var _annotations: scala.collection.Seq[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
  ] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  // Note: the plain accessor falls back to the empty Seq when unset (OrDefault).
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    annotationsOrDefault
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { _annotations = x }
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    if (annotationsIsSet) Some(_annotations) else None
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations else scala.collection.Seq.empty
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] = _annotations
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations
    else throw new java.lang.NullPointerException("field __annotations of BaseType missing")
  override def annotationsIsSet: Boolean = _annotations != null
  override def annotationsUnset(): Unit = { _annotations = null }
  // end fields
  // Unknown fields captured by read(), newest first; write() replays them in original order.
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  /** Serializes only the set fields (plus any preserved unknown fields) to `oprot`. */
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(BaseType.BASETYPE_SDESC)
    if (simpleBaseTypeIsSet) {
      oprot.writeFieldBegin(BaseType.SIMPLEBASETYPE_FDESC)
      // Enum is serialized as its integer id.
      oprot.writeI32(_simpleBaseType.getValue())
      oprot.writeFieldEnd()
    }
    if (annotationsIsSet) {
      oprot.writeFieldBegin(BaseType.ANNOTATIONS_FDESC)
      oprot.writeListBegin(
        new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _annotations.size)
      )
      _annotations.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  /** Deserializes fields from `iprot` into this record; unrecognized fields are
    * either preserved or skipped depending on the runtime configuration.
    */
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (at most once per read) the container for unknown fields.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        BaseType.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // simpleBaseType
            if (field_header.`type` == org.apache.thrift.protocol.TType.I32 || field_header.`type` == org.apache.thrift.protocol.TType.ENUM) {
              // Unknown enum ids map to an "unknown" value rather than failing.
              _simpleBaseType = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType
                .findByIdOrUnknown(iprot.readI32())
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 99 => { // annotations
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _annotations = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                // Read exactly tlist.size element structs.
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          // Wrap to identify which field failed during deserialization.
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure BaseType".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  /** Merge from `that`: scalar fields are copied only when unset here, while
    * the annotations lists are concatenated when both sides have one.
    */
  override def merge(that: BaseType): Unit = {
    if (that.simpleBaseTypeIsSet && !this.simpleBaseTypeIsSet) {
      this.simpleBaseType_=(that.simpleBaseTypeOrNull)
    }
    if (that.annotationsIsSet && !this.annotationsIsSet) {
      this.__annotations_=(that.annotationsOrDefault)
    } else if (that.annotationsIsSet && this.annotationsIsSet) {
      this.__annotations_=(this.__annotations ++ that.__annotations)
    }
  }
  /** Non-destructive merge: builds a fresh record from this, then overlays `that`. */
  override def mergeCopy(that: BaseType): BaseType = {
    val ret = BaseType.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: BaseType => this.equals(o)
    case _ => false
  }
  // Two records are equal when each field has the same set-ness and, if set, the same value.
  def equals(that: BaseType): Boolean = {
    that != null &&
    (if (this.simpleBaseTypeIsSet) (that.simpleBaseTypeIsSet && this.simpleBaseTypeOrNull == that.simpleBaseTypeOrNull)
     else !that.simpleBaseTypeIsSet) &&
    (if (this.annotationsIsSet) (that.annotationsIsSet && this.annotationsOrDefault == that.annotationsOrDefault)
     else !that.annotationsIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (simpleBaseTypeIsSet) hasher.append(_simpleBaseType.##)
    if (annotationsIsSet) hasher.append(_annotations.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (simpleBaseTypeIsSet) ret = simpleBaseTypeOrNull :: ret
    if (annotationsIsSet) ret = annotationsOrDefault :: ret
    ret.reverse
  }
  // Unsets every field and drops any preserved unknown fields.
  override def clear() {
    simpleBaseTypeUnset()
    annotationsUnset()
    unknownFields = Nil
  }
  // TBase support: maps a Thrift field id to its _Fields value (null if unknown).
  def fieldForId(id: Int): BaseType._Fields = id match {
    case 1 => BaseType._Fields.simpleBaseType
    case 99 => BaseType._Fields.__annotations
    case _ => null
  }
  def isSet(field: BaseType._Fields): Boolean = field match {
    case BaseType._Fields.simpleBaseType => simpleBaseTypeIsSet
    case BaseType._Fields.__annotations => annotationsIsSet
    case _ => false
  }
  def getFieldValue(field: BaseType._Fields): AnyRef = field match {
    case BaseType._Fields.simpleBaseType => simpleBaseTypeOrNull.asInstanceOf[AnyRef]
    case BaseType._Fields.__annotations => annotationsOrDefault.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  def setFieldValue(field: BaseType._Fields, value: AnyRef) {
    field match {
      case BaseType._Fields.simpleBaseType =>
        simpleBaseType_=(value.asInstanceOf[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType])
      case BaseType._Fields.__annotations =>
        __annotations_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
        )
      case _ =>
    }
  }
  /** Deep copy via an in-memory binary serialization round trip. */
  override def deepCopy(): RawBaseType = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = BaseType.createRawRecord
    ret.read(prot)
    ret
  }
  // Note: a null argument leaves the corresponding field unset in the copy.
  override def copy(
    simpleBaseType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleBaseType = simpleBaseTypeOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): RawBaseType = {
    val ret = new RawBaseType
    if (simpleBaseType != null) ret.simpleBaseType_=(simpleBaseType)
    if (__annotations != null) ret.__annotations_=(__annotations)
    ret
  }
  // Human-readable rendering via the string protocol (not the wire format).
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion object for [[ListType]] records.
  *
  * Hosts the type-safe [[Builder]] (which tracks at the type level whether the
  * required `elementTypeId` field has been supplied) plus implicit ordering,
  * comparator, and companion-provider instances.
  */
object ListType extends ListTypeMeta {
  object Builder {
    // Phantom-type markers: a Builder's State accumulates these traits as
    // required fields are set, so result() only compiles once all are present.
    sealed trait HasElementTypeId
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasElementTypeId
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[ListType] (private var obj: RawListType) {
    // Setting the required field upgrades the builder's State type.
    def elementTypeId(v: String): ListType.Builder[State with Builder.HasElementTypeId] = {
      obj.elementTypeId_=(v)
      this.asInstanceOf[ListType.Builder[State with Builder.HasElementTypeId]]
    }
    // Single-use: hands out the record being built and nulls the internal
    // reference so a second call fails fast instead of aliasing state.
    def resultMutable()(implicit ev0: State <:< Builder.HasElementTypeId): MutableListType = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("ListType.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasElementTypeId): ListType = resultMutable()(ev0)
  }
  def newBuilder: ListType.Builder.AllUnspecified = new Builder(ListType.createRawRecord)
  implicit val companionProvider: ListTypeCompanionProvider = new ListTypeCompanionProvider
  // Ordering/Comparator both delegate to ListType.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[ListType] = {
    new _root_.scala.math.Ordering[ListType] {
      override def compare(x: ListType, y: ListType): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[ListType] = {
    new _root_.java.util.Comparator[ListType] {
      override def compare(x: ListType, y: ListType): Int = x.compare(y)
    }
  }
}
/** Metadata for the [[ListType]] record: Thrift struct/field descriptors,
  * wire-name and id lookup tables, record factories, and the Spindle field
  * descriptor for `elementTypeId`.
  */
class ListTypeMeta
    extends JavaListTypeMeta[ListType, RawListType, ListTypeMeta]
    with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[ListType] {
  override def recordName: String = "ListType"
  // Thrift descriptors.
  val LISTTYPE_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("ListType")
  // Field #1: elementTypeId (string).
  val ELEMENTTYPEID_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "elementTypeId",
      org.apache.thrift.protocol.TType.STRING,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Name-based lookup for protocols (e.g. JSON/BSON) that serialize field names.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "elementTypeId" -> ELEMENTTYPEID_FDESC
  )
  object _Fields {
    case object elementTypeId extends _Fields(1, "elementTypeId")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.elementTypeId
  )
  // Record factories: all produce a fresh RawListType.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: ListType = createRawRecord
  override def createRawRecord: RawListType = new RawListType
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  // Safe downcast helper: Some(x) only when x is a ListType.
  override def ifInstanceFrom(x: AnyRef): Option[ListType] = {
    if (x.isInstanceOf[ListType]) Some(x.asInstanceOf[ListType]) else None
  }
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
  // Spindle Descriptors.
  val elementTypeId =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, ListType, ListTypeMeta](
      name = "elementTypeId",
      longName = "elementTypeId",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.elementTypeIdOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[ListType], v: String) => {
        r.asInstanceOf[RawListType].elementTypeId_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[ListType]) => {
        r.asInstanceOf[RawListType].elementTypeIdUnset()
      },
      manifest = manifest[String]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, ListType, ListTypeMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, ListType, ListTypeMeta]](
      elementTypeId
    )
  // Convenience constructor that sets the single field.
  def apply(
    elementTypeId: String
  ): ListType = {
    val ret = this.createRawRecord
    ret.elementTypeId_=(elementTypeId)
    ret
  }
}
/** Exposes the [[ListType]] companion (its [[ListTypeMeta]]) through the
  * spindle runtime's generic CompanionProvider lookup mechanism.
  */
class ListTypeCompanionProvider
    extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[ListType] {
  override type CompanionT = ListTypeMeta
  // The ListType singleton doubles as the meta/companion instance.
  override def provide: CompanionT = ListType
}
/** Read-only interface for the ListType Thrift record (single field:
  * elementTypeId #1). Provides ordering, builder conversion, and copy/merge
  * hooks implemented by [[RawListType]].
  */
trait ListType
  extends JavaListType[
    ListType,
    RawListType,
    ListTypeMeta
  ]
  with org.apache.thrift.TBase[ListType, ListType._Fields] {
  override def meta: ListTypeMeta
  /** Field comparison: unset sorts before set, then values are compared.
    * A null `that` sorts before any record.
    */
  override def compare(that: ListType): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.elementTypeIdIsSet.compareTo(that.elementTypeIdIsSet)
      cmp != 0
    }) cmp
    else if (this.elementTypeIdIsSet && {
      cmp = this.elementTypeIdOrNull.compareTo(that.elementTypeIdOrNull)
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: ListType): Boolean = { this.compare(that) < 0 }
  override def >(that: ListType): Boolean = { this.compare(that) > 0 }
  override def <=(that: ListType): Boolean = { this.compare(that) <= 0 }
  override def >=(that: ListType): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: ListType): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): ListType
  def copy(
    elementTypeId: String = elementTypeIdOrNull
  ): ListType
  // Builds a fresh mutable record carrying only the fields set on this one.
  def mutableCopy(): MutableListType = {
    val ret = ListType.createRawRecord
    if (elementTypeIdIsSet) ret.elementTypeId_=(elementTypeIdOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned.
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableListType
  // Seeds a Builder with this record's currently-set fields.
  def toBuilder(): ListType.Builder.AllSpecified = {
    val ret = new ListType.Builder(ListType.createRawRecord)
    if (elementTypeIdIsSet) ret.elementTypeId(elementTypeIdOrNull)
    ret
  }
  def mergeCopy(that: ListType): ListType
}
/** Mutation interface for [[ListType]]; concrete storage lives in [[RawListType]]. */
trait MutableListType
    extends ListType
    with JavaListTypeMutable[
      ListType,
      RawListType,
      ListTypeMeta
    ] {
  def elementTypeId_=(x: String): Unit
  def elementTypeIdUnset(): Unit
  def merge(that: ListType): Unit
  def copy(
    elementTypeId: String = elementTypeIdOrNull
  ): MutableListType
  // Already mutable, so no copy is needed here (see the scaladoc on ListType.mutable).
  override def mutable: MutableListType = this
}
/** Generated backing implementation of [[ListType]].
  *
  * Field storage uses null as the "unset" sentinel (see elementTypeIdIsSet), and unknown
  * wire fields encountered during read are retained so re-serialization is lossless.
  */
final class RawListType
    extends JavaListTypeRaw[
      ListType,
      RawListType,
      ListTypeMeta
    ]
    with MutableListType {
  override def meta: ListTypeMeta = ListType
  // fields
  // Field #1 - elementTypeId
  private var _elementTypeId: String = null // Underlying type: String
  override def elementTypeId: String = elementTypeIdOrThrow
  override def elementTypeId_=(x: String): Unit = { _elementTypeId = x }
  override def elementTypeIdOption: Option[String] = if (elementTypeIdIsSet) Some(_elementTypeId) else None
  override def elementTypeIdOrNull: String = _elementTypeId
  override def elementTypeIdOrThrow: String = if (elementTypeIdIsSet) _elementTypeId
  else throw new java.lang.NullPointerException("field elementTypeId of ListType missing")
  override def elementTypeIdIsSet: Boolean = _elementTypeId != null
  override def elementTypeIdUnset(): Unit = { _elementTypeId = null }
  // end fields
  // Unknown fields collected during read(), newest first; written back out in reverse (original) order.
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  /** Serializes only set fields (thrift optional-field convention), then any preserved unknown fields. */
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(ListType.LISTTYPE_SDESC)
    if (elementTypeIdIsSet) {
      oprot.writeFieldBegin(ListType.ELEMENTTYPEID_FDESC)
      oprot.writeString(_elementTypeId)
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  /** Populates this record from the protocol; fields of unexpected wire type are skipped, and
    * unrecognized field ids are either preserved (if enabled) or skipped.
    */
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (at most once per read) the container for this read's unknown fields.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        ListType.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // elementTypeId
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _elementTypeId = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        // Wrap protocol errors with the offending field id for easier debugging.
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure ListType".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  /** Copies each field from `that` only if it is set there and unset here (this record wins ties). */
  override def merge(that: ListType): Unit = {
    if (that.elementTypeIdIsSet && !this.elementTypeIdIsSet) {
      this.elementTypeId_=(that.elementTypeIdOrNull)
    }
  }
  /** Non-destructive merge: new record seeded from `this`, then filled from `that`. */
  override def mergeCopy(that: ListType): ListType = {
    val ret = ListType.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: ListType => this.equals(o)
    case _ => false
  }
  /** Field-by-field equality: either both unset, or both set with equal values. */
  def equals(that: ListType): Boolean = {
    that != null &&
    (if (this.elementTypeIdIsSet) (that.elementTypeIdIsSet && this.elementTypeIdOrNull == that.elementTypeIdOrNull)
     else !that.elementTypeIdIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (elementTypeIdIsSet) hasher.append(_elementTypeId.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (elementTypeIdIsSet) ret = elementTypeIdOrNull :: ret
    ret.reverse
  }
  // Resets all fields and drops any preserved unknown fields.
  override def clear() {
    elementTypeIdUnset()
    unknownFields = Nil
  }
  // TBase plumbing: field lookup/access by the generated _Fields enum.
  def fieldForId(id: Int): ListType._Fields = id match {
    case 1 => ListType._Fields.elementTypeId
    case _ => null
  }
  def isSet(field: ListType._Fields): Boolean = field match {
    case ListType._Fields.elementTypeId => elementTypeIdIsSet
    case _ => false
  }
  def getFieldValue(field: ListType._Fields): AnyRef = field match {
    case ListType._Fields.elementTypeId => elementTypeIdOrNull.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  def setFieldValue(field: ListType._Fields, value: AnyRef) {
    field match {
      case ListType._Fields.elementTypeId => elementTypeId_=(value.asInstanceOf[String])
      case _ =>
    }
  }
  /** Deep copy via binary-protocol round trip through an in-memory buffer. */
  override def deepCopy(): RawListType = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = ListType.createRawRecord
    ret.read(prot)
    ret
  }
  /** Copy with overrides; a null argument leaves the corresponding field unset in the copy. */
  override def copy(
    elementTypeId: String = elementTypeIdOrNull
  ): RawListType = {
    val ret = new RawListType
    if (elementTypeId != null) ret.elementTypeId_=(elementTypeId)
    ret
  }
  /** Human-readable rendering via the spindle string protocol. */
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion for [[SetType]]: phantom-typed Builder plus implicit ordering/comparator instances. */
object SetType extends SetTypeMeta {
  object Builder {
    // Phantom-type markers tracking, at compile time, which fields have been set on the Builder.
    sealed trait HasElementTypeId
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasElementTypeId
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[SetType] (private var obj: RawSetType) {
    // Setter returns the same builder, refined with the HasElementTypeId evidence.
    def elementTypeId(v: String): SetType.Builder[State with Builder.HasElementTypeId] = {
      obj.elementTypeId_=(v)
      this.asInstanceOf[SetType.Builder[State with Builder.HasElementTypeId]]
    }
    // Hands out the underlying record exactly once; `obj` is nulled so reuse fails fast.
    def resultMutable()(implicit ev0: State <:< Builder.HasElementTypeId): MutableSetType = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("SetType.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasElementTypeId): SetType = resultMutable()(ev0)
  }
  def newBuilder: SetType.Builder.AllUnspecified = new Builder(SetType.createRawRecord)
  implicit val companionProvider: SetTypeCompanionProvider = new SetTypeCompanionProvider
  // Ordering/Comparator delegate to SetType.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[SetType] = {
    new _root_.scala.math.Ordering[SetType] {
      override def compare(x: SetType, y: SetType): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[SetType] = {
    new _root_.java.util.Comparator[SetType] {
      override def compare(x: SetType, y: SetType): Int = x.compare(y)
    }
  }
}
/** Generated metadata for the `SetType` struct: thrift wire descriptors, spindle field
  * descriptors, record factories, and the all-fields apply() constructor.
  */
class SetTypeMeta
    extends JavaSetTypeMeta[SetType, RawSetType, SetTypeMeta]
    with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[SetType] {
  override def recordName: String = "SetType"
  // Thrift descriptors.
  val SETTYPE_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("SetType")
  val ELEMENTTYPEID_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "elementTypeId",
      org.apache.thrift.protocol.TType.STRING,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup used by name-based protocols (JSON/BSON) that serialize field names instead of ids.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "elementTypeId" -> ELEMENTTYPEID_FDESC
  )
  object _Fields {
    case object elementTypeId extends _Fields(1, "elementTypeId")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.elementTypeId
  )
  // Record factories; records are always backed by RawSetType.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: SetType = createRawRecord
  override def createRawRecord: RawSetType = new RawSetType
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[SetType] = {
    if (x.isInstanceOf[SetType]) Some(x.asInstanceOf[SetType]) else None
  }
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
  // Spindle Descriptors.
  val elementTypeId =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, SetType, SetTypeMeta](
      name = "elementTypeId",
      longName = "elementTypeId",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.elementTypeIdOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SetType], v: String) => {
        r.asInstanceOf[RawSetType].elementTypeId_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SetType]) => {
        r.asInstanceOf[RawSetType].elementTypeIdUnset()
      },
      manifest = manifest[String]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, SetType, SetTypeMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, SetType, SetTypeMeta]](
      elementTypeId
    )
  // Convenience constructor that sets every field.
  def apply(
    elementTypeId: String
  ): SetType = {
    val ret = this.createRawRecord
    ret.elementTypeId_=(elementTypeId)
    ret
  }
}
/** Implicit provider exposing the [[SetTypeMeta]] companion to generic spindle runtime machinery. */
class SetTypeCompanionProvider
    extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[SetType] {
  type CompanionT = SetTypeMeta
  override def provide: SetTypeMeta = SetType
}
/** Generated read-only interface for the `SetType` thrift struct (one optional field: elementTypeId). */
trait SetType
    extends JavaSetType[
      SetType,
      RawSetType,
      SetTypeMeta
    ]
    with org.apache.thrift.TBase[SetType, SetType._Fields] {
  override def meta: SetTypeMeta
  /** Total ordering by field id: an unset field sorts before a set one; a null `that` sorts before `this`. */
  override def compare(that: SetType): Int = {
    // `cmp` is assigned inside the condition blocks below; each block both compares and tests.
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.elementTypeIdIsSet.compareTo(that.elementTypeIdIsSet)
      cmp != 0
    }) cmp
    else if (this.elementTypeIdIsSet && {
      cmp = this.elementTypeIdOrNull.compareTo(that.elementTypeIdOrNull)
      cmp != 0
    }) cmp
    else 0
  }
  // Comparison operators derived from compare, for convenience at call sites.
  override def <(that: SetType): Boolean = { this.compare(that) < 0 }
  override def >(that: SetType): Boolean = { this.compare(that) > 0 }
  override def <=(that: SetType): Boolean = { this.compare(that) <= 0 }
  override def >=(that: SetType): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: SetType): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): SetType
  def copy(
    elementTypeId: String = elementTypeIdOrNull
  ): SetType
  /** Returns a fresh mutable record carrying the set fields of this one; `this` is left untouched. */
  def mutableCopy(): MutableSetType = {
    val ret = SetType.createRawRecord
    if (elementTypeIdIsSet) ret.elementTypeId_=(elementTypeIdOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned.
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableSetType
  /** Builder pre-populated with this record's set fields; typed as AllSpecified so result() is callable. */
  def toBuilder(): SetType.Builder.AllSpecified = {
    val ret = new SetType.Builder(SetType.createRawRecord)
    if (elementTypeIdIsSet) ret.elementTypeId(elementTypeIdOrNull)
    ret
  }
  def mergeCopy(that: SetType): SetType
}
/** Mutation interface for [[SetType]]; concrete storage lives in [[RawSetType]]. */
trait MutableSetType
    extends SetType
    with JavaSetTypeMutable[
      SetType,
      RawSetType,
      SetTypeMeta
    ] {
  def elementTypeId_=(x: String): Unit
  def elementTypeIdUnset(): Unit
  def merge(that: SetType): Unit
  def copy(
    elementTypeId: String = elementTypeIdOrNull
  ): MutableSetType
  // Already mutable, so no copy is needed here (see the scaladoc on SetType.mutable).
  override def mutable: MutableSetType = this
}
/** Generated backing implementation of [[SetType]].
  *
  * Field storage uses null as the "unset" sentinel (see elementTypeIdIsSet), and unknown
  * wire fields encountered during read are retained so re-serialization is lossless.
  */
final class RawSetType
    extends JavaSetTypeRaw[
      SetType,
      RawSetType,
      SetTypeMeta
    ]
    with MutableSetType {
  override def meta: SetTypeMeta = SetType
  // fields
  // Field #1 - elementTypeId
  private var _elementTypeId: String = null // Underlying type: String
  override def elementTypeId: String = elementTypeIdOrThrow
  override def elementTypeId_=(x: String): Unit = { _elementTypeId = x }
  override def elementTypeIdOption: Option[String] = if (elementTypeIdIsSet) Some(_elementTypeId) else None
  override def elementTypeIdOrNull: String = _elementTypeId
  override def elementTypeIdOrThrow: String = if (elementTypeIdIsSet) _elementTypeId
  else throw new java.lang.NullPointerException("field elementTypeId of SetType missing")
  override def elementTypeIdIsSet: Boolean = _elementTypeId != null
  override def elementTypeIdUnset(): Unit = { _elementTypeId = null }
  // end fields
  // Unknown fields collected during read(), newest first; written back out in reverse (original) order.
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  /** Serializes only set fields (thrift optional-field convention), then any preserved unknown fields. */
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(SetType.SETTYPE_SDESC)
    if (elementTypeIdIsSet) {
      oprot.writeFieldBegin(SetType.ELEMENTTYPEID_FDESC)
      oprot.writeString(_elementTypeId)
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  /** Populates this record from the protocol; fields of unexpected wire type are skipped, and
    * unrecognized field ids are either preserved (if enabled) or skipped.
    */
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (at most once per read) the container for this read's unknown fields.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        SetType.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // elementTypeId
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _elementTypeId = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        // Wrap protocol errors with the offending field id for easier debugging.
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure SetType".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  /** Copies each field from `that` only if it is set there and unset here (this record wins ties). */
  override def merge(that: SetType): Unit = {
    if (that.elementTypeIdIsSet && !this.elementTypeIdIsSet) {
      this.elementTypeId_=(that.elementTypeIdOrNull)
    }
  }
  /** Non-destructive merge: new record seeded from `this`, then filled from `that`. */
  override def mergeCopy(that: SetType): SetType = {
    val ret = SetType.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: SetType => this.equals(o)
    case _ => false
  }
  /** Field-by-field equality: either both unset, or both set with equal values. */
  def equals(that: SetType): Boolean = {
    that != null &&
    (if (this.elementTypeIdIsSet) (that.elementTypeIdIsSet && this.elementTypeIdOrNull == that.elementTypeIdOrNull)
     else !that.elementTypeIdIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (elementTypeIdIsSet) hasher.append(_elementTypeId.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (elementTypeIdIsSet) ret = elementTypeIdOrNull :: ret
    ret.reverse
  }
  // Resets all fields and drops any preserved unknown fields.
  override def clear() {
    elementTypeIdUnset()
    unknownFields = Nil
  }
  // TBase plumbing: field lookup/access by the generated _Fields enum.
  def fieldForId(id: Int): SetType._Fields = id match {
    case 1 => SetType._Fields.elementTypeId
    case _ => null
  }
  def isSet(field: SetType._Fields): Boolean = field match {
    case SetType._Fields.elementTypeId => elementTypeIdIsSet
    case _ => false
  }
  def getFieldValue(field: SetType._Fields): AnyRef = field match {
    case SetType._Fields.elementTypeId => elementTypeIdOrNull.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  def setFieldValue(field: SetType._Fields, value: AnyRef) {
    field match {
      case SetType._Fields.elementTypeId => elementTypeId_=(value.asInstanceOf[String])
      case _ =>
    }
  }
  /** Deep copy via binary-protocol round trip through an in-memory buffer. */
  override def deepCopy(): RawSetType = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = SetType.createRawRecord
    ret.read(prot)
    ret
  }
  /** Copy with overrides; a null argument leaves the corresponding field unset in the copy. */
  override def copy(
    elementTypeId: String = elementTypeIdOrNull
  ): RawSetType = {
    val ret = new RawSetType
    if (elementTypeId != null) ret.elementTypeId_=(elementTypeId)
    ret
  }
  /** Human-readable rendering via the spindle string protocol. */
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion for [[MapType]]: phantom-typed Builder plus implicit ordering/comparator instances. */
object MapType extends MapTypeMeta {
  object Builder {
    // Phantom-type markers tracking, at compile time, which fields have been set on the Builder.
    sealed trait HasKeyTypeId
    sealed trait HasValueTypeId
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasKeyTypeId with HasValueTypeId
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[MapType] (private var obj: RawMapType) {
    // Each setter returns the same builder, refined with the corresponding Has* evidence.
    def keyTypeId(v: String): MapType.Builder[State with Builder.HasKeyTypeId] = {
      obj.keyTypeId_=(v)
      this.asInstanceOf[MapType.Builder[State with Builder.HasKeyTypeId]]
    }
    def valueTypeId(v: String): MapType.Builder[State with Builder.HasValueTypeId] = {
      obj.valueTypeId_=(v)
      this.asInstanceOf[MapType.Builder[State with Builder.HasValueTypeId]]
    }
    // Hands out the underlying record exactly once; `obj` is nulled so reuse fails fast.
    def resultMutable()(
      implicit ev0: State <:< Builder.HasKeyTypeId,
      ev1: State <:< Builder.HasValueTypeId
    ): MutableMapType = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("MapType.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasKeyTypeId, ev1: State <:< Builder.HasValueTypeId): MapType =
      resultMutable()(ev0, ev1)
  }
  def newBuilder: MapType.Builder.AllUnspecified = new Builder(MapType.createRawRecord)
  implicit val companionProvider: MapTypeCompanionProvider = new MapTypeCompanionProvider
  // Ordering/Comparator delegate to MapType.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[MapType] = {
    new _root_.scala.math.Ordering[MapType] {
      override def compare(x: MapType, y: MapType): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[MapType] = {
    new _root_.java.util.Comparator[MapType] {
      override def compare(x: MapType, y: MapType): Int = x.compare(y)
    }
  }
}
/** Generated metadata for the `MapType` struct: thrift wire descriptors, spindle field
  * descriptors (keyTypeId=1, valueTypeId=2), record factories, and the all-fields apply() constructor.
  */
class MapTypeMeta
    extends JavaMapTypeMeta[MapType, RawMapType, MapTypeMeta]
    with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[MapType] {
  override def recordName: String = "MapType"
  // Thrift descriptors.
  val MAPTYPE_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("MapType")
  val KEYTYPEID_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "keyTypeId",
      org.apache.thrift.protocol.TType.STRING,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val VALUETYPEID_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "valueTypeId",
      org.apache.thrift.protocol.TType.STRING,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup used by name-based protocols (JSON/BSON) that serialize field names instead of ids.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "keyTypeId" -> KEYTYPEID_FDESC,
    "valueTypeId" -> VALUETYPEID_FDESC
  )
  object _Fields {
    case object keyTypeId extends _Fields(1, "keyTypeId")
    case object valueTypeId extends _Fields(2, "valueTypeId")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.keyTypeId,
    2.toShort -> _Fields.valueTypeId
  )
  // Record factories; records are always backed by RawMapType.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: MapType = createRawRecord
  override def createRawRecord: RawMapType = new RawMapType
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[MapType] = {
    if (x.isInstanceOf[MapType]) Some(x.asInstanceOf[MapType]) else None
  }
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
  // Spindle Descriptors.
  val keyTypeId =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, MapType, MapTypeMeta](
      name = "keyTypeId",
      longName = "keyTypeId",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.keyTypeIdOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[MapType], v: String) => {
        r.asInstanceOf[RawMapType].keyTypeId_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[MapType]) => {
        r.asInstanceOf[RawMapType].keyTypeIdUnset()
      },
      manifest = manifest[String]
    )
  val valueTypeId =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, MapType, MapTypeMeta](
      name = "valueTypeId",
      longName = "valueTypeId",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.valueTypeIdOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[MapType], v: String) => {
        r.asInstanceOf[RawMapType].valueTypeId_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[MapType]) => {
        r.asInstanceOf[RawMapType].valueTypeIdUnset()
      },
      manifest = manifest[String]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, MapType, MapTypeMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, MapType, MapTypeMeta]](
      keyTypeId,
      valueTypeId
    )
  // Convenience constructor that sets every field.
  def apply(
    keyTypeId: String,
    valueTypeId: String
  ): MapType = {
    val ret = this.createRawRecord
    ret.keyTypeId_=(keyTypeId)
    ret.valueTypeId_=(valueTypeId)
    ret
  }
}
/** Implicit provider exposing the [[MapTypeMeta]] companion to generic spindle runtime machinery. */
class MapTypeCompanionProvider
    extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[MapType] {
  type CompanionT = MapTypeMeta
  override def provide: MapTypeMeta = MapType
}
/** Generated read-only interface for the `MapType` thrift struct (optional fields: keyTypeId, valueTypeId). */
trait MapType
    extends JavaMapType[
      MapType,
      RawMapType,
      MapTypeMeta
    ]
    with org.apache.thrift.TBase[MapType, MapType._Fields] {
  override def meta: MapTypeMeta
  /** Total ordering by field id (keyTypeId, then valueTypeId): an unset field sorts before a set one;
    * a null `that` sorts before `this`.
    */
  override def compare(that: MapType): Int = {
    // `cmp` is assigned inside the condition blocks below; each block both compares and tests.
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.keyTypeIdIsSet.compareTo(that.keyTypeIdIsSet)
      cmp != 0
    }) cmp
    else if (this.keyTypeIdIsSet && {
      cmp = this.keyTypeIdOrNull.compareTo(that.keyTypeIdOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.valueTypeIdIsSet.compareTo(that.valueTypeIdIsSet)
      cmp != 0
    }) cmp
    else if (this.valueTypeIdIsSet && {
      cmp = this.valueTypeIdOrNull.compareTo(that.valueTypeIdOrNull)
      cmp != 0
    }) cmp
    else 0
  }
  // Comparison operators derived from compare, for convenience at call sites.
  override def <(that: MapType): Boolean = { this.compare(that) < 0 }
  override def >(that: MapType): Boolean = { this.compare(that) > 0 }
  override def <=(that: MapType): Boolean = { this.compare(that) <= 0 }
  override def >=(that: MapType): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: MapType): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): MapType
  def copy(
    keyTypeId: String = keyTypeIdOrNull,
    valueTypeId: String = valueTypeIdOrNull
  ): MapType
  /** Returns a fresh mutable record carrying the set fields of this one; `this` is left untouched. */
  def mutableCopy(): MutableMapType = {
    val ret = MapType.createRawRecord
    if (keyTypeIdIsSet) ret.keyTypeId_=(keyTypeIdOrNull)
    if (valueTypeIdIsSet) ret.valueTypeId_=(valueTypeIdOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned.
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableMapType
  /** Builder pre-populated with this record's set fields; typed as AllSpecified so result() is callable. */
  def toBuilder(): MapType.Builder.AllSpecified = {
    val ret = new MapType.Builder(MapType.createRawRecord)
    if (keyTypeIdIsSet) ret.keyTypeId(keyTypeIdOrNull)
    if (valueTypeIdIsSet) ret.valueTypeId(valueTypeIdOrNull)
    ret
  }
  def mergeCopy(that: MapType): MapType
}
/** Mutation interface for [[MapType]]; concrete storage lives in [[RawMapType]]. */
trait MutableMapType
    extends MapType
    with JavaMapTypeMutable[
      MapType,
      RawMapType,
      MapTypeMeta
    ] {
  def keyTypeId_=(x: String): Unit
  def keyTypeIdUnset(): Unit
  def valueTypeId_=(x: String): Unit
  def valueTypeIdUnset(): Unit
  def merge(that: MapType): Unit
  def copy(
    keyTypeId: String = keyTypeIdOrNull,
    valueTypeId: String = valueTypeIdOrNull
  ): MutableMapType
  // Already mutable, so no copy is needed here (see the scaladoc on MapType.mutable).
  override def mutable: MutableMapType = this
}
final class RawMapType
extends JavaMapTypeRaw[
MapType,
RawMapType,
MapTypeMeta
]
with MutableMapType {
override def meta: MapTypeMeta = MapType
// fields
// Field #1 - keyTypeId
private var _keyTypeId: String = null // Underlying type: String
override def keyTypeId: String = keyTypeIdOrThrow
override def keyTypeId_=(x: String): Unit = { _keyTypeId = x }
override def keyTypeIdOption: Option[String] = if (keyTypeIdIsSet) Some(_keyTypeId) else None
override def keyTypeIdOrNull: String = _keyTypeId
override def keyTypeIdOrThrow: String =
if (keyTypeIdIsSet) _keyTypeId else throw new java.lang.NullPointerException("field keyTypeId of MapType missing")
override def keyTypeIdIsSet: Boolean = _keyTypeId != null
override def keyTypeIdUnset(): Unit = { _keyTypeId = null }
// Field #2 - valueTypeId
private var _valueTypeId: String = null // Underlying type: String
override def valueTypeId: String = valueTypeIdOrThrow
override def valueTypeId_=(x: String): Unit = { _valueTypeId = x }
override def valueTypeIdOption: Option[String] = if (valueTypeIdIsSet) Some(_valueTypeId) else None
override def valueTypeIdOrNull: String = _valueTypeId
override def valueTypeIdOrThrow: String = if (valueTypeIdIsSet) _valueTypeId
else throw new java.lang.NullPointerException("field valueTypeId of MapType missing")
override def valueTypeIdIsSet: Boolean = _valueTypeId != null
override def valueTypeIdUnset(): Unit = { _valueTypeId = null }
// end fields
private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
oprot.writeStructBegin(MapType.MAPTYPE_SDESC)
if (keyTypeIdIsSet) {
oprot.writeFieldBegin(MapType.KEYTYPEID_FDESC)
oprot.writeString(_keyTypeId)
oprot.writeFieldEnd()
}
if (valueTypeIdIsSet) {
oprot.writeFieldBegin(MapType.VALUETYPEID_FDESC)
oprot.writeString(_valueTypeId)
oprot.writeFieldEnd()
}
if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
unknownFields.reverse foreach { _.write(oprot) }
}
oprot.writeFieldStop()
oprot.writeStructEnd()
}
  /** Deserializes this record in place from `iprot`. Known fields are decoded
    * by id (or resolved by name for name-based protocols); fields whose wire
    * type disagrees with the schema are skipped; unrecognized fields are
    * either preserved in `unknownFields` or skipped, per RuntimeHelpers. */
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily allocates (at most once per read) the bucket for unknown fields.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        MapType.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // keyTypeId
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _keyTypeId = iprot.readString()
            } else {
              // Wire type mismatch: skip the value rather than fail the whole read.
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // valueTypeId
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _valueTypeId = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          // Re-wrap so the failing field id is recorded in the error message.
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure MapType".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
override def merge(that: MapType): Unit = {
if (that.keyTypeIdIsSet && !this.keyTypeIdIsSet) {
this.keyTypeId_=(that.keyTypeIdOrNull)
}
if (that.valueTypeIdIsSet && !this.valueTypeIdIsSet) {
this.valueTypeId_=(that.valueTypeIdOrNull)
}
}
override def mergeCopy(that: MapType): MapType = {
val ret = MapType.createRawRecord
ret.merge(this)
ret.merge(that)
ret
}
override def equals(that: Any): Boolean = that match {
case null => false
case o: MapType => this.equals(o)
case _ => false
}
def equals(that: MapType): Boolean = {
that != null &&
(if (this.keyTypeIdIsSet) (that.keyTypeIdIsSet && this.keyTypeIdOrNull == that.keyTypeIdOrNull)
else !that.keyTypeIdIsSet) &&
(if (this.valueTypeIdIsSet) (that.valueTypeIdIsSet && this.valueTypeIdOrNull == that.valueTypeIdOrNull)
else !that.valueTypeIdIsSet) &&
true
}
override def hashCode(): Int = {
// We use a fixed seed, for consistency.
val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
if (keyTypeIdIsSet) hasher.append(_keyTypeId.##)
if (valueTypeIdIsSet) hasher.append(_valueTypeId.##)
hasher.hash
}
// Returns the values of the set fields on this object, in id order.
def getSetFields: Seq[Any] = {
var ret: List[Any] = Nil
if (keyTypeIdIsSet) ret = keyTypeIdOrNull :: ret
if (valueTypeIdIsSet) ret = valueTypeIdOrNull :: ret
ret.reverse
}
override def clear() {
keyTypeIdUnset()
valueTypeIdUnset()
unknownFields = Nil
}
def fieldForId(id: Int): MapType._Fields = id match {
case 1 => MapType._Fields.keyTypeId
case 2 => MapType._Fields.valueTypeId
case _ => null
}
def isSet(field: MapType._Fields): Boolean = field match {
case MapType._Fields.keyTypeId => keyTypeIdIsSet
case MapType._Fields.valueTypeId => valueTypeIdIsSet
case _ => false
}
def getFieldValue(field: MapType._Fields): AnyRef = field match {
case MapType._Fields.keyTypeId => keyTypeIdOrNull.asInstanceOf[AnyRef]
case MapType._Fields.valueTypeId => valueTypeIdOrNull.asInstanceOf[AnyRef]
case _ => throw new IllegalStateException
}
def setFieldValue(field: MapType._Fields, value: AnyRef) {
field match {
case MapType._Fields.keyTypeId => keyTypeId_=(value.asInstanceOf[String])
case MapType._Fields.valueTypeId => valueTypeId_=(value.asInstanceOf[String])
case _ =>
}
}
  /** Deep copy via a Thrift binary round-trip through an in-memory buffer:
    * serialize this record, then deserialize into a fresh raw record. */
  override def deepCopy(): RawMapType = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = MapType.createRawRecord
    ret.read(prot)
    ret
  }
override def copy(
keyTypeId: String = keyTypeIdOrNull,
valueTypeId: String = valueTypeIdOrNull
): RawMapType = {
val ret = new RawMapType
if (keyTypeId != null) ret.keyTypeId_=(keyTypeId)
if (valueTypeId != null) ret.valueTypeId_=(valueTypeId)
ret
}
  /** Renders the record by writing it through TStringProtocol into an
    * in-memory buffer and decoding the result as UTF-8. */
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion object for ContainerType: builder entry point, implicit
  * companion provider, and Ordering/Comparator instances. */
object ContainerType extends ContainerTypeMeta {
  object Builder {
    // Phantom-type evidence that simpleContainerType (the required field)
    // has been supplied; `result()` demands it at compile time.
    sealed trait HasSimpleContainerType
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasSimpleContainerType
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  // Type-state builder: each required-field setter refines `State` so that
  // result()/resultMutable() only compile once all required fields are set.
  class Builder[+State] private[ContainerType] (private var obj: RawContainerType) {
    def simpleContainerType(
      v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType
    ): ContainerType.Builder[State with Builder.HasSimpleContainerType] = {
      obj.simpleContainerType_=(v)
      this.asInstanceOf[ContainerType.Builder[State with Builder.HasSimpleContainerType]]
    }
    def __annotations(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
    ): ContainerType.Builder[State] = {
      obj.__annotations_=(v)
      this
    }
    // Option-accepting overload: None unsets the field.
    def __annotations(
      vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    ): ContainerType.Builder[State] = {
      vOpt match {
        case Some(v) => obj.__annotations_=(v)
        case None => obj.annotationsUnset()
      }
      this
    }
    // Hands out the underlying record and invalidates the builder (obj = null)
    // so the mutable record cannot be aliased by a second result() call.
    def resultMutable()(implicit ev0: State <:< Builder.HasSimpleContainerType): MutableContainerType = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("ContainerType.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasSimpleContainerType): ContainerType = resultMutable()(ev0)
  }
  def newBuilder: ContainerType.Builder.AllUnspecified = new Builder(ContainerType.createRawRecord)
  implicit val companionProvider: ContainerTypeCompanionProvider = new ContainerTypeCompanionProvider
  // Ordering/Comparator delegate to ContainerType.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[ContainerType] = {
    new _root_.scala.math.Ordering[ContainerType] {
      override def compare(x: ContainerType, y: ContainerType): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[ContainerType] = {
    new _root_.java.util.Comparator[ContainerType] {
      override def compare(x: ContainerType, y: ContainerType): Int = x.compare(y)
    }
  }
}
/** Metadata for the ContainerType record: Thrift struct/field descriptors,
  * name/id lookup tables, record factories, and Spindle field descriptors. */
class ContainerTypeMeta
  extends JavaContainerTypeMeta[ContainerType, RawContainerType, ContainerTypeMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[ContainerType] {
  override def recordName: String = "ContainerType"
  // Thrift descriptors.
  val CONTAINERTYPE_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("ContainerType")
  val SIMPLECONTAINERTYPE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "simpleContainerType",
      org.apache.thrift.protocol.TType.STRUCT,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val ANNOTATIONS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "annotations",
      org.apache.thrift.protocol.TType.LIST,
      99, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup table for name-based protocols (JSON/BSON) that carry field names
  // instead of ids on the wire.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "simpleContainerType" -> SIMPLECONTAINERTYPE_FDESC,
    "annotations" -> ANNOTATIONS_FDESC
  )
  object _Fields {
    case object simpleContainerType extends _Fields(1, "simpleContainerType")
    case object __annotations extends _Fields(99, "annotations")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.simpleContainerType,
    99.toShort -> _Fields.__annotations
  )
  // Record factories: all three create the same RawContainerType instance.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: ContainerType = createRawRecord
  override def createRawRecord: RawContainerType = new RawContainerType
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[ContainerType] = {
    if (x.isInstanceOf[ContainerType]) Some(x.asInstanceOf[ContainerType]) else None
  }
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
  // Spindle Descriptors.
  // Field #1: simpleContainerType (struct). Raw setter/unsetter cast the
  // mutable record down to RawContainerType.
  val simpleContainerType =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.StructFieldDescriptor[
      ContainerType,
      ContainerTypeMeta,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerTypeMeta
    ](
      name = "simpleContainerType",
      longName = "simpleContainerType",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.simpleContainerTypeOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[ContainerType],
        v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType
      ) => { r.asInstanceOf[RawContainerType].simpleContainerType_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[ContainerType]) => {
        r.asInstanceOf[RawContainerType].simpleContainerTypeUnset()
      },
      structMeta = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType,
      manifest = manifest[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType]
    )
  // Field #99: annotations (optional list of Annotation structs).
  val __annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
    ], ContainerType, ContainerTypeMeta](
      name = "annotations",
      longName = "annotations",
      id = 99,
      annotations = Map(),
      owner = this,
      getter = _.annotationsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[ContainerType],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
      ) => { r.asInstanceOf[RawContainerType].__annotations_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[ContainerType]) => {
        r.asInstanceOf[RawContainerType].annotationsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  override val fields: Seq[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, ContainerType, ContainerTypeMeta]
  ] =
    Vector[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, ContainerType, ContainerTypeMeta]
    ](
      simpleContainerType,
      __annotations
    )
  // Convenience constructor setting both fields.
  def apply(
    simpleContainerType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): ContainerType = {
    val ret = this.createRawRecord
    ret.simpleContainerType_=(simpleContainerType)
    ret.__annotations_=(__annotations)
    ret
  }
}
/** Bridges ContainerType into Spindle's generic CompanionProvider machinery
  * by exposing the companion object as its meta. */
class ContainerTypeCompanionProvider
  extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[ContainerType] {
  type CompanionT = ContainerTypeMeta
  override def provide: ContainerTypeMeta = ContainerType
}
/** Read-only interface for the ContainerType record. Concrete state lives in
  * RawContainerType; this trait supplies ordering, copy/builder helpers, and
  * the abstract accessor contract. */
trait ContainerType
  extends JavaContainerType[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType,
    ContainerType,
    RawContainerType,
    ContainerTypeMeta
  ]
  with org.apache.thrift.TBase[ContainerType, ContainerType._Fields] {
  override def meta: ContainerTypeMeta
  /** Field-by-field comparison in id order: unset sorts before set, then
    * values are compared. Each `else if` branch assigns `cmp` as a side
    * effect and short-circuits on the first nonzero result. */
  override def compare(that: ContainerType): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.simpleContainerTypeIsSet.compareTo(that.simpleContainerTypeIsSet)
      cmp != 0
    }) cmp
    else if (this.simpleContainerTypeIsSet && {
      cmp = this.simpleContainerTypeOrNull.compareTo(that.simpleContainerTypeOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.annotationsIsSet.compareTo(that.annotationsIsSet)
      cmp != 0
    }) cmp
    else if (this.annotationsIsSet && {
      // Lists are compared via Thrift's helper on Java views of the seqs.
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.__annotations).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.__annotations).asJava
      )
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: ContainerType): Boolean = { this.compare(that) < 0 }
  override def >(that: ContainerType): Boolean = { this.compare(that) > 0 }
  override def <=(that: ContainerType): Boolean = { this.compare(that) <= 0 }
  override def >=(that: ContainerType): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: ContainerType): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): ContainerType
  def copy(
    simpleContainerType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType =
      simpleContainerTypeOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): ContainerType
  /** Shallow mutable copy: set fields are carried over into a new raw record. */
  def mutableCopy(): MutableContainerType = {
    val ret = ContainerType.createRawRecord
    if (simpleContainerTypeIsSet) ret.simpleContainerType_=(simpleContainerTypeOrNull)
    if (annotationsIsSet) ret.__annotations_=(annotationsOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
   *
   * If the underlying implementation is mutable, `this` will be returned.
   * If the underlying implementation is immutable, a mutable copy will be returned.
   *
   * After mutating the instance returned by this method, the original instance
   * (on which `mutable` was called) will be in an undefined state. It may or may
   * not have been modified, depending on whether it was immutable or not.
   *
   * This is included as an optimization for when we want access to a Mutable record
   * but don't want to pay the cost of copying every time.
   */
  def mutable: MutableContainerType
  /** Seeds a new type-state Builder with this record's currently-set fields. */
  def toBuilder(): ContainerType.Builder.AllSpecified = {
    val ret = new ContainerType.Builder(ContainerType.createRawRecord)
    if (simpleContainerTypeIsSet) ret.simpleContainerType(simpleContainerTypeOrNull)
    if (annotationsIsSet) ret.__annotations(annotationsOrNull)
    ret
  }
  def mergeCopy(that: ContainerType): ContainerType
}
/** Mutation interface for ContainerType; implemented by RawContainerType. */
trait MutableContainerType
  extends ContainerType
  with JavaContainerTypeMutable[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType,
    ContainerType,
    RawContainerType,
    ContainerTypeMeta
  ] {
  // Per-field setters and unsetters.
  def simpleContainerType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType): Unit
  def simpleContainerTypeUnset(): Unit
  def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit
  def annotationsUnset(): Unit
  // In-place merge of fields from `that`.
  def merge(that: ContainerType): Unit
  def copy(
    simpleContainerType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType =
      simpleContainerTypeOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableContainerType
  // Already mutable, so no copy is needed.
  override def mutable: MutableContainerType = this
}
/** Concrete mutable backing implementation of ContainerType. Fields are held
  * in nullable vars where null encodes "unset". Handles Thrift wire
  * (de)serialization, merging, equality/hashing, and field reflection. */
final class RawContainerType
  extends JavaContainerTypeRaw[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType,
    ContainerType,
    RawContainerType,
    ContainerTypeMeta
  ]
  with MutableContainerType {
  override def meta: ContainerTypeMeta = ContainerType
  // fields
  // Field #1 - simpleContainerType
  private var _simpleContainerType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType =
    null // Underlying type: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType
  override def simpleContainerType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType =
    simpleContainerTypeOrThrow
  override def simpleContainerType_=(
    x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType
  ): Unit = { _simpleContainerType = x }
  override def simpleContainerTypeOption
    : Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType] =
    if (simpleContainerTypeIsSet) Some(_simpleContainerType) else None
  override def simpleContainerTypeOrNull
    : io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType = _simpleContainerType
  // Throws NPE when the field was never set.
  override def simpleContainerTypeOrThrow
    : io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType = if (simpleContainerTypeIsSet)
    _simpleContainerType
  else throw new java.lang.NullPointerException("field simpleContainerType of ContainerType missing")
  override def simpleContainerTypeIsSet: Boolean = _simpleContainerType != null
  override def simpleContainerTypeUnset(): Unit = { _simpleContainerType = null }
  // Field #99 - annotations
  private var _annotations: scala.collection.Seq[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
  ] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  // The bare accessor falls back to an empty Seq rather than throwing.
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    annotationsOrDefault
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { _annotations = x }
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    if (annotationsIsSet) Some(_annotations) else None
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations else scala.collection.Seq.empty
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] = _annotations
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations
    else throw new java.lang.NullPointerException("field __annotations of ContainerType missing")
  override def annotationsIsSet: Boolean = _annotations != null
  override def annotationsUnset(): Unit = { _annotations = null }
  // end fields
  // Unknown fields read off the wire, kept (in reverse read order) so they
  // can be round-tripped on write when preserveUnknownFields is enabled.
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  /** Serializes this record: each set field in id order, any preserved
    * unknown fields, then the field-stop marker. Unset fields are omitted. */
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(ContainerType.CONTAINERTYPE_SDESC)
    if (simpleContainerTypeIsSet) {
      oprot.writeFieldBegin(ContainerType.SIMPLECONTAINERTYPE_FDESC)
      _simpleContainerType.write(oprot)
      oprot.writeFieldEnd()
    }
    if (annotationsIsSet) {
      oprot.writeFieldBegin(ContainerType.ANNOTATIONS_FDESC)
      oprot.writeListBegin(
        new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _annotations.size)
      )
      _annotations.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      // unknownFields was built by prepending during read; reverse restores read order.
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  /** Deserializes this record in place from `iprot`; see RawMapType.read for
    * the overall structure. Field 1 is a nested struct, field 99 a list of
    * Annotation structs; mismatched wire types are skipped. */
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily allocates (at most once per read) the bucket for unknown fields.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        ContainerType.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // simpleContainerType
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRUCT) {
              _simpleContainerType = ({
                val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType.createRawRecord
                s.read(iprot)
                s
              })
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 99 => { // annotations
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _annotations = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                // Accumulate into a Vector sized from the wire-declared length.
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          // Re-wrap so the failing field id is recorded in the error message.
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure ContainerType".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  /** Merges fields from `that`: simpleContainerType is taken only when unset
    * here; annotations lists are concatenated when both records have them. */
  override def merge(that: ContainerType): Unit = {
    if (that.simpleContainerTypeIsSet && !this.simpleContainerTypeIsSet) {
      this.simpleContainerType_=(that.simpleContainerTypeOrNull)
    }
    if (that.annotationsIsSet && !this.annotationsIsSet) {
      this.__annotations_=(that.annotationsOrDefault)
    } else if (that.annotationsIsSet && this.annotationsIsSet) {
      this.__annotations_=(this.__annotations ++ that.__annotations)
    }
  }
  override def mergeCopy(that: ContainerType): ContainerType = {
    val ret = ContainerType.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: ContainerType => this.equals(o)
    case _ => false
  }
  /** Field-by-field equality: each field must be set on both (with equal
    * values) or unset on both. */
  def equals(that: ContainerType): Boolean = {
    that != null &&
    (if (this.simpleContainerTypeIsSet)
       (that.simpleContainerTypeIsSet && this.simpleContainerTypeOrNull == that.simpleContainerTypeOrNull)
     else !that.simpleContainerTypeIsSet) &&
    (if (this.annotationsIsSet) (that.annotationsIsSet && this.annotationsOrDefault == that.annotationsOrDefault)
     else !that.annotationsIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (simpleContainerTypeIsSet) hasher.append(_simpleContainerType.##)
    if (annotationsIsSet) hasher.append(_annotations.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (simpleContainerTypeIsSet) ret = simpleContainerTypeOrNull :: ret
    if (annotationsIsSet) ret = annotationsOrDefault :: ret
    ret.reverse
  }
  // Resets the record: both fields unset, unknown fields discarded.
  override def clear() {
    simpleContainerTypeUnset()
    annotationsUnset()
    unknownFields = Nil
  }
  // Maps a Thrift field id to its _Fields descriptor; null when unknown.
  def fieldForId(id: Int): ContainerType._Fields = id match {
    case 1 => ContainerType._Fields.simpleContainerType
    case 99 => ContainerType._Fields.__annotations
    case _ => null
  }
  def isSet(field: ContainerType._Fields): Boolean = field match {
    case ContainerType._Fields.simpleContainerType => simpleContainerTypeIsSet
    case ContainerType._Fields.__annotations => annotationsIsSet
    case _ => false
  }
  def getFieldValue(field: ContainerType._Fields): AnyRef = field match {
    case ContainerType._Fields.simpleContainerType => simpleContainerTypeOrNull.asInstanceOf[AnyRef]
    case ContainerType._Fields.__annotations => annotationsOrDefault.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  // Sets the given field from an untyped value; unknown fields are ignored.
  def setFieldValue(field: ContainerType._Fields, value: AnyRef) {
    field match {
      case ContainerType._Fields.simpleContainerType =>
        simpleContainerType_=(
          value.asInstanceOf[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType]
        )
      case ContainerType._Fields.__annotations =>
        __annotations_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
        )
      case _ =>
    }
  }
  /** Deep copy via a Thrift binary round-trip through an in-memory buffer. */
  override def deepCopy(): RawContainerType = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = ContainerType.createRawRecord
    ret.read(prot)
    ret
  }
  /** Builds a new raw record from the given field values; a null argument
    * leaves the corresponding field unset. */
  override def copy(
    simpleContainerType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleContainerType =
      simpleContainerTypeOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): RawContainerType = {
    val ret = new RawContainerType
    if (simpleContainerType != null) ret.simpleContainerType_=(simpleContainerType)
    if (__annotations != null) ret.__annotations_=(__annotations)
    ret
  }
  /** Renders the record through TStringProtocol as a UTF-8 string. */
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion object for Typeref: builder entry point, implicit companion
  * provider, and Ordering/Comparator instances. */
object Typeref extends TyperefMeta {
  object Builder {
    // Phantom-type evidence that typeAlias (the required field) has been
    // supplied; `result()` demands it at compile time.
    sealed trait HasTypeAlias
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasTypeAlias
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  // Type-state builder: the typeAlias setter refines `State` so that
  // result()/resultMutable() only compile once the field is set.
  class Builder[+State] private[Typeref] (private var obj: RawTyperef) {
    def typeAlias(v: String): Typeref.Builder[State with Builder.HasTypeAlias] = {
      obj.typeAlias_=(v)
      this.asInstanceOf[Typeref.Builder[State with Builder.HasTypeAlias]]
    }
    // Hands out the underlying record and invalidates the builder (obj = null)
    // so the mutable record cannot be aliased by a second result() call.
    def resultMutable()(implicit ev0: State <:< Builder.HasTypeAlias): MutableTyperef = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("Typeref.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasTypeAlias): Typeref = resultMutable()(ev0)
  }
  def newBuilder: Typeref.Builder.AllUnspecified = new Builder(Typeref.createRawRecord)
  implicit val companionProvider: TyperefCompanionProvider = new TyperefCompanionProvider
  // Ordering/Comparator delegate to Typeref.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[Typeref] = {
    new _root_.scala.math.Ordering[Typeref] {
      override def compare(x: Typeref, y: Typeref): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[Typeref] = {
    new _root_.java.util.Comparator[Typeref] {
      override def compare(x: Typeref, y: Typeref): Int = x.compare(y)
    }
  }
}
/** Metadata for the Typeref record: Thrift struct/field descriptors,
  * name/id lookup tables, record factories, and the Spindle field descriptor
  * for its single typeAlias field. */
class TyperefMeta
  extends JavaTyperefMeta[Typeref, RawTyperef, TyperefMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Typeref] {
  override def recordName: String = "Typeref"
  // Thrift descriptors.
  val TYPEREF_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Typeref")
  val TYPEALIAS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "typeAlias",
      org.apache.thrift.protocol.TType.STRING,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup table for name-based protocols (JSON/BSON) that carry field names
  // instead of ids on the wire.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "typeAlias" -> TYPEALIAS_FDESC
  )
  object _Fields {
    case object typeAlias extends _Fields(1, "typeAlias")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.typeAlias
  )
  // Record factories: all three create the same RawTyperef instance.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: Typeref = createRawRecord
  override def createRawRecord: RawTyperef = new RawTyperef
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[Typeref] = {
    if (x.isInstanceOf[Typeref]) Some(x.asInstanceOf[Typeref]) else None
  }
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
  // Spindle Descriptors.
  // Field #1: typeAlias (optional string). Raw setter/unsetter cast the
  // mutable record down to RawTyperef.
  val typeAlias =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Typeref, TyperefMeta](
      name = "typeAlias",
      longName = "typeAlias",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.typeAliasOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Typeref], v: String) => {
        r.asInstanceOf[RawTyperef].typeAlias_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Typeref]) => {
        r.asInstanceOf[RawTyperef].typeAliasUnset()
      },
      manifest = manifest[String]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Typeref, TyperefMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Typeref, TyperefMeta]](
      typeAlias
    )
  // Convenience constructor.
  def apply(
    typeAlias: String
  ): Typeref = {
    val ret = this.createRawRecord
    ret.typeAlias_=(typeAlias)
    ret
  }
}
/** Bridges Typeref into Spindle's generic CompanionProvider machinery by
  * exposing the companion object as its meta. */
class TyperefCompanionProvider
  extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Typeref] {
  type CompanionT = TyperefMeta
  override def provide: TyperefMeta = Typeref
}
/** Read-only interface for the Typeref record. Concrete state lives in
  * RawTyperef; this trait supplies ordering, copy/builder helpers, and the
  * abstract accessor contract. */
trait Typeref
  extends JavaTyperef[
    Typeref,
    RawTyperef,
    TyperefMeta
  ]
  with org.apache.thrift.TBase[Typeref, Typeref._Fields] {
  override def meta: TyperefMeta
  /** Field comparison: unset sorts before set, then string values compare.
    * Each `else if` branch assigns `cmp` as a side effect and short-circuits
    * on the first nonzero result. */
  override def compare(that: Typeref): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.typeAliasIsSet.compareTo(that.typeAliasIsSet)
      cmp != 0
    }) cmp
    else if (this.typeAliasIsSet && {
      cmp = this.typeAliasOrNull.compareTo(that.typeAliasOrNull)
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: Typeref): Boolean = { this.compare(that) < 0 }
  override def >(that: Typeref): Boolean = { this.compare(that) > 0 }
  override def <=(that: Typeref): Boolean = { this.compare(that) <= 0 }
  override def >=(that: Typeref): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: Typeref): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): Typeref
  def copy(
    typeAlias: String = typeAliasOrNull
  ): Typeref
  /** Shallow mutable copy: the set field is carried into a new raw record. */
  def mutableCopy(): MutableTyperef = {
    val ret = Typeref.createRawRecord
    if (typeAliasIsSet) ret.typeAlias_=(typeAliasOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
   *
   * If the underlying implementation is mutable, `this` will be returned.
   * If the underlying implementation is immutable, a mutable copy will be returned.
   *
   * After mutating the instance returned by this method, the original instance
   * (on which `mutable` was called) will be in an undefined state. It may or may
   * not have been modified, depending on whether it was immutable or not.
   *
   * This is included as an optimization for when we want access to a Mutable record
   * but don't want to pay the cost of copying every time.
   */
  def mutable: MutableTyperef
  /** Seeds a new type-state Builder with this record's currently-set field. */
  def toBuilder(): Typeref.Builder.AllSpecified = {
    val ret = new Typeref.Builder(Typeref.createRawRecord)
    if (typeAliasIsSet) ret.typeAlias(typeAliasOrNull)
    ret
  }
  def mergeCopy(that: Typeref): Typeref
}
/** Mutable interface for Typeref: adds the setter/unsetter for typeAlias and
  * a merge operation on top of the read-only [[Typeref]] trait.
  */
trait MutableTyperef
  extends Typeref
  with JavaTyperefMutable[
    Typeref,
    RawTyperef,
    TyperefMeta
  ] {
  def typeAlias_=(x: String): Unit
  def typeAliasUnset(): Unit
  def merge(that: Typeref): Unit
  // Narrows the covariant copy to return a mutable record.
  def copy(
    typeAlias: String = typeAliasOrNull
  ): MutableTyperef
  // Already mutable, so no copy is needed (see the contract on Typeref.mutable).
  override def mutable: MutableTyperef = this
}
/** Concrete, mutable backing implementation of [[Typeref]].
  *
  * The single optional field is held in a nullable var; "is set" is encoded
  * as non-null. Unknown fields encountered while reading may be preserved
  * and re-emitted on write (guarded by RuntimeHelpers.preserveUnknownFields).
  */
final class RawTyperef
  extends JavaTyperefRaw[
    Typeref,
    RawTyperef,
    TyperefMeta
  ]
  with MutableTyperef {
  override def meta: TyperefMeta = Typeref
  // fields
  // Field #1 - typeAlias
  private var _typeAlias: String = null // Underlying type: String
  override def typeAlias: String = typeAliasOrThrow
  override def typeAlias_=(x: String): Unit = { _typeAlias = x }
  override def typeAliasOption: Option[String] = if (typeAliasIsSet) Some(_typeAlias) else None
  override def typeAliasOrNull: String = _typeAlias
  override def typeAliasOrThrow: String =
    if (typeAliasIsSet) _typeAlias else throw new java.lang.NullPointerException("field typeAlias of Typeref missing")
  override def typeAliasIsSet: Boolean = _typeAlias != null
  override def typeAliasUnset(): Unit = { _typeAlias = null }
  // end fields
  // Unknown fields captured during read, most-recent first (written back in reverse).
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  /** Thrift serialization: emits only set fields, then any preserved unknown fields. */
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Typeref.TYPEREF_SDESC)
    if (typeAliasIsSet) {
      oprot.writeFieldBegin(Typeref.TYPEALIAS_FDESC)
      oprot.writeString(_typeAlias)
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  /** Thrift deserialization: reads fields by id (or by name for protocols that
    * serialize names), skipping or preserving anything unrecognized.
    * Note: fields already set on this record are overwritten, not merged.
    */
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (once) the UnknownFields container for this read pass.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Typeref.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // typeAlias
            // Type mismatch on the wire => skip the value rather than fail.
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _typeAlias = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          // Wrap to include which field failed; the original exception is the cause.
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure Typeref".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  /** Copies fields set on `that` but not set on `this` (existing values win). */
  override def merge(that: Typeref): Unit = {
    if (that.typeAliasIsSet && !this.typeAliasIsSet) {
      this.typeAlias_=(that.typeAliasOrNull)
    }
  }
  /** Fresh record merged from `this` then `that`; `this` takes precedence. */
  override def mergeCopy(that: Typeref): Typeref = {
    val ret = Typeref.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Typeref => this.equals(o)
    case _ => false
  }
  // Field-wise equality: both unset, or both set with equal values.
  def equals(that: Typeref): Boolean = {
    that != null &&
    (if (this.typeAliasIsSet) (that.typeAliasIsSet && this.typeAliasOrNull == that.typeAliasOrNull)
     else !that.typeAliasIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (typeAliasIsSet) hasher.append(_typeAlias.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (typeAliasIsSet) ret = typeAliasOrNull :: ret
    ret.reverse
  }
  // Resets the record to the empty state, dropping any preserved unknown fields.
  override def clear() {
    typeAliasUnset()
    unknownFields = Nil
  }
  def fieldForId(id: Int): Typeref._Fields = id match {
    case 1 => Typeref._Fields.typeAlias
    case _ => null
  }
  def isSet(field: Typeref._Fields): Boolean = field match {
    case Typeref._Fields.typeAlias => typeAliasIsSet
    case _ => false
  }
  def getFieldValue(field: Typeref._Fields): AnyRef = field match {
    case Typeref._Fields.typeAlias => typeAliasOrNull.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  def setFieldValue(field: Typeref._Fields, value: AnyRef) {
    field match {
      case Typeref._Fields.typeAlias => typeAlias_=(value.asInstanceOf[String])
      case _ =>
    }
  }
  /** Deep copy via binary round-trip: write to an in-memory buffer, read back. */
  override def deepCopy(): RawTyperef = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Typeref.createRawRecord
    ret.read(prot)
    ret
  }
  // Note: a null argument leaves the field unset on the copy.
  override def copy(
    typeAlias: String = typeAliasOrNull
  ): RawTyperef = {
    val ret = new RawTyperef
    if (typeAlias != null) ret.typeAlias_=(typeAlias)
    ret
  }
  // Human-readable rendering via the string protocol round-trip.
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion for the Type record. Hosts a phantom-typed Builder that
  * statically requires both `id` and `simpleType` before `result()` can be
  * called, plus implicit ordering/comparator instances.
  */
object Type extends TypeMeta {
  object Builder {
    // Phantom-state markers: the Builder's State type accumulates these as
    // fields are set; result() demands State <:< HasId with HasSimpleType.
    sealed trait HasId
    sealed trait HasSimpleType
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasId with HasSimpleType
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[Type] (private var obj: RawType) {
    def id(v: String): Type.Builder[State with Builder.HasId] = {
      obj.id_=(v)
      this.asInstanceOf[Type.Builder[State with Builder.HasId]]
    }
    def simpleType(
      v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType
    ): Type.Builder[State with Builder.HasSimpleType] = {
      obj.simpleType_=(v)
      this.asInstanceOf[Type.Builder[State with Builder.HasSimpleType]]
    }
    // Single-use: hands out the underlying record and nulls the reference so a
    // second result() call fails fast instead of aliasing mutable state.
    def resultMutable()(implicit ev0: State <:< Builder.HasId, ev1: State <:< Builder.HasSimpleType): MutableType = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("Type.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasId, ev1: State <:< Builder.HasSimpleType): Type =
      resultMutable()(ev0, ev1)
  }
  def newBuilder: Type.Builder.AllUnspecified = new Builder(Type.createRawRecord)
  implicit val companionProvider: TypeCompanionProvider = new TypeCompanionProvider
  // Ordering/comparator both delegate to Type.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[Type] = {
    new _root_.scala.math.Ordering[Type] {
      override def compare(x: Type, y: Type): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[Type] = {
    new _root_.java.util.Comparator[Type] {
      override def compare(x: Type, y: Type): Int = x.compare(y)
    }
  }
}
/** Metadata for the Type record: Thrift struct/field descriptors, wire-name
  * and id lookup tables, record factories, and Spindle field descriptors.
  * Fields: id (1, STRING) and simpleType (2, STRUCT).
  */
class TypeMeta
  extends JavaTypeMeta[Type, RawType, TypeMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Type] {
  override def recordName: String = "Type"
  // Thrift descriptors.
  val TYPE_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Type")
  val ID_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "id",
      org.apache.thrift.protocol.TType.STRING,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val SIMPLETYPE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "simpleType",
      org.apache.thrift.protocol.TType.STRUCT,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup table for protocols that serialize field names instead of ids.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "id" -> ID_FDESC,
    "simpleType" -> SIMPLETYPE_FDESC
  )
  object _Fields {
    case object id extends _Fields(1, "id")
    case object simpleType extends _Fields(2, "simpleType")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.id,
    2.toShort -> _Fields.simpleType
  )
  // Record factories — all ultimately produce a RawType.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: Type = createRawRecord
  override def createRawRecord: RawType = new RawType
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[Type] = {
    if (x.isInstanceOf[Type]) Some(x.asInstanceOf[Type]) else None
  }
  // This record carries no struct-level annotations.
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
  // Spindle Descriptors.
  val id =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Type, TypeMeta](
      name = "id",
      longName = "id",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.idOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Type], v: String) => {
        r.asInstanceOf[RawType].id_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Type]) => {
        r.asInstanceOf[RawType].idUnset()
      },
      manifest = manifest[String]
    )
  val simpleType =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.StructFieldDescriptor[
      Type,
      TypeMeta,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleTypeMeta
    ](
      name = "simpleType",
      longName = "simpleType",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.simpleTypeOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Type],
        v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType
      ) => { r.asInstanceOf[RawType].simpleType_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Type]) => {
        r.asInstanceOf[RawType].simpleTypeUnset()
      },
      structMeta = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType,
      manifest = manifest[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  // Field descriptors in thrift-id order. NOTE: initialized after `id` and
  // `simpleType` above — val ordering matters here.
  override val fields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Type, TypeMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Type, TypeMeta]](
      id,
      simpleType
    )
  /** Convenience constructor with both fields set. */
  def apply(
    id: String,
    simpleType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType
  ): Type = {
    val ret = this.createRawRecord
    ret.id_=(id)
    ret.simpleType_=(simpleType)
    ret
  }
}
/** Glue class letting generic code obtain the Type companion (its meta)
  * via the CompanionProvider type class.
  */
class TypeCompanionProvider extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Type] {
  type CompanionT = TypeMeta
  override def provide: TypeMeta = Type
}
/** Read-only interface for the Type record (optional fields: id, simpleType).
  * Concrete state lives in [[RawType]]; [[MutableType]] adds setters.
  */
trait Type
  extends JavaType[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType, Type, RawType, TypeMeta]
  with org.apache.thrift.TBase[Type, Type._Fields] {
  override def meta: TypeMeta
  /** Field-wise ordering in id order (id, then simpleType): a null `that`
    * sorts first; an unset field sorts before a set one. The `cmp` var is
    * assigned inside the condition blocks — each `else if ({ ... })` both
    * computes and tests the comparison.
    */
  override def compare(that: Type): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.idIsSet.compareTo(that.idIsSet)
      cmp != 0
    }) cmp
    else if (this.idIsSet && {
      cmp = this.idOrNull.compareTo(that.idOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.simpleTypeIsSet.compareTo(that.simpleTypeIsSet)
      cmp != 0
    }) cmp
    else if (this.simpleTypeIsSet && {
      cmp = this.simpleTypeOrNull.compareTo(that.simpleTypeOrNull)
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: Type): Boolean = { this.compare(that) < 0 }
  override def >(that: Type): Boolean = { this.compare(that) > 0 }
  override def <=(that: Type): Boolean = { this.compare(that) <= 0 }
  override def >=(that: Type): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: Type): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): Type
  def copy(
    id: String = idOrNull,
    simpleType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType = simpleTypeOrNull
  ): Type
  /** Copies only the set fields into a fresh mutable record. */
  def mutableCopy(): MutableType = {
    val ret = Type.createRawRecord
    if (idIsSet) ret.id_=(idOrNull)
    if (simpleTypeIsSet) ret.simpleType_=(simpleTypeOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned.
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableType
  /** Seeds a new Builder with this record's set fields. The returned
    * Builder is typed AllSpecified, so `result()` is immediately callable.
    */
  def toBuilder(): Type.Builder.AllSpecified = {
    val ret = new Type.Builder(Type.createRawRecord)
    if (idIsSet) ret.id(idOrNull)
    if (simpleTypeIsSet) ret.simpleType(simpleTypeOrNull)
    ret
  }
  def mergeCopy(that: Type): Type
}
/** Mutable interface for Type: adds setters/unsetters for id and simpleType
  * and a merge operation on top of the read-only [[Type]] trait.
  */
trait MutableType
  extends Type
  with JavaTypeMutable[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType, Type, RawType, TypeMeta] {
  def id_=(x: String): Unit
  def idUnset(): Unit
  def simpleType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType): Unit
  def simpleTypeUnset(): Unit
  def merge(that: Type): Unit
  // Narrows the covariant copy to return a mutable record.
  def copy(
    id: String = idOrNull,
    simpleType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType = simpleTypeOrNull
  ): MutableType
  // Already mutable, so no copy is needed (see the contract on Type.mutable).
  override def mutable: MutableType = this
}
/** Concrete, mutable backing implementation of [[Type]].
  *
  * Each optional field is held in a nullable var; "is set" is encoded as
  * non-null. Unknown fields encountered while reading may be preserved and
  * re-emitted on write (guarded by RuntimeHelpers.preserveUnknownFields).
  */
final class RawType
  extends JavaTypeRaw[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType, Type, RawType, TypeMeta]
  with MutableType {
  override def meta: TypeMeta = Type
  // fields
  // Field #1 - id
  private var _id: String = null // Underlying type: String
  override def id: String = idOrThrow
  override def id_=(x: String): Unit = { _id = x }
  override def idOption: Option[String] = if (idIsSet) Some(_id) else None
  override def idOrNull: String = _id
  override def idOrThrow: String =
    if (idIsSet) _id else throw new java.lang.NullPointerException("field id of Type missing")
  override def idIsSet: Boolean = _id != null
  override def idUnset(): Unit = { _id = null }
  // Field #2 - simpleType
  private var _simpleType
    : io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType = null // Underlying type: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType
  override def simpleType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType = simpleTypeOrThrow
  override def simpleType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType): Unit = {
    _simpleType = x
  }
  override def simpleTypeOption: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType] =
    if (simpleTypeIsSet) Some(_simpleType) else None
  override def simpleTypeOrNull: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType = _simpleType
  override def simpleTypeOrThrow: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType =
    if (simpleTypeIsSet) _simpleType else throw new java.lang.NullPointerException("field simpleType of Type missing")
  override def simpleTypeIsSet: Boolean = _simpleType != null
  override def simpleTypeUnset(): Unit = { _simpleType = null }
  // end fields
  // Unknown fields captured during read, most-recent first (written back in reverse).
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  /** Thrift serialization: emits only set fields, then any preserved unknown fields. */
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Type.TYPE_SDESC)
    if (idIsSet) {
      oprot.writeFieldBegin(Type.ID_FDESC)
      oprot.writeString(_id)
      oprot.writeFieldEnd()
    }
    if (simpleTypeIsSet) {
      oprot.writeFieldBegin(Type.SIMPLETYPE_FDESC)
      _simpleType.write(oprot)
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  /** Thrift deserialization: reads fields by id (or by name for protocols that
    * serialize names), skipping or preserving anything unrecognized.
    * Note: fields already set on this record are overwritten, not merged.
    */
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (once) the UnknownFields container for this read pass.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Type.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // id
            // Type mismatch on the wire => skip the value rather than fail.
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _id = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // simpleType
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRUCT) {
              // Nested struct: delegate to SimpleType's own reader.
              _simpleType = ({
                val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType.createRawRecord
                s.read(iprot)
                s
              })
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          // Wrap to include which field failed; the original exception is the cause.
          throw new org.apache.thrift.TException("Error reading field %d in structure Type".format(field_header.id), e)
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  /** Copies fields set on `that` but not set on `this` (existing values win). */
  override def merge(that: Type): Unit = {
    if (that.idIsSet && !this.idIsSet) {
      this.id_=(that.idOrNull)
    }
    if (that.simpleTypeIsSet && !this.simpleTypeIsSet) {
      this.simpleType_=(that.simpleTypeOrNull)
    }
  }
  /** Fresh record merged from `this` then `that`; `this` takes precedence. */
  override def mergeCopy(that: Type): Type = {
    val ret = Type.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Type => this.equals(o)
    case _ => false
  }
  // Field-wise equality: each field either unset on both, or set with equal values.
  def equals(that: Type): Boolean = {
    that != null &&
    (if (this.idIsSet) (that.idIsSet && this.idOrNull == that.idOrNull) else !that.idIsSet) &&
    (if (this.simpleTypeIsSet) (that.simpleTypeIsSet && this.simpleTypeOrNull == that.simpleTypeOrNull)
     else !that.simpleTypeIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (idIsSet) hasher.append(_id.##)
    if (simpleTypeIsSet) hasher.append(_simpleType.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (idIsSet) ret = idOrNull :: ret
    if (simpleTypeIsSet) ret = simpleTypeOrNull :: ret
    ret.reverse
  }
  // Resets the record to the empty state, dropping any preserved unknown fields.
  override def clear() {
    idUnset()
    simpleTypeUnset()
    unknownFields = Nil
  }
  def fieldForId(id: Int): Type._Fields = id match {
    case 1 => Type._Fields.id
    case 2 => Type._Fields.simpleType
    case _ => null
  }
  def isSet(field: Type._Fields): Boolean = field match {
    case Type._Fields.id => idIsSet
    case Type._Fields.simpleType => simpleTypeIsSet
    case _ => false
  }
  def getFieldValue(field: Type._Fields): AnyRef = field match {
    case Type._Fields.id => idOrNull.asInstanceOf[AnyRef]
    case Type._Fields.simpleType => simpleTypeOrNull.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  def setFieldValue(field: Type._Fields, value: AnyRef) {
    field match {
      case Type._Fields.id => id_=(value.asInstanceOf[String])
      case Type._Fields.simpleType =>
        simpleType_=(value.asInstanceOf[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType])
      case _ =>
    }
  }
  /** Deep copy via binary round-trip: write to an in-memory buffer, read back. */
  override def deepCopy(): RawType = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Type.createRawRecord
    ret.read(prot)
    ret
  }
  // Note: a null argument leaves the corresponding field unset on the copy.
  override def copy(
    id: String = idOrNull,
    simpleType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SimpleType = simpleTypeOrNull
  ): RawType = {
    val ret = new RawType
    if (id != null) ret.id_=(id)
    if (simpleType != null) ret.simpleType_=(simpleType)
    ret
  }
  // Human-readable rendering via the string protocol round-trip.
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion for the Typedef record. Hosts a phantom-typed Builder that
  * statically requires `typeId` and `typeAlias` before `result()` can be
  * called (annotations are optional), plus implicit ordering/comparator
  * instances.
  */
object Typedef extends TypedefMeta {
  object Builder {
    // Phantom-state markers: the Builder's State type accumulates these as
    // fields are set; result() demands State <:< HasTypeId with HasTypeAlias.
    sealed trait HasTypeId
    sealed trait HasTypeAlias
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasTypeId with HasTypeAlias
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[Typedef] (private var obj: RawTypedef) {
    def typeId(v: String): Typedef.Builder[State with Builder.HasTypeId] = {
      obj.typeId_=(v)
      this.asInstanceOf[Typedef.Builder[State with Builder.HasTypeId]]
    }
    def typeAlias(v: String): Typedef.Builder[State with Builder.HasTypeAlias] = {
      obj.typeAlias_=(v)
      this.asInstanceOf[Typedef.Builder[State with Builder.HasTypeAlias]]
    }
    // Optional field: does not advance the phantom state.
    def __annotations(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
    ): Typedef.Builder[State] = {
      obj.__annotations_=(v)
      this
    }
    // Overload taking an Option: None unsets the field.
    def __annotations(
      vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    ): Typedef.Builder[State] = {
      vOpt match {
        case Some(v) => obj.__annotations_=(v)
        case None => obj.annotationsUnset()
      }
      this
    }
    // Single-use: hands out the underlying record and nulls the reference so a
    // second result() call fails fast instead of aliasing mutable state.
    def resultMutable()(
      implicit ev0: State <:< Builder.HasTypeId,
      ev1: State <:< Builder.HasTypeAlias
    ): MutableTypedef = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("Typedef.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasTypeId, ev1: State <:< Builder.HasTypeAlias): Typedef =
      resultMutable()(ev0, ev1)
  }
  def newBuilder: Typedef.Builder.AllUnspecified = new Builder(Typedef.createRawRecord)
  implicit val companionProvider: TypedefCompanionProvider = new TypedefCompanionProvider
  // Ordering/comparator both delegate to Typedef.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[Typedef] = {
    new _root_.scala.math.Ordering[Typedef] {
      override def compare(x: Typedef, y: Typedef): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[Typedef] = {
    new _root_.java.util.Comparator[Typedef] {
      override def compare(x: Typedef, y: Typedef): Int = x.compare(y)
    }
  }
}
/** Metadata for the Typedef record: Thrift struct/field descriptors,
  * wire-name and id lookup tables, record factories, and Spindle field
  * descriptors. Fields: typeId (1, STRING), typeAlias (2, STRING),
  * annotations (99, LIST). Carries the struct annotation generate_proxy=true.
  */
class TypedefMeta
  extends JavaTypedefMeta[Typedef, RawTypedef, TypedefMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Typedef] {
  override def recordName: String = "Typedef"
  // Thrift descriptors.
  val TYPEDEF_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Typedef")
  val TYPEID_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "typeId",
      org.apache.thrift.protocol.TType.STRING,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val TYPEALIAS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "typeAlias",
      org.apache.thrift.protocol.TType.STRING,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val ANNOTATIONS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "annotations",
      org.apache.thrift.protocol.TType.LIST,
      99, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup table for protocols that serialize field names instead of ids.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "typeId" -> TYPEID_FDESC,
    "typeAlias" -> TYPEALIAS_FDESC,
    "annotations" -> ANNOTATIONS_FDESC
  )
  object _Fields {
    case object typeId extends _Fields(1, "typeId")
    case object typeAlias extends _Fields(2, "typeAlias")
    // Scala-side name is __annotations; the wire name stays "annotations".
    case object __annotations extends _Fields(99, "annotations")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.typeId,
    2.toShort -> _Fields.typeAlias,
    99.toShort -> _Fields.__annotations
  )
  // Record factories — all ultimately produce a RawTypedef.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: Typedef = createRawRecord
  override def createRawRecord: RawTypedef = new RawTypedef
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[Typedef] = {
    if (x.isInstanceOf[Typedef]) Some(x.asInstanceOf[Typedef]) else None
  }
  // Struct-level annotations declared on the record.
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations(
      scala.collection.immutable.Vector(
        ("generate_proxy", "true")
      )
    )
  // Spindle Descriptors.
  val typeId =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Typedef, TypedefMeta](
      name = "typeId",
      longName = "typeId",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.typeIdOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Typedef], v: String) => {
        r.asInstanceOf[RawTypedef].typeId_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Typedef]) => {
        r.asInstanceOf[RawTypedef].typeIdUnset()
      },
      manifest = manifest[String]
    )
  val typeAlias =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Typedef, TypedefMeta](
      name = "typeAlias",
      longName = "typeAlias",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.typeAliasOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Typedef], v: String) => {
        r.asInstanceOf[RawTypedef].typeAlias_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Typedef]) => {
        r.asInstanceOf[RawTypedef].typeAliasUnset()
      },
      manifest = manifest[String]
    )
  val __annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
    ], Typedef, TypedefMeta](
      name = "annotations",
      longName = "annotations",
      id = 99,
      annotations = Map(),
      owner = this,
      getter = _.annotationsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Typedef],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
      ) => { r.asInstanceOf[RawTypedef].__annotations_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Typedef]) => {
        r.asInstanceOf[RawTypedef].annotationsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  // Field descriptors in thrift-id order. NOTE: initialized after the
  // descriptor vals above — val ordering matters here.
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Typedef, TypedefMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Typedef, TypedefMeta]](
      typeId,
      typeAlias,
      __annotations
    )
  /** Convenience constructor with all three fields set. */
  def apply(
    typeId: String,
    typeAlias: String,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Typedef = {
    val ret = this.createRawRecord
    ret.typeId_=(typeId)
    ret.typeAlias_=(typeAlias)
    ret.__annotations_=(__annotations)
    ret
  }
}
/** Glue class letting generic code obtain the Typedef companion (its meta)
  * via the CompanionProvider type class.
  */
class TypedefCompanionProvider
  extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Typedef] {
  type CompanionT = TypedefMeta
  override def provide: TypedefMeta = Typedef
}
/** Read-only interface for the Typedef record (optional fields: typeId,
  * typeAlias, annotations). Concrete state lives in RawTypedef;
  * [[MutableTypedef]] adds setters.
  */
trait Typedef
  extends JavaTypedef[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    Typedef,
    RawTypedef,
    TypedefMeta
  ]
  with org.apache.thrift.TBase[Typedef, Typedef._Fields] {
  override def meta: TypedefMeta
  /** Field-wise ordering in id order (typeId, typeAlias, annotations): a null
    * `that` sorts first; an unset field sorts before a set one. The `cmp` var
    * is assigned inside the condition blocks — each `else if ({ ... })` both
    * computes and tests the comparison. The annotations list is compared via
    * TBaseHelper on a Java view of the Seq.
    */
  override def compare(that: Typedef): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.typeIdIsSet.compareTo(that.typeIdIsSet)
      cmp != 0
    }) cmp
    else if (this.typeIdIsSet && {
      cmp = this.typeIdOrNull.compareTo(that.typeIdOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.typeAliasIsSet.compareTo(that.typeAliasIsSet)
      cmp != 0
    }) cmp
    else if (this.typeAliasIsSet && {
      cmp = this.typeAliasOrNull.compareTo(that.typeAliasOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.annotationsIsSet.compareTo(that.annotationsIsSet)
      cmp != 0
    }) cmp
    else if (this.annotationsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.__annotations).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.__annotations).asJava
      )
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: Typedef): Boolean = { this.compare(that) < 0 }
  override def >(that: Typedef): Boolean = { this.compare(that) > 0 }
  override def <=(that: Typedef): Boolean = { this.compare(that) <= 0 }
  override def >=(that: Typedef): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: Typedef): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): Typedef
  def copy(
    typeId: String = typeIdOrNull,
    typeAlias: String = typeAliasOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): Typedef
  /** Copies only the set fields into a fresh mutable record. */
  def mutableCopy(): MutableTypedef = {
    val ret = Typedef.createRawRecord
    if (typeIdIsSet) ret.typeId_=(typeIdOrNull)
    if (typeAliasIsSet) ret.typeAlias_=(typeAliasOrNull)
    if (annotationsIsSet) ret.__annotations_=(annotationsOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned.
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableTypedef
  /** Seeds a new Builder with this record's set fields. The returned
    * Builder is typed AllSpecified, so `result()` is immediately callable.
    */
  def toBuilder(): Typedef.Builder.AllSpecified = {
    val ret = new Typedef.Builder(Typedef.createRawRecord)
    if (typeIdIsSet) ret.typeId(typeIdOrNull)
    if (typeAliasIsSet) ret.typeAlias(typeAliasOrNull)
    if (annotationsIsSet) ret.__annotations(annotationsOrNull)
    ret
  }
  def mergeCopy(that: Typedef): Typedef
}
/** Mutable view of a [[Typedef]]: adds field setters/unsetters and in-place
  * merge on top of the read-only interface. Implemented by [[RawTypedef]].
  */
trait MutableTypedef
    extends Typedef
    with JavaTypedefMutable[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
      Typedef,
      RawTypedef,
      TypedefMeta
    ] {
  def typeId_=(x: String): Unit
  def typeIdUnset(): Unit
  def typeAlias_=(x: String): Unit
  def typeAliasUnset(): Unit
  def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit
  def annotationsUnset(): Unit
  /** Merges `that` into this record in place (see RawTypedef.merge for rules). */
  def merge(that: Typedef): Unit
  def copy(
    typeId: String = typeIdOrNull,
    typeAlias: String = typeAliasOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableTypedef
  // Already mutable, so `mutable` is the identity (no copy is made).
  override def mutable: MutableTypedef = this
}
/** Forwarding proxy: every [[Typedef]] operation delegates verbatim to
  * `underlying`. Extend and override selectively to wrap or decorate a record
  * without re-implementing the full interface.
  */
trait TypedefProxy extends Typedef {
  /** The record all calls are forwarded to. */
  protected def underlying: Typedef
  override def meta = underlying.meta
  // field/proxy_ref.ssp
  override def typeId: String = underlying.typeId
  override def typeIdOption: Option[String] = underlying.typeIdOption
  override def typeIdOrNull: String = underlying.typeIdOrNull
  override def typeIdOrThrow: String = underlying.typeIdOrThrow
  override def typeIdIsSet: Boolean = underlying.typeIdIsSet
  // field/proxy_ref.ssp
  override def typeAlias: String = underlying.typeAlias
  override def typeAliasOption: Option[String] = underlying.typeAliasOption
  override def typeAliasOrNull: String = underlying.typeAliasOrNull
  override def typeAliasOrThrow: String = underlying.typeAliasOrThrow
  override def typeAliasIsSet: Boolean = underlying.typeAliasIsSet
  // field/proxy_container.ssp
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.__annotations
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    underlying.annotationsOption
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrDefault
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrNull
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrThrow
  override def annotationsIsSet: Boolean = underlying.annotationsIsSet
  override def compare(that: Typedef): Int = underlying.compare(that)
  override def clear() { underlying.clear }
  override def read(iprot: org.apache.thrift.protocol.TProtocol) { underlying.read(iprot) }
  override def write(oprot: org.apache.thrift.protocol.TProtocol) { underlying.write(oprot) }
  override def copy(
    typeId: String = typeIdOrNull,
    typeAlias: String = typeAliasOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): Typedef = underlying.copy(
    typeId = typeId,
    typeAlias = typeAlias,
    __annotations = __annotations
  )
  override def mutableCopy(): MutableTypedef = underlying.mutableCopy()
  override def mergeCopy(that: Typedef): Typedef = underlying.mergeCopy(that)
  override def mutable: MutableTypedef = underlying.mutable
  override def deepCopy(): Typedef = underlying.deepCopy()
  override def fieldForId(id: Int): Typedef._Fields = underlying.fieldForId(id)
  override def isSet(field: Typedef._Fields): Boolean = underlying.isSet(field)
  override def getFieldValue(field: Typedef._Fields): AnyRef = underlying.getFieldValue(field)
  override def setFieldValue(field: Typedef._Fields, value: AnyRef) { underlying.setFieldValue(field, value) }
  // Equality/hashing also delegate, so a proxy compares equal to its target.
  override def hashCode(): Int = underlying.hashCode
  override def equals(that: Any): Boolean = underlying.equals(that)
  override def toString(): String = underlying.toString
}
/** Mutable variant of [[TypedefProxy]]: additionally forwards setters,
  * unsetters, merge, and the mutable `copy` to `underlying`.
  */
trait MutableTypedefProxy extends MutableTypedef with TypedefProxy {
  // Narrows the proxy target so mutators are available.
  protected def underlying: MutableTypedef
  override def typeId_=(x: String): Unit = { underlying.typeId_=(x) }
  override def typeIdUnset(): Unit = { underlying.typeIdUnset() }
  override def typeAlias_=(x: String): Unit = { underlying.typeAlias_=(x) }
  override def typeAliasUnset(): Unit = { underlying.typeAliasUnset() }
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { underlying.__annotations_=(x) }
  override def annotationsUnset(): Unit = { underlying.annotationsUnset() }
  override def copy(
    typeId: String = typeIdOrNull,
    typeAlias: String = typeAliasOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableTypedef = underlying.copy(
    typeId = typeId,
    typeAlias = typeAlias,
    __annotations = __annotations
  )
  override def merge(that: Typedef): Unit = underlying.merge(that)
}
/** Concrete, mutable backing implementation of [[Typedef]].
  *
  * Field presence is encoded as reference nullability: a field is "set" iff
  * its backing var is non-null. Unknown Thrift fields encountered during
  * `read` may be preserved (see `unknownFields`) and re-emitted on `write`.
  */
final class RawTypedef
    extends JavaTypedefRaw[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
      Typedef,
      RawTypedef,
      TypedefMeta
    ]
    with MutableTypedef {
  override def meta: TypedefMeta = Typedef
  // fields
  // Field #1 - typeId
  private var _typeId: String = null // Underlying type: String
  // Bare accessor throws if unset (delegates to OrThrow).
  override def typeId: String = typeIdOrThrow
  override def typeId_=(x: String): Unit = { _typeId = x }
  override def typeIdOption: Option[String] = if (typeIdIsSet) Some(_typeId) else None
  override def typeIdOrNull: String = _typeId
  override def typeIdOrThrow: String =
    if (typeIdIsSet) _typeId else throw new java.lang.NullPointerException("field typeId of Typedef missing")
  override def typeIdIsSet: Boolean = _typeId != null
  override def typeIdUnset(): Unit = { _typeId = null }
  // Field #2 - typeAlias
  private var _typeAlias: String = null // Underlying type: String
  override def typeAlias: String = typeAliasOrThrow
  override def typeAlias_=(x: String): Unit = { _typeAlias = x }
  override def typeAliasOption: Option[String] = if (typeAliasIsSet) Some(_typeAlias) else None
  override def typeAliasOrNull: String = _typeAlias
  override def typeAliasOrThrow: String =
    if (typeAliasIsSet) _typeAlias else throw new java.lang.NullPointerException("field typeAlias of Typedef missing")
  override def typeAliasIsSet: Boolean = _typeAlias != null
  override def typeAliasUnset(): Unit = { _typeAlias = null }
  // Field #99 - annotations
  private var _annotations: scala.collection.Seq[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
  ] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  // NOTE: unlike the scalar fields, the bare accessor falls back to an empty
  // Seq (OrDefault) rather than throwing when unset.
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    annotationsOrDefault
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { _annotations = x }
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    if (annotationsIsSet) Some(_annotations) else None
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations else scala.collection.Seq.empty
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] = _annotations
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations
    else throw new java.lang.NullPointerException("field __annotations of Typedef missing")
  override def annotationsIsSet: Boolean = _annotations != null
  override def annotationsUnset(): Unit = { _annotations = null }
  // end fields
  // Unknown fields captured during read, newest first (re-reversed on write).
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  /** Serializes this record. Only set fields are written, in field-id order;
    * preserved unknown fields are appended when the runtime opts in.
    */
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Typedef.TYPEDEF_SDESC)
    if (typeIdIsSet) {
      oprot.writeFieldBegin(Typedef.TYPEID_FDESC)
      oprot.writeString(_typeId)
      oprot.writeFieldEnd()
    }
    if (typeAliasIsSet) {
      oprot.writeFieldBegin(Typedef.TYPEALIAS_FDESC)
      oprot.writeString(_typeAlias)
      oprot.writeFieldEnd()
    }
    if (annotationsIsSet) {
      oprot.writeFieldBegin(Typedef.ANNOTATIONS_FDESC)
      oprot.writeListBegin(
        new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _annotations.size)
      )
      _annotations.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  /** Populates this record from the protocol. Fields with an unexpected wire
    * type are skipped; unrecognized field ids are either preserved or skipped
    * depending on the runtime's preserveUnknownFields setting.
    */
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily allocates the UnknownFields bucket on first unknown field.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Typedef.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // typeId
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _typeId = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // typeAlias
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _typeAlias = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 99 => { // annotations
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _annotations = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure Typedef".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  /** In-place merge: scalar fields keep this record's value when already set;
    * the annotations list is concatenated (this ++ that) when both are set.
    */
  override def merge(that: Typedef): Unit = {
    if (that.typeIdIsSet && !this.typeIdIsSet) {
      this.typeId_=(that.typeIdOrNull)
    }
    if (that.typeAliasIsSet && !this.typeAliasIsSet) {
      this.typeAlias_=(that.typeAliasOrNull)
    }
    if (that.annotationsIsSet && !this.annotationsIsSet) {
      this.__annotations_=(that.annotationsOrDefault)
    } else if (that.annotationsIsSet && this.annotationsIsSet) {
      this.__annotations_=(this.__annotations ++ that.__annotations)
    }
  }
  override def mergeCopy(that: Typedef): Typedef = {
    val ret = Typedef.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Typedef => this.equals(o)
    case _ => false
  }
  /** Structural equality: each field must agree on both set-ness and value. */
  def equals(that: Typedef): Boolean = {
    that != null &&
    (if (this.typeIdIsSet) (that.typeIdIsSet && this.typeIdOrNull == that.typeIdOrNull) else !that.typeIdIsSet) &&
    (if (this.typeAliasIsSet) (that.typeAliasIsSet && this.typeAliasOrNull == that.typeAliasOrNull)
     else !that.typeAliasIsSet) &&
    (if (this.annotationsIsSet) (that.annotationsIsSet && this.annotationsOrDefault == that.annotationsOrDefault)
     else !that.annotationsIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (typeIdIsSet) hasher.append(_typeId.##)
    if (typeAliasIsSet) hasher.append(_typeAlias.##)
    if (annotationsIsSet) hasher.append(_annotations.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (typeIdIsSet) ret = typeIdOrNull :: ret
    if (typeAliasIsSet) ret = typeAliasOrNull :: ret
    if (annotationsIsSet) ret = annotationsOrDefault :: ret
    ret.reverse
  }
  // Resets the record: unsets every field and drops preserved unknown fields.
  override def clear() {
    typeIdUnset()
    typeAliasUnset()
    annotationsUnset()
    unknownFields = Nil
  }
  def fieldForId(id: Int): Typedef._Fields = id match {
    case 1 => Typedef._Fields.typeId
    case 2 => Typedef._Fields.typeAlias
    case 99 => Typedef._Fields.__annotations
    case _ => null
  }
  def isSet(field: Typedef._Fields): Boolean = field match {
    case Typedef._Fields.typeId => typeIdIsSet
    case Typedef._Fields.typeAlias => typeAliasIsSet
    case Typedef._Fields.__annotations => annotationsIsSet
    case _ => false
  }
  def getFieldValue(field: Typedef._Fields): AnyRef = field match {
    case Typedef._Fields.typeId => typeIdOrNull.asInstanceOf[AnyRef]
    case Typedef._Fields.typeAlias => typeAliasOrNull.asInstanceOf[AnyRef]
    case Typedef._Fields.__annotations => annotationsOrDefault.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  def setFieldValue(field: Typedef._Fields, value: AnyRef) {
    field match {
      case Typedef._Fields.typeId => typeId_=(value.asInstanceOf[String])
      case Typedef._Fields.typeAlias => typeAlias_=(value.asInstanceOf[String])
      case Typedef._Fields.__annotations =>
        __annotations_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
        )
      case _ =>
    }
  }
  // Deep copy via an in-memory binary serialize/deserialize round trip.
  override def deepCopy(): RawTypedef = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Typedef.createRawRecord
    ret.read(prot)
    ret
  }
  // NOTE: a null argument (the default for an unset source field) leaves the
  // corresponding field unset on the copy; copy cannot be used to unset a
  // field that is explicitly passed a non-null value.
  override def copy(
    typeId: String = typeIdOrNull,
    typeAlias: String = typeAliasOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): RawTypedef = {
    val ret = new RawTypedef
    if (typeId != null) ret.typeId_=(typeId)
    if (typeAlias != null) ret.typeAlias_=(typeAlias)
    if (__annotations != null) ret.__annotations_=(__annotations)
    ret
  }
  // Human-readable rendering via the string protocol (UTF-8).
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion object for the `TypeRegistry` struct: carries the metadata (via
  * [[TypeRegistryMeta]]), a phantom-typed builder, and implicit ordering.
  */
object TypeRegistry extends TypeRegistryMeta {
  object Builder {
    // Phantom marker traits: the builder's type parameter accumulates these
    // as fields are set, so `result()` only compiles once all are present.
    sealed trait HasIdToType
    sealed trait HasAliasToTypeId
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasIdToType with HasAliasToTypeId
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[TypeRegistry] (private var obj: RawTypeRegistry) {
    def idToType(
      v: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type]
    ): TypeRegistry.Builder[State with Builder.HasIdToType] = {
      obj.idToType_=(v)
      this.asInstanceOf[TypeRegistry.Builder[State with Builder.HasIdToType]]
    }
    def aliasToTypeId(
      v: scala.collection.immutable.Map[String, String]
    ): TypeRegistry.Builder[State with Builder.HasAliasToTypeId] = {
      obj.aliasToTypeId_=(v)
      this.asInstanceOf[TypeRegistry.Builder[State with Builder.HasAliasToTypeId]]
    }
    /** Hands out the built record exactly once; the builder nulls its own
      * reference so a second call fails fast instead of aliasing the record.
      */
    def resultMutable()(
      implicit ev0: State <:< Builder.HasIdToType,
      ev1: State <:< Builder.HasAliasToTypeId
    ): MutableTypeRegistry = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("TypeRegistry.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasIdToType, ev1: State <:< Builder.HasAliasToTypeId): TypeRegistry =
      resultMutable()(ev0, ev1)
  }
  def newBuilder: TypeRegistry.Builder.AllUnspecified = new Builder(TypeRegistry.createRawRecord)
  implicit val companionProvider: TypeRegistryCompanionProvider = new TypeRegistryCompanionProvider
  // Ordering/Comparator both delegate to TypeRegistry.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[TypeRegistry] = {
    new _root_.scala.math.Ordering[TypeRegistry] {
      override def compare(x: TypeRegistry, y: TypeRegistry): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[TypeRegistry] = {
    new _root_.java.util.Comparator[TypeRegistry] {
      override def compare(x: TypeRegistry, y: TypeRegistry): Int = x.compare(y)
    }
  }
}
/** Metadata for the `TypeRegistry` struct: Thrift wire descriptors, field-id
  * and wire-name lookup tables, record factories, and Spindle field
  * descriptors (fields: idToType #1, aliasToTypeId #2).
  */
class TypeRegistryMeta
    extends JavaTypeRegistryMeta[TypeRegistry, RawTypeRegistry, TypeRegistryMeta]
    with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[TypeRegistry] {
  override def recordName: String = "TypeRegistry"
  // Thrift descriptors.
  val TYPEREGISTRY_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("TypeRegistry")
  val IDTOTYPE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "idToType",
      org.apache.thrift.protocol.TType.MAP,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val ALIASTOTYPEID_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "aliasToTypeId",
      org.apache.thrift.protocol.TType.MAP,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup for protocols that serialize field names instead of ids.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "idToType" -> IDTOTYPE_FDESC,
    "aliasToTypeId" -> ALIASTOTYPEID_FDESC
  )
  object _Fields {
    case object idToType extends _Fields(1, "idToType")
    case object aliasToTypeId extends _Fields(2, "aliasToTypeId")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.idToType,
    2.toShort -> _Fields.aliasToTypeId
  )
  // Record factories: all produce a fresh RawTypeRegistry.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: TypeRegistry = createRawRecord
  override def createRawRecord: RawTypeRegistry = new RawTypeRegistry
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[TypeRegistry] = {
    if (x.isInstanceOf[TypeRegistry]) Some(x.asInstanceOf[TypeRegistry]) else None
  }
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
  // Spindle Descriptors.
  val idToType =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.immutable.Map[
      String,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type
    ], TypeRegistry, TypeRegistryMeta](
      name = "idToType",
      longName = "idToType",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.idToTypeOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[TypeRegistry],
        v: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type]
      ) => { r.asInstanceOf[RawTypeRegistry].idToType_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[TypeRegistry]) => {
        r.asInstanceOf[RawTypeRegistry].idToTypeUnset()
      },
      manifest = manifest[
        scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type]
      ]
    )
  val aliasToTypeId =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.immutable.Map[
      String,
      String
    ], TypeRegistry, TypeRegistryMeta](
      name = "aliasToTypeId",
      longName = "aliasToTypeId",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.aliasToTypeIdOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[TypeRegistry],
        v: scala.collection.immutable.Map[String, String]
      ) => { r.asInstanceOf[RawTypeRegistry].aliasToTypeId_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[TypeRegistry]) => {
        r.asInstanceOf[RawTypeRegistry].aliasToTypeIdUnset()
      },
      manifest = manifest[scala.collection.immutable.Map[String, String]]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  // Field descriptors in field-id order.
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, TypeRegistry, TypeRegistryMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, TypeRegistry, TypeRegistryMeta]](
      idToType,
      aliasToTypeId
    )
  /** Constructs a TypeRegistry with both fields populated. */
  def apply(
    idToType: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type],
    aliasToTypeId: scala.collection.immutable.Map[String, String]
  ): TypeRegistry = {
    val ret = this.createRawRecord
    ret.idToType_=(idToType)
    ret.aliasToTypeId_=(aliasToTypeId)
    ret
  }
}
/** Supplies the [[TypeRegistryMeta]] companion for [[TypeRegistry]] records,
  * so generic runtime code can look up a record's metadata without a hard
  * reference.
  */
class TypeRegistryCompanionProvider
    extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[TypeRegistry] {
  type CompanionT = TypeRegistryMeta
  // `TypeRegistry` (the companion object) is itself the TypeRegistryMeta instance.
  override def provide: CompanionT = TypeRegistry
}
/** Read-only interface for the generated `TypeRegistry` Thrift struct
  * (fields: idToType #1, aliasToTypeId #2).
  *
  * Concrete state lives in [[RawTypeRegistry]]; this trait defines ordering,
  * copying, and builder conversion over the abstract accessors.
  */
trait TypeRegistry
    extends JavaTypeRegistry[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type,
      TypeRegistry,
      RawTypeRegistry,
      TypeRegistryMeta
    ]
    with org.apache.thrift.TBase[TypeRegistry, TypeRegistry._Fields] {
  override def meta: TypeRegistryMeta
  /** Total ordering over all fields in field-id order.
    *
    * `null` sorts before any record; an unset field sorts before a set one.
    * NOTE: the condition blocks deliberately mutate `cmp` as a side effect —
    * this generated pattern short-circuits on the first differing field.
    */
  override def compare(that: TypeRegistry): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.idToTypeIsSet.compareTo(that.idToTypeIsSet)
      cmp != 0
    }) cmp
    else if (this.idToTypeIsSet && {
      // Maps are compared via the Thrift helper on Java views.
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.mapAsJavaMapConverter(this.idToType).asJava,
        scala.collection.JavaConverters.mapAsJavaMapConverter(that.idToType).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.aliasToTypeIdIsSet.compareTo(that.aliasToTypeIdIsSet)
      cmp != 0
    }) cmp
    else if (this.aliasToTypeIdIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.mapAsJavaMapConverter(this.aliasToTypeId).asJava,
        scala.collection.JavaConverters.mapAsJavaMapConverter(that.aliasToTypeId).asJava
      )
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: TypeRegistry): Boolean = { this.compare(that) < 0 }
  override def >(that: TypeRegistry): Boolean = { this.compare(that) > 0 }
  override def <=(that: TypeRegistry): Boolean = { this.compare(that) <= 0 }
  override def >=(that: TypeRegistry): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: TypeRegistry): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): TypeRegistry
  def copy(
    idToType: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type] =
      idToTypeOrNull,
    aliasToTypeId: scala.collection.immutable.Map[String, String] = aliasToTypeIdOrNull
  ): TypeRegistry
  /** Fresh mutable record containing only the fields set on `this`. */
  def mutableCopy(): MutableTypeRegistry = {
    val ret = TypeRegistry.createRawRecord
    if (idToTypeIsSet) ret.idToType_=(idToTypeOrNull)
    if (aliasToTypeIdIsSet) ret.aliasToTypeId_=(aliasToTypeIdOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
   *
   * If the underlying implementation is mutable, `this` will be returned.
   * If the underlying implementation is immutable, a mutable copy will be returned.
   *
   * After mutating the instance returned by this method, the original instance
   * (on which `mutable` was called) will be in an undefined state. It may or may
   * not have been modified, depending on whether it was immutable or not.
   *
   * This is included as an optimization for when we want access to a Mutable record
   * but don't want to pay the cost of copying every time.
   */
  def mutable: MutableTypeRegistry
  /** Builder seeded with this record's set fields; typed as if all were set. */
  def toBuilder(): TypeRegistry.Builder.AllSpecified = {
    val ret = new TypeRegistry.Builder(TypeRegistry.createRawRecord)
    if (idToTypeIsSet) ret.idToType(idToTypeOrNull)
    if (aliasToTypeIdIsSet) ret.aliasToTypeId(aliasToTypeIdOrNull)
    ret
  }
  def mergeCopy(that: TypeRegistry): TypeRegistry
}
/** Mutable view of a [[TypeRegistry]]: adds field setters/unsetters and
  * in-place merge on top of the read-only interface. Implemented by
  * [[RawTypeRegistry]].
  */
trait MutableTypeRegistry
    extends TypeRegistry
    with JavaTypeRegistryMutable[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type,
      TypeRegistry,
      RawTypeRegistry,
      TypeRegistryMeta
    ] {
  def idToType_=(
    x: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type]
  ): Unit
  def idToTypeUnset(): Unit
  def aliasToTypeId_=(x: scala.collection.immutable.Map[String, String]): Unit
  def aliasToTypeIdUnset(): Unit
  /** Merges `that` into this record in place. */
  def merge(that: TypeRegistry): Unit
  def copy(
    idToType: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type] =
      idToTypeOrNull,
    aliasToTypeId: scala.collection.immutable.Map[String, String] = aliasToTypeIdOrNull
  ): MutableTypeRegistry
  // Already mutable, so `mutable` is the identity (no copy is made).
  override def mutable: MutableTypeRegistry = this
}
final class RawTypeRegistry
extends JavaTypeRegistryRaw[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type,
TypeRegistry,
RawTypeRegistry,
TypeRegistryMeta
]
with MutableTypeRegistry {
override def meta: TypeRegistryMeta = TypeRegistry
// fields
// Field #1 - idToType
private var _idToType
: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type] = null // Underlying type: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type]
override def idToType
: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type] =
idToTypeOrDefault
override def idToType_=(
x: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type]
): Unit = { _idToType = x }
override def idToTypeOption
: Option[scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type]] =
if (idToTypeIsSet) Some(_idToType) else None
override def idToTypeOrDefault
: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type] =
if (idToTypeIsSet) _idToType else scala.collection.immutable.Map.empty
override def idToTypeOrNull
: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type] =
_idToType
override def idToTypeOrThrow
: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type] =
if (idToTypeIsSet) _idToType else throw new java.lang.NullPointerException("field idToType of TypeRegistry missing")
override def idToTypeIsSet: Boolean = _idToType != null
override def idToTypeUnset(): Unit = { _idToType = null }
// Field #2 - aliasToTypeId
private var _aliasToTypeId
: scala.collection.immutable.Map[String, String] = null // Underlying type: scala.collection.immutable.Map[String, String]
override def aliasToTypeId: scala.collection.immutable.Map[String, String] = aliasToTypeIdOrDefault
override def aliasToTypeId_=(x: scala.collection.immutable.Map[String, String]): Unit = { _aliasToTypeId = x }
override def aliasToTypeIdOption: Option[scala.collection.immutable.Map[String, String]] =
if (aliasToTypeIdIsSet) Some(_aliasToTypeId) else None
override def aliasToTypeIdOrDefault: scala.collection.immutable.Map[String, String] =
if (aliasToTypeIdIsSet) _aliasToTypeId else scala.collection.immutable.Map.empty
override def aliasToTypeIdOrNull: scala.collection.immutable.Map[String, String] = _aliasToTypeId
override def aliasToTypeIdOrThrow: scala.collection.immutable.Map[String, String] = if (aliasToTypeIdIsSet)
_aliasToTypeId
else throw new java.lang.NullPointerException("field aliasToTypeId of TypeRegistry missing")
override def aliasToTypeIdIsSet: Boolean = _aliasToTypeId != null
override def aliasToTypeIdUnset(): Unit = { _aliasToTypeId = null }
// end fields
private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
oprot.writeStructBegin(TypeRegistry.TYPEREGISTRY_SDESC)
if (idToTypeIsSet) {
oprot.writeFieldBegin(TypeRegistry.IDTOTYPE_FDESC)
oprot.writeMapBegin(
new org.apache.thrift.protocol.TMap(
org.apache.thrift.protocol.TType.STRING,
org.apache.thrift.protocol.TType.STRUCT,
_idToType.size
)
)
_idToType.foreach(item => {
oprot.writeString(item._1)
item._2.write(oprot)
})
oprot.writeMapEnd()
oprot.writeFieldEnd()
}
if (aliasToTypeIdIsSet) {
oprot.writeFieldBegin(TypeRegistry.ALIASTOTYPEID_FDESC)
oprot.writeMapBegin(
new org.apache.thrift.protocol.TMap(
org.apache.thrift.protocol.TType.STRING,
org.apache.thrift.protocol.TType.STRING,
_aliasToTypeId.size
)
)
_aliasToTypeId.foreach(item => {
oprot.writeString(item._1)
oprot.writeString(item._2)
})
oprot.writeMapEnd()
oprot.writeFieldEnd()
}
if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
unknownFields.reverse foreach { _.write(oprot) }
}
oprot.writeFieldStop()
oprot.writeStructEnd()
}
  // Deserializes from the given Thrift protocol into this record in place.
  // Field headers with id < 0 (name-serializing protocols such as JSON/BSON) are
  // resolved through wireNameToTField. Fields whose wire type disagrees with the
  // schema are skipped; unrecognized fields are either preserved in
  // `unknownFields` or skipped, depending on RuntimeHelpers.preserveUnknownFields.
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (and caches) the UnknownFields bucket for this read pass.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        TypeRegistry.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // idToType
            if (field_header.`type` == org.apache.thrift.protocol.TType.MAP) {
              _idToType = {
                val tmap: org.apache.thrift.protocol.TMap = iprot.readMapBegin()
                val builder = scala.collection.immutable.Map
                  .newBuilder[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type]
                builder.sizeHint(tmap.size)
                var i: Int = tmap.size
                while (i > 0) {
                  val k = iprot.readString()
                  // Map values are nested structs: each is read recursively.
                  val v = ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type.createRawRecord
                    s.read(iprot)
                    s
                  })
                  builder += ((k, v))
                  i -= 1
                }
                builder.result()
              }
              iprot.readMapEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // aliasToTypeId
            if (field_header.`type` == org.apache.thrift.protocol.TType.MAP) {
              _aliasToTypeId = {
                val tmap: org.apache.thrift.protocol.TMap = iprot.readMapBegin()
                val builder = scala.collection.immutable.Map.newBuilder[String, String]
                builder.sizeHint(tmap.size)
                var i: Int = tmap.size
                while (i > 0) {
                  val k = iprot.readString()
                  val v = iprot.readString()
                  builder += ((k, v))
                  i -= 1
                }
                builder.result()
              }
              iprot.readMapEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          // Wrap protocol errors with the offending field id for easier debugging.
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure TypeRegistry".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
override def merge(that: TypeRegistry): Unit = {
if (that.idToTypeIsSet && !this.idToTypeIsSet) {
this.idToType_=(that.idToTypeOrNull)
} else if (that.idToTypeIsSet && this.idToTypeIsSet) {
this.idToType_=(this.idToType ++ that.idToType)
}
if (that.aliasToTypeIdIsSet && !this.aliasToTypeIdIsSet) {
this.aliasToTypeId_=(that.aliasToTypeIdOrNull)
} else if (that.aliasToTypeIdIsSet && this.aliasToTypeIdIsSet) {
this.aliasToTypeId_=(this.aliasToTypeId ++ that.aliasToTypeId)
}
}
override def mergeCopy(that: TypeRegistry): TypeRegistry = {
val ret = TypeRegistry.createRawRecord
ret.merge(this)
ret.merge(that)
ret
}
override def equals(that: Any): Boolean = that match {
case null => false
case o: TypeRegistry => this.equals(o)
case _ => false
}
def equals(that: TypeRegistry): Boolean = {
that != null &&
(if (this.idToTypeIsSet) (that.idToTypeIsSet && this.idToTypeOrNull == that.idToTypeOrNull)
else !that.idToTypeIsSet) &&
(if (this.aliasToTypeIdIsSet) (that.aliasToTypeIdIsSet && this.aliasToTypeIdOrNull == that.aliasToTypeIdOrNull)
else !that.aliasToTypeIdIsSet) &&
true
}
  // Hash consistent with equals: only set fields contribute, each via its Scala
  // hash (##), appended in field-id order.
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (idToTypeIsSet) hasher.append(_idToType.##)
    if (aliasToTypeIdIsSet) hasher.append(_aliasToTypeId.##)
    hasher.hash
  }
// Returns the values of the set fields on this object, in id order.
def getSetFields: Seq[Any] = {
var ret: List[Any] = Nil
if (idToTypeIsSet) ret = idToTypeOrNull :: ret
if (aliasToTypeIdIsSet) ret = aliasToTypeIdOrNull :: ret
ret.reverse
}
  // Resets the record to the empty state: unsets both fields and drops any
  // unknown fields captured during a previous read().
  override def clear() {
    idToTypeUnset()
    aliasToTypeIdUnset()
    unknownFields = Nil
  }
def fieldForId(id: Int): TypeRegistry._Fields = id match {
case 1 => TypeRegistry._Fields.idToType
case 2 => TypeRegistry._Fields.aliasToTypeId
case _ => null
}
def isSet(field: TypeRegistry._Fields): Boolean = field match {
case TypeRegistry._Fields.idToType => idToTypeIsSet
case TypeRegistry._Fields.aliasToTypeId => aliasToTypeIdIsSet
case _ => false
}
def getFieldValue(field: TypeRegistry._Fields): AnyRef = field match {
case TypeRegistry._Fields.idToType => idToTypeOrNull.asInstanceOf[AnyRef]
case TypeRegistry._Fields.aliasToTypeId => aliasToTypeIdOrNull.asInstanceOf[AnyRef]
case _ => throw new IllegalStateException
}
def setFieldValue(field: TypeRegistry._Fields, value: AnyRef) {
field match {
case TypeRegistry._Fields.idToType =>
idToType_=(
value.asInstanceOf[
scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type]
]
)
case TypeRegistry._Fields.aliasToTypeId =>
aliasToTypeId_=(value.asInstanceOf[scala.collection.immutable.Map[String, String]])
case _ =>
}
}
  // Deep copy by serialization round-trip: writes this record to an in-memory
  // binary protocol buffer and reads it back into a fresh record, so nested
  // structs are copied rather than shared.
  override def deepCopy(): RawTypeRegistry = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = TypeRegistry.createRawRecord
    ret.read(prot)
    ret
  }
override def copy(
idToType: scala.collection.immutable.Map[String, io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Type] =
idToTypeOrNull,
aliasToTypeId: scala.collection.immutable.Map[String, String] = aliasToTypeIdOrNull
): RawTypeRegistry = {
val ret = new RawTypeRegistry
if (idToType != null) ret.idToType_=(idToType)
if (aliasToTypeId != null) ret.aliasToTypeId_=(aliasToTypeId)
ret
}
  // String rendering produced by serializing through TStringProtocol into an
  // in-memory buffer and decoding the bytes as UTF-8.
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
// Companion for Const. Provides a typestate Builder (phantom types on the State
// parameter record which required fields have been set, so result() only
// compiles once typeId, name, and value are all provided), plus implicit
// Ordering/Comparator instances backed by Const#compare.
object Const extends ConstMeta {
  object Builder {
    // Phantom marker traits accumulated on Builder's State type parameter as
    // each setter is called; result() demands evidence of all three.
    sealed trait HasTypeId
    sealed trait HasName
    sealed trait HasValue
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasTypeId with HasName with HasValue
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[Const] (private var obj: RawConst) {
    // Each setter mutates the wrapped record and re-types `this` with the
    // corresponding marker mixed into State. The casts are safe because the
    // marker traits are purely compile-time evidence.
    def typeId(v: String): Const.Builder[State with Builder.HasTypeId] = {
      obj.typeId_=(v)
      this.asInstanceOf[Const.Builder[State with Builder.HasTypeId]]
    }
    def name(v: String): Const.Builder[State with Builder.HasName] = {
      obj.name_=(v)
      this.asInstanceOf[Const.Builder[State with Builder.HasName]]
    }
    def value(v: String): Const.Builder[State with Builder.HasValue] = {
      obj.value_=(v)
      this.asInstanceOf[Const.Builder[State with Builder.HasValue]]
    }
    // Hands out the underlying record exactly once; obj is nulled so a second
    // call fails fast instead of silently aliasing a shared mutable record.
    def resultMutable()(
      implicit ev0: State <:< Builder.HasTypeId,
      ev1: State <:< Builder.HasName,
      ev2: State <:< Builder.HasValue
    ): MutableConst = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("Const.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(
      implicit ev0: State <:< Builder.HasTypeId,
      ev1: State <:< Builder.HasName,
      ev2: State <:< Builder.HasValue
    ): Const = resultMutable()(ev0, ev1, ev2)
  }
  def newBuilder: Const.Builder.AllUnspecified = new Builder(Const.createRawRecord)
  implicit val companionProvider: ConstCompanionProvider = new ConstCompanionProvider
  implicit val __$ordering: _root_.scala.math.Ordering[Const] = {
    new _root_.scala.math.Ordering[Const] {
      override def compare(x: Const, y: Const): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[Const] = {
    new _root_.java.util.Comparator[Const] {
      override def compare(x: Const, y: Const): Int = x.compare(y)
    }
  }
}
// Metadata companion for the Const struct: Thrift struct/field descriptors,
// record factories, and Spindle field descriptors exposing reflective
// getters/setters. Field ids (typeId=1, name=2, value=3) must stay in sync
// with the wire format produced by RawConst.write/read.
class ConstMeta
  extends JavaConstMeta[Const, RawConst, ConstMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Const] {
  override def recordName: String = "Const"
  // Thrift descriptors.
  val CONST_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Const")
  val TYPEID_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "typeId",
      org.apache.thrift.protocol.TType.STRING,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val NAME_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "name",
      org.apache.thrift.protocol.TType.STRING,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val VALUE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "value",
      org.apache.thrift.protocol.TType.STRING,
      3, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup used by read() when a protocol serializes field names instead of ids.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "typeId" -> TYPEID_FDESC,
    "name" -> NAME_FDESC,
    "value" -> VALUE_FDESC
  )
  object _Fields {
    case object typeId extends _Fields(1, "typeId")
    case object name extends _Fields(2, "name")
    case object value extends _Fields(3, "value")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.typeId,
    2.toShort -> _Fields.name,
    3.toShort -> _Fields.value
  )
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: Const = createRawRecord
  override def createRawRecord: RawConst = new RawConst
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[Const] = {
    if (x.isInstanceOf[Const]) Some(x.asInstanceOf[Const]) else None
  }
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations(
      scala.collection.immutable.Vector(
        ("generate_proxy", "true")
      )
    )
  // Spindle Descriptors.
  // One OptionalFieldDescriptor per field; setterRaw/unsetterRaw downcast to
  // RawConst because only the raw record is mutable.
  val typeId =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Const, ConstMeta](
      name = "typeId",
      longName = "typeId",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.typeIdOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Const], v: String) => {
        r.asInstanceOf[RawConst].typeId_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Const]) => {
        r.asInstanceOf[RawConst].typeIdUnset()
      },
      manifest = manifest[String]
    )
  val name =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Const, ConstMeta](
      name = "name",
      longName = "name",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.nameOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Const], v: String) => {
        r.asInstanceOf[RawConst].name_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Const]) => {
        r.asInstanceOf[RawConst].nameUnset()
      },
      manifest = manifest[String]
    )
  val value =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Const, ConstMeta](
      name = "value",
      longName = "value",
      id = 3,
      annotations = Map(),
      owner = this,
      getter = _.valueOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Const], v: String) => {
        r.asInstanceOf[RawConst].value_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Const]) => {
        r.asInstanceOf[RawConst].valueUnset()
      },
      manifest = manifest[String]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  // Field descriptors in field-id order.
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Const, ConstMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Const, ConstMeta]](
      typeId,
      name,
      value
    )
  // Convenience constructor setting all three fields.
  def apply(
    typeId: String,
    name: String,
    value: String
  ): Const = {
    val ret = this.createRawRecord
    ret.typeId_=(typeId)
    ret.name_=(name)
    ret.value_=(value)
    ret
  }
}
// Implicit-resolvable bridge from the Const record type to its ConstMeta companion.
class ConstCompanionProvider extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Const] {
  type CompanionT = ConstMeta
  override def provide: ConstMeta = Const
}
// Read-only interface for a Const record (a named, typed constant from the
// thrift IDL). Supplies total ordering over (set-state, value) of each field
// in id order, plus copy/builder conversions.
trait Const
  extends JavaConst[
    Const,
    RawConst,
    ConstMeta
  ]
  with org.apache.thrift.TBase[Const, Const._Fields] {
  override def meta: ConstMeta
  // Orders by typeId, then name, then value; for each field an unset value
  // sorts before a set one (Boolean compareTo), and null `that` sorts first.
  override def compare(that: Const): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.typeIdIsSet.compareTo(that.typeIdIsSet)
      cmp != 0
    }) cmp
    else if (this.typeIdIsSet && {
      cmp = this.typeIdOrNull.compareTo(that.typeIdOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.nameIsSet.compareTo(that.nameIsSet)
      cmp != 0
    }) cmp
    else if (this.nameIsSet && {
      cmp = this.nameOrNull.compareTo(that.nameOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.valueIsSet.compareTo(that.valueIsSet)
      cmp != 0
    }) cmp
    else if (this.valueIsSet && {
      cmp = this.valueOrNull.compareTo(that.valueOrNull)
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: Const): Boolean = { this.compare(that) < 0 }
  override def >(that: Const): Boolean = { this.compare(that) > 0 }
  override def <=(that: Const): Boolean = { this.compare(that) <= 0 }
  override def >=(that: Const): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: Const): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): Const
  def copy(
    typeId: String = typeIdOrNull,
    name: String = nameOrNull,
    value: String = valueOrNull
  ): Const
  // Shallow copy into a fresh mutable record, preserving set/unset state.
  def mutableCopy(): MutableConst = {
    val ret = Const.createRawRecord
    if (typeIdIsSet) ret.typeId_=(typeIdOrNull)
    if (nameIsSet) ret.name_=(nameOrNull)
    if (valueIsSet) ret.value_=(valueOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned.
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableConst
  // Builder pre-populated with this record's set fields; typed AllSpecified,
  // which assumes all required-for-result fields are set on this record.
  def toBuilder(): Const.Builder.AllSpecified = {
    val ret = new Const.Builder(Const.createRawRecord)
    if (typeIdIsSet) ret.typeId(typeIdOrNull)
    if (nameIsSet) ret.name(nameOrNull)
    if (valueIsSet) ret.value(valueOrNull)
    ret
  }
  def mergeCopy(that: Const): Const
}
// Mutable view of a Const record: adds field setters/unsetters, in-place merge,
// and a covariantly-typed copy.
trait MutableConst
  extends Const
  with JavaConstMutable[
    Const,
    RawConst,
    ConstMeta
  ] {
  def typeId_=(x: String): Unit
  def typeIdUnset(): Unit
  def name_=(x: String): Unit
  def nameUnset(): Unit
  def value_=(x: String): Unit
  def valueUnset(): Unit
  def merge(that: Const): Unit
  def copy(
    typeId: String = typeIdOrNull,
    name: String = nameOrNull,
    value: String = valueOrNull
  ): MutableConst
  // Already mutable, so no copy is needed.
  override def mutable: MutableConst = this
}
trait ConstProxy extends Const {
protected def underlying: Const
override def meta = underlying.meta
// field/proxy_ref.ssp
override def typeId: String = underlying.typeId
override def typeIdOption: Option[String] = underlying.typeIdOption
override def typeIdOrNull: String = underlying.typeIdOrNull
override def typeIdOrThrow: String = underlying.typeIdOrThrow
override def typeIdIsSet: Boolean = underlying.typeIdIsSet
// field/proxy_ref.ssp
override def name: String = underlying.name
override def nameOption: Option[String] = underlying.nameOption
override def nameOrNull: String = underlying.nameOrNull
override def nameOrThrow: String = underlying.nameOrThrow
override def nameIsSet: Boolean = underlying.nameIsSet
// field/proxy_ref.ssp
override def value: String = underlying.value
override def valueOption: Option[String] = underlying.valueOption
override def valueOrNull: String = underlying.valueOrNull
override def valueOrThrow: String = underlying.valueOrThrow
override def valueIsSet: Boolean = underlying.valueIsSet
override def compare(that: Const): Int = underlying.compare(that)
override def clear() { underlying.clear }
override def read(iprot: org.apache.thrift.protocol.TProtocol) { underlying.read(iprot) }
override def write(oprot: org.apache.thrift.protocol.TProtocol) { underlying.write(oprot) }
override def copy(
typeId: String = typeIdOrNull,
name: String = nameOrNull,
value: String = valueOrNull
): Const = underlying.copy(
typeId = typeId,
name = name,
value = value
)
override def mutableCopy(): MutableConst = underlying.mutableCopy()
override def mergeCopy(that: Const): Const = underlying.mergeCopy(that)
override def mutable: MutableConst = underlying.mutable
override def deepCopy(): Const = underlying.deepCopy()
override def fieldForId(id: Int): Const._Fields = underlying.fieldForId(id)
override def isSet(field: Const._Fields): Boolean = underlying.isSet(field)
override def getFieldValue(field: Const._Fields): AnyRef = underlying.getFieldValue(field)
override def setFieldValue(field: Const._Fields, value: AnyRef) { underlying.setFieldValue(field, value) }
override def hashCode(): Int = underlying.hashCode
override def equals(that: Any): Boolean = underlying.equals(that)
override def toString(): String = underlying.toString
}
// Mutable variant of ConstProxy: additionally forwards setters, unsetters,
// merge, and the mutable-typed copy to the underlying mutable record.
trait MutableConstProxy extends MutableConst with ConstProxy {
  protected def underlying: MutableConst
  override def typeId_=(x: String): Unit = { underlying.typeId_=(x) }
  override def typeIdUnset(): Unit = { underlying.typeIdUnset() }
  override def name_=(x: String): Unit = { underlying.name_=(x) }
  override def nameUnset(): Unit = { underlying.nameUnset() }
  override def value_=(x: String): Unit = { underlying.value_=(x) }
  override def valueUnset(): Unit = { underlying.valueUnset() }
  override def copy(
    typeId: String = typeIdOrNull,
    name: String = nameOrNull,
    value: String = valueOrNull
  ): MutableConst = underlying.copy(
    typeId = typeId,
    name = name,
    value = value
  )
  override def merge(that: Const): Unit = underlying.merge(that)
}
// Concrete mutable implementation of Const. Each field is backed by a nullable
// var ("set" == non-null); read/write implement the Thrift wire format, and
// unknown wire fields are preserved for re-serialization.
final class RawConst
  extends JavaConstRaw[
    Const,
    RawConst,
    ConstMeta
  ]
  with MutableConst {
  override def meta: ConstMeta = Const
  // fields
  // Field #1 - typeId
  private var _typeId: String = null // Underlying type: String
  override def typeId: String = typeIdOrThrow
  override def typeId_=(x: String): Unit = { _typeId = x }
  override def typeIdOption: Option[String] = if (typeIdIsSet) Some(_typeId) else None
  override def typeIdOrNull: String = _typeId
  override def typeIdOrThrow: String =
    if (typeIdIsSet) _typeId else throw new java.lang.NullPointerException("field typeId of Const missing")
  override def typeIdIsSet: Boolean = _typeId != null
  override def typeIdUnset(): Unit = { _typeId = null }
  // Field #2 - name
  private var _name: String = null // Underlying type: String
  override def name: String = nameOrThrow
  override def name_=(x: String): Unit = { _name = x }
  override def nameOption: Option[String] = if (nameIsSet) Some(_name) else None
  override def nameOrNull: String = _name
  override def nameOrThrow: String =
    if (nameIsSet) _name else throw new java.lang.NullPointerException("field name of Const missing")
  override def nameIsSet: Boolean = _name != null
  override def nameUnset(): Unit = { _name = null }
  // Field #3 - value
  private var _value: String = null // Underlying type: String
  override def value: String = valueOrThrow
  override def value_=(x: String): Unit = { _value = x }
  override def valueOption: Option[String] = if (valueIsSet) Some(_value) else None
  override def valueOrNull: String = _value
  override def valueOrThrow: String =
    if (valueIsSet) _value else throw new java.lang.NullPointerException("field value of Const missing")
  override def valueIsSet: Boolean = _value != null
  override def valueUnset(): Unit = { _value = null }
  // end fields
  // Unknown wire fields captured by read(), newest first; re-emitted by write().
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  // Serializes this record: only set fields are emitted, each framed with its
  // field descriptor; preserved unknown fields go out before the field stop.
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Const.CONST_SDESC)
    if (typeIdIsSet) {
      oprot.writeFieldBegin(Const.TYPEID_FDESC)
      oprot.writeString(_typeId)
      oprot.writeFieldEnd()
    }
    if (nameIsSet) {
      oprot.writeFieldBegin(Const.NAME_FDESC)
      oprot.writeString(_name)
      oprot.writeFieldEnd()
    }
    if (valueIsSet) {
      oprot.writeFieldBegin(Const.VALUE_FDESC)
      oprot.writeString(_value)
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  // Deserializes into this record in place. Negative field ids (name-serializing
  // protocols) are resolved via wireNameToTField; type-mismatched fields are
  // skipped, unknown fields preserved or skipped per RuntimeHelpers.
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Const.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // typeId
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _typeId = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // name
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _name = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 3 => { // value
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _value = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException("Error reading field %d in structure Const".format(field_header.id), e)
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  // Merge: fills in only fields that are set in `that` but unset here
  // (existing values on this record are never overwritten).
  override def merge(that: Const): Unit = {
    if (that.typeIdIsSet && !this.typeIdIsSet) {
      this.typeId_=(that.typeIdOrNull)
    }
    if (that.nameIsSet && !this.nameIsSet) {
      this.name_=(that.nameOrNull)
    }
    if (that.valueIsSet && !this.valueIsSet) {
      this.value_=(that.valueOrNull)
    }
  }
  override def mergeCopy(that: Const): Const = {
    val ret = Const.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Const => this.equals(o)
    case _ => false
  }
  // Field-wise equality: set/unset state and (when set) value must match per field.
  def equals(that: Const): Boolean = {
    that != null &&
    (if (this.typeIdIsSet) (that.typeIdIsSet && this.typeIdOrNull == that.typeIdOrNull) else !that.typeIdIsSet) &&
    (if (this.nameIsSet) (that.nameIsSet && this.nameOrNull == that.nameOrNull) else !that.nameIsSet) &&
    (if (this.valueIsSet) (that.valueIsSet && this.valueOrNull == that.valueOrNull) else !that.valueIsSet) &&
    true
  }
  // Hash consistent with equals: only set fields contribute, in field-id order.
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (typeIdIsSet) hasher.append(_typeId.##)
    if (nameIsSet) hasher.append(_name.##)
    if (valueIsSet) hasher.append(_value.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (typeIdIsSet) ret = typeIdOrNull :: ret
    if (nameIsSet) ret = nameOrNull :: ret
    if (valueIsSet) ret = valueOrNull :: ret
    ret.reverse
  }
  // Resets to the empty state, including any captured unknown fields.
  override def clear() {
    typeIdUnset()
    nameUnset()
    valueUnset()
    unknownFields = Nil
  }
  def fieldForId(id: Int): Const._Fields = id match {
    case 1 => Const._Fields.typeId
    case 2 => Const._Fields.name
    case 3 => Const._Fields.value
    case _ => null
  }
  def isSet(field: Const._Fields): Boolean = field match {
    case Const._Fields.typeId => typeIdIsSet
    case Const._Fields.name => nameIsSet
    case Const._Fields.value => valueIsSet
    case _ => false
  }
  def getFieldValue(field: Const._Fields): AnyRef = field match {
    case Const._Fields.typeId => typeIdOrNull.asInstanceOf[AnyRef]
    case Const._Fields.name => nameOrNull.asInstanceOf[AnyRef]
    case Const._Fields.value => valueOrNull.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  // Reflective setter (TBase interface); caller must supply a String (unchecked cast).
  def setFieldValue(field: Const._Fields, value: AnyRef) {
    field match {
      case Const._Fields.typeId => typeId_=(value.asInstanceOf[String])
      case Const._Fields.name => name_=(value.asInstanceOf[String])
      case Const._Fields.value => value_=(value.asInstanceOf[String])
      case _ =>
    }
  }
  // Deep copy via a binary-protocol round trip through an in-memory buffer.
  override def deepCopy(): RawConst = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Const.createRawRecord
    ret.read(prot)
    ret
  }
  // Copy with per-field overrides; null arguments (unset fields) are not applied.
  override def copy(
    typeId: String = typeIdOrNull,
    name: String = nameOrNull,
    value: String = valueOrNull
  ): RawConst = {
    val ret = new RawConst
    if (typeId != null) ret.typeId_=(typeId)
    if (name != null) ret.name_=(name)
    if (value != null) ret.value_=(value)
    ret
  }
  // String rendering via TStringProtocol over an in-memory buffer, decoded as UTF-8.
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
// Companion for EnumElement. Typestate Builder requires name and value before
// result(); the optional __annotations field can be set (or unset via None) in
// any state. Also provides Ordering/Comparator backed by EnumElement#compare.
object EnumElement extends EnumElementMeta {
  object Builder {
    // Phantom markers tracked on the Builder's State type parameter.
    sealed trait HasName
    sealed trait HasValue
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasName with HasValue
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[EnumElement] (private var obj: RawEnumElement) {
    def name(v: String): EnumElement.Builder[State with Builder.HasName] = {
      obj.name_=(v)
      this.asInstanceOf[EnumElement.Builder[State with Builder.HasName]]
    }
    def value(v: Int): EnumElement.Builder[State with Builder.HasValue] = {
      obj.value_=(v)
      this.asInstanceOf[EnumElement.Builder[State with Builder.HasValue]]
    }
    // Optional field: setting it does not change the Builder's State.
    def __annotations(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
    ): EnumElement.Builder[State] = {
      obj.__annotations_=(v)
      this
    }
    def __annotations(
      vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    ): EnumElement.Builder[State] = {
      vOpt match {
        case Some(v) => obj.__annotations_=(v)
        case None => obj.annotationsUnset()
      }
      this
    }
    // Hands out the underlying record exactly once; obj is nulled so reuse fails fast.
    def resultMutable()(
      implicit ev0: State <:< Builder.HasName,
      ev1: State <:< Builder.HasValue
    ): MutableEnumElement = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("EnumElement.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasName, ev1: State <:< Builder.HasValue): EnumElement =
      resultMutable()(ev0, ev1)
  }
  def newBuilder: EnumElement.Builder.AllUnspecified = new Builder(EnumElement.createRawRecord)
  implicit val companionProvider: EnumElementCompanionProvider = new EnumElementCompanionProvider
  implicit val __$ordering: _root_.scala.math.Ordering[EnumElement] = {
    new _root_.scala.math.Ordering[EnumElement] {
      override def compare(x: EnumElement, y: EnumElement): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[EnumElement] = {
    new _root_.java.util.Comparator[EnumElement] {
      override def compare(x: EnumElement, y: EnumElement): Int = x.compare(y)
    }
  }
}
class EnumElementMeta
extends JavaEnumElementMeta[EnumElement, RawEnumElement, EnumElementMeta]
with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[EnumElement] {
override def recordName: String = "EnumElement"
// Thrift descriptors.
val ENUMELEMENT_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("EnumElement")
val NAME_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"name",
org.apache.thrift.protocol.TType.STRING,
1, {
java.util.Collections.emptyMap[String, String]
}
)
val VALUE_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"value",
org.apache.thrift.protocol.TType.I32,
2, {
java.util.Collections.emptyMap[String, String]
}
)
val ANNOTATIONS_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"annotations",
org.apache.thrift.protocol.TType.LIST,
99, {
java.util.Collections.emptyMap[String, String]
}
)
val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
"name" -> NAME_FDESC,
"value" -> VALUE_FDESC,
"annotations" -> ANNOTATIONS_FDESC
)
object _Fields {
case object name extends _Fields(1, "name")
case object value extends _Fields(2, "value")
case object __annotations extends _Fields(99, "annotations")
}
sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
def getThriftFieldId: Short = id
def getFieldName: String = name
}
val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
1.toShort -> _Fields.name,
2.toShort -> _Fields.value,
99.toShort -> _Fields.__annotations
)
override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
createRawRecord
override def createRecord: EnumElement = createRawRecord
override def createRawRecord: RawEnumElement = new RawEnumElement
override def untypedIfInstanceFrom(
x: AnyRef
): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
override def ifInstanceFrom(x: AnyRef): Option[EnumElement] = {
if (x.isInstanceOf[EnumElement]) Some(x.asInstanceOf[EnumElement]) else None
}
override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations(
scala.collection.immutable.Vector(
("generate_proxy", "true")
)
)
// Spindle Descriptors.
// Reflective field descriptors: each one bundles the field's metadata with a
// getter and raw setter/unsetter, so generic Spindle machinery can manipulate
// records without knowing the concrete type. Setters downcast to
// RawEnumElement, the sole mutable implementation.
// Field #1: optional string `name`.
val name =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[
String,
EnumElement,
EnumElementMeta
](
name = "name",
longName = "name",
id = 1,
annotations = Map(),
owner = this,
getter = _.nameOption,
setterRaw =
(r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[EnumElement], v: String) => {
r.asInstanceOf[RawEnumElement].name_=(v)
},
unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[EnumElement]) => {
r.asInstanceOf[RawEnumElement].nameUnset()
},
manifest = manifest[String]
)
// Field #2: optional i32 `value`.
val value =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[
Int,
EnumElement,
EnumElementMeta
](
name = "value",
longName = "value",
id = 2,
annotations = Map(),
owner = this,
getter = _.valueOption,
setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[EnumElement], v: Int) => {
r.asInstanceOf[RawEnumElement].value_=(v)
},
unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[EnumElement]) => {
r.asInstanceOf[RawEnumElement].valueUnset()
},
manifest = manifest[Int]
)
// Field #99: optional list of Annotation structs (Scala member `__annotations`).
val __annotations =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
], EnumElement, EnumElementMeta](
name = "annotations",
longName = "annotations",
id = 99,
annotations = Map(),
owner = this,
getter = _.annotationsOption,
setterRaw = (
r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[EnumElement],
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
) => { r.asInstanceOf[RawEnumElement].__annotations_=(v) },
unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[EnumElement]) => {
r.asInstanceOf[RawEnumElement].annotationsUnset()
},
manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
)
// All field descriptors in id order; `untypedFields` is the type-erased view
// used by generic Spindle code.
override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
fields
override val fields
: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, EnumElement, EnumElementMeta]] =
Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, EnumElement, EnumElementMeta]](
name,
value,
__annotations
)
// Convenience constructor: builds a raw record with all three fields set.
def apply(
name: String,
value: Int,
__annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
): EnumElement = {
val ret = this.createRawRecord
ret.name_=(name)
ret.value_=(value)
ret.__annotations_=(__annotations)
ret
}
}
/** Supplies the [[EnumElementMeta]] companion to generic Spindle machinery
 * that resolves companions via implicit `CompanionProvider` instances.
 */
class EnumElementCompanionProvider
extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[EnumElement] {
override type CompanionT = EnumElementMeta
override def provide: CompanionT = EnumElement
}
/** Read-only interface for the generated `EnumElement` thrift struct
 * (fields: name #1, value #2, annotations #99). Implemented by
 * [[RawEnumElement]] and [[EnumElementProxy]].
 */
trait EnumElement
extends JavaEnumElement[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
EnumElement,
RawEnumElement,
EnumElementMeta
]
with org.apache.thrift.TBase[EnumElement, EnumElement._Fields] {
override def meta: EnumElementMeta
// Total ordering over records: compares field-by-field in id order; for each
// field an unset record sorts before a set one (false < true on isSet), and
// list fields delegate to thrift's TBaseHelper via Java conversion.
override def compare(that: EnumElement): Int = {
var cmp: Int = 0
if (that == null) {
1
} else if ({
cmp = this.nameIsSet.compareTo(that.nameIsSet)
cmp != 0
}) cmp
else if (this.nameIsSet && {
cmp = this.nameOrNull.compareTo(that.nameOrNull)
cmp != 0
}) cmp
else if ({
cmp = this.valueIsSet.compareTo(that.valueIsSet)
cmp != 0
}) cmp
else if (this.valueIsSet && {
cmp = this.value.compareTo(that.value)
cmp != 0
}) cmp
else if ({
cmp = this.annotationsIsSet.compareTo(that.annotationsIsSet)
cmp != 0
}) cmp
else if (this.annotationsIsSet && {
cmp = org.apache.thrift.TBaseHelper.compareTo(
scala.collection.JavaConverters.seqAsJavaListConverter(this.__annotations).asJava,
scala.collection.JavaConverters.seqAsJavaListConverter(that.__annotations).asJava
)
cmp != 0
}) cmp
else 0
}
// Comparison operators defined in terms of compare().
override def <(that: EnumElement): Boolean = { this.compare(that) < 0 }
override def >(that: EnumElement): Boolean = { this.compare(that) > 0 }
override def <=(that: EnumElement): Boolean = { this.compare(that) <= 0 }
override def >=(that: EnumElement): Boolean = { this.compare(that) >= 0 }
override def compareTo(that: EnumElement): Int = compare(that)
def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
def deepCopy(): EnumElement
def copy(
name: String = nameOrNull,
value: java.lang.Integer = valueOrNull,
__annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
annotationsOrNull
): EnumElement
// Fresh mutable copy: only fields that are set on this record are copied.
def mutableCopy(): MutableEnumElement = {
val ret = EnumElement.createRawRecord
if (nameIsSet) ret.name_=(nameOrNull)
if (valueIsSet) ret.value_=(valueOrDefault)
if (annotationsIsSet) ret.__annotations_=(annotationsOrNull)
ret
}
/** Returns a pointer to a Mutable version of this record.
 *
 * If the underlying implementation is mutable, `this` will be returned.
 * If the underlying implementation is immutable, a mutable copy will be returned.
 *
 * After mutating the instance returned by this method, the original instance
 * (on which `mutable` was called) will be in an undefined state. It may or may
 * not have been modified, depending on whether it was immutable or not.
 *
 * This is included as an optimization for when we want access to a Mutable record
 * but don't want to pay the cost of copying every time.
 */
def mutable: MutableEnumElement
// Builder pre-populated with this record's set fields. Typed AllSpecified
// because every required field is carried over from an existing record.
def toBuilder(): EnumElement.Builder.AllSpecified = {
val ret = new EnumElement.Builder(EnumElement.createRawRecord)
if (nameIsSet) ret.name(nameOrNull)
if (valueIsSet) ret.value(valueOrDefault)
if (annotationsIsSet) ret.__annotations(annotationsOrNull)
ret
}
def mergeCopy(that: EnumElement): EnumElement
}
/** Mutable extension of [[EnumElement]]: adds per-field setters/unsetters
 * and in-place merge. `mutable` is the identity here (no copy needed).
 */
trait MutableEnumElement
extends EnumElement
with JavaEnumElementMutable[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
EnumElement,
RawEnumElement,
EnumElementMeta
] {
def name_=(x: String): Unit
def nameUnset(): Unit
def value_=(x: Int): Unit
def valueUnset(): Unit
def __annotations_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
): Unit
def annotationsUnset(): Unit
def merge(that: EnumElement): Unit
// Narrows the return type of copy to MutableEnumElement.
def copy(
name: String = nameOrNull,
value: java.lang.Integer = valueOrNull,
__annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
annotationsOrNull
): MutableEnumElement
// Already mutable, so return this rather than copying.
override def mutable: MutableEnumElement = this
}
/** Forwarding proxy: every operation delegates verbatim to `underlying`,
 * letting subclasses wrap an EnumElement and override selectively.
 * Generated because the struct carries the `generate_proxy` annotation.
 */
trait EnumElementProxy extends EnumElement {
protected def underlying: EnumElement
override def meta = underlying.meta
// field/proxy_ref.ssp
override def name: String = underlying.name
override def nameOption: Option[String] = underlying.nameOption
override def nameOrNull: String = underlying.nameOrNull
override def nameOrThrow: String = underlying.nameOrThrow
override def nameIsSet: Boolean = underlying.nameIsSet
// field/proxy_primitive.ssp
override def value: Int = underlying.value
override def valueOption: Option[Int] = underlying.valueOption
override def valueOrDefault: Int = underlying.valueOrDefault
override def valueOrNull: java.lang.Integer = underlying.valueOrNull
override def valueOrThrow: Int = underlying.valueOrThrow
override def valueIsSet: Boolean = underlying.valueIsSet
// field/proxy_container.ssp
override def __annotations
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
underlying.__annotations
override def annotationsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
underlying.annotationsOption
override def annotationsOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
underlying.annotationsOrDefault
override def annotationsOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
underlying.annotationsOrNull
override def annotationsOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
underlying.annotationsOrThrow
override def annotationsIsSet: Boolean = underlying.annotationsIsSet
// Record-level operations also delegate, including equality and hashing,
// so a proxy compares equal to whatever its underlying record equals.
override def compare(that: EnumElement): Int = underlying.compare(that)
override def clear() { underlying.clear }
override def read(iprot: org.apache.thrift.protocol.TProtocol) { underlying.read(iprot) }
override def write(oprot: org.apache.thrift.protocol.TProtocol) { underlying.write(oprot) }
override def copy(
name: String = nameOrNull,
value: java.lang.Integer = valueOrNull,
__annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
annotationsOrNull
): EnumElement = underlying.copy(
name = name,
value = value,
__annotations = __annotations
)
override def mutableCopy(): MutableEnumElement = underlying.mutableCopy()
override def mergeCopy(that: EnumElement): EnumElement = underlying.mergeCopy(that)
override def mutable: MutableEnumElement = underlying.mutable
override def deepCopy(): EnumElement = underlying.deepCopy()
override def fieldForId(id: Int): EnumElement._Fields = underlying.fieldForId(id)
override def isSet(field: EnumElement._Fields): Boolean = underlying.isSet(field)
override def getFieldValue(field: EnumElement._Fields): AnyRef = underlying.getFieldValue(field)
override def setFieldValue(field: EnumElement._Fields, value: AnyRef) { underlying.setFieldValue(field, value) }
override def hashCode(): Int = underlying.hashCode
override def equals(that: Any): Boolean = underlying.equals(that)
override def toString(): String = underlying.toString
}
/** Mutable variant of the proxy: narrows `underlying` to MutableEnumElement
 * and forwards the mutation operations as well.
 */
trait MutableEnumElementProxy extends MutableEnumElement with EnumElementProxy {
protected def underlying: MutableEnumElement
override def name_=(x: String): Unit = { underlying.name_=(x) }
override def nameUnset(): Unit = { underlying.nameUnset() }
override def value_=(x: Int): Unit = { underlying.value_=(x) }
override def valueUnset(): Unit = { underlying.valueUnset() }
override def __annotations_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
): Unit = { underlying.__annotations_=(x) }
override def annotationsUnset(): Unit = { underlying.annotationsUnset() }
override def copy(
name: String = nameOrNull,
value: java.lang.Integer = valueOrNull,
__annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
annotationsOrNull
): MutableEnumElement = underlying.copy(
name = name,
value = value,
__annotations = __annotations
)
override def merge(that: EnumElement): Unit = underlying.merge(that)
}
/** The concrete, mutable backing implementation of EnumElement.
 * Stores fields in plain vars; "is set" is tracked via null for reference
 * fields and an explicit boolean for the primitive `value` field.
 * Handles thrift wire (de)serialization and preserves unknown fields.
 */
final class RawEnumElement
extends JavaEnumElementRaw[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
EnumElement,
RawEnumElement,
EnumElementMeta
]
with MutableEnumElement {
override def meta: EnumElementMeta = EnumElement
// fields
// Field #1 - name
private var _name: String = null // Underlying type: String
// Bare accessor throws if unset (delegates to nameOrThrow).
override def name: String = nameOrThrow
override def name_=(x: String): Unit = { _name = x }
override def nameOption: Option[String] = if (nameIsSet) Some(_name) else None
override def nameOrNull: String = _name
override def nameOrThrow: String =
if (nameIsSet) _name else throw new java.lang.NullPointerException("field name of EnumElement missing")
// null doubles as the "unset" marker for this reference field.
override def nameIsSet: Boolean = _name != null
override def nameUnset(): Unit = { _name = null }
// Field #2 - value
private var _value: Int = 0 // Underlying type: Int
// Primitive field: 0 is a legal value, so set-ness needs a separate flag.
private var _valueIsSet: Boolean = false
override def value: Int = valueOrDefault
override def value_=(x: Int): Unit = { _value = x; _valueIsSet = true }
override def valueOption: Option[Int] = if (valueIsSet) Some(_value) else None
override def valueOrDefault: Int = _value
override def valueOrNull: java.lang.Integer = if (valueIsSet) _value else null
override def valueOrThrow: Int =
if (valueIsSet) _value else throw new java.lang.NullPointerException("field value of EnumElement missing")
override def valueIsSet: Boolean = _valueIsSet
override def valueUnset(): Unit = { _valueIsSet = false; _value = 0 }
// Field #99 - annotations
private var _annotations: scala.collection.Seq[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
// Container field: bare accessor yields empty Seq when unset (OrDefault).
override def __annotations
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
annotationsOrDefault
override def __annotations_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
): Unit = { _annotations = x }
override def annotationsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
if (annotationsIsSet) Some(_annotations) else None
override def annotationsOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
if (annotationsIsSet) _annotations else scala.collection.Seq.empty
override def annotationsOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] = _annotations
override def annotationsOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
if (annotationsIsSet) _annotations
else throw new java.lang.NullPointerException("field __annotations of EnumElement missing")
override def annotationsIsSet: Boolean = _annotations != null
override def annotationsUnset(): Unit = { _annotations = null }
// end fields
// Unknown fields captured during read(), preserved for round-tripping.
// Prepended per read pass, hence the .reverse on write.
private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
// Serializes only the fields that are set, in id order, then replays any
// preserved unknown fields before the field stop.
override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
oprot.writeStructBegin(EnumElement.ENUMELEMENT_SDESC)
if (nameIsSet) {
oprot.writeFieldBegin(EnumElement.NAME_FDESC)
oprot.writeString(_name)
oprot.writeFieldEnd()
}
if (valueIsSet) {
oprot.writeFieldBegin(EnumElement.VALUE_FDESC)
oprot.writeI32(_value)
oprot.writeFieldEnd()
}
if (annotationsIsSet) {
oprot.writeFieldBegin(EnumElement.ANNOTATIONS_FDESC)
oprot.writeListBegin(
new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _annotations.size)
)
_annotations.foreach(element => {
element.write(oprot)
})
oprot.writeListEnd()
oprot.writeFieldEnd()
}
if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
unknownFields.reverse foreach { _.write(oprot) }
}
oprot.writeFieldStop()
oprot.writeStructEnd()
}
// Deserializes fields from the protocol. Fields with an unexpected wire type
// are skipped; unrecognized field ids are either preserved (if configured)
// or skipped.
override def read(iprot: org.apache.thrift.protocol.TProtocol) {
// Unknown fields in this read go here.
// Lazily allocates a single UnknownFields bucket per read pass.
var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
currentUnknownFieldsOpt match {
case Some(uf) => uf
case None => {
val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
this,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
)
unknownFields = uf :: unknownFields
currentUnknownFieldsOpt = Some(uf)
uf
}
}
iprot.readStructBegin()
var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
// Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
// name to look up the id and type. This allows us to use those protocols naturally.
var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
EnumElement.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
} else {
wire_field_header
}
try {
field_header.id match {
case 1 => { // name
if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
_name = iprot.readString()
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case 2 => { // value
if (field_header.`type` == org.apache.thrift.protocol.TType.I32) {
_value = iprot.readI32()
_valueIsSet = true
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case 99 => { // annotations
if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
_annotations = {
val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
val builder = scala.collection.immutable.Vector
.newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
// Count down from the declared list size, reading one struct per slot.
var i: Int = tlist.size
builder.sizeHint(tlist.size)
while (i > 0) {
builder += ({
val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation.createRawRecord
s.read(iprot)
s
})
i -= 1
}
builder.result()
}
iprot.readListEnd()
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case _ => {
if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
} // end match
} catch {
case e: org.apache.thrift.TException =>
throw new org.apache.thrift.TException(
"Error reading field %d in structure EnumElement".format(field_header.id),
e
)
}
iprot.readFieldEnd()
wire_field_header = iprot.readFieldBegin()
} // end while
iprot.readStructEnd()
}
// In-place merge: scalar fields are taken from `that` only when unset here;
// the annotations list is concatenated when both sides have it set.
override def merge(that: EnumElement): Unit = {
if (that.nameIsSet && !this.nameIsSet) {
this.name_=(that.nameOrNull)
}
if (that.valueIsSet && !this.valueIsSet) {
this.value_=(that.valueOrDefault)
}
if (that.annotationsIsSet && !this.annotationsIsSet) {
this.__annotations_=(that.annotationsOrDefault)
} else if (that.annotationsIsSet && this.annotationsIsSet) {
this.__annotations_=(this.__annotations ++ that.__annotations)
}
}
// Merge of this then that into a fresh record (that's set fields win where
// this is unset; lists concatenate per merge()).
override def mergeCopy(that: EnumElement): EnumElement = {
val ret = EnumElement.createRawRecord
ret.merge(this)
ret.merge(that)
ret
}
override def equals(that: Any): Boolean = that match {
case null => false
case o: EnumElement => this.equals(o)
case _ => false
}
// Field-wise equality: each field must agree on both set-ness and value.
def equals(that: EnumElement): Boolean = {
that != null &&
(if (this.nameIsSet) (that.nameIsSet && this.nameOrNull == that.nameOrNull) else !that.nameIsSet) &&
(if (this.valueIsSet) (that.valueIsSet && this.valueOrDefault == that.valueOrDefault) else !that.valueIsSet) &&
(if (this.annotationsIsSet) (that.annotationsIsSet && this.annotationsOrDefault == that.annotationsOrDefault)
else !that.annotationsIsSet) &&
true
}
override def hashCode(): Int = {
// We use a fixed seed, for consistency.
// Only set fields contribute, keeping hashCode consistent with equals.
val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
if (nameIsSet) hasher.append(_name.##)
if (valueIsSet) hasher.append(_value.##)
if (annotationsIsSet) hasher.append(_annotations.##)
hasher.hash
}
// Returns the values of the set fields on this object, in id order.
def getSetFields: Seq[Any] = {
var ret: List[Any] = Nil
if (nameIsSet) ret = nameOrNull :: ret
if (valueIsSet) ret = valueOrDefault :: ret
if (annotationsIsSet) ret = annotationsOrDefault :: ret
ret.reverse
}
// Resets every field to unset and drops preserved unknown fields.
override def clear() {
nameUnset()
valueUnset()
annotationsUnset()
unknownFields = Nil
}
def fieldForId(id: Int): EnumElement._Fields = id match {
case 1 => EnumElement._Fields.name
case 2 => EnumElement._Fields.value
case 99 => EnumElement._Fields.__annotations
case _ => null
}
def isSet(field: EnumElement._Fields): Boolean = field match {
case EnumElement._Fields.name => nameIsSet
case EnumElement._Fields.value => valueIsSet
case EnumElement._Fields.__annotations => annotationsIsSet
case _ => false
}
// TBase reflective getter; primitives are boxed into AnyRef.
def getFieldValue(field: EnumElement._Fields): AnyRef = field match {
case EnumElement._Fields.name => nameOrNull.asInstanceOf[AnyRef]
case EnumElement._Fields.value => valueOrDefault.asInstanceOf[AnyRef]
case EnumElement._Fields.__annotations => annotationsOrDefault.asInstanceOf[AnyRef]
case _ => throw new IllegalStateException
}
// TBase reflective setter; unrecognized fields are ignored.
def setFieldValue(field: EnumElement._Fields, value: AnyRef) {
field match {
case EnumElement._Fields.name => name_=(value.asInstanceOf[String])
case EnumElement._Fields.value => value_=(value.asInstanceOf[Int])
case EnumElement._Fields.__annotations =>
__annotations_=(
value
.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
)
case _ =>
}
}
// Deep copy via a binary-protocol serialization round trip.
override def deepCopy(): RawEnumElement = {
// May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
write(prot)
val ret = EnumElement.createRawRecord
ret.read(prot)
ret
}
// Case-class-style copy: null arguments mean "leave unset" on the new record.
override def copy(
name: String = nameOrNull,
value: java.lang.Integer = valueOrNull,
__annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
annotationsOrNull
): RawEnumElement = {
val ret = new RawEnumElement
if (name != null) ret.name_=(name)
if (value != null) ret.value_=(value)
if (__annotations != null) ret.__annotations_=(__annotations)
ret
}
// Human-readable rendering via the string protocol.
override def toString: String = {
val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
write(oprot)
trans.toString("UTF8")
}
}
/** Companion object for the generated `Enum` thrift struct, including a
 * phantom-typed Builder that tracks required fields at compile time.
 */
object Enum extends EnumMeta {
object Builder {
// Phantom marker traits: the Builder's State type accumulates these as
// required fields are supplied, so result() only compiles when all
// required fields (name, elements) have been set.
sealed trait HasName
sealed trait HasElements
sealed trait MaybeSpecified
sealed class Specified extends MaybeSpecified
sealed class Unspecified extends MaybeSpecified
type HasAll = HasName with HasElements
type AllSpecified = Builder[HasAll]
type AllUnspecified = Builder[Any]
}
class Builder[+State] private[Enum] (private var obj: RawEnum) {
// Each setter mutates the wrapped record and refines the phantom State.
def name(v: String): Enum.Builder[State with Builder.HasName] = {
obj.name_=(v)
this.asInstanceOf[Enum.Builder[State with Builder.HasName]]
}
def elements(
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement]
): Enum.Builder[State with Builder.HasElements] = {
obj.elements_=(v)
this.asInstanceOf[Enum.Builder[State with Builder.HasElements]]
}
// Optional field: does not change State.
def __annotations(
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
): Enum.Builder[State] = {
obj.__annotations_=(v)
this
}
def __annotations(
vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
): Enum.Builder[State] = {
vOpt match {
case Some(v) => obj.__annotations_=(v)
case None => obj.annotationsUnset()
}
this
}
// Hands out the built record exactly once; obj is nulled to detect reuse.
// The implicit evidence requires State to prove all required fields are set.
def resultMutable()(implicit ev0: State <:< Builder.HasName, ev1: State <:< Builder.HasElements): MutableEnum = {
if (obj != null) {
val ret = obj
obj = null
ret
} else {
throw new IllegalStateException("Enum.Builder.result invoked multiple times. Use a new Builder.")
}
}
def result()(implicit ev0: State <:< Builder.HasName, ev1: State <:< Builder.HasElements): Enum =
resultMutable()(ev0, ev1)
}
def newBuilder: Enum.Builder.AllUnspecified = new Builder(Enum.createRawRecord)
implicit val companionProvider: EnumCompanionProvider = new EnumCompanionProvider
// Ordering/Comparator instances delegating to the record's compare().
implicit val __$ordering: _root_.scala.math.Ordering[Enum] = {
new _root_.scala.math.Ordering[Enum] {
override def compare(x: Enum, y: Enum): Int = x.compare(y)
}
}
implicit val __$comparator: _root_.java.util.Comparator[Enum] = {
new _root_.java.util.Comparator[Enum] {
override def compare(x: Enum, y: Enum): Int = x.compare(y)
}
}
}
/** Metadata/companion class for the generated `Enum` thrift struct
 * (fields: name #1, elements #2, annotations #99). Structure mirrors
 * EnumElementMeta: thrift wire descriptors, field id enums, record
 * factories, and reflective Spindle field descriptors.
 */
class EnumMeta
extends JavaEnumMeta[Enum, RawEnum, EnumMeta]
with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Enum] {
override def recordName: String = "Enum"
// Thrift descriptors.
val ENUM_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Enum")
val NAME_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"name",
org.apache.thrift.protocol.TType.STRING,
1, {
java.util.Collections.emptyMap[String, String]
}
)
val ELEMENTS_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"elements",
org.apache.thrift.protocol.TType.LIST,
2, {
java.util.Collections.emptyMap[String, String]
}
)
val ANNOTATIONS_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"annotations",
org.apache.thrift.protocol.TType.LIST,
99, {
java.util.Collections.emptyMap[String, String]
}
)
// Sentinel descriptor for unrecognized fields.
val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
// Wire-name lookup for name-based protocols (JSON/BSON).
val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
"name" -> NAME_FDESC,
"elements" -> ELEMENTS_FDESC,
"annotations" -> ANNOTATIONS_FDESC
)
// TFieldIdEnum case objects; wire field 99 is `__annotations` in Scala.
object _Fields {
case object name extends _Fields(1, "name")
case object elements extends _Fields(2, "elements")
case object __annotations extends _Fields(99, "annotations")
}
sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
def getThriftFieldId: Short = id
def getFieldName: String = name
}
val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
1.toShort -> _Fields.name,
2.toShort -> _Fields.elements,
99.toShort -> _Fields.__annotations
)
// Record factories; RawEnum is the mutable backing implementation.
override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
createRawRecord
override def createRecord: Enum = createRawRecord
override def createRawRecord: RawEnum = new RawEnum
override def untypedIfInstanceFrom(
x: AnyRef
): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
override def ifInstanceFrom(x: AnyRef): Option[Enum] = {
if (x.isInstanceOf[Enum]) Some(x.asInstanceOf[Enum]) else None
}
// Struct-level IDL annotations.
override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations(
scala.collection.immutable.Vector(
("generate_proxy", "true")
)
)
// Spindle Descriptors.
// Reflective field descriptors; setters downcast to RawEnum.
val name =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Enum, EnumMeta](
name = "name",
longName = "name",
id = 1,
annotations = Map(),
owner = this,
getter = _.nameOption,
setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Enum], v: String) => {
r.asInstanceOf[RawEnum].name_=(v)
},
unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Enum]) => {
r.asInstanceOf[RawEnum].nameUnset()
},
manifest = manifest[String]
)
val elements =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement
], Enum, EnumMeta](
name = "elements",
longName = "elements",
id = 2,
annotations = Map(),
owner = this,
getter = _.elementsOption,
setterRaw = (
r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Enum],
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement]
) => { r.asInstanceOf[RawEnum].elements_=(v) },
unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Enum]) => {
r.asInstanceOf[RawEnum].elementsUnset()
},
manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement]]
)
val __annotations =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
], Enum, EnumMeta](
name = "annotations",
longName = "annotations",
id = 99,
annotations = Map(),
owner = this,
getter = _.annotationsOption,
setterRaw = (
r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Enum],
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
) => { r.asInstanceOf[RawEnum].__annotations_=(v) },
unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Enum]) => {
r.asInstanceOf[RawEnum].annotationsUnset()
},
manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
)
// All field descriptors in id order; untypedFields is the erased view.
override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
fields
override val fields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Enum, EnumMeta]] =
Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Enum, EnumMeta]](
name,
elements,
__annotations
)
// Convenience constructor: builds a raw record with all three fields set.
def apply(
name: String,
elements: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement],
__annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
): Enum = {
val ret = this.createRawRecord
ret.name_=(name)
ret.elements_=(elements)
ret.__annotations_=(__annotations)
ret
}
}
/** Supplies the [[EnumMeta]] companion to generic Spindle machinery that
 * resolves companions via implicit `CompanionProvider` instances.
 */
class EnumCompanionProvider extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Enum] {
override type CompanionT = EnumMeta
override def provide: CompanionT = Enum
}
/** Read-only interface for the generated `Enum` thrift struct
 * (fields: name #1, elements #2, annotations #99). Implemented by
 * [[RawEnum]] and [[EnumProxy]].
 */
trait Enum
extends JavaEnum[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement,
Enum,
RawEnum,
EnumMeta
]
with org.apache.thrift.TBase[Enum, Enum._Fields] {
override def meta: EnumMeta
// Total ordering: compares field-by-field in id order; unset sorts before
// set for each field, list fields delegate to thrift's TBaseHelper.
override def compare(that: Enum): Int = {
var cmp: Int = 0
if (that == null) {
1
} else if ({
cmp = this.nameIsSet.compareTo(that.nameIsSet)
cmp != 0
}) cmp
else if (this.nameIsSet && {
cmp = this.nameOrNull.compareTo(that.nameOrNull)
cmp != 0
}) cmp
else if ({
cmp = this.elementsIsSet.compareTo(that.elementsIsSet)
cmp != 0
}) cmp
else if (this.elementsIsSet && {
cmp = org.apache.thrift.TBaseHelper.compareTo(
scala.collection.JavaConverters.seqAsJavaListConverter(this.elements).asJava,
scala.collection.JavaConverters.seqAsJavaListConverter(that.elements).asJava
)
cmp != 0
}) cmp
else if ({
cmp = this.annotationsIsSet.compareTo(that.annotationsIsSet)
cmp != 0
}) cmp
else if (this.annotationsIsSet && {
cmp = org.apache.thrift.TBaseHelper.compareTo(
scala.collection.JavaConverters.seqAsJavaListConverter(this.__annotations).asJava,
scala.collection.JavaConverters.seqAsJavaListConverter(that.__annotations).asJava
)
cmp != 0
}) cmp
else 0
}
// Comparison operators defined in terms of compare().
override def <(that: Enum): Boolean = { this.compare(that) < 0 }
override def >(that: Enum): Boolean = { this.compare(that) > 0 }
override def <=(that: Enum): Boolean = { this.compare(that) <= 0 }
override def >=(that: Enum): Boolean = { this.compare(that) >= 0 }
override def compareTo(that: Enum): Int = compare(that)
def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
def deepCopy(): Enum
def copy(
name: String = nameOrNull,
elements: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
elementsOrNull,
__annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
annotationsOrNull
): Enum
// Fresh mutable copy: only fields that are set on this record are copied.
def mutableCopy(): MutableEnum = {
val ret = Enum.createRawRecord
if (nameIsSet) ret.name_=(nameOrNull)
if (elementsIsSet) ret.elements_=(elementsOrNull)
if (annotationsIsSet) ret.__annotations_=(annotationsOrNull)
ret
}
/** Returns a pointer to a Mutable version of this record.
 *
 * If the underlying implementation is mutable, `this` will be returned.
 * If the underlying implementation is immutable, a mutable copy will be returned.
 *
 * After mutating the instance returned by this method, the original instance
 * (on which `mutable` was called) will be in an undefined state. It may or may
 * not have been modified, depending on whether it was immutable or not.
 *
 * This is included as an optimization for when we want access to a Mutable record
 * but don't want to pay the cost of copying every time.
 */
def mutable: MutableEnum
// Builder pre-populated with this record's set fields.
def toBuilder(): Enum.Builder.AllSpecified = {
val ret = new Enum.Builder(Enum.createRawRecord)
if (nameIsSet) ret.name(nameOrNull)
if (elementsIsSet) ret.elements(elementsOrNull)
if (annotationsIsSet) ret.__annotations(annotationsOrNull)
ret
}
def mergeCopy(that: Enum): Enum
}
/** Mutable view of an [[Enum]] record: adds a setter and an unsetter for each field.
  *
  * Implemented concretely by [[RawEnum]]; `mutable` is the identity here since the
  * record is already mutable.
  */
trait MutableEnum
  extends Enum
  with JavaEnumMutable[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement,
    Enum,
    RawEnum,
    EnumMeta
  ] {
  // Field #1 - name
  def name_=(x: String): Unit
  def nameUnset(): Unit
  // Field #2 - elements
  def elements_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement]): Unit
  def elementsUnset(): Unit
  // Field #99 - annotations (prefixed __ to avoid clashing with the meta's `annotations`)
  def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit
  def annotationsUnset(): Unit
  // In-place merge of `that`'s fields into this record (see RawEnum.merge for semantics).
  def merge(that: Enum): Unit
  def copy(
    name: String = nameOrNull,
    elements: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
      elementsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableEnum
  // Already mutable, so no copy is needed.
  override def mutable: MutableEnum = this
}
/** Read-only proxy for [[Enum]]: every member delegates to `underlying`.
  *
  * Extend this to wrap/decorate a record while overriding only selected members.
  * Note that `hashCode`/`equals`/`toString` also delegate, so a proxy is
  * indistinguishable from its underlying record in collections.
  */
trait EnumProxy extends Enum {
  // The record all calls are forwarded to.
  protected def underlying: Enum
  override def meta = underlying.meta
  // field/proxy_ref.ssp
  override def name: String = underlying.name
  override def nameOption: Option[String] = underlying.nameOption
  override def nameOrNull: String = underlying.nameOrNull
  override def nameOrThrow: String = underlying.nameOrThrow
  override def nameIsSet: Boolean = underlying.nameIsSet
  // field/proxy_container.ssp
  override def elements: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
    underlying.elements
  override def elementsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement]] =
    underlying.elementsOption
  override def elementsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
    underlying.elementsOrDefault
  override def elementsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
    underlying.elementsOrNull
  override def elementsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
    underlying.elementsOrThrow
  override def elementsIsSet: Boolean = underlying.elementsIsSet
  // field/proxy_container.ssp
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.__annotations
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    underlying.annotationsOption
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrDefault
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrNull
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrThrow
  override def annotationsIsSet: Boolean = underlying.annotationsIsSet
  // Record-level operations: comparison, mutation, thrift (de)serialization.
  override def compare(that: Enum): Int = underlying.compare(that)
  override def clear() { underlying.clear }
  override def read(iprot: org.apache.thrift.protocol.TProtocol) { underlying.read(iprot) }
  override def write(oprot: org.apache.thrift.protocol.TProtocol) { underlying.write(oprot) }
  override def copy(
    name: String = nameOrNull,
    elements: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
      elementsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): Enum = underlying.copy(
    name = name,
    elements = elements,
    __annotations = __annotations
  )
  override def mutableCopy(): MutableEnum = underlying.mutableCopy()
  override def mergeCopy(that: Enum): Enum = underlying.mergeCopy(that)
  override def mutable: MutableEnum = underlying.mutable
  override def deepCopy(): Enum = underlying.deepCopy()
  // org.apache.thrift.TBase reflective field access, forwarded unchanged.
  override def fieldForId(id: Int): Enum._Fields = underlying.fieldForId(id)
  override def isSet(field: Enum._Fields): Boolean = underlying.isSet(field)
  override def getFieldValue(field: Enum._Fields): AnyRef = underlying.getFieldValue(field)
  override def setFieldValue(field: Enum._Fields, value: AnyRef) { underlying.setFieldValue(field, value) }
  // Object identity methods delegate too, so proxy == underlying in sets/maps.
  override def hashCode(): Int = underlying.hashCode
  override def equals(that: Any): Boolean = underlying.equals(that)
  override def toString(): String = underlying.toString
}
/** Mutable proxy for [[Enum]]: adds delegating setters/unsetters on top of [[EnumProxy]].
  *
  * `underlying` is narrowed to [[MutableEnum]] so mutations can be forwarded.
  */
trait MutableEnumProxy extends MutableEnum with EnumProxy {
  protected def underlying: MutableEnum
  override def name_=(x: String): Unit = { underlying.name_=(x) }
  override def nameUnset(): Unit = { underlying.nameUnset() }
  override def elements_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement]
  ): Unit = { underlying.elements_=(x) }
  override def elementsUnset(): Unit = { underlying.elementsUnset() }
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { underlying.__annotations_=(x) }
  override def annotationsUnset(): Unit = { underlying.annotationsUnset() }
  override def copy(
    name: String = nameOrNull,
    elements: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
      elementsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableEnum = underlying.copy(
    name = name,
    elements = elements,
    __annotations = __annotations
  )
  // In-place merge is applied to the underlying record.
  override def merge(that: Enum): Unit = underlying.merge(that)
}
/** Concrete, mutable backing implementation of [[Enum]].
  *
  * Each field is stored in a nullable `var`; a field counts as "set" iff its var
  * is non-null. Handles Thrift wire (de)serialization, merging, equality,
  * hashing, and preservation of unknown wire fields across a read/write cycle.
  */
final class RawEnum
  extends JavaEnumRaw[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement,
    Enum,
    RawEnum,
    EnumMeta
  ]
  with MutableEnum {
  override def meta: EnumMeta = Enum
  // fields
  // Field #1 - name
  private var _name: String = null // Underlying type: String
  // The bare accessor throws when unset (see nameOrThrow).
  override def name: String = nameOrThrow
  override def name_=(x: String): Unit = { _name = x }
  override def nameOption: Option[String] = if (nameIsSet) Some(_name) else None
  override def nameOrNull: String = _name
  override def nameOrThrow: String =
    if (nameIsSet) _name else throw new java.lang.NullPointerException("field name of Enum missing")
  override def nameIsSet: Boolean = _name != null
  override def nameUnset(): Unit = { _name = null }
  // Field #2 - elements
  private var _elements: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
    null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement]
  // The bare accessor falls back to an empty Seq when unset (see elementsOrDefault).
  override def elements: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
    elementsOrDefault
  override def elements_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement]
  ): Unit = { _elements = x }
  override def elementsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement]] =
    if (elementsIsSet) Some(_elements) else None
  override def elementsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
    if (elementsIsSet) _elements else scala.collection.Seq.empty
  override def elementsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] = _elements
  override def elementsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
    if (elementsIsSet) _elements else throw new java.lang.NullPointerException("field elements of Enum missing")
  override def elementsIsSet: Boolean = _elements != null
  override def elementsUnset(): Unit = { _elements = null }
  // Field #99 - annotations
  private var _annotations: scala.collection.Seq[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
  ] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    annotationsOrDefault
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { _annotations = x }
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    if (annotationsIsSet) Some(_annotations) else None
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations else scala.collection.Seq.empty
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] = _annotations
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations
    else throw new java.lang.NullPointerException("field __annotations of Enum missing")
  override def annotationsIsSet: Boolean = _annotations != null
  override def annotationsUnset(): Unit = { _annotations = null }
  // end fields
  // Unknown wire fields captured during read(), most recently seen first.
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  // Serializes only the set fields, then (optionally) replays preserved unknown
  // fields in their original order, before closing out the struct.
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Enum.ENUM_SDESC)
    if (nameIsSet) {
      oprot.writeFieldBegin(Enum.NAME_FDESC)
      oprot.writeString(_name)
      oprot.writeFieldEnd()
    }
    if (elementsIsSet) {
      oprot.writeFieldBegin(Enum.ELEMENTS_FDESC)
      oprot.writeListBegin(
        new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _elements.size)
      )
      _elements.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (annotationsIsSet) {
      oprot.writeFieldBegin(Enum.ANNOTATIONS_FDESC)
      oprot.writeListBegin(
        new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _annotations.size)
      )
      _annotations.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      // unknownFields is built most-recent-first, so reverse to restore wire order.
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  // Deserializes fields from the protocol, skipping type-mismatched fields and
  // either preserving or skipping unrecognized field ids.
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (at most once per read) the container that accumulates unknown fields.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Enum.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // name
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _name = iprot.readString()
            } else {
              // Wrong wire type for this id: skip rather than fail.
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // elements
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _elements = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 99 => { // annotations
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _annotations = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          // Wrap to identify which field id failed to decode.
          throw new org.apache.thrift.TException("Error reading field %d in structure Enum".format(field_header.id), e)
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  // In-place merge: scalar fields keep this record's value when set; list fields
  // are concatenated (this record's elements first) when both sides are set.
  override def merge(that: Enum): Unit = {
    if (that.nameIsSet && !this.nameIsSet) {
      this.name_=(that.nameOrNull)
    }
    if (that.elementsIsSet && !this.elementsIsSet) {
      this.elements_=(that.elementsOrNull)
    } else if (that.elementsIsSet && this.elementsIsSet) {
      this.elements_=(this.elements ++ that.elements)
    }
    if (that.annotationsIsSet && !this.annotationsIsSet) {
      this.__annotations_=(that.annotationsOrDefault)
    } else if (that.annotationsIsSet && this.annotationsIsSet) {
      this.__annotations_=(this.__annotations ++ that.__annotations)
    }
  }
  // Non-destructive variant of merge: folds this record, then `that`, into a new record.
  override def mergeCopy(that: Enum): Enum = {
    val ret = Enum.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Enum => this.equals(o)
    case _ => false
  }
  // Typed equality: each field must agree both on its set-flag and (when set) its value.
  def equals(that: Enum): Boolean = {
    that != null &&
    (if (this.nameIsSet) (that.nameIsSet && this.nameOrNull == that.nameOrNull) else !that.nameIsSet) &&
    (if (this.elementsIsSet) (that.elementsIsSet && this.elementsOrNull == that.elementsOrNull)
     else !that.elementsIsSet) &&
    (if (this.annotationsIsSet) (that.annotationsIsSet && this.annotationsOrDefault == that.annotationsOrDefault)
     else !that.annotationsIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    // Only set fields contribute, mirroring equals above.
    if (nameIsSet) hasher.append(_name.##)
    if (elementsIsSet) hasher.append(_elements.##)
    if (annotationsIsSet) hasher.append(_annotations.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (nameIsSet) ret = nameOrNull :: ret
    if (elementsIsSet) ret = elementsOrNull :: ret
    if (annotationsIsSet) ret = annotationsOrDefault :: ret
    ret.reverse
  }
  // Unsets every field and drops any preserved unknown fields.
  override def clear() {
    nameUnset()
    elementsUnset()
    annotationsUnset()
    unknownFields = Nil
  }
  // TBase reflective access: thrift field id -> _Fields token (null when unrecognized).
  def fieldForId(id: Int): Enum._Fields = id match {
    case 1 => Enum._Fields.name
    case 2 => Enum._Fields.elements
    case 99 => Enum._Fields.__annotations
    case _ => null
  }
  def isSet(field: Enum._Fields): Boolean = field match {
    case Enum._Fields.name => nameIsSet
    case Enum._Fields.elements => elementsIsSet
    case Enum._Fields.__annotations => annotationsIsSet
    case _ => false
  }
  def getFieldValue(field: Enum._Fields): AnyRef = field match {
    case Enum._Fields.name => nameOrNull.asInstanceOf[AnyRef]
    case Enum._Fields.elements => elementsOrNull.asInstanceOf[AnyRef]
    case Enum._Fields.__annotations => annotationsOrDefault.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  // Untyped setter used by the thrift runtime; unknown field tokens are ignored.
  def setFieldValue(field: Enum._Fields, value: AnyRef) {
    field match {
      case Enum._Fields.name => name_=(value.asInstanceOf[String])
      case Enum._Fields.elements =>
        elements_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement]]
        )
      case Enum._Fields.__annotations =>
        __annotations_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
        )
      case _ =>
    }
  }
  // Deep copy via a binary-protocol round trip through an in-memory buffer.
  override def deepCopy(): RawEnum = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Enum.createRawRecord
    ret.read(prot)
    ret
  }
  // Case-class-like copy; a null argument leaves the corresponding field unset.
  override def copy(
    name: String = nameOrNull,
    elements: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.EnumElement] =
      elementsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): RawEnum = {
    val ret = new RawEnum
    if (name != null) ret.name_=(name)
    if (elements != null) ret.elements_=(elements)
    if (__annotations != null) ret.__annotations_=(__annotations)
    ret
  }
  // Human-readable rendering via the string protocol round trip.
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion for the Field record: type-safe builder plus implicit ordering/comparator.
  *
  * The Builder uses phantom types to track, at compile time, which required
  * fields (identifier, name, typeId) have been supplied; `result()` only
  * compiles once all three are present.
  */
object Field extends FieldMeta {
  object Builder {
    // Phantom-state markers mixed into the Builder's State type as fields are set.
    sealed trait HasIdentifier
    sealed trait HasName
    sealed trait HasTypeId
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasIdentifier with HasName with HasTypeId
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[Field] (private var obj: RawField) {
    // Required field #1; the cast only refines the phantom State, never the runtime value.
    def identifier(v: Short): Field.Builder[State with Builder.HasIdentifier] = {
      obj.identifier_=(v)
      this.asInstanceOf[Field.Builder[State with Builder.HasIdentifier]]
    }
    // Required field #2.
    def name(v: String): Field.Builder[State with Builder.HasName] = {
      obj.name_=(v)
      this.asInstanceOf[Field.Builder[State with Builder.HasName]]
    }
    // Required field #3.
    def typeId(v: String): Field.Builder[State with Builder.HasTypeId] = {
      obj.typeId_=(v)
      this.asInstanceOf[Field.Builder[State with Builder.HasTypeId]]
    }
    // Optional fields: the Option overloads unset the field on None.
    def requiredness(
      v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness
    ): Field.Builder[State] = {
      obj.requiredness_=(v)
      this
    }
    def requiredness(
      vOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness]
    ): Field.Builder[State] = {
      vOpt match {
        case Some(v) => obj.requiredness_=(v)
        case None => obj.requirednessUnset()
      }
      this
    }
    def defaultValue(v: String): Field.Builder[State] = {
      obj.defaultValue_=(v)
      this
    }
    def defaultValue(vOpt: Option[String]): Field.Builder[State] = {
      vOpt match {
        case Some(v) => obj.defaultValue_=(v)
        case None => obj.defaultValueUnset()
      }
      this
    }
    def __annotations(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
    ): Field.Builder[State] = {
      obj.__annotations_=(v)
      this
    }
    def __annotations(
      vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    ): Field.Builder[State] = {
      vOpt match {
        case Some(v) => obj.__annotations_=(v)
        case None => obj.annotationsUnset()
      }
      this
    }
    // Hands out the built record exactly once; `obj` is nulled to detect reuse.
    def resultMutable()(
      implicit ev0: State <:< Builder.HasIdentifier,
      ev1: State <:< Builder.HasName,
      ev2: State <:< Builder.HasTypeId
    ): MutableField = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("Field.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(
      implicit ev0: State <:< Builder.HasIdentifier,
      ev1: State <:< Builder.HasName,
      ev2: State <:< Builder.HasTypeId
    ): Field = resultMutable()(ev0, ev1, ev2)
  }
  def newBuilder: Field.Builder.AllUnspecified = new Builder(Field.createRawRecord)
  implicit val companionProvider: FieldCompanionProvider = new FieldCompanionProvider
  // Ordering/Comparator instances delegating to Field.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[Field] = {
    new _root_.scala.math.Ordering[Field] {
      override def compare(x: Field, y: Field): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[Field] = {
    new _root_.java.util.Comparator[Field] {
      override def compare(x: Field, y: Field): Int = x.compare(y)
    }
  }
}
/** Metadata for the Field record: thrift descriptors, wire-name lookup tables,
  * Spindle field descriptors, and factory methods. The singleton instance is
  * the [[Field]] companion object.
  */
class FieldMeta
  extends JavaFieldMeta[Field, RawField, FieldMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Field] {
  override def recordName: String = "Field"
  // Thrift descriptors.
  val FIELD_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Field")
  val IDENTIFIER_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "identifier",
      org.apache.thrift.protocol.TType.I16,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val NAME_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "name",
      org.apache.thrift.protocol.TType.STRING,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val TYPEID_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "typeId",
      org.apache.thrift.protocol.TType.STRING,
      3, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val REQUIREDNESS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "requiredness",
      org.apache.thrift.protocol.TType.I32,
      4, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val DEFAULTVALUE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "defaultValue",
      org.apache.thrift.protocol.TType.STRING,
      5, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val ANNOTATIONS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "annotations",
      org.apache.thrift.protocol.TType.LIST,
      99, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  // Sentinel descriptor (id -1, VOID) for fields not recognized on the wire.
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup used by name-based protocols (e.g. JSON/BSON) to recover field id/type.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "identifier" -> IDENTIFIER_FDESC,
    "name" -> NAME_FDESC,
    "typeId" -> TYPEID_FDESC,
    "requiredness" -> REQUIREDNESS_FDESC,
    "defaultValue" -> DEFAULTVALUE_FDESC,
    "annotations" -> ANNOTATIONS_FDESC
  )
  // TFieldIdEnum tokens for reflective access via org.apache.thrift.TBase.
  object _Fields {
    case object identifier extends _Fields(1, "identifier")
    case object name extends _Fields(2, "name")
    case object typeId extends _Fields(3, "typeId")
    case object requiredness extends _Fields(4, "requiredness")
    case object defaultValue extends _Fields(5, "defaultValue")
    case object __annotations extends _Fields(99, "annotations")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.identifier,
    2.toShort -> _Fields.name,
    3.toShort -> _Fields.typeId,
    4.toShort -> _Fields.requiredness,
    5.toShort -> _Fields.defaultValue,
    99.toShort -> _Fields.__annotations
  )
  // Record factories; all produce a fresh empty RawField.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: Field = createRawRecord
  override def createRawRecord: RawField = new RawField
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  // Safe downcast: Some(field) iff x is a Field.
  override def ifInstanceFrom(x: AnyRef): Option[Field] = {
    if (x.isInstanceOf[Field]) Some(x.asInstanceOf[Field]) else None
  }
  // Struct-level thrift annotations from the IDL.
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations(
      scala.collection.immutable.Vector(
        ("generate_proxy", "true")
      )
    )
  // Spindle Descriptors.
  val identifier =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[Short, Field, FieldMeta](
      name = "identifier",
      longName = "identifier",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.identifierOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Field], v: Short) => {
        r.asInstanceOf[RawField].identifier_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Field]) => {
        r.asInstanceOf[RawField].identifierUnset()
      },
      manifest = manifest[Short]
    )
  val name =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Field, FieldMeta](
      name = "name",
      longName = "name",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.nameOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Field], v: String) => {
        r.asInstanceOf[RawField].name_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Field]) => {
        r.asInstanceOf[RawField].nameUnset()
      },
      manifest = manifest[String]
    )
  val typeId =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Field, FieldMeta](
      name = "typeId",
      longName = "typeId",
      id = 3,
      annotations = Map(),
      owner = this,
      getter = _.typeIdOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Field], v: String) => {
        r.asInstanceOf[RawField].typeId_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Field]) => {
        r.asInstanceOf[RawField].typeIdUnset()
      },
      manifest = manifest[String]
    )
  val requiredness =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness,
      Field,
      FieldMeta
    ](
      name = "requiredness",
      longName = "requiredness",
      id = 4,
      annotations = Map(),
      owner = this,
      getter = _.requirednessOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Field],
        v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness
      ) => { r.asInstanceOf[RawField].requiredness_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Field]) => {
        r.asInstanceOf[RawField].requirednessUnset()
      },
      manifest = manifest[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness]
    )
  val defaultValue =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Field, FieldMeta](
      name = "defaultValue",
      longName = "defaultValue",
      id = 5,
      annotations = Map(),
      owner = this,
      getter = _.defaultValueOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Field], v: String) => {
        r.asInstanceOf[RawField].defaultValue_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Field]) => {
        r.asInstanceOf[RawField].defaultValueUnset()
      },
      manifest = manifest[String]
    )
  val __annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
    ], Field, FieldMeta](
      name = "annotations",
      longName = "annotations",
      id = 99,
      annotations = Map(),
      owner = this,
      getter = _.annotationsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Field],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
      ) => { r.asInstanceOf[RawField].__annotations_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Field]) => {
        r.asInstanceOf[RawField].annotationsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  // All field descriptors, in thrift-id order.
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Field, FieldMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Field, FieldMeta]](
      identifier,
      name,
      typeId,
      requiredness,
      defaultValue,
      __annotations
    )
  // All-fields constructor; sets every field unconditionally (no null checks).
  def apply(
    identifier: Short,
    name: String,
    typeId: String,
    requiredness: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness,
    defaultValue: String,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Field = {
    val ret = this.createRawRecord
    ret.identifier_=(identifier)
    ret.name_=(name)
    ret.typeId_=(typeId)
    ret.requiredness_=(requiredness)
    ret.defaultValue_=(defaultValue)
    ret.__annotations_=(__annotations)
    ret
  }
}
/** Exposes the [[Field]] companion (its [[FieldMeta]]) through the generic CompanionProvider hook. */
class FieldCompanionProvider extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Field] {
  type CompanionT = FieldMeta
  override def provide: FieldMeta = Field
}
trait Field
extends JavaField[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation, Field, RawField, FieldMeta]
with org.apache.thrift.TBase[Field, Field._Fields] {
override def meta: FieldMeta
override def compare(that: Field): Int = {
var cmp: Int = 0
if (that == null) {
1
} else if ({
cmp = this.identifierIsSet.compareTo(that.identifierIsSet)
cmp != 0
}) cmp
else if (this.identifierIsSet && {
cmp = this.identifier.compareTo(that.identifier)
cmp != 0
}) cmp
else if ({
cmp = this.nameIsSet.compareTo(that.nameIsSet)
cmp != 0
}) cmp
else if (this.nameIsSet && {
cmp = this.nameOrNull.compareTo(that.nameOrNull)
cmp != 0
}) cmp
else if ({
cmp = this.typeIdIsSet.compareTo(that.typeIdIsSet)
cmp != 0
}) cmp
else if (this.typeIdIsSet && {
cmp = this.typeIdOrNull.compareTo(that.typeIdOrNull)
cmp != 0
}) cmp
else if ({
cmp = this.requirednessIsSet.compareTo(that.requirednessIsSet)
cmp != 0
}) cmp
else if (this.requirednessIsSet && {
cmp = this.requirednessOrNull.compareTo(that.requirednessOrNull)
cmp != 0
}) cmp
else if ({
cmp = this.defaultValueIsSet.compareTo(that.defaultValueIsSet)
cmp != 0
}) cmp
else if (this.defaultValueIsSet && {
cmp = this.defaultValueOrNull.compareTo(that.defaultValueOrNull)
cmp != 0
}) cmp
else if ({
cmp = this.annotationsIsSet.compareTo(that.annotationsIsSet)
cmp != 0
}) cmp
else if (this.annotationsIsSet && {
cmp = org.apache.thrift.TBaseHelper.compareTo(
scala.collection.JavaConverters.seqAsJavaListConverter(this.__annotations).asJava,
scala.collection.JavaConverters.seqAsJavaListConverter(that.__annotations).asJava
)
cmp != 0
}) cmp
else 0
}
override def <(that: Field): Boolean = { this.compare(that) < 0 }
override def >(that: Field): Boolean = { this.compare(that) > 0 }
override def <=(that: Field): Boolean = { this.compare(that) <= 0 }
override def >=(that: Field): Boolean = { this.compare(that) >= 0 }
override def compareTo(that: Field): Int = compare(that)
def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
def deepCopy(): Field
def copy(
identifier: java.lang.Short = identifierOrNull,
name: String = nameOrNull,
typeId: String = typeIdOrNull,
requiredness: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness = requirednessOrNull,
defaultValue: String = defaultValueOrNull,
__annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
annotationsOrNull
): Field
def mutableCopy(): MutableField = {
val ret = Field.createRawRecord
if (identifierIsSet) ret.identifier_=(identifierOrDefault)
if (nameIsSet) ret.name_=(nameOrNull)
if (typeIdIsSet) ret.typeId_=(typeIdOrNull)
if (requirednessIsSet) ret.requiredness_=(requirednessOrNull)
if (defaultValueIsSet) ret.defaultValue_=(defaultValueOrNull)
if (annotationsIsSet) ret.__annotations_=(annotationsOrNull)
ret
}
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned
    * (see the override in `MutableField`).
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableField
def toBuilder(): Field.Builder.AllSpecified = {
val ret = new Field.Builder(Field.createRawRecord)
if (identifierIsSet) ret.identifier(identifierOrDefault)
if (nameIsSet) ret.name(nameOrNull)
if (typeIdIsSet) ret.typeId(typeIdOrNull)
if (requirednessIsSet) ret.requiredness(requirednessOrNull)
if (defaultValueIsSet) ret.defaultValue(defaultValueOrNull)
if (annotationsIsSet) ret.__annotations(annotationsOrNull)
ret
}
def mergeCopy(that: Field): Field
}
/** Mutable variant of [[Field]]: adds a setter and an unsetter for every field.
  *
  * The concrete storage implementation is [[RawField]]; delegating wrappers are
  * provided by [[MutableFieldProxy]].
  */
trait MutableField
  extends Field
  with JavaFieldMutable[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    Field,
    RawField,
    FieldMeta
  ] {
  // Field #1 - identifier
  def identifier_=(x: Short): Unit
  def identifierUnset(): Unit
  // Field #2 - name
  def name_=(x: String): Unit
  def nameUnset(): Unit
  // Field #3 - typeId
  def typeId_=(x: String): Unit
  def typeIdUnset(): Unit
  // Field #4 - requiredness
  def requiredness_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness): Unit
  def requirednessUnset(): Unit
  // Field #5 - defaultValue
  def defaultValue_=(x: String): Unit
  def defaultValueUnset(): Unit
  // Field #99 - annotations
  def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit
  def annotationsUnset(): Unit
  // In-place merge: copies fields set on `that` into this record (see RawField.merge).
  def merge(that: Field): Unit
  // Copy with overrides, narrowed to return a MutableField.
  def copy(
    identifier: java.lang.Short = identifierOrNull,
    name: String = nameOrNull,
    typeId: String = typeIdOrNull,
    requiredness: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness = requirednessOrNull,
    defaultValue: String = defaultValueOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableField
  // A mutable record is its own mutable view; no copy is made.
  override def mutable: MutableField = this
}
/** A [[Field]] that forwards every member to an `underlying` record.
  *
  * Implementors supply `underlying` and may override individual members to
  * intercept behavior. Note that `equals`, `hashCode` and `toString` delegate
  * too, so a proxy is equal to whatever its underlying record is equal to.
  */
trait FieldProxy extends Field {
  /** The record all members below delegate to. */
  protected def underlying: Field
  override def meta = underlying.meta
  // field/proxy_primitive.ssp
  override def identifier: Short = underlying.identifier
  override def identifierOption: Option[Short] = underlying.identifierOption
  override def identifierOrDefault: Short = underlying.identifierOrDefault
  override def identifierOrNull: java.lang.Short = underlying.identifierOrNull
  override def identifierOrThrow: Short = underlying.identifierOrThrow
  override def identifierIsSet: Boolean = underlying.identifierIsSet
  // field/proxy_ref.ssp
  override def name: String = underlying.name
  override def nameOption: Option[String] = underlying.nameOption
  override def nameOrNull: String = underlying.nameOrNull
  override def nameOrThrow: String = underlying.nameOrThrow
  override def nameIsSet: Boolean = underlying.nameIsSet
  // field/proxy_ref.ssp
  override def typeId: String = underlying.typeId
  override def typeIdOption: Option[String] = underlying.typeIdOption
  override def typeIdOrNull: String = underlying.typeIdOrNull
  override def typeIdOrThrow: String = underlying.typeIdOrThrow
  override def typeIdIsSet: Boolean = underlying.typeIdIsSet
  // field/proxy_ref.ssp
  override def requirednessOption: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness] =
    underlying.requirednessOption
  override def requirednessOrNull: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness =
    underlying.requirednessOrNull
  override def requirednessOrThrow: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness =
    underlying.requirednessOrThrow
  override def requirednessIsSet: Boolean = underlying.requirednessIsSet
  // field/proxy_ref.ssp
  override def defaultValueOption: Option[String] = underlying.defaultValueOption
  override def defaultValueOrNull: String = underlying.defaultValueOrNull
  override def defaultValueOrThrow: String = underlying.defaultValueOrThrow
  override def defaultValueIsSet: Boolean = underlying.defaultValueIsSet
  // field/proxy_container.ssp
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.__annotations
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    underlying.annotationsOption
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrDefault
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrNull
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrThrow
  override def annotationsIsSet: Boolean = underlying.annotationsIsSet
  override def compare(that: Field): Int = underlying.compare(that)
  override def clear() { underlying.clear }
  override def read(iprot: org.apache.thrift.protocol.TProtocol) { underlying.read(iprot) }
  override def write(oprot: org.apache.thrift.protocol.TProtocol) { underlying.write(oprot) }
  // Defaults are read from the proxy (which itself delegates), then passed through.
  override def copy(
    identifier: java.lang.Short = identifierOrNull,
    name: String = nameOrNull,
    typeId: String = typeIdOrNull,
    requiredness: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness = requirednessOrNull,
    defaultValue: String = defaultValueOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): Field = underlying.copy(
    identifier = identifier,
    name = name,
    typeId = typeId,
    requiredness = requiredness,
    defaultValue = defaultValue,
    __annotations = __annotations
  )
  override def mutableCopy(): MutableField = underlying.mutableCopy()
  override def mergeCopy(that: Field): Field = underlying.mergeCopy(that)
  override def mutable: MutableField = underlying.mutable
  override def deepCopy(): Field = underlying.deepCopy()
  override def fieldForId(id: Int): Field._Fields = underlying.fieldForId(id)
  override def isSet(field: Field._Fields): Boolean = underlying.isSet(field)
  override def getFieldValue(field: Field._Fields): AnyRef = underlying.getFieldValue(field)
  override def setFieldValue(field: Field._Fields, value: AnyRef) { underlying.setFieldValue(field, value) }
  override def hashCode(): Int = underlying.hashCode
  override def equals(that: Any): Boolean = underlying.equals(that)
  override def toString(): String = underlying.toString
}
/** A [[MutableField]] proxy: delegates the setters/unsetters, in addition to
  * all of [[FieldProxy]]'s read-side members, to `underlying`.
  */
trait MutableFieldProxy extends MutableField with FieldProxy {
  /** Narrowed to a mutable record so the setters below can delegate. */
  protected def underlying: MutableField
  override def identifier_=(x: Short): Unit = { underlying.identifier_=(x) }
  override def identifierUnset(): Unit = { underlying.identifierUnset() }
  override def name_=(x: String): Unit = { underlying.name_=(x) }
  override def nameUnset(): Unit = { underlying.nameUnset() }
  override def typeId_=(x: String): Unit = { underlying.typeId_=(x) }
  override def typeIdUnset(): Unit = { underlying.typeIdUnset() }
  override def requiredness_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness): Unit = {
    underlying.requiredness_=(x)
  }
  override def requirednessUnset(): Unit = { underlying.requirednessUnset() }
  override def defaultValue_=(x: String): Unit = { underlying.defaultValue_=(x) }
  override def defaultValueUnset(): Unit = { underlying.defaultValueUnset() }
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { underlying.__annotations_=(x) }
  override def annotationsUnset(): Unit = { underlying.annotationsUnset() }
  // Defaults are read from the proxy (which itself delegates), then passed through.
  override def copy(
    identifier: java.lang.Short = identifierOrNull,
    name: String = nameOrNull,
    typeId: String = typeIdOrNull,
    requiredness: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness = requirednessOrNull,
    defaultValue: String = defaultValueOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableField = underlying.copy(
    identifier = identifier,
    name = name,
    typeId = typeId,
    requiredness = requiredness,
    defaultValue = defaultValue,
    __annotations = __annotations
  )
  override def merge(that: Field): Unit = underlying.merge(that)
}
/** Concrete, mutable backing implementation of [[Field]].
  *
  * Each field is stored in a private var; the primitive `identifier` carries an
  * explicit isSet flag, while reference-typed fields use `null` to mean "unset".
  * Also implements Thrift wire (de)serialization, including optional
  * preservation of unknown wire fields.
  */
final class RawField
  extends JavaFieldRaw[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    Field,
    RawField,
    FieldMeta
  ]
  with MutableField {
  override def meta: FieldMeta = Field
  // fields
  // Field #1 - identifier
  private var _identifier: Short = 0 // Underlying type: Short
  private var _identifierIsSet: Boolean = false
  override def identifier: Short = identifierOrDefault
  override def identifier_=(x: Short): Unit = { _identifier = x; _identifierIsSet = true }
  override def identifierOption: Option[Short] = if (identifierIsSet) Some(_identifier) else None
  override def identifierOrDefault: Short = _identifier
  // Boxes to java.lang.Short so that "unset" can be represented as null.
  override def identifierOrNull: java.lang.Short = if (identifierIsSet) _identifier else null
  override def identifierOrThrow: Short =
    if (identifierIsSet) _identifier else throw new java.lang.NullPointerException("field identifier of Field missing")
  override def identifierIsSet: Boolean = _identifierIsSet
  override def identifierUnset(): Unit = { _identifierIsSet = false; _identifier = 0 }
  // Field #2 - name
  private var _name: String = null // Underlying type: String
  override def name: String = nameOrThrow
  override def name_=(x: String): Unit = { _name = x }
  override def nameOption: Option[String] = if (nameIsSet) Some(_name) else None
  override def nameOrNull: String = _name
  override def nameOrThrow: String =
    if (nameIsSet) _name else throw new java.lang.NullPointerException("field name of Field missing")
  override def nameIsSet: Boolean = _name != null
  override def nameUnset(): Unit = { _name = null }
  // Field #3 - typeId
  private var _typeId: String = null // Underlying type: String
  override def typeId: String = typeIdOrThrow
  override def typeId_=(x: String): Unit = { _typeId = x }
  override def typeIdOption: Option[String] = if (typeIdIsSet) Some(_typeId) else None
  override def typeIdOrNull: String = _typeId
  override def typeIdOrThrow: String =
    if (typeIdIsSet) _typeId else throw new java.lang.NullPointerException("field typeId of Field missing")
  override def typeIdIsSet: Boolean = _typeId != null
  override def typeIdUnset(): Unit = { _typeId = null }
  // Field #4 - requiredness
  private var _requiredness
    : io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness = null // Underlying type: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness
  override def requiredness_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness): Unit = {
    _requiredness = x
  }
  override def requirednessOption: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness] =
    if (requirednessIsSet) Some(_requiredness) else None
  override def requirednessOrNull: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness =
    _requiredness
  override def requirednessOrThrow: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness =
    if (requirednessIsSet) _requiredness
    else throw new java.lang.NullPointerException("field requiredness of Field missing")
  override def requirednessIsSet: Boolean = _requiredness != null
  override def requirednessUnset(): Unit = { _requiredness = null }
  // Field #5 - defaultValue
  private var _defaultValue: String = null // Underlying type: String
  override def defaultValue_=(x: String): Unit = { _defaultValue = x }
  override def defaultValueOption: Option[String] = if (defaultValueIsSet) Some(_defaultValue) else None
  override def defaultValueOrNull: String = _defaultValue
  override def defaultValueOrThrow: String = if (defaultValueIsSet) _defaultValue
    else throw new java.lang.NullPointerException("field defaultValue of Field missing")
  override def defaultValueIsSet: Boolean = _defaultValue != null
  override def defaultValueUnset(): Unit = { _defaultValue = null }
  // Field #99 - annotations
  private var _annotations: scala.collection.Seq[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
  ] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    annotationsOrDefault
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { _annotations = x }
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    if (annotationsIsSet) Some(_annotations) else None
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations else scala.collection.Seq.empty
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] = _annotations
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations
    else throw new java.lang.NullPointerException("field __annotations of Field missing")
  override def annotationsIsSet: Boolean = _annotations != null
  override def annotationsUnset(): Unit = { _annotations = null }
  // end fields
  // Wire fields not recognized during read(); re-emitted by write() when
  // unknown-field preservation is enabled (newest first, hence the reverse).
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  // Serializes only the fields that are set; unset fields are omitted entirely.
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Field.FIELD_SDESC)
    if (identifierIsSet) {
      oprot.writeFieldBegin(Field.IDENTIFIER_FDESC)
      oprot.writeI16(_identifier)
      oprot.writeFieldEnd()
    }
    if (nameIsSet) {
      oprot.writeFieldBegin(Field.NAME_FDESC)
      oprot.writeString(_name)
      oprot.writeFieldEnd()
    }
    if (typeIdIsSet) {
      oprot.writeFieldBegin(Field.TYPEID_FDESC)
      oprot.writeString(_typeId)
      oprot.writeFieldEnd()
    }
    if (requirednessIsSet) {
      oprot.writeFieldBegin(Field.REQUIREDNESS_FDESC)
      oprot.writeI32(_requiredness.getValue())
      oprot.writeFieldEnd()
    }
    if (defaultValueIsSet) {
      oprot.writeFieldBegin(Field.DEFAULTVALUE_FDESC)
      oprot.writeString(_defaultValue)
      oprot.writeFieldEnd()
    }
    if (annotationsIsSet) {
      oprot.writeFieldBegin(Field.ANNOTATIONS_FDESC)
      oprot.writeListBegin(
        new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _annotations.size)
      )
      _annotations.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  // Deserializes fields into this record in place. Fields whose wire type does
  // not match the expected type are skipped rather than failing the read.
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (and registers) the UnknownFields bucket for this read pass.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Field.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // identifier
            if (field_header.`type` == org.apache.thrift.protocol.TType.I16) {
              _identifier = iprot.readI16()
              _identifierIsSet = true
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // name
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _name = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 3 => { // typeId
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _typeId = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 4 => { // requiredness
            if (field_header.`type` == org.apache.thrift.protocol.TType.I32 || field_header.`type` == org.apache.thrift.protocol.TType.ENUM) {
              _requiredness = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness
                .findByIdOrUnknown(iprot.readI32())
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 5 => { // defaultValue
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _defaultValue = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 99 => { // annotations
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _annotations = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException("Error reading field %d in structure Field".format(field_header.id), e)
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  // In-place merge: `that` fills in fields unset here; when both sides have
  // annotations, the two sequences are concatenated (this first).
  override def merge(that: Field): Unit = {
    if (that.identifierIsSet && !this.identifierIsSet) {
      this.identifier_=(that.identifierOrDefault)
    }
    if (that.nameIsSet && !this.nameIsSet) {
      this.name_=(that.nameOrNull)
    }
    if (that.typeIdIsSet && !this.typeIdIsSet) {
      this.typeId_=(that.typeIdOrNull)
    }
    if (that.requirednessIsSet && !this.requirednessIsSet) {
      this.requiredness_=(that.requirednessOrNull)
    }
    if (that.defaultValueIsSet && !this.defaultValueIsSet) {
      this.defaultValue_=(that.defaultValueOrNull)
    }
    if (that.annotationsIsSet && !this.annotationsIsSet) {
      this.__annotations_=(that.annotationsOrDefault)
    } else if (that.annotationsIsSet && this.annotationsIsSet) {
      this.__annotations_=(this.__annotations ++ that.__annotations)
    }
  }
  // Non-destructive merge: neither operand is modified.
  override def mergeCopy(that: Field): Field = {
    val ret = Field.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Field => this.equals(o)
    case _ => false
  }
  // Field-by-field equality: each field must be set on both sides and equal,
  // or unset on both sides.
  def equals(that: Field): Boolean = {
    that != null &&
    (if (this.identifierIsSet) (that.identifierIsSet && this.identifierOrDefault == that.identifierOrDefault)
     else !that.identifierIsSet) &&
    (if (this.nameIsSet) (that.nameIsSet && this.nameOrNull == that.nameOrNull) else !that.nameIsSet) &&
    (if (this.typeIdIsSet) (that.typeIdIsSet && this.typeIdOrNull == that.typeIdOrNull) else !that.typeIdIsSet) &&
    (if (this.requirednessIsSet) (that.requirednessIsSet && this.requirednessOrNull == that.requirednessOrNull)
     else !that.requirednessIsSet) &&
    (if (this.defaultValueIsSet) (that.defaultValueIsSet && this.defaultValueOrNull == that.defaultValueOrNull)
     else !that.defaultValueIsSet) &&
    (if (this.annotationsIsSet) (that.annotationsIsSet && this.annotationsOrDefault == that.annotationsOrDefault)
     else !that.annotationsIsSet) &&
    true
  }
  // Hashes only the set fields, consistent with the equals above.
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (identifierIsSet) hasher.append(_identifier.##)
    if (nameIsSet) hasher.append(_name.##)
    if (typeIdIsSet) hasher.append(_typeId.##)
    if (requirednessIsSet) hasher.append(_requiredness.##)
    if (defaultValueIsSet) hasher.append(_defaultValue.##)
    if (annotationsIsSet) hasher.append(_annotations.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (identifierIsSet) ret = identifierOrDefault :: ret
    if (nameIsSet) ret = nameOrNull :: ret
    if (typeIdIsSet) ret = typeIdOrNull :: ret
    if (requirednessIsSet) ret = requirednessOrNull :: ret
    if (defaultValueIsSet) ret = defaultValueOrNull :: ret
    if (annotationsIsSet) ret = annotationsOrDefault :: ret
    ret.reverse
  }
  // Unsets every field and drops any preserved unknown fields.
  override def clear() {
    identifierUnset()
    nameUnset()
    typeIdUnset()
    requirednessUnset()
    defaultValueUnset()
    annotationsUnset()
    unknownFields = Nil
  }
  // Returns null (not an exception) for unrecognized ids.
  def fieldForId(id: Int): Field._Fields = id match {
    case 1 => Field._Fields.identifier
    case 2 => Field._Fields.name
    case 3 => Field._Fields.typeId
    case 4 => Field._Fields.requiredness
    case 5 => Field._Fields.defaultValue
    case 99 => Field._Fields.__annotations
    case _ => null
  }
  def isSet(field: Field._Fields): Boolean = field match {
    case Field._Fields.identifier => identifierIsSet
    case Field._Fields.name => nameIsSet
    case Field._Fields.typeId => typeIdIsSet
    case Field._Fields.requiredness => requirednessIsSet
    case Field._Fields.defaultValue => defaultValueIsSet
    case Field._Fields.__annotations => annotationsIsSet
    case _ => false
  }
  def getFieldValue(field: Field._Fields): AnyRef = field match {
    case Field._Fields.identifier => identifierOrDefault.asInstanceOf[AnyRef]
    case Field._Fields.name => nameOrNull.asInstanceOf[AnyRef]
    case Field._Fields.typeId => typeIdOrNull.asInstanceOf[AnyRef]
    case Field._Fields.requiredness => requirednessOrNull.asInstanceOf[AnyRef]
    case Field._Fields.defaultValue => defaultValueOrNull.asInstanceOf[AnyRef]
    case Field._Fields.__annotations => annotationsOrDefault.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  // Unknown field values are silently ignored (falls through the final case).
  def setFieldValue(field: Field._Fields, value: AnyRef) {
    field match {
      case Field._Fields.identifier => identifier_=(value.asInstanceOf[Short])
      case Field._Fields.name => name_=(value.asInstanceOf[String])
      case Field._Fields.typeId => typeId_=(value.asInstanceOf[String])
      case Field._Fields.requiredness =>
        requiredness_=(value.asInstanceOf[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness])
      case Field._Fields.defaultValue => defaultValue_=(value.asInstanceOf[String])
      case Field._Fields.__annotations =>
        __annotations_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
        )
      case _ =>
    }
  }
  override def deepCopy(): RawField = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Field.createRawRecord
    ret.read(prot)
    ret
  }
  // A null argument leaves the corresponding field unset on the new record.
  override def copy(
    identifier: java.lang.Short = identifierOrNull,
    name: String = nameOrNull,
    typeId: String = typeIdOrNull,
    requiredness: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Requiredness = requirednessOrNull,
    defaultValue: String = defaultValueOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): RawField = {
    val ret = new RawField
    if (identifier != null) ret.identifier_=(identifier)
    if (name != null) ret.name_=(name)
    if (typeId != null) ret.typeId_=(typeId)
    if (requiredness != null) ret.requiredness_=(requiredness)
    if (defaultValue != null) ret.defaultValue_=(defaultValue)
    if (__annotations != null) ret.__annotations_=(__annotations)
    ret
  }
  // Renders the record by serializing it through the string protocol.
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion for [[Struct]]: phantom-typed builder plus implicit ordering/comparator. */
object Struct extends StructMeta {
  object Builder {
    // Phantom-type markers: a Builder[State] accumulates HasName/HasFields in
    // its State as the required fields are supplied; result() demands both.
    sealed trait HasName
    sealed trait HasFields
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasName with HasFields
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[Struct] (private var obj: RawStruct) {
    def name(v: String): Struct.Builder[State with Builder.HasName] = {
      obj.name_=(v)
      this.asInstanceOf[Struct.Builder[State with Builder.HasName]]
    }
    def __fields(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
    ): Struct.Builder[State with Builder.HasFields] = {
      obj.__fields_=(v)
      this.asInstanceOf[Struct.Builder[State with Builder.HasFields]]
    }
    def __annotations(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
    ): Struct.Builder[State] = {
      obj.__annotations_=(v)
      this
    }
    // Option overload: None unsets the annotations field on the record.
    def __annotations(
      vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    ): Struct.Builder[State] = {
      vOpt match {
        case Some(v) => obj.__annotations_=(v)
        case None => obj.annotationsUnset()
      }
      this
    }
    // Hands out the built record and nulls the internal reference, so a single
    // Builder can never produce two aliased results.
    def resultMutable()(implicit ev0: State <:< Builder.HasName, ev1: State <:< Builder.HasFields): MutableStruct = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("Struct.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasName, ev1: State <:< Builder.HasFields): Struct =
      resultMutable()(ev0, ev1)
  }
  def newBuilder: Struct.Builder.AllUnspecified = new Builder(Struct.createRawRecord)
  implicit val companionProvider: StructCompanionProvider = new StructCompanionProvider
  // Ordering and Comparator instances, both defined in terms of Struct.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[Struct] = {
    new _root_.scala.math.Ordering[Struct] {
      override def compare(x: Struct, y: Struct): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[Struct] = {
    new _root_.java.util.Comparator[Struct] {
      override def compare(x: Struct, y: Struct): Int = x.compare(y)
    }
  }
}
class StructMeta
extends JavaStructMeta[Struct, RawStruct, StructMeta]
with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Struct] {
override def recordName: String = "Struct"
// Thrift descriptors.
val STRUCT_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Struct")
val NAME_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"name",
org.apache.thrift.protocol.TType.STRING,
1, {
java.util.Collections.emptyMap[String, String]
}
)
val FIELDS_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"fields",
org.apache.thrift.protocol.TType.LIST,
2, {
java.util.Collections.emptyMap[String, String]
}
)
val ANNOTATIONS_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"annotations",
org.apache.thrift.protocol.TType.LIST,
99, {
java.util.Collections.emptyMap[String, String]
}
)
val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
"name" -> NAME_FDESC,
"fields" -> FIELDS_FDESC,
"annotations" -> ANNOTATIONS_FDESC
)
object _Fields {
case object name extends _Fields(1, "name")
case object __fields extends _Fields(2, "fields")
case object __annotations extends _Fields(99, "annotations")
}
sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
def getThriftFieldId: Short = id
def getFieldName: String = name
}
val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
1.toShort -> _Fields.name,
2.toShort -> _Fields.__fields,
99.toShort -> _Fields.__annotations
)
override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
createRawRecord
override def createRecord: Struct = createRawRecord
override def createRawRecord: RawStruct = new RawStruct
override def untypedIfInstanceFrom(
x: AnyRef
): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
override def ifInstanceFrom(x: AnyRef): Option[Struct] = {
if (x.isInstanceOf[Struct]) Some(x.asInstanceOf[Struct]) else None
}
override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations(
scala.collection.immutable.Vector(
("generate_proxy", "true")
)
)
// Spindle Descriptors.
val name =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Struct, StructMeta](
name = "name",
longName = "name",
id = 1,
annotations = Map(),
owner = this,
getter = _.nameOption,
setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Struct], v: String) => {
r.asInstanceOf[RawStruct].name_=(v)
},
unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Struct]) => {
r.asInstanceOf[RawStruct].nameUnset()
},
manifest = manifest[String]
)
val __fields =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field
], Struct, StructMeta](
name = "fields",
longName = "fields",
id = 2,
annotations = Map(),
owner = this,
getter = _.fieldsOption,
setterRaw = (
r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Struct],
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
) => { r.asInstanceOf[RawStruct].__fields_=(v) },
unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Struct]) => {
r.asInstanceOf[RawStruct].fieldsUnset()
},
manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]]
)
val __annotations =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
], Struct, StructMeta](
name = "annotations",
longName = "annotations",
id = 99,
annotations = Map(),
owner = this,
getter = _.annotationsOption,
setterRaw = (
r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Struct],
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
) => { r.asInstanceOf[RawStruct].__annotations_=(v) },
unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Struct]) => {
r.asInstanceOf[RawStruct].annotationsUnset()
},
manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
)
override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
fields
override val fields
: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Struct, StructMeta]] =
Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Struct, StructMeta]](
name,
__fields,
__annotations
)
  // Convenience constructor: builds a raw record with all three fields assigned.
  // Note: passing null for any argument leaves that field effectively unset,
  // since field presence is tracked by nullness in RawStruct.
  def apply(
    name: String,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field],
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Struct = {
    val ret = this.createRawRecord
    ret.name_=(name)
    ret.__fields_=(__fields)
    ret.__annotations_=(__annotations)
    ret
  }
}
/** Bridges the [[Struct]] record type to its companion metadata object so that
  * generic spindle machinery can locate [[StructMeta]] through an implicit
  * `CompanionProvider[Struct]`.
  */
class StructCompanionProvider
  extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Struct] {
  type CompanionT = StructMeta
  // The companion object `Struct` is itself the StructMeta singleton.
  override def provide: CompanionT = Struct
}
/** Read-only interface for the generated thrift `Struct` record.
  *
  * Concrete storage lives in [[RawStruct]]; [[StructProxy]] offers delegation.
  * Fields (all optional): name (id 1), fields (id 2), annotations (id 99).
  */
trait Struct
  extends JavaStruct[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field,
    Struct,
    RawStruct,
    StructMeta
  ]
  with org.apache.thrift.TBase[Struct, Struct._Fields] {
  override def meta: StructMeta
  // Total ordering over records: a null argument sorts before any record;
  // otherwise fields are compared in id order, "unset" sorting before "set".
  // NOTE: each `else if` condition mutates `cmp` as a side effect — the exact
  // evaluation order of this cascade is load-bearing.
  override def compare(that: Struct): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.nameIsSet.compareTo(that.nameIsSet)
      cmp != 0
    }) cmp
    else if (this.nameIsSet && {
      cmp = this.nameOrNull.compareTo(that.nameOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.fieldsIsSet.compareTo(that.fieldsIsSet)
      cmp != 0
    }) cmp
    else if (this.fieldsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.__fields).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.__fields).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.annotationsIsSet.compareTo(that.annotationsIsSet)
      cmp != 0
    }) cmp
    else if (this.annotationsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.__annotations).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.__annotations).asJava
      )
      cmp != 0
    }) cmp
    else 0
  }
  // Comparison operators, all defined in terms of compare above.
  override def <(that: Struct): Boolean = { this.compare(that) < 0 }
  override def >(that: Struct): Boolean = { this.compare(that) > 0 }
  override def <=(that: Struct): Boolean = { this.compare(that) <= 0 }
  override def >=(that: Struct): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: Struct): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): Struct
  def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): Struct
  // Shallow mutable copy: set fields are copied by reference into a fresh RawStruct.
  def mutableCopy(): MutableStruct = {
    val ret = Struct.createRawRecord
    if (nameIsSet) ret.name_=(nameOrNull)
    if (fieldsIsSet) ret.__fields_=(fieldsOrNull)
    if (annotationsIsSet) ret.__annotations_=(annotationsOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned.
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableStruct
  // Seeds a phantom-typed Builder with this record's currently-set fields.
  def toBuilder(): Struct.Builder.AllSpecified = {
    val ret = new Struct.Builder(Struct.createRawRecord)
    if (nameIsSet) ret.name(nameOrNull)
    if (fieldsIsSet) ret.__fields(fieldsOrNull)
    if (annotationsIsSet) ret.__annotations(annotationsOrNull)
    ret
  }
  def mergeCopy(that: Struct): Struct
}
/** Mutable extension of [[Struct]]: adds setters and unsetters for each field.
  * Implemented by [[RawStruct]]; [[MutableStructProxy]] offers delegation.
  */
trait MutableStruct
  extends Struct
  with JavaStructMutable[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field,
    Struct,
    RawStruct,
    StructMeta
  ] {
  def name_=(x: String): Unit
  def nameUnset(): Unit
  def __fields_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]): Unit
  def fieldsUnset(): Unit
  def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit
  def annotationsUnset(): Unit
  // In-place merge of `that` into this record; see RawStruct.merge for semantics.
  def merge(that: Struct): Unit
  def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableStruct
  // Already mutable, so no copy is needed (see the `mutable` contract on Struct).
  override def mutable: MutableStruct = this
}
/** Read-only proxy for [[Struct]]: every member forwards to `underlying`.
  * Mix in and override `underlying` to wrap an existing record.
  */
trait StructProxy extends Struct {
  protected def underlying: Struct
  override def meta = underlying.meta
  // field/proxy_ref.ssp
  override def name: String = underlying.name
  override def nameOption: Option[String] = underlying.nameOption
  override def nameOrNull: String = underlying.nameOrNull
  override def nameOrThrow: String = underlying.nameOrThrow
  override def nameIsSet: Boolean = underlying.nameIsSet
  // field/proxy_container.ssp
  override def __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.__fields
  override def fieldsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]] =
    underlying.fieldsOption
  override def fieldsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.fieldsOrDefault
  override def fieldsOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.fieldsOrNull
  override def fieldsOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.fieldsOrThrow
  override def fieldsIsSet: Boolean = underlying.fieldsIsSet
  // field/proxy_container.ssp
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.__annotations
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    underlying.annotationsOption
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrDefault
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrNull
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrThrow
  override def annotationsIsSet: Boolean = underlying.annotationsIsSet
  // Record-level operations also delegate, including equality and hashing —
  // so a proxy compares equal exactly when its underlying record does.
  override def compare(that: Struct): Int = underlying.compare(that)
  override def clear() { underlying.clear }
  override def read(iprot: org.apache.thrift.protocol.TProtocol) { underlying.read(iprot) }
  override def write(oprot: org.apache.thrift.protocol.TProtocol) { underlying.write(oprot) }
  override def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): Struct = underlying.copy(
    name = name,
    __fields = __fields,
    __annotations = __annotations
  )
  override def mutableCopy(): MutableStruct = underlying.mutableCopy()
  override def mergeCopy(that: Struct): Struct = underlying.mergeCopy(that)
  override def mutable: MutableStruct = underlying.mutable
  override def deepCopy(): Struct = underlying.deepCopy()
  override def fieldForId(id: Int): Struct._Fields = underlying.fieldForId(id)
  override def isSet(field: Struct._Fields): Boolean = underlying.isSet(field)
  override def getFieldValue(field: Struct._Fields): AnyRef = underlying.getFieldValue(field)
  override def setFieldValue(field: Struct._Fields, value: AnyRef) { underlying.setFieldValue(field, value) }
  override def hashCode(): Int = underlying.hashCode
  override def equals(that: Any): Boolean = underlying.equals(that)
  override def toString(): String = underlying.toString
}
/** Mutable proxy: extends [[StructProxy]] with forwarding setters/unsetters.
  * Narrows `underlying` to [[MutableStruct]] so writes can be delegated.
  */
trait MutableStructProxy extends MutableStruct with StructProxy {
  protected def underlying: MutableStruct
  override def name_=(x: String): Unit = { underlying.name_=(x) }
  override def nameUnset(): Unit = { underlying.nameUnset() }
  override def __fields_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  ): Unit = { underlying.__fields_=(x) }
  override def fieldsUnset(): Unit = { underlying.fieldsUnset() }
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { underlying.__annotations_=(x) }
  override def annotationsUnset(): Unit = { underlying.annotationsUnset() }
  override def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableStruct = underlying.copy(
    name = name,
    __fields = __fields,
    __annotations = __annotations
  )
  override def merge(that: Struct): Unit = underlying.merge(that)
}
/** Concrete mutable backing implementation of [[Struct]].
  *
  * Field presence is tracked purely by the nullness of the private vars, so a
  * field assigned `null` is indistinguishable from "unset". Unknown wire fields
  * encountered during `read` are retained (when the runtime enables it) so they
  * can be re-emitted by `write`, preserving round-trip fidelity.
  */
final class RawStruct
  extends JavaStructRaw[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field,
    Struct,
    RawStruct,
    StructMeta
  ]
  with MutableStruct {
  override def meta: StructMeta = Struct
  // fields
  // Field #1 - name
  private var _name: String = null // Underlying type: String
  override def name: String = nameOrThrow
  override def name_=(x: String): Unit = { _name = x }
  override def nameOption: Option[String] = if (nameIsSet) Some(_name) else None
  override def nameOrNull: String = _name
  override def nameOrThrow: String =
    if (nameIsSet) _name else throw new java.lang.NullPointerException("field name of Struct missing")
  override def nameIsSet: Boolean = _name != null
  override def nameUnset(): Unit = { _name = null }
  // Field #2 - fields
  private var _fields
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  override def __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    fieldsOrDefault
  override def __fields_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  ): Unit = { _fields = x }
  override def fieldsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]] =
    if (fieldsIsSet) Some(_fields) else None
  override def fieldsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    if (fieldsIsSet) _fields else scala.collection.Seq.empty
  override def fieldsOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    _fields
  override def fieldsOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    if (fieldsIsSet) _fields else throw new java.lang.NullPointerException("field __fields of Struct missing")
  override def fieldsIsSet: Boolean = _fields != null
  override def fieldsUnset(): Unit = { _fields = null }
  // Field #99 - annotations
  private var _annotations: scala.collection.Seq[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
  ] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    annotationsOrDefault
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { _annotations = x }
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    if (annotationsIsSet) Some(_annotations) else None
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations else scala.collection.Seq.empty
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] = _annotations
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations
    else throw new java.lang.NullPointerException("field __annotations of Struct missing")
  override def annotationsIsSet: Boolean = _annotations != null
  override def annotationsUnset(): Unit = { _annotations = null }
  // end fields
  // Unknown fields captured during read, in reverse arrival order (prepended).
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  // Serializes the set fields (in id order: 1, 2, 99) to the given protocol,
  // then re-emits any preserved unknown fields before the field stop.
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Struct.STRUCT_SDESC)
    if (nameIsSet) {
      oprot.writeFieldBegin(Struct.NAME_FDESC)
      oprot.writeString(_name)
      oprot.writeFieldEnd()
    }
    if (fieldsIsSet) {
      oprot.writeFieldBegin(Struct.FIELDS_FDESC)
      oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _fields.size))
      _fields.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (annotationsIsSet) {
      oprot.writeFieldBegin(Struct.ANNOTATIONS_FDESC)
      oprot.writeListBegin(
        new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _annotations.size)
      )
      _annotations.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      // unknownFields is prepended during read, so reverse to restore wire order.
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  // Populates this record from the given protocol. Fields whose wire type does
  // not match the expected type are skipped; unrecognized ids are either
  // preserved (see unknownFields) or skipped, per the runtime configuration.
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (at most once per read) the UnknownFields bucket for this pass.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Struct.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // name
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _name = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // fields
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _fields = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 99 => { // annotations
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _annotations = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure Struct".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  // In-place merge: scalar fields keep this record's value when already set;
  // list fields are concatenated (this ++ that) when set on both sides.
  override def merge(that: Struct): Unit = {
    if (that.nameIsSet && !this.nameIsSet) {
      this.name_=(that.nameOrNull)
    }
    if (that.fieldsIsSet && !this.fieldsIsSet) {
      this.__fields_=(that.fieldsOrNull)
    } else if (that.fieldsIsSet && this.fieldsIsSet) {
      this.__fields_=(this.__fields ++ that.__fields)
    }
    if (that.annotationsIsSet && !this.annotationsIsSet) {
      this.__annotations_=(that.annotationsOrDefault)
    } else if (that.annotationsIsSet && this.annotationsIsSet) {
      this.__annotations_=(this.__annotations ++ that.__annotations)
    }
  }
  // Non-destructive merge: folds this and then that into a fresh record.
  override def mergeCopy(that: Struct): Struct = {
    val ret = Struct.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Struct => this.equals(o)
    case _ => false
  }
  // Field-wise equality: two records are equal iff each field agrees on both
  // presence and value. (annotationsOrDefault is safe here — it is only read
  // when annotationsIsSet, so it equals the actual value.)
  def equals(that: Struct): Boolean = {
    that != null &&
    (if (this.nameIsSet) (that.nameIsSet && this.nameOrNull == that.nameOrNull) else !that.nameIsSet) &&
    (if (this.fieldsIsSet) (that.fieldsIsSet && this.fieldsOrNull == that.fieldsOrNull) else !that.fieldsIsSet) &&
    (if (this.annotationsIsSet) (that.annotationsIsSet && this.annotationsOrDefault == that.annotationsOrDefault)
     else !that.annotationsIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (nameIsSet) hasher.append(_name.##)
    if (fieldsIsSet) hasher.append(_fields.##)
    if (annotationsIsSet) hasher.append(_annotations.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (nameIsSet) ret = nameOrNull :: ret
    if (fieldsIsSet) ret = fieldsOrNull :: ret
    if (annotationsIsSet) ret = annotationsOrDefault :: ret
    ret.reverse
  }
  // Resets the record to the fully-unset state, dropping preserved unknown fields.
  override def clear() {
    nameUnset()
    fieldsUnset()
    annotationsUnset()
    unknownFields = Nil
  }
  // TBase reflective accessors below map thrift ids / _Fields tokens onto the
  // typed getters and setters. fieldForId returns null for unknown ids.
  def fieldForId(id: Int): Struct._Fields = id match {
    case 1 => Struct._Fields.name
    case 2 => Struct._Fields.__fields
    case 99 => Struct._Fields.__annotations
    case _ => null
  }
  def isSet(field: Struct._Fields): Boolean = field match {
    case Struct._Fields.name => nameIsSet
    case Struct._Fields.__fields => fieldsIsSet
    case Struct._Fields.__annotations => annotationsIsSet
    case _ => false
  }
  def getFieldValue(field: Struct._Fields): AnyRef = field match {
    case Struct._Fields.name => nameOrNull.asInstanceOf[AnyRef]
    case Struct._Fields.__fields => fieldsOrNull.asInstanceOf[AnyRef]
    case Struct._Fields.__annotations => annotationsOrDefault.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  def setFieldValue(field: Struct._Fields, value: AnyRef) {
    field match {
      case Struct._Fields.name => name_=(value.asInstanceOf[String])
      case Struct._Fields.__fields =>
        __fields_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]]
        )
      case Struct._Fields.__annotations =>
        __annotations_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
        )
      case _ =>
    }
  }
  // Deep copy via a binary-protocol round trip through an in-memory buffer.
  override def deepCopy(): RawStruct = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Struct.createRawRecord
    ret.read(prot)
    ret
  }
  // Shallow copy; null arguments (the defaults for unset fields) leave the
  // corresponding field unset on the result.
  override def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): RawStruct = {
    val ret = new RawStruct
    if (name != null) ret.name_=(name)
    if (__fields != null) ret.__fields_=(__fields)
    if (__annotations != null) ret.__annotations_=(__annotations)
    ret
  }
  // Human-readable rendering produced by serializing through TStringProtocol.
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion/metadata object for the generated `Union` record.
  * Hosts the phantom-typed Builder plus implicit Ordering/Comparator instances.
  */
object Union extends UnionMeta {
  object Builder {
    // Phantom-type markers: the Builder's State accumulates these as required
    // fields are set, so result() only compiles once name and fields are given.
    sealed trait HasName
    sealed trait HasFields
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasName with HasFields
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[Union] (private var obj: RawUnion) {
    def name(v: String): Union.Builder[State with Builder.HasName] = {
      obj.name_=(v)
      this.asInstanceOf[Union.Builder[State with Builder.HasName]]
    }
    def __fields(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
    ): Union.Builder[State with Builder.HasFields] = {
      obj.__fields_=(v)
      this.asInstanceOf[Union.Builder[State with Builder.HasFields]]
    }
    def __annotations(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
    ): Union.Builder[State] = {
      obj.__annotations_=(v)
      this
    }
    // Option-accepting overload: None unsets the annotations field.
    def __annotations(
      vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    ): Union.Builder[State] = {
      vOpt match {
        case Some(v) => obj.__annotations_=(v)
        case None => obj.annotationsUnset()
      }
      this
    }
    // Hands out the built record and nulls `obj` so the Builder cannot be reused.
    def resultMutable()(implicit ev0: State <:< Builder.HasName, ev1: State <:< Builder.HasFields): MutableUnion = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("Union.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasName, ev1: State <:< Builder.HasFields): Union =
      resultMutable()(ev0, ev1)
  }
  def newBuilder: Union.Builder.AllUnspecified = new Builder(Union.createRawRecord)
  implicit val companionProvider: UnionCompanionProvider = new UnionCompanionProvider
  // Ordering/Comparator both delegate to Union.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[Union] = {
    new _root_.scala.math.Ordering[Union] {
      override def compare(x: Union, y: Union): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[Union] = {
    new _root_.java.util.Comparator[Union] {
      override def compare(x: Union, y: Union): Int = x.compare(y)
    }
  }
}
/** Metadata/descriptor class for the generated `Union` record.
  *
  * Holds the thrift field descriptors (TField), the wire-name and id lookup
  * tables used by name-based protocols, the _Fields enum, and the spindle
  * FieldDescriptor instances used for reflective field access.
  */
class UnionMeta
  extends JavaUnionMeta[Union, RawUnion, UnionMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Union] {
  override def recordName: String = "Union"
  // Thrift descriptors.
  val UNION_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Union")
  val NAME_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "name",
      org.apache.thrift.protocol.TType.STRING,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val FIELDS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "fields",
      org.apache.thrift.protocol.TType.LIST,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val ANNOTATIONS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "annotations",
      org.apache.thrift.protocol.TType.LIST,
      99, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup used by name-based protocols (e.g. JSON/BSON) to recover ids.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "name" -> NAME_FDESC,
    "fields" -> FIELDS_FDESC,
    "annotations" -> ANNOTATIONS_FDESC
  )
  object _Fields {
    case object name extends _Fields(1, "name")
    case object __fields extends _Fields(2, "fields")
    case object __annotations extends _Fields(99, "annotations")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.name,
    2.toShort -> _Fields.__fields,
    99.toShort -> _Fields.__annotations
  )
  // Record factory methods — all ultimately produce a fresh RawUnion.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: Union = createRawRecord
  override def createRawRecord: RawUnion = new RawUnion
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[Union] = {
    if (x.isInstanceOf[Union]) Some(x.asInstanceOf[Union]) else None
  }
  // Struct-level thrift annotations from the IDL definition.
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations(
      scala.collection.immutable.Vector(
        ("generate_proxy", "true")
      )
    )
  // Spindle Descriptors.
  val name =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Union, UnionMeta](
      name = "name",
      longName = "name",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.nameOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Union], v: String) => {
        r.asInstanceOf[RawUnion].name_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Union]) => {
        r.asInstanceOf[RawUnion].nameUnset()
      },
      manifest = manifest[String]
    )
  val __fields =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field
    ], Union, UnionMeta](
      name = "fields",
      longName = "fields",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.fieldsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Union],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
      ) => { r.asInstanceOf[RawUnion].__fields_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Union]) => {
        r.asInstanceOf[RawUnion].fieldsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]]
    )
  val __annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
    ], Union, UnionMeta](
      name = "annotations",
      longName = "annotations",
      id = 99,
      annotations = Map(),
      owner = this,
      getter = _.annotationsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Union],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
      ) => { r.asInstanceOf[RawUnion].__annotations_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Union]) => {
        r.asInstanceOf[RawUnion].annotationsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    )
  // Untyped view of the field descriptors, for callers that do not know the record type.
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  // All field descriptors for Union, listed in thrift-id order (1, 2, 99).
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Union, UnionMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Union, UnionMeta]](
      name,
      __fields,
      __annotations
    )
  // Convenience constructor: builds a RawUnion with all three fields assigned.
  def apply(
    name: String,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field],
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Union = {
    val ret = this.createRawRecord
    ret.name_=(name)
    ret.__fields_=(__fields)
    ret.__annotations_=(__annotations)
    ret
  }
}
/** Bridges the [[Union]] record type to its companion metadata object so that
  * generic spindle machinery can locate [[UnionMeta]] through an implicit
  * `CompanionProvider[Union]`.
  */
class UnionCompanionProvider extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Union] {
  type CompanionT = UnionMeta
  // The companion object `Union` is itself the UnionMeta singleton.
  override def provide: CompanionT = Union
}
/** Read-only interface for the generated thrift `Union` record.
  *
  * Concrete storage lives in `RawUnion`; `UnionProxy` offers delegation.
  * Fields (all optional): name (id 1), fields (id 2), annotations (id 99).
  */
trait Union
  extends JavaUnion[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field,
    Union,
    RawUnion,
    UnionMeta
  ]
  with org.apache.thrift.TBase[Union, Union._Fields] {
  override def meta: UnionMeta
  // Total ordering over records: a null argument sorts before any record;
  // otherwise fields are compared in id order, "unset" sorting before "set".
  // NOTE: each `else if` condition mutates `cmp` as a side effect — the exact
  // evaluation order of this cascade is load-bearing.
  override def compare(that: Union): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.nameIsSet.compareTo(that.nameIsSet)
      cmp != 0
    }) cmp
    else if (this.nameIsSet && {
      cmp = this.nameOrNull.compareTo(that.nameOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.fieldsIsSet.compareTo(that.fieldsIsSet)
      cmp != 0
    }) cmp
    else if (this.fieldsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.__fields).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.__fields).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.annotationsIsSet.compareTo(that.annotationsIsSet)
      cmp != 0
    }) cmp
    else if (this.annotationsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.__annotations).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.__annotations).asJava
      )
      cmp != 0
    }) cmp
    else 0
  }
  // Comparison operators, all defined in terms of compare above.
  override def <(that: Union): Boolean = { this.compare(that) < 0 }
  override def >(that: Union): Boolean = { this.compare(that) > 0 }
  override def <=(that: Union): Boolean = { this.compare(that) <= 0 }
  override def >=(that: Union): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: Union): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): Union
  def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): Union
  // Shallow mutable copy: set fields are copied by reference into a fresh RawUnion.
  def mutableCopy(): MutableUnion = {
    val ret = Union.createRawRecord
    if (nameIsSet) ret.name_=(nameOrNull)
    if (fieldsIsSet) ret.__fields_=(fieldsOrNull)
    if (annotationsIsSet) ret.__annotations_=(annotationsOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned.
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableUnion
  // Seeds a phantom-typed Builder with this record's currently-set fields.
  def toBuilder(): Union.Builder.AllSpecified = {
    val ret = new Union.Builder(Union.createRawRecord)
    if (nameIsSet) ret.name(nameOrNull)
    if (fieldsIsSet) ret.__fields(fieldsOrNull)
    if (annotationsIsSet) ret.__annotations(annotationsOrNull)
    ret
  }
  def mergeCopy(that: Union): Union
}
/** Mutable view of [[Union]]: adds a setter and an unsetter per field plus
  * in-place merge. The `__`-prefixed setter names mirror the `__fields` /
  * `__annotations` getters (presumably generated to avoid clashing with the
  * plain `fields`/`annotations` accessor family — codegen convention).
  */
trait MutableUnion
  extends Union
  with JavaUnionMutable[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field,
    Union,
    RawUnion,
    UnionMeta
  ] {
  def name_=(x: String): Unit
  def nameUnset(): Unit
  def __fields_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]): Unit
  def fieldsUnset(): Unit
  def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit
  def annotationsUnset(): Unit
  // Merges that's set fields into this record in place (see RawUnion.merge).
  def merge(that: Union): Unit
  def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableUnion
  // Already mutable: return this instead of making a copy.
  override def mutable: MutableUnion = this
}
/** Read-only proxy for [[Union]]: forwards every member verbatim to
  * `underlying`. Implementors only provide `underlying`; all accessors,
  * comparison, serialization, and copying delegate unchanged.
  */
trait UnionProxy extends Union {
  protected def underlying: Union
  override def meta = underlying.meta
  // field/proxy_ref.ssp
  override def name: String = underlying.name
  override def nameOption: Option[String] = underlying.nameOption
  override def nameOrNull: String = underlying.nameOrNull
  override def nameOrThrow: String = underlying.nameOrThrow
  override def nameIsSet: Boolean = underlying.nameIsSet
  // field/proxy_container.ssp
  override def __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.__fields
  override def fieldsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]] =
    underlying.fieldsOption
  override def fieldsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.fieldsOrDefault
  override def fieldsOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.fieldsOrNull
  override def fieldsOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.fieldsOrThrow
  override def fieldsIsSet: Boolean = underlying.fieldsIsSet
  // field/proxy_container.ssp
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.__annotations
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    underlying.annotationsOption
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrDefault
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrNull
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrThrow
  override def annotationsIsSet: Boolean = underlying.annotationsIsSet
  override def compare(that: Union): Int = underlying.compare(that)
  override def clear() { underlying.clear }
  override def read(iprot: org.apache.thrift.protocol.TProtocol) { underlying.read(iprot) }
  override def write(oprot: org.apache.thrift.protocol.TProtocol) { underlying.write(oprot) }
  override def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): Union = underlying.copy(
    name = name,
    __fields = __fields,
    __annotations = __annotations
  )
  override def mutableCopy(): MutableUnion = underlying.mutableCopy()
  override def mergeCopy(that: Union): Union = underlying.mergeCopy(that)
  override def mutable: MutableUnion = underlying.mutable
  override def deepCopy(): Union = underlying.deepCopy()
  override def fieldForId(id: Int): Union._Fields = underlying.fieldForId(id)
  override def isSet(field: Union._Fields): Boolean = underlying.isSet(field)
  override def getFieldValue(field: Union._Fields): AnyRef = underlying.getFieldValue(field)
  override def setFieldValue(field: Union._Fields, value: AnyRef) { underlying.setFieldValue(field, value) }
  // equals/hashCode/toString also delegate, so a proxy is indistinguishable
  // from its underlying record in collections and logs.
  override def hashCode(): Int = underlying.hashCode
  override def equals(that: Any): Boolean = underlying.equals(that)
  override def toString(): String = underlying.toString
}
/** Mutable proxy for [[Union]]: inherits all read-side forwarding from
  * [[UnionProxy]] and forwards the setters/unsetters/merge to a mutable
  * `underlying` (note the narrowed `underlying: MutableUnion`).
  */
trait MutableUnionProxy extends MutableUnion with UnionProxy {
  protected def underlying: MutableUnion
  override def name_=(x: String): Unit = { underlying.name_=(x) }
  override def nameUnset(): Unit = { underlying.nameUnset() }
  override def __fields_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  ): Unit = { underlying.__fields_=(x) }
  override def fieldsUnset(): Unit = { underlying.fieldsUnset() }
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { underlying.__annotations_=(x) }
  override def annotationsUnset(): Unit = { underlying.annotationsUnset() }
  override def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableUnion = underlying.copy(
    name = name,
    __fields = __fields,
    __annotations = __annotations
  )
  override def merge(that: Union): Unit = underlying.merge(that)
}
/** Concrete backing implementation of [[Union]].
  *
  * Field storage uses a null sentinel: a field "is set" iff its backing var
  * is non-null, so assigning null via a setter is equivalent to unsetting.
  * Serialization (`write`/`read`) follows the Thrift protocol contract and
  * preserves unrecognized wire fields in `unknownFields` when the runtime
  * asks for it, so records round-trip through old readers losslessly.
  */
final class RawUnion
  extends JavaUnionRaw[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field,
    Union,
    RawUnion,
    UnionMeta
  ]
  with MutableUnion {
  override def meta: UnionMeta = Union
  // fields
  // Field #1 - name
  private var _name: String = null // Underlying type: String
  // Bare accessor throws on unset (delegates to nameOrThrow).
  override def name: String = nameOrThrow
  override def name_=(x: String): Unit = { _name = x }
  override def nameOption: Option[String] = if (nameIsSet) Some(_name) else None
  override def nameOrNull: String = _name
  override def nameOrThrow: String =
    if (nameIsSet) _name else throw new java.lang.NullPointerException("field name of Union missing")
  override def nameIsSet: Boolean = _name != null
  override def nameUnset(): Unit = { _name = null }
  // Field #2 - fields
  private var _fields
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  // NOTE: unlike `name`, the bare container accessor falls back to the
  // default (empty Seq) rather than throwing.
  override def __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    fieldsOrDefault
  override def __fields_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  ): Unit = { _fields = x }
  override def fieldsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]] =
    if (fieldsIsSet) Some(_fields) else None
  override def fieldsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    if (fieldsIsSet) _fields else scala.collection.Seq.empty
  override def fieldsOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    _fields
  override def fieldsOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    if (fieldsIsSet) _fields else throw new java.lang.NullPointerException("field __fields of Union missing")
  override def fieldsIsSet: Boolean = _fields != null
  override def fieldsUnset(): Unit = { _fields = null }
  // Field #99 - annotations
  private var _annotations: scala.collection.Seq[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
  ] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    annotationsOrDefault
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { _annotations = x }
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    if (annotationsIsSet) Some(_annotations) else None
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations else scala.collection.Seq.empty
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] = _annotations
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations
    else throw new java.lang.NullPointerException("field __annotations of Union missing")
  override def annotationsIsSet: Boolean = _annotations != null
  override def annotationsUnset(): Unit = { _annotations = null }
  // end fields
  // Wire fields we didn't recognize during read(); prepended, so in reverse
  // arrival order (write() re-reverses before re-emitting).
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  /** Serializes only the set fields, in id order (1, 2, 99), then any
    * preserved unknown fields, per the Thrift struct wire contract.
    */
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Union.UNION_SDESC)
    if (nameIsSet) {
      oprot.writeFieldBegin(Union.NAME_FDESC)
      oprot.writeString(_name)
      oprot.writeFieldEnd()
    }
    if (fieldsIsSet) {
      oprot.writeFieldBegin(Union.FIELDS_FDESC)
      oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _fields.size))
      _fields.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (annotationsIsSet) {
      oprot.writeFieldBegin(Union.ANNOTATIONS_FDESC)
      oprot.writeListBegin(
        new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _annotations.size)
      )
      _annotations.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  /** Reads fields until TType.STOP; a field whose wire type doesn't match the
    * expected type is skipped, and unrecognized ids are either preserved in
    * unknownFields or skipped depending on the runtime setting.
    */
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (at most one per read) the UnknownFields sink.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Union.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // name
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _name = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // fields
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _fields = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 99 => { // annotations
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _annotations = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException("Error reading field %d in structure Union".format(field_header.id), e)
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  /** In-place merge: scalar `name` keeps this record's value if both are set;
    * the list fields are concatenated (this ++ that) when both are set.
    */
  override def merge(that: Union): Unit = {
    if (that.nameIsSet && !this.nameIsSet) {
      this.name_=(that.nameOrNull)
    }
    if (that.fieldsIsSet && !this.fieldsIsSet) {
      this.__fields_=(that.fieldsOrNull)
    } else if (that.fieldsIsSet && this.fieldsIsSet) {
      this.__fields_=(this.__fields ++ that.__fields)
    }
    if (that.annotationsIsSet && !this.annotationsIsSet) {
      // NOTE(review): uses annotationsOrDefault where the fields branch uses
      // fieldsOrNull; equivalent here because the branch is guarded by
      // annotationsIsSet, so the default never kicks in.
      this.__annotations_=(that.annotationsOrDefault)
    } else if (that.annotationsIsSet && this.annotationsIsSet) {
      this.__annotations_=(this.__annotations ++ that.__annotations)
    }
  }
  override def mergeCopy(that: Union): Union = {
    val ret = Union.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Union => this.equals(o)
    case _ => false
  }
  // Field-wise equality: each field must be set in both (and equal) or set in neither.
  def equals(that: Union): Boolean = {
    that != null &&
    (if (this.nameIsSet) (that.nameIsSet && this.nameOrNull == that.nameOrNull) else !that.nameIsSet) &&
    (if (this.fieldsIsSet) (that.fieldsIsSet && this.fieldsOrNull == that.fieldsOrNull) else !that.fieldsIsSet) &&
    (if (this.annotationsIsSet) (that.annotationsIsSet && this.annotationsOrDefault == that.annotationsOrDefault)
     else !that.annotationsIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (nameIsSet) hasher.append(_name.##)
    if (fieldsIsSet) hasher.append(_fields.##)
    if (annotationsIsSet) hasher.append(_annotations.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (nameIsSet) ret = nameOrNull :: ret
    if (fieldsIsSet) ret = fieldsOrNull :: ret
    if (annotationsIsSet) ret = annotationsOrDefault :: ret
    ret.reverse
  }
  // Resets every field and drops any preserved unknown fields.
  override def clear() {
    nameUnset()
    fieldsUnset()
    annotationsUnset()
    unknownFields = Nil
  }
  def fieldForId(id: Int): Union._Fields = id match {
    case 1 => Union._Fields.name
    case 2 => Union._Fields.__fields
    case 99 => Union._Fields.__annotations
    case _ => null
  }
  def isSet(field: Union._Fields): Boolean = field match {
    case Union._Fields.name => nameIsSet
    case Union._Fields.__fields => fieldsIsSet
    case Union._Fields.__annotations => annotationsIsSet
    case _ => false
  }
  def getFieldValue(field: Union._Fields): AnyRef = field match {
    case Union._Fields.name => nameOrNull.asInstanceOf[AnyRef]
    case Union._Fields.__fields => fieldsOrNull.asInstanceOf[AnyRef]
    case Union._Fields.__annotations => annotationsOrDefault.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  // Untyped setter for TBase interop; unknown fields are silently ignored.
  def setFieldValue(field: Union._Fields, value: AnyRef) {
    field match {
      case Union._Fields.name => name_=(value.asInstanceOf[String])
      case Union._Fields.__fields =>
        __fields_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]]
        )
      case Union._Fields.__annotations =>
        __annotations_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
        )
      case _ =>
    }
  }
  // Deep copy via a binary-protocol round trip through an in-memory buffer.
  override def deepCopy(): RawUnion = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Union.createRawRecord
    ret.read(prot)
    ret
  }
  // Copy semantics follow the null sentinel: a null argument leaves that field unset.
  override def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): RawUnion = {
    val ret = new RawUnion
    if (name != null) ret.name_=(name)
    if (__fields != null) ret.__fields_=(__fields)
    if (__annotations != null) ret.__annotations_=(__annotations)
    ret
  }
  // Human-readable rendering via the string protocol round trip.
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion for the generated `Exception` record (shadows scala.Exception in
  * this file's scope — generated name from the thrift IDL).
  *
  * The Builder uses phantom types to enforce at compile time that the two
  * required fields (`name`, `fields`) are set before result() can be called;
  * `annotations` is optional and does not affect the builder state type.
  */
object Exception extends ExceptionMeta {
  object Builder {
    sealed trait HasName
    sealed trait HasFields
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasName with HasFields
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[Exception] (private var obj: RawException) {
    // Each setter refines the phantom State so result() only compiles once
    // all required fields are provided.
    def name(v: String): Exception.Builder[State with Builder.HasName] = {
      obj.name_=(v)
      this.asInstanceOf[Exception.Builder[State with Builder.HasName]]
    }
    def __fields(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
    ): Exception.Builder[State with Builder.HasFields] = {
      obj.__fields_=(v)
      this.asInstanceOf[Exception.Builder[State with Builder.HasFields]]
    }
    def __annotations(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
    ): Exception.Builder[State] = {
      obj.__annotations_=(v)
      this
    }
    // Option overload: None explicitly unsets the field.
    def __annotations(
      vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    ): Exception.Builder[State] = {
      vOpt match {
        case Some(v) => obj.__annotations_=(v)
        case None => obj.annotationsUnset()
      }
      this
    }
    // Single-shot: hands out the underlying record and nulls the reference so
    // a second result() call fails fast instead of aliasing mutable state.
    def resultMutable()(implicit ev0: State <:< Builder.HasName, ev1: State <:< Builder.HasFields): MutableException = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("Exception.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasName, ev1: State <:< Builder.HasFields): Exception =
      resultMutable()(ev0, ev1)
  }
  def newBuilder: Exception.Builder.AllUnspecified = new Builder(Exception.createRawRecord)
  implicit val companionProvider: ExceptionCompanionProvider = new ExceptionCompanionProvider
  // Ordering/Comparator instances delegating to the record's compare.
  implicit val __$ordering: _root_.scala.math.Ordering[Exception] = {
    new _root_.scala.math.Ordering[Exception] {
      override def compare(x: Exception, y: Exception): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[Exception] = {
    new _root_.java.util.Comparator[Exception] {
      override def compare(x: Exception, y: Exception): Int = x.compare(y)
    }
  }
}
/** Metadata/companion machinery for the `Exception` record: Thrift field
  * descriptors, wire-name and id lookup tables, record factories, and Spindle
  * reflective field descriptors. Field ids: name=1, fields=2, annotations=99.
  */
class ExceptionMeta
  extends JavaExceptionMeta[Exception, RawException, ExceptionMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Exception] {
  override def recordName: String = "Exception"
  // Thrift descriptors.
  val EXCEPTION_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Exception")
  val NAME_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "name",
      org.apache.thrift.protocol.TType.STRING,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val FIELDS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "fields",
      org.apache.thrift.protocol.TType.LIST,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val ANNOTATIONS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "annotations",
      org.apache.thrift.protocol.TType.LIST,
      99, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  // Sentinel for unrecognized fields (id -1, VOID type).
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup for name-keyed protocols (e.g. JSON/BSON) that don't carry field ids.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "name" -> NAME_FDESC,
    "fields" -> FIELDS_FDESC,
    "annotations" -> ANNOTATIONS_FDESC
  )
  object _Fields {
    case object name extends _Fields(1, "name")
    case object __fields extends _Fields(2, "fields")
    case object __annotations extends _Fields(99, "annotations")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.name,
    2.toShort -> _Fields.__fields,
    99.toShort -> _Fields.__annotations
  )
  // Record factories.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: Exception = createRawRecord
  override def createRawRecord: RawException = new RawException
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[Exception] = {
    if (x.isInstanceOf[Exception]) Some(x.asInstanceOf[Exception]) else None
  }
  // Struct-level IDL annotations.
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations(
      scala.collection.immutable.Vector(
        ("generate_proxy", "true")
      )
    )
  // Spindle Descriptors.
  // Reflective field descriptors: getter reads the Option accessor; setter and
  // unsetter cast to RawException (the only mutable implementation produced here).
  val name =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[
      String,
      Exception,
      ExceptionMeta
    ](
      name = "name",
      longName = "name",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.nameOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Exception], v: String) => {
        r.asInstanceOf[RawException].name_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Exception]) => {
        r.asInstanceOf[RawException].nameUnset()
      },
      manifest = manifest[String]
    )
  val __fields =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field
    ], Exception, ExceptionMeta](
      name = "fields",
      longName = "fields",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.fieldsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Exception],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
      ) => { r.asInstanceOf[RawException].__fields_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Exception]) => {
        r.asInstanceOf[RawException].fieldsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]]
    )
  val __annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
    ], Exception, ExceptionMeta](
      name = "annotations",
      longName = "annotations",
      id = 99,
      annotations = Map(),
      owner = this,
      getter = _.annotationsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Exception],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
      ) => { r.asInstanceOf[RawException].__annotations_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Exception]) => {
        r.asInstanceOf[RawException].annotationsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Exception, ExceptionMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Exception, ExceptionMeta]](
      name,
      __fields,
      __annotations
    )
  // Convenience constructor setting all three fields.
  def apply(
    name: String,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field],
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Exception = {
    val ret = this.createRawRecord
    ret.name_=(name)
    ret.__fields_=(__fields)
    ret.__annotations_=(__annotations)
    ret
  }
}
/** Implicit bridge that lets generic Spindle code summon the `Exception`
  * companion (its meta) from the record type alone.
  */
class ExceptionCompanionProvider
  extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Exception] {
  type CompanionT = ExceptionMeta
  override def provide: ExceptionMeta = Exception
}
/** Read-side interface for the generated `Exception` record.
  *
  * `compare` orders records field-by-field in id order (name, fields,
  * annotations); for each field an unset record sorts before a set one, and
  * a null `that` sorts before `this` (returns 1). The cascaded `else if`
  * blocks deliberately assign to the local `cmp` inside the conditions so
  * each comparison is computed at most once.
  */
trait Exception
  extends JavaException[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field,
    Exception,
    RawException,
    ExceptionMeta
  ]
  with org.apache.thrift.TBase[Exception, Exception._Fields] {
  override def meta: ExceptionMeta
  override def compare(that: Exception): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.nameIsSet.compareTo(that.nameIsSet)
      cmp != 0
    }) cmp
    else if (this.nameIsSet && {
      cmp = this.nameOrNull.compareTo(that.nameOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.fieldsIsSet.compareTo(that.fieldsIsSet)
      cmp != 0
    }) cmp
    else if (this.fieldsIsSet && {
      // List fields are compared via TBaseHelper on Java-converted views.
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.__fields).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.__fields).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.annotationsIsSet.compareTo(that.annotationsIsSet)
      cmp != 0
    }) cmp
    else if (this.annotationsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.__annotations).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.__annotations).asJava
      )
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: Exception): Boolean = { this.compare(that) < 0 }
  override def >(that: Exception): Boolean = { this.compare(that) > 0 }
  override def <=(that: Exception): Boolean = { this.compare(that) <= 0 }
  override def >=(that: Exception): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: Exception): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): Exception
  // Functional copy; unset fields default to null sentinels and stay unset.
  def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): Exception
  /** New mutable record with this record's set fields copied in by reference. */
  def mutableCopy(): MutableException = {
    val ret = Exception.createRawRecord
    if (nameIsSet) ret.name_=(nameOrNull)
    if (fieldsIsSet) ret.__fields_=(fieldsOrNull)
    if (annotationsIsSet) ret.__annotations_=(annotationsOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned.
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableException
  /** Builder pre-populated with the currently-set fields. */
  def toBuilder(): Exception.Builder.AllSpecified = {
    val ret = new Exception.Builder(Exception.createRawRecord)
    if (nameIsSet) ret.name(nameOrNull)
    if (fieldsIsSet) ret.__fields(fieldsOrNull)
    if (annotationsIsSet) ret.__annotations(annotationsOrNull)
    ret
  }
  def mergeCopy(that: Exception): Exception
}
/** Mutable view of the generated `Exception` record: adds a setter and an
  * unsetter per field plus in-place merge (parallel to [[MutableUnion]]).
  */
trait MutableException
  extends Exception
  with JavaExceptionMutable[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field,
    Exception,
    RawException,
    ExceptionMeta
  ] {
  def name_=(x: String): Unit
  def nameUnset(): Unit
  def __fields_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]): Unit
  def fieldsUnset(): Unit
  def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit
  def annotationsUnset(): Unit
  // Merges that's set fields into this record in place.
  def merge(that: Exception): Unit
  def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableException
  // Already mutable: return this instead of making a copy.
  override def mutable: MutableException = this
}
/** Forwarding proxy for [[Exception]]: every member delegates verbatim to
  * `underlying`, so subclasses can override just the members they care about.
  */
trait ExceptionProxy extends Exception {
  /** The wrapped Exception instance every call is forwarded to. */
  protected def underlying: Exception

  // Local shorthand for the verbose shaded descriptor element types.
  // Private, so the externally visible signatures are unchanged (aliases dealias).
  private type FieldSeq = scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  private type AnnotationSeq =
    scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]

  override def meta = underlying.meta

  // name (field #1): forward each accessor variant unchanged.
  override def name: String = underlying.name
  override def nameOption: Option[String] = underlying.nameOption
  override def nameOrNull: String = underlying.nameOrNull
  override def nameOrThrow: String = underlying.nameOrThrow
  override def nameIsSet: Boolean = underlying.nameIsSet

  // __fields (field #2)
  override def __fields: FieldSeq = underlying.__fields
  override def fieldsOption: Option[FieldSeq] = underlying.fieldsOption
  override def fieldsOrDefault: FieldSeq = underlying.fieldsOrDefault
  override def fieldsOrNull: FieldSeq = underlying.fieldsOrNull
  override def fieldsOrThrow: FieldSeq = underlying.fieldsOrThrow
  override def fieldsIsSet: Boolean = underlying.fieldsIsSet

  // __annotations (field #99)
  override def __annotations: AnnotationSeq = underlying.__annotations
  override def annotationsOption: Option[AnnotationSeq] = underlying.annotationsOption
  override def annotationsOrDefault: AnnotationSeq = underlying.annotationsOrDefault
  override def annotationsOrNull: AnnotationSeq = underlying.annotationsOrNull
  override def annotationsOrThrow: AnnotationSeq = underlying.annotationsOrThrow
  override def annotationsIsSet: Boolean = underlying.annotationsIsSet

  // Record-level operations, also forwarded verbatim.
  override def compare(that: Exception): Int = underlying.compare(that)
  override def clear(): Unit = underlying.clear()
  override def read(iprot: org.apache.thrift.protocol.TProtocol): Unit = underlying.read(iprot)
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = underlying.write(oprot)
  override def copy(
    name: String = nameOrNull,
    __fields: FieldSeq = fieldsOrNull,
    __annotations: AnnotationSeq = annotationsOrNull
  ): Exception = underlying.copy(name = name, __fields = __fields, __annotations = __annotations)
  override def mutableCopy(): MutableException = underlying.mutableCopy()
  override def mergeCopy(that: Exception): Exception = underlying.mergeCopy(that)
  override def mutable: MutableException = underlying.mutable
  override def deepCopy(): Exception = underlying.deepCopy()
  override def fieldForId(id: Int): Exception._Fields = underlying.fieldForId(id)
  override def isSet(field: Exception._Fields): Boolean = underlying.isSet(field)
  override def getFieldValue(field: Exception._Fields): AnyRef = underlying.getFieldValue(field)
  override def setFieldValue(field: Exception._Fields, value: AnyRef): Unit = underlying.setFieldValue(field, value)
  override def hashCode(): Int = underlying.hashCode
  override def equals(that: Any): Boolean = underlying.equals(that)
  override def toString(): String = underlying.toString
}
/** Mutable counterpart of [[ExceptionProxy]]: adds forwarding of the
  * setters, unsetters, `copy` and `merge` to the wrapped mutable record.
  */
trait MutableExceptionProxy extends MutableException with ExceptionProxy {
  /** The wrapped mutable record that all mutators delegate to. */
  protected def underlying: MutableException

  // Private shorthand for the verbose shaded descriptor element types.
  private type FieldSeq = scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  private type AnnotationSeq =
    scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]

  // Setters / unsetters simply forward to the wrapped record.
  override def name_=(x: String): Unit = underlying.name_=(x)
  override def nameUnset(): Unit = underlying.nameUnset()
  override def __fields_=(x: FieldSeq): Unit = underlying.__fields_=(x)
  override def fieldsUnset(): Unit = underlying.fieldsUnset()
  override def __annotations_=(x: AnnotationSeq): Unit = underlying.__annotations_=(x)
  override def annotationsUnset(): Unit = underlying.annotationsUnset()

  override def copy(
    name: String = nameOrNull,
    __fields: FieldSeq = fieldsOrNull,
    __annotations: AnnotationSeq = annotationsOrNull
  ): MutableException = underlying.copy(name = name, __fields = __fields, __annotations = __annotations)

  override def merge(that: Exception): Unit = underlying.merge(that)
}
/** Concrete mutable backing implementation of the `Exception` thrift struct.
  *
  * Field layout: #1 `name` (string), #2 `__fields` (list of Field),
  * #99 `__annotations` (list of Annotation). An unset field is represented by
  * a `null` sentinel in the corresponding private var, which is why every
  * `*IsSet` check is a null test. Unknown wire fields are preserved (when the
  * runtime is configured to do so) and re-emitted on write.
  */
final class RawException
  extends JavaExceptionRaw[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field,
    Exception,
    RawException,
    ExceptionMeta
  ]
  with MutableException {
  override def meta: ExceptionMeta = Exception
  // fields
  // Field #1 - name
  private var _name: String = null // Underlying type: String
  // `name` throws when unset; use nameOption / nameOrNull for safe access.
  override def name: String = nameOrThrow
  override def name_=(x: String): Unit = { _name = x }
  override def nameOption: Option[String] = if (nameIsSet) Some(_name) else None
  override def nameOrNull: String = _name
  override def nameOrThrow: String =
    if (nameIsSet) _name else throw new java.lang.NullPointerException("field name of Exception missing")
  override def nameIsSet: Boolean = _name != null
  override def nameUnset(): Unit = { _name = null }
  // Field #2 - fields
  private var _fields
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  // `__fields` falls back to an empty Seq when unset (OrDefault semantics).
  override def __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    fieldsOrDefault
  override def __fields_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  ): Unit = { _fields = x }
  override def fieldsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]] =
    if (fieldsIsSet) Some(_fields) else None
  override def fieldsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    if (fieldsIsSet) _fields else scala.collection.Seq.empty
  override def fieldsOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    _fields
  override def fieldsOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    if (fieldsIsSet) _fields else throw new java.lang.NullPointerException("field __fields of Exception missing")
  override def fieldsIsSet: Boolean = _fields != null
  override def fieldsUnset(): Unit = { _fields = null }
  // Field #99 - annotations
  private var _annotations: scala.collection.Seq[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
  ] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  // `__annotations` likewise falls back to an empty Seq when unset.
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    annotationsOrDefault
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { _annotations = x }
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    if (annotationsIsSet) Some(_annotations) else None
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations else scala.collection.Seq.empty
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] = _annotations
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations
    else throw new java.lang.NullPointerException("field __annotations of Exception missing")
  override def annotationsIsSet: Boolean = _annotations != null
  override def annotationsUnset(): Unit = { _annotations = null }
  // end fields
  // Unknown wire fields captured during read(), most-recent first (see write()).
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  // Serializes only the fields that are set, in id order, then any preserved
  // unknown fields, following the standard thrift struct/field framing.
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Exception.EXCEPTION_SDESC)
    if (nameIsSet) {
      oprot.writeFieldBegin(Exception.NAME_FDESC)
      oprot.writeString(_name)
      oprot.writeFieldEnd()
    }
    if (fieldsIsSet) {
      oprot.writeFieldBegin(Exception.FIELDS_FDESC)
      oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _fields.size))
      _fields.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (annotationsIsSet) {
      oprot.writeFieldBegin(Exception.ANNOTATIONS_FDESC)
      oprot.writeListBegin(
        new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _annotations.size)
      )
      _annotations.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      // unknownFields is built in reverse order during read(), so reverse back.
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  // Deserializes fields from the protocol into this record, skipping fields
  // whose wire type does not match and stashing unrecognized field ids into
  // `unknownFields` (when the runtime preserves them).
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily allocates one UnknownFields bucket per read() invocation.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Exception.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // name
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _name = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // fields
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _fields = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 99 => { // annotations
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _annotations = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure Exception".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  // Merges `that` into this record: scalar fields keep this record's value if
  // already set; seq fields are concatenated when both sides are set.
  override def merge(that: Exception): Unit = {
    if (that.nameIsSet && !this.nameIsSet) {
      this.name_=(that.nameOrNull)
    }
    if (that.fieldsIsSet && !this.fieldsIsSet) {
      this.__fields_=(that.fieldsOrNull)
    } else if (that.fieldsIsSet && this.fieldsIsSet) {
      this.__fields_=(this.__fields ++ that.__fields)
    }
    if (that.annotationsIsSet && !this.annotationsIsSet) {
      this.__annotations_=(that.annotationsOrDefault)
    } else if (that.annotationsIsSet && this.annotationsIsSet) {
      this.__annotations_=(this.__annotations ++ that.__annotations)
    }
  }
  // Non-destructive merge: builds a fresh record merged from this then that.
  override def mergeCopy(that: Exception): Exception = {
    val ret = Exception.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Exception => this.equals(o)
    case _ => false
  }
  // Field-by-field equality; a field set on only one side makes them unequal.
  def equals(that: Exception): Boolean = {
    that != null &&
    (if (this.nameIsSet) (that.nameIsSet && this.nameOrNull == that.nameOrNull) else !that.nameIsSet) &&
    (if (this.fieldsIsSet) (that.fieldsIsSet && this.fieldsOrNull == that.fieldsOrNull) else !that.fieldsIsSet) &&
    (if (this.annotationsIsSet) (that.annotationsIsSet && this.annotationsOrDefault == that.annotationsOrDefault)
     else !that.annotationsIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (nameIsSet) hasher.append(_name.##)
    if (fieldsIsSet) hasher.append(_fields.##)
    if (annotationsIsSet) hasher.append(_annotations.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (nameIsSet) ret = nameOrNull :: ret
    if (fieldsIsSet) ret = fieldsOrNull :: ret
    if (annotationsIsSet) ret = annotationsOrDefault :: ret
    ret.reverse
  }
  // Resets every field (and any preserved unknown fields) to the unset state.
  override def clear() {
    nameUnset()
    fieldsUnset()
    annotationsUnset()
    unknownFields = Nil
  }
  def fieldForId(id: Int): Exception._Fields = id match {
    case 1 => Exception._Fields.name
    case 2 => Exception._Fields.__fields
    case 99 => Exception._Fields.__annotations
    case _ => null
  }
  def isSet(field: Exception._Fields): Boolean = field match {
    case Exception._Fields.name => nameIsSet
    case Exception._Fields.__fields => fieldsIsSet
    case Exception._Fields.__annotations => annotationsIsSet
    case _ => false
  }
  def getFieldValue(field: Exception._Fields): AnyRef = field match {
    case Exception._Fields.name => nameOrNull.asInstanceOf[AnyRef]
    case Exception._Fields.__fields => fieldsOrNull.asInstanceOf[AnyRef]
    case Exception._Fields.__annotations => annotationsOrDefault.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  // NOTE: unrecognized fields are silently ignored here (empty default case).
  def setFieldValue(field: Exception._Fields, value: AnyRef) {
    field match {
      case Exception._Fields.name => name_=(value.asInstanceOf[String])
      case Exception._Fields.__fields =>
        __fields_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]]
        )
      case Exception._Fields.__annotations =>
        __annotations_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
        )
      case _ =>
    }
  }
  // Deep copy via a binary-protocol round trip through an in-memory buffer.
  override def deepCopy(): RawException = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Exception.createRawRecord
    ret.read(prot)
    ret
  }
  // Shallow copy; a null argument leaves the corresponding field unset.
  override def copy(
    name: String = nameOrNull,
    __fields: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = fieldsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): RawException = {
    val ret = new RawException
    if (name != null) ret.name_=(name)
    if (__fields != null) ret.__fields_=(__fields)
    if (__annotations != null) ret.__annotations_=(__annotations)
    ret
  }
  // Human-readable rendering via the string protocol (not a stable format).
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/** Companion object / singleton meta instance for the `Function` thrift struct.
  *
  * Hosts a typestate Builder: required fields (name, argz, throwz) are tracked
  * in the Builder's phantom `State` type parameter, so `result()` only
  * compiles once all three have been supplied.
  */
object Function extends FunctionMeta {
  object Builder {
    // Phantom marker traits mixed into the Builder's State as fields are set.
    sealed trait HasName
    sealed trait HasArgz
    sealed trait HasThrowz
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasName with HasArgz with HasThrowz
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[Function] (private var obj: RawFunction) {
    // Required field: setting it adds HasName to the phantom State.
    def name(v: String): Function.Builder[State with Builder.HasName] = {
      obj.name_=(v)
      this.asInstanceOf[Function.Builder[State with Builder.HasName]]
    }
    // Required field: adds HasArgz to the phantom State.
    def argz(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
    ): Function.Builder[State with Builder.HasArgz] = {
      obj.argz_=(v)
      this.asInstanceOf[Function.Builder[State with Builder.HasArgz]]
    }
    // Required field: adds HasThrowz to the phantom State.
    def throwz(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
    ): Function.Builder[State with Builder.HasThrowz] = {
      obj.throwz_=(v)
      this.asInstanceOf[Function.Builder[State with Builder.HasThrowz]]
    }
    // Optional fields below do not change the phantom State.
    def returnTypeId(v: String): Function.Builder[State] = {
      obj.returnTypeId_=(v)
      this
    }
    // Option overload: None unsets the field.
    def returnTypeId(vOpt: Option[String]): Function.Builder[State] = {
      vOpt match {
        case Some(v) => obj.returnTypeId_=(v)
        case None => obj.returnTypeIdUnset()
      }
      this
    }
    def oneWay(v: Boolean): Function.Builder[State] = {
      obj.oneWay_=(v)
      this
    }
    def oneWay(vOpt: Option[Boolean]): Function.Builder[State] = {
      vOpt match {
        case Some(v) => obj.oneWay_=(v)
        case None => obj.oneWayUnset()
      }
      this
    }
    def __annotations(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
    ): Function.Builder[State] = {
      obj.__annotations_=(v)
      this
    }
    def __annotations(
      vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    ): Function.Builder[State] = {
      vOpt match {
        case Some(v) => obj.__annotations_=(v)
        case None => obj.annotationsUnset()
      }
      this
    }
    // Hands out the underlying record and invalidates this builder (obj = null)
    // so a second result* call fails fast instead of aliasing the record.
    // The implicit evidence enforces that all required fields were set.
    def resultMutable()(
      implicit ev0: State <:< Builder.HasName,
      ev1: State <:< Builder.HasArgz,
      ev2: State <:< Builder.HasThrowz
    ): MutableFunction = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("Function.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(
      implicit ev0: State <:< Builder.HasName,
      ev1: State <:< Builder.HasArgz,
      ev2: State <:< Builder.HasThrowz
    ): Function = resultMutable()(ev0, ev1, ev2)
  }
  def newBuilder: Function.Builder.AllUnspecified = new Builder(Function.createRawRecord)
  implicit val companionProvider: FunctionCompanionProvider = new FunctionCompanionProvider
  // Ordering/Comparator instances backed by Function.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[Function] = {
    new _root_.scala.math.Ordering[Function] {
      override def compare(x: Function, y: Function): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[Function] = {
    new _root_.java.util.Comparator[Function] {
      override def compare(x: Function, y: Function): Int = x.compare(y)
    }
  }
}
/** Metadata/companion class for the `Function` thrift struct.
  *
  * Holds the thrift wire descriptors (TStruct/TField), the `_Fields` id enum,
  * the Spindle field descriptors with getter/setter lambdas, and factory
  * methods for creating records.
  */
class FunctionMeta
  extends JavaFunctionMeta[Function, RawFunction, FunctionMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Function] {
  override def recordName: String = "Function"
  // Thrift descriptors.
  val FUNCTION_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Function")
  val NAME_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "name",
      org.apache.thrift.protocol.TType.STRING,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val RETURNTYPEID_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "returnTypeId",
      org.apache.thrift.protocol.TType.STRING,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val ONEWAY_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "oneWay",
      org.apache.thrift.protocol.TType.BOOL,
      3, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val ARGZ_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "argz",
      org.apache.thrift.protocol.TType.LIST,
      4, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val THROWZ_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "throwz",
      org.apache.thrift.protocol.TType.LIST,
      5, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val ANNOTATIONS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "annotations",
      org.apache.thrift.protocol.TType.LIST,
      99, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup used by read() for protocols (e.g. JSON/BSON) that serialize names
  // instead of field ids.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "name" -> NAME_FDESC,
    "returnTypeId" -> RETURNTYPEID_FDESC,
    "oneWay" -> ONEWAY_FDESC,
    "argz" -> ARGZ_FDESC,
    "throwz" -> THROWZ_FDESC,
    "annotations" -> ANNOTATIONS_FDESC
  )
  // TFieldIdEnum implementation, one case object per thrift field id.
  object _Fields {
    case object name extends _Fields(1, "name")
    case object returnTypeId extends _Fields(2, "returnTypeId")
    case object oneWay extends _Fields(3, "oneWay")
    case object argz extends _Fields(4, "argz")
    case object throwz extends _Fields(5, "throwz")
    case object __annotations extends _Fields(99, "annotations")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.name,
    2.toShort -> _Fields.returnTypeId,
    3.toShort -> _Fields.oneWay,
    4.toShort -> _Fields.argz,
    5.toShort -> _Fields.throwz,
    99.toShort -> _Fields.__annotations
  )
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: Function = createRawRecord
  override def createRawRecord: RawFunction = new RawFunction
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[Function] = {
    if (x.isInstanceOf[Function]) Some(x.asInstanceOf[Function]) else None
  }
  // Struct-level thrift annotations from the IDL.
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations(
      scala.collection.immutable.Vector(
        ("generate_proxy", "true")
      )
    )
  // Spindle Descriptors.
  // Reflective field descriptors: each pairs a getter with raw setter/unsetter
  // lambdas that cast the generic MutableRecord down to RawFunction.
  val name =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Function, FunctionMeta](
      name = "name",
      longName = "name",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.nameOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Function], v: String) => {
        r.asInstanceOf[RawFunction].name_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Function]) => {
        r.asInstanceOf[RawFunction].nameUnset()
      },
      manifest = manifest[String]
    )
  val returnTypeId =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Function, FunctionMeta](
      name = "returnTypeId",
      longName = "returnTypeId",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.returnTypeIdOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Function], v: String) => {
        r.asInstanceOf[RawFunction].returnTypeId_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Function]) => {
        r.asInstanceOf[RawFunction].returnTypeIdUnset()
      },
      manifest = manifest[String]
    )
  val oneWay =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[
      Boolean,
      Function,
      FunctionMeta
    ](
      name = "oneWay",
      longName = "oneWay",
      id = 3,
      annotations = Map(),
      owner = this,
      getter = _.oneWayOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Function], v: Boolean) => {
        r.asInstanceOf[RawFunction].oneWay_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Function]) => {
        r.asInstanceOf[RawFunction].oneWayUnset()
      },
      manifest = manifest[Boolean]
    )
  val argz =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field
    ], Function, FunctionMeta](
      name = "argz",
      longName = "argz",
      id = 4,
      annotations = Map(),
      owner = this,
      getter = _.argzOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Function],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
      ) => { r.asInstanceOf[RawFunction].argz_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Function]) => {
        r.asInstanceOf[RawFunction].argzUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]]
    )
  val throwz =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field
    ], Function, FunctionMeta](
      name = "throwz",
      longName = "throwz",
      id = 5,
      annotations = Map(),
      owner = this,
      getter = _.throwzOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Function],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
      ) => { r.asInstanceOf[RawFunction].throwz_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Function]) => {
        r.asInstanceOf[RawFunction].throwzUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]]
    )
  val __annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
    ], Function, FunctionMeta](
      name = "annotations",
      longName = "annotations",
      id = 99,
      annotations = Map(),
      owner = this,
      getter = _.annotationsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Function],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
      ) => { r.asInstanceOf[RawFunction].__annotations_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Function]) => {
        r.asInstanceOf[RawFunction].annotationsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  // All field descriptors, in thrift id order.
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Function, FunctionMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Function, FunctionMeta]](
      name,
      returnTypeId,
      oneWay,
      argz,
      throwz,
      __annotations
    )
  // Convenience constructor that sets every field on a fresh raw record.
  def apply(
    name: String,
    returnTypeId: String,
    oneWay: Boolean,
    argz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field],
    throwz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field],
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Function = {
    val ret = this.createRawRecord
    ret.name_=(name)
    ret.returnTypeId_=(returnTypeId)
    ret.oneWay_=(oneWay)
    ret.argz_=(argz)
    ret.throwz_=(throwz)
    ret.__annotations_=(__annotations)
    ret
  }
}
/** CompanionProvider that lets generic spindle runtime code recover the
  * meta/companion for the `Function` record type.
  */
class FunctionCompanionProvider
  extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Function] {
  type CompanionT = FunctionMeta
  // The companion object is itself the singleton FunctionMeta instance.
  override def provide: FunctionMeta = Function
}
trait Function
extends JavaFunction[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field,
Function,
RawFunction,
FunctionMeta
]
with org.apache.thrift.TBase[Function, Function._Fields] {
override def meta: FunctionMeta
override def compare(that: Function): Int = {
var cmp: Int = 0
if (that == null) {
1
} else if ({
cmp = this.nameIsSet.compareTo(that.nameIsSet)
cmp != 0
}) cmp
else if (this.nameIsSet && {
cmp = this.nameOrNull.compareTo(that.nameOrNull)
cmp != 0
}) cmp
else if ({
cmp = this.returnTypeIdIsSet.compareTo(that.returnTypeIdIsSet)
cmp != 0
}) cmp
else if (this.returnTypeIdIsSet && {
cmp = this.returnTypeIdOrNull.compareTo(that.returnTypeIdOrNull)
cmp != 0
}) cmp
else if ({
cmp = this.oneWayIsSet.compareTo(that.oneWayIsSet)
cmp != 0
}) cmp
else if (this.oneWayIsSet && {
cmp = this.oneWay.compareTo(that.oneWay)
cmp != 0
}) cmp
else if ({
cmp = this.argzIsSet.compareTo(that.argzIsSet)
cmp != 0
}) cmp
else if (this.argzIsSet && {
cmp = org.apache.thrift.TBaseHelper.compareTo(
scala.collection.JavaConverters.seqAsJavaListConverter(this.argz).asJava,
scala.collection.JavaConverters.seqAsJavaListConverter(that.argz).asJava
)
cmp != 0
}) cmp
else if ({
cmp = this.throwzIsSet.compareTo(that.throwzIsSet)
cmp != 0
}) cmp
else if (this.throwzIsSet && {
cmp = org.apache.thrift.TBaseHelper.compareTo(
scala.collection.JavaConverters.seqAsJavaListConverter(this.throwz).asJava,
scala.collection.JavaConverters.seqAsJavaListConverter(that.throwz).asJava
)
cmp != 0
}) cmp
else if ({
cmp = this.annotationsIsSet.compareTo(that.annotationsIsSet)
cmp != 0
}) cmp
else if (this.annotationsIsSet && {
cmp = org.apache.thrift.TBaseHelper.compareTo(
scala.collection.JavaConverters.seqAsJavaListConverter(this.__annotations).asJava,
scala.collection.JavaConverters.seqAsJavaListConverter(that.__annotations).asJava
)
cmp != 0
}) cmp
else 0
}
override def <(that: Function): Boolean = { this.compare(that) < 0 }
override def >(that: Function): Boolean = { this.compare(that) > 0 }
override def <=(that: Function): Boolean = { this.compare(that) <= 0 }
override def >=(that: Function): Boolean = { this.compare(that) >= 0 }
override def compareTo(that: Function): Int = compare(that)
def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
def deepCopy(): Function
def copy(
name: String = nameOrNull,
returnTypeId: String = returnTypeIdOrNull,
oneWay: java.lang.Boolean = oneWayOrNull,
argz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = argzOrNull,
throwz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = throwzOrNull,
__annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
annotationsOrNull
): Function
def mutableCopy(): MutableFunction = {
val ret = Function.createRawRecord
if (nameIsSet) ret.name_=(nameOrNull)
if (returnTypeIdIsSet) ret.returnTypeId_=(returnTypeIdOrNull)
if (oneWayIsSet) ret.oneWay_=(oneWayOrDefault)
if (argzIsSet) ret.argz_=(argzOrNull)
if (throwzIsSet) ret.throwz_=(throwzOrNull)
if (annotationsIsSet) ret.__annotations_=(annotationsOrNull)
ret
}
/** Returns a pointer to a Mutable version of this record.
*
* If the underlying implementation is mutable, `this` will be returned.
* If the underlying implementation is immutable, a mutable copy will be returned.
*
* After mutating the instance returned by this method, the original instance
* (on which `mutable` was called) will be in an undefined state. It may or may
* not have been modified, depending on whether it was immutable or not.
*
* This is included as an optimization for when we want access to a Mutable record
* but don't want to pay the cost of copying every time.
*/
def mutable: MutableFunction
def toBuilder(): Function.Builder.AllSpecified = {
val ret = new Function.Builder(Function.createRawRecord)
if (nameIsSet) ret.name(nameOrNull)
if (returnTypeIdIsSet) ret.returnTypeId(returnTypeIdOrNull)
if (oneWayIsSet) ret.oneWay(oneWayOrDefault)
if (argzIsSet) ret.argz(argzOrNull)
if (throwzIsSet) ret.throwz(throwzOrNull)
if (annotationsIsSet) ret.__annotations(annotationsOrNull)
ret
}
  // Non-destructive merge: builds a new record combining the set fields of `this` and `that`.
  def mergeCopy(that: Function): Function
}
/**
 * Mutable interface to a [[Function]] record: adds a setter and an unsetter for
 * every thrift field, plus an in-place [[merge]].
 */
trait MutableFunction
  extends Function
  with JavaFunctionMutable[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field,
    Function,
    RawFunction,
    FunctionMeta
  ] {
  // Setter / unsetter pairs, one per thrift field.
  def name_=(x: String): Unit
  def nameUnset(): Unit
  def returnTypeId_=(x: String): Unit
  def returnTypeIdUnset(): Unit
  def oneWay_=(x: Boolean): Unit
  def oneWayUnset(): Unit
  def argz_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]): Unit
  def argzUnset(): Unit
  def throwz_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]): Unit
  def throwzUnset(): Unit
  def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit
  def annotationsUnset(): Unit
  // In-place merge with `that` (see RawFunction.merge for the per-field rules).
  def merge(that: Function): Unit
  // Field-wise copy; defaults mirror this record's current values.
  def copy(
    name: String = nameOrNull,
    returnTypeId: String = returnTypeIdOrNull,
    oneWay: java.lang.Boolean = oneWayOrNull,
    argz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = argzOrNull,
    throwz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = throwzOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableFunction
  // Already mutable, so no copy is needed.
  override def mutable: MutableFunction = this
}
/**
 * Proxy that forwards every [[Function]] member to [[underlying]]. Mix this in
 * and override [[underlying]] to decorate a record without copying it.
 */
trait FunctionProxy extends Function {
  protected def underlying: Function
  override def meta = underlying.meta
  // field/proxy_ref.ssp
  override def name: String = underlying.name
  override def nameOption: Option[String] = underlying.nameOption
  override def nameOrNull: String = underlying.nameOrNull
  override def nameOrThrow: String = underlying.nameOrThrow
  override def nameIsSet: Boolean = underlying.nameIsSet
  // field/proxy_ref.ssp
  override def returnTypeIdOption: Option[String] = underlying.returnTypeIdOption
  override def returnTypeIdOrNull: String = underlying.returnTypeIdOrNull
  override def returnTypeIdOrThrow: String = underlying.returnTypeIdOrThrow
  override def returnTypeIdIsSet: Boolean = underlying.returnTypeIdIsSet
  // field/proxy_primitive.ssp
  override def oneWay: Boolean = underlying.oneWay
  override def oneWayOption: Option[Boolean] = underlying.oneWayOption
  override def oneWayOrDefault: Boolean = underlying.oneWayOrDefault
  override def oneWayOrNull: java.lang.Boolean = underlying.oneWayOrNull
  override def oneWayOrThrow: Boolean = underlying.oneWayOrThrow
  override def oneWayIsSet: Boolean = underlying.oneWayIsSet
  // field/proxy_container.ssp
  override def argz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.argz
  override def argzOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]] =
    underlying.argzOption
  override def argzOrDefault: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.argzOrDefault
  override def argzOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.argzOrNull
  override def argzOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.argzOrThrow
  override def argzIsSet: Boolean = underlying.argzIsSet
  // field/proxy_container.ssp
  override def throwz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.throwz
  override def throwzOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]] =
    underlying.throwzOption
  override def throwzOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.throwzOrDefault
  override def throwzOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.throwzOrNull
  override def throwzOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    underlying.throwzOrThrow
  override def throwzIsSet: Boolean = underlying.throwzIsSet
  // field/proxy_container.ssp
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.__annotations
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    underlying.annotationsOption
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrDefault
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrNull
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrThrow
  override def annotationsIsSet: Boolean = underlying.annotationsIsSet
  // Record-level operations (serialization, copy, equality) are forwarded unchanged as well.
  override def compare(that: Function): Int = underlying.compare(that)
  override def clear() { underlying.clear }
  override def read(iprot: org.apache.thrift.protocol.TProtocol) { underlying.read(iprot) }
  override def write(oprot: org.apache.thrift.protocol.TProtocol) { underlying.write(oprot) }
  override def copy(
    name: String = nameOrNull,
    returnTypeId: String = returnTypeIdOrNull,
    oneWay: java.lang.Boolean = oneWayOrNull,
    argz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = argzOrNull,
    throwz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = throwzOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): Function = underlying.copy(
    name = name,
    returnTypeId = returnTypeId,
    oneWay = oneWay,
    argz = argz,
    throwz = throwz,
    __annotations = __annotations
  )
  override def mutableCopy(): MutableFunction = underlying.mutableCopy()
  override def mergeCopy(that: Function): Function = underlying.mergeCopy(that)
  override def mutable: MutableFunction = underlying.mutable
  override def deepCopy(): Function = underlying.deepCopy()
  override def fieldForId(id: Int): Function._Fields = underlying.fieldForId(id)
  override def isSet(field: Function._Fields): Boolean = underlying.isSet(field)
  override def getFieldValue(field: Function._Fields): AnyRef = underlying.getFieldValue(field)
  override def setFieldValue(field: Function._Fields, value: AnyRef) { underlying.setFieldValue(field, value) }
  // equals/hashCode/toString also delegate, so a proxy compares equal to its underlying record's peers.
  override def hashCode(): Int = underlying.hashCode
  override def equals(that: Any): Boolean = underlying.equals(that)
  override def toString(): String = underlying.toString
}
/**
 * Mutable counterpart of [[FunctionProxy]]: additionally forwards every setter,
 * unsetter, [[merge]], and [[copy]] to the (mutable) [[underlying]] record.
 */
trait MutableFunctionProxy extends MutableFunction with FunctionProxy {
  protected def underlying: MutableFunction
  override def name_=(x: String): Unit = { underlying.name_=(x) }
  override def nameUnset(): Unit = { underlying.nameUnset() }
  override def returnTypeId_=(x: String): Unit = { underlying.returnTypeId_=(x) }
  override def returnTypeIdUnset(): Unit = { underlying.returnTypeIdUnset() }
  override def oneWay_=(x: Boolean): Unit = { underlying.oneWay_=(x) }
  override def oneWayUnset(): Unit = { underlying.oneWayUnset() }
  override def argz_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  ): Unit = { underlying.argz_=(x) }
  override def argzUnset(): Unit = { underlying.argzUnset() }
  override def throwz_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  ): Unit = { underlying.throwz_=(x) }
  override def throwzUnset(): Unit = { underlying.throwzUnset() }
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { underlying.__annotations_=(x) }
  override def annotationsUnset(): Unit = { underlying.annotationsUnset() }
  override def copy(
    name: String = nameOrNull,
    returnTypeId: String = returnTypeIdOrNull,
    oneWay: java.lang.Boolean = oneWayOrNull,
    argz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = argzOrNull,
    throwz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = throwzOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableFunction = underlying.copy(
    name = name,
    returnTypeId = returnTypeId,
    oneWay = oneWay,
    argz = argz,
    throwz = throwz,
    __annotations = __annotations
  )
  override def merge(that: Function): Unit = underlying.merge(that)
}
/**
 * Concrete backing implementation of [[Function]]: plain vars per field, thrift
 * wire (de)serialization, merge/equality/hash, and untyped field access by id.
 * Reference fields use `null` as the unset sentinel; the primitive `oneWay`
 * carries an explicit `_oneWayIsSet` flag instead.
 */
final class RawFunction
  extends JavaFunctionRaw[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field,
    Function,
    RawFunction,
    FunctionMeta
  ]
  with MutableFunction {
  override def meta: FunctionMeta = Function
  // fields
  // Field #1 - name
  private var _name: String = null // Underlying type: String
  override def name: String = nameOrThrow
  override def name_=(x: String): Unit = { _name = x }
  override def nameOption: Option[String] = if (nameIsSet) Some(_name) else None
  override def nameOrNull: String = _name
  override def nameOrThrow: String =
    if (nameIsSet) _name else throw new java.lang.NullPointerException("field name of Function missing")
  override def nameIsSet: Boolean = _name != null
  override def nameUnset(): Unit = { _name = null }
  // Field #2 - returnTypeId
  private var _returnTypeId: String = null // Underlying type: String
  override def returnTypeId_=(x: String): Unit = { _returnTypeId = x }
  override def returnTypeIdOption: Option[String] = if (returnTypeIdIsSet) Some(_returnTypeId) else None
  override def returnTypeIdOrNull: String = _returnTypeId
  override def returnTypeIdOrThrow: String = if (returnTypeIdIsSet) _returnTypeId
  else throw new java.lang.NullPointerException("field returnTypeId of Function missing")
  override def returnTypeIdIsSet: Boolean = _returnTypeId != null
  override def returnTypeIdUnset(): Unit = { _returnTypeId = null }
  // Field #3 - oneWay (primitive, so set-ness is tracked with a separate flag)
  private var _oneWay: Boolean = false // Underlying type: Boolean
  private var _oneWayIsSet: Boolean = false
  override def oneWay: Boolean = oneWayOrDefault
  override def oneWay_=(x: Boolean): Unit = { _oneWay = x; _oneWayIsSet = true }
  override def oneWayOption: Option[Boolean] = if (oneWayIsSet) Some(_oneWay) else None
  override def oneWayOrDefault: Boolean = _oneWay
  override def oneWayOrNull: java.lang.Boolean = if (oneWayIsSet) _oneWay else null
  override def oneWayOrThrow: Boolean =
    if (oneWayIsSet) _oneWay else throw new java.lang.NullPointerException("field oneWay of Function missing")
  override def oneWayIsSet: Boolean = _oneWayIsSet
  override def oneWayUnset(): Unit = { _oneWayIsSet = false; _oneWay = false }
  // Field #4 - argz (OrDefault yields an empty Seq when unset; OrNull yields null)
  private var _argz
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  override def argz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    argzOrDefault
  override def argz_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  ): Unit = { _argz = x }
  override def argzOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]] =
    if (argzIsSet) Some(_argz) else None
  override def argzOrDefault: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    if (argzIsSet) _argz else scala.collection.Seq.empty
  override def argzOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    _argz
  override def argzOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    if (argzIsSet) _argz else throw new java.lang.NullPointerException("field argz of Function missing")
  override def argzIsSet: Boolean = _argz != null
  override def argzUnset(): Unit = { _argz = null }
  // Field #5 - throwz
  private var _throwz
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  override def throwz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    throwzOrDefault
  override def throwz_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
  ): Unit = { _throwz = x }
  override def throwzOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]] =
    if (throwzIsSet) Some(_throwz) else None
  override def throwzOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    if (throwzIsSet) _throwz else scala.collection.Seq.empty
  override def throwzOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    _throwz
  override def throwzOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] =
    if (throwzIsSet) _throwz else throw new java.lang.NullPointerException("field throwz of Function missing")
  override def throwzIsSet: Boolean = _throwz != null
  override def throwzUnset(): Unit = { _throwz = null }
  // Field #99 - annotations
  private var _annotations: scala.collection.Seq[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
  ] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    annotationsOrDefault
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { _annotations = x }
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    if (annotationsIsSet) Some(_annotations) else None
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations else scala.collection.Seq.empty
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] = _annotations
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations
    else throw new java.lang.NullPointerException("field __annotations of Function missing")
  override def annotationsIsSet: Boolean = _annotations != null
  override def annotationsUnset(): Unit = { _annotations = null }
  // end fields
  // Fields read off the wire that this schema does not know about; replayed on write.
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  // Emits only the fields that are set, in id order, then replays preserved unknown fields.
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Function.FUNCTION_SDESC)
    if (nameIsSet) {
      oprot.writeFieldBegin(Function.NAME_FDESC)
      oprot.writeString(_name)
      oprot.writeFieldEnd()
    }
    if (returnTypeIdIsSet) {
      oprot.writeFieldBegin(Function.RETURNTYPEID_FDESC)
      oprot.writeString(_returnTypeId)
      oprot.writeFieldEnd()
    }
    if (oneWayIsSet) {
      oprot.writeFieldBegin(Function.ONEWAY_FDESC)
      oprot.writeBool(_oneWay)
      oprot.writeFieldEnd()
    }
    if (argzIsSet) {
      oprot.writeFieldBegin(Function.ARGZ_FDESC)
      oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _argz.size))
      _argz.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (throwzIsSet) {
      oprot.writeFieldBegin(Function.THROWZ_FDESC)
      oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _throwz.size))
      _throwz.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (annotationsIsSet) {
      oprot.writeFieldBegin(Function.ANNOTATIONS_FDESC)
      oprot.writeListBegin(
        new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _annotations.size)
      )
      _annotations.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  // Reads fields in whatever order they arrive; unexpected wire types are skipped,
  // and unrecognized field ids are optionally captured into unknownFields.
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily allocates the UnknownFields holder the first time an unknown field appears.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Function.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // name
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _name = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // returnTypeId
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _returnTypeId = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 3 => { // oneWay
            if (field_header.`type` == org.apache.thrift.protocol.TType.BOOL) {
              _oneWay = iprot.readBool()
              _oneWayIsSet = true
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 4 => { // argz
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _argz = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 5 => { // throwz
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _throwz = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 99 => { // annotations
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _annotations = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure Function".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  // Scalar fields: `that` wins only where `this` is unset. Seq fields: concatenated when both are set.
  override def merge(that: Function): Unit = {
    if (that.nameIsSet && !this.nameIsSet) {
      this.name_=(that.nameOrNull)
    }
    if (that.returnTypeIdIsSet && !this.returnTypeIdIsSet) {
      this.returnTypeId_=(that.returnTypeIdOrNull)
    }
    if (that.oneWayIsSet && !this.oneWayIsSet) {
      this.oneWay_=(that.oneWayOrDefault)
    }
    if (that.argzIsSet && !this.argzIsSet) {
      this.argz_=(that.argzOrNull)
    } else if (that.argzIsSet && this.argzIsSet) {
      this.argz_=(this.argz ++ that.argz)
    }
    if (that.throwzIsSet && !this.throwzIsSet) {
      this.throwz_=(that.throwzOrNull)
    } else if (that.throwzIsSet && this.throwzIsSet) {
      this.throwz_=(this.throwz ++ that.throwz)
    }
    if (that.annotationsIsSet && !this.annotationsIsSet) {
      // NOTE(review): uses annotationsOrDefault where argz/throwz use OrNull; equivalent
      // here because annotationsIsSet guarantees a non-null value.
      this.__annotations_=(that.annotationsOrDefault)
    } else if (that.annotationsIsSet && this.annotationsIsSet) {
      this.__annotations_=(this.__annotations ++ that.__annotations)
    }
  }
  // Non-destructive merge: neither `this` nor `that` is modified.
  override def mergeCopy(that: Function): Function = {
    val ret = Function.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Function => this.equals(o)
    case _ => false
  }
  // Two records are equal iff every field agrees on both set-ness and value.
  def equals(that: Function): Boolean = {
    that != null &&
    (if (this.nameIsSet) (that.nameIsSet && this.nameOrNull == that.nameOrNull) else !that.nameIsSet) &&
    (if (this.returnTypeIdIsSet) (that.returnTypeIdIsSet && this.returnTypeIdOrNull == that.returnTypeIdOrNull)
     else !that.returnTypeIdIsSet) &&
    (if (this.oneWayIsSet) (that.oneWayIsSet && this.oneWayOrDefault == that.oneWayOrDefault) else !that.oneWayIsSet) &&
    (if (this.argzIsSet) (that.argzIsSet && this.argzOrNull == that.argzOrNull) else !that.argzIsSet) &&
    (if (this.throwzIsSet) (that.throwzIsSet && this.throwzOrNull == that.throwzOrNull) else !that.throwzIsSet) &&
    (if (this.annotationsIsSet) (that.annotationsIsSet && this.annotationsOrDefault == that.annotationsOrDefault)
     else !that.annotationsIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (nameIsSet) hasher.append(_name.##)
    if (returnTypeIdIsSet) hasher.append(_returnTypeId.##)
    if (oneWayIsSet) hasher.append(_oneWay.##)
    if (argzIsSet) hasher.append(_argz.##)
    if (throwzIsSet) hasher.append(_throwz.##)
    if (annotationsIsSet) hasher.append(_annotations.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (nameIsSet) ret = nameOrNull :: ret
    if (returnTypeIdIsSet) ret = returnTypeIdOrNull :: ret
    if (oneWayIsSet) ret = oneWayOrDefault :: ret
    if (argzIsSet) ret = argzOrNull :: ret
    if (throwzIsSet) ret = throwzOrNull :: ret
    if (annotationsIsSet) ret = annotationsOrDefault :: ret
    ret.reverse
  }
  // Unsets every field and drops any preserved unknown fields.
  override def clear() {
    nameUnset()
    returnTypeIdUnset()
    oneWayUnset()
    argzUnset()
    throwzUnset()
    annotationsUnset()
    unknownFields = Nil
  }
  def fieldForId(id: Int): Function._Fields = id match {
    case 1 => Function._Fields.name
    case 2 => Function._Fields.returnTypeId
    case 3 => Function._Fields.oneWay
    case 4 => Function._Fields.argz
    case 5 => Function._Fields.throwz
    case 99 => Function._Fields.__annotations
    case _ => null
  }
  def isSet(field: Function._Fields): Boolean = field match {
    case Function._Fields.name => nameIsSet
    case Function._Fields.returnTypeId => returnTypeIdIsSet
    case Function._Fields.oneWay => oneWayIsSet
    case Function._Fields.argz => argzIsSet
    case Function._Fields.throwz => throwzIsSet
    case Function._Fields.__annotations => annotationsIsSet
    case _ => false
  }
  def getFieldValue(field: Function._Fields): AnyRef = field match {
    case Function._Fields.name => nameOrNull.asInstanceOf[AnyRef]
    case Function._Fields.returnTypeId => returnTypeIdOrNull.asInstanceOf[AnyRef]
    case Function._Fields.oneWay => oneWayOrDefault.asInstanceOf[AnyRef]
    case Function._Fields.argz => argzOrNull.asInstanceOf[AnyRef]
    case Function._Fields.throwz => throwzOrNull.asInstanceOf[AnyRef]
    case Function._Fields.__annotations => annotationsOrDefault.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  def setFieldValue(field: Function._Fields, value: AnyRef) {
    field match {
      case Function._Fields.name => name_=(value.asInstanceOf[String])
      case Function._Fields.returnTypeId => returnTypeId_=(value.asInstanceOf[String])
      case Function._Fields.oneWay => oneWay_=(value.asInstanceOf[Boolean])
      case Function._Fields.argz =>
        argz_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]]
        )
      case Function._Fields.throwz =>
        throwz_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field]]
        )
      case Function._Fields.__annotations =>
        __annotations_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
        )
      case _ =>
    }
  }
  // Deep copy via an in-memory binary-protocol round trip.
  override def deepCopy(): RawFunction = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Function.createRawRecord
    ret.read(prot)
    ret
  }
  // Field-wise copy; null arguments leave the corresponding field unset on the result.
  override def copy(
    name: String = nameOrNull,
    returnTypeId: String = returnTypeIdOrNull,
    oneWay: java.lang.Boolean = oneWayOrNull,
    argz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = argzOrNull,
    throwz: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Field] = throwzOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): RawFunction = {
    val ret = new RawFunction
    if (name != null) ret.name_=(name)
    if (returnTypeId != null) ret.returnTypeId_=(returnTypeId)
    if (oneWay != null) ret.oneWay_=(oneWay)
    if (argz != null) ret.argz_=(argz)
    if (throwz != null) ret.throwz_=(throwz)
    if (__annotations != null) ret.__annotations_=(__annotations)
    ret
  }
  // Human-readable rendering via the string protocol.
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
/**
 * Companion object for Service records: typesafe Builder (phantom types track
 * which required fields have been supplied), plus implicit ordering and
 * comparator instances.
 */
object Service extends ServiceMeta {
  object Builder {
    // Phantom-type markers: result() is only callable once the builder's State
    // proves that both name and functions have been provided.
    sealed trait HasName
    sealed trait HasFunctions
    sealed trait MaybeSpecified
    sealed class Specified extends MaybeSpecified
    sealed class Unspecified extends MaybeSpecified
    type HasAll = HasName with HasFunctions
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[Service] (private var obj: RawService) {
    // Each setter mutates the wrapped record and refines the phantom State via a cast.
    def name(v: String): Service.Builder[State with Builder.HasName] = {
      obj.name_=(v)
      this.asInstanceOf[Service.Builder[State with Builder.HasName]]
    }
    def functions(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function]
    ): Service.Builder[State with Builder.HasFunctions] = {
      obj.functions_=(v)
      this.asInstanceOf[Service.Builder[State with Builder.HasFunctions]]
    }
    def extendz(v: String): Service.Builder[State] = {
      obj.extendz_=(v)
      this
    }
    def extendz(vOpt: Option[String]): Service.Builder[State] = {
      vOpt match {
        case Some(v) => obj.extendz_=(v)
        case None => obj.extendzUnset()
      }
      this
    }
    def __annotations(
      v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
    ): Service.Builder[State] = {
      obj.__annotations_=(v)
      this
    }
    def __annotations(
      vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    ): Service.Builder[State] = {
      vOpt match {
        case Some(v) => obj.__annotations_=(v)
        case None => obj.annotationsUnset()
      }
      this
    }
    // Hands out the built record exactly once; obj is nulled so reuse fails fast.
    def resultMutable()(
      implicit ev0: State <:< Builder.HasName,
      ev1: State <:< Builder.HasFunctions
    ): MutableService = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("Service.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result()(implicit ev0: State <:< Builder.HasName, ev1: State <:< Builder.HasFunctions): Service =
      resultMutable()(ev0, ev1)
  }
  def newBuilder: Service.Builder.AllUnspecified = new Builder(Service.createRawRecord)
  implicit val companionProvider: ServiceCompanionProvider = new ServiceCompanionProvider
  // Ordering/comparator both delegate to the record's own compare.
  implicit val __$ordering: _root_.scala.math.Ordering[Service] = {
    new _root_.scala.math.Ordering[Service] {
      override def compare(x: Service, y: Service): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[Service] = {
    new _root_.java.util.Comparator[Service] {
      override def compare(x: Service, y: Service): Int = x.compare(y)
    }
  }
}
class ServiceMeta
extends JavaServiceMeta[Service, RawService, ServiceMeta]
with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Service] {
override def recordName: String = "Service"
// Thrift descriptors.
val SERVICE_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Service")
val NAME_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"name",
org.apache.thrift.protocol.TType.STRING,
1, {
java.util.Collections.emptyMap[String, String]
}
)
val EXTENDZ_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"extendz",
org.apache.thrift.protocol.TType.STRING,
2, {
java.util.Collections.emptyMap[String, String]
}
)
val FUNCTIONS_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"functions",
org.apache.thrift.protocol.TType.LIST,
3, {
java.util.Collections.emptyMap[String, String]
}
)
val ANNOTATIONS_FDESC: org.apache.thrift.protocol.TField =
new io.fsq.spindle.common.thrift.base.EnhancedTField(
"annotations",
org.apache.thrift.protocol.TType.LIST,
99, {
java.util.Collections.emptyMap[String, String]
}
)
val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
"name" -> NAME_FDESC,
"extendz" -> EXTENDZ_FDESC,
"functions" -> FUNCTIONS_FDESC,
"annotations" -> ANNOTATIONS_FDESC
)
object _Fields {
case object name extends _Fields(1, "name")
case object extendz extends _Fields(2, "extendz")
case object functions extends _Fields(3, "functions")
case object __annotations extends _Fields(99, "annotations")
}
sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
def getThriftFieldId: Short = id
def getFieldName: String = name
}
val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
1.toShort -> _Fields.name,
2.toShort -> _Fields.extendz,
3.toShort -> _Fields.functions,
99.toShort -> _Fields.__annotations
)
override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
createRawRecord
override def createRecord: Service = createRawRecord
override def createRawRecord: RawService = new RawService
override def untypedIfInstanceFrom(
x: AnyRef
): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
override def ifInstanceFrom(x: AnyRef): Option[Service] = {
if (x.isInstanceOf[Service]) Some(x.asInstanceOf[Service]) else None
}
override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations(
scala.collection.immutable.Vector(
("generate_proxy", "true")
)
)
// Spindle Descriptors.
  // Spindle field descriptor for `name` (field #1): reflective getter/setter/unsetter
  // used by generic record machinery. Setter/unsetter downcast to RawService.
  val name =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Service, ServiceMeta](
      name = "name",
      longName = "name",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.nameOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Service], v: String) => {
        r.asInstanceOf[RawService].name_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Service]) => {
        r.asInstanceOf[RawService].nameUnset()
      },
      manifest = manifest[String]
    )
  // Spindle field descriptor for `extendz` (field #2) — the service this one extends.
  val extendz =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[String, Service, ServiceMeta](
      name = "extendz",
      longName = "extendz",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.extendzOption,
      setterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Service], v: String) => {
        r.asInstanceOf[RawService].extendz_=(v)
      },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Service]) => {
        r.asInstanceOf[RawService].extendzUnset()
      },
      manifest = manifest[String]
    )
  // Spindle field descriptor for `functions` (field #3): a Seq[Function] field.
  val functions =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function
    ], Service, ServiceMeta](
      name = "functions",
      longName = "functions",
      id = 3,
      annotations = Map(),
      owner = this,
      getter = _.functionsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Service],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function]
      ) => { r.asInstanceOf[RawService].functions_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Service]) => {
        r.asInstanceOf[RawService].functionsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function]]
    )
  // Spindle field descriptor for `annotations` (field #99). The Scala-side name is
  // `__annotations` to avoid colliding with the struct-level `annotations` member above.
  val __annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
    ], Service, ServiceMeta](
      name = "annotations",
      longName = "annotations",
      id = 99,
      annotations = Map(),
      owner = this,
      getter = _.annotationsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Service],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
      ) => { r.asInstanceOf[RawService].__annotations_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Service]) => {
        r.asInstanceOf[RawService].annotationsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
    )
  // All field descriptors of Service, in field-id order (1, 2, 3, 99).
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Service, ServiceMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Service, ServiceMeta]](
      name,
      extendz,
      functions,
      __annotations
    )
  // Convenience constructor: builds a RawService with every field set.
  // Note: unlike copy(), this sets each field unconditionally (nulls included).
  def apply(
    name: String,
    extendz: String,
    functions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function],
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Service = {
    val ret = this.createRawRecord
    ret.name_=(name)
    ret.extendz_=(extendz)
    ret.functions_=(functions)
    ret.__annotations_=(__annotations)
    ret
  }
}
// Bridges Service to Spindle's generic CompanionProvider lookup, exposing the
// singleton Service companion (its meta) to code that only has the record type.
class ServiceCompanionProvider
  extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Service] {
  type CompanionT = ServiceMeta
  override def provide: ServiceMeta = Service
}
/**
 * Read-only interface for the generated `Service` Thrift struct (a service
 * definition: name, extended service, functions, annotations).
 *
 * Implemented by RawService (mutable backing) and ServiceProxy (delegation).
 */
trait Service
  extends JavaService[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function,
    Service,
    RawService,
    ServiceMeta
  ]
  with org.apache.thrift.TBase[Service, Service._Fields] {
  override def meta: ServiceMeta
  // Total ordering over all fields in id order. For each field, an unset field
  // sorts before a set one; null `that` sorts before `this`. Each `else if`
  // condition assigns `cmp` as a side effect before testing it — the evaluation
  // order of this chain is significant.
  override def compare(that: Service): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.nameIsSet.compareTo(that.nameIsSet)
      cmp != 0
    }) cmp
    else if (this.nameIsSet && {
      cmp = this.nameOrNull.compareTo(that.nameOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.extendzIsSet.compareTo(that.extendzIsSet)
      cmp != 0
    }) cmp
    else if (this.extendzIsSet && {
      cmp = this.extendzOrNull.compareTo(that.extendzOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.functionsIsSet.compareTo(that.functionsIsSet)
      cmp != 0
    }) cmp
    else if (this.functionsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.functions).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.functions).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.annotationsIsSet.compareTo(that.annotationsIsSet)
      cmp != 0
    }) cmp
    else if (this.annotationsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.__annotations).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.__annotations).asJava
      )
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: Service): Boolean = { this.compare(that) < 0 }
  override def >(that: Service): Boolean = { this.compare(that) > 0 }
  override def <=(that: Service): Boolean = { this.compare(that) <= 0 }
  override def >=(that: Service): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: Service): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): Service
  def copy(
    name: String = nameOrNull,
    extendz: String = extendzOrNull,
    functions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
      functionsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): Service
  // Shallow mutable copy: only set fields are transferred to a fresh RawService.
  def mutableCopy(): MutableService = {
    val ret = Service.createRawRecord
    if (nameIsSet) ret.name_=(nameOrNull)
    if (extendzIsSet) ret.extendz_=(extendzOrNull)
    if (functionsIsSet) ret.functions_=(functionsOrNull)
    if (annotationsIsSet) ret.__annotations_=(annotationsOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned.
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableService
  // Seeds a type-safe Builder with this record's set fields.
  def toBuilder(): Service.Builder.AllSpecified = {
    val ret = new Service.Builder(Service.createRawRecord)
    if (nameIsSet) ret.name(nameOrNull)
    if (extendzIsSet) ret.extendz(extendzOrNull)
    if (functionsIsSet) ret.functions(functionsOrNull)
    if (annotationsIsSet) ret.__annotations(annotationsOrNull)
    ret
  }
  def mergeCopy(that: Service): Service
}
/**
 * Mutable view of Service: adds per-field setters/unsetters and in-place merge.
 * `mutable` is the identity here since the record is already mutable.
 */
trait MutableService
  extends Service
  with JavaServiceMutable[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function,
    Service,
    RawService,
    ServiceMeta
  ] {
  def name_=(x: String): Unit
  def nameUnset(): Unit
  def extendz_=(x: String): Unit
  def extendzUnset(): Unit
  def functions_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function]): Unit
  def functionsUnset(): Unit
  def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit
  def annotationsUnset(): Unit
  def merge(that: Service): Unit
  def copy(
    name: String = nameOrNull,
    extendz: String = extendzOrNull,
    functions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
      functionsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableService
  override def mutable: MutableService = this
}
/**
 * Delegating implementation of Service (emitted because of generate_proxy=true):
 * every member forwards verbatim to `underlying`. Mix in and override selectively
 * to decorate a record without copying it.
 */
trait ServiceProxy extends Service {
  protected def underlying: Service
  override def meta = underlying.meta
  // field/proxy_ref.ssp
  override def name: String = underlying.name
  override def nameOption: Option[String] = underlying.nameOption
  override def nameOrNull: String = underlying.nameOrNull
  override def nameOrThrow: String = underlying.nameOrThrow
  override def nameIsSet: Boolean = underlying.nameIsSet
  // field/proxy_ref.ssp
  override def extendzOption: Option[String] = underlying.extendzOption
  override def extendzOrNull: String = underlying.extendzOrNull
  override def extendzOrThrow: String = underlying.extendzOrThrow
  override def extendzIsSet: Boolean = underlying.extendzIsSet
  // field/proxy_container.ssp
  override def functions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
    underlying.functions
  override def functionsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function]] =
    underlying.functionsOption
  override def functionsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
    underlying.functionsOrDefault
  override def functionsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
    underlying.functionsOrNull
  override def functionsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
    underlying.functionsOrThrow
  override def functionsIsSet: Boolean = underlying.functionsIsSet
  // field/proxy_container.ssp
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.__annotations
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    underlying.annotationsOption
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrDefault
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrNull
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    underlying.annotationsOrThrow
  override def annotationsIsSet: Boolean = underlying.annotationsIsSet
  override def compare(that: Service): Int = underlying.compare(that)
  override def clear() { underlying.clear }
  override def read(iprot: org.apache.thrift.protocol.TProtocol) { underlying.read(iprot) }
  override def write(oprot: org.apache.thrift.protocol.TProtocol) { underlying.write(oprot) }
  override def copy(
    name: String = nameOrNull,
    extendz: String = extendzOrNull,
    functions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
      functionsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): Service = underlying.copy(
    name = name,
    extendz = extendz,
    functions = functions,
    __annotations = __annotations
  )
  override def mutableCopy(): MutableService = underlying.mutableCopy()
  override def mergeCopy(that: Service): Service = underlying.mergeCopy(that)
  override def mutable: MutableService = underlying.mutable
  override def deepCopy(): Service = underlying.deepCopy()
  override def fieldForId(id: Int): Service._Fields = underlying.fieldForId(id)
  override def isSet(field: Service._Fields): Boolean = underlying.isSet(field)
  override def getFieldValue(field: Service._Fields): AnyRef = underlying.getFieldValue(field)
  override def setFieldValue(field: Service._Fields, value: AnyRef) { underlying.setFieldValue(field, value) }
  override def hashCode(): Int = underlying.hashCode
  override def equals(that: Any): Boolean = underlying.equals(that)
  override def toString(): String = underlying.toString
}
/**
 * Mutable flavor of ServiceProxy: also forwards setters/unsetters/merge to a
 * MutableService `underlying` (note the narrowed abstract member type).
 */
trait MutableServiceProxy extends MutableService with ServiceProxy {
  protected def underlying: MutableService
  override def name_=(x: String): Unit = { underlying.name_=(x) }
  override def nameUnset(): Unit = { underlying.nameUnset() }
  override def extendz_=(x: String): Unit = { underlying.extendz_=(x) }
  override def extendzUnset(): Unit = { underlying.extendzUnset() }
  override def functions_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function]
  ): Unit = { underlying.functions_=(x) }
  override def functionsUnset(): Unit = { underlying.functionsUnset() }
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { underlying.__annotations_=(x) }
  override def annotationsUnset(): Unit = { underlying.annotationsUnset() }
  override def copy(
    name: String = nameOrNull,
    extendz: String = extendzOrNull,
    functions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
      functionsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): MutableService = underlying.copy(
    name = name,
    extendz = extendz,
    functions = functions,
    __annotations = __annotations
  )
  override def merge(that: Service): Unit = underlying.merge(that)
}
/**
 * Concrete mutable backing record for Service. Each field is a nullable private
 * var; "is set" means non-null. Implements Thrift wire (de)serialization plus
 * equality/hashing/merging over the four fields (name, extendz, functions,
 * annotations).
 */
final class RawService
  extends JavaServiceRaw[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function,
    Service,
    RawService,
    ServiceMeta
  ]
  with MutableService {
  override def meta: ServiceMeta = Service
  // fields
  // Field #1 - name
  private var _name: String = null // Underlying type: String
  override def name: String = nameOrThrow
  override def name_=(x: String): Unit = { _name = x }
  override def nameOption: Option[String] = if (nameIsSet) Some(_name) else None
  override def nameOrNull: String = _name
  override def nameOrThrow: String =
    if (nameIsSet) _name else throw new java.lang.NullPointerException("field name of Service missing")
  override def nameIsSet: Boolean = _name != null
  override def nameUnset(): Unit = { _name = null }
  // Field #2 - extendz
  private var _extendz: String = null // Underlying type: String
  override def extendz_=(x: String): Unit = { _extendz = x }
  override def extendzOption: Option[String] = if (extendzIsSet) Some(_extendz) else None
  override def extendzOrNull: String = _extendz
  override def extendzOrThrow: String =
    if (extendzIsSet) _extendz else throw new java.lang.NullPointerException("field extendz of Service missing")
  override def extendzIsSet: Boolean = _extendz != null
  override def extendzUnset(): Unit = { _extendz = null }
  // Field #3 - functions
  private var _functions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
    null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function]
  override def functions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
    functionsOrDefault
  override def functions_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function]
  ): Unit = { _functions = x }
  override def functionsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function]] =
    if (functionsIsSet) Some(_functions) else None
  override def functionsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
    if (functionsIsSet) _functions else scala.collection.Seq.empty
  override def functionsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] = _functions
  override def functionsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
    if (functionsIsSet) _functions else throw new java.lang.NullPointerException("field functions of Service missing")
  override def functionsIsSet: Boolean = _functions != null
  override def functionsUnset(): Unit = { _functions = null }
  // Field #99 - annotations
  private var _annotations: scala.collection.Seq[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation
  ] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  override def __annotations
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    annotationsOrDefault
  override def __annotations_=(
    x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
  ): Unit = { _annotations = x }
  override def annotationsOption
    : Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]] =
    if (annotationsIsSet) Some(_annotations) else None
  override def annotationsOrDefault
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations else scala.collection.Seq.empty
  override def annotationsOrNull
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] = _annotations
  override def annotationsOrThrow
    : scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
    if (annotationsIsSet) _annotations
    else throw new java.lang.NullPointerException("field __annotations of Service missing")
  override def annotationsIsSet: Boolean = _annotations != null
  override def annotationsUnset(): Unit = { _annotations = null }
  // end fields
  // Unknown wire fields captured during read(), most-recent first; re-emitted on
  // write() when unknown-field preservation is enabled.
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  // Serializes set fields in id order (1, 2, 3, 99), then any preserved unknown
  // fields, then the field-stop marker. Unset fields are omitted entirely.
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(Service.SERVICE_SDESC)
    if (nameIsSet) {
      oprot.writeFieldBegin(Service.NAME_FDESC)
      oprot.writeString(_name)
      oprot.writeFieldEnd()
    }
    if (extendzIsSet) {
      oprot.writeFieldBegin(Service.EXTENDZ_FDESC)
      oprot.writeString(_extendz)
      oprot.writeFieldEnd()
    }
    if (functionsIsSet) {
      oprot.writeFieldBegin(Service.FUNCTIONS_FDESC)
      oprot.writeListBegin(
        new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _functions.size)
      )
      _functions.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (annotationsIsSet) {
      oprot.writeFieldBegin(Service.ANNOTATIONS_FDESC)
      oprot.writeListBegin(
        new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _annotations.size)
      )
      _annotations.foreach(element => {
        element.write(oprot)
      })
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  // Deserializes fields by id; type-mismatched fields are skipped, unknown ids are
  // either preserved (see currentUnknownFields) or skipped. Does NOT clear existing
  // state first, so fields absent from the wire keep their current values.
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily allocates one UnknownFields bucket per read() call, registering it
    // at the head of unknownFields.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        Service.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // name
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _name = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // extendz
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRING) {
              _extendz = iprot.readString()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 3 => { // functions
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _functions = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 99 => { // annotations
            if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
              _annotations = {
                val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
                val builder = scala.collection.immutable.Vector
                  .newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]
                var i: Int = tlist.size
                builder.sizeHint(tlist.size)
                while (i > 0) {
                  builder += ({
                    val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation.createRawRecord
                    s.read(iprot)
                    s
                  })
                  i -= 1
                }
                builder.result()
              }
              iprot.readListEnd()
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure Service".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  // In-place merge: scalar fields are taken from `that` only if unset here;
  // list fields are concatenated (this ++ that) when both sides are set.
  override def merge(that: Service): Unit = {
    if (that.nameIsSet && !this.nameIsSet) {
      this.name_=(that.nameOrNull)
    }
    if (that.extendzIsSet && !this.extendzIsSet) {
      this.extendz_=(that.extendzOrNull)
    }
    if (that.functionsIsSet && !this.functionsIsSet) {
      this.functions_=(that.functionsOrNull)
    } else if (that.functionsIsSet && this.functionsIsSet) {
      this.functions_=(this.functions ++ that.functions)
    }
    if (that.annotationsIsSet && !this.annotationsIsSet) {
      this.__annotations_=(that.annotationsOrDefault)
    } else if (that.annotationsIsSet && this.annotationsIsSet) {
      this.__annotations_=(this.__annotations ++ that.__annotations)
    }
  }
  // Non-destructive merge: fresh record seeded with `this`, then merged with `that`.
  override def mergeCopy(that: Service): Service = {
    val ret = Service.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: Service => this.equals(o)
    case _ => false
  }
  // Field-wise equality: for each field both sides must agree on set-ness and,
  // when set, on value. Note annotations compares OrDefault (empty-seq fallback).
  def equals(that: Service): Boolean = {
    that != null &&
    (if (this.nameIsSet) (that.nameIsSet && this.nameOrNull == that.nameOrNull) else !that.nameIsSet) &&
    (if (this.extendzIsSet) (that.extendzIsSet && this.extendzOrNull == that.extendzOrNull) else !that.extendzIsSet) &&
    (if (this.functionsIsSet) (that.functionsIsSet && this.functionsOrNull == that.functionsOrNull)
     else !that.functionsIsSet) &&
    (if (this.annotationsIsSet) (that.annotationsIsSet && this.annotationsOrDefault == that.annotationsOrDefault)
     else !that.annotationsIsSet) &&
    true
  }
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (nameIsSet) hasher.append(_name.##)
    if (extendzIsSet) hasher.append(_extendz.##)
    if (functionsIsSet) hasher.append(_functions.##)
    if (annotationsIsSet) hasher.append(_annotations.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (nameIsSet) ret = nameOrNull :: ret
    if (extendzIsSet) ret = extendzOrNull :: ret
    if (functionsIsSet) ret = functionsOrNull :: ret
    if (annotationsIsSet) ret = annotationsOrDefault :: ret
    ret.reverse
  }
  // Resets every field AND drops any preserved unknown fields.
  override def clear() {
    nameUnset()
    extendzUnset()
    functionsUnset()
    annotationsUnset()
    unknownFields = Nil
  }
  def fieldForId(id: Int): Service._Fields = id match {
    case 1 => Service._Fields.name
    case 2 => Service._Fields.extendz
    case 3 => Service._Fields.functions
    case 99 => Service._Fields.__annotations
    case _ => null
  }
  def isSet(field: Service._Fields): Boolean = field match {
    case Service._Fields.name => nameIsSet
    case Service._Fields.extendz => extendzIsSet
    case Service._Fields.functions => functionsIsSet
    case Service._Fields.__annotations => annotationsIsSet
    case _ => false
  }
  def getFieldValue(field: Service._Fields): AnyRef = field match {
    case Service._Fields.name => nameOrNull.asInstanceOf[AnyRef]
    case Service._Fields.extendz => extendzOrNull.asInstanceOf[AnyRef]
    case Service._Fields.functions => functionsOrNull.asInstanceOf[AnyRef]
    case Service._Fields.__annotations => annotationsOrDefault.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  def setFieldValue(field: Service._Fields, value: AnyRef) {
    field match {
      case Service._Fields.name => name_=(value.asInstanceOf[String])
      case Service._Fields.extendz => extendz_=(value.asInstanceOf[String])
      case Service._Fields.functions =>
        functions_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function]]
        )
      case Service._Fields.__annotations =>
        __annotations_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation]]
        )
      case _ =>
    }
  }
  // Deep copy via a binary-protocol round trip through an in-memory buffer.
  override def deepCopy(): RawService = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = Service.createRawRecord
    ret.read(prot)
    ret
  }
  // Shallow copy with per-field overrides; null arguments leave the field unset.
  override def copy(
    name: String = nameOrNull,
    extendz: String = extendzOrNull,
    functions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Function] =
      functionsOrNull,
    __annotations: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Annotation] =
      annotationsOrNull
  ): RawService = {
    val ret = new RawService
    if (name != null) ret.name_=(name)
    if (extendz != null) ret.extendz_=(extendz)
    if (functions != null) ret.functions_=(functions)
    if (__annotations != null) ret.__annotations_=(__annotations)
    ret
  }
  // Human-readable rendering via the string protocol (serialization-based).
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
object Program extends ProgramMeta {
object Builder {
sealed trait HasTypeRegistry
sealed trait MaybeSpecified
sealed class Specified extends MaybeSpecified
sealed class Unspecified extends MaybeSpecified
type HasAll = HasTypeRegistry
type AllSpecified = Builder[HasAll]
type AllUnspecified = Builder[Any]
}
class Builder[+State] private[Program] (private var obj: RawProgram) {
def typeRegistry(
v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry
): Program.Builder[State with Builder.HasTypeRegistry] = {
obj.typeRegistry_=(v)
this.asInstanceOf[Program.Builder[State with Builder.HasTypeRegistry]]
}
def namespaces(
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace]
): Program.Builder[State] = {
obj.namespaces_=(v)
this
}
def namespaces(
vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace]]
): Program.Builder[State] = {
vOpt match {
case Some(v) => obj.namespaces_=(v)
case None => obj.namespacesUnset()
}
this
}
def includes(
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include]
): Program.Builder[State] = {
obj.includes_=(v)
this
}
def includes(
vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include]]
): Program.Builder[State] = {
vOpt match {
case Some(v) => obj.includes_=(v)
case None => obj.includesUnset()
}
this
}
def constants(
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const]
): Program.Builder[State] = {
obj.constants_=(v)
this
}
def constants(
vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const]]
): Program.Builder[State] = {
vOpt match {
case Some(v) => obj.constants_=(v)
case None => obj.constantsUnset()
}
this
}
def enums(
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum]
): Program.Builder[State] = {
obj.enums_=(v)
this
}
def enums(
vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum]]
): Program.Builder[State] = {
vOpt match {
case Some(v) => obj.enums_=(v)
case None => obj.enumsUnset()
}
this
}
def typedefs(
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef]
): Program.Builder[State] = {
obj.typedefs_=(v)
this
}
def typedefs(
vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef]]
): Program.Builder[State] = {
vOpt match {
case Some(v) => obj.typedefs_=(v)
case None => obj.typedefsUnset()
}
this
}
def structs(
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct]
): Program.Builder[State] = {
obj.structs_=(v)
this
}
def structs(
vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct]]
): Program.Builder[State] = {
vOpt match {
case Some(v) => obj.structs_=(v)
case None => obj.structsUnset()
}
this
}
def unions(
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union]
): Program.Builder[State] = {
obj.unions_=(v)
this
}
def unions(
vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union]]
): Program.Builder[State] = {
vOpt match {
case Some(v) => obj.unions_=(v)
case None => obj.unionsUnset()
}
this
}
def exceptions(
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception]
): Program.Builder[State] = {
obj.exceptions_=(v)
this
}
def exceptions(
vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception]]
): Program.Builder[State] = {
vOpt match {
case Some(v) => obj.exceptions_=(v)
case None => obj.exceptionsUnset()
}
this
}
def services(
v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service]
): Program.Builder[State] = {
obj.services_=(v)
this
}
def services(
vOpt: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service]]
): Program.Builder[State] = {
vOpt match {
case Some(v) => obj.services_=(v)
case None => obj.servicesUnset()
}
this
}
def resultMutable()(implicit ev0: State <:< Builder.HasTypeRegistry): MutableProgram = {
if (obj != null) {
val ret = obj
obj = null
ret
} else {
throw new IllegalStateException("Program.Builder.result invoked multiple times. Use a new Builder.")
}
}
def result()(implicit ev0: State <:< Builder.HasTypeRegistry): Program = resultMutable()(ev0)
}
def newBuilder: Program.Builder.AllUnspecified = new Builder(Program.createRawRecord)
implicit val companionProvider: ProgramCompanionProvider = new ProgramCompanionProvider
/** Ordering instance that delegates to Program's own field-by-field compare. */
implicit val __$ordering: _root_.scala.math.Ordering[Program] =
  new _root_.scala.math.Ordering[Program] {
    override def compare(left: Program, right: Program): Int = left.compare(right)
  }
/** java.util.Comparator instance that delegates to Program's own field-by-field compare. */
implicit val __$comparator: _root_.java.util.Comparator[Program] =
  new _root_.java.util.Comparator[Program] {
    override def compare(left: Program, right: Program): Int = left.compare(right)
  }
}
/**
 * Generated metadata for the `Program` Thrift record.
 *
 * Holds the Thrift wire descriptors (TStruct/TField), field-id enums, Spindle
 * field descriptors (with raw setters/unsetters), and factory methods for
 * creating raw records. The companion object `Program` is expected to be the
 * singleton instance of this meta (see ProgramCompanionProvider.provide).
 */
class ProgramMeta
  extends JavaProgramMeta[Program, RawProgram, ProgramMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[Program] {
  override def recordName: String = "Program"
  // Thrift descriptors.
  val PROGRAM_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("Program")
  // Each field descriptor carries the wire name, TType, field id, and an
  // (empty) annotation map for the EnhancedTField wrapper.
  val NAMESPACES_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "namespaces",
      org.apache.thrift.protocol.TType.LIST,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val INCLUDES_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "includes",
      org.apache.thrift.protocol.TType.LIST,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val CONSTANTS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "constants",
      org.apache.thrift.protocol.TType.LIST,
      3, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val ENUMS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "enums",
      org.apache.thrift.protocol.TType.LIST,
      4, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val TYPEDEFS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "typedefs",
      org.apache.thrift.protocol.TType.LIST,
      5, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val STRUCTS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "structs",
      org.apache.thrift.protocol.TType.LIST,
      6, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val UNIONS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "unions",
      org.apache.thrift.protocol.TType.LIST,
      7, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val EXCEPTIONS_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "exceptions",
      org.apache.thrift.protocol.TType.LIST,
      8, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val SERVICES_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "services",
      org.apache.thrift.protocol.TType.LIST,
      9, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  // Note the field-id gap: typeRegistry uses id 98, well apart from ids 1-9.
  val TYPEREGISTRY_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "typeRegistry",
      org.apache.thrift.protocol.TType.STRUCT,
      98, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  // Sentinel descriptor (id -1, TType.VOID) for fields not known to this schema.
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup from wire (Thrift) field name to its TField descriptor.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "namespaces" -> NAMESPACES_FDESC,
    "includes" -> INCLUDES_FDESC,
    "constants" -> CONSTANTS_FDESC,
    "enums" -> ENUMS_FDESC,
    "typedefs" -> TYPEDEFS_FDESC,
    "structs" -> STRUCTS_FDESC,
    "unions" -> UNIONS_FDESC,
    "exceptions" -> EXCEPTIONS_FDESC,
    "services" -> SERVICES_FDESC,
    "typeRegistry" -> TYPEREGISTRY_FDESC
  )
  // TFieldIdEnum cases, one per Thrift field (id, wire name).
  object _Fields {
    case object namespaces extends _Fields(1, "namespaces")
    case object includes extends _Fields(2, "includes")
    case object constants extends _Fields(3, "constants")
    case object enums extends _Fields(4, "enums")
    case object typedefs extends _Fields(5, "typedefs")
    case object structs extends _Fields(6, "structs")
    case object unions extends _Fields(7, "unions")
    case object exceptions extends _Fields(8, "exceptions")
    case object services extends _Fields(9, "services")
    case object typeRegistry extends _Fields(98, "typeRegistry")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  // Lookup from Thrift field id to the corresponding _Fields case object.
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.namespaces,
    2.toShort -> _Fields.includes,
    3.toShort -> _Fields.constants,
    4.toShort -> _Fields.enums,
    5.toShort -> _Fields.typedefs,
    6.toShort -> _Fields.structs,
    7.toShort -> _Fields.unions,
    8.toShort -> _Fields.exceptions,
    9.toShort -> _Fields.services,
    98.toShort -> _Fields.typeRegistry
  )
  // All three factory entry points bottom out in `new RawProgram`.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: Program = createRawRecord
  override def createRawRecord: RawProgram = new RawProgram
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[Program] = {
    if (x.isInstanceOf[Program]) Some(x.asInstanceOf[Program]) else None
  }
  // Struct-level annotations from the .thrift source.
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations(
      scala.collection.immutable.Vector(
        ("generate_proxy", "true")
      )
    )
  // Spindle Descriptors.
  // Each descriptor wires a field's getter to the record trait and its raw
  // setter/unsetter to RawProgram (hence the asInstanceOf casts on MutableRecord).
  val namespaces =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace
    ], Program, ProgramMeta](
      name = "namespaces",
      longName = "namespaces",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.namespacesOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace]
      ) => { r.asInstanceOf[RawProgram].namespaces_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program]) => {
        r.asInstanceOf[RawProgram].namespacesUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace]]
    )
  val includes =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include
    ], Program, ProgramMeta](
      name = "includes",
      longName = "includes",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.includesOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include]
      ) => { r.asInstanceOf[RawProgram].includes_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program]) => {
        r.asInstanceOf[RawProgram].includesUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include]]
    )
  val constants =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const
    ], Program, ProgramMeta](
      name = "constants",
      longName = "constants",
      id = 3,
      annotations = Map(),
      owner = this,
      getter = _.constantsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const]
      ) => { r.asInstanceOf[RawProgram].constants_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program]) => {
        r.asInstanceOf[RawProgram].constantsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const]]
    )
  val enums =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum
    ], Program, ProgramMeta](
      name = "enums",
      longName = "enums",
      id = 4,
      annotations = Map(),
      owner = this,
      getter = _.enumsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum]
      ) => { r.asInstanceOf[RawProgram].enums_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program]) => {
        r.asInstanceOf[RawProgram].enumsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum]]
    )
  val typedefs =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef
    ], Program, ProgramMeta](
      name = "typedefs",
      longName = "typedefs",
      id = 5,
      annotations = Map(),
      owner = this,
      getter = _.typedefsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef]
      ) => { r.asInstanceOf[RawProgram].typedefs_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program]) => {
        r.asInstanceOf[RawProgram].typedefsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef]]
    )
  val structs =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct
    ], Program, ProgramMeta](
      name = "structs",
      longName = "structs",
      id = 6,
      annotations = Map(),
      owner = this,
      getter = _.structsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct]
      ) => { r.asInstanceOf[RawProgram].structs_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program]) => {
        r.asInstanceOf[RawProgram].structsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct]]
    )
  val unions =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union
    ], Program, ProgramMeta](
      name = "unions",
      longName = "unions",
      id = 7,
      annotations = Map(),
      owner = this,
      getter = _.unionsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union]
      ) => { r.asInstanceOf[RawProgram].unions_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program]) => {
        r.asInstanceOf[RawProgram].unionsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union]]
    )
  val exceptions =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception
    ], Program, ProgramMeta](
      name = "exceptions",
      longName = "exceptions",
      id = 8,
      annotations = Map(),
      owner = this,
      getter = _.exceptionsOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception]
      ) => { r.asInstanceOf[RawProgram].exceptions_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program]) => {
        r.asInstanceOf[RawProgram].exceptionsUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception]]
    )
  val services =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.OptionalFieldDescriptor[scala.collection.Seq[
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service
    ], Program, ProgramMeta](
      name = "services",
      longName = "services",
      id = 9,
      annotations = Map(),
      owner = this,
      getter = _.servicesOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program],
        v: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service]
      ) => { r.asInstanceOf[RawProgram].services_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program]) => {
        r.asInstanceOf[RawProgram].servicesUnset()
      },
      manifest = manifest[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service]]
    )
  // typeRegistry is a STRUCT field, so it uses StructFieldDescriptor (with a structMeta) rather than OptionalFieldDescriptor.
  val typeRegistry =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.StructFieldDescriptor[
      Program,
      ProgramMeta,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistryMeta
    ](
      name = "typeRegistry",
      longName = "typeRegistry",
      id = 98,
      annotations = Map(),
      owner = this,
      getter = _.typeRegistryOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program],
        v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry
      ) => { r.asInstanceOf[RawProgram].typeRegistry_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[Program]) => {
        r.asInstanceOf[RawProgram].typeRegistryUnset()
      },
      structMeta = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry,
      manifest = manifest[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  // All field descriptors in declaration order.
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Program, ProgramMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, Program, ProgramMeta]](
      namespaces,
      includes,
      constants,
      enums,
      typedefs,
      structs,
      unions,
      exceptions,
      services,
      typeRegistry
    )
  /** Convenience factory: builds a raw record with every field set from the given values. */
  def apply(
    namespaces: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace],
    includes: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include],
    constants: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const],
    enums: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum],
    typedefs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef],
    structs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct],
    unions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union],
    exceptions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception],
    services: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service],
    typeRegistry: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry
  ): Program = {
    val ret = this.createRawRecord
    ret.namespaces_=(namespaces)
    ret.includes_=(includes)
    ret.constants_=(constants)
    ret.enums_=(enums)
    ret.typedefs_=(typedefs)
    ret.structs_=(structs)
    ret.unions_=(unions)
    ret.exceptions_=(exceptions)
    ret.services_=(services)
    ret.typeRegistry_=(typeRegistry)
    ret
  }
}
/** CompanionProvider used by the Spindle runtime to resolve Program's meta object implicitly. */
class ProgramCompanionProvider
  extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[Program] {
  type CompanionT = ProgramMeta
  // The companion object `Program` is returned where a ProgramMeta is required.
  override def provide: ProgramMeta = Program
}
/**
 * Generated record trait for the `Program` Thrift struct.
 *
 * Mixes the generated Java-side interface (JavaProgram, parameterized on all
 * referenced descriptor types) with org.apache.thrift.TBase, and supplies a
 * total ordering plus copy/builder conversion helpers.
 */
trait Program
  extends JavaProgram[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union,
    Program,
    RawProgram,
    ProgramMeta
  ]
  with org.apache.thrift.TBase[Program, Program._Fields] {
  override def meta: ProgramMeta
  /**
   * Field-by-field comparison in field-declaration order.
   *
   * For each field the presence flag is compared first (Boolean compareTo:
   * unset sorts before set), then, when this record has the field set, the
   * values are compared. List fields are converted to java.util.List and
   * compared via TBaseHelper. A non-null record compares greater than null.
   */
  override def compare(that: Program): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.namespacesIsSet.compareTo(that.namespacesIsSet)
      cmp != 0
    }) cmp
    else if (this.namespacesIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.namespaces).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.namespaces).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.includesIsSet.compareTo(that.includesIsSet)
      cmp != 0
    }) cmp
    else if (this.includesIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.includes).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.includes).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.constantsIsSet.compareTo(that.constantsIsSet)
      cmp != 0
    }) cmp
    else if (this.constantsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.constants).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.constants).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.enumsIsSet.compareTo(that.enumsIsSet)
      cmp != 0
    }) cmp
    else if (this.enumsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.enums).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.enums).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.typedefsIsSet.compareTo(that.typedefsIsSet)
      cmp != 0
    }) cmp
    else if (this.typedefsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.typedefs).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.typedefs).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.structsIsSet.compareTo(that.structsIsSet)
      cmp != 0
    }) cmp
    else if (this.structsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.structs).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.structs).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.unionsIsSet.compareTo(that.unionsIsSet)
      cmp != 0
    }) cmp
    else if (this.unionsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.unions).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.unions).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.exceptionsIsSet.compareTo(that.exceptionsIsSet)
      cmp != 0
    }) cmp
    else if (this.exceptionsIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.exceptions).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.exceptions).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.servicesIsSet.compareTo(that.servicesIsSet)
      cmp != 0
    }) cmp
    else if (this.servicesIsSet && {
      cmp = org.apache.thrift.TBaseHelper.compareTo(
        scala.collection.JavaConverters.seqAsJavaListConverter(this.services).asJava,
        scala.collection.JavaConverters.seqAsJavaListConverter(that.services).asJava
      )
      cmp != 0
    }) cmp
    else if ({
      cmp = this.typeRegistryIsSet.compareTo(that.typeRegistryIsSet)
      cmp != 0
    }) cmp
    else if (this.typeRegistryIsSet && {
      cmp = this.typeRegistryOrNull.compareTo(that.typeRegistryOrNull)
      cmp != 0
    }) cmp
    else 0
  }
  // Relational operators derived from compare.
  override def <(that: Program): Boolean = { this.compare(that) < 0 }
  override def >(that: Program): Boolean = { this.compare(that) > 0 }
  override def <=(that: Program): Boolean = { this.compare(that) <= 0 }
  override def >=(that: Program): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: Program): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): Program
  // copy defaults pull current values via the *OrNull accessors.
  def copy(
    namespaces: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] =
      namespacesOrNull,
    includes: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] = includesOrNull,
    constants: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] = constantsOrNull,
    enums: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] = enumsOrNull,
    typedefs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] = typedefsOrNull,
    structs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] = structsOrNull,
    unions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] = unionsOrNull,
    exceptions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] =
      exceptionsOrNull,
    services: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] = servicesOrNull,
    typeRegistry: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry = typeRegistryOrNull
  ): Program
  /** Copies only the fields that are set into a fresh raw record. */
  def mutableCopy(): MutableProgram = {
    val ret = Program.createRawRecord
    if (namespacesIsSet) ret.namespaces_=(namespacesOrNull)
    if (includesIsSet) ret.includes_=(includesOrNull)
    if (constantsIsSet) ret.constants_=(constantsOrNull)
    if (enumsIsSet) ret.enums_=(enumsOrNull)
    if (typedefsIsSet) ret.typedefs_=(typedefsOrNull)
    if (structsIsSet) ret.structs_=(structsOrNull)
    if (unionsIsSet) ret.unions_=(unionsOrNull)
    if (exceptionsIsSet) ret.exceptions_=(exceptionsOrNull)
    if (servicesIsSet) ret.services_=(servicesOrNull)
    if (typeRegistryIsSet) ret.typeRegistry_=(typeRegistryOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
   *
   * If the underlying implementation is mutable, `this` will be returned.
   * If the underlying implementation is immutable, a mutable copy will be returned.
   *
   * After mutating the instance returned by this method, the original instance
   * (on which `mutable` was called) will be in an undefined state. It may or may
   * not have been modified, depending on whether it was immutable or not.
   *
   * This is included as an optimization for when we want access to a Mutable record
   * but don't want to pay the cost of copying every time.
   */
  def mutable: MutableProgram
  /** Transfers the set fields into a new Builder (typed as having all fields specified). */
  def toBuilder(): Program.Builder.AllSpecified = {
    val ret = new Program.Builder(Program.createRawRecord)
    if (namespacesIsSet) ret.namespaces(namespacesOrNull)
    if (includesIsSet) ret.includes(includesOrNull)
    if (constantsIsSet) ret.constants(constantsOrNull)
    if (enumsIsSet) ret.enums(enumsOrNull)
    if (typedefsIsSet) ret.typedefs(typedefsOrNull)
    if (structsIsSet) ret.structs(structsOrNull)
    if (unionsIsSet) ret.unions(unionsOrNull)
    if (exceptionsIsSet) ret.exceptions(exceptionsOrNull)
    if (servicesIsSet) ret.services(servicesOrNull)
    if (typeRegistryIsSet) ret.typeRegistry(typeRegistryOrNull)
    ret
  }
  def mergeCopy(that: Program): Program
}
/**
 * Mutable view of a Program record: adds per-field setters (`field_=`) and
 * unsetters (`fieldUnset`), plus in-place merge. `mutable` is the identity
 * here since the record is already mutable.
 */
trait MutableProgram
  extends Program
  with JavaProgramMutable[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union,
    Program,
    RawProgram,
    ProgramMeta
  ] {
  // Setter/unsetter pairs, one per Thrift field.
  def namespaces_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace]): Unit
  def namespacesUnset(): Unit
  def includes_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include]): Unit
  def includesUnset(): Unit
  def constants_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const]): Unit
  def constantsUnset(): Unit
  def enums_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum]): Unit
  def enumsUnset(): Unit
  def typedefs_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef]): Unit
  def typedefsUnset(): Unit
  def structs_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct]): Unit
  def structsUnset(): Unit
  def unions_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union]): Unit
  def unionsUnset(): Unit
  def exceptions_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception]): Unit
  def exceptionsUnset(): Unit
  def services_=(x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service]): Unit
  def servicesUnset(): Unit
  def typeRegistry_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry): Unit
  def typeRegistryUnset(): Unit
  def merge(that: Program): Unit
  // copy defaults pull current values via the *OrNull accessors, mirroring Program.copy.
  def copy(
    namespaces: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] =
      namespacesOrNull,
    includes: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] = includesOrNull,
    constants: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] = constantsOrNull,
    enums: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] = enumsOrNull,
    typedefs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] = typedefsOrNull,
    structs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] = structsOrNull,
    unions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] = unionsOrNull,
    exceptions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] =
      exceptionsOrNull,
    services: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] = servicesOrNull,
    typeRegistry: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry = typeRegistryOrNull
  ): MutableProgram
  // Already mutable, so no copy is needed.
  override def mutable: MutableProgram = this
}
trait ProgramProxy extends Program {
protected def underlying: Program
override def meta = underlying.meta
// field/proxy_container.ssp
override def namespaces: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] =
underlying.namespaces
override def namespacesOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace]] =
underlying.namespacesOption
override def namespacesOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] =
underlying.namespacesOrDefault
override def namespacesOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] =
underlying.namespacesOrNull
override def namespacesOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] =
underlying.namespacesOrThrow
override def namespacesIsSet: Boolean = underlying.namespacesIsSet
// field/proxy_container.ssp
override def includes: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] =
underlying.includes
override def includesOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include]] =
underlying.includesOption
override def includesOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] =
underlying.includesOrDefault
override def includesOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] =
underlying.includesOrNull
override def includesOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] =
underlying.includesOrThrow
override def includesIsSet: Boolean = underlying.includesIsSet
// field/proxy_container.ssp
override def constants: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] =
underlying.constants
override def constantsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const]] =
underlying.constantsOption
override def constantsOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] =
underlying.constantsOrDefault
override def constantsOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] =
underlying.constantsOrNull
override def constantsOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] =
underlying.constantsOrThrow
override def constantsIsSet: Boolean = underlying.constantsIsSet
// field/proxy_container.ssp
override def enums: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] =
underlying.enums
override def enumsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum]] =
underlying.enumsOption
override def enumsOrDefault: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] =
underlying.enumsOrDefault
override def enumsOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] =
underlying.enumsOrNull
override def enumsOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] =
underlying.enumsOrThrow
override def enumsIsSet: Boolean = underlying.enumsIsSet
// Read-only accessor forwarding (generated from the field/proxy_container.ssp and
// field/proxy_ref.ssp templates). For each field `f` the proxy exposes the standard
// Spindle accessor family -- f, fOption, fOrDefault (container fields only), fOrNull,
// fOrThrow, fIsSet -- and forwards every call unchanged to `underlying`.
// field/proxy_container.ssp
override def typedefs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] =
underlying.typedefs
override def typedefsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef]] =
underlying.typedefsOption
override def typedefsOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] =
underlying.typedefsOrDefault
override def typedefsOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] =
underlying.typedefsOrNull
override def typedefsOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] =
underlying.typedefsOrThrow
override def typedefsIsSet: Boolean = underlying.typedefsIsSet
// field/proxy_container.ssp
override def structs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] =
underlying.structs
override def structsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct]] =
underlying.structsOption
override def structsOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] =
underlying.structsOrDefault
override def structsOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] =
underlying.structsOrNull
override def structsOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] =
underlying.structsOrThrow
override def structsIsSet: Boolean = underlying.structsIsSet
// field/proxy_container.ssp
override def unions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] =
underlying.unions
override def unionsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union]] =
underlying.unionsOption
override def unionsOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] =
underlying.unionsOrDefault
override def unionsOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] =
underlying.unionsOrNull
override def unionsOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] =
underlying.unionsOrThrow
override def unionsIsSet: Boolean = underlying.unionsIsSet
// field/proxy_container.ssp
override def exceptions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] =
underlying.exceptions
override def exceptionsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception]] =
underlying.exceptionsOption
override def exceptionsOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] =
underlying.exceptionsOrDefault
override def exceptionsOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] =
underlying.exceptionsOrNull
override def exceptionsOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] =
underlying.exceptionsOrThrow
override def exceptionsIsSet: Boolean = underlying.exceptionsIsSet
// field/proxy_container.ssp
override def services: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] =
underlying.services
override def servicesOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service]] =
underlying.servicesOption
override def servicesOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] =
underlying.servicesOrDefault
override def servicesOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] =
underlying.servicesOrNull
override def servicesOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] =
underlying.servicesOrThrow
override def servicesIsSet: Boolean = underlying.servicesIsSet
// field/proxy_ref.ssp
// NOTE: struct-ref fields like typeRegistry have no *OrDefault accessor (there is no
// sensible empty default for a struct, unlike the Seq fields above).
override def typeRegistry: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry =
underlying.typeRegistry
override def typeRegistryOption: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry] =
underlying.typeRegistryOption
override def typeRegistryOrNull: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry =
underlying.typeRegistryOrNull
override def typeRegistryOrThrow: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry =
underlying.typeRegistryOrThrow
override def typeRegistryIsSet: Boolean = underlying.typeRegistryIsSet
// Comparison, reset, and Thrift (de)serialization all delegate to `underlying`.
override def compare(that: Program): Int = underlying.compare(that)
// Side-effecting arity-0 method: invoke with explicit `()`, and use the
// `: Unit = { ... }` result-type form (matching the *Unset() methods elsewhere in
// this file) instead of deprecated procedure syntax.
override def clear(): Unit = { underlying.clear() }
// Populate `underlying` from the given Thrift input protocol.
override def read(iprot: org.apache.thrift.protocol.TProtocol): Unit = { underlying.read(iprot) }
// Serialize `underlying` to the given Thrift output protocol.
override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = { underlying.write(oprot) }
// Functional update: builds a copy via `underlying.copy`, with every parameter
// defaulting to this proxy's current value (the *OrNull view, so fields that are
// unset here remain unset in the copy).
override def copy(
namespaces: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] =
namespacesOrNull,
includes: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] = includesOrNull,
constants: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] = constantsOrNull,
enums: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] = enumsOrNull,
typedefs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] = typedefsOrNull,
structs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] = structsOrNull,
unions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] = unionsOrNull,
exceptions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] =
exceptionsOrNull,
services: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] = servicesOrNull,
typeRegistry: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry = typeRegistryOrNull
): Program = underlying.copy(
namespaces = namespaces,
includes = includes,
constants = constants,
enums = enums,
typedefs = typedefs,
structs = structs,
unions = unions,
exceptions = exceptions,
services = services,
typeRegistry = typeRegistry
)
// Copy/merge/mutation helpers and Thrift field reflection, all forwarded to `underlying`.
override def mutableCopy(): MutableProgram = underlying.mutableCopy()
override def mergeCopy(that: Program): Program = underlying.mergeCopy(that)
override def mutable: MutableProgram = underlying.mutable
override def deepCopy(): Program = underlying.deepCopy()
override def fieldForId(id: Int): Program._Fields = underlying.fieldForId(id)
override def isSet(field: Program._Fields): Boolean = underlying.isSet(field)
override def getFieldValue(field: Program._Fields): AnyRef = underlying.getFieldValue(field)
override def setFieldValue(field: Program._Fields, value: AnyRef) { underlying.setFieldValue(field, value) }
// Universal methods are delegated too, so a proxy compares, hashes and prints
// exactly like the record it wraps.
override def hashCode(): Int = underlying.hashCode
override def equals(that: Any): Boolean = underlying.equals(that)
override def toString(): String = underlying.toString
}
/**
 * Mutable counterpart of ProgramProxy: adds the generated setter (`f_=`) and
 * unsetter (`fUnset`) pair for every field, each forwarding to `underlying`.
 * The read-side accessors are inherited from ProgramProxy.
 */
trait MutableProgramProxy extends MutableProgram with ProgramProxy {
protected def underlying: MutableProgram
override def namespaces_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace]
): Unit = { underlying.namespaces_=(x) }
override def namespacesUnset(): Unit = { underlying.namespacesUnset() }
override def includes_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include]
): Unit = { underlying.includes_=(x) }
override def includesUnset(): Unit = { underlying.includesUnset() }
override def constants_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const]
): Unit = { underlying.constants_=(x) }
override def constantsUnset(): Unit = { underlying.constantsUnset() }
override def enums_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum]
): Unit = { underlying.enums_=(x) }
override def enumsUnset(): Unit = { underlying.enumsUnset() }
override def typedefs_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef]
): Unit = { underlying.typedefs_=(x) }
override def typedefsUnset(): Unit = { underlying.typedefsUnset() }
override def structs_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct]
): Unit = { underlying.structs_=(x) }
override def structsUnset(): Unit = { underlying.structsUnset() }
override def unions_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union]
): Unit = { underlying.unions_=(x) }
override def unionsUnset(): Unit = { underlying.unionsUnset() }
override def exceptions_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception]
): Unit = { underlying.exceptions_=(x) }
override def exceptionsUnset(): Unit = { underlying.exceptionsUnset() }
override def services_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service]
): Unit = { underlying.services_=(x) }
override def servicesUnset(): Unit = { underlying.servicesUnset() }
override def typeRegistry_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry): Unit = {
underlying.typeRegistry_=(x)
}
override def typeRegistryUnset(): Unit = { underlying.typeRegistryUnset() }
// Same copy contract as ProgramProxy.copy, but returns the mutable record type.
override def copy(
namespaces: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] =
namespacesOrNull,
includes: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] = includesOrNull,
constants: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] = constantsOrNull,
enums: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] = enumsOrNull,
typedefs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] = typedefsOrNull,
structs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] = structsOrNull,
unions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] = unionsOrNull,
exceptions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] =
exceptionsOrNull,
services: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] = servicesOrNull,
typeRegistry: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry = typeRegistryOrNull
): MutableProgram = underlying.copy(
namespaces = namespaces,
includes = includes,
constants = constants,
enums = enums,
typedefs = typedefs,
structs = structs,
unions = unions,
exceptions = exceptions,
services = services,
typeRegistry = typeRegistry
)
override def merge(that: Program): Unit = underlying.merge(that)
}
final class RawProgram
extends JavaProgramRaw[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union,
Program,
RawProgram,
ProgramMeta
]
with MutableProgram {
override def meta: ProgramMeta = Program
// fields
// Each field is backed by a nullable private var where `null` means "unset":
// fIsSet == (_f != null); fOrDefault yields Seq.empty when unset; fOrThrow raises
// NullPointerException when unset; fUnset() resets the var back to null.
// Field #1 - namespaces
private var _namespaces: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] =
null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace]
override def namespaces: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] =
namespacesOrDefault
override def namespaces_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace]
): Unit = { _namespaces = x }
override def namespacesOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace]] =
if (namespacesIsSet) Some(_namespaces) else None
override def namespacesOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] =
if (namespacesIsSet) _namespaces else scala.collection.Seq.empty
override def namespacesOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] = _namespaces
override def namespacesOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] = if (namespacesIsSet)
_namespaces
else throw new java.lang.NullPointerException("field namespaces of Program missing")
override def namespacesIsSet: Boolean = _namespaces != null
override def namespacesUnset(): Unit = { _namespaces = null }
// Field #2 - includes
private var _includes: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] =
null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include]
override def includes: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] =
includesOrDefault
override def includes_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include]
): Unit = { _includes = x }
override def includesOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include]] =
if (includesIsSet) Some(_includes) else None
override def includesOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] =
if (includesIsSet) _includes else scala.collection.Seq.empty
override def includesOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] = _includes
override def includesOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] =
if (includesIsSet) _includes else throw new java.lang.NullPointerException("field includes of Program missing")
override def includesIsSet: Boolean = _includes != null
override def includesUnset(): Unit = { _includes = null }
// Fields #3-#5 follow the same nullable-backing-var accessor pattern as field #1.
// Field #3 - constants
private var _constants
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const]
override def constants: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] =
constantsOrDefault
override def constants_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const]
): Unit = { _constants = x }
override def constantsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const]] =
if (constantsIsSet) Some(_constants) else None
override def constantsOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] =
if (constantsIsSet) _constants else scala.collection.Seq.empty
override def constantsOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] = _constants
override def constantsOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] =
if (constantsIsSet) _constants else throw new java.lang.NullPointerException("field constants of Program missing")
override def constantsIsSet: Boolean = _constants != null
override def constantsUnset(): Unit = { _constants = null }
// Field #4 - enums
private var _enums
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum]
override def enums: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] =
enumsOrDefault
override def enums_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum]
): Unit = { _enums = x }
override def enumsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum]] =
if (enumsIsSet) Some(_enums) else None
override def enumsOrDefault: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] =
if (enumsIsSet) _enums else scala.collection.Seq.empty
override def enumsOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] =
_enums
override def enumsOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] =
if (enumsIsSet) _enums else throw new java.lang.NullPointerException("field enums of Program missing")
override def enumsIsSet: Boolean = _enums != null
override def enumsUnset(): Unit = { _enums = null }
// Field #5 - typedefs
private var _typedefs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] =
null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef]
override def typedefs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] =
typedefsOrDefault
override def typedefs_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef]
): Unit = { _typedefs = x }
override def typedefsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef]] =
if (typedefsIsSet) Some(_typedefs) else None
override def typedefsOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] =
if (typedefsIsSet) _typedefs else scala.collection.Seq.empty
override def typedefsOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] = _typedefs
override def typedefsOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] =
if (typedefsIsSet) _typedefs else throw new java.lang.NullPointerException("field typedefs of Program missing")
override def typedefsIsSet: Boolean = _typedefs != null
override def typedefsUnset(): Unit = { _typedefs = null }
// Fields #6-#8 follow the same nullable-backing-var accessor pattern as field #1.
// Field #6 - structs
private var _structs
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct]
override def structs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] =
structsOrDefault
override def structs_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct]
): Unit = { _structs = x }
override def structsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct]] =
if (structsIsSet) Some(_structs) else None
override def structsOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] =
if (structsIsSet) _structs else scala.collection.Seq.empty
override def structsOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] =
_structs
override def structsOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] =
if (structsIsSet) _structs else throw new java.lang.NullPointerException("field structs of Program missing")
override def structsIsSet: Boolean = _structs != null
override def structsUnset(): Unit = { _structs = null }
// Field #7 - unions
private var _unions
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] = null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union]
override def unions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] =
unionsOrDefault
override def unions_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union]
): Unit = { _unions = x }
override def unionsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union]] =
if (unionsIsSet) Some(_unions) else None
override def unionsOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] =
if (unionsIsSet) _unions else scala.collection.Seq.empty
override def unionsOrNull: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] =
_unions
override def unionsOrThrow: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] =
if (unionsIsSet) _unions else throw new java.lang.NullPointerException("field unions of Program missing")
override def unionsIsSet: Boolean = _unions != null
override def unionsUnset(): Unit = { _unions = null }
// Field #8 - exceptions
private var _exceptions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] =
null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception]
override def exceptions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] =
exceptionsOrDefault
override def exceptions_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception]
): Unit = { _exceptions = x }
override def exceptionsOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception]] =
if (exceptionsIsSet) Some(_exceptions) else None
override def exceptionsOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] =
if (exceptionsIsSet) _exceptions else scala.collection.Seq.empty
override def exceptionsOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] = _exceptions
override def exceptionsOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] = if (exceptionsIsSet)
_exceptions
else throw new java.lang.NullPointerException("field exceptions of Program missing")
override def exceptionsIsSet: Boolean = _exceptions != null
override def exceptionsUnset(): Unit = { _exceptions = null }
// Field #9 - services
private var _services: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] =
null // Underlying type: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service]
override def services: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] =
servicesOrDefault
override def services_=(
x: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service]
): Unit = { _services = x }
override def servicesOption
: Option[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service]] =
if (servicesIsSet) Some(_services) else None
override def servicesOrDefault
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] =
if (servicesIsSet) _services else scala.collection.Seq.empty
override def servicesOrNull
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] = _services
override def servicesOrThrow
: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] =
if (servicesIsSet) _services else throw new java.lang.NullPointerException("field services of Program missing")
override def servicesIsSet: Boolean = _services != null
override def servicesUnset(): Unit = { _services = null }
// Field #98 - typeRegistry
// Struct-ref field: note the plain accessor delegates to typeRegistryOrThrow (it
// raises NPE when unset), unlike the Seq fields above which fall back to OrDefault.
private var _typeRegistry
: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry = null // Underlying type: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry
override def typeRegistry: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry =
typeRegistryOrThrow
override def typeRegistry_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry): Unit = {
_typeRegistry = x
}
override def typeRegistryOption: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry] =
if (typeRegistryIsSet) Some(_typeRegistry) else None
override def typeRegistryOrNull: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry =
_typeRegistry
override def typeRegistryOrThrow: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry =
if (typeRegistryIsSet) _typeRegistry
else throw new java.lang.NullPointerException("field typeRegistry of Program missing")
override def typeRegistryIsSet: Boolean = _typeRegistry != null
override def typeRegistryUnset(): Unit = { _typeRegistry = null }
// end fields
// Unrecognized fields captured during read(); built by prepending, so the list is in
// reverse encounter order. Replayed during write() when the runtime preserves them.
private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
// Serialize this record to `oprot`. Only fields whose backing var is set are written;
// each Seq field goes out as a Thrift LIST of STRUCT elements, sized up front.
// Field order here mirrors the declared field ids (1..9, then 98).
override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
oprot.writeStructBegin(Program.PROGRAM_SDESC)
if (namespacesIsSet) {
oprot.writeFieldBegin(Program.NAMESPACES_FDESC)
oprot.writeListBegin(
new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _namespaces.size)
)
_namespaces.foreach(element => {
element.write(oprot)
})
oprot.writeListEnd()
oprot.writeFieldEnd()
}
if (includesIsSet) {
oprot.writeFieldBegin(Program.INCLUDES_FDESC)
oprot.writeListBegin(
new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _includes.size)
)
_includes.foreach(element => {
element.write(oprot)
})
oprot.writeListEnd()
oprot.writeFieldEnd()
}
if (constantsIsSet) {
oprot.writeFieldBegin(Program.CONSTANTS_FDESC)
oprot.writeListBegin(
new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _constants.size)
)
_constants.foreach(element => {
element.write(oprot)
})
oprot.writeListEnd()
oprot.writeFieldEnd()
}
if (enumsIsSet) {
oprot.writeFieldBegin(Program.ENUMS_FDESC)
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _enums.size))
_enums.foreach(element => {
element.write(oprot)
})
oprot.writeListEnd()
oprot.writeFieldEnd()
}
if (typedefsIsSet) {
oprot.writeFieldBegin(Program.TYPEDEFS_FDESC)
oprot.writeListBegin(
new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _typedefs.size)
)
_typedefs.foreach(element => {
element.write(oprot)
})
oprot.writeListEnd()
oprot.writeFieldEnd()
}
if (structsIsSet) {
oprot.writeFieldBegin(Program.STRUCTS_FDESC)
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _structs.size))
_structs.foreach(element => {
element.write(oprot)
})
oprot.writeListEnd()
oprot.writeFieldEnd()
}
if (unionsIsSet) {
oprot.writeFieldBegin(Program.UNIONS_FDESC)
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _unions.size))
_unions.foreach(element => {
element.write(oprot)
})
oprot.writeListEnd()
oprot.writeFieldEnd()
}
if (exceptionsIsSet) {
oprot.writeFieldBegin(Program.EXCEPTIONS_FDESC)
oprot.writeListBegin(
new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _exceptions.size)
)
_exceptions.foreach(element => {
element.write(oprot)
})
oprot.writeListEnd()
oprot.writeFieldEnd()
}
if (servicesIsSet) {
oprot.writeFieldBegin(Program.SERVICES_FDESC)
oprot.writeListBegin(
new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, _services.size)
)
_services.foreach(element => {
element.write(oprot)
})
oprot.writeListEnd()
oprot.writeFieldEnd()
}
// typeRegistry is a bare struct field (no list wrapper).
if (typeRegistryIsSet) {
oprot.writeFieldBegin(Program.TYPEREGISTRY_FDESC)
_typeRegistry.write(oprot)
oprot.writeFieldEnd()
}
// Replay any unknown fields captured by read(); `unknownFields` was built by
// prepending, so `.reverse` restores the original encounter order.
if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
unknownFields.reverse foreach { _.write(oprot) }
}
oprot.writeFieldStop()
oprot.writeStructEnd()
}
override def read(iprot: org.apache.thrift.protocol.TProtocol) {
// Unknown fields in this read go here.
var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
currentUnknownFieldsOpt match {
case Some(uf) => uf
case None => {
val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
this,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
)
unknownFields = uf :: unknownFields
currentUnknownFieldsOpt = Some(uf)
uf
}
}
iprot.readStructBegin()
var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
// Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
// name to look up the id and type. This allows us to use those protocols naturally.
var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
Program.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
} else {
wire_field_header
}
try {
field_header.id match {
case 1 => { // namespaces
if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
_namespaces = {
val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
val builder = scala.collection.immutable.Vector
.newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace]
var i: Int = tlist.size
builder.sizeHint(tlist.size)
while (i > 0) {
builder += ({
val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace.createRawRecord
s.read(iprot)
s
})
i -= 1
}
builder.result()
}
iprot.readListEnd()
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case 2 => { // includes
if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
_includes = {
val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
val builder = scala.collection.immutable.Vector
.newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include]
var i: Int = tlist.size
builder.sizeHint(tlist.size)
while (i > 0) {
builder += ({
val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include.createRawRecord
s.read(iprot)
s
})
i -= 1
}
builder.result()
}
iprot.readListEnd()
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case 3 => { // constants
if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
_constants = {
val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
val builder = scala.collection.immutable.Vector
.newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const]
var i: Int = tlist.size
builder.sizeHint(tlist.size)
while (i > 0) {
builder += ({
val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const.createRawRecord
s.read(iprot)
s
})
i -= 1
}
builder.result()
}
iprot.readListEnd()
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case 4 => { // enums
if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
_enums = {
val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
val builder = scala.collection.immutable.Vector
.newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum]
var i: Int = tlist.size
builder.sizeHint(tlist.size)
while (i > 0) {
builder += ({
val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum.createRawRecord
s.read(iprot)
s
})
i -= 1
}
builder.result()
}
iprot.readListEnd()
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case 5 => { // typedefs
if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
_typedefs = {
val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
val builder = scala.collection.immutable.Vector
.newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef]
var i: Int = tlist.size
builder.sizeHint(tlist.size)
while (i > 0) {
builder += ({
val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef.createRawRecord
s.read(iprot)
s
})
i -= 1
}
builder.result()
}
iprot.readListEnd()
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case 6 => { // structs
if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
_structs = {
val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
val builder = scala.collection.immutable.Vector
.newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct]
var i: Int = tlist.size
builder.sizeHint(tlist.size)
while (i > 0) {
builder += ({
val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct.createRawRecord
s.read(iprot)
s
})
i -= 1
}
builder.result()
}
iprot.readListEnd()
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case 7 => { // unions
if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
_unions = {
val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
val builder = scala.collection.immutable.Vector
.newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union]
var i: Int = tlist.size
builder.sizeHint(tlist.size)
while (i > 0) {
builder += ({
val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union.createRawRecord
s.read(iprot)
s
})
i -= 1
}
builder.result()
}
iprot.readListEnd()
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case 8 => { // exceptions
if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
_exceptions = {
val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
val builder = scala.collection.immutable.Vector
.newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception]
var i: Int = tlist.size
builder.sizeHint(tlist.size)
while (i > 0) {
builder += ({
val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception.createRawRecord
s.read(iprot)
s
})
i -= 1
}
builder.result()
}
iprot.readListEnd()
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case 9 => { // services
if (field_header.`type` == org.apache.thrift.protocol.TType.LIST) {
_services = {
val tlist: org.apache.thrift.protocol.TList = iprot.readListBegin()
val builder = scala.collection.immutable.Vector
.newBuilder[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service]
var i: Int = tlist.size
builder.sizeHint(tlist.size)
while (i > 0) {
builder += ({
val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service.createRawRecord
s.read(iprot)
s
})
i -= 1
}
builder.result()
}
iprot.readListEnd()
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case 98 => { // typeRegistry
if (field_header.`type` == org.apache.thrift.protocol.TType.STRUCT) {
_typeRegistry = ({
val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry.createRawRecord
s.read(iprot)
s
})
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
case _ => {
if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
}
}
} // end match
} catch {
case e: org.apache.thrift.TException =>
throw new org.apache.thrift.TException(
"Error reading field %d in structure Program".format(field_header.id),
e
)
}
iprot.readFieldEnd()
wire_field_header = iprot.readFieldBegin()
} // end while
iprot.readStructEnd()
}
override def merge(that: Program): Unit = {
if (that.namespacesIsSet && !this.namespacesIsSet) {
this.namespaces_=(that.namespacesOrDefault)
} else if (that.namespacesIsSet && this.namespacesIsSet) {
this.namespaces_=(this.namespaces ++ that.namespaces)
}
if (that.includesIsSet && !this.includesIsSet) {
this.includes_=(that.includesOrDefault)
} else if (that.includesIsSet && this.includesIsSet) {
this.includes_=(this.includes ++ that.includes)
}
if (that.constantsIsSet && !this.constantsIsSet) {
this.constants_=(that.constantsOrDefault)
} else if (that.constantsIsSet && this.constantsIsSet) {
this.constants_=(this.constants ++ that.constants)
}
if (that.enumsIsSet && !this.enumsIsSet) {
this.enums_=(that.enumsOrDefault)
} else if (that.enumsIsSet && this.enumsIsSet) {
this.enums_=(this.enums ++ that.enums)
}
if (that.typedefsIsSet && !this.typedefsIsSet) {
this.typedefs_=(that.typedefsOrDefault)
} else if (that.typedefsIsSet && this.typedefsIsSet) {
this.typedefs_=(this.typedefs ++ that.typedefs)
}
if (that.structsIsSet && !this.structsIsSet) {
this.structs_=(that.structsOrDefault)
} else if (that.structsIsSet && this.structsIsSet) {
this.structs_=(this.structs ++ that.structs)
}
if (that.unionsIsSet && !this.unionsIsSet) {
this.unions_=(that.unionsOrDefault)
} else if (that.unionsIsSet && this.unionsIsSet) {
this.unions_=(this.unions ++ that.unions)
}
if (that.exceptionsIsSet && !this.exceptionsIsSet) {
this.exceptions_=(that.exceptionsOrDefault)
} else if (that.exceptionsIsSet && this.exceptionsIsSet) {
this.exceptions_=(this.exceptions ++ that.exceptions)
}
if (that.servicesIsSet && !this.servicesIsSet) {
this.services_=(that.servicesOrDefault)
} else if (that.servicesIsSet && this.servicesIsSet) {
this.services_=(this.services ++ that.services)
}
if (that.typeRegistryIsSet && !this.typeRegistryIsSet) {
this.typeRegistry_=(that.typeRegistryOrNull)
}
}
override def mergeCopy(that: Program): Program = {
val ret = Program.createRawRecord
ret.merge(this)
ret.merge(that)
ret
}
override def equals(that: Any): Boolean = that match {
case null => false
case o: Program => this.equals(o)
case _ => false
}
def equals(that: Program): Boolean = {
that != null &&
(if (this.namespacesIsSet) (that.namespacesIsSet && this.namespacesOrDefault == that.namespacesOrDefault)
else !that.namespacesIsSet) &&
(if (this.includesIsSet) (that.includesIsSet && this.includesOrDefault == that.includesOrDefault)
else !that.includesIsSet) &&
(if (this.constantsIsSet) (that.constantsIsSet && this.constantsOrDefault == that.constantsOrDefault)
else !that.constantsIsSet) &&
(if (this.enumsIsSet) (that.enumsIsSet && this.enumsOrDefault == that.enumsOrDefault) else !that.enumsIsSet) &&
(if (this.typedefsIsSet) (that.typedefsIsSet && this.typedefsOrDefault == that.typedefsOrDefault)
else !that.typedefsIsSet) &&
(if (this.structsIsSet) (that.structsIsSet && this.structsOrDefault == that.structsOrDefault)
else !that.structsIsSet) &&
(if (this.unionsIsSet) (that.unionsIsSet && this.unionsOrDefault == that.unionsOrDefault) else !that.unionsIsSet) &&
(if (this.exceptionsIsSet) (that.exceptionsIsSet && this.exceptionsOrDefault == that.exceptionsOrDefault)
else !that.exceptionsIsSet) &&
(if (this.servicesIsSet) (that.servicesIsSet && this.servicesOrDefault == that.servicesOrDefault)
else !that.servicesIsSet) &&
(if (this.typeRegistryIsSet) (that.typeRegistryIsSet && this.typeRegistryOrNull == that.typeRegistryOrNull)
else !that.typeRegistryIsSet) &&
true
}
override def hashCode(): Int = {
// We use a fixed seed, for consistency.
val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
if (namespacesIsSet) hasher.append(_namespaces.##)
if (includesIsSet) hasher.append(_includes.##)
if (constantsIsSet) hasher.append(_constants.##)
if (enumsIsSet) hasher.append(_enums.##)
if (typedefsIsSet) hasher.append(_typedefs.##)
if (structsIsSet) hasher.append(_structs.##)
if (unionsIsSet) hasher.append(_unions.##)
if (exceptionsIsSet) hasher.append(_exceptions.##)
if (servicesIsSet) hasher.append(_services.##)
if (typeRegistryIsSet) hasher.append(_typeRegistry.##)
hasher.hash
}
// Returns the values of the set fields on this object, in id order.
def getSetFields: Seq[Any] = {
var ret: List[Any] = Nil
if (namespacesIsSet) ret = namespacesOrDefault :: ret
if (includesIsSet) ret = includesOrDefault :: ret
if (constantsIsSet) ret = constantsOrDefault :: ret
if (enumsIsSet) ret = enumsOrDefault :: ret
if (typedefsIsSet) ret = typedefsOrDefault :: ret
if (structsIsSet) ret = structsOrDefault :: ret
if (unionsIsSet) ret = unionsOrDefault :: ret
if (exceptionsIsSet) ret = exceptionsOrDefault :: ret
if (servicesIsSet) ret = servicesOrDefault :: ret
if (typeRegistryIsSet) ret = typeRegistryOrNull :: ret
ret.reverse
}
override def clear() {
namespacesUnset()
includesUnset()
constantsUnset()
enumsUnset()
typedefsUnset()
structsUnset()
unionsUnset()
exceptionsUnset()
servicesUnset()
typeRegistryUnset()
unknownFields = Nil
}
def fieldForId(id: Int): Program._Fields = id match {
case 1 => Program._Fields.namespaces
case 2 => Program._Fields.includes
case 3 => Program._Fields.constants
case 4 => Program._Fields.enums
case 5 => Program._Fields.typedefs
case 6 => Program._Fields.structs
case 7 => Program._Fields.unions
case 8 => Program._Fields.exceptions
case 9 => Program._Fields.services
case 98 => Program._Fields.typeRegistry
case _ => null
}
def isSet(field: Program._Fields): Boolean = field match {
case Program._Fields.namespaces => namespacesIsSet
case Program._Fields.includes => includesIsSet
case Program._Fields.constants => constantsIsSet
case Program._Fields.enums => enumsIsSet
case Program._Fields.typedefs => typedefsIsSet
case Program._Fields.structs => structsIsSet
case Program._Fields.unions => unionsIsSet
case Program._Fields.exceptions => exceptionsIsSet
case Program._Fields.services => servicesIsSet
case Program._Fields.typeRegistry => typeRegistryIsSet
case _ => false
}
def getFieldValue(field: Program._Fields): AnyRef = field match {
case Program._Fields.namespaces => namespacesOrDefault.asInstanceOf[AnyRef]
case Program._Fields.includes => includesOrDefault.asInstanceOf[AnyRef]
case Program._Fields.constants => constantsOrDefault.asInstanceOf[AnyRef]
case Program._Fields.enums => enumsOrDefault.asInstanceOf[AnyRef]
case Program._Fields.typedefs => typedefsOrDefault.asInstanceOf[AnyRef]
case Program._Fields.structs => structsOrDefault.asInstanceOf[AnyRef]
case Program._Fields.unions => unionsOrDefault.asInstanceOf[AnyRef]
case Program._Fields.exceptions => exceptionsOrDefault.asInstanceOf[AnyRef]
case Program._Fields.services => servicesOrDefault.asInstanceOf[AnyRef]
case Program._Fields.typeRegistry => typeRegistryOrNull.asInstanceOf[AnyRef]
case _ => throw new IllegalStateException
}
  /** Sets `field` on this record to `value`.
    *
    * `value` is cast unchecked to the field's declared type; a value of the
    * wrong runtime type surfaces later as a ClassCastException when the field
    * is used. Unrecognized field descriptors are silently ignored.
    */
  def setFieldValue(field: Program._Fields, value: AnyRef) {
    field match {
      case Program._Fields.namespaces =>
        namespaces_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace]]
        )
      case Program._Fields.includes =>
        includes_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include]]
        )
      case Program._Fields.constants =>
        constants_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const]]
        )
      case Program._Fields.enums =>
        enums_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum]]
        )
      case Program._Fields.typedefs =>
        typedefs_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef]]
        )
      case Program._Fields.structs =>
        structs_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct]]
        )
      case Program._Fields.unions =>
        unions_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union]]
        )
      case Program._Fields.exceptions =>
        exceptions_=(
          value
            .asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception]]
        )
      case Program._Fields.services =>
        services_=(
          value.asInstanceOf[scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service]]
        )
      case Program._Fields.typeRegistry =>
        typeRegistry_=(value.asInstanceOf[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry])
      case _ =>
    }
  }
override def deepCopy(): RawProgram = {
// May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
write(prot)
val ret = Program.createRawRecord
ret.read(prot)
ret
}
override def copy(
namespaces: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Namespace] =
namespacesOrNull,
includes: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Include] = includesOrNull,
constants: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Const] = constantsOrNull,
enums: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Enum] = enumsOrNull,
typedefs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typedef] = typedefsOrNull,
structs: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Struct] = structsOrNull,
unions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Union] = unionsOrNull,
exceptions: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Exception] =
exceptionsOrNull,
services: scala.collection.Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Service] = servicesOrNull,
typeRegistry: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TypeRegistry = typeRegistryOrNull
): RawProgram = {
val ret = new RawProgram
if (namespaces != null) ret.namespaces_=(namespaces)
if (includes != null) ret.includes_=(includes)
if (constants != null) ret.constants_=(constants)
if (enums != null) ret.enums_=(enums)
if (typedefs != null) ret.typedefs_=(typedefs)
if (structs != null) ret.structs_=(structs)
if (unions != null) ret.unions_=(unions)
if (exceptions != null) ret.exceptions_=(exceptions)
if (services != null) ret.services_=(services)
if (typeRegistry != null) ret.typeRegistry_=(typeRegistry)
ret
}
override def toString: String = {
val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
write(oprot)
trans.toString("UTF8")
}
}
/** Companion for SimpleContainerType: the shared meta instance, a typed
  * builder, and implicit Ordering/Comparator instances.
  */
object SimpleContainerType extends SimpleContainerTypeMeta {
  object Builder {
    // Phantom-state type aliases for the builder. The State parameter tracks
    // (at the type level) which required fields have been supplied; this
    // struct has no required fields, so both states alias Any.
    type HasAll = Any
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  class Builder[+State] private[SimpleContainerType] (private var obj: RawSimpleContainerType) {
    // Each field has two setters: a plain one, and an Option overload where
    // Some sets the field and None unsets it.
    def listType(
      v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType
    ): SimpleContainerType.Builder[State] = {
      obj.listType_=(v)
      this
    }
    def listType(
      vOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType]
    ): SimpleContainerType.Builder[State] = {
      vOpt match {
        case Some(v) => obj.listType_=(v)
        case None => obj.listTypeUnset()
      }
      this
    }
    def setType(
      v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType
    ): SimpleContainerType.Builder[State] = {
      obj.setType_=(v)
      this
    }
    def setType(
      vOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType]
    ): SimpleContainerType.Builder[State] = {
      vOpt match {
        case Some(v) => obj.setType_=(v)
        case None => obj.setTypeUnset()
      }
      this
    }
    def mapType(
      v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType
    ): SimpleContainerType.Builder[State] = {
      obj.mapType_=(v)
      this
    }
    def mapType(
      vOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType]
    ): SimpleContainerType.Builder[State] = {
      vOpt match {
        case Some(v) => obj.mapType_=(v)
        case None => obj.mapTypeUnset()
      }
      this
    }
    // Hands out the underlying mutable record exactly once: the builder is
    // invalidated (obj nulled) so a second result() call fails fast instead
    // of aliasing the already-released record.
    def resultMutable(): MutableSimpleContainerType = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("SimpleContainerType.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result(): SimpleContainerType = resultMutable()
  }
  def newBuilder: SimpleContainerType.Builder.AllUnspecified = new Builder(SimpleContainerType.createRawRecord)
  implicit val companionProvider: SimpleContainerTypeCompanionProvider = new SimpleContainerTypeCompanionProvider
  // Ordering and Comparator both delegate to SimpleContainerType.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[SimpleContainerType] = {
    new _root_.scala.math.Ordering[SimpleContainerType] {
      override def compare(x: SimpleContainerType, y: SimpleContainerType): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[SimpleContainerType] = {
    new _root_.java.util.Comparator[SimpleContainerType] {
      override def compare(x: SimpleContainerType, y: SimpleContainerType): Int = x.compare(y)
    }
  }
}
/** Metadata for SimpleContainerType: Thrift wire descriptors, field-id
  * mappings, record factories, and Spindle field descriptors. Field ids and
  * wire names here define the serialized format; they must not change.
  */
class SimpleContainerTypeMeta
  extends JavaSimpleContainerTypeMeta[SimpleContainerType, RawSimpleContainerType, SimpleContainerTypeMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[SimpleContainerType] {
  override def recordName: String = "SimpleContainerType"
  // Thrift descriptors.
  val SIMPLECONTAINERTYPE_SDESC: org.apache.thrift.protocol.TStruct =
    new org.apache.thrift.protocol.TStruct("SimpleContainerType")
  val LISTTYPE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "listType",
      org.apache.thrift.protocol.TType.STRUCT,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val SETTYPE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "setType",
      org.apache.thrift.protocol.TType.STRUCT,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val MAPTYPE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "mapType",
      org.apache.thrift.protocol.TType.STRUCT,
      3, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  // Sentinel descriptor (id -1, VOID) used for unrecognized fields.
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Lookup for protocols (e.g. JSON/BSON) that carry field names, not ids.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "listType" -> LISTTYPE_FDESC,
    "setType" -> SETTYPE_FDESC,
    "mapType" -> MAPTYPE_FDESC
  )
  object _Fields {
    case object listType extends _Fields(1, "listType")
    case object setType extends _Fields(2, "setType")
    case object mapType extends _Fields(3, "mapType")
  }
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.listType,
    2.toShort -> _Fields.setType,
    3.toShort -> _Fields.mapType
  )
  // Record factories; the raw (mutable) record backs all three entry points.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: SimpleContainerType = createRawRecord
  override def createRawRecord: RawSimpleContainerType = new RawSimpleContainerType
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[SimpleContainerType] = {
    if (x.isInstanceOf[SimpleContainerType]) Some(x.asInstanceOf[SimpleContainerType]) else None
  }
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
  // Spindle Descriptors. One StructFieldDescriptor per field, wiring the
  // getter/setter/unsetter of the raw record to the field's meta.
  val listType =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.StructFieldDescriptor[
      SimpleContainerType,
      SimpleContainerTypeMeta,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListTypeMeta
    ](
      name = "listType",
      longName = "listType",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.listTypeOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SimpleContainerType],
        v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType
      ) => { r.asInstanceOf[RawSimpleContainerType].listType_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SimpleContainerType]) => {
        r.asInstanceOf[RawSimpleContainerType].listTypeUnset()
      },
      structMeta = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType,
      manifest = manifest[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType]
    )
  val setType =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.StructFieldDescriptor[
      SimpleContainerType,
      SimpleContainerTypeMeta,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetTypeMeta
    ](
      name = "setType",
      longName = "setType",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.setTypeOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SimpleContainerType],
        v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType
      ) => { r.asInstanceOf[RawSimpleContainerType].setType_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SimpleContainerType]) => {
        r.asInstanceOf[RawSimpleContainerType].setTypeUnset()
      },
      structMeta = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType,
      manifest = manifest[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType]
    )
  val mapType =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.StructFieldDescriptor[
      SimpleContainerType,
      SimpleContainerTypeMeta,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapTypeMeta
    ](
      name = "mapType",
      longName = "mapType",
      id = 3,
      annotations = Map(),
      owner = this,
      getter = _.mapTypeOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SimpleContainerType],
        v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType
      ) => { r.asInstanceOf[RawSimpleContainerType].mapType_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SimpleContainerType]) => {
        r.asInstanceOf[RawSimpleContainerType].mapTypeUnset()
      },
      structMeta = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType,
      manifest = manifest[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  override val fields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[
    _,
    SimpleContainerType,
    SimpleContainerTypeMeta
  ]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[
      _,
      SimpleContainerType,
      SimpleContainerTypeMeta
    ]](
      listType,
      setType,
      mapType
    )
  // Convenience constructor setting all three fields.
  def apply(
    listType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType,
    setType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType,
    mapType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType
  ): SimpleContainerType = {
    val ret = this.createRawRecord
    ret.listType_=(listType)
    ret.setType_=(setType)
    ret.mapType_=(mapType)
    ret
  }
}
/** Implicit bridge exposing the SimpleContainerType companion as a
  * CompanionProvider, so generic spindle code can summon the meta instance.
  */
class SimpleContainerTypeCompanionProvider
  extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[SimpleContainerType] {
  type CompanionT = SimpleContainerTypeMeta
  override def provide: SimpleContainerTypeMeta = SimpleContainerType
}
/** Read-side interface for the SimpleContainerType record (one of listType,
  * setType, mapType). Provides ordering, copying, and builder conversion;
  * mutation lives on MutableSimpleContainerType.
  */
trait SimpleContainerType
  extends JavaSimpleContainerType[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType,
    SimpleContainerType,
    RawSimpleContainerType,
    SimpleContainerTypeMeta
  ]
  with org.apache.thrift.TBase[SimpleContainerType, SimpleContainerType._Fields] {
  override def meta: SimpleContainerTypeMeta
  // Total order over records: null sorts first; then fields compare in id
  // order, unset-before-set, then by value. The `cmp` var is assigned inside
  // each condition so the first nonzero comparison short-circuits the chain.
  override def compare(that: SimpleContainerType): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.listTypeIsSet.compareTo(that.listTypeIsSet)
      cmp != 0
    }) cmp
    else if (this.listTypeIsSet && {
      cmp = this.listTypeOrNull.compareTo(that.listTypeOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.setTypeIsSet.compareTo(that.setTypeIsSet)
      cmp != 0
    }) cmp
    else if (this.setTypeIsSet && {
      cmp = this.setTypeOrNull.compareTo(that.setTypeOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.mapTypeIsSet.compareTo(that.mapTypeIsSet)
      cmp != 0
    }) cmp
    else if (this.mapTypeIsSet && {
      cmp = this.mapTypeOrNull.compareTo(that.mapTypeOrNull)
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: SimpleContainerType): Boolean = { this.compare(that) < 0 }
  override def >(that: SimpleContainerType): Boolean = { this.compare(that) > 0 }
  override def <=(that: SimpleContainerType): Boolean = { this.compare(that) <= 0 }
  override def >=(that: SimpleContainerType): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: SimpleContainerType): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): SimpleContainerType
  def copy(
    listType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType = listTypeOrNull,
    setType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType = setTypeOrNull,
    mapType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType = mapTypeOrNull
  ): SimpleContainerType
  // Shallow copy into a fresh mutable record: field references are shared,
  // not deep-copied.
  def mutableCopy(): MutableSimpleContainerType = {
    val ret = SimpleContainerType.createRawRecord
    if (listTypeIsSet) ret.listType_=(listTypeOrNull)
    if (setTypeIsSet) ret.setType_=(setTypeOrNull)
    if (mapTypeIsSet) ret.mapType_=(mapTypeOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
    *
    * If the underlying implementation is mutable, `this` will be returned.
    * If the underlying implementation is immutable, a mutable copy will be returned.
    *
    * After mutating the instance returned by this method, the original instance
    * (on which `mutable` was called) will be in an undefined state. It may or may
    * not have been modified, depending on whether it was immutable or not.
    *
    * This is included as an optimization for when we want access to a Mutable record
    * but don't want to pay the cost of copying every time.
    */
  def mutable: MutableSimpleContainerType
  // Builder pre-populated with this record's set fields.
  def toBuilder(): SimpleContainerType.Builder.AllSpecified = {
    val ret = new SimpleContainerType.Builder(SimpleContainerType.createRawRecord)
    if (listTypeIsSet) ret.listType(listTypeOrNull)
    if (setTypeIsSet) ret.setType(setTypeOrNull)
    if (mapTypeIsSet) ret.mapType(mapTypeOrNull)
    ret
  }
  def mergeCopy(that: SimpleContainerType): SimpleContainerType
}
/** Write-side interface for SimpleContainerType: per-field setters and
  * unsetters, in-place merge, and a copy returning the mutable type.
  */
trait MutableSimpleContainerType
  extends SimpleContainerType
  with JavaSimpleContainerTypeMutable[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType,
    SimpleContainerType,
    RawSimpleContainerType,
    SimpleContainerTypeMeta
  ] {
  def listType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType): Unit
  def listTypeUnset(): Unit
  def setType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType): Unit
  def setTypeUnset(): Unit
  def mapType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType): Unit
  def mapTypeUnset(): Unit
  def merge(that: SimpleContainerType): Unit
  def copy(
    listType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType = listTypeOrNull,
    setType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType = setTypeOrNull,
    mapType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType = mapTypeOrNull
  ): MutableSimpleContainerType
  // Already mutable, so `mutable` is identity (no copy).
  override def mutable: MutableSimpleContainerType = this
}
final class RawSimpleContainerType
extends JavaSimpleContainerTypeRaw[
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType,
io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType,
SimpleContainerType,
RawSimpleContainerType,
SimpleContainerTypeMeta
]
with MutableSimpleContainerType {
override def meta: SimpleContainerTypeMeta = SimpleContainerType
  // fields
  // Presence tracking: each field's backing var is null when unset, so
  // `xIsSet` is a null check and `xUnset()` nulls the var out. Setting a
  // field to null is therefore equivalent to unsetting it.
  // Field #1 - listType
  private var _listType
    : io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType = null // Underlying type: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType
  override def listType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType): Unit = {
    _listType = x
  }
  override def listTypeOption: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType] =
    if (listTypeIsSet) Some(_listType) else None
  override def listTypeOrNull: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType = _listType
  override def listTypeOrThrow: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType =
    if (listTypeIsSet) _listType
    else throw new java.lang.NullPointerException("field listType of SimpleContainerType missing")
  override def listTypeIsSet: Boolean = _listType != null
  override def listTypeUnset(): Unit = { _listType = null }
  // Field #2 - setType
  private var _setType
    : io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType = null // Underlying type: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType
  override def setType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType): Unit = {
    _setType = x
  }
  override def setTypeOption: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType] =
    if (setTypeIsSet) Some(_setType) else None
  override def setTypeOrNull: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType = _setType
  override def setTypeOrThrow: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType = if (setTypeIsSet)
    _setType
  else throw new java.lang.NullPointerException("field setType of SimpleContainerType missing")
  override def setTypeIsSet: Boolean = _setType != null
  override def setTypeUnset(): Unit = { _setType = null }
  // Field #3 - mapType
  private var _mapType
    : io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType = null // Underlying type: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType
  override def mapType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType): Unit = {
    _mapType = x
  }
  override def mapTypeOption: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType] =
    if (mapTypeIsSet) Some(_mapType) else None
  override def mapTypeOrNull: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType = _mapType
  override def mapTypeOrThrow: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType = if (mapTypeIsSet)
    _mapType
  else throw new java.lang.NullPointerException("field mapType of SimpleContainerType missing")
  override def mapTypeIsSet: Boolean = _mapType != null
  override def mapTypeUnset(): Unit = { _mapType = null }
  // end fields
  // Unknown wire fields captured during read (when preservation is enabled);
  // newest container first, since read() prepends.
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  /** Serializes this record to `oprot`, emitting only the fields that are set.
    * Preserved unknown fields, if enabled, are replayed after the known
    * fields; the list is reversed because read() prepends newest-first.
    */
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(SimpleContainerType.SIMPLECONTAINERTYPE_SDESC)
    if (listTypeIsSet) {
      oprot.writeFieldBegin(SimpleContainerType.LISTTYPE_FDESC)
      _listType.write(oprot)
      oprot.writeFieldEnd()
    }
    if (setTypeIsSet) {
      oprot.writeFieldBegin(SimpleContainerType.SETTYPE_FDESC)
      _setType.write(oprot)
      oprot.writeFieldEnd()
    }
    if (mapTypeIsSet) {
      oprot.writeFieldBegin(SimpleContainerType.MAPTYPE_FDESC)
      _mapType.write(oprot)
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  /** Populates this record from `iprot`, reading fields until TType.STOP.
    *
    * Known struct fields (ids 1-3) are read recursively; fields with an
    * unexpected wire type are skipped. Unknown fields are either preserved
    * (lazily allocating one UnknownFields container per read) or skipped.
    * TExceptions from field reads are re-thrown wrapped with the field id.
    */
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (at most once per read) the container for unknown
    // fields and prepends it to this record's unknownFields list.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        SimpleContainerType.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // listType
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRUCT) {
              _listType = ({
                val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType.createRawRecord
                s.read(iprot)
                s
              })
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // setType
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRUCT) {
              _setType = ({
                val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType.createRawRecord
                s.read(iprot)
                s
              })
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 3 => { // mapType
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRUCT) {
              _mapType = ({
                val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType.createRawRecord
                s.read(iprot)
                s
              })
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure SimpleContainerType".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
/** Fills in any field that is set on `that` but still unset here.
 *  Fields already set on `this` are never overwritten.
 */
override def merge(that: SimpleContainerType): Unit = {
  if (that.listTypeIsSet && !this.listTypeIsSet) { this.listType_=(that.listTypeOrNull) }
  if (that.setTypeIsSet && !this.setTypeIsSet) { this.setType_=(that.setTypeOrNull) }
  if (that.mapTypeIsSet && !this.mapTypeIsSet) { this.mapType_=(that.mapTypeOrNull) }
}
/** Non-destructive merge: builds a fresh record seeded from `this`,
 *  then overlays fields from `that` that `this` left unset.
 */
override def mergeCopy(that: SimpleContainerType): SimpleContainerType = {
  val merged = SimpleContainerType.createRawRecord
  merged.merge(this)
  merged.merge(that)
  merged
}
// Untyped bridge: delegates to the typed equals. A type pattern never matches null,
// so null (and any non-SimpleContainerType) falls through to false.
override def equals(that: Any): Boolean = that match {
  case other: SimpleContainerType => this.equals(other)
  case _ => false
}
/** Field-by-field equality: records are equal iff each field is either set on
 *  both sides with equal values, or unset on both sides.
 */
def equals(that: SimpleContainerType): Boolean = {
  if (that == null) false
  else {
    val listOk =
      if (this.listTypeIsSet) that.listTypeIsSet && this.listTypeOrNull == that.listTypeOrNull
      else !that.listTypeIsSet
    val setOk =
      if (this.setTypeIsSet) that.setTypeIsSet && this.setTypeOrNull == that.setTypeOrNull
      else !that.setTypeIsSet
    val mapOk =
      if (this.mapTypeIsSet) that.mapTypeIsSet && this.mapTypeOrNull == that.mapTypeOrNull
      else !that.mapTypeIsSet
    listOk && setOk && mapOk
  }
}
/** Order-sensitive hash over the set fields only, using MurmurHash with a
 *  fixed seed (0) so hashes are stable run to run.
 */
override def hashCode(): Int = {
  val h = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
  if (listTypeIsSet) h.append(_listType.##)
  if (setTypeIsSet) h.append(_setType.##)
  if (mapTypeIsSet) h.append(_mapType.##)
  h.hash
}
/** Values of all currently-set fields, in ascending field-id order. */
def getSetFields: Seq[Any] = {
  val acc = List.newBuilder[Any]
  if (listTypeIsSet) acc += listTypeOrNull
  if (setTypeIsSet) acc += setTypeOrNull
  if (mapTypeIsSet) acc += mapTypeOrNull
  acc.result()
}
/** Resets the record to pristine: unsets every field and discards any preserved unknown fields. */
override def clear() {
  listTypeUnset()
  setTypeUnset()
  mapTypeUnset()
  unknownFields = Nil
}
/** Maps a thrift field id to its `_Fields` enum value; returns null
 *  (TBase/Java convention) for an unrecognized id.
 */
def fieldForId(id: Int): SimpleContainerType._Fields =
  if (id == 1) SimpleContainerType._Fields.listType
  else if (id == 2) SimpleContainerType._Fields.setType
  else if (id == 3) SimpleContainerType._Fields.mapType
  else null
/** True iff the given field currently holds a value; unrecognized fields report false. */
def isSet(field: SimpleContainerType._Fields): Boolean = {
  import SimpleContainerType.{_Fields => F}
  field match {
    case F.listType => listTypeIsSet
    case F.setType => setTypeIsSet
    case F.mapType => mapTypeIsSet
    case _ => false
  }
}
/** Returns the field's value as an AnyRef (null when unset);
 *  throws IllegalStateException for an unrecognized field.
 */
def getFieldValue(field: SimpleContainerType._Fields): AnyRef = {
  import SimpleContainerType.{_Fields => F}
  field match {
    case F.listType => listTypeOrNull.asInstanceOf[AnyRef]
    case F.setType => setTypeOrNull.asInstanceOf[AnyRef]
    case F.mapType => mapTypeOrNull.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
}
/** Sets `field` to `value`, cast to the field's declared struct type.
 *  Unrecognized fields are silently ignored.
 */
def setFieldValue(field: SimpleContainerType._Fields, value: AnyRef) {
  import SimpleContainerType.{_Fields => F}
  field match {
    case F.listType =>
      listType_=(value.asInstanceOf[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType])
    case F.setType =>
      setType_=(value.asInstanceOf[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType])
    case F.mapType =>
      mapType_=(value.asInstanceOf[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType])
    case _ => ()
  }
}
/** Deep copy via a binary-protocol round trip: serialize into an in-memory
 *  buffer, then read a fresh record back out of it.
 */
override def deepCopy(): RawSimpleContainerType = {
  // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
  val buffer = new org.apache.thrift.transport.TMemoryBuffer(1024)
  val protocol = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(buffer)
  write(protocol)
  val copied = SimpleContainerType.createRawRecord
  copied.read(protocol)
  copied
}
/** Builds a new raw record from the given field values (defaulting to this
 *  record's current values); a null argument leaves that field unset.
 */
override def copy(
  listType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ListType = listTypeOrNull,
  setType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.SetType = setTypeOrNull,
  mapType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.MapType = mapTypeOrNull
): RawSimpleContainerType = {
  val target = new RawSimpleContainerType
  if (listType != null) target.listType_=(listType)
  if (setType != null) target.setType_=(setType)
  if (mapType != null) target.mapType_=(mapType)
  target
}
/** Human-readable rendering: writes the record through the string protocol
 *  into a memory buffer and decodes it as UTF-8.
 */
override def toString: String = {
  val buffer = new org.apache.thrift.transport.TMemoryBuffer(1024)
  write(new io.fsq.spindle.common.thrift.base.TStringProtocol(buffer))
  buffer.toString("UTF8")
}
}
/** Companion for [[SimpleType]]: builder factory, record factory (via SimpleTypeMeta),
 *  and implicit ordering/comparator instances.
 */
object SimpleType extends SimpleTypeMeta {
  object Builder {
    // Phantom-type state markers for the builder. All fields of this struct are optional,
    // so both states reduce to Any.
    type HasAll = Any
    type AllSpecified = Builder[HasAll]
    type AllUnspecified = Builder[Any]
  }
  /** Fluent, single-use builder. After `result()`/`resultMutable()` is called the
   *  builder is spent; calling it again throws IllegalStateException.
   */
  class Builder[+State] private[SimpleType] (private var obj: RawSimpleType) {
    // Sets baseType to the given value.
    def baseType(v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType): SimpleType.Builder[State] = {
      obj.baseType_=(v)
      this
    }
    // Option variant: Some sets the field, None unsets it.
    def baseType(
      vOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType]
    ): SimpleType.Builder[State] = {
      vOpt match {
        case Some(v) => obj.baseType_=(v)
        case None => obj.baseTypeUnset()
      }
      this
    }
    def containerType(
      v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType
    ): SimpleType.Builder[State] = {
      obj.containerType_=(v)
      this
    }
    def containerType(
      vOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType]
    ): SimpleType.Builder[State] = {
      vOpt match {
        case Some(v) => obj.containerType_=(v)
        case None => obj.containerTypeUnset()
      }
      this
    }
    def typeref(v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref): SimpleType.Builder[State] = {
      obj.typeref_=(v)
      this
    }
    def typeref(
      vOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref]
    ): SimpleType.Builder[State] = {
      vOpt match {
        case Some(v) => obj.typeref_=(v)
        case None => obj.typerefUnset()
      }
      this
    }
    /** Hands back the built record and nulls the internal reference so the
     *  builder cannot be used to mutate the record after release.
     */
    def resultMutable(): MutableSimpleType = {
      if (obj != null) {
        val ret = obj
        obj = null
        ret
      } else {
        throw new IllegalStateException("SimpleType.Builder.result invoked multiple times. Use a new Builder.")
      }
    }
    def result(): SimpleType = resultMutable()
  }
  def newBuilder: SimpleType.Builder.AllUnspecified = new Builder(SimpleType.createRawRecord)
  implicit val companionProvider: SimpleTypeCompanionProvider = new SimpleTypeCompanionProvider
  // Ordering/Comparator both delegate to SimpleType.compare.
  implicit val __$ordering: _root_.scala.math.Ordering[SimpleType] = {
    new _root_.scala.math.Ordering[SimpleType] {
      override def compare(x: SimpleType, y: SimpleType): Int = x.compare(y)
    }
  }
  implicit val __$comparator: _root_.java.util.Comparator[SimpleType] = {
    new _root_.java.util.Comparator[SimpleType] {
      override def compare(x: SimpleType, y: SimpleType): Int = x.compare(y)
    }
  }
}
/** Metadata for the SimpleType struct: thrift field descriptors, wire-name lookup,
 *  the `_Fields` id enum, record factories, and Spindle field descriptors.
 */
class SimpleTypeMeta
  extends JavaSimpleTypeMeta[SimpleType, RawSimpleType, SimpleTypeMeta]
  with io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RecordProvider[SimpleType] {
  override def recordName: String = "SimpleType"
  // Thrift descriptors.
  val SIMPLETYPE_SDESC: org.apache.thrift.protocol.TStruct = new org.apache.thrift.protocol.TStruct("SimpleType")
  val BASETYPE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "baseType",
      org.apache.thrift.protocol.TType.STRUCT,
      1, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val CONTAINERTYPE_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "containerType",
      org.apache.thrift.protocol.TType.STRUCT,
      2, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  val TYPEREF_FDESC: org.apache.thrift.protocol.TField =
    new io.fsq.spindle.common.thrift.base.EnhancedTField(
      "typeref",
      org.apache.thrift.protocol.TType.STRUCT,
      3, {
        java.util.Collections.emptyMap[String, String]
      }
    )
  // Sentinel descriptor (id -1, VOID) for fields not present in this schema.
  val UNKNOWN_FIELD: org.apache.thrift.protocol.TField =
    new org.apache.thrift.protocol.TField("", org.apache.thrift.protocol.TType.VOID, -1);
  // Name-keyed lookup for protocols (e.g. JSON/BSON) that transmit field names instead of ids.
  val wireNameToTField: Map[String, org.apache.thrift.protocol.TField] = Map(
    "baseType" -> BASETYPE_FDESC,
    "containerType" -> CONTAINERTYPE_FDESC,
    "typeref" -> TYPEREF_FDESC
  )
  object _Fields {
    case object baseType extends _Fields(1, "baseType")
    case object containerType extends _Fields(2, "containerType")
    case object typeref extends _Fields(3, "typeref")
  }
  // TFieldIdEnum implementation pairing each field id with its wire name.
  sealed abstract class _Fields private (id: Short, name: String) extends org.apache.thrift.TFieldIdEnum {
    def getThriftFieldId: Short = id
    def getFieldName: String = name
  }
  val idToTFieldIdEnum: Map[Short, org.apache.thrift.TFieldIdEnum] = Map(
    1.toShort -> _Fields.baseType,
    2.toShort -> _Fields.containerType,
    3.toShort -> _Fields.typeref
  )
  // Record factories: all funnel into a fresh RawSimpleType.
  override def createUntypedRawRecord: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord =
    createRawRecord
  override def createRecord: SimpleType = createRawRecord
  override def createRawRecord: RawSimpleType = new RawSimpleType
  override def untypedIfInstanceFrom(
    x: AnyRef
  ): Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedRecord] = ifInstanceFrom(x)
  override def ifInstanceFrom(x: AnyRef): Option[SimpleType] = {
    if (x.isInstanceOf[SimpleType]) Some(x.asInstanceOf[SimpleType]) else None
  }
  override val annotations: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations =
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.Annotations.empty
  // Spindle Descriptors.
  val baseType =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.StructFieldDescriptor[
      SimpleType,
      SimpleTypeMeta,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseTypeMeta
    ](
      name = "baseType",
      longName = "baseType",
      id = 1,
      annotations = Map(),
      owner = this,
      getter = _.baseTypeOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SimpleType],
        v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType
      ) => { r.asInstanceOf[RawSimpleType].baseType_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SimpleType]) => {
        r.asInstanceOf[RawSimpleType].baseTypeUnset()
      },
      structMeta = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType,
      manifest = manifest[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType]
    )
  val containerType =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.StructFieldDescriptor[
      SimpleType,
      SimpleTypeMeta,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerTypeMeta
    ](
      name = "containerType",
      longName = "containerType",
      id = 2,
      annotations = Map(),
      owner = this,
      getter = _.containerTypeOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SimpleType],
        v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType
      ) => { r.asInstanceOf[RawSimpleType].containerType_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SimpleType]) => {
        r.asInstanceOf[RawSimpleType].containerTypeUnset()
      },
      structMeta = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType,
      manifest = manifest[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType]
    )
  val typeref =
    new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.StructFieldDescriptor[
      SimpleType,
      SimpleTypeMeta,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref,
      io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.TyperefMeta
    ](
      name = "typeref",
      longName = "typeref",
      id = 3,
      annotations = Map(),
      owner = this,
      getter = _.typerefOption,
      setterRaw = (
        r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SimpleType],
        v: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref
      ) => { r.asInstanceOf[RawSimpleType].typeref_=(v) },
      unsetterRaw = (r: io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MutableRecord[SimpleType]) => {
        r.asInstanceOf[RawSimpleType].typerefUnset()
      },
      structMeta = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref,
      manifest = manifest[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref]
    )
  override def untypedFields: Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UntypedFieldDescriptor] =
    fields
  // All field descriptors, in field-id order.
  override val fields
    : Seq[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, SimpleType, SimpleTypeMeta]] =
    Vector[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.FieldDescriptor[_, SimpleType, SimpleTypeMeta]](
      baseType,
      containerType,
      typeref
    )
  /** Convenience constructor setting all three fields. */
  def apply(
    baseType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType,
    containerType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType,
    typeref: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref
  ): SimpleType = {
    val ret = this.createRawRecord
    ret.baseType_=(baseType)
    ret.containerType_=(containerType)
    ret.typeref_=(typeref)
    ret
  }
}
/** Exposes the [[SimpleType]] companion through the generic CompanionProvider type class. */
class SimpleTypeCompanionProvider
  extends io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.CompanionProvider[SimpleType] {
  type CompanionT = SimpleTypeMeta
  override def provide: SimpleTypeMeta = SimpleType
}
/** Read-side interface for the SimpleType record.
 *
 * Provides ordering, copying, and builder conversion; mutation lives on
 * [[MutableSimpleType]] and the concrete [[RawSimpleType]].
 */
trait SimpleType
  extends JavaSimpleType[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref,
    SimpleType,
    RawSimpleType,
    SimpleTypeMeta
  ]
  with org.apache.thrift.TBase[SimpleType, SimpleType._Fields] {
  override def meta: SimpleTypeMeta
  /** Total order: null sorts before any record; fields compare in id order,
   *  an unset field sorting before a set one.
   */
  override def compare(that: SimpleType): Int = {
    var cmp: Int = 0
    if (that == null) {
      1
    } else if ({
      cmp = this.baseTypeIsSet.compareTo(that.baseTypeIsSet)
      cmp != 0
    }) cmp
    else if (this.baseTypeIsSet && {
      cmp = this.baseTypeOrNull.compareTo(that.baseTypeOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.containerTypeIsSet.compareTo(that.containerTypeIsSet)
      cmp != 0
    }) cmp
    else if (this.containerTypeIsSet && {
      cmp = this.containerTypeOrNull.compareTo(that.containerTypeOrNull)
      cmp != 0
    }) cmp
    else if ({
      cmp = this.typerefIsSet.compareTo(that.typerefIsSet)
      cmp != 0
    }) cmp
    else if (this.typerefIsSet && {
      cmp = this.typerefOrNull.compareTo(that.typerefOrNull)
      cmp != 0
    }) cmp
    else 0
  }
  override def <(that: SimpleType): Boolean = { this.compare(that) < 0 }
  override def >(that: SimpleType): Boolean = { this.compare(that) > 0 }
  override def <=(that: SimpleType): Boolean = { this.compare(that) <= 0 }
  override def >=(that: SimpleType): Boolean = { this.compare(that) >= 0 }
  override def compareTo(that: SimpleType): Int = compare(that)
  def write(oprot: org.apache.thrift.protocol.TProtocol): Unit
  def deepCopy(): SimpleType
  def copy(
    baseType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType = baseTypeOrNull,
    containerType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType = containerTypeOrNull,
    typeref: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref = typerefOrNull
  ): SimpleType
  /** Fresh mutable copy carrying over only the set fields. */
  def mutableCopy(): MutableSimpleType = {
    val ret = SimpleType.createRawRecord
    if (baseTypeIsSet) ret.baseType_=(baseTypeOrNull)
    if (containerTypeIsSet) ret.containerType_=(containerTypeOrNull)
    if (typerefIsSet) ret.typeref_=(typerefOrNull)
    ret
  }
  /** Returns a pointer to a Mutable version of this record.
   *
   * If the underlying implementation is mutable, `this` will be returned.
   * If the underlying implementation is immutable, a mutable copy will be returned.
   *
   * After mutating the instance returned by this method, the original instance
   * (on which `mutable` was called) will be in an undefined state. It may or may
   * not have been modified, depending on whether it was immutable or not.
   *
   * This is included as an optimization for when we want access to a Mutable record
   * but don't want to pay the cost of copying every time.
   */
  def mutable: MutableSimpleType
  /** Seeds a new builder with this record's set fields. */
  def toBuilder(): SimpleType.Builder.AllSpecified = {
    val ret = new SimpleType.Builder(SimpleType.createRawRecord)
    if (baseTypeIsSet) ret.baseType(baseTypeOrNull)
    if (containerTypeIsSet) ret.containerType(containerTypeOrNull)
    if (typerefIsSet) ret.typeref(typerefOrNull)
    ret
  }
  def mergeCopy(that: SimpleType): SimpleType
}
/** Write-side interface for SimpleType: setters, unsetters, and in-place merge. */
trait MutableSimpleType
  extends SimpleType
  with JavaSimpleTypeMutable[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref,
    SimpleType,
    RawSimpleType,
    SimpleTypeMeta
  ] {
  def baseType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType): Unit
  def baseTypeUnset(): Unit
  def containerType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType): Unit
  def containerTypeUnset(): Unit
  def typeref_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref): Unit
  def typerefUnset(): Unit
  def merge(that: SimpleType): Unit
  def copy(
    baseType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType = baseTypeOrNull,
    containerType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType = containerTypeOrNull,
    typeref: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref = typerefOrNull
  ): MutableSimpleType
  // Already mutable, so no copy is needed.
  override def mutable: MutableSimpleType = this
}
/** Concrete mutable backing implementation of [[SimpleType]].
 *
 * Each field is a nullable reference; null means "unset". Implements thrift
 * (de)serialization, merge semantics, equality/hashing over set fields, and
 * preservation of unknown wire fields.
 */
final class RawSimpleType
  extends JavaSimpleTypeRaw[
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType,
    io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref,
    SimpleType,
    RawSimpleType,
    SimpleTypeMeta
  ]
  with MutableSimpleType {
  override def meta: SimpleTypeMeta = SimpleType
  // fields
  // Field #1 - baseType
  private var _baseType
    : io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType = null // Underlying type: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType
  override def baseType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType): Unit = {
    _baseType = x
  }
  override def baseTypeOption: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType] =
    if (baseTypeIsSet) Some(_baseType) else None
  override def baseTypeOrNull: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType = _baseType
  override def baseTypeOrThrow: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType =
    if (baseTypeIsSet) _baseType else throw new java.lang.NullPointerException("field baseType of SimpleType missing")
  // "Set" means the backing reference is non-null.
  override def baseTypeIsSet: Boolean = _baseType != null
  override def baseTypeUnset(): Unit = { _baseType = null }
  // Field #2 - containerType
  private var _containerType
    : io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType = null // Underlying type: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType
  override def containerType_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType): Unit = {
    _containerType = x
  }
  override def containerTypeOption: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType] =
    if (containerTypeIsSet) Some(_containerType) else None
  override def containerTypeOrNull: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType =
    _containerType
  override def containerTypeOrThrow: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType =
    if (containerTypeIsSet) _containerType
    else throw new java.lang.NullPointerException("field containerType of SimpleType missing")
  override def containerTypeIsSet: Boolean = _containerType != null
  override def containerTypeUnset(): Unit = { _containerType = null }
  // Field #3 - typeref
  private var _typeref
    : io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref = null // Underlying type: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref
  override def typeref_=(x: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref): Unit = {
    _typeref = x
  }
  override def typerefOption: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref] =
    if (typerefIsSet) Some(_typeref) else None
  override def typerefOrNull: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref = _typeref
  override def typerefOrThrow: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref =
    if (typerefIsSet) _typeref else throw new java.lang.NullPointerException("field typeref of SimpleType missing")
  override def typerefIsSet: Boolean = _typeref != null
  override def typerefUnset(): Unit = { _typeref = null }
  // end fields
  // Unknown fields seen while reading, kept newest-first so write() can re-emit them (reversed).
  private var unknownFields: List[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = Nil
  /** Serializes this record to `oprot`, emitting only set fields in field-id order,
   *  followed by any preserved unknown fields.
   */
  override def write(oprot: org.apache.thrift.protocol.TProtocol): Unit = {
    oprot.writeStructBegin(SimpleType.SIMPLETYPE_SDESC)
    if (baseTypeIsSet) {
      oprot.writeFieldBegin(SimpleType.BASETYPE_FDESC)
      _baseType.write(oprot)
      oprot.writeFieldEnd()
    }
    if (containerTypeIsSet) {
      oprot.writeFieldBegin(SimpleType.CONTAINERTYPE_FDESC)
      _containerType.write(oprot)
      oprot.writeFieldEnd()
    }
    if (typerefIsSet) {
      oprot.writeFieldBegin(SimpleType.TYPEREF_FDESC)
      _typeref.write(oprot)
      oprot.writeFieldEnd()
    }
    if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
      unknownFields.reverse foreach { _.write(oprot) }
    }
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  }
  /** Populates this record from `iprot`; mismatched wire types are skipped and
   *  unknown field ids are preserved or skipped per the runtime setting.
   */
  override def read(iprot: org.apache.thrift.protocol.TProtocol) {
    // Unknown fields in this read go here.
    var currentUnknownFieldsOpt: Option[io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields] = None
    // Lazily creates (and caches) the UnknownFields bucket on first unknown field.
    def currentUnknownFields(): io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields =
      currentUnknownFieldsOpt match {
        case Some(uf) => uf
        case None => {
          val uf = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.UnknownFields(
            this,
            io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.TProtocolInfo.getProtocolName(iprot)
          )
          unknownFields = uf :: unknownFields
          currentUnknownFieldsOpt = Some(uf)
          uf
        }
      }
    iprot.readStructBegin()
    var wire_field_header: org.apache.thrift.protocol.TField = iprot.readFieldBegin()
    while (wire_field_header.`type` != org.apache.thrift.protocol.TType.STOP) {
      // Some protocols, e.g., BSON and JSON, serialize the field name, not the id. If we don't have the id we use the
      // name to look up the id and type. This allows us to use those protocols naturally.
      var field_header: org.apache.thrift.protocol.TField = if (wire_field_header.id < 0) {
        SimpleType.wireNameToTField.getOrElse(wire_field_header.name, wire_field_header)
      } else {
        wire_field_header
      }
      try {
        field_header.id match {
          case 1 => { // baseType
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRUCT) {
              _baseType = ({
                val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType.createRawRecord
                s.read(iprot)
                s
              })
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 2 => { // containerType
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRUCT) {
              _containerType = ({
                val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType.createRawRecord
                s.read(iprot)
                s
              })
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case 3 => { // typeref
            if (field_header.`type` == org.apache.thrift.protocol.TType.STRUCT) {
              _typeref = ({
                val s = io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref.createRawRecord
                s.read(iprot)
                s
              })
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
          case _ => {
            if (io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.RuntimeHelpers.preserveUnknownFields(this)) {
              currentUnknownFields().readUnknownField(iprot, field_header, this) // May call this method recursively.
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field_header.`type`)
            }
          }
        } // end match
      } catch {
        // Re-wrap protocol errors with the offending field id for easier debugging.
        case e: org.apache.thrift.TException =>
          throw new org.apache.thrift.TException(
            "Error reading field %d in structure SimpleType".format(field_header.id),
            e
          )
      }
      iprot.readFieldEnd()
      wire_field_header = iprot.readFieldBegin()
    } // end while
    iprot.readStructEnd()
  }
  /** Fills in fields set on `that` but unset here; never overwrites a set field. */
  override def merge(that: SimpleType): Unit = {
    if (that.baseTypeIsSet && !this.baseTypeIsSet) {
      this.baseType_=(that.baseTypeOrNull)
    }
    if (that.containerTypeIsSet && !this.containerTypeIsSet) {
      this.containerType_=(that.containerTypeOrNull)
    }
    if (that.typerefIsSet && !this.typerefIsSet) {
      this.typeref_=(that.typerefOrNull)
    }
  }
  /** Non-destructive merge: fresh record seeded from `this`, overlaid with `that`. */
  override def mergeCopy(that: SimpleType): SimpleType = {
    val ret = SimpleType.createRawRecord
    ret.merge(this)
    ret.merge(that)
    ret
  }
  // Untyped bridge delegating to the typed equals; null and foreign types are unequal.
  override def equals(that: Any): Boolean = that match {
    case null => false
    case o: SimpleType => this.equals(o)
    case _ => false
  }
  /** Field-by-field equality: fields must be set on both with equal values, or unset on both. */
  def equals(that: SimpleType): Boolean = {
    that != null &&
    (if (this.baseTypeIsSet) (that.baseTypeIsSet && this.baseTypeOrNull == that.baseTypeOrNull)
     else !that.baseTypeIsSet) &&
    (if (this.containerTypeIsSet) (that.containerTypeIsSet && this.containerTypeOrNull == that.containerTypeOrNull)
     else !that.containerTypeIsSet) &&
    (if (this.typerefIsSet) (that.typerefIsSet && this.typerefOrNull == that.typerefOrNull) else !that.typerefIsSet) &&
    true
  }
  /** Order-sensitive hash over the set fields only. */
  override def hashCode(): Int = {
    // We use a fixed seed, for consistency.
    val hasher = new io.fsq.spindle.__shaded_for_spindle_bootstrap__.runtime.MurmurHash[AnyRef](0)
    if (baseTypeIsSet) hasher.append(_baseType.##)
    if (containerTypeIsSet) hasher.append(_containerType.##)
    if (typerefIsSet) hasher.append(_typeref.##)
    hasher.hash
  }
  // Returns the values of the set fields on this object, in id order.
  def getSetFields: Seq[Any] = {
    var ret: List[Any] = Nil
    if (baseTypeIsSet) ret = baseTypeOrNull :: ret
    if (containerTypeIsSet) ret = containerTypeOrNull :: ret
    if (typerefIsSet) ret = typerefOrNull :: ret
    ret.reverse
  }
  /** Resets the record: unsets every field and drops preserved unknown fields. */
  override def clear() {
    baseTypeUnset()
    containerTypeUnset()
    typerefUnset()
    unknownFields = Nil
  }
  // Maps a thrift field id to its _Fields value; null (Java convention) for unknown ids.
  def fieldForId(id: Int): SimpleType._Fields = id match {
    case 1 => SimpleType._Fields.baseType
    case 2 => SimpleType._Fields.containerType
    case 3 => SimpleType._Fields.typeref
    case _ => null
  }
  // True iff the given field currently holds a value.
  def isSet(field: SimpleType._Fields): Boolean = field match {
    case SimpleType._Fields.baseType => baseTypeIsSet
    case SimpleType._Fields.containerType => containerTypeIsSet
    case SimpleType._Fields.typeref => typerefIsSet
    case _ => false
  }
  // Returns the field's value as AnyRef (null when unset); throws for an unrecognized field.
  def getFieldValue(field: SimpleType._Fields): AnyRef = field match {
    case SimpleType._Fields.baseType => baseTypeOrNull.asInstanceOf[AnyRef]
    case SimpleType._Fields.containerType => containerTypeOrNull.asInstanceOf[AnyRef]
    case SimpleType._Fields.typeref => typerefOrNull.asInstanceOf[AnyRef]
    case _ => throw new IllegalStateException
  }
  // Sets the field to the given value (cast to its declared type); unknown fields are ignored.
  def setFieldValue(field: SimpleType._Fields, value: AnyRef) {
    field match {
      case SimpleType._Fields.baseType =>
        baseType_=(value.asInstanceOf[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType])
      case SimpleType._Fields.containerType =>
        containerType_=(value.asInstanceOf[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType])
      case SimpleType._Fields.typeref =>
        typeref_=(value.asInstanceOf[io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref])
      case _ =>
    }
  }
  /** Deep copy via a binary-protocol round trip through an in-memory buffer. */
  override def deepCopy(): RawSimpleType = {
    // May not be the most efficient way to create a deep copy, but we don't expect to use this intensively.
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val prot = new org.apache.thrift.protocol.TBinaryProtocol.Factory().getProtocol(trans)
    write(prot)
    val ret = SimpleType.createRawRecord
    ret.read(prot)
    ret
  }
  /** New raw record from the given values (defaulting to current values); null leaves a field unset. */
  override def copy(
    baseType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.BaseType = baseTypeOrNull,
    containerType: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.ContainerType = containerTypeOrNull,
    typeref: io.fsq.spindle.__shaded_for_spindle_bootstrap__.descriptors.Typeref = typerefOrNull
  ): RawSimpleType = {
    val ret = new RawSimpleType
    if (baseType != null) ret.baseType_=(baseType)
    if (containerType != null) ret.containerType_=(containerType)
    if (typeref != null) ret.typeref_=(typeref)
    ret
  }
  /** Human-readable rendering via the string protocol, decoded as UTF-8. */
  override def toString: String = {
    val trans = new org.apache.thrift.transport.TMemoryBuffer(1024)
    val oprot = new io.fsq.spindle.common.thrift.base.TStringProtocol(trans)
    write(oprot)
    trans.toString("UTF8")
  }
}
| foursquare/fsqio | src/jvm/io/fsq/spindle/codegen/__shaded_for_spindle_bootstrap__/descriptors/thrift_descriptors.scala | Scala | apache-2.0 | 700,228 |
/*
* Copyright 2013 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s
package parser
import cats.data.NonEmptyList
import cats.syntax.either._
import org.http4s.headers.Forwarded
import org.http4s.internal.bug
import org.http4s.syntax.literals._
import org.typelevel.ci._
/**
 * Tests for parsing the `Forwarded` header (RFC 7239): single and compound
 * elements, quoting rules, case-insensitive parameter names, multi-element
 * lists, and rejection of malformed input.
 */
class ForwardedHeaderSpec extends Http4sSuite {
  import Forwarded.Element
  import ForwardedHeaderSpec._

  def parse(input: String) = Forwarded.parse(input)

  // Asserts that `headerStr` parses successfully into exactly `expected`.
  private def checkParsesTo(headerStr: String, expected: NonEmptyList[Element]): Unit =
    parse(headerStr) match {
      case Right(v) => assertEquals(v.values, expected)
      case Left(e) => fail(e.message)
    }

  // Asserts that `headerStr` is rejected with the generic sanitized error.
  private def checkFailsToParse(headerStr: String): Unit =
    parse(headerStr) match {
      case Right(_) => fail("Expected parser failure")
      case Left(e) => assertNoDiff(e.sanitized, "Invalid Forwarded header")
    }

  test("FORWARDED should parse single simple elements") {
    val values = List(
      "by=4.3.2.1" -> Element.fromBy(uri"//4.3.2.1"),
      "for=\\"[1:2:0::0:3:4]:56\\"" -> Element.fromFor(uri"//[1:2::3:4]:56"),
      "BY=\\"_a.b-r.a_:_k.a-d._.b-r.a\\"" -> Element.fromBy(uri"//_a.b-r.a_#_k.a-d._.b-r.a"),
      "for=unknown" -> Element.fromFor(uri"//unknown"),
      "by=\\"unknown:451\\"" -> Element.fromBy(uri"//unknown:451"),
      "For=\\"unknown:__p.0_r.t-\\"" -> Element.fromFor(uri"//unknown#__p.0_r.t-"),
      "host=http4s.org" -> Element.fromHost(uri"//http4s.org"),
      "hOSt=\\"http4s.org:12345\\"" -> Element.fromHost(uri"//http4s.org:12345"),
      "host=\\"1.2.3.4:567\\"" -> Element.fromHost(uri"//1.2.3.4:567"),
      "host=\\"[8:7:6:5:4:3:2:1]\\"" -> Element.fromHost(uri"//[8:7:6:5:4:3:2:1]"),
      "proto=http" -> Element.fromProto(scheme"http"),
      "proto=\\"https\\"" -> Element.fromProto(scheme"https"),
      "prOtO=gopher" -> Element.fromProto(scheme"gopher"),
    )
    values.foreach { case (headerStr, parsedMod) => checkParsesTo(headerStr, NEL(parsedMod)) }
  }

  test("FORWARDED should parse single compound elements") {
    val values = List(
      "by=_abra;for=_kadabra" -> Element.fromBy(uri"//_abra").withFor(uri"//_kadabra"),
      "by=_abra;for=_kadabra;host=http4s.org" -> Element
        .fromBy(uri"//_abra")
        .withFor(uri"//_kadabra")
        .withHost(uri"//http4s.org"),
      "by=_abra;for=_kadabra;host=\\"http4s.org\\";proto=http" -> Element
        .fromBy(uri"//_abra")
        .withFor(uri"//_kadabra")
        .withHost(uri"//http4s.org")
        .withProto(scheme"http"),
      "for=_kadabra;by=_abra;proto=http;host=http4s.org" -> Element
        .fromBy(uri"//_abra")
        .withFor(uri"//_kadabra")
        .withHost(uri"//http4s.org")
        .withProto(scheme"http"),
      "host=http4s.org;for=_kadabra;proto=http;by=_abra" -> Element
        .fromBy(uri"//_abra")
        .withFor(uri"//_kadabra")
        .withHost(uri"//http4s.org")
        .withProto(scheme"http"),
    )
    values.foreach { case (headerStr, parsedMod) => checkParsesTo(headerStr, NEL(parsedMod)) }
  }

  test("FORWARDED should parse multi elements") {
    val values = List(
      "by=_foo, for=_bar , host=foo.bar ,proto=foobar" -> NEL(
        Element.fromBy(uri"//_foo"),
        Element.fromFor(uri"//_bar"),
        Element.fromHost(uri"//foo.bar"),
        Element.fromProto(scheme"foobar"),
      ),
      "by=_foo;for=_bar , host=foo.bar;proto=foobar" -> NEL(
        Element.fromBy(uri"//_foo").withFor(uri"//_bar"),
        Element.fromHost(uri"//foo.bar").withProto(scheme"foobar"),
      ),
      "by=_foo ,for=_bar;host=foo.bar, proto=foobar" -> NEL(
        Element.fromBy(uri"//_foo"),
        Element.fromFor(uri"//_bar").withHost(uri"//foo.bar"),
        Element.fromProto(scheme"foobar"),
      ),
    )
    values.foreach { case (headerStr, expected) => checkParsesTo(headerStr, expected) }
  }

  test("FORWARDED should fail to parse unknown parameter") {
    // Parameter names not defined by RFC 7239 must be rejected.
    val values = Seq(
      "bye=1.2.3.4",
      "four=_foobar",
      "ghost=foo.bar",
      "proot=http",
    )
    values.foreach(checkFailsToParse)
  }

  test("FORWARDED should fail to parse unquoted non-token") {
    // Values containing non-token characters (':', '[') must be quoted.
    val values =
      Seq(
        "by=[1:2:3::4:5:6]",
        "for=_abra:_kadabra",
        "host=foo.bar:123",
      )
    values.foreach(checkFailsToParse)
  }
}
object ForwardedHeaderSpec {
  import Forwarded.{Host, Node}

  // Matches obfuscated identifiers of the form "_token" (leading underscore,
  // then alphanumerics, '.', '_' or '-').
  private val ObfuscatedRe = """^(_[\\p{Alnum}\\.\\_\\-]+)$""".r

  // Extractor that unwraps a CIString to its plain String form so the
  // regex above can be applied in a pattern match.
  private object UnCIString {
    def unapply(cistr: CIString): Option[String] = Some(cistr.toString)
  }

  // Test-only conversion: builds a Forwarded Node from a URI, mapping the
  // host to IPv4/IPv6/unknown/obfuscated node names. Throws on hosts a node
  // cannot represent (test bug, not production behavior).
  implicit def convertUriToNode(uri: Uri): Node =
    Node(
      uri.host match {
        case Some(ipv4: Uri.Ipv4Address) => Node.Name.Ipv4(ipv4.address)
        case Some(ipv6: Uri.Ipv6Address) => Node.Name.Ipv6(ipv6.address)
        case Some(Uri.RegName(ci"unknown")) => Node.Name.Unknown
        case Some(Uri.RegName(UnCIString(ObfuscatedRe(obfuscatedName)))) =>
          Node.Obfuscated(obfuscatedName)
        case Some(other) => throw bug(s"not allowed as host for node: $other")
        case _ => throw bug(s"no host in URI: $uri")
      },
      uri.port.flatMap(Node.Port.fromInt(_).toOption).orElse {
        // Convention: use the URI fragment to define an obfuscated port.
        uri.fragment.flatMap {
          PartialFunction.condOpt(_) { case ObfuscatedRe(obfuscatedPort) =>
            Node.Obfuscated(obfuscatedPort)
          }
        }
      },
    )

  // Test-only conversion from URI to Forwarded Host; throws on failure.
  implicit def convertUriToHost(uri: Uri): Host = Host.fromUri(uri).valueOr(throw _)

  /** Just a shortcut for `NonEmptyList.of()` */
  private def NEL[A](head: A, tail: A*) = NonEmptyList.of[A](head, tail: _*)
}
| http4s/http4s | tests/shared/src/test/scala/org/http4s/parser/ForwardedHeaderSpec.scala | Scala | apache-2.0 | 6,371 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.{thisLineNumber, createTempDirectory}
import enablers.Definition
import Matchers._
import exceptions.TestFailedException
/**
 * Verifies the `be (defined)` matcher combined with `and`, when the
 * `Definition` type class (and `Equality`, where needed) is supplied
 * explicitly: positive and negated forms, for single values and `all(xs)`.
 */
class ShouldBeDefinedLogicalAndExplicitSpec extends Spec {

  val fileName: String = "ShouldBeDefinedLogicalAndExplicitSpec.scala"

  // Helpers building the failure-message fragments expected from the matchers.
  def wasEqualTo(left: Any, right: Any): String =
    FailureMessages.wasEqualTo(left, right)

  def wasNotEqualTo(left: Any, right: Any): String =
    FailureMessages.wasNotEqualTo(left, right)

  def equaled(left: Any, right: Any): String =
    FailureMessages.equaled(left, right)

  def didNotEqual(left: Any, right: Any): String =
    FailureMessages.didNotEqual(left, right)

  def wasNotDefined(left: Any): String =
    FailureMessages.wasNotDefined(left)

  def wasDefined(left: Any): String =
    FailureMessages.wasDefined(left)

  // Builds the aggregated message produced when an all(xs) assertion fails.
  def allError(message: String, lineNumber: Int, left: Any): String = {
    val messageWithIndex = UnquotedString(" " + FailureMessages.forAssertionsGenTraversableMessageWithStackDepth(0, UnquotedString(message), UnquotedString(fileName + ":" + lineNumber)))
    FailureMessages.allShorthandFailed(messageWithIndex, left)
  }

  // Minimal fixture exposing the `isDefined` member the Definition looks at.
  trait Thing {
    def isDefined: Boolean
  }

  val something = new Thing {
    val isDefined = true
  }

  val nothing = new Thing {
    val isDefined = false
  }

  // Explicit Definition instance passed to the matchers under test.
  val definition =
    new Definition[Thing] {
      def isDefined(thing: Thing): Boolean = thing.isDefined
    }

  object `Definition matcher` {

    object `when work with 'thing should be (defined)'` {

      def `should do nothing when thing is defined` {
        (something should (equal (something) and be (defined))) (defaultEquality[Thing{val isDefined: Boolean}], definition)
        (something should (be (defined) and equal (something))) (definition, defaultEquality[Thing{val isDefined: Boolean}])
        (something should (be_== (something) and be (defined))) (definition)
        (something should (be (defined) and be_== (something))) (definition)
      }

      def `should throw TestFailedException with correct stack depth when thing is not defined` {
        val caught1 = intercept[TestFailedException] {
          (nothing should (equal (nothing) and be (defined))) (defaultEquality[Thing{val isDefined: Boolean}], definition)
        }
        assert(caught1.message === Some(equaled(nothing, nothing) + ", but " + wasNotDefined(nothing)))
        assert(caught1.failedCodeFileName === Some(fileName))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught2 = intercept[TestFailedException] {
          (nothing should (be (defined) and equal (nothing))) (definition, defaultEquality[Thing{val isDefined: Boolean}])
        }
        assert(caught2.message === Some(wasNotDefined(nothing)))
        assert(caught2.failedCodeFileName === Some(fileName))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught3 = intercept[TestFailedException] {
          (nothing should (be_== (nothing) and be (defined))) (definition)
        }
        assert(caught3.message === Some(wasEqualTo(nothing, nothing) + ", but " + wasNotDefined(nothing)))
        assert(caught3.failedCodeFileName === Some(fileName))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught4 = intercept[TestFailedException] {
          (nothing should (be (defined) and be_== (nothing))) (definition)
        }
        assert(caught4.message === Some(wasNotDefined(nothing)))
        assert(caught4.failedCodeFileName === Some(fileName))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
    }

    object `when work with 'thing should not be defined'` {

      def `should do nothing when thing is not defined` {
        (nothing should (not equal something and not be defined)) (defaultEquality[Thing{val isDefined: Boolean}], definition)
        (nothing should (not be defined and not equal something)) (definition, defaultEquality[Thing{val isDefined: Boolean}])
        (nothing should (not be_== something and not be defined)) (definition)
        (nothing should (not be defined and not be_== something)) (definition)
      }

      // Renamed from "when xs is not defined": this test exercises the case
      // where the (negated) assertion fails because the thing IS defined.
      def `should throw TestFailedException with correct stack depth when thing is defined` {
        val caught1 = intercept[TestFailedException] {
          (something should (not equal nothing and not be defined)) (defaultEquality[Thing{val isDefined: Boolean}], definition)
        }
        assert(caught1.message === Some(didNotEqual(something, nothing) + ", but " + wasDefined(something)))
        assert(caught1.failedCodeFileName === Some(fileName))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught2 = intercept[TestFailedException] {
          (something should (not be defined and not equal nothing)) (definition, defaultEquality[Thing{val isDefined: Boolean}])
        }
        assert(caught2.message === Some(wasDefined(something)))
        assert(caught2.failedCodeFileName === Some(fileName))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught3 = intercept[TestFailedException] {
          (something should (not be_== nothing and not be defined)) (definition)
        }
        assert(caught3.message === Some(wasNotEqualTo(something, nothing) + ", but " + wasDefined(something)))
        assert(caught3.failedCodeFileName === Some(fileName))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
        val caught4 = intercept[TestFailedException] {
          (something should (not be defined and not be_== nothing)) (definition)
        }
        assert(caught4.message === Some(wasDefined(something)))
        assert(caught4.failedCodeFileName === Some(fileName))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
    }

    object `when work with 'all(xs) should be (defined)'` {

      def `should do nothing when all(xs) is defined` {
        (all(List(something)) should (be_== (something) and be (defined))) (definition)
        (all(List(something)) should (be (defined) and be_== (something))) (definition)
        (all(List(something)) should (equal (something) and be (defined))) (defaultEquality[Thing{val isDefined: Boolean}], definition)
        (all(List(something)) should (be (defined) and equal (something))) (definition, defaultEquality[Thing{val isDefined: Boolean}])
      }

      def `should throw TestFailedException with correct stack depth when all(xs) is not defined` {
        val left1 = List(nothing)
        val caught1 = intercept[TestFailedException] {
          (all(left1) should (be_== (nothing) and be (defined))) (definition)
        }
        assert(caught1.message === Some(allError(wasEqualTo(nothing, nothing) + ", but " + wasNotDefined(nothing), thisLineNumber - 2, left1)))
        assert(caught1.failedCodeFileName === Some(fileName))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left2 = List(nothing)
        val caught2 = intercept[TestFailedException] {
          (all(left2) should (be (defined) and be_== (nothing))) (definition)
        }
        assert(caught2.message === Some(allError(wasNotDefined(nothing), thisLineNumber - 2, left2)))
        assert(caught2.failedCodeFileName === Some(fileName))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left3 = List(nothing)
        val caught3 = intercept[TestFailedException] {
          (all(left3) should (equal (nothing) and be (defined))) (defaultEquality[Thing{val isDefined: Boolean}], definition)
        }
        assert(caught3.message === Some(allError(equaled(nothing, nothing) + ", but " + wasNotDefined(nothing), thisLineNumber - 2, left3)))
        assert(caught3.failedCodeFileName === Some(fileName))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left4 = List(nothing)
        val caught4 = intercept[TestFailedException] {
          (all(left4) should (be (defined) and equal (nothing))) (definition, defaultEquality[Thing{val isDefined: Boolean}])
        }
        assert(caught4.message === Some(allError(wasNotDefined(nothing), thisLineNumber - 2, left4)))
        assert(caught4.failedCodeFileName === Some(fileName))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
    }

    object `when work with 'all(xs) should not be defined'` {

      def `should do nothing when all(xs) is not defined` {
        (all(List(nothing)) should (not be defined and not be_== something)) (definition)
        (all(List(nothing)) should (not be_== something and not be defined)) (definition)
        (all(List(nothing)) should (not be defined and not equal something)) (definition, defaultEquality[Thing{val isDefined: Boolean}])
        (all(List(nothing)) should (not equal something and not be defined)) (defaultEquality[Thing{val isDefined: Boolean}], definition)
      }

      def `should throw TestFailedException with correct stack depth when all(xs) is defined` {
        val left1 = List(something)
        val caught1 = intercept[TestFailedException] {
          (all(left1) should (not be_== nothing and not be defined)) (definition)
        }
        assert(caught1.message === Some(allError(wasNotEqualTo(something, nothing) + ", but " + wasDefined(something), thisLineNumber - 2, left1)))
        assert(caught1.failedCodeFileName === Some(fileName))
        assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left2 = List(something)
        val caught2 = intercept[TestFailedException] {
          (all(left2) should (not be defined and not be_== nothing)) (definition)
        }
        assert(caught2.message === Some(allError(wasDefined(something), thisLineNumber - 2, left2)))
        assert(caught2.failedCodeFileName === Some(fileName))
        assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left3 = List(something)
        val caught3 = intercept[TestFailedException] {
          (all(left3) should (not equal nothing and not be defined)) (defaultEquality[Thing{val isDefined: Boolean}], definition)
        }
        assert(caught3.message === Some(allError(didNotEqual(something, nothing) + ", but " + wasDefined(something), thisLineNumber - 2, left3)))
        assert(caught3.failedCodeFileName === Some(fileName))
        assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
        val left4 = List(something)
        val caught4 = intercept[TestFailedException] {
          (all(left4) should (not be defined and not equal nothing)) (definition, defaultEquality[Thing{val isDefined: Boolean}])
        }
        assert(caught4.message === Some(allError(wasDefined(something), thisLineNumber - 2, left4)))
        assert(caught4.failedCodeFileName === Some(fileName))
        assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
      }
    }
  }
}
| SRGOM/scalatest | scalatest-test/src/test/scala/org/scalatest/ShouldBeDefinedLogicalAndExplicitSpec.scala | Scala | apache-2.0 | 11,828 |
package com.sksamuel.elastic4s.requests.analyzers
/** Base of the (legacy) normalizer DSL; `name` identifies the normalizer in requests. */
@deprecated("use new analysis package", "7.0.1")
abstract class Normalizer(val name: String)
/** A user-defined normalizer referenced purely by its name. */
@deprecated("use new analysis package", "7.0.1")
case class CustomNormalizer(override val name: String) extends Normalizer(name)
| sksamuel/elastic4s | elastic4s-domain/src/main/scala/com/sksamuel/elastic4s/requests/analyzers/Normalizer.scala | Scala | apache-2.0 | 274 |
package com.github.slackey.codecs.responses
// Response payload for Slack's chat.delete API call:
// `channel` is the channel id, `ts` the timestamp of the deleted message.
case class ChatDelete(
  channel: String,
  ts: String
)
| slackey/slackey | src/main/scala/com/github/slackey/codecs/responses/ChatDelete.scala | Scala | mit | 102 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.coordinator.transaction
import java.nio.charset.StandardCharsets
import kafka.utils.{Json, Logging}
import kafka.zk.{KafkaZkClient, ProducerIdBlockZNode}
import org.apache.kafka.common.KafkaException
import scala.collection.JavaConverters._
/**
* ProducerIdManager is the part of the transaction coordinator that provides ProducerIds in a unique way
* such that the same producerId will not be assigned twice across multiple transaction coordinators.
*
* ProducerIds are managed via ZooKeeper, where the latest producerId block is written on the corresponding ZK
* path by the manager who claims the block, where the written block_start and block_end are both inclusive.
*/
/**
 * Companion holding the block-allocation constants and the JSON
 * (de)serialization used for the producerId block ZooKeeper payload.
 */
object ProducerIdManager extends Logging {
  // Version tag written into the ZK JSON payload.
  val CurrentVersion: Long = 1L
  // Number of producerIds claimed per ZK write.
  val PidBlockSize: Long = 1000L

  /** Encodes `producerIdBlock` as the JSON payload stored under the ZK path. */
  def generateProducerIdBlockJson(producerIdBlock: ProducerIdBlock): Array[Byte] = {
    Json.encodeAsBytes(Map("version" -> CurrentVersion,
      "broker" -> producerIdBlock.brokerId,
      "block_start" -> producerIdBlock.blockStartId.toString,
      "block_end" -> producerIdBlock.blockEndId.toString).asJava
    )
  }

  /**
   * Parses a producerId block from its ZK JSON payload.
   * Throws KafkaException when the payload is not a valid block document.
   */
  def parseProducerIdBlockData(jsonData: Array[Byte]): ProducerIdBlock = {
    // Decode the raw bytes for error messages: interpolating the Array[Byte]
    // directly would print its object identity, not the payload text.
    def jsonText = new String(jsonData, StandardCharsets.UTF_8)
    try {
      Json.parseBytes(jsonData).map(_.asJsonObject).flatMap { js =>
        val brokerId = js("broker").to[Int]
        val blockStart = js("block_start").to[String].toLong
        val blockEnd = js("block_end").to[String].toLong
        Some(ProducerIdBlock(brokerId, blockStart, blockEnd))
      }.getOrElse(throw new KafkaException(s"Failed to parse the producerId block json $jsonText"))
    } catch {
      case e: java.lang.NumberFormatException =>
        // this should never happen: the written data has exceeded long type limit
        fatal(s"Read json data $jsonText contains producerIds that have exceeded long type limit")
        throw e
    }
  }
}
/** An inclusive range [blockStartId, blockEndId] of producerIds claimed by broker `brokerId`. */
case class ProducerIdBlock(brokerId: Int, blockStartId: Long, blockEndId: Long) {
  // Rendered as "(brokerId:B,blockStartProducerId:S,blockEndProducerId:E)".
  override def toString: String =
    s"(brokerId:$brokerId,blockStartProducerId:$blockStartId,blockEndProducerId:$blockEndId)"
}
/**
 * Hands out producerIds for this broker by claiming exclusive blocks of
 * [[ProducerIdManager.PidBlockSize]] ids via conditional writes to the shared
 * ZooKeeper path. Allocation is synchronized on this instance.
 */
class ProducerIdManager(val brokerId: Int, val zkClient: KafkaZkClient) extends Logging {

  this.logIdent = "[ProducerId Manager " + brokerId + "]: "

  // Block currently owned by this broker, and the next id to hand out from it.
  private var currentProducerIdBlock: ProducerIdBlock = null
  private var nextProducerId: Long = -1L

  // grab the first block of producerIds
  this synchronized {
    getNewProducerIdBlock()
    nextProducerId = currentProducerIdBlock.blockStartId
  }

  // Claims the next free block via a compare-and-set loop on the ZK path;
  // retries until our conditional write wins.
  private def getNewProducerIdBlock(): Unit = {
    var zkWriteComplete = false
    while (!zkWriteComplete) {
      // refresh current producerId block from zookeeper again
      val (dataOpt, zkVersion) = zkClient.getDataAndVersion(ProducerIdBlockZNode.path)

      // generate the new producerId block
      currentProducerIdBlock = dataOpt match {
        case Some(data) =>
          val currProducerIdBlock = ProducerIdManager.parseProducerIdBlockData(data)
          debug(s"Read current producerId block $currProducerIdBlock, Zk path version $zkVersion")

          if (currProducerIdBlock.blockEndId > Long.MaxValue - ProducerIdManager.PidBlockSize) {
            // we have exhausted all producerIds (wow!), treat it as a fatal error
            fatal(s"Exhausted all producerIds as the next block's end producerId has exceeded long type limit (current block end producerId is ${currProducerIdBlock.blockEndId})")
            throw new KafkaException("Have exhausted all producerIds.")
          }

          ProducerIdBlock(brokerId, currProducerIdBlock.blockEndId + 1L, currProducerIdBlock.blockEndId + ProducerIdManager.PidBlockSize)
        case None =>
          debug(s"There is no producerId block yet (Zk path version $zkVersion), creating the first block")
          ProducerIdBlock(brokerId, 0L, ProducerIdManager.PidBlockSize - 1)
      }

      val newProducerIdBlockData = ProducerIdManager.generateProducerIdBlockJson(currentProducerIdBlock)

      // try to write the new producerId block into zookeeper
      val (succeeded, version) = zkClient.conditionalUpdatePath(ProducerIdBlockZNode.path,
        newProducerIdBlockData, zkVersion, Some(checkProducerIdBlockZkData))
      zkWriteComplete = succeeded

      if (zkWriteComplete)
        info(s"Acquired new producerId block $currentProducerIdBlock by writing to Zk with path version $version")
    }
  }

  // Optimistic-lock check used by conditionalUpdatePath: re-reads the ZK node
  // and reports whether it still holds the block we expect, plus its version.
  private def checkProducerIdBlockZkData(zkClient: KafkaZkClient, path: String, expectedData: Array[Byte]): (Boolean, Int) = {
    try {
      val expectedPidBlock = ProducerIdManager.parseProducerIdBlockData(expectedData)
      zkClient.getDataAndVersion(ProducerIdBlockZNode.path) match {
        case (Some(data), zkVersion) =>
          val currProducerIdBLock = ProducerIdManager.parseProducerIdBlockData(data)
          (currProducerIdBLock == expectedPidBlock, zkVersion)
        case (None, _) => (false, -1)
      }
    } catch {
      case e: Exception =>
        warn(s"Error while checking for producerId block Zk data on path $path: expected data " +
          s"${new String(expectedData, StandardCharsets.UTF_8)}", e)
        (false, -1)
    }
  }

  /** Returns the next unique producerId, fetching a new block when the current one is exhausted. */
  def generateProducerId(): Long = {
    this synchronized {
      // grab a new block of producerIds if this block has been exhausted
      if (nextProducerId > currentProducerIdBlock.blockEndId) {
        getNewProducerIdBlock()
        nextProducerId = currentProducerIdBlock.blockStartId + 1
      } else {
        nextProducerId += 1
      }

      nextProducerId - 1
    }
  }

  def shutdown(): Unit = {
    info(s"Shutdown complete: last producerId assigned $nextProducerId")
  }
}
| noslowerdna/kafka | core/src/main/scala/kafka/coordinator/transaction/ProducerIdManager.scala | Scala | apache-2.0 | 6,646 |
package pme.connect4.gui.d3
import pme.connect4.gui.d3.ConnectFourConfig3D._
import pme.connect4.gui.{ControlPane, InfoPane}
import scalafx.Includes._
import scalafx.application.JFXApp
import scalafx.beans.property.DoubleProperty
import scalafx.scene._
import scalafx.scene.input.MouseEvent
import scalafx.scene.layout.VBox
import scalafx.scene.paint.Color
import scalafx.scene.shape.Box
/** ScalaFX implementation of `MoleculeSampleApp` from tutorial
* [[http://docs.oracle.com/javafx/8/3d_graphics/jfxpub-3d_graphics.htm Getting Started with JavaFX 3D Graphics]]
* by Cindy Castillo and John Yoon.
*
* @author Jarek Sacha
*/
/**
 * 3D Connect Four application: builds the scene (camera rig, ground plane,
 * game board) and wires mouse input to camera rotation (primary button),
 * zoom (secondary button) and panning (middle button).
 */
object ConnectFour3D extends JFXApp {
  System.setProperty("prism.dirtyopts", "false")

  // Mutable mouse-tracking state shared by the drag handlers.
  private object Model {
    val mousePosX = DoubleProperty(.0)
    val mousePosY = DoubleProperty(.0)
    val mouseOldX = DoubleProperty(.0)
    val mouseOldY = DoubleProperty(.0)
    val mouseDeltaX = DoubleProperty(.0)
    val mouseDeltaY = DoubleProperty(.0)
  }

  // Scene graph: control pane, 3D sub-scene and info pane stacked vertically.
  private object View {
    app =>
    val root = new VBox()
    val content3d = new Xform()
    val camera: PerspectiveCamera = new PerspectiveCamera(true)
    val cameraXform = new Xform()
    val cameraXform2 = new Xform()
    val cameraXform3 = new Xform()
    val cameraDistance: Double = 450
    val gameBoard: GameBoard3D = new GameBoard3D

    private def buildScene() {
      root.content = Seq(controlPane, subScene, infoPanel)
    }

    private def subScene: SubScene = new SubScene(content3d, panelSize._1, panelSize._2 - 100, true, SceneAntialiasing.Disabled) {
      camera = app.camera
    }

    lazy val controlPane = new ControlPane(gameBoard)
    lazy val infoPanel = new InfoPane(gameBoard)

    // Nested Xforms allow independent rotation (outer), pan (middle) and the
    // 180° Z flip (inner) of the camera.
    private def buildCamera() {
      content3d.children += cameraXform
      cameraXform.children += cameraXform2
      cameraXform2.children += cameraXform3
      cameraXform3.children += camera
      cameraXform3.rotateZ = 180.0
      camera.nearClip = 0.1
      camera.farClip = 10000.0
      camera.translateZ = -cameraDistance
      cameraXform.ry.angle = 320.0
      cameraXform.rx.angle = 40
    }

    private def buildGround() {
      val ground = new Box(groundSize, -gameOffsetY, groundSize) {
        translateY = gameOffsetY - fieldWidth
        material = groundMaterial
      }
      content3d.children += ground
    }

    private def buildGameBoard() {
      gameBoard.startNewGame()
      content3d.children += gameBoard
    }

    buildScene()
    buildCamera()
    buildGround()
    buildGameBoard()
  }

  stage = new JFXApp.PrimaryStage {
    scene = new Scene(View.root, panelSize._1, panelSize._2, depthBuffer = true, antiAliasing = SceneAntialiasing.Balanced) {
      fill = Color.Gray
      title = "4 Connect"
    }
  }

  handleMouse()

  // Installs the mouse handlers: press records the anchor position; drag
  // rotates, zooms or pans the camera depending on the pressed button.
  private def handleMouse() {
    stage.scene().onMousePressed = (me: MouseEvent) => {
      Model.mousePosX() = me.sceneX
      Model.mousePosY() = me.sceneY
      Model.mouseOldX() = me.sceneX
      Model.mouseOldY() = me.sceneY
    }
    stage.scene().onMouseDragged = (me: MouseEvent) => {
      Model.mouseOldX() = Model.mousePosX()
      Model.mouseOldY() = Model.mousePosY()
      Model.mousePosX() = me.sceneX
      Model.mousePosY() = me.sceneY
      Model.mouseDeltaX() = Model.mousePosX() - Model.mouseOldX()
      Model.mouseDeltaY() = Model.mousePosY() - Model.mouseOldY()
      val modifier = if (me.isControlDown) 0.1 else if (me.isShiftDown) 10 else 1.0
      val modifierFactor = 0.1
      if (me.isPrimaryButtonDown) {
        View.cameraXform.ry.angle = View.cameraXform.ry.angle() - Model.mouseDeltaX() * modifierFactor * modifier * 2.0
        View.cameraXform.rx.angle = View.cameraXform.rx.angle() + Model.mouseDeltaY() * modifierFactor * modifier * 2.0
      } else if (me.isSecondaryButtonDown) {
        val z = View.camera.translateZ()
        val newZ = z + Model.mouseDeltaX() * modifierFactor * modifier
        View.camera.translateZ = newZ
      } else if (me.isMiddleButtonDown) {
        View.cameraXform2.t.x = View.cameraXform2.t.x() + Model.mouseDeltaX() * modifierFactor * modifier * 0.3
        // Bug fix: the vertical delta must update t.y — the original assigned
        // it to t.x, overwriting horizontal panning and never panning vertically.
        View.cameraXform2.t.y = View.cameraXform2.t.y() + Model.mouseDeltaY() * modifierFactor * modifier * 0.3
      }
    }
  }
}
| pme123/scala-connect4 | src/main/scala/pme/connect4/gui/d3/ConnectFour3D.scala | Scala | mit | 4,255 |
package firebase.messaging
import scala.scalajs.js
import js.annotation._
import js.|
// Scala.js facade for the Firebase Cloud Messaging JavaScript API.
// All members are implemented by the native JS object (js.native stubs).
@js.native
trait Messaging extends js.Object {
  def deleteToken(token: String): firebase.Promise[js.Any] | Null = js.native
  def getToken(): firebase.Promise[js.Any] | Null = js.native
  def onMessage(nextOrObserver: Object): js.Function0[Any] = js.native
  def onTokenRefresh(nextOrObserver: Object): js.Function0[Any] = js.native
  def requestPermission(): firebase.Promise[js.Any] | Null = js.native
  def setBackgroundMessageHandler(callback: js.Function1[Object, Any]): js.Dynamic = js.native
  def useServiceWorker(registration: js.Any): js.Dynamic = js.native
}
} | gumdrop/quizleague-maintain | js/src/main/scala/firebase/messaging/Messaging.scala | Scala | mit | 703 |
package cfc.shale.redis_client
package containers
import commands._
// A Redis SET of string members stored under `key`. Each operation builds
// the corresponding command value; nothing is executed here.
case class RedisStringSet(key: String) extends RedisSet[String] {
  override def get = RedisGetStringSet(key)

  override def remove(value: String): RedisCommand[Unit] =
    RedisRemoveFromStringSet(key, value)

  override def add(value: String): RedisCommand[Unit] =
    RedisAddToStringSet(key, value)
}
| cardforcoin/shale-scala | redis-client/src/main/scala/cfc/shale/redis_client/containers/RedisStringSet.scala | Scala | mit | 377 |
/*
* Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.hue.livy.server.interactive
import java.net.URL
import java.util.concurrent.TimeUnit
import com.cloudera.hue.livy.Logging
import com.cloudera.hue.livy.msgs.ExecuteRequest
import com.cloudera.hue.livy.server.interactive.InteractiveSession.SessionFailedToStart
import com.cloudera.hue.livy.sessions._
import com.fasterxml.jackson.core.JsonParseException
import org.json4s.JsonAST.JString
import org.json4s.JsonDSL._
import org.json4s._
import org.scalatra._
import org.scalatra.json.JacksonJsonSupport
import scala.concurrent._
import scala.concurrent.duration._
object InteractiveSessionServlet extends Logging
/**
 * REST endpoints for managing interactive Livy sessions: creation, lookup,
 * lifecycle (stop/interrupt/delete), logs, and statement execution. All
 * long-running operations are returned as Scalatra AsyncResults.
 */
class InteractiveSessionServlet(sessionManager: SessionManager)
  extends ScalatraServlet
  with FutureSupport
  with MethodOverride
  with JacksonJsonSupport
  with UrlGeneratorSupport
{
  override protected implicit def executor: ExecutionContextExecutor = ExecutionContext.global

  override protected implicit def jsonFormats: Formats = DefaultFormats ++ Serializers.Formats

  // All responses are JSON.
  before() {
    contentType = formats("json")
  }

  // Lists all known sessions.
  get("/") {
    Map(
      "sessions" -> sessionManager.getSessions
    )
  }

  // Looks up a single session by id.
  val getSession = get("/:sessionId") {
    val sessionId = params("sessionId").toInt

    sessionManager.get(sessionId) match {
      case Some(session) => session
      case None => NotFound("Session not found")
    }
  }

  // Creates a session asynchronously; responds 201 with a Location header.
  post("/") {
    val createInteractiveRequest = parsedBody.extract[CreateInteractiveRequest]

    new AsyncResult {
      val is = {
        val sessionFuture = sessionManager.createSession(createInteractiveRequest)
        sessionFuture.map { case session =>
          Created(session,
            headers = Map(
              "Location" -> url(getSession, "sessionId" -> session.id.toString)
            )
          )
        }
      }
    }
  }

  // Callback used by the started session process to report its URL;
  // only valid while the session is still starting.
  post("/:sessionId/callback") {
    val sessionId = params("sessionId").toInt
    val callback = parsedBody.extract[CallbackRequest]

    sessionManager.get(sessionId) match {
      case Some(session) =>
        if (session.state == Starting()) {
          session.url = new URL(callback.url)
          Accepted()
        } else {
          BadRequest("Session is in wrong state")
        }
      case None => NotFound("Session not found")
    }
  }

  // Stops the session's execution without deleting it.
  post("/:sessionId/stop") {
    val sessionId = params("sessionId").toInt

    sessionManager.get(sessionId) match {
      case Some(session) =>
        val future = session.stop()

        new AsyncResult() { val is = for { _ <- future } yield NoContent() }
      case None => NotFound("Session not found")
    }
  }

  // Interrupts the currently running statement of a session.
  post("/:sessionId/interrupt") {
    val sessionId = params("sessionId").toInt

    sessionManager.get(sessionId) match {
      case Some(session) =>
        val future = for {
          _ <- session.interrupt()
        } yield Accepted()

        // FIXME: this is silently eating exceptions.
        new AsyncResult() { val is = for { _ <- future } yield NoContent() }
      case None => NotFound("Session not found")
    }
  }

  // Deletes the session entirely.
  delete("/:sessionId") {
    val sessionId = params("sessionId").toInt
    val future = for {
      _ <- sessionManager.delete(sessionId)
    } yield Accepted()

    new AsyncResult() { val is = for { _ <- future } yield NoContent() }
  }

  // Returns a windowed view of the session log ("from"/"size" query params).
  get("/:sessionId/log") {
    val sessionId = params("sessionId").toInt

    sessionManager.get(sessionId) match {
      case None => NotFound("Session not found")
      case Some(session: InteractiveSession) =>
        val from = params.get("from").map(_.toInt)
        val size = params.get("size").map(_.toInt)
        val (from_, total, logLines) = Serializers.getLogs(session, from, size)

        ("id", session.id) ~
          ("from", from_) ~
          ("total", total) ~
          ("log", logLines)
    }
  }

  // Lists statements of a session, windowed by "from"/"size".
  get("/:sessionId/statements") {
    val sessionId = params("sessionId").toInt

    sessionManager.get(sessionId) match {
      case None => NotFound("Session not found")
      case Some(session: InteractiveSession) =>
        val from = params.get("from").map(_.toInt).getOrElse(0)
        val size = params.get("size").map(_.toInt).getOrElse(session.statements.length)

        Map(
          "total_statements" -> session.statements.length,
          "statements" -> session.statements.view(from, from + size)
        )
    }
  }

  // Looks up a single statement by session and statement id.
  val getStatement = get("/:sessionId/statements/:statementId") {
    val sessionId = params("sessionId").toInt
    val statementId = params("statementId").toInt
    val from = params.get("from").map(_.toInt)
    val size = params.get("size").map(_.toInt)

    sessionManager.get(sessionId) match {
      case None => NotFound("Session not found")
      case Some(session) =>
        session.statements.lift(statementId) match {
          case None => NotFound("Statement not found")
          case Some(statement) =>
            Serializers.serializeStatement(statement, from, size)
        }
    }
  }

  // Submits a statement for execution; responds 201 with a Location header.
  post("/:sessionId/statements") {
    val sessionId = params("sessionId").toInt
    val req = parsedBody.extract[ExecuteRequest]

    sessionManager.get(sessionId) match {
      case Some(session) =>
        val statement = session.executeStatement(req)

        Created(statement,
          headers = Map(
            "Location" -> url(getStatement,
              "sessionId" -> session.id.toString,
              "statementId" -> statement.id.toString)))
      case None => NotFound("Session not found")
    }
  }

  // Maps known exception types to HTTP error responses.
  error {
    case e: JsonParseException => BadRequest(e.getMessage)
    case e: MappingException => BadRequest(e.getMessage)
    case e: SessionFailedToStart => InternalServerError(e.getMessage)
    case e: dispatch.StatusCode => ActionResult(ResponseStatus(e.code), e.getMessage, Map.empty)
    case e =>
      InteractiveSessionServlet.error("internal error", e)
      InternalServerError(e.toString)
  }
}
// Request body carrying a callback URL.
// NOTE(review): appears unused in this chunk — confirm it has callers elsewhere before removing.
private case class CallbackRequest(url: String)
private object Serializers {
  import JsonDSL._

  // Custom json4s serializers, grouped so callers can register session-only,
  // statement-only, or the combined set.
  def SessionFormats: List[CustomSerializer[_]] = List(SessionSerializer, SessionKindSerializer, SessionStateSerializer)
  def StatementFormats: List[CustomSerializer[_]] = List(StatementSerializer, StatementStateSerializer)
  def Formats: List[CustomSerializer[_]] = SessionFormats ++ StatementFormats

  // Enum-like values are rendered as their toString representation.
  private def serializeSessionState(state: State) = JString(state.toString)
  private def serializeSessionKind(kind: Kind) = JString(kind.toString)
  private def serializeStatementState(state: Statement.State) = JString(state.toString)

  // Renders a session summary; the log field contains at most the last 10 lines.
  def serializeSession(session: InteractiveSession): JValue = {
    ("id", session.id) ~
      ("state", serializeSessionState(session.state)) ~
      ("kind", serializeSessionKind(session.kind)) ~
      ("proxyUser", session.proxyUser) ~
      ("log", getLogs(session, None, Some(10))._3)
  }

  // Returns (actual start index, total number of log lines, selected window of lines).
  // When `fromOpt` is absent or negative, the window is anchored to the tail of the
  // log: the last `size` lines (size defaults to 100).
  def getLogs(session: InteractiveSession, fromOpt: Option[Int], sizeOpt: Option[Int]) = {
    val lines = session.logLines()
    val size = sizeOpt.getOrElse(100)
    var from = fromOpt.getOrElse(-1)
    if (from < 0) {
      from = math.max(0, lines.length - size)
    }
    val until = from + size
    (from, lines.length, lines.view(from, until))
  }

  def serializeStatement(statement: Statement, from: Option[Int], size: Option[Int]): JValue = {
    // Take a couple milliseconds to see if the statement has finished;
    // if it hasn't, serialize the output as null rather than blocking longer.
    val output = try {
      Await.result(statement.output(), Duration(100, TimeUnit.MILLISECONDS))
    } catch {
      case _: TimeoutException => null
    }
    ("id" -> statement.id) ~
      ("state" -> serializeStatementState(statement.state)) ~
      ("output" -> output)
  }

  case object SessionSerializer extends CustomSerializer[InteractiveSession](implicit formats => ( {
    // We don't support deserialization.
    PartialFunction.empty
  }, {
    case session: InteractiveSession =>
      serializeSession(session)
  }
  )
  )

  case object SessionKindSerializer extends CustomSerializer[Kind](implicit formats => ( {
    // Deserialization accepts a couple of aliases for each kind.
    case JString("spark") | JString("scala") => Spark()
    case JString("pyspark") | JString("python") => PySpark()
  }, {
    case kind: Kind => serializeSessionKind(kind)
  }
  )
  )

  case object SessionStateSerializer extends CustomSerializer[State](implicit formats => ( {
    // We don't support deserialization.
    PartialFunction.empty
  }, {
    case state: State => JString(state.toString)
  }
  )
  )

  case object StatementSerializer extends CustomSerializer[Statement](implicit formats => ( {
    // We don't support deserialization.
    PartialFunction.empty
  }, {
    case statement: Statement =>
      // No output window: serialize the full output.
      serializeStatement(statement, None, None)
  }))

  case object StatementStateSerializer extends CustomSerializer[Statement.State](implicit formats => ( {
    // We don't support deserialization.
    PartialFunction.empty
  }, {
    case state: Statement.State => JString(state.toString)
  }
  )
  )
}
| kalahbrown/HueBigSQL | apps/spark/java/livy-server/src/main/scala/com/cloudera/hue/livy/server/interactive/InteractiveSessionServlet.scala | Scala | apache-2.0 | 9,681 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming
import java.{util => ju}
import java.lang.{Long => JLong}
import java.util.UUID
import scala.collection.JavaConverters._
import scala.util.control.NonFatal
import org.json4s._
import org.json4s.JsonAST.JValue
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import org.apache.spark.annotation.Experimental
/**
 * :: Experimental ::
 * Progress of the stateful operators of a [[StreamingQuery]], reported once per trigger.
 */
@Experimental
class StateOperatorProgress private[sql](
    val numRowsTotal: Long,
    val numRowsUpdated: Long) {

  /** Compact (single-line) JSON rendering of this progress. */
  def json: String = compact(render(jsonValue))

  /** Indented, human-readable JSON rendering of this progress. */
  def prettyJson: String = pretty(render(jsonValue))

  private[sql] def jsonValue: JValue =
    JObject(
      "numRowsTotal" -> JInt(numRowsTotal),
      "numRowsUpdated" -> JInt(numRowsUpdated))
}
/**
* :: Experimental ::
* Information about progress made in the execution of a [[StreamingQuery]] during
* a trigger. Each event relates to processing done for a single trigger of the streaming
* query. Events are emitted even when no new data is available to be processed.
*
* @param id An unique query id that persists across restarts. See `StreamingQuery.id()`.
* @param runId A query id that is unique for every start/restart. See `StreamingQuery.runId()`.
* @param name User-specified name of the query, null if not specified.
* @param timestamp Beginning time of the trigger in ISO8601 format, i.e. UTC timestamps.
* @param batchId A unique id for the current batch of data being processed. Note that in the
* case of retries after a failure a given batchId may be executed more than once.
* Similarly, when there is no data to be processed, the batchId will not be
* incremented.
* @param durationMs The amount of time taken to perform various operations in milliseconds.
* @param eventTime Statistics of event time seen in this batch. It may contain the following keys:
* {{{
* "max" -> "2016-12-05T20:54:20.827Z" // maximum event time seen in this trigger
* "min" -> "2016-12-05T20:54:20.827Z" // minimum event time seen in this trigger
* "avg" -> "2016-12-05T20:54:20.827Z" // average event time seen in this trigger
* "watermark" -> "2016-12-05T20:54:20.827Z" // watermark used in this trigger
* }}}
* All timestamps are in ISO8601 format, i.e. UTC timestamps.
* @param stateOperators Information about operators in the query that store state.
* @param sources detailed statistics on data being read from each of the streaming sources.
* @since 2.1.0
*/
@Experimental
class StreamingQueryProgress private[sql](
    val id: UUID,
    val runId: UUID,
    val name: String,
    val timestamp: String,
    val batchId: Long,
    val durationMs: ju.Map[String, JLong],
    val eventTime: ju.Map[String, String],
    val stateOperators: Array[StateOperatorProgress],
    val sources: Array[SourceProgress],
    val sink: SinkProgress) {

  /** The aggregate (across all sources) number of records processed in a trigger. */
  def numInputRows: Long = sources.map(_.numInputRows).sum

  /** The aggregate (across all sources) rate of data arriving. */
  def inputRowsPerSecond: Double = sources.map(_.inputRowsPerSecond).sum

  /** The aggregate (across all sources) rate at which Spark is processing data. */
  def processedRowsPerSecond: Double = sources.map(_.processedRowsPerSecond).sum

  /** The compact JSON representation of this progress. */
  def json: String = compact(render(jsonValue))

  /** The pretty (i.e. indented) JSON representation of this progress. */
  def prettyJson: String = pretty(render(jsonValue))

  override def toString: String = prettyJson

  // NOTE(review): `batchId` is not included in the JSON output below even though it is a
  // constructor parameter — upstream Spark versions do serialize it; confirm this is intentional.
  private[sql] def jsonValue: JValue = {
    // NaN/Infinity cannot be represented in JSON, so such rates are elided (JNothing).
    def safeDoubleToJValue(value: Double): JValue = {
      if (value.isNaN || value.isInfinity) JNothing else JDouble(value)
    }

    /** Convert map to JValue while handling empty maps. Also, this sorts the keys. */
    def safeMapToJValue[T](map: ju.Map[String, T], valueToJValue: T => JValue): JValue = {
      if (map.isEmpty) return JNothing
      val keys = map.asScala.keySet.toSeq.sorted
      // Build a single-field JObject per key and merge them all with `~`.
      keys.map { k => k -> valueToJValue(map.get(k)) : JObject }.reduce(_ ~ _)
    }

    ("id" -> JString(id.toString)) ~
      ("runId" -> JString(runId.toString)) ~
      ("name" -> JString(name)) ~
      ("timestamp" -> JString(timestamp)) ~
      ("numInputRows" -> JInt(numInputRows)) ~
      ("inputRowsPerSecond" -> safeDoubleToJValue(inputRowsPerSecond)) ~
      ("processedRowsPerSecond" -> safeDoubleToJValue(processedRowsPerSecond)) ~
      ("durationMs" -> safeMapToJValue[JLong](durationMs, v => JInt(v.toLong))) ~
      ("eventTime" -> safeMapToJValue[String](eventTime, s => JString(s))) ~
      ("stateOperators" -> JArray(stateOperators.map(_.jsonValue).toList)) ~
      ("sources" -> JArray(sources.map(_.jsonValue).toList)) ~
      ("sink" -> sink.jsonValue)
  }
}
/**
* :: Experimental ::
* Information about progress made for a source in the execution of a [[StreamingQuery]]
* during a trigger. See [[StreamingQueryProgress]] for more information.
*
* @param description Description of the source.
* @param startOffset The starting offset for data being read.
* @param endOffset The ending offset for data being read.
* @param numInputRows The number of records read from this source.
* @param inputRowsPerSecond The rate at which data is arriving from this source.
* @param processedRowsPerSecond The rate at which data from this source is being processed by
* Spark.
* @since 2.1.0
*/
@Experimental
class SourceProgress protected[sql](
    val description: String,
    val startOffset: String,
    val endOffset: String,
    val numInputRows: Long,
    val inputRowsPerSecond: Double,
    val processedRowsPerSecond: Double) {

  /** Compact (single-line) JSON rendering of this progress. */
  def json: String = compact(render(jsonValue))

  /** Indented, human-readable JSON rendering of this progress. */
  def prettyJson: String = pretty(render(jsonValue))

  override def toString: String = prettyJson

  private[sql] def jsonValue: JValue = {
    // JSON has no representation for NaN/Infinity, so such rates are dropped (JNothing).
    def rateOrNothing(rate: Double): JValue =
      if (rate.isNaN || rate.isInfinity) JNothing else JDouble(rate)

    ("description" -> JString(description)) ~
      ("startOffset" -> tryParse(startOffset)) ~
      ("endOffset" -> tryParse(endOffset)) ~
      ("numInputRows" -> JInt(numInputRows)) ~
      ("inputRowsPerSecond" -> rateOrNothing(inputRowsPerSecond)) ~
      ("processedRowsPerSecond" -> rateOrNothing(processedRowsPerSecond))
  }

  // Offsets are carried as strings; embed them as structured JSON when they parse,
  // otherwise fall back to the raw string.
  private def tryParse(json: String) = {
    try parse(json)
    catch { case NonFatal(e) => JString(json) }
  }
}
/**
* :: Experimental ::
* Information about progress made for a sink in the execution of a [[StreamingQuery]]
* during a trigger. See [[StreamingQueryProgress]] for more information.
*
* @param description Description of the source corresponding to this status.
* @since 2.1.0
*/
@Experimental
class SinkProgress protected[sql](
    val description: String) {

  /** Compact (single-line) JSON rendering of this progress. */
  def json: String = compact(render(jsonValue))

  /** Indented, human-readable JSON rendering of this progress. */
  def prettyJson: String = pretty(render(jsonValue))

  override def toString: String = prettyJson

  private[sql] def jsonValue: JValue =
    JObject("description" -> JString(description))
}
| kimoonkim/spark | sql/core/src/main/scala/org/apache/spark/sql/streaming/progress.scala | Scala | apache-2.0 | 8,561 |
/*
* Wire
* Copyright (C) 2016 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.waz.service.otr
import com.waz.log.LogSE._
import com.waz.api.Verification
import com.waz.content._
import com.waz.log.BasicLogging.LogTag.DerivedLogTag
import com.waz.log.LogShow.SafeToLog
import com.waz.model.ConversationData.ConversationType
import com.waz.model._
import com.waz.model.otr.UserClients
import com.waz.service.messages.MessagesService
import com.waz.utils.Serialized
import scala.collection.breakOut
import scala.concurrent.Future
/**
* Updates `verified` flag on user and conversation when clients state changes.
* Conversation state changes generate messages in that conversation.
* Root cause for conv state change is tracked to determine resulting message type.
* If conv gets unverified because new client was added, then it's state is changed to UNVERIFIED,
* if device was manually unverified, then conv state goes to UNKNOWN
*/
class VerificationStateUpdater(selfUserId: UserId,
                               usersStorage: UsersStorage,
                               clientsStorage: OtrClientsStorage,
                               convs: ConversationStorage,
                               msgsService: MessagesService,
                               membersStorage: MembersStorage) extends DerivedLogTag {
  import Verification._
  import VerificationStateUpdater._
  import com.waz.threading.Threading.Implicits.Background
  import com.waz.utils.events.EventContext.Implicits.global

  // All verification-state work for this user is serialized on this key to avoid races.
  private val SerializationKey = serializationKey(selfUserId)

  private val updateProcessor: VerificationStateUpdate => Future[Unit] = { update =>
    // Fix: discard the result with `()` instead of `Unit` — the latter references the
    // `Unit` companion object and only compiled via silent value discarding.
    addMessagesAfterVerificationUpdate(update.convUpdates, update.convUsers, update.changes) map { _ => () }
  }

  // Seed the verification state from the currently known clients of the self user.
  clientsStorage.getClients(selfUserId).map { clients =>
    onClientsChanged(Map(selfUserId -> (UserClients(selfUserId, clients.map(c => c.id -> c).toMap), ClientAdded)))
  }

  clientsStorage.onAdded { ucs =>
    verbose(l"clientsStorage.onAdded: $ucs")
    onClientsChanged(ucs.map { uc => uc.user -> (uc, ClientAdded) } (breakOut))
  }

  clientsStorage.onUpdated { updates =>
    verbose(l"clientsStorage.onUpdated: $updates")
    onClientsChanged(updates.map {
      case update @ (_, uc) => uc.user -> (uc, VerificationChange(update))
    } (breakOut))
  }

  membersStorage.onAdded { members =>
    Serialized.future(SerializationKey) {
      updateConversations(members.map(_.convId).distinct, members.map { member => member.userId -> MemberAdded } (breakOut))
    }
  }

  membersStorage.onDeleted { members =>
    Serialized.future(SerializationKey) {
      updateConversations(members.map(_._2).distinct, members.map { _._1 -> Other } (breakOut))
    }
  }

  /**
   * Generates conversation messages for verification transitions:
   * a VERIFIED message when a conversation becomes verified, and an
   * "unverified" message (attributed to self and/or other users) when a
   * previously verified conversation loses that state.
   */
  private def addMessagesAfterVerificationUpdate(updates: Seq[(ConversationData, ConversationData)],
                                                 convUsers: Map[ConvId, Seq[UserData]],
                                                 changes: Map[UserId, VerificationChange]) =
    Future.traverse(updates) {
      case (_, up) if up.verified == Verification.VERIFIED => msgsService.addOtrVerifiedMessage(up.id).map(_ => ())
      case (prev, up) if prev.verified == Verification.VERIFIED =>
        verbose(l"addMessagesAfterVerificationUpdate with prev=${prev.verified} and up=${up.verified}")
        val convId = up.id
        // Users that are no longer verified, paired with the cause of their change.
        val changedUsers = convUsers(convId).filter(!_.isVerified).flatMap { u => changes.get(u.id).map(u.id -> _) }
        // If all changes share a cause (client added / member added), report that cause;
        // otherwise fall back to reporting only the explicitly unverified clients.
        val (users, change) =
          if (changedUsers.forall(c => c._2 == ClientAdded)) (changedUsers map (_._1), ClientAdded)
          else if (changedUsers.forall(c => c._2 == MemberAdded)) (changedUsers map (_._1), MemberAdded)
          else (changedUsers collect { case (user, ClientUnverified) => user }, ClientUnverified)
        // Self gets its own message, separate from the message about other users.
        val (self, other) = users.partition(_ == selfUserId)
        for {
          _ <- if (self.nonEmpty) msgsService.addOtrUnverifiedMessage(convId, Seq(selfUserId), change) else Future.successful(())
          _ <- if (other.nonEmpty) msgsService.addOtrUnverifiedMessage(convId, other, change) else Future.successful(())
        } yield ()
      case _ =>
        Future.successful(())
    }

  private[service] def onClientsChanged(changes: Map[UserId, (UserClients, VerificationChange)]) = Serialized.future(SerializationKey) {

    // A user counts as verified iff they have at least one client and all clients are verified.
    def updateUserVerified(user: UserData) = {
      val clients = changes(user.id)._1.clients.values
      user.copy(verified = user.verified.updated(clients.nonEmpty && clients.forall(_.isVerified)))
    }

    // update `UserData.verified` flag
    def updateUsers() = usersStorage.updateAll2(changes.keys.toSeq, updateUserVerified)

    // If a user ends up verified, the cause no longer matters (Other); otherwise keep the original cause.
    def collectUserChanges(updates: Seq[(UserData, UserData)]): Map[UserId, VerificationChange] =
      updates.map {
        case (_, u) => u.id -> (if (u.isVerified) Other else changes(u.id)._2)
      } (breakOut)

    verbose(l"onClientsChanged: ${changes.values.map(_._1)}")

    for {
      updates <- updateUsers()
      userChanges = collectUserChanges(updates)
      convs <- Future.traverse(userChanges.keys) { membersStorage.getActiveConvs }
      _ <- updateConversations(convs.flatten.toSeq.distinct, userChanges)
    } yield ()
  }

  // Recomputes the `verified` state of the given conversations and emits the
  // resulting transition messages through `updateProcessor`.
  private[service] def updateConversations(ids: Seq[ConvId], changes: Map[UserId, VerificationChange]) = {

    def convUsers = Future.traverse(ids) { convId =>
      for {
        userIds <- membersStorage.getActiveUsers(convId)
        users <- usersStorage.getAll(userIds)
      } yield convId -> users
    }

    def update(conv: ConversationData, us: Seq[Option[UserData]]) = {
      // XXX: this condition is a bit complicated to avoid situations where conversation gets verified just because it temporarily contains just self user
      // this could happen if conv was just created and new members are being added (race condition), or in pending conv requests.
      // FIXME: this is not really correct, if all other users leave group conversation then it should actually get VERIFIED
      val isVerified = us.nonEmpty && us.flatten.forall(_.verified == VERIFIED) && !us.exists(_.isEmpty) &&
        (conv.convType == ConversationType.Group || conv.convType == ConversationType.OneToOne) &&
        (us.size > 1 || conv.verified != UNKNOWN)

      // A new client (or an unknown user) downgrades to UNVERIFIED; a manual
      // unverification downgrades to UNKNOWN.
      def deviceAdded = us.flatten.exists(u => changes.get(u.id).contains(ClientAdded)) || !us.exists(_.isEmpty)

      val state = (isVerified, conv.verified) match {
        case (false, VERIFIED) if deviceAdded => UNVERIFIED
        case (false, VERIFIED) => UNKNOWN
        case (true, _) => VERIFIED
        case (false, current) => current
      }
      conv.copy(verified = state)
    }

    verbose(l"updateConversations: $ids")

    for {
      users <- convUsers
      usersMap = users.filter(_._2.nonEmpty).toMap
      updates <- convs.updateAll2(usersMap.keys.toSeq, { conv => update(conv, usersMap(conv.id)) })
      _ = verbose(l"updateConversations: ${users.flatMap(_._2).map(_.map(_.getDisplayName))}")
      flattened = usersMap.map { case (conv, userList) => conv -> userList.flatten }
      _ <- updateProcessor(VerificationStateUpdate(updates, flattened, changes))
    } yield ()
  }
}
object VerificationStateUpdater extends DerivedLogTag {

  def serializationKey(userId: UserId) = (userId, "verification-state-update")

  // XXX: small hack to synchronize other operations with verification state updating;
  // callers sometimes need this state to be up to date before proceeding.
  def awaitUpdated(userId: UserId) = Serialized.future(serializationKey(userId)) { Future.successful(()) }

  case class VerificationStateUpdate(convUpdates: Seq[(ConversationData, ConversationData)], convUsers: Map[ConvId, Seq[UserData]], changes: Map[UserId, VerificationChange])

  /** The root cause of a verification state change; determines the resulting message type. */
  sealed trait VerificationChange extends SafeToLog

  case object ClientUnverified extends VerificationChange
  case object ClientAdded extends VerificationChange
  case object MemberAdded extends VerificationChange
  case object Other extends VerificationChange

  object VerificationChange {
    /** Classifies a client-list update: a newly added client wins over an unverified one. */
    def apply(update: (UserClients, UserClients)): VerificationChange = {
      val (previous, current) = update
      val knownIds = previous.clients.keySet
      if (current.clients.keysIterator.exists(id => !knownIds(id))) ClientAdded
      else if (current.clients.exists { case (id, c) => !c.isVerified && previous.clients.get(id).exists(_.isVerified) }) ClientUnverified
      else Other
    }
  }
}
| wireapp/wire-android-sync-engine | zmessaging/src/main/scala/com/waz/service/otr/VerificationStateUpdater.scala | Scala | gpl-3.0 | 9,289 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.