| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
package org.tinydvr.config
import java.sql.{Connection, DriverManager}
import org.squeryl.adapters.H2Adapter
import org.squeryl.internals.DatabaseAdapter
case class DatabaseConnectionInfo(url: String, username: String, password: String) {
private val H2DB_REGEX = """^jdbc:h2:.*""".r
def getAdapter: DatabaseAdapter = {
url match {
case H2DB_REGEX() => new H2Adapter
case _ => throw new IllegalArgumentException("Could not create adapter for url \"" + url + "\"")
}
}
def getConnection: Connection = {
DriverManager.getConnection(url, username, password)
}
}
case class RecordingsConfig(directory: String, fileName: String)
case class SchedulesDirectCredentials(username: String, password: String)
case class TunerConfiguration(executable: String, arguments: List[String])
case class ListingsConfiguration(
updateStationFrequencyInHours: Int,
updateListingsFrequencyInHours: Int,
retainProgramsPeriodInDays: Int,
fetchNumDaysOfListings: Int
)
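A minimal usage sketch of the configuration types above (the URL and credentials are made up, and the H2 driver is assumed to be on the classpath):

// Illustrative only: an in-memory H2 database.
val db = DatabaseConnectionInfo("jdbc:h2:mem:tinydvr", "sa", "")
val adapter = db.getAdapter       // H2Adapter, because the URL matches the "^jdbc:h2:.*" pattern
val connection = db.getConnection // a plain java.sql.Connection from DriverManager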
| (code above) | lou-k/tinydvr | src/main/scala/org/tinydvr/config/ConfigurationTypes.scala | Scala | gpl-3.0 | 989 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.datacompaction
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.metadata.CarbonTableIdentifier
import org.apache.carbondata.core.util.CarbonProperties
/**
* Functional test (FT) for data compaction boundary condition verification.
*/
class DataCompactionBoundaryConditionsTest extends QueryTest with BeforeAndAfterAll {
val carbonTableIdentifier: CarbonTableIdentifier =
new CarbonTableIdentifier("default", "boundarytest".toLowerCase(), "1")
override def beforeAll {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.COMPACTION_SEGMENT_LEVEL_THRESHOLD, "2,2")
sql("drop table if exists boundarytest")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "mm/dd/yyyy")
sql(
"CREATE TABLE IF NOT EXISTS boundarytest (country String, ID Int, date " +
"Timestamp, name String, phonetype String, serialname String, salary Int) " +
"STORED AS carbondata"
)
}
/**
* Compaction verification in case of no loads.
*/
test("check if compaction is completed correctly.") {
try {
sql("alter table boundarytest compact 'minor'")
sql("alter table boundarytest compact 'major'")
}
catch {
case e: Exception =>
assert(false)
}
}
/**
* Compaction verification in case of a single load.
*/
test("check if compaction is completed correctly for one load.") {
var csvFilePath1 = s"$resourcesPath/compaction/compaction1.csv"
sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE boundarytest " +
"OPTIONS" +
"('DELIMITER'= ',', 'QUOTECHAR'= '\"')"
)
sql("alter table boundarytest compact 'minor'")
sql("alter table boundarytest compact 'major'")
}
test("check if compaction is completed correctly for multiple load.") {
var csvFilePath1 = s"$resourcesPath/compaction/compaction1.csv"
sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE boundarytest " +
"OPTIONS" +
"('DELIMITER'= ',', 'QUOTECHAR'= '\"')"
)
sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE boundarytest " +
"OPTIONS" +
"('DELIMITER'= ',', 'QUOTECHAR'= '\"')"
)
val df = sql("select * from boundarytest")
sql("alter table boundarytest compact 'major'")
checkAnswer(df, sql("select * from boundarytest"))
}
override def afterAll {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
.addProperty(CarbonCommonConstants.COMPACTION_SEGMENT_LEVEL_THRESHOLD,
CarbonCommonConstants.DEFAULT_SEGMENT_LEVEL_THRESHOLD)
sql("drop table if exists boundarytest")
}
}
| (code above) | zzcclp/carbondata | integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/DataCompactionBoundaryConditionsTest.scala | Scala | apache-2.0 | 3,729 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hbase
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.client._
import org.apache.hadoop.hbase.filter._
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.hbase.{HBaseConfiguration, _}
import org.apache.log4j.Logger
import org.apache.spark.TaskContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.{expressions, InternalRow}
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.{Row, DataFrame, SQLContext}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.hbase.catalyst.NotPusher
import org.apache.spark.sql.hbase.catalyst.expressions.PartialPredicateOperations.partialPredicateReducer
import org.apache.spark.sql.hbase.types.Range
import org.apache.spark.sql.hbase.util._
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
class HBaseSource extends SchemaRelationProvider {
// Returns a new HBase relation with the given parameters
override def createRelation(
sqlContext: SQLContext,
parameters: Map[String, String],
schema: StructType): BaseRelation = {
val tableName = parameters("tableName")
val hbaseTable = parameters("hbaseTableName")
// This is the HBase table namespace, not the logical table's namespace. We don't actually
// use it; it may always be the default namespace.
val rawNamespace = if (hbaseTable.contains("\\.")) {
hbaseTable.split("\\.")(0)
} else {
""
}
val colsSeq = schema.fieldNames
val keyCols = parameters("keyCols").split(",").map(_.trim)
val colsMapping = parameters("colsMapping").split(",").map(_.trim)
val encodingFormat =
parameters.get("encodingFormat") match {
case Some(encoding) => encoding.toLowerCase
case None => "binaryformat"
}
val infoMap: Map[String, (String, String)] =
colsMapping.map { colMapping =>
val mapping = colMapping.split("=")
if (mapping.length != 2) {
throw new Exception(s"Syntax Error of column mapping($colMapping), " +
"(sqlCol=colFamily.colQualifier, needs \\"=\\")")
}
if (!colsSeq.contains(mapping(0))) {
throw new Exception(s"Syntax Error of column mapping($colMapping), " +
s"${mapping(0)} is not a column")
}
val info = mapping(1).split("\\.")
if (info.length != 2) {
throw new Exception(s"Syntax Error of column mapping($colMapping), " +
"(sqlCol=colFamily.colQualifier, needs \\".\\")")
}
mapping(0) -> (info(0), info(1))
}.toMap
val divideTableColsByKeyOrNonkey = schema.fields.partition {
case structField @ StructField(name, _, _, _) =>
keyCols.contains(name)
}
val keyColsWithDataType = divideTableColsByKeyOrNonkey._1.map { structType =>
structType.name -> structType.dataType
}.toMap
val nonKeyColsWithDataType = divideTableColsByKeyOrNonkey._2.map{ structType =>
val (family, qualifier) = infoMap.get(structType.name).get
(structType.name, structType.dataType, family, qualifier)
}
val allColumns = colsSeq.map {
case name =>
if (keyCols.contains(name)) {
KeyColumn(
name,
keyColsWithDataType.get(name).get,
keyCols.indexWhere(_ == name)
)
} else {
val nonKeyCol = nonKeyColsWithDataType.find(_._1 == name).get
NonKeyColumn(
name,
nonKeyCol._2,
nonKeyCol._3,
nonKeyCol._4
)
}
}
val hbaseCatalog = sqlContext.asInstanceOf[HBaseSQLContext].hbaseCatalog
hbaseCatalog.createTable(tableName, rawNamespace, hbaseTable, allColumns, null, encodingFormat)
}
}
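// For reference, a hypothetical parameter map matching the keys read above; the table,
// family and qualifier names are illustrative, and the SQL columns named in keyCols and
// colsMapping must exist in the relation's schema:
//   Map("tableName"      -> "teacher",
//       "hbaseTableName" -> "hbase_teacher",
//       "keyCols"        -> "grade,class",                    // row-key columns, in order
//       "colsMapping"    -> "name=cf1.name,score=cf2.score",  // sqlCol=colFamily.colQualifier
//       "encodingFormat" -> "stringformat")                   // defaults to "binaryformat" when absent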
/**
*
* @param tableName SQL table name
* @param hbaseNamespace physical HBase table namespace
* @param hbaseTableName physical HBase table name
* @param allColumns schema
* @param context HBaseSQLContext
*/
@SerialVersionUID(15298736227428789L)
private[hbase] case class HBaseRelation(
tableName: String,
hbaseNamespace: String,
hbaseTableName: String,
allColumns: Seq[AbstractColumn],
deploySuccessfully: Option[Boolean],
encodingFormat: String = "binaryformat")
(@transient var context: SQLContext)
extends BaseRelation with InsertableRelation with Serializable {
@transient lazy val logger = Logger.getLogger(getClass.getName)
@transient lazy val keyColumns = allColumns.filter(_.isInstanceOf[KeyColumn])
.asInstanceOf[Seq[KeyColumn]].sortBy(_.order)
// The sorting is by the ordering of the column family and qualifier. This avoids having to
// sort cells per row, as needed by the bulk loader.
@transient lazy val nonKeyColumns = allColumns.filter(_.isInstanceOf[NonKeyColumn])
.asInstanceOf[Seq[NonKeyColumn]].sortWith(
(a: NonKeyColumn, b: NonKeyColumn) => {
val empty = new HBaseRawType(0)
KeyValue.COMPARATOR.compare(
new KeyValue(empty, a.familyRaw, a.qualifierRaw),
new KeyValue(empty, b.familyRaw, b.qualifierRaw)) < 0
}
)
@transient lazy val bytesUtils: BytesUtils = encodingFormat match {
case "stringformat" => StringBytesUtils
case _ => BinaryBytesUtils
}
lazy val partitionKeys = keyColumns.map(col => output.find(_.name == col.sqlName).get)
@transient lazy val columnMap = allColumns.map {
case key: KeyColumn => (key.sqlName, key.order)
case nonKey: NonKeyColumn => (nonKey.sqlName, nonKey)
}.toMap
allColumns.zipWithIndex.foreach(pi => pi._1.ordinal = pi._2)
private var serializedConfiguration: Array[Byte] = _
def setConfig(inconfig: Configuration) = {
config = inconfig
if (inconfig != null) {
serializedConfiguration = Util.serializeHBaseConfiguration(inconfig)
}
}
@transient var config: Configuration = _
private def getConf: Configuration = {
if (config == null) {
config = {
if (serializedConfiguration != null) {
Util.deserializeHBaseConfiguration(serializedConfiguration)
}
else {
HBaseConfiguration.create
}
}
}
config
}
logger.debug(s"HBaseRelation config has zkPort="
+ s"${getConf.get("hbase.zookeeper.property.clientPort")}")
@transient private var htable_ : HTable = _
def htable = {
if (htable_ == null) htable_ = new HTable(getConf, hbaseTableName)
htable_
}
def isNonKey(attr: AttributeReference): Boolean = {
keyIndex(attr) < 0
}
def keyIndex(attr: AttributeReference): Int = {
// -1 if nonexistent
partitionKeys.indexWhere(_.exprId == attr.exprId)
}
// find the index in a sequence of AttributeReferences that is a key; -1 if not present
def rowIndex(refs: Seq[Attribute], keyIndex: Int): Int = {
refs.indexWhere(_.exprId == partitionKeys(keyIndex).exprId)
}
def flushHTable() = {
if (htable_ != null) {
htable_.flushCommits()
}
}
def closeHTable() = {
if (htable_ != null) {
htable_.close()
htable_ = null
}
}
// corresponding logical relation
@transient lazy val logicalRelation = LogicalRelation(this)
var output: Seq[AttributeReference] = logicalRelation.output
@transient lazy val dts: Seq[DataType] = allColumns.map(_.dataType)
/**
* partitions are updated per table lookup to keep the info reasonably updated
*/
@transient lazy val partitionExpiration =
context.conf.asInstanceOf[HBaseSQLConf].partitionExpiration * 1000
@transient var partitionTS: Long = _
private[hbase] def fetchPartitions(): Unit = {
if (System.currentTimeMillis - partitionTS >= partitionExpiration) {
partitionTS = System.currentTimeMillis
partitions = {
val regionLocations = htable.getRegionLocations.asScala.toSeq
logger.info(s"Number of HBase regions for " +
s"table ${htable.getName.getNameAsString}: ${regionLocations.size}")
regionLocations.zipWithIndex.map {
case p =>
val start: Option[HBaseRawType] = {
if (p._1._1.getStartKey.isEmpty) {
None
} else {
Some(p._1._1.getStartKey)
}
}
val end: Option[HBaseRawType] = {
if (p._1._1.getEndKey.isEmpty) {
None
} else {
Some(p._1._1.getEndKey)
}
}
new HBasePartition(
p._2, p._2,
start,
end,
Some(p._1._2.getHostname), relation = this)
}
}
}
}
@transient var partitions: Seq[HBasePartition] = _
@transient private[hbase] lazy val dimSize = keyColumns.size
val scannerFetchSize = context.conf.asInstanceOf[HBaseSQLConf].scannerFetchSize
private[hbase] def generateRange(partition: HBasePartition, pred: Expression,
index: Int): Range[_] = {
def getData(dt: AtomicType, bound: Option[HBaseRawType]): Option[Any] = {
if (bound.isEmpty) {
None
} else {
/**
* the partition start/end could be incomplete byte array, so we need to make it
* a complete key first
*/
val finalRowKey = getFinalKey(bound)
val (start, length) = HBaseKVHelper.decodingRawKeyColumns(finalRowKey, keyColumns)(index)
Some(DataTypeUtils.bytesToData(finalRowKey, start, length, dt).asInstanceOf[dt.InternalType])
}
}
val dt = keyColumns(index).dataType.asInstanceOf[AtomicType]
val isLastKeyIndex = index == (keyColumns.size - 1)
val start = getData(dt, partition.start)
val end = getData(dt, partition.end)
val startInclusive = start.nonEmpty
val endInclusive = end.nonEmpty && !isLastKeyIndex
new Range(start, startInclusive, end, endInclusive, dt)
}
/**
* Return the start keys of all of the regions in this table,
* as a list of SparkImmutableBytesWritable.
*/
def getRegionStartKeys = {
val byteKeys: Array[HBaseRawType] = htable.getStartKeys
val ret = ArrayBuffer[HBaseRawType]()
// If there is only one region in the table, byteKeys contains a single empty key,
// so we omit that empty element.
for (byteKey <- byteKeys if !(byteKeys.length == 1 && byteKeys(0).length == 0)) {
ret += byteKey
}
ret
}
/**
* build filter list based on critical point ranges
* @param output the projection list
* @param filterPred the predicate
* @param cprs the sequence of critical point ranges
* @return the filter list and expression tuple
*/
def buildCPRFilterList(output: Seq[Attribute], filterPred: Option[Expression],
cprs: Seq[MDCriticalPointRange[_]]):
(Option[filter.Filter], Option[Expression]) = {
val cprFilterList: FilterList = new FilterList(FilterList.Operator.MUST_PASS_ONE)
var expressionList: List[Expression] = List[Expression]()
var anyNonpushable = false
for (cpr <- cprs) {
val cprAndPushableFilterList: FilterList = new FilterList(FilterList.Operator.MUST_PASS_ALL)
val startKey: Option[Any] = cpr.lastRange.start
val endKey: Option[Any] = cpr.lastRange.end
val startInclusive = cpr.lastRange.startInclusive
val endInclusive = cpr.lastRange.endInclusive
val keyType: AtomicType = cpr.lastRange.dt
val predicate = Option(cpr.lastRange.pred)
val (pushable, nonPushable) = buildPushdownFilterList(predicate)
val items: Seq[(Any, AtomicType)] = cpr.prefix
val head: Seq[(HBaseRawType, AtomicType)] = items.map {
case (itemValue, itemType) =>
(DataTypeUtils.dataToBytes(itemValue, itemType), itemType)
}
val headExpression: Seq[Expression] = items.zipWithIndex.map { case (item, index) =>
val keyCol = keyColumns.find(_.order == index).get
val left = filterPred.get.references.find(_.name == keyCol.sqlName).get
val right = Literal.create(item._1, item._2)
expressions.EqualTo(left, right)
}
val tailExpression: Expression = {
val index = items.size
val keyCol = keyColumns.find(_.order == index).get
val left = filterPred.get.references.find(_.name == keyCol.sqlName).get
val startInclusive = cpr.lastRange.startInclusive
val endInclusive = cpr.lastRange.endInclusive
if (cpr.lastRange.isPoint) {
val right = Literal.create(cpr.lastRange.start.get, cpr.lastRange.dt)
expressions.EqualTo(left, right)
} else if (cpr.lastRange.start.isDefined && cpr.lastRange.end.isDefined) {
var right = Literal.create(cpr.lastRange.start.get, cpr.lastRange.dt)
val leftExpression = if (startInclusive) {
expressions.GreaterThanOrEqual(left, right)
} else {
expressions.GreaterThan(left, right)
}
right = Literal.create(cpr.lastRange.end.get, cpr.lastRange.dt)
val rightExpress = if (endInclusive) {
expressions.LessThanOrEqual(left, right)
} else {
expressions.LessThan(left, right)
}
expressions.And(leftExpression, rightExpress)
} else if (cpr.lastRange.start.isDefined) {
val right = Literal.create(cpr.lastRange.start.get, cpr.lastRange.dt)
if (startInclusive) {
expressions.GreaterThanOrEqual(left, right)
} else {
expressions.GreaterThan(left, right)
}
} else if (cpr.lastRange.end.isDefined) {
val right = Literal.create(cpr.lastRange.end.get, cpr.lastRange.dt)
if (endInclusive) {
expressions.LessThanOrEqual(left, right)
} else {
expressions.LessThan(left, right)
}
} else {
null
}
}
val combinedExpression: Seq[Expression] = headExpression :+ tailExpression
var andExpression: Expression = combinedExpression.reduceLeft(
(e1: Expression, e2: Expression) => expressions.And(e1, e2))
if (nonPushable.isDefined) {
anyNonpushable = true
andExpression = expressions.And(andExpression, nonPushable.get)
}
expressionList = expressionList :+ andExpression
val filter = {
if (cpr.lastRange.isPoint) {
// the last range is a point
val tail: (HBaseRawType, AtomicType) =
(DataTypeUtils.dataToBytes(startKey.get, keyType), keyType)
val rowKeys = head :+ tail
val row = HBaseKVHelper.encodingRawKeyColumns(rowKeys)
if (cpr.prefix.size == keyColumns.size - 1) {
// full dimension of row key
new RowFilter(CompareFilter.CompareOp.EQUAL, new BinaryComparator(row))
}
else {
new RowFilter(CompareFilter.CompareOp.EQUAL, new BinaryPrefixComparator(row))
}
} else {
// the last range is not a point
val startFilter: RowFilter = if (startKey.isDefined) {
val tail: (HBaseRawType, AtomicType) =
(DataTypeUtils.dataToBytes(startKey.get, keyType), keyType)
val rowKeys = head :+ tail
val row = HBaseKVHelper.encodingRawKeyColumns(rowKeys)
if (cpr.prefix.size == keyColumns.size - 1) {
// full dimension of row key
if (startInclusive) {
new RowFilter(CompareFilter.CompareOp.GREATER_OR_EQUAL, new BinaryComparator(row))
} else {
new RowFilter(CompareFilter.CompareOp.GREATER, new BinaryComparator(row))
}
}
else {
if (startInclusive) {
new RowFilter(CompareFilter.CompareOp.GREATER_OR_EQUAL,
new BinaryPrefixComparator(row))
} else {
new RowFilter(CompareFilter.CompareOp.GREATER, new BinaryPrefixComparator(row))
}
}
} else {
null
}
val endFilter: RowFilter = if (endKey.isDefined) {
val tail: (HBaseRawType, AtomicType) =
(DataTypeUtils.dataToBytes(endKey.get, keyType), keyType)
val rowKeys = head :+ tail
val row = HBaseKVHelper.encodingRawKeyColumns(rowKeys)
if (cpr.prefix.size == keyColumns.size - 1) {
// full dimension of row key
if (endInclusive) {
new RowFilter(CompareFilter.CompareOp.LESS_OR_EQUAL, new BinaryComparator(row))
} else {
new RowFilter(CompareFilter.CompareOp.LESS, new BinaryComparator(row))
}
} else {
if (endInclusive) {
new RowFilter(CompareFilter.CompareOp.LESS_OR_EQUAL,
new BinaryPrefixComparator(row))
} else {
new RowFilter(CompareFilter.CompareOp.LESS, new BinaryPrefixComparator(row))
}
}
} else {
null
}
/*
* create the filter; for example, for k1 = 10, k2 < 5,
* it will create 2 filters: first, RowFilter = 10 (PrefixComparator),
* second, RowFilter < (10, 5) (PrefixComparator / Comparator)
*/
val prefixFilter = if (head.nonEmpty) {
val row = HBaseKVHelper.encodingRawKeyColumns(head)
new RowFilter(CompareFilter.CompareOp.EQUAL, new BinaryPrefixComparator(row))
} else {
null
}
if (startKey.isDefined && endKey.isDefined) {
// both start and end filters exist
val filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL)
if (prefixFilter != null) {
filterList.addFilter(prefixFilter)
}
filterList.addFilter(startFilter)
filterList.addFilter(endFilter)
filterList
} else if (startKey.isDefined) {
// start filter exists only
if (prefixFilter != null) {
val filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL)
filterList.addFilter(prefixFilter)
filterList.addFilter(startFilter)
filterList
} else {
startFilter
}
} else {
// end filter exists only
if (prefixFilter != null) {
val filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL)
filterList.addFilter(prefixFilter)
filterList.addFilter(endFilter)
filterList
} else {
endFilter
}
}
}
}
cprAndPushableFilterList.addFilter(filter)
if (pushable.isDefined) {
cprAndPushableFilterList.addFilter(pushable.get)
}
cprFilterList.addFilter(cprAndPushableFilterList)
}
val orExpression = if (anyNonpushable) {
Some(expressionList.reduceLeft((e1: Expression, e2: Expression) => expressions.Or(e1, e2)))
} else {
None
}
val finalFilterList: filter.Filter = if (cprFilterList.getFilters.size() == 1) {
cprFilterList.getFilters.get(0)
} else if (cprFilterList.getFilters.size() > 1) {
cprFilterList
} else {
require(requirement = false, "internal logic error: nonempty filter list is expected")
null
}
(Some(finalFilterList), orExpression)
}
/**
* create pushdown filter list based on predicate
* @param pred the predicate
* @return a tuple of (pushdown filter list, non-pushdownable expression)
*/
def buildPushdownFilterList(pred: Option[Expression]):
(Option[FilterList], Option[Expression]) = {
if (pred.isDefined) {
val predExp: Expression = pred.get
// build pred pushdown filters:
// 1. push any NOT through AND/OR
val notPushedPred = NotPusher(predExp)
// 2. classify the transformed predicate into pushdownable and non-pushdownable predicates
val classifier = new ScanPredClassifier(this) // Right now only on primary key dimension
val (pushdownFilterPred, otherPred) = classifier(notPushedPred)
// 3. build a FilterList mirroring the pushdownable predicate
val predPushdownFilterList = {
if (pushdownFilterPred.isEmpty) None else buildFilterListFromPred(pushdownFilterPred)
}
// 4. merge the above FilterList with the one from the projection
(predPushdownFilterList, otherPred)
} else {
(None, None)
}
}
/**
* add the filter to the filter list
* @param filters the filter list
* @param filtersToBeAdded the filter to be added
* @param operator the operator of the filter to be added
*/
private def addToFilterList(filters: java.util.ArrayList[filter.Filter],
filtersToBeAdded: Option[FilterList],
operator: FilterList.Operator) = {
if (filtersToBeAdded.isDefined) {
val filterList = filtersToBeAdded.get
val size = filterList.getFilters.size
if (size == 1 || filterList.getOperator == operator) {
filterList.getFilters.map(p => filters.add(p))
} else {
filters.add(filterList)
}
}
}
def createSingleColumnValueFilter(left: AttributeReference, right: Literal,
compareOp: CompareFilter.CompareOp): Option[FilterList] = {
val nonKeyColumn = nonKeyColumns.find(_.sqlName == left.name)
if (nonKeyColumn.isDefined) {
val column = nonKeyColumn.get
val filter = new SingleColumnValueFilter(column.familyRaw,
column.qualifierRaw,
compareOp,
DataTypeUtils.getBinaryComparator(bytesUtils.create(right.dataType), right))
filter.setFilterIfMissing(true)
Some(new FilterList(filter))
} else {
None
}
}
/**
* recursively create the filter list based on predicate
* @param pred the predicate
* @return the filter list, or None if predicate is not defined
*/
private def buildFilterListFromPred(pred: Option[Expression]): Option[FilterList] = {
if (pred.isEmpty) {
None
} else {
val expression = pred.get
expression match {
case expressions.And(left, right) =>
val filters = new java.util.ArrayList[filter.Filter]
if (left != null) {
val leftFilterList = buildFilterListFromPred(Some(left))
addToFilterList(filters, leftFilterList, FilterList.Operator.MUST_PASS_ALL)
}
if (right != null) {
val rightFilterList = buildFilterListFromPred(Some(right))
addToFilterList(filters, rightFilterList, FilterList.Operator.MUST_PASS_ALL)
}
Some(new FilterList(FilterList.Operator.MUST_PASS_ALL, filters))
case expressions.Or(left, right) =>
val filters = new java.util.ArrayList[filter.Filter]
if (left != null) {
val leftFilterList = buildFilterListFromPred(Some(left))
addToFilterList(filters, leftFilterList, FilterList.Operator.MUST_PASS_ONE)
}
if (right != null) {
val rightFilterList = buildFilterListFromPred(Some(right))
addToFilterList(filters, rightFilterList, FilterList.Operator.MUST_PASS_ONE)
}
Some(new FilterList(FilterList.Operator.MUST_PASS_ONE, filters))
case InSet(value@AttributeReference(name, dataType, _, _), hset) =>
val column = nonKeyColumns.find(_.sqlName == name)
if (column.isDefined) {
val filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE)
for (item <- hset) {
val filter = new SingleColumnValueFilter(column.get.familyRaw,
column.get.qualifierRaw,
CompareFilter.CompareOp.EQUAL,
DataTypeUtils.getBinaryComparator(bytesUtils.create(dataType),
Literal.create(item, dataType)))
filterList.addFilter(filter)
}
Some(filterList)
} else {
None
}
case expressions.In(value@AttributeReference(name, dataType, _, _), list) =>
val column = nonKeyColumns.find(_.sqlName == name)
if (column.isDefined) {
val filterList = new FilterList(FilterList.Operator.MUST_PASS_ONE)
for (item <- list) {
val filter = new SingleColumnValueFilter(column.get.familyRaw,
column.get.qualifierRaw,
CompareFilter.CompareOp.EQUAL,
DataTypeUtils.getBinaryComparator(bytesUtils.create(dataType),
item.asInstanceOf[Literal]))
filterList.addFilter(filter)
}
Some(filterList)
} else {
None
}
case expressions.GreaterThan(left: AttributeReference, right: Literal) =>
createSingleColumnValueFilter(left, right, CompareFilter.CompareOp.GREATER)
case expressions.GreaterThan(left: Literal, right: AttributeReference) =>
createSingleColumnValueFilter(right, left, CompareFilter.CompareOp.GREATER)
case expressions.GreaterThanOrEqual(left: AttributeReference, right: Literal) =>
createSingleColumnValueFilter(left, right,
CompareFilter.CompareOp.GREATER_OR_EQUAL)
case expressions.GreaterThanOrEqual(left: Literal, right: AttributeReference) =>
createSingleColumnValueFilter(right, left,
CompareFilter.CompareOp.GREATER_OR_EQUAL)
case expressions.EqualTo(left: AttributeReference, right: Literal) =>
createSingleColumnValueFilter(left, right, CompareFilter.CompareOp.EQUAL)
case expressions.EqualTo(left: Literal, right: AttributeReference) =>
createSingleColumnValueFilter(right, left, CompareFilter.CompareOp.EQUAL)
case expressions.LessThan(left: AttributeReference, right: Literal) =>
createSingleColumnValueFilter(left, right, CompareFilter.CompareOp.LESS)
case expressions.LessThan(left: Literal, right: AttributeReference) =>
createSingleColumnValueFilter(right, left, CompareFilter.CompareOp.LESS)
case expressions.LessThanOrEqual(left: AttributeReference, right: Literal) =>
createSingleColumnValueFilter(left, right, CompareFilter.CompareOp.LESS_OR_EQUAL)
case expressions.LessThanOrEqual(left: Literal, right: AttributeReference) =>
createSingleColumnValueFilter(right, left, CompareFilter.CompareOp.LESS_OR_EQUAL)
case _ => None
}
}
}
def buildPut(row: InternalRow): Put = {
// TODO: revisit this using new KeyComposer
val rowKey: HBaseRawType = null
new Put(rowKey)
}
def sqlContext = context
def schema: StructType = StructType(allColumns.map {
case KeyColumn(name, dt, _) => StructField(name, dt, nullable = false)
case NonKeyColumn(name, dt, _, _) => StructField(name, dt, nullable = true)
})
override def insert(data: DataFrame, overwrite: Boolean) = {
if (!overwrite) {
sqlContext.sparkContext.runJob(data.rdd, writeToHBase _)
} else {
// TODO: Support INSERT OVERWRITE INTO
sys.error("HBASE Table does not support INSERT OVERWRITE for now.")
}
}
def writeToHBase(context: TaskContext, iterator: Iterator[Row]) = {
// TODO: make the BatchMaxSize configurable
val BatchMaxSize = 100
var rowIndexInBatch = 0
var colIndexInBatch = 0
var puts = new ListBuffer[Put]()
while (iterator.hasNext) {
val row = iterator.next()
val seq = row.toSeq.map{
case s:String => UTF8String.fromString(s)
case other => other
}
val internalRow = InternalRow.fromSeq(seq)
val rawKeyCol = keyColumns.map(
kc => {
val rowColumn = DataTypeUtils.getRowColumnInHBaseRawType(
internalRow, kc.ordinal, kc.dataType)
colIndexInBatch += 1
(rowColumn, kc.dataType)
}
)
val key = HBaseKVHelper.encodingRawKeyColumns(rawKeyCol)
val put = new Put(key)
nonKeyColumns.foreach(
nkc => {
val rowVal = DataTypeUtils.getRowColumnInHBaseRawType(
internalRow, nkc.ordinal, nkc.dataType, bytesUtils)
colIndexInBatch += 1
put.add(nkc.familyRaw, nkc.qualifierRaw, rowVal)
}
)
puts += put
colIndexInBatch = 0
rowIndexInBatch += 1
if (rowIndexInBatch >= BatchMaxSize) {
htable.put(puts.toList)
puts.clear()
rowIndexInBatch = 0
}
}
if (puts.nonEmpty) {
htable.put(puts.toList)
}
closeHTable()
}
def delete(data: DataFrame) = {
sqlContext.sparkContext.runJob(data.rdd, deleteFromHBase _)
}
def deleteFromHBase(context: TaskContext, iterator: Iterator[Row]) = {
// TODO: make the BatchMaxSize configurable
val BatchMaxSize = 100
var rowIndexInBatch = 0
// note: this is a hack; we cannot use a Scala list here,
// as it would throw UnsupportedOperationException.
val deletes = new java.util.ArrayList[Delete]()
while (iterator.hasNext) {
val row = iterator.next()
val seq = row.toSeq.map{
case s: String => UTF8String.fromString(s)
case other => other
}
val internalRow = InternalRow.fromSeq(seq)
val rawKeyCol = keyColumns.map(
kc => {
val rowColumn = DataTypeUtils.getRowColumnInHBaseRawType(
internalRow, kc.ordinal, kc.dataType)
(rowColumn, kc.dataType)
}
)
val key = HBaseKVHelper.encodingRawKeyColumns(rawKeyCol)
val delete = new Delete(key)
deletes.add(delete)
rowIndexInBatch += 1
if (rowIndexInBatch >= BatchMaxSize) {
htable.delete(deletes)
deletes.clear()
rowIndexInBatch = 0
}
}
if (deletes.nonEmpty) {
htable.delete(deletes)
deletes.clear()
rowIndexInBatch = 0
}
closeHTable()
}
def buildScan(requiredColumns: Seq[Attribute], filters: Seq[Expression]): RDD[InternalRow] = {
require(filters.size < 2, "Internal logical error: unexpected filter list size")
val filterPredicate = filters.headOption
new HBaseSQLReaderRDD(
this,
context.conf.codegenEnabled,
context.conf.asInstanceOf[HBaseSQLConf].useCustomFilter,
requiredColumns,
subplan = None,
dummyRDD = null,
deploySuccessfully,
filterPredicate, // PartitionPred : Option[Expression]
context
)
}
def buildScan(start: Option[HBaseRawType], end: Option[HBaseRawType],
predicate: Option[Expression],
filters: Option[filter.Filter], otherFilters: Option[Expression],
useCustomFilter: Boolean,
projectionList: Seq[NamedExpression]): Scan = {
val scan = {
(start, end) match {
case (Some(lb), Some(ub)) => new Scan(lb, ub)
case (Some(lb), None) => new Scan(lb)
case (None, Some(ub)) => new Scan(Array[Byte](), ub)
case _ => new Scan
}
}
// set fetch size
scan.setCaching(scannerFetchSize)
// add Family to SCAN from projections
addColumnFamiliesToScan(scan, filters, otherFilters,
predicate, useCustomFilter, projectionList)
}
/**
* add projection and column to the scan
* @param scan the current scan
* @param filters the filter/filter list to be processed
* @param otherFilters the non-pushdownable predicates
* @param projectionList the projection list
* @return the proper scan
*/
def addColumnFamiliesToScan(scan: Scan, filters: Option[filter.Filter],
otherFilters: Option[Expression],
predicate: Option[Expression],
useCustomFilter: Boolean,
projectionList: Seq[NamedExpression]): Scan = {
var distinctProjectionList = projectionList.map(_.name)
var keyOnlyFilterPresent = false
if (otherFilters.isDefined) {
distinctProjectionList =
distinctProjectionList.union(otherFilters.get.references.toSeq.map(_.name)).distinct
}
// filter out the key columns
distinctProjectionList =
distinctProjectionList.filterNot(p => keyColumns.exists(_.sqlName == p))
var finalFilters = if (distinctProjectionList.isEmpty) {
if (filters.isDefined) {
filters.get
} else {
keyOnlyFilterPresent = true
new FirstKeyOnlyFilter
}
} else {
if (filters.isDefined) {
filters.get
} else {
null
}
}
if (predicate.nonEmpty) {
val pred = predicate.get
val predRefs = pred.references.toSeq
val predicateNameSet = predRefs.map(_.name).
filterNot(p => keyColumns.exists(_.sqlName == p)).toSet
if (distinctProjectionList.toSet.subsetOf(predicateNameSet)) {
// If the pushed down predicate is present and the projection is a subset
// of the columns of the pushed filters, use the columns as projections
// to avoid a full projection. The point is that by default without
// adding column explicitly, the HBase scan would return all columns.
// However there is some complexity when the predicate involves checks against
// nullness. For instance, "SELECT c1 from ... where c2 is null" would require
// the full projection before a check can be made since any existence of
// any other column would qualify the row. In contrast, a query of
// "SELECT c2 from ... where c2 is not null" will only require the existence
// of c2 so we can restrict the interested qualifiers to "c2" only.
distinctProjectionList = predicateNameSet.toSeq.distinct
val boundPred = BindReferences.bindReference(pred, predRefs)
val row = new GenericInternalRow(predRefs.size) // an all-null row
val prRes = boundPred.partialReduce(row, predRefs, checkNull = true)
val (addColumn, nkcols) = prRes match {
// At least one existing column has to be fetched to qualify the record,
// so we can just use the predicate's full projection
case (false, _) => (true, distinctProjectionList)
// Even an absent column may qualify the record, so we have to fetch all columns
// before evaluating the predicate. Note that by doing this, the "fullness"
// of the projection has the semantic scope of the HBase table, not the SQL table
// mapped to the HBase table.
case (true, _) => (false, null)
// Absent columns alone aren't enough to determine the record's qualification,
// so the 'remaining' predicate's projections have to be consulted, and we
// can again avoid a full projection by adding the 'remaining' predicate's projections
// to the scan's column map if the projections are non-key columns.
case (null, reducedPred) =>
val nkRefs = reducedPred.references.map(_.name).filterNot(
p => keyColumns.exists(_.sqlName == p))
if (nkRefs.isEmpty) {
// Only key-related predicate is present, add FirstKeyOnlyFilter
if (!keyOnlyFilterPresent) {
if (finalFilters == null) {
finalFilters = new FirstKeyOnlyFilter
} else {
val filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL)
filterList.addFilter(new FirstKeyOnlyFilter)
filterList.addFilter(finalFilters)
finalFilters = filterList
}
}
(false, null)
} else {
(true, nkRefs.toSeq)
}
}
if (addColumn && nkcols.nonEmpty && nkcols.size < nonKeyColumns.size) {
nkcols.foreach {
case p =>
val nkc = nonKeyColumns.find(_.sqlName == p).get
scan.addColumn(nkc.familyRaw, nkc.qualifierRaw)
}
}
}
}
if (deploySuccessfully.isDefined && deploySuccessfully.get && useCustomFilter) {
if (finalFilters != null) {
if (otherFilters.isDefined) {
// add custom filter to handle other filters part
val customFilter = new HBaseCustomFilter(this, otherFilters.get)
val filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL)
filterList.addFilter(finalFilters)
filterList.addFilter(customFilter)
scan.setFilter(filterList)
} else {
scan.setFilter(finalFilters)
}
} else if (otherFilters.isDefined) {
val customFilter = new HBaseCustomFilter(this, otherFilters.get)
scan.setFilter(customFilter)
}
} else {
if (finalFilters != null) scan.setFilter(finalFilters)
}
scan
}
def buildGet(projectionList: Seq[NamedExpression], rowKey: HBaseRawType) {
new Get(rowKey)
// TODO: add columns to the Get
}
/**
*
* @param kv the cell value to work with
* @param projection the pair of projection and its index
* @param row the row to set values on
*/
private def setColumn(kv: Cell, projection: (Attribute, Int), row: MutableRow,
bytesUtils: BytesUtils = BinaryBytesUtils): Unit = {
if (kv == null || kv.getValueLength == 0) {
row.setNullAt(projection._2)
} else {
val dt = projection._1.dataType
if (dt.isInstanceOf[AtomicType]) {
DataTypeUtils.setRowColumnFromHBaseRawType(
row, projection._2, kv.getValueArray, kv.getValueOffset, kv.getValueLength, dt, bytesUtils)
} else {
// for complex types, deserialization is involved and we aren't sure about buffer safety
val colValue = CellUtil.cloneValue(kv)
DataTypeUtils.setRowColumnFromHBaseRawType(
row, projection._2, colValue, 0, colValue.length, dt, bytesUtils)
}
}
}
def buildRowAfterCoprocessor(projections: Seq[(Attribute, Int)],
result: Result,
row: MutableRow): InternalRow = {
for (i <- projections.indices) {
setColumn(result.rawCells()(i), projections(i), row)
}
row
}
def buildRowInCoprocessor(projections: Seq[(Attribute, Int)],
result: java.util.ArrayList[Cell],
row: MutableRow): InternalRow = {
def getColumnLatestCell(family: Array[Byte],
qualifier: Array[Byte]): Cell = {
// 0 means equal, >0 means larger, <0 means smaller
def compareCellWithExpectedOne(cell: Cell): Int = {
val compare = Bytes.compareTo(
cell.getFamilyArray, cell.getFamilyOffset, cell.getFamilyLength,
family, 0, family.length)
if (compare != 0) compare
else {
Bytes.compareTo(
cell.getQualifierArray, cell.getQualifierOffset, cell.getQualifierLength,
qualifier, 0, qualifier.length)
}
}
def binarySearchTheArrayList(startIndex: Int, endIndex: Int): Cell = {
if (startIndex > endIndex) null
else {
val midIndex = (startIndex + endIndex) >>> 1
val cell = result.get(midIndex)
// 0 means equal, >0 means larger, <0 means smaller
compareCellWithExpectedOne(cell) match {
case 0 => cell
case i if i < 0 => binarySearchTheArrayList(midIndex + 1, endIndex)
case i if i > 0 => binarySearchTheArrayList(startIndex, midIndex - 1)
}
}
}
if (result == null || result.isEmpty) null
else {
binarySearchTheArrayList(0, result.length - 1)
}
}
lazy val rowKeys = HBaseKVHelper.decodingRawKeyColumns(
result.head.getRowArray, keyColumns, result.head.getRowLength, result.head.getRowOffset)
projections.foreach {
p =>
columnMap.get(p._1.name).get match {
case column: NonKeyColumn =>
val kv = getColumnLatestCell(column.familyRaw, column.qualifierRaw)
setColumn(kv, p, row, bytesUtils)
case keyIndex: Int =>
val (start, length) = rowKeys(keyIndex)
DataTypeUtils.setRowColumnFromHBaseRawType(
row, p._2, result.head.getRowArray, start, length, keyColumns(keyIndex).dataType)
}
}
row
}
def buildRow(projections: Seq[(Attribute, Int)],
result: Result,
row: MutableRow): InternalRow = {
lazy val rowKeys = HBaseKVHelper.decodingRawKeyColumns(result.getRow, keyColumns)
projections.foreach {
p =>
columnMap.get(p._1.name).get match {
case column: NonKeyColumn =>
val kv: Cell = result.getColumnLatestCell(column.familyRaw, column.qualifierRaw)
setColumn(kv, p, row, bytesUtils)
case keyIndex: Int =>
val (start, length) = rowKeys(keyIndex)
DataTypeUtils.setRowColumnFromHBaseRawType(
row, p._2, result.getRow, start, length, keyColumns(keyIndex).dataType)
}
}
row
}
/**
* Convert the row key to its proper format. Due to the nature of HBase, the start and
* end of a partition could be a partial row key, so we may need to append 0x00 bytes to
* make it comply with the definition of the key columns; for example, append four 0x00
* bytes if a key column type is integer. String-typed (UTF8) columns may also need to be
* padded with the minimum UTF8 continuation byte(s).
* @param rawKey the original row key
* @return the proper row key based on the definition of the key columns
*/
def getFinalKey(rawKey: Option[HBaseRawType]): HBaseRawType = {
val origRowKey: HBaseRawType = rawKey.get
/**
* Recursively check the key columns one by one.
* If the input raw key contains the whole of the current key column, continue to
* check the next one; otherwise, pad the raw key with 0x00 (or minimal UTF8
* continuation bytes) into its proper format and return it.
* @param rowIndex the start point of unchecked bytes in the input raw key
* @param curKeyIndex the next key column that needs to be checked
* @return the proper row key based on the definition of the key columns
*/
def getFinalRowKey(rowIndex: Int, curKeyIndex: Int): HBaseRawType = {
if (curKeyIndex >= keyColumns.length) origRowKey
else {
val typeOfKey = keyColumns(curKeyIndex)
if (typeOfKey.dataType == StringType) {
val indexOfStringEnd = origRowKey.indexOf(HBaseKVHelper.delimiter, rowIndex)
if (indexOfStringEnd == -1) {
val nOfUTF8StrBytes = HBaseRelation.numOfBytes(origRowKey(rowIndex))
val delta = if (nOfUTF8StrBytes > origRowKey.length - rowIndex) {
// padding of 1000 0000 is needed according to UTF-8 spec
Array.fill[Byte](nOfUTF8StrBytes - origRowKey.length
+ rowIndex)(HBaseRelation.utf8Padding) ++
new Array[Byte](getMinimum(curKeyIndex + 1))
} else {
new Array[Byte](getMinimum(curKeyIndex + 1))
}
origRowKey ++ delta
} else {
getFinalRowKey(indexOfStringEnd + 1, curKeyIndex + 1)
}
} else {
val nextRowIndex = rowIndex +
typeOfKey.dataType.asInstanceOf[AtomicType].defaultSize
if (nextRowIndex < origRowKey.length) {
getFinalRowKey(nextRowIndex, curKeyIndex + 1)
} else {
val delta: Array[Byte] = {
new Array[Byte](nextRowIndex - origRowKey.length + getMinimum(curKeyIndex + 1))
}
origRowKey ++ delta
}
}
}
}
/**
* Get the minimum key length based on the key columns definition
* @param startKeyIndex the start point of the key column
* @return the minimum length required for the remaining key columns
*/
def getMinimum(startKeyIndex: Int): Int = {
keyColumns.drop(startKeyIndex).map(k => {
k.dataType match {
case StringType => 1
case _ => k.dataType.asInstanceOf[AtomicType].defaultSize
}
}
).sum
}
getFinalRowKey(0, 0)
}
/**
* Convert a HBase row key into column values in their native data formats
* @param rawKey the HBase row key
* @return A sequence of column values from the row Key
*/
def nativeKeyConvert(rawKey: Option[HBaseRawType]): Seq[Any] = {
if (rawKey.isEmpty) Nil
else {
val finalRowKey = getFinalKey(rawKey)
HBaseKVHelper.decodingRawKeyColumns(finalRowKey, keyColumns).
zipWithIndex.map(pi => DataTypeUtils.bytesToData(finalRowKey,
pi._1._1, pi._1._2, keyColumns(pi._2).dataType))
}
}
}
private[hbase] object HBaseRelation {
// Copied from UTF8String for accessibility reasons therein
private val bytesOfCodePointInUTF8: Array[Int] = Array(2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
4, 4, 4, 4, 4, 4, 4, 4,
5, 5, 5, 5,
6, 6, 6, 6)
@inline
def numOfBytes(b: Byte): Int = {
val offset = (b & 0xFF) - 192
if (offset >= 0) bytesOfCodePointInUTF8(offset) else 1
}
val zeroByte: Array[Byte] = new Array(1)
val utf8Padding: Byte = 0x80.asInstanceOf[Byte]
}
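A quick illustration of the UTF-8 leading-byte lookup above (byte values chosen only for the example):

// 0xE4 starts a 3-byte UTF-8 sequence: (0xE4 & 0xFF) - 192 = 36, and the table holds 3 at index 36.
HBaseRelation.numOfBytes(0xE4.toByte) // 3
// ASCII bytes fall below the 192 threshold, so they count as single-byte code points.
HBaseRelation.numOfBytes('a'.toByte)  // 1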
| (code above) | jackylk/astro | src/main/scala/org/apache/spark/sql/hbase/HBaseRelation.scala | Scala | apache-2.0 | 46,861 |
package japgolly.scalajs.react.extra.internal
final class LazyVar[A](initArg: () => A) {
// Don't prevent GC of initArg or waste mem propagating the ref
private[this] var init = initArg
private[this] var value: A = _
def get(): A = {
if (init ne null)
set(init())
value
}
def set(a: A): Unit = {
value = a
init = null
}
}
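A minimal usage sketch (hypothetical values): the initializer runs at most once, on the first get(), and set() both stores a value and releases the initializer for garbage collection.

val answer = new LazyVar(() => { println("computing"); 42 })
answer.get()  // prints "computing" once and returns 42
answer.get()  // returns 42 without re-running the initializer
answer.set(7) // overrides the stored value directly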
| (code above) | matthughes/scalajs-react | extra/src/main/scala/japgolly/scalajs/react/extra/internal/LazyVar.scala | Scala | apache-2.0 | 362 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar
import java.nio.file.{Files, Paths}
package object precog {
type jPath = java.nio.file.Path
type =?>[-A, +B] = scala.PartialFunction[A, B]
type CTag[A] = scala.reflect.ClassTag[A]
def ctag[A](implicit z: CTag[A]): CTag[A] = z
def jPath(path: String): jPath = Paths get path
implicit class jPathOps(private val p: jPath) {
def slurpBytes(): Array[Byte] = Files readAllBytes p
}
}
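A short usage sketch of the aliases and helpers above (the file path is made up):

val p: jPath = jPath("/tmp/example.bin")  // a java.nio.file.Path via the alias and helper
val bytes: Array[Byte] = p.slurpBytes()   // Files.readAllBytes through the jPathOps wrapper
val tag: CTag[String] = ctag[String]      // summons the ClassTag[String] instance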
| (code above) | drostron/quasar | precog/src/main/scala/quasar/precog/package.scala | Scala | apache-2.0 | 1,032 |
package com.gu.management
object HttpRequestsTimingMetric extends TimingMetric(
group = "application",
name = "http-requests",
title = "HTTP requests",
description = "HTTP requests as determined by the request logging filter")
object ExceptionCountMetric extends CountMetric(
group = "application",
name = "exception-count",
title = "exception-count",
description = "Counts the number of uncaught exceptions being sent to the client from the application"
)
object ServerErrorCounter extends CountMetric(
group = "application",
name = "server-error",
title = "server-error",
description = "The number of 5XX errors returned by the application")
object ClientErrorCounter extends CountMetric(
group = "application",
name = "client-error",
title = "client-error",
description = "The number of 4XX errors returned by the application")
| (code above) | guardian/guardian-management | management/src/main/scala/com/gu/management/RequestMetrics.scala | Scala | apache-2.0 | 867 |
package scala.meta.tests
package parsers
import org.scalatest.exceptions.TestFailedException
import scala.meta._
class ModSuite extends ParseSuite {
test("implicit") {
val Defn.Object(Seq(Mod.Implicit()), _, _) = templStat("implicit object A")
val Defn.Class(Seq(Mod.Implicit()), _, _, _, _) = templStat("implicit class A")
val Defn.Object(Seq(Mod.Implicit(), Mod.Case()), _, _) = templStat("implicit case object A")
val Defn.Class(_, _, _, Ctor.Primary(_, _,
Seq(Seq(Term.Param(Seq(Mod.Implicit(), Mod.ValParam()), _, _, _)))
), _) = templStat("case class A(implicit val a: Int)")
val Defn.Class(_, _, _, Ctor.Primary(_, _,
Seq(Seq(Term.Param(Seq(Mod.Implicit(), Mod.VarParam()), _, _, _)))
), _) = templStat("case class A(implicit var a: Int)")
val Defn.Def(_, _, _, Seq(Seq(Term.Param(Seq(Mod.Implicit()), _, _, _))), _, _) =
templStat("def foo(implicit a: Int): Int = a")
val Defn.Def(Seq(Mod.Implicit()), _, _, _, _, _) = templStat("implicit def foo(a: Int): Int = a")
val Defn.Val(Seq(Mod.Implicit()), _, _, _) = templStat("implicit val a: Int = 1")
val Decl.Val(Seq(Mod.Implicit()), _, _) = templStat("implicit val a: Int")
val Defn.Var(Seq(Mod.Implicit()), _, _, _) = templStat("implicit var a: Int = 1")
val Decl.Var(Seq(Mod.Implicit()), _, _) = templStat("implicit var a: Int")
interceptParseErrors(
"implicit implicit var a: Int",
"implicit implicit val a: Int",
"implicit implicit var a: Int = 1",
"implicit implicit val a: Int = 1",
"implicit implicit class A",
"implicit implicit object A",
"implicit implicit trait A",
"implicit implicit case class A(a: Int)",
"implicit implicit type A",
"implicit implicit type A = Int",
"implicit trait A",
"implicit type A",
"implicit type A = Int",
"implicit case class A(a: Int)"
)
}
test("final") {
val Defn.Object(Seq(Mod.Final()), _, _) = templStat("final object A")
val Defn.Class(Seq(Mod.Final()), _, _, _, _) = templStat("final class A")
val Defn.Class(Seq(Mod.Final(), Mod.Case()), _, _, _, _) = templStat("final case class A(a: Int)")
val Defn.Object(Seq(Mod.Final(), Mod.Case()), _, _) = templStat("final case object A")
val Defn.Class(_, _, _, Ctor.Primary(_, _,
Seq(Seq(Term.Param(Seq(Mod.Final(), Mod.ValParam()), _, _, _)))
), _) = templStat("case class A(final val a: Int)")
val Defn.Def(Seq(Mod.Final()), _, _, _, _, _) = templStat("final def foo(a: Int): Int = a")
val Defn.Val(Seq(Mod.Final()), _, _, _) = templStat("final val a: Int = 1")
val Decl.Val(Seq(Mod.Final()), _, _) = templStat("final val a: Int")
val Defn.Var(Seq(Mod.Final()), _, _, _) = templStat("final var a: Int = 1")
val Decl.Var(Seq(Mod.Final()), _, _) = templStat("final var a: Int")
val Defn.Type(Seq(Mod.Final()), _, _, _) = templStat("final type A = Int")
interceptParseErrors(
"final final var a: Int",
"final final val a: Int",
"final final var a: Int = 1",
"final final val a: Int = 1",
"final final class A",
"final final object A",
"final final trait A",
"final final case class A(a: Int)",
"final final type A",
"final trait A",
"def foo(final val a: Int): Int = a"
)
}
test("sealed") {
val Defn.Trait(Seq(Mod.Sealed()), _, _, _, _) = templStat("sealed trait A")
val Defn.Class(Seq(Mod.Sealed()), _, _, _, _) = templStat("sealed class A")
val Defn.Class(Seq(Mod.Sealed(), Mod.Abstract()), _, _, _, _) = templStat("sealed abstract class A")
val Defn.Class(Seq(Mod.Sealed(), Mod.Case()), _, _, _, _) = templStat("sealed case class A(a: Int)")
interceptParseErrors(
"sealed sealed var a: Int",
"sealed sealed val a: Int",
"sealed sealed var a: Int = 1",
"sealed sealed val a: Int = 1",
"sealed sealed class A",
"sealed sealed object A",
"sealed sealed trait A",
"sealed sealed case class A(a: Int)",
"sealed sealed type A",
"sealed object A",
"sealed case object A",
"sealed def foo(a: Int): Int = a",
"sealed val a: Int = 1",
"sealed val a: Int",
"sealed var a: Int = 1",
"sealed var a: Int",
"sealed type A",
"sealed type A = Int",
"def foo(sealed val a: Int): Int = a",
"class A(sealed val a: Int)"
)
}
test("override") {
val Defn.Object(Seq(Mod.Override()), _, _) = templStat("override object A")
val Defn.Object(Seq(Mod.Override(), Mod.Case()), _, _) = templStat("override case object A")
val Defn.Def(Seq(Mod.Override()), _, _, _, _, _) = templStat("override def foo(a: Int): Int = a")
val Defn.Val(Seq(Mod.Override()), _, _, _) = templStat("override val a: Int = 1")
val Defn.Var(Seq(Mod.Override()), _, _, _) = templStat("override var a: Int = 1")
val Defn.Type(Seq(Mod.Override()), _, _, _) = templStat("override type A = Int")
val Decl.Def(Seq(Mod.Override()), _, _, _, _) = templStat("override def foo(a: Int): Int")
val Decl.Val(Seq(Mod.Override()), _, _) = templStat("override val a: Int")
val Decl.Var(Seq(Mod.Override()), _, _) = templStat("override var a: Int")
val Decl.Type(Seq(Mod.Override()), _, _, _) = templStat("override type A")
interceptParseErrors(
"override override var a: Int",
"override override val a: Int",
"override override var a: Int = 1",
"override override val a: Int = 1",
"override override class A",
"override override object A",
"override override trait A",
"override override case class A(a: Int)",
"override override type A",
"def foo(override val a: Int): Int = a",
"override class A",
"override case class A(a: Int)",
"override trait A"
)
}
test("case") {
val Defn.Object(Seq(Mod.Case()), _, _) = templStat("case object A")
val Defn.Class(Seq(Mod.Case()), _, _, _, _) = templStat("case class A(a: Int)")
interceptParseErrors(
"case case var a: Int",
"case case val a: Int",
"case case var a: Int = 1",
"case case val a: Int = 1",
"case case class A",
"case case object A",
"case case trait A",
"case case case class A(a: Int)",
"case case type A",
"case val a: Int",
"case var a: Int",
"case val a: Int = 1",
"case var a: Int = 1",
"case def foo(a: Int): Int",
"case type A",
"case type A = Int",
"def foo(case val a: Int): Int = a",
"case def foo(val a: Int): Int = a",
"class A(case a: Int)"
)
}
test("abstract") {
val Defn.Trait(Seq(Mod.Abstract()), _, _, _, _) = templStat("abstract trait A")
val Defn.Class(Seq(Mod.Abstract()), _, _, _, _) = templStat("abstract class A")
val Defn.Class(Seq(Mod.Abstract(), Mod.Case()), _, _, _, _) = templStat("abstract case class A(a: Int)")
interceptParseErrors(
"abstract abstract var a: Int",
"abstract abstract val a: Int",
"abstract abstract var a: Int = 1",
"abstract abstract val a: Int = 1",
"abstract abstract class A",
"abstract abstract object A",
"abstract abstract trait A",
"abstract abstract case class A(a: Int)",
"abstract abstract type A",
"abstract val a: Int",
"abstract var a: Int",
"abstract val a: Int = 1",
"abstract var a: Int = 1",
"abstract def foo(a: Int): Int",
"abstract type A",
"abstract type A = Int",
"class A(abstract val a: Int)",
"def foo(abstract val a: Int): Int = a",
"abstract def foo(val a: Int): Int = a",
"abstract case object A",
"abstract object A"
)
}
test("lazy") {
val Defn.Val(Seq(Mod.Lazy()), _, _, _) = templStat("lazy val a: Int = 1")
interceptParseErrors(
"lazy lazy var a: Int",
"lazy lazy val a: Int",
"lazy lazy var a: Int = 1",
"lazy lazy val a: Int = 1",
"lazy lazy class A",
"lazy lazy object A",
"lazy lazy trait A",
"lazy lazy case class A(a: Int)",
"lazy lazy type A",
"lazy val a: Int",
"lazy var a: Int",
"lazy var a: Int = 1",
"lazy def foo(a: Int): Int",
"lazy type A",
"lazy type A = Int",
"def foo(lazy val a: Int): Int = a",
"class A(lazy val a: Int)",
"lazy def foo(val a: Int): Int = a",
"lazy case object A",
"lazy case class A(a: Int)",
"lazy class A",
"lazy object A"
)
}
test("abstract override") {
/* Non-trait members modified by `abstract override` receive a typechecking error */
val Defn.Object(Seq(Mod.Abstract(), Mod.Override()), _, _) = templStat("abstract override object A")
val Defn.Object(Seq(Mod.Abstract(), Mod.Override(), Mod.Case()), _, _) = templStat("abstract override case object A")
val Defn.Def(Seq(Mod.Abstract(), Mod.Override()), _, _, _, _, _) = templStat("abstract override def foo(a: Int): Int = a")
val Defn.Val(Seq(Mod.Abstract(), Mod.Override()), _, _, _) = templStat("abstract override val a: Int = 1")
val Defn.Var(Seq(Mod.Abstract(), Mod.Override()), _, _, _) = templStat("abstract override var a: Int = 1")
val Defn.Type(Seq(Mod.Abstract(), Mod.Override()), _, _, _) = templStat("abstract override type A = Int")
val Decl.Def(Seq(Mod.Abstract(), Mod.Override()), _, _, _, _) = templStat("abstract override def foo(a: Int): Int")
val Decl.Val(Seq(Mod.Abstract(), Mod.Override()), _, _) = templStat("abstract override val a: Int")
val Decl.Var(Seq(Mod.Abstract(), Mod.Override()), _, _) = templStat("abstract override var a: Int")
val Decl.Type(Seq(Mod.Abstract(), Mod.Override()), _, _, _) = templStat("abstract override type A")
interceptParseErrors(
"abstract override abstract override var a: Int",
"abstract override abstract override val a: Int",
"abstract override abstract override var a: Int = 1",
"abstract override abstract override val a: Int = 1",
"abstract override abstract override class A",
"abstract override abstract override object A",
"abstract override abstract override trait A",
"abstract override abstract override case class A(a: Int)",
"abstract override abstract override type A",
"def foo(abstract override val a: Int): Int = a",
"abstract override class A",
"abstract override case class A(a: Int)"
)
}
test("covariant") {
val Defn.Class(_, _,
Seq(Type.Param(Seq(Mod.Covariant()), _, _, _, _, _)),
_, _) = templStat("case class A[+T](t: T)")
val Defn.Class(_, _,
Seq(Type.Param(Seq(Mod.Covariant()), _, _, _, _, _)),
_, _) = templStat("class A[+T](t: T)")
val Defn.Type(_, _,
Seq(Type.Param(Seq(Mod.Covariant()), _, _, _, _, _)),
_) = templStat("type A[+T] = B[T]")
interceptParseErrors(
"def foo[+T](t: T): Int"
)
}
test("contravariant") {
val Defn.Class(_, _,
Seq(Type.Param(Seq(Mod.Contravariant()), _, _, _, _, _)),
_, _) = templStat("case class A[-T](t: T)")
val Defn.Class(_, _,
Seq(Type.Param(Seq(Mod.Contravariant()), _, _, _, _, _)),
_, _) = templStat("class A[-T](t: T)")
val Defn.Type(_, _,
Seq(Type.Param(Seq(Mod.Contravariant()), _, _, _, _, _)),
_) = templStat("type A[-T] = B[T]")
interceptParseErrors(
"def foo[-T](t: T): Int"
)
}
test("val param") {
val Defn.Class(_, _, _, Ctor.Primary(_, _,
Seq(Seq(Term.Param(Seq(Mod.ValParam()), _, _, _)))
), _) = templStat("case class A(val a: Int)")
val Defn.Class(_, _, _, Ctor.Primary(_, _,
Seq(Seq(Term.Param(Seq(Mod.ValParam()), _, _, _)))
), _) = templStat("class A(val a: Int)")
val Defn.Class(_, _, _, Ctor.Primary(_, _,
Seq(Seq(Term.Param(Seq(Mod.Implicit(), Mod.ValParam()), _, _, _)))
), _) = templStat("case class A(implicit val a: Int)")
val Defn.Class(_, _, _, Ctor.Primary(_, _,
Seq(Seq(Term.Param(Seq(Mod.Implicit(), Mod.ValParam()), _, _, _)))
), _) = templStat("class A(implicit val a: Int)")
// No ValParam detected inside parameter list
val Defn.Def(_, _, _, Seq(Seq(Term.Param(Seq(), _, _, _))), _, _) =
templStat("def foo(a: Int): Int = a")
interceptParseErrors(
"def foo(val a: Int): Int"
)
}
test("var param") {
val Defn.Class(_, _, _, Ctor.Primary(_, _,
Seq(Seq(Term.Param(Seq(Mod.VarParam()), _, _, _)))
), _) = templStat("case class A(var a: Int)")
val Defn.Class(_, _, _, Ctor.Primary(_, _,
Seq(Seq(Term.Param(Seq(Mod.VarParam()), _, _, _)))
), _) = templStat("class A(var a: Int)")
val Defn.Class(_, _, _, Ctor.Primary(_, _,
Seq(Seq(Term.Param(Seq(Mod.Implicit(), Mod.VarParam()), _, _, _)))
), _) = templStat("case class A(implicit var a: Int)")
val Defn.Class(_, _, _, Ctor.Primary(_, _,
Seq(Seq(Term.Param(Seq(Mod.Implicit(), Mod.VarParam()), _, _, _)))
), _) = templStat("class A(implicit var a: Int)")
interceptParseErrors(
"def foo(var a: Int): Int"
)
}
test("macro") {
val Defn.Macro(_, _, _, _, _, _) = templStat("def foo(a: Int): Int = macro myMacroImpl(a)")
}
test("final and abstract") {
    // Only check these because abstract can only be used for classes
val Defn.Class(Seq(Mod.Final(), Mod.Abstract()), _, _, _, _) = templStat("final abstract class A")
val Defn.Class(Seq(Mod.Final(), Mod.Abstract(), Mod.Case()), _, _, _, _) = templStat("final abstract case class A(a: Int)")
interceptParseErrors(
"final abstract trait A",
// Abstract should be inferred
"final trait A"
)
}
test("final and sealed") {
// Only check these because sealed can only be used for classes
interceptParseErrors(
"final sealed class A(a: Int)",
"final sealed case class A(a: Int)",
"final sealed trait A"
)
}
test("invalid private and protected") {
interceptParseErrors(
"private protected class A",
"protected private class A",
"private[something] protected class A",
"protected private[something] class A",
"protected[something] private class A",
"private protected[something] class A",
"protected protected class A",
"private private class A"
)
}
}
|
Dveim/scalameta
|
scalameta/scalameta/src/test/scala/scala/meta/tests/parsers/ModSuite.scala
|
Scala
|
bsd-3-clause
| 14,310
|
package org.mbari.varspub
import java.io.{File, FileOutputStream}
import javax.imageio.ImageIO
import org.junit.runner.RunWith
import org.mbari.vars.varspub.WatermarkUtilities
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, FlatSpec}
/**
*
*
* @author Brian Schlining
* @since 2015-03-24T12:18:00
*/
@RunWith(classOf[JUnitRunner])
class WatermarkUtilitiesSpec extends FlatSpec with Matchers {
"WatermarkUtilities" should "write png metadata" in {
val png = ImageIO.read(getClass.getResource("/images/Opisthoteuthis_spA_01.png"))
val metadata = Map("Title" -> "This is a title",
"Author" -> "Brian Schlining",
"Copyright" -> "2015",
"Software" -> getClass.getSimpleName)
val bytes = WatermarkUtilities.addMetadataAsPNG(png, metadata)
val os = new FileOutputStream(new File("target", s"${getClass.getSimpleName}.png"))
os.write(bytes)
os.close()
}
}
|
hohonuuli/vars
|
vars-standalone/src/test/scala/org/mbari/varspub/WatermarkUtilitiesSpec.scala
|
Scala
|
lgpl-2.1
| 928
|
import scala.scalajs.js
import js.annotation._
import js.|
package bz.otte.qtip2 {
// package QTip2 {
import org.scalajs.dom.Event
@js.native
trait Content extends js.Object {
var title: QTip2.Title | js.Any = js.native
var text: QTip2.Text = js.native
var attr: String = js.native
var button: String | JQuery | Boolean = js.native
}
@js.native
trait PositionAdjust extends js.Object {
var x: Double = js.native
var y: Double = js.native
var mouse: Boolean = js.native
var resize: Boolean = js.native
var scroll: Boolean = js.native
var method: String = js.native
}
@js.native
trait Position extends js.Object {
var my: String | Boolean = js.native
var at: String | Boolean = js.native
var target: QTip2.Target | Boolean = js.native
var container: JQuery | Boolean = js.native
var viewport: JQuery | Boolean = js.native
var effect: Boolean | js.Function3[Api, js.Any, js.Any, Unit] = js.native
var adjust: PositionAdjust = js.native
}
@js.native
trait Show extends js.Object {
var target: JQuery | Boolean = js.native
var event: String | Boolean = js.native
var delay: Double = js.native
var solo: JQuery | String | Boolean = js.native
var ready: Boolean = js.native
var effect: Boolean | js.Function1[js.Any, Unit] = js.native
var modal: Boolean | Modal = js.native
}
@js.native
trait Modal extends js.Object {
var on: Boolean = js.native
var blur: Boolean = js.native
var escape: Boolean = js.native
var stealfocus: Boolean = js.native
var effect: Boolean | js.Function1[js.Any, Unit] = js.native
}
@js.native
trait Hide extends js.Object {
var target: JQuery | Boolean = js.native
var event: String | Boolean = js.native
var delay: Double = js.native
var inactive: Double | Boolean = js.native
var fixed: Boolean = js.native
var leave: String | Boolean = js.native
var distance: Double | Boolean = js.native
var effect: Boolean | js.Function1[js.Any, Unit] = js.native
}
@js.native
trait Style extends js.Object {
var classes: String | Boolean = js.native
var `def`: Boolean = js.native
var widget: Boolean = js.native
var width: String | Double | Boolean = js.native
var height: String | Double | Boolean = js.native
var tip: String | Boolean | Tip = js.native
}
@js.native
trait Tip extends js.Object {
var corner: String | Boolean = js.native
var mimic: String | Boolean = js.native
var border: Double | Boolean = js.native
var width: Double = js.native
var height: Double = js.native
var offset: Double = js.native
}
@js.native
trait Events extends js.Object {
var render: QTip2.EventApiFunc = js.native
var show: QTip2.EventApiFunc = js.native
var hide: QTip2.EventApiFunc = js.native
var toggle: QTip2.EventApiFunc = js.native
var visible: QTip2.EventApiFunc = js.native
var hidden: QTip2.EventApiFunc = js.native
var move: QTip2.EventApiFunc = js.native
var focus: QTip2.EventApiFunc = js.native
var blur: QTip2.EventApiFunc = js.native
}
@js.native
trait QTipOptions extends js.Object {
var id: String | Boolean = js.native
var prerender: Boolean = js.native
var overwrite: Boolean = js.native
var suppress: Boolean = js.native
var metadata: js.Any = js.native
var content: QTip2.Text | Content = js.native
var position: String | Position = js.native
var style: String | Style = js.native
var show: String | Boolean | JQuery | Show = js.native
var hide: String | JQuery | Hide = js.native
var events: Events = js.native
}
@js.native
trait Api extends js.Object {
def get(propertyName: String): js.Dynamic = js.native
def set(properties: QTipOptions): Api = js.native
def set(propertyName: String, value: js.Any): Api = js.native
def toggle(state: Boolean = ???, event: Event = ???): Api = js.native
def show(event: Event = ???): Api = js.native
def hide(event: Event = ???): Api = js.native
def disable(state: Boolean = ???): Api = js.native
def enable(): Api = js.native
def reposition(event: Event = ???, effect: Boolean = ???): Api = js.native
def focus(event: Event = ???): Api = js.native
def blur(event: Event = ???): Api = js.native
def destroy(immediate: Boolean = ???): Api = js.native
}
@JSName("QTip2")
@js.native
object QTip2 extends js.Object {
type EventApiFunc = js.Function2[Event, Api, Unit]
type Title = String | JQuery | EventApiFunc | Boolean// | JQueryGenericPromise[js.Any]
type Text = String | JQuery | EventApiFunc | Boolean// | JQueryGenericPromise[js.Any]
type Target = JQuery | js.Array[Double] | String
}
// }
@js.native
trait JQuery extends js.Object {
def qtip(options: QTipOptions): JQuery = js.native
def qtip(methodName: String, p1: js.Any, p2: js.Any, p3: js.Any): js.Dynamic = js.native
}
}
|
wrotte/scala-js-cytoscape-js
|
src/main/scala/bz/otte/qtip2/QTip2.scala
|
Scala
|
mit
| 5,378
|
/*
Copyright 2013 Ilya Lakhin (Илья Александрович Лахин)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package name.lakhin.eliah.projects
package papacarlo.test
import name.lakhin.eliah.projects.papacarlo.test.utils.ParserSpec
import name.lakhin.eliah.projects.papacarlo.examples.Json
class JsonParserSpec extends ParserSpec("json") {
override def lexer = Json.lexer
override def parser = {
val lexer = Json.lexer
val syntax = Json.syntax(lexer)
(lexer, syntax)
}
}
|
Eliah-Lakhin/papa-carlo
|
src/test/scala/name.lakhin.eliah.projects/papacarlo/test/JsonParserSpec.scala
|
Scala
|
apache-2.0
| 1,022
|
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.sql
import slamdata.Predef._
import quasar.fp._
import quasar.fs._
import matryoshka._
import scalaz._, Scalaz._
sealed abstract class ParsingError { def message: String }
final case class GenericParsingError(message: String) extends ParsingError
final case class ParsingPathError(error: PathError) extends ParsingError {
def message = error.shows
}
object ParsingError {
implicit val parsingErrorShow: Show[ParsingError] = Show.showFromToString
}
|
drostron/quasar
|
frontend/src/main/scala/quasar/sql/ParsingError.scala
|
Scala
|
apache-2.0
| 1,075
|
package stronghold.graphs
/**
* problem description: http://rosalind.info/problems/trie/
*/
object PatternMatching {
object SampleData {
val sample: List[String] =
List(
"ATAGA",
"ATC",
"GAT"
)
}
import SampleData.sample
import utils.UtilityFunctions.{readInputData, writeListOfListsAsStringsToFile}
import Trie.traverseTrie
val inputFileName: String = "/stronghold/datasets/rosalind_trie.txt"
def getData(isPractice: Boolean): List[String] = if (isPractice) sample else readInputData(inputFileName)
def createTrieAdjacencyList(trie: Trie): List[String] = {
val edges: List[TrieEdge] = traverseTrie(trie).toList
for { TrieEdge(parentId, childId, letter) <- edges } yield
List(parentId.toString, childId.toString, letter.toString).mkString(" ")
}
def main(args: Array[String]): Unit = {
val strings: List[String] = getData(isPractice = false)
val trie: Trie = Trie(strings)
writeListOfListsAsStringsToFile(createTrieAdjacencyList(trie).map(row => List(row)))
}
}
|
ghostrider77/Bioinformatics
|
Bioinformatics/src/main/scala-2.11/stronghold/graphs/PatternMatching.scala
|
Scala
|
mit
| 1,063
|
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.stats
import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class SeqStatTest extends Specification with StatTestHelper {
def newStat[T](observe: Boolean = true): SeqStat = {
val stat = Stat(sft, "MinMax(intAttr);IteratorStackCount();Enumeration(longAttr);Histogram(doubleAttr,20,0,200)")
if (observe) {
features.foreach { stat.observe }
}
stat.asInstanceOf[SeqStat]
}
"Seq stat" should {
"be empty initiallly" >> {
val stat = newStat(observe = false)
stat.stats must haveSize(4)
stat.isEmpty must beFalse
val mm = stat.stats(0).asInstanceOf[MinMax[java.lang.Integer]]
val ic = stat.stats(1).asInstanceOf[IteratorStackCount]
val eh = stat.stats(2).asInstanceOf[EnumerationStat[java.lang.Long]]
val rh = stat.stats(3).asInstanceOf[Histogram[java.lang.Double]]
mm.property mustEqual "intAttr"
mm.isEmpty must beTrue
ic.counter mustEqual 1
eh.property mustEqual "longAttr"
eh.enumeration must beEmpty
rh.property mustEqual "doubleAttr"
forall(0 until rh.length)(rh.count(_) mustEqual 0)
}
"observe correct values" >> {
val stat = newStat()
val stats = stat.stats
stats must haveSize(4)
stat.isEmpty must beFalse
val mm = stat.stats(0).asInstanceOf[MinMax[java.lang.Integer]]
val ic = stat.stats(1).asInstanceOf[IteratorStackCount]
val eh = stat.stats(2).asInstanceOf[EnumerationStat[java.lang.Long]]
val rh = stat.stats(3).asInstanceOf[Histogram[java.lang.Double]]
mm.bounds mustEqual (0, 99)
ic.counter mustEqual 1
eh.enumeration.size mustEqual 100
eh.enumeration(0L) mustEqual 1
eh.enumeration(100L) mustEqual 0
rh.length mustEqual 20
rh.count(rh.indexOf(0.0)) mustEqual 10
rh.count(rh.indexOf(50.0)) mustEqual 10
rh.count(rh.indexOf(100.0)) mustEqual 0
}
"serialize to json" >> {
val stat = newStat()
stat.toJson must not(beEmpty)
}
"serialize empty to json" >> {
val stat = newStat(observe = false)
stat.toJson must not(beEmpty)
}
"serialize and deserialize" >> {
val stat = newStat()
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed)
unpacked.toJson mustEqual stat.toJson
}
"serialize and deserialize empty SeqStat" >> {
val stat = newStat(observe = false)
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed)
unpacked.toJson mustEqual stat.toJson
}
"deserialize as immutable value" >> {
val stat = newStat()
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed, immutable = true)
unpacked.toJson mustEqual stat.toJson
unpacked.clear must throwAn[Exception]
unpacked.+=(stat) must throwAn[Exception]
unpacked.observe(features.head) must throwAn[Exception]
unpacked.unobserve(features.head) must throwAn[Exception]
}
"combine two SeqStats" >> {
val stat = newStat()
val stat2 = newStat(observe = false)
val mm = stat.stats(0).asInstanceOf[MinMax[java.lang.Integer]]
val ic = stat.stats(1).asInstanceOf[IteratorStackCount]
val eh = stat.stats(2).asInstanceOf[EnumerationStat[java.lang.Long]]
val rh = stat.stats(3).asInstanceOf[Histogram[java.lang.Double]]
val mm2 = stat2.stats(0).asInstanceOf[MinMax[java.lang.Integer]]
val ic2 = stat2.stats(1).asInstanceOf[IteratorStackCount]
val eh2 = stat2.stats(2).asInstanceOf[EnumerationStat[java.lang.Long]]
val rh2 = stat2.stats(3).asInstanceOf[Histogram[java.lang.Double]]
ic2.counter mustEqual 1
mm2.isEmpty must beTrue
eh2.enumeration must beEmpty
rh2.length mustEqual 20
forall(0 until 20)(rh2.count(_) mustEqual 0)
features2.foreach { stat2.observe }
stat += stat2
mm.bounds mustEqual (0, 199)
ic.counter mustEqual 2
eh.enumeration.size mustEqual 200
eh.enumeration(0L) mustEqual 1
eh.enumeration(100L) mustEqual 1
rh.length mustEqual 20
rh.count(rh.indexOf(0.0)) mustEqual 10
rh.count(rh.indexOf(50.0)) mustEqual 10
rh.count(rh.indexOf(100.0)) mustEqual 10
mm2.bounds mustEqual (100, 199)
ic2.counter mustEqual 1
eh2.enumeration.size mustEqual 100
eh2.enumeration(0L) mustEqual 0
eh2.enumeration(100L) mustEqual 1
rh2.length mustEqual 20
rh2.count(rh2.indexOf(0.0)) mustEqual 0
rh2.count(rh2.indexOf(50.0)) mustEqual 0
rh2.count(rh2.indexOf(100.0)) mustEqual 10
}
"clear" >> {
val stat = newStat()
stat.isEmpty must beFalse
stat.clear()
val mm = stat.stats(0).asInstanceOf[MinMax[java.lang.Integer]]
val ic = stat.stats(1).asInstanceOf[IteratorStackCount]
val eh = stat.stats(2).asInstanceOf[EnumerationStat[java.lang.Long]]
val rh = stat.stats(3).asInstanceOf[Histogram[java.lang.Double]]
mm.property mustEqual "intAttr"
mm.isEmpty must beTrue
ic.counter mustEqual 1
eh.property mustEqual "longAttr"
eh.enumeration must beEmpty
rh.property mustEqual "doubleAttr"
forall(0 until rh.length)(rh.count(_) mustEqual 0)
}
}
}
|
ddseapy/geomesa
|
geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/stats/SeqStatTest.scala
|
Scala
|
apache-2.0
| 5,965
|
package scorex.api.http
import play.api.libs.json.{JsPath, Reads}
import play.api.libs.functional.syntax._
case class SignedMessage(message: String, signature: String, publickey: String)
object SignedMessage {
implicit val messageReads: Reads[SignedMessage] = (
(JsPath \ "message").read[String] and
(JsPath \ "signature").read[String] and
(JsPath \ "publickey").read[String]
) (SignedMessage.apply _)
}
|
B83YPoj/Waves
|
src/main/scala/scorex/api/http/SignedMessage.scala
|
Scala
|
apache-2.0
| 432
|
package me.shengmin
import java.io._
object Io {
def useReaderWriter[R](arguments: Array[String])(block: (BufferedReader, PrintWriter) => R) = {
use(
new BufferedReader(
if (arguments.length == 0) new InputStreamReader(System.in)
else new FileReader(arguments(0))),
new PrintWriter(new BufferedWriter(new OutputStreamWriter(System.out))))(block)
}
def use[A <: Closeable, B <: Closeable, R](resourceA: A, resourceB: B)(block: (A, B) => R): R = {
try {
block(resourceA, resourceB)
} finally {
if (resourceA != null) resourceA.close()
if (resourceB != null) resourceB.close()
}
}
def use[T <: Closeable, R](resource: T)(block: T => R): R = {
try {
block(resource)
} finally {
if (resource != null) resource.close()
}
}
}
|
shengmin/coding-problem
|
common/src/main/scala/me/shengmin/core/Io.scala
|
Scala
|
mit
| 822
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming
import java.util.UUID
import scala.collection.mutable
import org.apache.spark.scheduler.{LiveListenerBus, SparkListener, SparkListenerEvent}
import org.apache.spark.sql.streaming.StreamingQueryListener
import org.apache.spark.util.ListenerBus
/**
* A bus to forward events to [[StreamingQueryListener]]s. This one will send received
* [[StreamingQueryListener.Event]]s to the Spark listener bus. It also registers itself with
* Spark listener bus, so that it can receive [[StreamingQueryListener.Event]]s and dispatch them
* to StreamingQueryListeners.
*
* Note that each bus and its registered listeners are associated with a single SparkSession
* and StreamingQueryManager. So this bus will dispatch events to registered listeners for only
* those queries that were started in the associated SparkSession.
*/
class StreamingQueryListenerBus(sparkListenerBus: LiveListenerBus)
extends SparkListener with ListenerBus[StreamingQueryListener, StreamingQueryListener.Event] {
import StreamingQueryListener._
sparkListenerBus.addListener(this)
/**
* RunIds of active queries whose events are supposed to be forwarded by this ListenerBus
* to registered `StreamingQueryListeners`.
*
* Note 1: We need to track runIds instead of ids because the runId is unique for every started
   * query, even if it is a restart. So even if a query is restarted, this bus will identify them
* separately and correctly account for the restart.
*
* Note 2: This list needs to be maintained separately from the
* `StreamingQueryManager.activeQueries` because a terminated query is cleared from
   * `StreamingQueryManager.activeQueries` as soon as it is stopped, but this ListenerBus
* must clear a query only after the termination event of that query has been posted.
*/
private val activeQueryRunIds = new mutable.HashSet[UUID]
/**
* Post a StreamingQueryListener event to the added StreamingQueryListeners.
* Note that only the QueryStarted event is posted to the listener synchronously. Other events
* are dispatched to Spark listener bus. This method is guaranteed to be called by queries in
* the same SparkSession as this listener.
*/
def post(event: StreamingQueryListener.Event) {
event match {
case s: QueryStartedEvent =>
activeQueryRunIds.synchronized { activeQueryRunIds += s.runId }
sparkListenerBus.post(s)
// post to local listeners to trigger callbacks
postToAll(s)
case _ =>
sparkListenerBus.post(event)
}
}
/**
* Override the parent `postToAll` to remove the query id from `activeQueryRunIds` after all
* the listeners process `QueryTerminatedEvent`. (SPARK-19594)
*/
override def postToAll(event: Event): Unit = {
super.postToAll(event)
event match {
case t: QueryTerminatedEvent =>
activeQueryRunIds.synchronized { activeQueryRunIds -= t.runId }
case _ =>
}
}
override def onOtherEvent(event: SparkListenerEvent): Unit = {
event match {
case e: StreamingQueryListener.Event =>
// SPARK-18144: we broadcast QueryStartedEvent to all listeners attached to this bus
// synchronously and the ones attached to LiveListenerBus asynchronously. Therefore,
// we need to ignore QueryStartedEvent if this method is called within SparkListenerBus
// thread
if (!LiveListenerBus.withinListenerThread.value || !e.isInstanceOf[QueryStartedEvent]) {
postToAll(e)
}
case _ =>
}
}
/**
* Dispatch events to registered StreamingQueryListeners. Only the events associated queries
* started in the same SparkSession as this ListenerBus will be dispatched to the listeners.
*/
override protected def doPostEvent(
listener: StreamingQueryListener,
event: StreamingQueryListener.Event): Unit = {
def shouldReport(runId: UUID): Boolean = {
activeQueryRunIds.synchronized { activeQueryRunIds.contains(runId) }
}
event match {
case queryStarted: QueryStartedEvent =>
if (shouldReport(queryStarted.runId)) {
listener.onQueryStarted(queryStarted)
}
case queryProgress: QueryProgressEvent =>
if (shouldReport(queryProgress.progress.runId)) {
listener.onQueryProgress(queryProgress)
}
case queryTerminated: QueryTerminatedEvent =>
if (shouldReport(queryTerminated.runId)) {
listener.onQueryTerminated(queryTerminated)
}
case _ =>
}
}
}
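// Usage sketch (illustrative, not part of the original file): listeners receive the events this
// bus dispatches by registering through the session's StreamingQueryManager. Assuming an active
// SparkSession named `spark`, a listener could be attached like this:
//
//   import org.apache.spark.sql.streaming.StreamingQueryListener
//   import org.apache.spark.sql.streaming.StreamingQueryListener._
//
//   spark.streams.addListener(new StreamingQueryListener {
//     override def onQueryStarted(event: QueryStartedEvent): Unit =
//       println(s"query started: ${event.id} (run ${event.runId})")
//     override def onQueryProgress(event: QueryProgressEvent): Unit =
//       println(event.progress.json)
//     override def onQueryTerminated(event: QueryTerminatedEvent): Unit =
//       println(s"query terminated: ${event.runId}")
//   })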
|
wangyixiaohuihui/spark2-annotation
|
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/StreamingQueryListenerBus.scala
|
Scala
|
apache-2.0
| 5,509
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.consumer
import org.junit.Assert._
import org.scalatest.junit.JUnitSuite
import org.junit.Test
import kafka.common.Topic
class TopicFilterTest extends JUnitSuite {
@Test
def testWhitelists() {
val topicFilter1 = new Whitelist("white1,white2")
assertTrue(topicFilter1.isTopicAllowed("white2", excludeInternalTopics = true))
assertTrue(topicFilter1.isTopicAllowed("white2", excludeInternalTopics = false))
assertFalse(topicFilter1.isTopicAllowed("black1", excludeInternalTopics = true))
assertFalse(topicFilter1.isTopicAllowed("black1", excludeInternalTopics = false))
val topicFilter2 = new Whitelist(".+")
assertTrue(topicFilter2.isTopicAllowed("alltopics", excludeInternalTopics = true))
assertFalse(topicFilter2.isTopicAllowed(Topic.GroupMetadataTopicName, excludeInternalTopics = true))
assertTrue(topicFilter2.isTopicAllowed(Topic.GroupMetadataTopicName, excludeInternalTopics = false))
val topicFilter3 = new Whitelist("white_listed-topic.+")
assertTrue(topicFilter3.isTopicAllowed("white_listed-topic1", excludeInternalTopics = true))
assertFalse(topicFilter3.isTopicAllowed("black1", excludeInternalTopics = true))
val topicFilter4 = new Whitelist("test-(?!bad\\b)[\\w]+")
assertTrue(topicFilter4.isTopicAllowed("test-good", excludeInternalTopics = true))
assertFalse(topicFilter4.isTopicAllowed("test-bad", excludeInternalTopics = true))
}
@Test
def testBlacklists() {
val topicFilter1 = new Blacklist("black1")
assertTrue(topicFilter1.isTopicAllowed("white2", excludeInternalTopics = true))
assertTrue(topicFilter1.isTopicAllowed("white2", excludeInternalTopics = false))
assertFalse(topicFilter1.isTopicAllowed("black1", excludeInternalTopics = true))
assertFalse(topicFilter1.isTopicAllowed("black1", excludeInternalTopics = false))
assertFalse(topicFilter1.isTopicAllowed(Topic.GroupMetadataTopicName, excludeInternalTopics = true))
assertTrue(topicFilter1.isTopicAllowed(Topic.GroupMetadataTopicName, excludeInternalTopics = false))
}
@Test
def testWildcardTopicCountGetTopicCountMapEscapeJson() {
def getTopicCountMapKey(regex: String): String = {
val topicCount = new WildcardTopicCount(null, "consumerId", new Whitelist(regex), 1, true)
topicCount.getTopicCountMap.head._1
}
    // let's make sure that the JSON strings are escaped as we expect
    // if they are not, then when they get saved to ZooKeeper and read back out they will be broken on parse
assertEquals("-\\\"-", getTopicCountMapKey("-\"-"))
assertEquals("-\\\\-", getTopicCountMapKey("-\\-"))
assertEquals("-\\/-", getTopicCountMapKey("-/-"))
assertEquals("-\\\\b-", getTopicCountMapKey("-\\b-"))
assertEquals("-\\\\f-", getTopicCountMapKey("-\\f-"))
assertEquals("-\\\\n-", getTopicCountMapKey("-\\n-"))
assertEquals("-\\\\r-", getTopicCountMapKey("-\\r-"))
assertEquals("-\\\\t-", getTopicCountMapKey("-\\t-"))
assertEquals("-\\\\u0000-", getTopicCountMapKey("-\\u0000-"))
assertEquals("-\\\\u001f-", getTopicCountMapKey("-\\u001f-"))
assertEquals("-\\\\u007f-", getTopicCountMapKey("-\\u007f-"))
assertEquals("-\\\\u009f-", getTopicCountMapKey("-\\u009f-"))
}
}
|
eribeiro/kafka
|
core/src/test/scala/unit/kafka/consumer/TopicFilterTest.scala
|
Scala
|
apache-2.0
| 4,053
|
/*
* AudioFileOut.scala
* (FScape)
*
* Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.fscape
package graph
import java.net.URI
import de.sciss.audiofile.AudioFileSpec
import de.sciss.fscape.Graph.{ProductReader, RefMapIn}
import de.sciss.fscape.UGen.Adjunct
import de.sciss.fscape.UGenSource.unwrap
import de.sciss.fscape.stream.{StreamIn, StreamOut}
import scala.collection.immutable.{IndexedSeq => Vec}
object AudioFileOut extends ProductReader[AudioFileOut] {
override def read(in: RefMapIn, key: String, arity: Int): AudioFileOut = {
require (arity == 2)
val _in = in.readGE()
val _file = in.readURI()
val _spec = in.readProductT[AudioFileSpec]()
new AudioFileOut(_in, _file, _spec)
}
}
/** A UGen that writes its input signal to an audio file. The output signal
* is the monotonously increasing number of frames written,
* which can be used to monitor progress or determine the
* end of the writing process. The UGen keeps running until
* the `in` signal ends.
*
* @param in the signal to write.
* @param file the file to write to
* @param spec the spec for the audio file, including numbers of channels and sample-rate
*/
final case class AudioFileOut(in: GE, file: URI, spec: AudioFileSpec) extends UGenSource.SingleOut {
protected def makeUGens(implicit b: UGenGraph.Builder): UGenInLike = unwrap(this, in.expand.outputs)
protected def makeUGen(args: Vec[UGenIn])(implicit b: UGenGraph.Builder): UGenInLike =
UGen.SingleOut(this, inputs = args,
adjuncts = Adjunct.FileOut(file) :: Adjunct.AudioFileSpec(spec) :: Nil, isIndividual = true, hasSideEffect = true)
private[fscape] def makeStream(args: Vec[StreamIn])(implicit b: stream.Builder): StreamOut = {
stream.AudioFileOut(uri = file, spec = spec, in = args.map(_.toDouble))
}
}
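// Usage sketch (illustrative, not from the original source): the frame count described in the
// scaladoc above can be polled to report progress. Graph elements such as `SinOsc`, `Metro`,
// `.take` and `.poll`, as well as the value `outFile: URI`, are assumed to be available in the
// surrounding FScape graph DSL:
//
//   val sr     = 44100.0
//   val sig    = SinOsc(440.0 / sr).take(sr * 10)    // ten seconds of a sine tone
//   val frames = AudioFileOut(sig, file = outFile,
//     spec = AudioFileSpec(numChannels = 1, sampleRate = sr))
//   frames.poll(Metro(sr), "frames written")          // report progress roughly once per second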
|
Sciss/FScape-next
|
core/jvm/src/main/scala/de/sciss/fscape/graph/AudioFileOut.scala
|
Scala
|
agpl-3.0
| 2,019
|
package com.mentatlabs.nsa
package scalac
package options
/* -Ywarn-nullary-override
* =======================
* 2.9.1 - 2.11.1: Warn when non-nullary overrides nullary, e.g. `def foo()` over `def foo`.
* 2.11.2 - 2.12.0: Warn when non-nullary `def f()' overrides nullary `def f'.
*/
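/* Illustrative sketch (not part of the original file) of the pattern this option warns about:
 * a nullary member `def greeting` overridden by a non-nullary (empty-parens) definition.
 *
 *   trait Greeter { def greeting: String = "hi" }
 *   class Loud extends Greeter { override def greeting(): String = "HI" }
 *
 * Compiling the above with -Ywarn-nullary-override enabled emits the warning.
 */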
case object ScalacYWarnNullaryOverride
extends ScalacOptionBoolean("-Ywarn-nullary-override", ScalacVersions.`2.9.1`)
|
melezov/sbt-nsa
|
nsa-core/src/main/scala/com/mentatlabs/nsa/scalac/options/private/ScalacYWarnNullaryOverride.scala
|
Scala
|
bsd-3-clause
| 416
|
def hello:String = {
println("in hello")
"hello"
}
|
grzegorzbalcerek/scala-book-examples
|
examples/ByNameParameters2.scala
|
Scala
|
mit
| 55
|
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.javalib.util.function
import org.junit.Assert._
import org.junit.Test
import java.util.function._
class ToLongBiFunctionTest {
@Test def applyAsLong(): Unit = {
val op = new ToLongBiFunction[String, String] {
override def applyAsLong(t: String, u: String): Long = t.toLong * u.toLong
}
assertEquals(op.applyAsLong("11111111", "2222222"), 24691355308642L)
}
}
|
scala-js/scala-js
|
test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/util/function/ToLongBiFunctionTest.scala
|
Scala
|
apache-2.0
| 688
|
// Starter Code for Exercise 2
// From "Overloading" atom
import com.atomicscala.AtomicTest._
f() is 0
f(1) is 1
f(1, 2) is 3
f(1, 2, 3) is 6
f(1, 2, 3, 4) is 10
|
P7h/ScalaPlayground
|
Atomic Scala/atomic-scala-solutions/24_Overloading/Starter-2.scala
|
Scala
|
apache-2.0
| 163
|
/*
* Copyright 2013 websudos ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.websudos.phantom.dsl.specialized
import org.scalatest.concurrent.PatienceConfiguration
import org.scalatest.time.SpanSugar._
import com.websudos.phantom.Implicits._
import com.websudos.phantom.tables._
import com.websudos.phantom.testing.PhantomCassandraTestSuite
import com.websudos.util.testing._
class JodaDateTimeColumn extends PhantomCassandraTestSuite {
implicit val s: PatienceConfiguration.Timeout = timeout(10 seconds)
override def beforeAll(): Unit = {
super.beforeAll()
PrimitivesJoda.insertSchema()
}
it should "correctly insert and extract a JodaTime date" in {
val row = gen[JodaRow]
val w = PrimitivesJoda.insert
.value(_.pkey, row.pkey)
.value(_.intColumn, row.int)
.value(_.timestamp, row.bi)
.future() flatMap {
_ => PrimitivesJoda.select.where(_.pkey eqs row.pkey).one()
}
w successful {
res => res.get shouldEqual row
}
}
it should "correctly insert and extract a JodaTime date with Twitter Futures" in {
val row = gen[JodaRow]
val w = PrimitivesJoda.insert
.value(_.pkey, row.pkey)
.value(_.intColumn, row.int)
.value(_.timestamp, row.bi)
.execute() flatMap {
_ => PrimitivesJoda.select.where(_.pkey eqs row.pkey).get()
}
w successful {
res => res.get shouldEqual row
}
}
}
|
nosheenzaza/phantom-data-centric
|
phantom-dsl/src/test/scala/com/websudos/phantom/dsl/specialized/JodaDateTimeColumn.scala
|
Scala
|
gpl-2.0
| 1,959
|
package com.getjenny.starchat.services
/**
* Created by Angelo Leto <angelo@getjenny.com> on 23/08/17.
*/
import akka.actor.{Actor, Props}
import com.getjenny.starchat.SCActorSystem
import scala.collection.parallel.ParSeq
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.util.{Failure, Success, Try}
/** Download and update the decision tables from elasticsearch
*/
object CronReloadDTService extends CronService {
protected[this] val indexManagementService: LanguageIndexManagementService.type = LanguageIndexManagementService
class ReloadAnalyzersTickActor extends Actor {
protected[this] var updateTimestamp: Long = 0
def receive: PartialFunction[Any, Unit] = {
case `tickMessage` =>
val startUpdateTimestamp: Long = System.currentTimeMillis
val maxItemsIndexesToUpdate: Long = math.max(analyzerService.dtMaxTables, analyzerService.analyzersMap.size)
log.debug("Start DT reloading session: {} items({})", startUpdateTimestamp, maxItemsIndexesToUpdate)
val indexCheck: ParSeq[(String, Boolean)] =
instanceRegistryService.allEnabledInstanceTimestamp(Some(updateTimestamp), Some(maxItemsIndexesToUpdate))
.par.map { dtReloadEntry =>
val indexAnalyzers: Option[ActiveAnalyzers] =
analyzerService.analyzersMap.get(dtReloadEntry.indexName)
val localReloadIndexTimestamp = indexAnalyzers match {
case Some(ts) => ts.lastReloadingTimestamp
case _ => InstanceRegistryDocument.InstanceRegistryTimestampDefault
}
if (dtReloadEntry.timestamp > 0 && localReloadIndexTimestamp < dtReloadEntry.timestamp) {
log.info("dt reloading for index(" + dtReloadEntry.indexName +
") timestamp (" + startUpdateTimestamp + ") : " + dtReloadEntry.timestamp)
Try(analyzerService.loadAnalyzers(indexName = dtReloadEntry.indexName,
incremental = dtReloadEntry.incremental)) match {
case Success(relRes) =>
updateTimestamp = math.max(updateTimestamp, localReloadIndexTimestamp)
log.info("Analyzer loaded for index(" + dtReloadEntry + "), timestamp (" +
startUpdateTimestamp + ") res(" + relRes + ") remote ts: " + dtReloadEntry)
analyzerService.analyzersMap(dtReloadEntry.indexName)
.lastReloadingTimestamp = dtReloadEntry.timestamp
(dtReloadEntry.indexName, true)
case Failure(e) =>
log.error("unable to load analyzers for index({}), timestamp({}), cron job: ",
dtReloadEntry, startUpdateTimestamp, e)
(dtReloadEntry.indexName, false)
}
} else {
(dtReloadEntry.indexName, true)
}
}
indexCheck.par.filter { case (_, check) => !check }.foreach { case (index, _) =>
val indexMgmRes = indexManagementService.check(index)
if (indexMgmRes.check) {
log.error("Index exists but loading results in an error: " + indexMgmRes.message)
} else {
instanceRegistryService.markAsDeleted(ids = List(index))
log.debug("Deleted update record for the index: " + index)
}
}
}
}
def scheduleAction(): Unit = {
if (nodeDtLoadingStatusService.elasticClient.dtReloadCheckFrequency > 0) {
val reloadDecisionTableActorRef =
SCActorSystem.system.actorOf(Props(new ReloadAnalyzersTickActor))
SCActorSystem.system.scheduler.scheduleWithFixedDelay(
0 seconds,
nodeDtLoadingStatusService.elasticClient.dtReloadCheckFrequency seconds,
reloadDecisionTableActorRef,
tickMessage)
}
}
}
|
GetJenny/starchat
|
src/main/scala/com/getjenny/starchat/services/CronReloadDTService.scala
|
Scala
|
gpl-2.0
| 3,813
|
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core.javac
import java.nio.charset.Charset
import scala.collection.JavaConversions._
import scala.collection.breakOut
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import akka.actor.ActorRef
import com.sun.source.tree.{ Scope, IdentifierTree, MemberSelectTree, Tree }
import com.sun.source.util.TreePath
import javax.lang.model.`type`.TypeMirror
import javax.lang.model.element.{ Element, ExecutableElement, PackageElement, TypeElement, VariableElement }
import javax.lang.model.util.ElementFilter
import org.ensime.api._
import org.ensime.core.CompletionUtil
import org.ensime.util.file._
trait JavaCompletion { this: JavaCompiler =>
import CompletionUtil._
protected def scopeForPoint(file: SourceFileInfo, offset: Int): Option[(Compilation, Scope)]
protected def pathToPoint(file: SourceFileInfo, offset: Int): Option[(Compilation, TreePath)]
protected def indexer: ActorRef
def completionsAt(info: SourceFileInfo, offset: Int, maxResultsArg: Int, caseSens: Boolean): CompletionInfoList = {
val maxResults = if (maxResultsArg == 0) Int.MaxValue else maxResultsArg
val s = contentsAsString(info, DefaultCharset)
val preceding = s.slice(Math.max(0, offset - 100), offset)
val defaultPrefix = JavaIdentRegexp.findFirstMatchIn(preceding) match {
case Some(m) => m.group(1)
case _ => ""
}
val constructing = ConstructingRegexp.findFirstMatchIn(preceding).isDefined
val indexAfterTarget = Math.max(0, offset - defaultPrefix.length - 1)
val precedingChar = s(indexAfterTarget)
val isMemberAccess = precedingChar == '.'
val candidates: List[CompletionInfo] = (if (ImportSubtypeRegexp.findFirstMatchIn(preceding).isDefined) {
// Erase the trailing partial subtype (it breaks type resolution).
val patched = s.substring(0, indexAfterTarget) + " " + s.substring(indexAfterTarget + defaultPrefix.length + 1);
(pathToPoint(SourceFileInfo(info.file, Some(patched), None), indexAfterTarget - 1) map {
case (c: Compilation, path: TreePath) => {
memberCandidates(c, path.getLeaf, defaultPrefix, true, caseSens)
}
})
} else if (ImportRegexp.findFirstMatchIn(preceding).isDefined) {
(pathToPoint(info, indexAfterTarget) flatMap {
case (c: Compilation, path: TreePath) => {
getEnclosingMemberSelectTree(path).map { m =>
packageMemberCandidates(c, m, defaultPrefix, caseSens)
}
}
})
} else if (isMemberAccess) {
// Erase the trailing partial member (it breaks type resolution).
val patched = s.substring(0, indexAfterTarget) + ".wait()" + s.substring(indexAfterTarget + defaultPrefix.length + 1);
(pathToPoint(SourceFileInfo(info.file, Some(patched), None), indexAfterTarget + 1) flatMap {
case (c: Compilation, path: TreePath) => {
getEnclosingMemberSelectTree(path).map { m =>
memberCandidates(c, m.getExpression(), defaultPrefix, false, caseSens)
}
}
})
} else {
// Kick off an index search if the name looks like a type.
val typeSearch = if (TypeNameRegex.findFirstMatchIn(defaultPrefix).isDefined) {
Some(fetchTypeSearchCompletions(defaultPrefix, maxResults, indexer))
} else None
(scopeForPoint(info, indexAfterTarget) map {
case (c: Compilation, s: Scope) => {
scopeMemberCandidates(c, s, defaultPrefix, caseSens, constructing)
}
}) map { scopeCandidates =>
val typeSearchResult = typeSearch.flatMap(Await.result(_, Duration.Inf)).getOrElse(List())
scopeCandidates ++ typeSearchResult
}
}).getOrElse(List())
CompletionInfoList(defaultPrefix, candidates.sortWith({ (c1, c2) =>
c1.relevance > c2.relevance ||
(c1.relevance == c2.relevance &&
c1.name.length < c2.name.length)
}).take(maxResults))
}
private def getEnclosingMemberSelectTree(path: TreePath): Option[MemberSelectTree] = {
var p = path
while (p != null) {
p.getLeaf match {
case m: MemberSelectTree => return Some(m)
case _ => {}
}
p = p.getParentPath
}
None
}
private def selectedPackageName(m: MemberSelectTree): String = {
val name = m.getIdentifier.toString
m.getExpression match {
case m: MemberSelectTree => selectedPackageName(m) + "." + name
case i: IdentifierTree => i.getName.toString() + "." + name
case _ => name
}
}
private def packageMemberCandidates(
compilation: Compilation,
select: MemberSelectTree,
prefix: String,
caseSense: Boolean
): List[CompletionInfo] = {
val pkg = selectedPackageName(select)
val candidates = (Option(compilation.elements.getPackageElement(pkg)) map { p: PackageElement =>
p.getEnclosedElements().flatMap { e => filterElement(compilation, e, prefix, caseSense, true, false) }
}).getOrElse(List())
candidates.toList
}
private def filterElement(c: Compilation, e: Element, prefix: String, caseSense: Boolean,
typesOnly: Boolean, constructors: Boolean, baseRelevance: Int = 0): List[CompletionInfo] = {
val s = e.getSimpleName.toString
    // reward case-sensitive matches
val relevance = if (s.startsWith(prefix)) baseRelevance + 50 else baseRelevance
if (matchesPrefix(s, prefix, matchEntire = false, caseSens = caseSense) && !s.contains("$") && !s.contains("<init>")) {
e match {
case e: ExecutableElement if !typesOnly => List(methodInfo(e, relevance + 5))
case e: VariableElement if !typesOnly => List(fieldInfo(e, relevance + 10))
case e: TypeElement => if (constructors) constructorInfos(c, e, relevance + 5) else List(typeInfo(e, relevance))
case _ => List()
}
} else List()
}
private def scopeMemberCandidates(
compilation: Compilation,
scope: Scope,
prefix: String,
caseSense: Boolean,
constructing: Boolean
): List[CompletionInfo] = {
var candidates = ArrayBuffer[CompletionInfo]()
// Note Scope#getLocalElements does not include fields / members of
// enclosing classes. Need to add those manually.
//
def addTypeMembers(tel: TypeElement, relevance: Int): Unit = {
for (el <- compilation.elements.getAllMembers(tel)) {
for (info <- filterElement(compilation, el, prefix, caseSense, false, constructing, relevance)) {
candidates += info
}
}
}
var relevance = 0
for (tel <- Option(scope.getEnclosingClass())) {
addTypeMembers(tel, relevance)
var t = tel.getEnclosingElement()
while (t != null) {
t match {
case tel: TypeElement => addTypeMembers(tel, relevance)
case _ =>
}
t = t.getEnclosingElement()
relevance -= 10
}
}
relevance = 0
var s = scope
while (s != null) {
for (el <- s.getLocalElements()) {
for (info <- filterElement(compilation, el, prefix, caseSense, false, constructing, relevance)) {
candidates += info
}
}
s = s.getEnclosingScope()
relevance -= 10
}
candidates.toList
}
private def memberCandidates(
c: Compilation,
target: Tree,
prefix: String,
importing: Boolean,
caseSense: Boolean
): List[CompletionInfo] = {
typeElement(c, target).toList.flatMap {
case tel: TypeElement =>
val path = c.trees.getPath(c.compilationUnit, target)
val scope = c.trees.getScope(path)
val isAccessible: Element => Boolean = c.trees
.isAccessible(scope, _, c.types.getDeclaredType(tel))
c.elements.getAllMembers(tel).filter(isAccessible).flatMap { el =>
filterElement(c, el, prefix, caseSense, importing, false)
}(breakOut)
case e =>
log.warn("Unrecognized type element " + e)
List.empty
}
}
private def methodInfo(e: ExecutableElement, relevance: Int): CompletionInfo =
CompletionInfo(
Some(methodToTypeInfo(e)),
e.getSimpleName.toString,
CompletionSignature(
List(e.getParameters().map { p => (p.getSimpleName.toString, p.asType.toString) }.toList),
e.getReturnType.toString,
false
),
true, relevance, None
)
private def fieldInfo(e: VariableElement, relevance: Int): CompletionInfo = {
val s = e.getSimpleName.toString
CompletionInfo(
None,
s, CompletionSignature(List(), e.asType.toString, false), false, relevance, None
)
}
private def typeInfo(e: TypeElement, relevance: Int): CompletionInfo = {
val s = e.getSimpleName.toString
CompletionInfo(
None,
s, CompletionSignature(List(), e.asType.toString, false), false, relevance, None
)
}
private def constructorInfos(compilation: Compilation, e: TypeElement, relevance: Int): List[CompletionInfo] = {
val s = e.getSimpleName.toString
ElementFilter.constructorsIn(compilation.elements.getAllMembers(e)).map(methodInfo(_, relevance)).map { m =>
m.copy(name = s)
}.toList
}
private def localTypeName(tm: TypeMirror) = {
val s = tm.toString
    val (front, back) = s.split("\\.").partition { s => s.forall(Character.isLowerCase) }
if (back.isEmpty) s else back.mkString(".")
}
private def contentsAsString(sf: SourceFileInfo, charset: Charset) = sf match {
case SourceFileInfo(f, None, None) => f.readString()
case SourceFileInfo(f, Some(contents), None) => contents
case SourceFileInfo(f, None, Some(contentsIn)) => contentsIn.readString()
}
}
|
d1egoaz/ensime-sbt
|
src/sbt-test/sbt-ensime/ensime-server/core/src/main/scala/org/ensime/core/javac/JavaCompletion.scala
|
Scala
|
apache-2.0
| 9,777
|
// Copyright 2011 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.rogue
import com.mongodb.BasicDBObjectBuilder
import java.util.regex.Pattern
abstract class QueryClause[V](
val fieldName: String,
val actualIndexBehavior: MaybeIndexed,
val conditions: (CondOps.Value, V)*
) {
def extend(q: BasicDBObjectBuilder, signature: Boolean) {
conditions foreach { case (op, v) => q.add(op.toString, if (signature) 0 else v) }
}
var negated: Boolean = false
var expectedIndexBehavior: MaybeIndexed = Index
}
abstract class IndexableQueryClause[V, Ind <: MaybeIndexed](fname: String, actualIB: Ind, conds: (CondOps.Value, V)*)
extends QueryClause[V](fname, actualIB, conds: _*)
trait ShardKeyClause
case class AllQueryClause[V](override val fieldName: String, vs: java.util.List[V])
extends IndexableQueryClause[java.util.List[V], Index](fieldName, Index, CondOps.All -> vs) {}
case class InQueryClause[V](override val fieldName: String, vs: java.util.List[V])
extends IndexableQueryClause[java.util.List[V], Index](fieldName, Index, CondOps.In -> vs) {}
case class GtQueryClause[V](override val fieldName: String, v: V)
extends IndexableQueryClause[V, PartialIndexScan](fieldName, PartialIndexScan, CondOps.Gt -> v) {}
case class GtEqQueryClause[V](override val fieldName: String, v: V)
extends IndexableQueryClause[V, PartialIndexScan](fieldName, PartialIndexScan, CondOps.GtEq -> v) {}
case class LtQueryClause[V](override val fieldName: String, v: V)
extends IndexableQueryClause[V, PartialIndexScan](fieldName, PartialIndexScan, CondOps.Lt -> v) {}
case class LtEqQueryClause[V](override val fieldName: String, v: V)
extends IndexableQueryClause[V, PartialIndexScan](fieldName, PartialIndexScan, CondOps.LtEq -> v) {}
case class BetweenQueryClause[V](override val fieldName: String, lower: V, upper: V)
extends IndexableQueryClause[V, PartialIndexScan](
fieldName,
PartialIndexScan,
CondOps.GtEq -> lower,
CondOps.LtEq -> upper
) {}
case class StrictBetweenQueryClause[V](override val fieldName: String, lower: V, upper: V)
extends IndexableQueryClause[V, PartialIndexScan](
fieldName,
PartialIndexScan,
CondOps.Gt -> lower,
CondOps.Lt -> upper
) {}
case class NeQueryClause[V](override val fieldName: String, v: V)
extends IndexableQueryClause[V, PartialIndexScan](fieldName, PartialIndexScan, CondOps.Ne -> v) {}
case class NearQueryClause[V](override val fieldName: String, v: V)
extends IndexableQueryClause[V, PartialIndexScan](fieldName, PartialIndexScan, CondOps.Near -> v) {}
case class NearSphereQueryClause[V](override val fieldName: String, lat: Double, lng: Double, radians: Radians)
extends IndexableQueryClause[V, PartialIndexScan](fieldName, PartialIndexScan) {
override def extend(q: BasicDBObjectBuilder, signature: Boolean) {
q.add(CondOps.NearSphere.toString, if (signature) 0 else QueryHelpers.list(List(lat, lng)))
q.add(CondOps.MaxDistance.toString, if (signature) 0 else radians.value)
}
}
case class ModQueryClause[V](override val fieldName: String, v: java.util.List[V])
extends IndexableQueryClause[java.util.List[V], IndexScan](fieldName, IndexScan, CondOps.Mod -> v) {}
case class TypeQueryClause(override val fieldName: String, v: MongoType.Value)
extends IndexableQueryClause[Int, IndexScan](fieldName, IndexScan, CondOps.Type -> v.id) {}
case class ExistsQueryClause(override val fieldName: String, v: Boolean)
extends IndexableQueryClause[Boolean, IndexScan](fieldName, IndexScan, CondOps.Exists -> v) {}
case class NinQueryClause[V](override val fieldName: String, vs: java.util.List[V])
extends IndexableQueryClause[java.util.List[V], DocumentScan](fieldName, DocumentScan, CondOps.Nin -> vs) {}
case class SizeQueryClause(override val fieldName: String, v: Int)
extends IndexableQueryClause[Int, DocumentScan](fieldName, DocumentScan, CondOps.Size -> v) {}
case class RegexQueryClause[Ind <: MaybeIndexed](override val fieldName: String, actualIB: Ind, p: Pattern)
extends IndexableQueryClause[Pattern, Ind](fieldName, actualIB) {
val flagMap = Map(
Pattern.CANON_EQ -> "c",
Pattern.CASE_INSENSITIVE -> "i",
Pattern.COMMENTS -> "x",
Pattern.DOTALL -> "s",
Pattern.LITERAL -> "t",
Pattern.MULTILINE -> "m",
Pattern.UNICODE_CASE -> "u",
Pattern.UNIX_LINES -> "d"
)
def flagsToString(flags: Int) = {
(for {
(mask, char) <- flagMap
if (flags & mask) != 0
} yield char).mkString
}
override def extend(q: BasicDBObjectBuilder, signature: Boolean) {
q.add("$regex", if (signature) 0 else p.toString)
q.add("$options", if (signature) 0 else flagsToString(p.flags))
}
}
case class RawQueryClause(f: BasicDBObjectBuilder => Unit) extends IndexableQueryClause("raw", DocumentScan) {
override def extend(q: BasicDBObjectBuilder, signature: Boolean) {
f(q)
}
}
case class EmptyQueryClause[V](override val fieldName: String)
extends IndexableQueryClause[V, Index](fieldName, Index) {
override def extend(q: BasicDBObjectBuilder, signature: Boolean) {}
}
case class EqClause[V, Ind <: MaybeIndexed](override val fieldName: String, value: V)
extends IndexableQueryClause[V, Index](fieldName, Index) {
override def extend(q: BasicDBObjectBuilder, signature: Boolean): Unit = {
q.add(fieldName, if (signature) 0 else value)
}
}
case class WithinCircleClause[V](override val fieldName: String, lat: Double, lng: Double, radius: Double)
extends IndexableQueryClause[V, PartialIndexScan](fieldName, PartialIndexScan) {
override def extend(q: BasicDBObjectBuilder, signature: Boolean): Unit = {
val value = if (signature) 0 else QueryHelpers.list(List(QueryHelpers.list(List(lat, lng)), radius))
q.push("$within").add("$center", value).pop
}
}
case class WithinBoxClause[V](override val fieldName: String, lat1: Double, lng1: Double, lat2: Double, lng2: Double)
extends IndexableQueryClause[V, PartialIndexScan](fieldName, PartialIndexScan) {
override def extend(q: BasicDBObjectBuilder, signature: Boolean): Unit = {
val value =
if (signature) 0
else {
QueryHelpers.list(List(QueryHelpers.list(lat1, lng1), QueryHelpers.list(lat2, lng2)))
}
q.push("$within").add("$box", value).pop
}
}
case class ElemMatchWithPredicateClause[V](override val fieldName: String, clauses: Seq[QueryClause[_]])
extends IndexableQueryClause[V, DocumentScan](fieldName, DocumentScan) {
override def extend(q: BasicDBObjectBuilder, signature: Boolean): Unit = {
import io.fsq.rogue.MongoHelpers.AndCondition
val nested = q.push("$elemMatch")
MongoHelpers.MongoBuilder.buildCondition(AndCondition(clauses.toList, None), nested, signature)
nested.pop
}
}
class ModifyClause(val operator: ModOps.Value, fields: (String, _)*) {
def extend(q: BasicDBObjectBuilder): Unit = {
fields foreach { case (name, value) => q.add(name, value) }
}
}
class ModifyAddEachClause(fieldName: String, values: Traversable[_]) extends ModifyClause(ModOps.AddToSet) {
override def extend(q: BasicDBObjectBuilder): Unit = {
q.push(fieldName).add("$each", QueryHelpers.list(values)).pop
}
}
class ModifyPushEachClause(fieldName: String, values: Traversable[_]) extends ModifyClause(ModOps.Push) {
override def extend(q: BasicDBObjectBuilder): Unit = {
q.push(fieldName).add("$each", QueryHelpers.list(values)).pop
}
}
class ModifyPushEachSliceClause(fieldName: String, slice: Int, values: Traversable[_])
extends ModifyClause(ModOps.Push) {
override def extend(q: BasicDBObjectBuilder): Unit = {
q.push(fieldName).add("$each", QueryHelpers.list(values)).add("$slice", slice).pop
}
}
class ModifyBitClause(fieldName: String, value: Int, op: BitOps.Value) extends ModifyClause(ModOps.Bit) {
override def extend(q: BasicDBObjectBuilder): Unit = {
q.push(fieldName).add(op.toString, value).pop
}
}
class ModifyLongBitClause(fieldName: String, value: Long, op: BitOps.Value) extends ModifyClause(ModOps.Bit) {
override def extend(q: BasicDBObjectBuilder): Unit = {
q.push(fieldName).add(op.toString, value).pop
}
}
class ModifyPullWithPredicateClause[V](fieldName: String, clauses: Seq[QueryClause[_]])
extends ModifyClause(ModOps.Pull) {
override def extend(q: BasicDBObjectBuilder): Unit = {
import io.fsq.rogue.MongoHelpers.AndCondition
MongoHelpers.MongoBuilder.buildCondition(AndCondition(clauses.toList, None), q, false)
}
}
class ModifyPullObjWithPredicateClause[V](fieldName: String, clauses: Seq[QueryClause[_]])
extends ModifyClause(ModOps.Pull) {
override def extend(q: BasicDBObjectBuilder): Unit = {
import io.fsq.rogue.MongoHelpers.AndCondition
val nested = q.push(fieldName)
MongoHelpers.MongoBuilder.buildCondition(AndCondition(clauses.toList, None), nested, false)
nested.pop
}
}
|
foursquare/fsqio
|
src/jvm/io/fsq/rogue/QueryClause.scala
|
Scala
|
apache-2.0
| 8,856
|
package com.github.rosmith.nlp.query.filter
import com.github.rosmith.nlp.query.filter.BinaryOperator._
import com.github.rosmith.nlp.query.Variable
import com.github.rosmith.nlp.query.OperandValue
class BinaryQueryFilter(lv: OperandValue, op: BinaryOperator, rv: OperandValue) extends SingleQueryFilter {
private var _operator: BinaryOperator = op
def this(lv: OperandValue, stringOp: String, rv: OperandValue) {
this(lv, BinaryQueryFilter.toOperator(stringOp), rv)
}
def leftVariable = lv.value
def rightVariable = rv.value
def operator = _operator
def rightValueIsVariable = rv.isVar
}
object BinaryQueryFilter {
  def toOperator(stringOp: String): BinaryOperator = stringOp match {
    case "EQUALS" => EQUALS
    case "AND" => AND
    case "OR" => OR
    case "NOT_EQUALS" => NOT_EQUALS
    case "LOWER" => LOWER
    case "LOWER_OR_EQUAL" => LOWER_OR_EQUAL
    case "GREATER" => GREATER
    case "GREATER_OR_EQUAL" => GREATER_OR_EQUAL
    case _ => null
  }
}
|
rosmith/giet
|
src/main/scala/com/github/rosmith/nlp/query/filter/BinaryQueryFilter.scala
|
Scala
|
mit
| 1,215
|
package cz.vse.easyminer.util
import com.github.kxbmap.configs._
import com.typesafe.config.ConfigFactory
object Conf {
private val config = new Conf(new EnrichTypesafeConfig(ConfigFactory.load))
def apply() = config
}
class Conf(etc: EnrichTypesafeConfig) {
def get[T: AtPath](path: String): T = etc.get(path)
def opt[T: AtPath](path: String)(implicit cc: CatchCond = CatchCond.configException): Option[T] = etc.opt(path)
def getOrElse[T: AtPath](path: String, default: => T)(implicit cc: CatchCond = CatchCond.missing): T = etc.getOrElse(path, default)
}
|
KIZI/EasyMiner-Apriori-R
|
src/main/scala/cz/vse/easyminer/util/Conf.scala
|
Scala
|
bsd-3-clause
| 583
|
/*
* Copyright (c) 2012-2014 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics
package snowplow
package enrich
package common
package adapters
package registry
package snowplow
// Java
import java.util.Map.{Entry => JMapEntry}
// Jackson
import com.fasterxml.jackson.databind.JsonNode
// Scala
import scala.collection.JavaConversions._
// Iglu
import iglu.client.{Resolver, SchemaCriterion, SchemaKey}
import iglu.client.validation.ValidatableJsonMethods._
// Scalaz
import scalaz._
import Scalaz._
import Validation.FlatMap._
// json4s
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
// This project
import loaders.CollectorPayload
import utils.{JsonUtils => JU}
import utils.{ConversionUtils => CU}
/**
* The Redirect Adapter is essentially a pre-processor for
* Snowplow Tracker Protocol v2 above (although it doesn't
* use the TP2 code above directly).
*
* The &u= parameter used for a redirect is converted into
* a URI Redirect entity and then either stored as an
* unstructured event, added to an existing contexts array
* or used to initialize a new contexts array.
*/
object RedirectAdapter extends Adapter {
// Tracker version for an Iglu-compatible webhook
private val TrackerVersion = "r-tp2"
// Our default tracker platform
private val TrackerPlatform = "web"
// Schema for a URI redirect. Could end up being an event or a context
// depending on what else is in the payload
private object SchemaUris {
val UriRedirect = SchemaKey("com.snowplowanalytics.snowplow",
"uri_redirect",
"jsonschema",
"1-0-0").toSchemaUri
}
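  // Worked example (illustrative values only): a redirect hit such as
  //
  //   GET /r/tp2?u=https%3A%2F%2Fwww.example.com%2F
  //
  // carries no `e` parameter, so toRawEvents below wraps the `u` value in a uri_redirect
  // entity and emits it as an unstructured event; the resulting raw event parameters look
  // roughly like:
  //
  //   "e"     -> "ue",
  //   "ue_pr" -> """{"schema":"iglu:com.snowplowanalytics.snowplow/unstruct_event/jsonschema/1-0-0",
  //                  "data":{"schema":"iglu:com.snowplowanalytics.snowplow/uri_redirect/jsonschema/1-0-0",
  //                          "data":{"uri":"https://www.example.com/"}}}""",
  //   "tv"    -> "r-tp2",
  //   "p"     -> "web"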
/**
* Converts a CollectorPayload instance into raw events.
* Assumes we have a GET querystring with a u parameter
* for the URI redirect and other parameters per the
* Snowplow Tracker Protocol.
*
* @param payload The CollectorPayload containing one or more
* raw events as collected by a Snowplow collector
* @param resolver (implicit) The Iglu resolver used for
* schema lookup and validation. Not used
* @return a Validation boxing either a NEL of RawEvents on
* Success, or a NEL of Failure Strings
*/
def toRawEvents(payload: CollectorPayload)(implicit resolver: Resolver): ValidatedRawEvents = {
val originalParams = toMap(payload.querystring)
if (originalParams.isEmpty) {
"Querystring is empty: cannot be a valid URI redirect".failureNel
} else {
originalParams.get("u") match {
case None =>
"Querystring does not contain u parameter: not a valid URI redirect".failureNel
case Some(u) => {
val json = buildUriRedirect(u)
val newParams: Validation[NonEmptyList[String],
scala.collection.immutable.Map[String, String]] =
if (originalParams.contains("e")) {
// Already have an event so add the URI redirect as a context (more fiddly)
def newCo =
Map("co" -> compact(toContexts(json))).success[NonEmptyList[String]]
(originalParams.get("cx"), originalParams.get("co")) match {
case (None, None) => newCo
case (None, Some(co)) if co == "" => newCo
case (None, Some(co)) =>
addToExistingCo(json, co).map(str => Map("co" -> str))
case (Some(cx), _) =>
addToExistingCx(json, cx).map(str => Map("cx" -> str))
}
} else {
// Add URI redirect as an unstructured event
Map("e" -> "ue", "ue_pr" -> compact(toUnstructEvent(json))).successNel
}
val fixedParams = Map(
"tv" -> TrackerVersion,
"p" -> originalParams.getOrElse("p", TrackerPlatform) // Required field
)
newParams.map { np =>
NonEmptyList(
RawEvent(
api = payload.api,
parameters = (originalParams - "u") ++ np ++ fixedParams,
contentType = payload.contentType,
source = payload.source,
context = payload.context
))
}
}
}
}
}
/**
* Builds a self-describing JSON representing a
* URI redirect entity.
*
* @param uri The URI we are redirecting to
* @return a URI redirect as a self-describing
* JValue
*/
private def buildUriRedirect(uri: String): JValue =
("schema" -> SchemaUris.UriRedirect) ~
("data" -> (
("uri" -> uri)
))
/**
* Adds a context to an existing non-Base64-encoded
* self-describing contexts stringified JSON.
*
* Does the minimal amount of validation required
* to ensure the context can be safely added, or
* returns a Failure.
*
* @param newContext The context to add to the
* existing list of contexts
* @param existing The existing contexts as a
* non-Base64-encoded stringified JSON
* @return an updated non-Base64-encoded self-
* describing contexts stringified JSON
*/
private def addToExistingCo(newContext: JValue, existing: String): Validated[String] =
for {
node <- JU.extractJson("co|cx", existing).toValidationNel: Validated[JsonNode]
jvalue = fromJsonNode(node)
merged = jvalue merge render("data" -> List(newContext))
} yield compact(merged)
/**
* Adds a context to an existing Base64-encoded
* self-describing contexts stringified JSON.
*
* Does the minimal amount of validation required
* to ensure the context can be safely added, or
* returns a Failure.
*
* @param newContext The context to add to the
* existing list of contexts
* @param existing The existing contexts as a
* Base64-encoded stringified JSON
* @return an updated Base64-encoded self-
* describing contexts stringified JSON
*/
private def addToExistingCx(newContext: JValue, existing: String): Validated[String] =
for {
decoded <- CU.decodeBase64Url("cx", existing).toValidationNel: Validated[String]
added <- addToExistingCo(newContext, decoded)
recoded = CU.encodeBase64Url(added)
} yield recoded
}
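// Illustration (hypothetical values): for a collector querystring such as
//   ?u=https%3A%2F%2Fexample.com%2Fnext
// buildUriRedirect("https://example.com/next") produces the self-describing JSON
//   {"schema":"iglu:com.snowplowanalytics.snowplow/uri_redirect/jsonschema/1-0-0",
//    "data":{"uri":"https://example.com/next"}}
// which toRawEvents then attaches either as an unstructured event ("e=ue" / "ue_pr") or as an
// extra entry in the existing "co"/"cx" contexts, depending on what the payload already contains.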
|
TimothyKlim/snowplow
|
3-enrich/scala-common-enrich/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/adapters/registry/snowplow/RedirectAdapter.scala
|
Scala
|
apache-2.0
| 7,049
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.metrics.reporter
import java.util.HashMap
import java.util.Map
import scala.beans.BeanProperty
object MetricsHeader {
def fromMap(map: Map[String, Object]): MetricsHeader = {
new MetricsHeader(
map.get("job-name").toString,
map.get("job-id").toString,
map.get("container-name").toString,
map.get("source").toString,
map.get("version").toString,
map.get("samza-version").toString,
map.get("host").toString,
map.get("time").asInstanceOf[Number].longValue,
map.get("reset-time").asInstanceOf[Number].longValue)
}
}
/**
* Immutable metric header snapshot.
*/
class MetricsHeader(
@BeanProperty val jobName: String,
@BeanProperty val jobId: String,
@BeanProperty val containerName: String,
@BeanProperty val source: String,
@BeanProperty val version: String,
@BeanProperty val samzaVersion: String,
@BeanProperty val host: String,
@BeanProperty val time: Long,
@BeanProperty val resetTime: Long) {
def getAsMap: Map[String, Object] = {
val map = new HashMap[String, Object]
map.put("job-name", jobName)
map.put("job-id", jobId)
map.put("container-name", containerName)
map.put("source", source)
map.put("version", version)
map.put("samza-version", samzaVersion)
map.put("host", host)
map.put("time", time: java.lang.Long)
map.put("reset-time", resetTime: java.lang.Long)
map
}
}
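// A minimal round-trip sketch (all values hypothetical) of the conversions above:
// getAsMap produces the wire-format map and fromMap reconstructs an equivalent header.
object MetricsHeaderExample {
  val header = new MetricsHeader(
    "my-job", "1", "samza-container-0", "TaskName-Partition 0",
    "0.0.1", "0.14.0", "localhost", 1500000000000L, 1500000000000L)
  val roundTripped: MetricsHeader = MetricsHeader.fromMap(header.getAsMap)
}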
|
vjagadish/samza-clone
|
samza-core/src/main/scala/org/apache/samza/metrics/reporter/MetricsHeader.scala
|
Scala
|
apache-2.0
| 2,242
|
package maven2sbt.core
/**
* @author Kevin Lee
* @since 2019-04-21
*/
sealed trait Dependency
object Dependency extends DependencyPlus {
final case class Scala(
groupId: GroupId,
artifactId: ArtifactId,
version: Version,
scope: Scope,
exclusions: List[Exclusion]
) extends Dependency
final case class Java(
groupId: GroupId,
artifactId: ArtifactId,
version: Version,
scope: Scope,
exclusions: List[Exclusion]
) extends Dependency
def scala(
groupId: GroupId,
artifactId: ArtifactId,
version: Version,
scope: Scope,
exclusions: List[Exclusion]
): Dependency = Scala(groupId, artifactId, version, scope, exclusions)
def java(
groupId: GroupId,
artifactId: ArtifactId,
version: Version,
scope: Scope,
exclusions: List[Exclusion]
): Dependency = Java(groupId, artifactId, version, scope, exclusions)
implicit final class DependencyOps(val dependency: Dependency) extends AnyVal {
def artifactId: ArtifactId = Dependency.artifactId(dependency)
def scope: Scope = Dependency.scope(dependency)
def exclusions: List[Exclusion] = Dependency.exclusions(dependency)
def isScalaLib: Boolean = Dependency.isScalaLib(dependency)
def isJavaLib: Boolean = Dependency.isJavaLib(dependency)
def tupled: (GroupId, ArtifactId, Version, Scope, List[Exclusion]) =
Dependency.tupled(dependency)
}
}
|
Kevin-Lee/maven2sbt
|
core/src/main/scala/maven2sbt/core/Dependency.scala
|
Scala
|
mit
| 1,425
|
package org.npmaven
package rest
import java.io.OutputStream
import model._
import commonjs.{Registry => NpmRegistry}
import util.FutureConversions._
import artifacts.Artifactory._
import net.liftweb.common.Loggable
import net.liftweb.http._
import net.liftweb.http.rest.RestHelper
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
object NpmRest extends RestHelper with Loggable {
lazy val npm = new NpmRegistry("registry.npmjs.org/")
def toResponse(pkg:Package, art:Artifact):Future[LiftResponse] = art match {
case Pom => Future.successful(XmlResponse(pom(pkg)))
case Jar => jar(pkg).map(b => OutputStreamResponse((out: OutputStream) => out write b))
case Sha1(Pom) => {
val sum = sha1(XmlResponse(pom(pkg)).toResponse.data)
Future.successful(PlainTextResponse(sum, 200))
}
case Sha1(Jar) => jar(pkg).map(b => PlainTextResponse(sha1(b), 200))
case _ => Future.successful(NotFoundResponse())
}
serve {
case "repo" :: "npm" :: "org" :: "npmaven" :: name :: version :: artifact :: Nil RestReq _ => {
val pkg = Package(name, version)
val art = Artifact(artifact)
logger.info(S.request)
val f = npm.get(pkg)
.flatMap(p => art.map(a => toResponse(p, a)).openOr(Future.successful(NotFoundResponse())))
.recover{case e:Exception => logger.trace(e); NotFoundResponse()}
.la
f
}
}
private object RestReq {
def unapply(r: Req): Option[(List[String], Req)] = Some(r.path.partPath -> r)
}
}
|
npmaven/npmaven
|
src/main/scala/org/npmaven/rest/NpmRest.scala
|
Scala
|
apache-2.0
| 1,546
|
package org.positronicnet.content
import _root_.android.content.Context
import _root_.android.content.ContentValues
import _root_.android.content.ContentUris
import _root_.android.content.ContentResolver
import _root_.android.net.Uri
import _root_.android.util.Log
import _root_.android.database.ContentObserver // android.database? really?!!!
import org.positronicnet.facility.AppFacility
import org.positronicnet.facility.WorkerThread
import org.positronicnet.notifications.{ Future,
DataStream,
StreamQuery,
StreamQueryable }
/** Simple [[org.positronicnet.facility.AppFacility]] for interacting with
* Android ContentProviders using the Positronic Net
* [[org.positronicnet.content.ContentQuery]] convenience shorthands,
* or the [[org.positronicnet.orm]]. Once the facility has been
* opened (via `openInContext`), it can be used to produce
* [[org.positronicnet.content.ContentQuery]] objects that refer to
* individual `ContentProvider`s, including those provided by other
* apps or the platform itself.
*
* The best treatment of row-IDs here isn't quite clear. The
* underlying platform `insert` method on a `ContentProvider` returns
* a URI for the newly created row, but by convention that URI will
* almost always encode a `Long` row ID, which can be obtained from the URI
* itself via the `ContentUris.parseId` method provided by the
* framework, and which will also be used to identify the row in
* subsequent bulk queries. Since it isn't obvious which is better, we
* give you both choices, as follows:
*
* Let's say you have the URI of some
* `ContentProvider` --- say `android.provider.CallLog.Calls.CONTENT_URI`,
* just to be concrete. Once you've got a resolver opened like so:
* {{{
* object Resolver extends PositronicContentResolver( "call_log_app" )
* ...
* class SomeActivity {
* onCreate {
* useAppFacility( Resolver )
* }
* }
* }}}
* you can either say
* {{{
* queryHandle = Resolver( Calls.CONTENT_URI )
* }}}
* to get a [[org.positronicnet.content.ContentQuery]] on which `insert` will
* return a `Long` ID, or
* {{{
* queryHandle = Resolver.withUriIds( Calls.CONTENT_URI )
* }}}
* to obtain one on which `insert` is typed to return the raw URIs.
*
* There's no other difference between the two, but only the former
* will work with the [[org.positronicnet.orm]], which requires long
* IDs for the moment.
*
* In general, the `Long`-id variants are easier to deal with for
* providers which support that convention (most of the standard
* ones on the platform, for starters), but the `withUriIds` variant
* is there if you prefer it.
*
* There are also limited facilities for using
* [[org.positronicnet.content.PositronicContentResolver]] for asynch
* operations, using actor-like syntax; in particular, batch updates are
* supported via [[org.positronicnet.content.BatchOperation]].
*
* (The underlying `android.content.ContentResolver` is available by
* calling `realResolver`, q.v.; this, of course, supports the full,
* unrestricted platform API.)
*/
object PositronicContentResolver
extends AppFacility( "PositronicContentResolver" )
with StreamQueryable[ ContentResolverQueryNonce ]
with WorkerThread
{
var logTag: String = "PositronicContentResolver"
override def getLogTag = logTag
private[positronicnet] var realResolver: android.content.ContentResolver = null
override protected def realOpen(ctx: Context) = {
super.realOpen( ctx )
realResolver = ctx.getContentResolver
}
/** Response to stream queries. Right now, the sole supported query
* argument is OnDataAvailable(uri), which creates a ContentObserver
* for the URI in question that sends a Unit when new data is
* available (as well as a courtesy notification the first time a
* listener subscribes, as per usual, to open the pipes).
*
* The usual convention is to map that Unit to something else, like
* the result of a query, either directly or through the ORM...
*/
def ??[V]( q: StreamQuery[ ContentResolverQueryNonce, V ] ) = q match {
case OnDataAvailable( uri, andSubtree ) =>
new ContentObservationDataStream( uri, andSubtree )
}
/** Return a [[org.positronicnet.content.ContentQuery]] obeying the `Long`-id
* convention, as described above.
*/
def apply( uri: Uri ) =
new ContentProviderQuery( new LongIdContentResolverRepository(realResolver,
this ),
uri )
/** Return a [[org.positronicnet.content.ContentQuery]] obeying the URI-id
* convention, as described above.
*/
def withUriIds( uri: Uri ) =
new ContentProviderQuery( new UriIdContentResolverRepository(realResolver,
this),
uri )
/** Run a content-resolver action on a private thread, invoking
* callbacks on the caller's thread when done.
*/
def !( action: ContentResolverAction ) =
action match {
case batch: BatchAction =>
val wrappedBatch = batch.withWrappedCallbacks
this.runOnThread { this.onThisThread( wrappedBatch ) }
case _ =>
this.runOnThread { this.onThisThread( action ) }
}
/** Run a content-resolver action on the current thread */
def onThisThread( action: ContentResolverAction ) =
action match {
case batch: BatchAction =>
try {
Log.d( "PositronicContentResolver", "Running batch:" )
for (i <- Range( 0, batch.operations.size ))
Log.d( "PositronicContentResolver",
batch.operations.get(i).toString )
Log.d( "PositronicContentResolver", "End batch" )
val results = realResolver.applyBatch( batch.authority,
batch.operations )
batch.successCallback( results )
}
catch {
case ex: Exception =>
Log.e( "PositronicContentResolver", "Batch operation failure", ex )
batch.failureCallback( ex )
}
case _ =>
throw new RuntimeException( "Unknown ContentResolverAction" )
}
}
/** Action on the content resolver, considered as an actor */
abstract class ContentResolverAction
private [content]
abstract class BaseContentResolverRepo[ IdType ]( realResolver: ContentResolver,
facilityArg: AppFacility )
extends ContentRepository[ Uri, IdType ]
{
def facility = facilityArg
def getLogTag = facility.getLogTag
def delete( whence: Uri, where: String, whereArgs: Array[String] ) =
realResolver.delete( whence, where, whereArgs )
def update( whence: Uri, vals: ContentValues,
where: String, whereArgs: Array[ String ] ) =
realResolver.update( whence, vals, where, whereArgs )
// Note that we ignore limit, groupBy, and having; ContentProviderQuery
// gives users no way to set them, so they're NULL unless someone's
// playing very nasty games on us...
def query( whence: Uri, cols: Array[ String ],
where: String, whereArgs: Array[ String ],
groupBy: String, having: String,
order: String, limit: String ) =
realResolver.query( whence, cols, where, whereArgs, order )
}
class UriIdContentResolverRepository( realResolver: ContentResolver,
facility: AppFacility )
extends BaseContentResolverRepo[ Uri ]( realResolver, facility )
{
def insert( where: Uri, vals: ContentValues ) =
realResolver.insert( where, vals )
}
class LongIdContentResolverRepository( realResolver: ContentResolver,
facility: AppFacility )
extends BaseContentResolverRepo[ Long ]( realResolver, facility )
{
def insert( where: Uri, vals: ContentValues ) =
ContentUris.parseId( realResolver.insert( where, vals ))
}
/** Queries on ContentProviders. See
* [[org.positronicnet.content.PositronicContentResolver]]
*/
class ContentProviderQuery[IdType]( source: BaseContentResolverRepo[IdType],
uri: Uri,
orderString: String = null,
whereString: String = null,
whereValues: Array[String] = null
)
extends ContentQuery( source, uri, orderString,
whereString, whereValues,
limitString = null )
{
protected def dinkedCopy( source: BaseContentResolverRepo[IdType]=this.source,
uri: android.net.Uri = this.uri,
orderString: String = this.orderString,
whereString: String = this.whereString,
whereValues: Array[String] = this.whereValues ) =
new ContentProviderQuery( source, uri, orderString,
whereString, whereValues )
def contentUri = uri
def order( s: String ) = dinkedCopy( orderString = s )
def where( s: String, vals: ContentValue* ) =
withUpdatedWhere( s, vals.toArray ){ (str, arr) =>
dinkedCopy( whereString = str, whereValues = arr )}
def whereEq( pairs: (String, ContentValue)* ) =
withUpdatedWhere( pairs ){ (str, arr) =>
dinkedCopy( whereString = str, whereValues = arr )}
def facility = source.facility
def count:Long =
throw new RuntimeException( "Count not supported on ContentResolvers" )
def limit( s: String ) =
throw new RuntimeException( "Limit not supported on ContentResolvers" )
}
class ContentResolverStreamQuery[V]
extends StreamQuery[ContentResolverQueryNonce, V]
class ContentResolverQueryNonce
case class OnDataAvailable( uri: Uri, andSubtree: Boolean = false )
extends ContentResolverStreamQuery[ Unit ]
private[ positronicnet ]
class ContentObservationDataStream( uri: Uri, andSubtree: Boolean )
extends DataStream[Unit]
{
protected def initialFuture = Future( () )
override def addListener( tag: AnyRef, handler: Unit => Unit ) = {
super.addListener( tag, handler )
maybeStartObserving
}
override def removeListener( tag: AnyRef ) = {
super.removeListener( tag )
if (!this.hasListeners)
maybeStopObserving
}
private [this] var observer: ContentObserver = null
private [this] var observing = false
import PositronicContentResolver.realResolver
private [this] def maybeStartObserving = {
if (!observing) {
observing = true // remember that we have registered, so repeated listener adds don't re-register
PositronicContentResolver.threadHandlerFut.map { handler => {
observer = new ContentObserver( handler ) {
override def onChange( dummy: Boolean ) = noteNewValue( () )
}
realResolver.registerContentObserver( uri, andSubtree, observer )
}}
}
}
private [this] def maybeStopObserving = {
if (observing) {
observing = false
realResolver.unregisterContentObserver( observer )
}
}
}
|
rst/positronic_net
|
src/main/scala/content/PositronicContentResolver.scala
|
Scala
|
bsd-3-clause
| 11,250
|
/*
This function is usually called `zipWith`. The discussion about stack usage from the explanation of `map` also applies here. By putting the `f` in the second argument list, Scala can infer its type from the previous argument list.
*/
def zipWith[A,B,C](a: List[A], b: List[B])(f: (A,B) => C): List[C] = (a,b) match {
case (Nil, _) => Nil
case (_, Nil) => Nil
case (Cons(h1,t1), Cons(h2,t2)) => Cons(f(h1,h2), zipWith(t1,t2)(f))
}
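// A quick usage sketch, assuming the chapter's own Cons/Nil constructors (not the standard
// library List). The result is as long as the shorter input:
//   zipWith(Cons(1, Cons(2, Cons(3, Nil))), Cons("a", Cons("b", Nil)))(_.toString + _)
//   == Cons("1a", Cons("2b", Nil))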
|
ud3sh/coursework
|
functional-programming-in-scala-textbook/answerkey/datastructures/23.answer.scala
|
Scala
|
unlicense
| 440
|
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.lambda.stream.kafka
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicLong
import java.util.concurrent.locks.ReentrantLock
import com.typesafe.scalalogging.LazyLogging
import org.joda.time.{DateTime, DateTimeZone}
import org.locationtech.geomesa.lambda.stream.OffsetManager.OffsetListener
import org.locationtech.geomesa.lambda.stream.kafka.KafkaFeatureCache.{ExpiringFeatureCache, ReadableFeatureCache, WritableFeatureCache}
import org.opengis.feature.simple.SimpleFeature
import scala.collection.mutable.ArrayBuffer
/**
* Locally cached features
*/
class KafkaFeatureCache(topic: String) extends WritableFeatureCache with ReadableFeatureCache
with ExpiringFeatureCache with OffsetListener with LazyLogging {
// map of feature id -> current feature
private val features = new ConcurrentHashMap[String, SimpleFeature]
// technically we should synchronize all access to the following arrays, since we expand them if needed;
// however, in normal use it will be created up front and then only read.
// if partitions are added at runtime, we may have synchronization issues...
// array, indexed by partition, of queues of (offset, create time, feature), sorted by offset
private val queues = ArrayBuffer.empty[(ReentrantLock, java.util.ArrayDeque[(Long, Long, SimpleFeature)])]
private val offsets = ArrayBuffer.empty[AtomicLong]
private val debug = logger.underlying.isDebugEnabled()
override def partitionAssigned(partition: Int, offset: Long): Unit = {
logger.debug(s"Partition assigned: [$topic:$partition:$offset]")
ensurePartition(partition, offset)
}
override def get(id: String): SimpleFeature = features.get(id)
override def all(): Iterator[SimpleFeature] = {
import scala.collection.JavaConverters._
features.values.iterator.asScala
}
override def add(feature: SimpleFeature, partition: Int, offset: Long, created: Long): Unit = {
if (offsets(partition).get < offset) {
logger.trace(s"Adding [$partition:$offset] $feature created at ${new DateTime(created, DateTimeZone.UTC)}")
features.put(feature.getID, feature)
val (lock, queue) = queues(partition)
lock.lock()
try { queue.addLast((offset, created, feature)) } finally {
lock.unlock()
}
} else {
logger.trace(s"Ignoring [$partition:$offset] $feature created at ${new DateTime(created, DateTimeZone.UTC)}")
}
}
override def delete(feature: SimpleFeature, partition: Int, offset: Long, created: Long): Unit = {
logger.trace(s"Deleting [$partition:$offset] $feature created at ${new DateTime(created, DateTimeZone.UTC)}")
features.remove(feature.getID)
}
override def expired(expiry: Long): Seq[Int] = {
val result = ArrayBuffer.empty[Int]
var i = 0
while (i < this.queues.length) {
val (lock, queue) = queues(i)
lock.lock()
val peek = try { queue.peek } finally { lock.unlock() }
peek match {
case null => // no-op
case (_, created, _) => if (expiry > created) { result += i }
}
i += 1
}
result
}
override def expired(partition: Int, expiry: Long): (Long, Seq[(Long, SimpleFeature)]) = {
val expired = ArrayBuffer.empty[(Long, SimpleFeature)]
val (lock, queue) = this.queues(partition)
var loop = true
while (loop) {
lock.lock()
val poll = queue.poll()
if (poll == null) {
lock.unlock()
loop = false
} else if (poll._2 > expiry) {
// note: add back to the queue before unlocking
try { queue.addFirst(poll) } finally {
lock.unlock()
}
loop = false
} else {
lock.unlock()
expired += ((poll._1, poll._3))
}
}
logger.debug(s"Checking [$topic:$partition] for expired entries: found ${expired.size} expired and ${queue.size} remaining")
val maxExpiredOffset = if (expired.isEmpty) { -1L } else { expired(expired.length - 1)._1 }
// only remove from feature cache (and persist) if there haven't been additional updates
val latest = expired.filter { case (_, feature) => remove(feature) }
(maxExpiredOffset, latest)
}
override def offsetChanged(partition: Int, offset: Long): Unit = {
logger.debug(s"Offsets changed for [$topic:$partition]: -> $offset")
if (queues.length <= partition) {
ensurePartition(partition, offset)
return
}
val (lock, queue) = queues(partition)
// remove the expired features from the cache
val (featureSize, queueSize, start) = if (!debug) { (0, 0, 0L) } else {
(features.size, queue.size, System.currentTimeMillis())
}
var loop = true
while (loop) {
lock.lock()
val poll = queue.poll()
if (poll == null) {
lock.unlock()
loop = false
} else if (poll._1 > offset) {
// note: add back to the queue before unlocking
try { queue.addFirst(poll) } finally {
lock.unlock()
}
loop = false
} else {
lock.unlock()
// only remove from feature cache if there haven't been additional updates
remove(poll._3)
}
}
// update the valid offset
var last = offsets(partition).get
while(last < offset && !offsets(partition).compareAndSet(last, offset)) {
last = offsets(partition).get
}
logger.debug(s"Size of cached state for [$topic:$partition]: features (total): " +
s"${diff(featureSize, features.size)}, offsets: ${diff(queueSize, queue.size)} in " +
s"${System.currentTimeMillis() - start}ms")
}
private def ensurePartition(partition: Int, offset: Long): Unit = synchronized {
while (queues.length <= partition) {
queues += ((new ReentrantLock, new java.util.ArrayDeque[(Long, Long, SimpleFeature)]))
offsets += new AtomicLong(-1L)
}
offsets(partition).set(offset)
}
// conditionally removes the simple feature from the feature cache if it is the latest version
private def remove(feature: SimpleFeature): Boolean = {
// note: there isn't an atomic remove that checks identity, so check first and then do an equality remove.
// there is a small chance that the feature will be updated in between the identity and equality checks,
// and removed incorrectly, however the alternative is full synchronization on inserts and deletes.
// also, with standard usage patterns of many updates and only a few writes, the first check (which is
// cheaper) will be false, and we can short-circuit the second check
feature.eq(features.get(feature.getID)) && features.remove(feature.getID, feature)
}
// debug message
private def diff(original: Int, updated: Int): String = f"$updated%d (${updated - original}%+d)"
}
object KafkaFeatureCache {
trait ReadableFeatureCache {
/**
* Returns most recent versions of all features currently in this cache
*
* @return
*/
def all(): Iterator[SimpleFeature]
/**
* Returns the most recent version of a feature in this cache, by feature ID
*
* @param id feature id
* @return
*/
def get(id: String): SimpleFeature
}
trait WritableFeatureCache {
/**
* Initialize this cached state for a given partition and offset
*
* @param partition partition
* @param offset offset
*/
def partitionAssigned(partition: Int, offset: Long): Unit
/**
* Add a feature to the cached state
*
* @param feature feature
* @param partition partition corresponding to the add message
* @param offset offset corresponding to the add message
* @param created time feature was created
*/
def add(feature: SimpleFeature, partition: Int, offset: Long, created: Long): Unit
/**
* Deletes a feature from the cached state
*
* @param feature feature
* @param partition partition corresponding to the delete message
* @param offset offset corresponding to the delete message
* @param created time feature was deleted
*/
def delete(feature: SimpleFeature, partition: Int, offset: Long, created: Long): Unit
}
trait ExpiringFeatureCache {
/**
* Checks for any expired features
*
* @param expiry expiry
* @return partitions which may contain expired features, if any
*/
def expired(expiry: Long): Seq[Int]
/**
* Remove and return any expired features
*
* @param partition partition
* @param expiry expiry
* @return (maxExpiredOffset, (offset, expired feature)), ordered by offset
*/
def expired(partition: Int, expiry: Long): (Long, Seq[(Long, SimpleFeature)])
}
}
|
ronq/geomesa
|
geomesa-lambda/geomesa-lambda-datastore/src/main/scala/org/locationtech/geomesa/lambda/stream/kafka/KafkaFeatureCache.scala
|
Scala
|
apache-2.0
| 9,232
|
package parser.px.analyzers
import parser.px.Analyzer
trait Filter {
protected type Tk
def ignore(name:Tk):Boolean
}
trait NoFilter extends Filter {
protected type Tk
def ignore(name:Tk):Boolean = false
}
|
Y-P-/data-processing-binding
|
Parser/src/parser/px/analyzers/Filter.scala
|
Scala
|
gpl-3.0
| 234
|
package japgolly.scalajs.react.extra.internal
import japgolly.scalajs.react._
import japgolly.scalajs.react.internal.CoreGeneral._
import japgolly.scalajs.react.internal.EffectUtil
import japgolly.scalajs.react.util.DefaultEffects
import japgolly.scalajs.react.util.Effect.Sync
import japgolly.scalajs.react.vdom.html_<^._
import org.scalajs.dom.ext.KeyCode
import org.scalajs.dom.html.Input
/**
* Checkbox that can have three states: Checked, Unchecked, Indeterminate.
*
* @since 0.11.0
*/
object TriStateCheckboxF {
sealed abstract class State extends Product with Serializable {
final def nextDeterminate: Determinate =
this match {
case Checked => Unchecked
case Indeterminate
| Unchecked => Checked
}
final def nextGrow: State =
this match {
case Checked => Unchecked
case Indeterminate => Checked
case Unchecked => Indeterminate
}
final def nextShrink: State =
this match {
case Checked => Indeterminate
case Indeterminate => Unchecked
case Unchecked => Checked
}
}
sealed abstract class Determinate extends State
case object Checked extends Determinate
case object Unchecked extends Determinate
case object Indeterminate extends State
}
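// A minimal, hypothetical sketch (not part of the original file) spelling out the three
// cycling orders defined above; each equality follows directly from the pattern matches in State.
object TriStateCheckboxStateExample {
  import TriStateCheckboxF._
  assert(Unchecked.nextGrow == Indeterminate && Indeterminate.nextGrow == Checked && Checked.nextGrow == Unchecked)
  assert(Checked.nextShrink == Indeterminate && Indeterminate.nextShrink == Unchecked && Unchecked.nextShrink == Checked)
  assert(Indeterminate.nextDeterminate == Checked && Checked.nextDeterminate == Unchecked)
}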
class TriStateCheckboxF[F[_]](implicit F: Sync[F]) {
final type State = TriStateCheckboxF.State
final type Determinate = TriStateCheckboxF.Determinate
final val Checked = TriStateCheckboxF.Checked
final val Unchecked = TriStateCheckboxF.Unchecked
final val Indeterminate = TriStateCheckboxF.Indeterminate
case class Props(state : State,
setNextState: F[Unit],
disabled : Boolean = false,
tagMod : Reusable[TagMod] = Reusable.emptyVdom,
) {
@inline def render: VdomElement = Component(this)
}
private def render($: ScalaComponent.MountedPure[Props, Unit, Unit], p: Props) = {
val props = F.transSync($.props)(DefaultEffects.Sync)
val setNext = F.flatMap(props)(p => if (p.disabled) F.empty else p.setNextState) // Only access .setNextState inside Sync for Reusability
<.input.checkbox(
p.tagMod,
^.disabled := p.disabled,
TagMod.unless(p.disabled)(eventHandlers(setNext)))
}
/**
* Clicking or pressing space = change.
*/
def eventHandlers(onChange: F[Unit]): TagMod = {
def handleKey(e: ReactKeyboardEventFromHtml): F[Unit] =
F.delay {
EffectUtil.unsafeAsEventDefaultOption_(e)(
EffectUtil.unsafeKeyCodeSwitch(e) {
case KeyCode.Space => F.runSync(onChange)
}
)
}
TagMod(
^.onClick --> onChange,
^.onKeyDown ==> handleKey)
}
private def updateDom[P, S, B]($: ScalaComponent.MountedImpure[P, S, B], nextProps: Props): F[Unit] = {
val s = nextProps.state
F.delay {
$.getDOMNode.toElement.map(_.domCast[Input]).foreach { d =>
d.checked = s == Checked
d.indeterminate = s == Indeterminate
}
}
}
implicit val reusabilityState: Reusability[State] =
Reusability.by_==
implicit val reusabilityProps: Reusability[Props] =
Reusability.caseClassExcept("setNextState") // .setNextState is never accessed outside of a Sync[Unit]
val Component = ScalaComponent.builder[Props]("TriStateCheckbox")
.stateless
.noBackend
.render(i => render(i.mountedPure, i.props))
.componentDidMount(i => updateDom(i.mountedImpure, i.props))
.componentDidUpdate(i => updateDom(i.mountedImpure, i.currentProps))
.configure(Reusability.shouldComponentUpdate)
.build
}
|
japgolly/scalajs-react
|
extra/src/main/scala/japgolly/scalajs/react/extra/internal/TriStateCheckboxF.scala
|
Scala
|
apache-2.0
| 3,716
|
package com.danielasfregola.twitter4s.http.clients.rest.application.parameters
import com.danielasfregola.twitter4s.http.marshalling.Parameters
private[twitter4s] final case class RatesParameters(resources: Option[String]) extends Parameters
|
DanielaSfregola/twitter4s
|
src/main/scala/com/danielasfregola/twitter4s/http/clients/rest/application/parameters/RatesParameters.scala
|
Scala
|
apache-2.0
| 244
|
package ecommerce.shipping.view
import ecommerce.shipping.ShippingStatus
import ecommerce.shipping.ShippingStatus.ShippingStatus
import pl.newicom.dddd.aggregate.EntityId
import slick.jdbc.JdbcProfile
import slick.jdbc.meta.MTable._
import scala.concurrent.ExecutionContext
class ShipmentDao(implicit val profile: JdbcProfile, ex: ExecutionContext) {
import profile.api._
implicit val shipmentStatusColumnType = MappedColumnType.base[ShippingStatus, String](
{ c => c.toString },
{ s => ShippingStatus.withName(s)}
)
val shipmentsTableName = "shippings"
class Shipments(tag: Tag) extends Table[ShipmentView](tag, shipmentsTableName) {
def id = column[EntityId]("ID", O.PrimaryKey)
def orderId = column[EntityId]("ORDER_ID")
def status = column[ShippingStatus]("STATUS")
def * = (id, orderId, status) <> (ShipmentView.tupled, ShipmentView.unapply)
}
val shipments = TableQuery[Shipments]
/**
* Queries impl
*/
private val by_id = shipments.findBy(_.id)
private val by_order_id = shipments.findBy(_.orderId)
/**
* Public interface
*/
def all = shipments.result
def byId(id: EntityId) = by_id(id).result.headOption
def byOrderId(orderId: EntityId) = by_order_id(orderId).result
def createOrUpdate(view: ShipmentView) = {
shipments.insertOrUpdate(view)
}
def remove(id: EntityId) =
by_id(id).delete
def ensureSchemaDropped =
getTables(shipmentsTableName).headOption.flatMap {
case Some(table) => shipments.schema.drop.map(_ => ())
case None => DBIO.successful(())
}
def ensureSchemaCreated =
getTables(shipmentsTableName).headOption.flatMap {
case Some(table) => DBIO.successful(())
case None => shipments.schema.create.map(_ => ())
}
}
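// A minimal usage sketch; the `db` handle and `view` value are hypothetical and supplied by
// the caller, and only the public interface of ShipmentDao above is used.
class ShipmentDaoExample(db: slick.jdbc.JdbcBackend#Database, view: ShipmentView)(
    implicit profile: JdbcProfile, ec: ExecutionContext) {
  import profile.api._
  private val dao = new ShipmentDao
  // Create the schema if it is missing, upsert one view, then read it back by id.
  private val program = for {
    _     <- dao.ensureSchemaCreated
    _     <- dao.createOrUpdate(view)
    found <- dao.byId(view.id)
  } yield found
  val result = db.run(program.transactionally)
}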
|
pawelkaczor/ddd-leaven-akka-v2
|
shipping/read-back/src/main/scala/ecommerce/shipping/view/ShipmentDao.scala
|
Scala
|
mit
| 1,773
|
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.storage.cassandra
import com.twitter.zipkin.common.{Endpoint, Annotation, Span}
import com.twitter.zipkin.config.CassandraStorageConfig
import com.twitter.zipkin.gen
import com.twitter.cassie.tests.util.FakeCassandra
import com.twitter.conversions.time._
import com.twitter.ostrich.admin.RuntimeEnvironment
import com.twitter.util.Eval
import java.nio.ByteBuffer
import org.specs.mock.{ClassMocker, JMocker}
import org.specs.Specification
import com.twitter.io.TempFile
class CassandraStorageSpec extends Specification with JMocker with ClassMocker {
object FakeServer extends FakeCassandra
var cassandraStorage: CassandraStorage = null
def binaryAnnotation(key: String, value: String) =
gen.BinaryAnnotation(key, ByteBuffer.wrap(value.getBytes), gen.AnnotationType.String, Some(ep.toThrift))
val ep = Endpoint(123, 123, "service")
val spanId = 456
val ann1 = Annotation(1, "cs", Some(ep))
val ann2 = Annotation(2, "sr", None)
val ann3 = Annotation(2, "custom", Some(ep))
val ann4 = Annotation(2, "custom", Some(ep))
val span1 = Span(123, "methodcall", spanId, None, List(ann1, ann3),
List(binaryAnnotation("BAH", "BEH")))
"CassandraStorage" should {
doBefore {
FakeServer.start()
val test = TempFile.fromResourcePath("/CassandraStorageConfig.scala")
val env = RuntimeEnvironment(this, Array("-f", test.toString))
val config = new Eval().apply[CassandraStorageConfig](env.configFile)
config.cassandraConfig.port = FakeServer.port.get
cassandraStorage = config.apply()
}
doAfter {
cassandraStorage.close()
FakeServer.stop()
}
"getTraceById" in {
cassandraStorage.storeSpan(span1)()
val trace = cassandraStorage.getTraceById(span1.traceId)()
trace.spans.isEmpty mustEqual false
trace.spans(0) mustEqual span1
}
"getTracesByIds" in {
cassandraStorage.storeSpan(span1)()
val actual1 = cassandraStorage.getTracesByIds(List(span1.traceId))()
actual1.isEmpty mustEqual false
actual1(0).spans.isEmpty mustEqual false
actual1(0).spans(0) mustEqual span1
val span2 = Span(666, "methodcall2", spanId, None, List(ann2),
List(binaryAnnotation("BAH2", "BEH2")))
cassandraStorage.storeSpan(span2)()
val actual2 = cassandraStorage.getTracesByIds(List(span1.traceId, span2.traceId))()
actual2.isEmpty mustEqual false
actual2(0).spans.isEmpty mustEqual false
actual2(0).spans(0) mustEqual span1
actual2(1).spans.isEmpty mustEqual false
actual2(1).spans(0) mustEqual span2
}
"getTracesByIds should return empty list if no trace exists" in {
val actual1 = cassandraStorage.getTracesByIds(List(span1.traceId))()
actual1.isEmpty mustEqual true
}
"set time to live on a trace and then get it" in {
cassandraStorage.storeSpan(span1)()
cassandraStorage.setTimeToLive(span1.traceId, 1234.seconds)()
cassandraStorage.getTimeToLive(span1.traceId)() mustEqual 1234.seconds
}
}
}
|
lanrion/zipkin
|
zipkin-server/src/test/scala/com/twitter/zipkin/storage/cassandra/CassandraStorageSpec.scala
|
Scala
|
apache-2.0
| 3,659
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v3
import uk.gov.hmrc.ct.box.{Calculated, CtBigDecimal, CtBoxIdentifier}
import uk.gov.hmrc.ct.ct600.v3.calculations.CorporationTaxCalculator
import uk.gov.hmrc.ct.ct600.v3.retriever.CT600BoxRetriever
// was B86
case class B525(value: BigDecimal) extends CtBoxIdentifier(name = "Self-assessment of tax payable") with CtBigDecimal
object B525 extends CorporationTaxCalculator with Calculated[B525, CT600BoxRetriever] {
override def calculate(fieldValueRetriever: CT600BoxRetriever): B525 =
calculateSATaxPayable(
fieldValueRetriever.b510(),
fieldValueRetriever.b515()
)
}
|
hmrc/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600/v3/B525.scala
|
Scala
|
apache-2.0
| 1,239
|
package net.cucumbersome.rpgRoller.warhammer.player
import net.cucumbersome.rpgRoller.warhammer.player.CombatActor.{Health, Id, Name}
case class CombatActor(id: Id, name: Name, statistics: Statistics, hp: Health)
object CombatActor{
case class Id(data: String) extends AnyVal with Serializable
case class Health(data: Int) extends AnyVal
case class Name(data: String) extends AnyVal
}
|
CucumisSativus/rpgRollerBackend
|
src/main/scala/net/cucumbersome/rpgRoller/warhammer/player/CombatActor.scala
|
Scala
|
mit
| 397
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.examples
import org.apache.spark.sql.{SaveMode, SparkSession}
import org.apache.carbondata.examples.util.ExampleUtils
object CarbonDataFrameExample {
def main(args: Array[String]) {
val spark = ExampleUtils.createSparkSession("CarbonDataFrameExample")
exampleBody(spark)
spark.close()
}
def exampleBody(spark : SparkSession): Unit = {
// Writes Dataframe to CarbonData file:
import spark.implicits._
val df = spark.sparkContext.parallelize(1 to 100)
.map(x => ("a" + x % 10, "b", x))
.toDF("c1", "c2", "number")
// Saves dataframe to carbondata file
df.write
.format("carbondata")
.option("tableName", "carbon_df_table")
.option("partitionColumns", "c1") // a list of column names
.mode(SaveMode.Overwrite)
.save()
spark.sql(""" SELECT * FROM carbon_df_table """).show()
spark.sql("SHOW PARTITIONS carbon_df_table").show()
// Specify schema
import org.apache.spark.sql.types.{StructType, StructField, StringType, IntegerType}
val customSchema = StructType(Array(
StructField("c1", StringType),
StructField("c2", StringType),
StructField("number", IntegerType)))
// Reads carbondata to dataframe
val carbondf = spark.read
.format("carbondata")
.schema(customSchema)
// .option("dbname", "db_name") the system will use "default" as dbname if not set this option
.option("tableName", "carbon_df_table")
.load()
// Dataframe operations
carbondf.printSchema()
carbondf.select($"c1", $"number" + 10).show()
carbondf.filter($"number" > 31).show()
spark.sql("DROP TABLE IF EXISTS carbon_df_table")
}
}
|
zzcclp/carbondata
|
examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala
|
Scala
|
apache-2.0
| 2,517
|
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.algebird
/**
* This is an associative, but not commutative monoid
* Also, you must start on the right, with a value, and all subsequent RightFolded must
* be RightFoldedToFold objects or zero
*
* If you add two Folded values together, you always get the one on the left,
* so this forms a kind of reset of the fold.
*/
object RightFolded {
def monoid[In, Out](foldfn: (In, Out) => Out) =
new Monoid[RightFolded[In, Out]] {
val zero = RightFoldedZero
def plus(left: RightFolded[In, Out], right: RightFolded[In, Out]) = left match {
case RightFoldedValue(_) => left
case RightFoldedZero => right
case RightFoldedToFold(lList) => right match {
case RightFoldedZero => RightFoldedToFold(lList)
case RightFoldedValue(vr) => RightFoldedValue(lList.foldRight(vr)(foldfn))
case RightFoldedToFold(rList) => RightFoldedToFold(lList ++ rList)
}
}
}
}
sealed abstract class RightFolded[+In, +Out]
case object RightFoldedZero extends RightFolded[Nothing, Nothing]
case class RightFoldedValue[+Out](v: Out) extends RightFolded[Nothing, Out]
case class RightFoldedToFold[+In](in: List[In]) extends RightFolded[In, Nothing]
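// A small illustration (hypothetical values) of the asymmetry described above, using a
// right fold that adds Ints into an Int accumulator.
object RightFoldedExample {
  val m = RightFolded.monoid[Int, Int](_ + _)
  // Folding happens once a value appears on the right: 1 + (2 + 10) = 13
  val folded = m.plus(RightFoldedToFold(List(1, 2)), RightFoldedValue(10)) // RightFoldedValue(13)
  // A value on the left always wins, "resetting" the fold:
  val reset = m.plus(RightFoldedValue(10), RightFoldedToFold(List(1, 2))) // RightFoldedValue(10)
}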
|
avibryant/algebird
|
algebird-core/src/main/scala/com/twitter/algebird/RightFolded.scala
|
Scala
|
apache-2.0
| 1,786
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.rules.physical.stream
import org.apache.calcite.plan.{RelOptRule, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.flink.table.planner.plan.nodes.FlinkConventions
import org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalWatermarkAssigner
import org.apache.flink.table.planner.plan.nodes.physical.stream.StreamPhysicalWatermarkAssigner
/**
* Rule that converts [[FlinkLogicalWatermarkAssigner]] to [[StreamPhysicalWatermarkAssigner]].
*/
class StreamPhysicalWatermarkAssignerRule
extends ConverterRule(
classOf[FlinkLogicalWatermarkAssigner],
FlinkConventions.LOGICAL,
FlinkConventions.STREAM_PHYSICAL,
"StreamPhysicalWatermarkAssignerRule") {
override def convert(rel: RelNode): RelNode = {
val watermarkAssigner = rel.asInstanceOf[FlinkLogicalWatermarkAssigner]
val convertInput = RelOptRule.convert(
watermarkAssigner.getInput, FlinkConventions.STREAM_PHYSICAL)
val traitSet: RelTraitSet = rel.getTraitSet.replace(FlinkConventions.STREAM_PHYSICAL)
new StreamPhysicalWatermarkAssigner(
watermarkAssigner.getCluster,
traitSet,
convertInput,
watermarkAssigner.rowtimeFieldIndex,
watermarkAssigner.watermarkExpr
)
}
}
object StreamPhysicalWatermarkAssignerRule {
val INSTANCE = new StreamPhysicalWatermarkAssignerRule
}
|
lincoln-lil/flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/rules/physical/stream/StreamPhysicalWatermarkAssignerRule.scala
|
Scala
|
apache-2.0
| 2,241
|
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.javalib.util
import org.junit.Test
import java.{util => ju}
import scala.reflect.ClassTag
class ArrayListTest extends AbstractListTest {
override def factory: AbstractListFactory = new ArrayListFactory
@Test def `should_not_fail_with_pre-allocation_methods`(): Unit = {
// note that these methods become no-ops in JS
val al = new ju.ArrayList[String]
al.ensureCapacity(0)
al.ensureCapacity(34)
al.trimToSize()
}
}
class ArrayListFactory extends AbstractListFactory {
override def implementationName: String =
"java.util.ArrayList"
override def empty[E: ClassTag]: ju.ArrayList[E] =
new ju.ArrayList[E]
}
|
nicolasstucki/scala-js
|
test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/util/ArrayListTest.scala
|
Scala
|
apache-2.0
| 954
|
package org.http4s.client.blaze
import java.security.SecureRandom
import java.security.cert.X509Certificate
import javax.net.ssl.{SSLContext, X509TrustManager}
import org.http4s.BuildInfo
import org.http4s.blaze.util.TickWheelExecutor
import org.http4s.headers.{AgentProduct, `User-Agent`}
import scala.concurrent.duration._
private[blaze] object bits {
// Some default objects
val DefaultResponseHeaderTimeout: Duration = 10.seconds
val DefaultTimeout: Duration = 60.seconds
val DefaultBufferSize: Int = 8 * 1024
val DefaultUserAgent = Some(`User-Agent`(AgentProduct("http4s-blaze", Some(BuildInfo.version))))
val DefaultMaxTotalConnections = 10
val DefaultMaxWaitQueueLimit = 256
val ClientTickWheel = new TickWheelExecutor()
/** Caution: trusts all certificates and disables endpoint identification */
lazy val TrustingSslContext: SSLContext = {
val trustManager = new X509TrustManager {
def getAcceptedIssuers(): Array[X509Certificate] = Array.empty
def checkClientTrusted(certs: Array[X509Certificate], authType: String): Unit = {}
def checkServerTrusted(certs: Array[X509Certificate], authType: String): Unit = {}
}
val sslContext = SSLContext.getInstance("TLS")
sslContext.init(null, Array(trustManager), new SecureRandom)
sslContext
}
}
|
reactormonk/http4s
|
blaze-client/src/main/scala/org/http4s/client/blaze/bits.scala
|
Scala
|
apache-2.0
| 1,309
|
package com.programmaticallyspeaking.ncd.config
import com.programmaticallyspeaking.ncd.testing.UnitTest
class ConfTest extends UnitTest {
case class Arguments(args: String*) {
val conf = new Conf(args)
}
"NCDbg configuration" - {
"without arguments" - new Arguments {
"has a default listen address" in {
conf.listen() should be (Address("localhost", 7778))
}
"has a default debug-connect address" in {
conf.connect() should be (Address("localhost", 7777))
}
}
"with --listen" - {
"supports port only" in new Arguments("--listen", "9999") {
conf.listen() should be (Address("localhost", 9999))
}
"supports host:port" in new Arguments("--listen", "somehost:9999") {
conf.listen() should be (Address("somehost", 9999))
}
}
"with --connect" - {
"supports port only" in new Arguments("--connect", "9999") {
conf.connect() should be (Address("localhost", 9999))
}
"supports host:port" in new Arguments("--connect", "somehost:9999") {
conf.connect() should be (Address("somehost", 9999))
}
}
"with --lazy" - {
"defaults to not lazy" in new Arguments {
conf.isLazy() should be (false)
}
"can be set to lazy" in new Arguments("--lazy") {
conf.isLazy() should be (true)
}
"can be set to not lazy" in new Arguments("--nolazy") {
conf.isLazy() should be (false)
}
}
}
}
|
provegard/ncdbg
|
src/test/scala/com/programmaticallyspeaking/ncd/config/ConfTest.scala
|
Scala
|
bsd-3-clause
| 1,499
|
package sharry.restserver
import scala.concurrent.duration._
import cats.data.Kleisli
import cats.data.OptionT
import cats.effect._
import cats.implicits._
import fs2.Stream
import sharry.backend.auth.AuthToken
import sharry.common.LenientUri
import sharry.common.syntax.all._
import sharry.restserver.http4s.EnvMiddleware
import sharry.restserver.routes._
import sharry.restserver.webapp._
import org.http4s._
import org.http4s.blaze.client.BlazeClientBuilder
import org.http4s.blaze.server.BlazeServerBuilder
import org.http4s.client.Client
import org.http4s.dsl.Http4sDsl
import org.http4s.headers.Location
import org.http4s.implicits._
import org.http4s.server.Router
import org.http4s.server.middleware.Logger
import org.log4s.getLogger
object RestServer {
private[this] val logger = getLogger
def stream[F[_]: Async](cfg: Config, pools: Pools): Stream[F, Nothing] = {
val templates = TemplateRoutes[F](cfg)
val app = for {
restApp <- RestAppImpl.create[F](cfg, pools.connectEC)
_ <- Resource.eval(restApp.init)
client <- BlazeClientBuilder[F].resource
httpApp = Router(
"/api/v2/open/" -> openRoutes(cfg, client, restApp),
"/api/v2/sec/" -> Authenticate(restApp.backend.login, cfg.backend.auth) { token =>
securedRoutes(cfg, restApp, token)
},
"/api/v2/alias/" -> Authenticate.alias(restApp.backend.login, cfg.backend.auth) {
token =>
aliasRoutes[F](cfg, restApp, token)
},
"/api/v2/admin/" -> Authenticate(restApp.backend.login, cfg.backend.auth) {
token =>
if (token.account.admin) adminRoutes(cfg, restApp)
else notFound[F](token)
},
"/api/doc" -> templates.doc,
"/app/assets" -> EnvMiddleware(WebjarRoutes.appRoutes[F]),
"/app" -> EnvMiddleware(templates.app),
"/sw.js" -> EnvMiddleware(templates.serviceWorker),
"/" -> redirectTo("/app")
).orNotFound
// With Middlewares in place
finalHttpApp = Logger.httpApp(false, false)(httpApp)
} yield finalHttpApp
Stream
.resource(app)
.flatMap(httpApp =>
BlazeServerBuilder[F]
.bindHttp(cfg.bind.port, cfg.bind.address)
.withResponseHeaderTimeout(cfg.responseTimeout.toScala)
.withIdleTimeout(cfg.responseTimeout.toScala + 30.seconds)
.withHttpApp(httpApp)
.withoutBanner
.serve
)
}.drain
def aliasRoutes[F[_]: Async](
cfg: Config,
restApp: RestApp[F],
token: AuthToken
): HttpRoutes[F] =
Router(
"upload" -> ShareUploadRoutes(
restApp.backend,
token,
cfg,
LenientUri.EmptyPath / "api" / "v2" / "alias" / "upload"
),
"mail" -> NotifyRoutes(restApp.backend, token, cfg)
)
def securedRoutes[F[_]: Async](
cfg: Config,
restApp: RestApp[F],
token: AuthToken
): HttpRoutes[F] =
Router(
"auth" -> LoginRoutes.session(restApp.backend.login, cfg),
"settings" -> SettingRoutes(restApp.backend, token),
"alias-member" ->
(if (cfg.aliasMemberEnabled) AliasMemberRoutes(restApp.backend, token)
else notFound[F](token)),
"alias" -> AliasRoutes(restApp.backend, token),
"share" -> ShareRoutes(restApp.backend, token, cfg),
"upload" -> ShareUploadRoutes(
restApp.backend,
token,
cfg,
LenientUri.EmptyPath / "api" / "v2" / "sec" / "upload"
),
"mail" -> MailRoutes(restApp.backend, token, cfg)
)
def adminRoutes[F[_]: Async](
cfg: Config,
restApp: RestApp[F]
): HttpRoutes[F] =
Router(
"signup" -> RegisterRoutes(restApp.backend, cfg).genInvite,
"account" -> AccountRoutes(restApp.backend)
)
def openRoutes[F[_]: Async](
cfg: Config,
client: Client[F],
restApp: RestApp[F]
): HttpRoutes[F] =
Router(
"info" -> InfoRoutes(cfg),
"auth" -> LoginRoutes.login(restApp.backend, client, cfg),
"signup" -> RegisterRoutes(restApp.backend, cfg).signup,
"share" -> OpenShareRoutes(restApp.backend, cfg)
)
def notFound[F[_]: Async](token: AuthToken): HttpRoutes[F] =
Kleisli(_ =>
OptionT.liftF(
logger
.finfo[F](s"Non-admin '${token.account}' calling admin routes")
.map(_ => Response.notFound[F])
)
)
def redirectTo[F[_]: Async](path: String): HttpRoutes[F] = {
val dsl = new Http4sDsl[F] {}
import dsl._
HttpRoutes.of { case GET -> Root =>
Response[F](
Status.SeeOther,
body = Stream.empty,
headers = Headers(Location(Uri(path = Uri.Path.unsafeFromString(path))))
).pure[F]
}
}
}
|
eikek/sharry
|
modules/restserver/src/main/scala/sharry/restserver/RestServer.scala
|
Scala
|
gpl-3.0
| 4,739
|
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package java.text
import java.util.Locale
/** Dummy implementation of `DecimalFormatSymbols`.
*
* It is even worse than most other dummies, in the sense that it
* special-cases the locales that we use in our tests (`FormatterTestEx`).
* It is incorrect for most locales.
*/
class DecimalFormatSymbols(locale: Locale) extends NumberFormat {
def getZeroDigit(): Char = {
val ext = locale.getExtension('u')
if (ext != null && ext.contains("nu-deva"))
'\u0966' // '०' DEVANAGARI DIGIT ZERO
else
'0'
}
def getGroupingSeparator(): Char = {
locale.getLanguage() match {
case "fr" => '\\u202F' // NARROW NO-BREAK SPACE
case "" | "en" | "hi" => ','
case _ => unsupported()
}
}
def getDecimalSeparator(): Char = {
locale.getLanguage() match {
case "fr" => ','
case "" | "en" | "hi" => '.'
case _ => unsupported()
}
}
private def unsupported(): Nothing =
throw new Error(s"Unsupported locale '$locale' in DecimalFormatSymbols")
}
object DecimalFormatSymbols {
def getInstance(locale: Locale): DecimalFormatSymbols =
new DecimalFormatSymbols(locale)
}
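// A small usage sketch of the dummy above; Locale.FRANCE ("fr") and Locale.US ("en") are
// among the locales it special-cases, so these calls do not hit unsupported().
object DecimalFormatSymbolsExample {
  val fr = DecimalFormatSymbols.getInstance(Locale.FRANCE)
  val en = DecimalFormatSymbols.getInstance(Locale.US)
  // fr.getDecimalSeparator() == ','  and fr.getGroupingSeparator() == '\u202F' (narrow no-break space)
  // en.getDecimalSeparator() == '.'  and en.getGroupingSeparator() == ','
}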
|
scala-js/scala-js
|
javalib-ext-dummies/src/main/scala/java/text/DecimalFormatSymbols.scala
|
Scala
|
apache-2.0
| 1,483
|
package gapt.provers.escargot.impl
import gapt.expr.formula.hol.universalClosure
import gapt.proofs.{ ContextSection, HOLClause, HOLSequent, Sequent }
import gapt.proofs.resolution._
import gapt.provers.escargot.{ LPO, TermOrdering }
import gapt.provers.viper.spin._
import gapt.provers.sat.Sat4j
import gapt.utils.Logger
import org.sat4j.minisat.SolverFactory
import Sat4j._
import gapt.expr.formula.And
import gapt.expr.formula.Atom
import gapt.expr.formula.Formula
import gapt.expr.util.{ constants, expressionSize, freeVariables, variables }
import gapt.proofs.context.mutable.MutableContext
import gapt.proofs.rup.RupProof
import gapt.provers.viper.aip.axioms.Axiom
import org.sat4j.specs.{ ContradictionException, IConstr, ISolverService }
import org.sat4j.tools.SearchListenerAdapter
import cats.implicits._
object EscargotLogger extends Logger( "escargot" ); import EscargotLogger._
import scala.collection.mutable
/**
* Data structure for clauses derived in Escargot.
*
* @param state Prover state that "produced" this clause.
* @param proof Resolution proof ending in this clause.
* @param index Serial number issued by the [[EscargotState]]. The index is a deterministic
* hash code, and also indicates how old the clause is.
*/
class Cls( val state: EscargotState, val proof: ResolutionProof, val index: Int ) {
val clause = proof.conclusion
def assertion = proof.assertions
val ass = state.intern( assertion )
def clauseWithAssertions = ( clause, assertion )
val maximal = for {
( a, i ) <- clause.zipWithIndex.elements
if !clause.elements.exists { x => a != x && state.termOrdering.lt( a, x ) }
} yield i
val selected = ( maximal.filter { _.isAnt } ++ clause.indicesSequent.antecedent ).take( 1 )
val weight = clause.elements.map { expressionSize( _ ) }.sum
val freeVars = freeVariables( clause )
val literalFeatureVecs = clause.map( TermFeatureVec( _ ) )
val featureVec = ClauseFeatureVec( literalFeatureVecs )
override def toString = s"[$index] ${proof.stringifiedConclusion( state.ctx )} (max = ${maximal mkString ", "}) (sel = ${selected mkString ", "}) (w = $weight)"
override def hashCode = index
}
class IndexedClsSet private (
val clauses: Set[Cls],
val state: EscargotState,
indices: Map[Index[_], AnyRef] ) {
def size: Int = clauses.size
def getIndex[T]( idx: Index[T] ): T =
indices( idx ).asInstanceOf[T]
def addIndex[T <: AnyRef]( idx: Index[T] ): IndexedClsSet =
new IndexedClsSet( clauses, state,
indices.updated( idx, indices.getOrElse( idx, idx.add( idx.empty, clauses ) ) ) )
def +( c: Cls ): IndexedClsSet = this ++ Some( c )
def ++( cs: Iterable[Cls] ): IndexedClsSet =
new IndexedClsSet(
clauses = clauses ++ cs,
indices = Map() ++ indices.view.map {
case ( i, t ) =>
i -> i.asInstanceOf[Index[AnyRef]].add( t, cs )
},
state = state )
def -( c: Cls ): IndexedClsSet =
if ( !clauses( c ) ) this else
new IndexedClsSet(
clauses = clauses - c,
indices = Map() ++ indices.view.map {
case ( i, t ) =>
i -> i.asInstanceOf[Index[AnyRef]].remove( t, c )
},
state = state )
}
object IndexedClsSet {
def apply( state: EscargotState ): IndexedClsSet =
new IndexedClsSet( Set(), state, Map() )
}
trait Index[T] {
type I = T
def empty: I
def add( t: I, cs: Iterable[Cls] ): I = cs.foldLeft( t )( add )
def add( t: I, c: Cls ): I
def remove( t: I, c: Cls ): I = remove( t, Set( c ) )
def remove( t: I, cs: Set[Cls] ): I
}
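/**
 * Illustrative sketch only (not part of gapt): a minimal [[Index]] implementation that
 * just records the serial numbers of the indexed clauses, showing the add/remove
 * contract that [[IndexedClsSet]] maintains incrementally for each registered index.
 */
object ClauseSerialIndex extends Index[Set[Int]] {
  def empty: Set[Int] = Set()
  def add( t: Set[Int], c: Cls ): Set[Int] = t + c.index
  def remove( t: Set[Int], cs: Set[Cls] ): Set[Int] = t -- cs.map( _.index )
}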
/**
* Main class of the Escargot superposition prover.
*
* A practical introduction to superposition provers can be found in [1], Section 3.
*
* Essentially, we start with a set of clauses and apply inferences until we either:
* 1. have derived the empty clause, or
* 2. applied all possible inferences without producing new clauses
*
 * The clauses are stored in various sets; the two main ones are:
* * workedOff: all inferences between clauses in this set have already been applied
* * usable: these clauses have not yet been used in inferences
*
* In every iteration of the prover (see the loop method), we
* 1. pick a "given" clause from usable (using the choose method)
* 2. perform all inferences between the given clause and all clauses in workedOff
* 2a. add the given clause to workedOff (unless discarded by an inference)
* 3. now newlyDerived contains the newly derived clauses, and we perform preprocessing on them
* 3a. the preprocessed clauses get moved to usable
*
* (The names are non-standard and picked from different sources with no regard for consistency, sorry.)
*
* Inferences: an [[InferenceRule]] is an operation that looks at the given clause,
* and the set of worked off clauses; it returns a set of new clauses, plus a set of clauses that should be discarded.
*
* For example, [[StandardInferences.BackwardSubsumption]] is an inference rule: it returns no new clauses,
* but the subsumed clauses in usable are returned as discarded.
*
* Avatar splitting: Escargot employs the Avatar splitting regime [2]. Clauses are annotated with
* propositional assertions, see [[gapt.proofs.resolution.ResolutionProof]] for the syntax. We always have a propositional
* model (avatarModel), and only consider clauses whose assertions are true in this model (called "active" here).
* Clauses whose assertions are false in the model are stored in locked. Whenever we derive an empty clause,
* we call the SAT solver to obtain a model in which every empty clause has a false assertion.
* If there is no such model, then we have found a proof!
*
* [1] Weidenbach, Combining Superposition, Sorts and Splitting. Handbook of Automated Reasoning II, 2001
* [2] Voronkov, AVATAR: The Architecture for first-order theorem provers. CAV 2014
*/
class EscargotState( val ctx: MutableContext ) {
var termOrdering: TermOrdering = LPO()
var nameGen = ctx.newNameGenerator
var preprocessingRules = Seq[PreprocessingRule]()
var inferences = Seq[InferenceRule]()
def addIndex[T <: AnyRef]( idx: Index[T] ): Unit =
workedOff = workedOff.addIndex( idx )
private var clsIdx = 0
def InputCls( clause: HOLSequent ): Cls = InputCls( Input( clause ) )
def InputCls( proof: ResolutionProof ): Cls = { clsIdx += 1; new Cls( this, proof, clsIdx ) }
def SimpCls( parent: Cls, newProof: ResolutionProof ): Cls = new Cls( this, newProof, parent.index )
def DerivedCls( parent: Cls, newProof: ResolutionProof ): Cls = { clsIdx += 1; new Cls( this, newProof, clsIdx ) }
def DerivedCls( parent1: Cls, parent2: Cls, newProof: ResolutionProof ): Cls = { clsIdx += 1; new Cls( this, newProof, clsIdx ) }
/** Clauses that have been derived in the current iteration. */
var newlyDerived = Set[Cls]()
/** We have not yet used these clauses in inferences. */
val usable = mutable.Set[Cls]()
/** All inferences between these clauses have already been applied. */
var workedOff = IndexedClsSet( this )
/**
* Locked clauses have assertions that are false in the current model,
* or are subsumed by a clause whose assertion is true in the current model.
*
* The optional clause is the assertion of the subsuming clause.
*/
val locked = mutable.Set[( Cls, Option[Set[Int]] )]()
/** This formula should always be unsatisfiable. */
def stateAsFormula: Formula = And {
( newlyDerived.view ++ usable ++ workedOff.clauses ++ locked.map( _._1 ) ++ emptyClauses.values ).map { c =>
c.proof.assertions.toNegConjunction --> universalClosure( c.proof.conclusion.toFormula )
}
} | And { ( newlyDerived.view ++ usable ++ workedOff.clauses ).map { c => universalClosure( c.proof.conclusion.toFormula ) } }
/** SAT solver instance */
val solver = SolverFactory.newDefault()
val drup = mutable.Buffer[RupProof.Line]()
solver.setSearchListener( new SearchListenerAdapter[ISolverService] {
override def learnUnit( p: Int ) = drup += RupProof.Rup( Set( p ) )
override def learn( c: IConstr ) = drup += RupProof.Rup( c )
} )
/** Map from assertion atoms to SAT solver atoms */
val atomToSatSolver = mutable.Map[Atom, Int]()
val satSolverToAtom = mutable.Map[Int, Atom]()
def intern( atom: Atom ): Int =
atomToSatSolver.getOrElseUpdate( atom, {
val i = solver.nextFreeVarId( true )
satSolverToAtom( i ) = atom
i
} )
def intern( assertions: HOLClause ): Set[Int] =
assertions.map( intern, -intern( _ ) ).elements.toSet
def deintern( i: Int ): Atom =
satSolverToAtom( i )
def deinternLiteral( i: Int ): Formula =
if ( i < 0 ) -deintern( -i ) else deintern( i )
/** Current propositional Avatar model. */
var avatarModel = Set[Int]()
  /** Empty clauses that have already been derived. All assertions in the empty clauses are false. */
var emptyClauses = mutable.Map[Set[Int], Cls]()
/** Is the assertion of cls true in the current model? */
def isActive( cls: Cls ): Boolean = isActive( cls.ass )
/** Is the assertion true in the current model? */
def isActive( assertion: HOLClause ): Boolean =
intern( assertion ).subsetOf( avatarModel )
/** Is the assertion true in the current model? */
def isActive( assertion: Set[Int] ): Boolean =
assertion.subsetOf( avatarModel )
/** Pre-processes the clauses in newlyDerived. The result is again in newlyDerived. */
def preprocessing() =
for ( r <- preprocessingRules )
newlyDerived = r.preprocess( newlyDerived, workedOff )
def trySetAssertion( assertion: Set[Int], value: Boolean ) =
for ( a <- assertion ) trySetAvatarAtom( if ( value ) a else -a )
def trySetAvatarAtom( atom: Int ) =
if ( !avatarModel( -atom ) ) avatarModel += atom
/** Moves clauses from newlyDerived into usable and locked. */
def clauseProcessing() = {
// extend avatar model
for ( c <- newlyDerived )
trySetAssertion( c.ass, c.clause.nonEmpty )
for ( c <- newlyDerived ) {
if ( c.clause.isEmpty ) {
emptyClauses( c.ass ) = c
solver.addClause( c.ass.toSeq.map( -_ ) )
if ( isActive( c.ass ) )
usable += c // trigger model recomputation
}
if ( isActive( c ) ) {
usable += c
} else if ( c.clause.nonEmpty ) {
locked += ( c -> None )
}
}
newlyDerived = Set()
}
/** Performs inferences between given and workedOff, and adds given to workedOff. */
def inferenceComputation( given: Cls ): Boolean = {
val inferred = mutable.Set[Cls]()
var discarded = false
for ( r <- inferences if !discarded ) {
val ( i, d ) = r( given, workedOff )
inferred ++= i
for ( ( c, reason ) <- d ) {
workedOff -= c
if ( c == given ) discarded = true
if ( !reason.subsetOf( c.ass ) )
locked += ( c -> Some( reason ) )
}
}
if ( !discarded ) workedOff += given
newlyDerived ++= inferred
discarded
}
var strategy = 0
/** Chooses the next clause from usable. */
def choose(): Cls = {
strategy = ( strategy + 1 ) % 6
if ( strategy < 1 ) usable minBy { _.index }
else if ( strategy < 3 ) {
val pos = usable filter { _.clause.antecedent.isEmpty }
if ( pos isEmpty ) choose()
else pos minBy { cls => ( cls.weight, cls.index ) }
} else if ( strategy < 5 ) {
val nonPos = usable filter { _.clause.antecedent.nonEmpty }
if ( nonPos isEmpty ) choose()
else nonPos minBy { cls => ( cls.weight, cls.index ) }
} else {
usable minBy { cls => ( cls.weight, cls.index ) }
}
}
def switchToNewModel() = {
avatarModel = solver.model().toSet
for ( ( cls, reason ) <- locked.toSet if isActive( cls ) && reason.forall { !isActive( _ ) } ) {
locked -= ( cls -> reason )
usable += cls
}
for ( cls <- usable.toSeq if cls.clause.isEmpty ) usable -= cls
for ( cls <- workedOff.clauses if !isActive( cls ) ) {
workedOff -= cls
locked += ( cls -> None )
}
for ( cls <- usable.toSeq if !isActive( cls ) ) {
usable -= cls
locked += ( cls -> None )
}
}
def mkSatProof(): ResolutionProof =
RupProof( emptyClauses.keys.toSeq.map( cls => RupProof.Input( cls.map( -_ ) ) ) ++ drup :+ RupProof.Rup( Set() ) ).
toRes.toResolution( satSolverToAtom, cls => {
val p = emptyClauses( cls.map( -_ ) ).proof
if ( p.assertions.isEmpty ) p else AvatarContradiction( p )
} )
var clausesForInduction = List.empty[HOLSequent]
def axiomClause( section: ContextSection, axiom: Axiom ): ( Set[Cls], Map[HOLSequent, ResolutionProof] ) = {
val seq = axiom.formula +: Sequent()
val ground = section groundSequent seq
val cnf = structuralCNF( ground )( ctx )
val cnfMap = cnf.view.map( p => p.conclusion -> p ).toMap
val clauses = cnfMap.keySet.map( _.map( _.asInstanceOf[Atom] ) )
( clauses map InputCls, cnfMap )
}
/** Main inference loop. */
def loop( spin: Option[SuperpositionInductionProver] = None ): Option[( ResolutionProof, Set[Axiom], Map[HOLSequent, ResolutionProof] )] = {
var inductedClauses = Set.empty[HOLSequent]
var addedAxioms = Set.empty[Axiom]
val possibleAxioms = mutable.Queue.empty[Axiom]
var cnfMap = Map.empty[HOLSequent, ResolutionProof]
val addInductions = spin.isDefined
var loopCount = 0
var inductCutoff = 16
val section = new ContextSection( ctx )
try {
preprocessing()
clauseProcessing()
while ( true ) {
if ( usable exists {
_.clause.isEmpty
} ) {
for ( cls <- usable if cls.clause.isEmpty && cls.assertion.isEmpty )
return Some( cls.proof, addedAxioms, cnfMap )
if ( solver.isSatisfiable ) {
info( s"sat splitting model: ${
solver.model().filter( _ >= 0 ).map( deintern ).
sortBy( _.toString ).mkString( ", " )
}".replace( '\\n', ' ' ) )
switchToNewModel()
} else {
return Some( mkSatProof(), addedAxioms, cnfMap )
}
}
if ( addInductions && ( usable.isEmpty || loopCount >= inductCutoff ) ) {
loopCount = 0
do {
if ( possibleAxioms.isEmpty && usable.isEmpty )
return None
if ( possibleAxioms.nonEmpty ) {
val newAxiom = possibleAxioms.dequeue()
val ( clauses, newMap ) = axiomClause( section, newAxiom )
addedAxioms += newAxiom
cnfMap ++= newMap
newlyDerived ++= clauses
preprocessing()
clauseProcessing()
if ( addedAxioms.size % 5 == 0 )
inductCutoff += 1
}
} while ( usable.isEmpty )
}
if ( usable.isEmpty )
return None
val given = choose()
usable -= given
spin match {
case Some( s ) =>
// TODO: this should probably be less restrictive now that we perform more subgoal generalization
if ( s.performGeneralization || given.clause.exists( constants( _ ) exists ( s.isInductive( _ )( ctx ) ) ) &&
!inductedClauses.contains( given.clause ) ) {
EscargotLogger.time( "axiom_gen" ) {
s.clauseAxioms( given.clause )( ctx ) foreach ( possibleAxioms.enqueue( _ ) )
}
inductedClauses += given.clause
}
case None =>
}
val discarded = inferenceComputation( given )
info( s"[wo=${workedOff.size},us=${usable.size}] ${if ( discarded ) "discarded" else "kept"}: $given".replace( '\\n', ' ' ) )
preprocessing()
clauseProcessing()
loopCount += 1
}
None
} catch {
case _: ContradictionException =>
Some( mkSatProof(), addedAxioms, cnfMap )
} finally {
if ( addInductions ) {
EscargotLogger.metric( "candidates", inductedClauses.size )
EscargotLogger.metric( "added_axioms", addedAxioms.size )
}
}
}
}
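/**
 * Illustrative sketch only (not gapt code): how the state above is typically driven,
 * following the loop description in the class comment. It assumes the caller has already
 * installed preprocessing and inference rules on the state (e.g. those provided by
 * [[StandardInferences]]); without them the given-clause loop cannot make progress.
 */
object EscargotStateSketch {
  def refute( cnf: Iterable[HOLClause], state: EscargotState ): Option[ResolutionProof] = {
    state.newlyDerived ++= cnf.map( c => state.InputCls( c ) ) // load the input clauses
    state.loop().map( _._1 )                                   // run the given-clause loop
  }
}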
|
gapt/gapt
|
core/src/main/scala/gapt/provers/escargot/state.scala
|
Scala
|
gpl-3.0
| 16,117
|
package com.olvind
package requiresjs
import jdk.nashorn.internal.ir._
abstract class VisitorHelperNameStack[N <: Node, Out](n: N) extends VisitorHelper[N, Out](n) {
protected var nameStack: List[VarName] = Nil
override def enterPropertyNode(n: PropertyNode): Boolean =
matcher(n.getKey) {
case (i: IdentNode) =>
nameStack = VarName(i.getName) :: nameStack
}
override def leavePropertyNode(n: PropertyNode): Node = {
(nameStack.headOption, n.getKey) match {
case (Some(n1), n2: IdentNode) if n1.value == n2.getName =>
nameStack = nameStack drop 1
case _ => ()
}
n
}
override def enterVarNode(n: VarNode): Boolean =
matcher(n.getName) {
case name =>
nameStack = VarName(name.getName) :: nameStack
}
override def leaveVarNode(n: VarNode): Node = {
(nameStack.headOption, n.getName) match {
case (Some(n1), n2) if n1.value == n2.getName =>
nameStack = nameStack drop 1
case _ => ()
}
n
}
override protected def assertions(): Unit =
require(nameStack.isEmpty)
}
|
chandu0101/scalajs-react-components
|
gen/src/main/scala/com/olvind/requiresjs/VisitorHelperNameStack.scala
|
Scala
|
apache-2.0
| 1,093
|
package org.jetbrains.plugins.scala
package codeInspection
package shadow
import com.intellij.codeInspection.{InspectionManager, LocalQuickFix, ProblemDescriptor, ProblemHighlightType}
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.extensions.ObjectExt
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReference
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScCaseClause, ScReferencePattern}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.{createPatternFromText, createReferenceFromText}
import org.jetbrains.plugins.scala.lang.resolve.StdKinds
import org.jetbrains.plugins.scala.lang.resolve.processor.ResolveProcessor
class VariablePatternShadowInspection extends AbstractRegisteredInspection {
import VariablePatternShadowInspection._
override protected def problemDescriptor(element: PsiElement,
maybeQuickFix: Option[LocalQuickFix],
descriptionTemplate: String,
highlightType: ProblemHighlightType)
(implicit manager: InspectionManager, isOnTheFly: Boolean): Option[ProblemDescriptor] = {
element match {
case refPat: ScReferencePattern if isInCaseClause(refPat) && doesShadowOtherPattern(refPat) =>
val quickFixes = Array[LocalQuickFix](
new ConvertToStableIdentifierPatternFix(refPat),
new RenameVariablePatternFix(refPat)
)
val descriptor =
manager.createProblemDescriptor(refPat.nameId, description, isOnTheFly, quickFixes, ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
Some(descriptor)
case _ =>
None
}
}
}
object VariablePatternShadowInspection {
def description: String = ScalaInspectionBundle.message("suspicious.shadowing.by.a.variable.pattern")
def isInCaseClause(ref: ScReferencePattern): Boolean =
ScalaPsiUtil.nameContext(ref).is[ScCaseClause]
def doesShadowOtherPattern(ref: ScReferencePattern): Boolean = (
for {
// createReferenceFromText might return null in invalid code, e.g. if ')' is absent in case pattern
dummyRef <- Option(createReferenceFromText(ref.name, ref.getContext.getContext, ref))
proc = new ResolveProcessor(StdKinds.valuesRef, dummyRef, ref.name)
results = dummyRef.asInstanceOf[ScStableCodeReference].doResolve(proc)
} yield results.exists(rr => proc.isAccessible(rr.getElement, ref))
).getOrElse(false)
}
class ConvertToStableIdentifierPatternFix(r: ScReferencePattern)
extends AbstractFixOnPsiElement(ScalaInspectionBundle.message("convert.to.stable.identifier.pattern", r.getText), r) {
override protected def doApplyFix(ref: ScReferencePattern)
(implicit project: Project): Unit = {
val stableIdPattern = createPatternFromText(s"`${ref.getText}`")
ref.replace(stableIdPattern)
}
}
class RenameVariablePatternFix(ref: ScReferencePattern) extends RenameElementQuickfix(ref, ScalaInspectionBundle.message("rename.variable.pattern"))
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/shadow/VariablePatternShadowInspection.scala
|
Scala
|
apache-2.0
| 3,227
|
/*
* Copyright 2018 Branislav Lazic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.akkacqrs.validator
import java.util.UUID
import cats.data._
import cats.implicits._
object IssueCommandsValidator {
import org.akkacqrs.write.IssueRepository._
case class ValidationError(code: String, message: String)
private def validateSummary(summary: String): ValidatedNel[ValidationError, String] =
summary match {
case null => Validated.invalidNel(ValidationError("summary", "Summary cannot be null."))
case s if s.length < 2 =>
Validated.invalidNel(ValidationError("summary", "Summary must have at least 2 characters."))
case s if s.length >= 100 =>
Validated.invalidNel(ValidationError("summary", "Summary cannot have more than 100 characters."))
case s => Validated.valid(s)
}
private def validateDescription(description: String): ValidatedNel[ValidationError, String] =
if (description.length <= 1500) Validated.valid(description)
else Validated.invalidNel(ValidationError("description", "Description cannot have more than 1500 characters."))
def validateCreateIssue(createIssue: CreateIssue): ValidatedNel[ValidationError, CreateIssue] = {
val validId: ValidatedNel[ValidationError, UUID] = Validated.valid(createIssue.id)
val validSummary = validateSummary(createIssue.summary)
val validDescription = validateDescription(createIssue.description)
val validDate = Validated.valid(createIssue.date)
val validStatus = Validated.valid(createIssue.status)
(validId, validSummary, validDescription, validDate, validStatus) mapN CreateIssue
}
def validateUpdateIssue(updateIssue: UpdateIssue): ValidatedNel[ValidationError, UpdateIssue] = {
val validId: ValidatedNel[ValidationError, UUID] = Validated.valid(updateIssue.id)
val validSummary = validateSummary(updateIssue.summary)
val validDescription = validateDescription(updateIssue.description)
val validDate = Validated.valid(updateIssue.date)
(validId, validSummary, validDescription, validDate) mapN UpdateIssue
}
}
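/**
 * Illustrative sketch only (not part of the original file): how a caller might consume the
 * accumulated validation result. The `CreateIssue` value is assumed to be constructed
 * elsewhere; its field types are defined in `IssueRepository`.
 */
object IssueCommandsValidatorUsage {
  import org.akkacqrs.write.IssueRepository.CreateIssue
  import IssueCommandsValidator._
  def handle(createIssue: CreateIssue): Either[NonEmptyList[ValidationError], CreateIssue] =
    validateCreateIssue(createIssue).toEither
}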
|
BranislavLazic/akka-cqrs-activator
|
src/main/scala/org/akkacqrs/validator/IssueCommandsValidator.scala
|
Scala
|
apache-2.0
| 2,836
|
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs105.boxes
import uk.gov.hmrc.ct.accounts.frs105.retriever.Frs105AccountsBoxRetriever
import uk.gov.hmrc.ct.accounts.{AccountsMoneyValidationFixture, MockFrs105AccountsRetriever}
class AC406Spec extends AccountsMoneyValidationFixture[Frs105AccountsBoxRetriever] with MockFrs105AccountsRetriever {
testAccountsMoneyValidation("AC406", AC406.apply)
}
|
pncampbell/ct-calculations
|
src/test/scala/uk/gov/hmrc/ct/accounts/frs105/boxes/AC406Spec.scala
|
Scala
|
apache-2.0
| 994
|
/*
* Copyright 2018 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import play.api.mvc.Result
import play.api.test.FakeRequest
import play.api.test.Helpers._
import service.KeystoreService
import service.KeystoreService._
import scala.concurrent.Future
import uk.gov.hmrc.http.SessionKeys
class PostTriggerPensionInputsControllerSpec extends test.BaseSpec {
val endPointURL = "/paac/moneyPurchasePostTriggerValue"
val DC_2016_FLAG = s"${DC_FLAG_PREFIX}2016"
val DC_2016_KEY = s"${DC_PREFIX}2016"
trait ControllerWithMockKeystore extends MockKeystoreFixture {
MockKeystore.map = MockKeystore.map + (CURRENT_INPUT_YEAR_KEY -> "2015")
MockKeystore.map = MockKeystore.map + (SELECTED_INPUT_YEARS_KEY -> "2015")
object ControllerWithMockKeystore extends PostTriggerPensionInputsController with AppTestSettings {
def keystore: KeystoreService = MockKeystore
}
}
"onPageLoad" should {
"redirect if trigger date not present in keystore" in {
val result: Option[Future[Result]] = route(FakeRequest(GET, endPointURL))
result.isDefined shouldBe true
status(result.get) shouldBe 303
}
"redirect if trigger date is blank in keystore" in new ControllerWithMockKeystore {
MockKeystore.map = MockKeystore.map + (KeystoreService.TRIGGER_DATE_KEY -> "")
val sessionData = List((SessionKeys.sessionId,SESSION_ID),
(IS_EDIT_KEY -> "true"),
(CURRENT_INPUT_YEAR_KEY -> "2015"),
(SELECTED_INPUT_YEARS_KEY -> "2015"))
val request = FakeRequest(GET,"").withSession(sessionData: _*)
val result : Future[Result] = ControllerWithMockKeystore.onPageLoad()(request)
status(result) shouldBe 303
}
"display p1 input amount page with previous value if trigger date was period 1" in new ControllerWithMockKeystore {
// setup
val request = FakeRequest(GET,"").withSession{(SessionKeys.sessionId,SESSION_ID)}
MockKeystore.map = MockKeystore.map + (TRIGGER_DATE_KEY -> "2015-6-15")
MockKeystore.map = MockKeystore.map + (P1_TRIGGER_DC_KEY -> "1200")
MockKeystore.map = MockKeystore.map + (P2_TRIGGER_DC_KEY -> "5600")
// test
val result : Future[Result] = ControllerWithMockKeystore.onPageLoad()(request)
// check
status(result) shouldBe 200
val htmlPage = contentAsString(await(result))
htmlPage should include (s"""<input type="text" name="${P1_TRIGGER_DC_KEY}" id="${P1_TRIGGER_DC_KEY}" """)
dumpHtml("empty_moneyPurchasePostTriggerValue", htmlPage)
}
"display p2 input amount page with previous value if trigger date was period 2" in new ControllerWithMockKeystore {
// setup
val sessionData = List((SessionKeys.sessionId,SESSION_ID))
MockKeystore.map = MockKeystore.map + (TRIGGER_DATE_KEY -> "2015-11-15")
MockKeystore.map = MockKeystore.map + (P1_TRIGGER_DC_KEY -> "1200")
MockKeystore.map = MockKeystore.map + (P2_TRIGGER_DC_KEY -> "5600")
val request = FakeRequest(GET,"").withSession(sessionData: _*)
// test
val result : Future[Result] = ControllerWithMockKeystore.onPageLoad()(request)
// check
status(result) shouldBe 200
val htmlPage = contentAsString(await(result))
htmlPage should include (s"""<input type="text" name="${P2_TRIGGER_DC_KEY}" id="${P2_TRIGGER_DC_KEY}" """)
}
}
"onSubmit" should {
"display errors if amount is negative" in new ControllerWithMockKeystore {
// set up
val sessionData = List((SessionKeys.sessionId,SESSION_ID),
(TRIGGER_DATE_KEY -> "2015-11-15"),
(IS_EDIT_KEY -> "false"),
(P1_TRIGGER_DC_KEY -> "1234"),
(P2_TRIGGER_DC_KEY -> "5678"),
(CURRENT_INPUT_YEAR_KEY, "2015"),
(SELECTED_INPUT_YEARS_KEY, "2015"))
implicit val request = FakeRequest(POST, endPointURL).withSession(sessionData: _*).withFormUrlEncodedBody((P2_TRIGGER_DC_KEY -> "-1"))
// test
val result: Future[Result] = ControllerWithMockKeystore.onSubmit()(request)
// check
status(result) shouldBe 200
val htmlPage = contentAsString(await(result))
htmlPage should include ("Enter an amount that is £5,000,000 or less.")
}
"display errors if Period-2 DC amount is blank" in new ControllerWithMockKeystore {
// set up
val sessionData = List((SessionKeys.sessionId,SESSION_ID),
(TRIGGER_DATE_KEY -> "2015-11-15"),
(IS_EDIT_KEY -> "false"),
(P1_TRIGGER_DC_KEY -> "1234"),
(P2_TRIGGER_DC_KEY -> "5678"),
(CURRENT_INPUT_YEAR_KEY, "2015"),
(SELECTED_INPUT_YEARS_KEY, "2015"))
implicit val request = FakeRequest(POST, endPointURL).withSession(sessionData: _*).withFormUrlEncodedBody((P2_TRIGGER_DC_KEY -> ""))
// test
val result: Future[Result] = ControllerWithMockKeystore.onSubmit()(request)
// check
status(result) shouldBe 200
val htmlPage = contentAsString(await(result))
htmlPage should include ("Enter your defined contribution pension savings for rest of period 2 even if it is 0.")
//htmlPage should include ("""Enter your defined contribution pension savings for rest of 2016 to 2017 even if it is 0.""")
dumpHtml("error_moneyPurchasePostTriggerValue", htmlPage)
}
"display errors if Period-1 DC amount is blank" in new ControllerWithMockKeystore {
// set up
val sessionData = List((SessionKeys.sessionId,SESSION_ID),
(TRIGGER_DATE_KEY -> "2015-4-15"),
(IS_EDIT_KEY -> "false"),
(P1_TRIGGER_DC_KEY -> "1234"),
(P2_TRIGGER_DC_KEY -> "5678"),
(CURRENT_INPUT_YEAR_KEY, "2015"),
(SELECTED_INPUT_YEARS_KEY, "2015"))
implicit val request = FakeRequest(POST, endPointURL).withSession(sessionData: _*).withFormUrlEncodedBody((P1_TRIGGER_DC_KEY -> ""))
// test
val result: Future[Result] = ControllerWithMockKeystore.onSubmit()(request)
// check
status(result) shouldBe 200
val htmlPage = contentAsString(await(result))
htmlPage should include ("Enter your defined contribution pension savings for rest of period 1 even if it is 0.")
}
"display errors if amount is blank when trigger is 2016" in new ControllerWithMockKeystore {
// set up
val sessionData = List((SessionKeys.sessionId,SESSION_ID),
(TRIGGER_DATE_KEY -> "2016-11-15"),
(IS_EDIT_KEY -> "false"),
(DC_2016_FLAG -> "true"),
(CURRENT_INPUT_YEAR_KEY, "2016"),
(SELECTED_INPUT_YEARS_KEY, "2016"))
implicit val request = FakeRequest(POST, endPointURL).withSession(sessionData: _*).withFormUrlEncodedBody((TRIGGER_DC_KEY -> ""))
// test
val result: Future[Result] = ControllerWithMockKeystore.onSubmit()(request)
// check
status(result) shouldBe 200
val htmlPage = contentAsString(await(result))
htmlPage should include ("Enter your defined contribution pension savings for rest of 2016 to 2017 even if it is 0.")
}
"saves p2 amount in keystore if valid form" in new ControllerWithMockKeystore {
// set up
val sessionData = List((SessionKeys.sessionId,SESSION_ID),
(TRIGGER_DATE_KEY -> "2015-11-15"),
(IS_EDIT_KEY -> "false"),
(CURRENT_INPUT_YEAR_KEY, "2015"),
(P2_DC_KEY, "4000100"),
(SELECTED_INPUT_YEARS_KEY, "2015"))
implicit val request = FakeRequest(POST, endPointURL).withSession(sessionData: _*).withFormUrlEncodedBody((P2_TRIGGER_DC_KEY -> "40000"))
// test
val result: Future[Result] = ControllerWithMockKeystore.onSubmit()(request)
// check
status(result) shouldBe 303
MockKeystore.map should contain key (P2_TRIGGER_DC_KEY)
MockKeystore.map should contain value ("4000000")
}
"saves p1 amount in keystore if valid form" in new ControllerWithMockKeystore {
// set up
val sessionData = List((SessionKeys.sessionId,SESSION_ID),
(TRIGGER_DATE_KEY -> "2015-4-15"),
(IS_EDIT_KEY -> "false"),
(P1_DC_KEY, "4000100"),
(CURRENT_INPUT_YEAR_KEY, "2015"),
(SELECTED_INPUT_YEARS_KEY, "2015"))
implicit val request = FakeRequest(POST, endPointURL).withSession(sessionData: _*).withFormUrlEncodedBody((P1_TRIGGER_DC_KEY -> "40000"))
// test
val result: Future[Result] = ControllerWithMockKeystore.onSubmit()(request)
// check
status(result) shouldBe 303
MockKeystore.map should contain key (P1_TRIGGER_DC_KEY)
MockKeystore.map should contain value ("4000000")
}
"displays error if P1 amount is greater than total savings for the pension input period" in new ControllerWithMockKeystore {
// set up
val sessionData = List((SessionKeys.sessionId,SESSION_ID),
(TRIGGER_DATE_KEY -> "2015-4-15"),
(IS_EDIT_KEY -> "false"),
(P1_DC_KEY, "3999900"),
(CURRENT_INPUT_YEAR_KEY, "2015"),
(SELECTED_INPUT_YEARS_KEY, "2015"))
implicit val request = FakeRequest(POST, endPointURL).withSession(sessionData: _*).withFormUrlEncodedBody((P1_TRIGGER_DC_KEY -> "40000"))
// test
val result: Future[Result] = ControllerWithMockKeystore.onSubmit()(request)
// check
status(result) shouldBe 200
val htmlPage = contentAsString(await(result))
htmlPage should include ("Enter an amount less than your total pension savings of £39,999")
}
"displays error if P2 amount is greater than total savings for the pension input period" in new ControllerWithMockKeystore {
// set up
val sessionData = List((SessionKeys.sessionId,SESSION_ID),
(TRIGGER_DATE_KEY -> "2015-9-15"),
(IS_EDIT_KEY -> "false"),
(P2_DC_KEY, "3999900"),
(CURRENT_INPUT_YEAR_KEY, "2015"),
(SELECTED_INPUT_YEARS_KEY, "2015"))
implicit val request = FakeRequest(POST, endPointURL).withSession(sessionData: _*).withFormUrlEncodedBody((P2_TRIGGER_DC_KEY -> "40000"))
// test
val result: Future[Result] = ControllerWithMockKeystore.onSubmit()(request)
// check
status(result) shouldBe 200
val htmlPage = contentAsString(await(result))
htmlPage should include ("Enter an amount less than your total pension savings of £39,999")
}
"displays error if 2016 amount is greater than total savings for the pension input period" in new ControllerWithMockKeystore {
// set up
val sessionData = List((SessionKeys.sessionId,SESSION_ID),
(TRIGGER_DATE_KEY -> "2016-9-15"),
(IS_EDIT_KEY -> "false"),
(DC_2016_KEY, "0"),
(CURRENT_INPUT_YEAR_KEY, "2016"),
(SELECTED_INPUT_YEARS_KEY, "2016"),
(FIRST_DC_YEAR_KEY -> "2016"),
(TE_YES_NO_KEY -> "Yes"))
implicit val request = FakeRequest(POST, endPointURL).withSession(sessionData: _*)
.withFormUrlEncodedBody((TRIGGER_DC_KEY -> "40000"))
// test
val result: Future[Result] = ControllerWithMockKeystore.onSubmit()(request)
// check
status(result) shouldBe 200
val htmlPage = contentAsString(await(result))
htmlPage should include ("Enter an amount less than your total pension savings of £0")
}
"move to next page if 2016 amount is equal to total savings for the pension input period" in new ControllerWithMockKeystore {
// set up
val sessionData = List((SessionKeys.sessionId,SESSION_ID),
(TRIGGER_DATE_KEY -> "2016-9-15"),
(IS_EDIT_KEY -> "false"),
(DC_2016_KEY, "0"),
(CURRENT_INPUT_YEAR_KEY, "2016"),
(SELECTED_INPUT_YEARS_KEY, "2016"),
(FIRST_DC_YEAR_KEY -> "2016"),
(TE_YES_NO_KEY -> "Yes"))
implicit val request = FakeRequest(POST, endPointURL).withSession(sessionData: _*)
.withFormUrlEncodedBody((TRIGGER_DC_KEY -> "0"))
// test
val result: Future[Result] = ControllerWithMockKeystore.onSubmit()(request)
// check
status(result) shouldBe 303
val htmlPage = contentAsString(await(result))
redirectLocation(result) shouldBe Some("/paac/review")
}
}
"onBack" should {
"during edit return to review page" in new ControllerWithMockKeystore {
// set up
val request = FakeRequest(GET,"").withSession((SessionKeys.sessionId,SESSION_ID),
(IS_EDIT_KEY -> "true"),
(CURRENT_INPUT_YEAR_KEY -> "2015"),
(SELECTED_INPUT_YEARS_KEY -> "2015"))
// test
val result : Future[Result] = ControllerWithMockKeystore.onBack()(request)
// check
status(result) shouldBe 303
redirectLocation(result) shouldBe Some("/paac/review")
}
"during edit return to date page" in new ControllerWithMockKeystore {
// set up
val request = FakeRequest(GET,"").withSession((SessionKeys.sessionId,SESSION_ID),
(IS_EDIT_KEY -> "false"),
(FIRST_DC_YEAR_KEY -> "2015"),
(TE_YES_NO_KEY -> "Yes"),
(CURRENT_INPUT_YEAR_KEY -> "2015"),
(SELECTED_INPUT_YEARS_KEY -> "2015"))
// test
val result : Future[Result] = ControllerWithMockKeystore.onBack()(request)
// check
status(result) shouldBe 303
redirectLocation(result) shouldBe Some("/paac/dateofmpaate")
}
}
}
|
hmrc/paac-frontend
|
test/controllers/PostTriggerPensionInputsControllerSpec.scala
|
Scala
|
apache-2.0
| 15,540
|
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.unicomplex
import akka.actor.ActorSystem
import akka.pattern._
import akka.testkit.TestKit
import com.typesafe.config.ConfigFactory
import org.scalatest.OptionValues._
import org.scalatest.{AsyncFlatSpecLike, Matchers}
import scala.util.Failure
object JavaFailedFlowSvcSpec {
val dummyJarsDir = getClass.getClassLoader.getResource("classpaths").getPath
val classPath = dummyJarsDir + "/JavaFailedFlowSvc/META-INF/squbs-meta.conf"
val config = ConfigFactory.parseString(
s"""
|squbs {
| actorsystem-name = JavaFailedFlowSvcSpec
| ${JMX.prefixConfig} = true
|}
|default-listener.bind-port = 0
|akka.http.server.remote-address-header = on
""".stripMargin
)
import Timeouts._
val boot = UnicomplexBoot(config)
.createUsing {(name, config) => ActorSystem(name, config)}
.scanResources(withClassPath = false, classPath)
.start(startupTimeout)
}
class JavaFailedFlowSvcSpec extends TestKit(JavaFailedFlowSvcSpec.boot.actorSystem) with AsyncFlatSpecLike with Matchers {
"The JavaFailedFlowSvc" should "fail" in {
import Timeouts._
Unicomplex(system).uniActor ? SystemState map { state =>
state shouldBe Failed
}
}
"The JavaFailedFlowSvc" should "expose errors" in {
import Timeouts._
(Unicomplex(system).uniActor ? ReportStatus).mapTo[StatusReport] map { report =>
report.state shouldBe Failed
val initTry = report.cubes.values.head._2.value.reports.values.head.value
initTry should matchPattern { case Failure(e: InstantiationException) => }
}
}
}
|
Harikiranvuyyuru/squbs
|
squbs-unicomplex/src/test/scala/org/squbs/unicomplex/JavaFailedFlowSvcSpec.scala
|
Scala
|
apache-2.0
| 2,189
|
/*
* Copyright 2001-2015 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.exceptions.TestFailedException
import SharedHelpers.thisLineNumber
import scala.util.Failure
import scala.util.Success
import org.scalatest.exceptions.TestCanceledException
import java.util.Date
import org.scalactic.Prettifier
import org.scalactic.exceptions.NullArgumentException
import scala.concurrent.Future
import scala.concurrent.ExecutionContext
import org.scalatest.concurrent.ScalaFutures
class RecoverMethodsSpec extends FunSpec with RecoverMethods with ScalaFutures {
// SKIP-SCALATESTJS-START
implicit val execCtx = scala.concurrent.ExecutionContext.Implicits.global
// SKIP-SCALATESTJS-END
//SCALATESTJS-ONLY implicit val execCtx = org.scalatest.concurrent.TestExecutionContext.runNow
val fileName: String = "RecoverMethodsSpec.scala"
describe("The recoverToExceptionIf method") {
it("should recover to subtypes") {
class MyException extends RuntimeException
class MyExceptionSubClass extends MyException
val myEx = new MyException
val futureMyEx =
recoverToExceptionIf[MyException] {
Future { throw myEx }
}
assert(futureMyEx.futureValue eq myEx)
val myExSub = new MyExceptionSubClass
val futureMyExSub =
recoverToExceptionIf[MyException] {
Future { throw myExSub }
}
assert(futureMyExSub.futureValue eq myExSub)
// Try with a trait
trait MyTrait {
def someRandomMethod: Int = 42
}
class AnotherException extends RuntimeException with MyTrait
val futureCaught =
recoverToExceptionIf[MyTrait] {
Future { throw new AnotherException }
}
// Make sure the result type is the type passed in, so I can
// not cast and still invoke any method on it I want
val futureInt = futureCaught map { caught => caught.someRandomMethod }
assert(futureInt.futureValue == 42)
}
it("should fail with TFE if no exception is thrown") {
val futureTfe =
recoverToExceptionIf[IllegalArgumentException] { Future { "hi" } }
assert(futureTfe.failed.futureValue.isInstanceOf[TestFailedException])
}
it("should return the caught exception") {
val e = new RuntimeException
val futureResult =
recoverToExceptionIf[RuntimeException] {
Future { throw e }
}
assert(futureResult.futureValue eq e)
}
describe("when the bit of code throws the wrong exception") {
it("should include that wrong exception as the TFE's cause") {
val wrongException = new RuntimeException("oops!")
val futureCaught =
recoverToExceptionIf[IllegalArgumentException] {
Future { throw wrongException }
}
val caught = futureCaught.failed.futureValue
assert(caught.isInstanceOf[TestFailedException])
assert(caught.getCause eq wrongException)
}
}
}
describe("The recoverToSucceededIf method") {
it("should recover to subtypes") {
class MyException extends RuntimeException
class MyExceptionSubClass extends MyException
val myEx = new MyException
val futureMyEx =
recoverToSucceededIf[MyException] {
Future { throw myEx }
}
assert(futureMyEx.futureValue eq Succeeded)
val myExSub = new MyExceptionSubClass
val futureMyExSub =
recoverToSucceededIf[MyException] {
Future { throw myExSub }
}
assert(futureMyExSub.futureValue eq Succeeded)
// Try with a trait
trait MyTrait {
def someRandomMethod: Int = 42
}
class AnotherException extends RuntimeException with MyTrait
val futureCaught =
recoverToSucceededIf[MyTrait] {
Future { throw new AnotherException }
}
assert(futureCaught.futureValue eq Succeeded)
}
it("should return Succeeded") {
val e = new RuntimeException
val futureResult =
recoverToSucceededIf[RuntimeException] {
Future { throw e }
}
assert(futureResult.futureValue eq Succeeded)
}
it("should fail with TFE if no exception is thrown") {
val futureTfe =
recoverToSucceededIf[IllegalArgumentException] { Future { "hi" } }
assert(futureTfe.failed.futureValue.isInstanceOf[TestFailedException])
}
describe("when the bit of code throws the wrong exception") {
it("should include that wrong exception as the TFE's cause") {
val wrongException = new RuntimeException("oops!")
val futureCaught =
recoverToSucceededIf[IllegalArgumentException] {
Future { throw wrongException }
}
val caught = futureCaught.failed.futureValue
assert(caught.isInstanceOf[TestFailedException])
assert(caught.getCause eq wrongException)
}
}
}
}
|
dotty-staging/scalatest
|
scalatest-test/src/test/scala/org/scalatest/RecoverMethodsSpec.scala
|
Scala
|
apache-2.0
| 5,465
|
/*
* This file is part of the "silex" library of helpers for Apache Spark.
*
* Copyright (c) 2015 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.redhat.et.silex.frame
import org.apache.spark.sql.DataFrame
import scala.language.implicitConversions
trait NaturalJoining {
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
/**
* Performs a natural join of two data frames.
*
* The frames are joined by equality on all of the columns they have in common.
* The resulting frame has the common columns (in the order they appeared in <code>left</code>),
* followed by the columns that only exist in <code>left</code>, followed by the columns that
* only exist in <code>right</code>.
*/
def natjoin(left: DataFrame, right: DataFrame): DataFrame = {
val leftCols = left.columns
val rightCols = right.columns
val commonCols = leftCols.toSet intersect rightCols.toSet
if(commonCols.isEmpty)
left.limit(0).join(right.limit(0))
else
left
.join(right, commonCols.map {col => left(col) === right(col) }.reduce(_ && _))
.select(leftCols.collect { case c if commonCols.contains(c) => left(c) } ++
leftCols.collect { case c if !commonCols.contains(c) => left(c) } ++
rightCols.collect { case c if !commonCols.contains(c) => right(c) } : _*)
}
}
private[frame] case class DFWithNatJoin(df: DataFrame) extends NaturalJoining {
def natjoin(other: DataFrame): DataFrame = super.natjoin(df, other)
}
/**
* Module for natural join functionality. Import <code>NaturalJoin._</code> for static access
* to the <code>natjoin</code> method, or import <code>NaturalJoin.implicits._</code> to pimp
* Spark DataFrames with a <code>natjoin</code> member method.
*/
object NaturalJoin extends NaturalJoining {
object implicits {
implicit def dfWithNatJoin(df: DataFrame) = DFWithNatJoin(df)
}
}
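/**
 * Illustrative sketch only (not part of the original file): the two calling styles described
 * above. The data frames are assumed to be supplied by the caller and to share at least one
 * column name.
 */
object NaturalJoinUsageSketch {
  import NaturalJoin.implicits._
  /** Member-method form, enabled by the implicit conversion. */
  def viaImplicits(customers: DataFrame, orders: DataFrame): DataFrame =
    customers.natjoin(orders)
  /** Static form through the NaturalJoin module. */
  def viaModule(customers: DataFrame, orders: DataFrame): DataFrame =
    NaturalJoin.natjoin(customers, orders)
}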
|
erikerlandson/silex
|
src/main/scala/com/redhat/et/silex/frame/natjoin.scala
|
Scala
|
apache-2.0
| 2,487
|
/*
*
* * Copyright 2014 Commonwealth Computer Research, Inc.
* *
* * Licensed under the Apache License, Version 2.0 (the License);
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an AS IS BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package org.locationtech.geomesa.core.iterators
import java.text.DecimalFormat
import com.google.common.collect.HashBasedTable
import com.typesafe.scalalogging.slf4j.Logging
import com.vividsolutions.jts.geom.Envelope
import org.geotools.data._
import org.geotools.data.simple.SimpleFeatureIterator
import org.geotools.filter.text.ecql.ECQL
import org.geotools.filter.visitor.ExtractBoundsFilterVisitor
import org.geotools.geometry.jts.ReferencedEnvelope
import org.geotools.referencing.crs.DefaultGeographicCRS
import org.junit.runner.RunWith
import org.locationtech.geomesa.core.data.{AccumuloDataStore, AccumuloFeatureStore}
import org.locationtech.geomesa.core.index.{IndexSchemaBuilder, QueryHints}
import org.locationtech.geomesa.utils.geotools.Conversions.RichSimpleFeature
import org.locationtech.geomesa.utils.geotools.GridSnap
import org.opengis.feature.simple.SimpleFeature
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class LiveDensityIteratorTest extends Specification with Logging {
sequential
/**
* WARNING: this test runs against a live accumulo instance
*/
val params = Map(
"instanceId" -> "mycloud",
"zookeepers" -> "zoo1,zoo2,zoo3",
"user" -> "myuser",
"password" -> "mypass",
"auths" -> "user,admin",
"visibilities" -> "",
"tableName" -> "mytable",
"indexSchemaFormat" -> new IndexSchemaBuilder("~").randomNumber(3).constant("TEST").geoHash(0, 3).date("yyyyMMdd").nextPart().geoHash(3, 2).nextPart().id().build(),
"featureEncoding" -> "avro")
val sftName = "fr"
val bbox = new { val lowx = -78.57; val lowy = 40.96; val highx = -77.23; val highy = 42.29 }
val dates = "'2013-01-06T00:00:00.000Z' and '2013-01-09T00:00:00.000Z'"
val size = new { val width = 300; val height = 150 }
var snap: GridSnap = null
val map = HashBasedTable.create[Double, Double, Long]()
def getDataStore: AccumuloDataStore = {
DataStoreFinder.getDataStore(params).asInstanceOf[AccumuloDataStore]
}
def printFeatures(featureIterator: SimpleFeatureIterator): Unit = {
val features = new Iterator[SimpleFeature] {
def hasNext = {
val next = featureIterator.hasNext
if (!next)
featureIterator.close
next
}
def next = {
featureIterator.next
}
}.toList
logger.debug("dates: {}", dates)
logger.debug(s"total points: ${features.size}")
logger.debug(s"unique points: ${features.groupBy(_.getDefaultGeometry).size}")
val weights = features.map(_.getProperty("weight").getValue.toString.toDouble)
logger.debug(s"total weight: ${weights.sum}")
logger.debug(s"max weight: ${weights.max}")
features.foreach {
f =>
val point = f.point
map.put(point.getY, point.getX, map.get(point.getY, point.getX) + f.getProperty("weight").getValue.toString.toDouble.toLong)
}
logger.debug(s"max joined weight: ${map.values().max}")
val output = new StringBuilder()
val df = new DecimalFormat("0")
map.rowMap().foreach {
case (rowIdx, cols) =>
cols.foreach {
case (colIdx, v) =>
if (v == 0) {
output.append(" ")
} else {
output.append(df.format(v))
}
}
output.append("\\n")
}
logger.trace(output.toString())
}
def getQuery(query: String, width: Int, height: Int): Query = {
val q = new Query(sftName, ECQL.toFilter(query))
val geom = q.getFilter.accept(ExtractBoundsFilterVisitor.BOUNDS_VISITOR, null).asInstanceOf[Envelope]
val env = new ReferencedEnvelope(geom, DefaultGeographicCRS.WGS84)
q.getHints.put(QueryHints.DENSITY_KEY, java.lang.Boolean.TRUE)
q.getHints.put(QueryHints.BBOX_KEY, env)
q.getHints.put(QueryHints.WIDTH_KEY, width)
q.getHints.put(QueryHints.HEIGHT_KEY, height)
// re-create the snap and populate each point
snap = new GridSnap(env, width, height)
var i = 0
while(i < width) {
var j = 0
while(j < height) {
map.put(snap.y(j), snap.x(i), 0)
j = j + 1
}
i = i + 1
}
q
}
"AccumuloDataStore" should {
"connect to accumulo" in {
skipped("Meant for integration testing")
val ds = getDataStore
val query = getQuery(s"(dtg between $dates) and BBOX(geom, ${bbox.lowx}, ${bbox.lowy}, ${bbox.highx}, ${bbox.highy})", size.width, size.height)
// get the feature store used to query the GeoMesa data
val featureStore = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]
// execute the query
val results = featureStore.getFeatures(query)
// loop through all results
printFeatures(results.features)
success
}
}
}
|
nhambletCCRI/geomesa
|
geomesa-core/src/test/scala/org/locationtech/geomesa/core/iterators/LiveDensityIteratorTest.scala
|
Scala
|
apache-2.0
| 5,564
|
/**
* http://www.codechef.com/SPT2015/problems/SPIT3
* GitHub: https://github.com/amezhenin/codechef_problems
*/
object Main {
/**
   * Check out https://github.com/amezhenin/codechef_scala_template to test your solutions with sbt-doctest
* {{{
* >>> Main.alg("abcdefgh")
* false
*
* >>> Main.alg("SPIT_Coders_Club_2.0")
* true
*
* }}}
* */
def alg(a: String): Boolean = {
a.length >= 5 &&
a.exists(x => x >= 'a' && x <= 'z') &&
a.exists(x => x >= 'A' && x <= 'Z') &&
a.exists(x => x >= '0' && x <= '9')
}
def main(args : Array[String]) = {
val res = alg(readLine())
if (res) println("YES")
else println("NO")
}
}
|
amezhenin/codechef_problems
|
contests/SPT2015/spit3.scala
|
Scala
|
mit
| 685
|
package dbis.pig.cep.nfa
import scala.reflect.ClassTag
import scala.collection.mutable.ListBuffer
import dbis.pig.backends.{SchemaClass => Event}
/**
* @brief a controller class to construct the NFA for detecting the complex event.
* The user should create the states, edges and transitions by calling particular methods
*/
class NFAController[T <: Event: ClassTag] extends Serializable {
/**
* an edge counter to assign a unique id for each edge
*/
val edgeID = { var eid = 0; () => { eid += 1; eid } }
/**
* a state counter to assign a unique id for each state
*/
val stateID = { var sid = 0; () => { sid += 1; sid } }
/**
* a list to store the transitions between the states.
* Each transition must contain an edge which has a predicate to evaluate
*/
var transitions: ListBuffer[ForwardEdge[T]] = new ListBuffer()
/**
   * a list to store all states except the final, Kleene, negated, and start states
*/
var normalStates: ListBuffer[NormalState[T]] = new ListBuffer()
/**
* a list to store the final states. In any NFA,
   * we can have multiple states of this type.
*/
var finalStates: ListBuffer[FinalState[T]] = new ListBuffer()
/**
* the start state for this NFA
*/
var startState: StartState[T] = null
/**
* creates the start state for this NFA and assigns its name
* @param name the name of the start state
*/
def createAndGetStartState(name: String): StartState[T] = {
startState = new StartState(stateID(), Some(name))
startState
}
/**
   * creates a normal state for this NFA and assigns its name; this state must be neither
   * a final state, a start state, nor a Kleene state
* @param name the name of this normal state
* @return a pointer to the normal state
*/
def createAndGetNormalState(name: String): NormalState[T] = {
val normalState = new NormalState(stateID(), Some(name))
normalStates += normalState
normalState
}
/**
   * creates a final state for this NFA and assigns its name
* @param name the name of the final state
* @return a pointer to the final state
*/
def createAndGetFinalState(name: String): FinalState[T] = {
val finalState = new FinalState(stateID(), Some(name))
finalStates += finalState
finalState
}
/**
* creates a forward edge for this NFA for a given predicate
* @param predicate the predicate of this edge
* @return a pointer to a forward edge
*/
def createAndGetForwardEdge(predicate: (T) => Boolean): ForwardEdge[T] = {
val transition = new ForwardEdge(predicate, edgeID(), None)
transitions += transition
transition
}
/**
* creates a forward transition for this NFA between two states via an edge
* @param src the source state of this transition
* @param dest the destination state of this transition
* @param edge an edge to connect both the source and destination nodes
*/
def createForwardTransition(src: State[T], edge: Edge[T], dest: State[T]): Unit = {
val forwardEdge = edge.asInstanceOf[ForwardEdge[T]]
val srcState = src.asInstanceOf[NormalState[T]]
forwardEdge.setDestState(dest)
srcState.addEdge(forwardEdge)
}
/**
* get a pointer to the start state
* @return as above
*/
def getStartState = startState
/**
* get the id of the start state
* @return as above
*/
def getStartStateID(): Int = startState.id
}
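/**
 * Illustrative sketch only (not part of the original file): wiring a simple two-step pattern
 * "A followed by B" with the controller above. The event predicates are supplied by the
 * caller; it is assumed (as the cast in createForwardTransition suggests) that the start
 * state can serve as the source of a forward transition.
 */
object NFAControllerExample {
  def sequenceOf[T <: Event: ClassTag](isA: T => Boolean, isB: T => Boolean): NFAController[T] = {
    val nfa = new NFAController[T]
    val start = nfa.createAndGetStartState("start")
    val sawA = nfa.createAndGetNormalState("sawA")
    val accepted = nfa.createAndGetFinalState("accepted")
    nfa.createForwardTransition(start, nfa.createAndGetForwardEdge(isA), sawA)
    nfa.createForwardTransition(sawA, nfa.createAndGetForwardEdge(isB), accepted)
    nfa
  }
}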
|
ksattler/piglet
|
ceplib/src/main/scala/dbis/pig/cep/nfa/NFAController.scala
|
Scala
|
apache-2.0
| 3,403
|
/*
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.mq.fabric
import org.osgi.service.cm.ConfigurationException
import org.osgi.service.cm.ManagedServiceFactory
import org.slf4j.LoggerFactory
import reflect.BeanProperty
import java.util.{Properties, Dictionary}
import java.util.concurrent.atomic.{AtomicInteger, AtomicBoolean}
import org.springframework.core.io.Resource
import org.apache.activemq.spring.Utils
import org.apache.xbean.spring.context.ResourceXmlApplicationContext
import org.springframework.beans.factory.xml.XmlBeanDefinitionReader
import java.beans.PropertyEditorManager
import java.net.{URL, URI}
import org.apache.xbean.spring.context.impl.URIEditor
import org.springframework.beans.factory.FactoryBean
import org.apache.activemq.util.IntrospectionSupport
import org.apache.activemq.broker.BrokerService
import scala.collection.JavaConversions._
import java.lang.{ThreadLocal, Thread}
import org.apache.activemq.ActiveMQConnectionFactory
import org.apache.activemq.spring.SpringBrokerContext
import org.osgi.framework.{ServiceReference, ServiceRegistration, BundleContext}
import org.apache.activemq.network.DiscoveryNetworkConnector
import collection.mutable
import org.apache.curator.framework.CuratorFramework
import org.fusesource.mq.fabric.FabricDiscoveryAgent.ActiveMQNode
import org.fusesource.fabric.groups.{Group, GroupListener}
import GroupListener.GroupEvent
import org.fusesource.fabric.api.FabricService
import org.apache.xbean.classloader.MultiParentClassLoader
import org.osgi.util.tracker.{ServiceTrackerCustomizer, ServiceTracker}
object ActiveMQServiceFactory {
final val LOG= LoggerFactory.getLogger(classOf[ActiveMQServiceFactory])
final val CONFIG_PROPERTIES = new ThreadLocal[Properties]()
PropertyEditorManager.registerEditor(classOf[URI], classOf[URIEditor])
def info(str: String, args: AnyRef*) = if (LOG.isInfoEnabled) {
LOG.info(String.format(str, args:_*))
}
def debug(str: String, args: AnyRef*) = if (LOG.isDebugEnabled) {
LOG.debug(String.format(str, args:_*))
}
def warn(str: String, args: AnyRef*) = if (LOG.isWarnEnabled) {
LOG.warn(String.format(str, args:_*))
}
implicit def toProperties(properties: Dictionary[_, _]) = {
var props: Properties = new Properties
var ek = properties.keys
while (ek.hasMoreElements) {
var key = ek.nextElement
var value = properties.get(key)
props.put(key.toString, if (value != null) value.toString else "")
}
props
}
def arg_error[T](msg:String):T = {
throw new IllegalArgumentException(msg)
}
def createBroker(uri: String, properties:Properties) = {
CONFIG_PROPERTIES.set(properties)
try {
val classLoader = new MultiParentClassLoader("xbean", Array[URL](), Array[ClassLoader](this.getClass.getClassLoader, classOf[BrokerService].getClassLoader))
Thread.currentThread().setContextClassLoader(classLoader)
var resource: Resource = Utils.resourceFromString(uri)
val ctx = new ResourceXmlApplicationContext((resource)) {
protected override def initBeanDefinitionReader(reader: XmlBeanDefinitionReader): Unit = {
reader.setValidating(false)
}
}
var names: Array[String] = ctx.getBeanNamesForType(classOf[BrokerService])
val broker = names.flatMap{ name=> Option(ctx.getBean(name).asInstanceOf[BrokerService]) }.headOption.getOrElse(arg_error("Configuration did not contain a BrokerService"))
val networks = Option(properties.getProperty("network")).getOrElse("").split(",")
networks.foreach {name =>
if (!name.isEmpty) {
LOG.info("Adding network connector " + name)
val nc = new DiscoveryNetworkConnector(new URI("fabric:" + name))
nc.setName("fabric-" + name)
// copy properties as IntrospectionSupport removes them
val network_properties = new mutable.HashMap[String, Object]()
network_properties.putAll(properties.asInstanceOf[java.util.Map[String, String]])
IntrospectionSupport.setProperties(nc, network_properties, "network.")
broker.addNetworkConnector(nc)
}
}
var brokerContext = new SpringBrokerContext
brokerContext.setConfigurationUrl(resource.getURL.toExternalForm)
brokerContext.setApplicationContext(ctx)
broker.setBrokerContext(brokerContext)
(ctx, broker, resource)
} finally {
CONFIG_PROPERTIES.remove()
}
}
}
class ConfigurationProperties extends FactoryBean[Properties] {
def getObject = new Properties(ActiveMQServiceFactory.CONFIG_PROPERTIES.get())
def getObjectType = classOf[Properties]
def isSingleton = false
}
class ActiveMQServiceFactory(bundleContext: BundleContext) extends ManagedServiceFactory
with ServiceTrackerCustomizer[CuratorFramework,CuratorFramework] {
import ActiveMQServiceFactory._
//
// Pool management
//
var owned_pools = Set[String]()
def can_own_pool(cc:ClusteredConfiguration) = this.synchronized {
if( cc.pool==null )
true
else
!owned_pools.contains(cc.pool)
}
def take_pool(cc:ClusteredConfiguration) = this.synchronized {
if( cc.pool==null ) {
true
} else {
if( owned_pools.contains(cc.pool) ) {
false
} else {
owned_pools += cc.pool
fire_pool_change(cc)
true
}
}
}
def return_pool(cc:ClusteredConfiguration) = this.synchronized {
if( cc.pool!=null ) {
owned_pools -= cc.pool
fire_pool_change(cc)
}
}
def fire_pool_change(cc:ClusteredConfiguration) = {
new Thread(){
override def run() {
ActiveMQServiceFactory.this.synchronized {
configurations.values.foreach { c=>
if ( c!=cc && c.pool == cc.pool ) {
c.update_pool_state()
}
}
}
}
}.start()
}
case class ClusteredConfiguration(properties:Properties) {
val name = Option(properties.getProperty("broker-name")).getOrElse(System.getProperty("karaf.name"))
val data = Option(properties.getProperty("data")).getOrElse("data" + System.getProperty("file.separator") + name)
val config = Option(properties.getProperty("config")).getOrElse(arg_error("config property must be set"))
val group = Option(properties.getProperty("group")).getOrElse("default")
val pool = Option(properties.getProperty("standby.pool")).getOrElse("default")
    val connectors = Option(properties.getProperty("connectors")).getOrElse("").split("""\s""")
val replicating:Boolean = "true".equalsIgnoreCase(Option(properties.getProperty("replicating")).getOrElse("false"))
val standalone:Boolean = "true".equalsIgnoreCase(Option(properties.getProperty("standalone")).getOrElse("false"))
val registerService:Boolean = "true".equalsIgnoreCase(Option(properties.getProperty("registerService")).getOrElse("true"))
val config_check = "true".equalsIgnoreCase(Option(properties.getProperty("config.check")).getOrElse("true"))
val started = new AtomicBoolean
val startAttempt = new AtomicInteger
var pool_enabled = false
def update_pool_state() = this.synchronized {
val value = can_own_pool(this)
if( pool_enabled != value ) {
pool_enabled = value
if( value ) {
if( pool!=null ) {
info("Broker %s added to pool %s.", name, pool)
}
discoveryAgent.start()
} else {
if( pool!=null ) {
info("Broker %s removed from pool %s.", name, pool)
}
discoveryAgent.stop()
}
}
}
var discoveryAgent:FabricDiscoveryAgent = null
var start_thread:Thread = _
var stop_thread:Thread = _
@volatile
var server:(ResourceXmlApplicationContext, BrokerService, Resource) = _
var cfServiceRegistration:ServiceRegistration[_] = null
var last_modified:Long = -1
def updateCurator(curator: CuratorFramework) = {
if (discoveryAgent != null) {
discoveryAgent.stop()
discoveryAgent = null
if (started.compareAndSet(true, false)) {
info("Lost zookeeper service for broker %s, stopping the broker.", name)
stop()
waitForStop()
return_pool(this)
pool_enabled = false
}
}
waitForStop()
if (curator != null) {
info("Found zookeeper service for broker %s.", name)
discoveryAgent = new FabricDiscoveryAgent
discoveryAgent.setAgent(System.getProperty("karaf.name"))
discoveryAgent.setId(name)
discoveryAgent.setGroupName(group)
discoveryAgent.setCurator(curator)
if (replicating) {
discoveryAgent.start()
if (started.compareAndSet(false, true)) {
info("Replicating broker %s is starting.", name)
start()
}
} else {
discoveryAgent.getGroup.add(new GroupListener[ActiveMQNode]() {
def groupEvent(group: Group[ActiveMQNode], event: GroupEvent) {
if (event.equals(GroupEvent.CONNECTED) || event.equals(GroupEvent.CHANGED)) {
if (discoveryAgent.getGroup.isMaster(name)) {
if (started.compareAndSet(false, true)) {
if (take_pool(ClusteredConfiguration.this)) {
info("Broker %s is now the master, starting the broker.", name)
start()
} else {
update_pool_state()
started.set(false)
}
}
} else {
if (started.compareAndSet(true, false)) {
return_pool(ClusteredConfiguration.this)
info("Broker %s is now a slave, stopping the broker.", name)
stop()
} else {
if (event.equals(GroupEvent.CHANGED)) {
info("Broker %s is slave", name)
discoveryAgent.setServices(Array[String]())
}
}
}
} else {
info("Disconnected from the group", name)
discoveryAgent.setServices(Array[String]())
}
}
})
info("Broker %s is waiting to become the master", name)
update_pool_state()
}
}
}
def ensure_broker_name_is_set = {
if (!properties.containsKey("broker-name")) {
properties.setProperty("broker-name", name)
}
if (!properties.containsKey("data")) {
properties.setProperty("data", data)
}
}
ensure_broker_name_is_set
if (standalone) {
if (started.compareAndSet(false, true)) {
info("Standalone broker %s is starting.", name)
start()
}
} else {
updateCurator(curator)
}
def close() = {
this.synchronized {
if( discoveryAgent!=null ) {
discoveryAgent.stop()
}
if( pool_enabled ) {
return_pool(ClusteredConfiguration.this)
}
if(started.compareAndSet(true, false)) {
stop()
}
}
waitForStop()
}
def osgiRegister(broker: BrokerService): Unit = {
val connectionFactory = new ActiveMQConnectionFactory("vm://" + broker.getBrokerName + "?create=false")
cfServiceRegistration = bundleContext.registerService(classOf[javax.jms.ConnectionFactory].getName, connectionFactory, mutable.HashMap("name" -> broker.getBrokerName))
debug("registerService of type " + classOf[javax.jms.ConnectionFactory].getName + " as: " + connectionFactory + " with name: " + broker.getBrokerName + "; " + cfServiceRegistration)
}
def osgiUnregister(broker: BrokerService): Unit = {
if (cfServiceRegistration != null) cfServiceRegistration.unregister()
debug("unregister connection factory for: " + broker.getBrokerName + "; " + cfServiceRegistration)
}
def start() = this.synchronized {
// Startup async so that we do not block the ZK event thread.
info("Broker %s is being started.", name)
if (start_thread == null) {
start_thread = new Thread("Startup for ActiveMQ Broker-" + startAttempt.incrementAndGet() + ": "+name) {
override def run() {
waitForStop()
doStart()
}
}
start_thread.start()
}
}
def stop() = this.synchronized {
info("Broker %s is being stopped.", name)
if (stop_thread == null) {
stop_thread = new Thread("Stop for ActiveMQ Broker: "+name) {
override def run() {
interruptAndWaitForStart()
doStop()
ClusteredConfiguration.this.synchronized { stop_thread = null }
}
}
stop_thread.start()
}
}
private def doStart() {
var start_failure:Throwable = null
try {
// If we are in a fabric, let pass along the zk password in the props.
val fs = fabricService.getService
if( fs != null ) {
val container = fs.getCurrentContainer
if( !properties.containsKey("container.id") ) {
properties.setProperty("container.id", container.getId)
}
if( !properties.containsKey("container.ip") ) {
properties.setProperty("container.ip", container.getIp)
}
if( !properties.containsKey("zookeeper.url") ) {
properties.setProperty("zookeeper.url", fs.getZookeeperUrl)
}
if( !properties.containsKey("zookeeper.password") ) {
properties.setProperty("zookeeper.password", fs.getZookeeperPassword)
}
}
// ok boot up the server..
server = createBroker(config, properties)
// configure ports
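      // Each transport connector's port can be overridden with a property named "<connector-name>-port"
      // (e.g. an "openwire-port" property; the name is only an example). Only the port component of the
      // connector's configured URI is replaced.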
server._2.getTransportConnectors.foreach {
t => {
val portKey = t.getName + "-port"
if (properties.containsKey(portKey)) {
val template = t.getUri
t.setUri(new URI(template.getScheme, template.getUserInfo, template.getHost,
Integer.valueOf("" + properties.get(portKey)),
template.getPath, template.getQuery, template.getFragment))
}
}
}
server._2.start()
info("Broker %s has started.", name)
server._2.waitUntilStarted
server._2.addShutdownHook(new Runnable(){
def run():Unit = {
// Start up the server again if it shutdown. Perhaps
// it has lost a Locker and wants a restart.
if(started.get && server!=null && server._2.isRestartAllowed && server._2.isRestartRequested){
info("restarting after shutdown on restart request")
start()
}
}
})
// Update the advertised endpoint URIs that clients can use.
if (!standalone || replicating) {
discoveryAgent.setServices( connectors.flatMap { name=>
val connector = server._2.getConnectorByName(name)
if ( connector==null ) {
warn("ActiveMQ broker '%s' does not have a connector called '%s'", name, name)
None
} else {
Some(connector.getConnectUri.getScheme + "://${zk:" + System.getProperty("karaf.name") + "/ip}:" + connector.getPublishableConnectURI.getPort)
}
})
}
if (registerService) osgiRegister(server._2)
} catch {
case e:Throwable =>
info("Broker %s failed to start. Will try again in 10 seconds", name)
LOG.error("Exception on start: " + e, e)
try {
Thread.sleep(1000 * 10)
} catch {
case ignore:InterruptedException =>
}
start_failure = e
} finally {
if(started.get && start_failure!=null){
start()
} else {
this.synchronized { start_thread = null }
if (server!=null && server._3!=null)
last_modified = server._3.lastModified()
}
}
}
private def doStop() {
val s = server // working with a volatile
if( s!=null ) {
try {
s._2.stop()
s._2.waitUntilStopped()
if (registerService) {
osgiUnregister(s._2)
}
} catch {
case e:Throwable => LOG.debug("Exception on stop: " + e, e)
}
try {
s._1.close()
} catch {
case e:Throwable => LOG.debug("Exception on close: " + e, e)
}
server = null
}
}
private def interruptAndWaitForStart() {
var t = this.synchronized { start_thread }
while (t != null) {
t.interrupt()
info("Waiting for thread " + t.getName)
t.join()
info("Thread " + t.getName + " finished")
t = this.synchronized { start_thread } // when the start up thread gives up trying to start this gets set to null.
}
}
private def waitForStop() {
var t = this.synchronized { stop_thread }
while (t != null ) {
info("Waiting for thread " + t.getName)
t.join()
info("Thread " + t.getName + " finished")
t = this.synchronized { stop_thread } // when the stop thread is done this gets set to null.
}
}
}
////////////////////////////////////////////////////////////////////////////
// Maintain a registry of configuration based on ManagedServiceFactory events.
////////////////////////////////////////////////////////////////////////////
val configurations = new mutable.HashMap[String, ClusteredConfiguration]
class ConfigThread extends Thread {
var running = true
override def run() {
while (running) {
configurations.values.foreach(c => {
try {
if (c.config_check && c.last_modified != -1 && c.server != null) {
val lm = c.server._3.lastModified()
if( lm != c.last_modified ) {
c.last_modified = lm
info("updating " + c.properties)
updated(c.properties.get("service.pid").asInstanceOf[String], c.properties.asInstanceOf[Dictionary[java.lang.String, _]])
}
}
} catch {
case t: Throwable => {}
}
})
try {
Thread.sleep(5 * 1000)
} catch {
case e : InterruptedException => {}
}
}
}
}
def updated(pid: String, properties: Dictionary[java.lang.String, _]): Unit = this.synchronized {
try {
deleted(pid)
configurations.put(pid, ClusteredConfiguration(properties))
} catch {
case e: Exception => throw new ConfigurationException(null, "Unable to parse ActiveMQ configuration: " + e.getMessage).initCause(e).asInstanceOf[ConfigurationException]
}
}
def deleted(pid: String): Unit = this.synchronized {
configurations.remove(pid).foreach(_.close())
}
def getName: String = "ActiveMQ Server Controller"
val config_thread : ConfigThread = new ConfigThread
config_thread.setName("ActiveMQ Configuration Watcher")
config_thread.start()
//
// Curator and FabricService tracking
//
val fabricService = new ServiceTracker[FabricService,FabricService](bundleContext, classOf[FabricService], null)
fabricService.open()
var curator: CuratorFramework = _
val boundCuratorRefs = new java.util.ArrayList[ServiceReference[CuratorFramework]]()
val curatorService = new ServiceTracker[CuratorFramework,CuratorFramework](bundleContext, classOf[CuratorFramework], this)
curatorService.open()
def addingService(reference: ServiceReference[CuratorFramework]): CuratorFramework = {
val curator = bundleContext.getService(reference)
boundCuratorRefs.add( reference )
java.util.Collections.sort( boundCuratorRefs )
val bind = boundCuratorRefs.get( 0 )
if( bind == reference )
bindCurator( curator )
else
bindCurator( curatorService.getService( bind ) )
curator
}
def modifiedService(reference: ServiceReference[CuratorFramework], service: CuratorFramework) = {
}
def removedService(reference: ServiceReference[CuratorFramework], service: CuratorFramework) = {
boundCuratorRefs.remove( reference )
if( boundCuratorRefs.isEmpty )
bindCurator( null )
else
bindCurator( curatorService.getService( boundCuratorRefs.get( 0 ) ) )
}
def bindCurator( curator: CuratorFramework ) = {
this.curator = curator
ActiveMQServiceFactory.this.synchronized {
configurations.values.foreach { c=>
c.updateCurator(curator)
}
}
}
//
// Lifecycle
//
def destroy(): Unit = this.synchronized {
config_thread.running = false
config_thread.interrupt()
config_thread.join()
configurations.keys.toArray.foreach(deleted)
fabricService.close()
curatorService.close()
}
}
|
janstey/fuse
|
mq/mq-fabric/src/main/scala/org/fusesource/mq/fabric/ActiveMQServiceFactory.scala
|
Scala
|
apache-2.0
| 21,537
|
package fpinscala.state
trait RNG {
def nextInt: (Int, RNG) // Should generate a random `Int`. We'll later define other functions in terms of `nextInt`.
}
object RNG {
case class SimpleRNG(seed: Long) extends RNG {
def nextInt: (Int, RNG) = {
val newSeed = (seed * 0x5DEECE66DL + 0xBL) & 0xFFFFFFFFFFFFL // `&` is bitwise AND. We use the current seed to generate a new seed.
val nextRNG = SimpleRNG(newSeed) // The next state, which is an `RNG` instance created from the new seed.
val n = (newSeed >>> 16).toInt // `>>>` is right binary shift with zero fill. The value `n` is our new pseudo-random integer.
(n, nextRNG) // The return value is a tuple containing both a pseudo-random integer and the next `RNG` state.
}
}
type Rand[+A] = RNG => (A, RNG)
val int: Rand[Int] = _.nextInt
val boolean: Rand[Boolean] = map(int)(_ % 2 == 0)
def unit[A](a: A): Rand[A] = rng => (a, rng)
def map[A, B](s: Rand[A])(f: A => B): Rand[B] =
rng => {
val (a, r1) = s(rng)
(f(a), r1)
}
// Exercise 1: Write a function that uses RNG.nextInt to generate a random integer between 0 and
// Int.MaxValue (inclusive).
def nonNegativeInt(rng: RNG): (Int, RNG) = {
val (i, r) = rng.nextInt
    (if (i < 0) -(i + 1) else i, r) // -(i + 1) avoids overflow when i == Int.MinValue
}
  // Exercise 2: Write a function to generate a Double between 0 and 1, not including 1
def double(rng: RNG): (Double, RNG) = {
val (i, r) = nonNegativeInt(rng)
((i % Int.MaxValue).toDouble / Int.MaxValue, r)
}
// Exercise 3: Write functions to generate an (Int, Double) pair, a (Double, Int) pair, and a
// (Double, Double, Double) 3-tuple. You should be able to reuse the functions you’ve
// already written.
def intDouble(rng: RNG): ((Int, Double), RNG) = {
val (i, r1) = rng.nextInt
val (d, r2) = double(r1)
((i, d), r2)
}
def doubleInt(rng: RNG): ((Double, Int), RNG) = {
val ((i, d), r) = intDouble(rng)
((d, i), r)
}
def double3(rng: RNG): ((Double, Double, Double), RNG) = {
val (d1, r1) = double(rng)
val (d2, r2) = double(r1)
val (d3, r3) = double(r2)
((d1, d2, d3), r3)
}
// Exercise 4: Write a function to generate a list of random integers.
def ints(count: Int)(rng: RNG): (List[Int], RNG) = {
@annotation.tailrec
def go(c: Int, rng: RNG, acc: List[Int]): (List[Int], RNG) = {
if (c > 0) {
val (i, r) = rng.nextInt
go(c - 1, r, i :: acc)
} else
(acc, rng)
}
go(count, rng, List.empty)
}
// Exercise 5: Use map to reimplement double in a more elegant way
val double2: Rand[Double] =
map(nonNegativeInt)(_ / (Int.MaxValue.toDouble + 1))
// Exercise 6: Write the implementation of map2
def map2[A, B, C](ra: Rand[A], rb: Rand[B])(f: (A, B) => C): Rand[C] =
r0 => {
val (a, r1) = ra(r0)
val (b, r2) = rb(r1)
(f(a, b), r2)
}
// Exercise 7: Implement sequence for combining a List of transitions into a single
// transition.
def sequence[A](fs: List[Rand[A]]): Rand[List[A]] =
rng => {
val (finalAs, finalR) = fs.foldLeft((List.empty[A], rng)) {
case ((as, r), ra) =>
val (a, rNext) = ra(r)
(a :: as, rNext)
}
        // foldLeft prepends, so the accumulated values come out reversed; reversing restores the order in which the transitions were run
(finalAs.reverse, finalR)
}
def ints2(count: Int): Rand[List[Int]] =
sequence(List.fill(count)(nonNegativeInt))
// Exercise 8: Implement flatMap, and then use it to implement nonNegativeLessThan.
def flatMap[A, B](f: Rand[A])(g: A => Rand[B]): Rand[B] =
rng => {
val (a, r1) = f(rng)
g(a)(r1)
}
def nonNegativeLessThan(n: Int): Rand[Int] =
flatMap(nonNegativeInt) { i =>
val mod = i % n
if (i + (n - 1) - mod >= 0) unit(mod) else nonNegativeLessThan(n)
}
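  // Note on the guard above: when i lands in the final, partial block of n-sized ranges near
  // Int.MaxValue, i + (n - 1) - mod overflows to a negative value and we retry, avoiding a skew
  // toward small remainders.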
// Exercise 9: Reimplement map and map2 in terms of flatMap
def mapViaFlatMap[A, B](s: Rand[A])(f: A => B): Rand[B] =
flatMap(s)(a => unit(f(a)))
def map2ViaFlatMap[A, B, C](ra: Rand[A], rb: Rand[B])(
f: (A, B) => C): Rand[C] =
flatMap(ra) { a =>
map(rb) { b =>
f(a, b)
}
}
}
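// A small usage sketch added for illustration (not part of the original exercises): it shows how
// RNG state is threaded explicitly and through the Rand combinators. The seed 42 and the object
// name RNGDemo are arbitrary choices.
object RNGDemo {
  import RNG._
  def main(args: Array[String]): Unit = {
    val rng = SimpleRNG(42)
    val (n, rng2) = nonNegativeInt(rng)              // run a single state transition
    val (xs, rng3) = ints(5)(rng2)                   // thread the returned RNG into the next call
    val (pair, _) = map2(int, double2)((_, _))(rng3) // combine two transitions without manual threading
    println(s"n=$n xs=$xs pair=$pair")
  }
}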
// Exercise 10: Generalize the functions unit , map , map2 , flatMap , and sequence . Add them as methods on the State case class where possible.
// Otherwise you should put them in a State companion object.
case class State[S, +A](run: S => (A, S)) {
def map[B](f: A => B): State[S, B] =
flatMap(a => State.unit(f(a)))
def map2[B, C](sb: State[S, B])(f: (A, B) => C): State[S, C] =
flatMap(a => sb.map(b => f(a, b)))
def flatMap[B](f: A => State[S, B]): State[S, B] =
State { s =>
val (a, s1) = run(s)
f(a).run(s1)
}
}
object State {
type Rand[A] = State[RNG, A]
def unit[S, A](a: A): State[S, A] =
State(s => (a, s))
def sequence[S, A](ss: List[State[S, A]]): State[S, List[A]] =
ss.foldRight(unit[S, List[A]](List.empty)) { (s, acc) =>
s.map2(acc)(_ :: _)
}
def modify[S](f: S => S): State[S, Unit] =
for {
s <- get // Gets the current state and assigns it to `s`.
_ <- set(f(s)) // Sets the new state to `f` applied to `s`.
} yield ()
def get[S]: State[S, S] = State(s => (s, s))
def set[S](s: S): State[S, Unit] = State(_ => ((), s))
// Exercise 11: To gain experience with the use of State, implement a finite state automaton that models a simple candy dispenser.
// The machine has two types of input: you can insert a coin, or you can turn the knob to dispense candy.
// It can be in one of two states: locked or unlocked. It also tracks how many candies are left and how many coins it contains.
sealed trait Input
case object Coin extends Input
case object Turn extends Input
case class Machine(locked: Boolean, candies: Int, coins: Int)
def input(i: Input)(m: Machine): Machine =
(m, i) match {
case (Machine(_, 0, _), _) => m
case (Machine(true, _, _), Turn) => m
case (Machine(false, _, _), Coin) => m
case (Machine(true, ca, co), Coin) => Machine(locked = false, ca, co + 1)
case (Machine(false, ca, co), Turn) => Machine(locked = true, ca - 1, co)
}
def simulateMachine(inputs: List[Input]): State[Machine, (Int, Int)] =
for {
_ <- State.sequence(inputs.map(i => State.modify(input(i))))
s <- State.get
} yield (s.candies, s.coins)
}
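// A usage sketch added for illustration (not part of the original file): simulating the candy
// dispenser from Exercise 11. Starting from a locked machine with 5 candies and 10 coins,
// inserting a coin and turning the knob four times should leave 1 candy and 14 coins.
object CandyMachineDemo {
  import State._
  def main(args: Array[String]): Unit = {
    val inputs: List[Input] = List.fill(4)(List(Coin, Turn)).flatten
    val ((candies, coins), finalMachine) =
      simulateMachine(inputs).run(Machine(locked = true, candies = 5, coins = 10))
    println(s"candies=$candies coins=$coins locked=${finalMachine.locked}") // expected: candies=1 coins=14 locked=true
  }
}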
|
goboss/fpinscala
|
exercises/src/main/scala/fpinscala/state/State.scala
|
Scala
|
mit
| 6,353
|
package com.eclipsesource.schema.internal.draft4.constraints
import com.eclipsesource.schema.{SchemaMap, SchemaObject, SchemaProp, SchemaResolutionContext, SchemaType, SchemaValue}
import com.eclipsesource.schema.internal.constraints.Constraints._
import com.eclipsesource.schema.internal.validation.VA
import com.osinka.i18n.Lang
import play.api.libs.json.{JsNumber, JsObject, JsValue}
import scalaz.std.option._
import scalaz.std.set._
import scalaz.syntax.semigroup._
import scalaz.Success
import com.eclipsesource.schema.internal._
case class ObjectConstraints4(additionalProps: Option[SchemaType] = None,
dependencies: Option[Map[String, SchemaType]] = None,
patternProps: Option[Map[String, SchemaType]] = None,
required: Option[Seq[String]] = None,
minProperties: Option[Int] = None,
maxProperties: Option[Int] = None,
any: AnyConstraints = AnyConstraints4()
) extends HasAnyConstraint with ObjectConstraints {
type A = ObjectConstraints4
import com.eclipsesource.schema.internal.validators.ObjectValidators._
override def subSchemas: Set[SchemaType] =
(additionalProps.map(Set(_)) |+| dependencies.map(_.values.toSet) |+| patternProps.map(_.values.toSet))
.getOrElse(Set.empty[SchemaType]) ++ any.subSchemas
override def resolvePath(path: String): Option[SchemaType] = path match {
case Keywords.Object.AdditionalProperties => additionalProps
case Keywords.Object.Dependencies => dependencies.map(entries =>
SchemaMap(Keywords.Object.Dependencies, entries.toSeq.map(e => SchemaProp(e._1, e._2)))
)
case Keywords.Object.PatternProperties => patternProps.map(patternProps => SchemaMap(
Keywords.Object.PatternProperties,
patternProps.toSeq.map(e => SchemaProp(e._1, e._2)))
)
case Keywords.Object.MinProperties => minProperties.map(min => SchemaValue(JsNumber(min)))
case Keywords.Object.MaxProperties => maxProperties.map(max => SchemaValue(JsNumber(max)))
case other => any.resolvePath(other)
}
override def validate(schema: SchemaType, json: JsValue, context: SchemaResolutionContext)
(implicit lang: Lang): VA[JsValue] =
(schema, json) match {
case (obj@SchemaObject(_, _, _), jsObject@JsObject(_)) =>
val validation = for {
_ <- validateDependencies(schema, dependencies, jsObject)
remaining <- validateProps(obj.properties, required, jsObject)
unmatched <- validatePatternProps(patternProps, jsObject.fields)
_ <- validateAdditionalProps(additionalProps, unmatched.intersect(remaining), json)
_ <- validateMinProperties(minProperties, jsObject)
_ <- validateMaxProperties(maxProperties, jsObject)
} yield schema
val (_, _, result) = validation.run(context, Success(json))
result
case _ => Success(json)
}
}
object ObjectConstraints4 {
def emptyObject: SchemaType = SchemaObject(Seq.empty, ObjectConstraints4())
}
|
edgarmueller/play-json-schema-validator
|
src/main/scala/com/eclipsesource/schema/internal/draft4/constraints/ObjectConstraints4.scala
|
Scala
|
apache-2.0
| 3,151
|
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.geojson.servlet
import java.io.Closeable
import java.util.concurrent.ConcurrentHashMap
import org.geotools.data.DataStore
import org.json4s.{DefaultFormats, Formats}
import org.locationtech.geomesa.geojson.GeoJsonGtIndex
import org.locationtech.geomesa.utils.cache.FilePersistence
import org.locationtech.geomesa.web.core.GeoMesaDataStoreServlet
import org.scalatra._
import org.scalatra.json.NativeJsonSupport
import scala.collection.mutable.ArrayBuffer
class GeoJsonServlet(val persistence: FilePersistence) extends GeoMesaDataStoreServlet with NativeJsonSupport {
// TODO GEOMESA-1452 ability to update features
// TODO GEOMESA-1452 ability to transform query responses
override val root: String = "geojson"
override protected implicit val jsonFormats: Formats = DefaultFormats
private val indexCache = new ConcurrentHashMap[DataStore, GeoJsonGtIndex]
before() {
contentType = formats("json")
}
/**
* Create a new index (i.e. schema)
*/
post("/index/:alias/:index/?") {
try {
withGeoJsonIndex((geoJsonIndex) => {
val index = params.get("index").orNull
if (index == null) {
BadRequest(GeoJsonServlet.NoIndex)
} else {
val points = params.get("points").map(java.lang.Boolean.valueOf).getOrElse(java.lang.Boolean.FALSE)
geoJsonIndex.createIndex(index, params.get("id"), params.get("date"), points)
Created()
}
})
} catch {
case e: IllegalArgumentException => BadRequest(e.getMessage)
case e: Exception => handleError(s"Error creating index:", e)
}
}
/**
* Delete an index and everything in it
*/
delete("/index/:alias/:index/?") {
try {
withGeoJsonIndex((geoJsonIndex) => {
val index = params.get("index").orNull
if (index == null) {
BadRequest(GeoJsonServlet.NoIndex)
} else {
geoJsonIndex.deleteIndex(index)
NoContent() // 204
}
})
} catch {
case e: IllegalArgumentException => BadRequest(e.getMessage)
case e: Exception => handleError(s"Error creating index:", e)
}
}
/**
* Add new features to an index
*/
post("/index/:alias/:index/features/?") {
try {
withGeoJsonIndex((geoJsonIndex) => {
val index = params.get("index").orNull
val json = request.body
if (index == null || json == null || json.isEmpty) {
val msg = ArrayBuffer.empty[String]
if (index == null) { msg.append(GeoJsonServlet.NoIndex) }
if (json == null || json.isEmpty) { msg.append(GeoJsonServlet.NoJson) }
BadRequest(msg.mkString(" "))
} else {
val ids = geoJsonIndex.add(index, json)
Ok(ids)
}
})
} catch {
case e: IllegalArgumentException => BadRequest(e.getMessage)
case e: Exception => handleError(s"Error adding features:", e)
}
}
/**
* Update existing features in an index
*/
put("/index/:alias/:index/features/?") {
try {
withGeoJsonIndex((geoJsonIndex) => {
val index = params.get("index").orNull
val json = request.body
if (index == null || json == null || json.isEmpty) {
val msg = ArrayBuffer.empty[String]
if (index == null) { msg.append(GeoJsonServlet.NoIndex) }
if (json == null || json.isEmpty) { msg.append(GeoJsonServlet.NoJson) }
BadRequest(msg.mkString(" "))
} else {
geoJsonIndex.update(index, json)
Ok()
}
})
} catch {
case e: IllegalArgumentException => BadRequest(e.getMessage)
case e: Exception => handleError(s"Error updating features:", e)
}
}
/**
* Update existing features in an index
*/
put("/index/:alias/:index/features/:fid") {
try {
withGeoJsonIndex((geoJsonIndex) => {
val index = params.get("index").orNull
val json = request.body
if (index == null || json == null || json.isEmpty) {
val msg = ArrayBuffer.empty[String]
if (index == null) { msg.append(GeoJsonServlet.NoIndex) }
if (json == null || json.isEmpty) { msg.append(GeoJsonServlet.NoJson) }
BadRequest(msg.mkString(" "))
} else {
val fids = params("fid").split(",") // can't match this path without a fid
geoJsonIndex.update(index, fids, json)
Ok()
}
})
} catch {
case e: IllegalArgumentException => BadRequest(e.getMessage)
case e: Exception => handleError(s"Error updating features:", e)
}
}
/**
* Delete features from an index
*/
delete("/index/:alias/:index/features/:fid") {
try {
withGeoJsonIndex((geoJsonIndex) => {
val index = params.get("index").orNull
if (index == null) {
BadRequest(GeoJsonServlet.NoIndex)
} else {
val fids = params("fid").split(",") // can't match this path without a fid
geoJsonIndex.delete(index, fids)
Ok()
}
})
} catch {
case e: IllegalArgumentException => BadRequest(e.getMessage)
case e: Exception => handleError(s"Error adding features:", e)
}
}
/**
* Query features in an index
*/
get("/index/:alias/:index/features/?") {
try {
withGeoJsonIndex((geoJsonIndex) => {
val index = params.get("index").orNull
if (index == null) {
BadRequest(GeoJsonServlet.NoIndex)
} else {
val query = params.get("q").getOrElse("")
outputFeatures(geoJsonIndex.query(index, query))
Unit // return Unit to indicate we've processed the response
}
})
} catch {
case e: IllegalArgumentException => BadRequest(e.getMessage)
case e: Exception => handleError(s"Error querying features:", e)
}
}
/**
* Query a single feature in an index
*/
get("/index/:alias/:index/features/:fid") {
try {
withGeoJsonIndex((geoJsonIndex) => {
val index = params.get("index").orNull
if (index == null) {
BadRequest(GeoJsonServlet.NoIndex)
} else {
val fids = params("fid").split(",") // can't match this path without a fid
val features = geoJsonIndex.get(index, fids)
if (features.isEmpty) { NotFound() } else {
outputFeatures(features)
Unit // return Unit to indicate we've processed the response
}
}
})
} catch {
case e: IllegalArgumentException => BadRequest(e.getMessage)
case e: Exception => handleError(s"Error querying features:", e)
}
}
private def withGeoJsonIndex[T](method: (GeoJsonGtIndex) => T): Any = {
withDataStore((ds: DataStore) => {
val index = Option(indexCache.get(ds)).getOrElse {
val index = new GeoJsonGtIndex(ds)
indexCache.put(ds, index)
index
}
method(index)
})
}
private def outputFeatures(features: Iterator[String] with Closeable): Unit = {
try {
response.setStatus(200)
val output = response.getOutputStream
output.print("""{"type":"FeatureCollection","features":[""")
if (features.hasNext) {
output.print(features.next)
while (features.hasNext) {
output.print(',')
output.print(features.next)
}
}
output.print("]}")
} finally {
features.close()
}
}
}
object GeoJsonServlet {
val NoIndex = "Index name not specified."
val NoJson = "GEOJSON not specified."
}
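// Illustrative request sketch for the routes above (the "mystore"/"myindex" values and the mount
// point are hypothetical; they depend on how the servlet and the data store alias are configured):
//   POST   /geojson/index/mystore/myindex                 create an index (optional params: id, date, points)
//   POST   /geojson/index/mystore/myindex/features        add features (request body = GeoJSON)
//   PUT    /geojson/index/mystore/myindex/features/fid1   update features by id
//   GET    /geojson/index/mystore/myindex/features?q=...  query features
//   DELETE /geojson/index/mystore/myindex                  delete the index and everything in it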
|
ddseapy/geomesa
|
geomesa-geojson/geomesa-geojson-rest/src/main/scala/org/locationtech/geomesa/geojson/servlet/GeoJsonServlet.scala
|
Scala
|
apache-2.0
| 8,035
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.broadcast
import scala.reflect.ClassTag
import org.apache.spark.SecurityManager
import org.apache.spark.SparkConf
/**
* An interface for all the broadcast implementations in Spark (to allow
* multiple broadcast implementations). SparkContext uses a BroadcastFactory
* implementation to instantiate a particular broadcast for the entire Spark job.
*/
private[spark] trait BroadcastFactory {
def initialize(isDriver: Boolean, conf: SparkConf, securityMgr: SecurityManager): Unit
/**
* Creates a new broadcast variable.
*
* @param value value to broadcast
* @param isLocal whether we are in local mode (single JVM process)
* @param id unique id representing this broadcast variable
*/
def newBroadcast[T: ClassTag](value: T, isLocal: Boolean, id: Long): Broadcast[T]
def unbroadcast(id: Long, removeFromDriver: Boolean, blocking: Boolean): Unit
def stop(): Unit
}
|
bravo-zhang/spark
|
core/src/main/scala/org/apache/spark/broadcast/BroadcastFactory.scala
|
Scala
|
apache-2.0
| 1,727
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package kafka.admin
import java.util.Collections
import java.util.Properties
import kafka.admin.ReassignPartitionsCommand._
import kafka.common.{AdminCommandFailedException, TopicAndPartition}
import kafka.server.{KafkaConfig, KafkaServer}
import kafka.utils.TestUtils._
import kafka.utils.ZkUtils._
import kafka.utils.{Logging, TestUtils, ZkUtils}
import kafka.zk.ZooKeeperTestHarness
import org.junit.Assert.{assertEquals, assertTrue}
import org.junit.{After, Before, Test}
import kafka.admin.ReplicationQuotaUtils._
import org.apache.kafka.clients.admin.AdminClientConfig
import org.apache.kafka.clients.admin.{AdminClient => JAdminClient}
import org.apache.kafka.common.TopicPartitionReplica
import scala.collection.JavaConverters._
import scala.collection.Map
import scala.collection.Seq
import scala.util.Random
import java.io.File
class ReassignPartitionsClusterTest extends ZooKeeperTestHarness with Logging {
val partitionId = 0
var servers: Seq[KafkaServer] = null
val topicName = "my-topic"
val delayMs = 1000
var adminClient: JAdminClient = null
def zkUpdateDelay(): Unit = Thread.sleep(delayMs)
@Before
override def setUp() {
super.setUp()
}
def startBrokers(brokerIds: Seq[Int]) {
servers = brokerIds.map(i => createBrokerConfig(i, zkConnect, logDirCount = 3))
.map(c => createServer(KafkaConfig.fromProps(c)))
}
def createAdminClient(servers: Seq[KafkaServer]): JAdminClient = {
val props = new Properties()
props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, TestUtils.getBrokerListStrFromServers(servers))
props.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "10000")
JAdminClient.create(props)
}
def getRandomLogDirAssignment(brokerId: Int): String = {
val server = servers.find(_.config.brokerId == brokerId).get
val logDirs = server.config.logDirs
new File(logDirs(Random.nextInt(logDirs.size))).getAbsolutePath
}
@After
override def tearDown() {
if (adminClient != null) {
adminClient.close()
adminClient = null
}
TestUtils.shutdownServers(servers)
super.tearDown()
}
@Test
def shouldMoveSinglePartition(): Unit = {
//Given a single replica on server 100
startBrokers(Seq(100, 101))
adminClient = createAdminClient(servers)
val partition = 0
// Get a random log directory on broker 101
val expectedLogDir = getRandomLogDirAssignment(101)
createTopic(zkUtils, topicName, Map(partition -> Seq(100)), servers = servers)
//When we move the replica on 100 to broker 101
val topicJson: String = s"""{"version":1,"partitions":[{"topic":"$topicName","partition":0,"replicas":[101],"log_dirs":["$expectedLogDir"]}]}"""
ReassignPartitionsCommand.executeAssignment(zkUtils, Some(adminClient), topicJson, NoThrottle)
waitForReassignmentToComplete()
//Then the replica should be on 101
assertEquals(Seq(101), zkUtils.getPartitionAssignmentForTopics(Seq(topicName)).get(topicName).get(partition))
// The replica should be in the expected log directory on broker 101
val replica = new TopicPartitionReplica(topicName, 0, 101)
assertEquals(expectedLogDir, adminClient.describeReplicaLogDirs(Collections.singleton(replica)).all().get.get(replica).getCurrentReplicaLogDir)
}
@Test
def shouldMoveSinglePartitionWithinBroker() {
// Given a single replica on server 100
startBrokers(Seq(100, 101))
adminClient = createAdminClient(servers)
val expectedLogDir = getRandomLogDirAssignment(100)
createTopic(zkUtils, topicName, Map(0 -> Seq(100)), servers = servers)
// When we execute an assignment that moves an existing replica to another log directory on the same broker
val topicJson: String = s"""{"version":1,"partitions":[{"topic":"$topicName","partition":0,"replicas":[100],"log_dirs":["$expectedLogDir"]}]}"""
ReassignPartitionsCommand.executeAssignment(zkUtils, Some(adminClient), topicJson, NoThrottle)
val replica = new TopicPartitionReplica(topicName, 0, 100)
TestUtils.waitUntilTrue(() => {
expectedLogDir == adminClient.describeReplicaLogDirs(Collections.singleton(replica)).all().get.get(replica).getCurrentReplicaLogDir
}, "Partition should have been moved to the expected log directory", 1000)
}
@Test
def shouldExpandCluster() {
val brokers = Array(100, 101, 102)
startBrokers(brokers)
adminClient = createAdminClient(servers)
createTopic(zkUtils, topicName, Map(
0 -> Seq(100, 101),
1 -> Seq(100, 101),
2 -> Seq(100, 101)
), servers = servers)
//When rebalancing
val newAssignment = generateAssignment(zkUtils, brokers, json(topicName), true)._1
// Find a partition in the new assignment on broker 102 and a random log directory on broker 102,
// which currently does not have any partition for this topic
val partition1 = newAssignment.find { case (replica, brokerIds) => brokerIds.contains(102)}.get._1.partition
val replica1 = new TopicPartitionReplica(topicName, partition1, 102)
val expectedLogDir1 = getRandomLogDirAssignment(102)
// Find a partition in the new assignment on broker 100 and a random log directory on broker 100,
// which currently has partition for this topic
val partition2 = newAssignment.find { case (replica, brokerIds) => brokerIds.contains(100)}.get._1.partition
val replica2 = new TopicPartitionReplica(topicName, partition2, 100)
val expectedLogDir2 = getRandomLogDirAssignment(100)
// Generate a replica assignment to reassign replicas on broker 100 and 102 respectively to a random log directory on the same broker.
// Before this reassignment, the replica already exists on broker 100 but does not exist on broker 102
val newReplicaAssignment = Map(replica1 -> expectedLogDir1, replica2 -> expectedLogDir2)
ReassignPartitionsCommand.executeAssignment(zkUtils, Some(adminClient),
ReassignPartitionsCommand.formatAsReassignmentJson(newAssignment, newReplicaAssignment), NoThrottle)
waitForReassignmentToComplete()
// Then the replicas should span all three brokers
val actual = zkUtils.getPartitionAssignmentForTopics(Seq(topicName))(topicName)
assertEquals(Seq(100, 101, 102), actual.values.flatten.toSeq.distinct.sorted)
// The replica should be in the expected log directory on broker 102 and 100
waitUntilTrue(() => {
expectedLogDir1 == adminClient.describeReplicaLogDirs(Collections.singleton(replica1)).all().get.get(replica1).getCurrentReplicaLogDir
}, "Partition should have been moved to the expected log directory on broker 102", 1000)
waitUntilTrue(() => {
expectedLogDir2 == adminClient.describeReplicaLogDirs(Collections.singleton(replica2)).all().get.get(replica2).getCurrentReplicaLogDir
}, "Partition should have been moved to the expected log directory on broker 100", 1000)
}
@Test
def shouldShrinkCluster() {
//Given partitions on 3 of 3 brokers
val brokers = Array(100, 101, 102)
startBrokers(brokers)
createTopic(zkUtils, topicName, Map(
0 -> Seq(100, 101),
1 -> Seq(101, 102),
2 -> Seq(102, 100)
), servers = servers)
//When rebalancing
val newAssignment = generateAssignment(zkUtils, Array(100, 101), json(topicName), true)._1
ReassignPartitionsCommand.executeAssignment(zkUtils, None,
ReassignPartitionsCommand.formatAsReassignmentJson(newAssignment, Map.empty), NoThrottle)
waitForReassignmentToComplete()
//Then replicas should only span the first two brokers
val actual = zkUtils.getPartitionAssignmentForTopics(Seq(topicName))(topicName)
assertEquals(Seq(100, 101), actual.values.flatten.toSeq.distinct.sorted)
}
@Test
def shouldMoveSubsetOfPartitions() {
//Given partitions on 3 of 3 brokers
val brokers = Array(100, 101, 102)
startBrokers(brokers)
adminClient = createAdminClient(servers)
createTopic(zkUtils, "topic1", Map(
0 -> Seq(100, 101),
1 -> Seq(101, 102),
2 -> Seq(102, 100)
), servers = servers)
createTopic(zkUtils, "topic2", Map(
0 -> Seq(100, 101),
1 -> Seq(101, 102),
2 -> Seq(102, 100)
), servers = servers)
val proposed: Map[TopicAndPartition, Seq[Int]] = Map(
TopicAndPartition("topic1", 0) -> Seq(100, 102),
TopicAndPartition("topic1", 2) -> Seq(100, 102),
TopicAndPartition("topic2", 1) -> Seq(101, 100),
TopicAndPartition("topic2", 2) -> Seq(100, 102)
)
val replica1 = new TopicPartitionReplica("topic1", 0, 102)
val replica2 = new TopicPartitionReplica("topic2", 1, 100)
val proposedReplicaAssignment: Map[TopicPartitionReplica, String] = Map(
replica1 -> getRandomLogDirAssignment(102),
replica2 -> getRandomLogDirAssignment(100)
)
//When rebalancing
ReassignPartitionsCommand.executeAssignment(zkUtils, Some(adminClient),
ReassignPartitionsCommand.formatAsReassignmentJson(proposed, proposedReplicaAssignment), NoThrottle)
waitForReassignmentToComplete()
//Then the proposed changes should have been made
val actual = zkUtils.getPartitionAssignmentForTopics(Seq("topic1", "topic2"))
assertEquals(Seq(100, 102), actual("topic1")(0))//changed
assertEquals(Seq(101, 102), actual("topic1")(1))
assertEquals(Seq(100, 102), actual("topic1")(2))//changed
assertEquals(Seq(100, 101), actual("topic2")(0))
assertEquals(Seq(101, 100), actual("topic2")(1))//changed
assertEquals(Seq(100, 102), actual("topic2")(2))//changed
// The replicas should be in the expected log directories
val replicaDirs = adminClient.describeReplicaLogDirs(List(replica1, replica2).asJavaCollection).all().get()
assertEquals(proposedReplicaAssignment(replica1), replicaDirs.get(replica1).getCurrentReplicaLogDir)
assertEquals(proposedReplicaAssignment(replica2), replicaDirs.get(replica2).getCurrentReplicaLogDir)
}
@Test
def shouldExecuteThrottledReassignment() {
//Given partitions on 3 of 3 brokers
val brokers = Array(100, 101, 102)
startBrokers(brokers)
createTopic(zkUtils, topicName, Map(
0 -> Seq(100, 101)
), servers = servers)
//Given throttle set so replication will take a certain number of secs
val initialThrottle = Throttle(10 * 1000 * 1000, -1, () => zkUpdateDelay)
val expectedDurationSecs = 5
val numMessages: Int = 500
val msgSize: Int = 100 * 1000
produceMessages(servers, topicName, numMessages, acks = 0, msgSize)
assertEquals(expectedDurationSecs, numMessages * msgSize / initialThrottle.interBrokerLimit)
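    // Sanity check of the arithmetic: 500 messages * 100 KB = ~50 MB of data, which at the
    // 10 MB/s inter-broker throttle above should take roughly 5 seconds to replicate.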
//Start rebalance which will move replica on 100 -> replica on 102
val newAssignment = generateAssignment(zkUtils, Array(101, 102), json(topicName), true)._1
val start = System.currentTimeMillis()
ReassignPartitionsCommand.executeAssignment(zkUtils, None,
ReassignPartitionsCommand.formatAsReassignmentJson(newAssignment, Map.empty), initialThrottle)
//Check throttle config. Should be throttling replica 0 on 100 and 102 only.
checkThrottleConfigAddedToZK(initialThrottle.interBrokerLimit, servers, topicName, "0:100,0:101", "0:102")
//Await completion
waitForReassignmentToComplete()
val took = System.currentTimeMillis() - start - delayMs
//Check move occurred
val actual = zkUtils.getPartitionAssignmentForTopics(Seq(topicName))(topicName)
assertEquals(Seq(101, 102), actual.values.flatten.toSeq.distinct.sorted)
//Then command should have taken longer than the throttle rate
assertTrue(s"Expected replication to be > ${expectedDurationSecs * 0.9 * 1000} but was $took",
took > expectedDurationSecs * 0.9 * 1000)
assertTrue(s"Expected replication to be < ${expectedDurationSecs * 2 * 1000} but was $took",
took < expectedDurationSecs * 2 * 1000)
}
@Test
def shouldOnlyThrottleMovingReplicas() {
//Given 6 brokers, two topics
val brokers = Array(100, 101, 102, 103, 104, 105)
startBrokers(brokers)
createTopic(zkUtils, "topic1", Map(
0 -> Seq(100, 101),
1 -> Seq(100, 101),
2 -> Seq(103, 104) //will leave in place
), servers = servers)
createTopic(zkUtils, "topic2", Map(
0 -> Seq(104, 105),
1 -> Seq(104, 105),
2 -> Seq(103, 104)//will leave in place
), servers = servers)
//Given throttle set so replication will take a while
val throttle: Long = 1000 * 1000
produceMessages(servers, "topic1", 100, acks = 0, 100 * 1000)
produceMessages(servers, "topic2", 100, acks = 0, 100 * 1000)
//Start rebalance
val newAssignment = Map(
TopicAndPartition("topic1", 0) -> Seq(100, 102),//moved 101=>102
TopicAndPartition("topic1", 1) -> Seq(100, 102),//moved 101=>102
TopicAndPartition("topic2", 0) -> Seq(103, 105),//moved 104=>103
TopicAndPartition("topic2", 1) -> Seq(103, 105),//moved 104=>103
TopicAndPartition("topic1", 2) -> Seq(103, 104), //didn't move
TopicAndPartition("topic2", 2) -> Seq(103, 104) //didn't move
)
ReassignPartitionsCommand.executeAssignment(zkUtils, None,
ReassignPartitionsCommand.formatAsReassignmentJson(newAssignment, Map.empty), Throttle(throttle))
//Check throttle config. Should be throttling specific replicas for each topic.
checkThrottleConfigAddedToZK(throttle, servers, "topic1",
"1:100,1:101,0:100,0:101", //All replicas for moving partitions should be leader-throttled
"1:102,0:102" //Move destinations should be follower throttled.
)
checkThrottleConfigAddedToZK(throttle, servers, "topic2",
"1:104,1:105,0:104,0:105", //All replicas for moving partitions should be leader-throttled
"1:103,0:103" //Move destinations should be follower throttled.
)
}
@Test
def shouldChangeThrottleOnRerunAndRemoveOnVerify() {
//Given partitions on 3 of 3 brokers
val brokers = Array(100, 101, 102)
startBrokers(brokers)
createTopic(zkUtils, topicName, Map(
0 -> Seq(100, 101)
), servers = servers)
//Given throttle set so replication will take at least 20 sec (we won't wait this long)
val initialThrottle: Long = 1000 * 1000
produceMessages(servers, topicName, numMessages = 200, acks = 0, valueBytes = 100 * 1000)
//Start rebalance
val newAssignment = generateAssignment(zkUtils, Array(101, 102), json(topicName), true)._1
ReassignPartitionsCommand.executeAssignment(zkUtils, None,
ReassignPartitionsCommand.formatAsReassignmentJson(newAssignment, Map.empty), Throttle(initialThrottle))
//Check throttle config
checkThrottleConfigAddedToZK(initialThrottle, servers, topicName, "0:100,0:101", "0:102")
//Ensure that running Verify, whilst the command is executing, should have no effect
verifyAssignment(zkUtils, None, ReassignPartitionsCommand.formatAsReassignmentJson(newAssignment, Map.empty))
//Check throttle config again
checkThrottleConfigAddedToZK(initialThrottle, servers, topicName, "0:100,0:101", "0:102")
//Now re-run the same assignment with a larger throttle, which should only act to increase the throttle and make progress
val newThrottle = initialThrottle * 1000
ReassignPartitionsCommand.executeAssignment(zkUtils, None,
ReassignPartitionsCommand.formatAsReassignmentJson(newAssignment, Map.empty), Throttle(newThrottle))
//Check throttle was changed
checkThrottleConfigAddedToZK(newThrottle, servers, topicName, "0:100,0:101", "0:102")
//Await completion
waitForReassignmentToComplete()
//Verify should remove the throttle
verifyAssignment(zkUtils, None, ReassignPartitionsCommand.formatAsReassignmentJson(newAssignment, Map.empty))
//Check removed
checkThrottleConfigRemovedFromZK(topicName, servers)
//Check move occurred
val actual = zkUtils.getPartitionAssignmentForTopics(Seq(topicName))(topicName)
assertEquals(Seq(101, 102), actual.values.flatten.toSeq.distinct.sorted)
}
@Test(expected = classOf[AdminCommandFailedException])
def shouldFailIfProposedDoesNotMatchExisting() {
//Given a single replica on server 100
startBrokers(Seq(100, 101))
createTopic(zkUtils, topicName, Map(0 -> Seq(100)), servers = servers)
//When we execute an assignment that includes an invalid partition (1:101 in this case)
val topicJson = s"""{"version":1,"partitions":[{"topic":"$topicName","partition":1,"replicas":[101]}]}"""
ReassignPartitionsCommand.executeAssignment(zkUtils, None, topicJson, NoThrottle)
}
@Test(expected = classOf[AdminCommandFailedException])
def shouldFailIfProposedHasEmptyReplicaList() {
//Given a single replica on server 100
startBrokers(Seq(100, 101))
createTopic(zkUtils, topicName, Map(0 -> Seq(100)), servers = servers)
//When we execute an assignment that specifies an empty replica list (0: empty list in this case)
val topicJson = s"""{"version":1,"partitions":[{"topic":"$topicName","partition":0,"replicas":[]}]}"""
ReassignPartitionsCommand.executeAssignment(zkUtils, None, topicJson, NoThrottle)
}
@Test(expected = classOf[AdminCommandFailedException])
def shouldFailIfProposedHasInvalidBrokerID() {
//Given a single replica on server 100
startBrokers(Seq(100, 101))
createTopic(zkUtils, topicName, Map(0 -> Seq(100)), servers = servers)
//When we execute an assignment that specifies an invalid brokerID (102: invalid broker ID in this case)
val topicJson = s"""{"version":1,"partitions":[{"topic":"$topicName","partition":0,"replicas":[101, 102]}]}"""
ReassignPartitionsCommand.executeAssignment(zkUtils, None, topicJson, NoThrottle)
}
@Test(expected = classOf[AdminCommandFailedException])
def shouldFailIfProposedHasInvalidLogDir() {
// Given a single replica on server 100
startBrokers(Seq(100, 101))
adminClient = createAdminClient(servers)
createTopic(zkUtils, topicName, Map(0 -> Seq(100)), servers = servers)
// When we execute an assignment that specifies an invalid log directory
val topicJson: String = s"""{"version":1,"partitions":[{"topic":"$topicName","partition":0,"replicas":[101],"log_dirs":["invalidDir"]}]}"""
ReassignPartitionsCommand.executeAssignment(zkUtils, Some(adminClient), topicJson, NoThrottle)
}
@Test(expected = classOf[AdminCommandFailedException])
def shouldFailIfProposedHasInconsistentReplicasAndLogDirs() {
// Given a single replica on server 100
startBrokers(Seq(100, 101))
adminClient = createAdminClient(servers)
val logDir = getRandomLogDirAssignment(100)
createTopic(zkUtils, topicName, Map(0 -> Seq(100)), servers = servers)
    // When we execute an assignment whose number of log dirs doesn't match the number of replicas
val topicJson: String = s"""{"version":1,"partitions":[{"topic":"$topicName","partition":0,"replicas":[101],"log_dirs":["$logDir", "$logDir"]}]}"""
ReassignPartitionsCommand.executeAssignment(zkUtils, Some(adminClient), topicJson, NoThrottle)
}
@Test
def shouldPerformThrottledReassignmentOverVariousTopics() {
val throttle = Throttle(1000L)
//Given four brokers
servers = TestUtils.createBrokerConfigs(4, zkConnect, false).map(conf => TestUtils.createServer(KafkaConfig.fromProps(conf)))
    //With several small topics
createTopic(zkUtils, "orders", Map(0 -> List(0, 1, 2), 1 -> List(0, 1, 2)), servers)
createTopic(zkUtils, "payments", Map(0 -> List(0, 1), 1 -> List(0, 1)), servers)
createTopic(zkUtils, "deliveries", Map(0 -> List(0)), servers)
createTopic(zkUtils, "customers", Map(0 -> List(0), 1 -> List(1), 2 -> List(2), 3 -> List(3)), servers)
//Define a move for some of them
val move = Map(
TopicAndPartition("orders", 0) -> Seq(0, 2, 3),//moves
TopicAndPartition("orders", 1) -> Seq(0, 1, 2),//stays
TopicAndPartition("payments", 1) -> Seq(1, 2), //only define one partition as moving
TopicAndPartition("deliveries", 0) -> Seq(1, 2) //increase replication factor
)
//When we run a throttled reassignment
new ReassignPartitionsCommand(zkUtils, None, move).reassignPartitions(throttle)
waitForReassignmentToComplete()
//Check moved replicas did move
assertEquals(Seq(0, 2, 3), zkUtils.getReplicasForPartition("orders", 0))
assertEquals(Seq(0, 1, 2), zkUtils.getReplicasForPartition("orders", 1))
assertEquals(Seq(1, 2), zkUtils.getReplicasForPartition("payments", 1))
assertEquals(Seq(1, 2), zkUtils.getReplicasForPartition("deliveries", 0))
//Check untouched replicas are still there
assertEquals(Seq(0, 1), zkUtils.getReplicasForPartition("payments", 0))
assertEquals(Seq(0), zkUtils.getReplicasForPartition("customers", 0))
assertEquals(Seq(1), zkUtils.getReplicasForPartition("customers", 1))
assertEquals(Seq(2), zkUtils.getReplicasForPartition("customers", 2))
assertEquals(Seq(3), zkUtils.getReplicasForPartition("customers", 3))
}
/**
* Verifies that the Controller sets a watcher for the reassignment znode after reassignment completion.
* This includes the case where the znode is set immediately after it's deleted (i.e. before the watch is set).
* This case relies on the scheduling of the operations, so it won't necessarily fail every time, but it fails
* often enough to detect a regression.
*/
@Test
def shouldPerformMultipleReassignmentOperationsOverVariousTopics() {
servers = TestUtils.createBrokerConfigs(4, zkConnect, false).map(conf => TestUtils.createServer(KafkaConfig.fromProps(conf)))
createTopic(zkUtils, "orders", Map(0 -> List(0, 1, 2), 1 -> List(0, 1, 2)), servers)
createTopic(zkUtils, "payments", Map(0 -> List(0, 1), 1 -> List(0, 1)), servers)
createTopic(zkUtils, "deliveries", Map(0 -> List(0)), servers)
createTopic(zkUtils, "customers", Map(0 -> List(0), 1 -> List(1), 2 -> List(2), 3 -> List(3)), servers)
val firstMove = Map(
TopicAndPartition("orders", 0) -> Seq(0, 2, 3), //moves
TopicAndPartition("orders", 1) -> Seq(0, 1, 2), //stays
TopicAndPartition("payments", 1) -> Seq(1, 2), //only define one partition as moving
TopicAndPartition("deliveries", 0) -> Seq(1, 2) //increase replication factor
)
new ReassignPartitionsCommand(zkUtils, None, firstMove).reassignPartitions()
waitForReassignmentToComplete()
// Check moved replicas did move
assertEquals(Seq(0, 2, 3), zkUtils.getReplicasForPartition("orders", 0))
assertEquals(Seq(0, 1, 2), zkUtils.getReplicasForPartition("orders", 1))
assertEquals(Seq(1, 2), zkUtils.getReplicasForPartition("payments", 1))
assertEquals(Seq(1, 2), zkUtils.getReplicasForPartition("deliveries", 0))
// Check untouched replicas are still there
assertEquals(Seq(0, 1), zkUtils.getReplicasForPartition("payments", 0))
assertEquals(Seq(0), zkUtils.getReplicasForPartition("customers", 0))
assertEquals(Seq(1), zkUtils.getReplicasForPartition("customers", 1))
assertEquals(Seq(2), zkUtils.getReplicasForPartition("customers", 2))
assertEquals(Seq(3), zkUtils.getReplicasForPartition("customers", 3))
// Define a move for some of them
val secondMove = Map(
TopicAndPartition("orders", 0) -> Seq(0, 2, 3), // stays
TopicAndPartition("orders", 1) -> Seq(3, 1, 2), // moves
TopicAndPartition("payments", 1) -> Seq(2, 1), // changed preferred leader
TopicAndPartition("deliveries", 0) -> Seq(1, 2, 3) //increase replication factor
)
new ReassignPartitionsCommand(zkUtils, None, secondMove).reassignPartitions()
waitForReassignmentToComplete()
// Check moved replicas did move
assertEquals(Seq(0, 2, 3), zkUtils.getReplicasForPartition("orders", 0))
assertEquals(Seq(3, 1, 2), zkUtils.getReplicasForPartition("orders", 1))
assertEquals(Seq(2, 1), zkUtils.getReplicasForPartition("payments", 1))
assertEquals(Seq(1, 2, 3), zkUtils.getReplicasForPartition("deliveries", 0))
//Check untouched replicas are still there
assertEquals(Seq(0, 1), zkUtils.getReplicasForPartition("payments", 0))
assertEquals(Seq(0), zkUtils.getReplicasForPartition("customers", 0))
assertEquals(Seq(1), zkUtils.getReplicasForPartition("customers", 1))
assertEquals(Seq(2), zkUtils.getReplicasForPartition("customers", 2))
assertEquals(Seq(3), zkUtils.getReplicasForPartition("customers", 3))
// We set the znode and then continuously attempt to set it again to exercise the case where the znode is set
// immediately after deletion (i.e. before we set the watcher again)
val thirdMove = Map(TopicAndPartition("orders", 0) -> Seq(1, 2, 3))
new ReassignPartitionsCommand(zkUtils, None, thirdMove).reassignPartitions()
val fourthMove = Map(TopicAndPartition("payments", 1) -> Seq(2, 3))
// Continuously attempt to set the reassignment znode with `fourthMove` until it succeeds. It will only succeed
// after `thirdMove` completes.
Iterator.continually {
try new ReassignPartitionsCommand(zkUtils, None, fourthMove).reassignPartitions()
catch {
case _: AdminCommandFailedException => false
}
}.exists(identity)
waitForReassignmentToComplete()
// Check moved replicas for thirdMove and fourthMove
assertEquals(Seq(1, 2, 3), zkUtils.getReplicasForPartition("orders", 0))
assertEquals(Seq(2, 3), zkUtils.getReplicasForPartition("payments", 1))
//Check untouched replicas are still there
assertEquals(Seq(3, 1, 2), zkUtils.getReplicasForPartition("orders", 1))
assertEquals(Seq(1, 2, 3), zkUtils.getReplicasForPartition("deliveries", 0))
assertEquals(Seq(0, 1), zkUtils.getReplicasForPartition("payments", 0))
assertEquals(Seq(0), zkUtils.getReplicasForPartition("customers", 0))
assertEquals(Seq(1), zkUtils.getReplicasForPartition("customers", 1))
assertEquals(Seq(2), zkUtils.getReplicasForPartition("customers", 2))
assertEquals(Seq(3), zkUtils.getReplicasForPartition("customers", 3))
}
def waitForReassignmentToComplete() {
waitUntilTrue(() => !zkUtils.pathExists(ReassignPartitionsPath), s"Znode ${ZkUtils.ReassignPartitionsPath} wasn't deleted")
}
def json(topic: String*): String = {
    val topicStr = topic.map { t => "{\"topic\": \"" + t + "\"}" }.mkString(",")
s"""{"topics": [$topicStr],"version":1}"""
}
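  // For example (illustrative), json("my-topic") yields: {"topics": [{"topic": "my-topic"}],"version":1}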
}
|
themarkypantz/kafka
|
core/src/test/scala/unit/kafka/admin/ReassignPartitionsClusterTest.scala
|
Scala
|
apache-2.0
| 27,062
|
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.app.nlp.segment
import cc.factorie.app.nlp.{Document, DocumentAnnotator, Sentence, Token}
import cc.factorie.app.strings.StringSegmentIterator
object DefaultRules {
  val contractionsAndPossessives = """((?i)'(s|d|m|l+|ve|re)\b)|((?i)n't\b)"""
  val singleLetterAcronyms = """[\p{L}]\.[\p{L}\.]*"""
  val allAbbrevs = """([\p{L}]+\.)"""
  val ordinals = "[0-9]{1,2}[sthnrd]+[\\-\\p{L}]+"
  val notEndingInDot = "[0-9\\-.\\:/,\\+\\=%><]+[0-9\\-:/,\\+\\=%><]"
  val possiblyEndingInDot = "[0-9\\-.\\:/,\\+\\=%]+"
  val email = """(?i)\b[\p{L}\p{Nd}._%+-]+@[\p{L}\p{Nd}.-]+\.[A-Z]{2,4}\b"""
  val url1 = """\b(https?|ftp|file)://[-\p{L}\p{Nd}+&@#/%?=~_|!:,.;]*[-\p{L}\p{Nd}+&@#/%=~_|]"""
  val url2 = """\b[wW]{3}.(([-\p{L}\p{Nd}+&@#/%?=~_|!:,;]+(?=\.))\.)+[A-Za-z]{2,4}(/[-\p{L}\p{Nd}+&@#/%?=~_|!:,;]*)?"""
  val finalPunctuation1 = """[.?!]["')}\]]?"""
  // why does this have square and curly brackets in it??
  val finalPunctuation2 = """["')}\]]?[.?!]"""
  val midSentenceQuotes = "[`'\"]+"
  val otherSymbols = """[,\-:;$?&@\(\)]+"""
  val alphanumericsAndHyphensPrecedingContractionsOrPossessives = """[\p{L}\p{N}\-]+(?=(?i)('(s|d|m|l+|ve|re))|(n't))"""
  val wordsWithSequencesOfSingleDashesInside = "[\\w]+(-[\\w]+)*"
  val wordWithNumberAndApostrophe = "[\\w']+"
val commonAbbreviations = Set(
"inc", "corp", "dec", "jan", "feb", "mar", "apr", "jun", "jul", "aug", "sep", "oct", "nov", "ala",
"ariz", "ark", "colo", "conn", "del", "fla", "ill", "ind", "kans", "kan", "ken", "kent", "mass", "mich",
"minn", "miss", "mont", "nebr", "neb", "nev", "dak", "okla", "oreg", "tenn", "tex", "virg", "wash", "wis",
"wyo", "mr", "ms", "mrs", "calif", "oct", "vol", "rev", "ltd", "dea", "est", "capt", "hev", "gen", "ltd", "etc", "sci",
"comput", "univ", "ave", "cent", "col", "comdr", "cpl", "dept", "dust,", "div", "est", "gal", "gov", "hon",
"grad", "inst", "lib", "mus", "pseud", "ser", "alt", "Inc", "Corp", "Dec", "Jan", "Feb", "Mar", "Apr",
"Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Ala", "Ariz", "Ark", "Colo", "Conn", "Del", "Fla", "Ill",
"Ind", "Kans", "Kan", "Ken", "Kent", "Mass", "Mich", "Minn", "Miss", "Mont", "Nebr", "Neb", "Nev", "Dak",
"Okla", "Oreg", "Tenn", "Tex", "Virg", "Wash", "Wis", "Wyo", "Mrs", "Calif", "Oct", "Vol", "Rev", "Ltd",
"Dea", "Est", "Capt", "Hev", "Gen", "Ltd", "Etc", "Sci", "Comput", "Univ", "Ave", "Cent", "Col", "Comdr",
"Cpl", "Dept", "Dust,", "Div", "Est", "Gal", "Gov", "Hon", "Grad", "Inst", "Lib", "Mus", "Pseud", "Ser", "Alt",
"Mr", "Ms")
val commonSentenceStarters = Set("The")
val defaultRuleset = Seq(
contractionsAndPossessives
, singleLetterAcronyms
, allAbbrevs
, ordinals
, possiblyEndingInDot
, email
, url1
, url2
, finalPunctuation1
, finalPunctuation2
, midSentenceQuotes
, otherSymbols
, alphanumericsAndHyphensPrecedingContractionsOrPossessives
, wordsWithSequencesOfSingleDashesInside
, wordWithNumberAndApostrophe)
val defaultRulesetNoSentenceBoundaries = Seq(
contractionsAndPossessives
, singleLetterAcronyms
, ordinals
, notEndingInDot
, email
, url1
, url2
, commonAbbreviations.mkString("|")
, finalPunctuation1
, finalPunctuation2
, midSentenceQuotes
, otherSymbols
, alphanumericsAndHyphensPrecedingContractionsOrPossessives
, wordsWithSequencesOfSingleDashesInside
, wordWithNumberAndApostrophe)
}
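// Hedged illustration (not part of the original file): each rule above is a plain regex
// alternative; joined with '|' (as the tokenizer does below) they form a single token pattern.
// A minimal standalone sketch, assuming only the definitions in DefaultRules:
object DefaultRulesExample {
  def main(args: Array[String]): Unit = {
    val tokenRegex = DefaultRules.defaultRuleset.mkString("|").r
    val tokens = tokenRegex.findAllIn("Dr. Smith's co-worker emailed smith@example.com.").toList
    // Roughly: possessives, hyphenated words, e-mail addresses and punctuation come out as separate tokens.
    println(tokens)
  }
}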
sealed trait SentenceBoundaryInference
case object PerDocument extends SentenceBoundaryInference
case object JointlyAcrossDocuments extends SentenceBoundaryInference
case object Non extends SentenceBoundaryInference
object PunktTokenizer extends PunktTokenizer
class PunktTokenizer extends DocumentAnnotator {
def tokenAnnotationString(token: Token) = token.string + "\t"
def commonAbbreviations: Set[String] = DefaultRules.commonAbbreviations
def commonSentenceStarters: Set[String] = DefaultRules.commonSentenceStarters
def sentenceBoundaryInference: SentenceBoundaryInference = JointlyAcrossDocuments
def ruleset: Seq[String] =
if (sentenceBoundaryInference == Non) DefaultRules.defaultRulesetNoSentenceBoundaries
else DefaultRules.defaultRuleset
private[this] val regex = ruleset.mkString("|").r
// def apply(s: String): StringSegmentIterator = new StringSegmentIterator {
// val doc = new Document(s)
// process(doc)
// var i = 0
// val len = doc.tokens.length
// def hasNext = i < len - 1
// def next: String = { val result = doc.tokens(i).string; i += 1; result }
// def start = doc.tokens(i).stringStart
// def end = doc.tokens(i).stringEnd
// //doc.tokens.map(_.string).iterator
// }
def apply(s: String): StringSegmentIterator = new StringSegmentIterator {
val tokenIterator = for (section <- process(new Document(s)).sections.iterator; token <- section.tokens.iterator) yield token
var token: Token = null
def hasNext = tokenIterator.hasNext
def next(): String = { token = tokenIterator.next(); token.string }
def start = token.stringStart
def end = token.stringEnd
}
// TODO Fix this to fit better into the DocumentProcessor framework, e.g. setting postAttrs
def process(documents: Seq[Document]): Unit = processLogic(documents, sentenceBoundaryInference)
def process(document: Document): Document = { processLogic(Seq(document), sentenceBoundaryInference); document }
def prereqAttrs: Iterable[Class[_]] = Nil
def postAttrs: Iterable[Class[_]] = Vector[Class[_]](classOf[Token], classOf[Sentence])
// TODO Fix to obey document.sections! -akm
private[this] def processLogic(documents: Seq[Document], inference: SentenceBoundaryInference): Unit = inference match {
case PerDocument => documents.foreach(d => processLogic(Seq(d), JointlyAcrossDocuments))
case Non =>
for (d <- documents; section <- d.sections) {
val tokenIterator = regex.findAllIn(section.string)
while (tokenIterator.hasNext) {
tokenIterator.next()
new Token(d, tokenIterator.start, tokenIterator.end)
}
new Sentence(section, 0, d.tokenCount)
}
case JointlyAcrossDocuments =>
val docString = documents.map(_.string).mkString(" ")
val sentenceSegmented = PunktSentenceSegmenter.findSentenceBoundaries(docString, abvSet = commonAbbreviations).toArray
var tokensSoFar = 0
var d = 0
var currentDocument = documents(d)
var docOffset = 0
val segmentsIterator = sentenceSegmented.sliding(2)
while (segmentsIterator.hasNext) {
var Array((start, _), (end, endTy)) = segmentsIterator.next()
val endIsAbbrev = endTy == AS || endTy == A
/* end isn't an abbrev, so remove the period by making end -= 1 and then in the regex you can have all things containing '.' be abbrevs */
if (!endIsAbbrev) {end -= 1}
if (end > docOffset + currentDocument.string.length + 1) {
d += 1
docOffset += currentDocument.string.length + 1
currentDocument = documents(d)
tokensSoFar = 0
}
val currentDocumentOffset = start - docOffset
val tokenIterator = regex.findAllIn(docString.substring(start, end))
var numTokens = 0
while (tokenIterator.hasNext) {
tokenIterator.next()
new Token(currentDocument, math.max(0, currentDocumentOffset + tokenIterator.start), currentDocumentOffset + tokenIterator.end) // really?
numTokens += 1
}
if (!endIsAbbrev) {
new Token(currentDocument.asSection, end - docOffset, end + 1 - docOffset)
numTokens += 1
}
new Sentence(currentDocument.asSection, tokensSoFar, numTokens) // really?
tokensSoFar += numTokens
}
}
}
|
patverga/factorie
|
src/main/scala/cc/factorie/app/nlp/segment/PunktTokenizer.scala
|
Scala
|
apache-2.0
| 8,732
|
package models
case class Messages(totalCount: Int, edges: List[MessageEdges], pageInfo: MessagePageInfo)
|
sysgears/apollo-universal-starter-kit
|
modules/chat/server-scala/src/main/scala/models/Messages.scala
|
Scala
|
mit
| 107
|
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.actormonitor.testcube
import akka.actor.Actor
class TestActor extends Actor {
def receive = {
case x => sender ! x
}
}
class TestActorWithRoute extends Actor {
def receive = {
case x => sender ! x
}
}
class TestActor1 extends Actor {
def receive = {
case x => context.stop(self)
}
}
|
Harikiranvuyyuru/squbs
|
squbs-actormonitor/src/test/scala/org/squbs/actormonitor/TestCube/TestCube.scala
|
Scala
|
apache-2.0
| 926
|
package com.wavesplatform.lang.contract.meta
import com.wavesplatform.lang.v1.compiler.Types.FINAL
import com.wavesplatform.protobuf.dapp.DAppMeta
private[meta] trait MetaMapperStrategy[V <: MetaVersion] {
def toProto(data: List[List[FINAL]], nameMap: Map[String, String] = Map.empty): Either[String, DAppMeta]
def fromProto(meta: DAppMeta): Either[String, List[List[FINAL]]]
}
|
wavesplatform/Waves
|
lang/shared/src/main/scala/com/wavesplatform/lang/contract/meta/MetaMapperStrategy.scala
|
Scala
|
mit
| 384
|
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
/* TODO: Large parts of this code are identical or at least very similar to the code that
* implements the support for quantified permissions to fields - merge it
*/
package viper.silicon.supporters.qps
import viper.silver.ast
import viper.silver.ast.Predicate
import viper.silicon.{Config, Map, Set, toSet}
import viper.silicon.decider.PreambleFileEmitter
import viper.silicon.interfaces.PreambleEmitter
import viper.silicon.interfaces.decider.Prover
import viper.silicon.state.terms._
import viper.silicon.state.{SymbolConvert, terms}
trait PredicateSnapFunctionsEmitter extends PreambleEmitter
class DefaultPredicateSnapFunctionsEmitter(prover: => Prover,
symbolConverter: SymbolConvert,
predicateSnapGenerator: PredicateSnapGenerator,
preambleFileEmitter: PreambleFileEmitter[String, String],
config: Config
)
extends PredicateSnapFunctionsEmitter {
private var collectedPredicates = Set[Predicate]()
private var collectedSorts = Set[terms.sorts.PredicateSnapFunction]()
def sorts: Set[Sort] = toSet(collectedSorts)
/* Scala's immutable sets are invariant in their element type, hence
* Set[PSF] is not a subtype of Set[Sort], although PSF is one of Sort.
*/
def analyze(program: ast.Program) {
program visit {
case ast.utility.QuantifiedPermissions.QPPForall(_, _, _, _, _, _, predAccpred) =>
val predicate = program.findPredicate(predAccpred.loc.predicateName)
collectedPredicates += predicate
}
collectedSorts = (
collectedPredicates.map(predicate => terms.sorts.PredicateSnapFunction(predicateSnapGenerator.getSnap(predicate)._1))
+ terms.sorts.PredicateSnapFunction(terms.sorts.Snap)
)
}
def declareSorts() {
prover.declare(SortDecl(terms.sorts.Set(terms.sorts.Snap)))
collectedSorts foreach (s => prover.declare(SortDecl(s)))
}
def declareSymbols() {
//declare Set properties
val setDecl = "/dafny_axioms/sets_declarations_dafny.smt2"
val setSort = terms.sorts.Snap
val substitutions = Map("$S$" -> prover.termConverter.convert(setSort))
prover.logComment(s"$setDecl [$setSort")
preambleFileEmitter.emitParametricAssertions(setDecl, substitutions)
collectedPredicates foreach { predicate =>
val sort = predicateSnapGenerator.getSnap(predicate)._1
val sort2 = prover.termConverter.convert(terms.sorts.Snap)
val id = predicate.name
val substitutions = Map("$PRD$" -> id, "$S$" -> prover.termConverter.convert(sort))
val psfDeclarations = "/predicate_snap_functions_declarations.smt2"
prover.logComment(s"$psfDeclarations [$id: $sort: $sort2]")
preambleFileEmitter.emitParametricAssertions(psfDeclarations, substitutions)
}
}
def emitAxioms() {
/* Axioms that have to be emitted for each field that is dereferenced from
* a quantified receiver
*/
collectedPredicates foreach { predicate =>
val sort = predicateSnapGenerator.getSnap(predicate)._1
val id = predicate.name
val psfSubstitutions = Map("$PRD$" -> id, "$S$" -> prover.termConverter.convert(sort))
val psfAxioms = if (config.disableISCTriggers()) "/predicate_snap_functions_axioms_no_triggers.smt2" else "/predicate_snap_functions_axioms.smt2"
prover.logComment(s"$psfAxioms [$id: $sort]")
preambleFileEmitter.emitParametricAssertions(psfAxioms, psfSubstitutions)
}
}
/* Lifetime */
def reset() {
collectedPredicates = collectedPredicates.empty
}
def stop() {}
def start() {}
}
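// Hedged illustration (not part of the original file): the comment next to `sorts` notes that
// immutable Sets are invariant, so a Set of a subtype does not widen to a Set of its supertype;
// an element-wise conversion (as `toSet` performs above) is needed. A minimal standalone sketch:
object SetInvarianceSketch {
  val ints: scala.collection.immutable.Set[Int] = scala.collection.immutable.Set(1, 2, 3)
  // val anys: scala.collection.immutable.Set[Any] = ints      // does not compile: Set is invariant
  val anys: scala.collection.immutable.Set[Any] = ints.map(i => i: Any) // explicit widening per element
}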
|
sccblom/vercors
|
viper/silicon/src/main/scala/supporters/qps/PredicateSnapFunctionsEmitter.scala
|
Scala
|
mpl-2.0
| 3,937
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.tools.dashboard
import com.typesafe.config.ConfigFactory
import org.apache.predictionio.authentication.KeyAuthentication
import org.apache.predictionio.configuration.SSLConfiguration
import org.apache.predictionio.data.storage.Storage
import spray.can.server.ServerSettings
import scala.concurrent.ExecutionContext
import akka.actor.{ActorContext, Actor, ActorSystem, Props}
import akka.io.IO
import akka.pattern.ask
import akka.util.Timeout
import com.github.nscala_time.time.Imports.DateTime
import grizzled.slf4j.Logging
import spray.can.Http
import spray.http._
import spray.http.MediaTypes._
import spray.routing._
import scala.concurrent.duration._
case class DashboardConfig(
ip: String = "localhost",
port: Int = 9000)
object Dashboard extends Logging with SSLConfiguration {
def main(args: Array[String]): Unit = {
val parser = new scopt.OptionParser[DashboardConfig]("Dashboard") {
opt[String]("ip") action { (x, c) =>
c.copy(ip = x)
} text("IP to bind to (default: localhost).")
opt[Int]("port") action { (x, c) =>
c.copy(port = x)
} text("Port to bind to (default: 9000).")
}
parser.parse(args, DashboardConfig()) map { dc =>
createDashboard(dc).awaitTermination
}
}
def createDashboard(dc: DashboardConfig): ActorSystem = {
val systemName = "pio-dashboard"
implicit val system = ActorSystem(systemName)
val service =
system.actorOf(Props(classOf[DashboardActor], dc), "dashboard")
implicit val timeout = Timeout(5.seconds)
val settings = ServerSettings(system)
val serverConfig = ConfigFactory.load("server.conf")
val sslEnforced = serverConfig.getBoolean("org.apache.predictionio.server.ssl-enforced")
IO(Http) ? Http.Bind(
service,
interface = dc.ip,
port = dc.port,
settings = Some(settings.copy(sslEncryption = sslEnforced)))
system
}
}
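// Hedged usage sketch (not part of the original file): the dashboard can also be started
// programmatically; this assumes the same resources (server.conf, keystore, assets) are on
// the classpath as when it is launched from the command line.
object DashboardExample {
  def main(args: Array[String]): Unit = {
    val system = Dashboard.createDashboard(DashboardConfig(ip = "0.0.0.0", port = 9001))
    system.awaitTermination() // block until the actor system shuts down
  }
}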
class DashboardActor(
val dc: DashboardConfig)
extends Actor with DashboardService {
def actorRefFactory: ActorContext = context
def receive: Actor.Receive = runRoute(dashboardRoute)
}
trait DashboardService extends HttpService with KeyAuthentication with CORSSupport {
implicit def executionContext: ExecutionContext = actorRefFactory.dispatcher
val dc: DashboardConfig
val evaluationInstances = Storage.getMetaDataEvaluationInstances
val pioEnvVars = sys.env.filter(kv => kv._1.startsWith("PIO_"))
val serverStartTime = DateTime.now
val dashboardRoute =
path("") {
authenticate(withAccessKeyFromFile) { request =>
get {
respondWithMediaType(`text/html`) {
complete {
val completedInstances = evaluationInstances.getCompleted
html.index(
dc,
serverStartTime,
pioEnvVars,
completedInstances).toString
}
}
}
}
} ~
pathPrefix("engine_instances" / Segment) { instanceId =>
path("evaluator_results.txt") {
get {
respondWithMediaType(`text/plain`) {
evaluationInstances.get(instanceId).map { i =>
complete(i.evaluatorResults)
} getOrElse {
complete(StatusCodes.NotFound)
}
}
}
} ~
path("evaluator_results.html") {
get {
respondWithMediaType(`text/html`) {
evaluationInstances.get(instanceId).map { i =>
complete(i.evaluatorResultsHTML)
} getOrElse {
complete(StatusCodes.NotFound)
}
}
}
} ~
path("evaluator_results.json") {
get {
respondWithMediaType(`application/json`) {
evaluationInstances.get(instanceId).map { i =>
complete(i.evaluatorResultsJSON)
} getOrElse {
complete(StatusCodes.NotFound)
}
}
}
} ~
cors {
path("local_evaluator_results.json") {
get {
respondWithMediaType(`application/json`) {
evaluationInstances.get(instanceId).map { i =>
complete(i.evaluatorResultsJSON)
} getOrElse {
complete(StatusCodes.NotFound)
}
}
}
}
}
} ~
pathPrefix("assets") {
getFromResourceDirectory("assets")
}
}
|
himanshudhami/PredictionIO
|
tools/src/main/scala/org/apache/predictionio/tools/dashboard/Dashboard.scala
|
Scala
|
apache-2.0
| 5,235
|
package controllers
import javax.inject.Inject
import play.api.mvc._
/**
* A very small controller that renders a home page.
*/
class HomeController @Inject()(cc: ControllerComponents) extends AbstractController(cc) {
def index = Action { implicit request =>
Ok("Hello, this is Play!")
}
}
|
Shenker93/playframework
|
framework/src/sbt-plugin/src/sbt-test/play-sbt-plugin/generated-keystore/src/main/scala/controllers/HomeController.scala
|
Scala
|
apache-2.0
| 304
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.adaptive.{DisableAdaptiveExecutionSuite, EnableAdaptiveExecutionSuite}
import org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.TestOptionsSource
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
trait ExplainSuiteHelper extends QueryTest with SharedSparkSession {
protected def getNormalizedExplain(df: DataFrame, mode: ExplainMode): String = {
val output = new java.io.ByteArrayOutputStream()
Console.withOut(output) {
df.explain(mode.name)
}
output.toString.replaceAll("#\\d+", "#x")
}
/**
* Get the explain from a DataFrame and run the specified action on it.
*/
protected def withNormalizedExplain(df: DataFrame, mode: ExplainMode)(f: String => Unit) = {
f(getNormalizedExplain(df, mode))
}
/**
* Get the explain by running the sql. The explain mode should be part of the
* sql text itself.
*/
protected def withNormalizedExplain(queryText: String)(f: String => Unit) = {
val output = new java.io.ByteArrayOutputStream()
Console.withOut(output) {
sql(queryText).show(false)
}
val normalizedOutput = output.toString.replaceAll("#\\d+", "#x")
f(normalizedOutput)
}
/**
* Runs the plan and makes sure the plans contains all of the keywords.
*/
protected def checkKeywordsExistsInExplain(
df: DataFrame, mode: ExplainMode, keywords: String*): Unit = {
withNormalizedExplain(df, mode) { normalizedOutput =>
for (key <- keywords) {
assert(normalizedOutput.contains(key))
}
}
}
protected def checkKeywordsExistsInExplain(df: DataFrame, keywords: String*): Unit = {
checkKeywordsExistsInExplain(df, ExtendedMode, keywords: _*)
}
/**
* Runs the plan and makes sure the plans does not contain any of the keywords.
*/
protected def checkKeywordsNotExistsInExplain(
df: DataFrame, mode: ExplainMode, keywords: String*): Unit = {
withNormalizedExplain(df, mode) { normalizedOutput =>
for (key <- keywords) {
assert(!normalizedOutput.contains(key))
}
}
}
}
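// Hedged illustration (not part of the original suite): expression IDs such as `#12` differ
// between runs, so the helpers above rewrite them to `#x` before comparing explain output.
// A minimal standalone sketch of that normalization:
object NormalizedExplainSketch {
  def main(args: Array[String]): Unit = {
    val raw = "Project [id#12L AS value#27L]"
    println(raw.replaceAll("#\\d+", "#x")) // prints: Project [id#xL AS value#xL]
  }
}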
class ExplainSuite extends ExplainSuiteHelper with DisableAdaptiveExecutionSuite {
import testImplicits._
test("SPARK-23034 show rdd names in RDD scan nodes (Dataset)") {
val rddWithName = spark.sparkContext.parallelize(Row(1, "abc") :: Nil).setName("testRdd")
val df = spark.createDataFrame(rddWithName, StructType.fromDDL("c0 int, c1 string"))
checkKeywordsExistsInExplain(df, keywords = "Scan ExistingRDD testRdd")
}
test("SPARK-23034 show rdd names in RDD scan nodes (DataFrame)") {
val rddWithName = spark.sparkContext.parallelize(ExplainSingleData(1) :: Nil).setName("testRdd")
val df = spark.createDataFrame(rddWithName)
checkKeywordsExistsInExplain(df, keywords = "Scan testRdd")
}
test("SPARK-24850 InMemoryRelation string representation does not include cached plan") {
val df = Seq(1).toDF("a").cache()
checkKeywordsExistsInExplain(df,
keywords = "InMemoryRelation", "StorageLevel(disk, memory, deserialized, 1 replicas)")
}
test("optimized plan should show the rewritten aggregate expression") {
withTempView("test_agg") {
sql(
"""
|CREATE TEMPORARY VIEW test_agg AS SELECT * FROM VALUES
| (1, true), (1, false),
| (2, true),
| (3, false), (3, null),
| (4, null), (4, null),
| (5, null), (5, true), (5, false) AS test_agg(k, v)
""".stripMargin)
// simple explain of queries having every/some/any aggregates. Optimized
// plan should show the rewritten aggregate expression.
val df = sql("SELECT k, every(v), some(v), any(v) FROM test_agg GROUP BY k")
checkKeywordsExistsInExplain(df,
"Aggregate [k#x], [k#x, every(v#x) AS every(v)#x, some(v#x) AS some(v)#x, " +
"any(v#x) AS any(v)#x]")
}
}
test("explain inline tables cross-joins") {
val df = sql(
"""
|SELECT * FROM VALUES ('one', 1), ('three', null)
| CROSS JOIN VALUES ('one', 1), ('three', null)
""".stripMargin)
checkKeywordsExistsInExplain(df,
"Join Cross",
":- LocalRelation [col1#x, col2#x]",
"+- LocalRelation [col1#x, col2#x]")
}
test("explain table valued functions") {
checkKeywordsExistsInExplain(sql("select * from RaNgE(2)"), "Range (0, 2, step=1, splits=None)")
checkKeywordsExistsInExplain(sql("SELECT * FROM range(3) CROSS JOIN range(3)"),
"Join Cross",
":- Range (0, 3, step=1, splits=None)",
"+- Range (0, 3, step=1, splits=None)")
}
test("explain lateral joins") {
checkKeywordsExistsInExplain(
sql("SELECT * FROM VALUES (0, 1) AS (a, b), LATERAL (SELECT a)"),
"LateralJoin lateral-subquery#x [a#x], Inner",
"Project [outer(a#x) AS a#x]"
)
}
test("explain string functions") {
// Check if catalyst combine nested `Concat`s
val df1 = sql(
"""
|SELECT (col1 || col2 || col3 || col4) col
| FROM (SELECT id col1, id col2, id col3, id col4 FROM range(10))
""".stripMargin)
checkKeywordsExistsInExplain(df1,
"Project [concat(cast(id#xL as string), cast(id#xL as string), cast(id#xL as string)" +
", cast(id#xL as string)) AS col#x]")
// Check if catalyst combine nested `Concat`s if concatBinaryAsString=false
withSQLConf(SQLConf.CONCAT_BINARY_AS_STRING.key -> "false") {
val df2 = sql(
"""
|SELECT ((col1 || col2) || (col3 || col4)) col
|FROM (
| SELECT
| string(id) col1,
| string(id + 1) col2,
| encode(string(id + 2), 'utf-8') col3,
| encode(string(id + 3), 'utf-8') col4
| FROM range(10)
|)
""".stripMargin)
checkKeywordsExistsInExplain(df2,
"Project [concat(cast(id#xL as string), cast((id#xL + 1) as string), " +
"cast(encode(cast((id#xL + 2) as string), utf-8) as string), " +
"cast(encode(cast((id#xL + 3) as string), utf-8) as string)) AS col#x]")
val df3 = sql(
"""
|SELECT (col1 || (col3 || col4)) col
|FROM (
| SELECT
| string(id) col1,
| encode(string(id + 2), 'utf-8') col3,
| encode(string(id + 3), 'utf-8') col4
| FROM range(10)
|)
""".stripMargin)
checkKeywordsExistsInExplain(df3,
"Project [concat(cast(id#xL as string), " +
"cast(encode(cast((id#xL + 2) as string), utf-8) as string), " +
"cast(encode(cast((id#xL + 3) as string), utf-8) as string)) AS col#x]")
}
}
test("check operator precedence") {
// We follow Oracle operator precedence in the table below that lists the levels
// of precedence among SQL operators from high to low:
// ---------------------------------------------------------------------------------------
// Operator Operation
// ---------------------------------------------------------------------------------------
// +, - identity, negation
// *, / multiplication, division
// +, -, || addition, subtraction, concatenation
// =, !=, <, >, <=, >=, IS NULL, LIKE, BETWEEN, IN comparison
// NOT exponentiation, logical negation
// AND conjunction
// OR disjunction
// ---------------------------------------------------------------------------------------
checkKeywordsExistsInExplain(sql("select 'a' || 1 + 2"),
"Project [null AS (concat(a, 1) + 2)#x]")
checkKeywordsExistsInExplain(sql("select 1 - 2 || 'b'"),
"Project [-1b AS concat((1 - 2), b)#x]")
checkKeywordsExistsInExplain(sql("select 2 * 4 + 3 || 'b'"),
"Project [11b AS concat(((2 * 4) + 3), b)#x]")
checkKeywordsExistsInExplain(sql("select 3 + 1 || 'a' || 4 / 2"),
"Project [4a2.0 AS concat(concat((3 + 1), a), (4 / 2))#x]")
checkKeywordsExistsInExplain(sql("select 1 == 1 OR 'a' || 'b' == 'ab'"),
"Project [true AS ((1 = 1) OR (concat(a, b) = ab))#x]")
checkKeywordsExistsInExplain(sql("select 'a' || 'c' == 'ac' AND 2 == 3"),
"Project [false AS ((concat(a, c) = ac) AND (2 = 3))#x]")
}
test("explain for these functions; use range to avoid constant folding") {
val df = sql("select ifnull(id, 'x'), nullif(id, 'x'), nvl(id, 'x'), nvl2(id, 'x', 'y') " +
"from range(2)")
checkKeywordsExistsInExplain(df,
"Project [coalesce(cast(id#xL as string), x) AS ifnull(id, x)#x, " +
"id#xL AS nullif(id, x)#xL, coalesce(cast(id#xL as string), x) AS nvl(id, x)#x, " +
"x AS nvl2(id, x, y)#x]")
}
test("SPARK-26659: explain of DataWritingCommandExec should not contain duplicate cmd.nodeName") {
withTable("temptable") {
val df = sql("create table temptable using parquet as select * from range(2)")
withNormalizedExplain(df, SimpleMode) { normalizedOutput =>
assert("Create\\\\w*?TableAsSelectCommand".r.findAllMatchIn(normalizedOutput).length == 1)
}
}
}
test("SPARK-33853: explain codegen - check presence of subquery") {
withSQLConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "true") {
withTempView("df") {
val df1 = spark.range(1, 100)
df1.createTempView("df")
val sqlText = "EXPLAIN CODEGEN SELECT (SELECT min(id) FROM df)"
val expectedText = "Found 3 WholeStageCodegen subtrees."
withNormalizedExplain(sqlText) { normalizedOutput =>
assert(normalizedOutput.contains(expectedText))
}
}
}
}
test("explain formatted - check presence of subquery in case of DPP") {
withTable("df1", "df2") {
withSQLConf(SQLConf.DYNAMIC_PARTITION_PRUNING_ENABLED.key -> "true",
SQLConf.DYNAMIC_PARTITION_PRUNING_REUSE_BROADCAST_ONLY.key -> "false",
SQLConf.EXCHANGE_REUSE_ENABLED.key -> "false") {
spark.range(1000).select(col("id"), col("id").as("k"))
.write
.partitionBy("k")
.format("parquet")
.mode("overwrite")
.saveAsTable("df1")
spark.range(100)
.select(col("id"), col("id").as("k"))
.write
.partitionBy("k")
.format("parquet")
.mode("overwrite")
.saveAsTable("df2")
val sqlText =
"""
|EXPLAIN FORMATTED SELECT df1.id, df2.k
|FROM df1 JOIN df2 ON df1.k = df2.k AND df2.id < 2
|""".stripMargin
val expected_pattern1 =
"Subquery:1 Hosting operator id = 1 Hosting Expression = k#xL IN subquery#x"
val expected_pattern2 =
"PartitionFilters: \\\\[isnotnull\\\\(k#xL\\\\), dynamicpruningexpression\\\\(k#xL " +
"IN subquery#x\\\\)\\\\]"
val expected_pattern3 =
"Location: InMemoryFileIndex \\\\[\\\\S*org.apache.spark.sql.ExplainSuite" +
"/df2/\\\\S*, ... 99 entries\\\\]"
val expected_pattern4 =
"Location: InMemoryFileIndex \\\\[\\\\S*org.apache.spark.sql.ExplainSuite" +
"/df1/\\\\S*, ... 999 entries\\\\]"
withNormalizedExplain(sqlText) { normalizedOutput =>
assert(expected_pattern1.r.findAllMatchIn(normalizedOutput).length == 1)
assert(expected_pattern2.r.findAllMatchIn(normalizedOutput).length == 1)
assert(expected_pattern3.r.findAllMatchIn(normalizedOutput).length == 2)
assert(expected_pattern4.r.findAllMatchIn(normalizedOutput).length == 1)
}
}
}
}
test("SPARK-33850: explain formatted - check presence of subquery in case of AQE") {
withSQLConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true") {
withTempView("df") {
val df = spark.range(1, 100)
df.createTempView("df")
val sqlText = "EXPLAIN FORMATTED SELECT (SELECT min(id) FROM df) as v"
val expected_pattern =
"Subquery:1 Hosting operator id = 2 Hosting Expression = Subquery subquery#x"
withNormalizedExplain(sqlText) { normalizedOutput =>
assert(expected_pattern.r.findAllMatchIn(normalizedOutput).length == 1)
}
}
}
}
test("Support ExplainMode in Dataset.explain") {
val df1 = Seq((1, 2), (2, 3)).toDF("k", "v1")
val df2 = Seq((2, 3), (1, 1)).toDF("k", "v2")
val testDf = df1.join(df2, "k").groupBy("k").agg(count("v1"), sum("v1"), avg("v2"))
val simpleExplainOutput = getNormalizedExplain(testDf, SimpleMode)
assert(simpleExplainOutput.startsWith("== Physical Plan =="))
Seq("== Parsed Logical Plan ==",
"== Analyzed Logical Plan ==",
"== Optimized Logical Plan ==").foreach { planType =>
assert(!simpleExplainOutput.contains(planType))
}
checkKeywordsExistsInExplain(
testDf,
ExtendedMode,
"== Parsed Logical Plan ==" ::
"== Analyzed Logical Plan ==" ::
"== Optimized Logical Plan ==" ::
"== Physical Plan ==" ::
Nil: _*)
checkKeywordsExistsInExplain(
testDf,
CostMode,
"Statistics(sizeInBytes=" ::
Nil: _*)
checkKeywordsExistsInExplain(
testDf,
CodegenMode,
"WholeStageCodegen subtrees" ::
"Generated code:" ::
Nil: _*)
checkKeywordsExistsInExplain(
testDf,
FormattedMode,
"* LocalTableScan (1)" ::
"(1) LocalTableScan [codegen id :" ::
Nil: _*)
}
test("SPARK-34970: Redact Map type options in explain output") {
val password = "MyPassWord"
val token = "MyToken"
val value = "value"
val options = Map("password" -> password, "token" -> token, "key" -> value)
val cmd = SaveIntoDataSourceCommand(spark.range(10).logicalPlan, new TestOptionsSource,
options, SaveMode.Overwrite)
Seq(SimpleMode, ExtendedMode, FormattedMode).foreach { mode =>
checkKeywordsExistsInExplain(cmd, mode, value)
}
Seq(SimpleMode, ExtendedMode, CodegenMode, CostMode, FormattedMode).foreach { mode =>
checkKeywordsNotExistsInExplain(cmd, mode, password)
checkKeywordsNotExistsInExplain(cmd, mode, token)
}
}
test("SPARK-34970: Redact CaseInsensitiveMap type options in explain output") {
val password = "MyPassWord"
val token = "MyToken"
val value = "value"
val tableName = "t"
withTable(tableName) {
val df1 = spark.range(10).toDF()
df1.write.format("json").saveAsTable(tableName)
val df2 = spark.read
.option("key", value)
.option("password", password)
.option("token", token)
.table(tableName)
checkKeywordsExistsInExplain(df2, ExtendedMode, value)
Seq(SimpleMode, ExtendedMode, CodegenMode, CostMode, FormattedMode).foreach { mode =>
checkKeywordsNotExistsInExplain(df2, mode, password)
checkKeywordsNotExistsInExplain(df2, mode, token)
}
}
}
test("Dataset.toExplainString has mode as string") {
val df = spark.range(10).toDF
def assertExplainOutput(mode: ExplainMode): Unit = {
assert(df.queryExecution.explainString(mode).replaceAll("#\\d+", "#x").trim ===
getNormalizedExplain(df, mode).trim)
}
assertExplainOutput(SimpleMode)
assertExplainOutput(ExtendedMode)
assertExplainOutput(CodegenMode)
assertExplainOutput(CostMode)
assertExplainOutput(FormattedMode)
val errMsg = intercept[IllegalArgumentException] {
ExplainMode.fromString("unknown")
}.getMessage
assert(errMsg.contains("Unknown explain mode: unknown"))
}
test("SPARK-31504: Output fields in formatted Explain should have determined order") {
withTempPath { path =>
spark.range(10).selectExpr("id as a", "id as b", "id as c", "id as d", "id as e")
.write.mode("overwrite").parquet(path.getAbsolutePath)
val df1 = spark.read.parquet(path.getAbsolutePath)
val df2 = spark.read.parquet(path.getAbsolutePath)
assert(getNormalizedExplain(df1, FormattedMode) === getNormalizedExplain(df2, FormattedMode))
}
}
test("Coalesced bucket info should be a part of explain string") {
withTable("t1", "t2") {
withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "0",
SQLConf.COALESCE_BUCKETS_IN_JOIN_ENABLED.key -> "true") {
Seq(1, 2).toDF("i").write.bucketBy(8, "i").saveAsTable("t1")
Seq(2, 3).toDF("i").write.bucketBy(4, "i").saveAsTable("t2")
val df1 = spark.table("t1")
val df2 = spark.table("t2")
val joined = df1.join(df2, df1("i") === df2("i"))
checkKeywordsExistsInExplain(
joined,
SimpleMode,
"SelectedBucketsCount: 8 out of 8 (Coalesced to 4)" :: Nil: _*)
}
}
}
test("Explain formatted output for scan operator for datasource V2") {
withTempDir { dir =>
Seq("parquet", "orc", "csv", "json").foreach { fmt =>
val basePath = dir.getCanonicalPath + "/" + fmt
val pushFilterMaps = Map (
"parquet" ->
"|PushedFilters: \\\\[IsNotNull\\\\(value\\\\), GreaterThan\\\\(value,2\\\\)\\\\]",
"orc" ->
"|PushedFilters: \\\\[.*\\\\(id\\\\), .*\\\\(value\\\\), .*\\\\(id,1\\\\), .*\\\\(value,2\\\\)\\\\]",
"csv" ->
"|PushedFilters: \\\\[IsNotNull\\\\(value\\\\), GreaterThan\\\\(value,2\\\\)\\\\]",
"json" ->
"|remove_marker"
)
val expected_plan_fragment1 =
s"""
|\\\\(1\\\\) BatchScan
|Output \\\\[2\\\\]: \\\\[value#x, id#x\\\\]
|DataFilters: \\\\[isnotnull\\\\(value#x\\\\), \\\\(value#x > 2\\\\)\\\\]
|Format: $fmt
|Location: InMemoryFileIndex\\\\([0-9]+ paths\\\\)\\\\[.*\\\\]
|PartitionFilters: \\\\[isnotnull\\\\(id#x\\\\), \\\\(id#x > 1\\\\)\\\\]
${pushFilterMaps.get(fmt).get}
|ReadSchema: struct\\\\<value:int\\\\>
|""".stripMargin.replaceAll("\\nremove_marker", "").trim
spark.range(10)
.select(col("id"), col("id").as("value"))
.write.option("header", true)
.partitionBy("id")
.format(fmt)
.save(basePath)
val readSchema =
StructType(Seq(StructField("id", IntegerType), StructField("value", IntegerType)))
withSQLConf(SQLConf.USE_V1_SOURCE_LIST.key -> "") {
val df = spark
.read
.schema(readSchema)
.option("header", true)
.format(fmt)
.load(basePath).where($"id" > 1 && $"value" > 2)
val normalizedOutput = getNormalizedExplain(df, FormattedMode)
assert(expected_plan_fragment1.r.findAllMatchIn(normalizedOutput).length == 1)
}
}
}
}
test("Explain UnresolvedRelation with CaseInsensitiveStringMap options") {
val tableName = "test"
withTable(tableName) {
val df1 = Seq((1L, "a"), (2L, "b"), (3L, "c")).toDF("id", "data")
df1.write.saveAsTable(tableName)
val df2 = spark.read
.option("key1", "value1")
.option("KEY2", "VALUE2")
.table(tableName)
// == Parsed Logical Plan ==
// 'UnresolvedRelation [test], [key1=value1, KEY2=VALUE2]
checkKeywordsExistsInExplain(df2, keywords = "[key1=value1, KEY2=VALUE2]")
}
}
test("SPARK-35225: Handle empty output for analyzed plan") {
withTempView("test") {
checkKeywordsExistsInExplain(
sql("CREATE TEMPORARY VIEW test AS SELECT 1"),
"== Analyzed Logical Plan ==\\nCreateViewCommand")
}
}
}
class ExplainSuiteAE extends ExplainSuiteHelper with EnableAdaptiveExecutionSuite {
import testImplicits._
test("SPARK-35884: Explain Formatted") {
val df1 = Seq((1, 2), (2, 3)).toDF("k", "v1")
val df2 = Seq((2, 3), (1, 1)).toDF("k", "v2")
val testDf = df1.join(df2, "k").groupBy("k").agg(count("v1"), sum("v1"), avg("v2"))
// trigger the final plan for AQE
testDf.collect()
// AdaptiveSparkPlan (21)
// +- == Final Plan ==
// * HashAggregate (12)
// +- CustomShuffleReader (11)
// +- ShuffleQueryStage (10)
// +- Exchange (9)
// +- * HashAggregate (8)
// +- * Project (7)
// +- * BroadcastHashJoin Inner BuildRight (6)
// :- * LocalTableScan (1)
// +- BroadcastQueryStage (5)
// +- BroadcastExchange (4)
// +- * Project (3)
// +- * LocalTableScan (2)
// +- == Initial Plan ==
// HashAggregate (20)
// +- Exchange (19)
// +- HashAggregate (18)
// +- Project (17)
// +- BroadcastHashJoin Inner BuildRight (16)
// :- Project (14)
// : +- LocalTableScan (13)
// +- BroadcastExchange (15)
// +- Project (3)
// +- LocalTableScan (2)
checkKeywordsExistsInExplain(
testDf,
FormattedMode,
"""
|(5) BroadcastQueryStage
|Output [2]: [k#x, v2#x]
|Arguments: 0""".stripMargin,
"""
|(10) ShuffleQueryStage
|Output [5]: [k#x, count#xL, sum#xL, sum#x, count#xL]
|Arguments: 1""".stripMargin,
"""
|(11) CustomShuffleReader
|Input [5]: [k#x, count#xL, sum#xL, sum#x, count#xL]
|""".stripMargin,
"""
|(16) BroadcastHashJoin
|Left keys [1]: [k#x]
|Right keys [1]: [k#x]
|Join condition: None
|""".stripMargin,
"""
|(19) Exchange
|Input [5]: [k#x, count#xL, sum#xL, sum#x, count#xL]
|""".stripMargin,
"""
|(21) AdaptiveSparkPlan
|Output [4]: [k#x, count(v1)#xL, sum(v1)#xL, avg(v2)#x]
|Arguments: isFinalPlan=true
|""".stripMargin
)
checkKeywordsNotExistsInExplain(testDf, FormattedMode, "unknown")
}
test("SPARK-35884: Explain should only display one plan before AQE takes effect") {
val df = (0 to 10).toDF("id").where('id > 5)
val modes = Seq(SimpleMode, ExtendedMode, CostMode, FormattedMode)
modes.foreach { mode =>
checkKeywordsExistsInExplain(df, mode, "AdaptiveSparkPlan")
checkKeywordsNotExistsInExplain(df, mode, "Initial Plan", "Current Plan")
}
df.collect()
modes.foreach { mode =>
checkKeywordsExistsInExplain(df, mode, "Initial Plan", "Final Plan")
checkKeywordsNotExistsInExplain(df, mode, "unknown")
}
}
test("SPARK-35884: Explain formatted with subquery") {
withTempView("t1", "t2") {
spark.range(100).select('id % 10 as "key", 'id as "value").createOrReplaceTempView("t1")
spark.range(10).createOrReplaceTempView("t2")
val query =
"""
|SELECT key, value FROM t1
|JOIN t2 ON t1.key = t2.id
|WHERE value > (SELECT MAX(id) FROM t2)
|""".stripMargin
val df = sql(query).toDF()
df.collect()
checkKeywordsExistsInExplain(df, FormattedMode,
"""
|(2) Filter [codegen id : 2]
|Input [1]: [id#xL]
|Condition : ((id#xL > Subquery subquery#x, [id=#x]) AND isnotnull((id#xL % 10)))
|""".stripMargin,
"""
|(6) BroadcastQueryStage
|Output [1]: [id#xL]
|Arguments: 0""".stripMargin,
"""
|(12) AdaptiveSparkPlan
|Output [2]: [key#xL, value#xL]
|Arguments: isFinalPlan=true
|""".stripMargin,
"""
|Subquery:1 Hosting operator id = 2 Hosting Expression = Subquery subquery#x, [id=#x]
|""".stripMargin,
"""
|(16) ShuffleQueryStage
|Output [1]: [max#xL]
|Arguments: 0""".stripMargin,
"""
|(20) AdaptiveSparkPlan
|Output [1]: [max(id)#xL]
|Arguments: isFinalPlan=true
|""".stripMargin
)
checkKeywordsNotExistsInExplain(df, FormattedMode, "unknown")
}
}
test("SPARK-35133: explain codegen should work with AQE") {
withSQLConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "true") {
withTempView("df") {
val df = spark.range(5).select(col("id").as("key"), col("id").as("value"))
df.createTempView("df")
val sqlText = "EXPLAIN CODEGEN SELECT key, MAX(value) FROM df GROUP BY key"
val expectedCodegenText = "Found 2 WholeStageCodegen subtrees."
val expectedNoCodegenText = "Found 0 WholeStageCodegen subtrees."
withNormalizedExplain(sqlText) { normalizedOutput =>
assert(normalizedOutput.contains(expectedNoCodegenText))
}
val aggDf = df.groupBy('key).agg(max('value))
withNormalizedExplain(aggDf, CodegenMode) { normalizedOutput =>
assert(normalizedOutput.contains(expectedNoCodegenText))
}
// trigger the final plan for AQE
aggDf.collect()
withNormalizedExplain(aggDf, CodegenMode) { normalizedOutput =>
assert(normalizedOutput.contains(expectedCodegenText))
}
}
}
}
}
case class ExplainSingleData(id: Int)
|
wangmiao1981/spark
|
sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
|
Scala
|
apache-2.0
| 26,407
|
package org.jetbrains.plugins.scala.compilationCharts.ui
sealed trait VAlign
object VAlign {
final case object Center extends VAlign
final case object Bottom extends VAlign
final case object Top extends VAlign
}
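// Hedged sketch (not part of the original file): as a sealed ADT, VAlign is intended to be
// consumed with an exhaustive pattern match; the helper below is purely illustrative.
object VAlignSketch {
  def verticalOffset(align: VAlign, height: Double): Double = align match {
    case VAlign.Top => 0.0
    case VAlign.Center => height / 2
    case VAlign.Bottom => height
  }
}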
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/compilationCharts/ui/VAlign.scala
|
Scala
|
apache-2.0
| 221
|
package breeze.linalg.operators
import breeze.collection.mutable.OpenAddressHashArray
import breeze.generic.UFunc
import breeze.generic.UFunc.UImpl2
import breeze.linalg.support.{CanCopy, CanTraverseValues, CanZipMapKeyValues, CanZipMapValues}
import breeze.linalg._
import breeze.macros.expand
import breeze.math.{Field, Ring, Semiring}
import breeze.storage.Zero
import breeze.macros._
import breeze.math.PowImplicits._
import scala.reflect.ClassTag
import scala.{specialized => spec}
trait HashVectorOps
extends HashVectorExpandOps
with DenseVector_HashVector_Ops
with HashVector_DenseVector_Ops
with HashVector_SparseVector_Ops
with SparseVector_HashVector_Ops
trait DenseVector_HashVector_Ops extends GenericOps with DenseVectorOps with HashVectorExpandOps {
// @expand
// implicit def impl_Op_InPlace_DV_HV[
// @expand.args(Int, Double, Float, Long) T,
// @expand.args(OpMulScalar, OpDiv, OpSet, OpMod, OpPow) Op <: OpType](
// implicit @expand.sequence[Op]({ _ * _ }, { _ / _ }, { (__x, __y) =>
// __y
// }, { _ % _ }, { _.pow(_) })
// op: Op.Impl1[T, T, T]): Op.InPlaceImpl2[DenseVector[T], HashVector[T]] =
// new Op.InPlaceImpl2[DenseVector[T], HashVector[T]] {
// def apply(a: DenseVector[T], b: HashVector[T]): Unit = {
// require(a.length == b.length, "Vectors must have the same length")
// val ad = a.data
// var aoff = a.offset
// val astride = a.stride
//
// // TODO: replace OpMulScalar with faster variant for common cases?
// cforRange(0 until a.length) { i =>
// ad(aoff) = op(ad(aoff), b(i))
// aoff += astride
// }
//
// }
// }
@expand
@expand.valify
implicit def impl_scaleAdd_InPlace_DV_T_HV[@expand.args(Int, Double, Float, Long) T]
: scaleAdd.InPlaceImpl3[DenseVector[T], T, HashVector[T]] =
new scaleAdd.InPlaceImpl3[DenseVector[T], T, HashVector[T]] {
def apply(dv: DenseVector[T], scalar: T, hv: HashVector[T]) = {
require(dv.length == hv.length, "Vectors must have the same length")
val ad = dv.data
val bd = hv.data
val bi = hv.index
val bsize = hv.iterableSize
if (scalar != 0)
cforRange(0 until bsize) { i =>
val aoff = dv.offset + bi(i) * dv.stride
if (hv.isActive(i))
ad(aoff) += scalar * bd(i)
}
}
implicitly[TernaryUpdateRegistry[Vector[T], T, Vector[T], scaleAdd.type]].register(this)
}
@expand
@expand.valify
implicit def impl_OpMulInner_DV_HV_eq_S[@expand.args(Int, Double, Float, Long) T](
implicit @expand.sequence[T](0, 0.0, 0f, 0L) zero: T)
: breeze.linalg.operators.OpMulInner.Impl2[DenseVector[T], HashVector[T], T] = {
new breeze.linalg.operators.OpMulInner.Impl2[DenseVector[T], HashVector[T], T] {
def apply(a: DenseVector[T], b: HashVector[T]) = {
var result: T = zero
val bd = b.data
val bi = b.index
val bsize = b.iterableSize
val adata = a.data
val aoff = a.offset
val stride = a.stride
cforRange(0 until bsize) { i =>
if (b.isActive(i))
result += adata(aoff + bi(i) * stride) * bd(i)
}
result
}
implicitly[BinaryRegistry[Vector[T], Vector[T], OpMulInner.type, T]].register(this)
}
}
}
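// Hedged usage sketch (not part of the original file): the implicits above back the ordinary
// breeze syntax for mixed dense/hash vectors, e.g. dot products and axpy-style updates.
object HashVectorOpsSketch {
  def main(args: Array[String]): Unit = {
    val hv = HashVector.zeros[Double](5)
    hv(1) = 2.0
    hv(4) = 3.0
    val dv = DenseVector(1.0, 1.0, 1.0, 1.0, 1.0)
    println(dv.dot(hv)) // 5.0, via the OpMulInner instance defined above
    axpy(2.0, hv, dv)   // dv += 2.0 * hv, via the scaleAdd instance defined above
    println(dv)         // DenseVector(1.0, 5.0, 1.0, 1.0, 7.0)
  }
}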
trait HashVector_DenseVector_Ops extends DenseVector_HashVector_Ops {
@expand
@expand.valify
implicit def impl_Op_InPlace_HV_DV[
@expand.args(Int, Double, Float, Long) T,
@expand.args(OpAdd, OpSub, OpMulScalar, OpDiv, OpSet, OpMod, OpPow) Op <: OpType](
implicit @expand.sequence[Op]({ _ + _ }, { _ - _ }, { _ * _ }, { _ / _ }, { (__x, __y) =>
__y
}, { _ % _ }, { _.pow(_) })
op: Op.Impl2[T, T, T]): Op.InPlaceImpl2[HashVector[T], DenseVector[T]] =
new Op.InPlaceImpl2[HashVector[T], DenseVector[T]] {
def apply(a: HashVector[T], b: DenseVector[T]): Unit = {
require(a.length == b.length, "Vectors must have the same length")
var i = 0
while (i < b.length) {
a(i) = op(a(i), b(i))
i += 1
}
}
implicitly[BinaryUpdateRegistry[Vector[T], Vector[T], Op.type]].register(this)
}
@expand
@expand.valify
implicit def impl_Op_HV_DV_eq_HV[
@expand.args(Int, Double, Float, Long) T,
@expand.args(OpAdd, OpSub, OpMulScalar, OpDiv, OpSet, OpMod, OpPow) Op <: OpType](
implicit @expand.sequence[Op]({ _ + _ }, { _ - _ }, { _ * _ }, { _ / _ }, { (__x, __y) =>
__y
}, { _ % _ }, { _.pow(_) })
op: Op.Impl2[T, T, T]): Op.Impl2[HashVector[T], DenseVector[T], DenseVector[T]] = {
new Op.Impl2[HashVector[T], DenseVector[T], DenseVector[T]] {
def apply(a: HashVector[T], b: DenseVector[T]) = {
require(a.length == b.length, "Vectors must have the same length")
val result = DenseVector.zeros[T](a.length)
var i = 0
while (i < b.length) {
result(i) = op(a(i), b(i))
i += 1
}
result
}
implicitly[BinaryRegistry[Vector[T], Vector[T], Op.type, Vector[T]]].register(this)
}
}
@expand
implicit def impl_OpMulInner_HV_DV_eq_T[@expand.args(Int, Float, Double, Long) T]
: breeze.linalg.operators.OpMulInner.Impl2[HashVector[T], DenseVector[T], T] = {
new breeze.linalg.operators.OpMulInner.Impl2[HashVector[T], DenseVector[T], T] {
def apply(a: HashVector[T], b: DenseVector[T]) = {
require(b.length == a.length, "Vectors must be the same length!")
b.dot(a)
}
}
// Vector.canDotProductV_T.register(this)
}
}
trait HashVectorExpandOps extends VectorOps with HashVector_GenericOps {
@expand
@expand.valify
implicit def impl_scaleAdd_InPlace_HV_S_HV[@expand.args(Int, Double, Float, Long) T]
: scaleAdd.InPlaceImpl3[HashVector[T], T, HashVector[T]] = {
new scaleAdd.InPlaceImpl3[HashVector[T], T, HashVector[T]] {
def apply(dest: HashVector[T], scalar: T, source: HashVector[T]) = {
require(dest.length == source.length, "Vectors must have the same length")
val bsize = source.iterableSize
if (scalar != 0) {
val bd = source.data
val bi = source.index
cforRange(0 until bsize) { i =>
if (source.isActive(i))
dest(bi(i)) += scalar * bd(i)
}
}
}
implicitly[TernaryUpdateRegistry[Vector[T], T, Vector[T], scaleAdd.type]].register(this)
}
}
@expand
@expand.valify
implicit def impl_Op_HV_HV_eq_HV[
@expand.args(Int, Double, Float, Long) T,
@expand.args(OpAdd, OpSub) Op <: OpType](
implicit @expand.sequence[Op]({ _ + _ }, { _ - _ })
op: Op.Impl2[T, T, T]): Op.Impl2[HashVector[T], HashVector[T], HashVector[T]] =
new Op.Impl2[HashVector[T], HashVector[T], HashVector[T]] {
def apply(a: HashVector[T], b: HashVector[T]): HashVector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
// if we're adding, we can do it the other way
// upcast to prevent warning when Op = OpSub
if ((Op: Any) == OpAdd && a.activeSize < b.activeSize) {
return apply(b, a)
}
val result = a.copy
cforRange(0 until b.iterableSize) { boff =>
if (b.isActive(boff)) {
val k = b.index(boff)
val v = b.data(boff)
result(k) = op(a(k), v)
}
}
result
}
implicitly[BinaryRegistry[Vector[T], Vector[T], Op.type, Vector[T]]].register(this)
}
@expand
@expand.valify
implicit def impl_OpMulScalar_HV_HV_eq_HV[@expand.args(Int, Double, Float, Long) T](
implicit @expand.sequence[T](0, 0.0, 0.0f, 0L) zero: T)
: OpMulScalar.Impl2[HashVector[T], HashVector[T], HashVector[T]] =
new OpMulScalar.Impl2[HashVector[T], HashVector[T], HashVector[T]] {
def apply(a: HashVector[T], b: HashVector[T]): HashVector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
// this op has the property that if either lhs or rhs is 0, then the result is 0
if (a.activeSize < b.activeSize) return apply(b, a)
val builder = new VectorBuilder[T](a.length)
for ((k, v) <- b.activeIterator) {
val r = a(k) * v
if (r != zero)
builder.add(k, r)
}
builder.toHashVector
}
implicitly[BinaryRegistry[Vector[T], Vector[T], OpMulScalar.type, Vector[T]]].register(this)
}
@expand
@expand.valify
implicit def impl_Op_HV_HV_eq_HV_lhs_nilpotent[
@expand.args(Int, Double, Float, Long) T,
@expand.args(OpDiv, OpMod, OpPow) Op <: OpType](
implicit @expand.sequence[Op]({ _ / _ }, { _ % _ }, { _.pow(_) })
op: Op.Impl2[T, T, T]): Op.Impl2[HashVector[T], HashVector[T], HashVector[T]] =
new Op.Impl2[HashVector[T], HashVector[T], HashVector[T]] {
def apply(a: HashVector[T], b: HashVector[T]): HashVector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
val result =
new HashVector[T](new OpenAddressHashArray[T](a.length, default = 0, initialSize = a.array.iterableSize))
if (b.activeSize != b.size) {
// have 0s in RHS, will produce non-zero results with LHS 0s (NaN or throw)
for ((k, v) <- a.iterator) {
result(k) = op(v, b(k))
}
} else {
for ((k, v) <- a.activeIterator) {
result(k) = op(v, b(k))
}
}
result
}
implicitly[BinaryRegistry[Vector[T], Vector[T], Op.type, Vector[T]]].register(this)
}
@expand
@expand.valify
implicit def impl_Op_HV_V_eq_HV[
@expand.args(Int, Double, Float, Long) T,
@expand.args(OpAdd, OpSub, OpMulScalar, OpDiv, OpSet, OpMod, OpPow) Op <: OpType](
implicit @expand.sequence[Op]({ _ + _ }, { _ - _ }, { _ * _ }, { _ / _ }, { (__x, __y) =>
__y
}, { _ % _ }, { _.pow(_) })
op: Op.Impl2[T, T, T]): Op.Impl2[HashVector[T], Vector[T], HashVector[T]] =
new Op.Impl2[HashVector[T], Vector[T], HashVector[T]] {
def apply(a: HashVector[T], b: Vector[T]): HashVector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
val result = HashVector.zeros[T](a.length)
var i = 0
while (i < a.length) {
result(i) = op(a(i), b(i))
i += 1
}
result
}
implicitly[BinaryRegistry[Vector[T], Vector[T], Op.type, Vector[T]]].register(this)
}
@expand
@expand.valify
implicit def impl_Op_HV_S_eq_HV_add[
@expand.args(Int, Double, Float, Long) T,
@expand.args(OpAdd, OpSub) Op <: OpType](
implicit @expand.sequence[Op]({ _ + _ }, { _ - _ })
op: Op.Impl2[T, T, T],
@expand.sequence[T](0, 0.0, 0.0f, 0L)
zero: T): Op.Impl2[HashVector[T], T, HashVector[T]] = new Op.Impl2[HashVector[T], T, HashVector[T]] {
def apply(a: HashVector[T], b: T): HashVector[T] = {
if (b == 0) {
return a.copy
}
val result = HashVector.zeros[T](a.length)
var i = 0
while (i < a.length) {
result(i) = op(a(i), b)
i += 1
}
result
}
implicitly[BinaryRegistry[Vector[T], T, Op.type, Vector[T]]].register(this)
}
@expand
@expand.valify
implicit def impl_Op_HV_S_eq_HV_zeroy[
@expand.args(Int, Double, Float, Long) T,
@expand.args(OpMulScalar, OpMulMatrix, OpDiv, OpMod, OpPow) Op <: OpType](
implicit @expand.sequence[Op]({ _ * _ }, { _ * _ }, { _ / _ }, { _ % _ }, { _.pow(_) })
op: Op.Impl2[T, T, T],
@expand.sequence[T](0, 0.0, 0.0f, 0L)
zero: T): Op.Impl2[HashVector[T], T, HashVector[T]] = new Op.Impl2[HashVector[T], T, HashVector[T]] {
def apply(a: HashVector[T], b: T): HashVector[T] = {
val result = HashVector.zeros[T](a.length)
// can short-circuit multiplication by 0
// upcast to prevent warning
if (((Op: Any) == OpMulScalar || (Op: Any) == OpMulMatrix) && b == 0) {
return result
}
if (b == 0) { // in a degenerate case, need to iterate all
for ((k, v) <- a.iterator) {
result(k) = op(v, b)
}
} else {
for ((k, v) <- a.activeIterator) {
result(k) = op(v, b)
}
}
result
}
implicitly[BinaryRegistry[Vector[T], T, Op.type, Vector[T]]].register(this)
}
@expand
@expand.valify
implicit def impl_OpSet_InPlace_HV_HV[@expand.args(Int, Double, Float, Long) T]: OpSet.InPlaceImpl2[HashVector[T], HashVector[T]] =
new OpSet.InPlaceImpl2[HashVector[T], HashVector[T]] {
def apply(a: HashVector[T], b: HashVector[T]): Unit = {
require(b.length == a.length, "Vectors must be the same length!")
b.array.copyTo(a.array)
}
implicitly[BinaryUpdateRegistry[Vector[T], Vector[T], OpSet.type]].register(this)
}
@expand
@expand.valify
implicit def impl_Op_InPlace_HV_S_idempotent[
@expand.args(Int, Double, Float, Long) T,
@expand.args(OpAdd, OpSub) Op <: OpType](
implicit @expand.sequence[Op]({ _ + _ }, { _ - _ })
op: Op.Impl2[T, T, T]): Op.InPlaceImpl2[HashVector[T], T] = new Op.InPlaceImpl2[HashVector[T], T] {
def apply(a: HashVector[T], b: T): Unit = {
if (b == 0) return
var i = 0
while (i < a.length) {
a(i) = op(a(i), b)
i += 1
}
}
implicitly[BinaryUpdateRegistry[Vector[T], T, Op.type]].register(this)
}
@expand
@expand.valify
implicit def impl_OpMulScalar_InPlace_HV_S[@expand.args(Int, Double, Float, Long) T]
: OpMulScalar.InPlaceImpl2[HashVector[T], T] = new OpMulScalar.InPlaceImpl2[HashVector[T], T] {
def apply(a: HashVector[T], b: T): Unit = {
if (b == 0) {
a.clear()
return
}
for ((k, v) <- a.activeIterator) {
a(k) = v * b
}
}
implicitly[BinaryUpdateRegistry[Vector[T], T, OpMulScalar.type]].register(this)
}
@expand
@expand.valify
implicit def impl_OpSet_InPlace_HV_S[@expand.args(Int, Double, Float, Long) T]: OpSet.InPlaceImpl2[HashVector[T], T] =
new OpSet.InPlaceImpl2[HashVector[T], T] {
def apply(a: HashVector[T], b: T): Unit = {
if (b == 0) {
a.clear()
return
}
var i = 0
while (i < a.length) {
a(i) = b
i += 1
}
}
implicitly[BinaryUpdateRegistry[Vector[T], T, OpSet.type]].register(this)
}
@expand
@expand.valify
implicit def impl_Op_InPlace_HV_S_LHS_nilpotent[
@expand.args(Int, Double, Float, Long) T,
@expand.args(OpDiv, OpMod, OpPow) Op <: OpType](
implicit @expand.sequence[Op]({ _ / _ }, { _ % _ }, { _.pow(_) })
op: Op.Impl2[T, T, T]): Op.InPlaceImpl2[HashVector[T], T] = new Op.InPlaceImpl2[HashVector[T], T] {
def apply(a: HashVector[T], b: T): Unit = {
if (b == 0) {
// scalar 0 does bad things with these ops
var i = 0
while (i < a.length) {
a(i) = op(a(i), b)
i += 1
}
} else {
for ((k, v) <- a.activeIterator) {
a(k) = op(v, b)
}
}
}
implicitly[BinaryUpdateRegistry[Vector[T], T, Op.type]].register(this)
}
@expand
@expand.valify
implicit def impl_OpMulInner_HV_HV_eq_S[@expand.args(Int, Long, Double, Float) T](
implicit @expand.sequence[T](0, 0L, 0.0, 0f) zero: T)
: breeze.linalg.operators.OpMulInner.Impl2[HashVector[T], HashVector[T], T] = {
new breeze.linalg.operators.OpMulInner.Impl2[HashVector[T], HashVector[T], T] {
def apply(a: HashVector[T], b: HashVector[T]): T = {
require(b.length == a.length, "Vectors must be the same length!")
if (a.iterableSize > b.iterableSize) {
apply(b, a)
} else {
var result: T = zero
for ((k, v) <- a.activeIterator) {
result += v * b(k)
}
result
}
}
implicitly[BinaryRegistry[Vector[T], Vector[T], OpMulInner.type, T]].register(this)
}
}
// TODO(perf): do we need these specialized ones?
@expand
@expand.valify
implicit def impl_CanTraverseValues_HV[@expand.args(Int, Double, Float, Long) T]: CanTraverseValues[HashVector[T], T] = {
new CanTraverseValues[HashVector[T], T] {
/** Traverses all values from the given collection. */
override def traverse(from: HashVector[T], fn: CanTraverseValues.ValuesVisitor[T]): fn.type = {
cforRange(0 until from.iterableSize) { i =>
if (from.isActive(i)) {
fn.visit(from.data(i))
}
}
fn.zeros(from.size - from.activeSize, 0)
fn
}
override def isTraversableAgain(from: HashVector[T]): Boolean = true
}
}
implicit def impl_CanTraverseValues_HV_Generic[T]: CanTraverseValues[HashVector[T], T] = {
new CanTraverseValues[HashVector[T], T] {
/** Traverses all values from the given collection. */
override def traverse(from: HashVector[T], fn: CanTraverseValues.ValuesVisitor[T]): fn.type = {
cforRange(0 until from.iterableSize) { i =>
if (from.isActive(i)) {
fn.visit(from.data(i))
}
}
fn.zeros(from.size - from.activeSize, from.default)
fn
}
override def isTraversableAgain(from: HashVector[T]): Boolean = true
}
}
}
trait HashVector_SparseVector_Ops extends HashVectorExpandOps with SparseVectorOps {
@expand
@expand.valify
implicit def impl_Op_HV_SV_eq_HV_lhs_nilpotent[
@expand.args(Int, Double, Float, Long) T,
@expand.args(OpDiv, OpSet, OpMod, OpPow) Op <: OpType](
implicit @expand.sequence[Op]({ _ / _ }, { (__x, __y) =>
__y
}, { _ % _ }, { _.pow(_) })
op: Op.Impl2[T, T, T],
@expand.sequence[T](0, 0.0, 0.0f, 0L) zero: T): Op.Impl2[HashVector[T], SparseVector[T], HashVector[T]] =
new Op.Impl2[HashVector[T], SparseVector[T], HashVector[T]] {
def apply(a: HashVector[T], b: SparseVector[T]): HashVector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
val builder = new VectorBuilder[T](a.length)
if ((Op: Any) == OpSet || b.activeSize != b.length) {
cforRange(0 until b.length) { k =>
val r = op(a(k), b.otherApply(k))
if (r != zero)
builder.add(k, r)
}
} else {
for ((k, v) <- a.activeIterator) {
val r = op(v, b.otherApply(k))
if (r != zero)
builder.add(k, r)
}
}
builder.toHashVector
}
implicitly[BinaryRegistry[Vector[T], Vector[T], Op.type, Vector[T]]].register(this)
}
@expand
@expand.valify
implicit def impl_OpMulScalar_HV_SV_eq_HV[@expand.args(Int, Double, Float, Long) T](
implicit @expand.sequence[T](0, 0.0, 0.0f, 0L) zero: T)
: OpMulScalar.Impl2[HashVector[T], SparseVector[T], HashVector[T]] =
new OpMulScalar.Impl2[HashVector[T], SparseVector[T], HashVector[T]] {
def apply(a: HashVector[T], b: SparseVector[T]): HashVector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
val builder = new VectorBuilder[T](a.length)
// TODO: profile this and choose a threshold
if (b.activeSize < a.iterableSize) {
cforRange(0 until b.activeSize) { boff =>
val i = b.indexAt(boff)
val v = b.valueAt(boff)
val r = a(i) * v
if (r != zero)
builder.add(i, r)
}
} else {
cforRange(0 until a.iterableSize) { aoff =>
if (a.isActive(aoff)) {
val i = a.index(aoff)
val v = a.data(aoff)
val r = v * b(i)
if (r != zero)
builder.add(i, r)
}
}
}
builder.toHashVector
}
implicitly[BinaryRegistry[Vector[T], Vector[T], OpMulScalar.type, Vector[T]]].register(this)
}
@expand.valify
@expand
implicit def impl_scaleAdd_InPlace_HV_S_SV[@expand.args(Int, Double, Float, Long) T]
: scaleAdd.InPlaceImpl3[HashVector[T], T, SparseVector[T]] = {
new scaleAdd.InPlaceImpl3[HashVector[T], T, SparseVector[T]] {
def apply(a: HashVector[T], scale: T, b: SparseVector[T]): Unit = {
require(b.length == a.length, "Vectors must be the same length!")
if (scale != 0) {
cforRange(0 until b.activeSize) { boff =>
val k = b.indexAt(boff)
val v = b.valueAt(boff)
a(k) += scale * v
}
}
}
implicitly[TernaryUpdateRegistry[Vector[T], T, Vector[T], scaleAdd.type]].register(this)
}
}
@expand
@expand.valify
implicit def impl_OpMulInner_HV_SV_eq_S[@expand.args(Int, Long, Float, Double) T](
implicit @expand.sequence[T](0, 0L, 0f, 0.0) zero: T)
: breeze.linalg.operators.OpMulInner.Impl2[HashVector[T], SparseVector[T], T] = {
new breeze.linalg.operators.OpMulInner.Impl2[HashVector[T], SparseVector[T], T] {
def apply(a: HashVector[T], b: SparseVector[T]) = {
require(b.length == a.length, "Vectors must be the same length!")
var result: T = zero
// TODO: if a has much less nnz then b, then it would make sense to do a instead. profile and choose a threshold
// but iterating over b is faster than iterating over a and indexing into a is faster than indexing into b
// TODO: choose a threshold
cforRange(0 until b.activeSize) { boff =>
result += a(b.indexAt(boff)) * b.valueAt(boff)
}
result
}
implicitly[BinaryRegistry[Vector[T], Vector[T], OpMulInner.type, T]].register(this)
}
}
}
trait SparseVector_HashVector_Ops extends HashVectorExpandOps with HashVector_SparseVector_Ops with SparseVectorOps {
@expand.valify
@expand
implicit def impl_Op_SV_HV_eq_SV_lhs_nilpotent[
@expand.args(Int, Double, Float, Long) T,
@expand.args(OpDiv, OpSet, OpMod, OpPow) Op <: OpType](
implicit @expand.sequence[Op]({ _ / _ }, { (__x, __y) =>
__y
}, { _ % _ }, { _.pow(_) })
op: Op.Impl2[T, T, T],
@expand.sequence[T](0, 0.0, 0.0f, 0L) zero: T): Op.Impl2[SparseVector[T], HashVector[T], SparseVector[T]] =
new Op.Impl2[SparseVector[T], HashVector[T], SparseVector[T]] {
def apply(a: SparseVector[T], b: HashVector[T]): SparseVector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
val builder = new VectorBuilder[T](a.length)
if ((Op: Any) == OpSet || b.activeSize != b.length) {
cforRange(0 until a.length) { k =>
val r: T = op(a.otherApply(k), b(k))
if (r != zero)
builder.add(k, r)
}
} else {
cforRange(0 until a.activeSize) { aoff =>
val k = a.indexAt(aoff)
val v = a.valueAt(aoff)
val r = op(v, b(k))
if (r != zero)
builder.add(k, r)
}
}
builder.toSparseVector(alreadySorted = true, keysAlreadyUnique = true)
}
implicitly[BinaryRegistry[Vector[T], Vector[T], Op.type, Vector[T]]].register(this)
}
@expand.valify
@expand
implicit def impl_OpMulScalar_SV_HV_eq_SV[@expand.args(Int, Double, Float, Long) T](
implicit @expand.sequence[T](0, 0.0, 0.0f, 0L) zero: T)
: OpMulScalar.Impl2[SparseVector[T], HashVector[T], SparseVector[T]] =
new OpMulScalar.Impl2[SparseVector[T], HashVector[T], SparseVector[T]] {
def apply(a: SparseVector[T], b: HashVector[T]): SparseVector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
val builder = new VectorBuilder[T](a.length)
cforRange(0 until a.activeSize) { aoff =>
val k = a.indexAt(aoff)
val v = a.valueAt(aoff)
val r = v * b(k)
if (r != zero)
builder.add(k, r)
}
builder.toSparseVector(alreadySorted = true, keysAlreadyUnique = true)
}
implicitly[BinaryRegistry[Vector[T], Vector[T], OpMulScalar.type, Vector[T]]].register(this)
}
@expand
@expand.valify
implicit def impl_Op_SV_HV_eq_SV[@expand.args(Int, Double, Float, Long) T, @expand.args(OpAdd, OpSub) Op <: OpType](
implicit @expand.sequence[Op]({ _ + _ }, { _ - _ })
op: Op.Impl2[T, T, T]): Op.Impl2[SparseVector[T], HashVector[T], SparseVector[T]] =
new Op.Impl2[SparseVector[T], HashVector[T], SparseVector[T]] {
def apply(a: SparseVector[T], b: HashVector[T]): SparseVector[T] = {
require(b.length == a.length, "Vectors must be the same length!")
val builder = new VectorBuilder[T](a.length)
var aoff = 0
while (aoff < a.activeSize) {
val k = a.indexAt(aoff)
val v = a.valueAt(aoff)
builder.add(k, v)
aoff += 1
}
for ((k, v) <- b.activeIterator) {
builder.add(k, v)
}
builder.toSparseVector
}
implicitly[BinaryRegistry[Vector[T], Vector[T], Op.type, Vector[T]]].register(this)
}
implicit def impl_OpMulInner_SV_HV_eq_T[T](implicit op: OpMulInner.Impl2[HashVector[T], SparseVector[T], T])
: breeze.linalg.operators.OpMulInner.Impl2[SparseVector[T], HashVector[T], T] =
(a: SparseVector[T], b: HashVector[T]) => b dot a
// // TODO: switch to using VectorBuilders
// @expand.valify
// @expand
// implicit def impl_Op_InPlace_SV_HV[
// @expand.args(Int, Double, Float, Long) T,
// @expand.args(OpAdd, OpSub, OpMulScalar, OpDiv, OpSet, OpMod, OpPow) Op <: OpType]
// : Op.InPlaceImpl2[SparseVector[T], HashVector[T]] = {
// // this shouldn't be necessary but somehow Scala 2.12 can't handle it
// implicitly[BinaryUpdateRegistry[Vector[T], Vector[T], Op.type]]
// .register(GenericOps.updateFromPure[Op.type, SparseVector[T], HashVector[T], SparseVector[T]](
// implicitly,
// super.impl_OpSet_InPlace_SV_SV_Generic
// ))
// }
}
|
scalanlp/breeze
|
math/src/main/codegen/breeze/linalg/operators/HashVectorOps.scala
|
Scala
|
apache-2.0
| 26,195
|
package com.github.mdr.mash.parser
sealed trait RedirectOperator
object RedirectOperator {
case object StandardInput extends RedirectOperator
case object StandardOutput extends RedirectOperator
}
|
mdr/mash
|
src/main/scala/com/github/mdr/mash/parser/RedirectOperator.scala
|
Scala
|
mit
| 206
|
package com.sksamuel.elastic4s.bulk
import com.sksamuel.elastic4s.DocumentRef
import com.sksamuel.elastic4s.index.RichIndexResponse
import org.elasticsearch.action.DocWriteRequest.OpType
import org.elasticsearch.action.bulk.BulkItemResponse
import org.elasticsearch.action.bulk.BulkItemResponse.Failure
import org.elasticsearch.action.delete.DeleteResponse
import org.elasticsearch.action.index.IndexResponse
case class RichBulkItemResponse(original: BulkItemResponse) {
def failure: Failure = original.getFailure
def failureOpt: Option[Failure] = Option(failure)
def failureMessage: String = original.getFailureMessage
def failureMessageOpt: Option[String] = Option(failureMessage)
def index: String = original.getIndex
def `type`: String = original.getType
def id: String = original.getId
def ref = DocumentRef(index, `type`, id)
def version: Long = original.getVersion
def itemId: Int = original.getItemId
def opType: OpType = original.getOpType
@deprecated("use toDeleteResult", "5.0.0")
def deleteResponse(): Option[DeleteResponse] = original.getResponse match {
case d: DeleteResponse => Some(d)
case _ => None
}
@deprecated("use toIndexResult", "5.0.0")
def indexResult: Option[RichIndexResponse] = toIndexResult
def toIndexResult: Option[RichIndexResponse] = original.getResponse match {
case i: IndexResponse => Some(RichIndexResponse(i))
case _ => None
}
def isFailure: Boolean = original.isFailed
}
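/*
 * Illustrative sketch (not from the original source): `item` below is a placeholder for a raw
 * BulkItemResponse obtained from an Elasticsearch bulk call. The wrapper above lets callers inspect
 * each item without touching the Java API directly:
 * {{{
 *   val rich = RichBulkItemResponse(item)
 *   if (rich.isFailure)
 *     println(s"${rich.ref} failed: ${rich.failureMessageOpt.getOrElse("unknown")}")
 *   else
 *     rich.toIndexResult.foreach(r => println(s"indexed ${rich.id} at version ${rich.version}"))
 * }}}
 */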
|
aroundus-inc/elastic4s
|
elastic4s-tcp/src/main/scala/com/sksamuel/elastic4s/bulk/RichBulkItemResponse.scala
|
Scala
|
apache-2.0
| 1,473
|
/*
* Copyright 2014 Treode, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.treode.store.locks
import org.scalamock.scalatest.MockFactory
import org.scalatest.WordSpec
import com.treode.async.Async
import com.treode.async.stubs.StubScheduler
import com.treode.async.stubs.implicits._
import com.treode.store.{Bytes, StoreTestConfig, StoreTestTools, TxClock}
import com.treode.pickle.Picklers
import StoreTestTools._
class LockSpec extends WordSpec with MockFactory {
private implicit class RichLockSpace (space: LockSpace) {
def read (rt: Int, k1: String, ks: String*): Async [Unit] =
space.read (rt, (k1 +: ks) .map (_.hashCode))
def write (ft: Int, k1: String, ks: String*): Async [LockSet] =
space.write (ft, (k1 +: ks) .map (_.hashCode))
}
def assertClock (expected: Long) (actual: Option [TxClock]): Unit =
assertResult (Some (new TxClock (expected))) (actual)
"A Lock" when {
"not previously held" should {
"grant a reader immediately rather than invoke grant later" in {
val lock = new Lock
val r = mock [LockReader]
(r.rt _) .expects() .returns (0) .anyNumberOfTimes()
(r.grant _) .expects() .never()
assertResult (true) (lock.read (r))
}
"grant a writer immediately rather than invoke grant later" in {
val lock = new Lock
val w = mock [LockWriter]
(w.ft _) .expects() .returns (0) .anyNumberOfTimes()
(w.grant _) .expects (TxClock.MinValue) .never()
assertResult (Some (TxClock.MinValue)) (lock.write (w))
}}
"previously held by a reader" should {
"grant a writer immediately and not invoke the callback" in {
val lock = new Lock
val r = mock [LockReader]
(r.rt _) .expects() .returns (1) .anyNumberOfTimes()
(r.grant _) .expects() .never()
assertResult (true) (lock.read (r))
val w = mock [LockWriter]
(w.ft _) .expects() .returns (0) .anyNumberOfTimes()
(w.grant _) .expects (TxClock.MinValue) .never()
assertClock (1) (lock.write (w))
}}
"currently held by a writer" should {
"grant an earlier reader immediately" in {
val lock = new Lock
val w = mock [LockWriter]
(w.ft _) .expects() .returns (1) .anyNumberOfTimes()
(w.grant _) .expects (TxClock.MinValue) .never()
assertClock (1) (lock.write (w))
val r = mock [LockReader]
(r.rt _) .expects() .returns (0) .anyNumberOfTimes()
(r.grant _) .expects() .never()
assertResult (true) (lock.read (r))
lock.release (w)
}
"hold a later reader until release" in {
val lock = new Lock
val w = mock [LockWriter]
(w.ft _) .expects() .returns (1) .anyNumberOfTimes()
(w.grant _) .expects (TxClock.MinValue) .never()
assertClock (1) (lock.write (w))
val r = mock [LockReader]
(r.rt _) .expects() .returns (2) .anyNumberOfTimes()
(r.grant _) .expects() .never()
assertResult (false) (lock.read (r))
(r.grant _) .expects() .once()
lock.release (w)
}
"release all readers at once" in {
val lock = new Lock
val w = mock [LockWriter]
(w.ft _) .expects() .returns (1) .anyNumberOfTimes()
(w.grant _) .expects (TxClock.MinValue) .never()
assertClock (1) (lock.write (w))
val r1 = mock [LockReader]
(r1.rt _) .expects() .returns (2) .anyNumberOfTimes()
(r1.grant _) .expects() .never()
assertResult (false) (lock.read (r1))
val r2 = mock [LockReader]
(r2.rt _) .expects() .returns (2) .anyNumberOfTimes()
(r2.grant _) .expects() .never()
assertResult (false) (lock.read (r2))
(r1.grant _) .expects() .once()
(r2.grant _) .expects() .once()
lock.release (w)
}
"hold the second writer until release" in {
val lock = new Lock
val w1 = mock [LockWriter]
(w1.ft _) .expects() .returns (0) .anyNumberOfTimes()
(w1.grant _) .expects (TxClock.MinValue) .never()
assertClock (0) (lock.write (w1))
val w2 = mock [LockWriter]
(w2.ft _) .expects() .returns (0) .anyNumberOfTimes()
(w2.grant _) .expects (TxClock.MinValue) .never()
assertResult (None) (lock.write (w2))
(w2.grant _) .expects (TxClock.MinValue) .once()
lock.release (w1)
}
"release only one writer" in {
val lock = new Lock
val w1 = mock [LockWriter]
(w1.ft _) .expects() .returns (0) .anyNumberOfTimes()
(w1.grant _) .expects (TxClock.MinValue) .never()
assertClock (0) (lock.write (w1))
val w2 = mock [LockWriter]
(w2.ft _) .expects() .returns (0) .anyNumberOfTimes()
(w2.grant _) .expects (TxClock.MinValue) .never()
assertResult (None) (lock.write (w2))
val w3 = mock [LockWriter]
(w3.ft _) .expects() .returns (0) .anyNumberOfTimes()
(w3.grant _) .expects (TxClock.MinValue) .never()
assertResult (None) (lock.write (w3))
(w2.grant _) .expects (TxClock.MinValue) .once()
lock.release (w1)
}}}
"A set of locks" should {
val Apple = "apple"
val Banana = "banana"
val Orange = "orange"
"acquire all locks before proceeding" in {
implicit val scheduler = StubScheduler.random()
val config = StoreTestConfig (lockSpaceBits = 8)
import config._
val locks = new LockSpace
val w1 = locks.write (1, Apple, Banana) .expectPass()
val w2 = locks.write (2, Banana, Orange) .capture()
w2.assertNotInvoked()
val r3 = locks.read (3, Apple, Orange) .capture()
w2.assertNotInvoked()
r3.assertNotInvoked()
w1.release()
r3.assertNotInvoked()
w2.assertPassed().release()
r3.assertPassed()
}}}
|
Treode/store
|
store/test/com/treode/store/locks/LockSpec.scala
|
Scala
|
apache-2.0
| 6,431
|
package pt.tecnico.dsi.akkastrator
import scala.collection.immutable.{HashMap, SortedMap}
import scala.collection.{immutable, mutable}
import akka.actor.{Actor, ActorLogging, ActorPath, PossiblyHarmful}
import akka.persistence._
import shapeless.HNil
object Orchestrator {
case class StartOrchestrator(id: Long)
trait Success[R] extends Serializable {
def id: Long
def result: R
}
case class Finished[R](result: R, id: Long) extends Success[R]
trait Failure extends Serializable {
def id: Long
def cause: Throwable
}
case class Aborted(cause: Throwable, id: Long) extends Failure
case class TaskAborted[R](instigatorReport: Report[R], cause: Throwable, id: Long) extends Failure
case object Status
case class StatusResponse(tasks: Seq[Report[_]])
case object ShutdownOrchestrator extends PossiblyHarmful
protected[akkastrator] case class StartTask(index: Int)
}
/**
* An Orchestrator executes a set of, possibly dependent, `Tasks`.
 * A task corresponds to sending a message to an actor, handling its response and possibly
 * mutating the internal state of the Orchestrator.
 *
 * The Orchestrator, together with its Tasks, is able to:
 *
 * - Deliver messages with an at-least-once delivery guarantee. The `DistinctIdsOrchestrator` ensures each destination
 * will see an independent, strictly monotonically increasing sequence number without gaps.
 * - Handle Status messages: if some actor is interested in querying the Orchestrator for its current
 * status, the Orchestrator will respond with the status of each task.
 * - Start a task as soon as all of its dependencies have finished, at which point the Orchestrator is
 * prepared to handle the messages that the task destination sends.
 * - Correctly restore the state of each task if the Orchestrator crashes.
*
* NOTE: the responses that are received must be Serializable.
*
 * In order for the Orchestrator and the Tasks to achieve all of this, they have to access and modify
 * each other's state directly. This means they are very tightly coupled with each other. To make this relation more
* obvious and to enforce it, you will only be able to create tasks if you have a reference to an orchestrator
* (which is passed implicitly to a task).
*
 * If you need to refactor the creation of tasks so that you can use them in multiple orchestrators, you can
 * leverage self type annotations.
*
* @param settings the settings to use for this orchestrator.
* @tparam R the type of result this orchestrator returns when it finishes.
*/
sealed abstract class AbstractOrchestrator[R](val settings: Settings)
extends PersistentActor with AtLeastOnceDelivery with ActorLogging with IdImplicits {
import Orchestrator._
/** The type of Id this orchestrator handles. */
type ID <: Id
/** This exists to make the creation of FullTasks easier. */
final implicit val orchestrator: AbstractOrchestrator[_] = this
/**
* All the tasks this orchestrator will have will be stored here, which allows them to have a stable index.
* Once every task is added to this list, this becomes "immutable", that is, it will no longer be modified.
* This is a Buffer because we want to ensure indexing elements is very fast. Also mutable.Buffer has about
* half the memory overhead of a List. See the excellent blog post of Li Haoyi for more details:
* http://www.lihaoyi.com/post/BenchmarkingScalaCollections.html#lists-vs-mutablebuffer
*/
private[this] final val _tasks = mutable.Buffer.empty[FullTask[_, _]]
final def tasks: immutable.Seq[FullTask[_, _]] = _tasks.toVector // To make sure the mutability does not escape
/** We use a HashMap to ensure remove/insert operations are very fast O(eC). The keys are the task indexes. */
protected[this] final var _waitingTasks = HashMap.empty[Int, Task[_]]
final def waitingTasks: HashMap[Int, Task[_]] = _waitingTasks
private[this] final var _finishedTasks = 0
  /** How many tasks of this orchestrator have successfully finished. Aborted tasks do not count as finished tasks. */
final def finishedTasks: Int = _finishedTasks
  // The id obtained in the StartOrchestrator message which prompted the execution of this orchestrator's tasks.
  // This is mainly used for TaskSpawnOrchestrator.
private[this] final var _startId: Long = _
final def startId: Long = _startId
def withLogPrefix(message: => String): String = s"[${self.path.name}] $message"
/** Computes ID from the deliveryId of akka-persistence. */
def computeID(destination: ActorPath, deliveryId: DeliveryId): ID
/** Converts ID to the deliveryId needed for the confirmDelivery method of akka-persistence. */
def deliveryIdOf(destination: ActorPath, id: ID): DeliveryId
/** Ensures the received message was in fact destined to be received by `task`. */
def matchId(task: Task[_], id: Long): Boolean
private[akkastrator] def addTask(task: FullTask[_, _]): Int = {
val index = tasks.length
_tasks += task
// By adding the tasks without dependencies directly to _waitingTasks, we ensure that when the StartOrchestrator message
// is received we do not need to iterate through all the tasks to compute which ones can start right away.
if (task.dependencies == HNil) {
_waitingTasks += index -> task.innerCreateTask()
}
index
}
private[akkastrator] def taskStarted(task: FullTask[_, _], innerTask: Task[_]): Unit = {
_waitingTasks += task.index -> innerTask
context become computeCurrentBehavior()
onTaskStart(task, innerTask)
}
/**
   * User overridable callback. It is called every time a task starts.
*
* By default just logs the `task` as started.
*
* { @see onTaskFinish} for a callback when a task finishes.
* { @see onTaskAbort} for a callback when a task aborts.
*/
def onTaskStart(task: FullTask[_, _], innerTask: Task[_]): Unit = {
log.debug(task.withOrchestratorAndTaskPrefix(s"Started $innerTask."))
}
/**
   * User overridable callback. It is called after the orchestrator starts but before any of the tasks start.
*
* By default logs that the Orchestrator has started.
*/
def onStart(startId: Long): Unit = {
    log.info(withLogPrefix(s"Started with StartId = $startId"))
}
private[akkastrator] def taskFinished(task: FullTask[_, _]): Unit = {
_waitingTasks -= task.index
_finishedTasks += 1
context become computeCurrentBehavior()
onTaskFinish(task)
if (finishedTasks == tasks.size) {
onFinish()
}
}
/**
   * User overridable callback. It is called every time a task finishes.
*
* You can use this to implement very refined termination strategies.
*
* By default just logs the `task` has finished.
*
* { @see onTaskStart} for a callback when a task starts.
* { @see onTaskAbort} for a callback when a task aborts.
*/
def onTaskFinish(task: FullTask[_, _]): Unit = {
log.debug(task.withOrchestratorAndTaskPrefix("Finished."))
}
/**
   * User overridable callback. It is called after every task has finished.
   * If any task aborts, this method will not be invoked.
*
* By default logs that the Orchestrator has finished then stops it.
*
* You can use this to implement your termination strategy.
*
   * If an orchestrator starts without tasks, it will finish right away.
*/
def onFinish(): Unit = {
log.info(withLogPrefix("Finished!"))
// TODO: it would be nice to have a default Success case to send to the parent
context stop self
}
private[akkastrator] def taskAborted(task: FullTask[_, _], cause: Throwable): Unit = {
_waitingTasks -= task.index
// We do not increment finishedTasks because the task did not finish
context become computeCurrentBehavior()
onTaskAbort(task, cause)
}
// TODO check why invoking become/unbecome voids the guarantee
/**
   * User overridable callback. It is called every time a task aborts.
*
* You can use this to implement very refined termination strategies.
*
* By default aborts the orchestrator via `onAbort` with a `TaskAborted` failure.
*
* Note: if you invoke become/unbecome inside this method, the contract that states
* <cite>"Waiting tasks or tasks which do not have this task as a dependency will
* remain untouched"</cite> will no longer be guaranteed.
* If you wish to still have this guarantee you can do {{{
* context.become(computeCurrentBehavior() orElse yourBehavior)
* }}}
*
* { @see onTaskStart} for a callback when a task starts.
* { @see onTaskFinish } for a callback when a task finishes.
*
* @param task the task that aborted.
*/
def onTaskAbort(task: FullTask[_, _], cause: Throwable): Unit = {
log.debug(task.withOrchestratorAndTaskPrefix("Aborted."))
onAbort(TaskAborted(task.report, cause, startId))
}
/**
   * User overridable callback. It is called when the orchestrator is aborted. By default an orchestrator
   * aborts as soon as a task aborts. However, this behaviour can be changed by overriding `onTaskAbort`.
*
   * By default it logs that the orchestrator has aborted, sends a message to its parent explaining why the
   * orchestrator aborted, and then stops it.
*
* You can use this to implement your termination strategy.
*/
def onAbort(failure: Failure): Unit = {
log.info(withLogPrefix(s"Aborted due to exception: ${failure.cause}!"))
context.parent ! failure
context stop self
}
final def recoveryAwarePersist(event: Any)(handler: => Unit): Unit = {
if (recoveryRunning) {
// When recovering, the event is already persisted no need to persist it again.
log.info(withLogPrefix(s"Recovering $event."))
handler
} else {
persist(event) { persistedEvent =>
log.debug(withLogPrefix(s"Persisted $persistedEvent."))
handler
}
}
}
final def computeCurrentBehavior(): Receive = {
val baseCommands: Actor.Receive = alwaysAvailableCommands orElse {
case StartTask(index) => tasks(index).start()
}
    // baseCommands is the first receive to guarantee that messages vital to the correct working of the orchestrator
    // won't be taken first by one of the tasks' behaviors or by the extraCommands. Similarly, extraCommands
    // is the last receive to ensure it doesn't take one of the messages of the waiting tasks' behaviors.
waitingTasks.values.map(_.behaviorHandlingTimeout).fold(baseCommands)(_ orElse _) orElse extraCommands
}
final def unstarted: Actor.Receive = {
case m @ StartOrchestrator(id) =>
recoveryAwarePersist(m) {
_startId = id
onStart(_startId)
if (tasks.isEmpty) {
onFinish()
} else if (!recoveryRunning) {
// Every task adds itself to `tasks`. If it has no dependencies then it will also be added to `waitingTasks`.
// So in order to start the orchestrator we only need to start the tasks in the `waitingTasks`.
// With this ruse we don't have to iterate through `tasks` in order to compute which ones are ready to start.
            // Once a task finishes it will notify every task that depends on it that it has finished. When all the dependencies
            // of a task have finished, a StartTask is scheduled.
waitingTasks.values.foreach(_.start())
// If recovery is running we don't need to start the tasks because we will eventually handle a TaskStarted
// which will start the task(s).
}
}
}
final def alwaysAvailableCommands: Actor.Receive = {
case Status =>
sender() ! StatusResponse(tasks.map(_.report))
case ShutdownOrchestrator =>
context stop self
}
final def receiveCommand: Actor.Receive = unstarted orElse alwaysAvailableCommands orElse extraCommands
/**
* Override this method to add extra commands that are always handled by this orchestrator (except when recovering).
*/
def extraCommands: Actor.Receive = PartialFunction.empty[Any, Unit]
def receiveRecover: Actor.Receive = unstarted orElse {
case Event.TaskStarted(taskIndex) =>
tasks(taskIndex).start()
case Event.TaskFinished(taskIndex, result) =>
waitingTasks(taskIndex).asInstanceOf[Task[Any]].finish(result)
case Event.TaskAborted(taskIndex, cause) =>
waitingTasks(taskIndex).abort(cause)
case RecoveryCompleted =>
log.debug(withLogPrefix(s"""Recovery completed:${tasks.map(t => t.withTaskPrefix(t.state.toString)).mkString("\\n\\t", "\\n\\t", "")}
|\\tNumber of unconfirmed messages: $numberOfUnconfirmed""".stripMargin))
}
}
/**
* In a simple orchestrator the same sequence number (of akka-persistence) is used for all the
* destinations of the orchestrator. Because of this, ID = DeliveryId, and matchId only checks the deliveryId
* as that will be enough information to disambiguate which task should handle the response.
*/
abstract class Orchestrator[R](settings: Settings = new Settings()) extends AbstractOrchestrator[R](settings) {
final type ID = DeliveryId
final def computeID(destination: ActorPath, deliveryId: DeliveryId): DeliveryId = deliveryId
final def deliveryIdOf(destination: ActorPath, id: ID): DeliveryId = id
final def matchId(task: Task[_], id: Long): Boolean = {
val deliveryId: DeliveryId = id
val matches = task.expectedID.contains(deliveryId)
log.debug(task.withOrchestratorAndTaskPrefix{
String.format(
s"""matchId:
| │ DeliveryId
|──────────┼─────────────────
| VALUE │ %s
| EXPECTED │ %s
| Matches: %s""".stripMargin,
Some(deliveryId), task.expectedID,
matches.toString.toUpperCase
)
})
matches
}
}
/**
* In a DistinctIdsOrchestrator an independent sequence is used for each destination of the orchestrator.
* In this orchestrator the delivery id is not sufficient to disambiguate which task should handle the message,
* so the ID = CorrelationId and the matchId also needs to check the sender.
 * It is therefore also necessary to be able to compute the correlation id for a given (sender, deliveryId) pair,
 * as well as to translate a correlation id back to a delivery id.
*/
abstract class DistinctIdsOrchestrator[R](settings: Settings = new Settings()) extends AbstractOrchestrator[R](settings) {
final type ID = CorrelationId
// By using a SortedMap as opposed to a Map we can also extract the latest correlationId per sender
private val idsPerDestination = mutable.Map.empty[ActorPath, SortedMap[CorrelationId, DeliveryId]]
.withDefaultValue(SortedMap.empty[CorrelationId, DeliveryId]) // We cannot use .withDefaultValue on the SortedMap :(
final def computeID(destination: ActorPath, deliveryId: DeliveryId): CorrelationId = {
val correlationId: CorrelationId = idsPerDestination(destination)
.keySet.lastOption
.map[CorrelationId](_.self + 1L)
.getOrElse(0L)
idsPerDestination(destination) += correlationId -> deliveryId
log.debug(s"State for $destination is now:\\n\\t" + idsPerDestination(destination).mkString("\\n\\t"))
correlationId
}
final def deliveryIdOf(destination: ActorPath, id: ID): DeliveryId = idsPerDestination(destination).getOrElse(id, 0L)
final def matchId(task: Task[_], id: Long): Boolean = {
val correlationId: CorrelationId = id
lazy val destinationPath = task.destination.toStringWithoutAddress
lazy val (matchesSender, expectedDestination, extraInfo) = sender().path match {
case s if s == context.system.deadLetters.path && recoveryRunning =>
(true, context.system.deadLetters.path, s"The expected SenderPath isn't $destinationPath because recovery is running\\n")
case s if s == self.path =>
(true, self.path, s"The expected SenderPath isn't $destinationPath because this is a Timeout\\n")
case s =>
(s == task.destination, task.destination, "")
}
val matches = task.expectedID.contains(correlationId) && matchesSender
log.debug(task.withOrchestratorAndTaskPrefix{
val senderPathString = sender().path.toStringWithoutAddress
val destinationString = expectedDestination.toStringWithoutAddress
val length = senderPathString.length max destinationString.length
String.format(
s"""MatchId:
| │ %${length}s │ DeliveryId
|──────────┼─%${length}s─┼──────────────────────────────
| VALUE │ %${length}s │ %s
| EXPECTED │ %${length}s │ %s
| %sMatches: %s""".stripMargin,
"SenderPath", "─" * length,
senderPathString, Some(correlationId),
destinationString, task.expectedID,
extraInfo, matches.toString.toUpperCase
)
})
matches
}
}
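/*
 * Illustrative sketch (not part of the original source). Task creation (FullTask) is defined elsewhere,
 * so this only shows the message protocol and the overridable callbacks documented above, using an
 * orchestrator that declares no tasks (such an orchestrator finishes right away):
 * {{{
 *   import pt.tecnico.dsi.akkastrator.Orchestrator._
 *   class NoOpOrchestrator extends Orchestrator[Unit]() {
 *     def persistenceId: String = "no-op-orchestrator" // required by PersistentActor
 *     override def onFinish(): Unit = {
 *       context.parent ! Finished((), startId) // report the (empty) result back to the parent
 *       super.onFinish()                       // default behaviour: log and stop self
 *     }
 *   }
 *   // From a parent actor, given a hypothetical `orchestratorRef`:
 *   //   orchestratorRef ! StartOrchestrator(id = 1L)
 *   //   orchestratorRef ! Status // the orchestrator replies with StatusResponse(task reports)
 * }}}
 */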
|
ist-dsi/akkastrator
|
src/main/scala/pt/tecnico/dsi/akkastrator/Orchestrator.scala
|
Scala
|
mit
| 17,193
|
package com.eevolution.context.dictionary.domain.api.service
import com.eevolution.context.dictionary.api
import com.eevolution.context.dictionary.domain.model.DynamicValidationRule
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 10/11/17.
*/
/**
* Dynamic Validation Rule Service
*/
trait DynamicValidationRuleService extends api.Service[DynamicValidationRule, Int] {
//Definition
}
|
adempiere/ADReactiveSystem
|
dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/api/service/DynamicValidationRuleService.scala
|
Scala
|
gpl-3.0
| 1,266
|
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.jsinterop
import scala.scalajs.js
import scala.scalajs.js.annotation._
import org.junit.Assert._
import org.junit.Assume._
import org.junit.Test
import org.scalajs.testsuite.utils.JSAssert._
import org.scalajs.testsuite.utils.Platform
import org.scalajs.testsuite.utils.AssertThrows.assertThrows
class NonNativeJSTypeTest {
import org.scalajs.testsuite.jsinterop.{NonNativeJSTypeTestSeparateRun => SepRun}
import NonNativeJSTypeTest._
@Test def minimalDefinition(): Unit = {
val obj = new Minimal
assertEquals("object", js.typeOf(obj))
assertEquals(List[String](), js.Object.keys(obj).toList)
assertEquals("[object Object]", obj.toString())
assertNull(obj.getClass().asInstanceOf[js.Any])
assertTrue((obj: Any).isInstanceOf[Minimal])
assertTrue((obj: Any).isInstanceOf[js.Object])
assertFalse((obj: Any).isInstanceOf[js.Error])
}
@Test def minimalStaticObjectWithLazyInitialization(): Unit = {
assertEquals(0, staticNonNativeObjectInitCount)
val obj = StaticNonNativeObject
assertEquals(1, staticNonNativeObjectInitCount)
assertSame(obj, StaticNonNativeObject)
assertEquals(1, staticNonNativeObjectInitCount)
assertEquals("object", js.typeOf(obj))
assertEquals(List[String](), js.Object.keys(obj).toList)
assertEquals("[object Object]", obj.toString())
assertNull(obj.getClass().asInstanceOf[js.Any])
assertFalse((obj: Any).isInstanceOf[Minimal])
assertTrue((obj: Any).isInstanceOf[js.Object])
assertFalse((obj: Any).isInstanceOf[js.Error])
}
@Test def simpleMethod(): Unit = {
val obj = new SimpleMethod
assertEquals(8, obj.foo(5))
assertEquals("hello42", obj.bar("hello", 42))
val dyn = obj.asInstanceOf[js.Dynamic]
assertEquals(8, dyn.foo(5))
assertEquals("hello42", dyn.bar("hello", 42))
}
@Test def staticObjectWithSimpleMethod(): Unit = {
val obj = StaticObjectSimpleMethod
assertEquals(8, obj.foo(5))
assertEquals("hello42", obj.bar("hello", 42))
val dyn = obj.asInstanceOf[js.Dynamic]
assertEquals(8, dyn.foo(5))
assertEquals("hello42", dyn.bar("hello", 42))
}
@Test def simpleField(): Unit = {
val obj = new SimpleField
assertEquals(List("x", "y"), js.Object.keys(obj).toList)
assertEquals(5, obj.x)
assertEquals(10, obj.y)
assertEquals(15, obj.sum())
obj.y = 3
assertEquals(3, obj.y)
assertEquals(8, obj.sum())
val dyn = obj.asInstanceOf[js.Dynamic]
assertEquals(5, dyn.x)
assertEquals(3, dyn.y)
assertEquals(8, dyn.sum())
dyn.y = 89
assertEquals(89, dyn.y)
assertEquals(89, obj.y)
assertEquals(94, dyn.sum())
}
@Test def staticObjectWithSimpleField(): Unit = {
val obj = StaticObjectSimpleField
assertEquals(List("x", "y"), js.Object.keys(obj).toList)
assertEquals(5, obj.x)
assertEquals(10, obj.y)
assertEquals(15, obj.sum())
obj.y = 3
assertEquals(3, obj.y)
assertEquals(8, obj.sum())
val dyn = obj.asInstanceOf[js.Dynamic]
assertEquals(5, dyn.x)
assertEquals(3, dyn.y)
assertEquals(8, dyn.sum())
dyn.y = 89
assertEquals(89, dyn.y)
assertEquals(89, obj.y)
assertEquals(94, dyn.sum())
}
@Test def simpleAccessors(): Unit = {
val obj = new SimpleAccessors
assertEquals(List("x"), js.Object.keys(obj).toList)
assertEquals(1, obj.x)
assertEquals(2, obj.readPlus1)
assertEquals(-1, obj.neg)
obj.neg = 4
assertEquals(-4, obj.x)
assertEquals(4, obj.neg)
assertEquals(-3, obj.readPlus1)
val dyn = obj.asInstanceOf[js.Dynamic]
assertEquals(-4, dyn.x)
assertEquals(-3, dyn.readPlus1)
assertEquals(4, dyn.neg)
dyn.neg = -9
assertEquals(9, dyn.x)
assertEquals(-9, dyn.neg)
assertEquals(10, dyn.readPlus1)
}
@Test def simpleConstructor(): Unit = {
val obj = new SimpleConstructor(5, 10)
assertEquals(List("x", "y"), js.Object.keys(obj).toList)
assertEquals(5, obj.x)
assertEquals(10, obj.y)
assertEquals(15, obj.sum())
obj.y = 3
assertEquals(3, obj.y)
assertEquals(8, obj.sum())
val dyn = obj.asInstanceOf[js.Dynamic]
assertEquals(5, dyn.x)
assertEquals(3, dyn.y)
assertEquals(8, dyn.sum())
dyn.y = 89
assertEquals(89, dyn.y)
assertEquals(89, obj.y)
assertEquals(94, dyn.sum())
}
@Test def simpleConstructorWithAutomaticFields(): Unit = {
val obj = new SimpleConstructorAutoFields(5, 10)
assertEquals(List("x", "y"), js.Object.keys(obj).toList)
assertEquals(5, obj.x)
assertEquals(10, obj.y)
assertEquals(15, obj.sum())
obj.y = 3
assertEquals(3, obj.y)
assertEquals(8, obj.sum())
val dyn = obj.asInstanceOf[js.Dynamic]
assertEquals(5, dyn.x)
assertEquals(3, dyn.y)
assertEquals(8, dyn.sum())
dyn.y = 89
assertEquals(89, dyn.y)
assertEquals(89, obj.y)
assertEquals(94, dyn.sum())
}
@Test def simpleConstructorWithParamAccessors(): Unit = {
val obj = new SimpleConstructorParamAccessors(5, 10)
assertNotEquals(Array("x", "y"), js.Object.keys(obj).toArray)
assertEquals(15, obj.sum())
val dyn = obj.asInstanceOf[js.Dynamic]
assertEquals(15, dyn.sum())
}
@Test def constructorWithParamNameClashes_Issue3933(): Unit = {
val obj = new ConstructorWithParamNameClashes(1, 2, 3, 4, 5, 6)
assertEquals(List(1, 2, 3, 4, 5, 6), obj.allArgs)
}
@Test def defaultValuesForFields(): Unit = {
val obj = new DefaultFieldValues
assertEquals(0, obj.int)
assertEquals(false, obj.bool)
assertEquals(0, obj.char.toInt)
assertNull(obj.string)
assertJSUndefined(obj.unit)
/* There is an additional test for value class fields in
* NonNativeJSTypeTestScala2.scala, which asserts that they are (wrongly)
* instantiated to `null`.
*/
}
@Test def lazyVals(): Unit = {
val obj1 = new LazyValFields()
assertEquals(0, obj1.initCount)
assertEquals(42, obj1.field)
assertEquals(1, obj1.initCount)
assertEquals(42, obj1.field)
assertEquals(1, obj1.initCount)
assertEquals(42, obj1.asInstanceOf[js.Dynamic].field)
assertEquals(1, obj1.initCount)
assertEquals(42, (obj1: LazyValFieldsSuperTrait).field)
assertEquals(1, obj1.initCount)
val obj2 = new LazyValFields().asInstanceOf[js.Dynamic]
assertEquals(0, obj2.initCount)
assertEquals(42, obj2.field)
assertEquals(1, obj2.initCount)
assertEquals(42, obj2.field)
assertEquals(1, obj2.initCount)
assertEquals(42, obj2.asInstanceOf[LazyValFields].field)
assertEquals(1, obj2.initCount)
assertEquals(42, obj2.asInstanceOf[LazyValFieldsSuperTrait].field)
assertEquals(1, obj2.initCount)
val obj3: LazyValFieldsSuperTrait = new LazyValFields()
assertEquals(0, obj3.initCount)
assertEquals(42, obj3.field)
assertEquals(1, obj3.initCount)
assertEquals(42, obj3.field)
assertEquals(1, obj3.initCount)
assertEquals(42, obj3.asInstanceOf[LazyValFields].field)
assertEquals(1, obj3.initCount)
assertEquals(42, obj3.asInstanceOf[js.Dynamic].field)
assertEquals(1, obj3.initCount)
}
@Test def overrideLazyVals(): Unit = {
val obj1 = new OverrideLazyValFields()
assertEquals(0, obj1.initCount)
assertEquals(53, obj1.field)
assertEquals(1, obj1.initCount)
assertEquals(53, obj1.field)
assertEquals(1, obj1.initCount)
assertEquals(53, obj1.asInstanceOf[js.Dynamic].field)
assertEquals(1, obj1.initCount)
assertEquals(53, (obj1: LazyValFieldsSuperTrait).field)
assertEquals(1, obj1.initCount)
assertEquals(53, (obj1: LazyValFields).field)
assertEquals(1, obj1.initCount)
val obj2 = new OverrideLazyValFields()
assertEquals(0, obj2.initCount)
assertEquals(53, (obj2: LazyValFields).field)
assertEquals(1, obj2.initCount)
assertEquals(53, obj2.field)
assertEquals(1, obj2.initCount)
assertEquals(53, obj2.field)
assertEquals(1, obj2.initCount)
assertEquals(53, obj2.asInstanceOf[js.Dynamic].field)
assertEquals(1, obj2.initCount)
assertEquals(53, (obj2: LazyValFieldsSuperTrait).field)
assertEquals(1, obj2.initCount)
}
@Test def nullingOutLazyValField_Issue3422(): Unit = {
assertEquals("foo", new NullingOutLazyValFieldBug3422("foo").str)
}
@Test def simpleInheritedFromNativeClass(): Unit = {
val obj = new SimpleInheritedFromNative(3, 5)
assertEquals(3, obj.x)
assertEquals(5, obj.y)
assertEquals(6, obj.bar)
assertTrue(obj.isInstanceOf[SimpleInheritedFromNative])
assertTrue(obj.isInstanceOf[NativeParentClass])
}
@Test def doubleUnderscoreInMemberNames_Issue3784(): Unit = {
class DoubleUnderscoreInMemberNames extends js.Object {
val x__y: String = "xy"
def foo__bar(x: Int): Int = x + 1
def ba__bar: String = "babar"
}
val obj = new DoubleUnderscoreInMemberNames
assertEquals("xy", obj.x__y)
assertEquals(6, obj.foo__bar(5))
assertEquals("babar", obj.ba__bar)
}
@Test def lambdaInsideMethod_Issue2220(): Unit = {
class LambdaInsideMethod extends js.Object {
def foo(): Int = {
List(1, 2, 3).map(_ * 2).sum
}
}
assertEquals(12, new LambdaInsideMethod().foo())
}
@Test def nestedInsideScalaClass(): Unit = {
class OuterScalaClass(val x: Int) {
class InnerJSClass(val y: Int) extends js.Object {
def sum(z: Int): Int = x + y + z
}
}
val outerObj = new OuterScalaClass(3)
val obj = new outerObj.InnerJSClass(6)
assertEquals(6, obj.y)
assertEquals(20, obj.sum(11))
}
@Test def nestedInsideScalaJSDefinedJSClass(): Unit = {
class OuterJSClass(val x: Int) extends js.Object {
class InnerJSClass(val y: Int) extends js.Object {
def sum(z: Int): Int = x + y + z
}
}
val outerObj = new OuterJSClass(3)
val obj = new outerObj.InnerJSClass(6)
assertEquals(6, obj.y)
assertEquals(20, obj.sum(11))
}
@Test def scalaClassNestedInsideScalaJSDefinedJSClass(): Unit = {
class OuterJSClass(val x: Int) extends js.Object {
class InnerScalaClass(val y: Int) {
def sum(z: Int): Int = x + y + z
}
}
val outerObj = new OuterJSClass(3)
val obj = new outerObj.InnerScalaClass(6)
assertEquals(6, obj.y)
assertEquals(20, obj.sum(11))
}
@Test def scalaObjectNestedInsideScalaJSDefinedJSClass(): Unit = {
class Foo extends js.Object {
var innerInitCount: Int = _
object Inner {
innerInitCount += 1
}
}
val foo = new Foo
assertEquals(0, foo.innerInitCount)
val inner1 = foo.Inner
assertEquals(1, foo.innerInitCount)
assertTrue((foo.Inner: AnyRef) eq inner1)
assertEquals(1, foo.innerInitCount)
val dyn = (new Foo).asInstanceOf[js.Dynamic]
assertEquals(0, dyn.innerInitCount)
val inner2 = dyn.Inner
assertEquals(1, dyn.innerInitCount)
assertTrue((dyn.Inner: AnyRef) eq inner2)
assertEquals(1, dyn.innerInitCount)
assertFalse((inner2: AnyRef) eq inner1)
}
// #2772
@Test def scalaObjectNestedInsideScalaJSDefinedJSClassJSName(): Unit = {
class Foo extends js.Object {
var innerInitCount: Int = _
@JSName("innerName")
object Inner {
innerInitCount += 1
}
}
val foo = new Foo
assertEquals(0, foo.innerInitCount)
val inner1 = foo.Inner
assertEquals(1, foo.innerInitCount)
assertTrue((foo.Inner: AnyRef) eq inner1)
assertEquals(1, foo.innerInitCount)
val dyn = (new Foo).asInstanceOf[js.Dynamic]
assertEquals(0, dyn.innerInitCount)
val inner2 = dyn.innerName
assertEquals(1, dyn.innerInitCount)
assertTrue((dyn.innerName: AnyRef) eq inner2)
assertEquals(1, dyn.innerInitCount)
assertFalse((inner2: AnyRef) eq inner1)
}
@Test def anonymousClassWithCaptures(): Unit = {
val x = (() => 5)()
val obj = new js.Object {
val y = 10
def sum(z: Int): Int = x + y + z
}
val dyn = obj.asInstanceOf[js.Dynamic]
assertEquals(10, dyn.y)
assertEquals(26, dyn.sum(11))
}
@Test def anonymousClassHasNoOwnPrototype(): Unit = {
val obj = new js.Object {
val x = 1
}
assertEquals(1, obj.asInstanceOf[js.Dynamic].x)
assertSame(js.Object.getPrototypeOf(obj),
js.constructorOf[js.Object].prototype)
}
@Test def localClassHasOwnPrototype(): Unit = {
class Local extends js.Object {
val x = 1
}
val obj = new Local
assertEquals(1, obj.asInstanceOf[js.Dynamic].x)
val prototype = js.Object.getPrototypeOf(obj)
assertNotSame(prototype, js.constructorOf[js.Object].prototype)
assertSame(prototype, js.constructorOf[Local].prototype)
}
@Test def anonymousClassNonTrivialSupertype(): Unit = {
val obj = new SimpleConstructor(1, 2) {
val z = sum()
}
assertEquals(3, obj.asInstanceOf[js.Dynamic].z)
}
@Test def anonymousClassUsingOwnMethodInCtor(): Unit = {
val obj = new js.Object {
val y = inc(0)
def inc(x: Int) = x + 1
}
assertEquals(1, obj.asInstanceOf[js.Dynamic].y)
}
@Test def anonymousClassUninitializedFields(): Unit = {
val obj = new js.Object {
var x: String = _
var y: Int = _
}
assertNull(obj.asInstanceOf[js.Dynamic].x)
assertEquals(0, obj.asInstanceOf[js.Dynamic].y)
}
@Test def anonymousClassFieldInitOrder(): Unit = {
val obj = new js.Object {
val x = getY
val y = "Hello World"
private def getY: String = y
}.asInstanceOf[js.Dynamic]
assertNull(obj.x)
assertEquals("Hello World", obj.y)
}
@Test def anonymousClassDependentFields(): Unit = {
val obj = new js.Object {
val x = 1
val y = x + 1
}
assertEquals(2, obj.asInstanceOf[js.Dynamic].y)
}
@Test def anonymousClassUseThisInCtor(): Unit = {
var obj0: js.Object = null
val obj1 = new js.Object {
obj0 = this
}
assertSame(obj0, obj1)
}
@Test def nestedAnonymousClasses(): Unit = {
val outer = new js.Object {
private var _x = 1
def x = _x
val inner = new js.Object {
def inc() = _x += 1
}
}.asInstanceOf[js.Dynamic]
val inner = outer.inner
assertEquals(1, outer.x)
inner.inc()
assertEquals(2, outer.x)
}
@Test def nestedAnonymousClassesAndLambdas(): Unit = {
def call(f: Int => js.Any) = f(1)
// Also check that f's capture is properly transformed.
val obj = call(x => new js.Object { val f: js.Any = (y: Int) => x + y })
val res = obj.asInstanceOf[js.Dynamic].f(3)
assertEquals(4, res)
assertEquals(1, call(x => x))
}
@Test def anonymousClassesPrivateFieldsAreNotVisible_Issue2748(): Unit = {
trait TheOuter extends js.Object {
val id: String
val paint: js.UndefOr[TheInner] = js.undefined
}
trait TheInner extends js.Object {
val options: js.UndefOr[String] = js.undefined
}
def someValue = "some-value"
val pcFn = someValue
val r0 = new TheOuter {
override val id: String = "some-" + pcFn
override val paint: js.UndefOr[TheInner] = {
new TheInner {
override val options: js.UndefOr[String] = "{" + pcFn + "}"
}
}
}
assertEquals(
"""{"id":"some-some-value","paint":{"options":"{some-value}"}}""",
js.JSON.stringify(r0))
}
@Test def localObjectIsLazy(): Unit = {
var initCount: Int = 0
object Obj extends js.Object {
initCount += 1
}
assertEquals(0, initCount)
val obj = Obj
import js.DynamicImplicits.truthValue
assertTrue(obj.asInstanceOf[js.Dynamic])
assertEquals(1, initCount)
assertSame(obj, Obj)
assertEquals(1, initCount)
}
@Test def localObjectWithCaptures(): Unit = {
val x = (() => 5)()
object Obj extends js.Object {
val y = 10
def sum(z: Int): Int = x + y + z
}
assertEquals(10, Obj.y)
assertEquals(26, Obj.sum(11))
val dyn = Obj.asInstanceOf[js.Dynamic]
assertEquals(10, dyn.y)
assertEquals(26, dyn.sum(11))
}
@Test def objectInScalaJSDefinedJSClass(): Unit = {
class Foo extends js.Object {
var innerInitCount: Int = _
object Inner extends js.Object {
innerInitCount += 1
}
}
val foo = new Foo
assertEquals(0, foo.innerInitCount)
val inner1 = foo.Inner
assertEquals(1, foo.innerInitCount)
assertSame(inner1, foo.Inner)
assertEquals(1, foo.innerInitCount)
val dyn = (new Foo).asInstanceOf[js.Dynamic]
assertEquals(0, dyn.innerInitCount)
val inner2 = dyn.Inner
assertEquals(1, dyn.innerInitCount)
assertSame(inner2, dyn.Inner)
assertEquals(1, dyn.innerInitCount)
assertNotSame(inner1, inner2)
}
@Test def localDefsAreNotExposed(): Unit = {
class LocalDefsAreNotExposed extends js.Object {
def foo(): String = {
def bar(): String = "hello"
bar()
}
}
val obj = new LocalDefsAreNotExposed
assertFalse(js.Object.properties(obj).exists(_.contains("bar")))
}
@Test def localObjectsAreNotExposed(): Unit = {
class LocalObjectsAreNotExposed extends js.Object {
def foo(): String = {
object Bar
Bar.toString()
}
}
val obj = new LocalObjectsAreNotExposed
assertFalse(js.Object.properties(obj).exists(_.contains("Bar")))
}
@Test def localDefsWithCaptures_Issue1975(): Unit = {
class LocalDefsWithCaptures extends js.Object {
def foo(suffix: String): String = {
def bar(): String = "hello " + suffix
bar()
}
}
val obj = new LocalDefsWithCaptures
assertEquals("hello world", obj.foo("world"))
}
@Test def methodsWithExplicitName(): Unit = {
class MethodsWithExplicitName extends js.Object {
@JSName("theAnswer")
def bar(): Int = 42
@JSName("doubleTheParam")
def double(x: Int): Int = x*2
}
val foo = new MethodsWithExplicitName
assertEquals(42, foo.bar())
assertEquals(6, foo.double(3))
val dyn = foo.asInstanceOf[js.Dynamic]
assertJSUndefined(dyn.bar)
assertEquals(js.typeOf(dyn.theAnswer), "function")
assertEquals(42, dyn.theAnswer())
assertEquals(6, dyn.doubleTheParam(3))
}
@Test def methodsWithConstantFoldedName(): Unit = {
class MethodsWithConstantFoldedName extends js.Object {
@JSName(JSNameHolder.MethodName)
def bar(): Int = 42
}
val foo = new MethodsWithConstantFoldedName
assertEquals(42, foo.bar())
val dyn = foo.asInstanceOf[js.Dynamic]
assertJSUndefined(dyn.bar)
assertEquals(42, dyn.myMethod())
}
@Test def protectedMethods(): Unit = {
class ProtectedMethods extends js.Object {
protected def bar(): Int = 42
protected[testsuite] def foo(): Int = 100
}
val foo = new ProtectedMethods
assertEquals(100, foo.foo())
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(js.typeOf(dyn.bar), "function")
assertEquals(42, dyn.bar())
assertEquals(js.typeOf(dyn.foo), "function")
assertEquals(100, dyn.foo())
}
@Test def readonlyProperties(): Unit = {
// Named classes
class Foo extends js.Object {
def bar: Int = 1
}
val x: js.Dynamic = (new Foo()).asInstanceOf[js.Dynamic]
assertThrows(classOf[js.JavaScriptException], {
x.bar = 2
})
// Anonymous classes
val y = new js.Object {
def bar: Int = 1
}.asInstanceOf[js.Dynamic]
assertThrows(classOf[js.JavaScriptException], {
y.bar = 2
})
}
@Test def propertiesAreNotEnumerable(): Unit = {
// Named classes
class Foo extends js.Object {
def myProp: Int = 1
}
val x: js.Any = (new Foo()).asInstanceOf[js.Any]
assertFalse(js.Object.properties(x).contains("myProp"))
// Anonymous classes
val y = new js.Object {
def myProp: Int = 1
}
assertFalse(js.Object.properties(y).contains("myProp"))
}
@Test def propertiesAreConfigurable(): Unit = {
// Named classes
class Foo extends js.Object {
def myProp: Int = 1
}
// Delete property from prototype.
val prototype = js.constructorOf[Foo].prototype
js.special.delete(prototype, "myProp")
// Check it is actually gone.
assertTrue(js.isUndefined((new Foo()).asInstanceOf[js.Dynamic].myProp))
// Anonymous classes
val y = new js.Object {
def myProp: Int = 1
}
// The property should be on the instance itself.
assertTrue(y.hasOwnProperty("myProp"))
js.special.delete(y, "myProp")
assertTrue(js.isUndefined(y.asInstanceOf[js.Dynamic].myProp))
assertFalse(y.hasOwnProperty("myProp"))
}
@Test def propertiesWithExplicitName(): Unit = {
class PropertiesWithExplicitName extends js.Object {
private[this] var myY: String = "hello"
@JSName("answer")
val answerScala: Int = 42
@JSName("x")
var xScala: Int = 3
@JSName("doubleX")
def doubleXScala: Int = xScala*2
@JSName("y")
def yGetter: String = myY + " get"
@JSName("y")
def ySetter_=(v: String): Unit = myY = v + " set"
}
val foo = new PropertiesWithExplicitName
assertEquals(42, foo.answerScala)
assertEquals(3, foo.xScala)
assertEquals(6, foo.doubleXScala)
foo.xScala = 23
assertEquals(23, foo.xScala)
assertEquals(46, foo.doubleXScala)
assertEquals("hello get", foo.yGetter)
foo.ySetter_=("world")
assertEquals("world set get", foo.yGetter)
val dyn = (new PropertiesWithExplicitName).asInstanceOf[js.Dynamic]
assertJSUndefined(dyn.answerScala)
assertEquals(js.typeOf(dyn.answer), "number")
assertEquals(42, dyn.answer)
assertEquals(3, dyn.x)
assertEquals(6, dyn.doubleX)
dyn.x = 23
assertEquals(23, dyn.x)
assertEquals(46, dyn.doubleX)
assertEquals("hello get", dyn.y)
dyn.y = "world"
assertEquals("world set get", dyn.y)
}
@Test def protectedProperties(): Unit = {
class ProtectedProperties extends js.Object {
protected val x: Int = 42
protected[testsuite] val y: Int = 43
}
val foo = new ProtectedProperties
assertEquals(43, foo.y)
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(42, dyn.x)
assertEquals(43, dyn.y)
}
@Test def simpleOverloadedMethods(): Unit = {
class SimpleOverloadedMethods extends js.Object {
def foo(): Int = 42
def foo(x: Int): Int = x*2
}
val foo = new SimpleOverloadedMethods
assertEquals(42, foo.foo())
assertEquals(6, foo.foo(3))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(js.typeOf(dyn.foo), "function")
assertEquals(42, dyn.foo())
assertEquals(6, dyn.foo(3))
}
@Test def simpleOverloadedMethodsAnonJSClass_Issue3054(): Unit = {
trait SimpleOverloadedMethodsAnonJSClass extends js.Object {
def foo(): Int
def foo(x: Int): Int
}
val foo = new SimpleOverloadedMethodsAnonJSClass {
def foo(): Int = 42
def foo(x: Int): Int = x * 2
}
assertEquals(42, foo.foo())
assertEquals(6, foo.foo(3))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(js.typeOf(dyn.foo), "function")
assertEquals(42, dyn.foo())
assertEquals(6, dyn.foo(3))
}
@Test def renamedOverloadedMethods(): Unit = {
class RenamedOverloadedMethods extends js.Object {
@JSName("foobar")
def foo(): Int = 42
@JSName("foobar")
def bar(x: Int): Int = x*2
}
val foo = new RenamedOverloadedMethods
assertEquals(42, foo.foo())
assertEquals(6, foo.bar(3))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(js.typeOf(dyn.foobar), "function")
assertEquals(42, dyn.foobar())
assertEquals(6, dyn.foobar(3))
}
@Test def overloadedMethodsWithVarargs(): Unit = {
class OverloadedMethodsWithVarargs extends js.Object {
def foo(x: Int): Int = x * 2
def foo(strs: String*): Int = strs.foldLeft(0)(_ + _.length)
}
val foo = new OverloadedMethodsWithVarargs
assertEquals(42, foo.foo(21))
assertEquals(0, foo.foo())
assertEquals(3, foo.foo("bar"))
assertEquals(8, foo.foo("bar", "babar"))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(js.typeOf(dyn.foo), "function")
assertEquals(42, dyn.foo(21))
assertEquals(0, dyn.foo())
assertEquals(3, dyn.foo("bar"))
assertEquals(8, dyn.foo("bar", "babar"))
}
@Test def overloadedMethodsWithVarargsAnonJSClass_Issue3054(): Unit = {
trait OverloadedMethodsWithVarargsAnonJSClass extends js.Object {
def foo(x: Int): Int
def foo(strs: String*): Int
}
val foo = new OverloadedMethodsWithVarargsAnonJSClass {
def foo(x: Int): Int = x * 2
def foo(strs: String*): Int = strs.foldLeft(0)(_ + _.length)
}
assertEquals(42, foo.foo(21))
assertEquals(0, foo.foo())
assertEquals(3, foo.foo("bar"))
assertEquals(8, foo.foo("bar", "babar"))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(js.typeOf(dyn.foo), "function")
assertEquals(42, dyn.foo(21))
assertEquals(0, dyn.foo())
assertEquals(3, dyn.foo("bar"))
assertEquals(8, dyn.foo("bar", "babar"))
}
@Test def overloadedConstructorsNumParametersResolution(): Unit = {
assertEquals(1, new OverloadedConstructorParamNumber(1).foo)
assertEquals(3, new OverloadedConstructorParamNumber(1, 2).foo)
}
@Test def overloadedConstructorsParameterTypeResolution(): Unit = {
assertEquals(1, new OverloadedConstructorParamType(1).foo)
assertEquals(3, new OverloadedConstructorParamType("abc").foo)
}
@Test def overloadedConstructorsWithCapturedParameters(): Unit = {
class OverloadedConstructorWithOuterContextOnly(val x: Int) extends js.Object {
def this(y: String) = this(y.length)
}
val z = (() => 5)()
class OverloadedConstructorWithValCapture(val x: Int) extends js.Object {
def this(y: String) = this(z)
}
assertEquals(1, new OverloadedConstructorWithOuterContextOnly(1).x)
assertEquals(3, new OverloadedConstructorWithOuterContextOnly("abc").x)
assertEquals(1, new OverloadedConstructorWithValCapture(1).x)
assertEquals(5, new OverloadedConstructorWithValCapture("abc").x)
}
@Test def overloadedConstructorsWithSuperClass(): Unit = {
class OverloadedConstructorSup(val x: Int) extends js.Object {
def this(y: String) = this(y.length)
}
class OverloadedConstructorSub(x: Int)
extends OverloadedConstructorSup(3 * x) {
def this(y: String) = this(2 * y.length)
}
assertEquals(1, new OverloadedConstructorSup(1).x)
assertEquals(3, new OverloadedConstructorSup("abc").x)
assertEquals(9, new OverloadedConstructorSub(3).x)
assertEquals(12, new OverloadedConstructorSub("ab").x)
}
@Test def overloadedConstructorsWithRepeatedParameters(): Unit = {
class OverloadedConstructorWithRepeatedParameters(xs: Int*)
extends js.Object {
def this(y: String, ys: String*) = this(y.length +: ys.map(_.length): _*)
def sum: Int = xs.sum
}
assertEquals(0, new OverloadedConstructorWithRepeatedParameters().sum)
assertEquals(1, new OverloadedConstructorWithRepeatedParameters(1).sum)
assertEquals(3, new OverloadedConstructorWithRepeatedParameters(1, 2).sum)
assertEquals(7, new OverloadedConstructorWithRepeatedParameters(1, 2, 4).sum)
assertEquals(3, new OverloadedConstructorWithRepeatedParameters("abc").sum)
assertEquals(3, new OverloadedConstructorWithRepeatedParameters("ab", "c").sum)
assertEquals(3, new OverloadedConstructorWithRepeatedParameters("a", "b", "c").sum)
}
@Test def overloadedConstructorsComplexResolution(): Unit = {
val bazPrim = new OverloadedConstructorComplex(1, 2)
assertEquals(1, bazPrim.foo)
assertEquals(2, bazPrim.bar)
val baz1 = new OverloadedConstructorComplex()
assertEquals(5, baz1.foo)
assertEquals(6, baz1.bar)
val baz2 = new OverloadedConstructorComplex(3)
assertEquals(3, baz2.foo)
assertEquals(3, baz2.bar)
val baz3 = new OverloadedConstructorComplex(7, 8, 9)
assertEquals(7, baz3.foo)
assertEquals(9, baz3.bar)
val baz4 = new OverloadedConstructorComplex("abc")
assertEquals(3, baz4.foo)
assertEquals(3, baz4.bar)
val baz5 = new OverloadedConstructorComplex("abc", 10)
assertEquals(3, baz5.foo)
assertEquals(10, baz5.bar)
val baz6 = new OverloadedConstructorComplex(11, "abc")
assertEquals(11, baz6.foo)
assertEquals(3, baz6.bar)
val baz7 = new OverloadedConstructorComplex(1, 2, 4, 8)
assertEquals(3, baz7.foo)
assertEquals(4, baz7.bar)
val baz8 = new OverloadedConstructorComplex("abc", "abcd")
assertEquals(3, baz8.foo)
assertEquals(4, baz8.bar)
val baz9 = new OverloadedConstructorComplex("abc", "abcd", "zx")
assertEquals(5, baz9.foo)
assertEquals(4, baz9.bar)
val baz10 = new OverloadedConstructorComplex("abc", "abcd", "zx", "tfd")
assertEquals(5, baz10.foo)
assertEquals(7, baz10.bar)
}
@Test def secondaryConstructorUseDefaultParam(): Unit = {
val a = new SecondaryConstructorUseDefaultParam(1)
assertEquals(a.y, "1y")
val b = new SecondaryConstructorUseDefaultParam()()
assertEquals(b.y, "xy")
}
@Test def constructorsWithPatternMatch_Issue4581(): Unit = {
val a = new PrimaryConstructorWithPatternMatch_Issue4581(5 :: Nil)
assertEquals(5, a.head)
val b = new SecondaryConstructorWithPatternMatch_Issue4581()
assertEquals(0, b.head)
val c = new SecondaryConstructorWithPatternMatch_Issue4581(6 :: Nil)
assertEquals(6, c.head)
}
@Test def polytypeNullaryMethod_Issue2445(): Unit = {
class PolyTypeNullaryMethod extends js.Object {
def emptyArray[T]: js.Array[T] = js.Array()
}
val obj = new PolyTypeNullaryMethod
val a = obj.emptyArray[Int]
assertTrue((a: Any).isInstanceOf[js.Array[_]])
assertEquals(0, a.length)
val dyn = obj.asInstanceOf[js.Dynamic]
val b = dyn.emptyArray
assertTrue((b: Any).isInstanceOf[js.Array[_]])
assertEquals(0, b.length)
}
@Test def defaultParameters(): Unit = {
class DefaultParameters extends js.Object {
def bar(x: Int, y: Int = 1): Int = x + y
def dependent(x: Int)(y: Int = x + 1): Int = x + y
def foobar(x: Int): Int = bar(x)
}
object DefaultParametersMod extends js.Object {
def bar(x: Int, y: Int = 1): Int = x + y
def dependent(x: Int)(y: Int = x + 1): Int = x + y
def foobar(x: Int): Int = bar(x)
}
val foo = new DefaultParameters
assertEquals(9, foo.bar(4, 5))
assertEquals(5, foo.bar(4))
assertEquals(4, foo.foobar(3))
assertEquals(9, foo.dependent(4)(5))
assertEquals(17, foo.dependent(8)())
assertEquals(9, DefaultParametersMod.bar(4, 5))
assertEquals(5, DefaultParametersMod.bar(4))
assertEquals(4, DefaultParametersMod.foobar(3))
assertEquals(9, DefaultParametersMod.dependent(4)(5))
assertEquals(17, DefaultParametersMod.dependent(8)())
def testDyn(dyn: js.Dynamic): Unit = {
assertEquals(9, dyn.bar(4, 5))
assertEquals(5, dyn.bar(4))
assertEquals(4, dyn.foobar(3))
assertEquals(9, dyn.dependent(4, 5))
assertEquals(17, dyn.dependent(8))
}
testDyn(foo.asInstanceOf[js.Dynamic])
testDyn(DefaultParametersMod.asInstanceOf[js.Dynamic])
}
@Test def overrideDefaultParameters(): Unit = {
class OverrideDefaultParametersParent extends js.Object {
def bar(x: Int, y: Int = 1): Int = x + y
def dependent(x: Int)(y: Int = x + 1): Int = x + y
def foobar(x: Int): Int = bar(x)
}
class OverrideDefaultParametersChild
extends OverrideDefaultParametersParent {
override def bar(x: Int, y: Int = 10): Int = super.bar(x, y)
override def dependent(x: Int)(y: Int = x * 2): Int = x + y
}
val foo = new OverrideDefaultParametersChild
assertEquals(9, foo.bar(4, 5))
assertEquals(14, foo.bar(4))
assertEquals(13, foo.foobar(3))
assertEquals(9, foo.dependent(4)(5))
assertEquals(24, foo.dependent(8)())
val parent: OverrideDefaultParametersParent = foo
assertEquals(9, parent.bar(4, 5))
assertEquals(14, parent.bar(4))
assertEquals(13, parent.foobar(3))
assertEquals(9, parent.dependent(4)(5))
assertEquals(24, parent.dependent(8)())
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(9, dyn.bar(4, 5))
assertEquals(14, dyn.bar(4))
assertEquals(13, dyn.foobar(3))
assertEquals(9, dyn.dependent(4, 5))
assertEquals(24, dyn.dependent(8))
}
@Test def overrideMethodWithDefaultParametersWithoutNewDefault(): Unit = {
class OverrideDefaultParametersWithoutDefaultParent extends js.Object {
def bar(x: Int, y: Int = 1): Int = x + y
def dependent(x: Int)(y: Int = x + 1): Int = x + y
def foobar(x: Int): Int = bar(x)
}
class OverrideDefaultParametersWithoutDefaultChild
extends OverrideDefaultParametersWithoutDefaultParent {
override def bar(x: Int, y: Int): Int = x - y
override def dependent(x: Int)(y: Int): Int = x - y
}
val foo = new OverrideDefaultParametersWithoutDefaultChild
assertEquals(-1, foo.bar(4, 5))
assertEquals(3, foo.bar(4))
assertEquals(2, foo.foobar(3))
assertEquals(-4, foo.dependent(4)(8))
assertEquals(-1, foo.dependent(8)())
val parent: OverrideDefaultParametersWithoutDefaultParent = foo
assertEquals(-1, parent.bar(4, 5))
assertEquals(3, parent.bar(4))
assertEquals(2, parent.foobar(3))
assertEquals(-4, parent.dependent(4)(8))
assertEquals(-1, parent.dependent(8)())
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(-1, dyn.bar(4, 5))
assertEquals(3, dyn.bar(4))
assertEquals(2, dyn.foobar(3))
assertEquals(-4, dyn.dependent(4, 8))
assertEquals(-1, dyn.dependent(8))
}
@Test def constructorsWithDefaultParametersNonNativeNone(): Unit = {
assertEquals(-1, new ConstructorDefaultParamJSNonNativeNone().foo)
assertEquals(1, new ConstructorDefaultParamJSNonNativeNone(1).foo)
assertEquals(5, new ConstructorDefaultParamJSNonNativeNone(5).foo)
}
@Test def constructorsWithDefaultParametersNonNativeNonNative(): Unit = {
assertEquals(-1, new ConstructorDefaultParamJSNonNativeJSNonNative().foo)
assertEquals(1, new ConstructorDefaultParamJSNonNativeJSNonNative(1).foo)
assertEquals(5, new ConstructorDefaultParamJSNonNativeJSNonNative(5).foo)
}
@Test def constructorsWithDefaultParametersNonNativeScala(): Unit = {
assertEquals(-1, new ConstructorDefaultParamJSNonNativeScala().foo)
assertEquals(1, new ConstructorDefaultParamJSNonNativeScala(1).foo)
assertEquals(5, new ConstructorDefaultParamJSNonNativeScala(5).foo)
}
@Test def constructorsWithDefaultParametersScalaNonNative(): Unit = {
assertEquals(-1, new ConstructorDefaultParamScalaJSNonNative().foo)
assertEquals(1, new ConstructorDefaultParamScalaJSNonNative(1).foo)
assertEquals(5, new ConstructorDefaultParamScalaJSNonNative(5).foo)
}
@Test def constructorsWithDefaultParametersNativeNone(): Unit = {
assertEquals(-1, new ConstructorDefaultParamJSNativeNone().foo)
assertEquals(1, new ConstructorDefaultParamJSNativeNone(1).foo)
assertEquals(5, new ConstructorDefaultParamJSNativeNone(5).foo)
}
@Test def constructorsWithDefaultParametersNativeScala(): Unit = {
assertEquals(-1, new ConstructorDefaultParamJSNativeScala().foo)
assertEquals(1, new ConstructorDefaultParamJSNativeScala(1).foo)
assertEquals(5, new ConstructorDefaultParamJSNativeScala(5).foo)
}
@Test def constructorsWithDefaultParametersNativeNonNative(): Unit = {
assertEquals(-1, new ConstructorDefaultParamJSNativeJSNonNative().foo)
assertEquals(1, new ConstructorDefaultParamJSNativeJSNonNative(1).foo)
assertEquals(5, new ConstructorDefaultParamJSNativeJSNonNative(5).foo)
}
@Test def constructorsWithDefaultParametersNativeNative(): Unit = {
assertEquals(-1, new ConstructorDefaultParamJSNativeJSNative().foo)
assertEquals(1, new ConstructorDefaultParamJSNativeJSNative(1).foo)
assertEquals(5, new ConstructorDefaultParamJSNativeJSNative(5).foo)
}
@Test def constructorsWithDefaultParametersScalaScala(): Unit = {
assertEquals(-1, new ConstructorDefaultParamScalaScala().foo)
assertEquals(1, new ConstructorDefaultParamScalaScala(1).foo)
assertEquals(5, new ConstructorDefaultParamScalaScala(5).foo)
}
@Test def constructorsWithDefaultParametersScalaNone(): Unit = {
assertEquals(-1, new ConstructorDefaultParamScalaNone().foo)
assertEquals(1, new ConstructorDefaultParamScalaNone(1).foo)
assertEquals(5, new ConstructorDefaultParamScalaNone(5).foo)
}
@Test def constructorsWithDefaultParametersInMultiParamLists(): Unit = {
val foo1 = new ConstructorDefaultParamMultiParamList(5)("foobar")
assertEquals(5, foo1.default)
assertEquals("foobar", foo1.title)
assertEquals("5", foo1.description)
val foo2 = new ConstructorDefaultParamMultiParamList(56)("babar", "desc")
assertEquals(56, foo2.default)
assertEquals("babar", foo2.title)
assertEquals("desc", foo2.description)
}
@Test def constructorsWithDefaultParametersInMultiParamListsAndOverloading(): Unit = {
val foo1 = new ConstructorDefaultParamMultiParamListWithOverloading(5)(
"foobar")
assertEquals(5, foo1.default)
assertEquals("foobar", foo1.title)
assertEquals("5", foo1.description)
val foo2 = new ConstructorDefaultParamMultiParamListWithOverloading(56)(
"babar", "desc")
assertEquals(56, foo2.default)
assertEquals("babar", foo2.title)
assertEquals("desc", foo2.description)
val foo3 = new ConstructorDefaultParamMultiParamListWithOverloading('A')
assertEquals(65, foo3.default)
assertEquals("char", foo3.title)
assertEquals("a char", foo3.description)
val foo4 = new ConstructorDefaultParamMultiParamListWithOverloading(123, 456)
assertEquals(123, foo4.default)
assertEquals("456", foo4.title)
assertEquals(js.undefined, foo4.description)
}
@Test def callSuperConstructorWithColonAsterisk(): Unit = {
class CallSuperCtorWithSpread(x: Int, y: Int, z: Int)
extends NativeParentClassWithVarargs(x, Seq(y, z): _*)
val foo = new CallSuperCtorWithSpread(4, 8, 23)
assertEquals(4, foo.x)
assertJSArrayEquals(js.Array(8, 23), foo.args)
val dyn = foo.asInstanceOf[js.Dynamic]
/* Dark magic is at play here: everywhere else in this compilation unit,
* it's fine to do `assertEquals(4, dyn.x)` (for example, in the test
* `overrideNativeMethod` below), but right here, it causes scalac to die
* with a completely nonsensical compile error:
*
* > applyDynamic does not support passing a vararg parameter
*
* Extracting it in a separate `val` works around it.
*/
val dynx = dyn.x
assertEquals(4, dynx)
val args = dyn.args.asInstanceOf[js.Array[Int]]
assertJSArrayEquals(js.Array(8, 23), args)
}
@Test def overrideNativeMethod(): Unit = {
class OverrideNativeMethod extends NativeParentClass(3) {
override def foo(s: String): String = s + s + x
}
val foo = new OverrideNativeMethod
assertEquals(3, foo.x)
assertEquals("hellohello3", foo.foo("hello"))
val parent: NativeParentClass = foo
assertEquals(3, parent.x)
assertEquals("hellohello3", parent.foo("hello"))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(3, dyn.x)
assertEquals("hellohello3", dyn.foo("hello"))
}
@Test def overrideNonNativeMethod(): Unit = {
class OverrideNonNativeMethod extends NonNativeParentClass(3) {
override def foo(s: String): String = s + s + x
}
val foo = new OverrideNonNativeMethod
assertEquals(3, foo.x)
assertEquals("hellohello3", foo.foo("hello"))
val parent: NonNativeParentClass = foo
assertEquals(3, parent.x)
assertEquals("hellohello3", parent.foo("hello"))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(3, dyn.x)
assertEquals("hellohello3", dyn.foo("hello"))
}
@Test def overrideNonNativeMethodWithSeparateCompilation(): Unit = {
val foo = new SepRun.SimpleChildClass
assertEquals(6, foo.foo(3))
val fooParent: SepRun.SimpleParentClass = foo
assertEquals(6, fooParent.foo(3))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(6, foo.foo(3))
}
@Test def overrideNativeMethodAndCallSuper(): Unit = {
class OverrideNativeMethodSuperCall extends NativeParentClass(3) {
override def foo(s: String): String = super.foo("bar") + s
}
val foo = new OverrideNativeMethodSuperCall
assertEquals(3, foo.x)
assertEquals("bar3hello", foo.foo("hello"))
val parent: NativeParentClass = foo
assertEquals(3, parent.x)
assertEquals("bar3hello", parent.foo("hello"))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(3, dyn.x)
assertEquals("bar3hello", dyn.foo("hello"))
}
@Test def overrideNonNativeMethodAndCallSuper(): Unit = {
class OverrideNonNativeMethodSuperCall extends NonNativeParentClass(3) {
override def foo(s: String): String = super.foo("bar") + s
}
val foo = new OverrideNonNativeMethodSuperCall
assertEquals(3, foo.x)
assertEquals("bar3hello", foo.foo("hello"))
val parent: NonNativeParentClass = foo
assertEquals(3, parent.x)
assertEquals("bar3hello", parent.foo("hello"))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(3, dyn.x)
assertEquals("bar3hello", dyn.foo("hello"))
}
@Test def overloadSuperMethod_Issue4452(): Unit = {
class Base extends js.Object {
def f(x: Int, y: Int*): String = "Base " + y.length
def g(x: Int, y: String): Unit = ()
}
class Sub extends Base {
def f(x: String, y: Int*): String = "Sub " + y.length
def g(x: Int): Unit = ()
}
val base = new Base
val sub = new Sub
assertEquals("Base 3", base.f(0, 1, 2, 3))
assertEquals("Base 3", sub.f(0, 1, 2, 3))
assertEquals("Sub 3", sub.f("0", 1, 2, 3))
// Just check they don't throw.
base.g(1, "0")
sub.g(1, "0")
sub.g(1)
}
@Test def superMethodCallInAnonJSClass_Issue3055(): Unit = {
class Foo extends js.Object {
def bar(msg: String): String = "super: " + msg
}
val foo = new Foo {
override def bar(msg: String): String = super.bar("foo: " + msg)
}
assertEquals("super: foo: foobar", foo.bar("foobar"))
}
@Test def overrideNativeVal(): Unit = {
class OverrideNativeVal extends NativeParentClass(3) {
override val x: Int = 42
}
val foo = new OverrideNativeVal
assertEquals(42, foo.x)
assertEquals(84, foo.bar)
assertEquals("hello42", foo.foo("hello"))
val parent: NativeParentClass = foo
assertEquals(42, parent.x)
assertEquals(84, parent.bar)
assertEquals("hello42", parent.foo("hello"))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(42, dyn.x)
assertEquals(84, dyn.bar)
assertEquals("hello42", dyn.foo("hello"))
}
@Test def overrideNonNativeVal(): Unit = {
class OverrideNonNativeVal extends NonNativeParentClass(3) {
override val x: Int = 42
}
val foo = new OverrideNonNativeVal
assertEquals(42, foo.x)
assertEquals(84, foo.bar)
assertEquals("hello42", foo.foo("hello"))
val parent: NonNativeParentClass = foo
assertEquals(42, parent.x)
assertEquals(84, parent.bar)
assertEquals("hello42", parent.foo("hello"))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(42, dyn.x)
assertEquals(84, dyn.bar)
assertEquals("hello42", dyn.foo("hello"))
}
@Test def overrideNativeGetter(): Unit = {
class OverrideNativeGetter extends NativeParentClass(3) {
override def bar: Int = x * 3
}
val foo = new OverrideNativeGetter
assertEquals(3, foo.x)
assertEquals(9, foo.bar)
val parent: NativeParentClass = foo
assertEquals(3, parent.x)
assertEquals(9, parent.bar)
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(3, dyn.x)
assertEquals(9, dyn.bar)
}
@Test def overrideNonNativeGetter(): Unit = {
class OverrideNonNativeGetter extends NonNativeParentClass(3) {
override def bar: Int = x * 3
}
val foo = new OverrideNonNativeGetter
assertEquals(3, foo.x)
assertEquals(9, foo.bar)
val parent: NonNativeParentClass = foo
assertEquals(3, parent.x)
assertEquals(9, parent.bar)
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(3, dyn.x)
assertEquals(9, dyn.bar)
}
@Test def overrideNativeGetterWithVal(): Unit = {
class OverrideNativeGetterWithVal extends NativeParentClass(3) {
override val bar: Int = 1
}
val foo = new OverrideNativeGetterWithVal
assertEquals(3, foo.x)
assertEquals(1, foo.bar)
val parent: NativeParentClass = foo
assertEquals(3, parent.x)
assertEquals(1, parent.bar)
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(3, dyn.x)
assertEquals(1, dyn.bar)
}
@Test def overrideNonNativeGetterWithVal(): Unit = {
class OverrideNonNativeGetterWithVal extends NonNativeParentClass(3) {
override val bar: Int = 1
}
val foo = new OverrideNonNativeGetterWithVal
assertEquals(3, foo.x)
assertEquals(1, foo.bar)
val parent: NonNativeParentClass = foo
assertEquals(3, parent.x)
assertEquals(1, parent.bar)
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(3, dyn.x)
assertEquals(1, dyn.bar)
}
@Test def overrideGetterWithSuper(): Unit = {
class OverrideGetterSuperParent extends js.Object {
def bar: Int = 43
}
class OverrideGetterSuperChild extends OverrideGetterSuperParent {
override def bar: Int = super.bar * 3
}
val foo = new OverrideGetterSuperChild
assertEquals(129, foo.bar)
val parent: OverrideGetterSuperParent = foo
assertEquals(129, parent.bar)
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(129, dyn.bar)
}
@Test def overrideSetterWithSuper(): Unit = {
class OverrideSetterSuperParent extends js.Object {
var x: Int = 43
def bar_=(v: Int): Unit = x = v
}
class OverrideSetterSuperChild extends OverrideSetterSuperParent {
override def bar_=(v: Int): Unit = super.bar_=(v * 3)
}
val foo = new OverrideSetterSuperChild
foo.bar_=(4)
assertEquals(12, foo.x)
val parent: OverrideSetterSuperParent = foo
parent.bar_=(5)
assertEquals(15, parent.x)
val dyn = foo.asInstanceOf[js.Dynamic]
dyn.bar = 6
assertEquals(18, dyn.x)
}
@Test def superPropertyGetSetInAnonJSClass_Issue3055(): Unit = {
class Foo extends js.Object {
var x: Int = 1
var lastSetValue: Int = 0
def bar: Int = x
def bar_=(v: Int): Unit = x = v
}
val foo = new Foo {
override def bar: Int = super.bar * 2
override def bar_=(v: Int): Unit = {
lastSetValue = v
super.bar = v + 3
}
}
assertEquals(2, foo.bar)
foo.bar = 6
assertEquals(6, foo.lastSetValue)
assertEquals(9, foo.x)
assertEquals(18, foo.bar)
}
@Test def addOverloadInSubclass(): Unit = {
class AddOverloadInSubclassParent extends js.Object {
def bar(): Int = 53
}
class AddOverloadInSubclassChild extends AddOverloadInSubclassParent {
def bar(x: Int): Int = x + 2
}
val foo = new AddOverloadInSubclassChild
assertEquals(53, foo.bar())
assertEquals(7, foo.bar(5))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(53, dyn.bar())
assertEquals(7, dyn.bar(5))
}
@Test def addSetterInSubclass(): Unit = {
class AddSetterInSubclassParent extends js.Object {
var x: Int = 43
def bar: Int = x
}
class AddSetterInSubclassChild extends AddSetterInSubclassParent {
def bar_=(v: Int): Unit = x = v
}
val foo = new AddSetterInSubclassChild
foo.bar = 4
assertEquals(4, foo.x)
assertEquals(4, foo.bar)
val dyn = foo.asInstanceOf[js.Dynamic]
dyn.bar = 6
assertEquals(6, dyn.x)
assertEquals(6, dyn.bar)
}
@Test def addGetterInSubclass(): Unit = {
class AddGetterInSubclassParent extends js.Object {
var x: Int = 43
def bar_=(v: Int): Unit = x = v
}
class AddGetterInSubclassChild extends AddGetterInSubclassParent {
def bar: Int = x
}
val foo = new AddGetterInSubclassChild
foo.bar = 4
assertEquals(4, foo.x)
assertEquals(4, foo.bar)
val dyn = foo.asInstanceOf[js.Dynamic]
dyn.bar = 6
assertEquals(6, dyn.x)
assertEquals(6, dyn.bar)
}
@Test def overloadNativeMethod(): Unit = {
class OverloadNativeMethod extends NativeParentClass(3) {
def foo(s: String, y: Int): String = foo(s) + " " + y
}
val foo = new OverloadNativeMethod
assertEquals("hello3", foo.foo("hello"))
assertEquals("hello3 4", foo.foo("hello", 4))
val parent: NativeParentClass = foo
assertEquals("hello3", parent.foo("hello"))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals("hello3", dyn.foo("hello"))
assertEquals("hello3 4", dyn.foo("hello", 4))
}
@Test def overloadNonNativeMethod(): Unit = {
class OverloadNonNativeMethod extends NonNativeParentClass(3) {
def foo(s: String, y: Int): String = foo(s) + " " + y
}
val foo = new OverloadNonNativeMethod
assertEquals("hello3", foo.foo("hello"))
assertEquals("hello3 4", foo.foo("hello", 4))
val parent: NonNativeParentClass = foo
assertEquals("hello3", parent.foo("hello"))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals("hello3", dyn.foo("hello"))
assertEquals("hello3 4", dyn.foo("hello", 4))
}
@Test def overloadWithDefaultParameter(): Unit = {
class OverloadDefaultParameter extends js.Object {
def foo(x: Int): Int = x
def foo(x: String = ""): String = x
}
val foo = new OverloadDefaultParameter
assertEquals(5, foo.foo(5))
assertEquals("", foo.foo())
assertEquals("hello", foo.foo("hello"))
}
@Test def implementSimpleTrait(): Unit = {
class ImplementSimpleTrait extends js.Object with SimpleTrait {
def foo(x: Int): Int = x + 1
}
val foo = new ImplementSimpleTrait
assertEquals(4, foo.foo(3))
val fooTrait: SimpleTrait = foo
assertEquals(6, fooTrait.foo(5))
}
@Test def implementSimpleTraitUnderSeparateCompilation(): Unit = {
class ImplementSimpleTraitSepRun extends js.Object with SepRun.SimpleTrait {
def foo(x: Int): Int = x + 1
}
val foo = new ImplementSimpleTraitSepRun
assertEquals(4, foo.foo(3))
val fooTrait: SepRun.SimpleTrait = foo
assertEquals(6, fooTrait.foo(5))
}
@Test def implementTraitWithVal(): Unit = {
trait TraitWithVal extends js.Object {
val x: Int
}
class ImplWithVal extends TraitWithVal {
val x: Int = 3
}
val foo = new ImplWithVal
assertEquals(3, foo.x)
val fooTrait: TraitWithVal = foo
assertEquals(3, fooTrait.x)
}
@Test def implementTraitWithVar(): Unit = {
trait TraitWithVar extends js.Object {
var x: Int
}
class ImplWithVar extends TraitWithVar {
var x: Int = 3
}
val foo = new ImplWithVar
assertEquals(3, foo.x)
val fooTrait: TraitWithVar = foo
assertEquals(3, fooTrait.x)
foo.x = 5
assertEquals(5, fooTrait.x)
fooTrait.x = 19
assertEquals(19, foo.x)
}
@Test def implementTraitExtendingNativeJSClass(): Unit = {
trait TraitExtendsJSClass extends NativeParentClass {
def foobar(x: Int): Int
}
class ImplExtendsJSClassAndTrait
extends NativeParentClass(5) with TraitExtendsJSClass {
def foobar(x: Int): Int = x * 3
}
val foo = new ImplExtendsJSClassAndTrait
assertEquals(18, foo.foobar(6))
}
@Test def implementAbstractMembersComingFromNativeJSClass(): Unit = {
class ImplDeferredMembersFromJSParent
extends NativeParentClassWithDeferred {
val x: Int = 43
def bar(y: Int): Int = y * 2
}
val FooResult = (12 + 4) * 2 + 43
val foo = new ImplDeferredMembersFromJSParent
assertEquals(43, foo.x)
assertEquals(64, foo.bar(32))
assertEquals(FooResult, foo.foo(12))
val fooParent: NativeParentClassWithDeferred = foo
assertEquals(43, fooParent.x)
assertEquals(64, fooParent.bar(32))
assertEquals(FooResult, fooParent.foo(12))
val dyn = foo.asInstanceOf[js.Dynamic]
assertEquals(43, dyn.x)
assertEquals(64, dyn.bar(32))
assertEquals(FooResult, dyn.foo(12))
}
@Test def overrideMethodWithDefaultValuesFromNativeJSClass(): Unit = {
class OverrideDefault extends NativeParentClass(7) {
override def methodWithDefault(x: Int = 9): Int = x * 2
}
val child = new OverrideDefault
assertEquals(18, child.methodWithDefault())
assertEquals(14, child.methodWithDefault(7))
val parent: NativeParentClass = child
assertEquals(18, parent.methodWithDefault())
assertEquals(14, parent.methodWithDefault(7))
}
// #2603
@Test def defaultValuesInNonExposedMethods(): Unit = {
class DefaultParameterss(val default: Int) extends js.Object {
/* We don't use a constant default value to make sure it actually comes
* from the default parameter accessors.
*/
private def privateWithDefault(x: Int = default) = x
def callPrivate(): Int = privateWithDefault()
def callNested(): Int = {
def nested(x: Int = default) = x
nested()
}
}
val x = new DefaultParameterss(5)
assertEquals(5, x.callPrivate())
assertEquals(5, x.callNested())
}
// #3939
@Test def javaLangObjectMethodNames(): Unit = {
class JavaLangObjectMethods extends js.Object {
@JSName("clone")
def myClone(): String = "myClone"
@JSName("equals")
def myEquals(): String = "myEquals"
@JSName("finalize")
def myFinalize(): String = "myFinalize"
@JSName("hashCode")
def myHashCode(): String = "myHashCode"
@JSName("notify")
def myNotify(): String = "myNotify"
@JSName("notifyAll")
def myNotifyAll(): String = "myNotifyAll"
@JSName("wait")
def myWait(): String = "myWait"
}
val x = (new JavaLangObjectMethods).asInstanceOf[js.Dynamic]
assertEquals("myClone", x.applyDynamic("clone")())
}
}
object NonNativeJSTypeTest {
// Defined in test-suite/src/test/resources/NonNativeJSTypeTestNatives.js
@JSGlobal("NonNativeJSTypeTestNativeParentClass")
@js.native
class NativeParentClass(val x: Int) extends js.Object {
def foo(s: String): String = js.native
def bar: Int = js.native
def methodWithDefault(x: Int = 5): Int = js.native
}
class NonNativeParentClass(val x: Int) extends js.Object {
def foo(s: String): String = s + x
def bar: Int = x * 2
}
@js.native
trait NativeTraitWithDeferred extends js.Object {
val x: Int
}
// Defined in test-suite/src/test/resources/NonNativeJSTypeTestNatives.js
@JSGlobal("NonNativeJSTypeTestNativeParentClassWithDeferred")
@js.native
abstract class NativeParentClassWithDeferred extends NativeTraitWithDeferred {
def foo(y: Int): Int = js.native // = bar(y + 4) + x
def bar(y: Int): Int
}
// Defined in test-suite/src/test/resources/NonNativeJSTypeTestNatives.js
@JSGlobal("NonNativeJSTypeTestNativeParentClassWithVarargs")
@js.native
class NativeParentClassWithVarargs(
_x: Int, _args: Int*) extends js.Object {
val x: Int = js.native
val args: js.Array[Int] = js.native
}
trait SimpleTrait extends js.Any {
def foo(x: Int): Int
}
class Minimal extends js.Object
private var staticNonNativeObjectInitCount: Int = _
object StaticNonNativeObject extends js.Object {
staticNonNativeObjectInitCount += 1
}
class SimpleMethod extends js.Object {
def foo(x: Int): Int = x + 3
def bar(s: String, i: Int): String = s + i
}
object StaticObjectSimpleMethod extends js.Object {
def foo(x: Int): Int = x + 3
def bar(s: String, i: Int): String = s + i
}
class SimpleField extends js.Object {
val x = 5
var y = 10
def sum(): Int = x + y
}
object StaticObjectSimpleField extends js.Object {
val x = 5
var y = 10
def sum(): Int = x + y
}
class SimpleAccessors extends js.Object {
var x = 1
def readPlus1: Int = x + 1
def neg: Int = -x
def neg_=(v: Int): Unit = x = -v
}
class SimpleConstructor(_x: Int, _y: Int) extends js.Object {
val x = _x
var y = _y
def sum(): Int = x + y
}
class ConstructorDefaultParamJSNonNativeNone(val foo: Int = -1) extends js.Object
class ConstructorDefaultParamJSNonNativeJSNonNative(val foo: Int = -1) extends js.Object
object ConstructorDefaultParamJSNonNativeJSNonNative extends js.Object
class ConstructorDefaultParamJSNonNativeScala(val foo: Int = -1) extends js.Object
object ConstructorDefaultParamJSNonNativeScala
class ConstructorDefaultParamScalaJSNonNative(val foo: Int = -1)
object ConstructorDefaultParamScalaJSNonNative extends js.Object
@js.native
@JSGlobal("ConstructorDefaultParam")
class ConstructorDefaultParamJSNativeNone(val foo: Int = -1) extends js.Object
@js.native
@JSGlobal("ConstructorDefaultParam")
class ConstructorDefaultParamJSNativeScala(val foo: Int = -1) extends js.Object
object ConstructorDefaultParamJSNativeScala
@js.native
@JSGlobal("ConstructorDefaultParam")
class ConstructorDefaultParamJSNativeJSNonNative(val foo: Int = -1) extends js.Object
object ConstructorDefaultParamJSNativeJSNonNative extends js.Object
@js.native
@JSGlobal("ConstructorDefaultParam")
class ConstructorDefaultParamJSNativeJSNative(val foo: Int = -1) extends js.Object
@js.native
@JSGlobal("ConstructorDefaultParam")
object ConstructorDefaultParamJSNativeJSNative extends js.Object
// sanity check
object ConstructorDefaultParamScalaScala
class ConstructorDefaultParamScalaScala(val foo: Int = -1)
// sanity check
class ConstructorDefaultParamScalaNone(val foo: Int = -1)
class ConstructorDefaultParamMultiParamList(val default: Int)(
val title: String, val description: js.UndefOr[String] = default.toString)
extends js.Object
class ConstructorDefaultParamMultiParamListWithOverloading(val default: Int)(
val title: String, val description: js.UndefOr[String] = default.toString)
extends js.Object {
def this(c: Char) = this(c.toInt)("char", "a char")
def this(x: Int, y: Int) = this(x)(y.toString, js.undefined)
}
class OverloadedConstructorParamNumber(val foo: Int) extends js.Object {
def this(x: Int, y: Int) = this(x + y)
def this(x: Int, y: Int, z: Int) = this(x + y, z)
}
class OverloadedConstructorParamType(val foo: Int) extends js.Object {
def this(x: String) = this(x.length)
def this(x: Option[String]) = this(x.get)
}
class OverloadedConstructorComplex(val foo: Int, var bar: Int) extends js.Object {
def this() = this(5, 6)
def this(x: Int) = this(x, x)
def this(x: Int, y: Int, z: Int) = {
this(x, y)
bar = z
}
def this(x: String) = this(x.length)
def this(x: String, y: Int) = this(x.length, y)
def this(x: Int, y: String) = this(x, y.length)
def this(w: Int, x: Int, y: Int, z: Int) = {
this(w + x, y, z)
bar = y
}
def this(a: String, x: String, b: String = "", y: String = "") =
this((a + b).length, (x + y).length)
}
class SecondaryConstructorUseDefaultParam(x: String = "x")(val y: String = x + "y") extends js.Object {
def this(x: Int) = this(x.toString())()
}
class PrimaryConstructorWithPatternMatch_Issue4581(xs: List[Int]) extends js.Object {
var head: Int = 0
xs match {
case x :: xr => head = x
case _ => fail(xs.toString())
}
}
class SecondaryConstructorWithPatternMatch_Issue4581 extends js.Object {
var head: Int = 0
def this(xs: List[Int]) = {
this()
xs match {
case x :: xr => head = x
case _ => fail(xs.toString())
}
}
}
class SimpleConstructorAutoFields(val x: Int, var y: Int) extends js.Object {
def sum(): Int = x + y
}
class SimpleConstructorParamAccessors(x: Int, y: Int) extends js.Object {
def sum(): Int = x + y
}
class ConstructorWithParamNameClashes(arg: Int, arg$1: Int, arg$2: Int,
prep: Int, prep$1: Int, prep$2: Int)
extends js.Object {
val allArgs = List(arg, arg$1, arg$2, prep, prep$1, prep$2)
}
class DefaultFieldValues extends js.Object {
var int: Int = _
var bool: Boolean = _
var char: Char = _
var string: String = _
var unit: Unit = _
}
trait LazyValFieldsSuperTrait extends js.Object {
def initCount: Int
def field: Int
}
class LazyValFields extends js.Object with LazyValFieldsSuperTrait {
var initCount: Int = 0
lazy val field: Int = {
initCount += 1
42
}
}
class OverrideLazyValFields extends LazyValFields {
override lazy val field: Int = {
initCount += 1
53
}
}
class NullingOutLazyValFieldBug3422(initStr: String) extends js.Object {
lazy val str: String = initStr
}
class SimpleInheritedFromNative(
x: Int, val y: Int) extends NativeParentClass(x)
object JSNameHolder {
final val MethodName = "myMethod"
}
// #3998
trait SelfTypeTest1 extends js.Object { self => }
trait SelfTypeTest2 extends js.Object { self: js.Date => }
trait SelfTypeTest3 extends js.Object { this: js.Date => }
}
|
scala-js/scala-js
|
test-suite/js/src/test/scala/org/scalajs/testsuite/jsinterop/NonNativeJSTypeTest.scala
|
Scala
|
apache-2.0
| 62,032
|
package org.apache.spark.sql.cassandra
import org.apache.spark.Logging
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.planning.PhysicalOperation
import org.apache.spark.sql.catalyst.plans.logical
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkPlan
private[cassandra] trait CassandraStrategies {
// Possibly being too clever with types here... or not clever enough.
self: SQLContext#SparkPlanner =>
val cassandraContext: CassandraSQLContext
object DataSinks extends Strategy {
def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case logical.InsertIntoTable(table: CassandraRelation, partition, child, overwrite) =>
InsertIntoCassandraTable(table, planLater(child), overwrite)(cassandraContext) :: Nil
case _ => Nil
}
}
/** Retrieves data using a CassandraTableScan.
* Partition pruning predicates are also detected and applied. */
object CassandraTableScans extends Strategy with Logging {
def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case PhysicalOperation(projectList, predicates, relation: CassandraRelation) =>
logInfo(s"projectList: ${projectList.toString()}")
logInfo(s"predicates: ${predicates.toString()}")
val pushDown = new PredicatePushDown(predicates.toSet, relation.tableDef)
val pushdownPredicates = pushDown.predicatesToPushDown.toSeq
val otherPredicates = pushDown.predicatesToPreserve.toSeq
logInfo(s"pushdown predicates: ${pushdownPredicates.toString()}")
logInfo(s"remaining predicates: ${otherPredicates.toString()}")
pruneFilterProject(
projectList,
otherPredicates,
identity[Seq[Expression]],
CassandraTableScan(_, relation, pushdownPredicates)(cassandraContext)) :: Nil
case _ =>
Nil
}
}
}
|
boneill42/spark-cassandra-connector
|
spark-cassandra-connector/src/main/scala/org/apache/spark/sql/cassandra/CassandraStrategies.scala
|
Scala
|
apache-2.0
| 1,954
|
package com.tribbloids.spookystuff.utils
import scala.collection.mutable.ArrayBuffer
/**
* Heavily stateful iterator that can revert to any previous state, like a time machine.
* Currently, maxBacktracking is forced to be infinite, which consumes a huge amount of memory;
* this will need further optimisation if streaming support is on the roadmap.
*/
case class BacktrackingIterator[T](
self: Iterator[T],
maxBacktracking: Int = -1
) extends Iterator[T] {
val history: ArrayBuffer[T] = ArrayBuffer.empty
def historySansCurrent: Seq[T] = history.init
@volatile var _backtracking: Int = -1
def backtracking: Int = {
_backtracking
}
def backtracking_=(v: Int): Unit = {
if (v >= history.length) _backtracking = -1
else _backtracking = v
}
//
// def scanned: ArrayBuffer[T] = preceding :+ current
override def hasNext: Boolean = {
(backtracking >= 0) || self.hasNext
}
override def next(): T = {
if (backtracking >= 0) {
val result = history(backtracking)
backtracking += 1
result
} else {
val v = self.next()
history += v
v
}
}
val checkpoints: ArrayBuffer[Int] = ArrayBuffer.empty
def snapshot(): Int = {
checkpoints += history.length
checkpoints.size - 1
}
def revert(i: Int): Unit = {
assert(i < checkpoints.length, s"index $i exceeds the last checkpoint index ${checkpoints.size - 1}")
val checkpoint = checkpoints(i)
_backtracking = checkpoint
}
}
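// A minimal usage sketch (not part of the original file): take a snapshot before
// consuming elements, then revert to it so the same elements are replayed from the
// recorded history instead of the underlying iterator.
object BacktrackingIteratorUsageSketch {
  def main(args: Array[String]): Unit = {
    val it = BacktrackingIterator(Iterator(1, 2, 3, 4))
    val cp = it.snapshot()       // checkpoint at position 0
    val first = it.next()        // pulls 1 from the source and records it in history
    it.revert(cp)                // rewind to the checkpoint
    assert(it.next() == first)   // 1 is replayed from history, not from the source
  }
}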
|
tribbloid/spookystuff
|
mldsl/src/main/scala/com/tribbloids/spookystuff/utils/BacktrackingIterator.scala
|
Scala
|
apache-2.0
| 1,484
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.message
import org.apache.kafka.common.record._
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import scala.collection._
import org.scalatest.junit.JUnitSuite
import org.junit._
import org.junit.Assert._
class MessageCompressionTest extends JUnitSuite {
@Test
def testLZ4FramingV0() {
val output = CompressionFactory(LZ4CompressionCodec, Message.MagicValue_V0, new ByteArrayOutputStream())
assertTrue(output.asInstanceOf[KafkaLZ4BlockOutputStream].useBrokenFlagDescriptorChecksum())
val input = CompressionFactory(LZ4CompressionCodec, Message.MagicValue_V0, new ByteArrayInputStream(Array[Byte](0x04, 0x22, 0x4D, 0x18, 0x60, 0x40, 0x1A)))
assertTrue(input.asInstanceOf[KafkaLZ4BlockInputStream].ignoreFlagDescriptorChecksum())
}
@Test
def testLZ4FramingV1() {
val output = CompressionFactory(LZ4CompressionCodec, Message.MagicValue_V1, new ByteArrayOutputStream())
assertFalse(output.asInstanceOf[KafkaLZ4BlockOutputStream].useBrokenFlagDescriptorChecksum())
val input = CompressionFactory(LZ4CompressionCodec, Message.MagicValue_V1, new ByteArrayInputStream(Array[Byte](0x04, 0x22, 0x4D, 0x18, 0x60, 0x40, -126)))
assertFalse(input.asInstanceOf[KafkaLZ4BlockInputStream].ignoreFlagDescriptorChecksum())
}
@Test
def testSimpleCompressDecompress() {
val codecs = mutable.ArrayBuffer[CompressionCodec](GZIPCompressionCodec)
if(isSnappyAvailable)
codecs += SnappyCompressionCodec
if(isLZ4Available)
codecs += LZ4CompressionCodec
for(codec <- codecs)
testSimpleCompressDecompress(codec)
}
// A quick test to ensure any growth or increase in compression size is known when upgrading libraries
@Test
def testCompressSize() {
val bytes1k: Array[Byte] = (0 until 1000).map(_.toByte).toArray
val bytes2k: Array[Byte] = (1000 until 2000).map(_.toByte).toArray
val bytes3k: Array[Byte] = (3000 until 4000).map(_.toByte).toArray
val messages: List[Message] = List(new Message(bytes1k, Message.NoTimestamp, Message.MagicValue_V1),
new Message(bytes2k, Message.NoTimestamp, Message.MagicValue_V1),
new Message(bytes3k, Message.NoTimestamp, Message.MagicValue_V1))
testCompressSize(GZIPCompressionCodec, messages, 396)
if(isSnappyAvailable)
testCompressSize(SnappyCompressionCodec, messages, 1063)
if(isLZ4Available)
testCompressSize(LZ4CompressionCodec, messages, 387)
}
def testSimpleCompressDecompress(compressionCodec: CompressionCodec) {
val messages = List[Message](new Message("hi there".getBytes), new Message("I am fine".getBytes), new Message("I am not so well today".getBytes))
val messageSet = new ByteBufferMessageSet(compressionCodec = compressionCodec, messages = messages:_*)
assertEquals(compressionCodec, messageSet.shallowIterator.next().message.compressionCodec)
val decompressed = messageSet.iterator.map(_.message).toList
assertEquals(messages, decompressed)
}
def testCompressSize(compressionCodec: CompressionCodec, messages: List[Message], expectedSize: Int) {
val messageSet = new ByteBufferMessageSet(compressionCodec = compressionCodec, messages = messages:_*)
assertEquals(s"$compressionCodec size has changed.", expectedSize, messageSet.sizeInBytes)
}
def isSnappyAvailable: Boolean = {
try {
new org.xerial.snappy.SnappyOutputStream(new ByteArrayOutputStream())
true
} catch {
case _: UnsatisfiedLinkError | _: org.xerial.snappy.SnappyError => false
}
}
def isLZ4Available: Boolean = {
try {
new net.jpountz.lz4.LZ4BlockOutputStream(new ByteArrayOutputStream())
true
} catch {
case _: UnsatisfiedLinkError => false
}
}
}
|
eribeiro/kafka
|
core/src/test/scala/unit/kafka/message/MessageCompressionTest.scala
|
Scala
|
apache-2.0
| 4,611
|
package uk.skelty.ScalIRC
import java.io._
import scala.io._
class Message (writer: BufferedWriter, ident: String, channel: String, msgtype: String) {
/**
* Process a message.
* @param isadmin whether the sender is an admin
* @param extra the raw message payload (the leading ':' is stripped before parsing)
*/
def process (isadmin: Boolean, extra: String = "") {
println("Processing message type: " + msgtype)
if (msgtype == "PRIVMSG") {
val msg = extra.substring(1)
println("Processing message: " + msg)
val parts = msg.split(" ")
if (parts(0) == "!S") {
val command = parts(1)
command match {
case "remind" => Responses.Reminder.respond(writer, channel, msg)
case _ => // ignore unknown commands instead of throwing a MatchError
}
}
}
}
}
|
SkylarKelty/scalirc
|
src/main/scala/uk/skelty/ScalIRC/Message.scala
|
Scala
|
mit
| 607
|
// This implementation of map2 passes the initial RNG to the first argument
// and the resulting RNG to the second argument. It's not necessarily wrong
// to do this the other way around, since the results are random anyway.
// We could even pass the initial RNG to both `f` and `g`, but that might
// have unexpected results. E.g. if both arguments are `RNG.int` then we would
// always get two of the same `Int` in the result. When implementing functions
// like this, it's important to consider how we would test them for
// correctness.
def map2[A,B,C](ra: Rand[A], rb: Rand[B])(f: (A, B) => C): Rand[C] =
rng => {
val (a, r1) = ra(rng)
val (b, r2) = rb(r1)
(f(a, b), r2)
}
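// A short follow-up sketch (an assumption, not part of the original answer key):
// combining two generators into a pair is the canonical use of map2. `Rand[A]` is
// assumed to be the book's usual alias for `RNG => (A, RNG)`.
def both[A,B](ra: Rand[A], rb: Rand[B]): Rand[(A,B)] =
  map2(ra, rb)((_, _))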
|
ud3sh/coursework
|
functional-programming-in-scala-textbook/answerkey/state/06.answer.scala
|
Scala
|
unlicense
| 693
|
/**
* Copyright 2014 Jorge Aliss (jaliss at gmail dot com) - twitter: @jaliss
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package service
import com.typesafe.config.ConfigFactory
import play.api.mvc.{ Call, RequestHeader }
import securesocial.core.IdentityProvider
import securesocial.core.services.RoutesService
/**
* A RoutesService implementation which customizes the redirect URL
*/
class CustomRoutesService extends RoutesService {
lazy val conf = play.api.Play.current.configuration
val FaviconKey = "securesocial.faviconPath"
val JQueryKey = "securesocial.jqueryPath"
val CustomCssKey = "securesocial.customCssPath"
val DefaultFaviconPath = "images/favicon.png"
val DefaultJqueryPath = "javascripts/jquery-1.7.1.min.js"
val removeTrailingSlash: String => String = s =>
if(s.endsWith("/")) s.substring(0, s.length() - 1) else s
val config = ConfigFactory.load()
val baseURL = removeTrailingSlash(config.getString("application.mailer.baseURL"))
override def loginPageUrl(implicit req: RequestHeader): String = {
baseURL + "/login"
}
override def startSignUpUrl(implicit req: RequestHeader): String = {
baseURL + "/signup"
}
override def handleStartSignUpUrl(implicit req: RequestHeader): String = {
baseURL + "/signup"
}
override def signUpUrl(mailToken: String)(implicit req: RequestHeader): String = {
baseURL + "/signup/" + mailToken
}
override def handleSignUpUrl(mailToken: String)(implicit req: RequestHeader): String = {
baseURL + "/signup/" + mailToken
}
override def startResetPasswordUrl(implicit request: RequestHeader): String = {
baseURL + "/reset"
}
override def handleStartResetPasswordUrl(implicit req: RequestHeader): String = {
baseURL + "/reset"
}
override def resetPasswordUrl(mailToken: String)(implicit req: RequestHeader): String = {
baseURL + "/reset"
}
override def handleResetPasswordUrl(mailToken: String)(implicit req: RequestHeader): String = {
baseURL + "/reset/" + mailToken
}
override def passwordChangeUrl(implicit req: RequestHeader): String = {
baseURL + "/password-change"
}
override def handlePasswordChangeUrl(implicit req: RequestHeader): String = {
baseURL +"/password-change"
}
override def authenticationUrl(provider: String, redirectTo: Option[String] = None)(implicit req: RequestHeader): String = {
baseURL + "/login/" + provider + redirectTo.map(u => "?redirect-to=" + u).getOrElse("")
}
protected def valueFor(key: String, default: String) = {
val value = conf.getString(key).getOrElse(default)
securesocial.controllers.routes.Assets.at(value)
}
/**
* Loads the Favicon to use from configuration, using a default one if not provided
* @return the path to Favicon file to use
*/
override val faviconPath = valueFor(FaviconKey, DefaultFaviconPath)
/**
* Loads the Jquery file to use from configuration, using a default one if not provided
* @return the path to Jquery file to use
*/
override val jqueryPath = valueFor(JQueryKey, DefaultJqueryPath)
/**
* Loads the custom CSS file to use from configuration. If none is defined, none will be used.
* @return Option containing a custom css file or None
*/
override val customCssPath: Option[Call] = {
val path = conf.getString(CustomCssKey).map(securesocial.controllers.routes.Assets.at)
path
}
}
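// A hedged usage sketch (not part of the original file): SecureSocial resolves its
// RoutesService through the application's RuntimeEnvironment, so a custom
// implementation is typically wired in there. The environment class name and user
// type below are illustrative assumptions only.
//
//   class MyRuntimeEnvironment extends RuntimeEnvironment.Default[BasicProfile] {
//     override lazy val routes = new CustomRoutesService()
//   }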
|
didmar/securesocial-ws-sample
|
app/service/CustomRoutesService.scala
|
Scala
|
apache-2.0
| 3,923
|
/**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.oss.tools.osstrackerscraper
import org.joda.time.format.{DateTimeFormat, ISODateTimeFormat}
import org.slf4j.LoggerFactory
import play.api.libs.json._
import org.joda.time.{DateTime, DateTimeZone}
import java.util.Date
import com.netflix.oss.tools.osstrackerscraper.OssLifecycle.OssLifecycle
import scala.collection.mutable.ListBuffer
class GithubScraper(githubOrg: String, cassHost: String, cassPort: Int, esHost: String, esPort: Int, reportWriter: ReportWriter) {
def logger = LoggerFactory.getLogger(getClass)
val now = new DateTime().withZone(DateTimeZone.UTC)
val dtfISO8601 = ISODateTimeFormat.dateTimeNoMillis()
val dtfSimple = DateTimeFormat.forPattern("yyyy-MM-dd")
def asOfISO = dtfISO8601.print(now)
def asOfYYYYMMDD = dtfSimple.print(now)
def updateElasticSearch(): Boolean = {
val es = new ElasticSearchAccess(esHost, esPort)
val cass = new CassandraAccesss(cassHost, cassPort)
val github = new GithubAccess(asOfYYYYMMDD, asOfISO, true)
try {
println(Console.RED + s"remaining calls ${github.getRemainingHourlyRate()}" + Console.RESET)
// get all the known repos from cassandra, sorted in case we run out of GitHub API calls
val cassRepos = cass.getAllRepos()
val cassReposNames = cassRepos.map(_.name).toSet
logger.debug(s"repos(${cassReposNames.size}) in cass = $cassReposNames")
// get all of the known repos from github
val githubRepos = github.getAllRepositoriesForOrg(githubOrg)
val githubReposNames = githubRepos.map(_.getName).toSet
logger.debug(s"repos(${githubReposNames.size}) on GH = $githubReposNames")
val commonRepoNames = cassReposNames.intersect(githubReposNames)
val onlyInCassReposNames = cassReposNames.diff(githubReposNames)
val onlyInGHReposNames = githubReposNames.diff(cassReposNames)
logger.error(s"need to delete the following repos from cassandra - $onlyInCassReposNames")
logger.info(s"new repos detected on github that aren't in cassandra - $onlyInGHReposNames")
val commonReposCassRepos = commonRepoNames.map(name => cassRepos.find(name == _.name).get)
val commonReposCassReposOrderByLastUpdate = collection.SortedSet[RepoInfo]()(ESDateOrdering) ++ commonReposCassRepos
val commonReposCassReposOrderByLastUpdateNames = commonReposCassReposOrderByLastUpdate.toList.map(_.name)
val orderToUpdate = commonReposCassReposOrderByLastUpdateNames ++ onlyInGHReposNames
val docsList = new ListBuffer[JsObject]()
// create or validate that ES document exists for each repo
for (repoName <- orderToUpdate) {
val ghRepo = githubRepos.find(_.getName == repoName).get
val cassRepo = cassRepos.find(_.name == repoName)
val (public, ossLifecycle) = cassRepo match {
case Some(repo) => (repo.public, repo.osslifecycle)
case _ => (false, OssLifecycle.Unknown)
}
val alreadyExistsDoc = es.getESDocForRepo(asOfYYYYMMDD, repoName)
if (alreadyExistsDoc.isEmpty) {
val stat = github.getRepoStats(ghRepo, public, ossLifecycle)
val indexed = es.indexDocInES("/osstracker/repo_stats", stat.toString)
if (!indexed) {
return false
}
docsList += stat
}
else {
logger.info(s"skipping up index of repo doc for ${repoName}, ${asOfYYYYMMDD}")
docsList += alreadyExistsDoc.get
}
val success = cass.markReposLastUpdateDateES(repoName)
if (!success) {
return false
}
}
val alreadyExists = !es.getESDocForRepos(asOfYYYYMMDD).isEmpty
if (alreadyExists) {
logger.info(s"skipping up index of all repos doc for ${asOfYYYYMMDD}")
}
else {
val numRepos = docsList.size
val forks: Int = (docsList(0) \ "forks").as[Int]
val totalForks = docsList.map(obj => (obj \ "forks").as[Int]).sum
val totalStars = docsList.map(obj => (obj \ "stars").as[Int]).sum
val totalOpenIssues = docsList.map(obj => (obj \ "issues" \ "openCount").as[Int]).sum
val totalClosedIssues = docsList.map(obj => (obj \ "issues" \ "closedCount").as[Int]).sum
val totalOpenPRs = docsList.map(obj => (obj \ "pullRequests" \ "openCount").as[Int]).sum
val totalClosedPRs = docsList.map(obj => (obj \ "pullRequests" \ "closedCount").as[Int]).sum
val reposJsonDoc: JsObject = Json.obj(
"asOfISO" -> asOfISO,
"asOfYYYYMMDD" -> asOfYYYYMMDD,
"avgForks" -> totalForks / numRepos,
"avgStars" -> totalStars / numRepos,
// "numContributors" -> contributorLogins.length, // TODO: Need to fold all of the repos together
"issues" -> Json.obj(
"avgOpenCount" -> totalOpenIssues / numRepos,
"avgClosedCount" -> totalClosedIssues / numRepos,
"totalOpenCount" -> totalOpenIssues,
"totalClosedCount" -> totalClosedIssues
// "avgTimeToCloseInDays" -> avgIssues // TODO: Need to compute average
),
"pullRequests" -> Json.obj(
"avgOpenCount" -> totalOpenPRs / numRepos,
"avgClosedCount" -> totalClosedPRs / numRepos,
"totalOpenCount" -> totalOpenPRs,
"totalClosedCount" -> totalClosedPRs
// "avgTimeToCloseInDays" -> avgPRs // TODO: Need to compute average
),
"commits" -> Json.obj(
// "daysSinceLastCommit" -> daysSinceLastCommit // TODO: Need to compute average
),
"repos" -> docsList
)
logger.debug("allrepos info json = " + reposJsonDoc)
val indexed = es.indexDocInES("/osstracker/allrepos_stats", reposJsonDoc.toString)
if (!indexed) {
return false
}
}
println(Console.RED + s"remaining calls ${github.getRemainingHourlyRate()}" + Console.RESET)
}
finally {
cass.close()
}
true
}
def updateCassandra(): Boolean = {
val cass = new CassandraAccesss(cassHost, cassPort)
val github = new GithubAccess(asOfYYYYMMDD, asOfISO, true)
val report = StringBuilder.newBuilder
report.append(s"OSSTracker Report for ${asOfYYYYMMDD}\n\n")
try {
println(Console.RED + s"remaining calls ${github.getRemainingHourlyRate()}" + Console.RESET)
// get all the known repos from cassandra, sorted in case we run out of GitHub API calls
val cassRepos = cass.getAllRepos()
val cassReposNames = cassRepos.map(_.name).toSet
logger.debug(s"repos(${cassReposNames.size}) in cass = $cassReposNames")
// get all of the known repos from github
val githubRepos = github.getAllRepositoriesForOrg(githubOrg)
val githubReposNames = githubRepos.map(_.getName).toSet
logger.debug(s"repos(${githubReposNames.size}) on GH = $githubReposNames")
val commonRepoNames = cassReposNames.intersect(githubReposNames)
val onlyInCassReposNames = cassReposNames.diff(githubReposNames)
val onlyInGHReposNames = githubReposNames.diff(cassReposNames)
// add new repos to cassandra
logger.debug(s"repos that should be added to cassandra = $onlyInGHReposNames")
if (onlyInGHReposNames.size > 0) {
report.append(s"Found the following new repositories:\n")
report.append(s"**************************************************\n")
for (repoName <- onlyInGHReposNames) {
report.append(s"\t$repoName\n")
}
report.append("\n")
}
val reposToAdd = onlyInGHReposNames.map(repoName => {
val githubRepo = githubRepos.find(ghRepo => ghRepo.getName == repoName).get
val repoInfo = new RepoInfo(repoName, Conf.SENTINAL_DEV_LEAD_ID, Conf.SENTINAL_MGR_LEAD_ID,
Conf.SENTINAL_ORG, new Date(0), new Date(0), !githubRepo.isPrivate, githubOrg, true, OssLifecycle.Unknown)
val success = cass.newRepo(repoInfo)
if (!success) {
return false
}
})
// see what repos we should mark as non-existent in cassandra
logger.error(s"repos that should be deleted from the database = $onlyInCassReposNames")
if (onlyInCassReposNames.size > 0) {
report.append(s"These repos should be deleted from the DB:\n")
report.append(s"**************************************************\n")
for (repoName <- onlyInCassReposNames) {
report.append(s"\t$repoName\n")
}
report.append("\n")
}
val success1 = cass.markReposAsNonExistant(onlyInCassReposNames.toList)
if (!success1) {
return false
}
val cassReposNow = cass.getAllRepos()
logger.debug(s"cassReposNow = $cassReposNow")
val wentPublic = ListBuffer[String]()
val wentPrivate = ListBuffer[String]()
// see what repos we should change public/private in cassandra
for (repo <- cassReposNow) {
val cassPublic = repo.public
val githubRepo = githubRepos.find(_.getName == repo.name)
githubRepo match {
case Some(ghRepo) => {
val ghPublic = !ghRepo.isPrivate
if (cassPublic != ghPublic) {
logger.info(s"updating repo ${repo.name} with public = $ghPublic")
val success = cass.updateGHPublicForRepo(repo.name, ghPublic)
if (!success) {
return false
}
if (ghPublic) {
wentPublic += ghRepo.getName
}
else {
wentPrivate += ghRepo.getName
}
}
}
case _ => {
logger.error(s"github no longer has the repo ${repo.name}")
}
}
}
if (wentPublic.size > 0) {
report.append(s"These repos went public:\n")
report.append(s"**************************************************\n")
for (repoName <- wentPublic) {
report.append(s"\t$repoName\n")
}
report.append("\n")
}
if (wentPrivate.size > 0) {
report.append(s"These repos went private:\n")
report.append(s"**************************************************\n")
for (repoName <- wentPrivate) {
report.append(s"\t$repoName\n")
}
report.append("\n")
}
val changedLifecycle = ListBuffer[(String, OssLifecycle, OssLifecycle)]()
val unknownLifecycle = ListBuffer[String]()
// see what repos have changed OSS Lifecycle
for (repo <- cassReposNow) {
val githubRepo = githubRepos.find(_.getName == repo.name)
githubRepo match {
case Some(ghRepo) => {
val lifecycle = github.getOSSMetaDataOSSLifecycle(ghRepo)
if (lifecycle == OssLifecycle.Unknown) {
unknownLifecycle += ghRepo.getName
}
if (lifecycle != repo.osslifecycle) {
logger.info(s"updating repo ${repo.name} lifecycle from ${repo.osslifecycle} to $lifecycle")
val success = cass.updateLifecycleForRepo(repo.name, lifecycle)
if (!success) {
return false
}
changedLifecycle += ((ghRepo.getName, repo.osslifecycle, lifecycle))
}
}
case _ => {
logger.error(s"github no longer has the repo ${repo.name}")
}
}
}
if (unknownLifecycle.size > 0) {
report.append(s"These repos do not have correct OSS Lifecycle files:\n")
report.append(s"**************************************************\n")
for (repoName <- unknownLifecycle) {
report.append(s"\t$repoName\n")
}
report.append("\n")
}
if (changedLifecycle.size > 0) {
report.append(s"These repos changed oss lifecycle:\n")
report.append(s"**************************************************\n")
for (change <- changedLifecycle) {
report.append(s"\t${change._1} went from ${change._2} to ${change._3}\n")
}
report.append("\n")
}
// mark all of the repos as last updated now
logger.info("updating all repos in cassandra for last updated")
val success2 = cass.markReposLastUpdateDateDB(cassReposNow.map(_.name))
if (!success2) {
return false
}
println(Console.RED + s"remaining calls ${github.getRemainingHourlyRate()}" + Console.RESET)
reportWriter.processReport(report.toString)
}
finally {
cass.close()
}
true
}
}
|
Netflix/osstracker
|
osstracker-scraper/src/main/scala/com/netflix/oss/tools/osstrackerscraper/GithubScraper.scala
|
Scala
|
apache-2.0
| 13,145
|
package models.viewhelper
import scala.language.higherKinds
import controllers.sugar.Requests.OreRequest
import ore.db.access.ModelView
import ore.db.impl.OrePostgresDriver.api._
import ore.db.impl.schema.{OrganizationRoleTable, OrganizationTable, UserTable}
import ore.db.{Model, ModelService}
import ore.models.organization.Organization
import ore.models.project.Project
import ore.models.user.User
import ore.models.user.role.OrganizationUserRole
import ore.permission._
import ore.permission.role.Role
import ore.permission.scope.GlobalScope
import cats.syntax.all._
import cats.{Monad, Parallel}
import slick.lifted.TableQuery
// TODO separate Scoped UserData
case class UserData(
headerData: HeaderData,
user: Model[User],
isOrga: Boolean,
projectCount: Int,
orgas: Seq[(Model[Organization], Model[User], Model[OrganizationUserRole], Model[User])],
globalRoles: Set[Role],
userPerm: Permission,
orgaPerm: Permission
) {
def global: HeaderData = headerData
def hasUser: Boolean = global.hasUser
def currentUser: Option[Model[User]] = global.currentUser
def isCurrent: Boolean = currentUser.contains(user)
}
object UserData {
private def queryRoles(user: Model[User]) =
for {
role <- TableQuery[OrganizationRoleTable] if role.userId === user.id.value
org <- TableQuery[OrganizationTable] if role.organizationId === org.id
orgUser <- TableQuery[UserTable] if org.id === orgUser.id
owner <- TableQuery[UserTable] if org.ownerId === owner.id
} yield (org, orgUser, role, owner)
def of[F[_]](request: OreRequest[_], user: Model[User])(
implicit service: ModelService[F],
F: Monad[F],
par: Parallel[F]
): F[UserData] =
for {
isOrga <- user.toMaybeOrganization(ModelView.now(Organization)).isDefined
projectCount <- user.projects(ModelView.now(Project)).size
t <- perms(user)
(globalRoles, userPerms, orgaPerms) = t
orgas <- service.runDBIO(queryRoles(user).result)
} yield UserData(request.headerData, user, isOrga, projectCount, orgas, globalRoles, userPerms, orgaPerms)
def perms[F[_]](user: Model[User])(
implicit service: ModelService[F],
F: Monad[F],
par: Parallel[F]
): F[(Set[Role], Permission, Permission)] = {
(
user.permissionsIn(GlobalScope),
user.toMaybeOrganization(ModelView.now(Organization)).semiflatMap(user.permissionsIn(_)).value,
user.globalRoles.allFromParent
).parMapN { (userPerms, orgaPerms, globalRoles) =>
(globalRoles.map(_.toRole).toSet, userPerms, orgaPerms.getOrElse(Permission.None))
}
}
}
|
SpongePowered/Ore
|
orePlayCommon/app/models/viewhelper/UserData.scala
|
Scala
|
mit
| 2,668
|
class B extends A
{
override var x = 3
}
|
jamesward/xsbt
|
sbt/src/sbt-test/source-dependencies/var/B.scala
|
Scala
|
bsd-3-clause
| 41
|
import scala.util.control.Exception._
def safe(c: => Int) = (catching(classOf[ArithmeticException]) or
catching(classOf[NumberFormatException])).
andFinally{ println("finally safe") }.either(c)
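// A brief usage sketch (not in the original book example file), showing both catchers
// and the finally block in action:
// safe(6 / 0)       // Left(java.lang.ArithmeticException: / by zero), prints "finally safe"
// safe("abc".toInt) // Left(java.lang.NumberFormatException ...), prints "finally safe"
// safe("42".toInt)  // Right(42), also prints "finally safe"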
|
grzegorzbalcerek/scala-book-examples
|
examples/ToValues2.scala
|
Scala
|
mit
| 240
|
package org.jetbrains.plugins.scala
package lang.psi.light.scala
import com.intellij.psi.impl.light.LightElement
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScNamedElement
/**
* Nikolay.Tropin
* 2014-08-15
*/
object isLightScNamedElement {
def unapply(lightElem: LightElement): Option[ScNamedElement] = lightElem match {
case light: ScLightBindingPattern => Some(light.b)
case light: ScLightFieldId => Some(light.f)
case light: ScLightFunctionDeclaration => Some(light.fun)
case light: ScLightFunctionDefinition => Some(light.fun)
case light: ScLightParameter => Some(light.param)
case light: ScLightTypeAliasDeclaration => Some(light.ta)
case light: ScLightTypeAliasDefinition => Some(light.ta)
case _ => None
}
}
|
LPTK/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/light/scala/isLightScNamedElement.scala
|
Scala
|
apache-2.0
| 768
|
package com.avsystem.commons
package macros.misc
import com.avsystem.commons.macros.MacroCommons
import scala.reflect.macros.blackbox
/**
* Author: ghik
* Created: 20/11/15.
*/
class LazyLoggingMacros(val c: blackbox.Context) extends MacroCommons {
import c.universe._
val DelegationCls = tq"$MiscPkg.Delegation"
def warningImpl(msg: Tree) =
q"""
if(${c.prefix}.rawLog.isWarningEnabled) {
${c.prefix}.rawLog.warning($msg)
}
"""
def infoImpl(msg: Tree) =
q"""
if(${c.prefix}.rawLog.isInfoEnabled) {
${c.prefix}.rawLog.info($msg)
}
"""
def debugImpl(msg: Tree) =
q"""
if(${c.prefix}.rawLog.isDebugEnabled) {
${c.prefix}.rawLog.debug($msg)
}
"""
}
|
AVSystem/scala-commons
|
commons-macros/src/main/scala/com/avsystem/commons/macros/misc/LazyLoggingMacros.scala
|
Scala
|
mit
| 756
|
package com.github.tweets.gateway.github
import com.github.tweets.common.GitHubProjectInfo
import org.mockito.Mockito
import org.scalatest.FunSuite
import org.scalatest.mock.MockitoSugar
import org.springframework.web.client.RestTemplate
class GitHubAPIGatewayImplTest extends FunSuite with MockitoSugar {
implicit val restTemplate = mock[RestTemplate]
val projectInfo = mock[GitHubProjectInfo]
val projectsCollection = GitHubProjectInfoCollectionWrapper(List.fill(15)(projectInfo))
val gateway = new GitHubAPIGatewayImpl
test("should create appropriate request, execute it and fetch top 10 projects") {
assert(projectsCollection.items.size === 15)
Mockito.when(
restTemplate.getForObject("https://api.github.com/search/repositories?q=word&sort=stars", classOf[GitHubProjectInfoCollectionWrapper])
).thenReturn(projectsCollection)
val result = gateway.fetchTopProjects("word")
assert(result === List.fill(10)(projectInfo))
Mockito.verify(restTemplate)
.getForObject("https://api.github.com/search/repositories?q=word&sort=stars", classOf[GitHubProjectInfoCollectionWrapper])
}
}
|
andrei-l/tweets-for-github-projects-gatherer
|
src/test/scala/com/github/tweets/gateway/github/GitHubAPIGatewayImplTest.scala
|
Scala
|
mit
| 1,137
|
package zzb.datatype
import java.text.SimpleDateFormat
import java.util.concurrent.atomic.AtomicReference
import com.github.nscala_time.time.Imports
import org.joda.time.format.DateTimeFormatter
//import spray.json.DefaultJsonProtocol._
import BasicJsonFormats._
import spray.json._
import zzb.datatype.meta.EnumTypeInfo
import scala.annotation.tailrec
import scala.language.implicitConversions
import scala.reflect._
/**
* Created with IntelliJ IDEA.
* User: Simon Xiao
* Date: 13-11-19
* Time: 5:08 PM
* Copyright baoxian.com 2012~2020
*/
trait TString extends TMono[String] {
val vtm = classTag[String]
def parse(str: String): Pack = Pack(str)
implicit val valueFormat = TStringJsonFormat
}
object TString extends TString {
override val t_memo_ : String = "String"
def apply(code: String, memo: String) = new TString {
override val t_memo_ = memo
override lazy val t_code_ = code
}
}
trait TInt extends TMono[Int] {
val vtm = classTag[Int]
def parse(str: String): Pack = Pack(str.toInt)
implicit val valueFormat = TIntJsonFormat
}
object TInt extends TInt {
override val t_memo_ : String = "Int"
def apply(code: String, memo: String) = new TInt {
override val t_memo_ = memo
override lazy val t_code_ = code
}
}
trait TLong extends TMono[Long] {
val vtm = classTag[Long]
def parse(str: String): Pack = Pack(str.toLong)
implicit val valueFormat = TLongJsonFormat
}
object TLong extends TLong {
override val t_memo_ : String = "Long"
def apply(code: String, memo: String) = new TLong {
override val t_memo_ = memo
override lazy val t_code_ = code
}
}
trait TShort extends TMono[Short] {
val vtm = classTag[Short]
def parse(str: String): Pack = Pack(str.toShort)
implicit val valueFormat = TShortJsonFormat
}
object TShort extends TShort {
override val t_memo_ : String = "Short"
def apply(code: String, memo: String) = new TShort {
override val t_memo_ = memo
override lazy val t_code_ = code
}
}
trait TByte extends TMono[Byte] {
val vtm = classTag[Byte]
def parse(str: String): Pack = Pack(str.toByte)
implicit val valueFormat = TByteJsonFormat
}
object TByte extends TByte {
override val t_memo_ : String = "Byte"
def apply(code: String, memo: String) = new TByte {
override val t_memo_ = memo
override lazy val t_code_ = code
}
}
trait TDouble extends TMono[Double] {
val vtm = classTag[Double]
def parse(str: String): Pack = Pack(str.toDouble)
implicit val valueFormat = TDoubleJsonFormat
}
object TDouble extends TDouble {
override val t_memo_ : String = "Double"
def apply(code: String, memo: String) = new TDouble {
override val t_memo_ = memo
override lazy val t_code_ = code
}
}
trait TFloat extends TMono[Float] {
val vtm = classTag[Float]
def parse(str: String): Pack = Pack(str.toFloat)
implicit val valueFormat = TFloatJsonFormat
}
object TFloat extends TFloat {
override val t_memo_ : String = "Float"
def apply(code: String, memo: String) = new TFloat {
override val t_memo_ = memo
override lazy val t_code_ = code
}
}
trait TBigDecimal extends TMono[BigDecimal] {
val vtm = classTag[BigDecimal]
def parse(str: String): Pack = Pack(BigDecimal.apply(str))
implicit val valueFormat = TBigDecimalJsonFormat
}
object TBigDecimal extends TBigDecimal {
override val t_memo_ : String = "BigDecimal"
def apply(code: String, memo: String) = new TBigDecimal {
override val t_memo_ = memo
override lazy val t_code_ = code
}
}
import com.github.nscala_time.time.Imports._
trait TDateTime extends TMono[DateTime] {
val vtm = classTag[DateTime]
def parse(str: String): Pack = string2DatePack(str)
def parse(str: String, pattern: String): Pack = Pack(DateTimeFormat.forPattern(pattern).parseDateTime(str))
def format(dt: Pack)(implicit pattern: String = "yyyy-MM-dd HH:mm:ss"): String = dt.value.toString(pattern)
override protected def packToString(i: ValuePack[DateTime]): String = i.value.toString("yyyy-MM-dd HH:mm:ss")
implicit def dataPack2DateTime(i: Pack): Imports.DateTime = i.value
implicit def dataPack2String(i: Pack)(implicit pattern: String = "yyyy-MM-dd HH:mm:ss"): String = format(i)(pattern)
implicit def string2DatePack(dateTimeStr: String)(implicit pattern: String = "yyyy-MM-dd HH:mm:ss"): Pack = {
TDateTime.string2DateTime(dateTimeStr) match {
case Some(v) => Pack(v)
      case None => throw new IllegalArgumentException("Invalid date time format: \"" + dateTimeStr + '"')
}
}
implicit val valueFormat = DateTimeJsonFormat
}
object TDateTime extends TDateTime {
override val t_memo_ : String = "DateTime"
def apply(code: String, memo: String) = new TDateTime {
override val t_memo_ = memo
override lazy val t_code_ = code
}
  val patterns = "yyyy-MM-dd HH:mm:ss" ::
"yyyy-MM-dd HH:mm:ss.SSS" ::
"yyyy-MM-dd" ::
"HH:mm:ss" ::
"HH:mm:ss.SSSZZ" ::
"HH:mm:ssZZ" ::
"yyyy-MM-dd'T'HH:mm:ss.SSSZZ" ::
Nil
  val defaultPatterns = patterns.distinct.map(new SimpleDateFormat(_))
val init: Option[DateTime] = None
def tryParse(dateTimeStr: String, pf: SimpleDateFormat) = {
try {
Some(new DateTime(pf.parse(dateTimeStr).getTime))
} catch {
case e: Throwable =>
e.printStackTrace()
None
}
}
def parseFirstSuccess(pts: List[SimpleDateFormat], dateTimeStr: String): Option[DateTime] = {
for (p <- pts) {
val d = tryParse(dateTimeStr, p)
if (d.isDefined) return d
}
None
}
def string2DateTime(dateTimeStr: String)(implicit pattern: String = ""): Option[DateTime] = {
    val pts = (if (pattern.length > 0 && !patterns.contains(pattern)) new SimpleDateFormat(pattern) :: defaultPatterns else defaultPatterns).distinct
parseFirstSuccess(pts, dateTimeStr)
}
def date2String(date: DateTime)(implicit pattern: String = "yyyy-MM-dd HH:mm:ss") = date.toString(pattern)
}
trait TBoolean extends TMono[Boolean] {
val vtm = classTag[Boolean]
def YesTexts = List("true", "True", "TRUE", "Y", "y", "YES", "yes", "1", "是", "有", "真")
def NoTexts = List("false", "False", "FALSE", "N", "n", "No", "no", "0", "否", "无", "非", "空", "假", "")
val YesName = "true"
val NoName = "false"
def parse(str: String): Pack = string2BoolPack(str)
implicit def string2BoolPack(str: String): Pack =
if (str == YesName || YesTexts.contains(str)) Pack(true)
    else if (str == NoName || NoTexts.contains(str)) Pack(false)
    else throw new IllegalArgumentException("Invalid boolean text: \"" + str + '"')
implicit def Int2BoolPack(i: Int): Pack =
if (i != 0) Pack(true)
else Pack(false)
implicit def string2BoolValue(str: String): Boolean =
if (str == YesName || YesTexts.contains(str)) true
else if (str == NoName || NoTexts.contains(str)) false
    else throw new IllegalArgumentException("Invalid boolean text: \"" + str + '"')
implicit def boolPack2Bool(i: Pack): Boolean = i.value
implicit def boolPack2String(i: Pack): String = if (i.value) YesName else NoName
override protected def packToString(i: ValuePack[Boolean]): String = if (i.value) YesName else NoName
implicit val valueFormat = TBooleanJsonFormat
}
object TBoolean extends TBoolean {
override val t_memo_ : String = "Boolean"
def apply(code: String, memo: String) = new TBoolean {
override val t_memo_ = memo
override lazy val t_code_ = code
}
def apply(code: String, memo: String, yesText: String, noText: String) = new TBoolean {
override val t_memo_ = memo
override lazy val t_code_ = code
override val YesName = yesText
override val NoName = noText
}
}
case class EnumIdx(idx: Int)
trait TEnum extends TMono[EnumIdx] {
this: Enumeration =>
val vtm = classTag[EnumIdx]
EnumRegistry.register(getClass.getName.replace("$", ""), this)
override protected def packToString(i: ValuePack[EnumIdx]): String = i.value.idx.toString
override def parse(str: String): Pack = name2EnumPack(str)
implicit def int2EnumValue(id: Int) = this.apply(id)
implicit def name2EnumValue(name: String) = this.withName(name)
implicit def int2EnumPack(id: Int): Pack = Pack(EnumIdx(id))
implicit def name2EnumPack(name: String): Pack = Pack(EnumIdx(this.withName(name).id))
implicit def enumValue2Int(ev: this.type#Value): Int = ev.id
implicit def enumValue2Name(ev: this.type#Value): String = ev.toString
implicit def int2Name(id: Int): String = this.apply(id).toString
implicit def enumValue2Pack(ev: this.type#Value) = Pack(EnumIdx(ev.id))
implicit def enumValue2PackOption(ev: this.type#Value) = Some(Pack(EnumIdx(ev.id)))
//implicit def Packe2EnumValue(ev: this.type#Value) = Pack(EnumIdx(ev.id))
implicit def enumPack2Int(ei: Pack): Int = ei.value.idx
implicit def enumPack2Name(ei: Pack): String = this.apply(ei.value.idx).toString
implicit def enumPack2EnumValue(ei: Pack) = this(ei.value.idx)
implicit def EnumIdx2EnumValue(idx: EnumIdx) = this(idx.idx)
implicit def EnumValue2EnumIdx(ev: this.type#Value): EnumIdx = EnumIdx(ev.id)
implicit val valueFormat = EnumIdxFormat
implicit object EnumIdxFormat extends JsonFormat[EnumIdx] {
def write(x: EnumIdx) = JsObject("idx" -> JsNumber(x.idx), "name" -> JsString(int2Name(x.idx)))
def read(value: JsValue) = value match {
case JsNumber(x) if x.intValue() > 0 && x.intValue() < maxId => int2EnumPack(x.intValue())
case JsNumber(x) => deserializationError(s"Expected enum value in 0 .. $maxId, but got " + x)
case JsString(x) if values.exists(_.toString == x) => name2EnumPack(x)
case JsString(x) if isIntStr(x) =>
val x2 = x.toInt
if (x2 >= 0 && x2 < maxId) int2EnumPack(x2)
        else deserializationError(s"$x is not an allowed value")
      case JsString(x) => deserializationError(s"$x is not an allowed value")
case x: JsObject if x.fields.contains("idx") => EnumIdx(x.fields("idx").convertTo[Int])
case x => deserializationError("Expected enum value as JsString or JsNumber, but got " + x)
}
}
private def isIntStr(s: String) = try {
s.toInt
true
} catch {
case _: Throwable => false
}
override def AnyToPack(v: Any): Option[ValuePack[EnumIdx]] = {
super.AnyToPack(v) match {
case Some(p: ValuePack[_]) =>
if (p.value.idx >= maxId) None
else Some(p)
case None =>
v match {
case num: Int if num < maxId => Some(int2EnumPack(num))
case txt: String if values.exists(_.toString == txt) => Some(name2EnumPack(txt))
case _ => None
}
}
}
override def typeInfo: EnumTypeInfo =
new EnumTypeInfo(getClass.getName.replace("$", ""), t_memo_,
values.toList.map(v => (v.id, v.toString)))
}
object TEnum extends Enumeration with TEnum {
override val t_memo_ : String = "Enum"
}
object EnumRegistry {
  // Stores all TEnum instances, keyed by typeName
private[this] val _registry = new AtomicReference(Map.empty[String, TEnum])
@tailrec
def register(key: String, dt: TEnum): TEnum = {
val reg = _registry.get
val updated = reg.updated(key, dt)
if (_registry.compareAndSet(reg, updated)) dt
else register(key, dt)
}
def get(key: String): Option[TEnum] = _registry.get.get(key)
def all = _registry.get.map(_._2).toList
}
| stepover/zzb | zzb-datatype/src/main/scala/zzb/datatype/BasicTypes.scala | Scala | mit | 11,407 |
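A minimal usage sketch of the factories above, based only on the apply(code, memo) constructors and parse methods shown in the file (the field names here are illustrative):

import zzb.datatype._

// Define ad-hoc typed fields with a code and a human-readable memo.
val userName = TString("userName", "User name")
val age      = TInt("age", "Age in years")
val joined   = TDateTime("joined", "Date joined")

// parse wraps a raw string value in the type's Pack.
val namePack = userName.parse("Simon")
val agePack  = age.parse("42")
// TDateTime tries the built-in patterns ("yyyy-MM-dd HH:mm:ss", "yyyy-MM-dd", ...).
val joinPack = joined.parse("2013-11-19 17:08:00")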
package cromwell.backend.validation
import cats.data.ValidatedNel
import cats.syntax.validated._
import cromwell.core.{OptionNotFoundException, WorkflowOptions}
import common.util.TryUtil
import wom.core.EvaluatedRuntimeAttributes
import wom.types.WomType
import wom.values.WomValue
import scala.util.{Failure, Try}
object RuntimeAttributesDefault {
def workflowOptionsDefault(options: WorkflowOptions, mapping: Map[String, Traversable[WomType]]):
Try[Map[String, WomValue]] = {
options.defaultRuntimeOptions flatMap { attrs =>
TryUtil.sequenceMap(attrs collect {
case (k, v) if mapping.contains(k) =>
val maybeTriedValue = mapping(k) map { _.coerceRawValue(v) } find { _.isSuccess } getOrElse {
Failure(new RuntimeException(s"Could not parse JsonValue $v to valid WomValue for runtime attribute $k"))
}
k -> maybeTriedValue
}, "Failed to coerce default runtime options")
} recover {
case _: OptionNotFoundException => Map.empty[String, WomValue]
}
}
/**
* Traverse defaultsList in order, and for each of them add the missing (and only missing) runtime attributes.
*/
def withDefaults(attrs: EvaluatedRuntimeAttributes, defaultsList: List[EvaluatedRuntimeAttributes]): EvaluatedRuntimeAttributes = {
defaultsList.foldLeft(attrs)((acc, default) => {
acc ++ default.filterKeys(!acc.keySet.contains(_))
})
}
def noValueFoundFor[A](attribute: String): ValidatedNel[String, A] = s"Can't find an attribute value for key $attribute".invalidNel
}
| ohsu-comp-bio/cromwell | backend/src/main/scala/cromwell/backend/validation/RuntimeAttributesDefault.scala | Scala | bsd-3-clause | 1,561 |
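To make the merge order of withDefaults above concrete: evaluated attributes always win, and earlier entries in defaultsList take precedence over later ones, because each pass only fills keys that are still missing. A small illustrative example with plain strings standing in for WomValues (EvaluatedRuntimeAttributes is treated as a simple Map here):

val evaluated        = Map("cpu" -> "4")
val backendDefaults  = Map("cpu" -> "1", "memory" -> "2 GB")
val workflowDefaults = Map("memory" -> "8 GB", "disks" -> "local-disk 10 SSD")

// Same fold as withDefaults: each pass only adds keys not already present.
val merged = List(backendDefaults, workflowDefaults).foldLeft(evaluated) { (acc, default) =>
  acc ++ default.filter { case (k, _) => !acc.contains(k) }
}
// merged == Map("cpu" -> "4", "memory" -> "2 GB", "disks" -> "local-disk 10 SSD")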
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.guacamole.commands
import org.apache.spark.Logging
import org.apache.spark.rdd.RDD
import org.bdgenomics.adam.cli.Args4j
import org.bdgenomics.formats.avro.{ Contig, Variant, GenotypeAllele, Genotype }
import org.bdgenomics.guacamole.Common.Arguments.{ TumorNormalReads, Output }
import org.bdgenomics.guacamole.pileup.{ Insertion, Deletion, Pileup }
import org.bdgenomics.guacamole._
import org.bdgenomics.guacamole.reads.Read
import scala.collection.JavaConversions
object SomaticPoCIndelCaller extends Command with Serializable with Logging {
override val name = "somatic-poc"
override val description = "call simple insertion and deletion variants between a tumor and a normal"
private class Arguments
extends DistributedUtil.Arguments
with Output
with TumorNormalReads {
}
override def run(rawArgs: Array[String]): Unit = {
val args = Args4j[Arguments](rawArgs)
val sc = Common.createSparkContext(appName = Some(name))
val filters = Read.InputFilters(mapped = true, nonDuplicate = true, passedVendorQualityChecks = true)
val (tumorReads, normalReads) = Common.loadTumorNormalReadsFromArguments(args, sc, filters)
assert(tumorReads.sequenceDictionary == normalReads.sequenceDictionary,
"Tumor and normal samples have different sequence dictionaries. Tumor dictionary: %s.\\nNormal dictionary: %s."
.format(tumorReads.sequenceDictionary, normalReads.sequenceDictionary))
val loci = Common.loci(args, normalReads)
val lociPartitions = DistributedUtil.partitionLociAccordingToArgs(args, loci, tumorReads.mappedReads, normalReads.mappedReads)
val genotypes: RDD[Genotype] = DistributedUtil.pileupFlatMapTwoRDDs[Genotype](
tumorReads.mappedReads,
normalReads.mappedReads,
lociPartitions,
skipEmpty = true, // skip empty pileups
(pileupTumor, pileupNormal) => callSimpleIndelsAtLocus(
pileupTumor,
pileupNormal
).iterator
)
Common.writeVariantsFromArguments(args, genotypes)
DelayedMessages.default.print()
}
def callSimpleIndelsAtLocus(pileupTumor: Pileup, pileupNormal: Pileup): Seq[Genotype] = {
val tumorDeletions = pileupTumor.elements.map(_.alignment).collect { case d: Deletion => d }
val normalDeletions = pileupNormal.elements.map(_.alignment).collect { case d: Deletion => d }
// As a PoC here I'm just emitting a deletion if more tumor reads showed a deletion than normal reads did.
val deletions =
if (tumorDeletions.size > normalDeletions.size) {
val tumorDeletion = tumorDeletions.head
Seq(
Genotype
.newBuilder()
.setAlleles(JavaConversions.seqAsJavaList(Seq(GenotypeAllele.Ref, GenotypeAllele.Alt)))
.setSampleId("somatic")
.setVariant(
Variant
.newBuilder()
.setContig(
Contig.newBuilder.setContigName(pileupNormal.referenceName).build
)
.setStart(pileupNormal.locus)
.setReferenceAllele(Bases.basesToString(tumorDeletion.referenceBases))
.setAlternateAllele(Bases.baseToString(tumorDeletion.referenceBases(0)))
.build
)
.build
)
} else {
Nil
}
val tumorInsertions = pileupTumor.elements.map(_.alignment).collect { case d: Insertion => d }
val normalInsertions = pileupNormal.elements.map(_.alignment).collect { case d: Insertion => d }
val insertions =
if (tumorInsertions.size > normalInsertions.size) {
val tumorInsertion = tumorInsertions.head
Seq(
Genotype
.newBuilder()
.setAlleles(JavaConversions.seqAsJavaList(Seq(GenotypeAllele.Ref, GenotypeAllele.Alt)))
.setSampleId("somatic")
.setVariant(
Variant
.newBuilder()
.setContig(
Contig.newBuilder.setContigName(pileupNormal.referenceName).build
)
.setStart(pileupNormal.locus)
.setReferenceAllele(Bases.basesToString(tumorInsertion.referenceBases))
.setAlternateAllele(Bases.basesToString(tumorInsertion.sequencedBases))
.build
)
.build
)
} else {
Nil
}
deletions ++ insertions
}
}
| ryan-williams/guacamole | src/main/scala/org/bdgenomics/guacamole/commands/SomaticPoCIndelCaller.scala | Scala | apache-2.0 | 5,215 |
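The deletion and insertion branches above differ only in the allele strings they emit, so the shared builder chain could be factored into a helper. A refactoring sketch using the same bdg-formats builder calls as the snippet (this helper is not part of the original repo):

import org.bdgenomics.formats.avro.{ Contig, Variant, GenotypeAllele, Genotype }
import scala.collection.JavaConversions

// Build a heterozygous somatic genotype for a single variant at the given locus.
def buildSomaticGenotype(referenceName: String,
                         locus: Long,
                         referenceAllele: String,
                         alternateAllele: String): Genotype =
  Genotype.newBuilder()
    .setAlleles(JavaConversions.seqAsJavaList(Seq(GenotypeAllele.Ref, GenotypeAllele.Alt)))
    .setSampleId("somatic")
    .setVariant(
      Variant.newBuilder()
        .setContig(Contig.newBuilder.setContigName(referenceName).build)
        .setStart(locus)
        .setReferenceAllele(referenceAllele)
        .setAlternateAllele(alternateAllele)
        .build)
    .build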