| code (stringlengths 5..1M) | repo_name (stringlengths 5..109) | path (stringlengths 6..208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5..1M) |
|---|---|---|---|---|---|
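The header above lists the six columns of this dump and their value statistics. As a minimal sketch (not part of the dataset itself), one row can be modelled in Scala as follows; the field names mirror the column names, and the ranges in the comments are the statistics quoted in the header rather than constraints enforced by any type:

```scala
// Hypothetical row model for this dump; names mirror the columns above.
case class CodeSample(
  code: String,     // the file's source text, 5 chars to ~1M chars
  repoName: String, // "repo_name": owner/repository, 5 to 109 chars
  path: String,     // file path inside the repository, 6 to 208 chars
  language: String, // a single class in this split: "Scala"
  license: String,  // one of 15 license ids, e.g. "apache-2.0", "mit", "bsd-2-clause"
  size: Long        // file size in bytes (int64), 5 to ~1M
)
```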
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.linalg.distributed
import breeze.linalg.{DenseMatrix => BDM}
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.mllib.linalg.Vectors
class CoordinateMatrixSuite extends SparkFunSuite with MLlibTestSparkContext {
val m = 5
val n = 4
var mat: CoordinateMatrix = _
override def beforeAll() {
super.beforeAll()
val entries = sc.parallelize(Seq(
(0, 0, 1.0),
(0, 1, 2.0),
(1, 1, 3.0),
(1, 2, 4.0),
(2, 2, 5.0),
(2, 3, 6.0),
(3, 0, 7.0),
(3, 3, 8.0),
(4, 1, 9.0)), 3).map { case (i, j, value) =>
MatrixEntry(i, j, value)
}
mat = new CoordinateMatrix(entries)
}
test("size") {
assert(mat.numRows() === m)
assert(mat.numCols() === n)
}
test("empty entries") {
val entries = sc.parallelize(Seq[MatrixEntry](), 1)
val emptyMat = new CoordinateMatrix(entries)
intercept[RuntimeException] {
emptyMat.numCols()
}
intercept[RuntimeException] {
emptyMat.numRows()
}
}
test("toBreeze") {
val expected = BDM(
(1.0, 2.0, 0.0, 0.0),
(0.0, 3.0, 4.0, 0.0),
(0.0, 0.0, 5.0, 6.0),
(7.0, 0.0, 0.0, 8.0),
(0.0, 9.0, 0.0, 0.0))
assert(mat.toBreeze() === expected)
}
test("transpose") {
val transposed = mat.transpose()
assert(mat.toBreeze().t === transposed.toBreeze())
}
test("toIndexedRowMatrix") {
val indexedRowMatrix = mat.toIndexedRowMatrix()
val expected = BDM(
(1.0, 2.0, 0.0, 0.0),
(0.0, 3.0, 4.0, 0.0),
(0.0, 0.0, 5.0, 6.0),
(7.0, 0.0, 0.0, 8.0),
(0.0, 9.0, 0.0, 0.0))
assert(indexedRowMatrix.toBreeze() === expected)
}
test("toRowMatrix") {
val rowMatrix = mat.toRowMatrix()
val rows = rowMatrix.rows.collect().toSet
val expected = Set(
Vectors.dense(1.0, 2.0, 0.0, 0.0),
Vectors.dense(0.0, 3.0, 4.0, 0.0),
Vectors.dense(0.0, 0.0, 5.0, 6.0),
Vectors.dense(7.0, 0.0, 0.0, 8.0),
Vectors.dense(0.0, 9.0, 0.0, 0.0))
assert(rows === expected)
}
test("toBlockMatrix") {
val blockMat = mat.toBlockMatrix(2, 2)
assert(blockMat.numRows() === m)
assert(blockMat.numCols() === n)
assert(blockMat.toBreeze() === mat.toBreeze())
intercept[IllegalArgumentException] {
mat.toBlockMatrix(-1, 2)
}
intercept[IllegalArgumentException] {
mat.toBlockMatrix(2, 0)
}
}
}
| practice-vishnoi/dev-spark-1 | mllib/src/test/scala/org/apache/spark/mllib/linalg/distributed/CoordinateMatrixSuite.scala | Scala | apache-2.0 | 3,284 |
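The suite above exercises Spark MLlib's CoordinateMatrix and its conversions. As a minimal sketch of the same public API outside the test harness, assuming a live SparkContext named `sc` (the role MLlibTestSparkContext plays in the suite):

```scala
import org.apache.spark.mllib.linalg.distributed.{CoordinateMatrix, MatrixEntry}

// Assumes an existing SparkContext `sc`; entries are (row, col, value) triples.
val entries = sc.parallelize(Seq(
  MatrixEntry(0, 0, 1.0),
  MatrixEntry(1, 2, 4.0),
  MatrixEntry(4, 1, 9.0)))
val mat = new CoordinateMatrix(entries)  // dimensions inferred from the largest indices
println((mat.numRows(), mat.numCols())) // (5, 3)
val blocks = mat.toBlockMatrix(2, 2)    // the same 2x2 block size the suite tests
```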
package org.treppo.mocoscala.wrapper
import com.github.dreamhead.moco.extractor.PlainExtractor
import com.github.dreamhead.moco.matcher.{ContainMatcher, EndsWithMatcher, EqRequestMatcher, StartsWithMatcher}
import org.scalatest.FlatSpec
import org.scalatest.Matchers._
class ExtractorMatcherTest extends FlatSpec {
val extractor: PlainExtractor = new PlainExtractor("hello world")
val matcher: ExtractorMatcher = ExtractorMatcher(extractor)
"a extractor matcher" should "be able to do exact match" in {
(matcher === "hello") shouldBe a[EqRequestMatcher[_]]
}
"a extractor matcher" should "be able to do contain match" in {
(matcher contains "wor") shouldBe a[ContainMatcher[_]]
}
"a extractor matcher" should "be able to do startsWith match" in {
(matcher startsWith "hell") shouldBe a[StartsWithMatcher[_]]
}
"a extractor matcher" should "be able to do endsWith match" in {
(matcher endsWith "olrd") shouldBe a[EndsWithMatcher[_]]
}
}
| treppo/moco-scala | src/test/scala/org/treppo/mocoscala/wrapper/ExtractorMatcherTest.scala | Scala | mit | 979 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.serving.core.models
import com.stratio.sparta.sdk.{Parser, Input, JsoneyString}
import com.stratio.sparta.serving.core.constants.AppConstant
case class TransformationsModel(`type`: String,
order: Integer,
inputField: String = Input.RawDataKey,
outputFields: Seq[OutputFieldsModel],
configuration: Map[String, JsoneyString] = Map()) {
val outputFieldsTransformed = outputFields.map(field =>
OutputFieldsTransformedModel(field.name,
field.`type`.getOrElse(Parser.TypesFromParserClass.getOrElse(`type`.toLowerCase, Parser.DefaultOutputType))
))
}
case class OutputFieldsModel(name: String, `type`: Option[String] = None)
case class OutputFieldsTransformedModel(name: String, `type`: String)
| danielcsant/sparta | serving-core/src/main/scala/com/stratio/sparta/serving/core/models/TransformationsModel.scala | Scala | apache-2.0 | 1,484 |
package org.http4s
package util
import java.nio.charset.StandardCharsets
import cats.implicits._
import fs2._
import fs2.text.utf8Decode
class DecodeSpec extends Http4sSpec {
"decode" should {
"be consistent with utf8Decode" in prop { (s: String, chunkSize: Int) =>
(chunkSize > 0) ==> {
val source = Stream.emits {
s.getBytes(StandardCharsets.UTF_8)
.grouped(chunkSize)
.map(_.toArray)
.map(Chunk.bytes)
.toSeq
}.flatMap(Stream.chunk)
val utf8Decoded = utf8Decode(source).toList.combineAll
val decoded = decode(Charset.`UTF-8`)(source).toList.combineAll
decoded must_== utf8Decoded
}
}
"be consistent with String constructor over aggregated output" in prop { (cs: Charset, s: String, chunkSize: Int) =>
// x-COMPOUND_TEXT fails with a read only buffer.
(chunkSize > 0 && cs.nioCharset.canEncode && cs.nioCharset.name != "x-COMPOUND_TEXT") ==> {
val source: Stream[Pure, Byte] = Stream.emits {
s.getBytes(cs.nioCharset)
.grouped(chunkSize)
.map(Chunk.bytes)
.toSeq
}.flatMap(Stream.chunk).pure
val expected = new String(source.toVector.toArray, cs.nioCharset)
!expected.contains("\ufffd") ==> {
// \ufffd means we generated a String unrepresentable by the charset
val decoded = decode(cs)(source).toList.combineAll
decoded must_== expected
}
}
}
}
}
| ZizhengTai/http4s | tests/src/test/scala/org/http4s/util/DecodeSpec.scala | Scala | apache-2.0 | 1,519 |
package scala.slick.driver
import scala.slick.SLICKException
import scala.slick.ast._
import scala.slick.util._
import scala.slick.ql._
import scala.slick.ql.ColumnOps._
trait BasicStatementBuilderComponent { driver: BasicDriver =>
/** Builder for SELECT and UPDATE statements. */
class QueryBuilder(val ast: Node, val linearizer: ValueLinearizer[_]) {
protected final val b = new SQLBuilder
protected val mayLimit0 = true
protected val scalarFrom: Option[String] = None
protected val supportsTuples = true
protected val supportsCast = true
protected val concatOperator: Option[String] = None
def sqlBuilder = b
final def buildSelect(): QueryBuilderResult = {
buildComprehension(ast, true)
QueryBuilderResult(b.build, linearizer)
}
protected def buildComprehension(n: Node, liftExpression: Boolean): Unit = n match {
case Comprehension(from, where, orderBy, select) =>
b += "select "
select match {
case Some(n) => buildSelectClause(n)
case None =>
if(from.length <= 1) b += "*"
else b += symbolName(from.last._1) += ".*"
}
if(from.isEmpty) buildScalarFrom
else {
b += " from "
b.sep(from, ", ") { case (sym, n) =>
buildFrom(n, Some(sym))
}
}
if(!where.isEmpty) {
b += " where "
expr(where.reduceLeft(And))
}
if(!orderBy.isEmpty) appendOrderClause(orderBy)
case Pure(CountAll(q)) =>
b += "select count(*) from "
buildFrom(q, None)
case p @ Pure(_) =>
b += "select "
buildSelectClause(p)
buildScalarFrom
case TableNode(name) =>
b += "select * from " += quoteIdentifier(name)
case TakeDrop(from, take, drop) => buildTakeDrop(from, take, drop)
case Union(left, right, all, _, _) =>
b += "select * from "
buildFrom(left, None)
b += (if(all) " union all " else " union ")
buildFrom(right, None)
case n =>
if(liftExpression) buildComprehension(Pure(n), false)
else throw new SLICKException("Unexpected node "+n+" -- SQL prefix: "+b.build.sql)
}
protected def buildScalarFrom: Unit = scalarFrom.foreach { s => b += " from " += s }
protected def buildTakeDrop(from: Node, take: Option[Int], drop: Option[Int]) {
if(take == Some(0)) {
b += "select * from "
buildFrom(from, None)
b += " where 1=0"
} else {
buildComprehension(from, true)
appendTakeDropClause(take, drop)
}
}
protected def appendTakeDropClause(take: Option[Int], drop: Option[Int]) = (take, drop) match {
/* SQL:2008 syntax */
case (Some(t), Some(d)) => b += " offset " += d += " row fetch next " += t += " row only"
case (Some(t), None) => b += " fetch next " += t += " row only"
case (None, Some(d)) => b += " offset " += d += " row"
case _ =>
}
protected def buildSelectClause(n: Node): Unit = n match {
case Pure(StructNode(ch)) =>
b.sep(ch, ", ") { case (sym, n) =>
expr(n)
b += " as " += symbolName(sym)
}
case Pure(ProductNode(ch @ _*)) =>
b.sep(ch, ", ")(expr)
case Pure(n) => expr(n)
}
protected def buildFrom(n: Node, alias: Option[Symbol]){
def addAlias = alias foreach { s => b += ' ' += symbolName(s) }
n match {
case TableNode(name) =>
b += quoteIdentifier(name)
addAlias
case BaseJoin(leftGen, rightGen, left, right, jt) =>
buildFrom(left, Some(leftGen))
b += ' ' += jt.sqlName += " join "
buildFrom(right, Some(rightGen))
case FilteredJoin(leftGen, rightGen, left, right, jt, on) =>
buildFrom(left, Some(leftGen))
b += ' ' += jt.sqlName += " join "
buildFrom(right, Some(rightGen))
b += " on "
expr(on)
case n =>
b += '('
buildComprehension(n, true)
b += ')'
addAlias
}
}
protected def symbolName(s: Symbol): String = s match {
case AnonSymbol(name) => name
case s => quoteIdentifier(s.name)
}
def expr(n: Node): Unit = n match {
case ConstColumn(null) => b += "null"
case Not(Is(l, ConstColumn(null))) => b += '('; expr(l); b += " is not null)"
case Not(e) => b += "(not "; expr(e); b+= ')'
case i @ InSet(e, seq, bind) => if(seq.isEmpty) expr(ConstColumn.FALSE) else {
b += '('; expr(e); b += " in ("
if(bind) b.sep(seq, ",")(x => b +?= { (p, param) => i.tm(driver).setValue(x, p) })
else b += seq.map(i.tm(driver).valueToSQLLiteral).mkString(",")
b += "))"
}
case Is(l, ConstColumn(null)) => b += '('; expr(l); b += " is null)"
case Is(left: ProductNode, right: ProductNode) =>
if(supportsTuples) {
b += "("
expr(left)
b += " = "
expr(right)
b += ")"
} else {
val cols = left.nodeChildren zip right.nodeChildren
b += "("
b.sep(cols, " and "){ case (l,r) => expr(l); b += "="; expr(r) }
b += ")"
}
case ProductNode(ch @ _*) =>
b += "("
b.sep(ch, ", ")(expr)
b += ")"
case Is(l, r) => b += '('; expr(l); b += '='; expr(r); b += ')'
case EscFunction("concat", l, r) if concatOperator.isDefined =>
b += '('; expr(l); b += concatOperator.get; expr(r); b += ')'
case s: SimpleFunction =>
if(s.scalar) b += "{fn "
b += s.name += '('
b.sep(s.nodeChildren, ",")(expr)
b += ')'
if(s.scalar) b += '}'
case SimpleLiteral(w) => b += w
case s: SimpleExpression => s.toSQL(this)
case Between(left, start, end) => expr(left); b += " between "; expr(start); b += " and "; expr(end)
case CountDistinct(e) => b += "count(distinct "; expr(e); b += ')'
case Like(l, r, esc) =>
b += '('; expr(l); b += " like "; expr(r);
esc.foreach { ch =>
if(ch == '\'' || ch == '%' || ch == '_') throw new SLICKException("Illegal escape character '"+ch+"' for LIKE expression")
// JDBC defines an {escape } syntax but the unescaped version is understood by more DBs/drivers
b += " escape '" += ch += "'"
}
b += ')'
case a @ AsColumnOf(ch, name) =>
val tn = name.getOrElse(mapTypeName(a.typeMapper(driver)))
if(supportsCast) {
b += "cast("
expr(ch)
b += " as " += tn += ")"
} else {
b += "{fn convert("
expr(ch)
b += ',' += tn += ")}"
}
case s: SimpleBinaryOperator => b += '('; expr(s.left); b += ' ' += s.name += ' '; expr(s.right); b += ')'
case c @ ConstColumn(v) => b += c.typeMapper(driver).valueToSQLLiteral(v)
case c @ BindColumn(v) => b +?= { (p, param) => c.typeMapper(driver).setValue(v, p) }
case pc @ ParameterColumn(_, extractor) => b +?= { (p, param) =>
pc.typeMapper(driver).setValue(extractor.asInstanceOf[(Any => Any)](param), p)
}
case c: Case.CaseNode =>
b += "(case"
c.clauses.foldRight(()) { (w,_) =>
b += " when "
expr(w.asInstanceOf[Case.WhenNode].left)
b += " then "
expr(w.asInstanceOf[Case.WhenNode].right)
}
c.elseClause match {
case ConstColumn(null) =>
case n =>
b += " else "
expr(n)
}
b += " end)"
case FieldRef(struct, field) => b += symbolName(struct) += '.' += symbolName(field)
//TODO case CountAll(q) => b += "count(*)"; localTableName(q)
//TODO case query:Query[_, _] => b += "("; subQueryBuilderFor(query).innerBuildSelect(b, false); b += ")"
//TODO case sq @ Subquery(_, _) => b += quoteIdentifier(localTableName(sq)) += ".*"
case n => // try to build a sub-query
b += '('
buildComprehension(n, false)
b += ')'
//case _ => throw new SLICKException("Don't know what to do with node "+n+" in an expression")
}
protected def appendOrderClause(order: Seq[(Node, Ordering)]) {
b += " order by "
b.sep(order, ", "){ case (n, o) => appendOrdering(n, o) }
}
protected def appendOrdering(n: Node, o: Ordering) {
expr(n)
if(o.direction.desc) b += " desc"
if(o.nulls.first) b += " nulls first"
else if(o.nulls.last) b += " nulls last"
}
def buildUpdate: QueryBuilderResult = {
val (gen, from, where, select) = ast match {
case Comprehension(Seq((sym, from: TableNode)), where, _, Some(Pure(select))) => select match {
case f @ FieldRef(struct, _) if struct == sym => (sym, from, where, Seq(f.field))
case ProductNode(ch @ _*) if ch.forall{ case FieldRef(struct, _) if struct == sym => true; case _ => false} =>
(sym, from, where, ch.map{ case FieldRef(_, field) => field })
case _ => throw new SLICKException("A query for an UPDATE statement must select table columns only -- Unsupported shape: "+select)
}
case _ => throw new SLICKException("A query for an UPDATE statement must resolve to a comprehension with a single table -- Unsupported shape: "+ast)
}
b += "update " += quoteIdentifier(from.tableName) += ' ' += symbolName(gen) += " set "
b.sep(select, ", ")(field => b += symbolName(field) += " = ?")
if(!where.isEmpty) {
b += " where "
expr(where.reduceLeft(And))
}
//TODO nc = nc.overrideName(table, tableName) // Alias table to itself because UPDATE does not support aliases
QueryBuilderResult(b.build, linearizer)
}
def buildDelete: QueryBuilderResult = {
val (gen, from, where) = ast match {
case Comprehension(Seq((sym, from: TableNode)), where, _, Some(Pure(select))) => (sym, from, where)
case _ => throw new SLICKException("A query for a DELETE statement must resolve to a comprehension with a single table -- Unsupported shape: "+ast)
}
b += "delete from " += quoteIdentifier(from.tableName) += ' ' += symbolName(gen)
if(!where.isEmpty) {
b += " where "
expr(where.reduceLeft(And))
}
//TODO nc = nc.overrideName(table, tableName) // Alias table to itself because UPDATE does not support aliases
QueryBuilderResult(b.build, linearizer)
}
protected def rewriteCountStarQuery(q: Query[_, _]) =
/*q.modifiers.isEmpty &&*/ (q.packed match {
case _: TableNode => true
case _ => false
})
protected def innerBuildSelectNoRewrite(rename: Boolean): Unit = sys.error("obsolete")
protected def appendClauses(): Unit = sys.error("obsolete")
/*TODO
final protected def appendGroupClause(): Unit = query.typedModifiers[Grouping] match {
case Nil =>
case xs => b += " group by "; b.sep(xs, ",")(x => expr(x.by))
}
*/
protected def innerExpr(c: Node): Unit = sys.error("obsolete")
final protected def appendConditions(): Unit = sys.error("obsolete")
}
/** Builder for INSERT statements. */
class InsertBuilder(val column: Any) {
def buildInsert: String = {
val (table, cols, vals) = buildParts
"INSERT INTO " + quoteIdentifier(table) + " (" + cols + ") VALUES (" + vals + ")"
}
def buildInsert(query: Query[_, _]): QueryBuilderResult = {
val (table, cols, _) = buildParts
val qb = driver.createQueryBuilder(query)
qb.sqlBuilder += "INSERT INTO " += quoteIdentifier(table) += " (" += cols.toString += ") "
qb.buildSelect()
}
protected def buildParts: (String, StringBuilder, StringBuilder) = {
val cols = new StringBuilder
val vals = new StringBuilder
var table:String = null
def f(c: Any): Unit = c match {
case p:Projection[_] =>
for(i <- 0 until p.productArity)
f(Node(p.productElement(i)))
case t:TableNode => f(Node(t.nodeShaped_*.value))
case n:NamedColumn[_] =>
if(table eq null) table = n.table.asInstanceOf[TableNode].tableName
else if(table != n.table.asInstanceOf[TableNode].tableName) throw new SLICKException("Inserts must all be to the same table")
appendNamedColumn(n.raw, cols, vals)
case Wrapped(t: TableNode, n: RawNamedColumn) =>
if(table eq null) table = t.tableName
else if(table != t.tableName) throw new SLICKException("Inserts must all be to the same table")
appendNamedColumn(n, cols, vals)
case _ => throw new SLICKException("Cannot use column "+c+" in INSERT statement")
}
f(Node(column))
if(table eq null) throw new SLICKException("No table to insert into")
(table, cols, vals)
}
protected def appendNamedColumn(n: RawNamedColumn, cols: StringBuilder, vals: StringBuilder) {
if(!cols.isEmpty) {
cols append ","
vals append ","
}
cols append quoteIdentifier(n.name)
vals append '?'
}
}
/** Builder for various DDL statements. */
class DDLBuilder(val table: AbstractBasicTable[_]) {
//TODO: Move AutoInc handling to extended profile
protected def createColumnDDLBuilder(c: RawNamedColumn) = new ColumnDDLBuilder(c)
protected class ColumnDDLBuilder(protected val column: RawNamedColumn) {
protected val tmDelegate = column.typeMapper(driver)
protected var sqlType: String = null
protected var notNull = !tmDelegate.nullable
protected var autoIncrement = false
protected var primaryKey = false
protected var defaultLiteral: String = null
init()
protected def init() {
for(o <- column.options) handleColumnOption(o)
if(sqlType eq null) sqlType = mapTypeName(tmDelegate)
}
protected def handleColumnOption(o: ColumnOption[_,_]): Unit = o match {
case BasicColumnOption.DBType(s) => sqlType = s
case BasicColumnOption.NotNull => notNull = true
case BasicColumnOption.Nullable => notNull = false
case ExtendedColumnOption.AutoInc => autoIncrement = true
case BasicColumnOption.PrimaryKey => primaryKey = true
case BasicColumnOption.Default(v) => defaultLiteral =
column.asInstanceOf[RawNamedColumn].typeMapper(driver).asInstanceOf[TypeMapperDelegate[Any]].valueToSQLLiteral(v)
}
def appendColumn(sb: StringBuilder) {
sb append quoteIdentifier(column.name) append ' '
sb append sqlType
appendOptions(sb)
}
protected def appendOptions(sb: StringBuilder) {
if(defaultLiteral ne null) sb append " DEFAULT " append defaultLiteral
if(notNull) sb append " NOT NULL"
if(autoIncrement) sb append " AUTO_INCREMENT"
if(primaryKey) sb append " PRIMARY KEY"
}
}
def buildDDL: DDL = {
val createTable = {
val b = new StringBuilder append "CREATE TABLE " append quoteIdentifier(table.tableName) append " ("
var first = true
for(n <- table.create_*) {
if(first) first = false
else b append ","
createColumnDDLBuilder(n).appendColumn(b)
}
b append ")"
b.toString
}
val createIndexes = table.indexes.map(createIndex)
val foreignKeys = table.foreignKeys
val primaryKeys = table.primaryKeys
if(primaryKeys.size > 1)
throw new SLICKException("Table "+table.tableName+" defines multiple primary keys")
new DDL {
val createPhase1 = Iterable(createTable) ++ primaryKeys.map(createPrimaryKey) ++ createIndexes
val createPhase2 = foreignKeys.map(createForeignKey)
val dropPhase1 = foreignKeys.map(dropForeignKey)
val dropPhase2 = primaryKeys.map(dropPrimaryKey) ++ Iterable("DROP TABLE " + quoteIdentifier(table.tableName))
}
}
protected def createIndex(idx: Index) = {
val b = new StringBuilder append "CREATE "
if(idx.unique) b append "UNIQUE "
b append "INDEX " append quoteIdentifier(idx.name) append " ON " append quoteIdentifier(table.tableName) append "("
addIndexColumnList(idx.on, b, idx.table.tableName)
b append ")"
b.toString
}
protected def createForeignKey(fk: ForeignKey[_ <: TableNode, _]) = {
val sb = new StringBuilder append "ALTER TABLE " append quoteIdentifier(table.tableName) append " ADD "
addForeignKey(fk, sb)
sb.toString
}
protected def addForeignKey(fk: ForeignKey[_ <: TableNode, _], sb: StringBuilder) {
sb append "CONSTRAINT " append quoteIdentifier(fk.name) append " FOREIGN KEY("
addForeignKeyColumnList(fk.linearizedSourceColumns, sb, table.tableName)
sb append ") REFERENCES " append quoteIdentifier(fk.targetTable.tableName) append "("
addForeignKeyColumnList(fk.linearizedTargetColumnsForOriginalTargetTable, sb, fk.targetTable.tableName)
sb append ") ON UPDATE " append fk.onUpdate.action
sb append " ON DELETE " append fk.onDelete.action
}
protected def createPrimaryKey(pk: PrimaryKey) = {
val sb = new StringBuilder append "ALTER TABLE " append quoteIdentifier(table.tableName) append " ADD "
addPrimaryKey(pk, sb)
sb.toString
}
protected def addPrimaryKey(pk: PrimaryKey, sb: StringBuilder) {
sb append "CONSTRAINT " append quoteIdentifier(pk.name) append " PRIMARY KEY("
addPrimaryKeyColumnList(pk.columns, sb, table.tableName)
sb append ")"
}
protected def dropForeignKey(fk: ForeignKey[_ <: TableNode, _]) = {
"ALTER TABLE " + quoteIdentifier(table.tableName) + " DROP CONSTRAINT " + quoteIdentifier(fk.name)
}
protected def dropPrimaryKey(pk: PrimaryKey) = {
"ALTER TABLE " + quoteIdentifier(table.tableName) + " DROP CONSTRAINT " + quoteIdentifier(pk.name)
}
protected def addIndexColumnList(columns: IndexedSeq[Node], sb: StringBuilder, requiredTableName: String) =
addColumnList(columns, sb, requiredTableName, "index")
protected def addForeignKeyColumnList(columns: IndexedSeq[Node], sb: StringBuilder, requiredTableName: String) =
addColumnList(columns, sb, requiredTableName, "foreign key constraint")
protected def addPrimaryKeyColumnList(columns: IndexedSeq[Node], sb: StringBuilder, requiredTableName: String) =
addColumnList(columns, sb, requiredTableName, "foreign key constraint")
protected def addColumnList(columns: IndexedSeq[Node], sb: StringBuilder, requiredTableName: String, typeInfo: String) = {
var first = true
for(c <- columns) c match {
case Wrapped(t: TableNode, n: RawNamedColumn) =>
if(first) first = false
else sb append ","
sb append quoteIdentifier(n.name)
if(requiredTableName != t.tableName)
throw new SLICKException("All columns in "+typeInfo+" must belong to table "+requiredTableName)
case _ => throw new SLICKException("Cannot use column "+c+
" in "+typeInfo+" (only named columns are allowed)")
}
}
}
/** Builder for DDL statements for sequences. */
class SequenceDDLBuilder(seq: Sequence[_]) {
def buildDDL: DDL = {
val b = new StringBuilder append "CREATE SEQUENCE " append quoteIdentifier(seq.name)
seq._increment.foreach { b append " INCREMENT " append _ }
seq._minValue.foreach { b append " MINVALUE " append _ }
seq._maxValue.foreach { b append " MAXVALUE " append _ }
seq._start.foreach { b append " START " append _ }
if(seq._cycle) b append " CYCLE"
new DDL {
val createPhase1 = Iterable(b.toString)
val createPhase2 = Nil
val dropPhase1 = Nil
val dropPhase2 = Iterable("DROP SEQUENCE " + quoteIdentifier(seq.name))
}
}
}
}
case class QueryBuilderResult(sbr: SQLBuilder.Result, linearizer: ValueLinearizer[_]) {
def sql = sbr.sql
def setter = sbr.setter
}
| szeiger/scala-query | src/main/scala/scala/slick/driver/BasicStatementBuilderComponent.scala | Scala | bsd-2-clause | 20,030 |
//name = i
def foo(x: Int) {
/*start*/1 + 1/*end*/
}
/*
//name = i
def foo(x: Int, i: Int) {
/*start*/i/*end*/
}
*/
| ilinum/intellij-scala | testdata/introduceParameter/simple/OtherName.scala | Scala | apache-2.0 | 119 |
package com.kubukoz.adventofcode2016
import com.kubukoz.adventofcode2016.Day1._
import org.scalatest.{FlatSpec, Matchers}
class Day1Tests extends FlatSpec with Matchers {
"The first part" should "work for the given cases" in {
findDistanceToLast("R2, L3") shouldBe 5
findDistanceToLast("R2, R2, R2") shouldBe 2
findDistanceToLast("R5, L5, R5, R3") shouldBe 12
}
"The second part" should "work for the given case" in {
findDistanceToFirstRepeated("R8, R4, R4, R8") shouldBe 4
}
}
| kubukoz/advent-of-code-2016 | src/test/scala/com/kubukoz/adventofcode2016/Day1Tests.scala | Scala | apache-2.0 | 506 |
package controllers
import akka.LifeActors
import models.StartCommand._
import models.World
import org.kirhgoff.ap.core.{WorldModel, WorldModelListener, WorldPrinter}
import org.kirhgoff.ap.model.lifegame.LifeGameWorldGenerator
import play.api.Play
import play.api.libs.EventSource
import play.api.libs.concurrent.Execution.Implicits._
import play.api.libs.iteratee.{Concurrent, Enumeratee}
import play.api.libs.json.{JsValue, Json}
import play.api.mvc._
object PlayWorldModelListener extends WorldModelListener {
override def worldUpdated(world: WorldModel): Unit = {
val worldPrinter:WorldPrinter = world.printer
val json: JsValue = Json.toJson(World(worldPrinter.toAsciiSquare(world)))
Application.lifeChannel.push(json)
}
}
//TODO cut org.kirhgoff.ap out and put it to another module
object Application extends Controller {
//val logger = Logger(this.getClass)
//Move to Seed
val MaxSize = 50
val MaxIterations = 500
val DefaultSize = 10
val DefaultIterations = 100
val LifeRatio = 0.6
val (lifeOut, lifeChannel) = Concurrent.broadcast[JsValue]
def index = Action {
print("index aaa")
//logger.info(s"Received index request: $request")
Ok(views.html.react.render("Life Game"))
}
def start = Action (parse.json) {request =>
val commandJsResult = request.body.validate[StartCommand]
val command = commandJsResult.getOrElse(StartCommand(DefaultSize, DefaultSize, Some(DefaultIterations)))
//TODO extract to check method
command match {
case StartCommand(width, height, _) if width > MaxSize || height > MaxSize => {
//logger.warn(s"Refusing to start, world is too big: [$width, $height]")
BadRequest(s"Cannot create world that big size, should be lesser than $MaxSize")
}
case StartCommand(_, _, Some(iterations)) if iterations > MaxIterations => {
//logger.warn(s"Refusing to start, too many iterations: $iterations")
BadRequest(s"More than $MaxIterations are not allowed")
}
case StartCommand(width, height, iterationsOption) => {
val world: WorldModel = new LifeGameWorldGenerator(LifeRatio)
.generate(width, height)
print ("Starting with world:\n" + world.printer.toAsciiSquare(world))
//logger.info("Starting with world:\n" + world.printer.toAsciiSquare(world))
LifeActors.run(world, PlayWorldModelListener, iterationsOption match {
case None => DefaultIterations
case Some(iterations) => iterations
})
//logger.info("Big Soup started")
Ok("Life started")
}
case _ => {
//logger.warn(s"Incorrect request: $request")
print(s"Incorrect request: $request")
BadRequest("Incorrect data, cannot parse")
}
}
}
def stop = Action { implicit request =>
print("stopped")
//logger.info("Stopped with " + request)
LifeActors.stop
Ok("Stopped")
}
def shutdown = Action { implicit request =>
print("sutdown")
//logger.info("Stopped with " + request)
LifeActors.stop
Play.stop()
Ok("Shutdown")
}
def lifeFeed() = Action { request =>
print(request.remoteAddress + " - client connected")
//logger.info(request.remoteAddress + " - client connected")
//WTF!?
Ok.feed(lifeOut
&> connDeathWatch(request.remoteAddress)
&> EventSource()
).as("text/event-stream")
}
def connDeathWatch(addr: String): Enumeratee[JsValue, JsValue] =
Enumeratee.onIterateeDone{ () => print(addr + " - disconnected")
}
}
| kirhgoff/life-server | app/controllers/Application.scala | Scala | mit | 3,551 |
package rocks.molarialessandro.coordinates
import rocks.molarialessandro.coordinates.converters.RGBAToHSBAConverter
case class RGBACoordinates(r: Int, g: Int, b: Int, a: Int) extends Coordinates {
private val converterToHSBA: RGBAToHSBAConverter = new RGBAToHSBAConverter
override def convertToHSBA: HSBACoordinates = converterToHSBA.convert(this)
override def convertToRGBA: RGBACoordinates = this
}
| alem0lars/claps | src/main/scala/rocks/molarialessandro/coordinates/RGBACoordinates.scala | Scala | apache-2.0 | 411 |
package org.me.hotel
import org.scalatest.Suites
class MySuites extends Suites(
new GuestTest,
new RoomTest)
| Scalera/scalatest-handson-introduction | hotel-management/src/test/scala/org/me/hotel/MySuites.scala | Scala | apache-2.0 | 114 |
def f(s: String) = "f(" + s + ")"
def g(s: String) = "g(" + s + ")"
val fComposeG = f _ compose g _
fComposeG("yay")
val fAndThenG = f _ andThen g _
fAndThenG("yay")
val one: PartialFunction[Int, String] = {
case 1 => "one"
}
one.isDefinedAt(1)
one.isDefinedAt(2)
one(1)
val two: PartialFunction[Int, String] = {
case 2 => "two"
}
val three: PartialFunction[Int, String] = {
case 3 => "three"
}
val wildcard: PartialFunction[Int, String] = {
case _ => "something else"
}
val partial = one orElse two orElse three orElse wildcard
partial(5)
partial(3)
partial(2)
partial(1)
partial(0)
case class PhoneExt(name: String, ext: Int)
val extensions = List(PhoneExt("steve", 100), PhoneExt("robey", 200))
extensions.filter{
case PhoneExt(name, extension) => extension < 200
}
| cmonkey/scala-school | src/main/scala/Patterns-Matching.scala | Scala | gpl-3.0 | 806 |
package com.example
case class StatisticsResponse(query: Query)
case class Query(results: Results)
case class Results(quote: List[StatisticData])
case class StatisticData(
//time: Timestamp, //"2016-04-14T20:56:05Z" //This field is not in the response
symbol: String,
Ask: String,
AverageDailyVolume: String,
AskRealtime: Option[String],
BidRealtime: Option[String],
Bid: String,
BookValue: String,
Change_PercentChange: String,
Change: String,
Commission: Option[String],
Currency: String,
ChangeRealtime: Option[String],
AfterHoursChangeRealtime: Option[String],
DividendShare: Option[Double],
LastTradeDate: String,
TradeDate: Option[String],
EarningsShare: String,
ErrorIndicationreturnedforsymbolchangedinvalid: Option[String],
EPSEstimateCurrentYear: String,
EPSEstimateNextYear: String,
EPSEstimateNextQuarter: String,
DaysLow: String,
DaysHigh: String,
YearLow: String,
TickerTrend: Option[String],
OneyrTargetPrice: String,
Volume: String,
HoldingsValue: Option[String],
HoldingsValueRealtime: Option[String],
YearRange: String,
DaysValueChange: Option[Double],
DaysValueChangeRealtime: Option[Double],
StockExchange: String,
DividendYield: Option[Double],
PercentChange: String
)
| frossi85/financial-statistics-crawler | src/main/scala/com/example/Entities.scala | Scala | apache-2.0 | 2,154 |
package net.gnmerritt.tetris.player
import net.gnmerritt.tetris.engine.{Field, Piece, Placement, Position}
/**
* Given a board and piece, generate a sequence of placements
*/
class PlacementGenerator(field: Field, piece: Piece) {
def allLegal = {
this.all.filter(_.isLegal)
}
def all = {
for (x <- 0 until field.width; y <- 0 until field.height)
yield new Placement(piece, field, Position(x, y))
}
}
| gnmerritt/aig-tetris | src/main/scala/net/gnmerritt/tetris/player/PlacementGenerator.scala | Scala | mit | 427 |
package org.scalatest.suiteprop
import org.scalatest._
class TwoSlowAndOneWeakTestExamples extends SuiteExamples {
trait Services {
val theTestNames = Vector("first test", "second test")
}
trait NestedTestNames extends Services {
override val theTestNames = Vector("A subject should first test", "A subject should second test")
}
trait DeeplyNestedTestNames extends Services {
override val theTestNames = Vector("A subject when created should first test", "A subject when created should second test")
}
trait NestedTestNamesWithMust extends Services {
override val theTestNames = Vector("A subject must first test", "A subject must second test")
}
trait DeeplyNestedTestNamesWithMust extends Services {
override val theTestNames = Vector("A subject when created must first test", "A subject when created must second test")
}
trait NestedTestNamesWithCan extends Services {
override val theTestNames = Vector("A subject can first test", "A subject can second test")
}
trait DeeplyNestedTestNamesWithCan extends Services {
override val theTestNames = Vector("A subject when created can first test", "A subject when created can second test")
}
type FixtureServices = Services
class SuiteExample extends Suite with Services {
@SlowAsMolasses @WeakAsAKitten def testFirst {}
@SlowAsMolasses def testSecond {}
override val theTestNames = Vector("testFirst", "testSecond")
}
class FixtureSuiteExample extends StringFixtureSuite with Services {
@SlowAsMolasses @WeakAsAKitten def testFirst(s: String) {}
@SlowAsMolasses def testSecond(s: String) {}
override val theTestNames = Vector("testFirst(FixtureParam)", "testSecond(FixtureParam)")
}
class FunSuiteExample extends FunSuite with Services {
test("first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) {}
test("second test", mytags.SlowAsMolasses) {}
}
class FixtureFunSuiteExample extends StringFixtureFunSuite with Services {
test("first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) { s => }
test("second test", mytags.SlowAsMolasses) { s => }
}
class FunSpecExample extends FunSpec with Services {
it("first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) {}
it("second test", mytags.SlowAsMolasses) {}
}
class NestedFunSpecExample extends FunSpec with NestedTestNames {
describe("A subject") {
it("should first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) {}
it("should second test", mytags.SlowAsMolasses) {}
}
}
class DeeplyNestedFunSpecExample extends FunSpec with DeeplyNestedTestNames {
describe("A subject") {
describe("when created") {
it("should first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) {}
it("should second test", mytags.SlowAsMolasses) {}
}
}
}
class FixtureFunSpecExample extends StringFixtureFunSpec with Services {
it("first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) { s => }
it("second test", mytags.SlowAsMolasses) { s => }
}
class NestedFixtureFunSpecExample extends StringFixtureFunSpec with NestedTestNames {
describe("A subject") {
it("should first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) { s => }
it("should second test", mytags.SlowAsMolasses) { s => }
}
}
class DeeplyNestedFixtureFunSpecExample extends StringFixtureFunSpec with DeeplyNestedTestNames {
describe("A subject") {
describe("when created") {
it("should first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) { s => }
it("should second test", mytags.SlowAsMolasses) { s => }
}
}
}
class PathFunSpecExample extends path.FunSpec with Services {
it("first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) {}
it("second test", mytags.SlowAsMolasses) {}
override def newInstance = new PathFunSpecExample
}
class NestedPathFunSpecExample extends path.FunSpec with NestedTestNames {
describe("A subject") {
it("should first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) {}
it("should second test", mytags.SlowAsMolasses) {}
}
override def newInstance = new NestedPathFunSpecExample
}
class DeeplyNestedPathFunSpecExample extends path.FunSpec with DeeplyNestedTestNames {
describe("A subject") {
describe("when created") {
it("should first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) {}
it("should second test", mytags.SlowAsMolasses) {}
}
}
override def newInstance = new DeeplyNestedPathFunSpecExample
}
class WordSpecExample extends WordSpec with Services {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"second test" taggedAs (mytags.SlowAsMolasses) in {}
}
class NestedWordSpecExample extends WordSpec with NestedTestNames {
"A subject" should {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"second test" taggedAs (mytags.SlowAsMolasses) in {}
}
}
class DeeplyNestedWordSpecExample extends WordSpec with DeeplyNestedTestNames {
"A subject" when {
"created" should {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"second test" taggedAs (mytags.SlowAsMolasses) in {}
}
}
}
class FixtureWordSpecExample extends StringFixtureWordSpec with Services {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
"second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
class NestedFixtureWordSpecExample extends StringFixtureWordSpec with NestedTestNames {
"A subject" should {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
"second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
}
class DeeplyNestedFixtureWordSpecExample extends StringFixtureWordSpec with DeeplyNestedTestNames {
"A subject" when {
"created" should {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
"second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
}
}
class NestedWordSpecWithMustExample extends WordSpec with NestedTestNamesWithMust {
"A subject" must {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"second test" taggedAs (mytags.SlowAsMolasses) in {}
}
}
class DeeplyNestedWordSpecWithMustExample extends WordSpec with DeeplyNestedTestNamesWithMust {
"A subject" when {
"created" must {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"second test" taggedAs (mytags.SlowAsMolasses) in {}
}
}
}
class NestedFixtureWordSpecWithMustExample extends StringFixtureWordSpec with NestedTestNamesWithMust {
"A subject" must {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
"second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
}
class DeeplyNestedFixtureWordSpecWithMustExample extends StringFixtureWordSpec with DeeplyNestedTestNamesWithMust {
"A subject" when {
"created" must {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
"second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
}
}
class NestedWordSpecWithCanExample extends WordSpec with NestedTestNamesWithCan {
"A subject" can {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"second test" taggedAs (mytags.SlowAsMolasses) in {}
}
}
class DeeplyNestedWordSpecWithCanExample extends WordSpec with DeeplyNestedTestNamesWithCan {
"A subject" when {
"created" can {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"second test" taggedAs (mytags.SlowAsMolasses) in {}
}
}
}
class NestedFixtureWordSpecWithCanExample extends StringFixtureWordSpec with NestedTestNamesWithCan {
"A subject" can {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
"second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
}
class DeeplyNestedFixtureWordSpecWithCanExample extends StringFixtureWordSpec with DeeplyNestedTestNamesWithCan {
"A subject" when {
"created" can {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
"second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
}
}
class FlatSpecExample extends FlatSpec with Services {
it should "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
it should "second test" taggedAs (mytags.SlowAsMolasses) in {}
override val theTestNames = Vector("should first test", "should second test")
}
class SubjectFlatSpecExample extends FlatSpec with NestedTestNames {
behavior of "A subject"
it should "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
it should "second test" taggedAs (mytags.SlowAsMolasses) in {}
}
class ShorthandSubjectFlatSpecExample extends FlatSpec with NestedTestNames {
"A subject" should "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
it should "second test" taggedAs (mytags.SlowAsMolasses) in {}
}
class FixtureFlatSpecExample extends StringFixtureFlatSpec with Services {
it should "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
it should "second test" taggedAs (mytags.SlowAsMolasses) in { s => }
override val theTestNames = Vector("should first test", "should second test")
}
class SubjectFixtureFlatSpecExample extends StringFixtureFlatSpec with NestedTestNames {
behavior of "A subject"
it should "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
it should "second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
class ShorthandSubjectFixtureFlatSpecExample extends StringFixtureFlatSpec with NestedTestNames {
"A subject" should "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
it should "second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
class FlatSpecWithMustExample extends FlatSpec with Services {
it must "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
it must "second test" taggedAs (mytags.SlowAsMolasses) in {}
override val theTestNames = Vector("must first test", "must second test")
}
class SubjectFlatSpecWithMustExample extends FlatSpec with NestedTestNamesWithMust {
behavior of "A subject"
it must "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
it must "second test" taggedAs (mytags.SlowAsMolasses) in {}
}
class ShorthandSubjectFlatSpecWithMustExample extends FlatSpec with NestedTestNamesWithMust {
"A subject" must "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
it must "second test" taggedAs (mytags.SlowAsMolasses) in {}
}
class FixtureFlatSpecWithMustExample extends StringFixtureFlatSpec with Services {
it must "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
it must "second test" taggedAs (mytags.SlowAsMolasses) in { s => }
override val theTestNames = Vector("must first test", "must second test")
}
class SubjectFixtureFlatSpecWithMustExample extends StringFixtureFlatSpec with NestedTestNamesWithMust {
behavior of "A subject"
it must "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
it must "second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
class ShorthandSubjectFixtureFlatSpecWithMustExample extends StringFixtureFlatSpec with NestedTestNamesWithMust {
"A subject" must "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
it must "second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
class FlatSpecWithCanExample extends FlatSpec with Services {
it can "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
it can "second test" taggedAs (mytags.SlowAsMolasses) in {}
override val theTestNames = Vector("can first test", "can second test")
}
class SubjectFlatSpecWithCanExample extends FlatSpec with NestedTestNamesWithCan {
behavior of "A subject"
it can "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
it can "second test" taggedAs (mytags.SlowAsMolasses) in {}
}
class ShorthandSubjectFlatSpecWithCanExample extends FlatSpec with NestedTestNamesWithCan {
"A subject" can "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
it can "second test" taggedAs (mytags.SlowAsMolasses) in {}
}
class FixtureFlatSpecWithCanExample extends StringFixtureFlatSpec with Services {
it can "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
it can "second test" taggedAs (mytags.SlowAsMolasses) in { s => }
override val theTestNames = Vector("can first test", "can second test")
}
class SubjectFixtureFlatSpecWithCanExample extends StringFixtureFlatSpec with NestedTestNamesWithCan {
behavior of "A subject"
it can "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
it can "second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
class ShorthandSubjectFixtureFlatSpecWithCanExample extends StringFixtureFlatSpec with NestedTestNamesWithCan {
"A subject" can "first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
it can "second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
class FreeSpecExample extends FreeSpec with Services {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"second test" taggedAs (mytags.SlowAsMolasses) in {}
}
class NestedFreeSpecExample extends FreeSpec with NestedTestNames {
"A subject" - {
"should first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"should second test" taggedAs (mytags.SlowAsMolasses) in {}
}
}
class DeeplyNestedFreeSpecExample extends FreeSpec with DeeplyNestedTestNames {
"A subject" - {
"when created" - {
"should first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"should second test" taggedAs (mytags.SlowAsMolasses) in {}
}
}
}
class FixtureFreeSpecExample extends StringFixtureFreeSpec with Services {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
"second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
class NestedFixtureFreeSpecExample extends StringFixtureFreeSpec with NestedTestNames {
"A subject" - {
"should first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
"should second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
}
class DeeplyNestedFixtureFreeSpecExample extends StringFixtureFreeSpec with DeeplyNestedTestNames {
"A subject" - {
"when created" - {
"should first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in { s => }
"should second test" taggedAs (mytags.SlowAsMolasses) in { s => }
}
}
}
class PathFreeSpecExample extends path.FreeSpec with Services {
"first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"second test" taggedAs (mytags.SlowAsMolasses) in {}
override def newInstance = new PathFreeSpecExample
}
class NestedPathFreeSpecExample extends path.FreeSpec with NestedTestNames {
"A subject" - {
"should first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"should second test" taggedAs (mytags.SlowAsMolasses) in {}
}
override def newInstance = new NestedPathFreeSpecExample
}
class DeeplyNestedPathFreeSpecExample extends path.FreeSpec with DeeplyNestedTestNames {
"A subject" - {
"when created" - {
"should first test" taggedAs (mytags.SlowAsMolasses, mytags.WeakAsAKitten) in {}
"should second test" taggedAs (mytags.SlowAsMolasses) in {}
}
}
override def newInstance = new DeeplyNestedPathFreeSpecExample
}
class FeatureSpecExample extends FeatureSpec with Services {
scenario("first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) {}
scenario("second test", mytags.SlowAsMolasses) {}
override val theTestNames = Vector("Scenario: first test", "Scenario: second test")
}
class NestedFeatureSpecExample extends FeatureSpec with Services {
feature("A feature") {
scenario("first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) {}
scenario("second test", mytags.SlowAsMolasses) {}
}
override val theTestNames = Vector("Feature: A feature Scenario: first test", "Feature: A feature Scenario: second test")
}
class FixtureFeatureSpecExample extends StringFixtureFeatureSpec with Services {
scenario("first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) { s => }
scenario("second test", mytags.SlowAsMolasses) { s => }
override val theTestNames = Vector("Scenario: first test", "Scenario: second test")
}
class NestedFixtureFeatureSpecExample extends StringFixtureFeatureSpec with Services {
feature("A feature") {
scenario("first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) { s => }
scenario("second test", mytags.SlowAsMolasses) { s => }
}
override val theTestNames = Vector("Feature: A feature Scenario: first test", "Feature: A feature Scenario: second test")
}
class PropSpecExample extends PropSpec with Services {
property("first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) {}
property("second test", mytags.SlowAsMolasses) {}
}
class FixturePropSpecExample extends StringFixturePropSpec with Services {
property("first test", mytags.SlowAsMolasses, mytags.WeakAsAKitten) { s => }
property("second test", mytags.SlowAsMolasses) { s => }
}
lazy val suite = new SuiteExample
lazy val fixtureSuite = new FixtureSuiteExample
lazy val funSuite = new FunSuiteExample
lazy val fixtureFunSuite = new FixtureFunSuiteExample
lazy val funSpec = new FunSpecExample
lazy val nestedFunSpec = new NestedFunSpecExample
lazy val deeplyNestedFunSpec = new DeeplyNestedFunSpecExample
lazy val fixtureFunSpec = new FixtureFunSpecExample
lazy val nestedFixtureFunSpec = new NestedFixtureFunSpecExample
lazy val deeplyNestedFixtureFunSpec = new DeeplyNestedFixtureFunSpecExample
lazy val pathFunSpec = new PathFunSpecExample
lazy val nestedPathFunSpec = new NestedPathFunSpecExample
lazy val deeplyNestedPathFunSpec = new DeeplyNestedPathFunSpecExample
lazy val wordSpec = new WordSpecExample
lazy val nestedWordSpec = new NestedWordSpecExample
lazy val deeplyNestedWordSpec = new DeeplyNestedWordSpecExample
lazy val fixtureWordSpec = new FixtureWordSpecExample
lazy val nestedFixtureWordSpec = new NestedFixtureWordSpecExample
lazy val deeplyNestedFixtureWordSpec = new DeeplyNestedFixtureWordSpecExample
lazy val nestedWordSpecWithMust = new NestedWordSpecWithMustExample
lazy val deeplyNestedWordSpecWithMust = new DeeplyNestedWordSpecWithMustExample
lazy val nestedFixtureWordSpecWithMust = new NestedFixtureWordSpecWithMustExample
lazy val deeplyNestedFixtureWordSpecWithMust = new DeeplyNestedFixtureWordSpecWithMustExample
lazy val nestedWordSpecWithCan = new NestedWordSpecWithCanExample
lazy val deeplyNestedWordSpecWithCan = new DeeplyNestedWordSpecWithCanExample
lazy val nestedFixtureWordSpecWithCan = new NestedFixtureWordSpecWithCanExample
lazy val deeplyNestedFixtureWordSpecWithCan = new DeeplyNestedFixtureWordSpecWithCanExample
lazy val flatSpec = new FlatSpecExample
lazy val subjectFlatSpec = new SubjectFlatSpecExample
lazy val shorthandSubjectFlatSpec = new ShorthandSubjectFlatSpecExample
lazy val fixtureFlatSpec = new FixtureFlatSpecExample
lazy val subjectFixtureFlatSpec = new SubjectFixtureFlatSpecExample
lazy val shorthandSubjectFixtureFlatSpec = new ShorthandSubjectFixtureFlatSpecExample
lazy val flatSpecWithMust = new FlatSpecWithMustExample
lazy val subjectFlatSpecWithMust = new SubjectFlatSpecWithMustExample
lazy val shorthandSubjectFlatSpecWithMust = new ShorthandSubjectFlatSpecWithMustExample
lazy val fixtureFlatSpecWithMust = new FixtureFlatSpecWithMustExample
lazy val subjectFixtureFlatSpecWithMust = new SubjectFixtureFlatSpecWithMustExample
lazy val shorthandSubjectFixtureFlatSpecWithMust = new ShorthandSubjectFixtureFlatSpecWithMustExample
lazy val flatSpecWithCan = new FlatSpecWithCanExample
lazy val subjectFlatSpecWithCan = new SubjectFlatSpecWithCanExample
lazy val shorthandSubjectFlatSpecWithCan = new ShorthandSubjectFlatSpecWithCanExample
lazy val fixtureFlatSpecWithCan = new FixtureFlatSpecWithCanExample
lazy val subjectFixtureFlatSpecWithCan = new SubjectFixtureFlatSpecWithCanExample
lazy val shorthandSubjectFixtureFlatSpecWithCan = new ShorthandSubjectFixtureFlatSpecWithCanExample
lazy val freeSpec = new FreeSpecExample
lazy val nestedFreeSpec = new NestedFreeSpecExample
lazy val deeplyNestedFreeSpec = new DeeplyNestedFreeSpecExample
lazy val fixtureFreeSpec = new FixtureFreeSpecExample
lazy val nestedFixtureFreeSpec = new NestedFixtureFreeSpecExample
lazy val deeplyNestedFixtureFreeSpec = new DeeplyNestedFixtureFreeSpecExample
lazy val pathFreeSpec = new PathFreeSpecExample
lazy val nestedPathFreeSpec = new NestedPathFreeSpecExample
lazy val deeplyNestedPathFreeSpec = new DeeplyNestedPathFreeSpecExample
lazy val featureSpec = new FeatureSpecExample
lazy val nestedFeatureSpec = new NestedFeatureSpecExample
lazy val fixtureFeatureSpec = new FixtureFeatureSpecExample
lazy val nestedFixtureFeatureSpec = new NestedFixtureFeatureSpecExample
lazy val propSpec = new PropSpecExample
lazy val fixturePropSpec = new FixturePropSpecExample
}
| hubertp/scalatest | src/test/scala/org/scalatest/suiteprop/TwoSlowAndOneWeakTestExamples.scala | Scala | apache-2.0 | 22,083 |
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.restassured.scalatra
import java.net.URLDecoder
import java.util.{Date, Scanner}
import javax.servlet.http.Cookie
import net.liftweb.json.Extraction._
import net.liftweb.json.JsonAST._
import net.liftweb.json.JsonDSL._
import net.liftweb.json.Printer._
import net.liftweb.json.{DefaultFormats, JsonParser}
import org.apache.commons.io.IOUtils
import org.apache.commons.lang3.StringUtils
import org.scalatra.ScalatraServlet
import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.xml.Elem
class ScalatraRestExample extends ScalatraServlet {
// To allow for json extract
implicit val formats = DefaultFormats
case class Winner(id: Long, numbers: List[Int])
case class Lotto(id: Long, winningNumbers: List[Int], winners: List[Winner], drawDate: Option[java.util.Date])
val winners = List(Winner(23, List(2, 45, 34, 23, 3, 5)), Winner(54, List(52, 3, 12, 11, 18, 22)))
val lotto = Lotto(5, List(2, 45, 34, 23, 7, 5, 3), winners, None)
before() {
contentType = "application/json"
}
post("/hello") {
val json = ("hello" -> "Hello Scalatra")
compact(render(json))
}
get("/hello") {
val json = ("hello" -> "Hello Scalatra")
compact(render(json))
}
get("/getWithContent") {
if (request.body == "hullo") {
status = 200
} else {
status = 400
"No or incorrect content"
}
}
options("/greetXML") {
contentType = "text/xml"
greetXML
}
get("/greetXML") {
greetXML
}
get("/xmlWithContentTypeTextXml") {
contentType = "text/xml; charset=iso-8859-1"
<greeting>
<firstName>{params("firstName")}</firstName>
<lastName>{params("lastName")}</lastName>
</greeting>
}
get("/xmlWithCustomXmlContentType") {
contentType = "application/something+xml; charset=iso-8859-1"
<greeting>
<firstName>{params("firstName")}</firstName>
<lastName>{params("lastName")}</lastName>
</greeting>
}
get("/greetXMLAttribute") {
contentType = "application/xml"
<greeting>
<name firstName={params("firstName")} lastName={params("lastName")} />
</greeting>
}
get("/i18n") {
"""{ "ön" : "Är ån"}"""
}
get("/something.json") {
"""{ "value" : "something" }"""
}
get("/utf8-body-json") {
"""{ "value" : "啊 ☆" }"""
}
get("/utf8-body-xml") {
contentType = "application/xml"
"""<value>啊 ☆</value>"""
}
get("/jsonStore") {
"{ \\"store\\": {\\n" +
" \\"book\\": [ \\n" +
" { \\"category\\": \\"reference\\",\\n" +
" \\"author\\": \\"Nigel Rees\\",\\n" +
" \\"title\\": \\"Sayings of the Century\\",\\n" +
" \\"price\\": 8.95\\n" +
" },\\n" +
" { \\"category\\": \\"fiction\\",\\n" +
" \\"author\\": \\"Evelyn Waugh\\",\\n" +
" \\"title\\": \\"Sword of Honour\\",\\n" +
" \\"price\\": 12.99\\n" +
" },\\n" +
" { \\"category\\": \\"fiction\\",\\n" +
" \\"author\\": \\"Herman Melville\\",\\n" +
" \\"title\\": \\"Moby Dick\\",\\n" +
" \\"isbn\\": \\"0-553-21311-3\\",\\n" +
" \\"price\\": 8.99\\n" +
" },\\n" +
" { \\"category\\": \\"fiction\\",\\n" +
" \\"author\\": \\"J. R. R. Tolkien\\",\\n" +
" \\"title\\": \\"The Lord of the Rings\\",\\n" +
" \\"isbn\\": \\"0-395-19395-8\\",\\n" +
" \\"price\\": 22.99\\n" +
" }\\n" +
" ],\\n" +
" \\"bicycle\\": {\\n" +
" \\"color\\": \\"red\\",\\n" +
" \\"price\\": 19.95" +
" }\\n" +
" }\\n" +
"}"
}
get("/requestUrl") {
request.getRequestURL + "?" + request.getQueryString
}
get("/anonymous_list_with_numbers") {
contentType = "application/json"
"""[100, 50, 31.0]"""
}
get("/russian") {
contentType = "application/json"
"""{ "title" : "Информационные технологии, интернет, телеком" }"""
}
get("/products") {
contentType = "application/json"
"""[
{
"id": 2,
"name": "An ice sculpture",
"price": 12.50,
"tags": ["cold", "ice"],
"dimensions": {
"length": 7.0,
"width": 12.0,
"height": 9.5
},
"warehouseLocation": {
"latitude": -78.75,
"longitude": 20.4
}
},
{
"id": 3,
"name": "A blue mouse",
"price": 25.50,
"dimensions": {
"length": 3.1,
"width": 1.0,
"height": 1.0
},
"warehouseLocation": {
"latitude": 54.4,
"longitude": -32.7
}
}
]"""
}
get("/shopping") {
contentType = "application/xml"
<shopping>
<category type="groceries">
<item>Chocolate</item>
<item>Coffee</item>
</category>
<category type="supplies">
<item>Paper</item>
<item quantity="4">Pens</item>
</category>
<category type="present">
<item when="Aug 10">Kathryn's Birthday</item>
</category>
</shopping>
}
get("/videos") {
contentType = "application/xml"
<videos>
<music>
<title>Video Title 1 </title>
<artist>Artist 1</artist>
</music>
<music >
<title>Video Title 2</title>
<artist>Artist 2</artist>
<artist>Artist 3</artist>
</music>
</videos>
}
get("/videos-not-formatted") {
contentType = "application/xml"
<videos><music><title>Video Title 1</title><artist>Artist 1</artist></music><music ><title>Video Title 2</title><artist>Artist 2</artist><artist>Artist 3</artist></music></videos>
}
get("/greetJSON") {
"{ \\"greeting\\" : { \\n" +
" \\"firstName\\" : \\""+{params("firstName")}+"\\", \\n" +
" \\"lastName\\" : \\""+{params("lastName")}+"\\" \\n" +
" }\\n" +
"}"
}
post("/greetXML") {
greetXML
}
get("/anotherGreetXML") {
anotherGreetXML
}
post("/anotherGreetXML") {
anotherGreetXML
}
post("/threeMultiValueParam") {
"{ \\"list\\" : \\""+multiParams("list").mkString(",") +"\\", " +
"\\"list2\\" : \\"" + multiParams("list2").mkString(",") + "\\", " +
"\\"list3\\" : \\"" + multiParams("list3").mkString(",") + "\\"}"
}
get("/multiValueParam") {
"{ \\"list\\" : \\""+multiParams("list").mkString(",") +"\\" }"
}
put("/multiValueParam") {
"{ \\"list\\" : \\""+multiParams("list").mkString(",") +"\\" }"
}
post("/multiValueParam") {
"{ \\"list\\" : \\""+multiParams("list").mkString(",") +"\\" }"
}
patch("/multiValueParam") {
"{ \\"list\\" : \\""+findMultiParamIn(request.body, "list").mkString(",") +"\\" }"
}
patch("/jsonGreet") {
contentType = "application/json"
val json = JsonParser.parse(request.body)
"{ \\"fullName\\" : \\"" + (json \\ "firstName").extract[String] + " "+ (json \\ "lastName").extract[String] + "\\" }"
}
get("/hello") {
val json = ("hello" -> "Hello Scalatra")
compact(render(json))
}
get("/text-json") {
contentType = "text/json"
"""{"test":true}"""
}
get("/lotto") {
val json = ("lotto" -> ("lottoId" -> lotto.id) ~
("winning-numbers" -> lotto.winningNumbers) ~
("drawDate" -> lotto.drawDate.map(_.toString)) ~
("winners" -> lotto.winners.map { w =>
(("winnerId" -> w.id) ~ ("numbers" -> w.numbers))}))
compact(render(json))
}
get("/reflect") {
reflect
}
put("/reflect") {
reflect
}
put("/reflect") {
reflect
}
patch("/reflect") {
reflect
}
post("/reflect") {
reflect
}
post("/param-reflect") {
compact(render(decompose(params)))
}
post("/:pathParam/manyParams") {
val queryParam = {params("queryParam")}
val pathParam = {params("pathParam")}
val formParam = {params("formParam")}
queryParam + " " + pathParam + " " + formParam
}
post("/charEncoding") {
contentType = "text/plain"
request.getCharacterEncoding
}
put("/serializedJsonParameter") {
val something = {params("something")}
val serialized = {params("serialized")}
serialized
}
patch("/serializedJsonParameter") {
val something = {params("something")}
val serialized = {params("serialized")}
serialized
}
get("/contentTypeButNoBody") {
contentType = "application/json"
}
get("/contentTypeAsBody") {
request.contentType.getOrElse("null")
}
post("/contentTypeAsBody") {
request.contentType.getOrElse("null")
}
post("/textUriList") {
if (!request.getContentType.contains("text")) {
status = 400
} else {
contentType = "application/json"
val content = IOUtils.toString(request.getInputStream)
val uris = content.split("\\n")
val json = "uris" -> decompose(uris)
compact(render(json))
}
}
get("/:firstName/:lastName") {
val firstName = {params("firstName")}
val lastName = {params("lastName")}
val fullName: String = firstName + " " + lastName
val json = ("firstName" -> firstName) ~ ("lastName" -> lastName) ~ ("fullName" -> fullName)
compact(render(json))
}
get("/:firstName/:middleName/:lastName") {
val firstName = {params("firstName")}
val middleName = {params("middleName")}
val lastName = {params("lastName")}
val json = ("firstName" -> firstName) ~ ("lastName" -> lastName) ~ ("middleName" -> middleName)
compact(render(json))
}
get("/409") {
contentType = "text/plain"
response.setStatus(409)
"ERROR"
}
get("/user-favorite-xml") {
contentType = "application/xml"
<user user-id="24985">
<date-created>2008-11-17T08:00:00Z</date-created>
<date-modified>2012-09-27T02:29:43.883Z</date-modified>
<userFavorite application-id="1" favorite-id="28" userData="someData" sequence-number="1">
<date-created>2011-01-20T19:59:47.887Z</date-created>
<date-modified>2012-09-25T23:52:21.167Z</date-modified>
</userFavorite>
</user>
}
get("/package-db-xml") {
contentType = "application/xml"
<package-database xmlns="http://marklogic.com/manage/package/databases">
<metadata xmlns:db="http://marklogic.com/manage/package/databases">
<package-version>2.0</package-version>
</metadata>
</package-database>
}
get("/namespace-example") {
contentType = "application/xml"
<foo xmlns:ns="http://localhost/">
<bar>sudo </bar>
<ns:bar>make me a sandwich!</ns:bar>
</foo>
}
get("/namespace-example2") {
contentType = "application/xml"
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/">
<soapenv:Body>
<ns1:getBankResponse xmlns:ns1="http://thomas-bayer.com/blz/">
<ns1:details>
<ns1:bezeichnung>ABK-Kreditbank</ns1:bezeichnung>
<ns1:bic>ABKBDEB1XXX</ns1:bic>
<ns1:ort>Berlin</ns1:ort>
<ns1:plz>10789</ns1:plz>
</ns1:details>
</ns1:getBankResponse>
</soapenv:Body>
</soapenv:Envelope>
}
get("/amount") {
"""{
"amount": 250.00
}"""
}
get("/game") {
"""{
"playerOneId": "a084a81a-6bc9-418d-b107-5cb5ce249b77",
"playerTwoId": "88867e23-0b38-4c43-ad8e-161ba5062c7d",
"status": "ongoing",
"rounds": [
],
"_links": {
"self": {
"href": "http://localhost:8080/2dd68f2d-37df-4eed-9fce-5d9ce23a6745"
},
"make-move": {
"href": "http://localhost:8080/2dd68f2d-37df-4eed-9fce-5d9ce23a6745/make-move"
}
},
"id": "2dd68f2d-37df-4eed-9fce-5d9ce23a6745"
}"""
}
put("/greetPut") {
// For some reason Scalatra doesn't seem to handle form parameters in PUT requests
if(request.getParameterNames.exists { _ == "firstName" }) {
greetJson
} else {
val content: String = IOUtils.toString(request.getInputStream)
val name = "Greetings " + {
findParamIn(content, "firstName")
} + " " + {
findParamIn(content, "lastName")
}
val json = ("greeting" -> name)
compact(render(json))
}
}
patch("/greetPatch") {
if(request.getParameterNames.exists { _ == "firstName" }) {
greetJson
} else {
val content: String = IOUtils.toString(request.getInputStream)
val name = "Greetings " + {
findParamIn(content, "firstName")
} + " " + {
findParamIn(content, "lastName")
}
val json = ("greeting" -> name)
compact(render(json))
}
}
delete("/greet") {
greetJson
}
get("/greet") {
greetJson
}
get("/xmlWithMinusInRoot") {
contentType = "application/xml"
<a-greeting><firstName>{params("firstName")}</firstName>
<lastName>{params("lastName")}</lastName>
</a-greeting>
}
get("/xmlWithMinusInChild") {
contentType = "application/xml"
<greeting><your-firstName>{params("firstName")}</your-firstName>
<your-lastName>{params("lastName")}</your-lastName>
</greeting>
}
get("/xmlWithUnderscoreInChild") {
contentType = "application/xml"
<greeting><your_firstName>{params("firstName")}</your_firstName>
<your_lastName>{params("lastName")}</your_lastName>
</greeting>
}
get("/customMimeType") {
contentType = "application/something-custom"
<body>
<message>Custom mime-type</message>
</body>
}
get("/mimeTypeWithPlusXml") {
contentType = "application/something+xml"
<body><message>Custom mime-type ending with +xml</message></body>
}
get("/mimeTypeWithPlusJson") {
contentType = "application/something+json"
"""{ "message" : "It works" }"""
}
get("/mimeTypeWithPlusHtml") {
contentType ="application/something+html"
<html>
<head>
<title>my title</title>
</head>
<body>
<p>paragraph 1</p>
<p>paragraph 2</p>
</body>
</html>
}
get("/noValueParam") {
"Params: "+params.foldLeft(new StringBuilder)( (b,t) => b.append(t._1+"="+t._2)).toString()
}
put("/noValueParam") {
val content: String = IOUtils.toString(request.getInputStream)
if(content.contains("=")) {
throw new IllegalArgumentException("One of the parameters had a value")
}
"OK"
}
patch("/noValueParam") {
val content: String = IOUtils.toString(request.getInputStream)
if(content.contains("=")) {
throw new IllegalArgumentException("One of the parameters had a value")
}
"OK"
}
post("/noValueParam") {
"Params: "+params.foldLeft(new StringBuilder)( (b,t) => b.append(t._1+"="+t._2)).toString()
}
post("/redirect") {
response.setHeader("Location", "http://localhost:8080/redirect/1")
response.setStatus(301)
"""{ "id" : 1 } """
}
get("/redirect") {
val url: String = {params("url")}
redirect(url)
}
get("/redirect-and-set-cookie") {
val url: String = {params("url")}
val cookie: Cookie = new Cookie("cookieName", "cookieValue")
response.addCookie(cookie)
redirect(url)
}
get("/customMimeTypeJsonCompatible") {
contentType = "application/vnd.uoml+json"
"""{ "message" : "It works" }"""
}
get("/customMimeTypeJsonCompatible2") {
contentType = "application/vnd.uoml+something"
"""{ "message" : "It works" }"""
}
get("/noContentTypeJsonCompatible") {
contentType = ""
"""{ "message" : "It works" }"""
}
get("/customMimeTypeNonJsonCompatible") {
contentType = "application/something+json"
"This is not JSON"
}
get("/contentTypeJsonButBodyIsNotJson") {
contentType = "application/json"
"This is not JSON"
}
get("/cookie_with_no_value") {
contentType = "text/plain"
val cookies = request.getCookies
val name: String = cookies(0).getName
name
}
get("/html_with_cookie") {
contentType = "text/html"
response.addHeader("Set-Cookie", "JSESSIONID=B3134D534F40968A3805968207273EF5; Path=/")
"""<html xmlns="http://www.w3.org/1999/xhtml" lang="en">
<body>body</body>
</html>"""
}
get("/response_cookie_with_no_value") {
val cookie: Cookie = new Cookie("PLAY_FLASH", "")
val time: Long = new Date().getTime
cookie.setMaxAge(time.intValue());
response.addCookie(cookie)
}
get("/key_only_cookie") {
contentType = "text/plain"
response.setHeader("Set-Cookie", "some_cookie")
"OK"
}
get("/multiCookie") {
contentType = "text/plain"
val cookie1: Cookie = new Cookie("cookie1", "cookieValue1")
cookie1.setDomain("localhost")
val cookie2 = new Cookie("cookie1", "cookieValue2")
cookie2.setPath("/")
cookie2.setMaxAge(1234567)
cookie2.setComment("My Purpose")
cookie2.setDomain("localhost")
cookie2.setSecure(true)
cookie2.setVersion(1)
response.addCookie(cookie1)
response.addCookie(cookie2)
"OK"
}
get("/multiCookieRequest") {
val cookies = request.getCookies
.map(cookie => Map(cookie.getName -> cookie.getValue))
.foldLeft(mutable.ListBuffer[Map[String, String]]())((list, cookie) => {
list.add(cookie); list
})
compact(render(cookies))
}
post("/j_spring_security_check") {
contentType = "text/plain"
securityCheck("jsessionid", () => true)
}
post("/j_spring_security_check_with_csrf") {
contentType = "text/plain"
securityCheck("jsessionid", () => params.get("_csrf").get == "8adf2ea1-b246-40aa-8e13-a85fb7914341")
}
post("/j_spring_security_check_with_csrf_header") {
contentType = "text/plain"
securityCheck("jsessionid", () => request.getHeader("_csrf") == "8adf2ea1-b246-40aa-8e13-a85fb7914341")
}
post("/j_spring_security_check_phpsessionid") {
contentType = "text/plain"
securityCheck("phpsessionid", () => true)
}
def securityCheck(sessionIdName: String, additionalChecks: () => Boolean) : Any = {
val userName = params.get("j_username").get
val password = params.get("j_password").get
if (userName == "John" && password == "Doe") {
if (!additionalChecks.apply()) {
"NO"
} else {
response.setHeader("Set-Cookie", sessionIdName + "=1234")
}
} else {
"NO"
}
}
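// Sketch (added for clarity, not part of the original test server): the routes above expect a
// form login such as
//   POST /j_spring_security_check  with body  j_username=John&j_password=Doe
// and reply with "Set-Cookie: jsessionid=1234" on success, or the body "NO" otherwise.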
get("/formAuth") {
formAuth(() => loginPage)
}
get("/formAuthCsrf") {
formAuth(() => loginPageWithCsrf)
}
get("/formAuthCsrfInHeader") {
formAuth(() => loginPageWithCsrfHeader)
}
get("/jsonWithAtSign") {
"""{
"body" : { "@id" : 10, "content": "some content" }
}"""
}
get("/malformedJson") {
"""{
"a": 123456
"b":"string"
}"""
}
post("/greet") {
greetJson
}
post("/body") {
getStringBody
}
put("/body") {
getStringBody
}
patch("/body") {
getStringBody
}
delete("/body") {
getStringBody
}
put("/binaryBody") {
getBinaryBodyResponse
}
patch("/binaryBody") {
getBinaryBodyResponse
}
post("/binaryBody") {
getBinaryBodyResponse
}
post("/jsonBody") {
contentType = "text/plain";
val header: String = request.getHeader("Content-Type")
if (!header.contains("application/json")) {
"FAILURE"
} else {
val json = JsonParser.parse(request.body)
(json \\ "message").extract[String]
}
}
post("/jsonBodyAcceptHeader") {
val accept: String = request.getHeader("Accept")
if (!accept.contains("application/json")) {
"FAILURE"
} else {
val json = JsonParser.parse(request.body)
(json \\ "message").extract[String]
}
}
get("/setCookies") {
setCookies
}
get("/setCommonIdCookies") {
setCommonIdCookies
}
post("/header") {
getHeaders
}
get("/header") {
getHeaders
}
get("/matrix") {
val matrixParams = StringUtils.substringAfter(URLDecoder.decode(request.getRequestURI, "UTF-8"), ";")
val nameValueMap = StringUtils.split(matrixParams, "&")
.map(nameValue => {
val nameAndValue = StringUtils.split(nameValue, "=")
(nameAndValue(0), nameAndValue(1))})
.foldLeft(mutable.HashMap[String, String]())((map, nameAndValue) => {
map.put(nameAndValue._1, nameAndValue._2)
map
}).toMap // Convert map to an immutable map so that JSON gets rendered correctly, see http://stackoverflow.com/questions/6271386/how-do-you-serialize-a-map-to-json-in-scala
compact(render(decompose(nameValueMap)))
}
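// Sketch (added for clarity, not part of the original test server): a request along the lines of
//   GET /matrix;firstName=John&lastName=Doe
// would presumably be answered with {"firstName":"John","lastName":"Doe"}, since everything after
// the first ';' is split on '&' and rendered as a JSON object.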
get("/headersWithValues") {
headersWithValues
}
post("/headersWithValues") {
headersWithValues
}
get("/multiHeaderReflect") {
contentType = "text/plain"
val headerNames = request.getHeaderNames()
while (headerNames.hasMoreElements()) {
val name = headerNames.nextElement.toString
val headerValues = request.getHeaders(name)
while (headerValues.hasMoreElements) {
val headerValue: String = headerValues.nextElement().toString
response.addHeader(name, headerValue)
}
}
}
get("/multiValueHeader") {
contentType = "text/plain"
response.addHeader("MultiHeader", "Value 1")
response.addHeader("MultiHeader", "Value 2")
""
}
post("/cookie") {
getCookies
}
get("/cookie") {
getCookies
}
put("/cookie") {
getCookies
}
delete("/cookie") {
getCookies
}
patch("/cookie") {
getCookies
}
get("/jsonList") {
"""[
{ "name" : "Anders",
"address" : "Spangatan"
},
{ "name" : "Sven",
"address" : "Skolgatan"
}
]"""
}
get("/emptyBody") {
}
get("/textXML") {
contentType = "text/xml"
<xml>something</xml>
}
get("/textHTML") {
contentType = "text/html"
<html>
<head>
<title>my title</title>
</head>
<body>
<p>paragraph 1</p>
<p>paragraph 2</p>
</body>
</html>
}
get("/textHTML-not-formatted") {
contentType = "text/html"
<html><head><title>my title</title></head><body><p>paragraph 1</p><p>paragraph 2</p></body></html>
}
get("/statusCode500") {
contentType = "text/plain"
response.setStatus(500)
"An expected error occurred"
}
get("/rss") {
contentType = "application/rss+xml"
<rss>
<item>
<title>rss title</title>
</item>
</rss>
}
get("/jsonp") {
contentType = "application/javascript"
params("callback") + "(" + greetJson + ");"
}
get("/statusCode409WithNoBody") {
contentType = "application/json"
response.setStatus(409)
}
get("/sessionId") {
def setSessionId {
response.setHeader("Set-Cookie", "jsessionid=1234")
}
val cookies: Array[Cookie] = request.getCookies()
if (cookies == null) {
setSessionId
} else {
val cookie = cookies.find(_.getName.equalsIgnoreCase("jsessionid"))
if (cookie == None) {
setSessionId
} else if (cookie.get.getValue == "1234") {
"Success"
} else {
response.sendError(409, "Invalid sessionid")
}
}
}
get("/bigRss") {
contentType = "application/rss+xml"
<rss xmlns:dc="http://purl.org/dc/elements/1.1/" version="2.0">
<channel>
<title>something</title>
<link>http://www.someone.com</link>
<description>something RSS</description>
<dc:creator>someone</dc:creator>
<item>
<title>A title</title>
<link>http://www.something.com/link/1</link>
<description>Description 1</description>
<enclosure url="http://www.someone.com/somejpg.jpg" length="2721" type="image/jpg" />
<pubDate>Mon, 10 Jan 2011 19:31:46 GMT</pubDate>
<guid isPermaLink="false">http://www.something.com/link/1</guid>
<dc:date>2011-01-10T19:31:46Z</dc:date>
</item>
<item>
<title>Title 2</title>
<link>http://www.something.com/link/2</link>
<description>Description 2</description>
<enclosure url="http://www.someone.com/someotherjpg.jpg" length="2721" type="image/jpg" />
<pubDate>Mon, 10 Jan 2011 19:41:46 GMT</pubDate>
<guid isPermaLink="false">http://www.something.com/link/2</guid>
<dc:date>2011-01-10T19:42:46Z</dc:date>
</item>
</channel>
</rss>
}
get("/carRecords") {
contentType = "application/xml"
<records>
<car name='HSV Maloo' make='Holden' year='2006'>
<country>Australia</country>
<record type='speed'>Production Pickup Truck with speed of 271kph</record>
</car>
<car name='P50' make='Peel' year='1962'>
<country>Isle of Man</country>
<record type='size'>Smallest Street-Legal Car at 99cm wide and 59 kg in weight</record>
</car>
<car name='Royale' make='Bugatti' year='1931'>
<country>France</country>
<record type='price'>Most Valuable Car at $15 million</record>
</car>
</records>
}
post("/validateContentTypeIsDefinedAndReturnBody") {
if (request.getContentType == null) {
response.setStatus(304)
}
contentType = request.getContentType
request.body
}
post("/file") {
val content: String = IOUtils.toString(request.getInputStream)
content
}
put("/file") {
val content: String = IOUtils.toString(request.getInputStream)
content
}
def getBinaryBodyResponse: String = {
contentType = "text/plain";
Stream.continually(request.getInputStream().read).takeWhile(_ != -1).map(_.toByte).toList.mkString(", ")
}
def getHeaders: String = {
contentType = "text/plain"
val headerNames = request.getHeaderNames()
val names = ListBuffer[String]()
while (headerNames.hasMoreElements()) {
val name = headerNames.nextElement.toString
names.append(name)
}
names.mkString(", ")
}
def getStringBody: String = {
contentType = "text/plain";
request.body
}
def getCookies: String = {
contentType = "text/plain"
request.getCookies().map(_.getName).mkString(", ")
}
def setCookies: String = {
contentType = "text/plain"
response.addCookie(new Cookie("key1", "value1"))
response.addCookie(new Cookie("key2", "value2"))
response.addCookie(new Cookie("key3", "value3"))
"ok"
}
def setCommonIdCookies: String = {
contentType = "text/plain"
response.addCookie(new Cookie("key1", "value1"))
response.addCookie(new Cookie("key1", "value2"))
response.addCookie(new Cookie("key1", "value3"))
"ok"
}
notFound {
response.setStatus(404)
"Not found"
}
def greetJson: String = {
val name = "Greetings " + {
params("firstName")
} + " " + {
params("lastName")
}
val json = ("greeting" -> name)
compact(render(json))
}
def loginPage: String = {
contentType = "text/html"
"""<html>
<head>
<title>Login</title>
</head>
<body>
<form action="j_spring_security_check" method="POST">
<table>
<tr><td>User: </td><td><input type='text' name='j_username'></td></tr>
<tr><td>Password:</td><td><input type='password' name='j_password'></td></tr>
<tr><td colspan='2'><input name="submit" type="submit"/></td></tr>
</table>
</form>
</body>
</html>"""
}
def loginPageWithCsrf: String = {
contentType = "text/html"
"""<html>
<head>
<title>Login</title>
</head>
<body>
<form action="j_spring_security_check_with_csrf" method="POST">
<table>
<tr><td>User: </td><td><input type='text' name='j_username'></td></tr>
<tr><td>Password:</td><td><input type='password' name='j_password'></td></tr>
<tr><td colspan='2'><input name="submit" type="submit"/></td></tr>
</table>
<input type="hidden" name="_csrf" value="8adf2ea1-b246-40aa-8e13-a85fb7914341"/>
</form>
</body>
</html>"""
}
def loginPageWithCsrfHeader: String = {
contentType = "text/html"
"""<html>
<head>
<title>Login</title>
</head>
<body>
<form action="j_spring_security_check_with_csrf_header" method="POST">
<table>
<tr><td>User: </td><td><input type='text' name='j_username'></td></tr>
<tr><td>Password:</td><td><input type='password' name='j_password'></td></tr>
<tr><td colspan='2'><input name="submit" type="submit"/></td></tr>
</table>
<input type="hidden" name="_csrf" value="8adf2ea1-b246-40aa-8e13-a85fb7914341"/>
</form>
</body>
</html>"""
}
def greetXML: Elem = {
contentType = "application/xml"
<greeting><firstName>{params("firstName")}</firstName>
<lastName>{params("lastName")}</lastName>
</greeting>
}
def anotherGreetXML: Elem = {
contentType = "application/xml"
<greeting>
<name>
<firstName>{params("firstName")}</firstName>
<lastName>{params("lastName")}</lastName>
</name>
</greeting>
}
get("/demoRequestSpecification") {
val category = params("category")
val userName = request.getCookies.filter(_.getName == "user")(0).getValue
if (category == "books" && userName == "admin") {
"Catch 22"
} else {
"Unknown entity"
}
}
get("/demoResponseSpecification") {
contentType = "application/json"
val fullName = params("name")
val firstAndLastName = fullName.split(" ")
val firstName = firstAndLastName(0)
val lastName = firstAndLastName(1)
"{ \\"firstName\\" : \\""+firstName+"\\",\\"lastName\\" : \\""+lastName+"\\", \\"responseType\\" : \\"simple\\" }"
}
get("/contentTypeJsonButContentIsNotJson") {
contentType = "application/json"
"This is not a valid JSON document"
}
get("/contentTypeHtmlButContentIsJson") {
contentType = "text/html"
"{ \\"key\\" : \\"value\\", \\"42\\" : \\"23\\"}"
}
get("/xmlWithBom") {
contentType = "application/xml"
IOUtils.toByteArray(getClass.getResourceAsStream("/bom_example.xml"))
}
get("/xmlWithHeaderAndFooter") {
contentType = "application/xml"
"""733
<?xml version="1.0" encoding="utf-8"?><soapenv:Envelope
xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/"
xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<some>thing</some>
</soapenv:Envelope>
0"""
}
head("/returnContentTypeAsBody") {
contentType = "text/plain"
request.getContentType
}
options("/returnBodyAsBody") {
contentType = "text/plain"
request.body
}
post("/returnContentTypeAsBody") {
contentType = "text/plain"
request.getContentType
}
get("/returnContentTypeAsBody") {
contentType = "text/plain"
request.getContentType
}
post("/return204WithContentType") {
contentType = "application/json"
status = 204
}
def formAuth(loginPage: () => String) = {
contentType = "text/plain"
val cookies: Array[Cookie] = request.getCookies
if(cookies == null) {
loginPage.apply()
} else {
val cookie = cookies.find(sessionName => sessionName.getName.equalsIgnoreCase("jsessionid") || sessionName.getName.equalsIgnoreCase("phpsessionid")).get
if(cookie == null) {
loginPageWithCsrf
} else if (cookie.getValue == "1234") {
"OK"
} else {
"NOT AUTHORIZED"
}
}
}
def reflect: String = {
contentType = request.getContentType
val cookies = request.getCookies
if (cookies != null) {
cookies.foreach {
response.addCookie(_)
}
}
request.body
}
def findParamIn(content: String, param: String): String = {
var value: String = StringUtils.substringBetween(content, param + "=", "&")
if (value == null) {
value = StringUtils.substringAfter(content, param + "=")
}
return value
}
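// Sketch (added for clarity, not part of the original test server): given a raw form body such as
// "firstName=John&lastName=Doe",
//   findParamIn(body, "firstName") == "John"   // value delimited by the following '&'
//   findParamIn(body, "lastName")  == "Doe"    // last parameter, taken up to the end of the body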
def findMultiParamIn(content: String, param: String): scala.collection.mutable.MutableList[String] = {
val scanner: Scanner = new Scanner(content).useDelimiter("&")
val myList = scala.collection.mutable.MutableList[String]()
while (scanner.hasNext) {
val next: String = scanner.next
myList += next.split('=')(1)
}
myList
}
def headersWithValues: String = {
contentType = "application/json"
val headerNames = request.getHeaderNames.map(_.toString)
val map: Map[String, List[String]] = headerNames.map(headerName => (headerName, request.getHeaders(headerName).map(_.toString).toList)).
foldLeft(mutable.HashMap[String, List[String]]())((map, header) => {
map.put(header._1, header._2.toList)
map
}).toMap // Convert map to an immutable map so that JSON gets rendered correctly, see http://stackoverflow.com/questions/6271386/how-do-you-serialize-a-map-to-json-in-scala
compact(render(decompose(map)))
}
}
|
BenSeverson/rest-assured
|
examples/scalatra-example/src/main/scala/io/restassured/scalatra/ScalatraRestExample.scala
|
Scala
|
apache-2.0
| 33,981
|
/*
* Copyright 2014 Frugal Mechanic (http://frugalmechanic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.xml
import fm.common.{ClassUtil, Logging, Resource}
import java.io.{Closeable, InputStream, OutputStream, Reader, StringReader, StringWriter, Writer}
import javax.xml.bind.{JAXBContext, Marshaller, Unmarshaller}
import javax.xml.bind.annotation.XmlRootElement
import javax.xml.stream.{XMLInputFactory, XMLOutputFactory, XMLStreamReader, XMLStreamWriter}
import scala.reflect.ClassTag
object JAXBMarshaller {
private val inputFactory: XMLInputFactory = XMLInputFactory.newInstance()
private val outputFactory: XMLOutputFactory = XMLOutputFactory.newInstance()
// XMLStreamWriter doesn't implement AutoCloseable or Closeable so we need a rich wrapper for Resource.using to work
private final implicit class XMLStreamWriterCloseable(writer: XMLStreamWriter) extends Closeable { def close(): Unit = writer.close() }
// XMLStreamReader doesn't implement AutoCloseable or Closeable so we need a rich wrapper for Resource.using to work
private final implicit class XMLStreamReaderCloseable(reader: XMLStreamReader) extends Closeable { def close(): Unit = reader.close() }
}
/**
 * A wrapper around the JAXB Marshaller/Unmarshaller. This class is thread-safe.
*/
final class JAXBMarshaller[T: ClassTag](
packageName: String,
rootElement: String,
fragment: Boolean = true,
format: Boolean = true,
indent: String = " "
) extends Logging {
import JAXBMarshaller._
// The JAXBContext is thread-safe
private[this] val context: JAXBContext = {
val classes: Set[Class[_]] = ClassUtil.findAnnotatedClasses(packageName, classOf[XmlRootElement])
try {
JAXBContext.newInstance(classes.toArray: _*)
} catch {
case ex: Exception =>
logger.error(s"Caught exception trying to create JAXBContext for Jaxb2Marshaller($packageName) with classes: ${classes.map{_.getName}.mkString(", ")}", ex)
throw ex
}
}
// Marshaller is not thread-safe
private[this] val marshaller: ThreadLocal[Marshaller] = new ThreadLocal[Marshaller] {
override protected def initialValue: Marshaller = {
val m: Marshaller = context.createMarshaller()
m.setProperty(Marshaller.JAXB_FRAGMENT, fragment)
m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, format)
m.setProperty(Marshaller.JAXB_ENCODING, "UTF-8")
try {
// This probably won't work on non-stock JAXB implementations
m.setProperty("com.sun.xml.internal.bind.indentString", indent)
} catch {
case _: javax.xml.bind.PropertyException => // ignore
}
m
}
}
// Unmarshaller is not thread-safe
private[this] val unmarshaller: ThreadLocal[Unmarshaller] = new ThreadLocal[Unmarshaller] {
override protected def initialValue: Unmarshaller = context.createUnmarshaller()
}
def toXML(obj: T): String = {
val writer: StringWriter = new StringWriter()
Resource.using(outputFactory.createXMLStreamWriter(writer)) { writeXML(obj, _) }
writer.toString()
}
def toXML(obj: T, comments: XMLCommentProvider): String = {
val writer: StringWriter = new StringWriter()
Resource.using(outputFactory.createXMLStreamWriter(writer)) { writer: XMLStreamWriter =>
val wrapped: CommentingXMLStreamWriter = new CommentingXMLStreamWriter(new IndentingXMLStreamWriter(writer), comments)
writeXML(obj, wrapped)
}
writer.toString()
}
def writeXML(obj: T, os: OutputStream): Unit = writeXML(obj, os, "UTF-8")
def writeXML(obj: T, os: OutputStream, encoding: String): Unit = Resource.using(outputFactory.createXMLStreamWriter(os, encoding)){ writeXML(obj, _) }
def writeXML(obj: T, w: Writer): Unit = Resource.using(outputFactory.createXMLStreamWriter(w)){ writeXML(obj, _) }
def writeXML(obj: T, writer: XMLStreamWriter): Unit = {
val wrapped: XMLStreamWriter = if (format) IndentingXMLStreamWriter(writer, indent = indent) else writer
marshaller.get().marshal(obj, wrapped)
}
def writeXML(obj: T, writer: IndentingXMLStreamWriter): Unit = marshaller.get().marshal(obj, writer)
def writeXML(obj: T, writer: CommentingXMLStreamWriter): Unit = marshaller.get().marshal(obj, writer)
def fromXML(xml: String): T = Resource.using(inputFactory.createXMLStreamReader(new StringReader(xml))){ readXML }
def readXML(is: InputStream): T = Resource.using(inputFactory.createXMLStreamReader(is)){ readXML }
def readXML(is: InputStream, encoding: String): T = Resource.using(inputFactory.createXMLStreamReader(is, encoding)){ readXML }
def readXML(reader: Reader): T = Resource.using(inputFactory.createXMLStreamReader(reader)){ readXML }
def readXML(reader: XMLStreamReader): T = unmarshaller.get().unmarshal(reader).asInstanceOf[T]
}
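// Minimal usage sketch (added for illustration, not part of the original file). It assumes a
// hypothetical @XmlRootElement-annotated class `Book` living in the package "com.example.model":
//
//   val marshaller = new JAXBMarshaller[Book](packageName = "com.example.model", rootElement = "book")
//   val xml: String = marshaller.toXML(book)      // thread-safe: each thread gets its own Marshaller
//   val back: Book  = marshaller.fromXML(xml)     // and its own Unmarshaller via the ThreadLocals above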
|
frugalmechanic/fm-xml
|
src/main/scala/fm/xml/JAXBMarshaller.scala
|
Scala
|
apache-2.0
| 5,343
|
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.it.action
import akka.actor.ActorSystem
import akka.stream.{ ActorMaterializer, Materializer }
import play.api._
import play.api.data._
import play.api.data.Forms._
import play.api.data.format.Formats._
import play.api.libs.Files.TemporaryFile
import play.api.mvc.MultipartFormData
import play.api.mvc.Results._
import play.api.test.{ FakeRequest, PlaySpecification, WithApplication, WsTestClient }
import play.api.routing.Router
class FormActionSpec extends PlaySpecification with WsTestClient {
case class User(
name: String,
email: String,
age: Int
)
val userForm = Form(
mapping(
"name" -> of[String],
"email" -> of[String],
"age" -> of[Int]
)(User.apply)(User.unapply)
)
def application: Application = {
val context = ApplicationLoader.createContext(Environment.simple())
new BuiltInComponentsFromContext(context) with NoHttpFiltersComponents {
import play.api.routing.sird.{ POST => SirdPost, _ }
override lazy val actorSystem: ActorSystem = ActorSystem("form-action-spec")
override implicit lazy val materializer: Materializer = ActorMaterializer()(this.actorSystem)
override def router: Router = Router.from {
case SirdPost(p"/multipart") => defaultActionBuilder(playBodyParsers.multipartFormData) { implicit request =>
val user = userForm.bindFromRequest().get
Ok(s"${user.name} - ${user.email}")
}
case SirdPost(p"/multipart/max-length") => defaultActionBuilder(playBodyParsers.multipartFormData(1024)) { implicit request =>
val user = userForm.bindFromRequest().get
Ok(s"${user.name} - ${user.email}")
}
case SirdPost(p"/multipart/wrapped-max-length") => defaultActionBuilder(playBodyParsers.maxLength(1024, playBodyParsers.multipartFormData)(this.materializer)) { implicit request =>
val user = userForm.bindFromRequest().get
Ok(s"${user.name} - ${user.email}")
}
}
}.application
}
"Form Actions" should {
"When POSTing" in {
val multipartBody = MultipartFormData[TemporaryFile](
dataParts = Map(
"name" -> Seq("Player"),
"email" -> Seq("play@email.com"),
"age" -> Seq("10")
),
files = Seq.empty,
badParts = Seq.empty
)
"bind all parameters for multipart request" in new WithApplication(application) {
val request = FakeRequest(POST, "/multipart").withMultipartFormDataBody(multipartBody)
contentAsString(route(app, request).get) must beEqualTo("Player - play@email.com")
}
"bind all parameters for multipart request with max length" in new WithApplication(application) {
val request = FakeRequest(POST, "/multipart/max-length").withMultipartFormDataBody(multipartBody)
contentAsString(route(app, request).get) must beEqualTo("Player - play@email.com")
}
"bind all parameters for multipart request to temporary file" in new WithApplication(application) {
val request = FakeRequest(POST, "/multipart/wrapped-max-length").withMultipartFormDataBody(multipartBody)
contentAsString(route(app, request).get) must beEqualTo("Player - play@email.com")
}
}
}
}
|
Shruti9520/playframework
|
framework/src/play-integration-test/src/test/scala/play/it/action/FormActionSpec.scala
|
Scala
|
apache-2.0
| 3,338
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.physical.batch
import org.apache.flink.table.functions.UserDefinedFunction
import org.apache.flink.table.planner.calcite.FlinkRelBuilder.PlannerNamedWindowProperty
import org.apache.flink.table.planner.plan.nodes.exec.ExecEdge
import org.apache.flink.table.planner.plan.logical.LogicalWindow
import org.apache.calcite.plan.{RelOptCluster, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core.AggregateCall
import org.apache.calcite.tools.RelBuilder
import java.util
import scala.collection.JavaConversions._
class BatchExecSortWindowAggregate(
cluster: RelOptCluster,
relBuilder: RelBuilder,
traitSet: RelTraitSet,
inputRel: RelNode,
outputRowType: RelDataType,
inputRowType: RelDataType,
aggInputRowType: RelDataType,
grouping: Array[Int],
auxGrouping: Array[Int],
aggCallToAggFunction: Seq[(AggregateCall, UserDefinedFunction)],
window: LogicalWindow,
inputTimeFieldIndex: Int,
inputTimeIsDate: Boolean,
namedProperties: Seq[PlannerNamedWindowProperty],
enableAssignPane: Boolean = false,
isMerge: Boolean)
extends BatchExecSortWindowAggregateBase(
cluster,
relBuilder,
traitSet,
inputRel,
outputRowType,
inputRowType,
aggInputRowType,
grouping,
auxGrouping,
aggCallToAggFunction,
window,
inputTimeFieldIndex,
inputTimeIsDate,
namedProperties,
enableAssignPane,
isMerge,
isFinal = true) {
override def copy(traitSet: RelTraitSet, inputs: java.util.List[RelNode]): RelNode = {
new BatchExecSortWindowAggregate(
cluster,
relBuilder,
traitSet,
inputs.get(0),
getRowType,
inputRowType,
aggInputRowType,
grouping,
auxGrouping,
aggCallToAggFunction,
window,
inputTimeFieldIndex,
inputTimeIsDate,
namedProperties,
enableAssignPane,
isMerge)
}
//~ ExecNode methods -----------------------------------------------------------
override def getInputEdges: util.List[ExecEdge] = List(ExecEdge.DEFAULT)
}
|
greghogan/flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/batch/BatchExecSortWindowAggregate.scala
|
Scala
|
apache-2.0
| 2,983
|
package almond.protocol
import java.nio.charset.StandardCharsets
import java.{util => ju}
import scala.util.hashing.MurmurHash3
import scala.util.Try
// adapted from https://github.com/plokhotnyuk/jsoniter-scala/blob/209d918a030b188f064ee55505a6c47257731b4b/jsoniter-scala-macros/src/test/scala/com/github/plokhotnyuk/jsoniter_scala/macros/JsonCodecMakerSpec.scala#L645-L666
final case class RawJson(value: Array[Byte]) {
override lazy val hashCode: Int = MurmurHash3.arrayHash(value)
override def equals(obj: Any): Boolean = obj match {
case that: RawJson => ju.Arrays.equals(value, that.value)
case _ => false
}
override def toString: String =
Try(new String(value, StandardCharsets.UTF_8))
.toOption
.getOrElse(value.toString)
}
object RawJson {
import com.github.plokhotnyuk.jsoniter_scala.core._
implicit val codec: JsonValueCodec[RawJson] = new JsonValueCodec[RawJson] {
def decodeValue(in: JsonReader, default: RawJson): RawJson =
new RawJson(in.readRawValAsBytes())
def encodeValue(x: RawJson, out: JsonWriter): Unit =
out.writeRawVal(x.value)
val nullValue: RawJson =
new RawJson(new Array[Byte](0))
}
val emptyObj: RawJson =
RawJson("{}".getBytes(StandardCharsets.UTF_8))
}
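// Minimal usage sketch (added for illustration, not part of the original file). Assuming the
// jsoniter-scala core entry points readFromArray/writeToArray, the codec above simply passes the
// stored bytes through untouched:
//
//   import com.github.plokhotnyuk.jsoniter_scala.core._
//   val raw    = RawJson("""{"answer": 42}""".getBytes(StandardCharsets.UTF_8))
//   val bytes  = writeToArray(raw)               // writes the stored bytes verbatim
//   val parsed = readFromArray[RawJson](bytes)   // reads them back as an opaque JSON value
//   assert(parsed == raw)                        // equality is ju.Arrays.equals on the byte arrays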
|
alexarchambault/jupyter-scala
|
modules/shared/protocol/src/main/scala/almond/protocol/RawJson.scala
|
Scala
|
apache-2.0
| 1,264
|
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.example.udfpredictor
import com.intel.analytics.bigdl.example.utils.WordMeta
import com.intel.analytics.bigdl.utils.{Engine, LoggerFilter}
import org.apache.spark.SparkContext
import org.apache.spark.sql.functions._
import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.SQLContext
object DataframePredictor {
LoggerFilter.redirectSparkInfoLogs()
Logger.getLogger("com.intel.analytics.bigdl.example").setLevel(Level.INFO)
def main(args: Array[String]): Unit = {
Utils.localParser.parse(args, TextClassificationUDFParams()).foreach { param =>
val conf = Engine.createSparkConf()
conf.setAppName("Text classification")
.set("spark.task.maxFailures", "1")
val sc = new SparkContext(conf)
Engine.init
// Create SQL context
val spark = new SQLContext(sc)
import spark.implicits._
var word2Meta = None: Option[Map[String, WordMeta]]
var word2Index = None: Option[Map[String, Int]]
var word2Vec = None: Option[Map[Float, Array[Float]]]
val result = Utils.getModel(sc, param)
val model = result._1
word2Meta = result._2
word2Vec = result._3
val sampleShape = result._4
// if the model was not trained in this run, load the word meta from file
if (word2Meta.isEmpty) {
val word2IndexMap = sc.textFile(s"${param.baseDir}/word2Meta.txt").map(item => {
val tuple = item.stripPrefix("(").stripSuffix(")").split(",")
(tuple(0), tuple(1).toInt)
}).collect()
word2Index = Some(word2IndexMap.toMap)
} else {
// already trained, use existing word meta
val word2IndexMap = collection.mutable.HashMap.empty[String, Int]
for((word, wordMeta) <- word2Meta.get) {
word2IndexMap += (word -> wordMeta.index)
}
word2Index = Some(word2IndexMap.toMap)
}
// if the model was not trained in this run, create the word vectors
if (word2Vec.isEmpty) {
word2Vec = Some(Utils.getWord2Vec(word2Index.get))
}
val predict = Utils.genUdf(sc, model, sampleShape, word2Index.get, word2Vec.get)
// register udf for data frame
val classifierUDF = udf(predict)
val data = Utils.loadTestData(param.testDir)
val df = spark.createDataFrame(data)
// static dataframe
val types = sc.textFile(Utils.getResourcePath("/example/udfpredictor/types"))
.filter(!_.contains("textType"))
.map { line =>
val words = line.split(",")
(words(0).trim, words(1).trim.toInt)
}.toDF("textType", "textLabel")
val classifyDF1 = df.withColumn("textLabel", classifierUDF($"text"))
.select("filename", "text", "textLabel")
classifyDF1.show()
val filteredDF1 = df.filter(classifierUDF($"text") === 9)
filteredDF1.show()
val df_join = classifyDF1.join(types, "textLabel")
df_join.show()
// aggregation
val typeCount = classifyDF1.groupBy($"textLabel").count()
typeCount.show()
// play with udf in sqlcontext
spark.udf.register("textClassifier", predict)
df.registerTempTable("textTable")
val classifyDF2 = spark
.sql("SELECT filename, textClassifier(text) AS textType_sql, text " +
"FROM textTable")
classifyDF2.show()
val filteredDF2 = spark
.sql("SELECT filename, textClassifier(text) AS textType_sql, text " +
"FROM textTable WHERE textClassifier(text) = 9")
filteredDF2.show()
}
}
}
|
psyyz10/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/example/udfpredictor/DataframePredictor.scala
|
Scala
|
apache-2.0
| 4,106
|
package com.alexitc.coinalerts.services
import javax.inject.Inject
import com.alexitc.coinalerts.core.AuthorizationToken
import com.alexitc.coinalerts.data.async.UserFutureDataHandler
import com.alexitc.coinalerts.errors.IncorrectPasswordError
import com.alexitc.coinalerts.models._
import com.alexitc.coinalerts.services.validators.UserValidator
import com.alexitc.playsonify.core.FutureOr.Implicits.{FutureOps, OrOps}
import com.alexitc.playsonify.core.{ApplicationErrors, FutureApplicationResult}
import org.mindrot.jbcrypt.BCrypt
import org.scalactic._
import play.api.i18n.Lang
import scala.concurrent.{ExecutionContext, Future}
class UserService @Inject()(
emailMessagesProvider: EmailMessagesProvider,
emailService: EmailServiceTrait,
userDataHandler: UserFutureDataHandler,
userValidator: UserValidator,
jwtService: JWTService)(implicit ec: ExecutionContext) {
def create(createUserModel: CreateUserModel)(implicit lang: Lang): Future[User Or ApplicationErrors] = {
val result = for {
validatedModel <- userValidator
.validateCreateUserModel(createUserModel)
.toFutureOr
user <- userDataHandler
.create(validatedModel.email, UserHiddenPassword.fromPassword(validatedModel.password))
.toFutureOr
token <- userDataHandler.createVerificationToken(user.id).toFutureOr
// send verification token by email
_ <- emailService
.sendEmail(user.email, emailMessagesProvider.verifyEmailSubject, emailMessagesProvider.verifyEmailText(token))
.toFutureOr
} yield user
result.toFuture
}
def verifyEmail(token: UserVerificationToken): FutureApplicationResult[AuthorizationToken] = {
val result = for {
user <- userDataHandler.verifyEmail(token).toFutureOr
} yield jwtService.createToken(user)
result.toFuture
}
def loginByEmail(email: UserEmail, password: UserPassword): FutureApplicationResult[AuthorizationToken] = {
val result = for {
_ <- enforcePasswordMatches(email, password).toFutureOr
user <- userDataHandler.getVerifiedUserByEmail(email).toFutureOr
} yield jwtService.createToken(user)
result.toFuture
}
def enforcePasswordMatches(email: UserEmail, password: UserPassword): FutureApplicationResult[Unit] = {
val result = for {
existingPassword <- userDataHandler.getVerifiedUserPassword(email).toFutureOr
_ <- Good(BCrypt.checkpw(password.string, existingPassword.string)).filter { matches =>
if (matches) Pass
else Fail(One(IncorrectPasswordError))
}.toFutureOr
} yield ()
result.toFuture
}
def userById(userId: UserId): FutureApplicationResult[User] = {
userDataHandler.getVerifiedUserById(userId)
}
def getPreferences(userId: UserId): FutureApplicationResult[UserPreferences] = {
userDataHandler.getUserPreferences(userId)
}
def setPreferences(
userId: UserId,
preferencesModel: SetUserPreferencesModel): FutureApplicationResult[UserPreferences] = {
val result = for {
validatedPreferences <- userValidator.validateSetUserPreferencesModel(preferencesModel).toFutureOr
userPreferences <- userDataHandler.setUserPreferences(userId, validatedPreferences).toFutureOr
} yield userPreferences
result.toFuture
}
}
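// Sketch (added for illustration, not part of the original file): enforcePasswordMatches compares a
// plain-text password against a stored BCrypt hash, so the stored value is presumably produced with
// the matching jBCrypt call at creation time (hidden behind UserHiddenPassword.fromPassword):
//
//   val hashed = BCrypt.hashpw("s3cret", BCrypt.gensalt())
//   BCrypt.checkpw("s3cret", hashed)   // true
//   BCrypt.checkpw("wrong", hashed)    // false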
|
AlexITC/crypto-coin-alerts
|
alerts-server/app/com/alexitc/coinalerts/services/UserService.scala
|
Scala
|
gpl-3.0
| 3,301
|
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.drelephant.spark.data
import com.linkedin.drelephant.spark.fetchers.statusapiv1.{ApplicationInfo, ExecutorSummary, JobData, StageData}
case class SparkRestDerivedData(
applicationInfo: ApplicationInfo,
jobDatas: Seq[JobData],
stageDatas: Seq[StageData],
executorSummaries: Seq[ExecutorSummary]
)
|
bretlowery/dr-elephant-mapr
|
app/com/linkedin/drelephant/spark/data/SparkRestDerivedData.scala
|
Scala
|
apache-2.0
| 923
|
package ore.models.admin
import ore.db._
import ore.db.impl.ModelCompanionPartial
import ore.models.project.{Page, Project, Version}
import ore.models.user.{LoggedActionType, LoggedActionContext, User, UserOwned}
import com.github.tminglei.slickpg.InetString
import slick.lifted.TableQuery
case class LoggedProject(
id: Option[DbRef[Project]],
pluginId: Option[String],
slug: Option[String],
ownerName: Option[String]
)
case class LoggedProjectVersion(id: Option[DbRef[Version]], versionString: Option[String])
case class LoggedProjectPage(id: Option[DbRef[Page]], name: Option[String], slug: Option[String])
case class LoggedSubject(id: Option[DbRef[_]], username: Option[String])
case class LoggedActionViewModel[Ctx](
userId: DbRef[User],
userName: String,
address: InetString,
action: LoggedActionType[Ctx],
actionContext: LoggedActionContext[Ctx],
newState: String,
oldState: String,
project: LoggedProject,
version: LoggedProjectVersion,
page: LoggedProjectPage,
subject: LoggedSubject
)
|
SpongePowered/Ore
|
models/src/main/scala/ore/models/admin/LoggedActionView.scala
|
Scala
|
mit
| 1,058
|
val x: /*ref*/SourceObjectPackage = null
/*
import `object`.foo.SourceObjectPackage
val x: SourceObjectPackage = null
*/
|
whorbowicz/intellij-scala
|
testdata/autoImport/all/ObjectPackage.scala
|
Scala
|
apache-2.0
| 121
|
package com.gu.arts.music.net
import dispatch._
import dispatch.thread.ThreadSafeHttpClient
import org.apache.http.params.HttpParams
import org.apache.http.conn.params.ConnRouteParams
import org.apache.http.HttpHost
import org.slf4j.LoggerFactory
object ConfiguredHttp extends Http {
val logger = LoggerFactory getLogger getClass
override def make_client = new ThreadSafeHttpClient(new Http.CurrentCredentials(None), maxConnections = 50, maxConnectionsPerRoute = 50) {
override protected def configureProxy(params: HttpParams) = {
val httpProxyPort = System.getProperty("http.proxyPort", "")
val httpProxyHost = System.getProperty("http.proxyHost", "")
if (httpProxyHost != "" && httpProxyPort != ""){
logger.info("Using proxy settings for HTTP request %s:%s".format(httpProxyHost, httpProxyPort))
ConnRouteParams.setDefaultProxy(params, new HttpHost(httpProxyHost, httpProxyPort.toInt))
}
params
}
}
}
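// Sketch (added for illustration, not part of the original file): the proxy is picked up from the
// standard JVM system properties, so callers only need to set them before issuing requests through
// ConfiguredHttp, e.g.
//
//   System.setProperty("http.proxyHost", "proxy.example.com")
//   System.setProperty("http.proxyPort", "8080")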
|
guardian/music-api-scala-client
|
api-base/src/main/scala/com/gu/arts/music/net/ConfiguredHttp.scala
|
Scala
|
apache-2.0
| 966
|
package knot.data
import java.nio.ByteBuffer
import java.nio.charset.{Charset, StandardCharsets}
import scala.collection.IndexedSeqOptimized
import scala.collection.immutable.IndexedSeq
object ByteNode {
def apply(text: String): ByteNode = new ByteNode(text.getBytes(StandardCharsets.UTF_8))
def wrap(bytes: Array[Byte]): ByteNode = new ByteNode(bytes)
def apply(bytes: ByteBuffer): ByteNode = {
if (bytes.remaining < 1) empty
else {
val array = new Array[Byte](bytes.remaining)
bytes.get(array)
ByteNode.wrap(array)
}
}
val empty: ByteNode = ByteNode.wrap(Array.empty[Byte])
}
class ByteNode(protected val bytes: Array[Byte])
extends IndexedSeq[Byte]
with IndexedSeqOptimized[Byte, ByteNode]
with Serializable {
def utf8String: String = {
decodeString(StandardCharsets.UTF_8)
}
def decodeString(charset: Charset): String = {
if (isEmpty)
""
else
new String(bytes, charset)
}
def ++(that: ByteNode): ByteNode = {
if (this.isEmpty) that
else if (that.isEmpty) this
else {
val ar = new Array[Byte](this.length + that.length)
this.copyToArray(ar, 0, this.length)
that.copyToArray(ar, this.length, that.length)
ByteNode.wrap(ar)
}
}
override protected[this] def newBuilder = new ByteNodeBuilder
override def iterator: ByteArrayIterator = ByteArrayIterator(bytes, 0, this.length)
override def isEmpty: Boolean = bytes.isEmpty
override def indexOf[B >: Byte](elem: B, from: Int): Int = {
if (from >= length) -1
else {
var found = -1
var i = math.max(from, 0)
while (i < length && found == -1) {
if (bytes(i) == elem)
found = i
i += 1
}
found
}
}
override def length: Int = bytes.length
override def apply(idx: Int): Byte = bytes(idx)
override def slice(from: Int, until: Int): ByteNode = ByteNodeWindow(bytes).slice(from, until)
override def drop(n: Int): ByteNode = ByteNodeWindow(bytes).drop(n)
override def take(n: Int): ByteNode = ByteNodeWindow(bytes).take(n)
override def copyToArray[B >: Byte](xs: Array[B], start: Int, len: Int): Unit = iterator.copyToArray(xs, start, len)
def copyToBuffer(buffer: ByteBuffer): Int = {
writeToBuffer(buffer, 0, length)
}
def asByteBuffer: ByteBuffer = ByteBuffer.wrap(bytes).asReadOnlyBuffer()
def shrink: ByteNode = this
protected[this] def writeToBuffer(buffer: ByteBuffer, offset: Int, length: Int): Int = {
val copyLength = Math.min(buffer.remaining, length)
if (copyLength > 0) {
buffer.put(bytes, offset, copyLength)
}
copyLength
}
}
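// Minimal usage sketch (added for illustration, not part of the original file):
//
//   val joined = ByteNode("hello ") ++ ByteNode("world")   // copies both arrays into a fresh one
//   joined.utf8String                                      // "hello world"
//   joined.length                                          // 11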
|
defvar/knot
|
knot-data/src/main/scala/knot/data/ByteNode.scala
|
Scala
|
mit
| 2,657
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.plans.logical._
class DSLHintSuite extends AnalysisTest {
lazy val a = 'a.int
lazy val b = 'b.string
lazy val c = 'c.string
lazy val r1 = LocalRelation(a, b, c)
test("various hint parameters") {
comparePlans(
r1.hint("hint1"),
UnresolvedHint("hint1", Seq(), r1)
)
comparePlans(
r1.hint("hint1", 1, "a"),
UnresolvedHint("hint1", Seq(1, "a"), r1)
)
comparePlans(
r1.hint("hint1", 1, $"a"),
UnresolvedHint("hint1", Seq(1, $"a"), r1)
)
comparePlans(
r1.hint("hint1", Seq(1, 2, 3), Seq($"a", $"b", $"c")),
UnresolvedHint("hint1", Seq(Seq(1, 2, 3), Seq($"a", $"b", $"c")), r1)
)
}
}
|
pgandhi999/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/DSLHintSuite.scala
|
Scala
|
apache-2.0
| 1,665
|
/*
Copyright (c) 2019, Robby, Kansas State University
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// @formatter:off
// This file was auto-generated from org.sireum.pilar.ast.Node
package org.sireum.pilar.ast
import org.sireum.util.Json._
object Json {
type FromExtern = PartialFunction[Any, String]
type ToExtern = PartialFunction[String, Any]
final val externMap = scala.collection.mutable.Map[String, (FromExtern, ToExtern)]().withDefaultValue((Map(), Map()))
import scala.language.implicitConversions
implicit def fromNode(o: org.sireum.pilar.ast.Node): ujson.Obj =
o match {
case o: org.sireum.pilar.ast.Annotation =>
ujson.Obj(
(".class", ujson.Str("Annotation")),
("id", fromNode(o.id)),
("lit", fromNode(o.lit))
)
case o: org.sireum.pilar.ast.AssertAction =>
ujson.Obj(
(".class", ujson.Str("AssertAction")),
("exp", fromNode(o.exp)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.AssignAction =>
ujson.Obj(
(".class", ujson.Str("AssignAction")),
("lhs", fromNode(o.lhs)),
("rhs", fromNode(o.rhs)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.AssumeAction =>
ujson.Obj(
(".class", ujson.Str("AssumeAction")),
("exp", fromNode(o.exp)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.BlockLocation =>
ujson.Obj(
(".class", ujson.Str("BlockLocation")),
("label", fromNode(o.label)),
("actions", fromSeq(o.actions)(fromNode)),
("jump", fromNode(o.jump)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.CallLocation =>
ujson.Obj(
(".class", ujson.Str("CallLocation")),
("label", fromNode(o.label)),
("lhsOpt", fromOption(o.lhsOpt)(fromNode)),
("id", fromNode(o.id)),
("args", fromSeq(o.args)(fromNode)),
("target", fromNode(o.target)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.ExtAction =>
ujson.Obj(
(".class", ujson.Str("ExtAction")),
("id", fromNode(o.id)),
("args", fromSeq(o.args)(fromNode)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.ExtExp =>
ujson.Obj(
(".class", ujson.Str("ExtExp")),
("exp", fromNode(o.exp)),
("args", fromSeq(o.args)(fromNode))
)
case o: org.sireum.pilar.ast.ExtJump =>
ujson.Obj(
(".class", ujson.Str("ExtJump")),
("id", fromNode(o.id)),
("args", fromSeq(o.args)(fromNode)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.ExtLit =>
ujson.Obj(
(".class", ujson.Str("ExtLit")),
("value", fromStr(externMap("ExtLit")._1(o.value)))
)
case o: org.sireum.pilar.ast.GlobalVarDecl =>
ujson.Obj(
(".class", ujson.Str("GlobalVarDecl")),
("id", fromNode(o.id)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.GotoJump =>
ujson.Obj(
(".class", ujson.Str("GotoJump")),
("target", fromNode(o.target)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.IdExp =>
ujson.Obj(
(".class", ujson.Str("IdExp")),
("id", fromNode(o.id))
)
case o: org.sireum.pilar.ast.IfJump =>
ujson.Obj(
(".class", ujson.Str("IfJump")),
("exp", fromNode(o.exp)),
("tTarget", fromNode(o.tTarget)),
("fTarget", fromNode(o.fTarget)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.InfixExp =>
ujson.Obj(
(".class", ujson.Str("InfixExp")),
("left", fromNode(o.left)),
("op", fromNode(o.op)),
("right", fromNode(o.right)),
("rest", fromSeq(o.rest)(fromTuple2))
)
case o: org.sireum.pilar.ast.LiteralExp =>
ujson.Obj(
(".class", ujson.Str("LiteralExp")),
("id", fromNode(o.id)),
("lit", fromNode(o.lit))
)
case o: org.sireum.pilar.ast.LocalVarDecl =>
ujson.Obj(
(".class", ujson.Str("LocalVarDecl")),
("id", fromNode(o.id)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.Model =>
ujson.Obj(
(".class", ujson.Str("Model")),
("elements", fromSeq(o.elements)(fromNode)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.ParamDecl =>
ujson.Obj(
(".class", ujson.Str("ParamDecl")),
("id", fromNode(o.id)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.ProcedureBody =>
ujson.Obj(
(".class", ujson.Str("ProcedureBody")),
("locals", fromSeq(o.locals)(fromNode)),
("locations", fromSeq(o.locations)(fromNode))
)
case o: org.sireum.pilar.ast.ProcedureDecl =>
ujson.Obj(
(".class", ujson.Str("ProcedureDecl")),
("id", fromNode(o.id)),
("params", fromSeq(o.params)(fromNode)),
("bodyOpt", fromOption(o.bodyOpt)(fromNode)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.RawLit =>
ujson.Obj(
(".class", ujson.Str("RawLit")),
("value", fromStr(o.value))
)
case o: org.sireum.pilar.ast.ReturnJump =>
ujson.Obj(
(".class", ujson.Str("ReturnJump")),
("expOpt", fromOption(o.expOpt)(fromNode)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.SwitchCase =>
ujson.Obj(
(".class", ujson.Str("SwitchCase")),
("expOpt", fromOption(o.expOpt)(fromNode)),
("target", fromNode(o.target))
)
case o: org.sireum.pilar.ast.SwitchJump =>
ujson.Obj(
(".class", ujson.Str("SwitchJump")),
("exp", fromNode(o.exp)),
("cases", fromSeq(o.cases)(fromNode)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast.TupleExp =>
ujson.Obj(
(".class", ujson.Str("TupleExp")),
("exps", fromSeq(o.exps)(fromNode)),
("annotations", fromSeq(o.annotations)(fromNode))
)
case o: org.sireum.pilar.ast._Id =>
ujson.Obj(
(".class", ujson.Str("_Id")),
("value", fromStr(o.value))
)
}
implicit def toNode[T <: org.sireum.pilar.ast.Node](v: ujson.Value): T =
(v: @unchecked) match {
case o: ujson.Obj =>
(o.value.head._2.asInstanceOf[ujson.Str].value match {
case "Annotation" =>
org.sireum.pilar.ast.Annotation(toNode[Id](o.value.toSeq(1)._2), toNode[Lit](o.value.toSeq(2)._2))
case "AssertAction" =>
org.sireum.pilar.ast.AssertAction(toNode[Exp](o.value.toSeq(1)._2), toVector(o.value.toSeq(2)._2)(toNode[Annotation]))
case "AssignAction" =>
org.sireum.pilar.ast.AssignAction(toNode[Exp](o.value.toSeq(1)._2), toNode[Exp](o.value.toSeq(2)._2), toVector(o.value.toSeq(3)._2)(toNode[Annotation]))
case "AssumeAction" =>
org.sireum.pilar.ast.AssumeAction(toNode[Exp](o.value.toSeq(1)._2), toVector(o.value.toSeq(2)._2)(toNode[Annotation]))
case "BlockLocation" =>
org.sireum.pilar.ast.BlockLocation(toNode[Id](o.value.toSeq(1)._2), toVector(o.value.toSeq(2)._2)(toNode[Action]), toNode[Jump](o.value.toSeq(3)._2), toVector(o.value.toSeq(4)._2)(toNode[Annotation]))
case "CallLocation" =>
org.sireum.pilar.ast.CallLocation(toNode[Id](o.value.toSeq(1)._2), toOption(o.value.toSeq(2)._2)(toNode[Exp]), toNode[Id](o.value.toSeq(3)._2), toVector(o.value.toSeq(4)._2)(toNode[Exp]), toNode[Id](o.value.toSeq(5)._2), toVector(o.value.toSeq(6)._2)(toNode[Annotation]))
case "ExtAction" =>
org.sireum.pilar.ast.ExtAction(toNode[Id](o.value.toSeq(1)._2), toVector(o.value.toSeq(2)._2)(toNode[Exp]), toVector(o.value.toSeq(3)._2)(toNode[Annotation]))
case "ExtExp" =>
org.sireum.pilar.ast.ExtExp(toNode[Exp](o.value.toSeq(1)._2), toVector(o.value.toSeq(2)._2)(toNode[Exp]))
case "ExtJump" =>
org.sireum.pilar.ast.ExtJump(toNode[Id](o.value.toSeq(1)._2), toVector(o.value.toSeq(2)._2)(toNode[Exp]), toVector(o.value.toSeq(3)._2)(toNode[Annotation]))
case "ExtLit" =>
org.sireum.pilar.ast.ExtLit(externMap("ExtLit")._2(toStr(o.value.toSeq(1)._2)))
case "GlobalVarDecl" =>
org.sireum.pilar.ast.GlobalVarDecl(toNode[Id](o.value.toSeq(1)._2), toVector(o.value.toSeq(2)._2)(toNode[Annotation]))
case "GotoJump" =>
org.sireum.pilar.ast.GotoJump(toNode[Id](o.value.toSeq(1)._2), toVector(o.value.toSeq(2)._2)(toNode[Annotation]))
case "IdExp" =>
org.sireum.pilar.ast.IdExp(toNode[Id](o.value.toSeq(1)._2))
case "IfJump" =>
org.sireum.pilar.ast.IfJump(toNode[Exp](o.value.toSeq(1)._2), toNode[Id](o.value.toSeq(2)._2), toNode[Id](o.value.toSeq(3)._2), toVector(o.value.toSeq(4)._2)(toNode[Annotation]))
case "InfixExp" =>
org.sireum.pilar.ast.InfixExp(toNode[Exp](o.value.toSeq(1)._2), toNode[Id](o.value.toSeq(2)._2), toNode[Exp](o.value.toSeq(3)._2), toVector(o.value.toSeq(4)._2)(toTuple2))
case "LiteralExp" =>
org.sireum.pilar.ast.LiteralExp(toNode[Id](o.value.toSeq(1)._2), toNode[Lit](o.value.toSeq(2)._2))
case "LocalVarDecl" =>
org.sireum.pilar.ast.LocalVarDecl(toNode[Id](o.value.toSeq(1)._2), toVector(o.value.toSeq(2)._2)(toNode[Annotation]))
case "Model" =>
org.sireum.pilar.ast.Model(toVector(o.value.toSeq(1)._2)(toNode[ModelElement]), toVector(o.value.toSeq(2)._2)(toNode[Annotation]))
case "ParamDecl" =>
org.sireum.pilar.ast.ParamDecl(toNode[Id](o.value.toSeq(1)._2), toVector(o.value.toSeq(2)._2)(toNode[Annotation]))
case "ProcedureBody" =>
org.sireum.pilar.ast.ProcedureBody(toVector(o.value.toSeq(1)._2)(toNode[LocalVarDecl]), toVector(o.value.toSeq(2)._2)(toNode[Location]))
case "ProcedureDecl" =>
org.sireum.pilar.ast.ProcedureDecl(toNode[Id](o.value.toSeq(1)._2), toVector(o.value.toSeq(2)._2)(toNode[ParamDecl]), toOption(o.value.toSeq(3)._2)(toNode[ProcedureBody]), toVector(o.value.toSeq(4)._2)(toNode[Annotation]))
case "RawLit" =>
org.sireum.pilar.ast.RawLit(toStr(o.value.toSeq(1)._2))
case "ReturnJump" =>
org.sireum.pilar.ast.ReturnJump(toOption(o.value.toSeq(1)._2)(toNode[Exp]), toVector(o.value.toSeq(2)._2)(toNode[Annotation]))
case "SwitchCase" =>
org.sireum.pilar.ast.SwitchCase(toOption(o.value.toSeq(1)._2)(toNode[LiteralExp]), toNode[Id](o.value.toSeq(2)._2))
case "SwitchJump" =>
org.sireum.pilar.ast.SwitchJump(toNode[Exp](o.value.toSeq(1)._2), toVector(o.value.toSeq(2)._2)(toNode[SwitchCase]), toVector(o.value.toSeq(3)._2)(toNode[Annotation]))
case "TupleExp" =>
org.sireum.pilar.ast.TupleExp(toVector(o.value.toSeq(1)._2)(toNode[Exp]), toVector(o.value.toSeq(2)._2)(toNode[Annotation]))
case "_Id" =>
org.sireum.pilar.ast._Id(toStrIntern(o.value.toSeq(1)._2))
}).asInstanceOf[T]
}
}
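// Usage sketch (not part of the original file): round-trips one AST node through the
// converters above. It assumes the enclosing object is the `Json` object of this file,
// that `fromNode` (defined earlier in it) is the serializer counterpart of `toNode`,
// and that `RawLit` is a case class so structural equality holds.
object JsonRoundTripSketch extends App {
  import org.sireum.pilar.ast._
  val lit = RawLit("42")
  val json = Json.fromNode(lit)          // Node -> ujson.Value
  val back = Json.toNode[RawLit](json)   // ujson.Value -> Node
  assert(back == lit)
}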
|
sireum/v3
|
pilar/shared/src/main/scala/org/sireum/pilar/ast/Json.scala
|
Scala
|
bsd-2-clause
| 13,243
|
import scala.reflect.runtime.universe._
import scala.tools.reflect.Eval
object Test extends App {
reify {
class C {
def x = 2
def y = x * x
}
class D extends C {
override def x = 3
}
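    // new D().y uses the overridden x (3 * 3 = 9) and new C().x is 2, so this prints 18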
println(new D().y * new C().x)
}.eval
}
|
felixmulder/scala
|
test/files/run/reify_inheritance.scala
|
Scala
|
bsd-3-clause
| 267
|
package com.etsy.sbt.checkstyle
import sbt.File
import scala.io.Source
/**
* Represents a Checkstyle XML configuration located locally, on the class path or remotely at a URL
*
* @author Joseph Earl
*/
sealed abstract class CheckstyleConfigLocation(val location: String) {
def read(resources: Seq[File]): String
}
object CheckstyleConfigLocation {
case class URL(url: String) extends CheckstyleConfigLocation(url) {
override def read(resources: Seq[sbt.File]): String = Source.fromURL(url).mkString
}
case class File(path: String) extends CheckstyleConfigLocation(path) {
override def read(resources: Seq[sbt.File]): String = Source.fromFile(path).mkString
}
case class Classpath(name: String) extends CheckstyleConfigLocation(name) {
override def read(resources: Seq[sbt.File]): String = {
val classpath = resources.map((f) => f.toURI.toURL)
val loader = new java.net.URLClassLoader(classpath.toArray, getClass.getClassLoader)
Source.fromInputStream(loader.getResourceAsStream(name)).mkString
}
}
}
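// Usage sketch (not part of the original file): reading a configuration from two of the
// supported locations. The local path and URL are illustrative placeholders, and the
// URL variant performs a real network fetch when run.
object CheckstyleConfigLocationSketch extends App {
  val fromFile = CheckstyleConfigLocation.File("config/checkstyle/checkstyle.xml").read(Seq.empty)
  println(fromFile.take(200))
  val fromUrl = CheckstyleConfigLocation.URL("https://example.com/checkstyle.xml").read(Seq.empty)
  println(fromUrl.take(200))
}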
|
etsy/sbt-checkstyle-plugin
|
src/main/scala/com/etsy/sbt/checkstyle/CheckstyleConfigLocation.scala
|
Scala
|
mit
| 1,096
|
/*
* Copyright 2013 - 2015, Daniel Krzywicki <daniel.krzywicki@agh.edu.pl>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package pl.edu.agh.scalamas.util
import org.apache.commons.math3.random.RandomDataGenerator
import scala.collection.generic.CanBuildFrom
import scala.collection.mutable.ArrayBuffer
object Util {
implicit class Shuffled[T, CC[X] <: TraversableOnce[X]](xs: CC[T]) {
def shuffled(implicit rand: RandomDataGenerator, bf: CanBuildFrom[CC[T], T, CC[T]]): CC[T] = {
val buf = new ArrayBuffer[T] ++= xs
val size = buf.size
val perm = rand.nextPermutation(size, size)
permutate(buf, perm)
(bf(xs) ++= buf).result()
}
private[util] def permutate(data: ArrayBuffer[T], perm: Array[Int]) = {
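      // Applies `perm` to `data` in place by walking each permutation cycle; visited
      // indices are marked by setting perm(i) = i so every cycle is rotated exactly once.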
for (i <- 0 until data.size) {
val x = data(i)
var current = i
var next = perm(i)
perm(i) = i
while (next != i) {
data(current) = data(next)
current = next
next = perm(current)
perm(current) = current
}
data(current) = x
}
}
}
}
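// Usage sketch (not part of the original file): shuffling a Vector with the implicit
// RandomDataGenerator that `shuffled` requires. The seed 42L is arbitrary.
object ShuffledSketch extends App {
  import Util._
  implicit val rand: RandomDataGenerator = new RandomDataGenerator()
  rand.reSeed(42L)
  println(Vector(1, 2, 3, 4, 5).shuffled) // e.g. Vector(3, 1, 5, 2, 4)
}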
|
ros3n/IntOb
|
core/src/main/scala/pl/edu/agh/scalamas/util/Util.scala
|
Scala
|
mit
| 2,129
|
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.request
import java.util.{ ArrayList => JArrayList, Collections => JCollections, List => JList }
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import io.gatling.commons.validation._
import io.gatling.core.session.{ Expression, Session }
import org.asynchttpclient.Param
package object builder {
val EmptyParamJListSuccess: Validation[JList[Param]] = JCollections.emptyList[Param].success
implicit class HttpParams(val params: List[HttpParam]) extends AnyVal {
def mergeWithFormIntoParamJList(formMaybe: Option[Expression[Map[String, Seq[String]]]], session: Session): Validation[JList[Param]] = {
val formParams = params.resolveParamJList(session)
formMaybe match {
case Some(form) =>
for {
resolvedFormParams <- formParams
resolvedForm <- form(session)
} yield {
val formParamsByName = resolvedFormParams.asScala.groupBy(_.getName)
val formFieldsByName = resolvedForm.map { case (key, values) => key -> values.map(value => new Param(key, value)) }
// override form with formParams
val javaParams: JList[Param] = (formFieldsByName ++ formParamsByName).values.flatten.toSeq.asJava
javaParams
}
case None =>
formParams
}
}
def resolveParamJList(session: Session): Validation[JList[Param]] = {
def update(ahcParams: JList[Param], param: HttpParam): Validation[JList[Param]] = param match {
case SimpleParam(key, value) =>
for {
key <- key(session)
value <- value(session)
} yield {
ahcParams.add(new Param(key, value.toString))
ahcParams
}
case MultivaluedParam(key, values) =>
for {
key <- key(session)
values <- values(session)
} yield {
values.foreach(value => ahcParams.add(new Param(key, value.toString)))
ahcParams
}
case ParamSeq(seq) =>
for {
seq <- seq(session)
} yield {
seq.foreach { case (key, value) => ahcParams.add(new Param(key, value.toString)) }
ahcParams
}
case ParamMap(map) =>
for {
map <- map(session)
} yield {
map.foreach { case (key, value) => ahcParams.add(new Param(key, value.toString)) }
ahcParams
}
}
@tailrec
def resolveParamJListRec(ahcParams: JList[Param], currentParams: List[HttpParam]): Validation[JList[Param]] =
currentParams match {
case Nil => ahcParams.success
case head :: tail =>
update(ahcParams, head) match {
case Success(newAhcParams) => resolveParamJListRec(newAhcParams, tail)
case f => f
}
}
if (params.isEmpty)
EmptyParamJListSuccess
else
resolveParamJListRec(new JArrayList[Param](params.size), params)
}
}
}
|
timve/gatling
|
gatling-http/src/main/scala/io/gatling/http/request/builder/package.scala
|
Scala
|
apache-2.0
| 3,789
|
package keystoneml.nodes.nlp
import org.scalatest.FunSuite
import keystoneml.workflow.PipelineContext
class HashingTFSuite extends FunSuite with PipelineContext {
test("HashingTF with no collisions") {
val dims = 4000
val hashingTF = HashingTF[Seq[String]](dims)
val testDatum = Seq("1", "2", "4", "4", "4", "4", "2")
val vector = hashingTF(testDatum)
// Assert that the vector is actually sparse and has the right number of active positions
assert(vector.activeSize === 3)
assert(vector.length === dims)
val termFrequenciesSet = vector.toArray.toSet
// Assert that there are indices with all of the correct values
assert(termFrequenciesSet === Set(0, 1, 2, 4))
}
test("HashingTF with collisions") {
val hashingTF = HashingTF[Seq[String]](2)
val testDatum = Seq("1", "2", "4", "4", "4", "4", "2")
val vector = hashingTF(testDatum)
assert(vector.activeSize === 2)
assert(vector.length === 2)
// Assert that the sum of the tf's is still correct even though there were collisions
assert(vector.toArray.sum === testDatum.size)
}
}
|
amplab/keystone
|
src/test/scala/keystoneml/nodes/nlp/HashingTFSuite.scala
|
Scala
|
apache-2.0
| 1,115
|
package org.scalatra
package oauth2
package auth
import scala.util.control.Exception._
import scalaz._
import Scalaz._
import org.scribe.model.Verifier
import org.scribe.oauth.{ OAuth20ServiceImpl, OAuth10aServiceImpl, OAuthService }
import OAuth2Imports._
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._
import org.scalatra.auth.{ ScentrySupport, ScentryStrategy }
import org.scalatra.validation.ValidationError
import javax.servlet.http.{ HttpServletResponse, HttpServletRequest }
import model.{ Account, AuthSession }
import org.scalatra.auth.ScentryAuthStore.CookieAuthStore
import org.scalatra.json.NativeJsonSupport
object OAuthToken {
def apply(scribeToken: org.scribe.model.Token): OAuthToken = OAuthToken(scribeToken.getToken, scribeToken.getSecret)
}
case class OAuthToken(token: String, secret: String)
trait ScribeAuthStrategyContext[UserClass >: Null <: AppAuthSession[_ <: AppUser[_]]] {
def oauthService: OAuthService
def name: String
def app: ScalatraBase with FlashMapSupport with ScribeAuthSupport[UserClass]
def findOrCreateUser(accessToken: OAuthToken): Validation[ValidationError, UserClass]
}
trait ScribeAuthSupport[UserClass >: Null <: AppAuthSession[_ <: AppUser[_]]] extends AuthenticationSupport[UserClass] { self: ScalatraBase with SessionSupport with FlashMapSupport with NativeJsonSupport ⇒
private[this] val oauthServicesRegistry = new ConcurrentHashMap[String, ScribeAuthStrategyContext[UserClass]].asScala
private[this] val thisApp = this
protected def sslRequired: Boolean = true
def registerOAuthService(name: String, service: OAuthService)(findOrCreateUser: OAuthToken ⇒ Validation[ValidationError, UserClass]) = {
val nm = name
val fn = findOrCreateUser
val ctxt = new ScribeAuthStrategyContext[UserClass] {
lazy val oauthService = service
val name = nm
val app = thisApp
def findOrCreateUser(accessToken: OAuthToken) = {
session("oauth.accessToken") = accessToken
try {
fn(accessToken)
} catch {
case e ⇒
e.printStackTrace()
ValidationError("Couldn't fetch the access token").fail[UserClass]
}
}
}
oauthServicesRegistry += name -> ctxt
}
get("/:provider") {
if (!oauthServicesRegistry.contains(params("provider"))) halt(404, "The provider [" + params("provider") + "] is not available.")
oauthServicesRegistry get params("provider") flatMap {
_.oauthService match {
case svc: OAuth10aServiceImpl ⇒
val tok = svc.getRequestToken
if (tok == null) halt(502, "Couldn't obtain a request token for " + params("provider"))
ScribeAuthStrategy.requestTokens(tok.getToken) = tok
svc.getAuthorizationUrl(tok).blankOption
case svc ⇒ svc.getAuthorizationUrl(null).blankOption
}
} foreach redirect
flash("error") = "Couldn't get a authorization url for oauth provider: %s" format params("provider")
unauthenticated()
}
get("/:provider/callback") {
scentry.authenticate(params("provider"))
userOption.fold(
sess ⇒ {
val u = sess.account
authService.validate(u.asInstanceOf[Account]).fold(
errs ⇒ {
clearUser()
contentType = "text/html"
jade("incomplete_oauth", "errors" -> errs.list, "login" -> u.login, "email" -> u.email, "name" -> u.name)
},
uu ⇒ loggedIn(sess, uu.login + " logged in from " + params("provider") + "."))
},
unauthenticated())
}
protected def clearUser() = {
scentry.user = null
scentry.store.invalidate
}
post("/:provider/callback") {
if (isAnonymous) unauthenticated()
else {
trySavingCompletedProfile().fold(
errs ⇒ {
contentType = "text/html"
jade("incomplete_oauth", "errors" -> errs.list)
},
u ⇒ loggedIn(u, u.account.login + " logged in from " + params("provider") + "."))
}
}
protected def trySavingCompletedProfile(): ValidationNEL[ValidationError, UserClass]
/**
* Registers authentication strategies.
*/
override protected def registerAuthStrategies {
oauthServicesRegistry foreach {
case (k, v) ⇒ scentry.register(k, _ ⇒ new ScribeAuthStrategy(v))
}
}
protected def authCookieOptions: CookieOptions
override protected def configureScentry {
scentry.store = new CookieAuthStore(this)(authCookieOptions)
}
def unauthenticated() {
session(scentryConfig.returnToKey) = request.uri.toASCIIString
redirect(scentryConfig.failureUrl)
}
}
object ScribeAuthStrategy {
private[auth] val requestTokens = new ConcurrentHashMap[String, org.scribe.model.Token].asScala
}
class ScribeAuthStrategy[UserClass >: Null <: AppAuthSession[_ <: AppUser[_]]](context: ScribeAuthStrategyContext[UserClass]) extends ScentryStrategy[UserClass] {
import ScribeAuthStrategy._
override val name = context.name
protected val app = context.app
override def isValid =
app.request.requestMethod == Get &&
app.params.contains("provider") &&
app.params("provider") == name &&
matchesForOAuthVersion
private[this] def matchesForOAuthVersion = context.oauthService match {
case _: OAuth20ServiceImpl ⇒ hasKey("code")
case _: OAuth10aServiceImpl ⇒ hasKey("oauth_token") && hasKey("oauth_verifier")
case _ ⇒ false
}
private[this] def hasKey(key: String) = app.params.get(key).flatMap(_.blankOption).isDefined
private[this] def verifier: String = context.oauthService match {
case _: OAuth20ServiceImpl ⇒ app.params("code")
case _: OAuth10aServiceImpl ⇒ app.params("oauth_verifier")
}
override def unauthenticated() {
// app.unauthenticated()
}
def authenticate(): Option[UserClass] =
(allCatch withApply logError) {
val reqToken = app.params.get("oauth_token").flatMap(requestTokens.get)
reqToken foreach (requestTokens -= _.getToken)
val verif = verifier
val accessToken = OAuthToken(context.oauthService.getAccessToken(reqToken.orNull, new Verifier(verif)))
context.findOrCreateUser(accessToken).toOption
}
private[this] def logError(ex: Throwable): Option[UserClass] = {
logger.error("There was a problem authenticating with " + name, ex)
none[UserClass]
}
}
|
scalatra/oauth2-server
|
src/main/scala/org/scalatra/oauth2/auth/ScribeAuthStrategy.scala
|
Scala
|
mit
| 6,412
|
package builder.api_json
import com.bryzek.apidoc.spec.v0.models.Service
import com.bryzek.apidoc.spec.v0.models.json._
import lib.ServiceValidator
import play.api.libs.json.{Json, JsError, JsSuccess}
import com.fasterxml.jackson.core.{JsonParseException, JsonProcessingException}
import scala.util.{Failure, Success, Try}
case class ServiceJsonServiceValidator(
json: String
) extends ServiceValidator[Service] {
def validate(): Either[Seq[String], Service] = {
Try(Json.parse(json)) match {
case Success(js) => {
js.validate[Service] match {
case e: JsError => {
Left(Seq("Not a valid service.json document: " + e.toString))
}
case s: JsSuccess[Service] => {
Right(s.get)
}
}
}
case Failure(ex) => ex match {
case e: JsonParseException => {
Left(Seq("Invalid JSON: " + e.getMessage))
}
case e: JsonProcessingException => {
Left(Seq("Invalid JSON: " + e.getMessage))
}
}
}
}
}
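// Usage sketch (not part of the original file): exercising both branches of validate().
// The first document is syntactically broken; the second parses but is unlikely to
// satisfy the Service schema. Accessing `service.name` assumes the generated Service
// model exposes a `name` field.
object ServiceJsonServiceValidatorSketch extends App {
  Seq("""{ "name": """, """{ "unexpected": true }""").foreach { json =>
    ServiceJsonServiceValidator(json).validate() match {
      case Right(service) => println(s"valid: ${service.name}")
      case Left(errors)   => errors.foreach(e => println(s"invalid: $e"))
    }
  }
}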
|
Seanstoppable/apidoc
|
core/src/main/scala/core/builder/api_json/ServiceJsonServiceValidator.scala
|
Scala
|
mit
| 1,053
|
/*
* Copyright (C) 2009-2011 Mathias Doenitz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.parboiled.examples.json
import org.testng.annotations.Test
import org.scalatest.testng.TestNGSuiteLike
import org.testng.Assert.assertEquals
import org.parboiled.scala.testing.ParboiledTest
import org.parboiled.scala.parserunners.ReportingParseRunner
class JsonParserTest extends ParboiledTest with TestNGSuiteLike {
val parser = new JsonParser1()
type Result = parser.AstNode
@Test
def testJsonParser() {
val json = """|{
| "simpleKey" : "some value",
| "key with spaces": null,
| "zero": 0,
| "number": -1.2323424E-5,
| "Boolean yes":true,
| "Boolean no": false,
| "Unic\\u00f8de" : "Long string with newline\\nescape",
| "key with \\"quotes\\"" : "string",
| "sub object" : {
| "sub key": 26.5,
| "a": "b",
| "array": [1, 2, { "yes":1, "no":0 }, ["a", "b", null], false]
| }
|}""".stripMargin
val rootNode = parser.parseJson(json)
assertEquals(printAst(rootNode),
"""|{
| "simpleKey" : "some value"
| "key with spaces" : null
| "zero" : 0
| "number" : -0.000012323424
| "Boolean yes" : true
| "Boolean no" : false
| "Unicøde" : "Long string with newline\\nescape"
| "key with \\"quotes\\"" : "string"
| "sub object" : {
| "sub key" : 26.5
| "a" : "b"
| "array" : [1, 2, {
| "yes" : 1
| "no" : 0
| }, ["a", "b", null], false]
| }
|}""".stripMargin)
}
@Test
def testJsonParserError() {
failParse(ReportingParseRunner(parser.Json), "XYZ") {
assertEquals(errors,
"""|Invalid input 'X', expected Json (line 1, pos 1):
|XYZ
|^
|""".stripMargin
)
}
}
def printAst(node: JsonParser1#AstNode, indent: String = ""): String = node match {
case n: JsonParser1#ObjectNode => "{\\n" + (for (sub <- n.members) yield printAst(sub, indent + " ")).mkString + indent + "}"
case n: JsonParser1#MemberNode => indent + '"' + n.key + "\\" : " + printAst(n.value, indent) + "\\n"
case n: JsonParser1#ArrayNode => '[' + (for (sub <- n.elements) yield printAst(sub, indent + " ")).mkString(", ") + "]"
case n: JsonParser1#StringNode => '"' + n.text + '"'
case n: JsonParser1#NumberNode => n.value.toString
case parser.True => "true"
case parser.False => "false"
case parser.Null => "null"
}
}
|
OpenMaths/parboiled
|
examples-scala/src/test/scala/org/parboiled/examples/json/JsonParserTest.scala
|
Scala
|
apache-2.0
| 3,299
|
package funpep.server
package service
import java.time.Instant.now
import scala.concurrent.ExecutionContext
import scalaz.concurrent._
import scalaz.stream._
import scalaz.std.option._
import scalaz.std.string._
import scalaz.syntax.std.boolean._
import atto._
import argonaut._
import argonaut.Argonaut._
import org.http4s._
import org.http4s.argonaut._
import org.http4s.dsl._
import funpep._
import funpep.data._
import funpep.util.functions._
import funpep.util.ops.path._
import funpep.server.util.codecs._
import funpep.server.util.extractors._
import funpep.server.util.functions._
final class AnalyzerService[A] private (
val queue: AnalyzerQueue[A]
)(implicit parser: Parser[A], ev: A ⇒ Compound) {
import AnalyzerService._
def analyzer: Analyzer[A] = queue.analyzer
def service(implicit ec: ExecutionContext): HttpService = HttpService {
case GET -> Root / "queue" ⇒ queueSize
case GET -> Root / "queue" / UUID(uuid) ⇒ queuePosition(uuid)
case GET -> Root / UUID(uuid) ⇒ analysisData(uuid)
case GET -> Root / UUID(uuid) / file ⇒ analysisFile(uuid, file)
case req @ POST -> Root ⇒ req.decode[AnalysisWrapper[A]](createAnalysis)
}
def queueSize: Process[Task, Response] = {
def content(size: Int): Json =
("size" := size) ->: ("time" := now) ->: jEmptyObject
ok { content(queue.count) }
}
def queuePosition(uuid: java.util.UUID): Process[Task, Response] = {
def content(pos: Int): Json =
("uuid" := uuid) ->: ("position" := pos) ->: ("time" := now) ->: jEmptyObject
queue.position(uuid).cata(pos ⇒ ok(content(pos)), notFound)
}
def createAnalysis(aw: AnalysisWrapper[A]): Process[Task, Response] =
queue.push(aw.reference, aw.comparing, aw.threshold, aw.annotations).flatMap[Task, Response] {
analysis ⇒ ok(analysis.asJson)
}
@SuppressWarnings(Array("org.brianmckenna.wartremover.warts.Any"))
def analysisData(uuid: java.util.UUID): Process[Task, Response] = {
lazy val directory: java.nio.file.Path =
analyzer.database / uuid.toString
lazy val readAnalysis: Process[Task, Json] =
AnalysisParser.fromFileW(directory / "analysis.data").map(_.asJson)
directory.exists.flatMap[Task, Response] {
_ ? readAnalysis.flatMap(ok(_)) | notFound
}
}
@SuppressWarnings(Array("org.brianmckenna.wartremover.warts.Any"))
def analysisFile(uuid: java.util.UUID, file: String): Process[Task, Response] = {
lazy val directory: java.nio.file.Path =
analyzer.database / uuid.toString
lazy val readAnalysis: Process[Task, Analysis] =
AnalysisParser.fromFileW(directory / "analysis.data")
directory.exists.flatMap[Task, Response] {
_ ? readAnalysis.flatMap(analysisFile(_, file)) | notFound
}
}
// FIXME: performs effect and wraps result in Process, this is wrong
private def analysisFile(analysis: Analysis, file: String): Process[Task, Response] = {
val f = (analysis.directory / file).toFile
StaticFile.fromFile(f, none[Request]).fold(notFound)(AsyncP(_))
}
}
object AnalyzerService {
final case class AnalysisWrapper[A](
reference: Fasta[A],
comparing: Fasta[A],
threshold: Double,
annotations: Analysis.Annotations
)
object AnalysisWrapper {
implicit def WrapperDecode[A](implicit p: Parser[A], ev: A ⇒ Compound): DecodeJson[AnalysisWrapper[A]] =
jdecode4L(AnalysisWrapper.apply[A])("reference", "comparing", "threshold", "annotations")
implicit def WrapperEntityDecoder[A](implicit p: Parser[A], ev: A ⇒ Compound): EntityDecoder[AnalysisWrapper[A]] =
jsonOf[AnalysisWrapper[A]]
}
def apply[A](queue: AnalyzerQueue[A])(implicit p: Parser[A], ev: A ⇒ Compound): AnalyzerService[A] =
new AnalyzerService(queue)
def service[A](queue: AnalyzerQueue[A])(implicit p: Parser[A], ev: A ⇒ Compound, ec: ExecutionContext): HttpService =
new AnalyzerService(queue).service
}
|
agjacome/funpep
|
server/src/main/scala/funpep/server/service/AnalyzerService.scala
|
Scala
|
mit
| 3,972
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.batch
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.config.ExecutionConfigOptions
import org.apache.flink.table.api.internal.TableEnvironmentInternal
import org.apache.flink.table.planner.utils.TableTestBase
import org.apache.flink.table.types.logical.{BigIntType, IntType, VarCharType}
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import org.junit.{Before, Test}
@RunWith(classOf[Parameterized])
class ExplainTest(extended: Boolean) extends TableTestBase {
private val extraDetails = if (extended) {
Array(ExplainDetail.CHANGELOG_MODE, ExplainDetail.ESTIMATED_COST)
} else {
Array.empty[ExplainDetail]
}
private val util = batchTestUtil()
util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
util.addDataStream[(Int, Long, String)]("MyTable1", 'a, 'b, 'c)
util.addDataStream[(Int, Long, String)]("MyTable2", 'd, 'e, 'f)
val STRING = VarCharType.STRING_TYPE
val LONG = new BigIntType()
val INT = new IntType()
@Before
def before(): Unit = {
util.tableEnv.getConfig.getConfiguration.setInteger(
ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 4)
}
@Test
def testExplainWithTableSourceScan(): Unit = {
util.verifyExplain("SELECT * FROM MyTable", extraDetails: _*)
}
@Test
def testExplainWithDataStreamScan(): Unit = {
util.verifyExplain("SELECT * FROM MyTable1", extraDetails: _*)
}
@Test
def testExplainWithFilter(): Unit = {
util.verifyExplain("SELECT * FROM MyTable1 WHERE mod(a, 2) = 0", extraDetails: _*)
}
@Test
def testExplainWithAgg(): Unit = {
util.verifyExplain("SELECT COUNT(*) FROM MyTable1 GROUP BY a", extraDetails: _*)
}
@Test
def testExplainWithJoin(): Unit = {
    // TODO support other join operators when they are supported
util.tableEnv.getConfig.getConfiguration.setString(
ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "HashJoin, NestedLoopJoin")
util.verifyExplain("SELECT a, b, c, e, f FROM MyTable1, MyTable2 WHERE a = d", extraDetails: _*)
}
@Test
def testExplainWithUnion(): Unit = {
util.verifyExplain("SELECT * FROM MyTable1 UNION ALL SELECT * FROM MyTable2", extraDetails: _*)
}
@Test
def testExplainWithSort(): Unit = {
util.verifyExplain("SELECT * FROM MyTable1 ORDER BY a LIMIT 5", extraDetails: _*)
}
@Test
def testExplainWithSingleSink(): Unit = {
val table = util.tableEnv.sqlQuery("SELECT * FROM MyTable1 WHERE a > 10")
val sink = util.createCollectTableSink(Array("a", "b", "c"), Array(INT, LONG, STRING))
util.verifyExplainInsert(table, sink, "sink", extraDetails: _*)
}
@Test
def testExplainWithMultiSinks(): Unit = {
val stmtSet = util.tableEnv.createStatementSet()
val table = util.tableEnv.sqlQuery("SELECT a, COUNT(*) AS cnt FROM MyTable1 GROUP BY a")
util.tableEnv.registerTable("TempTable", table)
val table1 = util.tableEnv.sqlQuery("SELECT * FROM TempTable WHERE cnt > 10")
val sink1 = util.createCollectTableSink(Array("a", "cnt"), Array(INT, LONG))
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink1", sink1)
stmtSet.addInsert("sink1", table1)
val table2 = util.tableEnv.sqlQuery("SELECT * FROM TempTable WHERE cnt < 10")
val sink2 = util.createCollectTableSink(Array("a", "cnt"), Array(INT, LONG))
util.tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal("sink2", sink2)
stmtSet.addInsert("sink2", table2)
util.verifyExplain(stmtSet, extraDetails: _*)
}
@Test
def testExplainMultipleInput(): Unit = {
util.tableEnv.getConfig.getConfiguration.setString(
ExecutionConfigOptions.TABLE_EXEC_DISABLED_OPERATORS, "NestedLoopJoin,SortMergeJoin")
val sql =
"""
|select * from
| (select a, sum(b) from MyTable1 group by a) v1,
| (select d, sum(e) from MyTable2 group by d) v2
| where a = d
|""".stripMargin
util.verifyExplain(sql, extraDetails: _*)
}
}
object ExplainTest {
@Parameterized.Parameters(name = "extended={0}")
def parameters(): java.util.Collection[Boolean] = {
java.util.Arrays.asList(true, false)
}
}
|
lincoln-lil/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/ExplainTest.scala
|
Scala
|
apache-2.0
| 5,080
|
package org.antipathy.mvn_scalafmt.io
import java.io.File
import java.nio.charset.StandardCharsets
import java.nio.file.{Files, Paths}
import org.antipathy.mvn_scalafmt.model.RemoteConfig
import org.apache.commons.io.FileUtils
import org.apache.maven.plugin.logging.SystemStreamLog
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.GivenWhenThen
import org.scalatest.matchers.should.Matchers
class RemoteConfigWriterSpec extends AnyFlatSpec with GivenWhenThen with Matchers {
behavior of "RemoteConfigWriter"
it should "Write a config to a local path" in {
val localPath = s"${System.getProperty("java.io.tmpdir")}${File.separator}.scalafmt.conf"
val contents = """version = "1.5.1"
|maxColumn = 120
|align = false
|rewrite.rules = [SortImports]
|danglingParentheses = true
|importSelectors = singleLine
|binPack.parentConstructors = true
|includeCurlyBraceInSelectChains = false""".stripMargin
val writer = new RemoteConfigWriter(new SystemStreamLog)
val input = RemoteConfig(contents, Paths.get(localPath))
writer.write(input)
    new String(Files.readAllBytes(new File(localPath).toPath)) should be(contents)
Files.delete(input.location)
}
it should "Overwrite a config in a local path" in {
val localPath = s"${System.getProperty("java.io.tmpdir")}${File.separator}.scalafmt2.conf"
val contents = """version = "1.5.1"
|maxColumn = 120
|align = false
|rewrite.rules = [SortImports]
|danglingParentheses = true
|importSelectors = singleLine
|binPack.parentConstructors = true
|includeCurlyBraceInSelectChains = false""".stripMargin
val oldContents = "SomeOldConfig"
val writer = new RemoteConfigWriter(new SystemStreamLog)
val input = RemoteConfig(contents, Paths.get(localPath))
FileUtils.writeStringToFile(new File(localPath), oldContents, StandardCharsets.UTF_8)
new String(Files.readAllBytes(new File(localPath).toPath)) should be(oldContents)
writer.write(input)
new String(Files.readAllBytes(new File(localPath).toPath)) should be(contents)
Files.delete(input.location)
}
}
|
SimonJPegg/mvn_scalafmt
|
src/test/scala/org/antipathy/mvn_scalafmt/io/RemoteConfigWriterSpec.scala
|
Scala
|
apache-2.0
| 2,371
|
package ml.combust.mleap.runtime.transformer.feature
import java.io.File
import ml.combust.bundle.BundleFile
import ml.combust.mleap.core.feature.MinMaxScalerModel
import ml.combust.mleap.core.types._
import ml.combust.mleap.runtime.frame.{DefaultLeapFrame, Row}
import ml.combust.mleap.runtime.transformer.Pipeline
import ml.combust.mleap.tensor.Tensor
import org.apache.spark.ml.linalg.Vectors
import org.scalatest.FunSpec
import resource.managed
import ml.combust.mleap.runtime.MleapSupport._
/**
* Created by mikhail on 9/25/16.
*/
class MinMaxScalerSpec extends FunSpec {
val schema = StructType(Seq(StructField("test_vec", TensorType(BasicType.Double)))).get
val dataset = Seq(Row(Tensor.denseVector(Array(0.0, 20.0, 20.0))))
val frame = DefaultLeapFrame(schema, dataset)
val minMaxScaler = MinMaxScaler(
shape = NodeShape.feature(inputCol = "test_vec", outputCol = "test_normalized"),
model = MinMaxScalerModel(Vectors.dense(Array(0.0, 0.0, 0.0)), Vectors.dense(Array(10.0, 20.0, 40.0))))
describe("#transform") {
it("scales the input data between min / max value vectors") {
val frame2 = minMaxScaler.transform(frame).get
val data = frame2.dataset.toArray
val norm = data(0).getTensor[Double](1)
assert(norm(0) == 0.0)
assert(norm(1) == 1.0)
assert(norm(2) == 0.5)
}
describe("with invalid input column") {
val minMaxScaler2 = minMaxScaler.copy(shape = NodeShape.feature(inputCol = "bad_feature"))
it("returns a Failure") {
assert(minMaxScaler2.transform(frame).isFailure)
}
}
}
describe("input/output schema") {
it("has the correct inputs and outputs") {
assert(minMaxScaler.schema.fields ==
Seq(StructField("test_vec", TensorType.Double(3)),
StructField("test_normalized", TensorType.Double(3))))
}
}
describe("min max scaler with defaults for min/max still works") {
it ("loads correctly in mleap") {
val file = new File(getClass.getResource("/min_max_scaler_tf.zip").toURI)
val pipeline = (for (bf <- managed(BundleFile(file))) yield {
bf.loadMleapBundle().get.root
}).tried.get.asInstanceOf[Pipeline]
assert(pipeline.model.transformers.size == 2)
}
}
}
|
combust/mleap
|
mleap-runtime/src/test/scala/ml/combust/mleap/runtime/transformer/feature/MinMaxScalerSpec.scala
|
Scala
|
apache-2.0
| 2,257
|
package main.scala.calculator
class InfixCalculator {
def calculate(infixExpression: String): Either[String, ExpressionError] = {
val safeInfixExpression = if (infixExpression == null) "" else infixExpression
val infixTokens = InfixTokenizer.tokenize(safeInfixExpression) match {
case Left(tokens) => tokens
case Right(error) => return Right(error)
}
val postfixTokens = NotationConverter.fromInfixToPostfix(infixTokens) match {
case Left(tokens) => tokens
case Right(error) => return Right(error)
}
PostfixExpressionEvaluator.evaluate(postfixTokens) match {
case Left(result) => Left(result.toString)
case Right(error) => Right(error)
}
}
}
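// Usage sketch (not part of the original file): note the inverted Either convention of
// calculate, where Left carries the result string and Right carries the error. Which of
// these inputs fail depends on the tokenizer/evaluator not shown here.
object InfixCalculatorSketch extends App {
  val calculator = new InfixCalculator
  Seq("(1 + 2) * 3", "1 +", null).foreach { expression =>
    calculator.calculate(expression) match {
      case Left(result) => println(s"$expression = $result")
      case Right(error) => println(s"$expression failed: $error")
    }
  }
}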
|
sirIrishman/calculator
|
src/main/scala/calculator/InfixCalculator.scala
|
Scala
|
bsd-3-clause
| 712
|
package filodb.cli
import akka.actor.{ActorRef, ActorSystem}
import akka.pattern.ask
import akka.util.Timeout
import com.opencsv.{CSVReader, CSVWriter}
import com.typesafe.scalalogging.slf4j.StrictLogging
import org.velvia.filo.{ArrayStringRowReader, RowReader}
import scala.concurrent.{Await, Future, ExecutionContext}
import scala.concurrent.duration._
import scala.language.postfixOps
import filodb.coordinator.client.{Client, LocalClient}
import filodb.coordinator.sources.CsvSourceActor
import filodb.coordinator.RowSource
import filodb.core._
import filodb.core.store.MetaStore
// Turn off style rules for CLI classes
//scalastyle:off
trait CsvImportExport extends StrictLogging {
def system: ActorSystem
val metaStore: MetaStore
def coordinatorActor: ActorRef
def client: LocalClient
var exitCode = 0
implicit val ec: ExecutionContext
import scala.collection.JavaConversions._
def ingestCSV(dataset: DatasetRef,
version: Int,
csvPath: String,
delimiter: Char,
timeout: FiniteDuration): Unit = {
val fileReader = new java.io.FileReader(csvPath)
// TODO: consider using a supervisor actor to start these
val csvActor = system.actorOf(CsvSourceActor.props(fileReader, dataset, version, coordinatorActor))
Client.actorAsk(csvActor, RowSource.Start, timeout) {
case RowSource.SetupError(e) =>
println(s"Error $e setting up CSV ingestion of $dataset/$version at $csvPath")
exitCode = 2
return
case RowSource.AllDone =>
}
// There might still be rows left after the latest flush is done, so initiate another flush
val activeRows = client.ingestionStats(dataset, version).headOption.map(_.numRowsActive).getOrElse(-1)
if (activeRows > 0) {
logger.info(s"Still $activeRows left to flush in active memTable, triggering flush....")
client.flush(dataset, version, timeout)
} else if (activeRows < 0) {
logger.warn(s"Unable to obtain any stats from ingestion, something is wrong.")
}
println(s"Ingestion of $csvPath finished!")
exitCode = 0
}
}
|
markhamstra/FiloDB
|
cli/src/main/scala/filodb.cli/CsvImportExport.scala
|
Scala
|
apache-2.0
| 2,131
|
package io.vamp.model.reader
import io.vamp.model.artifact._
import io.vamp.model.reader.YamlSourceReader._
object TemplateReader extends YamlReader[Template] {
override protected def parse(implicit source: YamlSourceReader): Template = {
Template(name, metadata, first[Any]("definition", "def") match {
case Some(ds: YamlSourceReader) ⇒ ds.flatten()
case _ ⇒ Map()
})
}
}
|
magneticio/vamp
|
model/src/main/scala/io/vamp/model/reader/TemplateReader.scala
|
Scala
|
apache-2.0
| 429
|
/*
* Copyright (c) 2013 Daniel Krzywicki <daniel.krzywicki@agh.edu.pl>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package pl.edu.agh.scalamas.random
import org.apache.commons.math3.random.{RandomDataGenerator, Well19937c, RandomGenerator}
import pl.edu.agh.scalamas.app.AgentRuntimeComponent
import net.ceedubs.ficus.Ficus._
/**
 * Mixin component for random number generation. The random generators are based on the Apache Commons Math library.
*/
trait RandomGeneratorComponent {
this: AgentRuntimeComponent =>
/**
* The global seed used in the application.
* A common value may not guarantee repeatable results in the case of concurrent applications.
* A distinct value guarantees distinct results.
*/
def globalSeed = agentRuntime.config.as[Option[Long]]("mas.seed").getOrElse(System.currentTimeMillis())
/** Factory method for creating a random generator. Override this to choose a different RNG algorithm.
*/
def randomGeneratorFactory(seed: Long): RandomGenerator = new Well19937c(seed)
/**
* Provide the RNG. Shortcut for randomData.getRandomGenerator()
*/
def random: RandomGenerator = randomData.getRandomGenerator
/**
* Provides a RandomDataGenerator for distribution-based operations.
*/
def randomData: RandomDataGenerator
}
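// Implementation sketch (not part of the original file): a minimal concrete component
// wiring randomData to the factory and global seed declared above. A real application
// must still satisfy the AgentRuntimeComponent self-type when mixing it in.
trait SimpleRandomGeneratorComponent extends RandomGeneratorComponent {
  this: AgentRuntimeComponent =>
  lazy val randomData: RandomDataGenerator = new RandomDataGenerator(randomGeneratorFactory(globalSeed))
}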
|
eleaar/scala-mas
|
core/src/main/scala/pl/edu/agh/scalamas/random/RandomGeneratorComponent.scala
|
Scala
|
mit
| 2,326
|
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package reflect
package io
import java.io.{BufferedOutputStream, ByteArrayOutputStream, IOException, InputStream, OutputStream}
import java.io.{File => JFile}
import java.net.URL
import java.nio.ByteBuffer
import scala.collection.AbstractIterable
/**
* An abstraction over files for use in the reflection/compiler libraries.
*
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
object AbstractFile {
/** Returns "getFile(new File(path))". */
def getFile(path: String): AbstractFile = getFile(File(path))
def getFile(path: Path): AbstractFile = getFile(path.toFile)
/**
* If the specified File exists and is a regular file, returns an
* abstract regular file backed by it. Otherwise, returns `null`.
*/
def getFile(file: File): AbstractFile =
if (!file.isDirectory) new PlainFile(file) else null
/** Returns "getDirectory(new File(path))". */
def getDirectory(path: Path): AbstractFile = getDirectory(path.toFile)
/**
* If the specified File exists and is either a directory or a
* readable zip or jar archive, returns an abstract directory
* backed by it. Otherwise, returns `null`.
*/
def getDirectory(file: File): AbstractFile =
if (file.isDirectory) new PlainFile(file)
else if (file.isFile && Path.isExtensionJarOrZip(file.jfile)) ZipArchive fromFile file
else null
/**
* If the specified URL exists and is a regular file or a directory, returns an
* abstract regular file or an abstract directory, respectively, backed by it.
* Otherwise, returns `null`.
*/
def getURL(url: URL): AbstractFile =
if (url.getProtocol == "file") {
val f = new java.io.File(url.toURI)
if (f.isDirectory) getDirectory(f)
else getFile(f)
} else null
def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url
}
/**
* <p>
* This class and its children serve to unify handling of files and
* directories. These files and directories may or may not have some
 * real counterpart within the file system. For example, some file
* handles reference files within a zip archive or virtual ones
* that exist only in memory.
* </p>
* <p>
* Every abstract file has a path (i.e. a full name) and a name
* (i.e. a short name) and may be backed by some real File. There are
* two different kinds of abstract files: regular files and
* directories. Regular files may be read and have a last modification
* time. Directories may list their content and look for subfiles with
* a specified name or path and of a specified kind.
* </p>
* <p>
 * The interface does <b>not</b> allow access to the content.
* The class `symtab.classfile.AbstractFileReader` accesses
* bytes, knowing that the character set of classfiles is UTF-8. For
* all other cases, the class `SourceFile` is used, which honors
* `global.settings.encoding.value`.
* </p>
*
* ''Note: This library is considered experimental and should not be used unless you know what you are doing.''
*/
abstract class AbstractFile extends AbstractIterable[AbstractFile] {
/** Returns the name of this abstract file. */
def name: String
/** Returns the path of this abstract file. */
def path: String
/** Returns the path of this abstract file in a canonical form. */
def canonicalPath: String = if (file == null) path else file.getCanonicalPath
/** Checks extension case insensitively. */
def hasExtension(other: String) = extension == other.toLowerCase
private lazy val extension: String = Path.extension(name)
/** The absolute file, if this is a relative file. */
def absolute: AbstractFile
/** Returns the containing directory of this abstract file */
def container : AbstractFile
/** Returns the underlying File if any and null otherwise. */
def file: JFile
/** An underlying source, if known. Mostly, a zip/jar file. */
def underlyingSource: Option[AbstractFile] = None
/** Does this abstract file denote an existing file? */
def exists: Boolean = {
//if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(IOStats.fileExistsCount)
(file eq null) || file.exists
}
/** Does this abstract file represent something which can contain classfiles? */
def isClassContainer = isDirectory || (file != null && (extension == "jar" || extension == "zip"))
/** Create a file on disk, if one does not exist already. */
def create(): Unit
/** Delete the underlying file or directory (recursively). */
def delete(): Unit
/** Is this abstract file a directory? */
def isDirectory: Boolean
/** Does this abstract file correspond to something on-disk? */
def isVirtual: Boolean = false
/** Returns the time that this abstract file was last modified. */
def lastModified: Long
/** returns an input stream so the file can be read */
def input: InputStream
/** Returns an output stream for writing the file */
def output: OutputStream
/** Returns a buffered output stream for writing the file - defaults to out */
def bufferedOutput: BufferedOutputStream = new BufferedOutputStream(output)
/** size of this file if it is a concrete file. */
def sizeOption: Option[Int] = None
def toURL: URL = if (file == null) null else file.toURI.toURL
/** Returns contents of file (if applicable) in a Char array.
* warning: use `Global.getSourceFile()` to use the proper
* encoding when converting to the char array.
*/
@throws(classOf[IOException])
def toCharArray = new String(toByteArray).toCharArray
/** Returns contents of file (if applicable) in a byte array.
*/
@throws(classOf[IOException])
def toByteArray: Array[Byte] = {
val in = input
sizeOption match {
case Some(size) =>
var rest = size
val arr = new Array[Byte](rest)
while (rest > 0) {
val res = in.read(arr, arr.length - rest, rest)
if (res == -1)
throw new IOException("read error")
rest -= res
}
in.close()
arr
case None =>
val out = new ByteArrayOutputStream()
var c = in.read()
while(c != -1) {
out.write(c)
c = in.read()
}
in.close()
out.toByteArray()
}
}
def toByteBuffer: ByteBuffer = ByteBuffer.wrap(toByteArray)
/** Returns all abstract subfiles of this abstract directory. */
def iterator: Iterator[AbstractFile]
override def isEmpty: Boolean = iterator.isEmpty
/** Returns the abstract file in this abstract directory with the specified
* name. If there is no such file, returns `null`. The argument
* `directory` tells whether to look for a directory or
* a regular file.
*/
def lookupName(name: String, directory: Boolean): AbstractFile
/** Returns an abstract file with the given name. It does not
* check that it exists.
*/
def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile
/** Return an abstract file that does not check that `path` denotes
* an existing file.
*/
def lookupPathUnchecked(path: String, directory: Boolean): AbstractFile = {
lookup((f, p, dir) => f.lookupNameUnchecked(p, dir), path, directory)
}
private def lookup(getFile: (AbstractFile, String, Boolean) => AbstractFile,
path0: String,
directory: Boolean): AbstractFile = {
val separator = java.io.File.separatorChar
// trim trailing '/'s
val path: String = if (path0.last == separator) path0 dropRight 1 else path0
val length = path.length()
assert(length > 0 && !(path.last == separator), path)
var file = this
var start = 0
while (true) {
val index = path.indexOf(separator, start)
assert(index < 0 || start < index, ((path, directory, start, index)))
val name = path.substring(start, if (index < 0) length else index)
file = getFile(file, name, if (index < 0) directory else true)
if ((file eq null) || index < 0) return file
start = index + 1
}
file
}
private def fileOrSubdirectoryNamed(name: String, isDir: Boolean): AbstractFile = {
val lookup = lookupName(name, isDir)
if (lookup != null) lookup
else {
val jfile = new JFile(file, name)
if (isDir) jfile.mkdirs() else jfile.createNewFile()
new PlainFile(jfile)
}
}
/**
* Get the file in this directory with the given name,
   * creating an empty file if it does not already exist.
*/
def fileNamed(name: String): AbstractFile = {
assert(isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
fileOrSubdirectoryNamed(name, isDir = false)
}
/**
* Get the subdirectory with a given name, creating it if it
* does not already exist.
*/
def subdirectoryNamed(name: String): AbstractFile = {
assert (isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path))
fileOrSubdirectoryNamed(name, isDir = true)
}
protected def unsupported(): Nothing = unsupported(null)
protected def unsupported(msg: String): Nothing = throw new UnsupportedOperationException(msg)
/** Returns the path of this abstract file. */
override def toString() = path
}
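// Usage sketch (not part of the original file): the companion-object factories and byte
// access defined above. "build.sbt" is an arbitrary example path; getFile returns null
// for a missing path or a directory, so the null check mirrors the documented contract.
object AbstractFileSketch extends App {
  val file = AbstractFile.getFile("build.sbt")
  if (file ne null) {
    println(s"name=${file.name} size=${file.sizeOption}")
    println(new String(file.toCharArray))
  }
}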
|
martijnhoekstra/scala
|
src/reflect/scala/reflect/io/AbstractFile.scala
|
Scala
|
apache-2.0
| 9,578
|
/**
* Copyright (C) 2010 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.servlet
import java.io._
import java.{util ⇒ ju}
import javax.servlet.ServletContext
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
import org.orbeon.oxf.externalcontext._
import org.orbeon.oxf.http.{Headers, HttpMethod}
import org.orbeon.oxf.pipeline.InitUtils
import org.orbeon.oxf.pipeline.api.PipelineContext
import org.orbeon.oxf.properties.Properties
import org.orbeon.oxf.util.StringUtils._
import org.orbeon.oxf.util._
import scala.collection.JavaConverters._
/*
* Servlet-specific implementation of ExternalContext.
*/
object ServletExternalContext {
val Logger = LoggerFactory.createLogger(classOf[ServletExternalContext])
private val HttpPageCacheHeadersDefault = "Cache-Control: private, max-age=0; Pragma:"
private val HttpPageCacheHeadersProperty = "oxf.http.page.cache-headers"
private val HttpResourceCacheHeadersDefault = "Cache-Control: public; Pragma:"
private val HttpResourceCacheHeadersProperty = "oxf.http.resource.cache-headers"
private val HttpNocacheCacheHeadersDefault = "Cache-Control: no-cache, no-store, must-revalidate; Pragma: no-cache; Expires: 0"
private val HttpNocacheCacheHeadersProperty = "oxf.http.nocache.cache-headers"
lazy val pageCacheHeaders = decodeCacheString(HttpPageCacheHeadersProperty, HttpPageCacheHeadersDefault)
lazy val resourceCacheHeaders = decodeCacheString(HttpResourceCacheHeadersProperty, HttpResourceCacheHeadersDefault)
lazy val nocacheCacheHeaders = decodeCacheString(HttpNocacheCacheHeadersProperty, HttpNocacheCacheHeadersDefault)
private def decodeCacheString(name: String, defaultValue: String): List[(String, String)] =
for {
header ← Properties.instance.getPropertySet.getString(name, defaultValue).splitTo[List](sep = ";")
parts = header.splitTo[List](sep = ":")
if parts.size == 2
name :: value :: Nil = parts
} yield
name.trimAllToEmpty → value.trimAllToEmpty
}
class ServletExternalContext(
val pipelineContext : PipelineContext,
val webAppContext : WebAppContext,
val nativeRequest : HttpServletRequest,
val nativeResponse : HttpServletResponse
) extends ExternalContext {
private class RequestImpl extends ExternalContext.Request with ServletPortletRequest {
private var getParameterMapMultipartFormDataCalled = false
private var getInputStreamCalled = false
private var inputStreamCharsetOpt: Option[String] = None
// Delegate to underlying request
def getPathInfo = nativeRequest.getPathInfo
def getRemoteAddr = nativeRequest.getRemoteAddr
def getAuthType = nativeRequest.getAuthType
def isSecure = nativeRequest.isSecure
def getContentLength = nativeRequest.getContentLength
def getContentType = nativeRequest.getContentType
def getServerName = nativeRequest.getServerName
def getServerPort = nativeRequest.getServerPort
def getMethod = HttpMethod.withNameInsensitive(nativeRequest.getMethod)
def getProtocol = nativeRequest.getProtocol
def getRemoteHost = nativeRequest.getRemoteHost
def getScheme = nativeRequest.getScheme
def getPathTranslated = nativeRequest.getPathTranslated
def getRequestedSessionId = nativeRequest.getRequestedSessionId
def getServletPath = nativeRequest.getServletPath
def getLocale = nativeRequest.getLocale
def getLocales = nativeRequest.getLocales
def isRequestedSessionIdValid = nativeRequest.isRequestedSessionIdValid
def getContainerType = "servlet"
def getContainerNamespace = getResponse.getNamespacePrefix
private def servletIncludeAttributeOpt(name: String) =
Option(nativeRequest.getAttribute(s"javax.servlet.include.$name").asInstanceOf[String])
// NOTE: Servlet 2.4 spec says: "These attributes [javax.servlet.include.*] are accessible from the
// included servlet via the getAttribute method on the request object and their values must be equal to
// the request URI, context path, servlet path, path info, and query string of the included servlet,
// respectively."
// NOTE: This is very different from the similarly-named forward attributes, which reflect the values of the
// first servlet in the chain!
lazy val getContextPath: String =
servletIncludeAttributeOpt("context_path") getOrElse nativeRequest.getContextPath
// Use included / forwarded servlet's value
// NOTE: Servlet 2.4 spec says: "These attributes [javax.servlet.include.*] are accessible from the
// included servlet via the getAttribute method on the request object and their values must be equal to the
// request URI, context path, servlet path, path info, and query string of the included servlet,
// respectively."
// NOTE: This is very different from the similarly-named forward attributes, which reflect the values of the
// first servlet in the chain!
lazy val getQueryString: String =
servletIncludeAttributeOpt("query_string") getOrElse nativeRequest.getQueryString
// Use included / forwarded servlet's value
// NOTE: Servlet 2.4 spec says: "These attributes [javax.servlet.include.*] are accessible from the
// included servlet via the getAttribute method on the request object and their values must be equal to the
// request URI, context path, servlet path, path info, and query string of the included servlet,
// respectively."
// NOTE: This is very different from the similarly-named forward attributes, which reflect the values of the
// first servlet in the chain!
lazy val getRequestURI: String =
servletIncludeAttributeOpt("request_uri") getOrElse nativeRequest.getRequestURI
lazy val getRequestPath = NetUtils.getRequestPathInfo(nativeRequest)
lazy val getAttributesMap: ju.Map[String, AnyRef] = new InitUtils.RequestMap(nativeRequest)
// NOTE: Normalize names to lowercase to ensure consistency between servlet containers
protected[ServletExternalContext] lazy val headerValuesMap: Map[String, Array[String]] = (
for (name ← nativeRequest.getHeaderNames.asScala)
yield name.toLowerCase → StringConversions.stringEnumerationToArray(nativeRequest.getHeaders(name))
).toMap
def getHeaderValuesMap = headerValuesMap.asJava
lazy val getParameterMap: ju.Map[String, Array[AnyRef]] = {
// NOTE: Regular form POST uses application/x-www-form-urlencoded. In this case, the servlet container
// exposes parameters with getParameter*() methods (see SRV.4.1.1).
if ((getContentType ne null) && getContentType.startsWith("multipart/form-data")) {
if (getInputStreamCalled)
throw new IllegalStateException(
s"Cannot call `getParameterMap` after `getInputStream` when a form was posted with `multipart/form-data`"
)
// Decode the multipart data
val result = Multipart.getParameterMapMultipart(pipelineContext, getRequest, ExternalContext.StandardHeaderCharacterEncoding)
// Remember that we were called, so we can display a meaningful exception if getInputStream() is called after this
getParameterMapMultipartFormDataCalled = true
result.asJava
} else {
// Set the input character encoding before getting the stream as this can cause issues with Jetty
handleInputEncoding()
// Just use native request parameters
val paramsIt =
for (name ← nativeRequest.getParameterNames.asScala)
yield name → nativeRequest.getParameterValues(name).asInstanceOf[Array[AnyRef]]
paramsIt.toMap.asJava
}
}
def getSession(create: Boolean): ExternalContext.Session =
ServletExternalContext.this.getSession(create)
def sessionInvalidate(): Unit = {
val session = nativeRequest.getSession(false)
if (session ne null)
session.invalidate()
}
def getCharacterEncoding: String =
inputStreamCharsetOpt getOrElse nativeRequest.getCharacterEncoding
lazy val getRequestURL: String = {
// NOTE: If this is included from a portlet, we may not have a request URL
val requestUrl = nativeRequest.getRequestURL
// TODO: check if we should return null or "" or sth else
if (requestUrl ne null)
requestUrl.toString
else
null
}
def getInputStream: InputStream = {
if (getParameterMapMultipartFormDataCalled)
throw new IllegalStateException(
s"Cannot call `getInputStream` after `getParameterMap` when a form was posted with `multipart/form-data`"
)
// Set the input character encoding before getting the stream as this can cause issues with Jetty
handleInputEncoding()
// Remember that we were called, so we can display a meaningful exception if getParameterMap() is called after this
getInputStreamCalled = true
nativeRequest.getInputStream
}
def getPortletMode = null
def getWindowState = null
def getNativeRequest = nativeRequest
private def handleInputEncoding(): Unit =
if (! getInputStreamCalled)
inputStreamCharsetOpt = Option(
Option(nativeRequest.getCharacterEncoding) match {
case Some(requestCharacterEncoding) ⇒
requestCharacterEncoding
case None ⇒
nativeRequest.setCharacterEncoding(ExternalContext.StandardFormCharacterEncoding)
ExternalContext.StandardFormCharacterEncoding
}
)
}
private class ResponseImpl(urlRewriter: URLRewriter) extends ExternalContext.Response with CachingResponseSupport {
// Delegate to underlying response
def getOutputStream = nativeResponse.getOutputStream
def getWriter = nativeResponse.getWriter
def isCommitted = nativeResponse.isCommitted
def reset() = nativeResponse.reset()
def setContentType(contentType: String) = nativeResponse.setContentType(contentType)
def setHeader(name: String, value: String) = nativeResponse.setHeader(name, value)
def addHeader(name: String, value: String) = nativeResponse.addHeader(name, value)
def setContentLength(len: Int) = nativeResponse.setContentLength(len)
def sendError(code: Int) = nativeResponse.sendError(code)
// We assume below that `nativeResponse.getCharacterEncoding` reflects the encoding set with
// `nativeResponse.setContentType` if any.
def getCharacterEncoding: String =
Option(nativeResponse.getCharacterEncoding) getOrElse
ExternalContext.StandardCharacterEncoding
def setStatus(status: Int): Unit = {
// If anybody ever sets a non-success status code, we disable caching of the output. This covers the
// following scenario:
//
// - request with If-Modified-Since arrives and causes PFC to run
// - oxf:http-serializer runs and sees pipeline NOT cacheable so reads the input
// - during execution of pipeline, HttpStatusCodeException is thrown
// - PFC catches it and calls setStatus()
// - error, not found, or unauthorized pipeline runs
// - oxf:http-serializer runs and sees pipeline IS cacheable so sends a 403
// - client sees wrong result!
if (! NetUtils.isSuccessCode(status))
responseCachingDisabled = true
nativeResponse.setStatus(status)
}
def sendRedirect(location: String, isServerSide: Boolean, isExitPortal: Boolean): Unit =
// Create URL
if (isServerSide) {
// Server-side redirect: do a forward
val requestDispatcher = nativeRequest.getRequestDispatcher(location)
// TODO: handle `isNoRewrite` like in XFormsSubmissionUtils.openOptimizedConnection(): absolute path can then
// be used to redirect to other servlet context
// Destroy the pipeline context before doing the forward. Nothing significant
// should be allowed on "this side" of the forward after the forward return.
pipelineContext.destroy(true)
// Execute the forward
val wrappedRequest = new ForwardServletRequestWrapper(nativeRequest, location)
requestDispatcher.forward(wrappedRequest, nativeResponse)
} else {
// Client-side redirect: send the redirect to the client
nativeResponse.sendRedirect(
if (isEmbedded)
PathUtils.recombineQuery(
URLRewriterUtils.rewriteServiceURL(
getRequest,
location,
URLRewriter.REWRITE_MODE_ABSOLUTE_PATH
),
List("orbeon-embeddable" → "true")
)
else
location
)
}
def getNamespacePrefix: String =
urlRewriter.getNamespacePrefix
def setTitle(title: String) = ()
def getNativeResponse: AnyRef = nativeResponse
def rewriteActionURL(urlString: String): String =
urlRewriter.rewriteActionURL(urlString)
def rewriteRenderURL(urlString: String): String =
urlRewriter.rewriteRenderURL(urlString)
def rewriteActionURL(urlString: String, portletMode: String, windowState: String): String =
urlRewriter.rewriteActionURL(urlString, portletMode, windowState)
def rewriteRenderURL(urlString: String, portletMode: String, windowState: String): String =
urlRewriter.rewriteRenderURL(urlString, portletMode, windowState)
def rewriteResourceURL(urlString: String, rewriteMode: Int): String =
urlRewriter.rewriteResourceURL(urlString, rewriteMode)
}
def getWebAppContext: WebAppContext = webAppContext
private lazy val requestImpl = new RequestImpl
def getRequest: ExternalContext.Request = requestImpl
private def isEmbedded: Boolean = {
// NOTE: use request.getHeaderValuesMap() which normalizes header names to lowercase. This is important if
// the headers map is generated internally as in that case it might be lowercase already.
val clientHeaderOpt = Headers.firstHeaderIgnoreCase(requestImpl.headerValuesMap, Headers.OrbeonClient)
clientHeaderOpt exists Headers.EmbeddedClientValues.contains
}
// NOTE: This whole logic below could be used by ServletExternalContext and PortletExternalContext
// Check if there is an override of container type. This is currently used by the proxy portlet and by
// XHTMLToPDF, as both require a specific type of URL rewriting to take place. Using this header means that
// using a global property is not required anymore.
lazy val getResponse: ExternalContext.Response =
new ResponseImpl(
if (isEmbedded)
// Always set wsrpEncodeResources to true if the client is a remote portlet
new WSRPURLRewriter(URLRewriterUtils.getPathMatchersCallable, getRequest, wsrpEncodeResources = true)
else
new ServletURLRewriter(getRequest)
)
private var sessionImplOpt: Option[ExternalContext.Session] = None
def getSession(create: Boolean): ExternalContext.Session =
sessionImplOpt getOrElse {
// Force creation if whoever forwarded to us did have a session
// This is to work around a Tomcat issue whereby a session is newly created in the original servlet, but
// somehow we can't know about it when the request is forwarded to us.
val nativeSession = nativeRequest.getSession(
create || getRequest.getAttributesMap.get(OrbeonXFormsFilter.RendererHasSessionAttributeName) == "true"
)
if (nativeSession ne null) {
val newSessionImpl = new ServletSessionImpl(nativeSession)
sessionImplOpt = Some(newSessionImpl)
newSessionImpl
} else
null
}
def getStartLoggerString: String = getRequest.getRequestPath + " - Received request"
def getEndLoggerString : String = getRequest.getRequestPath
def getRequestDispatcher(path: String, isContextRelative: Boolean): ExternalContext.RequestDispatcher = {
val servletContext = webAppContext.getNativeContext.asInstanceOf[ServletContext]
if (isContextRelative) {
// Path is relative to the current context root
val slashServletContext = servletContext.getContext("/")
new ServletToExternalContextRequestDispatcherWrapper(
servletContext.getRequestDispatcher(path),
slashServletContext eq servletContext
)
} else {
// Path is relative to the server document root
val otherServletContext = servletContext.getContext(path)
if (otherServletContext eq null)
return null
val slashServletContext = servletContext.getContext("/")
val (modifiedPath, isDefaultContext) =
if (slashServletContext ne otherServletContext) {
// Remove first path element
val newPath = NetUtils.removeFirstPathElement(path)
if (newPath eq null)
return null
newPath → false
} else {
// No need to remove first path element because the servlet context is ""
path → true
}
new ServletToExternalContextRequestDispatcherWrapper(
otherServletContext.getRequestDispatcher(modifiedPath),
isDefaultContext
)
}
}
}
|
brunobuzzi/orbeon-forms
|
src/main/scala/org/orbeon/oxf/servlet/ServletExternalContext.scala
|
Scala
|
lgpl-2.1
| 18,122
|
/* Title: Pure/System/system_channel.scala
Author: Makarius
Socket-based system channel for inter-process communication.
*/
package isabelle
import java.io.{InputStream, OutputStream}
import java.net.{ServerSocket, InetAddress}
object System_Channel
{
def apply(): System_Channel = new System_Channel
}
class System_Channel private
{
private val server = new ServerSocket(0, 2, InetAddress.getByName("127.0.0.1"))
def params: List[String] = List("127.0.0.1", server.getLocalPort.toString)
def prover_args: List[String] = List("-P", "127.0.0.1:" + server.getLocalPort)
def rendezvous(): (OutputStream, InputStream) =
{
val socket = server.accept
socket.setTcpNoDelay(true)
(socket.getOutputStream, socket.getInputStream)
}
def accepted() { server.close }
}
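// Usage sketch (not part of the original file): the JVM side creates the channel, hands its
// address to the prover process via `prover_args`, and `rendezvous()` blocks until the prover
// connects back on that socket.
//
//   val channel = System_Channel()
//   println(channel.prover_args)          // e.g. List("-P", "127.0.0.1:54321")
//   val (out, in) = channel.rendezvous()  // blocks until the peer connects
//   channel.accepted()                    // stop listening once the connection is accepted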
|
wneuper/libisabelle
|
pide/2015/src/main/scala/System/system_channel.scala
|
Scala
|
mit
| 809
|
package scalapoi
package dragons
import org.apache.poi.hssf.usermodel._
import org.apache.poi.ss.util.WorkbookUtil._
import java.io.{File, FileOutputStream}
import scala.collection.JavaConverters._
import utils._
object Sheet {
// totes unsafe
def create(name: String, wb: Workbook): HSSFSheet = {
val safeName = createSafeSheetName(name)
val sheet = wb.hssfWb.createSheet(safeName)
    val fileOut = new FileOutputStream(wb.file)
    wb.hssfWb.write(fileOut)
    fileOut.close()
sheet
}
}
|
hamishdickson/scalapoi
|
src/main/scala/scalapoi/dragons/Sheet.scala
|
Scala
|
mit
| 513
|
trait Result[+T]
case class Succ[T](m: List[T], rest: List[T]) extends Result[T]
case class Err(msg: String) extends Result[Nothing]
case object EOF extends Result[Nothing]
object ParserCombinator {
trait Parser[T] {
def apply(input: List[T]): Result[T]
}
abstract class PredicateParser[T](pred: T => Boolean) extends Parser[T] {
def apply(input: List[T]): Result[T] = input match {
case Nil => EOF
case head::tail => if (pred(head)) Succ(List(head), tail) else Err("Not matched: " + head.toString)
}
}
case class Pure[T](e: T) extends PredicateParser[T](c => e.equals(c))
case class AnyParser[T]() extends PredicateParser[T](c => true)
case class OptionParser[T](p: Parser[T]) extends Parser[T] {
override def apply(input: List[T]): Result[T] = p(input) match {
case ok@Succ(m, rest) => ok
case _ => Succ(Nil, input)
}
}
case class AlternateParser[T](p: Parser[T], q: Parser[T]) extends Parser[T] {
override def apply(input: List[T]): Result[T] = {
p(input) match {
case a@Succ(m, rest) => a
case _ => q(input) match {
case b@Succ(m, rest) => b
case other@_ => other
}
}
}
}
case class UnionParser[T](p: Parser[T], q: Parser[T]) extends Parser[T] {
override def apply(input: List[T]): Result[T] = {
p(input) match {
case Succ(m0, rest0) => q(rest0) match {
case Succ(m1, rest1) => Succ(m0 ++ m1, rest1)
case other@_ => other
}
case other@_ => other
}
}
}
case class RepeatParser[T](p: Parser[T]) extends Parser[T] {
override def apply(input: List[T]): Result[T] = {
def doMatch(xs: List[T], result: List[T]): Result[T] = xs match {
case Nil => Succ(result, xs)
case r@_ => p(r) match {
case Succ(m, rest) => {
doMatch(rest, result ++ m)
}
case _ => Succ(result, xs)
}
}
doMatch(input, Nil)
}
}
def main(args: Array[String]): Unit = {
val parse = RepeatParser(new UnionParser(Pure('a'), Pure('b')))
println(parse("ababababbabba".toList))
}
}
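// Further usage sketch (illustrative, not in the original file): the combinators work over any
// token type; the expected results follow directly from the definitions above.
//
//   AlternateParser(Pure('a'), Pure('b'))("banana".toList)
//     // => Succ(List('b'), List('a', 'n', 'a', 'n', 'a'))
//   OptionParser(Pure('x'))("abc".toList)
//     // => Succ(Nil, List('a', 'b', 'c'))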
|
binape/code-snippets
|
00-QuickExamples/ParserCombinator.scala
|
Scala
|
mit
| 2,165
|
//
// Scaled - a scalable editor extensible via JVM languages
// http://github.com/scaled/scaled/blob/master/LICENSE
package scaled
import reactual.SignalV
/** Handles the invocation of fns and their binding to names and key combinations. */
abstract class Dispatcher {
/** A signal that is emitted with the name of the `fn` about to be invoked.
* Any changes to the buffer made by hooks registered here will accumulate to the
* to-be-invoked fn for undo purposes. */
def willInvoke :SignalV[String]
/** A signal that is emitted with the name of the `fn` that was just invoked.
* Any changes to the buffer made by hooks registered here will accumulate to the
* just-invoked fn for undo purposes. */
def didInvoke :SignalV[String]
/** The name of the currently executing fn. Is null when no fn is executing. */
def curFn :String
/** The name of the previously executed fn. Will be null until at least one fn has been executed.
* Used by certain fns to specialize their behavior when invoked repeatedly, and for other
* nefarious reasons. */
def prevFn :String
/** Returns the names of all available fns. */
def fns :Set[String]
/** Returns `(defining mode, key sequence, fn name)` for all registered key bindings. */
def triggers :Seq[(String,String,String)]
/** Returns the documentation for `fn` if such fn exists. */
def describeFn (fn :String) :Option[String]
/** Returns the names of all known major modes. */
def majorModes :Set[String]
/** Returns the names of all known minor modes. */
def minorModes :Set[String]
/** Returns a list of all active modes. The last mode will be the major mode. */
def modes :List[Mode]
/** Toggles the activation of the minor mode named `mode`. */
def toggleMode (mode :String) :Unit
/** Resolves the fn named `fn`. If found, it is invoked and `true` is returned. If no fn is found
* with that name, `false` is returned. */
def invoke (fn :String) :Boolean
/** Simulates the press of the key sequence represented by `trigger` (e.g. `C-x C-c`). The
* associated `fn` is resolved and invoked, or the major mode's `missedFn` is invoked if no
* bound `fn` could be found. */
def press (trigger :String) :Unit
}
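// Usage sketch (illustrative only; `disp` is a hypothetical concrete Dispatcher instance):
//
//   disp.fns                    // names of all available fns
//   disp.invoke("save-buffer")  // false if no fn of that name is bound
//   disp.press("C-x C-c")       // simulates the key sequence, falling back to the mode's missedFn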
|
swhgoon/scaled
|
api/src/main/scala/scaled/Dispatcher.scala
|
Scala
|
bsd-3-clause
| 2,249
|
/**
* * Copyright (C) 2013 Carnegie Mellon University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tdb.util
import scala.collection.mutable.Buffer
import tdb.list.ListInput
class LiveJournalData(input: ListInput[Int, Array[Int]])
extends Data[Int, Array[Int]] {
val file = "data.txt"
def generate() {
var lines = scala.io.Source.fromFile("livejournal.txt").getLines().toBuffer
var links = Buffer[Int]()
var currentFrom = -1
    for (line <- lines) {
      if (!line.startsWith("#")) {
        val split = line.split("\\t")
        val from = split(0).toInt
        if (from == currentFrom) {
          // Same source vertex: append the destination vertex of this edge.
          links += split(1).toInt
        } else {
          // New source vertex: flush the adjacency list collected for the previous one.
          if (currentFrom != -1) {
            table(currentFrom) = links.toArray
          }
          links = Buffer(split(1).toInt)
          currentFrom = from
        }
      }
    }
}
def load() = {
for ((key, value) <- table) {
input.put(key, value)
}
}
def update(): Int = 0
def hasUpdates(): Boolean = false
}
|
twmarshall/tdb
|
core/src/main/scala/tdb/util/LiveJournalData.scala
|
Scala
|
apache-2.0
| 1,494
|
package controllers
import models.{ Model, ModelCompanion}
import services.DatabaseService
import sql.ApiTable
import java.util.UUID
import scala.concurrent.Future
import scala.async.Async.{ async, await }
import scala.util.{ Try, Success, Failure }
import play.api._
import play.api.mvc._
import play.api.libs.json.{ Json, Format, JsObject }
import play.api.libs.concurrent.Execution.Implicits._
abstract class ApiModelController[T <: Model[_], U <: ApiTable[T]](companion: ModelCompanion[T],
service: DatabaseService[T,U]) extends ApiController {
implicit val format: Format[T] = companion.format
def list(offset: Int, limit: Int) = CORSAction { implicit request =>
service.list(offset, limit).map(ms => Ok(Json.toJson(ms)))
}
def create = CORSAction { implicit request =>
withJson[T] { model =>
service.insert(model).map(_ => Ok(s"""{"created": ${model.id}}"""))
}
}
def show(id: UUID) = CORSAction { implicit request =>
service.findById(id).map {
case Some(model) => Ok(Json.toJson(model))
case None => NotFound
}
}
def delete(id: UUID) = CORSAction { implicit request =>
service.delete(id).map(id => Ok(s"""{"deleted": "${id}"}"""))
}
def update(id: UUID) = CORSAction { implicit request =>
withJson[JsObject] { updateObj =>
async {
val mOpt = await(service.findById(id))
if (mOpt.isDefined) {
val patched = companion.patch(mOpt.get, updateObj)
val success = await(service.update(id, patched))
Ok(Json.toJson(patched))
} else {
NotFound
}
}
}
}
def replace(id: UUID) = CORSAction { implicit request =>
withJson[T] { model =>
service.update(id, model).map(_ => Ok)
}
}
}
|
gilbertw1/personal-api
|
app/controllers/ApiModelController.scala
|
Scala
|
gpl-2.0
| 1,824
|
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.config
import com.twitter.zipkin.storage.cassandra.CassandraAggregates
import com.twitter.cassie.codecs.{LongCodec, Utf8Codec}
import com.twitter.cassie.{ColumnFamily, ReadConsistency, WriteConsistency}
trait CassandraAggregatesConfig extends AggregatesConfig { self =>
def cassandraConfig: CassandraConfig
var topAnnotationsCf: String = "TopAnnotations"
def apply(): CassandraAggregates = {
val _topAnnotations = cassandraConfig.keyspace.columnFamily[String, Long, String](
      topAnnotationsCf, Utf8Codec, LongCodec, Utf8Codec
).consistency(WriteConsistency.One).consistency(ReadConsistency.One)
new CassandraAggregates {
val topAnnotations: ColumnFamily[String, Long, String] = _topAnnotations
}
}
}
|
rodzyn0688/zipkin
|
zipkin-server/src/main/scala/com/twitter/zipkin/config/CassandraAggregatesConfig.scala
|
Scala
|
apache-2.0
| 1,363
|
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.box._
case class AC77(value: Option[Int]) extends CtBoxIdentifier(name = "Revaluation reserve (previous PoA)")
with CtOptionalInteger
with Input
with ValidatableBox[Frs102AccountsBoxRetriever]
with Validators {
override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] = {
collectErrors(
validateMoney(value)
)
}
}
|
liquidarmour/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC77.scala
|
Scala
|
apache-2.0
| 1,118
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.data.store.java
import org.apache.predictionio.data.storage.Event
import org.apache.predictionio.data.storage.PropertyMap
import org.apache.predictionio.data.store.PEventStore
import org.apache.spark.SparkContext
import org.apache.spark.api.java.JavaRDD
import org.joda.time.DateTime
import scala.collection.JavaConversions
/** This Java-friendly object provides a set of operations to access Event Store
* with Spark's parallelization
*/
object PJavaEventStore {
/** Read events from Event Store
*
* @param appName return events of this app
* @param channelName return events of this channel (default channel if it's None)
* @param startTime return events with eventTime >= startTime
* @param untilTime return events with eventTime < untilTime
* @param entityType return events of this entityType
* @param entityId return events of this entityId
* @param eventNames return events with any of these event names.
* @param targetEntityType return events of this targetEntityType:
* - None means no restriction on targetEntityType
* - Some(None) means no targetEntityType for this event
* - Some(Some(x)) means targetEntityType should match x.
* @param targetEntityId return events of this targetEntityId
* - None means no restriction on targetEntityId
* - Some(None) means no targetEntityId for this event
* - Some(Some(x)) means targetEntityId should match x.
* @param sc Spark context
* @return JavaRDD[Event]
*/
def find(
appName: String,
channelName: Option[String],
startTime: Option[DateTime],
untilTime: Option[DateTime],
entityType: Option[String],
entityId: Option[String],
eventNames: Option[java.util.List[String]],
targetEntityType: Option[Option[String]],
targetEntityId: Option[Option[String]],
sc: SparkContext): JavaRDD[Event] = {
val eventNamesSeq = eventNames.map(JavaConversions.asScalaBuffer(_).toSeq)
PEventStore.find(
appName,
channelName,
startTime,
untilTime,
entityType,
entityId,
eventNamesSeq,
targetEntityType,
targetEntityId
)(sc)
}
/** Aggregate properties of entities based on these special events:
* \$set, \$unset, \$delete events.
*
* @param appName use events of this app
* @param entityType aggregate properties of the entities of this entityType
* @param channelName use events of this channel (default channel if it's None)
* @param startTime use events with eventTime >= startTime
* @param untilTime use events with eventTime < untilTime
* @param required only keep entities with these required properties defined
* @param sc Spark context
    * @return JavaRDD[(String, PropertyMap)] JavaRDD of entityId and PropertyMap pairs
*/
def aggregateProperties(
appName: String,
entityType: String,
channelName: Option[String],
startTime: Option[DateTime],
untilTime: Option[DateTime],
required: Option[java.util.List[String]],
sc: SparkContext): JavaRDD[(String, PropertyMap)] = {
PEventStore.aggregateProperties(
appName,
entityType,
channelName,
startTime,
untilTime
)(sc)
}
}
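// Usage sketch (not part of the original source): reading all "view" events of an app's "user"
// entities from the default channel. Note the Option-of-Option encoding documented above for
// targetEntityType / targetEntityId; `sc` is an existing SparkContext.
//
//   val events: JavaRDD[Event] = PJavaEventStore.find(
//     "MyApp", None, None, None,
//     Some("user"), None,
//     Some(java.util.Arrays.asList("view")),
//     None, None,
//     sc)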
|
pferrel/PredictionIO
|
data/src/main/scala/org/apache/predictionio/data/store/java/PJavaEventStore.scala
|
Scala
|
apache-2.0
| 4,059
|
/**
* Copyright (c) 2013-2015 Patrick Nicolas - Scala for Machine Learning - All rights reserved
*
* The source code in this file is provided by the author for the sole purpose of illustrating the
* concepts and algorithms presented in "Scala for Machine Learning". It should not be used to
* build commercial applications.
* ISBN: 978-1-783355-874-2 Packt Publishing.
* Unless required by applicable law or agreed to in writing, software is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* Version 0.98
*/
package org.scalaml.trading
import org.scalaml.ga.{Operator, Gene, Discretization}
import org.scalaml.core.Types.ScalaMl._
import Signal._
import scala.collection.mutable.ListBuffer
import org.scalaml.core.XTSeries
import org.scalaml.util.DisplayUtils
import scala.collection.mutable.TreeSet
/**
 * <p>Trading strategy defined as a list of trading signals. The signals are combined with the
 * boolean OR operator: IF(signal1 == true OR signal2 == true OR ...).</p>
 * @constructor Create an instance of a trading strategy
 * @throws IllegalArgumentException if the list of signals is either undefined or empty
 * @param name Identifier or name of the strategy
 * @param signals List or sequence of trading signals used in this strategy.
 *
 * @author Patrick Nicolas
 * @since May 7, 2014
 * @note Scala for Machine Learning Appendix/Finances 101
*/
case class TradingStrategy(val name: String ="", signals: List[Signal]) {
require( !signals.isEmpty, s"TradingStrategy The list of signals is undefined")
}
/**
 * <p>Factory for trading strategies. The factory collects all the trading signals needed
 * to implement the trading strategy. The strategies are generated, once and only once when
 * requested, as the list of all combinations of nSignals trading signals. The factory
 * is mainly used for initializing the population for the genetic algorithm or the
 * extended learning classifiers.</p>
 * @constructor Instantiate a factory for all the combinations of nSignals trading signals.
* @throws IllegalArgumentException if the number of signals is less than 1
* @param nSignals Number of trading signals used in any trading strategy.
* @param discr Discretization function to convert signal to discrete value and vice versa
*
* @author Patrick Nicolas
* @since May 7, 2014
* @note Scala for Machine Learning Chapter 10: Genetic Algorithms
*/
class StrategyFactory(nSignals: Int) (implicit discr: Discretization){
import org.scalaml.ga.Chromosome
import Chromosome._
require(nSignals > 0, s"StrategyFactory Number of signals $nSignals should be >0")
private[this] val signals = new ListBuffer[Signal]
/**
	 * <p>Create and add a new signal to the pool of this factory. The signal is defined by
	 * its identifier, id, target value, operator, the observations it acts upon and, optionally, the weights.</p>
* @param id Identifier for the signal created and collected
* @param target target value (or threshold) for the signal created and collected
* @param op Operator of type SOperator of the signal added to the pool
* @param obs Observations or scalar time series used by the signal added to the pool
* @param weights weights for the observations used by the signal (optional)
*/
def += (id: String, target: Double, op: Operator, obs: DblVector, weights: DblVector): Unit = {
checkArguments(obs, weights)
signals.append(Signal(id, target, op, obs, weights) )
}
/**
* <p>Create and add a new signal to the pool of this factory. The signal is defined by its identifier, id,
	 * target value, operator, the observations it acts upon and, optionally, the weights.</p>
* @param id Identifier for the signal created and collected
* @param target target value (or threshold) for the signal created and collected
* @param op Operator of type SOperator of the signal added to the pool
* @param xt Scalar time series used by the signal added to the pool
* @param weights weights for the observations used by the signal (optional)
*/
def += (id: String, target: Double, op: Operator, xt: XTSeries[Double], weights: DblVector): Unit = {
checkArguments(xt.toArray, weights)
signals.append(Signal(id, target, op, xt.toArray,weights) )
}
/**
	 * <p>Generates the trading strategies as all unique combinations of <b>nSignals</b>
	 * signals currently in the pool of signals. The list of strategies is computed on demand,
	 * only once.</p>
* @return strategies extracted from the pool of signals.
*/
lazy val strategies: Pool[Signal] = {
implicit val ordered = Signal.orderedSignals
val xss = new Pool[Signal]
val treeSet = new TreeSet[Signal] ++= signals.toList
val subsetsIterator = treeSet.subsets(nSignals)
while( subsetsIterator.hasNext) {
val subset = subsetsIterator.next
val signalList: List[Signal] = subset.toList
xss.append(Chromosome[Signal](signalList))
}
xss
}
private def checkArguments(xt: DblVector, weights: DblVector): Unit = {
require( !xt.isEmpty,
"StrategyFactory.checkArgument Input to this trading strategy is undefined")
require( !weights.isEmpty,
"StrategyFactory.checkArgument Input to this trading strategy is undefined")
}
}
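// Usage sketch (hypothetical values: `LESS_THAN` stands for a concrete SOperator and `prices`,
// `volatility`, `weights` for observation/weight vectors; none of them are defined in this file).
// An implicit Discretization must also be in scope, as required by the constructor above.
//
//   val factory = new StrategyFactory(nSignals = 2)
//   factory += ("sma crossover", 0.05, LESS_THAN, prices, weights)
//   factory += ("volatility cap", 0.30, LESS_THAN, volatility, weights)
//   val pool: Pool[Signal] = factory.strategies  // all 2-signal combinations, computed once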
// ------------------------ EOF --------------------------------------------------------
|
batermj/algorithm-challenger
|
books/cs/machine-learning/scala-for-machine-learning/1rst-edition/original-src-from-the-book/src/main/scala/org/scalaml/trading/TradingStrategy.scala
|
Scala
|
apache-2.0
| 5,426
|
package monocle.law.discipline.function
import monocle.function.Cons1._
import monocle.function._
import monocle.law.discipline.{IsoTests, LensTests}
import org.scalacheck.Arbitrary
import org.typelevel.discipline.Laws
import monocle.catssupport.Implicits._
import cats.instances.tuple._
object Cons1Tests extends Laws {
def apply[S: Equal : Arbitrary, H: Equal : Arbitrary, T: Equal : Arbitrary](implicit evCons1: Cons1[S, H, T],
arbHTHT: Arbitrary[((H,T)) => ((H,T))], arbHH: Arbitrary[H => H], arbTT: Arbitrary[T => T]): RuleSet =
new SimpleRuleSet("Cons1",
IsoTests(cons1[S, H, T]).props ++
LensTests(head[S, H, T]).props ++
LensTests(tail[S, H, T]).props: _*)
}
|
fkz/Monocle
|
law/shared/src/main/scala/monocle/law/discipline/function/Cons1Tests.scala
|
Scala
|
mit
| 700
|
/*
* Copyright 2012 Eike Kettner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.publet.web
import util.{PubletWeb, PropertiesMap}
import org.eknet.publet.vfs.{ContentResource, Path, Container}
import org.eknet.publet.Publet
import com.google.common.eventbus.{Subscribe, EventBus}
import com.google.inject.{Singleton, Inject}
import com.google.inject.name.Named
import org.eknet.publet.event.Event
import org.eknet.publet.vfs.events.ContentWrittenEvent
import grizzled.slf4j.Logging
/**
* Represents the `settings.properties` file.
*
* @author Eike Kettner eike.kettner@gmail.com
* @since 10.10.12 21:27
*/
@Singleton
class Settings @Inject() (@Named("contentroot") contentRoot: Container, eventBus: EventBus) extends PropertiesMap(eventBus) with Logging {
private var lastModification: Option[Long] = None
//initial load
reload()
@Subscribe
def reloadOnChange(event: ContentWrittenEvent) {
reloadIfChanged()
}
// @Subscribe
// def reloadOnPush(event: PostReceiveEvent) {
// reloadIfChanged()
// }
def reloadIfChanged() {
getSettingsResource map { newFile =>
if (lastModification.getOrElse(0L) != newFile.lastModification.getOrElse(0L)) {
info("Reload settings due to file change")
reload()
}
}
}
private def getSettingsResource = contentRoot.lookup(Path(Publet.allIncludes+"config/settings.properties"))
override def file = getSettingsResource
.collect({case cc: ContentResource => lastModification = cc.lastModification; cc})
.map(_.inputStream)
protected def createEvent() = SettingsReloadedEvent(this)
}
object Settings {
def get = PubletWeb.publetSettings
def apply(key: String) = get(key)
}
case class SettingsReloadedEvent(settings: Settings) extends Event
|
eikek/publet
|
web/src/main/scala/org/eknet/publet/web/Settings.scala
|
Scala
|
apache-2.0
| 2,295
|
import sbt._
import sbt.Keys._
object Build extends sbt.Build {
lazy val project = Project(
id = "akka-actor-statsd",
base = file("."),
settings = Defaults.coreDefaultSettings ++ Seq(
name := "akka-actor-statsd",
organization := "com.deploymentzone",
version := "0.4-SNAPSHOT",
licenses := Seq("MIT" -> url("http://opensource.org/licenses/MIT")),
homepage := Some(url("https://github.com/cfeduke/akka-actor-statsd/")),
scalaVersion := "2.11.7",
scalacOptions := Seq("-deprecation", "-feature", "-encoding", "utf8"),
libraryDependencies ++= Dependencies.allDeps,
publishMavenStyle := true,
publishTo := {
val nexus = "https://oss.sonatype.org/"
if (version.value.trim.endsWith("SNAPSHOT"))
Some("snapshots" at nexus + "content/repositories/snapshots")
else
Some("releases" at nexus + "service/local/staging/deploy/maven2")
},
pomIncludeRepository := { _ => false },
pomExtra :=
<scm>
<url>git@github.com:cfeduke/akka-actor-statsd.git</url>
<connection>scm:git:git@github.com:cfeduke/akka-actor-statsd.git</connection>
</scm>
<developers>
<developer>
<id>cfeduke</id>
<name>Charles Feduke</name>
<url>http://www.deploymentzone.com</url>
</developer>
</developers>
)
)
object Dependencies {
object Versions {
val akka = "2.3.11"
val scalatest = "2.2.6"
val logback = "1.1.6"
}
val compileDependencies = Seq(
"com.typesafe.akka" %% "akka-actor" % Versions.akka,
"com.typesafe.akka" %% "akka-slf4j" % Versions.akka,
"ch.qos.logback" % "logback-classic" % Versions.logback
)
val testDependencies = Seq(
"com.typesafe.akka" %% "akka-testkit" % Versions.akka % "test",
"org.scalatest" %% "scalatest" % Versions.scalatest % "test"
)
val allDeps: Seq[ModuleID] = compileDependencies ++ testDependencies
}
}
|
edmundnoble/akka-actor-statsd
|
project/Build.scala
|
Scala
|
mit
| 2,232
|
package io.youi.event
import io.youi._
import io.youi.component.Component
import reactify.Channel
class Swipe(component: Component,
events: Events,
onlyMobile: Boolean,
directions: Set[Swipe.Direction] = Swipe.Direction.All) {
private var dragging: Option[SwipeEvent] = None
lazy val start: Channel[SwipeEvent] = Channel[SwipeEvent]
lazy val move: Channel[SwipeEvent] = Channel[SwipeEvent]
lazy val end: Channel[SwipeEvent] = Channel[SwipeEvent]
events.pointers.dragged.attach { p =>
if (!onlyMobile || isMobileDevice) {
val absX = math.abs(p.movedFromStart.deltaX)
val absY = math.abs(p.movedFromStart.deltaY)
val plane = dragging.map(_.direction.plane).orElse(
if (absX > absY && absX > Swipe.Start) {
Some(Plane.Horizontal)
} else if (absX < absY && absY > Swipe.Start) {
Some(Plane.Vertical)
} else {
None
}
)
val option = plane match {
case Some(Plane.Horizontal) if p.deltaX < 0.0 => Some((Swipe.Direction.Left, p.movedFromStart.deltaX, p.velocityX))
case Some(Plane.Horizontal) => Some((Swipe.Direction.Right, p.movedFromStart.deltaX, p.velocityX))
case Some(Plane.Vertical) if p.deltaY < 0.0 => Some((Swipe.Direction.Up, p.movedFromStart.deltaY, p.velocityY))
case Some(Plane.Vertical) => Some((Swipe.Direction.Down, p.movedFromStart.deltaY, p.velocityY))
case _ => None
}
option.foreach {
case (direction, distance, acceleration) => if (directions.contains(direction)) {
val event = SwipeEvent(direction, p, distance, acceleration)
if (dragging.isEmpty) {
dragging = Some(event)
start @= event
}
move @= event
}
}
}
}
events.pointers.removed.on {
dragging match {
case Some(event) => {
end @= event
dragging = None
}
case _ => // Ignore
}
}
}
object Swipe {
/**
   * The distance a drag must cover before it is considered a swipe.
*/
var Start: Double = 50.0
sealed trait Direction {
def plane: Plane
}
object Direction {
case object Up extends Direction {
override def plane: Plane = Plane.Vertical
}
case object Down extends Direction {
override def plane: Plane = Plane.Vertical
}
case object Left extends Direction {
override def plane: Plane = Plane.Horizontal
}
case object Right extends Direction {
override def plane: Plane = Plane.Horizontal
}
lazy val All: Set[Direction] = Set(Up, Down, Left, Right)
}
}
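// Usage sketch (names are assumptions, not part of this file): attach a Swipe to a component's
// event support and react to the reactify channels it exposes.
//
//   val swipe = new Swipe(myComponent, myEvents, onlyMobile = false)
//   swipe.start.attach(e => println(s"swipe started: ${e.direction}"))
//   swipe.end.attach(e => println(s"swipe finished: ${e.direction}"))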
|
outr/youi
|
gui/src/main/scala/io/youi/event/Swipe.scala
|
Scala
|
mit
| 2,641
|
package examples
import com.github.astonbitecode.di.diDefine
import com.github.astonbitecode.di.inject
object Example1 extends App {
/*
 * The UseCase class contains a val that should be injected. The injected class is of type 'HelpfulClass'.
 * Try to initialize the UseCase before calling diDefine for the service that should be injected into it;
 * this fails because no construction method has been defined yet.
*/
try {
val shouldFail = new UseCase
} catch {
case error: Throwable => println(error.getMessage)
}
/*
* Define a construction method for the 'HelpfulClass'
*/
def c() = new HelpfulClass()
/*
* Call the diDefine for the above
 * This could also be written inline as: diDefine(() => new HelpfulClass())
*/
diDefine(c)
/*
* Now the diDefine has been called for the HelpfulClass, the creation of UseCase succeeds
*/
val shouldSucceed = new UseCase
/*
* This prints 'I am doing something helpful'
*/
shouldSucceed.execute
}
class UseCase {
// Inject a HelpfulClass instance
val helpfulClass = inject[HelpfulClass]
def execute(): Unit = {
// Use the injected helpfulClass
helpfulClass.doSomethingHelpful
}
}
class HelpfulClass {
def doSomethingHelpful = {
println("I am doing something helpful")
}
}
|
astonbitecode/kind-of-di
|
src/main/scala/examples/Example1.scala
|
Scala
|
mit
| 1,260
|
package feh.util.file
import java.io.{FileInputStream, FileOutputStream, File => JFile}
import feh.util._
import scala.util.Try
import scala.language.implicitConversions
trait FileUtilWrappers{
val File: FileUtils
import File._
/** default string to path wrapper, uses File.separator to split the string */
implicit def stringToPath(string: String): Path = Path(string, separatorChar)
implicit class FilePath(val path: Path) {
def file = new JFile(path.toString)
def delete() = Try{file.delete()}
}
implicit class ByteArrayToFileWrapper(arr: Array[Byte]){
def toFile(file: String): JFile = toFile(new JFile(file))
def toFile(file: JFile): JFile = file $$ (_.withOutputStream(_.write(arr)))
}
implicit class StringToFileWrapper(str: String){
def toFile: JFile = new JFile(str)
}
implicit class FileWrapper(file: JFile){
def withOutputStream[R](f: FileOutputStream => R, append: Boolean = false): Try[R] = {
val stream = new FileOutputStream(file, append)
Try(f(stream)) $$ { _ =>
stream.flush()
stream.close()
}
}
def withInputStream[R](f: FileInputStream => R): Try[R] = {
val stream = new FileInputStream(file)
Try(f(stream)) $$ { _ =>
stream.close()
}
}
def mv(path: Path, `override`: Boolean = false) = Try{
val dest = new JFile(path.toString)
if(dest.exists()) if(`override`) dest.delete() else sys.error(s"destination file $dest exists")
file.renameTo(dest)
dest
}
def createIfNotExists() = Try{
if(!file.exists()) createFile(Path.absolute(file.toString, separatorChar))
file
}
def copy(file: JFile, overwrite: Boolean = false): Try[JFile] = cp(Path.absolute(file.toString, separatorChar), overwrite)
def cp(path: Path, overwrite: Boolean = false): Try[JFile] = Try{
val dest = path.file
val parentsDir = path.tail
if(!parentsDir.file.exists()) parentsDir.file.mkdirs().ensuring(b => b,"failed to create parent dirs")
if(dest.exists()) if(overwrite) dest.delete() else sys.error(s"destination file $dest exists")
dest.withOutputStream(write(file)).get
dest
}
def affect(f: JFile => Unit) = Try{
f(file)
file
}
def existing[R](f: JFile => R): Option[R] = if(exists) Some(f(file)) else None
def name = file.getName
def ls(filter: JFile => Boolean = null) = file.listFiles().toList |>> (Option(filter), _.filter)
def dir_? = file.isDirectory
def isDir = file.isDirectory
def exists = file.exists()
def mkDir() = Try{
file.mkdirs()
file
}
def path = file.getAbsolutePath
}
}
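// Usage sketch (assumes an object that mixes in FileUtilWrappers and supplies `File`):
//
//   "/tmp/demo.txt".toFile.withOutputStream(_.write("hello".getBytes))   // Try[Unit]
//   "/tmp".file.ls(_.isFile).foreach(f => println(f.name))               // list plain files
//   "/tmp/demo.txt".toFile.cp("/tmp/demo-copy.txt", overwrite = true)    // Try[JFile]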
|
fehu/util
|
src/main/scala/feh/util/file/FileUtilWrappers.scala
|
Scala
|
mit
| 2,681
|
/*
* Copyright 2001-2015 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.examples.asyncfunspec.ignoreall
import org.scalatest.AsyncFunSpec
import scala.concurrent.Future
import org.scalatest.Ignore
@Ignore
class AddSpec extends AsyncFunSpec {
def addSoon(addends: Int*): Future[Int] = Future { addends.sum }
describe("addSoon") {
it("will eventually compute a sum of passed Ints") {
val futureSum: Future[Int] = addSoon(1, 2)
// You can map assertions onto a Future, then return
// the resulting Future[Assertion] to ScalaTest:
futureSum map { sum => assert(sum == 3) }
}
}
def addNow(addends: Int*): Int = addends.sum
describe("addNow") {
it("will immediately compute a sum of passed Ints") {
val sum: Int = addNow(1, 2)
// You can also write synchronous tests. The body
// must have result type Assertion:
assert(sum == 3)
}
}
}
|
dotty-staging/scalatest
|
examples/src/test/scala/org/scalatest/examples/asyncfunspec/ignoreall/AddSpec.scala
|
Scala
|
apache-2.0
| 1,464
|
package skinny.micro.contrib
import skinny.micro.SkinnyMicroBase
import skinny.micro.base.BeforeAfterDsl
import skinny.micro.context.SkinnyContext
import skinny.micro.contrib.csrf.CSRFTokenGenerator
/**
* Provides cross-site request forgery protection.
*
* If a request is determined to be forged, the `handleForgery()` hook is invoked.
* Otherwise, a token for the next request is prepared with `prepareCsrfToken`.
*/
trait CSRFTokenSupport { this: SkinnyMicroBase with BeforeAfterDsl =>
before(isForged) { handleForgery() }
before() { prepareCsrfToken() }
/**
   * Tests whether a request with an unsafe method is a potential cross-site
* forgery.
*
* @return true if the request is an unsafe method (POST, PUT, DELETE, TRACE,
* CONNECT, PATCH) and the request parameter at `csrfKey` does not match
* the session key of the same name.
*/
protected def isForged: Boolean = {
implicit val ctx = context
!request.requestMethod.isSafe &&
session.get(csrfKey) != params.get(csrfKey) &&
!CSRFTokenSupport.HeaderNames.map(request.headers.get).contains(session.get(csrfKey))
}
/**
* Take an action when a forgery is detected. The default action
* halts further request processing and returns a 403 HTTP status code.
*/
protected def handleForgery(): Unit = {
halt(403, "Request tampering detected!")
}
/**
* Prepares a CSRF token. The default implementation uses `GenerateId`
* and stores it on the session.
*/
// NOTE: keep return type as Any for backward compatibility
protected def prepareCsrfToken(): Any = {
session(context).getOrElseUpdate(csrfKey, CSRFTokenGenerator.apply()).toString
}
/**
* The key used to store the token on the session, as well as the parameter
* of the request.
*/
def csrfKey: String = CSRFTokenSupport.DefaultKey
/**
* Returns the token from the session.
*/
protected[skinny] def csrfToken(implicit context: SkinnyContext): String =
context.request.getSession.getAttribute(csrfKey).asInstanceOf[String]
}
object CSRFTokenSupport {
val DefaultKey = "skinny.micro.CSRFTokenSupport.key"
val HeaderNames = Vector("X-CSRF-TOKEN")
}
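// Usage sketch (the application class is an assumption, not part of this file): mix the trait
// into a SkinnyMicroBase app together with BeforeAfterDsl. Any unsafe request (POST, PUT, ...)
// that does not echo the session's token as a `csrfKey` parameter or an X-CSRF-TOKEN header is
// answered with 403 by `handleForgery()`.
//
//   class MyApp extends WebApp with CSRFTokenSupport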
|
xerial/skinny-micro
|
micro/src/main/scala/skinny/micro/contrib/CSRFTokenSupport.scala
|
Scala
|
bsd-2-clause
| 2,189
|
object Ambiguous
{
implicit val c1: C = ???
implicit val c2: C = ???
implicit def f1: D = ???
implicit def f2: D = ???
implicitly[D]
}
|
tek/splain
|
core/src/test/resources-2.13.7+/latest/splain/plugin/PluginSpec/ambiguous/code.scala
|
Scala
|
mit
| 145
|
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package viper.silicon.supporters
import viper.silver.components.StatefulComponent
import viper.silicon.Config
import viper.silicon.interfaces.{Unreachable, VerificationResult}
import viper.silicon.interfaces.decider.Decider
import viper.silicon.interfaces.state._
import viper.silicon.reporting.Bookkeeper
import viper.silicon.state.terms.{Not, And, Term}
import viper.silicon.state.DefaultContext
import viper.silicon.utils.Counter
trait Brancher[ST <: Store[ST],
H <: Heap[H],
S <: State[ST, H, S],
C <: Context[C]] {
def branch(σ: S,
ts: Term,
c: C,
fTrue: C => VerificationResult,
fFalse: C => VerificationResult)
: VerificationResult
/* TODO: Remove this method, keep only the above */
def branch(σ: S,
ts: List[Term],
c: C,
fTrue: C => VerificationResult,
fFalse: C => VerificationResult)
: VerificationResult
}
/*
* Implementations
*/
trait DefaultBrancher[ST <: Store[ST],
H <: Heap[H],
S <: State[ST, H, S]]
extends Brancher[ST, H, S, DefaultContext[H]]
with StatefulComponent {
private[this] type C = DefaultContext[H]
private var branchCounter: Counter = _
protected val decider: Decider[ST, H, S, C]
protected val config: Config
protected val bookkeeper: Bookkeeper
protected val heapCompressor: HeapCompressor[ST, H, S, C]
import decider.assume
def branch(σ: S,
t: Term,
c: C,
fTrue: C => VerificationResult,
fFalse: C => VerificationResult)
: VerificationResult =
branch(σ, t :: Nil, c, fTrue, fFalse)
def branch(σ: S,
ts: List[Term],
c: C,
fTrue: C => VerificationResult,
fFalse: C => VerificationResult)
: VerificationResult = {
val guardsTrue = And(ts: _*)
val guardsFalse = And(ts map (t => Not(t)): _*)
val exploreTrueBranch = !decider.check(σ, guardsFalse, config.checkTimeout())
val exploreFalseBranch = !exploreTrueBranch || !decider.check(σ, guardsTrue, config.checkTimeout())
val additionalPaths =
if (exploreTrueBranch && exploreFalseBranch) 1
else 0
bookkeeper.branches += additionalPaths
val cnt = branchCounter.next()
/* See comment in DefaultDecider.tryOrFail */
var originalChunks: Option[Iterable[Chunk]] = None
def compressHeapIfRetrying(c: C, σ: S) {
if (c.retrying) {
originalChunks = Some(σ.h.values)
heapCompressor.compress(σ, σ.h, c)
}
}
def restoreHeapIfPreviouslyCompressed(σ: S) {
originalChunks match {
case Some(chunks) => σ.h.replace(chunks)
case None => /* Nothing to do here */
}
}
((if (exploreTrueBranch) {
val cTrue = c//.copy(branchConditions = guardsTrue +: c.branchConditions)
val result =
decider.locally {
decider.prover.logComment(s"[then-branch $cnt] $guardsTrue")
// assume(guardsTrue)
decider.setCurrentBranchCondition(guardsTrue)
compressHeapIfRetrying(cTrue, σ)
val r = fTrue(cTrue)
restoreHeapIfPreviouslyCompressed(σ)
r
}
result
} else {
decider.prover.logComment(s"[dead then-branch $cnt] $guardsTrue")
Unreachable()
})
&&
(if (exploreFalseBranch) {
val cFalse = c//.copy(branchConditions = guardsFalse +: c.branchConditions)
val result =
decider.locally {
decider.prover.logComment(s"[else-branch $cnt] $guardsFalse")
// assume(guardsFalse)
decider.setCurrentBranchCondition(guardsFalse)
compressHeapIfRetrying(cFalse, σ)
val r = fFalse(cFalse)
restoreHeapIfPreviouslyCompressed(σ)
r
}
result
} else {
decider.prover.logComment(s"[dead else-branch $cnt] $guardsFalse")
Unreachable()
}))
}
/* Lifecycle */
abstract override def start() {
super.start()
branchCounter = new Counter()
}
abstract override def reset() {
super.reset()
branchCounter.reset()
}
abstract override def stop() = {
super.stop()
}
}
|
sccblom/vercors
|
viper/silicon/src/main/scala/supporters/Brancher.scala
|
Scala
|
mpl-2.0
| 4,685
|
package s99
import scala.annotation.tailrec
object P01 {
def last[E](list: List[E]): E = {
list match {
case _ :+ e => e
case _ => throw new NoSuchElementException("last of empty list")
}
}
@tailrec
def lastRec[E](list: List[E]): E = {
list match {
case head :: Nil => head
case _ :: tail => lastRec(tail)
case _ => throw new NoSuchElementException("last of empty list")
}
}
}
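// Example (assumes the definitions above):
//   P01.last(List(1, 1, 2, 3, 5, 8))     // => 8
//   P01.lastRec(List(1, 1, 2, 3, 5, 8))  // => 8
//   P01.last(Nil)                        // => throws NoSuchElementException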
|
qilab-/algorithm-problems
|
s-99/src/main/scala/s99/P01.scala
|
Scala
|
unlicense
| 455
|
/*
* Copyright (C) 2009-2011 Mathias Doenitz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.parboiled.scala
import org.parboiled.buffers.{InputBuffer, IndentDedentInputBuffer, DefaultInputBuffer}
/**
* Simple Input abstraction serving as the target of a number of implicit conversions defined in the
* org.parboiled.scala package object.
*/
class Input(val input: Array[Char], bufferCreator: (Array[Char] => InputBuffer) = new DefaultInputBuffer(_)) {
lazy val inputBuffer: InputBuffer = bufferCreator(input)
/**
* Causes the input to be wrapped with a IndentDedentInputBuffer.
* @param tabStop the number of characters in a tab stop.
* @param lineCommentStart the string starting a line comment or null, if line comments are not defined
* @param strict signals whether an IllegalIndentationException should be thrown on
* "semi-dedents", if false the buffer silently accepts these
*/
def transformIndents(tabStop: Int = 2, lineCommentStart: String = null, strict: Boolean = false,
skipEmptyLines: Boolean = true): Input =
new Input(input, new IndentDedentInputBuffer(_, tabStop, lineCommentStart, strict, skipEmptyLines))
}
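// Usage sketch (values are placeholders): wrap the input so that indentation changes are turned
// into INDENT/DEDENT markers before parsing, with 4-column tab stops and `#` line comments.
//
//   val in = new Input("def f:\n    body\n".toCharArray)
//             .transformIndents(tabStop = 4, lineCommentStart = "#")
//   val buffer: InputBuffer = in.inputBuffer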
|
OpenMaths/parboiled
|
parboiled-scala/src/main/scala/org/parboiled/scala/Input.scala
|
Scala
|
apache-2.0
| 1,726
|
package kmeans
import scala.annotation.tailrec
import scala.collection._
import scala.util.Random
import org.scalameter._
import common._
class KMeans {
def generatePoints(k: Int, num: Int): Seq[Point] = {
val randx = new Random(1)
val randy = new Random(3)
val randz = new Random(5)
(0 until num)
.map({ i =>
val x = ((i + 1) % k) * 1.0 / k + randx.nextDouble() * 0.5
val y = ((i + 5) % k) * 1.0 / k + randy.nextDouble() * 0.5
val z = ((i + 7) % k) * 1.0 / k + randz.nextDouble() * 0.5
new Point(x, y, z)
}).to[mutable.ArrayBuffer]
}
def initializeMeans(k: Int, points: Seq[Point]): Seq[Point] = {
val rand = new Random(7)
(0 until k).map(_ => points(rand.nextInt(points.length))).to[mutable.ArrayBuffer]
}
def findClosest(p: Point, means: GenSeq[Point]): Point = {
assert(means.size > 0)
var minDistance = p.squareDistance(means(0))
var closest = means(0)
var i = 1
while (i < means.length) {
val distance = p.squareDistance(means(i))
if (distance < minDistance) {
minDistance = distance
closest = means(i)
}
i += 1
}
closest
}
def classify(points: GenSeq[Point], means: GenSeq[Point]): GenMap[Point, GenSeq[Point]] = {
if (points.isEmpty) means.map(m => m -> GenSeq()).toMap else points.groupBy(point => findClosest(point, means))
}
def findAverage(oldMean: Point, points: GenSeq[Point]): Point = if (points.length == 0) oldMean else {
var x = 0.0
var y = 0.0
var z = 0.0
points.seq.foreach { p =>
x += p.x
y += p.y
z += p.z
}
new Point(x / points.length, y / points.length, z / points.length)
}
def update(classified: GenMap[Point, GenSeq[Point]], oldMeans: GenSeq[Point]): GenSeq[Point] = {
oldMeans.map(point => findAverage(point, classified(point)))
}
def converged(eta: Double)(oldMeans: GenSeq[Point], newMeans: GenSeq[Point]): Boolean = {
oldMeans.zip(newMeans).forall{ case (o, n) => o.squareDistance(n) <= eta }
}
@tailrec
final def kMeans(points: GenSeq[Point], means: GenSeq[Point], eta: Double): GenSeq[Point] = {
val classified = classify(points, means)
val updated = update(classified, means)
    if (!converged(eta)(means, updated)) kMeans(points, updated, eta) else updated // your implementation needs to be tail recursive
}
}
/** Describes one point in three-dimensional space.
*
* Note: deliberately uses reference equality.
*/
class Point(val x: Double, val y: Double, val z: Double) {
private def square(v: Double): Double = v * v
def squareDistance(that: Point): Double = {
square(that.x - x) + square(that.y - y) + square(that.z - z)
}
private def round(v: Double): Double = (v * 100).toInt / 100.0
override def toString = s"(${round(x)}, ${round(y)}, ${round(z)})"
}
object KMeansRunner {
val standardConfig = config(
Key.exec.minWarmupRuns -> 20,
Key.exec.maxWarmupRuns -> 40,
Key.exec.benchRuns -> 25,
Key.verbose -> true
) withWarmer(new Warmer.Default)
def main(args: Array[String]) {
val kMeans = new KMeans()
val numPoints = 500000
val eta = 0.01
val k = 32
val points = kMeans.generatePoints(k, numPoints)
val means = kMeans.initializeMeans(k, points)
val seqtime = standardConfig measure {
kMeans.kMeans(points, means, eta)
}
println(s"sequential time: $seqtime ms")
val partime = standardConfig measure {
val parPoints = points.par
val parMeans = means.par
kMeans.kMeans(parPoints, parMeans, eta)
}
println(s"parallel time: $partime ms")
println(s"speedup: ${seqtime / partime}")
}
}
|
syhan/coursera
|
parprog1/kmeans/src/main/scala/kmeans/KMeans.scala
|
Scala
|
gpl-3.0
| 3,690
|
package parsec
import parsec.optimised.OptimisedParsers
object TestFastParse {
import fastparse.all._
case class NamedFunction[T, V](f: T => V, name: String) extends (T => V) {
def apply(t: T): V = f(t)
override def toString(): String = name
}
val Whitespace = NamedFunction(" \\r\\n".contains(_: Char), "Whitespace")
val space = P(CharsWhile(Whitespace).?)
val `false` = P("false".!)
val `true` = P("true".!)
val booleans = `true` | `false`
val manyBools = P("[" ~/ booleans.rep(sep = ",".~/ ~ space) ~ space ~ "]")
def main(args: Array[String]): Unit = {
println("oh hai!")
import scala.io.Source
val fileName = "data/booleans-6600.json"
val fileContent = Source.fromFile(fileName).mkString
val Parsed.Success(res, rest) = manyBools.parse(fileContent)
println(res.length)
}
}
object Test extends OptimisedParsers {
def main(args: Array[String]): Unit = {
println("greetings lion")
import scala.io.Source
val fileName = "data/booleans-6600.json"
val fileContent = Source.fromFile(fileName).mkString
//val myReader = CharReader(fileContent.toArray)
val myReader = CharReader("421hello people".toArray)
val stringLitParser = optimise {
opt(digit2Int) ~ digit2Int//.map { case (a, b) => a }
}
val Success(res, rest) = stringLitParser(myReader)
println(res)
}
}
|
manojo/parsequery
|
core/src/main/scala/parsec/Test.scala
|
Scala
|
mit
| 1,375
|
object Dependency {
val v = 1
}
|
felixmulder/scala
|
test/files/scalacheck/testdir/dep.scala
|
Scala
|
bsd-3-clause
| 38
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import java.io.{ByteArrayOutputStream, PrintStream}
import java.lang.reflect.InvocationTargetException
import java.net.URI
import java.nio.charset.StandardCharsets
import java.util.{List => JList}
import java.util.jar.JarFile
import scala.collection.JavaConverters._
import scala.collection.mutable.{ArrayBuffer, HashMap}
import scala.io.Source
import org.apache.spark.deploy.SparkSubmitAction._
import org.apache.spark.launcher.SparkSubmitArgumentsParser
import org.apache.spark.util.Utils
/**
* Parses and encapsulates arguments from the spark-submit script.
* The env argument is used for testing.
*/
private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, String] = sys.env)
extends SparkSubmitArgumentsParser {
var master: String = null
var deployMode: String = null
var executorMemory: String = null
var executorCores: String = null
var totalExecutorCores: String = null
var propertiesFile: String = null
var driverMemory: String = null
var driverExtraClassPath: String = null
var driverExtraLibraryPath: String = null
var driverExtraJavaOptions: String = null
var queue: String = null
var numExecutors: String = null
var files: String = null
var archives: String = null
var mainClass: String = null
var primaryResource: String = null
var name: String = null
var childArgs: ArrayBuffer[String] = new ArrayBuffer[String]()
var jars: String = null
var packages: String = null
var repositories: String = null
var ivyRepoPath: String = null
var packagesExclusions: String = null
var verbose: Boolean = false
var isPython: Boolean = false
var pyFiles: String = null
var isR: Boolean = false
var action: SparkSubmitAction = null
val sparkProperties: HashMap[String, String] = new HashMap[String, String]()
var proxyUser: String = null
var principal: String = null
var keytab: String = null
// Standalone cluster mode only
var supervise: Boolean = false
var driverCores: String = null
var submissionToKill: String = null
var submissionToRequestStatusFor: String = null
var useRest: Boolean = true // used internally
/** Default properties present in the currently defined defaults file. */
lazy val defaultSparkProperties: HashMap[String, String] = {
val defaultProperties = new HashMap[String, String]()
// scalastyle:off println
if (verbose) SparkSubmit.printStream.println(s"Using properties file: $propertiesFile")
Option(propertiesFile).foreach { filename =>
Utils.getPropertiesFromFile(filename).foreach { case (k, v) =>
defaultProperties(k) = v
if (verbose) SparkSubmit.printStream.println(s"Adding default property: $k=$v")
}
}
// scalastyle:on println
defaultProperties
}
// Set parameters from command line arguments
try {
parse(args.asJava)
} catch {
case e: IllegalArgumentException =>
SparkSubmit.printErrorAndExit(e.getMessage())
}
// Populate `sparkProperties` map from properties file
mergeDefaultSparkProperties()
// Remove keys that don't start with "spark." from `sparkProperties`.
ignoreNonSparkProperties()
// Use `sparkProperties` map along with env vars to fill in any missing parameters
loadEnvironmentArguments()
validateArguments()
/**
* Merge values from the default properties file with those specified through --conf.
* When this is called, `sparkProperties` is already filled with configs from the latter.
*/
private def mergeDefaultSparkProperties(): Unit = {
// Use common defaults file, if not specified by user
propertiesFile = Option(propertiesFile).getOrElse(Utils.getDefaultPropertiesFile(env))
// Honor --conf before the defaults file
defaultSparkProperties.foreach { case (k, v) =>
if (!sparkProperties.contains(k)) {
sparkProperties(k) = v
}
}
}
/**
* Remove keys that don't start with "spark." from `sparkProperties`.
*/
private def ignoreNonSparkProperties(): Unit = {
sparkProperties.foreach { case (k, v) =>
if (!k.startsWith("spark.")) {
sparkProperties -= k
SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
}
}
}
/**
* Load arguments from environment variables, Spark properties etc.
*/
private def loadEnvironmentArguments(): Unit = {
master = Option(master)
.orElse(sparkProperties.get("spark.master"))
.orElse(env.get("MASTER"))
.orNull
driverExtraClassPath = Option(driverExtraClassPath)
.orElse(sparkProperties.get("spark.driver.extraClassPath"))
.orNull
driverExtraJavaOptions = Option(driverExtraJavaOptions)
.orElse(sparkProperties.get("spark.driver.extraJavaOptions"))
.orNull
driverExtraLibraryPath = Option(driverExtraLibraryPath)
.orElse(sparkProperties.get("spark.driver.extraLibraryPath"))
.orNull
driverMemory = Option(driverMemory)
.orElse(sparkProperties.get("spark.driver.memory"))
.orElse(env.get("SPARK_DRIVER_MEMORY"))
.orNull
driverCores = Option(driverCores)
.orElse(sparkProperties.get("spark.driver.cores"))
.orNull
executorMemory = Option(executorMemory)
.orElse(sparkProperties.get("spark.executor.memory"))
.orElse(env.get("SPARK_EXECUTOR_MEMORY"))
.orNull
executorCores = Option(executorCores)
.orElse(sparkProperties.get("spark.executor.cores"))
.orElse(env.get("SPARK_EXECUTOR_CORES"))
.orNull
totalExecutorCores = Option(totalExecutorCores)
.orElse(sparkProperties.get("spark.cores.max"))
.orNull
name = Option(name).orElse(sparkProperties.get("spark.app.name")).orNull
jars = Option(jars).orElse(sparkProperties.get("spark.jars")).orNull
files = Option(files).orElse(sparkProperties.get("spark.files")).orNull
ivyRepoPath = sparkProperties.get("spark.jars.ivy").orNull
packages = Option(packages).orElse(sparkProperties.get("spark.jars.packages")).orNull
packagesExclusions = Option(packagesExclusions)
.orElse(sparkProperties.get("spark.jars.excludes")).orNull
deployMode = Option(deployMode)
.orElse(sparkProperties.get("spark.submit.deployMode"))
.orElse(env.get("DEPLOY_MODE"))
.orNull
numExecutors = Option(numExecutors)
.getOrElse(sparkProperties.get("spark.executor.instances").orNull)
keytab = Option(keytab).orElse(sparkProperties.get("spark.yarn.keytab")).orNull
principal = Option(principal).orElse(sparkProperties.get("spark.yarn.principal")).orNull
// Try to set main class from JAR if no --class argument is given
if (mainClass == null && !isPython && !isR && primaryResource != null) {
val uri = new URI(primaryResource)
val uriScheme = uri.getScheme()
uriScheme match {
case "file" =>
try {
val jar = new JarFile(uri.getPath)
// Note that this might still return null if no main-class is set; we catch that later
mainClass = jar.getManifest.getMainAttributes.getValue("Main-Class")
} catch {
case e: Exception =>
SparkSubmit.printErrorAndExit(s"Cannot load main class from JAR $primaryResource")
}
case _ =>
SparkSubmit.printErrorAndExit(
s"Cannot load main class from JAR $primaryResource with URI $uriScheme. " +
"Please specify a class through --class.")
}
}
    // Global defaults. These should be kept to a minimum to avoid confusing behavior.
master = Option(master).getOrElse("local[*]")
// In YARN mode, app name can be set via SPARK_YARN_APP_NAME (see SPARK-5222)
if (master.startsWith("yarn")) {
name = Option(name).orElse(env.get("SPARK_YARN_APP_NAME")).orNull
}
// Set name from main class if not given
name = Option(name).orElse(Option(mainClass)).orNull
if (name == null && primaryResource != null) {
name = Utils.stripDirectory(primaryResource)
}
// Action should be SUBMIT unless otherwise specified
action = Option(action).getOrElse(SUBMIT)
}
  /** Ensure that required fields exist. Call this only once all defaults are loaded. */
private def validateArguments(): Unit = {
action match {
case SUBMIT => validateSubmitArguments()
case KILL => validateKillArguments()
case REQUEST_STATUS => validateStatusRequestArguments()
}
}
private def validateSubmitArguments(): Unit = {
if (args.length == 0) {
printUsageAndExit(-1)
}
if (primaryResource == null) {
SparkSubmit.printErrorAndExit("Must specify a primary resource (JAR or Python or R file)")
}
if (mainClass == null && SparkSubmit.isUserJar(primaryResource)) {
SparkSubmit.printErrorAndExit("No main class set in JAR; please specify one with --class")
}
if (pyFiles != null && !isPython) {
SparkSubmit.printErrorAndExit("--py-files given but primary resource is not a Python script")
}
if (master.startsWith("yarn")) {
val hasHadoopEnv = env.contains("HADOOP_CONF_DIR") || env.contains("YARN_CONF_DIR")
if (!hasHadoopEnv && !Utils.isTesting) {
throw new Exception(s"When running with master '$master' " +
"either HADOOP_CONF_DIR or YARN_CONF_DIR must be set in the environment.")
}
}
if (proxyUser != null && principal != null) {
SparkSubmit.printErrorAndExit("Only one of --proxy-user or --principal can be provided.")
}
}
private def validateKillArguments(): Unit = {
if (!master.startsWith("spark://") && !master.startsWith("mesos://")) {
SparkSubmit.printErrorAndExit(
"Killing submissions is only supported in standalone or Mesos mode!")
}
if (submissionToKill == null) {
SparkSubmit.printErrorAndExit("Please specify a submission to kill.")
}
}
private def validateStatusRequestArguments(): Unit = {
if (!master.startsWith("spark://") && !master.startsWith("mesos://")) {
SparkSubmit.printErrorAndExit(
"Requesting submission statuses is only supported in standalone or Mesos mode!")
}
if (submissionToRequestStatusFor == null) {
SparkSubmit.printErrorAndExit("Please specify a submission to request status for.")
}
}
def isStandaloneCluster: Boolean = {
master.startsWith("spark://") && deployMode == "cluster"
}
override def toString: String = {
s"""Parsed arguments:
| master $master
| deployMode $deployMode
| executorMemory $executorMemory
| executorCores $executorCores
| totalExecutorCores $totalExecutorCores
| propertiesFile $propertiesFile
| driverMemory $driverMemory
| driverCores $driverCores
| driverExtraClassPath $driverExtraClassPath
| driverExtraLibraryPath $driverExtraLibraryPath
| driverExtraJavaOptions $driverExtraJavaOptions
| supervise $supervise
| queue $queue
| numExecutors $numExecutors
| files $files
| pyFiles $pyFiles
| archives $archives
| mainClass $mainClass
| primaryResource $primaryResource
| name $name
| childArgs [${childArgs.mkString(" ")}]
| jars $jars
| packages $packages
| packagesExclusions $packagesExclusions
| repositories $repositories
| verbose $verbose
|
|Spark properties used, including those specified through
| --conf and those from the properties file $propertiesFile:
|${sparkProperties.mkString(" ", "\n ", "\n")}
""".stripMargin
}
/** Fill in values by parsing user options. */
override protected def handle(opt: String, value: String): Boolean = {
opt match {
case NAME =>
name = value
case MASTER =>
master = value
case CLASS =>
mainClass = value
case DEPLOY_MODE =>
if (value != "client" && value != "cluster") {
SparkSubmit.printErrorAndExit("--deploy-mode must be either \"client\" or \"cluster\"")
}
deployMode = value
case NUM_EXECUTORS =>
numExecutors = value
case TOTAL_EXECUTOR_CORES =>
totalExecutorCores = value
case EXECUTOR_CORES =>
executorCores = value
case EXECUTOR_MEMORY =>
executorMemory = value
case DRIVER_MEMORY =>
driverMemory = value
case DRIVER_CORES =>
driverCores = value
case DRIVER_CLASS_PATH =>
driverExtraClassPath = value
case DRIVER_JAVA_OPTIONS =>
driverExtraJavaOptions = value
case DRIVER_LIBRARY_PATH =>
driverExtraLibraryPath = value
case PROPERTIES_FILE =>
propertiesFile = value
case KILL_SUBMISSION =>
submissionToKill = value
if (action != null) {
SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $KILL.")
}
action = KILL
case STATUS =>
submissionToRequestStatusFor = value
if (action != null) {
SparkSubmit.printErrorAndExit(s"Action cannot be both $action and $REQUEST_STATUS.")
}
action = REQUEST_STATUS
case SUPERVISE =>
supervise = true
case QUEUE =>
queue = value
case FILES =>
files = Utils.resolveURIs(value)
case PY_FILES =>
pyFiles = Utils.resolveURIs(value)
case ARCHIVES =>
archives = Utils.resolveURIs(value)
case JARS =>
jars = Utils.resolveURIs(value)
case PACKAGES =>
packages = value
case PACKAGES_EXCLUDE =>
packagesExclusions = value
case REPOSITORIES =>
repositories = value
case CONF =>
val (confName, confValue) = SparkSubmit.parseSparkConfProperty(value)
sparkProperties(confName) = confValue
case PROXY_USER =>
proxyUser = value
case PRINCIPAL =>
principal = value
case KEYTAB =>
keytab = value
case HELP =>
printUsageAndExit(0)
case VERBOSE =>
verbose = true
case VERSION =>
SparkSubmit.printVersionAndExit()
case USAGE_ERROR =>
printUsageAndExit(1)
case _ =>
throw new IllegalArgumentException(s"Unexpected argument '$opt'.")
}
true
}
/**
* Handle unrecognized command line options.
*
* The first unrecognized option is treated as the "primary resource". Everything else is
* treated as application arguments.
*/
override protected def handleUnknown(opt: String): Boolean = {
if (opt.startsWith("-")) {
SparkSubmit.printErrorAndExit(s"Unrecognized option '$opt'.")
}
primaryResource =
if (!SparkSubmit.isShell(opt) && !SparkSubmit.isInternal(opt)) {
Utils.resolveURI(opt).toString
} else {
opt
}
isPython = SparkSubmit.isPython(opt)
isR = SparkSubmit.isR(opt)
false
}
override protected def handleExtraArgs(extra: JList[String]): Unit = {
childArgs ++= extra.asScala
}
private def printUsageAndExit(exitCode: Int, unknownParam: Any = null): Unit = {
// scalastyle:off println
val outStream = SparkSubmit.printStream
if (unknownParam != null) {
outStream.println("Unknown/unsupported param " + unknownParam)
}
val command = sys.env.get("_SPARK_CMD_USAGE").getOrElse(
"""Usage: spark-submit [options] <app jar | python file> [app arguments]
|Usage: spark-submit --kill [submission ID] --master [spark://...]
|Usage: spark-submit --status [submission ID] --master [spark://...]
|Usage: spark-submit run-example [options] example-class [example args]""".stripMargin)
outStream.println(command)
val mem_mb = Utils.DEFAULT_DRIVER_MEM_MB
outStream.println(
s"""
|Options:
| --master MASTER_URL spark://host:port, mesos://host:port, yarn, or local.
| --deploy-mode DEPLOY_MODE Whether to launch the driver program locally ("client") or
| on one of the worker machines inside the cluster ("cluster")
| (Default: client).
| --class CLASS_NAME Your application's main class (for Java / Scala apps).
| --name NAME A name of your application.
| --jars JARS Comma-separated list of local jars to include on the driver
| and executor classpaths.
| --packages Comma-separated list of maven coordinates of jars to include
| on the driver and executor classpaths. Will search the local
| maven repo, then maven central and any additional remote
| repositories given by --repositories. The format for the
| coordinates should be groupId:artifactId:version.
| --exclude-packages Comma-separated list of groupId:artifactId, to exclude while
| resolving the dependencies provided in --packages to avoid
| dependency conflicts.
| --repositories Comma-separated list of additional remote repositories to
| search for the maven coordinates given with --packages.
| --py-files PY_FILES Comma-separated list of .zip, .egg, or .py files to place
| on the PYTHONPATH for Python apps.
| --files FILES Comma-separated list of files to be placed in the working
| directory of each executor.
|
| --conf PROP=VALUE Arbitrary Spark configuration property.
| --properties-file FILE Path to a file from which to load extra properties. If not
| specified, this will look for conf/spark-defaults.conf.
|
| --driver-memory MEM Memory for driver (e.g. 1000M, 2G) (Default: ${mem_mb}M).
| --driver-java-options Extra Java options to pass to the driver.
| --driver-library-path Extra library path entries to pass to the driver.
| --driver-class-path Extra class path entries to pass to the driver. Note that
| jars added with --jars are automatically included in the
| classpath.
|
| --executor-memory MEM Memory per executor (e.g. 1000M, 2G) (Default: 1G).
|
| --proxy-user NAME User to impersonate when submitting the application.
| This argument does not work with --principal / --keytab.
|
| --help, -h Show this help message and exit.
| --verbose, -v Print additional debug output.
| --version, Print the version of current Spark.
|
| Spark standalone with cluster deploy mode only:
| --driver-cores NUM Cores for driver (Default: 1).
|
| Spark standalone or Mesos with cluster deploy mode only:
| --supervise If given, restarts the driver on failure.
| --kill SUBMISSION_ID If given, kills the driver specified.
| --status SUBMISSION_ID If given, requests the status of the driver specified.
|
| Spark standalone and Mesos only:
| --total-executor-cores NUM Total cores for all executors.
|
| Spark standalone and YARN only:
| --executor-cores NUM Number of cores per executor. (Default: 1 in YARN mode,
| or all available cores on the worker in standalone mode)
|
| YARN-only:
| --driver-cores NUM Number of cores used by the driver, only in cluster mode
| (Default: 1).
| --queue QUEUE_NAME The YARN queue to submit to (Default: "default").
| --num-executors NUM Number of executors to launch (Default: 2).
| If dynamic allocation is enabled, the initial number of
| executors will be at least NUM.
| --archives ARCHIVES Comma separated list of archives to be extracted into the
| working directory of each executor.
| --principal PRINCIPAL Principal to be used to login to KDC, while running on
| secure HDFS.
| --keytab KEYTAB The full path to the file that contains the keytab for the
| principal specified above. This keytab will be copied to
| the node running the Application Master via the Secure
| Distributed Cache, for renewing the login tickets and the
| delegation tokens periodically.
""".stripMargin
)
if (SparkSubmit.isSqlShell(mainClass)) {
outStream.println("CLI options:")
outStream.println(getSqlShellOptions())
}
// scalastyle:on println
SparkSubmit.exitFn(exitCode)
}
/**
* Run the Spark SQL CLI main class with the "--help" option and catch its output. Then filter
* the results to remove unwanted lines.
*
* Since the CLI will call `System.exit()`, we install a security manager to prevent that call
* from working, and restore the original one afterwards.
*/
private def getSqlShellOptions(): String = {
val currentOut = System.out
val currentErr = System.err
val currentSm = System.getSecurityManager()
try {
val out = new ByteArrayOutputStream()
val stream = new PrintStream(out)
System.setOut(stream)
System.setErr(stream)
val sm = new SecurityManager() {
override def checkExit(status: Int): Unit = {
throw new SecurityException()
}
override def checkPermission(perm: java.security.Permission): Unit = {}
}
System.setSecurityManager(sm)
try {
Utils.classForName(mainClass).getMethod("main", classOf[Array[String]])
.invoke(null, Array(HELP))
} catch {
case e: InvocationTargetException =>
// Ignore SecurityException, since we throw it above.
if (!e.getCause().isInstanceOf[SecurityException]) {
throw e
}
}
stream.flush()
// Get the output and discard any unnecessary lines from it.
Source.fromString(new String(out.toByteArray(), StandardCharsets.UTF_8)).getLines
.filter { line =>
!line.startsWith("log4j") && !line.startsWith("usage")
}
.mkString("\n")
} finally {
System.setSecurityManager(currentSm)
System.setOut(currentOut)
System.setErr(currentErr)
}
}
}
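// Illustrative sketch (not part of Spark): the precedence rule loadEnvironmentArguments applies
// above -- an explicit command-line value wins over the properties file, which wins over an
// environment variable. The object name and the values below are made up.
private[deploy] object OptionPrecedenceSketch {
  def resolve(cliValue: Option[String], sparkProperty: Option[String], envVar: Option[String]): String =
    cliValue.orElse(sparkProperty).orElse(envVar).orNull
  def main(args: Array[String]): Unit = {
    // e.g. driver memory: no --driver-memory flag, spark.driver.memory=2g in the defaults file,
    // SPARK_DRIVER_MEMORY=1g in the environment => "2g" wins
    println(resolve(None, Some("2g"), Some("1g")))
  }
}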
|
sh-cho/cshSpark
|
deploy/SparkSubmitArguments.scala
|
Scala
|
apache-2.0
| 24,174
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package whisk.core.database.test
import java.time.Instant
import java.util.concurrent.atomic.AtomicInteger
import akka.stream.ActorMaterializer
import common.{LoggedFunction, WskActorSystem}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers}
import whisk.core.database.Batcher
import whisk.utils.retry
import scala.collection.mutable
import scala.concurrent.duration._
import scala.concurrent.{Await, Future, Promise}
@RunWith(classOf[JUnitRunner])
class BatcherTests extends FlatSpec with Matchers with WskActorSystem {
implicit val materializer: ActorMaterializer = ActorMaterializer()
def await[V](f: Future[V]) = Await.result(f, 10.seconds)
def between(start: Instant, end: Instant) =
Duration.fromNanos(java.time.Duration.between(start, end).toNanos)
val promiseDelay = 100.milliseconds
def resolveDelayed(p: Promise[Unit], delay: FiniteDuration = promiseDelay) =
akka.pattern.after(delay, actorSystem.scheduler) {
p.success(())
Future.successful(())
}
behavior of "Batcher"
it should "batch based on batch size" in {
val ps = Seq.fill(3)(Promise[Unit]())
val batchPromises = mutable.Queue(ps: _*)
val transform = (i: Int) => i + 1
val batchOperation = LoggedFunction((els: Seq[Int]) => {
batchPromises.dequeue().future.map(_ => els.map(transform))
})
val batcher = new Batcher[Int, Int](2, 1)(batchOperation)
val values = 1 to 5
val results = values.map(batcher.put)
// First "batch"
retry(batchOperation.calls should have size 1, (promiseDelay.toMillis * 2).toInt)
batchOperation.calls(0) should have size 1
// Allow batch to build up
resolveDelayed(ps(0))
// Second batch
retry(batchOperation.calls should have size 2, (promiseDelay.toMillis * 2).toInt)
batchOperation.calls(1) should have size 2
// Allow batch to build up
resolveDelayed(ps(1))
// Third batch
retry(batchOperation.calls should have size 3, (promiseDelay.toMillis * 2).toInt)
batchOperation.calls(2) should have size 2
ps(2).success(())
await(Future.sequence(results)) shouldBe values.map(transform)
}
it should "run batches through the operation in parallel" in {
val p = Promise[Unit]()
val parallel = new AtomicInteger(0)
val concurrency = 2
val batcher = new Batcher[Int, Int](1, concurrency)(els => {
parallel.incrementAndGet()
p.future.map(_ => els)
})
val values = 1 to 3
val results = values.map(batcher.put)
// Before we resolve the promise, 2 batches should have entered the batch operation
// which is now hanging and waiting for the promise to be resolved.
retry(parallel.get shouldBe concurrency, 100)
p.success(())
await(Future.sequence(results)) shouldBe values
}
it should "complete batched values with the thrown exception" in {
val batcher = new Batcher[Int, Int](2, 1)(_ => Future.failed(new Exception))
val r1 = batcher.put(1)
val r2 = batcher.put(2)
an[Exception] should be thrownBy await(r1)
an[Exception] should be thrownBy await(r2)
// the batcher is still intact
val r3 = batcher.put(3)
val r4 = batcher.put(4)
an[Exception] should be thrownBy await(r3)
an[Exception] should be thrownBy await(r4)
}
}
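// Illustrative only (not the whisk Batcher): a tiny synchronous stand-in showing the grouping
// idea the tests above exercise -- elements are handed to the operation in groups of at most
// `batchSize`. The real Batcher is asynchronous and stream-based; names below are made up.
object BatchingSemanticsSketch {
  def batched[A, B](batchSize: Int)(op: Seq[A] => Seq[B])(in: Seq[A]): List[B] =
    in.grouped(batchSize).flatMap(op).toList
  def main(args: Array[String]): Unit = {
    // 5 elements with batchSize = 2 => groups (1,2), (3,4), (5)
    println(batched(2)((xs: Seq[Int]) => xs.map(_ + 1))(1 to 5)) // List(2, 3, 4, 5, 6)
  }
}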
|
paulcastro/openwhisk
|
tests/src/test/scala/whisk/core/database/test/BatcherTests.scala
|
Scala
|
apache-2.0
| 4,142
|
package nl.jappieklooster.gapl.lib.dsl.execution
import nl.jappieklooster.gapl.lib.model.Agent
/**
*
* @param agent
* @param environment the environment which handles synchronization
*/
class GoalExecutionDsl(agent:Agent, val environment:AnyRef) extends BelieveExecutionDsl(agent){
}
|
jappeace/Gapl
|
library/src/main/scala/nl/jappieklooster/gapl/lib/dsl/execution/GoalExecutionDsl.scala
|
Scala
|
gpl-3.0
| 290
|
/**
* Copyright (C) 2015 DANS - Data Archiving and Networked Services (info@dans.knaw.nl)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.solr
import nl.knaw.dans.lib.logging.DebugEnhancedLogging
import org.joda.time.{ DateTime, DateTimeZone }
import scala.collection.immutable.Seq
import scala.util.Try
import scala.xml._
object SolrDocumentGenerator {
def apply(fedora: FedoraProvider, pid: String): Try[SolrDocumentGenerator] = {
for {
emdXml <- fedora.getEmd(pid).map(XML.loadString)
amdXml <- fedora.getAmd(pid).map(XML.loadString)
prslXml <- fedora.getPrsql(pid).map(XML.loadString)
relsExtXml <- fedora.getRelsExt(pid).map(XML.loadString)
} yield new SolrDocumentGenerator(pid) {
override val emd: Elem = emdXml
override val amd: Elem = amdXml
override val relsExt: Elem = relsExtXml
override val prsl: Elem = prslXml
}
}
}
abstract class SolrDocumentGenerator(pid: String) extends DebugEnhancedLogging {
val DC_NAMESPACE: String = "http://purl.org/dc/elements/1.1/"
val EAS_NAMESPACE: String = "http://easy.dans.knaw.nl/easy/easymetadata/eas/"
val RDF_NAMESPACE: String = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"
val emd: Elem
val amd: Elem
val prsl: Elem
val relsExt: Elem
/* dc */
/* with sort fields */
def extractMappingFromEmd(name: String)(f: Node => String): (String, Seq[String]) = {
s"dc_$name" -> (emd \ name \ "_").map(f).filter(_.nonEmpty)
}
lazy val dcTitleFromEmdMapping @ (dcTitleKey, dcTitleValues) = {
extractMappingFromEmd("title")(_.text)
}
lazy val dcPublisherFromEmdMapping @ (dcPublisherKey, dcPublisherValues) = {
extractMappingFromEmd("publisher")(_.text)
}
def extractPersonOrganizationForDc(p: Node): String = {
// formatting the person's name
val nameStart = (p \ "surname").text
val nameEnd = List("title", "initials", "prefix")
.map(s => (p \ s).text)
.filter(_.nonEmpty)
.mkString(" ")
val name = List(nameStart, nameEnd).filter(_.nonEmpty).mkString(", ")
val role = (p \ "role").text
val org = (p \ "organization").text
(name, role, org) match {
case ("", "", "") => ""
case ("", "", o) => o
case ("", r, "") => r
case ("", r, o) => s"$o, $r"
case (n, "", "") => n
case (n, "", o) => s"$n ($o)"
case (n, r, "") => s"$n, $r"
case (n, r, o) => s"$n, $r ($o)"
}
    }
  }
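  // Worked examples of the formatting above, derived from the match cases (example values are made up):
  //   surname "Jansen", initials "J.", role "ProjectLeader", organization "DANS"
  //     -> "Jansen, J., ProjectLeader (DANS)"
  //   organization "DANS" only                                -> "DANS"
  //   surname "Jansen", organization "DANS", no role          -> "Jansen (DANS)"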
lazy val dcCreatorFromEmdMapping @ (dcCreatorKey, dcCreatorValues) = {
extractMappingFromEmd("creator") {
case n if n.namespace == EAS_NAMESPACE => extractPersonOrganizationForDc(n)
case n => n.text
}
}
lazy val dcContributorFromEmdMapping @ (dcContributorKey, dcContributorValues) = {
extractMappingFromEmd("contributor") {
case n if n.namespace == EAS_NAMESPACE => extractPersonOrganizationForDc(n)
case n => n.text
}
}
/* without sort fields */
lazy val dcOtherFromEmdMappings: List[(String, Seq[String])] = {
List("description", "subject", "type", "format", "identifier", "source", "language", "rights")
.map(extractMappingFromEmd(_)(_.text))
}
lazy val dcDateFromEmdMapping: (String, Seq[String]) = {
extractMappingFromEmd("date") {
case n if isFormattableDate(n) => IsoDate.format(n.text, getPrecision(n))
case n => n.text
}
}
def extractRelationForDc(relation: Node): String = {
((relation \\ "subject-title").text, (relation \\ "subject-link").text) match {
case (title, "") => s"title=$title"
case ("", uri) => s"URI=$uri"
case (title, uri) => s"title=$title URI=$uri"
}
}
lazy val dcRelationFromEmdMapping: (String, Seq[String]) = {
extractMappingFromEmd("relation") {
case n if n.namespace == EAS_NAMESPACE => extractRelationForDc(n)
case n => n.text
}
}
def extractPlaceForDC(spatial: Node): String = {
(spatial \ "place").map(_.text).withFilter(_.nonEmpty).map(place => s"place=$place").mkString(", ")
}
def extractPointForDc(point: Node): String = {
s"scheme=${ point.attribute(EAS_NAMESPACE, "scheme").orNull } x=${ (point \ "x").text } y=${ (point \ "y").text }"
}
def extractBoxForDc(box: Node): String = {
val coordinates = List("north", "east", "south", "west")
.map(cn => s"$cn=${ (box \ cn).text }")
.mkString(" ")
s"scheme=${ box.attribute(EAS_NAMESPACE, "scheme").orNull } $coordinates"
}
def extractPolygonForDc(polygon: Node): String = {
(polygon \\ "place").map(_.text).withFilter(_.nonEmpty).map(place => s"place=$place").mkString(", ")
}
def extractSpatialForDc(spatial: Node): String = {
((spatial \ "point", spatial \ "box", spatial \ "polygon") match {
case (Seq(), Seq(), Seq()) => spatial.text :: Nil
case (Seq(point, _ @ _*), Seq(), Seq()) => extractPlaceForDC(spatial) :: extractPointForDc(point) :: Nil
case (Seq(), Seq(box, _ @ _*), Seq()) => extractPlaceForDC(spatial) :: extractBoxForDc(box) :: Nil
case (Seq(), Seq(), Seq(polygon, _ @ _*)) => extractPlaceForDC(spatial) :: extractPolygonForDc(polygon) :: Nil
case (Seq(), Seq(), _) => List.empty
      /*
       To future developers: we currently do not index a polygon, even though this kind of 'Spatial'
       was added to DDM, EMD, etc. for the PAN use case. If we want to index polygons in the future,
       that's fine, as long as you keep the following in mind:
       - PAN sends us a polygon of the town (gemeente) in which an object is found, and we are NOT!!!
         allowed to convert this to a point in order to show it on our map. If you want to index
         this polygon, make sure it is never used anywhere as a specific point. This currently also
         includes boxes, as they get converted to a center coordinate in our current map
         implementation.
       */
}).filter(_.nonEmpty).mkString(", ")
}
lazy val dcCoverageFromEmdMapping: (String, Seq[String]) = {
extractMappingFromEmd("coverage") {
case n if n.label == "spatial" => extractSpatialForDc(n)
case n => n.text
}
}
/* combine */
lazy val dcMappings: List[(String, Seq[String])] = {
dcTitleFromEmdMapping ::
dcPublisherFromEmdMapping ::
dcCreatorFromEmdMapping ::
dcContributorFromEmdMapping ::
dcDateFromEmdMapping ::
dcRelationFromEmdMapping ::
dcCoverageFromEmdMapping ::
dcOtherFromEmdMappings
}
lazy val dcMappingsSort: List[(String, Seq[String])] = {
List(
s"${ dcCreatorKey }_s" -> dcCreatorValues,
s"${ dcContributorKey }_s" -> dcContributorValues,
s"${ dcTitleKey }_s" -> dcTitleValues,
s"${ dcPublisherKey }_s" -> dcPublisherValues
)
}
/* emd */
lazy val emdDateMappings: List[(String, Seq[String])] = {
val emdOrAmd = List("submitted", "published", "deleted")
.map(s => s"emd_date_$s" -> getEasOrAmdDateElement(s).map(n => toUtcTimestamp(n.text)))
val emdOnly = List("created", "available")
.map(s => s"emd_date_$s" -> getEasDateElement(s).map(n => toUtcTimestamp(n.text)))
emdOrAmd ++ emdOnly
}
def getEasOrAmdDateElement(typeOfDate: String): NodeSeq = {
getEasDateElement(typeOfDate) match {
case Seq() => getAmdDateElement(typeOfDate)
case otherwise => otherwise
}
}
def getEasDateElement(typeOfDate: String): NodeSeq = {
(emd \ "date" \ typeOfDate).filter(_.namespace == EAS_NAMESPACE) match {
case es @ Seq(element, _ @ _*) =>
val size = es.size
if (size > 1)
logger.warn(s"Found $size date $typeOfDate elements but only one should be allowed. Metadata may be wrong! Using the first element found.")
NodeSeq.fromSeq(element)
case e => e
}
}
implicit private def dateTimeOrdering: Ordering[DateTime] = Ordering.fromLessThan(_ isBefore _)
def getAmdDateElement(typeOfDate: String): NodeSeq = {
val dates = (amd \ "stateChangeDates" \ "stateChangeDate")
.collect { case change if (change \ "toState").text.toLowerCase == typeOfDate => (change \ "changeDate").text }
if (dates.isEmpty)
NodeSeq.Empty
else
<node>{dates.maxBy(DateTime.parse)}</node>
}
def toUtcTimestamp(s: String): String = DateTime.parse(s).withZone(DateTimeZone.UTC).toString
lazy val emdFormattedDateMappings: List[(String, Seq[String])] = {
List("created", "available")
.map(s => s"emd_date_${ s }_formatted" ->
getEasDateElement(s)
.withFilter(isFormattableDate)
.map(n => IsoDate.format(n.text, getPrecision(n))))
}
def isFormattableDate(n: Node): Boolean = {
(n.attribute(EAS_NAMESPACE, "format"), n.attribute(EAS_NAMESPACE, "scheme")) match {
case (Some(Seq(_)), Some(Seq(_))) => true
case _ => false
}
}
def getPrecision(n: Node): String = {
n.attribute(EAS_NAMESPACE, "format") match {
case Some(Seq(p)) => p.text
case _ => ""
}
}
lazy val otherMappings: List[(String, Seq[String])] = {
List(
"amd_assignee_id" -> (amd \ "workflowData" \ "assigneeId").map(_.text),
"amd_depositor_id" -> (amd \ "depositorId").map(_.text),
"amd_workflow_progress" -> List((amd \ "workflowData" \\ "workflow").count(isRequiredAndCompletedStep).toString),
"ds_state" -> (amd \ "datasetState").map(_.text),
"ds_accesscategory" -> (emd \ "rights" \ "accessRights").map(_.text),
"emd_audience" -> (emd \ "audience" \ "audience").map(_.text),
"psl_permission_status" -> (prsl \ "sequences" \\ "sequence").map(formatPrslString),
"archaeology_dc_subject" -> (emd \ "subject" \ "subject").filter(isArchaeologySubject).map(_.text),
"archaeology_dcterms_temporal" -> (emd \ "coverage" \ "temporal").filter(isArchaeologyTemporal).map(_.text),
"dai_creator" -> ((emd \\ "creator" \ "creator").filter(_.namespace == EAS_NAMESPACE) \ "entityId").filter(hasDaiScheme).map(_.text),
"dai_contributor" -> ((emd \\ "contributor" \ "contributor").filter(_.namespace == EAS_NAMESPACE) \ "entityId").filter(hasDaiScheme).map(_.text),
"easy_collections" -> (relsExt \\ "Description" \ "isCollectionMember")
.map(_.attribute(RDF_NAMESPACE, "resource").fold("")(_.text.replace("info:fedora/", "")))
)
}
def isRequiredAndCompletedStep(n: Node): Boolean = {
val required = n \ "required"
val completed = n \ "completed"
List(required, completed).forall(p => p.nonEmpty && p.text == "true")
}
def hasDaiScheme(n: Node): Boolean = {
n.attribute(EAS_NAMESPACE, "scheme") match {
case Some(Seq(s)) => s.text == "DAI"
case _ => false
}
}
def isArchaeologySubject(n: Node): Boolean = {
(n.attribute(EAS_NAMESPACE, "scheme"), n.attribute(EAS_NAMESPACE, "schemeId")) match {
case (Some(Seq(scheme)), Some(Seq(schemeId))) => scheme.text == "ABR" && schemeId.text == "archaeology.dc.subject"
case _ => false
}
}
def isArchaeologyTemporal(n: Node): Boolean = {
(n.attribute(EAS_NAMESPACE, "scheme"), n.attribute(EAS_NAMESPACE, "schemeId")) match {
case (Some(Seq(scheme)), Some(Seq(schemeId))) => scheme.text == "ABR" && schemeId.text == "archaeology.dcterms.temporal"
case _ => false
}
}
def formatPrslString(n: Node): String = {
List(
(n \ "requesterId").text,
(n \ "state").text,
(n \ "stateLastModified").text
).mkString(" ")
}
def toXml: Elem = {
<doc>
<!-- Some standard fields that need to be here, in this order. Don't ask ... -->
<field name="type">easy-dataset</field>
<field name="type">dataset</field>
<field name="repository_id">easy</field>
<!-- Fields based on metadata -->
<field name="sid">{pid}</field>
{dcMappings.flatMap((createField _).tupled)}
{dcMappingsSort.flatMap((createSortField _).tupled)}
{emdDateMappings.flatMap((createField _).tupled)}
{emdFormattedDateMappings.flatMap((createField _).tupled)}
{otherMappings.flatMap((createField _).tupled)}
</doc>
}
def createField(name: String, values: Seq[String]): NodeSeq = {
values.map(value => <field name={name}>{value}</field>)
}
def createSortField(name: String, values: Seq[String]): NodeSeq = {
values match {
case Seq() => NodeSeq.Empty
case vs => <field name={name}>{vs.mkString(" ")}</field>
}
}
}
|
DANS-KNAW/easy-update-solr-index
|
lib/src/main/scala/nl.knaw.dans.easy.solr/SolrDocumentGenerator.scala
|
Scala
|
apache-2.0
| 12,905
|
package hoconspring
import java.lang.reflect.{Constructor, Method}
import org.springframework.core.ParameterNameDiscoverer
/**
* Author: ghik
* Created: 11/09/15.
*/
class AnnotationParameterNameDiscoverer extends ParameterNameDiscoverer {
def getParameterNames(ctor: Constructor[_]): Array[String] =
Option(ctor.getAnnotation(classOf[ParamNames])).map(_.value).orNull
def getParameterNames(method: Method): Array[String] =
Option(method.getAnnotation(classOf[ParamNames])).map(_.value).orNull
}
|
ghik/hocon-spring
|
src/test/scala/hoconspring/AnnotationParameterNameDiscoverer.scala
|
Scala
|
apache-2.0
| 515
|
package com.crobox.clickhouse.dsl.language
import com.crobox.clickhouse.DslTestSpec
import com.crobox.clickhouse.dsl._
class LogicalFunctionTokenizerTest extends DslTestSpec {
def noto(other: LogicalOpsMagnet): ExpressionColumn[Boolean] = LogicalFunction(other, Not, other)
val select = SelectQuery(Seq(shieldId))
it should "add brackets (not operator and OR)" in {
toSQL(noto(shieldId isEq "a") or noto(shieldId isEq "b")) should matchSQL(
"not(shield_id = 'a') OR not(shield_id = 'b')"
)
// explicit double quotes
toSQL((noto(shieldId isEq "a")) or (noto(shieldId isEq "b"))) should matchSQL(
s"not(shield_id = 'a') OR not(shield_id = 'b')"
)
}
it should "add brackets (not operator and AND)" in {
toSQL(noto(shieldId isEq "a") and noto(shieldId isEq "b")) should matchSQL(
s"not(shield_id = 'a') AND not(shield_id = 'b')"
)
// explicit double quotes
toSQL((noto(shieldId isEq "a")) and (noto(shieldId isEq "b"))) should matchSQL(
s"not(shield_id = 'a') AND not(shield_id = 'b')"
)
}
it should "add brackets (OR and AND)" in {
toSQL(shieldId isEq "a" or ((shieldId isEq "b") and (shieldId isEq "c"))) should matchSQL(
s"shield_id = 'a' OR (shield_id = 'b' AND shield_id = 'c')"
)
}
it should "add brackets (AND and OR)" in {
toSQL(shieldId isEq "a" and ((shieldId isEq "b") or (shieldId isEq "c"))) should matchSQL(
s"shield_id = 'a' AND (shield_id = 'b' OR shield_id = 'c')"
)
}
it should "add brackets nested OR (left double, right single)" in {
toSQL((shieldId < "a" and (shieldId isEq "b")) or shieldId < "c") should matchSQL(
s"(shield_id < 'a' AND shield_id = 'b') OR shield_id < 'c'"
)
}
it should "add brackets nested AND (left double, right single)" in {
toSQL((shieldId < "a" and (shieldId isEq "b")) and shieldId < "c") should matchSQL(
s"shield_id < 'a' AND shield_id = 'b' AND shield_id < 'c'"
)
}
it should "add brackets nested OR (left double, right double)" in {
toSQL((shieldId < "a" and (shieldId isEq "b")) or (shieldId < "c" or shieldId > "d")) should matchSQL(
s"(shield_id < 'a' AND shield_id = 'b') OR shield_id < 'c' OR shield_id > 'd'"
)
}
it should "add brackets nested AND (left double, right double)" in {
toSQL((shieldId < "a" and (shieldId isEq "b")) and (shieldId < "c" or shieldId > "d")) should matchSQL(
s"shield_id < 'a' AND shield_id = 'b' AND (shield_id < 'c' OR shield_id > 'd')"
)
}
it should "add brackets NOT (single function)" in {
toSQL(
shieldId isEq "a" or ((shieldId isEq "b") and (shieldId isEq "c")) or (noto(shieldId isEq "d") and noto(
shieldId isEq "e"
))
) should matchSQL(
s"shield_id = 'a' OR (shield_id = 'b' AND shield_id = 'c') OR (not(shield_id = 'd') AND not(shield_id = 'e'))"
)
}
it should "add brackets triple OR/OR/OR" in {
toSQL(shieldId isEq "a" or ((shieldId isEq "b") or (shieldId isEq "c") or (shieldId isEq "d"))) should matchSQL(
"shield_id = 'a' OR shield_id = 'b' OR shield_id = 'c' OR shield_id = 'd'"
)
}
it should "add brackets triple OR/AND/AND" in {
toSQL(shieldId isEq "a" or ((shieldId isEq "b") and (shieldId isEq "c") and (shieldId isEq "d"))) should matchSQL(
s"shield_id = 'a' OR (shield_id = 'b' AND shield_id = 'c' AND shield_id = 'd')"
)
}
it should "add brackets triple OR/AND/OR" in {
toSQL(shieldId isEq "a" or ((shieldId isEq "b") and (shieldId isEq "c") or (shieldId isEq "d"))) should matchSQL(
s"shield_id = 'a' OR (shield_id = 'b' AND shield_id = 'c') OR shield_id = 'd'"
)
}
it should "add brackets triple AND/AND/OR" in {
toSQL(shieldId isEq "a" and ((shieldId isEq "b") and (shieldId isEq "c") or (shieldId isEq "d"))) should matchSQL(
s"shield_id = 'a' AND ((shield_id = 'b' AND shield_id = 'c') OR shield_id = 'd')"
)
}
it should "add brackets triple OR/OR/AND" in {
toSQL(shieldId isEq "a" or ((shieldId isEq "b") or (shieldId isEq "c") and (shieldId isEq "d"))) should matchSQL(
s"shield_id = 'a' OR ((shield_id = 'b' OR shield_id = 'c') AND shield_id = 'd')"
)
}
def conditionOr(nr: Seq[Int]): Option[TableColumn[Boolean]] = Option(nr.map(x => col2 === x).reduce((a, b) => a or b))
def conditionAnd(nr: Seq[Int]): Option[TableColumn[Boolean]] =
Option(nr.map(x => col2 === x).reduce((a, b) => a and b))
it should "tokenize numbers OR with NONE" in {
toSQL(None and conditionOr(Seq(1, 3)) and None and conditionOr(Seq(3, 4)) and None) should matchSQL(
s"column_2 = 1 OR column_2 = 3 AND (column_2 = 3 OR column_2 = 4)"
)
}
//
// OR
//
it should "true using Multiple values and/None/and OR" in {
toSQL((1 == 1) and None and conditionOr(Seq(2, 3))) should matchSQL("column_2 = 2 OR column_2 = 3")
}
it should "true using Multiple values and/None/or OR" in {
toSQL((1 == 1) and None or conditionOr(Seq(2, 3))) should matchSQL("1")
}
it should "true using Multiple values and/None/xor OR" in {
toSQL((1 == 1) and None xor conditionOr(Seq(2, 3))) should matchSQL("not(column_2 = 2 OR column_2 = 3)")
}
it should "true using Multiple values or/None/or OR" in {
toSQL((1 == 1) or None or conditionOr(Seq(2, 3))) should matchSQL("1")
}
it should "true using Multiple values or/None/and OR" in {
toSQL((1 == 1) or None and conditionOr(Seq(2, 3))) should matchSQL("column_2 = 2 OR column_2 = 3")
}
it should "true using Multiple values or/None/xor OR" in {
toSQL((1 == 1) or None xor conditionOr(Seq(2, 3))) should matchSQL("not(column_2 = 2 OR column_2 = 3)")
}
it should "false using Multiple values and/None/and OR" in {
toSQL((1 == 2) and None and conditionOr(Seq(2, 3))) should matchSQL("0")
}
it should "false using Multiple values and/None/or OR" in {
toSQL((1 == 2) and None or conditionOr(Seq(2, 3))) should matchSQL("column_2 = 2 OR column_2 = 3")
}
it should "false using Multiple values and/None/xor OR" in {
toSQL((1 == 2) and None xor conditionOr(Seq(2, 3))) should matchSQL("column_2 = 2 OR column_2 = 3")
}
it should "false using Multiple values or/None/or OR" in {
toSQL((1 == 2) or None or conditionOr(Seq(2, 3))) should matchSQL("column_2 = 2 OR column_2 = 3")
}
it should "false using Multiple values or/None/and OR" in {
toSQL((1 == 2) or None and conditionOr(Seq(2, 3))) should matchSQL("0")
}
it should "false using Multiple values or/None/xor OR" in {
toSQL((1 == 2) or None xor conditionOr(Seq(2, 3))) should matchSQL("column_2 = 2 OR column_2 = 3")
}
//
// AND
//
it should "true using Multiple values and/None/and AND" in {
toSQL((1 == 1) and None and conditionAnd(Seq(2, 3))) should matchSQL("column_2 = 2 AND column_2 = 3")
}
it should "true using Multiple values and/None/or AND" in {
toSQL((1 == 1) and None or conditionAnd(Seq(2, 3))) should matchSQL("1")
}
it should "true using Multiple values and/None/xor AND" in {
toSQL((1 == 1) and None xor conditionAnd(Seq(2, 3))) should matchSQL("not(column_2 = 2 AND column_2 = 3)")
}
it should "true using Multiple values or/None/or AND" in {
toSQL((1 == 1) or None or conditionAnd(Seq(2, 3))) should matchSQL("1")
}
it should "true using Multiple values or/None/and AND" in {
toSQL((1 == 1) or None and conditionAnd(Seq(2, 3))) should matchSQL("column_2 = 2 AND column_2 = 3")
}
it should "true using Multiple values or/None/xor AND" in {
toSQL((1 == 1) or None xor conditionAnd(Seq(2, 3))) should matchSQL("not(column_2 = 2 AND column_2 = 3)")
}
it should "false using Multiple values and/None/and AND" in {
toSQL((1 == 2) and None and conditionAnd(Seq(2, 3))) should matchSQL("0")
}
it should "false using Multiple values and/None/or AND" in {
toSQL((1 == 2) and None or conditionAnd(Seq(2, 3))) should matchSQL("column_2 = 2 AND column_2 = 3")
}
it should "false using Multiple values and/None/xor AND" in {
toSQL((1 == 2) and None xor conditionAnd(Seq(2, 3))) should matchSQL("column_2 = 2 AND column_2 = 3")
}
it should "false using Multiple values or/None/or AND" in {
toSQL((1 == 2) or None or conditionAnd(Seq(2, 3))) should matchSQL("column_2 = 2 AND column_2 = 3")
}
it should "false using Multiple values or/None/and AND" in {
toSQL((1 == 2) or None and conditionAnd(Seq(2, 3))) should matchSQL("0")
}
it should "false using Multiple values or/None/xor AND" in {
toSQL((1 == 2) or None xor conditionAnd(Seq(2, 3))) should matchSQL("column_2 = 2 AND column_2 = 3")
}
it should "maintain brackets 1" in {
toSQL(shieldId.isEq("a") and None.and(None).and(shieldId.isEq("b") or shieldId.isEq("c"))) should matchSQL(
"shield_id = 'a' AND (shield_id = 'b' OR shield_id = 'c')"
)
}
it should "maintain brackets 2" in {
toSQL(None.and(None).and(shieldId.isEq("b") or shieldId.isEq("c"))) should matchSQL(
"shield_id = 'b' OR shield_id = 'c'"
)
}
}
|
crobox/clickhouse-scala-client
|
dsl/src/test/scala/com/crobox/clickhouse/dsl/language/LogicalFunctionTokenizerTest.scala
|
Scala
|
lgpl-3.0
| 9,052
|
/*
* This file is part of the sohva project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gnieh.sohva
import scala.concurrent.Future
import akka.stream.ActorMaterializer
import akka.http.scaladsl.model._
import akka.http.scaladsl.unmarshalling._
/** A list that can be queried for a given view.
*
* @author Lucas Satabin
*/
class CList(
val design: String,
val db: Database,
val list: String) {
import db.ec
import db.couch.system
import db.couch.materializer
protected[this] val uri = db.uri / "_design" / design / "_list" / list
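  // The resulting request path is <database uri>/_design/<design>/_list/<list>; `query` below
  // appends the view name and, when given, a ?format=... query parameter.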
/** Indicates whether this view exists */
def exists: Future[Boolean] =
for (h <- db.couch.rawHttp(HttpRequest(HttpMethods.HEAD, uri)))
yield h.status == StatusCodes.OK
def query[T: FromEntityUnmarshaller](viewName: String, format: Option[String] = None): Future[T] =
for {
resp <- db.couch.rawHttp(HttpRequest(uri = uri / viewName <<? format.map(f => ("format", f))))
t <- Unmarshal(resp).to[T]
} yield t
override def toString =
uri.toString
}
|
gnieh/sohva
|
src/main/scala/gnieh/sohva/CList.scala
|
Scala
|
apache-2.0
| 1,560
|
import scala.reflect.runtime.universe._
import scala.tools.reflect.ToolBox
import scala.tools.reflect.Eval
object Test extends dotty.runtime.LegacyApp {
object C {
type T = Int
val code = reify {
List[C.T](2)
}
println(code.eval)
}
C
}
|
yusuke2255/dotty
|
tests/disabled/macro/run/reify_newimpl_31.scala
|
Scala
|
bsd-3-clause
| 266
|
package com.datastax.spark.connector.util
import java.lang.{Iterable => JIterable}
import java.util.{Collection => JCollection}
import java.util.{Map => JMap}
import scala.collection.JavaConversions._
import scala.reflect._
import scala.reflect.api.{Mirror, TypeCreator, _}
import scala.reflect.runtime.universe._
import org.apache.spark.sql.catalyst.ReflectionLock.SparkReflectionLock
import org.apache.spark.api.java.function.{Function => JFunction}
import com.datastax.spark.connector.CassandraRow
import com.datastax.spark.connector.mapper.{ColumnMapper, JavaBeanColumnMapper}
import com.datastax.spark.connector.rdd.reader.RowReaderFactory
import com.datastax.spark.connector.writer.RowWriterFactory
/** A helper class to make it possible to access components written in Scala from Java code.
* INTERNAL API
*/
object JavaApiHelper {
def mirror = runtimeMirror(Thread.currentThread().getContextClassLoader)
/** Returns a `TypeTag` for the given class. */
def getTypeTag[T](clazz: Class[T]): TypeTag[T] = SparkReflectionLock.synchronized {
TypeTag.apply(mirror, new TypeCreator {
override def apply[U <: Universe with Singleton](m: Mirror[U]): U#Type = {
m.staticClass(clazz.getName).toTypeConstructor
}
})
}
/** Returns a `TypeTag` for the given class and type parameters. */
def getTypeTag[T](clazz: Class[_], typeParams: TypeTag[_]*): TypeTag[T] = SparkReflectionLock.synchronized {
TypeTag.apply(mirror, new TypeCreator {
override def apply[U <: Universe with Singleton](m: Mirror[U]) = {
val ct = m.staticClass(clazz.getName).toTypeConstructor.asInstanceOf[m.universe.Type]
val tpt = typeParams.map(_.in(m).tpe.asInstanceOf[m.universe.Type]).toList
m.universe.appliedType(ct, tpt).asInstanceOf[U#Type]
}
})
}
/** Returns a `ClassTag` of a given runtime class. */
def getClassTag[T](clazz: Class[T]): ClassTag[T] = ClassTag(clazz)
/** Returns a `ClassTag` of a given runtime class. */
def getClassTag2[T](clazz: Class[_]): ClassTag[T] = ClassTag(clazz)
def toScalaFunction1[T1, R](f: JFunction[T1, R]): T1 => R = f.call
def valuesAsJavaIterable[K, V, IV <: Iterable[V]]: ((K, IV)) => (K, JIterable[V]) = {
case (k, iterable) => (k, asJavaIterable(iterable))
}
def valuesAsJavaCollection[K, V, IV <: Iterable[V]]: ((K, IV)) => (K, JCollection[V]) = {
case (k, iterable) => (k, asJavaCollection(iterable))
}
/** Returns a runtime class of a given `TypeTag`. */
def getRuntimeClass[T](typeTag: TypeTag[T]): Class[T] = SparkReflectionLock.synchronized(
mirror.runtimeClass(typeTag.tpe).asInstanceOf[Class[T]])
/** Returns a runtime class of a given `ClassTag`. */
def getRuntimeClass[T](classTag: ClassTag[T]): Class[T] =
classTag.runtimeClass.asInstanceOf[Class[T]]
/** Converts a Java `Map` to a Scala immutable `Map`. */
def toScalaMap[K, V](map: JMap[K, V]): Map[K, V] = Map(map.toSeq: _*)
/** Converts an array to a Scala `Seq`. */
def toScalaSeq[T](array: Array[T]): Seq[T] = array
/** Converts an array to a Scala `Seq`. */
def toScalaImmutableSeq[T](array: Array[T]): scala.collection.immutable.Seq[T] = array.toIndexedSeq
/** Converts a Java `Iterable` to Scala `Seq`. */
def toScalaSeq[T](iterable: java.lang.Iterable[T]): Seq[T] = iterable.toSeq
/** Returns the default `RowWriterFactory` initialized with the given `ColumnMapper`. */
def defaultRowWriterFactory[T](typeTag: TypeTag[T], mapper: ColumnMapper[T]): RowWriterFactory[T] = {
RowWriterFactory.defaultRowWriterFactory(typeTag, mapper)
}
/** Returns the `JavaBeanColumnMapper` instance for the given `ClassTag` and column mapping. */
def javaBeanColumnMapper[T](
classTag: ClassTag[T],
columnNameOverride: JMap[String, String]
): ColumnMapper[T] =
new JavaBeanColumnMapper[T](toScalaMap(columnNameOverride))(classTag)
/** Returns the default `RowReaderFactory`. */
def genericRowReaderFactory: RowReaderFactory[CassandraRow] = RowReaderFactory.GenericRowReader$
val none = None
}
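// Hedged usage sketch (not part of the original file): building a TypeTag for a parameterized
// type from plain Class objects, the way the Java API does when only runtime classes are
// available. The object name and the printed type are illustrative.
object JavaApiHelperSketch {
  import scala.reflect.runtime.universe._
  def main(args: Array[String]): Unit = {
    val elemTag: TypeTag[String] = JavaApiHelper.getTypeTag(classOf[String])
    val listTag: TypeTag[java.util.List[String]] =
      JavaApiHelper.getTypeTag(classOf[java.util.List[_]], elemTag)
    println(listTag.tpe) // java.util.List[String]
  }
}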
|
maasg/spark-cassandra-connector
|
spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/util/JavaApiHelper.scala
|
Scala
|
apache-2.0
| 4,060
|
package skinny.assets
import org.scalatest._
import org.scalatest.matchers._
class SassCompilerSpec extends FlatSpec with ShouldMatchers {
behavior of "SassCompiler"
it should "compile scss code" in {
val compiler = SassCompiler
val css = compiler.compile("font.scss",
"""$font-stack: Helvetica, sans-serif;
|$primary-color: #333;
|
|body {
| font: 100% $font-stack;
| color: $primary-color;
|}
""".stripMargin)
css.replaceFirst("\\n$", "") should equal(
"""body {
| font: 100% Helvetica, sans-serif;
| color: #333333; }""".stripMargin)
}
it should "compile indented-sass code" in {
val compiler = SassCompiler
val css = compiler.compileIndented("main.sass",
"""#main
| color: blue
| font-size: 0.3em
""".stripMargin)
css.replaceFirst("\\n$", "") should equal(
"""#main {
| color: blue;
| font-size: 0.3em; }""".stripMargin)
}
}
|
BlackPrincess/skinny-framework
|
assets/src/test/scala/skinny/assets/SassCompilerSpec.scala
|
Scala
|
mit
| 1,009
|
/*
* Copyright 2018 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.pipeline
import akka.NotUsed
import akka.actor._
import akka.stream.{javadsl, scaladsl}
import com.typesafe.config.ConfigObject
import scala.jdk.CollectionConverters._
import scala.util.Try
sealed trait PipelineType
case object ServerPipeline extends PipelineType
case object ClientPipeline extends PipelineType
case class Context(name: String, pipelineType: PipelineType)
package japi {
import akka.stream.javadsl.BidiFlow
/**
* Java API
*/
abstract class PipelineFlowFactory {
def create(context: Context, system: ActorSystem):
javadsl.BidiFlow[RequestContext, RequestContext, RequestContext, RequestContext, NotUsed]
def abortable(flow: BidiFlow[RequestContext, RequestContext, RequestContext, RequestContext, NotUsed]):
javadsl.BidiFlow[RequestContext, RequestContext, RequestContext, RequestContext, NotUsed] =
AbortableBidiFlow(flow.asScala).abortable.asJava
}
}
trait PipelineFlowFactory extends japi.PipelineFlowFactory {
def create(context: Context)(implicit system: ActorSystem):
scaladsl.BidiFlow[RequestContext, RequestContext, RequestContext, RequestContext, NotUsed]
override def create(context: Context, system: ActorSystem):
javadsl.BidiFlow[RequestContext, RequestContext, RequestContext, RequestContext, NotUsed] = create(context)(system).asJava
}
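// Hedged sketch (not part of squbs): a minimal PipelineFlowFactory of the kind a
// `squbs.pipelineflow` config block's `factory` key would name. It passes requests and
// responses through unchanged; the class name is made up for illustration.
class NoOpPipelineFlowFactory extends PipelineFlowFactory {
  import akka.stream.scaladsl.{BidiFlow, Flow}
  override def create(context: Context)(implicit system: ActorSystem):
      scaladsl.BidiFlow[RequestContext, RequestContext, RequestContext, RequestContext, NotUsed] =
    BidiFlow.fromFlows(Flow[RequestContext], Flow[RequestContext])
}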
class PipelineExtensionImpl(flowFactoryMap: Map[String, japi.PipelineFlowFactory],
serverDefaultFlows: (Option[String], Option[String]),
clientDefaultFlows: (Option[String], Option[String]))
(implicit system: ActorSystem) extends Extension {
def getFlow(pipelineSetting: PipelineSetting, context: Context): Option[PipelineFlow] = {
val (appFlow, defaultsOn) = pipelineSetting
val (defaultPreFlow, defaultPostFlow) =
if (defaultsOn getOrElse true) {
context.pipelineType match {
case ServerPipeline => serverDefaultFlows
case ClientPipeline => clientDefaultFlows
}
} else (None, None)
val pipelineFlowNames = (defaultPreFlow :: appFlow :: defaultPostFlow :: Nil).flatten
buildPipeline(pipelineFlowNames, context)
}
private def buildPipeline(flowNames: Seq[String], context: Context) = {
val flows = flowNames map { case name =>
val flowFactory = flowFactoryMap
.getOrElse(name, throw new IllegalArgumentException(s"Invalid pipeline name $name"))
flowFactory match {
case factory: PipelineFlowFactory => factory.create(context)
case factory: japi.PipelineFlowFactory => factory.create(context, system).asScala
case factory => throw new IllegalArgumentException(s"Unsupported flow factory type ${factory.getClass.getName}")
}
}
flows.reduceLeftOption(_ atop _)
}
}
object PipelineExtension extends ExtensionId[PipelineExtensionImpl] with ExtensionIdProvider {
override def createExtension(system: ExtendedActorSystem): PipelineExtensionImpl = {
val config = system.settings.config
val flows = config.root.asScala.toSeq collect {
case (n, v: ConfigObject) if v.toConfig.hasPath("type") && v.toConfig.getString("type") == "squbs.pipelineflow" =>
(n, v.toConfig)
}
var flowMap = Map.empty[String, japi.PipelineFlowFactory]
flows foreach { case (name, config) =>
val factoryClassName = config.getString("factory")
val flowFactory = Class.forName(factoryClassName).newInstance().asInstanceOf[japi.PipelineFlowFactory]
flowMap = flowMap + (name -> flowFactory)
}
val serverDefaultPreFlow = Try(config.getString("squbs.pipeline.server.default.pre-flow")).toOption
val serverDefaultPostFlow = Try(config.getString("squbs.pipeline.server.default.post-flow")).toOption
val clientDefaultPreFlow = Try(config.getString("squbs.pipeline.client.default.pre-flow")).toOption
val clientDefaultPostFlow = Try(config.getString("squbs.pipeline.client.default.post-flow")).toOption
new PipelineExtensionImpl(
flowMap,
(serverDefaultPreFlow, serverDefaultPostFlow),
(clientDefaultPreFlow, clientDefaultPostFlow))(system)
}
override def lookup: ExtensionId[_ <: Extension] = PipelineExtension
/**
* Java API: retrieve the Pipeline extension for the given system.
*/
override def get(system: ActorSystem): PipelineExtensionImpl = super.get(system)
}
|
akara/squbs
|
squbs-pipeline/src/main/scala/org/squbs/pipeline/PipelineExtension.scala
|
Scala
|
apache-2.0
| 5,001
|
package eventstreams.support
import akka.actor.{ActorRef, ActorSystem, Props, Terminated}
import com.typesafe.config._
import core.sysevents.SyseventOps.symbolToSyseventOps
import core.sysevents.WithSyseventPublisher
import core.sysevents.ref.ComponentWithBaseSysevents
import core.sysevents.support.EventAssertions
import eventstreams.UUIDTools
import eventstreams.core.actors.ActorWithComposableBehavior
import org.scalatest.{BeforeAndAfterEach, Suite, Tag}
import org.slf4j.LoggerFactory
import scala.concurrent.Await
import scala.concurrent.duration.DurationInt
trait MultiActorSystemTestContextSysevents extends ComponentWithBaseSysevents {
override def componentId: String = "Test.ActorSystem"
val ActorSystemCreated = 'ActorSystemCreated.trace
val ActorSystemTerminated = 'ActorSystemTerminated.trace
val TerminatingActorSystem = 'TerminatingActorSystem.trace
val DestroyingAllSystems = 'DestroyingAllSystems.trace
val DestroyingActor = 'DestroyingActor.trace
val AllActorsTerminated = 'AllActorsTerminated.trace
}
trait ActorSystemWrapper {
def underlyingSystem: ActorSystem
def config: Config
def stopActor(id: String)
def start(props: Props, id: String): ActorRef
def actorSelection(id: String) = underlyingSystem.actorSelection(id)
def rootUserActorSelection(id: String) = actorSelection(s"/user/$id")
}
private case class Watch(ref: ActorRef)
private case class StopAll()
private trait WatcherSysevents extends ComponentWithBaseSysevents {
val Watching = 'Watching.trace
val WatchedActorGone = 'WatchedActorGone.trace
val AllWatchedActorsGone = 'AllWatchedActorsGone.trace
val TerminatingActor = 'TerminatingActor.trace
override def componentId: String = "Test.Watcher"
}
private object WatcherActor extends WatcherSysevents {
def props(componentId: String) = Props(new WatcherActor(componentId))
}
private class WatcherActor(id: String) extends ActorWithComposableBehavior with WatcherSysevents with WithSyseventPublisher {
override def commonBehavior: Receive = handler orElse super.commonBehavior
override def commonFields: Seq[(Symbol, Any)] = super.commonFields ++ Seq('InstanceId -> id)
var watched = Set[ActorRef]()
def handler: Receive = {
case StopAll() =>
if (watched.isEmpty) {
AllWatchedActorsGone >> ()
}
watched.foreach { a =>
TerminatingActor >> ('Actor -> a)
context.stop(a)
}
case Watch(ref) =>
Watching >> ('Ref -> ref)
watched = watched + ref
context.watch(ref)
case Terminated(ref) =>
watched = watched match {
case w if w contains ref =>
WatchedActorGone >> ('Ref -> ref, 'Path -> ref.path.toSerializationFormat)
if (w.size == 1) AllWatchedActorsGone >> ()
w - ref
case w => w
}
}
}
trait MultiActorSystemTestContext extends BeforeAndAfterEach with MultiActorSystemTestContextSysevents with WithSyseventPublisher {
self: Suite with ActorSystemManagement with EventAssertions =>
object OnlyThisTest extends Tag("OnlyThisTest")
case class Wrapper(config: Config, underlyingSystem: ActorSystem, id: String, configName: String) extends ActorSystemWrapper {
private val watcherComponentId = UUIDTools.generateShortUUID
private val watcher = underlyingSystem.actorOf(WatcherActor.props(watcherComponentId))
override def start(props: Props, id: String): ActorRef = {
val newActor = underlyingSystem.actorOf(props, id)
watcher ! Watch(newActor)
newActor
}
override def stopActor(id: String) = {
val futureActor = rootUserActorSelection(id).resolveOne(5.seconds)
val actor = Await.result(futureActor, 5.seconds)
DestroyingActor >> ('Actor -> actor)
underlyingSystem.stop(actor)
expectSomeEventsWithTimeout(5000, WatcherActor.WatchedActorGone, 'Path -> actor.path.toSerializationFormat, 'InstanceId -> watcherComponentId)
clearComponentEvents(watcherComponentId)
}
def stop() = {
TerminatingActorSystem >> ('Name -> configName)
val startCheckpoint = System.nanoTime()
try { stopActors() } catch {
case x : Throwable => x.printStackTrace()
}
underlyingSystem.stop(watcher)
underlyingSystem.shutdown()
underlyingSystem.awaitTermination(60.seconds)
ActorSystemTerminated >> ('Name -> configName, 'TerminatedInMs -> (System.nanoTime() - startCheckpoint)/1000000)
}

def stopActors() = {
val startCheckpoint = System.nanoTime()
clearComponentEvents(watcherComponentId)
watcher ! StopAll()
expectSomeEventsWithTimeout(30000, WatcherActor.AllWatchedActorsGone, 'InstanceId -> watcherComponentId)
clearComponentEvents(watcherComponentId)
AllActorsTerminated >> ('TerminatedInMs -> (System.nanoTime() - startCheckpoint)/1000000, 'System -> configName)
}
}
override protected def beforeEach(): Unit = {
StorageStub.clear()
LoggerFactory.getLogger("testseparator").debug("\\n" * 3 + "-" * 120)
super.beforeEach()
}
def configs: Map[String, Config]
private var systems = Map[String, Wrapper]()
def getSystem(configName: String) = systems.get(configName) match {
case None =>
val config = configs.get(configName).get
val sys = Wrapper(config, ActorSystem("hub", config), "hub", configName)
ActorSystemCreated >> ('Name -> "hub", 'ConfigName -> configName)
systems = systems + (configName -> sys)
sys
case Some(x) => x
}
def withSystem[T](configName: String)(f: ActorSystemWrapper => T): T = f(getSystem(configName))
def destroySystem(name: String) = {
systems.get(name).foreach(_.stop())
systems = systems - name
}
def destroyAllSystems() = {
DestroyingAllSystems >> ()
systems.values.foreach(_.stop())
systems = Map()
}
def destroyAllActors() = {
systems.values.foreach(_.stopActors())
}
override protected def afterEach(): Unit = {
LoggerFactory.getLogger("testseparator").debug(" " * 10 + "~" * 40 + " test finished " + "~" * 40)
super.afterEach()
}
}
|
intelix/eventstreams
|
es-core/es-api/src/test/scala/eventstreams/support/MultiActorSystemTestContext.scala
|
Scala
|
apache-2.0
| 6,089
|
package cmwell.analytics.main
import java.nio.file.Paths
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import cmwell.analytics.data.{DataWriterFactory, IndexWithUuidOnly}
import cmwell.analytics.downloader.PartitionedDownloader
import cmwell.analytics.util.TimestampConversion.timestampConverter
import cmwell.analytics.util.{DiscoverEsTopology, FindContactPoints}
import org.apache.commons.io.FileUtils
import org.apache.log4j.LogManager
import org.rogach.scallop.{ScallopConf, ScallopOption}
import scala.concurrent.ExecutionContextExecutor
object DumpUuidOnlyFromEs {
def main(args: Array[String]): Unit = {
val logger = LogManager.getLogger(DumpUuidOnlyFromEs.getClass)
// Since we expect this to be run on a CM-Well node, the default parallelism is to use half the processors
// so as to avoid starving the CM-Well node of processor resources. A higher level of parallelism might
// be possible (without interfering with CM-Well) since most of the work will actually be on the ES side.
val defaultParallelism = 1 max (Runtime.getRuntime.availableProcessors / 2)
implicit val system: ActorSystem = ActorSystem("dump-uuid-only-from-es")
implicit val executionContext: ExecutionContextExecutor = system.dispatcher
implicit val actorMaterializer: ActorMaterializer = ActorMaterializer()
try {
object Opts extends ScallopConf(args) {
val readIndex: ScallopOption[String] = opt[String]("read-index", short = 'i', descr = "The name of the index to read from (default: cm_well_all)", required = false)
val parallelism: ScallopOption[Int] = opt[Int]("parallelism", short = 'p', descr = "The parallelism level", default = Some(defaultParallelism))
val currentOnly: ScallopOption[Boolean] = opt[Boolean]("current-only", short = 'c', descr = "Only download current uuids")
val lastModifiedGteFilter: ScallopOption[java.sql.Timestamp] = opt[java.sql.Timestamp]("lastmodified-gte-filter", descr = "Filter on lastModified >= <value>, where value is an ISO8601 timestamp", default = None)(timestampConverter)
val pathPrefixFilter: ScallopOption[String] = opt[String]("path-prefix-filter", descr = "Filter on the path prefix matching <value>", default = None)
val out: ScallopOption[String] = opt[String]("out", short = 'o', descr = "The path to save the output to", required = true)
val format: ScallopOption[String] = opt[String]("format", short = 'f', descr = "The data format: either 'parquet' or 'csv'", default = Some("parquet"))
val url: ScallopOption[String] = trailArg[String]("url", descr = "A CM-Well URL", required = true)
val sourceFilter: ScallopOption[Boolean] = toggle("source-filter", noshort = true, default=Some(true), prefix = "no-",
descrNo = "Do not filter _source fields (workaround for bad index)", descrYes = "Use source filtering to reduce network traffic")
verify()
}
val esContactPoint = FindContactPoints.es(Opts.url())
val indexesOrAliasesToRead = Opts.readIndex.toOption.fold(Seq("cm_well_all"))(Seq(_))
val esTopology = DiscoverEsTopology(esContactPoint = esContactPoint, aliases = indexesOrAliasesToRead)
// Calling script should clear output directory as necessary.
val objectExtractor = IndexWithUuidOnly
val dataWriterFactory = DataWriterFactory.file(format = Opts.format(), objectExtractor, outDirectory = Opts.out())
PartitionedDownloader.runDownload(
esTopology = esTopology,
parallelism = Opts.parallelism(),
currentOnly = Opts.currentOnly(),
lastModifiedGteFilter = Opts.lastModifiedGteFilter.toOption,
pathPrefixFilter = Opts.pathPrefixFilter.toOption,
objectExtractor = objectExtractor,
dataWriterFactory = dataWriterFactory,
sourceFilter = Opts.sourceFilter())
// The Hadoop convention is to touch the (empty) _SUCCESS file to signal successful completion.
FileUtils.touch(Paths.get(Opts.out(), "_SUCCESS").toFile)
}
catch {
case ex: Throwable =>
logger.error(ex.getMessage, ex)
System.exit(1)
}
finally {
system.terminate()
}
}
}
|
dudi3001/CM-Well
|
tools/dataConsistencyTool/extract-index-from-es/src/main/scala/cmwell/analytics/main/DumpUuidOnlyFromEs.scala
|
Scala
|
apache-2.0
| 4,216
|
package org.scaladebugger.api.lowlevel.threads
import org.scaladebugger.api.lowlevel.PendingRequestSupport
/**
* Provides pending thread death capabilities to an existing
* thread death manager.
*/
trait PendingThreadDeathSupportLike
extends ThreadDeathManager
with PendingRequestSupport
{
/**
* Processes all pending thread death requests.
*
* @return The collection of successfully-processed thread death requests
*/
def processAllPendingThreadDeathRequests(): Seq[ThreadDeathRequestInfo]
/**
* Retrieves a list of pending thread death requests.
*
* @return The collection of thread death requests
*/
def pendingThreadDeathRequests: Seq[ThreadDeathRequestInfo]
}
|
ensime/scala-debugger
|
scala-debugger-api/src/main/scala/org/scaladebugger/api/lowlevel/threads/PendingThreadDeathSupportLike.scala
|
Scala
|
apache-2.0
| 708
|
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frsse2008.micro
import uk.gov.hmrc.ct.accounts.retriever.AccountsBoxRetriever
import uk.gov.hmrc.ct.box._
case class AC405(value: Option[Int]) extends CtBoxIdentifier(name = "Current Other Income")
with CtOptionalInteger with Input
with SelfValidatableBox[AccountsBoxRetriever, Option[Int]] {
override def validate(boxRetriever: AccountsBoxRetriever): Set[CtValidation] = {
validateMoney(value)
}
}
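For illustration only, the box above can be constructed with or without an entered value; the AccountsBoxRetriever passed to validate would come from the surrounding accounts model and is not shown here.
val currentOtherIncome = AC405(Some(1500)) // a value was entered
val emptyOtherIncome = AC405(None)         // nothing entered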
|
pncampbell/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/accounts/frsse2008/micro/AC405.scala
|
Scala
|
apache-2.0
| 1,120
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.client
import java.io.File
import java.lang.reflect.InvocationTargetException
import java.net.{URL, URLClassLoader}
import java.util
import scala.util.Try
import org.apache.commons.io.{FileUtils, IOUtils}
import org.apache.commons.lang3.{JavaVersion, SystemUtils}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.apache.hadoop.hive.shims.ShimLoader
import org.apache.spark.SparkConf
import org.apache.spark.deploy.SparkSubmitUtils
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.util.quietly
import org.apache.spark.sql.hive.HiveUtils
import org.apache.spark.sql.internal.NonClosableMutableURLClassLoader
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.util.{MutableURLClassLoader, Utils}
/** Factory for `IsolatedClientLoader` with specific versions of hive. */
private[hive] object IsolatedClientLoader extends Logging {
/**
* Creates isolated Hive client loaders by downloading the requested version from maven.
*/
def forVersion(
hiveMetastoreVersion: String,
hadoopVersion: String,
sparkConf: SparkConf,
hadoopConf: Configuration,
config: Map[String, String] = Map.empty,
ivyPath: Option[String] = None,
sharedPrefixes: Seq[String] = Seq.empty,
barrierPrefixes: Seq[String] = Seq.empty): IsolatedClientLoader = synchronized {
val resolvedVersion = hiveVersion(hiveMetastoreVersion)
// We will use Hadoop 2.7 if we cannot resolve the Hadoop artifact.
val files = if (resolvedVersions.contains((resolvedVersion, hadoopVersion))) {
resolvedVersions((resolvedVersion, hadoopVersion))
} else {
val remoteRepos = sparkConf.get(SQLConf.ADDITIONAL_REMOTE_REPOSITORIES)
val (downloadedFiles, actualHadoopVersion) =
try {
(downloadVersion(resolvedVersion, hadoopVersion, ivyPath, remoteRepos), hadoopVersion)
} catch {
case e: RuntimeException if e.getMessage.contains("hadoop") =>
// If the error message contains hadoop, it is probably because the hadoop
// version cannot be resolved.
val fallbackVersion = "2.7.4"
logWarning(s"Failed to resolve Hadoop artifacts for the version $hadoopVersion. We " +
s"will change the hadoop version from $hadoopVersion to $fallbackVersion and try " +
"again. It is recommended to set jars used by Hive metastore client through " +
"spark.sql.hive.metastore.jars in the production environment.")
(downloadVersion(
resolvedVersion, fallbackVersion, ivyPath, remoteRepos), fallbackVersion)
}
resolvedVersions.put((resolvedVersion, actualHadoopVersion), downloadedFiles)
resolvedVersions((resolvedVersion, actualHadoopVersion))
}
new IsolatedClientLoader(
hiveVersion(hiveMetastoreVersion),
sparkConf,
execJars = files,
hadoopConf = hadoopConf,
config = config,
sharedPrefixes = sharedPrefixes,
barrierPrefixes = barrierPrefixes)
}
def hiveVersion(version: String): HiveVersion = version match {
case "12" | "0.12" | "0.12.0" => hive.v12
case "13" | "0.13" | "0.13.0" | "0.13.1" => hive.v13
case "14" | "0.14" | "0.14.0" => hive.v14
case "1.0" | "1.0.0" | "1.0.1" => hive.v1_0
case "1.1" | "1.1.0" | "1.1.1" => hive.v1_1
case "1.2" | "1.2.0" | "1.2.1" | "1.2.2" => hive.v1_2
case "2.0" | "2.0.0" | "2.0.1" => hive.v2_0
case "2.1" | "2.1.0" | "2.1.1" => hive.v2_1
case "2.2" | "2.2.0" => hive.v2_2
case "2.3" | "2.3.0" | "2.3.1" | "2.3.2" | "2.3.3" | "2.3.4" | "2.3.5" | "2.3.6" | "2.3.7" =>
hive.v2_3
case "3.0" | "3.0.0" => hive.v3_0
case "3.1" | "3.1.0" | "3.1.1" | "3.1.2" => hive.v3_1
case version =>
throw new UnsupportedOperationException(s"Unsupported Hive Metastore version ($version). " +
s"Please set ${HiveUtils.HIVE_METASTORE_VERSION.key} with a valid version.")
}
private def downloadVersion(
version: HiveVersion,
hadoopVersion: String,
ivyPath: Option[String],
remoteRepos: String): Seq[URL] = {
val hadoopJarNames = if (hadoopVersion.startsWith("3")) {
Seq(s"org.apache.hadoop:hadoop-client-api:$hadoopVersion",
s"org.apache.hadoop:hadoop-client-runtime:$hadoopVersion")
} else {
Seq(s"org.apache.hadoop:hadoop-client:$hadoopVersion")
}
val hiveArtifacts = version.extraDeps ++
Seq("hive-metastore", "hive-exec", "hive-common", "hive-serde")
.map(a => s"org.apache.hive:$a:${version.fullVersion}") ++
Seq("com.google.guava:guava:14.0.1") ++ hadoopJarNames
val extraExclusions = if (hadoopVersion.startsWith("3")) {
// This dependency, introduced by lower versions of Hive, could conflict with the jars in Hadoop 3.2+,
// so exclude it here in favor of the ones in Hadoop 3.2+.
Seq("org.apache.hadoop:hadoop-auth")
} else {
Seq.empty
}
val classpath = quietly {
SparkSubmitUtils.resolveMavenCoordinates(
hiveArtifacts.mkString(","),
SparkSubmitUtils.buildIvySettings(
Some(remoteRepos),
ivyPath),
exclusions = version.exclusions ++ extraExclusions)
}
val allFiles = classpath.split(",").map(new File(_)).toSet
// TODO: Remove copy logic.
val tempDir = Utils.createTempDir(namePrefix = s"hive-${version}")
allFiles.foreach(f => FileUtils.copyFileToDirectory(f, tempDir))
logInfo(s"Downloaded metastore jars to ${tempDir.getCanonicalPath}")
tempDir.listFiles().map(_.toURI.toURL)
}
// A map from a given pair of HiveVersion and Hadoop version to jar files.
// It is only used by forVersion.
private val resolvedVersions =
new scala.collection.mutable.HashMap[(HiveVersion, String), Seq[URL]]
}
/**
* Creates a [[HiveClient]] using a classloader that works according to the following rules:
* - Shared classes: Java, Scala, logging, and Spark classes are delegated to `baseClassLoader`
* allowing the results of calls to the [[HiveClient]] to be visible externally.
* - Hive classes: new instances are loaded from `execJars`. These classes are not
* accessible externally due to their custom loading.
* - [[HiveClientImpl]]: a new copy is created for each instance of `IsolatedClassLoader`.
* This new instance is able to see a specific version of hive without using reflection. Since
* this is a unique instance, it is not visible externally other than as a generic
* [[HiveClient]], unless `isolationOn` is set to `false`.
*
* @param version The version of hive on the classpath. used to pick specific function signatures
* that are not compatible across versions.
* @param execJars A collection of jar files that must include hive and hadoop.
* @param config A set of options that will be added to the HiveConf of the constructed client.
* @param isolationOn When true, custom versions of barrier classes will be constructed. Must be
* true unless loading the version of hive that is on Spark's classloader.
* @param baseClassLoader The spark classloader that is used to load shared classes.
*/
private[hive] class IsolatedClientLoader(
val version: HiveVersion,
val sparkConf: SparkConf,
val hadoopConf: Configuration,
val execJars: Seq[URL] = Seq.empty,
val config: Map[String, String] = Map.empty,
val isolationOn: Boolean = true,
val baseClassLoader: ClassLoader = Thread.currentThread().getContextClassLoader,
val sharedPrefixes: Seq[String] = Seq.empty,
val barrierPrefixes: Seq[String] = Seq.empty)
extends Logging {
/** All jars used by the hive specific classloader. */
protected def allJars = execJars.toArray
protected def isSharedClass(name: String): Boolean = {
val isHadoopClass =
name.startsWith("org.apache.hadoop.") && !name.startsWith("org.apache.hadoop.hive.")
name.startsWith("org.slf4j") ||
name.startsWith("org.apache.log4j") || // log4j1.x
name.startsWith("org.apache.logging.log4j") || // log4j2
name.startsWith("org.apache.spark.") ||
isHadoopClass ||
name.startsWith("scala.") ||
(name.startsWith("com.google") && !name.startsWith("com.google.cloud")) ||
name.startsWith("java.") ||
name.startsWith("javax.sql.") ||
sharedPrefixes.exists(name.startsWith)
}
/** True if `name` refers to a spark class that must see specific version of Hive. */
protected def isBarrierClass(name: String): Boolean =
name.startsWith(classOf[HiveClientImpl].getName) ||
name.startsWith(classOf[Shim].getName) ||
name.startsWith(classOf[ShimLoader].getName) ||
barrierPrefixes.exists(name.startsWith)
protected def classToPath(name: String): String =
name.replaceAll("\\.", "/") + ".class"
/**
* The classloader that is used to load an isolated version of Hive.
* This classloader is a special URLClassLoader that exposes the addURL method.
* So, when we add a jar, we can add this new jar directly through the addURL method
* instead of stacking a new URLClassLoader on top of it.
*/
private[hive] val classLoader: MutableURLClassLoader = {
val isolatedClassLoader =
if (isolationOn) {
if (allJars.isEmpty) {
// See HiveUtils; this is the Java 9+ + builtin mode scenario
baseClassLoader
} else {
val rootClassLoader: ClassLoader =
if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_9)) {
// In Java 9, the boot classloader can see few JDK classes. The intended parent
// classloader for delegation is now the platform classloader.
// See http://java9.wtf/class-loading/
val platformCL =
classOf[ClassLoader].getMethod("getPlatformClassLoader").
invoke(null).asInstanceOf[ClassLoader]
// Check to make sure that the root classloader does not know about Hive.
assert(Try(platformCL.loadClass("org.apache.hadoop.hive.conf.HiveConf")).isFailure)
platformCL
} else {
// The boot classloader is represented by null (the instance itself isn't accessible)
// and before Java 9 can see all JDK classes
null
}
new URLClassLoader(allJars, rootClassLoader) {
override def loadClass(name: String, resolve: Boolean): Class[_] = {
val loaded = findLoadedClass(name)
if (loaded == null) doLoadClass(name, resolve) else loaded
}
def doLoadClass(name: String, resolve: Boolean): Class[_] = {
val classFileName = name.replaceAll("\\.", "/") + ".class"
if (isBarrierClass(name)) {
// For barrier classes, we construct a new copy of the class.
val bytes = IOUtils.toByteArray(baseClassLoader.getResourceAsStream(classFileName))
logDebug(s"custom defining: $name - ${util.Arrays.hashCode(bytes)}")
defineClass(name, bytes, 0, bytes.length)
} else if (!isSharedClass(name)) {
logDebug(s"hive class: $name - ${getResource(classToPath(name))}")
super.loadClass(name, resolve)
} else {
// For shared classes, we delegate to baseClassLoader, but fall back in case the
// class is not found.
logDebug(s"shared class: $name")
try {
baseClassLoader.loadClass(name)
} catch {
case _: ClassNotFoundException =>
super.loadClass(name, resolve)
}
}
}
}
}
} else {
baseClassLoader
}
// Right now, we create a URLClassLoader that gives preference to isolatedClassLoader
// over its own URLs when it loads classes and resources.
// We may want to use ChildFirstURLClassLoader based on
// the configuration of spark.executor.userClassPathFirst, which gives preference
// to its own URLs over the parent class loader (see Executor's createClassLoader method).
new NonClosableMutableURLClassLoader(isolatedClassLoader)
}
private[hive] def addJar(path: URL): Unit = synchronized {
classLoader.addURL(path)
}
/** The isolated client interface to Hive. */
private[hive] def createClient(): HiveClient = synchronized {
val warehouseDir = Option(hadoopConf.get(ConfVars.METASTOREWAREHOUSE.varname))
if (!isolationOn) {
return new HiveClientImpl(version, warehouseDir, sparkConf, hadoopConf, config,
baseClassLoader, this)
}
// Pre-reflective instantiation setup.
logDebug("Initializing the logger to avoid disaster...")
val origLoader = Thread.currentThread().getContextClassLoader
Thread.currentThread.setContextClassLoader(classLoader)
try {
classLoader
.loadClass(classOf[HiveClientImpl].getName)
.getConstructors.head
.newInstance(version, warehouseDir, sparkConf, hadoopConf, config, classLoader, this)
.asInstanceOf[HiveClient]
} catch {
case e: InvocationTargetException =>
if (e.getCause().isInstanceOf[NoClassDefFoundError]) {
val cnf = e.getCause().asInstanceOf[NoClassDefFoundError]
throw new ClassNotFoundException(
s"$cnf when creating Hive client using classpath: ${execJars.mkString(", ")}\\n" +
"Please make sure that jars for your version of hive and hadoop are included in the " +
s"paths passed to ${HiveUtils.HIVE_METASTORE_JARS.key}.", e)
} else {
throw e
}
} finally {
Thread.currentThread.setContextClassLoader(origLoader)
}
}
/**
* The place holder for shared Hive client for all the HiveContext sessions (they share an
* IsolatedClientLoader).
*/
private[hive] var cachedHive: Any = null
}
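A hedged sketch of how the loader above is typically obtained: resolve jars for a requested metastore version via forVersion, then create the isolated client. The version strings are illustrative, the SparkConf and Hadoop Configuration instances are assumed to exist in the surrounding code, and both forVersion and createClient are private[hive], so this only applies to code living inside that package.
// Sketch only: assumes `sparkConf: SparkConf` and `hadoopConf: Configuration` are already in scope.
val loader = IsolatedClientLoader.forVersion(
  hiveMetastoreVersion = "2.3.7",
  hadoopVersion = "2.7.4",
  sparkConf = sparkConf,
  hadoopConf = hadoopConf)
val hiveClient = loader.createClient()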
|
shuangshuangwang/spark
|
sql/hive/src/main/scala/org/apache/spark/sql/hive/client/IsolatedClientLoader.scala
|
Scala
|
apache-2.0
| 14,843
|
package spark.streaming
import akka.actor.Actor
import akka.actor.IO
import akka.actor.IOManager
import akka.actor.Props
import akka.util.ByteString
import dstream.SparkFlumeEvent
import java.net.{InetSocketAddress, SocketException, Socket, ServerSocket}
import java.io.{File, BufferedWriter, OutputStreamWriter}
import java.util.concurrent.{TimeUnit, ArrayBlockingQueue}
import collection.mutable.{SynchronizedBuffer, ArrayBuffer}
import util.ManualClock
import spark.storage.StorageLevel
import spark.streaming.receivers.Receiver
import spark.Logging
import scala.util.Random
import org.apache.commons.io.FileUtils
import org.scalatest.BeforeAndAfter
import org.apache.flume.source.avro.AvroSourceProtocol
import org.apache.flume.source.avro.AvroFlumeEvent
import org.apache.flume.source.avro.Status
import org.apache.avro.ipc.{specific, NettyTransceiver}
import org.apache.avro.ipc.specific.SpecificRequestor
import java.nio.ByteBuffer
import collection.JavaConversions._
import java.nio.charset.Charset
import com.google.common.io.Files
class InputStreamsSuite extends TestSuiteBase with BeforeAndAfter {
val testPort = 9999
override def checkpointDir = "checkpoint"
before {
System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
}
after {
// To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
System.clearProperty("spark.driver.port")
}
test("socket input stream") {
// Start the server
val testServer = new TestServer()
testServer.start()
// Set up the streaming context and input streams
val ssc = new StreamingContext(master, framework, batchDuration)
val networkStream = ssc.socketTextStream("localhost", testServer.port, StorageLevel.MEMORY_AND_DISK)
val outputBuffer = new ArrayBuffer[Seq[String]] with SynchronizedBuffer[Seq[String]]
val outputStream = new TestOutputStream(networkStream, outputBuffer)
def output = outputBuffer.flatMap(x => x)
ssc.registerOutputStream(outputStream)
ssc.start()
// Feed data to the server to send to the network receiver
val clock = ssc.scheduler.clock.asInstanceOf[ManualClock]
val input = Seq(1, 2, 3, 4, 5)
val expectedOutput = input.map(_.toString)
Thread.sleep(1000)
for (i <- 0 until input.size) {
testServer.send(input(i).toString + "\n")
Thread.sleep(500)
clock.addToTime(batchDuration.milliseconds)
}
Thread.sleep(1000)
logInfo("Stopping server")
testServer.stop()
logInfo("Stopping context")
ssc.stop()
// Verify whether data received was as expected
logInfo("--------------------------------")
logInfo("output.size = " + outputBuffer.size)
logInfo("output")
outputBuffer.foreach(x => logInfo("[" + x.mkString(",") + "]"))
logInfo("expected output.size = " + expectedOutput.size)
logInfo("expected output")
expectedOutput.foreach(x => logInfo("[" + x.mkString(",") + "]"))
logInfo("--------------------------------")
// Verify whether all the elements received are as expected
// (whether the elements were received one in each interval is not verified)
assert(output.size === expectedOutput.size)
for (i <- 0 until output.size) {
assert(output(i) === expectedOutput(i))
}
}
test("flume input stream") {
// Set up the streaming context and input streams
val ssc = new StreamingContext(master, framework, batchDuration)
val flumeStream = ssc.flumeStream("localhost", testPort, StorageLevel.MEMORY_AND_DISK)
val outputBuffer = new ArrayBuffer[Seq[SparkFlumeEvent]]
with SynchronizedBuffer[Seq[SparkFlumeEvent]]
val outputStream = new TestOutputStream(flumeStream, outputBuffer)
ssc.registerOutputStream(outputStream)
ssc.start()
val clock = ssc.scheduler.clock.asInstanceOf[ManualClock]
val input = Seq(1, 2, 3, 4, 5)
Thread.sleep(1000)
val transceiver = new NettyTransceiver(new InetSocketAddress("localhost", testPort));
val client = SpecificRequestor.getClient(
classOf[AvroSourceProtocol], transceiver);
for (i <- 0 until input.size) {
val event = new AvroFlumeEvent
event.setBody(ByteBuffer.wrap(input(i).toString.getBytes()))
event.setHeaders(Map[CharSequence, CharSequence]("test" -> "header"))
client.append(event)
Thread.sleep(500)
clock.addToTime(batchDuration.milliseconds)
}
val startTime = System.currentTimeMillis()
while (outputBuffer.size < input.size && System.currentTimeMillis() - startTime < maxWaitTimeMillis) {
logInfo("output.size = " + outputBuffer.size + ", input.size = " + input.size)
Thread.sleep(100)
}
Thread.sleep(1000)
val timeTaken = System.currentTimeMillis() - startTime
assert(timeTaken < maxWaitTimeMillis, "Operation timed out after " + timeTaken + " ms")
logInfo("Stopping context")
ssc.stop()
val decoder = Charset.forName("UTF-8").newDecoder()
assert(outputBuffer.size === input.length)
for (i <- 0 until outputBuffer.size) {
assert(outputBuffer(i).size === 1)
val str = decoder.decode(outputBuffer(i).head.event.getBody)
assert(str.toString === input(i).toString)
assert(outputBuffer(i).head.event.getHeaders.get("test") === "header")
}
}
test("file input stream") {
// Disable manual clock as FileInputDStream does not work with manual clock
System.clearProperty("spark.streaming.clock")
// Set up the streaming context and input streams
val testDir = Files.createTempDir()
val ssc = new StreamingContext(master, framework, batchDuration)
val fileStream = ssc.textFileStream(testDir.toString)
val outputBuffer = new ArrayBuffer[Seq[String]] with SynchronizedBuffer[Seq[String]]
def output = outputBuffer.flatMap(x => x)
val outputStream = new TestOutputStream(fileStream, outputBuffer)
ssc.registerOutputStream(outputStream)
ssc.start()
// Create files in the temporary directory so that Spark Streaming can read data from it
val input = Seq(1, 2, 3, 4, 5)
val expectedOutput = input.map(_.toString)
Thread.sleep(1000)
for (i <- 0 until input.size) {
val file = new File(testDir, i.toString)
FileUtils.writeStringToFile(file, input(i).toString + "\n")
logInfo("Created file " + file)
Thread.sleep(batchDuration.milliseconds)
Thread.sleep(1000)
}
val startTime = System.currentTimeMillis()
Thread.sleep(1000)
val timeTaken = System.currentTimeMillis() - startTime
assert(timeTaken < maxWaitTimeMillis, "Operation timed out after " + timeTaken + " ms")
logInfo("Stopping context")
ssc.stop()
// Verify whether data received by Spark Streaming was as expected
logInfo("--------------------------------")
logInfo("output, size = " + outputBuffer.size)
outputBuffer.foreach(x => logInfo("[" + x.mkString(",") + "]"))
logInfo("expected output, size = " + expectedOutput.size)
expectedOutput.foreach(x => logInfo("[" + x.mkString(",") + "]"))
logInfo("--------------------------------")
// Verify whether all the elements received are as expected
// (whether the elements were received one in each interval is not verified)
assert(output.toList === expectedOutput.toList)
FileUtils.deleteDirectory(testDir)
// Enable manual clock back again for other tests
System.setProperty("spark.streaming.clock", "spark.streaming.util.ManualClock")
}
test("actor input stream") {
// Start the server
val testServer = new TestServer()
val port = testServer.port
testServer.start()
// Set up the streaming context and input streams
val ssc = new StreamingContext(master, framework, batchDuration)
val networkStream = ssc.actorStream[String](Props(new TestActor(port)), "TestActor",
StorageLevel.MEMORY_AND_DISK) // Had to pass the local value of port to avoid closing over the entire scope
val outputBuffer = new ArrayBuffer[Seq[String]] with SynchronizedBuffer[Seq[String]]
val outputStream = new TestOutputStream(networkStream, outputBuffer)
def output = outputBuffer.flatMap(x => x)
ssc.registerOutputStream(outputStream)
ssc.start()
// Feed data to the server to send to the network receiver
val clock = ssc.scheduler.clock.asInstanceOf[ManualClock]
val input = 1 to 9
val expectedOutput = input.map(x => x.toString)
Thread.sleep(1000)
for (i <- 0 until input.size) {
testServer.send(input(i).toString)
Thread.sleep(500)
clock.addToTime(batchDuration.milliseconds)
}
Thread.sleep(1000)
logInfo("Stopping server")
testServer.stop()
logInfo("Stopping context")
ssc.stop()
// Verify whether data received was as expected
logInfo("--------------------------------")
logInfo("output.size = " + outputBuffer.size)
logInfo("output")
outputBuffer.foreach(x => logInfo("[" + x.mkString(",") + "]"))
logInfo("expected output.size = " + expectedOutput.size)
logInfo("expected output")
expectedOutput.foreach(x => logInfo("[" + x.mkString(",") + "]"))
logInfo("--------------------------------")
// Verify whether all the elements received are as expected
// (whether the elements were received one in each interval is not verified)
assert(output.size === expectedOutput.size)
for (i <- 0 until output.size) {
assert(output(i) === expectedOutput(i))
}
}
}
/** This is a server for testing the network input stream. */
class TestServer() extends Logging {
val queue = new ArrayBlockingQueue[String](100)
val serverSocket = new ServerSocket(0)
val servingThread = new Thread() {
override def run() {
try {
while(true) {
logInfo("Accepting connections on port " + port)
val clientSocket = serverSocket.accept()
logInfo("New connection")
try {
clientSocket.setTcpNoDelay(true)
val outputStream = new BufferedWriter(new OutputStreamWriter(clientSocket.getOutputStream))
while(clientSocket.isConnected) {
val msg = queue.poll(100, TimeUnit.MILLISECONDS)
if (msg != null) {
outputStream.write(msg)
outputStream.flush()
logInfo("Message '" + msg + "' sent")
}
}
} catch {
case e: SocketException => logError("TestServer error", e)
} finally {
logInfo("Connection closed")
if (!clientSocket.isClosed) clientSocket.close()
}
}
} catch {
case ie: InterruptedException =>
} finally {
serverSocket.close()
}
}
}
def start() { servingThread.start() }
def send(msg: String) { queue.add(msg) }
def stop() { servingThread.interrupt() }
def port = serverSocket.getLocalPort
}
object TestServer {
def main(args: Array[String]) {
val s = new TestServer()
s.start()
while(true) {
Thread.sleep(1000)
s.send("hello")
}
}
}
class TestActor(port: Int) extends Actor with Receiver {
def bytesToString(byteString: ByteString) = byteString.utf8String
override def preStart = IOManager(context.system).connect(new InetSocketAddress(port))
def receive = {
case IO.Read(socket, bytes) =>
pushBlock(bytesToString(bytes))
}
}
|
koeninger/spark
|
streaming/src/test/scala/spark/streaming/InputStreamsSuite.scala
|
Scala
|
bsd-3-clause
| 11,403
|
/* scala-stm - (c) 2009-2010, Stanford University, PPL */
package scala.concurrent.stm
import scala.collection.{immutable, mutable, generic}
object TSet {
object View extends generic.MutableSetFactory[TSet.View] {
implicit def canBuildFrom[A]: generic.CanBuildFrom[Coll, A, TSet.View[A]] = setCanBuildFrom[A]
override def empty[A] = TSet.empty[A].single
override def newBuilder[A] = new mutable.Builder[A, View[A]] {
private val underlying = TSet.newBuilder[A]
def clear() { underlying.clear() }
def += (x: A): this.type = { underlying += x ; this }
def result() = underlying.result().single
}
override def apply[A](xs: A*): TSet.View[A] = (TSet.newBuilder[A] ++= xs).result().single
}
/** A `Set` that provides atomic execution of all of its methods. */
trait View[A] extends mutable.Set[A] with mutable.SetLike[A, View[A]] {
/** Returns the `TSet` perspective on this transactional set, which
* provides set functionality only inside atomic blocks.
*/
def tset: TSet[A]
def clone: TSet.View[A]
/** Takes an atomic snapshot of this transactional set. */
def snapshot: immutable.Set[A]
override def empty: View[A] = TSet.empty[A].single
override def companion: generic.GenericCompanion[View] = View
override protected[this] def newBuilder: mutable.Builder[A, View[A]] = View.newBuilder[A]
}
/** Constructs and returns a new empty `TSet`. */
def empty[A]: TSet[A] = impl.STMImpl.instance.newTSet[A]
/** Returns a builder of `TSet`. */
def newBuilder[A]: mutable.Builder[A, TSet[A]] = impl.STMImpl.instance.newTSetBuilder[A]
/** Constructs and returns a new `TSet` that will contain the elements from
* `xs`.
*/
def apply[A](xs: A*): TSet[A] = (newBuilder[A] ++= xs).result()
/** Allows a `TSet` in a transactional context to be used as a `Set`. */
implicit def asSet[A](s: TSet[A])(implicit txn: InTxn): View[A] = s.single
}
/** A transactional set implementation that requires that all of its set-like
* operations be called from inside an atomic block. Rather than extending
* `Set`, an implicit conversion is provided from `TSet` to `Set` if the
* current scope is part of an atomic block (see `TSet.asSet`).
*
* The elements (with type `A`) must be immutable, or at least not modified
* while they are in the set. The `TSet` implementation assumes that it can
* safely perform equality and hash checks outside a transaction without
* affecting atomicity.
*
* @author Nathan Bronson
*/
trait TSet[A] {
/** Returns an instance that provides transactional set functionality without
* requiring that operations be performed inside the static scope of an
* atomic block.
*/
def single: TSet.View[A]
def clone(implicit txn: InTxn): TSet[A] = single.clone.tset
// The following methods work fine via the asSet mechanism, but are heavily
// used. We add transactional versions of them to allow overrides.
def isEmpty(implicit txn: InTxn): Boolean
def size(implicit txn: InTxn): Int
def foreach[U](f: A => U)(implicit txn: InTxn)
def contains(elem: A)(implicit txn: InTxn): Boolean
def apply(elem: A)(implicit txn: InTxn): Boolean = contains(elem)
def add(elem: A)(implicit txn: InTxn): Boolean
def update(elem: A, included: Boolean)(implicit txn: InTxn) { if (included) add(elem) else remove(elem) }
def remove(elem: A)(implicit txn: InTxn): Boolean
// The following methods return the wrong receiver when invoked via the asSet
// conversion. They are exactly the methods of mutable.Set whose return type
// is this.type.
def += (x: A)(implicit txn: InTxn): this.type = { add(x) ; this }
def += (x1: A, x2: A, xs: A*)(implicit txn: InTxn): this.type = { this += x1 += x2 ++= xs }
def ++= (xs: TraversableOnce[A])(implicit txn: InTxn): this.type = { for (x <- xs) this += x ; this }
def -= (x: A)(implicit txn: InTxn): this.type = { remove(x) ; this }
def -= (x1: A, x2: A, xs: A*)(implicit txn: InTxn): this.type = { this -= x1 -= x2 --= xs }
def --= (xs: TraversableOnce[A])(implicit txn: InTxn): this.type = { for (x <- xs) this -= x ; this }
def retain(p: A => Boolean)(implicit txn: InTxn): this.type
}
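A brief usage sketch for the trait above: the transactional operations need an InTxn in scope, which an atomic block supplies implicitly, while the single view allows access outside a transaction. The element values are arbitrary examples.
import scala.concurrent.stm._
val names = TSet("alice", "bob")
atomic { implicit txn =>
  names += "carol"                       // transactional add
  if (names.contains("alice")) names -= "bob"
}
val frozen = names.single.snapshot       // atomic snapshot without an enclosing transaction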
|
djspiewak/scala-stm
|
src/main/scala/scala/concurrent/stm/TSet.scala
|
Scala
|
bsd-3-clause
| 4,241
|
object SeveralDuplicates {
def foo(i: Int) {
/*start*/
println(i + 1)
/*end*/
println(2 + 1)
}
def bar() {
println(3 + 1)
}
println(4 + 1)
}
/*
object SeveralDuplicates {
def foo(i: Int) {
/*start*/
testMethodName(i)
/*end*/
testMethodName(2)
}
def testMethodName(i: Int) {
println(i + 1)
}
def bar() {
testMethodName(3)
}
testMethodName(4)
}
*/
|
consulo/consulo-scala
|
testdata/extractMethod/duplicates/SeveralDuplicates.scala
|
Scala
|
apache-2.0
| 418
|
package org.atmosphere.cpr
import java.net.URI
import org.scalatra.atmosphere.{ RedisScalatraBroadcaster, ScalatraBroadcaster }
trait BroadcasterConf {
def broadcasterClass: Class[_ <: ScalatraBroadcaster]
def uri: URI
def extraSetup: Broadcaster => Unit // To perform optional plugin-specific Broadcaster setup
}
/**
*
* Basic Configuration-holder for Scalatra Atmosphere Broadcaster configuration
* @param broadcasterClass Class[_<:ScalatraBroadcaster]
* @param uri [[URI]] defaults to http://127.0.0.1
* @param extraSetup Broadcaster => Unit Function that is passed an initialized [[Broadcaster]] in order to allow for
* optional plugin-specific Broadcaster setup. Defaults to doing nothing.
*/
sealed case class ScalatraBroadcasterConfig(broadcasterClass: Class[_ <: ScalatraBroadcaster],
uri: URI = URI.create("http://127.0.0.1"),
extraSetup: Broadcaster => Unit = { b => }) extends BroadcasterConf
/**
* Convenient configuration class for RedisBroadcaster
*
* Using this class will automatically take care of setting Redis auth on the underlying
* RedisBroadcaster if the auth parameter is given an argument
*
* @param uri [[URI]] for the Redis Server. Defaults to redis://127.0.0.1:6379
* @param auth An Option[String] if the Redis Server requires a password. Defaults to None
*/
sealed case class RedisScalatraBroadcasterConfig(uri: URI = URI.create("redis://127.0.0.1:6379"), auth: Option[String] = None) extends BroadcasterConf {
final def broadcasterClass = classOf[RedisScalatraBroadcaster]
final def extraSetup = { b: Broadcaster =>
auth.foreach(b.asInstanceOf[RedisScalatraBroadcaster].setAuth(_))
}
}
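For illustration, a Redis-backed configuration with authentication might be built as below; the host and password are placeholders, and the auth value is applied to the underlying RedisScalatraBroadcaster by extraSetup as defined above.
import java.net.URI
val redisBroadcaster = RedisScalatraBroadcasterConfig(
  uri = URI.create("redis://redis.internal.example:6379"),
  auth = Some("changeme"))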
|
dozed/scalatra
|
atmosphere/src/main/scala/org/atmosphere/cpr/broadcaster_configs.scala
|
Scala
|
bsd-2-clause
| 1,673
|
package org.greencheek.spray.cache.memcached
// adds await on the future
import spray.util._
import org.greencheek.util.memcached.{WithMemcached, MemcachedBasedSpec}
import akka.actor.ActorSystem
import net.spy.memcached.ConnectionFactoryBuilder.Protocol
import scala.concurrent._
import ExecutionContext.Implicits.global
import org.greencheek.util.PortUtil
import org.specs2.runner.JUnitRunner
import org.junit.runner.RunWith
/**
* Created by dominictootell on 30/03/2014.
*/
@RunWith(classOf[JUnitRunner])
class MultiMemcachedHostsSpec extends MemcachedBasedSpec {
implicit val system = ActorSystem()
val memcachedContext = WithMemcached(false)
"A Memcached cache" >> {
"can store values when one host is unavailable" in memcachedContext {
val randomPort = portUtil.getPort(portUtil.findFreePort)
val hosts = "localhost:"+memcachedContext.memcached.port + ",localhost:"+randomPort
val cache = new MemcachedCache[String] ( memcachedHosts = hosts, protocol = Protocol.TEXT,
doHostConnectionAttempt = true)
cache("1")("A").await === "A"
cache("2")("B").await === "B"
cache.get("1") must beSome
cache.get("2") must beSome
}
}
}
|
tootedom/spray-cache-spymemcached
|
src/test/scala/org/greencheek/spray/cache/memcached/MultiMemcachedHostsSpec.scala
|
Scala
|
apache-2.0
| 1,209
|
package chat.tox.antox.callbacks
import android.content.Context
import chat.tox.antox.data.State
import chat.tox.antox.utils.AntoxLog
import chat.tox.antox.wrapper.CallNumber
import im.tox.tox4j.av.data.{AudioChannels, SamplingRate}
class AntoxOnAudioReceiveFrameCallback(private var ctx: Context) {
def audioReceiveFrame(callNumber: CallNumber, pcm: Array[Short], channels: AudioChannels, samplingRate: SamplingRate)(state: Unit): Unit = {
State.callManager.get(callNumber).foreach(_.onAudioFrame(pcm, channels, samplingRate))
AntoxLog.log(AntoxLog.Priority.DEBUG, s"received audio frame of length ${pcm.length} with channels $channels")
}
}
|
subliun/Antox
|
app/src/main/scala/chat/tox/antox/callbacks/AntoxOnAudioReceiveFrameCallback.scala
|
Scala
|
gpl-3.0
| 657
|
package toguru.api
import toguru.impl.{ToggleState, TogglesString}
/**
* Contains the information needed by [[toguru.api.Toggle]]s to make toggling decisions.
*/
trait Toggling {
def client: ClientInfo
def activations: Activations
/**
* Returns the activation state of the given toggle.
*
* @param toggle the toggle in question
* @return
*/
def apply(toggle: Toggle): Boolean =
client.forcedToggle(toggle.id).getOrElse(activations(toggle).applies(client))
/**
* Returns all activations.
*
* @return
*/
def apply(): Iterable[ToggleState] = activations()
/**
* Returns a toggling string for downstream services.
*
* @param service the name of the downstream service - must match the "service" tag or be included in the "services" tag defined on the toggle on
* the toguru server.
* @return
*/
def toggleStringForService(service: String): String = {
val toggleStates =
activations
.togglesFor(service)
.map {
case (toggleId, condition) =>
toggleId -> client.forcedToggle(toggleId).getOrElse(condition.applies(client))
}
TogglesString.build(toggleStates)
}
/**
* Returns a toggling string that can be sent upstream. It considers:
* - Any toggle that was forced by the client
* - Any toggle that is tagged with one of the given services and whose rollout percentage is strictly
* between 0 and 100%, or that is locally defined as always on.
*
* @param services A collection of service names to be evaluated against the service or services tags of the toggle.
* Typically, if a service B is interested in receiving toggle X from service A, toggle X should add
* service A to its `services` tag (i.e the `services` tag should contain the name of the services
* that should forward the toggle).
* @return a string that can be added to a toguru querystring or header
*/
def buildForwardingToggleString(services: Set[String]): String =
TogglesString.build(
activations
.apply()
.map(toggleState =>
(
toggleState.id,
client.forcedToggle(toggleState.id),
services.exists(toggleState.serviceTagsContains),
toggleState.rolloutPercentage,
toggleState.condition.applies(client)
)
)
.collect {
case (id, Some(forcedToggleValue), _, _, _) => id -> forcedToggleValue
case (id, _, true, Some(percentage), condition) if percentage > 0 && percentage < 100 => id -> condition
case (id, _, true, None, true) => id -> true
}
)
}
/**
* Simple case class-based implementation of the Toggling trait. See also [[toguru.api.ToguruClient#apply]]
*
* @param client the client information to use
* @param activations the activation conditions to use
*/
final case class TogglingInfo(client: ClientInfo, activations: Activations) extends Toggling
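A short usage sketch of the trait above: wrap a ClientInfo and Activations in TogglingInfo, evaluate an individual toggle, and build the string forwarded to a downstream service. The clientInfo and currentActivations values, and the construction of Toggle from a plain id, are assumptions made for the sake of the example.
val toggling: Toggling = TogglingInfo(clientInfo, currentActivations) // both assumed to be in scope
val checkoutEnabled = toggling(Toggle("new-checkout-flow"))           // assumes Toggle wraps a toggle id
val forwardedHeader = toggling.toggleStringForService("order-service")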
|
AutoScout24/toguru-scala-client
|
core/src/main/scala/toguru/api/Toggling.scala
|
Scala
|
mit
| 3,132
|
package breeze.stats.random
/*
Copyright 2009 David Hall, Daniel Ramage
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* Class for generating random numbers given
* the ability to generate a uniform int between
* MIN_INT and MAX_INT.
*
* This is designed to take the place of java.util.Random.
*
* @author dlwh
*/
trait RandomGenerator {
def nextInt:Int
def nextInt(n: Int):Int = {
if (n<=0) throw new IllegalArgumentException("n must be positive");
var result = 0;
var bits = 0;
do {
bits = math.abs(nextInt)
result = (bits) % n;
} while(bits - result + (n-1) < 0);
result
}
def nextDouble:Double = {
// Taken from Cern
val ret = (nextLong.toDouble - -9.223372036854776E18) * 5.421010862427522E-20;
if(ret > 0.0 && ret <1.0) ret
else nextDouble
}
def nextLong = {
// also from CERN:
((nextInt & 0xFFFFFFFFL) << 32) | ((nextInt & 0xFFFFFFFFL));
}
def nextFloat: Float = nextDouble.toFloat;
def nextBoolean:Boolean = if((nextInt & 1) == 0) false else true;
private var haveNextNextGaussian: Boolean = false;
private var nextNextGaussian: Double = 0.0;
def nextGaussian: Double = synchronized {
// Cribbed from Sun's javadocs
if (haveNextNextGaussian) {
haveNextNextGaussian = false;
nextNextGaussian;
} else {
var v1 = 0.0;
var v2 = 0.0;
var s = 0.0;
do {
v1 = 2 * nextDouble - 1; // between -1.0 and 1.0
v2 = 2 * nextDouble - 1; // between -1.0 and 1.0
s = v1 * v1 + v2 * v2;
} while (s >= 1 || s == 0);
val multiplier = math.sqrt(-2 * math.log(s)/s);
nextNextGaussian = v2 * multiplier;
haveNextNextGaussian = true;
v1 * multiplier;
}
}
}
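Since only nextInt is abstract, a concrete generator can be as small as the sketch below; it delegates to java.util.Random purely for illustration, with the remaining methods derived by the trait.
class JavaUtilBackedGenerator(seed: Long) extends RandomGenerator {
  private val rng = new java.util.Random(seed)
  def nextInt: Int = rng.nextInt()
}
val gen = new JavaUtilBackedGenerator(42L)
val u = gen.nextDouble   // uniform in the open interval (0, 1)
val z = gen.nextGaussian // standard normal via the polar method above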
|
tjhunter/scalanlp-core
|
learn/src/main/scala/breeze/stats/random/RandomGenerator.scala
|
Scala
|
apache-2.0
| 2,238
|
package temportalist.esotericraft.galvanization.common.task.ai.world
import net.minecraft.entity.{EntityCreature, EntityLivingBase}
import net.minecraft.item.ItemStack
import net.minecraft.util.EnumFacing.Axis
import net.minecraft.util.math.BlockPos
import net.minecraft.util.{EnumFacing, EnumHand}
import net.minecraftforge.items.IItemHandler
import temportalist.esotericraft.api.galvanize.ai.{EnumTaskType, GalvanizeTask}
import temportalist.esotericraft.api.init.Details
import temportalist.esotericraft.galvanization.common.entity.IEntityItemUser
import temportalist.esotericraft.galvanization.common.task.ai.core.TaskBase
import temportalist.esotericraft.galvanization.common.task.ai.interfaces.ITaskInventory
import temportalist.origin.api.common.lib.Vect
import temportalist.origin.api.common.utility.Capabilities
/**
*
* Created by TheTemportalist on 6/6/2016.
*
* @author TheTemportalist
*/
@GalvanizeTask(modid = Details.MOD_ID,
name = "itemExtract",
displayName = "Take Items"
)
class TaskItemExtract(
pos: BlockPos, face: EnumFacing
) extends TaskBase(pos, face) with ITaskInventory {
private val speed: Double = 1.2D
private val posVec = new Vect(this.pos) + Vect.CENTER + new Vect(this.face)
// ~~~~~ Task Info ~~~~~
override def getTaskType: EnumTaskType = EnumTaskType.WORLD_INTERACTION
// ~~~~~ AI ~~~~~
override def shouldExecute(entity: EntityCreature): Boolean = {
for (hand <- EnumHand.values()) {
if (entity.getHeldItem(hand) != null) return false
}
this.getFirstUsageStackSlot(entity) >= 0
}
def canEntityUseItem(entity: EntityLivingBase, slot: Int, inventory: IItemHandler = null): Boolean = {
val inv = if (inventory != null) inventory else {
val targetTile = entity.getEntityWorld.getTileEntity(this.getPosition)
if (Capabilities.isInventory(targetTile, this.getFace))
Capabilities.getInventory(targetTile, this.getFace)
else null
}
if (inv == null) return false
inv.getStackInSlot(slot) match {
case stack: ItemStack => this.canEntityUseItem(entity, stack)
case _ => // null
false
}
}
def canEntityUseItem(entity: EntityLivingBase, stack: ItemStack): Boolean = {
entity match {
case itemUser: IEntityItemUser =>
val use = itemUser.canUse(stack)
use
case _ => false
}
}
def getFirstUsageStackSlot(entity: EntityLivingBase): Int = {
val targetTile = entity.getEntityWorld.getTileEntity(this.getPosition)
if (Capabilities.isInventory(targetTile, this.getFace)) {
val inventory = Capabilities.getInventory(targetTile, this.getFace)
for (slot <- 0 until inventory.getSlots) {
if (this.canEntityUseItem(entity, slot, inventory))
return slot
}
}
-1
}
override def updateTask(entity: EntityCreature): Unit = {
val slot = this.getFirstUsageStackSlot(entity)
if (slot < 0) return
val position = new Vect(this.getPosition)
val targetPosMove = position + Vect.CENTER
val targetPosDist = position + Vect.CENTER.suppressAxisGet(Axis.Y)
val ownerDistanceToInventory = (new Vect(entity) - targetPosDist).length
if (ownerDistanceToInventory > 2D)
this.moveEntityTowards(entity,
targetPosMove.x, targetPosMove.y, targetPosMove.z,
this.speed, this.getCanFly)
else {
if (this.canEntityUseItem(entity, slot))
this.extractItem(entity, slot)
}
}
def extractItem(entity: EntityLivingBase, slot: Int): Unit = {
val targetTile = entity.getEntityWorld.getTileEntity(this.getPosition)
if (Capabilities.isInventory(targetTile, this.getFace)) {
val inventory = Capabilities.getInventory(targetTile, this.getFace)
this.extractItem(entity, inventory, slot)
}
}
def extractItem(entity: EntityLivingBase, inventory: IItemHandler, slot: Int): Unit ={
var simStack = inventory.extractItem(slot, 1, true)
if (simStack != null) {
simStack = inventory.extractItem(slot, 1, false)
entity.setHeldItem(EnumHand.MAIN_HAND, simStack.copy())
}
}
// ~~~~~ End ~~~~~
}
|
TheTemportalist/EsoTeriCraft
|
src/main/scala/temportalist/esotericraft/galvanization/common/task/ai/world/TaskItemExtract.scala
|
Scala
|
apache-2.0
| 3,949
|
package com.ing.baker.runtime.javadsl
import java.util
import java.util.Optional
import java.util.concurrent.CompletableFuture
import java.util.function.{BiConsumer, Consumer}
import com.ing.baker.il.{CompiledRecipe, RecipeVisualStyle}
import com.ing.baker.runtime.common.LanguageDataStructures.JavaApi
import com.ing.baker.runtime.common.{RecipeRecord, SensoryEventStatus}
import com.ing.baker.runtime.{common, scaladsl}
import com.ing.baker.types.Value
import javax.annotation.Nonnull
import scala.collection.JavaConverters._
import scala.compat.java8.FutureConverters
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
class Baker(private val baker: scaladsl.Baker) extends common.Baker[CompletableFuture] with JavaApi with AutoCloseable {
override type SensoryEventResultType = SensoryEventResult
override type EventResolutionsType = EventResolutions
override type EventInstanceType = EventInstance
override type RecipeInstanceStateType = RecipeInstanceState
override type InteractionInstanceType = InteractionInstance
override type InteractionInstanceDescriptorType = InteractionInstanceDescriptor
override type BakerEventType = BakerEvent
override type RecipeInstanceMetadataType = RecipeInstanceMetadata
override type RecipeInformationType = RecipeInformation
override type EventMomentType = EventMoment
override type RecipeMetadataType = RecipeEventMetadata
override def close(): Unit = {
Await.result(baker.gracefulShutdown(), 10.seconds)
}
/**
* Adds a recipe to baker and returns a recipeId for the recipe.
*
* This function is idempotent: if the same (equal) recipe was added earlier, this will return the same recipeId.
*
* @param recipeRecord The RecipeRecord recipe.
* @return A recipe identifier.
*/
def addRecipe(@Nonnull recipeRecord: RecipeRecord): CompletableFuture[String] =
toCompletableFuture(baker.addRecipe(recipeRecord))
/**
* Adds a recipe to baker and returns a recipeId for the recipe.
*
* This function is idempotent: if the same (equal) recipe was added earlier, this will return the same recipeId.
*
* @param compiledRecipe The compiled recipe.
* @return A recipeId
*/
override def addRecipe(compiledRecipe: CompiledRecipe, timeCreated: Long, validate: Boolean): CompletableFuture[String] = addRecipe(RecipeRecord.of(compiledRecipe, updated = timeCreated, validate = validate))
/**
* Adds a recipe to baker and returns a recipeId for the recipe.
*
* This function is idempotent: if the same (equal) recipe was added earlier, this will return the same recipeId.
*
* @param compiledRecipe The compiled recipe.
* @return A recipeId
*/
override def addRecipe(compiledRecipe: CompiledRecipe, validate: Boolean): CompletableFuture[String] = addRecipe(compiledRecipe, System.currentTimeMillis(), validate)
/**
* Attempts to gracefully shutdown the baker system.
*/
def gracefulShutdown(): CompletableFuture[Unit] =
toCompletableFuture(baker.gracefulShutdown())
/**
* This bakes (creates) a new process instance of the recipe.
*
* @param recipeId The recipe this instance will be baked for
* @param recipeInstanceId The process identifier
*/
def bake(@Nonnull recipeId: String, @Nonnull recipeInstanceId: String): CompletableFuture[Unit] =
toCompletableFuture(baker.bake(recipeId, recipeInstanceId))
def fireEventAndResolveWhenReceived(@Nonnull recipeInstanceId: String, @Nonnull event: EventInstance, @Nonnull correlationId: String): CompletableFuture[SensoryEventStatus] =
fireEventAndResolveWhenReceived(recipeInstanceId, event, Optional.of(correlationId))
def fireEventAndResolveWhenCompleted(@Nonnull recipeInstanceId: String, @Nonnull event: EventInstance, @Nonnull correlationId: String): CompletableFuture[SensoryEventResult] =
fireEventAndResolveWhenCompleted(recipeInstanceId, event, Optional.of(correlationId))
def fireEventAndResolveOnEvent(@Nonnull recipeInstanceId: String, @Nonnull event: EventInstance, @Nonnull onEvent: String, @Nonnull correlationId: String): CompletableFuture[SensoryEventResult] =
fireEventAndResolveOnEvent(recipeInstanceId, event, onEvent, Optional.of(correlationId))
def fireEvent(@Nonnull recipeInstanceId: String, @Nonnull event: EventInstance, @Nonnull correlationId: String): EventResolutions =
fireEvent(recipeInstanceId, event, Optional.of(correlationId))
def fireEventAndResolveWhenReceived(@Nonnull recipeInstanceId: String, @Nonnull event: EventInstance): CompletableFuture[SensoryEventStatus] =
fireEventAndResolveWhenReceived(recipeInstanceId, event, Optional.empty[String]())
def fireEventAndResolveWhenCompleted(@Nonnull recipeInstanceId: String, @Nonnull event: EventInstance): CompletableFuture[SensoryEventResult] =
fireEventAndResolveWhenCompleted(recipeInstanceId, event, Optional.empty[String]())
def fireEventAndResolveOnEvent(@Nonnull recipeInstanceId: String, @Nonnull event: EventInstance, @Nonnull onEvent: String): CompletableFuture[SensoryEventResult] =
fireEventAndResolveOnEvent(recipeInstanceId, event, onEvent, Optional.empty[String]())
def fireEvent(@Nonnull recipeInstanceId: String, @Nonnull event: EventInstance): EventResolutions =
fireEvent(recipeInstanceId, event, Optional.empty[String]())
def fireEventAndResolveWhenReceived(@Nonnull recipeInstanceId: String, @Nonnull event: EventInstance, @Nonnull correlationId: Optional[String]): CompletableFuture[SensoryEventStatus] =
toCompletableFuture(baker.fireEventAndResolveWhenReceived(recipeInstanceId, event.asScala, Option.apply(correlationId.orElse(null))))
def fireEventAndResolveWhenCompleted(@Nonnull recipeInstanceId: String, @Nonnull event: EventInstance, @Nonnull correlationId: Optional[String]): CompletableFuture[SensoryEventResult] =
toCompletableFuture(baker.fireEventAndResolveWhenCompleted(recipeInstanceId, event.asScala, Option.apply(correlationId.orElse(null)))).thenApply { result =>
SensoryEventResult(
sensoryEventStatus = result.sensoryEventStatus,
eventNames = result.eventNames.asJava,
ingredients = result.ingredients.asJava
)
}
def fireEventAndResolveOnEvent(@Nonnull recipeInstanceId: String, @Nonnull event: EventInstance, @Nonnull onEvent: String, @Nonnull correlationId: Optional[String]): CompletableFuture[SensoryEventResult] =
toCompletableFuture(baker.fireEventAndResolveOnEvent(recipeInstanceId, event.asScala, onEvent, Option.apply(correlationId.orElse(null)))).thenApply { result =>
SensoryEventResult(
sensoryEventStatus = result.sensoryEventStatus,
eventNames = result.eventNames.asJava,
ingredients = result.ingredients.asJava
)
}
def fireEvent(@Nonnull recipeInstanceId: String, @Nonnull event: EventInstance, @Nonnull correlationId: Optional[String]): EventResolutions = {
val scalaResult = baker.fireEvent(recipeInstanceId, event.asScala, Option.apply(correlationId.orElse(null)))
EventResolutions(
resolveWhenReceived = toCompletableFuture(scalaResult.resolveWhenReceived),
resolveWhenCompleted = toCompletableFuture(scalaResult.resolveWhenCompleted).thenApply { result =>
SensoryEventResult(
sensoryEventStatus = result.sensoryEventStatus,
eventNames = result.eventNames.asJava,
ingredients = result.ingredients.asJava
)
})
}
/**
* Retries a blocked interaction.
*
* @param recipeInstanceId The process identifier.
* @param interactionName The name of the blocked interaction.
* @return
*/
def retryInteraction(@Nonnull recipeInstanceId: String, @Nonnull interactionName: String): CompletableFuture[Unit] =
toCompletableFuture(baker.retryInteraction(recipeInstanceId, interactionName))
/**
* Resolves a blocked interaction by giving its output.
*
* @param recipeInstanceId The process identifier.
* @param interactionName The name of the blocked interaction.
* @param event The output of the interaction.
* @return
*/
def resolveInteraction(@Nonnull recipeInstanceId: String, @Nonnull interactionName: String, @Nonnull event: EventInstance): CompletableFuture[Unit] =
toCompletableFuture(baker.resolveInteraction(recipeInstanceId, interactionName, event.asScala))
/**
* Stops a retrying interaction.
*
* @param recipeInstanceId The process identifier.
* @param interactionName The name of the retrying interaction.
* @return
*/
def stopRetryingInteraction(@Nonnull recipeInstanceId: String, @Nonnull interactionName: String): CompletableFuture[Unit] =
toCompletableFuture(baker.stopRetryingInteraction(recipeInstanceId, interactionName))
/**
* Returns the state of a process instance. This includes the ingredients and names of the events.
*
* @param recipeInstanceId The process identifier
* @return The state of the process instance
*/
def getRecipeInstanceState(@Nonnull recipeInstanceId: String): CompletableFuture[RecipeInstanceState] =
toCompletableFuture(baker.getRecipeInstanceState(recipeInstanceId)).thenApply(_.asJava)
/**
* Returns all the ingredients that are accumulated for a given process.
*
* @param recipeInstanceId The process identifier
* @return
*/
def getIngredients(@Nonnull recipeInstanceId: String): CompletableFuture[java.util.Map[String, Value]] =
toCompletableFutureMap(baker.getIngredients(recipeInstanceId))
/**
* Returns all fired events for a given RecipeInstance id.
*
* @param recipeInstanceId The process id.
* @return The events
*/
def getEvents(@Nonnull recipeInstanceId: String): CompletableFuture[java.util.List[EventMoment]] =
toCompletableFuture(baker.getEvents(recipeInstanceId)).thenApply(_.map(_.asJava()).asJava)
/**
* Returns all names of fired events for a given RecipeInstance id.
*
* @param recipeInstanceId The process id.
* @return The event names
*/
def getEventNames(@Nonnull recipeInstanceId: String): CompletableFuture[java.util.List[String]] =
toCompletableFuture(baker.getEventNames(recipeInstanceId)).thenApply(_.asJava)
/**
* Returns the recipe information for the given RecipeId
*
* @param recipeId the recipeId
* @return The JRecipeInformation recipe
*/
def getRecipe(@Nonnull recipeId: String): CompletableFuture[RecipeInformation] =
toCompletableFuture(baker.getRecipe(recipeId)).thenApply(_.asJava)
def getRecipeVisual(recipeId: String, style: RecipeVisualStyle): CompletableFuture[String] =
toCompletableFuture(baker.getRecipeVisual(recipeId, style))
/**
* Returns all recipes added to this Baker
*
* @return A map with all recipes from recipeId -> JRecipeInformation
*/
def getAllRecipes: CompletableFuture[java.util.Map[String, RecipeInformation]] =
FutureConverters.toJava(baker.getAllRecipes).toCompletableFuture.thenApply(_.view.map { case (key, value) => (key, value.asJava)}.toMap.asJava)
def getInteraction(interactionName: String): CompletableFuture[Optional[InteractionInstanceDescriptorType]] =
FutureConverters
.toJava(baker.getInteraction(interactionName))
.toCompletableFuture
.thenApply(e => Optional.ofNullable(e.map(_.asJava()).orNull))
def getAllInteractions: CompletableFuture[java.util.List[InteractionInstanceDescriptorType]] =
FutureConverters
.toJava(baker.getAllInteractions)
.toCompletableFuture
.thenApply(_.map(_.asJava()).asJava)
/**
* Returns an index of all processes.
*
* Can potentially return a partial index when baker runs in cluster mode
* and not all shards can be reached within the given timeout.
*
* Does not include deleted processes.
*
* @return An index of all processes
*/
def getAllRecipeInstancesMetadata: CompletableFuture[util.Set[RecipeInstanceMetadata]] =
FutureConverters
.toJava(baker.getAllRecipeInstancesMetadata)
.toCompletableFuture
.thenApply(_.map(_.asJava).asJava)
/**
* Registers a listener to all runtime events for this baker instance.
*
* Note that:
*
* - The delivery guarantee is *AT MOST ONCE*. Practically this means you can miss events when the application terminates (unexpected or not).
* - The delivery is local (JVM) only, you will NOT receive events from other nodes when running in cluster mode.
*
* Because of these constraints you should not use an event listener for critical functionality. Valid use cases might be:
*
* - logging
* - metrics
* - unit tests
* - ...
*
* @param recipeName The name of the recipe for which this event listener should be triggered
* @param listenerFunction The listener to subscribe to events.
*/
override def registerEventListener(@Nonnull recipeName: String, @Nonnull listenerFunction: BiConsumer[RecipeEventMetadata, EventInstance]): CompletableFuture[Unit] =
toCompletableFuture(baker.registerEventListener(recipeName,
(recipeEventMetadata: scaladsl.RecipeEventMetadata, event: scaladsl.EventInstance) => listenerFunction.accept(recipeEventMetadata.asJava, event.asJava)))
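// Hedged usage sketch (illustrative, not from the original source): registering a simple logging
// listener for a single recipe, one of the use cases listed above. "WebShopRecipe" and the
// accessor names used in the body are assumptions.
//
//   baker.registerEventListener("WebShopRecipe",
//     (metadata: RecipeEventMetadata, event: EventInstance) =>
//       println(s"${metadata.getRecipeInstanceId}: ${event.getName}"))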
/**
* Registers a listener function to all runtime events for this baker instance.
*
* Note that:
*
* - The delivery guarantee is *AT MOST ONCE*. Practically this means you can miss events when the application terminates (unexpected or not).
* - The delivery is local (JVM) only, you will NOT receive events from other nodes when running in cluster mode.
*
* Because of these constraints you should not use an event listener for critical functionality. Valid use cases might be:
*
* - logging
* - metrics
* - unit tests
* - ...
*
* @param listenerFunction The listener function that is called once these events occur
*/
override def registerEventListener(@Nonnull listenerFunction: BiConsumer[RecipeEventMetadata, EventInstance]): CompletableFuture[Unit] =
toCompletableFuture(baker.registerEventListener(
(recipeEventMetadata: scaladsl.RecipeEventMetadata, event: scaladsl.EventInstance) => listenerFunction.accept(recipeEventMetadata.asJava, event.asJava)))
/**
* Registers a listener function to all runtime events for this baker instance.
*
* Note that:
*
* - The delivery guarantee is *AT MOST ONCE*. Practically this means you can miss events when the application terminates (unexpected or not).
* - The delivery is local (JVM) only, you will NOT receive events from other nodes when running in cluster mode.
*
* Because of these constraints you should not use an event listener for critical functionality. Valid use cases might be:
*
* - logging
* - metrics
* - unit tests
* - ...
*
* @param eventListener The EventListener whose processEvent method will be called once these events occur
*/
@deprecated(message = "Replaced with the consumer function variant", since = "3.0.0")
def registerEventListener(@Nonnull eventListener: EventListener): Future[Unit] =
baker.registerEventListener((recipeEventMetadata: scaladsl.RecipeEventMetadata, runtimeEvent: scaladsl.EventInstance) => eventListener.processEvent(recipeEventMetadata.recipeInstanceId, runtimeEvent.asJava))
/**
* Registers a listener that listens to all Baker events
*
* @param listenerFunction
* @return
*/
override def registerBakerEventListener(@Nonnull listenerFunction: Consumer[BakerEvent]): CompletableFuture[Unit] =
toCompletableFuture(baker.registerBakerEventListener((event: scaladsl.BakerEvent) => listenerFunction.accept(event.asJava)))
/**
* Returns the visual state of the recipe in dot format with a default timeout of 20 seconds
*
* @param recipeInstanceId The process identifier
* @return
*/
def getVisualState(@Nonnull recipeInstanceId: String): CompletableFuture[String] =
toCompletableFuture(baker.getVisualState(recipeInstanceId, RecipeVisualStyle.default))
/**
* Returns the visual state of the recipe in dot format, rendered with the given style, with a default timeout of 20 seconds
*
* @param recipeInstanceId The process identifier
* @param style The style used to render the visual state
* @return
*/
def getVisualState(@Nonnull recipeInstanceId: String, @Nonnull style: RecipeVisualStyle): CompletableFuture[String] =
toCompletableFuture(baker.getVisualState(recipeInstanceId, style))
private def toCompletableFuture[T](@Nonnull scalaFuture: Future[T]): CompletableFuture[T] =
FutureConverters.toJava(scalaFuture).toCompletableFuture
private def toCompletableFutureMap[K, V](@Nonnull scalaFuture: Future[Map[K, V]]): CompletableFuture[java.util.Map[K, V]] =
FutureConverters.toJava(
scalaFuture)
.toCompletableFuture
.thenApply(_.asJava)
}
|
ing-bank/baker
|
core/baker-interface/src/main/scala/com/ing/baker/runtime/javadsl/Baker.scala
|
Scala
|
mit
| 16,871
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util
import java.util.{Collections, Properties}
import java.util.concurrent.locks.ReentrantReadWriteLock
import kafka.cluster.EndPoint
import kafka.log.{LogCleaner, LogConfig, LogManager}
import kafka.server.DynamicBrokerConfig._
import kafka.utils.{CoreUtils, Logging, PasswordEncoder}
import kafka.zk.{AdminZkClient, KafkaZkClient}
import org.apache.kafka.common.Reconfigurable
import org.apache.kafka.common.config.{ConfigDef, ConfigException, SslConfigs}
import org.apache.kafka.common.metrics.MetricsReporter
import org.apache.kafka.common.config.types.Password
import org.apache.kafka.common.network.{ListenerName, ListenerReconfigurable}
import org.apache.kafka.common.security.authenticator.LoginManager
import org.apache.kafka.common.utils.Utils
import scala.collection._
import scala.collection.JavaConverters._
/**
* Dynamic broker configurations are stored in ZooKeeper and may be defined at two levels:
* <ul>
* <li>Per-broker configs persisted at <tt>/configs/brokers/{brokerId}</tt>: These can be described/altered
* using AdminClient using the resource name brokerId.</li>
* <li>Cluster-wide defaults persisted at <tt>/configs/brokers/<default></tt>: These can be described/altered
* using AdminClient using an empty resource name.</li>
* </ul>
* The order of precedence for broker configs is:
* <ol>
* <li>DYNAMIC_BROKER_CONFIG: stored in ZK at /configs/brokers/{brokerId}</li>
* <li>DYNAMIC_DEFAULT_BROKER_CONFIG: stored in ZK at /configs/brokers/<default></li>
* <li>STATIC_BROKER_CONFIG: properties that broker is started up with, typically from server.properties file</li>
* <li>DEFAULT_CONFIG: Default configs defined in KafkaConfig</li>
* </ol>
* Log configs use topic config overrides if defined and fallback to broker defaults using the order of precedence above.
* Topic config overrides may use a different config name from the default broker config.
* See [[kafka.log.LogConfig#TopicConfigSynonyms]] for the mapping.
* <p>
* AdminClient returns all config synonyms in the order of precedence when configs are described with
* <code>includeSynonyms</code>. In addition to configs that may be defined with the same name at different levels,
* some configs have additional synonyms.
* </p>
* <ul>
* <li>Listener configs may be defined using the prefix <tt>listener.name.{listenerName}.{configName}</tt>. These may be
* configured as dynamic or static broker configs. Listener configs have higher precedence than the base configs
* that don't specify the listener name. Listeners without a listener config use the base config. Base configs
may be defined only as STATIC_BROKER_CONFIG or DEFAULT_CONFIG and cannot be updated dynamically.</li>
* <li>Some configs may be defined using multiple properties. For example, <tt>log.roll.ms</tt> and
* <tt>log.roll.hours</tt> refer to the same config that may be defined in milliseconds or hours. The order of
* precedence of these synonyms is described in the docs of these configs in [[kafka.server.KafkaConfig]].</li>
* </ul>
*
*/
object DynamicBrokerConfig {
private[server] val DynamicSecurityConfigs = SslConfigs.RECONFIGURABLE_CONFIGS.asScala
val AllDynamicConfigs = mutable.Set[String]()
AllDynamicConfigs ++= DynamicSecurityConfigs
AllDynamicConfigs ++= LogCleaner.ReconfigurableConfigs
AllDynamicConfigs ++= DynamicLogConfig.ReconfigurableConfigs
AllDynamicConfigs ++= DynamicThreadPool.ReconfigurableConfigs
AllDynamicConfigs ++= Set(KafkaConfig.MetricReporterClassesProp)
AllDynamicConfigs ++= DynamicListenerConfig.ReconfigurableConfigs
private val PerBrokerConfigs = DynamicSecurityConfigs ++
DynamicListenerConfig.ReconfigurableConfigs
val ListenerConfigRegex = """listener\.name\.[^.]*\.(.*)""".r
private[server] val DynamicPasswordConfigs = {
val passwordConfigs = KafkaConfig.configKeys.filter(_._2.`type` == ConfigDef.Type.PASSWORD).keySet
AllDynamicConfigs.intersect(passwordConfigs)
}
def brokerConfigSynonyms(name: String, matchListenerOverride: Boolean): List[String] = {
name match {
case KafkaConfig.LogRollTimeMillisProp | KafkaConfig.LogRollTimeHoursProp =>
List(KafkaConfig.LogRollTimeMillisProp, KafkaConfig.LogRollTimeHoursProp)
case KafkaConfig.LogRollTimeJitterMillisProp | KafkaConfig.LogRollTimeJitterHoursProp =>
List(KafkaConfig.LogRollTimeJitterMillisProp, KafkaConfig.LogRollTimeJitterHoursProp)
case KafkaConfig.LogFlushIntervalMsProp => // LogFlushSchedulerIntervalMsProp is used as default
List(KafkaConfig.LogFlushIntervalMsProp, KafkaConfig.LogFlushSchedulerIntervalMsProp)
case KafkaConfig.LogRetentionTimeMillisProp | KafkaConfig.LogRetentionTimeMinutesProp | KafkaConfig.LogRetentionTimeHoursProp =>
List(KafkaConfig.LogRetentionTimeMillisProp, KafkaConfig.LogRetentionTimeMinutesProp, KafkaConfig.LogRetentionTimeHoursProp)
case ListenerConfigRegex(baseName) if matchListenerOverride => List(name, baseName)
case _ => List(name)
}
}
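// Worked examples of the synonym resolution above, derived directly from the match cases
// (added for illustration, not part of the original source):
//
//   brokerConfigSynonyms("log.roll.ms", matchListenerOverride = true)
//     // => List("log.roll.ms", "log.roll.hours")
//   brokerConfigSynonyms("listener.name.internal.ssl.keystore.location", matchListenerOverride = true)
//     // => List("listener.name.internal.ssl.keystore.location", "ssl.keystore.location")
//   brokerConfigSynonyms("ssl.keystore.location", matchListenerOverride = true)
//     // => List("ssl.keystore.location")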
private[server] def addDynamicConfigs(configDef: ConfigDef): Unit = {
KafkaConfig.configKeys.filterKeys(AllDynamicConfigs.contains).values.foreach { config =>
configDef.define(config.name, config.`type`, config.defaultValue, config.validator,
config.importance, config.documentation, config.group, config.orderInGroup, config.width,
config.displayName, config.dependents, config.recommender)
}
}
}
class DynamicBrokerConfig(private val kafkaConfig: KafkaConfig) extends Logging {
private[server] val staticBrokerConfigs = ConfigDef.convertToStringMapWithPasswordValues(kafkaConfig.originalsFromThisConfig).asScala
private[server] val staticDefaultConfigs = ConfigDef.convertToStringMapWithPasswordValues(KafkaConfig.defaultValues.asJava).asScala
private val dynamicBrokerConfigs = mutable.Map[String, String]()
private val dynamicDefaultConfigs = mutable.Map[String, String]()
private val reconfigurables = mutable.Buffer[Reconfigurable]()
private val brokerReconfigurables = mutable.Buffer[BrokerReconfigurable]()
private val lock = new ReentrantReadWriteLock
private var currentConfig = kafkaConfig
private val dynamicConfigPasswordEncoder = maybeCreatePasswordEncoder(kafkaConfig.passwordEncoderSecret)
private[server] def initialize(zkClient: KafkaZkClient): Unit = {
val adminZkClient = new AdminZkClient(zkClient)
updateDefaultConfig(adminZkClient.fetchEntityConfig(ConfigType.Broker, ConfigEntityName.Default))
val props = adminZkClient.fetchEntityConfig(ConfigType.Broker, kafkaConfig.brokerId.toString)
val brokerConfig = maybeReEncodePasswords(props, adminZkClient)
updateBrokerConfig(kafkaConfig.brokerId, brokerConfig)
}
def addReconfigurables(kafkaServer: KafkaServer): Unit = {
addBrokerReconfigurable(new DynamicThreadPool(kafkaServer))
if (kafkaServer.logManager.cleaner != null)
addBrokerReconfigurable(kafkaServer.logManager.cleaner)
addReconfigurable(new DynamicLogConfig(kafkaServer.logManager))
addReconfigurable(new DynamicMetricsReporters(kafkaConfig.brokerId, kafkaServer))
addBrokerReconfigurable(new DynamicListenerConfig(kafkaServer))
}
def addReconfigurable(reconfigurable: Reconfigurable): Unit = CoreUtils.inWriteLock(lock) {
require(reconfigurable.reconfigurableConfigs.asScala.forall(AllDynamicConfigs.contains))
reconfigurables += reconfigurable
}
def addBrokerReconfigurable(reconfigurable: BrokerReconfigurable): Unit = CoreUtils.inWriteLock(lock) {
require(reconfigurable.reconfigurableConfigs.forall(AllDynamicConfigs.contains))
brokerReconfigurables += reconfigurable
}
def removeReconfigurable(reconfigurable: Reconfigurable): Unit = CoreUtils.inWriteLock(lock) {
reconfigurables -= reconfigurable
}
// Visibility for testing
private[server] def currentKafkaConfig: KafkaConfig = CoreUtils.inReadLock(lock) {
currentConfig
}
private[server] def currentDynamicBrokerConfigs: Map[String, String] = CoreUtils.inReadLock(lock) {
dynamicBrokerConfigs.clone()
}
private[server] def currentDynamicDefaultConfigs: Map[String, String] = CoreUtils.inReadLock(lock) {
dynamicDefaultConfigs.clone()
}
private[server] def updateBrokerConfig(brokerId: Int, persistentProps: Properties): Unit = CoreUtils.inWriteLock(lock) {
try {
val props = fromPersistentProps(persistentProps, perBrokerConfig = true)
dynamicBrokerConfigs.clear()
dynamicBrokerConfigs ++= props.asScala
updateCurrentConfig()
} catch {
case e: Exception => error(s"Per-broker configs of $brokerId could not be applied: $persistentProps", e)
}
}
private[server] def updateDefaultConfig(persistentProps: Properties): Unit = CoreUtils.inWriteLock(lock) {
try {
val props = fromPersistentProps(persistentProps, perBrokerConfig = false)
dynamicDefaultConfigs.clear()
dynamicDefaultConfigs ++= props.asScala
updateCurrentConfig()
} catch {
case e: Exception => error(s"Cluster default configs could not be applied: $persistentProps", e)
}
}
private def maybeCreatePasswordEncoder(secret: Option[Password]): Option[PasswordEncoder] = {
secret.map { secret =>
new PasswordEncoder(secret,
kafkaConfig.passwordEncoderKeyFactoryAlgorithm,
kafkaConfig.passwordEncoderCipherAlgorithm,
kafkaConfig.passwordEncoderKeyLength,
kafkaConfig.passwordEncoderIterations)
}
}
private def passwordEncoder: PasswordEncoder = {
dynamicConfigPasswordEncoder.getOrElse(throw new ConfigException("Password encoder secret not configured"))
}
private[server] def toPersistentProps(configProps: Properties, perBrokerConfig: Boolean): Properties = {
val props = configProps.clone().asInstanceOf[Properties]
def encodePassword(configName: String): Unit = {
val value = props.getProperty(configName)
if (value != null) {
if (!perBrokerConfig)
throw new ConfigException("Password config can be defined only at broker level")
props.setProperty(configName, passwordEncoder.encode(new Password(value)))
}
}
DynamicPasswordConfigs.foreach(encodePassword)
props
}
private[server] def fromPersistentProps(persistentProps: Properties,
perBrokerConfig: Boolean): Properties = {
val props = persistentProps.clone().asInstanceOf[Properties]
// Remove all invalid configs from `props`
removeInvalidConfigs(props, perBrokerConfig)
def removeInvalidProps(invalidPropNames: Set[String], errorMessage: String): Unit = {
if (invalidPropNames.nonEmpty) {
invalidPropNames.foreach(props.remove)
error(s"$errorMessage: $invalidPropNames")
}
}
removeInvalidProps(nonDynamicConfigs(props), "Non-dynamic configs configured in ZooKeeper will be ignored")
removeInvalidProps(securityConfigsWithoutListenerPrefix(props),
"Security configs can be dynamically updated only using listener prefix, base configs will be ignored")
if (!perBrokerConfig)
removeInvalidProps(perBrokerConfigs(props), "Per-broker configs defined at default cluster level will be ignored")
def decodePassword(configName: String): Unit = {
val value = props.getProperty(configName)
if (value != null) {
try {
props.setProperty(configName, passwordEncoder.decode(value).value)
} catch {
case e: Exception =>
error(s"Dynamic password config $configName could not be decoded, ignoring.", e)
props.remove(configName)
}
}
}
DynamicPasswordConfigs.foreach(decodePassword)
props
}
// If the secret has changed, password.encoder.old.secret contains the old secret that was used
// to encode the configs in ZK. Decode passwords using the old secret and update ZK with values
// encoded using the current secret. Ignore any errors during decoding since old secret may not
// have been removed during broker restart.
private def maybeReEncodePasswords(persistentProps: Properties, adminZkClient: AdminZkClient): Properties = {
val props = persistentProps.clone().asInstanceOf[Properties]
if (props.asScala.keySet.exists(DynamicPasswordConfigs.contains)) {
maybeCreatePasswordEncoder(kafkaConfig.passwordEncoderOldSecret).foreach { passwordDecoder =>
DynamicPasswordConfigs.foreach { configName =>
val value = props.getProperty(configName)
if (value != null) {
val decoded = try {
Some(passwordDecoder.decode(value).value)
} catch {
case _: Exception =>
debug(s"Dynamic password config $configName could not be decoded using old secret, new secret will be used.")
None
}
decoded.foreach { value => props.put(configName, passwordEncoder.encode(new Password(value))) }
}
}
adminZkClient.changeBrokerConfig(Seq(kafkaConfig.brokerId), props)
}
}
props
}
private[server] def validate(props: Properties, perBrokerConfig: Boolean): Unit = CoreUtils.inReadLock(lock) {
def checkInvalidProps(invalidPropNames: Set[String], errorMessage: String): Unit = {
if (invalidPropNames.nonEmpty)
throw new ConfigException(s"$errorMessage: $invalidPropNames")
}
checkInvalidProps(nonDynamicConfigs(props), "Cannot update these configs dynamically")
checkInvalidProps(securityConfigsWithoutListenerPrefix(props),
"These security configs can be dynamically updated only per-listener using the listener prefix")
validateConfigTypes(props)
val newProps = mutable.Map[String, String]()
newProps ++= staticBrokerConfigs
if (perBrokerConfig) {
overrideProps(newProps, dynamicDefaultConfigs)
overrideProps(newProps, props.asScala)
} else {
checkInvalidProps(perBrokerConfigs(props),
"Cannot update these configs at default cluster level, broker id must be specified")
overrideProps(newProps, props.asScala)
overrideProps(newProps, dynamicBrokerConfigs)
}
processReconfiguration(newProps, validateOnly = true)
}
private def perBrokerConfigs(props: Properties): Set[String] = {
val configNames = props.asScala.keySet
configNames.intersect(PerBrokerConfigs) ++ configNames.filter(ListenerConfigRegex.findFirstIn(_).nonEmpty)
}
private def nonDynamicConfigs(props: Properties): Set[String] = {
props.asScala.keySet.intersect(DynamicConfig.Broker.nonDynamicProps)
}
private def securityConfigsWithoutListenerPrefix(props: Properties): Set[String] = {
DynamicSecurityConfigs.filter(props.containsKey)
}
private def validateConfigTypes(props: Properties): Unit = {
val baseProps = new Properties
props.asScala.foreach {
case (ListenerConfigRegex(baseName), v) => baseProps.put(baseName, v)
case (k, v) => baseProps.put(k, v)
}
DynamicConfig.Broker.validate(baseProps)
}
private def removeInvalidConfigs(props: Properties, perBrokerConfig: Boolean): Unit = {
try {
validateConfigTypes(props)
props.asScala
} catch {
case e: Exception =>
val invalidProps = props.asScala.filter { case (k, v) =>
val props1 = new Properties
props1.put(k, v)
try {
validateConfigTypes(props1)
false
} catch {
case _: Exception => true
}
}
invalidProps.foreach(props.remove)
val configSource = if (perBrokerConfig) "broker" else "default cluster"
error(s"Dynamic $configSource config contains invalid values: $invalidProps, these configs will be ignored", e)
}
}
private[server] def maybeReconfigure(reconfigurable: Reconfigurable, oldConfig: KafkaConfig, newConfig: util.Map[String, _]): Unit = {
if (reconfigurable.reconfigurableConfigs.asScala.exists(key => oldConfig.originals.get(key) != newConfig.get(key)))
reconfigurable.reconfigure(newConfig)
}
private def updatedConfigs(newProps: java.util.Map[String, _], currentProps: java.util.Map[_, _]): mutable.Map[String, _] = {
newProps.asScala.filter {
case (k, v) => v != currentProps.get(k)
}
}
/**
* Updates values in `props` with the new values from `propsOverride`. Synonyms of updated configs
* are removed from `props` to ensure that the config with the higher precedence is applied. For example,
* if `log.roll.ms` was defined in server.properties and `log.roll.hours` is configured dynamically,
* `log.roll.hours` from the dynamic configuration will be used and `log.roll.ms` will be removed from
* `props` (even though `log.roll.hours` is secondary to `log.roll.ms`).
*/
private def overrideProps(props: mutable.Map[String, String], propsOverride: mutable.Map[String, String]): Unit = {
propsOverride.foreach { case (k, v) =>
// Remove synonyms of `k` to ensure the right precedence is applied. But disable `matchListenerOverride`
// so that base configs corresponding to listener configs are not removed. Base configs should not be removed
// since they may be used by other listeners. It is ok to retain them in `props` since base configs cannot be
// dynamically updated and listener-specific configs have the higher precedence.
brokerConfigSynonyms(k, matchListenerOverride = false).foreach(props.remove)
props.put(k, v)
}
}
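// Worked example of the precedence handling above (illustrative, not part of the original source):
// if server.properties defines log.roll.hours=24 (static) and log.roll.ms=900000 arrives as a
// dynamic override, the synonym log.roll.hours is removed from `props` before log.roll.ms is put,
// so the dynamically configured millisecond value takes effect:
//
//   props before:  Map("log.roll.hours" -> "24", ...)
//   propsOverride: Map("log.roll.ms" -> "900000")
//   props after:   contains "log.roll.ms" -> "900000" and no "log.roll.hours"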
private def updateCurrentConfig(): Unit = {
val newProps = mutable.Map[String, String]()
newProps ++= staticBrokerConfigs
overrideProps(newProps, dynamicDefaultConfigs)
overrideProps(newProps, dynamicBrokerConfigs)
val oldConfig = currentConfig
val (newConfig, brokerReconfigurablesToUpdate) = processReconfiguration(newProps, validateOnly = false)
if (newConfig ne currentConfig) {
currentConfig = newConfig
kafkaConfig.updateCurrentConfig(newConfig)
// Process BrokerReconfigurable updates after current config is updated
brokerReconfigurablesToUpdate.foreach(_.reconfigure(oldConfig, newConfig))
}
}
private def processReconfiguration(newProps: Map[String, String], validateOnly: Boolean): (KafkaConfig, List[BrokerReconfigurable]) = {
val newConfig = new KafkaConfig(newProps.asJava, !validateOnly, None)
val updatedMap = updatedConfigs(newConfig.originalsFromThisConfig, currentConfig.originals)
if (updatedMap.nonEmpty) {
try {
val customConfigs = new util.HashMap[String, Object](newConfig.originalsFromThisConfig) // non-Kafka configs
newConfig.valuesFromThisConfig.keySet.asScala.foreach(customConfigs.remove)
reconfigurables.foreach {
case listenerReconfigurable: ListenerReconfigurable =>
val listenerName = listenerReconfigurable.listenerName
val oldValues = currentConfig.valuesWithPrefixOverride(listenerName.configPrefix)
val newValues = newConfig.valuesFromThisConfigWithPrefixOverride(listenerName.configPrefix)
val updatedKeys = updatedConfigs(newValues, oldValues).keySet
if (needsReconfiguration(listenerReconfigurable.reconfigurableConfigs, updatedKeys))
processReconfigurable(listenerReconfigurable, updatedKeys, newValues, customConfigs, validateOnly)
case reconfigurable =>
if (needsReconfiguration(reconfigurable.reconfigurableConfigs, updatedMap.keySet))
processReconfigurable(reconfigurable, updatedMap.keySet, newConfig.valuesFromThisConfig, customConfigs, validateOnly)
}
// BrokerReconfigurable updates are processed after config is updated. Only do the validation here.
val brokerReconfigurablesToUpdate = mutable.Buffer[BrokerReconfigurable]()
brokerReconfigurables.foreach { reconfigurable =>
if (needsReconfiguration(reconfigurable.reconfigurableConfigs.asJava, updatedMap.keySet)) {
reconfigurable.validateReconfiguration(newConfig)
if (!validateOnly)
brokerReconfigurablesToUpdate += reconfigurable
}
}
(newConfig, brokerReconfigurablesToUpdate.toList)
} catch {
case e: Exception =>
if (!validateOnly)
error(s"Failed to update broker configuration with configs : ${newConfig.originalsFromThisConfig}", e)
throw new ConfigException("Invalid dynamic configuration", e)
}
}
else
(currentConfig, List.empty)
}
private def needsReconfiguration(reconfigurableConfigs: util.Set[String], updatedKeys: Set[String]): Boolean = {
reconfigurableConfigs.asScala.intersect(updatedKeys).nonEmpty
}
private def processReconfigurable(reconfigurable: Reconfigurable,
updatedConfigNames: Set[String],
allNewConfigs: util.Map[String, _],
newCustomConfigs: util.Map[String, Object],
validateOnly: Boolean): Unit = {
val newConfigs = new util.HashMap[String, Object]
allNewConfigs.asScala.foreach { case (k, v) => newConfigs.put(k, v.asInstanceOf[AnyRef]) }
newConfigs.putAll(newCustomConfigs)
try {
reconfigurable.validateReconfiguration(newConfigs)
} catch {
case e: ConfigException => throw e
case _: Exception =>
throw new ConfigException(s"Validation of dynamic config update of $updatedConfigNames failed with class ${reconfigurable.getClass}")
}
if (!validateOnly) {
info(s"Reconfiguring $reconfigurable, updated configs: $updatedConfigNames custom configs: $newCustomConfigs")
reconfigurable.reconfigure(newConfigs)
}
}
}
trait BrokerReconfigurable {
def reconfigurableConfigs: Set[String]
def validateReconfiguration(newConfig: KafkaConfig): Unit
def reconfigure(oldConfig: KafkaConfig, newConfig: KafkaConfig): Unit
}
object DynamicLogConfig {
// Exclude message.format.version for now since we need to check that the version
// is supported on all brokers in the cluster.
val ExcludedConfigs = Set(KafkaConfig.LogMessageFormatVersionProp)
val ReconfigurableConfigs = LogConfig.TopicConfigSynonyms.values.toSet -- ExcludedConfigs
val KafkaConfigToLogConfigName = LogConfig.TopicConfigSynonyms.map { case (k, v) => (v, k) }
}
class DynamicLogConfig(logManager: LogManager) extends Reconfigurable with Logging {
override def configure(configs: util.Map[String, _]): Unit = {}
override def reconfigurableConfigs(): util.Set[String] = {
DynamicLogConfig.ReconfigurableConfigs.asJava
}
override def validateReconfiguration(configs: util.Map[String, _]): Unit = {
// For update of topic config overrides, only config names and types are validated
// Names and types have already been validated. For consistency with topic config
// validation, no additional validation is performed.
}
override def reconfigure(configs: util.Map[String, _]): Unit = {
val currentLogConfig = logManager.currentDefaultConfig
val newBrokerDefaults = new util.HashMap[String, Object](currentLogConfig.originals)
configs.asScala.filterKeys(DynamicLogConfig.ReconfigurableConfigs.contains).foreach { case (k, v) =>
if (v != null) {
DynamicLogConfig.KafkaConfigToLogConfigName.get(k).foreach { configName =>
newBrokerDefaults.put(configName, v.asInstanceOf[AnyRef])
}
}
}
logManager.reconfigureDefaultLogConfig(LogConfig(newBrokerDefaults))
logManager.allLogs.foreach { log =>
val props = mutable.Map.empty[Any, Any]
props ++= newBrokerDefaults.asScala
props ++= log.config.originals.asScala.filterKeys(log.config.overriddenConfigs.contains)
val logConfig = LogConfig(props.asJava)
log.updateConfig(newBrokerDefaults.asScala.keySet, logConfig)
}
}
}
object DynamicThreadPool {
val ReconfigurableConfigs = Set(
KafkaConfig.NumIoThreadsProp,
KafkaConfig.NumNetworkThreadsProp,
KafkaConfig.NumReplicaFetchersProp,
KafkaConfig.NumRecoveryThreadsPerDataDirProp,
KafkaConfig.BackgroundThreadsProp)
}
class DynamicThreadPool(server: KafkaServer) extends BrokerReconfigurable {
override def reconfigurableConfigs: Set[String] = {
DynamicThreadPool.ReconfigurableConfigs
}
override def validateReconfiguration(newConfig: KafkaConfig): Unit = {
newConfig.values.asScala.filterKeys(DynamicThreadPool.ReconfigurableConfigs.contains).foreach { case (k, v) =>
val newValue = v.asInstanceOf[Int]
val oldValue = currentValue(k)
if (newValue != oldValue) {
val errorMsg = s"Dynamic thread count update validation failed for $k=$v"
if (newValue <= 0)
throw new ConfigException(s"$errorMsg, value should be at least 1")
if (newValue < oldValue / 2)
throw new ConfigException(s"$errorMsg, value should be at least half the current value $oldValue")
if (newValue > oldValue * 2)
throw new ConfigException(s"$errorMsg, value should not be greater than double the current value $oldValue")
}
}
}
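// Worked example of the bounds enforced above (illustrative): with a current value of 8,
//   newValue <= 0           -> ConfigException ("value should be at least 1")
//   newValue < 8 / 2 = 4    -> ConfigException ("at least half the current value")
//   newValue > 8 * 2 = 16   -> ConfigException ("not greater than double the current value")
//   4 <= newValue <= 16     -> accepted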
override def reconfigure(oldConfig: KafkaConfig, newConfig: KafkaConfig): Unit = {
if (newConfig.numIoThreads != oldConfig.numIoThreads)
server.requestHandlerPool.resizeThreadPool(newConfig.numIoThreads)
if (newConfig.numNetworkThreads != oldConfig.numNetworkThreads)
server.socketServer.resizeThreadPool(oldConfig.numNetworkThreads, newConfig.numNetworkThreads)
if (newConfig.numReplicaFetchers != oldConfig.numReplicaFetchers)
server.replicaManager.replicaFetcherManager.resizeThreadPool(newConfig.numReplicaFetchers)
if (newConfig.numRecoveryThreadsPerDataDir != oldConfig.numRecoveryThreadsPerDataDir)
server.getLogManager.resizeRecoveryThreadPool(newConfig.numRecoveryThreadsPerDataDir)
if (newConfig.backgroundThreads != oldConfig.backgroundThreads)
server.kafkaScheduler.resizeThreadPool(newConfig.backgroundThreads)
}
private def currentValue(name: String): Int = {
name match {
case KafkaConfig.NumIoThreadsProp => server.config.numIoThreads
case KafkaConfig.NumNetworkThreadsProp => server.config.numNetworkThreads
case KafkaConfig.NumReplicaFetchersProp => server.config.numReplicaFetchers
case KafkaConfig.NumRecoveryThreadsPerDataDirProp => server.config.numRecoveryThreadsPerDataDir
case KafkaConfig.BackgroundThreadsProp => server.config.backgroundThreads
case n => throw new IllegalStateException(s"Unexpected config $n")
}
}
}
class DynamicMetricsReporters(brokerId: Int, server: KafkaServer) extends Reconfigurable {
private val dynamicConfig = server.config.dynamicConfig
private val metrics = server.metrics
private val propsOverride = Map[String, AnyRef](KafkaConfig.BrokerIdProp -> brokerId.toString)
private val currentReporters = mutable.Map[String, MetricsReporter]()
createReporters(dynamicConfig.currentKafkaConfig.getList(KafkaConfig.MetricReporterClassesProp),
Collections.emptyMap[String, Object])
private[server] def currentMetricsReporters: List[MetricsReporter] = currentReporters.values.toList
override def configure(configs: util.Map[String, _]): Unit = {}
override def reconfigurableConfigs(): util.Set[String] = {
val configs = new util.HashSet[String]()
configs.add(KafkaConfig.MetricReporterClassesProp)
currentReporters.values.foreach {
case reporter: Reconfigurable => configs.addAll(reporter.reconfigurableConfigs)
case _ =>
}
configs
}
override def validateReconfiguration(configs: util.Map[String, _]): Unit = {
val updatedMetricsReporters = metricsReporterClasses(configs)
// Ensure all the reporter classes can be loaded and have a default constructor
updatedMetricsReporters.foreach { className =>
val clazz = Utils.loadClass(className, classOf[MetricsReporter])
clazz.getConstructor()
}
// Validate the new configuration using every reconfigurable reporter instance that is not being deleted
currentReporters.values.foreach {
case reporter: Reconfigurable =>
if (updatedMetricsReporters.contains(reporter.getClass.getName))
reporter.validateReconfiguration(configs)
case _ =>
}
}
override def reconfigure(configs: util.Map[String, _]): Unit = {
val updatedMetricsReporters = metricsReporterClasses(configs)
val deleted = currentReporters.keySet -- updatedMetricsReporters
deleted.foreach(removeReporter)
currentReporters.values.foreach {
case reporter: Reconfigurable => dynamicConfig.maybeReconfigure(reporter, dynamicConfig.currentKafkaConfig, configs)
case _ =>
}
val added = updatedMetricsReporters -- currentReporters.keySet
createReporters(added.asJava, configs)
}
private def createReporters(reporterClasses: util.List[String],
updatedConfigs: util.Map[String, _]): Unit = {
val props = new util.HashMap[String, AnyRef]
updatedConfigs.asScala.foreach { case (k, v) => props.put(k, v.asInstanceOf[AnyRef]) }
propsOverride.foreach { case (k, v) => props.put(k, v) }
val reporters = dynamicConfig.currentKafkaConfig.getConfiguredInstances(reporterClasses, classOf[MetricsReporter], props)
reporters.asScala.foreach { reporter =>
metrics.addReporter(reporter)
currentReporters += reporter.getClass.getName -> reporter
}
server.notifyClusterListeners(reporters.asScala)
}
private def removeReporter(className: String): Unit = {
currentReporters.remove(className).foreach(metrics.removeReporter)
}
private def metricsReporterClasses(configs: util.Map[String, _]): mutable.Buffer[String] = {
configs.get(KafkaConfig.MetricReporterClassesProp).asInstanceOf[util.List[String]].asScala
}
}
object DynamicListenerConfig {
val ReconfigurableConfigs = Set(
// Listener configs
KafkaConfig.AdvertisedListenersProp,
KafkaConfig.ListenersProp,
KafkaConfig.ListenerSecurityProtocolMapProp,
// SSL configs
KafkaConfig.PrincipalBuilderClassProp,
KafkaConfig.SslProtocolProp,
KafkaConfig.SslProviderProp,
KafkaConfig.SslCipherSuitesProp,
KafkaConfig.SslEnabledProtocolsProp,
KafkaConfig.SslKeystoreTypeProp,
KafkaConfig.SslKeystoreLocationProp,
KafkaConfig.SslKeystorePasswordProp,
KafkaConfig.SslKeyPasswordProp,
KafkaConfig.SslTruststoreTypeProp,
KafkaConfig.SslTruststoreLocationProp,
KafkaConfig.SslTruststorePasswordProp,
KafkaConfig.SslKeyManagerAlgorithmProp,
KafkaConfig.SslTrustManagerAlgorithmProp,
KafkaConfig.SslEndpointIdentificationAlgorithmProp,
KafkaConfig.SslSecureRandomImplementationProp,
KafkaConfig.SslClientAuthProp,
// SASL configs
KafkaConfig.SaslMechanismInterBrokerProtocolProp,
KafkaConfig.SaslJaasConfigProp,
KafkaConfig.SaslEnabledMechanismsProp,
KafkaConfig.SaslKerberosServiceNameProp,
KafkaConfig.SaslKerberosKinitCmdProp,
KafkaConfig.SaslKerberosTicketRenewWindowFactorProp,
KafkaConfig.SaslKerberosTicketRenewJitterProp,
KafkaConfig.SaslKerberosMinTimeBeforeReloginProp,
KafkaConfig.SaslKerberosPrincipalToLocalRulesProp
)
}
class DynamicListenerConfig(server: KafkaServer) extends BrokerReconfigurable with Logging {
override def reconfigurableConfigs: Set[String] = {
DynamicListenerConfig.ReconfigurableConfigs
}
def validateReconfiguration(newConfig: KafkaConfig): Unit = {
def immutableListenerConfigs(kafkaConfig: KafkaConfig, prefix: String): Map[String, AnyRef] = {
newConfig.originals.asScala
.filterKeys(_.startsWith(prefix))
.filterKeys(k => !DynamicSecurityConfigs.contains(k))
}
val oldConfig = server.config
val newListeners = listenersToMap(newConfig.listeners)
val newAdvertisedListeners = listenersToMap(newConfig.advertisedListeners)
val oldListeners = listenersToMap(oldConfig.listeners)
if (!newAdvertisedListeners.keySet.subsetOf(newListeners.keySet))
throw new ConfigException(s"Advertised listeners '$newAdvertisedListeners' must be a subset of listeners '$newListeners'")
if (newListeners.keySet != newConfig.listenerSecurityProtocolMap.keySet)
throw new ConfigException(s"Listeners '$newListeners' and listener map '${newConfig.listenerSecurityProtocolMap}' don't match")
newListeners.keySet.intersect(oldListeners.keySet).foreach { listenerName =>
val prefix = listenerName.configPrefix
val newListenerProps = immutableListenerConfigs(newConfig, prefix)
val oldListenerProps = immutableListenerConfigs(oldConfig, prefix)
if (newListenerProps != oldListenerProps)
throw new ConfigException(s"Configs cannot be updated dynamically for existing listener $listenerName, " +
"restart broker or create a new listener for update")
if (oldConfig.listenerSecurityProtocolMap(listenerName) != newConfig.listenerSecurityProtocolMap(listenerName))
throw new ConfigException(s"Security protocol cannot be updated for existing listener $listenerName")
}
if (!newAdvertisedListeners.contains(newConfig.interBrokerListenerName))
throw new ConfigException(s"Advertised listener must be specified for inter-broker listener ${newConfig.interBrokerListenerName}")
}
def reconfigure(oldConfig: KafkaConfig, newConfig: KafkaConfig): Unit = {
val newListeners = newConfig.listeners
val newListenerMap = listenersToMap(newListeners)
val oldListeners = oldConfig.listeners
val oldListenerMap = listenersToMap(oldListeners)
val listenersRemoved = oldListeners.filterNot(e => newListenerMap.contains(e.listenerName))
val listenersAdded = newListeners.filterNot(e => oldListenerMap.contains(e.listenerName))
// Clear SASL login cache to force re-login
if (listenersAdded.nonEmpty || listenersRemoved.nonEmpty)
LoginManager.closeAll()
server.socketServer.removeListeners(listenersRemoved)
if (listenersAdded.nonEmpty)
server.socketServer.addListeners(listenersAdded)
server.zkClient.updateBrokerInfoInZk(server.createBrokerInfo)
}
private def listenersToMap(listeners: Seq[EndPoint]): Map[ListenerName, EndPoint] =
listeners.map(e => (e.listenerName, e)).toMap
}
|
MyPureCloud/kafka
|
core/src/main/scala/kafka/server/DynamicBrokerConfig.scala
|
Scala
|
apache-2.0
| 35,307
|
package org.jetbrains.plugins.scala.debugger.evaluateExpression
import org.jetbrains.plugins.scala.debugger.ScalaDebuggerTestCase
/**
* User: Alefas
* Date: 15.10.11
*/
class ScalaLocalMethodEvaluationTest extends ScalaDebuggerTestCase {
def testSimple() {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| def foo: Int = 1
| "stop here"
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 3)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("foo", "1")
}
}
def testLocalWithParameters() {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| val y = "test"
| def foo(x: Int): Int = x + y.length
| "stop here"
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 4)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("foo(3)", "7")
}
}
def testSimpleLocalWithParams() {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| val x = 2
| def foo: Int = x - 1
| "stop here"
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 4)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("foo", "1")
}
}
def testSimpleLocalWithDiffParams1() {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| val x = 2
| val y = "c"
| def foo: Int = x - y.length()
| "stop here"
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 5)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("foo", "1")
}
}
def testSimpleLocalWithDiffParams2() {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| val y = "c"
| val x = 2
| def foo: Int = x - y.length()
| "stop here"
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 5)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("foo", "1")
}
}
def testSimpleLocalWithDiffParams3() {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| val y = "c"
| val x = 2
| def foo: Int = - y.length() + x
| "stop here"
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 5)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("foo", "1")
}
}
def testLocalWithLocalObject() {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| object y {val y = 1}
| val x = 2
| def foo: Int = x - y.y
| "stop here"
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 5)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("foo", "1")
}
}
def testLocalWithField() {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| val g = 1
| def moo(x: Int) = g + x
| val zz = (y: Int) => {
| val uu = (x: Int) => {
| g
| "stop here"
| }
| uu(1)
| }
| zz(2)
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 7)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("moo(x)", "2")
}
}
def testLocalFromAnonymous() {
addFileToProject("Sample.scala",
"""
|object Sample {
| val y = 1
| def main(args: Array[String]) {
| val x = 2
| def foo: Int = x - y
| "stop here"
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 5)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("foo", "1")
}
}
def testClojure() {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| def outer() {
| val s = "start"
| def inner(a: String, b: String): String = {
| "stop here"
| s + a + b
| }
| inner("aa", "bb")
| }
| outer()
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 6)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("a", "aa")
evalEquals("b", "bb")
evalEquals("s", "start")
evalEquals("inner(\\"qq\\", \\"ww\\")", "startqqww")
}
}
def testLocalWithDefaultAndNamedParams() {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| def outer() {
| def inner(a: String, b: String = "default", c: String = "other"): String = {
| "stop here"
| a + b + c
| }
| inner("aa")
| }
| outer()
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 5)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("a", "aa")
evalEquals("b", "default")
evalEquals("c", "other")
evalEquals("inner(\\"aa\\", \\"bb\\")", "aabbother")
evalEquals("inner(\\"aa\\")", "aadefaultother")
evalEquals("inner(\\"aa\\", c = \\"cc\\")", "aadefaultcc")
}
}
//
// def testLocalMethodsWithSameName() {
// addFileToProject("Sample.scala",
// """
// |object Sample {
// | def main(args: Array[String]) {
// | def foo(i: Int = 1) = {
// | def foo(j: Int = 2) = j
// | i
// | }
// | "stop"
// | def other() {
// | def foo(i: Int = 3) = i
// | "stop"
// | }
// | def third() {
// | def foo(i: Int = 4) = i
// | "stop"
// | }
// | foo()
// | other()
// | third()
// | }
// |}
// """.stripMargin.trim())
// addBreakpoint("Sample.scala", 4)
// addBreakpoint("Sample.scala", 6)
// addBreakpoint("Sample.scala", 9)
// addBreakpoint("Sample.scala", 13)
// runDebugger("Sample") {
// //todo test for multiple breakpoints?
// waitForBreakpoint()
// evalEquals("foo()", "1")
// waitForBreakpoint()
// evalEquals("foo()", "2")
//
//
// }
// }
//todo this test should work, but it doesn't (last two assertions)
def testClojureWithDefaultParameter() {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| def outer() {
| val s = "start"
| val d = "default"
| def inner(a: String, b: String = d): String = {
| "stop here"
| s + a + b
| }
| inner("aa")
| }
| outer()
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 6)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("a", "aa")
evalEquals("b", "default")
evalEquals("s", "start")
evalEquals("inner(\\"aa\\", \\"bb\\")", "startaabb")
evalEquals("inner(\\"aa\\")", "startaadefault")
}
}
def testFunctionsWithLocalParameters(): Unit = {
addFileToProject("Sample.scala",
"""
|object Sample {
| def main(args: Array[String]) {
| val x = 1
| val y = 2
| def outer() = {
| val s = "start"
| val d = "default"
| def inner(a: String, b: String = d): String = {
| val z = s + a + b + y
| def inInner() = {
| z + x
| }
| inInner()
| "stop here"
| z
| }
| inner("aa")
| }
| outer()
| }
|}
""".stripMargin.trim()
)
addBreakpoint("Sample.scala", 13)
runDebugger("Sample") {
waitForBreakpoint()
evalEquals("a", "aa")
evalEquals("b", "default")
evalEquals("x", "1")
evalEquals("y", "2")
evalEquals("s", "start")
evalEquals("z", "startaadefault2")
evalEquals("inInner()", "startaadefault21")
evalEquals("inner(\\"aa\\", \\"bb\\")", "startaabb2")
evalEquals("inner(\\"aa\\")", "startaadefault2")
evalEquals("outer()", "startaadefault2")
}
}
}
|
triggerNZ/intellij-scala
|
test/org/jetbrains/plugins/scala/debugger/evaluateExpression/ScalaLocalMethodEvaluationTest.scala
|
Scala
|
apache-2.0
| 8,858
|
package diffson
package jsonpatch
import lcsdiff._
import jsonpointer._
import cats._
import cats.implicits._
import diffson.lcs.Patience
import org.scalatest._
import org.scalatest.flatspec.AnyFlatSpec
import scala.util.Try
import scala.language.implicitConversions
import org.scalatest.matchers.should.Matchers
abstract class TestJsonDiff[Json](implicit Json: Jsony[Json]) extends AnyFlatSpec with Matchers with TestProtocol[Json] {
implicit val lcsalg: Patience[Json] = new lcs.Patience[Json]
"a diff" should "be empty if created between two equal values" in {
val json = parseJson("true")
diff(json, json) should be(JsonPatch[Json](Nil))
}
it should "be a simple replacement if the two values are completely different" in {
diff(parseJson("true"), parseJson("13")) should be(JsonPatch[Json](Replace(Pointer.Root, 13: Json)))
}
it should "contain an add operation for each added field" in {
val json1 = parseJson("""{"lbl": 32}""")
val json2 = parseJson("""{"lbl": 32, "new": false}""")
val json3 = parseJson("""{"lbl": 32, "new1": false, "new2": null}""")
val json4 = parseJson("""{"a": 3, "b": {"a": true }}""")
val json5 = parseJson("""{"a": 3, "b": {"a": true, "b": 43}, "c": null}""")
diff(json1, json2) should be(JsonPatch[Json](Add(Pointer("new"), false: Json)))
diff(json1, json3) should be(JsonPatch[Json](Add(Pointer("new2"), Json.Null), Add(Pointer("new1"), false: Json)))
diff(json4, json5) should be(JsonPatch[Json](Add(Pointer("b", "b"), 43: Json), Add(Pointer("c"), Json.Null)))
}
it should "contain a remove operation for each removed field" in {
val json1 = parseJson("""{"lbl": 32}""")
val json2 = parseJson("""{"lbl": 32, "old": false}""")
val json3 = parseJson("""{"lbl": 32, "old1": false, "old2": null}""")
val json4 = parseJson("""{"a": 3, "b": {"a": true }}""")
val json5 = parseJson("""{"a": 3, "b": {"a": true, "b": 43}, "c": null}""")
diff(json2, json1) should be(JsonPatch[Json](Remove(Pointer("old"))))
diff(json3, json1) should be(JsonPatch[Json](Remove(Pointer("old2")), Remove(Pointer("old1"))))
diff(json5, json4) should be(JsonPatch[Json](Remove(Pointer("b", "b")), Remove(Pointer("c"))))
}
it should "correctly handle array diffs in objects" in {
val json1 = parseJson("""{"lbl": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]}""")
val json2 = parseJson("""{"lbl": [1, 4, 5, 11, 6, 7]}""")
diff(json1, json2) should be(JsonPatch[Json](Remove(Pointer("lbl", "2")), Remove(Pointer("lbl", "1")), Add(Pointer("lbl", "3"), 11: Json), Remove(Pointer("lbl", "8")), Remove(Pointer("lbl", "7")), Remove(Pointer("lbl", "6"))))
}
it should "contain a replace operation for each changed field value" in {
val json1 = parseJson("""{"lbl": 32}""")
val json2 = parseJson("""{"lbl": 60}""")
val json3 = parseJson("""{"lbl": {"a": true}}""")
val json4 = parseJson("""{"lbl": {"a": null}}""")
diff(json1, json2) should be(JsonPatch[Json](Replace(Pointer("lbl"), 60: Json)))
diff(json1, json3) should be(JsonPatch[Json](Replace(Pointer("lbl"), parseJson("""{"a": true}"""))))
diff(json3, json4) should be(JsonPatch[Json](Replace(Pointer("lbl", "a"), Json.Null)))
}
it should "contain an add operation for each added element" in {
val json1 = parseJson("[]")
val json2 = parseJson("[1, 2, 3]")
val json3 = parseJson("[1, 2, 4, 5, 6, 3]")
diff(json1, json2) should be(
parsePatch("""[
| {"op": "add", "path": "/-", "value": 1},
| {"op": "add", "path": "/-", "value": 2},
| {"op": "add", "path": "/-", "value": 3}
| ]""".stripMargin))
diff(json2, json3) should be(
parsePatch("""[
| {"op": "add", "path": "/2", "value": 4},
| {"op": "add", "path": "/3", "value": 5},
| {"op": "add", "path": "/4", "value": 6}
| ]""".stripMargin))
}
it should "contain a remove operation for each deleted element" in {
val json1 = parseJson("[]")
val json2 = parseJson("[1, 2, 3]")
val json3 = parseJson("[1, 2, 4, 5, 6, 3]")
diff(json2, json1) should be(
parsePatch("""[
| {"op": "remove", "path": "/2"},
| {"op": "remove", "path": "/1"},
| {"op": "remove", "path": "/0"}
| ]""".stripMargin))
diff(json3, json2) should be(
parsePatch("""[
| {"op": "remove", "path": "/4"},
| {"op": "remove", "path": "/3"},
| {"op": "remove", "path": "/2"}
| ]""".stripMargin))
}
it should "contain a replace operation for each value that changed" in {
val json1 = parseJson("[1, 2, 3]")
val json2 = parseJson("[1, 2, 4]")
val json3 = parseJson("[1, 6, 3]")
val json4 = parseJson("""[1, {"a": 2}, 3]""")
val json5 = parseJson("""[1, {"a": 7}, 3]""")
diff(json1, json2) should be(
parsePatch("""[
| {"op": "replace", "path": "/2", "value": 4}
| ]""".stripMargin))
diff(json1, json3) should be(
parsePatch("""[
| {"op": "replace", "path": "/1", "value": 6}
| ]""".stripMargin))
diff(json4, json5) should be(
parsePatch("""[
| {"op": "replace", "path": "/1/a", "value": 7}
| ]""".stripMargin))
diff(json4, json3) should be(
parsePatch("""[
| {"op": "replace", "path": "/1", "value": 6}
| ]""".stripMargin))
}
"applying a diff" should "be a fix point when applied to the first object used for the diff" in {
val json1 = parseJson("""{"lbl": 32, "b": {"c": "gruik"}}""")
val json2 = parseJson("""{"a": 3, "b": {"a": true, "b": 43}, "c": null}""")
diff(json1, json2).apply[Try](json1).get should be(json2)
}
"applying a diff to strings" should "provide a correct string representation" in {
val json1 = parseJson("""{
| "a": 1,
| "b": true,
| "c": "test"
|}""".stripMargin)
val json2 = parseJson("""{"a":6,"c":"test2","d":false}""".stripMargin)
val json3 = diff(json1, json2).apply[Try](json1).get
json3 should be(json2)
}
"a remembering diff" should "correctly add removed values in array diffs" in {
val json1 = parseJson("""{"lbl": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]}""")
val json2 = parseJson("""{"lbl": [1, 4, 5, 11, 6, 7]}""")
import remembering._
diff(json1, json2) should be(JsonPatch(
Remove(Pointer("lbl", "2"), Some(3: Json)),
Remove(Pointer("lbl", "1"), Some(2: Json)),
Add(Pointer("lbl", "3"), 11: Json),
Remove(Pointer("lbl", "8"), Some(10: Json)),
Remove(Pointer("lbl", "7"), Some(9: Json)),
Remove(Pointer("lbl", "6"), Some(8: Json))))
}
it should "correctly add removed values in object diffs" in {
val json1 = parseJson("""{"a": 1, "b": true}""")
val json2 = parseJson("""{"a": 1}""")
import remembering._
diff(json1, json2) should be(JsonPatch(Remove(Pointer("b"), Some(true: Json))))
}
it should "correctly add replaced values in object diffs" in {
val json1 = parseJson("""{"a": 1, "b": false}""")
val json2 = parseJson("""{"a": 1, "b": "test"}""")
import remembering._
diff(json1, json2) should be(JsonPatch(Replace(Pointer("b"), "test": Json, Some(false: Json))))
}
}
|
gnieh/diffson
|
testkit/shared/src/main/scala/diffson/TestJsonDiff.scala
|
Scala
|
apache-2.0
| 7,545
|