code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package scorex.account
import scorex.crypto.{Base58, RIPEMD160}
/**
 * An account identified solely by its Base58 address string.
 * Equality and hashing delegate to the address, so two Account
 * instances with the same address behave as the same account.
 */
class Account(val address: String) extends Serializable {

  override def toString: String = address

  // Accounts are equal exactly when their address strings are equal.
  override def equals(b: Any): Boolean = b match {
    case other: Account => other.address == address
    case _ => false
  }

  // Consistent with equals: derived from the address string.
  override def hashCode(): Int = address.hashCode()
}
object Account {

  import scorex.crypto.HashFunctionsImpl._

  // Expected length in bytes of a decoded address: hash + version + checksum.
  val AddressLength = 25
  // NOTE(review): version byte 58 is unusual (Bitcoin mainnet uses 0) — confirm intended.
  val AddressVersion: Byte = 58
  // Number of checksum bytes taken from the double hash of the payload.
  val ChecksumLength = 4

  /**
   * Derives a Base58 address from a raw public key:
   * RIPEMD160(hash(publicKey)) with the version byte attached, followed by
   * a ChecksumLength-byte checksum of that payload.
   */
  def addressFromPubkey(publicKey: Array[Byte]) = {
    val publicKeyHash = new RIPEMD160().digest(hash(publicKey))
    // NOTE(review): `:+` APPENDS AddressVersion, although Base58Check
    // conventionally PREPENDS the version byte (the original comment even
    // said "prepend") — confirm this matches the network's address format.
    val withoutChecksum = publicKeyHash :+ AddressVersion
    val checkSum = doubleHash(withoutChecksum).take(ChecksumLength)
    Base58.encode(withoutChecksum ++ checkSum)
  }

  /**
   * Validates an address string: it must Base58-decode, be exactly
   * AddressLength bytes long, and its trailing ChecksumLength bytes must
   * match the first ChecksumLength bytes of doubleHash(payload).
   * Returns false for undecodable input instead of throwing.
   */
  def isValidAddress(address: String): Boolean =
    Base58.decode(address).map { addressBytes =>
      if (addressBytes.length != Account.AddressLength)
        false
      else {
        val checkSum = addressBytes.takeRight(ChecksumLength)
        val dh = doubleHash(addressBytes.dropRight(ChecksumLength))
        val checkSumGenerated = dh.take(ChecksumLength)
        checkSum.sameElements(checkSumGenerated)
      }
    }.getOrElse(false)
} | pozharko/Scorex-Lagonaki | scorex-crypto/src/main/scala/scorex/account/Account.scala | Scala | cc0-1.0 | 1,244 |
package xyz.joaovasques.sparkapi.actors
import org.scalatest._
// Placeholder spec for the Spark actor: the single example is marked
// `pending` until real behaviour is implemented.
class SparkActorSpec extends FunSpec with Matchers {

  describe("A Spark actor") {
    it("should have a test") {
      pending
    }
  }
}
| JoaoVasques/spark-api | src/test/scala/xyz/joaovasques/sparkapi/SparkActorSpec.scala | Scala | mit | 206 |
import sbt._
import sbt.Keys.scalaVersion
import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._
// Central declaration of library dependencies for the build.
// Each dependency is wrapped in `setting` so the platform-aware `%%%`
// operator can resolve the correct artifact (JVM / JS / Native).
object Deps {

  import Def.setting

  def scalacheck = setting("org.scalacheck" %%% "scalacheck" % "1.15.4")

  def shapeless = setting("com.chuusai" %%% "shapeless" % "2.3.7")

  def utest = setting("com.lihaoyi" %%% "utest" % "0.7.10")
}
| alexarchambault/scalacheck-shapeless | project/Deps.scala | Scala | apache-2.0 | 356 |
package org.scalatra.ssgi
package examples.servlet
import scala.xml.NodeSeq
// Minimal SSGI example application: every request receives the same response.
class HelloWorldApp extends Application {
  // Ignores the incoming request; returns a Response whose body is an inline XML literal.
  def apply(v1: Request) = Response(body = <h1>Hello, world!</h1>)
} | scalatra/ssgi | examples/servlet/src/main/scala/org/scalatra/ssgi/examples/servlet/HelloWorldApp.scala | Scala | bsd-2-clause | 188 |
package com.thoughtworks.pact.verify.json
import com.thoughtworks.pact.verify.pact.PactRequest
import org.apache.commons.logging.LogFactory
import play.api.libs.json._
import scala.util.{Success, Try}
/**
* Created by xfwu on 12/07/2017.
*/
/**
 * Created by xfwu on 12/07/2017.
 *
 * Substitutes `$name$` placeholders in pact request paths/bodies and in
 * parameter expressions, using values previously captured into a
 * parameter stack keyed by placeholder id.
 */
object PlaceHolder {

  private val logger = LogFactory.getFactory.getInstance(this.getClass)

  // A placeholder forming an entire quoted JSON string value, e.g. "$a.b$".
  private val PlaceHolderR = """"\\$([a-zA-Z\\.]+)\\$"""".r
  // A bare placeholder occurring anywhere in text, e.g. $a.b$.
  private val PlaceHolderWithoutQuoR = """\\$([a-zA-Z\\.]+)\\$""".r

  /**
   * Folds the configured `setParameters` into the parameter stack.
   * Values starting with "$.body" are resolved as JSON paths against the
   * response body; any other value is evaluated with calcParameter.
   *
   * NOTE(review): when `setParametersOpt` is None the incoming
   * `parameterStack` is dropped and an empty map is returned — confirm
   * this is intended rather than returning `parameterStack` unchanged.
   */
  def getParameterFormBody(responseBody: JsValue, setParametersOpt: Option[Map[String, String]],
                           parameterStack: Map[String, JsLookupResult]): Map[String, JsLookupResult] = {
    setParametersOpt.map(_.foldLeft(parameterStack){(acc,v) => {
      v match {
        case (k, r) if(r.startsWith("$.body")) =>
          acc + ((k, JsonPath.select(responseBody, r)))
        case (k, r) if(!r.startsWith("$.body")) =>
          acc + ((k, JsDefined(calcParameter(r,acc))))
      }
    }}).getOrElse(Map[String, JsLookupResult]())
  }

  /**
   * Evaluates a parameter expression: substitutes placeholders, then splits
   * the result on '+'. If every token parses as a number the result is
   * their sum (JsNumber); otherwise the raw tokens are concatenated into a
   * JsString. (A stray debug println that duplicated the logger.debug call
   * has been removed.)
   */
  private def calcParameter(rawEval:String, parameterStack: Map[String, JsLookupResult]): JsValue = {
    val eval = replacePlaceHolder(rawEval, parameterStack)
    logger.debug(s"rawEval: $rawEval, eval: $eval, parameterStack:$parameterStack")
    val tokens = eval.split("\\\\+").toSeq
    val tokenNumbersTry = Try(tokens.map(_.trim.toDouble))
    tokenNumbersTry match {
      case Success(numberTokens) => JsNumber(plusNumbers(numberTokens))
      case _ => JsString(concatString(tokens))
    }
  }

  // Sum of numeric tokens (Double sum widened to BigDecimal).
  private def plusNumbers(tokens: Seq[Double]):BigDecimal = tokens.reduce(_+_)

  // Concatenation of string tokens.
  private def concatString(tokens: Seq[String]):String = tokens.reduce(_+_)

  /**
   * Replaces each `$id$` placeholder found in `raw` with the value stored
   * in the stack. JsString values are inserted bare; other JSON values are
   * inserted via Json.stringify. Unknown or undefined ids are left as-is.
   * (Renamed from the typo `relacePlaceHolder`; the method is private, so
   * no external callers are affected.)
   */
  private def replacePlaceHolder(raw: String, parametersStack: Map[String, JsLookupResult]): String = {
    var temp = raw
    PlaceHolderWithoutQuoR.findAllMatchIn(raw).map { m => m.group(1) }.foreach { placeId =>
      val placeJsValueOpt = parametersStack.get(placeId)
      if (placeJsValueOpt.isDefined && !placeJsValueOpt.get.isInstanceOf[JsUndefined]) {
        val placeValue = placeJsValueOpt.get.get match {
          case JsString(s) => s
          case v: JsValue => Json.stringify(v)
        }
        temp = temp.replaceAll("\\\\$" + placeId + "\\\\$", placeValue)
      }
    }
    temp
  }

  /**
   * Returns a copy of `request` whose path and JSON body have all known
   * placeholders substituted from the parameter stack. The body is
   * round-tripped through its string form and re-parsed as JSON.
   */
  def replacePlaceHolderParameter(request: PactRequest, parametersStack: Map[String, JsLookupResult]): PactRequest = {
    //parameters
    val body = request.body
    var requestBufOpt = body.map(_.toString())
    if (request.body.isDefined) {
      PlaceHolderR.findAllMatchIn(request.body.get.toString()).map { m => m.group(1) }.foreach { placeId =>
        val placeJsValueOpt: Option[JsLookupResult] = parametersStack.get(placeId)
        if (placeJsValueOpt.isDefined && !placeJsValueOpt.get.isInstanceOf[JsUndefined]) {
          // drop(1).dropRight(1) strips the first/last character — presumably
          // the surrounding quotes of a stringified JsString; confirm for
          // non-string JSON values.
          val placeValue = placeJsValueOpt.get.get.result.get.toString().drop(1).dropRight(1)
          logger.debug(placeValue)
          logger.trace(requestBufOpt)
          requestBufOpt = requestBufOpt.map(requestBuf => requestBuf.replaceAll("\\\\$" + placeId + "\\\\$", placeValue))
          logger.trace(requestBufOpt)
        }
      }
    }
    val url = replacePlaceHolder(request.path, parametersStack)
    request.copy(path = url, body = requestBufOpt.map(requestBuf => Json.parse(requestBuf)))
  }
}
| XuefengWu/pact_verify | src/main/scala/com/thoughtworks/pact/verify/json/PlaceHolder.scala | Scala | mit | 3,402 |
/*
* Copyright 2018 Analytics Zoo Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.zoo.examples.localEstimator
import java.nio.ByteBuffer
import java.nio.file.{Files, Paths}
import com.intel.analytics.bigdl.dataset.ByteRecord
import com.intel.analytics.bigdl.dataset.image.LabeledBGRImage
import com.intel.analytics.bigdl.utils.File
import com.intel.analytics.zoo.pipeline.estimator.EstimateSupportive
import org.slf4j.LoggerFactory
import scala.collection.mutable.ArrayBuffer
object Cifar10DataLoader extends ImageProcessing with EstimateSupportive {

  val logger = LoggerFactory.getLogger(getClass)

  // Per-channel mean/std statistics used for normalization, kept separately
  // for the train and test splits.
  // NOTE(review): tuple element order is presumably (R, G, B) — confirm
  // against ImageProcessing.bgrImgNormalize.
  val trainMean = (0.4913996898739353, 0.4821584196221302, 0.44653092422369434)
  val trainStd = (0.24703223517429462, 0.2434851308749409, 0.26158784442034005)
  val testMean = (0.4942142913295297, 0.4851314002725445, 0.45040910258647154)
  val testStd = (0.2466525177466614, 0.2428922662655766, 0.26159238066790275)

  // CIFAR-10 images are 32x32 pixels.
  val resizeW = 32
  val resizeH = 32

  /**
   * Loads and augments the training split:
   * decode bytes -> normalize -> random horizontal flip (p = 0.5)
   * -> random 32x32 crop with 4px padding.
   */
  def loadTrainData(imageDirPath: String): Array[LabeledBGRImage] = {
    val records = timing("load bytes from bin") {
      loadTrain(imageDirPath)
    }
    logger.info(s"${records.length} train data loaded")
    val labeledBGRImages = timing("transfer bytes to bgrImages") {
      records.map(record => bytesToLabeledBGRImage(record, resizeW, resizeH))
    }
    val normalized = timing("normalize the images") {
      labeledBGRImages.map(image => bgrImgNormalize(image, trainMean, trainStd))
    }
    val hFliped = timing("hflip the images") {
      normalized.map(image => hFlip(image, 0.5))
    }
    val randomCropped = timing("randomCrop the images") {
      hFliped.map(image => bgrImageRandomCrop(image, 32, 32, 4))
    }
    randomCropped
  }

  /**
   * Loads the test split: decode bytes -> normalize with the test-split
   * statistics. No augmentation is applied.
   */
  def loadTestData(imageDirPath: String): Array[LabeledBGRImage] = {
    val records = timing("load bytes from bin") {
      loadTest(imageDirPath)
    }
    logger.info(s"${records.length} test data loaded")
    val labeledBGRImages = timing("transfer bytes to bgrImages") {
      records.map(record => bytesToLabeledBGRImage(record, resizeW, resizeH))
    }
    val normalized = timing("normalize the images") {
      labeledBGRImages.map(image => bgrImgNormalize(image, testMean, testStd))
    }
    normalized
  }

  /** Reads all five CIFAR-10 training batch files under `dataFile`. */
  def loadTrain(dataFile: String): Array[ByteRecord] = {
    val allFiles = Array(
      dataFile + "/data_batch_1.bin",
      dataFile + "/data_batch_2.bin",
      dataFile + "/data_batch_3.bin",
      dataFile + "/data_batch_4.bin",
      dataFile + "/data_batch_5.bin"
    )
    val result = new ArrayBuffer[ByteRecord]()
    allFiles.foreach(load(_, result))
    result.toArray
  }

  /** Reads the single CIFAR-10 test batch file under `dataFile`. */
  def loadTest(dataFile: String): Array[ByteRecord] = {
    val result = new ArrayBuffer[ByteRecord]()
    val testFile = dataFile + "/test_batch.bin"
    load(testFile, result)
    result.toArray
  }

  /**
   * Parses one CIFAR-10 binary batch file into ByteRecords appended to
   * `result`. Each file record is 1 label byte followed by 3 * 32 * 32
   * pixel bytes stored as three whole colour planes.
   * Supports both HDFS ("hdfs:" prefix) and local paths.
   */
  private def load(featureFile: String, result: ArrayBuffer[ByteRecord]): Unit = {
    val rowNum = 32
    val colNum = 32
    // Size of one record in the file: three colour planes plus the label byte.
    val imageOffset = rowNum * colNum * 3 + 1
    // Size of a single colour plane.
    val channelOffset = rowNum * colNum
    // Output image bytes are prefixed with two Ints (8 bytes): rows, cols.
    val bufferOffset = 8
    val featureBuffer = if (featureFile.startsWith("hdfs:")) {
      ByteBuffer.wrap(File.readHdfsByte(featureFile))
    } else {
      ByteBuffer.wrap(Files.readAllBytes(Paths.get(featureFile)))
    }
    val featureArray = featureBuffer.array()
    // Number of complete records in the file.
    val featureCount = featureArray.length / (rowNum * colNum * 3 + 1)
    var i = 0
    while (i < featureCount) {
      val img = new Array[Byte]((rowNum * colNum * 3 + bufferOffset))
      val byteBuffer = ByteBuffer.wrap(img)
      // 8-byte header: height then width.
      byteBuffer.putInt(rowNum)
      byteBuffer.putInt(colNum)
      val label = featureArray(i * imageOffset).toFloat
      var y = 0
      val start = i * imageOffset + 1
      while (y < rowNum) {
        var x = 0
        while (x < colNum) {
          // Interleave the planar source into per-pixel order: plane 0 goes
          // to offset +2, plane 1 to +1, plane 2 to +0 within each pixel
          // triple (i.e. planes are written in reverse — BGR pixel layout).
          img((x + y * colNum) * 3 + 2 + bufferOffset) =
            featureArray(start + x + y * colNum)
          img((x + y * colNum) * 3 + 1 + bufferOffset) =
            featureArray(start + x + y * colNum + channelOffset)
          img((x + y * colNum) * 3 + bufferOffset) =
            featureArray(start + x + y * colNum + 2 * channelOffset)
          x += 1
        }
        y += 1
      }
      // Labels are shifted from 0-based (file) to 1-based (label + 1.0f).
      result.append(ByteRecord(img, label + 1.0f))
      i += 1
    }
  }
}
| intel-analytics/analytics-zoo | zoo/src/main/scala/com/intel/analytics/zoo/examples/localEstimator/Cifar10DataLoader.scala | Scala | apache-2.0 | 4,821 |
package playchat.api
trait Provider | MrBogomips/PlayChat | app/playchat/api/Provider.scala | Scala | gpl-2.0 | 36 |
package controllers
import io.flow.delta.www.lib.UiData
import io.flow.play.controllers.{FlowController, FlowControllerComponents}
import play.api.i18n.{I18nSupport, MessagesApi}
import play.api.mvc.ControllerComponents
// Handles user logout: clears the Play session and shows a confirmation page.
class LogoutController @javax.inject.Inject() (
  override val messagesApi: MessagesApi,
  val controllerComponents: ControllerComponents,
  val flowControllerComponents: FlowControllerComponents
) extends FlowController with I18nSupport {

  // Renders the post-logout confirmation page.
  def logged_out = Action { implicit request =>
    Ok(views.html.logged_out(UiData(requestPath = request.path)))
  }

  // Drops the current session and redirects to the confirmation page.
  def index() = Action {
    Redirect("/logged_out").withNewSession
  }
}
| flowcommerce/delta | www/app/controllers/LogoutController.scala | Scala | mit | 686 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.jdbc
import java.sql.Connection
import org.apache.spark.sql.types._
import org.apache.spark.annotation.DeveloperApi
/**
 * :: DeveloperApi ::
 * A database type definition coupled with the jdbc type needed to send null
 * values to the database.
 * @param databaseTypeDefinition The database type definition
 * @param jdbcNullType The jdbc type (as defined in java.sql.Types) used to
 *                     send a null value to the database.
 */
@DeveloperApi
case class JdbcType(databaseTypeDefinition : String, jdbcNullType : Int)
/**
* :: DeveloperApi ::
* Encapsulates everything (extensions, workarounds, quirks) to handle the
* SQL dialect of a certain database or jdbc driver.
* Lots of databases define types that aren't explicitly supported
* by the JDBC spec. Some JDBC drivers also report inaccurate
* information---for instance, BIT(n>1) being reported as a BIT type is quite
* common, even though BIT in JDBC is meant for single-bit values. Also, there
* does not appear to be a standard name for an unbounded string or binary
* type; we use BLOB and CLOB by default but override with database-specific
* alternatives when these are absent or do not behave correctly.
*
* Currently, the only thing done by the dialect is type mapping.
* `getCatalystType` is used when reading from a JDBC table and `getJDBCType`
* is used when writing to a JDBC table. If `getCatalystType` returns `null`,
* the default type handling is used for the given JDBC type. Similarly,
* if `getJDBCType` returns `(null, None)`, the default type handling is used
* for the given Catalyst type.
*/
@DeveloperApi
abstract class JdbcDialect extends Serializable {
  /**
   * Check if this dialect instance can handle a certain jdbc url.
   * @param url the jdbc url.
   * @return True if the dialect can be applied on the given jdbc url.
   * @throws NullPointerException if the url is null.
   */
  def canHandle(url : String): Boolean
  /**
   * Get the custom datatype mapping for the given jdbc meta information.
   * @param sqlType The sql type (see java.sql.Types)
   * @param typeName The sql type name (e.g. "BIGINT UNSIGNED")
   * @param size The size of the type.
   * @param md Result metadata associated with this type.
   * @return The actual DataType (subclasses of [[org.apache.spark.sql.types.DataType]])
   *         or null if the default type mapping should be used.
   */
  def getCatalystType(
    sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = None
  /**
   * Retrieve the jdbc / sql type for a given datatype.
   * @param dt The datatype (e.g. [[org.apache.spark.sql.types.StringType]])
   * @return The new JdbcType if there is an override for this DataType
   */
  def getJDBCType(dt: DataType): Option[JdbcType] = None
  /**
   * Quotes the identifier. This is used to put quotes around the identifier in case the column
   * name is a reserved keyword, or in case it contains characters that require quotes (e.g. space).
   */
  def quoteIdentifier(colName: String): String = {
    // Default: ANSI SQL quoting — wrap the identifier in double quotes.
    s""""$colName""""
  }
  /**
   * Get the SQL query that should be used to find if the given table exists. Dialects can
   * override this method to return a query that works best in a particular database.
   * @param table The name of the table.
   * @return The SQL query to use for checking the table.
   */
  def getTableExistsQuery(table: String): String = {
    // WHERE 1=0 returns no rows, so only the table's existence is checked.
    s"SELECT * FROM $table WHERE 1=0"
  }
  /**
   * Override connection specific properties to run before a select is made. This is in place to
   * allow dialects that need special treatment to optimize behavior.
   * @param connection The connection object
   * @param properties The connection properties. This is passed through from the relation.
   */
  def beforeFetch(connection: Connection, properties: Map[String, String]): Unit = {
    // Default: no pre-fetch setup required.
  }
}
/**
* :: DeveloperApi ::
* Registry of dialects that apply to every new jdbc [[org.apache.spark.sql.DataFrame]].
*
* If multiple matching dialects are registered then all matching ones will be
* tried in reverse order. A user-added dialect will thus be applied first,
* overwriting the defaults.
*
* Note that all new dialects are applied to new jdbc DataFrames only. Make
* sure to register your dialects first.
*/
@DeveloperApi
object JdbcDialects {

  // Registered dialects, most recently registered first. Must be
  // initialized before the registerDialect calls below run.
  private[this] var dialects = List[JdbcDialect]()

  /**
   * Register a dialect for use on all new matching jdbc [[org.apache.spark.sql.DataFrame]].
   * Readding an existing dialect will cause a move-to-front.
   *
   * @param dialect The new dialect.
   */
  def registerDialect(dialect: JdbcDialect) : Unit = {
    val remaining = dialects.filterNot(_ == dialect)
    dialects = dialect :: remaining
  }

  /**
   * Unregister a dialect. Does nothing if the dialect is not registered.
   *
   * @param dialect The jdbc dialect.
   */
  def unregisterDialect(dialect : JdbcDialect) : Unit = {
    dialects = dialects.filterNot(_ == dialect)
  }

  registerDialect(MySQLDialect)
  registerDialect(PostgresDialect)
  registerDialect(DB2Dialect)
  registerDialect(MsSqlServerDialect)
  registerDialect(DerbyDialect)
  registerDialect(OracleDialect)

  /**
   * Fetch the JdbcDialect class corresponding to a given database url.
   * Falls back to NoopDialect when nothing matches; combines all matches
   * when more than one dialect can handle the url.
   */
  private[sql] def get(url: String): JdbcDialect = {
    dialects.filter(_.canHandle(url)) match {
      case Nil => NoopDialect
      case single :: Nil => single
      case several => new AggregatedDialect(several)
    }
  }
}
/**
 * NOOP dialect object, always returning the neutral element.
 * Used as the fallback when no registered dialect matches a url; all
 * type-mapping hooks keep their default (no-override) behaviour.
 */
private object NoopDialect extends JdbcDialect {
  override def canHandle(url : String): Boolean = true
}
| chenc10/Spark-PAF | sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala | Scala | apache-2.0 | 6,544 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.expressions
import org.apache.flink.table.api._
import org.apache.flink.table.planner.expressions.utils.ExpressionTestBase
import org.apache.flink.table.types.DataType
import org.apache.flink.types.Row
import org.junit.Test
class DecimalTypeTest extends ExpressionTestBase {
@Test
def testDecimalLiterals(): Unit = {
  // Decimal/double literals through the Table and SQL APIs.
  // NOTE(review): most testAllApis calls in this file pass three arguments
  // while a few pass four (e.g. testUnaryPlusMinus) — looks like a
  // duplicated-line artifact; confirm against ExpressionTestBase's
  // testAllApis signature.
  // implicit double
  testAllApis(
    11.2,
    "11.2",
    "11.2")
  // implicit double
  testAllApis(
    0.7623533651719233,
    "0.7623533651719233",
    "0.7623533651719233")
  // explicit decimal (with precision of 19)
  testAllApis(
    BigDecimal("1234567891234567891"),
    "1234567891234567891",
    "1234567891234567891")
  // explicit decimal (high precision, not SQL compliant)
  testTableApi(
    BigDecimal("123456789123456789123456789"),
    "123456789123456789123456789p",
    "123456789123456789123456789")
  // explicit decimal (high precision, not SQL compliant)
  testTableApi(
    BigDecimal("12.3456789123456789123456789"),
    "12.3456789123456789123456789p",
    "12.3456789123456789123456789")
}
@Test
def testDecimalBorders(): Unit = {
testAllApis(
Double.MaxValue,
Double.MaxValue.toString,
Double.MaxValue.toString)
testAllApis(
Double.MinValue,
Double.MinValue.toString,
Double.MinValue.toString)
testAllApis(
Double.MinValue.cast(DataTypes.FLOAT),
s"CAST(${Double.MinValue} AS FLOAT)",
Float.NegativeInfinity.toString)
testAllApis(
Byte.MinValue.cast(DataTypes.TINYINT),
s"CAST(${Byte.MinValue} AS TINYINT)",
Byte.MinValue.toString)
testAllApis(
Byte.MinValue.cast(DataTypes.TINYINT) - 1.cast(DataTypes.TINYINT),
s"CAST(${Byte.MinValue} AS TINYINT) - CAST(1 AS TINYINT)",
Byte.MaxValue.toString)
testAllApis(
Short.MinValue.cast(DataTypes.SMALLINT),
s"CAST(${Short.MinValue} AS SMALLINT)",
Short.MinValue.toString)
testAllApis(
Int.MinValue.cast(DataTypes.INT) - 1,
s"CAST(${Int.MinValue} AS INT) - 1",
Int.MaxValue.toString)
testAllApis(
Long.MinValue.cast(DataTypes.BIGINT()),
s"CAST(${Long.MinValue} AS BIGINT)",
Long.MinValue.toString)
}
@Test
def testDefaultDecimalCasting(): Unit = {
// from String
testTableApi(
"123456789123456789123456789".cast(DataTypes.DECIMAL(38, 0)),
"123456789123456789123456789")
// from double
testAllApis(
'f3.cast(DataTypes.DECIMAL(38, 0)),
"CAST(f3 AS DECIMAL)",
"4")
}
@Test
def testDecimalCasting(): Unit = {
testSqlApi(
"CAST(f3 AS DECIMAL(10,2))",
"4.20"
)
// to double
testAllApis(
'f0.cast(DataTypes.DOUBLE),
"CAST(f0 AS DOUBLE)",
"1.2345678912345679E8")
// to int
testAllApis(
'f4.cast(DataTypes.INT),
"CAST(f4 AS INT)",
"123456789")
// to long
testAllApis(
'f4.cast(DataTypes.BIGINT()),
"CAST(f4 AS BIGINT)",
"123456789")
// to boolean (not SQL compliant)
testTableApi(
'f1.cast(DataTypes.BOOLEAN),
"true")
testTableApi(
'f5.cast(DataTypes.BOOLEAN),
"false")
testTableApi(
BigDecimal("123456789.123456789123456789").cast(DataTypes.DOUBLE),
"1.2345678912345679E8")
// testing padding behaviour
testSqlApi(
"CAST(CAST(f67 AS DECIMAL(10, 5)) AS VARCHAR)",
"1.00000"
)
}
@Test
def testDecimalArithmetic(): Unit = {
// note: calcite type inference:
// Decimal+ExactNumeric => Decimal
// Decimal+Double => Double.
// implicit cast to decimal
testAllApis(
'f1 + 12,
"f1 + 12",
"123456789123456789123456801")
// implicit cast to decimal
testAllApis(
lit(12) + 'f1,
"12 + f1",
"123456789123456789123456801")
testAllApis(
'f1 + BigDecimal("12.3"),
"f1 + 12.3",
"123456789123456789123456801.3"
)
testAllApis(
lit(BigDecimal("12.3").bigDecimal) + 'f1,
"12.3 + f1",
"123456789123456789123456801.3")
testAllApis(
'f1 + 'f1,
"f1 + f1",
"246913578246913578246913578")
testAllApis(
'f1 - 'f1,
"f1 - f1",
"0")
// exceeds max precision 38.
// 'f1 * 'f1,
// "f1 * f1",
// "f1 * f1",
// "15241578780673678546105778281054720515622620750190521")
testAllApis(
'f1 / 'f1,
"f1 / f1",
"1.00000000")
// Decimal(30,0) / Decimal(30, 0) => Decimal(61,31) => Decimal(38,8)
testAllApis(
'f1 % 'f1,
"MOD(f1, f1)",
"0")
testAllApis(
-'f0,
"-f0",
"-123456789.123456789123456789")
}
@Test
def testDecimalComparison(): Unit = {
testAllApis(
'f1 < 12,
"f1 < 12",
"false")
testAllApis(
'f1 > 12,
"f1 > 12",
"true")
testAllApis(
'f1 === 12,
"f1 = 12",
"false")
testAllApis(
'f5 === 0,
"f5 = 0",
"true")
testAllApis(
'f1 === BigDecimal("123456789123456789123456789"),
"f1 = CAST('123456789123456789123456789' AS DECIMAL(30, 0))",
"true")
testAllApis(
'f1 !== BigDecimal("123456789123456789123456789"),
"f1 <> CAST('123456789123456789123456789' AS DECIMAL(30, 0))",
"false")
testAllApis(
'f4 < 'f0,
"f4 < f0",
"true")
testAllApis(
12.toExpr < 'f1,
"12 < f1",
"true")
testAllApis(
12.toExpr > 'f1,
"12 > f1",
"12 > f1",
"false")
testAllApis(
12.toExpr - 'f37,
"12 - f37",
"10")
testAllApis(
12.toExpr + 'f37,
"12 + f37",
"14")
testAllApis(
12.toExpr * 'f37,
"12 * f37",
"24")
testAllApis(
12.toExpr / 'f37,
"12 / f37",
"6")
}
@Test
def testFieldAccess(): Unit = {
// the most basic case
testAllApis(
'f6,
"f6",
"123")
testAllApis(
'f7,
"f7",
"123.45")
// data from source are rounded to their declared scale before entering next step
testAllApis(
'f8,
"f8",
"100.00")
testAllApis(
'f8 + 'f8,
"f8 + f8",
"200.00")
// trailing zeros are padded to the scale
testAllApis(
'f9,
"f9",
"100.10")
testAllApis(
'f9 + 'f9,
"f9 + f9",
"200.20")
// source data is within precision after rounding
testAllApis(
'f10,
"f10",
"100.00")
testAllApis(
'f10 + 'f10,
"f10 + f10",
"200.00")
// source data overflows over precision (after rounding)
testAllApis(
'f11,
"f11",
"null")
testAllApis(
'f12,
"f12",
"null")
}
@Test
def testUnaryPlusMinus(): Unit = {
testAllApis(
+ 'f6,
"+f6",
"123")
testAllApis(
- 'f7,
"-f7",
"-f7",
"-123.45")
testAllApis(
- (( + 'f6) - ( - 'f7)),
"- (( + f6) - ( - f7))",
"-246.45")
}
@Test
def testPlusMinus(): Unit = {
// see calcite ReturnTypes.DECIMAL_SUM
// s = max(s1,s2), p-s = max(p1-s1, p2-s2) + 1
// p then is capped at 38
testAllApis(
'f13 + 'f14,
"f13 + f14",
"300.2434")
testAllApis(
'f13 - 'f14,
"f13 - f14",
"-100.0034")
// INT => DECIMAL(10,0)
// approximate + exact => approximate
testAllApis(
'f7 + 'f2,
"f7 + f2",
"165.45")
testAllApis(
'f2 + 'f7,
"f2 + f7",
"165.45")
testAllApis(
'f7 + 'f3,
"f7 + f3",
"127.65")
testAllApis(
'f3 + 'f7,
"f3 + f7",
"127.65")
// our result type precision is capped at 38
// SQL2003 $6.26 -- result scale is dictated as max(s1,s2). no approximation allowed.
// calcite -- scale is not reduced; integral part may be reduced. overflow may occur
// (38,10)+(38,28)=>(57,28)=>(38,28)
// T-SQL -- scale may be reduced to keep the integral part. approximation may occur
// (38,10)+(38,28)=>(57,28)=>(38,9)
testAllApis(
'f15 + 'f16,
"f15 + f16",
"300.0246913578012345678901234567")
testAllApis(
'f15 - 'f16,
"f15 - f16",
"-100.0000000000012345678901234567")
// 10 digits integral part
testAllApis(
'f17 + 'f18,
"f17 + f18",
"null")
testAllApis(
'f17 - 'f18,
"f17 - f18",
"null")
// requires 39 digits
testAllApis(
'f19 + 'f19,
"f19 + f19",
"null")
// overflows in subexpression
testAllApis(
'f19 + 'f19 - 'f19,
"f19 + f19 - f19",
"null")
}
@Test
def testMultiply(): Unit = {
// see calcite ReturnTypes.DECIMAL_PRODUCT
// s = s1+s2, p = p1+p2
// both p&s are capped at 38
// if s>38, result is rounded to s=38, and the integral part can only be zero
testAllApis(
'f20 * 'f20,
"f20 * f20",
"1.0000")
testAllApis(
'f20 * 'f21,
"f20 * f21",
"2.000000")
// INT => DECIMAL(10,0)
// approximate * exact => approximate
testAllApis(
'f20 * 'f22,
"f20 * f22",
"200.00")
testAllApis(
'f22 * 'f20,
"f22 * f20",
"200.00")
testAllApis(
'f20 * 'f23,
"f20 * f23",
"3.14")
testAllApis(
'f23 * 'f20,
"f23 * f20",
"3.14")
// precision is capped at 38; scale will not be reduced (unless over 38)
// similar to plus&minus, and calcite behavior is different from T-SQL.
testAllApis(
'f24 * 'f24,
"f24 * f24",
"1.000000000000")
testAllApis(
'f24 * 'f25,
"f24 * f25",
"2.0000000000000000")
testAllApis(
'f26 * 'f26,
"f26 * f26",
"0.00010000000000000000000000000000000000"
)
// scalastyle:off
// we don't have this ridiculous behavior:
// https://blogs.msdn.microsoft.com/sqlprogrammability/2006/03/29/multiplication-and-division-with-numerics/
// scalastyle:on
testAllApis(
'f27 * 'f28,
"f27 * f28",
"0.00000060000000000000"
)
// result overflow
testAllApis(
'f29 * 'f29,
"f29 * f29",
"null"
)
//(60,40)=>(38,38), no space for integral part
testAllApis(
'f30 * 'f30,
"f30 * f30",
"null"
)
}
@Test
def testDivide(): Unit = {
// the default impl of Calcite apparently borrows from T-SQL, but differs in details.
// Flink overrides it to follow T-SQL exactly. See FlinkTypeFactory.createDecimalQuotient()
testAllApis(
'f31 / 'f32,
"f31 / f32",
"0.333333")
testAllApis(
'f31 / 'f33,
"f31 / f33",
"0.3333333")
testAllApis(
'f31 / 'f34,
"f31 / f34",
"f31 / f34",
"0.3333333333")
testAllApis(
'f31 / 'f35,
"f31 / f35",
"0.333333")
// INT => DECIMAL(10,0)
// approximate / exact => approximate
testAllApis(
'f36 / 'f37,
"f36 / f37",
"0.5000000000000")
testAllApis(
'f37 / 'f36,
"f37 / f36",
"2.00000000000")
testAllApis(
'f36 / 'f38,
"f36 / f38",
(1.0/3.0).toString)
testAllApis(
'f38 / 'f36,
"f38 / f36",
(3.0/1.0).toString)
// result overflow, because result type integral part is reduced
testAllApis(
'f39 / 'f40,
"f39 / f40",
"null")
}
@Test
def testMod(): Unit = {
// MOD(Exact1, Exact2) => Exact2
testAllApis(
'f41 % 'f42,
"mod(f41, f42)",
"3.0000")
testAllApis(
'f42 % 'f41,
"mod(f42, f41)",
"2.0000")
testAllApis(
'f41 % 'f43,
"mod(f41, f43)",
"3.00")
testAllApis(
'f43 % 'f41,
"mod(f43, f41)",
"1.00")
// signs. consistent with Java's % operator.
testAllApis(
'f44 % 'f45,
"mod(f44, f45)",
(3%5).toString)
testAllApis(
-'f44 % 'f45,
"mod(-f44, f45)",
((-3)%5).toString)
testAllApis(
'f44 % -'f45,
"mod(f44, -f45)",
(3%(-5)).toString)
testAllApis(
-'f44 % -'f45,
"mod(-f44, -f45)",
((-3)%(-5)).toString)
// rounding in case s1>s2. note that SQL2003 requires s1=s2=0.
// (In T-SQL, s2 is expanded to s1, so that there's no rounding.)
testAllApis(
'f46 % 'f47,
"mod(f46, f47)",
"3.1234")
}
@Test // functions that treat Decimal as exact value
def testExactionFunctions(): Unit = {
testAllApis(
ifThenElse('f48 > 'f49, 'f48, 'f49),
"if(f48 > f49, f48, f49)",
"3.14")
testAllApis(
'f48.abs(),
"abs(f48)",
"3.14"
)
testAllApis(
(-'f48).abs(),
"abs(-f48)",
"3.14"
)
testAllApis(
'f48.floor(),
"floor(f48)",
"3"
)
testAllApis(
'f48.ceil(),
"ceil(f48)",
"4"
)
// calcite: SIGN(Decimal(p,s))=>Decimal(p,s)
testAllApis(
'f48.sign(),
"sign(f48)",
"1.00"
)
testAllApis(
(-'f48).sign(),
"sign(-f48)",
"-1.00"
)
testAllApis(
('f48 - 'f48).sign(),
"sign(f48 - f48)",
"0.00"
)
// ROUND(Decimal(p,s)[,INT])
testAllApis(
'f50.round(0),
"round(f50)",
"647")
testAllApis(
'f50.round(0),
"round(f50,0)",
"647")
testAllApis(
'f50.round(1),
"round(f50,1)",
"646.6")
testAllApis(
'f50.round(2),
"f50.round(2)",
"round(f50,2)",
"646.65")
testAllApis(
'f50.round(3),
"round(f50,3)",
"646.646")
testAllApis(
'f50.round(4),
"round(f50,4)",
"646.646")
testAllApis(
'f50.round(-1),
"round(f50,-1)",
"650")
testAllApis(
'f50.round(-2),
"round(f50,-2)",
"600")
testAllApis(
'f50.round(-3),
"round(f50,-3)",
"1000")
testAllApis(
'f50.round(-4),
"round(f50,-4)",
"0")
testAllApis(
'f51.round(1),
"round(f51,1)",
"100.0")
testAllApis(
(-'f51).round(1),
"round(-f51,1)",
"-100.0")
testAllApis(
('f51).round(-1),
"round(f51,-1)",
"100")
testAllApis(
(-'f51).round(-1),
"round(-f51,-1)",
"-100")
testAllApis(
('f52).round(-1),
"round(f52,-1)",
"null")
}
@Test // functions e.g. sin() that treat Decimal as double
def testApproximateFunctions(): Unit = {
  // Intentionally empty: moving the ApproximateFunctions tests from
  // sql/DecimalITCase.scala and table/DecimalITCase.scala is deferred
  // because they would fail until FLINK-14036 is fixed.
}
@Test
def testCaseWhen(): Unit = {
// result type: SQL2003 $9.23, calcite RelDataTypeFactory.leastRestrictive()
testSqlApi(
"case f53 when 0 then f53 else f54 end",
"0.0100")
testSqlApi(
"case f53 when 0 then f53 else f2 end",
"42.0000")
testSqlApi(
"case f53 when 0 then f23 else f53 end",
BigDecimal("0.0001").doubleValue().toString)
}
@Test
def testCast(): Unit = {
// String, numeric/Decimal => Decimal
testSqlApi(
"cast(f48 as Decimal(8,4))",
"3.1400")
testSqlApi(
"cast(f2 as Decimal(8,4))",
"42.0000")
testSqlApi(
"cast(f3 as Decimal(8,4))",
"4.2000")
testSqlApi(
"cast(f55 as Decimal(8,4))",
"3.1400")
// round up
testSqlApi(
"cast(f56 as Decimal(8,1))",
"3.2")
testSqlApi(
"cast(f57 as Decimal(8,1))",
"3.2")
testSqlApi(
"cast(f58 as Decimal(8,1))",
"3.2")
testSqlApi(
"cast(f59 as Decimal(3,2))",
"null")
// Decimal => String, numeric
testSqlApi(
"cast(f60 as VARCHAR(64))",
"1.99")
testSqlApi(
"cast(f61 as DOUBLE)",
"1.99")
testSqlApi(
"cast(f62 as INT)",
"1")
}
@Test
def testEquality(): Unit = {
// expressions that test equality.
// =, CASE, NULLIF, IN, IS DISTINCT FROM
testSqlApi(
"f63=f64",
"true")
testSqlApi(
"f63=f65",
"true")
testSqlApi(
"f63=f66",
"true")
testSqlApi(
"f64=f63",
"true")
testSqlApi(
"f65=f63",
"true")
testSqlApi(
"f66=f63",
"true")
testSqlApi(
"f63 IN(f64)",
"true")
testSqlApi(
"f63 IN(f65)",
"true")
testSqlApi(
"f63 IN(f66)",
"true")
testSqlApi(
"f64 IN(f63)",
"true")
testSqlApi(
"f65 IN(f63)",
"true")
testSqlApi(
"f66 IN(f63)",
"true")
testSqlApi(
"f63 IS DISTINCT FROM f64",
"false")
testSqlApi(
"f64 IS DISTINCT FROM f63",
"false")
testSqlApi(
"f63 IS DISTINCT FROM f65",
"false")
testSqlApi(
"f65 IS DISTINCT FROM f63",
"false")
testSqlApi(
"f63 IS DISTINCT FROM f66",
"false")
testSqlApi(
"f66 IS DISTINCT FROM f63",
"false")
testSqlApi(
"NULLIF(f63,f64)",
"null"
)
testSqlApi(
"NULLIF(f63,f65)",
"null"
)
testSqlApi(
"NULLIF(f63,f66)",
"null"
)
testSqlApi(
"NULLIF(f64,f63)",
"null"
)
testSqlApi(
"NULLIF(f65,f63)",
"null"
)
testSqlApi(
"NULLIF(f66,f63)",
"null"
)
testSqlApi(
"NULLIF(f63,f64)",
"null"
)
testSqlApi(
"case f63 when f64 then 1 else 0 end",
"1"
)
testSqlApi(
"case f63 when f65 then 1 else 0 end",
"1"
)
testSqlApi(
"case f63 when f66 then 1 else 0 end",
"1"
)
testSqlApi(
"case f64 when f63 then 1 else 0 end",
"1"
)
testSqlApi(
"case f65 when f64 then 1 else 0 end",
"1"
)
testSqlApi(
"case f66 when f65 then 1 else 0 end",
"1"
)
}
@Test
def testComparison(): Unit = {
testSqlApi(
"f63 < f64",
"false")
testSqlApi(
"f63 < f65",
"false")
testSqlApi(
"f63 < f66",
"false")
testSqlApi(
"f64 < f63",
"false")
testSqlApi(
"f65 < f63",
"false")
testSqlApi(
"f66 < f63",
"false")
// no overflow during type conversion.
// conceptually both operands are promoted to infinite precision before comparison.
testSqlApi(
"f67 < f68",
"true")
testSqlApi(
"f67 < f69",
"true")
testSqlApi(
"f67 < f70",
"true")
testSqlApi(
"f68 < f67",
"false")
testSqlApi(
"f69 < f67",
"false")
testSqlApi(
"f70 < f67",
"false")
testSqlApi(
"f63 between f64 and 1",
"true")
testSqlApi(
"f64 between f63 and 1",
"true")
testSqlApi(
"f63 between f65 and 1",
"true")
testSqlApi(
"f65 between f63 and 1",
"true")
testSqlApi(
"f63 between f66 and 1",
"true")
testSqlApi(
"f66 between f63 and 1",
"true")
testSqlApi(
"f63 between 0 and f64",
"true")
testSqlApi(
"f64 between 0 and f63",
"true")
testSqlApi(
"f63 between 0 and f65",
"true")
testSqlApi(
"f65 between 0 and f63",
"true")
testSqlApi(
"f63 between 0 and f66",
"true")
testSqlApi(
"f66 between 0 and f63",
"true")
}
  @Test
  def testCompareDecimalColWithNull(): Unit = {
    // f35 is DECIMAL(20, 16) holding 3. Casting 1234567890123.123 to DECIMAL(20, 16)
    // leaves only 4 digits for the integral part, so the cast presumably overflows to
    // null and the comparison propagates null — TODO confirm against cast semantics.
    testSqlApi("f35>cast(1234567890123.123 as decimal(20,16))", "null")
  }
// ----------------------------------------------------------------------------------------------
  /**
   * Builds the 71-field test row. The value set at index N corresponds to field "fN"
   * declared in testDataType, so the two lists must stay in lockstep.
   * `BigDecimal("...").bigDecimal` yields the underlying java.math.BigDecimal,
   * which is what DECIMAL-typed fields store.
   */
  override def testData: Row = {
    val testData = new Row(71)
    testData.setField(0, BigDecimal("123456789.123456789123456789").bigDecimal)
    testData.setField(1, BigDecimal("123456789123456789123456789").bigDecimal)
    testData.setField(2, 42)
    testData.setField(3, 4.2)
    testData.setField(4, BigDecimal("123456789").bigDecimal)
    testData.setField(5, BigDecimal("0.000").bigDecimal)
    testData.setField(6, BigDecimal("123").bigDecimal)
    testData.setField(7, BigDecimal("123.45").bigDecimal)
    testData.setField(8, BigDecimal("100.004").bigDecimal)
    testData.setField(9, BigDecimal("100.1").bigDecimal)
    testData.setField(10, BigDecimal("100.0040").bigDecimal)
    testData.setField(11, BigDecimal("123").bigDecimal)
    testData.setField(12, BigDecimal("123.0000").bigDecimal)
    testData.setField(13, BigDecimal("100.12").bigDecimal)
    testData.setField(14, BigDecimal("200.1234").bigDecimal)
    testData.setField(15, BigDecimal("100.0123456789").bigDecimal)
    testData.setField(16, BigDecimal("200.0123456789012345678901234567").bigDecimal)
    testData.setField(17, BigDecimal("1e10").bigDecimal)
    testData.setField(18, BigDecimal("0").bigDecimal)
    testData.setField(19, BigDecimal("5e37").bigDecimal)
    testData.setField(20, BigDecimal("1.00").bigDecimal)
    testData.setField(21, BigDecimal("2.0000").bigDecimal)
    testData.setField(22, 200)
    testData.setField(23, 3.14)
    testData.setField(24, BigDecimal("1").bigDecimal)
    testData.setField(25, BigDecimal("2").bigDecimal)
    testData.setField(26, BigDecimal("0.01").bigDecimal)
    testData.setField(27, BigDecimal("0.0000006").bigDecimal)
    testData.setField(28, BigDecimal("1.0").bigDecimal)
    testData.setField(29, BigDecimal("1e19").bigDecimal)
    testData.setField(30, BigDecimal("1.0").bigDecimal)
    testData.setField(31, BigDecimal("1.00").bigDecimal)
    testData.setField(32, BigDecimal("3").bigDecimal)
    testData.setField(33, BigDecimal("3").bigDecimal)
    testData.setField(34, BigDecimal("3").bigDecimal)
    testData.setField(35, BigDecimal("3").bigDecimal)
    testData.setField(36, BigDecimal("1.00").bigDecimal)
    testData.setField(37, 2)
    testData.setField(38, 3.0)
    testData.setField(39, BigDecimal("1e20").bigDecimal)
    testData.setField(40, BigDecimal("1e-15").bigDecimal)
    testData.setField(41, BigDecimal("3.00").bigDecimal)
    testData.setField(42, BigDecimal("5.00").bigDecimal)
    testData.setField(43, 7)
    testData.setField(44, BigDecimal("3").bigDecimal)
    testData.setField(45, BigDecimal("5").bigDecimal)
    testData.setField(46, BigDecimal("3.1234").bigDecimal)
    testData.setField(47, BigDecimal("5").bigDecimal)
    testData.setField(48, BigDecimal("3.14").bigDecimal)
    testData.setField(49, BigDecimal("2.17").bigDecimal)
    testData.setField(50, BigDecimal("646.646").bigDecimal)
    testData.setField(51, BigDecimal("99.99").bigDecimal)
    // f52: largest DECIMAL(38,0) value, 1E38 - 1
    testData.setField(52, BigDecimal("1E38").bigDecimal.subtract(BigDecimal("1").bigDecimal))
    testData.setField(53, BigDecimal("0.0001").bigDecimal)
    testData.setField(54, BigDecimal("0.01").bigDecimal)
    testData.setField(55, "3.14")
    testData.setField(56, BigDecimal("3.15").bigDecimal)
    testData.setField(57, 3.15)
    testData.setField(58, "3.15")
    testData.setField(59, "13.14")
    testData.setField(60, BigDecimal("1.99").bigDecimal)
    testData.setField(61, "1.99")
    testData.setField(62, 1)
    // f63-f66: the value 1 as DECIMAL(8,2), DECIMAL(8,4), INT and DOUBLE (equality tests)
    testData.setField(63, BigDecimal("1").bigDecimal)
    testData.setField(64, BigDecimal("1").bigDecimal)
    testData.setField(65, 1)
    testData.setField(66, 1.0)
    // f67-f70: 1 vs 99 across DECIMAL/INT/DOUBLE (comparison tests)
    testData.setField(67, BigDecimal("1").bigDecimal)
    testData.setField(68, BigDecimal("99").bigDecimal)
    testData.setField(69, 99)
    testData.setField(70, 99.0)
    testData
  }
  /**
   * Declares the logical type of each field in the row produced by testData.
   * Field "fN" must match the value set at index N there — keep both in sync.
   */
  override def testDataType: DataType = DataTypes.ROW(
    DataTypes.FIELD("f0", DataTypes.DECIMAL(30, 18)),
    DataTypes.FIELD("f1", DataTypes.DECIMAL(30, 0)),
    DataTypes.FIELD("f2", DataTypes.INT()),
    DataTypes.FIELD("f3", DataTypes.DOUBLE()),
    DataTypes.FIELD("f4", DataTypes.DECIMAL(10, 0)),
    DataTypes.FIELD("f5", DataTypes.DECIMAL(10, 3)),
    DataTypes.FIELD("f6", DataTypes.DECIMAL(10, 0)),
    DataTypes.FIELD("f7", DataTypes.DECIMAL(7, 2)),
    DataTypes.FIELD("f8", DataTypes.DECIMAL(7, 2)),
    DataTypes.FIELD("f9", DataTypes.DECIMAL(7, 2)),
    DataTypes.FIELD("f10", DataTypes.DECIMAL(5, 2)),
    DataTypes.FIELD("f11", DataTypes.DECIMAL(2, 0)),
    DataTypes.FIELD("f12", DataTypes.DECIMAL(4, 2)),
    DataTypes.FIELD("f13", DataTypes.DECIMAL(10, 2)),
    DataTypes.FIELD("f14", DataTypes.DECIMAL(10, 4)),
    DataTypes.FIELD("f15", DataTypes.DECIMAL(38, 10)),
    DataTypes.FIELD("f16", DataTypes.DECIMAL(38, 28)),
    DataTypes.FIELD("f17", DataTypes.DECIMAL(38, 10)),
    DataTypes.FIELD("f18", DataTypes.DECIMAL(38, 28)),
    DataTypes.FIELD("f19", DataTypes.DECIMAL(38, 0)),
    DataTypes.FIELD("f20", DataTypes.DECIMAL(5, 2)),
    DataTypes.FIELD("f21", DataTypes.DECIMAL(10, 4)),
    DataTypes.FIELD("f22", DataTypes.INT()),
    DataTypes.FIELD("f23", DataTypes.DOUBLE()),
    DataTypes.FIELD("f24", DataTypes.DECIMAL(30, 6)),
    DataTypes.FIELD("f25", DataTypes.DECIMAL(30, 10)),
    DataTypes.FIELD("f26", DataTypes.DECIMAL(30, 20)),
    DataTypes.FIELD("f27", DataTypes.DECIMAL(38, 10)),
    DataTypes.FIELD("f28", DataTypes.DECIMAL(38, 10)),
    DataTypes.FIELD("f29", DataTypes.DECIMAL(38, 0)),
    DataTypes.FIELD("f30", DataTypes.DECIMAL(30, 20)),
    DataTypes.FIELD("f31", DataTypes.DECIMAL(20, 2)),
    DataTypes.FIELD("f32", DataTypes.DECIMAL(2, 1)),
    DataTypes.FIELD("f33", DataTypes.DECIMAL(4, 3)),
    DataTypes.FIELD("f34", DataTypes.DECIMAL(20, 10)),
    DataTypes.FIELD("f35", DataTypes.DECIMAL(20, 16)),
    DataTypes.FIELD("f36", DataTypes.DECIMAL(10, 2)),
    DataTypes.FIELD("f37", DataTypes.INT()),
    DataTypes.FIELD("f38", DataTypes.DOUBLE()),
    DataTypes.FIELD("f39", DataTypes.DECIMAL(30, 0)),
    DataTypes.FIELD("f40", DataTypes.DECIMAL(30, 20)),
    DataTypes.FIELD("f41", DataTypes.DECIMAL(10, 2)),
    DataTypes.FIELD("f42", DataTypes.DECIMAL(10, 4)),
    DataTypes.FIELD("f43", DataTypes.INT()),
    DataTypes.FIELD("f44", DataTypes.DECIMAL(1, 0)),
    DataTypes.FIELD("f45", DataTypes.DECIMAL(1, 0)),
    DataTypes.FIELD("f46", DataTypes.DECIMAL(10, 4)),
    DataTypes.FIELD("f47", DataTypes.DECIMAL(10, 2)),
    DataTypes.FIELD("f48", DataTypes.DECIMAL(10, 2)),
    DataTypes.FIELD("f49", DataTypes.DECIMAL(10, 2)),
    DataTypes.FIELD("f50", DataTypes.DECIMAL(10, 3)),
    DataTypes.FIELD("f51", DataTypes.DECIMAL(4, 2)),
    DataTypes.FIELD("f52", DataTypes.DECIMAL(38, 0)),
    DataTypes.FIELD("f53", DataTypes.DECIMAL(8, 4)),
    DataTypes.FIELD("f54", DataTypes.DECIMAL(10, 2)),
    DataTypes.FIELD("f55", DataTypes.STRING()),
    DataTypes.FIELD("f56", DataTypes.DECIMAL(8, 2)),
    DataTypes.FIELD("f57", DataTypes.DOUBLE()),
    DataTypes.FIELD("f58", DataTypes.STRING()),
    DataTypes.FIELD("f59", DataTypes.STRING()),
    DataTypes.FIELD("f60", DataTypes.DECIMAL(4, 2)),
    DataTypes.FIELD("f61", DataTypes.STRING()),
    DataTypes.FIELD("f62", DataTypes.INT()),
    DataTypes.FIELD("f63", DataTypes.DECIMAL(8, 2)),
    DataTypes.FIELD("f64", DataTypes.DECIMAL(8, 4)),
    DataTypes.FIELD("f65", DataTypes.INT()),
    DataTypes.FIELD("f66", DataTypes.DOUBLE()),
    DataTypes.FIELD("f67", DataTypes.DECIMAL(1, 0)),
    DataTypes.FIELD("f68", DataTypes.DECIMAL(2, 0)),
    DataTypes.FIELD("f69", DataTypes.INT()),
    DataTypes.FIELD("f70", DataTypes.DOUBLE())
  )
  // Presumably opts this suite into the new DataTypes-based type system rather than
  // legacy TypeInformation types — TODO confirm against the base class contract.
  override def containsLegacyTypes: Boolean = false
}
| clarkyzl/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/expressions/DecimalTypeTest.scala | Scala | apache-2.0 | 28,247 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.box._
// Corporation-tax box AC214D: "Tangible assets - Office Equipment - depreciation -
// other adjustments". Holds an optional whole-pound amount entered by the user.
case class AC214D(value: Option[Int]) extends CtBoxIdentifier(name = "Tangible assets - Office Equipment - depreciation - other adjustments")
  with CtOptionalInteger
  with Input
  with ValidatableBox[Frs102AccountsBoxRetriever]
  with Validators {

  // Validates the optional amount with the shared money validation; collectErrors
  // folds the individual validation results into a single set of CtValidation.
  // NOTE(review): validateMoney presumably enforces the accepted money range for
  // optional integer boxes — confirm against ValidatableBox.
  override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] = {
    collectErrors(
      validateMoney(value)
    )
  }
}
| liquidarmour/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC214D.scala | Scala | apache-2.0 | 1,154 |
/*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 the "License";
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalawebtest.json
import org.scalatest.exceptions.TestFailedException
import org.scalatest.{AppendedClues, Assertions}
import org.scalatest.matchers.should.Matchers
import play.api.libs.json._
import scala.language.implicitConversions
/**
* Helper object to provide functions to fluently build a [[org.scalawebtest.json.Gauge]]. Which in turn is used to test if
* a [[play.api.libs.json.JsLookupResult]] or [[play.api.libs.json.JsValue]] fits the gauge definition.
*
* ==Overview==
* Import [[org.scalawebtest.json.JsonGauge.JsonGaugeFromJsLookup]] or [[org.scalawebtest.json.JsonGauge.JsonGaugeFromJsValue]], then follow the documentation of the [[org.scalawebtest.json.JsonGauge]] trait.
*/
object JsonGauge extends JsonGauge
/**
* Trait which provides functions to fluently build a [[org.scalawebtest.json.Gauge]]. Which in turn is used to test if
* a [[play.api.libs.json.JsLookupResult]] or [[play.api.libs.json.JsValue]] fits the provided gauge definition.
*
* ==Overview==
* Start with a [[play.api.libs.json.JsLookupResult]] followed by [[org.scalawebtest.json.JsonGauge.JsonGaugeFromJsLookup#fits fits]], [[org.scalawebtest.json.JsonGauge.JsonGaugeFromJsLookup#fit fit]] or [[org.scalawebtest.json.JsonGauge.JsonGaugeFromJsLookup#containsElementFitting containsElementFitting]]
* or [[play.api.libs.json.JsValue]] followed by [[org.scalawebtest.json.JsonGauge.JsonGaugeFromJsValue#fits fits]], [[org.scalawebtest.json.JsonGauge.JsonGaugeFromJsValue#fit fit]] or [[org.scalawebtest.json.JsonGauge.JsonGaugeFromJsValue#containsElementFitting containsElementFitting]]
*
* Next you choose the [[org.scalawebtest.json.JsonGauge.GaugeType]], which has to be one of the following [[org.scalawebtest.json.JsonGauge#types$ types]], [[org.scalawebtest.json.JsonGauge#typesAndArraySizes$ typesAndArraySizes]], [[org.scalawebtest.json.JsonGauge#values$ values]] or [[org.scalawebtest.json.JsonGauge.JsonGaugeFromJsLookup#containsElementFitting containsElementFitting]]
*
* Last is the definition of the JSON `gauge` wrapped in [[org.scalawebtest.json.JsonGaugeFits!.of(definition:String)* of]] or [[org.scalawebtest.json.JsonGaugeFits!.of(definition:play\\.api\\.libs\\.json\\.JsValue)* of]]. The definition has to be either a String, which contains a valid JSON document or a JsValue.
*
* ==Example==
* {{{
* val dijkstra: JsValue = Json.parse("""{"name": "Dijkstra", "firstName": "Edsger"}""")
* dijkstra fits values of """{"firstName": "Edsger"}"""
* }}}
*
*/
trait JsonGauge {

  /**
   * Pimps a [[play.api.libs.json.JsLookupResult]] with gauge-building methods.
   * Note: `jsLookup.get` throws if the lookup did not resolve to a value.
   */
  implicit class JsonGaugeFromJsLookup(jsLookup: JsLookupResult) extends JsonGaugeFromPlayJson(json = jsLookup.get) {
  }

  /**
   * Pimps a [[play.api.libs.json.JsValue]] with gauge-building methods.
   */
  implicit class JsonGaugeFromJsValue(jsValue: JsValue) extends JsonGaugeFromPlayJson(json = jsValue) {
  }

  class JsonGaugeFromPlayJson(json: JsValue) {
    /**
     * Creates a [[JsonGaugeFits]] which checks whether the testee fits a gauge
     * definition according to the rules of the given gauge type. Properties of the
     * testee that are absent from the gauge definition are tolerated.
     */
    def fits(gaugeType: GaugeType): JsonGaugeFits = JsonGaugeFits(gaugeByType(gaugeType))

    /**
     * Grammatical variant of [[fits]].
     */
    def fit(gaugeType: GaugeType): JsonGaugeFits = fits(gaugeType)

    /**
     * Like [[fits]], but stricter: every property of the testee has to be covered
     * by the gauge definition as well.
     */
    def completelyFits(gaugeType: GaugeType): JsonGaugeFits = JsonGaugeFits(gaugeByType(gaugeType, allPropertiesDefined = true))

    /**
     * Grammatical variant of [[completelyFits]].
     */
    def completelyFit(gaugeType: GaugeType): JsonGaugeFits = completelyFits(gaugeType)

    /**
     * Creates a [[JsonGaugeArrayContains]] which checks whether the testee (an
     * array) contains at least one element fitting the gauge definition.
     */
    def containsElementFitting(gaugeType: GaugeType): JsonGaugeArrayContains = JsonGaugeArrayContains(gaugeByType(gaugeType))

    // Translates a marker object into the corresponding Gauge configuration flags.
    protected def gaugeByType(gaugeType: GaugeType, allPropertiesDefined: Boolean = false): Gauge = {
      val (fitValues, fitArraySizes, ignoreArrayOrder) = gaugeType match {
        case `types`                    => (false, false, true)
        case `typesAndArraySizes`       => (false, true, true)
        case `values`                   => (true, true, false)
        case `valuesIgnoringArrayOrder` => (true, true, true)
      }
      Gauge(
        testee = json,
        fitValues = fitValues,
        fitArraySizes = fitArraySizes,
        ignoreArrayOrder = ignoreArrayOrder,
        allPropertiesDefined = allPropertiesDefined)
    }
  }

  /**
   * Marker object: verify by type only.
   */
  object types extends GaugeType

  /**
   * Marker object: verify by type and additionally check array sizes.
   */
  object typesAndArraySizes extends GaugeType

  /**
   * Marker object: verify values (array order matters).
   */
  object values extends GaugeType

  /**
   * Marker object: verify values, ignoring the order of elements within arrays.
   */
  object valuesIgnoringArrayOrder extends GaugeType

  /**
   * Base trait of the marker objects selecting the behavior of [[org.scalawebtest.json.Gauge]].
   */
  sealed trait GaugeType
}
case class JsonGaugeFits(gauge: Gauge) {
  /** Parses `definition` as JSON, then delegates to the [[JsValue]] overload. */
  def of(definition: String): Unit = of(Json.parse(definition))

  /** Verifies that the gauge's testee fits the given JSON definition. */
  def of(definition: JsValue): Unit = gauge.fits(definition)
}
case class JsonGaugeArrayContains(gauge: Gauge) extends Assertions with AppendedClues with Matchers {
  /** Parses `definition` as JSON, then delegates to the [[JsValue]] overload. */
  def of(definition: String): Unit = of(Json.parse(definition))

  /**
   * Verifies that the testee is a JSON array containing at least one element
   * which fits the given gauge definition; fails the running test otherwise.
   */
  def of(definition: JsValue): Unit = gauge.testee match {
    case array: JsArray if hasMatchingElement(array, definition) =>
      () // at least one element fits the gauge definition
    case _: JsArray =>
      fail(s"${gauge.testee.toString()} did not contain an element, which matched the gauge definition $definition")
    case notAnArray =>
      notAnArray shouldBe a[JsArray]
  }

  // True if any element of the array fits the gauge definition.
  private def hasMatchingElement(array: JsArray, definition: JsValue) =
    array.value.exists { element =>
      try {
        gauge.withTestee(element).fits(definition)
        // only reached when the element fits the definition
        true
      } catch {
        // expected for elements that do not fit the gauge
        case _: TestFailedException => false
      }
    }
}
class PrettyPrintBreadcrumb(breadcrumb: List[String]) {
  /** Renders the breadcrumb in original order (it is stored reversed), dot-separated and wrapped in single quotes. */
  def prettyPrint: String = "'" + breadcrumb.reverse.mkString(".") + "'"
}
| unic/ScalaWebTest | scalawebtest-json/src/main/scala/org/scalawebtest/json/JsonGaugeBuilder.scala | Scala | apache-2.0 | 7,787 |
package com.shashank.sql
import org.apache.spark.sql.Encoders
import org.apache.spark.sql.catalyst.encoders.{OuterScopes, ExpressionEncoder}
import org.apache.spark.sql.catalyst.expressions.{GenericInternalRowWithSchema, UnsafeProjection, GenericMutableRow, GenericInternalRow}
import org.apache.spark.{SparkConf, SparkContext}
/**
* Created by shashank on 27/1/16.
*/
object FromCollections {
  /**
   * Minimal Dataset example: builds a Dataset from a local Scala collection,
   * maps over it and prints each element.
   */
  def main(args: Array[String]) {
    val sc: SparkContext = new SparkContext("local", "meetup", new SparkConf())
    try {
      val sqlContext = new org.apache.spark.sql.SQLContext(sc)
      // brings the implicit Encoder needed by Seq(...).toDS() into scope
      import sqlContext.implicits._

      val ds = Seq(1, 2, 3).toDS()
      ds.map(_ + 1).foreach(println(_))
    } finally {
      // fix: the SparkContext was never stopped, leaking the local Spark runtime
      sc.stop()
    }
  }
}
| shashankgowdal/introduction_to_dataset | src/main/scala/com/shashank/creation/FromCollections.scala | Scala | apache-2.0 | 687 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.metrics
import com.codahale.metrics.{Meter => CodaHaleMeter}
/** See org.apache.gearpump.codahale.metrics.Meter */
class Meter(val name: String, meter: CodaHaleMeter, sampleRate: Int = 1) {
  // number of mark(n) calls observed so far; a flush happens every `sampleRate` calls
  private var sampleCount = 0L
  // events accumulated locally since the last flush to the underlying meter
  private var toBeMarked = 0L

  /**
   * Records a single event immediately (not subject to sampling).
   * Fix: guard against a null underlying meter — mark(n) already tolerates null,
   * so mark() must not throw a NullPointerException in the same situation.
   */
  def mark() {
    if (null != meter) {
      meter.mark(1)
    }
  }

  /**
   * Records `n` events. To reduce overhead, events are accumulated locally and
   * only flushed to the underlying meter on every `sampleRate`-th invocation.
   */
  def mark(n: Long) {
    toBeMarked += n
    sampleCount += 1
    if (null != meter && sampleCount % sampleRate == 0) {
      meter.mark(toBeMarked)
      toBeMarked = 0
    }
  }

  /**
   * One-minute moving-average rate reported by the underlying meter.
   * NOTE(review): unlike mark(n), this still dereferences `meter` without a null
   * guard; left unchanged to preserve the existing (throwing) behavior.
   */
  def getOneMinuteRate(): Double = {
    meter.getOneMinuteRate
  }
}
package model
import akka.http.scaladsl.model.FormData
import akka.http.scaladsl.model.Uri.Query
/**
 * A slash-command payload extracted from the form data Slack posts.
 * Falls back to empty strings when any expected field is missing.
 */
case class SlashCommandRequest(command: String = "", username: String = "", text: String = "")

object SlashCommandRequest {

  // Looks up a single form field by name, yielding the (name, value) pair if present.
  val extractFormDataField: (Query, String) => Option[(String, String)] = (query: Query, fieldName: String) =>
    query.find(_._1 == fieldName)

  /**
   * Builds a [[SlashCommandRequest]] from posted form data; yields the default
   * (all-empty) request when any of the three expected fields is absent.
   */
  def apply(formData: FormData): SlashCommandRequest = {
    val parsed = for {
      command  <- extractFormDataField(formData.fields, "command")
      username <- extractFormDataField(formData.fields, "user_name")
      text     <- extractFormDataField(formData.fields, "text")
    } yield new SlashCommandRequest(command._2, username._2, text._2)

    parsed.getOrElse(new SlashCommandRequest)
  }
}
/**
* token=XXXXXXXXXXXXXXXXXX
team_id=T0001
team_domain=example
channel_id=C2147483705
channel_name=test
timestamp=1355517523.000005
user_id=U2147483697
user_name=Steve
text=googlebot: What is the air-speed velocity of an unladen swallow?
trigger_word=googlebot:
*/
/**
 * An outgoing-webhook payload; `triggerWord` stays empty when Slack did not send one.
 */
case class OutgoingHookRequest(username: String = "", text: String = "", triggerWord: String = "")

object OutgoingHookRequest {

  // Looks up a single form field by name, yielding the (name, value) pair if present.
  val extractFormDataField: (Query, String) => Option[(String, String)] = (query: Query, fieldName: String) =>
    query.find(_._1 == fieldName)

  /**
   * Builds an [[OutgoingHookRequest]] from posted form data.
   * Yields None when either user_name or text is missing; the trigger word is optional.
   */
  def apply(formData: FormData): Option[OutgoingHookRequest] = {
    val triggerWord = extractFormDataField(formData.fields, "trigger_word")
    for {
      username <- extractFormDataField(formData.fields, "user_name")
      text     <- extractFormDataField(formData.fields, "text")
    } yield triggerWord match {
      case Some(trigger) => OutgoingHookRequest(username._2, text._2, trigger._2)
      case None          => OutgoingHookRequest(username._2, text._2)
    }
  }
}
| Freshwood/matterbridge | modules/model/src/main/scala/model/CommandRequest.scala | Scala | mit | 1,936 |
package com.chrisrebert.lmvtfy.server
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.{Try,Success,Failure}
import akka.actor.ActorRef
import akka.io.IO
import akka.pattern.ask
import akka.util.{ByteString, Timeout}
import spray.can.Http
import spray.http.{HttpCharsets, Uri, MediaTypes, HttpResponse, StatusCodes=>HttpStatusCodes}
import spray.httpx.RequestBuilding._
import spray.json._
import com.chrisrebert.lmvtfy.{MarkdownAboutBootstrap, ValidationRequest, ValidationResult}
import com.chrisrebert.lmvtfy.bootlint.{BootlintProblem, BootlintJsonProtocol, MarkdownRenderer}
import com.chrisrebert.lmvtfy.live_examples._
import com.chrisrebert.lmvtfy.util.Utf8ByteString
// Actor that posts HTML snippets to a Bootlint service, filters the reported lint
// problems per example host, and forwards a Markdown summary to the commenter actor.
class BootlintActor(commenter: ActorRef) extends ActorWithLogging {
  // Timeout for the HTTP round trip to the Bootlint service.
  implicit val timeout = Timeout(30.seconds)

  // Resolves the Bootlint service URL once: from a Docker link env var if set,
  // otherwise localhost, always on the configured Bootlint port.
  private lazy val bootlintUrl: Uri = {
    val settings = Settings(context.system)
    val dockerEnvVar = s"BOOTLINT_PORT_${settings.BootlintPort}_TCP_ADDR"
    val host = System.getenv(dockerEnvVar) match {
      case null => Uri.NamedHost("localhost")
      case ipAddr => Uri.IPv4Host(ipAddr)
    }
    val authority = Uri.Authority(host, settings.BootlintPort)
    val url = Uri(Uri.httpScheme(securedConnection = false), authority, Uri.Path.Empty)
    log.info(s"Using Bootlint URL: ${url}")
    url
  }

  // POSTs the HTML to Bootlint and parses the JSON response into problems.
  // NOTE(review): this blocks the actor thread via Await.result for up to `timeout`.
  private def lintFor(html: ByteString, mention: LiveExampleMention): Seq[BootlintProblem] = {
    implicit val system = context.system
    val settings = Settings(context.system)
    val entity = html.asUtf8HtmlHttpEntity
    val request = Post(bootlintUrl, entity) ~> addHeader("Accept", MediaTypes.`application/json`.toString())
    val respFuture = (IO(Http) ? request).mapTo[HttpResponse]
    Try{ Await.result(respFuture, timeout.duration) } match {
      case Success(response) => {
        if (response.status.isSuccess) {
          import BootlintJsonProtocol._
          val jsonString = response.entity.asString(HttpCharsets.`UTF-8`)
          Try { jsonString.parseJson.convertTo[Seq[BootlintProblem]] } match {
            case Failure(exc) => {
              // NOTE(review): the parse exception `exc` is not included in the log
              // message — consider log.error(exc, ...) to keep the failure detail.
              log.error(s"Bootlint response JSON either malformed or did not conform to expected schema")
              Nil
            }
            case Success(lintProblems) => lintProblems
          }
        }
        else {
          if (response.status == HttpStatusCodes.RequestEntityTooLarge) {
            // Surface the size limit to the user as a synthetic lint problem.
            Seq(BootlintProblem(
              id = "XX0",
              message = "Your example is too large for Bootlint to process! Please simplify your example and make it smaller.",
              location = None
            ))
          }
          else {
            log.error(s"Failed to fetch Bootlint for ${mention}; HTTP status: ${response.status}")
            Nil
          }
        }
      }
      case Failure(exc) => {
        // Request-level failure (connect error or timeout): log and report no problems.
        log.error(exc, s"Failed to fetch Bootlint for ${mention}")
        Nil
      }
    }
  }

  // W005 with this exact wording is noise when the example may intentionally omit jQuery.
  private def isUnnecessaryWarningAboutMissingJquery(problem: BootlintProblem): Boolean = {
    problem.id == "W005" && problem.message == "Unable to locate jQuery, which is required for Bootstrap's JavaScript plugins to work; however, you might not be using Bootstrap's JavaScript"
  }

  // Lint IDs that are artifacts of the respective paste-site templates, not user errors.
  private val bootplyLintIdToIgnore = "W002"
  private val jsFiddleLintIdsToIgnore = Set("W001", "W002", "W003")
  private val codePenLintIdsToIgnore = Set("W002", "W003", "W005")

  // Drops host-specific false positives, then the jQuery warning, from the problem list.
  private def withoutIrrelevantLints(lintProblems: Seq[BootlintProblem], example: LiveExample): Seq[BootlintProblem] = {
    (example match {
      case _:BootplyExample => lintProblems.filter{ _.id != bootplyLintIdToIgnore }
      case _:JsFiddleExample => lintProblems.filter{ problem => !(jsFiddleLintIdsToIgnore contains problem.id) }
      case _:CodePenExample => lintProblems.filter{ problem => !(codePenLintIdsToIgnore contains problem.id) }
      case _ => lintProblems
    }).filter{ !isUnnecessaryWarningAboutMissingJquery(_) }
  }

  // Lints each incoming validation request; only non-empty results are forwarded
  // to the commenter actor as Markdown.
  override def receive = {
    case req@ValidationRequest(htmlBytes, mention) => {
      val lintProblems = withoutIrrelevantLints(lintFor(htmlBytes, mention), mention.example)
      if (lintProblems.isEmpty) {
        log.info(s"No Bootlint problems for ${mention}")
      }
      else {
        log.info(s"${lintProblems.length} Bootlint problems for ${mention}")
        val lintsAsMarkdown = MarkdownAboutBootstrap(MarkdownRenderer.markdownFor(lintProblems))
        commenter ! ValidationResult(lintsAsMarkdown, mention)
      }
    }
  }
}
| cvrebert/lmvtfy | src/main/scala/com/chrisrebert/lmvtfy/server/BootlintActor.scala | Scala | mit | 4,494 |
/*
* Copyright 2008-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package util
import scala.collection.mutable._
import scala.language.postfixOps
import scala.util.parsing.combinator._
/**
* Parser a VCard entry such as
*
* BEGIN:VCARD
* VERSION:2.1
* N:Gump;Forrest
* FN:Forrest Gump
* ORG:Bubba Gump Shrimp Co.
* TITLE:Shrimp Man
* TEL;WORK;VOICE:(111) 555-1212
* TEL;HOME;VOICE:(404) 555-1212
* ADR;WORK:;;100 Waters Edge;Baytown;LA;30314;United States of America
* END:VCARD
*
*/
object VCardParser extends Parsers {
  import scala.language.implicitConversions

  // The parsers consume individual characters.
  type Elem = Char

  // Allows passing a String wherever the parsers expect an Input reader.
  implicit def strToInput(in: String): Input = new scala.util.parsing.input.CharArrayReader(in.toCharArray)

  // A parsed key with its (possibly empty) list of properties, e.g. TEL;WORK;VOICE.
  // Property values are empty strings when no "=value" part is given.
  case class VCardKey(name: String, props: List[(String, String)])
  // One "KEY[;PROPS]:VALUE[;VALUE...]" line of a vCard.
  case class VCardEntry(key: VCardKey, value: List[String])

  // Line folding: a newline followed by a single space continues the previous value.
  lazy val multiLineSep = opt(elem('\\n') ~ elem(' '))
  // A value: any non-control characters except ';', with folding allowed around each char.
  lazy val value = (multiLineSep ~> elem("value", {c => !c.isControl && c != ';'}) <~ multiLineSep).* ^^ {case l => l.mkString}
  // Insignificant whitespace between entries.
  lazy val spaces = (elem(' ') | elem('\\t') | elem('\\n') | elem('\\r'))*
  // A key: letters, digits, '-' or '_' (e.g. "X-ABUID").
  lazy val key = elem("key", {c => c.isLetterOrDigit || c == '-' || c == '_'}).+ ^^ {case list => list.mkString}
  // Properties after the key: ";name=value" pairs or bare ";name" flags (empty value).
  lazy val props = ((((elem(';') ~> key <~ elem('=')) ~ key) ^^ {case a ~ b => (a, b)}) | ((elem(';') ~> key) ^^ {case a => (a, "")}))*
  // The left-hand side of ':' — key plus its properties.
  lazy val left = (key ~ props) ^^ {case k ~ l => VCardKey(k, l)}
  // A whole vCard: one or more "left:value[;value...]" entries.
  lazy val expr = (((spaces ~> left ~! elem(':')) ~ repsep(value, ';')) ^^ {case a ~ _ ~ b => VCardEntry(a, b)})+

  // Parses a vCard string: Left(entries) on success, Right(error text) on failure.
  def parse(in: String): Either[List[VCardEntry], String] = expr(in) match {
    case Success(v, r) => Left(v)
    case err @ _ => Right(err toString)
  }
}
| lzpfmh/framework-2 | core/util/src/main/scala/net/liftweb/util/VCardParser.scala | Scala | apache-2.0 | 2,253 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.executor
import java.nio.ByteBuffer
import org.apache.spark.TaskState.TaskState
/**
 * A pluggable interface used by the Executor to send updates to the cluster scheduler.
 */
private[spark] trait ExecutorBackend {
  /**
   * Reports a change in the state of a running task to the scheduler.
   *
   * @param taskId id of the task whose status changed
   * @param state  the new state of the task
   * @param data   serialized payload accompanying the update (contents presumably
   *               depend on `state` — confirm against the implementations)
   */
  // fix: explicit `: Unit` instead of deprecated procedure syntax (same erased signature)
  def statusUpdate(taskId: Long, state: TaskState, data: ByteBuffer): Unit
}
| mkolod/incubator-spark | core/src/main/scala/org/apache/spark/executor/ExecutorBackend.scala | Scala | apache-2.0 | 1,115 |
package com.softwaremill.bootzooka.passwordreset.api
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import com.softwaremill.bootzooka.passwordreset.application.{
PasswordResetCodeDao,
PasswordResetConfig,
PasswordResetService
}
import com.softwaremill.bootzooka.passwordreset.domain.PasswordResetCode
import com.softwaremill.bootzooka.test.{BaseRoutesSpec, TestHelpersWithDb}
import com.softwaremill.bootzooka.user.domain.User
import com.typesafe.config.ConfigFactory
// Route-level tests for the password-reset endpoints, run against sealed routes
// backed by real DAOs/services from TestHelpersWithDb (emailService is a test stub).
class PasswordResetRoutesSpec extends BaseRoutesSpec with TestHelpersWithDb { spec =>

  lazy val config = new PasswordResetConfig {
    override def rootConfig = ConfigFactory.load()
  }

  val passwordResetCodeDao = new PasswordResetCodeDao(sqlDatabase)
  val passwordResetService =
    new PasswordResetService(userDao, passwordResetCodeDao, emailService, emailTemplatingEngine, config)

  // Route.seal turns rejections into plain HTTP responses so status codes can be asserted.
  val routes = Route.seal(new PasswordResetRoutes with TestRoutesSupport {
    override val userService = spec.userService
    override val passwordResetService = spec.passwordResetService
  }.passwordResetRoutes)

  "POST /" should "send e-mail to user" in {
    // given
    val user = newRandomStoredUser()

    // when
    Post("/passwordreset", Map("login" -> user.login)) ~> routes ~> check {
      emailService.wasEmailSentTo(user.email) should be(true)
    }
  }

  "POST /[code] with password" should "change the password" in {
    // given
    val user = newRandomStoredUser()
    val code = PasswordResetCode(randomString(), user)
    passwordResetCodeDao.add(code).futureValue
    val newPassword = randomString()

    // when
    Post(s"/passwordreset/${code.code}", Map("password" -> newPassword)) ~> routes ~> check {
      responseAs[String] should be("ok")
      // the stored password hash must now match the new password
      User.passwordsMatch(newPassword, userDao.findById(user.id).futureValue.get) should be(true)
    }
  }

  "POST /[code] without password" should "result in an error" in {
    // given
    val user = newRandomStoredUser()
    val code = PasswordResetCode(randomString(), user)
    passwordResetCodeDao.add(code).futureValue

    // when — the missing form field should be rejected before the code is checked;
    // "123" is deliberately not the stored code (presumably irrelevant here — confirm)
    Post("/passwordreset/123") ~> routes ~> check {
      status should be(StatusCodes.BadRequest)
    }
  }

  "POST /[code] with password but with invalid code" should "result in an error" in {
    // given
    val user = newRandomStoredUser()
    val code = PasswordResetCode(randomString(), user)
    passwordResetCodeDao.add(code).futureValue
    val newPassword = randomString()

    // when — "123" does not match the stored code, so the reset must be refused
    Post("/passwordreset/123", Map("password" -> newPassword)) ~> routes ~> check {
      status should be(StatusCodes.Forbidden)
      // and the password must be unchanged
      User.passwordsMatch(newPassword, userDao.findById(user.id).futureValue.get) should be(false)
    }
  }
}
| aywengo/bootzooka | backend/src/test/scala/com/softwaremill/bootzooka/passwordreset/api/PasswordResetRoutesSpec.scala | Scala | apache-2.0 | 2,767 |
/*******************************************************************************
Copyright (c) 2013-2014, S-Core, KAIST.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.models.Tizen
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
import kr.ac.kaist.jsaf.analysis.cfg.{CFG, CFGExpr, InternalError}
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T, _}
import kr.ac.kaist.jsaf.analysis.typing.models._
import kr.ac.kaist.jsaf.analysis.typing._
import kr.ac.kaist.jsaf.analysis.typing.models.builtin.BuiltinArray
import kr.ac.kaist.jsaf.analysis.typing.domain.Context
import kr.ac.kaist.jsaf.analysis.typing.domain.Heap
object TIZENapplicationObj extends Tizen {
  // Name of this API object under the global `tizen` object.
  val name = "application"
  /* predefined locations */
  // Abstract heap locations pre-allocated for the singleton, its prototype,
  // and the model instances (Application, ApplicationInformation, arrays,
  // ApplicationControl(+Data), ApplicationContext, certificates, ...).
  // `Old` locations are summary locations shared by all concrete instances.
  val loc_obj = TIZENtizen.loc_application
  val loc_proto = newSystemRecentLoc(name + "Proto")
  val loc_app: Loc = newSystemLoc("Application", Old)
  val loc_appinfo: Loc = newSystemLoc("ApplicationInformation", Old)
  val loc_appinfoarr: Loc = newSystemLoc("ApplicationInformationArr", Old)
  val loc_appctrl: Loc = newSystemLoc("ApplicationControl", Old)
  val loc_appctrldata: Loc = newSystemLoc("ApplicationControlData", Old)
  val loc_appctrldataarr: Loc = newSystemLoc("ApplicationControlDataArr", Old)
  val loc_appctxt: Loc = newSystemLoc("ApplicationContext", Old)
  val loc_appctxtarr: Loc = newSystemLoc("ApplicationContextArr", Old)
  val loc_strarr: Loc = newSystemLoc("appStrArr", Old)
  val loc_appcert: Loc = newSystemLoc("ApplicationCertificate", Old)
  val loc_appcertarr: Loc = newSystemLoc("ApplicationCertificateArr", Old)
  val loc_reqappctrl: Loc = newSystemLoc("RequestedApplicationControl", Old)
  // Pairs each pre-allocated location with its property list for initial-heap setup.
  override def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
    (loc_obj, prop_obj), (loc_proto, prop_proto), (loc_app, prop_app_ins), (loc_appinfo, prop_appinfo_ins),
    (loc_strarr, prop_strarr_ins), (loc_appctrldata, prop_appctrldata_ins), (loc_appctrldataarr, prop_appctrldataarr_ins),
    (loc_appinfoarr, prop_appinfoarr_ins), (loc_appctrl, prop_appctrl_ins), (loc_appctxt, prop_appctxt_ins),
    (loc_appctxtarr, prop_appctxtarr_ins), (loc_appcert, prop_appcert_ins), (loc_appcertarr, prop_appcertarr_ins),
    (loc_reqappctrl, prop_reqappctrl_ins)
  )
  /* constructor or object*/
  // Properties of the `tizen.application` singleton itself.
  private val prop_obj: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(Value(loc_proto), F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T)))
  )
  /* prototype */
  // Prototype exposing each API method as a built-in function with its arity.
  private val prop_proto: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("CallbackObject")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("getCurrentApplication", AbsBuiltinFunc("tizen.applicationObj.getCurrentApplication",0)),
    ("kill", AbsBuiltinFunc("tizen.applicationObj.kill",3)),
    ("launch", AbsBuiltinFunc("tizen.applicationObj.launch",3)),
    ("launchAppControl", AbsBuiltinFunc("tizen.applicationObj.launchAppControl",5)),
    ("findAppControl", AbsBuiltinFunc("tizen.applicationObj.findAppControl",3)),
    ("getAppsContext", AbsBuiltinFunc("tizen.applicationObj.getAppsContext",2)),
    ("getAppContext", AbsBuiltinFunc("tizen.applicationObj.getAppContext",1)),
    ("getAppsInfo", AbsBuiltinFunc("tizen.applicationObj.getAppsInfo",2)),
    ("getAppInfo", AbsBuiltinFunc("tizen.applicationObj.getAppInfo",1)),
    ("getAppCerts", AbsBuiltinFunc("tizen.applicationObj.getAppCerts",1)),
    ("getAppSharedURI", AbsBuiltinFunc("tizen.applicationObj.getAppSharedURI",1)),
    ("addAppInfoEventListener", AbsBuiltinFunc("tizen.applicationObj.addAppInfoEventListener",1)),
    ("removeAppInfoEventListener", AbsBuiltinFunc("tizen.applicationObj.removeAppInfoEventListener",1))
  )
  // Abstract ApplicationInformation instance: every field conservatively Top
  // for its declared type; all fields are non-writable (F), enumerable,
  // configurable (F, T, T per ObjectValue flags).
  private val prop_appinfo_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(TIZENApplicationInformation.loc_proto, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("id", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T)))),
    ("name", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T)))),
    ("iconPath", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T)))),
    ("version", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T)))),
    ("show", AbsConstValue(PropValue(ObjectValue(Value(BoolTop), F, T, T)))),
    ("categories", AbsConstValue(PropValue(ObjectValue(Value(loc_strarr), F, T, T)))),
    ("installDate", AbsConstValue(PropValue(ObjectValue(Value(TIZENtizen.loc_date), F, T, T)))),
    ("size", AbsConstValue(PropValue(ObjectValue(Value(NumTop), F, T, T)))),
    ("packageId", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T))))
  )
  // Summary array of ApplicationInformation instances: unknown length,
  // every numeric index joined into the single default-number property.
  private val prop_appinfoarr_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Array")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(BuiltinArray.ProtoLoc, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("length", AbsConstValue(PropValue(ObjectValue(UInt, T, F, F)))),
    (Str_default_number, AbsConstValue(PropValue(ObjectValue(Value(loc_appinfo), T, T, T))))
  )
  // Abstract Application instance returned by getCurrentApplication.
  private val prop_app_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(TIZENApplication.loc_proto, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("appInfo", AbsConstValue(PropValue(ObjectValue(Value(loc_appinfo), F, T, T)))),
    ("contextId", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T))))
  )
private val prop_appctrl_ins: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
("@proto", AbsConstValue(PropValue(ObjectValue(TIZENApplicationControlData.loc_proto, F, F, F)))),
("@extensible", AbsConstValue(PropValue(T))),
("operation", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T)))),
("uri", AbsConstValue(PropValue(ObjectValue(Value(PValue(UndefBot, NullTop, BoolBot, NumBot, StrTop)), F, T, T)))),
("mime", AbsConstValue(PropValue(ObjectValue(Value(PValue(UndefBot, NullTop, BoolBot, NumBot, StrTop)), F, T, T)))),
("category", AbsConstValue(PropValue(ObjectValue(Value(PValue(UndefBot, NullTop, BoolBot, NumBot, StrTop)), F, T, T)))),
("data", AbsConstValue(PropValue(ObjectValue(Value(loc_appctrldataarr), F, T, T))))
)
  // Abstract ApplicationControlData instance: a key plus an array of string values.
  private val prop_appctrldata_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(TIZENApplicationControlData.loc_proto, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("key", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T)))),
    ("value", AbsConstValue(PropValue(ObjectValue(Value(loc_strarr), F, T, T))))
  )
  // Summary array of ApplicationControlData instances.
  private val prop_appctrldataarr_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Array")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(BuiltinArray.ProtoLoc, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("length", AbsConstValue(PropValue(ObjectValue(UInt, T, F, F)))),
    (Str_default_number, AbsConstValue(PropValue(ObjectValue(Value(loc_appctrldata), T, T, T))))
  )
  // Abstract ApplicationContext instance (context id + application id).
  private val prop_appctxt_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(TIZENApplicationContext.loc_proto, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("id", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T)))),
    ("appId", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T))))
  )
  // Summary array of ApplicationContext instances.
  private val prop_appctxtarr_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Array")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(BuiltinArray.ProtoLoc, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("length", AbsConstValue(PropValue(ObjectValue(UInt, T, F, F)))),
    (Str_default_number, AbsConstValue(PropValue(ObjectValue(Value(loc_appctxt), T, T, T))))
  )
  // Abstract ApplicationCertificate instance; `type` is the join of all
  // certificate-type constants defined by the Tizen Application API.
  private val prop_appcert_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(TIZENApplicationCertificate.loc_proto, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("type", AbsConstValue(PropValue(ObjectValue(Value(AbsString.alpha("AUTHOR_ROOT") +
      AbsString.alpha("AUTHOR_INTERMEDIATE") +
      AbsString.alpha("AUTHOR_SIGNER") +
      AbsString.alpha("DISTRIBUTOR_ROOT") +
      AbsString.alpha("DISTRIBUTOR_INTERMEDIATE") +
      AbsString.alpha("DISTRIBUTOR_SIGNER") +
      AbsString.alpha("DISTRIBUTOR2_ROOT") +
      AbsString.alpha("DISTRIBUTOR2_INTERMEDIATE") +
      AbsString.alpha("DISTRIBUTOR2_SIGNER")), F, T, T)))),
    ("value", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T))))
  )
  // Summary array of ApplicationCertificate instances (returned by getAppCerts).
  private val prop_appcertarr_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Array")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(BuiltinArray.ProtoLoc, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("length", AbsConstValue(PropValue(ObjectValue(UInt, T, F, F)))),
    (Str_default_number, AbsConstValue(PropValue(ObjectValue(Value(loc_appcert), T, T, T))))
  )
  // Abstract RequestedApplicationControl instance.
  private val prop_reqappctrl_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(TIZENRequestedApplicationControl.loc_proto, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("appControl", AbsConstValue(PropValue(ObjectValue(Value(loc_appctrl), F, T, T)))),
    ("callerAppId", AbsConstValue(PropValue(ObjectValue(Value(StrTop), F, T, T))))
  )
  // Summary array of unknown strings (shared by several fields above).
  private val prop_strarr_ins: List[(String, AbsProperty)] = List(
    ("@class", AbsConstValue(PropValue(AbsString.alpha("Array")))),
    ("@proto", AbsConstValue(PropValue(ObjectValue(BuiltinArray.ProtoLoc, F, F, F)))),
    ("@extensible", AbsConstValue(PropValue(T))),
    ("length", AbsConstValue(PropValue(ObjectValue(UInt, T, F, F)))),
    (Str_default_number, AbsConstValue(PropValue(ObjectValue(Value(StrTop), T, T, T))))
  )
override def getSemanticMap(): Map[String, SemanticFun] = {
Map(
("tizen.applicationObj.getCurrentApplication" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val est = Set[WebAPIException](UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
((Helper.ReturnStore(h, Value(loc_app)), ctx), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.applicationObj.kill" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val l_r1 = addrToLoc(addr1, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val ctxid = getArgValue(h_1,ctx_1,args,"0")
val n_arglen = Operator.ToUInt32(getArgValue(h_1, ctx_1, args, "length"))
val es =
if (ctxid._1._1 </ UndefBot && ctxid._1._2 </ NullBot)
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
/* register successCallback and errorCallback */
val (h_2, es_1) = AbsNumber.getUIntSingle(n_arglen) match {
case Some(n) if n == 1 =>
(h_1,TizenHelper.TizenExceptionBot)
case Some(n) if n == 2 =>
val sucCB = getArgValue(h_1,ctx_1,args,"1")
val es_2 =
if (sucCB._2.exists((l) => Helper.IsCallable(h_1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val h_2 = TizenHelper.addCallbackHandler(h_1, AbsString.alpha("successCB"), Value(sucCB._2), Value(UndefTop))
(h_2, es_2)
case Some(n) if n == 3 =>
val sucCB = getArgValue(h_1,ctx_1,args,"1")
val errCB = getArgValue(h_1,ctx_1,args,"2")
val es_2 =
if (sucCB._2.exists((l) => Helper.IsCallable(h_1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_3 =
if (errCB._2.exists((l) => Helper.IsCallable(h_1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr = Helper.NewArrayObject(AbsNumber.alpha(0)).
update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_notFounderr) ++ LocSet(TIZENtizen.loc_invalidValueserr) ++ LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_2 = h_1.update(l_r1, o_arr)
val h_3 = TizenHelper.addCallbackHandler(h_2, AbsString.alpha("successCB"), Value(sucCB._2), Value(UndefTop))
val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("errorCB"), Value(errCB._2), Value(l_r1))
(h_4, es_2 ++ es_3)
case _ => {
(HeapBot, TizenHelper.TizenExceptionBot)
}
}
val est = Set[WebAPIException](SecurityError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ es_1++ est)
((h_2, ctx_1), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.applicationObj.launch" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val l_r1 = addrToLoc(addr1, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val appid = getArgValue(h_1,ctx_1,args,"0")
val n_arglen = Operator.ToUInt32(getArgValue(h_1, ctx_1, args, "length"))
val es =
if (appid._1._1 </ UndefBot && appid._1._2 </ NullBot)
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
/* register successCallback and errorCallback */
val (h_2, es_1) = AbsNumber.getUIntSingle(n_arglen) match {
case Some(n) if n == 1 =>
(h_1,TizenHelper.TizenExceptionBot)
case Some(n) if n == 2 =>
val sucCB = getArgValue(h_1,ctx_1,args,"1")
val es_2 =
if (sucCB._2.exists((l) => Helper.IsCallable(h_1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val h_2 = TizenHelper.addCallbackHandler(h_1, AbsString.alpha("successCB"), Value(sucCB._2), Value(UndefTop))
(h_2, es_2)
case Some(n) if n == 3 =>
val sucCB = getArgValue(h_1,ctx_1,args,"1")
val errCB = getArgValue(h_1,ctx_1,args,"2")
val es_2 =
if (sucCB._2.exists((l) => Helper.IsCallable(h_1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_3 =
if (errCB._2.exists((l) => Helper.IsCallable(h_1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_notFounderr) ++ LocSet(TIZENtizen.loc_invalidValueserr) ++
LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_2 = h_1.update(l_r1, o_arr)
val h_3 = TizenHelper.addCallbackHandler(h_2, AbsString.alpha("successCB"), Value(sucCB._2), Value(UndefTop))
val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("errorCB"), Value(errCB._2), Value(l_r1))
(h_4, es_2 ++ es_3)
case _ => {
(HeapBot, TizenHelper.TizenExceptionBot)
}
}
val est = Set[WebAPIException](SecurityError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ es_1 ++ est)
((h_2, ctx_1), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.applicationObj.launchAppControl" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val l_r1 = addrToLoc(addr1, Recent)
val l_r2 = addrToLoc(addr2, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val appctrl = getArgValue(h_2,ctx_2,args,"0")
val n_arglen = Operator.ToUInt32(getArgValue(h_2, ctx_2, args, "length"))
val (b_1, es_1) = TizenHelper.instanceOf(h_2, appctrl, Value(TIZENApplicationControl.loc_proto))
val es_2 =
if (b_1._1._3 <= F)
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
/* register successCallback and errorCallback */
val (h_3, es_3) = AbsNumber.getUIntSingle(n_arglen) match {
case Some(n) if n == 1 =>
(h_2,TizenHelper.TizenExceptionBot)
case Some(n) if n == 2 =>
val appid = getArgValue(h_2, ctx_2, args, "1")
val es =
if (appid._1._1 </ UndefBot && appid._1._2 </ NullBot)
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
(h_2,es)
case Some(n) if n == 3 =>
val appid = getArgValue(h_2, ctx_2, args, "1")
val sucCB = getArgValue(h_2, ctx_2, args, "2")
val es =
if (appid._1._1 </ UndefBot && appid._1._2 </ NullBot)
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_3 =
if (sucCB._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val h_3 = TizenHelper.addCallbackHandler(h_2, AbsString.alpha("successCB"), Value(sucCB._2), Value(UndefTop))
(h_3, es ++ es_3)
case Some(n) if n == 4 =>
val appid = getArgValue(h_2, ctx_2, args, "1")
val sucCB = getArgValue(h_2,ctx_2,args,"2")
val errCB = getArgValue(h_2,ctx_2,args,"3")
val es =
if (appid._1._1 </ UndefBot && appid._1._2 </ NullBot)
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_3 =
if (sucCB._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_4 =
if (errCB._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_notFounderr) ++ LocSet(TIZENtizen.loc_invalidValueserr) ++
LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_3 = h_2.update(l_r1, o_arr)
val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("successCB"), Value(sucCB._2), Value(UndefTop))
val h_5 = TizenHelper.addCallbackHandler(h_4, AbsString.alpha("errorCB"), Value(errCB._2), Value(l_r1))
(h_5, es ++ es_3 ++ es_4)
case Some(n) if n >= 5 =>
val appid = getArgValue(h_2, ctx_2, args, "1")
val sucCB = getArgValue(h_2,ctx_2,args,"2")
val errCB = getArgValue(h_2,ctx_2,args,"3")
val replyCB = getArgValue(h_2,ctx_2,args,"4")
val es =
if (appid._1._1 </ UndefBot && appid._1._2 </ NullBot)
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_3 =
if (sucCB._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_4 =
if (errCB._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val (h_3, es_5) = replyCB._2.foldLeft((h_2, TizenHelper.TizenExceptionBot))((_he, l) => {
val v1 = Helper.Proto(_he._1, l, AbsString.alpha("onsuccess"))
val v2 = Helper.Proto(_he._1, l, AbsString.alpha("onfailure"))
val es1 =
if (v1._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es2 =
if (v2._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr2 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(TIZENapplicationObj.loc_appctrldataarr), T, T, T)))
val h_3 = _he._1.update(l_r2, o_arr2)
val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("AppCtrlDataArrayReplyCB.onsuccess"), Value(v1._2), Value(l_r2))
val h_5 = TizenHelper.addCallbackHandler(h_4, AbsString.alpha("AppCtrlDataArrayReplyCB.onfailure"), Value(v2._2), Value(UndefTop))
(h_5, _he._2 ++ es1 ++ es2)
})
val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_notFounderr) ++ LocSet(TIZENtizen.loc_invalidValueserr) ++
LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_4 = h_3.update(l_r1, o_arr)
val h_5 = TizenHelper.addCallbackHandler(h_4, AbsString.alpha("successCB"), Value(sucCB._2), Value(UndefTop))
val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("errorCB"), Value(errCB._2), Value(l_r1))
(h_6, es ++ es_3 ++ es_4 ++ es_5)
case _ => {
(HeapBot, TizenHelper.TizenExceptionBot)
}
}
val est = Set[WebAPIException](SecurityError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2 ++ es_3 ++ est)
((h_3, ctx_2), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.applicationObj.findAppControl" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val l_r = addrToLoc(addr1, Recent)
val l_r1 = addrToLoc(addr2, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val appctrl = getArgValue(h_2, ctx_2, args, "0")
val sucCB = getArgValue(h_2, ctx_2, args, "1")
val n_arglen = Operator.ToUInt32(getArgValue(h_2, ctx_2, args, "length"))
val (b_1, es_1) = TizenHelper.instanceOf(h_2, appctrl, Value(TIZENApplicationControl.loc_proto))
val es_2 =
if (b_1._1._3 <= F)
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_3 =
if (sucCB._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr = Helper.NewArrayObject(AbsNumber.alpha(2)).
update("0", PropValue(ObjectValue(Value(TIZENapplicationObj.loc_appinfoarr), T, T, T))).
update("1", PropValue(ObjectValue(Value(appctrl._2), T, T, T)))
val h_3 = h_2.update(l_r, o_arr)
val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("FindAppCtrlSuccessCB"), Value(sucCB._2), Value(l_r))
/* register success Callback and error Callback */
val (h_5, es_4) = AbsNumber.getUIntSingle(n_arglen) match {
case Some(n) if n == 2 =>
(h_4,TizenHelper.TizenExceptionBot)
case Some(n) if n >= 3 =>
val errCB = getArgValue(h_4, ctx_2, args, "2")
val es_4 =
if (errCB._2.exists((l) => Helper.IsCallable(h_4, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_invalidValueserr) ++ LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_5 = h_4.update(l_r1, o_arr1)
val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("errorCB"), Value(errCB._2), Value(l_r1))
(h_6, es_4)
case _ => {
(h_2, TizenHelper.TizenExceptionBot)
}
}
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2 ++ es_3 ++ es_4)
((h_5, ctx_2), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.applicationObj.getAppsContext" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val l_r = addrToLoc(addr1, Recent)
val l_r1 = addrToLoc(addr2, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val sucCB = getArgValue(h_2, ctx_2, args, "0")
val n_arglen = Operator.ToUInt32(getArgValue(h_2, ctx_2, args, "length"))
val es_1 =
if (sucCB._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(TIZENapplicationObj.loc_appctxtarr), T, T, T)))
val h_3 = h_2.update(l_r, o_arr)
val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("AppContextArraySuccessCB"), Value(sucCB._2), Value(l_r))
val (h_5, es_2) = AbsNumber.getUIntSingle(n_arglen) match {
case Some(n) if n == 1 =>
(h_4,TizenHelper.TizenExceptionBot)
case Some(n) if n >= 2 =>
val errCB = getArgValue(h_4, ctx_2, args, "1")
val es_2 =
if (errCB._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_5 = h_4.update(l_r1, o_arr1)
val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("errorCB"), Value(errCB._2), Value(l_r1))
(h_6, es_2)
case _ => {
(h_2, TizenHelper.TizenExceptionBot)
}
}
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2)
((h_5, ctx_2), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.applicationObj.getAppContext" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val l_r = addrToLoc(addr1, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val ctxid = getArgValue(h_1, ctx_1, args, "0")
val o_new = Obj.empty.update("@class", PropValue(AbsString.alpha("Object"))).
update("@proto", PropValue(ObjectValue(Value(TIZENApplicationContext.loc_proto), F, F, F))).
update("@extensible", PropValue(T)).
update("appId", PropValue(ObjectValue(Value(StrTop), F, T, T)))
val o_new2 =
if (ctxid._1._2 </ NullBot || ctxid._1._1 </ UndefBot)
o_new.update("id", PropValue(ObjectValue(Value(StrTop), F, T, T)))
else
o_new.update("id", PropValue(ObjectValue(Value(Helper.toString(ctxid._1)), F, T, T)))
val h_2 = h_1.update(l_r, o_new2)
val est = Set[WebAPIException](NotFoundError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
((Helper.ReturnStore(h_2, Value(l_r)), ctx_1), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.applicationObj.getAppsInfo" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val l_r = addrToLoc(addr1, Recent)
val l_r1 = addrToLoc(addr2, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val sucCB = getArgValue(h_2, ctx_2, args, "0")
val n_arglen = Operator.ToUInt32(getArgValue(h_2, ctx_2, args, "length"))
val es_1 =
if (sucCB._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(TIZENapplicationObj.loc_appinfoarr), T, T, T)))
val h_3 = h_2.update(l_r, o_arr)
val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("AppInfoArraySuccessCB"), Value(sucCB._2), Value(l_r))
val (h_5, es_2) = AbsNumber.getUIntSingle(n_arglen) match {
case Some(n) if n == 1 =>
(h_4,TizenHelper.TizenExceptionBot)
case Some(n) if n >= 2 =>
val errCB = getArgValue(h_4, ctx_2, args, "1")
val es_2 =
if (errCB._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_5 = h_4.update(l_r1, o_arr1)
val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("errorCB"), Value(errCB._2), Value(l_r1))
(h_6, es_2)
case _ => {
(HeapBot, TizenHelper.TizenExceptionBot)
}
}
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2)
((h_5, ctx_2), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.applicationObj.getAppInfo" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val l_r = addrToLoc(addr1, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val appid = getArgValue(h_1, ctx_1, args, "0")
val o_new = Obj.empty.update("@class", PropValue(AbsString.alpha("Object"))).
update("@proto", PropValue(ObjectValue(Value(TIZENApplicationInformation.loc_proto), F, F, F))).
update("@extensible", PropValue(T)).
update("name", PropValue(ObjectValue(Value(StrTop), F, T, T))).
update("iconPath", PropValue(ObjectValue(Value(StrTop), F, T, T))).
update("version", PropValue(ObjectValue(Value(StrTop), F, T, T))).
update("show", PropValue(ObjectValue(Value(BoolTop), F, T, T))).
update("categories", PropValue(ObjectValue(Value(StrTop), F, T, T))).
update("installDate", PropValue(ObjectValue(Value(StrTop), F, T, T))).
update("size", PropValue(ObjectValue(Value(NumTop), F, T, T))).
update("packageId", PropValue(ObjectValue(Value(StrTop), F, T, T)))
val o_new2 =
if (appid._1._2 </ NullBot || appid._1._1 </ UndefBot)
o_new.update("id", PropValue(ObjectValue(Value(StrTop), F, T, T)))
else
o_new.update("id", PropValue(ObjectValue(Value(Helper.toString(appid._1)), F, T, T)))
val h_2 = h_1.update(l_r, o_new2)
val est = Set[WebAPIException](NotFoundError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
((Helper.ReturnStore(h_2, Value(l_r)), ctx_1), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.applicationObj.getAppCerts" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val appid = getArgValue(h, ctx, args, "0")
val est = Set[WebAPIException](SecurityError, NotFoundError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
((Helper.ReturnStore(h, Value(TIZENapplicationObj.loc_appcertarr)), ctx), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.applicationObj.getAppSharedURI" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val v = getArgValue(h, ctx, args, "0")
val est = Set[WebAPIException](NotFoundError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
((Helper.ReturnStore(h, Value(StrTop)), ctx), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.applicationObj.addAppInfoEventListener" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val addr3 = cfg.getAPIAddress(addr_env, 2)
val l_r1 = addrToLoc(addr1, Recent)
val l_r2 = addrToLoc(addr2, Recent)
val l_r3 = addrToLoc(addr3, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val (h_3, ctx_3) = Helper.Oldify(h_2, ctx_2, addr3)
val eventCB = getArgValue(h_3, ctx_3, args, "0")
val (h_4, es) = eventCB._2.foldLeft((h_3, TizenHelper.TizenExceptionBot))((_he, l) => {
val v1 = Helper.Proto(_he._1, l, AbsString.alpha("oninstalled"))
val v2 = Helper.Proto(_he._1, l, AbsString.alpha("onupdated"))
val v3 = Helper.Proto(_he._1, l, AbsString.alpha("onuninstalled"))
val es1 =
if (v1._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es2 =
if (v2._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es3 =
if (v3._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(TIZENapplicationObj.loc_appinfo), T, T, T)))
val o_arr2 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(TIZENapplicationObj.loc_appinfo), T, T, T)))
val o_arr3 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(StrTop), T, T, T)))
val h_2 = _he._1.update(l_r1, o_arr1).update(l_r2, o_arr2).update(l_r3, o_arr3)
val h_3 = TizenHelper.addCallbackHandler(h_2, AbsString.alpha("AppInfoEventCB.oninstalled"), Value(v1._2), Value(l_r1))
val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("AppInfoEventCB.onupdated"), Value(v2._2), Value(l_r2))
val h_5 = TizenHelper.addCallbackHandler(h_4, AbsString.alpha("AppInfoEventCB.onuninstalled"), Value(v3._2), Value(l_r3))
(h_5, _he._2 ++ es1 ++ es2 ++ es3)
})
val est = Set[WebAPIException](UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ est)
((Helper.ReturnStore(h_4, Value(NumTop)), ctx_3), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.applicationObj.removeAppInfoEventListener" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val v = getArgValue(h, ctx, args, "0")
val es =
if (v._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val est = Set[WebAPIException](InvalidValuesError, NotFoundError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ est)
((h, ctx), (he + h_e, ctxe + ctx_e))
}
))
)
}
override def getPreSemanticMap(): Map[String, SemanticFun] = {
Map()
}
override def getDefMap(): Map[String, AccessFun] = {
Map()
}
override def getUseMap(): Map[String, AccessFun] = {
Map()
}
}
| darkrsw/safe | src/main/scala/kr/ac/kaist/jsaf/analysis/typing/models/Tizen/TIZENapplicationObj.scala | Scala | bsd-3-clause | 42,550 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010 Mark Harrah
*/
package sbt
import java.io.File
import java.net.URL
/** A single published artifact: its name, Ivy type, file extension, optional
  * classifier, the configurations it belongs to, an optional explicit URL, and
  * any extra Ivy attributes attached to it.
  */
final case class Artifact(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Iterable[Configuration], url: Option[URL], extraAttributes: Map[String, String]) {
  /** Returns a copy of this artifact with the given attributes (passed through
    * `ModuleID.checkE`) merged into `extraAttributes`.
    */
  def extra(attributes: (String, String)*) =
    copy(extraAttributes = extraAttributes ++ ModuleID.checkE(attributes))
}
import Configurations.{ config, Docs, Optional, Pom, Sources, Test }
/** Factory and helper methods for [[Artifact]]. */
object Artifact {
  /** Plain jar artifact with default type and extension. */
  def apply(name: String): Artifact = Artifact(name, DefaultType, DefaultExtension, None, Nil, None)
  /** Plain jar artifact carrying extra Ivy attributes. */
  def apply(name: String, extra: Map[String, String]): Artifact = Artifact(name, DefaultType, DefaultExtension, None, Nil, None, extra)
  /** Jar artifact with a classifier (e.g. "sources", "javadoc", "tests"). */
  def apply(name: String, classifier: String): Artifact = Artifact(name, DefaultType, DefaultExtension, Some(classifier), Nil, None)
  def apply(name: String, `type`: String, extension: String): Artifact = Artifact(name, `type`, extension, None, Nil, None)
  def apply(name: String, `type`: String, extension: String, classifier: String): Artifact = Artifact(name, `type`, extension, Some(classifier), Nil, None)
  /** Artifact located at an explicit URL; type and extension are taken from the URL's file suffix. */
  def apply(name: String, url: URL): Artifact = Artifact(name, extract(url, DefaultType), extract(url, DefaultExtension), None, Nil, Some(url))
  def apply(name: String, `type`: String, extension: String, classifier: Option[String], configurations: Iterable[Configuration], url: Option[URL]): Artifact =
    Artifact(name, `type`, extension, classifier, configurations, url, Map.empty)
  val DefaultExtension = "jar"
  val DefaultType = "jar"
  /** Source-jar artifact for `name` (classifier "sources"). */
  def sources(name: String) = classified(name, SourceClassifier)
  /** Javadoc-jar artifact for `name` (classifier "javadoc"). */
  def javadoc(name: String) = classified(name, DocClassifier)
  /** POM artifact, assigned to the Pom configuration. */
  def pom(name: String) = Artifact(name, PomType, PomType, None, Pom :: Nil, None)
  val DocClassifier = "javadoc"
  val SourceClassifier = "sources"
  val DocType = "doc"
  val SourceType = "src"
  val PomType = "pom"
  val TestsClassifier = "tests"
  /** Suffix of `url`'s string form after the last '.', or `default` when absent. */
  def extract(url: URL, default: String): String = extract(url.toString, default)
  /** Substring of `name` after the last '.', or `default` when `name` has no '.'. */
  def extract(name: String, default: String): String =
    {
      val i = name.lastIndexOf('.')
      if (i >= 0)
        name.substring(i + 1)
      else
        default
    }
  /** Artifact describing a local file: base name without extension, type/extension
    * from the file suffix, URL pointing at the file itself. */
  def defaultArtifact(file: File) =
    {
      val name = file.getName
      val i = name.lastIndexOf('.')
      val base = if (i >= 0) name.substring(0, i) else name
      Artifact(base, extract(name, DefaultType), extract(name, DefaultExtension), None, Nil, Some(file.toURI.toURL))
    }
  /** Computes the file name "<crossedName>-<revision>[-classifier].<extension>",
    * applying the module's cross-version convention to the artifact name. */
  def artifactName(scalaVersion: ScalaVersion, module: ModuleID, artifact: Artifact): String =
    {
      import artifact._
      val classifierStr = classifier match { case None => ""; case Some(c) => "-" + c }
      val cross = CrossVersion(module.crossVersion, scalaVersion.full, scalaVersion.binary)
      val base = CrossVersion.applyCross(artifact.name, cross)
      base + "-" + module.revision + classifierStr + "." + artifact.extension
    }
  // Default configuration / artifact type per well-known classifier.
  val classifierConfMap = Map(SourceClassifier -> Sources, DocClassifier -> Docs)
  val classifierTypeMap = Map(SourceClassifier -> SourceType, DocClassifier -> DocType)
  /** Configuration for a classifier: anything starting with "tests" maps to Test,
    * sources/javadoc to their dedicated configurations, everything else to Optional. */
  def classifierConf(classifier: String): Configuration =
    if (classifier.startsWith(TestsClassifier))
      Test
    else
      classifierConfMap.getOrElse(classifier, Optional)
  /** Artifact type for a classifier after stripping a leading "tests-" prefix; defaults to "jar". */
  def classifierType(classifier: String): String = classifierTypeMap.getOrElse(classifier.stripPrefix(TestsClassifier + "-"), DefaultType)
  /** Classified jar artifact (sources/javadoc/tests/...) placed in the configuration
    * implied by the classifier. */
  def classified(name: String, classifier: String): Artifact =
    Artifact(name, classifierType(classifier), DefaultExtension, Some(classifier), classifierConf(classifier) :: Nil, None)
}
| jaceklaskowski/sbt | ivy/src/main/scala/sbt/Artifact.scala | Scala | bsd-3-clause | 3,741 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package scalaguide.tests
// #basic-spec
package models
import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
// Minimal specs2 example (embedded in the Play testing docs via the
// surrounding #basic-spec markers): builds a User and checks that the
// `name` field holds the constructor argument.
@RunWith(classOf[JUnitRunner])
class UserSpec extends Specification {
  "User" should {
    "have a name" in {
      val user = User(id = "user-id", name = "Player", email = "user@email.com")
      user.name must beEqualTo("Player")
    }
  }
}
// #basic-spec
// Demonstrates a specs2 matcher assertion; the "#assertion-example" comments
// are documentation-snippet anchors and must be kept exactly as written.
class AnotherSpec extends Specification {
  "Some example" in {
    // #assertion-example
    "Hello world" must endWith("world")
    // #assertion-example
  }
}
// #import-mockito
import org.specs2.mock._
// #import-mockito
| benmccann/playframework | documentation/manual/working/scalaGuide/main/tests/code/models/UserSpec.scala | Scala | apache-2.0 | 721 |
package org.scalex
package object index {
  // Exception type for an index/database that can no longer be read.
  // NOTE(review): exact trigger conditions inferred from the name only —
  // confirm against the call sites that throw it.
  final class OutdatedDatabaseException(msg: String) extends Exception(msg)
}
| ornicar/scalex | src/main/scala/index/package.scala | Scala | mit | 122 |
// NOTE(review): deliberately malformed Scala (`val x` has no right-hand side).
// This file ("Bar-bad.scala") is an sbt scripted-test fixture exercising the
// code formatter's behavior on unparsable sources — do not "fix" it.
class Bar { val x = }
/*
* Copyright 2016 Nikolay Donets
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.nikdon.telepooz.json
import java.time.Duration
import java.util.Date
import com.github.nikdon.telepooz.model._
import com.github.nikdon.telepooz.model.inline._
import com.github.nikdon.telepooz.model.payments._
import com.github.nikdon.telepooz.utils._
import io.circe.Decoder
import io.circe.generic.semiauto._
/** circe `Decoder` instances for every Telegram Bot API model used by telepooz.
  *
  * Most instances are semi-automatically derived with `deriveDecoder`; the
  * exceptions are enum-like types (decoded from their snake_case wire string
  * via `pascalize` + the type's `unsafe` constructor) and time types.
  */
trait CirceDecoders {
  // Models
  // `new Date(d)` interprets the wire Long as epoch milliseconds.
  implicit val dateDecoder: Decoder[Date] = Decoder[Long].map(d ⇒ new Date(d)) // TODO Check
  // Wire Int is taken as a number of seconds.
  implicit val durationDecoder: Decoder[Duration] = Decoder[Int].map(d ⇒ Duration.ofSeconds(d.toLong)) // TODO Check
  implicit val audioDecoder: Decoder[Audio] = deriveDecoder[Audio]
  implicit val chatTypeDecoder: Decoder[ChatType] = Decoder[String].map(a ⇒ ChatType.unsafe(pascalize(a)))
  implicit val chatDecoder: Decoder[Chat] = deriveDecoder[Chat]
  implicit val contactDecoder: Decoder[Contact] = deriveDecoder[Contact]
  implicit val documentDecoder: Decoder[Document] = deriveDecoder[Document]
  implicit val fileDecoder: Decoder[File] = deriveDecoder[File]
  implicit val callbackGameDecoder: Decoder[CallbackGame] = deriveDecoder[CallbackGame]
  implicit val inlineKeyboardButtonDecoder: Decoder[InlineKeyboardButton] = deriveDecoder[InlineKeyboardButton]
  implicit val keyboardButtonDecoder: Decoder[KeyboardButton] = deriveDecoder[KeyboardButton]
  implicit val locationDecoder: Decoder[Location] = deriveDecoder[Location]
  implicit val messageEntityTypeDecoder: Decoder[MessageEntityType] =
    Decoder[String].map(a ⇒ MessageEntityType.unsafe(pascalize(a)))
  implicit val messageEntityDecoder: Decoder[MessageEntity] = deriveDecoder[MessageEntity]
  implicit val webhookInfoDecoder: Decoder[WebhookInfo] = deriveDecoder[WebhookInfo]
  implicit val parseModeDecoder: Decoder[ParseMode] = Decoder[String].map(ParseMode.unsafe)
  implicit val photoSizeDecoder: Decoder[PhotoSize] = deriveDecoder[PhotoSize]
  implicit val replyMarkupDecoder: Decoder[ReplyMarkup] = deriveDecoder[ReplyMarkup]
  implicit val stickerDecoder: Decoder[Sticker] = deriveDecoder[Sticker]
  implicit val userDecoder: Decoder[User] = deriveDecoder[User]
  implicit val userProfilePhotosDecoder: Decoder[UserProfilePhotos] = deriveDecoder[UserProfilePhotos]
  implicit val venueDecoder: Decoder[Venue] = deriveDecoder[Venue]
  implicit val videoDecoder: Decoder[Video] = deriveDecoder[Video]
  implicit val videoNoteDecoder: Decoder[VideoNote] = deriveDecoder[VideoNote]
  implicit val voiceDecoder: Decoder[Voice] = deriveDecoder[Voice]
  // Game
  implicit val gameHighScoreDecoder: Decoder[GameHighScore] = deriveDecoder[GameHighScore]
  implicit val animationDecoder: Decoder[Animation] = deriveDecoder[Animation]
  implicit val gameDecoder: Decoder[Game] = deriveDecoder[Game]
  implicit val messageDecoder: Decoder[Message] = deriveDecoder[Message]
  implicit val callbackQueryDecoder: Decoder[CallbackQuery] = deriveDecoder[CallbackQuery]
  implicit val memberStatusDecoder: Decoder[MemberStatus] = Decoder[String].map(a ⇒ MemberStatus.unsafe(pascalize(a)))
  implicit val chatMemberDecoder: Decoder[ChatMember] = deriveDecoder[ChatMember]
  // Inline
  implicit val inlineQueryDecoder: Decoder[InlineQuery] = deriveDecoder[InlineQuery]
  implicit val chosenInlineQueryDecoder: Decoder[ChosenInlineQuery] = deriveDecoder[ChosenInlineQuery]
  implicit val inputContactMessageContent: Decoder[InputContactMessageContent] =
    deriveDecoder[InputContactMessageContent]
  implicit val inputVenueMessageContentDecoder: Decoder[InputVenueMessageContent] =
    deriveDecoder[InputVenueMessageContent]
  implicit val inputLocationMessageContentDecoder: Decoder[InputLocationMessageContent] =
    deriveDecoder[InputLocationMessageContent]
  implicit val inputTextMessageContentDecoder: Decoder[InputTextMessageContent] =
    deriveDecoder[InputTextMessageContent]
  // Payments
  implicit val labeledPriceDecoder: Decoder[LabeledPrice] = deriveDecoder[LabeledPrice]
  implicit val invoiceDecoder: Decoder[Invoice] = deriveDecoder[Invoice]
  implicit val shippingAddressDecoder: Decoder[ShippingAddress] = deriveDecoder[ShippingAddress]
  implicit val shippingQueryDecoder: Decoder[ShippingQuery] = deriveDecoder[ShippingQuery]
  implicit val orderInfoDecoder: Decoder[OrderInfo] = deriveDecoder[OrderInfo]
  implicit val preCheckoutQueryDecoder: Decoder[PreCheckoutQuery] = deriveDecoder[PreCheckoutQuery]
  implicit val shippingOptionDecoder: Decoder[ShippingOption] = deriveDecoder[ShippingOption]
  implicit val successfulPaymentDecoder: Decoder[SuccessfulPayment] = deriveDecoder[SuccessfulPayment]
  // NOTE(review): deriving a Decoder for Either relies on the wire object being
  // wrapped in a "Left"/"Right" discriminator — confirm this matches the API payloads.
  implicit def eitherResponseDecoder[A, B](implicit D: Decoder[A], DD: Decoder[B]): Decoder[Either[A, B]] =
    deriveDecoder[Either[A, B]]
  implicit val updateDecoder: Decoder[Update] = deriveDecoder[Update]
  implicit def responseDecoder[T](implicit D: Decoder[T]): Decoder[Response[T]] = deriveDecoder[Response[T]]
}
| nikdon/telepooz | src/main/scala/com/github/nikdon/telepooz/json/CirceDecoders.scala | Scala | apache-2.0 | 5,955 |
package gh.test.gh3
import gh3.models.GH3Base
import net.liftweb.json._
import org.scalatest.{FlatSpec, Matchers}
// Feeds a captured GitHub v3 webhook payload fragment (label/ref/sha plus full
// user and repo objects) to GH3Base and checks that parsing yields a value.
class GH3BaseTest extends FlatSpec with Matchers
{
   "A valid GH3Base" must "be correctly parsed" in {
      val json = parse(
        """
          | {
          |
          |    "label":"baxterthehacker:master",
          |    "ref":"master",
          |    "sha":"9049f1265b7d61be4a8904a9a27120d2064dab3b",
          |    "user":{
          |       "login":"baxterthehacker",
          |       "id":6752317,
          |       "avatar_url":"https://avatars.githubusercontent.com/u/6752317?v=3",
          |       "gravatar_id":"",
          |       "url":"https://api.github.com/users/baxterthehacker",
          |       "html_url":"https://github.com/baxterthehacker",
          |       "followers_url":"https://api.github.com/users/baxterthehacker/followers",
          |       "following_url":"https://api.github.com/users/baxterthehacker/following{/other_user}",
          |       "gists_url":"https://api.github.com/users/baxterthehacker/gists{/gist_id}",
          |       "starred_url":"https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}",
          |       "subscriptions_url":"https://api.github.com/users/baxterthehacker/subscriptions",
          |       "organizations_url":"https://api.github.com/users/baxterthehacker/orgs",
          |       "repos_url":"https://api.github.com/users/baxterthehacker/repos",
          |       "events_url":"https://api.github.com/users/baxterthehacker/events{/privacy}",
          |       "received_events_url":"https://api.github.com/users/baxterthehacker/received_events",
          |       "type":"User",
          |       "site_admin":false
          |    },
          |    "repo":{
          |       "id":35129377,
          |       "name":"public-repo",
          |       "full_name":"baxterthehacker/public-repo",
          |       "owner":{
          |         "login":"baxterthehacker",
          |         "id":6752317,
          |         "avatar_url":"https://avatars.githubusercontent.com/u/6752317?v=3",
          |         "gravatar_id":"",
          |         "url":"https://api.github.com/users/baxterthehacker",
          |         "html_url":"https://github.com/baxterthehacker",
          |         "followers_url":"https://api.github.com/users/baxterthehacker/followers",
          |         "following_url":"https://api.github.com/users/baxterthehacker/following{/other_user}",
          |         "gists_url":"https://api.github.com/users/baxterthehacker/gists{/gist_id}",
          |         "starred_url":"https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}",
          |         "subscriptions_url":"https://api.github.com/users/baxterthehacker/subscriptions",
          |         "organizations_url":"https://api.github.com/users/baxterthehacker/orgs",
          |         "repos_url":"https://api.github.com/users/baxterthehacker/repos",
          |         "events_url":"https://api.github.com/users/baxterthehacker/events{/privacy}",
          |         "received_events_url":"https://api.github.com/users/baxterthehacker/received_events",
          |         "type":"User",
          |         "site_admin":false
          |       },
          |       "private":false,
          |       "html_url":"https://github.com/baxterthehacker/public-repo",
          |       "description":"",
          |       "fork":false,
          |       "url":"https://api.github.com/repos/baxterthehacker/public-repo",
          |       "forks_url":"https://api.github.com/repos/baxterthehacker/public-repo/forks",
          |       "keys_url":"https://api.github.com/repos/baxterthehacker/public-repo/keys{/key_id}",
          |       "collaborators_url":"https://api.github.com/repos/baxterthehacker/public-repo/collaborators{/collaborator}",
          |       "teams_url":"https://api.github.com/repos/baxterthehacker/public-repo/teams",
          |       "hooks_url":"https://api.github.com/repos/baxterthehacker/public-repo/hooks",
          |       "issue_events_url":"https://api.github.com/repos/baxterthehacker/public-repo/issues/events{/number}",
          |       "events_url":"https://api.github.com/repos/baxterthehacker/public-repo/events",
          |       "assignees_url":"https://api.github.com/repos/baxterthehacker/public-repo/assignees{/user}",
          |       "branches_url":"https://api.github.com/repos/baxterthehacker/public-repo/branches{/branch}",
          |       "tags_url":"https://api.github.com/repos/baxterthehacker/public-repo/tags",
          |       "blobs_url":"https://api.github.com/repos/baxterthehacker/public-repo/git/blobs{/sha}",
          |       "git_tags_url":"https://api.github.com/repos/baxterthehacker/public-repo/git/tags{/sha}",
          |       "git_refs_url":"https://api.github.com/repos/baxterthehacker/public-repo/git/refs{/sha}",
          |       "trees_url":"https://api.github.com/repos/baxterthehacker/public-repo/git/trees{/sha}",
          |       "statuses_url":"https://api.github.com/repos/baxterthehacker/public-repo/statuses/{sha}",
          |       "languages_url":"https://api.github.com/repos/baxterthehacker/public-repo/languages",
          |       "stargazers_url":"https://api.github.com/repos/baxterthehacker/public-repo/stargazers",
          |       "contributors_url":"https://api.github.com/repos/baxterthehacker/public-repo/contributors",
          |       "subscribers_url":"https://api.github.com/repos/baxterthehacker/public-repo/subscribers",
          |       "subscription_url":"https://api.github.com/repos/baxterthehacker/public-repo/subscription",
          |       "commits_url":"https://api.github.com/repos/baxterthehacker/public-repo/commits{/sha}",
          |       "git_commits_url":"https://api.github.com/repos/baxterthehacker/public-repo/git/commits{/sha}",
          |       "comments_url":"https://api.github.com/repos/baxterthehacker/public-repo/comments{/number}",
          |       "issue_comment_url":"https://api.github.com/repos/baxterthehacker/public-repo/issues/comments{/number}",
          |       "contents_url":"https://api.github.com/repos/baxterthehacker/public-repo/contents/{+path}",
          |       "compare_url":"https://api.github.com/repos/baxterthehacker/public-repo/compare/{base}...{head}",
          |       "merges_url":"https://api.github.com/repos/baxterthehacker/public-repo/merges",
          |       "archive_url":"https://api.github.com/repos/baxterthehacker/public-repo/{archive_format}{/ref}",
          |       "downloads_url":"https://api.github.com/repos/baxterthehacker/public-repo/downloads",
          |       "issues_url":"https://api.github.com/repos/baxterthehacker/public-repo/issues{/number}",
          |       "pulls_url":"https://api.github.com/repos/baxterthehacker/public-repo/pulls{/number}",
          |       "milestones_url":"https://api.github.com/repos/baxterthehacker/public-repo/milestones{/number}",
          |       "notifications_url":"https://api.github.com/repos/baxterthehacker/public-repo/notifications{?since,all,participating}",
          |       "labels_url":"https://api.github.com/repos/baxterthehacker/public-repo/labels{/name}",
          |       "releases_url":"https://api.github.com/repos/baxterthehacker/public-repo/releases{/id}",
          |       "created_at":"2015-05-05T23:40:12Z",
          |       "updated_at":"2015-05-05T23:40:12Z",
          |       "pushed_at":"2015-05-05T23:40:26Z",
          |       "git_url":"git://github.com/baxterthehacker/public-repo.git",
          |       "ssh_url":"git@github.com:baxterthehacker/public-repo.git",
          |       "clone_url":"https://github.com/baxterthehacker/public-repo.git",
          |       "svn_url":"https://github.com/baxterthehacker/public-repo",
          |       "homepage":null,
          |       "size":0,
          |       "stargazers_count":0,
          |       "watchers_count":0,
          |       "language":null,
          |       "has_issues":true,
          |       "has_downloads":true,
          |       "has_wiki":true,
          |       "has_pages":true,
          |       "forks_count":0,
          |       "mirror_url":null,
          |       "open_issues_count":1,
          |       "forks":0,
          |       "open_issues":1,
          |       "watchers":0,
          |       "default_branch":"master"
          |    }
          | }
        """.stripMargin)
      // Parsing succeeds iff every field GH3Base requires is present and well-typed.
      GH3Base(json).isDefined shouldBe true
   }
}
| mgoeminne/github_etl | src/test/scala/gh/test/gh3/GH3BaseTest.scala | Scala | mit | 8,749 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.sql
import slamdata.Predef._
import quasar.TreeMatchers
import quasar.sql.fixpoint._
import matryoshka._
import pathy.Path._
import scalaz._, Scalaz._
/** Tests for `SemanticAnalysis.projectSortKeys`: ORDER BY keys that are not
  * already in the projection list must be added as synthetic `__sd__N`
  * projections (so sorting can reference them), except when a wildcard splice
  * already exposes every field.
  */
class SemanticsSpec extends quasar.Qspec with TreeMatchers {
  val asc: OrderType = ASC
  "sort key projection" should {
    "add single field for order by" in {
      // "height" is ordered on but not projected -> synthetic "__sd__0" added.
      val q = SelectR(SelectAll,
                     Proj(IdentR("name"), None) :: Nil,
                     Some(TableRelationAST(file("person"), None)),
                     None,
                     None,
                     Some(OrderBy((asc, IdentR("height")).wrapNel)))
      SemanticAnalysis.projectSortKeys(q) must beTree(
               SelectR(SelectAll,
                      Proj(IdentR("name"), None) :: Proj(IdentR("height"), Some("__sd__0")) :: Nil,
                      Some(TableRelationAST(file("person"), None)),
                      None,
                      None,
                      Some(OrderBy((asc, IdentR("__sd__0")).wrapNel)))
             )
    }
    "not add a field that appears in the projections" in {
      // Sort key already projected under its own name: query is unchanged.
      val q = SelectR(SelectAll,
                     Proj(IdentR("name"), None) :: Nil,
                     Some(TableRelationAST(file("person"), None)),
                     None,
                     None,
                     Some(OrderBy((asc, IdentR("name")).wrapNel)))
      SemanticAnalysis.projectSortKeys(q) must beTree(q)
    }
    "not add a field that appears as an alias in the projections" in {
      // Sort key matches a projection alias: query is unchanged.
      val q = SelectR(SelectAll,
                     Proj(IdentR("foo"), Some("name")) :: Nil,
                     Some(TableRelationAST(file("person"), None)),
                     None,
                     None,
                     Some(OrderBy((asc, IdentR("name")).wrapNel)))
      SemanticAnalysis.projectSortKeys(q) must beTree(q)
    }
    "not add a field with wildcard present" in {
      // A splice (wildcard) already exposes every field: query is unchanged.
      val q = SelectR(SelectAll,
                     Proj(SpliceR(None), None) :: Nil,
                     Some(TableRelationAST(file("person"), None)),
                     None,
                     None,
                     Some(OrderBy((asc, IdentR("height")).wrapNel)))
      SemanticAnalysis.projectSortKeys(q) must beTree(q)
    }
    // NOTE(review): this example's description duplicates the first one
    // ("add single field for order by") — likely a copy/paste; consider a
    // distinct name such as "add single field for multi-key order by".
    "add single field for order by" in {
      // Two sort keys, but "name" is already projected, so only "height"
      // gains a synthetic projection.
      val q = SelectR(SelectAll,
                     Proj(IdentR("name"), None) :: Nil,
                     Some(TableRelationAST(file("person"), None)),
                     None,
                     None,
                     Some(OrderBy(NonEmptyList(
                       (asc, IdentR("height")),
                       (asc, IdentR("name"))))))
      SemanticAnalysis.projectSortKeys(q) must beTree(
        SelectR(SelectAll,
               Proj(IdentR("name"), None) ::
                 Proj(IdentR("height"), Some("__sd__0")) ::
                 Nil,
               Some(TableRelationAST(file("person"), None)),
               None,
               None,
               Some(OrderBy(NonEmptyList(
                 (asc, IdentR("__sd__0")),
                 (asc, IdentR("name")))))))
    }
    "SemanticAnalysis.projectSortKeys sub-select" in {
      // Sort-key projection must also rewrite ORDER BY inside sub-selects.
      val q = SelectR(SelectAll,
                     Proj(SpliceR(None), None) :: Nil,
                     Some(TableRelationAST(file("foo"), None)),
                     Some(
                       BinopR(
                         IdentR("a"),
                         SelectR(SelectAll,
                                Proj(IdentR("a"), None) :: Nil,
                                Some(TableRelationAST(file("bar"), None)),
                                None,
                                None,
                                Some(OrderBy((asc, IdentR("b")).wrapNel))),
                         In)),
                     None,
                     None)
      SemanticAnalysis.projectSortKeys(q) must beTree(
        SelectR(SelectAll,
               Proj(SpliceR(None), None) :: Nil,
               Some(TableRelationAST(file("foo"), None)),
               Some(
                 BinopR(
                   IdentR("a"),
                   SelectR(SelectAll,
                          Proj(IdentR("a"), None) ::
                            Proj(IdentR("b"), Some("__sd__0")) ::
                            Nil,
                          Some(TableRelationAST(file("bar"), None)),
                          None,
                          None,
                          Some(OrderBy((asc, IdentR("__sd__0")).wrapNel))),
                   In)),
               None,
               None))
    }
  }
}
| jedesah/Quasar | sql/src/test/scala/quasar/sql/semantics.scala | Scala | apache-2.0 | 5,320 |
package junctions
import Chisel._
import NastiConstants._
import cde.Parameters
/** One beat of a simple stream: `w` bits of data plus a `last` flag marking
  * the final beat of a packet. */
class StreamChannel(w: Int) extends Bundle {
  val data = UInt(width = w)
  val last = Bool()
  override def cloneType = new StreamChannel(w).asInstanceOf[this.type]
}
/** Bidirectional stream interface: a Decoupled output channel and a flipped
  * Decoupled input channel, both `w` bits wide. */
class StreamIO(w: Int) extends Bundle {
  val out = Decoupled(new StreamChannel(w))
  val in = Decoupled(new StreamChannel(w)).flip
  override def cloneType = new StreamIO(w).asInstanceOf[this.type]
}
/** Bridges a NASTI (AXI4-style) memory-mapped port to a raw stream: read
  * bursts are served from `stream.in`, write bursts are forwarded to
  * `stream.out`.  Only full-width, fixed-address bursts with full write
  * strobes are legal, as enforced by the asserts below. */
class NastiIOStreamIOConverter(w: Int)(implicit p: Parameters) extends Module {
  val io = new Bundle {
    val nasti = (new NastiIO).flip
    val stream = new StreamIO(w)
  }
  // NASTI `size` encodes log2(bytes per beat); `w` is the stream width in bits.
  val streamSize = UInt(log2Up(w / 8))
  assert(!io.nasti.ar.valid || io.nasti.ar.bits.size === streamSize,
         "read channel wrong size on stream")
  assert(!io.nasti.ar.valid || io.nasti.ar.bits.len === UInt(0) ||
         io.nasti.ar.bits.burst === BURST_FIXED,
         "read channel wrong burst type on stream")
  assert(!io.nasti.aw.valid || io.nasti.aw.bits.size === streamSize,
         "write channel wrong size on stream")
  assert(!io.nasti.aw.valid || io.nasti.aw.bits.len === UInt(0) ||
         io.nasti.aw.bits.burst === BURST_FIXED,
         "write channel wrong burst type on stream")
  assert(!io.nasti.w.valid || io.nasti.w.bits.strb.andR,
         "write channel cannot take partial writes")
  // Read side: latch the transaction id and beat count on AR handshake, then
  // forward `len + 1` beats from stream.in onto the R channel.
  val read_id = Reg(io.nasti.ar.bits.id)
  val read_cnt = Reg(io.nasti.ar.bits.len)
  val reading = Reg(init = Bool(false))
  io.nasti.ar.ready := !reading
  io.nasti.r.valid := reading && io.stream.in.valid
  io.nasti.r.bits := io.stream.in.bits
  io.nasti.r.bits.resp := UInt(0)
  io.nasti.r.bits.id := read_id
  io.stream.in.ready := reading && io.nasti.r.ready
  when (io.nasti.ar.fire()) {
    read_id := io.nasti.ar.bits.id
    read_cnt := io.nasti.ar.bits.len
    reading := Bool(true)
  }
  when (io.nasti.r.fire()) {
    when (read_cnt === UInt(0)) {
      reading := Bool(false)
    } .otherwise {
      read_cnt := read_cnt - UInt(1)
    }
  }
  // Write side: accept AW when idle, stream W beats out, and issue a single
  // OKAY B response after the last beat.
  val write_id = Reg(io.nasti.aw.bits.id)
  val writing = Reg(init = Bool(false))
  val write_resp = Reg(init = Bool(false))
  io.nasti.aw.ready := !writing && !write_resp
  io.nasti.w.ready := writing && io.stream.out.ready
  io.stream.out.valid := writing && io.nasti.w.valid
  io.stream.out.bits := io.nasti.w.bits
  io.nasti.b.valid := write_resp
  io.nasti.b.bits.resp := UInt(0)
  io.nasti.b.bits.id := write_id
  when (io.nasti.aw.fire()) {
    write_id := io.nasti.aw.bits.id
    writing := Bool(true)
  }
  when (io.nasti.w.fire() && io.nasti.w.bits.last) {
    writing := Bool(false)
    write_resp := Bool(true)
  }
  when (io.nasti.b.fire()) { write_resp := Bool(false) }
}
/** Splits each `win`-bit input beat into `win/wout` output beats, emitting the
  * least-significant `wout` bits first (piece i covers bits
  * [wout*(i+1)-1 : wout*i]).  The output `last` is asserted only on the final
  * piece of an input beat that itself carried `last`. */
class StreamNarrower(win: Int, wout: Int) extends Module {
  require(win > wout, "Stream narrower input width must be larger than input width")
  require(win % wout == 0, "Stream narrower input width must be multiple of output width")
  val io = new Bundle {
    val in = Decoupled(new StreamChannel(win)).flip
    val out = Decoupled(new StreamChannel(wout))
  }
  val n_pieces = win / wout
  val buffer = Reg(Bits(width = win))
  // piece_idx selects the current slice; pkt_done pulses after the last slice.
  val (piece_idx, pkt_done) = Counter(io.out.fire(), n_pieces)
  val pieces = Vec.tabulate(n_pieces) { i => buffer(wout * (i + 1) - 1, wout * i) }
  val last_piece = (piece_idx === UInt(n_pieces - 1))
  val sending = Reg(init = Bool(false))
  val in_last = Reg(Bool())
  when (io.in.fire()) {
    buffer := io.in.bits.data
    in_last := io.in.bits.last
    sending := Bool(true)
  }
  when (pkt_done) { sending := Bool(false) }
  io.out.valid := sending
  io.out.bits.data := pieces(piece_idx)
  io.out.bits.last := in_last && last_piece
  // A new input beat is accepted only once all pieces of the previous one left.
  io.in.ready := !sending
}
/** Collects `wout/win` input beats into a single `wout`-bit output beat; beat i
  * lands in bits [win*(i+1)-1 : win*i], so the first beat fills the low bits.
  * The output `last` reflects the `last` flag of the final collected beat. */
class StreamExpander(win: Int, wout: Int) extends Module {
  require(win < wout, "Stream expander input width must be smaller than input width")
  require(wout % win == 0, "Stream narrower output width must be multiple of input width")
  val io = new Bundle {
    val in = Decoupled(new StreamChannel(win)).flip
    val out = Decoupled(new StreamChannel(wout))
  }
  val n_pieces = wout / win
  val buffer = Reg(Vec(n_pieces, UInt(width = win)))
  val last = Reg(Bool())
  // collecting: accumulating input beats; !collecting: holding a complete word
  // until the consumer takes it.
  val collecting = Reg(init = Bool(true))
  val (piece_idx, pkt_done) = Counter(io.in.fire(), n_pieces)
  when (io.in.fire()) { buffer(piece_idx) := io.in.bits.data }
  when (pkt_done) { last := io.in.bits.last; collecting := Bool(false) }
  when (io.out.fire()) { collecting := Bool(true) }
  io.in.ready := collecting
  io.out.valid := !collecting
  io.out.bits.data := buffer.toBits
  io.out.bits.last := last
}
object StreamUtils {
  /** Cross-connects two StreamIOs: a's input takes b's output and vice versa. */
  def connectStreams(a: StreamIO, b: StreamIO) {
    a.in <> b.out
    b.in <> a.out
  }
}
/** Mixed into hardware data types that can be moved over a fixed-width stream;
  * `nbits` is the serialized width in bits. */
trait Serializable {
  def nbits: Int
}
/** Serializes values of `typ` into `w`-bit chunks via a StreamNarrower; each
  * value is emitted as one packet (the narrower's input `last` is tied high). */
class Serializer[T <: Data with Serializable](w: Int, typ: T) extends Module {
  val io = new Bundle {
    val in = Decoupled(typ).flip
    val out = Decoupled(Bits(width = w))
  }
  val narrower = Module(new StreamNarrower(typ.nbits, w))
  narrower.io.in.bits.data := io.in.bits.toBits
  narrower.io.in.bits.last := Bool(true)
  narrower.io.in.valid := io.in.valid
  io.in.ready := narrower.io.in.ready
  io.out.valid := narrower.io.out.valid
  io.out.bits := narrower.io.out.bits.data
  narrower.io.out.ready := io.out.ready
}
/** Reassembles `w`-bit chunks into values of `typ` via a StreamExpander; the
  * expanded bits are reinterpreted as `typ` with `fromBits`. */
class Deserializer[T <: Data with Serializable](w: Int, typ: T) extends Module {
  val io = new Bundle {
    val in = Decoupled(Bits(width = w)).flip
    val out = Decoupled(typ)
  }
  val expander = Module(new StreamExpander(w, typ.nbits))
  expander.io.in.valid := io.in.valid
  expander.io.in.bits.data := io.in.bits
  expander.io.in.bits.last := Bool(true)
  io.in.ready := expander.io.in.ready
  io.out.valid := expander.io.out.valid
  io.out.bits := typ.cloneType.fromBits(expander.io.out.bits.data)
  expander.io.out.ready := io.out.ready
}
| masc-ucsc/cmpe220fall16 | riscv_cores/zscale_modified/junctions/src/main/scala/stream.scala | Scala | apache-2.0 | 5,827 |
package org.odfi.wsb.fwapp.lib.files.semantic
import org.odfi.wsb.fwapp.module.semantic.SemanticView
/** Semantic-UI file-upload widget: registers the client-side assets
  * (semantic-files.js + jquery-fileupload) and renders a multi-file picker
  * with "Select Files" / "Upload All" / validate buttons plus a file list. */
trait FileSemanticUpload extends SemanticView {
  // Register the JS/CSS this widget needs on any page that pulls in the
  // "filesemantic" library.
  this.addLibrary("filesemantic") {
    case (source, node) =>
      onNode(node) {
        script(createAssetsResolverURI("fwapp/lib/files/semantic-files.js")) {
        }
        defaultAssetsResolverScripts("fwapp/external/jquery-fileupload/9.19.1/js/",List("vendor/jquery.ui.widget.js","jquery.iframe-transport.js","jquery.fileupload.js"))
        stylesheet(createAssetsResolverURI("fwapp/external/jquery-fileupload/9.19.1/css/jquery.fileupload.css")) {
        }
      }
  }
  // Server-side handler for the "file.put" action.
  // NOTE(review): currently only logs — the actual persistence logic is
  // presumably elsewhere or unimplemented; confirm.
  this.registerNamedAction("file.put") {
    req =>
      println("Put Action")
  }
  /** Renders the upload widget; `fieldId` scopes the DOM ids and
    * `validationText` labels the submit/validate button.
    * NOTE(review): `finput` is never read — kept for the side effect of
    * building the hidden input node. */
  def semanticFilesInput(fieldId: String,validationText:String) = {
    div {
      // Hidden multi-file input; the visible buttons proxy clicks to it.
      val finput = s"#fileupload-$fieldId fileupload" :: input {
        +@("type" -> "file")
        fieldName("uploadedFile")
        +@("multiple" -> "true")
        +@("style" -> "display:none")
        currentNode.getId
        //+@("onchange" -> "fwapp.lib.files.updateFilesList($(this).parent(),this.files)")
      }
      "ui buttons" :: div {
        "ui primary button" :: div {
          text("Select Files")
          +@("onclick" -> "$(this).parent().parent().find('input').click()")
        }
        "or" :: div {
          data("text" -> "then")
        }
        "ui success button disabled upload-button" :: div {
          text("Upload All")
          +@("onclick" -> "fwapp.lib.files.uploadAll(this)")
        }
        "ui primary button disabled validate-button" :: div {
          text(validationText)
          +@("onclick" -> "fwapp.lib.files.submitUploadForm(this)")
        }
      }
      // Populated client-side with the selected/uploaded files.
      "files-content" :: div {
        "ui divided items" :: div {
        }
      }
    }
    /*semanticOnSubmitButton("Upload") {
      println("Upload done:"+withRequestParameter("uploadedFile"))
    }*/
  }
}
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 www.iReact.io
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.ireact.bioinformatics.part1.support
import scala.reflect.macros.whitebox.Context
/** Compile-time validation for `Spectrum` string literals: rejects anything
  * that is not a string literal, and rejects literals failing
  * `Spectrum.isValid`, so invalid spectra become compile errors. */
object SpectrumMacro {
  def applyMacro(c: Context)(value: c.Expr[String]): c.Expr[Spectrum] = {
    import c.universe._
    // Validate at compile time; non-literals cannot be checked here, so they
    // must go through the runtime Spectrum.from(String) instead.
    // NOTE(review): "seperated" in the error message is a typo ("separated") —
    // it is a compiler diagnostic string, left unchanged here.
    value.tree match {
      case Literal(stringConst) ⇒
        val literalValue = stringConst.value.toString
        if (!Spectrum.isValid(literalValue))
          c.abort(c.enclosingPosition, "Spectrum can only contain numbers composed out of digits seperated by spaces")
      case _ ⇒
        c.abort(c.enclosingPosition, "Spectrum macro only works on String Literals, use Spectrum.from(String) instead.")
    }
    // As state by Eugene Burmako in http://stackoverflow.com/questions/19170137/scala-using-private-constructor-in-a-macro
    // we cannot call a private method, so we do it via an unattractive method, called unsafe.
    // Didn't use the vampire method approach, that would loose the AnyVal benefit!
    reify {
      Spectrum.unsafeFrom(value.splice)
    }
    // If using UnsafeFrom this is a way to make it happen:
    //    q"""
    //      class UnsafeI extends chapters.DNAString.UnsafeFrom {
    //        def unsafeFromI(value: String): DNAString = unsafeFrom(value)
    //      }
    //      (new UnsafeI).unsafeFromI($value)
    //    """
  }
}
| samdebacker/BioinformaticsAlgorithmsPart1 | src/main/scala/io/ireact/bioinformatics/part1/support/SpectrumMacro.scala | Scala | mit | 2,431 |
// NOTE(review): verbatim listing from "Category Theory for Programmers"
// (ch. 1.6, snippet 12); the book build embeds this file as-is, so any change
// here alters the published text — edit with care.
val stmt = ("This statement is", false)
package peerchat.model
import akka.actor.Actor
import com.mongodb.casbah.Imports._
import com.mongodb.casbah.MongoClient
import xitrum.Log
import peerchat.Config
// Actor message asking DBManager to persist a chat payload (`msgObj`) together
// with the sender's name into the Mongo "dump" collection.
case class Dump(msgObj:Map[String,Any], name:String)
/** Mongo connection details and accessors shared by DBManager actors.
  * The client is created eagerly at object initialization. */
object DBManager {
  val NAME = "DBManager"
  val DATABASE = "peerchat"
  val COLLECTION = "dump"
  val mongoClient = MongoClient(Config.db.host, Config.db.port)
  /** Returns the "peerchat" database handle. */
  def getDB() ={
    mongoClient(DATABASE)
  }
  /** Returns the "dump" collection used for persisted chat messages. */
  def getDumpCollection() ={
    mongoClient(DATABASE)(COLLECTION)
  }
}
/** Actor that persists incoming [[Dump]] messages into Mongo and warns on
  * anything else.
  * NOTE(review): construction logs via the static `Log.debug` while `receive`
  * uses the mixed-in `log.warn` — presumably equivalent, but confirm and
  * unify. */
class DBManager extends Actor with Log {
  Log.debug("DBManager Initialized")
  def receive = {
    case Dump(msg, senderName) =>
      // casbah's implicit conversion turns the Map payload into a DBObject.
      val m = MongoDBObject("msg" -> msg.asDBObject, "senderName" -> senderName)
      DBManager.getDumpCollection.insert(m)
    case unexpected =>
      log.warn("Unexpected message at DBManager: " + unexpected)
  }
}
package by.pavelverk.hardwrite.core
import scala.concurrent.Future
/** Simple in-memory key/value store exposing a `Future`-based API so it can
  * stand in for an asynchronous repository in tests.
  *
  * Values must expose their own key via a `val id: Key` member (a structural
  * type — member access goes through reflection, fine for tests, slow on hot
  * paths).
  *
  * NOTE(review): `state` is a plain `var` with no synchronization, so
  * concurrent `save` calls can lose updates — intended for single-threaded
  * use only.
  */
class InMemoryStorage[Key, Value <: {val id: Key}] {
  // Current snapshot; always replaced wholesale by `save` so readers never
  // observe a partially-updated sequence.
  private var state: Seq[Value] = Nil

  /** Returns every stored value. */
  def getAll(): Future[Seq[Value]] =
    Future.successful(state)

  /** Returns the value with the given id, if present. */
  def get(id: Key): Future[Option[Value]] =
    Future.successful(state.find(_.id == id))

  /** Upserts `value` (replacing any existing value with the same id) and
    * returns it.  The new snapshot is built first and assigned in a single
    * write, so `state` never holds the intermediate "removed but not yet
    * re-added" sequence the original's two-step assignment exposed.
    */
  def save(value: Value): Future[Value] =
    Future.successful {
      state = state.filterNot(_.id == value.id) :+ value
      value
    }

  /** Returns the first stored value matching the predicate, if any. */
  def find(query: Value => Boolean): Future[Option[Value]] = {
    Future.successful(state.find(query))
  }
}
| VerkhovtsovPavel/BSUIR_Labs | Master/back/akka-http-rest-master/src/main/scala/by/pavelverk/hardwrite/core/InMemoryStorage.scala | Scala | mit | 591 |
/** Course demo of Scala's `assert`/`ensuring` contract helpers. */
object main extends App {
  /*
   * Assertions
   */
  /** Immutable element with a width and a height. */
  class MyElement(val width: Int, val height: Int)
  val e = new MyElement(1, 2)
  println(e.width)
  /** Throws AssertionError unless both elements share the same width. */
  def assertSameWidth(elem1: MyElement, elem2: MyElement): Unit = {
    assert(elem1.width == elem2.width)
    println("These two elements are of the same width!")
  }
  /** Returns an element one unit wider than `elem` when `myValue` is positive,
    * otherwise `elem` unchanged.
    *
    * Fix: the original attached `ensuring` to the else-branch block only and
    * its predicate never looked at the result, so the postcondition was
    * skipped for `myValue > 0` and checked nothing useful otherwise.  Here it
    * is applied to the method's actual result via a predicate on that result.
    */
  def expandWidthByValue(elem: MyElement, myValue: Int): MyElement = {
    val newElem = new MyElement(width = elem.width + 1, height = elem.height)
    val result = if (myValue > 0) newElem else elem
    result ensuring (res => res.width >= elem.width)
  }
  var e1 = new MyElement(width = 1, height = 2)
  val e2 = new MyElement(width = 1, height = 3)
  assertSameWidth(e1, e2)
  println(e1.width)
  // myValue = 0 is not positive, so e1 is returned unchanged.
  e1 = expandWidthByValue(elem = e1, 0)
  println(e1.width)
}
/*
* Facsimile: A Discrete-Event Simulation Library
* Copyright © 2004-2020, Michael J Allen.
*
* This file is part of Facsimile.
*
* Facsimile is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* Facsimile is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*
* You should have received a copy of the GNU Lesser General Public License along with Facsimile. If not, see
* http://www.gnu.org/licenses/lgpl.
*
* The developers welcome all comments, suggestions and offers of assistance. For further information, please visit the
* project home page at:
*
* http://facsim.org/
*
* Thank you for your interest in the Facsimile project!
*
* IMPORTANT NOTE: All patches (modifications to existing files and/or the addition of new files) submitted for
* inclusion as part of the official Facsimile code base, must comply with the published Facsimile Coding Standards. If
* your code fails to comply with the standard, then your patches will be rejected. For further information, please
* visit the coding standards at:
*
* http://facsim.org/Documentation/CodingStandards/
* =====================================================================================================================
* Scala source file belonging to the org.facsim.stats package.
*/
package org.facsim.stats
import org.facsim.requireValid
/**
 * Histogram class.
 *
 * Records the frequency of observed value ranges in ''bins''. Values that are lower than anticipated are stored in
 * an ''underflow'' bin; those that are higher than anticipated are stored in an ''overflow'' bin.
 *
 * @constructor Create a new Histogram class instance.
 *
 * @param minValue Minimum expected value. Observed values lower than this will be placed into an ''underflow'' bin.
 *
 * @param bins Number of regular bins in the histogram. Two additional bins, an ''underflow'' bin—storing values
 * less than '''minValue'''—and an ''overflow'' bin—storing values greater than '''minValue''' +
 * ('''bins''' * '''binWidth''')—will be included in addition to these bins. This argument must be a positive
 * integer or an exception will be thrown.
 *
 * @param binWidth Width of each regular bin in the histogram. This value must be greater than zero, or an exception
 * will be thrown.
 *
 * @tparam D Underlying data type to be stored by this class, which must be a type of number.
 *
 * @throws IllegalArgumentException if '''bins''' is zero or negative, or if '''binWidth''' is zero or negative.
 *
 * @since 0.0
 */
final class Histogram[D <: Number[D]](private val minValue: D, private val bins: Int, private val binWidth: D)
extends SummaryStatistics[D, Histogram[D]] {

  /*
   * Argument sanity checks.
   */
  requireValid(bins, bins > 0)
  requireValid(binWidth, binWidth > 0.0)

  /**
   * Frequency of values observed in each bin.
   *
   * @note Bin 0 is the ''underflow'' bin, which records the frequency of observed values less than the specified
   * minimum value. Similarly, bin (bins + 1) is the ''overflow'' bin, which records the frequency of observed values
   * that exceed (minValue + bins * binWidth).
   */
  private val frequency = new Array[Int](bins + 2)

  /**
   * Overflow bin number.
   */
  private val overflowBin = frequency.length - 1

  /**
   * Retrieve frequency observed in specified bin to date.
   *
   * @param bin Number of bin for which frequency sought. Bin number 0 is the underflow bin, bin number
   * ([[org.facsim.stats.Histogram!.length]] - 1) is the overflow bin. If an invalid bin number is passed, then an
   * exception will be thrown.
   *
   * @return Frequency of observations recorded for specified '''bin''' so far.
   *
   * @throws java.lang.ArrayIndexOutOfBoundsException if '''bin''' is outside of the range: [0,
   * [[org.facsim.stats.Histogram!.length]]).
   *
   * @since 0.0
   */
  @inline
  final def apply(bin: Int) = synchronized {
    frequency(bin) ensuring(_ >= 0)
  }

  /**
   * Retrieve number of bins, including underflow and overflow bins.
   *
   * @return Number of bins in the histogram including the underflow and overflow bins.
   *
   * @since 0.0
   */
  @inline
  final def length = frequency.length ensuring(_ >= 3)

  /**
   * @inheritdoc
   */
  protected[stats] final override def processObservation(value: Double): Unit = synchronized {
    /*
     * Determine which bin to place this observation in.
     *
     * Fix: the original referenced `minimumValue`, which does not exist; the constructor parameter is `minValue`.
     * Also, updates are now performed inside `synchronized` for consistency with `apply`, which already
     * synchronizes its reads of the same array.
     *
     * NOTE(review): `toInt` truncates toward zero, so observations in (minValue - binWidth, minValue) map to
     * bin 1 rather than the underflow bin — confirm whether flooring is the intended behavior here.
     */
    val bin = ((value - minValue) / binWidth).toInt + 1
    /*
     * Is this value in the underflow bin?
     */
    if(bin <= 0) frequency.update(0, frequency(0) + 1)
    /*
     * Otherwise, is this value in the overflow bin?
     */
    else if(bin >= overflowBin) frequency.update(overflowBin, frequency(overflowBin) + 1)
    /*
     * Otherwise, add it to the corresponding bin.
     */
    else frequency.update(bin, frequency(bin) + 1)
  }
}
| MichaelJAllen/facsimile | core/src/main/scala/org/facsim/stats/Histogram.scala | Scala | lgpl-3.0 | 5,245 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.catalog
import org.apache.spark.sql.catalyst.analysis.{FunctionAlreadyExistsException, NoSuchDatabaseException, NoSuchFunctionException, NoSuchTableException}
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.types.StructType
import org.apache.spark.util.ListenerBus
/**
 * Interface for the system catalog (of functions, partitions, tables, and databases).
 *
 * This is only used for non-temporary items, and implementations must be thread-safe as they
 * can be accessed in multiple threads. This is an external catalog because it is expected to
 * interact with external systems.
 *
 * Implementations should throw [[NoSuchDatabaseException]] when databases don't exist.
 *
 * The concrete `create*`/`drop*`/`rename*` operations are `final` templates that post
 * pre/post [[ExternalCatalogEvent]]s around a protected `do*` hook which subclasses implement.
 */
abstract class ExternalCatalog
  extends ListenerBus[ExternalCatalogEventListener, ExternalCatalogEvent] {
  import CatalogTypes.TablePartitionSpec

  /** Throws [[NoSuchDatabaseException]] if the database does not exist. */
  protected def requireDbExists(db: String): Unit = {
    if (!databaseExists(db)) {
      throw new NoSuchDatabaseException(db)
    }
  }

  /** Throws [[NoSuchTableException]] if the table does not exist. */
  protected def requireTableExists(db: String, table: String): Unit = {
    if (!tableExists(db, table)) {
      throw new NoSuchTableException(db = db, table = table)
    }
  }

  /** Throws [[NoSuchFunctionException]] if the function does not exist. */
  protected def requireFunctionExists(db: String, funcName: String): Unit = {
    if (!functionExists(db, funcName)) {
      throw new NoSuchFunctionException(db = db, func = funcName)
    }
  }

  /** Throws [[FunctionAlreadyExistsException]] if the function already exists. */
  protected def requireFunctionNotExists(db: String, funcName: String): Unit = {
    if (functionExists(db, funcName)) {
      throw new FunctionAlreadyExistsException(db = db, func = funcName)
    }
  }

  // --------------------------------------------------------------------------
  // Databases
  // --------------------------------------------------------------------------

  /** Creates a database, posting pre/post events around [[doCreateDatabase]]. */
  final def createDatabase(dbDefinition: CatalogDatabase, ignoreIfExists: Boolean): Unit = {
    val db = dbDefinition.name
    postToAll(CreateDatabasePreEvent(db))
    doCreateDatabase(dbDefinition, ignoreIfExists)
    postToAll(CreateDatabaseEvent(db))
  }

  /** Implementation hook for [[createDatabase]]. */
  protected def doCreateDatabase(dbDefinition: CatalogDatabase, ignoreIfExists: Boolean): Unit

  /** Drops a database, posting pre/post events around [[doDropDatabase]]. */
  final def dropDatabase(db: String, ignoreIfNotExists: Boolean, cascade: Boolean): Unit = {
    postToAll(DropDatabasePreEvent(db))
    doDropDatabase(db, ignoreIfNotExists, cascade)
    postToAll(DropDatabaseEvent(db))
  }

  /** Implementation hook for [[dropDatabase]]. */
  protected def doDropDatabase(db: String, ignoreIfNotExists: Boolean, cascade: Boolean): Unit

  /**
   * Alter a database whose name matches the one specified in `dbDefinition`,
   * assuming the database exists.
   *
   * Note: If the underlying implementation does not support altering a certain field,
   * this becomes a no-op.
   */
  def alterDatabase(dbDefinition: CatalogDatabase): Unit

  /** Returns the metadata of the given database, assuming it exists. */
  def getDatabase(db: String): CatalogDatabase

  /** Whether a database with the given name exists. */
  def databaseExists(db: String): Boolean

  /** Names of all databases. */
  def listDatabases(): Seq[String]

  /** Names of databases matching the given pattern. */
  def listDatabases(pattern: String): Seq[String]

  /** Sets the catalog's current database. */
  def setCurrentDatabase(db: String): Unit

  // --------------------------------------------------------------------------
  // Tables
  // --------------------------------------------------------------------------

  /** Creates a table, posting pre/post events around [[doCreateTable]]. */
  final def createTable(tableDefinition: CatalogTable, ignoreIfExists: Boolean): Unit = {
    val db = tableDefinition.database
    val name = tableDefinition.identifier.table
    postToAll(CreateTablePreEvent(db, name))
    doCreateTable(tableDefinition, ignoreIfExists)
    postToAll(CreateTableEvent(db, name))
  }

  /** Implementation hook for [[createTable]]. */
  protected def doCreateTable(tableDefinition: CatalogTable, ignoreIfExists: Boolean): Unit

  /** Drops a table, posting pre/post events around [[doDropTable]]. */
  final def dropTable(
      db: String,
      table: String,
      ignoreIfNotExists: Boolean,
      purge: Boolean): Unit = {
    postToAll(DropTablePreEvent(db, table))
    doDropTable(db, table, ignoreIfNotExists, purge)
    postToAll(DropTableEvent(db, table))
  }

  /** Implementation hook for [[dropTable]]. */
  protected def doDropTable(
      db: String,
      table: String,
      ignoreIfNotExists: Boolean,
      purge: Boolean): Unit

  /** Renames a table, posting pre/post events around [[doRenameTable]]. */
  final def renameTable(db: String, oldName: String, newName: String): Unit = {
    postToAll(RenameTablePreEvent(db, oldName, newName))
    doRenameTable(db, oldName, newName)
    postToAll(RenameTableEvent(db, oldName, newName))
  }

  /** Implementation hook for [[renameTable]]. */
  protected def doRenameTable(db: String, oldName: String, newName: String): Unit

  /**
   * Alter a table whose database and name match the ones specified in `tableDefinition`, assuming
   * the table exists. Note that, even though we can specify database in `tableDefinition`, it's
   * used to identify the table, not to alter the table's database, which is not allowed.
   *
   * Note: If the underlying implementation does not support altering a certain field,
   * this becomes a no-op.
   */
  def alterTable(tableDefinition: CatalogTable): Unit

  /**
   * Alter the schema of a table identified by the provided database and table name. The new schema
   * should still contain the existing bucket columns and partition columns used by the table. This
   * method will also update any Spark SQL-related parameters stored as Hive table properties (such
   * as the schema itself).
   *
   * @param db Database that table to alter schema for exists in
   * @param table Name of table to alter schema for
   * @param schema Updated schema to be used for the table (must contain existing partition and
   *               bucket columns)
   */
  def alterTableSchema(db: String, table: String, schema: StructType): Unit

  /** Returns the metadata of the given table, assuming it exists. */
  def getTable(db: String, table: String): CatalogTable

  /** Returns the metadata of the given table, or None if it does not exist. */
  def getTableOption(db: String, table: String): Option[CatalogTable]

  /** Whether the given table exists in the given database. */
  def tableExists(db: String, table: String): Boolean

  /** Names of all tables in the given database. */
  def listTables(db: String): Seq[String]

  /** Names of tables in the given database matching the given pattern. */
  def listTables(db: String, pattern: String): Seq[String]

  /**
   * Loads data into a table.
   *
   * @param isSrcLocal Whether the source data is local, as defined by the "LOAD DATA LOCAL"
   *                   HiveQL command.
   */
  def loadTable(
      db: String,
      table: String,
      loadPath: String,
      isOverwrite: Boolean,
      isSrcLocal: Boolean): Unit

  /**
   * Loads data into a partition.
   *
   * @param isSrcLocal Whether the source data is local, as defined by the "LOAD DATA LOCAL"
   *                   HiveQL command.
   */
  def loadPartition(
      db: String,
      table: String,
      loadPath: String,
      partition: TablePartitionSpec,
      isOverwrite: Boolean,
      inheritTableSpecs: Boolean,
      isSrcLocal: Boolean): Unit

  /** Loads data into dynamically-determined partitions (up to `numDP` of them). */
  def loadDynamicPartitions(
      db: String,
      table: String,
      loadPath: String,
      partition: TablePartitionSpec,
      replace: Boolean,
      numDP: Int): Unit

  // --------------------------------------------------------------------------
  // Partitions
  // --------------------------------------------------------------------------

  /** Creates the given partitions for the specified table, assuming it exists. */
  def createPartitions(
      db: String,
      table: String,
      parts: Seq[CatalogTablePartition],
      ignoreIfExists: Boolean): Unit

  /** Drops the given partitions of the specified table, assuming it exists. */
  def dropPartitions(
      db: String,
      table: String,
      parts: Seq[TablePartitionSpec],
      ignoreIfNotExists: Boolean,
      purge: Boolean,
      retainData: Boolean): Unit

  /**
   * Override the specs of one or many existing table partitions, assuming they exist.
   * This assumes index i of `specs` corresponds to index i of `newSpecs`.
   */
  def renamePartitions(
      db: String,
      table: String,
      specs: Seq[TablePartitionSpec],
      newSpecs: Seq[TablePartitionSpec]): Unit

  /**
   * Alter one or many table partitions whose specs that match those specified in `parts`,
   * assuming the partitions exist.
   *
   * Note: If the underlying implementation does not support altering a certain field,
   * this becomes a no-op.
   */
  def alterPartitions(
      db: String,
      table: String,
      parts: Seq[CatalogTablePartition]): Unit

  /** Returns the specified partition, assuming it exists. */
  def getPartition(db: String, table: String, spec: TablePartitionSpec): CatalogTablePartition

  /**
   * Returns the specified partition or None if it does not exist.
   */
  def getPartitionOption(
      db: String,
      table: String,
      spec: TablePartitionSpec): Option[CatalogTablePartition]

  /**
   * List the names of all partitions that belong to the specified table, assuming it exists.
   *
   * For a table with partition columns p1, p2, p3, each partition name is formatted as
   * `p1=v1/p2=v2/p3=v3`. Each partition column name and value is an escaped path name, and can be
   * decoded with the `ExternalCatalogUtils.unescapePathName` method.
   *
   * The returned sequence is sorted as strings.
   *
   * A partial partition spec may optionally be provided to filter the partitions returned, as
   * described in the `listPartitions` method.
   *
   * @param db database name
   * @param table table name
   * @param partialSpec partition spec
   */
  def listPartitionNames(
      db: String,
      table: String,
      partialSpec: Option[TablePartitionSpec] = None): Seq[String]

  /**
   * List the metadata of all partitions that belong to the specified table, assuming it exists.
   *
   * A partial partition spec may optionally be provided to filter the partitions returned.
   * For instance, if there exist partitions (a='1', b='2'), (a='1', b='3') and (a='2', b='4'),
   * then a partial spec of (a='1') will return the first two only.
   *
   * @param db database name
   * @param table table name
   * @param partialSpec partition spec
   */
  def listPartitions(
      db: String,
      table: String,
      partialSpec: Option[TablePartitionSpec] = None): Seq[CatalogTablePartition]

  /**
   * List the metadata of partitions that belong to the specified table, assuming it exists, that
   * satisfy the given partition-pruning predicate expressions.
   *
   * @param db database name
   * @param table table name
   * @param predicates partition-pruning predicates
   * @param defaultTimeZoneId default timezone id to parse partition values of TimestampType
   */
  def listPartitionsByFilter(
      db: String,
      table: String,
      predicates: Seq[Expression],
      defaultTimeZoneId: String): Seq[CatalogTablePartition]

  // --------------------------------------------------------------------------
  // Functions
  // --------------------------------------------------------------------------

  /** Creates a function, posting pre/post events around [[doCreateFunction]]. */
  final def createFunction(db: String, funcDefinition: CatalogFunction): Unit = {
    val name = funcDefinition.identifier.funcName
    postToAll(CreateFunctionPreEvent(db, name))
    doCreateFunction(db, funcDefinition)
    postToAll(CreateFunctionEvent(db, name))
  }

  /** Implementation hook for [[createFunction]]. */
  protected def doCreateFunction(db: String, funcDefinition: CatalogFunction): Unit

  /** Drops a function, posting pre/post events around [[doDropFunction]]. */
  final def dropFunction(db: String, funcName: String): Unit = {
    postToAll(DropFunctionPreEvent(db, funcName))
    doDropFunction(db, funcName)
    postToAll(DropFunctionEvent(db, funcName))
  }

  /** Implementation hook for [[dropFunction]]. */
  protected def doDropFunction(db: String, funcName: String): Unit

  /** Renames a function, posting pre/post events around [[doRenameFunction]]. */
  final def renameFunction(db: String, oldName: String, newName: String): Unit = {
    postToAll(RenameFunctionPreEvent(db, oldName, newName))
    doRenameFunction(db, oldName, newName)
    postToAll(RenameFunctionEvent(db, oldName, newName))
  }

  /** Implementation hook for [[renameFunction]]. */
  protected def doRenameFunction(db: String, oldName: String, newName: String): Unit

  /** Returns the metadata of the given function, assuming it exists. */
  def getFunction(db: String, funcName: String): CatalogFunction

  /** Whether the given function exists in the given database. */
  def functionExists(db: String, funcName: String): Boolean

  /** Names of functions in the given database matching the given pattern. */
  def listFunctions(db: String, pattern: String): Seq[String]

  /** Delivers an event to a single listener (required by [[ListenerBus]]). */
  override protected def doPostEvent(
      listener: ExternalCatalogEventListener,
      event: ExternalCatalogEvent): Unit = {
    listener.onEvent(event)
  }
}
| wangyixiaohuihui/spark2-annotation | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/ExternalCatalog.scala | Scala | apache-2.0 | 12,749 |
package com.github.tarao.nonempty.collection
import scala.collection.immutable.WrappedString
/**
 * Methods inherited from `StringOps` that preserve non-emptiness.
 *
 * Each method requires implicit evidence `coll: C => WrappedString` that the
 * underlying collection is string-like; the result is re-wrapped via
 * `unsafeApply` as a `NonEmpty[Char, WrappedString]`.
 */
trait StringOps[+A, +C <: Iterable[A]] extends Any {
  self: NonEmpty[A, C] =>

  /** Returns a new string containing the chars from this string
    * followed by the chars from the right hand operand.
    *
    * @param suffix the string to append.
    * @return a new string which contains all chars
    *         of this string followed by all chars of `suffix`.
    * @see [[scala.collection.StringOps!.concat(suffix:String)*]]
    */
  @inline final def concat(suffix: String)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap.concat(suffix))

  /** Alias for `concat` */
  @inline final def ++(suffix: String)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] = concat(suffix)

  /** A copy of the string with an element prepended
    * @see [[scala.collection.StringOps!.prepended(c:Char)*]]
    */
  def prepended(c: Char)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap.prepended(c))

  /** Alias for `prepended` */
  @inline final def +:(c: Char)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] = prepended(c)

  /** A copy of the string with an element prepended
    * @see [[scala.collection.StringOps!.prependedAll(prefix:String)*]]
    */
  def prependedAll(prefix: String)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](prefix + coll(value).unwrap)

  /** Alias for `prependedAll` */
  @inline final def ++: (prefix: String)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] = prependedAll(prefix)

  /** A copy of the string with an element appended
    * @see [[scala.collection.StringOps!.appended(c:Char)*]]
    */
  def appended(c: Char)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap.appended(c))

  /** Alias for `appended` */
  @inline final def :+(c: Char)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] = appended(c)

  /** A copy of the string with another string appended
    * @see [[scala.collection.StringOps!.appendedAll(suffix:String)*]]
    */
  @inline final def appendedAll(suffix: String)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap + suffix)

  /** Alias for `appendedAll` */
  @inline final def :++ (suffix: String)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] = appendedAll(suffix)

  /** Returns a string with a char appended until a given target length
    * is reached.
    *
    * @param len the target length
    * @param elem the padding value
    * @return a string consisting of this string followed by the
    *         minimal number of occurrences of `elem` so that the
    *         resulting string has a length of at least `len`.
    */
  def padTo(len: Int, elem: Char)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap.padTo(len, elem))

  /** A copy of this string with one single replaced element.
    * @param index the position of the replacement
    * @param elem the replacing element
    * @return a new string which is a copy of this string with the element at position `index` replaced by `elem`.
    * @throws scala.IndexOutOfBoundsException if `index` does not satisfy `0 <= index < length`.
    * @see [[scala.collection.StringOps!.updated]]
    */
  def updated(index: Int, elem: Char)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap.updated(index, elem))

  /** Return the current string concatenated `n` times.
    *
    * NOTE(review): for `n <= 0` the underlying result is the empty string,
    * which `unsafeApply` wraps as `NonEmpty` anyway — confirm callers never
    * pass non-positive `n`.
    */
  def *(n: Int)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap * n)

  /** Returns this string with first character converted to upper case.
    * If the first character of the string is capitalized, it is
    * returned unchanged. This method does not convert characters
    * outside the Basic Multilingual Plane (BMP).
    * @see [[scala.collection.StringOps!.capitalize]]
    */
  def capitalize(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap.capitalize)

  /** Uses the underlying string as a pattern (in a fashion similar to
    * printf in C), and uses the supplied arguments to fill in the
    * holes.
    *
    * The interpretation of the formatting patterns is described in
    * `java.util.Formatter`, with the addition that classes deriving
    * from `ScalaNumber` (such as [[scala.BigInt]] and
    * [[scala.BigDecimal]]) are unwrapped to pass a type which
    * `Formatter` understands.
    *
    * @param args the arguments used to instantiating the pattern.
    * @throws scala.IllegalArgumentException
    * @see [[scala.collection.StringOps!.format]]
    */
  def format(args: Any*)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap.format(args: _*))

  /** Like `format(args*)` but takes an initial `Locale` parameter
    * which influences formatting as in `java.lang.String`'s format.
    *
    * The interpretation of the formatting patterns is described in
    * `java.util.Formatter`, with the addition that classes deriving
    * from `ScalaNumber` (such as `scala.BigInt` and
    * `scala.BigDecimal`) are unwrapped to pass a type which
    * `Formatter` understands.
    *
    * @param l an instance of `java.util.Locale`
    * @param args the arguments used to instantiating the pattern.
    * @throws scala.IllegalArgumentException
    * @see [[scala.collection.StringOps!.formatLocal]]
    */
  def formatLocal(l: java.util.Locale, args: Any*)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap.formatLocal(l, args: _*))

  /** Converts all of the characters in this `String` to lower
    * case using the rules of the default locale. This is equivalent to calling
    * `toLowerCase(Locale.getDefault())`.
    *
    * Note: This method is locale sensitive, and may produce
    * unexpected results if used for strings that are intended to be
    * interpreted locale independently. Examples are programming
    * language identifiers, protocol keys, and HTML tags. For
    * instance, `"TITLE".toLowerCase()` in a Turkish locale returns
    * `"t\u005Cu0131tle"`, where '\u005Cu0131' is the LATIN SMALL
    * LETTER DOTLESS I character. To obtain correct results for
    * locale insensitive strings, use `toLowerCase(Locale.ROOT)`.
    *
    * @return the `String`, converted to lowercase.
    */
  def toLowerCase()(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap.toLowerCase)

  /** Converts all of the characters in this `String` to lower case
    * using the rules of the given `Locale`. Case mapping is based on
    * the Unicode Standard version specified by the
    * `java.lang.Character` class. Since case mappings are not always
    * 1:1 char mappings, the resulting `String` may be a different
    * length than the original `String`.
    *
    * @param locale use the case transformation rules for this locale
    * @return the `String`, converted to lowercase.
    */
  def toLowerCase(locale: java.util.Locale)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap.toLowerCase(locale))

  /** Converts all of the characters in this `String` to upper
    * case using the rules of the default locale. This method is equivalent to
    * `toUpperCase(Locale.getDefault())`.
    *
    * Note: This method is locale sensitive, and may produce
    * unexpected results if used for strings that are intended to be
    * interpreted locale independently. Examples are programming
    * language identifiers, protocol keys, and HTML tags. For
    * instance, `"title".toUpperCase()` in a Turkish locale returns
    * `"T\u005Cu0130TLE"`, where '\u005Cu0130' is the LATIN CAPITAL
    * LETTER I WITH DOT ABOVE character. To obtain correct results
    * for locale insensitive strings, use `toUpperCase(Locale.ROOT)`.
    *
    * @return the `String`, converted to uppercase.
    */
  def toUpperCase()(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap.toUpperCase)

  /** Converts all of the characters in this `String` to upper case
    * using the rules of the given `Locale`. Case mapping is based on
    * the Unicode Standard version specified by the
    * `java.lang.Character` class. Since case mappings are not always
    * 1:1 char mappings, the resulting `String` may be a different
    * length than the original `String`.
    *
    * @param locale use the case transformation rules for this locale
    * @return the `String`, converted to uppercase.
    */
  def toUpperCase(locale: java.util.Locale)(implicit
    coll: C => WrappedString
  ): NonEmpty[Char, WrappedString] =
    unsafeApply[Char, WrappedString](coll(value).unwrap.toUpperCase(locale))

  // We are not providing `reverse` and `grouped` here because it
  // conflicts with `SeqOps.reverse` and `IterableOps.grouped`.
}
| tarao/nonempty-scala | src/main/scala/com/github/tarao/nonempty/collection/StringOps.scala | Scala | mit | 9,781 |
import scala.tools.nsc.doc.model._
import scala.tools.nsc.doc.model.diagram._
import scala.tools.partest.ScaladocModelTest
// Scaladoc partest: verifies that content diagrams include inherited nodes.
object Test extends ScaladocModelTest {

  // Source compiled by the scaladoc test harness; the string content is the
  // fixture under test and must not be altered.
  override def code = """
        package scala.test.scaladoc.diagrams.inherited.nodes {

          /** @contentDiagram
            * @inheritanceDiagram hideDiagram */
          trait T1 {
            trait A1
            trait A2 extends A1
            trait A3 extends A2
          }

          /** @contentDiagram
            * @inheritanceDiagram hideDiagram */
          trait T2 extends T1 {
            trait B1 extends A1
            trait B2 extends A2 with B1
            trait B3 extends A3 with B2
          }

          /** @contentDiagram
            * @inheritanceDiagram hideDiagram */
          trait T3 {
            self: T1 with T2 =>
            trait C1 extends B1
            trait C2 extends B2 with C1
            trait C3 extends B3 with C2
          }

          /** @contentDiagram
            * @inheritanceDiagram hideDiagram */
          trait T4 extends T3 with T2 with T1 {
            trait D1 extends C1
            trait D2 extends C2 with D1
            trait D3 extends C3 with D2
          }
        }
    """

  // diagrams must be started. In case there's an error with dot, it should not report anything
  def scaladocSettings = "-diagrams"

  def testModel(rootPackage: Package) = {
    // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s))
    import access._

    // base package
    //  Assert we have 7 nodes and 6 edges
    val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("diagrams")._package("inherited")._package("nodes")

    // Asserts that trait `t`'s content diagram has exactly the expected
    // number of nodes and (summed) edges.
    def checkDiagram(t: String, nodes: Int, edges: Int) = {
      // trait T1
      val T = base._trait(t)
      val TDiag = T.contentDiagram.get
      assert(TDiag.nodes.length == nodes, t + ": " + TDiag.nodes + ".length == " + nodes)
      assert(TDiag.edges.map(_._2.length).sum == edges, t + ": " + TDiag.edges.mkString("List(\\n", ",\\n", "\\n)") + ".map(_._2.length).sum == " + edges)
    }

    checkDiagram("T1", 3, 2)
    checkDiagram("T2", 6, 7)
    checkDiagram("T3", 3, 2)
    checkDiagram("T4", 12, 17)
  }
}
| lrytz/scala | test/scaladoc/run/diagrams-inherited-nodes.scala | Scala | apache-2.0 | 2,260 |
package forimpatient.chapter14
/**
* Created by Iryna Kharaborkina on 8/12/16.
*
* Solution to the Chapter 14 Exercise 06 'Scala for the Impatient' by Horstmann C.S.
*
* A better way of modeling such trees is with case classes. Let’s start with binary trees.
* sealed abstract class BinaryTree
* case class Leaf(value: Int) extends BinaryTree
* case class Node(left: BinaryTree, right: BinaryTree) extends BinaryTree
* Write a function to compute the sum of all elements in the leaves.
*/
object Exercise06 extends App {

  /** A binary tree whose values live only in the leaves. */
  sealed abstract class BinaryTree
  case class Leaf(value: Int) extends BinaryTree
  case class Node(left: BinaryTree, right: BinaryTree) extends BinaryTree

  /** Sum of all leaf values; a `null` subtree contributes 0. */
  def leafSum(tree: BinaryTree): Int = tree match {
    case Leaf(v)    => v
    case Node(l, r) => leafSum(l) + leafSum(r)
    case _          => 0
  }

  println("Chapter 14 Exercise 06")
  println(leafSum(Node(Node(Leaf(3), Leaf(8)), Node(Leaf(2), Node(Leaf(5), null)))))
}
| Kiryna/Scala-for-the-Impatient | src/forimpatient/chapter14/Exercise06.scala | Scala | apache-2.0 | 984 |
package com.wlangiewicz.xbot.util
import com.wlangiewicz.xbot.test.UnitSpec
// Unit tests for SimpleSpreadOfferRateEstimator. Rates are fixed-point longs:
// a decimal rate multiplied by BotUtil.BASE.
// NOTE(review): most cases scale with `(13.36 * BotUtil.BASE).toLong` (Double
// arithmetic), while one uses BigDecimal — Double rounding could make these
// fixtures fragile; confirm this is deliberate.
class SimpleSpreadOfferRateEstimatorTest extends UnitSpec {

  "An OfferRateEstimator" should "overbid this BID offer" in {
    val e = new SimpleSpreadOfferRateEstimator(0.005, 0.01, 1000000)
    val estimate = e.estimateBidRate((13.36 * BotUtil.BASE).toLong, (13.67 * BotUtil.BASE).toLong)
    assert(BotUtil.rateToLong(13.37) === estimate)
  }

  it should "underbid BID offer" in {
    val e = new SimpleSpreadOfferRateEstimator(0.005, 0.01, 1000000)
    val estimate = e.estimateBidRate((BigDecimal("13.55") * BotUtil.BASE).toLong, (BigDecimal("13.67") * BotUtil.BASE).toLong)
    assert(BotUtil.rateToLong(13.39000000) === estimate)
  }

  it should "underbid this ASK offer" in {
    val e = new SimpleSpreadOfferRateEstimator(0.005, 0.01, 1000000)
    val estimate = e.estimateAskRate((13.30 * BotUtil.BASE).toLong, (13.67 * BotUtil.BASE).toLong)
    assert(BotUtil.rateToLong(13.66) === estimate)
  }

  it should "overbid this ASK offer" in {
    val e = new SimpleSpreadOfferRateEstimator(0.005, 0.01, 1000000)
    val estimate = e.estimateAskRate((13.55 * BotUtil.BASE).toLong, (13.67 * BotUtil.BASE).toLong)
    assert(BotUtil.rateToLong(13.83000000) === estimate)
  }

  it should "estimate this ASK correctly" in {
    val e = new SimpleSpreadOfferRateEstimator(0.005, 0.01, 1000000)
    val estimate = e.estimateAskRate((13.50 * BotUtil.BASE).toLong, (13.70 * BotUtil.BASE).toLong)
    assert(BotUtil.rateToLong(13.69000000) === estimate)
  }

  // Different spread/fee parameters and a much smaller amount limit.
  it should "estimate BID with small spread" in {
    val e = new SimpleSpreadOfferRateEstimator(0.0049, 0.012, 10000)
    val estimate = e.estimateBidRate(BotUtil.rateToLong(1229.0000), BotUtil.rateToLong(1229.9999))
    assert(BotUtil.rateToLong(1200.47990000) === estimate)
  }
}
| wlk/xbot | src/test/scala/com/wlangiewicz/xbot/util/SimpleSpreadOfferRateEstimatorTest.scala | Scala | apache-2.0 | 1,824 |
package com.twitter.finagle.thrift
import com.twitter.finagle.context.Contexts
import com.twitter.finagle.tracing.Trace
import com.twitter.finagle.util.ByteArrays
import com.twitter.finagle.{Service, SimpleFilter, Dtab, Dentry}
import com.twitter.io.Buf
import com.twitter.util.Future
import java.util.ArrayList
import org.apache.thrift.protocol.TProtocolFactory
/**
* TTwitterFilter implements the upnegotiated TTwitter transport, which
* has some additional features beyond TFramed:
*
* - Dapper-style RPC tracing
* - Passing client IDs
* - Request contexts
* - Name delegation
*
* @param isUpgraded Whether this connection is with a server that
* has been upgraded to TTwitter
*/
private[thrift] class TTwitterClientFilter(
    serviceName: String,
    isUpgraded: Boolean,
    clientId: Option[ClientId],
    protocolFactory: TProtocolFactory)
  extends SimpleFilter[ThriftClientRequest, Array[Byte]]
{
  // Pre-encoded UTF-8 form of the client id, computed once per filter instance.
  private[this] val clientIdBuf = clientId map { id => Buf.Utf8(id.name) }

  /**
   * Produces an upgraded TTwitter ThriftClientRequest based on Trace,
   * ClientId, and Dtab state.
   */
  private[this] def mkTTwitterRequest(baseRequest: ThriftClientRequest): ThriftClientRequest = {
    val header = new thrift.RequestHeader

    clientId match {
      case Some(clientId) =>
        header.setClient_id(clientId.toThrift)
      case None =>
    }

    // Propagate Dapper-style trace identifiers into the request header.
    val traceId = Trace.id
    header.setSpan_id(traceId.spanId.toLong)
    traceId._parentId.foreach { id => header.setParent_span_id(id.toLong) }
    header.setTrace_id(traceId.traceId.toLong)
    header.setFlags(traceId.flags.toLong)

    traceId.sampled match {
      case Some(s) => header.setSampled(s)
      case None => header.unsetSampled()
    }

    // Marshal all broadcast request contexts into the header, except the
    // client-id context, which is handled separately below.
    val contexts = Contexts.broadcast.marshal().iterator
    val ctxs = new ArrayList[thrift.RequestContext]()

    if (contexts.hasNext) {
      while (contexts.hasNext) {
        val (k, buf) = contexts.next()

        // Note: we need to skip the caller-provided client id here,
        // since the existing value is derived from whatever code
        // calls into here. This should never happen in practice;
        // however if the ClientIdContext handler failed to load for
        // some reason, a pass-through context would be used instead.
        if (k != ClientId.clientIdCtx.marshalId) {
          val c = new thrift.RequestContext(
            Buf.ByteBuffer.Owned.extract(k), Buf.ByteBuffer.Owned.extract(buf))
          ctxs.add(c)
        }
      }
    }

    // Append this filter's own client id (if configured) as a context entry,
    // so the server sees a single authoritative client-id context.
    clientIdBuf match {
      case Some(buf) =>
        val ctx = new thrift.RequestContext(
          Buf.ByteBuffer.Owned.extract(ClientId.clientIdCtx.marshalId),
          Buf.ByteBuffer.Owned.extract(buf))
        ctxs.add(ctx)
      case None => // skip
    }

    if (!ctxs.isEmpty)
      header.setContexts(ctxs)

    // Forward any locally-scoped name delegations (Dtab.local) to the server.
    val dtab = Dtab.local
    if (dtab.nonEmpty) {
      val delegations = new ArrayList[thrift.Delegation](dtab.size)
      for (Dentry(src, dst) <- dtab)
        delegations.add(new thrift.Delegation(src.show, dst.show))
      header.setDelegations(delegations)
    }

    // Upgraded wire format: serialized RequestHeader prepended to the
    // original thrift message bytes.
    new ThriftClientRequest(
      ByteArrays.concat(
        OutputBuffer.messageToArray(header, protocolFactory),
        baseRequest.message
      ),
      baseRequest.oneway
    )
  }

  def apply(request: ThriftClientRequest,
    service: Service[ThriftClientRequest, Array[Byte]]
  ): Future[Array[Byte]] = {
    // Create a new span identifier for this request.
    val msg = new InputBuffer(request.message, protocolFactory)().readMessageBegin()
    Trace.recordRpc(msg.name)

    // Only wrap the request in the TTwitter header when the server has been
    // upgraded; otherwise pass the plain TFramed request through.
    val thriftRequest =
      if (isUpgraded)
        mkTTwitterRequest(request)
      else
        request

    val reply = service(thriftRequest)

    if (thriftRequest.oneway) {
      // Oneway requests don't contain replies, so they can't be traced.
      reply
    } else {
      reply map { response =>
        if (isUpgraded) {
          // Peel off the ResponseHeader.
          InputBuffer.peelMessage(response, new thrift.ResponseHeader, protocolFactory)
        } else
          response
      }
    }
  }
}
| latur19318/finagle | finagle-thrift/src/main/scala/com/twitter/finagle/thrift/TTwitterClientFilter.scala | Scala | apache-2.0 | 4,102 |
import scala.quoted._
import scala.quoted.staging._
object Test:
  given Toolbox = Toolbox.make(getClass.getClassLoader)
  def main(args: Array[String]): Unit =
    // Stages code that declares a mutable cell `z` initialized to `zero`,
    // then splices `succ` applied to a quoted reference to that cell.
    def reduce[T: Type](using QuoteContext)(succ: Expr[T] => Expr[T], zero: Expr[T]): Expr[T] = '{
      var z = $zero
      ${ succ('z) }
    }
    // Instantiates `reduce` at Int with successor x => x + 1 and zero 0.
    def resCode2(using QuoteContext): Expr[Int] =
      reduce[Int](x => '{$x + 1}, '{0})
    // Prints the generated code; the run-staging test harness checks stdout.
    println(withQuoteContext(resCode2.show))
| som-snytt/dotty | tests/run-staging/abstract-int-quote.scala | Scala | apache-2.0 | 445 |
package org.awong
/** Package object for `org.awong.searching`; intentionally empty — a
 *  placeholder for package-wide aliases and helpers.
 */
package object searching {
}
package mesosphere.marathon
package api.v2
import java.util.Collections
import akka.actor.ActorSystem
import akka.stream.{ActorMaterializer, ActorMaterializerSettings}
import mesosphere.UnitTest
import mesosphere.marathon.api.{RestResource, TaskKiller, TestAuthFixture}
import mesosphere.marathon.test.JerseyTest
import scala.concurrent.ExecutionContext.Implicits.global
import mesosphere.marathon.core.deployment.{DeploymentPlan, DeploymentStep}
import mesosphere.marathon.core.group.GroupManager
import mesosphere.marathon.core.health.HealthCheckManager
import mesosphere.marathon.core.instance.{Instance, TestInstanceBuilder}
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.core.task.termination.KillService
import mesosphere.marathon.core.task.tracker.InstanceTracker
import mesosphere.marathon.plugin.auth.Identity
import mesosphere.marathon.state.PathId.StringPathId
import mesosphere.marathon.state._
import mesosphere.marathon.test.GroupCreation
import org.mockito.Matchers
import org.mockito.Mockito._
import scala.collection.immutable.Seq
import scala.concurrent.Future
import scala.concurrent.duration._
/**
 * Unit tests for [[TasksResource]]: task listing (JSON and plain text),
 * task killing with the scale/force/wipe flag combinations, and the
 * authentication / authorization behaviour of each endpoint.
 */
class TasksResourceTest extends UnitTest with GroupCreation with JerseyTest {
  // Per-test container of mocks plus the TasksResource under test; each test
  // instantiates it fresh via `new Fixture`.
  case class Fixture(
      auth: TestAuthFixture = new TestAuthFixture,
      instanceTracker: InstanceTracker = mock[InstanceTracker],
      taskKiller: TaskKiller = mock[TaskKiller],
      config: MarathonConf = mock[MarathonConf],
      groupManager: GroupManager = mock[GroupManager],
      healthCheckManager: HealthCheckManager = mock[HealthCheckManager],
      implicit val identity: Identity = mock[Identity]) {
    val killService = mock[KillService]

    val taskResource: TasksResource = new TasksResource(
      instanceTracker,
      taskKiller,
      config,
      groupManager,
      healthCheckManager,
      auth.auth,
      auth.auth
    )
  }

  "TasksResource" should {
    "list (txt) tasks with less ports than the current app version" in new Fixture {
      // Regression test for #234
      Given("one app with one task with less ports than required")
      val app = AppDefinition("/foo".toRootPath, portDefinitions = Seq(PortDefinition(0), PortDefinition(0)), cmd = Some("sleep"))

      val instance = TestInstanceBuilder.newBuilder(app.id).addTaskRunning().getInstance()

      config.zkTimeoutDuration returns 5.seconds

      val tasksByApp = InstanceTracker.InstancesBySpec.forInstances(instance)
      instanceTracker.instancesBySpec returns Future.successful(tasksByApp)

      val rootGroup = createRootGroup(apps = Map(app.id -> app))
      groupManager.rootGroup() returns rootGroup

      // Precondition for the regression: fewer allocated host ports than
      // service ports must not crash the txt rendering.
      assert(app.servicePorts.size > instance.appTask.status.networkInfo.hostPorts.size)

      When("Getting the txt tasks index")
      val response = asyncRequest { r => taskResource.indexTxt(auth.request, r) }

      Then("The status should be 200")
      response.getStatus shouldEqual 200
    }

    "list apps when there are no apps" in new Fixture {
      // Regression test for #4932
      Given("no apps")
      config.zkTimeoutDuration returns 5.seconds
      instanceTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.empty)
      groupManager.apps(any) returns Map.empty

      When("Getting the tasks index")
      val response = asyncRequest { r =>
        taskResource.indexJson("status", new java.util.ArrayList[String], auth.request, r)
      }

      Then("The status should be 200")
      response.getStatus shouldEqual 200
    }

    "killTasks" in new Fixture {
      Given("two apps and 1 task each")
      val app1 = "/my/app-1".toRootPath
      val app2 = "/my/app-2".toRootPath

      val instance1 = TestInstanceBuilder.newBuilder(app1).addTaskStaged().getInstance()
      val instance2 = TestInstanceBuilder.newBuilder(app2).addTaskStaged().getInstance()

      val (taskId1, _) = instance1.tasksMap.head
      val (taskId2, _) = instance2.tasksMap.head

      val body = s"""{"ids": ["${taskId1.idString}", "${taskId2.idString}"]}"""
      val bodyBytes = body.toCharArray.map(_.toByte)

      config.zkTimeoutDuration returns 5.seconds
      instanceTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.forInstances(instance1, instance2))
      taskKiller.kill(any, any, any)(any) returns Future.successful(Seq.empty[Instance])
      groupManager.app(app1) returns Some(AppDefinition(app1))
      groupManager.app(app2) returns Some(AppDefinition(app2))

      When("we ask to kill both tasks")
      val response = asyncRequest { r =>
        taskResource.killTasks(scale = false, force = false, wipe = false, body = bodyBytes, auth.request, r)
      }

      Then("The response should be OK")
      response.getStatus shouldEqual 200

      And("The response is the list of killed tasks")
      response.getEntity shouldEqual """{"tasks":[]}"""

      And("Both tasks should be requested to be killed")
      verify(taskKiller).kill(Matchers.eq(app1), any, any)(any)
      verify(taskKiller).kill(Matchers.eq(app2), any, any)(any)

      And("nothing else should be called on the TaskKiller")
      noMoreInteractions(taskKiller)
    }

    // Pod instances are not apps: groupManager.app returns None, so the
    // resource must skip them rather than delegate to the TaskKiller.
    "try to kill pod instances" in new Fixture {
      Given("two apps and 1 task each")
      val pod1 = "/pod".toRootPath

      val instance = TestInstanceBuilder.newBuilder(pod1).addTaskRunning(Some("container1")).getInstance()

      val (container, _) = instance.tasksMap.head

      val body = s"""{"ids": ["${container.idString}"]}"""
      val bodyBytes = body.toCharArray.map(_.toByte)

      config.zkTimeoutDuration returns 5.seconds
      instanceTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.forInstances(instance))
      taskKiller.kill(any, any, any)(any) returns Future.successful(Seq.empty[Instance])
      groupManager.app(any) returns None

      When("we ask to kill the pod container")
      val response = asyncRequest { r =>
        taskResource.killTasks(scale = false, force = false, wipe = false, body = bodyBytes, auth.request, r)
      }

      Then("The response should be OK")
      response.getStatus shouldEqual 200

      And("No task should be called on the TaskKiller")
      noMoreInteractions(taskKiller)
    }

    "killTasks with force" in new Fixture {
      Given("two apps and 1 task each")
      val app1 = "/my/app-1".toRootPath
      val app2 = "/my/app-2".toRootPath

      val instance1 = TestInstanceBuilder.newBuilder(app1).addTaskRunning().getInstance()
      val instance2 = TestInstanceBuilder.newBuilder(app2).addTaskStaged().getInstance()

      val (taskId1, _) = instance1.tasksMap.head
      val (taskId2, _) = instance2.tasksMap.head

      val body = s"""{"ids": ["${taskId1.idString}", "${taskId2.idString}"]}"""
      val bodyBytes = body.toCharArray.map(_.toByte)
      val deploymentPlan = new DeploymentPlan("plan", createRootGroup(), createRootGroup(), Seq.empty[DeploymentStep], Timestamp.zero)

      config.zkTimeoutDuration returns 5.seconds
      instanceTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.forInstances(instance1, instance2))
      taskKiller.killAndScale(any, any)(any) returns Future.successful(deploymentPlan)
      groupManager.app(app1) returns Some(AppDefinition(app1))
      groupManager.app(app2) returns Some(AppDefinition(app2))

      When("we ask to kill both tasks")
      val response = asyncRequest { r =>
        taskResource.killTasks(scale = true, force = true, wipe = false, body = bodyBytes, auth.request, r)
      }

      Then("The response should be OK")
      response.getStatus shouldEqual 200
      response.getMetadata.containsKey(RestResource.DeploymentHeader) should be(true)

      And("Should create a deployment")
      response.getEntity shouldEqual """{"version":"1970-01-01T00:00:00.000Z","deploymentId":"plan"}"""

      And("app1 and app2 is killed with force")
      verify(taskKiller).killAndScale(Matchers.eq(Map(app1 -> Seq(instance1), app2 -> Seq(instance2))), Matchers.eq(true))(any)

      And("nothing else should be called on the TaskKiller")
      noMoreInteractions(taskKiller)
    }

    "killTasks with scale and wipe fails" in new Fixture {
      Given("a request")
      val app1 = "/my/app-1".toRootPath
      val instance1 = Instance.Id.forRunSpec(app1)
      val taskId1 = Task.Id(instance1).idString
      val body = s"""{"ids": ["$taskId1"]}"""
      val bodyBytes = body.toCharArray.map(_.toByte)

      When("we ask to scale AND wipe")
      val response = asyncRequest { r => taskResource.killTasks(scale = true, force = false, wipe = true, body = bodyBytes, auth.request, r) }

      Then("an exception should occur")
      response.getStatus should be(400)
      response.getEntity shouldEqual """{"message":"You cannot use scale and wipe at the same time."}"""
    }

    "killTasks with wipe delegates to taskKiller with wipe value" in new Fixture {
      Given("a task that shall be killed")
      val app1 = "/my/app-1".toRootPath
      val instance1 = TestInstanceBuilder.newBuilder(app1).addTaskRunning().getInstance()
      val List(taskId1) = instance1.tasksMap.keys.toList
      val body = s"""{"ids": ["${taskId1.idString}"]}"""
      val bodyBytes = body.toCharArray.map(_.toByte)

      config.zkTimeoutDuration returns 5.seconds
      instanceTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.forInstances(instance1))
      instanceTracker.specInstances(app1) returns Future.successful(Seq(instance1))
      taskKiller.kill(Matchers.eq(app1), any, Matchers.eq(true))(any) returns Future.successful(List(instance1))
      groupManager.app(app1) returns Some(AppDefinition(app1))

      When("we send the request")
      val response = asyncRequest { r =>
        taskResource.killTasks(scale = false, force = false, wipe = true, body = bodyBytes, auth.request, r)
      }

      Then("The response should be OK")
      response.getStatus shouldEqual 200

      And("the taskKiller receives the wipe flag")
      verify(taskKiller).kill(Matchers.eq(app1), any, Matchers.eq(true))(any)

      And("nothing else should be called on the TaskKiller")
      noMoreInteractions(taskKiller)
    }

    "killTask without authentication is denied when the affected app exists" in new Fixture {
      Given("An unauthenticated request")
      auth.authenticated = false
      val req = auth.request
      val appId = "/my/app".toRootPath
      val instance1 = Instance.Id.forRunSpec(appId)
      val instance2 = Instance.Id.forRunSpec(appId)
      val instance3 = Instance.Id.forRunSpec(appId)
      val taskId1 = Task.Id(instance1).idString
      val taskId2 = Task.Id(instance2).idString
      val taskId3 = Task.Id(instance3).idString
      val body = s"""{"ids": ["$taskId1", "$taskId2", "$taskId3"]}""".getBytes

      Given("the app exists")
      groupManager.app(appId) returns Some(AppDefinition(appId))

      When("kill task is called")
      val killTasks = asyncRequest { r => taskResource.killTasks(scale = true, force = false, wipe = false, body, req, r) }

      Then("we receive a NotAuthenticated response")
      killTasks.getStatus should be(auth.NotAuthenticatedStatus)
    }

    "killTask without authentication is not allowed when the affected app does not exist" in new Fixture {
      Given("An unauthenticated request")
      auth.authenticated = false
      val req = auth.request
      val appId = "/my/app".toRootPath
      val instance1 = Instance.Id.forRunSpec(appId)
      val instance2 = Instance.Id.forRunSpec(appId)
      val instance3 = Instance.Id.forRunSpec(appId)
      val taskId1 = Task.Id(instance1).idString
      val taskId2 = Task.Id(instance2).idString
      val taskId3 = Task.Id(instance3).idString
      val body = s"""{"ids": ["$taskId1", "$taskId2", "$taskId3"]}""".getBytes

      Given("the app does not exist")
      groupManager.app(appId) returns None

      When("kill task is called")
      val killTasks = asyncRequest { r => taskResource.killTasks(scale = true, force = false, wipe = false, body, req, r) }

      Then("we receive a NotAuthenticated response")
      killTasks.getStatus should be(auth.NotAuthenticatedStatus)
    }

    "indexTxt and IndexJson without authentication aren't allowed" in new Fixture {
      Given("An unauthenticated request")
      auth.authenticated = false
      val req = auth.request

      When("the index as json is fetched")
      val running = asyncRequest { r => taskResource.indexJson("status", Collections.emptyList(), req, r) }

      Then("we receive a NotAuthenticated response")
      running.getStatus should be(auth.NotAuthenticatedStatus)

      When("one index as txt is fetched")
      val cancel = asyncRequest { r => taskResource.indexTxt(req, r) }

      Then("we receive a NotAuthenticated response")
      cancel.getStatus should be(auth.NotAuthenticatedStatus)
    }

    // Uses a real TaskKiller (not a mock) so that the authorization check
    // inside the kill path is actually exercised.
    "access without authorization is denied if the affected app exists" in new Fixture {
      Given("An unauthorized request")
      auth.authenticated = true
      auth.authorized = false
      val req = auth.request
      val appId = "/my/app".toRootPath
      val instance1 = Instance.Id.forRunSpec(appId)
      val instance2 = Instance.Id.forRunSpec(appId)
      val instance3 = Instance.Id.forRunSpec(appId)
      val taskId1 = Task.Id(instance1).idString
      val taskId2 = Task.Id(instance2).idString
      val taskId3 = Task.Id(instance3).idString
      val body = s"""{"ids": ["$taskId1", "$taskId2", "$taskId3"]}""".getBytes

      implicit val system = ActorSystem("test")
      def materializerSettings = ActorMaterializerSettings(system)
      implicit val mat = ActorMaterializer(materializerSettings)

      override val taskKiller = new TaskKiller(
        instanceTracker, groupManager, config, auth.auth, auth.auth, killService)

      override val taskResource = new TasksResource(
        instanceTracker,
        taskKiller,
        config,
        groupManager,
        healthCheckManager,
        auth.auth,
        auth.auth
      )

      Given("the app exists")
      config.zkTimeoutDuration returns 5.seconds
      groupManager.app(appId) returns Some(AppDefinition(appId))
      instanceTracker.instancesBySpec returns Future.successful(InstanceTracker.InstancesBySpec.empty)

      When("kill task is called")
      val killTasks = asyncRequest { r => taskResource.killTasks(scale = false, force = false, wipe = false, body, req, r) }

      Then("we receive a not authorized response")
      killTasks.getStatus should be(auth.UnauthorizedStatus)
    }

    "killTasks fails for invalid taskId" in new Fixture {
      Given("a valid and an invalid taskId")
      val app1 = "/my/app-1".toRootPath
      val instance1 = Instance.Id.forRunSpec(app1)
      val taskId1 = Task.Id(instance1).idString
      val body = s"""{"ids": ["$taskId1", "invalidTaskId"]}"""
      val bodyBytes = body.toCharArray.map(_.toByte)

      When("we ask to kill those two tasks")
      val response = asyncRequest { r => taskResource.killTasks(scale = false, force = false, wipe = false, body = bodyBytes, auth.request, r) }

      Then("An exception should be thrown that points to the invalid taskId")
      response.getStatus should be(400)
      response.getEntity.toString should include ("invalidTaskId")

      And("the taskKiller should not be called at all")
      verifyNoMoreInteractions(taskKiller)
    }
  }
}
| gsantovena/marathon | src/test/scala/mesosphere/marathon/api/v2/TasksResourceTest.scala | Scala | apache-2.0 | 15,432 |
// Computes accuracy / precision / recall for a binary (Y/N) RTE run.
// Usage: AccuracyRTE.sh <file> <threshold>
// Input: CSV lines where column 2 is the gold label ("Y"/"N") and column 3
// is the system score; a score above <threshold> counts as a "Y" prediction.
if (args.length != 2) {
  println("USAGE: AccuracyRTE.sh file threshold")
  sys.exit()
}

import java.io.BufferedReader
import java.io.FileReader

val file = args(0)
val threshold = args(1).toDouble

// Running counts over all lines of the input file.
var total = 0       // number of examples
var correct = 0     // gold == system prediction
var y_gold = 0      // gold label is "Y"
var y_sys = 0       // system predicted "Y"
var y_correct = 0   // true positives (gold "Y" and predicted "Y")

val br = new BufferedReader(new FileReader(file))
try {
  // Fix: the reader is now closed even if a malformed line makes
  // split/toDouble throw (the original only closed it on success).
  var line = br.readLine()
  while (line != null) {
    val sp = line.split(",")
    val gold = sp(2)
    val sys = if (sp(3).toDouble > threshold) "Y" else "N"
    total += 1
    if (gold == sys) correct += 1
    if (gold == "Y") y_gold += 1
    if (sys == "Y") y_sys += 1
    if (gold == "Y" && sys == "Y") y_correct += 1
    line = br.readLine()
  }
} finally {
  br.close()
}

println("total: " + total)
println("correct: " + correct)
println("accuracy: " + (correct.toDouble / total))
println("precision: " + (y_correct.toDouble / y_sys))
println("recall: " + (y_correct.toDouble / y_gold))
| tianran/tifmo | demos/AccuracyRTE.scala | Scala | bsd-2-clause | 880 |
//
// MessagePack for Java
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package org.msgpack.core.buffer
import akka.util.ByteString
import org.msgpack.core.{MessagePackSpec, MessageUnpacker}
class ByteStringTest extends MessagePackSpec {

  // A msgpack-encoded "foo" wrapped in an akka ByteString; its backing
  // ByteBuffer is read-only, which is what these tests exercise.
  val unpackedString = "foo"
  val byteString = ByteString(createMessagePackData(_.packString(unpackedString)))

  /** Unpacks a single string from the given buffer via a one-shot input. */
  def unpackString(messageBuffer: MessageBuffer) = {
    val input = new MessageBufferInput {
      private var consumed = false

      // Yield the buffer exactly once, then signal end-of-input with null.
      override def next(): MessageBuffer =
        if (consumed) null
        else {
          consumed = true
          messageBuffer
        }

      override def close(): Unit = {}
    }
    new MessageUnpacker(input).unpackString()
  }

  "Unpacking a ByteString's ByteBuffer" should {
    "fail with a regular MessageBuffer" in {
      // can't demonstrate with new ByteBufferInput(byteString.asByteBuffer)
      // as Travis tests run with JDK6 that picks up MessageBufferU
      a[RuntimeException] shouldBe thrownBy(unpackString(new MessageBuffer(byteString.asByteBuffer)))
    }

    "succeed with a MessageBufferU" in {
      unpackString(new MessageBufferU(byteString.asByteBuffer)) shouldBe unpackedString
    }
  }
}
| jackyglony/msgpack-java | msgpack-core/src/test/scala/org/msgpack/core/buffer/ByteStringTest.scala | Scala | apache-2.0 | 1,814 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License,
*
* Contributors:
* Hao Jiang - initial API and implementation
*
*/
package edu.uchicago.cs.encsel.common
import java.util.Comparator
import java.util.concurrent.Callable
/** Implicit adapters between Scala functions and common Java SAM types. */
object Conversions {

  /** Views a zero-argument procedure as a [[java.lang.Runnable]]. */
  implicit def funToRunnable(fun: () => Unit): Runnable = new Runnable() {
    override def run(): Unit = fun()
  }

  /** Views a zero-argument function as a [[java.util.concurrent.Callable]]. */
  implicit def funToCallable[T](fun: () => T): Callable[T] = new Callable[T]() {
    override def call(): T = fun()
  }

  /** Adapts a Java [[java.util.Comparator]] to a Scala [[Ordering]]. */
  implicit def comparatorToOrdering[T](comparator: Comparator[T]): Ordering[T] = new Ordering[T] {
    override def compare(x: T, y: T): Int = comparator.compare(x, y)
  }
}
/*
This file is part of Intake24.
Copyright 2015, 2016 Newcastle University.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package uk.ac.ncl.openlab.intake24.foodxml
import uk.ac.ncl.openlab.intake24.api.data.{GuideImage, GuideImageWeightRecord}
import scala.xml.NodeSeq
import scala.xml.NodeSeq.seqToNodeSeq
/** XML (de)serialisation of guide images for the legacy food-data format. */
object GuideImageDef {

  /** Renders guide images as a <guide-images> document; each image carries
   *  its id/description and one <weight> element per weight record.
   */
  def toXml(guideImages: Seq[GuideImage]) =
    <guide-images>
      {
        guideImages.map(image =>
          <guide-image id={ image.id } description={ image.description }>
            {
              image.weights.map(weight =>
                <weight id={ weight.objectId.toString() } description={ weight.description } value={ weight.weight.toString() }/>)
            }
          </guide-image>)
      }
    </guide-images>

  /** Parses <guide-image> elements back into a map keyed by image id.
   *  NOTE(review): the id/value attribute lookups use Option.get and will
   *  throw on malformed XML (only "description" has a fallback) — presumably
   *  the input files are trusted; confirm before feeding external data.
   */
  def parseXml(root: NodeSeq): Map[String, GuideImage] = {
    (root \ "guide-image").map(n => {
      val name = n.attribute("id").get.text
      val desc = n.attribute("description").map(_.text).getOrElse("(no description for guide image " + name + ")");
      val weights = (n \ "weight").map(n => {
        val desc = n.attribute("description").get.text
        val id: java.lang.Integer = n.attribute("id").get.text.toInt
        val value: java.lang.Double = n.attribute("value").get.text.toDouble
        GuideImageWeightRecord(desc, id, value)
      })
      (name, GuideImage(name, desc, weights))
    }).toMap
  }
}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Wed Feb 5 17:41:18 EST 2014
* @see LICENSE (MIT style license file).
*/
package scalation.util
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Locatable` trait provides location information/coordinates for objects
* (e.g., `Component`s).
*/
trait Locatable extends Error
{
    /** Coordinates of this object; stays null until assigned exactly once. */
    private var _at: Array [Double] = null

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the location this object is currently at (null if never set).
     */
    def at: Array [Double] = _at

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Record the location of this object.  The location is write-once:
     *  reassignment, or assignment of a null location, is flagged as an error.
     *  @param at  the location to record for this object
     */
    def at_= (at: Array [Double])
    {
        if (_at != null || at == null) {
            flaw ("at_=", "location may only be set once")
        } else {
            _at = at
        } // if
    } // at_=

} // Locatable trait
package com.sksamuel.elastic4s.requests.searches.aggs
import com.sksamuel.elastic4s.requests.script.Script
import com.sksamuel.elastic4s.ext.OptionImplicits._
/** Elasticsearch `min` metric aggregation. Builder-style: every setter
 *  returns a copy with the corresponding optional field populated.
 */
case class MinAggregation(name: String,
                          field: Option[String] = None,
                          format: Option[String] = None,
                          missing: Option[AnyRef] = None,
                          script: Option[Script] = None,
                          subaggs: Seq[AbstractAggregation] = Nil,
                          metadata: Map[String, AnyRef] = Map.empty)
  extends Aggregation {

  type T = MinAggregation

  def field(field: String): T = copy(field = Some(field))
  def format(format: String): T = copy(format = Some(format))
  def missing(missing: AnyRef): T = copy(missing = Some(missing))
  def script(script: Script): T = copy(script = Some(script))

  override def subAggregations(aggs: Iterable[AbstractAggregation]): T = copy(subaggs = aggs.toSeq)
  override def metadata(map: Map[String, AnyRef]): T = copy(metadata = map)
}
| sksamuel/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/requests/searches/aggs/MinAggregation.scala | Scala | apache-2.0 | 1,068 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.precog.common.accounts
import quasar.precog.common.Path
import quasar.precog.common.security.{ APIKey, Permission, ReadPermission, WritePermission, DeletePermission }
import quasar.precog.common.security.Permission._
import quasar.blueeyes._, json._
import quasar.blueeyes.json.serialization.DefaultSerialization._
import quasar.blueeyes.json.serialization.Versioned._
import scalaz.syntax.plus._
import shapeless.HNil
import java.time.LocalDateTime
/** The plan tier an account belongs to (e.g. "Root", "Free"). */
case class AccountPlan(planType: String)

object AccountPlan {
  val Root = AccountPlan("Root")
  val Free = AccountPlan("Free")

  // Field ordering used by the versioned (de)serialization machinery.
  val schema = "type" :: HNil
  implicit val (decomposer, extractor) = serializationV[AccountPlan](schema, None)
}
/** A user account.
 *
 *  @param accountId              unique account identifier
 *  @param email                  account holder's email address
 *  @param passwordHash           stored password hash (scheme defined by callers)
 *  @param passwordSalt           salt used when hashing the password
 *  @param accountCreationDate    when the account was created
 *  @param apiKey                 API key associated with this account
 *  @param rootPath               root data path owned by the account
 *  @param plan                   billing/feature plan of the account
 *  @param parentId               optional parent account id — presumably for
 *                                sub-accounts; confirm against callers
 *  @param lastPasswordChangeTime time of the last password change, if any
 *  @param profile                optional free-form JSON profile data
 */
case class Account(accountId: AccountId,
                   email: String,
                   passwordHash: String,
                   passwordSalt: String,
                   accountCreationDate: LocalDateTime,
                   apiKey: APIKey,
                   rootPath: Path,
                   plan: AccountPlan,
                   parentId: Option[String] = None,
                   lastPasswordChangeTime: Option[LocalDateTime] = None,
                   profile: Option[JValue] = None)
object Account {
  // Field ordering for versioned (de)serialization of Account.
  val schemaV1 = "accountId" :: "email" :: "passwordHash" :: "passwordSalt" :: "accountCreationDate" :: "apiKey" :: "rootPath" :: "plan" :: "parentId" :: "lastPasswordChangeTime" :: "profile" :: HNil

  val extractorPreV = extractorV[Account](schemaV1, None)
  val extractorV1 = extractorV[Account](schemaV1, Some("1.1".v))

  // Try the versioned extractor first, falling back to the pre-version one.
  implicit val accountExtractor = extractorV1 <+> extractorPreV

  implicit val decomposerV1 = decomposerV[Account](schemaV1, Some("1.1".v))

  private val randomSource = new java.security.SecureRandom

  /** Generates a fresh password salt: 256 random bytes rendered as 512
   *  lower-case hex characters.
   *
   *  Fix: each byte is zero-padded to exactly two hex digits. The previous
   *  Integer.toHexString-based rendering dropped the leading zero for bytes
   *  below 0x10, producing variable-length, ambiguous salt strings.
   */
  def randomSalt(): String = {
    val saltBytes = new Array[Byte](256)
    randomSource.nextBytes(saltBytes)
    saltBytes.map(b => f"${b & 0xff}%02x").mkString
  }

  /** Default permission set granted to a newly created account. */
  def newAccountPermissions(accountId: AccountId, accountPath: Path): Set[Permission] = {
    // Path is "/" so that an account may read data it wrote no matter what path it exists under.
    // See AccessControlSpec, NewGrantRequest
    Set[Permission](
      WritePermission(accountPath, WriteAsAny),
      DeletePermission(accountPath, WrittenByAny),
      ReadPermission(Path.Root, WrittenByAccount(accountId))
    )
  }
}
/** Thin wrapper around an account id — presumably a (de)serialization
 *  envelope for payloads carrying only an accountId; confirm against callers.
 */
case class WrappedAccountId(accountId: AccountId)

object WrappedAccountId {
  // Field ordering for versioned (de)serialization of WrappedAccountId.
  val schema = "accountId" :: HNil
  implicit val (wrappedAccountIdDecomposer, wrappedAccountIdExtractor) = serializationV[WrappedAccountId](schema, None)
}
| drostron/quasar | blueeyes/src/main/scala/quasar/precog/common/accounts/Account.scala | Scala | apache-2.0 | 3,281 |
package lazybenchmarks
import leon.lazyeval._
import leon.lang._
import leon.annotation._
import leon.instrumentation._
//import leon.invariant._
/** Leon verification benchmark: merge sort over a lazy list.
 *  `$[T]` denotes a suspension (lazy value) of T, `$(e)` builds one, and
 *  `l*` / `l.value` force it (Leon's lazyeval DSL). `ensuring` clauses state
 *  postconditions checked by the verifier; `stack` is an instrumented
 *  resource measure, not runtime state.
 */
object MergeSort1 {

  // TODO: making this parametric will break many things. Fix them
  // Lazy (suspension-tailed) list of BigInt produced by the sort.
  sealed abstract class LList {
    def size: BigInt = {
      this match {
        case SNil() => BigInt(0)
        case SCons(x, t) => 1 + ssize(t)
      }
    } ensuring (_ >= 0)
  }
  case class SCons(x: BigInt, tail: $[LList]) extends LList
  case class SNil() extends LList
  // Size of a suspended list: force (`*`) and measure.
  def ssize(l: $[LList]): BigInt = (l*).size

  // Ordinary strict list used as the sort's input.
  sealed abstract class List {
    def size: BigInt = {
      this match {
        case Cons(_, xs) => 1 + xs.size
        case _ => BigInt(0)
      }
    } ensuring (_ >= 0)
  }
  case class Cons(x: BigInt, tail: List) extends List
  case class Nil() extends List

  def length(l: List): BigInt = {
    l match {
      case Nil() => BigInt(0)
      case Cons(x, xs) => 1 + length(xs)
    }
  } ensuring (res => res >= 0 && res == l.size)

  // Splits l into its first n elements and the remainder (n strictly inside
  // the list, per the precondition).
  def split(l: List, n: BigInt): (List, List) = {
    require(n > 0 && n < l.size)
    l match {
      case Nil() => (Nil(), l)
      case Cons(x, xs) =>
        if (n == 1) {
          (Cons(x, Nil()), xs)
        } else {
          val (fst, snd) = split(xs, n - 1)
          (Cons(x, fst), snd)
        }
    }
  } ensuring (res => res._2.size == l.size - n && res._1.size == n && stack <= 25 * l.size - 1)

  /*
   * Note: merge is not recursive due to closures.
   * However, concretizing merge may invoke merge or msort recursively.
   * So proving standalone bound for merge requires preconditions.
   */
  // Lazily merges two sorted suspended lists; recursive calls are themselves
  // suspended via `$(...)`, so only one element is produced per force.
  def merge(a: $[LList], b: $[LList]): LList = (b.value match {
    case SNil() => a.value
    case bl @ SCons(x, xs) =>
      a.value match {
        case SNil() => bl
        case SCons(y, ys) =>
          if (y < x)
            SCons(y, $(merge(ys, b)))
          else
            SCons(x, $(merge(a, xs)))
      }
  }) //ensuring (res => ssize(a) + ssize(b) == res.size)

  /**
   * For proving time, we need a term of order \\Omega(n) with strict
   * inverse triangle inequality i.e, f(a + b) > f(a) + f(b)
   * Log satisfies this but it is very expensive. Is there another function ?
   */
  // Top-level sort: split, recursively (and lazily) sort both halves, merge.
  def mergeSort(l: List): LList = (l match {
    case Nil() => SNil()
    case Cons(x, Nil()) => SCons(x, $(empty))
    case _ =>
      val (fst, snd) = split(l, length(l) / 2)
      merge($(mergeSort(fst)), $(mergeSort(snd)))
  }) ensuring (res => stack <= 81 * l.size + 35) // res.size == l.size

  // TODO: inlining this creates problems. why ?
  def empty: LList = {
    SNil()
  }
}
| epfl-lara/leon | testcases/lazy-datastructures/ManualnOutdated/LazyMegeSort1-Time.scala | Scala | gpl-3.0 | 2,629 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.process.analytic
import org.geotools.data.collection.ListFeatureCollection
import org.junit.runner.RunWith
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class StatsProcessTest extends Specification {

  val process = new StatsProcess

  val sft = SimpleFeatureTypes.createType("stats", "track:String,dtg:Date,*geom:Point:srid=4326")
  val fc = new ListFeatureCollection(sft)

  // Ten features alternating between two track values, spread over one
  // minute in time and 10 degrees of latitude.
  val features = (0 until 10).map { i =>
    val sf = new ScalaSimpleFeature(sft, i.toString)
    sf.setAttribute(0, s"t-${i % 2}")
    sf.setAttribute(1, s"2017-05-24T00:00:0$i.000Z")
    sf.setAttribute(2, s"POINT(45 5$i)")
    sf
  }

  // specs2 `step`: load the fixture data once, before the examples run.
  step {
    features.foreach(fc.add)
  }

  "StatsProcess" should {
    "manually visit a feature collection" in {
      val result = SelfClosingIterator(process.execute(fc, "Count()", encode = false, null).features).toSeq
      result.map(_.getAttribute(0)) mustEqual Seq("""{"count":10}""")
    }
    "manually visit a feature collection with projections" in {
      // Use the explicit, non-deprecated JavaConverters instead of the
      // implicit scala.collection.JavaConversions wildcard.
      import scala.collection.JavaConverters._
      val props = Seq("m=strConcat('m:', track)").asJava
      val result = SelfClosingIterator(process.execute(fc, "Enumeration(m)", encode = false, props).features).toSeq
      result.map(_.getAttribute(0)) mustEqual Seq("""{"m:t-0":5,"m:t-1":5}""")
    }
  }
}
| aheyne/geomesa | geomesa-process/geomesa-process-vector/src/test/scala/org/locationtech/geomesa/process/analytic/StatsProcessTest.scala | Scala | apache-2.0 | 2,041 |
package org.clulab.twitter4food.struct
/**
* Stores information about a single Twitter account, including handle, description, tweets, network
* User: mihais
* Date: 12/14/15
*/
class TwitterAccount (
  // TODO: add network information
  val handle: String,
  val id: Long,
  val name: String,
  val lang: String,
  val url: String,
  val location: String,
  val description: String,
  val tweets: Seq[Tweet],
  val activeFollowers: Seq[TwitterAccount]) {

  override def toString = s"$handle: ($name, $description)"

  /**
    * Returns a copy of this [[TwitterAccount]], optionally specifying new input values
    */
  def copy(
    handle: String = this.handle,
    id:Long = this.id,
    name:String = this.name,
    lang:String = this.lang,
    url:String = this.url,
    location:String = this.location,
    description:String = this.description,
    tweets:Seq[Tweet] = this.tweets,
    activeFollowers:Seq[TwitterAccount] = this.activeFollowers): TwitterAccount =
    new TwitterAccount(handle, id, name, lang, url, location, description, tweets, activeFollowers)

  /**
    * Merges this account with `that`: keeps this account's profile fields
    * (handle, name, etc.) and combines both tweet streams, deduplicated by
    * tweet id and ordered newest-first. Both must refer to the same account.
    */
  def merge(that: TwitterAccount): TwitterAccount = {
    assert(this.id == that.id, "They must be instantiations of the same account!")
    val combined = mergeTweets(this.tweets ++ that.tweets)
    copy(tweets = combined.sortBy(_.createdAt).reverse)
  }

  /** Collapses tweets sharing an id into a single merged tweet; result is sorted oldest-first. */
  def mergeTweets(tweets: Seq[Tweet]): Seq[Tweet] = {
    val deduplicated =
      for ((_, withSameId) <- tweets.groupBy(_.id))
        yield withSameId.reduce(_ merge _)
    deduplicated.toSeq.sortBy(_.createdAt)
  }

  /** Tweets that are "normal": neither retweets nor addressed to other accounts. */
  def normalTweets: Seq[Tweet] = tweets.filter(_.isNormal)
}
| clulab/twitter4food | src/main/scala/org/clulab/twitter4food/struct/TwitterAccount.scala | Scala | apache-2.0 | 1,939 |
package org.jetbrains.sbt
package project
import com.intellij.openapi.externalSystem.model.ProjectSystemId
/**
  * Identifier of the sbt external project system, used to register sbt
  * with IntelliJ's external-system API.
  *
  * @author Pavel Fatin
  */
object SbtProjectSystem {
  val Id = new ProjectSystemId("SBT", Sbt.Name)
}
package fr.laas.fape.graph.printers
import fr.laas.fape.graph.core
import fr.laas.fape.graph.core._
import scala.collection.mutable
/** Simple translator from [[core.Graph]] to the DOT syntax of graphviz.
*
* The resulting file will contain something like:
* {{{
* digraph g {
* node [shape=plaintext] rankdir="TB";
* 0 [label="String representation of node 0"];
* 1 [label="String representation of node 1"];
* 2 [label="String representation of node 2"];
* ...
* 0 -> 1 [label="String representation of the edge label"];
* 0 -> 2 [label="String representation of the edge label"];
* ...
* }
* }}}
*
* Such a graph, written to graph.dot can be converted using the dot command line utility, for instance:
* `dot -Tps graph.dot > graph.ps`
*
* @param g Graph to export.
* @param nep A node/edge printer that is used to translate both nodes and edges' labels to string.
* @tparam V Type of vertices.
* @tparam EL Type of edge labels.
* @tparam E Type of edges.
*/
class GraphDotPrinter[V,EL,E <: Edge[V]](val g: Graph[V,EL,E],
                                         val printNode : (V => String),
                                         val printEdge : (EL => String),
                                         val excludeNode : (V => Boolean),
                                         val excludeEdge : (E => Boolean)) {

  def this (g :Graph[V,EL,E], nep :NodeEdgePrinterInterface[V, EL, E]) =
    this(g, nep.printNode, nep.printEdge, nep.excludeNode, nep.excludeEdge)

  def this(g :Graph[V,EL,E]) = this(g, new NodeEdgePrinter[V,EL,E])

  /** Writes `s` to `filename`, always closing the writer. */
  def writeToFile(filename: String, s: String): Unit = {
    val pw = new java.io.PrintWriter(new java.io.File(filename))
    try pw.write(s) finally pw.close()
  }

  /** Opening line of the dot file. Undirected graphs must be declared with
    * `graph` (their edges are rendered with `--` in [[edge2Str]]); all other
    * graphs are emitted as `digraph` with `->` edges. (Fixes the previous
    * version, which always emitted `digraph`.) */
  val header = g match {
    case udg:UndirectedGraph[V,EL,E] => "graph g {\n node [shape=plaintext] rankdir=\"TB\";\n\n"
    case _ => "digraph g {\n node [shape=plaintext] rankdir=\"TB\";\n\n"
  }

  val footer = "\n}"

  /** Translate an edge to dot syntax such as `0 -- 1;` (undirected graphs) or
    * `0 -> 1;`, where 0 and 1 are the ids of the source and target nodes.
    * Both endpoints must already have been registered through [[node2Str]],
    * otherwise the id lookup throws.
    * @param e Edge to output.
    * @return Dot syntax for the declaration of the edge.
    */
  def edge2Str(e:E) = {
    val link = g match {
      case udg:UndirectedGraph[V,EL,E] => "--"
      case dg:DirectedGraph[V,EL,E] => "->"
    }
    val label = e match {
      case e:LabeledEdge[V,EL] => " [label=\"%s\"]".format(printEdge(e.l).replaceAll("\n", "\\\\n"))
      case _ => ""
    }
    " " + nodeId(e.u) +" "+ link +" "+ nodeId(e.v) + label +";"
  }

  // Vertex -> dot node id, assigned on first encounter; reset per rendering.
  private val nodeId = mutable.Map[V, Int]()

  /** Translate a node to dot syntax, for instance `1 [label="some text"];`.
    * Assigns the node a fresh id on first sight and returns the declaration
    * (with a trailing newline); returns "" if the node was already declared.
    * @param v Vertex to output.
    * @return Declaration of the vertex in dot syntax.
    */
  def node2Str(v:V) = {
    if(!nodeId.contains(v)) {
      nodeId(v) = nodeId.size
      " " + nodeId(v) +" [label=\""+printNode(v).replaceAll("\n", "\\\\n")+"\"];\n"
    } else {
      ""
    }
  }

  /**
   * Creates the dot representation of the graph.
   *
   * Fixes over the previous version: node ids are reset on every call (so the
   * method can safely be invoked more than once), and nodes are emitted in a
   * single pass (the old second pass over the vertices only produced stray
   * blank lines, since every node was already registered).
   */
  def graph2DotString : String = {
    nodeId.clear()
    val out = new StringBuilder(header)
    for(v <- g.vertices if !excludeNode(v))
      out.append(node2Str(v))          // node2Str already ends with '\n'
    out.append("\n")
    out.append(g.edges().filter(!excludeEdge(_)).map(edge2Str).mkString("\n"))
    out.append(footer)
    out.toString
  }

  /**
   * Print the graph to a file in dot format.
   * @param file File to write the graph to
   */
  def print2Dot(file:String) {
    writeToFile(file, graph2DotString)
  }
}
| athy/fape | structures/src/main/scala/fr/laas/fape/graph/printers/GraphDotPrinter.scala | Scala | bsd-2-clause | 3,859 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes.calcite
import org.apache.flink.table.runtime.rank.{RankRange, RankType}
import org.apache.calcite.plan.{Convention, RelOptCluster, RelTraitSet}
import org.apache.calcite.rel.`type`.RelDataTypeField
import org.apache.calcite.rel.{RelCollation, RelNode}
import org.apache.calcite.util.ImmutableBitSet
import java.util
import scala.collection.JavaConversions._
/**
  * Sub-class of [[Rank]] that is a relational expression which returns
  * the rows in which the rank function value of each row is in the given range.
  * This class corresponds to Calcite logical rel.
  */
final class LogicalRank(
    cluster: RelOptCluster,
    traitSet: RelTraitSet,
    input: RelNode,
    partitionKey: ImmutableBitSet,
    orderKey: RelCollation,
    rankType: RankType,
    rankRange: RankRange,
    rankNumberType: RelDataTypeField,
    outputRankNumber: Boolean)
  extends Rank(
    cluster,
    traitSet,
    input,
    partitionKey,
    orderKey,
    rankType,
    rankRange,
    rankNumberType,
    outputRankNumber) {

  // Calcite contract: rebuild this node with the given traits and inputs while
  // keeping every rank attribute. (`inputs.head` on the java List relies on
  // the wildcard JavaConversions import at the top of this file.)
  override def copy(traitSet: RelTraitSet, inputs: util.List[RelNode]): RelNode = {
    new LogicalRank(
      cluster,
      traitSet,
      inputs.head,
      partitionKey,
      orderKey,
      rankType,
      rankRange,
      rankNumberType,
      outputRankNumber
    )
  }
}
object LogicalRank {

  /** Factory: derives the cluster from `input` and uses the default
    * (Convention.NONE) trait set, as appropriate for a logical rel. */
  def create(
      input: RelNode,
      partitionKey: ImmutableBitSet,
      orderKey: RelCollation,
      rankType: RankType,
      rankRange: RankRange,
      rankNumberType: RelDataTypeField,
      outputRankNumber: Boolean): LogicalRank = {
    val cluster = input.getCluster
    new LogicalRank(
      cluster,
      cluster.traitSetOf(Convention.NONE),
      input,
      partitionKey,
      orderKey,
      rankType,
      rankRange,
      rankNumberType,
      outputRankNumber)
  }
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/nodes/calcite/LogicalRank.scala | Scala | apache-2.0 | 2,679 |
package models.contests
import org.joda.time.LocalDate
import java.util.UUID
//ADD problem type, eg regression, classification, recommendation
// Persistence model for a contest row. Field names deliberately mirror the
// database columns (snake_case); renaming them would break existing
// callers/row mappers, so they are kept as-is.
case class Contest(
  contest_id:Int,
  contest_name: String,
  user_id:UUID,
  contest_description:String,
  contest_created: LocalDate,
  contest_start:LocalDate,
  contest_end:LocalDate,
  evaluator_id:Int,
  benchmark_value:Option[Double],
  contest_folder:String
)
/** Display/read model of a contest: same shape as [[Contest]] but with the
  * user and evaluator ids resolved to their names for presentation. */
case class ContestDisplay(
  contest_id:Int,
  contest_name: String,
  user_name:String,
  contest_description:String,
  contest_created: LocalDate,
  contest_start:LocalDate,
  contest_end:LocalDate,
  evaluator_name:String,
  benchmark_value:Option[Double],
  contest_folder:String
)
package api.show
import sangria.macros.derive._
import sangria.schema._
object Schema {
  import Repository._
  // One television series as exposed through the GraphQL schema.
  // NOTE(review): `imdb_id` has no DocumentField in Fragments — confirm that
  // leaving it undocumented is intended.
  case class TVSeries(
    id: Int,
    seriesid: Option[String],
    seriesname: Option[String],
    genre: Option[String],
    rating: Option[String],
    imdb_id: Option[String]
  )
  // A season; `id` is used to look up its episodes in the repository.
  case class TVSeason(
    id: Int,
    season: Int
  )
  // A single episode within a season.
  case class TVEpisode(
    id: Int,
    episodenumber: Int,
    episodename: Option[String],
    firstaired: Option[String]
  )
}
object Arguments {
  // Query argument used to look up series by name.
  val TVSeriesName = Argument("name", StringType, description = "name of television show")
  //val TVSeasonId = Argument("id", IntType, description = "id of television show")
}
object Fragments {
  import Schema._
  // Episode GraphQL type: fields derived straight from the case class.
  val TVEpisodeType = deriveObjectType[Unit, TVEpisode](
    DocumentField("id", "id of episode"),
    DocumentField("episodenumber", "number used to order episodes"),
    DocumentField("episodename", "name episode was released with"),
    DocumentField("firstaired", "date episode was aired")
  )
  // Season type: adds an `episodes` field resolved from the repository by season id.
  val TVSeasonType = deriveObjectType[api.Repository, TVSeason](
    DocumentField("id", "id of season, used to grab episodes"),
    DocumentField("season", "number of the season"),
    AddFields(
      Field("episodes", ListType(TVEpisodeType),
        resolve = (ctx: Context[api.Repository, TVSeason]) => ctx.ctx.getEpisodes(ctx.value.id))
    )
  )
  // Series type: adds a `seasons` field resolved from the repository by series id.
  val TVSeriesType = deriveObjectType[api.Repository, TVSeries](
    DocumentField("seriesid", "id related to the series"),
    DocumentField("seriesname", "name of television show"),
    DocumentField("genre", "genre of the show"),
    DocumentField("rating", "tv guide ratings (TV-PG, TV-MA, etc)"),
    AddFields(
      Field("seasons", ListType(TVSeasonType),
        resolve = (ctx: Context[api.Repository, TVSeries]) => ctx.ctx.getSeasons(ctx.value.id)
      )
    )
  )
}
object Fields {
  import Schema._
  import Fragments._
  import Arguments._

  /** The tv-show query fields, defined once. Previously this list was
    * duplicated verbatim inside `TVApiType` (with a leftover
    * `//fieldList: _*` comment); both vals are kept for compatibility but now
    * share a single definition. Note: defined before `TVApiType`, which
    * references it during object initialization. */
  val fieldList = List(
    Field("series", ListType(TVSeriesType),
      arguments = TVSeriesName :: Nil,
      resolve = (ctx:Context[api.Repository, Unit]) => ctx.ctx.searchTVSeries(ctx.arg(TVSeriesName)))
  )

  /** Root GraphQL object exposing the tv-show related queries. */
  val TVApiType = ObjectType(
    "TVSeriesApi", "tv show related queries",
    fields[api.Repository, Unit](fieldList: _*)
  )
}
| andykais/telebum2 | services/web-api/src/main/scala/api/show/Schema.scala | Scala | mit | 2,463 |
package com.weather.scalacass.scsession
import com.weather.scalacass.ScalaSession
class SelectUnitTests extends ActionUnitTests {
  // Projection: select only column `i`.
  case class SelectiveSelect(i: Int)
  // WHERE-clause shape: rows matched on `str`.
  case class Query(str: String)
  "select" should "be selective" in {
    val query = ss.select[SelectiveSelect](table, Query("asdf"))
    println(query.getStringRepr.right.value)
    println(query.execute().right.value)
  }
  it should "not need to be selective" in {
    // ScalaSession.Star corresponds to SELECT *.
    val query = ss.select[ScalaSession.Star](table, Query("asdf"))
    println(query.getStringRepr.right.value)
    println(query.execute().right.value)
  }
  it should "limit" in {
    val query = ss.select[ScalaSession.Star](table, Query("asdf")).limit(100)
    println(query.getStringRepr.right.value)
    println(query.execute().right.value)
  }
  it should "allow filtering" in {
    val query = ss.select[ScalaSession.Star](table, Query("asdf")).allowFiltering
    println(query.getStringRepr.right.value)
    println(query.execute().right.value)
  }
  it should "do everything" in {
    // limit + ALLOW FILTERING combined with a narrowing projection.
    val query = ss.select[SelectiveSelect](table, Query("asdf")).limit(100).allowFiltering
    println(query.getStringRepr.right.value)
    println(query.execute().right.value)
  }
}
| thurstonsand/fast-cass | src/test/scala/com/weather/scalacass/scsession/SelectUnitTests.scala | Scala | mit | 1,212 |
package com.lucaswilkins.newtonfractal
import java.awt.Event
import java.awt.event.{ActionEvent, ActionListener, KeyEvent}
import javax.swing._
import com.lucaswilkins.newtonfractals.{util, image, MainPanel}
/**
* Created by lucas on 15/11/2014.
*/
class MainWindow extends JFrame("Newton-Raphson Fractal Manipulator") {

  // Requested outer window dimensions, in pixels.
  val w = 600
  val h = 600

  /*
   * Look and feel
   */
  try {
    UIManager.setLookAndFeel("javax.swing.plaf.nimbus.NimbusLookAndFeel")
  } catch {
    case _: Throwable ⇒ println("Failed to set look and feel")
  }

  /*
   * Menu bar
   */

  // Adapts a thunk into an ActionListener for the menu items below.
  // NOTE(review): call sites pass pattern-matching blocks ({case _ ⇒ ...})
  // where a Function0 is expected — confirm this compiles on the project's
  // Scala version.
  def run(f: Function0[Unit]) =
    new ActionListener(){
      def actionPerformed(e: ActionEvent) {
        f.apply
      }
    }

  val menu = new JMenuBar

  val file = new JMenu("File")
  file.setMnemonic(KeyEvent.VK_F)
  menu.add(file)

  val save = new JMenuItem("Save Image")
  save.setMnemonic(KeyEvent.VK_S)
  save.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_S, Event.CTRL_MASK))
  save.addActionListener(run {case _ ⇒ image.saveBufferedImage(panel.canvas,this)})
  file.add(save)

  file.addSeparator

  val exit = new JMenuItem("Exit")
  exit.setMnemonic(KeyEvent.VK_X)
  exit.addActionListener(run({case _ ⇒ System.exit(0) }))
  file.add(exit)

  val control = new JMenu("Control")
  control.setMnemonic(KeyEvent.VK_C)
  menu.add(control)

  val deleteNode = new JMenuItem("Remove selected control node")
  deleteNode.setMnemonic(KeyEvent.VK_D)
  deleteNode.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_DELETE.toChar))
  deleteNode.addActionListener(run{case _ ⇒ panel.deleteSelectedNode})
  control.add(deleteNode)

  val clearNode = new JMenuItem("Clear all control nodes")
  clearNode.setMnemonic(KeyEvent.VK_A)
  clearNode.addActionListener(run {case _ ⇒ panel.deleteAllNodes})
  control.add(clearNode)

  control.addSeparator

  val showRoots = new JCheckBoxMenuItem("Show control nodes")
  showRoots.setMnemonic(KeyEvent.VK_N)
  showRoots.setSelected(true)
  showRoots.addActionListener(run{case _ ⇒ panel.showNodes = showRoots.isSelected})
  control.add(showRoots)

  val showPoly = new JCheckBoxMenuItem("Show data")
  showPoly.setMnemonic(KeyEvent.VK_P)
  showPoly.setSelected(false)
  showPoly.addActionListener(run {
    case _ ⇒ panel.polyDisplay.setVisible(showPoly.isSelected)
  })
  control.add(showPoly)

  setJMenuBar(menu)

  val colourSchemes = new JMenu("Colours")
  colourSchemes.setMnemonic(KeyEvent.VK_S)
  menu.add(colourSchemes)

  val help = new JMenu("Help")
  help.setMnemonic(KeyEvent.VK_H)
  menu.add(help)

  val website = new JMenuItem("Online Documentation")
  website.setMnemonic(KeyEvent.VK_D)
  website.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F1, 0))
  website.addActionListener(run {case _ ⇒ util.launchWebsite("http://www.lucaswilkins.com/newtonfractal/")})
  help.add(website)

  /*
   * Main panel
   */
  // NOTE(review): getInsets is queried before the frame is displayed, when
  // insets are typically all zero — confirm the intended sizing behaviour.
  val ins = getInsets
  // Fix: the vertical space taken by the insets is top + bottom
  // (was erroneously `ins.top - ins.bottom`).
  val panel = new MainPanel(w-(ins.left+ins.right), h-(ins.top+ins.bottom))

  /*
   * Populate colour scheme menu
   */
  // val: the map reference itself is never reassigned (was a var).
  val schemeNameMenuMap: scala.collection.mutable.Map[String, JCheckBoxMenuItem]
  = scala.collection.mutable.Map[String, JCheckBoxMenuItem]()

  for (schemeName ← panel.colourSchemes.keys){
    val menuItem: JCheckBoxMenuItem = new JCheckBoxMenuItem(schemeName)
    menuItem.setSelected(schemeName == panel.defaultSchemeName)
    menuItem.addActionListener(run {case _ ⇒ {
        panel.setColourScheme(schemeName)
        // Deselect every scheme, then re-select the chosen one.
        // (foreach: executed purely for the side effect; was `map`.)
        schemeNameMenuMap.values.foreach(_ setSelected false)
        menuItem.setSelected(true)
      }
    })
    colourSchemes.add(menuItem)
    schemeNameMenuMap.put(schemeName, menuItem)
  }

  /*
   * Other basic swing stuff
   */
  add(panel)
  setSize(600,600)
  setResizable(false)
  setLocationRelativeTo(null)
  setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
  setVisible(true)
}
object MainWindow {
  // Application entry point; constructing the window makes it visible
  // immediately (setVisible(true) runs in the constructor).
  def main(args: Array[String]): Unit = {
    new MainWindow
  }
}
| drlucaswilkins/newtonfractal | NewtonFractal/src/main/scala/com/lucaswilkins/newtonfractals/MainWindow.scala | Scala | gpl-2.0 | 3,925 |
package demo.test
import java.net.InetAddress
import akka.actor.{ActorRefFactory, ActorSystem}
import com.persist.logging._
import logging_demo.BuildInfo
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.language.postfixOps
import scala.concurrent.Future
// Demo of the three logging call styles provided by the ClassLogging mix-in:
// a plain message, an error, and a rich map payload ("@msg" carries the text).
// NOTE(review): this class is not referenced by Appender.main below (which
// uses SimpleClass) — confirm which demo class was intended.
case class AppenderClass() extends ClassLogging {
  def demo(): Unit = {
    log.info("Test")
    log.error("Foo failed")
    log.warn(Map("@msg" -> "fail", "value" -> 23))
  }
}
// Builder hook the logging framework uses to construct the appender with the
// actor-ref factory and the standard headers of the logging system.
object FlatAppender extends LogAppenderBuilder {
  def apply(factory: ActorRefFactory, stdHeaders: Map[String, RichMsg])
  = new FlatAppender(factory, stdHeaders)
}
class FlatAppender(factory: ActorRefFactory, stdHeaders: Map[String, RichMsg]) extends LogAppender {

  /** Writes "common"-category log messages to stdout as
    * time, level and message separated by the literal separator used below;
    * all other categories are ignored. */
  def append(msg: Map[String, RichMsg], category: String) {
    if (category == "common") {
      // Extract a string field from the rich message, defaulting to "???".
      def field(key: String): String = msg.get(key) match {
        case Some(s: String) => s
        case _ => "???"
      }
      val level = field("@severity")
      val time = field("@timestamp")
      val message = richToString(msg.getOrElse("msg","???"))
      println(s"$time\\t$level\\t$message")
    }
  }

  /** Nothing is buffered, so finishing completes immediately. */
  def finish(): Future[Unit] = Future.successful(())

  /** No resources to release. */
  def stop(): Future[Unit] = Future.successful(())
}
object Appender {
  def main(args: Array[String]) {
    val system = ActorSystem("test")
    val host = InetAddress.getLocalHost.getHostName
    // Install both the stock file appender and the custom FlatAppender above.
    val loggingSystem = LoggingSystem(system, BuildInfo.name,
      BuildInfo.version, host, appenderBuilders = Seq(FileAppender, FlatAppender))
    // NOTE(review): uses SimpleClass (defined elsewhere in the demo) while
    // AppenderClass in this file is unused — confirm the intended demo class.
    val sc = new SimpleClass()
    sc.demo()
    // Flush/stop logging before tearing down the actor system.
    Await.result(loggingSystem.stop, 30 seconds)
    Await.result(system.terminate(), 20 seconds)
  }
}
| nestorpersist/logging | demo/src/main/scala/demo/test/Appender.scala | Scala | apache-2.0 | 1,712 |
package com.seanshubin.todo.sample.core.http
import java.net.URLDecoder
import scala.collection.mutable.ArrayBuffer
/** An HTTP request method. Constructing an instance registers it in
  * [[Verb.values]], so the companion's vals form the closed set of verbs. */
sealed abstract case class Verb(name: String) {
  Verb.valuesBuffer += this

  /** True when `s` equals this verb's name, ignoring case. */
  def matchesString(s: String) = name.equalsIgnoreCase(s)

  /** Extractor over a (method, path) pair: when the method matches this verb,
    * yields the URL-decoded path segments after the leading slash. */
  def unapplySeq(methodAndPath: (String, String)): Option[Seq[String]] = {
    val (method, path) = methodAndPath
    if (!matchesString(method)) None
    else Some(path.split("/", -1).map(URLDecoder.decode(_, "UTF-8")).toList.tail)
  }
}

object Verb {
  private val valuesBuffer = new ArrayBuffer[Verb]
  /** All registered verbs; stable once this object is initialised. */
  lazy val values = valuesBuffer.toSeq

  val Post = new Verb("POST") {}
  val Get = new Verb("GET") {}
  val Put = new Verb("PUT") {}
  val Patch = new Verb("PATCH") {}
  val Delete = new Verb("DELETE") {}
  val Options = new Verb("OPTIONS") {}

  // "Verb": this object's class name with the module suffix stripped.
  val enumName: String = getClass.getSimpleName.takeWhile(_ != '$')

  /** Case-insensitive lookup; throws listing the valid names when none match. */
  def fromString(target: String) =
    maybeFromString(target).getOrElse {
      val idealMatchingStrings = values.map(_.name).mkString(", ")
      throw new RuntimeException(
        s"'$target' does not match a valid $enumName, valid values are $idealMatchingStrings")
    }

  def maybeFromString(target: String) = values.find(_.matchesString(target))
}
| SeanShubin/javascript-todo-samples | core/src/main/scala/com/seanshubin/todo/sample/core/http/Verb.scala | Scala | unlicense | 1,421 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package integration.interpreter.scala
import java.io.{ByteArrayOutputStream, File}
import org.apache.spark.toree.test.utils.JarUtils
import org.apache.toree.annotations.SbtForked
import org.apache.toree.global.StreamState
import org.apache.toree.interpreter._
import org.apache.toree.kernel.api.KernelLike
import org.apache.toree.kernel.interpreter.scala.ScalaInterpreter
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{BeforeAndAfter, FunSpec, Ignore, Matchers}
// Integration spec for the %AddJar behaviour of the Scala interpreter.
// Currently disabled (@Ignore); requires the sbt-forked test setup because it
// mutates the global StreamState.
@SbtForked
@Ignore
class AddExternalJarMagicSpecForIntegration
  extends FunSpec with Matchers with MockitoSugar with BeforeAndAfter
{
  // Captures everything the interpreter prints during a test.
  private val outputResult = new ByteArrayOutputStream()
  private var interpreter: Interpreter = _
  // Scratch directory into which dummy jars are generated.
  private var tempdir: File = _
  before {
    interpreter = new ScalaInterpreter {
      override protected def bindKernelVariable(kernel: KernelLike): Unit = { }
    }
    // interpreter.start()
    interpreter.init(mock[KernelLike])
    StreamState.setStreams(outputStream = outputResult)
    tempdir = JarUtils.createTemporaryDir()
  }
  after {
    interpreter.stop()
    outputResult.reset()
  }
  describe("ScalaInterpreter") {
    describe("#addJars") {
      it("should do something") {
        interpreter.interpret("1+1")
      }
      it("should be able to load Java jars") {
        val testJar1Url =
          JarUtils.createDummyJar(tempdir.toString, "test1", "TestClass")
        // Should fail since jars were not added to paths
        interpreter.interpret(
          "import test1.TestClass")._1 should be (Results.Error)
        // Add jars to paths
        interpreter.addJars(testJar1Url)
        // Should now succeed
        interpreter.interpret(
          "import test1.TestClass")._1 should be (Results.Success)
        // Should now run
        interpreter.interpret(
          """println(test1.TestClass.sayHello("Chip"))"""
        ) should be ((Results.Success, Left(Map())))
        outputResult.toString should be ("Hello, Chip\\n")
        outputResult.reset()
        interpreter.interpret(
          """println(test1.TestClass.addStuff(2,1))"""
        ) should be ((Results.Success, Left(Map())))
        outputResult.toString should be ("3\\n")
        outputResult.reset()
      }
      // Disabled: downloads a jar from Maven Central, so it needs network access.
      ignore("should support Scala jars") {
        val locationURL = "https://repo1.maven.org/maven2/org/scala-rules/rule-engine-core_2.11/0.5.1/rule-engine-core_2.11-0.5.1.jar"
        val testJarUrl = JarUtils.downloadJar(tempdir.toString, locationURL)
        // Should fail since jar was not added to paths
        interpreter.interpret(
          "import org.scalarules.utils._")._1 should be (Results.Error)
        // Add jar to paths
        interpreter.addJars(testJarUrl)
        // Should now succeed
        interpreter.interpret(
          "import org.scalarules.utils._")._1 should be (Results.Success)
        // Should now run
        /*
        interpreter.interpret(
          """println(new TestClass().runMe())"""
        ) should be ((Results.Success, Left(Map())))
        outputResult.toString should be ("You ran me!\\n")
        */
      }
      it("should be able to add multiple jars at once") {
        val testJar1Url =
          JarUtils.createDummyJar(tempdir.toString, "test1", "TestClass")
        val testJar2Url =
          JarUtils.createDummyJar(tempdir.toString, "test2", "TestClass")
        // Should fail since jars were not added to paths
        interpreter.interpret(
          "import test1.TestClass")._1 should be (Results.Error)
        interpreter.interpret(
          "import test2.TestClass")._1 should be (Results.Error)
        // Add jars to paths
        interpreter.addJars(testJar1Url, testJar2Url)
        // Should now succeed
        interpreter.interpret(
          "import test1.TestClass")._1 should be (Results.Success)
        interpreter.interpret(
          "import test2.TestClass")._1 should be (Results.Success)
        // Should now run
        interpreter.interpret(
          """println(test1.TestClass.sayHello("Chip"))"""
        ) should be ((Results.Success, Left(Map())))
        outputResult.toString should be ("Hello, Chip\\n")
        outputResult.reset()
        interpreter.interpret(
          """println(test2.TestClass.addStuff(2,1))"""
        ) should be ((Results.Success, Left(Map())))
        outputResult.toString should be ("3\\n")
        outputResult.reset()
      }
      it("should be able to add multiple jars in consecutive calls to addjar") {
        val testJar1Url =
          JarUtils.createDummyJar(tempdir.toString, "test1", "TestClass")
        val testJar2Url =
          JarUtils.createDummyJar(tempdir.toString, "test2", "TestClass")
        // Should fail since jars were not added to paths
        interpreter.interpret(
          "import test1.TestClass")._1 should be (Results.Error)
        interpreter.interpret(
          "import test2.TestClass")._1 should be (Results.Error)
        // Add jars to paths
        interpreter.addJars(testJar1Url)
        interpreter.addJars(testJar2Url)
        // Should now succeed
        interpreter.interpret(
          "import test1.TestClass")._1 should be (Results.Success)
        interpreter.interpret(
          "import test2.TestClass")._1 should be (Results.Success)
        // Should now run
        interpreter.interpret(
          """println(test1.TestClass.sayHello("Chip"))"""
        ) should be ((Results.Success, Left(Map())))
        outputResult.toString should be ("Hello, Chip\\n")
        outputResult.reset()
        interpreter.interpret(
          """println(test2.TestClass.addStuff(2,1))"""
        ) should be ((Results.Success, Left(Map())))
        outputResult.toString should be ("3\\n")
      }
      // Todo: rebinding is kinda finicky in Scala 2.11
      ignore("should not have issues with previous variables") {
        val testJar1Url =
          this.getClass.getClassLoader.getResource("TestJar.jar")
        val testJar2Url =
          this.getClass.getClassLoader.getResource("TestJar2.jar")
        // Add a jar, which reinitializes the symbols
        interpreter.addJars(testJar1Url)
        interpreter.interpret(
          """
            |val t = new com.ibm.testjar.TestClass()
          """.stripMargin)._1 should be (Results.Success)
        // Add a second jar, which reinitializes the symbols and breaks the
        // above variable
        interpreter.addJars(testJar2Url)
        interpreter.interpret(
          """
            |def runMe(testClass: com.ibm.testjar.TestClass) =
            |testClass.sayHello("Hello")
          """.stripMargin)._1 should be (Results.Success)
        // This line should NOT explode if variable is rebound correctly
        // otherwise you get the error of
        //
        // Message: <console>:16: error: type mismatch;
        // found : com.ibm.testjar.com.ibm.testjar.com.ibm.testjar.com.ibm.
        // testjar.com.ibm.testjar.TestClass
        // required: com.ibm.testjar.com.ibm.testjar.com.ibm.testjar.com.ibm.
        // testjar.com.ibm.testjar.TestClass
        // runMe(t)
        // ^
        val ans = interpreter.interpret(
          """
            |runMe(t)
          """.stripMargin)
        ans._1 should be (Results.Success)
      }
    }
  }
}
| lresende/incubator-toree | scala-interpreter/src/test/scala/integration/interpreter/scala/AddExternalJarMagicSpecForIntegration.scala | Scala | apache-2.0 | 8,131 |
package function
trait TestEvaluation {

  /**
    * Prints each (expected, actual) pair and returns true when every pair
    * agrees to within `threshold` (strict absolute difference).
    * Note: like `zip`, surplus elements in the longer list are silently
    * ignored, matching the original behaviour.
    */
  private def withinThreshold(expected: List[Double], actual: List[Double], threshold: Double): Boolean = {
    val tests = expected.zip(actual)
    tests.foreach {
      pair => println(s"${pair._1} ~ ${pair._2}")
    }
    tests.forall {
      pair => Math.abs(pair._1 - pair._2) < threshold
    }
  }

  /**
    * evaluate a function taking one parameter
    * @param fn function under test
    * @param params inputs, applied in order
    * @param results expected outputs, matched positionally with `params`
    * @param threshold maximum allowed absolute difference per pair
    * @return true when every output is within `threshold` of its expectation
    */
  def evaluate1(fn: Double => Double, params: List[Double], results: List[Double], threshold: Double): Boolean = {
    val output = params.map { parm => {
      // separate binding so a breakpoint can inspect each result
      val result = fn(parm)
      result
    }}
    withinThreshold(results, output, threshold)
  }

  /**
    * evaluate a function with 2 parameters; same contract as [[evaluate1]],
    * with each input supplied as a (Double, Double) pair.
    */
  def evaluate2(fn: (Double, Double) => Double, params: List[(Double, Double)], results: List[Double], threshold: Double): Boolean = {
    val output = params.map { pair => {
      val result = fn(pair._1, pair._2)
      result
    }}
    withinThreshold(results, output, threshold)
  }
}
| cxd/scala-au.id.cxd.math | math/src/test/scala/function/TestEvaluation.scala | Scala | mit | 1,205 |
package provingground.scalahott
import provingground._
import HoTT._
import spire.math._
import spire.implicits._
import scala.language.implicitConversions
object QField extends SymbolicField[Rational] {
override def toString = "Q"
val QTyp: QField.LocalTyp.type = LocalTyp
sealed trait PosWit extends Term with Subs[PosWit] {
val value: LocalTerm
lazy val typ = Pos(value)
def +(that: PosWit) = PosWitSum(this, that)
}
case class PosWitSum(a: PosWit, b: PosWit) extends PosWit {
lazy val value: QField.LocalTerm = a.value + b.value
def newobj: PosWit =
throw new IllegalArgumentException(
s"trying to use the constant $this as a variable (or a component of one)"
)
def subs(x: Term, y: Term) = PosWitSum(a.replace(x, y), b.replace(x, y))
}
case class PosWitProd(a: PosWit, b: PosWit) extends PosWit {
lazy val value: QField.LocalTerm = a.value * b.value
def newobj: PosWit =
throw new IllegalArgumentException(
s"trying to use the constant $this as a variable (or a component of one)"
)
def subs(x: Term, y: Term) = PosWitSum(a.replace(x, y), b.replace(x, y))
}
case class PosLiteral(a: Rational) extends PosWit {
require(a >= 0, s"Rational number $a not positive")
val value: QField.LocalTerm = Literal(a)
def newobj: PosLiteral =
throw new IllegalArgumentException(
s"trying to use the constant $this as a variable (or a component of one)"
)
def subs(x: Term, y: Term): PosLiteral = this
}
case object PosZero extends PosWit {
val value: QField.LocalTerm = Literal(0)
def newobj: PosZero.type =
throw new IllegalArgumentException(
s"trying to use the constant $this as a variable (or a component of one)"
)
def subs(x: Term, y: Term): PosZero.type = this
}
case class SymbPosWit(name: AnySym, value: LocalTerm)
extends PosWit
with Symbolic {
override def toString: String = name.toString + " : (" + typ.toString + ")"
def newobj: SymbPosWit = SymbPosWit(InnerSym[Term](this), value)
def subs(x: Term, y: Term): PosWit =
if (x == this) y.asInstanceOf[PosWit]
else {
def symbobj(sym: AnySym) = (typ.replace(x, y): Pos).symbObj(sym)
symSubs(symbobj)(x, y)(name)
}
}
case class Pos(value: LocalTerm) extends Typ[PosWit] with Subs[Pos] {
def subs(x: Term, y: Term) = Pos(value.replace(x, y))
type Obj = PosWit
val typ: Universe = Type
def newobj: Pos = Pos(value.newobj)
def variable(sym: AnySym) = SymbPosWit(sym, value)
}
val x: ScalaTerm[Rational] = "x" :: LocalTyp
val y: ScalaTerm[Rational] = "y" :: LocalTyp
lazy val leq
: FuncLike[ScalaTerm[Rational], FuncLike[ScalaTerm[Rational], Pos]] = x :~> (y :~> Pos(
y - x
))
// val possum = x :~> (y :~> (Pos(x) ~>: (Pos(y) ~>: Pos(x + y))))
//
// val posprod = x :~> (y :~> (Pos(x) ~>: (Pos(y) ~>: Pos(x * y))))
val dichotomy: FuncLike[ScalaTerm[Rational], Term] =
"positivity-dichotomy" :: (x ~>: (Pos(x) || Pos(-x)))
val posAndNegPos
: FuncLike[ScalaTerm[Rational], FuncLike[Pos, FuncLike[Pos, Equality[
ScalaTerm[Rational]
]]]] =
"positive-and-negation-positive" :: (
x ~>: (Pos(x) ~>: (Pos(-x) ~>: (x =:= Literal(0))))
)
val squarePositive: FuncLike[ScalaTerm[Rational], PosWit] =
"square-positive" :: x ~>: Pos(x * x)
val sumPositive: FuncLike[ScalaTerm[Rational], FuncLike[
ScalaTerm[Rational],
Func[PosWit, Func[PosWit, PosWit]]
]] =
"sum-positive" :: x ~>: (y ~>: (Pos(x) ->: Pos(y) ->: Pos(x + y)))
/**
 * Attempts to syntactically certify that `x` is positive, returning a
 * witness when one can be constructed:
 *
 *  - literals: positive iff the value is >= 0;
 *  - products ([[PiTerm]]): positive when every exponent is even, via the
 *    square-positivity axiom applied to the term with halved exponents;
 *  - sums (binary `Comb` of `sum`, or [[SigmaTerm]]): positive when each
 *    summand can be shown positive, combined with `sumPositive`.
 *
 * Returns `None` when no witness is found (this does NOT prove negativity).
 */
def showPositive(x: LocalTerm): Option[PosWit] = x match {
  case Literal(a) if a >= 0 => Some(PosLiteral(a))
  case PiTerm(multElems) =>
    if (multElems.exists(_._2 % 2 == 1)) None
    else {
      // halve each exponent to obtain a term whose square is x
      val sqrt = PiTerm(multElems.map { case (a, n) => (a, n / 2) })
      Some(squarePositive(sqrt))
    }
  case Comb(`sum`, a, b) =>
    for {
      pf1 <- showPositive(a)
      pf2 <- showPositive(b)
    } yield sumPositive(a)(b)(pf1)(pf2)
  case SigmaTerm(elems) =>
    // Pairwise-combine witnesses for the summands; any failure yields None.
    // NOTE(review): `reduce` assumes `elems` is non-empty — confirm the
    // SigmaTerm invariant guarantees this.
    elems.map(y => showPositive(y).map(y -> _)).reduce[Option[(LocalTerm, PosWit)]]{
      case (Some((a, pa)), Some((b, pb))) => Some((a + b) -> sumPositive(a)(b)(pa)(pb))
      case _ => None
    }.map(_._2)
  case _ => None
}
// Further symbolic variables for the transport lemmas.
val z: ScalaTerm[Rational] = "z" :: LocalTyp
val w: ScalaTerm[Rational] = "w" :: LocalTyp

import IdentityTyp.transport

// Transport a proof of y <= x along an equality y = z to obtain z <= x
// (rewriting in the left argument of leq).
val transpEqL
    : FuncLike[ScalaTerm[Rational], FuncLike[ScalaTerm[Rational], FuncLike[
      ScalaTerm[Rational],
      Func[Equality[ScalaTerm[Rational]], Func[PosWit, PosWit]]
    ]]] =
  x :~> (
    y :~> (z :~> (transport(w :-> (leq(w)(x)))(y)(z)))
  ) !: x ~>: (
    y ~>: (
      z ~>: (
        (y =:= z) ->: leq(y)(x) ->: leq(z)(x)
      )
    )
  )

// Transport a proof of x <= y along an equality y = z to obtain x <= z
// (rewriting in the right argument of leq).
val transpEqR
    : FuncLike[ScalaTerm[Rational], FuncLike[ScalaTerm[Rational], FuncLike[
      ScalaTerm[Rational],
      Func[Equality[ScalaTerm[Rational]], Func[PosWit, PosWit]]
    ]]] =
  x :~> (
    y :~> (z :~> (transport(w :-> (leq(x)(w)))(y)(z)))
  ) !: x ~>: (
    y ~>: (
      z ~>: (
        (y =:= z) ->: leq(x)(y) ->: leq(x)(z)
      )
    )
  )
}
| siddhartha-gadgil/ProvingGround | core/src/main/scala/provingground/scalahott/QField.scala | Scala | mit | 5,283 |
package org.jetbrains.sbt
package project
import org.jetbrains.plugins.scala.SlowTests
import org.jetbrains.sbt.project.ProjectStructureDsl._
import org.junit.experimental.categories.Category
/**
 * Slow integration tests that import sbt test projects and verify the
 * resulting IDE project structure (modules, libraries, content roots,
 * excluded folders) via the expectations DSL in [[ProjectStructureDsl]].
 */
@Category(Array(classOf[SlowTests]))
class ProjectImportingTest extends ImportingTestCase with InexactMatch {

  // Minimal single-module project: source/test/resource roots, excluded
  // target directory and the scala-library dependency.
  def testSimple() = runTest(
    new project("simple") {
      // the scala-library jar is resolved from the local Ivy cache
      lazy val scalaLibrary = new library("SBT: org.scala-lang:scala-library:2.11.6:jar") {
        classes += (IvyCacheDir / "org.scala-lang" / "scala-library" / "jars" / "scala-library-2.11.6.jar").getAbsolutePath
      }

      libraries += scalaLibrary

      modules += new module("simple") {
        contentRoots += getProjectPath
        ProjectStructureDsl.sources := Seq("src/main/scala", "src/main/java")
        testSources := Seq("src/test/scala", "src/test/java")
        resources := Seq("src/main/resources")
        testResources := Seq("src/test/resources")
        excluded := Seq("target")
        libraryDependencies += scalaLibrary
      }

      // synthetic "-build" module for the sbt build definition itself
      modules += new module("simple-build") {
        ProjectStructureDsl.sources := Seq("")
        excluded := Seq("project/target", "target")
      }
    })

  // Inter-project dependencies are imported as exported module dependencies.
  def testMultiModule() = runTest(
    new project("multiModule") {
      lazy val foo = new module("foo") {
        moduleDependencies += new dependency(bar) {
          isExported := true
        }
      }

      lazy val bar = new module("bar")
      lazy val root = new module("multiModule")

      modules := Seq(root, foo, bar)
    })

  // Jars under lib/ become an "unmanaged-jars" library on the module.
  def testUnmanagedDependency() = runTest(
    new project("unmanagedDependency") {
      modules += new module("unmanagedDependency") {
        lazy val unmanagedLibrary = new library("SBT: unmanaged-jars") {
          classes += (testProjectDir / "lib" / "unmanaged.jar").getAbsolutePath
        }
        libraries += unmanagedLibrary
        libraryDependencies += unmanagedLibrary
      }
    }
  )

  // A shared source directory is imported as a separate "-sources" module
  // that both sharing modules depend on.
  def testSharedSources() = runTest(
    new project("sharedSources") {
      lazy val sharedSourcesModule = new module("sharedSources-sources") {
        contentRoots += getProjectPath + "/shared"
        ProjectStructureDsl.sources += "src/main/scala"
      }

      lazy val foo = new module("foo") {
        moduleDependencies += sharedSourcesModule
      }

      lazy val bar = new module("bar") {
        moduleDependencies += sharedSourcesModule
      }

      modules := Seq(foo, bar, sharedSourcesModule)
    }
  )

  // Directories configured as excluded in the build appear as excluded roots.
  def testExcludedDirectories() = runTest(
    new project("root") {
      modules += new module("root") {
        excluded := Seq(
          "directory-to-exclude-1",
          "directory/to/exclude/2"
        )
      }
    }
  )
}
| ilinum/intellij-scala | test/org/jetbrains/sbt/project/ProjectImportingTest.scala | Scala | apache-2.0 | 2,693 |
package beppo2k.ftp.util
import java.text.SimpleDateFormat
import java.util.Date
import java.io.PrintStream
/**
 * A log severity level. `level` orders levels numerically (higher is more
 * severe); `levelStr` is the tag printed in each log line.
 */
trait LogLevel {
  val level: Int
  val levelStr: String

  // Render the level as its tag; the explicit `return` in the original was
  // non-idiomatic Scala — the last expression is the result.
  override def toString(): String = levelStr
}

case class Debug(level: Int = 0, levelStr: String = "DEBUG") extends LogLevel
case class Info(level: Int = 1, levelStr: String = "INFO") extends LogLevel
case class Warn(level: Int = 2, levelStr: String = "WARN") extends LogLevel
case class Error(level: Int = 3, levelStr: String = "ERROR") extends LogLevel
/**
 * Minimal leveled logger writing timestamped lines to stdout/stderr.
 *
 * Messages below [[Log.logLevel]] are suppressed. `format` is a
 * `String.format`-style pattern applied to the trailing arguments.
 */
object Log {

  // Minimum level that is actually written; mutable global configuration.
  var logLevel: LogLevel = Info()

  // SimpleDateFormat is NOT thread-safe; all use below is synchronized on it.
  // (Kept public for backward compatibility with existing callers.)
  val dateFormatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")

  def debug(format: String, param: Any*) = {
    writeLog(System.out, Debug(), format, param: _*)
  }

  def info(format: String, param: Any*) = {
    writeLog(System.out, Info(), format, param: _*)
  }

  def warn(format: String, param: Any*) = {
    writeLog(System.out, Warn(), format, param: _*)
  }

  def error(format: String, param: Any*) = {
    writeLog(System.err, Error(), format, param: _*)
  }

  /**
   * Formats and writes one log line if `level` passes the threshold.
   * Formatting errors (e.g. mismatched pattern/arguments) are reported via
   * printStackTrace instead of propagating to the caller.
   */
  private def writeLog(stream: PrintStream, level: LogLevel, format: String, param: Any*) = {
    if (logLevel.level <= level.level) {
      // Synchronize: concurrent use of a shared SimpleDateFormat corrupts
      // its internal state and can produce garbled timestamps.
      val timestamp = dateFormatter.synchronized { dateFormatter.format(new Date()) }
      val f = timestamp + " [" + level.toString() + "] " + format
      try {
        val s = f.format(param: _*)
        stream.println(s)
      } catch {
        case e: Exception => {
          e.printStackTrace()
        }
      }
    }
  }
}
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.tools.stats
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.accumulo.data.AccumuloDataStore
import org.locationtech.geomesa.accumulo.tools.{AccumuloDataStoreCommand, AccumuloDataStoreParams}
import org.locationtech.geomesa.tools.stats.{StatsHistogramCommand, StatsHistogramParams}
/**
 * CLI command binding the generic stats-histogram logic to Accumulo-backed
 * GeoMesa data stores.
 */
class AccumuloStatsHistogramCommand extends StatsHistogramCommand[AccumuloDataStore] with AccumuloDataStoreCommand {
  override val params = new AccumuloStatsHistogramParams
}

/** JCommander parameter bean combining histogram options with Accumulo connection options. */
@Parameters(commandDescription = "View or calculate counts of attribute in a GeoMesa feature type, grouped by sorted values")
class AccumuloStatsHistogramParams extends StatsHistogramParams with AccumuloDataStoreParams
| tkunicki/geomesa | geomesa-accumulo/geomesa-accumulo-tools/src/main/scala/org/locationtech/geomesa/accumulo/tools/stats/AccumuloStatsHistogramCommand.scala | Scala | apache-2.0 | 1,205 |
package im.actor.server.mtproto.transport
import scodec.bits.BitVector
/** MTProto transport-level pong frame carrying the random bytes of the corresponding ping. */
@SerialVersionUID(1L)
case class Pong(randomBytes: BitVector) extends MTProto {
  val header = Pong.header
}

object Pong {
  // wire header identifying a Pong packet
  val header = 0x2
}
| boneyao/actor-platform | actor-server/actor-models/src/main/scala/im/actor/server/mtproto/transport/Pong.scala | Scala | mit | 218 |
// Compiler regression test: checks that implicit search for Equiv[A] via
// `equivA` (whose own implicit argument Equiv[Seq[String]] is provided by
// `ss`) terminates without a divergence error. The sys.error bodies are never
// executed — only compile-time implicit resolution matters here, so the code
// must not be "simplified".
class A(val options: Seq[String])
object Test {
  implicit def ss: Equiv[Seq[String]] = sys.error("dummy")
  implicit def equivA(implicit seqEq: Equiv[Seq[String]]): Equiv[A] = sys.error("dummy")
  implicitly[Equiv[A]]
}
| yusuke2255/dotty | tests/untried/pos/relax_implicit_divergence.scala | Scala | bsd-3-clause | 222 |
/**
* Copyright 2015 Mohiva Organisation (license at mohiva dot com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mohiva.play.silhouette.impl.providers.custom
import com.mohiva.play.silhouette.api.LoginInfo
import com.mohiva.play.silhouette.api.util.{ ExtractableRequest, HTTPLayer, MockWSRequest }
import com.mohiva.play.silhouette.impl.exceptions.{ ProfileRetrievalException, UnexpectedResponseException }
import com.mohiva.play.silhouette.impl.providers.OAuth2Provider._
import com.mohiva.play.silhouette.impl.providers.SocialProfileBuilder._
import com.mohiva.play.silhouette.impl.providers._
import com.mohiva.play.silhouette.impl.providers.oauth2.FacebookProvider._
import com.mohiva.play.silhouette.impl.providers.oauth2.{ BaseFacebookProvider, FacebookProfileParser }
import play.api.libs.json.{ JsValue, Json }
import play.api.test.{ FakeRequest, WithApplication }
import test.Helper
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{ ExecutionContext, Future }
/**
 * Test case for the [[com.mohiva.play.silhouette.impl.providers.oauth2.FacebookProvider]] class which uses a custom social profile.
 *
 * Exercises the OAuth2 flow with mocked HTTP requests/responses and a
 * provider subclass that parses an extra `gender` field into a custom
 * profile type.
 */
class FacebookProviderSpec extends OAuth2ProviderSpec {

  "The `withSettings` method" should {
    "create a new instance with customized settings" in new WithApplication with Context {
      val s = provider.withSettings { s =>
        s.copy(accessTokenURL = "new-access-token-url")
      }

      s.settings.accessTokenURL must be equalTo "new-access-token-url"
    }
  }

  "The `authenticate` method" should {
    "fail with UnexpectedResponseException for an unexpected response" in new WithApplication with Context {
      // stub the access-token exchange to return an HTTP 401
      val wsRequest = mock[MockWSRequest]
      val wsResponse = mock[MockWSRequest#Response]
      implicit val req = FakeRequest(GET, "?" + Code + "=my.code")
      wsResponse.status returns 401
      wsResponse.body returns "Unauthorized"
      wsRequest.withHttpHeaders(any) returns wsRequest
      wsRequest.post[Map[String, Seq[String]]](any)(any) returns Future.successful(wsResponse)
      httpLayer.url(oAuthSettings.accessTokenURL) returns wsRequest
      stateProvider.unserialize(anyString)(any[ExtractableRequest[String]], any[ExecutionContext]) returns Future.successful(state)
      stateProvider.state(any[ExecutionContext]) returns Future.successful(state)

      failed[UnexpectedResponseException](provider.authenticate()) {
        case e => e.getMessage must startWith(UnexpectedResponse.format(provider.id, "Unauthorized", 401))
      }
    }

    "fail with UnexpectedResponseException if OAuth2Info can be build because of an unexpected response" in new WithApplication with Context {
      // a 200 with an empty JSON object cannot be parsed into OAuth2Info
      val wsRequest = mock[MockWSRequest]
      val wsResponse = mock[MockWSRequest#Response]
      implicit val req = FakeRequest(GET, "?" + Code + "=my.code")
      wsResponse.status returns 200
      wsResponse.json returns Json.obj()
      wsRequest.withHttpHeaders(any) returns wsRequest
      wsRequest.post[Map[String, Seq[String]]](any)(any) returns Future.successful(wsResponse)
      httpLayer.url(oAuthSettings.accessTokenURL) returns wsRequest
      stateProvider.unserialize(anyString)(any[ExtractableRequest[String]], any[ExecutionContext]) returns Future.successful(state)
      stateProvider.state(any[ExecutionContext]) returns Future.successful(state)

      failed[UnexpectedResponseException](provider.authenticate()) {
        case e => e.getMessage must startWith(InvalidInfoFormat.format(provider.id, ""))
      }
    }

    "return the auth info" in new WithApplication with Context {
      // successful token exchange yields the fixture's OAuth2Info
      val wsRequest = mock[MockWSRequest]
      val wsResponse = mock[MockWSRequest#Response]
      implicit val req = FakeRequest(GET, "?" + Code + "=my.code")
      wsResponse.status returns 200
      wsResponse.json returns oAuthInfo
      wsRequest.withHttpHeaders(any) returns wsRequest
      wsRequest.post[Map[String, Seq[String]]](any)(any) returns Future.successful(wsResponse)
      httpLayer.url(oAuthSettings.accessTokenURL) returns wsRequest
      stateProvider.unserialize(anyString)(any[ExtractableRequest[String]], any[ExecutionContext]) returns Future.successful(state)
      stateProvider.state(any[ExecutionContext]) returns Future.successful(state)

      authInfo(provider.authenticate())(_ must be equalTo oAuthInfo.as[OAuth2Info])
    }
  }

  "The `retrieveProfile` method" should {
    "fail with ProfileRetrievalException if API returns error" in new WithApplication with Context {
      val wsRequest = mock[MockWSRequest]
      val wsResponse = mock[MockWSRequest#Response]
      wsResponse.status returns 400
      wsResponse.json returns Helper.loadJson("providers/custom/facebook.error.json")
      wsRequest.get() returns Future.successful(wsResponse)
      httpLayer.url(API.format("my.access.token")) returns wsRequest

      failed[ProfileRetrievalException](provider.retrieveProfile(oAuthInfo.as[OAuth2Info])) {
        case e => e.getMessage must equalTo(SpecifiedProfileError.format(
          provider.id,
          "An active access token must be used to query information about the current user.",
          "OAuthException",
          2500))
      }
    }

    "fail with ProfileRetrievalException if an unexpected error occurred" in new WithApplication with Context {
      val wsRequest = mock[MockWSRequest]
      val wsResponse = mock[MockWSRequest#Response]
      wsResponse.status returns 500
      wsResponse.json throws new RuntimeException("")
      wsRequest.get() returns Future.successful(wsResponse)
      httpLayer.url(API.format("my.access.token")) returns wsRequest

      failed[ProfileRetrievalException](provider.retrieveProfile(oAuthInfo.as[OAuth2Info])) {
        case e => e.getMessage must equalTo(UnspecifiedProfileError.format(provider.id))
      }
    }

    "return the social profile" in new WithApplication with Context {
      val wsRequest = mock[MockWSRequest]
      val wsResponse = mock[MockWSRequest#Response]
      wsResponse.status returns 200
      wsResponse.json returns Helper.loadJson("providers/custom/facebook.success.json")
      wsRequest.get() returns Future.successful(wsResponse)
      httpLayer.url(API.format("my.access.token")) returns wsRequest

      profile(provider.retrieveProfile(oAuthInfo.as[OAuth2Info])) { p =>
        p must be equalTo CustomSocialProfile(
          loginInfo = LoginInfo(provider.id, "134405962728980"),
          firstName = Some("Apollonia"),
          lastName = Some("Vanova"),
          fullName = Some("Apollonia Vanova"),
          email = Some("apollonia.vanova@watchmen.com"),
          avatarURL = Some("https://fbcdn-sphotos-g-a.akamaihd.net/hphotos-ak-ash2/t1/36245_155530314499277_2350717_n.jpg?lvh=1"),
          gender = Some("male")
        )
      }
    }
  }

  /**
   * Defines the context for the abstract OAuth2 provider spec.
   *
   * @return The Context to use for the abstract OAuth2 provider spec.
   */
  override protected def context: OAuth2ProviderSpecContext = new Context {}

  /**
   * The context.
   */
  trait Context extends OAuth2ProviderSpecContext {

    /**
     * The OAuth2 settings.
     */
    override lazy val oAuthSettings = spy(OAuth2Settings(
      authorizationURL = Some("https://graph.facebook.com/oauth/authorize"),
      accessTokenURL = "https://graph.facebook.com/oauth/access_token",
      redirectURL = Some("https://www.mohiva.com"),
      clientID = "my.client.id",
      clientSecret = "my.client.secret",
      scope = Some("email")))

    /**
     * The OAuth2 info returned by Facebook.
     *
     * @see https://developers.facebook.com/docs/facebook-login/access-tokens
     */
    override lazy val oAuthInfo = Helper.loadJson("providers/oauth2/facebook.access.token.json")

    /**
     * The provider to test.
     */
    lazy val provider = new CustomFacebookProvider(httpLayer, stateProvider, oAuthSettings)
  }

  /**
   * A custom social profile for testing purpose.
   * Extends the common profile data with a `gender` field.
   */
  case class CustomSocialProfile(
    loginInfo: LoginInfo,
    firstName: Option[String] = None,
    lastName: Option[String] = None,
    fullName: Option[String] = None,
    email: Option[String] = None,
    avatarURL: Option[String] = None,
    gender: Option[String] = None) extends SocialProfile

  /**
   * A custom Facebook profile parser for testing purpose.
   * Delegates to the common parser and adds the `gender` field from the JSON.
   */
  class CustomFacebookProfileParser extends SocialProfileParser[JsValue, CustomSocialProfile, OAuth2Info] {

    /**
     * The common social profile parser.
     */
    val commonParser = new FacebookProfileParser

    /**
     * Parses the social profile.
     *
     * @param json The content returned from the provider.
     * @param authInfo The auth info to query the provider again for additional data.
     * @return The social profile from given result.
     */
    def parse(json: JsValue, authInfo: OAuth2Info) = commonParser.parse(json, authInfo).map { commonProfile =>
      val gender = (json \\ "gender").as[String]
      CustomSocialProfile(
        loginInfo = commonProfile.loginInfo,
        firstName = commonProfile.firstName,
        lastName = commonProfile.lastName,
        fullName = commonProfile.fullName,
        avatarURL = commonProfile.avatarURL,
        email = commonProfile.email,
        gender = Some(gender))
    }
  }

  /**
   * The custom Facebook OAuth2 Provider.
   *
   * @param httpLayer The HTTP layer implementation.
   * @param stateHandler The state provider implementation.
   * @param settings The provider settings.
   */
  class CustomFacebookProvider(
    protected val httpLayer: HTTPLayer,
    protected val stateHandler: SocialStateHandler,
    val settings: OAuth2Settings)
    extends BaseFacebookProvider {

    /**
     * The type of this class.
     */
    type Self = CustomFacebookProvider

    /**
     * The type of the profile a profile builder is responsible for.
     */
    type Profile = CustomSocialProfile

    /**
     * The profile parser.
     */
    val profileParser = new CustomFacebookProfileParser

    /**
     * Gets a provider initialized with a new settings object.
     *
     * @param f A function which gets the settings passed and returns different settings.
     * @return An instance of the provider initialized with new settings.
     */
    def withSettings(f: (Settings) => Settings) = {
      new CustomFacebookProvider(httpLayer, stateHandler, f(settings))
    }
  }
}
| mohiva/play-silhouette | silhouette/test/com/mohiva/play/silhouette/impl/providers/custom/FacebookProviderSpec.scala | Scala | apache-2.0 | 10,927 |
package be.wegenenverkeer.atomium.server.jdbc
import java.sql.ResultSet
import org.joda.time.DateTime
/**
* The entry model as it is stored in the DB.
*
* @param sequenceNo The unique DB id, this has no business meaning to the outside world, except to the extend that it is used
* to define the order of the entries.
* @param uuid The business id for this entry.
* @param value The actual data stored in the entry, serialized as a sring.
* @param timestamp The time this entry was created and stored.
*/
case class EntryDbModel(sequenceNo: Option[Long], uuid: String, value: String, timestamp: DateTime)

object EntryDbModel {

  /**
   * Builds an [[EntryDbModel]] from the current row of the given result set.
   *
   * @param rs a result set positioned on a row of the entries table
   */
  def apply(rs: ResultSet): EntryDbModel = EntryDbModel(
    sequenceNo = Some(rs.getLong(EntryDbModel.Table.idColumn)),
    uuid = rs.getString(EntryDbModel.Table.uuidColumn),
    value = rs.getString(EntryDbModel.Table.valueColumn),
    // Use getTimestamp, not getDate: java.sql.Date carries only the date
    // portion, so reading the timestamp column with getDate silently
    // truncated entry creation times to midnight.
    timestamp = new DateTime(rs.getTimestamp(EntryDbModel.Table.timestampColumn))
  )

  /** Column names of the entries table. */
  object Table {
    val idColumn = "id"
    val uuidColumn = "uuid"
    val valueColumn = "value"
    val timestampColumn = "timestamp"
  }
}
| joachimvda/atomium | modules/server-jdbc/src/main/scala/be/wegenenverkeer/atomium/server/jdbc/EntryDbModel.scala | Scala | mit | 1,118 |
/*
* Copyright (c) 2014 Robert Conrad - All Rights Reserved.
* Unauthorized copying of this file, via any medium is strictly prohibited.
* This file is proprietary and confidential.
* Last modified by rconrad, 12/24/14 4:37 PM
*/
package base.rest.route
import base.common.logging.Loggable
import base.entity.ApiStrings
import base.entity.error.{ ApiException, ApiError }
import base.entity.perm.PermException
import org.json4s.native.Serialization
import spray.http.StatusCodes._
import spray.http.{ ContentType, HttpEntity, MediaTypes, StatusCodes }
import spray.httpx.Json4sSupport
import spray.routing.{ ExceptionHandler, HttpService, MalformedRequestContentRejection, RejectionHandler }
import spray.util.LoggingContext
/**
* Separates out the OOB routing event handlers so that they can be mixed in to tests.
* @author rconrad
*/
private[rest] trait RoutingHandlers extends HttpService with Json4sSupport with Loggable {

  private val errorPrefix = "The request content was malformed: "

  /**
   * Overrides for specific rejection handlers
   */
  private val customRejectionHandlers = RejectionHandler.apply {
    case MalformedRequestContentRejection(msg, _) :: _ =>
      // we drop all lines but the first from content rejections since they are meaningless to
      // API users and may inadvertently expose internals. (would be nice to catch this
      // at the source but since that's deep in json4s it's not happening)
      val errorParts = msg.split("\\n")
      debug(errorPrefix + errorParts.mkString(". "))
      super[HttpService].complete(BadRequest, errorPrefix + errorParts.head)
    case x => RejectionHandler.Default(x)
  }

  /**
   * Rather than responding with a standard text body rejection, respond with a json object describing errors
   *
   * NOTA BENE: After a great deal of debugging I found that the rejections handler
   * ABSOLUTELY MUST be on the top level actor to catch every possible rejection
   * (particularly errors in parsing or extracting JSON). This must be here.
   */
  implicit val customRejectionHandler = RejectionHandler {
    case rejections =>
      compressResponse() {
        mapHttpResponse { response =>
          val asString = response.entity.asString
          // Strip surrounding double quotes when present. The length guard
          // fixes a StringIndexOutOfBoundsException the previous
          // charAt(0)/charAt(length - 1) check threw for empty bodies, and a
          // substring(1, 0) crash for the one-character body "\"".
          val unquoted =
            if (asString.length >= 2 && asString.head == '"' && asString.last == '"')
              asString.substring(1, asString.length - 1)
            else
              asString
          val noNewlines = unquoted.replace("\\n", " ")
          val errorString = Serialization.write(ApiError(noNewlines, response.status))
          response
            .withEntity(HttpEntity(ContentType(MediaTypes.`application/json`), errorString))
            .withHeaders(RoutingActor.corsHeaders)
        } {
          customRejectionHandlers(rejections)
        }
      }
  }

  /**
   * See notes for customRejectionHandler above
   */
  implicit def customExceptionHandler(implicit log: LoggingContext) = ExceptionHandler {
    case t: Throwable =>
      compressResponse() {
        respondWithHeaders(RoutingActor.corsHeaders) {
          requestUri { uri =>
            error("Request threw ", t)
            // idiomatic if/else instead of matching on a Boolean
            val status =
              if (t.isInstanceOf[ApiException]) t.asInstanceOf[ApiException].status
              else StatusCodes.InternalServerError
            val apiError = ApiError(ApiStrings.serverErrorCodeDesc, status, t)
            // NOTE(review): the HTTP status is forced to 500 even when `status`
            // (e.g. taken from an ApiException) differs — the ApiError body
            // carries `status` but the response line does not. Confirm this
            // divergence is intentional.
            respondWithStatus(StatusCodes.InternalServerError).apply { ctx =>
              ctx.complete(apiError)
            }
          }
        }
      }
  }
}
| robconrad/base-api | project-rest/src/main/scala/base/rest/route/RoutingHandlers.scala | Scala | mit | 3,610 |
/**
* Copyright (C) 2012 Inria, University Lille 1.
*
* This file is part of PowerAPI.
*
* PowerAPI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* PowerAPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with PowerAPI. If not, see <http://www.gnu.org/licenses/>.
*
* Contact: powerapi-user-list@googlegroups.com.
*/
package fr.inria.powerapi.processor.aggregator.process
import fr.inria.powerapi.processor.aggregator.timestamp.AggregatedMessage
import fr.inria.powerapi.processor.aggregator.timestamp.TimestampAggregator
import fr.inria.powerapi.core.Tick
import fr.inria.powerapi.core.TickSubscription
/**
 * Aggregates FormulaMessages by their timestamps and processes.
 *
 * For each cached timestamp, the messages are grouped per monitored process
 * and each group is folded into a single [[AggregatedMessage]] covering all
 * devices ("all").
 *
 * @author abourdon
 */
class ProcessAggregator extends TimestampAggregator {
  /** Builds one aggregated message per process present in the cache for `timestamp`. */
  def byProcesses(implicit timestamp: Long): Iterable[AggregatedMessage] = {
    val base = cache(timestamp)
    val perProcess = base.messages.groupBy(_.tick.subscription.process)
    perProcess.map {
      case (process, processMessages) =>
        AggregatedMessage(
          tick = Tick(TickSubscription(process, base.tick.subscription.duration), timestamp),
          device = "all",
          messages = processMessages)
    }
  }

  /** Publishes every per-process aggregate for the given timestamp. */
  override def send(implicit timestamp: Long) {
    byProcesses.foreach(publish)
  }
}
package user
import akka.actor.ActorSystem
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import authentication.{UserAuthResult, UserRegistrationRequest, UserAuthService, UserAuthRepository}
import core.ErrorWrapper
import org.scalatest.time.{Millis, Seconds, Span}
import token.TokenRepository
import utils.{FlatSpecWithRedis, FlatSpecWithSql, BaseRoutesSpec}
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import user.UserJsonProtocol._
import core.CommonJsonProtocol._
/**
 * Route-level tests for [[UserRouter]], backed by the test SQL database and
 * Redis instance (real repositories/services, no mocks).
 */
class UserRouterSpec extends BaseRoutesSpec with FlatSpecWithSql with FlatSpecWithRedis {
  spec =>

  override implicit val actorSystem: ActorSystem = spec.system

  // wire the router under test with concrete repositories and services
  val tokenRepo = new TokenRepository
  val userRepo = new UserRepository
  val userAuthRepo = new UserAuthRepository
  val userAuthService = new UserAuthService(tokenRepo, userAuthRepo, userRepo)
  val userService = new UserService(tokenRepo, userAuthRepo, userRepo)

  val userRouter = new UserRouter with TestRoutesSupport {
    override val userService = spec.userService
    override implicit val redis = spec.redis
  }

  // sealed so rejections become proper HTTP responses
  val routes = Route.seal(userRouter.userRoutes)

  // how long it should wait before declaring that the future has timed out
  implicit val defaultPatience = PatienceConfig(timeout = Span(2, Seconds), interval = Span(50, Millis))

  // Registers a fresh user with the given role, then runs `body` with a
  // request transformer adding the issued Auth-Token header (empty token if
  // registration fails, which makes authenticated requests fail too).
  private def withLoggedInUser(email: String, password: String, role: Int = 0)(body: RequestTransformer => Unit) = {
    userAuthService.register(UserRegistrationRequest(email, password, "Jan", "Kowalski", 1, role)).futureValue match {
      case UserAuthResult.Success(token) => body(addHeader("Auth-Token", token))
      case _ => body(addHeader("Auth-Token", ""))
    }
  }

  "GET /me" should "get logged user data" in {
    withLoggedInUser("jan@gmail.com", "pass") { transform =>
      Get("/users/me") ~> transform ~> routes ~> check {
        status should be(StatusCodes.OK)
        entityAs[User].firstName should be("Jan")
      }
    }
  }

  "GET /me" should "result in an error when user is not authenticated" in {
    Get("/users/me") ~> routes ~> check {
      status should be(StatusCodes.Unauthorized)
    }
  }

  "PUT /me" should "update logged user data" in {
    withLoggedInUser("jan@gmail.com", "pass") { transform =>
      Put("/users/me", UserUpdateRequest("Marcin", "Nowak", 1, None, None, None)) ~> transform ~> routes ~> check {
        status should be(StatusCodes.OK)
      }
    }
  }

  "GET /" should "get users" in {
    registerRandomUsers(40)
    withLoggedInUser("jan@gmail.com", "pass") { transform =>
      Get("/users?offset=0&limit=10") ~> transform ~> routes ~> check {
        status should be(StatusCodes.OK)
        val usersResponse = entityAs[UsersResponse]
        usersResponse.count should be(10)
        usersResponse.users should have size 10
      }
    }
  }

  "GET /10" should "get user data with id=10" in {
    registerRandomUsers(40)
    withLoggedInUser("jan@gmail.com", "pass") { transform =>
      Get("/users/10") ~> transform ~> routes ~> check {
        status should be(StatusCodes.OK)
        entityAs[User].lastName should be("Kowalski10")
      }
    }
  }

  // relies on the per-test database reset: no user with id=10 exists here
  "GET /10 with invalid user id" should "return 404 with an error message" in {
    withLoggedInUser("jan@gmail.com", "pass") { transform =>
      Get("/users/10") ~> transform ~> routes ~> check {
        status should be(StatusCodes.NotFound)
        entityAs[ErrorWrapper].userMessage should be("User with id=10 does not exist")
      }
    }
  }

  "DELETE /10 with wrong user role" should "return 403 with an error message" in {
    registerRandomUsers(20)
    withLoggedInUser("jan@gmail.com", "pass", 0) { transform =>
      Delete("/users/10") ~> transform ~> routes ~> check {
        status should be(StatusCodes.Forbidden)
        entityAs[ErrorWrapper].userMessage should be("User is not authorized to execute the request")
      }
    }
  }

  "DELETE /10" should "delete user with id=10" in {
    registerRandomUsers(20)
    withLoggedInUser("jan@gmail.com", "pass", 1) { transform =>
      Delete("/users/10") ~> transform ~> routes ~> check {
        status should be(StatusCodes.OK)
      }
    }
  }

  "DELETE /10 with invalid user id" should "return 404 with an error message" in {
    withLoggedInUser("jan@gmail.com", "pass", 1) { transform =>
      Delete("/users/10") ~> transform ~> routes ~> check {
        status should be(StatusCodes.NotFound)
      }
    }
  }

  "PATCH /10/role" should "update user role with user id equal 10" in {
    registerRandomUsers(20)
    withLoggedInUser("jan@gmail.com", "pass", 1) { transform =>
      Patch("/users/10/role", UserRoleUpdateRequest(1)) ~> transform ~> routes ~> check {
        status should be(StatusCodes.OK)
      }
    }
  }

  // NOTE(review): test name says DELETE but the request issued is PATCH —
  // the name looks like a copy/paste slip; the assertion itself is valid.
  "DELETE /10/role with invalid user id" should "return 404 with an error message" in {
    withLoggedInUser("jan@gmail.com", "pass", 1) { transform =>
      Patch("/users/10/role", UserRoleUpdateRequest(1)) ~> transform ~> routes ~> check {
        status should be(StatusCodes.NotFound)
      }
    }
  }

  // Seeds `number` users with distinct emails/last names (ids are sequential).
  private def registerRandomUsers(number: Int) {
    for (i <- 1 to number) {
      userAuthService.register(UserRegistrationRequest(s"jan.kowalski$i@gmail.com", "password", "Jan", s"Kowalski$i", 1, 0)).futureValue
    }
  }
}
| piobab/akka-http-rest-api | src/test/scala/user/UserRouterSpec.scala | Scala | mit | 5,328 |
package br.gov.lexml.parser.pl.ws.tasks
import br.gov.lexml.parser.pl.output.LexmlRenderer
import scala.util.matching.Regex
/**
 * Renders LexML URN fragments (e.g. "art1_par1u") into Portuguese prose
 * (e.g. "parágrafo único do artigo 1º").
 */
object FragmentFormatter {

  /** A component number: either the explicit "unique" marker (1u) or a plain number. */
  abstract sealed class Numero {
    val n: Int
  }
  /** The "único" marker; numerically 1. */
  final case object Unico extends Numero {
    override val n = 1
  }
  final case class Algum(n: Int) extends Numero

  // one fragment component: lowercase tag followed by dash-separated numbers, e.g. "art12-1u"
  val compRe: Regex = "^([a-z]+)((?:1u|[0-9-])*)$".r

  type Comp = (String, List[Numero])

  // parse one number token: "1u" is the unique marker, anything else a plain integer
  def readInt: String => Numero = {
    case "1u" => Unico
    case x => Algum(x.toInt)
  }

  /**
   * Formats a URN fragment into prose: components are parsed, rendered
   * individually and emitted innermost-first, each outer component prefixed
   * by "d" plus its gender article ("do"/"da").
   */
  def format(urnFrag: String): String = {
    val comps = urnFrag
      .split("_").toList
      .flatMap(compRe.findFirstMatchIn(_))
      .map(m => (m.group(1), m.group(2).split("-").toList.filter(!_.isEmpty).map(readInt)))
      .flatMap(formatComp(_))
      .reverse
    comps match {
      // Build the full word list first, then join with single spaces. The
      // previous mkString(" ", " ", "") appended its start separator even for
      // an empty tail, leaving a trailing space on single-component fragments.
      case (_, t) :: r => (t :: r.map { case (g, txt) => "d" + g + " " + txt }).mkString(" ")
      case _ => ""
    }
  }

  type FormattedComp = (String, String)

  // aggregator tags mapped to (gender article, Portuguese name)
  val agregadores: Map[String, (String, String)] = Map(
    "prt" -> ("a", "parte"),
    "liv" -> ("o", "livro"),
    "cap" -> ("o", "capítulo"),
    "sec" -> ("a", "seção"),
    "sub" -> ("a", "subseção")
  )

  /**
   * Formats one parsed component into (gender article, text); returns None
   * for components that are not rendered (e.g. "cpt", the caput) or are
   * unrecognized.
   */
  def formatComp: Comp => Option[FormattedComp] = {
    case ("art", Unico :: _) =>
      Some(("o", "artigo único"))
    case ("art", Algum(n) :: cs) =>
      Some(("o", "artigo " + formatOrdinal(n) + formatComplementos(cs)))
    case ("cpt", _) => None
    case ("par", Unico :: _) => Some(("o", "parágrafo único"))
    case ("par", Algum(n) :: cs) =>
      Some(("o", "parágrafo " + formatOrdinal(n) + formatComplementos(cs)))
    case ("inc", n :: cs) =>
      Some(("o", "inciso " + formatRomano(n.n).toUpperCase + formatComplementos(cs)))
    case ("ali", n :: cs) =>
      Some(("a", "alínea " + formatAlfa(n.n).toLowerCase + formatComplementos(cs)))
    case ("ite", n :: cs) =>
      Some(("o", "item " + n.n.toString + formatComplementos(cs)))
    case (tip, n :: cs) if agregadores contains tip =>
      val (g, t) = agregadores(tip)
      val ntxt = n match {
        case Unico => "único"
        case Algum(num) => formatRomano(num).toUpperCase
      }
      Some((g, t + " " + ntxt))
    case _ => None
  }

  def formatOrdinal(num: Int): String = LexmlRenderer.renderOrdinal(num)
  def formatRomano(n: Int): String = LexmlRenderer.renderRomano(n)
  def formatAlfa(n: Int): String = LexmlRenderer.renderAlphaSeq(n - 1)

  def formatComplementos(cs: List[Numero]): String = cs.map(c => formatComplemento(c.n)).map("-" + _).mkString("")
  // NOTE(review): after both decrements this is renderAlphaSeq(n - 2) —
  // confirm the double offset matches LexmlRenderer's indexing convention.
  def formatComplemento(n: Int): String = formatAlfa(n - 1).toUpperCase
}
package com.warningrc.test.scalatest
/** Small demo of collection operations; prints its results to stdout. */
object ScalaTestTest {
  def main(args: Array[String]): Unit = {
    // star triangle: one to nine asterisks per line
    for (width <- 1 to 9) println("*" * width)
    // even numbers strictly below 8
    for (candidate <- 1 until 8 if candidate % 2 == 0) println(candidate)
    // 9! — product of 1..9
    println((1 to 9).product)
  }
}
package net.javachallenge.entity
import net.javachallenge.api.TradeType
/**
 * Abstract class to represent a trade (either an [[Offer]] to sell or a
 * Demand to buy).
 *
 * @param publisherId the id of the player publishing the trade
 * @param material the material of the offer
 * @param amount the amount of material in the offer
 * @param price the price for ONE unity of the material
 */
abstract sealed class Trade private[entity] (val publisherId: Int, val material: Material, val amount: Int, val price: Int)
  extends net.javachallenge.api.PlayerTrade {
  require(material != null)
  require(amount > 0, "Amount must be positive")
  require(price > 0, "Price must be positive")

  override def getPlayerId = publisherId
  override def getMaterial = material
  override def getAmount = amount
  override def getPricePerOneMaterial = price
  // a trade is an Offer (sell) or, otherwise, a Demand (buy)
  override def getTradeType = if (this.isInstanceOf[Offer]) TradeType.Offer else TradeType.Demand

  /** Looks up the publishing player in the given game state. */
  def publisher(game: Game) = game.players(publisherId)

  /**
   * Executes the core of the transaction, changing the amount of material left in the trade.
   *
   * @param game the game instance which contains whole game states
   * @param customerId the id of the customer who wants to make the transaction with the trade
   * @param transactionAmount the amount of material to be traded in the transaction
   * @return the updated trade map and the two updated players
   * @throws TradeException if the transaction cannot be achieved
   */
  protected def makeCoreTransaction(game: Game, customerId: Int, transactionAmount: Int): (Map[(Int, Material), Trade], Player, Player)

  /**
   * Executes the transaction, changing the amount of material left in the trade.
   * Validates that no more than the published amount is traded, then delegates
   * to [[makeCoreTransaction]].
   *
   * @param game the game instance which contains whole game states
   * @param customerId the id of the customer who wants to make the transaction with the trade
   * @param transactionAmount the amount of material to be traded in the transaction
   * @throws TradeException if the transaction cannot be achieved
   */
  def makeTransaction(game: Game, customerId: Int, transactionAmount: Int) = {
    if (transactionAmount > amount) {
      throw new InvalidCommandException("The material amound for selling/buying should be less than the offered/demanded amount.")
    }
    makeCoreTransaction(game, customerId, transactionAmount)
  }

  /**
   * Cancels the transaction, returning the publisher with the trade's goods restored.
   */
  def cancel(player: Player): Player
}
/**
 * An offer published by a player who wants to sell material.
 * {@inheritdoc}
 * @constructor creates a new offer
 */
case class Offer private[entity] (sellerId: Int, offerMaterial: Material, offerAmount: Int, offerPrice: Int)
  extends Trade(sellerId, offerMaterial, offerAmount, offerPrice) {
  /**
   * {@inheritdoc}
   * Moves the payment from the buyer to the seller and hands the material to
   * the buyer; the material was already escrowed when the offer was published
   * (see [[Offer.publish]]).
   */
  def makeCoreTransaction(game: Game, buyerId: Int, transactionAmount: Int) = {
    val payment = transactionAmount * price
    val updatedSeller = publisher(game).changeMoney(payment)
    val updatedBuyer = game.players(buyerId).changeMoney(-payment).changeMaterial(material, transactionAmount)
    val remaining = offerAmount - transactionAmount
    val remainingTrades =
      if (remaining == 0) Map.empty
      else Map((publisherId, material) -> copy(offerAmount = remaining))
    (remainingTrades, updatedSeller, updatedBuyer)
  }
  /**
   * {@inheritdoc}
   * Returns the escrowed material to the seller.
   */
  override def cancel(player: Player) = {
    require(player.id == sellerId)
    player.changeMaterial(material, amount)
  }
}
/**
 * Companion object for [[Offer]] containing the factory method.
 */
object Offer {
  /**
   * Creates a new offer and escrows the offered material from the seller.
   * Returns the offer together with the updated seller.
   */
  def publish(game: Game, sellerId: Int, material: Material, amount: Int, price: Int) = {
    val escrowedSeller = game.players(sellerId).changeMaterial(material, -amount)
    (new Offer(sellerId, material, amount, price), escrowedSeller)
  }
}
/**
 * A demand published by a player who wants to buy material.
 * {@inheritdoc}
 * @constructor creates a new demand
 */
case class Demand private[entity] (buyerId: Int, demandMaterial: Material, demandAmount: Int, demandPrice: Int)
  extends Trade(buyerId, demandMaterial, demandAmount, demandPrice) {
  /**
   * {@inheritdoc}
   * Hands the material to the buyer and pays the seller; the buyer's money was
   * already escrowed when the demand was published (see [[Demand.publish]]).
   */
  override def makeCoreTransaction(game: Game, sellerId: Int, transactionAmount: Int) = {
    val payment = transactionAmount * price
    val updatedBuyer = publisher(game).changeMaterial(material, transactionAmount)
    val updatedSeller = game.players(sellerId).changeMoney(payment).changeMaterial(material, -transactionAmount)
    val remaining = demandAmount - transactionAmount
    val remainingTrades =
      if (remaining == 0) Map.empty
      else Map((publisherId, material) -> copy(demandAmount = remaining))
    (remainingTrades, updatedBuyer, updatedSeller)
  }
  /**
   * {@inheritdoc}
   * Refunds the escrowed money to the buyer.
   */
  override def cancel(player: Player) = {
    require(player.id == buyerId)
    player.changeMoney(price * amount)
  }
}
/**
 * Companion object for [[Demand]] containing the factory method.
 */
object Demand {
  /**
   * Creates a new demand and escrows the full price from the buyer.
   * Returns the demand together with the updated buyer.
   */
  def publish(game: Game, buyerId: Int, material: Material, amount: Int, price: Int) = {
    val escrowedBuyer = game.players(buyerId).changeMoney(-(price * amount))
    (new Demand(buyerId, material, amount, price), escrowedBuyer)
  }
}
| AI-comp/JavaChallenge2012 | src/main/scala/net/javachallenge/entity/Trade.scala | Scala | apache-2.0 | 5,391 |
import scala.quoted._
// Negative compilation test: every line marked "// error" must be rejected by
// the compiler because a value or splice is referenced at the wrong staging
// level. Do not "fix" these lines — the errors are the point of the test.
def test(using QuoteContext) = {
  val x: Int = 0

  '{
    val qctx: QuoteContext = ???
    given qctx.type = qctx
    '{x + 1} // error: wrong staging level
    '{(y: Expr[Int]) => $y } // error: wrong staging level
  }

  '{x + 1} // error: wrong staging level

  '{(y: Expr[Int]) => $y } // error: wrong staging level

}
| som-snytt/dotty | tests/neg/quote-0.scala | Scala | apache-2.0 | 356 |
/*
* Copyright 2015 - 2016 Red Bull Media House GmbH <http://www.redbullmediahouse.com> - all rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rbmhtechnology.eventuate.crdt.pure
/**
 * Marker trait for protobuf-serializable CRDTs and operations.
 *
 * Carries no members of its own; it only tags types so a serializer can be
 * bound to them. Also extends [[java.io.Serializable]] as a fallback.
 */
trait CRDTFormat extends Serializable
| RBMHTechnology/eventuate | eventuate-crdt-pure/src/main/scala/com/rbmhtechnology/eventuate/crdt/pure/CRDTFormat.scala | Scala | apache-2.0 | 832 |
// Positive compilation test fixture: a trait exposing an abstract type member
// and an accessor returning that member type.
trait T:
  type X
  def x: X
// The result type t.X is path-dependent on the parameter `t`.
def test1(t: T): t.X = t.x
// Same as test1 but via an explicit `return`: this compilation test checks
// that `return` is typed correctly against a path-dependent result type.
def test2(t: T): t.X = return t.x
| dotty-staging/dotty | tests/pos/i9464.scala | Scala | apache-2.0 | 91 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import org.mockito.Mockito._
import uk.gov.hmrc.ct.accounts.{AccountsMoneyValidationFixture, MockAbridgedAccountsRetriever}
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.accounts.retriever.AccountsBoxRetriever
// Validation spec for box AC118, driven by the shared money-validation fixture.
class AC118Spec extends AccountsMoneyValidationFixture[Frs102AccountsBoxRetriever] with MockAbridgedAccountsRetriever {
  // Stub AC42/AC43 with fixed values so the fixture's retriever has the
  // related boxes available — presumably cross-referenced by AC118's
  // validation; confirm against the AC118 validation rules.
  override def setUpMocks(accountsBoxRetriever: AccountsBoxRetriever) = {
    super.setUpMocks(accountsBoxRetriever)
    import boxRetriever._
    when(ac42()).thenReturn(AC42(Some(100)))
    when(ac43()).thenReturn(AC43(Some(100)))
  }
  // Runs the shared suite: AC118 must be a valid money value with minimum 0.
  testAccountsMoneyValidationWithMin("AC118", 0, AC118.apply)
}
| liquidarmour/ct-calculations | src/test/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC118Spec.scala | Scala | apache-2.0 | 1,339 |
package mesosphere.marathon.core.launchqueue.impl
import akka.actor.{ ActorRef, ActorSystem }
import akka.pattern.ask
import akka.testkit.TestProbe
import akka.util.Timeout
import mesosphere.marathon.MarathonSpec
import mesosphere.marathon.core.base.ConstantClock
import mesosphere.marathon.core.task.tracker.TaskTracker
import mesosphere.marathon.state.{ AppDefinition, PathId }
import org.mockito.Mockito
import scala.concurrent.Await
import scala.concurrent.duration._
// Tests the RateLimiterActor's GetDelay/AddDelay/ResetDelay protocol against a
// fixed (constant) clock, using a TestProbe to observe pushed DelayUpdates.
class RateLimiterActorTest extends MarathonSpec {
  test("GetDelay gets current delay") {
    // Seed a delay directly on the underlying rate limiter, then query the actor.
    rateLimiter.addDelay(app)
    val delay = askLimiter(RateLimiterActor.GetDelay(app)).asInstanceOf[RateLimiterActor.DelayUpdate]
    assert(delay.delayUntil == clock.now() + backoff)
  }
  test("AddDelay increases delay and sends update") {
    // Adding a delay via the actor must both notify the receiver and be queryable.
    limiterRef ! RateLimiterActor.AddDelay(app)
    updateReceiver.expectMsg(RateLimiterActor.DelayUpdate(app, clock.now() + backoff))
    val delay = askLimiter(RateLimiterActor.GetDelay(app)).asInstanceOf[RateLimiterActor.DelayUpdate]
    assert(delay.delayUntil == clock.now() + backoff)
  }
  test("ResetDelay resets delay and sends update") {
    limiterRef ! RateLimiterActor.AddDelay(app)
    updateReceiver.expectMsg(RateLimiterActor.DelayUpdate(app, clock.now() + backoff))
    // After a reset the delay deadline collapses back to "now".
    limiterRef ! RateLimiterActor.ResetDelay(app)
    updateReceiver.expectMsg(RateLimiterActor.DelayUpdate(app, clock.now()))
    val delay = askLimiter(RateLimiterActor.GetDelay(app)).asInstanceOf[RateLimiterActor.DelayUpdate]
    assert(delay.delayUntil == clock.now())
  }
  // Synchronous ask helper; blocking is acceptable in this test context.
  private[this] def askLimiter(message: Any): Any = {
    Await.result(limiterRef ? message, 3.seconds)
  }
  private val backoff: FiniteDuration = 10.seconds
  private val backoffFactor: Double = 2.0
  private[this] val app = AppDefinition(id = PathId("/test"), backoff = backoff, backoffFactor = backoffFactor)
  private[this] implicit val timeout: Timeout = 3.seconds
  // Mutable fixture state, (re)initialized in `before` and torn down in `after`.
  private[this] implicit var actorSystem: ActorSystem = _
  private[this] var clock: ConstantClock = _
  private[this] var rateLimiter: RateLimiter = _
  private[this] var taskTracker: TaskTracker = _
  private[this] var updateReceiver: TestProbe = _
  private[this] var limiterRef: ActorRef = _
  before {
    actorSystem = ActorSystem()
    clock = ConstantClock()
    // Spy so tests can also drive the limiter directly (see first test).
    rateLimiter = Mockito.spy(new RateLimiter(clock))
    taskTracker = mock[TaskTracker]
    updateReceiver = TestProbe()
    val props = RateLimiterActor.props(rateLimiter, updateReceiver.ref)
    limiterRef = actorSystem.actorOf(props, "limiter")
  }
  after {
    Await.result(actorSystem.terminate(), Duration.Inf)
  }
}
| yp-engineering/marathon | src/test/scala/mesosphere/marathon/core/launchqueue/impl/RateLimiterActorTest.scala | Scala | apache-2.0 | 2,642 |
package org.jetbrains.plugins.scala.lang.psi.implicits
import com.intellij.openapi.progress.ProgressManager
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.psi.{PsiElement, PsiNamedElement}
import org.jetbrains.plugins.scala.autoImport.GlobalImplicitConversion
import org.jetbrains.plugins.scala.autoImport.GlobalMember.findGlobalMembers
import org.jetbrains.plugins.scala.caches.ModTracker
import org.jetbrains.plugins.scala.extensions.{PsiClassExt, PsiElementExt, PsiNamedElementExt}
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.typedef.MixinNodes
import org.jetbrains.plugins.scala.lang.psi.implicits.ImplicitCollector.ImplicitState
import org.jetbrains.plugins.scala.lang.psi.stubs.index.ImplicitConversionIndex
import org.jetbrains.plugins.scala.lang.psi.types.api.{Any, FunctionType, StdTypes}
import org.jetbrains.plugins.scala.lang.psi.types.recursiveUpdate.ScSubstitutor
import org.jetbrains.plugins.scala.lang.psi.types.result.Typeable
import org.jetbrains.plugins.scala.lang.psi.types.{ConstraintSystem, ConstraintsResult, ScParameterizedType, ScType}
import org.jetbrains.plugins.scala.lang.psi.{ElementScope, ScalaPsiUtil}
import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult
import org.jetbrains.plugins.scala.macroAnnotations.CachedInUserData
import org.jetbrains.plugins.scala.project.ProjectContext
import org.jetbrains.plugins.scala.util.CommonQualifiedNames.AnyFqn
import scala.annotation.tailrec
/**
 * Describes one candidate implicit conversion (its source/target types under a
 * substitutor) and decides whether it applies to a concrete type at a place.
 */
abstract class ImplicitConversionData {
  /** The PSI element that defines the conversion (function or function-typed value). */
  def element: PsiNamedElement
  protected def paramType: ScType
  protected def returnType: ScType
  protected def substitutor: ScSubstitutor
  /** Returns a copy of this data with the given substitutor applied. */
  def withSubstitutor(substitutor: ScSubstitutor): ImplicitConversionData
  override def toString: String = element.name
  /**
   * Checks whether this conversion can convert `fromType` at `place`.
   * Returns the application (result type + implicit parameters) when it can.
   */
  def isApplicable(fromType: ScType, place: PsiElement): Option[ImplicitConversionApplication] = {
    // to prevent infinite recursion
    if (PsiTreeUtil.isContextAncestor(element.nameContext, place, false))
      return None
    ProgressManager.checkCanceled()
    fromType.conforms(paramType, ConstraintSystem.empty, checkWeak = true) match {
      case ConstraintsResult.Left => None
      case _ =>
        element match {
          // Generic conversions need local type inference to fix type params.
          case f: ScFunction if f.hasTypeParameters =>
            returnTypeWithLocalTypeInference(f, fromType, place)
          case _ =>
            Some(ImplicitConversionApplication(returnType))
        }
    }
  }
  /** Convenience: the converted-to type, if the conversion applies. */
  def resultType(from: ScType, place: PsiElement): Option[ScType] =
    isApplicable(from: ScType, place: PsiElement).map(_.resultType)
  // Runs the ImplicitCollector machinery to infer the conversion's type
  // parameters for the concrete `fromType`, then extracts the result type.
  private def returnTypeWithLocalTypeInference(function: ScFunction,
                                               fromType: ScType,
                                               place: PsiElement): Option[ImplicitConversionApplication] = {
    implicit val projectContext: ProjectContext = function.projectContext
    implicit val elementScope: ElementScope = function.elementScope
    val functionType = FunctionType(Any, Seq(fromType.tryExtractDesignatorSingleton))
    val implicitState = ImplicitState(place, functionType, functionType, None, isImplicitConversion = true,
      searchImplicitsRecursively = 0, None, fullInfo = true, Some(ImplicitsRecursionGuard.currentMap))
    val resolveResult = new ScalaResolveResult(function, ScSubstitutor.empty)
    val collector = new ImplicitCollector(implicitState)
    val compatible = collector.checkFunctionByType(resolveResult, withLocalTypeInference = true, checkFast = false)
    for {
      srr <- compatible
      conversionType <- srr.implicitParameterType
      resultType <- resultType(conversionType)
    } yield {
      ImplicitConversionApplication(resultType, srr.implicitParameters)
    }
  }
  // Unwraps nested function types down to the final (non-function) result type.
  @tailrec
  private def resultType(conversionType: ScType, isResult: Boolean = false): Option[ScType] = conversionType match {
    case FunctionType(res, _) => resultType(res, isResult = true)
    case _ if isResult => Option(conversionType)
    case _ => None
  }
}
object ImplicitConversionData {
  def apply(globalConversion: GlobalImplicitConversion): Option[ImplicitConversionData] =
    ImplicitConversionData(globalConversion.function, globalConversion.substitutor)
  /**
   * Builds conversion data for an element, if it can act as a conversion:
   * either an `implicit def` or a parameterless member with a Function1 type.
   */
  def apply(element: PsiNamedElement, substitutor: ScSubstitutor): Option[ImplicitConversionData] = {
    ProgressManager.checkCanceled()
    element match {
      case function: ScFunction if function.isImplicitConversion => fromRegularImplicitConversion(function, substitutor)
      case function: ScFunction if !function.isParameterless => None
      case typeable: Typeable => fromElementWithFunctionType(typeable, substitutor)
      case _ => None
    }
  }
  /**
   * Collects all global implicit conversions applicable to the expression's
   * (non-implicit) type, indexed by candidate source-class FQN.
   */
  def getPossibleConversions(expr: ScExpression): Map[GlobalImplicitConversion, ImplicitConversionApplication] =
    expr.getTypeWithoutImplicits().toOption match {
      case None => Map.empty
      case Some(originalType) =>
        // Candidates are indexed by the FQN of the conversion's source class,
        // so look up the type's class, all its superclasses, and Any.
        val withSuperClasses = originalType.widen.extractClass match {
          case Some(clazz) => MixinNodes.allSuperClasses(clazz).map(_.qualifiedName) + clazz.qualifiedName + AnyFqn
          case _ => Set(AnyFqn)
        }
        val scope = expr.resolveScope
        (for {
          qName <- withSuperClasses
          function <- ImplicitConversionIndex.conversionCandidatesForFqn(qName, scope)(expr.getProject)
          if ImplicitConversionProcessor.applicable(function, expr)
          conversion <- findGlobalMembers(function, scope)(GlobalImplicitConversion)
          data <- ImplicitConversionData(conversion)
          application <- data.isApplicable(originalType, expr)
        } yield (conversion, application))
          .toMap
    }
  // Cached, substitutor-free check for an `implicit def` conversion.
  @CachedInUserData(function, ModTracker.libraryAware(function))
  private def rawCheck(function: ScFunction): Option[ImplicitConversionData] = {
    for {
      retType <- function.returnType.toOption
      param <- function.parameters.headOption
      paramType <- param.`type`().toOption
    } yield {
      new RegularImplicitConversionData(function, paramType, retType, ScSubstitutor.empty)
    }
  }
  // Cached, substitutor-free check for a member whose type conforms to Function1.
  @CachedInUserData(named, ModTracker.libraryAware(named))
  private def rawElementWithFunctionTypeCheck(named: PsiNamedElement with Typeable): Option[ImplicitConversionData] = {
    for {
      function1Type <- named.elementScope.cachedFunction1Type
      elementType <- named.`type`().toOption
      if elementType.conforms(function1Type)
    } yield {
      new ElementWithFunctionTypeData(named, elementType, ScSubstitutor.empty)
    }
  }
  private def fromRegularImplicitConversion(function: ScFunction,
                                            substitutor: ScSubstitutor): Option[ImplicitConversionData] = {
    rawCheck(function).map(_.withSubstitutor(substitutor))
  }
  private def fromElementWithFunctionType(named: PsiNamedElement with Typeable,
                                          substitutor: ScSubstitutor): Option[ImplicitConversionData] = {
    rawElementWithFunctionTypeCheck(named).map(_.withSubstitutor(substitutor))
  }
  // Conversion defined as an `implicit def from(p: P): R`.
  private class RegularImplicitConversionData(override val element: PsiNamedElement,
                                              rawParamType: ScType,
                                              rawReturnType: ScType,
                                              override val substitutor: ScSubstitutor) extends ImplicitConversionData {
    protected override lazy val paramType: ScType = {
      // Undefine the function's own type params so conformance checks can
      // later solve for them.
      val undefiningSubst = element match {
        case fun: ScFunction => ScalaPsiUtil.undefineMethodTypeParams(fun)
        case _ => ScSubstitutor.empty
      }
      substitutor.followed(undefiningSubst)(rawParamType)
    }
    protected override lazy val returnType: ScType = substitutor(rawReturnType)
    override def withSubstitutor(substitutor: ScSubstitutor): ImplicitConversionData =
      new RegularImplicitConversionData(element, rawParamType, rawReturnType, substitutor)
  }
  // Conversion defined as an implicit value whose type conforms to Function1.
  private class ElementWithFunctionTypeData(override val element: PsiNamedElement with Typeable,
                                            rawElementType: ScType,
                                            override val substitutor: ScSubstitutor = ScSubstitutor.empty)
    extends ImplicitConversionData {
    private def stdTypes = StdTypes.instance(element.getProject)
    // Lazily extracted (param, result) types of the underlying Function1.
    private lazy val functionTypeParams: Option[(ScType, ScType)] = {
      val undefiningSubst = element match {
        case fun: ScFunction => ScalaPsiUtil.undefineMethodTypeParams(fun)
        case _ => ScSubstitutor.empty
      }
      for {
        functionType <- element.elementScope.cachedFunction1Type
        elementType <- element.`type`().toOption.map(substitutor.followed(undefiningSubst))
        (paramType, retType) <- extractFunctionTypeParameters(elementType, functionType)
      } yield (paramType, retType)
    }
    // Fall back to Nothing/Any (bottom/top) when the function type can't be extracted.
    override protected def paramType: ScType = functionTypeParams.map(_._1).getOrElse(stdTypes.Nothing)
    override protected def returnType: ScType = functionTypeParams.map(_._2).getOrElse(stdTypes.Any)
    override def withSubstitutor(substitutor: ScSubstitutor): ImplicitConversionData =
      new ElementWithFunctionTypeData(element, rawElementType, substitutor)
    // Solves `candidate <: Function1[A, B]` and returns the substituted (A, B).
    private def extractFunctionTypeParameters(functionTypeCandidate: ScType,
                                              functionType: ScParameterizedType): Option[(ScType, ScType)] = {
      implicit val projectContext: ProjectContext = functionType.projectContext
      functionTypeCandidate.conforms(functionType, ConstraintSystem.empty) match {
        case ConstraintSystem(newSubstitutor) =>
          functionType.typeArguments.map(newSubstitutor) match {
            case Seq(argType, retType) => Some((argType, retType))
            case _ => None
          }
        case _ => None
      }
    }
  }
} | JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/implicits/ImplicitConversionData.scala | Scala | apache-2.0 | 10,129 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2009 Mark Harrah
*/
package sbt.internal.util
import jline.console.ConsoleReader
import jline.console.history.{ FileHistory, MemoryHistory }
import java.io.{ File, InputStream, PrintWriter }
import complete.Parser
import java.util.concurrent.atomic.AtomicBoolean
/**
 * LineReader backed by a jline ConsoleReader. Subclasses supply the reader
 * (with or without tab completion) and whether SIGCONT should be handled.
 */
abstract class JLine extends LineReader {
  // When true, re-initialize the terminal after the JVM is resumed (SIGCONT).
  protected[this] val handleCONT: Boolean
  protected[this] val reader: ConsoleReader
  /** Reads a line, optionally masking input (e.g. for passwords). Returns None on EOF. */
  def readLine(prompt: String, mask: Option[Char] = None) = JLine.withJLine { unsynchronizedReadLine(prompt, mask) }
  private[this] def unsynchronizedReadLine(prompt: String, mask: Option[Char]) =
    readLineWithHistory(prompt, mask) match {
      case null => None
      case x => Some(x.trim)
    }
  // Flush file-backed history to disk after every read so it survives crashes.
  private[this] def readLineWithHistory(prompt: String, mask: Option[Char]): String =
    reader.getHistory match {
      case fh: FileHistory =>
        try { readLineDirect(prompt, mask) }
        finally { fh.flush() }
      case _ => readLineDirect(prompt, mask)
    }
  private[this] def readLineDirect(prompt: String, mask: Option[Char]): String =
    if (handleCONT)
      Signals.withHandler(() => resume(), signal = Signals.CONT)(() => readLineDirectRaw(prompt, mask))
    else
      readLineDirectRaw(prompt, mask)
  private[this] def readLineDirectRaw(prompt: String, mask: Option[Char]): String =
    {
      val newprompt = handleMultilinePrompt(prompt)
      mask match {
        case Some(m) => reader.readLine(newprompt, m)
        case None => reader.readLine(newprompt)
      }
    }
  // jline cannot redraw multi-line prompts, so print all but the last line
  // directly and let jline manage only the final line.
  private[this] def handleMultilinePrompt(prompt: String): String = {
    val lines = """\\r?\\n""".r.split(prompt)
    lines.length match {
      case 0 | 1 => prompt
      case _ => reader.print(lines.init.mkString("\\n") + "\\n"); lines.last;
    }
  }
  // Re-initialize the terminal and redraw the current line after SIGCONT.
  private[this] def resume() {
    jline.TerminalFactory.reset
    JLine.terminal.init
    reader.drawLine()
    reader.flush()
  }
}
/** Factory and synchronized access for the shared jline terminal. */
private[sbt] object JLine {
  private[this] val TerminalProperty = "jline.terminal"
  fixTerminalProperty()
  // translate explicit class names to type in order to support
  // older Scala, since it shaded classes but not the system property
  private[sbt] def fixTerminalProperty(): Unit = {
    val newValue = System.getProperty(TerminalProperty) match {
      case "jline.UnixTerminal" => "unix"
      case null if System.getProperty("sbt.cygwin") != null => "unix"
      case "jline.WindowsTerminal" => "windows"
      case "jline.AnsiWindowsTerminal" => "windows"
      case "jline.UnsupportedTerminal" => "none"
      case x => x
    }
    if (newValue != null) System.setProperty(TerminalProperty, newValue)
    ()
  }
  // When calling this, ensure that enableEcho has been or will be called.
  // TerminalFactory.get will initialize the terminal to disable echo.
  private def terminal = jline.TerminalFactory.get
  // Locks both this object and the terminal itself while `f` runs.
  private def withTerminal[T](f: jline.Terminal => T): T =
    synchronized {
      val t = terminal
      t.synchronized { f(t) }
    }
  /**
   * For accessing the JLine Terminal object.
   * This ensures synchronized access as well as re-enabling echo after getting the Terminal.
   */
  def usingTerminal[T](f: jline.Terminal => T): T =
    withTerminal { t =>
      t.restore
      f(t)
    }
  def createReader(): ConsoleReader = createReader(None)
  /** Creates a ConsoleReader with file-backed history when a path is given. */
  def createReader(historyPath: Option[File]): ConsoleReader =
    usingTerminal { t =>
      val cr = new ConsoleReader
      cr.setExpandEvents(false) // https://issues.scala-lang.org/browse/SI-7650
      cr.setBellEnabled(false)
      val h = historyPath match {
        case None => new MemoryHistory
        case Some(file) => new FileHistory(file)
      }
      h.setMaxSize(MaxHistorySize)
      cr.setHistory(h)
      cr
    }
  // Runs `action` with the terminal initialized, always restoring it afterwards.
  def withJLine[T](action: => T): T =
    withTerminal { t =>
      t.init
      try { action }
      finally { t.restore }
    }
  def simple(historyPath: Option[File], handleCONT: Boolean = HandleCONT): SimpleReader = new SimpleReader(historyPath, handleCONT)
  val MaxHistorySize = 500
  // SIGCONT handling can be disabled via -Dsbt.disable.cont and requires platform support.
  val HandleCONT = !java.lang.Boolean.getBoolean("sbt.disable.cont") && Signals.supported(Signals.CONT)
}
/** Minimal interface for reading one line of user input; None signals EOF. */
trait LineReader {
  def readLine(prompt: String, mask: Option[Char] = None): Option[String]
}
/** Line reader with history and a custom tab-completion parser installed. */
final class FullReader(historyPath: Option[File], complete: Parser[_], val handleCONT: Boolean = JLine.HandleCONT) extends JLine {
  protected[this] val reader =
    {
      val cr = JLine.createReader(historyPath)
      sbt.internal.util.complete.JLineCompletion.installCustomCompletor(cr, complete)
      cr
    }
}
/** Line reader with history but no tab completion. */
class SimpleReader private[sbt] (historyPath: Option[File], val handleCONT: Boolean) extends JLine {
  protected[this] val reader = JLine.createReader(historyPath)
}
// Default instance: in-memory history only, default SIGCONT handling.
object SimpleReader extends SimpleReader(None, JLine.HandleCONT)
| Duhemm/util | internal/util-complete/src/main/scala/sbt/internal/util/LineReader.scala | Scala | bsd-3-clause | 4,815 |
package unfiltered.request
import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets
import java.util.zip.GZIPInputStream
import okio.ByteString
import org.specs2.mutable._
import okhttp3.{MediaType, RequestBody}
// Runs the shared GZip spec against the Jetty backend.
class GzipSpecJetty
  extends Specification
  with unfiltered.specs2.jetty.Planned
  with GZipSpec
// Runs the shared GZip spec against the Netty backend.
class GzipSpecNetty
  extends Specification
  with unfiltered.specs2.netty.Planned
  with GZipSpec
/**
 * Backend-agnostic spec for the GZip kit: verifies that responses are
 * gzip-encoded when the client accepts it, and that gzipped request bodies
 * are transparently decoded before reaching the handler.
 */
trait GZipSpec extends Specification with unfiltered.specs2.Hosted {
  import unfiltered.response._
  import unfiltered.request._
  import unfiltered.request.{Path => UFPath}
  import java.io.ByteArrayOutputStream
  import java.util.zip.{GZIPOutputStream => GZOS}
  val message = "message"
  // Test plan: /empty returns "", /echo returns the request body, /test
  // returns the fixed message. Both GZip kits wrap the same intent.
  def intent[A,B]: unfiltered.Cycle.Intent[A,B] =
    unfiltered.kit.GZip {
      unfiltered.kit.GZip.Requests {
        case UFPath(Seg("empty" :: Nil)) => Ok ~> ResponseString("")
        case req @ UFPath(Seg("echo" :: Nil)) => {
          ResponseString(Body.string(req))
        }
        case req@UFPath(Seg("test" :: Nil)) => ResponseString(message)
      }
    }
  // Decodes a gzipped response body to a string ("" when there is no body).
  def gzipDecode(response: Response) = {
    val body = response.body
    body.map(bs =>
      scala.io.Source.fromInputStream(new GZIPInputStream(new ByteArrayInputStream(bs.toByteArray))).mkString
    ).getOrElse("")
  }
  "GZip response kit should" should {
    "gzip-encode a response when accepts header is present" in {
      val resp = http(req(host / "test") <:< Map("Accept-Encoding" -> "gzip"))
      resp.firstHeader("Content-Encoding") must_== Some("gzip")
      gzipDecode(resp) must_== message
    }
    "gzip-encode an empty response when accepts header is present" in {
      val resp = http(req(host / "empty") <:< Map("Accept-Encoding" -> "gzip"))
      resp.firstHeader("Content-Encoding") must_== Some("gzip")
      gzipDecode(resp) must_== ""
    }
    "serve unencoded response when accepts header is not present" in {
      val resp = http(req(host / "test"))
      resp.firstHeader("Content-Encoding") must_== None
      resp.as_string must_== message
    }
  }
  "GZip request kit should" should {
    // A non-ASCII payload so that charset handling is exercised too.
    val expected = "légère"
    // `bos`: the expected text, iso-8859-1 encoded then gzipped.
    val bos = {
      val bos = new ByteArrayOutputStream
      val zipped = new GZOS(bos)
      zipped.write(expected.getBytes("iso-8859-1"))
      zipped.close()
      val arr = bos.toByteArray
      ByteString.of(arr, 0, arr.length)
    }
    // `ubos`: the expected text, utf-8 encoded then gzipped.
    val ubos = {
      val ubos = new ByteArrayOutputStream
      val zipped = new GZOS(ubos)
      zipped.write(expected.getBytes("utf-8"))
      zipped.close()
      val arr = ubos.toByteArray
      ByteString.of(arr, 0, arr.length)
    }
    "echo an unencoded request" in {
      val isobody = RequestBody.create(MediaType.parse("text/plain; charset=iso-8859-1"), expected.getBytes(StandardCharsets.ISO_8859_1))
      val msg = http(req(host / "echo").POST(isobody)).as_string
      msg must_== expected
    }
    "echo an zipped request" in {
      val msg = http((req(host / "echo") <:< Map("Content-Encoding" -> "gzip")).POST(bos, MediaType.parse("text/plain"))).as_string
      msg must_== expected
    }
    "pass an non-matching request" in {
      val resp = httpx(host / "unknown")
      resp.code must_== 404
    }
    "pass an non-matching zipped request" in {
      val resp = httpx(req(host / "unknown").<:<(Map("Content-Encoding" -> "gzip")).POST(bos, MediaType.parse("text/plain")))
      resp.code must_== 404
    }
    "echo a utf-8 request" in {
      val msg = http(req(host / "echo").POST(expected, MediaType.parse("text/plain; charset=utf-8"))).as_string
      msg must_== expected
    }
    "echo a utf-8 zipped request" in {
      val msg = http(req(host / "echo").<:<(Map("Content-Encoding" -> "gzip")).POST(ubos, MediaType.parse("text/plain; charset=utf-8"))).as_string
      msg must_== expected
    }
  }
}
| unfiltered/unfiltered | library/src/test/scala/GzipSpec.scala | Scala | mit | 3,823 |
package jumpmicro.shared.util.configuration
import jumpmicro.shared.model.MMicroConfig
import org.log4s._
import scala.concurrent.Promise
import scala.util.Success
//: -------------------------------------------------------------------------------------
//: Copyright © 2017 Philip Andrew https://github.com/PhilAndrew All Rights Reserved.
//: Released under the MIT License, refer to the project website for licence information.
//: -------------------------------------------------------------------------------------
/**
 * Holds the module configuration behind a promise that is completed once the
 * configuration has been loaded (with None when no configuration was found).
 */
class MicroConfiguration {
  private[this] val logger = getLogger
  // Completed exactly once, by setConfiguration.
  private val loadedConfig: Promise[Option[MMicroConfig]] = Promise[Option[MMicroConfig]]()
  /** The promise callers can await for the (optional) configuration. */
  def configuration: Promise[Option[MMicroConfig]] = loadedConfig
  /** Completes the promise with the loaded configuration. */
  def setConfiguration(config: Option[MMicroConfig]) = loadedConfig.complete(Success(config))
}
| PhilAndrew/JumpMicro | JMShared/src/main/scala/jumpmicro/shared/util/configuration/MicroConfiguration.scala | Scala | mit | 890 |
/**
* Copyright (C) 2010-2011 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.kernel.config
/**
* Utility for helping with validation Diffa configs.
*/
object ValidationUtil {
/**
* Build a path out of a parent and child component pair. The parent may be null, in which case only the child
* component will be used.
*/
def buildPath(parent:String, child:String):String = if (parent == null) {
child
} else {
parent + "/" + child
}
/**
* Build a path out of a parent and child component pair. The child attributes are used to distinguish a given child
* that may have multiple instances (for example, a pair). The parent may be null, in which case only the child
* component will be used.
*/
def buildPath(parent:String, child:String, childAttrs:Map[String, String]):String = {
val childAttrStr = childAttrs.map { case (k, v) => k + "=" + v }.reduceLeft(_ + ", " + _)
buildPath(parent, child + "[" + childAttrStr + "]")
}
/**
* Validates a field that is required to be present and not empty.
*/
def requiredAndNotEmpty(path:String, name:String, value:String) {
if (value == null || value.isEmpty) {
throw new ConfigValidationException(path, name + " cannot be null or empty")
}
}
/**
* Validates that the list of values contains only unique values. Raises an error for the first
* non-unique field.
*/
def ensureUniqueChildren(path:String, child:String, keyName:String, values:Seq[String]) {
for (i <- 0 until (values.length-1)) {
val current = values(i)
if (values.slice(i+1, values.length).contains(current)) {
val childPath = buildPath(path, child, Map(keyName -> current))
throw new ConfigValidationException(childPath, "'" + current + "' is not a unique " + keyName)
}
}
}
/**
* Verifies that the given value is a member of a permissible set.
*/
def ensureMembership[T](path: String, name: String, value: T, group: Set[T]) = {
if (value == null || !group.contains(value)) {
throw new ConfigValidationException(path,
"%s is invalid. %s is not a member of the set %s".format(name, value, group))
}
}
/**
* Validates that a given field does not exceed a length limit.
*/
def ensureLengthLimit(path:String, name:String, value:String, limit:Int) {
if (value != null && value.length() > limit) {
throw new ConfigValidationException(path,
"%s is too long. Limit is %s, value %s is %s".format(name, limit, value, value.length))
}
}
/**
* Checks a settings URL to confirm it is in the correct format.
*
* If the URL is not null and invalid, throws ConfigValidationException, otherwise returns true.
*/
def ensureSettingsURLFormat(path: String, url: String): Boolean = {
if (url == null) { return true } // nullable
if (!url.matches("(amqp|https?)://.*")) {
throw new ConfigValidationException(path, "incorrect settings URL format: %s".format(url))
}
return true
}
/**
* Validates that the given (REST) path segment has the correct format.
*
* Returns true if the domain name contains alphanumeric, _ or - only,
* and starts and ends with an alphanumeric.
*
* Throws ConfigValidationException otherwise.
*/
def ensurePathSegmentFormat(path: String, segment: String): Boolean = {
requiredAndNotEmpty(path, "Path segment", segment)
ensureLengthLimit(path, "Path segment", segment, 50)
if (!segment.matches("(?i)[a-z0-9]([a-z0-9_-]*[a-z0-9])?")) {
throw new ConfigValidationException(path, "Path segment %s is invalid, must begin and end with an alphanumeric and contain alphanumeric, _ or -".format(segment))
}
return true
}
/**
* Turns an empty string into a null string. This prevents issues whereby empty strings provided by the web
* interface look like incorrect values instead of missing ones.
*/
def maybeNullify(s:String) = if (s == null || s.isEmpty) null else s
/**
* Turns an empty or null string into a default value.
*/
def maybeDefault(s:String, default:String) = if (s == null || s.isEmpty) default else s
/**
 * Turns a null (boxed) boolean into a default value; a non-null boolean is
 * returned unchanged.
 */
def maybeDefault(b:java.lang.Boolean, default:Boolean):java.lang.Boolean =
  Option(b).getOrElse(java.lang.Boolean.valueOf(default))
} | lshift/diffa | kernel/src/main/scala/net/lshift/diffa/kernel/config/ValidationUtil.scala | Scala | apache-2.0 | 4,885 |
package org.bitcoins.spvnode.serializers.messages
import org.bitcoins.core.util.BitcoinSUtil
import org.bitcoins.spvnode.messages.{MsgBlock, MsgFilteredBlock, MsgTx}
import org.scalatest.{FlatSpec, MustMatchers}
/**
* Created by chris on 5/31/16.
*/
class RawTypeIdentifierSerializerTest extends FlatSpec with MustMatchers {
  val msgTxHex = "01000000"
  val msgBlockHex = "02000000"
  val msgFilteredBlockHex = "03000000"
  val encode = BitcoinSUtil.encodeHex(_: Seq[Byte])

  /** Round-trips a hex payload: deserialize, check the resulting message
    * type, then serialize again and compare against the original hex.
    */
  private def roundTrip(hex: String, expected: AnyRef): Unit = {
    val msg = RawTypeIdentifierSerializer.read(hex)
    msg must be (expected)
    encode(RawTypeIdentifierSerializer.write(msg)) must be (hex)
  }

  "RawTypeIdentifier" must "read/write a MsgTx" in {
    roundTrip(msgTxHex, MsgTx)
  }

  it must "read/write a MsgBlock" in {
    roundTrip(msgBlockHex, MsgBlock)
  }

  it must "read/write a MsgFilteredBlock" in {
    roundTrip(msgFilteredBlockHex, MsgFilteredBlock)
  }
}
| Christewart/bitcoin-s-spv-node | src/test/scala/org/bitcoins/spvnode/serializers/messages/RawTypeIdentifierSerializerTest.scala | Scala | mit | 1,131 |
package models
import java.sql.Timestamp
import org.joda.time.DateTime
import slick.driver.MySQLDriver.api._
import slick.lifted.{ForeignKeyQuery, ProvenShape, TableQuery, Tag}
/** A registered OAuth2 client application.
  *
  * @param id           surrogate primary key
  * @param ownerId      id of the owning account (FK `oauth_client_owner_id_fkey` in the table mapping)
  * @param grantType    OAuth2 grant type this client uses
  * @param clientId     public client identifier
  * @param clientSecret confidential client secret
  * @param redirectUri  optional redirect URI (nullable column)
  * @param createdAt    creation timestamp
  */
case class OauthClient(
  id: Long,
  ownerId: Long,
  grantType: String,
  clientId: String,
  clientSecret: String,
  redirectUri: Option[String],
  createdAt: DateTime
)
/** Slick table mapping for the `oauth_client` table. */
class OauthClientTableDef(tag: Tag) extends Table[OauthClient](tag, "oauth_client") {

  // Maps Joda DateTime columns to/from java.sql.Timestamp, which JDBC understands.
  implicit def dateTime =
    MappedColumnType.base[DateTime, Timestamp](
      dt => new Timestamp(dt.getMillis),
      ts => new DateTime(ts.getTime)
    )

  // Account table query, referenced by the foreign-key constraint below.
  val accounts: TableQuery[AccountTableDef] = TableQuery[AccountTableDef]

  def id: Rep[Long] = column[Long]("id", O.PrimaryKey, O.AutoInc)
  def ownerId: Rep[Long] = column[Long]("owner_id")
  def grantType: Rep[String] = column[String]("grant_type")
  def clientId: Rep[String] = column[String]("client_id")
  def clientSecret: Rep[String] = column[String]("client_secret")
  def redirectUri: Rep[Option[String]] = column[Option[String]]("redirect_uri")
  def createdAt: Rep[DateTime] = column[DateTime]("created_at")

  // Foreign key: owner_id references the account table's primary key.
  def account: ForeignKeyQuery[AccountTableDef, Account] = foreignKey("oauth_client_owner_id_fkey", ownerId, accounts)(_.id)

  // Default projection: maps the column tuple to/from the OauthClient case class.
  def * : ProvenShape[OauthClient] = (id, ownerId, grantType, clientId, clientSecret, redirectUri, createdAt) <> ((OauthClient.apply _).tupled, OauthClient.unapply)
}
| naveenwashere/scala-oauth2-provider-slick | app/models/OAuthClient.scala | Scala | mit | 1,414 |
package org.scoverage.issue53.part.b
/**
* Created by Mikhail Kokho on 7/10/2015.
*/
object SubtractorScala {
  /** Computes the difference of the two operands (`x - y`). */
  def minus(x: Int, y: Int) = {
    val difference = x - y
    difference
  }
}
| ahirreddy/sbt-scoverage | src/sbt-test/scoverage/aggregate/partB/src/main/scala/org/scoverage/issue53/part/b/SubtractorScala.scala | Scala | apache-2.0 | 154 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.serializer
import org.apache.spark.internal.config.Kryo._
class UnsafeKryoSerializerSuite extends KryoSerializerSuite {

  // This test suite should run all tests in KryoSerializerSuite with kryo unsafe.
  override def beforeAll(): Unit = {
    // Enable Kryo's unsafe-based IO for every test inherited from the parent suite.
    conf.set(KRYO_USE_UNSAFE, true)
    super.beforeAll()
  }

  override def afterAll(): Unit = {
    // Restore the default so later suites sharing `conf` are not affected.
    conf.set(KRYO_USE_UNSAFE, false)
    super.afterAll()
  }
}
| ueshin/apache-spark | core/src/test/scala/org/apache/spark/serializer/UnsafeKryoSerializerSuite.scala | Scala | apache-2.0 | 1,234 |
/*
* Copyright (c) 2014-2020 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.internal
import monix.execution.UncaughtExceptionReporter
import scala.util.{Failure, Success, Try}
/** Internal API — some utilities for working with cats-effect
* callbacks.
*/
private[monix] object AttemptCallback {
  import Constants.eitherOfUnit

  /** A reusable [[Runnable]] that, when run, completes the wrapped
    * callback with a successful `Unit` value (a "tick").
    */
  final class RunnableTick(callback: Either[Throwable, Unit] => Unit) extends Runnable {
    def run(): Unit = callback(eitherOfUnit)
  }

  /** Builds a callback that ignores successful completion and forwards
    * failures to the given [[UncaughtExceptionReporter]], similar with
    * `Callback.empty`.
    */
  def empty(implicit r: UncaughtExceptionReporter): Either[Throwable, Unit] => Unit = {
    case Left(error) => r.reportFailure(error)
    case Right(_) => ()
  }

  /** A shared callback that discards successful results; a failure is
    * rethrown on the calling thread.
    */
  val noop: Either[Throwable, Any] => Unit = {
    case Left(error) => throw error
    case Right(_) => ()
  }

  /** Adapts an attempt-style callback into a `Try`-based one, suitable
    * for use with `Future.onComplete`.
    */
  def toTry[A](cb: Either[Throwable, A] => Unit): (Try[A] => Unit) = {
    case Success(value) => cb(Right(value))
    case Failure(error) => cb(Left(error))
  }
}
| alexandru/monifu | monix-execution/shared/src/main/scala/monix/execution/internal/AttemptCallback.scala | Scala | apache-2.0 | 1,851 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.yarn
import java.lang.{Boolean => JBoolean}
import java.util.{Collections, Set => JSet}
import java.util.concurrent.{CopyOnWriteArrayList, ConcurrentHashMap}
import java.util.concurrent.atomic.AtomicInteger
import scala.collection
import scala.collection.JavaConversions._
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import org.apache.spark.Logging
import org.apache.spark.scheduler.SplitInfo
import org.apache.spark.scheduler.cluster.{ClusterScheduler, CoarseGrainedSchedulerBackend}
import org.apache.spark.util.Utils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.yarn.api.AMRMProtocol
import org.apache.hadoop.yarn.api.records.{AMResponse, ApplicationAttemptId}
import org.apache.hadoop.yarn.api.records.{Container, ContainerId, ContainerStatus}
import org.apache.hadoop.yarn.api.records.{Priority, Resource, ResourceRequest}
import org.apache.hadoop.yarn.api.protocolrecords.{AllocateRequest, AllocateResponse}
import org.apache.hadoop.yarn.util.{RackResolver, Records}
/** The locality level of a container request: node-local, rack-local or anywhere. */
object AllocationType extends Enumeration {
  type AllocationType = Value
  // Declared individually instead of the multi-assignment form; ids and names
  // (HOST=0, RACK=1, ANY=2) are identical either way.
  val HOST: Value = Value
  val RACK: Value = Value
  val ANY: Value = Value
}
// TODO:
// Too many params.
// Needs to be mt-safe
// Need to refactor this to make it 'cleaner' ... right now, all computation is reactive - should
// make it more proactive and decoupled.
// Note that right now, we assume all node asks as uniform in terms of capabilities and priority
// Refer to http://developer.yahoo.com/blogs/hadoop/posts/2011/03/mapreduce-nextgen-scheduler/ for
// more info on how we are requesting for containers.
/**
 * Acquires worker containers from the YARN ResourceManager and launches
 * workers on them, trying to honor the caller's host/rack preferences.
 *
 * Thread-safety: the three maps below are guarded by synchronizing on
 * `allocatedHostToContainersMap`; counters are atomics; the release list is
 * copy-on-write.
 */
private[yarn] class YarnAllocationHandler(
    val conf: Configuration,
    val resourceManager: AMRMProtocol,
    val appAttemptId: ApplicationAttemptId,
    val maxWorkers: Int,
    val workerMemory: Int,
    val workerCores: Int,
    val preferredHostToCount: Map[String, Int],
    val preferredRackToCount: Map[String, Int])
  extends Logging {

  // These three are locked on allocatedHostToContainersMap. Complementary data structures
  // allocatedHostToContainersMap : containers which are running : host, Set<containerid>
  // allocatedContainerToHostMap: container to host mapping.
  private val allocatedHostToContainersMap =
    new HashMap[String, collection.mutable.Set[ContainerId]]()

  private val allocatedContainerToHostMap = new HashMap[ContainerId, String]()

  // allocatedRackCount is populated ONLY if allocation happens (or decremented if this is an
  // allocated node)
  // As with the two data structures above, tightly coupled with them, and to be locked on
  // allocatedHostToContainersMap
  private val allocatedRackCount = new HashMap[String, Int]()

  // Containers which have been released.
  private val releasedContainerList = new CopyOnWriteArrayList[ContainerId]()

  // Containers to be released in next request to RM
  private val pendingReleaseContainers = new ConcurrentHashMap[ContainerId, Boolean]

  private val numWorkersRunning = new AtomicInteger()
  // Used to generate a unique id per worker
  private val workerIdCounter = new AtomicInteger()
  private val lastResponseId = new AtomicInteger()
  private val numWorkersFailed = new AtomicInteger()

  /** Current count of launched (not yet completed) workers. */
  def getNumWorkersRunning: Int = numWorkersRunning.intValue

  /** Count of containers that exited with a non-zero status. */
  def getNumWorkersFailed: Int = numWorkersFailed.intValue

  /** A container is usable only if it provides at least the requested worker
    * memory plus the fixed overhead. */
  def isResourceConstraintSatisfied(container: Container): Boolean = {
    container.getResource.getMemory >= (workerMemory + YarnAllocationHandler.MEMORY_OVERHEAD)
  }

  /**
   * Requests `workersToRequest` containers from the RM, then processes the
   * response: groups newly allocated containers by locality (host, rack,
   * off-rack), launches a WorkerRunnable on each accepted container, releases
   * surplus ones, and finally accounts for completed containers.
   */
  def allocateContainers(workersToRequest: Int) {
    // We need to send the request only once from what I understand ... but for now, not modifying
    // this much.

    // Keep polling the Resource Manager for containers
    val amResp = allocateWorkerResources(workersToRequest).getAMResponse

    val _allocatedContainers = amResp.getAllocatedContainers()

    if (_allocatedContainers.size > 0) {
      logDebug("""
        Allocated containers: %d
        Current worker count: %d
        Containers released: %s
        Containers to be released: %s
        Cluster resources: %s
        """.format(
          _allocatedContainers.size,
          numWorkersRunning.get(),
          releasedContainerList,
          pendingReleaseContainers,
          amResp.getAvailableResources))

      val hostToContainers = new HashMap[String, ArrayBuffer[Container]]()

      // Ignore if not satisfying constraints {
      for (container <- _allocatedContainers) {
        if (isResourceConstraintSatisfied(container)) {
          // allocatedContainers += container

          val host = container.getNodeId.getHost
          val containers = hostToContainers.getOrElseUpdate(host, new ArrayBuffer[Container]())

          containers += container
        }
        // Add all ignored containers to released list
        else releasedContainerList.add(container.getId())
      }

      // Find the appropriate containers to use. Slightly non trivial groupBy ...
      val dataLocalContainers = new HashMap[String, ArrayBuffer[Container]]()
      val rackLocalContainers = new HashMap[String, ArrayBuffer[Container]]()
      val offRackContainers = new HashMap[String, ArrayBuffer[Container]]()

      for (candidateHost <- hostToContainers.keySet)
      {
        val maxExpectedHostCount = preferredHostToCount.getOrElse(candidateHost, 0)
        val requiredHostCount = maxExpectedHostCount - allocatedContainersOnHost(candidateHost)

        var remainingContainers = hostToContainers.get(candidateHost).getOrElse(null)
        assert(remainingContainers != null)

        if (requiredHostCount >= remainingContainers.size){
          // Since we got <= required containers, add all to dataLocalContainers
          dataLocalContainers.put(candidateHost, remainingContainers)
          // all consumed
          remainingContainers = null
        }
        else if (requiredHostCount > 0) {
          // Container list has more containers than we need for data locality.
          // Split into two : data local container count of (remainingContainers.size -
          // requiredHostCount) and rest as remainingContainer
          val (dataLocal, remaining) = remainingContainers.splitAt(
            remainingContainers.size - requiredHostCount)
          dataLocalContainers.put(candidateHost, dataLocal)
          // remainingContainers = remaining

          // yarn has nasty habit of allocating a tonne of containers on a host - discourage this :
          // add remaining to release list. If we have insufficient containers, next allocation
          // cycle will reallocate (but wont treat it as data local)
          for (container <- remaining) releasedContainerList.add(container.getId())
          remainingContainers = null
        }

        // Now rack local
        if (remainingContainers != null){
          val rack = YarnAllocationHandler.lookupRack(conf, candidateHost)

          if (rack != null){
            val maxExpectedRackCount = preferredRackToCount.getOrElse(rack, 0)
            val requiredRackCount = maxExpectedRackCount - allocatedContainersOnRack(rack) -
              rackLocalContainers.get(rack).getOrElse(List()).size

            if (requiredRackCount >= remainingContainers.size){
              // Add all to dataLocalContainers
              // NOTE(review): this stores rack-keyed containers in dataLocalContainers,
              // not rackLocalContainers — looks like a copy/paste slip; confirm against
              // upstream before changing, since the merge step below reads both maps.
              dataLocalContainers.put(rack, remainingContainers)
              // All consumed
              remainingContainers = null
            }
            else if (requiredRackCount > 0) {
              // container list has more containers than we need for data locality.
              // Split into two : data local container count of (remainingContainers.size -
              // requiredRackCount) and rest as remainingContainer
              val (rackLocal, remaining) = remainingContainers.splitAt(
                remainingContainers.size - requiredRackCount)
              val existingRackLocal = rackLocalContainers.getOrElseUpdate(rack,
                new ArrayBuffer[Container]())

              existingRackLocal ++= rackLocal
              remainingContainers = remaining
            }
          }
        }

        // If still not consumed, then it is off rack host - add to that list.
        if (remainingContainers != null){
          offRackContainers.put(candidateHost, remainingContainers)
        }
      }

      // Now that we have split the containers into various groups, go through them in order :
      // first host local, then rack local and then off rack (everything else).
      // Note that the list we create below tries to ensure that not all containers end up within a
      // host if there are sufficiently large number of hosts/containers.
      val allocatedContainers = new ArrayBuffer[Container](_allocatedContainers.size)
      allocatedContainers ++= ClusterScheduler.prioritizeContainers(dataLocalContainers)
      allocatedContainers ++= ClusterScheduler.prioritizeContainers(rackLocalContainers)
      allocatedContainers ++= ClusterScheduler.prioritizeContainers(offRackContainers)

      // Run each of the allocated containers
      for (container <- allocatedContainers) {
        val numWorkersRunningNow = numWorkersRunning.incrementAndGet()
        val workerHostname = container.getNodeId.getHost
        val containerId = container.getId

        assert(
          container.getResource.getMemory >= (workerMemory + YarnAllocationHandler.MEMORY_OVERHEAD))

        if (numWorkersRunningNow > maxWorkers) {
          logInfo("""Ignoring container %s at host %s, since we already have the required number of
            containers for it.""".format(containerId, workerHostname))
          releasedContainerList.add(containerId)
          // reset counter back to old value.
          numWorkersRunning.decrementAndGet()
        }
        else {
          // Deallocate + allocate can result in reusing id's wrongly - so use a different counter
          // (workerIdCounter)
          val workerId = workerIdCounter.incrementAndGet().toString
          val driverUrl = "akka.tcp://spark@%s:%s/user/%s".format(
            System.getProperty("spark.driver.host"), System.getProperty("spark.driver.port"),
            CoarseGrainedSchedulerBackend.ACTOR_NAME)

          logInfo("launching container on " + containerId + " host " + workerHostname)
          // Just to be safe, simply remove it from pendingReleaseContainers.
          // Should not be there, but ..
          pendingReleaseContainers.remove(containerId)

          val rack = YarnAllocationHandler.lookupRack(conf, workerHostname)
          allocatedHostToContainersMap.synchronized {
            val containerSet = allocatedHostToContainersMap.getOrElseUpdate(workerHostname,
              new HashSet[ContainerId]())

            containerSet += containerId
            allocatedContainerToHostMap.put(containerId, workerHostname)
            if (rack != null) {
              allocatedRackCount.put(rack, allocatedRackCount.getOrElse(rack, 0) + 1)
            }
          }

          new Thread(
            new WorkerRunnable(container, conf, driverUrl, workerId,
              workerHostname, workerMemory, workerCores)
          ).start()
        }
      }
      logDebug("""
        Finished processing %d containers.
        Current number of workers running: %d,
        releasedContainerList: %s,
        pendingReleaseContainers: %s
        """.format(
          allocatedContainers.size,
          numWorkersRunning.get(),
          releasedContainerList,
          pendingReleaseContainers))
    }

    val completedContainers = amResp.getCompletedContainersStatuses()
    if (completedContainers.size > 0){
      logDebug("Completed %d containers, to-be-released: %s".format(
        completedContainers.size, releasedContainerList))

      for (completedContainer <- completedContainers){
        val containerId = completedContainer.getContainerId

        // Was this released by us ? If yes, then simply remove from containerSet and move on.
        if (pendingReleaseContainers.containsKey(containerId)) {
          pendingReleaseContainers.remove(containerId)
        }
        else {
          // Simply decrement count - next iteration of ReporterThread will take care of allocating.
          numWorkersRunning.decrementAndGet()
          logInfo("Completed container %s (state: %s, exit status: %s)".format(
            containerId,
            completedContainer.getState,
            completedContainer.getExitStatus()))
          // Hadoop 2.2.X added a ContainerExitStatus we should switch to use
          // there are some exit status' we shouldn't necessarily count against us, but for
          // now I think its ok as none of the containers are expected to exit
          if (completedContainer.getExitStatus() != 0) {
            logInfo("Container marked as failed: " + containerId)
            numWorkersFailed.incrementAndGet()
          }
        }

        allocatedHostToContainersMap.synchronized {
          if (allocatedContainerToHostMap.containsKey(containerId)) {
            val host = allocatedContainerToHostMap.get(containerId).getOrElse(null)
            assert (host != null)

            val containerSet = allocatedHostToContainersMap.get(host).getOrElse(null)
            assert (containerSet != null)

            containerSet -= containerId
            if (containerSet.isEmpty) allocatedHostToContainersMap.remove(host)
            else allocatedHostToContainersMap.update(host, containerSet)

            allocatedContainerToHostMap -= containerId

            // Doing this within locked context, sigh ... move to outside ?
            val rack = YarnAllocationHandler.lookupRack(conf, host)
            if (rack != null) {
              val rackCount = allocatedRackCount.getOrElse(rack, 0) - 1
              if (rackCount > 0) allocatedRackCount.put(rack, rackCount)
              else allocatedRackCount.remove(rack)
            }
          }
        }
      }
      logDebug("""
        Finished processing %d completed containers.
        Current number of workers running: %d,
        releasedContainerList: %s,
        pendingReleaseContainers: %s
        """.format(
          completedContainers.size,
          numWorkersRunning.get(),
          releasedContainerList,
          pendingReleaseContainers))
    }
  }

  /**
   * Aggregates the given host-level requests into one rack-level request per
   * distinct rack (summing the container counts of the hosts on that rack).
   */
  def createRackResourceRequests(hostContainers: List[ResourceRequest]): List[ResourceRequest] = {
    // First generate modified racks and new set of hosts under it : then issue requests
    val rackToCounts = new HashMap[String, Int]()

    // Within this lock - used to read/write to the rack related maps too.
    for (container <- hostContainers) {
      val candidateHost = container.getHostName
      val candidateNumContainers = container.getNumContainers
      assert(YarnAllocationHandler.ANY_HOST != candidateHost)

      val rack = YarnAllocationHandler.lookupRack(conf, candidateHost)
      if (rack != null) {
        var count = rackToCounts.getOrElse(rack, 0)
        count += candidateNumContainers
        rackToCounts.put(rack, count)
      }
    }

    val requestedContainers: ArrayBuffer[ResourceRequest] =
      new ArrayBuffer[ResourceRequest](rackToCounts.size)
    for ((rack, count) <- rackToCounts){
      requestedContainers +=
        createResourceRequest(AllocationType.RACK, rack, count, YarnAllocationHandler.PRIORITY)
    }

    requestedContainers.toList
  }

  /** Number of containers currently allocated on `host` (thread-safe). */
  def allocatedContainersOnHost(host: String): Int = {
    var retval = 0
    allocatedHostToContainersMap.synchronized {
      retval = allocatedHostToContainersMap.getOrElse(host, Set()).size
    }
    retval
  }

  /** Number of containers currently allocated on `rack` (thread-safe). */
  def allocatedContainersOnRack(rack: String): Int = {
    var retval = 0
    allocatedHostToContainersMap.synchronized {
      retval = allocatedRackCount.getOrElse(rack, 0)
    }
    retval
  }

  /**
   * Builds and sends a single AllocateRequest to the RM, combining host-level,
   * rack-level and "any" resource requests plus the current release list.
   * With numWorkers <= 0 this still acts as the periodic RM heartbeat.
   */
  private def allocateWorkerResources(numWorkers: Int): AllocateResponse = {

    var resourceRequests: List[ResourceRequest] = null

      // default.
    if (numWorkers <= 0 || preferredHostToCount.isEmpty) {
      logDebug("numWorkers: " + numWorkers + ", host preferences: " + preferredHostToCount.isEmpty)
      resourceRequests = List(
        createResourceRequest(AllocationType.ANY, null, numWorkers, YarnAllocationHandler.PRIORITY))
    }
    else {
      // request for all hosts in preferred nodes and for numWorkers -
      // candidates.size, request by default allocation policy.
      val hostContainerRequests: ArrayBuffer[ResourceRequest] =
        new ArrayBuffer[ResourceRequest](preferredHostToCount.size)
      for ((candidateHost, candidateCount) <- preferredHostToCount) {
        val requiredCount = candidateCount - allocatedContainersOnHost(candidateHost)

        if (requiredCount > 0) {
          hostContainerRequests += createResourceRequest(
            AllocationType.HOST,
            candidateHost,
            requiredCount,
            YarnAllocationHandler.PRIORITY)
        }
      }
      val rackContainerRequests: List[ResourceRequest] = createRackResourceRequests(
        hostContainerRequests.toList)

      val anyContainerRequests: ResourceRequest = createResourceRequest(
        AllocationType.ANY,
        resource = null,
        numWorkers,
        YarnAllocationHandler.PRIORITY)

      val containerRequests: ArrayBuffer[ResourceRequest] = new ArrayBuffer[ResourceRequest](
        hostContainerRequests.size + rackContainerRequests.size + 1)

      containerRequests ++= hostContainerRequests
      containerRequests ++= rackContainerRequests
      containerRequests += anyContainerRequests

      resourceRequests = containerRequests.toList
    }

    val req = Records.newRecord(classOf[AllocateRequest])
    req.setResponseId(lastResponseId.incrementAndGet)
    req.setApplicationAttemptId(appAttemptId)

    req.addAllAsks(resourceRequests)

    val releasedContainerList = createReleasedContainerList()
    req.addAllReleases(releasedContainerList)

    if (numWorkers > 0) {
      logInfo("Allocating %d worker containers with %d of memory each.".format(numWorkers,
        workerMemory + YarnAllocationHandler.MEMORY_OVERHEAD))
    }
    else {
      logDebug("Empty allocation req ..  release : " + releasedContainerList)
    }

    for (request <- resourceRequests) {
      logInfo("ResourceRequest (host : %s, num containers: %d, priority = %s , capability : %s)".
        format(
          request.getHostName,
          request.getNumContainers,
          request.getPriority,
          request.getCapability))
    }
    resourceManager.allocate(req)
  }

  /**
   * Creates a ResourceRequest for the given locality level. HOST requests also
   * populate the host->rack cache so rack-level requests can be derived later.
   */
  private def createResourceRequest(
    requestType: AllocationType.AllocationType,
    resource:String,
    numWorkers: Int,
    priority: Int): ResourceRequest = {

    // If hostname specified, we need atleast two requests - node local and rack local.
    // There must be a third request - which is ANY : that will be specially handled.
    requestType match {
      case AllocationType.HOST => {
        assert(YarnAllocationHandler.ANY_HOST != resource)
        val hostname = resource
        val nodeLocal = createResourceRequestImpl(hostname, numWorkers, priority)

        // Add to host->rack mapping
        YarnAllocationHandler.populateRackInfo(conf, hostname)

        nodeLocal
      }
      case AllocationType.RACK => {
        val rack = resource
        createResourceRequestImpl(rack, numWorkers, priority)
      }
      case AllocationType.ANY => createResourceRequestImpl(
        YarnAllocationHandler.ANY_HOST, numWorkers, priority)
      case _ => throw new IllegalArgumentException(
        "Unexpected/unsupported request type: " + requestType)
    }
  }

  /** Low-level helper: builds a ResourceRequest record for one location. */
  private def createResourceRequestImpl(
    hostname:String,
    numWorkers: Int,
    priority: Int): ResourceRequest = {

    val rsrcRequest = Records.newRecord(classOf[ResourceRequest])
    val memCapability = Records.newRecord(classOf[Resource])
    // There probably is some overhead here, let's reserve a bit more memory.
    memCapability.setMemory(workerMemory + YarnAllocationHandler.MEMORY_OVERHEAD)
    rsrcRequest.setCapability(memCapability)

    val pri = Records.newRecord(classOf[Priority])
    pri.setPriority(priority)
    rsrcRequest.setPriority(pri)

    rsrcRequest.setHostName(hostname)

    rsrcRequest.setNumContainers(java.lang.Math.max(numWorkers, 0))
    rsrcRequest
  }

  /**
   * Drains releasedContainerList into a fresh buffer, marking each drained
   * container as pending release so completed-container handling can tell
   * "released by us" apart from genuine failures.
   */
  def createReleasedContainerList(): ArrayBuffer[ContainerId] = {

    val retval = new ArrayBuffer[ContainerId](1)
    // Iterator on COW list ...
    for (container <- releasedContainerList.iterator()){
      retval += container
    }
    // Remove from the original list.
    if (! retval.isEmpty) {
      releasedContainerList.removeAll(retval)
      for (v <- retval) pendingReleaseContainers.put(v, true)
      logInfo("Releasing " + retval.size + " containers. pendingReleaseContainers : " +
        pendingReleaseContainers)
    }

    retval
  }
}
/** Factory methods and shared host/rack bookkeeping for [[YarnAllocationHandler]]. */
object YarnAllocationHandler {

  // Wildcard used by YARN resource requests to mean "any host".
  val ANY_HOST = "*"

  // All requests are issued with same priority : we do not (yet) have any distinction between
  // request types (like map/reduce in hadoop for example)
  val PRIORITY = 1

  // Additional memory overhead - in mb
  val MEMORY_OVERHEAD = 384

  // Host to rack map - saved from allocation requests
  // We are expecting this not to change.
  // Note that it is possible for this to change : and RM will indicate that to us via update
  // response to allocate. But we are punting on handling that for now.
  private val hostToRack = new ConcurrentHashMap[String, String]()
  private val rackToHostSet = new ConcurrentHashMap[String, JSet[String]]()

  /** Builds an allocator with no host/rack preferences. */
  def newAllocator(
    conf: Configuration,
    resourceManager: AMRMProtocol,
    appAttemptId: ApplicationAttemptId,
    args: ApplicationMasterArguments): YarnAllocationHandler = {

    new YarnAllocationHandler(
      conf,
      resourceManager,
      appAttemptId,
      args.numWorkers,
      args.workerMemory,
      args.workerCores,
      Map[String, Int](),
      Map[String, Int]())
  }

  /** Builds an allocator whose host/rack preferences are derived from input split locations. */
  def newAllocator(
    conf: Configuration,
    resourceManager: AMRMProtocol,
    appAttemptId: ApplicationAttemptId,
    args: ApplicationMasterArguments,
    map: collection.Map[String,
      collection.Set[SplitInfo]]): YarnAllocationHandler = {

    val (hostToCount, rackToCount) = generateNodeToWeight(conf, map)
    new YarnAllocationHandler(
      conf,
      resourceManager,
      appAttemptId,
      args.numWorkers,
      args.workerMemory,
      args.workerCores,
      hostToCount,
      rackToCount)
  }

  /** Builds an allocator with explicit sizing and split-derived locality preferences. */
  def newAllocator(
    conf: Configuration,
    resourceManager: AMRMProtocol,
    appAttemptId: ApplicationAttemptId,
    maxWorkers: Int,
    workerMemory: Int,
    workerCores: Int,
    map: collection.Map[String, collection.Set[SplitInfo]]): YarnAllocationHandler = {

    val (hostToCount, rackToCount) = generateNodeToWeight(conf, map)

    new YarnAllocationHandler(
      conf,
      resourceManager,
      appAttemptId,
      maxWorkers,
      workerMemory,
      workerCores,
      hostToCount,
      rackToCount)
  }

  // A simple method to copy the split info map.
  // Weights each host by the number of splits it holds, and aggregates those
  // weights per rack.
  private def generateNodeToWeight(
    conf: Configuration,
    input: collection.Map[String, collection.Set[SplitInfo]]) :
  // host to count, rack to count
  (Map[String, Int], Map[String, Int]) = {

    if (input == null) return (Map[String, Int](), Map[String, Int]())

    val hostToCount = new HashMap[String, Int]
    val rackToCount = new HashMap[String, Int]

    for ((host, splits) <- input) {
      val hostCount = hostToCount.getOrElse(host, 0)
      hostToCount.put(host, hostCount + splits.size)

      val rack = lookupRack(conf, host)
      if (rack != null){
        // Fix: previously keyed by `host`, which made rackToCount a duplicate of
        // hostToCount and left actual racks with no weight.
        val rackCount = rackToCount.getOrElse(rack, 0)
        rackToCount.put(rack, rackCount + splits.size)
      }
    }

    (hostToCount.toMap, rackToCount.toMap)
  }

  /** Resolves (and caches) the rack for `host`; returns null when unknown. */
  def lookupRack(conf: Configuration, host: String): String = {
    // Fix: ConcurrentHashMap.contains(x) is the legacy Hashtable API and tests
    // *values*, not keys - containsKey is the correct membership check here.
    if (!hostToRack.containsKey(host)) populateRackInfo(conf, host)
    hostToRack.get(host)
  }

  /** Returns the cached set of hosts known to live on `rack`, if any. */
  def fetchCachedHostsForRack(rack: String): Option[Set[String]] = {
    Option(rackToHostSet.get(rack)).map { set =>
      // No better way to get a Set[String] from JSet ?
      val convertedSet: collection.mutable.Set[String] = set
      convertedSet.toSet
    }
  }

  /** Resolves `hostname`'s rack via the Hadoop RackResolver and caches the
    * result in both hostToRack and rackToHostSet. */
  def populateRackInfo(conf: Configuration, hostname: String) {
    Utils.checkHost(hostname)

    if (!hostToRack.containsKey(hostname)) {
      // If there are repeated failures to resolve, all to an ignore list ?
      val rackInfo = RackResolver.resolve(conf, hostname)
      if (rackInfo != null && rackInfo.getNetworkLocation != null) {
        val rack = rackInfo.getNetworkLocation
        hostToRack.put(hostname, rack)
        if (! rackToHostSet.containsKey(rack)) {
          rackToHostSet.putIfAbsent(rack,
            Collections.newSetFromMap(new ConcurrentHashMap[String, JBoolean]()))
        }
        rackToHostSet.get(rack).add(hostname)

        // TODO(harvey): Figure out this comment...
        // Since RackResolver caches, we are disabling this for now ...
      } /* else {
        // right ? Else we will keep calling rack resolver in case we cant resolve rack info ...
        hostToRack.put(hostname, null)
      } */
    }
  }
}
| mkolod/incubator-spark | yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocationHandler.scala | Scala | apache-2.0 | 26,050 |
package models
case class PrescriptionPrescriberPatientData (
prescription: Option[Prescription],
prescriberName: String,
patient: Patient
)
| BBK-SDP-2015-jtomli03/Morphidose2 | app/models/PrescriptionPrescriberPatientData.scala | Scala | apache-2.0 | 324 |
package com.ulb.code.wit.main
import org.apache.spark.graphx.PartitionStrategy
import org.apache.spark.graphx._
import com.ulb.code.wit.util.NodeApprox
import java.util.Random
class MyPartitionStrategy(val partitionlookup: collection.mutable.Map[(Long, Long), Int], val nodedegree: collection.mutable.Map[Long, Int], val nodeProfile: scala.collection.immutable.Map[Long, Long], val nodeUpdate: scala.collection.immutable.Map[Long, Int], val nodeReplication: scala.collection.mutable.Map[Long, Int]) extends Serializable {
// Auxiliary constructors: each heuristic only needs a subset of the lookup
// tables, so unused ones default to null (callers must pick a matching strategy).
def this() {
  this(null, null, null, null, null)
}

// Degree information only (sufficient for the degree-based strategies).
def this(nodedegree: collection.mutable.Map[Long, Int]) {
  this(null, nodedegree, null, null, null)
}

// Precomputed partition lookup plus degrees (used by HDRF with degree fallback).
def this(partitionlookup: collection.mutable.Map[(Long, Long), Int], nodedegree: collection.mutable.Map[Long, Int]) {
  this(partitionlookup, nodedegree, null, null, null)
}
case object Hashing extends PartitionStrategy {
  /** Assigns the edge to a uniformly random partition, ignoring both endpoints. */
  override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
    val bucket = Math.round(Math.random() * 10000) % numParts
    math.abs(bucket).toInt
  }
}
case object DBH extends PartitionStrategy {
  /** Degree-Based Hashing: assigns the edge by hashing the endpoint with the
    * smaller known degree (ties go to the destination), so high-degree
    * vertices are the ones replicated across partitions.
    * Vertices missing from `nodedegree` are treated as degree 0.
    */
  override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
    val srcDegree = nodedegree.getOrElse(src, 0)
    val dstDegree = nodedegree.getOrElse(dst, 0)
    if (srcDegree < dstDegree) {
      math.abs(src.hashCode()) % numParts
    } else {
      math.abs(dst.hashCode()) % numParts
    }
  }
}
case object ABH extends PartitionStrategy {
  /** Hashes the edge by its lower-degree endpoint.
    * NOTE(review): this body is byte-for-byte identical to DBH above —
    * presumably ABH was meant to implement a different heuristic; confirm
    * intent before relying on the distinction.
    */
  override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
    val srcDegree = nodedegree.getOrElse(src, 0)
    val dstDegree = nodedegree.getOrElse(dst, 0)
    if (srcDegree < dstDegree) {
      math.abs(src.hashCode()) % numParts
    } else {
      math.abs(dst.hashCode()) % numParts
    }
  }
}
case object UBH extends PartitionStrategy {
  /** Update-Based Hashing: assigns the edge by hashing the endpoint with the
    * larger recorded update count; when the counts tie, falls back to
    * hashing the lower-degree endpoint (as in DBH).
    */
  override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
    val srcUpdateCount = nodeUpdate.getOrElse(src, 0)
    val dstUpdateCount = nodeUpdate.getOrElse(dst, 0)
    if (srcUpdateCount > dstUpdateCount) {
      math.abs(src.hashCode()) % numParts
    } else if (srcUpdateCount < dstUpdateCount) {
      math.abs(dst.hashCode()) % numParts
    } else {
      //if update count is same follow degree based approach
      val srcDegree = nodedegree.getOrElse(src, 0)
      val dstDegree = nodedegree.getOrElse(dst, 0)
      if (srcDegree < dstDegree) {
        math.abs(src.hashCode()) % numParts
      } else {
        math.abs(dst.hashCode()) % numParts
      }
    }
  }
}
case object UBHAdvanced extends PartitionStrategy {
  /** Like UBH, but weights each endpoint's update count by its replication
    * count and hashes by the endpoint with the larger product; ties fall
    * back to degree-based hashing.
    */
  override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
    val srcUpdateCount = nodeUpdate.getOrElse(src, 0)
    val dstUpdateCount = nodeUpdate.getOrElse(dst, 0)
    val srcReplicationCount = nodeReplication.getOrElse(src, 0)
    val dstReplicationCount = nodeReplication.getOrElse(dst, 0)
    if ((srcUpdateCount * srcReplicationCount) > (dstUpdateCount * dstReplicationCount)) {
      math.abs(src.hashCode()) % numParts
    } else if (((srcUpdateCount * srcReplicationCount) < (dstUpdateCount * dstReplicationCount))) {
      math.abs(dst.hashCode()) % numParts
    } else {
      //if update count is same follow degree based approach
      val srcDegree = nodedegree.getOrElse(src, 0)
      val dstDegree = nodedegree.getOrElse(dst, 0)
      if (srcDegree < dstDegree) {
        math.abs(src.hashCode()) % numParts
      } else {
        math.abs(dst.hashCode()) % numParts
      }
    }
  }
}
case object UBHreversed extends PartitionStrategy {
override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
val srcUpdateCount = nodeUpdate.getOrElse(src, 0)
val dstUpdateCount = nodeUpdate.getOrElse(dst, 0)
if (srcUpdateCount < dstUpdateCount) {
math.abs(src.hashCode()) % numParts
} else if (srcUpdateCount < dstUpdateCount) {
math.abs(dst.hashCode()) % numParts
} else {
//if update count is same follow degree based approach
val srcDegree = nodedegree.getOrElse(src, 0)
val dstDegree = nodedegree.getOrElse(dst, 0)
if (srcDegree < dstDegree) {
math.abs(src.hashCode()) % numParts
} else {
math.abs(dst.hashCode()) % numParts
}
}
}
}
case object ReverseABH extends PartitionStrategy {
override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
val srcDegree = nodedegree.getOrElse(src, 0)
val dstDegree = nodedegree.getOrElse(dst, 0)
if (srcDegree > dstDegree) {
math.abs(src.hashCode()) % numParts
} else {
math.abs(dst.hashCode()) % numParts
}
}
}
case object NPH extends PartitionStrategy {
override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
val srcDegree = nodeProfile.getOrElse(src, 0l)
val dstDegree = nodeProfile.getOrElse(dst, 0l)
if (srcDegree < dstDegree) {
math.abs(src.hashCode()) % numParts
} else {
math.abs(dst.hashCode()) % numParts
}
}
}
case object HDRF extends PartitionStrategy {
override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
partitionlookup.getOrElse((src, dst), (math.abs(Math.round(Math.random() * 10000) % numParts)).toInt)
}
}
case object HURL extends PartitionStrategy {
override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
partitionlookup.getOrElse((src, dst), (math.abs(Math.round(Math.random() * 10000) % numParts)).toInt)
}
}
  // Degenerate strategy: every edge lands in partition 0 (useful as a baseline).
  case object ONLYONE extends PartitionStrategy {
    override def getPartition(src: VertexId, dst: VertexId, numParts: PartitionID): PartitionID = {
      0
    }
  }
def fromString(s: String): PartitionStrategy = s match {
case "RandomVertexCut" => PartitionStrategy.RandomVertexCut
case "EdgePartition1D" => PartitionStrategy.EdgePartition1D
case "EdgePartition2D" => PartitionStrategy.EdgePartition2D
case "CanonicalRandomVertexCut" => PartitionStrategy.CanonicalRandomVertexCut
case "Hashing" => Hashing
case "HDRF" => HDRF
case "DBH" => DBH
case "ABH" => ABH
case "ReverseABH" => ReverseABH
case "NPH" => NPH
case "UBH" => UBH
case "UBHAdvanced" => UBHAdvanced
case "ONLYONE" => ONLYONE
case "HURL" => HURL
case "iDBH" => DBH
case "UBHReversed" => UBHreversed
case _ => throw new IllegalArgumentException("Invalid PartitionStrategy: " + s)
}
} | rohit13k/NeighborhoodProfileDistributed | src/com/ulb/code/wit/main/MyPartitionStrategy.scala | Scala | apache-2.0 | 7,117 |
/*
* Copyright 2016 Groupon, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.groupon.sparklint.events
import com.groupon.sparklint.common.Utils
import com.groupon.sparklint.data._
import org.apache.spark.scheduler._
/**
* @author rxue
* @since 9/22/16.
*/
class LosslessStateManager(metricsBuckets: Int = 1000) extends EventStateManagerLike with EventReceiverLike {
  // Single mutable reference; every handler swaps in a new immutable snapshot via copy().
  private var state: LosslessState = LosslessState.empty
  override def getState: SparklintStateLike = state
  // Handlers come in pairs: onX applies a Spark listener event to the state and the
  // matching unX reverses it (where reversible), enabling backward event replay.
  override def onAddApp(event: SparkListenerApplicationStart): Unit = {
    state = state.copy(
      lastUpdatedAt = event.time
    )
  }
  // NOTE(review): the undo only resets lastUpdatedAt to the same event time,
  // i.e. it performs the same update as onAddApp.
  override def unAddApp(event: SparkListenerApplicationStart): Unit = {
    state = state.copy(
      lastUpdatedAt = event.time
    )
  }
  // Registers a new executor with its core count and start time; endTime stays None while running.
  override def onAddExecutor(event: SparkListenerExecutorAdded): Unit = {
    val executorId = event.executorId
    state = state.copy(
      executorInfo = state.executorInfo + (executorId -> SparklintExecutorInfo(event.executorInfo.totalCores, event.time, None)),
      lastUpdatedAt = event.time)
  }
  // Undo of onAddExecutor: drop the executor entry entirely.
  override def unAddExecutor(event: SparkListenerExecutorAdded): Unit = {
    val executorId = event.executorId
    state = state.copy(
      executorInfo = state.executorInfo - executorId,
      lastUpdatedAt = event.time)
  }
  // Marks an executor as finished by stamping its end time.
  // NOTE(review): state.executorInfo(executorId) throws if the executor was never added.
  override def onRemoveExecutor(event: SparkListenerExecutorRemoved): Unit = {
    val executorId = event.executorId
    state = state.copy(
      executorInfo = state.executorInfo + (executorId -> state.executorInfo(executorId).copy(endTime = Some(event.time))),
      lastUpdatedAt = event.time)
  }
  // Undo of onRemoveExecutor: clear the end time so the executor counts as running again.
  override def unRemoveExecutor(event: SparkListenerExecutorRemoved): Unit = {
    val executorId = event.executorId
    state = state.copy(
      executorInfo = state.executorInfo + (executorId -> state.executorInfo(executorId).copy(endTime = None)),
      lastUpdatedAt = event.time)
  }
  // Block-manager and job lifecycle events carry no information this state tracks.
  override def onAddBlockManager(event: SparkListenerBlockManagerAdded): Unit = {}
  override def unAddBlockManager(event: SparkListenerBlockManagerAdded): Unit = {}
  override def onJobStart(event: SparkListenerJobStart): Unit = {}
  override def unJobStart(event: SparkListenerJobStart): Unit = {}
  // Records the numeric stage id -> stage identifier mapping used later by onTaskEnd.
  override def onStageSubmitted(event: SparkListenerStageSubmitted): Unit = {
    val stageId = event.stageInfo.stageId
    val stageIdentifier = SparkToSparklint.sparklintStageIdentifier(event.stageInfo, event.properties)
    state = state.copy(
      stageIdLookup = state.stageIdLookup + (stageId -> stageIdentifier),
      lastUpdatedAt = event.stageInfo.submissionTime.getOrElse(state.lastUpdatedAt))
  }
  // Undo of onStageSubmitted: forget the stage id mapping.
  override def unStageSubmitted(event: SparkListenerStageSubmitted): Unit = {
    val stageId = event.stageInfo.stageId
    state = state.copy(
      stageIdLookup = state.stageIdLookup - stageId,
      lastUpdatedAt = event.stageInfo.submissionTime.getOrElse(state.lastUpdatedAt))
  }
  override def onStageCompleted(event: SparkListenerStageCompleted): Unit = {}
  override def unStageCompleted(event: SparkListenerStageCompleted): Unit = {}
  // The very first task initializes the per-locality usage sinks (bucketed from its
  // launch time using metricsBuckets); later tasks are just added to runningTasks.
  override def onTaskStart(event: SparkListenerTaskStart): Unit = {
    val startTime = event.taskInfo.launchTime
    if (state.firstTaskAt.isEmpty) {
      state = state.copy(
        coreUsageByLocality = Utils.LOCALITIES.map(locality => locality -> LosslessMetricsSink.empty(startTime, metricsBuckets)).toMap,
        firstTaskAt = Some(startTime),
        runningTasks = Map(event.taskInfo.taskId -> SparkToSparklint.sparklintTaskInfo(event.taskInfo)),
        lastUpdatedAt = startTime
      )
    } else {
      state = state.copy(
        runningTasks = state.runningTasks + (event.taskInfo.taskId -> SparkToSparklint.sparklintTaskInfo(event.taskInfo)),
        lastUpdatedAt = startTime)
    }
  }
  // Undo of onTaskStart: if this was the very first task, tear down the sinks entirely;
  // otherwise just remove the task from runningTasks.
  override def unTaskStart(event: SparkListenerTaskStart): Unit = {
    val startTime = event.taskInfo.launchTime
    if (state.firstTaskAt.get == startTime) {
      state = state.copy(
        coreUsageByLocality = Map.empty,
        firstTaskAt = None,
        runningTasks = Map.empty,
        lastUpdatedAt = startTime
      )
    } else {
      state = state.copy(
        runningTasks = state.runningTasks - event.taskInfo.taskId,
        lastUpdatedAt = startTime)
    }
  }
  // Folds the finished task's time span into the per-locality and per-pool usage sinks,
  // removes it from runningTasks, and merges its metrics into the per-stage metrics.
  override def onTaskEnd(event: SparkListenerTaskEnd): Unit = {
    val stageId = state.stageIdLookup(event.stageId)
    val locality = event.taskInfo.taskLocality
    val pool = stageId.pool
    // The pool's sink is created lazily on the pool's first finished task.
    val metricsSinkForPool = state.coreUsageByPool.getOrElse(pool, LosslessMetricsSink.empty(state.firstTaskAt.get, metricsBuckets))
    state = state.copy(
      coreUsageByLocality = state.coreUsageByLocality + (locality -> state.coreUsageByLocality(locality).addUsage(
        startTime = event.taskInfo.launchTime,
        endTime = event.taskInfo.finishTime)),
      coreUsageByPool = state.coreUsageByPool + (pool -> metricsSinkForPool.addUsage(
        startTime = event.taskInfo.launchTime,
        endTime = event.taskInfo.finishTime)),
      runningTasks = state.runningTasks - event.taskInfo.taskId,
      stageMetrics = state.stageMetrics + (stageId -> state.stageMetrics.getOrElse(stageId, LosslessStageMetrics.empty).merge(
        taskId = event.taskInfo.taskId,
        taskType = Symbol(event.taskType),
        locality = locality,
        metrics = SparkVersionSpecificToSparklint.sparklintTaskMetrics(event.taskMetrics))),
      lastUpdatedAt = event.taskInfo.finishTime)
  }
  // Partial undo of onTaskEnd: usage is subtracted and the task re-enters runningTasks,
  // but the stageMetrics merge cannot be reversed (see comment below).
  override def unTaskEnd(event: SparkListenerTaskEnd): Unit = {
    val stageId = state.stageIdLookup(event.stageId)
    val locality = event.taskInfo.taskLocality
    val pool = stageId.pool
    state = state.copy(
      coreUsageByLocality = state.coreUsageByLocality + (locality -> state.coreUsageByLocality(locality).removeUsage(
        startTime = event.taskInfo.launchTime,
        endTime = event.taskInfo.finishTime)),
      coreUsageByPool = state.coreUsageByPool + (pool -> state.coreUsageByPool(pool).removeUsage(
        startTime = event.taskInfo.launchTime,
        endTime = event.taskInfo.finishTime)),
      runningTasks = state.runningTasks + (event.taskInfo.taskId -> SparkToSparklint.sparklintTaskInfo(event.taskInfo)),
      // cannot undo message from stageMetrics, since it is not reversible
      lastUpdatedAt = event.taskInfo.finishTime)
  }
  override def onJobEnd(event: SparkListenerJobEnd): Unit = {}
  override def unJobEnd(event: SparkListenerJobEnd): Unit = {}
  override def onUnpersistRDD(event: SparkListenerUnpersistRDD): Unit = {}
  override def unUnpersistRDD(event: SparkListenerUnpersistRDD): Unit = {}
  // Application end: close every executor that is still open at the app's end time.
  override def onEndApp(event: SparkListenerApplicationEnd): Unit = {
    state = state.copy(
      executorInfo = state.executorInfo.map(pair => {
        val executorInfo = if (pair._2.endTime.isEmpty) pair._2.copy(endTime = Some(event.time)) else pair._2
        pair._1 -> executorInfo
      }),
      applicationEndedAt = Some(event.time),
      lastUpdatedAt = event.time)
  }
  // Undo of onEndApp: reopen only executors whose end time equals this event's time
  // (executors removed earlier by onRemoveExecutor keep their own end times).
  override def unEndApp(event: SparkListenerApplicationEnd): Unit = {
    state = state.copy(
      executorInfo = state.executorInfo.map(pair => {
        val executorInfo = if (pair._2.endTime.exists(_ == event.time)) pair._2.copy(endTime = None) else pair._2
        pair._1 -> executorInfo
      }),
      applicationEndedAt = None,
      lastUpdatedAt = event.time)
  }
}
| groupon/sparklint | src/main/scala/com/groupon/sparklint/events/LosslessStateManager.scala | Scala | apache-2.0 | 7,879 |
package com.campudus.tableaux.database.model
import java.util.UUID
import com.campudus.tableaux.database.domain.{DomainObject, ExtendedFile}
import com.campudus.tableaux.database.model.TableauxModel.{ColumnId, Ordering, RowId, TableId}
import com.campudus.tableaux.database.{DatabaseConnection, DatabaseQuery}
import com.campudus.tableaux.helper.ResultChecker._
import org.vertx.scala.core.json._
import scala.concurrent.Future
// One attachment cell value: the (table, column, row) location, the attached file's UUID,
// and an optional explicit position among the cell's attachments.
case class Attachment(tableId: TableId, columnId: ColumnId, rowId: RowId, uuid: UUID, ordering: Option[Ordering])
/** A file attached to a cell, paired with its position among the cell's attachments. */
case class AttachmentFile(file: ExtendedFile, ordering: Ordering) extends DomainObject {
  override def getJson: JsonObject = {
    // Serialize the ordering first, then merge in the file's own JSON representation.
    val orderingJson = Json.obj("ordering" -> ordering)
    orderingJson.mergeIn(file.getJson)
  }
}
/** Factories for [[AttachmentModel]]; the single-argument form derives the FileModel from the connection. */
object AttachmentModel {
  def apply(connection: DatabaseConnection): AttachmentModel =
    apply(connection, FileModel(connection))
  def apply(connection: DatabaseConnection, fileModel: FileModel): AttachmentModel =
    new AttachmentModel(connection, fileModel)
}
/**
  * Persistence model for the `system_attachment` join table, which links attachment
  * files (by UUID) to table cells (table/column/row) together with an ordering value.
  *
  * Cleanup: removed the dead single-row `insert` statement in `replace` (the method
  * builds its batched INSERT inline), replaced `Future(...)` around pure values with
  * `Future.successful`, and dropped unused result bindings.
  */
class AttachmentModel(protected[this] val connection: DatabaseConnection, protected[this] val fileModel: FileModel)
    extends DatabaseQuery {

  val table = "system_attachment"

  /**
    * Replaces all attachments of a cell with `attachments` in one transaction.
    * Attachments without an explicit ordering get consecutive positions from a
    * running counter that advances once per attachment.
    */
  def replace(tableId: TableId, columnId: ColumnId, rowId: RowId, attachments: Seq[Attachment]): Future[Unit] = {
    val delete = s"DELETE FROM $table WHERE table_id = ? AND column_id = ? AND row_id = ?"
    // One "(?, ?, ?, ?, ?)" placeholder group per attachment for a single batched INSERT.
    val paramStr = attachments.map(_ => "(?, ?, ?, ?, ?)").mkString(", ")

    connection.transactional({ t =>
      for {
        (t, _) <- t.query(delete, Json.arr(tableId, columnId, rowId))

        // Walk the attachments once, filling in any missing ordering with the running index.
        (_, changedAttachments) <- attachments.foldLeft(Future.successful((0, Seq.empty[Attachment]))) {
          (lastResult, attachment) =>
            lastResult.map({
              case (lastOrdering, seq) =>
                val newOrdering = lastOrdering + 1
                (newOrdering, seq :+ attachment.copy(ordering = Some(attachment.ordering.getOrElse(newOrdering))))
            })
        }

        // Flatten into the positional parameter list matching paramStr.
        params = changedAttachments.flatMap(
          attachment =>
            List(attachment.tableId,
                 attachment.columnId,
                 attachment.rowId,
                 attachment.uuid.toString,
                 attachment.ordering.get))

        (t, _) <- {
          if (params.nonEmpty) {
            t.query(s"INSERT INTO $table(table_id, column_id, row_id, attachment_uuid, ordering) VALUES $paramStr",
                    Json.arr(params: _*))
          } else {
            // Nothing to insert; hand the unchanged transaction onwards.
            Future.successful((t, Json.emptyObj()))
          }
        }
      } yield (t, ())
    })
  }

  /** Inserts one attachment, resolving its ordering first, and returns it with its file. */
  def add(a: Attachment): Future[AttachmentFile] = {
    val insert = s"INSERT INTO $table(table_id, column_id, row_id, attachment_uuid, ordering) VALUES(?, ?, ?, ?, ?)"
    connection.transactional({ t =>
      for {
        (t, ordering) <- retrieveOrdering(t, a)
        (t, _) <- t.query(insert, Json.arr(a.tableId, a.columnId, a.rowId, a.uuid.toString, ordering))
        file <- retrieveFile(a.uuid, ordering)
      } yield (t, file)
    })
  }

  /** Moves an existing attachment to the ordering carried by `a` (or the next free position). */
  def update(a: Attachment): Future[AttachmentFile] = {
    val update =
      s"UPDATE $table SET ordering = ? WHERE table_id = ? AND column_id = ? AND row_id = ? AND attachment_uuid = ?"
    connection.transactional({ t =>
      for {
        (t, ordering: Ordering) <- retrieveOrdering(t, a)
        (t, _) <- t.query(update, Json.arr(ordering, a.tableId, a.columnId, a.rowId, a.uuid.toString))
        file <- retrieveFile(a.uuid, ordering)
      } yield (t, file)
    })
  }

  // Resolves the ordering to use for `a`: its own value if present, otherwise
  // MAX(ordering) + 1 for the cell (COALESCE makes an empty cell start at 1).
  private def retrieveOrdering(t: connection.Transaction, a: Attachment): Future[(connection.Transaction, Ordering)] = {
    for {
      (t, ordering: Ordering) <- a.ordering match {
        case Some(i: Ordering) => Future.successful((t, i: Ordering))
        case None =>
          for {
            (t, result) <- t.query(
              s"SELECT COALESCE(MAX(ordering),0) + 1 FROM $table WHERE table_id = ? AND column_id = ? AND row_id = ?",
              Json.arr(a.tableId, a.columnId, a.rowId))
            resultArr <- Future(selectNotNull(result))
          } yield {
            (t, resultArr.head.get[Ordering](0))
          }
      }
    } yield (t, ordering)
  }

  /** Retrieves all attachments of a cell with their files, sorted by ordering. */
  def retrieveAll(tableId: TableId, columnId: ColumnId, rowId: RowId): Future[Seq[AttachmentFile]] = {
    val select =
      s"SELECT attachment_uuid, ordering FROM $table WHERE table_id = ? AND column_id = ? AND row_id = ? ORDER BY ordering"
    for {
      result <- connection.query(select, Json.arr(tableId, columnId, rowId))
      attachments = resultObjectToJsonArray(result).map(e => (e.get[String](0), e.get[Ordering](1)))
      files <- Future.sequence(attachments.map({
        case (uuid, ordering) => retrieveFile(UUID.fromString(uuid), ordering)
      }))
    } yield files
  }

  /** Deletes one attachment of a cell; the result is passed through deleteNotNull for validation. */
  def delete(a: Attachment): Future[Unit] = {
    val delete = s"DELETE FROM $table WHERE table_id = ? AND column_id = ? AND row_id = ? AND attachment_uuid = ?"
    for {
      result <- connection.query(delete, Json.arr(a.tableId, a.columnId, a.rowId, a.uuid.toString))
      _ <- Future(deleteNotNull(result))
    } yield ()
  }

  /** Deletes all attachments of a cell (no-op if the cell has none). */
  def deleteAll(tableId: TableId, columnId: ColumnId, rowId: RowId): Future[Unit] = {
    val delete = s"DELETE FROM $table WHERE table_id = ? AND column_id = ? AND row_id = ?"
    for {
      _ <- connection.query(delete, Json.arr(tableId, columnId, rowId))
    } yield ()
  }

  /** Loads the file behind `file` and pairs it with the given ordering. */
  def retrieveFile(file: UUID, ordering: Ordering): Future[AttachmentFile] = {
    fileModel.retrieve(file).map(ExtendedFile).map(f => AttachmentFile(f, ordering))
  }

  /** Lists every cell (table, column, row) that references the given attachment UUID. */
  def retrieveCells(file: UUID): Future[Seq[(TableId, ColumnId, RowId)]] = {
    val select = s"SELECT table_id, column_id, row_id FROM $table WHERE attachment_uuid = ?"
    for {
      result <- connection.query(select, Json.arr(file.toString))
      cells = resultObjectToJsonArray(result).map(e => (e.get[TableId](0), e.get[ColumnId](1), e.get[RowId](2)))
    } yield cells
  }
}
| campudus/tableaux | src/main/scala/com/campudus/tableaux/database/model/AttachmentModel.scala | Scala | apache-2.0 | 6,305 |
package dpla.ingestion3.data
import scala.xml.Elem
/**
* Created by scott on 1/22/17.
*/
object TestOaiData {
  /**
    * Canned OAI-PMH responses used as test fixtures. The XML payloads below are
    * stored verbatim and must not be reformatted.
    */
  // This needs to be a String because Scala won't allow for malformed XML
  val badXmlStr: String=
    """<OAI-PMH>
      <header>
        <record>
      </header>
    </OAI-PMH>"""
  // Sample OAI-PMH error response (error code "cannotDisseminateFormat").
  val paOaiErrorRsp: String =
    """<OAI-PMH xmlns="http://www.openarchives.org/OAI/2.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/ http://www.openarchives.org/OAI/2.0/OAI-PMH.xsd">
    <responseDate>2017-01-27T05:10:17Z</responseDate>
    <request verb="ListRecords">http://localhost:8080/fedora/oai</request>
    <error code="cannotDisseminateFormat">
    Repository does not provide that format in OAI-PMH responses.
    </error>
    </OAI-PMH>"""
  // Sample OAI-PMH ListRecords response containing oai_dc records and a resumptionToken.
  val paOaiListRecordsRsp: String =
    """<OAI-PMH
    xmlns="http://www.openarchives.org/OAI/2.0/"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/ http://www.openarchives.org/OAI/2.0/OAI-PMH.xsd">
    <responseDate>2017-01-21T17:24:56Z</responseDate>
    <request metadataPrefix="oai_dc" verb="ListRecords">http://localhost:8080/fedora/oai</request>
    <ListRecords>
    <record>
    <header>
    <identifier>
    oai:libcollab.temple.edu:fedora-system:ContentModel-3.0
    </identifier>
    <datestamp>2008-07-02T05:09:44Z</datestamp>
    <setSpec>foobar</setSpec>
    </header>
    <metadata>
    <oai_dc:dc
    xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
    <dc:title>Content Model Object for Content Model Objects</dc:title>
    <dc:identifier>fedora-system:ContentModel-3.0</dc:identifier>
    </oai_dc:dc>
    </metadata>
    </record>
    <record>
    <header>
    <identifier>
    oai:libcollab.temple.edu:fedora-system:FedoraObject-3.0
    </identifier>
    <datestamp>2008-07-02T05:09:44Z</datestamp>
    </header>
    <metadata>
    <oai_dc:dc
    xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
    <dc:title>Content Model Object for All Objects</dc:title>
    <dc:identifier>fedora-system:FedoraObject-3.0</dc:identifier>
    </oai_dc:dc>
    </metadata>
    </record>
    <record>
    <header>
    <identifier>
    oai:libcollab.temple.edu:fedora-system:ServiceDefinition-3.0
    </identifier>
    <datestamp>2008-07-02T05:09:44Z</datestamp>
    </header>
    <metadata>
    <oai_dc:dc
    xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
    <dc:title>
    Content Model Object for Service Definition Objects
    </dc:title>
    <dc:identifier>fedora-system:ServiceDefinition-3.0</dc:identifier>
    </oai_dc:dc>
    </metadata>
    </record>
    <record>
    <header>
    <identifier>
    oai:libcollab.temple.edu:fedora-system:ServiceDeployment-3.0
    </identifier>
    <datestamp>2008-07-02T05:09:44Z</datestamp>
    </header>
    <metadata>
    <oai_dc:dc
    xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
    <dc:title>
    Content Model Object for Service Deployment Objects
    </dc:title>
    <dc:identifier>fedora-system:ServiceDeployment-3.0</dc:identifier>
    </oai_dc:dc>
    </metadata>
    </record>
    <record>
    <header>
    <identifier>
    oai:libcollab.temple.edu:dplapa:ALBRIGHT_cpaphoto_43
    </identifier>
    <datestamp>2016-12-21T15:54:10Z</datestamp>
    </header>
    <metadata>
    <oai_dc:dc
    xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
    <dc:title>Boarding room</dc:title>
    <dc:creator>Charles H. Venus, 1899</dc:creator>
    <dc:subject>Dormitories</dc:subject>
    <dc:description>
    Left - Charles Adolphus Mock, Class of 1898. Right - Charles Henry Venus, Class of 1899.
    </dc:description>
    <dc:contributor>Albright College</dc:contributor>
    <dc:date>1897</dc:date>
    <dc:type>Image</dc:type>
    <dc:format>image/jpeg</dc:format>
    <dc:identifier>dplapa:ALBRIGHT_cpaphoto_43</dc:identifier>
    <dc:identifier>
    http://digitalcollections.powerlibrary.org/cdm/ref/collection/cpaphoto/id/43
    </dc:identifier>
    <dc:identifier>
    http://digitalcollections.powerlibrary.org/utils/getthumbnail/collection/cpaphoto/id/43
    </dc:identifier>
    <dc:source>POWER Library as sponsor and HSLC as maintainer</dc:source>
    <dc:relation>
    Albright College - Central Pennsylvania College Photo Collection
    </dc:relation>
    <dc:coverage>New Berlin, Union County, Pennsylvania</dc:coverage>
    <dc:rights>
    U.S. and international copyright laws protect this digital image. Commercial use or distribution of the image is not permitted without prior permission of the copyright holder. Please contact the Albright College, Special Collections for permission to use the digital image.
    </dc:rights>
    </oai_dc:dc>
    </metadata>
    </record>
    <record>
    <header>
    <identifier>
    oai:libcollab.temple.edu:dplapa:ALBRIGHT_cpaphoto_2
    </identifier>
    <datestamp>2016-12-21T15:54:12Z</datestamp>
    </header>
    <metadata>
    <oai_dc:dc
    xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
    <dc:title>Bookkeeping Class</dc:title>
    <dc:subject>Bookkeeping, Classrooms</dc:subject>
    <dc:description>
    Bookeeping class. George Vellerchamp is possibly the fourth from left.
    </dc:description>
    <dc:contributor>Albright College</dc:contributor>
    <dc:type>Image</dc:type>
    <dc:format>image/jpeg</dc:format>
    <dc:identifier>dplapa:ALBRIGHT_cpaphoto_2</dc:identifier>
    <dc:identifier>
    http://digitalcollections.powerlibrary.org/cdm/ref/collection/cpaphoto/id/2
    </dc:identifier>
    <dc:identifier>
    http://digitalcollections.powerlibrary.org/utils/getthumbnail/collection/cpaphoto/id/2
    </dc:identifier>
    <dc:source>POWER Library as sponsor and HSLC as maintainer</dc:source>
    <dc:relation>
    Albright College - Central Pennsylvania College Photo Collection
    </dc:relation>
    <dc:coverage>New Berlin, Union County, Pennsylvania</dc:coverage>
    <dc:rights>
    U.S. and international copyright laws protect this digital image. Commercial use or distribution of the image is not permitted without prior permission of the copyright holder. Please contact the Albright College, Special Collections for permission to use the digital image.
    </dc:rights>
    </oai_dc:dc>
    </metadata>
    </record>
    <record>
    <header>
    <identifier>
    oai:libcollab.temple.edu:dplapa:ALBRIGHT_cpaphoto_58
    </identifier>
    <datestamp>2016-12-21T15:54:14Z</datestamp>
    </header>
    <metadata>
    <oai_dc:dc
    xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
    <dc:title>Rollin Wilson</dc:title>
    <dc:subject>Students, Rollin Wilson</dc:subject>
    <dc:description>Rollin Eugene Wilson, Class of 1902</dc:description>
    <dc:contributor>Albright College</dc:contributor>
    <dc:date>Between 1899-1902</dc:date>
    <dc:type>Image</dc:type>
    <dc:format>image/jpeg</dc:format>
    <dc:identifier>dplapa:ALBRIGHT_cpaphoto_58</dc:identifier>
    <dc:identifier>
    http://digitalcollections.powerlibrary.org/cdm/ref/collection/cpaphoto/id/58
    </dc:identifier>
    <dc:identifier>
    http://digitalcollections.powerlibrary.org/utils/getthumbnail/collection/cpaphoto/id/58
    </dc:identifier>
    <dc:source>POWER Library as sponsor and HSLC as maintainer</dc:source>
    <dc:relation>
    Albright College - Central Pennsylvania College Photo Collection
    </dc:relation>
    <dc:coverage>New Berlin, Union County, Pennsylvania</dc:coverage>
    <dc:rights>
    U.S. and international copyright laws protect this digital image. Commercial use or distribution of the image is not permitted without prior permission of the copyright holder. Please contact the Albright College, Special Collections for permission to use the digital image.
    </dc:rights>
    </oai_dc:dc>
    </metadata>
    </record>
    <record>
    <header>
    <identifier>
    oai:libcollab.temple.edu:dplapa:ALBRIGHT_cpaphoto_40
    </identifier>
    <datestamp>2016-12-21T15:55:24Z</datestamp>
    </header>
    <metadata>
    <oai_dc:dc
    xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
    <dc:title>Lover's Log</dc:title>
    <dc:subject>Logs, Buildings</dc:subject>
    <dc:description>Lover's log. Main building is in the background.</dc:description>
    <dc:contributor>Albright College</dc:contributor>
    <dc:date>Between 1887-1902</dc:date>
    <dc:type>Image</dc:type>
    <dc:format>image/jpeg</dc:format>
    <dc:identifier>dplapa:ALBRIGHT_cpaphoto_40</dc:identifier>
    <dc:identifier>
    http://digitalcollections.powerlibrary.org/cdm/ref/collection/cpaphoto/id/40
    </dc:identifier>
    <dc:identifier>
    http://digitalcollections.powerlibrary.org/utils/getthumbnail/collection/cpaphoto/id/40
    </dc:identifier>
    <dc:source>POWER Library as sponsor and HSLC as maintainer</dc:source>
    <dc:relation>
    Albright College - Central Pennsylvania College Photo Collection
    </dc:relation>
    <dc:coverage>New Berlin, Union County, Pennsylvania</dc:coverage>
    <dc:rights>
    U.S. and international copyright laws protect this digital image. Commercial use or distribution of the image is not permitted without prior permission of the copyright holder. Please contact the Albright College, Special Collections for permission to use the digital image.
    </dc:rights>
    </oai_dc:dc>
    </metadata>
    </record>
    <record>
    <header>
    <identifier>
    oai:libcollab.temple.edu:dplapa:ALBRIGHT_churchslide_35
    </identifier>
    <datestamp>2016-12-21T15:55:54Z</datestamp>
    </header>
    <metadata>
    <oai_dc:dc
    xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
    <dc:title>Portrait of Jacob Albright</dc:title>
    <dc:creator>Fred W. Solver (1869-1951)</dc:creator>
    <dc:subject>Jacob Albright</dc:subject>
    <dc:description>
    The lantern slides were produced by Rev. Fred W. Solver that he used for a lecture about individuals and places associated with the Evangelical Church.
    </dc:description>
    <dc:contributor>Albright College</dc:contributor>
    <dc:date>Ca. 1930</dc:date>
    <dc:type>Image</dc:type>
    <dc:format>image/jpeg</dc:format>
    <dc:identifier>dplapa:ALBRIGHT_churchslide_35</dc:identifier>
    <dc:identifier>
    http://digitalcollections.powerlibrary.org/cdm/ref/collection/churchslide/id/35
    </dc:identifier>
    <dc:identifier>
    http://digitalcollections.powerlibrary.org/utils/getthumbnail/collection/churchslide/id/35
    </dc:identifier>
    <dc:source>POWER Library as sponsor and HSLC as maintainer</dc:source>
    <dc:relation>
    Albright Evangelical Church Lantern Slide Collection
    </dc:relation>
    <dc:coverage>Pennsylvania, Ohio</dc:coverage>
    <dc:rights>
    U.S. and international copyright laws protect this digital image. Commercial use or distribution of the image is not permitted without prior permission of the copyright holder. Please contact the Albright College, Special Collections for permission to use the digital image.
    </dc:rights>
    </oai_dc:dc>
    </metadata>
    </record>
    <record>
    <header>
    <identifier>
    oai:libcollab.temple.edu:dplapa:ALBRIGHT_churchslide_1
    </identifier>
    <datestamp>2016-12-21T15:55:55Z</datestamp>
    </header>
    <metadata>
    <oai_dc:dc
    xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/"
    xmlns:dc="http://purl.org/dc/elements/1.1/"
    xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/oai_dc/ http://www.openarchives.org/OAI/2.0/oai_dc.xsd">
    <dc:title>General conference</dc:title>
    <dc:creator>Fred W. Solver (1869-1951)</dc:creator>
    <dc:subject>Evangelical Church conference</dc:subject>
    <dc:description>
    Site of the General Conference in 1830. - The lantern slides were produced by Rev. Fred W. Solver that he used for a lecture about individuals and places associated with the Evangelical Church.
    </dc:description>
    <dc:contributor>Albright College</dc:contributor>
    <dc:date>Ca. 1930</dc:date>
    <dc:type>Image</dc:type>
    <dc:format>image/jpeg</dc:format>
    <dc:identifier>dplapa:ALBRIGHT_churchslide_1</dc:identifier>
    <dc:identifier>
    http://digitalcollections.powerlibrary.org/cdm/ref/collection/churchslide/id/1
    </dc:identifier>
    <dc:identifier>
    http://digitalcollections.powerlibrary.org/utils/getthumbnail/collection/churchslide/id/1
    </dc:identifier>
    <dc:source>POWER Library as sponsor and HSLC as maintainer</dc:source>
    <dc:relation>
    Albright Evangelical Church Lantern Slide Collection
    </dc:relation>
    <dc:coverage>Pennsylvania, Ohio</dc:coverage>
    <dc:rights>
    U.S. and international copyright laws protect this digital image. Commercial use or distribution of the image is not permitted without prior permission of the copyright holder. Please contact the Albright College, Special Collections for permission to use the digital image.
    </dc:rights>
    </oai_dc:dc>
    </metadata>
    </record>
    <resumptionToken expirationDate="2017-01-21T17:33:16Z" cursor="0">90d421891feba6922f57a59868d7bcd1</resumptionToken>
    </ListRecords>
    </OAI-PMH>"""
  // Sample OAI-PMH ListSets response (request URL: http://dpla.library.in.gov/OAIHandler).
  val inOaiListSetsRsp: String =
    """<OAI-PMH xmlns="http://www.openarchives.org/OAI/2.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/ http://www.openarchives.org/OAI/2.0/OAI-PMH.xsd">
    <responseDate>2017-07-20T19:58:26Z</responseDate>
    <request verb="ListSets">http://dpla.library.in.gov/OAIHandler</request>
    <ListSets>
    <set>
    <setSpec>PALNI_winona</setSpec>
    <setName>Grace College - Winona Railroad Collection</setName>
    </set>
    <set>
    <setSpec>BSU_INArtsDesk</setSpec>
    <setName>Indiana ArtsDesk Broadcasts</setName>
    </set>
    <set>
    <setSpec>IPFW_cc_fw_elect</setSpec>
    <setName>Fort Wayne Area Election Returns</setName>
    </set>
    <set>
    <setSpec>PPO_IndianaAlbum</setSpec>
    <setName>Indiana Album</setName>
    </set>
    <set>
    <setSpec>IMCPL_shs</setSpec>
    <setName>Shortridge High School Yearbook Collection</setName>
    </set>
    </ListSets>
    </OAI-PMH>"""
}
| dpla/ingestion3 | src/test/scala/dpla/ingestion3/data/TestOaiData.scala | Scala | mit | 19,009 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets that meet specific criteria, giving a basic overview of the dataset's contents without deeper analysis.