code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import org.apache.spark.annotation.{Experimental, InterfaceStability}
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.analysis.Analyzer
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.SparkPlanner
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.hive.client.HiveClient
import org.apache.spark.sql.internal.{BaseSessionStateBuilder, SessionResourceLoader, SessionState}
/**
 * Builder that produces a Hive-aware `SessionState`.
 *
 * Extends the base builder by swapping in Hive-specific components: a Hive session
 * catalog backed by the metastore, Hive resolution/post-hoc analyzer rules, and
 * planner strategies for Hive table scans and scripts.
 */
@Experimental
@InterfaceStability.Unstable
class HiveSessionStateBuilder(session: SparkSession, parentState: Option[SessionState] = None)
extends BaseSessionStateBuilder(session, parentState) {
// Shared Hive external catalog. The cast is expected to hold because a Hive-enabled
// session installs a HiveExternalCatalog in the shared state
// (NOTE(review): not visible in this file -- confirm against SharedState construction).
private def externalCatalog: HiveExternalCatalog =
session.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog]
/**
 * Create a Hive aware resource loader.
 *
 * A fresh Hive client session is created so added resources are registered with the
 * metastore client as well as with Spark.
 */
override protected lazy val resourceLoader: HiveSessionResourceLoader = {
val client: HiveClient = externalCatalog.client.newSession()
new HiveSessionResourceLoader(session, client)
}
/**
 * Create a [[HiveSessionCatalog]].
 *
 * State held by the parent session's catalog (if any) is copied into the new
 * catalog so forked sessions keep temp views/functions.
 */
override protected lazy val catalog: HiveSessionCatalog = {
val catalog = new HiveSessionCatalog(
externalCatalog,
session.sharedState.globalTempViewManager,
new HiveMetastoreCatalog(session),
functionRegistry,
conf,
SessionState.newHadoopConf(session.sparkContext.hadoopConfiguration, conf),
sqlParser,
resourceLoader)
parentState.foreach(_.catalog.copyStateTo(catalog))
catalog
}
/**
 * A logical query plan `Analyzer` with rules specific to Hive.
 *
 * NOTE: rule order matters -- Hive serde resolution runs before the generic data
 * source rules, and custom user rules are appended after the built-in ones.
 */
override protected def analyzer: Analyzer = new Analyzer(catalog, conf) {
override val extendedResolutionRules: Seq[Rule[LogicalPlan]] =
new ResolveHiveSerdeTable(session) +:
new FindDataSourceTable(session) +:
new ResolveSQLOnFile(session) +:
customResolutionRules
override val postHocResolutionRules: Seq[Rule[LogicalPlan]] =
new DetermineTableStats(session) +:
RelationConversions(conf, catalog) +:
PreprocessTableCreation(session) +:
PreprocessTableInsertion(conf) +:
DataSourceAnalysis(conf) +:
HiveAnalysis +:
customPostHocResolutionRules
override val extendedCheckRules: Seq[LogicalPlan => Unit] =
PreWriteCheck +:
PreReadCheck +:
customCheckRules
}
/**
 * Planner that takes into account Hive-specific strategies.
 *
 * Experimental and custom strategies are listed before the built-in ones, so user
 * strategies get the first chance to plan an operator.
 */
override protected def planner: SparkPlanner = {
new SparkPlanner(session.sparkContext, conf, experimentalMethods) with HiveStrategies {
override val sparkSession: SparkSession = session
override def extraPlanningStrategies: Seq[Strategy] =
super.extraPlanningStrategies ++ customPlanningStrategies
override def strategies: Seq[Strategy] = {
experimentalMethods.extraStrategies ++
extraPlanningStrategies ++ Seq(
FileSourceStrategy,
DataSourceStrategy(conf),
SpecialLimits,
InMemoryScans,
HiveTableScans,
Scripts,
Aggregation,
JoinSelection,
BasicOperators
)
}
}
}
// Used by the base builder to fork a new builder when a session is cloned.
override protected def newBuilder: NewBuilder = new HiveSessionStateBuilder(_, _)
}
/**
 * Hive-aware session resource loader.
 *
 * When a jar is added, it is first registered with the Hive metastore client
 * session, then handed to the default Spark-side handling via the superclass.
 */
class HiveSessionResourceLoader(
session: SparkSession,
client: HiveClient)
extends SessionResourceLoader(session) {
override def addJar(path: String): Unit = {
// Make the jar visible to the Hive client before the Spark session (original order kept).
client.addJar(path)
super.addJar(path)
}
}
| 1haodian/spark | sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionStateBuilder.scala | Scala | apache-2.0 | 4,469 |
/*
* Copyright (c) 2016 eBay Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ebay.rtran.report.impl
import java.io.OutputStream
import java.util.Optional
import ch.qos.logback.classic.spi.ILoggingEvent
import com.ebay.rtran.report.api.IReportEventSubscriber
import scala.compat.java8.OptionConverters._
/**
 * Report subscriber that accumulates, per rule, the number of manual changes the
 * rule requires. Events are extracted from formatted log messages of the form
 * "Rule &lt;name&gt; requires &lt;n&gt; manual changes"; `dumpTo` renders the accumulated
 * counts as a markdown table.
 */
class ManualChangesSummarySubscriber extends IReportEventSubscriber[(String, Int)] {

  // Compiled once instead of on every event.
  // NOTE: the pattern was previously double-escaped ("\\d+"), so it could never
  // match a digit sequence inside a triple-quoted string.
  private[this] val ManualChangesRegex = """Rule (.*) requires (\d+) manual changes""".r

  // rule name -> accumulated number of required manual changes
  private[this] var manualChanges = Map.empty[String, Int]

  /**
   * Extract a (ruleName, numberOfChanges) pair from a logging event whose
   * formatted message matches the expected pattern; empty otherwise.
   */
  override def filter(event: scala.Any): Optional[(String, Int)] = {
    val info = event match {
      case loggingEvent: ILoggingEvent =>
        loggingEvent.getFormattedMessage match {
          case ManualChangesRegex(rule, num) => Some((rule, num.toInt))
          case _ => None
        }
      case _ => None
    }
    info.asJava
  }

  /**
   * Write the summary as a markdown table. Writes nothing when no rule has
   * reported manual changes.
   */
  override def dumpTo(outputStream: OutputStream): Unit = if (manualChanges.nonEmpty) {
    val outputTemplate = "\r## Manual Changes Required\n\n| Rule | Details |\n| ---- | ----------- |\n"
    val content = manualChanges.foldLeft(outputTemplate) { (acc, summary) =>
      // Link text is the fully qualified rule name; the anchor is its simple name.
      acc + s"|[${summary._1}](#${summary._1.split("\\.").lastOption getOrElse ""}) | ${summary._2} manual changes required |\n"
    }
    outputStream.write(content.getBytes("utf8"))
  }

  /** Accumulate counts per rule across multiple events. */
  override def doAccept(event: (String, Int)): Unit = {
    val (rule, count) = event
    manualChanges += rule -> (manualChanges.getOrElse(rule, 0) + count)
  }

  override val sequence = 4
}
| keshin/RTran | rtran-report/src/main/scala/com/ebay/rtran/report/impl/ManualChangesSummarySubscriber.scala | Scala | apache-2.0 | 2,087 |
/*Β§
===========================================================================
KnapScal - Core
===========================================================================
Copyright (C) 2015-2016 Gianluca Costa
===========================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========================================================================
*/
package info.gianlucacosta.knapscal.knapsack.dynamic.full
import info.gianlucacosta.knapscal.knapsack.Item
/**
 * Solution of the full dynamic-programming knapsack algorithm.
 *
 * @param iterations the DP iterations that produced the solution
 * @param items      the items given to the algorithm
 * @param value      the optimal knapsack value
 */
case class Solution private[full](iterations: Seq[Iteration], items: Seq[Item], value: Int) {
  /**
   * Human-readable dump of the solution followed by every DP iteration.
   *
   * NOTE: escape sequences were previously double-escaped ("\\n"), producing
   * literal backslashes instead of newlines/tabs.
   */
  override def toString: String = {
    val result = new StringBuilder()

    result.append("Items: ")
    result.append(items)
    result.append("\n\n")

    result.append("Value: ")
    result.append(value)
    result.append("\n\n\n")

    result.append("==== ITERATIONS ===\n")
    result.append("\n\n")

    iterations.zipWithIndex.foreach {
      case (iteration, index) =>
        result.append(s"* Iteration ${index} *\n\n")
        result.append(s"M${index} = {\n\t${iteration.activeStates.mkString(",\n\t")}\n}\n\n")

        result.append(s"Dominated states = ")
        if (iteration.dominatedStates.isEmpty) {
          result.append("{}")
        } else {
          result.append(s"{\n\t${iteration.dominatedStates.mkString(",\n\t")}\n}")
        }

        result.append("\n\n")
        result.append("\n\n")
    }

    // Expression value replaces the redundant `return`
    result.toString
  }
}
| giancosta86/KnapScal-core | src/main/scala/info/gianlucacosta/knapscal/knapsack/dynamic/full/Solution.scala | Scala | apache-2.0 | 2,006 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import java.util.UUID
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.h2o.H2OContext
import org.apache.spark.h2o.H2OSchemaUtils.vecTypeToDataType
import org.apache.spark.sql.catalyst.expressions.{GenericMutableRow, Row}
import org.apache.spark.sql.types._
import org.apache.spark.{Partition, TaskContext}
import water.fvec.H2OFrame
import water.parser.ValueString
/**
 * H2O H2OFrame wrapper providing RDD[Row] API.
 *
 * Each chunk of the frame becomes one Spark partition; rows are produced through
 * a single mutable row instance that is reused across next() calls.
 *
 * @param h2oContext the H2O context, which supplies the underlying SparkContext
 * @param frame the wrapped H2O frame
 */
private[spark]
class H2OSchemaRDD(@transient val h2oContext: H2OContext,
@transient val frame: H2OFrame)
extends RDD[Row](h2oContext.sparkContext, Nil) with H2ORDDLike {
@DeveloperApi
override def compute(split: Partition, context: TaskContext): Iterator[Row] = {
val kn = keyName
new H2OChunkIterator[Row] {
override val partIndex: Int = split.index
override val keyName: String = kn
/** Mutable row returned by iterator */
// NOTE(review): the SAME GenericMutableRow instance is returned by every next()
// call, so consumers must copy a Row before buffering it.
val mutableRow = new GenericMutableRow(ncols)
/** Dummy mutable holder for String values */
val valStr = new ValueString()
/** Types of columns */
// FIXME: should be cached
lazy val types = fr.vecs().map( v => vecTypeToDataType(v))
override def next(): Row = {
var i = 0
// Fill every column of the reused row from the current chunk row.
while (i < ncols) {
val chk = chks(i)
val typ = types(i)
if (chk.isNA(row)) {
mutableRow.setNullAt(i)
} else {
// Convert the H2O chunk value to the Spark SQL type of the column.
mutableRow(i) = typ match {
case ByteType => chk.at8(row).asInstanceOf[Byte]
case ShortType => chk.at8(row).asInstanceOf[Short]
case IntegerType => chk.at8(row).asInstanceOf[Int]
case LongType => chk.at8(row)
case FloatType => chk.atd(row)
case DoubleType => chk.atd(row)
case BooleanType => chk.at8(row) == 1
case StringType =>
if (chk.vec().isEnum) {
// Categorical column: map the level index to its domain string.
chk.vec().domain()(chk.at8(row).asInstanceOf[Int])
} else if (chk.vec().isString) {
chk.atStr(valStr, row)
valStr.toString
} else if (chk.vec().isUUID) {
val uuid = new UUID(chk.at16h(row), chk.at16l(row))
uuid.toString
// NOTE(review): for any other vec type this stores scala.None in the
// row, which looks unintended -- confirm and consider setNullAt.
} else None
case TimestampType => new java.sql.Timestamp(chk.at8(row))
case _ => ???
}
}
i += 1
}
// Advance the cursor inherited from H2OChunkIterator (presumably the row index
// within the current chunk -- TODO confirm against H2OChunkIterator).
row += 1
// Return result
mutableRow
}
}
}
// One Spark partition per chunk of the frame's anchor vector.
override protected def getPartitions: Array[Partition] = {
val num = frame.anyVec().nChunks()
val res = new Array[Partition](num)
for( i <- 0 until num ) res(i) = new Partition { val index = i }
res
}
}
| printedheart/sparkling-water | core/src/main/scala/org/apache/spark/rdd/H2OSchemaRDD.scala | Scala | apache-2.0 | 3,601 |
// Regression test for scala/bug#6306 (a faulty optimization around eta-expansion
// and by-name arguments). The exact shapes of the definitions and call sites below
// are the test subject and must not be "cleaned up" or simplified.
object Test {
//Were affected by scala/bug#6306
def f[A](a: => A) = println(a.toString)
def f1[A <: AnyVal](a: => A) = println(a.toString)
def f1a[A <: AnyVal](a: => A) = println(a.##)
def f2[A <: AnyRef](a: => A) = println(a.toString)
def f2a[A <: String](a: => A) = println(a.toString)
//Works
def f3[A](a: => Seq[A]) = println(a.toString)
def foo() = println(2)
// Invokes the thunk twice, so its side effect (printing) must happen twice.
def client(f: () => Unit) = {f(); f()}
def attempt2(): Unit = {
// Eta-expansion of foo to a function value.
val bar: () => Unit = foo _
//The code causing scala/bug#6306 was supposed to optimize code like this:
client(() => bar ())
//to:
client(bar)
}
def main(args: Array[String]): Unit = {
attempt2()
f3(Seq(1))
f3(Seq())
f("")
f((1).toString)
f((1).##)
f1((1).##)
f2((1).toString)
f2a((1).toString)
}
}
// vim: set ts=8 sw=2 et:
| scala/scala | test/files/run/pure-args-byname-noinline.scala | Scala | apache-2.0 | 839 |
/**
* Copyright (C) 2012 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.util
import java.net.URI
import java.util.{Map β JMap}
import javax.servlet.http.{Cookie, HttpServletRequest}
import org.apache.commons.lang3.StringUtils
import org.apache.http.client.CookieStore
import org.apache.http.impl.client.BasicCookieStore
import org.apache.log4j.Level
import org.orbeon.oxf.common.{OXFException, ValidationException}
import org.orbeon.oxf.externalcontext.URLRewriter
import org.orbeon.oxf.http.Headers._
import org.orbeon.oxf.http._
import org.orbeon.oxf.pipeline.api.ExternalContext
import org.orbeon.oxf.properties.Properties
import org.orbeon.oxf.resources.URLFactory
import org.orbeon.oxf.util.ScalaUtils._
import org.orbeon.oxf.xml.XMLUtils
import org.orbeon.oxf.xml.dom4j.LocationData
import scala.collection.JavaConverters._
import scala.util.control.NonFatal
/**
 * Connection to a URL.
 *
 * Handles:
 *
 * - PUTting or POSTing a body
 * - credentials
 * - HTTP headers
 * - forwarding session cookies
 * - forwarding HTTP headers
 * - managing SOAP POST and GET a la XForms 1.1 (should this be here?)
 *
 * NOTE(review): this file appears to have suffered encoding corruption -- the Scala
 * arrow operators (fat arrow, right arrow, generator arrow) are rendered as the
 * character `β` throughout. Restore the original characters from version control.
 */
class Connection(
httpMethod : String,
url : URI,
credentials : Option[Credentials],
content : Option[StreamedContent],
headers : Map[String, List[String]],
logBody : Boolean)(implicit
logger : IndentedLogger
) extends ConnectionState with Logging {
import org.orbeon.oxf.util.Connection._
// Callers are expected to pass the HTTP method already normalized to uppercase.
require(StringUtils.isAllUpperCase(httpMethod))
// Open the connection. This sends request headers, request body, and reads status and response headers.
def connect(saveState: Boolean): ConnectionResult = {
val urlString = url.toString
val scheme = url.getScheme
try {
if (httpMethod == "GET" && Set("file", "oxf")(scheme)) {
// GET with file: or oxf:
// Create URL connection object
val urlConnection = URLFactory.createURL(urlString).openConnection
urlConnection.connect()
// Try to get a reasonable mediatype based on the extension
def contentTypeHeader =
Mediatypes.getMimeType(url.getPath) map (ct β ContentType β List(ct))
val headers =
urlConnection.getHeaderFields.asScala map { case (k, v) β k β v.asScala.to[List] } toMap
val headersWithContentType =
headers ++ contentTypeHeader.toList
// Create result (file/oxf reads always report a 200 status)
val connectionResult = ConnectionResult.apply(
url = urlString,
statusCode = 200,
headers = headersWithContentType,
content = StreamedContent.fromStreamAndHeaders(urlConnection.getInputStream, headersWithContentType)
)
if (debugEnabled) {
connectionResult.logResponseDetailsOnce(Level.DEBUG)
connectionResult.logResponseBody(Level.DEBUG, logBody)
}
connectionResult
} else if (isHTTPOrHTTPS(scheme)) {
// Any method with http: or https:
val cleanHeaders = {
// Gather all headers nicely capitalized
val capitalizedHeaders =
for {
(name, values) β headers.toList
if values ne null
value β values
if value ne null
} yield
capitalizeCommonOrSplitHeader(name) β value
combineValues[String, String, List](capitalizedHeaders).toMap
}
// Determine whether the URL is in fact an app-internal service path, in which
// case the request can bypass the network entirely.
val internalPath = {
val servicePrefix =
URLRewriterUtils.rewriteServiceURL(
NetUtils.getExternalContext.getRequest,
"/",
URLRewriter.REWRITE_MODE_ABSOLUTE
)
val matchesServicePrefix = urlString.startsWith(servicePrefix)
val servicePath = matchesServicePrefix option urlString.substring(servicePrefix.size - 1)
servicePath filter isInternalPath
}
// Reuse the cookie store from a previous connection if one was loaded (see ConnectionState).
val cookieStore = cookieStoreOpt getOrElse new BasicCookieStore
cookieStoreOpt = Some(cookieStore)
// Internal paths use the in-process client; everything else goes through Apache HttpClient.
val (effectiveConnectionURL, client) =
internalPath match {
case Some(internalPath) β (internalPath, InternalHttpClient)
case _ β (urlString, PropertiesApacheHttpClient)
}
val response =
client.connect(
effectiveConnectionURL,
credentials,
cookieStore,
httpMethod,
cleanHeaders,
content
)
ifDebug {
// Mask the password part of the URL's user info before logging.
def replacePassword(s: String) = {
val colonIndex = s.indexOf(':')
if (colonIndex != -1)
s.substring(0, colonIndex + 1) + "xxxxxxxx"
else
s
}
val connectionURI =
new URI(
url.getScheme,
Option(url.getUserInfo) map replacePassword orNull,
url.getHost,
url.getPort,
url.getPath,
url.getQuery,
url.getFragment
)
debug("opening URL connection",
Seq(
"method" β httpMethod,
"URL" β connectionURI.toString
) ++ (cleanHeaders mapValues (_ mkString ",")))
}
// Create result
val connectionResult = ConnectionResult.apply(
url = urlString,
statusCode = response.statusCode,
headers = response.headers,
content = response.content
)
ifDebug {
connectionResult.logResponseDetailsOnce(Level.DEBUG)
connectionResult.logResponseBody(Level.DEBUG, logBody)
}
// Save state if possible
if (saveState)
saveHttpState()
connectionResult
} else if (httpMethod != "GET" && Set("file", "oxf")(scheme)) {
// Writing to file: and oxf: SHOULD be supported
throw new OXFException("submission URL scheme not yet implemented: " + scheme)
} else if (scheme == "mailto") {
// MAY be supported
throw new OXFException("submission URL scheme not yet implemented: " + scheme)
} else {
throw new OXFException("submission URL scheme not supported: " + scheme)
}
} catch {
// Wrap any non-fatal failure with the URL as location information for error reporting.
case NonFatal(t) β throw new ValidationException(t, new LocationData(url.toString, -1, -1))
}
}
}
/**
 * Per-connection HTTP state (the cookie store), loaded from and saved to a
 * configurable scope: "request", "session", "application", or "none"
 * (see ConnectionState.stateScopeFromProperty).
 */
trait ConnectionState {
self: Connection β
import org.orbeon.oxf.util.ConnectionState._
private val stateScope = stateScopeFromProperty
// Cookie store shared across connections within the configured scope.
var cookieStoreOpt: Option[CookieStore] = None
// Restore a previously saved cookie store, if any, without creating a session.
def loadHttpState()(implicit logger: IndentedLogger): Unit = {
cookieStoreOpt =
stateAttributes(createSession = false) flatMap
(m β Option(m.get(HttpCookieStoreAttribute).asInstanceOf[CookieStore]))
debugStore("loaded HTTP state", "did not load HTTP state")
}
// Save the current cookie store, if any, creating a session when the scope requires one.
def saveHttpState()(implicit logger: IndentedLogger): Unit = {
cookieStoreOpt foreach { cookieStore β
stateAttributes(createSession = true) foreach
(_.put(HttpCookieStoreAttribute, cookieStore))
}
debugStore("saved HTTP state", "did not save HTTP state")
}
// Debug-log what was (or was not) loaded/saved, including the cookie names.
private def debugStore(positive: String, negative: String)(implicit logger: IndentedLogger) =
ifDebug {
cookieStoreOpt match {
case Some(cookieStore) β
val cookies = cookieStore.getCookies.asScala map (_.getName) mkString " | "
debug(positive, Seq(
"scope" β stateScope,
"cookie names" β (if (cookies.nonEmpty) cookies else null))
)
case None β
debug(negative)
}
}
// Attribute map backing the configured scope; None for scope "none" or when the
// session scope is selected but no session exists (and none should be created).
private def stateAttributes(createSession: Boolean) = {
val externalContext = NetUtils.getExternalContext
stateScope match {
case "request" β
Some(externalContext.getRequest.getAttributesMap)
case "session" if externalContext.getSession(createSession) ne null β
Some(externalContext.getSession(createSession).getAttributesMap)
case "application" β
Some(externalContext.getWebAppContext.getAttributesMap)
case _ β
None
}
}
}
/**
 * Companion-style configuration holder for [[ConnectionState]]: property names,
 * the default state scope, and the logic reading the configured scope.
 */
private object ConnectionState {

  val DefaultStateScope        = "session"
  val HttpStateProperty        = "oxf.http.state"
  val HttpCookieStoreAttribute = "oxf.http.cookie-store"
  val AllScopes                = Set("none", "request", "session", "application")

  // Read the configured state scope, falling back to the default when the property
  // is absent or names an unknown scope.
  def stateScopeFromProperty = {
    val configured = Properties.instance.getPropertySet.getString(HttpStateProperty, DefaultStateScope)
    AllScopes.find(_ == configured) getOrElse DefaultStateScope
  }
}
object Connection extends Logging {
private val HttpInternalPathsProperty = "oxf.http.internal-paths"
private val HttpForwardCookiesProperty = "oxf.http.forward-cookies"
private val HttpForwardHeadersProperty = "oxf.http.forward-headers"
// Create a new Connection
//
// Precondition: methods that require a request body (POST/PUT) must provide content.
// For http/https URLs, previously saved cookie state is restored when loadState is true.
def apply(
httpMethod : String,
url : URI,
credentials : Option[Credentials],
content : Option[StreamedContent],
headers : Map[String, List[String]],
loadState : Boolean,
logBody : Boolean)(implicit
logger : IndentedLogger
): Connection = {
require(! requiresRequestBody(httpMethod) || content.isDefined)
val connection =
new Connection(httpMethod, url, credentials, content, headers, logBody)
// Get connection state if possible
if (loadState && isHTTPOrHTTPS(url.getScheme))
connection.loadHttpState()
connection
}
// For Java callers
//
// Adapts nullable arguments to Options. When the method requires a body but none was
// provided, an empty byte array is used so the request remains well-formed.
def jApply(
httpMethod : String,
url : URI,
credentialsOrNull : Credentials,
messageBodyOrNull : Array[Byte],
headers : Map[String, List[String]],
loadState : Boolean,
logBody : Boolean,
logger : IndentedLogger
): Connection = {
val messageBody: Option[Array[Byte]] =
if (requiresRequestBody(httpMethod)) Option(messageBodyOrNull) orElse Some(Array()) else None
val content = messageBody map
(StreamedContent.fromBytes(_, firstHeaderIgnoreCase(headers, ContentType)))
apply(
httpMethod = httpMethod,
url = url,
credentials = Option(credentialsOrNull),
content = content,
headers = headers,
loadState = loadState,
logBody = logBody)(
logger = logger
)
}
def isInternalPath(path: String) = {
val propertySet = Properties.instance.getPropertySet
val p = propertySet.getProperty(HttpInternalPathsProperty)
val r = p.associatedValue(_.value.toString.r)
r.pattern.matcher(path).matches()
}
// Whether the given method requires a request body
def requiresRequestBody(method: String) = Set("POST", "PUT")(method)
private def schemeRequiresHeaders(scheme: String) = ! Set("file", "oxf")(scheme)
private def isHTTPOrHTTPS(scheme: String) = Set("http", "https")(scheme)
// Build all the connection headers
//
// Returns EmptyHeaders for schemes that don't carry headers (file:, oxf:); otherwise
// delegates to buildConnectionHeadersLower. All returned header names are lowercase.
def buildConnectionHeadersLowerIfNeeded(
scheme : String,
credentials : Option[Credentials],
customHeaders : Map[String, List[String]],
headersToForward : Option[String])(implicit
logger : IndentedLogger
): Map[String, List[String]] =
if (schemeRequiresHeaders(scheme))
buildConnectionHeadersLower(credentials, customHeaders, headersToForward)
else
EmptyHeaders
// For Java callers
//
// Adapts nullable Java collections and strings to their Scala equivalents before
// delegating to buildConnectionHeadersLowerIfNeeded.
def jBuildConnectionHeadersLowerIfNeeded(
scheme : String,
credentialsOrNull : Credentials,
customHeadersOrNull : JMap[String, Array[String]],
headersToForward : String,
logger : IndentedLogger
): Map[String, List[String]] =
buildConnectionHeadersLowerIfNeeded(
scheme,
Option(credentialsOrNull),
Option(customHeadersOrNull) map (_.asScala.toMap mapValues (_.toList)) getOrElse EmptyHeaders,
Option(headersToForward))(
logger
)
// Build connection headers, adding SOAP-specific headers (Accept for GET,
// Content-Type/SOAPAction for POST) when the mediatype indicates SOAP, per
// XForms 1.1. Returns EmptyHeaders for schemes that don't carry headers.
def buildConnectionHeadersLowerWithSOAPIfNeeded(
scheme : String,
httpMethod : String,
credentialsOrNull : Credentials,
mediatype : String,
encodingForSOAP : String,
customHeaders : Map[String, List[String]],
headersToForward : String)(implicit
logger : IndentedLogger
): Map[String, List[String]] =
if (schemeRequiresHeaders(scheme)) {
// "If a header element defines the Content-Type header, then this setting overrides a Content-type set by the
// mediatype attribute"
val headersWithContentTypeIfNeeded = {
val headers = customHeaders.toMap
if (requiresRequestBody(httpMethod) && firstHeaderIgnoreCase(headers, ContentType).isEmpty)
headers + (ContentType β List(mediatype ensuring (_ ne null)))
else
headers
}
// Also make sure that if a header element defines Content-Type, this overrides the mediatype attribute
def soapMediatypeWithContentType =
firstHeaderIgnoreCase(headersWithContentTypeIfNeeded, ContentTypeLower) getOrElse mediatype
// NOTE: SOAP processing overrides Content-Type in the case of a POST
// So we have: @serialization β @mediatype β xf:header β SOAP
val connectionHeadersLower =
buildConnectionHeadersLower(
Option(credentialsOrNull),
headersWithContentTypeIfNeeded,
Option(headersToForward)
)
// SOAP headers are appended last so they take precedence over earlier values.
val soapHeadersLower =
buildSOAPHeadersLowerIfNeeded(
httpMethod,
soapMediatypeWithContentType,
encodingForSOAP
)
connectionHeadersLower ++ soapHeadersLower
} else
EmptyHeaders
// Get a space-separated list of header names to forward from the configuration properties
def getForwardHeaders: String = {
val propertySet = Properties.instance.getPropertySet
propertySet.getString(HttpForwardHeadersProperty, "")
}
// Get a list of cookie names to forward from the configuration properties
def getForwardCookies: List[String] = {
val propertySet = Properties.instance.getPropertySet
val maybeHeaderList = Option(propertySet.getString(HttpForwardCookiesProperty))
maybeHeaderList.map(_.split("""\\s+""").toList).getOrElse(Nil)
}
/**
* Build connection headers to send given:
*
* - the incoming request if present
* - a list of headers names and values to set
* - credentials information
* - a list of headers to forward
*
* Precedence (later wins): forwarded headers, then explicit custom headers, then the
* session cookie header, then the Orbeon token.
*
* NOTE: All header names returned are lowercase.
*/
private def buildConnectionHeadersLower(
credentials : Option[Credentials],
customHeaders : Map[String, List[String]],
headersToForward : Option[String])(implicit
logger : IndentedLogger
): Map[String, List[String]] = {
val externalContext = NetUtils.getExternalContext
// 1. Caller-specified list of headers to forward based on a space-separated list of header names
val headersToForwardLower =
Option(externalContext.getRequest) match {
case Some(request) β
val forwardHeaderNamesLower = stringOptionToSet(headersToForward) map (_.toLowerCase)
// NOTE: Forwarding the "Cookie" header may yield unpredictable results because of the above work done w/ session cookies
val requestHeaderValuesMap = request.getHeaderValuesMap.asScala
def canForwardHeader(nameLower: String) = {
// Only forward Authorization header if there is no credentials provided
val canForward = nameLower != AuthorizationLower || credentials.isEmpty
if (! canForward)
debug("not forwarding Authorization header because credentials are present")
canForward
}
for {
nameLower β forwardHeaderNamesLower.toList
values β requestHeaderValuesMap.get(nameLower)
if canForwardHeader(nameLower)
} yield {
debug("forwarding header", Seq("name" β nameLower, "value" β (values mkString " ")))
nameLower β values.toList
}
case None β
Seq()
}
// 2. Explicit caller-specified header name/values
val explicitHeadersLower = customHeaders map { case (k, v) β k.toLowerCase β v }
// 3. Forward cookies for session handling only if no credentials have been explicitly set
val newCookieHeaderLower = credentials match {
case None β sessionCookieHeaderLower(externalContext)
case Some(_) β None
}
// 4. Authorization token, created once per web app and cached in web-app attributes
val tokenHeaderLower = {
// Get token from web app scope
val token =
externalContext.getWebAppContext.attributes.getOrElseUpdate(OrbeonTokenLower, SecureUtils.randomHexId).asInstanceOf[String]
Seq(OrbeonTokenLower β List(token))
}
// Don't forward headers for which a value is explicitly passed by the caller, so start with headersToForward
// New cookie header, if present, overrides any existing cookies
headersToForwardLower.toMap ++ explicitHeadersLower ++ newCookieHeaderLower ++ tokenHeaderLower
}
// Compute the "cookie" header (lowercase name) used to forward session cookies, or
// None when no cookie forwarding is configured or possible. By convention, the first
// configured cookie name is the session cookie.
private def sessionCookieHeaderLower(
externalContext : ExternalContext)(implicit
logger : IndentedLogger
): Option[(String, List[String])] = {
// NOTE: We use a property, as some app servers like WebLogic allow configuring the session cookie name.
val cookiesToForward = getForwardCookies
if (cookiesToForward.nonEmpty) {
// By convention, the first cookie name is the session cookie
val sessionCookieName = cookiesToForward(0)
// NOTES 2011-01-22:
//
// If this is requested when a page is generated, it turns out we cannot rely on a JSESSIONID that makes
// sense right after authentication, even in the scenario where the JSESSIONID is clean, because Tomcat
// replays the initial request. In other words the JSESSIONID cookie can be stale.
//
// This means that the forwarding done below often doesn't make sense.
//
// We could possibly allow it only for XForms Ajax/page updates, where the probability that JSESSIONID is
// correct is greater.
//
// A stronger fix might be to simply disable JSESSIONID forwarding, or support a stronger SSO option.
//
// See: http://forge.ow2.org/tracker/?func=detail&atid=350207&aid=315104&group_id=168
// https://issues.apache.org/bugzilla/show_bug.cgi?id=50633
//
// TODO: ExternalContext must provide direct access to cookies
val requestOption = Option(externalContext.getRequest)
val nativeRequestOption =
requestOption flatMap
(r β Option(r.getNativeRequest)) collect
{ case r: HttpServletRequest β r }
// 1. If there is an incoming JSESSIONID cookie, use it. The reason is that there is not necessarily an
// obvious mapping between "session id" and JSESSIONID cookie value. With Tomcat, this works, but with e.g.
// WebSphere, you get session id="foobar" and JSESSIONID=0000foobar:-1. So we must first try to get the
// incoming JSESSIONID. To do this, we get the cookie, then serialize it as a header.
def fromIncoming =
nativeRequestOption flatMap
(sessionCookieFromIncomingLower(externalContext, _, cookiesToForward, sessionCookieName))
// 2. If there is no incoming session cookie, try to make our own cookie. This may fail with e.g. WebSphere.
def fromSession = sessionCookieFromGuessLower(externalContext, sessionCookieName)
// Logging
ifDebug {
val incomingSessionHeaders =
for {
request β requestOption.toList
cookieHeaders β request.getHeaderValuesMap.asScala.get("cookie").toList
cookieValue β cookieHeaders
if cookieValue.contains(sessionCookieName) // rough test
} yield
cookieValue
val incomingSessionCookies =
for {
nativeRequest β nativeRequestOption.toList
cookies β Option(nativeRequest.getCookies).toList
cookie β cookies
if cookie.getName == sessionCookieName
} yield
cookie.getValue
val sessionOption = Option(externalContext.getSession(false))
debug("setting cookie", Seq(
"new session" β (sessionOption map (_.isNew.toString) orNull),
"session id" β (sessionOption map (_.getId) orNull),
"requested session id" β (requestOption map (_.getRequestedSessionId) orNull),
"session cookie name" β sessionCookieName,
"incoming session cookies" β (incomingSessionCookies mkString " - "),
"incoming session headers" β (incomingSessionHeaders mkString " - ")))
}
// Prefer the incoming cookie; fall back to a cookie guessed from the session id.
fromIncoming orElse fromSession
} else
None
}
// Serialize the forwardable incoming cookies into a single "cookie" header value
// (lowercase name). The session cookie itself is only forwarded when the requested
// session id still matches the current session, to avoid forwarding a stale cookie.
private def sessionCookieFromIncomingLower(
externalContext : ExternalContext,
nativeRequest : HttpServletRequest,
cookiesToForward : Seq[String],
sessionCookieName: String)(implicit
logger : IndentedLogger
): Option[(String, List[String])] = {
// True when the incoming requested session id identifies the current, live session.
def requestedSessionIdMatches =
Option(externalContext.getSession(false)) exists { session β
val requestedSessionId = externalContext.getRequest.getRequestedSessionId
session.getId == requestedSessionId
}
val cookies = Option(nativeRequest.getCookies) getOrElse Array.empty[Cookie]
if (cookies.nonEmpty) {
val pairsToForward =
for {
cookie β cookies
// Only forward cookie listed as cookies to forward
if cookiesToForward.contains(cookie.getName)
// Only forward if there is the requested session id is the same as the current session. Otherwise,
// it means that the current session is no longer valid, or that the incoming cookie is out of date.
if sessionCookieName != cookie.getName || requestedSessionIdMatches
} yield
cookie.getName + '=' + cookie.getValue
if (pairsToForward.nonEmpty) {
// Multiple cookies in the header, separated with ";"
val cookieHeaderValue = pairsToForward mkString "; "
debug("forwarding cookies", Seq(
"cookie" β cookieHeaderValue,
"requested session id" β externalContext.getRequest.getRequestedSessionId))
Some("cookie" β List(cookieHeaderValue))
} else
None
} else
None
}
private def sessionCookieFromGuessLower(
externalContext : ExternalContext,
sessionCookieName : String
): Option[(String, List[String])] =
Option(externalContext.getSession(false)) map
{ session β "cookie" β List(sessionCookieName + "=" + session.getId) }
// Build SOAP-specific headers (lowercase names) per XForms 1.1:
// - GET with an application/soap+xml mediatype: set an Accept header
// - POST with an application/soap+xml mediatype: override Content-Type with text/xml
//   and add SOAPAction when an "action" mediatype parameter is present
// Returns Nil when this is not a SOAP submission.
private def buildSOAPHeadersLowerIfNeeded(
httpMethod : String,
mediatypeMaybeWithCharset : String,
encoding : String)(implicit
logger : IndentedLogger
): List[(String, List[String])] = {
require(encoding ne null)
import org.orbeon.oxf.util.NetUtils.{APPLICATION_SOAP_XML, getContentTypeMediaType, getContentTypeParameters}
val contentTypeMediaType = getContentTypeMediaType(mediatypeMaybeWithCharset)
// "If the submission mediatype contains a charset MIME parameter, then it is appended to the application/soap+xml
// MIME type. Otherwise, a charset MIME parameter with same value as the encoding attribute (or its default) is
// appended to the application/soap+xml MIME type." and "the charset MIME parameter is appended . The charset
// parameter value from the mediatype attribute is used if it is specified. Otherwise, the value of the encoding
// attribute (or its default) is used."
def charsetSuffix(parameters: collection.Map[String, String]) =
"; charset=" + parameters.getOrElse("charset", encoding)
val newHeaders =
httpMethod match {
case "GET" if contentTypeMediaType == APPLICATION_SOAP_XML β
// Set an Accept header
val parameters = getContentTypeParameters(mediatypeMaybeWithCharset).asScala
val acceptHeader = APPLICATION_SOAP_XML + charsetSuffix(parameters)
// Accept header with optional charset
List("accept" β List(acceptHeader))
case "POST" if contentTypeMediaType == APPLICATION_SOAP_XML β
// Set Content-Type and optionally SOAPAction headers
val parameters = getContentTypeParameters(mediatypeMaybeWithCharset).asScala
val overriddenContentType = "text/xml" + charsetSuffix(parameters)
val actionParameter = parameters.get("action")
// Content-Type with optional charset and SOAPAction header if any
List(ContentTypeLower β List(overriddenContentType)) ++ (actionParameter map (a β "soapaction" β List(a)))
case _ β
// Not a SOAP submission
Nil
}
if (newHeaders.nonEmpty)
debug("adding SOAP headers", newHeaders map { case (k, v) β capitalizeCommonOrSplitHeader(k) β v(0) })
newHeaders
}
/**
 * Debug-log the outgoing request body: text-like mediatypes (XML, text/JSON, form-urlencoded)
 * are logged decoded as UTF-8; everything else is logged as an opaque binary body.
 */
def logRequestBody(mediatype: String, messageBody: Array[Byte])(implicit logger: IndentedLogger): Unit =
  if (XMLUtils.isXMLMediatype(mediatype) ||
    XMLUtils.isTextOrJSONContentType(mediatype) ||
    mediatype == "application/x-www-form-urlencoded")
    logger.logDebug("submission", "setting request body", "body", new String(messageBody, "UTF-8"))
  else
    logger.logDebug("submission", "setting binary request body")
}
| martinluther/orbeon-forms | src/main/scala/org/orbeon/oxf/util/Connection.scala | Scala | lgpl-2.1 | 29,767 |
package funalgebra.examples
// Entry point bundling the configuration example: mixes together the functions,
// type aliases, instances and usage samples defined in the corresponding traits.
object configuration
  extends ConfigurationFunctions
  with ConfigurationTypes
  with ConfigurationInstances
  with ConfigurationUsage

// Entry point bundling the ordering example, assembled the same way.
object ordering
  extends OrderingTypes
  with OrderingInstances
  with OrderingUsage
| mbbx6spp/funalgebra | src/main/scala/funalgebra/examples/package.scala | Scala | bsd-3-clause | 252 |
/**
* Copyright (C) 2013 Adam Retter (adam.retter@googlemail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shadoop.typehelper
import org.apache.hadoop.io.Writable
import java.io.{DataInput, DataOutput}
/** Root of a heterogeneous list whose elements are Hadoop [[Writable]]s. */
sealed trait HListWritable

/**
 * Cons cell of the heterogeneous list. The cell is itself a [[Writable]]:
 * (de)serialization walks the list head-first and delegates to each element.
 */
final case class ::[+H <: Writable, +T <: HListWritable](head: H, tail: T) extends HListWritable with Writable {
  import HListWritable.hlistWritableOps

  override def toString = s"$head :: ${tail.toString}"

  // Number of elements, counted by walking the whole list.
  def length = {
    var l = 0
    this.applyDown {
      item =>
        l = l + 1
    }
    l
  }

  // Reads each element in list order from the input stream.
  override def readFields(in: DataInput) {
    this.applyDown {
      item =>
        item.readFields(in)
    }
  }

  // Writes each element in list order to the output stream.
  override def write(out: DataOutput): Unit = {
    this.applyDown {
      item =>
        item.write(out)
    }
  }
}
/**
 * Operations added to any [[HListWritable]] via the implicit conversion in the
 * [[HListWritable]] companion object.
 */
final class HListWritableOps[+L <: HListWritable](l: L) {
  // Prepend a Writable, producing a new cons cell.
  def ::[H <: Writable](h : H) : H :: L = shadoop.typehelper.::(h, l)

  // Apply the side-effecting function to every element, head first;
  // recursion stops at HListWritableNil.
  def applyDown(f: Writable => Unit): Unit = {
    l match {
      case head :: tail => {
        f(head)
        tail.applyDown(f)
      }
      case _: HListWritableNil =>
        //do nothing
    }
  }
}
/** Empty list terminator; prepending to it starts a new list. */
sealed trait HListWritableNil extends HListWritable {
  def ::[H <: Writable](h: H) = shadoop.typehelper.::(h, this)
}

/** The single empty-list value. */
case object HListWritableNil extends HListWritableNil
object HListWritable {
  // Returns the empty list; build lists by prepending with `::`.
  def apply() = HListWritableNil

  // Implicit enrichment providing `::` and `applyDown` on any HListWritable.
  implicit def hlistWritableOps[L <: HListWritable](l : L) : HListWritableOps[L] = new HListWritableOps(l)
}
| adamretter/Shadoop | src/main/scala/shadoop/typehelper/HListWritable.scala | Scala | apache-2.0 | 1,997 |
package scutil.lang
import minitest._
// Fixture: plain case class used to exercise generated lenses.
final case class Foo(a:Int, b:String)
// Fixture: type-parameterized case class.
final case class Bar[T](t:T)

/** Tests for the `Lens.Gen` macro-generated lenses over case-class fields. */
object LensGenTest extends SimpleTestSuite {
  test("LensGen should work with simple case class getters") {
    val container = Foo(4711, "hallo")
    val lens = Lens.Gen[Foo].a
    assertEquals(
      lens get container,
      4711
    )
  }
  test("LensGen should work with simple case class setters") {
    val container = Foo(4711, "hallo")
    val lens = Lens.Gen[Foo].a
    assertEquals(
      lens set 1337 apply container,
      Foo(1337, "hallo")
    )
  }
  test("LensGen should work with type parameterized case class getters") {
    val container = Bar("test")
    val lens = Lens.Gen[Bar[String]].t
    assertEquals(
      lens get container,
      "test"
    )
  }
  test("LensGen should work with type parameterized case class setters") {
    val container = Bar("test")
    val lens = Lens.Gen[Bar[String]].t
    assertEquals(
      lens set "haha" apply container,
      Bar("haha")
    )
  }
  // Lens creation may be split from field selection without losing type information.
  test("LensGen should allow splitting lenser and lens creation") {
    val container = Foo(4711, "hallo")
    val lenses = Lens.Gen[Foo]
    val lens = lenses.a
    assertEquals(
      lens set 1337 apply container,
      Foo(1337, "hallo")
    )
  }
  test("LensGen should work as Lens.Gen") {
    val container = Foo(4711, "hallo")
    val lens = Lens.Gen[Foo].a
    assertEquals(
      lens set 1337 apply container,
      Foo(1337, "hallo")
    )
  }
}
| ritschwumm/scutil | modules/core/src/test/scala/scutil/lang/optic/LensGenTest.scala | Scala | bsd-2-clause | 1,390 |
package sri.test.components
import org.scalajs.dom
import sri.core._
import sri.web.all._
import sri.web.vdom.htmltags._
import scala.scalajs.js
import scala.scalajs.js.annotation.ScalaJSDefined
import scala.scalajs.js.{UndefOr => U, Function1, undefined => undefined}
/**
 * React component verifying that DOM refs work: it renders a keyed div, captures the
 * underlying DOM node through the `ref` callback, and asserts on it after mount.
 */
object RefsTestComponent {

  @ScalaJSDefined
  class Component extends ReactComponent[Unit, Unit] {

    def render() = {
      // The ref callback stores the mounted DOM node for inspection in componentDidMount
      div(key = "hello", ref = (e: dom.html.Div) => divRef = e)("A div with key")
    }

    // Populated by the ref callback during render/mount.
    var divRef: dom.html.Div = _

    override def componentDidMount(): Unit = {
      // By mount time the ref must point at the rendered <div> with its text content
      assert(divRef.nodeName == "DIV")
      assert(divRef.innerHTML == "A div with key")
    }
  }

  // Factory producing an element for this component; props-less by design.
  def apply(key: U[String] = js.undefined, ref: Function1[Component, Unit] = null) = makeElementNoProps[Component](key = key, ref = ref)
}
| chandu0101/sri | test/src/main/scala/sri/test/components/RefsTestComponent.scala | Scala | apache-2.0 | 807 |
package com.seanshubin.hello.console
import java.time.Clock
import com.seanshubin.hello.domain.{ApplicationBehavior, FilesContract, FilesFromOperatingSystem}
/**
 * Wiring for the console application: lazily constructs the clock, filesystem
 * facade, output sink and application behavior. Mix in and supply `args`.
 */
trait DependencyInjection {
  // Command-line arguments, provided by the concrete entry point.
  def args: Seq[String]

  lazy val clock: Clock = Clock.systemUTC()
  lazy val files: FilesContract = FilesFromOperatingSystem
  // Output goes to stdout; override for testing.
  lazy val emit: String => Unit = println
  lazy val behavior: Runnable = new ApplicationBehavior(args, clock, files, emit)
}
| SeanShubin/hello | console/src/main/scala/com/seanshubin/hello/console/DependencyInjection.scala | Scala | unlicense | 443 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.worker.ui
import javax.servlet.http.HttpServletRequest
import scala.xml.Node
import akka.dispatch.Await
import akka.pattern.ask
import akka.util.duration._
import net.liftweb.json.JsonAST.JValue
import org.apache.spark.deploy.JsonProtocol
import org.apache.spark.deploy.DeployMessages.{RequestWorkerState, WorkerStateResponse}
import org.apache.spark.deploy.worker.ExecutorRunner
import org.apache.spark.ui.UIUtils
import org.apache.spark.util.Utils
/** Worker web-UI index page: renders worker state as HTML or JSON. */
private[spark] class IndexPage(parent: WorkerWebUI) {
  val workerActor = parent.worker.self
  val worker = parent.worker
  val timeout = parent.timeout

  /** Asks the worker actor for its state and serializes it as JSON. */
  def renderJson(request: HttpServletRequest): JValue = {
    val stateFuture = (workerActor ? RequestWorkerState)(timeout).mapTo[WorkerStateResponse]
    val workerState = Await.result(stateFuture, 30 seconds)
    JsonProtocol.writeWorkerState(workerState)
  }

  /** Asks the worker actor for its state and renders the HTML index page. */
  def render(request: HttpServletRequest): Seq[Node] = {
    val stateFuture = (workerActor ? RequestWorkerState)(timeout).mapTo[WorkerStateResponse]
    val workerState = Await.result(stateFuture, 30 seconds)

    val executorHeaders = Seq("ExecutorID", "Cores", "Memory", "Job Details", "Logs")
    val runningExecutorTable =
      UIUtils.listingTable(executorHeaders, executorRow, workerState.executors)
    val finishedExecutorTable =
      UIUtils.listingTable(executorHeaders, executorRow, workerState.finishedExecutors)

    val content =
        <div class="row-fluid"> <!-- Worker Details -->
          <div class="span12">
            <ul class="unstyled">
              <li><strong>ID:</strong> {workerState.workerId}</li>
              <li><strong>
                Master URL:</strong> {workerState.masterUrl}
              </li>
              <li><strong>Cores:</strong> {workerState.cores} ({workerState.coresUsed} Used)</li>
              <li><strong>Memory:</strong> {Utils.megabytesToString(workerState.memory)}
                ({Utils.megabytesToString(workerState.memoryUsed)} Used)</li>
            </ul>
            <p><a href={workerState.masterWebUiUrl}>Back to Master</a></p>
          </div>
        </div>
        <div class="row-fluid"> <!-- Running Executors -->
          <div class="span12">
            <h4> Running Executors {workerState.executors.size} </h4>
            {runningExecutorTable}
          </div>
        </div>
        <div class="row-fluid"> <!-- Finished Executors  -->
          <div class="span12">
            <h4> Finished Executors </h4>
            {finishedExecutorTable}
          </div>
        </div>;

    UIUtils.basicSparkPage(content, "Spark Worker at %s:%s".format(
      workerState.host, workerState.port))
  }

  /** Renders one executor as a table row, including links to its stdout/stderr log pages. */
  def executorRow(executor: ExecutorRunner): Seq[Node] = {
    <tr>
      <td>{executor.execId}</td>
      <td>{executor.cores}</td>
      <td sorttable_customkey={executor.memory.toString}>
        {Utils.megabytesToString(executor.memory)}
      </td>
      <td>
        <ul class="unstyled">
          <li><strong>ID:</strong> {executor.appId}</li>
          <li><strong>Name:</strong> {executor.appDesc.name}</li>
          <li><strong>User:</strong> {executor.appDesc.user}</li>
        </ul>
      </td>
      <td>
        <a href={"logPage?appId=%s&executorId=%s&logType=stdout"
          .format(executor.appId, executor.execId)}>stdout</a>
        <a href={"logPage?appId=%s&executorId=%s&logType=stderr"
          .format(executor.appId, executor.execId)}>stderr</a>
      </td>
    </tr>
  }
}
| windeye/spark | core/src/main/scala/org/apache/spark/deploy/worker/ui/IndexPage.scala | Scala | apache-2.0 | 4,285 |
package lensimpl.data
import lensimpl.typeclass.Strong
/**
 * Profunctor that "forgets" its output side: it wraps a function `A => R`
 * and ignores the `B` type parameter entirely (B is phantom).
 */
case class Forget[R, A, B](run: A => R){
  // Safe cast: B never appears at runtime, so retagging cannot fail.
  def retag[C]: Forget[R, A, C] = asInstanceOf[Forget[R, A, C]]
}

object Forget {
  // Strong profunctor instance. Because B is phantom, mapping the output side
  // is a no-op retag, and first/second simply project out the A component.
  implicit def strong[R]: Strong[({type λ[a, b] = Forget[R, a, b]})#λ] = new Strong[({type λ[a, b] = Forget[R, a, b]})#λ] {
    override def dimap[A, B, C, D](pab: Forget[R, A, B])(f: C => A)(g: B => D): Forget[R, C, D] =
      Forget[R, C, D](c => pab.run(f(c)))
    override def lmap[A, B, C](pab: Forget[R, A, B])(f: C => A): Forget[R, C, B] =
      Forget[R, C, B](c => pab.run(f(c)))
    override def rmap[A, B, C](pab: Forget[R, A, B])(f: B => C): Forget[R, A, C] =
      pab.retag[C]
    override def second[A, B, C](pab: Forget[R, A, B]): Forget[R, (C, A), (C, B)] =
      Forget[R, (C, A), (C, B)]{
        case (c, a) => pab.run(a)
      }
    override def first[A, B, C](pab: Forget[R, A, B]): Forget[R, (A, C), (B, C)] =
      Forget[R, (A, C), (B, C)]{
        case (a, c) => pab.run(a)
      }
  }
}
| julien-truffaut/LensImpl | core/src/main/scala/lensimpl/data/Forget.scala | Scala | mit | 1,002 |
package com.qtamaki.rulis
import org.specs2.mutable._
/** Smoke-test specification for the specs2 setup, asserting basic string properties. */
class AppSpec extends Specification {

  "The 'Hello world' string" should {
    "contain 11 characters" in {
      "Hello world" must have size(11)
    }
    "start with 'Hello'" in {
      "Hello world" must startWith("Hello")
    }
    "end with 'world'" in {
      "Hello world" must endWith("world")
    }
  }
}
| qtamaki/rulis | src/test/scala/com/qtamaki/rulis/AppSpec.scala | Scala | mit | 373 |
package bootstrap.liftweb
import _root_.net.liftweb.util._
import _root_.net.liftweb.http._
import _root_.net.liftweb.sitemap._
import _root_.net.liftweb.sitemap.Loc._
import Helpers._
/**
* A class that's instantiated early and run. It allows the application
* to modify lift's environment
*/
class Boot {
  def boot {
    // where to search snippet
    LiftRules.addToPackages("retronym")

    // Build SiteMap: a single "Home" entry pointing at /index
    val entries = Menu(Loc("Home", List("index"), "Home")) :: Nil
    LiftRules.setSiteMap(SiteMap(entries:_*))
  }
}
| retronym/scala-sandbox | web-interface/src/main/scala/bootstrap/liftweb/Boot.scala | Scala | mit | 541 |
package org.jetbrains.plugins.scala.macroAnnotations
import scala.annotation.StaticAnnotation
import scala.language.experimental.macros
import scala.reflect.macros.whitebox
/**
* This annotation makes the compiler generate code that caches values in the user data of the psiElement.
* Caches are invalidated on change of `dependencyItem`.
*
* Author: Svyatoslav Ilinskiy, Nikolay.Tropin
* Date: 9/25/15.
*/
class CachedInsidePsiElement(psiElement: Any, dependencyItem: Object) extends StaticAnnotation {
  // Macro expansion is performed by the companion object's implementation below.
  def macroTransform(annottees: Any*) = macro CachedInsidePsiElement.cachedInsidePsiElementImpl
}
object CachedInsidePsiElement {
  /**
   * Macro implementation: rewrites the annotated `def` so that its result is cached
   * in user data of the given psi element, keyed by the method's parameters (if any)
   * and invalidated via the dependency item's modification tracker.
   *
   * Fix: removed a leftover debug `println(res)` that dumped the generated tree to
   * stdout on every expansion of this annotation (i.e. on every compile of every
   * annotated method).
   */
  def cachedInsidePsiElementImpl(c: whitebox.Context)(annottees: c.Tree*): c.Expr[Any] = {
    import CachedMacroUtil._
    import c.universe._
    implicit val x: c.type = c

    // Extracts the two annotation arguments: the psi element expression and the
    // dependency item, converted to a modification tracker expression.
    def parameters: (Tree, Tree) = {
      c.prefix.tree match {
        case q"new CachedInsidePsiElement(..$params)" if params.length == 2 =>
          (params.head, modCountParamToModTracker(c)(params(1), params.head))
        case _ => abort("Wrong annotation parameters!")
      }
    }

    //annotation parameters
    val (elem, modTracker) = parameters

    annottees.toList match {
      case DefDef(mods, name, tpParams, paramss, retTp, rhs) :: Nil =>
        if (retTp.isEmpty) {
          abort("You must specify return type")
        }

        //function parameters
        val flatParams = paramss.flatten
        val parameterTypes = flatParams.map(_.tpt)
        val parameterNames: List[c.universe.TermName] = flatParams.map(_.name)
        val hasParams = flatParams.nonEmpty

        //generated types
        val dataType = if (hasParams) tq"(..$parameterTypes)" else tq"Unit"
        val resultType = box(c)(retTp)

        //generated names
        val keyId = c.freshName(name.toString + "cacheKey")
        val cacheStatsName = TermName(c.freshName("cacheStats"))
        val analyzeCaches = CachedMacroUtil.analyzeCachesEnabled(c)
        val defdefFQN = q"""getClass.getName ++ "." ++ ${name.toString}"""
        val elemName = generateTermName("element")
        val dataName = generateTermName("data")
        val keyVarName = generateTermName("key")
        val holderName = generateTermName("holder")
        val resultName = generateTermName("result")
        val cachedFunName = generateTermName(name.toString + "cachedFun")

        val dataValue = if (hasParams) q"(..$parameterNames)" else q"()"

        // Parameterized methods cache per-argument in a map; nullary methods use a single ref.
        val getOrCreateCachedHolder =
          if (hasParams)
            q"$cachesUtilFQN.getOrCreateCachedMap[$psiElementType, $dataType, $resultType]($elemName, $keyVarName, () => $modTracker)"
          else
            q"$cachesUtilFQN.getOrCreateCachedRef[$psiElementType, $resultType]($elemName, $keyVarName, () => $modTracker)"

        val getFromHolder =
          if (hasParams) q"$holderName.get($dataName)"
          else q"$holderName.get()"

        val updateHolder =
          if (hasParams) q"$holderName.putIfAbsent($dataName, $resultName)"
          else q"$holderName.compareAndSet(null, $resultName)"

        val actualCalculation = withUIFreezingGuard(c) {
          transformRhsToAnalyzeCaches(c)(cacheStatsName, retTp, rhs)
        }

        val analyzeCachesEnterCacheArea =
          if (analyzeCaches) q"$cacheStatsName.aboutToEnterCachedArea()"
          else EmptyTree

        // If the body contains `return`, wrap it in a local function so the return stays local.
        val computation = if (hasReturnStatements(c)(actualCalculation)) q"$cachedFunName()" else q"$actualCalculation"

        val updatedRhs = q"""
          def $cachedFunName(): $retTp = $actualCalculation
          ..$analyzeCachesEnterCacheArea
          val $dataName = $dataValue
          val $keyVarName = ${getOrCreateKey(c, hasParams)(q"$keyId", dataType, resultType)}
          val $elemName = $elem
          val $holderName = $getOrCreateCachedHolder
          val fromCachedHolder = $getFromHolder
          if (fromCachedHolder != null) return fromCachedHolder
          val $resultName = $computation
          $updateHolder
          $resultName
          """
        val updatedDef = DefDef(mods, name, tpParams, paramss, retTp, updatedRhs)
        val res = q"""
          ${if (analyzeCaches) q"private val $cacheStatsName = $cacheStatisticsFQN($keyId, $defdefFQN)" else EmptyTree}

          ..$updatedDef
          """
        c.Expr(res)
      case _ => abort("You can only annotate one function!")
    }
  }
}
| triplequote/intellij-scala | scala/macros/src/org/jetbrains/plugins/scala/macroAnnotations/CachedInsidePsiElement.scala | Scala | apache-2.0 | 4,420 |
/**********************************************************************************************************************
* This file is part of Scrupal, a Scalable Reactive Web Application Framework for Content Management *
* *
* Copyright (c) 2015, Reactific Software LLC. All Rights Reserved. *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed *
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for *
* the specific language governing permissions and limitations under the License. *
**********************************************************************************************************************/
package scrupal.api
import scrupal.storage.api.SchemaDesign
/** Database Schema For API
*
* This is the Database Schema for the scrupal.api module. It defines the kinds of collections that are found in
* the schema and provides the means for validating them.
*/
class ApiSchemaDesign extends SchemaDesign {

  override def name: String = "API"

  // Collection names that must be present for the schema to validate.
  override def requiredNames: Seq[String] = Seq("sites", "nodes", "instances", "principals", "alerts")
}

object ApiSchemaDesign {
  // Convenience factory mirroring case-class style construction.
  def apply() : ApiSchemaDesign = new ApiSchemaDesign
}
| scrupal/scrupal | scrupal-api/src/main/scala/scrupal/api/ApiSchemaDesign.scala | Scala | apache-2.0 | 2,224 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.apigateway.service
import java.util.UUID
import org.mockito.BDDMockito.given
import org.mockito.Mockito._
import org.scalatest.mockito.MockitoSugar
import uk.gov.hmrc.apigateway.config.AppContext
import uk.gov.hmrc.apigateway.connector.impl.ThirdPartyApplicationConnector
import uk.gov.hmrc.apigateway.exception.GatewayError._
import uk.gov.hmrc.apigateway.model.RateLimitTier.{SILVER, BRONZE}
import uk.gov.hmrc.apigateway.model._
import uk.gov.hmrc.apigateway.repository.RateLimitRepository
import uk.gov.hmrc.play.test.UnitSpec
import scala.concurrent.Future._
/** Unit tests for ApplicationService: lookup by token/client id and subscription + rate-limit checks. */
class ApplicationServiceSpec extends UnitSpec with MockitoSugar {

  // Common fixture: a BRONZE application plus mocked connector, repository and config.
  trait Setup {
    val serverToken = "serverToken"
    val applicationId = UUID.randomUUID()
    val clientId = "clientId"
    val application = Application(id = applicationId, clientId = "clientId", name = "App Name", rateLimitTier = BRONZE)
    val bronzeRateLimit = 5
    val silverRateLimit = 10
    val api = ApiIdentifier("aContext", "aVersion")

    val applicationConnector = mock[ThirdPartyApplicationConnector]
    val rateLimitRepository = mock[RateLimitRepository]
    val appContext = mock[AppContext]
    val applicationService = new ApplicationService(applicationConnector, rateLimitRepository, appContext)

    given(appContext.rateLimitBronze).willReturn(bronzeRateLimit)
    given(appContext.rateLimitSilver).willReturn(silverRateLimit)
  }

  "Get application by server token" should {

    "return the application when an application exists for the given server token" in new Setup {
      when(applicationConnector.getApplicationByServerToken(serverToken)).thenReturn(successful(application))
      val result = await(applicationService.getByServerToken(serverToken))
      result shouldBe application
    }

    "propagate the error when the application cannot be fetched for the given server token" in new Setup {
      when(applicationConnector.getApplicationByServerToken(serverToken)).thenReturn(failed(NotFound()))
      intercept[NotFound] {
        await(applicationService.getByServerToken(serverToken))
      }
    }
  }

  "Get application by client id" should {

    "return the application when an application exists for the given client id" in new Setup {
      when(applicationConnector.getApplicationByClientId(clientId)).thenReturn(successful(application))
      val result = await(applicationService.getByClientId(clientId))
      result shouldBe application
    }

    "propagate the error when the application cannot be fetched for the given client id" in new Setup {
      when(applicationConnector.getApplicationByClientId(clientId)).thenReturn(failed(new RuntimeException))
      intercept[RuntimeException] {
        await(applicationService.getByClientId(clientId))
      }
    }

    // A missing application for a known client id is an internal inconsistency, hence ServerError.
    "throw a 'ServerError' when the application is not found" in new Setup {
      when(applicationConnector.getApplicationByClientId(clientId)).thenReturn(failed(NotFound()))
      intercept[ServerError] {
        await(applicationService.getByClientId(clientId))
      }
    }
  }

  "validateSubscriptionAndRateLimit" should {

    "propagate the InvalidSubscription when the application is not subscribed" in new Setup {
      when(applicationConnector.validateSubscription(applicationId.toString, api)).thenReturn(failed(InvalidSubscription()))
      intercept[InvalidSubscription] {
        await(applicationService.validateSubscriptionAndRateLimit(application, api))
      }
    }

    "propagate the ThrottledOut error when the rate limit is reached" in new Setup {
      val silverApplication = application.copy(rateLimitTier = SILVER)
      mockSubscription(applicationConnector, application.id, api)
      given(rateLimitRepository.validateAndIncrement(silverApplication.clientId, silverRateLimit)).willReturn(failed(ThrottledOut()))
      intercept[ThrottledOut] {
        await(applicationService.validateSubscriptionAndRateLimit(silverApplication, api))
      }
    }

    "return successfully when the application is subscribed and the rate limit is not reached" in new Setup {
      mockSubscription(applicationConnector, application.id, api)
      given(rateLimitRepository.validateAndIncrement(application.clientId, bronzeRateLimit)).willReturn(successful(()))
      await(applicationService.validateSubscriptionAndRateLimit(application, api))
    }
  }

  // Helper stubbing a successful subscription validation for the given application/api pair.
  private def mockSubscription(applicationConnector: ThirdPartyApplicationConnector, applicationId: UUID, api: ApiIdentifier) =
    when(applicationConnector.validateSubscription(applicationId.toString, api)).thenReturn(successful(()))
}
| hmrc/api-gateway | test/uk/gov/hmrc/apigateway/service/ApplicationServiceSpec.scala | Scala | apache-2.0 | 5,205 |
package at.magiun.core.model
import org.apache.spark.sql.{Dataset, Row}
/** Output produced by a pipeline stage. */
trait StageOutput

/** A single Spark dataset produced by a stage. */
case class DatasetOutput(dataSet: Dataset[Row]) extends StageOutput

/** Several stage outputs bundled together. */
case class MultiOutput(list: Seq[StageOutput]) extends StageOutput

/**
 * Marker for stages producing nothing. Declared as a `case object` (instead of a plain
 * `object`) so it gets a sensible toString, is serializable, and pattern-matches cleanly.
 * Also removed extraction junk that had been fused onto this line.
 */
case object EmptyOutput extends StageOutput
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples.mllib
import org.apache.spark.{SparkConf, SparkContext}
// $example on$
import org.apache.spark.mllib.tree.RandomForest
import org.apache.spark.mllib.tree.model.RandomForestModel
import org.apache.spark.mllib.util.MLUtils
// $example off$
object RandomForestClassificationExample {
  /**
   * Trains a random-forest classifier on the bundled LibSVM sample data,
   * measures test error on a 30% held-out split, and round-trips the model
   * through disk to demonstrate save/load.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("RandomForestClassificationExample")
    val sc = new SparkContext(conf)
    // $example on$
    // Load and parse the data file.
    val data = MLUtils.loadLibSVMFile(sc, "data/mllib/sample_libsvm_data.txt")
    // Split the data into training and test sets (30% held out for testing)
    val splits = data.randomSplit(Array(0.7, 0.3))
    val (trainingData, testData) = (splits(0), splits(1))

    // Train a RandomForest model.
    // Empty categoricalFeaturesInfo indicates all features are continuous.
    val numClasses = 2
    val categoricalFeaturesInfo = Map[Int, Int]()
    val numTrees = 3 // Use more in practice.
    val featureSubsetStrategy = "auto" // Let the algorithm choose.
    val impurity = "gini"
    val maxDepth = 4
    val maxBins = 32

    val model = RandomForest.trainClassifier(trainingData, numClasses, categoricalFeaturesInfo,
      numTrees, featureSubsetStrategy, impurity, maxDepth, maxBins)

    // Evaluate model on test instances and compute test error
    val labelAndPreds = testData.map { point =>
      val prediction = model.predict(point.features)
      (point.label, prediction)
    }
    val testErr = labelAndPreds.filter(r => r._1 != r._2).count.toDouble / testData.count()
    println("Test Error = " + testErr)
    // Fix: "\\n" printed a literal backslash-n; a real newline is intended here.
    println("Learned classification forest model:\n" + model.toDebugString)

    // Save and load model
    model.save(sc, "target/tmp/myRandomForestClassificationModel")
    val sameModel = RandomForestModel.load(sc, "target/tmp/myRandomForestClassificationModel")
    // $example off$

    // Release cluster resources before the JVM exits.
    sc.stop()
  }
}
// scalastyle:on println
| mrchristine/spark-examples-dbc | src/main/scala/org/apache/spark/examples/mllib/RandomForestClassificationExample.scala | Scala | apache-2.0 | 2,809 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.json
import com.fasterxml.jackson.core.{JsonParser, JsonToken}
import org.apache.spark.sql.types._
object JacksonUtils {
  /**
   * Advance the parser until a null or a specific token is found
   */
  def nextUntil(parser: JsonParser, stopOn: JsonToken): Boolean = {
    parser.nextToken() match {
      case null => false
      case x => x != stopOn
    }
  }

  /**
   * Recursively checks that `dataType` can be rendered as JSON, throwing
   * `UnsupportedOperationException` (naming the offending column) otherwise.
   */
  def verifyType(name: String, dataType: DataType): Unit = {
    dataType match {
      case NullType | _: AtomicType | CalendarIntervalType =>
      case st: StructType => st.foreach(field => verifyType(field.name, field.dataType))
      case at: ArrayType => verifyType(name, at.elementType)
      // For MapType, its keys are treated as a string (i.e. calling `toString`) basically when
      // generating JSON, so we only care if the values are valid for JSON.
      case mt: MapType => verifyType(name, mt.valueType)
      case udt: UserDefinedType[_] => verifyType(name, udt.sqlType)
      case _ =>
        throw new UnsupportedOperationException(
          s"Unable to convert column $name of type ${dataType.catalogString} to JSON.")
    }
  }

  /**
   * Verify if the schema is supported in JSON parsing.
   */
  def verifySchema(schema: StructType): Unit = {
    schema.foreach(field => verifyType(field.name, field.dataType))
  }
}
| witgo/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonUtils.scala | Scala | apache-2.0 | 2,156 |
package notebook.kernel.remote
import java.io.File
import java.util.concurrent.atomic.AtomicInteger
import akka.actor._
import akka.remote.RemoteScope
import com.typesafe.config.{Config, ConfigFactory}
import notebook.kernel.pfork.{BetterFork, ForkableProcess, ProcessInfo}
import org.apache.commons.io.FileUtils
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
/**
* Author: Ken
*/
/**
 * Forked child process hosting a remote actor system. `init` boots the system
 * from a config file (plus optional cookie file) and returns its address so the
 * parent can deploy actors into it.
 */
class RemoteActorProcess extends ForkableProcess {
  // http://stackoverflow.com/questions/14995834/programmatically-obtain-ephemeral-port-with-akka
  private var _system: ActorSystem = null

  def init(args: Seq[String]): String = {
    val configFile = args.head
    val cfg = ConfigFactory.load(configFile)

    // Cookie file is optional second argument
    val actualCfg = args.take(2) match {
      case Seq(_, cookieFile) if cookieFile.size > 0 =>
        val cookie = FileUtils.readFileToString(new File(cookieFile))
        AkkaConfigUtils.requireCookie(cfg, cookie)
      case _ => cfg
    }

    _system = ActorSystem("Remote", actualCfg)
    // Forward remote logging back over websocket
    val ws = _system.actorOf(Props[notebook.kernel.WebSocketAppender], "remote-logger")
    val address = GetAddress(_system).address
    // Returned string is parsed back into an Address by the parent
    address.toString
  }

  // Blocks until the actor system terminates.
  def waitForExit() {
    _system.awaitTermination()
    println("waitForExit complete")
  }
}
/** Akka extension exposing the system's externally reachable address. */
class FindAddressImpl(system: ExtendedActorSystem) extends Extension {
  // Uses the remote provider's default address when remoting is enabled
  // (matched by class name to avoid a hard dependency); falls back to the local root path.
  def address = system.provider match {
    case rarp if rarp.getClass.getSimpleName == "RemoteActorRefProvider" => rarp.getDefaultAddress
    case _ => system.provider.rootPath.address
  }
}

// Extension hook: GetAddress(system).address
object GetAddress extends ExtensionKey[FindAddressImpl]
// Message requesting remote process shutdown.
case object RemoteShutdown

/**
 * Actor deployed into the remote system whose only purpose is to exit the
 * remote JVM when it is stopped (e.g. via PoisonPill from the parent).
 */
class ShutdownActor extends Actor {
  override def postStop() {
    // KV: I tried to do a context.system.shutdown() here, but the system would often hang when multiple actors were in play.
    // I think it was this issue: https://groups.google.com/forum/#!msg/akka-user/VmKMPI_tNQU/ZUSz25OBpIwJ
    // So we take the hard way out. Would be nice to have graceful shutdown
    sys.exit(0)
  }

  // Handles no messages; only the stop hook matters.
  def receive = Map.empty
}
/**
* Represents a running remote actor system, with an address and the ability to kill it
*/
class RemoteActorSystem(localSystem: ActorSystem, info: ProcessInfo, remoteContext: ActorRefFactory) {
  def this(localSystem: ActorSystem, info: ProcessInfo) = this(localSystem, info, localSystem)

  // Remote system's address, as returned by RemoteActorProcess.init
  val address = AddressFromURIString(info.initReturn)

  // Deploy descriptor targeting the remote system; use to spawn actors there.
  def deploy = Deploy(scope = RemoteScope(address))

  // this guy can `sys.exit` the remote process
  // since we use the `deploy` object to deploy it and its `postStop`
  val shutdownActor = remoteContext.actorOf(Props(new ShutdownActor).withDeploy(deploy))

  // Terminates the remote JVM by stopping the remotely-deployed shutdown actor.
  def shutdownRemote() {
    shutdownActor ! PoisonPill
  }
}
/**
* Create a remote actor system
*/
object RemoteActorSystem {
  val nextId = new AtomicInteger(1)

  /**
   * Forks a child JVM running a RemoteActorProcess and wraps the resulting
   * process info in a RemoteActorSystem once initialization completes.
   */
  def spawn(config: Config, system: ActorSystem, configFile: String, kernelId: String,
            notebookPath: Option[String], customArgs:Option[List[String]]): Future[RemoteActorSystem] = {
    // No cookie file is passed here; authentication cookie is optional.
    val cookiePath = ""
    new BetterFork[RemoteActorProcess](config, system.dispatcher, customArgs)
      .execute(kernelId, notebookPath.getOrElse("no-path"), configFile, cookiePath)
      .map {
        new RemoteActorSystem(system, _)
      }
  }
}
| 0asa/spark-notebook | modules/subprocess/src/main/scala/notebook/kernel/remote/RemoteActorSystem.scala | Scala | apache-2.0 | 3,329 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.streaming.hadoop.lib
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.gearpump.Time.MilliSeconds
/**
 * Appends checkpoint records to a file on a Hadoop-compatible filesystem.
 * Each record is length-prefixed: an 8-byte timestamp, a 4-byte payload
 * length, then the payload bytes.
 *
 * @param path destination file on the Hadoop filesystem
 * @param hadoopConfig configuration used when opening the output stream
 */
class HadoopCheckpointStoreWriter(path: Path, hadoopConfig: Configuration) {
  // Opened lazily so that constructing the writer performs no I/O until the first write.
  private lazy val stream = HadoopUtil.getOutputStream(path, hadoopConfig)

  /**
   * Appends one (timestamp, payload) record and flushes it to the filesystem.
   *
   * @param timestamp the checkpoint timestamp written as the record header
   * @param data the checkpoint payload
   * @return the stream position after the record was written and flushed
   */
  def write(timestamp: MilliSeconds, data: Array[Byte]): Long = {
    stream.writeLong(timestamp)
    stream.writeInt(data.length)
    stream.write(data)
    stream.hflush()
    stream.getPos()
  }

  /** Closes the underlying stream; the writer cannot be reused afterwards. */
  def close(): Unit = {
    stream.close()
  }
}
| manuzhang/incubator-gearpump | external/hadoopfs/src/main/scala/org/apache/gearpump/streaming/hadoop/lib/HadoopCheckpointStoreWriter.scala | Scala | apache-2.0 | 1,383 |
package org.jetbrains.plugins.scala.lang.psi.impl.expr
import com.intellij.lang.ASTNode
import com.intellij.psi.PsiElementVisitor
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElementImpl
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.types.result.TypeResult
import scala.collection.Seq
/**
 * PSI implementation for infix expressions such as `a op b`.
 *
 * @author Alexander Podkhalyuzin
 * Date: 06.03.2008
 */
class ScInfixExprImpl(node: ASTNode) extends ScalaPsiElementImpl(node) with ScInfixExpr {
  override def toString: String = "InfixExpression"

  /**
   * The argument expressions of the underlying operation call.
   * For right-associative operators the left operand is the argument;
   * otherwise the right operand is unwrapped: tuples expand to their
   * elements, a single parenthesised expression is stripped, and a unit
   * literal contributes no arguments.
   */
  override def argumentExpressions: Seq[ScExpression] = {
    if (isRightAssoc) Seq(lOp)
    else rOp match {
      case tuple: ScTuple => tuple.exprs
      case t: ScParenthesisedExpr => t.expr match {
        case Some(expr) => Seq(expr)
        case None => Seq(t)
      }
      case _: ScUnitExpr => Seq.empty
      case expr => Seq(expr)
    }
  }

  protected override def innerType: TypeResult = {
    operation.bind() match {
      // this is an assignment statement: x += 1 equals to x = x + 1, so we
      // type-check the desugared form rebuilt from the operand texts.
      // BUG FIX: a discarded `super.innerType` statement previously sat at the top
      // of this case; its result was never used, so the dead call is removed.
      case Some(r) if r.element.name + "=" == operation.refName =>
        val lText = lOp.getText
        val rText = rOp.getText
        val exprText = s"$lText = $lText ${r.element.name} $rText"
        val newExpr = ScalaPsiElementFactory.createExpressionWithContextFromText(exprText, getContext, this)
        newExpr.`type`()
      case _ => super.innerType
    }
  }

  override def accept(visitor: ScalaElementVisitor) {
    visitor.visitInfixExpression(this)
  }

  override def accept(visitor: PsiElementVisitor) {
    visitor match {
      case visitor: ScalaElementVisitor => visitor.visitInfixExpression(this)
      case _ => super.accept(visitor)
    }
  }
}
| gtache/intellij-lsp | intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScInfixExprImpl.scala | Scala | apache-2.0 | 1,938 |
package org.bitcoins.commons.json
import org.bitcoins.commons.serializers.Picklers
import org.bitcoins.core.protocol.tlv.DLCAcceptTLV
import org.bitcoins.testkitcore.util.BitcoinSUnitTest
/**
 * Round-trip test for the `DLCAcceptTLV` JSON (de)serializer: a fixture JSON
 * document is parsed into a `DLCAcceptTLV` and re-serialized, and the output
 * must match the input (modulo whitespace).
 */
class DLCAcceptJsonSerializerTest extends BitcoinSUnitTest {
  behavior of "DLCAcceptJsonSerializer"

  // Fixture: a complete DLC accept message in JSON form. The hex payloads are
  // opaque test vectors and must not be altered.
  private val testString: String =
    s"""
       |{
       |  "temporaryContractId": "bdc5286cd4b56c2b2525bc9e0e3dda1d6a2a130cc7fd8ca8a38d401ef9a5d3e7",
       |  "acceptCollateral": 100000000,
       |  "fundingPubkey": "0208dfffdda2a61c78c906f5d76afdb0b8fe0555e6a3644c41f53b511427f80f0a",
       |  "payoutSpk": "001463c84b34fcf37ee58566aa6daf0747f74e509970",
       |  "payoutSerialId": "11859227771650291066",
       |  "fundingInputs": [
       |    {
       |      "inputSerialId": "6134040349072004330",
       |      "prevTx": "020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff03510101ffffffff0200f2052a01000000160014c87d38bcd3a468680e7c0abeeb7821d6302df9120000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000",
       |      "prevTxVout": 0,
       |      "sequence": 4294967295,
       |      "maxWitnessLen": 107,
       |      "redeemScript": ""
       |    }
       |  ],
       |  "changeSpk": "001481467abc1d30f5139fe8ff8c1ca7f7cb3f5bc031",
       |  "changeSerialId": "13987689245506757418",
       |  "cetAdaptorSignatures": {
       |    "ecdsaAdaptorSignatures": [
       |      {
       |        "signature": "02b7dda1b4030e0f85a98eb15a6806f1ffb72b578a508f671f4e6bbd954aa2d5b9022a01536f70340da9ba2b0e034deec1a0b658cbec2432f2fa2a96de09000eafaafb586eec1375c85737bb7e9fde1cb7fcf3a8a970e698d0e5da55297ea64a45b8ffb2b705974b91e8e3d9b0e46573c648122fda1ef941980ba845ab09d1e7a43949add788ed79e4a23b832b1418c3b54251b0d3c83e791ce24c2e30544456d3b8"
       |      },
       |      {
       |        "signature": "02d510a07687553f7dd26ff6124cdccbf73ccd8661ae9bd6a59a25cedac04727fd03fb8955cb520fde3dc30fbf095054a87f8c3e87f027238e3bc435b0dc6df2532af475c531e234ee045d6119d989a62d8b29bdfdbdcdf8d6761b0cb5fbd1eb6ef71fa04c7a993801e2d02d5659c08f629f7d5ef02173ce5b1fcec361d99b9aee79549a085c14bb3f7df507e891a35089b3d9886e7c81b7367cac8c75bbb9432861"
       |      },
       |      {
       |        "signature": "0328ca81f2f281c39eda04fb69456f6b104f09d920d57def9e396aa60de6c0b38c03e73ec3891966e5d339ea154bf17e265f80cb75bfc922b83d79102f44479a69e281a485c8e99abc382aad656983f8f92a5836437156c783e261bfe0bf7eec9e3bb83ff8b4d948458b3d8fdfbc74b23cd02a72b06b84aa156a7bf6a578757c205257aa86a728b6b8022c303c9b01164f0332d6bc5aaa17014c7bb87d598b83af5d"
       |      },
       |      {
       |        "signature": "0337e4ce2c5b3fb9d70663bac8797f8e7a1493af23f74ac9ace7449a35d59c757a02676f9cc3adf9d3d03ef5a4daf704c5d178dae12364f0297ab09982d5da08145010fc26c71166e6a2e651e4c351916f2e7d538c14adeccbd2f21c18f1d4ddea619120af9bcef67dcda0ed5b8d5c4dddee167107263becf05c91e0a13c2e653cd9722674531031610ef6b8cd80b205fc78834db55cf08e45d37616a6e218b7e09c"
       |      }
       |    ]
       |  },
       |  "refundSignature": "304402202c9b25719f0a22d7372c54c36f916e44f445135809433585636417fe18b9316c0220125584711c7238d0adf4a494e30a166fef35edf3d0d1718955abd73b76a26e9d",
       |  "negotiationFields": null
       |}
       |""".stripMargin

  it must "have serialization symmetry for a accept json message" in {
    val accept = upickle.default.read[DLCAcceptTLV](testString)(
      Picklers.dlcAcceptTLVPickler)
    val json: String =
      upickle.default.write(accept)(Picklers.dlcAcceptTLVPickler)
    // The fixture is pretty-printed; the serializer emits compact JSON, so
    // compare against the whitespace-stripped fixture.
    assert(json == testString.replaceAll("\\s", ""))
  }
}
| bitcoin-s/bitcoin-s | app-commons-test/src/test/scala/org/bitcoins/commons/json/DLCAcceptJsonSerializerTest.scala | Scala | mit | 3,825 |
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.storage.anormdb
import com.twitter.zipkin.storage.Storage
import com.twitter.zipkin.common._
import com.twitter.zipkin.common.Annotation
import com.twitter.zipkin.common.BinaryAnnotation
import com.twitter.zipkin.util.Util
import com.twitter.util.{Duration, Future}
import anorm._
import anorm.SqlParser._
import java.nio.ByteBuffer
import java.sql.Connection
import AnormThreads.inNewThread
/**
* Retrieve and store span information.
*
* This is one of two places where Zipkin interacts directly with the database,
* the other one being AnormIndex.
*
* NOTE: We're ignoring TTL for now since unlike Cassandra and Redis, SQL
* databases don't have that built in and it shouldn't be a big deal for most
* sites. Several methods in this class deal with TTL and we just assume that
* all spans will live forever.
*/
/**
 * Anorm-backed implementation of [[Storage]]: persists spans, annotations and
 * binary annotations into SQL tables and reads them back by trace id.
 *
 * NOTE: TTL is intentionally ignored (see the file header comment); all spans
 * are assumed to live forever.
 *
 * @param db the database wrapper used to open connections and run transactions
 * @param openCon an already-open connection to reuse; when None a new one is opened
 */
case class AnormStorage(db: DB, openCon: Option[Connection] = None) extends Storage {
  // Database connection object: reuse the caller's connection when provided,
  // otherwise open a fresh one.
  private implicit val conn = openCon match {
    case None => db.getConnection()
    case Some(con) => con
  }

  /**
   * Close the storage
   */
  def close() { conn.close() }

  /**
   * Store the span in the underlying storage for later retrieval.
   * Inserts one row per span plus one row per (binary) annotation, all in a
   * single transaction.
   * @return a future for the operation
   */
  def storeSpan(span: Span): Future[Unit] = inNewThread {
    // created_ts is the timestamp of the span's first annotation, when present.
    val createdTs: Option[Long] = span.firstAnnotation match {
      case Some(anno) => Some(anno.timestamp)
      case None => None
    }
    db.withTransaction(conn, { implicit conn: Connection =>
      SQL(
        """INSERT INTO zipkin_spans
          |  (span_id, parent_id, trace_id, span_name, debug, duration, created_ts)
          |VALUES
          |  ({span_id}, {parent_id}, {trace_id}, {span_name}, {debug}, {duration}, {created_ts})
        """.stripMargin)
        .on("span_id" -> span.id)
        .on("parent_id" -> span.parentId)
        .on("trace_id" -> span.traceId)
        .on("span_name" -> span.name)
        .on("debug" -> (if (span.debug) 1 else 0))
        .on("duration" -> span.duration)
        .on("created_ts" -> createdTs)
        .execute()

      span.annotations.foreach(a =>
        SQL(
          """INSERT INTO zipkin_annotations
            |  (span_id, trace_id, span_name, service_name, value, ipv4, port,
            |    a_timestamp, duration)
            |VALUES
            |  ({span_id}, {trace_id}, {span_name}, {service_name}, {value},
            |    {ipv4}, {port}, {timestamp}, {duration})
          """.stripMargin)
          .on("span_id" -> span.id)
          .on("trace_id" -> span.traceId)
          .on("span_name" -> span.name)
          .on("service_name" -> a.serviceName)
          .on("value" -> a.value)
          .on("ipv4" -> a.host.map(_.ipv4))
          .on("port" -> a.host.map(_.port))
          .on("timestamp" -> a.timestamp)
          .on("duration" -> a.duration.map(_.inNanoseconds))
          .execute()
      )

      span.binaryAnnotations.foreach(b =>
        SQL(
          """INSERT INTO zipkin_binary_annotations
            |  (span_id, trace_id, span_name, service_name, annotation_key,
            |    annotation_value, annotation_type_value, ipv4, port)
            |VALUES
            |  ({span_id}, {trace_id}, {span_name}, {service_name}, {key}, {value},
            |    {annotation_type_value}, {ipv4}, {port})
          """.stripMargin)
          .on("span_id" -> span.id)
          .on("trace_id" -> span.traceId)
          .on("span_name" -> span.name)
          .on("service_name" -> b.host.map(_.serviceName).getOrElse("Unknown service name")) // from Annotation
          .on("key" -> b.key)
          .on("value" -> Util.getArrayFromBuffer(b.value))
          .on("annotation_type_value" -> b.annotationType.value)
          .on("ipv4" -> b.host.map(_.ipv4))
          // BUG FIX: the port column was previously bound to `_.ipv4`, storing the
          // host's IP in the port column for every binary annotation.
          .on("port" -> b.host.map(_.port))
          .execute()
      )
    })
  }

  /**
   * Set the ttl of a trace. Used to store a particular trace longer than the
   * default. It must be oh so interesting!
   *
   * No-op here: SQL storage does not enforce TTL.
   */
  def setTimeToLive(traceId: Long, ttl: Duration): Future[Unit] = {
    Future.Unit
  }

  /**
   * Get the time to live for a specific trace.
   * If there are multiple ttl entries for one trace, pick the lowest one.
   *
   * Always "forever" here, since TTL is not enforced.
   */
  def getTimeToLive(traceId: Long): Future[Duration] = {
    Future.value(Duration.Top)
  }

  /**
   * Finds traces that have been stored from a list of trace IDs
   *
   * @param traceIds a List of trace IDs
   * @return a Set of those trace IDs from the list which are stored
   */
  def tracesExist(traceIds: Seq[Long]): Future[Set[Long]] = inNewThread {
    // traceIds are Longs, so interpolating them into the IN clause is injection-safe.
    SQL(
      "SELECT trace_id FROM zipkin_spans WHERE trace_id IN (%s)".format(traceIds.mkString(","))
    ).as(long("trace_id") *).toSet
  }

  /**
   * Get the available trace information from the storage system.
   * Spans in trace should be sorted by the first annotation timestamp
   * in that span. First event should be first in the spans list.
   *
   * Loads all spans, annotations and binary annotations for the requested
   * traces in three bulk queries and stitches them together in memory.
   */
  def getSpansByTraceIds(traceIds: Seq[Long]): Future[Seq[Seq[Span]]] = inNewThread {
    val traceIdsString:String = traceIds.mkString(",")
    val spans:List[DBSpan] =
      SQL(
        """SELECT span_id, parent_id, trace_id, span_name, debug
          |FROM zipkin_spans
          |WHERE trace_id IN (%s)
        """.stripMargin.format(traceIdsString))
        .as((long("span_id") ~ get[Option[Long]]("parent_id") ~
          long("trace_id") ~ str("span_name") ~ int("debug") map {
          case a~b~c~d~e => DBSpan(a, b, c, d, e > 0)
        }) *)
    val annos:List[DBAnnotation] =
      SQL(
        """SELECT span_id, trace_id, span_name, service_name, value, ipv4, port, a_timestamp, duration
          |FROM zipkin_annotations
          |WHERE trace_id IN (%s)
        """.stripMargin.format(traceIdsString))
        .as((long("span_id") ~ long("trace_id") ~ str("span_name") ~ str("service_name") ~ str("value") ~
          get[Option[Int]]("ipv4") ~ get[Option[Int]]("port") ~
          long("a_timestamp") ~ get[Option[Long]]("duration") map {
          case a~b~c~d~e~f~g~h~i => DBAnnotation(a, b, c, d, e, f, g, h, i)
        }) *)
    val binAnnos:List[DBBinaryAnnotation] =
      SQL(
        """SELECT span_id, trace_id, span_name, service_name, annotation_key,
          |  annotation_value, annotation_type_value, ipv4, port
          |FROM zipkin_binary_annotations
          |WHERE trace_id IN (%s)
        """.stripMargin.format(traceIdsString))
        .as((long("span_id") ~ long("trace_id") ~ str("span_name") ~ str("service_name") ~
          str("annotation_key") ~ db.bytes("annotation_value") ~
          int("annotation_type_value") ~ get[Option[Int]]("ipv4") ~
          get[Option[Int]]("port") map {
          case a~b~c~d~e~f~g~h~i => DBBinaryAnnotation(a, b, c, d, e, f, g, h, i)
        }) *)
    // Rebuild each trace: attach to every span the annotations that share its
    // (trace_id, span_id, span_name) key.
    val results: Seq[Seq[Span]] = traceIds.map { traceId =>
      spans.filter(_.traceId == traceId).map { span =>
        val spanAnnos = annos.filter { a =>
          a.traceId == span.traceId && a.spanId == span.spanId && a.spanName == span.spanName
        }
          .map { anno =>
            // An endpoint is only reconstructed when both ip and port were stored.
            val host:Option[Endpoint] = (anno.ipv4, anno.port) match {
              case (Some(ipv4), Some(port)) => Some(Endpoint(ipv4, port.toShort, anno.serviceName))
              case _ => None
            }
            val duration:Option[Duration] = anno.duration match {
              case Some(nanos) => Some(Duration.fromNanoseconds(nanos))
              case None => None
            }
            Annotation(anno.timestamp, anno.value, host, duration)
          }
        val spanBinAnnos = binAnnos.filter { a =>
          a.traceId == span.traceId && a.spanId == span.spanId && a.spanName == span.spanName
        }
          .map { binAnno =>
            val host:Option[Endpoint] = (binAnno.ipv4, binAnno.port) match {
              case (Some(ipv4), Some(port)) => Some(Endpoint(ipv4, port.toShort, binAnno.serviceName))
              case _ => None
            }
            val value = ByteBuffer.wrap(binAnno.value)
            val annotationType = AnnotationType.fromInt(binAnno.annotationTypeValue)
            BinaryAnnotation(binAnno.key, value, annotationType, host)
          }
        Span(traceId, span.spanName, span.spanId, span.parentId, spanAnnos, spanBinAnnos, span.debug)
      }
    }
    // Trace ids with no stored spans are dropped rather than returned as empty seqs.
    results.filter(!_.isEmpty)
  }

  /**
   * Fetches the spans of a single trace.
   * NOTE(review): fails the returned future with NoSuchElementException when the
   * trace does not exist (`.head` on an empty result) — confirm callers expect this.
   */
  def getSpansByTraceId(traceId: Long): Future[Seq[Span]] = {
    getSpansByTraceIds(Seq(traceId)).map {
      _.head
    }
  }

  /**
   * How long do we store the data before we delete it? In seconds.
   * Effectively "forever" for SQL storage.
   */
  def getDataTimeToLive: Int = {
    Int.MaxValue
  }

  // Row holders for the three tables, mirroring their column layouts.
  case class DBSpan(spanId: Long, parentId: Option[Long], traceId: Long, spanName: String, debug: Boolean)
  case class DBAnnotation(spanId: Long, traceId: Long, spanName: String, serviceName: String, value: String, ipv4: Option[Int], port: Option[Int], timestamp: Long, duration: Option[Long])
  case class DBBinaryAnnotation(spanId: Long, traceId: Long, spanName: String, serviceName: String, key: String, value: Array[Byte], annotationTypeValue: Int, ipv4: Option[Int], port: Option[Int])
}
| siddhaism/zipkin | zipkin-anormdb/src/main/scala/com/twitter/zipkin/storage/anormdb/AnormStorage.scala | Scala | apache-2.0 | 9,701 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import java.io.{InputStream, IOException}
import java.nio.channels.ClosedByInterruptException
import java.util.concurrent.{LinkedBlockingQueue, TimeUnit}
import javax.annotation.concurrent.GuardedBy
import scala.collection.mutable
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet, LinkedHashMap, Queue}
import scala.util.{Failure, Success}
import org.apache.commons.io.IOUtils
import org.apache.spark.{SparkException, TaskContext}
import org.apache.spark.internal.Logging
import org.apache.spark.network.buffer.{FileSegmentManagedBuffer, ManagedBuffer}
import org.apache.spark.network.shuffle._
import org.apache.spark.network.util.TransportConf
import org.apache.spark.shuffle.{FetchFailedException, ShuffleReadMetricsReporter}
import org.apache.spark.util.{CompletionIterator, TaskCompletionListener, Utils}
/**
* An iterator that fetches multiple blocks. For local blocks, it fetches from the local block
* manager. For remote blocks, it fetches them using the provided BlockTransferService.
*
* This creates an iterator of (BlockID, InputStream) tuples so the caller can handle blocks
* in a pipelined fashion as they are received.
*
* The implementation throttles the remote fetches so they don't exceed maxBytesInFlight to avoid
* using too much memory.
*
* @param context [[TaskContext]], used for metrics update
* @param shuffleClient [[BlockStoreClient]] for fetching remote blocks
* @param blockManager [[BlockManager]] for reading local blocks
* @param blocksByAddress list of blocks to fetch grouped by the [[BlockManagerId]].
* For each block we also require two info: 1. the size (in bytes as a long
* field) in order to throttle the memory usage; 2. the mapIndex for this
* block, which indicate the index in the map stage.
* Note that zero-sized blocks are already excluded, which happened in
* [[org.apache.spark.MapOutputTracker.convertMapStatuses]].
* @param streamWrapper A function to wrap the returned input stream.
* @param maxBytesInFlight max size (in bytes) of remote blocks to fetch at any given point.
* @param maxReqsInFlight max number of remote requests to fetch blocks at any given point.
* @param maxBlocksInFlightPerAddress max number of shuffle blocks being fetched at any given point
* for a given remote host:port.
* @param maxReqSizeShuffleToMem max size (in bytes) of a request that can be shuffled to memory.
* @param detectCorrupt whether to detect any corruption in fetched blocks.
* @param shuffleMetrics used to report shuffle metrics.
* @param doBatchFetch fetch continuous shuffle blocks from same executor in batch if the server
* side supports.
*/
private[spark]
final class ShuffleBlockFetcherIterator(
context: TaskContext,
shuffleClient: BlockStoreClient,
blockManager: BlockManager,
blocksByAddress: Iterator[(BlockManagerId, Seq[(BlockId, Long, Int)])],
streamWrapper: (BlockId, InputStream) => InputStream,
maxBytesInFlight: Long,
maxReqsInFlight: Int,
maxBlocksInFlightPerAddress: Int,
maxReqSizeShuffleToMem: Long,
detectCorrupt: Boolean,
detectCorruptUseExtraMemory: Boolean,
shuffleMetrics: ShuffleReadMetricsReporter,
doBatchFetch: Boolean)
extends Iterator[(BlockId, InputStream)] with DownloadFileManager with Logging {
import ShuffleBlockFetcherIterator._
// Make remote requests at most maxBytesInFlight / 5 in length; the reason to keep them
// smaller than maxBytesInFlight is to allow multiple, parallel fetches from up to 5
// nodes, rather than blocking on reading output from one node.
private val targetRemoteRequestSize = math.max(maxBytesInFlight / 5, 1L)
/**
* Total number of blocks to fetch. This should be equal to the total number of blocks
* in [[blocksByAddress]] because we already filter out zero-sized blocks in [[blocksByAddress]].
*/
private[this] var numBlocksToFetch = 0
/**
* The number of blocks processed by the caller. The iterator is exhausted when
* [[numBlocksProcessed]] == [[numBlocksToFetch]].
*/
private[this] var numBlocksProcessed = 0
private[this] val startTimeNs = System.nanoTime()
/** Local blocks to fetch, excluding zero-sized blocks. */
private[this] val localBlocks = scala.collection.mutable.LinkedHashSet[(BlockId, Int)]()
/** Host local blockIds to fetch by executors, excluding zero-sized blocks. */
private[this] val hostLocalBlocksByExecutor =
LinkedHashMap[BlockManagerId, Seq[(BlockId, Long, Int)]]()
/** Host local blocks to fetch, excluding zero-sized blocks. */
private[this] val hostLocalBlocks = scala.collection.mutable.LinkedHashSet[(BlockId, Int)]()
/**
* A queue to hold our results. This turns the asynchronous model provided by
* [[org.apache.spark.network.BlockTransferService]] into a synchronous model (iterator).
*/
private[this] val results = new LinkedBlockingQueue[FetchResult]
/**
* Current [[FetchResult]] being processed. We track this so we can release the current buffer
* in case of a runtime exception when processing the current buffer.
*/
@volatile private[this] var currentResult: SuccessFetchResult = null
/**
* Queue of fetch requests to issue; we'll pull requests off this gradually to make sure that
* the number of bytes in flight is limited to maxBytesInFlight.
*/
private[this] val fetchRequests = new Queue[FetchRequest]
/**
* Queue of fetch requests which could not be issued the first time they were dequeued. These
* requests are tried again when the fetch constraints are satisfied.
*/
private[this] val deferredFetchRequests = new HashMap[BlockManagerId, Queue[FetchRequest]]()
/** Current bytes in flight from our requests */
private[this] var bytesInFlight = 0L
/** Current number of requests in flight */
private[this] var reqsInFlight = 0
/** Current number of blocks in flight per host:port */
private[this] val numBlocksInFlightPerAddress = new HashMap[BlockManagerId, Int]()
/**
* The blocks that can't be decompressed successfully, it is used to guarantee that we retry
* at most once for those corrupted blocks.
*/
private[this] val corruptedBlocks = mutable.HashSet[BlockId]()
/**
* Whether the iterator is still active. If isZombie is true, the callback interface will no
* longer place fetched blocks into [[results]].
*/
@GuardedBy("this")
private[this] var isZombie = false
/**
* A set to store the files used for shuffling remote huge blocks. Files in this set will be
* deleted when cleanup. This is a layer of defensiveness against disk file leaks.
*/
@GuardedBy("this")
private[this] val shuffleFilesSet = mutable.HashSet[DownloadFile]()
private[this] val onCompleteCallback = new ShuffleFetchCompletionListener(this)
initialize()
// Decrements the buffer reference count of the in-flight result, if any.
// currentResult is then cleared so cleanup() cannot release the same buffer twice.
private[storage] def releaseCurrentResultBuffer(): Unit = {
  Option(currentResult).foreach(_.buf.release())
  currentResult = null
}
/** Creates a temp file used when a fetched remote block is shuffled to disk. */
override def createTempFile(transportConf: TransportConf): DownloadFile = {
  // we never need to do any encryption or decryption here, regardless of configs, because that
  // is handled at another layer in the code. When encryption is enabled, shuffle data is written
  // to disk encrypted in the first place, and sent over the network still encrypted.
  new SimpleDownloadFile(
    blockManager.diskBlockManager.createTempLocalBlock()._2, transportConf)
}
/**
 * Registers a downloaded temp file for deletion on cleanup.
 * Returns false when the iterator is already a zombie, in which case the file
 * is not tracked and the caller remains responsible for it.
 */
override def registerTempFileToClean(file: DownloadFile): Boolean = synchronized {
  val stillActive = !isZombie
  if (stillActive) {
    shuffleFilesSet += file
  }
  stillActive
}
/**
 * Mark the iterator as zombie, and release all buffers that haven't been deserialized yet.
 * After this, the fetch callbacks stop enqueueing results (they check isZombie),
 * and any temp files registered for fetch-to-disk are deleted best-effort.
 */
private[storage] def cleanup(): Unit = {
  synchronized {
    isZombie = true
  }
  releaseCurrentResultBuffer()
  // Release buffers in the results queue
  val iter = results.iterator()
  while (iter.hasNext) {
    val result = iter.next()
    result match {
      case SuccessFetchResult(_, _, address, _, buf, _) =>
        // Blocks fetched from another block manager still count toward the remote
        // read metrics even though the caller never consumed them.
        if (address != blockManager.blockManagerId) {
          shuffleMetrics.incRemoteBytesRead(buf.size)
          if (buf.isInstanceOf[FileSegmentManagedBuffer]) {
            shuffleMetrics.incRemoteBytesReadToDisk(buf.size)
          }
          shuffleMetrics.incRemoteBlocksFetched(1)
        }
        buf.release()
      case _ =>
    }
  }
  // Best-effort deletion of fetch-to-disk temp files; failures are only logged.
  shuffleFilesSet.foreach { file =>
    if (!file.delete()) {
      logWarning("Failed to cleanup shuffle fetch temp file " + file.path())
    }
  }
}
/**
 * Issues one asynchronous fetch request for all blocks in `req`, updating the
 * in-flight accounting and enqueueing a Success/Failure result per block as
 * callbacks arrive.
 */
private[this] def sendRequest(req: FetchRequest): Unit = {
  logDebug("Sending request for %d blocks (%s) from %s".format(
    req.blocks.size, Utils.bytesToString(req.size), req.address.hostPort))
  bytesInFlight += req.size
  reqsInFlight += 1

  // so we can look up the block info of each blockID
  val infoMap = req.blocks.map {
    case FetchBlockInfo(blockId, size, mapIndex) => (blockId.toString, (size, mapIndex))
  }.toMap
  // Tracks which blocks of this request are still outstanding; the last one to
  // arrive marks the request complete (isNetworkReqDone on the result).
  val remainingBlocks = new HashSet[String]() ++= infoMap.keys
  val blockIds = req.blocks.map(_.blockId.toString)
  val address = req.address

  val blockFetchingListener = new BlockFetchingListener {
    override def onBlockFetchSuccess(blockId: String, buf: ManagedBuffer): Unit = {
      // Only add the buffer to results queue if the iterator is not zombie,
      // i.e. cleanup() has not been called yet.
      ShuffleBlockFetcherIterator.this.synchronized {
        if (!isZombie) {
          // Increment the ref count because we need to pass this to a different thread.
          // This needs to be released after use.
          buf.retain()
          remainingBlocks -= blockId
          results.put(new SuccessFetchResult(BlockId(blockId), infoMap(blockId)._2,
            address, infoMap(blockId)._1, buf, remainingBlocks.isEmpty))
          logDebug("remainingBlocks: " + remainingBlocks)
        }
      }
      logTrace(s"Got remote block $blockId after ${Utils.getUsedTimeNs(startTimeNs)}")
    }

    override def onBlockFetchFailure(blockId: String, e: Throwable): Unit = {
      logError(s"Failed to get block(s) from ${req.address.host}:${req.address.port}", e)
      results.put(new FailureFetchResult(BlockId(blockId), infoMap(blockId)._2, address, e))
    }
  }

  // Fetch remote shuffle blocks to disk when the request is too large. Since the shuffle data is
  // already encrypted and compressed over the wire(w.r.t. the related configs), we can just fetch
  // the data and write it to file directly.
  if (req.size > maxReqSizeShuffleToMem) {
    shuffleClient.fetchBlocks(address.host, address.port, address.executorId, blockIds.toArray,
      blockFetchingListener, this)
  } else {
    shuffleClient.fetchBlocks(address.host, address.port, address.executorId, blockIds.toArray,
      blockFetchingListener, null)
  }
}
/**
 * Partitions the blocks to fetch into executor-local, host-local and remote blocks,
 * populating the corresponding member collections and returning the fetch requests
 * built for the remote blocks.
 */
private[this] def partitionBlocksByFetchMode(): ArrayBuffer[FetchRequest] = {
  logDebug(s"maxBytesInFlight: $maxBytesInFlight, targetRemoteRequestSize: "
    + s"$targetRemoteRequestSize, maxBlocksInFlightPerAddress: $maxBlocksInFlightPerAddress")

  // Partition to local, host-local and remote blocks. Remote blocks are further split into
  // FetchRequests of size at most maxBytesInFlight in order to limit the amount of data in flight
  val collectedRemoteRequests = new ArrayBuffer[FetchRequest]
  var localBlockBytes = 0L
  var hostLocalBlockBytes = 0L
  var remoteBlockBytes = 0L
  var numRemoteBlocks = 0
  val hostLocalDirReadingEnabled =
    blockManager.hostLocalDirManager != null && blockManager.hostLocalDirManager.isDefined

  for ((address, blockInfos) <- blocksByAddress) {
    if (address.executorId == blockManager.blockManagerId.executorId) {
      // Blocks produced by this very executor: read straight from local disk.
      checkBlockSizes(blockInfos)
      val mergedBlockInfos = mergeContinuousShuffleBlockIdsIfNeeded(
        blockInfos.map(info => FetchBlockInfo(info._1, info._2, info._3)).to[ArrayBuffer])
      localBlocks ++= mergedBlockInfos.map(info => (info.blockId, info.mapIndex))
      localBlockBytes += mergedBlockInfos.map(_.size).sum
    } else if (hostLocalDirReadingEnabled && address.host == blockManager.blockManagerId.host) {
      // Same host, different executor: read from the peer executor's local dirs.
      checkBlockSizes(blockInfos)
      val mergedBlockInfos = mergeContinuousShuffleBlockIdsIfNeeded(
        blockInfos.map(info => FetchBlockInfo(info._1, info._2, info._3)).to[ArrayBuffer])
      val blocksForAddress =
        mergedBlockInfos.map(info => (info.blockId, info.size, info.mapIndex))
      hostLocalBlocksByExecutor += address -> blocksForAddress
      hostLocalBlocks ++= blocksForAddress.map(info => (info._1, info._3))
      hostLocalBlockBytes += mergedBlockInfos.map(_.size).sum
    } else {
      // Everything else must go over the network.
      numRemoteBlocks += blockInfos.size
      remoteBlockBytes += blockInfos.map(_._2).sum
      collectFetchRequests(address, blockInfos, collectedRemoteRequests)
    }
  }
  // BUG FIX: host-local bytes were previously omitted from the total, even though
  // the log message below reports host-local blocks as part of that total.
  val totalBytes = localBlockBytes + hostLocalBlockBytes + remoteBlockBytes
  logInfo(s"Getting $numBlocksToFetch (${Utils.bytesToString(totalBytes)}) non-empty blocks " +
    s"including ${localBlocks.size} (${Utils.bytesToString(localBlockBytes)}) local and " +
    s"${hostLocalBlocks.size} (${Utils.bytesToString(hostLocalBlockBytes)}) " +
    s"host-local and $numRemoteBlocks (${Utils.bytesToString(remoteBlockBytes)}) remote blocks")
  collectedRemoteRequests
}
/**
 * Splits the blocks of one remote address into [[FetchRequest]]s, cutting a new
 * request whenever the accumulated size reaches targetRemoteRequestSize or (when
 * batch fetch is off) the block count reaches maxBlocksInFlightPerAddress.
 */
private def collectFetchRequests(
    address: BlockManagerId,
    blockInfos: Seq[(BlockId, Long, Int)],
    collectedRemoteRequests: ArrayBuffer[FetchRequest]): Unit = {
  val iterator = blockInfos.iterator
  var curRequestSize = 0L
  var curBlocks = new ArrayBuffer[FetchBlockInfo]
  while (iterator.hasNext) {
    val (blockId, size, mapIndex) = iterator.next()
    assertPositiveBlockSize(blockId, size)
    curBlocks += FetchBlockInfo(blockId, size, mapIndex)
    curRequestSize += size
    // For batch fetch, the actual block in flight should count for merged block.
    val exceedsMaxBlocksInFlightPerAddress = !doBatchFetch &&
      curBlocks.size >= maxBlocksInFlightPerAddress
    if (curRequestSize >= targetRemoteRequestSize || exceedsMaxBlocksInFlightPerAddress) {
      // Add this FetchRequest
      val mergedBlocks = mergeContinuousShuffleBlockIdsIfNeeded(curBlocks)
        .grouped(maxBlocksInFlightPerAddress)
      curBlocks = new ArrayBuffer[FetchBlockInfo]
      mergedBlocks.foreach { mergedBlock =>
        if (mergedBlock.size == maxBlocksInFlightPerAddress) {
          collectedRemoteRequests += new FetchRequest(address, mergedBlock)
          logDebug(s"Creating fetch request of $curRequestSize at $address "
            + s"with ${mergedBlock.size} blocks")
        } else {
          // The last group does not exceed `maxBlocksInFlightPerAddress`. Put it back
          // to `curBlocks`.
          curBlocks = mergedBlock
        }
      }
      curRequestSize = 0
    }
  }
  // Add in the final request
  if (curBlocks.nonEmpty) {
    val mergedBlocks = mergeContinuousShuffleBlockIdsIfNeeded(curBlocks)
    collectedRemoteRequests += new FetchRequest(address, mergedBlocks)
  }
}
/**
 * Validates a block's advertised size before fetching: negative sizes indicate
 * corrupt metadata, and zero-sized blocks should already have been filtered out
 * upstream (see the class doc).
 *
 * @throws BlockException when the size is negative or zero
 */
private def assertPositiveBlockSize(blockId: BlockId, blockSize: Long): Unit = {
  if (blockSize < 0) {
    // BUG FIX: the message previously interpolated `size`, which resolves to the
    // inherited Iterator.size — reporting a wrong value and consuming this iterator —
    // instead of the offending block's size.
    throw BlockException(blockId, "Negative block size " + blockSize)
  } else if (blockSize == 0) {
    throw BlockException(blockId, "Zero-sized blocks should be excluded.")
  }
}
/** Validates every block size in the list via [[assertPositiveBlockSize]]. */
private def checkBlockSizes(blockInfos: Seq[(BlockId, Long, Int)]): Unit = {
  for ((blockId, size, _) <- blockInfos) {
    assertPositiveBlockSize(blockId, size)
  }
}
/**
 * When batch fetching is enabled, merges runs of shuffle blocks that come from the
 * same map output into single [[ShuffleBlockBatchId]] entries; otherwise returns the
 * blocks unchanged. Also adds the resulting block count to numBlocksToFetch, so it
 * must be called exactly once per group of blocks.
 */
private[this] def mergeContinuousShuffleBlockIdsIfNeeded(
    blocks: ArrayBuffer[FetchBlockInfo]): ArrayBuffer[FetchBlockInfo] = {
  // Collapses a run of blocks (same shuffle/map) into one batch id spanning
  // [first reduceId, last reduceId + 1).
  def mergeFetchBlockInfo(toBeMerged: ArrayBuffer[FetchBlockInfo]): FetchBlockInfo = {
    val startBlockId = toBeMerged.head.blockId.asInstanceOf[ShuffleBlockId]
    FetchBlockInfo(
      ShuffleBlockBatchId(
        startBlockId.shuffleId,
        startBlockId.mapId,
        startBlockId.reduceId,
        toBeMerged.last.blockId.asInstanceOf[ShuffleBlockId].reduceId + 1),
      toBeMerged.map(_.size).sum,
      toBeMerged.head.mapIndex)
  }

  val result = if (doBatchFetch) {
    var curBlocks = new ArrayBuffer[FetchBlockInfo]
    val mergedBlockInfo = new ArrayBuffer[FetchBlockInfo]
    val iter = blocks.iterator

    while (iter.hasNext) {
      val info = iter.next()
      val curBlockId = info.blockId.asInstanceOf[ShuffleBlockId]
      if (curBlocks.isEmpty) {
        curBlocks += info
      } else {
        // A new mapId ends the current run; flush it before starting the next.
        if (curBlockId.mapId != curBlocks.head.blockId.asInstanceOf[ShuffleBlockId].mapId) {
          mergedBlockInfo += mergeFetchBlockInfo(curBlocks)
          curBlocks.clear()
        }
        curBlocks += info
      }
    }
    // Flush the trailing run, if any.
    if (curBlocks.nonEmpty) {
      mergedBlockInfo += mergeFetchBlockInfo(curBlocks)
    }
    mergedBlockInfo
  } else {
    blocks
  }
  // update metrics
  numBlocksToFetch += result.size
  result
}
  /**
   * Fetch the local blocks while we are fetching remote blocks. This is ok because
   * `ManagedBuffer`'s memory is allocated lazily when we create the input stream, so all we
   * track in-memory are the ManagedBuffer references themselves.
   *
   * On the first exception, a [[FailureFetchResult]] is enqueued and fetching stops.
   */
  private[this] def fetchLocalBlocks(): Unit = {
    logDebug(s"Start fetching local blocks: ${localBlocks.mkString(", ")}")
    val iter = localBlocks.iterator
    while (iter.hasNext) {
      val (blockId, mapIndex) = iter.next()
      try {
        val buf = blockManager.getLocalBlockData(blockId)
        shuffleMetrics.incLocalBlocksFetched(1)
        shuffleMetrics.incLocalBytesRead(buf.size)
        // Keep the buffer alive until the consumer releases it (see buf.release() in next()).
        buf.retain()
        results.put(new SuccessFetchResult(blockId, mapIndex, blockManager.blockManagerId,
          buf.size(), buf, false))
      } catch {
        // If we see an exception, stop immediately.
        case e: Exception =>
          e match {
            // ClosedByInterruptException is an expected exception when a task is killed;
            // don't log the exception stack trace to avoid confusing users.
            // See: SPARK-28340
            case ce: ClosedByInterruptException =>
              logError("Error occurred while fetching local blocks, " + ce.getMessage)
            case ex: Exception => logError("Error occurred while fetching local blocks", ex)
          }
          results.put(new FailureFetchResult(blockId, mapIndex, blockManager.blockManagerId, e))
          return
      }
    }
  }
private[this] def fetchHostLocalBlock(
blockId: BlockId,
mapIndex: Int,
localDirs: Array[String],
blockManagerId: BlockManagerId): Boolean = {
try {
val buf = blockManager.getHostLocalShuffleData(blockId, localDirs)
buf.retain()
results.put(SuccessFetchResult(blockId, mapIndex, blockManagerId, buf.size(), buf,
isNetworkReqDone = false))
true
} catch {
case e: Exception =>
// If we see an exception, stop immediately.
logError(s"Error occurred while fetching local blocks", e)
results.put(FailureFetchResult(blockId, mapIndex, blockManagerId, e))
false
}
}
  /**
   * Fetch the host-local blocks while we are fetching remote blocks. This is ok because
   * `ManagedBuffer`'s memory is allocated lazily when we create the input stream, so all we
   * track in-memory are the ManagedBuffer references themselves.
   *
   * Executors whose local dirs are already cached are fetched synchronously; the rest are
   * fetched asynchronously once their dirs have been resolved.
   */
  private[this] def fetchHostLocalBlocks(hostLocalDirManager: HostLocalDirManager): Unit = {
    val cachedDirsByExec = hostLocalDirManager.getCachedHostLocalDirs()
    // Split executors into those whose local dirs we already know and those we must resolve.
    val (hostLocalBlocksWithCachedDirs, hostLocalBlocksWithMissingDirs) =
      hostLocalBlocksByExecutor
        .map { case (hostLocalBmId, bmInfos) =>
          (hostLocalBmId, bmInfos, cachedDirsByExec.get(hostLocalBmId.executorId))
        }.partition(_._3.isDefined)
    val bmId = blockManager.blockManagerId
    val immutableHostLocalBlocksWithoutDirs =
      hostLocalBlocksWithMissingDirs.map { case (hostLocalBmId, bmInfos, _) =>
        hostLocalBmId -> bmInfos
      }.toMap
    if (immutableHostLocalBlocksWithoutDirs.nonEmpty) {
      logDebug(s"Asynchronous fetching host-local blocks without cached executors' dir: " +
        s"${immutableHostLocalBlocksWithoutDirs.mkString(", ")}")
      val execIdsWithoutDirs = immutableHostLocalBlocksWithoutDirs.keys.map(_.executorId).toArray
      hostLocalDirManager.getHostLocalDirs(execIdsWithoutDirs) {
        case Success(dirs) =>
          immutableHostLocalBlocksWithoutDirs.foreach { case (hostLocalBmId, blockInfos) =>
            // takeWhile is used for its side effect: it keeps fetching until the first
            // failure (fetchHostLocalBlock returns false), then stops; the returned
            // collection is deliberately discarded.
            // NOTE(review): this relies on blockInfos being a strict (non-lazy) collection
            // so the predicate actually runs — TODO confirm.
            blockInfos.takeWhile { case (blockId, _, mapIndex) =>
              fetchHostLocalBlock(
                blockId,
                mapIndex,
                dirs.get(hostLocalBmId.executorId),
                hostLocalBmId)
            }
          }
          logDebug(s"Got host-local blocks (without cached executors' dir) in " +
            s"${Utils.getUsedTimeNs(startTimeNs)}")
        case Failure(throwable) =>
          // Report the dir-resolution failure against the first pending block; one
          // FailureFetchResult is enough to fail the task.
          logError(s"Error occurred while fetching host local blocks", throwable)
          val (hostLocalBmId, blockInfoSeq) = immutableHostLocalBlocksWithoutDirs.head
          val (blockId, _, mapIndex) = blockInfoSeq.head
          results.put(FailureFetchResult(blockId, mapIndex, hostLocalBmId, throwable))
      }
    }
    if (hostLocalBlocksWithCachedDirs.nonEmpty) {
      logDebug(s"Synchronous fetching host-local blocks with cached executors' dir: " +
        s"${hostLocalBlocksWithCachedDirs.mkString(", ")}")
      hostLocalBlocksWithCachedDirs.foreach { case (_, blockInfos, localDirs) =>
        blockInfos.foreach { case (blockId, _, mapIndex) =>
          // Stop on the first failed fetch; a FailureFetchResult is already enqueued.
          if (!fetchHostLocalBlock(blockId, mapIndex, localDirs.get, bmId)) {
            return
          }
        }
      }
      logDebug(s"Got host-local blocks (with cached executors' dir) in " +
        s"${Utils.getUsedTimeNs(startTimeNs)}")
    }
  }
  /**
   * Kicks off the fetch: registers task-completion cleanup, partitions blocks into
   * local / host-local / remote, sends the first wave of remote requests (bounded by
   * maxBytesInFlight), then reads all local and host-local blocks.
   */
  private[this] def initialize(): Unit = {
    // Add a task completion callback (called in both success case and failure case) to cleanup.
    context.addTaskCompletionListener(onCompleteCallback)
    // Partition blocks by the different fetch modes: local, host-local and remote blocks.
    val remoteRequests = partitionBlocksByFetchMode()
    // Add the remote requests into our queue in a random order
    fetchRequests ++= Utils.randomize(remoteRequests)
    assert ((0 == reqsInFlight) == (0 == bytesInFlight),
      "expected reqsInFlight = 0 but found reqsInFlight = " + reqsInFlight +
      ", expected bytesInFlight = 0 but found bytesInFlight = " + bytesInFlight)
    // Send out initial requests for blocks, up to our maxBytesInFlight
    fetchUpToMaxBytes()
    // Requests already dequeued by fetchUpToMaxBytes were actually sent.
    val numFetches = remoteRequests.size - fetchRequests.size
    logInfo(s"Started $numFetches remote fetches in ${Utils.getUsedTimeNs(startTimeNs)}")
    // Get Local Blocks
    fetchLocalBlocks()
    logDebug(s"Got local blocks in ${Utils.getUsedTimeNs(startTimeNs)}")
    if (hostLocalBlocks.nonEmpty) {
      blockManager.hostLocalDirManager.foreach(fetchHostLocalBlocks)
    }
  }
override def hasNext: Boolean = numBlocksProcessed < numBlocksToFetch
  /**
   * Fetches the next (BlockId, InputStream). If a task fails, the ManagedBuffers
   * underlying each InputStream will be freed by the cleanup() method registered with the
   * TaskCompletionListener. However, callers should close() these InputStreams
   * as soon as they are no longer needed, in order to release memory as early as possible.
   *
   * Throws a FetchFailedException if the next block could not be fetched.
   */
  override def next(): (BlockId, InputStream) = {
    if (!hasNext) {
      throw new NoSuchElementException()
    }
    numBlocksProcessed += 1
    var result: FetchResult = null
    var input: InputStream = null
    var streamCompressedOrEncrypted: Boolean = false
    // Take the next fetched result and try to decompress it to detect data corruption,
    // then fetch it one more time if it's corrupt; rethrow as a FetchFailedException if
    // the second fetch is also corrupt, so the previous stage could be retried.
    // For local shuffle block, throw a FetchFailedException for the first IOException.
    while (result == null) {
      val startFetchWait = System.nanoTime()
      result = results.take()
      val fetchWaitTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startFetchWait)
      shuffleMetrics.incFetchWaitTime(fetchWaitTime)
      result match {
        case r @ SuccessFetchResult(blockId, mapIndex, address, size, buf, isNetworkReqDone) =>
          // Update the right metric bucket: host-local blocks count as "local", anything
          // else from a different block manager counts as remote.
          if (address != blockManager.blockManagerId) {
            if (hostLocalBlocks.contains(blockId -> mapIndex)) {
              shuffleMetrics.incLocalBlocksFetched(1)
              shuffleMetrics.incLocalBytesRead(buf.size)
            } else {
              numBlocksInFlightPerAddress(address) = numBlocksInFlightPerAddress(address) - 1
              shuffleMetrics.incRemoteBytesRead(buf.size)
              if (buf.isInstanceOf[FileSegmentManagedBuffer]) {
                shuffleMetrics.incRemoteBytesReadToDisk(buf.size)
              }
              shuffleMetrics.incRemoteBlocksFetched(1)
              bytesInFlight -= size
            }
          }
          if (isNetworkReqDone) {
            reqsInFlight -= 1
            logDebug("Number of requests in flight " + reqsInFlight)
          }
          if (buf.size == 0) {
            // We will never legitimately receive a zero-size block. All blocks with zero records
            // have zero size and all zero-size blocks have no records (and hence should never
            // have been requested in the first place). This statement relies on behaviors of the
            // shuffle writers, which are guaranteed by the following test cases:
            //
            // - BypassMergeSortShuffleWriterSuite: "write with some empty partitions"
            // - UnsafeShuffleWriterSuite: "writeEmptyIterator"
            // - DiskBlockObjectWriterSuite: "commit() and close() without ever opening or writing"
            //
            // There is not an explicit test for SortShuffleWriter but the underlying APIs that
            // it uses are shared by the UnsafeShuffleWriter (both writers use
            // DiskBlockObjectWriter, which returns a zero-size from commitAndGet() in case no
            // records were written since the last call).
            val msg = s"Received a zero-size buffer for block $blockId from $address " +
              s"(expectedApproxSize = $size, isNetworkReqDone=$isNetworkReqDone)"
            throwFetchFailedException(blockId, mapIndex, address, new IOException(msg))
          }
          val in = try {
            buf.createInputStream()
          } catch {
            // The exception can only be thrown by reading a local shuffle block
            case e: IOException =>
              assert(buf.isInstanceOf[FileSegmentManagedBuffer])
              e match {
                // Don't log the stack trace for task-kill interrupts (see SPARK-28340).
                case ce: ClosedByInterruptException =>
                  logError("Failed to create input stream from local block, " +
                    ce.getMessage)
                case e: IOException => logError("Failed to create input stream from local block", e)
              }
              buf.release()
              throwFetchFailedException(blockId, mapIndex, address, e)
          }
          try {
            input = streamWrapper(blockId, in)
            // If the stream is compressed or wrapped, then we optionally decompress/unwrap the
            // first maxBytesInFlight/3 bytes into memory, to check for corruption in that portion
            // of the data. But even if 'detectCorruptUseExtraMemory' configuration is off, or if
            // the corruption is later, we'll still detect the corruption later in the stream.
            streamCompressedOrEncrypted = !input.eq(in)
            if (streamCompressedOrEncrypted && detectCorruptUseExtraMemory) {
              // TODO: manage the memory used here, and spill it into disk in case of OOM.
              input = Utils.copyStreamUpTo(input, maxBytesInFlight / 3)
            }
          } catch {
            case e: IOException =>
              buf.release()
              // Local blocks and blocks that already failed once are not retried.
              if (buf.isInstanceOf[FileSegmentManagedBuffer]
                  || corruptedBlocks.contains(blockId)) {
                throwFetchFailedException(blockId, mapIndex, address, e)
              } else {
                // First corruption for a remote block: remember it and re-request once.
                logWarning(s"got an corrupted block $blockId from $address, fetch again", e)
                corruptedBlocks += blockId
                fetchRequests += FetchRequest(
                  address, Array(FetchBlockInfo(blockId, size, mapIndex)))
                // Loop again to take the next result from the queue.
                result = null
              }
          } finally {
            // TODO: release the buf here to free memory earlier
            if (input == null) {
              // Close the underlying stream if there was an issue in wrapping the stream using
              // streamWrapper
              in.close()
            }
          }
        case FailureFetchResult(blockId, mapIndex, address, e) =>
          throwFetchFailedException(blockId, mapIndex, address, e)
      }
      // Send fetch requests up to maxBytesInFlight
      fetchUpToMaxBytes()
    }
    currentResult = result.asInstanceOf[SuccessFetchResult]
    (currentResult.blockId,
      new BufferReleasingInputStream(
        input,
        this,
        currentResult.blockId,
        currentResult.mapIndex,
        currentResult.address,
        detectCorrupt && streamCompressedOrEncrypted))
  }
  /**
   * Wraps this iterator so the task-completion cleanup callback fires once iteration
   * finishes.
   * NOTE(review): the second argument appears to be taken by name by CompletionIterator
   * (otherwise the cleanup would run eagerly here) — verify before hoisting it to a val.
   */
  def toCompletionIterator: Iterator[(BlockId, InputStream)] = {
    CompletionIterator[(BlockId, InputStream), this.type](this,
      onCompleteCallback.onComplete(context))
  }
  /**
   * Sends as many pending fetch requests as the in-flight limits allow: at most
   * maxBytesInFlight bytes, maxReqsInFlight requests, and maxBlocksInFlightPerAddress
   * blocks per remote host. Requests that would exceed the per-address limit are parked
   * in `deferredFetchRequests` and retried on later calls.
   */
  private def fetchUpToMaxBytes(): Unit = {
    // Send fetch requests up to maxBytesInFlight. If you cannot fetch from a remote host
    // immediately, defer the request until the next time it can be processed.
    // Process any outstanding deferred fetch requests if possible.
    if (deferredFetchRequests.nonEmpty) {
      for ((remoteAddress, defReqQueue) <- deferredFetchRequests) {
        while (isRemoteBlockFetchable(defReqQueue) &&
            !isRemoteAddressMaxedOut(remoteAddress, defReqQueue.front)) {
          val request = defReqQueue.dequeue()
          logDebug(s"Processing deferred fetch request for $remoteAddress with "
            + s"${request.blocks.length} blocks")
          send(remoteAddress, request)
          // Drop the per-address queue once it is drained.
          if (defReqQueue.isEmpty) {
            deferredFetchRequests -= remoteAddress
          }
        }
      }
    }
    // Process any regular fetch requests if possible.
    while (isRemoteBlockFetchable(fetchRequests)) {
      val request = fetchRequests.dequeue()
      val remoteAddress = request.address
      if (isRemoteAddressMaxedOut(remoteAddress, request)) {
        logDebug(s"Deferring fetch request for $remoteAddress with ${request.blocks.size} blocks")
        val defReqQueue = deferredFetchRequests.getOrElse(remoteAddress, new Queue[FetchRequest]())
        defReqQueue.enqueue(request)
        deferredFetchRequests(remoteAddress) = defReqQueue
      } else {
        send(remoteAddress, request)
      }
    }
    // Sends one request and bumps the per-address in-flight block count.
    def send(remoteAddress: BlockManagerId, request: FetchRequest): Unit = {
      sendRequest(request)
      numBlocksInFlightPerAddress(remoteAddress) =
        numBlocksInFlightPerAddress.getOrElse(remoteAddress, 0) + request.blocks.size
    }
    // A queue's head may be sent if either nothing is in flight (always make progress)
    // or both the request-count and byte-count limits would still be respected.
    def isRemoteBlockFetchable(fetchReqQueue: Queue[FetchRequest]): Boolean = {
      fetchReqQueue.nonEmpty &&
        (bytesInFlight == 0 ||
          (reqsInFlight + 1 <= maxReqsInFlight &&
            bytesInFlight + fetchReqQueue.front.size <= maxBytesInFlight))
    }
    // Checks if sending a new fetch request will exceed the max no. of blocks being fetched from a
    // given remote address.
    def isRemoteAddressMaxedOut(remoteAddress: BlockManagerId, request: FetchRequest): Boolean = {
      numBlocksInFlightPerAddress.getOrElse(remoteAddress, 0) + request.blocks.size >
        maxBlocksInFlightPerAddress
    }
  }
private[storage] def throwFetchFailedException(
blockId: BlockId,
mapIndex: Int,
address: BlockManagerId,
e: Throwable) = {
blockId match {
case ShuffleBlockId(shufId, mapId, reduceId) =>
throw new FetchFailedException(address, shufId, mapId, mapIndex, reduceId, e)
case ShuffleBlockBatchId(shuffleId, mapId, startReduceId, _) =>
throw new FetchFailedException(address, shuffleId, mapId, mapIndex, startReduceId, e)
case _ =>
throw new SparkException(
"Failed to get block " + blockId + ", which is not a shuffle block", e)
}
}
}
/**
 * Helper class that ensures a ManagedBuffer is released upon InputStream.close() and
 * also detects stream corruption if streamCompressedOrEncrypted is true.
 *
 * All read/skip operations delegate to the wrapped stream; when `detectCorruption` is set,
 * an IOException from the delegate closes this stream and is converted into a fetch
 * failure via the owning iterator. Note that available/mark/markSupported/reset delegate
 * without that conversion.
 */
private class BufferReleasingInputStream(
    // This is visible for testing
    private[storage] val delegate: InputStream,
    private val iterator: ShuffleBlockFetcherIterator,
    private val blockId: BlockId,
    private val mapIndex: Int,
    private val address: BlockManagerId,
    private val detectCorruption: Boolean)
  extends InputStream {
  // Guards against double-release of the underlying buffer.
  private[this] var closed = false
  override def read(): Int = {
    try {
      delegate.read()
    } catch {
      // Treat a read failure on a (possibly corrupt) stream as a fetch failure.
      case e: IOException if detectCorruption =>
        IOUtils.closeQuietly(this)
        iterator.throwFetchFailedException(blockId, mapIndex, address, e)
    }
  }
  override def close(): Unit = {
    if (!closed) {
      delegate.close()
      // Release the ManagedBuffer backing the current result exactly once.
      iterator.releaseCurrentResultBuffer()
      closed = true
    }
  }
  override def available(): Int = delegate.available()
  override def mark(readlimit: Int): Unit = delegate.mark(readlimit)
  override def skip(n: Long): Long = {
    try {
      delegate.skip(n)
    } catch {
      case e: IOException if detectCorruption =>
        IOUtils.closeQuietly(this)
        iterator.throwFetchFailedException(blockId, mapIndex, address, e)
    }
  }
  override def markSupported(): Boolean = delegate.markSupported()
  override def read(b: Array[Byte]): Int = {
    try {
      delegate.read(b)
    } catch {
      case e: IOException if detectCorruption =>
        IOUtils.closeQuietly(this)
        iterator.throwFetchFailedException(blockId, mapIndex, address, e)
    }
  }
  override def read(b: Array[Byte], off: Int, len: Int): Int = {
    try {
      delegate.read(b, off, len)
    } catch {
      case e: IOException if detectCorruption =>
        IOUtils.closeQuietly(this)
        iterator.throwFetchFailedException(blockId, mapIndex, address, e)
    }
  }
  override def reset(): Unit = delegate.reset()
}
/**
 * Task-completion listener that cleans up a [[ShuffleBlockFetcherIterator]] once the task
 * finishes (in both the success and the failure case).
 *
 * @param data the iterator to clean up; nulled out after cleanup so it can be GC'd
 */
private class ShuffleFetchCompletionListener(var data: ShuffleBlockFetcherIterator)
  extends TaskCompletionListener {

  override def onTaskCompletion(context: TaskContext): Unit = {
    val iterator = data
    if (iterator != null) {
      iterator.cleanup()
      // Drop the reference so this listener no longer pins the iterator (and the block
      // location metadata it holds, e.g. blocksByAddress), letting the GC reclaim it
      // once reading is done.
      data = null
    }
  }

  /** Alias for [[onTaskCompletion]], for call sites that invoke the cleanup directly. */
  def onComplete(context: TaskContext): Unit = this.onTaskCompletion(context)
}
/** Companion object holding the helper data types used by the fetcher iterator. */
private[storage]
object ShuffleBlockFetcherIterator {
  /**
   * The block information to fetch used in FetchRequest.
   * @param blockId block id
   * @param size estimated size of the block. Note that this is NOT the exact bytes.
   *             Size of remote block is used to calculate bytesInFlight.
   * @param mapIndex the mapIndex for this block, which indicates the index in the map stage.
   */
  private[storage] case class FetchBlockInfo(
    blockId: BlockId,
    size: Long,
    mapIndex: Int)
  /**
   * A request to fetch blocks from a remote BlockManager.
   * @param address remote BlockManager to fetch from.
   * @param blocks Sequence of the information for blocks to fetch from the same address.
   */
  case class FetchRequest(address: BlockManagerId, blocks: Seq[FetchBlockInfo]) {
    // Total estimated payload of this request, used against maxBytesInFlight.
    val size = blocks.map(_.size).sum
  }
  /**
   * Result of a fetch from a remote block.
   */
  private[storage] sealed trait FetchResult {
    val blockId: BlockId
    val address: BlockManagerId
  }
  /**
   * Result of a fetch from a remote block successfully.
   * @param blockId block id
   * @param mapIndex the mapIndex for this block, which indicates the index in the map stage.
   * @param address BlockManager that the block was fetched from.
   * @param size estimated size of the block. Note that this is NOT the exact bytes.
   *             Size of remote block is used to calculate bytesInFlight.
   * @param buf `ManagedBuffer` for the content.
   * @param isNetworkReqDone Is this the last network request for this host in this fetch request.
   */
  private[storage] case class SuccessFetchResult(
      blockId: BlockId,
      mapIndex: Int,
      address: BlockManagerId,
      size: Long,
      buf: ManagedBuffer,
      isNetworkReqDone: Boolean) extends FetchResult {
    require(buf != null)
    require(size >= 0)
  }
  /**
   * Result of a fetch from a remote block unsuccessfully.
   * @param blockId block id
   * @param mapIndex the mapIndex for this block, which indicates the index in the map stage
   * @param address BlockManager that the block was attempted to be fetched from
   * @param e the failure exception
   */
  private[storage] case class FailureFetchResult(
      blockId: BlockId,
      mapIndex: Int,
      address: BlockManagerId,
      e: Throwable)
    extends FetchResult
}
| jkbradley/spark | core/src/main/scala/org/apache/spark/storage/ShuffleBlockFetcherIterator.scala | Scala | apache-2.0 | 39,277 |
package org.bitcoins.marshallers.script
import org.bitcoins.marshallers.MarshallerUtil
import org.bitcoins.protocol.script.{ScriptPubKey}
import spray.json._
import DefaultJsonProtocol._
/**
 * Spray-json marshaller for [[ScriptPubKey]].
 *
 * Created by chris on 12/27/15.
 */
object ScriptPubKeyMarshaller extends DefaultJsonProtocol with MarshallerUtil {
  val reqSigsKey = "reqSigs"
  val typeKey = "type"
  val addressesKey = "addresses"
  val scriptPubKeyKey = "scriptPubKey"

  implicit object ScriptPubKeyFormatter extends RootJsonFormat[ScriptPubKey] {

    /** Rebuilds a [[ScriptPubKey]] from the "asm" field of the given JSON object. */
    override def read(value: JsValue): ScriptPubKey = {
      val asmJson = value.asJsObject.fields(ScriptSignatureMarshaller.asmKey)
      val tokens = ScriptParser.fromString(asmJson.convertTo[String])
      ScriptPubKey.fromAsm(tokens)
    }

    /** Serializes the script's asm and hex forms; reqSigs is not computed (always -1). */
    override def write(scriptPubKey: ScriptPubKey): JsValue = {
      import org.bitcoins.marshallers.BitcoinAddressProtocol._
      val fields: Map[String, JsValue] = Map(
        ScriptSignatureMarshaller.asmKey -> JsString(scriptPubKey.asm.toString),
        ScriptSignatureMarshaller.hexKey -> JsString(scriptPubKey.hex),
        reqSigsKey -> JsNumber(-1))
      JsObject(fields)
    }
  }
}
| Christewart/scalacoin | src/main/scala/org/bitcoins/marshallers/script/ScriptPubKeyMarshaller.scala | Scala | mit | 1,145 |
/* __ __ _____ __ *\\
** / // // /_/ |/ / Wix **
** / // // / /| / (c) 2006-2015, Wix LTD. **
** / // // / // | http://www.wix.com/ **
** \\__/|__/_//_/| | **
\\* |/ */
package com.wix.restaurants.common.protocol.api
/** Encapsulates the data for a Response.
  * It has mutually exclusive members - either a {{{value}}}, for a successful response, or an
  * {{{error}}}, for a failure response. The constructor is private; use the companion's
  * factory methods to build instances.
  *
  * @author <a href="mailto:ohadr@wix.com">Raz, Ohad</a>
  */
case class Response[V] private (value: Option[V] = None, error: Option[Error] = None)
/** The companion object of the [[Response]] case class; provides factories for building
  * either a successful response or a failure response.
  *
  * @author <a href="mailto:ohadr@wix.com">Raz, Ohad</a>
  */
object Response {

  /** Builds a successful response wrapping the given value. */
  def apply[V](value: V): Response[V] = new Response(value = Some(value))

  /** Builds a failure response wrapping the given error. */
  def apply[V](error: Error): Response[V] = new Response(error = Some(error))
}
| wix/common-protocol | common-protocol-api/src/main/scala/com/wix/restaurants/common/protocol/api/Response.scala | Scala | apache-2.0 | 1,155 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.regression
import scala.collection.mutable
import breeze.linalg.{DenseVector => BDV}
import breeze.optimize.{CachedDiffFunction, DiffFunction, LBFGS => BreezeLBFGS}
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkException
import org.apache.spark.annotation.{Experimental, Since}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.internal.Logging
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.ml.linalg.{BLAS, Vector, Vectors, VectorUDT}
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared._
import org.apache.spark.ml.util._
import org.apache.spark.ml.util.Instrumentation.instrumented
import org.apache.spark.mllib.linalg.VectorImplicits._
import org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.{DoubleType, StructType}
import org.apache.spark.storage.StorageLevel
/**
 * Params for accelerated failure time (AFT) regression: shared by the estimator and the
 * fitted model. Covers the censor column, quantile probabilities and the optional
 * quantiles output column.
 */
private[regression] trait AFTSurvivalRegressionParams extends Params
  with HasFeaturesCol with HasLabelCol with HasPredictionCol with HasMaxIter
  with HasTol with HasFitIntercept with HasAggregationDepth with Logging {
  /**
   * Param for censor column name.
   * The value of this column could be 0 or 1.
   * If the value is 1, it means the event has occurred i.e. uncensored; otherwise censored.
   * @group param
   */
  @Since("1.6.0")
  final val censorCol: Param[String] = new Param(this, "censorCol", "censor column name")
  /** @group getParam */
  @Since("1.6.0")
  def getCensorCol: String = $(censorCol)
  setDefault(censorCol -> "censor")
  /**
   * Param for quantile probabilities array.
   * Values of the quantile probabilities array should be in the range (0, 1)
   * (both endpoints excluded) and the array should be non-empty.
   * @group param
   */
  @Since("1.6.0")
  final val quantileProbabilities: DoubleArrayParam = new DoubleArrayParam(this,
    "quantileProbabilities", "quantile probabilities array",
    (t: Array[Double]) => t.forall(ParamValidators.inRange(0, 1, false, false)) && t.length > 0)
  /** @group getParam */
  @Since("1.6.0")
  def getQuantileProbabilities: Array[Double] = $(quantileProbabilities)
  setDefault(quantileProbabilities -> Array(0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99))
  /**
   * Param for quantiles column name.
   * This column will output quantiles of corresponding quantileProbabilities if it is set.
   * @group param
   */
  @Since("1.6.0")
  final val quantilesCol: Param[String] = new Param(this, "quantilesCol", "quantiles column name")
  /** @group getParam */
  @Since("1.6.0")
  def getQuantilesCol: String = $(quantilesCol)
  /** Checks whether the input has quantiles column name (set and non-empty). */
  private[regression] def hasQuantilesCol: Boolean = {
    isDefined(quantilesCol) && $(quantilesCol).nonEmpty
  }
  /**
   * Validates and transforms the input schema with the provided param map.
   * @param schema input schema
   * @param fitting whether this is in fitting or prediction; the label and censor columns
   *                are only required (and checked) when fitting
   * @return output schema
   */
  protected def validateAndTransformSchema(
      schema: StructType,
      fitting: Boolean): StructType = {
    SchemaUtils.checkColumnType(schema, $(featuresCol), new VectorUDT)
    if (fitting) {
      SchemaUtils.checkNumericType(schema, $(censorCol))
      SchemaUtils.checkNumericType(schema, $(labelCol))
    }
    // Only append the quantiles column when the user asked for it.
    val schemaWithQuantilesCol = if (hasQuantilesCol) {
      SchemaUtils.appendColumn(schema, $(quantilesCol), new VectorUDT)
    } else schema
    SchemaUtils.appendColumn(schemaWithQuantilesCol, $(predictionCol), DoubleType)
  }
}
/**
 * :: Experimental ::
 * Fit a parametric survival regression model named accelerated failure time (AFT) model
 * (see <a href="https://en.wikipedia.org/wiki/Accelerated_failure_time_model">
 * Accelerated failure time model (Wikipedia)</a>)
 * based on the Weibull distribution of the survival time.
 */
@Experimental
@Since("1.6.0")
class AFTSurvivalRegression @Since("1.6.0") (@Since("1.6.0") override val uid: String)
  extends Estimator[AFTSurvivalRegressionModel] with AFTSurvivalRegressionParams
  with DefaultParamsWritable with Logging {

  @Since("1.6.0")
  def this() = this(Identifiable.randomUID("aftSurvReg"))

  /** @group setParam */
  @Since("1.6.0")
  def setFeaturesCol(value: String): this.type = set(featuresCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setLabelCol(value: String): this.type = set(labelCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setCensorCol(value: String): this.type = set(censorCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setPredictionCol(value: String): this.type = set(predictionCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setQuantileProbabilities(value: Array[Double]): this.type = set(quantileProbabilities, value)

  /** @group setParam */
  @Since("1.6.0")
  def setQuantilesCol(value: String): this.type = set(quantilesCol, value)

  /**
   * Set if we should fit the intercept
   * Default is true.
   * @group setParam
   */
  @Since("1.6.0")
  def setFitIntercept(value: Boolean): this.type = set(fitIntercept, value)
  setDefault(fitIntercept -> true)

  /**
   * Set the maximum number of iterations.
   * Default is 100.
   * @group setParam
   */
  @Since("1.6.0")
  def setMaxIter(value: Int): this.type = set(maxIter, value)
  setDefault(maxIter -> 100)

  /**
   * Set the convergence tolerance of iterations.
   * Smaller value will lead to higher accuracy with the cost of more iterations.
   * Default is 1E-6.
   * @group setParam
   */
  @Since("1.6.0")
  def setTol(value: Double): this.type = set(tol, value)
  setDefault(tol -> 1E-6)

  /**
   * Suggested depth for treeAggregate (greater than or equal to 2).
   * If the dimensions of features or the number of partitions are large,
   * this param could be adjusted to a larger size.
   * Default is 2.
   * @group expertSetParam
   */
  @Since("2.1.0")
  def setAggregationDepth(value: Int): this.type = set(aggregationDepth, value)
  setDefault(aggregationDepth -> 2)

  /**
   * Extract [[featuresCol]], [[labelCol]] and [[censorCol]] from input dataset,
   * and put it in an RDD with strong types.
   */
  protected[ml] def extractAFTPoints(dataset: Dataset[_]): RDD[AFTPoint] = {
    dataset.select(col($(featuresCol)), col($(labelCol)).cast(DoubleType),
      col($(censorCol)).cast(DoubleType)).rdd.map {
        case Row(features: Vector, label: Double, censor: Double) =>
          AFTPoint(features, label, censor)
      }
  }

  @Since("2.0.0")
  override def fit(dataset: Dataset[_]): AFTSurvivalRegressionModel = instrumented { instr =>
    transformSchema(dataset.schema, logging = true)
    val instances = extractAFTPoints(dataset)
    // Cache the extracted points unless the caller already persisted the dataset.
    val handlePersistence = dataset.storageLevel == StorageLevel.NONE
    if (handlePersistence) instances.persist(StorageLevel.MEMORY_AND_DISK)

    // One distributed pass to collect per-feature mean/variance statistics.
    val featuresSummarizer = {
      val seqOp = (c: MultivariateOnlineSummarizer, v: AFTPoint) => c.add(v.features)
      val combOp = (c1: MultivariateOnlineSummarizer, c2: MultivariateOnlineSummarizer) => {
        c1.merge(c2)
      }
      instances.treeAggregate(
        new MultivariateOnlineSummarizer
      )(seqOp, combOp, $(aggregationDepth))
    }

    val featuresStd = featuresSummarizer.variance.toArray.map(math.sqrt)
    val numFeatures = featuresStd.size
    instr.logPipelineStage(this)
    instr.logDataset(dataset)
    instr.logParams(this, labelCol, featuresCol, censorCol, predictionCol, quantilesCol,
      fitIntercept, maxIter, tol, aggregationDepth)
    instr.logNamedValue("quantileProbabilities.size", $(quantileProbabilities).length)
    instr.logNumFeatures(numFeatures)
    instr.logNumExamples(featuresSummarizer.count)

    if (!$(fitIntercept) && (0 until numFeatures).exists { i =>
        featuresStd(i) == 0.0 && featuresSummarizer.mean(i) != 0.0 }) {
      instr.logWarning("Fitting AFTSurvivalRegressionModel without intercept on dataset with " +
        "constant nonzero column, Spark MLlib outputs zero coefficients for constant nonzero " +
        "columns. This behavior is different from R survival::survreg.")
    }

    val bcFeaturesStd = instances.context.broadcast(featuresStd)
    val costFun = new AFTCostFun(instances, $(fitIntercept), bcFeaturesStd, $(aggregationDepth))
    // NOTE(review): the second constructor argument (10) appears to be Breeze's L-BFGS
    // history size (m) — verify against the Breeze LBFGS signature before changing.
    val optimizer = new BreezeLBFGS[BDV[Double]]($(maxIter), 10, $(tol))

    /*
       The parameters vector has three parts:
       the first element: Double, log(sigma), the log of scale parameter
       the second element: Double, intercept of the beta parameter
       the third to the end elements: Doubles, regression coefficients vector of the beta parameter
     */
    val initialParameters = Vectors.zeros(numFeatures + 2)

    val states = optimizer.iterations(new CachedDiffFunction(costFun),
      initialParameters.asBreeze.toDenseVector)

    // Drain the optimizer's iteration stream, keeping only the final state.
    val parameters = {
      val arrayBuilder = mutable.ArrayBuilder.make[Double]
      var state: optimizer.State = null
      while (states.hasNext) {
        state = states.next()
        arrayBuilder += state.adjustedValue
      }
      if (state == null) {
        val msg = s"${optimizer.getClass.getName} failed."
        throw new SparkException(msg)
      }
      state.x.toArray.clone()
    }

    bcFeaturesStd.destroy()
    if (handlePersistence) instances.unpersist()

    // The cost function worked on standardized features; rescale the coefficients back
    // to the original feature space (constant columns keep a zero coefficient).
    val rawCoefficients = parameters.slice(2, parameters.length)
    var i = 0
    while (i < numFeatures) {
      rawCoefficients(i) *= { if (featuresStd(i) != 0.0) 1.0 / featuresStd(i) else 0.0 }
      i += 1
    }
    val coefficients = Vectors.dense(rawCoefficients)
    val intercept = parameters(1)
    // The optimizer worked with log(sigma); exponentiate to recover the scale.
    val scale = math.exp(parameters(0))
    copyValues(new AFTSurvivalRegressionModel(uid, coefficients, intercept, scale).setParent(this))
  }

  @Since("1.6.0")
  override def transformSchema(schema: StructType): StructType = {
    validateAndTransformSchema(schema, fitting = true)
  }

  @Since("1.6.0")
  override def copy(extra: ParamMap): AFTSurvivalRegression = defaultCopy(extra)
}
@Since("1.6.0")
object AFTSurvivalRegression extends DefaultParamsReadable[AFTSurvivalRegression] {

  /** Loads a previously saved [[AFTSurvivalRegression]] estimator from `path`. */
  @Since("1.6.0")
  override def load(path: String): AFTSurvivalRegression = super.load(path)
}
/**
 * :: Experimental ::
 * Model produced by [[AFTSurvivalRegression]].
 *
 * @param coefficients regression coefficients of the beta parameter (original feature space)
 * @param intercept intercept of the beta parameter
 * @param scale the Weibull scale parameter sigma (always positive)
 */
@Experimental
@Since("1.6.0")
class AFTSurvivalRegressionModel private[ml] (
    @Since("1.6.0") override val uid: String,
    @Since("2.0.0") val coefficients: Vector,
    @Since("1.6.0") val intercept: Double,
    @Since("1.6.0") val scale: Double)
  extends Model[AFTSurvivalRegressionModel] with AFTSurvivalRegressionParams with MLWritable {

  /** @group setParam */
  @Since("1.6.0")
  def setFeaturesCol(value: String): this.type = set(featuresCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setPredictionCol(value: String): this.type = set(predictionCol, value)

  /** @group setParam */
  @Since("1.6.0")
  def setQuantileProbabilities(value: Array[Double]): this.type = set(quantileProbabilities, value)

  /** @group setParam */
  @Since("1.6.0")
  def setQuantilesCol(value: String): this.type = set(quantilesCol, value)

  /**
   * Computes the survival-time quantiles Q(p) = lambda * (-ln(1 - p))^(1/k) for each
   * probability in [[quantileProbabilities]].
   */
  @Since("2.0.0")
  def predictQuantiles(features: Vector): Vector = {
    // scale parameter for the Weibull distribution of lifetime
    val lambda = math.exp(BLAS.dot(coefficients, features) + intercept)
    // shape parameter for the Weibull distribution of lifetime
    val k = 1 / scale
    val quantiles = $(quantileProbabilities).map {
      q => lambda * math.exp(math.log(-math.log(1 - q)) / k)
    }
    Vectors.dense(quantiles)
  }

  /** Predicts the expected survival time exp(beta . x + intercept) for the given features. */
  @Since("2.0.0")
  def predict(features: Vector): Double = {
    math.exp(BLAS.dot(coefficients, features) + intercept)
  }

  @Since("2.0.0")
  override def transform(dataset: Dataset[_]): DataFrame = {
    transformSchema(dataset.schema, logging = true)
    val predictUDF = udf { features: Vector => predict(features) }
    val predictQuantilesUDF = udf { features: Vector => predictQuantiles(features)}
    // Only add the quantiles column when the user configured quantilesCol.
    if (hasQuantilesCol) {
      dataset.withColumn($(predictionCol), predictUDF(col($(featuresCol))))
        .withColumn($(quantilesCol), predictQuantilesUDF(col($(featuresCol))))
    } else {
      dataset.withColumn($(predictionCol), predictUDF(col($(featuresCol))))
    }
  }

  @Since("1.6.0")
  override def transformSchema(schema: StructType): StructType = {
    validateAndTransformSchema(schema, fitting = false)
  }

  @Since("1.6.0")
  override def copy(extra: ParamMap): AFTSurvivalRegressionModel = {
    copyValues(new AFTSurvivalRegressionModel(uid, coefficients, intercept, scale), extra)
      .setParent(parent)
  }

  @Since("1.6.0")
  override def write: MLWriter =
    new AFTSurvivalRegressionModel.AFTSurvivalRegressionModelWriter(this)
}
@Since("1.6.0")
object AFTSurvivalRegressionModel extends MLReadable[AFTSurvivalRegressionModel] {

  @Since("1.6.0")
  override def read: MLReader[AFTSurvivalRegressionModel] = new AFTSurvivalRegressionModelReader

  @Since("1.6.0")
  override def load(path: String): AFTSurvivalRegressionModel = super.load(path)

  /** [[MLWriter]] instance for [[AFTSurvivalRegressionModel]] */
  private[AFTSurvivalRegressionModel] class AFTSurvivalRegressionModelWriter (
      instance: AFTSurvivalRegressionModel
    ) extends MLWriter with Logging {

    // On-disk row layout for the model data written under <path>/data.
    private case class Data(coefficients: Vector, intercept: Double, scale: Double)

    override protected def saveImpl(path: String): Unit = {
      // Save metadata and Params
      DefaultParamsWriter.saveMetadata(instance, path, sc)
      // Save model data: coefficients, intercept, scale
      val data = Data(instance.coefficients, instance.intercept, instance.scale)
      val dataPath = new Path(path, "data").toString
      // repartition(1): a single model row, so emit a single parquet file.
      sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
    }
  }

  private class AFTSurvivalRegressionModelReader extends MLReader[AFTSurvivalRegressionModel] {

    /** Checked against metadata when loading model */
    private val className = classOf[AFTSurvivalRegressionModel].getName

    override def load(path: String): AFTSurvivalRegressionModel = {
      val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
      val dataPath = new Path(path, "data").toString
      val data = sparkSession.read.parquet(dataPath)
      // Convert the stored coefficients column to ml.Vector first — presumably
      // to support data persisted in the older mllib vector format (confirm).
      val Row(coefficients: Vector, intercept: Double, scale: Double) =
        MLUtils.convertVectorColumnsToML(data, "coefficients")
          .select("coefficients", "intercept", "scale")
          .head()
      val model = new AFTSurvivalRegressionModel(metadata.uid, coefficients, intercept, scale)
      // Restore Params saved alongside the metadata.
      metadata.getAndSetParams(model)
      model
    }
  }
}
/**
* AFTAggregator computes the gradient and loss for a AFT loss function,
* as used in AFT survival regression for samples in sparse or dense vector in an online fashion.
*
* The loss function and likelihood function under the AFT model based on:
* Lawless, J. F., Statistical Models and Methods for Lifetime Data,
* New York: John Wiley & Sons, Inc. 2003.
*
* Two AFTAggregator can be merged together to have a summary of loss and gradient of
* the corresponding joint dataset.
*
* Given the values of the covariates $x^{'}$, for random lifetime $t_{i}$ of subjects i = 1,..,n,
* with possible right-censoring, the likelihood function under the AFT model is given as
*
* <blockquote>
* $$
* L(\\beta,\\sigma)=\\prod_{i=1}^n[\\frac{1}{\\sigma}f_{0}
* (\\frac{\\log{t_{i}}-x^{'}\\beta}{\\sigma})]^{\\delta_{i}}S_{0}
* (\\frac{\\log{t_{i}}-x^{'}\\beta}{\\sigma})^{1-\\delta_{i}}
* $$
* </blockquote>
*
* Where $\\delta_{i}$ is the indicator of the event has occurred i.e. uncensored or not.
* Using $\\epsilon_{i}=\\frac{\\log{t_{i}}-x^{'}\\beta}{\\sigma}$, the log-likelihood function
* assumes the form
*
* <blockquote>
* $$
* \\iota(\\beta,\\sigma)=\\sum_{i=1}^{n}[-\\delta_{i}\\log\\sigma+
* \\delta_{i}\\log{f_{0}}(\\epsilon_{i})+(1-\\delta_{i})\\log{S_{0}(\\epsilon_{i})}]
* $$
* </blockquote>
* Where $S_{0}(\\epsilon_{i})$ is the baseline survivor function,
* and $f_{0}(\\epsilon_{i})$ is corresponding density function.
*
* The most commonly used log-linear survival regression method is based on the Weibull
* distribution of the survival time. The Weibull distribution for lifetime corresponding
* to extreme value distribution for log of the lifetime,
* and the $S_{0}(\\epsilon)$ function is
*
* <blockquote>
* $$
* S_{0}(\\epsilon_{i})=\\exp(-e^{\\epsilon_{i}})
* $$
* </blockquote>
*
* and the $f_{0}(\\epsilon_{i})$ function is
*
* <blockquote>
* $$
* f_{0}(\\epsilon_{i})=e^{\\epsilon_{i}}\\exp(-e^{\\epsilon_{i}})
* $$
* </blockquote>
*
* The log-likelihood function for Weibull distribution of lifetime is
*
* <blockquote>
* $$
* \\iota(\\beta,\\sigma)=
* -\\sum_{i=1}^n[\\delta_{i}\\log\\sigma-\\delta_{i}\\epsilon_{i}+e^{\\epsilon_{i}}]
* $$
* </blockquote>
*
* Due to minimizing the negative log-likelihood equivalent to maximum a posteriori probability,
* the loss function we use to optimize is $-\\iota(\\beta,\\sigma)$.
* The gradient functions for $\\beta$ and $\\log\\sigma$ respectively are
*
* <blockquote>
* $$
* \\frac{\\partial (-\\iota)}{\\partial \\beta}=
* \\sum_{1=1}^{n}[\\delta_{i}-e^{\\epsilon_{i}}]\\frac{x_{i}}{\\sigma} \\\\
*
* \\frac{\\partial (-\\iota)}{\\partial (\\log\\sigma)}=
* \\sum_{i=1}^{n}[\\delta_{i}+(\\delta_{i}-e^{\\epsilon_{i}})\\epsilon_{i}]
* $$
* </blockquote>
*
* @param bcParameters The broadcasted value includes three part: The log of scale parameter,
* the intercept and regression coefficients corresponding to the features.
* @param fitIntercept Whether to fit an intercept term.
* @param bcFeaturesStd The broadcast standard deviation values of the features.
*/
private class AFTAggregator(
    bcParameters: Broadcast[BDV[Double]],
    fitIntercept: Boolean,
    bcFeaturesStd: Broadcast[Array[Double]]) extends Serializable {

  // Parameter layout: index 0 = log(sigma), index 1 = intercept, 2.. = coefficients.
  private val length = bcParameters.value.length
  // make transient so we do not serialize between aggregation stages
  @transient private lazy val parameters = bcParameters.value
  // the regression coefficients to the covariates
  @transient private lazy val coefficients = parameters.slice(2, length)
  @transient private lazy val intercept = parameters(1)
  // sigma is the scale parameter of the AFT model
  @transient private lazy val sigma = math.exp(parameters(0))

  private var totalCnt: Long = 0L
  private var lossSum = 0.0
  // Here we optimize loss function over log(sigma), intercept and coefficients
  private lazy val gradientSumArray = Array.ofDim[Double](length)

  /** Number of instances aggregated so far. */
  def count: Long = totalCnt

  /** Average loss (negative log-likelihood) over all aggregated instances. */
  def loss: Double = {
    require(totalCnt > 0.0, s"The number of instances should be " +
      s"greater than 0.0, but got $totalCnt.")
    lossSum / totalCnt
  }

  /** Average gradient over all aggregated instances; same layout as the parameters. */
  def gradient: BDV[Double] = {
    require(totalCnt > 0.0, s"The number of instances should be " +
      s"greater than 0.0, but got $totalCnt.")
    new BDV(gradientSumArray.map(_ / totalCnt.toDouble))
  }

  /**
   * Add a new training data to this AFTAggregator, and update the loss and gradient
   * of the objective function.
   *
   * @param data The AFTPoint representation for one data point to be added into this aggregator.
   * @return This AFTAggregator object.
   */
  def add(data: AFTPoint): this.type = {
    val xi = data.features
    val ti = data.label
    val delta = data.censor
    require(ti > 0.0, "The lifetime or label should be greater than 0.")

    val localFeaturesStd = bcFeaturesStd.value
    // margin = x' * beta + intercept, computed on standardized feature values;
    // features with zero standard deviation are skipped.
    val margin = {
      var sum = 0.0
      xi.foreachActive { (index, value) =>
        if (localFeaturesStd(index) != 0.0 && value != 0.0) {
          sum += coefficients(index) * (value / localFeaturesStd(index))
        }
      }
      sum + intercept
    }
    // epsilon_i = (log(t_i) - x'beta) / sigma, as in the class-level scaladoc.
    val epsilon = (math.log(ti) - margin) / sigma

    // Per-instance negative log-likelihood contribution.
    lossSum += delta * math.log(sigma) - delta * epsilon + math.exp(epsilon)

    // Shared factor of all gradient components: (delta_i - e^epsilon_i) / sigma.
    val multiplier = (delta - math.exp(epsilon)) / sigma

    // Slot 0: gradient w.r.t. log(sigma); slot 1: intercept (zero when not fitted).
    gradientSumArray(0) += delta + multiplier * sigma * epsilon
    gradientSumArray(1) += { if (fitIntercept) multiplier else 0.0 }
    // Slots 2..: coefficients, on the standardized feature scale.
    xi.foreachActive { (index, value) =>
      if (localFeaturesStd(index) != 0.0 && value != 0.0) {
        gradientSumArray(index + 2) += multiplier * (value / localFeaturesStd(index))
      }
    }

    totalCnt += 1
    this
  }

  /**
   * Merge another AFTAggregator, and update the loss and gradient
   * of the objective function.
   * (Note that it's in place merging; as a result, `this` object will be modified.)
   *
   * @param other The other AFTAggregator to be merged.
   * @return This AFTAggregator object.
   */
  def merge(other: AFTAggregator): this.type = {
    // An empty aggregator contributes nothing and can be skipped entirely.
    if (other.count != 0) {
      totalCnt += other.totalCnt
      lossSum += other.lossSum

      var i = 0
      while (i < length) {
        this.gradientSumArray(i) += other.gradientSumArray(i)
        i += 1
      }
    }
    this
  }
}
/**
* AFTCostFun implements Breeze's DiffFunction[T] for AFT cost.
* It returns the loss and gradient at a particular point (parameters).
* It's used in Breeze's convex optimization routines.
*/
private class AFTCostFun(
    data: RDD[AFTPoint],
    fitIntercept: Boolean,
    bcFeaturesStd: Broadcast[Array[Double]],
    aggregationDepth: Int) extends DiffFunction[BDV[Double]] {

  /**
   * Computes (average loss, average gradient) at the given parameter vector by
   * tree-aggregating an AFTAggregator over the whole dataset.
   */
  override def calculate(parameters: BDV[Double]): (Double, BDV[Double]) = {
    // Broadcast once per evaluation so every partition shares the parameters.
    val bcParameters = data.context.broadcast(parameters)

    val aggregator = data.treeAggregate(
      new AFTAggregator(bcParameters, fitIntercept, bcFeaturesStd))(
      seqOp = (agg, point) => agg.add(point),
      combOp = (left, right) => left.merge(right),
      depth = aggregationDepth)

    // Safe to destroy: loss/gradient read only the aggregator's own sums.
    bcParameters.destroy()
    (aggregator.loss, aggregator.gradient)
  }
}
/**
* Class that represents the (features, label, censor) of a data point.
*
* @param features List of features for this data point.
* @param label Label for this data point.
* @param censor Indicator of the event has occurred or not. If the value is 1, it means
* the event has occurred i.e. uncensored; otherwise censored.
*/
private[regression] case class AFTPoint(features: Vector, label: Double, censor: Double) {
  // censor is effectively boolean: 1.0 = event observed (uncensored), 0.0 = censored.
  require(censor == 1.0 || censor == 0.0, "censor of class AFTPoint must be 1.0 or 0.0")
}
| WindCanDie/spark | mllib/src/main/scala/org/apache/spark/ml/regression/AFTSurvivalRegression.scala | Scala | apache-2.0 | 23,514 |
package io.github.rollenholt.application.center.notify
/**
* @author rollenholt
*/
trait ApplicationNotify {
  // Intentionally empty: presumably a marker/extension point for notification
  // implementations — no members are defined at this time.
}
| rollenholt/application-center | src/main/scala/io/github/rollenholt/application/center/notify/ApplicationNotify.scala | Scala | gpl-2.0 | 116 |
package com.rasterfoundry.database
import com.rasterfoundry.datamodel.{Order, PageRequest, TaskSessionType}
import org.scalatest.funsuite.AnyFunSuite
import java.util.UUID
/**
 * Doobie type-check suite for the campaign performance/unique-user queries:
 * each test asserts the generated SQL is valid against the test database.
 */
class CampaignPerformanceSpec
    extends AnyFunSuite
    with doobie.scalatest.IOChecker
    with DBTestConfig {

  val transactor = xa

  val campaignId: UUID = UUID.randomUUID

  // The two session-type queries share an identical sort spec except for the
  // name of the rate column, so build the page request from that one input.
  private def performancePage(rateColumn: String): PageRequest =
    PageRequest(
      0,
      10,
      Map(
        "hoursSpent" -> Order.Asc,
        "tasksCompleted" -> Order.Desc,
        rateColumn -> Order.Asc
      )
    )

  test("performance query is valid for label sessions") {
    check(
      CampaignDao.performanceQ(
        campaignId,
        TaskSessionType.LabelSession,
        performancePage("labellingRate")
      )
    )
  }

  test("performance query is valid for validation sessions") {
    check(
      CampaignDao.performanceQ(
        campaignId,
        TaskSessionType.ValidateSession,
        performancePage("validationRate")
      )
    )
  }

  test("unique users query is valid") {
    check(CampaignDao.uniqueUsersQ(campaignId))
  }
}
| raster-foundry/raster-foundry | app-backend/db/src/test/scala/com/azavea/rf/database/CampaignPerformanceSpec.scala | Scala | apache-2.0 | 1,249 |
/*
* Copyright 2013 Denis Bardadym
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.btd
package object jade {
/** Package-internal shorthand: `?[T]` reads as "optional T". */
private[jade] type ?[T] = Option[T]

/** Some(str) for a non-null, non-empty string; None otherwise. */
private[jade] def empty(str: String) = Option(str).filter(_.nonEmpty)
} | btd/scala-jade | src/main/scala/com/github/btd/package.scala | Scala | apache-2.0 | 806 |
package skinny.session.servlet
import javax.servlet.http.{ HttpServletRequest, HttpServletRequestWrapper, HttpServletResponse, HttpUpgradeHandler }
import javax.servlet.{ ServletRequest, ServletResponse }
/**
* Http request wrapper for SkinnySession
*/
case class SkinnyHttpRequestWrapper(
    request: HttpServletRequest,
    session: SkinnyHttpSessionWrapper
) extends HttpServletRequestWrapper(request) {

  // Session accessors always return the Skinny-managed session wrapper, which
  // is constructed before this wrapper; the `create` flag is therefore ignored.
  override def getSession(create: Boolean) = session // already created
  override def getSession = session

  // Everything below is verbatim delegation to the wrapped request.

  // --- headers and cookies ---
  override def getAuthType = request.getAuthType
  override def getCookies = request.getCookies
  override def getDateHeader(name: String) = request.getDateHeader(name)
  override def getHeader(name: String) = request.getHeader(name)
  override def getHeaders(name: String) = request.getHeaders(name)
  override def getHeaderNames = request.getHeaderNames
  override def getIntHeader(name: String) = request.getIntHeader(name)

  // --- request line and paths ---
  override def getMethod = request.getMethod
  override def getPathInfo = request.getPathInfo
  override def getPathTranslated = request.getPathTranslated
  override def getContextPath = request.getContextPath
  override def getQueryString = request.getQueryString

  // --- security, session id, auth actions, multipart ---
  override def getRemoteUser = request.getRemoteUser
  override def isUserInRole(role: String) = request.isUserInRole(role)
  override def getUserPrincipal() = request.getUserPrincipal()
  override def getRequestedSessionId = request.getRequestedSessionId
  override def getRequestURI = request.getRequestURI
  override def getRequestURL = request.getRequestURL
  override def getServletPath = request.getServletPath
  override def isRequestedSessionIdValid = request.isRequestedSessionIdValid
  override def isRequestedSessionIdFromCookie = request.isRequestedSessionIdFromCookie
  override def isRequestedSessionIdFromURL = request.isRequestedSessionIdFromURL
  // isRequestedSessionIdFromUrl is deprecated in HttpServletRequest; delegate
  // to the non-deprecated URL variant of the wrapped request.
  override def isRequestedSessionIdFromUrl = request.isRequestedSessionIdFromURL
  override def authenticate(response: HttpServletResponse) = request.authenticate(response)
  override def login(username: String, password: String) = request.login(username, password)
  override def logout() = request.logout
  override def getParts = request.getParts
  override def getPart(name: String) = request.getPart(name)

  // --- attributes, parameters, body and connection info ---
  override def getAttribute(name: String) = request.getAttribute(name)
  override def getAttributeNames = request.getAttributeNames
  override def getCharacterEncoding = request.getCharacterEncoding
  override def setCharacterEncoding(env: String) = request.setCharacterEncoding(env)
  override def getContentLength = request.getContentLength
  override def getContentType = request.getContentType
  override def getInputStream = request.getInputStream
  override def getParameter(name: String) = request.getParameter(name)
  override def getParameterNames = request.getParameterNames
  override def getParameterValues(name: String) = request.getParameterValues(name)
  override def getParameterMap = request.getParameterMap
  override def getProtocol = request.getProtocol
  override def getScheme = request.getScheme
  override def getServerName = request.getServerName
  override def getServerPort = request.getServerPort
  override def getReader = request.getReader
  override def getRemoteAddr = request.getRemoteAddr
  override def getRemoteHost = request.getRemoteHost
  override def setAttribute(name: String, o: Any) = request.setAttribute(name, o)
  override def removeAttribute(name: String) = request.removeAttribute(name)
  override def getLocale = request.getLocale
  override def getLocales = request.getLocales
  override def isSecure = request.isSecure
  override def getRequestDispatcher(path: String) = request.getRequestDispatcher(path)
  // Deprecated. As of Version 2.1 of the Java Servlet API, use ServletContext#getRealPath instead.
  override def getRealPath(path: String) = request.getRealPath(path)
  override def getRemotePort = request.getRemotePort
  override def getLocalName = request.getLocalName
  override def getLocalAddr = request.getLocalAddr
  override def getLocalPort = request.getLocalPort

  // --- servlet 3.x: async, upgrade, misc ---
  override def getServletContext = request.getServletContext
  override def startAsync() = request.startAsync
  override def startAsync(servletRequest: ServletRequest, servletResponse: ServletResponse) =
    request.startAsync(servletRequest, servletResponse)
  override def isAsyncStarted = request.isAsyncStarted
  override def isAsyncSupported = request.isAsyncSupported
  override def getAsyncContext = request.getAsyncContext
  override def getDispatcherType = request.getDispatcherType
  override def changeSessionId(): String = request.changeSessionId()
  override def upgrade[T <: HttpUpgradeHandler](handlerClass: Class[T]): T = request.upgrade(handlerClass)
  override def getContentLengthLong: Long = request.getContentLengthLong
}
| seratch/skinny-framework | framework/src/main/scala/skinny/session/servlet/SkinnyHttpRequestWrapper.scala | Scala | mit | 6,126 |
package esp.eventuate
import akka.actor.ActorRef
import com.rbmhtechnology.eventuate.EventsourcedActor
import esp.Api
import esp.model._
// Eventuate-backed actor for a single user. Both handlers are unimplemented
// stubs: invoking either throws NotImplementedError (???).
class UserActor(override val id: String, override val eventLog: ActorRef) extends EventsourcedActor {
  // Stub: should replay persisted events into actor state.
  override def onEvent: Receive = ???
  // Stub: should validate commands and persist the resulting events.
  override def onCommand: Receive = ???
}
// Skeleton Eventuate implementation of the Api trait. Every operation is an
// unimplemented stub (???) that throws NotImplementedError when called.
trait EventuateApi extends Api {
  override def createUser(user: User): UserId = ???
  override def listAccounts(id: UserId): Seq[AccountNumber] = ???
  override def createAccount(id: UserId): Unit = ???
  override def getUser(id: UserId): Option[User] = ???
  override def transferMoney(from: AccountNumber, to: AccountNumber, amount: BigDecimal): Unit = ???
  override def setAccountName(accountNumber: AccountNumber, newName: String): Unit = ???
  override def listTransactionHistory(id: UserId): Seq[Transaction] = ???
  override def depositMoney(accountNumber: AccountNumber, amount: BigDecimal): Unit = ???
  override def changeEmail(id: UserId, email: Option[String]): Unit = ???
  override def getAccount(accountNumber: AccountNumber): Option[Account] = ???
}
| lukasz-golebiewski/event-sourcing-playground | src/main/scala/esp/eventuate/EventuateApi.scala | Scala | gpl-2.0 | 1,100 |
package akka.analytics.cassandra
import akka.actor._
import akka.persistence.PersistentActor
import akka.serialization.Serializer
import akka.testkit._
import com.typesafe.config.ConfigFactory
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.scalatest._
import scala.concurrent.duration._
object CustomSerializationSpec {

  // Actor-system config: binds ExamplePayload to the custom serializer below
  // and points the persistence plugins at Cassandra on port 9142.
  val akkaConfig = ConfigFactory.parseString(
    """
      |akka.actor.serializers {
      | example = "akka.analytics.cassandra.CustomSerializationSpec$ExamplePayloadSerializer"
      |}
      |akka.actor.serialization-bindings {
      | "akka.analytics.cassandra.CustomSerializationSpec$ExamplePayload" = example
      |}
      |akka.persistence.journal.plugin = "cassandra-journal"
      |akka.persistence.snapshot-store.plugin = "cassandra-snapshot-store"
      |akka.test.single-expect-default = 10s
      |cassandra-journal.port = 9142
      |cassandra-snapshot-store.port = 9142
      |cassandra-journal.replication-strategy = NetworkTopologyStrategy
      |cassandra-journal.data-center-replication-factors = ["dc1:1"]
    """.stripMargin)

  // Local Spark context configured against the same Cassandra instance.
  val sparkConfig = new SparkConf()
    .setAppName("CassandraExample")
    .setMaster("local[4]")
    .set("spark.cassandra.connection.host", "127.0.0.1")
    .set("spark.cassandra.connection.port", "9142")

  case class ExamplePayload(value: String)

  // Tagging serializer: appends "-ser" on write and "-deser" on read so the
  // test can observe that the custom serializer was actually used.
  class ExamplePayloadSerializer(system: ExtendedActorSystem) extends Serializer {
    val ExamplePayloadClass = classOf[ExamplePayload]

    override def identifier: Int = 44085
    override def includeManifest: Boolean = true

    override def toBinary(o: AnyRef): Array[Byte] = o match {
      case ExamplePayload(value) =>
        s"${value}-ser".getBytes("UTF-8")
    }

    // manifest.get is safe here: includeManifest = true, so akka supplies it.
    override def fromBinary(bytes: Array[Byte], manifest: Option[Class[_]]): AnyRef = manifest.get match {
      case ExamplePayloadClass =>
        val value = new String(bytes, "UTF-8")
        ExamplePayload(s"${value}-deser")
    }
  }

  // Persists every received message and echoes the persisted event to `probe`.
  class ExampleActor(probe: ActorRef) extends PersistentActor {
    override val persistenceId: String = "test"

    override def receiveCommand: Receive = {
      case msg => persist(msg) {
        case evt => probe ! evt
      }
    }

    // Recovery is a no-op: this actor keeps no state to rebuild.
    override def receiveRecover: Receive = {
      case _ =>
    }
  }
}
import CustomSerializationSpec._
class CustomSerializationSpec extends TestKit(ActorSystem("test", akkaConfig)) with WordSpecLike with Matchers with CassandraLifecycle {

  // Spark context configured with the same serializer bindings as the actor
  // system, so ExamplePayload can be decoded when reading the journal.
  val jsc: JournalSparkContext = new SparkContext(sparkConfig).withSerializerConfig(akkaConfig)

  "akka-analytics-cassandra" must {
    "support custom serialization" in {
      val actor = system.actorOf(Props(new ExampleActor(testActor)))

      actor ! ExamplePayload("a")
      expectMsg(ExamplePayload("a"))

      val rdd: RDD[(JournalKey, Any)] = jsc.eventTable().cache()

      // The payload goes through the custom serializer on write ("-ser") and
      // again on read ("-deser"), so both suffixes appear in the journal entry.
      val actual = rdd.collect().head
      val expected = (JournalKey("test", 0, 1), ExamplePayload("a-ser-deser"))

      actual should be(expected)
    }
  }
}
| zenaptix/akka-analytics | akka-analytics-cassandra/src/test/scala/akka/analytics/cassandra/CustomSerializationSpec.scala | Scala | apache-2.0 | 3,081 |
import cats.Monad
import cats.data.State
import diesel.diesel
/**
* Note that we're using an abstract class with a Monad constraint on F, thus
* allowing us to directly use F as a Monad inside our DSL declaration
*/
@diesel
abstract class KVStore[F[_]: Monad] {

  /** Stores `o` under key `k`, replacing any existing value. */
  def put[A](k: String, o: A): F[Unit]

  /** Retrieves the value stored under `k`, or None when the key is absent. */
  def get[A](k: String): F[Option[A]]

  /** Removes the entry for `k`, if any. */
  def delete(k: String): F[Unit]

  /**
   * Reads the value at `k`, applies `f`, and writes the result back.
   * A missing key is left untouched (the effect is a pure no-op).
   */
  def update[A, B](k: String, f: A => B): F[Unit] = {
    import cats.implicits._
    get[A](k).flatMap {
      case Some(v) => {
        val b = f(v)
        put(k, b)
      }
      case None => Monad[F].pure(())
    }
  }
}
@SuppressWarnings(Array("org.wartremover.warts.Any"))
object KVStore {

  /** State-monad effect over an untyped in-memory key/value map. */
  type KVStoreState[A] = State[Map[String, Any], A]

  /** Interpreter backing KVStore with an immutable Map carried in State. */
  implicit object KVSStateInterpreter extends KVStore[KVStoreState] {
    def put[A](k: String, o: A): KVStoreState[Unit] = State.modify(store => store + (k -> o))
    def get[A](k: String): KVStoreState[Option[A]] = State.inspect(store => store.get(k).map(_.asInstanceOf[A]))
    def delete(k: String): KVStoreState[Unit] = State.modify(store => store - k)
  }
}
| lloydmeta/diesel | examples/src/main/scala/KVStore.scala | Scala | mit | 1,067 |
package com.flurdy.socialcrowd.output
import scala.collection.mutable.{ListBuffer, MutableList}
trait CrowdOutput {
  /** Emits one line of output and records it for later retrieval. */
  def printLine(line: String)
  /** Returns every line printed so far, in print order. */
  def getPreviousLines: List[String]
}
/**
 * CrowdOutput that echoes each line to stdout while recording it so the full
 * history can be retrieved via getPreviousLines.
 */
class StandardCrowdOutput extends CrowdOutput {

  // ListBuffer replaces MutableList, which is deprecated and was removed in
  // Scala 2.13; it provides the same constant-time append and toList.
  val outputLines = new ListBuffer[String]

  /** Records the line, then prints it to stdout. */
  def printLine(line: String) {
    outputLines += line
    println(line)
  }

  /** Immutable snapshot of all printed lines, oldest first. */
  def getPreviousLines: List[String] = outputLines.toList
}
| flurdy/socialcrowd | src/main/scala/output/crowdOutput.scala | Scala | mit | 421 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.Equality
import org.scalactic.Uniformity
import org.scalactic.Prettifier
import org.scalactic.StringNormalizations._
import SharedHelpers._
import FailureMessages.decorateToStringValue
import Matchers._
import exceptions.TestFailedException
class ListShouldContainAtMostOneOfLogicalOrSpec extends FreeSpec {
private val prettifier = Prettifier.default
val upperCaseStringEquality =
new Equality[String] {
def areEqual(a: String, b: Any): Boolean = upperCase(a) == upperCase(b)
}
val invertedListOfStringEquality =
new Equality[List[String]] {
def areEqual(a: List[String], b: Any): Boolean = a != b
}
private def upperCase(value: Any): Any =
value match {
case l: List[_] => l.map(upperCase(_))
case s: String => s.toUpperCase
case c: Char => c.toString.toUpperCase.charAt(0)
case (s1: String, s2: String) => (s1.toUpperCase, s2.toUpperCase)
case e: java.util.Map.Entry[_, _] =>
(e.getKey, e.getValue) match {
case (k: String, v: String) => Entry(k.toUpperCase, v.toUpperCase)
case _ => value
}
case _ => value
}
//ADDITIONAL//
// Name of this source file; passed to checkMessageStackDepth to verify reported stack-trace locations.
val fileName: String = "ListShouldContainAtMostOneOfLogicalOrSpec.scala"
"a List" - {
val fumList: List[String] = List("fum", "foe")
val toList: List[String] = List("to", "you")
"when used with (contain atMostOneOf (...) or contain atMostOneOf (...)) syntax" - {
// Verifies `or` semantics: the assertion passes if either operand holds, and fails only
// when both fail, joining both failure messages with ", and ". The thisLineNumber - N
// arithmetic below depends on exact line spacing — do not insert lines inside the tests.
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (contain atMostOneOf ("fee", "fie", "foe", "fam") or contain atMostOneOf("fie", "fee", "fam", "foe"))
fumList should (contain atMostOneOf ("fee", "fie", "foe", "fam") or contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (contain atMostOneOf ("fee", "fie", "foe", "fum") or contain atMostOneOf("fie", "fee", "fam", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (contain atMostOneOf ("fee", "fie", "foe", "fum") or contain atMostOneOf ("fie", "fee", "fum", "foe"))
}
checkMessageStackDepth(e1, Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\"") + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fie\\", \\"fee\\", \\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FAM") or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))
fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FAM") or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or (contain atMostOneOf ("FIE", "FEE", "FUM", "FOE")))
}
checkMessageStackDepth(e1, Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FAM") or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FAM") or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
(fumList should (contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FAM ") or contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FAM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain atMostOneOf ("fee", "fie", "foe", "fie", "fum") or contain atMostOneOf("fie", "fee", "fam", "foe"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
fumList should (contain atMostOneOf ("fie", "fee", "fam", "foe") or contain atMostOneOf("fee", "fie", "foe", "fie", "fum"))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (equal (...) or contain atMostOneOf (...)) syntax" - {
// NOTE(review): description corrected — it said "(equal (...) and contain oneOf (...))",
// but the assertions below use `or` with `atMostOneOf` (this is the LogicalOr spec).
// thisLineNumber - N deltas depend on exact spacing — no lines inserted inside the tests.
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (equal (fumList) or contain atMostOneOf("fie", "fee", "fam", "foe"))
fumList should (equal (toList) or contain atMostOneOf("fie", "fee", "fam", "foe"))
fumList should (equal (fumList) or contain atMostOneOf("fie", "fee", "fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (equal (toList) or contain atMostOneOf ("fie", "fee", "fum", "foe"))
}
checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fie\\", \\"fee\\", \\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (equal (fumList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))
fumList should (equal (toList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))
fumList should (equal (fumList) or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (equal (toList) or (contain atMostOneOf ("FIE", "FEE", "FUM", "FOE")))
}
checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (equal (toList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
(fumList should (equal (fumList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
(fumList should (equal (toList) or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (equal (fumList) or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
(fumList should (equal (toList) or contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FAM "))) (decided by invertedListOfStringEquality, after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (equal (fumList) or contain atMostOneOf("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (be (...) or contain atMostOneOf (...)) syntax" - {
// NOTE(review): description corrected — it said "(be (...) and contain theMostOneOf (...))";
// "theMostOneOf" was a typo and the assertions below use `or` with `atMostOneOf`.
// thisLineNumber - N deltas depend on exact spacing — no lines inserted inside the tests.
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (be (fumList) or contain atMostOneOf("fie", "fee", "fam", "foe"))
fumList should (be (toList) or contain atMostOneOf("fie", "fee", "fam", "foe"))
fumList should (be (fumList) or contain atMostOneOf("fie", "fee", "fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (be (toList) or contain atMostOneOf ("fie", "fee", "fum", "foe"))
}
checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fie\\", \\"fee\\", \\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (be (fumList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))
fumList should (be (toList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))
fumList should (be (fumList) or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (be (toList) or (contain atMostOneOf ("FIE", "FEE", "FUM", "FOE")))
}
checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (be (fumList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality)
(fumList should (be (toList) or contain atMostOneOf ("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality)
(fumList should (be (fumList) or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (be (toList) or contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", and " + Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
(fumList should (be (fumList) or contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FAM "))) (after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (be (fumList) or contain atMostOneOf("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (contain atMostOneOf (...) or be (...)) syntax" - {
// NOTE(review): description corrected — it said "(contain oneOf (...) and be (...))",
// but the assertions below use `contain atMostOneOf` combined with `or be`.
// thisLineNumber - N deltas depend on exact spacing — no lines inserted inside the tests.
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (contain atMostOneOf("fie", "fee", "fam", "foe") or be (fumList))
fumList should (contain atMostOneOf("fie", "fee", "fum", "foe") or be (fumList))
fumList should (contain atMostOneOf("fie", "fee", "fam", "foe") or be (toList))
val e1 = intercept[TestFailedException] {
fumList should (contain atMostOneOf ("fee", "fie", "foe", "fum") or be (toList))
}
checkMessageStackDepth(e1, Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\"") + ", and " + Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (contain atMostOneOf ("FIE", "FEE", "FAM", "FOE") or be (fumList))
fumList should (contain atMostOneOf ("FIE", "FEE", "FUM", "FOE") or be (fumList))
fumList should (contain atMostOneOf ("FIE", "FEE", "FAM", "FOE") or be (toList))
val e1 = intercept[TestFailedException] {
fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or be (toList))
}
checkMessageStackDepth(e1, Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", and " + Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (contain atMostOneOf ("FIE", "FEE", "FAM", "FOE") or be (fumList))) (decided by upperCaseStringEquality)
(fumList should (contain atMostOneOf ("FIE", "FEE", "FUM", "FOE") or be (fumList))) (decided by upperCaseStringEquality)
(fumList should (contain atMostOneOf ("FIE", "FEE", "FAM", "FOE") or be (toList))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or be (toList))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.didNotContainAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", and " + Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
(fumList should (contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FaM ") or be (fumList))) (after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain atMostOneOf("fee", "fie", "foe", "fie", "fum") or be (fumList))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (not contain atMostOneOf (...) or not contain atMostOneOf (...)) syntax" - {
// NOTE(review): description corrected — it said "and", but the assertions below use `or`.
// thisLineNumber - N deltas depend on exact spacing — no lines inserted inside the tests.
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (not contain atMostOneOf ("fee", "fie", "foe", "fum") or not contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (not contain atMostOneOf ("fee", "fie", "foe", "fuu") or not contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (not contain atMostOneOf ("fee", "fie", "foe", "fum") or not contain atMostOneOf("fie", "fee", "fuu", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not contain atMostOneOf ("fee", "fie", "foe", "fuu") or not contain atMostOneOf ("fee", "fie", "foe", "fuu"))
}
checkMessageStackDepth(e1, Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fuu\\"") + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fuu\\""), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU") or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or not contain atMostOneOf ("FIE", "FEE", "FUU", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU") or not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU"))
}
checkMessageStackDepth(e1, Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\"") + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\""), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU") or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUM") or not contain atMostOneOf ("FIE", "FEE", "FUU", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU") or not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\"") + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\""), fileName, thisLineNumber - 2)
// NOTE(review): unlike the sibling "not" sections, the next line uses positive `contain`;
// it passes as written — presumably a leftover from the positive section; confirm intent.
(fumList should (contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FAM ") or contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FAM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not contain atMostOneOf ("fee", "fie", "foe", "fie", "fum") or not contain atMostOneOf("fie", "fee", "fum", "foe"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
fumList should (not contain atMostOneOf ("fie", "fee", "fum", "foe") or not contain atMostOneOf("fee", "fie", "foe", "fie", "fum"))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (not equal (...) or not contain atMostOneOf (...)) syntax" - {
// NOTE(review): description corrected — it said "(not equal (...) and not contain oneOf (...))",
// but the assertions below use `or` with `not contain atMostOneOf`.
// thisLineNumber - N deltas depend on exact spacing — no lines inserted inside the tests.
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (not equal (toList) or not contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (not equal (fumList) or not contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (not equal (toList) or not contain atMostOneOf("fie", "fee", "fuu", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not equal (fumList) or not contain atMostOneOf ("fee", "fie", "foe", "fuu"))
}
checkMessageStackDepth(e1, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fuu\\""), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (not equal (toList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (not equal (fumList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (not equal (toList) or not contain atMostOneOf ("FIE", "FEE", "FUU", "FOE"))
val e2 = intercept[TestFailedException] {
fumList should (not equal (fumList) or (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU")))
}
checkMessageStackDepth(e2, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\""), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (not equal (fumList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
(fumList should (not equal (toList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
(fumList should (not equal (fumList) or not contain atMostOneOf ("FIE", "FEE", "FUU", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not equal (toList) or not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\""), fileName, thisLineNumber - 2)
(fumList should (not contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FUM ") or not contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not equal (toList) or not contain atMostOneOf("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (not be (...) or not contain atMostOneOf (...)) syntax" - {
// NOTE(review): description corrected — it said "(not be (...) and not contain oneOf (...))",
// but the assertions below use `or` with `not contain atMostOneOf`.
// thisLineNumber - N deltas depend on exact spacing — no lines inserted inside the tests.
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
fumList should (not be (toList) or not contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (not be (fumList) or not contain atMostOneOf("fie", "fee", "fum", "foe"))
fumList should (not be (toList) or not contain atMostOneOf("fee", "fie", "foe", "fuu"))
val e1 = intercept[TestFailedException] {
fumList should (not be (fumList) or not contain atMostOneOf ("fee", "fie", "foe", "fuu"))
}
checkMessageStackDepth(e1, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fuu\\""), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
fumList should (not be (toList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (not be (fumList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))
fumList should (not be (toList) or not contain atMostOneOf ("FIE", "FEE", "FUU", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (not be (fumList) or (not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU")))
}
checkMessageStackDepth(e1, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\""), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(fumList should (not be (toList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality)
(fumList should (not be (fumList) or not contain atMostOneOf ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality)
(fumList should (not be (toList) or not contain atMostOneOf ("FIE", "FEE", "FUU", "FOE"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not be (fumList) or not contain atMostOneOf ("FEE", "FIE", "FOE", "FUU"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", and " + Resources.containedAtMostOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUU\\""), fileName, thisLineNumber - 2)
(fumList should (not contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FUM ") or not contain atMostOneOf (" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not be (toList) or not contain atMostOneOf("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
}
"collection of Lists" - {
// Fixture collections for the `all (...)` / `atLeast (...)` inspector tests below.
val list1s: Vector[List[Int]] = Vector(List(1, 2), List(1, 2), List(1, 2))
// Last element differs so inspections can be made to fail at index 2.
val lists: Vector[List[Int]] = Vector(List(1, 2), List(1, 2), List(2, 3))
val nils: Vector[List[Int]] = Vector(Nil, Nil, Nil)
val listsNil: Vector[List[Int]] = Vector(List(1), List(1), Nil)
val hiLists: Vector[List[String]] = Vector(List("hi", "he"), List("hi", "he"), List("hi", "he"))
val toLists: Vector[List[String]] = Vector(List("to", "you"), List("to", "you"), List("to", "you"))
/** Builds the failure message expected from an `all` inspection when the element
  * at `index` fails with `message`, reported at source line `lineNumber`, over
  * the inspected collection `left`. */
def allErrMsg(index: Int, message: String, lineNumber: Int, left: Any): String = {
  // Detail line identifying the failing index and its source location.
  val detail = s"  at index $index, $message ($fileName:$lineNumber) \\n"
  "'all' inspection failed, because: \\n" + detail + "in " + decorateToStringValue(prettifier, left)
}
"when used with (contain atMostOneOf (..) or contain atMostOneOf (..)) syntax" - {
// NOTE(review): description corrected — it said "(contain oneOf (..) and contain oneOf (..))",
// but the assertions below use `or` with `atMostOneOf` on collections of Lists.
// thisLineNumber - N deltas depend on exact spacing — no lines inserted inside the tests.
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
all (list1s) should (contain atMostOneOf (1, 6, 8) or contain atMostOneOf (1, 3, 4))
all (list1s) should (contain atMostOneOf (1, 2, 3) or contain atMostOneOf (1, 3, 4))
all (list1s) should (contain atMostOneOf (1, 6, 8) or contain atMostOneOf (1, 2, 3))
atLeast (2, lists) should (contain atMostOneOf (3, 2, 5) or contain atMostOneOf (2, 3, 4))
atLeast (2, lists) should (contain atMostOneOf (1, 2, 3) or contain atMostOneOf (2, 3, 4))
atLeast (2, lists) should (contain atMostOneOf (3, 2, 5) or contain atMostOneOf (1, 2, 3))
val e1 = intercept[TestFailedException] {
all (lists) should (contain atMostOneOf (2, 3, 4) or contain atMostOneOf (4, 3, 2))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, lists(2)) + " did not contain at most one of (2, 3, 4), and " + decorateToStringValue(prettifier, lists(2)) + " did not contain at most one of (4, 3, 2)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
all (hiLists) should (contain atMostOneOf ("HI", "HO") or contain atMostOneOf ("HO", "HI"))
all (hiLists) should (contain atMostOneOf ("HI", "HE") or contain atMostOneOf ("HO", "HI"))
all (hiLists) should (contain atMostOneOf ("HI", "HO") or contain atMostOneOf ("HI", "HE"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (contain atMostOneOf ("HI", "HE") or contain atMostOneOf ("HE", "HI"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " did not contain at most one of (\\"HI\\", \\"HE\\"), and " + decorateToStringValue(prettifier, hiLists(0)) + " did not contain at most one of (\\"HE\\", \\"HI\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(all (hiLists) should (contain atMostOneOf ("HI", "HO") or contain atMostOneOf ("HO", "HI"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(all (hiLists) should (contain atMostOneOf ("HI", "HE") or contain atMostOneOf ("HO", "HI"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
(all (hiLists) should (contain atMostOneOf ("HI", "HO") or contain atMostOneOf ("HI", "HE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (contain atMostOneOf ("HI", "HE") or contain atMostOneOf ("HE", "HI"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " did not contain at most one of (\\"HI\\", \\"HE\\"), and " + decorateToStringValue(prettifier, hiLists(0)) + " did not contain at most one of (\\"HE\\", \\"HI\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (contain atMostOneOf (1, 2, 2, 3) or contain atMostOneOf (1, 3, 4))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
all (list1s) should (contain atMostOneOf (1, 3, 4) or contain atMostOneOf (1, 2, 2, 3))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
"when used with (be (...) or contain atMostOneOf (...)) syntax" - {
// NOTE(review): description corrected — it said "(be (...) and contain oneOf (...))",
// but the assertions below use `or` with `atMostOneOf` on collections of Lists.
// thisLineNumber - N deltas depend on exact spacing — no lines inserted inside the tests.
"should do nothing if valid, else throw a TFE with an appropriate error message" in {
all (list1s) should (be (List(1, 2)) or contain atMostOneOf (1, 6, 8))
all (list1s) should (be (List(2, 3)) or contain atMostOneOf (1, 6, 8))
all (list1s) should (be (List(1, 2)) or contain atMostOneOf (1, 2, 3))
val e1 = intercept[TestFailedException] {
all (list1s) should (be (List(2, 3)) or contain atMostOneOf (1, 2, 3))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, list1s(0)) + " was not equal to " + decorateToStringValue(prettifier, List(2, 3)) + ", and " + decorateToStringValue(prettifier, list1s(0)) + " did not contain at most one of (1, 2, 3)", thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
}
"should use the implicit Equality in scope" in {
implicit val ise = upperCaseStringEquality
all (hiLists) should (be (List("hi", "he")) or contain atMostOneOf ("HI", "HO"))
all (hiLists) should (be (List("HO")) or contain atMostOneOf ("HI", "HO"))
all (hiLists) should (be (List("hi", "he")) or contain atMostOneOf ("HI", "HE"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (be (List("HO")) or contain atMostOneOf ("HI", "HE"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " was not equal to " + decorateToStringValue(prettifier, List("HO")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " did not contain at most one of (\\"HI\\", \\"HE\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
"should use an explicitly provided Equality" in {
(all (hiLists) should (be (List("hi", "he")) or contain atMostOneOf ("HI", "HO"))) (decided by upperCaseStringEquality)
(all (hiLists) should (be (List("HO")) or contain atMostOneOf ("HI", "HO"))) (decided by upperCaseStringEquality)
(all (hiLists) should (be (List("hi", "he")) or contain atMostOneOf ("HI", "HE"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (be (List("HO")) or contain atMostOneOf ("HI", "HE"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " was not equal to " + decorateToStringValue(prettifier, List("HO")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " did not contain at most one of (\\"HI\\", \\"HE\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
"should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (be (List(1, 2)) or contain atMostOneOf (1, 2, 2, 3))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atMostOneOfDuplicate))
}
}
// Fixed copy-pasted description: this section exercises
// `not contain atMostOneOf (..) or not contain atMostOneOf (..)`,
// not `not contain oneOf (..) and ...` as the old text claimed.
"when used with (not contain atMostOneOf (..) or not contain atMostOneOf (..))" - {

  "should do nothing if valid, else throw a TFE with an appropriate error message" in {
    all (list1s) should (not contain atMostOneOf (3, 2, 1) or not contain atMostOneOf (1, 2, 3))
    all (list1s) should (not contain atMostOneOf (1, 2, 8) or not contain atMostOneOf (1, 2, 3))
    all (list1s) should (not contain atMostOneOf (3, 2, 1) or not contain atMostOneOf (1, 2, 8))
    // The failing statement must stay exactly two lines above the check (thisLineNumber - 2).
    val e1 = intercept[TestFailedException] {
      all (lists) should (not contain atMostOneOf (1, 2, 8) or not contain atMostOneOf (8, 2, 1))
    }
    checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, lists(2)) + " contained at most one of (1, 2, 8), and " + decorateToStringValue(prettifier, lists(2)) + " contained at most one of (8, 2, 1)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
  }

  "should use the implicit Equality in scope" in {
    implicit val ise = upperCaseStringEquality
    all (hiLists) should (not contain atMostOneOf ("HI", "HE") or not contain atMostOneOf ("HE", "HI"))
    all (hiLists) should (not contain atMostOneOf ("hi", "he") or not contain atMostOneOf ("HE", "HI"))
    all (hiLists) should (not contain atMostOneOf ("HI", "HE") or not contain atMostOneOf ("hi", "he"))
    val e1 = intercept[TestFailedException] {
      all (hiLists) should (not contain atMostOneOf ("HE", "HEY", "HOWDY") or not contain atMostOneOf ("HE", "HEY", "HOWDY"))
    }
    checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " contained at most one of (\\"HE\\", \\"HEY\\", \\"HOWDY\\"), and " + decorateToStringValue(prettifier, hiLists(0)) + " contained at most one of (\\"HE\\", \\"HEY\\", \\"HOWDY\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
  }

  "should use an explicitly provided Equality" in {
    (all (hiLists) should (not contain atMostOneOf ("HI", "HE") or not contain atMostOneOf ("HE", "HI"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
    (all (hiLists) should (not contain atMostOneOf ("hi", "he") or not contain atMostOneOf ("HE", "HI"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
    (all (hiLists) should (not contain atMostOneOf ("HI", "HE") or not contain atMostOneOf ("hi", "he"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
    val e1 = intercept[TestFailedException] {
      (all (hiLists) should (not contain atMostOneOf ("HE", "HEY", "HOWDY") or not contain atMostOneOf ("HE", "HEY", "HOWDY"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
    }
    checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " contained at most one of (\\"HE\\", \\"HEY\\", \\"HOWDY\\"), and " + decorateToStringValue(prettifier, hiLists(0)) + " contained at most one of (\\"HE\\", \\"HEY\\", \\"HOWDY\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
  }

  "should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
    val e1 = intercept[exceptions.NotAllowedException] {
      all (list1s) should (not contain atMostOneOf (1, 2, 2, 3) or not contain atMostOneOf (1, 2, 3))
    }
    e1.failedCodeFileName.get should be (fileName)
    e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
    e1.message should be (Some(Resources.atMostOneOfDuplicate))
    // Duplicates on the second branch must be rejected as well.
    val e2 = intercept[exceptions.NotAllowedException] {
      all (list1s) should (not contain atMostOneOf (1, 2, 3) or not contain atMostOneOf (1, 2, 2, 3))
    }
    e2.failedCodeFileName.get should be (fileName)
    e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
    e2.message should be (Some(Resources.atMostOneOfDuplicate))
  }
}
// Fixed description: the tested combinator is `or`, not `and`.
"when used with (not be (...) or not contain atMostOneOf (...))" - {

  "should do nothing if valid, else throw a TFE with an appropriate error message" in {
    all (list1s) should (not be (List(2, 3)) or not contain atMostOneOf (1, 2, 3))
    all (list1s) should (not be (List(1, 2)) or not contain atMostOneOf (1, 2, 3))
    all (list1s) should (not be (List(2, 3)) or not contain atMostOneOf (2, 3, 4))
    // The failing statement must stay exactly two lines above the check (thisLineNumber - 2).
    val e1 = intercept[TestFailedException] {
      all (list1s) should (not be (List(1, 2)) or not contain atMostOneOf (2, 3, 4))
    }
    checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, list1s(0)) + " was equal to " + decorateToStringValue(prettifier, List(1, 2)) + ", and " + decorateToStringValue(prettifier, list1s(0)) + " contained at most one of (2, 3, 4)", thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
  }

  "should use the implicit Equality in scope" in {
    implicit val ise = upperCaseStringEquality
    all (hiLists) should (not be (List("ho")) or not contain atMostOneOf ("HI", "HE"))
    all (hiLists) should (not be (List("hi", "he")) or not contain atMostOneOf ("HE", "HI"))
    all (hiLists) should (not be (List("ho")) or not contain atMostOneOf ("HI", "HE"))
    val e1 = intercept[TestFailedException] {
      all (hiLists) should (not be (List("hi", "he")) or not contain atMostOneOf ("HI", "HO"))
    }
    checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " was equal to " + decorateToStringValue(prettifier, List("hi", "he")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " contained at most one of (\\"HI\\", \\"HO\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
  }

  "should use an explicitly provided Equality" in {
    (all (hiLists) should (not be (List("ho")) or not contain atMostOneOf ("HI", "HE"))) (decided by upperCaseStringEquality)
    (all (hiLists) should (not be (List("hi", "he")) or not contain atMostOneOf ("HE", "HI"))) (decided by upperCaseStringEquality)
    (all (hiLists) should (not be (List("ho")) or not contain atMostOneOf ("HI", "HE"))) (decided by upperCaseStringEquality)
    val e1 = intercept[TestFailedException] {
      (all (hiLists) should (not be (List("hi", "he")) or not contain atMostOneOf ("HI", "HO"))) (decided by upperCaseStringEquality)
    }
    checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, hiLists(0)) + " was equal to " + decorateToStringValue(prettifier, List("hi", "he")) + ", and " + decorateToStringValue(prettifier, hiLists(0)) + " contained at most one of (\\"HI\\", \\"HO\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
  }

  "should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value" in {
    val e1 = intercept[exceptions.NotAllowedException] {
      all (list1s) should (not be (List(2, 3)) or not contain atMostOneOf (1, 2, 2, 3))
    }
    e1.failedCodeFileName.get should be (fileName)
    e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
    e1.message should be (Some(Resources.atMostOneOfDuplicate))
  }
}
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/ListShouldContainAtMostOneOfLogicalOrSpec.scala | Scala | apache-2.0 | 44,152 |
package leo.datastructures.impl.orderings
import leo.Configuration
import leo.datastructures.{Term, Type}
import leo.datastructures._
import leo.datastructures.impl.Signature
import leo.modules.output.logger.Out
import scala.annotation.tailrec
/**
* Computability path Ordering (Core CPO)
* by Blanqui, Jouannaud, Rubio
*
* @author Alexander Steen <a.steen@fu-berlin.de>
* @since 29.09.2015
*/
object TO_CPO_Naive { //} extends LeoOrdering[Term] {
import leo.datastructures.Orderings._
/////////////////////////////////////////////////////////////////
/// Exported functions
/////////////////////////////////////////////////////////////////
/* Comparisons of types */

// Core function for comparisons
/** Strict greater-than on types (delegates to the core ordering gt0). */
@inline final def gt(a: Type, b: Type): Boolean = gt0(a,b)
/** Greater-or-equal on types (delegates to ge0). */
@inline final def gteq(a: Type, b: Type): Boolean = ge0(a,b)

// Defined by gt/ge
/** Strict less-than, defined as the dual of gt. */
@inline final def lt(a: Type, b: Type): Boolean = gt(b,a)
/** Less-or-equal, defined as the dual of gteq. */
@inline final def lteq(a: Type, b: Type): Boolean = gteq(b,a)

/** Four-valued comparison: equal, greater, less, or not comparable (CMP_NC). */
@inline final def compare(a: Type, b: Type): CMP_Result = {
  if (a == b) CMP_EQ
  else if (gt(a,b)) CMP_GT
  else if (lt(a,b)) CMP_LT
  else CMP_NC
}
/** True iff the two types are related by the ordering at all. */
@inline final def canCompare(a: Type, b: Type): Boolean = compare(a,b) != CMP_NC
/* Comparisons of terms */

// Core function for comparisons
/** Strict term ordering; additionally requires the types to satisfy s.ty >= t.ty. */
@inline final def gt(s: Term, t: Term): Boolean = gteq(s.ty, t.ty) && gt0(s,t, Set())
/** Strict term ordering w.r.t. a set `bound` of variables that are treated as
  * dominated when met on the right-hand side (see gt0's variable case). */
@inline final def gt(s: Term, t: Term, bound: Set[Term]): Boolean = gteq(s.ty, t.ty) && gt0(s,t, bound)
/** Multiset extension of the strict term ordering. */
@inline final def gtMult(s: Seq[Term], t: Seq[Term]): Boolean = gt0Mult(s,t)
/** Greater-or-equal on terms: syntactic equality or strictly greater. */
@inline final def gteq(s: Term, t: Term): Boolean = gteq(s.ty, t.ty) && ge0(s,t, Set())
@inline final def gteq(s: Term, t: Term, bound: Set[Term]): Boolean = gteq(s.ty, t.ty) && ge0(s,t, bound)

// Defined by gt/ge
@inline final def lt(s: Term, t: Term): Boolean = gt(t,s)
@inline final def lteq(s: Term, t: Term): Boolean = gteq(t,s)

/** Four-valued comparison result; CMP_NC when the terms are unrelated. */
@inline final def compare(s: Term, t: Term): CMP_Result = {
  if (s == t) CMP_EQ
  else if (gt(s,t)) CMP_GT
  else if (lt(s,t)) CMP_LT
  else CMP_NC
}
@inline final def canCompare(s: Term, t: Term): Boolean = compare(s,t) != CMP_NC
/* Common comparison-related operations */
// Nothing

/////////////////////////////////////////////////////////////////
/// Internal functions
/////////////////////////////////////////////////////////////////

// ###############################################################################

/** Precedence on signature symbols, delegated to the globally configured precedence. */
final private def precedence(s: Signature#Key, t: Signature#Key): CMP_Result = Configuration.PRECEDENCE.compare(s,t)

// Well-founded ordering of base types (sorts): simply the ordering of their keys.
final private def gt_baseType(bt1: Signature#Key, bt2: Signature#Key): Boolean = bt1 > bt2
final private def ge_baseType(bt1: Signature#Key, bt2: Signature#Key): Boolean = eq_baseType(bt1,bt2) || gt_baseType(bt1,bt2)
final private def eq_baseType(bt1: Signature#Key, bt2: Signature#Key): Boolean = bt1 == bt2
////////////////////////////////////
// Comparisons of types
////////////////////////////////////

/**
 * Strict type ordering: base types compare by key; a function type dominates
 * everything its codomain dominates (or equals); unhandled combinations
 * conservatively yield false (not comparable).
 */
final private def gt0(a: Type, b: Type): Boolean = {
  import leo.datastructures.Type.{BaseType,->,β}
  if (a == b) return false
  // A bound type variable is never strictly greater than anything.
  if (a.isBoundTypeVar) return false
  /* a is base type */
  if (a.isBaseType) {
    val aId = BaseType.unapply(a).get
    if (b.isBaseType) {
      val bId = BaseType.unapply(b).get
      return gt_baseType(aId, bId)
    }
    if (b.isFunType) {
      // A base type dominates a function type iff it dominates both components.
      val (bI, bO) = ->.unapply(b).get
      return gt0(a, bI) && gt0(a, bO)
    }
    // TODO: Are there further meaningful cases?
  }
  /* a is function type */
  if (a.isFunType) {
    val (aI, aO) = ->.unapply(a).get
    // Dominance via the codomain alone suffices.
    if (ge0(aO, b)) return true
    if (b.isFunType) {
      val (bI, bO) = ->.unapply(b).get
      // Identical domains: decide by the codomains.
      if (eq_type(aI,bI)) return gt0(aO,bO)
    }
    // TODO: Are there further meaningful cases?
  }
  /* adaption for quantified types */
  if (a.isPolyType) {
    val aO = β.unapply(a).get
    if (ge0(aO, b)) return true
    if (b.isPolyType) {
      val bO = β.unapply(b).get
      return gt0(aO,bO)
    }
    // TODO: Are there further meaningful cases?
  }
  /* adaption end */
  // TODO: We dont know what to do with other cases
  false
}
/**
 * Greater-or-equal on types: syntactic equality or the structural cases below,
 * mirroring gt0's shape.
 */
final private def ge0(a: Type, b: Type): Boolean = {
  import leo.datastructures.Type.{BaseType,->,β}
  if (a == b) return true
  if (a.isBaseType) {
    val aId = BaseType.unapply(a).get
    if (b.isBaseType) {
      val bId = BaseType.unapply(b).get
      return ge_baseType(aId, bId)
    }
    if (b.isFunType) {
      val (bI, bO) = ->.unapply(b).get
      return gt0(a, bI) && gt0(a, bO)
    }
    // TODO: Are there further meaningful cases?
  }
  if (a.isFunType) {
    val (aI, aO) = ->.unapply(a).get
    if (ge0(aO, b)) return true
    if (b.isFunType) {
      val (bI, bO) = ->.unapply(b).get
      if (eq_type(aI,bI)) return ge0(aO,bO)
    }
    // TODO: Are there further meaningful cases?
  }
  /* adaption for quantified types */
  if (a.isPolyType) {
    val aO = β.unapply(a).get
    // NOTE(review): this branch recurses with the STRICT gt0 in both positions,
    // while the function-type branch above recurses with ge0; this asymmetry
    // looks like a copy-paste artifact — confirm against the intended ordering.
    if (gt0(aO, b)) return true
    if (b.isPolyType) {
      val bO = β.unapply(b).get
      return gt0(aO,bO)
    }
    // TODO: Are there further meaningful cases?
  }
  /* adaption end */
  // TODO: We dont know what to do with other cases
  false
}
/**
 * Equality with respect to the type ordering: two types are equal when they
 * are syntactically identical, or when both are base types whose keys are
 * equal according to eq_baseType.
 */
@inline final private def eq_type(a: Type, b: Type): Boolean = {
  import leo.datastructures.Type.BaseType
  if (a == b) true
  else if (a.isBaseType && b.isBaseType)
    eq_baseType(BaseType.unapply(a).get, BaseType.unapply(b).get)
  else false
}
// ###############################################################################

////////////////////////////////////
// Comparisons of terms
////////////////////////////////////

/**
 * Compares argument lists according to the status (lexicographic or multiset)
 * assigned to the head symbol of `a`.
 * NOTE(review): in the lex case, when s has MORE arguments than t it suffices
 * that the first t.length arguments are pairwise equal — confirm this matches
 * the intended CPO treatment of differing arities.
 */
@inline private final def gt0Stat(a: Term, s: Seq[Term], t: Seq[Term], x: Set[Term], status: Int): Boolean = {
  import leo.datastructures.IsSignature.{lexStatus,multStatus}
  if (status == lexStatus) {
    if (s.length > t.length){
      alleq(s,t,t.length)
    } else gt0Lex(a,s,t,x)
  } else if (status == multStatus) {
    gt0Mult(s,t)
  } else {
    // This should not happen
    Out.severe("[CPO_Naive] Status compare called with unknown status")
    false
  }
}
/**
 * Lexicographic argument comparison: equal prefixes are skipped; at the first
 * difference, s.head must strictly dominate t.head and the full left term `a`
 * must dominate every remaining right argument. Returns false when either
 * list is exhausted without finding a strict difference.
 */
@tailrec
private final def gt0Lex(a: Term, s: Seq[Term], t: Seq[Term], x: Set[Term]): Boolean = {
  if (s.nonEmpty && t.nonEmpty) {
    if (s.head == t.head) {
      gt0Lex(a,s.tail,t.tail,x)
    } else {
      gt(s.head,t.head) && t.tail.forall(gt0(a,_,x))
    }
  } else false
}
/**
 * Checks that the first `n` elements of `s` and `t` are pairwise equal.
 * Precondition (unchanged from the original behavior): both sequences hold
 * at least `n` elements when `n > 0`.
 */
@tailrec
private final def alleq(s: Seq[Term], t: Seq[Term], n: Int): Boolean =
  n == 0 || (s.head == t.head && alleq(s.tail, t.tail, n - 1))
/**
 * Multiset extension of the term ordering: removes the common sub-multiset
 * (intersect/diff on Seq respect multiplicities) and requires every remaining
 * right element to be dominated by some remaining left element.
 */
private final def gt0Mult(s: Seq[Term], t: Seq[Term]): Boolean = {
  if (s.nonEmpty && t.isEmpty) true
  else if (s.nonEmpty && t.nonEmpty) {
    val sameElements = s.intersect(t)
    val remSameS = s.diff(sameElements)
    val remSameT = t.diff(sameElements)
    // Equal multisets are not strictly greater.
    if (remSameS.isEmpty && remSameT.isEmpty) false
    else gt0Mult0(remSameS, remSameT)
  } else false
}
/**
 * Work-horse of the multiset comparison: succeeds iff every element of `t` is
 * strictly dominated by some element of `s`. Each left element discards the
 * right elements it dominates; the remainder is checked recursively.
 */
@tailrec
private final def gt0Mult0(s: Seq[Term], t: Seq[Term]): Boolean =
  if (t.isEmpty) true
  else if (s.isEmpty) false
  else gt0Mult0(s.tail, t.filterNot(gt(s.head, _)))
/**
 * Core strict CPO comparison on beta-normal terms.
 * `x` is the set of variables (supplied by callers via `bound`) that are
 * treated as dominated when encountered on the right-hand side.
 * The `case N` comments presumably refer to rule numbers of the underlying
 * CPO presentation — confirm against the paper.
 */
final private def gt0(s: Term, t: Term, x: Set[Term]): Boolean = {
  import leo.datastructures.Term.{:::>, Bound, MetaVar, Symbol, TypeLambda, β,mkApp}

  if (s == t) return false
  if (s.isVariable) return false
  /* case 6+10+15: ... > y */
  if (t.isVariable) {
    // Right variable: dominated iff it is a de-Bruijn bound variable or a
    // member of the caller-supplied set x.
    return Bound.unapply(t).isDefined || x.contains(t)
  }

  if (s.isApp || s.isConstant) {
    val (f,args) = β.unapply(s).get
    val fargList: Seq[Term] = effectiveArgs(f.ty,args)

    f match {
      // #############
      // All f(t)-rules
      // #############
      case Symbol(idf) =>
        /* f(t) > ... cases */
        /* case 1: f(t) >= v */
        // Some argument already dominates (or equals) the right side.
        if (fargList.exists(gteq(_, t))) return true

        /* case 2+3: f(t) > g(u) and case 4: f(t) > uv*/
        if (t.isApp || t.isConstant) {
          val (g,args2) = β.unapply(t).get
          try {
            val gargList: Seq[Term] = effectiveArgs(g.ty, args2)
            g match {
              case Symbol(idg) =>
                /* case 2+3 */
                if (precedence(idf, idg) == CMP_EQ) {
                  // Equal precedence: compare argument lists by the symbol's status.
                  return gt0Stat(s,fargList, gargList, x, Signature(idf).status)
                } else if (precedence(idf, idg) == CMP_GT) {
                  // Strictly higher precedence: s must dominate every right argument.
                  return gargList.forall(gt0(s, _, x))
                } else {
                  return false
                }
              case _ if gargList.nonEmpty =>
                /* case 4*/
                // Right side is an application of a non-symbol head:
                // dominate the partial application and the last argument.
                return gt0(s, Term.mkApp(g, args2.init), x) && gt0(s, gargList.last, x)
            }
          } catch {
            // Diagnostics only: both handlers log and rethrow.
            case e:AssertionError => {
              Out.severe(e.getMessage)
              Out.output("TERM s: \\t\\t " + s.pretty)
              Out.output("TERM t: \\t\\t " + t.pretty)
              throw e
            }
            case e: Exception => {
              println(idf)
              println(f.pretty)
              throw e
            }
          }
        }

        /* case 5: f(t) > lambda yv*/
        if (t.isTermAbs) {
          val (_,tO) = :::>.unapply(t).get
          return gt0(s,tO,x)
        }

        // otherwise, fail
        return false
      // #############
      // All @-rules
      // #############
      case _ if fargList.nonEmpty => {
        // Application with a non-symbol head: dominate via the partial
        // application or via the last argument.
        if (ge0(mkApp(f,args.init),t,x) || gteq(fargList.last,t,x)) return true

        if (t.isApp) {
          val (g,args2) = β.unapply(t).get
          val gargList: Seq[Term] = effectiveArgs(g.ty,args2)
          if (gargList.nonEmpty) {
            val s2 = mkApp(f,args.init)
            val t2 = mkApp(g, args2.init)
            if (s2 == t2) {
              // Same partial application: decide by the last arguments.
              if (gt0(fargList.last, gargList.last,x)) return true
            }
            return ((gt(s2,t2,x) || gteq(fargList.last,t2,x) || gt(s,t2))
              && (gt(s2,gargList.last,x) || gteq(fargList.last,gargList.last,x) || gt(s,gargList.last)))
          }
        }

        if (t.isTermAbs) {
          val (_, tO) = :::>.unapply(t).get
          return gt0(s, tO, x)
        }

        return false
      }
      case _ => println(s.pretty);println(f.pretty); assert(false, "CPO: should not happen, sth not in beta nf");
    }
  }

  // #############
  // All \\-rules (\\>, \\=, \\!=, \\X) without \\eta
  // #############
  // TODO: eta rules left out for now -- we are in eta-long form invariantly
  if (s.isTermAbs) {
    val (sInTy, sO) = :::>.unapply(s).get

    // The body alone may already dominate the right side.
    if (gteq(sO,t,x)) return true

    if (t.isTermAbs) {
      val (tInTy, tO) = :::>.unapply(t).get
      // Same abstraction domain: compare bodies; otherwise descend on the right.
      if (sInTy == tInTy) return gt0(sO, tO, x)
      else return gt0(s, tO, x)
    }

    return false
  }

  // #############
  /* adaption for type abstractions*/
  // #############
  if (s.isTypeAbs) {
    val sO = TypeLambda.unapply(s).get
    if (gteq(sO,t,x)) return true

    if (t.isTypeAbs) {
      val tO = TypeLambda.unapply(t).get
      return gt0(sO,tO,x)
    }

    return false
  }
  /* adaption end */

  Out.severe("Comparing unrecognized term. This is considered a bug! Please report.")
  Out.severe(s.pretty)
  false
}
/** Reflexive closure of the strict term ordering gt0. */
@inline final private def ge0(s: Term, t: Term, x: Set[Term]): Boolean =
  (s == t) || gt0(s, t, x)
/**
 * Strips the leading type arguments expected by `forTy` and returns the
 * remaining term arguments. Asserts that those leading arguments are indeed
 * type arguments (Right values).
 */
final private def effectiveArgs(forTy: Type, args: Seq[Either[Term, Type]]): Seq[Term] = {
  assert(args.take(forTy.polyPrefixArgsCount).forall(_.isRight), s"Number of expected type arguments (${forTy.polyPrefixArgsCount}) do not match ty abstraction count: \\n\\t Type: ${forTy.pretty}\\n\\tArgs: ${args.map(_.fold(_.pretty,_.pretty))}")
  filterTermArgs(args.drop(forTy.polyPrefixArgsCount))
}
/**
 * Keeps only the term arguments (Left values) of a mixed argument list,
 * preserving order. Replaces the previous hand-rolled, non-tail recursion
 * (which could overflow the stack on very long argument lists) with the
 * equivalent stdlib `collect`.
 */
final private def filterTermArgs(args: Seq[Either[Term, Type]]): Seq[Term] =
  args.collect { case Left(term) => term }
}
| Ryugoron/Leonora | src/main/scala/leo/datastructures/impl/orderings/TO_CPO_Naive.scala | Scala | mit | 12,939 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.keras.layers
import com.intel.analytics.bigdl.dllib.nn.InferReshape
import com.intel.analytics.bigdl.dllib.nn.abstractnn.AbstractModule
import com.intel.analytics.bigdl.dllib.nn.internal.KerasLayer
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.Shape
import com.intel.analytics.bigdl.dllib.keras.Net
import com.intel.analytics.bigdl.dllib.keras.layers.utils.KerasUtils
import scala.reflect.ClassTag
/**
* Reshapes an output to a certain shape.
* The batch dimension needs to be unchanged.
* Supports shape inference by allowing one -1 in the target shape.
* For example, if inputShape = Shape(2, 3, 4), targetShape = Array(3, -1),
* then outputShape will be Shape(3, 8).
*
* When you use this layer as the first layer of a model, you need to provide the argument
* inputShape (a Single Shape, does not include the batch dimension).
*
* @param targetShape Array of int. The target shape that you desire to have.
* Batch dimension should be excluded.
* @param inputShape A Single Shape, does not include the batch dimension.
* @tparam T Numeric type of parameter(e.g. weight, bias). Only support float/double now.
*/
class Reshape[T: ClassTag](
    val targetShape: Array[Int],
    val inputShape: Shape = null)(implicit ev: TensorNumeric[T])
  extends KerasLayer[Tensor[T], Tensor[T], T](KerasUtils.addBatch(inputShape)) with Net {

  // True when targetShape contains a -1 placeholder whose size must be inferred.
  private var infer = false
  // Index of the -1 entry within targetShape (-1 when nothing to infer).
  private var inferIndex = -1
  validateTargetShape()

  // Validates targetShape: every entry must be >= 1, except at most one -1
  // marking the dimension to infer. Sets `infer`/`inferIndex` as a side effect.
  private def validateTargetShape(): Unit = {
    if (targetShape.contains(-1)) {
      infer = true
      var i = 0
      var inferCount = 0
      while (i < targetShape.length) {
        if (targetShape(i) == -1) {
          inferIndex = i
          inferCount += 1
        }
        // We don't consider 0 here, same as Keras
        else require(targetShape(i) >= 1,
          s"Wrong reshape size at index $i: ${targetShape(i)}")
        i += 1
      }
      require(inferCount == 1, "Only one unknown dimension can be specified")
    }
  }

  override def computeOutputShape(inputShape: Shape): Shape = {
    val input = inputShape.toSingle().toArray
    // Drop the batch dimension; reshape must leave it unchanged.
    val nonBatchInput = input.slice(1, input.length)
    if (infer) {
      val nElements = nonBatchInput.product
      // targetShape still contains the -1, so its product is negative; negating
      // yields the product of the known dimensions.
      val resizeElements = - targetShape.product
      require(nElements % resizeElements == 0, s"Total size after reshape must be unchanged." +
        s" inputShape: $inputShape, targetShape: ${targetShape.mkString(", ")}")
      // NOTE(review): writes the inferred size back into the constructor array
      // `targetShape` (shared mutable state); repeated calls with different
      // input shapes will overwrite it — confirm this is intended.
      targetShape(inferIndex) = nElements / resizeElements
    }
    else {
      require(targetShape.product == nonBatchInput.product,
        s"Total size after reshape must be unchanged. But in ${this.getName()}: " +
        s"input size is: ${nonBatchInput.product}, " +
        s"while reshape size is: ${targetShape.product}")
    }
    Shape(Array(input(0)) ++ targetShape)
  }

  // Builds the underlying BigDL layer: InferReshape when a -1 must be resolved,
  // plain Reshape otherwise; both operate in batch mode.
  override def doBuild(inputShape: Shape): AbstractModule[Tensor[T], Tensor[T], T] = {
    val layer = if (infer) {
      InferReshape(targetShape, batchMode = true)
    }
    else {
      com.intel.analytics.bigdl.dllib.nn.Reshape(targetShape, batchMode = Some(true))
    }
    layer.asInstanceOf[AbstractModule[Tensor[T], Tensor[T], T]]
  }

  // Keras-2 style definition string used for serialization/printing.
  override private[bigdl] def toKeras2(): String = {
    val params = Net.inputShapeToString(inputShape) ++
      Net.param(getName()) ++
      Net.arrayToString(targetShape, "target_shape")
    Net.kerasDef(this, params)
  }
}
object Reshape {
  /** Factory mirroring the Keras API; see the class-level documentation for semantics. */
  def apply[@specialized(Float, Double) T: ClassTag](
      targetShape: Array[Int],
      inputShape: Shape = null)(implicit ev: TensorNumeric[T]): Reshape[T] = {
    new Reshape[T](targetShape, inputShape)
  }
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/keras/layers/Reshape.scala | Scala | apache-2.0 | 4,416 |
package scala.debug
object Example extends Example {
  /** Entry point: greets, runs `debug` on this instance, says goodbye. */
  def main(args: Array[String]): Unit = {
    println("Hallo Welt")
    // `debug` is resolved from the scala.debug package; presumably it prints a
    // structural dump of `this` (fields i and guy) — confirm against its impl.
    debug(this)
    println("Auf Wiedersehen Welt")
  }
}
/** Sample data carrier; its fields are the subject of the debug call. */
class Example {
  val i = 42
  // Nested case-class value built from the constant above.
  val guy = Guy("Hans", i)
}
/** A named person with an age. */
case class Guy(name: String, age: Int) {
  /** Greeting text for this guy; the age is not part of the greeting. */
  def greeting: String = "Hello " + name + "!"
  /** Prints the greeting to standard output. */
  def greet: Unit = println(greeting)
}
| machisuji/scala-debug | src/main/scala/scala/debug/example.scala | Scala | gpl-2.0 | 351 |
package de.tu_berlin.formic.common.server.datastructure
import akka.actor.{ActorLogging, Props}
import akka.persistence.{PersistentActor, RecoveryCompleted}
import de.tu_berlin.formic.common.DataStructureInstanceId
import de.tu_berlin.formic.common.datastructure.DataStructureName
import de.tu_berlin.formic.common.message.CreateRequest
import scala.reflect.ClassTag
/**
* @author Ronny Bräunlich
*/
//Why the ClassTag? See http://stackoverflow.com/questions/18692265/no-classtag-available-for-t-not-for-array
/**
 * Persistent factory actor: creates one child actor per data structure
 * instance and persists every CreateRequest so that the children can be
 * re-created on recovery. The factory's name serves as its persistence id.
 */
abstract class AbstractServerDataStructureFactory[T <: AbstractServerDataStructure : ClassTag] extends PersistentActor with ActorLogging {

  override def persistenceId: String = name.name

  val receiveCommand: Receive = {
    case req: CreateRequest =>
      val logText = s"Factory for $name received CreateRequest: $req"
      log.debug(logText)
      // Child actor is named after the instance id and created before the
      // request is persisted.
      val newDataType = context.actorOf(Props(create(req.dataStructureInstanceId)), req.dataStructureInstanceId.id)
      persist(req) { request =>
        // Reply only after the event has been persisted successfully.
        sender ! NewDataStructureCreated(request.dataStructureInstanceId, newDataType)
      }
  }

  val receiveRecover: Receive = {
    case CreateRequest(_, dataStructureInstanceId, _) =>
      // Replay: re-create the child actor for each persisted CreateRequest.
      context.actorOf(Props(create(dataStructureInstanceId)), dataStructureInstanceId.id)
    case RecoveryCompleted =>
      val logText = s"Data type factory $name recovered"
      log.info(logText)
  }

  /** Instantiates the concrete data structure actor for the given instance id. */
  def create(dataStructureInstanceId: DataStructureInstanceId): T

  /** The data structure name handled by this factory (doubles as the persistence id). */
  val name: DataStructureName
}
| rbraeunlich/formic | common/jvm/src/main/scala/de/tu_berlin/formic/common/server/datastructure/AbstractServerDataStructureFactory.scala | Scala | apache-2.0 | 1,512 |
package ohnosequences.sbt
import sbt._
import java.nio.file.Path
package object nice {

  /** Shorthand for an sbt task initializer producing a value of type X. */
  type DefTask[X] = Def.Initialize[Task[X]]

  /** java.nio.Path helpers on File values (allocation-free value class). */
  implicit class FileOps(val file: File) extends AnyVal {

    /** The absolute, normalized path of this file. */
    def absPath: Path = file.toPath.toAbsolutePath.normalize()

    /** The path of this file relative to `base`. */
    def relPath(base: File): Path = base.absPath.relativize(file.absPath)
  }
}
| ohnosequences/nice-sbt-settings | src/main/scala/package.scala | Scala | agpl-3.0 | 334 |
package org.scalacvx
import org.scalacvx.atoms.Variable
import org.scalacvx.constraints.{GtConstraint, LtConstraint, EqualityConstraint}
import org.scalacvx.dcp.{ConcaveVexity, ConvexVexity, AffineVexity}
import org.scalatest.{Matchers, FlatSpec}
import org.scalacvx.atoms.ExpressionImplicits._
/**
* Created by lorenzo on 8/23/15.
*/
class ConstraintTests extends FlatSpec with Matchers {

  // Fixtures: two 2-element vector variables and one 3x2 matrix variable.
  val xVar = Variable(2)
  val yVar = Variable(2)
  val xMat = Variable(3,2)

  // NOTE(review): the test name mentions "vexity" but the assertions check the
  // concrete constraint classes produced by the DSL operators — confirm intent.
  "A constraint" should "have appropriate vexity" in {
    (xVar == yVar) shouldBe an [EqualityConstraint]
    (abs(xVar) < yVar) shouldBe a [LtConstraint]
    (-xVar >= abs(yVar)) shouldBe a [GtConstraint]
  }

  "A constraint" should "require identical dimensions of both sides" in {
    // 3x2 vs 2-element shapes do not match and must be rejected at construction.
    an [IllegalArgumentException] should be thrownBy (xMat <= xVar)
  }
}
| lorenzolucido/ScalaCVX | src/test/scala/org/scalacvx/ConstraintTests.scala | Scala | mit | 837 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.domain.eventlog
import com.normation.eventlog._
import scala.xml._
import org.joda.time.DateTime
import net.liftweb.common._
import com.normation.rudder.domain.nodes._
import com.normation.rudder.domain.policies.SimpleDiff
import com.normation.rudder.domain.queries.Query
import com.normation.inventory.domain.NodeId
import com.normation.utils.HashcodeCaching
sealed trait NodeGroupEventLog extends EventLog { override final val eventLogCategory = NodeGroupLogCategory }
/** Event log entry recorded when a node group is created. */
final case class AddNodeGroup(
    override val eventDetails : EventLogDetails
) extends NodeGroupEventLog with HashcodeCaching {
  override val cause = None
  override val eventType = AddNodeGroup.eventType
}

/** Filter companion: rebuilds AddNodeGroup entries from (type, details) pairs. */
object AddNodeGroup extends EventLogFilter {
  override val eventType = AddNodeGroupEventType

  override def apply(x : (EventLogType, EventLogDetails)) : AddNodeGroup = AddNodeGroup(x._2)
}
/** Event log entry recorded when a node group is deleted. */
final case class DeleteNodeGroup(
    override val eventDetails : EventLogDetails
) extends NodeGroupEventLog with HashcodeCaching {
  override val cause = None
  override val eventType = DeleteNodeGroup.eventType
}

/** Filter companion: rebuilds DeleteNodeGroup entries from (type, details) pairs. */
object DeleteNodeGroup extends EventLogFilter {
  override val eventType = DeleteNodeGroupEventType

  override def apply(x : (EventLogType, EventLogDetails)) : DeleteNodeGroup = DeleteNodeGroup(x._2)
}
/** Event log entry recorded when a node group is modified. */
final case class ModifyNodeGroup(
    override val eventDetails : EventLogDetails
) extends NodeGroupEventLog with HashcodeCaching {
  override val cause = None
  override val eventType = ModifyNodeGroup.eventType
}

/** Filter companion: rebuilds ModifyNodeGroup entries from (type, details) pairs. */
object ModifyNodeGroup extends EventLogFilter {
  override val eventType = ModifyNodeGroupEventType

  override def apply(x : (EventLogType, EventLogDetails)) : ModifyNodeGroup = ModifyNodeGroup(x._2)
}
/** All node-group event log filters, used to dispatch persisted entries. */
object NodeGroupEventLogsFilter {
  final val eventList: List[EventLogFilter] =
    AddNodeGroup :: DeleteNodeGroup :: ModifyNodeGroup :: Nil
}
| jooooooon/rudder | rudder-core/src/main/scala/com/normation/rudder/domain/eventlog/NodeGroupEventLog.scala | Scala | agpl-3.0 | 3,583 |
package org.bitcoins.core.p2p
import org.bitcoins.testkitcore.gen.p2p.P2PGenerator
import org.bitcoins.testkitcore.util.BitcoinSUnitTest
import java.net.{InetAddress => JvmAddress}
class InetAddressJVMTest extends BitcoinSUnitTest {

  implicit override val generatorDrivenConfig: PropertyCheckConfiguration =
    generatorDrivenConfigNewCode

  behavior of "InetAddress"

  // Round-trips generated addresses through java.net.InetAddress and compares
  // against the P2P-layer byte serialization.
  it must "have serialization symmetry with java's InetAddress" in {
    forAll(P2PGenerator.inetAddress) { inet =>
      assert(
        NetworkIpAddress.writeAddress(
          JvmAddress.getByAddress(inet.getAddress).getAddress) == inet.bytes)
    }
  }

  // NOTE(review): this property only checks that JvmAddress.getByAddress
  // returns the byte array it was given, which is close to a tautology —
  // confirm that validating ipv4Bytes construction is the intended subject.
  it must "have serialization symmetry with java's InetAddress with IPv4" in {
    forAll(P2PGenerator.inetAddress) { inet =>
      assert(
        JvmAddress
          .getByAddress(inet.ipv4Bytes.toArray)
          .getAddress sameElements inet.ipv4Bytes.toArray)
    }
  }
}
| bitcoin-s/bitcoin-s | core-test/.jvm/src/test/scala/org/bitcoins/core/p2p/InetAddressJVMTest.scala | Scala | mit | 912 |
/*
Copyright 2009 David Hall, Daniel Ramage
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package scalanlp.serialization;
import scala.collection.MapLike
import scala.collection.generic.{GenericCompanion, MapFactory}
import scala.collection.mutable.{ArrayBuilder,Builder};
import scalanlp.util.Index
import scalala.tensor.{Counter2, Counter}
import scalala.scalar.Scalar
/**
 * Reads type V from input Input.
 *
 * @author dramage
 */
trait Readable[-Input,V] {
  /** Reads a value of type V from the given input source. */
  def read(input : Input) : V;
}

/**
 * Writes type V to Output.
 *
 * @author dramage
 */
trait Writable[-Output,V] {
  /** Writes the given value to the given output sink. */
  def write(output : Output, value : V);
}
/**
* A base trait for brokers of serialization. See DataSerialization for a good example.
*
* Implementors should provide member types Input and Output which are the places the data
* can be stored. Readable and Writable implicit instances should be created for types
* that can be serialized in this format. If you also inherit from CompoundTypes,
* you get a lot of functionality for almost free.
*
* @author dlwh
* @author dramage
*/
trait SerializationFormat extends Serializable {
  /** The place to read data from. */
  type Input;

  /** The place to write data to. */
  type Output;

  /** Inner trait for reading from Input. */
  trait Readable[T] extends Serializable {
    def read(source: Input): T
  }

  /** Inner trait for writing to Output. */
  trait Writable[T] extends Serializable {
    def write(sink: Output, what: T): Unit
  }

  /** A convenience wrapper for Readable and Writable. */
  trait ReadWritable[T] extends Readable[T] with Writable[T];

  /** Summons the implicit ReadWritable instance for T. */
  def getReadWritable[T:ReadWritable] : ReadWritable[T] =
    implicitly[ReadWritable[T]];

  /** Sugar for implicitly[Readable[T]].read(source); */
  def read[T:Readable](source: Input): T =
    implicitly[Readable[T]].read(source);

  /** Sugar for implicitly[Writable[T]].write(sink,what); */
  def write[T:Writable](sink: Output, what:T):Unit =
    implicitly[Writable[T]].write(sink, what);
}
object SerializationFormat {
/**
 * Supports reading and writing standard primitive types and String.
 *
 * Mixed into a SerializationFormat; each concrete format must provide an
 * implicit ReadWritable instance for every primitive type listed below.
 * The instances are abstract so that each format controls its own encoding.
 *
 * @author dramage
 */
trait PrimitiveTypes { this : SerializationFormat =>
  implicit val intReadWritable : ReadWritable[Int];
  implicit val byteReadWritable : ReadWritable[Byte];
  implicit val longReadWritable : ReadWritable[Long];
  implicit val shortReadWritable: ReadWritable[Short];
  implicit val doubleReadWritable : ReadWritable[Double];
  implicit val floatReadWritable : ReadWritable[Float];
  implicit val charReadWritable : ReadWritable[Char];
  implicit val booleanReadWritable : ReadWritable[Boolean];
  implicit val stringReadWritable : ReadWritable[String];
}
/**
* Supports reading and writing tuples, collections, maps, arrays, etc.,
* if their underlying key and value types are readable and writable.
*
* @author dlwh
* @author dramage
*/
trait CompoundTypes extends SerializationFormat { outer =>
protected def readName(in : Input) : String;
protected def writeName(out : Output, name : String);
/** writeNameOpt is like writeName, but Serialization formats aren't required to use it. Defaults
* to just calling writeName */
protected def writeNameOpt(out: Output, name: String) { writeName(out, name) }
/** If you override writeNameOpt to do nothing, override this */
protected def writesNames:Boolean = true
private def readNameOpt(in: Input): Option[String] = {
if(writesNames) Some(readName(in))
else None
}
/** Reads elements of type T into the given buildable. Inverse of writeIterable. */
protected def readBuildable[T:Readable,To]
(src: Input, builder : Builder[T,To]) : To;
/** Writes elements of the given collection. Inverse of readBuildable. */
protected def writeIterable[T:Writable,CC<:Iterable[T]]
(sink: Output, coll : CC, name : String);
protected def readTupleStart(in : Input) =
{ /* do nothing */ }
protected def readTupleGlue(in : Input) =
{ /* do nothing */ }
protected def readTupleEnd(in : Input) =
{ /* do nothing */ }
protected def writeTupleStart(out : Output) =
{ /* do nothing */ }
protected def writeTupleGlue(out : Output) =
{ /* do nothing */ }
protected def writeTupleEnd(out : Output) =
{ /* do nothing */ }
/** Standard collection types: builds a ReadWritable for any standard
 *  collection CC given its companion (for a builder) and the collection
 *  name written alongside the elements. Read is the inverse of write. */
protected def collectionFromElements
[T:ReadWritable,CC[T]<:Iterable[T]]
(c: GenericCompanion[CC], name : String)
= new ReadWritable[CC[T]] {
  // Rebuild the collection from serialized elements via the companion's builder.
  def read(source : Input) =
    readBuildable[T,CC[T]](source, c.newBuilder[T]);
  // Stream the elements out, tagged with the collection's name.
  def write(sink : Output, coll : CC[T]) =
    writeIterable[T,CC[T]](sink, coll, name);
}
/** Map collection types: analogous to the Iterable overload above, but builds
 *  from a MapFactory and serializes entries as (key, value) tuples, which in
 *  turn rely on tuple2ReadWritable for the element encoding. */
protected def collectionFromElements
[K:ReadWritable,V:ReadWritable,CC[K,V]<:Map[K,V] with MapLike[K,V,CC[K,V]]]
(c: MapFactory[CC], name : String)
= new ReadWritable[CC[K,V]] {
  def read(source : Input) =
    readBuildable[(K,V),CC[K,V]](source, c.newBuilder[K,V]);
  def write(sink : Output, coll : CC[K,V]) =
    writeIterable[(K,V),CC[K,V]](sink, coll, name);
}
/** ReadWritable for pairs. Elements are framed by the format's tuple
 *  start/glue/end hooks and must be read back in the exact order they were
 *  written (t1 then t2). tuple3/tuple4 below follow the same protocol. */
implicit def tuple2ReadWritable[T1,T2]
(implicit t1H: ReadWritable[T1], t2H: ReadWritable[T2])
= new ReadWritable[(T1,T2)] {
  def read(in: Input) = {
    readTupleStart(in);
    val t1 = t1H.read(in);
    readTupleGlue(in);
    val t2 = t2H.read(in);
    readTupleEnd(in);
    (t1,t2)
  }
  def write(out: Output, t: (T1,T2)) {
    writeTupleStart(out);
    t1H.write(out, t._1);
    writeTupleGlue(out);
    t2H.write(out, t._2);
    writeTupleEnd(out);
  }
}
implicit def tuple3ReadWritable[T1,T2,T3]
(implicit t1H: ReadWritable[T1], t2H: ReadWritable[T2], t3H: ReadWritable[T3])
= new ReadWritable[(T1,T2,T3)] {
def read(in: Input) = {
readTupleStart(in);
val t1 = t1H.read(in);
readTupleGlue(in);
val t2 = t2H.read(in);
readTupleGlue(in);
val t3 = t3H.read(in);
readTupleEnd(in);
(t1,t2,t3);
}
def write(out: Output, t: (T1,T2,T3)) {
writeTupleStart(out);
t1H.write(out, t._1);
writeTupleGlue(out);
t2H.write(out, t._2);
writeTupleGlue(out);
t3H.write(out, t._3);
writeTupleEnd(out);
}
}
implicit def tuple4ReadWritable[T1,T2,T3,T4]
(implicit t1H: ReadWritable[T1], t2H: ReadWritable[T2],
t3H: ReadWritable[T3], t4H: ReadWritable[T4])
= new ReadWritable[(T1,T2,T3,T4)] {
def read(in: Input) = {
readTupleStart(in);
val t1 = t1H.read(in);
readTupleGlue(in);
val t2 = t2H.read(in);
readTupleGlue(in);
val t3 = t3H.read(in);
readTupleGlue(in);
val t4 = t4H.read(in);
readTupleEnd(in);
(t1,t2,t3,t4)
}
def write(out: Output, t: (T1,T2,T3,T4)) {
writeTupleStart(out);
t1H.write(out, t._1);
writeTupleGlue(out);
t2H.write(out, t._2);
writeTupleGlue(out);
t3H.write(out, t._3);
writeTupleGlue(out);
t4H.write(out, t._4);
writeTupleEnd(out);
}
}
/** ReadWritable for arrays. Requires a ClassManifest so ArrayBuilder can
 *  allocate the properly-typed array on read. Note the write side reuses the
 *  Seq-based writeIterable (Array is implicitly viewed as a Seq) but tags the
 *  collection with the name "Array". */
implicit def arrayReadWritable[T]
(implicit tH: ReadWritable[T], man: ClassManifest[T])
= new ReadWritable[Array[T]] {
  def read(source: Input) =
    readBuildable[T,Array[T]](source, ArrayBuilder.make[T]);
  def write(sink: Output, value: Array[T]) =
    writeIterable[T,Seq[T]](sink, value, "Array");
}
implicit def listReadWritable[T](implicit tH: ReadWritable[T]) =
collectionFromElements[T,List](List,"List");
implicit def seqReadWritable[T](implicit tH: ReadWritable[T]):ReadWritable[Seq[T]] =
collectionFromElements[T,Seq](Seq,"Seq");
implicit def indexedSeqReadWritable[T](implicit tH: ReadWritable[T]) =
collectionFromElements[T,IndexedSeq](IndexedSeq,"IndexedSeq");
implicit def setReadWritable[T](implicit tH: ReadWritable[T]) =
collectionFromElements[T,Set](Set,"Set");
implicit def mapReadWritable[K:ReadWritable,V:ReadWritable] =
collectionFromElements[K,V,Map](Map,"Map");
implicit def iterableReadWritable[T](implicit tH: ReadWritable[T]) =
collectionFromElements[T,Iterable](Iterable,"Iterable");
implicit def indexReadWritable[T:ReadWritable]
= new ReadWritable[Index[T]] {
def read(source: Input): Index[T] =
Index(seqReadWritable[T].read(source));
def write(sink: Output, value: Index[T]) =
writeIterable[T,Iterable[T]](sink, value, "Index");
}
/** ReadWritable for Counter, serialized as its (key, value) pairs under the
 *  name "Counter". Writes ALL pairs via pairsIterator (including zero values),
 *  unlike counter2ReadWritable below which writes only non-zero triples --
 *  NOTE(review): confirm this asymmetry is intentional. */
implicit def counterReadWritable[T:ReadWritable, V:ReadWritable:Scalar]: ReadWritable[Counter[T,V]] = new ReadWritable[Counter[T,V]] {
  def write(sink: Output, ctr: Counter[T,V]) = {
    // Wrap the pairs iterator so writeIterable can stream entries lazily.
    writeIterable[(T,V),Iterable[(T,V)]](sink, new Iterable[(T,V)] {
      def iterator = ctr.pairsIterator
    },"Counter")
  }
  def read(source: Input): Counter[T, V] = {
    val map = readBuildable(source,Iterable.newBuilder[(T,V)])
    val ctr = Counter(map)
    ctr
  }
}
/** ReadWritable for Counter2 (two-keyed counter), serialized as its non-zero
 *  (row, col, value) triples.
 *
 *  NOTE(review): the collection name written here is "Counter", not
 *  "Counter2" -- likely a copy/paste slip from counterReadWritable. Whether it
 *  matters depends on how the concrete format's readBuildable treats the
 *  name; confirm before changing, as persisted data may depend on it. */
implicit def counter2ReadWritable[T:ReadWritable, U:ReadWritable, V:ReadWritable:Scalar]: ReadWritable[Counter2[T,U,V]] = {
  new ReadWritable[Counter2[T,U,V]] {
    def write(sink: Output, ctr: Counter2[T,U,V]) = {
      // Only non-zero triples are written, so zero entries are dropped on round trip.
      writeIterable[(T,U,V),Iterable[(T,U,V)]](sink, new Iterable[(T,U,V)] {
        def iterator = ctr.triplesIteratorNonZero
      },"Counter")
    }
    def read(source: Input): Counter2[T, U, V] = {
      val map = readBuildable(source,Iterable.newBuilder[(T,U,V)])
      val ctr = Counter2(map)
      ctr
    }
  }
}
/** ReadWritable for Option, encoded as the name "Some" followed by a
 *  tuple-framed value, or the bare name "None".
 *
 *  NOTE(review): any other name read here raises a MatchError rather than a
 *  SerializationException -- confirm whether that is acceptable for this format. */
implicit def optionReadWritable[T:ReadWritable] : ReadWritable[Option[T]]
= new ReadWritable[Option[T]] {
  override def read(in : Input) = {
    readName(in) match {
      case "Some" =>
        readTupleStart(in);
        val rv = implicitly[ReadWritable[T]].read(in);
        readTupleEnd(in);
        Some(rv);
      case "None" =>
        None;
    }
  }
  override def write(out : Output, option : Option[T]) = {
    option match {
      case Some(v) =>
        writeName(out, "Some");
        writeTupleStart(out);
        implicitly[ReadWritable[T]].write(out, v);
        writeTupleEnd(out);
      case None =>
        writeName(out, "None");
    }
  }
}
/**
 * Constructable provides a simple way to add serialization support to
 * a more basic type: values of V are packed into an already-serializable
 * representation RW, written under `name`, and unpacked on read. In a
 * companion object, extend a SerializationFormat's Constructable as an
 * implicit object.
 *
 * @author dramage
 */
abstract class Constructible[V:ClassManifest,RW:ReadWritable] extends ReadWritable[V] {
  /** Name written and read. */
  def name : String;
  /** Packs the given value into a representation. */
  def pack(value : V) : RW;
  /** Unpacks the given value from a representation. */
  def unpack(rep : RW) : V;
  override def read(in : Input) = {
    // readNameOpt returns None when the format does not write names,
    // in which case no name check is possible or necessary.
    val seen = readNameOpt(in);
    if (seen.exists(_ != name)) {
      throw new SerializationException("Expected: "+name+" but got "+seen);
    }
    unpack(implicitly[ReadWritable[RW]].read(in));
  }
  override def write(out : Output, value : V) = {
    // writeNameOpt lets formats that omit names skip the tag entirely.
    writeNameOpt(out,name);
    implicitly[ReadWritable[RW]].write(out,pack(value));
  }
}
}
}
/**
* Supports marshalling to and from a byte array.
*
* @author dramage
* @author dlwh
*/
trait ByteSerialization extends SerializationFormat {
/** Marshalls the object to a byte array. */
def toBytes[T:Writable](value : T) : Array[Byte];
/** Unmarshalls the object from a byte array. */
def fromBytes[T:Readable](bytes : Array[Byte]) : T;
}
/**
* Abstract serialization format that supports marshalling to and from a String.
* TextSerialization extends this trait with functionality to actually read
* and write values as text..
*
* @author dramage
* @author dlwh
*/
trait StringSerialization extends SerializationFormat with ByteSerialization {
/** Encoding used by this StringSerialization instance. Defaults to UTF8, independent of platform. */
def encoding = "UTF8";
/** Marshalls the given value as a string. */
def toString[T:Writable](value: T) : String;
/** Demarshalls a value from the given string. */
def fromString[T:Readable](str: String) : T;
/** Returns a byte array using the this.encoding as the byte encoding of the value returned by toString. */
override def toBytes[T:Writable](x: T) =
toString(x).getBytes(encoding);
/** Returns fromString called on the string created using the this.encoding as the byte encoding of given bytes. */
override def fromBytes[T:Readable](bytes: Array[Byte]) : T =
fromString[T](new String(bytes,encoding));
}
class SerializationException(msg : String)
extends RuntimeException(msg);
| MLnick/scalanlp-core | data/src/main/scala/scalanlp/serialization/Serialization.scala | Scala | apache-2.0 | 13,578 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.dataload
import scala.util.control.NonFatal

import org.apache.spark.sql.common.util.QueryTest
import org.scalatest.BeforeAndAfterAll

import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
/** Verifies loading data (including empty dimension values) into a Carbon table
 *  that mixes DICTIONARY_EXCLUDE and DICTIONARY_INCLUDE columns, by comparing
 *  query results against an equivalent plain Hive table.
 */
class TestLoadDataWithDictionaryExcludeAndInclude extends QueryTest with BeforeAndAfterAll {
  var filePath: String = _
  var pwd: String = _

  /** Points filePath at the CSV fixture containing empty dimension values. */
  def buildTestData() = {
    filePath = s"$resourcesPath/emptyDimensionData.csv"
  }

  /** Drops both the Carbon table under test and the Hive reference table. */
  def dropTable() = {
    sql("DROP TABLE IF EXISTS exclude_include_t3")
    sql("DROP TABLE IF EXISTS exclude_include_hive_t3")
  }

  /** Creates the Hive reference table and the Carbon table under test.
   *  Fix: only non-fatal errors are caught and logged; the previous
   *  `case ex: Throwable` also swallowed fatal errors (OOM, interrupts),
   *  which now propagate so setup failures surface immediately.
   */
  def buildTable() = {
    try {
      sql(
        """
           CREATE TABLE exclude_include_hive_t3
           (ID Int, date Timestamp, country String,
           name String, phonetype String, serialname String, salary Int)
           row format delimited fields terminated by ','
        """)
      sql(
        """
           CREATE TABLE exclude_include_t3
           (ID Int, date Timestamp, country String,
           name String, phonetype String, serialname String, salary Int)
           STORED BY 'org.apache.carbondata.format'
           TBLPROPERTIES('DICTIONARY_EXCLUDE'='country,phonetype,serialname',
           'DICTIONARY_INCLUDE'='ID')
        """)
    } catch {
      case NonFatal(ex) => LOGGER.error(ex.getMessage + "\\r\\n" + ex.getStackTraceString)
    }
  }

  /** Loads the CSV fixtures into both tables, using a matching timestamp format. */
  def loadTable() = {
    try {
      CarbonProperties.getInstance()
        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
      sql(
        s"""
           LOAD DATA LOCAL INPATH '$filePath' into table exclude_include_t3
        """)
      sql(
        s"""
           LOAD DATA LOCAL INPATH '$resourcesPath/emptyDimensionDataHive.csv' into table exclude_include_hive_t3
        """)
    } catch {
      case NonFatal(ex) => LOGGER.error(ex.getMessage + "\\r\\n" + ex.getStackTraceString)
    }
  }

  override def beforeAll {
    dropTable
    buildTestData
    buildTable
    loadTable
  }

  test("test load data with dictionary exclude & include and with empty dimension") {
    // Both tables loaded the same logical data, so the ID projections must match.
    checkAnswer(
      sql("select ID from exclude_include_t3"), sql("select ID from exclude_include_hive_t3")
    )
  }

  override def afterAll {
    dropTable
    // Restore the default timestamp format changed in loadTable.
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
  }
}
| Sephiroth-Lin/incubator-carbondata | integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestDataWithDicExcludeAndInclude.scala | Scala | apache-2.0 | 3,219 |
package com.teambytes.inflatable.raft.cluster.clusters
import akka.remote.testkit.MultiNodeConfig
import com.typesafe.config.{Config, ConfigFactory}
import akka.dispatch._
import java.util.concurrent.ConcurrentLinkedQueue
import akka.actor.{Identify, ActorSystem, ActorRef}
import com.teambytes.inflatable.raft.cluster.ClusterRaftActor
/** Three-node multi-JVM cluster configuration whose mailbox drops every second
 *  Identify message (see DropEverySecondIdentifyMessageQueue below), used to
 *  exercise cluster raft behaviour when member identification is flaky. */
object ThreeNodesIdentifyDroppingCluster extends MultiNodeConfig {
  // Roles for the three participating test nodes.
  val first = role("first")
  val second = role("second")
  val third = role("third")
  // Node number -> role, for convenient lookup in tests.
  val nodes = Map (
    1 -> first,
    2 -> second,
    3 -> third
  )
  // Dispatcher/mailbox wiring: any actor requiring DropEverySecondIdentifyMessageQueue
  // gets a mailbox created by DropFirstIdentifyMessageMailbox.
  val dropEverySecondIdentifyConfig =
    s"""
      |fail-every-second-identify-dispatcher {
      |  mailbox-requirement = "${classOf[DropEverySecondIdentifyMessageQueue].getCanonicalName}"
      |}
      |
      |akka.actor.mailbox.requirements {
      |  "${classOf[DropEverySecondIdentifyMessageQueue].getCanonicalName}" = fail-every-second-identify-dispatcher-mailbox
      |}
      |
      |fail-every-second-identify-dispatcher-mailbox {
      |  mailbox-type = "${classOf[DropFirstIdentifyMessageMailbox].getCanonicalName}"
      |}
    """.stripMargin
  // Layer the drop-config over the shared cluster.conf and the default config.
  commonConfig(
    ConfigFactory.parseString(dropEverySecondIdentifyConfig).withFallback(
      ConfigFactory.parseResources("cluster.conf").withFallback(
        ConfigFactory.load()
      )
    )
  )
}
// mailbox impl
/** MessageQueue that drops the 1st, 3rd, 5th, ... Identify message it sees
 *  (i.e. every second one, starting with the first) and enqueues all other
 *  messages normally.
 *
 *  NOTE(review): `identifyMsgs` is mutated without synchronization; if enqueue
 *  can be called from multiple threads the drop pattern is not deterministic --
 *  confirm single-producer usage in these tests. */
class DropEverySecondIdentifyMessageQueue extends MessageQueue {
  private final val queue = new ConcurrentLinkedQueue[Envelope]()
  // Running count of Identify messages observed so far.
  var identifyMsgs = 0
  def enqueue(receiver: ActorRef, handle: Envelope): Unit = handle match {
    case envelope: Envelope if envelope.message.isInstanceOf[Identify] =>
      identifyMsgs += 1
      if (identifyMsgs % 2 == 1) {
        // drop message (odd-numbered Identify): never reaches the queue
        println(s"Dropping msg = ${envelope.message}, from sender = ${envelope.sender}")
      } else {
        println(s"Enqueue msg = ${envelope.message}, from sender = ${envelope.sender}")
        queue.offer(envelope)
      }
    case _ => queue.offer(handle)
  }
  def dequeue(): Envelope = queue.poll()
  def numberOfMessages: Int = queue.size
  def hasMessages: Boolean = !queue.isEmpty
  // Drain any remaining messages to dead letters when the owning actor stops.
  def cleanUp(owner: ActorRef, deadLetters: MessageQueue) {
    while (hasMessages) {
      deadLetters.enqueue(owner, dequeue())
    }
  }
}
/** MailboxType that backs each actor with a DropEverySecondIdentifyMessageQueue. */
class DropFirstIdentifyMessageMailbox
  extends MailboxType with ProducesMessageQueue[DropEverySecondIdentifyMessageQueue] {

  /** Akka instantiates mailbox types reflectively through this constructor signature. */
  def this(settings: ActorSystem.Settings, config: Config) = this()

  final override def create(owner: Option[ActorRef], system: Option[ActorSystem]): MessageQueue = {
    new DropEverySecondIdentifyMessageQueue()
  }
}
/** ClusterRaftActor variant that requires the Identify-dropping mailbox above,
 *  simulating flaky member identification during cluster formation. */
class OnFlakyClusterRaftActor(raftMember: ActorRef, waitUntilMembers: Int) extends ClusterRaftActor(raftMember, waitUntilMembers)
  with RequiresMessageQueue[DropEverySecondIdentifyMessageQueue]
| grahamar/inflatable | src/multi-jvm/scala/com/teambytes/inflatable/raft/cluster/clusters/ThreeNodesIdentifyDroppingCluster.scala | Scala | apache-2.0 | 2,894 |
/* _____ _ __ ________ ___ *\\
** / ___/(_) |/ / ___/ |__ \\ Simple Mechanics Simulator 2 **
** \\__ \\/ / /|_/ /\\__ \\ __/ / copyright (c) 2011 Jakob Odersky **
** ___/ / / / / /___/ / / __/ **
** /____/_/_/ /_//____/ /____/ **
\\* */
package sims.dynamics
import sims.math._
/** A rigid body composed of one or more shapes.
  *
  * Position and rotation are tracked for the body's center of mass (COM);
  * assigning either one moves/rotates all constituent shapes to match.
  *
  * NOTE(review): the COM computation divides by the total shape mass, so a
  * body constructed with no shapes (or zero total mass) yields NaN -- confirm
  * callers never do that. Also, `mass` and `inertia` are lazy, so flipping
  * `fixed` after their first access has no effect on them.
  *
  * @param shapes0 shapes making up this body
  */
class Body(shapes0: Shape*) {
  /** Shapes belonging to this body. */
  val shapes: List[Shape] = shapes0.toList
  /** Resulting force applied to the COM of this body. */
  var force: Vector2D = Vector2D.Null
  /** Resulting torque applied about the COM of this body. */
  var torque: Double = 0.0
  /** Linear velocity of the COM. */
  var linearVelocity: Vector2D = Vector2D.Null
  /** Angular velocity about the COM. */
  var angularVelocity: Double = 0.0
  // COM: mass-weighted average of the shapes' positions.
  private var _position: Vector2D =
    (Vector2D.Null /: shapes)((v: Vector2D, s: Shape) => v + s.position * s.mass) / shapes.map(_.mass).sum
  /** Position of this body's center of mass. */
  def position = _position
  /** Moves this body (and all of its shapes) so that its COM is at `pos`. */
  def position_=(pos: Vector2D) = {
    val delta = pos - _position
    _position = pos
    for (s <- shapes) s.position += delta
  }
  private var _rotation: Double = 0.0
  /** Rotation of this body about its COM. */
  def rotation = _rotation
  /** Rotates this body (and all of its shapes) about its COM to the absolute angle `r`. */
  def rotation_=(r: Double) = {
    // Fix: delta must be (new - old), mirroring position_= above. The previous
    // implementation computed (old - new), rotating each shape the wrong way
    // while still placing its position from the absolute angle `r`.
    val delta = r - _rotation
    _rotation = r
    for (s <- shapes) {
      s.rotation += delta
      // Shape positions are re-derived from the absolute angle and the shape's
      // local offset from the COM.
      s.position = position + (s.local.get rotate r)
    }
  }
  /** Whether this body is fixed (immovable); fixed bodies ignore forces and impulses. */
  var fixed = false
  /**Returns the mass of this body. If the body is free, its mass is the sum of the masses of its shapes.
   * If the body is fixed, its mass is infinite (`Double.PositiveInfinity`).
   * @return this body's mass*/
  lazy val mass: Double = if (!fixed) shapes.map(_.mass).sum else Double.PositiveInfinity
  /**Returns the moment of inertia for rotations about the COM of this body.
   * It is calculated using the moments of inertia of this body's shapes and the parallel axis theorem.
   * If the body is fixed, its moment of inertia is infinite (`Double.PositiveInfinity`).
   * @return moment of inertia for rotations about the center of mass of this body*/
  lazy val inertia: Double = if (!fixed) shapes.map((s: Shape) => s.inertia + s.mass * (s.local.get dot s.local.get)).sum else Double.PositiveInfinity
  /**Applies a force to the center of mass of this body.
   * @param force applied force*/
  def applyForce(force: Vector2D) = if (!fixed) this.force += force
  /**Applies a force to a point on this body. The point is considered to be contained within this body.
   * @param force applied force
   * @param point position vector of the point (in world coordinates)*/
  def applyForce(force: Vector2D, point: Vector2D) = if (!fixed) {this.force += force; torque += (point - position) cross force}
  /**Applies a torque to the center of mass.*/
  def applyTorque(torque: Double) = if (!fixed) this.torque += torque
  /**Applies an impulse to the center of mass of this body.
   * @param impulse applied impulse*/
  def applyImpulse(impulse: Vector2D) = if (!fixed) linearVelocity += impulse / mass
  /**Applies an impulse to a point on this body. The point is considered to be contained within this body.
   * @param impulse applied impulse
   * @param point position vector of the point (in world coordinates)*/
  def applyImpulse(impulse: Vector2D, point: Vector2D) = if (!fixed) {linearVelocity += impulse / mass; angularVelocity += ((point - position) cross impulse) / inertia}
  /**Applies an angular impulse to the center of mass.*/
  def applyAngularImpulse(impulse: Double) = if (!fixed) angularVelocity += impulse / inertia
  /**Linear velocity of the given point on this body (in world coordinates).*/
  def velocityOfPoint(point: Vector2D) = linearVelocity + (angularVelocity cross (point - position))
  /**Linear momentum.*/
  def linearMomentum = linearVelocity * mass
  // Constructor epilogue: record each shape's offset from the COM and back-link it.
  for (s0 <- shapes0) {
    s0.local = Some(s0.position - _position)
    s0.body = this
  }
  /** True if any of this body's shapes contains the given world-coordinate point. */
  def contains(point: Vector2D) = shapes.exists(_.contains(point))
  /** Human-readable dump of this body's state, for debugging. */
  def info = {
    "Body@" + hashCode + "(" + this.getClass() + ")\\n" +
    "\\tPosition: " + position + "\\n" +
    "\\tRotation: " + rotation + "\\n" +
    "\\tLinear velocity: " + linearVelocity + "\\n" +
    "\\tAngular velocity: " + angularVelocity + "\\n" +
    "\\tForce: " + force + "\\n" +
    "\\tTorque: " + torque + "\\n" +
    "\\tMass: " + mass + "\\n" +
    "\\tInertia: " + inertia + "\\n" +
    "\\tFixed: " + fixed + "\\n" +
    "\\tShape count" + shapes.length
  }
}
} | jodersky/sims2 | src/main/scala/sims/dynamics/Body.scala | Scala | bsd-3-clause | 4,351 |
package model
import skinny.orm._, feature._
import scalikejdbc._
import org.joda.time._
/** A row of the `members` table (see the mapper in the companion object).
  * Timestamps are managed by TimestampsFeature on create/update. */
case class Member(
  id: Long,                          // primary key
  name: String,
  activated: Boolean,
  luckyNumber: Option[Long] = None,  // nullable column
  birthday: Option[LocalDate] = None, // nullable column
  createdAt: DateTime,               // set automatically on insert
  updatedAt: DateTime                // refreshed automatically on update
)
/** Skinny ORM CRUD mapper for the `members` table, with automatic
  * createdAt/updatedAt handling via TimestampsFeature. */
object Member extends SkinnyCRUDMapper[Member] with TimestampsFeature[Member] {

  override lazy val tableName = "members"
  override lazy val defaultAlias = createAlias("m")

  // NOTE: #autoConstruct(rs, rn) could replace this hand-written extractor
  // (skipping association fields by name where needed); the explicit form is
  // kept so every column-to-field mapping stays visible.
  override def extract(rs: WrappedResultSet, rn: ResultName[Member]): Member =
    Member(
      id = rs.get(rn.id),
      name = rs.get(rn.name),
      activated = rs.get(rn.activated),
      luckyNumber = rs.get(rn.luckyNumber),
      birthday = rs.get(rn.birthday),
      createdAt = rs.get(rn.createdAt),
      updatedAt = rs.get(rn.updatedAt)
    )
}
| ijufumi/demo-scala | skinny-blank-app/src/main/scala/model/Member.scala | Scala | mit | 1,376 |
/*
* Copyright 2015 RONDHUIT Co.,LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.File
import org.apache.lucene.analysis.standard.StandardTokenizerFactory
import org.nlp4l.colloc._
import scala.io._
import scalax.file.Path
import scalax.file.PathSet
// Index location for the collocational-analysis model built from the Brown corpus.
val index = "/tmp/index-brown-colloc"
// delete existing Lucene index so each run starts from a clean model
val p = Path(new File(index))
p.deleteRecursively()
// create Collocational Analysis model index from the Brown corpus files
val c: PathSet[Path] = Path("corpora", "brown", "brown").children()
val indexer = CollocationalAnalysisModelIndexer(index, new StandardTokenizerFactory(new java.util.HashMap[String, String]()))
// Brown corpus data files end in a digit (e.g. ca01); skip README etc.
c.filter{ e =>
  val s = e.name
  val c = s.charAt(s.length - 1)
  c >= '0' && c <= '9'
}.foreach{ f =>
  val source = Source.fromFile(f.path, "UTF-8")
  // Each non-empty line holds word/POS pairs; keep only the word half and
  // index the line as one whitespace-joined document.
  source.getLines().map(_.trim).filter(_.length > 0).foreach { g =>
    val pairs = g.split("\\\\s+")
    val doc = pairs.map{h => h.split("/")}.filter{_.length==2}.map(_(0)).mkString(" ")
    indexer.addDocument(doc)
  }
}
indexer.close()
// read the model index back for querying
val model = CollocationalAnalysisModel(index)
val WORD = "found"
println("\\n=== print surrounding of the word %s ===".format(WORD))
// Top-10 collocates per relative position around WORD.
val result = model.collocationalWordsStats(WORD, 10)
/** Formats the i-th (word, count) entry of slot `pos`, or "" when absent. */
def arrangedString(data: Seq[Seq[(String, Long)]], i: Int, pos: Int): String = {
  if(data.size > i && data(i).size > pos) "%10s(%2d)".format(data(i)(pos)._1, data(i)(pos)._2) else ""
}
// Print a table: three positions before WORD, WORD itself, three positions after.
// NOTE(review): slot indices (5,4,3 | 0,1,2) assume result orders slots as
// after-positions first then before-positions -- confirm against the model API.
for(i <- 0 to 9){
  println("|%14s|%14s|%14s|%10s|%14s|%14s|%14s|".format(
    arrangedString(result, 5, i), arrangedString(result, 4, i), arrangedString(result, 3, i),
    if(i == 0) WORD else "",
    arrangedString(result, 0, i), arrangedString(result, 1, i), arrangedString(result, 2, i)
  ))
}
| gazimahmud/nlp4l | examples/colloc_analysis_brown.scala | Scala | apache-2.0 | 2,237 |
package mesosphere.marathon.state
import mesosphere.marathon.StoreCommandFailedException
import mesosphere.marathon.metrics.{ MetricPrefixes, Metrics }
import mesosphere.util.{ LockManager, ThreadPoolContext }
import mesosphere.util.state.PersistentStore
import mesosphere.marathon.metrics.Metrics.Histogram
import org.slf4j.LoggerFactory
import scala.concurrent.Future
import scala.reflect.ClassTag
import scala.util.control.NonFatal
/** EntityStore backed by a generic PersistentStore.
  *
  * Values are persisted as protobuf bytes under `prefix + key`. Mutating
  * operations (modify/expunge) are serialized per key through a LockManager;
  * fetch and names are NOT serialized and may observe concurrent updates.
  * Read/write payload sizes are reported to the given Metrics histograms.
  *
  * @param store    underlying persistent key/value store
  * @param metrics  metrics registry for payload-size histograms
  * @param newState factory for an empty state, used when a key does not exist yet
  * @param prefix   namespace prepended to every key
  */
class MarathonStore[S <: MarathonState[_, S]](
  store: PersistentStore,
  metrics: Metrics,
  newState: () => S,
  prefix: String)(implicit ct: ClassTag[S]) extends EntityStore[S] {
  import ThreadPoolContext.context
  // NOTE(review): `log` appears unused within this class.
  private[this] val log = LoggerFactory.getLogger(getClass)
  private[this] lazy val lockManager = LockManager.create()
  protected[this] def metricsPrefix = MetricPrefixes.SERVICE
  // Histograms of serialized payload sizes, named per concrete state type.
  protected[this] val bytesRead: Histogram =
    metrics.histogram(metrics.name(metricsPrefix, getClass, s"${ct.runtimeClass.getSimpleName}.read-data-size"))
  protected[this] val bytesWritten: Histogram =
    metrics.histogram(metrics.name(metricsPrefix, getClass, s"${ct.runtimeClass.getSimpleName}.write-data-size"))
  /** Loads and deserializes the value for `key`, or None if absent. */
  def fetch(key: String): Future[Option[S]] = {
    store.load(prefix + key)
      .map {
        _.map { entity =>
          bytesRead.update(entity.bytes.length)
          stateFromBytes(entity.bytes.toArray)
        }
      }
      .recover(exceptionTransform(s"Could not fetch ${ct.runtimeClass.getSimpleName} with key: $key"))
  }
  /** Atomically (per key) applies `f` to the stored value -- or to a fresh
    * newState() if absent -- persists the result, and returns it. */
  def modify(key: String)(f: Update): Future[S] = lockManager.executeSequentially(key) {
    val res = store.load(prefix + key).flatMap {
      case Some(entity) =>
        bytesRead.update(entity.bytes.length)
        // f receives a thunk so deserialization only happens if f forces it.
        val updated = f(() => stateFromBytes(entity.bytes.toArray))
        val updatedEntity = entity.withNewContent(updated.toProtoByteArray)
        bytesWritten.update(updatedEntity.bytes.length)
        store.update(updatedEntity)
      case None =>
        val created = f(() => newState()).toProtoByteArray
        bytesWritten.update(created.length)
        store.create(prefix + key, created)
    }
    res
      .map { entity => stateFromBytes(entity.bytes.toArray) }
      .recover(exceptionTransform(s"Could not modify ${ct.runtimeClass.getSimpleName} with key: $key"))
  }
  /** Deletes the value for `key`, serialized per key with modify. */
  def expunge(key: String): Future[Boolean] = lockManager.executeSequentially(key) {
    store.delete(prefix + key)
      .recover(exceptionTransform(s"Could not expunge ${ct.runtimeClass.getSimpleName} with key: $key"))
  }
  /** Lists all keys in this store's namespace, with the prefix stripped. */
  def names(): Future[Seq[String]] = {
    store.allIds()
      .map {
        _.collect {
          case name: String if name startsWith prefix => name.replaceFirst(prefix, "")
        }
      }
      .recover(exceptionTransform(s"Could not list names for ${ct.runtimeClass.getSimpleName}"))
  }
  // Wraps any non-fatal failure in a StoreCommandFailedException with context.
  private[this] def exceptionTransform[T](errorMessage: String): PartialFunction[Throwable, T] = {
    case NonFatal(ex) => throw new StoreCommandFailedException(errorMessage, ex)
  }
  // Deserializes protobuf bytes by merging into a fresh empty state.
  private def stateFromBytes(bytes: Array[Byte]): S = {
    newState().mergeFromProto(bytes)
  }
}
| EasonYi/marathon | src/main/scala/mesosphere/marathon/state/MarathonStore.scala | Scala | apache-2.0 | 3,111 |
package scala.meta
package internal
package tokenizers
import scala.annotation.{ switch, tailrec }
import scala.collection.mutable
import Chars._
import LegacyToken._
import scala.meta.inputs._
import scala.meta.tokenizers.TokenizeException
class LegacyScanner(input: Input, dialect: Dialect) {
val reporter: Reporter = Reporter(input)
val curr: LegacyTokenData = new LegacyTokenData {}
val next: LegacyTokenData = new LegacyTokenData {}
val prev: LegacyTokenData = new LegacyTokenData {}
val reader: CharArrayReader = new CharArrayReader(input, dialect, reporter)
import curr._, reader._, reporter._
curr.input = this.input
next.input = this.input
prev.input = this.input
private def isDigit(c: Char) = java.lang.Character isDigit c
private var openComments = 0
protected def putCommentChar(): Unit = nextChar()
/** Consumes characters up to (not including) end-of-line or end-of-input.
 *  A '$' inside a single-line comment is rejected when it would start an
 *  unquote (getDollar presumably reports whether '$' is plain text here --
 *  TODO confirm against its definition elsewhere in this class). */
@tailrec private def skipLineComment(): Unit = ch match {
  case SU | CR | LF =>
  case '$' if !getDollar() => syntaxError("can't unquote into single-line comments", at = charOffset - 1)
  case _ => nextChar() ; skipLineComment()
}
/** Consumes a '/' inside a block comment; if it begins a nested "/*" opener,
 *  also consumes the '*' and increments the open-comment depth. */
private def maybeOpen() {
  putCommentChar()
  if (ch == '*') {
    putCommentChar()
    openComments += 1
  }
}
/** Consumes a '*' inside a block comment; if followed by '/', closes one
 *  nesting level. Returns true only when the outermost comment was closed
 *  (depth reached zero). */
private def maybeClose(): Boolean = {
  putCommentChar()
  (ch == '/') && {
    putCommentChar()
    openComments -= 1
    openComments == 0
  }
}
/** Consumes the body of a (possibly nested) block comment, tracking depth via
 *  maybeOpen/maybeClose. Errors on EOF (unclosed) and on '$' when it would
 *  start an unquote inside the comment. */
@tailrec final def skipNestedComments(): Unit = ch match {
  case '/' => maybeOpen() ; skipNestedComments()
  case '*' => if (!maybeClose()) skipNestedComments()
  case SU  => incompleteInputError("unclosed comment", at = offset)
  case '$' if !getDollar() => syntaxError("can't unquote into multi-line comments", at = charOffset - 1)
  case _ => putCommentChar() ; skipNestedComments()
}
def skipDocComment(): Unit = skipNestedComments()
def skipBlockComment(): Unit = skipNestedComments()
/** Dispatches comment skipping after the leading "//" or "/*" has been seen:
 *  line comments go to skipLineComment; block comments initialize the nesting
 *  depth and are classified as doc comments when they start with "/**". */
private def skipToCommentEnd(isLineComment: Boolean) {
  nextChar()
  if (isLineComment) skipLineComment()
  else {
    openComments = 1
    val isDocComment = (ch == '*') && { nextChar(); true }
    if (isDocComment) {
      // Check for the amazing corner case of /**/ -- here the second '*' is
      // actually the start of the closer, not a doc-comment marker.
      if (ch == '/')
        nextChar()
      else
        skipDocComment()
    }
    else skipBlockComment()
  }
}
/** Precondition: ch == '/'
* Returns true if a comment was skipped.
*/
def skipComment(): Boolean = ch match {
case '/' | '*' => skipToCommentEnd(isLineComment = ch == '/') ; true
case _ => false
}
def flushDoc(): Unit = ()
/** To prevent doc comments attached to expressions from leaking out of scope
* onto the next documentable entity, they are discarded upon passing a right
* brace, bracket, or parenthesis.
*/
def discardDocBuffer(): Unit = ()
def isAtEnd = charOffset >= buf.length
def resume(lastCode: LegacyToken) = {
token = lastCode
if (next.token != EMPTY)
syntaxError("unexpected end of input: possible missing '}' in XML block", at = offset)
nextToken()
}
/** A character buffer for literals
*/
val cbuf = new StringBuilder
/** append Unicode character to "cbuf" buffer
*/
protected def putChar(c: Char) {
// assert(cbuf.size < 10000, cbuf)
cbuf.append(c)
}
/** Determines whether this scanner should emit identifier deprecation warnings,
* e.g. when seeing `macro' or `then', which are planned to become keywords in future versions of Scala.
*/
protected def emitIdentifierDeprecationWarnings = true
/** Clear buffer and set name and token.
 *  If the accumulated identifier is a reserved word, the token is remapped
 *  via kw2legacytoken. Soft keywords that map back to IDENTIFIER trigger a
 *  deprecation warning, and `enum` is only treated as a keyword when the
 *  dialect allows enums. */
private def finishNamed(idtoken: LegacyToken = IDENTIFIER) {
  name = cbuf.toString
  cbuf.clear()
  token = idtoken
  if (idtoken == IDENTIFIER) {
    if (kw2legacytoken contains name) {
      token = kw2legacytoken(name)
      if (token == IDENTIFIER && emitIdentifierDeprecationWarnings)
        deprecationWarning(s"$name is now a reserved word; usage as an identifier is deprecated", at = token)
      if (token == ENUM && !dialect.allowEnums)
        token = IDENTIFIER
    }
  }
}
/* much like endOffset, end is inclusive */
/** Finishes a variable-width token spanning [offset, end]: records the token
 *  kind, captures its raw text from the input, and repositions the reader
 *  just past `end`. */
private def finishComposite(token: LegacyToken, end: Offset) {
  val start = offset
  curr.token = token
  curr.strVal = new String(input.chars, start, end - start + 1)
  curr.endOffset = end
  reader.charOffset = end + 1
  reader.nextChar()
}
/** Clear buffer and set string */
private def setStrVal() {
strVal = cbuf.toString
cbuf.clear()
}
/** a stack of tokens which indicates whether line-ends can be statement separators
* also used for keeping track of nesting levels.
* We keep track of the closing symbol of a region. This can be
* RPAREN if region starts with '('
* RBRACKET if region starts with '['
* RBRACE if region starts with '{'
* ARROW if region starts with `case'
* STRINGLIT if region is a string interpolation expression starting with '${'
* (the STRINGLIT appears twice in succession on the stack iff the
* expression is a multiline string literal).
*/
var sepRegions: List[LegacyToken] = List()
/**
* A map of upcoming xml literal parts that are left to be returned in nextToken().
*
* The keys are offset start positions of an xml literal and the values are
* the respective offset end positions and a boolean indicating if the part
* is the last part.
*/
val upcomingXmlLiteralParts = mutable.Map.empty[Offset, (Offset, Boolean)]
// Get next token ------------------------------------------------------------
/** Are we directly in a string interpolation expression?
*/
private def inStringInterpolation =
sepRegions.nonEmpty && sepRegions.head == STRINGLIT
private def inXmlLiteral: Boolean = {
upcomingXmlLiteralParts.contains(offset)
}
/** Are we directly in a multiline string interpolation expression?
* @pre inStringInterpolation
*/
private def inMultiLineInterpolation =
inStringInterpolation && sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART
/** read next token and return last offset
*/
def skipToken(): Offset = {
val off = offset
nextToken()
off
}
/** Produce next token, filling curr TokenData fields of Scanner.
*/
def nextToken(): Unit = {
val lastToken = token
// Adapt sepRegions according to last token
(lastToken: @switch) match {
case LPAREN =>
sepRegions = RPAREN :: sepRegions
case LBRACKET =>
sepRegions = RBRACKET :: sepRegions
case LBRACE =>
sepRegions = RBRACE :: sepRegions
case CASE =>
sepRegions = ARROW :: sepRegions
case RBRACE =>
while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
sepRegions = sepRegions.tail
if (!sepRegions.isEmpty)
sepRegions = sepRegions.tail
discardDocBuffer()
case RBRACKET | RPAREN =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
discardDocBuffer()
case ARROW =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
case STRINGLIT =>
if (inMultiLineInterpolation)
sepRegions = sepRegions.tail.tail
else if (inStringInterpolation)
sepRegions = sepRegions.tail
case _ =>
}
// Read a token or copy it from `next` tokenData
if (next.token == EMPTY) {
lastOffset = charOffset - 1
if (lastOffset > 0 && buf(lastOffset) == '\\n' && buf(lastOffset - 1) == '\\r') {
lastOffset -= 1
}
fetchToken()
if(token == ERROR) {
if (inMultiLineInterpolation)
sepRegions = sepRegions.tail.tail
else if (inStringInterpolation)
sepRegions = sepRegions.tail
}
} else {
curr copyFrom next
next.token = EMPTY
}
// NOTE: endOffset is used to determine range positions for certain tokens.
// Most tokens (e.g. `(' or `;') have constant widths, so their range positions can be calculated trivially from their offsets,
// however some tokens have variable widths,
// and for them we need to remember where their parsing ended in order to calculate their positions.
// That's what endOffset does (indirectly): each token's position should be [curr.offset, curr.endOffset]
//
// Now how do we calculate endOffset?
// 1) What we have at hand is `charOffset`, which is the position right after the position of the character that's just been read.
// 2) This means that `charOffset - 1` is the position of the character that's just been read.
// 3) Since reading that character terminated fetchToken, this means that that character is the first character of the next token.
// 4) This means that `charOffset - 2` is where the last character of the our current token lies.
//
// The only corner case here is EOF. In that case the virtual position of the character that's just been read (or, more precisely,
// that's been attempted to be read) seems to be `buf.length`, but some other logic in the scanner suggests that sometimes it can even
// be `buf.length + 1` or more. Therefore, we don't bother ourselves with doing decrements and just assign endOffset to be `buf.length - 1`.
//
// upd. Speaking of corner cases, positions of tokens emitted by string interpolation tokenizers are simply insane,
// and need to be reverse engineered having some context (previous tokens, number of quotes in the interpolation) in mind.
// Therefore I don't even attempt to handle them here, and instead apply fixups elsewhere when converting legacy TOKENS into new LegacyToken instances.
if (curr.token != STRINGPART) { // endOffset of STRINGPART tokens is set elsewhere
curr.endOffset = charOffset - 2
if (charOffset >= buf.length && ch == SU) curr.endOffset = buf.length - 1
}
}
/** Is current token first one after a newline?
* Holds when at least one line break lies between the end of the previous
* token (lastOffset) and the start of the current one (offset): either the
* current line started after the previous token ended, or the previous
* token ended before the previous line started.
*/
private def afterLineEnd(): Boolean =
lastOffset < lineStartOffset &&
(lineStartOffset <= offset ||
lastOffset < lastLineStartOffset && lastLineStartOffset <= offset)
/** Is there a blank line between the current token and the last one?
* Scans the raw buffer from the end of the previous token (lastOffset) to
* the start of the current one (offset), looking for two line breaks
* separated only by whitespace (chars <= ' ').
* @pre afterLineEnd().
*/
private def pastBlankLine(): Boolean = {
var idx = lastOffset
var ch = buf(idx)
val end = offset
while (idx < end) {
if (ch == LF || ch == FF) {
// Saw one line break: skip whitespace; a second break means a blank line.
do {
idx += 1; ch = buf(idx)
if (ch == LF || ch == FF) {
return true
}
if (idx == end) return false
} while (ch <= ' ')
}
idx += 1; ch = buf(idx)
}
false
}
/** read next token, filling TokenData fields of Scanner.
*/
protected final def fetchToken() {
offset = charOffset - 1
if (inStringInterpolation) return fetchStringPart()
else if (inXmlLiteral) return fetchXmlPart()
(ch: @switch) match {
case ' ' | '\\t' | CR | LF | FF =>
token = WHITESPACE
strVal = ch.toString
nextChar()
//nextToken()
case 'A' | 'B' | 'C' | 'D' | 'E' |
'F' | 'G' | 'H' | 'I' | 'J' |
'K' | 'L' | 'M' | 'N' | 'O' |
'P' | 'Q' | 'R' | 'S' | 'T' |
'U' | 'V' | 'W' | 'X' | 'Y' |
'Z' | '$' | '_' |
'a' | 'b' | 'c' | 'd' | 'e' |
'f' | 'g' | 'h' | 'i' | 'j' |
'k' | 'l' | 'm' | 'n' | 'o' |
'p' | 'q' | 'r' | 's' | 't' |
'u' | 'v' | 'w' | 'x' | 'y' | // scala-mode: need to understand multi-line case patterns
'z' =>
if (ch == '$' && !getDollar()) {
getUnquote()
} else {
putChar(ch)
nextChar()
getIdentRest()
if (ch == '"' && token == IDENTIFIER)
token = INTERPOLATIONID
}
case '<' => // is XMLSTART?
def fetchLT() = {
val last = if (charOffset >= 2) buf(charOffset - 2) else ' '
nextChar()
last match {
case ' ' | '\\t' | '\\n' | '{' | '(' | '>' if isNameStart(ch) || ch == '!' || ch == '?' =>
if (dialect.allowXmlLiterals) getXml()
else syntaxError("xml literals are not supported", at = offset)
case _ =>
// Console.println("found '<', but last is '"+in.last+"'"); // DEBUG
putChar('<')
getOperatorRest()
}
}
fetchLT()
case '~' | '!' | '@' | '#' | '%' |
'^' | '*' | '+' | '-' | /*'<' | */
'>' | '?' | ':' | '=' | '&' |
'|' | '\\\\' =>
putChar(ch)
nextChar()
getOperatorRest()
case '/' =>
nextChar()
if (skipComment()) {
token = COMMENT
} else {
putChar('/')
getOperatorRest()
}
case '0' =>
def fetchZero() = {
putChar(ch)
nextChar()
if (ch == 'x' || ch == 'X') {
putChar(ch)
nextChar()
base = 16
} else {
base = 8
}
getNumber()
}
fetchZero()
case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
base = 10
getNumber()
case '`' =>
getBackquotedIdent()
case '\\"' =>
def fetchDoubleQuote(): Unit = {
if (token == INTERPOLATIONID) {
nextRawChar()
if (ch == '\\"') {
val lookahead = lookaheadReader
lookahead.nextChar()
if (lookahead.ch == '\\"') {
nextRawChar() // now eat it
offset += 3
nextRawChar()
getStringPart(multiLine = true)
sepRegions = STRINGPART :: sepRegions // indicate string part
sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part
} else {
nextChar()
token = STRINGLIT
strVal = ""
}
} else {
offset += 1
getStringPart(multiLine = false)
sepRegions = STRINGLIT :: sepRegions // indicate single line string part
}
} else {
nextChar()
if (ch == '\\"') {
nextChar()
if (ch == '\\"') {
nextRawChar()
getRawStringLit()
} else {
token = STRINGLIT
strVal = ""
}
} else {
getStringLit()
}
}
}
fetchDoubleQuote()
case '\\'' =>
def fetchSingleQuote() = {
nextChar()
if (ch == '$' && !getDollar())
syntaxError("can't unquote into character literals", at = charOffset - 1)
else if (isIdentifierStart(ch))
charLitOr(getIdentRest _)
else if (isOperatorPart(ch) && (ch != '\\\\'))
charLitOr(getOperatorRest _)
else {
getLitChar()
if (ch == '\\'') {
nextChar()
token = CHARLIT
setStrVal()
} else {
syntaxError("unclosed character literal", at = offset)
}
}
}
fetchSingleQuote()
case '.' =>
nextChar()
if ('0' <= ch && ch <= '9') {
putChar('.'); getFraction()
} else if (dialect.allowUnquotes && ch == '.') {
base = 0
while (ch == '.') {
base += 1
nextChar()
}
token = ELLIPSIS
} else {
token = DOT
}
case ';' =>
nextChar(); token = SEMI
case ',' =>
nextChar(); token = COMMA
case '(' =>
nextChar(); token = LPAREN
case '{' =>
nextChar(); token = LBRACE
case ')' =>
nextChar(); token = RPAREN
case '}' =>
// save the end of the current token (exclusive) in case nextChar
// advances the offset more than once. See UnicodeEscapeSuite for a
// and https://github.com/scalacenter/scalafix/issues/593 for
// an example why this this is necessary.
val end = charOffset + 1
nextChar(); token = RBRACE
// restore the charOffset to the saved position
if (end < buf.length) charOffset = end
case '[' =>
nextChar(); token = LBRACKET
case ']' =>
nextChar(); token = RBRACKET
case SU =>
if (isAtEnd) {
// NOTE: sometimes EOF's offset is `input.chars.length - 1`, and that might mess things up
offset = input.chars.length
token = EOF
} else {
syntaxError("illegal character", at = offset)
nextChar()
}
case _ =>
def fetchOther() = {
if (ch == '\\u21D2') {
nextChar(); token = ARROW
} else if (ch == '\\u2190') {
nextChar(); token = LARROW
} else if (Character.isUnicodeIdentifierStart(ch)) {
putChar(ch)
nextChar()
getIdentRest()
} else if (isSpecial(ch)) {
putChar(ch)
nextChar()
getOperatorRest()
} else {
syntaxError("illegal character '" + ("" + '\\\\' + 'u' + "%04x".format(ch.toInt)) + "'", at = offset)
nextChar()
}
}
fetchOther()
}
}
// Identifiers ---------------------------------------------------------------
/** Scan a backquoted identifier; the opening '`' is the current character.
* Emits BACKQUOTED_IDENT on success; reports an error for an empty,
* unquoted-into, or unclosed identifier.
*/
private def getBackquotedIdent() {
nextChar()
getLitChars('`')
if (ch == '`') {
nextChar()
finishNamed(BACKQUOTED_IDENT)
if (name.length == 0) syntaxError("empty quoted identifier", at = offset)
} else if (ch == '$') {
syntaxError("can't unquote into quoted identifiers", at = charOffset - 1)
} else {
syntaxError("unclosed quoted identifier", at = offset)
}
}
private def getIdentRest(): Unit = (ch: @switch) match {
case 'A' | 'B' | 'C' | 'D' | 'E' |
'F' | 'G' | 'H' | 'I' | 'J' |
'K' | 'L' | 'M' | 'N' | 'O' |
'P' | 'Q' | 'R' | 'S' | 'T' |
'U' | 'V' | 'W' | 'X' | 'Y' |
'Z' | '$' |
'a' | 'b' | 'c' | 'd' | 'e' |
'f' | 'g' | 'h' | 'i' | 'j' |
'k' | 'l' | 'm' | 'n' | 'o' |
'p' | 'q' | 'r' | 's' | 't' |
'u' | 'v' | 'w' | 'x' | 'y' |
'z' |
'0' | '1' | '2' | '3' | '4' |
'5' | '6' | '7' | '8' | '9' =>
if (ch == '$' && !getDollar()) { finishNamed(); return }
putChar(ch)
nextChar()
getIdentRest()
case '_' =>
putChar(ch)
nextChar()
getIdentOrOperatorRest()
case SU => // strangely enough, Character.isUnicodeIdentifierPart(SU) returns true!
finishNamed()
case _ =>
if (Character.isUnicodeIdentifierPart(ch)) {
putChar(ch)
nextChar()
getIdentRest()
} else {
finishNamed()
}
}
/** Scan the remainder of a symbolic operator; previously read characters are
* already in cbuf. A '/' only continues the operator when it does not begin
* a comment.
*/
private def getOperatorRest(): Unit = (ch: @switch) match {
case '~' | '!' | '@' | '#' | '%' |
'^' | '*' | '+' | '-' | '<' |
'>' | '?' | ':' | '=' | '&' |
'|' | '\\\\' =>
putChar(ch); nextChar(); getOperatorRest()
case '/' =>
// Lookahead: "//" or "/*" starts a comment, which terminates the operator.
val lookahead = lookaheadReader
lookahead.nextChar()
if (lookahead.ch == '/' || lookahead.ch == '*') {
finishNamed()
} else {
putChar('/')
nextChar()
getOperatorRest()
}
case _ =>
if (isSpecial(ch)) { putChar(ch); nextChar(); getOperatorRest() }
else finishNamed()
}
/** After a trailing '_' in an identifier: continue as an identifier if an
* identifier character follows, continue as an operator if an operator
* character follows (e.g. `foo_+`), otherwise finish the name.
*/
private def getIdentOrOperatorRest() {
if (isIdentifierPart(ch))
getIdentRest()
else ch match {
case '~' | '!' | '@' | '#' | '%' |
'^' | '*' | '+' | '-' | '<' |
'>' | '?' | ':' | '=' | '&' |
'|' | '\\\\' | '/' =>
getOperatorRest()
case _ =>
if (isSpecial(ch)) getOperatorRest()
else finishNamed()
}
}
/** Handle a '$' in a dialect that may treat it as an unquote marker.
*
* Returns true if the '$' was consumed as a regular character: either
* unquotes are disabled in this dialect, or a doubled "$$" escape was found
* and the first dollar was skipped. Returns false if the caller must switch
* into unquote-reading mode.
*
* (Rewritten without explicit `return`s; behavior is unchanged.)
*/
private def getDollar(): Boolean = {
  if (!dialect.allowUnquotes) true
  else {
    val lookahead = lookaheadReader
    lookahead.nextChar()
    if (lookahead.ch == '$') {
      // "$$" is an escape: skip the first dollar and move on to whatever
      // we've been doing — starting or continuing tokenization of an
      // identifier, or continuing reading a string literal, or whatever.
      nextChar()
      true
    } else {
      // A lone '$' with unquotes enabled: our caller should know what to do.
      false
    }
  }
}
// Literals -----------------------------------------------------------------
/** Scan a single-line string literal after the opening '"'.
* On success sets STRINGLIT; a bare '$' (unquote) or an unterminated
* literal reports an error.
*/
private def getStringLit() = {
getLitChars('"')
if (ch == '"') {
setStrVal()
nextChar()
token = STRINGLIT
} else if (ch == '$') {
syntaxError("can't unquote into string literals", at = charOffset - 1)
} else {
syntaxError("unclosed string literal", at = offset)
}
}
/** Scan the body of a raw (triple-quoted) string literal, accumulating
* characters until a closing triple quote. EOF means the literal is
* unterminated; a lone '$' in unquote mode is an error.
*/
private def getRawStringLit(): Unit = {
if (ch == '\\"') {
nextRawChar()
if (isTripleQuote()) {
setStrVal()
token = STRINGLIT
} else
getRawStringLit()
} else if (ch == SU) {
incompleteInputError("unclosed multi-line string literal", at = offset)
} else if (ch == '$' && !getDollar()) {
syntaxError("can't unquote into string literals", at = charOffset - 1)
} else {
putChar(ch)
nextRawChar()
getRawStringLit()
}
}
@scala.annotation.tailrec private def getStringPart(multiLine: Boolean): Unit = {
def finishStringPart() = {
setStrVal()
token = STRINGPART
next.lastOffset = charOffset - 1
next.offset = charOffset - 1
}
if (ch == '"') {
if (multiLine) {
nextRawChar()
if (isTripleQuote()) {
setStrVal()
token = STRINGLIT
} else
getStringPart(multiLine)
} else {
nextChar()
setStrVal()
token = STRINGLIT
}
} else if (ch == '$') {
if (!getDollar()) {
syntaxError("can't unquote into string interpolations", at = charOffset - 1)
} else {
nextRawChar()
if (ch == '$') {
putChar(ch)
nextRawChar()
getStringPart(multiLine)
} else if (ch == '{') {
finishStringPart()
endOffset = charOffset - 3
nextRawChar()
next.token = LBRACE
} else if (ch == '_' && dialect.allowSpliceUnderscores) {
finishStringPart()
endOffset = charOffset - 3
nextRawChar()
next.token = USCORE
} else if (Character.isUnicodeIdentifierStart(ch)) {
finishStringPart()
endOffset = charOffset - 3
do {
putChar(ch)
nextRawChar()
} while (ch != SU && Character.isUnicodeIdentifierPart(ch))
next.token = IDENTIFIER
next.name = cbuf.toString
cbuf.clear()
if (kw2legacytoken contains next.name) {
next.token = kw2legacytoken(next.name)
if (next.token == ENUM && !dialect.allowEnums)
next.token = IDENTIFIER
if (next.token != IDENTIFIER && next.token != THIS)
syntaxError("invalid unquote: `$'ident, `$'BlockExpr, `$'this or `$'_ expected", at = offset)
}
} else {
var supportedCombos = List("`$$'", "`$'ident", "`$'this", "`$'BlockExpr")
if (dialect.allowSpliceUnderscores) supportedCombos = supportedCombos :+ "`$'_"
val s_supportedCombos = supportedCombos.init.mkString(", ") + supportedCombos.last
syntaxError(s_supportedCombos, at = offset)
}
}
} else {
val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF)))
if (isUnclosedLiteral) {
if (multiLine)
incompleteInputError("unclosed multi-line string interpolation", at = offset)
else
syntaxError("unclosed string interpolation", at = offset)
}
else {
putChar(ch)
nextRawChar()
getStringPart(multiLine)
}
}
}
/** Emit the xml literal part that starts at the current offset (recorded by
* getXml) as one XMLLIT composite token, queueing XMLLITEND after the final
* part. `end - 1` because finishComposite takes an inclusive end offset.
*/
private def fetchXmlPart(): Unit = {
require(inXmlLiteral, "must be at the start of an xml literal part")
val (end, isLastPart) = upcomingXmlLiteralParts(offset)
finishComposite(XMLLIT, end - 1)
if (isLastPart) {
next.token = XMLLITEND
}
// Clean up map, should be empty at EOF.
upcomingXmlLiteralParts.remove(offset)
}
/** Resume scanning inside a string interpolation; whether it is multiline
* is recovered from the region stack. */
private def fetchStringPart(): Unit =
getStringPart(multiLine = inMultiLineInterpolation)
/** Called after one '"' has been consumed inside a raw string: check whether
* two more quotes follow, i.e. whether this closes a triple-quoted string.
* Extra quotes beyond the closing three are appended to the buffer as
* literal content. If fewer than three quotes are present, the consumed
* quotes are pushed into the buffer as ordinary characters.
*/
private def isTripleQuote(): Boolean =
if (ch == '"') {
nextRawChar()
if (ch == '"') {
nextChar()
// Quotes beyond the third belong to the literal's content.
while (ch == '"') {
putChar('"')
nextChar()
}
true
} else {
putChar('"')
putChar('"')
false
}
} else {
putChar('"')
false
}
/** copy current character into cbuf, interpreting any escape sequences,
* and advance to next character.
*/
protected def getLitChar(): Unit =
if (ch == '\\\\') {
nextChar()
if ('0' <= ch && ch <= '7') {
val start = charOffset - 2
val leadch: Char = ch
var oct: Int = digit2int(ch, 8)
nextChar()
if ('0' <= ch && ch <= '7') {
oct = oct * 8 + digit2int(ch, 8)
nextChar()
if (leadch <= '3' && '0' <= ch && ch <= '7') {
oct = oct * 8 + digit2int(ch, 8)
nextChar()
}
}
val alt = if (oct == LF) "\\\\n" else "\\\\u%04x" format oct
def msg(what: String) = s"Octal escape literals are $what, use $alt instead."
deprecationWarning(msg("deprecated"), at = start)
putChar(oct.toChar)
} else {
ch match {
case 'b' => putChar('\\b')
case 't' => putChar('\\t')
case 'n' => putChar('\\n')
case 'f' => putChar('\\f')
case 'r' => putChar('\\r')
case '\\"' => putChar('\\"')
case '\\'' => putChar('\\'')
case '\\\\' => putChar('\\\\')
case _ => invalidEscape()
}
nextChar()
}
} else if (ch == '$' && !getDollar()) {
// bail and let the caller handle this
} else {
putChar(ch)
nextChar()
}
/** Report an invalid escape sequence and keep the offending character as
* literal content so scanning can continue. */
protected def invalidEscape(): Unit = {
syntaxError("invalid escape character", at = charOffset - 1)
putChar(ch)
}
/** Accumulate literal characters until the delimiter, an unescaped line
* end / EOF, or a '$' that starts an unquote. The terminating character is
* left as the current character for the caller to inspect.
*/
private def getLitChars(delimiter: Char) = {
def stop = {
def success = ch == delimiter
def naturalBreak = (ch == SU || ch == CR || ch == LF) && !isUnicodeEscape
def unquote = ch == '$' && !getDollar()
success || naturalBreak || unquote
}
while (!isAtEnd && !stop) getLitChar()
}
/** read fractional part and exponent of floating point number
* if one is present.
*/
protected def getFraction() {
token = DOUBLELIT
while ('0' <= ch && ch <= '9') {
putChar(ch)
nextChar()
}
if (ch == 'e' || ch == 'E') {
val lookahead = lookaheadReader
lookahead.nextChar()
if (lookahead.ch == '+' || lookahead.ch == '-') {
lookahead.nextChar()
}
if ('0' <= lookahead.ch && lookahead.ch <= '9') {
putChar(ch)
nextChar()
if (ch == '+' || ch == '-') {
putChar(ch)
nextChar()
}
while ('0' <= ch && ch <= '9') {
putChar(ch)
nextChar()
}
}
token = DOUBLELIT
}
if (ch == 'd' || ch == 'D') {
putChar(ch)
nextChar()
token = DOUBLELIT
} else if (ch == 'f' || ch == 'F') {
putChar(ch)
nextChar()
token = FLOATLIT
}
checkNoLetter()
setStrVal()
}
/** After a numeric literal: a directly following identifier character
* (outside the control range) means the literal is malformed, e.g. `123abc`. */
def checkNoLetter() {
if (isIdentifierPart(ch) && ch >= ' ')
syntaxError("Invalid literal number", at = offset)
}
/** Read a number into strVal and set base
*/
protected def getNumber() {
val base1 = if (base < 10) 10 else base
// Read 8,9's even if format is octal, produce a malformed number error afterwards.
// At this point, we have already read the first digit, so to tell an innocent 0 apart
// from an octal literal 0123... (which we want to disallow), we check whether there
// are any additional digits coming after the first one we have already read.
var notSingleZero = false
while (digit2int(ch, base1) >= 0) {
putChar(ch)
nextChar()
notSingleZero = true
}
token = INTLIT
/* When we know for certain it's a number after using a touch of lookahead */
def restOfNumber() = {
putChar(ch)
nextChar()
getFraction()
}
def restOfUncertainToken() = {
def isEfd = ch match { case 'e' | 'E' | 'f' | 'F' | 'd' | 'D' => true ; case _ => false }
def isL = ch match { case 'l' | 'L' => true ; case _ => false }
if (base <= 10 && isEfd)
getFraction()
else {
// Checking for base == 8 is not enough, because base = 8 is set
// as soon as a 0 is read in `case '0'` of method fetchToken.
if (base == 8 && notSingleZero) syntaxError("Non-zero integral values may not have a leading zero.", at = offset)
if (isL) {
putChar(ch)
setStrVal()
nextChar()
token = LONGLIT
} else {
setStrVal()
checkNoLetter()
}
}
}
if (base > 10 || ch != '.')
restOfUncertainToken()
else {
val lookahead = lookaheadReader
val c = lookahead.getc()
/* Prohibit 1. */
if (!isDigit(c))
return setStrVal()
val isDefinitelyNumber = (c: @switch) match {
/** Another digit is a giveaway. */
case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' =>
true
/* Backquoted idents like 22.`foo`. */
case '`' =>
return setStrVal() /** Note the early return */
/* These letters may be part of a literal, or a method invocation on an Int.
*/
case 'd' | 'D' | 'f' | 'F' =>
!isIdentifierPart(lookahead.getc())
/* A little more special handling for e.g. 5e7 */
case 'e' | 'E' =>
val ch = lookahead.getc()
!isIdentifierPart(ch) || (isDigit(ch) || ch == '+' || ch == '-')
case x =>
!isIdentifierStart(x)
}
if (isDefinitelyNumber) restOfNumber()
else restOfUncertainToken()
}
}
/** Parse character literal if current character is followed by \\',
* or follow with given op and return a symbol literal token.
* I.e. `'x'` becomes a CHARLIT, while `'x` (no closing quote) becomes a
* SYMBOLLIT whose remaining characters are read by `op`.
*/
def charLitOr(op: () => Unit) {
putChar(ch)
nextChar()
if (ch == '\\'') {
nextChar()
token = CHARLIT
setStrVal()
} else {
op()
token = SYMBOLLIT
strVal = name.toString
}
}
/** Tokenize an xml literal starting at the current offset.
* Parses the whole literal with the fastparse-based XmlParser, records the
* offsets of every xml "part" (the stretches between embedded scala
* expressions) in upcomingXmlLiteralParts, and emits the first part.
*/
def getXml(): Unit = {
// 1. Collect positions of scala expressions inside this xml literal.
import fastparse.core.Parsed
val start = offset
val embeddedScalaExprPositions = new ScalaExprPositionParser(dialect)
val xmlParser = new XmlParser(embeddedScalaExprPositions)
val result: Int = xmlParser.XmlExpr.parse(input.text, index = start) match {
case Parsed.Success(_, endExclusive) =>
endExclusive
case Parsed.Failure(_, failIndex, extra) =>
// Type-checks as Int because syntaxError returns Nothing (always throws).
syntaxError(s"malformed xml literal, expected: ${extra.traced.expected}", at = failIndex)
}
// 2. Populate upcomingXmlLiteralParts with xml literal part positions.
var lastFrom = start
embeddedScalaExprPositions.splicePositions.foreach { pos =>
// pos contains the start and end positions of a scala expression.
// We want the range of the xml literal part which starts at lastFrom
// and ends at pos.from.
val to = pos.from - 1
upcomingXmlLiteralParts.update(lastFrom, (to, false))
lastFrom = pos.to + 1
}
// The final xml literal part is not followed by any embedded scala expr.
upcomingXmlLiteralParts.update(lastFrom, (result, true))
// 3. Return only the first xml part.
fetchXmlPart()
}
// Unquotes -----------------------------------------------------------------
def getUnquote() {
require(ch == '$')
val start = charOffset
val endInclusive = {
val exploratoryInput = Input.Slice(input, start, input.chars.length)
val exploratoryDialect = this.dialect.copy(allowTermUnquotes = false, allowPatUnquotes = false, allowMultilinePrograms = true)
val exploratoryScanner = new LegacyScanner(exploratoryInput, exploratoryDialect)
exploratoryScanner.reader.nextChar()
exploratoryScanner.nextToken()
exploratoryScanner.curr.token match {
case LBRACE =>
def loop(balance: Int): Unit = {
exploratoryScanner.nextToken()
exploratoryScanner.curr.token match {
case LBRACE =>
loop(balance + 1)
case RBRACE =>
if (balance == 1) () // do nothing, this is the end of the unquote
else loop(balance - 1)
case _ =>
loop(balance)
}
}
try {
loop(balance = 1)
} catch {
case TokenizeException(pos, message) =>
syntaxError(s"invalid unquote: $message", at = start + pos.start)
}
case IDENTIFIER | THIS | USCORE =>
// do nothing, this is the end of the unquote
case _ =>
syntaxError("invalid unquote: `$'ident, `$'BlockExpr, `$'this or `$'_ expected", at = start)
}
start + exploratoryScanner.curr.endOffset
}
finishComposite(UNQUOTE, endInclusive)
}
// Errors -----------------------------------------------------------------
/** Debug-friendly representation: just the current token. */
override def toString() = token.toString
/** Initialize scanner; call f on each scanned token data.
* Note: f receives the mutable `curr` record, which is reused and mutated
* between tokens, so callers must copy anything they want to keep.
*/
def foreach(f: LegacyTokenData => Unit) {
nextChar()
do {
nextToken()
f(curr)
} while(curr.token != EOF)
}
}
| MasseGuillaume/scalameta | scalameta/tokenizers/shared/src/main/scala/scala/meta/internal/tokenizers/LegacyScanner.scala | Scala | bsd-3-clause | 34,360 |
import scalaz.{@@, Tag}
import com.github.ghik.silencer.silent
package object agfqg {
// Zero-overhead newtype for sentence text, built with scalaz's Tag/@@.
@silent sealed abstract class _S
private type S_ = String
type SentenceText = S_ @@ _S
object SentenceText {
// Wrap a raw String as SentenceText.
@inline def apply(x: S_): SentenceText = Tag[S_, _S](x)
// Unwrap a SentenceText back to the underlying String.
@inline def apply(x: SentenceText): S_ = Tag.unwrap(x)
}
// Syntax: "some text".wrap : SentenceText / sentenceText.unwrap : String
implicit final class SentenceTextWrapSyntax(val x: S_) extends AnyVal {
@inline def wrap: SentenceText = SentenceText(x)
}
implicit final class SentenceTextUnwrapSyntax(val x: SentenceText)
extends AnyVal {
@inline def unwrap: S_ = SentenceText(x)
}
// Zero-overhead newtype for a probability/weight distribution (Seq[Double]),
// built with scalaz's Tag/@@, mirroring the SentenceText pattern above.
@silent sealed abstract class _D
private type D_ = Seq[Double]
type Distribution = D_ @@ _D
object Distribution {
// Wrap a raw Seq[Double] as Distribution.
@inline def apply(x: D_): Distribution = Tag[D_, _D](x)
// Unwrap a Distribution back to the underlying Seq[Double].
@inline def apply(x: Distribution): D_ = Tag.unwrap(x)
}
implicit final class DistributionWrapSyntax(val x: D_) extends AnyVal {
@inline def wrap: Distribution = Distribution(x)
}
implicit final class DistributionUnwrapSyntax(val x: Distribution)
extends AnyVal {
@inline def unwrap: D_ = Distribution(x)
}
/** Penn Treebank part-of-speech tags that mark nouns. */
val nounPosTags: Set[String] =
  Set("NN", "NNP", "NNPS", "NNS")
/** Penn Treebank part-of-speech tags that mark verbs. */
val verbPosTags: Set[String] =
  Set("VB", "VBD", "VBG", "VBN", "VBP", "VBZ")
// POS tags eligible to become a gap or a distractor: all noun and verb tags.
val okPosTagsForGapOrDistractor: Set[String] =
nounPosTags ++ verbPosTags
/** Set of lower-cased Strings that are known stop words. From ranks.nl.
*
* FIX: the previous newline-based split turned the two-word source lines
* "keep keeps" and "sure t" into single entries that could never match any
* token, silently dropping four stop words. Splitting on whitespace runs
* fixes that and allows the list to be written compactly.
*/
val stopWords: Set[String] =
  """a able about above abst accordance according accordingly across act
    actually added adj affected affecting affects after afterwards again against
    ah all almost alone along already also although always am
    among amongst an and announce another any anybody anyhow anymore
    anyone anything anyway anyways anywhere apparently approximately are aren arent
    arise around as aside ask asking at auth available away
    awfully b back be became because become becomes becoming been
    before beforehand begin beginning beginnings begins behind being believe below
    beside besides between beyond biol both brief briefly but by
    c ca came can cannot can't cause causes certain certainly
    co com come comes contain containing contains could couldnt d
    date did didn't different do does doesn't doing done don't
    down downwards due during e each ed edu effect eg
    eight eighty either else elsewhere end ending enough especially et
    et-al etc even ever every everybody everyone everything everywhere ex
    except f far few ff fifth first five fix followed
    following follows for former formerly forth found four from further
    furthermore g gave get gets getting give given gives giving
    go goes gone got gotten h had happens hardly has
    hasn't have haven't having he hed hence her here hereafter
    hereby herein heres hereupon hers herself hes hi hid him
    himself his hither home how howbeit however hundred i id
    ie if i'll im immediate immediately importance important in inc
    indeed index information instead into invention inward is isn't it
    itd it'll its itself i've j just k keep keeps
    kept kg km know known knows l largely last lately
    later latter latterly least less lest let lets like liked
    likely line little 'll look looking looks ltd m made
    mainly make makes many may maybe me mean means meantime
    meanwhile merely mg might million miss ml more moreover most
    mostly mr mrs much mug must my myself n na
    name namely nay nd near nearly necessarily necessary need needs
    neither never nevertheless new next nine ninety no nobody non
    none nonetheless noone nor normally nos not noted nothing now
    nowhere o obtain obtained obviously of off often oh ok
    okay old omitted on once ones only onto or ord
    other others otherwise ought our ours ourselves out outside over
    overall owing own p page pages part particular particularly past
    per perhaps placed please plus poorly possible possibly potentially pp
    predominantly present previously primarily probably promptly proud provides put q
    que quickly quite qv r ran rather rd re readily
    really recent recently ref refs regarding regardless regards related relatively
    research respectively resulted resulting results right run s said same
    saw say saying says sec section see seeing seem seemed
    seeming seems seen self selves sent seven several shall she
    shed she'll shes should shouldn't show showed shown showns shows
    significant significantly similar similarly since six slightly so some somebody
    somehow someone somethan something sometime sometimes somewhat somewhere soon sorry
    specifically specified specify specifying still stop strongly sub substantially successfully
    such sufficiently suggest sup sure t take taken taking tell
    tends th than thank thanks thanx that that'll thats that've
    the their theirs them themselves then thence there thereafter thereby
    thered therefore therein there'll thereof therere theres thereto thereupon there've
    these they theyd they'll theyre they've think this those thou
    though thoughh thousand throug through throughout thru thus til tip
    to together too took toward towards tried tries truly try
    trying ts twice u un under unfortunately unless unlike unlikely
    until unto up upon ups us use used useful usefully
    usefulness uses using usually v value various 've very via
    viz vol vols vs w want wants was wasnt way
    we wed welcome we'll went were werent we've what whatever
    what'll whats when whence whenever where whereafter whereas whereby wherein
    wheres whereupon wherever whether which while whim whither who whod
    whoever whole who'll whom whomever whos whose why widely willing
    wish with within without wont words world would wouldnt www
    x y yes yet you youd you'd youll you'll your
    youre you're yours yourself yourselves youve you've z""".trim.split("\\\\s+").toSet
}
| malcolmgreaves/auto-gfqg | src/main/scala/agfqg/package.scala | Scala | apache-2.0 | 9,113 |
package kalmanb.akka.pull
import akka.actor.ActorSystem
import akka.actor.Props
import com.typesafe.config.ConfigFactory
import kalmanb.akka.Common
import kalmanb.akka.ConfigurablePort
/** Remote "pull" worker client: connects to the master actor system at
* RemoteUrl, looks up its controller and counter actors, and starts ten
* local Worker actors that pull work from the remote controller.
*/
object PullClient extends App with ConfigurablePort {
// Address of the master actor system this client attaches to.
val RemoteUrl = "akka.tcp://master@127.0.0.1:2552"
println(s"PullClient Starting on port: $port ...")
// Remoting configuration; the listen port comes from the ConfigurablePort mixin.
val customConf = ConfigFactory.parseString(s"""
akka {
actor {
provider = "akka.remote.RemoteActorRefProvider"
}
remote {
enabled-transports = ["akka.remote.netty.tcp"]
netty.tcp {
port = $port
}
}
loggers = ["akka.event.slf4j.Slf4jLogger"]
loglevel = "DEBUG"
logging-filter = "akka.event.slf4j.Slf4jLoggingFilter"
}
""")
val system = ActorSystem("client", ConfigFactory.load(customConf))
// Note "actorFor" is a lookup - not creation. (It is deprecated in later
// Akka versions; actorSelection is the modern equivalent.)
val controller = system.actorFor(RemoteUrl + "/user/controller")
val counter = system.actorFor(RemoteUrl + "/user/counter")
// Start 10 workers, each wired to the remote controller and counter.
// FIX: the source contained a mis-encoded function arrow here ("{ i β"),
// which is not valid Scala; restored "=>" and dropped the unused binding.
(1 to 10).foreach { _ =>
system.actorOf(Props(new Worker(controller, counter)))
}
Common.shutdown(system)
}
| kalmanb/akka-examples | src/main/scala/kalmanb/akka/pull/PullClient.scala | Scala | apache-2.0 | 1,135 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2
import org.apache.spark.sql.Strategy
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkPlan
/** Planner strategy that maps DataSource V2 logical nodes onto their
* physical counterparts; any other plan node yields no candidate. */
object DataSourceV2Strategy extends Strategy {
  override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
    case DataSourceV2Relation(output, reader) =>
      Seq(DataSourceV2ScanExec(output, reader))
    case WriteToDataSourceV2(writer, query) =>
      Seq(WriteToDataSourceV2Exec(writer, planLater(query)))
    case _ =>
      Seq.empty
  }
}
| akopich/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala | Scala | apache-2.0 | 1,359 |
package org.jetbrains.plugins.scala
package codeInsight.intentions.argument
import org.jetbrains.plugins.scala.codeInsight.intention.argument.AddNameToArgumentIntention
import org.jetbrains.plugins.scala.codeInsight.intentions.ScalaIntentionTestBase
/**
* @author Ksenia.Sautina
* @since 5/14/12
*/
/** Tests for the "add name to argument" intention: applying it at the caret
* replaces positional arguments with named arguments. */
class AddNameToArgumentIntentionTest extends ScalaIntentionTestBase {
def familyName = AddNameToArgumentIntention.familyName
// A single boolean argument gets its parameter name.
def test() {
val text =
"""
|class NameParameters {
| def doSomething(flag: Boolean) {}
|
| doSomething(t<caret>rue)
|}
"""
val resultText =
"""
|class NameParameters {
| def doSomething(flag: Boolean) {}
|
| doSomething(flag = t<caret>rue)
|}
"""
doTest(text, resultText)
}
// Caret on the first argument: all arguments become named.
def test2() {
val text =
"""
|class NameParameters {
| def doSomething(flag: Boolean, a: Int) {}
|
| doSomething(t<caret>rue, 8)
|}
"""
val resultText =
"""
|class NameParameters {
| def doSomething(flag: Boolean, a: Int) {}
|
| doSomething(flag = t<caret>rue, a = 8)
|}
"""
doTest(text, resultText)
}
// Caret on a later argument: only arguments from the caret onward are named.
def test3() {
val text =
"""
|class NameParameters {
| def doSomething(flag: Boolean, a: Int, b: Int) {}
|
| doSomething(true, 8, <caret>9)
|}
"""
val resultText =
"""
|class NameParameters {
| def doSomething(flag: Boolean, a: Int, b: Int) {}
|
| doSomething(true, 8, <caret>b = 9)
|}
"""
doTest(text, resultText)
}
}
package pl.jozwik.runner
import java.io.File
import java.nio.file.FileSystems
import org.apache.commons.io.FileUtils
import pl.jozwik.runner.GistsToFile._
import scala.sys.process.{ProcessLogger, Process}
import pl.jozwik.gist.GistReader
import scala.collection.mutable.ArrayBuffer
import java.net.URL
import org.eclipse.jgit.api.Git
/**
 * Downloads solution gists, injects them as a source file into a clone of the
 * exercises repository, and runs the matching sbt test, collecting the sbt
 * output line by line.
 */
object TestRunner {

  val gitPostfix = ".git"

  /** Entry point; args: package name, object name, method signature, test name. */
  def main(args: Array[String]): Unit = {
    val packageName = args(0)
    val objectName = args(1)
    val signature = args(2)
    val testName = args(3)
    uploadSolutionsAndRunTests(new URL("https://github.com/jaceklaskowski/scalania.git"), "exercises", packageName, objectName, signature, testName, Seq(7680647, 7680700))
  }

  /**
   * Extracts the repository name from a git URL, e.g.
   * "https://host/user/repo.git" -> "repo".
   */
  def extractName(url: String): String = {
    val name = url.substring(url.lastIndexOf('/') + 1)
    // stripSuffix is a no-op when the name does not end with ".git".
    name.stripSuffix(gitPostfix)
  }

  /**
   * Assembles the gists into one object source, clones the repository (if not
   * already present), writes the source into the sub-project and runs sbt.
   *
   * @return the captured sbt output lines and the generated object source
   */
  def uploadSolutionsAndRunTests(repositoryUrl: URL, subProject: String, packageName: String, objectName: String, signature: String, testName: String, numbers: Seq[Int], url: String = GistReader.DEFAULT_URL): (Seq[String], String) = {
    val objectContent = gistsToFile(packageName, objectName, signature, numbers, url)
    val repoDir = extractName(repositoryUrl.toString)
    val tmpDir = new File("tmp")
    val scalaniaDir = new File(tmpDir, repoDir)
    cloneRepository(scalaniaDir, repositoryUrl)
    storeFileWithTests(scalaniaDir, subProject, packageName, objectName, objectContent)
    (runSbt(packageName, testName, scalaniaDir), objectContent)
  }

  /** Runs `sbt testOnly <package>.<testName>` in the given directory and returns its output. */
  private def runSbt(packageName: String, testName: String, scalaniaDir: File): Seq[String] = {
    // NOTE(review): the trailing "" argument is passed to sbt verbatim — confirm it is intended.
    val sbtPb = Process(Seq("sbt", "testOnly " + packageName + s".$testName", ""), scalaniaDir)
    println(s"$sbtPb")
    val output = new ArrayBuffer[String]()
    val process: Process = sbtPb.run(ProcessLogger(line => {
      println(line)
      output += line
    }))
    // Block until sbt terminates so that all output has been captured.
    process.exitValue()
    output.toSeq
  }

  /** Writes the generated object source under src/main/scala of the sub-project. */
  private def storeFileWithTests(scalaniaDir: File, subProject: String, packageName: String, objectName: String, content: String): Unit = {
    val sub = if (subProject.isEmpty) "." else subProject
    val srcPath = FileSystems.getDefault.getPath(scalaniaDir.getAbsolutePath, sub, "src", "main", "scala")
    // Split the package name on literal dots to build the directory hierarchy.
    // (Was split("\\\\."), which matches a backslash + any char and never splits a package name.)
    val packageDir = packageName.split("\\.").foldLeft(srcPath.toFile)((dir, part) => new File(dir, part))
    val location = new File(packageDir, objectName + ".scala")
    val writer = new java.io.PrintWriter(location)
    try {
      writer.write(content)
    } finally {
      writer.close()
    }
  }

  /** Clones the repository unless the target directory already exists (an existing clone is reused). */
  private def cloneRepository(destDir: File, url: URL) = {
    if (!destDir.exists()) {
      // The previous FileUtils.deleteDirectory(destDir) here was dead code:
      // this branch only runs when the directory does not exist.
      Git.cloneRepository().setURI(url.toString).setDirectory(destDir).call()
    }
  }
}
| ajozwik/scalania-gist-reader | testRunner/src/main/scala/pl/jozwik/runner/TestRunner.scala | Scala | apache-2.0 | 2,907 |
package com.sksamuel.elastic4s.http
import com.sksamuel.elastic4s.ElasticsearchClientUri
import org.scalatest.{FlatSpec, Matchers}
/**
 * Verifies that `Response` supports `map`/`flatMap` and therefore
 * for-comprehensions, and that failures short-circuit the composition.
 */
class ElasticClientResponsesTest extends FlatSpec with Matchers with ElasticDsl {

  "HttpClient" should "provide flatMap and for-comprehension on responses" in {
    // Functor: mapping over a success transforms the payload only.
    val response0: Response[Int] = RequestSuccess(0, None, Map.empty, 42)
    val response1: Response[Int] = for {
      i <- response0
    } yield { i + 10 }
    // Matchers (`shouldBe`) give descriptive failure messages, unlike bare assert.
    response1 shouldBe RequestSuccess(0, None, Map.empty, 52)

    // Monad: two successes can be combined in a single for-comprehension.
    val response2 = for {
      i <- response0
      j <- response1
    } yield { i + j }
    response2 shouldBe RequestSuccess(0, None, Map.empty, 94)

    // Failure: mapping over a failed response leaves it unchanged.
    val responseFail: Response[Int] = RequestFailure(0, None, Map.empty, null)
    val response3 = for {
      i <- responseFail
    } yield { i + 42 }
    response3 shouldBe responseFail
  }
}
| Tecsisa/elastic4s | elastic4s-http/src/test/scala/com/sksamuel/elastic4s/http/responses.scala | Scala | apache-2.0 | 922 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.doc.generation
import org.apache.spark.ml.param.Params
import scala.util.{Failure, Success, Try}
import scala.collection.JavaConverters._
/**
 * Renders an RST documentation page listing the parameters of an algorithm
 * (and, optionally, which of them are also available on its MOJO model).
 */
object ParametersTemplate {
  /**
   * Builds the full RST page for the given algorithm class.
   * `algorithm` must have a public no-arg constructor (instantiated below).
   */
  def apply(algorithm: Class[_], mojoModel: Option[Class[_]]): String = {
    val entities = getListOfAffectedEntities(algorithm)
    val caption = s"Parameters of ${algorithm.getSimpleName}"
    // RST section underline of exactly the caption's length.
    val dashes = caption.toCharArray.map(_ => '-').mkString
    val classes = entities.map(c => s"- ``${c._2}``").mkString("\\n")
    val classesCaption = if (entities.length > 1) "Affected Classes" else "Affected Class"
    // replaceAll takes a regex: "." matches every character, so this yields a
    // "#" underline of the same length as the caption (intentional).
    val classesCaptionUnderLine = classesCaption.replaceAll(".", "#")
    val content = getParametersContent(algorithm, mojoModel)
    s""".. _parameters_${algorithm.getSimpleName}:
       |
       |$caption
       |$dashes
       |
       |$classesCaption
       |$classesCaptionUnderLine
       |
       |$classes
       |
       |Parameters
       |##########
       |
       |- *Each parameter has also a corresponding getter and setter method.*
       | *(E.g.:* ``label`` *->* ``getLabel()`` *,* ``setLabel(...)`` *)*
       |
       |$content
     """.stripMargin
  }

  /**
   * Returns (simpleName, canonicalName) of the entity itself plus, when they
   * exist, its generated `...classification.<Name>Classifier` and
   * `...regression.<Name>Regressor` companions.
   */
  private def getListOfAffectedEntities(entity: Class[_]): Seq[(String, String)] = {
    val baseSimpleName = entity.getSimpleName
    val baseCanonicalName = entity.getCanonicalName
    val base = (baseSimpleName, baseCanonicalName) :: Nil
    val namespaceWithDot = baseCanonicalName.substring(0, baseCanonicalName.length - baseSimpleName.length)
    val fullClassifierName = s"${namespaceWithDot}classification.${baseSimpleName}Classifier"
    // Class.forName throws when the companion does not exist; Try makes it optional.
    val withClassifier = Try(Class.forName(fullClassifierName)) match {
      case Success(classifier) => base :+ (classifier.getSimpleName, classifier.getCanonicalName)
      case Failure(_) => base
    }
    val fullRegressorName = s"${namespaceWithDot}regression.${baseSimpleName}Regressor"
    val withRegressor = Try(Class.forName(fullRegressorName)) match {
      case Success(regressor) => withClassifier :+ (regressor.getSimpleName, regressor.getCanonicalName)
      case Failure(_) => withClassifier
    }
    withRegressor
  }

  /** Renders one RST entry (doc, default value, MOJO note) per algorithm parameter. */
  private def getParametersContent(algorithm: Class[_], mojoModel: Option[Class[_]]): String = {
    val algorithmInstance = algorithm.newInstance().asInstanceOf[Params]
    // MOJO model classes are expected to have a single-String (uid) constructor.
    val mojoModelInstanceOption =
      mojoModel.map(_.getConstructor(classOf[String]).newInstance("uid").asInstanceOf[Params])
    algorithmInstance.params
      .map { param =>
        val defaultValue = if (algorithmInstance.getDefault(param).isDefined) {
          algorithmInstance.getDefault(param).get
        } else {
          "No default value"
        }
        s"""${param.name}
           | ${param.doc.replace(" \\n", "").replace("\\n ", "\\n\\n - ")}
           |
           | ${generateDefaultValue(defaultValue)}
           | ${generateMOJOComment(param.name, mojoModelInstanceOption)}
           |""".stripMargin
      }
      .mkString("\\n")
  }

  /** Emits a note when the parameter is also exposed on the trained MOJO model. */
  private def generateMOJOComment(paramName: String, mojoModelInstanceOption: Option[Params]): String = {
    mojoModelInstanceOption
      .map { mojoModelInstance =>
        if (mojoModelInstance.hasParam(paramName)) {
          s"\\n  *Also available on the trained model.*"
        } else {
          ""
        }
      }
      .getOrElse("")
  }

  /**
   * Renders the default value; emits one line when the Scala and Python
   * renderings agree, otherwise shows both.
   */
  private def generateDefaultValue(value: Any): String = {
    val pythonValue = stringifyAsPython(value)
    val scalaValue = stringifyAsScala(value)
    if (pythonValue == scalaValue) {
      s"*Default value:* ``$pythonValue``"
    } else {
      s"*Scala default value:* ``$scalaValue`` *; Python default value:* ``$pythonValue``"
    }
  }

  /** Renders a default value using Python literal syntax (True/None/quoted strings). */
  private def stringifyAsPython(value: Any): String = value match {
    case a: Array[_] => s"[${a.map(stringifyAsPython).mkString(", ")}]"
    case m: java.util.Map[_, _] =>
      m.asScala
        .map(entry => s"${stringifyAsPython(entry._1)} -> ${stringifyAsPython(entry._2)}")
        .mkString("{", ", ", "}")
    case b: Boolean => b.toString.capitalize
    case s: String => s""""$s""""
    // null must be handled before the final catch-all toString case.
    case v if v == null => "None"
    case v: Enum[_] => s""""$v""""
    case v => v.toString
  }

  /** Renders a default value using Scala literal syntax (f/L suffixes, Map(...), Array(...)). */
  protected def stringifyAsScala(value: Any): String = value match {
    case f: java.lang.Float => s"${f.toString.toLowerCase}f"
    case d: java.lang.Double => d.toString.toLowerCase
    case l: java.lang.Long => s"${l}L"
    case m: java.util.Map[_, _] =>
      m.asScala
        .map(entry => s"${stringifyAsScala(entry._1)} -> ${stringifyAsScala(entry._2)}")
        .mkString("Map(", ", ", ")")
    case a: Array[_] => s"Array(${a.map(stringifyAsScala).mkString(", ")})"
    case s: String => s""""$s""""
    case v if v == null => null
    case v => v.toString
  }
}
| h2oai/sparkling-water | doc/src/main/scala/ai/h2o/sparkling/doc/generation/ParametersTemplate.scala | Scala | apache-2.0 | 5,571 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.evaluation
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.param.ParamsSuite
import org.apache.spark.ml.regression.LinearRegression
import org.apache.spark.ml.util.{DefaultReadWriteTest, MLTestingUtils}
import org.apache.spark.mllib.util.{LinearDataGenerator, MLlibTestSparkContext}
import org.apache.spark.mllib.util.TestingUtils._
/**
 * Tests for [[RegressionEvaluator]]: metric values are validated against the
 * numbers produced by R (see the inlined R scripts below) within an absolute
 * tolerance of 0.01.
 */
class RegressionEvaluatorSuite
  extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest {

  import testImplicits._

  test("params") {
    ParamsSuite.checkParams(new RegressionEvaluator)
  }

  test("Regression Evaluator: default params") {
    /**
     * Here is the instruction describing how to export the test data into CSV format
     * so we can validate the metrics compared with R's mmetric package.
     *
     * import org.apache.spark.mllib.util.LinearDataGenerator
     * val data = sc.parallelize(LinearDataGenerator.generateLinearInput(6.3,
     *   Array(4.7, 7.2), Array(0.9, -1.3), Array(0.7, 1.2), 100, 42, 0.1))
     * data.map(x=> x.label + ", " + x.features(0) + ", " + x.features(1))
     *   .saveAsTextFile("path")
     */
    val dataset = LinearDataGenerator.generateLinearInput(
      6.3, Array(4.7, 7.2), Array(0.9, -1.3), Array(0.7, 1.2), 100, 42, 0.1)
      .map(_.asML).toDF()

    /**
     * Using the following R code to load the data, train the model and evaluate metrics.
     *
     * > library("glmnet")
     * > library("rminer")
     * > data <- read.csv("path", header=FALSE, stringsAsFactors=FALSE)
     * > features <- as.matrix(data.frame(as.numeric(data$V2), as.numeric(data$V3)))
     * > label <- as.numeric(data$V1)
     * > model <- glmnet(features, label, family="gaussian", alpha = 0, lambda = 0)
     * > rmse <- mmetric(label, predict(model, features), metric='RMSE')
     * > mae <- mmetric(label, predict(model, features), metric='MAE')
     * > r2 <- mmetric(label, predict(model, features), metric='R2')
     */
    val trainer = new LinearRegression
    val model = trainer.fit(dataset)
    val predictions = model.transform(dataset)

    // default = rmse
    val evaluator = new RegressionEvaluator()
    assert(evaluator.evaluate(predictions) ~== 0.1013829 absTol 0.01)

    // r2 score
    evaluator.setMetricName("r2")
    assert(evaluator.evaluate(predictions) ~== 0.9998387 absTol 0.01)

    // mae
    evaluator.setMetricName("mae")
    assert(evaluator.evaluate(predictions) ~== 0.08399089 absTol 0.01)

    // var
    evaluator.setMetricName("var")
    assert(evaluator.evaluate(predictions) ~== 63.6944519 absTol 0.01)
  }

  test("read/write") {
    // Round-trips a non-default configuration through persistence.
    val evaluator = new RegressionEvaluator()
      .setPredictionCol("myPrediction")
      .setLabelCol("myLabel")
      .setMetricName("r2")
    testDefaultReadWrite(evaluator)
  }

  test("should support all NumericType labels and not support other types") {
    MLTestingUtils.checkNumericTypes(new RegressionEvaluator, spark)
  }
}
| goldmedal/spark | mllib/src/test/scala/org/apache/spark/ml/evaluation/RegressionEvaluatorSuite.scala | Scala | apache-2.0 | 3,757 |
package aerospikez
import com.typesafe.config.ConfigFactory
import org.specs2.mutable.Specification
/**
 * Verifies that each policy builder reads its values from the "aerospike"
 * section of the configuration file when one is present, and falls back to
 * hard-coded defaults otherwise.
 */
class ConfigSpec extends Specification {

  "ClientConfig" should {
    "use the configuration file if exists" in {
      ConfigFile.file = ConfigFactory.load("reference.conf")
      val settings = ConfigFile.file.getConfig("aerospike")
      val policy = ClientConfig().policy

      policy.timeout must beEqualTo(settings.getInt("client-policy.timeout"))
      policy.maxSocketIdle must beEqualTo(settings.getInt("client-policy.max-socket-idle"))
      policy.asyncMaxCommands must beEqualTo(settings.getInt("client-policy.async-max-commands"))
      policy.asyncSelectorTimeout must beEqualTo(settings.getInt("client-policy.async-selector-timeout"))
      policy.failIfNotConnected must beEqualTo(settings.getBoolean("client-policy.fail-if-not-connected"))
      policy.asyncSelectorThreads must beEqualTo(settings.getInt("client-policy.async-selector-threads"))
      policy.asyncMaxCommandAction.toString must beEqualTo(settings.getString("client-policy.async-max-command-action"))
    }
    "use the default values if the configuration file is missing" in {
      ConfigFile.file = ConfigFactory.load()
      val policy = ClientConfig().policy

      policy.timeout must beEqualTo(0)
      policy.maxSocketIdle must beEqualTo(14)
      policy.asyncMaxCommands must beEqualTo(200)
      policy.asyncSelectorTimeout must beEqualTo(0)
      policy.asyncSelectorThreads must beEqualTo(1)
      policy.sharedThreadPool must beEqualTo(false)
      policy.failIfNotConnected must beEqualTo(true)
      policy.asyncMaxCommandAction.toString must beEqualTo("REJECT")
    }
  }

  "QueryConfig" should {
    "use the configuration file if exists" in {
      ConfigFile.file = ConfigFactory.load("reference.conf")
      val settings = ConfigFile.file.getConfig("aerospike")
      val policy = QueryConfig().policy

      policy.timeout must beEqualTo(settings.getInt("query-policy.timeout"))
      policy.maxRetries must beEqualTo(settings.getInt("query-policy.max-retries"))
      policy.recordQueueSize must beEqualTo(settings.getInt("query-policy.record-queue-size"))
      policy.maxConcurrentNodes must beEqualTo(settings.getInt("query-policy.max-concurrent-nodes"))
      policy.sleepBetweenRetries must beEqualTo(settings.getInt("query-policy.sleep-between-retries"))
    }
    "use the default values if the configuration file is missing" in {
      ConfigFile.file = ConfigFactory.load()
      val policy = QueryConfig().policy

      policy.timeout must beEqualTo(0)
      policy.maxRetries must beEqualTo(2)
      policy.recordQueueSize must beEqualTo(5000)
      policy.maxConcurrentNodes must beEqualTo(0)
      policy.sleepBetweenRetries must beEqualTo(500)
    }
  }

  "WriteConfig" should {
    "use the configuration file if exists" in {
      ConfigFile.file = ConfigFactory.load("reference.conf")
      val settings = ConfigFile.file.getConfig("aerospike")
      val policy = WriteConfig().policy

      policy.timeout must beEqualTo(settings.getInt("write-policy.timeout"))
      policy.expiration must beEqualTo(settings.getInt("write-policy.expiration"))
      policy.generation must beEqualTo(settings.getInt("write-policy.generation"))
      policy.maxRetries must beEqualTo(settings.getInt("write-policy.max-retries"))
      policy.sleepBetweenRetries must beEqualTo(settings.getInt("write-policy.sleep-between-retries"))
      policy.priority.toString must beEqualTo(settings.getString("write-policy.priority"))
      policy.generationPolicy.toString must beEqualTo(settings.getString("write-policy.generation-policy"))
      policy.recordExistsAction.toString must beEqualTo(settings.getString("write-policy.record-exists-action"))
    }
    "use the default values if the configuration file is missing" in {
      ConfigFile.file = ConfigFactory.load()
      val policy = WriteConfig().policy

      policy.timeout must beEqualTo(0)
      policy.expiration must beEqualTo(0)
      policy.generation must beEqualTo(0)
      policy.maxRetries must beEqualTo(2)
      policy.sleepBetweenRetries must beEqualTo(500)
      policy.priority.toString must beEqualTo("DEFAULT")
      policy.generationPolicy.toString must beEqualTo("NONE")
      policy.recordExistsAction.toString must beEqualTo("UPDATE")
    }
  }
}
| otrimegistro/aerospikez | src/test/scala/aerospikez/PolicyConfigSpec.scala | Scala | mit | 4,827 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.data.api
import akka.event.Logging
import sun.misc.BASE64Decoder
import java.util.concurrent.TimeUnit
import akka.actor._
import akka.io.IO
import akka.pattern.ask
import akka.util.Timeout
import org.apache.predictionio.data.Utils
import org.apache.predictionio.data.storage.AccessKeys
import org.apache.predictionio.data.storage.Channels
import org.apache.predictionio.data.storage.DateTimeJson4sSupport
import org.apache.predictionio.data.storage.Event
import org.apache.predictionio.data.storage.EventJson4sSupport
import org.apache.predictionio.data.storage.BatchEventsJson4sSupport
import org.apache.predictionio.data.storage.LEvents
import org.apache.predictionio.data.storage.Storage
import org.json4s.DefaultFormats
import org.json4s.Formats
import org.json4s.JObject
import org.json4s.native.JsonMethods.parse
import spray.can.Http
import spray.http.FormData
import spray.http.MediaTypes
import spray.http.StatusCodes
import spray.httpx.Json4sSupport
import spray.routing._
import spray.routing.authentication.Authentication
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Try, Success, Failure}
/**
 * Spray HTTP service actor implementing the Event Server REST API: health
 * check, plugin endpoints, single and batch event CRUD, stats and webhooks.
 * Each request is authenticated via an app access key passed either as the
 * "accessKey" query parameter or in a Basic Authorization header.
 */
class EventServiceActor(
    val eventClient: LEvents,
    val accessKeysClient: AccessKeys,
    val channelsClient: Channels,
    val config: EventServerConfig) extends HttpServiceActor {

  // JSON (de)serialization formats used by spray's marshalling in all routes.
  object Json4sProtocol extends Json4sSupport {
    implicit def json4sFormats: Formats = DefaultFormats +
      new EventJson4sSupport.APISerializer +
      new BatchEventsJson4sSupport.APISerializer +
      // NOTE: don't use Json4s JodaTimeSerializers since it has issues,
      // some format not converted, or timezone not correct
      new DateTimeJson4sSupport.Serializer
  }

  // Hard limit on the number of events accepted by POST /batch/events.json.
  val MaxNumberOfEventsPerBatchRequest = 50

  val logger = Logging(context.system, this)

  // we use the enclosing ActorContext's or ActorSystem's dispatcher for our
  // Futures
  implicit def executionContext: ExecutionContext = context.dispatcher

  // Ask timeout for messages sent to the stats and plugins actors.
  implicit val timeout = Timeout(5, TimeUnit.SECONDS)

  val rejectionHandler = Common.rejectionHandler

  // Capture the base name from "<name>.json" / "<name>.form" path segments.
  val jsonPath = """(.+)\.json$""".r
  val formPath = """(.+)\.form$""".r

  val pluginContext = EventServerPluginContext(logger)

  // NOTE(review): sun.misc.BASE64Decoder is a JDK-internal API; used here to
  // decode the Basic Authorization header value.
  private lazy val base64Decoder = new BASE64Decoder

  // Result of successful authentication: resolved app, optional channel, and
  // the event names this access key is allowed to submit (empty = all).
  case class AuthData(appId: Int, channelId: Option[Int], events: Seq[String])

  /* with accessKey in query/header, return appId if succeed */
  def withAccessKey: RequestContext => Future[Authentication[AuthData]] = {
    ctx: RequestContext =>
      val accessKeyParamOpt = ctx.request.uri.query.get("accessKey")
      val channelParamOpt = ctx.request.uri.query.get("channel")
      Future {
        // with accessKey in query, return appId if succeed
        accessKeyParamOpt.map { accessKeyParam =>
          accessKeysClient.get(accessKeyParam).map { k =>
            channelParamOpt.map { ch =>
              // A channel name is only valid if it exists for this app.
              val channelMap =
                channelsClient.getByAppid(k.appid)
                  .map(c => (c.name, c.id)).toMap
              if (channelMap.contains(ch)) {
                Right(AuthData(k.appid, Some(channelMap(ch)), k.events))
              } else {
                Left(ChannelRejection(s"Invalid channel '$ch'."))
              }
            }.getOrElse{
              Right(AuthData(k.appid, None, k.events))
            }
          }.getOrElse(FailedAuth)
        }.getOrElse {
          // with accessKey in header, return appId if succeed
          ctx.request.headers.find(_.name == "Authorization").map { authHeader =>
            authHeader.value.split("Basic ") match {
              case Array(_, value) =>
                // Basic credentials are "<accessKey>:<ignored>" base64-encoded.
                val appAccessKey =
                  new String(base64Decoder.decodeBuffer(value)).trim.split(":")(0)
                accessKeysClient.get(appAccessKey) match {
                  case Some(k) => Right(AuthData(k.appid, None, k.events))
                  case None => FailedAuth
                }
              case _ => FailedAuth
            }
          }.getOrElse(MissedAuth)
        }
      }
  }

  // Rejection returned when an access key was supplied but is invalid.
  private val FailedAuth = Left(
    AuthenticationFailedRejection(
      AuthenticationFailedRejection.CredentialsRejected, List()
    )
  )

  // Rejection returned when no access key was supplied at all.
  private val MissedAuth = Left(
    AuthenticationFailedRejection(
      AuthenticationFailedRejection.CredentialsMissing, List()
    )
  )

  // Resolved lazily: these actors are created by EventServer.createEventServer.
  lazy val statsActorRef = actorRefFactory.actorSelection("/user/StatsActor")
  lazy val pluginsActorRef = actorRefFactory.actorSelection("/user/PluginsActor")

  val route: Route =
    // GET / : health check.
    pathSingleSlash {
      import Json4sProtocol._

      get {
        respondWithMediaType(MediaTypes.`application/json`) {
          complete(Map("status" -> "alive"))
        }
      }
    } ~
    // GET /plugins.json : list registered input blockers and sniffers.
    path("plugins.json") {
      import Json4sProtocol._

      get {
        respondWithMediaType(MediaTypes.`application/json`) {
          complete {
            Map("plugins" -> Map(
              "inputblockers" -> pluginContext.inputBlockers.map { case (n, p) =>
                n -> Map(
                  "name" -> p.pluginName,
                  "description" -> p.pluginDescription,
                  "class" -> p.getClass.getName)
              },
              "inputsniffers" -> pluginContext.inputSniffers.map { case (n, p) =>
                n -> Map(
                  "name" -> p.pluginName,
                  "description" -> p.pluginDescription,
                  "class" -> p.getClass.getName)
              }
            ))
          }
        }
      }
    } ~
    // GET /plugins/<type>/<name>/<args...> : dispatch a REST call to a plugin.
    path("plugins" / Segments) { segments =>
      get {
        handleExceptions(Common.exceptionHandler) {
          authenticate(withAccessKey) { authData =>
            respondWithMediaType(MediaTypes.`application/json`) {
              complete {
                val pluginArgs = segments.drop(2)
                val pluginType = segments(0)
                val pluginName = segments(1)
                pluginType match {
                  case EventServerPlugin.inputBlocker =>
                    // Input blockers are invoked synchronously in-process.
                    pluginContext.inputBlockers(pluginName).handleREST(
                      authData.appId,
                      authData.channelId,
                      pluginArgs)
                  case EventServerPlugin.inputSniffer =>
                    // Input sniffers are hosted by the PluginsActor (ask pattern).
                    pluginsActorRef ? PluginsActor.HandleREST(
                      appId = authData.appId,
                      channelId = authData.channelId,
                      pluginName = pluginName,
                      pluginArgs = pluginArgs) map {
                      _.asInstanceOf[String]
                    }
                }
              }
            }
          }
        }
      }
    } ~
    // GET/DELETE /events/<eventId>.json : fetch or remove a single event.
    path("events" / jsonPath ) { eventId =>

      import Json4sProtocol._

      get {
        handleExceptions(Common.exceptionHandler) {
          handleRejections(rejectionHandler) {
            authenticate(withAccessKey) { authData =>
              val appId = authData.appId
              val channelId = authData.channelId
              respondWithMediaType(MediaTypes.`application/json`) {
                complete {
                  logger.debug(s"GET event ${eventId}.")
                  val data = eventClient.futureGet(eventId, appId, channelId).map { eventOpt =>
                    eventOpt.map( event =>
                      (StatusCodes.OK, event)
                    ).getOrElse(
                      (StatusCodes.NotFound, Map("message" -> "Not Found"))
                    )
                  }
                  data
                }
              }
            }
          }
        }
      } ~
      delete {
        handleExceptions(Common.exceptionHandler) {
          handleRejections(rejectionHandler) {
            authenticate(withAccessKey) { authData =>
              val appId = authData.appId
              val channelId = authData.channelId
              respondWithMediaType(MediaTypes.`application/json`) {
                complete {
                  logger.debug(s"DELETE event ${eventId}.")
                  val data = eventClient.futureDelete(eventId, appId, channelId).map { found =>
                    if (found) {
                      (StatusCodes.OK, Map("message" -> "Found"))
                    } else {
                      (StatusCodes.NotFound, Map("message" -> "Not Found"))
                    }
                  }
                  data
                }
              }
            }
          }
        }
      }
    } ~
    // POST /events.json : create one event; GET /events.json : query events.
    path("events.json") {

      import Json4sProtocol._

      post {
        handleExceptions(Common.exceptionHandler) {
          handleRejections(rejectionHandler) {
            authenticate(withAccessKey) { authData =>
              val appId = authData.appId
              val channelId = authData.channelId
              val events = authData.events
              entity(as[Event]) { event =>
                complete {
                  // An empty allow-list means the key may submit any event name.
                  if (events.isEmpty || authData.events.contains(event.event)) {
                    pluginContext.inputBlockers.values.foreach(
                      _.process(EventInfo(
                        appId = appId,
                        channelId = channelId,
                        event = event), pluginContext))
                    val data = eventClient.futureInsert(event, appId, channelId).map { id =>
                      // Fire-and-forget notification to sniffers and stats.
                      pluginsActorRef ! EventInfo(
                        appId = appId,
                        channelId = channelId,
                        event = event)
                      val result = (StatusCodes.Created, Map("eventId" -> s"${id}"))
                      if (config.stats) {
                        statsActorRef ! Bookkeeping(appId, result._1, event)
                      }
                      result
                    }
                    data
                  } else {
                    (StatusCodes.Forbidden,
                      Map("message" -> s"${event.event} events are not allowed"))
                  }
                }
              }
            }
          }
        }
      } ~
      get {
        handleExceptions(Common.exceptionHandler) {
          handleRejections(rejectionHandler) {
            authenticate(withAccessKey) { authData =>
              val appId = authData.appId
              val channelId = authData.channelId
              parameters(
                'startTime.as[Option[String]],
                'untilTime.as[Option[String]],
                'entityType.as[Option[String]],
                'entityId.as[Option[String]],
                'event.as[Option[String]],
                'targetEntityType.as[Option[String]],
                'targetEntityId.as[Option[String]],
                'limit.as[Option[Int]],
                'reversed.as[Option[Boolean]]) {
                (startTimeStr, untilTimeStr, entityType, entityId,
                  eventName, // only support one event name
                  targetEntityType, targetEntityId,
                  limit, reversed) =>
                  respondWithMediaType(MediaTypes.`application/json`) {
                    complete {
                      logger.debug(
                        s"GET events of appId=${appId} " +
                        s"st=${startTimeStr} ut=${untilTimeStr} " +
                        s"et=${entityType} eid=${entityId} " +
                        s"li=${limit} rev=${reversed} ")
                      require(!((reversed == Some(true))
                        && (entityType.isEmpty || entityId.isEmpty)),
                        "the parameter reversed can only be used with" +
                          " both entityType and entityId specified.")
                      // Parse times inside a Future so format errors surface
                      // through the recover branch below as a 400 response.
                      val parseTime = Future {
                        val startTime = startTimeStr.map(Utils.stringToDateTime(_))
                        val untilTime = untilTimeStr.map(Utils.stringToDateTime(_))
                        (startTime, untilTime)
                      }
                      parseTime.flatMap { case (startTime, untilTime) =>
                        val data = eventClient.futureFind(
                          appId = appId,
                          channelId = channelId,
                          startTime = startTime,
                          untilTime = untilTime,
                          entityType = entityType,
                          entityId = entityId,
                          eventNames = eventName.map(List(_)),
                          targetEntityType = targetEntityType.map(Some(_)),
                          targetEntityId = targetEntityId.map(Some(_)),
                          limit = limit.orElse(Some(20)),
                          reversed = reversed)
                          .map { eventIter =>
                            if (eventIter.hasNext) {
                              (StatusCodes.OK, eventIter.toArray)
                            } else {
                              (StatusCodes.NotFound,
                                Map("message" -> "Not Found"))
                            }
                          }
                        data
                      }.recover {
                        case e: Exception =>
                          (StatusCodes.BadRequest, Map("message" -> s"${e}"))
                      }
                    }
                  }
              }
            }
          }
        }
      }
    } ~
    // POST /batch/events.json : create up to MaxNumberOfEventsPerBatchRequest
    // events; each event gets an individual per-item status in the response.
    path("batch" / "events.json") {

      import Json4sProtocol._

      post {
        handleExceptions(Common.exceptionHandler) {
          handleRejections(rejectionHandler) {
            authenticate(withAccessKey) { authData =>
              val appId = authData.appId
              val channelId = authData.channelId
              val allowedEvents = authData.events
              // Maps each (possibly malformed) event to its own result map so
              // one bad event does not fail the whole batch.
              val handleEvent: PartialFunction[Try[Event], Future[Map[String, Any]]] = {
                case Success(event) => {
                  if (allowedEvents.isEmpty || allowedEvents.contains(event.event)) {
                    pluginContext.inputBlockers.values.foreach(
                      _.process(EventInfo(
                        appId = appId,
                        channelId = channelId,
                        event = event), pluginContext))
                    val data = eventClient.futureInsert(event, appId, channelId).map { id =>
                      pluginsActorRef ! EventInfo(
                        appId = appId,
                        channelId = channelId,
                        event = event)
                      val status = StatusCodes.Created
                      val result = Map(
                        "status" -> status.intValue,
                        "eventId" -> s"${id}")
                      if (config.stats) {
                        statsActorRef ! Bookkeeping(appId, status, event)
                      }
                      result
                    }.recover { case exception =>
                      Map(
                        "status" -> StatusCodes.InternalServerError.intValue,
                        "message" -> s"${exception.getMessage()}")
                    }
                    data
                  } else {
                    Future.successful(Map(
                      "status" -> StatusCodes.Forbidden.intValue,
                      "message" -> s"${event.event} events are not allowed"))
                  }
                }
                case Failure(exception) => {
                  Future.successful(Map(
                    "status" -> StatusCodes.BadRequest.intValue,
                    "message" -> s"${exception.getMessage()}"))
                }
              }
              entity(as[Seq[Try[Event]]]) { events =>
                complete {
                  if (events.length <= MaxNumberOfEventsPerBatchRequest) {
                    Future.traverse(events)(handleEvent)
                  } else {
                    (StatusCodes.BadRequest,
                      Map("message" -> (s"Batch request must have less than or equal to " +
                        s"${MaxNumberOfEventsPerBatchRequest} events")))
                  }
                }
              }
            }
          }
        }
      }
    } ~
    // GET /stats.json : per-app bookkeeping; only available with --stats.
    path("stats.json") {

      import Json4sProtocol._

      get {
        handleExceptions(Common.exceptionHandler) {
          handleRejections(rejectionHandler) {
            authenticate(withAccessKey) { authData =>
              val appId = authData.appId
              respondWithMediaType(MediaTypes.`application/json`) {
                if (config.stats) {
                  complete {
                    statsActorRef ? GetStats(appId) map {
                      _.asInstanceOf[Map[String, StatsSnapshot]]
                    }
                  }
                } else {
                  complete(
                    StatusCodes.NotFound,
                    parse("""{"message": "To see stats, launch Event Server """ +
                      """with --stats argument."}"""))
                }
              }
            }
          }
        }
      }  // stats.json get
    } ~
    // POST/GET /webhooks/<name>.json : JSON webhook connectors.
    path("webhooks" / jsonPath ) { web =>
      import Json4sProtocol._

      post {
        handleExceptions(Common.exceptionHandler) {
          handleRejections(rejectionHandler) {
            authenticate(withAccessKey) { authData =>
              val appId = authData.appId
              val channelId = authData.channelId
              respondWithMediaType(MediaTypes.`application/json`) {
                entity(as[JObject]) { jObj =>
                  complete {
                    Webhooks.postJson(
                      appId = appId,
                      channelId = channelId,
                      web = web,
                      data = jObj,
                      eventClient = eventClient,
                      log = logger,
                      stats = config.stats,
                      statsActorRef = statsActorRef)
                  }
                }
              }
            }
          }
        }
      } ~
      get {
        handleExceptions(Common.exceptionHandler) {
          handleRejections(rejectionHandler) {
            authenticate(withAccessKey) { authData =>
              val appId = authData.appId
              val channelId = authData.channelId
              respondWithMediaType(MediaTypes.`application/json`) {
                complete {
                  Webhooks.getJson(
                    appId = appId,
                    channelId = channelId,
                    web = web,
                    log = logger)
                }
              }
            }
          }
        }
      }
    } ~
    // POST/GET /webhooks/<name>.form : form-encoded webhook connectors.
    path("webhooks" / formPath ) { web =>
      post {
        handleExceptions(Common.exceptionHandler) {
          handleRejections(rejectionHandler) {
            authenticate(withAccessKey) { authData =>
              val appId = authData.appId
              val channelId = authData.channelId
              respondWithMediaType(MediaTypes.`application/json`) {
                entity(as[FormData]){ formData =>
                  // logger.debug(formData.toString)
                  complete {
                    // respond with JSON
                    import Json4sProtocol._

                    Webhooks.postForm(
                      appId = appId,
                      channelId = channelId,
                      web = web,
                      data = formData,
                      eventClient = eventClient,
                      log = logger,
                      stats = config.stats,
                      statsActorRef = statsActorRef)
                  }
                }
              }
            }
          }
        }
      } ~
      get {
        handleExceptions(Common.exceptionHandler) {
          handleRejections(rejectionHandler) {
            authenticate(withAccessKey) { authData =>
              val appId = authData.appId
              val channelId = authData.channelId
              respondWithMediaType(MediaTypes.`application/json`) {
                complete {
                  // respond with JSON
                  import Json4sProtocol._

                  Webhooks.getForm(
                    appId = appId,
                    channelId = channelId,
                    web = web,
                    log = logger)
                }
              }
            }
          }
        }
      }
    }

  def receive: Actor.Receive = runRoute(route)
}
/* message */
/** Tells [[EventServerActor]] to bind the HTTP listener to host:port. */
case class StartServer(host: String, port: Int)
/**
 * Supervisor actor that owns the [[EventServiceActor]] request handler and,
 * upon receiving [[StartServer]], asks spray-can to bind the handler to the
 * given interface and port.
 */
class EventServerActor(
    val eventClient: LEvents,
    val accessKeysClient: AccessKeys,
    val channelsClient: Channels,
    val config: EventServerConfig) extends Actor with ActorLogging {
  // The HTTP request handler; all storage clients are passed through to it.
  val child = context.actorOf(
    Props(classOf[EventServiceActor],
      eventClient,
      accessKeysClient,
      channelsClient,
      config),
    "EventServiceActor")
  implicit val system = context.system
  def receive: Actor.Receive = {
    case StartServer(host, portNum) => {
      IO(Http) ! Http.Bind(child, interface = host, port = portNum)
    }
    // Bind acknowledgement / failure notifications from spray-can.
    case m: Http.Bound => log.info("Bound received. EventServer is ready.")
    case m: Http.CommandFailed => log.error("Command failed.")
    case _ => log.error("Unknown message.")
  }
}
/**
 * Event server settings.
 *
 * @param ip interface to bind to
 * @param port port to listen on
 * @param plugins plugin location (presumably a directory name — confirm
 *                against EventServerPluginContext)
 * @param stats when true, a StatsActor is created and GET /stats.json serves
 *              bookkeeping data
 */
case class EventServerConfig(
  ip: String = "localhost",
  port: Int = 7070,
  plugins: String = "plugins",
  stats: Boolean = false)
object EventServer {
  /**
   * Boots an actor system hosting the event server: creates the auxiliary
   * actors, wires the storage clients into the server actor and asks it to
   * bind to the configured address. Returns the running actor system.
   */
  def createEventServer(config: EventServerConfig): ActorSystem = {
    implicit val system = ActorSystem("EventServerSystem")

    // Auxiliary actors: optional statistics bookkeeping and the plugin host.
    if (config.stats) system.actorOf(Props[StatsActor], "StatsActor")
    system.actorOf(Props[PluginsActor], "PluginsActor")

    val server = system.actorOf(
      Props(
        classOf[EventServerActor],
        Storage.getLEvents(),
        Storage.getMetaDataAccessKeys(),
        Storage.getMetaDataChannels(),
        config),
      "EventServerActor"
    )
    server ! StartServer(config.ip, config.port)
    system
  }
}
/** Command-line entry point: starts an event server on 0.0.0.0:7070 and
  * blocks until the actor system terminates.
  */
object Run {
  def main(args: Array[String]): Unit = {
    val config = EventServerConfig(ip = "0.0.0.0", port = 7070)
    val system = EventServer.createEventServer(config)
    system.awaitTermination
  }
}
| himanshudhami/PredictionIO | data/src/main/scala/org/apache/predictionio/data/api/EventServer.scala | Scala | apache-2.0 | 22,915 |
package controllers
import com.wordnik.swagger.core.util.{ JsonUtil, RestResourceUtil }
import play.api.mvc._
import java.io.StringWriter
import play.api.libs.json._
/** Base Play controller that decorates JSON results with permissive CORS
  * headers so the Swagger UI (and other origins) can call the API.
  */
class BaseApiController extends Controller with RestResourceUtil {

  // Headers granting cross-origin access to every JSON response.
  private val corsHeaders = Seq(
    ("Access-Control-Allow-Origin", "*"),
    ("Access-Control-Allow-Methods", "GET, POST, DELETE, PUT"),
    ("Access-Control-Allow-Headers", "Content-Type, api_key, Authorization"))

  // Forces the content type to application/json and attaches CORS headers.
  protected def JsonResponse(response: SimpleResult[JsValue]): PlainResult =
    response.as("application/json").withHeaders(corsHeaders: _*)
}
| SG-LIUM/SGL-SpeechWeb-Demo | app/controllers/BaseApiController.scala | Scala | mit | 569 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import cascading.flow.hadoop.HadoopFlow
import cascading.flow.planner.BaseFlowStep
import org.apache.hadoop.conf.Configured
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.util.{ GenericOptionsParser, Tool => HTool, ToolRunner }
import scala.annotation.tailrec
import scala.collection.JavaConverters._
/** Hadoop Tool implementation that instantiates and runs scalding [[Job]]s,
  * following each job's `next` chain until completion. Can also emit
  * graphviz .dot files of the cascading flow instead of executing
  * (`--tool.graph`).
  */
class Tool extends Configured with HTool {
  // This mutable state is not my favorite, but we are constrained by the Hadoop API:
  var rootJob: Option[(Args) => Job] = None

  // Allows you to set the job for the Tool to run
  def setJobConstructor(jobc: (Args) => Job): Unit = {
    if (rootJob.isDefined) {
      sys.error("Job is already defined")
    } else {
      rootJob = Some(jobc)
    }
  }

  // Resolves the Job to run: either the preset constructor, or the job class
  // named by the first positional argument (which is then stripped).
  protected def getJob(args: Args): Job = rootJob match {
    case Some(job) => job(args)
    case None if args.positional.isEmpty =>
      throw ArgsException("Usage: Tool <jobClass> --local|--hdfs [args...]")
    case None => // has at least one arg
      val jobName = args.positional.head
      // Remove the job name from the positional arguments:
      val nonJobNameArgs = args + ("" -> args.positional.tail)
      Job(jobName, nonJobNameArgs)
  }

  // This both updates the jobConf with hadoop arguments
  // and returns all the non-hadoop arguments. Should be called once if
  // you want to process hadoop arguments (like -libjars).
  protected def nonHadoopArgsFrom(args: Array[String]): Array[String] = {
    (new GenericOptionsParser(getConf, args)).getRemainingArgs
  }

  // Splits the raw command line into the execution Mode (--local/--hdfs)
  // and the remaining scalding Args.
  def parseModeArgs(args: Array[String]): (Mode, Args) = {
    val a = Args(nonHadoopArgsFrom(args))
    (Mode(a, getConf), a)
  }

  // Parse the hadoop args, and if job has not been set, instantiate the job
  def run(args: Array[String]): Int = {
    val (mode, jobArgs) = parseModeArgs(args)
    // Connect mode with job Args
    run(getJob(Mode.putMode(mode, jobArgs)))
  }

  // Runs the given job (and all jobs it chains to via Job.next).
  // Returns 0 on success; throws on the first failed job.
  protected def run(job: Job): Int = {

    val onlyPrintGraph = job.args.boolean("tool.graph")
    if (onlyPrintGraph) {
      // TODO use proper logging
      println("Only printing the job graph, NOT executing. Run without --tool.graph to execute the job")
    }

    /*
    * This is a tail recursive loop that runs all the
    * jobs spawned from this one
    */
    val jobName = job.getClass.getName
    @tailrec
    def start(j: Job, cnt: Int): Unit = {
      val successful = if (onlyPrintGraph) {
        val flow = j.buildFlow
        /*
        * This just writes out the graph representing
        * all the cascading elements that are created for this
        * flow. Use graphviz to render it as a PDF.
        * The job is NOT run in this case.
        */
        val thisDot = jobName + cnt + ".dot"
        println("writing DOT: " + thisDot)

        /* We add descriptions if they exist to the stepName so it appears in the .dot file */
        flow match {
          case hadoopFlow: HadoopFlow =>
            val flowSteps = hadoopFlow.getFlowSteps.asScala
            flowSteps.foreach(step => {
              val baseFlowStep: BaseFlowStep[JobConf] = step.asInstanceOf[BaseFlowStep[JobConf]]
              val descriptions = baseFlowStep.getConfig.get(Config.StepDescriptions, "")
              if (!descriptions.isEmpty) {
                val stepXofYData = """\\(\\d+/\\d+\\)""".r.findFirstIn(baseFlowStep.getName).getOrElse("")
                // Reflection is only temporary. Latest cascading has setName public: https://github.com/cwensel/cascading/commit/487a6e9ef#diff-0feab84bc8832b2a39312dbd208e3e69L175
                // https://github.com/twitter/scalding/issues/1294
                val x = classOf[BaseFlowStep[JobConf]].getDeclaredMethod("setName", classOf[String])
                x.setAccessible(true)
                x.invoke(step, "%s %s".format(stepXofYData, descriptions))
              }
            })
          case _ => // descriptions not yet supported in other modes
        }

        flow.writeDOT(thisDot)
        val thisStepsDot = jobName + cnt + "_steps.dot"
        println("writing Steps DOT: " + thisStepsDot)
        flow.writeStepsDOT(thisStepsDot)
        true
      } else {
        j.validate()
        j.run()
      }
      j.clear()

      //When we get here, the job is finished
      if (successful) {
        // we need to use match not foreach to get tail recursion
        j.next match { // linter:disable:UseOptionForeachNotPatMatch
          case Some(nextj) => start(nextj, cnt + 1)
          case None => ()
        }
      } else {
        throw new RuntimeException("Job failed to run: " + jobName +
          (if (cnt > 0) { " child: " + cnt.toString + ", class: " + j.getClass.getName }
          else { "" }))
      }
    }

    //start a counter to see how deep we recurse:
    start(job, 0)
    0
  }
}
object Tool {
  /** JVM entry point: runs a [[Tool]] through Hadoop's ToolRunner after
    * expanding any -libjars glob patterns present in the arguments.
    * Failures are re-thrown wrapped with extra diagnostic info.
    */
  def main(args: Array[String]): Unit = {
    val expandedArgs = ExpandLibJarsGlobs(args)
    try {
      ToolRunner.run(new JobConf, new Tool, expandedArgs)
    } catch {
      case err: Throwable =>
        //re-throw the exception with extra info
        throw new Throwable(RichXHandler(err), err)
    }
  }
}
| tdyas/scalding | scalding-core/src/main/scala/com/twitter/scalding/Tool.scala | Scala | apache-2.0 | 5,654 |
package org.pico.fake
import org.specs2.mutable.Specification
// Smoke test: Fake.touch() must execute without throwing.
class FakeSpec extends Specification {
  "Fake" in {
    Fake.touch()
    success
  }
}
| newhoggy/pico-cuckoo-filter | pico-fake/src/test/scala/org/pico/fake/FakeSpec.scala | Scala | bsd-3-clause | 152 |
import sbt._
import Keys._
import org.scalatra.sbt._
import org.scalatra.sbt.PluginKeys._
import sbt.ScalaVersion
import twirl.sbt.TwirlPlugin._
import com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseKeys
/** sbt build definition for the GitBucket project: a single Scalatra-based
  * web application project with Twirl templates and JRebel support.
  */
object MyBuild extends Build {
  val Organization = "jp.sf.amateras"
  val Name = "gitbucket"
  val Version = "0.0.1"
  val ScalaVersion = "2.10.1"
  val ScalatraVersion = "2.2.1"

  lazy val project = Project (
    "gitbucket",
    file("."),
    settings = Defaults.defaultSettings ++ ScalatraPlugin.scalatraWithJRebel ++ Seq(
      organization := Organization,
      name := Name,
      version := Version,
      scalaVersion := ScalaVersion,
      // Extra resolver for the scalatra-forms dependency below.
      resolvers ++= Seq(
        Classpaths.typesafeReleases,
        "amateras-repo" at "http://amateras.sourceforge.jp/mvn/"
      ),
      libraryDependencies ++= Seq(
        "org.eclipse.jgit" % "org.eclipse.jgit.http.server" % "3.0.0.201306101825-r",
        "org.scalatra" %% "scalatra" % ScalatraVersion,
        "org.scalatra" %% "scalatra-specs2" % ScalatraVersion % "test",
        "org.scalatra" %% "scalatra-json" % ScalatraVersion,
        "org.json4s" %% "json4s-jackson" % "3.2.4",
        "jp.sf.amateras" %% "scalatra-forms" % "0.0.2",
        "commons-io" % "commons-io" % "2.4",
        "org.pegdown" % "pegdown" % "1.3.0",
        "org.apache.commons" % "commons-compress" % "1.5",
        "org.apache.commons" % "commons-email" % "1.3.1",
        "org.apache.httpcomponents" % "httpclient" % "4.2.5",
        "com.typesafe.slick" %% "slick" % "1.0.1",
        "com.novell.ldap" % "jldap" % "2009-10-07",
        "com.h2database" % "h2" % "1.3.171",
        "ch.qos.logback" % "logback-classic" % "1.0.6" % "runtime",
        "org.eclipse.jetty" % "jetty-webapp" % "8.1.8.v20121106" % "container;provided",
        "org.eclipse.jetty.orbit" % "javax.servlet" % "3.0.0.v201112011016" % "container;provided;test" artifacts (Artifact("javax.servlet", "jar", "jar"))
      ),
      EclipseKeys.withSource := true,
      // The webapp must remain runnable on Java 6.
      javacOptions in compile ++= Seq("-target", "6", "-source", "6"),
      packageOptions += Package.MainClass("JettyLauncher")
    ) ++ seq(Twirl.settings: _*)
  )
}
package uk.gov.dvla.vehicles.presentation.common.composition
import com.google.inject.Guice
trait Composition {
lazy val injector = Guice.createInjector(new DevModule)
}
| dvla/vehicles-presentation-common | common-test/app/uk/gov/dvla/vehicles/presentation/common/composition/Composition.scala | Scala | mit | 174 |
package main.scala
import scala.util.Random
import java.text.SimpleDateFormat
import java.util.Calendar
/** Generates random field values for SWIFT MT103 (single customer credit
  * transfer) test messages.
  *
  * Fixes over the previous version:
  *  - 32A no longer mutates a shared Calendar, which made the value date
  *    drift by +10h on every call; a fresh Calendar is used per call.
  *  - Random element selection uses `nextInt(length)` instead of
  *    `nextInt(length - 1)`, which could never pick the last element and
  *    threw IllegalArgumentException for single-element arrays.
  *  - 71A draws the random number once, so SHA/OUR/BEN are each chosen with
  *    probability 1/3 (previously two independent draws skewed the split).
  */
object MT103 {
  val random = new Random
  // NOTE: SimpleDateFormat is not thread-safe; this object assumes
  // single-threaded use — TODO confirm with callers.
  val DATE_FORMAT = new SimpleDateFormat("yyMMdd")
  // Kept for backward compatibility; no longer mutated by function103.
  val date = Calendar.getInstance()

  /** Picks a uniformly random element of a non-empty array. */
  private def pick(arr: Array[String]): String = arr(random.nextInt(arr.length))

  /**
   * Produces the value(s) for one MT103 field.
   *
   * @param i    (header, block number, field tag, reference) tuple
   * @param f50a candidate values for field 50A (comma-separated parts)
   * @param f50f candidate values for field 50F ('|'-separated parts)
   * @param f50k candidate values for field 50K ('|'-separated parts)
   * @return the input identifiers plus the generated field value lines
   */
  def function103(i: (String, String, String, String),
                  f50a: Array[String],
                  f50f: Array[String],
                  f50k: Array[String]): (String, String, String, Array[String]) = {
    val value: Array[String] = i._2 match {
      case "1" => Array("F01MIDLGB22AXXX0000000000")
      case "2" => Array(i._1.split('-')(0))
      case "3" => Array(i._4)
      case "4" => i._3 match {
        case "20" =>
          // Random 10-char transaction reference.
          Array((random.alphanumeric.take(7).mkString + random.nextInt(1000).toString).toUpperCase())
        case "23B" => Array("CRED")
        case "32A" =>
          // Value date = now + 10 hours, formatted yyMMdd, plus a GBP amount.
          val cal = Calendar.getInstance()
          cal.add(Calendar.HOUR, 10)
          val t = DATE_FORMAT.format(cal.getTime)
          Array(t + "GBP" + random.nextInt(99999).toString.replaceAll("0", "1") + ",00")
        case "50A" => pick(f50a).split(",").map(_.trim())
        case "50K" => pick(f50k).split('|').map(_.trim())
        case "50F" => pick(f50f).split('|').map(_.trim()).take(4)
        case "53D" => Array("GB" + random.nextInt(10000000).toString)
        case "57A" => Array("BARCGB" + (10000 + random.nextInt(1000)).toString.substring(1, 3) + "XXX")
        case "59"  => Array("/GB11VSOP55547873" + (100000 + random.nextInt(1000)).toString.substring(2, 5))
        case "71A" =>
          // Single draw so each charge code has probability 1/3.
          random.nextInt(3) match {
            case 0 => Array("SHA")
            case 1 => Array("OUR")
            case _ => Array("BEN")
          }
        case _ => Array("")
      }
      case _ => Array("")
    }
    (i._1, i._2, i._3, value)
  }
}
package mesosphere.marathon.tasks
import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream }
import com.codahale.metrics.MetricRegistry
import com.google.common.collect.Lists
import mesosphere.marathon.MarathonSpec
import mesosphere.marathon.Protos.MarathonTask
import mesosphere.marathon.metrics.Metrics
import mesosphere.marathon.state.PathId.StringPathId
import mesosphere.mesos.protos.Implicits._
import mesosphere.mesos.protos.TextAttribute
import mesosphere.util.state.PersistentStore
import mesosphere.util.state.memory.InMemoryStore
import org.apache.mesos.Protos
import org.apache.mesos.Protos.{ TaskID, TaskState, TaskStatus }
import org.mockito.Matchers.any
import org.mockito.Mockito.{ reset, spy, times, verify }
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.concurrent.ScalaFutures._
import scala.collection._
/** Unit tests for TaskTracker: serialization round-trips, task lifecycle
  * transitions, per-app bookkeeping, orphan expunging, and the rule that the
  * persistent store is only written when a task's state or health actually
  * changes (verified via a Mockito spy on the store).
  */
class TaskTrackerTest extends MarathonSpec {
  val TEST_APP_NAME = "foo".toRootPath
  val TEST_TASK_ID = "sampleTask"
  // Recreated before each test — see the `before` block.
  var taskTracker: TaskTracker = null
  var state: PersistentStore = null
  val config = defaultConfig()
  val taskIdUtil = new TaskIdUtil
  val metrics = new Metrics(new MetricRegistry)

  before {
    // The spy lets tests verify how often the store is written to.
    state = spy(new InMemoryStore)
    taskTracker = new TaskTracker(state, config, metrics)
  }

  // Task with fixed host/port, sufficient for most tests.
  def makeSampleTask(id: String) = {
    makeTask(id, "host", 999)
  }

  def makeTask(id: String, host: String, port: Int) = {
    MarathonTask.newBuilder()
      .setHost(host)
      .addAllPorts(Lists.newArrayList(port))
      .setId(id)
      .addAttributes(TextAttribute("attr1", "bar"))
      .build()
  }

  def makeTaskStatus(id: String, state: TaskState = TaskState.TASK_RUNNING) = {
    TaskStatus.newBuilder
      .setTaskId(TaskID.newBuilder
        .setValue(id)
      )
      .setState(state)
      .build
  }

  // Membership is judged on id, host and ports only (not full equality).
  def containsTask(tasks: Iterable[MarathonTask], task: MarathonTask) =
    tasks.exists(t => t.getId == task.getId
      && t.getHost == task.getHost
      && t.getPortsList == task.getPortsList)
  def shouldContainTask(tasks: Iterable[MarathonTask], task: MarathonTask) =
    assert(containsTask(tasks, task), s"Should contain task ${task.getId}")
  def shouldNotContainTask(tasks: Iterable[MarathonTask], task: MarathonTask) =
    assert(!containsTask(tasks, task), s"Should not contain task ${task.getId}")

  def shouldHaveTaskStatus(task: MarathonTask, taskStatus: TaskStatus) {
    assert(
      task.getStatus == taskStatus, s"Should have task status ${taskStatus.getState.toString}"
    )
  }

  def stateShouldNotContainKey(state: PersistentStore, key: String) {
    assert(!state.allIds().futureValue.toSet.contains(key), s"Key $key was found in state")
  }

  def stateShouldContainKey(state: PersistentStore, key: String) {
    assert(state.allIds().futureValue.toSet.contains(key), s"Key $key was not found in state")
  }

  // Round-trips a task through serialize/deserialize and compares.
  test("SerializeAndDeserialize") {
    val sampleTask = makeSampleTask(TEST_TASK_ID)
    val byteOutputStream = new ByteArrayOutputStream()
    val outputStream = new ObjectOutputStream(byteOutputStream)
    taskTracker.serialize(sampleTask, outputStream)
    val byteInputStream = new ByteArrayInputStream(byteOutputStream.toByteArray)
    val inputStream = new ObjectInputStream(byteInputStream)
    val deserializedTask = taskTracker.deserialize(taskTracker.getKey(TEST_APP_NAME, TEST_TASK_ID), inputStream)
    assert(deserializedTask.get.equals(sampleTask), "Tasks are not properly serialized")
  }

  test("StoreAndFetchTask") {
    val sampleTask = makeSampleTask(TEST_TASK_ID)
    taskTracker.store(TEST_APP_NAME, sampleTask).futureValue
    val fetchedTask = taskTracker.fetchTask(taskTracker.getKey(TEST_APP_NAME, TEST_TASK_ID))
    assert(fetchedTask.get.equals(sampleTask), "Tasks are not properly stored")
  }

  // fetchApp must return exactly the tasks stored for that app.
  test("FetchApp") {
    val taskId1 = taskIdUtil.taskId(TEST_APP_NAME)
    val taskId2 = taskIdUtil.taskId(TEST_APP_NAME)
    val taskId3 = taskIdUtil.taskId(TEST_APP_NAME)
    val task1 = makeSampleTask(taskId1)
    val task2 = makeSampleTask(taskId2)
    val task3 = makeSampleTask(taskId3)
    taskTracker.store(TEST_APP_NAME, task1).futureValue
    taskTracker.store(TEST_APP_NAME, task2).futureValue
    taskTracker.store(TEST_APP_NAME, task3).futureValue
    val testAppTasks = taskTracker.fetchApp(TEST_APP_NAME).tasks
    shouldContainTask(testAppTasks.values.toSet, task1)
    shouldContainTask(testAppTasks.values.toSet, task2)
    shouldContainTask(testAppTasks.values.toSet, task3)
    assert(testAppTasks.size == 3)
  }

  // Walks one task through created → starting → running → finished and
  // checks in-memory and persistent state at every step.
  test("TaskLifecycle") {
    val sampleTask = makeSampleTask(TEST_TASK_ID)
    val sampleTaskKey = taskTracker.getKey(TEST_APP_NAME, TEST_TASK_ID)
    // CREATE TASK: tracked in memory, but not yet persisted.
    taskTracker.created(TEST_APP_NAME, sampleTask)
    shouldContainTask(taskTracker.get(TEST_APP_NAME), sampleTask)
    stateShouldNotContainKey(state, sampleTaskKey)
    // TASK STATUS UPDATE: first status persists the task.
    val startingTaskStatus = makeTaskStatus(TEST_TASK_ID, TaskState.TASK_STARTING)
    taskTracker.statusUpdate(TEST_APP_NAME, startingTaskStatus).futureValue
    shouldContainTask(taskTracker.get(TEST_APP_NAME), sampleTask)
    stateShouldContainKey(state, sampleTaskKey)
    taskTracker.get(TEST_APP_NAME).foreach(task => shouldHaveTaskStatus(task, startingTaskStatus))
    // TASK RUNNING
    val runningTaskStatus: TaskStatus = makeTaskStatus(TEST_TASK_ID, TaskState.TASK_RUNNING)
    taskTracker.running(TEST_APP_NAME, runningTaskStatus).futureValue
    shouldContainTask(taskTracker.get(TEST_APP_NAME), sampleTask)
    stateShouldContainKey(state, sampleTaskKey)
    taskTracker.get(TEST_APP_NAME).foreach(task => shouldHaveTaskStatus(task, runningTaskStatus))
    // TASK STILL RUNNING: a second `running` for the same task must fail.
    val res = taskTracker.running(TEST_APP_NAME, runningTaskStatus)
    ScalaFutures.whenReady(res.failed) { e =>
      assert(e.getMessage == s"Task for ID $TEST_TASK_ID already running, ignoring")
    }
    // TASK TERMINATED: removed from the store but the app entry remains.
    val finishedTaskStatus = makeTaskStatus(TEST_TASK_ID, TaskState.TASK_FINISHED)
    taskTracker.terminated(TEST_APP_NAME, finishedTaskStatus).futureValue
    assert(taskTracker.contains(TEST_APP_NAME), "App was not stored")
    stateShouldNotContainKey(state, sampleTaskKey)
    // APP SHUTDOWN
    taskTracker.shutdown(TEST_APP_NAME)
    assert(!taskTracker.contains(TEST_APP_NAME), "App was not removed")
    // ERRONEOUS MESSAGE: status update for an unknown task is discarded.
    val erroneousStatus = makeTaskStatus(TEST_TASK_ID, TaskState.TASK_LOST)
    val taskOption = taskTracker.statusUpdate(TEST_APP_NAME, erroneousStatus).futureValue
    // Empty option means this message was discarded since there was no matching task
    assert(taskOption.isEmpty, "Task was able to be updated and was not removed")
  }

  // `running` without a prior `created` must be rejected.
  test("UnknownTasks") {
    val sampleTask = makeSampleTask(TEST_TASK_ID)
    val sampleTaskKey = taskTracker.getKey(TEST_APP_NAME, TEST_TASK_ID)
    // don't call taskTracker.created, but directly running
    val runningTaskStatus: TaskStatus = makeTaskStatus(TEST_TASK_ID, TaskState.TASK_RUNNING)
    val res = taskTracker.running(TEST_APP_NAME, runningTaskStatus)
    ScalaFutures.whenReady(res.failed) { e =>
      assert(e.getMessage == s"No staged task for ID $TEST_TASK_ID, ignoring")
    }
    shouldNotContainTask(taskTracker.get(TEST_APP_NAME), sampleTask)
    stateShouldNotContainKey(state, sampleTaskKey)
  }

  // Tasks from different apps must be kept strictly separate.
  test("MultipleApps") {
    val appName1 = "app1".toRootPath
    val appName2 = "app2".toRootPath
    val appName3 = "app3".toRootPath
    val taskId1 = taskIdUtil.taskId(appName1)
    val taskId2 = taskIdUtil.taskId(appName1)
    val taskId3 = taskIdUtil.taskId(appName2)
    val taskId4 = taskIdUtil.taskId(appName3)
    val taskId5 = taskIdUtil.taskId(appName3)
    val taskId6 = taskIdUtil.taskId(appName3)
    val task1 = makeSampleTask(taskId1)
    val task2 = makeSampleTask(taskId2)
    val task3 = makeSampleTask(taskId3)
    val task4 = makeSampleTask(taskId4)
    val task5 = makeSampleTask(taskId5)
    val task6 = makeSampleTask(taskId6)
    taskTracker.created(appName1, task1)
    taskTracker.running(appName1, makeTaskStatus(taskId1)).futureValue
    taskTracker.created(appName1, task2)
    taskTracker.running(appName1, makeTaskStatus(taskId2)).futureValue
    taskTracker.created(appName2, task3)
    taskTracker.running(appName2, makeTaskStatus(taskId3)).futureValue
    taskTracker.created(appName3, task4)
    taskTracker.running(appName3, makeTaskStatus(taskId4)).futureValue
    taskTracker.created(appName3, task5)
    taskTracker.running(appName3, makeTaskStatus(taskId5)).futureValue
    taskTracker.created(appName3, task6)
    taskTracker.running(appName3, makeTaskStatus(taskId6)).futureValue
    assert(state.allIds().futureValue.size == 6, "Incorrect number of tasks in state")
    val app1Tasks = taskTracker.fetchApp(appName1).tasks
    shouldContainTask(app1Tasks.values.toSet, task1)
    shouldContainTask(app1Tasks.values.toSet, task2)
    assert(app1Tasks.size == 2, "Incorrect number of tasks")
    val app2Tasks = taskTracker.fetchApp(appName2).tasks
    shouldContainTask(app2Tasks.values.toSet, task3)
    assert(app2Tasks.size == 1, "Incorrect number of tasks")
    val app3Tasks = taskTracker.fetchApp(appName3).tasks
    shouldContainTask(app3Tasks.values.toSet, task4)
    shouldContainTask(app3Tasks.values.toSet, task5)
    shouldContainTask(app3Tasks.values.toSet, task6)
    assert(app3Tasks.size == 3, "Incorrect number of tasks")
  }

  // Tasks stored directly (never created/run via the tracker) belong to an
  // "orphaned" app and must be removed by expungeOrphanedTasks().
  test("ExpungeOrphanedTasks") {
    val ORPHANED_APP_NAME = "orphanedApp".toRootPath
    val orphanedTaskId1 = taskIdUtil.taskId(ORPHANED_APP_NAME)
    val orphanedTaskId2 = taskIdUtil.taskId(ORPHANED_APP_NAME)
    val orphanedTaskId3 = taskIdUtil.taskId(ORPHANED_APP_NAME)
    val orphanedTask1 = makeSampleTask(orphanedTaskId1)
    val orphanedTask2 = makeSampleTask(orphanedTaskId2)
    val orphanedTask3 = makeSampleTask(orphanedTaskId3)
    taskTracker.store(ORPHANED_APP_NAME, orphanedTask1).futureValue
    taskTracker.store(ORPHANED_APP_NAME, orphanedTask2).futureValue
    taskTracker.store(ORPHANED_APP_NAME, orphanedTask3).futureValue
    val taskId1 = taskIdUtil.taskId(TEST_APP_NAME)
    val taskId2 = taskIdUtil.taskId(TEST_APP_NAME)
    val taskId3 = taskIdUtil.taskId(TEST_APP_NAME)
    val task1 = makeSampleTask(taskId1)
    val task2 = makeSampleTask(taskId2)
    val task3 = makeSampleTask(taskId3)
    taskTracker.created(TEST_APP_NAME, task1)
    taskTracker.running(TEST_APP_NAME, makeTaskStatus(taskId1)).futureValue
    taskTracker.created(TEST_APP_NAME, task2)
    taskTracker.running(TEST_APP_NAME, makeTaskStatus(taskId2)).futureValue
    taskTracker.created(TEST_APP_NAME, task3)
    taskTracker.running(TEST_APP_NAME, makeTaskStatus(taskId3)).futureValue
    taskTracker.expungeOrphanedTasks()
    val names = state.allIds().futureValue
    assert(names.size == 3, "Orphaned tasks were not correctly expunged")
    assert(!taskTracker.contains(ORPHANED_APP_NAME), "Orphaned app should not exist in TaskTracker")
    val tasks = taskTracker.get(TEST_APP_NAME)
    shouldContainTask(tasks, task1)
    shouldContainTask(tasks, task2)
    shouldContainTask(tasks, task3)
  }

  // The remaining tests verify the write-avoidance rule: the store must be
  // touched only when the task's state or health value actually changes.
  test("Should not store if state did not change (no health present)") {
    val sampleTask = makeSampleTask(TEST_TASK_ID)
    val status = Protos.TaskStatus
      .newBuilder
      .setState(Protos.TaskState.TASK_RUNNING)
      .setTaskId(Protos.TaskID.newBuilder.setValue(sampleTask.getId))
      .build()
    taskTracker.store(TEST_APP_NAME, sampleTask).futureValue
    taskTracker.running(TEST_APP_NAME, status).futureValue
    taskTracker.statusUpdate(TEST_APP_NAME, status).futureValue
    reset(state)
    taskTracker.statusUpdate(TEST_APP_NAME, status).futureValue
    verify(state, times(0)).update(any())
  }

  test("Should not store if state and health did not change") {
    val sampleTask = makeSampleTask(TEST_TASK_ID)
    val status = Protos.TaskStatus
      .newBuilder
      .setState(Protos.TaskState.TASK_RUNNING)
      .setTaskId(Protos.TaskID.newBuilder.setValue(sampleTask.getId))
      .setHealthy(true)
      .build()
    taskTracker.store(TEST_APP_NAME, sampleTask).futureValue
    taskTracker.running(TEST_APP_NAME, status).futureValue
    taskTracker.statusUpdate(TEST_APP_NAME, status).futureValue
    reset(state)
    taskTracker.statusUpdate(TEST_APP_NAME, status).futureValue
    verify(state, times(0)).update(any())
  }

  test("Should store if state changed") {
    val sampleTask = makeSampleTask(TEST_TASK_ID)
    val status = Protos.TaskStatus
      .newBuilder
      .setState(Protos.TaskState.TASK_RUNNING)
      .setTaskId(Protos.TaskID.newBuilder.setValue(sampleTask.getId))
      .build()
    taskTracker.store(TEST_APP_NAME, sampleTask).futureValue
    taskTracker.running(TEST_APP_NAME, status).futureValue
    taskTracker.statusUpdate(TEST_APP_NAME, status).futureValue
    reset(state)
    val newStatus = status.toBuilder
      .setState(Protos.TaskState.TASK_FAILED)
      .build()
    taskTracker.statusUpdate(TEST_APP_NAME, newStatus).futureValue
    verify(state, times(1)).update(any())
  }

  test("Should store if health changed") {
    val sampleTask = makeSampleTask(TEST_TASK_ID)
    val status = Protos.TaskStatus
      .newBuilder
      .setState(Protos.TaskState.TASK_RUNNING)
      .setTaskId(Protos.TaskID.newBuilder.setValue(sampleTask.getId))
      .setHealthy(true)
      .build()
    taskTracker.store(TEST_APP_NAME, sampleTask).futureValue
    taskTracker.running(TEST_APP_NAME, status).futureValue
    taskTracker.statusUpdate(TEST_APP_NAME, status).futureValue
    reset(state)
    val newStatus = status.toBuilder
      .setHealthy(false)
      .build()
    taskTracker.statusUpdate(TEST_APP_NAME, newStatus).futureValue
    verify(state, times(1)).update(any())
  }

  test("Should store if state and health changed") {
    val sampleTask = makeSampleTask(TEST_TASK_ID)
    val status = Protos.TaskStatus
      .newBuilder
      .setState(Protos.TaskState.TASK_RUNNING)
      .setTaskId(Protos.TaskID.newBuilder.setValue(sampleTask.getId))
      .setHealthy(true)
      .build()
    taskTracker.store(TEST_APP_NAME, sampleTask).futureValue
    taskTracker.running(TEST_APP_NAME, status).futureValue
    taskTracker.statusUpdate(TEST_APP_NAME, status).futureValue
    reset(state)
    val newStatus = status.toBuilder
      .setState(Protos.TaskState.TASK_FAILED)
      .setHealthy(false)
      .build()
    taskTracker.statusUpdate(TEST_APP_NAME, newStatus).futureValue
    verify(state, times(1)).update(any())
  }

  test("Should store if health changed (no health present at first)") {
    val sampleTask = makeSampleTask(TEST_TASK_ID)
    val status = Protos.TaskStatus
      .newBuilder
      .setState(Protos.TaskState.TASK_RUNNING)
      .setTaskId(Protos.TaskID.newBuilder.setValue(sampleTask.getId))
      .build()
    taskTracker.store(TEST_APP_NAME, sampleTask).futureValue
    taskTracker.running(TEST_APP_NAME, status).futureValue
    taskTracker.statusUpdate(TEST_APP_NAME, status).futureValue
    reset(state)
    val newStatus = status.toBuilder
      .setHealthy(true)
      .build()
    taskTracker.statusUpdate(TEST_APP_NAME, newStatus).futureValue
    verify(state, times(1)).update(any())
  }

  test("Should store if state and health changed (no health present at first)") {
    val sampleTask = makeSampleTask(TEST_TASK_ID)
    val status = Protos.TaskStatus
      .newBuilder
      .setState(Protos.TaskState.TASK_RUNNING)
      .setTaskId(Protos.TaskID.newBuilder.setValue(sampleTask.getId))
      .build()
    taskTracker.store(TEST_APP_NAME, sampleTask).futureValue
    taskTracker.running(TEST_APP_NAME, status).futureValue
    taskTracker.statusUpdate(TEST_APP_NAME, status).futureValue
    reset(state)
    val newStatus = status.toBuilder
      .setState(Protos.TaskState.TASK_FAILED)
      .setHealthy(false)
      .build()
    taskTracker.statusUpdate(TEST_APP_NAME, newStatus).futureValue
    verify(state, times(1)).update(any())
  }
}
| bsideup/marathon | src/test/scala/mesosphere/marathon/tasks/TaskTrackerTest.scala | Scala | apache-2.0 | 16,040 |
package io.getquill.context.jdbc
import java.sql.{ Timestamp, Date => SqlDate }
import java.sql.Types._
import java.time.LocalDate
import java.util.Date
import io.getquill.context.sql.encoding.ArrayEncoding
import scala.collection.compat._
/** JDBC encoders for Scala collections mapped to SQL ARRAY columns.
  * One implicit per supported element type; each delegates to the generic
  * `arrayEncoder`/`arrayRawEncoder` helpers below.
  */
trait ArrayEncoders extends ArrayEncoding {
  self: JdbcComposition[_, _] =>

  implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = arrayRawEncoder[String, Col](VARCHAR)
  // BigDecimal needs mapping to java.math.BigDecimal before being boxed.
  implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = arrayEncoder[BigDecimal, Col](parseJdbcType(NUMERIC), _.bigDecimal)
  implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayRawEncoder[Boolean, Col](BOOLEAN)
  implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayRawEncoder[Byte, Col](TINYINT)
  implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayRawEncoder[Short, Col](SMALLINT)
  implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = arrayRawEncoder[Int, Col](INTEGER)
  implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayRawEncoder[Long, Col](BIGINT)
  implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayRawEncoder[Float, Col](FLOAT)
  implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayRawEncoder[Double, Col](DOUBLE)
  implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayRawEncoder[Date, Col](TIMESTAMP)
  implicit def arrayTimestampEncoder[Col <: Seq[Timestamp]]: Encoder[Col] = arrayRawEncoder[Timestamp, Col](TIMESTAMP)
  // java.time.LocalDate is converted to java.sql.Date for the driver.
  implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = arrayEncoder[LocalDate, Col](parseJdbcType(DATE), SqlDate.valueOf)

  /**
   * Generic encoder for JDBC arrays.
   *
   * @param jdbcType JDBC specific type identification, may be various regarding to JDBC driver
   * @param mapper jdbc array accepts AnyRef objects hence a mapper is needed.
   *               If input type of an element of collection is not comfortable with jdbcType
   *               then use this mapper to transform to appropriate type before casting to AnyRef
   * @tparam T element type
   * @tparam Col seq type
   * @return JDBC array encoder
   */
  def arrayEncoder[T, Col <: Seq[T]](jdbcType: String, mapper: T => AnyRef): Encoder[Col] = {
    encoder[Col](ARRAY, (idx: Index, seq: Col, row: PrepareRow) => {
      val bf = implicitly[CBF[AnyRef, Array[AnyRef]]]
      // createArrayOf requires an Array[AnyRef]; build it via the mapper.
      row.setArray(
        idx,
        row.getConnection.createArrayOf(
          jdbcType,
          seq.foldLeft(bf.newBuilder)((b, x) => b += mapper(x)).result()
        )
      )
    })
  }

  /**
   * Creates JDBC array encoder for type `T` which is already supported by database as array element.
   *
   * @param jdbcType JDBC specific type identification, may be various regarding to JDBC driver
   * @tparam T element type
   * @tparam Col seq type
   * @return JDBC array encoder
   */
  def arrayRawEncoder[T, Col <: Seq[T]](jdbcType: String): Encoder[Col] =
    arrayEncoder[T, Col](jdbcType, _.asInstanceOf[AnyRef])

  /**
   * Transform jdbcType int using `parseJdbcType` and calls overloaded method to create Encoder
   *
   * @param jdbcType java.sql.Types
   * @see arrayRawEncoder(jdbcType: String)
   * @see JdbcContext#parseJdbcType(jdbcType: String)
   */
  def arrayRawEncoder[T, Col <: Seq[T]](jdbcType: Int): Encoder[Col] =
    arrayRawEncoder[T, Col](parseJdbcType(jdbcType))
}
| getquill/quill | quill-jdbc/src/main/scala/io/getquill/context/jdbc/ArrayEncoders.scala | Scala | apache-2.0 | 3,401 |
package satisfaction
package hadoop.hive
import java.net.URL
import scala.collection.JavaConversions.seqAsJavaList
import _root_.org.apache.commons.logging.Log
import _root_.org.apache.hadoop.hive.conf.HiveConf
import satisfaction.Logging
import satisfaction.Track
import satisfaction.Witness.Witness2Properties
import satisfaction.hadoop.CachingTrackLoader
import _root_.org.apache.hadoop.hive.ql.metadata.Hive
import satisfaction.util.classloader.IsolatedClassLoader
import satisfaction.fs.LocalFileSystem
import satisfaction.fs.Path
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.conf.Configuration
/**
* Trait for class which can executes
* Hive Queries,
* and interact with Hive ( i.e parse syntax, plan queries,
* define dependencies )
*
* Can be both local, using Hive Driver implementation,
* or remote, using HiveServer2 JDBC client
*/
trait HiveDriver extends java.io.Closeable {

  // Switch the session to the given database; returns true on success.
  def useDatabase(dbName: String) : Boolean

  // Execute a single HiveQL statement; returns true if it completed successfully.
  def executeQuery(query: String): Boolean

  // Set a Hive session property affecting subsequent queries.
  def setProperty( prop : String, propValue : String )

  // Best-effort cancellation of any currently running query.
  def abort()

  // Release resources held by the driver (sessions, connections).
  def close()
}
object HiveDriver extends Logging {
def apply(hiveConfRef: HiveConf)(implicit track : Track): HiveDriver = {
try {
/**
val parentLoader = if (Thread.currentThread.getContextClassLoader != null) {
Thread.currentThread.getContextClassLoader
} else {
hiveConf.getClassLoader
}
*
*/
/// Create a new HiveConf, so that we don't have reference to global objects,
//// and we can get garbage collected
val hiveConf = new HiveConf(hiveConfRef)
info( s" Current Thread = ${Thread.currentThread.getName} ThreadLoader = ${Thread.currentThread.getContextClassLoader} HiveConfLoader = ${hiveConf.getClassLoader} This loader = ${this.getClass.getClassLoader} ")
//// XXX What should the parent loader be
///val parentLoader = classOf[HiveDriver].getClassLoader()
val parentLoader = hiveConf.getClassLoader
//// XXX Centralize somewhere
hiveConf.setVar(HiveConf.ConfVars.HMSHANDLERATTEMPTS, "10")
hiveConf.setVar(HiveConf.ConfVars.HMSHANDLERINTERVAL, "3000")
hiveConf.setVar(HiveConf.ConfVars.METASTORETHRIFTCONNECTIONRETRIES, "5")
hiveConf.setVar(HiveConf.ConfVars.METASTORETHRIFTFAILURERETRIES, "6")
if(hiveConf.getVar(HiveConf.ConfVars.PREEXECHOOKS).equals("org.apache.hadoop.hive.ql.hooks.ATSHook") ) {
log.warn(" Overriding bogus Ambari Timeline Server ATSHook class")
hiveConf.setVar(HiveConf.ConfVars.PREEXECHOOKS, "")
hiveConf.setVar(HiveConf.ConfVars.POSTEXECHOOKS, "")
hiveConf.setVar(HiveConf.ConfVars.ONFAILUREHOOKS, "")
}
info(s" ParentLoader = ${parentLoader} ")
val auxJars = hiveConf.getAuxJars
info( s" Track libPath is ${track.libPath}")
info( s" Track resourcePath is ${track.resourcePath}")
val urls = track.listLibraries
val resources = track.listResources
val exportFiles = ( urls ++ resources)
val isolateFlag = track.trackProperties.getProperty("satisfaction.classloader.isolate","true").toBoolean
val urlClassLoader = if( isolateFlag) {
val localPath = track.trackPath.toUri.getPath() /// remove hdfs:// scheme
val cacheBase = track.trackProperties.getProperty("satisfaction.track.cache.path" , "/var/log/satisfaction-cache-root")
val cachePath = new Path(cacheBase) / localPath / "lib"
val localFs = LocalFileSystem()
if( !localFs.exists(cachePath) ) { localFs.mkdirs( cachePath) }
info(s" Using IsolatedClassLoader with a cachePath of $cachePath")
/// XXX Store as static resource
val frontLoadClasses = List("org.apache.hadoop.hive.ql.*",
"satisfaction.hadoop.hive.HiveLocalDriver",
"satisfaction.hadoop.hive.HiveLocalDriver.*",
"satisfaction.hadoop.hive.*",
"satisfaction.hadoop.hdfs.*",
"org.apache.hadoop.hive.ql.Driver",
"org.apache.hadoop.hive.ql.Driver.*",
"org.apache.hadoop.hive.ql.exec.*",
"org.apache.hadoop.hive.ql.exec.Task.*",
"org.apache.hadoop.hive.ql.exec.Utilities",
"org.apache.hadoop.hive.ql.exec.Utilities.*",
"org.apache.hadoop.hive.ql.exec.DDLTask.*",
"org.apache.hadoop.hive.ql.exec.TaskRunner.*",
"org.apache.hadoop.hive.ql.session.SessionState",
"org.apache.hadoop.hive.ql.session.SessionState.*",
"org.apache.op.hive.ql.session.SessionState.*",
"brickhouse.*",
"org.apache.hive.com.esotericsoftware.kryo.*",
"org.apache.hadoop.util.ReflectionUtils",
"org.apache.hadoop.util.ReflectionUtils.*",
"org.apache.hadoop.io.WritableComparator",
"org.apache.hadoop.io.WritableComparator.*",
"org.apache.hadoop.io.compress.CompressionCodecFactory",
"org.apache.hadoop.io.compress.CompressionCodecFactory.*",
"org.apache.hadoop.yarn.*",
"org.apache.hadoop.mapreduce.*",
"satisfaction.Logging",
"satisfaction.Logging.*",
"com.tagged.udf.*",
"com.tagged.hadoop.hive.*")
val backLoadClasses = List(
"satisfaction.hadoop.hive.HiveSatisfier",
"org.apache.hadoop.hive.conf.*",
"org.apache.hive.common.*",
"org.apache.hadoop.hive.common.*",
"org.apache.commons.logging.*",
"org.apache.hadoop.hbase.*",
"org.apache.hadoop.mapreduce.Cluster",
"org.apache.hadoop.mapreduce.protocol.*",
"org.apache.hadoop.mapreduce.util.*",
"satisfaction.util.*"
////"org.apache.hadoop.hive.ql.metadata.*",
///"org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper",
///"org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper.*",
///"org.apache.hadoop.hive.metastore.*",
///"org.apache.hadoop.hive.ql.plan.api.*",
///"org.apache.hadoop.hive.ql.metadata.*",
//"org.apache.*HiveMetaStoreClient.*",
///"org.apache.*IMetaStoreClient.*",
///"org.apache.hadoop.hive.metastore.*"
///"org.apache.hadoop.hive.ql.lockmgr.*",
////"org.apache.hadoop.hive.metastore.api.*",
///"org.apache.*HiveMetaHookLoader.*")
)
val isolatedClassLoader = new IsolatedClassLoader( exportFiles.map( _.toUri.toURL).toArray[URL],
parentLoader,
frontLoadClasses,
backLoadClasses,
hiveConf,
cachePath.pathString);
isolatedClassLoader.registerClass(classOf[HiveDriver]);
isolatedClassLoader.registerClass(classOf[HiveConf]);
isolatedClassLoader.registerClass(classOf[HiveConf.ConfVars]);
isolatedClassLoader.registerClass(classOf[org.apache.hadoop.mapred.JobConf]);
isolatedClassLoader.registerClass(classOf[Configuration]);
info( s" LOG CLASSLOADER is ${classOf[Log].getClassLoader}")
if( track.trackProperties.contains("satisfaction.classloader.frontload")) {
track.trackProperties.getProperty("satisfaction.classloader.frontload").split(",").foreach( expr => {
isolatedClassLoader.addFrontLoadExpr( expr);
})
}
if( track.trackProperties.contains("satisfaction.classloader.backload")) {
track.trackProperties.getProperty("satisfaction.classloader.backload").split(",").foreach( expr => {
isolatedClassLoader.addFrontLoadExpr( expr);
})
}
isolatedClassLoader
} else {
java.net.URLClassLoader.newInstance( exportFiles.map( _.toUri.toURL).toArray[URL] )
}
val auxJarPath = exportFiles.map( _.toUri.toString ).mkString(",")
info(" Using AuxJarPath " + auxJarPath)
hiveConf.setAuxJars( auxJarPath)
hiveConf.set("hive.aux.jars.path", auxJarPath)
hiveConf.setClassLoader(urlClassLoader)
//// XXX Move to Scala reflection ...
info( "Instantiating HiveLocalDriver")
//// XXX Specify as track property ..
val localDriverClass: Class[_] = urlClassLoader.loadClass("satisfaction.hadoop.hive.HiveLocalDriver")
info( s" Local Driver Class is $localDriverClass ")
val constructor = localDriverClass.getConstructor(hiveConf.getClass() )
val satisfactionHiveConf = new SatisfactionHiveConf(hiveConf)
satisfactionHiveConf.setClassLoader( urlClassLoader)
val hiveLocalDriver = constructor.newInstance(satisfactionHiveConf )
info( s" Hive Local Driver is ${hiveLocalDriver} ${hiveLocalDriver.getClass} ")
hiveLocalDriver match {
case traitDriver : HiveDriver => {
info(s" Local Driver $hiveLocalDriver is Trait Driver $traitDriver" )
return traitDriver
}
case _ => {
error(s" LocalDriver $hiveLocalDriver really isn't a Hive Driver !!!!")
warn(s" LocalDriver $hiveLocalDriver really isn't a Hive Driver !!!!")
error( s" HiveDriver Class is ${classOf[HiveDriver]} ${classOf[HiveDriver].hashCode()} Loader is ${classOf[HiveDriver].getClassLoader} ")
error( " TRAITS of localDriver ")
localDriverClass.getInterfaces().foreach( ifc => {
error( s" TRAIT CLASS ${ifc} ${ifc.getCanonicalName} ${ifc.hashCode} ${ifc.getClassLoader} ")
})
throw new RuntimeException(s" LocalDriver $hiveLocalDriver really isn't a Hive Driver !!!!")
}
}
} catch {
case e: Exception =>
e.printStackTrace(System.out)
error("Error while accessing HiveDriver", e)
throw e
}
}
}
/**
 * HiveConf variant that never caches classes resolved by name.
 *
 * Every getClassByName call goes straight to the currently-installed class
 * loader, so isolated loaders swapped in at runtime are always consulted.
 */
class SatisfactionHiveConf(hc: HiveConf) extends HiveConf(hc) with Logging {
  override def getClassByName(className: String): Class[_] = {
    val loader = getClassLoader
    debug(s" Loading HiveConf class $className with ClassLoader ${loader}")
    loader.loadClass(className)
  }
}
/**
class HiveDriverHook extends HiveDriverRunHook with Logging {
/**
* Invoked before Hive begins any processing of a command in the Driver,
* notably before compilation and any customizable performance logging.
*/
def preDriverRun(hookContext : HiveDriverRunHookContext) = {
info("HIVE_DRIVER :: PRE DRIVER RUN :: " + hookContext.getCommand())
////SessionState.getConsole.printInfo("HIVE_DRIVER :: PRE DRIVER RUN :: " + hookContext.getCommand())
}
/**
* Invoked after Hive performs any processing of a command, just before a
* response is returned to the entity calling the Driver.
*/
def postDriverRun( hookContext : HiveDriverRunHookContext) = {
info(" HIVE DRIVER POST RUN " + hookContext.getCommand() )
SessionState.get.getLastMapRedStatsList()
SessionState.getConsole().printInfo("HIVE DRIVER POST RUN " + hookContext.getCommand() )
}
}
*
*/
| ifwe/satisfaction | modules/hive/src/main/scala/satisfaction/hadoop/hive/HiveDriver.scala | Scala | apache-2.0 | 11,246 |
package io.buoyant.consul.v1
import java.util.Base64
import com.twitter.finagle.{Backoff, http}
import com.twitter.finagle.stats.{DefaultStatsReceiver, StatsReceiver}
import com.twitter.util._
/** Factory for [[KvApi]] rooted at the versioned Consul URI prefix. */
object KvApi {
  def apply(c: Client, backoff: Backoff): KvApi = {
    val prefix = s"/$versionString"
    new KvApi(c, prefix, backoff)
  }
}
/**
 * Typed client for the Consul key/value HTTP API (v1).
 *
 * Request URIs are built under `uriPrefix`; each method returns a lazy
 * [[ApiCall]] that issues the request through `client` when executed.
 * `retry` toggles retrying (presumably using `backoffs` — confirm in BaseApi).
 */
class KvApi(
  val client: Client,
  val uriPrefix: String,
  val backoffs: Backoff,
  val stats: StatsReceiver = DefaultStatsReceiver
) extends BaseApi with Closable {

  // All KV endpoints live under "<prefix>/kv".
  val kvPrefix = s"$uriPrefix/kv"

  /**
   * List key names under a path (sets the `keys` query parameter, so no
   * values are returned).
   *
   * https://www.consul.io/docs/agent/http/kv.html#single
   *
   * @param path path to the key, must start with /
   */
  def list(
    path: String,
    datacenter: Option[String] = None,
    blockingIndex: Option[String] = None,
    separator: Option[String] = Some("/"),
    consistency: Option[ConsistencyMode] = None,
    retry: Boolean = false
  ): ApiCall[Indexed[Seq[String]]] = ApiCall(
    req = mkreq(
      http.Method.Get,
      s"$kvPrefix$path",
      consistency,
      "keys" -> Some(true.toString),
      "separator" -> separator,
      "index" -> blockingIndex,
      "dc" -> datacenter
    ),
    call = req => executeJson[Seq[String]](req, retry)
  )

  /**
   * Get the key value
   *
   * Uses `raw=true`, so the response body is the stored value itself rather
   * than the JSON envelope.
   *
   * https://www.consul.io/docs/agent/http/kv.html#single
   *
   * @param path path to the key, must start with /
   */
  def get(
    path: String,
    datacenter: Option[String] = None,
    blockingIndex: Option[String] = None,
    consistency: Option[ConsistencyMode] = None,
    retry: Boolean = false
  ): ApiCall[Indexed[String]] = ApiCall(
    req = mkreq(
      http.Method.Get,
      s"$kvPrefix$path",
      consistency,
      "raw" -> Some(true.toString),
      "index" -> blockingIndex,
      "dc" -> datacenter
    ),
    call = req => executeRaw(req, retry)
  )

  /**
   * Get key(s)
   *
   * Returns full [[Key]] records (Base64-encoded values); set `recurse` to
   * fetch an entire subtree.
   *
   * https://www.consul.io/docs/agent/http/kv.html#single
   *
   * @param path path to the key, must start with /
   */
  def multiGet(
    path: String,
    datacenter: Option[String] = None,
    blockingIndex: Option[String] = None,
    recurse: Option[Boolean] = None,
    consistency: Option[ConsistencyMode] = None,
    retry: Boolean = false
  ): ApiCall[Indexed[Seq[Key]]] = ApiCall(
    req = mkreq(
      http.Method.Get,
      s"$kvPrefix$path",
      consistency,
      "index" -> blockingIndex,
      "dc" -> datacenter,
      "recurse" -> recurse.map(_.toString)
    ),
    call = req => executeJson[Seq[Key]](req, retry)
  )

  /**
   * Store the key value
   *
   * `cas` enables check-and-set semantics; the returned Boolean is Consul's
   * success flag for the write.
   *
   * https://www.consul.io/docs/agent/http/kv.html#single
   *
   * @param path path to the key, must start with /
   */
  def put(
    path: String,
    value: String,
    datacenter: Option[String] = None,
    cas: Option[String] = None,
    consistency: Option[ConsistencyMode] = None,
    retry: Boolean = false
  ): ApiCall[Boolean] = ApiCall(
    req = mkreq(
      http.Method.Put,
      s"$kvPrefix$path",
      consistency,
      "cas" -> cas,
      "dc" -> datacenter
    ),
    call = req => {
      // The raw value travels as the PUT body, not as a query parameter.
      req.setContentString(value)
      executeJson[Boolean](req, retry).map(_.value)
    }
  )

  /**
   * Delete the key
   *
   * Supports check-and-set via `cas` and subtree deletion via `recurse`.
   *
   * https://www.consul.io/docs/agent/http/kv.html#single
   *
   * @param path path to the key, must start with /
   */
  def delete(
    path: String,
    datacenter: Option[String] = None,
    cas: Option[String] = None,
    recurse: Option[Boolean] = None,
    consistency: Option[ConsistencyMode] = None,
    retry: Boolean = false
  ): ApiCall[Boolean] = ApiCall(
    req = mkreq(
      http.Method.Delete,
      s"$kvPrefix$path",
      consistency,
      "cas" -> cas,
      "recurse" -> recurse.map(_.toString),
      "dc" -> datacenter
    ),
    call = req => executeJson[Boolean](req, retry).map(_.value)
  )
}
object Key {
  /** Builds a [[Key]] whose value is the Base64 encoding of `value`. */
  def mk(key: String, value: String): Key = {
    // Encode with an explicit charset instead of the platform default so the
    // stored representation is stable across JVMs and locales.
    import java.nio.charset.StandardCharsets
    Key(Some(key), Some(Base64.getEncoder.encodeToString(value.getBytes(StandardCharsets.UTF_8))))
  }
}

/**
 * A single Consul KV entry as returned by the HTTP API.
 *
 * Field names are capitalized to match Consul's JSON payload.
 *
 * @param Key   the full key path, if present
 * @param Value the Base64-encoded value, if present
 */
case class Key(
  Key: Option[String],
  Value: Option[String]
) {
  // Decode using UTF-8 explicitly (mirrors the encoding used in Key.mk).
  lazy val decoded: Option[String] = Value.map { raw =>
    new String(Base64.getDecoder.decode(raw), java.nio.charset.StandardCharsets.UTF_8)
  }
}
| linkerd/linkerd | consul/src/main/scala/io/buoyant/consul/v1/KvApi.scala | Scala | apache-2.0 | 3,994 |
package us.msea.vaderz.jvm
import us.msea.vaderz.shared._
object Main {
def main(args: Array[String]): Unit = {
val game = new Game(new JvmCanvas())
game.init()
}
} | vydra/ccvaderz | jvm/src/main/scala/us/msea/vaderz/jvm/Main.scala | Scala | mit | 179 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 LeanIX GmbH
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package net.leanix.api.codegen
import com.wordnik.swagger.codegen.BasicCSharpGenerator
import com.wordnik.swagger.model._
/**
 * Swagger code generator configuration producing the C# client.
 *
 * Overrides the template location, output directory and target namespaces,
 * and registers the two hand-written supporting classes.
 */
object CSharpCodegen extends BasicCSharpGenerator {
  def main(args: Array[String]) = generateClient(args)

  // location of templates
  override def templateDir = "csharp"

  override def destinationDir = "target/generated-sources/swagger/src"

  // package for api invoker, error files
  override def invokerPackage = Some("LeanIX.Api.Common")

  // package for models
  override def modelPackage = Some("LeanIX.Api.Models")

  // package for api classes
  override def apiPackage = Some("LeanIX.Api")

  // supporting classes
  override def supportingFiles = {
    val sep = java.io.File.separator
    // Both supporting files land in the invoker-package directory.
    val invokerDir = destinationDir + sep + invokerPackage.get.replace(".", sep) + sep
    List(
      ("apiClient.mustache", invokerDir, "ApiClient.cs"),
      ("apiException.mustache", invokerDir, "ApiException.cs")
    )
  }
}
| greece57/leanix-sdk-csharp | src/main/scala/net/leanix/api/codegen/CSharpCodegen.scala | Scala | mit | 2,195 |
////////////////////////////////////////////////////////////////////////////////
// //
// OpenSolid is a generic library for the representation and manipulation //
// of geometric objects such as points, curves, surfaces, and volumes. //
// //
// Copyright 2007-2015 by Ian Mackenzie //
// ian.e.mackenzie@gmail.com //
// //
// This Source Code Form is subject to the terms of the Mozilla Public //
// License, v. 2.0. If a copy of the MPL was not distributed with this file, //
// you can obtain one at http://mozilla.org/MPL/2.0/. //
// //
////////////////////////////////////////////////////////////////////////////////
package org.opensolid.core
import org.opensolid.core.Direction3dGenerators._
import org.opensolid.core.Point3dGenerators._
import org.scalacheck._
trait Plane3dGenerators {
  /** Arbitrary planes built from a random origin point and normal direction. */
  val anyPlane3d: Gen[Plane3d] =
    anyPoint3d.flatMap { originPoint =>
      anyDirection3d.map { normalDirection =>
        Plane3d.fromPointAndNormal(originPoint, normalDirection)
      }
    }

  implicit val arbitraryPlane3d: Arbitrary[Plane3d] = Arbitrary(anyPlane3d)
}

object Plane3dGenerators extends Plane3dGenerators
| ianmackenzie/opensolid-core | src/test/scala/org/opensolid/core/Plane3dGenerators.scala | Scala | mpl-2.0 | 1,544 |
package com.gu.mobile.notifications.football.models
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import org.specs2.specification.Scope
import pa.{MatchDayTeam, Player}
/** Verifies that a PA "dismissal" match event is converted into a [[Dismissal]]. */
class DismissalSpec extends Specification with Mockito {
  "MatchPhaseEvent" should {
    "Create a Dismissal event" in new DismissalScope {
      // Stub a PA event shaped like a sending-off in the 85th minute.
      val event = mock[pa.MatchEvent]
      event.id returns Some("event-1")
      event.eventType returns "dismissal"
      event.eventTime returns Some("85")
      event.reason returns Some("Violent Conduct")
      // First player belongs to the home team ("home-1"); the second entry is
      // an empty placeholder that fromEvent is expected to ignore.
      event.players returns List(Player("player-1", "home-1", "player-1"), Player("", "", ""))
      event.addedTime returns Some("5:00")
      Dismissal.fromEvent(home, away)(event) should beSome(Dismissal("event-1", "player-1", home, 85, Some("5:00")))
    }
  }

  // Minimal home/away fixtures; only team ids matter for player attribution.
  trait DismissalScope extends Scope {
    val home = MatchDayTeam(
      id = "home-1",
      name = "Home Side",
      score = None,
      htScore = None,
      aggregateScore = None,
      scorers = None
    )
    val away = MatchDayTeam(
      id = "away-1",
      name = "Away Side",
      score = None,
      htScore = None,
      aggregateScore = None,
      scorers = None
    )
  }
}
| guardian/mobile-n10n | football/src/test/scala/com/gu/mobile/notifications/football/models/DismissalSpec.scala | Scala | apache-2.0 | 1,207 |
package authentication.oauth2
import java.time.Duration
/** Static OAuth2 settings for the "play-with-food" client. */
private[authentication] class AuthenticationConfig {
  // OAuth2 client registration and requested scope.
  val clientId = "play-with-food"
  val scope = "play-with-food"
  val redirectUri = "play-with-food-authenticated"
  // Form-field names carrying the resource-owner credentials.
  val usernameParamName = "username"
  val passwordParamName = "password"
  // Issued access tokens stay valid for 24 hours.
  val accessTokenLifeDuration: Duration = Duration.ofHours(24)
} | Dasiu/play-framework-test-project | app/authentication/oauth2/AuthenticationConfig.scala | Scala | mit | 367 |
package org.emailscript.api
import org.emailscript.dkim.{DkimResult, DkimSignature}
import scala.beans.BeanProperty
/**
 * Represents results of a DKIM validation.
 *
 * Plain mutable bean (via @BeanProperty) so Java/scripting callers get
 * getters and setters; populated by the companion's apply.
 */
class DkimInfo {
  // Human-readable summary of the validation outcome.
  @BeanProperty var description: String = ""
  // Parsed signature data; DkimSignature.empty until populated.
  @BeanProperty var result: DkimSignature = DkimSignature.empty
  // Raw DKIM-Signature header text.
  @BeanProperty var signature: String = ""
  // True when the signature verified successfully.
  @BeanProperty var isValid: Boolean = false
}
object DkimInfo {
  /**
   * Builds a [[DkimInfo]] bean from a DKIM validation result.
   *
   * @param result outcome of running DKIM verification
   * @return populated bean exposing the result to bean-style callers
   */
  def apply(result: DkimResult): DkimInfo = {
    // `val`, not `var`: the reference is never reassigned, only mutated.
    val info = new DkimInfo()
    info.description = result.description
    info.result = result.dkim
    info.signature = result.dkim.rawSignature
    info.isValid = result.isDefined()
    info
  }
}
| OdysseusLevy/emailscript | src/main/scala/org/emailscript/api/DkimInfo.scala | Scala | lgpl-3.0 | 648 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.carbondata.spark.testsuite.aggquery
import org.apache.spark.sql.Row
import org.apache.spark.sql.common.util.CarbonHiveContext._
import org.apache.spark.sql.common.util.QueryTest
import org.scalatest.BeforeAndAfterAll
/**
* Test Class for aggregate query on Integer datatypes
*
* @author N00902756
*
*/
class IntegerDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
  // Create the Carbon table and load the fixture CSV once before all tests.
  override def beforeAll {
    sql("CREATE TABLE integertypetableAgg (empno int, workgroupcategory string, deptno int, projectcode int, attendance int) STORED BY 'org.apache.carbondata.format'")
    sql("LOAD DATA local inpath './src/test/resources/data.csv' INTO TABLE integertypetableAgg OPTIONS ('DELIMITER'= ',', 'QUOTECHAR'= '\\"', 'FILEHEADER'='')")
  }

  // Projecting the int column must return the ten employee numbers 11..20.
  test("select empno from integertypetableAgg") {
    checkAnswer(
      sql("select empno from integertypetableAgg"),
      Seq(Row(11), Row(12), Row(13), Row(14), Row(15), Row(16), Row(17), Row(18), Row(19), Row(20)))
  }

  // Drop the table so reruns start from a clean metastore.
  override def afterAll {
    sql("drop table integertypetableAgg")
  }
} | foryou2030/incubator-carbondata | integration/spark/src/test/scala/org/apache/carbondata/integration/spark/testsuite/aggquery/IntegerDataTypeTestCase.scala | Scala | apache-2.0 | 1,870 |
package org.bjean.sample.wordcount.aws
import com.amazonaws.services.elasticmapreduce.model.BootstrapActionConfig
import com.amazonaws.services.elasticmapreduce.model.ScriptBootstrapActionConfig
import com.typesafe.config.Config
import org.bjean.sample.wordcount.aws.BootstrapActionsBuilder._
import scala.collection.mutable
object BootstrapActionsBuilder {
  // Typesafe-config keys for the bootstrap script locations.
  val CONFIG_AWS_CLUSTER_HADOOP_SITE_CONFIG_FILE: String = "aws.cluster.hadoop.siteConfigFile"
  val CONFIG_AWS_CLUSTER_SPARK_INSTALL_FILE: String = "aws.cluster.spark.bootstrapConfigFile"
  val CONFIG_AWS_CLUSTER_HADOOP_BOOTSTRAP_CONFIG_FILE: String = "aws.cluster.hadoop.bootstrapConfigFile"

  /**
   * Builds an EMR bootstrap action that runs the script at `bootstrapPath`.
   *
   * @param bootstrapName display name of the action
   * @param bootstrapPath S3/local path of the bootstrap script
   * @param args          optional script arguments; `null` means "no args"
   */
  def createBootstrapAction(bootstrapName: String, bootstrapPath: String, args: List[String]): BootstrapActionConfig = {
    // Explicit converters instead of the deprecated implicit JavaConversions.
    import scala.collection.JavaConverters._
    val bootstrapScriptConfig: ScriptBootstrapActionConfig = new ScriptBootstrapActionConfig
    bootstrapScriptConfig.setPath(bootstrapPath)
    if (args != null) {
      bootstrapScriptConfig.setArgs(args.asJava)
    }
    val bootstrapConfig: BootstrapActionConfig = new BootstrapActionConfig
    bootstrapConfig.setName(bootstrapName)
    bootstrapConfig.setScriptBootstrapAction(bootstrapScriptConfig)
    bootstrapConfig
  }
}
/**
 * Mutable builder assembling the list of EMR bootstrap actions for a cluster.
 *
 * The produced list preserves the original ordering: HDFS config, Hadoop site
 * config, Spark install, then any caller-supplied custom actions.
 */
class BootstrapActionsBuilder(config: Config) {
  private var sparkConfig: Boolean = false
  private var hadoopSiteConfig: Boolean = false
  private var hdfsSiteConfig: Boolean = false
  // `null` means "no custom actions supplied" (kept for interface compatibility).
  private var customBootstrapActionConfigs: List[BootstrapActionConfig] = null

  /** Enables the Spark install bootstrap action. */
  def withSpark: BootstrapActionsBuilder = {
    sparkConfig = true
    this
  }

  /** Enables the Hadoop core-site configuration bootstrap action. */
  def withHadoopSiteConfig: BootstrapActionsBuilder = {
    hadoopSiteConfig = true
    this
  }

  /** Enables the HDFS configuration bootstrap action. */
  def withHdfsSiteConfig: BootstrapActionsBuilder = {
    hdfsSiteConfig = true
    this
  }

  /** Appends caller-supplied bootstrap actions after the standard ones. */
  def withCustomBootstrapActions(bootstrapActionConfigs: List[BootstrapActionConfig]): BootstrapActionsBuilder = {
    customBootstrapActionConfigs = bootstrapActionConfigs
    this
  }

  /**
   * Materializes the configured bootstrap actions.
   *
   * @return actions in order: HDFS, Hadoop site, Spark, then custom actions
   */
  def build: List[BootstrapActionConfig] = {
    // Immutable construction instead of the former mutable.MutableList.
    val standardActions = List(
      if (hdfsSiteConfig) Some(createHdfsSiteConfig) else None,
      if (hadoopSiteConfig) Some(createHadoopSiteConfig) else None,
      if (sparkConfig) Some(createSparkConfig) else None
    ).flatten
    // Option(...) makes the legacy `null` default safe to append.
    standardActions ++ Option(customBootstrapActionConfigs).getOrElse(Nil)
  }

  /** Bootstrap action applying the Hadoop core-site configuration file. */
  protected def createHadoopSiteConfig: BootstrapActionConfig = {
    val siteBootstrapConfigArgs = List("--core-config-file", config.getString(CONFIG_AWS_CLUSTER_HADOOP_SITE_CONFIG_FILE))
    createBootstrapAction("Hadoop Site Config", config.getString(CONFIG_AWS_CLUSTER_HADOOP_BOOTSTRAP_CONFIG_FILE), siteBootstrapConfigArgs)
  }

  /** Bootstrap action disabling HDFS permission checking. */
  protected def createHdfsSiteConfig: BootstrapActionConfig = {
    val hdfsBootstrapConfigArgs = List("--hdfs-key-value", "dfs.permissions=false")
    createBootstrapAction("HDFS Config", config.getString(CONFIG_AWS_CLUSTER_HADOOP_BOOTSTRAP_CONFIG_FILE), hdfsBootstrapConfigArgs)
  }

  /** Bootstrap action installing Spark on the cluster (script takes no args). */
  protected def createSparkConfig: BootstrapActionConfig = {
    createBootstrapAction("Spark Cluster Config", config.getString(CONFIG_AWS_CLUSTER_SPARK_INSTALL_FILE), null)
  }
}
| bjet007/word-count-spark-aws | aws-launcher/src/main/scala/org/bjean/sample/wordcount/aws/BootstrapActionsBuilder.scala | Scala | apache-2.0 | 3,364 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.trees
import java.util.UUID
import scala.collection.Map
import scala.reflect.ClassTag
import org.apache.commons.lang3.ClassUtils
import org.json4s.JsonAST._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.ScalaReflection._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat, CatalogTable, CatalogTableType, FunctionResource}
import org.apache.spark.sql.catalyst.errors._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.JoinType
import org.apache.spark.sql.catalyst.plans.physical.{BroadcastMode, Partitioning}
import org.apache.spark.sql.catalyst.util.StringUtils.StringConcat
import org.apache.spark.sql.catalyst.util.truncatedString
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.storage.StorageLevel
/** Used by [[TreeNode.getNodeNumbered]] when traversing the tree for a given number */
// Simple mutable box so a running counter can be threaded through recursion.
private class MutableInt(var i: Int)
/**
 * Source-text position (line / start column) a TreeNode originated from,
 * when known.
 */
case class Origin(
    line: Option[Int] = None,
    startPosition: Option[Int] = None)

/**
 * Provides a location for TreeNodes to ask about the context of their origin. For example, which
 * line of code is currently being parsed.
 */
object CurrentOrigin {
  // One origin per thread: parser state must not leak across threads.
  private val value = new ThreadLocal[Origin] {
    override def initialValue: Origin = Origin()
  }

  def get: Origin = value.get()

  def set(o: Origin): Unit = value.set(o)

  def reset(): Unit = value.set(Origin())

  /** Records the position currently being parsed on this thread. */
  def setPosition(line: Int, start: Int): Unit = {
    val updated = value.get.copy(line = Some(line), startPosition = Some(start))
    value.set(updated)
  }

  /** Runs `f` with `o` as the current origin, restoring the default afterwards. */
  def withOrigin[A](o: Origin)(f: => A): A = {
    set(o)
    try f finally reset()
  }
}
// scalastyle:off
abstract class TreeNode[BaseType <: TreeNode[BaseType]] extends Product {
// scalastyle:on
self: BaseType =>
  // Captured at construction time from the thread-local parsing position.
  val origin: Origin = CurrentOrigin.get

  /**
   * Returns a Seq of the children of this node.
   * Children should not change. Immutability required for containsChild optimization
   */
  def children: Seq[BaseType]

  // Set view of children for O(1) "is this value one of my direct children?" checks.
  lazy val containsChild: Set[TreeNode[_]] = children.toSet

  // Cached: nodes are immutable and hashing recurses over the whole product.
  private lazy val _hashCode: Int = scala.util.hashing.MurmurHash3.productHash(this)
  override def hashCode(): Int = _hashCode
/**
* Faster version of equality which short-circuits when two treeNodes are the same instance.
* We don't just override Object.equals, as doing so prevents the scala compiler from
* generating case class `equals` methods
*/
def fastEquals(other: TreeNode[_]): Boolean = {
this.eq(other) || this == other
}
/**
* Find the first [[TreeNode]] that satisfies the condition specified by `f`.
* The condition is recursively applied to this node and all of its children (pre-order).
*/
def find(f: BaseType => Boolean): Option[BaseType] = if (f(this)) {
Some(this)
} else {
children.foldLeft(Option.empty[BaseType]) { (l, r) => l.orElse(r.find(f)) }
}
/**
* Runs the given function on this node and then recursively on [[children]].
* @param f the function to be applied to each node in the tree.
*/
def foreach(f: BaseType => Unit): Unit = {
f(this)
children.foreach(_.foreach(f))
}
/**
* Runs the given function recursively on [[children]] then on this node.
* @param f the function to be applied to each node in the tree.
*/
def foreachUp(f: BaseType => Unit): Unit = {
children.foreach(_.foreachUp(f))
f(this)
}
/**
* Returns a Seq containing the result of applying the given function to each
* node in this tree in a preorder traversal.
* @param f the function to be applied.
*/
def map[A](f: BaseType => A): Seq[A] = {
val ret = new collection.mutable.ArrayBuffer[A]()
foreach(ret += f(_))
ret
}
/**
* Returns a Seq by applying a function to all nodes in this tree and using the elements of the
* resulting collections.
*/
def flatMap[A](f: BaseType => TraversableOnce[A]): Seq[A] = {
val ret = new collection.mutable.ArrayBuffer[A]()
foreach(ret ++= f(_))
ret
}
/**
* Returns a Seq containing the result of applying a partial function to all elements in this
* tree on which the function is defined.
*/
def collect[B](pf: PartialFunction[BaseType, B]): Seq[B] = {
val ret = new collection.mutable.ArrayBuffer[B]()
val lifted = pf.lift
foreach(node => lifted(node).foreach(ret.+=))
ret
}
/**
* Returns a Seq containing the leaves in this tree.
*/
def collectLeaves(): Seq[BaseType] = {
this.collect { case p if p.children.isEmpty => p }
}
/**
* Finds and returns the first [[TreeNode]] of the tree for which the given partial function
* is defined (pre-order), and applies the partial function to it.
*/
def collectFirst[B](pf: PartialFunction[BaseType, B]): Option[B] = {
val lifted = pf.lift
lifted(this).orElse {
children.foldLeft(Option.empty[B]) { (l, r) => l.orElse(r.collectFirst(pf)) }
}
}
/**
* Efficient alternative to `productIterator.map(f).toArray`.
*/
protected def mapProductIterator[B: ClassTag](f: Any => B): Array[B] = {
val arr = Array.ofDim[B](productArity)
var i = 0
while (i < arr.length) {
arr(i) = f(productElement(i))
i += 1
}
arr
}
  /**
   * Returns a copy of this node with the children replaced.
   * TODO: Validate somewhere (in debug mode?) that children are ordered correctly.
   */
  def withNewChildren(newChildren: Seq[BaseType]): BaseType = {
    assert(newChildren.size == children.size, "Incorrect number of children")
    var changed = false
    // Old and new children are paired up positionally, consumed left-to-right.
    val remainingNewChildren = newChildren.toBuffer
    val remainingOldChildren = children.toBuffer
    // Consumes the next old/new pair; keeps the old instance when unchanged so
    // reference equality (and cached state) is preserved where possible.
    def mapTreeNode(node: TreeNode[_]): TreeNode[_] = {
      val newChild = remainingNewChildren.remove(0)
      val oldChild = remainingOldChildren.remove(0)
      if (newChild fastEquals oldChild) {
        oldChild
      } else {
        changed = true
        newChild
      }
    }
    def mapChild(child: Any): Any = child match {
      case arg: TreeNode[_] if containsChild(arg) => mapTreeNode(arg)
      case nonChild: AnyRef => nonChild
      case null => null
    }
    // Rebuild the constructor arguments, substituting children in place.
    val newArgs = mapProductIterator {
      case s: StructType => s // Don't convert struct types to some other type of Seq[StructField]
      // Handle Seq[TreeNode] in TreeNode parameters.
      case s: Stream[_] =>
        // Stream is lazy so we need to force materialization
        s.map(mapChild).force
      case s: Seq[_] =>
        s.map(mapChild)
      case m: Map[_, _] =>
        // `mapValues` is lazy and we need to force it to materialize
        m.mapValues(mapChild).view.force
      case arg: TreeNode[_] if containsChild(arg) => mapTreeNode(arg)
      case nonChild: AnyRef => nonChild
      case null => null
    }
    // Only allocate a copy when at least one child actually changed.
    if (changed) makeCopy(newArgs) else this
  }
  /**
   * Returns a copy of this node where `rule` has been recursively applied to the tree.
   * When `rule` does not apply to a given node it is left unchanged.
   * Users should not expect a specific directionality. If a specific directionality is needed,
   * transformDown or transformUp should be used.
   *
   * @param rule the function use to transform this nodes children
   */
  def transform(rule: PartialFunction[BaseType, BaseType]): BaseType = {
    // Currently delegates to pre-order traversal; callers must not rely on this.
    transformDown(rule)
  }
  /**
   * Returns a copy of this node where `rule` has been recursively applied to it and all of its
   * children (pre-order). When `rule` does not apply to a given node it is left unchanged.
   *
   * @param rule the function used to transform this nodes children
   */
  def transformDown(rule: PartialFunction[BaseType, BaseType]): BaseType = {
    // Apply the rule to this node first, tagging any replacement with this
    // node's origin so error positions survive the rewrite.
    val afterRule = CurrentOrigin.withOrigin(origin) {
      rule.applyOrElse(this, identity[BaseType])
    }
    // Check if unchanged and then possibly return old copy to avoid gc churn.
    if (this fastEquals afterRule) {
      mapChildren(_.transformDown(rule))
    } else {
      afterRule.mapChildren(_.transformDown(rule))
    }
  }
/**
* Returns a copy of this node where `rule` has been recursively applied first to all of its
* children and then itself (post-order). When `rule` does not apply to a given node, it is left
* unchanged.
*
* @param rule the function use to transform this nodes children
*/
def transformUp(rule: PartialFunction[BaseType, BaseType]): BaseType = {
val afterRuleOnChildren = mapChildren(_.transformUp(rule))
if (this fastEquals afterRuleOnChildren) {
CurrentOrigin.withOrigin(origin) {
rule.applyOrElse(this, identity[BaseType])
}
} else {
CurrentOrigin.withOrigin(origin) {
rule.applyOrElse(afterRuleOnChildren, identity[BaseType])
}
}
}
  /**
   * Returns a copy of this node where `f` has been applied to all the nodes children.
   */
  def mapChildren(f: BaseType => BaseType): BaseType = {
    if (children.nonEmpty) {
      var changed = false
      // Maps a single element found inside a collection-valued constructor arg.
      def mapChild(child: Any): Any = child match {
        case arg: TreeNode[_] if containsChild(arg) =>
          val newChild = f(arg.asInstanceOf[BaseType])
          if (!(newChild fastEquals arg)) {
            changed = true
            newChild
          } else {
            arg
          }
        // Pairs of tree nodes (e.g. join condition pairs) are handled element-wise.
        case tuple@(arg1: TreeNode[_], arg2: TreeNode[_]) =>
          val newChild1 = if (containsChild(arg1)) {
            f(arg1.asInstanceOf[BaseType])
          } else {
            arg1.asInstanceOf[BaseType]
          }
          val newChild2 = if (containsChild(arg2)) {
            f(arg2.asInstanceOf[BaseType])
          } else {
            arg2.asInstanceOf[BaseType]
          }
          if (!(newChild1 fastEquals arg1) || !(newChild2 fastEquals arg2)) {
            changed = true
            (newChild1, newChild2)
          } else {
            tuple
          }
        case other => other
      }
      // Rebuild each constructor argument, substituting transformed children.
      val newArgs = mapProductIterator {
        case arg: TreeNode[_] if containsChild(arg) =>
          val newChild = f(arg.asInstanceOf[BaseType])
          if (!(newChild fastEquals arg)) {
            changed = true
            newChild
          } else {
            arg
          }
        // Children wrapped in Option are unwrapped, mapped and rewrapped.
        case Some(arg: TreeNode[_]) if containsChild(arg) =>
          val newChild = f(arg.asInstanceOf[BaseType])
          if (!(newChild fastEquals arg)) {
            changed = true
            Some(newChild)
          } else {
            Some(arg)
          }
        case m: Map[_, _] => m.mapValues {
          case arg: TreeNode[_] if containsChild(arg) =>
            val newChild = f(arg.asInstanceOf[BaseType])
            if (!(newChild fastEquals arg)) {
              changed = true
              newChild
            } else {
              arg
            }
          case other => other
        }.view.force // `mapValues` is lazy and we need to force it to materialize
        case d: DataType => d // Avoid unpacking Structs
        case args: Stream[_] => args.map(mapChild).force // Force materialization on stream
        case args: Traversable[_] => args.map(mapChild)
        case nonChild: AnyRef => nonChild
        case null => null
      }
      // Only allocate a copy when some child actually changed.
      if (changed) makeCopy(newArgs) else this
    } else {
      // Leaf node: nothing to map.
      this
    }
  }
  /**
   * Args to the constructor that should be copied, but not transformed.
   * These are appended to the transformed args automatically by makeCopy
   * @return
   */
  // Subclasses with extra non-child constructor params override this.
  protected def otherCopyArgs: Seq[AnyRef] = Nil
/**
 * Creates a copy of this type of tree node after a transformation.
 * Must be overridden by child classes that have constructor arguments
 * that are not present in the productIterator.
 * @param newArgs the new product arguments.
 */
def makeCopy(newArgs: Array[AnyRef]): BaseType = attachTree(this, "makeCopy") {
  // Skip no-arg constructors that are just there for kryo.
  val ctors = getClass.getConstructors.filter(_.getParameterTypes.size != 0)
  if (ctors.isEmpty) {
    sys.error(s"No valid constructor for $nodeName")
  }
  // The transformed product args, followed by any non-transformed extras.
  val allArgs: Array[AnyRef] = if (otherCopyArgs.isEmpty) {
    newArgs
  } else {
    newArgs ++ otherCopyArgs
  }
  // Prefer a constructor whose declared parameter types are assignable from
  // the runtime classes of the supplied args (autoboxing allowed).
  val defaultCtor = ctors.find { ctor =>
    if (ctor.getParameterTypes.length != allArgs.length) {
      false
    } else if (allArgs.contains(null)) {
      // if there is a `null`, we can't figure out the class, therefore we should just fallback
      // to older heuristic
      false
    } else {
      val argsArray: Array[Class[_]] = allArgs.map(_.getClass)
      ClassUtils.isAssignable(argsArray, ctor.getParameterTypes, true /* autoboxing */)
    }
  }.getOrElse(ctors.maxBy(_.getParameterTypes.length)) // fall back to older heuristic
  try {
    // Preserve this node's origin (source position info) on the copy.
    CurrentOrigin.withOrigin(origin) {
      defaultCtor.newInstance(allArgs.toArray: _*).asInstanceOf[BaseType]
    }
  } catch {
    case e: java.lang.IllegalArgumentException =>
      throw new TreeNodeException(
        this,
        s"""
           |Failed to copy node.
           |Is otherCopyArgs specified correctly for $nodeName.
           |Exception message: ${e.getMessage}
           |ctor: $defaultCtor?
           |types: ${newArgs.map(_.getClass).mkString(", ")}
           |args: ${newArgs.mkString(", ")}
         """.stripMargin)
  }
}
/**
 * The name of this type of TreeNode: the simple class name, with the "Exec"
 * suffix dropped for physical operator classes.
 */
def nodeName: String = {
  val simpleName = getClass.getSimpleName
  // Equivalent to replaceAll("Exec$", ""): only a trailing "Exec" is removed.
  simpleName.stripSuffix("Exec")
}
/**
 * The arguments that should be included in the arg string. Defaults to the `productIterator`.
 */
protected def stringArgs: Iterator[Any] = productIterator

// Every node rendered as a subtree of this one (children plus innerChildren);
// used by argString to avoid printing a child twice.
private lazy val allChildren: Set[TreeNode[_]] = (children ++ innerChildren).toSet[TreeNode[_]]
/** Returns a string representing the arguments to this node, minus any children */
def argString(maxFields: Int): String = stringArgs.flatMap {
  // Children are rendered as subtrees elsewhere, so they are skipped here.
  case tn: TreeNode[_] if allChildren.contains(tn) => Nil
  case Some(tn: TreeNode[_]) if allChildren.contains(tn) => Nil
  case Some(tn: TreeNode[_]) => tn.simpleString(maxFields) :: Nil
  case tn: TreeNode[_] => tn.simpleString(maxFields) :: Nil
  // A sequence made up entirely of children is skipped as well.
  case seq: Seq[Any] if seq.toSet.subsetOf(allChildren.asInstanceOf[Set[Any]]) => Nil
  case iter: Iterable[_] if iter.isEmpty => Nil
  // Collections are rendered with at most `maxFields` elements.
  case seq: Seq[_] => truncatedString(seq, "[", ", ", "]", maxFields) :: Nil
  case set: Set[_] => truncatedString(set.toSeq, "{", ", ", "}", maxFields) :: Nil
  case array: Array[_] if array.isEmpty => Nil
  case array: Array[_] => truncatedString(array, "[", ", ", "]", maxFields) :: Nil
  case null => Nil
  case None => Nil
  case Some(null) => Nil
  case Some(any) => any :: Nil
  // NOTE(review): because Some(any) is matched above, this case only fires for
  // a bare (non-Option) CatalogTable argument — confirm that is intended.
  case table: CatalogTable =>
    table.storage.serde match {
      case Some(serde) => table.identifier :: serde :: Nil
      case _ => table.identifier :: Nil
    }
  case other => other :: Nil
}.mkString(", ")
/**
 * ONE line description of this node.
 *
 * @param maxFields Maximum number of fields that will be converted to strings.
 *                  Any elements beyond the limit will be dropped.
 */
def simpleString(maxFields: Int): String = {
  val args = argString(maxFields)
  // `trim` removes the trailing space when there are no arguments to show.
  s"$nodeName $args".trim
}
/** ONE line description of this node with more information */
def verboseString(maxFields: Int): String

/** ONE line description of this node with some suffix information */
def verboseStringWithSuffix(maxFields: Int): String = verboseString(maxFields)

// A node's default string form is its full (verbose) tree rendering.
override def toString: String = treeString

/** Returns a string representation of the nodes in this tree */
def treeString: String = treeString(verbose = true)
// Renders the whole tree to a string, buffering through a StringConcat.
def treeString(
    verbose: Boolean,
    addSuffix: Boolean = false,
    maxFields: Int = SQLConf.get.maxToStringFields): String = {
  val concat = new StringConcat()
  treeString(concat.append, verbose, addSuffix, maxFields)
  concat.toString
}

// Variant that streams output through the caller-supplied `append` function,
// avoiding an intermediate buffer for large plans.
def treeString(
    append: String => Unit,
    verbose: Boolean,
    addSuffix: Boolean,
    maxFields: Int): Unit = {
  generateTreeString(0, Nil, append, verbose, "", addSuffix, maxFields)
}
/**
 * Returns a string representation of the nodes in this tree, where each operator is numbered.
 * The numbers can be used with [[TreeNode.apply]] to easily access specific subtrees.
 *
 * The numbers are based on depth-first traversal of the tree (with innerChildren traversed first
 * before children).
 */
def numberedTreeString: String =
  // Prefix every line of the rendered tree with its zero-padded DFS index.
  treeString.split("\\n").zipWithIndex.map { case (line, i) => f"$i%02d $line" }.mkString("\\n")
/**
 * Returns the tree node at the specified number, used primarily for interactive debugging.
 * Numbers for each node can be found in the [[numberedTreeString]].
 *
 * Note that this cannot return BaseType because logical plan's plan node might return
 * physical plan for innerChildren, e.g. in-memory relation logical plan node has a reference
 * to the physical plan node it is referencing.
 */
def apply(number: Int): TreeNode[_] = getNodeNumbered(new MutableInt(number)).orNull

/**
 * Returns the tree node at the specified number, used primarily for interactive debugging.
 * Numbers for each node can be found in the [[numberedTreeString]].
 *
 * This is a variant of [[apply]] that returns the node as BaseType (if the type matches).
 */
def p(number: Int): BaseType = apply(number).asInstanceOf[BaseType]
// Depth-first lookup of the `number`-th node, decrementing the shared mutable
// counter as nodes are visited; returns None when the counter runs past the
// last node (or was negative to begin with).
private def getNodeNumbered(number: MutableInt): Option[TreeNode[_]] = {
  if (number.i < 0) {
    None
  } else if (number.i == 0) {
    Some(this)
  } else {
    number.i -= 1
    // Note that this traversal order must be the same as numberedTreeString.
    innerChildren.map(_.getNodeNumbered(number)).find(_ != None).getOrElse {
      children.map(_.getNodeNumbered(number)).find(_ != None).flatten
    }
  }
}
/**
 * All the nodes that should be shown as a inner nested tree of this node.
 * For example, this can be used to show sub-queries.
 *
 * Empty by default; operators with nested plans override this.
 */
protected def innerChildren: Seq[TreeNode[_]] = Seq.empty
/**
 * Appends the string representation of this node and its children to the given Writer.
 *
 * The `i`-th element in `lastChildren` indicates whether the ancestor of the current node at
 * depth `i + 1` is the last child of its own parent node. The depth of the root node is 0, and
 * `lastChildren` for the root node should be empty.
 *
 * Note that this traversal (numbering) order must be the same as [[getNodeNumbered]].
 */
def generateTreeString(
    depth: Int,
    lastChildren: Seq[Boolean],
    append: String => Unit,
    verbose: Boolean,
    prefix: String = "",
    addSuffix: Boolean = false,
    maxFields: Int): Unit = {
  // Draw the branch decoration for each ancestor level, then the connector for
  // this node itself (`+-` when it is the last sibling, `:-` otherwise).
  if (depth > 0) {
    lastChildren.init.foreach { isLast =>
      append(if (isLast) " " else ": ")
    }
    append(if (lastChildren.last) "+- " else ":- ")
  }
  val str = if (verbose) {
    if (addSuffix) verboseStringWithSuffix(maxFields) else verboseString(maxFields)
  } else {
    simpleString(maxFields)
  }
  append(prefix)
  append(str)
  append("\\n")
  // innerChildren hang two levels deeper; whether this node has regular
  // children decides the decoration column they are drawn under.
  if (innerChildren.nonEmpty) {
    innerChildren.init.foreach(_.generateTreeString(
      depth + 2, lastChildren :+ children.isEmpty :+ false, append, verbose,
      addSuffix = addSuffix, maxFields = maxFields))
    innerChildren.last.generateTreeString(
      depth + 2, lastChildren :+ children.isEmpty :+ true, append, verbose,
      addSuffix = addSuffix, maxFields = maxFields)
  }
  // Regular children one level deeper, flagging the last for `+-` rendering.
  if (children.nonEmpty) {
    children.init.foreach(_.generateTreeString(
      depth + 1, lastChildren :+ false, append, verbose, prefix, addSuffix, maxFields))
    children.last.generateTreeString(
      depth + 1, lastChildren :+ true, append, verbose, prefix, addSuffix, maxFields)
  }
}
/**
 * Returns a 'scala code' representation of this `TreeNode` and its children. Intended for use
 * when debugging where the prettier toString function is obfuscating the actual structure. In the
 * case of 'pure' `TreeNodes` that only contain primitives and other TreeNodes, the result can be
 * pasted in the REPL to build an equivalent Tree.
 */
def asCode: String = {
  val args = productIterator.map {
    case tn: TreeNode[_] => tn.asCode
    // Strings are re-quoted so the output remains valid Scala source.
    case s: String => "\\"" + s + "\\""
    case other => other.toString
  }
  s"$nodeName(${args.mkString(",")})"
}
/** Compact single-line JSON rendering of this tree. */
def toJSON: String = compact(render(jsonValue))

/** Pretty-printed JSON rendering of this tree. */
def prettyJson: String = pretty(render(jsonValue))

// Flattens the tree into a JSON array of node objects in pre-order; each
// object records its class name and child count so the tree shape can be
// reconstructed from the flat list.
private def jsonValue: JValue = {
  val jsonValues = scala.collection.mutable.ArrayBuffer.empty[JValue]
  def collectJsonValue(tn: BaseType): Unit = {
    val jsonFields = ("class" -> JString(tn.getClass.getName)) ::
      ("num-children" -> JInt(tn.children.length)) :: tn.jsonFields
    jsonValues += JObject(jsonFields)
    tn.children.foreach(collectJsonValue)
  }
  collectJsonValue(this)
  jsonValues
}
// Serializes this node's constructor fields (plus otherCopyArgs) into JSON
// fields. Field values that are children are encoded as their index into
// `children` rather than inlined, since jsonValue already emits them.
protected def jsonFields: List[JField] = {
  val fieldNames = getConstructorParameterNames(getClass)
  val fieldValues = productIterator.toSeq ++ otherCopyArgs
  assert(fieldNames.length == fieldValues.length, s"${getClass.getSimpleName} fields: " +
    fieldNames.mkString(", ") + s", values: " + fieldValues.map(_.toString).mkString(", "))
  fieldNames.zip(fieldValues).map {
    // If the field value is a child, then use an int to encode it, represents the index of
    // this child in all children.
    case (name, value: TreeNode[_]) if containsChild(value) =>
      name -> JInt(children.indexOf(value))
    // NOTE(review): `Seq[BaseType]` is erased at runtime, so this matches any
    // Seq; correctness relies entirely on the `forall(containsChild)` guard.
    case (name, value: Seq[BaseType]) if value.forall(containsChild) =>
      name -> JArray(
        value.map(v => JInt(children.indexOf(v.asInstanceOf[TreeNode[_]]))).toList
      )
    case (name, value) => name -> parseToJson(value)
  }.toList
}
// Best-effort conversion of an arbitrary constructor-argument value to JSON.
// Values that cannot be represented safely fall through to JNull rather than
// failing serialization of the whole tree.
private def parseToJson(obj: Any): JValue = obj match {
  case b: Boolean => JBool(b)
  case b: Byte => JInt(b.toInt)
  case s: Short => JInt(s.toInt)
  case i: Int => JInt(i)
  case l: Long => JInt(l)
  case f: Float => JDouble(f)
  case d: Double => JDouble(d)
  case b: BigInt => JInt(b)
  case null => JNull
  case s: String => JString(s)
  case u: UUID => JString(u.toString)
  case dt: DataType => dt.jsonValue
  // SPARK-17356: In usage of mllib, Metadata may store a huge vector of data, transforming
  // it to JSON may trigger OutOfMemoryError.
  case m: Metadata => Metadata.empty.jsonValue
  case clazz: Class[_] => JString(clazz.getName)
  case s: StorageLevel =>
    ("useDisk" -> s.useDisk) ~ ("useMemory" -> s.useMemory) ~ ("useOffHeap" -> s.useOffHeap) ~
      ("deserialized" -> s.deserialized) ~ ("replication" -> s.replication)
  case n: TreeNode[_] => n.jsonValue
  case o: Option[_] => o.map(parseToJson)
  // Recursive scan Seq[TreeNode], Seq[Partitioning], Seq[DataType]
  case t: Seq[_] if t.forall(_.isInstanceOf[TreeNode[_]]) ||
    t.forall(_.isInstanceOf[Partitioning]) || t.forall(_.isInstanceOf[DataType]) =>
    JArray(t.map(parseToJson).toList)
  // String sequences are truncated to a single displayable string.
  case t: Seq[_] if t.length > 0 && t.head.isInstanceOf[String] =>
    JString(truncatedString(t, "[", ", ", "]", SQLConf.get.maxToStringFields))
  case t: Seq[_] => JNull
  case m: Map[_, _] => JNull
  // if it's a scala object, we can simply keep the full class path.
  // TODO: currently if the class name ends with "$", we think it's a scala object, there is
  // probably a better way to check it.
  case obj if obj.getClass.getName.endsWith("$") => "object" -> obj.getClass.getName
  case p: Product if shouldConvertToJson(p) =>
    try {
      val fieldNames = getConstructorParameterNames(p.getClass)
      val fieldValues = p.productIterator.toSeq
      assert(fieldNames.length == fieldValues.length)
      ("product-class" -> JString(p.getClass.getName)) :: fieldNames.zip(fieldValues).map {
        case (name, value) => name -> parseToJson(value)
      }.toList
    } catch {
      // NOTE(review): a failed product conversion yields `null` (not JNull),
      // which callers must tolerate — confirm downstream handling.
      case _: RuntimeException => null
    }
  case _ => JNull
}
/**
 * Whether a `Product` constructor argument is one of the whitelisted catalyst /
 * catalog types that is safe and useful to serialize as a nested JSON object in
 * [[parseToJson]]. Any other product is rendered as JNull instead.
 */
private def shouldConvertToJson(product: Product): Boolean = product match {
  case _: ExprId | _: StructField | _: TableIdentifier | _: JoinType |
       _: FunctionIdentifier | _: BucketSpec | _: CatalogTable | _: Partitioning |
       _: FunctionResource | _: BroadcastMode | _: CatalogTableType |
       _: CatalogStorageFormat => true
  case _ => false
}
}
| facaiy/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala | Scala | apache-2.0 | 25,791 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
import sbt.Keys._
import sbtsparkpackage.SparkPackagePlugin.autoImport._
// sbt 0.13-style multi-project build definition for the Spark Cassandra
// connector: core connector, Java API wrapper, embedded test harness and demos.
object CassandraSparkBuild extends Build {
  import Settings._
  import sbtassembly.AssemblyPlugin
  import Versions.scalaBinary
  import sbtsparkpackage.SparkPackagePlugin

  // Base name shared by all artifact/project ids.
  val namespace = "spark-cassandra-connector"

  val demosPath = file(s"$namespace-demos")

  // Aggregating root; assembly/spark-package plugins are disabled because the
  // root itself publishes nothing.
  lazy val root = RootProject(
    name = "root",
    dir = file("."),
    settings = rootSettings,
    contains = Seq(embedded, connector, demos, jconnector)
  ).disablePlugins(AssemblyPlugin, SparkPackagePlugin)

  // Embedded Cassandra/Kafka/Spark services used by integration tests.
  lazy val embedded = CrossScalaVersionsProject(
    name = s"$namespace-embedded",
    conf = defaultSettings ++ Seq(libraryDependencies ++= Dependencies.embedded)
  ).disablePlugins(AssemblyPlugin, SparkPackagePlugin) configs IntegrationTest

  // The Scala connector itself, cross-built per Scala binary version.
  lazy val connector = CrossScalaVersionsProject(
    name = namespace,
    conf = assembledSettings ++ Seq(libraryDependencies ++= Dependencies.connector ++ Seq(
      "org.scala-lang" % "scala-reflect" % scalaVersion.value,
      "org.scala-lang" % "scala-compiler" % scalaVersion.value % "test,it"))
  ).copy(dependencies = Seq(embedded % "test->test,compile;it->it,test,compile;")
  ) configs IntegrationTest

  // Java API layer; reuses the connector's settings and publishes under the
  // datastax spark-packages namespace.
  lazy val jconnector = Project(
    id = s"$namespace-java",
    base = file(s"$namespace-java"),
    settings = japiSettings ++ connector.settings :+ (spName := s"datastax/$namespace-java"),
    dependencies = Seq(connector % "compile;runtime->runtime;test->test;it->it,test;provided->provided")
  ) configs IntegrationTest

  // Aggregator for the demo sub-projects (kafka demo currently disabled below).
  lazy val demos = RootProject(
    name = "demos",
    dir = demosPath,
    contains = Seq(simpleDemos/*, kafkaStreaming*/, twitterStreaming)
  ).disablePlugins(AssemblyPlugin, SparkPackagePlugin)

  lazy val simpleDemos = Project(
    id = "simple-demos",
    base = demosPath / "simple-demos",
    settings = japiSettings ++ demoSettings,
    dependencies = Seq(connector, jconnector, embedded)
  ).disablePlugins(AssemblyPlugin, SparkPackagePlugin)

  /*
  lazy val kafkaStreaming = CrossScalaVersionsProject(
    name = "kafka-streaming",
    conf = demoSettings ++ kafkaDemoSettings ++ Seq(
      libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match {
        case Some((2, minor)) if minor < 11 => Dependencies.kafka
        case _ => Seq.empty
      }))).copy(base = demosPath / "kafka-streaming", dependencies = Seq(connector, embedded))
  */

  lazy val twitterStreaming = Project(
    id = "twitter-streaming",
    base = demosPath / "twitter-streaming",
    settings = demoSettings ++ Seq(libraryDependencies ++= Dependencies.twitter),
    dependencies = Seq(connector)
  ).disablePlugins(AssemblyPlugin, SparkPackagePlugin)

  // Per-Scala-binary-version source directory, e.g. scala-2.11/src.
  def crossBuildPath(base: sbt.File, v: String): sbt.File = base / s"scala-$v" / "src"

  /* templates */

  // Project template that adds version-specific source directories for
  // compile, packaging and doc generation.
  def CrossScalaVersionsProject(name: String,
                                conf: Seq[Def.Setting[_]],
                                reliesOn: Seq[ClasspathDep[ProjectReference]] = Seq.empty) =
    Project(id = name, base = file(name), dependencies = reliesOn, settings = conf ++ Seq(
      unmanagedSourceDirectories in (Compile, packageBin) +=
        crossBuildPath(baseDirectory.value, scalaBinaryVersion.value),
      unmanagedSourceDirectories in (Compile, doc) +=
        crossBuildPath(baseDirectory.value, scalaBinaryVersion.value),
      unmanagedSourceDirectories in Compile +=
        crossBuildPath(baseDirectory.value, scalaBinaryVersion.value)
    ))

  // Template for aggregate-only projects (root and demos).
  def RootProject(
      name: String,
      dir: sbt.File,
      settings: => scala.Seq[sbt.Def.Setting[_]] = Seq.empty,
      contains: Seq[ProjectReference]): Project =
    Project(
      id = name,
      base = dir,
      settings = parentSettings ++ settings,
      aggregate = contains)
}
// All library dependencies for the build, grouped by scope (compile, embedded
// test services, demos, test) plus the exclusion rules they need.
object Dependencies {
  import Versions._

  // Enriches ModuleID with the exclusion sets used throughout this file so
  // each dependency can declare its conflicts inline.
  implicit class Exclude(module: ModuleID) {

    // Guava is provided separately; exclude its transitive copies everywhere.
    def guavaExclude: ModuleID =
      module exclude("com.google.guava", "guava")

    // Spark artifacts must not pull in a second spark-core.
    def sparkExclusions: ModuleID = module.guavaExclude
      .exclude("org.apache.spark", s"spark-core_$scalaBinary")

    // The embedded Cassandra server ships logback; leave the slf4j binding
    // choice to the application.
    def logbackExclude: ModuleID = module
      .exclude("ch.qos.logback", "logback-classic")
      .exclude("ch.qos.logback", "logback-core")

    def replExclusions: ModuleID = module.guavaExclude
      .exclude("org.apache.spark", s"spark-bagel_$scalaBinary")
      .exclude("org.apache.spark", s"spark-mllib_$scalaBinary")
      .exclude("org.scala-lang", "scala-compiler")

    def kafkaExclusions: ModuleID = module
      .exclude("org.slf4j", "slf4j-simple")
      .exclude("com.sun.jmx", "jmxri")
      .exclude("com.sun.jdmk", "jmxtools")
      .exclude("net.sf.jopt-simple", "jopt-simple")
  }

  object Compile {

    val akkaActor           = "com.typesafe.akka"       %% "akka-actor"            % Akka           % "provided"  // ApacheV2
    val akkaRemote          = "com.typesafe.akka"       %% "akka-remote"           % Akka           % "provided"  // ApacheV2
    val akkaSlf4j           = "com.typesafe.akka"       %% "akka-slf4j"            % Akka           % "provided"  // ApacheV2
    val cassandraClient     = "org.apache.cassandra"    % "cassandra-clientutil"   % Cassandra guavaExclude       // ApacheV2
    val cassandraDriver     = "com.datastax.cassandra"  % "cassandra-driver-core"  % CassandraDriver guavaExclude // ApacheV2
    val commonsLang3        = "org.apache.commons"      % "commons-lang3"          % CommonsLang3                 // ApacheV2
    val config              = "com.typesafe"            % "config"                 % Config         % "provided"  // ApacheV2
    val guava               = "com.google.guava"        % "guava"                  % Guava
    val jodaC               = "org.joda"                % "joda-convert"           % JodaC
    val jodaT               = "joda-time"               % "joda-time"              % JodaT
    val lzf                 = "com.ning"                % "compress-lzf"           % Lzf            % "provided"
    val slf4jApi            = "org.slf4j"               % "slf4j-api"              % Slf4j          % "provided"  // MIT
    val jsr166e             = "com.twitter"             % "jsr166e"                % JSR166e                      // Creative Commons
    val airlift             = "io.airlift"              % "airline"                % Airlift

    /* To allow spark artifact inclusion in the demos at runtime, we set 'provided' below. */
    val sparkCore           = "org.apache.spark"        %% "spark-core"            % Spark guavaExclude           // ApacheV2
    val sparkStreaming      = "org.apache.spark"        %% "spark-streaming"       % Spark guavaExclude           // ApacheV2
    val sparkSql            = "org.apache.spark"        %% "spark-sql"             % Spark sparkExclusions        // ApacheV2
    val sparkCatalyst       = "org.apache.spark"        %% "spark-catalyst"        % Spark sparkExclusions        // ApacheV2
    val sparkHive           = "org.apache.spark"        %% "spark-hive"            % Spark sparkExclusions        // ApacheV2

    object Metrics {
      val metricsCore       = "com.codahale.metrics"    % "metrics-core"           % CodaHaleMetrics % "provided"
      val metricsJson       = "com.codahale.metrics"    % "metrics-json"           % CodaHaleMetrics % "provided"
    }

    object Jetty {
      val jettyServer       = "org.eclipse.jetty"       % "jetty-server"           % SparkJetty % "provided"
      val jettyServlet      = "org.eclipse.jetty"       % "jetty-servlet"          % SparkJetty % "provided"
    }

    // Services started in-process by the embedded test harness.
    object Embedded {
      val akkaCluster       = "com.typesafe.akka"       %% "akka-cluster"          % Akka                         // ApacheV2
      val cassandraServer   = "org.apache.cassandra"    % "cassandra-all"          % Cassandra logbackExclude     // ApacheV2
      val jopt              = "net.sf.jopt-simple"      % "jopt-simple"            % JOpt
      val kafka             = "org.apache.kafka"        %% "kafka"                 % Kafka kafkaExclusions        // ApacheV2
      val sparkRepl         = "org.apache.spark"        %% "spark-repl"            % Spark % "provided" replExclusions // ApacheV2
      val snappy            = "org.xerial.snappy"       % "snappy-java"            % "1.1.1.7"
    }

    object Demos {
      // NOTE(review): these two artifacts hard-code the _2.10 binary suffix
      // instead of using %% — confirm that is intended for cross-built demos.
      val kafka             = "org.apache.kafka"        % "kafka_2.10"                 % Kafka kafkaExclusions               // ApacheV2
      val kafkaStreaming    = "org.apache.spark"        % "spark-streaming-kafka_2.10" % Spark % "provided" sparkExclusions  // ApacheV2
      val twitterStreaming  = "org.apache.spark"        %% "spark-streaming-twitter"   % Spark % "provided" sparkExclusions  // ApacheV2
    }

    object Test {
      val akkaTestKit       = "com.typesafe.akka"       %% "akka-testkit"                 % Akka      % "test,it"       // ApacheV2
      val commonsIO         = "commons-io"              % "commons-io"                    % CommonsIO % "test,it"       // ApacheV2
      val scalaMock         = "org.scalamock"           %% "scalamock-scalatest-support"  % ScalaMock % "test,it"       // BSD
      val scalaTest         = "org.scalatest"           %% "scalatest"                    % ScalaTest % "test,it"       // ApacheV2
      val scalactic         = "org.scalactic"           %% "scalactic"                    % Scalactic % "test,it"       // ApacheV2
      val sparkCoreT        = "org.apache.spark"        %% "spark-core"                   % Spark     % "test,it" classifier "tests"
      val sparkStreamingT   = "org.apache.spark"        %% "spark-streaming"              % Spark     % "test,it" classifier "tests"
      val mockito           = "org.mockito"             % "mockito-all"                   % "1.10.19" % "test,it"       // MIT
      val junit             = "junit"                   % "junit"                         % "4.11"    % "test,it"
      val junitInterface    = "com.novocode"            % "junit-interface"               % "0.10"    % "test,it"
      val powerMock         = "org.powermock"           % "powermock-module-junit4"       % "1.6.2"   % "test,it"       // ApacheV2
      val powerMockMockito  = "org.powermock"           % "powermock-api-mockito"         % "1.6.2"   % "test,it"       // ApacheV2
    }
  }

  import Compile._
  import BuildUtil._

  // Aggregated dependency groups consumed by the project definitions.
  val logging = Seq(slf4jApi)

  val metrics = Seq(Metrics.metricsCore, Metrics.metricsJson)

  val jetty = Seq(Jetty.jettyServer, Jetty.jettyServlet)

  val testKit = Seq(
    Test.akkaTestKit,
    Test.commonsIO,
    Test.junit,
    Test.junitInterface,
    Test.scalaMock,
    Test.scalaTest,
    Test.scalactic,
    Test.sparkCoreT,
    Test.sparkStreamingT,
    Test.mockito,
    Test.powerMock,
    Test.powerMockMockito
  )

  val akka = Seq(akkaActor, akkaRemote, akkaSlf4j)

  val cassandra = Seq(cassandraClient, cassandraDriver)

  val spark = Seq(sparkCore, sparkStreaming, sparkSql, sparkCatalyst, sparkHive)

  // Connector classpath: spark is 'provided' so user applications supply it.
  val connector = testKit ++ metrics ++ jetty ++ logging ++ akka ++ cassandra ++ spark.map(_ % "provided") ++ Seq(
    commonsLang3, config, guava, jodaC, jodaT, lzf, jsr166e)

  val embedded = logging ++ spark ++ cassandra ++ Seq(
    Embedded.cassandraServer, Embedded.jopt, Embedded.sparkRepl, Embedded.kafka, Embedded.snappy, airlift)

  val kafka = Seq(Demos.kafka, Demos.kafkaStreaming)

  val twitter = Seq(sparkStreaming, Demos.twitterStreaming)

  // External API-doc links wired into scaladoc generation.
  val documentationMappings = Seq(
    DocumentationMapping(url(s"http://spark.apache.org/docs/${Versions.Spark}/api/scala/"),
      sparkCore, sparkStreaming, sparkSql, sparkCatalyst, sparkHive
    ),
    DocumentationMapping(url(s"http://doc.akka.io/api/akka/${Versions.Akka}/"),
      akkaActor, akkaRemote, akkaSlf4j
    )
  )
}
| viirya/spark-cassandra-connector | project/CassandraSparkBuild.scala | Scala | apache-2.0 | 12,597 |
package org.randi3.utility
import java.util.Properties

import javax.mail._
import javax.mail.internet._

import scala.util.control.NonFatal
/**
 * Cake-pattern component providing SMTP e-mail sending; mix into components
 * that need to send notification mails.
 */
trait MailSenderComponent extends Utility {

  /** Concrete sender instance, supplied by the class mixing this trait in. */
  val mailSender: MailSender

  /**
   * Thin wrapper around JavaMail for sending HTML mail via a single SMTP host.
   *
   * @param smtpHost SMTP server host name
   * @param port     SMTP server port (JavaMail expects it as a string property)
   * @param smtpAuth whether to authenticate with username/password
   * @param username SMTP user (only used when smtpAuth is true)
   * @param password SMTP password (only used when smtpAuth is true)
   * @param ssl      whether to connect through an SSL socket factory
   * @param from     sender address placed in the From header
   */
  class MailSender(smtpHost: String, port: String, smtpAuth: Boolean, username: String, password: String, ssl: Boolean, from: String) {

    /**
     * Sends one HTML message. Sending is best-effort: any non-fatal failure is
     * logged via logError and swallowed so callers are never crashed by mail
     * delivery problems.
     *
     * @param to      comma-separated TO recipients
     * @param cc      comma-separated CC recipients
     * @param bcc     comma-separated BCC recipients
     * @param subject message subject
     * @param content HTML body (sent with content type "text/html")
     */
    def sendMessage(to: String, cc: String, bcc: String, subject: String, content: String) {
      try {
        val props = new Properties()
        props.put("mail.smtp.host", smtpHost)
        props.put("mail.smtp.port", port)
        // JavaMail session properties are strings; "true"/"false" directly
        // reflects the smtpAuth flag (replaces the previous if/else).
        props.put("mail.smtp.auth", smtpAuth.toString)
        if (ssl) {
          props.put("mail.smtp.socketFactory.port", port)
          props.put("mail.smtp.socketFactory.class",
            "javax.net.ssl.SSLSocketFactory")
        }
        val session = if (smtpAuth) {
          Session.getInstance(props,
            new javax.mail.Authenticator {
              override def getPasswordAuthentication: PasswordAuthentication = {
                new PasswordAuthentication(username, password)
              }
            })
        } else {
          Session.getInstance(props, null)
        }
        val message = new MimeMessage(session)
        // Set the from, to, subject, body text
        message.setFrom(new InternetAddress(from))
        message.setRecipients(Message.RecipientType.TO, to)
        message.setRecipients(Message.RecipientType.CC, cc)
        message.setRecipients(Message.RecipientType.BCC, bcc)
        message.setSubject(subject)
        message.setContent(content, "text/html" )
        Transport.send(message)
      } catch {
        // NonFatal instead of Exception: still swallows ordinary delivery
        // failures, but lets InterruptedException and fatal VM errors
        // propagate instead of being silently logged.
        case NonFatal(e) => logError(e)
      }
    }
  }
}
package se.joham.funrts.model
import java.util
import se.joham.funrts.math.Vec2FixPt
/**
* Created by johan on 2016-06-13.
*/
/**
 * Rectangular tile grid of `nx` columns by `ny` rows, stored row-major in
 * `tiles` (linear index = y * nx + x).
 */
case class Terrain(nx: Int, ny: Int, tiles: Array[Tile.Type]) {

  // Content-based equality: the generated case-class equals would compare the
  // `tiles` array by reference, so compare its elements explicitly.
  override def equals(other: Any): Boolean = {
    other match {
      case m: Terrain => (nx == m.nx) && (ny == m.ny) && util.Arrays.equals(tiles, m.tiles)
      case _ => false
    }
  }

  // Must stay consistent with the content-based equals above. The generated
  // case-class hashCode hashes the array by identity, so two equal terrains
  // could previously land in different hash buckets (broken equals/hashCode
  // contract); hashing the tile contents fixes that.
  override def hashCode: Int = (nx, ny, tiles.toSeq).hashCode()

  // Grid dimensions as a vector, used for bounds reporting below.
  val size: Size.Type = Vec2FixPt(nx, ny)

  /** Throws IllegalArgumentException when `pos` lies outside the grid. */
  private def requireInBounds(pos: Pos.Type): Unit = {
    require(pos.x < size.x, s"Position $pos out of level bounds $size")
    require(pos.x >= 0, s"Position $pos out of level bounds $size")
    require(pos.y < size.y, s"Position $pos out of level bounds $size")
    require(pos.y >= 0, s"Position $pos out of level bounds $size")
  }

  /** Replaces the tile at `pos` (enables `terrain(pos) = tile` syntax). */
  def update(pos: Pos.Type, tile: Tile.Type): Unit = {
    requireInBounds(pos)
    tiles(pos2Index(pos)) = tile
  }

  /** Returns the tile at `pos`. */
  def apply(pos: Pos.Type): Tile.Type = {
    requireInBounds(pos)
    tiles(pos2Index(pos))
  }

  /** Row-major linear index of `pos` into `tiles`. */
  def pos2Index(pos: Pos.Type): Int = {
    (pos.y * nx + pos.x).toInt
  }
}
| GiGurra/fun-rts | src/main/scala/se/joham/funrts/model/Terrain.scala | Scala | gpl-2.0 | 1,237 |
package io.really
import scala.util.Random
object TestHelpers {

  /**
   * Builds a pseudo-random BucketID for the object addressed by `r`:
   * "<skeleton>-<id mod bucketsNumber>-<4 random chars>".
   *
   * Requires `r` to address an object (it must carry an id).
   *
   * NOTE(review): Random.nextString(4) can yield arbitrary (including
   * non-printable) unicode characters — confirm consumers of BucketID accept
   * that; `r.head.id.get` assumes the head token always has an id when
   * `r.isObject` holds.
   */
  def randomBucketID(r: R)(implicit config: ReallyConfig): BucketID = {
    require(r.isObject, "R doesn't contain an ID")
    r.skeleton.actorFriendlyStr + "-" + (r.head.id.get % config.Sharding.bucketsNumber) + "-" + Random.nextString(4)
  }
}
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.repository.ldap
import com.normation.rudder.domain.policies.RuleId
import com.normation.rudder.repository.NodeConfigurationRepository
import com.unboundid.ldap.sdk.DN
import com.normation.rudder.exceptions.HierarchicalException
import com.normation.rudder.domain.servers.{RootNodeConfiguration,NodeConfiguration,SimpleNodeConfiguration}
import com.normation.rudder.domain.{RudderDit,Constants}
import com.normation.ldap.sdk.LDAPConnectionProvider
import net.liftweb.common._
import com.normation.ldap.sdk._
import BuildFilter._
import com.normation.inventory.ldap.core.LDAPConstants._
import com.normation.inventory.domain._
import com.normation.rudder.domain.RudderLDAPConstants._
import com.normation.cfclerk.domain.TechniqueId
import com.normation.utils.Control.{ bestEffort, sequence }
class LDAPNodeConfigurationRepository(
ldap:LDAPConnectionProvider[RwLDAPConnection],
rudderDit:RudderDit,
mapper:LDAPNodeConfigurationMapper
) extends NodeConfigurationRepository with Loggable {
  // Persists the root (policy server) node configuration and echoes it back on success.
  def addRootNodeConfiguration(rootNodeConfiguration : RootNodeConfiguration) : Box[RootNodeConfiguration] = saveNodeConfiguration(rootNodeConfiguration).map(_ => rootNodeConfiguration)
/**
* An utility class that finds all root NodeConfigurations UUID among servers.
* It should alway return a full seq of exactly one server
*/
def getRootNodeIds() : Box[Seq[NodeId]] = {
ldap.map { con =>
con.searchOne(rudderDit.NODE_CONFIGS.dn, IS(OC_ROOT_POLICY_SERVER), "1:1" ).collect {
case e if(rudderDit.NODE_CONFIGS.NODE_CONFIG.idFromDn(e.dn).isDefined) => rudderDit.NODE_CONFIGS.NODE_CONFIG.idFromDn(e.dn).get
}
}
}
  /**
   * retrieve server object from a set of dn of server Entries.
   * It checks that the dn is actually a dn for a server entry
   * It only returns if all dn which are server entry DN lead to
   * a server object in order to not miss data inconsistencies (what
   * means that even if only one server is corrupted, none could b retrieve)
   * TODO: discuss if it's the behaviour we want
   * @param con
   * @param dns
   * @return
   */
  private def findNodeConfigurationFromNodeConfigurationEntryDN(con:RwLDAPConnection, dns:Set[DN]) : Box[List[NodeConfiguration]] = {
    // Fold over the DNs, short-circuiting on the first failure so one corrupt
    // entry fails the whole lookup (see the TODO in the scaladoc above).
    ( (Full(List[NodeConfiguration]()):Box[List[NodeConfiguration]]) /: dns ) {
      case (e:EmptyBox,_) => e
      case (Full(list),dn) =>
        // Only DNs whose RDN is a single-valued node-config attribute are
        // actual node configuration entries; anything else is skipped.
        if(!dn.getRDN.isMultiValued && dn.getRDN.hasAttribute(rudderDit.NODE_CONFIGS.NODE_CONFIG.rdnAttribute._1)) {
          (for {
            tree <- con.getTree(dn)
            server <- mapper.toNodeConfiguration(tree)
          } yield server) match {
            case e:EmptyBox => logger.error("Couldn't map node %s reason : %s". format(dn, e)) ; e
            case Full(server) => Full(server::list)
          }
        } else Full(list)
    }
  }
  // Fetches THE root policy server configuration. Fails unless exactly one
  // entry with the root-policy-server object class exists and maps to a
  // RootNodeConfiguration.
  def getRootNodeConfiguration() : Box[RootNodeConfiguration] = {
    ldap.flatMap { con =>
      val seq = con.searchOne(rudderDit.NODE_CONFIGS.dn, IS(OC_ROOT_POLICY_SERVER), "1:1" )
      if(seq.size == 1) {
        findNodeConfigurationFromNodeConfigurationEntryDN(con,Set(seq(0).dn)) match {
          case Full(seq2) => seq2.toList match {
            case (root:RootNodeConfiguration) :: Nil => Full(root)
            // The entry matched the root object class but mapped to a plain
            // node configuration: data corruption.
            case s :: Nil => Failure("Corrupted data: found a normal server in place of the root server %s".format(s))
            case other => Failure("Exactly one policy server must be configured, found %s (%s)".format(other.size, other.map(_.id).mkString(",")))
          }
          case e:EmptyBox => e
        }
      } else if(seq.size < 1)
        Failure("Exactly one policy server must be configured, found 0")
      else Failure("Exactly one policy server must be configured, found %s (%s)".format(seq.size, seq.map(_.dn).mkString(",")))
    }
  }
  /**
   * Search a server by its id
   * @param id the node id whose configuration entry tree is fetched and mapped
   * @return the server
   */
  def findNodeConfiguration(id : NodeId) : Box[NodeConfiguration] = {
    for {
      con <- ldap
      tree <- con.getTree(rudderDit.NODE_CONFIGS.NODE_CONFIG.dn(id.value))
      server <- mapper.toNodeConfiguration(tree) ?~! "Mapping from LDAP representation to NodeConfiguration failed"
    } yield server
  }
  /**
   * Return multiples servers
   * @param ids the node ids to look up; ids with no matching entry are simply
   *            absent from the result (one OR filter matches them all at once)
   * @return
   */
  def getMultipleNodeConfigurations(ids : Seq[NodeId]) : Box[Set[NodeConfiguration]] = {
    ldap.flatMap { con =>
      findNodeConfigurationFromNodeConfigurationEntryDN(con,
        con.searchOne(
          rudderDit.NODE_CONFIGS.dn,
          OR(ids.toSet[NodeId].map(id => EQ(rudderDit.NODE_CONFIGS.NODE_CONFIG.rdnAttribute._1,id.value)).toSeq:_*),
          "1:1"
        ).map(_.dn).toSet
      ).map(_.toSet)
    }
  }
  /**
   * Save a server in the repo
   * @param server the configuration to persist; its whole LDAP tree is
   *               (over)written, deleting attributes/entries no longer present
   * @return the saved configuration, unchanged, on success
   */
  def saveNodeConfiguration(server:NodeConfiguration) : Box[NodeConfiguration] = {
    (ldap.map { con =>
      val tree = mapper.fromNodeConfiguration(server)
      con.saveTree(tree, true)
    }).map( _ => server)
  }
/**
* Save several servers in the repo
* @param server
* @return
*/
/**
 * Persist several node configurations, stopping at the first failure
 * (the first EmptyBox encountered is returned as-is).
 *
 * Note: results are prepended as they are saved, so the returned sequence is
 * in reverse order of the input — this matches the historical behavior.
 *
 * Uses `foldLeft` instead of the deprecated symbolic `/:` operator.
 */
def saveMultipleNodeConfigurations(servers: Seq[NodeConfiguration]) : Box[Seq[NodeConfiguration]] = {
  servers.foldLeft(Full(Nil): Box[List[NodeConfiguration]]) {
    case (error: EmptyBox, _) => error
    case (Full(saved), server) =>
      this.saveNodeConfiguration(server) match {
        case Full(s)     => Full(s :: saved)
        case e: EmptyBox => e
      }
  }
}
/**
* Delete the node configurations for the given ids.
* Does not check the consistency of anything
* @param ids
*/
/**
 * Delete the node configuration entries for the given ids (recursively),
 * without any consistency check. Returns the requested ids on success.
 */
def deleteNodeConfigurations(ids:Set[NodeId]) : Box[Set[NodeId]] = {
  ldap.flatMap { con =>
    sequence(ids.toSeq) { id =>
      con.delete(rudderDit.NODE_CONFIGS.NODE_CONFIG.dn(id.value), true)
    }.map(_ => ids)
  }
}
/**
* Delete all node configurations
*/
/**
 * Delete every node configuration entry (best effort: tries them all even if
 * some deletions fail) and return the set of ids that were targeted.
 */
def deleteAllNodeConfigurations : Box[Set[NodeId]] = {
  ldap.flatMap { con =>
    val nodeConfigDns = con.searchOne(rudderDit.NODE_CONFIGS.dn, ALL, "1:1").map(_.dn)
    bestEffort(nodeConfigDns) { dn =>
      con.delete(dn, recurse = true)
    }.map { _ =>
      // translate the deleted DNs back into node ids (unparsable DNs are dropped)
      nodeConfigDns.flatMap(dn => rudderDit.NODE_CONFIGS.NODE_CONFIG.idFromDn(dn)).toSet
    }
  }
}
/**
* Return all servers
* @return
*/
/**
 * Fetch every node configuration, indexed by node id.
 */
def getAll() : Box[Map[NodeId, NodeConfiguration]] = {
  for {
    con     <- ldap
    configs <- findNodeConfigurationFromNodeConfigurationEntryDN(con, con.searchOne(rudderDit.NODE_CONFIGS.dn, ALL, "1:1").map(_.dn).toSet)
  } yield {
    configs.map(c => (c.id, c)).toMap
  }
}
/**
* Look for all server which have the given directive ID.
*/
/**
 * Find all node configurations that currently hold a directive with the
 * given rule id (search on child entries, then go back to the parent
 * node-configuration entry).
 */
def findNodeConfigurationByCurrentRuleId(id:RuleId) : Box[Seq[NodeConfiguration]] = {
  ldap.flatMap { con =>
    val matching = con.searchSub(
      rudderDit.NODE_CONFIGS.dn,
      AND(IS(OC_NODE_CONFIGURATION), EQ(A_DIRECTIVE_UUID, id.value)),
      "1:1"
    )
    findNodeConfigurationFromNodeConfigurationEntryDN(con, matching.map(_.dn.getParent).toSet)
  }
}
/**
* Look for all servers which use the given technique (whatever directives
* of that technique they have, as long as they have at least one)
*/
/**
 * Find all node configurations holding at least one policy draft based on the
 * given technique (matched by technique name AND version), resolving the
 * matching child entries back to their parent node-configuration entry.
 */
def findNodeConfigurationByTargetPolicyName(techniqueId:TechniqueId) : Box[Seq[NodeConfiguration]] = {
  ldap.flatMap { con =>
    val matching = con.searchSub(
      rudderDit.NODE_CONFIGS.dn,
      AND(IS(OC_TARGET_RULE_WITH_CF3POLICYDRAFT), EQ(A_NAME, techniqueId.name.value), EQ(A_TECHNIQUE_VERSION, techniqueId.version.toString)),
      "1:1"
    )
    findNodeConfigurationFromNodeConfigurationEntryDN(con, matching.map(_.dn.getParent).toSet)
  }
}
/**
* Return all the servers that need to be committed
* Meaning, all servers that have a difference between the current and target directive
*
* TODO: perhaps it should be a method of BridgeToCfclerkService,
* and then NodeConfigurationService will be able to find all servers with
* these directives
*/
/**
 * Find all node configurations flagged as modified (i.e. whose current and
 * target directives differ) and that therefore still need to be committed.
 */
def findUncommitedNodeConfigurations() : Box[Seq[NodeConfiguration]] = {
  ldap.flatMap { con =>
    val modified = con.searchOne(
      rudderDit.NODE_CONFIGS.dn,
      AND(IS(OC_NODE_CONFIGURATION), EQ(A_SERVER_IS_MODIFIED, true.toLDAPString)),
      "1:1"
    )
    findNodeConfigurationFromNodeConfigurationEntryDN(con, modified.map(_.dn).toSet)
  }
}
}
| jooooooon/rudder | rudder-core/src/main/scala/com/normation/rudder/repository/ldap/LDAPNodeConfigurationRepository.scala | Scala | agpl-3.0 | 10,157 |
/**
* Copyright (c) 2015, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.sparkts.models
import com.cloudera.sparkts.UnivariateTimeSeries.{differencesOfOrderD, inverseDifferencesOfOrderD}
import org.apache.commons.math3.random.MersenneTwister
import org.apache.spark.mllib.linalg.DenseVector
import org.scalatest.FunSuite
import org.scalatest.Matchers._
import scala.util.{Success, Try}
/**
 * Tests for ARIMA model fitting, sampling and diagnostics.
 *
 * Many tests pin results against reference values produced by R (scripts are
 * kept in the comments next to each test); others rely on a fixed
 * MersenneTwister seed, so the exact literals and call order are part of the
 * test contract.
 */
class ARIMASuite extends FunSuite {
  test("compare with R") {
    // > R.Version()$version.string
    // [1] "R version 3.2.0 (2015-04-16)"
    // > set.seed(456)
    // y <- arima.sim(n=250,list(ar=0.3,ma=0.7),mean = 5)
    // write.table(y, file = "resources/R_ARIMA_DataSet1.csv", row.names = FALSE, col.names = FALSE)
    val dataFile = getClass.getClassLoader.getResourceAsStream("R_ARIMA_DataSet1.csv")
    val rawData = scala.io.Source.fromInputStream(dataFile).getLines().toArray.map(_.toDouble)
    val data = new DenseVector(rawData)
    val model = ARIMA.fitModel(1, 0, 1, data)
    // only ar/ma are checked against the R parameters; the intercept c is unchecked here
    val Array(c, ar, ma) = model.coefficients
    ar should be (0.3 +- 0.05)
    ma should be (0.7 +- 0.05)
  }

  test("Data sampled from a given model should result in similar model if fit") {
    val rand = new MersenneTwister(10L)
    val model = new ARIMAModel(2, 1, 2, Array(8.2, 0.2, 0.5, 0.3, 0.1))
    val sampled = model.sample(1000, rand)
    val newModel = ARIMA.fitModel(2, 1, 2, sampled)
    val Array(c, ar1, ar2, ma1, ma2) = model.coefficients
    val Array(cTest, ar1Test, ar2Test, ma1Test, ma2Test) = newModel.coefficients
    // intercept is given more leeway
    c should be (cTest +- 1)
    ar1Test should be (ar1 +- 0.1)
    ma1Test should be (ma1 +- 0.1)
    ar2Test should be (ar2 +- 0.1)
    ma2Test should be (ma2 +- 0.1)
  }

  test("Fitting CSS with BOBYQA and conjugate gradient descent should be fairly similar") {
    val rand = new MersenneTwister(10L)
    val model = new ARIMAModel(2, 1, 2, Array(8.2, 0.2, 0.5, 0.3, 0.1))
    val sampled = model.sample(1000, rand)
    // same sample fitted with the two CSS optimizers; their coefficients should agree
    val fitWithBOBYQA = ARIMA.fitModel(2, 1, 2, sampled, method = "css-bobyqa")
    val fitWithCGD = ARIMA.fitModel(2, 1, 2, sampled, method = "css-cgd")
    val Array(c, ar1, ar2, ma1, ma2) = fitWithBOBYQA.coefficients
    val Array(cCGD, ar1CGD, ar2CGD, ma1CGD, ma2CGD) = fitWithCGD.coefficients
    // give more leeway for intercept
    cCGD should be (c +- 1)
    ar1CGD should be (ar1 +- 0.1)
    ar2CGD should be (ar2 +- 0.1)
    ma1CGD should be (ma1 +- 0.1)
    ma2CGD should be (ma2 +- 0.1)
  }

  test("Fitting ARIMA(p, d, q) should be the same as fitting a d-order differenced ARMA(p, q)") {
    val rand = new MersenneTwister(10L)
    val model = new ARIMAModel(1, 1, 2, Array(0.3, 0.7, 0.1), hasIntercept = false)
    val sampled = model.sample(1000, rand)
    val arimaModel = ARIMA.fitModel(1, 1, 2, sampled, includeIntercept = false)
    // drop(1): the first value of a once-differenced series is not defined
    val differencedSample = new DenseVector(differencesOfOrderD(sampled, 1).toArray.drop(1))
    val armaModel = ARIMA.fitModel(1, 0, 2, differencedSample, includeIntercept = false)
    val Array(refAR, refMA1, refMA2) = model.coefficients
    val Array(iAR, iMA1, iMA2) = arimaModel.coefficients
    val Array(ar, ma1, ma2) = armaModel.coefficients
    // ARIMA model should match parameters used to sample, to some extent
    iAR should be (refAR +- 0.05)
    iMA1 should be (refMA1 +- 0.05)
    iMA2 should be (refMA2 +- 0.05)
    // ARMA model parameters of differenced sample should be equal to ARIMA model parameters
    ar should be (iAR)
    ma1 should be (iMA1)
    ma2 should be (iMA2)
  }

  test("Adding ARIMA effects to series, and removing should return the same series") {
    val rand = new MersenneTwister(20L)
    val model = new ARIMAModel(1, 1, 2, Array(8.3, 0.1, 0.2, 0.3), hasIntercept = true)
    val whiteNoise = new DenseVector(Array.fill(100)(rand.nextGaussian))
    val arimaProcess = new DenseVector(Array.fill(100)(0.0))
    model.addTimeDependentEffects(whiteNoise, arimaProcess)
    val closeToWhiteNoise = new DenseVector(Array.fill(100)(0.0))
    model.removeTimeDependentEffects(arimaProcess, closeToWhiteNoise)
    // round trip should reproduce the original noise up to numerical error
    for (i <- 0 until whiteNoise.size) {
      val diff = whiteNoise(i) - closeToWhiteNoise(i)
      math.abs(diff) should be < 1e-4
    }
  }

  test("Fitting ARIMA(0, 0, 0) with intercept term results in model with average as parameter") {
    val rand = new MersenneTwister(10L)
    val sampled = new DenseVector(Array.fill(100)(rand.nextGaussian))
    val model = ARIMA.fitModel(0, 0, 0, sampled)
    val mean = sampled.toArray.sum / sampled.size
    model.coefficients(0) should be (mean +- 1e-4)
  }

  test("Fitting an integrated time series of order 3") {
    // > set.seed(10)
    // > vals <- arima.sim(list(ma = c(0.2), order = c(0, 3, 1)), 200)
    // > arima(order = c(0, 3, 1), vals, method = "CSS")
    //
    // Call:
    // arima(x = vals, order = c(0, 3, 1), method = "CSS")
    //
    // Coefficients:
    //          ma1
    //       0.2523
    // s.e.  0.0623
    //
    // sigma^2 estimated as 0.9218:  part log likelihood = -275.65
    // > write.table(y, file = "resources/R_ARIMA_DataSet2.csv", row.names = FALSE, col.names =
    // FALSE)
    val dataFile = getClass.getClassLoader.getResourceAsStream("R_ARIMA_DataSet2.csv")
    val rawData = scala.io.Source.fromInputStream(dataFile).getLines().toArray.map(_.toDouble)
    val data = new DenseVector(rawData)
    val model = ARIMA.fitModel(0, 3, 1, data)
    val Array(c, ma) = model.coefficients
    ma should be (0.2 +- 0.05)
  }

  test("Stationarity and Invertibility checks") {
    // Testing violations of stationarity and invertibility
    val model1 = new ARIMAModel(1, 0, 0, Array(0.2, 1.5), hasIntercept = true)
    model1.isStationary() should be (false)
    model1.isInvertible() should be (true)
    val model2 = new ARIMAModel(0, 0, 1, Array(0.13, 1.8), hasIntercept = true)
    model2.isStationary() should be (true)
    model2.isInvertible() should be (false)
    // http://www.econ.ku.dk/metrics/Econometrics2_05_II/Slides/07_univariatetimeseries_2pp.pdf
    // AR(2) model on slide 31 should be stationary
    val model3 = new ARIMAModel(2, 0, 0, Array(0.003359, 1.545, -0.5646), hasIntercept = true)
    model3.isStationary() should be (true)
    model3.isInvertible() should be (true)
    // http://www.econ.ku.dk/metrics/Econometrics2_05_II/Slides/07_univariatetimeseries_2pp.pdf
    // ARIMA(1, 0, 1) model from slide 36 should be stationary and invertible
    val model4 = new ARIMAModel(1, 0, 1, Array(-0.09341, 0.857361, -0.300821), hasIntercept = true)
    model4.isStationary() should be (true)
    model4.isInvertible() should be (true)
  }

  test("Auto fitting ARIMA models") {
    val model1 = new ARIMAModel(2, 0, 0, Array(2.5, 0.4, 0.3), hasIntercept = true)
    val rand = new MersenneTwister(10L)
    val sampled = model1.sample(250, rand)
    // a series with a high integration order
    val highI = inverseDifferencesOfOrderD(sampled, 5)
    // auto fitting without increasing the maxD parameter should result in a failure
    val highIntegrationFailure = Try(ARIMA.autoFit(highI)).isFailure
    highIntegrationFailure should be (true)
    // but should work if we increase the differencing order limit
    val highIntegrationWorks = Try(ARIMA.autoFit(highI, maxD = 10)).isSuccess
    highIntegrationWorks should be (true)
    // in this test, we'll throw an exception and not go on if the auto fit function fails
    // the sample we're trying to model is I(0), and we can always fit a model with just the
    // intercept, so a failure here would be indicative of other issues
    val (maxP, maxQ) = (5, 5)
    val fitted = Try(ARIMA.autoFit(sampled, maxP = maxP, maxQ = maxQ)) match {
      case Success(model) => model
      case _ => throw new Exception("Unable to fit model in test suite")
    }
    val fittedApproxAIC = fitted.approxAIC(sampled)
    // The model should have a lower AIC than the dummy model (just intercept)
    // testing other models effectively boils down to the function implementation
    // so we don't do that here
    val justIntercept = ARIMA.fitModel(0, fitted.d, 0, sampled, includeIntercept = true)
    justIntercept.approxAIC(sampled) should be > fittedApproxAIC
  }

  test("Polynomial eigensolver should find easy root") {
    ARIMA.findRoots(Array(1, -0.4))(0).abs() should be (2.5)
  }

  // To compare with R:
  // roots <- abs(polyroot(c(1, 0.5, -0.3, 1.9, -3.0, 0.5)))
  test("Polynomial eigensolver should find harder roots") {
    val roots = ARIMA.findRoots(Array(1, 0.5, -0.3, 1.9, -3.0, 0.5))
    // compare magnitudes rounded to 5 decimal places, order-insensitively
    roots.map(x => (x.abs() * 1E5).round / 1E5) should contain theSameElementsAs
      Array(0.77959, 0.55383, 0.77959, 1.12229, 5.29438)
  }
}
| ypramos1986/spark-timeseries | src/test/scala/com/cloudera/sparkts/models/ARIMASuite.scala | Scala | apache-2.0 | 9,213 |
package io.scalajs.nodejs
package http
import io.scalajs.RawOptions
import io.scalajs.util.PromiseHelper._
import scala.concurrent.Future
import scala.scalajs.js
import scala.scalajs.js.|
/**
* The HTTP Agent is used for pooling sockets used in HTTP client requests.
* The HTTP Agent also defaults client requests to using Connection:keep-alive. If no pending
* HTTP requests are waiting on a socket to become free the socket is closed. This means
* that Node.js's pool has the benefit of keep-alive when under load but still does not
* require developers to manually close the HTTP clients using KeepAlive.
* @author lawrence.daniels@gmail.com
*/
@js.native
trait Agent extends js.Object {

  /////////////////////////////////////////////////////////////////////////////////
  //      Properties
  /////////////////////////////////////////////////////////////////////////////////

  /**
    * The agent's domain name
    */
  def domain: String = js.native

  /**
    * An object which contains arrays of sockets currently awaiting use by the Agent when HTTP KeepAlive is used. Do not modify.
    * @example agent.freeSockets
    */
  def freeSockets: js.Object = js.native

  /**
    * By default set to 256. For Agents supporting HTTP KeepAlive, this sets the maximum number of sockets
    * that will be left open in the free state.
    * @example agent.maxFreeSockets
    */
  var maxFreeSockets: Int = js.native

  /**
    * By default set to Infinity. Determines how many concurrent sockets the agent can have open per origin.
    * Origin is either a 'host:port' or 'host:port:localAddress' combination.
    * @example agent.maxSockets
    */
  var maxSockets: Int = js.native

  /**
    * An object which contains queues of requests that have not yet been assigned to sockets. Do not modify.
    * @example agent.requests
    */
  def requests: js.Object = js.native

  /**
    * An object which contains arrays of sockets currently in use by the Agent. Do not modify.
    * @example agent.sockets
    */
  // TODO what is the underlying object? (exposed here as opaque js.Any elements — confirm against Node.js docs)
  def sockets: js.Array[js.Any] = js.native

  /////////////////////////////////////////////////////////////////////////////////
  //      Methods
  /////////////////////////////////////////////////////////////////////////////////

  /**
    * Produces a socket/stream to be used for HTTP requests. By default, this function is the same
    * as net.createConnection(). However, custom Agents may override this method in case greater
    * flexibility is desired.
    *
    * A socket/stream can be supplied in one of two ways: by returning the socket/stream from this function,
    * or by passing the socket/stream to callback.
    *
    * callback has a signature of (err, stream).
    * @example agent.createConnection(options[, callback])
    */
  def createConnection(options: ConnectionOptions | RawOptions, callback: js.Function): Unit = js.native

  /**
    * Destroy any sockets that are currently in use by the agent.
    *
    * It is usually not necessary to do this. However, if you are using an agent with KeepAlive enabled,
    * then it is best to explicitly shut down the agent when you know that it will no longer be used.
    * Otherwise, sockets may hang open for quite a long time before the server terminates them.
    * @example agent.destroy()
    */
  def destroy(): Unit = js.native

}
/**
* Agent Companion
* @author lawrence.daniels@gmail.com
*/
object Agent {
/**
* Agent Extensions
* @author lawrence.daniels@gmail.com
*/
/**
  * Agent Extensions: Future-based convenience wrappers over the
  * callback-based facade methods.
  * @author lawrence.daniels@gmail.com
  */
implicit class AgentExtensions(val agent: Agent) extends AnyVal {

  /**
    * Produces a socket/stream to be used for HTTP requests, exposing the
    * callback-based `agent.createConnection` as a [[scala.concurrent.Future]].
    * By default, this function is the same as net.createConnection(); custom
    * Agents may override the underlying method for greater flexibility.
    */
  @inline
  def createConnectionFuture(options: ConnectionOptions): Future[js.Any] = {
    promiseWithError1[Error, js.Any] { callback =>
      agent.createConnection(options, callback)
    }
  }

}
} | scalajs-io/nodejs | app/common/src/main/scala/io/scalajs/nodejs/http/Agent.scala | Scala | apache-2.0 | 4,044 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.metadata
import org.apache.flink.table.api.TableException
import org.apache.flink.table.planner.plan.nodes.calcite.{Expand, Rank, WindowAggregate}
import org.apache.flink.table.planner.plan.nodes.physical.batch._
import org.apache.flink.table.planner.plan.schema.FlinkRelOptTable
import org.apache.flink.table.planner.plan.utils.AggregateUtil
import org.apache.flink.table.planner.{JArrayList, JDouble, JList}
import com.google.common.collect.ImmutableList
import org.apache.calcite.avatica.util.ByteString
import org.apache.calcite.plan.volcano.RelSubset
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core._
import org.apache.calcite.rel.metadata._
import org.apache.calcite.rel.{RelNode, SingleRel}
import org.apache.calcite.rex.{RexCall, RexInputRef, RexLiteral, RexNode}
import org.apache.calcite.sql.`type`.SqlTypeName
import org.apache.calcite.util.{BuiltInMethod, ImmutableNullableList, NlsString, Util}
import scala.collection.JavaConversions._
/**
* FlinkRelMdSize supplies a implementation of
* [[RelMetadataQuery#getAverageRowSize]] and
* [[RelMetadataQuery#getAverageColumnSizes]] for the standard logical algebra.
*/
/**
 * FlinkRelMdSize supplies a implementation of
 * [[RelMetadataQuery#getAverageRowSize]] and
 * [[RelMetadataQuery#getAverageColumnSizes]] for the standard logical algebra.
 */
class FlinkRelMdSize private extends MetadataHandler[BuiltInMetadata.Size] {

  def getDef: MetadataDef[BuiltInMetadata.Size] = BuiltInMetadata.Size.DEF

  // --------------- averageRowSize ----------------------------------------------------------------

  def averageRowSize(rel: TableScan, mq: RelMetadataQuery): JDouble = {
    // for scans the per-column sizes must be fully known (statistics or type defaults)
    val averageColumnSizes = mq.getAverageColumnSizes(rel)
    require(averageColumnSizes != null && !averageColumnSizes.contains(null))
    averageColumnSizes.foldLeft(0D)(_ + _)
  }

  def averageRowSize(rel: RelNode, mq: RelMetadataQuery): JDouble = {
    val averageColumnSizes = mq.getAverageColumnSizes(rel)
    if (averageColumnSizes == null) {
      FlinkRelMdSize.estimateRowSize(rel.getRowType)
    } else {
      // fall back to the type-based default for any column with unknown size
      val fields = rel.getRowType.getFieldList
      val columnSizes = averageColumnSizes.zip(fields) map {
        case (columnSize, field) =>
          if (columnSize == null) FlinkRelMdSize.averageTypeValueSize(field.getType) else columnSize
      }
      columnSizes.foldLeft(0D)(_ + _)
    }
  }

  // --------------- averageColumnSizes ------------------------------------------------------------

  def averageColumnSizes(rel: TableScan, mq: RelMetadataQuery): JList[JDouble] = {
    val statistic = rel.getTable.asInstanceOf[FlinkRelOptTable].getFlinkStatistic
    rel.getRowType.getFieldList.map { field =>
      val colStats = statistic.getColumnStats(field.getName)
      if (colStats != null && colStats.getAvgLen != null) {
        colStats.getAvgLen
      } else {
        FlinkRelMdSize.averageTypeValueSize(field.getType)
      }
    }
  }

  def averageColumnSizes(rel: Values, mq: RelMetadataQuery): JList[JDouble] = {
    val fields = rel.getRowType.getFieldList
    val list = ImmutableList.builder[JDouble]()
    fields.zipWithIndex.foreach {
      case (field, index) =>
        // with no tuples, use the type default; otherwise average the literal sizes
        val d: JDouble = if (rel.getTuples().isEmpty) {
          FlinkRelMdSize.averageTypeValueSize(field.getType)
        } else {
          val sumSize = rel.getTuples().foldLeft(0D) { (acc, literals) =>
            val size = typeValueSize(field.getType,
              literals.get(index).getValueAs(classOf[Comparable[_]]))
            acc + size
          }
          sumSize / rel.getTuples.size()
        }
        list.add(d)
    }
    list.build
  }

  def averageColumnSizes(rel: Project, mq: RelMetadataQuery): JList[JDouble] = {
    val inputColumnSizes = mq.getAverageColumnSizesNotNull(rel.getInput)
    val sizesBuilder = ImmutableNullableList.builder[JDouble]()
    rel.getProjects.foreach(p => sizesBuilder.add(averageRexSize(p, inputColumnSizes)))
    sizesBuilder.build
  }

  def averageColumnSizes(rel: Filter, mq: RelMetadataQuery): JList[JDouble] =
    mq.getAverageColumnSizes(rel.getInput)

  def averageColumnSizes(rel: Calc, mq: RelMetadataQuery): JList[JDouble] = {
    val inputColumnSizes = mq.getAverageColumnSizesNotNull(rel.getInput())
    val sizesBuilder = ImmutableNullableList.builder[JDouble]()
    // only the projection part of the program matters for output sizes
    val projects = rel.getProgram.split().left
    projects.foreach(p => sizesBuilder.add(averageRexSize(p, inputColumnSizes)))
    sizesBuilder.build()
  }

  def averageColumnSizes(rel: Expand, mq: RelMetadataQuery): JList[JDouble] = {
    val fieldCount = rel.getRowType.getFieldCount
    // get each column's RexNode (RexLiteral, RexInputRef or null)
    val projectNodes = (0 until fieldCount).map { i =>
      val initNode: RexNode = rel.getCluster.getRexBuilder.constantNull()
      rel.projects.foldLeft(initNode) {
        (mergeNode, project) =>
          (mergeNode, project.get(i)) match {
            case (l1: RexLiteral, l2: RexLiteral) =>
              // choose non-null one
              if (l1.getValueAs(classOf[Comparable[_]]) == null) l2 else l1
            case (_: RexLiteral, r: RexInputRef) => r
            case (r: RexInputRef, _: RexLiteral) => r
            case (r1: RexInputRef, r2: RexInputRef) =>
              // if reference different columns, return null (using default value)
              if (r1.getIndex == r2.getIndex) r1 else null
            case (_, _) => null
          }
      }
    }

    val inputColumnSizes = mq.getAverageColumnSizesNotNull(rel.getInput())
    val sizesBuilder = ImmutableNullableList.builder[JDouble]()
    projectNodes.zipWithIndex.foreach {
      case (p, i) =>
        val size = if (p == null || i == rel.expandIdIndex) {
          // use default value
          FlinkRelMdSize.averageTypeValueSize(rel.getRowType.getFieldList.get(i).getType)
        } else {
          // use value from input
          averageRexSize(p, inputColumnSizes)
        }
        sizesBuilder.add(size)
    }
    sizesBuilder.build()
  }

  def averageColumnSizes(rel: Exchange, mq: RelMetadataQuery): JList[JDouble] =
    mq.getAverageColumnSizes(rel.getInput)

  def averageColumnSizes(rel: Rank, mq: RelMetadataQuery): JList[JDouble] = {
    // NOTE(review): uses getAverageColumnSizes (nullable) and then `++` on the
    // result — presumably never null for Rank inputs; confirm against callers.
    val inputColumnSizes = mq.getAverageColumnSizes(rel.getInput)
    if (rel.getRowType.getFieldCount != rel.getInput.getRowType.getFieldCount) {
      // if outputs rank function value, rank function column is the last one
      val rankFunColumnSize =
        FlinkRelMdSize.averageTypeValueSize(rel.getRowType.getFieldList.last.getType)
      inputColumnSizes ++ List(rankFunColumnSize)
    } else {
      inputColumnSizes
    }
  }

  def averageColumnSizes(rel: Sort, mq: RelMetadataQuery): JList[JDouble] =
    mq.getAverageColumnSizes(rel.getInput)

  def averageColumnSizes(rel: Aggregate, mq: RelMetadataQuery): JList[JDouble] = {
    val inputColumnSizes = mq.getAverageColumnSizesNotNull(rel.getInput)
    val sizesBuilder = ImmutableList.builder[JDouble]()
    // output layout: full grouping columns (sizes from input) + agg call results (type defaults)
    val (auxGroupSet, otherAggCalls) = AggregateUtil.checkAndSplitAggCalls(rel)
    val fullGrouping = rel.getGroupSet.toArray ++ auxGroupSet
    fullGrouping.foreach(i => sizesBuilder.add(inputColumnSizes.get(i)))
    otherAggCalls.foreach(aggCall => sizesBuilder.add(
      FlinkRelMdSize.averageTypeValueSize(aggCall.getType)))
    sizesBuilder.build
  }

  def averageColumnSizes(rel: BatchExecGroupAggregateBase, mq: RelMetadataQuery): JList[JDouble] = {
    // note: the logical to estimate column sizes of AggregateBatchExecBase is different from
    // Calcite Aggregate because AggregateBatchExecBase's rowTypes is not composed by
    // grouping columns + aggFunctionCall results
    val mapInputToOutput = (rel.getGrouping ++ rel.getAuxGrouping).zipWithIndex.toMap
    getColumnSizesFromInputOrType(rel, mq, mapInputToOutput)
  }

  def averageColumnSizes(rel: WindowAggregate, mq: RelMetadataQuery): JList[JDouble] = {
    averageColumnSizesOfWindowAgg(rel, mq)
  }

  def averageColumnSizes(
      rel: BatchExecWindowAggregateBase,
      mq: RelMetadataQuery): JList[JDouble] = {
    averageColumnSizesOfWindowAgg(rel, mq)
  }

  /** Builds the input-to-output index mapping for the supported window-agg nodes. */
  private def averageColumnSizesOfWindowAgg(
      windowAgg: SingleRel,
      mq: RelMetadataQuery): JList[JDouble] = {
    val mapInputToOutput: Map[Int, Int] = windowAgg match {
      case agg: WindowAggregate =>
        AggregateUtil.checkAndGetFullGroupSet(agg).zipWithIndex.toMap
      case agg: BatchExecLocalHashWindowAggregate =>
        // local win-agg output type: grouping + assignTs + auxGrouping + aggCalls
        agg.getGrouping.zipWithIndex.toMap ++
          agg.getAuxGrouping.zipWithIndex.map {
            case (k, v) => k -> (agg.getGrouping.length + 1 + v)
          }.toMap
      case agg: BatchExecLocalSortWindowAggregate =>
        // local win-agg output type: grouping + assignTs + auxGrouping + aggCalls
        agg.getGrouping.zipWithIndex.toMap ++
          agg.getAuxGrouping.zipWithIndex.map {
            case (k, v) => k -> (agg.getGrouping.length + 1 + v)
          }.toMap
      case agg: BatchExecWindowAggregateBase =>
        (agg.getGrouping ++ agg.getAuxGrouping).zipWithIndex.toMap
      case _ => throw new IllegalArgumentException(s"Unknown node type ${windowAgg.getRelTypeName}")
    }
    getColumnSizesFromInputOrType(windowAgg, mq, mapInputToOutput)
  }

  def averageColumnSizes(overWindow: Window, mq: RelMetadataQuery): JList[JDouble] =
    averageColumnSizesOfOverAgg(overWindow, mq)

  def averageColumnSizes(rel: BatchExecOverAggregate, mq: RelMetadataQuery): JList[JDouble] =
    averageColumnSizesOfOverAgg(rel, mq)

  private def averageColumnSizesOfOverAgg(
      overAgg: SingleRel,
      mq: RelMetadataQuery): JList[JDouble] = {
    // over-agg output starts with all input columns (identity mapping), then agg results
    val inputFieldCount = overAgg.getInput.getRowType.getFieldCount
    getColumnSizesFromInputOrType(overAgg, mq, (0 until inputFieldCount).zipWithIndex.toMap)
  }

  def averageColumnSizes(rel: Join, mq: RelMetadataQuery): JList[JDouble] = {
    val acsOfLeft = mq.getAverageColumnSizes(rel.getLeft)
    val acsOfRight = rel.getJoinType match {
      // SEMI/ANTI joins only output the left side
      case JoinRelType.SEMI | JoinRelType.ANTI => null
      case _ => mq.getAverageColumnSizes(rel.getRight)
    }
    if (acsOfLeft == null && acsOfRight == null) {
      null
    } else if (acsOfRight == null) {
      acsOfLeft
    } else if (acsOfLeft == null) {
      acsOfRight
    } else {
      val sizesBuilder = ImmutableNullableList.builder[JDouble]()
      sizesBuilder.addAll(acsOfLeft)
      sizesBuilder.addAll(acsOfRight)
      sizesBuilder.build()
    }
  }

  def averageColumnSizes(rel: Union, mq: RelMetadataQuery): JList[JDouble] = {
    val inputColumnSizeList = new JArrayList[JList[JDouble]]()
    rel.getInputs.foreach { input =>
      val inputSizes = mq.getAverageColumnSizes(input)
      if (inputSizes != null) {
        inputColumnSizeList.add(inputSizes)
      }
    }
    inputColumnSizeList.length match {
      case 0 => null // all were null
      case 1 => inputColumnSizeList.get(0) // all but one were null
      case _ =>
        // average each column across the inputs that know its size
        val sizes = ImmutableNullableList.builder[JDouble]()
        var nn = 0
        val fieldCount: Int = rel.getRowType.getFieldCount
        (0 until fieldCount).foreach { i =>
          var d = 0D
          var n = 0
          inputColumnSizeList.foreach { inputColumnSizes =>
            val d2 = inputColumnSizes.get(i)
            if (d2 != null) {
              d += d2
              n += 1
              nn += 1
            }
          }
          val size: JDouble = if (n > 0) d / n else null
          sizes.add(size)
        }
        if (nn == 0) {
          null // all columns are null
        } else {
          sizes.build()
        }
    }
  }

  def averageColumnSizes(rel: Intersect, mq: RelMetadataQuery): JList[JDouble] =
    mq.getAverageColumnSizes(rel.getInput(0))

  def averageColumnSizes(rel: Minus, mq: RelMetadataQuery): JList[JDouble] =
    mq.getAverageColumnSizes(rel.getInput(0))

  def averageColumnSizes(subset: RelSubset, mq: RelMetadataQuery): JList[JDouble] = {
    val rel = Util.first(subset.getBest, subset.getOriginal)
    mq.getAverageColumnSizes(rel)
  }

  def averageColumnSizes(rel: RelNode, mq: RelMetadataQuery): JList[JDouble] =
    rel.getRowType.getFieldList.map(f => FlinkRelMdSize.averageTypeValueSize(f.getType)).toList

  /** Estimates the average size of the value produced by a RexNode. */
  private def averageRexSize(node: RexNode, inputColumnSizes: JList[JDouble]): JDouble = {
    node match {
      case ref: RexInputRef => inputColumnSizes.get(ref.getIndex)
      case lit: RexLiteral => typeValueSize(node.getType, lit.getValueAs(classOf[Comparable[_]]))
      case call: RexCall =>
        // heuristic: reuse the size of the first operand with the same SQL type
        val nodeSqlTypeName = node.getType.getSqlTypeName
        val matchedOps = call.getOperands.filter(op => op.getType.getSqlTypeName eq nodeSqlTypeName)
        matchedOps.headOption match {
          case Some(op) => averageRexSize(op, inputColumnSizes)
          case _ => FlinkRelMdSize.averageTypeValueSize(node.getType)
        }
      case _ => FlinkRelMdSize.averageTypeValueSize(node.getType)
    }
  }

  /**
    * Estimates the average size (in bytes) of a value of a type.
    *
    * Nulls count as 1 byte.
    */
  private def typeValueSize(t: RelDataType, value: Comparable[_]): JDouble = {
    if (value == null) {
      return 1D
    }
    t.getSqlTypeName match {
      case SqlTypeName.BINARY | SqlTypeName.VARBINARY =>
        value.asInstanceOf[ByteString].length().toDouble
      case SqlTypeName.CHAR | SqlTypeName.VARCHAR =>
        value.asInstanceOf[NlsString].getValue.length * FlinkRelMdSize.BYTES_PER_CHARACTER.toDouble
      case _ => FlinkRelMdSize.averageTypeValueSize(t)
    }
  }

  /**
    * Gets each column size of rel output from input column size or from column type.
    * column size is from input column size if the column index is in `mapInputToOutput` keys,
    * otherwise from column type.
    */
  private def getColumnSizesFromInputOrType(
      rel: SingleRel,
      mq: RelMetadataQuery,
      mapInputToOutput: Map[Int, Int]): JList[JDouble] = {
    val outputIndices = mapInputToOutput.values
    require(outputIndices.forall(idx => rel.getRowType.getFieldCount > idx && idx >= 0))

    val inputIndices = mapInputToOutput.keys
    val input = rel.getInput
    // FIX: this validation was previously computed but its result was discarded;
    // wrap it in require so invalid mappings fail fast, like the output-side check above.
    require(inputIndices.forall(idx => input.getRowType.getFieldCount > idx && idx >= 0))

    val mapOutputToInput = mapInputToOutput.map(_.swap)
    val acsOfInput = mq.getAverageColumnSizesNotNull(input)
    val sizesBuilder = ImmutableList.builder[JDouble]()
    rel.getRowType.getFieldList.zipWithIndex.foreach {
      case (f, idx) =>
        val size = mapOutputToInput.get(idx) match {
          case Some(inputIdx) => acsOfInput.get(inputIdx)
          case _ => FlinkRelMdSize.averageTypeValueSize(f.getType)
        }
        sizesBuilder.add(size)
    }
    sizesBuilder.build()
  }

}
/**
 * Companion object: registers the reflective metadata provider and holds the
 * per-type default size estimates used when no statistics are available.
 */
object FlinkRelMdSize {

  private val INSTANCE = new FlinkRelMdSize

  // Bytes per character (2).
  val BYTES_PER_CHARACTER: Int = Character.SIZE / java.lang.Byte.SIZE

  val SOURCE: RelMetadataProvider = ReflectiveRelMetadataProvider.reflectiveSource(
    INSTANCE,
    BuiltInMethod.AVERAGE_COLUMN_SIZES.method,
    BuiltInMethod.AVERAGE_ROW_SIZE.method)

  /** Default size estimate for a value of the given relational type. */
  def averageTypeValueSize(t: RelDataType): JDouble = t.getSqlTypeName match {
    case SqlTypeName.ROW =>
      estimateRowSize(t)
    case SqlTypeName.ARRAY =>
      // 16 is an arbitrary estimate
      averageTypeValueSize(t.getComponentType) * 16
    case SqlTypeName.MAP =>
      // 16 is an arbitrary estimate
      (averageTypeValueSize(t.getKeyType) + averageTypeValueSize(t.getValueType)) * 16
    case SqlTypeName.MULTISET =>
      // 16 is an arbitrary estimate
      (averageTypeValueSize(t.getComponentType) + averageTypeValueSize(SqlTypeName.INTEGER)) * 16
    case _ => averageTypeValueSize(t.getSqlTypeName)
  }

  /** Row size = sum of the per-field default size estimates. */
  private def estimateRowSize(rowType: RelDataType): JDouble = {
    rowType.getFieldList.foldLeft(0.0) { (total, field) =>
      total + averageTypeValueSize(field.getType)
    }
  }

  /** Default size estimate (in bytes) for a value of the given SQL type name. */
  def averageTypeValueSize(sqlType: SqlTypeName): JDouble = sqlType match {
    case SqlTypeName.TINYINT => 1D
    case SqlTypeName.SMALLINT => 2D
    case SqlTypeName.INTEGER => 4D
    case SqlTypeName.BIGINT => 8D
    case SqlTypeName.BOOLEAN => 1D
    case SqlTypeName.FLOAT => 4D
    case SqlTypeName.DOUBLE => 8D
    case SqlTypeName.VARCHAR => 12D
    case SqlTypeName.CHAR => 1D
    case SqlTypeName.DECIMAL => 12D
    case typeName if SqlTypeName.YEAR_INTERVAL_TYPES.contains(typeName) => 8D
    case typeName if SqlTypeName.DAY_INTERVAL_TYPES.contains(typeName) => 4D
    // TODO after time/date => int, timestamp => long, this estimate value should update
    case SqlTypeName.TIME | SqlTypeName.TIMESTAMP |
         SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE | SqlTypeName.DATE => 12D
    case SqlTypeName.ANY => 128D // 128 is an arbitrary estimate
    case SqlTypeName.BINARY | SqlTypeName.VARBINARY => 16D // 16 is an arbitrary estimate
    case _ => throw new TableException(s"Unsupported data type encountered: $sqlType")
  }

}
| fhueske/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/metadata/FlinkRelMdSize.scala | Scala | apache-2.0 | 17,735 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.oap.expression
import com.google.common.collect.Lists
import com.google.common.collect.Sets
import org.apache.arrow.gandiva.evaluator._
import org.apache.arrow.gandiva.exceptions.GandivaException
import org.apache.arrow.gandiva.expression._
import org.apache.arrow.vector.types.pojo.ArrowType
import org.apache.arrow.vector.types.pojo.Field
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types._
import scala.collection.mutable.ListBuffer
class ColumnarInSet(value: Expression, hset: Set[Any], original: Expression)
    extends InSet(value: Expression, hset: Set[Any])
    with ColumnarExpression
    with Logging {

  /**
   * Builds the Gandiva tree node implementing the IN-set membership test for
   * the child expression.
   *
   * Only String, Integer and Long child types are supported; any other type
   * raises UnsupportedOperationException.
   *
   * @param args opaque code-generation context forwarded to the child
   * @return the generated tree node paired with the boolean result type
   */
  override def doColumnarCodeGen(args: java.lang.Object): (TreeNode, ArrowType) = {
    val (childNode, _) = value.asInstanceOf[ColumnarExpression].doColumnarCodeGen(args)
    val boolType = new ArrowType.Bool()
    value.dataType match {
      case StringType =>
        val candidates = Lists.newArrayList(hset.toList.map(_.toString): _*)
        (TreeBuilder.makeInExpressionString(childNode, Sets.newHashSet(candidates)), boolType)
      case IntegerType =>
        val candidates = Lists.newArrayList(hset.toList.map(_.asInstanceOf[Integer]): _*)
        (TreeBuilder.makeInExpressionInt32(childNode, Sets.newHashSet(candidates)), boolType)
      case LongType =>
        val candidates = Lists.newArrayList(hset.toList.map(_.asInstanceOf[java.lang.Long]): _*)
        (TreeBuilder.makeInExpressionBigInt(childNode, Sets.newHashSet(candidates)), boolType)
      case _ =>
        throw new UnsupportedOperationException(s"not currently supported: ${value.dataType}.")
    }
  }
}
object ColumnarInSetOperator {
  /**
   * Wraps `original` in a [[ColumnarInSet]] when it is an `InSet` expression;
   * any other expression is rejected.
   */
  def create(value: Expression, hset: Set[Any], original: Expression): Expression =
    original match {
      case inSet: InSet =>
        new ColumnarInSet(value, hset, inSet)
      case other =>
        throw new UnsupportedOperationException(s"not currently supported: $other.")
    }
}
| Intel-bigdata/OAP | oap-native-sql/core/src/main/scala/com/intel/oap/expression/ColumnarInSetOperator.scala | Scala | apache-2.0 | 3,113 |
package sgl
package analytics
import util.LoggingProvider
/** An implementation of the Analytics module that only logs.
*
* This implementation relies on the logging module to log each event but it
* does not send the data to an analysis service. One could technically
* collect the logs to extract the data, but more likely this can be used when
* one does not wish (or can't) to send analytics data.
*/
trait LoggedAnalyticsProvider extends AnalyticsProvider {
  this: GameStateComponent with LoggingProvider =>

  /**
   * Analytics implementation that emits each event to the logging module
   * instead of sending it to a remote analytics service.
   */
  class LoggedAnalytics extends Analytics {

    // All analytics events are logged under this tag so they are easy to filter.
    implicit val tag = Logger.Tag("analytics")

    /** Logs a custom event with its raw name and parameters. */
    override def logCustomEvent(name: String, params: EventParams): Unit = {
      logger.info(s"${name}: ${params}")
    }

    /** Logs a level-up event with the new level. */
    override def logLevelUpEvent(level: Long): Unit = {
      logger.info(s"level_up: {level=${level}}")
    }

    /** Logs the start of the named level. */
    override def logLevelStartEvent(level: String): Unit = {
      logger.info(s"level_start: {level=${level}}")
    }

    /** Logs the end of the named level along with the success outcome. */
    override def logLevelEndEvent(level: String, success: Boolean): Unit = {
      logger.info(s"level_end: {level=${level}, success=${success}}")
    }

    /** Logs a share event; itemId identifies the shared item when known. */
    override def logShareEvent(itemId: Option[String]): Unit = {
      logger.info(s"share: {item_id=${itemId}}")
    }

    /** Logs a game-over event with the optional final score and map. */
    override def logGameOverEvent(score: Option[Long], map: Option[String]): Unit = {
      logger.info(s"game_over: {score=${score}, map=${map}}")
    }

    /** Logs that the tutorial was started. */
    override def logBeginTutorialEvent(): Unit = {
      logger.info(s"begin_tutorial")
    }

    /** Logs that the tutorial was completed. */
    override def logCompleteTutorialEvent(): Unit = {
      logger.info(s"complete_tutorial")
    }

    /** Logs that the named achievement was unlocked. */
    override def logUnlockAchievementEvent(achievement: String): Unit = {
      logger.info(s"unlock_achievement: {achievement=${achievement}}")
    }

    /** Logs a posted score with the optional level and character. */
    override def logPostScoreEvent(score: Long, level: Option[Long], character: Option[String]): Unit = {
      logger.info(s"post_score: {score=${score}, level=${level}, character=${character}}")
    }

    /** Records the currently-displayed game screen. */
    override def setGameScreen(gameScreen: GameScreen): Unit = {
      logger.info(s"setting current game screen: $gameScreen")
    }

    /** Records a player property as a name/value pair. */
    override def setPlayerProperty(name: String, value: String): Unit = {
      logger.info(s"setting player property ${name}=${value}")
    }
  }

  // The single Analytics instance exposed by this provider.
  override val Analytics: Analytics = new LoggedAnalytics
}
| regb/scala-game-library | core/src/main/scala/sgl/analytics/LoggedAnalyticsProvider.scala | Scala | mit | 2,297 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.flatspec
import org.scalactic.{source, Prettifier}
import org.scalatest._
import Suite.anExceptionThatShouldCauseAnAbort
import Suite.autoTagClassAnnotations
import java.util.ConcurrentModificationException
import java.util.concurrent.atomic.AtomicReference
import org.scalatest.exceptions.StackDepthExceptionHelper.getStackDepth
import verbs.{ResultOfTaggedAsInvocation, ResultOfStringPassedToVerb, BehaveWord, ShouldVerb, MustVerb, CanVerb, StringVerbStringInvocation, StringVerbBehaveLikeInvocation}
/**
* Implementation trait for class <code>AnyFlatSpec</code>, which facilitates a
* “behavior-driven” style of development (BDD), in which tests
* are combined with text that specifies the behavior the tests verify.
*
* <p>
* <a href="AnyFlatSpec.html"><code>AnyFlatSpec</code></a> is a class, not a trait,
* to minimize compile time given there is a slight compiler overhead to
* mixing in traits compared to extending classes. If you need to mix the
* behavior of <code>AnyFlatSpec</code> into some other class, you can use this
* trait instead, because class <code>AnyFlatSpec</code> does nothing more than
* extend this trait and add a nice <code>toString</code> implementation.
* </p>
*
* <p>
* See the documentation of the class for a <a href="AnyFlatSpec.html">detailed
* overview of <code>AnyFlatSpec</code></a>.
* </p>
*
* @author Bill Venners
*/
@Finders(Array("org.scalatest.finders.FlatSpecFinder"))
//SCALATESTJS-ONLY @scala.scalajs.reflect.annotation.EnableReflectiveInstantiation
//SCALATESTNATIVE-ONLY @scala.scalajs.reflect.annotation.EnableReflectiveInstantiation
trait AnyFlatSpecLike extends TestSuite with TestRegistration with ShouldVerb with MustVerb with CanVerb with Informing with Notifying with Alerting with Documenting { thisSuite =>
// Engine holding all registration/running state for this suite; "Spec" is the
// simple-name suffix used in engine error messages.
private final val engine = new Engine(Resources.concurrentSpecMod, "Spec")
import engine._
/**
* Returns an <code>Informer</code> that during test execution will forward strings passed to its
* <code>apply</code> method to the current reporter. If invoked in a constructor, it
* will register the passed string for forwarding later during test execution. If invoked from inside a scope,
* it will forward the information to the current reporter immediately. If invoked from inside a test function,
* it will record the information and forward it to the current reporter only after the test completed, as <code>recordedEvents</code>
* of the test completed event, such as <code>TestSucceeded</code>. If invoked at any other time, it will print to the standard output.
* This method can be called safely by any thread.
*/
protected def info: Informer = atomicInformer.get // thread-safe: current informer held in an atomic reference
/**
* Returns a <code>Notifier</code> that during test execution will forward strings passed to its
* <code>apply</code> method to the current reporter. If invoked in a constructor, it
* will register the passed string for forwarding later during test execution. If invoked while this
* <code>AnyFlatSpec</code> is being executed, such as from inside a test function, it will forward the information to
* the current reporter immediately. If invoked at any other time, it will
* print to the standard output. This method can be called safely by any thread.
*/
protected def note: Notifier = atomicNotifier.get // thread-safe: current notifier held in an atomic reference
/**
* Returns an <code>Alerter</code> that during test execution will forward strings passed to its
* <code>apply</code> method to the current reporter. If invoked in a constructor, it
* will register the passed string for forwarding later during test execution. If invoked while this
* <code>AnyFlatSpec</code> is being executed, such as from inside a test function, it will forward the information to
* the current reporter immediately. If invoked at any other time, it will
* print to the standard output. This method can be called safely by any thread.
*/
protected def alert: Alerter = atomicAlerter.get // thread-safe: current alerter held in an atomic reference
/**
* Returns a <code>Documenter</code> that during test execution will forward strings passed to its
* <code>apply</code> method to the current reporter. If invoked in a constructor, it
* will register the passed string for forwarding later during test execution. If invoked from inside a scope,
* it will forward the information to the current reporter immediately. If invoked from inside a test function,
* it will record the information and forward it to the current reporter only after the test completed, as <code>recordedEvents</code>
* of the test completed event, such as <code>TestSucceeded</code>. If invoked at any other time, it will print to the standard output.
* This method can be called safely by any thread.
*/
protected def markup: Documenter = atomicDocumenter.get // thread-safe: current documenter held in an atomic reference
/**
 * Shared implementation behind the public `registerTest` variants: registers
 * the test with the engine, recording the caller's source position `pos` for
 * failure reporting.
 */
private final def registerTestImpl(testText: String, testTags: Tag*)(testFun: => Any /* Assertion */, pos: source.Position): Unit = {
  // SKIP-SCALATESTJS,NATIVE-START
  // Per-platform stack-depth adjustment so reported failure locations point at user code.
  val stackDepthAdjustment = -1
  // SKIP-SCALATESTJS,NATIVE-END
  //SCALATESTJS,NATIVE-ONLY val stackDepthAdjustment = -4
  engine.registerTest(testText, Transformer(() => testFun), Resources.testCannotBeNestedInsideAnotherTest, "AnyFlatSpecLike.scala", "registerTest", 4, stackDepthAdjustment, None, None, Some(pos), None, testTags: _*)
}
// SKIP-DOTTY-START
/**
 * Registers a test with the given text, optional tags, and body for later
 * execution (Scala 2 entry point; the Dotty variant is defined below).
 */
final def registerTest(testText: String, testTags: Tag*)(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
  registerTestImpl(testText, testTags: _*)(testFun, pos)
}
// SKIP-DOTTY-END
//DOTTY-ONLY inline def registerTest(testText: String, testTags: Tag*)(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
//DOTTY-ONLY ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestImpl(testText, testTags: _*)(testFun, pos) }) }
//DOTTY-ONLY }
/**
 * Shared implementation behind the public `registerIgnoredTest` variants:
 * registers the test as ignored, recording the caller's source position.
 */
private final def registerIgnoredTestImpl(testText: String, testTags: Tag*)(testFun: => Any /* Assertion */, pos: source.Position): Unit = {
  // SKIP-SCALATESTJS,NATIVE-START
  // Per-platform stack-depth adjustment so reported locations point at user code.
  val stackDepthAdjustment = -3
  // SKIP-SCALATESTJS,NATIVE-END
  //SCALATESTJS,NATIVE-ONLY val stackDepthAdjustment = -4
  engine.registerIgnoredTest(testText, Transformer(() => testFun), Resources.testCannotBeNestedInsideAnotherTest, "AnyFlatSpecLike.scala", "registerIgnoredTest", 4, stackDepthAdjustment, None, Some(pos), testTags: _*)
}
// SKIP-DOTTY-START
/**
 * Registers an ignored test with the given text, optional tags, and body
 * (Scala 2 entry point; the Dotty variant is defined below).
 */
final def registerIgnoredTest(testText: String, testTags: Tag*)(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
  registerIgnoredTestImpl(testText, testTags: _*)(testFun, pos)
}
// SKIP-DOTTY-END
//DOTTY-ONLY inline def registerIgnoredTest(testText: String, testTags: Tag*)(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
//DOTTY-ONLY ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerIgnoredTestImpl(testText, testTags: _*)(testFun, pos) }) }
//DOTTY-ONLY }
/**
* Register a test with the given spec text, optional tags, and test function value that takes no arguments.
* An invocation of this method is called an “example.”
*
* This method will register the test for later execution via an invocation of one of the <code>execute</code>
* methods. The name of the test will be a concatenation of the text of all surrounding describers,
* from outside in, and the passed spec text, with one space placed between each item. (See the documenation
* for <code>testNames</code> for an example.) The resulting test name must not have been registered previously on
* this <code>AnyFlatSpec</code> instance.
*
* @param specText the specification text, which will be combined with the descText of any surrounding describers
* to form the test name
* @param methodName Method name of the caller
* @param testTags the optional list of tags for this test
* @param testFun the test function
* @throws DuplicateTestNameException if a test with the same name has been registered previously
* @throws TestRegistrationClosedException if invoked after <code>run</code> has been invoked on this suite
* @throws NullArgumentException if <code>specText</code> or any passed test tag is <code>null</code>
*/
private def registerTestToRun(specText: String, methodName: String, testTags: List[Tag], testFun: () => Any /* Assertion */, pos: source.Position): Unit = {
  // SKIP-SCALATESTJS,NATIVE-START
  // Per-platform stack depth and adjustment so reported locations point at user code.
  val stackDepth = 4
  val stackDepthAdjustment = -3
  // SKIP-SCALATESTJS,NATIVE-END
  //SCALATESTJS,NATIVE-ONLY val stackDepth = 6
  //SCALATESTJS,NATIVE-ONLY val stackDepthAdjustment = -6
  // methodName is expected to be "in" or "is" only; callers in this trait pass
  // nothing else (any other value would raise a MatchError here).
  def testRegistrationClosedMessageFun: String =
    methodName match {
      case "in" => Resources.inCannotAppearInsideAnotherInOrIs
      case "is" => Resources.isCannotAppearInsideAnotherInOrIs
    }
  engine.registerTest(specText, Transformer(testFun), testRegistrationClosedMessageFun, "AnyFlatSpecLike.scala", methodName, stackDepth, stackDepthAdjustment, None, None, Some(pos), None, testTags: _*)
}
/**
* Class that supports the registration of a “subject” being specified and tested via the
* instance referenced from <code>AnyFlatSpec</code>'s <code>behavior</code> field.
*
* <p>
* This field enables syntax such as the following subject registration:
* </p>
*
* <pre class="stHighlight">
* behavior of "A Stack"
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>behavior</code> field, see the <a href="AnyFlatSpec.html">main documentation</a>
* for trait <code>AnyFlatSpec</code>.
* </p>
*/
protected final class BehaviorWord {

  // Registers `description` as a new flat branch (the "subject") with the engine.
  private final def ofImpl(description: String, pos: source.Position): Unit = {
    // SKIP-SCALATESTJS,NATIVE-START
    // Per-platform stack depth so reported locations point at user code.
    val stackDepth = 3
    // SKIP-SCALATESTJS,NATIVE-END
    //SCALATESTJS,NATIVE-ONLY val stackDepth = 5
    registerFlatBranch(description, Resources.behaviorOfCannotAppearInsideAnIn, "AnyFlatSpecLike.scala", "of", stackDepth, 0, Some(pos))
  }

  /**
   * Supports the registration of a "subject" being specified and tested via the
   * instance referenced from <code>AnyFlatSpec</code>'s <code>behavior</code> field.
   *
   * <p>
   * This method enables syntax such as the following subject registration:
   * </p>
   *
   * <pre class="stHighlight">
   * behavior of "A Stack"
   *          ^
   * </pre>
   *
   * <p>
   * For more information and examples of the use of this method, see the <a href="AnyFlatSpec.html">main documentation</a>
   * for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  // SKIP-DOTTY-START
  def of(description: String)(implicit pos: source.Position): Unit = {
    ofImpl(description, pos)
  }
  // SKIP-DOTTY-END
  //DOTTY-ONLY inline def of(description: String): Unit = {
  //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => ofImpl(description, pos) }) }
  //DOTTY-ONLY }
}
/**
* Supports the registration of a “subject” being specified and tested.
*
* <p>
* This field enables syntax such as the following subject registration:
* </p>
*
* <pre class="stHighlight">
* behavior of "A Stack"
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>behavior</code> field, see the main documentation
* for this trait.
* </p>
*/
protected val behavior = new BehaviorWord // entry point for `behavior of "..."` subject registration
/**
* Class that supports the registration of tagged tests via the <code>ItWord</code> instance
* referenced from <code>AnyFlatSpec</code>'s <code>it</code> field.
*
* <p>
* This class enables syntax such as the following tagged test registration:
* </p>
*
* <pre class="stHighlight">
* it should "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
* ^
* </pre>
*
* <p>
* It also enables syntax such as the following registration of an ignored, tagged test:
* </p>
*
* <pre class="stHighlight">
* it should "pop values in last-in-first-out order" taggedAs(SlowTest) ignore { ... }
* ^
* </pre>
*
* <p>
* In addition, it enables syntax such as the following registration of a pending, tagged test:
* </p>
*
* <pre class="stHighlight">
* it should "pop values in last-in-first-out order" taggedAs(SlowTest) is (pending)
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>it</code> field to register tagged tests, see
* the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
* For examples of tagged test registration, see
* the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
* </p>
*/
// verb is "should"/"must"/"can"; name is the rest of the test text; tags are
// the tags previously supplied to taggedAs.
protected final class ItVerbStringTaggedAs(verb: String, name: String, tags: List[Tag]) {

  /**
   * Supports the registration of tagged tests in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it must "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
   *                                                                    ^
   * </pre>
   *
   * <p>
   * For examples of tagged test registration, see
   * the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  // SKIP-DOTTY-START
  def in(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
    registerTestToRun(verb.trim + " " + name.trim, "in", tags, () => testFun, pos)
  }
  // SKIP-DOTTY-END
  //DOTTY-ONLY inline def in(testFun: => Any /* Assertion */): Unit = {
  //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToRun(verb.trim + " " + name.trim, "in", tags, () => testFun, pos) }) }
  //DOTTY-ONLY }

  /**
   * Supports the registration of pending, tagged tests in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it must "pop values in last-in-first-out order" taggedAs(SlowTest) is (pending)
   *                                                                    ^
   * </pre>
   *
   * <p>
   * For examples of pending test registration, see the <a href="AnyFlatSpec.html#pendingTests">Pending tests section</a> in the main documentation
   * for trait <code>AnyFlatSpec</code>.  And for examples of tagged test registration, see
   * the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  // SKIP-DOTTY-START
  def is(testFun: => PendingStatement)(implicit pos: source.Position): Unit = {
    registerTestToRun(verb.trim + " " + name.trim, "is", tags, () => { testFun; succeed }, pos)
  }
  // SKIP-DOTTY-END
  //DOTTY-ONLY inline def is(testFun: => PendingStatement): Unit = {
  //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToRun(verb.trim + " " + name.trim, "is", tags, () => { testFun; succeed }, pos) }) }
  //DOTTY-ONLY }

  /**
   * Supports the registration of ignored, tagged tests in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it must "pop values in last-in-first-out order" taggedAs(SlowTest) ignore { ... }
   *                                                                    ^
   * </pre>
   *
   * <p>
   * For examples of ignored test registration, see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a> in the main documentation
   * for trait <code>AnyFlatSpec</code>.  And for examples of tagged test registration, see
   * the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  // SKIP-DOTTY-START
  def ignore(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
    registerTestToIgnore(verb.trim + " " + name.trim, tags, "ignore", () => testFun, pos)
  }
  // SKIP-DOTTY-END
  //DOTTY-ONLY inline def ignore(testFun: => Any /* Assertion */): Unit = {
  //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToIgnore(verb.trim + " " + name.trim, tags, "ignore", () => testFun, pos) }) }
  //DOTTY-ONLY }
}
/**
* Class that supports test registration via the <code>ItWord</code> instance referenced from <code>AnyFlatSpec</code>'s <code>it</code> field.
*
* <p>
* This class enables syntax such as the following test registration:
* </p>
*
* <pre class="stHighlight">
* it should "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* It also enables syntax such as the following registration of an ignored test:
* </p>
*
* <pre class="stHighlight">
* it should "pop values in last-in-first-out order" ignore { ... }
* ^
* </pre>
*
* <p>
* In addition, it enables syntax such as the following registration of a pending test:
* </p>
*
* <pre class="stHighlight">
* it should "pop values in last-in-first-out order" is (pending)
* ^
* </pre>
*
* <p>
* And finally, it also enables syntax such as the following tagged test registration:
* </p>
*
* <pre class="stHighlight">
* it should "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>it</code> field, see the <a href="AnyFlatSpec.html">main documentation</a>
* for trait <code>AnyFlatSpec</code>.
* </p>
*/
// verb is "should"/"must"/"can"; name is the rest of the test text.
protected final class ItVerbString(verb: String, name: String) {

  /**
   * Supports the registration of tests in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it must "pop values in last-in-first-out order" in { ... }
   *                                                 ^
   * </pre>
   *
   * <p>
   * For examples of test registration, see the <a href="AnyFlatSpec.html">main documentation</a>
   * for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  // SKIP-DOTTY-START
  def in(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
    registerTestToRun(verb.trim + " " + name.trim, "in", List(), () => testFun, pos)
  }
  // SKIP-DOTTY-END
  //DOTTY-ONLY inline def in(testFun: => Any /* Assertion */): Unit = {
  //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToRun(verb.trim + " " + name.trim, "in", List(), () => testFun, pos) }) }
  //DOTTY-ONLY }

  /**
   * Supports the registration of pending tests in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it must "pop values in last-in-first-out order" is (pending)
   *                                                 ^
   * </pre>
   *
   * <p>
   * For examples of pending test registration, see the <a href="AnyFlatSpec.html#pendingTests">Pending tests section</a> in the main documentation
   * for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  // SKIP-DOTTY-START
  def is(testFun: => PendingStatement)(implicit pos: source.Position): Unit = {
    registerTestToRun(verb.trim + " " + name.trim, "is", List(), () => { testFun; succeed }, pos)
  }
  // SKIP-DOTTY-END
  //DOTTY-ONLY inline def is(testFun: => PendingStatement): Unit = {
  //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToRun(verb.trim + " " + name.trim, "is", List(), () => { testFun; succeed }, pos) }) }
  //DOTTY-ONLY }

  /**
   * Supports the registration of ignored tests in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it must "pop values in last-in-first-out order" ignore { ... }
   *                                                 ^
   * </pre>
   *
   * <p>
   * For examples of ignored test registration, see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a> in the main documentation
   * for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  // SKIP-DOTTY-START
  def ignore(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
    registerTestToIgnore(verb.trim + " " + name.trim, List(), "ignore", () => testFun, pos)
  }
  // SKIP-DOTTY-END
  //DOTTY-ONLY inline def ignore(testFun: => Any /* Assertion */): Unit = {
  //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToIgnore(verb.trim + " " + name.trim, List(), "ignore", () => testFun, pos) }) }
  //DOTTY-ONLY }

  /**
   * Supports the registration of tagged tests in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it must "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
   *                                                 ^
   * </pre>
   *
   * <p>
   * For examples of tagged test registration, see the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation
   * for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  def taggedAs(firstTestTag: Tag, otherTestTags: Tag*) = {
    // Carries the accumulated verb/name forward with the supplied tags.
    val tagList = firstTestTag :: otherTestTags.toList
    new ItVerbStringTaggedAs(verb, name, tagList)
  }
}
/**
* Class that supports test (and shared test) registration via the instance referenced from <code>AnyFlatSpec</code>'s <code>it</code> field.
*
* <p>
* This class enables syntax such as the following test registration:
* </p>
*
* <pre class="stHighlight">
* it should "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* It also enables syntax such as the following shared test registration:
* </p>
*
* <pre class="stHighlight">
* it should behave like nonEmptyStack(lastItemPushed)
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>it</code> field, see the main documentation
* for this trait.
* </p>
*/
// The word behind the `it` field: each verb method returns an ItVerbString
// (for test registration) or forwards a BehaveWord (for shared tests).
protected final class ItWord {

  /**
   * Supports the registration of tests with <code>should</code> in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it should "pop values in last-in-first-out order" in { ... }
   *    ^
   * </pre>
   *
   * <p>
   * For examples of test registration, see the <a href="AnyFlatSpec.html">main documentation</a>
   * for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  def should(string: String) = new ItVerbString("should", string)

  /**
   * Supports the registration of tests with <code>must</code> in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it must "pop values in last-in-first-out order" in { ... }
   *    ^
   * </pre>
   *
   * <p>
   * For examples of test registration, see the <a href="AnyFlatSpec.html">main documentation</a>
   * for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  def must(string: String) = new ItVerbString("must", string)

  /**
   * Supports the registration of tests with <code>can</code> in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it can "pop values in last-in-first-out order" in { ... }
   *    ^
   * </pre>
   *
   * <p>
   * For examples of test registration, see the <a href="AnyFlatSpec.html">main documentation</a>
   * for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  def can(string: String) = new ItVerbString("can", string)

  /**
   * Supports the registration of shared tests with <code>should</code> in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it should behave like nonFullStack(stackWithOneItem)
   *    ^
   * </pre>
   *
   * <p>
   * For examples of shared tests, see the <a href="AnyFlatSpec.html#sharedTests">Shared tests section</a>
   * in the main documentation for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  def should(behaveWord: BehaveWord) = behaveWord

  /**
   * Supports the registration of shared tests with <code>must</code> in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it must behave like nonFullStack(stackWithOneItem)
   *    ^
   * </pre>
   *
   * <p>
   * For examples of shared tests, see the <a href="AnyFlatSpec.html#sharedTests">Shared tests section</a>
   * in the main documentation for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  def must(behaveWord: BehaveWord) = behaveWord

  /**
   * Supports the registration of shared tests with <code>can</code> in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * it can behave like nonFullStack(stackWithOneItem)
   *    ^
   * </pre>
   *
   * <p>
   * For examples of shared tests, see the <a href="AnyFlatSpec.html#sharedTests">Shared tests section</a>
   * in the main documentation for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  def can(behaveWord: BehaveWord) = behaveWord
}
/**
* Supports test (and shared test) registration in <code>AnyFlatSpec</code>s.
*
* <p>
* This field enables syntax such as the following test registration:
* </p>
*
* <pre class="stHighlight">
* it should "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* It also enables syntax such as the following shared test registration:
* </p>
*
* <pre class="stHighlight">
* it should behave like nonEmptyStack(lastItemPushed)
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>it</code> field, see the main documentation
* for this trait.
* </p>
*/
protected val it = new ItWord // entry point for `it should/must/can ...` test registration
/**
* Class that supports registration of ignored, tagged tests via the <code>IgnoreWord</code> instance referenced
* from <code>AnyFlatSpec</code>'s <code>ignore</code> field.
*
* <p>
* This class enables syntax such as the following registration of an ignored, tagged test:
* </p>
*
* <pre class="stHighlight">
* ignore should "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
* ^
* </pre>
*
* <p>
* In addition, it enables syntax such as the following registration of an ignored, tagged, pending test:
* </p>
*
* <pre class="stHighlight">
* ignore should "pop values in last-in-first-out order" taggedAs(SlowTest) is (pending)
* ^
* </pre>
*
* <p>
* Note: the <code>is</code> method is provided for completeness and design symmetry, given there's no way
* to prevent changing <code>is</code> to <code>ignore</code> and marking a pending test as ignored that way.
* Although it isn't clear why someone would want to mark a pending test as ignored, it can be done.
* </p>
*
* <p>
* For more information and examples of the use of the <code>ignore</code> field, see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
* in the main documentation for trait <code>AnyFlatSpec</code>. For examples of tagged test registration, see
* the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
* </p>
*/
// verb is "should"/"must"/"can"; name is the rest of the test text; tags are
// the tags previously supplied to taggedAs. All registrations here are ignored.
protected final class IgnoreVerbStringTaggedAs(verb: String, name: String, tags: List[Tag]) {

  /**
   * Supports the registration of ignored, tagged tests in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * ignore must "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
   *                                                                        ^
   * </pre>
   *
   * <p>
   * For examples of the registration of ignored tests, see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
   * in the main documentation for trait <code>AnyFlatSpec</code>.  For examples of tagged test registration, see
   * the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  // SKIP-DOTTY-START
  def in(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
    registerTestToIgnore(verb.trim + " " + name.trim, tags, "in", () => testFun, pos)
  }
  // SKIP-DOTTY-END
  //DOTTY-ONLY inline def in(testFun: => Any /* Assertion */): Unit = {
  //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToIgnore(verb.trim + " " + name.trim, tags, "in", () => testFun, pos) }) }
  //DOTTY-ONLY }

  /**
   * Supports the registration of ignored, tagged, pending tests in a <code>AnyFlatSpec</code>.
   *
   * <p>
   * This method supports syntax such as the following:
   * </p>
   *
   * <pre class="stHighlight">
   * ignore must "pop values in last-in-first-out order" taggedAs(SlowTest) is (pending)
   *                                                                        ^
   * </pre>
   *
   * <p>
   * Note: this <code>is</code> method is provided for completeness and design symmetry, given there's no way
   * to prevent changing <code>is</code> to <code>ignore</code> and marking a pending test as ignored that way.
   * Although it isn't clear why someone would want to mark a pending test as ignored, it can be done.
   * </p>
   *
   * <p>
   * For examples of pending test registration, see the <a href="AnyFlatSpec.html#pendingTests">Pending tests section</a> in the main documentation
   * for trait <code>AnyFlatSpec</code>.  For examples of the registration of ignored tests,
   * see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
   * in the main documentation for trait <code>AnyFlatSpec</code>.  For examples of tagged test registration, see
   * the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
   * </p>
   */
  // SKIP-DOTTY-START
  def is(testFun: => PendingStatement)(implicit pos: source.Position): Unit = {
    registerTestToIgnore(verb.trim + " " + name.trim, tags, "is", () => { testFun; succeed }, pos)
  }
  // SKIP-DOTTY-END
  //DOTTY-ONLY inline def is(testFun: => PendingStatement): Unit = {
  //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToIgnore(verb.trim + " " + name.trim, tags, "is", () => { testFun; succeed }, pos) }) }
  //DOTTY-ONLY }

  // Note: no def ignore here, so you can't put two ignores in the same line
}
/**
* Class that supports registration of ignored tests via the <code>IgnoreWord</code> instance referenced
* from <code>AnyFlatSpec</code>'s <code>ignore</code> field.
*
* <p>
* This class enables syntax such as the following registration of an ignored test:
* </p>
*
* <pre class="stHighlight">
* ignore should "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* In addition, it enables syntax such as the following registration of an ignored, pending test:
* </p>
*
* <pre class="stHighlight">
* ignore should "pop values in last-in-first-out order" is (pending)
* ^
* </pre>
*
* <p>
* Note: the <code>is</code> method is provided for completeness and design symmetry, given there's no way
* to prevent changing <code>is</code> to <code>ignore</code> and marking a pending test as ignored that way.
* Although it isn't clear why someone would want to mark a pending test as ignored, it can be done.
* </p>
*
* <p>
* And finally, it also enables syntax such as the following ignored, tagged test registration:
* </p>
*
* <pre class="stHighlight">
* ignore should "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>ignore</code> field, see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
* in the main documentation for trait <code>AnyFlatSpec</code>.
* </p>
*/
  // Result of `ignore <verb> "<name>"`: completes registration via `in`/`is`, or accumulates
  // tags via `taggedAs` (which returns an IgnoreVerbStringTaggedAs).
  protected final class IgnoreVerbString(verb: String, name: String) {
    /**
     * Supports the registration of ignored tests in a <code>AnyFlatSpec</code>.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * ignore must "pop values in last-in-first-out order" in { ... }
     *                                                     ^
     * </pre>
     *
     * <p>
     * For examples of the registration of ignored tests, see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
     * in the main documentation for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    // SKIP-DOTTY-START
    def in(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
      // Untagged variant: registers with an empty tag list.
      registerTestToIgnore(verb.trim + " " + name.trim, List(), "in", () => testFun, pos)
    }
    // SKIP-DOTTY-END
    //DOTTY-ONLY inline def in(testFun: => Any /* Assertion */): Unit = {
    //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToIgnore(verb.trim + " " + name.trim, List(), "in", () => testFun, pos) }) }
    //DOTTY-ONLY }
    /**
     * Supports the registration of ignored, pending tests in a <code>AnyFlatSpec</code>.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * ignore must "pop values in last-in-first-out order" is (pending)
     *                                                     ^
     * </pre>
     *
     * <p>
     * Note: this <code>is</code> method is provided for completeness and design symmetry, given there's no way
     * to prevent changing <code>is</code> to <code>ignore</code> and marking a pending test as ignored that way.
     * Although it isn't clear why someone would want to mark a pending test as ignored, it can be done.
     * </p>
     *
     * <p>
     * For examples of pending test registration, see the <a href="AnyFlatSpec.html#pendingTests">Pending tests section</a> in the main documentation
     * for trait <code>AnyFlatSpec</code>. For examples of the registration of ignored tests,
     * see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
     * in the main documentation for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    // SKIP-DOTTY-START
    def is(testFun: => PendingStatement)(implicit pos: source.Position): Unit = {
      // Evaluate the by-name pending statement, then yield `succeed` so the thunk has an Assertion result.
      registerTestToIgnore(verb.trim + " " + name.trim, List(), "is", () => { testFun; succeed }, pos)
    }
    // SKIP-DOTTY-END
    //DOTTY-ONLY inline def is(testFun: => PendingStatement): Unit = {
    //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToIgnore(verb.trim + " " + name.trim, List(), "is", () => { testFun; succeed }, pos) }) }
    //DOTTY-ONLY }
    /**
     * Supports the registration of ignored, tagged tests in a <code>AnyFlatSpec</code>.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * ignore must "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
     *                                                     ^
     * </pre>
     *
     * <p>
     * For examples of tagged test registration, see the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation
     * for trait <code>AnyFlatSpec</code>. For examples of the registration of ignored tests,
     * see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
     * in the main documentation for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    def taggedAs(firstTestTag: Tag, otherTestTags: Tag*) = {
      // At least one tag is required by the signature; varargs allow any number of additional tags.
      val tagList = firstTestTag :: otherTestTags.toList
      new IgnoreVerbStringTaggedAs(verb, name, tagList)
    }
  }
/**
* Class that supports registration of ignored tests via the <code>ItWord</code> instance
* referenced from <code>AnyFlatSpec</code>'s <code>ignore</code> field.
*
* <p>
* This class enables syntax such as the following registration of an ignored test:
* </p>
*
* <pre class="stHighlight">
* ignore should "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>ignore</code> field, see <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
* in the main documentation for this trait.
* </p>
*/
protected final class IgnoreWord {
/**
* Supports the registration of ignored tests with <code>should</code> in a <code>AnyFlatSpec</code>.
*
* <p>
* This method supports syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* ignore should "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>ignore</code> field, see <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
* in the main documentation for trait <code>AnyFlatSpec</code>.
* </p>
*/
def should(string: String) = new IgnoreVerbString("should", string)
/**
* Supports the registration of ignored tests with <code>must</code> in a <code>AnyFlatSpec</code>.
*
* <p>
* This method supports syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* ignore must "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>ignore</code> field, see <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
* in the main documentation for trait <code>AnyFlatSpec</code>.
* </p>
*/
def must(string: String) = new IgnoreVerbString("must", string)
/**
* Supports the registration of ignored tests with <code>can</code> in a <code>AnyFlatSpec</code>.
*
* <p>
* This method supports syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* ignore can "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>ignore</code> field, see <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
* in the main documentation for trait <code>AnyFlatSpec</code>.
* </p>
*/
def can(string: String) = new IgnoreVerbString("can", string)
}
/**
* Supports registration of ignored tests in <code>AnyFlatSpec</code>s.
*
* <p>
* This field enables syntax such as the following registration of an ignored test:
* </p>
*
* <pre class="stHighlight">
* ignore should "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>ignore</code> field, see the <a href="#ignoredTests">Ignored tests section</a>
* in the main documentation for this trait.
* </p>
*/
protected val ignore = new IgnoreWord
/**
* Class that supports the registration of tagged tests via the <code>TheyWord</code> instance
* referenced from <code>AnyFlatSpec</code>'s <code>they</code> field.
*
* <p>
* This class enables syntax such as the following tagged test registration:
* </p>
*
* <pre class="stHighlight">
* they should "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
* ^
* </pre>
*
* <p>
* It also enables syntax such as the following registration of an ignored, tagged test:
* </p>
*
* <pre class="stHighlight">
* they should "pop values in last-in-first-out order" taggedAs(SlowTest) ignore { ... }
* ^
* </pre>
*
* <p>
* In addition, it enables syntax such as the following registration of a pending, tagged test:
* </p>
*
* <pre class="stHighlight">
* they should "pop values in last-in-first-out order" taggedAs(SlowTest) is (pending)
* ^
* </pre>
*
* <p>
* For more information and examples of the use of the <code>they</code> field to register tagged tests, see
* the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
* For examples of tagged test registration, see
* the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
* </p>
*/
  // Result of `they <verb> "<name>" taggedAs(...)`: carries the verb, subject string, and
  // accumulated tags until `in`, `is`, or `ignore` completes the registration.
  protected final class TheyVerbStringTaggedAs(verb: String, name: String, tags: List[Tag]) {
    /**
     * Supports the registration of tagged tests in a <code>AnyFlatSpec</code>.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * they must "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
     *                                                                      ^
     * </pre>
     *
     * <p>
     * For examples of tagged test registration, see
     * the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    // SKIP-DOTTY-START
    def in(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
      // The registered test name is "<verb> <name>"; "in" identifies which registration method was used.
      registerTestToRun(verb.trim + " " + name.trim, "in", tags, () => testFun, pos)
    }
    // SKIP-DOTTY-END
    //DOTTY-ONLY inline def in(testFun: => Any /* Assertion */): Unit = {
    //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToRun(verb.trim + " " + name.trim, "in", tags, () => testFun, pos) }) }
    //DOTTY-ONLY }
    /**
     * Supports the registration of pending, tagged tests in a <code>AnyFlatSpec</code>.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * they must "pop values in last-in-first-out order" taggedAs(SlowTest) is (pending)
     *                                                                      ^
     * </pre>
     *
     * <p>
     * For examples of pending test registration, see the <a href="AnyFlatSpec.html#pendingTests">Pending tests section</a> in the main documentation
     * for trait <code>AnyFlatSpec</code>. And for examples of tagged test registration, see
     * the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    // SKIP-DOTTY-START
    def is(testFun: => PendingStatement)(implicit pos: source.Position): Unit = {
      // Evaluate the by-name pending statement, then yield `succeed` so the thunk has an Assertion result.
      registerTestToRun(verb.trim + " " + name.trim, "is", tags, () => { testFun; succeed }, pos)
    }
    // SKIP-DOTTY-END
    //DOTTY-ONLY inline def is(testFun: => PendingStatement): Unit = {
    //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToRun(verb.trim + " " + name.trim, "is", tags, () => { testFun; succeed }, pos) }) }
    //DOTTY-ONLY }
    /**
     * Supports the registration of ignored, tagged tests in a <code>AnyFlatSpec</code>.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * they must "pop values in last-in-first-out order" taggedAs(SlowTest) ignore { ... }
     *                                                                      ^
     * </pre>
     *
     * <p>
     * For examples of ignored test registration, see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a> in the main documentation
     * for trait <code>AnyFlatSpec</code>. And for examples of tagged test registration, see
     * the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    // SKIP-DOTTY-START
    def ignore(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
      // Note the argument-order difference: registerTestToIgnore takes (name, tags, methodName, ...)
      // whereas registerTestToRun takes (name, methodName, tags, ...).
      registerTestToIgnore(verb.trim + " " + name.trim, tags, "ignore", () => testFun, pos)
    }
    // SKIP-DOTTY-END
    //DOTTY-ONLY inline def ignore(testFun: => Any /* Assertion */): Unit = {
    //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToIgnore(verb.trim + " " + name.trim, tags, "ignore", () => testFun, pos) }) }
    //DOTTY-ONLY }
  }
/**
* Class that supports test registration via the <code>TheyWord</code> instance referenced from <code>AnyFlatSpec</code>'s <code>they</code> field.
*
* <p>
* This class enables syntax such as the following test registration:
* </p>
*
* <pre class="stHighlight">
* they should "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* It also enables syntax such as the following registration of an ignored test:
* </p>
*
* <pre class="stHighlight">
* they should "pop values in last-in-first-out order" ignore { ... }
* ^
* </pre>
*
* <p>
* In addition, it enables syntax such as the following registration of a pending test:
* </p>
*
* <pre class="stHighlight">
* they should "pop values in last-in-first-out order" is (pending)
* ^
* </pre>
*
* <p>
* And finally, it also enables syntax such as the following tagged test registration:
* </p>
*
* <pre class="stHighlight">
* they should "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
* ^
* </pre>
*
* <p>
   * For more information and examples of the use of the <code>they</code> field, see the <a href="AnyFlatSpec.html">main documentation</a>
* for trait <code>AnyFlatSpec</code>.
* </p>
*/
  // Result of `they <verb> "<name>"`: completes registration via `in`/`is`/`ignore`, or
  // accumulates tags via `taggedAs`.
  protected final class TheyVerbString(verb: String, name: String) {
    /**
     * Supports the registration of tests in a <code>AnyFlatSpec</code>.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * they must "pop values in last-in-first-out order" in { ... }
     *                                                   ^
     * </pre>
     *
     * <p>
     * For examples of test registration, see the <a href="AnyFlatSpec.html">main documentation</a>
     * for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    // SKIP-DOTTY-START
    def in(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
      // Untagged variant: registers with an empty tag list.
      registerTestToRun(verb.trim + " " + name.trim, "in", List(), () => testFun, pos)
    }
    // SKIP-DOTTY-END
    //DOTTY-ONLY inline def in(testFun: => Any /* Assertion */): Unit = {
    //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToRun(verb.trim + " " + name.trim, "in", List(), () => testFun, pos) }) }
    //DOTTY-ONLY }
    /**
     * Supports the registration of pending tests in a <code>AnyFlatSpec</code>.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * they must "pop values in last-in-first-out order" is (pending)
     *                                                   ^
     * </pre>
     *
     * <p>
     * For examples of pending test registration, see the <a href="AnyFlatSpec.html#pendingTests">Pending tests section</a> in the main documentation
     * for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    // SKIP-DOTTY-START
    def is(testFun: => PendingStatement)(implicit pos: source.Position): Unit = {
      // Evaluate the by-name pending statement, then yield `succeed` so the thunk has an Assertion result.
      registerTestToRun(verb.trim + " " + name.trim, "is", List(), () => { testFun; succeed }, pos)
    }
    // SKIP-DOTTY-END
    //DOTTY-ONLY inline def is(testFun: => PendingStatement): Unit = {
    //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToRun(verb.trim + " " + name.trim, "is", List(), () => { testFun; succeed }, pos) }) }
    //DOTTY-ONLY }
    /**
     * Supports the registration of ignored tests in a <code>AnyFlatSpec</code>.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * they must "pop values in last-in-first-out order" ignore { ... }
     *                                                   ^
     * </pre>
     *
     * <p>
     * For examples of ignored test registration, see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a> in the main documentation
     * for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    // SKIP-DOTTY-START
    def ignore(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
      registerTestToIgnore(verb.trim + " " + name.trim, List(), "ignore", () => testFun, pos)
    }
    // SKIP-DOTTY-END
    //DOTTY-ONLY inline def ignore(testFun: => Any /* Assertion */): Unit = {
    //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToIgnore(verb.trim + " " + name.trim, List(), "ignore", () => testFun, pos) }) }
    //DOTTY-ONLY }
    /**
     * Supports the registration of tagged tests in a <code>AnyFlatSpec</code>.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * they must "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
     *                                                   ^
     * </pre>
     *
     * <p>
     * For examples of tagged test registration, see the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a> in the main documentation
     * for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    def taggedAs(firstTestTag: Tag, otherTestTags: Tag*) = {
      val tagList = firstTestTag :: otherTestTags.toList
      // NOTE(review): this returns ItVerbStringTaggedAs rather than TheyVerbStringTaggedAs.
      // The two classes expose the same in/is/ignore methods, so behavior is unaffected, but
      // confirm the asymmetry is intentional before changing it — the inferred result type is
      // part of the public API and switching it would break binary compatibility.
      new ItVerbStringTaggedAs(verb, name, tagList)
    }
  }
/**
* Class that supports test (and shared test) registration via the instance referenced from <code>AnyFlatSpec</code>'s <code>it</code> field.
*
* <p>
* This class enables syntax such as the following test registration:
* </p>
*
* <pre class="stHighlight">
* they should "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* It also enables syntax such as the following shared test registration:
* </p>
*
* <pre class="stHighlight">
* they should behave like nonEmptyStack(lastItemPushed)
* ^
* </pre>
*
* <p>
   * For more information and examples of the use of the <code>they</code> field, see the main documentation
* for this trait.
* </p>
*/
protected final class TheyWord {
/**
* Supports the registration of tests with <code>should</code> in a <code>AnyFlatSpec</code>.
*
* <p>
* This method supports syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* they should "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* For examples of test registration, see the <a href="AnyFlatSpec.html">main documentation</a>
* for trait <code>AnyFlatSpec</code>.
* </p>
*/
def should(string: String) = new ItVerbString("should", string)
/**
* Supports the registration of tests with <code>must</code> in a <code>AnyFlatSpec</code>.
*
* <p>
* This method supports syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* they must "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* For examples of test registration, see the <a href="AnyFlatSpec.html">main documentation</a>
* for trait <code>AnyFlatSpec</code>.
* </p>
*/
def must(string: String) = new ItVerbString("must", string)
/**
* Supports the registration of tests with <code>can</code> in a <code>AnyFlatSpec</code>.
*
* <p>
* This method supports syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* they can "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* For examples of test registration, see the <a href="AnyFlatSpec.html">main documentation</a>
* for trait <code>AnyFlatSpec</code>.
* </p>
*/
def can(string: String) = new ItVerbString("can", string)
/**
* Supports the registration of shared tests with <code>should</code> in a <code>AnyFlatSpec</code>.
*
* <p>
* This method supports syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* they should behave like nonFullStack(stackWithOneItem)
* ^
* </pre>
*
* <p>
* For examples of shared tests, see the <a href="AnyFlatSpec.html#sharedTests">Shared tests section</a>
* in the main documentation for trait <code>AnyFlatSpec</code>.
* </p>
*/
def should(behaveWord: BehaveWord) = behaveWord
/**
* Supports the registration of shared tests with <code>must</code> in a <code>AnyFlatSpec</code>.
*
* <p>
* This method supports syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* they must behave like nonFullStack(stackWithOneItem)
* ^
* </pre>
*
* <p>
* For examples of shared tests, see the <a href="AnyFlatSpec.html#sharedTests">Shared tests section</a>
* in the main documentation for trait <code>AnyFlatSpec</code>.
* </p>
*/
def must(behaveWord: BehaveWord) = behaveWord
/**
* Supports the registration of shared tests with <code>can</code> in a <code>AnyFlatSpec</code>.
*
* <p>
* This method supports syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* they can behave like nonFullStack(stackWithOneItem)
* ^
* </pre>
*
* <p>
* For examples of shared tests, see the <a href="AnyFlatSpec.html#sharedTests">Shared tests section</a>
* in the main documentation for trait <code>AnyFlatSpec</code>.
* </p>
*/
def can(behaveWord: BehaveWord) = behaveWord
}
/**
* Supports test (and shared test) registration in <code>AnyFlatSpec</code>s.
*
* <p>
* This field enables syntax such as the following test registration:
* </p>
*
* <pre class="stHighlight">
* they should "pop values in last-in-first-out order" in { ... }
* ^
* </pre>
*
* <p>
* It also enables syntax such as the following shared test registration:
* </p>
*
* <pre class="stHighlight">
* they should behave like nonEmptyStack(lastItemPushed)
* ^
* </pre>
*
* <p>
   * For more information and examples of the use of the <code>they</code> field, see the main documentation
* for this trait.
* </p>
*/
protected val they = new TheyWord
/**
* Class that supports test registration in shorthand form.
*
* <p>
* For example, this class enables syntax such as the following test registration
* in shorthand form:
* </p>
*
* <pre class="stHighlight">
* "A Stack (when empty)" should "be empty" in { ... }
* ^
* </pre>
*
* <p>
* This class also enables syntax such as the following ignored test registration
* in shorthand form:
* </p>
*
* <pre class="stHighlight">
* "A Stack (when empty)" should "be empty" ignore { ... }
* ^
* </pre>
*
* <p>
* This class is used via an implicit conversion (named <code>convertToInAndIgnoreMethods</code>)
* from <code>ResultOfStringPassedToVerb</code>. The <code>ResultOfStringPassedToVerb</code> class
* does not declare any methods named <code>in</code>, because the
* type passed to <code>in</code> differs in a <code>AnyFlatSpec</code> and a <code>FixtureAnyFlatSpec</code>.
* A <code>FixtureAnyFlatSpec</code> needs two <code>in</code> methods, one that takes a no-arg
* test function and another that takes a one-arg test function (a test that takes a
   * <code>Fixture</code> as its parameter). By contrast, a <code>AnyFlatSpec</code> needs
* only one <code>in</code> method that takes a by-name parameter. As a result,
* <code>AnyFlatSpec</code> and <code>FixtureAnyFlatSpec</code> each provide an implicit conversion
* from <code>ResultOfStringPassedToVerb</code> to a type that provides the appropriate
* <code>in</code> methods.
* </p>
*
* @author Bill Venners
*/
  // Implicit-conversion target that adds `in` and `ignore` to ResultOfStringPassedToVerb
  // (which cannot declare them itself because the fixture variant needs different signatures).
  protected final class InAndIgnoreMethods(resultOfStringPassedToVerb: ResultOfStringPassedToVerb) {
    // Bring the wrapped verb/rest pair into scope for the registration calls below.
    import resultOfStringPassedToVerb.rest
    import resultOfStringPassedToVerb.verb
    /**
     * Supports the registration of tests in shorthand form.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * "A Stack" must "pop values in last-in-first-out order" in { ... }
     *                                                        ^
     * </pre>
     *
     * <p>
     * For examples of test registration, see the <a href="AnyFlatSpec.html">main documentation</a>
     * for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    // SKIP-DOTTY-START
    def in(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
      // The registered test name is "<verb> <rest>" (the subject itself was registered as a branch earlier).
      registerTestToRun(verb.trim + " " + rest.trim, "in", List(), () => testFun, pos)
    }
    // SKIP-DOTTY-END
    //DOTTY-ONLY inline def in(testFun: => Any /* Assertion */): Unit = {
    //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToRun(verb.trim + " " + rest.trim, "in", List(), () => testFun, pos) }) }
    //DOTTY-ONLY }
    /**
     * Supports the registration of ignored tests in shorthand form.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * "A Stack" must "pop values in last-in-first-out order" ignore { ... }
     *                                                        ^
     * </pre>
     *
     * <p>
     * For examples of ignored test registration, see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
     * in the main documentation for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    // SKIP-DOTTY-START
    def ignore(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
      registerTestToIgnore(verb.trim + " " + rest.trim, List(), "ignore", () => testFun, pos)
    }
    // SKIP-DOTTY-END
    //DOTTY-ONLY inline def ignore(testFun: => Any /* Assertion */): Unit = {
    //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToIgnore(verb.trim + " " + rest.trim, List(), "ignore", () => testFun, pos) }) }
    //DOTTY-ONLY }
  }
import scala.language.implicitConversions
/**
* Implicitly converts an object of type <code>ResultOfStringPassedToVerb</code> to an
* <code>InAndIgnoreMethods</code>, to enable <code>in</code> and <code>ignore</code>
* methods to be invokable on that object.
*/
protected implicit def convertToInAndIgnoreMethods(resultOfStringPassedToVerb: ResultOfStringPassedToVerb): InAndIgnoreMethods =
new InAndIgnoreMethods(resultOfStringPassedToVerb)
/**
* Class that supports tagged test registration in shorthand form.
*
* <p>
* For example, this class enables syntax such as the following tagged test registration
* in shorthand form:
* </p>
*
* <pre class="stHighlight">
* "A Stack (when empty)" should "be empty" taggedAs() in { ... }
* ^
* </pre>
*
* <p>
* This class also enables syntax such as the following tagged, ignored test registration
* in shorthand form:
* </p>
*
* <pre class="stHighlight">
* "A Stack (when empty)" should "be empty" taggedAs(SlowTest) ignore { ... }
* ^
* </pre>
*
* <p>
* This class is used via an implicit conversion (named <code>convertToInAndIgnoreMethodsAfterTaggedAs</code>)
* from <code>ResultOfTaggedAsInvocation</code>. The <code>ResultOfTaggedAsInvocation</code> class
* does not declare any methods named <code>in</code>, because the
* type passed to <code>in</code> differs in a <code>AnyFlatSpec</code> and a <code>FixtureAnyFlatSpec</code>.
* A <code>FixtureAnyFlatSpec</code> needs two <code>in</code> methods, one that takes a no-arg
* test function and another that takes a one-arg test function (a test that takes a
   * <code>Fixture</code> as its parameter). By contrast, a <code>AnyFlatSpec</code> needs
* only one <code>in</code> method that takes a by-name parameter. As a result,
* <code>AnyFlatSpec</code> and <code>FixtureAnyFlatSpec</code> each provide an implicit conversion
* from <code>ResultOfTaggedAsInvocation</code> to a type that provides the appropriate
* <code>in</code> methods.
* </p>
*
* @author Bill Venners
*/
  // Implicit-conversion target that adds `in` and `ignore` to ResultOfTaggedAsInvocation
  // (the post-taggedAs counterpart of InAndIgnoreMethods; carries the accumulated tags).
  protected final class InAndIgnoreMethodsAfterTaggedAs(resultOfTaggedAsInvocation: ResultOfTaggedAsInvocation) {
    // Bring the wrapped verb/rest/tags into scope; `tags` is renamed to avoid shadowing.
    import resultOfTaggedAsInvocation.verb
    import resultOfTaggedAsInvocation.rest
    import resultOfTaggedAsInvocation.{tags => tagsList}
    /**
     * Supports the registration of tagged tests in shorthand form.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * "A Stack" must "pop values in last-in-first-out order" taggedAs(SlowTest) in { ... }
     *                                                                           ^
     * </pre>
     *
     * <p>
     * For examples of tagged test registration, see the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a>
     * in the main documentation for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    // SKIP-DOTTY-START
    def in(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
      registerTestToRun(verb.trim + " " + rest.trim, "in", tagsList, () => testFun, pos)
    }
    // SKIP-DOTTY-END
    //DOTTY-ONLY inline def in(testFun: => Any /* Assertion */): Unit = {
    //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToRun(verb.trim + " " + rest.trim, "in", tagsList, () => testFun, pos) }) }
    //DOTTY-ONLY }
    /**
     * Supports the registration of tagged, ignored tests in shorthand form.
     *
     * <p>
     * This method supports syntax such as the following:
     * </p>
     *
     * <pre class="stHighlight">
     * "A Stack" must "pop values in last-in-first-out order" taggedAs(SlowTest) ignore { ... }
     *                                                                           ^
     * </pre>
     *
     * <p>
     * For examples of ignored test registration, see the <a href="AnyFlatSpec.html#ignoredTests">Ignored tests section</a>
     * in the main documentation for trait <code>AnyFlatSpec</code>.
     * For examples of tagged test registration, see the <a href="AnyFlatSpec.html#taggingTests">Tagging tests section</a>
     * in the main documentation for trait <code>AnyFlatSpec</code>.
     * </p>
     */
    // SKIP-DOTTY-START
    def ignore(testFun: => Any /* Assertion */)(implicit pos: source.Position): Unit = {
      registerTestToIgnore(verb.trim + " " + rest.trim, tagsList, "ignore", () => testFun, pos)
    }
    // SKIP-DOTTY-END
    //DOTTY-ONLY inline def ignore(testFun: => Any /* Assertion */): Unit = {
    //DOTTY-ONLY   ${ source.Position.withPosition[Unit]('{(pos: source.Position) => registerTestToIgnore(verb.trim + " " + rest.trim, tagsList, "ignore", () => testFun, pos) }) }
    //DOTTY-ONLY }
  }
/**
* Implicitly converts an object of type <code>ResultOfTaggedAsInvocation</code> to an
* <code>InAndIgnoreMethodsAfterTaggedAs</code>, to enable <code>in</code> and <code>ignore</code>
* methods to be invokable on that object.
*/
protected implicit def convertToInAndIgnoreMethodsAfterTaggedAs(resultOfTaggedAsInvocation: ResultOfTaggedAsInvocation): InAndIgnoreMethodsAfterTaggedAs =
new InAndIgnoreMethodsAfterTaggedAs(resultOfTaggedAsInvocation)
/**
* Supports the shorthand form of test registration.
*
* <p>
* For example, this method enables syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* "A Stack (when empty)" should "be empty" in { ... }
* ^
* </pre>
*
* <p>
* This function is passed as an implicit parameter to a <code>should</code> method
* provided in <code>ShouldVerb</code>, a <code>must</code> method
* provided in <code>MustVerb</code>, and a <code>can</code> method
* provided in <code>CanVerb</code>. When invoked, this function registers the
* subject description (the first parameter to the function) and returns a <code>ResultOfStringPassedToVerb</code>
* initialized with the verb and rest parameters (the second and third parameters to
* the function, respectively).
* </p>
*/
  // Implicit hook invoked by ShouldVerb/MustVerb/CanVerb when a subject string meets a verb:
  // registers the subject as a flat branch and returns a ResultOfStringPassedToVerb so the
  // statement can continue with is/taggedAs (in/ignore are added by implicit conversion).
  protected implicit val shorthandTestRegistrationFunction: StringVerbStringInvocation =
    new StringVerbStringInvocation {
      def apply(subject: String, verb: String, rest: String, pos: source.Position): ResultOfStringPassedToVerb = {
        // Stack depth locates the user's code in error stack traces; the JS/Native value differs
        // because the call chain has extra frames on those platforms.
        // SKIP-SCALATESTJS,NATIVE-START
        val stackDepth = 6
        // SKIP-SCALATESTJS,NATIVE-END
        //SCALATESTJS,NATIVE-ONLY val stackDepth = 8
        // NOTE(review): the message resource mentions "should" regardless of which verb was
        // used — presumably a generic resource key; confirm before changing.
        registerFlatBranch(subject, Resources.shouldCannotAppearInsideAnIn, "AnyFlatSpecLike.scala", "apply", stackDepth, 0, Some(pos))
        new ResultOfStringPassedToVerb(verb, rest) {
          def is(testFun: => PendingStatement): Unit = {
            // Evaluate the pending statement, then yield `succeed` so the thunk has an Assertion result.
            registerTestToRun(this.verb.trim + " " + this.rest.trim, "is", List(), () => { testFun; succeed }, pos)
          }
          // Note, won't have an is method that takes fixture => PendingStatement one, because don't want
          // to say is (fixture => pending), rather just say is (pending)
          def taggedAs(firstTestTag: Tag, otherTestTags: Tag*) = {
            val tagList = firstTestTag :: otherTestTags.toList
            new ResultOfTaggedAsInvocation(this.verb, this.rest, tagList) {
              // "A Stack" should "bla bla" taggedAs(SlowTest) is (pending)
              //                                               ^
              def is(testFun: => PendingStatement): Unit = {
                registerTestToRun(this.verb.trim + " " + this.rest.trim, "is", this.tags, () => { testFun; succeed }, pos)
              }
            }
          }
        }
      }
    }
/**
* Supports the shorthand form of shared test registration.
*
* <p>
* For example, this method enables syntax such as the following in:
* </p>
*
* <pre class="stHighlight">
* "A Stack (with one item)" should behave like nonEmptyStack(stackWithOneItem, lastValuePushed)
* ^
* </pre>
*
* <p>
* This function is passed as an implicit parameter to a <code>should</code> method
* provided in <code>ShouldVerb</code>, a <code>must</code> method
* provided in <code>MustVerb</code>, and a <code>can</code> method
* provided in <code>CanVerb</code>. When invoked, this function registers the
* subject description (the parameter to the function) and returns a <code>BehaveWord</code>.
* </p>
*/
protected implicit val shorthandSharedTestRegistrationFunction: StringVerbBehaveLikeInvocation =
new StringVerbBehaveLikeInvocation {
def apply(subject: String, pos: source.Position): BehaveWord = {
// Registers the subject as a flat branch before returning the BehaveWord that
// enables `behave like ...`. NOTE(review): the stack depth 5 is call-site
// sensitive — it must match the invocation path through the verb traits.
registerFlatBranch(subject, Resources.shouldCannotAppearInsideAnIn, "AnyFlatSpecLike.scala", "apply", 5, 0, Some(pos))
new BehaveWord
}
}
// TODO: I got a:
// runsuite:
// [scalatest] *** RUN ABORTED ***
// [scalatest] An exception or error caused a run to abort: Duplicate test name: should return the new exception with the clue string appended, separated by a space char if passed a function that does that (Engine.scala:464)
// Shouldn't be Engine.scala clearly
/**
* Register a test to ignore, which has the given spec text, optional tags, and test function value that takes no arguments.
* This method will register the test for later ignoring via an invocation of one of the <code>execute</code>
* methods. This method exists to make it easy to ignore an existing test by changing the call to <code>it</code>
* to <code>ignore</code> without deleting or commenting out the actual test code. The test will not be executed, but a
* report will be sent that indicates the test was ignored. The name of the test will be a concatenation of the text of all surrounding describers,
* from outside in, and the passed spec text, with one space placed between each item. (See the documenation
* for <code>testNames</code> for an example.) The resulting test name must not have been registered previously on
* this <code>AnyFlatSpec</code> instance.
*
* @param specText the specification text, which will be combined with the descText of any surrounding describers
* to form the test name
* @param testTags the optional list of tags for this test
* @param methodName caller's method name
* @param testFun the test function
* @throws DuplicateTestNameException if a test with the same name has been registered previously
* @throws TestRegistrationClosedException if invoked after <code>run</code> has been invoked on this suite
* @throws NullArgumentException if <code>specText</code> or any passed test tag is <code>null</code>
*/
private def registerTestToIgnore(specText: String, testTags: List[Tag], methodName: String, testFun: () => Any /* Assertion */, pos: source.Position): Unit = {
// Stack-trace offsets differ between JVM and JS/Native builds; the marker
// comments are processed by the build to select the matching constants.
// SKIP-SCALATESTJS,NATIVE-START
val stackDepth = 4
val stackDepthAdjustment = -4
// SKIP-SCALATESTJS,NATIVE-END
//SCALATESTJS,NATIVE-ONLY val stackDepth = 6
//SCALATESTJS,NATIVE-ONLY val stackDepthAdjustment = -6
// Delegates to the engine; Transformer adapts the thunk to the engine's
// expected test-function shape.
engine.registerIgnoredTest(specText, Transformer(testFun), Resources.ignoreCannotAppearInsideAnInOrAnIs, "AnyFlatSpecLike.scala", methodName, stackDepth, stackDepthAdjustment, None, Some(pos), testTags: _*)
}
/**
* A <code>Map</code> whose keys are <code>String</code> names of tagged tests and whose associated values are
* the <code>Set</code> of tags for the test. If this <code>AnyFlatSpec</code> contains no tags, this method returns an empty <code>Map</code>.
*
* <p>
* This trait's implementation returns tags that were passed as strings contained in <code>Tag</code> objects passed to
* <code>taggedAs</code>.
* </p>
*
* <p>
* In addition, this trait's implementation will also auto-tag tests with class level annotations.
* For example, if you annotate <code>@Ignore</code> at the class level, all test methods in the class will be auto-annotated with
* <code>org.scalatest.Ignore</code>.
* </p>
*/
override def tags: Map[String, Set[String]] = autoTagClassAnnotations(atomic.get.tagsMap, this)
/**
* Run a test. This trait's implementation runs the test registered with the name specified by
* <code>testName</code>. Each test's name is a concatenation of the text of all describers surrounding a test,
* from outside in, and the test's spec text, with one space placed between each item. (See the documenation
* for <code>testNames</code> for an example.)
*
* @param testName the name of one test to execute.
* @param args the <code>Args</code> for this run
* @return a <code>Status</code> object that indicates when the test started by this method has completed, and whether or not it failed .
*
* @throws NullArgumentException if any of <code>testName</code>, <code>reporter</code>, <code>stopper</code>, or <code>configMap</code>
* is <code>null</code>.
*/
protected override def runTest(testName: String, args: Args): Status = {
// Wraps the registered test function in a NoArgTest so that withFixture
// (and any user override of it) can run setup/teardown around the test body.
def invokeWithFixture(theTest: TestLeaf): Outcome = {
val theConfigMap = args.configMap
val testData = testDataFor(testName, theConfigMap)
withFixture(
new NoArgTest {
val name = testData.name
def apply(): Outcome = { theTest.testFun() }
val configMap = testData.configMap
val scopes = testData.scopes
val text = testData.text
val tags = testData.tags
val pos = testData.pos
}
)
}
// NOTE(review): the bare `true` is forwarded to runTestImpl — presumably the
// includeIcon flag; confirm against Engine before relying on it.
runTestImpl(thisSuite, testName, args, true, invokeWithFixture)
}
/**
* Run zero to many of this <code>AnyFlatSpec</code>'s tests.
*
* <p>
* This method takes a <code>testName</code> parameter that optionally specifies a test to invoke.
* If <code>testName</code> is <code>Some</code>, this trait's implementation of this method
* invokes <code>runTest</code> on this object, passing in:
* </p>
*
* <ul>
* <li><code>testName</code> - the <code>String</code> value of the <code>testName</code> <code>Option</code> passed
* to this method</li>
* <li><code>reporter</code> - the <code>Reporter</code> passed to this method, or one that wraps and delegates to it</li>
* <li><code>stopper</code> - the <code>Stopper</code> passed to this method, or one that wraps and delegates to it</li>
* <li><code>configMap</code> - the <code>configMap</code> passed to this method, or one that wraps and delegates to it</li>
* </ul>
*
* <p>
* This method takes a <code>Set</code> of tag names that should be included (<code>tagsToInclude</code>), and a <code>Set</code>
* that should be excluded (<code>tagsToExclude</code>), when deciding which of this <code>Suite</code>'s tests to execute.
* If <code>tagsToInclude</code> is empty, all tests will be executed
* except those those belonging to tags listed in the <code>tagsToExclude</code> <code>Set</code>. If <code>tagsToInclude</code> is non-empty, only tests
* belonging to tags mentioned in <code>tagsToInclude</code>, and not mentioned in <code>tagsToExclude</code>
* will be executed. However, if <code>testName</code> is <code>Some</code>, <code>tagsToInclude</code> and <code>tagsToExclude</code> are essentially ignored.
* Only if <code>testName</code> is <code>None</code> will <code>tagsToInclude</code> and <code>tagsToExclude</code> be consulted to
* determine which of the tests named in the <code>testNames</code> <code>Set</code> should be run. For more information on trait tags, see the main documentation for this trait.
* </p>
*
* <p>
* If <code>testName</code> is <code>None</code>, this trait's implementation of this method
* invokes <code>testNames</code> on this <code>Suite</code> to get a <code>Set</code> of names of tests to potentially execute.
* (A <code>testNames</code> value of <code>None</code> essentially acts as a wildcard that means all tests in
* this <code>Suite</code> that are selected by <code>tagsToInclude</code> and <code>tagsToExclude</code> should be executed.)
* For each test in the <code>testName</code> <code>Set</code>, in the order
* they appear in the iterator obtained by invoking the <code>elements</code> method on the <code>Set</code>, this trait's implementation
* of this method checks whether the test should be run based on the <code>tagsToInclude</code> and <code>tagsToExclude</code> <code>Set</code>s.
* If so, this implementation invokes <code>runTest</code>, passing in:
* </p>
*
* <ul>
* <li><code>testName</code> - the <code>String</code> name of the test to run (which will be one of the names in the <code>testNames</code> <code>Set</code>)</li>
* <li><code>reporter</code> - the <code>Reporter</code> passed to this method, or one that wraps and delegates to it</li>
* <li><code>stopper</code> - the <code>Stopper</code> passed to this method, or one that wraps and delegates to it</li>
* <li><code>configMap</code> - the <code>configMap</code> passed to this method, or one that wraps and delegates to it</li>
* </ul>
*
* @param testName an optional name of one test to execute. If <code>None</code>, all relevant tests should be executed.
* I.e., <code>None</code> acts like a wildcard that means execute all relevant tests in this <code>AnyFlatSpec</code>.
* @param args the <code>Args</code> for this run
* @return a <code>Status</code> object that indicates when all tests started by this method have completed, and whether or not a failure occurred.
*
* @throws NullArgumentException if any of <code>testName</code>, <code>reporter</code>, <code>stopper</code>, <code>tagsToInclude</code>,
* <code>tagsToExclude</code>, or <code>configMap</code> is <code>null</code>.
*/
protected override def runTests(testName: Option[String], args: Args): Status = {
// Delegates test selection and ordering to the engine, passing this trait's
// runTest as the per-test runner. NOTE(review): the bare `true` is forwarded
// to runTestsImpl — presumably the includeIcon flag; confirm against Engine.
runTestsImpl(thisSuite, testName, args, info, true, runTest)
}
/**
* An immutable <code>Set</code> of test names. If this <code>AnyFlatSpec</code> contains no tests, this method returns an
* empty <code>Set</code>.
*
* <p>
* This trait's implementation of this method will return a set that contains the names of all registered tests. The set's
* iterator will return those names in the order in which the tests were registered. Each test's name is composed
* of the concatenation of the text of each surrounding describer, in order from outside in, and the text of the
* example itself, with all components separated by a space. For example, consider this <code>AnyFlatSpec</code>:
* </p>
*
* <pre class="stHighlight">
* import org.scalatest.flatspec.AnyFlatSpec
*
* class StackSpec extends AnyFlatSpec {
*
* "A Stack (when not empty)" must "allow me to pop" in {}
* it must "not be empty" in {}
*
* "A Stack (when not full)" must "allow me to push" in {}
* it must "not be full" in {}
* }
* </pre>
*
* <p>
* Invoking <code>testNames</code> on this <code>AnyFlatSpec</code> will yield a set that contains the following
* two test name strings:
* </p>
*
* <pre>
* "A Stack (when not empty) must allow me to pop"
* "A Stack (when not empty) must not be empty"
* "A Stack (when not full) must allow me to push"
* "A Stack (when not full) must not be full"
* </pre>
*/
// Snapshot of all registered test names, preserving registration order.
override def testNames: Set[String] = InsertionOrderSet(atomic.get.testNamesList)
/**
 * Runs this suite of tests. Delegates to <code>runImpl</code>, passing
 * <code>super.run</code> as the function that performs the actual run so the
 * engine can wrap it with its own bookkeeping.
 *
 * @param testName an optional name of one test to run; <code>None</code> runs all relevant tests
 * @param args the <code>Args</code> for this run
 * @return a <code>Status</code> that completes when all started tests have completed
 */
override def run(testName: Option[String], args: Args): Status = {
runImpl(thisSuite, testName, args, super.run)
}
/**
* Supports shared test registration in <code>AnyFlatSpec</code>s.
*
* <p>
* This field supports syntax such as the following:
* </p>
*
* <pre class="stHighlight">
* it should behave like nonFullStack(stackWithOneItem)
* ^
* </pre>
*
* <p>
* For more information and examples of the use of <code>behave</code>, see the <a href="#sharedTests">Shared tests section</a>
* in the main documentation for this trait.
* </p>
*/
protected val behave = new BehaveWord
/**
* <strong>The <code>styleName</code> lifecycle method has been deprecated and will be removed in a future version of ScalaTest.</strong>
*
* <p>This method was used to support the chosen styles feature, which was deactivated in 3.1.0. The internal modularization of ScalaTest in 3.2.0
* will replace chosen styles as the tool to encourage consistency across a project. We do not plan a replacement for <code>styleName</code>.</p>
*/
@deprecated("The styleName lifecycle method has been deprecated and will be removed in a future version of ScalaTest with no replacement.", "3.1.0")
final override val styleName: String = "org.scalatest.FlatSpec"
override def testDataFor(testName: String, theConfigMap: ConfigMap = ConfigMap.empty): TestData = createTestDataFor(testName, theConfigMap, this)
}
| scalatest/scalatest | jvm/flatspec/src/main/scala/org/scalatest/flatspec/AnyFlatSpecLike.scala | Scala | apache-2.0 | 83,279 |
package com.karasiq.nanoboard.sources.bitmessage
import java.nio.charset.StandardCharsets
import scala.concurrent.Future
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.http.scaladsl.server.Directives._
import akka.stream.{ActorMaterializer, OverflowStrategy}
import akka.stream.scaladsl.{Sink, Source}
import com.typesafe.config.{Config, ConfigFactory}
import org.apache.commons.codec.binary.Base64
import play.api.libs.json._
import com.karasiq.nanoboard.NanoboardMessage
object BitMessageTransport {

  /** Creates a transport from the given BitMessage config subtree (chan-address, host, port, username, password). */
  def fromConfig(bmConfig: Config)(implicit ac: ActorSystem, am: ActorMaterializer) = {
    val chanAddress = bmConfig.getString("chan-address")
    val apiAddress = bmConfig.getString("host")
    val apiPort = bmConfig.getInt("port")
    val apiUsername = bmConfig.getString("username")
    val apiPassword = bmConfig.getString("password")
    new BitMessageTransport(chanAddress, apiAddress, apiPort, apiUsername, apiPassword)
  }

  /** Creates a transport from the `nanoboard.bitmessage` section of the given config. */
  def apply(config: Config = ConfigFactory.load())(implicit ac: ActorSystem, am: ActorMaterializer) = {
    fromConfig(config.getConfig("nanoboard.bitmessage"))
  }

  /** Serializes messages to the JSON array format used by the official bit-transport. */
  def wrap(messages: NanoboardMessage*): String = {
    // FIX: the lambda arrow had been corrupted to the identifier `β` (mojibake
    // of the Unicode `⇒`), which does not parse as a function literal.
    val wrappedMessages = messages.map(m => WrappedNanoboardMessage(m.hash, asBase64(NanoboardMessage.textWithSignatureTags(m)), m.parent))
    Json.toJson(wrappedMessages).toString()
  }

  /** Parses a JSON payload back into nanoboard messages, restoring POW/signature tags. */
  def unwrap(bitMessage: String): Vector[NanoboardMessage] = {
    val messages = Json.parse(bitMessage).as[Vector[WrappedNanoboardMessage]]
    // FIX: restored the `=>` arrow here as well (was corrupted to `β`).
    messages.map { wrapped =>
      val (text, pow, signature) = NanoboardMessage.stripSignatureTags(fromBase64(wrapped.message))
      NanoboardMessage(wrapped.replyTo, text, pow.getOrElse(NanoboardMessage.NoPOW), signature.getOrElse(NanoboardMessage.NoSignature))
    }
  }

  // UTF-8 <-> Base64 helpers shared by wrap/unwrap and the HTTP endpoints.
  @inline
  private[bitmessage] def asBase64(string: String): String = {
    Base64.encodeBase64String(string.getBytes(StandardCharsets.UTF_8))
  }

  @inline
  private[bitmessage] def fromBase64(string: String): String = {
    new String(Base64.decodeBase64(string), StandardCharsets.UTF_8)
  }
}
/**
* Nanoboard BitMessage transport, compatible with official implementation.
* @see [[https://github.com/nanoboard/nanoboard-bittransport]]
*/
final class BitMessageTransport(chanAddress: String, apiAddress: String, apiPort: Int, apiUsername: String, apiPassword: String)(implicit ac: ActorSystem, am: ActorMaterializer) {
  import XmlRpcProxy._

  private val http = Http()
  private val xmlRpcProxy = new XmlRpcProxy(http, apiAddress, apiPort, apiUsername, apiPassword)

  /**
   * Publishes a nanoboard message to the BitMessage chan via XML-RPC.
   * NOTE(review): the trailing `2, 21600` arguments are passed positionally to
   * the proxy — presumably encoding and TTL seconds; confirm in XmlRpcProxy.
   */
  def sendMessage(message: NanoboardMessage): Future[HttpResponse] = {
    xmlRpcProxy.sendMessage(chanAddress, chanAddress, (), BitMessageTransport.asBase64(BitMessageTransport.wrap(message)), 2, 21600)
  }

  /** Binds an HTTP server that feeds messages POSTed by the bit-transport daemon into `sink`. */
  def receiveMessages(host: String, port: Int, sink: Sink[NanoboardMessage, _]): Future[Http.ServerBinding] = {
    http.bindAndHandle(route(sink), host, port)
  }

  private def route(sink: Sink[NanoboardMessage, _]) = {
    // Buffer up to 20 in-flight messages; under pressure the oldest are dropped.
    val queue = Source
      .queue(20, OverflowStrategy.dropHead)
      .to(sink)
      .run()
    post {
      // FIX: restored the `=>` lambda arrow (had been corrupted to `β`).
      (path("api" / "add" / NanoboardMessage.HashFormat) & entity(as[String])) { (parent, message) =>
        val (text, pow, signature) = NanoboardMessage.stripSignatureTags(BitMessageTransport.fromBase64(message))
        queue.offer(NanoboardMessage(parent, text, pow.getOrElse(NanoboardMessage.NoPOW), signature.getOrElse(NanoboardMessage.NoSignature)))
        complete(StatusCodes.OK)
      }
    }
  }
}
| Karasiq/nanoboard | library/src/main/scala/com/karasiq/nanoboard/sources/bitmessage/BitMessageTransport.scala | Scala | apache-2.0 | 3,558 |
package pl.umk.bugclassification.scmparser.invokers
import scala.sys.process.Process
/**
 * Mixin for building external-command processes pinned to a working directory.
 */
trait InvokerOnDirectory {

  /** Path of the directory the commands run in. */
  def dirUrl: String

  /** Builds a process for `command` with [[dirUrl]] as its working directory. */
  protected def createProcessBuilder(command: Command): scala.sys.process.ProcessBuilder =
    builderFor(dirUrl, command)

  /**
   * Builds a process rooted at `dirUrl + subDir`.
   * NOTE(review): the two segments are concatenated verbatim, so callers must
   * include the path separator in `subDir` — behavior intentionally kept as-is.
   */
  protected def createProcessBuilder(command: Command, subDir: String): scala.sys.process.ProcessBuilder =
    builderFor(dirUrl + subDir, command)

  // Shared implementation: removes the duplicated bodies and the non-idiomatic
  // explicit `return`s of the original two methods.
  private def builderFor(path: String, command: Command): scala.sys.process.ProcessBuilder =
    Process(new java.lang.ProcessBuilder(command.command).directory(new java.io.File(path)))
}
} | mfejzer/CommitClassification | src/main/scala/pl/umk/bugclassification/scmparser/invokers/InvokerOnDirectory.scala | Scala | bsd-3-clause | 604 |
package com.github.bomgar.sns
import com.github.bomgar.Region
import com.github.bomgar.auth.credentials.AwsCredentialsProvider
import com.github.bomgar.client.BaseAwsClient
import com.github.bomgar.sns.domain.{SubscriptionReference, TopicAttributes, TopicReference}
import play.api.libs.ws.WSClient
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
/**
 * Client for the AWS Simple Notification Service query API.
 *
 * All operations issue form-encoded requests through [[BaseAwsClient]] and
 * share the same API version, so the common parameters are built in one place.
 */
class AwsSnsClient(
  credentialsProvider: AwsCredentialsProvider,
  region: Region.Type,
  client: WSClient,
  defaultTimeout: Duration = 5.seconds
)(implicit executionContext: ExecutionContext)
  extends BaseAwsClient(credentialsProvider, region, client, "sns", defaultTimeout) {

  // Every action below previously repeated this literal; keep it in one place.
  private val ApiVersion = "2010-03-31"

  /** Builds the parameter map shared by every SNS request: Action + Version + action-specific extras. */
  private def actionParameters(action: String, extra: (String, String)*): Map[String, String] =
    Map("Action" -> action, "Version" -> ApiVersion) ++ extra

  /** Creates a topic (idempotent on the AWS side for an existing name) and returns its reference. */
  def createTopic(topicName: String): Future[TopicReference] =
    executeFormEncodedAction(actionParameters("CreateTopic", "Name" -> topicName))
      .map(TopicReference.fromCreateTopicResult)

  /** Fetches all attributes of the given topic. */
  def getTopicAttributes(topic: TopicReference): Future[TopicAttributes] =
    executeFormEncodedAction(actionParameters("GetTopicAttributes", "TopicArn" -> topic.topicArn))
      .map(TopicAttributes.fromGetTopicAttributesResponse)

  /** Sets a single attribute on the given topic. */
  def setTopicAttribute(topic: TopicReference, attributeName: String, attributeValue: String): Future[Unit] =
    executeFormEncodedAction(actionParameters("SetTopicAttributes",
      "TopicArn" -> topic.topicArn,
      "AttributeName" -> attributeName,
      "AttributeValue" -> attributeValue)).map(_ => ())

  /** Lists the topics of this account/region. NOTE(review): does not follow NextToken pagination — behavior kept as-is. */
  def listTopics(): Future[Seq[TopicReference]] =
    executeFormEncodedAction(actionParameters("ListTopics"))
      .map(TopicReference.fromListTopicResult)

  /** Deletes the given topic. */
  def deleteTopic(topic: TopicReference): Future[Unit] =
    executeFormEncodedAction(actionParameters("DeleteTopic", "TopicArn" -> topic.topicArn)).map(_ => ())

  /** Publishes a message to all subscribers of the given topic. */
  def publish(message: String, topic: TopicReference): Future[Unit] =
    executeFormEncodedAction(actionParameters("Publish", "TopicArn" -> topic.topicArn, "Message" -> message)).map(_ => ())

  /** Subscribes an endpoint (e.g. an HTTP URL or queue ARN, per `protocol`) to the topic. */
  def subscribe(topic: TopicReference, endpoint: String, protocol: String): Future[SubscriptionReference] =
    executeFormEncodedAction(actionParameters("Subscribe",
      "TopicArn" -> topic.topicArn,
      "Endpoint" -> endpoint,
      "Protocol" -> protocol))
      .map(SubscriptionReference.fromSubscribeResult)

  /** Lists the subscriptions attached to the given topic. */
  def listSubscriptionsByTopics(topic: TopicReference): Future[Seq[SubscriptionReference]] =
    executeFormEncodedAction(actionParameters("ListSubscriptionsByTopic", "TopicArn" -> topic.topicArn))
      .map(SubscriptionReference.fromListSubscriptionByTopicResult)
}
| bomgar/reactive-aws | sns/src/main/scala/com/github/bomgar/sns/AwsSnsClient.scala | Scala | apache-2.0 | 3,398 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.storage.BlockManagerMessages.UpdateBlockInfo
/**
* :: DeveloperApi ::
* Stores information about a block status in a block manager.
*/
// Mirrors the fields of BlockManagerMessages.UpdateBlockInfo; see the
// companion object's apply for the conversion.
@DeveloperApi
case class BlockUpdatedInfo(
blockManagerId: BlockManagerId,
blockId: BlockId,
storageLevel: StorageLevel,
memSize: Long,
diskSize: Long,
externalBlockStoreSize: Long)
private[spark] object BlockUpdatedInfo {

  /** Converts the internal UpdateBlockInfo message into its public case-class form. */
  private[spark] def apply(updateBlockInfo: UpdateBlockInfo): BlockUpdatedInfo = {
    import updateBlockInfo._
    BlockUpdatedInfo(blockManagerId, blockId, storageLevel, memSize, diskSize, externalBlockStoreSize)
  }
}
| ArvinDevel/onlineAggregationOnSparkV2 | core/src/main/scala/org/apache/spark/storage/BlockUpdatedInfo.scala | Scala | apache-2.0 | 1,621 |
/**
* This code is generated using [[https://www.scala-sbt.org/contraband/ sbt-contraband]].
*/
// DO NOT EDIT MANUALLY
package sbt.internal.langserver.codec
// Aggregates every sbt language-server codec format into one mix-in trait.
// NOTE(review): this file is generated by sbt-contraband (see header above) —
// changes here are lost on regeneration; edit the contraband schema instead.
trait JsonProtocol extends sjsonnew.BasicJsonProtocol
with sbt.internal.langserver.codec.PositionFormats
with sbt.internal.langserver.codec.RangeFormats
with sbt.internal.langserver.codec.LocationFormats
with sbt.internal.langserver.codec.DiagnosticFormats
with sbt.internal.util.codec.JValueFormats
with sbt.internal.langserver.codec.ClientCapabilitiesFormats
with sbt.internal.langserver.codec.InitializeParamsFormats
with sbt.internal.langserver.codec.SaveOptionsFormats
with sbt.internal.langserver.codec.TextDocumentSyncOptionsFormats
with sbt.internal.langserver.codec.ServerCapabilitiesFormats
with sbt.internal.langserver.codec.InitializeResultFormats
with sbt.internal.langserver.codec.LogMessageParamsFormats
with sbt.internal.langserver.codec.PublishDiagnosticsParamsFormats
with sbt.internal.langserver.codec.SbtExecParamsFormats
with sbt.internal.langserver.codec.CancelRequestParamsFormats
with sbt.internal.langserver.codec.TextDocumentIdentifierFormats
with sbt.internal.langserver.codec.TextDocumentPositionParamsFormats
with sbt.internal.langserver.codec.TextDocumentPositionParamsInterfaceFormats
object JsonProtocol extends JsonProtocol | sbt/sbt | protocol/src/main/contraband-scala/sbt/internal/langserver/codec/JsonProtocol.scala | Scala | apache-2.0 | 1,353 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets that meet specific criteria, giving a quick overview of the dataset's contents without deeper analysis.