code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import java.util.Map.Entry
import com.typesafe.scalalogging.LazyLogging
import org.apache.accumulo.core.client.IteratorSetting
import org.apache.accumulo.core.data._
import org.geotools.factory.Hints
import org.locationtech.geomesa.accumulo.AccumuloFeatureIndexType
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.index.iterators.DensityScan
import org.locationtech.geomesa.index.iterators.DensityScan.DensityResult
import org.locationtech.geomesa.utils.geotools.GeometryUtils
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
/**
 * Density iterator - only works on kryo-encoded features.
 *
 * The class body is empty by design: `BaseAggregatingIterator` drives the Accumulo
 * aggregation lifecycle over `DensityResult`, and the mixed-in `DensityScan` supplies
 * the density-map computation.
 */
class KryoLazyDensityIterator extends BaseAggregatingIterator[DensityResult] with DensityScan
object KryoLazyDensityIterator extends LazyLogging {

  // Default Accumulo priority for the density iterator when the caller does not override it.
  val DEFAULT_PRIORITY = 25

  /**
   * Creates an iterator config for the kryo density iterator.
   *
   * @param sft         simple feature type being scanned
   * @param index       feature index the scan runs against
   * @param filter      optional filter applied before aggregation
   * @param hints       query hints forwarded to `DensityScan.configure`
   * @param deduplicate whether duplicate features should be dropped during aggregation
   * @param priority    Accumulo iterator priority, defaults to [[DEFAULT_PRIORITY]]
   * @return the configured `IteratorSetting` to attach to the scan
   */
  def configure(sft: SimpleFeatureType,
                index: AccumuloFeatureIndexType,
                filter: Option[Filter],
                hints: Hints,
                deduplicate: Boolean,
                priority: Int = DEFAULT_PRIORITY): IteratorSetting = {
    val is = new IteratorSetting(priority, "density-iter", classOf[KryoLazyDensityIterator])
    BaseAggregatingIterator.configure(is, deduplicate, None)
    // copy the density-scan options computed from the query onto the iterator setting
    DensityScan.configure(sft, index, filter, hints).foreach { case (k, v) => is.addOption(k, v) }
    is
  }

  /**
   * Adapts the iterator to create simple features.
   * WARNING - the same feature is re-used and mutated - the iterator stream should be operated on serially.
   *
   * @return a function converting a raw Accumulo key/value entry into a (shared, mutable) SimpleFeature
   */
  def kvsToFeatures(): (Entry[Key, Value]) => SimpleFeature = {
    // single feature instance shared across all invocations of the returned function
    val sf = new ScalaSimpleFeature(DensityScan.DensitySft, "")
    sf.setAttribute(0, GeometryUtils.zeroPoint)
    (e: Entry[Key, Value]) => {
      // Return value in user data so it's preserved when passed through a RetypingFeatureCollection
      sf.getUserData.put(DensityScan.DensityValueKey, e.getValue.get())
      sf
    }
  }
}
| ronq/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/iterators/KryoLazyDensityIterator.scala | Scala | apache-2.0 | 2,570 |
// Compiler regression test (pos/t0049): verifies that a block expression containing a
// local class definition is accepted as a superclass constructor argument.
// Do not "clean up" — the unusual shape is the point of the test.
class C1(x: AnyRef) {};
class C2 extends C1({ class A extends AnyRef {}; (new A) : AnyRef }) {};
| AlexSikia/dotty | tests/pos/t0049.scala | Scala | bsd-3-clause | 98 |
package fr.acinq.bitcoin
import fr.acinq.aws.ses.Email
import scala.concurrent.Await
/**
 * Minimal example of sending an email through AWS SES using the async client.
 *
 * NOTE(review): the credentials are intentionally blank — supply real values (or rely on
 * the default AWS credential chain) before running this example.
 */
object Example extends App {
  System.setProperty("aws.accessKeyId", "")
  System.setProperty("aws.secretKey", "")

  import fr.acinq.aws.Implicits.EU_WEST_1.ses
  import scala.concurrent.duration._

  // Blocking at the very edge of a demo program is acceptable; `5.seconds` (dotted) is
  // used instead of the postfix `5 seconds`, which requires the discouraged
  // scala.language.postfixOps and is deprecated in newer Scala versions.
  Await.result(
    Email(
      from = "noreply@flipcoin.fr",
      to = "pm@flipcoin.fr",
      subject = "test",
      text = "text content",
      html = "<html><p>Hello !</p></html>"
    ).send(),
    5.seconds
  )
}
| ACINQ/acinq-tools | aws-async-api/src/main/scala/fr/acinq/aws/Example.scala | Scala | apache-2.0 | 467 |
package actors
import akka.actor._
import models.db.Tables
import models.db.Tables.PropertiesRow
import play.api.Logger
import play.api.libs.json.JsObject
import services.db.DBService
import play.api.libs.json.{JsArray, Json}
import play.api.libs.ws.WSClient
import services.nestoria._
import utils.db.TetraoPostgresDriver.api._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}
/**
 * Actor that (a) fetches property listings for Brighton from the Nestoria API and stores
 * them in the database, and (b) answers keyword-containment queries over the stored rows.
 */
class RequestActor(ws: WSClient, database: DBService) extends Actor {

  import RequestActor._

  override def receive = {
    case RequestNestoriaData =>
      Logger.info("Requesting data from Nestoria servers")
      // Build the search-listings URL: UK API, "buy" listings, place name "brighton".
      val reqUrl = Nestoria(UK, SearchListings, pretty = true)
        .withParams(ListingType("buy"))
        .searchFor("brighton").url
      val request = ws.url(reqUrl).get()
      request.onComplete {
        case Success(response) =>
          Logger.info("Got data from Nestoria server")
          val listings = (response.json \\ "response" \\ "listings").as[JsArray].value
          // keep only the listings that deserialize successfully into PropertiesRow
          val propRows = listings.map(o => Json.fromJson[PropertiesRow](o.as[JsObject])).filter(_.isSuccess).map(_.get)
          // NOTE(review): these two `run` calls are issued independently — if
          // DBService.run is asynchronous, the delete is not guaranteed to complete
          // before the insert starts, so freshly inserted rows could be wiped out.
          // Consider sequencing them — TODO confirm DBService.run semantics.
          database.run(Tables.Properties.delete)
          database.run((Tables.Properties returning Tables.Properties) ++= propRows)
        case Failure(e) =>
          Logger.info("Failure, while requesting data from Nestoria servers.")
          Logger.info(e.getMessage)
      }
    case LookForKeywords(kws) =>
      // `@>` = Postgres array containment: rows whose keywords contain all of `kws`
      val queryResult = database.run(Tables.Properties.filter(_.keywords @> kws).result)
      // NOTE(review): if `run` returns a Future this serializes the Future itself, not
      // the rows — presumably a suitable Writes exists or `run` is synchronous; verify.
      val jsonResult = Json.toJson(queryResult)
      sender() ! jsonResult
  }
}
object RequestActor {
  /** Props factory so callers never construct the actor with `new` directly. */
  def props(ws: WSClient, db: DBService) = Props(new RequestActor(ws, db))

  /** Message: fetch fresh listings from Nestoria and replace the stored rows. */
  case object RequestNestoriaData
  /** Message: reply (to sender) with the stored rows matching all given keywords, as JSON. */
  case class LookForKeywords(keywords: List[String])
}
| alexFrankfurt/nest-api | app/actors/RequestActor.scala | Scala | mit | 1,826 |
package io.github.binaryfoo.lagotto
import java.io.File
import io.github.binaryfoo.lagotto.JposTimestamp.DateTimeExtension
import io.github.binaryfoo.lagotto.reader.LogReader
import org.joda.time.DateTime
/** Tests for request/response pairing and coalescing of jPOS log entries. */
class MsgPairTest extends LagoTest {

  "A single request and response pair" should "be matched" in {
    val request = JposEntry("0" -> "0800", "11" -> "1")
    val response = JposEntry("0" -> "0810", "11" -> "000001")
    val pairs = MsgPair.pair(iteratorOver(request, response)).toStream
    pairs should have size 1
    assert(pairs.head.request === request)
    assert(pairs.head.response === response)
  }

  // field 11 (STAN) need not be numeric for pairing to work
  it should "pair when field 11 is not numeric" in {
    val request = JposEntry("0" -> "0800", "11" -> "abc")
    val response = JposEntry("0" -> "0810", "11" -> "abc")
    val pairs = MsgPair.pair(iteratorOver(request, response)).toStream
    pairs shouldEqual List(MsgPair(request, response))
  }

  it should "pair response with most recent request in face of duplicates" in {
    val request = JposEntry("0" -> "0800", "11" -> "1", "id" -> "1")
    val dupeRequest = JposEntry("0" -> "0800", "11" -> "1", "id" -> "2")
    val response = JposEntry("0" -> "0810", "11" -> "1", "id" -> "3")
    val pairs = MsgPair.pair(iteratorOver(request, dupeRequest, response)).toStream
    pairs shouldEqual List(MsgPair(dupeRequest, response))
  }

  // a response in a different realm must not be paired with the request
  it should "pairing should use realm" in {
    val request = JposEntry("0" -> "0800", "11" -> "1", "realm" -> "a")
    val wrongResponse = JposEntry("0" -> "0810", "11" -> "1", "realm" -> "b")
    val response = JposEntry("0" -> "0810", "11" -> "1", "realm" -> "a")
    val pairs = MsgPair.pair(iteratorOver(request, wrongResponse, response)).toStream
    pairs shouldEqual List(MsgPair(request, response))
  }

  "Log entries that aren't messages" should "be ignored" in {
    val entries = LogReader().read(new File(testFile("basic.xml")))
    MsgPair.pair(entries) should have size 0
  }

  "A pair read from a file" should "have a round trip time" in {
    val pairs = MsgPair.pair(LogReader().read(new File(testFile("a-pair.xml")))).toStream
    pairs.head.rtt shouldEqual 808
  }

  it should "provide access to fields and attributes" in {
    import fieldParser.FieldExpr.expressionFor
    val pair = MsgPair.pair(LogReader().read(new File(testFile("a-pair.xml")))).next()
    pair("0") shouldEqual "0800"
    pair("48.1") shouldEqual "subfield 48.1"
    expressionFor("req.time")(pair) shouldEqual "00:00:03.292"
    expressionFor("request.time")(pair) shouldEqual "00:00:03.292"
    expressionFor("response.time")(pair) shouldEqual "00:00:04.100"
    pair("rtt") shouldEqual "808"
    pair("mti") shouldEqual "0800"
    expressionFor("timestamp")(pair) shouldEqual "2014-11-24 00:00:03.292"
  }

  // helper: a MsgPair whose request has the given fields and whose response is empty
  def pair(requestFields: (String, String)*): MsgPair = MsgPair(JposEntry(requestFields : _*), JposEntry())

  "A sequence of msgs" should "be coalesceable by mti" in {
    val now = new DateTime()
    val auth1 = pair("at" -> now.asJposAt, "0" -> "0200")
    val auth2 = pair("at" -> now.plusMillis(100).asJposAt, "0" -> "0200")
    val auth3 = pair("at" -> now.plusMillis(200).asJposAt, "0" -> "0200")
    val key1 = pair("at" -> now.plusMillis(300).asJposAt, "0" -> "0820")
    val auth4 = pair("at" -> now.plusMillis(400).asJposAt, "0" -> "0200")
    val seq = iteratorOver(auth1, auth2, auth3, key1, auth4)
    val coalesced = MsgPair.coalesce(seq, _.mti).toStream
    // first of a run is kept, the rest collapse into a Group(count, key)
    coalesced shouldEqual List(auth1, Group(2, "0200"), key1, auth4)
  }

  it should "not coalesce on 53 when 53 changes for each message" in {
    val one = pair("53" -> "1")
    val two = pair("53" -> "2")
    MsgPair.coalesce(iteratorOver(one, two), _("53")).toStream shouldEqual List(one, two)
  }

  it should "coalesce two messages with same value in 53" in {
    val one = pair("53" -> "1")
    val two = pair("53" -> "1")
    MsgPair.coalesce(iteratorOver(one, two), _("53")).toStream shouldEqual List(one, Group(1, "1"))
  }

  "A single pair" should "be reduceable to a map" in {
    val p = MsgPair(JposEntry("11" -> "123456", "37" -> "ignored"), JposEntry("39" -> "00"))
    p.toSeq("11", "39") shouldEqual Seq("123456", "00")
  }
}
| binaryfoo/lagotto | src/test/scala/io/github/binaryfoo/lagotto/MsgPairTest.scala | Scala | mit | 4,201 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.core.server
import java.net.InetSocketAddress
import akka.Done
import akka.actor.ActorSystem
import akka.actor.CoordinatedShutdown
import akka.http.play.WebSocketHandler
import akka.http.scaladsl.model.headers.Expect
import akka.http.scaladsl.model.ws.UpgradeToWebSocket
import akka.http.scaladsl.model.headers
import akka.http.scaladsl.model._
import akka.http.scaladsl.settings.ParserSettings
import akka.http.scaladsl.settings.ServerSettings
import akka.http.scaladsl.util.FastFuture._
import akka.http.scaladsl.ConnectionContext
import akka.http.scaladsl.Http
import akka.http.scaladsl.HttpConnectionContext
import akka.stream.Materializer
import akka.stream.TLSClientAuth
import akka.stream.scaladsl._
import akka.util.ByteString
import com.typesafe.config.Config
import com.typesafe.config.ConfigMemorySize
import javax.net.ssl._
import play.api._
import play.api.http.DefaultHttpErrorHandler
import play.api.http.HeaderNames
import play.api.http.HttpErrorHandler
import play.api.http.HttpErrorInfo
import play.api.http.{ HttpProtocol => PlayHttpProtocol }
import play.api.http.Status
import play.api.internal.libs.concurrent.CoordinatedShutdownSupport
import play.api.libs.streams.Accumulator
import play.api.mvc._
import play.api.mvc.akkahttp.AkkaHttpHandler
import play.core.server.akkahttp.AkkaServerConfigReader
import play.api.routing.Router
import play.core.ApplicationProvider
import play.core.server.Server.ServerStoppedReason
import play.core.server.akkahttp.AkkaModelConversion
import play.core.server.akkahttp.HttpRequestDecoder
import play.core.server.common.ReloadCache
import play.core.server.common.ServerDebugInfo
import play.core.server.common.ServerResultUtils
import play.core.server.ssl.ServerSSLEngine
import scala.concurrent.duration._
import scala.concurrent.Await
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import scala.util.control.NonFatal
import scala.util.Failure
import scala.util.Success
import scala.util.Try
/**
* Starts a Play server using Akka HTTP.
*/
/**
 * Starts a Play server using Akka HTTP.
 *
 * Construction order matters in this class: configuration fields are read first, then
 * the HTTP/HTTPS bindings are created (which start accepting connections), and finally
 * the shutdown tasks are registered with CoordinatedShutdown.
 */
class AkkaHttpServer(context: AkkaHttpServer.Context) extends Server {

  import AkkaHttpServer._

  assert(
    context.config.port.isDefined || context.config.sslPort.isDefined,
    "AkkaHttpServer must be given at least one of an HTTP and an HTTPS port"
  )

  override def mode: Mode = context.config.mode
  override def applicationProvider: ApplicationProvider = context.appProvider

  /** Helper to access server configuration under the `play.server` prefix. */
  private val serverConfig = context.config.configuration.get[Configuration]("play.server")

  /** Helper to access server configuration under the `play.server.akka` prefix. */
  private val akkaServerConfig = serverConfig.get[Configuration]("akka")

  private val akkaServerConfigReader = new AkkaServerConfigReader(akkaServerConfig)

  // lazy: akkaHttpConfig is a val initialized further down this class body
  private lazy val initialSettings = ServerSettings(akkaHttpConfig)

  // Timeouts and size limits read once at startup from configuration.
  private val httpIdleTimeout = serverConfig.get[Duration]("http.idleTimeout")
  private val httpsIdleTimeout = serverConfig.get[Duration]("https.idleTimeout")
  private val requestTimeout = akkaServerConfig.get[Duration]("requestTimeout")
  private val bindTimeout = akkaServerConfig.get[FiniteDuration]("bindTimeout")
  private val terminationTimeout = akkaServerConfig.getOptional[FiniteDuration]("terminationTimeout")
  private val maxContentLength =
    Server.getPossiblyInfiniteBytes(serverConfig.underlying, "max-content-length", "akka.max-content-length")
  private val maxHeaderValueLength =
    serverConfig.getDeprecated[ConfigMemorySize]("max-header-size", "akka.max-header-value-length").toBytes.toInt
  private val includeTlsSessionInfoHeader = akkaServerConfig.get[Boolean]("tls-session-info-header")
  // getHostHeader returns Either; a bad configured host header is a fatal startup error
  private val defaultHostHeader = akkaServerConfigReader.getHostHeader.fold(throw _, identity)
  private val transparentHeadRequests = akkaServerConfig.get[Boolean]("transparent-head-requests")
  private val serverHeaderConfig = akkaServerConfig.getOptional[String]("server-header")
  // only set a Server response header when a non-empty value was configured
  private val serverHeader = serverHeaderConfig.collect {
    case s if s.nonEmpty => headers.Server(s)
  }
  private val httpsNeedClientAuth = serverConfig.get[Boolean]("https.needClientAuth")
  private val httpsWantClientAuth = serverConfig.get[Boolean]("https.wantClientAuth")
  private val illegalResponseHeaderValueProcessingMode =
    akkaServerConfig.get[String]("illegal-response-header-value-processing-mode")
  private val wsBufferLimit = serverConfig.get[ConfigMemorySize]("websocket.frame.maxLength").toBytes.toInt

  private val http2Enabled: Boolean = akkaServerConfig.getOptional[Boolean]("http2.enabled").getOrElse(false)

  /**
   * Play's configuration for the Akka HTTP server. Initialized by a call to [[createAkkaHttpConfig()]].
   *
   * Note that the rest of the [[ActorSystem]] outside Akka HTTP is initialized by the configuration in [[context.config]].
   */
  protected val akkaHttpConfig: Config = createAkkaHttpConfig()

  /**
   * Creates the configuration used to initialize the Akka HTTP subsystem. By default this uses the ActorSystem's
   * configuration, with an additional setting patched in to enable or disable HTTP/2.
   */
  protected def createAkkaHttpConfig(): Config =
    Configuration("akka.http.server.preview.enable-http2" -> http2Enabled)
      .withFallback(Configuration(context.actorSystem.settings.config))
      .underlying

  /** Play's parser settings for Akka HTTP. Initialized by a call to [[createParserSettings()]]. */
  protected val parserSettings: ParserSettings = createParserSettings()

  /** Called by Play when creating its Akka HTTP parser settings. Result stored in [[parserSettings]]. */
  protected def createParserSettings(): ParserSettings =
    ParserSettings(akkaHttpConfig)
      .withMaxContentLength(maxContentLength)
      .withMaxHeaderValueLength(maxHeaderValueLength)
      .withIncludeTlsSessionInfoHeader(includeTlsSessionInfoHeader)
      .withModeledHeaderParsing(false) // Disable most of Akka HTTP's header parsing; use RawHeaders instead

  /**
   * Create Akka HTTP settings for a given port binding.
   *
   * Called by Play when binding a handler to a server port. Will be called once per port. Called by the
   * [[createServerBinding()]] method.
   */
  protected def createServerSettings(
      port: Int,
      connectionContext: ConnectionContext,
      secure: Boolean
  ): ServerSettings = {
    initialSettings
      .withTimeouts(
        initialSettings.timeouts
          .withIdleTimeout(if (secure) httpsIdleTimeout else httpIdleTimeout)
          .withRequestTimeout(requestTimeout)
      )
      // Play needs these headers to fill in fields in its request model
      .withRawRequestUriHeader(true)
      .withRemoteAddressHeader(true)
      .withTransparentHeadRequests(transparentHeadRequests)
      .withServerHeader(serverHeader)
      .withDefaultHostHeader(defaultHostHeader)
      .withParserSettings(parserSettings)
  }

  // Each request needs an id
  private val requestIDs = new java.util.concurrent.atomic.AtomicLong(0)

  /**
   * Values that are cached based on the current application.
   */
  private case class ReloadCacheValues(
      resultUtils: ServerResultUtils,
      modelConversion: AkkaModelConversion,
      serverDebugInfo: Option[ServerDebugInfo]
  )

  /**
   * A helper to cache values that are derived from the current application.
   */
  private val reloadCache = new ReloadCache[ReloadCacheValues] {
    protected override def reloadValue(tryApp: Try[Application]): ReloadCacheValues = {
      val serverResultUtils = reloadServerResultUtils(tryApp)
      val forwardedHeaderHandler = reloadForwardedHeaderHandler(tryApp)
      val illegalResponseHeaderValue = ParserSettings.IllegalResponseHeaderValueProcessingMode(
        illegalResponseHeaderValueProcessingMode
      )
      val modelConversion =
        new AkkaModelConversion(serverResultUtils, forwardedHeaderHandler, illegalResponseHeaderValue)
      ReloadCacheValues(
        resultUtils = serverResultUtils,
        modelConversion = modelConversion,
        serverDebugInfo = reloadDebugInfo(tryApp, provider)
      )
    }
  }

  // ----------------------------------------------------------------------
  // CAUTION
  // NO fields (val) below this point that are accessed in handleRequest.
  // They might not yet be initialized when handleRequest is run for the
  // first request. In doubt use `lazy val`.
  // ----------------------------------------------------------------------

  /**
   * Bind Akka HTTP to a port to listen for incoming connections. Calls [[createServerSettings()]] to configure the
   * binding and [[handleRequest()]] as a handler for the binding.
   */
  private def createServerBinding(
      port: Int,
      connectionContext: ConnectionContext,
      secure: Boolean
  ): Http.ServerBinding = {
    // TODO: pass in Inet.SocketOption and LoggerAdapter params?
    val bindingFuture: Future[Http.ServerBinding] =
      try {
        Http()(context.actorSystem)
          .bindAndHandleAsync(
            handler = handleRequest(_, connectionContext.isSecure),
            interface = context.config.address,
            port = port,
            connectionContext = connectionContext,
            settings = createServerSettings(port, connectionContext, secure)
          )(context.materializer)
      } catch {
        // Http2SupportNotPresentException is private[akka] so we need to match the name
        case e: Throwable if e.getClass.getSimpleName == "Http2SupportNotPresentException" =>
          throw new RuntimeException(
            "HTTP/2 enabled but akka-http2-support not found. " +
              "Add .enablePlugins(PlayAkkaHttp2Support) in build.sbt",
            e
          )
      }
    // startup blocks until the port is bound (or bindTimeout elapses)
    Await.result(bindingFuture, bindTimeout)
  }

  // Lazy since it will only be required when HTTPS is bound.
  private lazy val sslContext: SSLContext =
    ServerSSLEngine.createSSLEngineProvider(context.config, applicationProvider).sslContext()

  private val httpServerBinding = context.config.port.map(port =>
    createServerBinding(
      port,
      HttpConnectionContext(),
      secure = false
    )
  )

  private val httpsServerBinding = context.config.sslPort.map { port =>
    val connectionContext =
      try {
        val clientAuth: Option[TLSClientAuth] = createClientAuth()
        ConnectionContext.https(
          sslContext = sslContext,
          clientAuth = clientAuth
        )
      } catch {
        // a broken SSL context degrades the binding to plaintext rather than failing startup
        case NonFatal(e) =>
          logger.error(s"Cannot load SSL context", e)
          ConnectionContext.noEncryption()
      }
    createServerBinding(port, connectionContext, secure = true)
  }

  /** Creates AkkaHttp TLSClientAuth */
  protected def createClientAuth(): Option[TLSClientAuth] = {
    // Need has precedence over Want, hence the if/else if
    if (httpsNeedClientAuth) {
      Some(TLSClientAuth.need)
    } else if (httpsWantClientAuth) {
      Some(TLSClientAuth.want)
    } else {
      None
    }
  }

  if (http2Enabled) {
    logger.info(s"Enabling HTTP/2 on Akka HTTP server...")
    if (httpsServerBinding.isEmpty) {
      val logMessage = s"No HTTPS server bound. Only binding HTTP. Many user agents only support HTTP/2 over HTTPS."
      // warn in dev/test mode, since we are likely accessing the server directly, but debug otherwise
      mode match {
        case Mode.Dev | Mode.Test => logger.warn(logMessage)
        case _ => logger.debug(logMessage)
      }
    }
  }

  // Accessors for the per-application cached values (recomputed on app reload).
  private def resultUtils(tryApp: Try[Application]): ServerResultUtils =
    reloadCache.cachedFrom(tryApp).resultUtils
  private def modelConversion(tryApp: Try[Application]): AkkaModelConversion =
    reloadCache.cachedFrom(tryApp).modelConversion

  /** Entry point for every incoming request: convert, route to a handler, and execute it. */
  private def handleRequest(request: HttpRequest, secure: Boolean): Future[HttpResponse] = {
    val decodedRequest = HttpRequestDecoder.decodeRequest(request)
    val tryApp = applicationProvider.get
    val (convertedRequestHeader, requestBodySource): (RequestHeader, Either[ByteString, Source[ByteString, Any]]) = {
      val remoteAddress: InetSocketAddress = remoteAddressOfRequest(request)
      val requestId: Long = requestIDs.incrementAndGet()
      modelConversion(tryApp).convertRequest(
        requestId = requestId,
        remoteAddress = remoteAddress,
        secureProtocol = secure,
        request = decodedRequest
      )
    }
    val debugInfoRequestHeader: RequestHeader = {
      val debugInfo: Option[ServerDebugInfo] = reloadCache.cachedFrom(tryApp).serverDebugInfo
      ServerDebugInfo.attachToRequestHeader(convertedRequestHeader, debugInfo)
    }
    val (taggedRequestHeader, handler) = Server.getHandlerFor(debugInfoRequestHeader, tryApp)
    val responseFuture = executeHandler(
      tryApp,
      decodedRequest,
      taggedRequestHeader,
      requestBodySource,
      handler
    )
    responseFuture
  }

  /** Extracts the client address from the `Remote-Address` header added by Akka HTTP. */
  def remoteAddressOfRequest(req: HttpRequest): InetSocketAddress = {
    req.header[headers.`Remote-Address`] match {
      case Some(headers.`Remote-Address`(RemoteAddress.IP(ip, Some(port)))) =>
        new InetSocketAddress(ip, port)
      case _ => throw new IllegalStateException("`Remote-Address` header was missing")
    }
  }

  /** Dispatches to the right execution path for the handler type (action / websocket / raw). */
  private def executeHandler(
      tryApp: Try[Application],
      request: HttpRequest,
      taggedRequestHeader: RequestHeader,
      requestBodySource: Either[ByteString, Source[ByteString, _]],
      handler: Handler
  ): Future[HttpResponse] = {
    val upgradeToWebSocket = request.header[UpgradeToWebSocket]

    // Get the app's HttpErrorHandler or fallback to a default value
    val errorHandler: HttpErrorHandler = tryApp match {
      case Success(app) => app.errorHandler
      case Failure(_) => DefaultHttpErrorHandler
    }

    // default execution context used for executing the action
    implicit val defaultExecutionContext: ExecutionContext = tryApp match {
      case Success(app) => app.actorSystem.dispatcher
      case Failure(_) => context.actorSystem.dispatcher
    }

    // materializer used for executing the action
    implicit val mat: Materializer = tryApp match {
      case Success(app) => app.materializer
      case Failure(_) => context.materializer
    }

    (handler, upgradeToWebSocket) match {
      //execute normal action
      case (action: EssentialAction, _) =>
        runAction(tryApp, request, taggedRequestHeader, requestBodySource, action, errorHandler)
      case (websocket: WebSocket, Some(upgrade)) =>
        websocket(taggedRequestHeader).fast.flatMap {
          case Left(result) =>
            modelConversion(tryApp).convertResult(taggedRequestHeader, result, request.protocol, errorHandler)
          case Right(flow) =>
            // For now, like Netty, select an arbitrary subprotocol from the list of subprotocols proposed by the client
            // Eventually it would be better to allow the handler to specify the protocol it selected
            // See also https://github.com/playframework/playframework/issues/7895
            val selectedSubprotocol = upgrade.requestedProtocols.headOption
            Future.successful(WebSocketHandler.handleWebSocket(upgrade, flow, wsBufferLimit, selectedSubprotocol))
        }
      case (websocket: WebSocket, None) =>
        // WebSocket handler for non WebSocket request
        logger.trace(s"Bad websocket request: $request")
        val action = EssentialAction(_ =>
          Accumulator.done(
            Results
              .Status(Status.UPGRADE_REQUIRED)("Upgrade to WebSocket required")
              .withHeaders(
                HeaderNames.UPGRADE -> "websocket",
                HeaderNames.CONNECTION -> HeaderNames.UPGRADE
              )
          )
        )
        runAction(tryApp, request, taggedRequestHeader, requestBodySource, action, errorHandler)
      case (akkaHttpHandler: AkkaHttpHandler, _) =>
        akkaHttpHandler(request)
      case (unhandled, _) => sys.error(s"AkkaHttpServer doesn't handle Handlers of this type: $unhandled")
    }
  }

  /** Runs an EssentialAction: feeds the request body into the action's accumulator and converts the result. */
  private def runAction(
      tryApp: Try[Application],
      request: HttpRequest,
      taggedRequestHeader: RequestHeader,
      requestBodySource: Either[ByteString, Source[ByteString, _]],
      action: EssentialAction,
      errorHandler: HttpErrorHandler
  )(implicit ec: ExecutionContext, mat: Materializer): Future[HttpResponse] = {
    // wrap in Future so exceptions thrown while building the accumulator become failed futures
    val futureAcc: Future[Accumulator[ByteString, Result]] = Future(action(taggedRequestHeader))

    val source = if (request.header[Expect].contains(Expect.`100-continue`)) {
      // If we expect 100 continue, then we must not feed the source into the accumulator until the accumulator
      // requests demand. This is due to a semantic mismatch between Play and Akka-HTTP, Play signals to continue
      // by requesting demand, Akka-HTTP signals to continue by attaching a sink to the source. See
      // https://github.com/akka/akka/issues/17782 for more details.
      requestBodySource.map(source => Source.lazySource(() => source))
    } else {
      requestBodySource
    }

    // here we use FastFuture so the flatMap shouldn't actually need the executionContext
    val resultFuture: Future[Result] = futureAcc.fast
      .flatMap { actionAccumulator =>
        source match {
          case Left(bytes) if bytes.isEmpty => actionAccumulator.run()
          case Left(bytes) => actionAccumulator.run(bytes)
          case Right(s) => actionAccumulator.run(s)
        }
      }
      .recoverWith {
        case _: EntityStreamSizeException =>
          errorHandler.onClientError(
            taggedRequestHeader.addAttr(HttpErrorHandler.Attrs.HttpErrorInfo, HttpErrorInfo("server-backend")),
            Status.REQUEST_ENTITY_TOO_LARGE,
            "Request Entity Too Large"
          )
        case e: Throwable =>
          errorHandler.onServerError(taggedRequestHeader, e)
      }
    val responseFuture: Future[HttpResponse] = resultFuture.flatMap { result =>
      val cleanedResult: Result = resultUtils(tryApp).prepareCookies(taggedRequestHeader, result)
      modelConversion(tryApp).convertResult(taggedRequestHeader, cleanedResult, request.protocol, errorHandler)
    }
    responseFuture
  }

  // Log the bound addresses, except in tests where the noise is unwanted.
  mode match {
    case Mode.Test =>
    case _ =>
      httpServerBinding.foreach { http =>
        logger.info(s"Listening for HTTP on ${http.localAddress}")
      }
      httpsServerBinding.foreach { https =>
        logger.info(s"Listening for HTTPS on ${https.localAddress}")
      }
  }

  override def stop(): Unit = CoordinatedShutdownSupport.syncShutdown(context.actorSystem, ServerStoppedReason)

  // Using CoordinatedShutdown means that instead of invoking code imperatively in `stop`
  // we have to register it as early as possible as CoordinatedShutdown tasks and
  // then `stop` runs CoordinatedShutdown.
  registerShutdownTasks()

  /** Registers unbind/terminate/stop-hook tasks with the ActorSystem's CoordinatedShutdown. */
  private def registerShutdownTasks(): Unit = {
    implicit val exCtx: ExecutionContext = context.actorSystem.dispatcher

    // Register all shutdown tasks
    val cs = CoordinatedShutdown(context.actorSystem)
    cs.addTask(CoordinatedShutdown.PhaseBeforeServiceUnbind, "trace-server-stop-request") { () =>
      if (mode != Mode.Test) logger.info("Stopping Akka HTTP server...")
      Future.successful(Done)
    }

    // The termination hard-deadline is either what was configured by the user
    // or defaults to `service-unbind` phase timeout.
    val serviceUnboundTimeout = cs.timeout(CoordinatedShutdown.PhaseServiceUnbind)
    val serverTerminateTimeout = terminationTimeout.getOrElse(serviceUnboundTimeout)
    if (serverTerminateTimeout > serviceUnboundTimeout)
      logger.warn(
        s"""The value for `play.server.akka.terminationTimeout` [$serverTerminateTimeout] is higher than the total `service-unbind.timeout` duration [$serviceUnboundTimeout].
           |Set `akka.coordinated-shutdown.phases.service-unbind.timeout` to an equal (or greater) value to prevent unexpected server termination.""".stripMargin
      )

    cs.addTask(CoordinatedShutdown.PhaseServiceUnbind, "akka-http-server-unbind") { () =>
      def terminate(binding: Option[Http.ServerBinding]): Future[Done] = {
        binding
          .map { binding =>
            logger.info(s"Terminating server binding for ${binding.localAddress}")
            binding.terminate(serverTerminateTimeout).map(_ => Done)
          }
          .getOrElse {
            Future.successful(Done)
          }
      }
      for {
        _ <- terminate(httpServerBinding)
        _ <- terminate(httpsServerBinding)
      } yield Done
    }

    // Call provided hook
    // Do this last because the hooks were created before the server,
    // so the server might need them to run until the last moment.
    cs.addTask(CoordinatedShutdown.PhaseBeforeActorSystemTerminate, "user-provided-server-stop-hook") { () =>
      logger.info("Running provided shutdown stop hooks")
      context.stopHook().map(_ => Done)
    }
    cs.addTask(CoordinatedShutdown.PhaseBeforeActorSystemTerminate, "shutdown-logger") { () =>
      Future {
        super.stop()
        Done
      }
    }
  }

  override lazy val mainAddress: InetSocketAddress = {
    // prefer the HTTP binding's address; fall back to HTTPS
    httpServerBinding.orElse(httpsServerBinding).map(_.localAddress).get
  }

  // Endpoint descriptors for each protocol/scheme combination, derived from the bindings.
  private lazy val Http1Plain = httpServerBinding
    .map(_.localAddress)
    .map(address =>
      ServerEndpoint(
        description = "Akka HTTP HTTP/1.1 (plaintext)",
        scheme = "http",
        host = context.config.address,
        port = address.getPort,
        protocols = Set(PlayHttpProtocol.HTTP_1_0, PlayHttpProtocol.HTTP_1_1),
        serverAttribute = serverHeaderConfig,
        ssl = None
      )
    )

  private lazy val Http1Encrypted = httpsServerBinding
    .map(_.localAddress)
    .map(address =>
      ServerEndpoint(
        description = "Akka HTTP HTTP/1.1 (encrypted)",
        scheme = "https",
        host = context.config.address,
        port = address.getPort,
        protocols = Set(PlayHttpProtocol.HTTP_1_0, PlayHttpProtocol.HTTP_1_1),
        serverAttribute = serverHeaderConfig,
        ssl = Option(sslContext)
      )
    )

  private lazy val Http2Plain = httpServerBinding
    .map(_.localAddress)
    .map(address =>
      ServerEndpoint(
        description = "Akka HTTP HTTP/2 (plaintext)",
        scheme = "http",
        host = context.config.address,
        port = address.getPort,
        protocols = Set(PlayHttpProtocol.HTTP_2_0),
        serverAttribute = serverHeaderConfig,
        ssl = None
      )
    )

  private lazy val Http2Encrypted = httpsServerBinding
    .map(_.localAddress)
    .map(address =>
      ServerEndpoint(
        description = "Akka HTTP HTTP/2 (encrypted)",
        scheme = "https",
        host = context.config.address,
        port = address.getPort,
        protocols = Set(PlayHttpProtocol.HTTP_1_0, PlayHttpProtocol.HTTP_1_1, PlayHttpProtocol.HTTP_2_0),
        serverAttribute = serverHeaderConfig,
        ssl = Option(sslContext)
      )
    )

  override val serverEndpoints: ServerEndpoints = {
    val httpEndpoint = if (http2Enabled) Http2Plain else Http1Plain
    val httpsEndpoint = if (http2Enabled) Http2Encrypted else Http1Encrypted
    ServerEndpoints(httpEndpoint.toSeq ++ httpsEndpoint.toSeq)
  }
}
/**
 * Creates an AkkaHttpServer from a given router using [[BuiltInComponents]]:
 *
 * {{{
 *   val server = AkkaHttpServer.fromRouterWithComponents(ServerConfig(port = Some(9002))) { components =>
 *     import play.api.mvc.Results._
 *     import components.{ defaultActionBuilder => Action }
 *     {
 *       case GET(p"/") => Action {
 *         Ok("Hello")
 *       }
 *     }
 *   }
 * }}}
 *
 * Use this together with <a href="https://www.playframework.com/documentation/2.6.x/ScalaSirdRouter">Sird Router</a>.
 */
object AkkaHttpServer extends ServerFromRouter {

  private val logger = Logger(classOf[AkkaHttpServer])

  /**
   * The values needed to initialize an [[AkkaHttpServer]].
   *
   * @param config Basic server configuration values.
   * @param appProvider An object which can be queried to get an Application.
   * @param actorSystem An ActorSystem that the server can use.
   * @param materializer A Materializer that the server can use.
   * @param stopHook A function that should be called by the server when it stops.
   * This function can be used to close resources that are provided to the server.
   */
  final case class Context(
      config: ServerConfig,
      appProvider: ApplicationProvider,
      actorSystem: ActorSystem,
      materializer: Materializer,
      stopHook: () => Future[_]
  )

  object Context {

    /**
     * Create a `Context` object from several common components.
     */
    def fromComponents(
        serverConfig: ServerConfig,
        application: Application,
        stopHook: () => Future[_] = () => Future.successful(())
    ): Context =
      AkkaHttpServer.Context(
        config = serverConfig,
        appProvider = ApplicationProvider(application),
        actorSystem = application.actorSystem,
        materializer = application.materializer,
        stopHook = stopHook
      )

    /**
     * Create a `Context` object from a `ServerProvider.Context`.
     */
    def fromServerProviderContext(serverProviderContext: ServerProvider.Context): Context = {
      import serverProviderContext._
      AkkaHttpServer.Context(config, appProvider, actorSystem, materializer, stopHook)
    }
  }

  /**
   * A ServerProvider for creating an AkkaHttpServer.
   */
  implicit val provider: AkkaHttpServerProvider = new AkkaHttpServerProvider

  /**
   * Create a Akka HTTP server from the given application and server configuration.
   *
   * @param application The application.
   * @param config The server configuration.
   * @return A started Akka HTTP server, serving the application.
   */
  def fromApplication(application: Application, config: ServerConfig = ServerConfig()): AkkaHttpServer = {
    new AkkaHttpServer(Context.fromComponents(config, application))
  }

  /** Builds a server whose router is produced from the given components (see [[ServerFromRouter]]). */
  protected override def createServerFromRouter(
      serverConf: ServerConfig = ServerConfig()
  )(routes: ServerComponents with BuiltInComponents => Router): Server = {
    new AkkaHttpServerComponents with BuiltInComponents with NoHttpFiltersComponents {
      override lazy val serverConfig: ServerConfig = serverConf
      override def router: Router = routes(this)
    }.server
  }
}
/**
* Knows how to create an AkkaHttpServer.
*/
class AkkaHttpServerProvider extends ServerProvider {
  /** Builds an [[AkkaHttpServer]] from the generic server-provider context. */
  override def createServer(context: ServerProvider.Context): AkkaHttpServer =
    new AkkaHttpServer(AkkaHttpServer.Context.fromServerProviderContext(context))
}
/**
* Components for building a simple Akka HTTP Server.
*/
trait AkkaHttpServerComponents extends ServerComponents {

  /** The application to serve; supplied by the concrete components cake. */
  def application: Application

  override lazy val server: AkkaHttpServer = {
    // The application must be started before the server begins serving it.
    Play.start(application)
    new AkkaHttpServer(AkkaHttpServer.Context.fromComponents(serverConfig, application, serverStopHook))
  }
}
/**
 * A convenient helper trait for constructing an [[AkkaHttpServer]]: it combines
 * [[AkkaHttpServerComponents]] with Play's built-in components and an empty HTTP
 * filter chain, so a concrete instance only needs to provide a router. For example:
 *
 * {{{
 *   val components = new DefaultAkkaHttpServerComponents {
 *     override lazy val router = {
 *       case GET(p"/") => Action(parse.json) { body =>
 *         Ok("Hello")
 *       }
 *     }
 *   }
 *   val server = components.server
 * }}}
 */
trait DefaultAkkaHttpServerComponents
    extends AkkaHttpServerComponents
    with BuiltInComponents
    with NoHttpFiltersComponents
| playframework/playframework | transport/server/play-akka-http-server/src/main/scala/play/core/server/AkkaHttpServer.scala | Scala | apache-2.0 | 27,702 |
package roles
import models.{AccessToken, User}
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import scalikejdbc.async.AsyncDBSession
import utils.Password
import utils.exceptions.InvalidUser
import scala.concurrent.Future
/**
 * Mixin for [[models.User]] adding authentication-related operations.
 */
trait AuthenticatedUser {
  this: User =>

  /**
   * Validates the supplied password against this user's stored (encrypted) password.
   *
   * The result type is declared explicitly (it was previously inferred): this method
   * is called only for its failure side effect.
   *
   * @throws InvalidUser if the encrypted form of `password` does not match.
   */
  def checkPassword(password: Password): Unit =
    if (password.cryptPassword != this.password) throw new InvalidUser

  /**
   * Replaces any existing access token for this user with a freshly created one.
   *
   * Destroys first, then creates, so at most one token exists per user afterwards.
   */
  def createAccessToken()(implicit s: AsyncDBSession): Future[AccessToken] =
    AccessToken.destroyByUserId(id) flatMap (_ => AccessToken.createByUserId(id))
}
| KIWIKIGMBH/kiwierp | kiwierp-backend/app/roles/AuthenticatedUser.scala | Scala | mpl-2.0 | 567 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.config
import java.util
import java.util.concurrent.TimeUnit
import java.util.regex.Pattern
import java.util.Properties
import com.google.common.collect.ImmutableMap
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.common.serialization.ByteArraySerializer
import org.apache.samza.SamzaException
import org.apache.samza.config.ApplicationConfig.ApplicationMode
import org.apache.samza.util.ScalaJavaUtil.JavaOptionals
import org.apache.samza.util.{Logging, StreamUtil}
import scala.collection.JavaConverters._
/**
 * Configuration key constants and defaults for Samza's Kafka integration, plus the
 * implicit conversion that enriches a plain [[Config]] with [[KafkaConfig]] accessors.
 */
object KafkaConfig {
  val TOPIC_REPLICATION_FACTOR = "replication.factor"
  val TOPIC_DEFAULT_REPLICATION_FACTOR = "2"
  val SEGMENT_BYTES = "segment.bytes"
  val MAX_MESSAGE_BYTES = "max.message.bytes"
  // The default max message bytes for log compact topic
  val DEFAULT_LOG_COMPACT_TOPIC_MAX_MESSAGE_BYTES = "1000012"

  val CHECKPOINT_SYSTEM = "task.checkpoint.system"
  val CHECKPOINT_REPLICATION_FACTOR = "task.checkpoint." + TOPIC_REPLICATION_FACTOR
  val CHECKPOINT_SEGMENT_BYTES = "task.checkpoint." + SEGMENT_BYTES
  val CHECKPOINT_MAX_MESSAGE_BYTES = "task.checkpoint." + MAX_MESSAGE_BYTES

  // Per-store changelog settings; "%s" is the store name ("default" for the global default).
  val CHANGELOG_STREAM_REPLICATION_FACTOR = "stores.%s.changelog." + TOPIC_REPLICATION_FACTOR
  val DEFAULT_CHANGELOG_STREAM_REPLICATION_FACTOR = CHANGELOG_STREAM_REPLICATION_FACTOR format "default"
  val CHANGELOG_STREAM_KAFKA_SETTINGS = "stores.%s.changelog.kafka."
  // The default segment size to use for changelog topics
  val CHANGELOG_DEFAULT_SEGMENT_SIZE = "536870912"
  val CHANGELOG_MAX_MESSAGE_BYTES = "stores.%s.changelog." + MAX_MESSAGE_BYTES

  // Helper regular expression definitions to extract/match configurations
  val CHANGELOG_STREAM_NAMES_REGEX = "stores\\\\.(.*)\\\\.changelog$"

  val JOB_COORDINATOR_REPLICATION_FACTOR = "job.coordinator." + TOPIC_REPLICATION_FACTOR
  val JOB_COORDINATOR_SEGMENT_BYTES = "job.coordinator." + SEGMENT_BYTES
  val JOB_COORDINATOR_MAX_MESSAGE_BYTES = "job.coordinator." + MAX_MESSAGE_BYTES

  val CONSUMER_CONFIGS_CONFIG_KEY = "systems.%s.consumer.%s"
  val PRODUCER_BOOTSTRAP_SERVERS_CONFIG_KEY = "systems.%s.producer.bootstrap.servers"
  val PRODUCER_CONFIGS_CONFIG_KEY = "systems.%s.producer.%s"
  val CONSUMER_ZK_CONNECT_CONFIG_KEY = "systems.%s.consumer.zookeeper.connect"

  /**
   * Defines how low a queue can get for a single system/stream/partition
   * combination before trying to fetch more messages for it.
   */
  val CONSUMER_FETCH_THRESHOLD = SystemConfig.SYSTEM_ID_PREFIX + "samza.fetch.threshold"

  val DEFAULT_CHECKPOINT_SEGMENT_BYTES = 26214400

  /**
   * Defines how many bytes to use for the buffered prefetch messages for job as a whole.
   * The bytes for a single system/stream/partition are computed based on this.
   * This fetches wholes messages, hence this bytes limit is a soft one, and the actual usage can be
   * the bytes limit + size of max message in the partition for a given stream.
   * If the value of this property is > 0 then this takes precedence over CONSUMER_FETCH_THRESHOLD config.
   */
  val CONSUMER_FETCH_THRESHOLD_BYTES = SystemConfig.SYSTEM_ID_PREFIX + "samza.fetch.threshold.bytes"

  val DEFAULT_RETENTION_MS_FOR_BATCH = TimeUnit.DAYS.toMillis(1)

  /**
   * Implicit conversion so a plain [[Config]] can be used wherever [[KafkaConfig]]
   * accessors are needed. The result type is now declared explicitly: implicit
   * conversions without an explicit result type are fragile under type inference and
   * are disallowed by newer Scala versions.
   */
  implicit def Config2Kafka(config: Config): KafkaConfig = new KafkaConfig(config)
}
/**
 * Kafka-specific read accessors over a Samza [[Config]].
 *
 * Topic-level settings (replication factor, segment bytes, max message bytes) are
 * resolved with a common precedence: the explicit per-topic/per-store key first,
 * then the owning system's default-stream properties, then a hard-coded default.
 */
class KafkaConfig(config: Config) extends ScalaMapConfig(config) {
  /**
   * Gets the System to use for reading/writing checkpoints. Uses the following precedence.
   *
   * 1. If task.checkpoint.system is defined, that value is used.
   * 2. If job.default.system is defined, that value is used.
   * 3. None
   */
  def getCheckpointSystem = Option(getOrElse(KafkaConfig.CHECKPOINT_SYSTEM,
    new JobConfig(config).getDefaultSystem().orElse(null)))

  /**
   * Gets the replication factor for the checkpoint topic. Uses the following precedence.
   *
   * 1. If task.checkpoint.replication.factor is configured, that value is used.
   * 2. If systems.checkpoint-system.default.stream.replication.factor is configured, that value is used.
   * 3. "3"
   *
   * Note that the checkpoint-system has a similar precedence. See [[getCheckpointSystem]]
   */
  def getCheckpointReplicationFactor() = {
    val defaultReplicationFactor: String = getSystemDefaultReplicationFactor(getCheckpointSystem.orNull, "3")
    val replicationFactor = getOrDefault(KafkaConfig.CHECKPOINT_REPLICATION_FACTOR, defaultReplicationFactor)
    // Always Some(...) in practice because a hard-coded default is supplied above.
    Option(replicationFactor)
  }

  // Looks up systems.<systemName>.default.stream.replication.factor, falling back to defaultValue.
  private def getSystemDefaultReplicationFactor(systemName: String, defaultValue: String) = {
    val defaultReplicationFactor = new SystemConfig(config).getDefaultStreamProperties(systemName).getOrDefault(KafkaConfig.TOPIC_REPLICATION_FACTOR, defaultValue)
    defaultReplicationFactor
  }

  /**
   * Gets the max message bytes for the checkpoint topic. Uses the following precedence.
   *
   * 1. If task.checkpoint.max.message.bytes is configured, that value is used.
   * 2. If systems.checkpoint-system.default.stream.max.message.bytes is configured, that value is used.
   * 3. 1000012
   *
   * Note that the checkpoint-system has a similar precedence. See [[getCheckpointSystem]]
   */
  def getCheckpointMaxMessageBytes() = {
    val defaultmessageBytes = new SystemConfig(config).getDefaultStreamProperties(getCheckpointSystem.orNull).getInt(KafkaConfig.MAX_MESSAGE_BYTES, KafkaConfig.DEFAULT_LOG_COMPACT_TOPIC_MAX_MESSAGE_BYTES.toInt)
    getInt(KafkaConfig.CHECKPOINT_MAX_MESSAGE_BYTES, defaultmessageBytes)
  }

  /**
   * Gets the segment bytes for the checkpoint topic. Uses the following precedence.
   *
   * 1. If task.checkpoint.segment.bytes is configured, that value is used.
   * 2. If systems.checkpoint-system.default.stream.segment.bytes is configured, that value is used.
   * 3. 26214400
   *
   * Note that the checkpoint-system has a similar precedence. See [[getCheckpointSystem]]
   */
  def getCheckpointSegmentBytes() = {
    val defaultsegBytes = new SystemConfig(config).getDefaultStreamProperties(getCheckpointSystem.orNull).getInt(KafkaConfig.SEGMENT_BYTES, KafkaConfig.DEFAULT_CHECKPOINT_SEGMENT_BYTES)
    getInt(KafkaConfig.CHECKPOINT_SEGMENT_BYTES, defaultsegBytes)
  }

  /**
   * Gets the max message bytes for the coordinator topic. Uses the following precedence.
   *
   * 1. If job.coordinator.max.message.bytes is configured, that value is used.
   * 2. If systems.coordinator-system.default.stream.max.message.bytes is configured, that value is used.
   * 3. 1000012
   *
   * Note that the coordinator-system has a similar precedence. See [[JobConfig.getCoordinatorSystemName]]
   */
  def getCoordinatorMaxMessageByte = getOption(KafkaConfig.JOB_COORDINATOR_MAX_MESSAGE_BYTES) match {
    case Some(maxMessageBytes) => maxMessageBytes
    case _ =>
      val coordinatorSystem = new JobConfig(config).getCoordinatorSystemNameOrNull
      val systemMaxMessageBytes = new SystemConfig(config).getDefaultStreamProperties(coordinatorSystem).getOrDefault(KafkaConfig.MAX_MESSAGE_BYTES, KafkaConfig.DEFAULT_LOG_COMPACT_TOPIC_MAX_MESSAGE_BYTES)
      systemMaxMessageBytes
  }

  /**
   * Gets the replication factor for the coordinator topic. Uses the following precedence.
   *
   * 1. If job.coordinator.replication.factor is configured, that value is used.
   * 2. If systems.coordinator-system.default.stream.replication.factor is configured, that value is used.
   * 3. 3
   *
   * Note that the coordinator-system has a similar precedence. See [[JobConfig.getCoordinatorSystemName]]
   */
  def getCoordinatorReplicationFactor = getOption(KafkaConfig.JOB_COORDINATOR_REPLICATION_FACTOR) match {
    case Some(rplFactor) => rplFactor
    case _ =>
      val coordinatorSystem = new JobConfig(config).getCoordinatorSystemNameOrNull
      val systemReplicationFactor = new SystemConfig(config).getDefaultStreamProperties(coordinatorSystem).getOrDefault(KafkaConfig.TOPIC_REPLICATION_FACTOR, "3")
      systemReplicationFactor
  }

  /**
   * Gets the segment bytes for the coordinator topic. Uses the following precedence.
   *
   * 1. If job.coordinator.segment.bytes is configured, that value is used.
   * 2. If systems.coordinator-system.default.stream.segment.bytes is configured, that value is used.
   * 3. 26214400
   *
   * Note that the coordinator-system has a similar precedence. See [[JobConfig.getCoordinatorSystemName]]
   */
  def getCoordinatorSegmentBytes = getOption(KafkaConfig.JOB_COORDINATOR_SEGMENT_BYTES) match {
    case Some(segBytes) => segBytes
    case _ =>
      val coordinatorSystem = new JobConfig(config).getCoordinatorSystemNameOrNull
      val segBytes = new SystemConfig(config).getDefaultStreamProperties(coordinatorSystem).getOrDefault(KafkaConfig.SEGMENT_BYTES, "26214400")
      segBytes
  }

  // custom consumer config
  def getConsumerFetchThreshold(name: String) = getOption(KafkaConfig.CONSUMER_FETCH_THRESHOLD format name)

  def getConsumerFetchThresholdBytes(name: String) = getOption(KafkaConfig.CONSUMER_FETCH_THRESHOLD_BYTES format name)

  // The byte-based threshold is only considered enabled when explicitly set to a positive value.
  def isConsumerFetchThresholdBytesEnabled(name: String): Boolean = getConsumerFetchThresholdBytes(name).getOrElse("-1").toLong > 0

  /**
   * Returns a map of topic -> fetch.message.max.bytes value for all streams that
   * are defined with this property in the config.
   */
  def getFetchMessageMaxBytesTopics(systemName: String) = {
    val subConf = config.subset("systems.%s.streams." format systemName, true)
    subConf
      .asScala
      .filterKeys(k => k.endsWith(".consumer.fetch.message.max.bytes"))
      .map {
        case (fetchMessageMaxBytes, fetchSizeValue) =>
          // Strip the property suffix so only the topic name remains as the key.
          (fetchMessageMaxBytes.replace(".consumer.fetch.message.max.bytes", ""), fetchSizeValue.toInt)
      }.toMap
  }

  /**
   * Returns a map of topic -> auto.offset.reset value for all streams that
   * are defined with this property in the config.
   */
  def getAutoOffsetResetTopics(systemName: String) = {
    val subConf = config.subset("systems.%s.streams." format systemName, true)
    subConf
      .asScala
      .filterKeys(k => k.endsWith(".consumer.auto.offset.reset"))
      .map {
        case (topicAutoOffsetReset, resetValue) =>
          // Strip the property suffix so only the topic name remains as the key.
          (topicAutoOffsetReset.replace(".consumer.auto.offset.reset", ""), resetValue)
      }.toMap
  }

  /**
   * Gets the replication factor for the changelog topics. Uses the following precedence.
   *
   * 1. If stores.myStore.changelog.replication.factor is configured, that value is used.
   * 2. If systems.changelog-system.default.stream.replication.factor is configured, that value is used.
   * 3. 2
   *
   * Note that the changelog-system has a similar precedence. See [[StorageConfig]]
   */
  def getChangelogStreamReplicationFactor(name: String) = getOption(KafkaConfig.CHANGELOG_STREAM_REPLICATION_FACTOR format name).getOrElse(getDefaultChangelogStreamReplicationFactor)

  // Default replication factor for changelog topics: stores.default.changelog.replication.factor
  // if set, otherwise the changelog system's default-stream value (falling back to "2").
  def getDefaultChangelogStreamReplicationFactor() = {
    val changelogSystem = new StorageConfig(config).getChangelogSystem.orElse(null)
    getOption(KafkaConfig.DEFAULT_CHANGELOG_STREAM_REPLICATION_FACTOR).getOrElse(getSystemDefaultReplicationFactor(changelogSystem, "2"))
  }

  /**
   * Gets the max message bytes for the changelog topics. Uses the following precedence.
   *
   * 1. If stores.myStore.changelog.max.message.bytes is configured, that value is used.
   * 2. If systems.changelog-system.default.stream.max.message.bytes is configured, that value is used.
   * 3. 1000012
   *
   * Note that the changelog-system has a similar precedence. See [[StorageConfig]]
   */
  def getChangelogStreamMaxMessageByte(name: String) = getOption(KafkaConfig.CHANGELOG_MAX_MESSAGE_BYTES format name) match {
    case Some(maxMessageBytes) => maxMessageBytes
    case _ =>
      val changelogSystem = new StorageConfig(config).getChangelogSystem.orElse(null)
      val systemMaxMessageBytes = new SystemConfig(config).getDefaultStreamProperties(changelogSystem).getOrDefault(KafkaConfig.MAX_MESSAGE_BYTES, KafkaConfig.DEFAULT_LOG_COMPACT_TOPIC_MAX_MESSAGE_BYTES)
      systemMaxMessageBytes
  }

  // The method returns a map of storenames to changelog topic names, which are configured to use kafka as the changelog stream
  def getKafkaChangelogEnabledStores() = {
    val changelogConfigs = config.regexSubset(KafkaConfig.CHANGELOG_STREAM_NAMES_REGEX).asScala
    var storeToChangelog = Map[String, String]()
    val storageConfig = new StorageConfig(config)
    val pattern = Pattern.compile(KafkaConfig.CHANGELOG_STREAM_NAMES_REGEX)
    for ((changelogConfig, cn) <- changelogConfigs) {
      // Lookup the factory for this particular stream and verify if it's a kafka system
      val matcher = pattern.matcher(changelogConfig)
      // The regex capture group holds the store name; a non-matching key is a config error.
      val storeName = if (matcher.find()) matcher.group(1) else throw new SamzaException("Unable to find store name in the changelog configuration: " + changelogConfig + " with SystemStream: " + cn)
      // Only stores with an explicitly resolvable changelog stream are included in the result.
      JavaOptionals.toRichOptional(storageConfig.getChangelogStream(storeName)).toOption.foreach(changelogName => {
        val systemStream = StreamUtil.getSystemStreamFromNames(changelogName)
        storeToChangelog += storeName -> systemStream.getStream
      })
    }
    storeToChangelog
  }

  // Get all kafka properties for changelog stream topic creation
  def getChangelogKafkaProperties(name: String) = {
    val filteredConfigs = config.subset(KafkaConfig.CHANGELOG_STREAM_KAFKA_SETTINGS format name, true)
    val kafkaChangeLogProperties = new Properties
    // NOTE(review): appConfig is unused in this method — candidate for removal.
    val appConfig = new ApplicationConfig(config)

    // SAMZA-1600: do not use the combination of "compact,delete" as cleanup policy until we pick up Kafka broker 0.11.0.57,
    // 1.0.2, or 1.1.0 (see KAFKA-6568)

    // Adjust changelog topic setting, when TTL is set on a RocksDB store
    //  - Disable log compaction on Kafka changelog topic
    //  - Set topic TTL to be the same as RocksDB TTL
    Option(config.get("stores.%s.rocksdb.ttl.ms" format name)) match {
      case Some(rocksDbTtl) =>
        // Explicit user-provided cleanup.policy / retention.ms always win over the TTL-derived values.
        if (!config.containsKey("stores.%s.changelog.kafka.cleanup.policy" format name)) {
          kafkaChangeLogProperties.setProperty("cleanup.policy", "delete")
          if (!config.containsKey("stores.%s.changelog.kafka.retention.ms" format name)) {
            kafkaChangeLogProperties.setProperty("retention.ms", String.valueOf(rocksDbTtl))
          }
        }
      case _ =>
        kafkaChangeLogProperties.setProperty("cleanup.policy", "compact")
        kafkaChangeLogProperties.setProperty("max.message.bytes", getChangelogStreamMaxMessageByte(name))
    }

    kafkaChangeLogProperties.setProperty("segment.bytes", KafkaConfig.CHANGELOG_DEFAULT_SEGMENT_SIZE)
    kafkaChangeLogProperties.setProperty("delete.retention.ms", String.valueOf(new StorageConfig(config).getChangeLogDeleteRetentionInMs(name)))
    // Any explicit stores.<name>.changelog.kafka.* settings override the computed defaults above.
    filteredConfigs.asScala.foreach { kv => kafkaChangeLogProperties.setProperty(kv._1, kv._2) }
    kafkaChangeLogProperties
  }

  // Set the checkpoint topic configs to have a very small segment size and
  // enable log compaction. This keeps job startup time small since there
  // are fewer useless (overwritten) messages to read from the checkpoint
  // topic.
  def getCheckpointTopicProperties() = {
    val segmentBytes: Int = getCheckpointSegmentBytes()
    val maxMessageBytes: Int = getCheckpointMaxMessageBytes()
    val appConfig = new ApplicationConfig(config)
    // Batch jobs additionally get a bounded retention so stale checkpoints age out.
    val isStreamMode = appConfig.getAppMode == ApplicationMode.STREAM
    val properties = new Properties()

    if (isStreamMode) {
      properties.putAll(ImmutableMap.of(
        "cleanup.policy", "compact",
        "segment.bytes", String.valueOf(segmentBytes),
        "max.message.bytes", String.valueOf(maxMessageBytes)))
    } else {
      properties.putAll(ImmutableMap.of(
        "cleanup.policy", "compact,delete",
        "retention.ms", String.valueOf(KafkaConfig.DEFAULT_RETENTION_MS_FOR_BATCH),
        "segment.bytes", String.valueOf(segmentBytes),
        "max.message.bytes", String.valueOf(maxMessageBytes)))
    }
    properties
  }

  // Assembles the producer configuration for the given system: systems.<name>.producer.*
  // properties, plus the supplied client.id and any injected overrides (which win).
  def getKafkaSystemProducerConfig( systemName: String,
                                    clientId: String,
                                    injectedProps: Map[String, String] = Map()) = {

    val subConf = config.subset("systems.%s.producer." format systemName, true)
    val producerProps = new util.HashMap[String, String]()
    producerProps.putAll(subConf)
    producerProps.put("client.id", clientId)
    producerProps.putAll(injectedProps.asJava)

    new KafkaProducerConfig(systemName, clientId, producerProps)
  }
}
/**
 * Wraps the raw producer properties for one Kafka system and applies Samza-specific
 * defaults before they are handed to the Kafka producer.
 *
 * @param systemName the Samza system name this producer belongs to.
 * @param clientId   the Kafka client.id to report.
 * @param properties the user-supplied producer properties (string key/value pairs).
 */
class KafkaProducerConfig(val systemName: String,
                          val clientId: String = "",
                          properties: java.util.Map[String, String] = new util.HashMap[String, String]()) extends Logging {

  // Copied from new Kafka API - Workaround until KAFKA-1794 is resolved
  val RECONNECT_BACKOFF_MS_DEFAULT = 10L

  // Overrides specific to samza-kafka (these are considered as defaults in Samza & can be overridden by user)
  val MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_DEFAULT: java.lang.Integer = 1.asInstanceOf[Integer]
  val RETRIES_DEFAULT: java.lang.Integer = Integer.MAX_VALUE
  val LINGER_MS_DEFAULT: java.lang.Integer = 10

  /**
   * Builds the effective producer property map: the user-supplied properties plus
   * Samza defaults for the key/value serializers, max in-flight requests, retries
   * and linger time. Integer-valued settings are eagerly parsed so malformed config
   * fails here rather than inside the producer.
   */
  def getProducerProperties = {
    val byteArraySerializerClassName = classOf[ByteArraySerializer].getCanonicalName
    val producerProperties: java.util.Map[String, Object] = new util.HashMap[String, Object]()
    producerProperties.putAll(properties)

    if (!producerProperties.containsKey(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG)) {
      debug("%s undefined. Defaulting to %s." format(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, byteArraySerializerClassName))
      producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, byteArraySerializerClassName)
    }

    if (!producerProperties.containsKey(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG)) {
      debug("%s undefined. Defaulting to %s." format(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, byteArraySerializerClassName))
      producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, byteArraySerializerClassName)
    }

    // Values above the Samza default break ordering guarantees; warn but honor the user's choice.
    if (producerProperties.containsKey(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION)
      && producerProperties.get(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION).asInstanceOf[String].toInt > MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_DEFAULT) {
      warn("Setting '%s' to a value other than %d does not guarantee message ordering because new messages will be sent without waiting for previous ones to be acknowledged."
        format(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_DEFAULT))
    } else {
      producerProperties.put(ProducerConfig.MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION, MAX_IN_FLIGHT_REQUESTS_PER_CONNECTION_DEFAULT)
    }

    if (!producerProperties.containsKey(ProducerConfig.RETRIES_CONFIG)) {
      debug("%s undefined. Defaulting to %s." format(ProducerConfig.RETRIES_CONFIG, RETRIES_DEFAULT))
      producerProperties.put(ProducerConfig.RETRIES_CONFIG, RETRIES_DEFAULT)
    }
    producerProperties.get(ProducerConfig.RETRIES_CONFIG).toString.toInt // Verify int

    if (!producerProperties.containsKey(ProducerConfig.LINGER_MS_CONFIG)) {
      debug("%s undefined. Defaulting to %s." format(ProducerConfig.LINGER_MS_CONFIG, LINGER_MS_DEFAULT))
      producerProperties.put(ProducerConfig.LINGER_MS_CONFIG, LINGER_MS_DEFAULT)
    }
    producerProperties.get(ProducerConfig.LINGER_MS_CONFIG).toString.toInt // Verify int

    producerProperties
  }

  /**
   * Backoff between reconnect attempts, taken from `reconnect.backoff.ms` when configured.
   *
   * Bug fix: the previous implementation cast the configured String value directly to
   * Long via `asInstanceOf[Long]`, which threw a ClassCastException whenever the
   * property was actually set. The value is now parsed with `toLong`.
   */
  val reconnectIntervalMs: Long = Option(properties.get(ProducerConfig.RECONNECT_BACKOFF_MS_CONFIG))
    .map(_.toLong)
    .getOrElse(RECONNECT_BACKOFF_MS_DEFAULT)

  // NOTE: the name "bootsrapServers" (sic) is part of the public API and is kept
  // misspelled to preserve source compatibility with existing callers.
  val bootsrapServers = {
    if (properties.containsKey("metadata.broker.list"))
      warn("Kafka producer configuration contains 'metadata.broker.list'. This configuration is deprecated . Samza has been upgraded " +
        "to use Kafka's new producer API. Please update your configurations based on the documentation at http://kafka.apache.org/documentation.html#newproducerconfigs")
    Option(properties.get("bootstrap.servers"))
      .getOrElse(throw new SamzaException("No bootstrap servers defined in config for %s." format systemName))
      .asInstanceOf[String]
  }
}
| Swrrt/Samza | samza-kafka/src/main/scala/org/apache/samza/config/KafkaConfig.scala | Scala | apache-2.0 | 21,163 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.prop.Checkers
import org.scalacheck._
import Arbitrary._
import Prop._
import org.scalatest.exceptions.TestFailedException
class ShouldEndWithRegexSpec extends Spec with Matchers with Checkers with ReturnsNormallyThrowsAssertion {
/*
s should include substring t
s should include regex t
s should endWith substring t
s should endWith regex t
s should endWith substring t
s should endWith regex t
s should fullyMatch regex t
*/
object `The endWith regex syntax` {
val decimal = """(-)?(\\d+)(\\.\\d*)?"""
val decimalRegex = """(-)?(\\d+)(\\.\\d*)?""".r
object `(when the regex is specified by a string)` {
def `should do nothing if the string ends with substring that matched the regular expression specified as a string` {
"1.78" should endWith regex (".78")
"1.7" should endWith regex (decimal)
"21.7" should endWith regex (decimal)
"1.78" should endWith regex (decimal)
"x8" should endWith regex (decimal)
"x1." should endWith regex (decimal)
// The remaining are full matches, which should also work with "endWith"
"1.7" should endWith regex ("1.7")
"1.7" should endWith regex (decimal)
"-1.8" should endWith regex (decimal)
"8" should endWith regex (decimal)
"1." should endWith regex (decimal)
}
def `should do nothing if the string does not end with a substring that matched the regular expression specified as a string when used with not` {
"eight" should not { endWith regex (decimal) }
"one.eight" should not { endWith regex (decimal) }
"eight" should not endWith regex (decimal)
"one.eight" should not endWith regex (decimal)
}
def `should do nothing if the string does not end with a substring that matched the regular expression specified as a string when used in a logical-and expression` {
"b1.7" should (endWith regex (decimal) and (endWith regex (decimal)))
"b1.7" should ((endWith regex (decimal)) and (endWith regex (decimal)))
"b1.7" should (endWith regex (decimal) and endWith regex (decimal))
"1.7" should (endWith regex (decimal) and (endWith regex (decimal)))
"1.7" should ((endWith regex (decimal)) and (endWith regex (decimal)))
"1.7" should (endWith regex (decimal) and endWith regex (decimal))
}
def `should do nothing if the string does not end with a substring that matched the regular expression specified as a string when used in a logical-or expression` {
"b1.7" should (endWith regex ("hello") or (endWith regex (decimal)))
"b1.7" should ((endWith regex ("hello")) or (endWith regex (decimal)))
"b1.7" should (endWith regex ("hello") or endWith regex (decimal))
"1.7" should (endWith regex ("hello") or (endWith regex (decimal)))
"1.7" should ((endWith regex ("hello")) or (endWith regex (decimal)))
"1.7" should (endWith regex ("hello") or endWith regex (decimal))
}
def `should do nothing if the string does not end with a substring that matched the regular expression specified as a string when used in a logical-and expression with not` {
"fred" should (not (endWith regex ("bob")) and not (endWith regex (decimal)))
"fred" should ((not endWith regex ("bob")) and (not endWith regex (decimal)))
"fred" should (not endWith regex ("bob") and not endWith regex (decimal))
}
def `should do nothing if the string does not end with a substring that matched the regular expression specified as a string when used in a logical-or expression with not` {
"fred" should (not (endWith regex ("fred")) or not (endWith regex (decimal)))
"fred" should ((not endWith regex ("fred")) or (not endWith regex (decimal)))
"fred" should (not endWith regex ("fred") or not endWith regex (decimal))
}
def `should throw TestFailedException if the string does not match substring that matched the regular expression specified as a string` {
val caught1 = intercept[TestFailedException] {
"1.7" should endWith regex ("1.78")
}
assert(caught1.getMessage === "\\"1.7\\" did not end with a substring that matched the regular expression 1.78")
val caught2 = intercept[TestFailedException] {
"1.7" should endWith regex ("21.7")
}
assert(caught2.getMessage === "\\"1.7\\" did not end with a substring that matched the regular expression 21.7")
val caught3 = intercept[TestFailedException] {
"-one.eight" should endWith regex (decimal)
}
assert(caught3.getMessage === "\\"-one.eight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught6 = intercept[TestFailedException] {
"eight" should endWith regex (decimal)
}
assert(caught6.getMessage === "\\"eight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught7 = intercept[TestFailedException] {
"1.eight" should endWith regex (decimal)
}
assert(caught7.getMessage === "\\"1.eight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught8 = intercept[TestFailedException] {
"onedoteight" should endWith regex (decimal)
}
assert(caught8.getMessage === "\\"onedoteight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught9 = intercept[TestFailedException] {
"***" should endWith regex (decimal)
}
assert(caught9.getMessage === "\\"***\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
}
def `should throw TestFailedException if the string does matches substring that matched the regular expression specified as a string when used with not` {
val caught1 = intercept[TestFailedException] {
"1.7" should not { endWith regex ("1.7") }
}
assert(caught1.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression 1.7")
val caught2 = intercept[TestFailedException] {
"1.7" should not { endWith regex (decimal) }
}
assert(caught2.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught3 = intercept[TestFailedException] {
"-1.8" should not { endWith regex (decimal) }
}
assert(caught3.getMessage === "\\"-1.8\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught4 = intercept[TestFailedException] {
"8" should not { endWith regex (decimal) }
}
assert(caught4.getMessage === "\\"8\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught5 = intercept[TestFailedException] {
"1." should not { endWith regex (decimal) }
}
assert(caught5.getMessage === "\\"1.\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught11 = intercept[TestFailedException] {
"1.7" should not endWith regex ("1.7")
}
assert(caught11.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression 1.7")
val caught12 = intercept[TestFailedException] {
"1.7" should not endWith regex (decimal)
}
assert(caught12.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught13 = intercept[TestFailedException] {
"-1.8" should not endWith regex (decimal)
}
assert(caught13.getMessage === "\\"-1.8\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught14 = intercept[TestFailedException] {
"8" should not endWith regex (decimal)
}
assert(caught14.getMessage === "\\"8\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught15 = intercept[TestFailedException] {
"1." should not endWith regex (decimal)
}
assert(caught15.getMessage === "\\"1.\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
// The rest are non-exact matches
val caught21 = intercept[TestFailedException] {
"a1.7" should not { endWith regex ("1.7") }
}
assert(caught21.getMessage === "\\"a1.7\\" ended with a substring that matched the regular expression 1.7")
val caught22 = intercept[TestFailedException] {
"b1.7" should not { endWith regex (decimal) }
}
assert(caught22.getMessage === "\\"b1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
val caught23 = intercept[TestFailedException] {
"b-1.8" should not { endWith regex (decimal) }
}
assert(caught23.getMessage === "\\"b-1.8\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
}
// Failure messages for `and`-composed endWith-regex matchers (regex given as a String).
// When the left side passes and the right fails, both halves appear joined by "but";
// when the left side fails, evaluation short-circuits and only its message is reported.
def `should throw TestFailedException if the string ends with substring that matched the regular expression specified as a string when used in a logical-and expression` {
  val caught1 = intercept[TestFailedException] {
    "1.7" should (endWith regex (decimal) and (endWith regex ("1.8")))
  }
  assert(caught1.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not end with a substring that matched the regular expression 1.8")
  // Same expectation for the fully-parenthesized form of the DSL.
  val caught2 = intercept[TestFailedException] {
    "1.7" should ((endWith regex (decimal)) and (endWith regex ("1.8")))
  }
  assert(caught2.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not end with a substring that matched the regular expression 1.8")
  // And for the unparenthesized form.
  val caught3 = intercept[TestFailedException] {
    "1.7" should (endWith regex (decimal) and endWith regex ("1.8"))
  }
  assert(caught3.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not end with a substring that matched the regular expression 1.8")
  // Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
  val caught4 = intercept[TestFailedException] {
    "1.eight" should (endWith regex (decimal) and (endWith regex ("1.8")))
  }
  assert(caught4.getMessage === "\\"1.eight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
  val caught5 = intercept[TestFailedException] {
    "1.eight" should ((endWith regex (decimal)) and (endWith regex ("1.8")))
  }
  assert(caught5.getMessage === "\\"1.eight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
  val caught6 = intercept[TestFailedException] {
    "1.eight" should (endWith regex (decimal) and endWith regex ("1.8"))
  }
  assert(caught6.getMessage === "\\"1.eight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
}
// Failure messages for `or`-composed endWith-regex matchers (regex given as a String):
// both sides must fail for the expression to fail, and both failures are joined by "and".
def `should throw TestFailedException if the string ends with substring that matched the regular expression specified as a string when used in a logical-or expression` {
  val caught1 = intercept[TestFailedException] {
    "1.seven" should (endWith regex (decimal) or (endWith regex ("1.8")))
  }
  assert(caught1.getMessage === "\\"1.seven\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not end with a substring that matched the regular expression 1.8")
  // Fully-parenthesized DSL form.
  val caught2 = intercept[TestFailedException] {
    "1.seven" should ((endWith regex (decimal)) or (endWith regex ("1.8")))
  }
  assert(caught2.getMessage === "\\"1.seven\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not end with a substring that matched the regular expression 1.8")
  // Unparenthesized DSL form.
  val caught3 = intercept[TestFailedException] {
    "1.seven" should (endWith regex (decimal) or endWith regex ("1.8"))
  }
  assert(caught3.getMessage === "\\"1.seven\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not end with a substring that matched the regular expression 1.8")
}
// Negated `and` composition (regex given as a String): the left negation passes
// ("did not end with"), the right negation fails ("ended with"), joined by "but".
def `should throw TestFailedException if the string ends with substring that matched the regular expression specified as a string when used in a logical-and expression used with not` {
  val caught1 = intercept[TestFailedException] {
    "1.7" should (not endWith regex ("1.8") and (not endWith regex (decimal)))
  }
  assert(caught1.getMessage === "\\"1.7\\" did not end with a substring that matched the regular expression 1.8, but \\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
  val caught2 = intercept[TestFailedException] {
    "1.7" should ((not endWith regex ("1.8")) and (not endWith regex (decimal)))
  }
  assert(caught2.getMessage === "\\"1.7\\" did not end with a substring that matched the regular expression 1.8, but \\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
  val caught3 = intercept[TestFailedException] {
    "1.7" should (not endWith regex ("1.8") and not endWith regex (decimal))
  }
  assert(caught3.getMessage === "\\"1.7\\" did not end with a substring that matched the regular expression 1.8, but \\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
  // Non-exact (suffix) match variant.
  val caught4 = intercept[TestFailedException] {
    "a1.7" should (not endWith regex ("1.8") and (not endWith regex (decimal)))
  }
  assert(caught4.getMessage === "\\"a1.7\\" did not end with a substring that matched the regular expression 1.8, but \\"a1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
  val caught5 = intercept[TestFailedException] {
    "1.7" should ((not endWith regex ("1.8")) and (not endWith regex (decimal)))
  }
  assert(caught5.getMessage === "\\"1.7\\" did not end with a substring that matched the regular expression 1.8, but \\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
}
// Negated `or` composition (regex given as a String): both negations fail
// (the string ends with both patterns), and the failures are joined by "and".
def `should throw TestFailedException if the string ends with substring that matched the regular expression specified as a string when used in a logical-or expression used with not` {
  val caught1 = intercept[TestFailedException] {
    "1.7" should (not endWith regex (decimal) or (not endWith regex ("1.7")))
  }
  assert(caught1.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" ended with a substring that matched the regular expression 1.7")
  val caught2 = intercept[TestFailedException] {
    "1.7" should ((not endWith regex (decimal)) or (not endWith regex ("1.7")))
  }
  assert(caught2.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" ended with a substring that matched the regular expression 1.7")
  val caught3 = intercept[TestFailedException] {
    "1.7" should (not endWith regex (decimal) or not endWith regex ("1.7"))
  }
  assert(caught3.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" ended with a substring that matched the regular expression 1.7")
  val caught4 = intercept[TestFailedException] {
    "1.7" should (not (endWith regex (decimal)) or not (endWith regex ("1.7")))
  }
  assert(caught4.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" ended with a substring that matched the regular expression 1.7")
  // Non-exact (suffix) match variant.
  val caught5 = intercept[TestFailedException] {
    "a1.7" should (not endWith regex (decimal) or (not endWith regex ("1.7")))
  }
  assert(caught5.getMessage === "\\"a1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"a1.7\\" ended with a substring that matched the regular expression 1.7")
}
}
// Mirrors the String-based suite above, but passes the pre-compiled `decimalRegex`
// instead of the `decimal` pattern string. The expected failure messages contain the
// same pattern text in both suites.
object `(when the regex is specified by an actual Regex)` {
  // Positive cases: suffix matches and full matches both satisfy `endWith regex`.
  def `should do nothing if the string ends with substring that matched the regular expression specified as a string` {
    "1.78" should endWith regex (".78")
    "1.7" should endWith regex (decimalRegex)
    "21.7" should endWith regex (decimalRegex)
    "1.78" should endWith regex (decimalRegex)
    "x8" should endWith regex (decimalRegex)
    "x1." should endWith regex (decimalRegex)
    // The remaining are full matches, which should also work with "endWith"
    "1.7" should endWith regex ("1.7")
    "1.7" should endWith regex (decimalRegex)
    "-1.8" should endWith regex (decimalRegex)
    "8" should endWith regex (decimalRegex)
    "1." should endWith regex (decimalRegex)
  }
  // Negation succeeds when the string does not end with a match, in both DSL forms.
  def `should do nothing if the string does not end with a substring that matched the regular expression specified as a string when used with not` {
    "eight" should not { endWith regex (decimalRegex) }
    "one.eight" should not { endWith regex (decimalRegex) }
    "eight" should not endWith regex (decimalRegex)
    "one.eight" should not endWith regex (decimalRegex)
  }
  // `and` composition succeeds when both sides match, across the three DSL forms.
  def `should do nothing if the string does not end with a substring that matched the regular expression specified as a string when used in a logical-and expression` {
    "b1.7" should (endWith regex (decimalRegex) and (endWith regex (decimalRegex)))
    "b1.7" should ((endWith regex (decimalRegex)) and (endWith regex (decimalRegex)))
    "b1.7" should (endWith regex (decimalRegex) and endWith regex (decimalRegex))
    "1.7" should (endWith regex (decimalRegex) and (endWith regex (decimalRegex)))
    "1.7" should ((endWith regex (decimalRegex)) and (endWith regex (decimalRegex)))
    "1.7" should (endWith regex (decimalRegex) and endWith regex (decimalRegex))
  }
  // `or` composition succeeds when at least the right side matches.
  def `should do nothing if the string does not end with a substring that matched the regular expression specified as a string when used in a logical-or expression` {
    "b1.7" should (endWith regex ("hello") or (endWith regex (decimalRegex)))
    "b1.7" should ((endWith regex ("hello")) or (endWith regex (decimalRegex)))
    "b1.7" should (endWith regex ("hello") or endWith regex (decimalRegex))
    "1.7" should (endWith regex ("hello") or (endWith regex (decimalRegex)))
    "1.7" should ((endWith regex ("hello")) or (endWith regex (decimalRegex)))
    "1.7" should (endWith regex ("hello") or endWith regex (decimalRegex))
  }
  // Negated `and` succeeds when neither pattern matches the end of the string.
  def `should do nothing if the string does not end with a substring that matched the regular expression specified as a string when used in a logical-and expression with not` {
    "fred" should (not (endWith regex ("bob")) and not (endWith regex (decimalRegex)))
    "fred" should ((not endWith regex ("bob")) and (not endWith regex (decimalRegex)))
    "fred" should (not endWith regex ("bob") and not endWith regex (decimalRegex))
  }
  // Negated `or` succeeds when at least one pattern does not match the end.
  def `should do nothing if the string does not end with a substring that matched the regular expression specified as a string when used in a logical-or expression with not` {
    "fred" should (not (endWith regex ("fred")) or not (endWith regex (decimalRegex)))
    "fred" should ((not endWith regex ("fred")) or (not endWith regex (decimalRegex)))
    "fred" should (not endWith regex ("fred") or not endWith regex (decimalRegex))
  }
  // Failure message for a plain (non-negated, non-composed) mismatch.
  // Note: the caught numbering (1, 2, 3, 6, 7, 8, 9) follows the String-based suite.
  def `should throw TestFailedException if the string does not match substring that matched the regular expression specified as a string` {
    val caught1 = intercept[TestFailedException] {
      "1.7" should endWith regex ("1.78")
    }
    assert(caught1.getMessage === "\\"1.7\\" did not end with a substring that matched the regular expression 1.78")
    val caught2 = intercept[TestFailedException] {
      "1.7" should endWith regex ("21.7")
    }
    assert(caught2.getMessage === "\\"1.7\\" did not end with a substring that matched the regular expression 21.7")
    val caught3 = intercept[TestFailedException] {
      "-one.eight" should endWith regex (decimalRegex)
    }
    assert(caught3.getMessage === "\\"-one.eight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught6 = intercept[TestFailedException] {
      "eight" should endWith regex (decimalRegex)
    }
    assert(caught6.getMessage === "\\"eight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught7 = intercept[TestFailedException] {
      "1.eight" should endWith regex (decimalRegex)
    }
    assert(caught7.getMessage === "\\"1.eight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught8 = intercept[TestFailedException] {
      "onedoteight" should endWith regex (decimalRegex)
    }
    assert(caught8.getMessage === "\\"onedoteight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught9 = intercept[TestFailedException] {
      "***" should endWith regex (decimalRegex)
    }
    assert(caught9.getMessage === "\\"***\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
  }
  // Failure message when negation fails because the string does end with a match;
  // covers braced-not (caught1-5), prefix-not (caught11-15) and suffix matches (caught21-23).
  def `should throw TestFailedException if the string does matches substring that matched the regular expression specified as a string when used with not` {
    val caught1 = intercept[TestFailedException] {
      "1.7" should not { endWith regex ("1.7") }
    }
    assert(caught1.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression 1.7")
    val caught2 = intercept[TestFailedException] {
      "1.7" should not { endWith regex (decimalRegex) }
    }
    assert(caught2.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught3 = intercept[TestFailedException] {
      "-1.8" should not { endWith regex (decimalRegex) }
    }
    assert(caught3.getMessage === "\\"-1.8\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught4 = intercept[TestFailedException] {
      "8" should not { endWith regex (decimalRegex) }
    }
    assert(caught4.getMessage === "\\"8\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught5 = intercept[TestFailedException] {
      "1." should not { endWith regex (decimalRegex) }
    }
    assert(caught5.getMessage === "\\"1.\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught11 = intercept[TestFailedException] {
      "1.7" should not endWith regex ("1.7")
    }
    assert(caught11.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression 1.7")
    val caught12 = intercept[TestFailedException] {
      "1.7" should not endWith regex (decimalRegex)
    }
    assert(caught12.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught13 = intercept[TestFailedException] {
      "-1.8" should not endWith regex (decimalRegex)
    }
    assert(caught13.getMessage === "\\"-1.8\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught14 = intercept[TestFailedException] {
      "8" should not endWith regex (decimalRegex)
    }
    assert(caught14.getMessage === "\\"8\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught15 = intercept[TestFailedException] {
      "1." should not endWith regex (decimalRegex)
    }
    assert(caught15.getMessage === "\\"1.\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    // The rest are non-exact matches
    val caught21 = intercept[TestFailedException] {
      "a1.7" should not { endWith regex ("1.7") }
    }
    assert(caught21.getMessage === "\\"a1.7\\" ended with a substring that matched the regular expression 1.7")
    val caught22 = intercept[TestFailedException] {
      "b1.7" should not { endWith regex (decimalRegex) }
    }
    assert(caught22.getMessage === "\\"b1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught23 = intercept[TestFailedException] {
      "b-1.8" should not { endWith regex (decimalRegex) }
    }
    assert(caught23.getMessage === "\\"b-1.8\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
  }
  // `and` composition failures: combined "but" message, plus short-circuit cases.
  def `should throw TestFailedException if the string ends with substring that matched the regular expression specified as a string when used in a logical-and expression` {
    val caught1 = intercept[TestFailedException] {
      "1.7" should (endWith regex (decimalRegex) and (endWith regex ("1.8")))
    }
    assert(caught1.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not end with a substring that matched the regular expression 1.8")
    val caught2 = intercept[TestFailedException] {
      "1.7" should ((endWith regex (decimalRegex)) and (endWith regex ("1.8")))
    }
    assert(caught2.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not end with a substring that matched the regular expression 1.8")
    val caught3 = intercept[TestFailedException] {
      "1.7" should (endWith regex (decimalRegex) and endWith regex ("1.8"))
    }
    assert(caught3.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, but \\"1.7\\" did not end with a substring that matched the regular expression 1.8")
    // Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
    val caught4 = intercept[TestFailedException] {
      "1.eight" should (endWith regex (decimalRegex) and (endWith regex ("1.8")))
    }
    assert(caught4.getMessage === "\\"1.eight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught5 = intercept[TestFailedException] {
      "1.eight" should ((endWith regex (decimalRegex)) and (endWith regex ("1.8")))
    }
    assert(caught5.getMessage === "\\"1.eight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught6 = intercept[TestFailedException] {
      "1.eight" should (endWith regex (decimalRegex) and endWith regex ("1.8"))
    }
    assert(caught6.getMessage === "\\"1.eight\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
  }
  // `or` composition failures: both sides fail, messages joined by "and".
  def `should throw TestFailedException if the string ends with substring that matched the regular expression specified as a string when used in a logical-or expression` {
    val caught1 = intercept[TestFailedException] {
      "1.seven" should (endWith regex (decimalRegex) or (endWith regex ("1.8")))
    }
    assert(caught1.getMessage === "\\"1.seven\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not end with a substring that matched the regular expression 1.8")
    val caught2 = intercept[TestFailedException] {
      "1.seven" should ((endWith regex (decimalRegex)) or (endWith regex ("1.8")))
    }
    assert(caught2.getMessage === "\\"1.seven\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not end with a substring that matched the regular expression 1.8")
    val caught3 = intercept[TestFailedException] {
      "1.seven" should (endWith regex (decimalRegex) or endWith regex ("1.8"))
    }
    assert(caught3.getMessage === "\\"1.seven\\" did not end with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.seven\\" did not end with a substring that matched the regular expression 1.8")
  }
  // Negated `and` failures: left negation passes, right fails, joined by "but".
  def `should throw TestFailedException if the string ends with substring that matched the regular expression specified as a string when used in a logical-and expression used with not` {
    val caught1 = intercept[TestFailedException] {
      "1.7" should (not endWith regex ("1.8") and (not endWith regex (decimalRegex)))
    }
    assert(caught1.getMessage === "\\"1.7\\" did not end with a substring that matched the regular expression 1.8, but \\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught2 = intercept[TestFailedException] {
      "1.7" should ((not endWith regex ("1.8")) and (not endWith regex (decimalRegex)))
    }
    assert(caught2.getMessage === "\\"1.7\\" did not end with a substring that matched the regular expression 1.8, but \\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught3 = intercept[TestFailedException] {
      "1.7" should (not endWith regex ("1.8") and not endWith regex (decimalRegex))
    }
    assert(caught3.getMessage === "\\"1.7\\" did not end with a substring that matched the regular expression 1.8, but \\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    // Non-exact (suffix) match variant.
    val caught4 = intercept[TestFailedException] {
      "a1.7" should (not endWith regex ("1.8") and (not endWith regex (decimalRegex)))
    }
    assert(caught4.getMessage === "\\"a1.7\\" did not end with a substring that matched the regular expression 1.8, but \\"a1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
    val caught5 = intercept[TestFailedException] {
      "1.7" should ((not endWith regex ("1.8")) and (not endWith regex (decimalRegex)))
    }
    assert(caught5.getMessage === "\\"1.7\\" did not end with a substring that matched the regular expression 1.8, but \\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?")
  }
  // Negated `or` failures: both negations fail, messages joined by "and".
  def `should throw TestFailedException if the string ends with substring that matched the regular expression specified as a string when used in a logical-or expression used with not` {
    val caught1 = intercept[TestFailedException] {
      "1.7" should (not endWith regex (decimalRegex) or (not endWith regex ("1.7")))
    }
    assert(caught1.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" ended with a substring that matched the regular expression 1.7")
    val caught2 = intercept[TestFailedException] {
      "1.7" should ((not endWith regex (decimalRegex)) or (not endWith regex ("1.7")))
    }
    assert(caught2.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" ended with a substring that matched the regular expression 1.7")
    val caught3 = intercept[TestFailedException] {
      "1.7" should (not endWith regex (decimalRegex) or not endWith regex ("1.7"))
    }
    assert(caught3.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" ended with a substring that matched the regular expression 1.7")
    val caught4 = intercept[TestFailedException] {
      "1.7" should (not (endWith regex (decimalRegex)) or not (endWith regex ("1.7")))
    }
    assert(caught4.getMessage === "\\"1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"1.7\\" ended with a substring that matched the regular expression 1.7")
    // Non-exact (suffix) match variant.
    val caught5 = intercept[TestFailedException] {
      "a1.7" should (not endWith regex (decimalRegex) or (not endWith regex ("1.7")))
    }
    assert(caught5.getMessage === "\\"a1.7\\" ended with a substring that matched the regular expression (-)?(\\\\d+)(\\\\.\\\\d*)?, and \\"a1.7\\" ended with a substring that matched the regular expression 1.7")
  }
}
}
}
| svn2github/scalatest | src/test/scala/org/scalatest/ShouldEndWithRegexSpec.scala | Scala | apache-2.0 | 34,028 |
package blanky.domain
/**
 * Security credentials record for an application user.
 *
 * @param id           database identifier; `None` until the row is persisted
 * @param email        login e-mail address
 * @param passwordHash hashed password (never the plain text)
 * @param salt         per-user salt applied when hashing the password
 */
case class SecurityUser(
    id: Option[Long] = None,
    email: String,
    passwordHash: String,
    salt: String)
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnet
import org.scalatest.{BeforeAndAfterAll, FunSuite}
/**
 * Tests for [[KVStore]] created with the default/local backend: init/push/pull
 * round trips, multi-key multi-device pushes, custom updaters, and store metadata.
 */
class KVStoreSuite extends FunSuite with BeforeAndAfterAll {

  test("init and pull") {
    val kv = KVStore.create()
    val shape = Shape(2, 1)
    val ndArray = NDArray.zeros(shape)

    kv.init("3", NDArray.ones(shape))
    // A pull immediately after init must return the initialized value.
    kv.pull("3", ndArray)
    assert(ndArray.toArray === Array(1f, 1f))
  }

  test("push and pull") {
    val kv = KVStore.create()
    val shape = Shape(2, 1)
    val ndArray = NDArray.zeros(shape)

    kv.init("3", NDArray.ones(shape))
    // After a push, a pull must observe the pushed value.
    kv.push("3", NDArray.ones(shape) * 4)
    kv.pull("3", ndArray)
    assert(ndArray.toArray === Array(4f, 4f))
  }

  test("test aggregate") {
    val shape = Shape(4, 4)
    val keys = Array("b", "c", "d")
    val kv = KVStore.create()
    kv.init("a", NDArray.zeros(shape))
    kv.init(keys, Array.fill(keys.length)(NDArray.zeros(shape)))

    val numDevs = 4
    val devs = (0 until numDevs).map(Context.cpu(_))

    // Push one ones-array per device; the test expects every pulled element
    // to equal numDevs (difference sums to zero).
    val vals = devs.map(d => NDArray.ones(shape, d)).toArray
    kv.push("a", vals)
    kv.pull("a", outs = vals)
    assert(vals.map(v => v.toArray.map(x => x - numDevs).sum).sum == 0f)

    // Same check per key; flatMap replaces the original map(...).flatten.
    val valss = keys.flatMap { k =>
      val tmpVals = devs.map(d => NDArray.ones(shape, d) * 2f).toArray
      kv.push(k, tmpVals)
      kv.pull(k, outs = tmpVals)
      tmpVals
    }
    assert(valss.map(v => v.toArray.map(x => x - numDevs * 2f).sum).sum == 0f)
  }

  test("updater runs when push") {
    val kv = KVStore.create()
    // Custom updater: stored += 2 * input on every push.
    val updater = new MXKVStoreUpdater {
      override def update(key: Int, input: NDArray, stored: NDArray): Unit = {
        // scalastyle:off println
        println(s"update on key $key")
        // scalastyle:on println
        stored += input * 2
      }
      override def dispose(): Unit = {}
    }
    kv.setUpdater(updater)

    val shape = Shape(2, 1)
    val ndArray = NDArray.zeros(shape)

    kv.init("3", NDArray.ones(shape) * 4)
    kv.pull("3", ndArray)
    assert(ndArray.toArray === Array(4f, 4f))

    // Pushing ones should add 2 * 1 to the stored 4s via the custom updater.
    kv.push("3", NDArray.ones(shape))
    kv.pull("3", ndArray)
    assert(ndArray.toArray === Array(6f, 6f))
  }

  test("get type") {
    val kv = KVStore.create("local")
    assert(kv.`type` === "local")
  }

  test("get numWorkers and rank") {
    // A local store is a single worker with rank 0.
    val kv = KVStore.create("local")
    assert(kv.numWorkers === 1)
    assert(kv.rank === 0)
  }
}
| rahul003/mxnet | scala-package/core/src/test/scala/org/apache/mxnet/KVStoreSuite.scala | Scala | apache-2.0 | 3,154 |
/*
* Copyright (c) 2014-2021 All Rights Reserved by the RWS Group for and on behalf of its affiliates and subsidiaries.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sdl.odata.parser
import scala.util.parsing.combinator.RegexParsers
import com.sdl.odata.api.edm.model.EntityDataModel
import com.sdl.odata.api.parser._
import java.net.URLDecoder
/**
 * Combinator parser for OData URIs, resolving names against the supplied
 * [[EntityDataModel]]. Inputs are URL-decoded (UTF-8) before parsing.
 */
class ODataUriParser(val entityDataModel: EntityDataModel) extends RegexParsers
  with ResourcePathParser
  with QueryOptionsParser
  with ContextFragmentParser
  with ExpressionsParser
  with NamesAndIdentifiersParser
  with LiteralsParser
  with EntityDataModelHelpers {

  /**
   * Parses a complete OData URI (service root plus relative part).
   *
   * @throws ODataUriParseException if the input cannot be parsed
   */
  def parseUri(input: String): ODataUri =
    extractResult(input, parseAll(odataUri, URLDecoder.decode(input, "UTF-8")))

  /**
   * Parses only the resource-path portion of an OData URI.
   *
   * @throws ODataUriParseException if the input cannot be parsed
   */
  def parseResourcePath(input: String): ResourcePath =
    extractResult(input, parseAll(resourcePath, URLDecoder.decode(input, "UTF-8")))

  // Shared success/failure handling for both public entry points; the error
  // message format ("<msg>. uri: <input>") is unchanged from the original.
  private def extractResult[T](input: String, result: ParseResult[T]): T = result match {
    case Success(value, _) => value
    case NoSuccess(msg, _) => throw new ODataUriParseException(s"$msg. uri: $input")
  }

  // Whitespace is significant in URIs, so the default skipping is disabled.
  override val skipWhitespace = false

  def odataUri: Parser[ODataUri] = odataUriRelativeUri | odataUriServiceRoot

  def odataUriServiceRoot: Parser[ODataUri] = serviceRoot ~ opt("?" ~> formatMediaType) ^^ {
    case serviceRoot ~ format => ODataUri(serviceRoot, ServiceRootUri(format))
  }

  def odataUriRelativeUri: Parser[ODataUri] = serviceRoot ~ odataRelativeUri ^^ {
    case serviceRoot ~ relativeUri => ODataUri(serviceRoot, relativeUri)
  }

  // Everything up to ".svc" (case-insensitive) is considered to be part of the service root
  def serviceRoot: Parser[String] = """(?i)^(?:.*?\\.svc)+""".r <~ opt("/")

  def odataRelativeUri: Parser[RelativeUri] = batchUri | entityUri | metadataUri | resourcePathUri

  def batchUri: Parser[BatchUri.type] = "$batch" ^^^ BatchUri

  def entityUri: Parser[EntityUri] = unnamedEntityUri | namedEntityUri

  def unnamedEntityUri: Parser[EntityUri] = "$entity?" ~> entityOptions ^^ {
    case options => EntityUri(None, options)
  }

  // "$entity/<type>?<options>": the cast type constrains which options are valid.
  def namedEntityUri: Parser[EntityUri] = ("$entity/" ~> qualifiedEntityTypeName) into {
    derivedTypeName =>
      ("?" ~> entityCastOptions(derivedTypeName)) ^^ {
        case options => EntityUri(Some(derivedTypeName), options)
      }
  }

  def metadataUri: Parser[MetadataUri] = ("$metadata" ~ opt("/")) ~> opt("?" ~> formatMediaType) ~ opt(context) ^^ {
    case format ~ context => MetadataUri(format, context)
  }

  // Query options are parsed against the type the resource path resolves to.
  def resourcePathUri: Parser[ResourcePathUri] = resourcePath into {
    resourcePath =>
      opt("?" ~> queryOptions(resolveResourcePathTypeName(resourcePath))) ^^ {
        case options => ResourcePathUri(resourcePath, options.getOrElse(List.empty))
      }
  }
}
| sdl/odata | odata_parser/src/main/scala/com/sdl/odata/parser/ODataUriParser.scala | Scala | apache-2.0 | 3,339 |
package com.htc.studio.example.sentiment
import java.util.Properties
import java.util.logging.{Level, Logger}
import scala.collection.JavaConverters._
import scala.util.{Random, Try}
import com.twitter.scalding.Args
import com.twitter.summingbird.batch.Batcher
import edu.stanford.nlp.ling.CoreAnnotations.SentencesAnnotation
import edu.stanford.nlp.neural.rnn.RNNCoreAnnotations
import edu.stanford.nlp.pipeline.StanfordCoreNLP
import edu.stanford.nlp.sentiment.SentimentCoreAnnotations.AnnotatedTree
import org.apache.tika.language.LanguageIdentifier
import com.htc.studio.storehaus.valuewapper.Column._
import com.htc.studio.summingbird.storm.HTCStormJob
import com.htc.studio.util.jdbc._
object SentimentJob {
  /**
   * Sentiment scoring function. Tries to build a Stanford CoreNLP sentiment
   * pipeline; if that fails (e.g. the model is unavailable), falls back to a
   * uniformly random score in [1, 3].
   */
  val sentiment: String => Option[Int] = Try {
    // disable parser warnings
    Logger.getLogger("edu.stanford.nlp.process.PTBLexer").setLevel(Level.OFF)

    /** Properties to create a stanford nlp pipeline. */
    val props = new Properties
    props.setProperty("annotators", "tokenize, ssplit, parse, sentiment")

    /** The stanford nlp pipeline. This will fail is fakeModel is true. */
    val pipeline = new StanfordCoreNLP(props)

    // use real model to generate sentiment score
    text: String => pipeline.process(text)
      .get(classOf[SentencesAnnotation]).asScala
      .map { sentence =>
        val tree = sentence.get(classOf[AnnotatedTree])
        (sentence.toString.size, RNNCoreAnnotations.getPredictedClass(tree))
      }
      // determine by sentence with max length
      // BUG FIX: Ordering(...).min selected the SHORTEST sentence, contradicting
      // the stated intent above; max picks the longest sentence's score.
      .reduceOption(Ordering.by((_: (Int, Int))._1).max)
      .map(_._2)
  }.toOption
    // fall back to random sentiment score
    .getOrElse { text: String => Some(Random.nextInt(3) + 1) }
}
/**
 * A storm job that calculates tweet sentiment. Only English tweets are used.
 *
 * @author Zhongyang Zheng (zhongyang_zheng@htc.com)
 */
case class SentimentJob(args: Args) extends HTCStormJob {
  // Aggregate tweets into day-sized batches.
  override val batcher = Batcher.ofDays(1)

  /** Twitter4j Properties, loaded from the file named by --twitter4j (if given). */
  val props = new Properties
  args.optional("twitter4j").foreach { file =>
    // Close the reader and source even when props.load throws (the original
    // leaked both on failure).
    val source = scala.io.Source.fromFile(file)
    try {
      val reader = source.bufferedReader
      try {
        props.load(reader)
      } finally {
        reader.close()
      }
    } finally {
      source.close()
    }
  }

  /** Database connection url. Default is h2 tcp at localhost. */
  val connectionURL = args.getOrElse("connection",
    "h2:tcp://sa:@localhost//tmp/demo/h2")

  /** Database table name. Default is `sentiment`. */
  val tableName = args.getOrElse("table", "sentiment")

  /** The jdbc connection. */
  val connection = JDBCConnection(connectionURL)

  /** The jdbc table. Columns are: tweet id, tweet text and sentiment score. */
  val table = JDBCTable(tableName, Seq(int64("id")),
    Seq(string("tweet"), int32("sentiment")))

  /** Twitter4j source. */
  val source = Source.twitter(props)

  /** JDBC sink. */
  val sink = Storehaus.jdbc[Long, (String, Int)](connection, table).fixedSink

  // Pipeline: keep English tweets, score them, and write (id -> (text, score)).
  override def job = source
    // only use english tweets
    .filter(tweet => new LanguageIdentifier(tweet.getText).getLanguage == "en")
    .optionMap(tweet => SentimentJob.sentiment(tweet.getText).map(s =>
      (tweet.getId, (tweet.getText, s))))
    .write(sink)
}
| simonandluna/lama-demo | src/main/scala/com/htc/studio/example/sentiment/SentimentJob.scala | Scala | agpl-3.0 | 3,240 |
package util
import java.sql.ResultSet
import org.squeryl.PrimitiveTypeMode._
import org.squeryl.Session
/**
 * Raw-SQL helpers built on the current Squeryl session. Each helper now closes
 * its JDBC Statement in a finally block (the originals leaked statements and
 * result sets); closing the Statement also releases its ResultSet.
 */
object SqlHelpers {

  /**
   * Executes `sql` and maps the first row with `f`.
   * Throws if the query returns no rows.
   */
  def sqlQuery1[T](sql: String)(f: ResultSet => T): T = inTransaction {
    val conn = Session.currentSession.connection
    val stmt = conn.createStatement()
    try {
      val rs = stmt.executeQuery(sql)
      if (!rs.next()) throw new Exception("No lines returned!!")
      f(rs)
    } finally {
      stmt.close()
    }
  }

  /**
   * Executes an update/DDL statement and returns the affected row count.
   * (The type parameter `T` is unused; kept for source compatibility with
   * existing callers that apply it explicitly.)
   */
  def sqlUpdate[T](sql: String): Int = inTransaction {
    val conn = Session.currentSession.connection
    val stmt = conn.createStatement()
    try {
      stmt.executeUpdate(sql)
    } finally {
      stmt.close()
    }
  }

  /** Executes `sql` and maps the first row with `f`, or returns None when there are no rows. */
  def sqlQueryOpt[T](sql: String)(f: ResultSet => T): Option[T] = inTransaction {
    val conn = Session.currentSession.connection
    val stmt = conn.createStatement()
    try {
      val rs = stmt.executeQuery(sql)
      if (rs.next()) Some(f(rs)) else None
    } finally {
      stmt.close()
    }
  }

  /**
   * Executes `sql` and maps every row with `f`, preserving row order.
   * On failure the offending query is printed before the exception is rethrown.
   */
  def sqlQueryN[T](sql: String)(f: ResultSet => T): List[T] = inTransaction {
    try {
      val conn = Session.currentSession.connection
      val stmt = conn.createStatement()
      try {
        val rs = stmt.executeQuery(sql)
        // Prepend then reverse: O(n) overall, keeps original row order.
        var rslt: List[T] = Nil
        while (rs.next()) rslt = f(rs) :: rslt
        rslt.reverse
      } finally {
        stmt.close()
      }
    } catch {
      case e: Exception =>
        println("ERROR ON QUERY:\\n" + sql)
        throw e
    }
  }
}
| slynx-fw/slynx-demo | app/util/SqlHelpers.scala | Scala | apache-2.0 | 1,257 |
package org.jetbrains.plugins.scala.worksheet
import com.intellij.openapi.util.Key
import com.intellij.psi.PsiElement
/**
* User: Dmitry.Naydanov
* Date: 04.08.17.
*/
private object GotoOriginalHandlerUtil {
  // User-data key linking a generated PSI element back to the element it was derived from.
  private val MY_KEY = new Key[PsiElement]("GOTO_ORIGINAL_HANDLER_BASE")

  /** Builds a PSI element from `original` via `creator` and records the back-reference. */
  def createPsi[I <: PsiElement, O <: PsiElement](creator: I => O, original: I): O = {
    val generated = creator(original)
    storePsi(generated, original)
    generated
  }

  /** Records `original` as the source of `created`, using copyable user data. */
  def storePsi(created: PsiElement, original: PsiElement): Unit =
    created.putCopyableUserData(MY_KEY, original)

  /** Like [[storePsi]], but stores plain (non-copyable) user data. */
  def storeNonModifiablePsi(created: PsiElement, original: PsiElement): Unit =
    created.putUserData(MY_KEY, original)

  /** Looks up the original element recorded for `created`, if any is present and still valid. */
  def findPsi(created: PsiElement): Option[PsiElement] =
    Option(created.getCopyableUserData(MY_KEY)).filter(_.isValid)
}
| JetBrains/intellij-scala | scala/worksheet/src/org/jetbrains/plugins/scala/worksheet/GotoOriginalHandlerUtil.scala | Scala | apache-2.0 | 837 |
// Program: 'vari2.scala'
// Given: Two positive integers a and b, each less than 1000
// Return: The integer corresponding to the square of the hypotenuse of the
//         right triangle whose legs have lengths a and b
// Date: 2015 Oct 30
// Version: 2.11.7

import io.Source
import java.io.PrintWriter

// read the two integers from the first line, closing the file handle
// afterwards (the original script leaked it)
val source = Source.fromFile("rosalind_ini2.txt")
val lines = try source.getLines.next.split(" ") finally source.close()

// squared hypotenuse; plain Int arithmetic avoids the Double round-trip
// that math.pow forced (inputs are < 1000, so a*a + b*b cannot overflow Int)
val hypo = (a: Int, b: Int) => a * a + b * b
val total = hypo(lines(0).toInt, lines(1).toInt)

// write answer to output file, always releasing the writer
val out = new PrintWriter("out.txt")
try out.println(total) finally out.close()
| erictleung/rosalind-programming | python-village/INI2_Variables_and_Some_Arithmetic/vari2.scala | Scala | mit | 670 |
package satisfaction.fs
import java.io._
import org.joda.time.DateTime
/**
 * Simple FileSystem, for accessing local disk.
 * Mostly for testing...
 *
 * XXX Add unit tests for local file operations
 */
case class LocalFileSystem( val nfs : java.nio.file.FileSystem = java.nio.file.FileSystems.getDefault) extends FileSystem {

  def this() = {
    this( java.nio.file.FileSystems.getDefault)
  }

  /** [[FileStatus]] implementation backed by a [[java.io.File]]. */
  case class LocalFStatus( file : java.io.File ) extends FileStatus {
    override def size : Long = file.length
    override def isDirectory : Boolean = file.isDirectory
    override def isFile : Boolean = file.isFile
    override def path : Path = new Path( file.getPath)
    override def lastAccessed : DateTime = new DateTime(file.lastModified)
    // java.io.File exposes no creation timestamp, so report the last
    // modification time instead (TODO: read java.nio.file attributes instead)
    override def created : DateTime = lastAccessed
  }

  implicit def File2FileStatus( file : java.io.File ) : FileStatus = new LocalFStatus(file)

  implicit def Path2File( path : Path) : java.io.File = new File( path.toUri.getPath )

  implicit def File2Path( file : java.io.File) : Path = new Path( file.getPath)

  override def uri : java.net.URI = new java.net.URI( "file:///")

  /** Direct children of `p`; empty when `p` is not a listable directory. */
  override def listFiles( p : Path ) : Seq[FileStatus] = {
    val children = Path2File(p).listFiles
    if( children == null ) {
      Seq.empty
    } else {
      children.map( f => new LocalFStatus(f) ).toSeq
    }
  }

  /** All descendants of `p` (files and directories), children before their directory. */
  override def listFilesRecursively( p : Path ) : Seq[FileStatus] = {
    listFiles( p ).flatMap( fs => {
      if( fs.isFile ) {
        Seq( fs)
      } else if( fs.isDirectory ) {
        listFilesRecursively( fs.path ) ++ Seq(fs)
      } else {
        Seq.empty
      }
    })
  }

  /**
   * Files matching the glob pattern `p`, matched against the entries of the
   * pattern's parent directory.
   *
   * The previous implementation computed a matcher and returned null; it also
   * passed the pattern to getPathMatcher without the "glob:" syntax prefix
   * that java.nio.file.FileSystem#getPathMatcher requires.
   */
  override def globFiles( p: Path) : Seq[FileStatus] = {
    val pattern = p.toUri.getPath
    val pathMatcher = nfs.getPathMatcher( s"glob:$pattern")
    // assumes the glob varies only in its last segment — TODO confirm intended semantics
    val parentDir = Option( new File(pattern).getParentFile).getOrElse( new File("."))
    listFiles( new Path( parentDir.getPath)).filter( fs =>
      pathMatcher.matches( nfs.getPath( fs.path.toUri.getPath)))
  }

  override def mkdirs( p : Path ) : Boolean = Path2File(p).mkdirs

  override def open( path : Path) : java.io.InputStream = new FileInputStream( Path2File(path))

  override def create( path : Path ) : java.io.OutputStream = new FileOutputStream( Path2File(path))

  override def exists( p : Path ) : Boolean = Path2File(p).exists

  override def isDirectory( p : Path ) : Boolean = Path2File(p).isDirectory

  override def isFile( p : Path ) : Boolean = Path2File(p).isFile

  override def getStatus( p : Path ) : FileStatus = new LocalFStatus( Path2File(p))

  override def delete( p : Path ) = {
    // returns true when the file or (empty) directory was actually removed
    Path2File(p).delete
  }

  def setExecutable( p: Path, flag: Boolean = true ) = Path2File(p).setExecutable( flag)
}
/** Default instance bound to the JVM's default NIO file system. */
object LocalFileSystem extends LocalFileSystem( java.nio.file.FileSystems.getDefault) {

  /** The process working directory, from the "user.dir" system property. */
  def currentDirectory : Path = new Path( System.getProperty("user.dir"))

  /** Resolves `p` against the current working directory. */
  def relativePath( p : Path) : Path = currentDirectory / p
}
| jeromebanks/satisfaction | modules/core/src/main/scala/satisfaction/fs/LocalFs.scala | Scala | apache-2.0 | 3,202 |
package renesca.table
import org.junit.runner.RunWith
import org.specs2.mock._
import org.specs2.mutable._
import org.specs2.runner.JUnitRunner
import renesca.json
import renesca.parameter.implicits._
import renesca.parameter.{ArrayParameterValue, ParameterValue}
@RunWith(classOf[JUnitRunner])
class TableSpec extends Specification with Mockito {

  "Table" should {

    "access row cells by column" in {
      val columnToIndex = Map(("a", 0), ("b", 1))
      val row = Row(IndexedSeq(5, 6), columnToIndex)

      row("a") mustEqual 5
      row("b") mustEqual 6
    }

    "access rows by index" in {
      val columnToIndex = Map(("a", 0), ("b", 1))
      val row1 = Row(IndexedSeq("x", "y"), columnToIndex)
      val row2 = Row(IndexedSeq("hau", "rein"), columnToIndex)
      val table = Table(List("a", "b"), Vector(row1, row2))

      table(0) mustEqual row1
      table(1) mustEqual row2
    }

    // renamed: both of the following examples were called "test non-emptyness",
    // which duplicated the specs2 description and mislabeled this one,
    // since it asserts that an empty table reports as empty
    "report emptiness for a table without rows" in {
      val table = Table(List("a", "b"), Vector.empty[Vector[ParameterValue]])

      table.nonEmpty mustEqual false
      table.isEmpty mustEqual true
    }

    "report non-emptiness for a table with rows" in {
      val columnToIndex = Map(("a", 0), ("b", 1))
      val row1 = Row(IndexedSeq("x", "y"), columnToIndex)
      val row2 = Row(IndexedSeq("hau", "rein"), columnToIndex)
      val table = Table(List("a", "b"), Vector(row1, row2))

      table.nonEmpty mustEqual true
      table.isEmpty mustEqual false
    }
  }

  "TableFactory" should {
    "create Table from raw data" in {
      val table = Table(List("p", "q"), List(List[ParameterValue](1, 2), List[ParameterValue](1, 4)))

      table.columns mustEqual List("p", "q")
      table.rows(0) mustEqual Row(Array[ParameterValue](1, 2), Map(("p", 0), ("q", 1)))
      table.rows(1) mustEqual Row(Array[ParameterValue](1, 4), Map(("p", 0), ("q", 1)))
    }
  }
}
| renesca/renesca | jvm/src/test/scala/renesca/table/TableSpec.scala | Scala | apache-2.0 | 1,836 |
package nest.sparkle.time.protocol
import spray.json._
/** Message for the admin page: requests an export of the given folder's data. */
case class ExportData(folder:String)

/** spray-json wire formats for the admin page messages. */
object AdminProtocol extends DefaultJsonProtocol {
  // jsonFormat1 derives the JSON format for the single-field case class
  implicit val ExportDataFormat = jsonFormat1(ExportData)
}
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.energy
import squants._
import squants.time._
import squants.Time
/**
 * Represents the rate of change of [[squants.energy.Power]] over time
 *
 * @author garyKeorkunian
 * @since 0.1
 *
 * @param value value in [[squants.energy.WattsPerHour]]
 * @param unit the unit this value is expressed in
 */
final class PowerRamp private (val value: Double, val unit: PowerRampUnit)
    extends Quantity[PowerRamp]
    with TimeDerivative[Power]
    with SecondTimeDerivative[Energy] {

  def dimension = PowerRamp

  // TimeDerivative wiring: a ramp expressed in W/h, integrated over the base
  // time span of one hour, yields that many watts
  protected[squants] def timeIntegrated = Watts(toWattsPerHour)
  protected[squants] def time = Hours(1)

  // double time integration: PowerRamp * T * T = Energy
  def *(that: TimeSquared): Energy = this * that.time1 * that.time2

  def toWattsPerHour = to(WattsPerHour)
  def toWattsPerMinutes = to(WattsPerMinute) // NOTE: plural "Minutes" kept for source compatibility
  def toKilowattsPerHour = to(KilowattsPerHour)
  def toKilowattsPerMinute = to(KilowattsPerMinute)
  def toMegawattsPerHour = to(MegawattsPerHour)
  def toGigawattsPerHour = to(GigawattsPerHour)
}
/** Dimension companion: factories, unit registry, and string parsing for PowerRamp. */
object PowerRamp extends Dimension[PowerRamp] {
  /** Builds a PowerRamp from a numeric value expressed in `unit`. */
  private[energy] def apply[A](n: A, unit: PowerRampUnit)(implicit num: Numeric[A]) = new PowerRamp(num.toDouble(n), unit)
  /** Builds a PowerRamp (in W/h) from a power change over a time span. */
  def apply(change: Power, time: Time): PowerRamp = apply(change.toWatts / time.toHours, WattsPerHour)
  // eta-expanded string parser, so PowerRamp("1.2 W/h") works
  def apply = parse _
  def name = "PowerRamp"
  def primaryUnit = WattsPerHour
  def siUnit = WattsPerHour
  def units = Set(WattsPerHour, WattsPerMinute, KilowattsPerHour, KilowattsPerMinute, MegawattsPerHour, GigawattsPerHour)
}
/** Base trait for [[PowerRamp]] units; provides the unit-tagged factory. */
trait PowerRampUnit extends UnitOfMeasure[PowerRamp] with UnitConverter {
  def apply[A](n: A)(implicit num: Numeric[A]) = PowerRamp(n, this)
}

/** Primary and SI unit: watts per hour. */
object WattsPerHour extends PowerRampUnit with PrimaryUnit with SiUnit {
  val symbol = "W/h"
}

object WattsPerMinute extends PowerRampUnit {
  // 1 W/min = 60 W/h: conversionFactor is the multiplier into the primary
  // unit (W/h), so it must be *60. The original divided by 60, inverting
  // the conversion (it made 1 W/min equal 1/60 W/h).
  val conversionFactor = WattsPerHour.conversionFactor * 60D
  val symbol = "W/m"
}

object KilowattsPerHour extends PowerRampUnit {
  val conversionFactor = MetricSystem.Kilo
  val symbol = "kW/h"
}

object KilowattsPerMinute extends PowerRampUnit {
  // 1 kW/min = 60 kW/h (same inverted-factor fix as WattsPerMinute)
  val conversionFactor = KilowattsPerHour.conversionFactor * 60D
  val symbol = "kW/m"
}

object MegawattsPerHour extends PowerRampUnit {
  val conversionFactor = MetricSystem.Mega
  val symbol = "MW/h"
}

object GigawattsPerHour extends PowerRampUnit {
  val conversionFactor = MetricSystem.Giga
  val symbol = "GW/h"
}
/** Implicit DSL shorthands for constructing PowerRamp values, e.g. `2.kWph`. */
object PowerRampConversions {
  // single-unit constants for each supported unit, plus short aliases
  lazy val wattPerHour = WattsPerHour(1)
  lazy val Wph = wattPerHour
  lazy val wattPerMinute = WattsPerMinute(1)
  lazy val Wpm = wattPerMinute
  lazy val kilowattPerHour = KilowattsPerHour(1)
  lazy val kWph = kilowattPerHour
  lazy val kilowattPerMinute = KilowattsPerMinute(1)
  lazy val kWpm = kilowattPerMinute
  lazy val megawattPerHour = MegawattsPerHour(1)
  lazy val MWph = megawattPerHour
  lazy val gigawattPerHour = GigawattsPerHour(1)
  lazy val GWph = gigawattPerHour

  /** Numeric pimps: `n.Wph`, `n.kWpm`, etc. */
  implicit class PowerRampConversions[A](n: A)(implicit num: Numeric[A]) {
    def Wph = WattsPerHour(n)
    def Wpm = WattsPerMinute(n)
    def kWph = KilowattsPerHour(n)
    def kWpm = KilowattsPerMinute(n)
    def MWph = MegawattsPerHour(n)
    def GWph = GigawattsPerHour(n)
  }

  /** String pimp: parses e.g. "1.2 W/h" into a PowerRamp. */
  implicit class PowerRampStringConversion(s: String) {
    def toPowerRamp = PowerRamp(s)
  }

  implicit object PowerRampNumeric extends AbstractQuantityNumeric[PowerRamp](PowerRamp.primaryUnit)
}
| rmihael/squants | shared/src/main/scala/squants/energy/PowerRamp.scala | Scala | apache-2.0 | 3,828 |
// Solution-4.scala
// Solution to Exercise 4 in "Classes & Objects"

val s1 = "Sally"
val s2 = "Sam"

// Scala's == delegates to equals and is additionally null-safe,
// so prefer it over calling .equals directly
if(s1 == s2) {
  println("s1 and s2 are equal")
} else {
  println("s1 and s2 are not equal")
}
println("s1: " + s1)
println("s2: " + s2)

/* OUTPUT_SHOULD_BE
s1 and s2 are not equal
s1: Sally
s2: Sam
*/
| P7h/ScalaPlayground | Atomic Scala/atomic-scala-solutions/10_ClassesAndObjects/Solution-4.scala | Scala | apache-2.0 | 312 |
/*
Copyright 2015 ThoughtWorks, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.thoughtworks.each
import com.thoughtworks.each.core.ComprehensionMonadGenerator
import scala.language.experimental.macros
import scala.language.higherKinds
import scalaz._
/**
 * Contains implicit methods to work with types support `for` comprehension.
 */
object ComprehensionImplicits {

  /**
   * Returns a monad implemented by `for` comprehension syntax.
   *
   * `F` must support `for` comprehension, containing `map` and `flatMap` methods,
   * and `F`'s companion object must contains an `apply` method to create a `F[A]` instance from any `A`.
   *
   * @tparam F the higher kinded type that the monad works with, e.g. a Scala container `Seq`.
   * @return the monad.
   */
  implicit def comprehensionMonad[F[_]]: Monad[F] = macro MacroImplementation.comprehensionMonad

  private object MacroImplementation {
    def comprehensionMonad(c: scala.reflect.macros.whitebox.Context): c.Tree = {
      import c.universe._
      // the macro application is `comprehensionMonad[F]`; extract F's type tree
      // (the pattern is irrefutable here because the method has one type parameter)
      val TypeApply(_, List(fTypeTree: TypeTree)) = c.macroApplication
      val fSymbol = fTypeTree.tpe.typeSymbol.asType
      // delegate construction of the Monad[F] tree to the shared generator
      ComprehensionMonadGenerator.generatorMonad[c.universe.type](c.universe, c.freshName)(fSymbol)
    }
  }
}
| jilen/each | src/main/scala/com/thoughtworks/each/ComprehensionImplicits.scala | Scala | apache-2.0 | 1,745 |
package org.jetbrains.plugins.hocon
import com.intellij.openapi.actionSystem._
import com.intellij.openapi.editor.actionSystem.EditorActionManager
import com.intellij.openapi.fileEditor.{FileEditorManager, OpenFileDescriptor}
import com.intellij.psi.PsiFile
/**
 * Base class for HOCON editor-action tests: opens each fixture file in an
 * editor, places the caret, runs the editor action identified by `actionId`,
 * and returns the resulting text with the caret position re-inserted.
 *
 * @author ghik
 */
abstract class HoconEditorActionTest(actionId: String, subpath: String)
  extends HoconFileSetTestCase("actions/" + subpath) {

  // Code based on AbstractEnterActionTestBase
  // Minimal DataContext exposing only the language and project of the file under test.
  private class MockDataContext(file: PsiFile) extends DataContext with DataProvider {
    def getData(dataId: String): AnyRef =
      if (LangDataKeys.LANGUAGE is dataId) file.getLanguage
      else if (CommonDataKeys.PROJECT is dataId) file.getProject
      else null
  }

  protected def transform(data: Seq[String]): String = {
    // first fixture section holds the file text with a caret marker
    // (extractCaret/insertCaret presumably come from HoconFileSetTestCase — TODO confirm)
    val (fileText, offset) = extractCaret(data.head)
    val psiFile = HoconTestUtils.createPseudoPhysicalHoconFile(getProject, fileText)

    val editorManager = FileEditorManager.getInstance(getProject)
    val editor = editorManager.openTextEditor(new OpenFileDescriptor(getProject, psiFile.getVirtualFile, 0), false)
    assert(editor != null)
    editor.getCaretModel.moveToOffset(offset)

    val actionHandler = EditorActionManager.getInstance.getActionHandler(actionId)
    val dataContext = new MockDataContext(psiFile)

    assert(actionHandler != null)
    try {
      // editor actions must run inside a write command action
      inWriteCommandAction {
        actionHandler.execute(editor, editor.getCaretModel.getCurrentCaret, dataContext)
      }
      insertCaret(editor.getDocument.getText, editor.getCaretModel.getOffset)
    } finally {
      // always release the editor, even when the action throws
      editorManager.closeFile(psiFile.getVirtualFile)
    }
  }
}
| LPTK/intellij-scala | test/org/jetbrains/plugins/hocon/HoconEditorActionTest.scala | Scala | apache-2.0 | 1,653 |
package i0239

// Positive compiler test (issue 239): an implicit member inherited by a
// package object must be found for implicit resolution both from inside the
// defining package and through a wildcard import of that package.
package p {
  class C[A] {
    implicit def foo: M[A] = ???
  }

  // the package object inherits `foo: M[String]` from C[String]
  object `package` extends C[String]

  object test0 {
    def compute[A](implicit m: M[A]): A = ???
    val v = compute
    val v1: String = v // the package-object implicit must fix A = String
  }
}

trait M[A]

object test1 {
  def compute[A](implicit m: M[A]): A = ???
  import p.*
  val v = compute
  val v1: String = v // same resolution must work via the wildcard import
}
| dotty-staging/dotty | tests/pos/i0239.scala | Scala | apache-2.0 | 358 |
package at.nonblocking.cliwix.core
import at.nonblocking.cliwix.core.validation.ArticleStructureNameForDefaultLocaleLiferayConfigValidator
import at.nonblocking.cliwix.model._
import org.junit.Test
import org.junit.Assert._
import scala.collection.JavaConversions._
/**
 * Tests that the validator requires an article structure name for the
 * company's default locale (de_DE in these fixtures).
 */
class ArticleStructureNameForDefaultLocaleLiferayConfigValidatorTest {

  /**
   * Builds a minimal config: one company (default locale de_DE), one guest site,
   * and one article structure named in en_GB plus `secondLocale`.
   * Extracted because both tests previously duplicated this setup verbatim.
   */
  private def configWithStructureLocales(secondLocale: String): LiferayConfig = {
    val config = new LiferayConfig

    val company = new Company()
    company.setCompanyConfiguration(new CompanyConfiguration())
    company.getCompanyConfiguration.setDefaultLocale("de_DE")

    val site1 = new Site(Site.GUEST_SITE_NAME, new SiteConfiguration("/site1", SITE_MEMBERSHIP_TYPE.OPEN), null)

    config.setCompanies(new Companies(List(company)))
    company.setSites(new Sites(List(site1)))

    val articleStructure1 = new ArticleStructure("TEST", List(new LocalizedTextContent("en_GB", "test"), new LocalizedTextContent(secondLocale, "test")), null)

    val webContent = new WebContent
    webContent.setStructures(new ArticleStructures(List(articleStructure1)))
    site1.setSiteContent(new SiteContent)
    site1.getSiteContent.setWebContent(webContent)

    config
  }

  /** No message when the structure name exists for the default locale. */
  @Test
  def success(): Unit = {
    val config = configWithStructureLocales("de_DE")

    val messages = new ArticleStructureNameForDefaultLocaleLiferayConfigValidator().validate(config)
    assertTrue(messages.isEmpty)
  }

  /** Exactly one message when the default-locale name is missing. */
  @Test
  def fail(): Unit = {
    val config = configWithStructureLocales("be_BE")

    val messages = new ArticleStructureNameForDefaultLocaleLiferayConfigValidator().validate(config)
    assertTrue(messages.length == 1)
  }
}
| nonblocking/cliwix | cliwix-core/src/test/scala/at/nonblocking/cliwix/core/ArticleStructureNameForDefaultLocaleLiferayConfigValidatorTest.scala | Scala | agpl-3.0 | 2,231 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.zipkin.cassandra
import com.twitter.cassie.connection.RetryPolicy
import com.twitter.cassie.{Cluster, ServerSetsCluster, KeyspaceBuilder}
import com.twitter.conversions.time._
import com.twitter.finagle.stats.{NullStatsReceiver, StatsReceiver}
import com.twitter.finagle.tracing.{NullTracer, Tracer}
import com.twitter.util.Duration
import java.net.InetSocketAddress
/** Factories for Cassie keyspace builders pre-configured with Zipkin defaults. */
object Keyspace {

  /**
   * Builds a keyspace whose Cassandra hosts are discovered via a ZooKeeper
   * server set registered at `path`.
   *
   * @param hosts   (host, port) pairs of the ZooKeeper ensemble
   * @param timeout ZooKeeper connection timeout
   */
  def zookeeperServerSets(
    keyspaceName: String = "Zipkin",
    hosts: Seq[(String, Int)],
    path: String,
    timeout: Duration,
    stats: StatsReceiver = NullStatsReceiver): KeyspaceBuilder = {

    val sockets = hosts map { case (h, p) => new InetSocketAddress(h, p) }
    useDefaults {
      new ServerSetsCluster(sockets, path, timeout.inMillis.toInt, stats)
        .keyspace(keyspaceName)
    }
  }

  /**
   * Builds a keyspace from a static list of Cassandra nodes.
   * NOTE(review): the default node/port look fork-specific
   * ("dev-cassandra1.finntech.no", 7613) — confirm before relying on them.
   */
  def static(
    keyspaceName: String = "Zipkin",
    nodes: Set[String] = Set("dev-cassandra1.finntech.no"),
    port: Int = 7613,
    stats: StatsReceiver = NullStatsReceiver,
    tracerFactory: Tracer.Factory = NullTracer.factory): KeyspaceBuilder = {

    useDefaults {
      new Cluster(nodes, port, stats, tracerFactory)
        .keyspace(keyspaceName)
    }
  }

  /** Applies the shared timeout/connection-pool/retry defaults to a builder. */
  def useDefaults(keyspaceBuilder: KeyspaceBuilder): KeyspaceBuilder = {
    keyspaceBuilder
      .connectTimeout(10.seconds.inMillis.toInt)
      .requestTimeout(20.seconds.inMillis.toInt)
      .timeout(90.seconds.inMillis.toInt)
      .retries(3)
      .maxConnectionsPerHost(400)
      .hostConnectionMaxWaiters(5000)
      .retryPolicy(RetryPolicy.Idempotent) // only idempotent requests are retried
  }
}
| eirslett/zipkin | zipkin-cassandra/src/main/scala/com/twitter/zipkin/cassandra/Keyspace.scala | Scala | apache-2.0 | 2,161 |
package cwl
import org.scalatest.{FlatSpec, Matchers}
import eu.timepit.refined._
import eu.timepit.refined.string.MatchesRegex
import ExpressionEvaluator._
import shapeless.Coproduct
import wom.callable.RuntimeEnvironment
import wom.expression.PlaceholderIoFunctionSet
import wom.graph.LocalName
import wom.values.WomString
class CwlExpressionCommandPartSpec extends FlatSpec with Matchers {

  behavior of "CwlExpressionCommandPart"

  // minimal runtime environment; the expression under test does not read these values
  val emptyEnvironment = RuntimeEnvironment("","",1,1,1,1)

  it should "instantiate" in {
    // NOTE: toFixed used to remove the fraction part of ECMAScript numbers
    // https://stackoverflow.com/questions/25989642/why-does-java-8-nashorn-javascript-modulo-returns-0-0-double-instead-of-0-i#answer-25991982
    // https://community.apigee.com/questions/33936/javascript-parseint-not-converting-to-int-value-ne.html
    val commandPart = CwlExpressionCommandPart(Coproduct[Expression](refineMV[MatchesRegex[ECMAScriptExpressionWitness.T]]("$(parseInt(inputs.myStringInt).toFixed())")))
    // the expression parses the string input "3" and renders it back without a fraction
    val result = commandPart.instantiate(Map(LocalName("myStringInt") -> WomString("3")), PlaceholderIoFunctionSet, identity, emptyEnvironment)
    result should be("3")
  }
}
| ohsu-comp-bio/cromwell | cwl/src/test/scala/cwl/CwlExpressionCommandPartSpec.scala | Scala | bsd-3-clause | 1,204 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen
import org.apache.flink.api.common.functions.{Function, RuntimeContext}
import org.apache.flink.api.common.typeutils.TypeSerializer
import org.apache.flink.table.api.TableConfig
import org.apache.flink.table.data.GenericRowData
import org.apache.flink.table.data.conversion.{DataStructureConverter, DataStructureConverters}
import org.apache.flink.table.functions.{FunctionContext, UserDefinedFunction}
import org.apache.flink.table.planner.codegen.CodeGenUtils._
import org.apache.flink.table.planner.codegen.GenerateUtils.generateRecordStatement
import org.apache.flink.table.planner.utils.InternalConfigOptions
import org.apache.flink.table.utils.DateTimeUtils
import org.apache.flink.table.runtime.operators.TableStreamOperator
import org.apache.flink.table.runtime.typeutils.{ExternalSerializer, InternalSerializers}
import org.apache.flink.table.runtime.util.collections._
import org.apache.flink.table.types.DataType
import org.apache.flink.table.types.logical.LogicalTypeRoot._
import org.apache.flink.table.types.logical._
import org.apache.flink.util.InstantiationUtil
import java.util.TimeZone
import java.util.function.{Supplier => JSupplier}
import java.time.ZoneId
import scala.collection.mutable
/**
* The context for code generator, maintaining various reusable statements that could be insert
* into different code sections in the final generated class.
*/
class CodeGeneratorContext(val tableConfig: TableConfig) {
// objects referenced by the generated code; they are passed into the
// generated class when it is instantiated
val references: mutable.ArrayBuffer[AnyRef] = new mutable.ArrayBuffer[AnyRef]()

// set of strings (lines) that will be concatenated into a single class header comment
private val reusableHeaderComments: mutable.LinkedHashSet[String] =
  mutable.LinkedHashSet[String]()

// set of member statements that will be added only once
// we use a LinkedHashSet to keep the insertion order
private val reusableMemberStatements: mutable.LinkedHashSet[String] =
  mutable.LinkedHashSet[String]()

// set of constructor statements that will be added only once
// we use a LinkedHashSet to keep the insertion order
private val reusableInitStatements: mutable.LinkedHashSet[String] =
  mutable.LinkedHashSet[String]()

// set of open statements for RichFunction that will be added only once
// we use a LinkedHashSet to keep the insertion order
private val reusableOpenStatements: mutable.LinkedHashSet[String] =
  mutable.LinkedHashSet[String]()

// set of close statements for RichFunction that will be added only once
// we use a LinkedHashSet to keep the insertion order
private val reusableCloseStatements: mutable.LinkedHashSet[String] =
  mutable.LinkedHashSet[String]()

// set of statements for cleanup dataview that will be added only once
// we use a LinkedHashSet to keep the insertion order
private val reusableCleanupStatements = mutable.LinkedHashSet[String]()

// set of statements that will be added only once per record;
// code should only update member variables because local variables are not accessible if
// the code needs to be split;
// we use a LinkedHashSet to keep the insertion order
private val reusablePerRecordStatements: mutable.LinkedHashSet[String] =
  mutable.LinkedHashSet[String]()

// map of initial input unboxing expressions that will be added only once
// (inputTerm, index) -> expr
val reusableInputUnboxingExprs: mutable.Map[(String, Int), GeneratedExpression] =
  mutable.Map[(String, Int), GeneratedExpression]()

// set of extra constructors (parameter list -> body) that will be added only once
// we use a LinkedHashSet to keep the insertion order
private val reusableConstructorStatements: mutable.LinkedHashSet[(String, String)] =
  mutable.LinkedHashSet[(String, String)]()

// map of inner class definitions (class name -> definition code), each added only once
private val reusableInnerClassDefinitionStatements: mutable.Map[String, String] =
  mutable.Map[String, String]()

// map of string constants that will be added only once
// string_constant -> reused_term
private val reusableStringConstants: mutable.Map[String, String] = mutable.Map[String, String]()

// map of type serializer that will be added only once
// LogicalType -> reused_term
private val reusableTypeSerializers: mutable.Map[LogicalType, String] =
  mutable.Map[LogicalType, String]()

// map of data structure converters that will be added only once
// DataType -> reused_term
private val reusableConverters: mutable.Map[DataType, String] =
  mutable.Map[DataType, String]()

// map of external serializer that will be added only once
// DataType -> reused_term
private val reusableExternalSerializers: mutable.Map[DataType, String] =
  mutable.Map[DataType, String]()

/**
 * The current method name for [[reusableLocalVariableStatements]]. You can start a new
 * local variable statements for another method using [[startNewLocalVariableStatement()]]
 */
private var currentMethodNameForLocalVariables = "DEFAULT"

// map of local variable statements. It will be placed in method if method code not excess
// max code length, otherwise will be placed in member area of the class. The statements
// are maintained for multiple methods, so that it's a map from method_name to variables.
//
// method_name -> local_variable_statements
private val reusableLocalVariableStatements = mutable.Map[String, mutable.LinkedHashSet[String]](
  (currentMethodNameForLocalVariables, mutable.LinkedHashSet[String]()))

/**
 * the class is used as the generated operator code's base class.
 */
private var operatorBaseClass: Class[_] = classOf[TableStreamOperator[_]]
// ---------------------------------------------------------------------------------
// Getter
// ---------------------------------------------------------------------------------

/** Returns the unboxing expression registered for (inputTerm, index), if any. */
def getReusableInputUnboxingExprs(inputTerm: String, index: Int): Option[GeneratedExpression] =
  reusableInputUnboxingExprs.get((inputTerm, index))

/** Whether generated code should emit null checks, taken from the table config. */
def nullCheck: Boolean = tableConfig.getNullCheck

/**
 * Add a line comment to [[reusableHeaderComments]] list which will be concatenated
 * into a single class header comment.
 * @param comment The comment to add for class header
 */
def addReusableHeaderComment(comment: String): Unit = {
  reusableHeaderComments.add(comment)
}
// ---------------------------------------------------------------------------------
// Local Variables for Code Split
// ---------------------------------------------------------------------------------

/**
 * Starts a new local variable statements for a generated class with the given method name.
 * Subsequent [[addReusableLocalVariable]] calls register under this method name;
 * any existing statement set for the same name is replaced.
 *
 * @param methodName the method name which the fields will be placed into if code is not split.
 */
def startNewLocalVariableStatement(methodName: String): Unit = {
  currentMethodNameForLocalVariables = methodName
  reusableLocalVariableStatements(methodName) = mutable.LinkedHashSet[String]()
}
/**
 * Adds a reusable local variable statement with the given type term and field name.
 * The local variable statements will be placed in methods or class member area depends
 * on whether the method length excess max code length.
 *
 * @param fieldName the field name prefix
 * @param fieldTypeTerm the field type term
 * @return a new generated unique field name
 */
def addReusableLocalVariable(fieldTypeTerm: String, fieldName: String): String = {
  val fieldTerm = newName(fieldName)
  // getOrElseUpdate (instead of getOrElse) registers the freshly created set in
  // the map, so the declaration is not silently added to a discarded temporary
  // set when no entry exists yet for the current method name
  reusableLocalVariableStatements
    .getOrElseUpdate(currentMethodNameForLocalVariables, mutable.LinkedHashSet[String]())
    .add(s"$fieldTypeTerm $fieldTerm;")
  fieldTerm
}
/**
 * Adds multiple pairs of local variables.
 * The local variable statements will be placed in methods or class
 * member area depends on whether the method length excess max code length.
 *
 * @param fieldTypeAndNames pairs of local variables with
 *                          left is field type term and right is field name
 * @return the new generated unique field names for each variable pairs
 */
def addReusableLocalVariables(fieldTypeAndNames: (String, String)*): Seq[String] = {
  val fieldTerms = newNames(fieldTypeAndNames.map(_._2): _*)
  fieldTypeAndNames.map(_._1).zip(fieldTerms).foreach { case (fieldTypeTerm, fieldTerm) =>
    // getOrElseUpdate registers a fresh statement set in the map when none exists,
    // so declarations are never added to a discarded temporary set
    reusableLocalVariableStatements
      .getOrElseUpdate(currentMethodNameForLocalVariables, mutable.LinkedHashSet[String]())
      .add(s"$fieldTypeTerm $fieldTerm;")
  }
  fieldTerms
}
// ---------------------------------------------------------------------------------
// generate reuse code methods
// ---------------------------------------------------------------------------------

/**
 * Renders all lines added via [[addReusableHeaderComment]] as one Java block comment.
 * @return Comment to be added as a header comment on the generated class
 */
def getClassHeaderComment(): String = {
s"""
|/*
| * ${reusableHeaderComments.mkString("\\n * ")}
| */
""".stripMargin
}
/**
 * @return code block of statements that need to be placed in the member area of the class
 *         (e.g. inner class definition)
 */
def reuseInnerClassDefinitionCode(): String = {
  reusableInnerClassDefinitionStatements.values.mkString("\\n")
}

/**
 * @return code block of statements that need to be placed in the member area of the class
 *         (e.g. member variables and their initialization)
 */
def reuseMemberCode(): String = {
  reusableMemberStatements.mkString("\\n")
}

/**
 * @return code block of statements that will be placed in the member area of the class
 *         if generated code is split or in local variables of method;
 *         passing null for methodName falls back to the current method's statements
 */
def reuseLocalVariableCode(methodName: String = currentMethodNameForLocalVariables): String = {
  if (methodName == null) {
    reusableLocalVariableStatements(currentMethodNameForLocalVariables).mkString("\\n")
  } else {
    reusableLocalVariableStatements(methodName).mkString("\\n")
  }
}

/**
 * @return code block of statements that need to be placed in the constructor
 */
def reuseInitCode(): String = {
  reusableInitStatements.mkString("\\n")
}

/**
 * @return code block of statements that need to be placed in the per record process block
 *         (e.g. Function or StreamOperator's processElement)
 */
def reusePerRecordCode(): String = {
  reusablePerRecordStatements.mkString("\\n")
}

/**
 * @return code block of statements that need to be placed in the open() method
 *         (e.g. RichFunction or StreamOperator)
 */
def reuseOpenCode(): String = {
  reusableOpenStatements.mkString("\\n")
}

/**
 * @return code block of statements that need to be placed in the close() method
 *         (e.g. RichFunction or StreamOperator)
 */
def reuseCloseCode(): String = {
  reusableCloseStatements.mkString("\\n")
}

/**
 * Note: unlike the other reuse* methods, the result ends with a trailing newline.
 * @return code block of statements that need to be placed in the cleanup() method of
 *         [AggregationsFunction]
 */
def reuseCleanupCode(): String = {
  reusableCleanupStatements.mkString("", "\\n", "\\n")
}

/**
 * @return code block of statements that unbox input variables to a primitive variable
 *         and a corresponding null flag variable
 */
def reuseInputUnboxingCode(): String = {
  reusableInputUnboxingExprs.values.map(_.code).mkString("\\n")
}
/**
 * Returns the concatenated unboxing code of the expressions registered for
 * the given inputTerm, trimmed of surrounding whitespace.
 */
def reuseInputUnboxingCode(inputTerm: String): String = {
  val codes = reusableInputUnboxingExprs.collect {
    case ((term, _), expr) if term == inputTerm => expr.code
  }
  codes.mkString("\\n").trim
}
/**
 * Renders one extra Java constructor per registered (params, body) pair;
 * each delegates to the no-arg constructor via this() before running its body.
 * @return code block of constructor statements
 */
def reuseConstructorCode(className: String): String = {
  reusableConstructorStatements.map { case (params, body) =>
s"""
|public $className($params) throws Exception {
| this();
| $body
|}
|""".stripMargin
  }.mkString("\\n")
}
/** Sets the base class of the generated operator; returns this context for chaining. */
def setOperatorBaseClass(operatorBaseClass: Class[_]): CodeGeneratorContext = {
  this.operatorBaseClass = operatorBaseClass
  this
}

/** The base class the generated operator will extend (TableStreamOperator by default). */
def getOperatorBaseClass: Class[_] = this.operatorBaseClass
// ---------------------------------------------------------------------------------
// add reusable code blocks
// ---------------------------------------------------------------------------------
/**
 * Adds a reusable inner class statement with the given class name and class code.
 * Registering the same class name twice overwrites the previous definition.
 */
def addReusableInnerClass(className: String, statements: String): Unit = {
  reusableInnerClassDefinitionStatements(className) = statements
}

/**
 * Adds a reusable member field statement to the member area.
 * Backed by a set, so adding an identical statement twice emits it only once.
 *
 * @param memberStatement the member field declare statement
 */
def addReusableMember(memberStatement: String): Unit = {
  reusableMemberStatements.add(memberStatement)
}
/**
 * Adds a reusable init statement which will be placed in constructor.
 */
def addReusableInitStatement(s: String): Unit = reusableInitStatements.add(s)

/**
 * Adds a reusable per record statement, executed once for every processed element.
 */
def addReusablePerRecordStatement(s: String): Unit = reusablePerRecordStatements.add(s)

/**
 * Adds a reusable open statement, executed once when the operator/function is opened.
 */
def addReusableOpenStatement(s: String): Unit = reusableOpenStatements.add(s)

/**
 * Adds a reusable close statement, executed once when the operator/function is closed.
 */
def addReusableCloseStatement(s: String): Unit = reusableCloseStatements.add(s)

/**
 * Adds a reusable cleanup statement.
 */
def addReusableCleanupStatement(s: String): Unit = reusableCleanupStatements.add(s)

/**
 * Adds a reusable input unboxing expression, keyed by (input term, field index) so the
 * same field of the same input is unboxed only once.
 */
def addReusableInputUnboxingExprs(
    inputTerm: String,
    index: Int,
    expr: GeneratedExpression): Unit = reusableInputUnboxingExprs((inputTerm, index)) = expr
/**
 * Adds a reusable output record statement to member area.
 *
 * @param t                   logical type of the record
 * @param clazz               concrete record class to instantiate
 * @param outRecordTerm       member field name for the record
 * @param outRecordWriterTerm optional member field name of the associated writer
 */
def addReusableOutputRecord(
    t: LogicalType,
    clazz: Class[_],
    outRecordTerm: String,
    outRecordWriterTerm: Option[String] = None): Unit = {
  // delegates to the shared helper which registers member + init statements on this context
  generateRecordStatement(t, clazz, outRecordTerm, outRecordWriterTerm, this)
}
/**
 * Adds a reusable null [[GenericRowData]] to the member area.
 *
 * @param rowTerm member field name of the row
 * @param arity   number of fields; the field types are dummies (INT) since all fields
 *                of a null row are null anyway
 */
def addReusableNullRow(rowTerm: String, arity: Int): Unit = {
  addReusableOutputRecord(
    RowType.of((0 until arity).map(_ => new IntType()): _*),
    classOf[GenericRowData],
    rowTerm)
}
/**
 * Adds a reusable internal hash set to the member area of the generated class.
 *
 * @param elements    generated expressions producing the set elements
 * @param elementType logical element type, used to pick a primitive-specialized set
 * @return the member field term of the generated set
 */
def addReusableHashSet(elements: Seq[GeneratedExpression], elementType: LogicalType): String = {
  val fieldTerm = newName("set")
  // primitive-specialized sets avoid boxing in the generated code where possible
  val setTypeTerm = elementType.getTypeRoot match {
    case TINYINT => className[ByteHashSet]
    case SMALLINT => className[ShortHashSet]
    case INTEGER => className[IntHashSet]
    case BIGINT => className[LongHashSet]
    case FLOAT => className[FloatHashSet]
    case DOUBLE => className[DoubleHashSet]
    case _ => className[ObjectHashSet[_]]
  }
  val addElementsCode = elements.map { element =>
    // null elements are tracked separately via addNull() so lookups of NULL work
    s"""
       |${element.code}
       |if (${element.nullTerm}) {
       |  $fieldTerm.addNull();
       |} else {
       |  $fieldTerm.add(${element.resultTerm});
       |}
       |""".stripMargin
  }.mkString("\n") // real newlines between element blocks ("\\n" would emit literal text)
  val setBuildingFunctionName = newName("buildSet")
  // build the set in a dedicated method to keep the constructor below the JVM method size limit
  val setBuildingFunctionCode =
    s"""
       |private void $setBuildingFunctionName() {
       |  $addElementsCode
       |  $fieldTerm.optimize();
       |}
       |""".stripMargin
  addReusableMember(
    s"""
       |final $setTypeTerm $fieldTerm = new $setTypeTerm(${elements.size});
       |$setBuildingFunctionCode
       |""".stripMargin)
  reusableInitStatements.add(s"$setBuildingFunctionName();")
  fieldTerm
}
/**
 * Adds a reusable record-level timestamp to the beginning of the SAM of the generated class.
 *
 * <p> The timestamp value is evaluated for per record, this
 * function is generally used in stream job.
 */
def addReusableRecordLevelCurrentTimestamp(): String = {
  // fixed (non-unique) term: repeated calls reuse the same member because the
  // backing collections are sets/maps keyed by the statement text
  val fieldTerm = s"timestamp"
  reusableMemberStatements.add(s"private $TIMESTAMP_DATA $fieldTerm;")
  // re-evaluated for every record via the per-record statement block
  val field =
    s"""
       |$fieldTerm =
       |  $TIMESTAMP_DATA.fromEpochMillis(java.lang.System.currentTimeMillis());
       |""".stripMargin
  reusablePerRecordStatements.add(field)
  fieldTerm
}
/**
 * Adds a reusable query-level timestamp to the beginning of the SAM of the generated class.
 *
 * <p> The timestamp value is evaluated once at query-start, this
 * function is generally used in batch job.
 */
def addReusableQueryLevelCurrentTimestamp(): String = {
  val fieldTerm = s"queryStartTimestamp"
  // the epoch time is injected into the config by the planner at query submission;
  // its absence indicates a planner bug, hence the hard failure below
  val queryStartEpoch = tableConfig.getConfiguration
    .getOptional(InternalConfigOptions.TABLE_QUERY_START_EPOCH_TIME)
    .orElseThrow(
      new JSupplier[Throwable] {
        override def get() = new CodeGenException(
          "Try to obtain epoch time of query-start fail." +
            " This is a bug, please file an issue.")
      }
    )
  // static final: the value is a compile-time constant of the generated class
  reusableMemberStatements.add(
    s"""
       |private static final $TIMESTAMP_DATA $fieldTerm =
       |$TIMESTAMP_DATA.fromEpochMillis(${queryStartEpoch}L);
       |""".stripMargin)
  fieldTerm
}
/**
 * Adds a reusable record-level local date time to the beginning of the
 * SAM of the generated class.
 *
 * <p> The timestamp value is evaluated for per record, this
 * function is generally used in stream job.
 */
def addReusableRecordLevelLocalDateTime(): String = {
  val fieldTerm = s"localTimestamp"
  // depends on the session time zone member and the per-record epoch timestamp
  val sessionTimeZone = addReusableSessionTimeZone()
  val timestamp = addReusableRecordLevelCurrentTimestamp()
  // declaration
  reusableMemberStatements.add(s"private $TIMESTAMP_DATA $fieldTerm;")
  // assignment: shift the epoch timestamp by the session zone's UTC offset
  val field =
    s"""
       |$fieldTerm = $TIMESTAMP_DATA.fromEpochMillis(
       |  $timestamp.getMillisecond() +
       |  $sessionTimeZone.getOffset($timestamp.getMillisecond()));
       |""".stripMargin
  reusablePerRecordStatements.add(field)
  fieldTerm
}
/**
 * Adds a reusable query-level local date time to the beginning of
 * the SAM of the generated class.
 *
 * <p> The timestamp value is evaluated once at query-start, this
 * function is generally used in batch job.
 */
def addReusableQueryLevelLocalDateTime(): String = {
  val fieldTerm = s"queryStartLocaltimestamp"
  // injected by the planner at submission time; absence is a planner bug
  val queryStartLocalTimestamp = tableConfig.getConfiguration
    .getOptional(InternalConfigOptions.TABLE_QUERY_START_LOCAL_TIME)
    .orElseThrow(
      new JSupplier[Throwable] {
        override def get() = new CodeGenException(
          "Try to obtain local time of query-start fail." +
            " This is a bug, please file an issue.")
      }
    )
  reusableMemberStatements.add(
    s"""
       |private static final $TIMESTAMP_DATA $fieldTerm =
       |$TIMESTAMP_DATA.fromEpochMillis(${queryStartLocalTimestamp}L);
       |""".stripMargin)
  fieldTerm
}
/**
 * Adds a reusable record-level local time to the beginning of the SAM of the generated class.
 *
 * @return the member field term holding the local time as an int (millis of day)
 */
def addReusableRecordLevelLocalTime(): String = {
  val fieldTerm = s"localTime"
  // derived from the per-record local date time member
  val localtimestamp = addReusableRecordLevelLocalDateTime()
  // declaration
  reusableMemberStatements.add(s"private int $fieldTerm;")
  val utilsName = classOf[DateTimeUtils].getCanonicalName
  // assignment, re-evaluated per record
  val field =
    s"""
       |$fieldTerm = $utilsName.timestampMillisToTime($localtimestamp.getMillisecond());
       |""".stripMargin
  reusablePerRecordStatements.add(field)
  fieldTerm
}
/**
 * Adds a reusable query-level local time to the beginning of
 * the SAM of the generated class.
 *
 * @return the static member field term holding the time as an int (millis of day)
 */
def addReusableQueryLevelLocalTime(): String = {
  val fieldTerm = s"queryStartLocaltime"
  // derived once from the query-level local date time constant
  val queryStartLocalTimestamp = addReusableQueryLevelLocalDateTime()
  val utilsName = classOf[DateTimeUtils].getCanonicalName
  // declaration
  reusableMemberStatements.add(
    s"""
       |private static final int $fieldTerm =
       | $utilsName.timestampMillisToTime($queryStartLocalTimestamp.getMillisecond());
       | """.stripMargin)
  fieldTerm
}
/**
 * Adds a reusable record-level date to the beginning of the SAM of the generated class.
 *
 * @return the member field term holding the date as an int (days since epoch)
 */
def addReusableRecordLevelCurrentDate(): String = {
  val fieldTerm = s"date"
  // derived from the per-record local date time member
  val timestamp = addReusableRecordLevelLocalDateTime()
  val utilsName = classOf[DateTimeUtils].getCanonicalName
  // declaration
  reusableMemberStatements.add(s"private int $fieldTerm;")
  // assignment
  val field = s"$fieldTerm = $utilsName.timestampMillisToDate($timestamp.getMillisecond());"
  reusablePerRecordStatements.add(field)
  fieldTerm
}
/**
 * Adds a reusable query-level date to the beginning of the SAM of the generated class.
 *
 * @return the static member field term holding the date as an int (days since epoch)
 */
def addReusableQueryLevelCurrentDate(): String = {
  val fieldTerm = s"queryStartDate"
  val utilsName = classOf[DateTimeUtils].getCanonicalName
  val timestamp = addReusableQueryLevelLocalDateTime()
  // Note: the initializer previously read "int $fieldTerm = $fieldTerm = ...", i.e. a
  // duplicated "$fieldTerm =" that generated an illegal self-referencing Java field
  // initializer. Fixed to mirror addReusableQueryLevelLocalTime().
  reusableMemberStatements.add(
    s"""
       |private static final int $fieldTerm =
       | $utilsName.timestampMillisToDate($timestamp.getMillisecond());
       |""".stripMargin)
  fieldTerm
}
/**
 * Adds a reusable TimeZone to the member area of the generated class.
 *
 * @return the shared member field term for the session time zone
 */
def addReusableSessionTimeZone(): String = {
  // resolve the session zone once at code-generation time and bake its ID into the code
  val zoneID = TimeZone.getTimeZone(tableConfig.getLocalTimeZone).getID
  val stmt =
    s"""private static final java.util.TimeZone $DEFAULT_TIMEZONE_TERM =
       | java.util.TimeZone.getTimeZone("$zoneID");""".stripMargin
  addReusableMember(stmt)
  DEFAULT_TIMEZONE_TERM
}
/**
 * Adds a reusable shift TimeZone of window to the member area of the generated class.
 *
 * @param zoneId zone the window boundaries are shifted by; baked in at generation time
 * @return the member field term for the shift time zone
 */
def addReusableShiftTimeZone(zoneId: ZoneId): String = {
  val fieldTerm = s"shiftTimeZone"
  val stmt =
    s"""private static final java.time.ZoneId $fieldTerm =
       | java.time.ZoneId.of("${zoneId.toString}");""".stripMargin
  addReusableMember(stmt)
  fieldTerm
}
/**
 * Adds a reusable [[java.util.Random]] to the member area of the generated class.
 *
 * The seed parameter must be a literal/constant expression.
 *
 * @param seedExpr optional seed; when absent (or null at runtime under null checking)
 *                 an unseeded Random is created instead
 * @return member variable term
 */
def addReusableRandom(seedExpr: Option[GeneratedExpression]): String = {
  val fieldTerm = newName("random")
  val field =
    s"""
       |final java.util.Random $fieldTerm;
       |""".stripMargin
  // three cases: seeded with runtime null check, seeded unconditionally, unseeded
  val fieldInit = seedExpr match {
    case Some(s) if nullCheck =>
      s"""
         |${s.code}
         |if (!${s.nullTerm}) {
         |  $fieldTerm = new java.util.Random(${s.resultTerm});
         |}
         |else {
         |  $fieldTerm = new java.util.Random();
         |}
         |""".stripMargin
    case Some(s) =>
      s"""
         |${s.code}
         |$fieldTerm = new java.util.Random(${s.resultTerm});
         |""".stripMargin
    case _ =>
      s"""
         |$fieldTerm = new java.util.Random();
         |""".stripMargin
  }
  reusableMemberStatements.add(field)
  reusableInitStatements.add(fieldInit)
  fieldTerm
}
/**
 * Adds a reusable Object to the member area of the generated class
 * @param obj the object to be added to the generated class
 * @param fieldNamePrefix prefix field name of the generated member field term
 * @param fieldTypeTerm field type class name; when null, the object's canonical
 *                      class name is used
 * @return the generated unique field term
 */
def addReusableObject(
    obj: AnyRef,
    fieldNamePrefix: String,
    fieldTypeTerm: String = null): String = {
  // newName guarantees a unique member name per call
  addReusableObjectWithName(obj, newName(fieldNamePrefix), fieldTypeTerm)
}
/**
 * Adds a reusable Object under an explicit member field name.
 *
 * @param obj object to register in the generated class
 * @param fieldTerm exact member field name to use
 * @param fieldTypeTerm field type class name; falls back to the object's canonical
 *                      class name when null
 * @return the field term that was passed in
 */
def addReusableObjectWithName(
    obj: AnyRef,
    fieldTerm: String,
    fieldTypeTerm: String = null): String = {
  val resolvedTypeTerm = Option(fieldTypeTerm).getOrElse(obj.getClass.getCanonicalName)
  addReusableObjectInternal(obj, fieldTerm, resolvedTypeTerm)
  fieldTerm
}
// Registers `obj` in the `references` array and wires up a transient member field
// plus an init statement that casts it back out at runtime.
private def addReusableObjectInternal(
    obj: AnyRef,
    fieldTerm: String,
    fieldTypeTerm: String): Unit = {
  // the object's slot in the runtime references array
  val idx = references.length
  // make a deep copy of the object via serialization round-trip so later mutation
  // of the original cannot leak into the generated code's runtime state
  val byteArray = InstantiationUtil.serializeObject(obj)
  val objCopy: AnyRef = InstantiationUtil.deserializeObject(
    byteArray,
    Thread.currentThread().getContextClassLoader)
  references += objCopy
  // transient: the field is repopulated from `references` on open, not serialized
  reusableMemberStatements.add(s"private transient $fieldTypeTerm $fieldTerm;")
  reusableInitStatements.add(s"$fieldTerm = ((($fieldTypeTerm) references[$idx]));")
}
/**
 * Adds a reusable [[UserDefinedFunction]] to the member area of the generated [[Function]].
 *
 * @param function [[UserDefinedFunction]] object to be instantiated during runtime
 * @param functionContextClass class of [[FunctionContext]]
 * @param contextTerm [[RuntimeContext]] term to access the [[RuntimeContext]];
 *                    when null, getRuntimeContext() of the generated class is used
 * @return member variable term
 */
def addReusableFunction(
    function: UserDefinedFunction,
    functionContextClass: Class[_ <: FunctionContext] = classOf[FunctionContext],
    contextTerm: String = null): String = {
  val classQualifier = function.getClass.getName
  // deterministic field name derived from the function, so the same UDF is reused
  val fieldTerm = CodeGenUtils.udfFieldName(function)
  addReusableObjectInternal(function, fieldTerm, classQualifier)
  // open() the UDF with the appropriate runtime context source
  val openFunction = if (contextTerm != null) {
    s"""
       |$fieldTerm.open(new ${functionContextClass.getCanonicalName}($contextTerm));
    """.stripMargin
  } else {
    s"""
       |$fieldTerm.open(new ${functionContextClass.getCanonicalName}(getRuntimeContext()));
    """.stripMargin
  }
  reusableOpenStatements.add(openFunction)
  // always close the UDF with the enclosing function/operator
  val closeFunction =
    s"""
       |$fieldTerm.close();
    """.stripMargin
  reusableCloseStatements.add(closeFunction)
  fieldTerm
}
/**
 * Adds a reusable [[DataStructureConverter]] to the member area of the generated class.
 *
 * @param dataType converter to be added
 * @param classLoaderTerm term to access the [[ClassLoader]] for user-defined classes;
 *                        when null, the runtime context's user code class loader is used
 * @return member variable term (cached per DataType, so repeated calls reuse one converter)
 */
def addReusableConverter(
    dataType: DataType,
    classLoaderTerm: String = null)
  : String = {
  reusableConverters.get(dataType) match {
    case Some(term) =>
      // already registered for this data type — reuse it
      term
    case None =>
      val converter = DataStructureConverters.getConverter(dataType)
      val converterTerm = addReusableObject(converter, "converter")
      val openConverter = if (classLoaderTerm != null) {
        s"""
           |$converterTerm.open($classLoaderTerm);
        """.stripMargin
      } else {
        s"""
           |$converterTerm.open(getRuntimeContext().getUserCodeClassLoader());
        """.stripMargin
      }
      reusableOpenStatements.add(openConverter)
      // memoize before returning so the next call for this type hits the cache
      reusableConverters(dataType) = converterTerm
      converterTerm
  }
}
/**
 * Adds a reusable [[TypeSerializer]] to the member area of the generated class.
 *
 * @param t the internal type which used to generate internal type serializer
 * @return member variable term (cached per LogicalType)
 */
def addReusableTypeSerializer(t: LogicalType): String = {
  // if type serializer has been used before, we can reuse the code that
  // has already been generated
  reusableTypeSerializers.get(t) match {
    case Some(term) => term
    case None =>
      val term = newName("typeSerializer")
      val ser = InternalSerializers.create(t)
      addReusableObjectInternal(ser, term, ser.getClass.getCanonicalName)
      reusableTypeSerializers(t) = term
      term
  }
}
/**
 * Adds a reusable [[ExternalSerializer]] to the member area of the generated class.
 *
 * @param t the external data type for which a serializer is generated
 * @return member variable term (cached per DataType)
 */
def addReusableExternalSerializer(t: DataType): String = {
  reusableExternalSerializers.get(t) match {
    case Some(term) =>
      // already registered — reuse
      term
    case None =>
      val serializer = ExternalSerializer.of(t)
      val serializerTerm = addReusableObject(serializer, "externalSerializer")
      reusableExternalSerializers(t) = serializerTerm
      serializerTerm
  }
}
/**
 * Adds a reusable static SLF4J Logger to the member area of the generated class.
 *
 * @param logTerm   member field name of the logger
 * @param clazzTerm logger name (typically the generated class name) baked into the code
 */
def addReusableLogger(logTerm: String, clazzTerm: String): Unit = {
  val stmt =
    s"""
       |private static final org.slf4j.Logger $logTerm =
       |  org.slf4j.LoggerFactory.getLogger("$clazzTerm");
       |""".stripMargin
  reusableMemberStatements.add(stmt)
}
/**
 * Adds a reusable constant to the member area of the generated class.
 *
 * @param constant constant expression; must be a literal
 * @param nullCheck NOTE(review): this parameter is currently unused in the body —
 *                  confirm whether it should gate the null-term handling
 * @return generated expression with the fieldTerm and nullTerm
 */
def addReusableConstant(
    constant: GeneratedExpression,
    nullCheck: Boolean): GeneratedExpression = {
  require(constant.literal, "Literal expected")
  val fieldTerm = newName("constant")
  val nullTerm = fieldTerm + "isNull"
  val fieldType = primitiveTypeTermForType(constant.resultType)
  // declare value + null flag as final members
  val field =
    s"""
       |private final $fieldType $fieldTerm;
       |private final boolean $nullTerm;
       |""".stripMargin
  reusableMemberStatements.add(field)
  // evaluate the literal once in the constructor
  val init =
    s"""
       |${constant.code}
       |$fieldTerm = ${constant.resultTerm};
       |$nullTerm = ${constant.nullTerm};
       |""".stripMargin
  reusableInitStatements.add(init)
  // the returned expression carries no code: the value is already materialized
  GeneratedExpression(fieldTerm, nullTerm, "", constant.resultType)
}
/**
 * Adds a reusable string constant to the member area of the generated class.
 *
 * @param value string value, baked into the generated code; cached so identical
 *              values share one member field
 * @return member field term
 */
def addReusableStringConstants(value: String): String = {
  reusableStringConstants.get(value) match {
    case Some(field) => field
    case None =>
      val field = newName("str")
      val stmt =
        s"""
           |private final $BINARY_STRING $field = $BINARY_STRING.fromString("$value");
        """.stripMargin
      reusableMemberStatements.add(stmt)
      reusableStringConstants(value) = field
      field
  }
}
/**
 * Adds a reusable MessageDigest to the member area of the generated [[Function]].
 *
 * @param algorithm digest algorithm name (e.g. "MD5"), baked into the generated code
 * @return member variable term
 */
def addReusableMessageDigest(algorithm: String): String = {
  val fieldTerm = newName("messageDigest")
  val field = s"final java.security.MessageDigest $fieldTerm;"
  reusableMemberStatements.add(field)
  // resolve the algorithm once in the constructor; unknown algorithms fail fast
  val fieldInit =
    s"""
       |try {
       |  $fieldTerm = java.security.MessageDigest.getInstance("$algorithm");
       |} catch (java.security.NoSuchAlgorithmException e) {
       |  throw new RuntimeException("Algorithm for '$algorithm' is not available.", e);
       |}
       |""".stripMargin
  reusableInitStatements.add(fieldInit)
  fieldTerm
}
/**
 * Adds a constant SHA2 reusable MessageDigest to the member area of the generated [[Function]].
 *
 * @param constant literal expression yielding the SHA-2 bit length (224/256/384/512)
 * @return member variable term
 */
def addReusableSha2MessageDigest(constant: GeneratedExpression): String = {
  require(constant.literal, "Literal expected")
  val fieldTerm = newName("messageDigest")
  val field =
    s"final java.security.MessageDigest $fieldTerm;"
  reusableMemberStatements.add(field)
  val bitLen = constant.resultTerm
  // only the four SHA-2 variants are valid; anything else fails at construction time
  val init =
    s"""
       |if ($bitLen == 224 || $bitLen == 256 || $bitLen == 384 || $bitLen == 512) {
       |  try {
       |    $fieldTerm = java.security.MessageDigest.getInstance("SHA-" + $bitLen);
       |  } catch (java.security.NoSuchAlgorithmException e) {
       |    throw new RuntimeException(
       |      "Algorithm for 'SHA-" + $bitLen + "' is not available.", e);
       |  }
       |} else {
       |  throw new RuntimeException("Unsupported algorithm.");
       |}
       |""".stripMargin
  // under null checking a null bit length yields a null digest instead of an error
  val nullableInit = if (nullCheck) {
    s"""
       |${constant.code}
       |if (${constant.nullTerm}) {
       |  $fieldTerm = null;
       |} else {
       |  $init
       |}
       |""".stripMargin
  } else {
    s"""
       |${constant.code}
       |$init
       |""".stripMargin
  }
  reusableInitStatements.add(nullableInit)
  fieldTerm
}
}
object CodeGeneratorContext {

  /** Creates a fresh [[CodeGeneratorContext]] for the given table configuration. */
  def apply(config: TableConfig): CodeGeneratorContext = new CodeGeneratorContext(config)
}
| zjureel/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/CodeGeneratorContext.scala | Scala | apache-2.0 | 33,716 |
package controllers
import javax.inject.Inject
import com.mohiva.play.silhouette.api.{ Environment, LogoutEvent, Silhouette }
import com.mohiva.play.silhouette.impl.authenticators.CookieAuthenticator
import com.mohiva.play.silhouette.impl.providers.SocialProviderRegistry
import forms._
import models.User
import play.api.i18n.MessagesApi
import scala.concurrent.Future
/**
* The basic application controller.
*
* @param messagesApi The Play messages API.
* @param env The Silhouette environment.
* @param socialProviderRegistry The social provider registry.
*/
/**
 * The basic application controller.
 *
 * @param messagesApi The Play messages API.
 * @param env The Silhouette environment.
 * @param socialProviderRegistry The social provider registry.
 */
class ApplicationController @Inject() (
  val messagesApi: MessagesApi,
  val env: Environment[User, CookieAuthenticator],
  socialProviderRegistry: SocialProviderRegistry)
  extends Silhouette[User, CookieAuthenticator] {

  /**
   * Handles the index action: renders the home page for the authenticated user.
   *
   * @return The result to display.
   */
  def index = SecuredAction.async { implicit request =>
    Future.successful(Ok(views.html.home(request.identity)))
  }

  /**
   * Handles the Sign In action: already-authenticated users are redirected home,
   * anonymous visitors get the sign-in form.
   *
   * @return The result to display.
   */
  def signIn = UserAwareAction.async { implicit request =>
    request.identity.fold(
      Future.successful(Ok(views.html.signIn(SignInForm.form, socialProviderRegistry)))
    )(_ => Future.successful(Redirect(routes.ApplicationController.index())))
  }

  /**
   * Handles the Sign Up action: already-authenticated users are redirected home,
   * anonymous visitors get the sign-up form.
   *
   * @return The result to display.
   */
  def signUp = UserAwareAction.async { implicit request =>
    request.identity.fold(
      Future.successful(Ok(views.html.signUp(SignUpForm.form)))
    )(_ => Future.successful(Redirect(routes.ApplicationController.index())))
  }

  /**
   * Handles the Sign Out action: publishes a logout event, then discards the
   * authenticator while redirecting to the index page.
   *
   * @return The result to display.
   */
  def signOut = SecuredAction.async { implicit request =>
    val redirectHome = Redirect(routes.ApplicationController.index())
    env.eventBus.publish(LogoutEvent(request.identity, request, request2Messages))
    env.authenticatorService.discard(request.authenticator, redirectHome)
  }
}
| tomoki/pomodoro-sync | app/controllers/ApplicationController.scala | Scala | apache-2.0 | 2,100 |
import scala.collection.mutable._
import scala.util._
/** Evaluates arithmetic expressions given in reverse-polish (postfix) notation. */
object ReversePolish {

  /** Returns true if `s` parses as a base-10 Int. */
  def isNumber(s: String): Boolean = Try(s.toInt).isSuccess

  /** Returns true if `s` is one of the four supported binary operators. */
  def isValidOperator(s: String): Boolean = List("/", "*", "+", "-").contains(s)

  /**
   * Applies binary operator `o` to the operands `pre` and `post` (in that order,
   * which matters for "-" and "/").
   *
   * @throws Exception if `o` is not a supported operator
   */
  def operate(o: String, pre: Int, post: Int): Int = o match {
    case "/" => pre / post
    case "*" => pre * post
    case "-" => pre - post
    case "+" => pre + post
    case _ => throw new Exception("Invalid Operator!")
  }

  /**
   * Evaluates the RPN token list and returns the resulting Int.
   *
   * Numbers are pushed onto a working stack; each operator pops two operands
   * (top of stack is the right-hand operand) and pushes the result back.
   *
   * @throws Exception if the expression is malformed (leftover operands,
   *                   missing operands, or unknown tokens)
   */
  def evaluate(ls: List[String]): Int = {
    val s = Stack[String]()
    @annotation.tailrec
    def evaluateInternal(rem: List[String]): Int = rem match {
      case Nil =>
        // a valid expression leaves exactly one number on the stack
        if (s.size == 1 && isNumber(s.top)) s.pop.toInt
        else throw new Exception("Invalid expression 1")
      case h :: t if isNumber(h) =>
        s.push(h)
        evaluateInternal(t)
      case h :: t if isValidOperator(h) && s.size >= 2 =>
        val post = s.pop
        val pre = s.pop
        if (isNumber(post) && isNumber(pre)) {
          s.push(operate(h, pre.toInt, post.toInt).toString)
          evaluateInternal(t)
        } else throw new Exception("Invalid expression 2")
      case _ => throw new Exception("Invalid expression 3")
    }
    evaluateInternal(ls)
  }

  def main(args: Array[String]): Unit = {
    val listOfLists = List(List("2", "1", "+", "3", "*"), List("4", "13", "5", "/", "+"))
    // (2 1 + 3 *) = 9 ; (4 13 5 / +) = 6 with integer division
    listOfLists.foreach(ls => println(evaluate(ls)))
  }
}
| siddhartha-chandra/Solved-in-Scala | src/main/scala/ReversePolish.scala | Scala | mit | 1,637 |
package name.abhijitsarkar.scala.scalaimpatient.traits.hierarchy
/**
 * Mixin for animals that have wings. The self-type restricts this trait so it can
 * only be mixed into Animal subtypes.
 */
trait HasWings {
  this: Animal =>
  // number of wings; concrete animals must provide a value
  val numWings: Int
}
import sbt._
import Keys._
import play.Play.autoImport._
/** sbt build definition for the Play application. */
object ApplicationBuild extends Build {

  val appName = "helloworld"
  val appVersion = "1.0"

  val appDependencies = Seq(
    // Add your project dependencies here,
    "mysql" % "mysql-connector-java" % "5.1.30", jdbc, anorm
  )

  val main = Project(appName, file(".")).enablePlugins(play.PlayScala).settings(
    version := appVersion,
    // Fix: `scalaVersion := "2.11.1"` previously sat at the top level of this object,
    // where the resulting Setting value was discarded and never applied to the
    // project. It must be part of the project's settings to take effect.
    scalaVersion := "2.11.1",
    libraryDependencies ++= appDependencies,
    // Add your own project settings here
    testOptions in Test += Tests.Argument("junitxml", "console")
  )
}
package com.twitter.finagle.server
import com.twitter.conversions.time._
import com.twitter.finagle.Stack.Param
import com.twitter.finagle._
import com.twitter.finagle.filter._
import com.twitter.finagle.param._
import com.twitter.finagle.service.{StatsFilter, TimeoutFilter}
import com.twitter.finagle.stack.Endpoint
import com.twitter.finagle.stats.ServerStatsReceiver
import com.twitter.finagle.tracing._
import com.twitter.finagle.transport.Transport
import com.twitter.jvm.Jvm
import com.twitter.util.{Closable, CloseAwaitably, Future, Return, Throw, Time}
import java.net.SocketAddress
import java.util.Collections
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._
object StackServer {
  // shared JVM-stats filter factory; built once since Jvm() probing is not free
  private[this] val newJvmFilter = new MkJvmFilter(Jvm())
  /**
   * Canonical Roles for each Server-related Stack modules.
   */
  object Role extends Stack.Role("StackServer") {
    val serverDestTracing = Stack.Role("ServerDestTracing")
    val jvmTracing = Stack.Role("JvmTracing")
    val preparer = Stack.Role("preparer")
    val protoTracing = Stack.Role("protoTracing")
  }
  /**
   * Creates a default finagle server [[com.twitter.finagle.Stack]].
   * The default stack can be configured via [[com.twitter.finagle.Stack.Param]]'s
   * in the finagle package object ([[com.twitter.finagle.param]]) and specific
   * params defined in the companion objects of the respective modules.
   *
   * <p>NOTE: modules are pushed bottom-up; the order below is significant and
   * should not be rearranged casually (see the TraceInitializerFilter comment).
   *
   * @see [[com.twitter.finagle.tracing.ServerDestTracingProxy]]
   * @see [[com.twitter.finagle.service.TimeoutFilter]]
   * @see [[com.twitter.finagle.filter.DtabStatsFilter]]
   * @see [[com.twitter.finagle.service.StatsFilter]]
   * @see [[com.twitter.finagle.filter.RequestSemaphoreFilter]]
   * @see [[com.twitter.finagle.filter.ExceptionSourceFilter]]
   * @see [[com.twitter.finagle.filter.MkJvmFilter]]
   * @see [[com.twitter.finagle.tracing.ServerTracingFilter]]
   * @see [[com.twitter.finagle.tracing.TraceInitializerFilter]]
   * @see [[com.twitter.finagle.filter.MonitorFilter]]
   * @see [[com.twitter.finagle.filter.HandletimeFilter]]
   */
  def newStack[Req, Rep]: Stack[ServiceFactory[Req, Rep]] = {
    val stk = new StackBuilder[ServiceFactory[Req, Rep]](
      stack.nilStack[Req, Rep])
    stk.push(Role.serverDestTracing, ((next: ServiceFactory[Req, Rep]) =>
      new ServerDestTracingProxy[Req, Rep](next)))
    stk.push(TimeoutFilter.serverModule)
    stk.push(DtabStatsFilter.module)
    stk.push(StatsFilter.module)
    stk.push(RequestSemaphoreFilter.module)
    stk.push(MaskCancelFilter.module)
    stk.push(ExceptionSourceFilter.module)
    stk.push(Role.jvmTracing, ((next: ServiceFactory[Req, Rep]) =>
      newJvmFilter[Req, Rep]() andThen next))
    stk.push(HandletimeFilter.module)
    // placeholder roles: protocol implementations substitute their own modules here
    stk.push(Role.protoTracing, identity[ServiceFactory[Req, Rep]](_))
    stk.push(ServerTracingFilter.module)
    stk.push(Role.preparer, identity[ServiceFactory[Req, Rep]](_))
    // The TraceInitializerFilter must be pushed after most other modules so that
    // any Tracing produced by those modules is enclosed in the appropriate
    // span.
    stk.push(TraceInitializerFilter.serverModule)
    stk.push(MonitorFilter.module)
    stk.result
  }
  /**
   * The default params used for StackServers.
   */
  val defaultParams: Stack.Params =
    Stack.Params.empty + Stats(ServerStatsReceiver)
}
/**
 * A [[com.twitter.finagle.Server Server]] that is
 * parameterized via [[com.twitter.finagle.Stack.Param]]s.
 */
trait StackBasedServer[Req, Rep]
  extends Server[Req, Rep]
  with Stack.Parameterized[StackBasedServer[Req, Rep]]
/**
 * A [[com.twitter.finagle.Server]] that composes a
 * [[com.twitter.finagle.Stack]]. Exposes both the stack and the param map so
 * concrete servers can be reconfigured functionally.
 */
trait StackServer[Req, Rep]
  extends StackBasedServer[Req, Rep]
  with Stack.Parameterized[StackServer[Req, Rep]] {
  /** The current stack used in this StackServer. */
  def stack: Stack[ServiceFactory[Req, Rep]]
  /** The current parameter map used in this StackServer */
  def params: Stack.Params
  /** A new StackServer with the provided Stack. */
  def withStack(stack: Stack[ServiceFactory[Req, Rep]]): StackServer[Req, Rep]
  /** A new StackServer with the provided parameter map. */
  def withParams(ps: Stack.Params): StackServer[Req, Rep]
  // covariant return types: configuring yields a StackServer, not just a StackBasedServer
  override def configured[P: Param](p: P): StackServer[Req, Rep]
  override def configured[P](psp: (P, Param[P])): StackServer[Req, Rep]
}
/**
 * A standard template implementation for
 * [[com.twitter.finagle.server.StackServer]].
 *
 * Concrete servers provide a transport-level [[Listener]] and a dispatcher; this
 * template wires them into the configured stack and manages connection lifecycle
 * (registration, draining and close ordering).
 */
trait StdStackServer[Req, Rep, This <: StdStackServer[Req, Rep, This]]
  extends StackServer[Req, Rep] { self =>
  /**
   * The type we write into the transport.
   */
  protected type In
  /**
   * The type we read out of the transport.
   */
  protected type Out
  /**
   * Defines a typed [[com.twitter.finagle.server.Listener]] for this server.
   * Concrete StackServer implementations are expected to specify this.
   */
  protected def newListener(): Listener[In, Out]
  /**
   * Defines a dispatcher, a function which binds a transport to a
   * [[com.twitter.finagle.Service]]. Together with a `Listener`, it
   * forms the foundation of a finagle server. Concrete implementations
   * are expected to specify this.
   *
   * @see [[com.twitter.finagle.dispatch.GenSerialServerDispatcher]]
   */
  protected def newDispatcher(transport: Transport[In, Out], service: Service[Req, Rep]): Closable
  override def configured[P: Stack.Param](p: P): This =
    withParams(params+p)
  /**
   * Creates a new StackServer with `params` used to configure this StackServer's `stack`.
   */
  def withParams(params: Stack.Params): This =
    copy1(params = params)
  def withStack(stack: Stack[ServiceFactory[Req, Rep]]): This =
    copy1(stack = stack)
  /**
   * A copy constructor in lieu of defining StackServer as a
   * case class.
   */
  protected def copy1(
    stack: Stack[ServiceFactory[Req, Rep]] = this.stack,
    params: Stack.Params = this.params
  ): This { type In = self.In; type Out = self.Out }
  def serve(addr: SocketAddress, factory: ServiceFactory[Req, Rep]): ListeningServer =
    new ListeningServer with CloseAwaitably {
      // Ensure that we have performed global initialization.
      com.twitter.finagle.Init()
      val Monitor(monitor) = params[Monitor]
      val Reporter(reporter) = params[Reporter]
      val Stats(stats) = params[Stats]
      val Label(label) = params[Label]
      // For historical reasons, we have to respect the ServerRegistry
      // for naming addresses (i.e. label=addr). Until we deprecate
      // its usage, it takes precedence for identifying a server as
      // it is the most recently set label.
      val serverLabel = ServerRegistry.nameOf(addr) getOrElse label
      // Connection bookkeeping used to explicitly manage
      // connection resources per ListeningServer. Note, draining
      // in-flight requests is expected to be managed by `newDispatcher`,
      // so we can simply `close` all connections here.
      val connections = Collections.newSetFromMap(
        new ConcurrentHashMap[Closable, java.lang.Boolean])
      // Hydrates a new ClientConnection with connection information from the
      // given `transport`. ClientConnection instances are used to
      // thread this through a finagle server stack.
      def newConn(transport: Transport[In, Out]) = new ClientConnection {
        val remoteAddress = transport.remoteAddress
        val localAddress = transport.localAddress
        def close(deadline: Time) = transport.close(deadline)
        val onClose = transport.onClose.map(_ => ())
      }
      val statsReceiver =
        if (serverLabel.isEmpty) stats
        else stats.scope(serverLabel)
      val serverParams = params +
        Label(serverLabel) +
        Stats(statsReceiver) +
        Monitor(reporter(label, None) andThen monitor)
      val serviceFactory = (stack ++ Stack.Leaf(Endpoint, factory))
        .make(serverParams)
      val server = copy1(params=serverParams)
      // Listen over `addr` and serve traffic from incoming transports to
      // `serviceFactory` via `newDispatcher`.
      val listener = server.newListener()
      val underlying = listener.listen(addr) { transport =>
        serviceFactory(newConn(transport)) respond {
          case Return(service) =>
            val d = server.newDispatcher(transport, service)
            connections.add(d)
            transport.onClose ensure connections.remove(d)
          case Throw(exc) =>
            // If we fail to create a new session locally, we continue establishing
            // the session but (1) reject any incoming requests; (2) close it right
            // away. This allows protocols that support graceful shutdown to
            // also gracefully deny new sessions.
            val d = server.newDispatcher(
              transport, Service.const(Future.exception(Failure.rejected(exc))))
            connections.add(d)
            transport.onClose ensure connections.remove(d)
            // We give it a generous amount of time to shut down the session to
            // improve our chances of being able to do so gracefully.
            d.close(10.seconds)
        }
      }
      ServerRegistry.register(addr.toString, server.stack, server.params)
      protected def closeServer(deadline: Time) = closeAwaitably {
        // Here be dragons
        // We want to do four things here in this order:
        // 1. close the listening socket
        // 2. close the factory (not sure if ordering matters for this step)
        // 3. drain pending requests for existing connections
        // 4. close those connections when their requests complete
        // closing `underlying` eventually calls Netty3Listener.close which has an
        // interesting side-effect of synchronously closing #1
        val ulClosed = underlying.close(deadline)
        // However we don't want to wait on the above because it will only complete
        // when #4 is finished. So we ignore it and close everything else. Note that
        // closing the connections here will do #2 and drain them via the Dispatcher.
        val everythingElse = Seq[Closable](factory) ++ connections.asScala.toSeq
        // and once they're drained we can then wait on the listener physically closing them
        Closable.all(everythingElse:_*).close(deadline) before ulClosed
      }
      def boundAddress = underlying.boundAddress
    }
}
| latur19318/finagle | finagle-core/src/main/scala/com/twitter/finagle/server/StackServer.scala | Scala | apache-2.0 | 10,321 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer (t3l@threelights.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe
import org.bytedeco.javacpp._
package object native {
  // JavaCPP has no native half-precision pointer; half values travel as 16-bit shorts
  type HalfPointer = ShortPointer
  // ---------------------------------------------------------------------------
  // REAL SWITCH DOUBLE
  // ---------------------------------------------------------------------------
  // Alternative aliasing kept for switching the library's "Real" precision to
  // double: enable this block and disable the FLOAT block below.
  /*
  type RealPointer = DoublePointer
  type NativeReal = NativeDouble
  final val NativeReal = NativeDouble
  */
  // ---------------------------------------------------------------------------
  // REAL SWITCH FLOAT
  // ---------------------------------------------------------------------------
  ///*
  type RealPointer = FloatPointer
  type NativeReal = NativeFloat
  final val NativeReal = NativeFloat
  //*/
  // ---------------------------------------------------------------------------
  // REAL SWITCH END
  // ---------------------------------------------------------------------------
  /** Pointer-arithmetic helpers for double pointers. */
  final implicit class DoublePointerFunctions(dp: DoublePointer) {
    // Returns a view shifted by `offset` elements (not bytes); offset == capacity
    // yields a legal zero-capacity pointer. The underlying buffer is shared.
    def withOffset(offset: Long)
    : DoublePointer = {
      require(offset >= 0L && offset <= dp.capacity())
      new DoublePointer {
        address = dp.address() + offset * DoubleEx.size
        capacity(dp.capacity() - offset)
      }
    }
  }
  /** Pointer-arithmetic helpers for float pointers. */
  final implicit class FloatPointerFunctions(fp: FloatPointer) {
    // Element-wise offset view over the same buffer; see DoublePointerFunctions.
    def withOffset(offset: Long)
    : FloatPointer = {
      require(offset >= 0L && offset <= fp.capacity())
      new FloatPointer {
        address = fp.address() + offset * FloatEx.size
        capacity(fp.capacity() - offset)
      }
    }
  }
  /** Pointer-arithmetic helpers for half-precision (16-bit) pointers. */
  final implicit class HalfPointerFunctions(fp: HalfPointer) {
    // Element-wise offset view; Half.size is presumably 2 bytes — defined elsewhere
    // in the project (not visible here).
    def withOffset(offset: Long)
    : HalfPointer = {
      require(offset >= 0L && offset <= fp.capacity())
      new HalfPointer {
        address = fp.address() + offset * Half.size
        capacity(fp.capacity() - offset)
      }
    }
  }
}
| bashimao/ltudl | base/src/main/scala/edu/latrobe/native/package.scala | Scala | apache-2.0 | 2,549 |
/*
* Copyright 2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb {
package http {
import _root_.net.liftweb.util._
import js._
import JsCmds._
import _root_.scala.xml.{NodeSeq, Elem, Text}
/**
 * Surface a user interface on top of Wiring: associate a Cell with a markup
 * element so that the element is re-rendered (via postPageJavaScript)
 * whenever the cell's value changes.
 */
object WiringUI {
  /**
   * Given a NodeSeq, a Cell and a function that can generate
   * a NodeSeq => NodeSeq from the cell's value, register the
   * postPageJavaScript that will update the element with a new value.
   *
   * @param in the NodeSeq that contains the view markup
   * @param cell the cell to associate with
   * @param f the function that performs the drawing
   *
   * @return the mutated NodeSeq (an id attribute may be added if
   * there's none already defined)
   */
  def apply[T](in: NodeSeq, cell: Cell[T])(f: T => NodeSeq => NodeSeq): NodeSeq = toNode(in, cell)((t, ns) => f(t)(ns))

  /**
   * Curried variant of `apply(in, cell)(f)`: returns a NodeSeq => NodeSeq
   * suitable for use in a CSS-selector binding.
   *
   * @param cell the cell to associate with
   * @param f the function that performs the drawing
   */
  def apply[T](cell: Cell[T])(f: T => NodeSeq => NodeSeq): NodeSeq => NodeSeq = (in: NodeSeq) => toNode(in, cell)((t, ns) => f(t)(ns))

  /**
   * Like `apply(in, cell)(f)` but wraps each update in `jsEffect`.
   *
   * @param in the NodeSeq that contains the view markup
   * @param cell the cell to associate with
   * @param jsEffect a function that wraps the SetHtml JsCmd so
   * you can, for example, fade out the old value, set the new value and
   * fade it in. The first parameter is the id of the element, the
   * second is a flag that's true if this is the first time the element is
   * being rendered (you might want to skip effects for the initial page load),
   * and the third parameter is the SetHtml JavaScript code.
   * @param f the function that performs the drawing
   *
   * @return the mutated NodeSeq (an id attribute may be added if
   * there's none already defined)
   */
  def apply[T](in: NodeSeq, cell: Cell[T], jsEffect: (String, Boolean, JsCmd) => JsCmd)(f: T => NodeSeq => NodeSeq): NodeSeq = toNode(in, cell, jsEffect)((t, ns) => f(t)(ns))

  /** Curried variant of `apply(in, cell, jsEffect)(f)`. */
  def apply[T](cell: Cell[T], jsEffect: (String, Boolean, JsCmd) => JsCmd)(f: T => NodeSeq => NodeSeq): NodeSeq => NodeSeq =
    in => toNode(in, cell, jsEffect)((t, ns) => f(t)(ns))

  /**
   * `toNode` without a JavaScript effect wrapper: the SetHtml command is
   * emitted as-is on each update.
   *
   * @param in the NodeSeq that contains the view markup
   * @param cell the cell to associate with
   * @param f the function that performs the drawing
   */
  def toNode[T](in: NodeSeq, cell: Cell[T])(f: (T, NodeSeq) => NodeSeq): NodeSeq = toNode(in, cell, (id, first, js) => js)(f)

  /** Curried variant of `toNode(in, cell)(f)`. */
  def toNode[T](cell: Cell[T])(f: (T, NodeSeq) => NodeSeq): NodeSeq => NodeSeq = in => toNode(in, cell, (id, first, js) => js)(f)

  /**
   * Render the cell's value as plain text (`toString`) and keep it updated.
   *
   * @param in the NodeSeq that contains the view markup
   * @param cell the cell to associate with
   */
  def asText[T](in: NodeSeq, cell: Cell[T]): NodeSeq =
    toNode(in, cell, (id, first, js) => js)((t, ns) => Text(t.toString))

  /** Curried variant of `asText(in, cell)`. */
  def asText[T](cell: Cell[T]): NodeSeq => NodeSeq =
    in => toNode(in, cell, (id, first, js) => js)((t, ns) => Text(t.toString))

  /**
   * Like `asText(in, cell)` but wraps each update in `jsEffect`
   * (see `apply(in, cell, jsEffect)(f)` for the effect contract).
   */
  def asText[T](in: NodeSeq, cell: Cell[T], jsEffect: (String, Boolean, JsCmd) => JsCmd): NodeSeq =
    toNode(in, cell, jsEffect)((t, ns) => Text(t.toString))

  /** Curried variant of `asText(in, cell, jsEffect)`. */
  def asText[T](cell: Cell[T], jsEffect: (String, Boolean, JsCmd) => JsCmd): NodeSeq => NodeSeq =
    in => toNode(in, cell, jsEffect)((t, ns) => Text(t.toString))

  /**
   * Core implementation: locate (or synthesize) a single Elem in the markup,
   * make sure it carries an id, and register the postPageJavaScript that
   * redraws its children with `f` whenever the cell changes.
   *
   * @param in the NodeSeq that contains the view markup
   * @param cell the cell to associate with
   * @param jsEffect a function that wraps the SetHtml JsCmd; receives the
   * element id, a flag that's true on the initial page render (so effects
   * can be skipped for the initial page load), and the SetHtml JavaScript code
   * @param f the function that performs the drawing
   *
   * @return the mutated NodeSeq (an id attribute may be added if
   * there's none already defined)
   */
  def toNode[T](in: NodeSeq, cell: Cell[T], jsEffect: (String, Boolean, JsCmd) => JsCmd)(f: (T, NodeSeq) => NodeSeq): NodeSeq = {
    // Use the first Elem in the markup, or wrap everything in a <span> so
    // there is exactly one element to target with SetHtml.
    val myElem: Elem = in.find {
      case e: Elem => true
      case _ => false
    }.map(_.asInstanceOf[Elem]).getOrElse(<span id={Helpers.nextFuncName}>{in}</span>)
    val (elem: Elem, id: String) = Helpers.findOrAddId(myElem)
    addJsFunc(cell, (t: T, first: Boolean) => {
      jsEffect(id, first, SetHtml(id, f(t, elem.child)))
    })
    elem
  }

  /**
   * Curried variant of `toNode(in, cell, jsEffect)(f)`. Delegates to that
   * overload instead of duplicating its body (the two implementations had
   * drifted into copy-paste duplication).
   */
  def toNode[T](cell: Cell[T], jsEffect: (String, Boolean, JsCmd) => JsCmd)(f: (T, NodeSeq) => NodeSeq): NodeSeq => NodeSeq =
    in => toNode(in, cell, jsEffect)(f)

  /**
   * Associate a Cell and a function that converts from the
   * cell's value to a JavaScript command to be sent to the
   * browser as part of the page's post-processing.
   *
   * @param cell the cell to associate the JavaScript to
   * @param f the function that takes the cell's value and a flag indicating
   * if this is the first time
   */
  def addJsFunc[T](cell: Cell[T], f: (T, Boolean) => JsCmd): Unit = {
    val trc = TransientRequestCell(cell)
    var lastTime: Long = 0L
    var lastValue: T = null.asInstanceOf[T]
    for {
      sess <- S.session
    } sess.addPostPageJavaScript(() => {
      val (value, ct) = trc.get
      val first = lastTime == 0L
      // Only emit JavaScript on the first render or when the value actually
      // changed since the last emission; otherwise send Noop.
      if (first || (ct > lastTime && value != lastValue)) {
        lastValue = value
        lastTime = ct
        f(value, first)
      } else Noop
    })
  }
}
/**
 * Cache the value of the cell for the duration of the transient request.
 * Stores the (value, changeTime) pair so repeated reads within one request
 * are consistent.
 */
private final case class TransientRequestCell[T](cell: Cell[T]) extends TransientRequestVar[(T, Long)](cell.currentValue) {
  // Unique salt so every cell gets its own request-var slot.
  override val __nameSalt = Helpers.nextFuncName
}
}
}
| lift/lift | framework/lift-base/lift-webkit/src/main/scala/net/liftweb/http/WiringUI.scala | Scala | apache-2.0 | 12,061 |
import scala.language.experimental.macros
// Declares a macro whose expansion is provided by Impls.foo, which is
// compiled in a separate compilation unit (required for macro implementations).
object Macros {
  def foo(x: Any): Any = macro Impls.foo
}
// Negative compiler test: `x` is deliberately undefined, so this is expected
// to fail to compile (macro arguments are type-checked before expansion).
// Do not "fix" the undefined reference.
object Test extends App {
  import Macros._
  foo(x)
}
| scala/scala | test/files/neg/macro-noexpand/Macros_Test_2.scala | Scala | apache-2.0 | 157 |
// Copyright (C) 2017 Calin Cruceru <calin.cruceru@stud.acs.upb.ro>.
//
// See the LICENCE file distributed with this work for additional
// information regarding copyright ownership.
package org.symnet
package models.iptables.virtdev
package devices
package ivds
/** Configuration for [[IVDSequencer]]: the IVDs to chain, in traversal order. */
trait IVDSequencerConfig {
  val ivds: List[IptablesVirtualDevice[_]]
}
/**
 * Chains a list of iptables virtual devices in sequence: a packet accepted
 * by one IVD is forwarded to the input of the next; acceptance by the last
 * IVD means acceptance by the sequencer as a whole.
 */
class IVDSequencer(
    name: String,
    config: IVDSequencerConfig)
  extends IptablesVirtualDevice[IVDSequencerConfig](name, 0, 0, config) {

  // NOTE: Drop port currently unused here.

  protected override def devices: List[VirtualDevice[_]] = config.ivds

  protected override def newLinks: Map[Port, Port] = {
    val chain = config.ivds
    if (chain.isEmpty) {
      // Nothing to traverse: pass straight through to the accept port.
      Map(inputPort -> acceptPort)
    } else {
      // Entry: our input feeds the first IVD's input.
      val entry = inputPort -> chain.head.inputPort
      // Exit: the last IVD's accept port feeds our accept port.
      val exit = chain.last.acceptPort -> acceptPort
      // Internally, each IVD's accept port feeds its successor's input port.
      val internal = chain.zip(chain.tail).map {
        case (prev, next) => prev.acceptPort -> next.inputPort
      }
      (internal :+ entry :+ exit).toMap
    }
  }
}
/** Builder producing an [[IVDSequencer]] over the given list of IVDs. */
class IVDSequencerBuilder(
    name: String,
    ivds: List[IptablesVirtualDevice[_]])
  extends VirtualDeviceBuilder[IVDSequencer](name) {

  override def build: IVDSequencer = {
    // Bind the constructor argument to a fresh name so the anonymous
    // config's `ivds` member does not shadow it.
    val sequenced = ivds
    new IVDSequencer(name, new IVDSequencerConfig {
      val ivds = sequenced
    })
  }
}
| calincru/iptables-sefl | src/main/scala/org/symnet/models/iptables/virtdev/devices/ivds/IVDSequencer.scala | Scala | mit | 1,552 |
package es.weso.shex
import com.hp.hpl.jena.rdf.model.ModelFactory
import com.hp.hpl.jena.rdf.model.Model
import com.hp.hpl.jena.datatypes.xsd.XSDDatatype
import java.util.Calendar
import java.text.SimpleDateFormat
/**
 * Accumulates the results of running a test suite and can serialize them as
 * an EARL (Evaluation and Report Language) RDF report using Apache Jena.
 */
case class Report(items: List[SingleTestReport]) {

  // Prepends a single result (most recent first).
  def addTestReport(r: SingleTestReport) : Report = {
    Report(r :: items)
  }

  // Convenience overload that builds the SingleTestReport in place.
  def addTestReport(passed:Boolean,name:String,uriTest:String,testType: String, msg:String) : Report = {
    Report(SingleTestReport(passed,name,uriTest,testType,msg) :: items)
  }

  // Merges this report's items with another's.
  def concat(other: Report) : Report = {
    Report(items ++ other.items)
  }

  /**
   * Builds a Jena RDF model containing DOAP/FOAF metadata about the Wesin
   * project plus one EARL assertion (with outcome passed/failed) per item.
   */
  def generateEARL : Model = {
    val model = ModelFactory.createDefaultModel
    // xsd:dateTime formatter for the report's timestamps.
    val sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss")
    // Vocabulary namespace URIs.
    val foaf = "http://xmlns.com/foaf/0.1/"
    val doap = "http://usefulinc.com/ns/doap#"
    val wesinURL = "http://purl.org/weso/wesin"
    val turtleURL = "http://www.w3.org/TR/turtle/"
    val rdf = "http://www.w3.org/1999/02/22-rdf-syntax-ns#"
    val earl = "http://www.w3.org/ns/earl#"
    val dc = "http://purl.org/dc/terms/"
    val rdfs = "http://www.w3.org/2000/01/rdf-schema#"
    // NOTE(review): xsd is unused below.
    val xsd = "http://www.w3.org/2001/XMLSchema#"
    val turtleTests = "http://www.w3.org/2013/TurtleTests/manifest.ttl#"
    // Register prefixes so serialized output is readable.
    model.setNsPrefix("doap", doap)
    model.setNsPrefix("rdf", rdf)
    model.setNsPrefix("rdfs", rdfs)
    model.setNsPrefix("foaf", foaf)
    model.setNsPrefix("earl", earl)
    model.setNsPrefix("dc", dc)
    model.setNsPrefix("ttlTests", turtleTests)
    // Properties and resources from the vocabularies used below.
    val rdf_type = model.createProperty(rdf+"type")
    val foaf_name = model.createProperty(foaf + "name")
    val foaf_homepage = model.createProperty(foaf + "homepage")
    val foaf_primaryTopic = model.createProperty(foaf + "primaryTopic")
    val foaf_maker = model.createProperty(foaf + "maker")
    val doapProject = model.createResource(doap+"Project")
    val doapVersion = model.createResource(doap+"Version")
    val doap_name = model.createProperty(doap+"name")
    // NOTE(review): doap_license is unused below.
    val doap_license = model.createProperty(doap+"license")
    val doap_developer = model.createProperty(doap+"developer")
    val doap_maintainer = model.createProperty(doap+"maintainer")
    val doap_documenter = model.createProperty(doap+"documenter")
    val doap_maker = model.createProperty(doap+"maker")
    val doap_homePage = model.createProperty(doap+"homepage")
    val doap_implements = model.createProperty(doap+"implements")
    val doap_downloadPage = model.createProperty(doap+"download-page")
    val doap_programmingLanguage = model.createProperty(doap+"programming-language")
    val doap_release = model.createProperty(doap+"release")
    val doap_created = model.createProperty(doap+"created")
    val dc_issued = model.createProperty(dc+"issued")
    val dc_title = model.createProperty(dc+"title")
    val dc_description = model.createProperty(dc+"description")
    val dc_date = model.createProperty(dc+"date")
    val dc_creator = model.createProperty(dc+"creator")
    val earlSoftware = model.createResource(earl+"Software")
    val earlTestSubject = model.createResource(earl+"TestSubject")
    val earlAssertion = model.createResource(earl+"Assertion")
    val earlTestResult = model.createResource(earl+"TestResult")
    val earl_automatic = model.createResource(earl+"automatic")
    val earl_assertedBy = model.createProperty(earl+"assertedBy")
    val earl_subject = model.createProperty(earl+"subject")
    val earl_test = model.createProperty(earl+"test")
    val earl_result = model.createProperty(earl+"result")
    val earl_mode = model.createProperty(earl+"mode")
    val earl_outcome = model.createProperty(earl+"outcome")
    val earl_passed = model.createProperty(earl+"passed")
    val earl_failed = model.createProperty(earl+"failed")
    // The author, the release (blank node), the project, and this report.
    val labra = model.createResource("http://www.di.uniovi.es/~labra#me")
    val release = model.createResource()
    val wesin = model.createResource(wesinURL)
    val thisReport = model.createResource("")
    val now = model.createTypedLiteral(sdf.format(Calendar.getInstance.getTime),XSDDatatype.XSDdateTime)
    // Static metadata about the author, the report and the project.
    model.add(labra,foaf_name,"Jose Emilio Labra Gayo")
    model.add(labra,foaf_homepage,"http://www.di.uniovi.es/~labra")
    model.add(thisReport,foaf_maker,labra)
    model.add(thisReport,foaf_primaryTopic,wesin)
    model.add(thisReport,dc_issued,now)
    model.add(wesin,rdf_type,doapProject)
    model.add(wesin,rdf_type,earlSoftware)
    model.add(wesin,rdf_type,earlTestSubject)
    model.add(wesin,doap_name,"Wesin")
    model.add(wesin,doap_homePage,wesinURL)
    model.add(wesin,doap_implements,turtleURL)
    model.add(wesin,doap_developer,labra)
    model.add(wesin,doap_maintainer,labra)
    model.add(wesin,doap_documenter,labra)
    model.add(wesin,doap_maker,labra)
    model.add(wesin,doap_downloadPage,wesinURL)
    model.add(wesin,doap_programmingLanguage,"Scala")
    model.add(wesin,dc_title,"Wesin")
    model.add(wesin,dc_date,model.createTypedLiteral("2013-08-29",XSDDatatype.XSDdateTime))
    model.add(wesin,dc_creator,labra)
    model.add(wesin,dc_description,model.createLiteral("WESIN - Web Semantics using Inductive Graphs","en"))
    model.add(wesin,doap_release,release)
    model.add(release,doap_name,"Wesin")
    model.add(release,doap_created,
      model.createTypedLiteral("2013-08-28",XSDDatatype.XSDdate))
    model.add(release,rdf_type,doapVersion)
    // Information about a test item: one earl:Assertion per recorded result.
    for (r <- items) {
      val t = model.createResource()
      val result = model.createResource()
      model.add(t,rdf_type,earlAssertion)
      model.add(t,earl_test,model.createResource(turtleTests + r.uriTest) )
      model.add(t,foaf_name,r.name)
      model.add(t,earl_assertedBy,labra)
      model.add(t,earl_mode,earl_automatic)
      model.add(t,earl_subject,wesin)
      model.add(t,earl_result,result)
      model.add(result,rdf_type,earlTestResult)
      val outcome = if (r.passed) earl_passed
                    else earl_failed
      model.add(result,earl_outcome,outcome)
      model.add(result,dc_date,now)
    }
    model
  }
}
object Report {

  // An empty report to start accumulating results into.
  def initial = Report(List())

  // Ordering predicate for sorting results alphabetically by test name.
  def sortReport(r1: SingleTestReport, r2: SingleTestReport): Boolean =
    r1.name.compareTo(r2.name) < 0
}
/** Outcome of a single test run, with enough context to render a summary line. */
case class SingleTestReport(
    passed: Boolean,  // True if test passed
    name: String,     // Name of test
    uriTest: String,  // URI of test
    testType: String, // Type of test
    moreInfo: String  // Info about what happened
) {

  // One-line human-readable summary, e.g. "sem. OK t1, uri: u1. details".
  override def toString: String = {
    val status = if (passed) "OK" else "Failed"
    s"$testType. $status $name, uri: $uriTest. $moreInfo"
  }
}
| labra/wiGenerator | src/test/scala/es/weso/shex/Report.scala | Scala | mit | 6,892 |
package net.nomadicalien.ch2
import org.specs2.mutable.Specification
// Specification for UnCurry.uncurry: converting a curried two-argument
// function (A => B => C) into the equivalent uncurried ((A, B) => C).
class UnCurrySpec extends Specification {
  import net.nomadicalien.ch2.UnCurry._

  "uncurry 2 addition" in {
    val u = uncurry({a:Int => {b:Int => a + b}})
    u(2, 3) must_== (5)
    u(2, 9) must_== (11)
  }

  "uncurry 2 multiply" in {
    val u = uncurry({a:Int => {b:Int => a * b}})
    u(2,3) must_== (6)
    u(2,9) must_== (18)
  }
}
| BusyByte/func-prog-scala | exercises-and-notes/src/test/scala/net/nomadicalien/ch2/UnCurrySpec.scala | Scala | apache-2.0 | 418 |
package com.thoughtworks.deeplearning
import java.nio.{ByteBuffer, FloatBuffer, IntBuffer}
import com.thoughtworks.deeplearning.OpenCLCodeGenerator._
import org.lwjgl.opencl._
import CL10._
import CL11._
import CL12._
import org.scalatest.{FreeSpec, Matchers}
import org.lwjgl.BufferUtils
import org.lwjgl.system.MemoryUtil._
import org.lwjgl.system.MemoryStack._
import org.lwjgl.system.Pointer._
/**
* @author 杨博 (Yang Bo) <pop.atry@gmail.com>
*/
// End-to-end test: generates OpenCL C from the DSL, then compiles and runs it
// on a real OpenCL device via LWJGL. Requires an OpenCL runtime to be present.
final class OpenCLCodeGeneratorSpec extends FreeSpec with Matchers {

  // Fails fast on any non-success OpenCL status code.
  private def checkCLError(errorCode: Int) = {
    if (errorCode != CL_SUCCESS) {
      throw new IllegalStateException(raw"""OpenCL error [$errorCode]""")
    }
  }

  // NOTE(review): these indices appear unused below — selection is done via
  // the rank functions instead.
  val PlatformIndex = 0
  val DeviceIndex = 0

  // Ranking hooks used with maxBy to pick a platform/device; with a Unit
  // result every candidate ranks equal, so an arbitrary one is chosen.
  def platformRank(platformId: Long, platformCapabilities: CLCapabilities): Unit = {}

  def deviceRank(deviceId: Long, deviceCapabilities: CLCapabilities): Unit = {}

  "Plus" in {
    // Kernel "f": writes (1.5 + 1.5) into output[get_global_id(0)].
    val kernel = OpenCLCodeGenerator.KernelDefinition(
      "f",
      Seq(Parameter('output, DslType.DslBuffer(DslType.DslStructure(List(DslType.DslDouble))))),
      Seq(
        DslEffect.Update(
          DslExpression.Identifier('output),
          DslExpression.GetGlobalId(DslExpression.IntLiteral(0)),
          DslExpression.HCons(
            DslExpression.Plus(DslExpression.DoubleLiteral(1.5), DslExpression.DoubleLiteral(1.5), DslType.DslDouble),
            DslExpression.HNilLiteral),
          DslType.DslStructure(List(DslType.DslDouble))
        ))
    )
    val cl = OpenCLCodeGenerator.generateSourceCode(kernel).toArray[CharSequence]
    cl should not be empty
    // println(cl.mkString)
    // Host-side result slot; filled in by clEnqueueReadBuffer below.
    val output = Array(0.0)
    val stack = stackPush()
    try {
      // Discover platforms (two-call pattern: count, then ids).
      val Array(numberOfPlatformIDs) = {
        val a = Array(0)
        checkCLError(clGetPlatformIDs(null, a))
        a
      }
      val platformIds = stack.mallocPointer(numberOfPlatformIDs)
      checkCLError(clGetPlatformIDs(platformIds, null: IntBuffer))
      val (platformId, platformCapabilities) = (for (i <- 0 until platformIds.capacity) yield {
        val platformId = platformIds.get(i)
        val platformCapabilities = CL.createPlatformCapabilities(platformId)
        platformId -> platformCapabilities
      }).maxBy((platformRank _).tupled)
      // Discover devices on the chosen platform (same two-call pattern).
      val Array(numberOfDevices) = {
        val a = Array(0)
        checkCLError(clGetDeviceIDs(platformId, CL_DEVICE_TYPE_ALL, null, a))
        a
      }
      val deviceIds = stack.mallocPointer(numberOfDevices)
      checkCLError(clGetDeviceIDs(platformId, CL_DEVICE_TYPE_ALL, deviceIds, null: IntBuffer))
      val (deviceId, deviceCapabilities) = (for (i <- 0 until deviceIds.capacity()) yield {
        val deviceId = deviceIds.get(i)
        val deviceCapabilities = CL.createDeviceCapabilities(deviceId, platformCapabilities)
        deviceId -> deviceCapabilities
      }).maxBy((deviceRank _).tupled)
      // Context error callback: prints driver-reported diagnostics.
      val callback = CLContextCallback.create(new CLContextCallbackI {
        override def invoke(errInfo: Long, privateInfo: Long, size: Long, userData: Long): Unit = {
          println(memASCII(errInfo))
          memByteBuffer(privateInfo, size.toInt)
        }
      })
      try {
        stack.push()
        val context = try {
          val errorCode = stack.ints(0)
          val contextProperties = stack.pointers(CL_CONTEXT_PLATFORM, platformId, 0)
          val context = clCreateContext(contextProperties, deviceId, callback, NULL, errorCode)
          checkCLError(errorCode.get(0))
          context
        } finally {
          stack.pop()
        }
        try {
          val commandQueue = {
            val a = Array(0)
            val commandQueue = clCreateCommandQueue(context, deviceId, NULL, a);
            checkCLError(a.head)
            commandQueue
          }
          try {
            stack.push()
            // Compile the generated source into a program, then a kernel.
            val program = try {
              val errorCode = stack.ints(0)
              val program = clCreateProgramWithSource(context, cl, errorCode)
              checkCLError(errorCode.get(0))
              program
            } finally {
              stack.pop()
            }
            try {
              checkCLError(clBuildProgram(program, deviceId, "", null, NULL))
              val kernel = {
                val a = Array(0)
                val kernel = clCreateKernel(program, "f", a)
                checkCLError(a(0))
                kernel
              }
              try {
                // TODO: debugging only (translated from original comment)
                val buffer = {
                  val a = Array(0)
                  val buffer = clCreateBuffer(context, CL_MEM_COPY_HOST_PTR | CL_MEM_READ_WRITE, Array(0.0), a)
                  checkCLError(a(0))
                  buffer
                }
                try {
                  checkCLError(clSetKernelArg1p(kernel, 0, buffer))
                  stack.push()
                  // Launch a single work item (global and local size both 1).
                  val event = try {
                    val eventPointer = stack.pointers(0L)
                    checkCLError(
                      clEnqueueNDRangeKernel(commandQueue,
                        kernel,
                        1,
                        stack.pointers(0L),
                        stack.pointers(1L),
                        stack.pointers(1L),
                        null,
                        eventPointer))
                    eventPointer.get(0)
                  } finally {
                    stack.pop()
                  }
                  // Read the result back once the kernel event completes.
                  val event2 = try {
                    stack.push()
                    try {
                      val eventPointer1 = stack.pointers(event)
                      val eventPointer2 = stack.pointers(0L)
                      checkCLError(
                        clEnqueueReadBuffer(commandQueue, buffer, CL_FALSE, 0, output, eventPointer1, eventPointer2))
                      eventPointer2.get(0)
                    } finally {
                      stack.pop()
                    }
                  } finally {
                    clReleaseEvent(event)
                  }
                  // checkCLError(
                  //   clSetEventCallback(
                  //     event,
                  //     CL_COMPLETE,
                  //     new CLEventCallbackI {
                  //       override def invoke(event2: Long, status: Int, user_data: Long): Unit = {
                  //         println(s"error $status")
                  //       }
                  //     },
                  //     NULL
                  //   ))
                  // Release the read event from its completion callback.
                  checkCLError(
                    clSetEventCallback(
                      event2,
                      CL_COMPLETE,
                      new CLEventCallbackI {
                        override def invoke(event2: Long, status: Int, user_data: Long): Unit = {
                          clReleaseEvent(event2)
                        }
                      },
                      NULL
                    ))
                } finally {
                  clReleaseMemObject(buffer)
                }
              } finally {
                clReleaseKernel(kernel)
              }
            } finally {
              clReleaseProgram(program)
            }
          } finally {
            checkCLError(clReleaseCommandQueue(commandQueue))
          }
        } finally {
          checkCLError(clReleaseContext(context))
        }
      } finally {
        callback.close()
      }
    } finally {
      stack.close()
    }
    // 1.5 + 1.5 written by the kernel.
    output should be(Array(3.0))
  }
}
| Atry/DeepLearning.scala | OpenCLCodeGenerator/src/test/scala/com/thoughtworks/deeplearning/OpenCLCodeGeneratorSpec.scala | Scala | apache-2.0 | 7,615 |
/*
* Copyright (c) 2008, Michael Pradel
* All rights reserved. See LICENSE for details.
*/
package scala.roles.internal
import scala.collection.mutable.HashMap
import java.lang.reflect.{InvocationHandler => JavaInvocationHandler, Method, InvocationTargetException, Proxy, Modifier}
import scala.roles.internal._
class InvocationHandler(core: AnyRef, role: AnyRef, sharedIdentities: Boolean, beforeCalls: InvocationHandler.CallMapping, afterCalls: InvocationHandler.CallMapping) extends JavaInvocationHandler {
import InvocationHandler._
type AnyClass = Class[T] forSome { type T }
def invoke(proxy: AnyRef, method: Method, args: Array[Object]): AnyRef = {
def specialNames(m: Method): String = {
val pts = m.getParameterTypes
if (m.getName == "toString" && pts.length == 0)
"toString"
else if (m.getName == "equals" && pts.length == 1 &&
pts(0) == classOf[Object])
"equals"
else if (m.getName == "hashCode" && pts.length == 0)
"hashCode"
else if (m.getName == "role" && pts.length == 0)
"role"
else
null
}
if (sharedIdentities) {
specialNames(method) match {
case "toString" => core.toString
case "equals" =>
if (args(0).isInstanceOf[Proxy]) {
val other = args(0).asInstanceOf[Proxy with HasCore[_]]
val otherCore = other.core
boolean2Boolean((core == otherCore))
} else
boolean2Boolean(core == (args(0)))
case "hashCode" => int2Integer(core.hashCode)
case "role" => role
case _ => invokeNormalMethod(method, args, proxy)
}
} else {
invokeNormalMethod(method, args, proxy)
}
}
private def compareTypes(s: Seq[(AnyClass, AnyClass)]): Boolean = {
s.foreach(tuple => if (tuple._1.getName != tuple._2.getName) return false)
true
}
/**
Compares two method and returns true iff they have the same name, number of parameters and parameter types.
*/
protected def compare(m1: Method, m2: Method): Boolean = {
//print(" comparing "+m1.getName+" with "+m2.getName+" : ")
// name
if (m1.getName != m2.getName) return false
// parameter types
val types1 = m1.getParameterTypes
val types2 = m2.getParameterTypes
//print(compareTypes(types1 zip types2)+", "+(types1.size == types2.size)+" -> ")
if (types1.size != types2.size
|| !compareTypes(types1 zip types2)) return false
//println("TRUE")
true
}
/**
Compares two method and returns true iff they have the same name
and selfM prepends the parameter list of normalM with a 'self' parameter
of type selfT. I.e. selfM is a variant of normalM that allows to pass
a value for 'this' (aka 'self') to be used in the method implementation.
*/
protected def compareSelfMethod(normalM: Method, selfM: Method, selfT: String): Boolean = {
// name
if (normalM.getName != selfM.getName) return false
// parameter types
val types1 = normalM.getParameterTypes
val types2 = selfM.getParameterTypes
if (types1.size + 1 != types2.size
|| !compareTypes(types1 zip types2.drop(1))
|| types2(0).getName != selfT) return false
true
}
protected def invokeNormalMethod(method: Method, args: Array[Object], proxy: AnyRef): AnyRef = {
def delegate(cl: ClassLoader, cls: AnyClass, m: Method, args: Array[Object]): Option[Object] =
findImplementation(cl, cls.getInterfaces.apply(0), m) match {
case Some(mImpl) => Some(callWithSelf(mImpl, args))
case None => None
}
def findImplementation(cl: ClassLoader, itf: AnyClass, m: Method): Option[Method] = {
//println("checking... "+itf.getName)
itf.getMethods.find(x => compare(m, x)) match {
case Some(foundM) =>
var traitImpl: AnyClass = null
try { traitImpl = cl.loadClass(itf.getName+"$class") }
catch { case e: ClassNotFoundException => {
//println("classfile not found: "+itf.getName+"$class")
itf.getInterfaces.foreach(i => {
findImplementation(cl, i, m) match {
case Some(foundM) => return Some(foundM)
case None =>
}
})
return None
}}
traitImpl.getMethods.find(mImpl => compareSelfMethod(m, mImpl, itf.getName)) match {
case Some(x) if((x.getModifiers & Modifier.ABSTRACT) == 0) => return Some(x)
case _ =>
//println(m.getName+": found no method or abstract one in "+traitImpl.getName)
itf.getInterfaces.foreach(i => {
findImplementation(cl, i, m) match {
case Some(foundM) => return Some(foundM)
case None =>
}
})
return None
}
case _ => None
}
}
def callWithSelf(m: Method, args: Array[Object]) = {
val newArgs = if (args != null) { /*println(args.size);*/ Array(proxy)++args } else Array(proxy)
//println("invoking... " + m)
m.invoke(null, newArgs:_*)
}
try {
List(role, core).foreach(d => {
val methods = d.getClass.getMethods
d.getClass.getMethods.foreach(foundMethod => {
if (compare(method, foundMethod)) {
val mId = methodId(foundMethod)
applySlotMethods(beforeCalls, mId)
var r: AnyRef = null
delegate(d.getClass.getClassLoader, d.getClass, foundMethod, args) match {
case Some(x) => r = x
case None =>
// if delegation not possible, forward the call (should only happen for setters and getters)
r = foundMethod.invoke(d, args:_*)
}
applySlotMethods(afterCalls, mId)
return r
}
})
})
throw new Exception("ERROR: method " + method + " not found in " + role + " or " + core);
} catch {
case e: InvocationTargetException => throw e.getTargetException()
}
}
private def applySlotMethods(callMapping: CallMapping, methodId: String) = {
callMappings(callMapping, methodId) match {
case Some(l) => l.foreach(_.apply)
case None =>
}
}
/**
 * Get the list of functions to invoke when the method described by 'methodId' is called.
 *
 * For ids without a parameter list the lookup is exact; otherwise the lookup
 * order is: exact id, then the wildcard form "name(*)", and finally scalac's
 * private-field setter encoding "surroundingClass$$fieldName_$eq(fieldType)".
 */
private def callMappings(mapping: CallMapping, methodId: String): Option[List[() => Unit]] = {
  if (!methodId.contains("(")) mapping.get(methodId)
  else {
    mapping.get(methodId)
      .orElse(mapping.get(wildCarded(methodId)))
      .orElse {
        // map(_._2) replaces the previous type pattern
        // `case Some(e: (String, List[() => Unit]))`, which was unchecked due
        // to erasure (the generic tuple arguments could not be verified)
        mapping.iterator.find(e =>
          e._1.contains("_$eq") && wildCarded(methodId).contains("$$" + e._1.substring(0, e._1.indexOf('(')) + "(*)")
        ).map(_._2)
      }
  }
}
private def wildCarded(methodId: String) = methodId.substring(0, methodId.indexOf('(')+1)+"*"+")"
/** Builds an id of the form "name(paramType1,paramType2)" for a reflective method. */
private def methodId(m: Method) =
  m.getParameterTypes.map(_.getName).mkString(m.getName + "(", ",", ")")
}
object InvocationHandler {
  /**
   * Maps a method id (as built by `methodId`, e.g. "name(type1,type2)") to the
   * list of callbacks to run for that method.
   */
  type CallMapping = HashMap[String, List[() => Unit]]
}
| tupshin/Scala-Roles | src/scala/roles/internal/InvocationHandler.scala | Scala | bsd-3-clause | 7,707 |
package com.noackexpected.algorithm5.item
/**
* Copyright 2016 Michael J. Cibulskis
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/** An item identified by an [[ItemID]]. */
trait Item {
  /** Returns this item's identifier. */
  def getID(): ItemID
}
| mcibulskis/algorithm5 | src/main/scala/com/noackexpected/algorithm5/item/Item.scala | Scala | apache-2.0 | 681 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.convert.xml
import java.io.StringWriter
import java.util.concurrent.ConcurrentHashMap
import com.typesafe.config.ConfigFactory
import javax.xml.transform.dom.DOMSource
import javax.xml.transform.stream.StreamResult
import javax.xml.transform.{OutputKeys, Transformer, TransformerFactory}
import javax.xml.xpath.XPathExpression
import org.locationtech.geomesa.convert.EvaluationContext
import org.locationtech.geomesa.convert2.transforms.TransformerFunction.NamedTransformerFunction
import org.locationtech.geomesa.convert2.transforms.{TransformerFunction, TransformerFunctionFactory}
import org.locationtech.geomesa.utils.cache.SoftThreadLocal
import org.w3c.dom.Element
/**
 * Converter transform functions for XML documents: `xmlToString`/`xml2string`
 * serializes a DOM element back to a string, and `xpath` evaluates an XPath
 * expression against an XML argument.
 */
class XmlFunctionFactory extends TransformerFunctionFactory {

  import scala.collection.JavaConverters._

  // default XPathFactory implementation, read from the bundled reference config
  lazy private val defaultXPathFactory = ConfigFactory.load("xml-converter-defaults").getString("xpath-factory")

  override def functions: Seq[TransformerFunction] = Seq(xpath, xmlToString)

  private val xmlToString: TransformerFunction =
    new NamedTransformerFunction(Seq("xmlToString", "xml2string"), pure = true) {

      // one Transformer per thread (cached softly), configured once on creation
      private val transformers = new SoftThreadLocal[Transformer]

      override def eval(args: Array[Any])(implicit ctx: EvaluationContext): Any = {
        val element = args.head.asInstanceOf[Element]
        val transformer = transformers.getOrElseUpdate {
          val t = TransformerFactory.newInstance().newTransformer()
          t.setOutputProperty(OutputKeys.ENCODING, "utf-8")
          t.setOutputProperty(OutputKeys.INDENT, "no")
          t.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes")
          t
        }
        val result = new StreamResult(new StringWriter())
        transformer.transform(new DOMSource(element), result)
        result.getWriter.toString
      }
    }

  // consistency: use Seq(...) for the names like xmlToString does (was Array(...))
  private val xpath: TransformerFunction = new NamedTransformerFunction(Seq("xpath"), pure = true) {

    // compiled expressions cached by (expression, factory, namespaces)
    // NOTE(review): javax.xml.xpath.XPathExpression is documented as not
    // thread-safe; sharing cached instances across threads should be confirmed
    // safe for the XPath implementations actually used here.
    private val cache = new ConcurrentHashMap[Any, XPathExpression]()

    override def eval(args: Array[Any])(implicit ctx: EvaluationContext): Any = {
      // optional 3rd argument: XPathFactory implementation class name
      val factory = if (args.lengthCompare(3) < 0) { defaultXPathFactory } else {
        Option(args(2).asInstanceOf[String]).getOrElse(defaultXPathFactory)
      }
      // optional 4th argument: namespace prefix -> URI bindings
      val namespaces: Map[String, String] = if (args.lengthCompare(4) < 0) { Map.empty } else {
        Option(args(3).asInstanceOf[java.util.Map[String, String]]).map(_.asScala.toMap).getOrElse(Map.empty)
      }
      val key = (args(0), factory, namespaces)
      // computeIfAbsent replaces the previous get/put sequence, whose
      // check-then-act race could compile the same expression several times
      // under concurrent access
      val path = cache.computeIfAbsent(key, new java.util.function.Function[Any, XPathExpression] {
        override def apply(k: Any): XPathExpression =
          XmlConverter.createXPath(factory, namespaces).compile(args(0).asInstanceOf[String])
      })
      path.evaluate(args(1))
    }
  }
}
| aheyne/geomesa | geomesa-convert/geomesa-convert-xml/src/main/scala/org/locationtech/geomesa/convert/xml/XmlFunctionFactory.scala | Scala | apache-2.0 | 3,292 |
package models
import database.helper.LdapUserStatus
import play.api.libs.json.{Json, Writes}
import java.util.UUID
/**
 * An employee user; its [[status]] is always
 * [[LdapUserStatus.EmployeeStatus]].
 */
case class Employee(
    systemId: String,
    campusId: String,
    lastname: String,
    firstname: String,
    email: String,
    id: UUID
) extends User {
  // employees are fixed to the LDAP employee status
  override def status = LdapUserStatus.EmployeeStatus
}
object Employee {
  /** JSON serializer for [[Employee]], derived from its case-class fields. */
  implicit val writes: Writes[Employee] = Json.writes[Employee]
}
| THK-ADV/lwm-reloaded | app/models/Employee.scala | Scala | mit | 419 |
package optional.examples
/**
 * Example application declaring two overloaded `main` methods — given the
 * object's name, presumably a deliberate demonstration of an invalid entry
 * point for `optional.Application` (TODO confirm intent against the framework).
 */
object Erroneous extends optional.Application
{
  def main(times: Option[Int], greeting: Option[String], file: java.io.File) { }
  def main(dingle: Option[Int]) {}
}
| marmbrus/optional | src/main/scala/examples/erroneous.scala | Scala | bsd-3-clause | 193 |
/*
* =========================================================================================
* Copyright © 2015 the khronus project <https://github.com/hotels-tech/khronus>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
* =========================================================================================
*/
package com.searchlight.khronus.influx.finder
import com.searchlight.khronus.influx.parser._
import com.searchlight.khronus.influx.service.{ InfluxEndpoint, InfluxSeries }
import com.searchlight.khronus.model._
import com.searchlight.khronus.store.{ Summaries, SummaryStore, Slice, MetaSupport }
import com.searchlight.khronus.util.{ ConcurrencySupport, Measurable, Settings }
import scala.collection.concurrent.TrieMap
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ ExecutionContext, Future }
import scala.collection.SeqView
/**
 * Resolves Influx-style queries against Khronus metric summaries.
 *
 * `search` either lists the known series or parses and executes a query: the
 * query's time filters become a [[Slice]], the group-by duration is snapped to
 * a configured time window, summaries are fetched per source and finally
 * shaped into [[InfluxSeries]] results.
 */
trait InfluxQueryResolver extends MetaSupport with Measurable with ConcurrencySupport {
  this: InfluxEndpoint ⇒

  import com.searchlight.khronus.influx.finder.InfluxQueryResolver._

  implicit val executionContext: ExecutionContext = executionContext("influx-query-resolver-worker")
  val parser = new InfluxQueryParser

  /** Entry point: dispatches "list series" requests, everything else is a query. */
  def search(search: String): Future[Seq[InfluxSeries]] = search match {
    case GetSeriesPattern(expression) ⇒ listSeries(s".*$expression.*")
    case query ⇒ executeQuery(query)
  }

  /** Lists all metric names matching `expression`, as a single pseudo-series. */
  private def listSeries(expression: String): Future[Seq[InfluxSeries]] = {
    log.info(s"Listing series $expression")
    val points = metaStore.searchInSnapshotByRegex(expression).
      foldLeft(Vector.empty[Vector[Any]])((acc, current) ⇒ acc :+ Vector(0, current.name))
    Future.successful(Seq(new InfluxSeries("list_series_result", Vector("time", "name"), points)))
  }

  /** Parses and executes a query expression; the whole execution is timed. */
  private def executeQuery(expression: String): Future[Seq[InfluxSeries]] = measureFutureTime("executeInfluxQuery", "executeInfluxQuery") {
    log.info(s"Executing query [$expression]")
    parser.parse(expression).map {
      influxCriteria ⇒
        val slice = buildSlice(influxCriteria.filters)
        val timeWindow = adjustResolution(slice, influxCriteria.groupBy)
        val timeRangeMillis = buildTimeRangeMillis(slice, timeWindow)
        val summariesBySourceMap = getSummariesBySourceMap(influxCriteria, timeWindow, slice)
        buildInfluxSeries(influxCriteria, timeRangeMillis, summariesBySourceMap)
    }.flatMap(Future.sequence(_))
  }

  /**
   * Turns the query's time filters into a [from, to] slice. A "from" bound is
   * mandatory; "to" defaults to the current time.
   */
  private def buildSlice(filters: Seq[Filter]): Slice = {
    var from = 1L
    var to = now
    filters foreach {
      case filter: TimeFilter ⇒
        filter.operator match {
          case Operators.Gt ⇒ from = filter.value + 1
          case Operators.Gte ⇒ from = filter.value
          case Operators.Lt ⇒ to = filter.value - 1
          case Operators.Lte ⇒ to = filter.value
        }
      case StringFilter(_, _, _) ⇒ //TODO
    }
    // 1L is the sentinel meaning "no from filter was seen"
    if (from == 1L)
      throw new UnsupportedOperationException("From clause required");
    Slice(from, to)
  }

  // overridable for tests
  protected def now = System.currentTimeMillis()

  /**
   * Snaps the requested group-by duration to the nearest configured window;
   * unless the resolution is forced, the window is further adjusted so the
   * number of points falls between minResolution and maxResolution.
   */
  private def adjustResolution(slice: Slice, groupBy: GroupBy): FiniteDuration = {
    val sortedWindows = Settings.Window.ConfiguredWindows.toSeq.sortBy(_.toMillis).reverse
    val desiredTimeWindow = groupBy.duration
    val nearestConfiguredWindow = sortedWindows.foldLeft(sortedWindows.last)((nearest, next) ⇒ if (millisBetween(desiredTimeWindow, next) < millisBetween(desiredTimeWindow, nearest)) next else nearest)
    if (groupBy.forceResolution) {
      nearestConfiguredWindow
    } else {
      val points = resolution(slice, nearestConfiguredWindow)
      // note: single `&` evaluates both operands; on Booleans it yields the
      // same result as `&&`
      if (points <= maxResolution & points >= minResolution)
        nearestConfiguredWindow
      else {
        sortedWindows.foldLeft(sortedWindows.head)((adjustedWindow, next) ⇒ {
          val points = resolution(slice, next)
          if (points >= minResolution & points <= maxResolution)
            next
          else if (points < minResolution) next else adjustedWindow
        })
      }
    }
  }

  protected lazy val maxResolution: Int = Settings.Dashboard.MaxResolutionPoints
  protected lazy val minResolution: Int = Settings.Dashboard.MinResolutionPoints

  /** Number of points the slice would produce at the given window size. */
  private def resolution(slice: Slice, timeWindow: FiniteDuration) = {
    Math.abs(slice.to - slice.from) / timeWindow.toMillis
  }

  private def millisBetween(some: FiniteDuration, other: FiniteDuration) = Math.abs(some.toMillis - other.toMillis)

  /** Aligns both slice bounds to the time window (from rounds up, to rounds down). */
  private def buildTimeRangeMillis(slice: Slice, timeWindow: FiniteDuration): TimeRangeMillis = {
    val alignedFrom = alignTimestamp(slice.from, timeWindow, floorRounding = false)
    val alignedTo = alignTimestamp(slice.to, timeWindow, floorRounding = true)
    TimeRangeMillis(alignedFrom, alignedTo, timeWindow.toMillis)
  }

  /** Rounds a timestamp to a multiple of the time window, up or down. */
  private def alignTimestamp(timestamp: Long, timeWindow: FiniteDuration, floorRounding: Boolean): Long = {
    if (timestamp % timeWindow.toMillis == 0)
      timestamp
    else {
      val division = timestamp / timeWindow.toMillis
      if (floorRounding) division * timeWindow.toMillis else (division + 1) * timeWindow.toMillis
    }
  }

  /**
   * For every source in the query, fetches its summaries (keyed by source
   * alias or metric name) and indexes them by timestamp in millis.
   */
  private def getSummariesBySourceMap(influxCriteria: InfluxCriteria, timeWindow: FiniteDuration, slice: Slice) = {
    influxCriteria.sources.foldLeft(Map.empty[String, Future[Map[Long, Summary]]])((acc, source) ⇒ {
      val tableId = source.alias.getOrElse(source.metric.name)
      val summaries = getStore(source.metric.mtype).readAll(source.metric.name, timeWindow, slice, influxCriteria.orderAsc, influxCriteria.limit)
      val summariesByTs = summaries.map(f ⇒ f.foldLeft(Map.empty[Long, Summary])((acc, summary) ⇒ acc + (summary.timestamp.ms -> summary)))
      acc + (tableId -> summariesByTs)
    })
  }

  /** Selects the summary store matching the metric type. */
  private def getStore(metricType: String) = metricType match {
    case MetricType.Timer | MetricType.Gauge ⇒ getStatisticSummaryStore
    case MetricType.Counter ⇒ getCounterSummaryStore
    case _ ⇒ throw new UnsupportedOperationException(s"Unknown metric type: $metricType")
  }

  // overridable for tests
  protected def getStatisticSummaryStore: SummaryStore[HistogramSummary] = Summaries.histogramSummaryStore
  protected def getCounterSummaryStore: SummaryStore[CounterSummary] = Summaries.counterSummaryStore

  /**
   * Builds one series per projection, in seriesId order: plain fields and
   * scalar numbers map directly; operations combine two operand series
   * timestamp-by-timestamp with the given math operator.
   */
  private def buildInfluxSeries(influxCriteria: InfluxCriteria, timeRangeMillis: TimeRangeMillis, summariesBySourceMap: Map[String, Future[Map[Long, Summary]]]): Seq[Future[InfluxSeries]] = {
    influxCriteria.projections.sortBy(_.seriesId).map {
      case field: Field ⇒ {
        generateSeq(field, timeRangeMillis, summariesBySourceMap, influxCriteria.fillValue).map(values ⇒
          toInfluxSeries(values, field.alias.getOrElse(field.name), influxCriteria.orderAsc, influxCriteria.scale, field.tableId.get))
      }
      case number: Number ⇒ {
        generateSeq(number, timeRangeMillis, summariesBySourceMap, influxCriteria.fillValue).map(values ⇒
          toInfluxSeries(values, number.alias.get, influxCriteria.orderAsc, influxCriteria.scale))
      }
      case operation: Operation ⇒ {
        for {
          leftValues ← generateSeq(operation.left, timeRangeMillis, summariesBySourceMap, influxCriteria.fillValue)
          rightValues ← generateSeq(operation.right, timeRangeMillis, summariesBySourceMap, influxCriteria.fillValue)
        } yield {
          val resultedValues = zipByTimestamp(leftValues, rightValues, operation.operator)
          toInfluxSeries(resultedValues, operation.alias, influxCriteria.orderAsc, influxCriteria.scale)
        }
      }
    }
  }

  /** Produces the timestamp -> value map for a field or scalar projection. */
  private def generateSeq(simpleProjection: SimpleProjection, timeRangeMillis: TimeRangeMillis, summariesMap: Map[String, Future[Map[Long, Summary]]], defaultValue: Option[Double]): Future[Map[Long, Double]] =
    simpleProjection match {
      case field: Field ⇒ generateSummarySeq(timeRangeMillis, Functions.withName(field.name), summariesMap(field.tableId.get), defaultValue)
      case number: Number ⇒ generateScalarSeq(timeRangeMillis, number.value)
      case _ ⇒ throw new UnsupportedOperationException("Nested operations are not supported yet")
    }

  /** Constant value at every aligned timestamp of the range. */
  private def generateScalarSeq(timeRangeMillis: TimeRangeMillis, scalar: Double): Future[Map[Long, Double]] = {
    Future { (timeRangeMillis.from to timeRangeMillis.to by timeRangeMillis.timeWindow).map(ts ⇒ ts -> scalar).toMap }
  }

  /**
   * Applies the projection function to each summary over the aligned range;
   * missing timestamps take `defaultValue` when provided, else are omitted.
   */
  private def generateSummarySeq(timeRangeMillis: TimeRangeMillis, function: Functions.Function, summariesByTs: Future[Map[Long, Summary]], defaultValue: Option[Double]): Future[Map[Long, Double]] = {
    summariesByTs.map(summariesMap ⇒ {
      (timeRangeMillis.from to timeRangeMillis.to by timeRangeMillis.timeWindow).foldLeft(Map.empty[Long, Double])((acc, currentTimestamp) ⇒
        if (summariesMap.get(currentTimestamp).isDefined) {
          function match {
            case metaFunction: Functions.MetaFunction ⇒ acc + (currentTimestamp -> metaFunction(summariesMap(currentTimestamp), timeRangeMillis.timeWindow))
            case simpleFunction: Functions.Function ⇒ acc + (currentTimestamp -> simpleFunction(summariesMap(currentTimestamp)))
          }
        } else if (defaultValue.isDefined) {
          acc + (currentTimestamp -> defaultValue.get)
        } else {
          acc
        })
    })
  }

  /** Combines two series on their common timestamps with the math operator. */
  private def zipByTimestamp(tsValues1: Map[Long, Double], tsValues2: Map[Long, Double], operator: MathOperators.MathOperator): Map[Long, Double] = {
    val zippedByTimestamp = for (timestamp ← tsValues1.keySet.intersect(tsValues2.keySet))
      yield (timestamp, calculate(tsValues1(timestamp), tsValues2(timestamp), operator))
    zippedByTimestamp.toMap
  }

  private def calculate(firstOperand: Double, secondOperand: Double, operator: MathOperators.MathOperator): Double = {
    operator(firstOperand, secondOperand)
  }

  /**
   * Sorts the points by timestamp, applies the optional scale factor (rounded
   * to 4 decimals, half-up) and wraps everything into an [[InfluxSeries]].
   */
  private def toInfluxSeries(timeSeriesValues: Map[Long, Double], projectionName: String, ascendingOrder: Boolean, scale: Option[Double], metricName: String = ""): InfluxSeries = {
    log.debug(s"Building Influx serie for projection [$projectionName] - Metric [$metricName]")
    val sortedTimeSeriesValues = if (ascendingOrder) timeSeriesValues.toSeq.sortBy(_._1) else timeSeriesValues.toSeq.sortBy(-_._1)
    val points = sortedTimeSeriesValues.foldLeft(Vector.empty[Vector[AnyVal]])((acc, current) ⇒ {
      val value = BigDecimal(current._2 * scale.getOrElse(1d)).setScale(4, BigDecimal.RoundingMode.HALF_UP).toDouble
      acc :+ Vector(current._1, value)
    })
    InfluxSeries(metricName, Vector(influxTimeKey, projectionName), points)
  }
}
case class TimeRangeMillis(from: Long, to: Long, timeWindow: Long)
object InfluxQueryResolver {
  // matches "list series /expression/" requests, capturing the expression
  val GetSeriesPattern = "list series /(.*)/".r
  // column name used for the timestamp in every returned series
  val influxTimeKey = "time"
} | despegar/khronus | khronus-influx-api/src/main/scala/com/searchlight/khronus/influx/finder/InfluxQueryResolver.scala | Scala | apache-2.0 | 11,391 |
package kr.ac.kaist.ir.deep.wordvec
import java.util
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.{Logging, SparkConf, SparkContext}
import org.apache.log4j._
import scala.collection.JavaConversions._
/**
* Train Word2Vec and save the model.
*/
/**
 * Command-line Spark job that tokenizes a corpus, finds words occurring fewer
 * than a threshold number of times, and rewrites the corpus with those words
 * replaced by the unknown-word token before saving the result.
 */
object PrepareCorpus extends Logging {

  // Initialization block: set up rolling log4j appenders for Spark (WARN) and
  // the kr.ac packages (INFO)
  {
    // Initialize Network Logging
    val PATTERN = "%d{yy/MM/dd HH:mm:ss} %p %C{2}: %m%n"
    val orgFile = new RollingFileAppender(new PatternLayout(PATTERN), "spark.log")
    orgFile.setMaxFileSize("1MB")
    orgFile.setMaxBackupIndex(5)
    val root = Logger.getRootLogger
    root.addAppender(orgFile)
    root.setLevel(Level.WARN)
    root.setAdditivity(false)
    val krFile = new RollingFileAppender(new PatternLayout(PATTERN), "trainer.log")
    krFile.setMaxFileSize("1MB")
    krFile.setMaxBackupIndex(10)
    val kr = Logger.getLogger("kr.ac")
    kr.addAppender(krFile)
    kr.setLevel(Level.INFO)
  }

  /**
   * Main thread.
   * @param args CLI arguments
   */
  def main(args: Array[String]) =
    if (args.length == 0 || args.contains("--help") || args.contains("-h")) {
      println(
        """Tokenize sentences, and Collect several types of unknown words.
          |
          |== Arguments without default ==
          | -i Path of input corpora file.
          | -o Path of tokenized output text file.
          |
          |== Arguments with default ==
          | --srlz Local Path of Serialized Language Filter file. (Default: filter.dat)
          | --thre Minimum include count. (Default: 3)
          | --part Number of partitions. (Default: organized by Spark)
          | --lang Accepted Language Area of Unicode. (Default: \\\\\\\\u0000-\\\\\\\\u007f)
          | For Korean: 가-힣|\\\\\\\\u0000-\\\\\\\\u007f
          |
          |== Additional Arguments ==
          | --help Display this help message.
          | """.stripMargin)
    } else {
      // Set spark context
      val conf = new SparkConf()
        .setAppName("Normalize Infrequent words")
        .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        .set("spark.scheduler.mode", "FAIR")
        .set("spark.shuffle.memoryFraction", "0.05")
        .set("spark.storage.unrollFraction", "0.05")
        .set("spark.storage.memoryFraction", "0.9")
        .set("spark.broadcast.blockSize", "40960")
        .set("spark.akka.frameSize", "50")
        .set("spark.locality.wait", "10000")
      val sc = new SparkContext(conf)
      sc.setLocalProperty("spark.scheduler.pool", "production")
      // build, broadcast and persist the language filter
      val langArea = getArgument(args, "--lang", "\\\\u0000-\\\\u007f")
      val langFilter = LangFilter(langArea)
      val bcFilter = sc.broadcast(langFilter)
      langFilter.saveAs(getArgument(args, "--srlz", "filter.dat"))
      logInfo(s"Language filter created : $langArea")
      // read file, tokenize, and replace infrequent words before saving
      val in = getArgument(args, "-i", "article.txt")
      val parts = getArgument(args, "--part", "1").toInt
      val lines = sc.textFile(in, parts).filter(_.trim.nonEmpty)
      val tokens = tokenize(lines, bcFilter)
      val threshold = getArgument(args, "--thre", "3").toInt
      val infreqWords = infrequentWords(tokens.flatMap(x ⇒ x), threshold)
      val infreqSet = sc.broadcast(infreqWords)
      val out = getArgument(args, "-o", "article-preproc.txt")
      normalizedTokens(tokens, infreqSet).saveAsTextFile(out)
      // Stop the context
      sc.stop()
    }

  /**
   * Read argument
   * @param args Argument Array
   * @param key Argument Key
   * @param default Default value of this argument
   * @return Value of this key, or `default` when the key is absent or is the
   *         last argument (i.e. has no value after it).
   */
  def getArgument(args: Array[String], key: String, default: String) = {
    val idx = args.indexOf(key)
    // BUG FIX: was `idx > args.length - 1`, which can never be true for a
    // found key, so a key passed as the last argument crashed with an
    // ArrayIndexOutOfBoundsException on args(idx + 1)
    if (idx < 0 || idx >= args.length - 1) default
    else args(idx + 1)
  }

  /**
   * Collect infrequent words with count < threshold.
   * @param words Word seq.
   * @return HashSet of infrequent words (to be replaced by the unknown token).
   */
  def infrequentWords(words: RDD[String], threshold: Int) = {
    val counts = words.countByValue()
    val above = counts.count(_._2 >= threshold)
    val set = counts.filter(_._2 < threshold).keySet
    val value = new util.HashSet[String]()
    value ++= set
    val all = above + set.size
    val ratio = Math.round(set.size.toFloat / all * 100)
    logInfo(s"Total $all distinct words, ${set.size} words($ratio%) will be discarded.")
    value
  }

  /**
   * Convert input into tokenized string, using the broadcast word filter.
   * @param lines Input lines
   * @return tokenized & normalized lines (persisted on disk, replicated x2).
   */
  def tokenize(lines: RDD[String], bcFilter: Broadcast[_ <: WordFilter]) =
    lines.map(bcFilter.value.tokenize).persist(StorageLevel.DISK_ONLY_2)

  /**
   * Convert tokenized sentences back into strings, replacing each infrequent
   * word with the unknown-word token.
   * @param input Tokenized input sentences
   * @param infreqSet Broadcast set of infrequent words
   * @return Space-joined sentences with infrequent words replaced
   */
  def normalizedTokens(input: RDD[_ <: Seq[String]], infreqSet: Broadcast[util.HashSet[String]]) =
    input.mapPartitions {
      // resolve the broadcast once per partition
      lazy val set = infreqSet.value
      _.map {
        seq ⇒
          val it = seq.iterator
          val buf = StringBuilder.newBuilder
          while (it.hasNext) {
            val word = it.next()
            if (set contains word) {
              buf.append(WordModel.OTHER_UNK)
            } else {
              buf.append(word)
            }
            // note: every word, including the last, is followed by a space
            buf.append(' ')
          }
          buf.result()
      }
    }
}
| nearbydelta/ScalaNetwork | src/main/scala/kr/ac/kaist/ir/deep/wordvec/PrepareCorpus.scala | Scala | gpl-2.0 | 5,539 |
package com.twitter.finagle.serverset2.client.apache
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.serverset2.client._
import com.twitter.finagle.stats.DefaultStatsReceiver
import com.twitter.util.Timer
import org.scalatest.FunSuite
class ClientBuilderTest extends FunSuite {

  // configuration used to build reference clients for class comparison
  val config = ClientConfig(
    hosts = "localhost:2181",
    sessionTimeout = 10.seconds,
    statsReceiver = DefaultStatsReceiver,
    readOnlyOK = false,
    sessionId = None,
    password = None,
    timer = Timer.Nil
  )

  test("ClientBuilder.reader returns an Apache ZK reader") {
    val zkr = ClientBuilder().reader()
    assert(zkr.value.isInstanceOf[ZooKeeperReader])
    // same concrete class as the client produced directly by ApacheZooKeeper
    assert(zkr.value.getClass == ApacheZooKeeper.newClient(config).value.getClass)
  }

  test("ClientBuilder.writer returns an Apache ZK writer") {
    val zkw = ClientBuilder().writer()
    assert(zkw.value.isInstanceOf[ZooKeeperRW])
    assert(zkw.value.getClass == ApacheZooKeeper.newClient(config).value.getClass)
  }

  test("ClientBuilder.multi raises a RuntimeException") {
    intercept[RuntimeException] {
      ClientBuilder().multi()
    }
  }
}
| luciferous/finagle | finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/client/apache/ClientBuilderTest.scala | Scala | apache-2.0 | 1,144 |
package amora.backend
package internal
import org.junit.Test
import amora.protocol.ui.Columns
import amora.protocol.ui.Rows
import amora.protocol.ui.Window
/**
 * Tests for `WindowTreeCreator.mkWindowTree`: each test describes a window
 * layout as (x, y, w, h) rectangles (sketched in the ASCII diagrams) and
 * checks the resulting tree of `Rows`/`Columns`/`Window` nodes.
 */
class WindowTreeCreatorTest {
  import WindowTreeCreator._
  import amora.TestUtils._

  /**
   * Takes (x, y, w, h). Window ids are assigned from the tuples' order,
   * starting at 1.
   */
  private def dims(dim: (Int, Int, Int, Int)*): Seq[WinInfo] = {
    dim.zipWithIndex.toList map {
      case ((x, y, w, h), i) ⇒ WinInfo(i+1, x, y, w, h)
    }
  }

  @Test
  def single_window() = {
    /*
     ---
     | |
     ---
     */
    val tree = mkWindowTree(dims((0, 0, 1, 1)))
    tree === Window(1)
  }

  @Test
  def multiple_windows_in_a_single_row() = {
    /*
     -------
     | | | |
     -------
     */
    val tree = mkWindowTree(dims((0, 0, 1, 1), (1, 0, 1, 1), (2, 0, 1, 1)))
    tree === Columns(Seq(Window(1), Window(2), Window(3)))
  }

  @Test
  def multiple_windows_in_a_single_column() = {
    /*
     ---
     | |
     ---
     | |
     ---
     | |
     ---
     */
    val tree = mkWindowTree(dims((0, 0, 1, 1), (0, 1, 1, 1), (0, 2, 1, 1)))
    tree === Rows(Seq(Window(1), Window(2), Window(3)))
  }

  // note: "winodws" typo is part of the original test name, kept as-is
  @Test
  def multiple_winodws_in_first_column_and_one_window_in_second_column() = {
    /*
     -----
     | | |
     --- |
     | | |
     -----
     */
    val tree = mkWindowTree(dims((0, 0, 1, 1), (0, 1, 1, 1), (1, 0, 1, 2)))
    tree === Columns(Seq(
      Rows(Seq(Window(1), Window(2))),
      Window(3)))
  }

  @Test
  def multiple_windows_in_first_row_and_one_window_in_second_row() = {
    /*
     -----
     | | |
     -----
     |   |
     -----
     */
    val tree = mkWindowTree(dims((0, 0, 1, 1), (1, 0, 1, 1), (0, 1, 2, 1)))
    tree === Rows(Seq(
      Columns(Seq(Window(1), Window(2))),
      Window(3)))
  }

  @Test
  def single_window_in_first_row_and_multiple_windows_in_second_row() = {
    /*
     -----
     |   |
     -----
     | | |
     -----
     */
    val tree = mkWindowTree(dims((0, 0, 2, 1), (0, 1, 1, 1), (1, 1, 1, 1)))
    tree === Rows(Seq(
      Window(1),
      Columns(Seq(Window(2), Window(3)))))
  }

  @Test
  def multiple_windows_in_all_rows() = {
    /*
     -----
     | | |
     -----
     | | |
     -----
     */
    val tree = mkWindowTree(dims((0, 0, 1, 1), (1, 0, 1, 1), (0, 1, 1, 1), (1, 1, 1, 1)))
    tree === Rows(Seq(
      Columns(Seq(Window(1), Window(2))),
      Columns(Seq(Window(3), Window(4)))))
  }

  @Test
  def multiple_windows_nested_between_multiple_windows1() = {
    /*
     ---------
     |   |   |
     |   |   |
     |   |   |
     ---------
     |   | | |
     |   -----
     |   | | |
     ---------
     */
    val tree = mkWindowTree(dims((0, 0, 2, 2), (2, 0, 2, 2), (0, 2, 2, 2), (2, 2, 1, 1), (3, 2, 1, 1), (2, 3, 1, 1), (3, 3, 1, 1)))
    tree === Rows(Seq(
      Columns(Seq(Window(1), Window(2))),
      Columns(Seq(
        Window(3),
        Rows(Seq(
          Columns(Seq(Window(4), Window(5))),
          Columns(Seq(Window(6), Window(7)))))))))
  }

  @Test
  def multiple_windows_nested_between_multiple_windows2() = {
    /*
     ---------
     |   |   |
     |   |   |
     |   |   |
     ---------
     | | |   |
     ----- |
     | | |   |
     ---------
     */
    val tree = mkWindowTree(dims((0, 0, 2, 2), (2, 0, 2, 2), (0, 2, 1, 1), (1, 2, 1, 1), (0, 3, 1, 1), (1, 3, 1, 1), (2, 2, 2, 2)))
    tree === Rows(Seq(
      Columns(Seq(Window(1), Window(2))),
      Columns(Seq(
        Rows(Seq(
          Columns(Seq(Window(3), Window(4))),
          Columns(Seq(Window(5), Window(6))))),
        Window(7)))))
  }

  @Test
  def multiple_windows_nested_between_multiple_windows3() = {
    /*
     ---------
     |   | | |
     |   -----
     |   | | |
     ---------
     |   |   |
     |   |   |
     |   |   |
     ---------
     */
    val tree = mkWindowTree(dims((0, 0, 2, 2), (2, 0, 1, 1), (3, 0, 1, 1), (2, 1, 1, 1), (3, 1, 1, 1), (0, 2, 2, 2), (2, 2, 2, 2)))
    tree === Rows(Seq(
      Columns(Seq(
        Window(1),
        Rows(Seq(
          Columns(Seq(Window(2), Window(3))),
          Columns(Seq(Window(4), Window(5))))))),
      Columns(Seq(Window(6), Window(7)))))
  }

  @Test
  def multiple_windows_nested_between_multiple_windows4() = {
    /*
     ---------
     | | |   |
     ----- |
     | | |   |
     ---------
     |   |   |
     |   |   |
     |   |   |
     ---------
     */
    val tree = mkWindowTree(dims((0, 0, 1, 1), (1, 0, 1, 1), (0, 1, 1, 1), (1, 1, 1, 1), (2, 0, 2, 2), (0, 2, 2, 2), (2, 2, 2, 2)))
    tree === Rows(Seq(
      Columns(Seq(
        Rows(Seq(
          Columns(Seq(Window(1), Window(2))),
          Columns(Seq(Window(3), Window(4))))),
        Window(5))),
      Columns(Seq(Window(6), Window(7)))))
  }

  @Test
  def multiple_windows_surrounded_by_single_windows() = {
    /*
     -------------
     |           |
     -------------
     |   | | |   |
     |   ----- |
     |   | |     |
     -------------
     |           |
     -------------
     */
    val tree = mkWindowTree(dims((0, 0, 6, 1), (0, 1, 2, 2), (2, 1, 1, 1), (3, 1, 1, 1), (2, 2, 2, 1), (4, 1, 2, 2), (0, 3, 6, 1)))
    tree === Rows(Seq(
      Window(1),
      Columns(Seq(
        Window(2),
        Rows(Seq(
          Columns(Seq(Window(3), Window(4))),
          Window(5))),
        Window(6))),
      Window(7)))
  }

  @Test
  def unsorted_coordinates_should_be_handled_well() = {
    /*
     -----
     | | |
     -----
     | | |
     -----
     */
    // window ids follow the tuples' order, so the ids appear "shuffled" here
    val tree = mkWindowTree(dims((0, 1, 1, 1), (1, 0, 1, 1), (1, 1, 1, 1), (0, 0, 1, 1)))
    tree === Rows(Seq(
      Columns(Seq(Window(4), Window(2))),
      Columns(Seq(Window(1), Window(3)))))
  }
}
| sschaef/tooling-research | backend/src/test/scala/amora/backend/internal/WindowTreeCreatorTest.scala | Scala | mit | 5,747 |
/*
* Copyright 2017 Exon IT
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package by.exonit.redmine.client.managers
import java.io.{File, InputStream, OutputStream}
import cats.free.Free
import cats.free.Free.liftF
import monix.eval.Task
import scala.collection.immutable._
object WebClient {

  object Constants {
    // query parameter carrying the Redmine API key
    lazy val ApiKeyQueryParameterName = "key"
    lazy val ContentTypeHeader = "Content-Type"
  }

  /**
   * Free-monad DSL for describing an HTTP request. The interpreter is supplied
   * by the [[WebClient]] implementation.
   */
  object RequestDSL {

    /** Supported HTTP authentication schemes. */
    sealed trait AuthenticationMethod

    object AuthenticationMethod {
      final case class Basic(user: String, password: IndexedSeq[Char]) extends AuthenticationMethod
      final case class Digest(user: String, password: IndexedSeq[Char]) extends AuthenticationMethod
      final case class Bearer(token: String) extends AuthenticationMethod
    }

    /** Possible request body representations. */
    sealed trait Body

    object Body {
      final case class EmptyBody() extends Body
      final case class FileBody(file: File) extends Body
      final case class InMemoryByteBody(body: Array[Byte]) extends Body
      // streamProvider is invoked lazily by the interpreter
      final case class StreamedBody(streamProvider: () => InputStream) extends Body
    }

    type Request[Req] = Free[RequestOp, Req]

    // request-building operations; each has a smart constructor below
    sealed trait RequestOp[Req]
    final case class SetUrl(url: String) extends RequestOp[Unit]
    final case class AddSegments(segments: String*) extends RequestOp[Unit]
    final case class AddQueries(queries: (String, String)*) extends RequestOp[Unit]
    final case class SetHeaders(headers: (String, String)*) extends RequestOp[Unit]
    final case class SetMethod(method: String) extends RequestOp[Unit]
    final case class SetAuth(auth: AuthenticationMethod) extends RequestOp[Unit]
    final case class SetBody(body: Body) extends RequestOp[Unit]
    final case class NoOp() extends RequestOp[Unit]

    def setUrl(url: String): Request[Unit] =
      liftF[RequestOp, Unit](SetUrl(url))

    def addSegments(segments: String*): Request[Unit] =
      liftF[RequestOp, Unit](AddSegments(segments: _*))

    def addQueries(queries: (String, String)*): Request[Unit] =
      liftF[RequestOp, Unit](AddQueries(queries: _*))

    def setHeaders(headers: (String, String)*): Request[Unit] =
      liftF[RequestOp, Unit](SetHeaders(headers: _*))

    def setMethod(method: String): Request[Unit] =
      liftF[RequestOp, Unit](SetMethod(method))

    def setAuth(auth: AuthenticationMethod): Request[Unit] =
      liftF[RequestOp, Unit](SetAuth(auth))

    /** Sets the Content-Type header with an explicit charset; both are lower-cased. */
    def setContentType(contentType: String, charset: String): Request[Unit] =
      setHeaders(Constants.ContentTypeHeader -> s"${contentType.toLowerCase}; charset=${charset.toLowerCase}")

    def setContentType(contentType: String): Request[Unit] =
      setHeaders(Constants.ContentTypeHeader -> s"${contentType.toLowerCase}")

    def setBody(body: Body): Request[Unit] =
      liftF[RequestOp, Unit](SetBody(body))

    def noOp(): Request[Unit] =
      liftF[RequestOp, Unit](NoOp())
  }

  /** Free-monad DSL for reading a fully-buffered HTTP response. */
  object ResponseDSL {

    sealed trait ResponseOp[Res]
    final case class GetBodyAsBytes() extends ResponseOp[Array[Byte]]
    final case class GetBodyAsString() extends ResponseOp[String]
    final case class GetStatusCode() extends ResponseOp[Int]
    final case class GetStatusText() extends ResponseOp[String]
    final case class GetHeaders() extends ResponseOp[Map[String, String]]

    type Response[Res] = Free[ResponseOp, Res]

    def getHeaders: Response[Map[String, String]] =
      liftF[ResponseOp, Map[String, String]](GetHeaders())

    def getStatusCode: Response[Int] =
      liftF[ResponseOp, Int](GetStatusCode())

    def getStatusText: Response[String] =
      liftF[ResponseOp, String](GetStatusText())

    def getBodyAsString: Response[String] =
      liftF[ResponseOp, String](GetBodyAsString())

    def getBodyAsBytes: Response[Array[Byte]] =
      liftF[ResponseOp, Array[Byte]](GetBodyAsBytes())
  }

  /** Free-monad DSL for reading a streamed HTTP response body. */
  object StreamingResponseDSL {

    sealed trait StreamingResponseOp[Res]
    final case class GetStatusCode() extends StreamingResponseOp[Int]
    final case class GetHeaders() extends StreamingResponseOp[Map[String, String]]
    // the returned Task performs the actual copy into the provided stream
    final case class GetBodyStream(outputStreamProvider: () => OutputStream) extends StreamingResponseOp[Task[Unit]]

    type StreamingResponse[A] = Free[StreamingResponseOp, A]

    def getHeaders: StreamingResponse[Map[String, String]] =
      liftF[StreamingResponseOp, Map[String, String]](GetHeaders())

    def getStatusCode: StreamingResponse[Int] =
      liftF[StreamingResponseOp, Int](GetStatusCode())

    def getBodyStream(outputStreamProvider: () => OutputStream): StreamingResponse[Task[Unit]] =
      liftF[StreamingResponseOp, Task[Unit]](GetBodyStream(outputStreamProvider))
  }
}
/** Executes request/response programs written in the [[WebClient]] DSLs. */
trait WebClient {

  import WebClient._

  /** Builds and sends `requestCommand`, then interprets the buffered response with `responseCommand`. */
  def execute[T](requestCommand: RequestDSL.Request[Unit], responseCommand: ResponseDSL.Response[T]): Task[T]

  /** Like `execute`, but the response body is consumed as a stream. */
  def executeStreaming[T](
    requestCommand: RequestDSL.Request[Unit],
    responseCommand: StreamingResponseDSL.StreamingResponse[T]): Task[T]
}
| exon-it/redmine-scala-client | client-api/src/main/scala/by/exonit/redmine/client/managers/WebClient.scala | Scala | apache-2.0 | 5,516 |
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.api.resource
import slamdata.Predef._
import quasar.contrib.pathy.{firstSegmentName, rebaseA, stripPrefixA, AFile, APath}
import quasar.fp.ski.{ι, κ}
import monocle.{Iso, Prism}
import pathy.Path._
import scalaz.{ICons, IList, INil, Order, Show}
import scalaz.syntax.equal._
/** Identifies a resource in a datasource. */
sealed trait ResourcePath extends Product with Serializable {

  /** Catamorphism: applies `leaf` to the file of a `Leaf`, or returns `root`
    * for the root path.
    */
  def fold[A](leaf: AFile => A, root: => A): A =
    this match {
      case ResourcePath.Leaf(f) => leaf(f)
      case ResourcePath.Root => root
    }

  /** Appends `name` as the final segment of this path. */
  def /(name: ResourceName): ResourcePath =
    fold(
      f => ResourcePath.leaf(fileParent(f) </> dir(fileName(f).value) </> file(name.value)),
      ResourcePath.leaf(rootDir </> file(name.value)))

  /** Prepends `name` as the first segment of this path. */
  def /:(name: ResourceName): ResourcePath =
    fold(
      f => ResourcePath.leaf(rebaseA(rootDir </> dir(name.value))(f)),
      ResourcePath.leaf(rootDir </> file(name.value)))

  /** Appends all segments of `path` after the segments of this path. */
  def ++(path: ResourcePath): ResourcePath = {
    val iso = ResourcePath.resourceNamesIso
    iso(iso.get(this) ++ iso.get(path))
  }

  /** Strips the leading `path` from this path, returning `None` when `path`
    * is not a prefix of this path.
    */
  def relativeTo(path: ResourcePath): Option[ResourcePath] = {
    @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
    @tailrec
    def go(base: IList[ResourceName], tgt: IList[ResourceName]): Option[ResourcePath] =
      (base, tgt) match {
        case (ICons(bh, bt), ICons(th, tt)) if bh === th => go(bt, tt)
        case (INil(), t) => Some(ResourcePath.resourceNamesIso(t))
        case _ => None
      }

    go(
      ResourcePath.resourceNamesIso.get(path),
      ResourcePath.resourceNamesIso.get(this))
  }

  /** The underlying absolute path. */
  def toPath: APath =
    fold(ι, rootDir)

  /** Splits off the first segment, or `None` for the root path. */
  def uncons: Option[(ResourceName, ResourcePath)] =
    this match {
      case ResourcePath.Leaf(f) => Some(ResourcePath.unconsLeaf(f))
      case ResourcePath.Root => None
    }

  /** Splits off the final segment, or `None` for the root path. */
  def unsnoc: Option[(ResourcePath, ResourceName)] =
    this match {
      case ResourcePath.Leaf(f) => Some(ResourcePath.unsnocLeaf(f))
      case ResourcePath.Root => None
    }
}
object ResourcePath extends ResourcePathInstances {

  /** A path to a non-root resource, represented by an absolute file path. */
  final case class Leaf(file: AFile) extends ResourcePath

  /** The root of the resource hierarchy. */
  case object Root extends ResourcePath

  // Views a `ResourcePath` as its list of segment names, outermost first;
  // the empty list corresponds to `Root`.
  @SuppressWarnings(Array("org.wartremover.warts.Recursion"))
  val resourceNamesIso: Iso[ResourcePath, IList[ResourceName]] =
    Iso[ResourcePath, IList[ResourceName]] {
      _.uncons match {
        case None => INil()
        case Some((name, path)) => name :: resourceNamesIso.get(path)
      }
    } {
      case INil() => Root
      case ICons(h, t) => h /: resourceNamesIso(t)
    }

  /** Prism matching paths that refer to a (non-root) resource. */
  val leaf: Prism[ResourcePath, AFile] =
    Prism.partial[ResourcePath, AFile] {
      case Leaf(f) => f
    } (Leaf)

  /** Prism matching the root path. */
  val root: Prism[ResourcePath, Unit] =
    Prism.partial[ResourcePath, Unit] {
      case Root => ()
    } (κ(Root))

  // Converts an arbitrary absolute path; a directory path is handled by
  // treating its final segment as a file name.
  def fromPath(path: APath): ResourcePath =
    peel(path).fold(root()) {
      case (parent, name) =>
        leaf(parent </> file1(name.valueOr(d => FileName(d.value))))
    }

  /** Splits a file path into its first segment name and the remaining path. */
  def unconsLeaf(file: AFile): (ResourceName, ResourcePath) = {
    val (n, p) = firstSegmentName(fileParent(file)) match {
      case None =>
        // The file sits directly under the root: no remainder.
        (fileName(file).value, ResourcePath.root())
      case Some(seg) =>
        val str = seg.fold(_.value, _.value)
        // Reinterpret the file as a directory so the prefix can be stripped.
        val fileAsDir = fileParent(file) </> dir(fileName(file).value)
        (str, ResourcePath.fromPath(stripPrefixA(rootDir </> dir(str))(fileAsDir)))
    }
    (ResourceName(n), p)
  }

  /** Splits a file path into its parent path and its final segment name. */
  def unsnocLeaf(file: AFile): (ResourcePath, ResourceName) =
    (parentDir(file).map(fromPath(_)).getOrElse(ResourcePath.root()), ResourceName(fileName(file).value))
}
sealed abstract class ResourcePathInstances {
  /** Paths are ordered by their underlying `APath` representation. */
  implicit val order: Order[ResourcePath] =
    Order.orderBy(rp => rp.toPath)

  /** Renders as `ResourcePath(<posix path>)`. */
  implicit val show: Show[ResourcePath] =
    Show.shows(rp => s"ResourcePath(${posixCodec.printPath(rp.toPath)})")
}
| djspiewak/quasar | api/src/main/scala/quasar/api/resource/ResourcePath.scala | Scala | apache-2.0 | 4,573 |
package com.bisphone.util
import scala.collection.mutable
/**
* @author Reza Samei <reza.samei.g@gmail.com>
*/
case class Iter[T](list: Seq[T], iterationToken: Option[String])

object Iter {

  /**
   * Builds one page of at most `limit` items from `origin`.
   *
   * When `origin` fits entirely within `limit`, no continuation token is
   * produced. Otherwise the token is derived (via `fn`) from the last item
   * included in the page — or from the first item of `origin` when
   * `limit <= 0`, mirroring the historical behaviour.
   */
  def apply[T](origin: Seq[T], limit: Int)(fn: T => String): Iter[T] =
    if (origin.size <= limit) Iter(origin, None)
    else {
      val page = origin.take(limit).toList
      val tokenSource = if (page.nonEmpty) page.last else origin.head
      Iter(page, Some(fn(tokenSource)))
    }
}
| reza-samei/bisphone-std | src/main/scala/com/bisphone/util/Iter.scala | Scala | mit | 647 |
package com.sksamuel.elastic4s.requests.mappings
import com.sksamuel.elastic4s.Indexes
import com.sksamuel.elastic4s.ext.OptionImplicits._
import com.sksamuel.elastic4s.fields.ElasticField
import com.sksamuel.elastic4s.requests.analyzers.Analyzer
import com.sksamuel.elastic4s.requests.mappings.dynamictemplate.{DynamicMapping, DynamicTemplateRequest}
/** Request to update the mapping of one or more existing indexes.
  *
  * This is an immutable builder: every method returns a copy of the request
  * with the corresponding option set. Either `properties` (typed field
  * definitions) or `rawSource` (a literal JSON mapping body) may be supplied.
  */
case class PutMappingRequest(indexes: Indexes,
                             properties: Seq[ElasticField] = Nil,
                             updateAllTypes: Option[Boolean] = None,
                             ignoreUnavailable: Option[Boolean] = None,
                             allowNoIndices: Option[Boolean] = None,
                             expandWildcards: Option[Boolean] = None,
                             all: Option[Boolean] = None,
                             source: Option[Boolean] = None,
                             sourceExcludes: Seq[String] = Nil,
                             dateDetection: Option[Boolean] = None,
                             numericDetection: Option[Boolean] = None,
                             size: Option[Boolean] = None,
                             dynamicDateFormats: Seq[String] = Nil,
                             analyzer: Option[String] = None,
                             boostName: Option[String] = None,
                             boostNullValue: Option[Double] = None,
                             parent: Option[String] = None,
                             dynamic: Option[DynamicMapping] = None,
                             meta: Map[String, Any] = Map.empty,
                             routing: Option[Routing] = None,
                             templates: Seq[DynamicTemplateRequest] = Nil,
                             rawSource: Option[String] = None,
                             includeTypeName: Option[Boolean] = None)
  extends MappingDefinitionLike {

  def all(all: Boolean): PutMappingRequest = copy(all = all.some)
  def source(source: Boolean): PutMappingRequest = copy(source = source.some)

  // the raw source should include properties but not the type
  def rawSource(rawSource: String): PutMappingRequest = copy(rawSource = rawSource.some)

  def sourceExcludes(sourceExcludes: String*): PutMappingRequest = copy(sourceExcludes = sourceExcludes)
  def sourceExcludes(sourceExcludes: Iterable[String]): PutMappingRequest =
    copy(sourceExcludes = sourceExcludes.toSeq)

  def analyzer(analyzer: String): PutMappingRequest = copy(analyzer = analyzer.some)
  @deprecated("use new analysis package", "7.2.0")
  def analyzer(analyzer: Analyzer): PutMappingRequest = copy(analyzer = analyzer.name.some)

  def boostName(boostName: String): PutMappingRequest = copy(boostName = boostName.some)
  def boostNullValue(boostNullValue: Double): PutMappingRequest = copy(boostNullValue = boostNullValue.some)
  def parent(parent: String): PutMappingRequest = copy(parent = parent.some)
  def dynamic(dynamic: DynamicMapping): PutMappingRequest = copy(dynamic = dynamic.some)
  def meta(map: Map[String, Any]): PutMappingRequest = copy(meta = map)
  def dateDetection(dateDetection: Boolean): PutMappingRequest = copy(dateDetection = dateDetection.some)
  def numericDetection(numericDetection: Boolean): PutMappingRequest = copy(numericDetection = numericDetection.some)

  @deprecated("this method is now called properties to better match the elastic api", "8.0")
  def fields(fields: Iterable[ElasticField]): PutMappingRequest = as(fields)
  @deprecated("this method is now called properties to better match the elastic api", "8.0")
  def fields(fields: ElasticField*): PutMappingRequest = as(fields: _*)

  // Note: `as` appends to any previously configured properties.
  def properties(fields: Iterable[ElasticField]): PutMappingRequest = as(fields)
  def properties(fields: ElasticField*): PutMappingRequest = as(fields: _*)
  def as(fields: ElasticField*): PutMappingRequest = as(fields.toIterable)
  def as(iterable: Iterable[ElasticField]): PutMappingRequest = copy(properties = properties ++ iterable)

  def dynamicDateFormats(dynamic_date_formats: String*): PutMappingRequest =
    copy(dynamicDateFormats = dynamic_date_formats.toSeq)
  def dynamicDateFormats(dynamic_date_formats: Iterable[String]): PutMappingRequest =
    copy(dynamicDateFormats = dynamic_date_formats.toSeq)

  def routing(required: Boolean, path: Option[String] = None): PutMappingRequest =
    copy(routing = Some(Routing(required, path)))

  def size(size: Boolean): PutMappingRequest = copy(size = size.some)

  def dynamicTemplates(temps: Iterable[DynamicTemplateRequest]): PutMappingRequest = templates(temps)
  def dynamicTemplates(temps: DynamicTemplateRequest*): PutMappingRequest = templates(temps)
  def templates(temps: Iterable[DynamicTemplateRequest]): PutMappingRequest = copy(templates = temps.toSeq)
  def templates(temps: DynamicTemplateRequest*): PutMappingRequest = copy(templates = temps.toSeq)

  def includeTypeName(includeTypeName: Boolean): PutMappingRequest = copy(includeTypeName = includeTypeName.some)
  def includeTypeName(includeTypeName: Option[Boolean]): PutMappingRequest = copy(includeTypeName = includeTypeName)
}
| sksamuel/elastic4s | elastic4s-domain/src/main/scala/com/sksamuel/elastic4s/requests/mappings/PutMappingRequest.scala | Scala | apache-2.0 | 5,107 |
package org.jboss.perf
import java.net.InetSocketAddress
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.LongAdder
import javax.ws.rs.core.Response
import javax.ws.rs.{GET, POST, Path}
import com.sun.net.httpserver.HttpServer
import org.jboss.resteasy.plugins.server.sun.http.HttpContextBuilder
import org.keycloak.constants.AdapterConstants
/**
* @author Radim Vansa <rvansa@redhat.com>
*/
object AppServer {
  // Bind address configured as "host:port".
  private val Array(host, port) = Options.app.split(":")
  private val httpServer = HttpServer.create(new InetSocketAddress(host, port.toInt), 100)

  private val contextBuilder = new HttpContextBuilder()
  contextBuilder.getDeployment().getActualResourceClasses().add(classOf[AppServer])
  private val context = contextBuilder.bind(httpServer)

  // Per-endpoint request counters, reported on shutdown.
  private val logouts = new LongAdder()
  private val versions = new LongAdder()
  private val pushNotBefores = new LongAdder()
  private val queryBearerTokens = new LongAdder()
  private val testAvailables = new LongAdder()

  def main(args: Array[String]): Unit = {
    httpServer.start()
    // Keep the server alive for the whole benchmark run plus a safety margin.
    val timeoutSeconds = Options.rampUp + Options.duration + Options.rampDown + 10
    Thread.sleep(TimeUnit.SECONDS.toMillis(timeoutSeconds))
    httpServer.stop(0)
    printf("AppServer stats:%n%8d logout%n%8d version%n%8d pushNotBefore%n%8d queryBearerToken%n%8d testAvailables%n",
      logouts.longValue(), versions.longValue(), pushNotBefores.longValue(), queryBearerTokens.longValue(), testAvailables.longValue())
  }
}
@Path("/admin")
class AppServer {
@GET
@POST
@Path(AdapterConstants.K_LOGOUT)
def logout(): Response = {
AppServer.logouts.increment()
Response.ok().build()
}
@GET
@POST
@Path(AdapterConstants.K_VERSION)
def version(): Response = {
AppServer.versions.increment()
Response.ok().build()
}
@GET
@POST
@Path(AdapterConstants.K_PUSH_NOT_BEFORE)
def pushNotBefore(): Response = {
AppServer.pushNotBefores.increment()
Response.ok().build()
}
@GET
@POST
@Path(AdapterConstants.K_QUERY_BEARER_TOKEN)
def queryBearerToken(): Response = {
AppServer.queryBearerTokens.increment()
Response.ok().build()
}
@GET
@POST
@Path(AdapterConstants.K_TEST_AVAILABLE)
def testAvailable(): Response = {
AppServer.testAvailables.increment()
Response.ok().build()
}
}
| rvansa/keycloak-benchmark | src/test/scala/org/jboss/perf/AppServer.scala | Scala | apache-2.0 | 2,358 |
package com.twitter.finagle.postgresql.types
import java.nio.CharBuffer
import java.nio.charset.Charset
import java.nio.charset.CodingErrorAction
import com.twitter.finagle.postgresql.PgSqlClientError
import com.twitter.finagle.postgresql.PgSqlUnsupportedError
import com.twitter.finagle.postgresql.Types
import com.twitter.finagle.postgresql.Types.Inet
import com.twitter.finagle.postgresql.Types.PgArray
import com.twitter.finagle.postgresql.Types.PgArrayDim
import com.twitter.finagle.postgresql.Types.WireValue
import com.twitter.finagle.postgresql.transport.PgBuf
import com.twitter.io.Buf
/**
* Typeclass for encoding Scala/Java types to Postgres wire values.
*
* Postgres has its own type system, so the mapping of postgres types to scala types is not 1:1.
* Furthermore, postgres allows creating custom types (i.e.: commonly enums, but any arbitrary type can effectively
* be created) which also require their own mapping to scala types.
*
* The following built-in types and their corresponding scala / java types are provided
* * (read this table as "Postgres Type X can be written from Scala / Java Type Y"):
*
* | Postgres Type | Scala / Java Type |
* | --- | --- |
* | BIGINT (int8) | [[Long]], [[Int]], [[Short]], [[Byte]] |
* | BOOL | [[Boolean]] |
* | BYTEA (byte[]) | [[Buf]] |
* | CHARACTER(n) | [[String]] |
* | DATE (date) | [[java.time.LocalDate]] |
* | DOUBLE (float8) | [[Double]], [[Float]] |
* | INET | [[Inet]] ([[java.net.InetAddress]] and a subnet) |
* | INTEGER (int, int4) | [[Int]], [[Short]], [[Byte]] |
* | JSON | [[String]] or [[Json]] |
* | JSONB | [[Json]] |
* | NUMERIC (decimal) | [[BigDecimal]] |
* | REAL (float4) | [[Float]] |
* | SMALLINT (int2) | [[Short]] and [[Byte]] |
* | TEXT | [[String]] |
* | TIMESTAMP | [[java.time.Instant]] |
* | TIMESTAMP WITH TIME ZONE | [[java.time.Instant]] |
* | UUID | [[java.util.UUID]] |
* | VARCHAR | [[String]] |
*
* @note numeric types don't have the same correspondence for reading and writing.
*
* @see [[ValueReads]]
* @see [[PgType]]
*/
trait ValueWrites[T] {

  /**
   * Encode a value to Postgres' wire representation for a particular Postgres type.
   *
   * @note It is the responsibility of the caller to ensure that the Postgres type is accepted by this implementation
   *   (i.e.: `accepts(tpe)` returns true).
   *
   * @param tpe the Postgres type to encode the value into.
   * @param value the value to encode.
   * @param charset when applicable, the character set to use when encoding.
   * @return the encoded value
   */
  def writes(tpe: PgType, value: T, charset: Charset): WireValue

  /**
   * Returns true when this implementation is able to encode values of the provided Postgres type.
   * Returns false otherwise.
   *
   * @param tpe the Postgres type to check.
   * @return true if this implementation can encode values for the type, false otherwise.
   */
  def accepts(tpe: PgType): Boolean

  /**
   * Returns a `ValueWrites` instance that will use `this` if it accepts the type, otherwise
   * will delegate to `that`.
   *
   * @param that the instance to delegate to when `this` does not accept the provided type.
   * @return a `ValueWrites` instance that will use `this` if it accepts the type, otherwise
   *         will delegate to `that`.
   * @see [[ValueWrites.or]]
   */
  def orElse(that: ValueWrites[T]): ValueWrites[T] =
    ValueWrites.or(this, that)
}
object ValueWrites {

  /**
   * Builds a [[ValueWrites]] that accepts exactly the types in `expect` and encodes
   * values with the provided `write` function.
   */
  def simple[T](expect: PgType*)(write: (PgBuf.Writer, T) => PgBuf.Writer): ValueWrites[T] = new ValueWrites[T] {
    val accept: Set[PgType] = expect.toSet
    override def writes(tpe: PgType, value: T, charset: Charset): WireValue =
      WireValue.Value(write(PgBuf.writer, value).build)
    override def accepts(tpe: PgType): Boolean = accept(tpe)
  }

  /**
   * Define a `ValueWrites[B]` in terms of `ValueWrites[A]` and `B => A`.
   */
  def by[A, B](f: B => A)(implicit writesA: ValueWrites[A]): ValueWrites[B] = new ValueWrites[B] {
    override def writes(tpe: PgType, value: B, charset: Charset): WireValue =
      writesA.writes(tpe, f(value), charset)
    override def accepts(tpe: PgType): Boolean =
      writesA.accepts(tpe)
  }

  /**
   * If it accepts the given [[PgType]], uses `first` to write the value, otherwise, use `second`.
   *
   * @return an instance of `ValueWrites[T]` that uses `first` if it accepts the [[PgType]], otherwise uses `second`.
   */
  def or[T](first: ValueWrites[T], second: ValueWrites[T]): ValueWrites[T] = new ValueWrites[T] {
    override def writes(tpe: PgType, value: T, charset: Charset): WireValue = {
      val w = if (first.accepts(tpe)) first else second
      w.writes(tpe, value, charset)
    }
    override def accepts(tpe: PgType): Boolean =
      first.accepts(tpe) || second.accepts(tpe)
  }

  /**
   * Returns a `ValueWrites[Option[T]]` that writes `NULL` when the value is `None` and delegates to the underlying
   * instance when the value is `Some`.
   */
  implicit def optionWrites[T](implicit twrites: ValueWrites[T]): ValueWrites[Option[T]] = new ValueWrites[Option[T]] {
    override def writes(tpe: PgType, value: Option[T], charset: Charset): WireValue =
      value match {
        case Some(v) => twrites.writes(tpe, v, charset)
        case None => WireValue.Null
      }
    override def accepts(tpe: PgType): Boolean = twrites.accepts(tpe)
  }

  /**
   * Returns a [[ValueWrites]] able to write a collection of `T` to a Postgres array type.
   *
   * For example, this can produce `ValueWrites[List[Int]]` for the [[PgType.Int4Array]] type.
   */
  implicit def traversableWrites[F[X] <: Iterable[X], T](implicit twrites: ValueWrites[T]): ValueWrites[F[T]] =
    new ValueWrites[F[T]] {

      // An empty array still carries its element type oid, but no dimensions.
      def emptyArray(oid: Types.Oid): PgArray =
        PgArray(0, 0, oid, IndexedSeq.empty, IndexedSeq.empty)

      override def writes(tpe: PgType, values: F[T], charset: Charset): WireValue = {
        val underlying = tpe.kind match {
          case Kind.Array(underlying) => underlying
          case _ => throw new PgSqlClientError(
            s"Type ${tpe.name} is not an array type and cannot be written as such." +
              s" Note that this may be because you're trying to write a multi-dimensional array which isn't supported."
          )
        }
        val data = values.map(v => twrites.writes(underlying, v, charset)).toIndexedSeq
        val pgArray =
          if (data.isEmpty) emptyArray(underlying.oid)
          else {
            // Only single-dimension arrays are produced here (lower bound 1).
            PgArray(
              dimensions = 1,
              dataOffset = 0,
              elemType = underlying.oid,
              arrayDims = IndexedSeq(PgArrayDim(data.length, 1)),
              data = data,
            )
          }
        WireValue.Value(PgBuf.writer.array(pgArray).build)
      }

      override def accepts(tpe: PgType): Boolean =
        tpe.kind match {
          case Kind.Array(underlying) => twrites.accepts(underlying)
          case _ => false
        }
    }

  /**
   * Writes [[BigDecimal]] to [[PgType.Numeric]].
   */
  implicit lazy val writesBigDecimal: ValueWrites[BigDecimal] = simple(PgType.Numeric) { (w, bd) =>
    w.numeric(PgNumeric.bigDecimalToNumeric(bd))
  }

  /**
   * Writes [[Boolean]] to [[PgType.Bool]].
   */
  implicit lazy val writesBoolean: ValueWrites[Boolean] = simple(PgType.Bool)((w, t) => w.byte(if (t) 1 else 0))

  /**
   * Writes [[Buf]] to [[PgType.Bytea]].
   */
  implicit lazy val writesBuf: ValueWrites[Buf] = simple(PgType.Bytea)(_.buf(_))

  /**
   * Writes [[Byte]] as a [[Short]].
   *
   * Postgres does not have a numeric 1-byte data type. So we use 2-byte value and check bounds.
   * NOTE: Postgres does have a 1-byte data type (i.e.: "char" with quotes),
   * but it's very tricky to use to store numbers, so it's unlikely to be useful in practice.
   *
   * @see https://www.postgresql.org/docs/current/datatype-numeric.html
   * @see https://dba.stackexchange.com/questions/159090/how-to-store-one-byte-integer-in-postgresql
   */
  implicit lazy val writesByte: ValueWrites[Byte] =
    by[Short, Byte](_.toShort)(writesShort)

  /**
   * Writes [[Double]] to [[PgType.Float8]].
   */
  implicit lazy val writesDouble: ValueWrites[Double] = simple(PgType.Float8)(_.double(_))

  /**
   * Writes [[Float]] to [[PgType.Float4]].
   */
  lazy val writesFloat4: ValueWrites[Float] = simple(PgType.Float4)(_.float(_))

  /**
   * Writes [[Float]] to [[PgType.Float4]], falling back to [[writesDouble]] (widening) for other types.
   */
  implicit lazy val writesFloat: ValueWrites[Float] =
    or(writesFloat4, by[Double, Float](_.toDouble)(writesDouble))

  /**
   * Writes [[Inet]] to [[PgType.Inet]].
   */
  implicit lazy val writesInet: ValueWrites[Inet] = simple(PgType.Inet)(_.inet(_))

  /**
   * Writes [[java.time.Instant]] to [[PgType.Timestamptz]] or [[PgType.Timestamp]].
   */
  implicit lazy val writesInstant: ValueWrites[java.time.Instant] = simple(PgType.Timestamptz, PgType.Timestamp) {
    (w, instant) =>
      // NOTE: we skip going through Timestamp.Micros since we never write anything else
      w.long(PgTime.instantAsUsecOffset(instant))
  }

  /**
   * Writes [[Int]] to [[PgType.Int4]].
   */
  lazy val writesInt4: ValueWrites[Int] = simple(PgType.Int4)(_.int(_))

  /**
   * Writes [[Int]] to [[PgType.Int4]], falling back to [[writesLong]] (widening) for other types.
   */
  implicit lazy val writesInt: ValueWrites[Int] =
    or(writesInt4, by[Long, Int](_.toLong)(writesLong))

  /**
   * Writes [[Json]] to [[PgType.Json]] or [[PgType.Jsonb]].
   */
  implicit lazy val writesJson: ValueWrites[Json] = new ValueWrites[Json] {
    // TODO: Json is really only meant for reading...
    override def writes(tpe: PgType, json: Json, charset: Charset): WireValue = {
      val buf = tpe match {
        case PgType.Json => json.value
        // jsonb values are prefixed with a one-byte version number (1).
        case PgType.Jsonb => Buf.ByteArray(1).concat(json.value)
        case _ => throw new PgSqlUnsupportedError(s"readsJson does not support type ${tpe.name}")
      }
      WireValue.Value(buf)
    }
    override def accepts(tpe: PgType): Boolean =
      tpe == PgType.Json || tpe == PgType.Jsonb
  }

  /**
   * Writes [[Long]] to [[PgType.Int8]].
   */
  implicit lazy val writesLong: ValueWrites[Long] = simple(PgType.Int8)(_.long(_))

  /**
   * Writes [[java.time.LocalDate]] to [[PgType.Date]].
   */
  implicit lazy val writesLocalDate: ValueWrites[java.time.LocalDate] = simple(PgType.Date) { (w, date) =>
    w.int(PgDate.localDateAsEpochDayOffset(date))
  }

  /**
   * Writes [[Short]] to [[PgType.Int2]].
   */
  lazy val writesInt2: ValueWrites[Short] = simple(PgType.Int2)(_.short(_))

  /**
   * Writes [[Short]] to [[PgType.Int2]], falling back to [[writesInt]] (widening) for other types.
   */
  implicit lazy val writesShort: ValueWrites[Short] =
    or(writesInt2, by[Int, Short](_.toInt)(writesInt))

  /**
   * Writes [[String]] to any of [[PgType.Text]], [[PgType.Json]],
   * [[PgType.Varchar]], [[PgType.Bpchar]], [[PgType.Name]], [[PgType.Unknown]].
   */
  implicit lazy val writesString: ValueWrites[String] = new ValueWrites[String] {

    // A strict encoder fails (rather than replaces) on characters that cannot
    // be represented in the target charset.
    def strictEncoder(charset: Charset) =
      charset.newEncoder()
        .onMalformedInput(CodingErrorAction.REPORT)
        .onUnmappableCharacter(CodingErrorAction.REPORT)

    override def writes(tpe: PgType, value: String, charset: Charset): WireValue =
      WireValue.Value(Buf.ByteBuffer.Owned(strictEncoder(charset).encode(CharBuffer.wrap(value))))

    override def accepts(tpe: PgType): Boolean =
      tpe == PgType.Text ||
        tpe == PgType.Json ||
        tpe == PgType.Varchar ||
        tpe == PgType.Bpchar || // CHAR(n)
        tpe == PgType.Name || // system identifiers
        tpe == PgType.Unknown // probably used as a fallback to text serialization?
  }

  /**
   * Writes [[java.util.UUID]] to [[PgType.Uuid]].
   */
  implicit lazy val writesUuid: ValueWrites[java.util.UUID] = simple(PgType.Uuid) { (w, uuid) =>
    w.long(uuid.getMostSignificantBits).long(uuid.getLeastSignificantBits)
  }
}
| twitter/finagle | finagle-postgresql/src/main/scala/com/twitter/finagle/postgresql/types/ValueWrites.scala | Scala | apache-2.0 | 11,850 |
package chrome.webRequest.bindings
import scala.scalajs.js
@js.native
trait WebRedirectionResponseDetails extends WebResponseCacheDetails {
/**
* The new URL.
*/
val redirectUrl: String = js.native
}
| lucidd/scala-js-chrome | bindings/src/main/scala/chrome/webRequest/bindings/WebRedirectionResponseDetails.scala | Scala | mit | 214 |
package scala.c.engine
// Tests covering C bit-field declaration, layout (sizeof) and access.
// Each test embeds a C program and delegates to `checkResults` from
// StandardTest. NOTE(review): presumably checkResults compares the engine's
// output against a reference compiler's — confirm in StandardTest.
class BitfieldStagingArea extends StandardTest {

  // sizeof() of a struct holding two 1-bit fields.
  "A struct with bitfields" should "print the correct results" in {
    val code = """
struct {
unsigned int x : 1;
unsigned int y : 1;
} status2;
int main( ) {
printf( "Memory size occupied by status2 : %d\\n", sizeof(status2));
return 0;
}"""
    checkResults(code)
  }

  // sizeof() when the fields span more than one 32-bit storage unit.
  "A struct with more bitfields" should "print the correct results" in {
    val code = """
struct {
unsigned int x : 4;
unsigned int y : 4;
unsigned int x2 : 4;
unsigned int y2 : 4;
unsigned int x3 : 4;
unsigned int y3 : 4;
unsigned int x4 : 4;
unsigned int y4 : 4;
unsigned int z : 1;
} status2;
int main( ) {
printf( "Memory size occupied by status2 : %d\\n", sizeof(status2));
return 0;
}"""
    checkResults(code)
  }

  // Reading a byte-aligned bit-field out of memcpy'd raw bytes.
  "reading from a trivial bitfield" should "print the correct results" in {
    val code = """
#include <stdio.h>
struct test
{
unsigned int a: 8;
unsigned int b: 8;
unsigned int c: 8;
unsigned int d: 8;
};
int main()
{
struct test dt;
int i = 0xFF06FFFF;
memcpy(&dt, &i, 4);
printf("%d", dt.c);
return 0;
}"""
    checkResults(code)
  }

  // Reading bit-fields that are not byte-aligned (1/7/9/8 bit widths).
  "reading from a even less trivial bitfield" should "print the correct results" in {
    val code = """
#include <stdio.h>
struct test
{
unsigned int a: 1;
unsigned int b: 7;
unsigned int c: 9;
unsigned int d: 8;
};
int main()
{
struct test dt;
int i = 0x87A0A875;
memcpy(&dt, &i, 4);
printf("%d/%d/%d/%d", dt.a, dt.b, dt.c, dt.d);
return 0;
}"""
    checkResults(code)
  }

  // Initializer lists plus subsequent writes through bit-field members.
  "Initializing bit fields with init list" should "print the correct results" in {
    val code = """
#include <stdio.h>
// A space optimized representation of date
struct test
{
unsigned int a: 2;
unsigned int b: 2;
unsigned int c: 2;
unsigned int d: 2;
};
int main()
{
printf("Size of date is %d bytes\\n", sizeof(struct test));
struct test dt = {0, 1, 2, 3};
dt.b = 3;
printf("%d/%d/%d/%d", dt.a, dt.b, dt.c, dt.d);
dt.d = 1;
printf("%d/%d/%d/%d", dt.a, dt.b, dt.c, dt.d);
return 0;
}"""
    checkResults(code)
  }
} | bdwashbu/cEngine | tests/scala/c/engine/Bitfields.scala | Scala | apache-2.0 | 2,518 |
package io.udash.web.commons.components
import io.udash.web.commons.styles.components.HeaderNavStyles
import org.scalajs.dom.Element
trait HeaderNav {
import io.udash.css.CssView._
import scalatags.JsDom.all._
import scalatags.JsDom.tags2.nav
val navStyles: HeaderNavStyles
case class NavItem(url: String, title: String)
def navigation(items: NavItem*): Element =
nav(navStyles.headerNav)(
ul(navStyles.headerLinkList)(
items.map(item =>
li(navStyles.headerLinkWrapper)(
a(href := item.url, navStyles.headerLink)(item.title)
)
)
)
).render
}
| UdashFramework/udash-core | guide/commons/.js/src/main/scala/io/udash/web/commons/components/HeaderNav.scala | Scala | apache-2.0 | 627 |
package com.github.cuzfrog
// Entry point: searches for move sequences measuring out 5 units using
// glasses of capacity 4 and 6.
// NOTE(review): with capacities 4 and 6, every reachable amount is a multiple
// of gcd(4, 6) = 2, so `solutions(5)` should be empty — confirm the target
// and capacities are as intended. Also note `println` on a Stream only shows
// its already-evaluated prefix.
object WaterPouring extends App {
  private val problem = new Pouring(Vector(4, 6))
  println(problem.solutions(5))
}
/** Breadth-first solver for the classic water-pouring puzzle.
  *
  * @param capacity the capacity of each glass; a state holds the current
  *                 amount of water in each glass, in the same order.
  */
private class Pouring(capacity: Vector[Int]) {
  type State = Vector[Int]

  // Every glass starts out empty.
  private val startState: State = Vector.fill(capacity.length)(0)

  /** A single action transforming one state into another. */
  trait Move extends Product with Serializable {
    def change(state: State): State
  }

  private case class Empty(glass: Int) extends Move {
    override def change(state: State): State = state.updated(glass, 0)
  }

  private case class Fill(glass: Int) extends Move {
    override def change(state: State): State = state.updated(glass, capacity(glass))
  }

  private case class Pour(from: Int, to: Int) extends Move {
    override def change(state: State): State = {
      // Move as much as possible without overflowing the destination glass.
      val transferred = math.min(state(from), capacity(to) - state(to))
      state.updated(from, state(from) - transferred).updated(to, state(to) + transferred)
    }
  }

  private val glassIds = capacity.indices.toSet

  // Every move available from any state: empty or fill a glass, or pour
  // between two distinct glasses.
  private val allMoves: Set[Move] =
    glassIds.map(g => Empty(g): Move) ++
      glassIds.map(g => Fill(g): Move) ++
      (for (from <- glassIds; to <- glassIds; if from != to) yield (Pour(from, to): Move))

  /** A sequence of moves (most recent first) together with the state reached. */
  case class Path(history: Seq[Move], endState: State) {
    def extend(move: Move): Path = Path(move +: history, move.change(endState))
    override def toString: String = (history.reverse mkString " ") + "-->" + endState
  }

  private val startPath = Path(Nil, startState)

  // Breadth-first exploration: each stream element holds all paths of a given
  // length, never revisiting a previously seen state. The stream terminates
  // once no new states remain.
  private def explore(frontier: Set[Path], visited: Set[State]): Stream[Set[Path]] =
    if (frontier.isEmpty) Stream.empty
    else {
      lazy val extended = for {
        path <- frontier
        candidate <- allMoves map path.extend
        if !visited.contains(candidate.endState)
      } yield candidate
      frontier #:: explore(extended, extended.map(_.endState) ++ visited)
    }

  /** All paths whose final state has some glass holding exactly `target`,
    * shortest first.
    */
  def solutions(target: Int): Stream[Path] =
    if (capacity.isEmpty) Stream.empty
    else
      for {
        frontier <- explore(Set(startPath), Set.empty)
        path <- frontier
        if path.endState contains target
      } yield path
} | cuzfrog/scala_sbt_template | src/main/scala/com/github/cuzfrog/WaterPouring.scala | Scala | apache-2.0 | 2,051 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package vta.core
import chisel3._
import chisel3.util._
import vta.util.config._
import vta.shell._
/** TensorStore.
*
* Store 1D and 2D tensors from out-scratchpad (SRAM) to main memory (DRAM).
*/
class TensorStore(tensorType: String = "none", debug: Boolean = false)(
    implicit p: Parameters)
    extends Module {
  val tp = new TensorParams(tensorType)
  val mp = p(ShellKey).memParams
  val io = IO(new Bundle {
    val start = Input(Bool())
    val done = Output(Bool())
    val inst = Input(UInt(INST_BITS.W))
    val baddr = Input(UInt(mp.addrBits.W))
    val vme_wr = new VMEWriteMaster
    val tensor = new TensorClient(tensorType)
  })
  val tensorLength = tp.tensorLength
  val tensorWidth = tp.tensorWidth
  val tensorElemBits = tp.tensorElemBits
  val memBlockBits = tp.memBlockBits
  val memDepth = tp.memDepth
  val numMemBlock = tp.numMemBlock

  // Decoded store instruction (SRAM/DRAM offsets, x/y sizes, x stride).
  val dec = io.inst.asTypeOf(new MemDecode)

  val waddr_cur = Reg(chiselTypeOf(io.vme_wr.cmd.bits.addr))
  val waddr_nxt = Reg(chiselTypeOf(io.vme_wr.cmd.bits.addr))
  val xcnt = Reg(chiselTypeOf(io.vme_wr.cmd.bits.len))
  val xlen = Reg(chiselTypeOf(io.vme_wr.cmd.bits.len))
  val xrem = Reg(chiselTypeOf(dec.xsize))
  // Bus beats per row: dec.xsize tensors, each spanning
  // tensorLength * numMemBlock memory blocks; minus one because the VME
  // length field is zero-based.
  val xsize = (dec.xsize << log2Ceil(tensorLength * numMemBlock)) - 1.U
  // Longest single burst supported by the VME, in beats and in bytes.
  val xmax = (1 << mp.lenBits).U
  val xmax_bytes = ((1 << mp.lenBits) * mp.dataBits / 8).U
  val ycnt = Reg(chiselTypeOf(dec.ysize))
  val ysize = dec.ysize
  // tag: memory-block index within the current tensor row;
  // set: tensor-row index within the current scratchpad entry.
  val tag = Reg(UInt(8.W))
  val set = Reg(UInt(8.W))

  // State machine: issue a write command, stream data (re-reading SRAM after
  // each tensor row), then wait for the write acknowledgement; rows (bursts)
  // repeat until both x and y dimensions are exhausted.
  val sIdle :: sWriteCmd :: sWriteData :: sReadMem :: sWriteAck :: Nil = Enum(5)
  val state = RegInit(sIdle)

  // control
  switch(state) {
    is(sIdle) {
      when(io.start) {
        state := sWriteCmd
        // Split the row into bursts of at most xmax beats.
        when(xsize < xmax) {
          xlen := xsize
          xrem := 0.U
        }.otherwise {
          xlen := xmax - 1.U
          xrem := xsize - xmax
        }
      }
    }
    is(sWriteCmd) {
      when(io.vme_wr.cmd.ready) {
        state := sWriteData
      }
    }
    is(sWriteData) {
      when(io.vme_wr.data.ready) {
        when(xcnt === xlen) {
          state := sWriteAck
        }.elsewhen(tag === (numMemBlock - 1).U) {
          // Current SRAM word fully consumed; fetch the next one.
          state := sReadMem
        }
      }
    }
    is(sReadMem) {
      state := sWriteData
    }
    is(sWriteAck) {
      when(io.vme_wr.ack) {
        when(xrem === 0.U) {
          when(ycnt === ysize - 1.U) {
            // Last row written: transfer complete.
            state := sIdle
          }.otherwise {
            // Start the next row.
            state := sWriteCmd
            when(xsize < xmax) {
              xlen := xsize
              xrem := 0.U
            }.otherwise {
              xlen := xmax - 1.U
              xrem := xsize - xmax
            }
          }
        }.elsewhen(xrem < xmax) {
          // Final (short) burst of the current row.
          state := sWriteCmd
          xlen := xrem
          xrem := 0.U
        }.otherwise {
          // Another full-length burst of the current row.
          state := sWriteCmd
          xlen := xmax - 1.U
          xrem := xrem - xmax
        }
      }
    }
  }

  // write-to-sram
  val tensorFile = Seq.fill(tensorLength) {
    SyncReadMem(memDepth, Vec(numMemBlock, UInt(memBlockBits.W)))
  }
  val wdata_t = Wire(Vec(numMemBlock, UInt(memBlockBits.W)))
  val no_mask = Wire(Vec(numMemBlock, Bool()))
  wdata_t := DontCare
  // Writes from the core always update every block of the word.
  no_mask.foreach { m =>
    m := true.B
  }
  for (i <- 0 until tensorLength) {
    val inWrData = io.tensor.wr.bits.data(i).asUInt.asTypeOf(wdata_t)
    when(io.tensor.wr.valid) {
      tensorFile(i).write(io.tensor.wr.bits.idx, inWrData, no_mask)
    }
  }

  // read-from-sram
  // Asserted when the current row finishes and another row remains.
  val stride = state === sWriteAck &
    io.vme_wr.ack &
    xcnt === xlen + 1.U &
    xrem === 0.U &
    ycnt =/= ysize - 1.U

  when(state === sIdle) {
    ycnt := 0.U
  }.elsewhen(stride) {
    ycnt := ycnt + 1.U
  }

  when(state === sWriteCmd || tag === (numMemBlock - 1).U) {
    tag := 0.U
  }.elsewhen(io.vme_wr.data.fire()) {
    tag := tag + 1.U
  }

  when(
    state === sWriteCmd || (set === (tensorLength - 1).U && tag === (numMemBlock - 1).U)) {
    set := 0.U
  }.elsewhen(io.vme_wr.data.fire() && tag === (numMemBlock - 1).U) {
    set := set + 1.U
  }

  // raddr_cur walks the current row; raddr_nxt remembers where the next row
  // starts so the stride can be applied.
  val raddr_cur = Reg(UInt(tp.memAddrBits.W))
  val raddr_nxt = Reg(UInt(tp.memAddrBits.W))
  when(state === sIdle) {
    raddr_cur := dec.sram_offset
    raddr_nxt := dec.sram_offset
  }.elsewhen(io.vme_wr.data
    .fire() && set === (tensorLength - 1).U && tag === (numMemBlock - 1).U) {
    raddr_cur := raddr_cur + 1.U
  }.elsewhen(stride) {
    raddr_cur := raddr_nxt + dec.xsize
    raddr_nxt := raddr_nxt + dec.xsize
  }

  val tread = Seq.tabulate(tensorLength) { i =>
    i.U ->
      tensorFile(i).read(raddr_cur, state === sWriteCmd | state === sReadMem)
  }
  // Select the tensor row (`set`) currently being streamed out.
  val mdata = MuxLookup(set, 0.U.asTypeOf(chiselTypeOf(wdata_t)), tread)

  // write-to-dram
  val maskOffset = VecInit(Seq.fill(M_DRAM_OFFSET_BITS)(true.B)).asUInt
  val elemBytes = (p(CoreKey).batch * p(CoreKey).blockOut * p(CoreKey).outBits) / 8
  when(state === sIdle) {
    // Base address plus the instruction's DRAM offset (scaled to bytes).
    waddr_cur := io.baddr | (maskOffset & (dec.dram_offset << log2Ceil(
      elemBytes)))
    waddr_nxt := io.baddr | (maskOffset & (dec.dram_offset << log2Ceil(
      elemBytes)))
  }.elsewhen(state === sWriteAck && io.vme_wr.ack && xrem =/= 0.U) {
    // Continue the row at the next burst boundary.
    waddr_cur := waddr_cur + xmax_bytes
  }.elsewhen(stride) {
    // Advance to the next row using the instruction's x stride (in bytes).
    waddr_cur := waddr_nxt + (dec.xstride << log2Ceil(
      tensorLength * tensorWidth))
    waddr_nxt := waddr_nxt + (dec.xstride << log2Ceil(
      tensorLength * tensorWidth))
  }

  io.vme_wr.cmd.valid := state === sWriteCmd
  io.vme_wr.cmd.bits.addr := waddr_cur
  io.vme_wr.cmd.bits.len := xlen

  io.vme_wr.data.valid := state === sWriteData
  io.vme_wr.data.bits := mdata(tag)

  when(state === sWriteCmd) {
    xcnt := 0.U
  }.elsewhen(io.vme_wr.data.fire()) {
    xcnt := xcnt + 1.U
  }

  // disable external read-from-sram requests
  io.tensor.tieoffRead()

  // done
  io.done := state === sWriteAck & io.vme_wr.ack & xrem === 0.U & ycnt === ysize - 1.U

  // debug
  if (debug) {
    when(io.vme_wr.cmd.fire()) {
      printf("[TensorStore] ysize:%x ycnt:%x raddr:%x waddr:%x len:%x rem:%x\\n",
        ysize,
        ycnt,
        raddr_cur,
        waddr_cur,
        xlen,
        xrem)
    }
    when(io.vme_wr.data.fire()) {
      printf("[TensorStore] data:%x\\n", io.vme_wr.data.bits)
    }
    when(io.vme_wr.ack) {
      printf("[TensorStore] ack\\n")
    }
  }
}
| Huyuwei/tvm | vta/hardware/chisel/src/main/scala/core/TensorStore.scala | Scala | apache-2.0 | 7,143 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.linker.backend.emitter
import org.scalajs.ir._
import org.scalajs.ir.Names._
import org.scalajs.ir.Types._
import org.scalajs.ir.{Trees => irt}
import org.scalajs.linker.backend.javascript.Trees._
import org.scalajs.linker.interface._
import EmitterNames._
/** Scala.js specific tree generators that are used across the board.
*
* This class is fully stateless.
*
* Also carries around lower-level generators.
*/
private[emitter] final class SJSGen(
val jsGen: JSGen,
val nameGen: NameGen,
val varGen: VarGen
) {
import jsGen._
import config._
import nameGen._
import varGen._
val useBigIntForLongs = esFeatures.allowBigIntsForLongs
  /** Generates the zero value of the given IR `Type`: primitive types
   *  delegate to `genZeroOfPrim`, every reference type zeroes to `null`.
   */
  def genZeroOf(tpe: Type)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    tpe match {
      case tpe: PrimType => genZeroOfPrim(tpe)
      case _ => Null()
    }
  }

  /** Generates the zero value of the given `TypeRef` (same rules as the
   *  `Type` overload above).
   */
  def genZeroOf(typeRef: TypeRef)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    typeRef match {
      case PrimRef(tpe) => genZeroOfPrim(tpe)
      case _ => Null()
    }
  }

  /** Zero value of a primitive type.
   *
   *  Char zeroes to the *unboxed* int 0 (see `genBoxedZeroOf` for the boxed
   *  variant), and Float shares the 0.0 double literal since both are plain
   *  JS numbers. `NoType` and `NothingType` have no values, hence no zero.
   */
  private def genZeroOfPrim(tpe: PrimType)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    tpe match {
      case BooleanType => BooleanLiteral(false)
      case CharType => IntLiteral(0)
      case ByteType => IntLiteral(0)
      case ShortType => IntLiteral(0)
      case IntType => IntLiteral(0)
      case LongType => genLongZero()
      case FloatType => DoubleLiteral(0.0)
      case DoubleType => DoubleLiteral(0.0)
      case StringType => StringLiteral("")
      case UndefType => Undefined()
      case NullType => Null()
      case NoType | NothingType =>
        throw new IllegalArgumentException(s"cannot generate a zero for $tpe")
    }
  }

  /** Zero value of an IR Long: a BigInt literal when Longs are implemented
   *  with bigints, otherwise the cached RuntimeLong zero (core global `L0`).
   */
  def genLongZero()(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    if (useBigIntForLongs)
      BigIntLiteral(0L)
    else
      globalVar("L0", CoreVar)
  }

  /** Boxed zero of the given type: like `genZeroOf`, except that `CharType`
   *  yields the cached boxed Char zero rather than the unboxed int 0.
   */
  def genBoxedZeroOf(tpe: Type)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    if (tpe == CharType) genBoxedCharZero()
    else genZeroOf(tpe)
  }

  /** The cached boxed Char zero (core global `bC0`). */
  def genBoxedCharZero()(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    globalVar("bC0", CoreVar)
  }

  /** Generates a call to `methodName` on the RuntimeLong module instance. */
  def genLongModuleApply(methodName: MethodName, args: Tree*)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    import TreeDSL._
    Apply(
        genLoadModule(LongImpl.RuntimeLongModuleClass) DOT genName(methodName),
        args.toList)
  }
  /** Whether 1-dimensional arrays of this element type are backed by a JS
   *  typed array (see `getArrayUnderlyingTypedArrayClassRef`).
   */
  def usesUnderlyingTypedArray(elemTypeRef: NonArrayTypeRef): Boolean =
    getArrayUnderlyingTypedArrayClassRef(elemTypeRef)(Position.NoPosition).nonEmpty

  /** The typed-array class backing arrays of the given element type, if any.
   *
   *  Only primitive element types qualify, and only when targeting ES2015 or
   *  later (typed arrays are not emitted below that version here).
   */
  def getArrayUnderlyingTypedArrayClassRef(elemTypeRef: NonArrayTypeRef)(
      implicit pos: Position): Option[WithGlobals[VarRef]] = {
    elemTypeRef match {
      case _ if esFeatures.esVersion < ESVersion.ES2015 => None
      case primRef: PrimRef => typedArrayRef(primRef)
      case _ => None
    }
  }

  /** The JS typed-array constructor matching a primitive type, if one exists.
   *
   *  Chars map to Uint16Array; Booleans have no typed-array representation,
   *  and Longs only have one (BigInt64Array) when using bigints for Longs.
   */
  def typedArrayRef(primRef: PrimRef)(
      implicit pos: Position): Option[WithGlobals[VarRef]] = {
    // Wraps a global typed-array constructor name as a tracked global ref.
    def some(name: String) = Some(globalRef(name))
    primRef match {
      case CharRef => some("Uint16Array")
      case ByteRef => some("Int8Array")
      case ShortRef => some("Int16Array")
      case IntRef => some("Int32Array")
      case FloatRef => some("Float32Array")
      case DoubleRef => some("Float64Array")
      case LongRef if useBigIntForLongs => some("BigInt64Array")
      case _ => None
    }
  }
  /** Selects an IR field on `receiver` through its mangled JS property name. */
  def genSelect(receiver: Tree, className: ClassName, field: irt.FieldIdent)(
      implicit pos: Position): Tree = {
    DotSelect(receiver, Ident(genFieldJSName(className, field))(field.pos))
  }

  /** Like the other `genSelect`, additionally recording the original
   *  (pre-mangling) name of the field on the emitted identifier.
   */
  def genSelect(receiver: Tree, className: ClassName, field: irt.FieldIdent,
      originalName: OriginalName)(
      implicit pos: Position): Tree = {
    val jsName = genFieldJSName(className, field)
    val jsOrigName = genOriginalName(field.name, originalName, jsName)
    DotSelect(receiver, Ident(jsName, jsOrigName)(field.pos))
  }

  /** Mangled JS property name of an IR field: `<class>__f_<field>`. */
  private def genFieldJSName(className: ClassName, field: irt.FieldIdent): String =
    genName(className) + "__f_" + genName(field.name)

  /** Selects a JS private field. The field name lives in the `r` global var
   *  keyed by (class, field), so a computed bracket access is used.
   */
  def genJSPrivateSelect(receiver: Tree, className: ClassName,
      field: irt.FieldIdent)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    val fieldName = {
      implicit val pos = field.pos
      globalVar("r", (className, field.name))
    }
    BracketSelect(receiver, fieldName)
  }
def genIsInstanceOf(expr: Tree, tpe: Type)(
implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
pos: Position): Tree = {
import TreeDSL._
tpe match {
case ClassType(className) =>
if (HijackedClasses.contains(className)) {
genIsInstanceOfHijackedClass(expr, className)
} else if (className == ObjectClass) {
expr === Null()
} else if (!globalKnowledge.isAncestorOfHijackedClass(className) &&
!globalKnowledge.isInterface(className)) {
genIsInstanceOfClass(expr, className)
} else {
Apply(globalVar("is", className), List(expr))
}
case ArrayType(arrayTypeRef) =>
arrayTypeRef match {
case ArrayTypeRef(_:PrimRef | ClassRef(ObjectClass), 1) =>
expr instanceof genArrayConstrOf(arrayTypeRef)
case ArrayTypeRef(base, depth) =>
Apply(typeRefVar("isArrayOf", base), List(expr, IntLiteral(depth)))
}
case UndefType => expr === Undefined()
case BooleanType => typeof(expr) === "boolean"
case CharType => expr instanceof globalVar("Char", CoreVar)
case ByteType => genCallHelper("isByte", expr)
case ShortType => genCallHelper("isShort", expr)
case IntType => genCallHelper("isInt", expr)
case LongType => genIsLong(expr)
case FloatType => genIsFloat(expr)
case DoubleType => typeof(expr) === "number"
case StringType => typeof(expr) === "string"
case AnyType => expr !== Null()
case NoType | NullType | NothingType | _:RecordType =>
throw new AssertionError(s"Unexpected type $tpe in genIsInstanceOf")
}
}
  /** Instance test against a concrete (non-interface, non-hijacked-ancestor)
   *  class, compiled to `expr instanceof <class constructor>`.
   */
  def genIsInstanceOfClass(expr: Tree, className: ClassName)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    import TreeDSL._
    if (!globalKnowledge.hasInstances(className)) {
      /* We need to constant-fold the instance test, to avoid emitting
       * `x instanceof $c_TheClass`, because `$c_TheClass` won't be
       * declared at all. Otherwise, we'd get a `ReferenceError`.
       */
      BooleanLiteral(false)
    } else {
      expr instanceof globalVar("c", className)
    }
  }

  /** Instance test against a hijacked class, using the `typeof`-based or
   *  helper-based test of its primitive representation.
   */
  def genIsInstanceOfHijackedClass(expr: Tree, className: ClassName)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    import TreeDSL._
    className match {
      case BoxedUnitClass => expr === Undefined()
      case BoxedBooleanClass => typeof(expr) === "boolean"
      case BoxedCharacterClass => expr instanceof globalVar("Char", CoreVar)
      case BoxedByteClass => genCallHelper("isByte", expr)
      case BoxedShortClass => genCallHelper("isShort", expr)
      case BoxedIntegerClass => genCallHelper("isInt", expr)
      case BoxedLongClass => genIsLong(expr)
      case BoxedFloatClass => genIsFloat(expr)
      case BoxedDoubleClass => typeof(expr) === "number"
      case BoxedStringClass => typeof(expr) === "string"
    }
  }

  /** Test for an IR Long value: a call to the `isLong` helper when Longs are
   *  bigints, otherwise an `instanceof` test against RuntimeLong.
   */
  private def genIsLong(expr: Tree)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    import TreeDSL._
    if (useBigIntForLongs) genCallHelper("isLong", expr)
    else expr instanceof globalVar("c", LongImpl.RuntimeLongClass)
  }

  /** Test for a Float value: under strict-floats semantics only numbers that
   *  fit a Float pass (the `isFloat` helper); otherwise any JS number does.
   */
  private def genIsFloat(expr: Tree)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    import TreeDSL._
    if (semantics.strictFloats) genCallHelper("isFloat", expr)
    else typeof(expr) === "number"
  }
  /** Generates a cast of `expr` to the IR type `tpe`.
   *
   *  With unchecked `asInstanceOf`s, primitive casts are mere coercions to
   *  the JS representation and reference casts are the identity; with
   *  checked semantics, casts go through the `as`/`asArrayOf`/`u*` helpers.
   */
  def genAsInstanceOf(expr: Tree, tpe: Type)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    import TreeDSL._
    if (semantics.asInstanceOfs == CheckedBehavior.Unchecked) {
      tpe match {
        case _:ClassType | _:ArrayType | AnyType =>
          expr
        // `expr` is still evaluated for its side effects before discarding.
        case UndefType => Block(expr, Undefined())
        case BooleanType => !(!expr)
        case CharType => genCallHelper("uC", expr)
        case ByteType | ShortType| IntType => expr | 0
        case LongType => genCallHelper("uJ", expr)
        case DoubleType => UnaryOp(irt.JSUnaryOp.+, expr)
        case StringType => expr || StringLiteral("")
        case FloatType =>
          if (semantics.strictFloats) genCallHelper("fround", expr)
          else UnaryOp(irt.JSUnaryOp.+, expr)
        case NoType | NullType | NothingType | _:RecordType =>
          throw new AssertionError(s"Unexpected type $tpe in genAsInstanceOf")
      }
    } else {
      tpe match {
        case ClassType(ObjectClass) =>
          // Everything conforms to j.l.Object; no check needed.
          expr
        case ClassType(className) =>
          Apply(globalVar("as", className), List(expr))
        case ArrayType(ArrayTypeRef(base, depth)) =>
          Apply(typeRefVar("asArrayOf", base), List(expr, IntLiteral(depth)))
        case UndefType => genCallHelper("uV", expr)
        case BooleanType => genCallHelper("uZ", expr)
        case CharType => genCallHelper("uC", expr)
        case ByteType => genCallHelper("uB", expr)
        case ShortType => genCallHelper("uS", expr)
        case IntType => genCallHelper("uI", expr)
        case LongType => genCallHelper("uJ", expr)
        case FloatType => genCallHelper("uF", expr)
        case DoubleType => genCallHelper("uD", expr)
        case StringType => genCallHelper("uT", expr)
        case AnyType => expr
        case NoType | NullType | NothingType | _:RecordType =>
          throw new AssertionError(s"Unexpected type $tpe in genAsInstanceOf")
      }
    }
  }
/** Orders a subset of hijacked classes by priority for a series of type
* tests.
*
* If `j.l.Double` is in the list, then run-time subclasses of `Double` are
* excluded (i.e., `Byte`, `Short`, `Integer` and `Float`).
*
* If we do not use bigints to implement Longs, `j.l.Long` is excluded.
*
* The result is ordered in an "efficient" way, putting `typeof`-based tests
* first when possible, and otherwise ordering by a gut-feeling of
* "likelihood".
*/
def subsetOfHijackedClassesOrderedForTypeTests(
hijackedClasses: Set[ClassName]): List[ClassName] = {
val baseList = {
if (hijackedClasses.contains(BoxedDoubleClass))
nonSmallNumberHijackedClassesOrderedForTypeTests
else
allHijackedClassesOrderedForTypeTests
}
baseList.filter(hijackedClasses)
}
  /** List of hijacked classes ordered by priority for a series of type tests,
   *  excluding run-time subclasses of Double.
   *
   *  Those with `typeof`-based tests come first because they are cheaper.
   */
  private val nonSmallNumberHijackedClassesOrderedForTypeTests = List(
    BoxedStringClass,
    BoxedDoubleClass,
    BoxedBooleanClass,
    BoxedUnitClass,
    BoxedLongClass,
    BoxedCharacterClass
  )

  /** List of all the hijacked classes ordered by priority for a series of type
   *  tests.
   */
  // The small-number types are prepended so they are tested before the
  // Double test, which would otherwise subsume them.
  private val allHijackedClassesOrderedForTypeTests = List(
    BoxedByteClass,
    BoxedShortClass,
    BoxedIntegerClass,
    BoxedFloatClass
  ) ::: nonSmallNumberHijackedClassesOrderedForTypeTests
  /** Generates a call to the core helper function `helperName`. */
  def genCallHelper(helperName: String, args: Tree*)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    Apply(globalVar(helperName, CoreVar), args.toList)
  }

  /** Generates the module accessor call for `moduleClass` (the `m` global). */
  def genLoadModule(moduleClass: ClassName)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    import TreeDSL._
    Apply(globalVar("m", moduleClass), Nil)
  }

  /** Instantiates a Scala class with the given constructor and arguments.
   *
   *  With an inlineable init, the JS class constructor takes the arguments
   *  directly; otherwise the object is allocated with the zero-argument JS
   *  constructor and the separate `ct` constructor function is applied.
   */
  def genScalaClassNew(className: ClassName, ctor: MethodName, args: Tree*)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    val encodedClassVar = globalVar("c", className)
    val argsList = args.toList
    if (globalKnowledge.hasInlineableInit(className)) {
      New(encodedClassVar, argsList)
    } else {
      Apply(globalVar("ct", (className, ctor)), New(encodedClassVar, Nil) :: argsList)
    }
  }

  /** Loads the constructor of a JS class, resolving its native load spec
   *  through `globalKnowledge`.
   */
  def genJSClassConstructor(className: ClassName,
      keepOnlyDangerousVarNames: Boolean)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): WithGlobals[Tree] = {
    genJSClassConstructor(className,
        globalKnowledge.getJSNativeLoadSpec(className),
        keepOnlyDangerousVarNames)
  }

  /** Loads the constructor of a JS class from an explicit load spec; `None`
   *  denotes a non-native JS class.
   */
  def genJSClassConstructor(className: ClassName,
      spec: Option[irt.JSNativeLoadSpec],
      keepOnlyDangerousVarNames: Boolean)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): WithGlobals[Tree] = {
    spec match {
      case None =>
        // This is a non-native JS class
        WithGlobals(genNonNativeJSClassConstructor(className))
      case Some(spec) =>
        genLoadJSFromSpec(spec, keepOnlyDangerousVarNames)
    }
  }

  /** Generates the call to the `a` global var yielding the constructor of a
   *  non-native JS class.
   */
  def genNonNativeJSClassConstructor(className: ClassName)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    Apply(globalVar("a", className), Nil)
  }
  /** Loads a native JS entity following its `JSNativeLoadSpec`, tracking the
   *  global refs the generated tree uses.
   *
   *  When `keepOnlyDangerousVarNames` is true and we are not tracking all
   *  global refs anyway, only "dangerous" global names are recorded.
   */
  def genLoadJSFromSpec(spec: irt.JSNativeLoadSpec,
      keepOnlyDangerousVarNames: Boolean)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): WithGlobals[Tree] = {
    // Applies the remaining `path` as successive bracket selections.
    def pathSelection(from: Tree, path: List[String]): Tree = {
      path.foldLeft(from) {
        (prev, part) => genBracketSelect(prev, StringLiteral(part))
      }
    }
    spec match {
      case irt.JSNativeLoadSpec.Global(globalRef, path) =>
        val globalVarRef = VarRef(Ident(globalRef))
        val globalVarNames = {
          if (keepOnlyDangerousVarNames && !trackAllGlobalRefs &&
              !GlobalRefUtils.isDangerousGlobalRef(globalRef)) {
            Set.empty[String]
          } else {
            Set(globalRef)
          }
        }
        WithGlobals(pathSelection(globalVarRef, path), globalVarNames)
      case irt.JSNativeLoadSpec.Import(module, path) =>
        val moduleValue = VarRef(externalModuleFieldIdent(module))
        path match {
          case "default" :: rest if moduleKind == ModuleKind.CommonJSModule =>
            // CommonJS modules need the `moduleDefault` helper to emulate an
            // ES `default` export.
            val defaultField = genCallHelper("moduleDefault", moduleValue)
            WithGlobals(pathSelection(defaultField, rest))
          case _ =>
            WithGlobals(pathSelection(moduleValue, path))
        }
      case irt.JSNativeLoadSpec.ImportWithGlobalFallback(importSpec, globalSpec) =>
        // Without a module system, fall back to the global variant.
        moduleKind match {
          case ModuleKind.NoModule =>
            genLoadJSFromSpec(globalSpec, keepOnlyDangerousVarNames)
          case ModuleKind.ESModule | ModuleKind.CommonJSModule =>
            genLoadJSFromSpec(importSpec, keepOnlyDangerousVarNames)
        }
    }
  }
  /** Generates the creation of a new array of the given type.
   *
   *  One length: direct `new` of the array constructor. Several lengths:
   *  delegate to the `newArrayObject` helper with the class data and the
   *  dimension list. Zero lengths is a caller error.
   */
  def genNewArray(arrayTypeRef: ArrayTypeRef, lengths: List[Tree])(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    lengths match {
      case Nil =>
        throw new IllegalArgumentException(
            "Cannot create a new array with 0 dimensions")
      case length :: Nil =>
        New(genArrayConstrOf(arrayTypeRef), length :: Nil)
      case _ =>
        genCallHelper("newArrayObject", genClassDataOf(arrayTypeRef),
            ArrayConstr(lengths))
    }
  }

  /** Generates an array value of the given type from element trees. */
  def genArrayValue(arrayTypeRef: ArrayTypeRef, elems: List[Tree])(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): WithGlobals[Tree] = {
    genNativeArrayWrapper(arrayTypeRef, ArrayConstr(elems))
  }

  /** Wraps a native JS array into an array of the given IR array type.
   *
   *  If the (1-dimensional) element type is backed by a typed array, the
   *  native array is first copied into a new typed array; the result is then
   *  passed to the array class constructor.
   */
  def genNativeArrayWrapper(arrayTypeRef: ArrayTypeRef, nativeArray: Tree)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): WithGlobals[Tree] = {
    val argWithGlobals = arrayTypeRef match {
      case ArrayTypeRef(elemTypeRef, 1) =>
        getArrayUnderlyingTypedArrayClassRef(elemTypeRef) match {
          case Some(typedArrayWithGlobals) =>
            for (typedArray <- typedArrayWithGlobals) yield
              New(typedArray, nativeArray :: Nil)
          case _ =>
            WithGlobals(nativeArray)
        }
      case _ =>
        WithGlobals(nativeArray)
    }
    for (arg <- argWithGlobals) yield
      New(genArrayConstrOf(arrayTypeRef), arg :: Nil)
  }

  /** The JS constructor of the given array type: dedicated `ac` globals for
   *  1-dim primitive and 1-dim j.l.Object arrays, otherwise the `constr`
   *  field of the array's class data.
   */
  def genArrayConstrOf(arrayTypeRef: ArrayTypeRef)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    import TreeDSL._
    arrayTypeRef match {
      case ArrayTypeRef(primRef: PrimRef, 1) =>
        globalVar("ac", primRef)
      case ArrayTypeRef(ClassRef(ObjectClass), 1) =>
        globalVar("ac", ObjectClass)
      case _ =>
        genClassDataOf(arrayTypeRef) DOT "constr"
    }
  }
  /** Generates `<classData>.getClassOf()`, the j.l.Class instance of the
   *  given type ref.
   */
  def genClassOf(typeRef: TypeRef)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    Apply(DotSelect(genClassDataOf(typeRef), Ident("getClassOf")), Nil)
  }

  /** Convenience overload of `genClassOf` for a plain class name. */
  def genClassOf(className: ClassName)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    genClassOf(ClassRef(className))
  }

  /** Generates the class-data expression (`d` global var) for the given type
   *  ref. Array types chain one `getArrayOf()` call per dimension onto the
   *  base type's class data.
   */
  def genClassDataOf(typeRef: TypeRef)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    typeRef match {
      case typeRef: NonArrayTypeRef =>
        typeRefVar("d", typeRef)
      case ArrayTypeRef(base, dims) =>
        val baseData = genClassDataOf(base)
        (1 to dims).foldLeft[Tree](baseData) { (prev, _) =>
          Apply(DotSelect(prev, Ident("getArrayOf")), Nil)
        }
    }
  }

  /** Convenience overload of `genClassDataOf` for a plain class name. */
  def genClassDataOf(className: ClassName)(
      implicit moduleContext: ModuleContext, globalKnowledge: GlobalKnowledge,
      pos: Position): Tree = {
    genClassDataOf(ClassRef(className))
  }
}
| gzm0/scala-js | linker/shared/src/main/scala/org/scalajs/linker/backend/emitter/SJSGen.scala | Scala | apache-2.0 | 19,364 |
// Negative compilation test (see the `// error` marker): matching `Foo.Str`
// against a scrutinee of a union type must not unsoundly constrain the GADT
// type parameter. Do not "fix" the code below — the error is the point.
object Test {
  enum Foo[X] {
    case Str extends Foo[String]
    case Int extends Foo[Int]
  }
  trait Test {
    type A
    // Scrutinee has type Foo[A] | Foo[T]: the Foo.Str case cannot establish
    // T = String (it could come from the Foo[A] side), so typing "" as T
    // must be rejected by the compiler.
    def foo[T](f: Foo[A] | Foo[T]): T =
      f match { case Foo.Str =>
        "" // error
      }
    // Unit | Foo[T]: here Foo.Str can only come from Foo[T], so the GADT
    // constraint T = String is sound and this definition compiles.
    def bar[T](f: Unit | Foo[T]): T =
      f match { case Foo.Str =>
        ""
      }
  }
}
| som-snytt/dotty | tests/neg/unsound-union-object-gadt.scala | Scala | apache-2.0 | 321 |
class Foo {
  def toInt = 12
}
case class Bar( fooBar : Int )
// spurious "erroneous or inaccessible type" error in 2.10.1
class Test {
  var fooBar : Foo = null
  def build = Bar(
    // NOTE(review): `foBar` (not `fooBar`) is a deliberate misspelling —
    // this is a negative regression test for the spurious error message
    // referenced above. Do not correct the identifier.
    fooBar = foBar.toInt
  )
}
| AlexSikia/dotty | tests/untried/neg/t7239.scala | Scala | bsd-3-clause | 213 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui
import java.util.{Date, List => JList, ServiceLoader}
import scala.collection.JavaConverters._
import org.apache.spark.{JobExecutionStatus, SecurityManager, SparkConf, SparkContext}
import org.apache.spark.internal.Logging
import org.apache.spark.scheduler._
import org.apache.spark.status.AppStatusStore
import org.apache.spark.status.api.v1._
import org.apache.spark.ui.JettyUtils._
import org.apache.spark.ui.env.EnvironmentTab
import org.apache.spark.ui.exec.ExecutorsTab
import org.apache.spark.ui.jobs.{JobsTab, StagesTab}
import org.apache.spark.ui.storage.StorageTab
import org.apache.spark.util.Utils
/**
* Top level user interface for a Spark application.
*/
private[spark] class SparkUI private (
    val store: AppStatusStore,
    val sc: Option[SparkContext],
    val conf: SparkConf,
    securityManager: SecurityManager,
    var appName: String,
    val basePath: String,
    val startTime: Long,
    val appSparkVersion: String)
  extends WebUI(securityManager, securityManager.getSSLOptions("ui"), SparkUI.getUIPort(conf),
    conf, basePath, "SparkUI")
  with Logging
  with UIRoot {

  // Kill links require a live SparkContext with `spark.ui.killEnabled`;
  // without a context (sc == None) killing is never offered.
  val killEnabled = sc.map(_.conf.getBoolean("spark.ui.killEnabled", true)).getOrElse(false)

  // Set later via setAppId() once the application id is known.
  var appId: String = _

  private var streamingJobProgressListener: Option[SparkListener] = None

  /** Initialize all components of the server. */
  def initialize(): Unit = {
    val jobsTab = new JobsTab(this, store)
    attachTab(jobsTab)
    val stagesTab = new StagesTab(this, store)
    attachTab(stagesTab)
    attachTab(new StorageTab(this, store))
    attachTab(new EnvironmentTab(this, store))
    attachTab(new ExecutorsTab(this))
    addStaticHandler(SparkUI.STATIC_RESOURCE_DIR)
    attachHandler(createRedirectHandler("/", "/jobs/", basePath = basePath))
    attachHandler(ApiRootResource.getServletHandler(this))
    // These should be POST only, but, the YARN AM proxy won't proxy POSTs
    attachHandler(createRedirectHandler(
      "/jobs/job/kill", "/jobs/", jobsTab.handleKillRequest, httpMethods = Set("GET", "POST")))
    attachHandler(createRedirectHandler(
      "/stages/stage/kill", "/stages/", stagesTab.handleKillRequest,
      httpMethods = Set("GET", "POST")))
  }
  initialize()

  /** User who ran the application, falling back to the `user.name` system
   *  property from the environment info, then to "<unknown>".
   */
  def getSparkUser: String = {
    try {
      Option(store.applicationInfo().attempts.head.sparkUser)
        .orElse(store.environmentInfo().systemProperties.toMap.get("user.name"))
        .getOrElse("<unknown>")
    } catch {
      // Thrown by applicationInfo()/attempts.head before the app registers.
      case _: NoSuchElementException => "<unknown>"
    }
  }

  def getAppName: String = appName

  def setAppId(id: String): Unit = {
    appId = id
  }

  /** Stop the server behind this web interface. Only valid after bind(). */
  override def stop() {
    super.stop()
    logInfo(s"Stopped Spark web UI at $webUrl")
  }

  // A live UI serves exactly one application; any other appId is an error.
  override def withSparkUI[T](appId: String, attemptId: Option[String])(fn: SparkUI => T): T = {
    if (appId == this.appId) {
      fn(this)
    } else {
      throw new NoSuchElementException()
    }
  }

  // Synthesizes a single-attempt ApplicationInfo for the live application;
  // endTime = -1 and completed = false mark it as still running.
  def getApplicationInfoList: Iterator[ApplicationInfo] = {
    Iterator(new ApplicationInfo(
      id = appId,
      name = appName,
      coresGranted = None,
      maxCores = None,
      coresPerExecutor = None,
      memoryPerExecutorMB = None,
      attempts = Seq(new ApplicationAttemptInfo(
        attemptId = None,
        startTime = new Date(startTime),
        endTime = new Date(-1),
        duration = 0,
        lastUpdated = new Date(startTime),
        sparkUser = getSparkUser,
        completed = false,
        appSparkVersion = appSparkVersion
      ))
    ))
  }

  def getApplicationInfo(appId: String): Option[ApplicationInfo] = {
    getApplicationInfoList.find(_.id == appId)
  }

  def getStreamingJobProgressListener: Option[SparkListener] = streamingJobProgressListener

  def setStreamingJobProgressListener(sparkListener: SparkListener): Unit = {
    streamingJobProgressListener = Option(sparkListener)
  }
}
/** Base class for tabs attached to a SparkUI; exposes the parent UI's app
 *  name and Spark version to subclasses.
 */
private[spark] abstract class SparkUITab(parent: SparkUI, prefix: String)
  extends WebUITab(parent, prefix) {

  def appName: String = parent.appName

  def appSparkVersion: String = parent.appSparkVersion
}

/** Companion holding UI-wide constants and the factory for SparkUI. */
private[spark] object SparkUI {
  val DEFAULT_PORT = 4040
  val STATIC_RESOURCE_DIR = "org/apache/spark/ui/static"
  val DEFAULT_POOL_NAME = "default"

  /** Port the UI binds to: `spark.ui.port`, defaulting to 4040. */
  def getUIPort(conf: SparkConf): Int = {
    conf.getInt("spark.ui.port", SparkUI.DEFAULT_PORT)
  }

  /**
   * Create a new UI backed by an AppStatusStore.
   */
  def create(
      sc: Option[SparkContext],
      store: AppStatusStore,
      conf: SparkConf,
      securityManager: SecurityManager,
      appName: String,
      basePath: String,
      startTime: Long,
      appSparkVersion: String = org.apache.spark.SPARK_VERSION): SparkUI = {
    new SparkUI(store, sc, conf, securityManager, appName, basePath, startTime, appSparkVersion)
  }
}
| tejasapatil/spark | core/src/main/scala/org/apache/spark/ui/SparkUI.scala | Scala | apache-2.0 | 5,672 |
/*
The Circumflex License
======================
Copyright (C) 2009-2010 Boris Okunskiy and The Circumflex Team <http://circumflex.ru>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ''AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
*/
package skinny.view.freemarker
import scala.language.existentials
import scala.language.reflectiveCalls
import org.slf4j._
import freemarker.template._
import scala.collection.JavaConverters._
/**
* Scala object wrapper.
*/
class ScalaObjectWrapper extends ObjectWrapper {
  // Maps a Scala/Java value to a FreeMarker TemplateModel. Match order is
  // significant: null and Option are unwrapped first, existing models pass
  // through untouched, the specific collection/scalar cases follow, and the
  // reflective ScalaBaseWrapper is the catch-all. Map keys are stringified
  // via toString because FreeMarker hashes are keyed by String.
  override def wrap(obj: Any): TemplateModel = obj match {
    case null => TemplateModel.NOTHING
    case opt: Option[_] => opt.map(obj => wrap(obj)).getOrElse(TemplateModel.NOTHING)
    case model: TemplateModel => model
    case xml: scala.xml.NodeSeq => new ScalaXmlWrapper(xml, this)
    case seq: Seq[_] => new ScalaSeqWrapper(seq, this)
    case map: scala.collection.Map[_, _] => new ScalaMapWrapper(map.map(p => (p._1.toString, p._2)), this)
    case it: Iterable[_] => new ScalaIterableWrapper(it, this)
    case it: Iterator[_] => new ScalaIteratorWrapper(it, this)
    case str: String => new SimpleScalar(str)
    case date: java.util.Date => new ScalaJUDateWrapper(date, this)
    case num: Number => new SimpleNumber(num)
    case bool: Boolean => if (bool) TemplateBooleanModel.TRUE else TemplateBooleanModel.FALSE
    case o => new ScalaBaseWrapper(o, this)
  }
}
/**
* Scala java.util.Date wrapper.
*
* @param date date
* @param wrapper object wrapper
*/
class ScalaJUDateWrapper(val date: java.util.Date, wrapper: ObjectWrapper)
  extends ScalaBaseWrapper(date, wrapper)
  with TemplateDateModel {

  // Whether this is a date, a time or a date-time is not known here.
  def getDateType = TemplateDateModel.UNKNOWN

  def getAsDate = date
}

/**
 * Scala Seq wrapper.
 *
 * @param seq seq
 * @param wrapper object wrapper
 * @tparam T seq element type
 */
class ScalaSeqWrapper[T](val seq: Seq[T], wrapper: ObjectWrapper)
  extends ScalaBaseWrapper(seq, wrapper)
  with TemplateSequenceModel {

  def get(index: Int) = wrapper.wrap(seq(index))

  def size = seq.size
}

/**
 * Scala Map wrapper.
 *
 * @param map Map object
 * @param wrapper object wrapper
 */
class ScalaMapWrapper(val map: collection.Map[String, _], wrapper: ObjectWrapper)
  extends ScalaBaseWrapper(map, wrapper)
  with TemplateHashModelEx {

  // Looks up the key in the map first, then falls back to reflective member
  // access on the map object itself (ScalaBaseWrapper#get).
  override def get(key: String): TemplateModel = wrapper.wrap(map.get(key).orElse(Option(super.get(key))))

  override def isEmpty = map.isEmpty

  def values = new ScalaIterableWrapper(map.values, wrapper)

  val keys = new ScalaIterableWrapper(map.keys, wrapper)

  def size = map.size
}

/**
 * Scala Iterable wrapper.
 *
 * @param it iterable
 * @param wrapper object wrapper
 * @tparam T iterable element type
 */
class ScalaIterableWrapper[T](val it: Iterable[T], wrapper: ObjectWrapper)
  extends ScalaBaseWrapper(it, wrapper)
  with TemplateCollectionModel {

  def iterator = new ScalaIteratorWrapper(it.iterator, wrapper)
}

/**
 * Scala Iterator wrapper.
 *
 * @param it iterator
 * @param wrapper object wrapper
 * @tparam T iterator element type
 */
class ScalaIteratorWrapper[T](val it: Iterator[T], wrapper: ObjectWrapper)
  extends ScalaBaseWrapper(it, wrapper)
  with TemplateModelIterator
  with TemplateCollectionModel {

  def next = wrapper.wrap(it.next())

  def hasNext = it.hasNext

  // The iterator is single-shot: iterating the collection model consumes it.
  def iterator = this
}
/**
* Scala method wrapper.
*
* @param target invocation target
* @param methodName method name
* @param wrapper object wrapper
*/
class ScalaMethodWrapper(
  val target: Any,
  val methodName: String,
  val wrapper: ObjectWrapper)
  extends TemplateMethodModelEx {

  /** Invokes `methodName` on `target` with the template-supplied arguments.
   *
   *  First tries commons-beanutils' fuzzy dispatch; when that throws, the
   *  FreeMarker argument models are unwrapped to plain Scala/Java values and
   *  the method is resolved reflectively using unboxed parameter types.
   */
  def exec(arguments: java.util.List[_]) = {
    try wrapper.wrap(org.apache.commons.beanutils.MethodUtils.invokeMethod(target, methodName, arguments.toArray))
    catch {
      case e: Exception =>
        // Unwrap FreeMarker models to the underlying Scala/Java values.
        val params = arguments.asScala.map { a =>
          a match {
            case v: java.util.List[_] => v.asScala.asInstanceOf[Seq[_]]
            case v: SimpleCollection => v.iterator
            case v: SimpleDate => v.getAsDate
            case v: SimpleHash => v.toMap
            // NOTE(review): numbers are coerced to Long here, so methods
            // declaring Int/Double parameters may still fail to resolve —
            // confirm this is intended.
            case v: SimpleNumber => v.getAsNumber.longValue
            case v: SimpleObjectWrapper => v
            case v: SimpleScalar => v.getAsString
            case v: SimpleSequence => v.toList
            case v => v
          }
        }.toSeq.map(_.asInstanceOf[Object])
        // Map boxed Java classes onto the primitive/Scala classes appearing
        // in compiled Scala method signatures before the reflective lookup.
        val paramTypes = params.map(_.getClass).map { clazz =>
          clazz match {
            case c if c == classOf[java.util.ArrayList[_]] => classOf[Seq[_]]
            case c if c == classOf[java.util.List[_]] => classOf[Seq[_]]
            case c if c == classOf[java.lang.Boolean] => classOf[scala.Boolean]
            case c if c == classOf[java.lang.Byte] => classOf[scala.Byte]
            case c if c == classOf[java.lang.Character] => classOf[scala.Char]
            case c if c == classOf[java.lang.Double] => classOf[scala.Double]
            case c if c == classOf[java.lang.Float] => classOf[scala.Float]
            case c if c == classOf[java.lang.Integer] => classOf[scala.Int]
            case c if c == classOf[java.lang.Long] => classOf[scala.Long]
            case c if c == classOf[java.lang.String] => classOf[String]
            case c if c == classOf[java.lang.Short] => classOf[scala.Short]
            case c if c == classOf[java.lang.Object] => classOf[scala.Any]
            case c => c
          }
        }
        val method = target.getClass.getDeclaredMethod(methodName, paramTypes: _*)
        method.invoke(target, params: _*)
    }
  }
}
/**
* Scala XML wrapper.
*
* @param nodes mxl node seq
* @param wrapper object wrapper
*/
class ScalaXmlWrapper(val nodes: scala.xml.NodeSeq, val wrapper: ObjectWrapper)
  extends TemplateNodeModel
  with TemplateHashModel
  with TemplateSequenceModel
  with TemplateScalarModel {

  import scala.xml._

  /** Child nodes worth exposing: elements, attributes and non-blank text. */
  def children: Seq[Node] = nodes match {
    case node: Elem => node.child.flatMap {
      case e: Elem => Option(e)
      case a: Attribute => Option(a)
      case t: Text if (t.text.trim != "") => Option(t)
      case _ => None
    }
    case _ => Nil
  }

  def getNodeNamespace: String = nodes match {
    case e: Elem => e.namespace
    case _ => ""
  }

  def getNodeType: String = nodes match {
    case e: Elem => "element"
    case t: Text => "text"
    case a: Attribute => "attribute"
    case _ => null
  }

  def getNodeName: String = nodes match {
    case e: Elem => e.label
    case _ => null
  }

  def getChildNodes: TemplateSequenceModel = new ScalaSeqWrapper[Node](children, wrapper)

  // due to immutability of Scala XML API, nodes are unaware of their parents.
  def getParentNode: TemplateNodeModel = new ScalaXmlWrapper(null, wrapper)

  // as hash
  def isEmpty: Boolean = nodes.size == 0

  /** Hash access: selects descendants named `key`.
   *
   *  Fix: the three branches must be chained with `else if` — previously the
   *  size == 0 result was computed and discarded (a dead expression), so an
   *  empty selection fell through to wrapping the empty NodeSeq instead of
   *  returning TemplateModel.NOTHING as intended.
   */
  def get(key: String): TemplateModel = {
    val children = nodes \\ key
    if (children.size == 0) wrapper.wrap(None)
    else if (children.size == 1) wrapper.wrap(children(0))
    else wrapper.wrap(children)
  }

  // as sequence
  def size: Int = nodes.size

  def get(index: Int): TemplateModel = new ScalaXmlWrapper(nodes(index), wrapper)

  // as scalar
  def getAsString: String = nodes.text
}
/**
* Scala basic wrapper.
*
* @param obj object
* @param wrapper object wrapper
*/
class ScalaBaseWrapper(val obj: Any, val wrapper: ObjectWrapper)
  extends TemplateHashModel
  with TemplateScalarModel {

  private[this] val logger: Logger = LoggerFactory.getLogger(classOf[ScalaBaseWrapper])

  import java.lang.reflect.{ Modifier, Field, Method }

  // TODO
  // Resolution switches; all three are currently hard-coded.
  val resolveFields = true
  val resolveMethods = true
  val delegateToDefault = false

  val objectClass = obj.asInstanceOf[Object].getClass

  // Walks up the class hierarchy looking for a public method named `name`.
  private def findMethod(cl: Class[_], name: String): Option[Method] =
    cl.getMethods.find {
      m =>
        m.getName.equals(name) && Modifier.isPublic(m.getModifiers)
    } match {
      case None if cl != classOf[Object] => findMethod(cl.getSuperclass, name)
      case other => other
    }

  // Walks up the class hierarchy looking for a public field named `name`.
  private def findField(cl: Class[_], name: String): Option[Field] =
    cl.getFields.find {
      f =>
        f.getName.equals(name) && Modifier.isPublic(f.getModifiers)
    } match {
      case None if cl != classOf[Object] => findField(cl.getSuperclass, name)
      case other => other
    }

  /** Resolves `key` on the wrapped object: Dynamic dispatch first
   *  (selectDynamic, then applyDynamic), otherwise public fields, then
   *  public methods (0-arity methods are invoked eagerly; others become
   *  ScalaMethodWrapper models).
   */
  def get(key: String): TemplateModel = {
    if (obj.isInstanceOf[Dynamic]) {
      try {
        val selectDynamic = obj.getClass.getDeclaredMethod("selectDynamic", classOf[String])
        if (selectDynamic != null) {
          return wrapper.wrap(selectDynamic.invoke(obj, key))
        }
      } catch {
        // Best-effort: fall through to applyDynamic on any failure.
        case e: Exception =>
          logger.debug(s"Failed to invoke #selectDynamic because ${e.getMessage}", e)
      }
      try {
        val applyDynamic = obj.getClass.getDeclaredMethod("applyDynamic", classOf[String])
        if (applyDynamic != null) {
          return wrapper.wrap(applyDynamic.invoke(obj, key))
        }
      } catch {
        case e: Exception =>
          logger.debug(s"Failed to invoke #applyDynamic because ${e.getMessage}", e)
      }
    } else {
      val o = obj.asInstanceOf[Object]
      if (resolveFields) {
        findField(objectClass, key) match {
          case Some(field) => return wrapper.wrap(field.get(o))
          case _ =>
        }
      }
      if (resolveMethods) {
        findMethod(objectClass, key) match {
          case Some(method) if (method.getParameterTypes.length == 0) =>
            return wrapper.wrap(method.invoke(obj))
          case Some(method) =>
            return new ScalaMethodWrapper(obj, method.getName, wrapper)
          case _ =>
        }
      }
    }
    // nothing found
    if (delegateToDefault) ObjectWrapper.DEFAULT_WRAPPER.wrap(obj)
    else wrapper.wrap(null)
  }

  def isEmpty = false

  def getAsString = obj.toString
}
| BlackPrincess/skinny-framework | freemarker/src/main/scala/skinny/view/freemarker/FreeMarkerWrappers.scala | Scala | mit | 10,948 |
package AccurateML.nonLinearRegression
import breeze.linalg.{DenseVector => BDV}
/**
* @author Nodalpoint
*/
object NonlinearModelTest {
def main(args: Array[String]) = {
/**
* Input dimensionality n = 2
*/
var n:Integer = 2
/**
* Let x be a random data point
*/
var x: BDV[Double] = BDV.rand[Double](n)
/**
* Define one neural network with 2 hidden layers and one
* Gaussian mixture with two gaussians.
*/
var nnModel: NonlinearModel = new NeuralNetworkModel(n, 2)
var gmmModel: NonlinearModel = new GaussianMixtureModel(n, 2)
/**
* Get the dimensionality of tunable parameters for each model
*/
var nnDim: Int = nnModel.getDim()
var gmmDim: Int = gmmModel.getDim()
/**
* Define a random initial set of parameters
*/
var nnW: BDV[Double] = BDV.rand[Double](nnDim)
var gmmW: BDV[Double] = BDV.rand[Double](gmmDim)
/**
* Evaluate the model for input x on parameters nnW
* nnW[0] weight 1st input to 1st hidden
* nnW[1] weight 2nd input to 1st hidden
* nnW[2] weight bias to 1st hidden
* nnW[3] weight 1st hidden to output
*/
System.out.println("Using weights " + nnW)
System.out.println("On input x " + x)
System.out.println("NN eval " + nnModel.eval(nnW, x))
System.out.println("NN grad analytic " + nnModel.grad(nnW, x))
System.out.println("NN grad numeric " + nnModel.gradnumer(nnW, x))
/**
* Evaluate the model for input x on parameters gmmW
* gmmW[0] 1st gaussian 1st component of mean vector
* gmmW[1] 1st gaussian 2nd component of mean vector
* gmmW[2] 1st gaussian 1st component of diagonal covariance
* gmmW[3] 1st gaussian 2nd component of diagonal covariance
* gmmW[4] 1st gaussian scale factor alpha
*/
System.out.println("Using weights " + gmmW)
System.out.println("On input x " + x)
System.out.println("GMM eval " + gmmModel.eval(gmmW, x))
System.out.println("GMM grad analytic " + gmmModel.grad(gmmW, x))
System.out.println("GMM grad numeric " + gmmModel.gradnumer(gmmW, x))
}
} | harryandlina/ARIM | project/nonLinearRegression/NonlinearModelTest.scala | Scala | apache-2.0 | 2,151 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval.internal
import monix.eval.Coeval
import monix.eval.Coeval.{Always, Eager, Error, FlatMap, Map, Now, Suspend, Trace}
import monix.eval.tracing.{CoevalEvent, CoevalTrace}
import monix.execution.internal.collection.ChunkedArrayStack
import monix.eval.internal.TracingPlatform.{enhancedExceptions, isStackTracing}
import scala.reflect.NameTransformer
import scala.util.control.NonFatal
private[eval] object CoevalRunLoop {

  private type Current = Coeval[Any]
  private type Bind = Any => Coeval[Any]
  private type CallStack = ChunkedArrayStack[Bind]

  /** Trampoline for lazy evaluation. */
  // Interprets a Coeval tree iteratively: `current` is the node being
  // evaluated, `bFirst` holds the most recent continuation, and `bRest`
  // lazily materializes a stack for any further pending continuations.
  // Returns an Eager result (Now or Error) — never recurses, so it is
  // stack-safe for arbitrarily deep flatMap chains.
  def start[A](source: Coeval[A]): Eager[A] = {
    var current: Current = source
    var bFirst: Bind = null
    var bRest: CallStack = null
    // Values from Now, Always and Once are unboxed in this var, for code reuse
    var hasUnboxed: Boolean = false
    var unboxed: AnyRef = null
    // Allocated lazily, only when stack tracing is enabled.
    var tracingCtx: CoevalStackTracedContext = null

    while (true) {
      current match {
        case bind @ FlatMap(fa, bindNext, _) =>
          if (isStackTracing) {
            val trace = bind.trace
            if (tracingCtx eq null) tracingCtx = new CoevalStackTracedContext
            if (trace ne null) tracingCtx.pushEvent(trace.asInstanceOf[CoevalEvent])
          }
          // Push the previous continuation (if any) and descend into the source.
          if (bFirst ne null) {
            if (bRest eq null) bRest = ChunkedArrayStack()
            bRest.push(bFirst)
          }
          bFirst = bindNext.asInstanceOf[Bind]
          current = fa

        case Now(value) =>
          unboxed = value.asInstanceOf[AnyRef]
          hasUnboxed = true

        case Always(thunk) =>
          try {
            unboxed = thunk().asInstanceOf[AnyRef]
            hasUnboxed = true
            current = null
          } catch {
            case e if NonFatal(e) =>
              current = Error(e)
          }

        case bindNext @ Map(fa, _, _) =>
          // Same shape as FlatMap: Map is itself a Bind (Any => Coeval[Any]).
          if (isStackTracing) {
            val trace = bindNext.trace
            if (tracingCtx eq null) tracingCtx = new CoevalStackTracedContext
            if (trace ne null) tracingCtx.pushEvent(trace.asInstanceOf[CoevalEvent])
          }
          if (bFirst ne null) {
            if (bRest eq null) bRest = ChunkedArrayStack()
            bRest.push(bFirst)
          }
          bFirst = bindNext.asInstanceOf[Bind]
          current = fa

        case Suspend(thunk) =>
          // Try/catch described as statement, otherwise ObjectRef happens ;-)
          try {
            current = thunk()
          } catch {
            case ex if NonFatal(ex) =>
              current = Error(ex)
          }

        case ref @ Error(ex) =>
          if (isStackTracing && enhancedExceptions) {
            if (tracingCtx eq null) tracingCtx = new CoevalStackTracedContext
            augmentException(ex, tracingCtx)
          }
          // No error handler in the continuation stack means the whole
          // evaluation short-circuits with this Error.
          findErrorHandler(bFirst, bRest) match {
            case null =>
              return ref
            case bind =>
              // Try/catch described as statement, otherwise ObjectRef happens ;-)
              try {
                current = bind.recover(ex)
              } catch { case e if NonFatal(e) => current = Error(e) }
              bFirst = null
          }

        case Trace(sourceTask, frame) =>
          if (tracingCtx eq null) tracingCtx = new CoevalStackTracedContext
          tracingCtx.pushEvent(frame)
          current = sourceTask
      }

      // A successful value was produced above: feed it to the next
      // (non-error-handler) continuation, or finish if none remain.
      if (hasUnboxed) {
        popNextBind(bFirst, bRest) match {
          case null =>
            return (if (current ne null) current else Now(unboxed)).asInstanceOf[Eager[A]]
          case bind =>
            // Try/catch described as statement, otherwise ObjectRef happens ;-)
            try {
              current = bind(unboxed)
            } catch {
              case ex if NonFatal(ex) => current = Error(ex)
            }
            hasUnboxed = false
            unboxed = null
            bFirst = null
        }
      }
    }
    // $COVERAGE-OFF$
    null // Unreachable code
    // $COVERAGE-ON$
  }

  // Pops continuations until an error handler (StackFrame) is found;
  // returns null when there is none, meaning the error is final.
  private def findErrorHandler(bFirst: Bind, bRest: CallStack): StackFrame[Any, Coeval[Any]] = {
    bFirst match {
      case ref: StackFrame[Any, Coeval[Any]] @unchecked => ref
      case _ =>
        if (bRest eq null) null
        else {
          while (true) {
            val ref = bRest.pop()
            if (ref eq null)
              return null
            else if (ref.isInstanceOf[StackFrame[_, _]])
              return ref.asInstanceOf[StackFrame[Any, Coeval[Any]]]
          }
          // $COVERAGE-OFF$
          null
          // $COVERAGE-ON$
        }
    }
  }

  // Mirror image of findErrorHandler: pops until a continuation that is NOT
  // a pure error handler is found (error handlers are skipped on success).
  private def popNextBind(bFirst: Bind, bRest: CallStack): Bind = {
    if ((bFirst ne null) && !bFirst.isInstanceOf[StackFrame.ErrorHandler[_, _]])
      return bFirst
    if (bRest eq null) return null
    while (true) {
      val next = bRest.pop()
      if (next eq null) {
        return null
      } else if (!next.isInstanceOf[StackFrame.ErrorHandler[_, _]]) {
        return next
      }
    }
    // $COVERAGE-OFF$
    null
    // $COVERAGE-ON$
  }

  /**
   * If stack tracing and contextual exceptions are enabled, this
   * function will rewrite the stack trace of a captured exception
   * to include the async stack trace.
   */
  private[internal] def augmentException(ex: Throwable, ctx: CoevalStackTracedContext): Unit = {
    val stackTrace = ex.getStackTrace
    if (stackTrace.nonEmpty) {
      // An '@' in the last frame's class name marks a trace we already rewrote;
      // avoid augmenting the same exception twice.
      val augmented = stackTrace(stackTrace.length - 1).getClassName.indexOf('@') != -1
      if (!augmented) {
        val prefix = dropRunLoopFrames(stackTrace)
        val suffix = ctx
          .getStackTraces()
          .flatMap(t => CoevalTrace.getOpAndCallSite(t.stackTrace))
          .map {
            case (methodSite, callSite) =>
              val op = NameTransformer.decode(methodSite.getMethodName)
              new StackTraceElement(op + " @ " + callSite.getClassName,
                callSite.getMethodName,
                callSite.getFileName,
                callSite.getLineNumber)
          }
          .toArray
        ex.setStackTrace(prefix ++ suffix)
      }
    }
  }

  // Keeps user frames only: truncates at the first frame belonging to the
  // run-loop machinery (monix.eval / scala.runtime).
  private def dropRunLoopFrames(frames: Array[StackTraceElement]): Array[StackTraceElement] =
    frames.takeWhile(ste => !runLoopFilter.exists(ste.getClassName.startsWith(_)))

  private[this] val runLoopFilter = List(
    "monix.eval.",
    "scala.runtime."
  )
}
| monix/monix | monix-eval/shared/src/main/scala/monix/eval/internal/CoevalRunLoop.scala | Scala | apache-2.0 | 7,066 |
package io.atomicbits.scraml.jsonschemaparser
/**
 * Created by peter on 23/06/15.
 */
// Minimal representation of a class used by the JSON-schema parser: every
// representation at least carries a name.
trait ClassRep {
  def name: String
}
// A class without type parameters.
case class PlainClassRep(name: String) extends ClassRep
// A parameterized class; `types` are presumably its type arguments —
// TODO confirm against the code generator that consumes these reps.
case class TypeClassRep(name: String, types: List[ClassRep]) extends ClassRep
| rcavalcanti/scraml | modules/scraml-jsonschema-parser/src/main/scala/io/atomicbits/scraml/jsonschemaparser/ClassRep.scala | Scala | agpl-3.0 | 265 |
package pl.touk.nussknacker.engine.process.registrar
import cats.effect.IO
import com.typesafe.scalalogging.LazyLogging
import org.apache.flink.configuration.Configuration
import org.apache.flink.streaming.api.functions.async.{ResultFuture, RichAsyncFunction}
import pl.touk.nussknacker.engine.InterpretationResult
import pl.touk.nussknacker.engine.Interpreter.FutureShape
import pl.touk.nussknacker.engine.api.context.ValidationContext
import pl.touk.nussknacker.engine.api.exception.NuExceptionInfo
import pl.touk.nussknacker.engine.api.process.AsyncExecutionContextPreparer
import pl.touk.nussknacker.engine.api.Context
import pl.touk.nussknacker.engine.graph.node.NodeData
import pl.touk.nussknacker.engine.process.ProcessPartFunction
import pl.touk.nussknacker.engine.process.compiler.FlinkProcessCompilerData
import pl.touk.nussknacker.engine.splittedgraph.splittednode.SplittedNode
import scala.concurrent.{ExecutionContext, Future}
import scala.jdk.CollectionConverters.seqAsJavaListConverter
import scala.util.control.NonFatal
import scala.util.{Failure, Success}
// Flink async-I/O operator that runs the Nussknacker interpreter for one
// process part. Each incoming Context is interpreted on a dedicated
// ExecutionContext and the results (or exception infos) are handed back to
// Flink via the ResultFuture callback.
private[registrar] class AsyncInterpretationFunction(val compiledProcessWithDepsProvider: ClassLoader => FlinkProcessCompilerData,
                                                    val node: SplittedNode[_ <: NodeData], validationContext: ValidationContext,
                                                    asyncExecutionContextPreparer: AsyncExecutionContextPreparer, useIOMonad: Boolean)
  extends RichAsyncFunction[Context, InterpretationResult] with LazyLogging with ProcessPartFunction {

  // Compiled lazily so it happens on the task manager, after deserialization.
  private lazy val compiledNode = compiledProcessWithDeps.compileSubPart(node, validationContext)
  import compiledProcessWithDeps._

  // Initialized in open(); sized from the operator's parallelism.
  private var executionContext: ExecutionContext = _

  override def open(parameters: Configuration): Unit = {
    super.open(parameters)
    executionContext = asyncExecutionContextPreparer.prepareExecutionContext(compiledProcessWithDeps.metaData.id,
      getRuntimeContext.getExecutionConfig.getParallelism)
  }

  // Entry point called by Flink for each element. Any failure — inside the
  // interpreter future or thrown synchronously — is routed to handleResults
  // so the ResultFuture is always completed exactly once.
  override def asyncInvoke(input: Context, collector: ResultFuture[InterpretationResult]): Unit = {
    try {
      invokeInterpreter(input) {
        case Right(results) =>
          val exceptions = results.collect { case Right(exInfo) => exInfo }
          val successes = results.collect { case Left(value) => value }
          handleResults(collector, successes, exceptions)
        case Left(ex) =>
          logger.warn("Unexpected error", ex)
          handleResults(collector, Nil, List(NuExceptionInfo(None, ex, input)))
      }
    } catch {
      case NonFatal(ex) =>
        logger.warn("Unexpected error", ex)
        handleResults(collector, Nil, List(NuExceptionInfo(None, ex, input)))
    }
  }

  // Runs the interpreter in either the IO monad or plain Future, invoking
  // `callback` with either the interpretation results or the failure.
  private def invokeInterpreter(input: Context)
                               (callback: Either[Throwable, List[Either[InterpretationResult, NuExceptionInfo[_ <: Throwable]]]] => Unit): Unit = {
    implicit val ec: ExecutionContext = executionContext
    //we leave switch to be able to return to Future if IO has some flaws...
    if (useIOMonad) {
      interpreter.interpret[IO](compiledNode, metaData, input).unsafeRunAsync(callback)
    } else {
      implicit val future: FutureShape = new FutureShape()
      interpreter.interpret[Future](compiledNode, metaData, input).onComplete {
        //use result.toEither after dropping Scala 2.11 support
        case Success(a) => callback(Right(a))
        case Failure(a) => callback(Left(a))
      }
    }
  }

  override def close(): Unit = {
    super.close()
    asyncExecutionContextPreparer.close()
  }

  //This function has to be invoked exactly *ONCE* for one asyncInvoke (complete/completeExceptionally) can be invoked only once)
  private def handleResults(collector: ResultFuture[InterpretationResult],
                            results: List[InterpretationResult],
                            exceptions: List[NuExceptionInfo[_ <: Throwable]]): Unit = {
    try {
      exceptions.foreach(exceptionHandler.handle)
      collector.complete(results.asJava)
    } catch {
      case NonFatal(e) =>
        logger.warn("Unexpected exception during exceptionHandler invocation, failing", e)
        collector.completeExceptionally(e)
    }
  }
}
| TouK/nussknacker | engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/registrar/AsyncInterpretationFunction.scala | Scala | apache-2.0 | 4,252 |
package io.findify.s3mock
import scala.jdk.CollectionConverters._
/**
 * Created by shutty on 8/10/16.
 */
// Exercises bucket creation against the S3 mock; `behaviour` is run once per
// backend fixture provided by S3MockTest.
class PutBucketTest extends S3MockTest {
  override def behaviour(fixture: => Fixture) = {
    val s3 = fixture.client
    it should "create buckets" in {
      // Each fixture starts with a clean store: no buckets yet.
      s3.listBuckets().isEmpty shouldBe true
      s3.createBucket("hello").getName shouldBe "hello"
      s3.listBuckets().asScala.exists(_.getName == "hello") shouldBe true
    }
    it should "create buckets with region" in {
      // The region-taking overload should behave the same as the plain one.
      s3.createBucket("hello2", "us-west-1")
      s3.listBuckets().asScala.exists(_.getName == "hello2") shouldBe true
    }
  }
}
| findify/s3mock | src/test/scala/io/findify/s3mock/PutBucketTest.scala | Scala | mit | 625 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.core
import grizzled.slf4j.Logging
import org.apache.predictionio.annotation.DeveloperApi
import org.apache.predictionio.controller.Params
/** :: DeveloperApi ::
  * Base class for all controllers
  */
@DeveloperApi
abstract class AbstractDoer extends Serializable

/** :: DeveloperApi ::
  * Provides facility to instantiate controller classes
  */
@DeveloperApi
object Doer extends Logging {
  /** :: DeveloperApi ::
    * Instantiates a controller class using supplied controller parameters as
    * constructor parameters
    *
    * @param cls Class of the controller class
    * @param params Parameters of the controller class
    * @tparam C Controller class
    * @return An instance of the controller class
    */
  @DeveloperApi
  def apply[C <: AbstractDoer](
    cls: Class[_ <: C], params: Params): C = {
    // Subclasses only allows two kind of constructors.
    // 1. Constructor with P <: Params.
    // 2. Empty constructor.
    // First try (1), if failed, try (2).
    // Note: getConstructor throws NoSuchMethodException rather than returning
    // null, which is why fallback happens in the catch block.
    try {
      val constr = cls.getConstructor(params.getClass)
      constr.newInstance(params)
    } catch {
      case e: NoSuchMethodException => try {
        val zeroConstr = cls.getConstructor()
        zeroConstr.newInstance()
      } catch {
        case e: NoSuchMethodException =>
          // Neither constructor shape exists: log and terminate the process.
          // sys.exit has type Nothing, so both branches still conform to C.
          error(s"${params.getClass.getName} was used as the constructor " +
            s"argument to ${e.getMessage}, but no constructor can handle it. " +
            "Aborting.")
          sys.exit(1)
      }
    }
  }
}
| dszeto/incubator-predictionio | core/src/main/scala/org/apache/predictionio/core/AbstractDoer.scala | Scala | apache-2.0 | 2,343 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.frontend.v2_3.ast
import org.neo4j.cypher.internal.frontend.v2_3.Foldable._
import org.neo4j.cypher.internal.frontend.v2_3.{ast, _}
import org.neo4j.cypher.internal.frontend.v2_3.ast.Expression._
import org.neo4j.cypher.internal.frontend.v2_3.symbols.{CypherType, TypeSpec, _}
import scala.collection.immutable.Stack
// Companion holding semantic-checking helpers: implicit enrichments for
// Option/collections of expressions, and the TreeAcc accumulator used when
// folding over expression trees with nested identifier scopes.
object Expression {
  sealed trait SemanticContext

  object SemanticContext {
    case object Simple extends SemanticContext
    case object Results extends SemanticContext
  }

  val DefaultTypeMismatchMessageGenerator = (expected: String, existing: String) => s"expected $expected but was $existing"

  // Lifts semanticCheck/expectType over Option: a missing expression succeeds.
  implicit class SemanticCheckableOption[A <: Expression](option: Option[A]) {
    def semanticCheck(ctx: SemanticContext): SemanticCheck =
      option.fold(SemanticCheckResult.success) { _.semanticCheck(ctx) }
    def expectType(possibleTypes: => TypeSpec): SemanticCheck =
      option.fold(SemanticCheckResult.success) { _.expectType(possibleTypes) }
  }

  // Sequences semantic checks over a collection of expressions.
  implicit class SemanticCheckableExpressionTraversable[A <: Expression](traversable: TraversableOnce[A]) extends SemanticChecking {
    def semanticCheck(ctx: SemanticContext): SemanticCheck =
      traversable.foldSemanticCheck { _.semanticCheck(ctx) }
  }

  // Type-level combinators over a collection of expressions.
  implicit class InferrableTypeTraversableOnce[A <: Expression](traversable: TraversableOnce[A]) {
    def unionOfTypes: TypeGenerator = state =>
      TypeSpec.union(traversable.map(_.types(state)).toSeq: _*)
    // Empty collection defaults to CTAny (least upper bound of nothing).
    def leastUpperBoundsOfTypes: TypeGenerator =
      if (traversable.isEmpty)
        _ => CTAny.invariant
      else
        state => traversable.map { _.types(state) } reduce { _ leastUpperBounds _ }
    def expectType(possibleTypes: => TypeSpec): SemanticCheck =
      traversable.foldSemanticCheck { _.expectType(possibleTypes) }
  }

  // Fold accumulator: `data` is the user payload, `stack` tracks identifier
  // sets introduced by enclosing scope expressions (push on enter, pop on exit).
  final case class TreeAcc[A](data: A, stack: Stack[Set[Identifier]] = Stack.empty) {
    def toSet: Set[Identifier] = stack.toSet.flatten
    def map(f: A => A): TreeAcc[A] = copy(data = f(data))
    def push(newIdentifier: Identifier): TreeAcc[A] = push(Set(newIdentifier))
    def push(newIdentifiers: Set[Identifier]): TreeAcc[A] = copy(stack = stack.push(newIdentifiers))
    def pop: TreeAcc[A] = copy(stack = stack.pop)
    def contains(identifier: Identifier) = stack.exists(_.contains(identifier))
  }
}
// Base AST node for Cypher expressions: subclasses provide the semantic
// check; this class supplies type bookkeeping and tree traversals.
abstract class Expression extends ASTNode with ASTExpression with SemanticChecking {

  import Expression.TreeAcc

  def semanticCheck(ctx: SemanticContext): SemanticCheck

  // The actual (inferred) type spec recorded for this expression in the state.
  def types: TypeGenerator = s => s.expressionType(this).actual

  // All sub-expressions, in tree-fold order, excluding this node itself.
  def arguments: Seq[Expression] = this.treeFold(List.empty[Expression]) {
    case e: Expression if e != this =>
      (acc, _) => acc :+ e
  }

  // All identifiers referenced from this expression or any of its childs
  // that are not introduced inside this expression
  def dependencies: Set[Identifier] =
    this.treeFold(TreeAcc[Set[Identifier]](Set.empty)) {
      // Entering a scope pushes its identifiers so references to them inside
      // are not counted as external dependencies; pop on the way out.
      case scope: ScopeExpression => {
        case (acc, children) =>
          val newAcc = acc.push(scope.identifiers)
          children(newAcc).pop
      }
      case id: Identifier => {
        case (acc, children) if acc.contains(id) =>
          children(acc)
        case (acc, children) =>
          children(acc.map(_ + id))
      }
    }.data

  // List of child expressions together with any of its dependencies introduced
  // by any of its parent expressions (where this expression is the root of the tree)
  def inputs: Seq[(Expression, Set[Identifier])] =
    this.treeFold(TreeAcc[Seq[(Expression, Set[Identifier])]](Seq.empty)) {
      case scope: ScopeExpression => {
        case (acc, children) =>
          val newAcc = acc.push(scope.identifiers).map { case pairs => pairs :+ (scope -> acc.toSet) }
          children(newAcc).pop
      }
      case expr: Expression => {
        case (acc, children) =>
          val newAcc = acc.map { case pairs => pairs :+ (expr -> acc.toSet) }
          children(newAcc)
      }
    }.data

  def specifyType(typeGen: TypeGenerator): SemanticState => Either[SemanticError, SemanticState] =
    s => specifyType(typeGen(s))(s)
  def specifyType(possibleTypes: => TypeSpec): SemanticState => Either[SemanticError, SemanticState] =
    _.specifyType(this, possibleTypes)

  def expectType(typeGen: TypeGenerator): SemanticState => SemanticCheckResult =
    s => expectType(typeGen(s))(s)
  def expectType(typeGen: TypeGenerator, messageGen: (String, String) => String): SemanticState => SemanticCheckResult =
    s => expectType(typeGen(s), messageGen)(s)

  // Records the expected type; an empty intersection with the actual type
  // produces a "Type mismatch" semantic error at this expression's position.
  def expectType(possibleTypes: => TypeSpec, messageGen: (String, String) => String = DefaultTypeMismatchMessageGenerator): SemanticState => SemanticCheckResult = s => {
    s.expectType(this, possibleTypes) match {
      case (ss, TypeSpec.none) =>
        val existingTypesString = ss.expressionType(this).specified.mkString(", ", " or ")
        val expectedTypesString = possibleTypes.mkString(", ", " or ")
        SemanticCheckResult.error(ss, SemanticError("Type mismatch: " + messageGen(expectedTypesString, existingTypesString), position))
      case (ss, _) =>
        SemanticCheckResult.success(ss)
    }
  }

  // Dispatches a semantic check based on this expression's current type spec.
  def typeSwitch(choice: TypeSpec => SemanticCheck): SemanticCheck =
    (state: SemanticState) => choice(types(state))(state)

  def containsAggregate = this.exists {
    case IsAggregate(_) => true
  }
}
// Mixin for expressions whose type is fixed: the semantic check just records
// `possibleTypes` for this expression.
trait SimpleTyping { self: Expression =>
  protected def possibleTypes: TypeSpec
  def semanticCheck(ctx: SemanticContext): SemanticCheck = specifyType(possibleTypes)
}

// Mixin for function-like expressions typed by overload resolution: each
// Signature lists argument types and an output type; checkTypes narrows the
// candidate set argument by argument.
trait FunctionTyping { self: Expression =>

  case class Signature(argumentTypes: IndexedSeq[CypherType], outputType: CypherType)

  def signatures: Seq[Signature]

  def semanticCheck(ctx: ast.Expression.SemanticContext): SemanticCheck =
    arguments.semanticCheck(ctx) chain
      checkTypes

  def checkTypes: SemanticCheck = s => {
    // Only signatures with matching arity are candidates.
    val initSignatures = signatures.filter(_.argumentTypes.length == arguments.length)
    val (remainingSignatures: Seq[Signature], result) = arguments.foldLeft((initSignatures, SemanticCheckResult.success(s))) {
      // No candidates left: skip remaining arguments, keep accumulated errors.
      case (accumulator @ (Seq(), _), _) =>
        accumulator
      case ((possibilities, r1), arg) =>
        // Expected type for this argument = union over surviving signatures.
        val argTypes = possibilities.foldLeft(TypeSpec.none) { _ | _.argumentTypes.head.covariant }
        val r2 = arg.expectType(argTypes)(r1.state)
        val actualTypes = arg.types(r2.state)
        // Keep only signatures compatible with the actual type, and drop the
        // consumed head argument type from each.
        val remainingPossibilities = possibilities.filter {
          sig => actualTypes containsAny sig.argumentTypes.head.covariant
        } map {
          sig => sig.copy(argumentTypes = sig.argumentTypes.tail)
        }
        (remainingPossibilities, SemanticCheckResult(r2.state, r1.errors ++ r2.errors))
    }
    // No surviving signature: fall back to TypeSpec.all (errors were already
    // reported per argument above).
    val outputType = remainingSignatures match {
      case Seq() => TypeSpec.all
      case _ => remainingSignatures.foldLeft(TypeSpec.none) { _ | _.outputType.invariant }
    }
    specifyType(outputType)(result.state) match {
      case Left(err) => SemanticCheckResult(result.state, result.errors :+ err)
      case Right(state) => SemanticCheckResult(state, result.errors)
    }
  }
}

// Arity-specific refinements exposing the operand accessors.
trait PrefixFunctionTyping extends FunctionTyping { self: Expression =>
  def rhs: Expression
}

trait PostfixFunctionTyping extends FunctionTyping { self: Expression =>
  def lhs: Expression
}

trait InfixFunctionTyping extends FunctionTyping { self: Expression =>
  def lhs: Expression
  def rhs: Expression
}
| HuangLS/neo4j | community/cypher/frontend-2.3/src/main/scala/org/neo4j/cypher/internal/frontend/v2_3/ast/Expression.scala | Scala | apache-2.0 | 8,256 |
/*
* Copyright (c) 2010 by Alexander Grünewald
*
* This file is part of gruenewa-gl, an OpenGL binding for Scala.
*
* gruenewa-gl is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package gruenewa.opengl.demos
import gruenewa.opengl.GL._
import gruenewa.opengl.GLU._
import gruenewa.opengl.GLUT._
// GLUT demo: renders one triangle and one quad with basic depth testing.
object Main2 {

  /** One-time GL state setup: smooth shading, depth test, nicest perspective hint. */
  def init() {
    println("init")
    glShadeModel(GL_SMOOTH)
    glClearDepth(1f)
    glEnable(GL_DEPTH_TEST)
    glDepthFunc(GL_LEQUAL)
    glEnable(GL_COLOR_MATERIAL)
    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST)
  }

  /** Display callback: clears the buffers, draws a triangle and a quad, swaps. */
  def display() {
    println("display")
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    glLoadIdentity()
    glTranslatef(-1.5f, 0.0f, -6.0f)
    glBegin(GL_TRIANGLES)
    glVertex3f(0.0f, 1.0f, 0.0f)
    glVertex3f(-1.0f, -1.0f, 0.0f)
    glVertex3f(1.0f, -1.0f, 0.0f)
    glEnd()
    glTranslatef(3.0f, 0.0f, 0.0f)
    glBegin(GL_QUADS)
    glVertex3f(-1.0f, 1.0f, 0.0f)
    glVertex3f(1.0f, 1.0f, 0.0f)
    glVertex3f(1.0f, -1.0f, 0.0f)
    glVertex3f(-1.0f, -1.0f, 0.0f)
    glEnd()
    glutSwapBuffers()
  }

  /**
   * Resize callback: updates viewport and projection matrix.
   *
   * Bug fix: the original computed the aspect ratio as `w / h`, which for
   * Int arguments is *integer* division (e.g. 500/400 == 1), distorting the
   * projection for any non-integral aspect ratio. The width is now widened
   * to Double before dividing. The degenerate h == 0 case keeps the original
   * fallback of using the bare width as the aspect value.
   */
  def reshape(w: Int, h: Int) {
    printf("reshape %d %d\n", w, h)
    glViewport(0, 0, w, h)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    val aspect = if (h == 0) w.toDouble else w.toDouble / h
    gluPerspective(80, aspect, 1.0, 5000.0)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
  }

  /** Keyboard callback: 'f' toggles fullscreen, 'g' restores a 500x500 window. */
  def keyboard(key: Char, x: Int, y: Int) {
    println("keyboard => " + key + " at (" + x + "," + y + ")")
    key match {
      case 'f' => glutFullScreen()
      case 'g' => glutReshapeWindow(500, 500)
      case _ => ()
    }
  }

  /** Special-key callback: only logs the key code and cursor position. */
  def arrow_keys(keys: Int, x: Int, y: Int) {
    println("arraow_keys => " + keys + " at (" + x + "," + y + ")")
  }

  /** Entry point: initializes GLUT, registers callbacks, enters the main loop. */
  def main(args: Array[String]) {
    val args2 = Array("-gldebug", "-direct", "-iconic")
    glutInit(args2)
    init()
    glutInitDisplayMode(GLUT_RGB | GLUT_DOUBLE)
    glutInitWindowSize(500, 500)
    glutCreateWindow("Second Demo")
    glutDisplayFunc(display)
    glutReshapeFunc(reshape)
    glutKeyboardFunc(keyboard)
    glutSpecialFunc(arrow_keys)
    glutMainLoop()
  }
}
| gruenewa/gruenewa-gl | gruenewa-opengl-demos/src/main/scala/gruenewa/opengl/demos/Main2.scala | Scala | gpl-3.0 | 2,708 |
package de.tu_berlin.impro3.scala.common
import java.nio.file.Paths
import _root_.de.tu_berlin.impro3.scala.Algorithm
import org.junit.Test
// Shared constants for the Scala algorithm tests.
object ScalaAlgorithmTest {
  // Absolute path of the test-resources directory, located via a known
  // dummy resource that must be on the test classpath.
  val resourcesPath = Paths.get(getClass.getResource("/dummy.txt").getFile).toAbsolutePath.getParent.toString
}
@Test
// Base class for per-algorithm integration tests; concrete subclasses
// implement integrationTest() for their algorithm type A.
abstract class ScalaAlgorithmTest[A <: Algorithm] {
  def integrationTest(): Unit
}
| joroKr21/spatio-temporal-dynamics | impro3-ws14-scala/src/test/scala/de/tu_berlin/impro3/scala/common/ScalaAlgorithmTest.scala | Scala | apache-2.0 | 376 |
/**
* Copyright 2014 Adrian Hurtado (adrianhurt)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views.html.b3
object Args {
  /**
   * Prepends the given default arguments to 'args', so explicitly supplied
   * arguments come after (and can override) the defaults.
   */
  def withDefault(args: Seq[(Symbol, Any)], default: (Symbol, Any)*) = default ++ args
  /**
   * Returns only the inner arguments (those that are exclusive for an input
   * and not for the field constructor): every argument whose key starts with
   * an underscore, or whose value is exactly false, is dropped. The given
   * defaults are prepended to the surviving arguments.
   */
  def inner(args: Seq[(Symbol, Any)], default: (Symbol, Any)*) = {
    val kept = args.filterNot { case (key, value) => key.name.startsWith("_") || value == false }
    default ++ kept
  }
}
object ArgsMap {
  /**
   * Returns true only if the map contains an argument with that key and its value is true.
   */
  // Idiom: Option.contains(true) replaces the equivalent but noisier
  // `.map(_ == true).getOrElse(false)` chain.
  def isTrue(argsMap: Map[Symbol, Any], key: Symbol) = argsMap.get(key).contains(true)
}
package ee.cone.c4gate_server
import java.nio.file.{Files, Path, Paths}
import java.util.UUID
import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor._
import ee.cone.c4di.c4
// Executable that archives any existing snapshots directory (renaming it to a
// unique .bak sibling) and then writes a fresh EMPTY snapshot at the current
// end offset of the consumed log.
@c4("IgnoreAllSnapshotsApp") final class IgnoreAllSnapshots(
  toUpdate: ToUpdate,
  consuming: Consuming,
  factory: SnapshotSaverImplFactory,
  baseDir: DataDir,
) extends Executable with LazyLogging {
  // Makes the deliberately-discarded result of Files.move explicit.
  private def ignoreTheSamePath(path: Path): Unit = ()
  def run(): Unit = {
    // Consume from offset zero just to learn the current end offset.
    val endOffset = consuming.process("0" * OffsetHexSize(), _.endOffset)
    val subDir = "snapshots"
    val path = Paths.get(baseDir.value).resolve(subDir)
    // Move the old snapshots dir aside instead of deleting it.
    if (Files.exists(path))
      ignoreTheSamePath(Files.move(path, path.resolveSibling(s"$subDir.${UUID.randomUUID()}.bak")))
    // Nil updates => an empty snapshot body.
    val (bytes, headers) = toUpdate.toBytes(Nil)
    // val saver = snapshotSavers.full
    val saver = factory.create(subDir)
    val rawSnapshot = saver.save(endOffset, bytes, headers)
    logger.info(s"EMPTY snapshot was saved: ${rawSnapshot.relativePath}")
  }
}
| conecenter/c4proto | bak/IgnoreAllSnapshots.scala | Scala | apache-2.0 | 1,024 |
import com.typesafe.sbt.SbtAspectj.AspectjKeys._
import com.typesafe.sbt.SbtAspectj._
import com.typesafe.sbt.SbtScalariform
import sbt.Keys._
import sbt._
import sbtavro.SbtAvro._
import scoverage.ScoverageSbtPlugin._
import de.heikoseeberger.sbtheader.HeaderPlugin
import de.heikoseeberger.sbtheader.AutomateHeaderPlugin
import de.heikoseeberger.sbtheader.HeaderKey._
import de.heikoseeberger.sbtheader.license.Apache2_0
import scala.sys.SystemProperties
object MoneyBuild extends Build {
import MoneyBuild.Dependencies._
  // Task key for collecting scaladocs into the doc tree (definition only;
  // not wired to an implementation here).
  lazy val copyApiDocsTask = taskKey[Unit]("Copies the scala docs from each project to the doc tree")
  lazy val props = new SystemProperties()

  // Root aggregate project: builds all modules, publishes nothing itself.
  lazy val money = Project("money", file("."))
    .settings(basicSettings: _*)
    .settings(
      publishLocal := {},
      publish := {}
    )
    .aggregate(moneyCore, moneyAspectj, moneyHttpClient, moneyJavaServlet, moneyKafka, moneySpring, moneySpring3, moneyWire, moneyAwsJavaSdk)

  // Core tracing library; akka + metrics + logging dependencies.
  lazy val moneyCore =
    Project("money-core", file("./money-core"))
      .configs(IntegrationTest)
      .settings(projectSettings: _*)
      .settings(
        libraryDependencies <++= (scalaVersion) { v: String =>
          Seq(
            akkaActor(v),
            akkaSlf4j(v),
            akkaTestkit(v),
            slf4j,
            log4jbinding,
            typesafeConfig,
            jodaTime,
            metricsCore,
            scalaTest,
            mockito
          )
        }
      )

  // AspectJ-based instrumentation module (uses aspectjProjectSettings for weaving).
  lazy val moneyAspectj =
    Project("money-aspectj", file("./money-aspectj"))
      .configs(IntegrationTest)
      .settings(aspectjProjectSettings: _*)
      .settings(
        libraryDependencies <++= (scalaVersion) { v: String =>
          Seq(
            akkaActor(v),
            akkaSlf4j(v),
            akkaTestkit(v),
            typesafeConfig,
            scalaTest,
            mockito
          )
        }
      )
      .dependsOn(moneyCore)

  // Apache HttpClient instrumentation; woven, hence aspectj settings.
  lazy val moneyHttpClient =
    Project("money-http-client", file("./money-http-client"))
      .configs(IntegrationTest)
      .settings(aspectjProjectSettings: _*)
      .settings(
        libraryDependencies <++= (scalaVersion) { v: String =>
          Seq(
            akkaActor(v),
            akkaSlf4j(v),
            akkaTestkit(v),
            apacheHttpClient,
            scalaTest,
            mockito
          )
        }
      )
      .dependsOn(moneyCore, moneyAspectj)

  // Servlet filter integration for the java servlet API.
  lazy val moneyJavaServlet =
    Project("money-java-servlet", file("./money-java-servlet"))
      .configs(IntegrationTest)
      .settings(projectSettings: _*)
      .settings(
        libraryDependencies <++= (scalaVersion) { v: String =>
          Seq(
            akkaActor(v),
            akkaSlf4j(v),
            javaxServlet,
            akkaTestkit(v),
            scalaTest,
            mockito
          )
        }
      )
      .dependsOn(moneyCore)
// Wire-format module: Avro (via sbt-avro) plus json4s serialization.
lazy val moneyWire =
  Project("money-wire", file("./money-wire"))
    .configs( IntegrationTest )
    .settings(projectSettings: _*)
    .settings(sbtavro.SbtAvro.avroSettings : _*)
    .settings(
      libraryDependencies <++= (scalaVersion){v: String =>
        Seq(
          json4sNative,
          json4sJackson,
          scalaTest,
          mockito
        )
      },
      fork := false,
      javacOptions in doc := Seq("-source", "1.6"),
      // Configure the desired Avro version. sbt-avro automatically injects a libraryDependency.
      (version in avroConfig) := "1.7.6",
      // Look for *.avsc etc. files in src/test/avro
      (sourceDirectory in avroConfig) <<= (sourceDirectory in Compile)(_ / "avro"),
      // Generate Java String (not Utf8) for Avro string fields.
      (stringType in avroConfig) := "String"
    ).dependsOn(moneyCore)
// Kafka emitter module: kafka 0.8 plus twitter bijection/chill for Avro serialization.
lazy val moneyKafka =
  Project("money-kafka", file("./money-kafka"))
    .configs( IntegrationTest )
    .settings(projectSettings: _*)
    .settings(
      libraryDependencies <++= (scalaVersion){v: String =>
        Seq(
          akkaActor(v),
          akkaSlf4j(v),
          kafka,
          bijectionCore,
          bijectionAvro,
          chill,
          chillAvro,
          chillBijection,
          commonsIo,
          akkaTestkit(v),
          scalaTest,
          mockito
        )
      }
    )
    .dependsOn(moneyCore, moneyWire)
// Spring 4 integration module (AspectJ-woven).
lazy val moneySpring =
  Project("money-spring", file("./money-spring"))
    .configs(IntegrationTest)
    .settings(aspectjProjectSettings: _*)
    .settings(
      libraryDependencies <++= (scalaVersion) { v: String =>
        Seq(
          akkaActor(v),
          akkaSlf4j(v),
          akkaTestkit(v),
          typesafeConfig,
          scalaTest,
          mockito,
          springContext
        )
      }
    )
    .dependsOn(moneyCore)
// Spring 3 integration module; tests run with JUnit (springockito + assertj).
lazy val moneySpring3 =
  Project("money-spring3", file("./money-spring3"))
    .configs(IntegrationTest)
    .settings(projectSettings: _*)
    .settings(
      libraryDependencies <++= (scalaVersion) { v: String =>
        Seq(
          akkaActor(v),
          akkaSlf4j(v),
          akkaTestkit(v),
          typesafeConfig,
          scalaTest,
          // mockito appeared twice in the original Seq; declared once here.
          mockito,
          springContext3,
          springAop3,
          junit,
          junitInterface,
          springTest,
          springOckito,
          assertj
        )
      },
      // Verbose JUnit output.
      testOptions += Tests.Argument(TestFrameworks.JUnit, "-v")
    )
    .dependsOn(moneyCore)
// AWS Java SDK instrumentation module (SDK is a "provided" dependency).
lazy val moneyAwsJavaSdk =
  Project("money-aws-java-sdk", file("./money-aws-java-sdk"))
    .configs(IntegrationTest)
    .settings(projectSettings: _*)
    .settings(
      libraryDependencies <++= (scalaVersion) { v: String =>
        Seq(
          awsJavaSdk,
          mockito,
          junit
        )
      },
      // Verbose JUnit output.
      testOptions += Tests.Argument(TestFrameworks.JUnit, "-v")
    )
    .dependsOn(moneyCore)
// Settings for every code module: basics plus scoverage gating (build fails under 90% coverage).
def projectSettings = basicSettings ++ Seq(
  ScoverageKeys.coverageHighlighting := true,
  ScoverageKeys.coverageMinimum := 90,
  ScoverageKeys.coverageFailOnMinimum := true
)
// Project settings for modules that need load-time weaving.
def aspectjProjectSettings = projectSettings ++ aspectjSettings ++ Seq(
  javaOptions in Test <++= weaverOptions in Aspectj // adds javaagent:aspectjweaver to java options, including test
)
// Settings shared by every project: coordinates, cross-build, compiler flags,
// Sonatype publishing, POM metadata, scaladoc apiMappings and license headers.
def basicSettings = Defaults.itSettings ++ SbtScalariform.scalariformSettings ++ Seq(
  organization := "com.comcast.money",
  version := "0.8.14-SNAPSHOT",
  crossScalaVersions := Seq("2.10.6", "2.11.7"),
  scalaVersion := "2.10.6",
  resolvers ++= Seq(
    "spray repo" at "http://repo.spray.io/",
    "Sonatype OSS Releases" at "http://oss.sonatype.org/content/repositories/releases/"
  ),
  // Target Java 6 bytecode, with lint warnings enabled.
  javacOptions in Compile ++= Seq(
    "-source", "1.6",
    "-target", "1.6",
    "-Xlint:unchecked",
    "-Xlint:deprecation",
    "-Xlint:-options"),
  javacOptions in doc := Seq("-source", "1.6"),
  scalacOptions ++= Seq(
    "-unchecked",
    "-deprecation",
    "-feature",
    "-language:existentials",
    "-language:postfixOps",
    "-language:reflectiveCalls"),
  // ScalaTest: durations + full stack traces on console, XML reports for CI.
  testOptions in Test += Tests.Argument(TestFrameworks.ScalaTest, "-oDF", "-u", "target/scalatest-reports"),
  fork := true,
  // Publish snapshots and releases to the matching Sonatype repository.
  publishMavenStyle := true,
  publishTo := {
    val nexus = "https://oss.sonatype.org/"
    if (isSnapshot.value)
      Some("snapshots" at nexus + "content/repositories/snapshots")
    else
      Some("releases" at nexus + "service/local/staging/deploy/maven2")
  },
  pomIncludeRepository := { _ => false },
  // POM metadata required by Sonatype / Maven Central.
  pomExtra := (
    <url>https://github.com/Comcast/money</url>
    <licenses>
      <license>
        <name>Apache License, Version 2.0</name>
        <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
        <distribution>repo</distribution>
        <comments>A business-friendly OSS license</comments>
      </license>
    </licenses>
    <scm>
      <url>git@github.com:Comcast/money.git</url>
      <connection>scm:git:git@github.com:Comcast/money.git</connection>
    </scm>
    <developers>
      <developer>
        <id>paulcleary</id>
        <name>Paul Clery</name>
        <url>https://github.com/paulcleary</url>
      </developer>
      <developer>
        <id>kristomasette</id>
        <name>Kristofer Tomasette</name>
        <url>https://github.com/kristomasette</url>
      </developer>
    </developers>),
  publishArtifact in Test := false,
  // Link scaladoc references of managed dependencies to their hosted API docs.
  autoAPIMappings := true,
  apiMappings ++= {
    // Locates the classpath entry of a managed dependency by org/name prefix.
    def findManagedDependency(organization: String, name: String): Option[File] = {
      (for {
        entry <- (fullClasspath in Compile).value
        module <- entry.get(moduleID.key) if module.organization == organization && module.name.startsWith(name)
      } yield entry.data).headOption
    }
    val links: Seq[Option[(File, URL)]] = Seq(
      // NOTE(review): doc link pinned to 2.10.4 although scalaVersion is 2.10.6 — confirm intended.
      findManagedDependency("org.scala-lang", "scala-library").map(d => d -> url(s"http://www.scala-lang.org/api/2.10.4/")),
      findManagedDependency("com.typesafe.akka", "akka-actor").map(d => d -> url(s"http://doc.akka.io/api/akka/$akkaVersion/")),
      findManagedDependency("com.typesafe", "config").map(d => d -> url("http://typesafehub.github.io/config/latest/api/"))
    )
    val x = links.collect { case Some(d) => d }.toMap
    // NOTE(review): leftover debug output — prints the resolved mappings on every build load.
    println("links: " + x)
    x
  },
  // Auto-managed Apache 2.0 license headers per file type.
  headers := Map(
    "scala" -> Apache2_0("2012-2015", "Comcast Cable Communications Management, LLC"),
    "java" -> Apache2_0("2012-2015", "Comcast Cable Communications Management, LLC"),
    "conf" -> Apache2_0("2012-2015", "Comcast Cable Communications Management, LLC", "#")
  )
) ++ HeaderPlugin.settingsFor(IntegrationTest) ++ AutomateHeaderPlugin.automateFor(Compile, Test, IntegrationTest)
// Central declaration of all third-party dependency coordinates.
object Dependencies {
  val akkaVersion = "2.2.3"
  val codahaleVersion = "3.0.2"
  val apacheHttpClientVersion = "4.3.5"
  // Logging, SLF4J must equal the same version used by akka
  val slf4j = "org.slf4j" % "slf4j-api" % "1.7.5"
  val log4jbinding = "org.slf4j" % "slf4j-log4j12" % "1.7.5" % "it,test"
  // Akka — the artifact version is resolved from the Scala binary version (see getAkkaVersion).
  def akkaActor(scalaVersion: String) = "com.typesafe.akka" %% "akka-actor" % getAkkaVersion(scalaVersion)
  def akkaSlf4j(scalaVersion: String) = "com.typesafe.akka" %% "akka-slf4j" % getAkkaVersion(scalaVersion) % "runtime"
  def akkaTestkit(scalaVersion: String) = "com.typesafe.akka" %% "akka-testkit" % getAkkaVersion(scalaVersion) %
    "it,test"
  // Joda
  val jodaTime = "joda-time" % "joda-time" % "2.1"
  // Json
  val json4sNative = "org.json4s" %% "json4s-native" % "3.2.11"
  val json4sJackson = "org.json4s" %% "json4s-jackson" % "3.2.11"
  // Typesafe config
  def typesafeConfig = "com.typesafe" % "config" % "1.2.1"
  // Codahale metrics
  val metricsCore = "com.codahale.metrics" % "metrics-core" % codahaleVersion
  // Apache http client
  val apacheHttpClient = "org.apache.httpcomponents" % "httpclient" % apacheHttpClientVersion
  // Javax servlet - note: the group id and artifact id have changed in 3.0
  val javaxServlet = "javax.servlet" % "servlet-api" % "2.5"
  // Kafka, exclude dependencies that we will not need, should work for 2.10 and 2.11
  val kafka = ("org.apache.kafka" %% "kafka" % "0.8.2.2")
    .exclude("javax.jms", "jms")
    .exclude("com.sun.jdmk", "jmxtools")
    .exclude("com.sun.jmx", "jmxri")
    .exclude("org.apache.zookeeper", "zookeeper")
    .exclude("javax.mail", "mail")
    .exclude("javax.activation", "activation")
  // Avro and Bijection — note: pinned to the _2.10 artifacts, not cross-built with %%.
  val bijectionCore = "com.twitter" % "bijection-core_2.10" % "0.6.3"
  val bijectionAvro = "com.twitter" % "bijection-avro_2.10" % "0.6.3"
  val chill = "com.twitter" % "chill_2.10" % "0.4.0"
  val chillAvro = "com.twitter" % "chill-avro" % "0.4.0"
  val chillBijection = "com.twitter" % "chill-bijection_2.10" % "0.4.0"
  val commonsIo = "commons-io" % "commons-io" % "2.4"
  // Spring
  val springContext3 = ("org.springframework" % "spring-context" % "3.2.6.RELEASE")
    .exclude("commons-logging", "commons-logging")
  val springAop3 = "org.springframework" % "spring-aop" % "3.2.6.RELEASE"
  val springContext = "org.springframework" % "spring-context" % "4.1.1.RELEASE"
  val awsJavaSdk = "com.amazonaws" % "aws-java-sdk" % "1.10.57" % "provided"
  // Test
  val mockito = "org.mockito" % "mockito-core" % "1.9.5" % "test"
  val scalaTest = "org.scalatest" %% "scalatest" % "2.2.3" % "it,test"
  val junit = "junit" % "junit" % "4.11" % "test"
  val junitInterface = "com.novocode" % "junit-interface" % "0.11" % "test->default"
  val springTest = ("org.springframework" % "spring-test" % "3.2.6.RELEASE")
    .exclude("commons-logging", "commons-logging")
  val springOckito = "org.kubek2k" % "springockito" % "1.0.9" % "test"
  val assertj = "org.assertj" % "assertj-core" % "1.7.1" % "it,test"
  // Maps the Scala binary version to the matching akka release.
  // NOTE(review): throws MatchError for Scala versions other than 2.10.x / 2.11.x.
  def getAkkaVersion(scalaVersion: String) = {
    scalaVersion match {
      case version if version.startsWith("2.10") => "2.2.3"
      case version if version.startsWith("2.11") => "2.3.4"
    }
  }
}
}
| ipapa/money | project/MoneyBuild.scala | Scala | apache-2.0 | 13,198 |
package microtools.metrics
import com.codahale.metrics.{MetricRegistry, Timer}
import microtools.decorators.{FutureDecorator, TryDecorator}
import microtools.logging.{ContextAwareLogger, LoggingContext}
import microtools.BusinessTry
import scala.concurrent.{ExecutionContext, Future}
import scala.util.{Failure, Success}
/**
 * Mixin that wraps `Future`- and `BusinessTry`-producing blocks with a
 * codahale [[Timer]] and logs the elapsed time (in milliseconds) via the
 * mixing class' context-aware logger.
 */
trait TimedCalls {
  def log: ContextAwareLogger
  def metricRegistry: MetricRegistry

  /**
   * Decorator timing a Future-returning block; registers (or reuses) a timer
   * named "<logger name>.<callId>" in the metric registry.
   */
  def timeFuture[T](
      callId: String
  )(implicit ec: ExecutionContext, ctx: LoggingContext): FutureDecorator[T] = {
    val timer = metricRegistry.timer(s"${log.name}.$callId")
    timeFuture(callId, timer)
  }

  /** Decorator timing a Future-returning block against the given timer. */
  def timeFuture[T](
      callId: String,
      timer: Timer
  )(implicit ec: ExecutionContext, ctx: LoggingContext): FutureDecorator[T] =
    new FutureDecorator[T] {
      override def apply(block: => Future[T]): Future[T] = {
        // The timer context starts before the block is forced and is stopped
        // when the resulting future completes, so async time is included.
        val timeCtx = timer.time()
        val result = block
        result.onComplete {
          case Success(_) =>
            val nanos = timeCtx.stop()
            log.info(s"$callId: Success", "millis" -> (nanos / 1000000.0).toString)
          case Failure(e) =>
            val nanos = timeCtx.stop()
            log.error(s"$callId: Internal error", e, "millis" -> (nanos / 1000000.0).toString)
        }
        result
      }
    }

  /** Decorator timing a BusinessTry-returning block; timer named "<logger name>.<callId>". */
  def timeTry[T](
      callId: String
  )(implicit ec: ExecutionContext, ctx: LoggingContext): TryDecorator[T] = {
    val timer = metricRegistry.timer(s"${log.name}.$callId")
    timeTry(callId, timer)
  }

  /** Decorator timing a BusinessTry-returning block against the given timer. */
  def timeTry[T](
      callId: String,
      timer: Timer
  )(implicit ec: ExecutionContext, ctx: LoggingContext): TryDecorator[T] = new TryDecorator[T] {
    override def apply(block: => BusinessTry[T]): BusinessTry[T] = {
      val timeCtx = timer.time()
      val result = block
      // NOTE(review): unlike timeFuture, the success case stops the timer but
      // does not log, and the value of `result.onComplete {...}` (not `result`)
      // is returned — this assumes BusinessTry.onComplete yields a
      // BusinessTry[T]; confirm against BusinessTry's API.
      result.onComplete {
        case Success(_) =>
          timeCtx.stop()
        case Failure(e) =>
          val nanos = timeCtx.stop()
          log.error(s"$callId: Internal error", e, "millis" -> (nanos / 1000000.0).toString)
      }
    }
  }
}
| 21re/play-micro-tools | src/main/scala/microtools/metrics/TimedCalls.scala | Scala | mit | 2,059 |
package org.http4s
import cats.Applicative
import cats.data.{Kleisli, OptionT}
import cats.effect.Sync
import cats.implicits._
/** Functions for creating [[HttpRoutes]] kleislis. */
object HttpRoutes {

  /** Lifts a function into an [[HttpRoutes]]. The application of `run`
    * is suspended in `F` to permit more efficient combination of
    * routes via `SemigroupK`.
    *
    * @tparam F the effect of the [[HttpRoutes]]
    * @param run the function to lift
    * @return an [[HttpRoutes]] that wraps `run`
    */
  def apply[F[_]: Sync](run: Request[F] => OptionT[F, Response[F]]): HttpRoutes[F] =
    Http(run)

  /** Lifts an effectful [[Response]] into an [[HttpRoutes]].
    *
    * @tparam F the effect of the [[HttpRoutes]]
    * @param fr the effectful [[Response]] to lift
    * @return an [[HttpRoutes]] that always returns `fr`
    */
  def liftF[F[_]](fr: OptionT[F, Response[F]]): HttpRoutes[F] =
    Kleisli.liftF(fr)

  /** Lifts a [[Response]] into an [[HttpRoutes]].
    *
    * @tparam F the base effect of the [[HttpRoutes]]
    * @param r the [[Response]] to lift
    * @return an [[HttpRoutes]] that always returns `r` in effect `OptionT[F, ?]`
    */
  def pure[F[_]](r: Response[F])(implicit FO: Applicative[OptionT[F, ?]]): HttpRoutes[F] =
    Kleisli.pure(r)

  /** Transforms an [[HttpRoutes]] on its input. The application of the
    * transformed function is suspended in `F` to permit more
    * efficient combination of routes via `SemigroupK`.
    *
    * @tparam F the base effect of the [[HttpRoutes]]
    * @param f a function to apply to the [[Request]]
    * @param fa the [[HttpRoutes]] to transform
    * @return An [[HttpRoutes]] whose input is transformed by `f` before
    * being applied to `fa`
    */
  def local[F[_]: Sync](f: Request[F] => Request[F])(fa: HttpRoutes[F]): HttpRoutes[F] =
    Http.local[OptionT[F, ?], F](f)(fa)

  /** Lifts a partial function into an [[HttpRoutes]]. The application of the
    * partial function is suspended in `F` to permit more efficient combination
    * of routes via `SemigroupK`.
    *
    * @tparam F the base effect of the [[HttpRoutes]]
    * @param pf the partial function to lift
    * @return An [[HttpRoutes]] that returns some [[Response]] in an `OptionT[F, ?]`
    * wherever `pf` is defined, an `OptionT.none` wherever it is not
    */
  def of[F[_]](pf: PartialFunction[Request[F], F[Response[F]]])(
      implicit F: Sync[F]): HttpRoutes[F] =
    Kleisli(req => OptionT(F.suspend(pf.lift(req).sequence)))

  /** An empty set of routes. Always responds with `OptionT.none`.
    *
    * @tparam F the base effect of the [[HttpRoutes]]
    */
  def empty[F[_]: Applicative]: HttpRoutes[F] = liftF(OptionT.none)
}
| aeons/http4s | core/src/main/scala/org/http4s/HttpRoutes.scala | Scala | apache-2.0 | 2,723 |
package ekiaa.akka.otp.examples.ex02
import akka.actor.{ActorRef, Actor}
import com.typesafe.scalalogging.StrictLogging
/**
 * Mixin for actors that need to message an [[Ex02Actor02]]: wraps the raw
 * message in the [[Ex02Actor02.MsgForActor02]] envelope its receive expects.
 */
trait IEx02Actor02 {
  this: Actor =>
  import Ex02Actor02._
  // Sends `msg` to `actorRef` inside the envelope; the implicit sender is this actor.
  final def sendMsgToActor02(actorRef: ActorRef, msg: Any) = {
    actorRef ! MsgForActor02(msg)
  }
}
object Ex02Actor02 {
  // Envelope marking a message as addressed to Ex02Actor02.
  final case class MsgForActor02(msg: Any)
}
// Mixes in IEx02Actor01 so it can reply to the sender (the ping/pong example's other actor).
class Ex02Actor02 extends Actor with IEx02Actor01 with StrictLogging {
  import Ex02Actor02._
  def receive = {
    case MsgForActor02(msg) =>
      // Log the enveloped message, then answer the sender with "pong".
      logger.debug(s"Ex02Actor02 receive from ${sender()} message: $msg")
      sendMsgToActor01(sender(), "pong")
    case msg =>
      // Anything not enveloped in MsgForActor02 is only logged.
      logger.debug(s"Ex02Actor02 receive from ${sender()} unknown message: $msg")
  }
}
| ekiaa/akka-otp | service/src/main/scala/ekiaa/akka/otp/examples/ex02/Ex02Actor02.scala | Scala | mit | 725 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.util.{Locale, TimeZone}
import scala.collection.JavaConverters._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.execution.columnar.InMemoryRelation
/**
 * Base harness for SQL query tests: runs DataFrames/Datasets, compares the
 * collected results against expected answers, and produces detailed failure
 * messages on mismatch or analysis failure.
 */
abstract class QueryTest extends PlanTest {

  protected def spark: SparkSession

  // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
  TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
  // Add Locale setting
  Locale.setDefault(Locale.US)

  /**
   * Runs the plan and makes sure the answer contains all of the keywords.
   */
  def checkKeywordsExist(df: DataFrame, keywords: String*): Unit = {
    // Concatenate every row into one string and look for each keyword in it.
    val outputs = df.collect().map(_.mkString).mkString
    for (key <- keywords) {
      assert(outputs.contains(key), s"Failed for $df ($key doesn't exist in result)")
    }
  }

  /**
   * Runs the plan and makes sure the answer does NOT contain any of the keywords.
   */
  def checkKeywordsNotExist(df: DataFrame, keywords: String*): Unit = {
    val outputs = df.collect().map(_.mkString).mkString
    for (key <- keywords) {
      assert(!outputs.contains(key), s"Failed for $df ($key existed in the result)")
    }
  }

  /**
   * Evaluates a dataset to make sure that the result of calling collect matches the given
   * expected answer (order-sensitive).
   */
  protected def checkDataset[T](
      ds: => Dataset[T],
      expectedAnswer: T*): Unit = {
    val result = getResult(ds)
    if (!compare(result.toSeq, expectedAnswer)) {
      fail(
        s"""
           |Decoded objects do not match expected objects:
           |expected: $expectedAnswer
           |actual:   ${result.toSeq}
           |${ds.exprEnc.deserializer.treeString}
         """.stripMargin)
    }
  }

  /**
   * Evaluates a dataset to make sure that the result of calling collect matches the given
   * expected answer, after sort (order-insensitive; requires an Ordering for T).
   */
  protected def checkDatasetUnorderly[T : Ordering](
      ds: => Dataset[T],
      expectedAnswer: T*): Unit = {
    val result = getResult(ds)
    if (!compare(result.toSeq.sorted, expectedAnswer.sorted)) {
      fail(
        s"""
           |Decoded objects do not match expected objects:
           |expected: $expectedAnswer
           |actual:   ${result.toSeq}
           |${ds.exprEnc.deserializer.treeString}
         """.stripMargin)
    }
  }

  // Forces analysis and collection of the dataset, converting analysis errors
  // and collection exceptions into descriptive test failures.
  private def getResult[T](ds: => Dataset[T]): Array[T] = {
    val analyzedDS = try ds catch {
      case ae: AnalysisException =>
        if (ae.plan.isDefined) {
          fail(
            s"""
               |Failed to analyze query: $ae
               |${ae.plan.get}
               |
               |${stackTraceToString(ae)}
             """.stripMargin)
        } else {
          throw ae
        }
    }
    assertEmptyMissingInput(analyzedDS)

    try ds.collect() catch {
      case e: Exception =>
        fail(
          s"""
             |Exception collecting dataset as objects
             |${ds.exprEnc}
             |${ds.exprEnc.deserializer.treeString}
             |${ds.queryExecution}
           """.stripMargin, e)
    }
  }

  // Deep structural equality: descends into arrays and iterables element-wise,
  // and treats null == null as equal.
  private def compare(obj1: Any, obj2: Any): Boolean = (obj1, obj2) match {
    case (null, null) => true
    case (null, _) => false
    case (_, null) => false
    case (a: Array[_], b: Array[_]) =>
      a.length == b.length && a.zip(b).forall { case (l, r) => compare(l, r)}
    case (a: Iterable[_], b: Iterable[_]) =>
      a.size == b.size && a.zip(b).forall { case (l, r) => compare(l, r)}
    case (a, b) => a == b
  }

  /**
   * Runs the plan and makes sure the answer matches the expected result.
   *
   * @param df the [[DataFrame]] to be executed
   * @param expectedAnswer the expected result in a [[Seq]] of [[Row]]s.
   */
  protected def checkAnswer(df: => DataFrame, expectedAnswer: Seq[Row]): Unit = {
    val analyzedDF = try df catch {
      case ae: AnalysisException =>
        if (ae.plan.isDefined) {
          fail(
            s"""
               |Failed to analyze query: $ae
               |${ae.plan.get}
               |
               |${stackTraceToString(ae)}
               |""".stripMargin)
        } else {
          throw ae
        }
    }

    assertEmptyMissingInput(analyzedDF)

    QueryTest.checkAnswer(analyzedDF, expectedAnswer) match {
      case Some(errorMessage) => fail(errorMessage)
      case None =>
    }
  }

  /** Single-row convenience overload of [[checkAnswer]]. */
  protected def checkAnswer(df: => DataFrame, expectedAnswer: Row): Unit = {
    checkAnswer(df, Seq(expectedAnswer))
  }

  /** DataFrame-vs-DataFrame convenience overload: collects the expected side first. */
  protected def checkAnswer(df: => DataFrame, expectedAnswer: DataFrame): Unit = {
    checkAnswer(df, expectedAnswer.collect())
  }

  /**
   * Runs the plan and makes sure the answer is within absTol of the expected result.
   *
   * @param dataFrame the [[DataFrame]] to be executed
   * @param expectedAnswer the expected result in a [[Seq]] of [[Row]]s.
   * @param absTol the absolute tolerance between actual and expected answers.
   */
  protected def checkAggregatesWithTol(dataFrame: DataFrame,
      expectedAnswer: Seq[Row],
      absTol: Double): Unit = {
    // TODO: catch exceptions in data frame execution
    val actualAnswer = dataFrame.collect()
    require(actualAnswer.length == expectedAnswer.length,
      s"actual num rows ${actualAnswer.length} != expected num of rows ${expectedAnswer.length}")

    actualAnswer.zip(expectedAnswer).foreach {
      case (actualRow, expectedRow) =>
        QueryTest.checkAggregatesWithTol(actualRow, expectedRow, absTol)
    }
  }

  /** Single-row convenience overload of the tolerance-based check. */
  protected def checkAggregatesWithTol(dataFrame: DataFrame,
      expectedAnswer: Row,
      absTol: Double): Unit = {
    checkAggregatesWithTol(dataFrame, Seq(expectedAnswer), absTol)
  }

  /**
   * Asserts that a given [[Dataset]] will be executed using the given number of cached results.
   */
  def assertCached(query: Dataset[_], numCachedTables: Int = 1): Unit = {
    val planWithCaching = query.queryExecution.withCachedData
    val cachedData = planWithCaching collect {
      case cached: InMemoryRelation => cached
    }

    assert(
      cachedData.size == numCachedTables,
      s"Expected query to contain $numCachedTables, but it actually had ${cachedData.size}\\n" +
        planWithCaching)
  }

  /**
   * Asserts that a given [[Dataset]] does not have missing inputs in all the analyzed plans.
   */
  def assertEmptyMissingInput(query: Dataset[_]): Unit = {
    assert(query.queryExecution.analyzed.missingInput.isEmpty,
      s"The analyzed logical plan has missing inputs:\\n${query.queryExecution.analyzed}")
    assert(query.queryExecution.optimizedPlan.missingInput.isEmpty,
      s"The optimized logical plan has missing inputs:\\n${query.queryExecution.optimizedPlan}")
    assert(query.queryExecution.executedPlan.missingInput.isEmpty,
      s"The physical plan has missing inputs:\\n${query.queryExecution.executedPlan}")
  }
}
/** Stateless answer-comparison helpers shared by [[QueryTest]] and Java tests. */
object QueryTest {

  /**
   * Runs the plan and makes sure the answer matches the expected result.
   * If there was exception during the execution or the contents of the DataFrame does not
   * match the expected result, an error message will be returned. Otherwise, a [[None]] will
   * be returned.
   *
   * @param df the [[DataFrame]] to be executed
   * @param expectedAnswer the expected result in a [[Seq]] of [[Row]]s.
   * @param checkToRDD whether to verify deserialization to an RDD. This runs the query twice.
   */
  def checkAnswer(
      df: DataFrame,
      expectedAnswer: Seq[Row],
      checkToRDD: Boolean = true): Option[String] = {
    // The comparison is order-sensitive only when the query has a top-level Sort.
    val isSorted = df.logicalPlan.collect { case s: logical.Sort => s }.nonEmpty
    if (checkToRDD) {
      df.rdd.count()  // Also attempt to deserialize as an RDD [SPARK-15791]
    }

    val sparkAnswer = try df.collect().toSeq catch {
      case e: Exception =>
        val errorMessage =
          s"""
             |Exception thrown while executing query:
             |${df.queryExecution}
             |== Exception ==
             |$e
             |${org.apache.spark.sql.catalyst.util.stackTraceToString(e)}
          """.stripMargin
        return Some(errorMessage)
    }

    sameRows(expectedAnswer, sparkAnswer, isSorted).map { results =>
      s"""
         |Results do not match for query:
         |Timezone: ${TimeZone.getDefault}
         |Timezone Env: ${sys.env.getOrElse("TZ", "")}
         |
         |${df.queryExecution}
         |== Results ==
         |$results
       """.stripMargin
    }
  }

  def prepareAnswer(answer: Seq[Row], isSorted: Boolean): Seq[Row] = {
    // Converts data to types that we can do equality comparison using Scala collections.
    // For BigDecimal type, the Scala type has a better definition of equality test (similar to
    // Java's java.math.BigDecimal.compareTo).
    // For binary arrays, we convert it to Seq to avoid of calling java.util.Arrays.equals for
    // equality test.
    val converted: Seq[Row] = answer.map(prepareRow)
    // Unsorted queries are compared in a canonical (toString-sorted) order.
    if (!isSorted) converted.sortBy(_.toString()) else converted
  }

  // We need to call prepareRow recursively to handle schemas with struct types.
  def prepareRow(row: Row): Row = {
    Row.fromSeq(row.toSeq.map {
      case null => null
      case d: java.math.BigDecimal => BigDecimal(d)
      // Convert array to Seq for easy equality check.
      case b: Array[_] => b.toSeq
      case r: Row => prepareRow(r)
      case o => o
    })
  }

  // Builds the side-by-side "Correct Answer vs Spark Answer" failure message.
  private def genError(
      expectedAnswer: Seq[Row],
      sparkAnswer: Seq[Row],
      isSorted: Boolean = false): String = {
    // Renders a row's schema (or "struct<>" when absent) for the message header.
    val getRowType: Option[Row] => String = row =>
      row.map(row =>
        if (row.schema == null) {
          "struct<>"
        } else {
          s"${row.schema.catalogString}"
        }).getOrElse("struct<>")

    s"""
       |== Results ==
       |${
      sideBySide(
        s"== Correct Answer - ${expectedAnswer.size} ==" +:
          getRowType(expectedAnswer.headOption) +:
          prepareAnswer(expectedAnswer, isSorted).map(_.toString()),
        s"== Spark Answer - ${sparkAnswer.size} ==" +:
          getRowType(sparkAnswer.headOption) +:
          prepareAnswer(sparkAnswer, isSorted).map(_.toString())).mkString("\\n")
    }
    """.stripMargin
  }

  /** Checks that every expected row appears in the Spark answer (subset check). */
  def includesRows(
      expectedRows: Seq[Row],
      sparkAnswer: Seq[Row]): Option[String] = {
    if (!prepareAnswer(expectedRows, true).toSet.subsetOf(prepareAnswer(sparkAnswer, true).toSet)) {
      return Some(genError(expectedRows, sparkAnswer, true))
    }
    None
  }

  /** Checks that the two answers are identical after canonicalization; None on match. */
  def sameRows(
      expectedAnswer: Seq[Row],
      sparkAnswer: Seq[Row],
      isSorted: Boolean = false): Option[String] = {
    if (prepareAnswer(expectedAnswer, isSorted) != prepareAnswer(sparkAnswer, isSorted)) {
      return Some(genError(expectedAnswer, sparkAnswer, isSorted))
    }
    None
  }

  /**
   * Runs the plan and makes sure the answer is within absTol of the expected result.
   *
   * @param actualAnswer the actual result in a [[Row]].
   * @param expectedAnswer the expected result in a [[Row]].
   * @param absTol the absolute tolerance between actual and expected answers.
   */
  protected def checkAggregatesWithTol(actualAnswer: Row, expectedAnswer: Row, absTol: Double) = {
    require(actualAnswer.length == expectedAnswer.length,
      s"actual answer length ${actualAnswer.length} != " +
        s"expected answer length ${expectedAnswer.length}")

    // TODO: support other numeric types besides Double
    // TODO: support struct types?
    actualAnswer.toSeq.zip(expectedAnswer.toSeq).foreach {
      case (actual: Double, expected: Double) =>
        assert(math.abs(actual - expected) < absTol,
          s"actual answer $actual not within $absTol of correct answer $expected")
      case (actual, expected) =>
        assert(actual == expected, s"$actual did not equal $expected")
    }
  }

  // Java-friendly variant: returns the error message or null on success.
  def checkAnswer(df: DataFrame, expectedAnswer: java.util.List[Row]): String = {
    checkAnswer(df, expectedAnswer.asScala) match {
      case Some(errorMessage) => errorMessage
      case None => null
    }
  }
}
// Self-test of the harness: a wrong expected answer must fail the test, not pass silently.
class QueryTestSuite extends QueryTest with test.SharedSQLContext {
  test("SPARK-16940: checkAnswer should raise TestFailedException for wrong results") {
    intercept[org.scalatest.exceptions.TestFailedException] {
      checkAnswer(sql("SELECT 1"), Row(2) :: Nil)
    }
  }
}
| brad-kaiser/spark | sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala | Scala | apache-2.0 | 13,096 |
package gh2011b.models
import net.liftweb.json.JsonAST.JValue
// Payload of a GitHub MemberEvent: the affected member plus the action performed (e.g. "added").
case class MemberEventPayload(member: Member, action: String)
object MemberEventPayload
{
  /**
   * Parses a [[MemberEventPayload]] from the given JSON node.
   *
   * @param json the payload node of a GitHub MemberEvent
   * @return Some(payload) when both the "member" object and the "action"
   *         string are present and well-formed, None otherwise.
   */
  def apply(json: JValue): Option[MemberEventPayload] =
  {
    // The original also bound gh3.node2Long here, but never used it — dropped.
    val n2s = gh3.node2String(json)(_)
    // Option composition replaces the forall(_.isDefined)/.get pattern.
    for {
      member <- Member(json \\ "member")
      action <- n2s("action")
    } yield MemberEventPayload(member, action)
  }
}
| mgoeminne/github_etl | src/main/scala/gh2011b/models/MemberEventPayload.scala | Scala | mit | 498 |
package se.gigurra.leavu3.datamodel
import com.github.gigurra.heisenberg.MapData._
import com.github.gigurra.heisenberg.{Schema, Parsed}
/**
 * Cockpit failure/warning lamp states parsed from raw DCS export data via the
 * heisenberg schema below. Despite the class name, some fields are plain
 * states rather than failures (canopyOpen, autopilotOn, stallSignalization).
 */
case class FailureIndicators(source: SourceData = Map.empty) extends SafeParsed[FailureIndicators.type] {
  val canopyOpen = parse(schema.canopyOpen)
  val cannonFailure = parse(schema.cannonFailure)
  val rightTailPlaneFailure = parse(schema.rightTailPlaneFailure)
  val leftAileronFailure = parse(schema.leftAileronFailure)
  val autopilotFailure = parse(schema.autopilotFailure)
  val hydraulicsFailure = parse(schema.hydraulicsFailure)
  val hudFailure = parse(schema.hudFailure)
  val rightWingPumpFailure = parse(schema.rightWingPumpFailure)
  val leftWingPumpFailure = parse(schema.leftWingPumpFailure)
  val ecmFailure = parse(schema.ecmFailure)
  val rightEngineFailure = parse(schema.rightEngineFailure)
  val stallSignalization = parse(schema.stallSignalization)
  val helmetFailure = parse(schema.helmetFailure)
  val radarFailure = parse(schema.radarFailure)
  val rightMainPumpFailure = parse(schema.rightMainPumpFailure)
  val acsFailure = parse(schema.acsFailure)
  val mfdFailure = parse(schema.mfdFailure)
  val leftEngineFailure = parse(schema.leftEngineFailure)
  val leftTailPlaneFailure = parse(schema.leftTailPlaneFailure)
  val mlwsFailure = parse(schema.mlwsFailure)
  val eosFailure = parse(schema.eosFailure)
  val autopilotOn = parse(schema.autopilotOn)
  val leftMainPumpFailure = parse(schema.leftMainPumpFailure)
  val rightAileronFailure = parse(schema.rightAileronFailure)
  val rwsFailure = parse(schema.rwsFailure)
  val masterCaution = parse(schema.masterCaution)
  val fuelTankDamage = parse(schema.fuelTankDamage)
  val hearFailure = parse(schema.hearFailure)
}
/**
 * Heisenberg schema mapping source-data keys to boolean indicator fields;
 * every indicator defaults to false when absent.
 */
object FailureIndicators extends Schema[FailureIndicators] {
  val canopyOpen = required[Boolean]("CanopyOpen", default = false)
  val cannonFailure = required[Boolean]("CannonFailure", default = false)
  val rightTailPlaneFailure = required[Boolean]("RightTailPlaneFailure", default = false)
  val leftAileronFailure = required[Boolean]("LeftAileronFailure", default = false)
  val autopilotFailure = required[Boolean]("AutopilotFailure", default = false)
  val hydraulicsFailure = required[Boolean]("HydraulicsFailure", default = false)
  val hudFailure = required[Boolean]("HUDFailure", default = false)
  val rightWingPumpFailure = required[Boolean]("RightWingPumpFailure", default = false)
  val leftWingPumpFailure = required[Boolean]("LeftWingPumpFailure", default = false)
  val ecmFailure = required[Boolean]("ECMFailure", default = false)
  val rightEngineFailure = required[Boolean]("RightEngineFailure", default = false)
  val stallSignalization = required[Boolean]("StallSignalization", default = false)
  val helmetFailure = required[Boolean]("HelmetFailure", default = false)
  val radarFailure = required[Boolean]("RadarFailure", default = false)
  val rightMainPumpFailure = required[Boolean]("RightMainPumpFailure", default = false)
  val acsFailure = required[Boolean]("ACSFailure", default = false)
  val mfdFailure = required[Boolean]("MFDFailure", default = false)
  val leftEngineFailure = required[Boolean]("LeftEngineFailure", default = false)
  val leftTailPlaneFailure = required[Boolean]("LeftTailPlaneFailure", default = false)
  val mlwsFailure = required[Boolean]("MLWSFailure", default = false)
  val eosFailure = required[Boolean]("EOSFailure", default = false)
  val autopilotOn = required[Boolean]("AutopilotOn", default = false)
  val leftMainPumpFailure = required[Boolean]("LeftMainPumpFailure", default = false)
  val rightAileronFailure = required[Boolean]("RightAileronFailure", default = false)
  val rwsFailure = required[Boolean]("RWSFailure", default = false)
  // NOTE(review): field name and source key disagree — val masterCaution reads "MasterWarning".
  // Renaming the val would break callers; flagged instead of fixed.
  val masterCaution = required[Boolean]("MasterWarning", default = false)
  val fuelTankDamage = required[Boolean]("FuelTankDamage", default = false)
  // NOTE(review): "hearFailure" reads the "GearFailure" key — the val name looks like a
  // typo for gearFailure, but renaming would break callers; flagged instead of fixed.
  val hearFailure = required[Boolean]("GearFailure", default = false)
}
| GiGurra/leavu3 | src/main/scala/se/gigurra/leavu3/datamodel/FailureIndicators.scala | Scala | mit | 4,324 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.parquet
import java.math.{BigDecimal, BigInteger}
import java.nio.ByteOrder
import java.time.{ZoneId, ZoneOffset}
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import org.apache.parquet.column.Dictionary
import org.apache.parquet.io.api.{Binary, Converter, GroupConverter, PrimitiveConverter}
import org.apache.parquet.schema.{GroupType, Type}
import org.apache.parquet.schema.LogicalTypeAnnotation._
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.{BINARY, FIXED_LEN_BYTE_ARRAY, INT32, INT64, INT96}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, CaseInsensitiveMap, DateTimeUtils, GenericArrayData}
import org.apache.spark.sql.execution.datasources.DataSourceUtils
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
 * A [[ParentContainerUpdater]] is used by a Parquet converter to set converted values to some
 * corresponding parent container. For example, a converter for a `StructType` field may set
 * converted values to a [[InternalRow]]; or a converter for array elements may append converted
 * values to an [[ArrayBuffer]].
 */
private[parquet] trait ParentContainerUpdater {
  /** Called before a record field is being converted */
  def start(): Unit = ()

  /** Called after a record field is being converted */
  def end(): Unit = ()

  // Generic setter; all typed setters below delegate here by default, so an
  // implementation only has to override `set` (plus any primitives it wants unboxed).
  def set(value: Any): Unit = ()
  def setBoolean(value: Boolean): Unit = set(value)
  def setByte(value: Byte): Unit = set(value)
  def setShort(value: Short): Unit = set(value)
  def setInt(value: Int): Unit = set(value)
  def setLong(value: Long): Unit = set(value)
  def setFloat(value: Float): Unit = set(value)
  def setDouble(value: Double): Unit = set(value)
}
/** A no-op updater used for root converter (who doesn't have a parent). */
private[parquet] object NoopUpdater extends ParentContainerUpdater
/** Mixed into converters that expose the updater feeding their parent container. */
private[parquet] trait HasParentContainerUpdater {
  def updater: ParentContainerUpdater
}
/**
 * A convenient converter class for Parquet group types with a [[HasParentContainerUpdater]].
 */
private[parquet] abstract class ParquetGroupConverter(val updater: ParentContainerUpdater)
  extends GroupConverter with HasParentContainerUpdater
/**
 * Parquet converter for Parquet primitive types. Note that not all Spark SQL atomic types
 * are handled by this converter. Parquet primitive types are only a subset of those of Spark
 * SQL. For example, BYTE, SHORT, and INT in Spark SQL are all covered by INT32 in Parquet.
 */
private[parquet] class ParquetPrimitiveConverter(val updater: ParentContainerUpdater)
  extends PrimitiveConverter with HasParentContainerUpdater {

  // Each Parquet callback forwards straight to the matching typed setter on the updater.
  override def addBoolean(value: Boolean): Unit = updater.setBoolean(value)
  override def addInt(value: Int): Unit = updater.setInt(value)
  override def addLong(value: Long): Unit = updater.setLong(value)
  override def addFloat(value: Float): Unit = updater.setFloat(value)
  override def addDouble(value: Double): Unit = updater.setDouble(value)
  // Binary is materialized as a byte array (copies out of the Parquet Binary wrapper).
  override def addBinary(value: Binary): Unit = updater.set(value.getBytes)
}
/**
 * A [[ParquetRowConverter]] is used to convert Parquet records into Catalyst [[InternalRow]]s.
 * Since Catalyst `StructType` is also a Parquet record, this converter can be used as root
 * converter. Take the following Parquet type as an example:
 * {{{
 *   message root {
 *     required int32 f1;
 *     optional group f2 {
 *       required double f21;
 *       optional binary f22 (utf8);
 *     }
 *   }
 * }}}
 * 5 converters will be created:
 *
 * - a root [[ParquetRowConverter]] for [[org.apache.parquet.schema.MessageType]] `root`,
 *   which contains:
 *   - a [[ParquetPrimitiveConverter]] for required
 *     [[org.apache.parquet.schema.LogicalTypeAnnotation.intType(32, true)]] field `f1`, and
 *   - a nested [[ParquetRowConverter]] for optional [[GroupType]] `f2`, which contains:
 *     - a [[ParquetPrimitiveConverter]] for required
 *       [[org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.DOUBLE]] field `f21`, and
 *     - a [[ParquetStringConverter]] for optional
 *       [[org.apache.parquet.schema.LogicalTypeAnnotation.stringType()]] string field `f22`
 *
 * When used as a root converter, [[NoopUpdater]] should be used since root converters don't have
 * any "parent" container.
 *
 * @param schemaConverter A utility converter used to convert Parquet types to Catalyst types.
 * @param parquetType Parquet schema of Parquet records
 * @param catalystType Spark SQL schema that corresponds to the Parquet record type. User-defined
 *        types should have been expanded.
 * @param convertTz the optional time zone to convert to int96 data
 * @param datetimeRebaseMode the mode of rebasing date/timestamp from Julian to Proleptic Gregorian
 *                           calendar
 * @param int96RebaseMode the mode of rebasing INT96 timestamp from Julian to Proleptic Gregorian
 *                        calendar
 * @param updater An updater which propagates converted field values to the parent container
 */
private[parquet] class ParquetRowConverter(
    schemaConverter: ParquetToSparkSchemaConverter,
    parquetType: GroupType,
    catalystType: StructType,
    convertTz: Option[ZoneId],
    datetimeRebaseMode: LegacyBehaviorPolicy.Value,
    int96RebaseMode: LegacyBehaviorPolicy.Value,
    updater: ParentContainerUpdater)
  extends ParquetGroupConverter(updater) with Logging {

  // Column pruning may request fewer Parquet fields than the Catalyst schema has, never more.
  assert(
    parquetType.getFieldCount <= catalystType.length,
    s"""Field count of the Parquet schema is greater than the field count of the Catalyst schema:
       |
       |Parquet schema:
       |$parquetType
       |Catalyst schema:
       |${catalystType.prettyJson}
       """.stripMargin)

  assert(
    !catalystType.existsRecursively(_.isInstanceOf[UserDefinedType[_]]),
    s"""User-defined types in Catalyst schema should have already been expanded:
       |${catalystType.prettyJson}
       """.stripMargin)

  logDebug(
    s"""Building row converter for the following schema:
       |
       |Parquet form:
       |$parquetType
       |Catalyst form:
       |${catalystType.prettyJson}
       """.stripMargin)

  /**
   * Updater used together with field converters within a [[ParquetRowConverter]]. It propagates
   * converted field values to the `ordinal`-th cell in `currentRow`.
   */
  private final class RowUpdater(row: InternalRow, ordinal: Int) extends ParentContainerUpdater {
    override def set(value: Any): Unit = row(ordinal) = value
    override def setBoolean(value: Boolean): Unit = row.setBoolean(ordinal, value)
    override def setByte(value: Byte): Unit = row.setByte(ordinal, value)
    override def setShort(value: Short): Unit = row.setShort(ordinal, value)
    override def setInt(value: Int): Unit = row.setInt(ordinal, value)
    override def setLong(value: Long): Unit = row.setLong(ordinal, value)
    override def setDouble(value: Double): Unit = row.setDouble(ordinal, value)
    override def setFloat(value: Float): Unit = row.setFloat(ordinal, value)
  }

  // Mutable row reused for every record; reset in start() and published in end().
  private[this] val currentRow = new SpecificInternalRow(catalystType.map(_.dataType))

  /**
   * The [[InternalRow]] converted from an entire Parquet record.
   */
  def currentRecord: InternalRow = currentRow

  // NOTE(review): "crete" (sic) is the actual method name in DataSourceUtils at this version of
  // the codebase; do not "fix" the spelling here without also renaming the callee.
  private val dateRebaseFunc = DataSourceUtils.creteDateRebaseFuncInRead(
    datetimeRebaseMode, "Parquet")

  private val timestampRebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInRead(
    datetimeRebaseMode, "Parquet")

  private val int96RebaseFunc = DataSourceUtils.creteTimestampRebaseFuncInRead(
    int96RebaseMode, "Parquet INT96")

  // Converters for each field.
  private[this] val fieldConverters: Array[Converter with HasParentContainerUpdater] = {
    // (SPARK-31116) Use case insensitive map if spark.sql.caseSensitive is false
    // to prevent throwing IllegalArgumentException when searching catalyst type's field index
    val catalystFieldNameToIndex = if (SQLConf.get.caseSensitiveAnalysis) {
      catalystType.fieldNames.zipWithIndex.toMap
    } else {
      CaseInsensitiveMap(catalystType.fieldNames.zipWithIndex.toMap)
    }
    parquetType.getFields.asScala.map { parquetField =>
      val fieldIndex = catalystFieldNameToIndex(parquetField.getName)
      val catalystField = catalystType(fieldIndex)
      // Converted field value should be set to the `fieldIndex`-th cell of `currentRow`
      newConverter(parquetField, catalystField.dataType, new RowUpdater(currentRow, fieldIndex))
    }.toArray
  }

  // Updaters for each field.
  private[this] val fieldUpdaters: Array[ParentContainerUpdater] = fieldConverters.map(_.updater)

  override def getConverter(fieldIndex: Int): Converter = fieldConverters(fieldIndex)

  // Called by Parquet when a record ends: flush field updaters, then hand the row to the parent.
  override def end(): Unit = {
    var i = 0
    while (i < fieldUpdaters.length) {
      fieldUpdaters(i).end()
      i += 1
    }
    updater.set(currentRow)
  }

  // Called by Parquet when a record starts: null out every cell (absent optional fields would
  // otherwise keep the previous record's values), then notify field updaters.
  override def start(): Unit = {
    var i = 0
    val numFields = currentRow.numFields
    while (i < numFields) {
      currentRow.setNullAt(i)
      i += 1
    }
    i = 0
    while (i < fieldUpdaters.length) {
      fieldUpdaters(i).start()
      i += 1
    }
  }

  /**
   * Creates a converter for the given Parquet type `parquetType` and Spark SQL data type
   * `catalystType`. Converted values are handled by `updater`.
   */
  private def newConverter(
      parquetType: Type,
      catalystType: DataType,
      updater: ParentContainerUpdater): Converter with HasParentContainerUpdater = {

    def isUnsignedIntTypeMatched(bitWidth: Int): Boolean = {
      parquetType.getLogicalTypeAnnotation match {
        case i: IntLogicalTypeAnnotation if !i.isSigned => i.getBitWidth == bitWidth
        case _ => false
      }
    }

    catalystType match {
      // Unsigned INT32 is widened to Long since its values may exceed Int.MaxValue.
      case LongType if isUnsignedIntTypeMatched(32) =>
        new ParquetPrimitiveConverter(updater) {
          override def addInt(value: Int): Unit =
            updater.setLong(Integer.toUnsignedLong(value))
        }
      case BooleanType | IntegerType | LongType | FloatType | DoubleType | BinaryType =>
        new ParquetPrimitiveConverter(updater)

      case ByteType =>
        new ParquetPrimitiveConverter(updater) {
          override def addInt(value: Int): Unit =
            updater.setByte(value.asInstanceOf[ByteType#InternalType])
        }

      case ShortType =>
        new ParquetPrimitiveConverter(updater) {
          override def addInt(value: Int): Unit =
            updater.setShort(value.asInstanceOf[ShortType#InternalType])
        }

      // For INT32 backed decimals
      case _: DecimalType if parquetType.asPrimitiveType().getPrimitiveTypeName == INT32 =>
        parquetType.asPrimitiveType().getLogicalTypeAnnotation match {
          case decimalType: DecimalLogicalTypeAnnotation =>
            new ParquetIntDictionaryAwareDecimalConverter(
              decimalType.getPrecision, decimalType.getScale, updater)
          case _ =>
            // If the column is a plain INT32, we should pick the precision that can host the
            // largest INT32 value.
            new ParquetIntDictionaryAwareDecimalConverter(
              DecimalType.IntDecimal.precision, 0, updater)
        }

      // For unsigned int64
      case _: DecimalType if isUnsignedIntTypeMatched(64) =>
        new ParquetPrimitiveConverter(updater) {
          override def addLong(value: Long): Unit = {
            updater.set(Decimal(java.lang.Long.toUnsignedString(value)))
          }
        }

      // For INT64 backed decimals
      case t: DecimalType if parquetType.asPrimitiveType().getPrimitiveTypeName == INT64 =>
        parquetType.asPrimitiveType().getLogicalTypeAnnotation match {
          case decimalType: DecimalLogicalTypeAnnotation =>
            new ParquetLongDictionaryAwareDecimalConverter(
              decimalType.getPrecision, decimalType.getScale, updater)
          case _ =>
            // If the column is a plain INT64, we should pick the precision that can host the
            // largest INT64 value.
            new ParquetLongDictionaryAwareDecimalConverter(
              DecimalType.LongDecimal.precision, 0, updater)
        }

      // For BINARY and FIXED_LEN_BYTE_ARRAY backed decimals
      case t: DecimalType
        if parquetType.asPrimitiveType().getPrimitiveTypeName == FIXED_LEN_BYTE_ARRAY ||
           parquetType.asPrimitiveType().getPrimitiveTypeName == BINARY =>
        parquetType.asPrimitiveType().getLogicalTypeAnnotation match {
          case decimalType: DecimalLogicalTypeAnnotation =>
            new ParquetBinaryDictionaryAwareDecimalConverter(
              decimalType.getPrecision, decimalType.getScale, updater)
          case _ =>
            throw new RuntimeException(s"Unable to create Parquet converter for ${t.typeName} " +
              s"whose Parquet type is $parquetType without decimal metadata. Please read this " +
              "column/field as Spark BINARY type." )
        }

      case t: DecimalType =>
        throw new RuntimeException(
          s"Unable to create Parquet converter for decimal type ${t.json} whose Parquet type is " +
            s"$parquetType. Parquet DECIMAL type can only be backed by INT32, INT64, " +
            "FIXED_LEN_BYTE_ARRAY, or BINARY.")

      case StringType =>
        new ParquetStringConverter(updater)

      // TIMESTAMP_MICROS: stored value is already in microseconds, only rebasing is needed.
      case TimestampType
        if parquetType.getLogicalTypeAnnotation.isInstanceOf[TimestampLogicalTypeAnnotation] &&
           parquetType.getLogicalTypeAnnotation
             .asInstanceOf[TimestampLogicalTypeAnnotation].getUnit == TimeUnit.MICROS =>
        new ParquetPrimitiveConverter(updater) {
          override def addLong(value: Long): Unit = {
            updater.setLong(timestampRebaseFunc(value))
          }
        }

      // TIMESTAMP_MILLIS: convert to microseconds before rebasing.
      case TimestampType
        if parquetType.getLogicalTypeAnnotation.isInstanceOf[TimestampLogicalTypeAnnotation] &&
           parquetType.getLogicalTypeAnnotation
             .asInstanceOf[TimestampLogicalTypeAnnotation].getUnit == TimeUnit.MILLIS =>
        new ParquetPrimitiveConverter(updater) {
          override def addLong(value: Long): Unit = {
            val micros = DateTimeUtils.millisToMicros(value)
            updater.setLong(timestampRebaseFunc(micros))
          }
        }

      // INT96 timestamp doesn't have a logical type, here we check the physical type instead.
      case TimestampType if parquetType.asPrimitiveType().getPrimitiveTypeName == INT96 =>
        new ParquetPrimitiveConverter(updater) {
          // Converts nanosecond timestamps stored as INT96
          override def addBinary(value: Binary): Unit = {
            val julianMicros = ParquetRowConverter.binaryToSQLTimestamp(value)
            val gregorianMicros = int96RebaseFunc(julianMicros)
            val adjTime = convertTz.map(DateTimeUtils.convertTz(gregorianMicros, _, ZoneOffset.UTC))
              .getOrElse(gregorianMicros)
            updater.setLong(adjTime)
          }
        }

      case DateType =>
        new ParquetPrimitiveConverter(updater) {
          override def addInt(value: Int): Unit = {
            updater.set(dateRebaseFunc(value))
          }
        }

      // A repeated field that is neither contained by a `LIST`- or `MAP`-annotated group nor
      // annotated by `LIST` or `MAP` should be interpreted as a required list of required
      // elements where the element type is the type of the field.
      case t: ArrayType
        if !parquetType.getLogicalTypeAnnotation.isInstanceOf[ListLogicalTypeAnnotation] =>
        if (parquetType.isPrimitive) {
          new RepeatedPrimitiveConverter(parquetType, t.elementType, updater)
        } else {
          new RepeatedGroupConverter(parquetType, t.elementType, updater)
        }

      case t: ArrayType =>
        new ParquetArrayConverter(parquetType.asGroupType(), t, updater)

      case t: MapType =>
        new ParquetMapConverter(parquetType.asGroupType(), t, updater)

      case t: StructType =>
        val wrappedUpdater = {
          // SPARK-30338: avoid unnecessary InternalRow copying for nested structs:
          // There are two cases to handle here:
          //
          //  1. Parent container is a map or array: we must make a deep copy of the mutable row
          //     because this converter may be invoked multiple times per Parquet input record
          //     (if the map or array contains multiple elements).
          //
          //  2. Parent container is a struct: we don't need to copy the row here because either:
          //
          //     (a) all ancestors are structs and therefore no copying is required because this
          //         converter will only be invoked once per Parquet input record, or
          //     (b) some ancestor is struct that is nested in a map or array and that ancestor's
          //         converter will perform deep-copying (which will recursively copy this row).
          if (updater.isInstanceOf[RowUpdater]) {
            // `updater` is a RowUpdater, implying that the parent container is a struct.
            updater
          } else {
            // `updater` is NOT a RowUpdater, implying that the parent container a map or array.
            new ParentContainerUpdater {
              override def set(value: Any): Unit = {
                updater.set(value.asInstanceOf[SpecificInternalRow].copy())  // deep copy
              }
            }
          }
        }
        new ParquetRowConverter(
          schemaConverter,
          parquetType.asGroupType(),
          t,
          convertTz,
          datetimeRebaseMode,
          int96RebaseMode,
          wrappedUpdater)

      case t =>
        throw new RuntimeException(
          s"Unable to create Parquet converter for data type ${t.json} " +
            s"whose Parquet type is $parquetType")
    }
  }

  /**
   * Parquet converter for strings. A dictionary is used to minimize string decoding cost.
   */
  private final class ParquetStringConverter(updater: ParentContainerUpdater)
    extends ParquetPrimitiveConverter(updater) {

    // Lazily populated: dictionary id -> decoded UTF8String, so each distinct value is
    // decoded at most once per dictionary page.
    private var expandedDictionary: Array[UTF8String] = null

    override def hasDictionarySupport: Boolean = true

    override def setDictionary(dictionary: Dictionary): Unit = {
      this.expandedDictionary = Array.tabulate(dictionary.getMaxId + 1) { i =>
        UTF8String.fromBytes(dictionary.decodeToBinary(i).getBytes)
      }
    }

    override def addValueFromDictionary(dictionaryId: Int): Unit = {
      updater.set(expandedDictionary(dictionaryId))
    }

    override def addBinary(value: Binary): Unit = {
      // The underlying `ByteBuffer` implementation is guaranteed to be `HeapByteBuffer`, so here we
      // are using `Binary.toByteBuffer.array()` to steal the underlying byte array without copying
      // it.
      val buffer = value.toByteBuffer
      val offset = buffer.arrayOffset() + buffer.position()
      val numBytes = buffer.remaining()
      updater.set(UTF8String.fromBytes(buffer.array(), offset, numBytes))
    }
  }

  /**
   * Parquet converter for fixed-precision decimals.
   */
  private abstract class ParquetDecimalConverter(
      precision: Int, scale: Int, updater: ParentContainerUpdater)
    extends ParquetPrimitiveConverter(updater) {

    protected var expandedDictionary: Array[Decimal] = _

    override def hasDictionarySupport: Boolean = true

    override def addValueFromDictionary(dictionaryId: Int): Unit = {
      updater.set(expandedDictionary(dictionaryId))
    }

    // Converts decimals stored as INT32
    override def addInt(value: Int): Unit = {
      // `value: Long` is a type ascription: the Int is widened and handled by addLong.
      addLong(value: Long)
    }

    // Converts decimals stored as INT64
    override def addLong(value: Long): Unit = {
      updater.set(decimalFromLong(value))
    }

    // Converts decimals stored as either FIXED_LENGTH_BYTE_ARRAY or BINARY
    override def addBinary(value: Binary): Unit = {
      updater.set(decimalFromBinary(value))
    }

    protected def decimalFromLong(value: Long): Decimal = {
      Decimal(value, precision, scale)
    }

    protected def decimalFromBinary(value: Binary): Decimal = {
      if (precision <= Decimal.MAX_LONG_DIGITS) {
        // Constructs a `Decimal` with an unscaled `Long` value if possible.
        val unscaled = ParquetRowConverter.binaryToUnscaledLong(value)
        Decimal(unscaled, precision, scale)
      } else {
        // Otherwise, resorts to an unscaled `BigInteger` instead.
        Decimal(new BigDecimal(new BigInteger(value.getBytes), scale), precision, scale)
      }
    }
  }

  private class ParquetIntDictionaryAwareDecimalConverter(
      precision: Int, scale: Int, updater: ParentContainerUpdater)
    extends ParquetDecimalConverter(precision, scale, updater) {

    override def setDictionary(dictionary: Dictionary): Unit = {
      this.expandedDictionary = Array.tabulate(dictionary.getMaxId + 1) { id =>
        decimalFromLong(dictionary.decodeToInt(id).toLong)
      }
    }
  }

  private class ParquetLongDictionaryAwareDecimalConverter(
      precision: Int, scale: Int, updater: ParentContainerUpdater)
    extends ParquetDecimalConverter(precision, scale, updater) {

    override def setDictionary(dictionary: Dictionary): Unit = {
      this.expandedDictionary = Array.tabulate(dictionary.getMaxId + 1) { id =>
        decimalFromLong(dictionary.decodeToLong(id))
      }
    }
  }

  private class ParquetBinaryDictionaryAwareDecimalConverter(
      precision: Int, scale: Int, updater: ParentContainerUpdater)
    extends ParquetDecimalConverter(precision, scale, updater) {

    override def setDictionary(dictionary: Dictionary): Unit = {
      this.expandedDictionary = Array.tabulate(dictionary.getMaxId + 1) { id =>
        decimalFromBinary(dictionary.decodeToBinary(id))
      }
    }
  }

  /**
   * Parquet converter for arrays.  Spark SQL arrays are represented as Parquet lists.  Standard
   * Parquet lists are represented as a 3-level group annotated by `LIST`:
   * {{{
   *   <list-repetition> group <name> (LIST) {            <-- parquetSchema points here
   *     repeated group list {
   *       <element-repetition> <element-type> element;
   *     }
   *   }
   * }}}
   * The `parquetSchema` constructor argument points to the outermost group.
   *
   * However, before this representation is standardized, some Parquet libraries/tools also use
   * some non-standard formats to represent list-like structures.  Backwards-compatibility rules
   * for handling these cases are described in Parquet format spec.
   *
   * @see https://github.com/apache/parquet-format/blob/master/LogicalTypes.md#lists
   */
  private final class ParquetArrayConverter(
      parquetSchema: GroupType,
      catalystSchema: ArrayType,
      updater: ParentContainerUpdater)
    extends ParquetGroupConverter(updater) {

    private[this] val currentArray = ArrayBuffer.empty[Any]

    private[this] val elementConverter: Converter = {
      val repeatedType = parquetSchema.getType(0)
      val elementType = catalystSchema.elementType

      // At this stage, we're not sure whether the repeated field maps to the element type or is
      // just the syntactic repeated group of the 3-level standard LIST layout. Take the following
      // Parquet LIST-annotated group type as an example:
      //
      //    optional group f (LIST) {
      //      repeated group list {
      //        optional group element {
      //          optional int32 element;
      //        }
      //      }
      //    }
      //
      // This type is ambiguous:
      //
      // 1. When interpreted as a standard 3-level layout, the `list` field is just the syntactic
      //    group, and the entire type should be translated to:
      //
      //      ARRAY<STRUCT<element: INT>>
      //
      // 2. On the other hand, when interpreted as a non-standard 2-level layout, the `list` field
      //    represents the element type, and the entire type should be translated to:
      //
      //      ARRAY<STRUCT<element: STRUCT<element: INT>>>
      //
      // Here we try to convert field `list` into a Catalyst type to see whether the converted type
      // matches the Catalyst array element type. If it doesn't match, then it's case 1; otherwise,
      // it's case 2.
      val guessedElementType = schemaConverter.convertField(repeatedType)

      if (DataType.equalsIgnoreCompatibleNullability(guessedElementType, elementType)) {
        // If the repeated field corresponds to the element type, creates a new converter using the
        // type of the repeated field.
        newConverter(repeatedType, elementType, new ParentContainerUpdater {
          override def set(value: Any): Unit = currentArray += value
        })
      } else {
        // If the repeated field corresponds to the syntactic group in the standard 3-level Parquet
        // LIST layout, creates a new converter using the only child field of the repeated field.
        assert(!repeatedType.isPrimitive && repeatedType.asGroupType().getFieldCount == 1)
        new ElementConverter(repeatedType.asGroupType().getType(0), elementType)
      }
    }

    override def getConverter(fieldIndex: Int): Converter = elementConverter

    override def end(): Unit = updater.set(new GenericArrayData(currentArray.toArray))

    override def start(): Unit = currentArray.clear()

    /** Array element converter */
    private final class ElementConverter(parquetType: Type, catalystType: DataType)
      extends GroupConverter {

      private var currentElement: Any = _

      private[this] val converter =
        newConverter(parquetType, catalystType, new ParentContainerUpdater {
          override def set(value: Any): Unit = currentElement = value
        })

      override def getConverter(fieldIndex: Int): Converter = converter

      override def end(): Unit = currentArray += currentElement

      override def start(): Unit = currentElement = null
    }
  }

  /** Parquet converter for maps */
  private final class ParquetMapConverter(
      parquetType: GroupType,
      catalystType: MapType,
      updater: ParentContainerUpdater)
    extends ParquetGroupConverter(updater) {

    // Keys and values are accumulated in parallel; positions pair them up.
    private[this] val currentKeys = ArrayBuffer.empty[Any]
    private[this] val currentValues = ArrayBuffer.empty[Any]

    private[this] val keyValueConverter = {
      val repeatedType = parquetType.getType(0).asGroupType()
      new KeyValueConverter(
        repeatedType.getType(0),
        repeatedType.getType(1),
        catalystType.keyType,
        catalystType.valueType)
    }

    override def getConverter(fieldIndex: Int): Converter = keyValueConverter

    override def end(): Unit = {
      // The parquet map may contains null or duplicated map keys. When it happens, the behavior is
      // undefined.
      // TODO (SPARK-26174): disallow it with a config.
      updater.set(
        new ArrayBasedMapData(
          new GenericArrayData(currentKeys.toArray),
          new GenericArrayData(currentValues.toArray)))
    }

    override def start(): Unit = {
      currentKeys.clear()
      currentValues.clear()
    }

    /** Parquet converter for key-value pairs within the map. */
    private final class KeyValueConverter(
        parquetKeyType: Type,
        parquetValueType: Type,
        catalystKeyType: DataType,
        catalystValueType: DataType)
      extends GroupConverter {

      private var currentKey: Any = _

      private var currentValue: Any = _

      private[this] val converters = Array(
        // Converter for keys
        newConverter(parquetKeyType, catalystKeyType, new ParentContainerUpdater {
          override def set(value: Any): Unit = currentKey = value
        }),

        // Converter for values
        newConverter(parquetValueType, catalystValueType, new ParentContainerUpdater {
          override def set(value: Any): Unit = currentValue = value
        }))

      override def getConverter(fieldIndex: Int): Converter = converters(fieldIndex)

      override def end(): Unit = {
        currentKeys += currentKey
        currentValues += currentValue
      }

      override def start(): Unit = {
        currentKey = null
        currentValue = null
      }
    }
  }

  // Shared accumulation logic for the two unannotated-repeated-field converters below.
  private trait RepeatedConverter {
    private[this] val currentArray = ArrayBuffer.empty[Any]

    protected def newArrayUpdater(updater: ParentContainerUpdater) = new ParentContainerUpdater {
      override def start(): Unit = currentArray.clear()
      override def end(): Unit = updater.set(new GenericArrayData(currentArray.toArray))
      override def set(value: Any): Unit = currentArray += value
    }
  }

  /**
   * A primitive converter for converting unannotated repeated primitive values to required arrays
   * of required primitives values.
   */
  private final class RepeatedPrimitiveConverter(
      parquetType: Type,
      catalystType: DataType,
      parentUpdater: ParentContainerUpdater)
    extends PrimitiveConverter with RepeatedConverter with HasParentContainerUpdater {

    val updater: ParentContainerUpdater = newArrayUpdater(parentUpdater)

    private[this] val elementConverter: PrimitiveConverter =
      newConverter(parquetType, catalystType, updater).asPrimitiveConverter()

    override def addBoolean(value: Boolean): Unit = elementConverter.addBoolean(value)
    override def addInt(value: Int): Unit = elementConverter.addInt(value)
    override def addLong(value: Long): Unit = elementConverter.addLong(value)
    override def addFloat(value: Float): Unit = elementConverter.addFloat(value)
    override def addDouble(value: Double): Unit = elementConverter.addDouble(value)
    override def addBinary(value: Binary): Unit = elementConverter.addBinary(value)

    override def setDictionary(dict: Dictionary): Unit = elementConverter.setDictionary(dict)
    override def hasDictionarySupport: Boolean = elementConverter.hasDictionarySupport
    override def addValueFromDictionary(id: Int): Unit = elementConverter.addValueFromDictionary(id)
  }

  /**
   * A group converter for converting unannotated repeated group values to required arrays of
   * required struct values.
   */
  private final class RepeatedGroupConverter(
      parquetType: Type,
      catalystType: DataType,
      parentUpdater: ParentContainerUpdater)
    extends GroupConverter with HasParentContainerUpdater with RepeatedConverter {

    val updater: ParentContainerUpdater = newArrayUpdater(parentUpdater)

    private[this] val elementConverter: GroupConverter =
      newConverter(parquetType, catalystType, updater).asGroupConverter()

    override def getConverter(field: Int): Converter = elementConverter.getConverter(field)
    override def end(): Unit = elementConverter.end()
    override def start(): Unit = elementConverter.start()
  }
}
private[parquet] object ParquetRowConverter {

  /**
   * Interprets `binary` as a big-endian two's-complement integer and returns it as an
   * unscaled Long, sign-extended from however many bytes are actually present.
   */
  def binaryToUnscaledLong(binary: Binary): Long = {
    // The underlying `ByteBuffer` implementation is guaranteed to be `HeapByteBuffer`, so here
    // we are using `Binary.toByteBuffer.array()` to steal the underlying byte array without
    // copying it.
    val buf = binary.toByteBuffer
    val backing = buf.array()
    val first = buf.arrayOffset() + buf.position()
    val last = buf.arrayOffset() + buf.limit()
    // Fold the bytes in, most significant byte first.
    val raw = (first until last).foldLeft(0L) { (acc, idx) =>
      (acc << 8) | (backing(idx) & 0xff)
    }
    // Shift left then arithmetic-shift right to sign-extend from the populated bits.
    val unusedBits = 64 - 8 * (last - first)
    (raw << unusedBits) >> unusedBits
  }

  /**
   * Decodes a 12-byte INT96 Parquet timestamp: 8 little-endian bytes of nanos-of-day
   * followed by a 4-byte Julian day, combined via `DateTimeUtils.fromJulianDay`.
   */
  def binaryToSQLTimestamp(binary: Binary): Long = {
    assert(binary.length() == 12, s"Timestamps (with nanoseconds) are expected to be stored in" +
      s" 12-byte long binaries. Found a ${binary.length()}-byte binary instead.")
    val littleEndian = binary.toByteBuffer.order(ByteOrder.LITTLE_ENDIAN)
    // Read order matters: nanos-of-day (long) precedes the Julian day (int).
    val timeOfDayNanos = littleEndian.getLong
    val julianDay = littleEndian.getInt
    DateTimeUtils.fromJulianDay(julianDay, timeOfDayNanos)
  }
}
| maropu/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowConverter.scala | Scala | apache-2.0 | 32,992 |
/*
* # Trove
*
* This file is part of Trove - A FREE desktop budgeting application that
* helps you track your finances, FREES you from complex budgeting, and
* enables you to build your TROVE of savings!
*
* Copyright © 2016-2021 Eric John Fredericks.
*
* Trove is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Trove is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Trove. If not, see <http://www.gnu.org/licenses/>.
*/
package trove.ui
import javafx.beans.value.ObservableValue
import scalafx.scene.Scene
import scalafx.scene.control.Alert.AlertType
import scalafx.scene.control._
import scalafx.scene.image.ImageView
import scalafx.scene.input.KeyCode
import scalafx.scene.layout.BorderPane
import trove.core.{Project, Trove}
import trove.ui.ButtonTypes.{No, Yes}
import trove.ui.fxext.{AppModalAlert, Menu, MenuItem}
import trove.ui.tracking.TrackingPane
/**
 * The main scene shown while a project is open: a tab pane exposing the major
 * application areas (home, tracking, cash flows, reports, trove) plus File/Help menus.
 *
 * @param eventSubscriberGroup group id used to scope event subscriptions for this scene
 * @param project the currently open project
 */
private[ui] class ActiveProjectScene(eventSubscriberGroup: Int, project: Project) extends Scene {

  private[this] val trackingPane = new TrackingPane(eventSubscriberGroup, project)

  // Five fixed, non-closable top-level tabs; only "Tracking" has content wired up here.
  private[this] val tabPane = new TabPane {
    tabs = Seq(
      new Tab {
        text = "Home"
        tooltip = "Overall view"
        closable = false
        tabMaxHeight = ActiveProjectTabHeight
        graphic = new ImageView(getImage("pie-chart-40.png", ActiveProjectTabImageSize))
      },
      new Tab {
        text = "Tracking"
        tooltip = "Track individual accounts and transactions"
        closable = false
        content = trackingPane
        tabMaxHeight = ActiveProjectTabHeight
        graphic = new ImageView(getImage("ledger-40.png", ActiveProjectTabImageSize))
      },
      new Tab {
        text = "Cash Flows"
        tooltip = "Create cash flow plans"
        closable = false
        tabMaxHeight = ActiveProjectTabHeight
        graphic = new ImageView(getImage("plumbing-40.png", ActiveProjectTabImageSize))
      },
      new Tab {
        text = "Reports"
        tooltip = "Create and view customized reports"
        closable = false
        tabMaxHeight = ActiveProjectTabHeight
        graphic = new ImageView(getImage("report-card-40.png", ActiveProjectTabImageSize))
      },
      new Tab {
        text = "Your Trove"
        tooltip = "See where you stand on your savings goals"
        closable = false
        tabMaxHeight = ActiveProjectTabHeight
        graphic = new ImageView(getImage("gold-pot-40.png", ActiveProjectTabImageSize))
      }
    )
  }

  private[this] val fileMenu = new Menu("_File", Some(KeyCode.F)) {
    items = Seq(
      new MenuItem("_Close Project", Some(KeyCode.C)) {
        // Only close after explicit user confirmation.
        onAction = _ => if(confirmCloseCurrentProjectWithUser()) {
          Trove.projectService.closeCurrentProject()
        }
      },
      new MenuItem("E_xit Trove", Some(KeyCode.X)) {
        onAction = _ => Main.conditionallyQuit()
      }
    )
  }

  private[this] val helpMenu = new Menu("_Help", Some(KeyCode.H)) {
    items = Seq(
      new MenuItem("_About", Some(KeyCode.A)) {
        onAction = _ => new HelpAboutDialog().showAndWait()
      }
    )
  }

  root = new BorderPane {
    center = tabPane
    top = new MenuBar {
      menus = Seq(fileMenu, helpMenu)
    }
  }

  /**
   * Shows a modal Yes/No confirmation dialog for closing the current project.
   *
   * @return true only if the user explicitly chose Yes
   */
  private[this] def confirmCloseCurrentProjectWithUser(): Boolean = {
    val result = new AppModalAlert(AlertType.Confirmation) {
      headerText = "Close Project?"
      buttonTypes = Seq(Yes,No)
      contentText = s"Are you sure you want to close project '${project.name}?'"
    }.showAndWait()
    // Option#contains replaces the former map/fold chain; dismissing the dialog (None)
    // counts as "No", exactly as before.
    result.contains(Yes)
  }

  // The height and width listener will keep the divider positions where
  // we want them - so that when the scene is resized, the dividers don't move
  private[this] val resizeListener = new javafx.beans.value.ChangeListener[Number] {
    override def changed(observableValue: ObservableValue[_ <: Number], t: Number, t1: Number): Unit = {
      trackingPane.dividerPositions = 0.05
    }
  }

  height.addListener(resizeListener)
  width.addListener(resizeListener)
}
| emanchgo/trove | src/main/scala/trove/ui/ActiveProjectScene.scala | Scala | gpl-3.0 | 4,527 |
package petrovich.data
import petrovich.rules
import petrovich.rules.{RuleSets, Tag}
/** A component of a person's description; parts can be chained into lists with `::`. */
sealed trait PersonPart {
  /** Builds the two-element list `x :: this :: Nil` (note `::` is right-associative). */
  def ::(x: PersonPart): List[PersonPart] = x :: this :: Nil
}
object PersonPart {

  /** A name component that can be rewritten (e.g. inflected) while keeping its kind. */
  sealed trait NamePart extends PersonPart {
    /** Returns a copy of this part with its string value transformed by `f`. */
    def transform(f: String ⇒ String): NamePart
    /** The kind of this name part, used to select the matching rule set. */
    def tpe: NamePartType = this match {
      case _: FirstName ⇒ NamePartType.FirstName
      case _: MiddleName ⇒ NamePartType.MiddleName
      case _: LastName ⇒ NamePartType.LastName
    }
  }

  object NamePart {

    // Pieces of a complex (double-barrelled) name are joined by this delimiter.
    private val ComplexNameDelimiter = "-"

    implicit class NamePartOps(val self: NamePart) extends AnyVal {
      /**
       * Inflects this name part into the given grammatical case for the given gender.
       * Each delimiter-separated piece of a complex name is inflected independently;
       * the first piece is additionally tagged with [[Tag.FirstWord]].
       */
      def inflect(gender: Gender, gcase: Case): NamePart = {
        self transform { s ⇒
          val ruleSets: RuleSets = rules.ruleSetsByNamePartType(self.tpe)
          if (s.contains(ComplexNameDelimiter)) {
            // This is a complex name.
            // Consistency fix: split on the shared delimiter constant instead of a
            // hard-coded '-' (behavior unchanged since the constant is "-").
            val complexNameParts = s.split(ComplexNameDelimiter).toList
            val firstPart = complexNameParts.head
            val res = ruleSets(gender, firstPart, List(Tag.FirstWord))(firstPart, gcase) :: {
              for (part ← complexNameParts.tail)
                yield ruleSets(gender, part, Nil)(part, gcase)
            }
            res.mkString(ComplexNameDelimiter)
          }
          else {
            // This is a simple name
            ruleSets(gender, s, Nil)(s, gcase)
          }
        }
      }
    }
  }

  /** A given (first) name. */
  case class FirstName(value: String) extends NamePart {
    def transform(f: String ⇒ String): FirstName = FirstName(f(value))
  }

  /** A patronymic / middle name. */
  case class MiddleName(value: String) extends NamePart {
    def transform(f: String ⇒ String): MiddleName = MiddleName(f(value))
  }

  /** A family (last) name. */
  case class LastName(value: String) extends NamePart {
    def transform(f: String ⇒ String): LastName = LastName(f(value))
  }

  /** Grammatical gender used to choose inflection rules. */
  sealed trait Gender extends PersonPart
  object Gender {
    case object Male extends Gender
    case object Female extends Gender
    case object Androgynous extends Gender
  }
}
| fomkin/petrovich-scala | petrovich-scala/src/main/scala/petrovich/data/PersonPart.scala | Scala | apache-2.0 | 2,006 |
package com.stovokor.editor.model
import earcut4j.Earcut
import scala.collection.JavaConversions._
import com.jme3.math.FastMath
import com.jme3.math.Vector2f
import java.util.Objects
object Polygon {
  // Explicit factory mirroring the case-class constructor; case classes already
  // synthesize an equivalent apply, this one is kept for source compatibility.
  def apply(points: List[Point]) = new Polygon(points)
}
/**
 * An immutable 2D polygon. The supplied points may wind in either direction;
 * `pointsSorted` normalizes them to clockwise order so that derived structures
 * (edges, paths, cuts) behave consistently.
 */
case class Polygon(val pointsUnsorted: List[Point]) {

  override lazy val hashCode = Objects.hash(pointsSorted)

  /** The polygon's points normalized to clockwise winding. */
  lazy val pointsSorted = {
    if (isClockwise) pointsUnsorted
    else pointsUnsorted.reverse
  }

  /** Closed ring of edges joining consecutive sorted points (last back to first). */
  lazy val lines = {
    ((pointsSorted zip pointsSorted.tail) map ((p) => Line(p._1, p._2))) ++ List(Line(pointsSorted.last, pointsSorted.head))
  }

  // Offset added to y-coordinates so the area terms below stay positive.
  private val limY = 10000000f

  /** Whether the points as supplied wind clockwise (compares the area under the two hulls). */
  def isClockwise = {
    def areaBelow(a: Point, b: Point) = 0.5f * (b.x - a.x) * (b.y + a.y + 2 * limY)
    val sorted = pointsUnsorted.sortBy(_.x)
    val leftmost = sorted.head
    val rightmost = sorted.last
    val (path1, path2) = findPathsBetween(leftmost, rightmost)
    val area1 = path1.sliding(2).map(ab => areaBelow(ab(0), ab(1))).sum
    val area2 = path2.sliding(2).map(ab => areaBelow(ab(0), ab(1))).sum
    val diff = area1 - area2
    diff > 0 && diff < limY
  }

  /** Splits the given point ring into the two paths that join `p1` and `p2`. */
  def findPathsBetween(points: List[Point], p1: Point, p2: Point) = {
    val i1 = points.indexOf(p1)
    val i2 = points.indexOf(p2)
    val path1 =
      if (i1 < i2) points.slice(i1, i2 + 1)
      else points.slice(i1, points.size) ++ points.slice(0, i2 + 1)
    val path2 = (
      if (i1 > i2) points.slice(i2, i1 + 1)
      else points.slice(i2, points.size) ++ points.slice(0, i1 + 1)).reverse
    (path1, path2)
  }

  /**
   * path1 = p1->p2 Can be counter clockwise or clockwise
   * path2 = p2->p1 The second one is the way back
   */
  def findPathsBetween(p1: Point, p2: Point): (List[Point], List[Point]) =
    findPathsBetween(pointsUnsorted, p1, p2)

  /**
   * Same but with sorted points in clockwise direction.
   * Paths start and end in the same points.
   */
  def findPathsBetweenSorted(p1: Point, p2: Point): (List[Point], List[Point]) =
    findPathsBetween(pointsSorted, p1, p2)

  /**
   * Replaces vertex `from` with `to`. If `to` is already a vertex the two
   * collapse (`from` is dropped); if `from` is absent or equal to `to`,
   * this polygon is returned unchanged.
   */
  def changePoint(from: Point, to: Point): Polygon = {
    val idx = pointsSorted.indexOf(from)
    if (from == to || idx == -1) {
      this
    } else if (pointsSorted.contains(to)) {
      Polygon(pointsSorted.filterNot(_ == from))
    } else {
      Polygon(pointsSorted.updated(idx, to))
    }
  }

  /** Inserts a new vertex on edge `between`, at fraction `factor` along it. */
  def addPoint(between: Line, factor: Float): Polygon = {
    println(s"Splitting line $between")
    val idxa = pointsSorted.indexOf(between.a)
    val idxb = pointsSorted.indexOf(between.b)
    val (idx1, idx2) = (Math.min(idxa, idxb), Math.max(idxa, idxb))
    val newPoint = between.split(factor)._1.b
    // The split edge is either the closing edge (first/last points) or interior.
    if (idx1 == 0 && idx2 == pointsSorted.size - 1)
      Polygon(pointsSorted ++ List(newPoint))
    else
      Polygon(
        (pointsSorted.slice(0, idx1 + 1) ++
          List(newPoint)) ++
          pointsSorted.slice(idx2, pointsSorted.size))
  }

  /** Edges shared with `other`, matching in either direction. */
  def borderWith(other: Polygon): List[Line] = {
    val otherLines = other.lines
    lines.filter(l => otherLines.contains(l) || otherLines.contains(l.reverse))
  }

  // if the point is right on the border, the result is undetermined!
  def inside(point: Point): Boolean = {
    // ray casting — https://wrf.ecse.rpi.edu//Research/Short_Notes/pnpoly.html
    lines
      .map(line => (line.a.y > point.y) != (line.b.y > point.y) &&
        (point.x < (line.b.x - line.a.x) * (point.y - line.a.y) /
          (line.b.y - line.a.y) + line.a.x))
      .count(identity) % 2 == 1
  }

  // when all the points of the other polygon are inside this polygon
  def inside(other: Polygon): Boolean = {
    other.pointsSorted.forall(inside)
  }

  /** The points of a path excluding its two endpoints (empty for paths shorter than 3). */
  def innerPoints(ps: List[Point]) = if (ps.length < 3) List() else ps.slice(1, ps.length - 1)

  /** A two-point cut is always treated as inside; longer cuts need an interior point inside. */
  def cutInside(cut: List[Point]) = cut.size == 2 || innerPoints(cut).find(inside).isDefined

  /** Edges this polygon has in common with `other`, in either direction. */
  def sharedLines(other: Polygon): List[Line] = {
    val otherLines = (other.lines.flatMap(_.andReverse)).toSet
    lines.filter(otherLines.contains)
  }

  /**
   * Divides this polygon into two along the given cutting path. The cut must
   * have at least two points, start and end on existing vertices, and not run
   * entirely along the border — otherwise the polygon is returned unchanged.
   */
  def divideBy(cut: List[Point]): List[Polygon] = {
    // Cut lies inside the polygon: both halves are rebuilt from a boundary path
    // plus the interior of the cut.
    def createPolygons(path1: List[Point], path2: List[Point], cut: List[Point]) = {
      List(Polygon(path1 ++ innerPoints(cut.reverse)),
        Polygon(innerPoints(cut.reverse) ++ path2)) // I don't understand why reverse here
    }
    // Cut lies outside: try extending the polygon with the cut; fall back to the
    // other boundary path if the candidate swallows it.
    def extend(path1: List[Point], path2: List[Point], cut: List[Point]): List[Polygon] = {
      // assume the cut is outside and goes in path1 direction
      val candidate = Polygon(path2 ++ innerPoints(cut.reverse))
      if (candidate.cutInside(path1)) { // candidate was wrong
        List(Polygon(path1 ++ innerPoints(cut.reverse)), this)
      } else {
        List(candidate, this)
      }
    }
    // Bug fix: this guard previously had no else-branch, so its List(this)
    // result was discarded and invalid cuts fell through and were divided
    // anyway. It now short-circuits as the log message always intended.
    if (cut.size < 2 ||
      !pointsUnsorted.contains(cut.head) ||
      !pointsUnsorted.contains(cut.last) ||
      cut.sliding(2).map(s => Line(s(0), s(1))).forall(lines.contains)) {
      println(s"Cannot divide polygon, cutting line is border")
      List(this)
    } else {
      val isInside = cutInside(cut)
      val (path1, path2) = findPathsBetweenSorted(cut.head, cut.last)
      val cutSorted = if (path1.head == cut.head) cut else cut.reverse // same dir as path1
      if (isInside) createPolygons(path1, path2, cutSorted)
      else extend(path1, path2, cutSorted)
    }
  }

  /** True when there are too few points to enclose any area. */
  def isDegenerate = pointsSorted.size < 3

  /** Axis-aligned bounding box of all vertices. */
  lazy val boundBox: BoundBox = {
    val firstPoint = pointsSorted.head
    pointsSorted.foldLeft(BoundBox(firstPoint, firstPoint))((box, point) => {
      BoundBox(
        Point(box.from.x.min(point.x), box.from.y.min(point.y)),
        Point(box.to.x.max(point.x), box.to.y.max(point.y)))
    })
  }

  /** Arithmetic mean of the vertices (vertex centroid, not the area centroid). */
  lazy val center: Point = {
    val count = pointsSorted.size
    val sum = pointsSorted
      .foldLeft(Point(0f, 0f))((sum, point) => Point(sum.x + point.x, sum.y + point.y))
    Point(sum.x / count, sum.y / count)
  }
}
object Triangle {
  // Convenience factory so callers can write Triangle(a, b, c).
  def apply(p1: Point, p2: Point, p3: Point) = new Triangle(p1, p2, p3)
}
// A three-vertex polygon with cheap winding queries.
// NOTE(review): winding delegates to jME3's FastMath.counterClockwise, which is
// assumed to return -1 for clockwise point order — confirm against the jME3 docs.
class Triangle(val p1: Point, val p2: Point, val p3: Point) extends Polygon(List(p1, p2, p3)) {
  /** This triangle with clockwise winding (reversed if necessary). */
  def asClockwise = if (isClockwise) this else reverse
  /** This triangle with counter-clockwise winding (reversed if necessary). */
  def asCounterClockwise = if (!isClockwise) this else reverse
  /** The same triangle with the vertex order flipped. */
  def reverse = Triangle(p3, p2, p1)
  // Overrides Polygon's area-based test with an exact orientation predicate.
  override def isClockwise = FastMath.counterClockwise(
    new Vector2f(p1.x, p1.y),
    new Vector2f(p2.x, p2.y),
    new Vector2f(p3.x, p3.y)) == -1
}
| jcfandino/leveleditor | src/main/scala/com/stovokor/editor/model/Polygon.scala | Scala | bsd-3-clause | 6,514 |
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.github.microburn.integration.support.kanban
import java.util.Date
import net.liftweb.actor.LAFuture
import org.github.microburn.domain.MajorSprintDetails
import org.github.microburn.domain.actors.{ProjectActor, UpdateSprint}
import org.github.microburn.integration.Integration
import scala.concurrent.duration.FiniteDuration
// Integration that feeds kanban board state into the project's sprint model:
// on each update it asks the simulated scrum actor for the current sprint's
// board state and, when one exists, forwards it to the project actor.
class KanbanIntegration(protected val boardStateProvider: BoardStateProvider,
                        protected val initializationTimeout: FiniteDuration)
                       (protected val projectActor: ProjectActor)
  extends Integration
  with ScrumSimulation {

  import org.github.microburn.util.concurrent.FutureEnrichments._
  import org.github.microburn.util.concurrent.ActorEnrichments._

  override def updateProject(implicit timestamp: Date): LAFuture[_] = {
    for {
      // ask the scrum simulator for the current sprint's board state; the
      // result is optional (there may be no active sprint)
      fetchedCurrentSprintsBoardState <- (scrumSimulator ?? FetchCurrentSprintsBoardState)
        .mapTo[Option[FetchedBoardState]]
        .withLoggingFinished { state => s"fetched sprint state: ${state.map(_.toString)}" }
      // only when a state was fetched, push it to the project actor as a sprint update
      updateResult <- fetchedCurrentSprintsBoardState.map { fetchedState =>
        projectActor ?? UpdateSprint(fetchedState.sprintId, fetchedState.userStories, fetchedState.details, timestamp)
      }.toFutureOfOption
    } yield updateResult
  }
}
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.tools.utils
import com.beust.jcommander.ParameterException
import com.typesafe.config.Config
import org.locationtech.geomesa.convert.{ConfArgs, ConverterConfigResolver}
import org.locationtech.geomesa.utils.geotools.{SftArgResolver, SftArgs}
import org.opengis.feature.simple.SimpleFeatureType
/**
* Wrapper for SFT and Config parsing that throws ParameterExceptions for JCommander
*/
object CLArgResolver {

  /**
   * Resolves a simple feature type from a spec argument.
   *
   * @throws ParameterException if the SFT cannot be parsed
   * @return the SFT parsed from the Args
   */
  @throws[ParameterException]
  def getSft(specArg: String, featureName: String = null): SimpleFeatureType =
    SftArgResolver.getArg(SftArgs(specArg, featureName)).fold(
      error => throw new ParameterException(error),
      sft => sft
    )

  /**
   * Resolves a converter configuration from a config argument.
   *
   * @throws ParameterException if the config cannot be parsed
   * @return the converter config parsed from the args
   */
  @throws[ParameterException]
  def getConfig(configArg: String): Config =
    ConverterConfigResolver.getArg(ConfArgs(configArg)).fold(
      error => throw new ParameterException(error),
      config => config
    )
}
| elahrvivaz/geomesa | geomesa-tools/src/main/scala/org/locationtech/geomesa/tools/utils/CLArgResolver.scala | Scala | apache-2.0 | 1,680 |
package com.azavea.math.plugin
import scala.tools.nsc
import nsc.Global
import nsc.Phase
import nsc.plugins.Plugin
import nsc.plugins.PluginComponent
import nsc.transform.Transform
import nsc.transform.InfoTransform
import nsc.transform.TypingTransformers
import nsc.symtab.Flags._
import nsc.ast.TreeDSL
import nsc.typechecker
/**
* Our shiny compiler plugin.
*/
/**
 * Compiler plugin entry point: registers the phase that rewrites
 * com.azavea.math.Numeric infix operations into direct type-class calls.
 */
class OptimizedNumeric(val global: Global) extends Plugin {
  val name: String = "optimized-numeric"
  val description: String = "Optimizes com.azavea.math.Numeric usage."
  val components: List[PluginComponent] = new RewriteInfixOps(this, global) :: Nil
}
/**
* This component turns things like:
* 1. new FastNumericOps[T](m)(implicit ev).+(n)
* 2. com.azavea.math.FastImplicits.infixOps[T](m)(implicit ev).*(n)
*
* Into:
* 1. ev.plus(m, n)
* 2. ev.times(m, n)
*/
// Runs right after the typer and pattern-matches on typed trees to strip the
// implicit Ops wrappers, replacing e.g. `infixOps(m)(ev) + n` with `ev.plus(m, n)`.
class RewriteInfixOps(plugin:Plugin, val global:Global) extends PluginComponent
  with Transform with TypingTransformers with TreeDSL {

  import global._
  import typer.typed

  // set to true to print a warning for each transform
  val debugging = false

  // Maps the (encoded) method name on the Ops wrapper to the corresponding
  // Numeric type-class method.
  // TODO: maybe look up the definition of op and automatically figure mapping
  val unops = Map(
    newTermName("abs") -> "abs",
    newTermName("unary_$minus") -> "negate",
    newTermName("signum") -> "signum"
  )

  // Binary operators, keyed by their JVM-encoded symbolic names
  // (e.g. "$plus" is `+`, "$less$eq" is `<=`).
  val binops = Map(
    newTermName("compare") -> "compare",
    newTermName("equiv") -> "equiv",
    newTermName("max") -> "max",
    newTermName("min") -> "min",
    newTermName("$less$eq$greater") -> "compare",
    newTermName("$div") -> "div",
    newTermName("$eq$eq$eq") -> "equiv",
    newTermName("$bang$eq$eq") -> "nequiv",
    newTermName("$greater") -> "gt",
    newTermName("$greater$eq") -> "gteq",
    newTermName("$less") -> "lt",
    newTermName("$less$eq") -> "lteq",
    newTermName("$minus") -> "minus",
    newTermName("$percent") -> "mod",
    newTermName("$plus") -> "plus",
    newTermName("$times") -> "times",
    newTermName("$times$times") -> "pow"
  )

  val runsAfter = List("typer");
  val phaseName = "optimized-numeric"

  def newTransformer(unit:CompilationUnit) = new MyTransformer(unit)

  // Determine if two type are equivalent (ignoring aliases, constant types and widening)
  def equivalentTypes(t1:Type, t2:Type) = {
    t1.dealias.deconst.widen =:= t2.dealias.deconst.widen
  }

  // TODO: figure out better type matching for Numeric, e.g. a.tpe <:< b.tpe
  val numericClass = definitions.getClass("com.azavea.math.Numeric")
  def isNumeric(t:Type) = t.typeSymbol == numericClass.tpe.typeSymbol

  // For built-in types, figure out whether or not we have a "fast" conversion method
  val BigIntClass = definitions.getClass("scala.math.BigInt")
  val BigDecimalClass = definitions.getClass("scala.math.BigDecimal")

  // Returns the name of the direct Numeric conversion method for a literal's
  // type, or None when the generic `fromType` fallback must be used.
  def getConverter(t:Type) = if (t <:< definitions.ByteClass.tpe) {
    Some("fromByte")
  } else if (t <:< definitions.ShortClass.tpe) {
    Some("fromShort")
  } else if (t <:< definitions.IntClass.tpe) {
    Some("fromInt")
  } else if (t <:< definitions.LongClass.tpe) {
    Some("fromLong")
  } else if (t <:< definitions.FloatClass.tpe) {
    Some("fromFloat")
  } else if (t <:< definitions.DoubleClass.tpe) {
    Some("fromDouble")
  } else if (t <:< BigIntClass.tpe) {
    Some("fromBigInt")
  } else if (t <:< BigDecimalClass.tpe) {
    Some("fromBigDecimal")
  } else {
    None
  }

  // TODO: maybe match further out on the implicit Numeric[T]?
  class MyTransformer(unit:CompilationUnit) extends TypingTransformer(unit) {
    override def transform(tree: Tree): Tree = {
      //def mylog(s:String) = if (debugging) unit.warning(tree.pos, s)
      // no-op logging stub; the real implementation above is commented out
      def mylog(s:String) = Unit

      val tree2 = tree match {
        // match fuzzy binary operators
        case Apply(Apply(TypeApply(Select(Apply(Apply(_, List(m)), List(ev)), op), List(tt)), List(n)), List(ev2)) => {
          if (!isNumeric(ev.tpe)) {
            //mylog("fuzzy alarm #1")
            tree
          } else if (binops.contains(op)) {
            val op2 = binops(op)
            // convert the right-hand operand to T, directly when a fast
            // converter exists, otherwise via fromType (or not at all when the
            // operand types already agree)
            val conv = getConverter(n.tpe)
            conv match {
              case Some(meth) => {
                //mylog("fuzzy transformed %s (with %s)".format(op, meth))
                typed { Apply(Select(ev, op2), List(m, Apply(Select(ev, meth), List(n)))) }
              }
              case None => if (equivalentTypes(m.tpe, n.tpe)) {
                //mylog("fuzzy transformed %s (removed conversion)".format(op))
                typed { Apply(Select(ev, op2), List(m, n)) }
              } else {
                //mylog("fuzzy transformed %s".format(op))
                typed { Apply(Select(ev, op2), List(m, Apply(TypeApply(Select(ev, "fromType"), List(tt)), List(n)))) }
              }
            }
          } else {
            //mylog("fuzzy alarm #2")
            tree
          }
        }

        // match IntOps (and friends Float, Long, etc.)
        case Apply(Apply(TypeApply(Select(Apply(_, List(m)), op), List(tt)), List(n)), List(ev)) => {
          if (!isNumeric(ev.tpe)) {
            //mylog("literal ops alarm #1")
            tree
          } else if (binops.contains(op)) {
            val op2 = binops(op)
            // here the literal is on the left, so the left-hand operand is converted
            val conv = getConverter(m.tpe)
            conv match {
              case Some(meth) => {
                //mylog("zzz literal ops transformed %s (with %s)".format(op, meth))
                typed { Apply(Select(ev, op2), List(Apply(Select(ev, meth), List(m)), n)) }
              }
              case None => {
                //mylog("zzz literal ops transformed %s".format(op))
                typed { Apply(Select(ev, op2), List(Apply(TypeApply(Select(ev, "fromType"), List(tt)), List(m)), n)) }
              }
            }
          } else {
            //mylog("literal ops alarm #2")
            tree
          }
        }

        // match binary operators
        case Apply(Select(Apply(Apply(_, List(m)), List(ev)), op), List(n)) => {
          if (!isNumeric(ev.tpe)) {
            unit.warning(tree.pos, "binop false alarm #1")
            tree
          } else if (binops.contains(op)) {
            val op2 = binops(op)
            //mylog("binop rewrote %s %s %s to n.%s(%s, %s)".format(m, op, n, op2, m, n))
            typed { Apply(Select(ev, op2), List(m, n)) }
          } else {
            unit.warning(tree.pos, "binop false alarm #2")
            tree
          }
        }

        // match unary operators
        case Select(Apply(Apply(_, List(m)), List(ev)), op) => {
          if (!isNumeric(ev.tpe)) {
            unit.warning(tree.pos, "unop false alarm #1")
            tree
          } else if (unops.contains(op)) {
            val op2 = unops(op)
            //mylog("unop rewrote %s to n.%s".format(op, op2))
            typed { Apply(Select(ev, op2), List(m)) }
          } else {
            unit.warning(tree.pos, "unop false alarm #2")
            tree
          }
        }

        case _ => tree
      }
      // recurse into the (possibly rewritten) tree
      super.transform(tree2)
    }
  }
}
| azavea/numeric | plugin/src/main/scala/com/azavea/math/plugin/OptimizedNumeric.scala | Scala | mit | 6,935 |
package oxalis.security
import com.mohiva.play.silhouette.api.util.PasswordInfo
import com.scalableminds.util.security.SCrypt
// Silhouette password hasher backed by SCrypt.
class PasswordHasher extends com.mohiva.play.silhouette.api.util.PasswordHasher {

  // Identifier stored with each hash so it can be matched back to this hasher.
  override def id: String = "SCrypt"

  // Hashes the plain-text password with SCrypt.
  override def hash(plainPassword: String): PasswordInfo = PasswordInfo(id, SCrypt.hashPassword(plainPassword))

  // Verifies a supplied plain-text password against the stored hash.
  override def matches(passwordInfo: PasswordInfo, suppliedPassword: String): Boolean =
    SCrypt.verifyPassword(suppliedPassword, passwordInfo.password)

  // Hashes produced by this hasher are never flagged for re-hashing.
  override def isDeprecated(passwordInfo: PasswordInfo): Option[Boolean] = Some(false)
}
| scalableminds/webknossos | app/oxalis/security/PasswordHasher.scala | Scala | agpl-3.0 | 605 |
/*
* Copyright (c) 2016 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.kinesistee
import com.snowplowanalytics.kinesistee.filters.FilterStrategy
import com.snowplowanalytics.kinesistee.models.{Content, Stream}
import com.snowplowanalytics.kinesistee.routing.RoutingStrategy
import com.snowplowanalytics.kinesistee.transformation.TransformationStrategy
/**
* This trait allows us to stub out tee behaviours
*/
trait Tee {
  /**
   * Tees the given content batch: applies the optional transformation and
   * filter strategies and routes the result per the routing strategy.
   *
   * @param routingStrategy where the content should be sent
   * @param transformationStrategy optional transformation applied before routing
   * @param filterStrategy optional filter applied before routing
   * @param content the records to tee
   */
  def tee(routingStrategy: RoutingStrategy,
          transformationStrategy: Option[TransformationStrategy],
          filterStrategy: Option[FilterStrategy],
          content: Seq[Content]): Unit
}
| snowplow/kinesis-tee | src/main/scala/com/snowplowanalytics/kinesistee/Tee.scala | Scala | apache-2.0 | 1,287 |
package bulu.util
import akka.actor.ActorRef
import scala.collection.mutable.ArrayBuffer
import scala.xml.Node
import bulu.core.Field
import bulu.core.BitKey
import bulu.core.MeasureType
import bulu.core.CuboidPartition
import scala.collection.immutable.SortedMap
/*build*/
// ---- Cube-build protocol messages ----
// -> master: kicks off a cube build
case object BuildStart
// master -> worker
case class FetchDimension(cube: String, field: Field, sinker: ActorRef)
case class CubePartFinished(cube: String, condition: String,count:Int)
case class MemberList(cube: String, field: Field, list: List[Option[String]])
// worker -> sinker
case class RawData(cube: String, condition: String, sinker: ActorRef)
case class FetchRawData(cube: String, condition: String)
// a single source record, keyed by column name
case class Record(rec: Map[String, Option[Any]])
case class RecordList(cube: String,rec: List[Map[String, Option[Any]]])
// a record together with its dimension bit-key
case class Cell(key: BitKey, rec: Map[String, Option[Any]])
case class DimensionFinshed(cube: String, partitionField: Field, list: List[Option[String]], workers: Seq[ActorRef])
case class CubeFinshed(cube: String,count:Int)
// an aggregated cell keyed by bit-key, carrying one value per (measure, type)
case class SaveCell(agg: (BitKey, Map[(String, MeasureType.MeasureType), BigDecimal]))
case class RawDataBegan(cube:String,sinker:ActorRef,partList:List[Option[String]],partitionField:Field,workerId:Int,workerCount:Int)
case object SendLeftRecords
case class RawDataFinished(cube:String,sinker:ActorRef,count:Int)
case class RawDataDispatchFinished(count:Int)
case object RecordFinished
case object CellSaved
case class SaveCellFinished(count:Int)
case object RequestAggs
case class CachedAggs(aggs:Map[BitKey, Map[(String, MeasureType.MeasureType), BigDecimal]])
//case class CachedCube(cubePart:Partition, aggs:Map[BitKey, Map[(String, MeasureType.MeasureType), BigDecimal]],workerId:Int, workerCount:Int)
/*build*/
/**query**/
// ---- Query protocol messages ----
case class Query(name: String, params: Map[String, String],workers:Seq[ActorRef])
case class CachePart(cube: String, partition: CuboidPartition, qeury: Query, cuboidSinker: ActorRef)
case class PartitionCached( query:Query, workerIndex:Int, workerCount:Int, partIndex:Int, partCount:Int )
case class CacheReply(query: Query, cacheSinker: ActorRef)
case class SendFinished(cube:String, query: CacheReply, workerIndex:Int, dispatchIndex:Int)
case class QueryReply(query: Query, reply: ActorRef,cuboidMask:BitKey, filterAndMatch:List[(BitKey, BitKey)])
case class QueryPart(cube: String, partition: CuboidPartition, query: Query, cuboidSinker: ActorRef, cuboidMask:BitKey, filterAndMatch:List[(BitKey, BitKey)])
case class QueryResult(query: Query, result:String)
//case class FetchedCell(cube:String, query: Query, key: BitKey, cell: Map[(String, MeasureType.MeasureType), BigDecimal], dispatchIndex:Int)
case class BaseCell(key: BitKey, cell: Map[(String, MeasureType.MeasureType), BigDecimal])
case class CacheCell(key: BitKey, cell:Map[(String, MeasureType.MeasureType), BigDecimal])
case class CacheFilterBegin(query: QueryReply)
case class HitCell(key: BitKey,values:Map[(String, MeasureType.MeasureType), BigDecimal])
case class QueryPartFinished(query: Query, workerIndex:Int, dispatchIndex:Int, result: Map[BitKey, Map[(String, MeasureType.MeasureType), BigDecimal]])
case class FetchFinished(query: CacheReply)
case class BuildFinished(query: CacheReply)
case object CacheBegan
case object CacheCellFinished
case class CacheFinished(reply:ActorRef)
case class CachePartFinished(cube:String, count:Int, workerIndex:Int, dispatchIndex:Int)
case class AllCacheFinished(query: Query)
case class FilterBegan(query: Query)
case class FilterFinished(query: Query)
case object BaseFinished
case class QueryFinished(query: Query, nodes: Map[BitKey, Map[(String, MeasureType.MeasureType), BigDecimal]])
/**query**/
// a dimension instance: its bit-mask plus the bit-key assigned to each member
case class DimInstance(mask: BitKey, members: scala.collection.mutable.Map[String, BitKey])
//
| hwzhao/bulu | src/main/scala/bulu/util/Message.scala | Scala | apache-2.0 | 3,869 |
package jp.ac.nagoya_u.dsmoq.sdk.request.json
/** Request payload for updating a user's email address. */
private[request] case class UpdateEmailJson(email: String) extends Jsonable | nkawa/dsmoq | sdk/src/main/java/jp/ac/nagoya_u/dsmoq/sdk/request/json/UpdateEmailJson.scala | Scala | apache-2.0 | 122 |
LOCAL_IMAGE ?= radanalytics-scala-spark

# If you are going to push the built image to a registry
# using the "push" make target then you should replace
# "project" with an appropriate path for your registry and/or project
PUSH_IMAGE=project/radanalytics-scala-spark

DOCKERFILE_CONTEXT=scala-build

.PHONY: build push clean clean-context context zero-tarballs

build: $(DOCKERFILE_CONTEXT)
	docker build --pull -t $(LOCAL_IMAGE) $(DOCKERFILE_CONTEXT)

push: build
	docker tag $(LOCAL_IMAGE) $(PUSH_IMAGE)
	docker push $(PUSH_IMAGE)

clean: clean-context
	-docker rmi $(LOCAL_IMAGE) $(PUSH_IMAGE)

clean-context:
	-rm -rf target
	-rm -rf $(DOCKERFILE_CONTEXT)/*

context: $(DOCKERFILE_CONTEXT)

$(DOCKERFILE_CONTEXT): $(DOCKERFILE_CONTEXT)/Dockerfile $(DOCKERFILE_CONTEXT)/modules

$(DOCKERFILE_CONTEXT)/Dockerfile $(DOCKERFILE_CONTEXT)/modules:
	concreate generate --descriptor image.scala.yaml
	cp -R target/image/* $(DOCKERFILE_CONTEXT)

# Truncate the layer tarballs to zero bytes (keeps the file layout for tests
# without shipping the payload).
# Bug fix: "\\;" reached the shell as "\" followed by a bare ";" command
# separator, so find failed with "missing argument to -exec". In a make
# recipe a single backslash ("\;") is what find needs.
zero-tarballs:
	find ./$(DOCKERFILE_CONTEXT) -name "*.tgz" -type f -exec truncate -s 0 {} \;
	find ./$(DOCKERFILE_CONTEXT) -name "*.tar.gz" -type f -exec truncate -s 0 {} \;
| tmckayus/oshinko-s2i | Makefile.scala | Scala | apache-2.0 | 1,115 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import monix.execution.Ack.Continue
import monix.execution.FutureUtils.extensions._
import monix.execution.exceptions.DummyException
import monix.reactive.subjects.PublishSubject
import monix.reactive.{Observable, Observer}
import scala.concurrent.Future
import scala.concurrent.duration.Duration.Zero
import scala.concurrent.duration._
object Zip2Suite extends BaseOperatorSuite {
  // Zips two ranges (the second two elements longer) and sums the pairs;
  // the zipped stream is bounded by the shorter source.
  def createObservable(sourceCount: Int) = Some {
    val o1 = Observable.range(0, sourceCount)
    val o2 = Observable.range(0, sourceCount + 2)
    val o = Observable.zipMap2(o1, o2) { (x1, x2) => x1 + x2 }
    Sample(o, count(sourceCount), sum(sourceCount), Zero, Zero)
  }

  // zip emits exactly as many elements as the shorter source
  def count(sourceCount: Int) = sourceCount
  // sum of (i + i) for i in [0, sourceCount) = 2 * n*(n-1)/2
  def sum(sourceCount: Int) = sourceCount * (sourceCount - 1)

  // Both sources end in an error, so one element is lost at the tail.
  def observableInError(sourceCount: Int, ex: Throwable) = Some {
    val o1 = createObservableEndingInError(Observable.range(0, sourceCount), ex)
    val o2 = createObservableEndingInError(Observable.range(0, sourceCount), ex)
    val o = Observable.zipMap2(o1, o2) { (x1, x2) => x1 + x2 }
    Sample(o, count(sourceCount-1), sum(sourceCount-1), Zero, Zero)
  }

  // The zip function itself throws on the last pair.
  def brokenUserCodeObservable(sourceCount: Int, ex: Throwable) = Some {
    val o1 = Observable.range(0, sourceCount)
    val o2 = Observable.range(0, sourceCount+100)
    val o = Observable.zipMap2(o1, o2) { (x1, x2) =>
      if (x2 < sourceCount-1) x1 + x2 else throw ex
    }
    Sample(o, count(sourceCount-1), sum(sourceCount-1), Zero, Zero)
  }

  // A zip that never emits within the test window, used to exercise cancellation.
  override def cancelableObservables(): Seq[Sample] = {
    val sample1 = {
      val o1 = Observable.range(0, 10).delayOnNext(1.second)
      val o2 = Observable.range(0, 10).delayOnNext(1.second)
      Observable.zipMap2(o1, o2)(_+_)
    }
    Seq(Sample(sample1, 0, 0, 0.seconds, 0.seconds))
  }
  // zip only emits once BOTH sources have produced a value for the same index,
  // and completes as soon as either side completes.
  test("self starts before other and finishes before other") { implicit s =>
    val obs1 = PublishSubject[Int]()
    val obs2 = PublishSubject[Int]()
    var received = (0, 0)
    var wasCompleted = false

    obs1.zip(obs2).unsafeSubscribeFn(new Observer[(Int, Int)] {
      def onNext(elem: (Int, Int)) = {
        received = elem
        Continue
      }

      def onError(ex: Throwable) = ()
      def onComplete() = wasCompleted = true
    })

    obs1.onNext(1); s.tick()
    assertEquals(received, (0,0))
    obs2.onNext(2); s.tick()
    assertEquals(received, (1,2))

    obs2.onNext(4); s.tick()
    assertEquals(received, (1,2))
    obs1.onNext(3); s.tick()
    assertEquals(received, (3,4))

    obs1.onComplete()
    s.tick()
    assert(wasCompleted)
  }

  // an error on the first source must propagate downstream and cancel the other source
  test("self signals error and interrupts the stream before it starts") { implicit s =>
    val obs1 = PublishSubject[Int]()
    val obs2 = PublishSubject[Int]()
    var wasThrown: Throwable = null
    var wasCanceled = false
    var received = (0,0)

    obs1.zip(obs2.doOnEarlyStopF { () => wasCanceled = true })
      .unsafeSubscribeFn(new Observer[(Int, Int)] {
        def onNext(elem: (Int, Int)) = { received = elem; Continue }
        def onError(ex: Throwable) = wasThrown = ex
        def onComplete() = ()
      })

    obs1.onError(DummyException("dummy"))
    assertEquals(wasThrown, DummyException("dummy"))

    obs2.onNext(2); s.tickOne()
    assertEquals(received, (0,0))
    assert(wasCanceled)
  }

  // symmetric case: the error originates from the second source
  test("other signals error and interrupts the stream before it starts") { implicit s =>
    val obs1 = PublishSubject[Int]()
    val obs2 = PublishSubject[Int]()
    var wasThrown: Throwable = null
    var wasCanceled = false
    var received = (0,0)

    obs2.doOnEarlyStopF { () => wasCanceled = true }.zip(obs1)
      .unsafeSubscribeFn(new Observer[(Int, Int)] {
        def onNext(elem: (Int, Int)) = { received = elem; Continue }
        def onError(ex: Throwable) = wasThrown = ex
        def onComplete() = ()
      })

    obs1.onError(DummyException("dummy"))
    assertEquals(wasThrown, DummyException("dummy"))

    obs2.onNext(2); s.tickOne()
    assertEquals(received, (0,0))
    assert(wasCanceled)
  }

  // errors must be delivered immediately, not queued behind a slow onNext
  test("should not back-pressure self.onError") { implicit s =>
    val obs1 = PublishSubject[Int]()
    val obs2 = PublishSubject[Int]()
    var wasThrown: Throwable = null

    obs1.zip(obs2).unsafeSubscribeFn(new Observer[(Int, Int)] {
      def onNext(elem: (Int, Int)) =
        Future.delayedResult(1.second)(Continue)
      def onComplete() = ()
      def onError(ex: Throwable) =
        wasThrown = ex
    })

    obs1.onNext(1)
    obs2.onNext(2)
    obs1.onError(DummyException("dummy"))

    s.tick()
    assertEquals(wasThrown, DummyException("dummy"))
    s.tick(1.second)
  }

  // same guarantee when the error comes from the other source
  test("should not back-pressure other.onError") { implicit s =>
    val obs1 = PublishSubject[Int]()
    val obs2 = PublishSubject[Int]()
    var wasThrown: Throwable = null

    obs1.zip(obs2).unsafeSubscribeFn(new Observer[(Int, Int)] {
      def onNext(elem: (Int, Int)) =
        Future.delayedResult(1.second)(Continue)
      def onComplete() = ()
      def onError(ex: Throwable) =
        wasThrown = ex
    })

    obs1.onNext(1)
    obs2.onNext(2)
    obs2.onError(DummyException("dummy"))

    s.tick()
    assertEquals(wasThrown, DummyException("dummy"))
    s.tick(1.second)
  }
} | Wogan/monix | monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/Zip2Suite.scala | Scala | apache-2.0 | 5,899 |
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package swave.core.graph
import swave.core.macros._
object GraphBuilding {
import Dsl._
import DslImpl._
def input(text: String): Input = new InputImpl(new NodeImpl(text))
def fanOut(text: String): FanOut[Input] = new FanOutImpl[Input](new NodeImpl(text), Nil, new InputImpl(_))
def coupling(text: String): Coupling = new CouplingImpl(text)
object Dsl {
sealed abstract class Node {
def value: String
}
sealed trait Coupling {
def value: String
def input: Input
}
sealed abstract class Ops { self ⇒
type To
type Repr <: Ops {
type To <: self.To
type Repr <: self.Repr
}
final def next(text: String): Repr = append(new NodeImpl(text))
def attach(other: Input): FanIn[Repr]
def attachLeft(other: Input): FanIn[Repr]
def fanOut(fanOut: String): FanOut[Repr]
def to(coupling: Coupling): To
def to(drain: String): To
protected def append(next: NodeImpl): Repr
}
sealed abstract class Input extends Ops {
type To = Digraph[Node]
type Repr = Input
def toDigraph: To
}
sealed abstract class FanIn[Repr] {
def attach(other: Input): FanIn[Repr]
def fanIn(text: String): Repr
def fanInAndOut(text: String): FanOut[Repr]
}
sealed abstract class FanOut[FRepr] extends FanIn[FRepr] {
def attach(other: Input): FanOut[FRepr]
def sub: SubOps
def subContinue: FRepr
def continue: FRepr
def subDrains(drains: String*): this.type
sealed abstract class SubOps extends Ops {
type To = FanOut.this.type
type Repr = SubOps
def end: FanOut[FRepr]
}
}
}
//////////////////////////////// IMPLEMENTATION ////////////////////////////////////////
// Implementation side of the graph-building DSL declared in `Dsl`.
// The graph is built by mutation: `NodeImpl` vertices accumulate their
// `inputs`/`outputs` edge lists, while the wrapper classes below are thin
// handles that thread the "current" node(s) through the fluent API.
private object DslImpl {
import Dsl._
// A mutable, labelled graph vertex.
class NodeImpl(val value: String) extends Node {
var inputs: List[NodeImpl] = Nil
var outputs: List[NodeImpl] = Nil
// Adds a directed edge `this -> other`, updating both endpoints' edge lists.
def addOutput(other: NodeImpl): Unit = {
other.inputs :+= this
this.outputs :+= other
}
override def toString: String = s"Node($value)"
}
// A coupling node: on construction it creates a fresh Input carrying the same
// label and immediately wires itself to that input's node (the feedback edge).
final class CouplingImpl(_value: String) extends NodeImpl(_value) with Coupling {
val input: Input = GraphBuilding.input(value)
addOutput(impl(input).node)
override def toString: String = s"Coupling($value)"
}
// Handle for a single open stream position, wrapping the current `node`.
final class InputImpl(val node: NodeImpl) extends Input {
// Wires `next` after the current node and advances the handle.
protected def append(next: NodeImpl): Repr = {
node.addOutput(next)
new InputImpl(next)
}
// Merging: the argument's node is placed after (attach) or before
// (attachLeft) this one in the fan-in's input order.
def attach(other: Input) = new FanInImpl[Input](node :: impl(other).node :: Nil, new InputImpl(_))
def attachLeft(other: Input) = new FanInImpl[Input](impl(other).node :: node :: Nil, new InputImpl(_))
// NOTE(review): `next` is inherited from outside this view; it appears to
// append a node labelled with the given text and return the advanced handle — confirm.
def fanOut(fanOut: String) = new FanOutImpl[Input](impl(next(fanOut)).node, Nil, new InputImpl(_))
// Terminates the stream in a drain or coupling and materializes the Digraph.
def to(drain: String) = next(drain).toDigraph
def to(coupling: Coupling) = append(coupling.asInstanceOf[CouplingImpl]).toDigraph
def toDigraph = {
import Digraph.EdgeAttributes._
// Build the digraph by following the mutable edge lists from the current node.
val graph = Digraph[Node](node :: Nil, _.asInstanceOf[NodeImpl].inputs, _.asInstanceOf[NodeImpl].outputs)
// Mark every coupling's feedback path so later stages may reverse or fuse it.
graph.vertices.foreach {
case x: CouplingImpl ⇒ graph.markPaths(x, impl(x.input).node, Reversaphile | Fusable)
case _ ⇒ // nothing to do
}
graph
}
}
// Pending fan-in: collects the nodes to be merged (in order) plus a function
// re-wrapping the merged node into the caller's representation type.
final class FanInImpl[Repr](inputs: List[NodeImpl], wrap: NodeImpl ⇒ Repr) extends FanIn[Repr] {
def attach(other: Input) = new FanInImpl[Repr](inputs :+ impl(other).node, wrap)
// Creates the joining node and wires every collected input into it.
def fanIn(text: String): Repr = {
val node = new NodeImpl(text)
inputs.foreach(_.addOutput(node))
wrap(node)
}
// Like fanIn, but the joining node immediately fans out again.
def fanInAndOut(text: String): FanOut[Repr] = {
val node = new NodeImpl(text)
inputs.foreach(_.addOutput(node))
new FanOutImpl[Repr](node, Nil, wrap)
}
}
// Open fan-out: `base` is the fan-out node itself; `inputs` accumulates the
// exit nodes of sub-streams closed via `SubOps.end` (in closing order).
final class FanOutImpl[FRepr](base: NodeImpl, inputs: List[NodeImpl], wrap: NodeImpl ⇒ FRepr)
extends FanOut[FRepr] {
def attach(other: Input) = new FanOutImpl(base, inputs :+ impl(other).node, wrap)
def fanIn(text: String): FRepr = {
val node = new NodeImpl(text)
inputs.foreach(_.addOutput(node))
wrap(node)
}
def fanInAndOut(text: String): FanOut[FRepr] = {
val node = new NodeImpl(text)
inputs.foreach(_.addOutput(node))
new FanOutImpl[FRepr](node, Nil, wrap)
}
def sub: SubOps = new SubOpsImpl(base)
// Continues directly on the fan-out node; only legal while no sub-stream
// has been closed into `inputs` yet.
def subContinue: FRepr = {
requireArg(inputs eq Nil, "Cannot `subContinue` when other sub-streams are open")
wrap(base)
}
// Continues with the single closed sub-stream.
// NOTE(review): `requireArg` apparently prepends its own text — the message
// deliberately starts with a comma; confirm before "fixing" it.
def continue: FRepr = {
requireArg(inputs.size == 1, ", which means the `continue` call is illegal here")
wrap(inputs.head)
}
// Terminates one fresh sub-stream per given drain label.
def subDrains(drains: String*): this.type = {
drains.foreach(s ⇒ base.addOutput(new NodeImpl(s)))
this
}
// Fluent operations on a single sub-stream, starting at `node`.
class SubOpsImpl(private val node: NodeImpl) extends SubOps {
protected def append(next: NodeImpl): SubOps = {
node.addOutput(next)
new SubOpsImpl(next)
}
def attach(other: Input) = new FanInImpl[SubOps](node :: impl(other).node :: Nil, new SubOpsImpl(_))
def attachLeft(other: Input) = new FanInImpl[SubOps](impl(other).node :: node :: Nil, new SubOpsImpl(_))
def fanOut(fanOut: String) =
new FanOutImpl[SubOps](next(fanOut).asInstanceOf[SubOpsImpl].node, Nil, new SubOpsImpl(_))
def to(drain: String): To = {
next(drain)
FanOutImpl.this
}
def to(coupling: Coupling): To = {
append(coupling.asInstanceOf[CouplingImpl])
FanOutImpl.this
}
// Closes this sub-stream and records its exit node on the enclosing fan-out.
// If no operation was applied yet (node == base) a placeholder node is
// inserted so the sub-stream still occupies a distinct slot; the placeholder's
// overridden addOutput later splices the real successor into base.outputs
// at the placeholder's position (identity comparison via `eq`).
def end: FanOut[FRepr] = {
val n =
if (node == base) {
new NodeImpl("<dummy>") {
base.addOutput(this)
override def addOutput(other: NodeImpl): Unit = {
other.inputs :+= base
base.outputs = base.outputs.map(n ⇒ if (n eq this) other else n)
this.outputs :+= other
}
}
} else node
new FanOutImpl(base, inputs :+ n, wrap)
}
}
}
// Downcast is safe by construction: every Input created by this DSL is an InputImpl.
private def impl(input: Input): InputImpl = input.asInstanceOf[InputImpl]
}
}
| sirthias/swave | core/src/test/scala/swave/core/graph/GraphBuilding.scala | Scala | mpl-2.0 | 6,643 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.ml.utils
import java.io.File
import ai.h2o.sparkling.backend.H2OJob
import ai.h2o.sparkling.backend.utils.{RestApiUtils, RestCommunication}
import ai.h2o.sparkling.ml.internals.H2OModel
import ai.h2o.sparkling.ml.models.{H2OMOJOModel, H2OMOJOSettings}
import ai.h2o.sparkling.{H2OConf, H2OContext}
import hex.schemas.ModelBuilderSchema
import org.apache.spark.expose
import water.api.schemas3.ValidationMessageV3
/**
 * Shared helpers for Sparkling Water estimators that train H2O models
 * through the cluster's REST API.
 */
trait EstimatorCommonUtils extends RestCommunication {

  /**
   * Submits a training request to the given model-builder endpoint, blocks
   * until the resulting H2O job finishes (printing progress), prints any
   * validation warnings, and returns the destination key of the trained model.
   *
   * @param endpointSuffix     REST endpoint suffix of the model builder
   * @param params             training parameters to send with the request
   * @param encodeParamsAsJson whether the parameters are encoded as a JSON body
   * @return key under which the trained model is stored in the H2O cluster
   */
  protected def trainAndGetDestinationKey(
      endpointSuffix: String,
      params: Map[String, Any],
      encodeParamsAsJson: Boolean = false): String = {
    val h2oConf = H2OContext.ensure().getConf
    val clusterEndpoint = RestApiUtils.getClusterEndpoint(h2oConf)
    val builderResponse = update[ModelBuilderSchema[_, _, _]](
      clusterEndpoint,
      endpointSuffix,
      h2oConf,
      params,
      Seq((classOf[ModelBuilderSchema[_, _, _]], "parameters")),
      encodeParamsAsJson)
    // Block until the training job completes before reading its destination.
    H2OJob(builderResponse.job.key.name).waitForFinishAndPrintProgress()
    // `messages` may be null when the backend reports no validation output.
    Option(builderResponse.messages).foreach(printWarnings)
    builderResponse.job.dest.name
  }

  /**
   * Trains a model via the REST API and converts the result into a Spark
   * MOJO model with default MOJO settings.
   */
  protected def trainAndGetMOJOModel(
      endpointSuffix: String,
      params: Map[String, Any],
      encodeParamsAsJson: Boolean = false): H2OMOJOModel = {
    val destinationKey = trainAndGetDestinationKey(endpointSuffix, params, encodeParamsAsJson)
    H2OModel(destinationKey).toMOJOModel(destinationKey + "_uid", H2OMOJOSettings(), false)
  }

  /**
   * Downloads the binary representation of the given model into a fresh
   * Spark-managed temporary directory and returns the downloaded file.
   */
  private[sparkling] def downloadBinaryModel(modelId: String, conf: H2OConf): File = {
    val clusterEndpoint = RestApiUtils.getClusterEndpoint(conf)
    val tempDirectory = expose.Utils.createTempDir(expose.Utils.getLocalDir(conf.sparkConf))
    val targetFile = new File(tempDirectory, s"$modelId.bin")
    downloadBinaryURLContent(clusterEndpoint, s"/3/Models.fetch.bin/$modelId", conf, targetFile)
    targetFile
  }

  /**
   * Returns `key` unchanged unless a model with that id already exists in the
   * cluster, in which case a fresh key with a numeric suffix is returned and
   * a warning is logged.
   */
  protected def convertModelIdToKey(key: String): String = {
    if (!H2OModel.modelExists(key)) {
      key
    } else {
      val replacement = findAlternativeKey(key)
      logWarning(
        s"Model id '$key' is already used by a different H2O model. Replacing the original id with '$replacement' ...")
      replacement
    }
  }

  // Probes "<modelId>_1", "<modelId>_2", ... and returns the first key not yet taken.
  private def findAlternativeKey(modelId: String): String =
    Iterator
      .from(1)
      .map(suffixNumber => s"${modelId}_$suffixNumber")
      .dropWhile(H2OModel.modelExists)
      .next()

  // Writes every validation message of type WARN to stderr.
  private def printWarnings(messages: Array[ValidationMessageV3]): Unit = {
    val warn = "WARN"
    for (msg <- messages if msg.message_type == warn) {
      System.err.println(s"$warn: ${msg.message} (field name: ${msg.field_name})")
    }
  }
}
| h2oai/sparkling-water | ml/src/main/scala/ai/h2o/sparkling/ml/utils/EstimatorCommonUtils.scala | Scala | apache-2.0 | 3,592 |
package util
import org.scalatest._
import util._
/**
 * Tests for [[PathNameConverter]]: round-trip conversion between OS-style
 * file paths (slash-separated, with a ".variable" extension) and
 * dot-separated Amethyst pathnames.
 */
class PathnameParsersTest extends FlatSpec with ShouldMatchers {
  "osToA" should "convert OS pathnames to their Amethyst counterparts" in {
    import PathNameConverter._
    osToA("ex.variable") should equal("ex")
    osToA("exf/ex.variable") should equal("exf.ex")
    osToA("math/homework1/a.variable") should equal("math.homework1.a")
  }
  // Fixed copy-pasted test description: aToOs converts Amethyst -> OS,
  // not "Amethyst -> Amethyst" as the original text claimed.
  "aToOs" should "convert Amethyst pathnames to their OS counterparts" in {
    import PathNameConverter._
    aToOs("ex", false) should equal(("ex.variable", false))
    aToOs("exf.ex", false) should equal(("exf/ex.variable", false))
    // NOTE(review): the boolean flag appears to signal that a "root." prefix
    // was stripped — confirm against PathNameConverter.
    aToOs("root.math.homework1.a", false) should equal(("math/homework1/a.variable", true))
  }
}
} | bluebear94/bag | src/test/scala/util/PNCTest.scala | Scala | gpl-3.0 | 732 |
/** Test that the inliner is not inferring that `xs' is
* always Nil, removing the call to isEmpty.
*/
object Test extends dotty.runtime.LegacyApp {
// Tail-recursive on purpose; @tailrec makes the compiler reject the setup
// if the recursion cannot be compiled as a loop.
@annotation.tailrec
def walk(xs: MyList): Unit = {
// This isEmpty call must remain a real dynamic dispatch: walk is entered
// with a plain MyList (isEmpty == false) and recurses with MyNil
// (isEmpty == true), so the inliner must not assume `xs` is always MyNil
// and remove the check.
if (xs.isEmpty)
println("empty")
else {
println("non-empty")
walk(MyNil)
}
}
// Expected program output: "non-empty" followed by "empty".
walk(new MyList)
}
// Base variant: a non-empty stand-in list; isEmpty is overridden by MyNil.
class MyList {
def isEmpty = false
}
// Singleton empty variant: overrides isEmpty so the dispatch result differs
// between MyList and MyNil instances.
object MyNil extends MyList {
override def isEmpty = true
}
| yusuke2255/dotty | tests/run/inliner-infer.scala | Scala | bsd-3-clause | 445 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset down to a sample of code snippets that match specific criteria, giving a quick overview of the dataset's contents without performing any deeper analysis.