Dataset schema (one Scala source record per row):

  column     dtype    stats
  ---------  -------  --------------------
  code       string   lengths 5 to 1M
  repo_name  string   lengths 5 to 109
  path       string   lengths 6 to 208
  language   string   1 distinct value
  license    string   15 distinct values
  size       int64    5 to 1M
package example.application.resource

import javax.ws.rs.core.MediaType
import javax.ws.rs._

@Path("/basic")
@Consumes(Array(MediaType.TEXT_PLAIN))
@Produces(Array(MediaType.TEXT_PLAIN))
class BasicResource {

  @GET
  @Path("ping")
  def ping() = "Pong"

  @GET
  @Path("hello")
  def hello(@QueryParam("name") name: String) = s"Hello, $name!"

  @GET
  @Path("x2")
  def x2(@QueryParam("x") x: Int) = x * 2

  @GET
  @Path("greet/{name}")
  @Produces(Array(MediaType.TEXT_PLAIN))
  def greet(@PathParam("name") name: String) = s"Greetings, $name!"
}
jostly/scala-dropwizard
src/test/scala/example/application/resource/BasicResource.scala
Scala
mit
552
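A quick way to exercise the resource above is the standard JAX-RS 2.0 client API; a minimal sketch, assuming the Dropwizard app serves the resource at http://localhost:8080 (the host, port, and object name are hypothetical):

import javax.ws.rs.client.ClientBuilder
import javax.ws.rs.core.MediaType

object BasicResourceClient extends App {
  val target = ClientBuilder.newClient().target("http://localhost:8080/basic")

  // GET /basic/ping -> "Pong"
  println(target.path("ping").request(MediaType.TEXT_PLAIN).get(classOf[String]))

  // GET /basic/hello?name=World -> "Hello, World!"
  println(target.path("hello").queryParam("name", "World")
    .request(MediaType.TEXT_PLAIN).get(classOf[String]))
}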
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution.datasources.v2

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.connector.catalog.CatalogManager
import org.apache.spark.sql.execution.LeafExecNode

/**
 * Physical plan node for setting the current catalog and/or namespace.
 */
case class SetCatalogAndNamespaceExec(
    catalogManager: CatalogManager,
    catalogName: Option[String],
    namespace: Option[Seq[String]]) extends LeafExecNode {

  override protected def doExecute(): RDD[InternalRow] = {
    // The catalog is updated first because CatalogManager resets the current namespace
    // when the current catalog is set.
    catalogName.map(catalogManager.setCurrentCatalog)
    namespace.map(ns => catalogManager.setCurrentNamespace(ns.toArray))

    sqlContext.sparkContext.parallelize(Seq.empty, 1)
  }

  override def output: Seq[Attribute] = Seq.empty
}
rezasafi/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/SetCatalogAndNamespaceExec.scala
Scala
apache-2.0
1,827
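For context, this exec node is what Spark's `USE` statement is planned into; a minimal sketch of triggering it from a session, assuming a v2 catalog named `testcat` has been registered in the session config (both names are hypothetical):

// spark.sql.catalog.testcat must point at a CatalogPlugin implementation
spark.sql("USE testcat.ns1")  // sets the current catalog, then the current namespace
spark.sql("USE ns2")          // sets only the current namespace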
/*
 * Copyright (C) 2015 Stratio (http://stratio.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.stratio.crossdata.driver.querybuilder.dslentities

import com.stratio.crossdata.driver.querybuilder.{BinaryExpression, Expression, Predicate}

// Logical predicates
case class And(left: Expression, right: Expression) extends BinaryExpression with Predicate {
  override val tokenStr = "AND"

  override def childExpansion(child: Expression): String = child match {
    case _: And => child.toXDQL
    case _: Predicate => s"(${child.toXDQL})"
    case _: Expression => child.toXDQL
    case _ => s"(${child.toXDQL})"
  }
}

case class Or(left: Expression, right: Expression) extends BinaryExpression with Predicate {
  override val tokenStr = "OR"

  override def childExpansion(child: Expression): String = child match {
    case _: Or => child.toXDQL
    case _: Predicate => s"(${child.toXDQL})"
    case _: Expression => child.toXDQL
    case _ => s"(${child.toXDQL})"
  }
}

private[dslentities] trait EqualityCheckers extends BinaryExpression {
  //TODO: Improve management of cases as `x === y === z`
  override def childExpansion(child: Expression): String = child.toXDQL
}

// Comparison predicates
case class Equal(left: Expression, right: Expression) extends EqualityCheckers with Predicate {
  override val tokenStr: String = "="
}

case class Different(left: Expression, right: Expression) extends EqualityCheckers with Predicate {
  override val tokenStr: String = "<>"
}

case class LessThan(left: Expression, right: Expression) extends BinaryExpression with Predicate {
  override val tokenStr: String = "<"

  override def childExpansion(child: Expression): String = child match {
    case _: Expression => child.toXDQL
    case _ => s"(${child.toXDQL})"
  }
}

case class LessThanOrEqual(left: Expression, right: Expression) extends BinaryExpression with Predicate {
  override val tokenStr: String = "<="

  override def childExpansion(child: Expression): String = child match {
    case _: Expression => child.toXDQL
    case _ => s"(${child.toXDQL})"
  }
}

case class GreaterThan(left: Expression, right: Expression) extends BinaryExpression //TODO: Review
  with Predicate {
  override val tokenStr: String = ">"

  override def childExpansion(child: Expression): String = child match {
    case _: Expression => child.toXDQL
    case _ => s"(${child.toXDQL})"
  }
}

case class GreaterThanOrEqual(left: Expression, right: Expression) extends BinaryExpression with Predicate {
  override val tokenStr: String = ">="

  override def childExpansion(child: Expression): String = child match {
    case _: Expression => child.toXDQL
    case _ => s"(${child.toXDQL})"
  }
}

case class IsNull(expr: Expression) extends Predicate {
  override private[querybuilder] def toXDQL: String = s" ${expr.toXDQL} IS NULL"
}

case class IsNotNull(expr: Expression) extends Predicate {
  override private[querybuilder] def toXDQL: String = s" ${expr.toXDQL} IS NOT NULL"
}

case class In(left: Expression, right: Expression*) extends Expression with Predicate {
  override private[querybuilder] def toXDQL: String =
    s" ${left.toXDQL} IN ${right map (_.toXDQL) mkString ("(", ",", ")")}"
}

case class Like(left: Expression, right: Expression) extends BinaryExpression with Predicate {
  override val tokenStr = "LIKE"
}
Stratio/crossdata
driver/src/main/scala/com/stratio/crossdata/driver/querybuilder/dslentities/predicates.scala
Scala
apache-2.0
3,862
/*
 * Copyright 2013 Akiyoshi Sugiki, University of Tsukuba
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package kumoi.impl.os

import kumoi.shell.aaa._
import kumoi.shell.pm._
import kumoi.impl.pm._
import kumoi.core._
import kumoi.core.Command._
import scala.io._
import java.util.{Timer, TimerTask}

/**
 * Linux-dependent statistics.
 * @author Akiyoshi SUGIKI
 */
abstract class LinuxStat extends PMStat

case class CPU(usr: Double, sys: Double, total: Double) extends LinuxStat
case class CPUEach(no: Int, usr: Double, sys: Double, total: Double) extends LinuxStat
case class Pages(in: Long, out: Long) extends LinuxStat
case class Swaps(in: Long, out: Long) extends LinuxStat
case class ContSwitches(no: Long) extends LinuxStat
case class BootTime(t: Long) extends LinuxStat
case class Forks(no: Long) extends LinuxStat
case class Procs(no: Long) extends LinuxStat
case class BlockedProcs(no: Long) extends LinuxStat

/**
 * A companion object for PMLinux.
 * @author Akiyoshi SUGIKI
 */
object HotLinux {
  private val timer = new Timer
}

/**
 *
 * @author Akiyoshi Sugiki
 */
class HotLinux(pm: HotPhysicalMachineImpl) extends HotOSImpl {
  import HotLinux.timer

  private val PathStat = "/proc/stat"
  private val Interval = Config("kumoi.impl.pm.os.statInterval", 1 * 1000)

  private var cpu0 = List.fill(9)(0L)
  private var cpus = Map[Int, List[Long]]()
  private var ctxt0 = 0L
  //private var fork0 = 0L
  private var lstats = List[PMStat]()

  timer.scheduleAtFixedRate(Calc, 0, Interval) // TODO:

  override def stats(implicit auth: AAA) = super.stats ++ lstats

  object Calc extends TimerTask {
    def run() {
      var st = List[PMStat]()
      val src = Source.fromFile(PathStat)
      for (line <- src.getLines()) {
        line.split("\\s+").toList match {
          case "cpu" :: rest =>
            val cpu = rest.map(_.toLong)
            val diff = (cpu, cpu0).zipped.map(_ - _)
            val sum = diff.reduceLeft(_ + _)
            st = CPU((diff(0) + diff(1)) / sum.toDouble, // user' = user + nice
              (diff(2) + diff(4) + diff(5) + diff(6)) / sum.toDouble, // sys' = sys + iowait + irq
              (diff(0) + diff(1) + diff(2) + diff(4) + diff(5) + diff(6)) / sum.toDouble) :: st
            //pm.vmmi.cpui(0)) :: st
            // (sum - diff(3)) / sum.toDouble) :: st // total' = total - idle
            cpu0 = cpu
          //case "cpu?" :: rest =>
          case "ctxt" :: rest =>
            val ctxt = rest.head.toLong
            st = ContSwitches(ctxt - ctxt0) :: st
            ctxt0 = ctxt
          case "btime" :: rest =>
            st = BootTime(rest.head.toLong) :: st
          case "processes" :: rest =>
            st = Forks(rest.head.toLong) :: st
            /* val fork = rest.head.toLong
               st = Forks(fork - fork0) :: st
               fork0 = fork */
          case "procs_running" :: rest =>
            st = Procs(rest.head.toLong) :: st
          case "procs_blocked" :: rest =>
            st = BlockedProcs(rest.head.toLong) :: st
          case _ =>
        }
      }
      lstats = st.reverse
    }
  }

  override def shutdown(implicit auth: AAA) {
    exec("/sbin/shutdown", "-h", "now")
  }

  override def restart(implicit auth: AAA) {
    exec("/sbin/shutdown", "-r", "now")
  }
}
axi-sugiki/kumoi
src/kumoi/impl/os/HotLinux.scala
Scala
apache-2.0
3,658
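The heart of `Calc` above is the delta between two successive `/proc/stat` cpu samples; a standalone sketch of that arithmetic with two made-up samples (all numbers and the object name are illustrative only):

object CpuDeltaSketch extends App {
  // Hypothetical "cpu" rows: user, nice, system, idle, iowait, irq, softirq, steal, guest
  val prev = List(4705L, 150L, 1120L, 16250L, 520L, 20L, 5L, 0L, 0L)
  val curr = List(4827L, 152L, 1180L, 17070L, 530L, 22L, 6L, 0L, 0L)

  val diff = (curr, prev).zipped.map(_ - _)
  val sum = diff.sum.toDouble

  // Mirrors HotLinux: user' = user + nice; sys' = sys + iowait + irq + softirq
  val usr = (diff(0) + diff(1)) / sum
  val sys = (diff(2) + diff(4) + diff(5) + diff(6)) / sum
  println(f"usr=$usr%.3f sys=$sys%.3f busy=${usr + sys}%.3f")
}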
package com.sksamuel.elastic4s.jackson

import com.fasterxml.jackson.core.JsonParser
import com.fasterxml.jackson.databind.module.SimpleModule
import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer, JsonMappingException, ObjectMapper}
import com.fasterxml.jackson.module.scala.experimental.ScalaObjectMapper
import com.sksamuel.elastic4s.RefreshPolicy
import com.sksamuel.elastic4s.testkit.DockerTests
import org.scalatest.{Matchers, WordSpec}

class ElasticJacksonIndexableTest extends WordSpec with Matchers with DockerTests {

  import ElasticJackson.Implicits._

  "ElasticJackson implicits" should {
    "index a case class" in {
      client.execute {
        bulk(
          indexInto("jacksontest" / "characters").source(Character("tyrion", "game of thrones")).withId("1"),
          indexInto("jacksontest" / "characters").source(Character("hank", "breaking bad")).withId("2"),
          indexInto("jacksontest" / "characters").source(Location("dorne", "game of thrones")).withId("3")
        ).refresh(RefreshPolicy.WaitFor)
      }.await
    }
    "read a case class" in {
      val resp = client.execute {
        search("jacksontest").query("breaking")
      }.await.result
      resp.to[Character] shouldBe List(Character("hank", "breaking bad"))
    }
    "populate special fields" in {
      val resp = client.execute {
        search("jacksontest").query("breaking")
      }.await.result
      // should populate _id, _index and _type for us from the search result
      resp.safeTo[CharacterWithIdTypeAndIndex] shouldBe
        List(Right(CharacterWithIdTypeAndIndex("2", "jacksontest", "characters", "hank", "breaking bad")))
    }
    "support custom mapper" in {
      implicit val custom: ObjectMapper with ScalaObjectMapper = new ObjectMapper with ScalaObjectMapper
      val module = new SimpleModule
      module.addDeserializer(classOf[String], new JsonDeserializer[String] {
        override def deserialize(p: JsonParser, ctxt: DeserializationContext): String = sys.error("boom")
      })
      custom.registerModule(module)
      val resp = client.execute {
        search("jacksontest").query("breaking")
      }.await.result
      // if our custom mapper has been picked up, then it should throw an exception when deserializing
      intercept[JsonMappingException] {
        resp.to[Character].toList
      }
    }
  }
}

case class Character(name: String, show: String)
case class CharacterWithIdTypeAndIndex(_id: String, _index: String, _type: String, name: String, show: String)
case class Location(name: String, show: String)
Tecsisa/elastic4s
elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/jackson/ElasticJacksonIndexableTest.scala
Scala
apache-2.0
2,595
package hammock

import cats.~>

trait InterpTrans[F[_]] {
  def trans: HttpF ~> F
}
pepegar/hammock
core/src/main/scala/hammock/InterpTrans.scala
Scala
mit
85
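An interpreter for hammock is a natural transformation from the `HttpF` algebra into a target effect. A shape-only sketch for a hypothetical `Future` interpreter follows; the body is left as `???` because this record doesn't show `HttpF`'s constructors:

import cats.~>
import scala.concurrent.Future

object FutureInterp extends InterpTrans[Future] {
  def trans: HttpF ~> Future = new (HttpF ~> Future) {
    // Pattern-match on HttpF's cases here and perform the actual request.
    def apply[A](fa: HttpF[A]): Future[A] = ???
  }
}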
package controllers

import java.nio.file._
import javax.inject.Inject

import com.github.dockerjava.api.model.NetworkSettings
import com.toscaruntime.exception.UnexpectedException
import com.toscaruntime.rest.client.DockerDaemonClient
import com.toscaruntime.rest.model.{DeploymentInfoDTO, RestResponse}
import com.toscaruntime.util.FileUtil
import com.typesafe.scalalogging.LazyLogging
import play.api.cache._
import play.api.libs.json._
import play.api.libs.ws.{WSClient, WSResponse}
import play.api.mvc._
import play.mvc.Http.MimeTypes

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.collection.JavaConverters._

class ProxyController @Inject()(ws: WSClient, cache: CacheApi) extends Controller with LazyLogging {

  private val bootstrapContextCacheKey: String = "bootstrap_context"

  private val agentURLCacheKeyPrefix: String = "agent_"

  // TODO better do it properly by performing connect operation in bootstrap recipe's relationship
  private lazy val dockerClient = connect(System.getenv("DOCKER_URL"))

  private lazy val bootstrapContextPath =
    Paths.get(play.Play.application().configuration().getString("com.toscaruntime.bootstrapContext"))

  def loadBootstrapContext() = {
    val cached = cache.get[JsObject](bootstrapContextCacheKey)
    if (cached.nonEmpty) {
      cached.get
    } else if (Files.exists(bootstrapContextPath)) {
      logger.info(s"Load bootstrap context from $bootstrapContextPath")
      val persisted = Json.parse(Files.newInputStream(bootstrapContextPath)).as[JsObject]
      cache.set(bootstrapContextCacheKey, persisted)
      persisted
    } else {
      Json.obj()
    }
  }

  def connect(url: String) = {
    if (url != null && url.nonEmpty) {
      new DockerDaemonClient(url, null)
    } else {
      throw new UnexpectedException("Need docker url to initialize toscaruntime proxy")
    }
  }

  def getURL(deploymentId: String): Option[String] = {
    val agentURLKey = agentURLCacheKeyPrefix + deploymentId
    val cachedURL = cache.get[String](agentURLKey)
    if (cachedURL.isEmpty) {
      dockerClient.getAgentInfo(deploymentId).map { agentInfo =>
        val context = loadBootstrapContext()
        val ipAddresses = agentInfo.getNetworkSettings.getNetworks.asScala.map {
          case (networkName: String, network: NetworkSettings.Network) => (networkName, network.getIpAddress)
        }.toMap
        val agentIp = ipAddresses.getOrElse(
          context.value.getOrElse("docker_network_name", JsString("bridge")).asInstanceOf[JsString].value,
          ipAddresses.values.head)
        val agentURL = "http://" + agentIp + ":9000"
        cache.set(agentURLKey, agentURL)
        agentURL
      }
    } else {
      cachedURL
    }
  }

  def saveBootstrapContext() = Action { request =>
    request.body.asJson.map {
      case json @ JsObject(fields) =>
        cache.set(bootstrapContextCacheKey, json.as[JsObject])
        FileUtil.writeTextFile(Json.prettyPrint(json), bootstrapContextPath)
        Ok(s"Bootstrap context saved to $bootstrapContextPath")
      case _ => BadRequest("Expecting a map of value")
    }.getOrElse {
      BadRequest("Expecting Json data")
    }
  }

  def getBootstrapContext = Action { request =>
    Ok(Json.toJson(RestResponse.success[JsObject](Some(loadBootstrapContext()))))
  }

  def list = Action {
    Ok(Json.toJson(RestResponse.success[List[DeploymentInfoDTO]](Some(dockerClient.listDeploymentAgents().values.toList))))
  }

  private def handleWSResponse(response: WSResponse) = {
    response.status match {
      case play.api.http.Status.OK =>
        val contentTypeOpt = response.header(play.api.http.HeaderNames.CONTENT_TYPE)
        val proxyResponse = Ok(response.body)
        // withHeaders returns a new result, so its return value must be used, not discarded
        if (contentTypeOpt.isDefined) proxyResponse.withHeaders((play.api.http.HeaderNames.CONTENT_TYPE, contentTypeOpt.get))
        else proxyResponse
      case play.api.http.Status.BAD_REQUEST => BadRequest(response.body)
      case _ => InternalServerError(s"Encountered unexpected status ${response.status} :\n ${response.body}")
    }
  }

  private def doRedirect(deploymentId: String,
                         path: String,
                         request: Request[AnyContent],
                         webAction: (String, Request[AnyContent]) => Future[WSResponse]) = {
    val url = getURL(deploymentId).map(_ + path)
    url.map { url =>
      webAction(url, request).map(handleWSResponse)
    }.getOrElse(Future(NotFound(s"Deployment id $deploymentId does not exist")))
  }

  private def doRedirectPost(deploymentId: String, path: String, request: Request[AnyContent]) = {
    doRedirect(deploymentId, path, request, (url, request) => {
      val redirectQuery = ws.url(url)
      val allQueryString = request.queryString.flatMap {
        case (key, values) => values.map((key, _))
      }
      request.contentType.getOrElse(MimeTypes.TEXT) match {
        case MimeTypes.JSON =>
          redirectQuery.withQueryString(allQueryString.toSeq: _*).post(request.body.asJson.getOrElse(JsNull))
        case _ =>
          redirectQuery.withQueryString(allQueryString.toSeq: _*).post(request.body.asText.getOrElse(""))
      }
    })
  }

  private def doRedirectGet(deploymentId: String, path: String, request: Request[AnyContent]) = {
    doRedirect(deploymentId, path, request, (url, request) => ws.url(url).get())
  }

  def execute(deploymentId: String) = Action.async { request =>
    doRedirectPost(deploymentId, "/deployment/executions", request)
  }

  def get(deploymentId: String) = Action.async { request =>
    doRedirectGet(deploymentId, "/deployment", request)
  }

  def cancel(deploymentId: String) = Action.async { request =>
    doRedirectPost(deploymentId, "/deployment/executions/cancel", request)
  }

  def resume(deploymentId: String) = Action.async { request =>
    doRedirectPost(deploymentId, "/deployment/executions/resume", request)
  }

  def stop(deploymentId: String) = Action.async { request =>
    doRedirectPost(deploymentId, "/deployment/executions/stop", request)
  }

  def updateRecipe(deploymentId: String) = Action.async { request =>
    doRedirectPost(deploymentId, "/deployment/recipe/update", request)
  }
}
vuminhkh/tosca-runtime
proxy/app/controllers/ProxyController.scala
Scala
mit
6,084
package controllers.admin

import javax.inject.Inject

import controllers.admin
import models.admin.{User, UserDAO}
import play.api.data.Forms._
import play.api.data._
import play.api.i18n.{I18nSupport, MessagesApi}
import play.api.mvc._
import views.html
import play.api.i18n.Messages.Implicits._

/**
 * Created by murat on 7/2/15.
 */
class Auth @Inject() (val messagesApi: MessagesApi) extends Controller with I18nSupport {

  val loginForm = Form(
    mapping(
      "id" -> ignored[Option[Long]](None),
      "username" -> text,
      "password" -> text
    )(User.apply)(User.unapply) verifying ("Invalid username or password", result => result match {
      case User(id, username, password) => check(username, password)
    })
  )

  def check(username: String, password: String) = {
    UserDAO.checkCredentials(username, password)
  }

  def login = Action { implicit request =>
    Ok(html.admin.login(loginForm))
  }

  def authenticate = Action { implicit request =>
    loginForm.bindFromRequest.fold(
      formWithErrors => BadRequest(html.admin.login(formWithErrors)),
      user => Redirect(admin.routes.Application.index()).withSession(Security.username -> user.username)
    )
  }

  def logout = Action {
    Redirect(routes.Auth.login()).withNewSession.flashing(
      "success" -> "You are now logged out."
    )
  }
}

trait Secured {

  case class AuthenticatedRequest[A](user: User, request: Request[A]) extends WrappedRequest(request)

  def Authenticated[A](p: BodyParser[A])(f: AuthenticatedRequest[A] => Result) = {
    Action(p) { request =>
      request.session.get(Security.username).flatMap(u => UserDAO.findByName(u)).map { user =>
        f(AuthenticatedRequest(user, request))
      }.getOrElse(onUnauthorized(request))
    }
  }

  // Overloaded method to use the default body parser
  import play.api.mvc.BodyParsers._

  def Authenticated(f: AuthenticatedRequest[AnyContent] => Result): Action[AnyContent] = {
    Authenticated(parse.anyContent)(f)
  }

  def username(request: RequestHeader) = request.session.get(Security.username)

  def onUnauthorized(request: RequestHeader) = Results.Redirect(routes.Auth.login())

  def withAuth(f: => String => Request[AnyContent] => Result) = {
    Security.Authenticated(username, onUnauthorized) { user =>
      Action(request => f(user)(request))
    }
  }

  /**
   * This method shows how you could wrap the withAuth method to also fetch your user
   * You will need to implement UserDAO.findOneByUsername
   */
  def withUser[A](f: User => Request[AnyContent] => Result) = withAuth { username => implicit request =>
    UserDAO.findByName(username).map { user =>
      f(user)(request)
    }.getOrElse(onUnauthorized(request))
  }
}
mustafin/ent-quiz-server
modules/admin/app/controllers/admin/Auth.scala
Scala
apache-2.0
2,820
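A minimal sketch of consuming the `Secured` helpers above from another controller; the `Dashboard` class and greeting are hypothetical, and `withUser` resolves the session user before the action body runs:

import play.api.mvc._

class Dashboard extends Controller with Secured {
  def index = withUser { user => implicit request =>
    Ok(s"Hello, ${user.username}")
  }
}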
package im.tox.antox.wrapper

class GroupPeer(var name: String, var ignored: Boolean) {

  override def toString: String = name
}
Ansa89/Antox
app/src/main/scala/im/tox/antox/wrapper/GroupPeer.scala
Scala
gpl-3.0
146
package mytest.casematch

import scala.util.Random

/**
 * Created by fqc on 2016/7/15.
 */
object CaseNBA extends App {

  val arr = Array("kobe", "james", "jordan", "rose")

  private val index: Int = Random.nextInt(arr.length)
  index match {
    case 0 => println("kobe")
    case 1 => println("james")
    case 2 => println("jordan")
    case _ => println("not found") // Note: when matching on the index, a catch-all `_` case is required
  }

  private val name: String = arr(Random.nextInt(arr.length))
  name match {
    case "kobe" => println("kobe")
    case "jordan" => println("jordan")
    case "james" => println("james")
    case _ => println("not found")
  }
}
fqc/Scala_sidepro
src/mytest/casematch/CaseNBA.scala
Scala
mit
667
package org.pinky.representation

import _root_.com.thoughtworks.xstream.XStream
import com.thoughtworks.xstream.io.json.JsonHierarchicalStreamDriver

/**
 * Provides Json rendering, using the Jettison lib
 *
 * @author peter hausel gmail com (Peter Hausel)
 */
class JsonRepresentation extends XmlRepresentation {
  xstream = new XStream(new JsonHierarchicalStreamDriver())
  xstream.setMode(XStream.NO_REFERENCES)
}
d5nguyenvan/pinky
src/main/scala/org/pinky/representation/JsonRepresentation.scala
Scala
bsd-3-clause
421
import sbt._

class BuildProject(info: ProjectInfo) extends DefaultProject(info) {
  val mavenLocal = "Local Maven Repository" at "file://" + Path.userHome + "/.m2/repository"
}
inigo/less-scala
project/build/Build.scala
Scala
apache-2.0
174
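In this sbt 0.7-style project definition, library dependencies are declared the same way as resolvers, as vals on the project class; a hedged sketch (the ScalaTest coordinates are just an example, not part of this record):

import sbt._

class BuildProject(info: ProjectInfo) extends DefaultProject(info) {
  val mavenLocal = "Local Maven Repository" at "file://" + Path.userHome + "/.m2/repository"
  val scalatest = "org.scalatest" % "scalatest" % "1.2" % "test" // hypothetical dependency
}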
/*
 * Copyright 2008-present MongoDB, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.mongodb.scala

import java.nio.ByteBuffer

import com.mongodb.async.SingleResultCallback
import com.mongodb.async.client.gridfs.{AsyncInputStream => JAsyncInputStream, AsyncOutputStream => JAsyncOutputStream}

import org.mongodb.scala.internal.ObservableHelper.{observeCompleted, observeInt, observeLong}

package object gridfs {

  /**
   * An exception indicating that a failure occurred in GridFS.
   */
  type MongoGridFSException = com.mongodb.MongoGridFSException

  /**
   * GridFS upload options
   *
   * Customizable options used when uploading files into GridFS
   */
  type GridFSUploadOptions = com.mongodb.client.gridfs.model.GridFSUploadOptions

  /**
   * The GridFSFile
   */
  type GridFSFile = com.mongodb.client.gridfs.model.GridFSFile

  /**
   * The GridFS download by name options
   *
   * Controls the selection of the revision to download
   */
  type GridFSDownloadOptions = com.mongodb.client.gridfs.model.GridFSDownloadOptions

  implicit class JavaAsyncInputStreamToScala(wrapped: JAsyncInputStream) extends AsyncInputStream {

    override def close(): Observable[Completed] =
      observeCompleted(wrapped.close(_: SingleResultCallback[Void]))

    override def read(dst: ByteBuffer): Observable[Int] =
      observeInt(wrapped.read(dst, _: SingleResultCallback[java.lang.Integer]))

    override def skip(bytesToSkip: Long): Observable[Long] =
      observeLong(wrapped.skip(bytesToSkip, _: SingleResultCallback[java.lang.Long]))
  }

  implicit class JavaAsyncOutputStreamToScala(wrapped: JAsyncOutputStream) extends AsyncOutputStream {

    override def close(): Observable[Completed] =
      observeCompleted(wrapped.close(_: SingleResultCallback[Void]))

    override def write(src: ByteBuffer): Observable[Int] =
      observeInt(wrapped.write(src, _: SingleResultCallback[java.lang.Integer]))
  }

  implicit class ScalaAsyncInputStreamToJava(wrapped: AsyncInputStream) extends JAsyncInputStream {
    // scalastyle:off null
    override def close(callback: SingleResultCallback[Void]): Unit = wrapped.close().subscribe(
      (_: Completed) => (),
      (e: Throwable) => callback.onResult(null, e),
      () => callback.onResult(null, null)
    )

    override def read(dst: ByteBuffer, callback: SingleResultCallback[Integer]): Unit = wrapped.read(dst).subscribe(
      new Observer[Int] {
        var bytesRead: Option[Int] = None

        override def onError(e: Throwable): Unit = callback.onResult(null, e)

        override def onComplete(): Unit = bytesRead.foreach(callback.onResult(_, null))

        override def onNext(result: Int): Unit = bytesRead = Some(result)
      }
    )

    override def skip(bytesToSkip: Long, callback: SingleResultCallback[java.lang.Long]): Unit =
      wrapped.skip(bytesToSkip).subscribe(
        new Observer[Long] {
          var bytesSkipped: Option[Long] = None

          override def onError(e: Throwable): Unit = callback.onResult(null, e)

          override def onComplete(): Unit = callback.onResult(bytesSkipped.getOrElse(0L).asInstanceOf[java.lang.Long], null)

          override def onNext(result: Long): Unit = bytesSkipped = Some(result)
        }
      )
    // scalastyle:on null
  }

  implicit class ScalaAsyncOutputStreamToJava(wrapped: AsyncOutputStream) extends JAsyncOutputStream {
    // scalastyle:off null
    override def write(src: ByteBuffer, callback: SingleResultCallback[Integer]): Unit = wrapped.write(src).subscribe(
      new Observer[Int] {
        var bytesWritten: Option[Int] = None

        override def onError(e: Throwable): Unit = callback.onResult(null, e)

        override def onComplete(): Unit = bytesWritten.foreach(callback.onResult(_, null))

        override def onNext(result: Int): Unit = bytesWritten = Some(result)
      }
    )

    override def close(callback: SingleResultCallback[Void]): Unit = wrapped.close().subscribe(
      (_: Completed) => (),
      (e: Throwable) => callback.onResult(null, e),
      () => callback.onResult(null, null)
    )
    // scalastyle:on null
  }
}
rozza/mongo-scala-driver
driver/src/main/scala/org/mongodb/scala/gridfs/package.scala
Scala
apache-2.0
4,588
package org.allenai.common

import org.slf4j.LoggerFactory

/** This trait is meant to be mixed into a class to provide logging.
  *
  * The enclosed methods provide a Scala-style logging signature where the
  * message is a block instead of a string. This way the message string is
  * not constructed unless the message will be logged.
  */
trait Logging {
  val internalLogger = LoggerFactory.getLogger(this.getClass)

  object logger { // scalastyle:ignore

    def trace(message: => String): Unit =
      if (internalLogger.isTraceEnabled) {
        internalLogger.trace(message)
      }

    def debug(message: => String): Unit =
      if (internalLogger.isDebugEnabled) {
        internalLogger.debug(message)
      }

    def info(message: => String): Unit =
      if (internalLogger.isInfoEnabled) {
        internalLogger.info(message)
      }

    def warn(message: => String): Unit =
      if (internalLogger.isWarnEnabled) {
        internalLogger.warn(message)
      }

    def warn(message: => String, throwable: Throwable): Unit =
      if (internalLogger.isWarnEnabled) {
        internalLogger.warn(message, throwable)
      }

    def error(message: => String): Unit =
      if (internalLogger.isErrorEnabled) {
        internalLogger.error(message)
      }

    def error(message: => String, throwable: Throwable): Unit =
      if (internalLogger.isErrorEnabled) {
        internalLogger.error(message, throwable)
      }
  }
}
cristipp/common
core/src/main/scala/org/allenai/common/Logging.scala
Scala
apache-2.0
1,448
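Usage sketch: mix the trait in and pass messages as blocks; because `message` is by-name, the interpolated strings below are never built unless the corresponding level is enabled (the class and messages are hypothetical):

class Indexer extends org.allenai.common.Logging {
  def run(docs: Seq[String]): Unit = {
    logger.info(s"indexing ${docs.size} documents")      // built only if INFO is enabled
    docs.foreach(d => logger.debug(s"indexing doc $d"))  // built only if DEBUG is enabled
  }
}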
package com.eevolution.context.dictionary.infrastructure.repository

import com.eevolution.context.dictionary.domain.model.SchedulerLog
import com.eevolution.context.dictionary.infrastructure.db.DbContext._

/**
 * Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 * Email: eduardo.moreno@e-evolution.com, http://www.e-evolution.com , http://github.com/e-Evolution
 * Created by eduardo.moreno@e-evolution.com , www.e-evolution.com
 */

/**
 * Scheduler Log Mapping
 */
trait SchedulerLogMapping {
  val querySchedulerLog = quote {
    querySchema[SchedulerLog]("AD_SchedulerLog",
      _.schedulerId -> "AD_Scheduler_ID",
      _.schedulerLogId -> "AD_SchedulerLog_ID",
      _.tenantId -> "AD_Client_ID",
      _.organizationId -> "AD_Org_ID",
      _.isActive -> "IsActive",
      _.created -> "Created",
      _.createdBy -> "CreatedBy",
      _.updated -> "Updated",
      _.updatedBy -> "UpdatedBy",
      _.isError -> "IsError",
      _.summary -> "Summary",
      _.reference -> "Reference",
      _.description -> "Description",
      _.textMsg -> "TextMsg",
      _.binaryData -> "BinaryData",
      _.uuid -> "UUID")
  }
}
adempiere/ADReactiveSystem
dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/SchedulerLogMapping.scala
Scala
gpl-3.0
1,824
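A sketch of how such a Quill mapping is typically consumed; the repository trait, the method name, and the `Int` id type are assumptions, while `run`, `lift`, and the quoted schema come from the `DbContext._` import used above:

import com.eevolution.context.dictionary.infrastructure.db.DbContext._

trait SchedulerLogRepository extends SchedulerLogMapping {
  // Hypothetical helper: fetch all log rows for one scheduler.
  def logsForScheduler(schedulerId: Int) =
    run(querySchedulerLog.filter(_.schedulerId == lift(schedulerId)))
}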
/*
 * sbt
 * Copyright 2011 - 2018, Lightbend, Inc.
 * Copyright 2008 - 2010, Mark Harrah
 * Licensed under Apache License 2.0 (see LICENSE)
 */

package sbt.internal.util
package complete

import java.lang.Character.{ toLowerCase => lower }

/** @author Paul Phillips */
object EditDistance {

  /**
   * Translated from the java version at
   * http://www.merriampark.com/ld.htm
   * which is declared to be public domain.
   */
  def levenshtein(
      s: String,
      t: String,
      insertCost: Int = 1,
      deleteCost: Int = 1,
      subCost: Int = 1,
      transposeCost: Int = 1,
      matchCost: Int = 0,
      caseCost: Int = 1,
      transpositions: Boolean = false
  ): Int = {
    val _ = transposeCost
    val n = s.length
    val m = t.length
    if (n == 0) return m
    if (m == 0) return n

    val d = Array.ofDim[Int](n + 1, m + 1)
    0 to n foreach (x => d(x)(0) = x)
    0 to m foreach (x => d(0)(x) = x)

    for (i <- 1 to n; s_i = s(i - 1); j <- 1 to m) {
      val t_j = t(j - 1)
      val cost =
        if (s_i == t_j) matchCost
        else if (lower(s_i) == lower(t_j)) caseCost
        else subCost

      val c1 = d(i - 1)(j) + deleteCost
      val c2 = d(i)(j - 1) + insertCost
      val c3 = d(i - 1)(j - 1) + cost

      d(i)(j) = c1 min c2 min c3

      if (transpositions) {
        if (i > 1 && j > 1 && s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1))
          d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost)
      }
    }

    d(n)(m)
  }
}
sbt/sbt
internal/util-complete/src/main/scala/sbt/internal/util/complete/EditDistance.scala
Scala
apache-2.0
1,463
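Usage sketch for the record above: "kitten" to "sitting" is the classic three-edit example, and enabling `transpositions` makes a swapped adjacent pair cost one edit instead of two:

assert(EditDistance.levenshtein("kitten", "sitting") == 3)
assert(EditDistance.levenshtein("ab", "ba") == 2)
assert(EditDistance.levenshtein("ab", "ba", transpositions = true) == 1)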
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.api.stream.table.stringexpr

import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.expressions.utils.Func23
import org.apache.flink.table.utils.TableTestBase
import org.junit.Test

class CalcStringExpressionTest extends TableTestBase {

  @Test
  def testSimpleSelect(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]()
    val resScala = t.select('_1, '_2)
    val resJava = t.select("_1, _2")
    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testSelectStar(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('int, 'long, 'string)
    val resScala = t.select('*)
    val resJava = t.select("*")
    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testSelectWithWhere(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('int, 'long, 'string)
    val resScala = t.where('string === "true").select('int)
    val resJava = t.where("string === 'true'").select("int")
    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testSimpleSelectWithNaming(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('int, 'long, 'string)
    val resScala = t.select('int, 'string)
    val resJava = t.select("int, string")
    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testSimpleSelectWithAlias(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('int, 'long, 'string)
    val resScala = t.select('int as 'myInt, 'string as 'myString)
    val resJava = t.select("int as myInt, string as myString")
    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testSimpleFilter(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('int, 'long, 'string)
    val resScala = t.filter('int === 3).select('int as 'myInt, 'string)
    val resJava = t.filter("int === 3").select("int as myInt, string")
    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testAllRejectingFilter(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('int, 'long, 'string)
    val resScala = t.filter(false).select('int as 'myInt, 'string)
    val resJava = t.filter("false").select("int as myInt, string")
    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testAllPassingFilter(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('int, 'long, 'string)
    val resScala = t.filter(true).select('int as 'myInt, 'string)
    val resJava = t.filter("true").select("int as myInt, string")
    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testNotEqualsFilter(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('int, 'long, 'string)
    val resScala = t.filter('int !== 2).filter('string.like("%world%")).select('int, 'string)
    val resJava = t.filter("int !== 2").filter("string.like('%world%')").select("int, string")
    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testFilterWithExpression(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]('int, 'long, 'string)
    val resScala = t.filter('int % 2 === 0).select('int, 'string)
    val resJava = t.filter("int % 2 === 0").select("int, string")
    verifyTableEquals(resJava, resScala)
  }

  @Test
  def testAddColumns(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]("Table3", 'a, 'b, 'c)
    val t1 = t.addColumns(concat('c, "Sunny") as 'kid).addColumns('b + 1)
    val t2 = t.addColumns("concat(c, 'Sunny') as kid").addColumns("b + 1")
    verifyTableEquals(t1, t2)
  }

  @Test
  def addOrReplaceColumns(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]("Table3", 'a, 'b, 'c)
    val t1 = t.addOrReplaceColumns(concat('c, "Sunny") as 'kid).addColumns('b + 1)
    val t2 = t.addOrReplaceColumns("concat(c, 'Sunny') as kid").addColumns("b + 1")
    verifyTableEquals(t1, t2)
  }

  @Test
  def testRenameColumns(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]("Table3", 'a, 'b, 'c)
    val t1 = t.renameColumns('a as 'a2, 'c as 'c2)
    val t2 = t.renameColumns("a as a2, c as c2")
    verifyTableEquals(t1, t2)
  }

  @Test
  def testDropColumns(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]("Table3", 'a, 'b, 'c)
    val t1 = t.dropColumns('a, 'c)
    val t2 = t.dropColumns("a,c")
    verifyTableEquals(t1, t2)
  }

  @Test
  def testMap(): Unit = {
    val util = streamTestUtil()
    val t = util.addTable[(Int, Long, String)]("Table3", 'a, 'b, 'c)
    util.tableEnv.registerFunction("func", Func23)
    val t1 = t.map("func(a, b, c)")
    val t2 = t.map(Func23('a, 'b, 'c))
    verifyTableEquals(t1, t2)
  }
}
GJL/flink
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/stringexpr/CalcStringExpressionTest.scala
Scala
apache-2.0
5,761
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.api.scala.operators

import org.apache.flink.api.scala.util.CollectionDataSets
import org.apache.flink.test.util.MultipleProgramsTestBase.TestExecutionMode
import org.apache.flink.test.util.{TestBaseUtils, MultipleProgramsTestBase}
import org.junit._
import org.junit.runner.RunWith
import org.junit.runners.Parameterized

import org.apache.flink.api.scala._

import scala.collection.mutable.ArrayBuffer

@RunWith(classOf[Parameterized])
class UnionITCase(mode: TestExecutionMode) extends MultipleProgramsTestBase(mode) {

  private var result: Seq[String] = null
  private var expected: String = null

  private final val FULL_TUPLE_3_STRING: String =
    "(1,1,Hi)\n" + "(2,2,Hello)\n" + "(3,2,Hello world)\n" +
    "(4,3,Hello world, how are you?)\n" + "(5,3,I am fine.)\n" + "(6,3,Luke Skywalker)\n" +
    "(7,4,Comment#1)\n" + "(8,4,Comment#2)\n" + "(9,4,Comment#3)\n" + "(10,4,Comment#4)\n" +
    "(11,5,Comment#5)\n" + "(12,5,Comment#6)\n" + "(13,5,Comment#7)\n" + "(14,5,Comment#8)\n" +
    "(15,5,Comment#9)\n" + "(16,6,Comment#10)\n" + "(17,6,Comment#11)\n" + "(18,6,Comment#12)\n" +
    "(19,6,Comment#13)\n" + "(20,6,Comment#14)\n" + "(21,6,Comment#15)\n"

  @After
  def after(): Unit = {
    import collection.JavaConverters._
    TestBaseUtils.compareResultAsText(ArrayBuffer(result: _*).asJava, expected)
  }

  @Test
  def testUnionOf2IdenticalDS(): Unit = {
    /*
     * Union of 2 Same Data Sets
     */
    val env = ExecutionEnvironment.getExecutionEnvironment
    val ds = CollectionDataSets.get3TupleDataSet(env)
    val unionDs = ds.union(CollectionDataSets.get3TupleDataSet(env))

    result = unionDs.collect().map(_.toString)
    expected = FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING
  }

  @Test
  def testUnionOf5IdenticalDSWithMultipleUnions(): Unit = {
    /*
     * Union of 5 same Data Sets, with multiple unions
     */
    val env = ExecutionEnvironment.getExecutionEnvironment
    val ds = CollectionDataSets.get3TupleDataSet(env)
    val unionDs = ds
      .union(CollectionDataSets.get3TupleDataSet(env))
      .union(CollectionDataSets.get3TupleDataSet(env))
      .union(CollectionDataSets.get3TupleDataSet(env))
      .union(CollectionDataSets.get3TupleDataSet(env))

    result = unionDs.collect().map(_.toString)
    expected = FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING +
      FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING
  }

  @Test
  def testUnionWithEmptyDS(): Unit = {
    /*
     * Test on union with empty dataset
     */
    val env = ExecutionEnvironment.getExecutionEnvironment
    // Don't know how to make an empty result in another way than filtering it
    val empty = CollectionDataSets.get3TupleDataSet(env).filter(t => false)
    val unionDs = CollectionDataSets.get3TupleDataSet(env).union(empty)

    result = unionDs.collect().map(_.toString())
    expected = FULL_TUPLE_3_STRING
  }

  @Test
  def testUnionWithOptionType(): Unit = {
    /*
     * Union of a tuple with an Option field
     */
    val env = ExecutionEnvironment.getExecutionEnvironment
    val data = Seq((Some(1), 1), (None, -1), (Some(42), 42))
    val input1 = env.fromCollection(data)
    val input2 = env.fromCollection(data)

    result = input1.union(input2).collect().map(_.toString())
    expected = data ++ data mkString("\n")
  }
}
hequn8128/flink
flink-tests/src/test/scala/org/apache/flink/api/scala/operators/UnionITCase.scala
Scala
apache-2.0
4,154
package org.jetbrains.plugins.scala.traceLogViewer.selection

import com.intellij.util.ui.{ColumnInfo, ListTableModel}
import org.jetbrains.plugins.scala.traceLogger.TraceLog

import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{Files, Path}
import java.time.format.DateTimeFormatter
import java.time.{Duration, Instant, ZoneId}
import java.util.Comparator
import scala.jdk.CollectionConverters.*
import scala.jdk.StreamConverters.StreamHasToScala
import scala.util.Try

private object TraceLogSelectionModel
    extends ListTableModel[Entry](Entry.nameColumn, Entry.dateColumn) {

  final def refresh(): Option[Path] = {
    val prevItems = getItems.asScala
    val items = listEntries()
    setItems(items.asJava)

    val newItems = items.toSet -- prevItems
    if (newItems.size == 1) newItems.headOption.map(_.path)
    else None
  }

  private def listEntries(): Seq[Entry] = {
    val paths = Try(Files.list(TraceLog.loggerOutputPath).toScala(Seq))
      .getOrElse(Seq.empty)
    for (path <- paths) yield {
      val attr = Try(Files.readAttributes(path, classOf[BasicFileAttributes])).toOption
      Entry(
        path.getFileName.toString,
        path,
        Instant.ofEpochMilli(attr.fold(0L)(_.lastModifiedTime().toMillis))
      )
    }
  }
}

private case class Entry(name: String, path: Path, date: Instant)

//noinspection ScalaExtractStringToBundle
private object Entry {
  def nameColumn: ColumnInfo[Entry, String] = new ColumnInfo[Entry, String]("Log") {
    override def valueOf(item: Entry): String = item.name
    override def getComparator: Comparator[Entry] = Comparator.comparing(_.name)
  }

  def dateColumn: ColumnInfo[Entry, String] = new ColumnInfo[Entry, String]("Created") {
    private val formatter = DateTimeFormatter.ofPattern("hh:mm (()) (dd.MM.yyyy)")
      .withZone(ZoneId.systemDefault())

    override def valueOf(item: Entry): String = {
      val date = item.date
      val daysAgo = Duration.between(date, Instant.now()).toDays
      val ago = daysAgo match {
        case 0 => "today"
        case 1 => "yesterday"
        case _ => s""
      }
      formatter.format(item.date).replace("(())", ago)
    }

    override def getComparator: Comparator[Entry] = Comparator.comparing(_.date)
  }
}
JetBrains/intellij-scala
scala/traceLogViewer/src/org/jetbrains/plugins/scala/traceLogViewer/selection/TraceLogSelectionModel.scala
Scala
apache-2.0
2,267
package play.api.libs.iteratee import scala.concurrent.Future import scala.concurrent.Promise import scala.util.{Try, Failure, Success} import Enumerator.Pushee import java.util.concurrent.{ TimeUnit } import play.api.libs.iteratee.internal.defaultExecutionContext /** * Utilities for concurrent usage of iteratees, enumerators and enumeratees. */ object Concurrent { private val timer = new java.util.Timer() private def timeoutFuture[A](v:A, delay:Long, unit:TimeUnit):Future[A] = { val p = Promise[A]() timer.schedule( new java.util.TimerTask{ def run(){ p.success(v) } },unit.toMillis(delay) ) p.future } /** * A channel for imperative style feeding of input into one or more iteratees. */ trait Channel[E] { /** * Push an input chunk into this channel * * @param chunk The chunk to push */ def push(chunk: Input[E]) /** * Push an item into this channel * * @param item The item to push */ def push(item: E) { push(Input.El(item)) } /** * Send a failure to this channel. This results in any promises that the enumerator associated with this channel * produced being redeemed with a failure. * * @param e The failure. */ def end(e: Throwable) /** * End the input for this channel. This results in any promises that the enumerator associated with this channel * produced being redeemed. * * Note that an EOF won't be sent, so any iteratees consuming this channel will still be able to consume input * (if they are in the cont state). */ def end() /** * Send an EOF to the channel, and then end the input for the channel. */ def eofAndEnd() { push(Input.EOF) end() } } /** * Create an enumerator and channel for broadcasting input to many iteratees. * * This is intended for imperative style push input feeding into iteratees. For example: * * {{{ * val (chatEnumerator, chatChannel) = Concurrent.broadcast[String] * val chatClient1 = Iteratee.foreach[String](m => println("Client 1: " + m)) * val chatClient2 = Iteratee.foreach[String](m => println("Client 2: " + m)) * chatEnumerator |>>> chatClient1 * chatEnumerator |>>> chatClient2 * * chatChannel.push(Message("Hello world!")) * }}} */ def broadcast[E]: (Enumerator[E], Channel[E]) = { import scala.concurrent.stm._ val iteratees: Ref[List[(Iteratee[E, _], Promise[Iteratee[E, _]])]] = Ref(List()) def step(in: Input[E]): Iteratee[E, Unit] = { val interested = iteratees.single.swap(List()) val ready = interested.map { case (it, p) => it.fold { case Step.Done(a, e) => Future.successful(Left(Done(a, e))) case Step.Cont(k) => { val next = k(in) next.pureFold { case Step.Done(a, e) => Left(Done(a, e)) case Step.Cont(k) => Right((Cont(k), p)) case Step.Error(msg, e) => Left(Error(msg, e)) } } case Step.Error(msg, e) => Future.successful(Left(Error(msg, e))) }.map { case Left(s) => p.success(s) None case Right(s) => Some(s) }.recover { case e:Throwable => p.failure(e) None } } Iteratee.flatten(Future.sequence(ready).map { commitReady => val downToZero = atomic { implicit txn => iteratees.transform(commitReady.collect { case Some(s) => s } ++ _) (interested.length > 0 && iteratees().length <= 0) } if (in == Input.EOF) Done((), Input.Empty) else Cont(step) }) } val redeemed = Ref(None: Option[Try[Unit]]) val enumerator = new Enumerator[E] { def apply[A](it: Iteratee[E, A]): Future[Iteratee[E, A]] = { val result = Promise[Iteratee[E, A]]() val finished = atomic { implicit txn => redeemed() match { case None => iteratees.transform(_ :+ ((it, (result: Promise[Iteratee[E, A]]).asInstanceOf[Promise[Iteratee[E, _]]]))) None case Some(notWaiting) => Some(notWaiting) } } finished.foreach 
{ case Success(_) => result.success(it) case Failure(e) => result.failure(e) } result.future } } val mainIteratee = Ref(Cont(step)) val toPush = new Channel[E] { def push(chunk: Input[E]) { val itPromise = Promise[Iteratee[E, Unit]]() val current: Iteratee[E, Unit] = mainIteratee.single.swap(Iteratee.flatten(itPromise.future)) val next = current.pureFold { case Step.Done(a, e) => Done(a, e) case Step.Cont(k) => k(chunk) case Step.Error(msg, e) => Error(msg, e) } next.onComplete { case Success(it) => itPromise.success(it) case Failure(e) => { val its = atomic { implicit txn => redeemed() = Some(Failure(e)) iteratees.swap(List()) } itPromise.failure(e) its.foreach { case (it, p) => p.success(it) } } } } def end(e: Throwable) { val current: Iteratee[E, Unit] = mainIteratee.single.swap(Done((), Input.Empty)) def endEveryone() = { val its = atomic { implicit txn => redeemed() = Some(Failure(e)) iteratees.swap(List()) } its.foreach { case (it, p) => p.failure(e) } } current.pureFold { case _ => endEveryone() } } def end() { val current: Iteratee[E, Unit] = mainIteratee.single.swap(Done((), Input.Empty)) def endEveryone() = { val its = atomic { implicit txn => redeemed() = Some(Success(())) iteratees.swap(List()) } its.foreach { case (it, p) => p.success(it) } } current.pureFold { case _ => endEveryone() } } } (enumerator, toPush) } /** * Enumeratee that times out if the iteratee it feeds to takes to long to consume available input. * * @param timeout The timeout period * @param unit the time unit */ def lazyAndErrIfNotReady[E](timeout: Long, unit: TimeUnit = TimeUnit.MILLISECONDS): Enumeratee[E, E] = new Enumeratee[E, E] { def applyOn[A](inner: Iteratee[E, A]): Iteratee[E, Iteratee[E, A]] = { def step(it: Iteratee[E, A]): K[E, Iteratee[E, A]] = { case Input.EOF => Done(it, Input.EOF) case other => Iteratee.flatten( Future.firstCompletedOf( it.unflatten.map(Left(_)) :: timeoutFuture(Right(()), timeout, unit) :: Nil ).map { case Left(Step.Cont(k)) => Cont(step(k(other))) case Left(done) => Done(done.it, other) case Right(_) => Error("iteratee is taking too long", other) } ) } Cont(step(inner)) } } /** * A buffering enumeratee. * * Maintains a buffer of maximum size maxBuffer, consuming as much of the input as the buffer will allow as quickly * as it comes, while allowing the iteratee it feeds to consume it as slowly as it likes. * * This is useful in situations where the enumerator holds expensive resources open, while the iteratee may be slow, * for example if the enumerator is a database result set that holds a transaction open, but the result set is being * serialised and fed directly to an HTTP response. * * @param maxBuffer The maximum number of items to buffer */ def buffer[E](maxBuffer: Int): Enumeratee[E, E] = buffer[E](maxBuffer, length = (_: Input[E]) => 1) /** * A buffering enumeratee. * * Maintains a buffer of maximum size maxBuffer, consuming as much of the input as the buffer will allow as quickly * as it comes, while allowing the iteratee it feeds to consume it as slowly as it likes. * * This is useful in situations where the enumerator holds expensive resources open, while the iteratee may be slow, * for example if the enumerator is a database result set that holds a transaction open, but the result set is being * serialised and fed directly to an HTTP response. * * @param maxBuffer The maximum size to buffer. The size is computed using the given `length` function. 
* @param length A function that computes the length of an input item */ def buffer[E](maxBuffer: Int, length: Input[E] => Int): Enumeratee[E, E] = new Enumeratee[E, E] { import scala.collection.immutable.Queue import scala.concurrent.stm._ import play.api.libs.iteratee.Enumeratee.CheckDone def applyOn[A](it: Iteratee[E, A]): Iteratee[E, Iteratee[E, A]] = { val last = Promise[Iteratee[E, Iteratee[E, A]]]() sealed trait State case class Queueing(q: Queue[Input[E]], length: Long) extends State case class Waiting(p: scala.concurrent.Promise[Input[E]]) extends State case class DoneIt(s: Iteratee[E, Iteratee[E, A]]) extends State val state: Ref[State] = Ref(Queueing(Queue[Input[E]](), 0)) def step: K[E, Iteratee[E, A]] = { case in @ Input.EOF => state.single.getAndTransform { case Queueing(q, l) => Queueing(q.enqueue(in), l) case Waiting(p) => Queueing(Queue(), 0) case d @ DoneIt(it) => d } match { case Waiting(p) => p.success(in) case _ => } Iteratee.flatten(last.future) case other => val chunkLength = length(other) val s = state.single.getAndTransform { case Queueing(q, l) if maxBuffer > 0 && l <= maxBuffer => Queueing(q.enqueue(other), l + chunkLength) case Queueing(q, l) => Queueing(Queue(Input.EOF), l) case Waiting(p) => Queueing(Queue(), 0) case d @ DoneIt(it) => d } s match { case Waiting(p) => p.success(other) Cont(step) case DoneIt(it) => it case Queueing(q, l) if maxBuffer > 0 && l <= maxBuffer => Cont(step) case Queueing(_, _) => Error("buffer overflow", other) } } def moreInput[A](k: K[E, A]): Iteratee[E, Iteratee[E, A]] = { val in: Future[Input[E]] = atomic { implicit txn => state() match { case Queueing(q, l) => if (!q.isEmpty) { val (e, newB) = q.dequeue state() = Queueing(newB, l - length(e)) Future.successful(e) } else { val p = Promise[Input[E]]() state() = Waiting(p) p.future } case _ => throw new Exception("can't get here") } } Iteratee.flatten(in.map { in => (new CheckDone[E, E] { def continue[A](cont: K[E, A]) = moreInput(cont) } &> k(in)) }) } (new CheckDone[E, E] { def continue[A](cont: K[E, A]) = moreInput(cont) } &> it).unflatten.onComplete { case Success(it) => state.single() = DoneIt(it.it) last.success(it.it) case Failure(e) => state.single() = DoneIt(Iteratee.flatten(Future.failed[Iteratee[E, Iteratee[E, A]]](e))) last.failure(e) } Cont(step) } } /** * An enumeratee that consumes all input immediately, and passes it to the iteratee only if the iteratee is ready to * handle it within the given timeout, otherwise it drops it. 
* * @param duration The time to wait for the iteratee to be ready * @param unit The timeunit */ def dropInputIfNotReady[E](duration: Long, unit: java.util.concurrent.TimeUnit = java.util.concurrent.TimeUnit.MILLISECONDS): Enumeratee[E, E] = new Enumeratee[E, E] { val busy = scala.concurrent.stm.Ref(false) def applyOn[A](it: Iteratee[E, A]): Iteratee[E, Iteratee[E, A]] = { def step(inner: Iteratee[E, A])(in: Input[E]): Iteratee[E, Iteratee[E, A]] = { in match { case Input.EOF => Done(inner, Input.Empty) case in => if (!busy.single()) { val readyOrNot: Future[Either[Iteratee[E, Iteratee[E, A]], Unit]] = Future.firstCompletedOf( Seq( inner.pureFold[Iteratee[E, Iteratee[E, A]]] { case Step.Done(a, e) => Done(Done(a, e), Input.Empty) case Step.Cont(k) => Cont { in => val next = k(in) Cont(step(next)) } case Step.Error(msg, e) => Done(Error(msg, e), Input.Empty) }.map(i => { busy.single() = false; i }).map(Left(_)), timeoutFuture(Right(()), duration, unit) ) ) Iteratee.flatten(readyOrNot.map { case Left(ready) => Iteratee.flatten(ready.feed(in)) case Right(_) => busy.single() = true Cont(step(inner)) }) } else Cont(step(inner)) } } Cont(step(it)) } } /** * Create an enumerator that allows imperative style pushing of input into a single iteratee. * * The enumerator may be used multiple times, each time will cause a new invocation of `onStart`, which will pass a * [[play.api.libs.iteratee.Concurrent.Channel]] that can be used to feed input into the iteratee. However, note that * there is no way for the caller to know which iteratee is finished or encountered an error in the `onComplete` or * `onError` functions. * * @param onStart Called when an enumerator is applied to an iteratee, providing the channel to feed input into that * iteratee. * @param onComplete Called when an iteratee is done. * @param onError Called when an iteratee encounters an error, supplying the error and the input that caused the error. 
* @return */ def unicast[E]( onStart: Channel[E] => Unit, onComplete: => Unit = (), onError: (String, Input[E]) => Unit = (_: String, _: Input[E]) => ()) = new Enumerator[E] { import scala.concurrent.stm.Ref def apply[A](it: Iteratee[E, A]): Future[Iteratee[E, A]] = { val promise: scala.concurrent.Promise[Iteratee[E, A]] = Promise[Iteratee[E, A]]() val iteratee: Ref[Future[Option[Input[E] => Iteratee[E, A]]]] = Ref(it.pureFold { case Step.Cont(k) => Some(k); case other => promise.success(other.it); None}) val pushee = new Channel[E] { def close() { iteratee.single.swap(Future.successful(None)).onComplete{ case Success(maybeK) => maybeK.foreach { k => promise.success(k(Input.EOF)) } case Failure(e) => promise.failure(e) } } def end(e: Throwable) { iteratee.single.swap(Future.successful(None)).onComplete { case Success(maybeK) => maybeK.foreach(_ => promise.failure(e)) case Failure(e) => promise.failure(e) } } def end() { iteratee.single.swap(Future.successful(None)).onComplete { maybeK => maybeK.get.foreach(k => promise.success(Cont(k))) } } def push(item: Input[E]) { val eventuallyNext = Promise[Option[Input[E] => Iteratee[E,A]]]() iteratee.single.swap(eventuallyNext.future).onComplete { case Success(None) => eventuallyNext.success(None) case Success(Some(k)) => val n = { val next = k(item) next.pureFold { case Step.Done(a, in) => { onComplete promise.success(next) None } case Step.Error(msg, e) => onError(msg, e) promise.success(next) None case Step.Cont(k) => Some(k) } } eventuallyNext.completeWith(n) case Failure(e) => promise.failure(e) eventuallyNext.success(None) } } } onStart(pushee) promise.future } } /** * Create a broadcaster from the given enumerator. This allows iteratees to attach (and unattach by returning a done * state) to a single enumerator. Iteratees will only receive input sent from the enumerator after they have * attached to the broadcasting enumerator. * * @param e The enumerator to broadcast * @param interestIsDownToZero Function that is invoked when all iteratees are done. May be invoked multiple times. * @return A tuple of the broadcasting enumerator, that can be applied to each iteratee that wants to receive the * input, and the broadcaster. */ def broadcast[E](e: Enumerator[E], interestIsDownToZero: Broadcaster => Unit = _ => ()): (Enumerator[E], Broadcaster) = { lazy val h: Hub[E] = hub(e, () => interestIsDownToZero(h)); (h.getPatchCord(), h) } /** * A broadcaster. Used to control a broadcasting enumerator. */ trait Broadcaster { /** * Are there any iteratees that are still receiving input? */ def noCords(): Boolean /** * Close the broadcasting enumerator. */ def close() /** * Whether this broadcaster is closed. 
*/ def closed(): Boolean } @scala.deprecated("use Concurrent.broadcast instead", "2.1.0") trait Hub[E] extends Broadcaster { def getPatchCord(): Enumerator[E] } @scala.deprecated("use Concurrent.broadcast instead", "2.1.0") def hub[E](e: Enumerator[E], interestIsDownToZero: () => Unit = () => ()): Hub[E] = { import scala.concurrent.stm._ val iteratees: Ref[List[(Iteratee[E, _], Promise[Iteratee[E, _]])]] = Ref(List()) val started = Ref(false) var closeFlag = false def step(in: Input[E]): Iteratee[E, Unit] = { val interested: List[(Iteratee[E, _], Promise[Iteratee[E, _]])] = iteratees.single.swap(List()) val commitReady: Ref[List[(Int, (Iteratee[E, _], Promise[Iteratee[E, _]]))]] = Ref(List()) val commitDone: Ref[List[Int]] = Ref(List()) val ready = interested.zipWithIndex.map { case (t, index) => val p = t._2 t._1.fold { case Step.Done(a, e) => p.success(Done(a, e)) commitDone.single.transform(_ :+ index) Future.successful(()) case Step.Cont(k) => val next = k(in) next.pureFold { case Step.Done(a, e) => { p.success(Done(a, e)) commitDone.single.transform(_ :+ index) } case Step.Cont(k) => commitReady.single.transform(_ :+ (index, (Cont(k), p))) case Step.Error(msg, e) => { p.success(Error(msg, e)) commitDone.single.transform(_ :+ index) } } case Step.Error(msg, e) => p.success(Error(msg, e)) commitDone.single.transform(_ :+ index) Future.successful(()) }.andThen { case Success(a) => a case Failure(e) => p.failure(e) } }.fold(Future.successful(())) { (s, p) => s.flatMap(_ => p) } Iteratee.flatten(ready.map { _ => val downToZero = atomic { implicit txn => val ready = commitReady().toMap iteratees.transform(commitReady().map(_._2) ++ _) (interested.length > 0 && iteratees().length <= 0) } if (downToZero) interestIsDownToZero() if (in == Input.EOF || closeFlag) Done((), Input.Empty) else Cont(step) }) } new Hub[E] { def noCords() = iteratees.single().isEmpty def close() { closeFlag = true } def closed() = closeFlag val redeemed = Ref(None: Option[Try[Iteratee[E, Unit]]]) def getPatchCord() = new Enumerator[E] { def apply[A](it: Iteratee[E, A]): Future[Iteratee[E, A]] = { val result = Promise[Iteratee[E, A]]() val alreadyStarted = !started.single.compareAndSet(false, true) if (!alreadyStarted) { val promise = (e |>> Cont(step)) promise.onComplete { v => val its = atomic { implicit txn => redeemed() = Some(v) iteratees.swap(List()) } v match { case Failure(e) => its.foreach { case (_, p) => p.failure(e) } case Success(_) => its.foreach { case (it, p) => p.success(it) } } } } val finished = atomic { implicit txn => redeemed() match { case None => iteratees.transform(_ :+ ((it, (result: Promise[Iteratee[E, A]]).asInstanceOf[Promise[Iteratee[E, _]]]))) None case Some(notWaiting) => Some(notWaiting) } } finished.foreach { case Success(_) => result.success(it) case Failure(e) => result.failure(e) case _ => throw new RuntimeException("should be either Redeemed or Thrown") } result.future } } } } /** * Allows patching in enumerators to an iteratee. */ trait PatchPanel[E] { /** * Patch in the given enumerator into the iteratee. * * @return Whether the enumerator was successfully patched in. Will return false if the patch panel is closed. */ def patchIn(e: Enumerator[E]): Boolean /** * Whether the patch panel is closed. * * The patch panel will become closed when the iteratee it is feeding is done or is error. */ def closed(): Boolean } /** * An enumerator that allows patching in enumerators to supply it with input. 
* * @param patcher A function that passes a patch panel whenever the enumerator is applied to an iteratee. */ def patchPanel[E](patcher: PatchPanel[E] => Unit): Enumerator[E] = new Enumerator[E] { import scala.concurrent.stm._ def apply[A](it: Iteratee[E, A]): Future[Iteratee[E, A]] = { val result = Promise[Iteratee[E, A]]() var isClosed: Boolean = false result.future.onComplete(_ => isClosed = true); def refIteratee(ref: Ref[Iteratee[E, Option[A]]]): Iteratee[E, Option[A]] = { val next = Promise[Iteratee[E, Option[A]]]() val current = ref.single.swap(Iteratee.flatten(next.future)) current.pureFlatFold { case Step.Done(a, e) => { a.foreach(aa => result.success(Done(aa, e))) next.success(Done(a, e)) Done(a, e) } case Step.Cont(k) => { next.success(current) Cont(step(ref)) } case Step.Error(msg, e) => { result.success(Error(msg, e)) next.success(Error(msg, e)) Error(msg, e) } } } def step(ref: Ref[Iteratee[E, Option[A]]])(in: Input[E]): Iteratee[E, Option[A]] = { val next = Promise[Iteratee[E, Option[A]]]() val current = ref.single.swap(Iteratee.flatten(next.future)) current.pureFlatFold { case Step.Done(a, e) => { next.success(Done(a, e)) Done(a, e) } case Step.Cont(k) => { val n = k(in) next.success(n) n.pureFlatFold { case Step.Done(a, e) => { a.foreach(aa => result.success(Done(aa, e))) Done(a, e) } case Step.Cont(k) => Cont(step(ref)) case Step.Error(msg, e) => { result.success(Error(msg, e)) Error(msg, e) } } } case Step.Error(msg, e) => { next.success(Error(msg, e)) Error(msg, e) } } } patcher(new PatchPanel[E] { val ref: Ref[Ref[Iteratee[E, Option[A]]]] = Ref(Ref(it.map(Some(_)))) def closed() = isClosed def patchIn(e: Enumerator[E]): Boolean = { !(closed() || { val newRef = atomic { implicit txn => val enRef = ref() val it = enRef.swap(Done(None, Input.Empty)) val newRef = Ref(it) ref() = newRef newRef } e |>> refIteratee(newRef) //TODO maybe do something if the enumerator is done, maybe not false }) } }) result.future } } }
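// ---------------------------------------------------------------------------
// Editor's note: a minimal usage sketch, not part of the original file. It
// assumes the Play 2.1-era iteratee API above and uses the global execution
// context; the pushed values and the folding iteratee are illustrative only.
object UnicastUsageSketch {
  import scala.concurrent.ExecutionContext.Implicits.global

  // Each application of `numbers` invokes `onStart` with a fresh channel, so
  // the same enumerator can drive several independent iteratees.
  val numbers: Enumerator[Int] = Concurrent.unicast[Int](
    onStart = { channel =>
      channel.push(Input.El(1))
      channel.push(Input.El(2))
      channel.push(Input.El(3))
      channel.push(Input.EOF) // signal end-of-stream to the attached iteratee
    },
    onComplete = println("iteratee finished"),
    onError = (msg, _) => println(s"iteratee failed: $msg")
  )

  // Summing the pushed values: redeemed with 6 once EOF has been pushed.
  val sum: Future[Int] =
    (numbers |>> Iteratee.fold(0)((acc: Int, n: Int) => acc + n)).flatMap(_.run)
}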
noel-yap/setter-for-catan
play-2.1.1/framework/src/iteratees/src/main/scala/play/api/libs/iteratee/Concurrent.scala
Scala
apache-2.0
24,979
/* * Copyright 2021 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package views.businessdetails import forms.{EmptyForm, Form2, InvalidForm, ValidForm} import jto.validation.{Path, ValidationError} import models.Country import models.autocomplete.NameValuePair import models.businessdetails.CorrespondenceAddressNonUk import org.scalatest.MustMatchers import play.api.i18n.Messages import utils.AmlsViewSpec import views.Fixture import views.html.businessdetails.correspondence_address_non_uk class correspondence_addressNonUkSpec extends AmlsViewSpec with MustMatchers { trait ViewFixture extends Fixture { lazy val correspondence_address_non_uk = app.injector.instanceOf[correspondence_address_non_uk] implicit val requestWithToken = addTokenForView() val countries = Some(Seq( NameValuePair("Country 1", "country:1") )) } "correspondence_address view" must { "have correct title" in new ViewFixture { val form2: ValidForm[CorrespondenceAddressNonUk] = Form2(CorrespondenceAddressNonUk( "Name", "BusinessName", "addressLine1", "addressLine1", None, None, Country("AB12CD", "XX") )) def view = correspondence_address_non_uk(form2, true, countries) doc.title must startWith(Messages("businessdetails.correspondenceaddress.title") + " - " + Messages("summary.businessdetails")) } "have correct headings" in new ViewFixture { val form2: ValidForm[CorrespondenceAddressNonUk] = Form2(CorrespondenceAddressNonUk( "Name", "BusinessName", "addressLine1", "addressLine1", None, None, Country("Antarctica", "XX") )) def view = correspondence_address_non_uk(form2, true, countries) heading.html must be(Messages("businessdetails.correspondenceaddress.title")) subHeading.html must include(Messages("summary.businessdetails")) } "show errors in the correct locations" in new ViewFixture { val form2: InvalidForm = InvalidForm(Map.empty, Seq( (Path \\ "country-fieldset") -> Seq(ValidationError("fourth not a message Key")) )) def view = correspondence_address_non_uk(form2, true, countries) errorSummary.html() must include("fourth not a message Key") val test = doc.getElementById("country-fieldset") .getElementsByClass("error-notification").first().html() must include("fourth not a message Key") } "have a back link" in new ViewFixture { val form2: Form2[_] = EmptyForm def view = correspondence_address_non_uk(form2, true, countries) doc.getElementsByAttributeValue("class", "link-back") must not be empty } } }
hmrc/amls-frontend
test/views/businessdetails/correspondence_addressNonUkSpec.scala
Scala
apache-2.0
3,271
package com.twitter.finatra.http.integration.tweetexample.main.services import com.twitter.concurrent.exp.AsyncStream import com.twitter.finatra.http.integration.tweetexample.main.domain.Tweet import com.twitter.util.Future trait TweetsRepository { def getById(id: Long): Future[Option[Tweet]] def getByIds(ids: AsyncStream[Long]): AsyncStream[Tweet] }
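// Editor's note: a hedged sketch, not part of the original file - a minimal
// in-memory implementation of the trait above, as one might use in tests.
// The backing map is an illustrative assumption, and the sketch assumes
// AsyncStream's map/filter combinators.
class InMemoryTweetsRepository(tweets: Map[Long, Tweet]) extends TweetsRepository {

  def getById(id: Long): Future[Option[Tweet]] =
    Future.value(tweets.get(id))

  def getByIds(ids: AsyncStream[Long]): AsyncStream[Tweet] =
    // keep only the ids that resolve to a tweet
    ids.map(tweets.get).filter(_.isDefined).map(_.get)
}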
deanh/finatra
http/src/test/scala/com/twitter/finatra/http/integration/tweetexample/main/services/TweetsRepository.scala
Scala
apache-2.0
360
package com.bizo.mighty.csv /** Settings for CSVWriter */ trait CSVWriterSettings { val separator: Char val quotechar: Char val escapechar: Char val lineEnd: String val encoding: String } object CSVWriterSettings{ import au.com.bytecode.opencsv.{ CSVWriter => OpenCSVWriter } import OpenCSVWriter._ val DEFAULT_ENCODING = "UTF-8" case class Standard( override val separator: Char = DEFAULT_SEPARATOR, override val quotechar: Char = DEFAULT_QUOTE_CHARACTER, override val escapechar: Char = DEFAULT_ESCAPE_CHARACTER, override val lineEnd: String = DEFAULT_LINE_END, override val encoding: String = DEFAULT_ENCODING ) extends CSVWriterSettings /** The Standard/default Writer settings */ implicit object Standard extends Standard( DEFAULT_SEPARATOR, DEFAULT_QUOTE_CHARACTER, DEFAULT_ESCAPE_CHARACTER, DEFAULT_LINE_END, DEFAULT_ENCODING ) }
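// Editor's note: hedged usage sketch, not part of the original file. Because
// the Standard case class carries defaults, callers can derive custom settings
// (here: tab-separated output) and make them implicit to override the default.
object TsvSettingsExample {
  implicit val tsv: CSVWriterSettings = CSVWriterSettings.Standard(separator = '\t')
  // Any writer factory that takes an `implicit CSVWriterSettings` will now
  // resolve `tsv` in this scope instead of the implicit Standard object.
}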
t-pleasure/mighty-csv
src/main/scala/com/bizo/mighty/csv/CSVWriterSettings.scala
Scala
apache-2.0
1,003
package scala.meta.internal.scalacp import scala.meta.internal.{semanticdb => s} import scala.meta.internal.semanticdb.{Language => l} import scala.meta.internal.semanticdb.Scala._ import scala.meta.internal.semanticdb.Scala.{Descriptor => d} import scala.meta.internal.semanticdb.Scala.{Names => n} import scala.meta.internal.semanticdb.SymbolInformation.{Kind => k} object Synthetics { def setterInfos( getterInfo: s.SymbolInformation, linkMode: LinkMode ): List[s.SymbolInformation] = { val getterSym = getterInfo.symbol val setterSym = { if (getterSym.isGlobal) { val setterSymbolName = s"${getterSym.desc.name}_=" Symbols.Global(getterSym.owner, d.Method(setterSymbolName, "()")) } else { getterSym + "+1" } } val paramSym = { if (getterSym.isGlobal) Symbols.Global(setterSym, d.Parameter("x$1")) else getterSym + "+2" } val paramSig = getterInfo.signature match { case s.MethodSignature(_, _, sret) => s.ValueSignature(sret) case _ => s.NoSignature } val paramInfo = s.SymbolInformation( symbol = paramSym, language = l.SCALA, kind = k.PARAMETER, properties = 0, displayName = "x$1", signature = paramSig, annotations = Nil, access = s.NoAccess ) val setterSig = { val unit = s.TypeRef(s.NoType, "scala/Unit#", Nil) val setterParamss = { linkMode match { case SymlinkChildren => List(s.Scope(symlinks = List(paramInfo.symbol))) case HardlinkChildren => List(s.Scope(hardlinks = List(paramInfo))) } } s.MethodSignature(Some(s.Scope()), setterParamss, unit) } val setterInfo = s.SymbolInformation( symbol = setterSym, language = l.SCALA, kind = k.METHOD, properties = getterInfo.properties, displayName = getterInfo.displayName + "_=", signature = setterSig, annotations = getterInfo.annotations, access = getterInfo.access ) linkMode match { case SymlinkChildren => List(paramInfo, setterInfo) case HardlinkChildren => List(setterInfo) } } }
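// Editor's note: hedged sketch, not part of the original file. It builds a
// plausible getter SymbolInformation by hand (all field values below are
// illustrative assumptions) and derives the synthetic `_=` setter from it.
object SyntheticsExample {
  val getter = s.SymbolInformation(
    symbol = "a/B#x().",
    language = l.SCALA,
    kind = k.METHOD,
    displayName = "x",
    signature = s.MethodSignature(Some(s.Scope()), Nil, s.TypeRef(s.NoType, "scala/Int#", Nil))
  )

  // With SymlinkChildren the result is the parameter info for `x$1` followed
  // by the method info for `x_=`; with HardlinkChildren, only the method info.
  val infos: List[s.SymbolInformation] = Synthetics.setterInfos(getter, SymlinkChildren)
}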
scalameta/scalameta
semanticdb/metacp/src/main/scala/scala/meta/internal/scalacp/Synthetics.scala
Scala
bsd-3-clause
2,164
import sbt.Keys._ import sbt._ import scala.scalajs.sbtplugin.ScalaJSPlugin.ScalaJSKeys._ import scala.scalajs.sbtplugin.ScalaJSPlugin._ object ScalajsReact extends Build { type PE = Project => Project lazy val commonSettings: PE = _.settings( organization := "net.bblfish", scalaVersion := "2.11.2", version := "0.1", description := "Akka's Uri ported to Scala", licenses := Seq("Apache License, Version 2.0" -> url("http://opensource.org/licenses/Apache-2.0")), homepage := Some(url("https://github.com/bblfish/akka.http.model.Uri")), publishTo := { //eg: export SBT_PROPS=-Dbanana.publish=bblfish.net:/home/hjs/htdocs/work/repo/ val nexus = "https://oss.sonatype.org/" val other = Option(System.getProperty("banana.publish")).map(_.split(":")) if (version.value.trim.endsWith("SNAPSHOT")) { val repo = other.map(p => Resolver.ssh("banana.publish specified server", p(0), p(1) + "snapshots")) repo.orElse(Some("snapshots" at nexus + "content/repositories/snapshots")) } else { val repo = other.map(p => Resolver.ssh("banana.publish specified server", p(0), p(1) + "releases")) repo.orElse(Some("releases" at nexus + "service/local/staging/deploy/maven2")) } }, publishArtifact in Test := false, pomIncludeRepository := { _ => false} ) //suggested that I add relativeSourceMaps see https://github.com/japgolly/scalajs-react/issues/14 lazy val scalaJSSettingsPlus = scalaJSSettings ++ Seq( relativeSourceMaps := true ) // only needed for speed test - should be moved to a different subproject utest.jsrunner.Plugin.utestJsSettings lazy val root = project.in(file(".")).aggregate() lazy val UriJS = project.in(file("akka.UriJS")) .configure(commonSettings) .settings(scalaJSSettingsPlus:_*) .settings( name := "akka.UriJS", resolvers += "bintray-alexander_myltsev" at "http://dl.bintray.com/content/alexander-myltsev/maven", libraryDependencies += "name.myltsev" %%% "parboiled" % "2.0.0",// from ("http://dl.bintray.com/content/alexander-myltsev/maven"), libraryDependencies += "com.lihaoyi" %%% "utest" % "0.1.7" % "test", // to be removed as soon as parboiled issue is fixed // https://github.com/sirthias/parboiled2/issues/81 libraryDependencies += "org.scala-lang" % "scala-reflect" % "2.11.1" % "provided" ) lazy val NodeUriJS = project.in(file("node.Uri")) .configure(commonSettings) .settings(scalaJSSettingsPlus:_*) .settings( name := "node.Uri", ScalaJSKeys.jsDependencies += ProvidedJS / "node.uri.bundle.js" ) def useReact(scope: String = "compile"): PE = _.settings( jsDependencies += "org.webjars" % "react" % "0.11.1" % scope / "react-with-addons.js" commonJSName "React", skip in packageJSDependencies := false) lazy val browserTest = project.in(file("browserTest")) .configure(commonSettings) .settings(scalaJSSettingsPlus:_*) .dependsOn(UriJS) .settings( name := "http.model.Uri.browsertest", jsDependencies += "org.webjars" % "react" % "0.11.1" / "react-with-addons.js" commonJSName "React", libraryDependencies ++= Seq( "org.scala-lang.modules.scalajs" %%% "scalajs-dom" % "0.6", "org.scala-lang.modules.scalajs" %%% "scalajs-jquery" % "0.6", "net.bblfish" %%% "node-scalajs" % "0.1", //shapeless //scalaz-react-js ( as I like to call it ) "com.scalatags" %%% "scalatags" % "0.3.5", "com.github.japgolly.scalajs-react" %%% "core" % "0.4.0", "com.github.japgolly.scalajs-react" %%% "test" % "0.4.0" % "test", "com.github.japgolly.scalajs-react" %%% "ext-scalaz71" % "0.4.0" ), skip in packageJSDependencies := false ) lazy val cliTest = project.in(file("cliTest")) .configure(commonSettings) .settings( name := "Uri.cliTest", 
libraryDependencies += "com.typesafe.akka" %% "akka-http-core-experimental" % "0.4" ) lazy val cliJSTest = project.in(file("cliJSTest")) .configure(commonSettings) .settings(scalaJSSettingsPlus:_*) .dependsOn(NodeUriJS) .settings( name := "Uri.cliJSTest", // ScalaJSKeys.jsDependencies += ProvidedJS / "node..bundle.js", skip in ScalaJSKeys.packageJSDependencies := false ) }
bblfish/akka.http.model.Uri
project/Build.scala
Scala
apache-2.0
4,362
package pl.touk.nussknacker.engine.api.context.transformation

import cats.data.ValidatedNel
import com.typesafe.scalalogging.LazyLogging
import pl.touk.nussknacker.engine.api.context.ProcessCompilationError.{CannotCreateObjectError, WrongParameters}
import pl.touk.nussknacker.engine.api.context.{ProcessCompilationError, ValidationContext}
import pl.touk.nussknacker.engine.api.definition.{NodeDependency, OutputVariableNameDependency, Parameter}
import pl.touk.nussknacker.engine.api.typed.typing.{TypingResult, Unknown}
import pl.touk.nussknacker.engine.api.NodeId

import scala.util.control.NonFatal

/*
  This trait provides the most generic way of defining a node. In particular, implementations can dynamically define
  the parameter list and the target validation context, based on the current parameters.
  Please @see LastVariableFilterTransformer for sample usage

  NOTE: this is *experimental* API, subject to changes. In particular:
   - handling of dependencies will probably change. In particular, the definition of OutputVariable/ValidationContext
     transformations for sources/sinks is subject to limitations:
     - for sinks OutputVariable is not handled; the resulting ValidationContext will be ignored
     - for sources OutputVariable *has* to be used for Flink sources; its value is always equal to 'input' ATM, due to source API limitations
 */
trait GenericNodeTransformation[T] {

  //ValidationContext for a single input, Map[String, ValidationContext] for joins
  type InputContext

  type DefinedParameter <: BaseDefinedParameter

  //State is arbitrary data that can be passed between steps of NodeTransformationDefinition
  type State

  //TODO: what if we cannot determine parameters/context? With some "fatal validation error"?
  type NodeTransformationDefinition = PartialFunction[TransformationStep, TransformationStepResult]

  def contextTransformation(context: InputContext, dependencies: List[NodeDependencyValue])(implicit nodeId: NodeId): NodeTransformationDefinition

  def implementation(params: Map[String, Any], dependencies: List[NodeDependencyValue], finalState: Option[State]): T

  //Here we assume that this list is fixed - it cannot be changed depending on parameter values
  def nodeDependencies: List[NodeDependency]

  // FinalResult which will be used if some TransformationStep isn't handled inside contextTransformation.
  def handleUnmatchedTransformationStep(step: TransformationStep, inputContext: InputContext, outputVariable: Option[String])(implicit nodeId: NodeId): FinalResults = {
    val fallback = fallbackFinalResult(step, inputContext, outputVariable)
    // if some parameters failed to be defined, this corner case is probably just missing an implementation and we can simply use the fallback
    if (step.parameters.map(_._2).contains(FailedToDefineParameter)) {
      fallback
    } else {
      // TODO: better error
      fallback.copy(errors = fallback.errors :+ WrongParameters(Set.empty, step.parameters.map(_._1).toSet))
    }
  }

  // FinalResult which will be used when some exception is thrown during the handling of a TransformationStep
  def handleExceptionDuringTransformation(step: TransformationStep, inputContext: InputContext, outputVariable: Option[String], ex: Throwable)
                                         (implicit nodeId: NodeId): FinalResults = {
    val fallback = fallbackFinalResult(step, inputContext, outputVariable)
    fallback.copy(errors = fallback.errors :+ CannotCreateObjectError(ex.getMessage, nodeId.id))
  }

  protected def fallbackFinalResult(step: TransformationStep, inputContext: InputContext, outputVariable: Option[String])(implicit nodeId: NodeId): FinalResults = {
    prepareFinalResultWithOptionalVariable(inputContext, outputVariable.map(name => (name, Unknown)), step.state)
  }

  protected final def prepareFinalResultWithOptionalVariable(inputContext: InputContext, outputVariable: Option[(String, TypingResult)], state: Option[State])(implicit nodeId: NodeId): FinalResults = {
    val context = inputContext match {
      case single: ValidationContext => single
      case _ => ValidationContext.empty
    }
    outputVariable.map { case (name, typ) =>
      FinalResults.forValidation(context, state = state)(_.withVariable(name, typ, paramName = None))
    }.getOrElse(FinalResults(context, state = state))
  }

  sealed trait TransformationStepResult {
    def errors: List[ProcessCompilationError]
  }

  case class NextParameters(parameters: List[Parameter], errors: List[ProcessCompilationError] = Nil, state: Option[State] = None) extends TransformationStepResult

  case class FinalResults(finalContext: ValidationContext, errors: List[ProcessCompilationError] = Nil, state: Option[State] = None) extends TransformationStepResult

  object FinalResults {
    def forValidation(context: ValidationContext, errors: List[ProcessCompilationError] = Nil, state: Option[State] = None)
                     (validation: ValidationContext => ValidatedNel[ProcessCompilationError, ValidationContext]): FinalResults = {
      val validatedFinalContext = validation(context)
      FinalResults(
        validatedFinalContext.getOrElse(context),
        errors ++ validatedFinalContext.swap.map(_.toList).getOrElse(Nil),
        state
      )
    }
  }

  case class TransformationStep(parameters: List[(String, DefinedParameter)], state: Option[State])

}

trait SingleInputGenericNodeTransformation[T] extends GenericNodeTransformation[T] {
  type InputContext = ValidationContext
  type DefinedParameter = DefinedSingleParameter
}

/*
  NOTE: currently, due to FE limitations, it is *NOT* possible to define dynamic branch parameters - that is,
  branch parameters that change based on other parameter values
 */
trait JoinGenericNodeTransformation[T] extends GenericNodeTransformation[T] with LazyLogging {
  type InputContext = Map[String, ValidationContext]
  type DefinedParameter = BaseDefinedParameter

  // TODO: currently branch parameters must be determined on the node template level - they aren't enriched dynamically during node validation
  // This default implementation tries to determine the branch parameters from the initial context transformation step.
  // If a node has some other, more complex logic for preparing them, this method should be overridden
  def initialBranchParameters: List[Parameter] = {
    try {
      val nodeDependencyValues = nodeDependencies.collect {
        case OutputVariableNameDependency => OutputVariableNameValue("fakeOutputVariable")
      }
      contextTransformation(Map.empty, nodeDependencyValues)(NodeId("fakeNodeId"))(TransformationStep(List.empty, None)) match {
        case NextParameters(params, _, _) => params.filter(_.branchParam)
        case FinalResults(_, _, _) => List.empty
      }
    } catch {
      // initial parameters must be determined without throwing - otherwise an exception here would blow up the whole frontend
      case NonFatal(ex) =>
        logger.warn("Error during determining initial branch parameters. Branch parameters will be ignored", ex)
        List.empty
    }
  }
}
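// Editor's note: hedged sketch, not part of the original file. A two-step
// single-input transformation: first it declares a `variableName` parameter,
// then it adds that variable to the context. `DefinedEagerParameter` and the
// `Parameter[String](...)` builder are assumptions about the surrounding API;
// the implementation body is deliberately left out.
object SampleDynamicTransformer extends SingleInputGenericNodeTransformation[AnyRef] {

  override type State = Nothing

  override def contextTransformation(context: ValidationContext, dependencies: List[NodeDependencyValue])
                                    (implicit nodeId: NodeId): NodeTransformationDefinition = {
    // step 1: no parameters defined yet - ask for one
    case TransformationStep(Nil, _) =>
      NextParameters(List(Parameter[String]("variableName")))
    // step 2: the parameter value is known - produce the final validation context
    case TransformationStep(("variableName", DefinedEagerParameter(name: String, _)) :: Nil, _) =>
      FinalResults.forValidation(context)(_.withVariable(name, Unknown, paramName = None))
  }

  override def implementation(params: Map[String, Any], dependencies: List[NodeDependencyValue], finalState: Option[State]): AnyRef = ???

  override def nodeDependencies: List[NodeDependency] = Nil
}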
TouK/nussknacker
components-api/src/main/scala/pl/touk/nussknacker/engine/api/context/transformation/GenericNodeTransformation.scala
Scala
apache-2.0
7,122
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.mv.plans import org.scalatest.BeforeAndAfter import org.apache.carbondata.mv.testutil.ModularPlanTest // scalastyle:off println class Tpcds_1_4_BenchmarkSuite extends ModularPlanTest with BeforeAndAfter { // val spark = SparkSession.builder().master("local").enableHiveSupport().getOrCreate() // // spark.conf.set("spark.sql.crossJoin.enabled", true) // val testHive = new org.apache.spark.sql.hive.test.TestHiveContext(spark.sparkContext, false) // val hiveClient = testHive.sparkSession.metadataHive // test("test SQLBuilder using tpc-ds queries") { // // tpcds1_4Tables.foreach { create_table => // hiveClient.runSqlHive(create_table) // } // //// val dest = "qTradeflow" // this line is for development, comment it out once done // val dest = "qSEQ" //// val dest = "qAggPushDown" // this line is for development, comment it out once done //// val dest = "q10" // // tpcds1_4Queries.foreach { query => // if (query._1 == dest) { // this line is for development, comment it out once done // val analyzed = testHive.sql(query._2).queryExecution.analyzed // println(s"""\\n\\n===== Analyzed Logical Plan for ${query._1} =====\\n\\n$analyzed \\n""") // //// val cnonicalizedPlan = new SQLBuilder(analyzed).Canonicalizer.execute(analyzed) //// //// Try(new SQLBuilder(analyzed).toSQL) match { //// case Success(s) => logInfo(s"""\\n\\n===== CONVERTED back ${query._1} USING SQLBuilder =====\\n\\n$s \\n""") //// case Failure(e) => logInfo(s"""Cannot convert the logical query plan of ${query._1} back to SQL""") //// } // // // this Try is for development, comment it out once done // Try(analyzed.optimize) match { // case Success(o) => { // println(s"""\\n\\n===== Optimized Logical Plan for ${query._1} =====\\n\\n$o \\n""") // } // case Failure(e) => // } // // val o = analyzed.optimize // val o1 = o.modularize // // Try(o.modularize.harmonize) match { // case Success(m) => { // println(s"""\\n\\n===== MODULAR PLAN for ${query._1} =====\\n\\n$m \\n""") // // Try(m.asCompactSQL) match { // case Success(s) => println(s"\\n\\n===== CONVERTED SQL for ${query._1} =====\\n\\n${s}\\n") // case Failure(e) => println(s"""\\n\\n===== CONVERTED SQL for ${query._1} failed =====\\n\\n${e.toString}""") // } // } // case Failure(e) => println(s"""\\n\\n==== MODULARIZE the logical query plan for ${query._1} failed =====\\n\\n${e.toString}""") // } // } // } // // } } // scalastyle:on println
jackylk/incubator-carbondata
mv/core/src/test/scala/org/apache/carbondata/mv/plans/Tpcds_1_4_BenchmarkSuite.scala
Scala
apache-2.0
3,471
/**
 * Copyright (C) 2014 Stratio (http://stratio.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.stratio.decision.unit.siddhi.query.model

import scala.collection.mutable.ArrayBuffer

import scalaj.collection.Imports._

/**
 * Created by aitor on 9/17/15.
 */
class OrderStream {

  var order_id: String = _
  var timestamp: String = _
  var day_time_zone: String = _
  var client_id: Int = _
  var payment_method: String = _
  var latitude: Double = _
  var longitude: Double = _
  var credit_card: String = _
  var shopping_center: String = _
  var channel: String = _
  var city: String = _
  var country: String = _
  var employee: Int = _
  var total_amount: Float = _
  var total_products: Int = _
  var order_size: String = _
  var lines: String = _

  def getData: Array[Any] = {
    Array[Any](order_id, timestamp, day_time_zone, client_id, payment_method,
      latitude, longitude, credit_card, shopping_center, channel, city, country,
      employee, total_amount, total_products, order_size, lines)
  }
}

object OrderStream {

  def getFromList(m: java.util.List[Array[String]]): java.util.List[OrderStream] = {
    val listOrders = ArrayBuffer[OrderStream]()
    val scalaList = m.asScala

    scalaList.foreach { line =>
      listOrders += getOrderStreamFromArray(line.toList)
    }
    listOrders.asJava
  }

  def getOrderStreamFromArray(a: List[String]): OrderStream = {
    val orderStream = new OrderStream()
    orderStream.order_id = a(0)
    orderStream.timestamp = a(1)
    orderStream.day_time_zone = a(2)
    orderStream.client_id = a(3).toInt
    orderStream.payment_method = a(4)
    orderStream.latitude = a(5).toDouble
    orderStream.longitude = a(6).toDouble
    orderStream.credit_card = a(7)
    orderStream.shopping_center = a(8)
    orderStream.channel = a(9)
    orderStream.city = a(10)
    orderStream.country = a(11)
    orderStream.employee = a(12).toInt
    orderStream.total_amount = a(13).toFloat
    orderStream.total_products = a(14).toInt
    orderStream.order_size = a(15)
    orderStream.lines = a(16)
    orderStream
  }
}
Stratio/streaming-cep-engine
siddhi/src/test/scala/com/stratio/decision/unit/siddhi/query/model/OrderStream.scala
Scala
apache-2.0
3,022
package com.bot4s.telegram.marshalling import akka.http.scaladsl.marshalling.{ Marshaller, Marshalling, ToEntityMarshaller } import akka.http.scaladsl.model._ import akka.http.scaladsl.unmarshalling.{ FromEntityUnmarshaller, Unmarshaller } import com.bot4s.telegram.marshalling import com.bot4s.telegram.methods.{ JsonRequest, MultipartRequest, Request } import com.bot4s.telegram.models.{ AkkaInputFile, InputFile } import io.circe.{ Decoder, Encoder } object AkkaHttpMarshalling { implicit def camelCaseJsonUnmarshaller[R](implicit decR: Decoder[R]): FromEntityUnmarshaller[R] = Unmarshaller.stringUnmarshaller .forContentTypes(ContentTypes.`application/json`) .map(marshalling.fromJson[R]) implicit def underscore_case_marshaller[T <: Request[_]](implicit encT: Encoder[T]): ToEntityMarshaller[T] = Marshaller.strict { request => request match { // JSON-only request case r: JsonRequest[_] => Marshalling.Opaque(() => HttpEntity(ContentTypes.`application/json`, marshalling.toJson(request))) // Request with multipart payload case r: MultipartRequest[_] => val files = r.getFiles val parts = files.map { case (camelKey, inputFile) => val key = CaseConversions.snakenize(camelKey) inputFile match { case InputFile.FileId(id) => Multipart.FormData.BodyPart(key, HttpEntity(id)) case InputFile.Contents(filename, contents) => Multipart.FormData.BodyPart( key, HttpEntity(ContentTypes.`application/octet-stream`, contents), Map("filename" -> filename) ) case InputFile.Path(path) => Multipart.FormData.BodyPart.fromPath(key, MediaTypes.`application/octet-stream`, path) case AkkaInputFile.ByteString(filename, bytes) => Multipart.FormData.BodyPart( key, HttpEntity(MediaTypes.`application/octet-stream`, bytes), Map("filename" -> filename) ) case other => throw new RuntimeException(s"InputFile $other not supported") } } val fields = io.circe.parser .parse(marshalling.toJson(request)) .fold( throw _, _.asObject.map { _.toMap.mapValues { json => json.asString.getOrElse(marshalling.printer.print(json)) } } ) val params = fields.getOrElse(Map()).toMap val paramParts = params.map { case (key, value) => Multipart.FormData.BodyPart(key, HttpEntity(value)) } Marshalling.Opaque(() => Multipart.FormData((parts ++ paramParts): _*).toEntity()) } } }
mukel/telegrambot4s
akka/src/com/bot4s/telegram/marshalling/AkkaHttpMarshalling.scala
Scala
apache-2.0
2,853
package com.codacy.client.bitbucket.client sealed trait RequestResponse[+A] { @deprecated("This should be removed or re-implemented to support pagination", "12.0.0") def map[B](f: A => B): RequestResponse[B] = { flatMap(a => RequestResponse.success(f(a))) } @deprecated("This should be removed or re-implemented to support pagination", "12.0.0") def flatMap[B](f: A => RequestResponse[B]): RequestResponse[B] = { this match { case SuccessfulResponse(a, _, _, _, _, _) => f(a) case e: FailedResponse => e } } } case class SuccessfulResponse[A]( value: A, size: Option[Int] = None, pageLen: Option[Int] = None, page: Option[Int] = None, next: Option[String] = None, previous: Option[String] = None ) extends RequestResponse[A] case class FailedResponse(message: String) extends RequestResponse[Nothing] object RequestResponse { @deprecated("Build a SuccessfulResponse object instead of using this", "12.0.0") def success[A](a: A): RequestResponse[A] = SuccessfulResponse(a) @deprecated("Build a FailedResponse object instead of using this", "12.0.0") def failure[A](message: String): RequestResponse[A] = FailedResponse(message: String) @deprecated("This should be either removed or re-implemented to support pagination", "12.0.0") def applyDiscardingPaginationInfo[A]( r1: RequestResponse[Seq[A]], r2: RequestResponse[Seq[A]] ): RequestResponse[Seq[A]] = { r1 match { case SuccessfulResponse(v1, _, _, _, _, _) => r2 match { case SuccessfulResponse(v2, _, _, _, _, _) => SuccessfulResponse(v1 ++ v2) case f @ FailedResponse(_) => f } case f @ FailedResponse(_) => f } } }
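// Editor's note: hedged usage sketch, not part of the original file. It shows
// the intended exhaustive handling of a response, including the pagination
// fields that the deprecated combinators above discard.
object RequestResponseExample {
  def describe[A](response: RequestResponse[Seq[A]]): String = response match {
    case SuccessfulResponse(values, size, _, page, next, _) =>
      val more = if (next.isDefined) " (more pages available)" else ""
      s"page ${page.getOrElse(1)}: ${values.length} of ${size.getOrElse(values.length)} items$more"
    case FailedResponse(message) =>
      s"request failed: $message"
  }
}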
codacy/bitbucket-scala-client
src/main/scala/com/codacy/client/bitbucket/client/RequestResponse.scala
Scala
apache-2.0
1,738
package nestor
package domain

import scalaz._, Scalaz._

import org.joda.time.DateTime

case class Person(
    id: Int,
    firstName: String,
    lastName: String,
    country: Country,
    document: String,
    email: Option[String] = None,
    phone: Option[String] = None,
    notes: Option[String] = None,
    createdAt: DateTime) {

  def fullName = firstName + " " + lastName

  override def toString = fullName

  def data = Person.Data(
    firstName = firstName,
    lastName = lastName,
    countryCode = country.code,
    document = document,
    email = email,
    phone = phone,
    notes = notes)
}

object Person {

  case class Data(
      firstName: String,
      lastName: String,
      countryCode: String,
      document: String,
      email: Option[String] = None,
      phone: Option[String] = None,
      notes: Option[String] = None) {

    def apply: Valid[Int ⇒ Person] =
      Country(countryCode) map { country ⇒ (id: Int) ⇒
        Person(
          id = id,
          firstName = firstName,
          lastName = lastName,
          country = country,
          document = document,
          email = email,
          phone = phone,
          notes = notes,
          createdAt = DateTime.now)
      } toSuccess {
        Error("Invalid country code: " + countryCode)
      }
  }

  object Form {

    import play.api.data.{ Form ⇒ F }
    import play.api.data.Forms._

    def create(documentUnique: String ⇒ Boolean) = F(mapping(
      "firstName" -> nonEmptyText,
      "lastName" -> nonEmptyText,
      "countryCode" -> nonEmptyText.verifying(Country.all contains _),
      "document" -> nonEmptyText
        .verifying("Already exists", documentUnique),
      "email" -> optional(email),
      "phone" -> optional(text),
      "notes" -> optional(text)
    )(Data.apply)(Data.unapply))

    def update(person: Person, documentUnique: String ⇒ Boolean) =
      create(documentUnique) fill person.data
  }
}
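// Editor's note: hedged usage sketch, not part of the original file. Binding
// plain request data to the create form; the sample values and the always-true
// uniqueness check are illustrative assumptions (the country code must be one
// of Country.all for the binding to validate).
object PersonFormExample {
  val result: Either[Seq[play.api.data.FormError], Valid[Int ⇒ Person]] =
    Person.Form
      .create(documentUnique = _ ⇒ true)
      .bind(Map(
        "firstName" -> "Ada",
        "lastName" -> "Lovelace",
        "countryCode" -> "ar",
        "document" -> "12345678"
      ))
      .fold(withErrors ⇒ Left(withErrors.errors), data ⇒ Right(data.apply))
}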
ornicar/nestor
app/domain/Person.scala
Scala
mit
1,911
/* * Copyright 2014 Lars Edenbrandt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package se.nimsa.sbx.log import se.nimsa.sbx.model.Entity object LogProtocol { sealed trait LogEntryType { override def toString: String = this match { case LogEntryType.DEFAULT => "DEFAULT" case LogEntryType.INFO => "INFO" case LogEntryType.WARN => "WARN" case LogEntryType.ERROR => "ERROR" } } object LogEntryType { case object DEFAULT extends LogEntryType case object INFO extends LogEntryType case object WARN extends LogEntryType case object ERROR extends LogEntryType def withName(string: String): LogEntryType = string match { case "DEFAULT" => DEFAULT case "INFO" => INFO case "WARN" => WARN case "ERROR" => ERROR } } case class LogEntry(id: Long, created: Long, entryType: LogEntryType, subject: String, message: String) extends Entity // Messages case class AddLogEntry(logEntry: LogEntry) case class GetLogEntries(startIndex: Long, count: Long) case class GetLogEntriesBySubject(subject: String, startIndex: Long, count: Long) case class GetLogEntriesByType(entryType: LogEntryType, startIndex: Long, count: Long) case class GetLogEntriesBySubjectAndType(subject: String, entryType: LogEntryType, startIndex: Long, count: Long) case object ClearLog case class RemoveLogEntry(logId: Long) case class LogEntries(logEntries: Seq[LogEntry]) case class LogEntryAdded(logEntry: LogEntry) case class LogEntryRemoved(logId: Long) }
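// Editor's note: hedged usage sketch, not part of the original file. Entry
// types round-trip through the string names used when persisting log rows;
// the entry values below are illustrative.
object LogProtocolExample {
  import LogProtocol._

  val entry = LogEntry(
    id = 1,
    created = System.currentTimeMillis,
    entryType = LogEntryType.WARN,
    subject = "storage",
    message = "disk space is running low")

  // toString and withName are inverses: WARN -> "WARN" -> WARN
  val restored: LogEntryType = LogEntryType.withName(entry.entryType.toString)
}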
slicebox/slicebox
src/main/scala/se/nimsa/sbx/log/LogProtocol.scala
Scala
apache-2.0
2,070
package com.ee.assets import com.ee.assets.deployment.Deployer import com.ee.assets.models.{SimpleAssetsInfo, Suffix, AssetsLoaderConfig, AssetsInfo} import com.ee.assets.paths.PathResolver import com.ee.assets.transformers._ import com.ee.log.Logger import com.google.javascript.jscomp.CompilerOptions import java.io.File import java.net.URL import play.api.templates.Html import play.api.{Play, Configuration, Mode} /** * * @param deployer * @param mode * @param config * @param closureCompilerOptions * @param info - path info that maps the web path (when loading the assets via the server) -> file path (the location of the files in the project) */ class Loader( deployer: Option[Deployer] = None, mode: Mode.Mode, config: Configuration, closureCompilerOptions: Option[CompilerOptions] = None, info: AssetsInfo = SimpleAssetsInfo("assets", "public")) { private lazy val JsConfig: AssetsLoaderConfig = validateConfig(AssetsLoaderConfig.fromAppConfiguration(mode.toString.toLowerCase, Suffix.js, config)) private lazy val CssConfig: AssetsLoaderConfig = validateConfig(AssetsLoaderConfig.fromAppConfiguration(mode.toString.toLowerCase, Suffix.css, config)) private def validateConfig(c: AssetsLoaderConfig): AssetsLoaderConfig = { if (c.deploy && deployer.isEmpty) { logger.warn(s"Deployment has been enabled but no deployer has been specified - setting deploy to false. Original config: $c") c.copy(deploy = false) } else { c } } val generatedDir = com.ee.utils.play.generatedFolder val logger = Logger("new-loader") def scripts(concatPrefix: String)(paths: String*): play.api.templates.Html = { val key = s"$concatPrefix-${paths.sortWith(_ < _).mkString(",").hashCode}" logger.info(s"[js] mode: $mode") TagCache.js.get(key) match { case Some(tag) if mode != Mode.Dev => { logger.info(s"[js] Found cached tag for: $concatPrefix - ${paths.mkString(",")}") tag } case _ => { logger.debug(s"[js] create tag for: $concatPrefix - ${paths.mkString(",")}") val tags = prepareTags( s => s.endsWith(".js"), buildJsTransformations(concatPrefix), Templates.script, JsConfig.addHints )(paths: _*) TagCache.js.put(key, tags) tags } } } def css(concatPrefix: String)(paths: String*): play.api.templates.Html = { val key = s"$concatPrefix-${paths.sortWith(_ < _).mkString(",").hashCode}" logger.info(s"[css] mode: $mode") TagCache.css.get(key) match { case Some(tag) if mode != Mode.Dev => { logger.debug(s"[css] Found cached tag for: $concatPrefix - ${paths.mkString(",")}") tag } case _ => { logger.info(s"[css] create tag for: $concatPrefix - ${paths.mkString(",")}") val tags = prepareTags( s => s.endsWith(".css"), buildCssTransformations(concatPrefix), Templates.css, CssConfig.addHints )(paths: _*) TagCache.css.put(key, tags) tags } } } private def prepareTags( filetypeFilter: String => Boolean, transformationFn: Seq[Element[Unit]] => Seq[Element[Unit]], tagFn: String => String, addHints: Boolean)(paths: String*): Html = { def transformAndCreateHtml(elements: Seq[Element[Unit]]): Html = { logger.trace(s"Initialising generated assets folder to: ${generatedDir.getAbsolutePath}") val transformed = transformationFn(elements) import com.ee.assets.Templates._ val tags = transformed.map(e => tagFn(e.path)) val tagString = tags.mkString("\\n") val pathString = transformed.map(_.path).mkString("\\n") def hinted = s""" <!-- [assets-loader] hints (you can disable these by adding `addHints: false` to your conf) url: $pathString Request: -------- ${paths.mkString("\\n")} Found Elements: ----------------- ${elements.map(_.path).mkString("\\n")} --> 
$tagString """ def plain = tags.mkString("\\n") val out = if (addHints) hinted else plain Html(out) } toElements(filetypeFilter)(paths: _*) match { case Nil => if(addHints) Html(s"<!-- [assets-loader] warning: missing ${paths.mkString(",")} -->") else Html("") case e: Seq[Element[Unit]] => transformAndCreateHtml(e) } } private def buildCssTransformations(concatPrefix: String) = buildTransformations(concatPrefix, "css", CssConfig, new CssMinifier()) private def buildJsTransformations(concatPrefix: String) = buildTransformations(concatPrefix, "js", JsConfig, new JsMinifier(closureCompilerOptions)) private def buildTransformations( concatPrefix: String, suffix: String, config: AssetsLoaderConfig, minify: Transformer[String, String]): Seq[Element[Unit]] => Seq[Element[Unit]] = { val read = new PlayResourceReader val namer = new CommonRootNamer(concatPrefix, suffix) val concat = new Concatenator(namer) val toWebPath = new FileToWebPath(info) val gzip = new Gzip() val stringWriter = if (config.deploy) new StringDeploy(deployer.get).run _ else new Writer(writeToGeneratedFolder).run _ andThen toWebPath.run _ val byteWriter = if (config.deploy) new ByteArrayDeploy(deployer.get).run _ else new ByteArrayWriter(pathToFile).run _ andThen toWebPath.run _ val builder = new TransformationBuilder(read.run, concat.run, gzip.run, minify.run, stringWriter, byteWriter, toWebPath.run) builder.build(config) } def writeToGeneratedFolder(path: String, contents: String): Unit = { import com.ee.utils.file val finalPath = new File(s"${generatedDir}${File.separator}$path").getCanonicalPath logger.trace(s"final path for generated asset: $finalPath") file.writeToFile(finalPath, contents) } def pathToFile(p: String): File = new File(s"$generatedDir${File.separator}/$p") private def toElements(filter: String => Boolean)(paths: String*): Seq[Element[Unit]] = { import play.api.Play.current logger.debug(s"[toElements]: $paths") def publicDir(p: String) = s"${info.filePath}/$p" val pathsAndUrls: Seq[(String, URL)] = paths.map { p => val public = publicDir(p) Play.resource(public).map((public, _)).orElse { logger.warn(s"[toElements] Can't find resource: $p") None } }.flatten pathsAndUrls.map { t: (String, URL) => val paths = PathResolver.resolve(t._1, t._2) val filtered = paths.filter(filter) logger.trace(s"[toElements]: \\n${filtered.mkString("\\n")}") filtered.map(PathElement(_)) }.flatten } }
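// Editor's note: hedged usage sketch, not part of the original file. Wiring a
// Loader without a deployer and emitting script tags for a page; the asset
// paths are illustrative assumptions.
object LoaderUsageSketch {
  def scriptTags(app: play.api.Application): Html = {
    val loader = new Loader(mode = app.mode, config = app.configuration)
    // Tags are cached per (prefix, path-set) key, except in Dev mode.
    loader.scripts("app")("javascripts/main.js", "javascripts/vendor.js")
  }
}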
edeustace/assets-loader
plugin/app/com/ee/assets/Loader.scala
Scala
mit
7,001
package com.github.j5ik2o.spetstore.adaptor.http case class CreateItemTypeJson( categoryId: String, name: String, description: Option[String] )
j5ik2o/spetstore-cqrs-es-akka
write-interface/src/main/scala/com/github/j5ik2o/spetstore/adaptor/http/CreateItemTypeJson.scala
Scala
mit
159
/* * Scala.js (https://www.scala-js.org/) * * Copyright EPFL. * * Licensed under Apache License 2.0 * (https://www.apache.org/licenses/LICENSE-2.0). * * See the NOTICE file distributed with this work for * additional information regarding copyright ownership. */ package scala.scalajs.js import scala.scalajs.js /** Adapts a JavaScript Iterable to a Scala Iterable */ @inline final class IterableOps[+A](self: js.Iterable[A]) extends scala.collection.Iterable[A] { @inline def iterator: scala.collection.Iterator[A] = self.jsIterator().toIterator }
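// Editor's note: hedged usage sketch, not part of the original file. Once a
// js.Iterable is wrapped, all of Scala's Iterable combinators apply; the
// helper below is illustrative.
private object IterableOpsExample {
  def firstTwo[A](xs: js.Iterable[A]): List[A] =
    new IterableOps(xs).take(2).toList
}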
nicolasstucki/scala-js
library/src/main/scala/scala/scalajs/js/IterableOps.scala
Scala
apache-2.0
570
/* * Copyright 2014 Adam Rosenberger * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.nalloc.bitb.kcits.sandbox.flatMap import org.nalloc.bitb.kcits.optional._ import org.nalloc.bitb.kcits.sandbox.Inspectable class BlockInlineLambda extends Inspectable { private[this] val bInlineComplex = b.flatMap { x => val y = x + 5 OptionalInt(y * 3) } private[this] val sInlineComplex = s.flatMap { x => val y = x + 5 OptionalInt(y * 3) } private[this] val iInlineComplex = i.flatMap { x => val y = x + 5 OptionalInt(y * 3) } private[this] val lInlineComplex = l.flatMap { x => val y = x + 5 OptionalLong(y * 3) } private[this] val fInlineComplex = f.flatMap { x => val y = x + 5 OptionalFloat(y * 3) } private[this] val dInlineComplex = d.flatMap { x => val y = x + 5 OptionalDouble(y * 3) } private[this] val stInlineComplex = st.flatMap { x => val y = x + x Optional(y + y) } }
arosenberger/nalloc_2.10
sandbox/src/main/scala/org/nalloc/bitb/kcits/sandbox/flatMap/BlockInlineLambda.scala
Scala
apache-2.0
1,481
/* * Copyright (c) 2014-2018 by The Monix Project Developers. * See the project homepage at: https://monix.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package monix.tail package internal import cats.effect.Sync import cats.syntax.all._ import monix.execution.internal.collection.ArrayStack import monix.tail.Iterant.{Concat, Halt, Last, Next, NextBatch, NextCursor, Scope, Suspend} private[tail] object IterantReduce { /** Implementation for `Iterant.reduce`. */ def apply[F[_], A](self: Iterant[F, A], op: (A, A) => A) (implicit F: Sync[F]): F[Option[A]] = { F.suspend { new Loop[F, A](op).apply(self) } } private class Loop[F[_], A](op: (A, A) => A)(implicit F: Sync[F]) extends Iterant.Visitor[F, A, F[Option[A]]] { private[this] var isEmpty = true private[this] var state: A = _ //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= // Used in visit(Concat) private[this] var stackRef: ArrayStack[F[Iterant[F, A]]] = _ private def stackPush(item: F[Iterant[F, A]]): Unit = { if (stackRef == null) stackRef = new ArrayStack() stackRef.push(item) } private def stackPop(): F[Iterant[F, A]] = { if (stackRef != null) stackRef.pop() else null.asInstanceOf[F[Iterant[F, A]]] } private[this] val concatContinue: (Option[A] => F[Option[A]]) = state => stackPop() match { case null => F.pure(state) case xs => xs.flatMap(this) } //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= def visit(ref: Next[F, A]): F[Option[A]] = { if (isEmpty) { state = ref.item isEmpty = false } else { state = op(state, ref.item) } ref.rest.flatMap(this) } def visit(ref: NextBatch[F, A]): F[Option[A]] = { if (isEmpty) { visit(ref.toNextCursor()) } else { state = ref.batch.foldLeft(state)(op) ref.rest.flatMap(this) } } def visit(ref: NextCursor[F, A]): F[Option[A]] = { if (isEmpty) { if (ref.cursor.hasNext()) { isEmpty = false state = ref.cursor.next() state = ref.cursor.foldLeft(state)(op) } } else { state = ref.cursor.foldLeft(state)(op) } ref.rest.flatMap(this) } def visit(ref: Suspend[F, A]): F[Option[A]] = ref.rest.flatMap(this) def visit(ref: Concat[F, A]): F[Option[A]] = { stackPush(ref.rh) ref.lh.flatMap(this).flatMap(concatContinue) } def visit[S](ref: Scope[F, S, A]): F[Option[A]] = ref.runFold(this) def visit(ref: Last[F, A]): F[Option[A]] = { if (isEmpty) { state = ref.item isEmpty = false } else { state = op(state, ref.item) } F.pure(Some(state)) } def visit(ref: Halt[F, A]): F[Option[A]] = ref.e match { case None => if (isEmpty) F.pure(None) else F.pure(Some(state)) case Some(e) => F.raiseError(e) } def fail(e: Throwable): F[Option[A]] = F.raiseError(e) } }
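// Editor's note: hedged usage sketch, not part of the original file. This
// loop backs Iterant's public `reduceL`; with Coeval (assumed available from
// monix-eval) as the effect type, the reduction stays lazy until evaluated.
private[tail] object IterantReduceExample {
  import monix.eval.Coeval

  // Yields Coeval(Some(10)); an empty stream would yield Coeval(None).
  val total: Coeval[Option[Int]] =
    Iterant[Coeval].of(1, 2, 3, 4).reduceL(_ + _)
}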
ddworak/monix
monix-tail/shared/src/main/scala/monix/tail/internal/IterantReduce.scala
Scala
apache-2.0
3,616
package com.awesomesauce.minecraft.forge.openautomation.common.lasers.packets import com.awesomesauce.minecraft.forge.openautomation.api.lasers.LaserPacket import net.minecraft.entity.EntityLivingBase import net.minecraft.world.World import net.minecraftforge.common.util.ForgeDirection class EntityPacket(var entity: EntityLivingBase) extends LaserPacket { def arrive(world: World, x: Int, y: Int, z: Int, to: ForgeDirection) = { if (!world.isAirBlock(x + to.offsetX, y + to.offsetY, z + to.offsetZ)) false else { entity.setPositionAndUpdate(x + to.offsetX + 0.5, y + to.offsetY, z + to.offsetZ + 0.5) true } } def split(amount: Int) = { val arr = new Array[LaserPacket](amount) for (i <- Range(0, amount)) { arr(i) = new EntityPacket(entity) } arr } }
AwesomeSauceMods/OpenAutomation
main/scala/com/awesomesauce/minecraft/forge/openautomation/common/lasers/packets/EntityPacket.scala
Scala
mit
817
/* ************************************************************** * * * * Copyright (c) 2016-, Kota Mizushima, All rights reserved. * * * * * * This software is distributed under the modified BSD License. * * ************************************************************** */ package onion.compiler import java.lang.{Byte => JByte, Short => JShort, Character => JCharacter, Integer => JInteger, Long => JLong, Double => JDouble, Float => JFloat, Boolean => JBoolean } import java.util.{Map => JMap, HashMap => JHashMap, List, ArrayList, Set, Iterator} import scala.jdk.CollectionConverters._ import onion.compiler.toolbox._ import org.apache.bcel.Constants import org.apache.bcel.classfile.JavaClass import org.apache.bcel.generic._ import onion.compiler.IRT.BinaryTerm.Constants._ import onion.compiler.IRT.UnaryTerm.Constants._ /** * @author Kota Mizushima */ object CodeGeneration { def translateIxTypeToVmType(`type`: IRT.Type): Type = { if (`type`.isBasicType) { BASIC_TYPE_MAPPING(`type`.asInstanceOf[IRT.BasicType]) } else if (`type`.isArrayType) { val arrayType: IRT.ArrayType = `type`.asInstanceOf[IRT.ArrayType] new ArrayType(translateIxTypeToVmType(arrayType.component), arrayType.dimension) } else if (`type`.isClassType) { new ObjectType(`type`.name) } else { Type.NULL } } private def toJavaModifier(src: Int): Int = { var modifier: Int = 0 modifier |= (if (Modifier.isPrivate(src)) Constants.ACC_PRIVATE else modifier) modifier |= (if (Modifier.isProtected(src)) Constants.ACC_PROTECTED else modifier) modifier |= (if (Modifier.isPublic(src)) Constants.ACC_PUBLIC else modifier) modifier |= (if (Modifier.isStatic(src)) Constants.ACC_STATIC else modifier) modifier |= (if (Modifier.isSynchronized(src)) Constants.ACC_SYNCHRONIZED else modifier) modifier |= (if (Modifier.isAbstract(src)) Constants.ACC_ABSTRACT else modifier) modifier |= (if (Modifier.isFinal(src)) Constants.ACC_FINAL else modifier) modifier } private val unboxingMethods = Map( "java.lang.Byte" -> "byteValue", "java.lang.Short" -> "shortValue", "java.lang.Character" -> "charValue", "java.lang.Integer" -> "intValue", "java.lang.Long" -> "longValue", "java.lang.Float" -> "floatValue", "java.lang.Double" -> "doubleValue", "java.lang.Boolean" -> "booleanValue" ) private final val FRAME_PREFIX: String = "frame" private final val OUTER_THIS: String = "outer$" private final val CLOSURE_CLASS_SUFFIX: String = "Closure" private final val BASIC_TYPE_MAPPING: Map[IRT.BasicType, Type] = Map( IRT.BasicType.BYTE -> Type.BYTE, IRT.BasicType.SHORT -> Type.SHORT, IRT.BasicType.CHAR -> Type.CHAR, IRT.BasicType.INT -> Type.INT, IRT.BasicType.LONG -> Type.LONG, IRT.BasicType.FLOAT -> Type.FLOAT, IRT.BasicType.DOUBLE -> Type.DOUBLE, IRT.BasicType.BOOLEAN -> Type.BOOLEAN, IRT.BasicType.VOID -> Type.VOID ) class Proxy(pool: ConstantPoolGen) { private val code: InstructionList = new InstructionList() private val factory: InstructionFactory = new InstructionFactory(pool) private var frame: LocalFrame = _ private var frameObjectIndex: Int = 0 private var indexTable: Array[Int] = _ private var method: MethodGen = _ def setFrame(frame: LocalFrame): Unit = { this.frame = frame } def getFrame: LocalFrame = frame def getFrameObjectIndex: Int = frameObjectIndex def setFrameObjectIndex(frameObjectIndex: Int): Unit = { this.frameObjectIndex = frameObjectIndex } def setIndexTable(indexTable: Array[Int]): Unit = { this.indexTable = indexTable.clone.asInstanceOf[Array[Int]] } def index(index: Int): Int = indexTable(index) def getIndexTable: Array[Int] = 
indexTable.clone.asInstanceOf[Array[Int]] def setMethod(method: MethodGen): Unit = { this.method = method } def getMethod: MethodGen = { method } def getCode: InstructionList = code def addExceptionHandler(start_pc: InstructionHandle, end_pc: InstructionHandle, handler_pc: InstructionHandle, catch_type: ObjectType): CodeExceptionGen = { method.addExceptionHandler(start_pc, end_pc, handler_pc, catch_type) } def addLineNumber(ih: InstructionHandle, src_line: Int): LineNumberGen = method.addLineNumber(ih, src_line) def appendCallConstructor(`type`: ObjectType, params: Array[Type]): InstructionHandle = { appendInvoke(`type`.getClassName, "<init>", Type.VOID, params, Constants.INVOKESPECIAL) } def appendDefaultValue(tp: Type): InstructionHandle = { if (tp.isInstanceOf[BasicType]) { if (tp eq Type.BOOLEAN) { appendConstant(JBoolean.valueOf(false)) } else if (tp eq Type.BYTE) { appendConstant(JByte.valueOf(0.asInstanceOf[Byte])) } else if (tp eq Type.SHORT) { appendConstant(JShort.valueOf(0.asInstanceOf[Short])) } else if (tp eq Type.CHAR) { appendConstant(JCharacter.valueOf(0.asInstanceOf[Char])) } else if (tp eq Type.INT) { appendConstant(JInteger.valueOf(0)) } else if (tp eq Type.LONG) { appendConstant(JLong.valueOf(0L)) } else if (tp eq Type.FLOAT) { appendConstant(JFloat.valueOf(0.0f)) } else if (tp eq Type.DOUBLE) { appendConstant(JDouble.valueOf(0.0)) } else { append(InstructionConstants.NOP) } } else { appendNull(tp) } } def boxing(`type`: Type): ObjectType = { val boxedType: ObjectType = BOXING_TABLE(`type`.asInstanceOf[BasicType]) if (boxedType == null) throw new RuntimeException("type " + `type` + "cannot be boxed") boxedType } def appendArrayLoad(`type`: Type): InstructionHandle = code.append(InstructionFactory.createArrayLoad(`type`)) def appendArrayStore(`type`: Type): InstructionHandle = code.append(InstructionFactory.createArrayStore(`type`)) def appendBinaryOperation(op: String, `type`: Type): InstructionHandle = { code.append(InstructionFactory.createBinaryOperation(op, `type`)) } def appendBranchInstruction(opcode: Short, target: InstructionHandle): BranchHandle = { code.append(InstructionFactory.createBranchInstruction(opcode, target)) } def appendDup(size: Int): InstructionHandle = code.append(InstructionFactory.createDup(size)) def appendDup_1(size: Int): InstructionHandle = code.append(InstructionFactory.createDup_1(size)) def appendDup_2(size: Int): InstructionHandle = code.append(InstructionFactory.createDup_2(size)) def appendLoad(`type`: Type, index: Int): InstructionHandle = code.append(InstructionFactory.createLoad(`type`, index)) def appendNull(`type`: Type): InstructionHandle = code.append(InstructionFactory.createNull(`type`)) def appendPop(size: Int): InstructionHandle = code.append(InstructionFactory.createPop(size)) def appendReturn(`type`: Type): InstructionHandle = code.append(InstructionFactory.createReturn(`type`)) def appendStore(`type`: Type, index: Int): InstructionHandle = code.append(InstructionFactory.createStore(`type`, index)) def appendThis: InstructionHandle = code.append(InstructionFactory.createThis) def appendAppend(`type`: Type): InstructionHandle = code.append(factory.createAppend(`type`)) def appendCast(src_type: Type, dest_type: Type): InstructionHandle = code.append(factory.createCast(src_type, dest_type)) def appendCheckCast(t: ReferenceType): InstructionHandle = code.append(factory.createCheckCast(t)) def appendConstant(value: AnyRef): InstructionHandle = code.append(factory.createConstant(value)) def appendFieldAccess(class_name: String, 
name: String, `type`: Type, kind: Short): InstructionHandle = { code.append(factory.createFieldAccess(class_name, name, `type`, kind)) } def appendGetField(class_name: String, name: String, t: Type): InstructionHandle = { code.append(factory.createGetField(class_name, name, t)) } def appendGetStatic(class_name: String, name: String, t: Type): InstructionHandle = { code.append(factory.createGetStatic(class_name, name, t)) } def appendInstanceOf(t: ReferenceType): InstructionHandle = { code.append(factory.createInstanceOf(t)) } def appendInvoke(class_name: String, name: String, ret_type: Type, arg_types: Array[Type], kind: Short): InstructionHandle = { code.append(factory.createInvoke(class_name, name, ret_type, arg_types, kind)) } def appendNew(s: String): InstructionHandle = { code.append(factory.createNew(s)) } def appendNew(t: ObjectType): InstructionHandle = { code.append(factory.createNew(t)) } def appendNewArray(t: Type, dim: Short): InstructionHandle = { code.append(factory.createNewArray(t, dim)) } def appendPutField(class_name: String, name: String, t: Type): InstructionHandle = { code.append(factory.createPutField(class_name, name, t)) } def appendPutStatic(class_name: String, name: String, t: Type): InstructionHandle = { code.append(factory.createPutStatic(class_name, name, t)) } def append(i: BranchInstruction): BranchHandle = { code.append(i) } def append(c: CompoundInstruction): InstructionHandle = code.append(c) def append(i: Instruction): InstructionHandle = code.append(i) def append(i: Instruction, c: CompoundInstruction): InstructionHandle = code.append(i, c) def append(i: Instruction, j: Instruction): InstructionHandle = code.append(i, j) def append(i: Instruction, il: InstructionList): InstructionHandle = code.append(i, il) def append(ih: InstructionHandle, i: BranchInstruction): BranchHandle = code.append(ih, i) def append(ih: InstructionHandle, c: CompoundInstruction): InstructionHandle = code.append(ih, c) def append(ih: InstructionHandle, i: Instruction): InstructionHandle = code.append(ih, i) def append(ih: InstructionHandle, il: InstructionList): InstructionHandle = code.append(ih, il) def append(il: InstructionList): InstructionHandle = code.append(il) def getEnd: InstructionHandle = code.getEnd def getInstructionHandles: Array[InstructionHandle] = code.getInstructionHandles def getInstructionPositions: Array[Int] = code.getInstructionPositions def getInstructions: Array[Instruction] = code.getInstructions def getLength: Int = code.getLength def getStart: InstructionHandle = code.getStart def insert(i: BranchInstruction): BranchHandle = code.insert(i) def insert(c: CompoundInstruction): InstructionHandle = code.insert(c) def insert(i: Instruction): InstructionHandle = code.insert(i) def insert(i: Instruction, c: CompoundInstruction): InstructionHandle = code.insert(i, c) def insert(i: Instruction, j: Instruction): InstructionHandle = code.insert(i, j) def insert(i: Instruction, il: InstructionList): InstructionHandle = code.insert(i, il) def insert(ih: InstructionHandle, i: BranchInstruction): BranchHandle = code.insert(ih, i) def insert(ih: InstructionHandle, c: CompoundInstruction): InstructionHandle = code.insert(ih, c) def insert(ih: InstructionHandle, i: Instruction): InstructionHandle = code.insert(ih, i) def insert(ih: InstructionHandle, il: InstructionList): InstructionHandle = code.insert(ih, il) def insert(il: InstructionList): InstructionHandle = code.insert(il) def isEmpty: Boolean = code.isEmpty def iterator: Iterator[_] = code.iterator def move(ih: 
InstructionHandle, target: InstructionHandle): Unit = code.move(ih, target)

    def move(start: InstructionHandle, end: InstructionHandle, target: InstructionHandle): Unit = code.move(start, end, target)

    def redirectBranches(old_target: InstructionHandle, new_target: InstructionHandle): Unit = code.redirectBranches(old_target, new_target)

    def redirectExceptionHandlers(exceptions: Array[CodeExceptionGen], old_target: InstructionHandle, new_target: InstructionHandle): Unit = code.redirectExceptionHandlers(exceptions, old_target, new_target)

    def redirectLocalVariables(lg: Array[LocalVariableGen], old_target: InstructionHandle, new_target: InstructionHandle): Unit = code.redirectLocalVariables(lg, old_target, new_target)

    def update: Unit = code.update

    private final val BOXING_TABLE = Map(
      Type.BOOLEAN -> new ObjectType("java.lang.Boolean"),
      Type.BYTE -> new ObjectType("java.lang.Byte"),
      Type.SHORT -> new ObjectType("java.lang.Short"),
      Type.CHAR -> new ObjectType("java.lang.Character"),
      Type.INT -> new ObjectType("java.lang.Integer"),
      Type.LONG -> new ObjectType("java.lang.Long"),
      Type.FLOAT -> new ObjectType("java.lang.Float"),
      Type.DOUBLE -> new ObjectType("java.lang.Double")
    )
  }
}

class CodeGeneration(config: CompilerConfig) {
  import CodeGeneration._

  def process(classes: Seq[IRT.ClassDefinition]): Seq[CompiledClass] = {
    compiledClasses.clear
    val base = (if (config.outputDirectory != null) config.outputDirectory else ".") + Systems.fileSeparator
    for (klass <- classes) codeClass(klass)
    val classFiles: List[CompiledClass] = new ArrayList[CompiledClass]
    for (o <- compiledClasses.asScala) {
      val clazz: JavaClass = o
      val outDir: String = getOutputDir(base, clazz.getClassName)
      classFiles.add(new CompiledClass(clazz.getClassName, outDir, clazz.getBytes))
    }
    classFiles.asScala.toSeq
  }

  private def getOutputDir(base: String, fqcn: String): String = {
    val packageName: String = getPackageName(fqcn)
    // replaceAll would interpret "." as a regex matching every character;
    // a literal replace is what is intended here.
    base + packageName.replace(".", Systems.fileSeparator)
  }

  private def getPackageName(fqcn: String): String = {
    // lastIndexOf takes a literal string, not a regex, so search for "." itself
    val index: Int = fqcn.lastIndexOf(".")
    if (index < 0) "" else fqcn.substring(0, index)
  }

  private def classModifier(node: IRT.ClassDefinition): Int = {
    var modifier: Int = toJavaModifier(node.modifier)
    if (node.isInterface) modifier |= Constants.ACC_INTERFACE
    if (!Modifier.isInternal(modifier)) modifier |= Constants.ACC_PUBLIC
    modifier
  }

  def codeClass(node: IRT.ClassDefinition): Unit = {
    val modifier: Int = classModifier(node)
    val className: String = node.name
    generator = new SymbolGenerator(className + CLOSURE_CLASS_SUFFIX)
    val superClass: String = node.superClass.name
    val interfaces = namesOf(node.interfaces)
    val file: String = node.getSourceFile
    val gen: ClassGen = new ClassGen(className, superClass, file, modifier, interfaces.toArray)
    val constructors: Array[IRT.ConstructorRef] = node.constructors
    for (ref <- constructors) {
      codeConstructor(gen, (ref.asInstanceOf[IRT.ConstructorDefinition]))
    }
    val methods = node.methods
    for (ref <- methods) {
      codeMethod(gen, (ref.asInstanceOf[IRT.MethodDefinition]))
    }
    val fields: Array[IRT.FieldRef] = node.fields
    for (ref <- fields) {
      codeField(gen, (ref.asInstanceOf[IRT.FieldDefinition]))
    }
    compiledClasses.add(gen.getJavaClass)
  }

  def codeExpressions(nodes: Array[IRT.Term], code: CodeGeneration.Proxy): InstructionHandle = {
    if (nodes.length > 0) {
      val start = codeExpression(nodes(0), code)
      for (i <- 1 until nodes.length) {
        codeExpression(nodes(i), code)
      }
      start
    } else {
      code.append(InstructionConstants.NOP)
    }
  }
  def codeConstructor(gen: ClassGen, node: IRT.ConstructorDefinition): Unit = {
    val isStaticOld: Boolean = isStatic
    isStatic = false
    val code: CodeGeneration.Proxy = new CodeGeneration.Proxy(gen.getConstantPool)
    val frame: LocalFrame = node.frame
    code.setFrame(frame)
    val args: Array[String] = new Array[String](node.getArgs.length)
    for (i <- 0 until args.length) {
      args(i) = "arg" + i
    }
    var classType: ObjectType = typeOf(node.affiliation).asInstanceOf[ObjectType]
    val modifier: Int = toJavaModifier(node.modifier)
    var arguments: Array[Type] = typesOf(node.getArgs)
    val method: MethodGen = new MethodGen(modifier, Type.VOID, arguments, args, "<init>", classType.getClassName, code.getCode, gen.getConstantPool)
    if (frame.closed) {
      val frameObjectIndexLocal = frameObjectIndex(1, node.getArgs)
      code.setFrameObjectIndex(frameObjectIndexLocal)
      code.setIndexTable(makeIndexTableForClosureFrame(frame))
      appendInitialCode(code, frame, arguments, 1)
    } else {
      code.setIndexTable(makeIndexTableFor(1, frame))
    }
    code.setMethod(method)
    val init: IRT.Super = node.superInitializer
    classType = typeOf(init.classType).asInstanceOf[ObjectType]
    arguments = typesOf(init.arguments)
    code.append(InstructionConstants.ALOAD_0)
    codeExpressions(init.terms, code)
    code.appendCallConstructor(classType, arguments)
    codeBlock(node.block, code)
    method.setMaxLocals
    method.setMaxStack
    code.appendReturn(typeOf(IRT.BasicType.VOID))
    gen.addMethod(method.getMethod)
    isStatic = isStaticOld
  }

  def codeMethod(gen: ClassGen, node: IRT.MethodDefinition): Unit = {
    val isStaticOld: Boolean = isStatic
    isStatic = Modifier.isStatic(node.modifier)
    val code: CodeGeneration.Proxy = new CodeGeneration.Proxy(gen.getConstantPool)
    val frame: LocalFrame = node.getFrame
    code.setFrame(frame)
    val modifier: Int = toJavaModifier(node.modifier)
    val returned: Type = typeOf(node.returnType)
    val arguments: Array[Type] = typesOf(node.arguments)
    val argNames: Array[String] = names(arguments.length)
    val name: String = node.name
    val className: String = node.affiliation.name
    val method: MethodGen = new MethodGen(modifier, returned, arguments, argNames, name, className, code.getCode, gen.getConstantPool)
    code.setMethod(method)
    if (!Modifier.isAbstract(node.modifier)) {
      if (frame.closed) {
        var origin: Int = 0
        if (Modifier.isStatic(node.modifier)) {
          code.setFrameObjectIndex(frameObjectIndex(0, node.arguments))
          origin = 0
        } else {
          code.setFrameObjectIndex(frameObjectIndex(1, node.arguments))
          origin = 1
        }
        code.setIndexTable(makeIndexTableForClosureFrame(frame))
        appendInitialCode(code, frame, arguments, origin)
      } else {
        if (Modifier.isStatic(node.modifier)) {
          code.setIndexTable(makeIndexTableFor(0, frame))
        } else {
          code.setIndexTable(makeIndexTableFor(1, frame))
        }
      }
      codeBlock(node.getBlock, code)
      method.setMaxLocals
      method.setMaxStack
    }
    gen.addMethod(method.getMethod)
    isStatic = isStaticOld
  }

  private def appendInitialCode(code: CodeGeneration.Proxy, frame: LocalFrame, arguments: Array[Type], origin: Int): Unit = {
    val frameObjectIndex: Int = code.getFrameObjectIndex
    code.appendConstant(JInteger.valueOf(frame.entries.length))
    code.appendNewArray(Type.OBJECT, 1.asInstanceOf[Short])
    code.appendDup(1)
    code.appendStore(new ArrayType(Type.OBJECT, 1), frameObjectIndex)
    // `i` indexes both the argument list and the frame array, while `localIndex`
    // tracks the JVM local-variable slot, which advances by two for long/double
    // arguments; conflating the two counters would skip arguments and frame slots.
    var localIndex: Int = origin
    var i: Int = 0
    while (i < arguments.length) {
      val arg: Type = arguments(i)
      code.appendDup(1)
      code.appendConstant(JInteger.valueOf(i))
      if (arg.isInstanceOf[BasicType]) {
        val boxed: ObjectType = code.boxing(arg)
        code.appendNew(boxed)
        code.appendDup(1)
        code.appendLoad(arg, localIndex)
        code.appendCallConstructor(boxed, Array[Type](arg))
      } else {
        code.appendLoad(arg, localIndex)
      }
      code.appendArrayStore(Type.OBJECT)
      if ((arg eq Type.DOUBLE) || (arg eq Type.LONG)) localIndex += 2 else localIndex += 1
      i += 1
    }
  }

  private def implementsMethods(gen: ClassGen, methods: Array[IRT.Method]): Unit = {
    var i: Int = 0
    while (i < methods.length) {
      val method: IRT.Method = methods(i)
      val returnType: Type = typeOf(method.returnType)
      val name: String = method.name
      val args: Array[Type] = typesOf(method.arguments)
      val argNames: Array[String] = names(args.length)
      val code: CodeGeneration.Proxy = new CodeGeneration.Proxy(gen.getConstantPool)
      val mgen: MethodGen = new MethodGen(Constants.ACC_PUBLIC, returnType, args, argNames, name, gen.getClassName, code.getCode, gen.getConstantPool)
      code.appendDefaultValue(returnType)
      code.appendReturn(returnType)
      mgen.setMaxLocals
      mgen.setMaxStack
      gen.addMethod(mgen.getMethod)
      i += 1
    }
  }

  def codeClosure(node: IRT.NewClosure, code: CodeGeneration.Proxy): InstructionHandle = {
    val classType: IRT.ClassType = node.classType
    val closureName: String = generator.generate
    val arguments: Array[Type] = typesOf(node.arguments)
    val gen: ClassGen = new ClassGen(closureName, "java.lang.Object", "<generated>", Constants.ACC_PUBLIC, Array[String](classType.name))
    val methods: Set[_] = Classes.getInterfaceMethods(classType)
    methods.remove(node.method)
    implementsMethods(gen, methods.toArray(new Array[IRT.Method](0)).asInstanceOf[Array[IRT.Method]])
    val frame: LocalFrame = node.frame
    val depth: Int = frame.depth
    var i: Int = 1
    while (i <= depth) {
      val field: FieldGen = new FieldGen(Constants.ACC_PRIVATE, new ArrayType("java.lang.Object", 1), FRAME_PREFIX + i, gen.getConstantPool)
      gen.addField(field.getField)
      i += 1
    }
    gen.addField(new FieldGen(Constants.ACC_PUBLIC, new ObjectType("java.lang.Object"), OUTER_THIS, gen.getConstantPool).getField)
    val types: Array[Type] = closureArguments(depth)
    var method: MethodGen = createClosureConstructor(closureName, types, gen.getConstantPool)
    gen.addMethod(method.getMethod)
    val closureCode: CodeGeneration.Proxy = new CodeGeneration.Proxy(gen.getConstantPool)
    method = new MethodGen(Constants.ACC_PUBLIC, typeOf(node.returnType), arguments, names(arguments.length), node.name, closureName, closureCode.getCode, gen.getConstantPool)
    closureCode.setMethod(method)
    closureCode.setFrame(frame)
    if (frame.closed) {
      val frameObjectIndexLocal = frameObjectIndex(1, node.arguments)
      closureCode.setFrameObjectIndex(frameObjectIndexLocal)
      closureCode.setIndexTable(makeIndexTableForClosureFrame(frame))
      appendInitialCode(closureCode, frame, arguments, 1)
    } else {
      closureCode.setIndexTable(makeIndexTableFor(1, frame))
    }
    val isClosureOld: Boolean = isClosure
    val currentClosureNameOld: String = currentClosureName
    isClosure = true
    currentClosureName = closureName
    codeStatement(node.block, closureCode)
    isClosure = isClosureOld
    currentClosureName = currentClosureNameOld
    method.setMaxLocals
    method.setMaxStack
    gen.addMethod(method.getMethod)
    compiledClasses.add(gen.getJavaClass)
    val start: InstructionHandle = code.appendNew(new ObjectType(closureName))
    code.appendDup(1)
    val name: String = code.getMethod.getClassName
    val index: Int = code.getFrameObjectIndex
    if (!isStatic) {
      if (isClosure) {
        code.appendThis
        code.appendGetField(currentClosureName, OUTER_THIS, new ObjectType("java.lang.Object"))
      } else {
        code.appendThis
      }
    }
    code.appendLoad(new ArrayType("java.lang.Object", 1), index)
    i = 1
    while (i < depth) {
      code.appendThis
      code.appendGetField(name, FRAME_PREFIX + i, new ArrayType("java.lang.Object", 1))
      i += 1
    }
    code.appendCallConstructor(new ObjectType(closureName), complementOuterThis(closureArguments(depth)))
    start
  }
ArrayType("java.lang.Object", 1)) i += 1; } code.appendCallConstructor(new ObjectType(closureName), complementOuterThis(closureArguments(depth))) start } private def complementOuterThis(types: Array[Type]): Array[Type] = { if (!isStatic) { val newTypes: Array[Type] = new Array[Type](types.length + 1) newTypes(0) = new ObjectType("java.lang.Object") { var i: Int = 0 while (i < types.length) { { newTypes(i + 1) = types(i) } ({ i += 1; i }) } } newTypes } else { types } } private def closureArguments(size: Int): Array[Type] = { val arguments: Array[Type] = new Array[Type](size) { var i: Int = 0 while (i < arguments.length) { { arguments(i) = new ArrayType("java.lang.Object", 1) } ({ i += 1; i }) } } arguments } private def codeList(node: IRT.ListLiteral, code: CodeGeneration.Proxy): InstructionHandle = { val listType: ObjectType = typeOf(node.`type`).asInstanceOf[ObjectType] val start: InstructionHandle = code.appendNew("java.util.ArrayList") code.appendDup(1) code.appendCallConstructor(new ObjectType("java.util.ArrayList"), new Array[Type](0)) val elements: Array[IRT.Term] = node.getElements var i: Int = 0 while (i < elements.length) { code.appendDup(1) codeExpression(elements(i), code) code.appendInvoke(listType.getClassName, "add", Type.BOOLEAN, Array[Type](Type.OBJECT), Constants.INVOKEINTERFACE) code.appendPop(1) i += 1 } start } def codeSuperCall(node: IRT.CallSuper, code: CodeGeneration.Proxy): InstructionHandle = { val start: InstructionHandle = codeExpression(node.target, code) codeExpressions(node.params, code) val method: IRT.Method = node.method code.appendInvoke(method.affiliation.name, method.name, typeOf(method.returnType), typesOf(method.arguments), Constants.INVOKESPECIAL) start } private def names(size: Int): Array[String] = { val names: Array[String] = new Array[String](size) var i: Int = 0 while (i < names.length) { names(i) = "args" + size i += 1; } names } private def createClosureConstructor(className: String, types: Array[Type], pool: ConstantPoolGen): MethodGen = { var argNames: Array[String] = null if (isStatic) { argNames = new Array[String](types.length) var i: Int = 0 while (i < types.length) { argNames(i) = FRAME_PREFIX + (i + 1) i += 1; } } else { argNames = new Array[String](types.length + 1) argNames(0) = OUTER_THIS var i: Int = 0 while (i < types.length) { argNames(i + 1) = FRAME_PREFIX + (i + 1) i += 1; } } val code: CodeGeneration.Proxy = new CodeGeneration.Proxy(pool) val constructor: MethodGen = new MethodGen(Constants.ACC_PUBLIC, Type.VOID, complementOuterThis(types), argNames, "<init>", className, code.getCode, pool) code.appendThis code.appendCallConstructor(Type.OBJECT, new Array[Type](0)) if (!isStatic) { code.appendThis code.appendLoad(Type.OBJECT, 1) code.appendPutField(className, OUTER_THIS, Type.OBJECT) } val origin: Int = if (isStatic) 1 else 2 var i: Int = 0 while (i < types.length) { code.appendThis code.appendLoad(types(i), i + origin) code.appendPutField(className, FRAME_PREFIX + (i + 1), types(i)) i += 1; } code.append(InstructionConstants.RETURN) constructor.setMaxLocals constructor.setMaxStack constructor } def codeField(gen: ClassGen, node: IRT.FieldDefinition): Unit = { val field = new FieldGen(toJavaModifier(node.modifier), typeOf(node.`type`), node.name, gen.getConstantPool) gen.addField(field.getField) } def codeBlock(node: IRT.StatementBlock, code: CodeGeneration.Proxy): InstructionHandle = { var start: InstructionHandle = null if (node.statements.length > 0) { start = codeStatement(node.statements(0), code) var i: Int = 1 while (i 
  def codeExpressionStatement(node: IRT.ExpressionActionStatement, code: CodeGeneration.Proxy): InstructionHandle = {
    val start: InstructionHandle = codeExpression(node.term, code)
    val `type`: IRT.Type = node.term.`type`
    if (`type` ne IRT.BasicType.VOID) {
      if (isWideType(`type`)) {
        code.append(InstructionConstants.POP2)
      } else {
        code.append(InstructionConstants.POP)
      }
    }
    start
  }

  def codeStatement(node: IRT.ActionStatement, code: CodeGeneration.Proxy): InstructionHandle = {
    node match {
      case node1: IRT.StatementBlock => codeBlock(node1, code)
      case node1: IRT.ExpressionActionStatement => codeExpressionStatement(node1, code)
      case node1: IRT.IfStatement => codeIf(node1, code)
      case node1: IRT.ConditionalLoop => codeLoop(node1, code)
      case node1: IRT.NOP => codeEmpty(node1, code)
      case node1: IRT.Return => codeReturn(node1, code)
      case node1: IRT.Synchronized => codeSynchronized(node1, code)
      case node1: IRT.Throw => codeThrowNode(node1, code)
      case node1: IRT.Try => codeTry(node1, code)
      case _ => code.append(InstructionConstants.NOP)
    }
  }

  def codeReturn(node: IRT.Return, code: CodeGeneration.Proxy): InstructionHandle = {
    var start: InstructionHandle = null
    if (node.term != null) {
      start = codeExpression(node.term, code)
      val `type`: Type = typeOf(node.term.`type`)
      code.appendReturn(`type`)
    } else {
      start = code.append(InstructionConstants.RETURN)
    }
    start
  }

  def codeSynchronized(node: IRT.Synchronized, code: CodeGeneration.Proxy): InstructionHandle = {
    // synchronized blocks are not implemented yet; no code is generated for them
    null
  }

  def codeThrowNode(node: IRT.Throw, code: CodeGeneration.Proxy): InstructionHandle = {
    val start: InstructionHandle = codeExpression(node.term, code)
    code.append(InstructionConstants.ATHROW)
    start
  }

  def codeTry(node: IRT.Try, code: CodeGeneration.Proxy): InstructionHandle = {
    val start: InstructionHandle = codeStatement(node.tryStatement, code)
    val to: BranchHandle = code.append(new GOTO(null))
    val length: Int = node.catchTypes.length
    val catchEnds: Array[BranchHandle] = new Array[BranchHandle](length)
    var i: Int = 0
    while (i < length) {
      val bind: ClosureLocalBinding = node.catchTypes(i)
      val index: Int = code.getIndexTable(bind.index)
      val `type`: ObjectType = typeOf(bind.tp).asInstanceOf[ObjectType]
      val target: InstructionHandle = code.appendStore(`type`, index)
      code.addExceptionHandler(start, to, target, `type`)
      codeStatement(node.catchStatements(i), code)
      catchEnds(i) = code.append(new GOTO(null))
      i += 1
    }
    val end: InstructionHandle = code.append(InstructionConstants.NOP)
    to.setTarget(end)
    i = 0
    while (i < catchEnds.length) {
      catchEnds(i).setTarget(end)
      i += 1
    }
    start
  }

  def codeEmpty(node: IRT.NOP, code: CodeGeneration.Proxy): InstructionHandle = {
    code.append(InstructionConstants.NOP)
  }

  def codeIf(node: IRT.IfStatement, code: CodeGeneration.Proxy): InstructionHandle = {
    val start: InstructionHandle = codeExpression(node.getCondition, code)
    val toThen: BranchHandle = code.append(new IFNE(null))
    if (node.getElseStatement != null) {
      codeStatement(node.getElseStatement, code)
    }
    val toEnd: BranchHandle = code.append(new GOTO(null))
    toThen.setTarget(codeStatement(node.getThenStatement, code))
    toEnd.setTarget(code.append(new NOP))
    start
  }

  def codeLoop(node: IRT.ConditionalLoop, code: CodeGeneration.Proxy): InstructionHandle = {
    val start: InstructionHandle = codeExpression(node.condition, code)
    val branch: BranchHandle = code.append(new IFEQ(null))
    codeStatement(node.stmt, code)
    code.append(new GOTO(start))
    val end: InstructionHandle = code.append(InstructionConstants.NOP)
    branch.setTarget(end)
    start
  }

  private def nameOf(symbol: IRT.ClassType): String = symbol.name

  private def namesOf(symbols: Seq[IRT.ClassType]): Seq[String] = symbols.map { s => nameOf(s) }

  def codeExpression(node: IRT.Term, code: CodeGeneration.Proxy): InstructionHandle = {
    var start: InstructionHandle = null
    if (node.isInstanceOf[IRT.BinaryTerm]) {
      start = codeBinaryExpression(node.asInstanceOf[IRT.BinaryTerm], code)
    } else if (node.isInstanceOf[IRT.UnaryTerm]) {
      start = codeUnaryExpression(node.asInstanceOf[IRT.UnaryTerm], code)
    } else if (node.isInstanceOf[IRT.Begin]) {
      start = codeBegin(node.asInstanceOf[IRT.Begin], code)
    } else if (node.isInstanceOf[IRT.SetLocal]) {
      start = codeLocalAssign(node.asInstanceOf[IRT.SetLocal], code)
    } else if (node.isInstanceOf[IRT.RefLocal]) {
      start = codeLocalRef(node.asInstanceOf[IRT.RefLocal], code)
    } else if (node.isInstanceOf[IRT.RefStaticField]) {
      start = codeStaticFieldRef(node.asInstanceOf[IRT.RefStaticField], code)
    } else if (node.isInstanceOf[IRT.RefField]) {
      start = codeFieldRef(node.asInstanceOf[IRT.RefField], code)
    } else if (node.isInstanceOf[IRT.SetField]) {
      start = codeFieldAssign(node.asInstanceOf[IRT.SetField], code)
    } else if (node.isInstanceOf[IRT.Call]) {
      start = codeMethodCall(node.asInstanceOf[IRT.Call], code)
    } else if (node.isInstanceOf[IRT.RefArray]) {
      start = codeArrayRef(node.asInstanceOf[IRT.RefArray], code)
    } else if (node.isInstanceOf[IRT.ArrayLength]) {
      start = codeArrayLengthNode(node.asInstanceOf[IRT.ArrayLength], code)
    } else if (node.isInstanceOf[IRT.SetArray]) {
      start = codeArrayAssignment(node.asInstanceOf[IRT.SetArray], code)
    } else if (node.isInstanceOf[IRT.NewObject]) {
      start = codeNew(node.asInstanceOf[IRT.NewObject], code)
    } else if (node.isInstanceOf[IRT.NewArray]) {
      start = codeNewArray(node.asInstanceOf[IRT.NewArray], code)
    } else if (node.isInstanceOf[IRT.CallStatic]) {
      start = codeStaticMethodCall(node.asInstanceOf[IRT.CallStatic], code)
    } else if (node.isInstanceOf[IRT.CharacterValue]) {
      start = codeChar(node.asInstanceOf[IRT.CharacterValue], code)
    } else if (node.isInstanceOf[IRT.StringValue]) {
      start = codeString(node.asInstanceOf[IRT.StringValue], code)
    } else if (node.isInstanceOf[IRT.IntValue]) {
      start = codeInteger(node.asInstanceOf[IRT.IntValue], code)
    } else if (node.isInstanceOf[IRT.LongValue]) {
      start = codeLong(node.asInstanceOf[IRT.LongValue], code)
    } else if (node.isInstanceOf[IRT.FloatValue]) {
      start = codeFloat(node.asInstanceOf[IRT.FloatValue], code)
    } else if (node.isInstanceOf[IRT.DoubleValue]) {
      start = codeDouble(node.asInstanceOf[IRT.DoubleValue], code)
    } else if (node.isInstanceOf[IRT.BoolValue]) {
      start = codeBoolean(node.asInstanceOf[IRT.BoolValue], code)
    } else if (node.isInstanceOf[IRT.NullValue]) {
      start = codeNull(node.asInstanceOf[IRT.NullValue], code)
    } else if (node.isInstanceOf[IRT.AsInstanceOf]) {
      start = codeCast(node.asInstanceOf[IRT.AsInstanceOf], code)
    } else if (node.isInstanceOf[IRT.This]) {
      start = codeSelf(node.asInstanceOf[IRT.This], code)
    } else if (node.isInstanceOf[IRT.OuterThis]) {
      start = codeOuterThis(node.asInstanceOf[IRT.OuterThis], code)
    } else if (node.isInstanceOf[IRT.InstanceOf]) {
      start = codeIsInstance(node.asInstanceOf[IRT.InstanceOf], code)
    } else if (node.isInstanceOf[IRT.NewClosure]) {
      start = codeClosure(node.asInstanceOf[IRT.NewClosure], code)
    } else
if (node.isInstanceOf[IRT.ListLiteral]) { start = codeList(node.asInstanceOf[IRT.ListLiteral], code) } else if (node.isInstanceOf[IRT.CallSuper]) { start = codeSuperCall(node.asInstanceOf[IRT.CallSuper], code) } else { throw new RuntimeException } start } def codeBegin(node: IRT.Begin, code: CodeGeneration.Proxy): InstructionHandle = { var start: InstructionHandle = null val terms: Array[IRT.Term] = node.terms if (terms.length > 0) { start = codeExpression(terms(0), code) var i: Int = 1 while (i < terms.length) { val `type`: IRT.Type = terms(i - 1).`type` if (`type` ne IRT.BasicType.VOID) { if (isWideType(`type`)) { code.append(InstructionConstants.POP2) } else { code.append(InstructionConstants.POP) } codeExpression(terms(i), code) } i += 1 } } else { start = code.append(InstructionConstants.NOP) } start } def codeLocalAssign(node: IRT.SetLocal, code: CodeGeneration.Proxy): InstructionHandle = { var start: InstructionHandle = null val `type`: Type = typeOf(node.`type`) if (node.frame == 0 && !code.getFrame.closed) { start = codeExpression(node.value, code) if (isWideType(node.`type`)) { code.append(InstructionConstants.DUP2) } else { code.append(InstructionConstants.DUP) } code.appendStore(`type`, code.getIndexTable(node.index)) } else { if (node.frame == 0 && code.getFrame.closed) { val index: Int = code.getFrameObjectIndex start = code.appendLoad(new ArrayType("java.lang.Object", 1), index) code.appendConstant(JInteger.valueOf(code.index(node.index))) } else { start = code.appendThis code.appendGetField(code.getMethod.getClassName, FRAME_PREFIX + node.frame, new ArrayType("java.lang.Object", 1)) code.appendConstant(JInteger.valueOf(node.index)) } if (node.isBasicType) { val boxed: ObjectType = code.boxing(`type`) code.appendNew(boxed) code.appendDup(1) codeExpression(node.value, code) code.appendInvoke(boxed.getClassName, "<init>", Type.VOID, Array[Type](`type`), Constants.INVOKESPECIAL) code.appendDup_2(1) code.appendArrayStore(Type.OBJECT) val method: String = unboxingMethods(boxed.getClassName).asInstanceOf[String] code.appendInvoke(boxed.getClassName, method, `type`, new Array[Type](0), Constants.INVOKEVIRTUAL) } else { codeExpression(node.value, code) code.appendDup_2(1) code.appendArrayStore(Type.OBJECT) } } start } def codeLocalRef(node: IRT.RefLocal, code: CodeGeneration.Proxy): InstructionHandle = { var start: InstructionHandle = null val `type`: Type = typeOf(node.`type`) if (node.frame == 0 && !code.getFrame.closed) { start = code.appendLoad(`type`, code.index(node.index)) } else { if (node.frame == 0 && code.getFrame.closed) { val index: Int = code.getFrameObjectIndex start = code.appendLoad(new ArrayType("java.lang.Object", 1), index) code.appendConstant(JInteger.valueOf(code.index(node.index))) } else { start = code.appendThis code.appendGetField(code.getMethod.getClassName, FRAME_PREFIX + node.frame, new ArrayType("java.lang.Object", 1)) code.appendConstant(JInteger.valueOf(node.index)) } code.appendArrayLoad(Type.OBJECT) if (node.isBasicType) { val boxed: ObjectType = code.boxing(`type`) val method: String = unboxingMethods(boxed.getClassName).asInstanceOf[String] code.appendCast(Type.OBJECT, boxed) code.appendInvoke(boxed.getClassName, method, `type`, new Array[Type](0), Constants.INVOKEVIRTUAL) } else { code.appendCast(Type.OBJECT, `type`) } } start } def codeStaticFieldRef(node: IRT.RefStaticField, code: CodeGeneration.Proxy): InstructionHandle = { val classType: String = node.field.affiliation.name val name: String = node.field.name val `type`: Type = 
typeOf(node.`type`) code.appendGetStatic(classType, name, `type`) } def codeMethodCall(node: IRT.Call, code: CodeGeneration.Proxy): InstructionHandle = { val start: InstructionHandle = codeExpression(node.target, code) var i: Int = 0 while (i < node.parameters.length) { codeExpression(node.parameters(i), code) i += 1; } val classType: IRT.ObjectType = node.target.`type`.asInstanceOf[IRT.ObjectType] var kind: Short = 0 if (classType.isInterface) { kind = Constants.INVOKEINTERFACE } else { kind = Constants.INVOKEVIRTUAL } val className: String = classType.name val name: String = node.method.name val ret: Type = typeOf(node.`type`) val args: Array[Type] = typesOf(node.method.arguments) code.appendInvoke(className, name, ret, args, kind) start } def codeArrayRef(node: IRT.RefArray, code: CodeGeneration.Proxy): InstructionHandle = { val targetType: IRT.ArrayType = node.target.`type`.asInstanceOf[IRT.ArrayType] val start: InstructionHandle = codeExpression(node.target, code) codeExpression(node.index, code) code.appendArrayLoad(typeOf(targetType.base)) start } def codeArrayLengthNode(node: IRT.ArrayLength, code: CodeGeneration.Proxy): InstructionHandle = { val start: InstructionHandle = codeExpression(node.target, code) code.append(InstructionConstants.ARRAYLENGTH) start } def codeArrayAssignment(node: IRT.SetArray, code: CodeGeneration.Proxy): InstructionHandle = { val targetType: IRT.ArrayType = node.`object`.`type`.asInstanceOf[IRT.ArrayType] val start: InstructionHandle = codeExpression(node.`object`, code) code.appendDup(1) codeExpression(node.index, code) codeExpression(node.value, code) code.appendArrayStore(typeOf(targetType.base)) start } def codeNew(node: IRT.NewObject, code: CodeGeneration.Proxy): InstructionHandle = { val `type`: IRT.ClassType = node.constructor.affiliation val start: InstructionHandle = code.appendNew(typeOf(`type`).asInstanceOf[ObjectType]) code.append(InstructionConstants.DUP) var i: Int = 0 while (i < node.parameters.length) { codeExpression(node.parameters(i), code) i += 1 } val className: String = `type`.name val arguments: Array[Type] = typesOf(node.constructor.getArgs) val kind: Short = Constants.INVOKESPECIAL code.appendInvoke(className, "<init>", Type.VOID, arguments, kind) start } def codeNewArray(node: IRT.NewArray, code: CodeGeneration.Proxy): InstructionHandle = { val start: InstructionHandle = codeExpressions(node.parameters, code) val `type`: IRT.ArrayType = node.arrayType code.appendNewArray(typeOf(`type`.component), node.parameters.length.asInstanceOf[Short]) start } def codeStaticMethodCall(node: IRT.CallStatic, code: CodeGeneration.Proxy): InstructionHandle = { var start: InstructionHandle = null if (node.parameters.length > 0) { start = codeExpression(node.parameters(0), code) var i: Int = 1 while (i < node.parameters.length) { codeExpression(node.parameters(i), code) i += 1; } } else { start = code.append(InstructionConstants.NOP) } val className: String = node.target.name val name: String = node.method.name val returnType: Type = typeOf(node.`type`) val arguments: Array[Type] = typesOf(node.method.arguments) val kind: Short = Constants.INVOKESTATIC code.appendInvoke(className, name, returnType, arguments, kind) start } def codeBinaryExpression(node: IRT.BinaryTerm, code: CodeGeneration.Proxy): InstructionHandle = { if (node.kind == LOGICAL_AND) { return codeLogicalAnd(node, code) } else if (node.kind == LOGICAL_OR) { return codeLogicalOr(node, code) } else if (node.kind == ELVIS) { return codeElvis(node, code) } val left: IRT.Term = node.lhs 
val right: IRT.Term = node.rhs val start: InstructionHandle = codeExpression(left, code) codeExpression(right, code) node.kind match { case ADD => add(code, left.`type`) case SUBTRACT => sub(code, left.`type`) case MULTIPLY => mul(code, left.`type`) case DIVIDE => div(code, left.`type`) case MOD => mod(code, left.`type`) case EQUAL => eq(code, left.`type`) case NOT_EQUAL => noteq(code, left.`type`) case LESS_OR_EQUAL => lte(code, left.`type`) case GREATER_OR_EQUAL => gte(code, left.`type`) case LESS_THAN => lt(code, left.`type`) case GREATER_THAN => gt(code, left.`type`) case BIT_AND => bitAnd(code, left.`type`) case BIT_OR => bitOr(code, right.`type`) case XOR => xor(code, right.`type`) case BIT_SHIFT_L2 => bitShiftL2(code, left.`type`) case BIT_SHIFT_R2 => bitShiftR2(code, left.`type`) case BIT_SHIFT_R3 => bitShiftR3(code, left.`type`) case _ => } start } def bitShiftR2(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if (`type` eq IRT.BasicType.INT) { code.append(InstructionConstants.ISHR) } else if (`type` eq IRT.BasicType.LONG) { code.append(InstructionConstants.LSHR) } else { throw new RuntimeException } } def bitShiftL2(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if (`type` eq IRT.BasicType.INT) { code.append(InstructionConstants.ISHL) } else if (`type` eq IRT.BasicType.LONG) { code.append(InstructionConstants.LSHL) } else { throw new RuntimeException } } def bitShiftR3(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if (`type` eq IRT.BasicType.INT) { code.append(InstructionConstants.IUSHR) } else if (`type` eq IRT.BasicType.LONG) { code.append(InstructionConstants.LUSHR) } else { throw new RuntimeException } } def codeLogicalAnd(node: IRT.BinaryTerm, code: CodeGeneration.Proxy): InstructionHandle = { val start: InstructionHandle = codeExpression(node.lhs, code) var b1: BranchHandle = null var b2: BranchHandle = null var b3: BranchHandle = null b1 = code.append(new IFEQ(null)) codeExpression(node.rhs, code) b2 = code.append(new IFEQ(null)) code.append(InstructionConstants.ICONST_1) b3 = code.append(new GOTO(null)) val failure: InstructionHandle = code.append(InstructionConstants.ICONST_0) b1.setTarget(failure) b2.setTarget(failure) b3.setTarget(code.append(InstructionConstants.NOP)) start } def codeLogicalOr(node: IRT.BinaryTerm, code: CodeGeneration.Proxy): InstructionHandle = { val start: InstructionHandle = codeExpression(node.lhs, code) var b1: BranchHandle = null var b2: BranchHandle = null var b3: BranchHandle = null b1 = code.append(new IFNE(null)) codeExpression(node.rhs, code) b2 = code.append(new IFNE(null)) code.append(InstructionConstants.ICONST_0) b3 = code.append(new GOTO(null)) val success: InstructionHandle = code.append(InstructionConstants.ICONST_1) b1.setTarget(success) b2.setTarget(success) b3.setTarget(code.append(new NOP)) start } def codeElvis(node: IRT.BinaryTerm, code: CodeGeneration.Proxy): InstructionHandle = { val start: InstructionHandle = codeExpression(node.lhs, code) code.appendDup(1) code.appendNull(typeOf(node.`type`)) val b1: BranchHandle = code.append(new IF_ACMPEQ(null)) val b2: BranchHandle = code.append(new GOTO(null)) b1.setTarget(code.appendPop(1)) codeExpression(node.rhs, code) b2.setTarget(code.append(new NOP)) start } def bitAnd(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if ((`type` eq IRT.BasicType.INT) || (`type` eq IRT.BasicType.BOOLEAN)) { code.append(new IAND) } else if (`type` eq IRT.BasicType.LONG) { code.append(new LAND) } else { throw new RuntimeException } } def bitOr(code: 
CodeGeneration.Proxy, `type`: IRT.Type): Unit = {
    if ((`type` eq IRT.BasicType.INT) || (`type` eq IRT.BasicType.BOOLEAN)) {
      code.append(new IOR)
    } else if (`type` eq IRT.BasicType.LONG) {
      code.append(new LOR)
    } else {
      throw new RuntimeException
    }
  }

  def xor(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = {
    if ((`type` eq IRT.BasicType.INT) || (`type` eq IRT.BasicType.BOOLEAN)) {
      code.append(new IXOR)
    } else if (`type` eq IRT.BasicType.LONG) {
      code.append(new LXOR)
    } else {
      throw new RuntimeException
    }
  }

  def eq(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = {
    var b1: BranchHandle = null
    if ((`type` eq IRT.BasicType.INT) || (`type` eq IRT.BasicType.CHAR) || (`type` eq IRT.BasicType.BOOLEAN)) {
      b1 = code.append(new IF_ICMPEQ(null))
    } else if (`type` eq IRT.BasicType.LONG) {
      code.append(new LCMP)
      b1 = code.append(new IFEQ(null))
    } else if (`type` eq IRT.BasicType.FLOAT) {
      code.append(new FCMPL)
      b1 = code.append(new IFEQ(null))
    } else if (`type` eq IRT.BasicType.DOUBLE) {
      code.append(new DCMPL)
      b1 = code.append(new IFEQ(null))
    } else {
      b1 = code.append(new IF_ACMPEQ(null))
    }
    processBranch(code, b1)
  }

  def noteq(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = {
    var b1: BranchHandle = null
    if ((`type` eq IRT.BasicType.INT) || (`type` eq IRT.BasicType.CHAR) || (`type` eq IRT.BasicType.BOOLEAN)) {
      b1 = code.append(new IF_ICMPNE(null))
    } else if (`type` eq IRT.BasicType.LONG) {
      code.append(new LCMP)
      b1 = code.append(new IFNE(null))
    } else if (`type` eq IRT.BasicType.FLOAT) {
      code.append(new FCMPL)
      b1 = code.append(new IFNE(null))
    } else if (`type` eq IRT.BasicType.DOUBLE) {
      code.append(new DCMPL)
      b1 = code.append(new IFNE(null))
    } else {
      b1 = code.append(new IF_ACMPNE(null))
    }
    processBranch(code, b1)
  }

  def gt(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = {
    var b1: BranchHandle = null
    if (`type` eq IRT.BasicType.INT) {
      b1 = code.append(new IF_ICMPGT(null))
    } else if (`type` eq IRT.BasicType.LONG) {
      code.append(new LCMP)
      b1 = code.append(new IFGT(null))
    } else if (`type` eq IRT.BasicType.FLOAT) {
      code.append(new FCMPL)
      b1 = code.append(new IFGT(null))
    } else if (`type` eq IRT.BasicType.DOUBLE) {
      code.append(new DCMPL)
      b1 = code.append(new IFGT(null))
    } else {
      throw new RuntimeException("")
    }
    processBranch(code, b1)
  }

  def gte(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = {
    var comparation: BranchHandle = null
    if (`type` eq IRT.BasicType.INT) {
      comparation = code.append(new IF_ICMPGE(null))
    } else if (`type` eq IRT.BasicType.LONG) {
      code.append(new LCMP)
      comparation = code.append(new IFGE(null))
    } else if (`type` eq IRT.BasicType.FLOAT) {
      code.append(new FCMPL)
      comparation = code.append(new IFGE(null))
    } else if (`type` eq IRT.BasicType.DOUBLE) {
      code.append(new DCMPL)
      comparation = code.append(new IFGE(null))
    } else {
      throw new RuntimeException("")
    }
    processBranch(code, comparation)
  }

  def lte(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = {
    var b1: BranchHandle = null
    if (`type` eq IRT.BasicType.INT) {
      b1 = code.append(new IF_ICMPLE(null))
    } else if (`type` eq IRT.BasicType.LONG) {
      code.append(new LCMP)
      // LCMP pushes -1/0/1; "less than or equal" therefore needs IFLE, not IFLT
      b1 = code.append(new IFLE(null))
    } else if (`type` eq IRT.BasicType.FLOAT) {
      code.append(new FCMPL)
      b1 = code.append(new IFLE(null))
    } else if (`type` eq IRT.BasicType.DOUBLE) {
      code.append(new DCMPL)
      b1 = code.append(new IFLE(null))
    } else {
      throw new RuntimeException("")
    }
    processBranch(code, b1)
  }

  def lt(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = {
    var comparation: BranchHandle = null
    if (`type` eq IRT.BasicType.INT) {
      comparation = code.append(new
IF_ICMPLT(null)) } else if (`type` eq IRT.BasicType.LONG) { code.append(new LCMP) comparation = code.append(new IFLT(null)) } else if (`type` eq IRT.BasicType.FLOAT) { code.append(new FCMPL) comparation = code.append(new IFLT(null)) } else if (`type` eq IRT.BasicType.DOUBLE) { code.append(new DCMPL) comparation = code.append(new IFLT(null)) } else { throw new RuntimeException("") } processBranch(code, comparation) } private def processBranch(code: CodeGeneration.Proxy, b1: BranchHandle): Unit = { code.append(InstructionConstants.ICONST_0) val b2: BranchHandle = code.append(new GOTO(null)) b1.setTarget(code.append(InstructionConstants.ICONST_1)) b2.setTarget(code.append(InstructionConstants.NOP)) } def codeChar(node: IRT.CharacterValue, code: CodeGeneration.Proxy): InstructionHandle = { code.appendConstant(JCharacter.valueOf(node.value)) } def codeString(node: IRT.StringValue, code: CodeGeneration.Proxy): InstructionHandle = { code.appendConstant(node.value) } def codeInteger(node: IRT.IntValue, code: CodeGeneration.Proxy): InstructionHandle = { code.appendConstant(JInteger.valueOf(node.value)) } def codeLong(node: IRT.LongValue, code: CodeGeneration.Proxy): InstructionHandle = { code.appendConstant(JLong.valueOf(node.value)) } def codeFloat(node: IRT.FloatValue, code: CodeGeneration.Proxy): InstructionHandle = { code.appendConstant(JFloat.valueOf(node.value)) } def codeDouble(node: IRT.DoubleValue, code: CodeGeneration.Proxy): InstructionHandle = { code.appendConstant(JDouble.valueOf(node.value)) } def codeBoolean(node: IRT.BoolValue, code: CodeGeneration.Proxy): InstructionHandle = { code.appendConstant(JBoolean.valueOf(node.value)) } def codeNull(node: IRT.NullValue, code: CodeGeneration.Proxy): InstructionHandle = { code.append(InstructionConstants.ACONST_NULL) } def codeUnaryExpression(node: IRT.UnaryTerm, code: CodeGeneration.Proxy): InstructionHandle = { val start: InstructionHandle = codeExpression(node.operand, code) val `type`: IRT.Type = node.operand.`type` node.kind match { case PLUS => plus(code, `type`) case MINUS => minus(code, `type`) case NOT => not(code, `type`) case BIT_NOT => bitNot(code, `type`) case _ => throw new RuntimeException } start } private def plus(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if ((`type` ne IRT.BasicType.INT) && (`type` ne IRT.BasicType.LONG) && (`type` ne IRT.BasicType.FLOAT) && (`type` ne IRT.BasicType.DOUBLE)) { throw new RuntimeException } } private def minus(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if (`type` eq IRT.BasicType.INT) { code.append(InstructionConstants.INEG) } else if (`type` eq IRT.BasicType.LONG) { code.append(InstructionConstants.LNEG) } else if (`type` eq IRT.BasicType.FLOAT) { code.append(InstructionConstants.FNEG) } else if (`type` eq IRT.BasicType.DOUBLE) { code.append(InstructionConstants.DNEG) } else { throw new RuntimeException } } private def not(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if (`type` eq IRT.BasicType.BOOLEAN) { val b1: BranchHandle = code.append(new IFNE(null)) var b2: BranchHandle = null code.append(new ICONST(1)) b2 = code.append(new GOTO(null)) b1.setTarget(code.append(new ICONST(0))) b2.setTarget(code.append(new NOP)) } else { throw new RuntimeException } } private def bitNot(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if (`type` eq IRT.BasicType.INT) { code.append(new ICONST(-1)) code.append(new IXOR) } else if (`type` eq IRT.BasicType.LONG) { code.append(new LCONST(-1)) code.append(new LXOR) } else { throw new RuntimeException } } def 
codeCast(node: IRT.AsInstanceOf, code: CodeGeneration.Proxy): InstructionHandle = { val target: IRT.Term = node.target val start: InstructionHandle = codeExpression(target, code) code.appendCast(typeOf(target.`type`), typeOf(node.destination)) start } def codeIsInstance(node: IRT.InstanceOf, code: CodeGeneration.Proxy): InstructionHandle = { val start: InstructionHandle = codeExpression(node.target, code) code.appendInstanceOf(typeOf(node.checked).asInstanceOf[ReferenceType]) start } def codeSelf(node: IRT.This, code: CodeGeneration.Proxy): InstructionHandle = { code.append(InstructionConstants.ALOAD_0) } def codeOuterThis(node: IRT.OuterThis, code: CodeGeneration.Proxy): InstructionHandle = { code.appendThis code.appendGetField(currentClosureName, OUTER_THIS, Type.OBJECT) code.appendCast(Type.OBJECT, typeOf(node.`type`)) } def codeFieldRef(node: IRT.RefField, code: CodeGeneration.Proxy): InstructionHandle = { val start: InstructionHandle = codeExpression(node.target, code) val symbol: IRT.ClassType = node.target.`type`.asInstanceOf[IRT.ClassType] code.appendGetField(symbol.name, node.field.name, typeOf(node.`type`)) start } def codeFieldAssign(node: IRT.SetField, code: CodeGeneration.Proxy): InstructionHandle = { val start: InstructionHandle = codeExpression(node.target, code) codeExpression(node.value, code) if (isWideType(node.value.`type`)) { code.append(InstructionConstants.DUP2_X1) } else { code.append(InstructionConstants.DUP_X1) } val symbol: IRT.ClassType = node.target.`type`.asInstanceOf[IRT.ClassType] code.appendPutField(symbol.name, node.field.name, typeOf(node.`type`)) start } private def add(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if (`type` eq IRT.BasicType.INT) { code.append(new IADD) } else if (`type` eq IRT.BasicType.LONG) { code.append(new LADD) } else if (`type` eq IRT.BasicType.FLOAT) { code.append(new FADD) } else { code.append(new DADD) } } private def sub(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if (`type` eq IRT.BasicType.INT) { code.append(new ISUB) } else if (`type` eq IRT.BasicType.LONG) { code.append(new LSUB) } else if (`type` eq IRT.BasicType.FLOAT) { code.append(new FSUB) } else { code.append(new DSUB) } } private def mul(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if (`type` eq IRT.BasicType.INT) { code.append(new IMUL) } else if (`type` eq IRT.BasicType.LONG) { code.append(new LMUL) } else if (`type` eq IRT.BasicType.FLOAT) { code.append(new FMUL) } else { code.append(new DMUL) } } private def div(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if (`type` eq IRT.BasicType.INT) { code.append(new IDIV) } else if (`type` eq IRT.BasicType.LONG) { code.append(new LDIV) } else if (`type` eq IRT.BasicType.FLOAT) { code.append(new FDIV) } else { code.append(new DDIV) } } private def mod(code: CodeGeneration.Proxy, `type`: IRT.Type): Unit = { if (`type` eq IRT.BasicType.INT) { code.append(new IREM) } else if (`type` eq IRT.BasicType.LONG) { code.append(new LREM) } else if (`type` eq IRT.BasicType.FLOAT) { code.append(new FREM) } else { code.append(new DREM) } } private def frameObjectIndex(origin: Int, arguments: Array[IRT.Type]): Int = { var maxIndex: Int = origin var i: Int = 0 while (i < arguments.length) { if (isWideType(arguments(i))) { maxIndex += 2 } else { maxIndex += 1 } i += 1; } maxIndex } private def makeIndexTableFor(origin: Int, frame: LocalFrame): Array[Int] = { val bindings: Seq[LocalBinding] = frame.entries val indexTable: Array[Int] = new Array[Int](bindings.length) var maxIndex: Int = origin var 
i: Int = 0 while (i < bindings.length) { indexTable(i) = maxIndex if (isWideType(bindings(i).tp)) { maxIndex += 2 } else { maxIndex += 1 } i += 1; } indexTable } private def makeIndexTableForClosureFrame(frame: LocalFrame): Array[Int] = { val bindings: Seq[LocalBinding] = frame.entries val indexTable: Array[Int] = new Array[Int](bindings.length) var maxIndex: Int = 0 var i: Int = 0 while (i < bindings.length) { indexTable(i) = maxIndex maxIndex += 1 i += 1 } indexTable } private def isWideType(symbol: IRT.Type): Boolean = { ((symbol eq IRT.BasicType.DOUBLE) || (symbol eq IRT.BasicType.LONG)) } private def typeOf(`type`: IRT.Type): Type = { translateIxTypeToVmType(`type`) } private def typesOf(types: Array[IRT.Type]): Array[Type] = { val destinationTypes: Array[Type] = new Array[Type](types.length) var i: Int = 0 while (i < destinationTypes.length) { destinationTypes(i) = translateIxTypeToVmType(types(i)) i += 1; } destinationTypes } private final val compiledClasses: List[JavaClass] = new ArrayList[JavaClass] private var generator: SymbolGenerator = null private var isStatic: Boolean = false private var isClosure: Boolean = false private var currentClosureName: String = null }
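
// A minimal, self-contained sketch (assuming only BCEL on the classpath, independent of
// the onion compiler types above) of the 0/1-materialization pattern that processBranch
// emits for the comparison operators: branch to push 1, fall through to push 0.
object BranchLoweringSketch {
  import org.apache.bcel.generic.{BranchHandle, GOTO, IF_ICMPLT, InstructionConstants, InstructionList}

  // lowers `a < b` for ints, assuming both operands are already on the operand stack
  def lowerIntLessThan(il: InstructionList): Unit = {
    val toTrue: BranchHandle = il.append(new IF_ICMPLT(null))
    il.append(InstructionConstants.ICONST_0)                   // comparison failed: push false
    val toEnd: BranchHandle = il.append(new GOTO(null))
    toTrue.setTarget(il.append(InstructionConstants.ICONST_1)) // comparison held: push true
    toEnd.setTarget(il.append(InstructionConstants.NOP))
  }
}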
onionlang/onion
src/main/scala/onion/compiler/CodeGeneration.scala
Scala
bsd-3-clause
62,216
// Regression test: a stable-identifier type pattern (`_: A.type`) must resolve
// the object A inherited from the superclass.
class CSuper {
  object A
}

class C extends CSuper {
  def f = (A: AnyRef) match { case _: A.type => "joepie" }
}

object Test extends C with App {
  println(f)
}
yusuke2255/dotty
tests/run/t4897.scala
Scala
bsd-3-clause
164
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.consumer import kafka.cluster.Broker import kafka.server.AbstractFetcherThread import kafka.message.ByteBufferMessageSet import kafka.api.{PartitionOffsetRequestInfo, Request, OffsetRequest, FetchResponsePartitionData} import kafka.common.TopicAndPartition class ConsumerFetcherThread(name: String, val config: ConsumerConfig, sourceBroker: Broker, partitionMap: Map[TopicAndPartition, PartitionTopicInfo], val consumerFetcherManager: ConsumerFetcherManager) extends AbstractFetcherThread(name = name, clientId = config.clientId, sourceBroker = sourceBroker, socketTimeout = config.socketTimeoutMs, socketBufferSize = config.socketBufferSize, fetchSize = config.fetchSize, fetcherBrokerId = Request.OrdinaryConsumerId, maxWait = config.maxFetchWaitMs, minBytes = config.minFetchBytes) { // process fetched data def processPartitionData(topicAndPartition: TopicAndPartition, fetchOffset: Long, partitionData: FetchResponsePartitionData) { val pti = partitionMap(topicAndPartition) if (pti.getFetchOffset != fetchOffset) throw new RuntimeException("Offset doesn't match for topic %s partition: %d pti offset: %d fetch offset: %d" .format(topicAndPartition.topic, topicAndPartition.partition, pti.getFetchOffset, fetchOffset)) pti.enqueue(partitionData.messages.asInstanceOf[ByteBufferMessageSet]) } // handle a partition whose offset is out of range and return a new fetch offset def handleOffsetOutOfRange(topicAndPartition: TopicAndPartition): Long = { var startTimestamp : Long = 0 config.autoOffsetReset match { case OffsetRequest.SmallestTimeString => startTimestamp = OffsetRequest.EarliestTime case OffsetRequest.LargestTimeString => startTimestamp = OffsetRequest.LatestTime case _ => startTimestamp = OffsetRequest.LatestTime } val request = OffsetRequest(Map(topicAndPartition -> PartitionOffsetRequestInfo(startTimestamp, 1))) val newOffset = simpleConsumer.getOffsetsBefore(request).partitionErrorAndOffsets(topicAndPartition).offsets.head val pti = partitionMap(topicAndPartition) pti.resetFetchOffset(newOffset) pti.resetConsumeOffset(newOffset) newOffset } // any logic for partitions whose leader has changed def handlePartitionsWithErrors(partitions: Iterable[TopicAndPartition]) { consumerFetcherManager.addPartitionsWithError(partitions) } }
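
// Hedged standalone sketch (OffsetResetPolicyExample is not part of Kafka; it only
// mirrors the mapping used by handleOffsetOutOfRange above): the auto.offset.reset
// setting selects which boundary offset to request, defaulting to the latest offset
// for unrecognized values.
object OffsetResetPolicyExample {
  def resetTimestamp(autoOffsetReset: String): Long = autoOffsetReset match {
    case OffsetRequest.SmallestTimeString => OffsetRequest.EarliestTime
    case OffsetRequest.LargestTimeString  => OffsetRequest.LatestTime
    case _                                => OffsetRequest.LatestTime
  }
}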
dchenbecker/kafka-sbt
core/src/main/scala/kafka/consumer/ConsumerFetcherThread.scala
Scala
apache-2.0
3,634
import scala.collection.mutable

object Solution {
  // every edge in this problem has a fixed weight of 6
  private val distance = 6

  // BFS from s; a -1 sentinel in the queue marks the end of each level,
  // so `iter` counts how many levels deep the traversal currently is
  private def minReach(adj: Array[Array[Boolean]], s: Int, n: Int): Array[Int] = {
    val indices = 0 until n
    def go(q: mutable.Queue[Int], dist: Array[Int], visited: Array[Boolean], iter: Int): Array[Int] = {
      val src = q.dequeue
      // only the sentinel was left: convert level counts to distances (-1 = unreachable)
      if (q.isEmpty) dist.map { case -1 => -1; case n_ => n_ * distance }
      else {
        if (src < 0) {
          // level boundary: push the sentinel back and descend one level
          q.enqueue(-1)
          go(q, dist, visited, iter + 1)
        } else {
          val adjs = adj(src)
          for (edgeTo <- indices.filter(to => adjs(to) && !visited(to))) {
            dist(edgeTo) = iter
            q.enqueue(edgeTo)
            visited(edgeTo) = true
          }
          go(q, dist, visited, iter)
        }
      }
    }
    val visited = Array.fill[Boolean](n)(false)
    visited(s) = true
    go(mutable.Queue[Int](s, -1), Array.fill[Int](n)(-1), visited, 1)
  }

  def main(args: Array[String]) = {
    val it = scala.io.Source.stdin.getLines
    val t = it.next.toInt
    var i = 0
    while (i < t) {
      var j = 0
      val Array(n, m) = it.next.split(" ").map(_.toInt)
      val adj = Array.fill[Boolean](n, n)(false)
      while (j < m) {
        val Array(v, w) = it.next.split(" ").map(_.toInt - 1)
        adj(v)(w) = true
        adj(w)(v) = true
        j += 1
      }
      val s = it.next.trim.toInt - 1
      val dists = minReach(adj, s, n)
      println(dists.indices.filterNot(_ == s).map(dists(_)).mkString(" "))
      i += 1
    }
  }
}
marcos-sb/hacker-rank
algorithms/graph-theory/bfsshortreach/Solution.scala
Scala
apache-2.0
1,548
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ai.h2o.sparkling.utils import java.io.OutputStream class FinalizingOutputStream(inner: OutputStream, finalizer: () => Unit) extends OutputStream { override def close(): Unit = { try { inner.close() } finally { finalizer() } } override def flush(): Unit = inner.flush() override def write(b: Int): Unit = inner.write(b) override def write(b: Array[Byte]): Unit = inner.write(b) override def write(b: Array[Byte], off: Int, len: Int): Unit = inner.write(b, off, len) }
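
// A minimal usage sketch (illustrative only, not part of the library): the finalizer
// runs exactly once, when the stream is closed, after the wrapped stream's own close.
object FinalizingOutputStreamExample {
  import java.io.ByteArrayOutputStream

  def main(args: Array[String]): Unit = {
    val buffer = new ByteArrayOutputStream()
    val stream = new FinalizingOutputStream(buffer, () => println("resource released"))
    stream.write("payload".getBytes("UTF-8"))
    stream.close() // closes `buffer` first, then invokes the finalizer
  }
}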
h2oai/sparkling-water
utils/src/main/scala/ai/h2o/sparkling/utils/FinalizingOutputStream.scala
Scala
apache-2.0
1,319
package pureconfig import scala.collection.JavaConverters._ import scala.language.higherKinds import com.typesafe.config._ /** A trait signaling that a `ConfigWriter` can write missing (undefined) values. * * `ConfigWriter`s always produce a valid `ConfigValue` with their `to` method. This trait adds an extra `toOpt` method * that parent writers can use in order to decide whether or not they should write a value using this writer. */ trait WritesMissingKeys[A] { this: ConfigWriter[A] => def toOpt(a: A): Option[ConfigValue] } /** Trait containing `ConfigWriter` instances for collection types. */ trait CollectionWriters { implicit def optionWriter[A](implicit conv: ConfigWriter[A]): ConfigWriter[Option[A]] = new ConfigWriter[Option[A]] with WritesMissingKeys[Option[A]] { override def to(t: Option[A]): ConfigValue = t match { case Some(v) => conv.to(v) case None => ConfigValueFactory.fromAnyRef(null) } def toOpt(t: Option[A]): Option[ConfigValue] = t.map(conv.to) } implicit def traversableWriter[A, F[A] <: TraversableOnce[A]](implicit configConvert: ConfigWriter[A] ): ConfigWriter[F[A]] = new ConfigWriter[F[A]] { override def to(ts: F[A]): ConfigValue = { ConfigValueFactory.fromIterable(ts.toList.map(configConvert.to).asJava) } } implicit def mapWriter[A](implicit configConvert: ConfigWriter[A]): ConfigWriter[Map[String, A]] = new ConfigWriter[Map[String, A]] { override def to(keyVals: Map[String, A]): ConfigValue = { ConfigValueFactory.fromMap(keyVals.mapValues(configConvert.to).toMap.asJava) } } implicit def arrayWriter[A](implicit writer: ConfigWriter[A]): ConfigWriter[Array[A]] = new ConfigWriter[Array[A]] { override def to(a: Array[A]): ConfigValue = ConfigValueFactory.fromIterable(a.toList.map(writer.to).asJava) } } object CollectionWriters extends CollectionWriters
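
// Hedged illustration (the `writeField` helper below is hypothetical, not part of
// pureconfig's API): a parent writer can pattern-match on WritesMissingKeys and use
// toOpt to decide whether a field should be omitted entirely rather than written as
// an explicit null.
object WritesMissingKeysExample {
  import com.typesafe.config.ConfigValue

  def writeField[A](value: A, writer: ConfigWriter[A]): Option[ConfigValue] =
    writer match {
      case w: WritesMissingKeys[A @unchecked] => w.toOpt(value) // None means "omit the key"
      case w                                  => Some(w.to(value))
    }
}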
melrief/pureconfig
core/src/main/scala/pureconfig/CollectionWriters.scala
Scala
mpl-2.0
1,973
package notebook.kernel.pfork import java.io.File import java.net.{ServerSocket, Socket, URLClassLoader, URLDecoder} import java.util.concurrent.atomic.AtomicBoolean import org.apache.commons.exec._ import org.slf4j.LoggerFactory import scala.collection.JavaConversions._ import scala.collection.mutable /** * I am so sick of this being a thing that gets implemented everywhere. Let's abstract. */ class ProcessFork[A: reflect.ClassTag] { import ProcessFork._ val processClass = (implicitly[reflect.ClassTag[A]]).runtimeClass assert(processClass.getConstructors exists { c => val types = c.getParameterTypes singleSeqParameter(types) || allStringParameters(types) }, "Class %s can't be used by ProcessFork because it does not have a constructor that can take strings.".format(processClass)) def workingDirectory = new File(".") def heap: Long = defaultHeap def stack: Long = -1 def permGen: Long = -1 def reservedCodeCache: Long = -1 def server: Boolean = true def debug: Boolean = false // If true, then you will likely get address in use errors spawning multiple processes def classPath: IndexedSeq[String] = defaultClassPath def classPathString = classPath.mkString(File.pathSeparator) def jvmArgs = { val builder = IndexedSeq.newBuilder[String] def ifNonNeg(value: Long, prefix: String) { if (value >= 0) { builder += (prefix + value) } } ifNonNeg(heap, "-Xmx") ifNonNeg(stack, "-Xss") ifNonNeg(permGen, "-XX:MaxPermSize=") ifNonNeg(reservedCodeCache, "-XX:ReservedCodeCacheSize=") if (server) builder += "-server" if (debug) builder ++= IndexedSeq("-Xdebug", "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=5005") builder.result() } implicit protected def int2SuffixOps(i: Int) = new SuffixOps(i) protected final class SuffixOps(i: Int) { def k: Long = i.toLong << 10 def m: Long = i.toLong << 20 def g: Long = i.toLong << 30 } def execute(args: String*): ProcessKiller = { val cmd = new CommandLine(javaHome + "/bin/java") .addArguments(jvmArgs.toArray) .addArgument(classOf[ChildProcessMain].getName) .addArgument(processClass.getName) .addArgument(ProcessFork.serverPort.toString) .addArguments(args.toArray) log.info("Spawning %s".format(cmd.toString)) // use environment because classpaths can be longer here than as a command line arg val environment = System.getenv + ("CLASSPATH" -> ( sys.env.get("HADOOP_CONF_DIR").map(_ + ":").getOrElse("") + sys.env.get("EXTRA_CLASSPATH").map(_ + ":").getOrElse("") + classPathString)) val exec = new KillableExecutor // Change me! exec.setWorkingDirectory(workingDirectory) exec.execute(cmd, environment, new ExecuteResultHandler { def onProcessFailed(e: ExecuteException) {} def onProcessComplete(exitValue: Int) {} }) () => exec.kill() } } object ProcessFork { type ProcessKiller = () => Unit def defaultClassPath: IndexedSeq[String] = { val loader = getClass.getClassLoader.asInstanceOf[URLClassLoader] loader.getURLs map { u => val f = new File(u.getFile) URLDecoder.decode(f.getAbsolutePath, "UTF8") } } def defaultHeap = Runtime.getRuntime.maxMemory /* Override to expose ability to forcibly kill the process */ private class KillableExecutor extends DefaultExecutor { val killed = new AtomicBoolean(false) setWatchdog(new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT) { override def start(p: Process) { if (killed.get()) p.destroy() } }) def kill() { if (killed.compareAndSet(false, true)) { Option(getExecutorThread) foreach (_.interrupt()) } } } /* DK: Bi-directional liveness can be detected via redirected System.in (child), System.out (parent), avoids need for socket... 
*/ private lazy val serverPort = { val ss = new ServerSocket(0) //FIXME import scala.concurrent.ExecutionContext.Implicits.global concurrent.future { try { // CY: Not super rigorous, but we think that, if we don't hang onto a reference to the Socket server-side, // eventually it'll get GC'ed, which causes the child VM to die. val conns = mutable.Buffer[Socket]() while (true) { val conn = ss.accept() conns += conn //TODO: mem/resource leak... } } catch { case e: Throwable => e.printStackTrace(); throw e } } ss.getLocalPort } private lazy val javaHome = System.getProperty("java.home") private lazy val log = LoggerFactory.getLogger(getClass) private[pfork] def main(args: Array[String]) { val className = args(0) val parentPort = args(1).toInt val socket = new Socket("127.0.0.1", parentPort) val remainingArgs = args.drop(2).toIndexedSeq Class.forName(className).getConstructors collectFirst { case c if singleSeqParameter(c.getParameterTypes) => c.newInstance(remainingArgs) case c if allStringParameters(c.getParameterTypes) && c.getParameterTypes.length == remainingArgs.length => c.newInstance(remainingArgs: _*) } getOrElse log.error("Inconceivable!") try { // Blocks until parent quits, then throws an exception socket.getInputStream.read() } finally { log.warn("Parent process stopped; exiting.") sys.exit(1) } } private def singleSeqParameter( types: Array[Class[_]]) = types.length == 1 && types(0).isAssignableFrom(classOf[IndexedSeq[_]]) private def allStringParameters(types: Array[Class[_]]) = types.forall(_ == classOf[String]) }
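
// Hedged usage sketch (EchoChild and the timings are hypothetical, not from the
// notebook codebase): a child process is described by a class whose constructor
// accepts the forwarded arguments, and is launched by configuring a ProcessFork
// and calling execute, which hands back a ProcessKiller.
class EchoChild(args: IndexedSeq[String]) {
  println("child started with: " + args.mkString(", "))
}

object ProcessForkExample {
  def main(argv: Array[String]): Unit = {
    val fork = new ProcessFork[EchoChild] {
      override def heap: Long = 512.m // 512 MiB via the inherited SuffixOps helper
    }
    val kill = fork.execute("hello", "world")
    Thread.sleep(5000) // let the child run for a bit
    kill()             // forcibly terminate the child JVM
  }
}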
caiomsouza/spark-notebook
modules/subprocess/src/main/scala/notebook/kernel/pfork/ProcessFork.scala
Scala
apache-2.0
5,680
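A minimal usage sketch for the fork helper above. EchoChild, its message, and the 512 MiB heap are made-up illustrations; the only real requirement is that the target class expose a public constructor taking Strings (or a single IndexedSeq).

package example

import notebook.kernel.pfork.ProcessFork

// Hypothetical child-process entry point, constructed reflectively by
// ProcessFork.main inside the spawned JVM.
class EchoChild(message: String) {
  println(s"child started with: $message")
}

object EchoChildLauncher extends App {
  val fork = new ProcessFork[EchoChild] {
    override def heap: Long = 512.m // SuffixOps from ProcessFork: 512 MiB
    override def debug: Boolean = false
  }

  // execute() spawns the child JVM and returns a thunk that can kill it.
  val kill: ProcessFork.ProcessKiller = fork.execute("hello from parent")

  Thread.sleep(5000L)
  kill() // forcibly stop the child process
}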
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.knockdata.spark.highcharts.model import org.junit.Test import scala.collection.mutable class ModelAnalyzer { val lines : List[String] = { // the file is from http://api.highcharts.com/highcharts/names val lines = scala.io.Source.fromFile("src/test/resources/highcharts.4.2.5.names.json").getLines().toList lines.tail.init.map { line => line.replace('"', ' ').replace(',',' ').trim } } def combo2(values: List[String]): List[(String, String)] = { val buffer = mutable.Buffer[(String, String)]() def combination0(current: String, rest: List[String]): Unit = { rest.foreach(item => buffer += current -> item) rest match { case head :: Nil => // no more to traverse case head :: tail => combination0 (head, tail) } } combination0(values.head, values.tail) buffer.toList } val plotOptionsLines: List[String] = { lines.filter(_.startsWith("plotOptions-")) .map(_.drop("plotOptions-".length)) } val chartTypes = { val allPlotOptions = plotOptionsLines.map {_.split('-').head}.distinct // allPlotOptions diff List("funnel", "gauge", "solidgauge", "pyramid", "pie", // "heatmap", "treemap", // "errorbar", // "boxplot", "polygon", "waterfall", "scatter", "bubble") allPlotOptions } val chartTypePlotOptions: List[(String, List[String])] = chartTypes.map { option => val len = option.length + 1 option -> plotOptionsLines.collect { case line if line.startsWith(option) => line.drop(len) } } val topTreePlotOptions = chartTypePlotOptions.map { case (k, options) => k -> options.filter(o => !o.contains('-')) } val topLeafPlotOptions = chartTypePlotOptions.map { case (k, options) => k -> options.filter(o => o.startsWith("-")) } val topPlotOptions = chartTypePlotOptions.map { case (k, options) => k -> options.filter(o => o.startsWith("-") || !o.contains('-')) } val plotOptionsMap: Map[String, List[String]] = chartTypePlotOptions.toMap val topTreePlotOptionsMap: Map[String, List[String]] = topTreePlotOptions.toMap val topLeafPlotOptionsMap: Map[String, List[String]] = topLeafPlotOptions.toMap val topPlotOptionsMap: Map[String, List[String]] = topPlotOptions.toMap val commonOptions: List[String] = plotOptionsMap.values.reduce(_ intersect _) val commonTopPlotOptions: List[String] = topPlotOptionsMap.values.reduce(_ intersect _) val combos = combo2(chartTypes) val commonTopCombos = combos.map { case (a, b) => val optionsA = topPlotOptionsMap(a) val optionsB = topPlotOptionsMap(b) (a, optionsA.size, b, optionsB.size) -> (optionsA intersect optionsB) }.sortBy(_._2.size) val commonTopCombosLeaf = combos.map { case (a, b) => val optionsA = topLeafPlotOptionsMap(a) val optionsB = topLeafPlotOptionsMap(b) (a, optionsA.size, b, optionsB.size) -> (optionsA intersect optionsB) }.sortBy(_._2.size) @Test def testCommonTop(): Unit = { 
println(commonTopPlotOptions.mkString("\n")) } @Test def testCommonFunc(): Unit = { val commonFunc = commonTopPlotOptions.map(_.replace("-", "")).sorted.map{ name => s""" | def $name(value: Any): this.type = { | append("$name", value) | } """.stripMargin } println(commonFunc.mkString) } @Test def testSpecialTop(): Unit = { for ((chartType, options) <- topPlotOptionsMap) { val specialOptions = options diff commonTopPlotOptions println(chartType) println(specialOptions.map(o => s" $o").mkString("\n")) } } @Test def testSpecialTopFunctions(): Unit = { for ((chartType, options) <- topPlotOptionsMap) { val specialOptions = options diff commonTopPlotOptions val specialFunc = specialOptions.map(_.replace("-", "")).sorted.map{ name => s""" | def $name(value: Any): this.type = { | append("$name", value) | } """.stripMargin } println(s""" ------------------------- $chartType ---------------- """) println(s""" def fieldName = "$chartType"""") println(specialFunc.mkString) } } @Test def testTopCombinations(): Unit = { println(commonTopCombos.mkString("\n")) } @Test def testTopLeafCombinations(): Unit = { val nums = commonTopCombosLeaf.map{case (k, v) => k -> v.size} println(nums.mkString("\n")) // println(commonTopLeaf.mkString("\n")) } @Test def testTopLeafCombosSpecial(): Unit = { val nums = commonTopCombosLeaf.map{case (k, v) => k -> v.size} println(nums.mkString("\n")) // println(commonTopLeaf.mkString("\n")) } @Test def testOptions(): Unit = { println(s"common ${commonOptions.size}") println(commonOptions.map(o => s" $o").mkString("\n")) for ((optionName, allOptions: List[String]) <- chartTypePlotOptions) { println(optionName) val extraOptions: List[String] = allOptions diff commonOptions println(extraOptions.map(o => s" $o").mkString("\n")) } // val areaOptions = plotOptionsMap("area") // val areaRangeOption = plotOptionsMap("arearange") // // // val areaCommonOptions = areaOptions intersect areaRangeOption // println(s"areaCommonOptions ${areaCommonOptions.size}") // println(areaCommonOptions.map(o => s" $o").mkString("\n")) } }
knockdata/spark-highcharts
src/test/scala/com/knockdata/spark/highcharts/model/ModelAnalyzer.scala
Scala
apache-2.0
6,309
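The pairwise enumeration in combo2 above is the core of this analyzer. The following self-contained restatement (with an explicit Nil case added so a single-element list cannot throw a MatchError) shows what it produces for a small input.

object Combo2Demo extends App {
  import scala.collection.mutable

  // Every unordered pair of inputs is produced exactly once.
  def combo2(values: List[String]): List[(String, String)] = {
    val buffer = mutable.Buffer[(String, String)]()
    def loop(current: String, rest: List[String]): Unit = {
      rest.foreach(item => buffer += current -> item)
      rest match {
        case Nil | _ :: Nil => // nothing left to pair
        case head :: tail   => loop(head, tail)
      }
    }
    if (values.nonEmpty) loop(values.head, values.tail)
    buffer.toList
  }

  // n inputs yield n * (n - 1) / 2 unordered pairs:
  println(combo2(List("area", "bar", "pie")))
  // List((area,bar), (area,pie), (bar,pie))
}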
package com.twitter.finagle /** * Codecs provide protocol encoding and decoding via netty pipelines * as well as a standard filter stack that are applied to services * from this codec. */ import java.net.SocketAddress import org.jboss.netty.channel.{ChannelPipelineFactory, ChannelPipeline} import com.twitter.util.Future import com.twitter.finagle.builder.{ClientConfig, ServerConfig} /** * Superclass for all codecs. */ trait Codec[Req, Rep] { /** * The pipeline factory that implements the protocol. */ def pipelineFactory: ChannelPipelineFactory /** * Prepare a newly-created (connected) Service endpoint. It becomes * available once the returned Future is satisfied. */ def prepareService( underlying: Service[Req, Rep] ): Future[Service[Req, Rep]] = Future.value(underlying) } object Codec { def ofPipelineFactory[Req, Rep](makePipeline: => ChannelPipeline) = new Codec[Req, Rep] { def pipelineFactory = new ChannelPipelineFactory { def getPipeline = makePipeline } } def ofPipeline[Req, Rep](p: ChannelPipeline) = new Codec[Req, Rep] { def pipelineFactory = new ChannelPipelineFactory { def getPipeline = p } } } /** * Codec factories create codecs given some configuration. */ /** * Clients */ case class ClientCodecConfig(serviceName: String) /** * Servers */ case class ServerCodecConfig(serviceName: String, boundAddress: SocketAddress) /** * A combined codec factory provides both client and server codec * factories in one (when available). */ trait CodecFactory[Req, Rep] { type Client = ClientCodecConfig => Codec[Req, Rep] type Server = ServerCodecConfig => Codec[Req, Rep] def client: Client def server: Server }
enachb/finagle_2.9_durgh
finagle-core/src/main/scala/com/twitter/finagle/Codec.scala
Scala
apache-2.0
1,732
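As a hedged sketch of how these helpers are used, the following defines a newline-framed text codec with Codec.ofPipelineFactory. The netty 3 handlers are standard, but the pipeline layout is illustrative rather than copied from a real finagle codec.

import com.twitter.finagle.Codec
import org.jboss.netty.channel.Channels
import org.jboss.netty.handler.codec.frame.{DelimiterBasedFrameDecoder, Delimiters}
import org.jboss.netty.handler.codec.string.{StringDecoder, StringEncoder}

object LineCodec {
  def apply(): Codec[String, String] =
    Codec.ofPipelineFactory[String, String] {
      val pipeline = Channels.pipeline()
      // Split the byte stream on newlines before decoding to strings.
      pipeline.addLast("framer",
        new DelimiterBasedFrameDecoder(8192, Delimiters.lineDelimiter(): _*))
      pipeline.addLast("decoder", new StringDecoder())
      pipeline.addLast("encoder", new StringEncoder())
      pipeline
    }
}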
package com.twitter.finagle.http.codec import com.twitter.conversions.StorageUnitOps._ import com.twitter.finagle.ChannelBufferUsageException import com.twitter.finagle.stats.{NullStatsReceiver, StatsReceiver} import com.twitter.util.StorageUnit /** * ChannelBufferUsageTracker tracks the channel buffer used by outstanding * requests. An exception will be thrown if the total size exceeds a limit. * ChannelBufferManager uses ChannelBufferUsageTracker. */ private[finagle] class ChannelBufferUsageTracker( limit: StorageUnit, statsReceiver: StatsReceiver = NullStatsReceiver) { private[this] object state { var currentUsage = 0L var maxUsage = 0L var usageLimit = limit } // It is probably not necessary to use synchronized methods here. We // can change this if there is a performance problem. private[this] val currentUsageStat = statsReceiver.addGauge("channel_buffer_current_usage") { currentUsage.inBytes } private[this] val maxUsageStat = statsReceiver.addGauge("channel_buffer_max_usage") { maxUsage.inBytes } def currentUsage: StorageUnit = synchronized { state.currentUsage.bytes } def maxUsage: StorageUnit = synchronized { state.maxUsage.bytes } def usageLimit(): StorageUnit = synchronized { state.usageLimit } def setUsageLimit(limit: StorageUnit) = synchronized { state.usageLimit = limit } def increase(size: Long) = synchronized { if (state.currentUsage + size > state.usageLimit.inBytes) { throw new ChannelBufferUsageException( "Channel buffer usage exceeded limit (" + currentUsage + ", " + size + " vs. " + usageLimit + ")" ) } else { state.currentUsage += size if (currentUsage > maxUsage) state.maxUsage = state.currentUsage } } def decrease(size: Long) = synchronized { if (state.currentUsage < size) { throw new ChannelBufferUsageException( "invalid ChannelBufferUsageTracker decrease operation (" + size + " vs. " + currentUsage + ")" ) } else { state.currentUsage -= size } } }
twitter/finagle
finagle-base-http/src/main/scala/com/twitter/finagle/http/codec/ChannelBufferUsageTracker.scala
Scala
apache-2.0
2,080
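A minimal usage sketch, assuming the caller sits inside the finagle package (the tracker is private[finagle]) and that StorageUnitOps provides the usual numeric suffixes; the byte counts are arbitrary.

import com.twitter.conversions.StorageUnitOps._
import com.twitter.finagle.ChannelBufferUsageException

object TrackerDemo {
  def main(args: Array[String]): Unit = {
    val tracker = new ChannelBufferUsageTracker(limit = 1.kilobyte)

    tracker.increase(600) // 600 bytes outstanding
    tracker.decrease(100) // 500 bytes outstanding
    println(tracker.currentUsage)

    // Exceeding the limit fails fast instead of buffering without bound.
    try tracker.increase(600)
    catch {
      case e: ChannelBufferUsageException => println(s"rejected: ${e.getMessage}")
    }
  }
}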
package lila.app package mashup import chess.Color import org.joda.time.Period import lila.api.Context import lila.bookmark.BookmarkApi import lila.forum.PostApi import lila.game.{ GameRepo, Game, Crosstable, PlayTime } import lila.relation.RelationApi import lila.security.Granter import lila.user.{ User, Trophy, Trophies, TrophyApi } case class UserInfo( user: User, ranks: Map[lila.rating.Perf.Key, Int], nbUsers: Int, nbPlaying: Int, hasSimul: Boolean, crosstable: Option[Crosstable], nbBookmark: Int, nbImported: Int, ratingChart: Option[String], nbFollowing: Int, nbFollowers: Int, nbBlockers: Option[Int], nbPosts: Int, playTime: User.PlayTime, donor: Boolean, trophies: Trophies, isStreamer: Boolean) { def nbRated = user.count.rated def nbWithMe = crosstable ?? (_.nbGames) def percentRated: Int = math.round(nbRated / user.count.game.toFloat * 100) def allTrophies = List( donor option Trophy( _id = "", user = user.id, kind = Trophy.Kind.Donor, date = org.joda.time.DateTime.now), isStreamer option Trophy( _id = "", user = user.id, kind = Trophy.Kind.Streamer, date = org.joda.time.DateTime.now) ).flatten ::: trophies } object UserInfo { def apply( countUsers: () => Fu[Int], bookmarkApi: BookmarkApi, relationApi: RelationApi, trophyApi: TrophyApi, gameCached: lila.game.Cached, crosstableApi: lila.game.CrosstableApi, postApi: PostApi, getRatingChart: User => Fu[Option[String]], getRanks: String => Fu[Map[String, Int]], isDonor: String => Fu[Boolean], isHostingSimul: String => Fu[Boolean], isStreamer: String => Boolean)(user: User, ctx: Context): Fu[UserInfo] = countUsers() zip getRanks(user.id) zip (gameCached nbPlaying user.id) zip gameCached.nbImportedBy(user.id) zip (ctx.me.filter(user!=) ?? { me => crosstableApi(me.id, user.id) }) zip getRatingChart(user) zip relationApi.nbFollowing(user.id) zip relationApi.nbFollowers(user.id) zip (ctx.me ?? Granter(_.UserSpy) ?? { relationApi.nbBlockers(user.id) map (_.some) }) zip postApi.nbByUser(user.id) zip isDonor(user.id) zip trophyApi.findByUser(user) zip PlayTime(user) flatMap { case ((((((((((((nbUsers, ranks), nbPlaying), nbImported), crosstable), ratingChart), nbFollowing), nbFollowers), nbBlockers), nbPosts), isDonor), trophies), playTime) => (nbPlaying > 0) ?? isHostingSimul(user.id) map { hasSimul => new UserInfo( user = user, ranks = ranks, nbUsers = nbUsers, nbPlaying = nbPlaying, hasSimul = hasSimul, crosstable = crosstable, nbBookmark = bookmarkApi countByUser user, nbImported = nbImported, ratingChart = ratingChart, nbFollowing = nbFollowing, nbFollowers = nbFollowers, nbBlockers = nbBlockers, nbPosts = nbPosts, playTime = playTime, donor = isDonor, trophies = trophies, isStreamer = isStreamer(user.id)) } } }
TangentialAlan/lila
app/mashup/UserInfo.scala
Scala
mit
3,264
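The long zip chain in UserInfo.apply above is easier to read once the underlying pattern is isolated. This is a generic sketch of that pattern; the three futures are placeholders, not lila APIs.

import scala.concurrent.{ExecutionContext, Future}

object ZipDemo {
  // zip pairs the futures, and the nested-tuple pattern match unpacks them,
  // exactly as the twelve-way chain in UserInfo.apply does at larger scale.
  def demo(implicit ec: ExecutionContext): Future[String] = {
    val fa = Future(1)
    val fb = Future("two")
    val fc = Future(3.0)

    fa zip fb zip fc map { case ((a, b), c) =>
      s"a=$a b=$b c=$c"
    }
  }
}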
package com.socrata.tileserver package util import org.scalatest.mockito.MockitoSugar import com.socrata.http.client.{RequestBuilder, Response} import com.socrata.http.server.util.RequestId.ReqIdHeader import com.socrata.soql.types.SoQLText import com.socrata.testcommon class GeoProviderTest extends TestBase with UnusedSugar with MockitoSugar { test("Headers and parameters are correct") { implicit val generatorDrivenConfig = PropertyCheckConfig(minSuccessful = 5) import gen.Headers._ import gen.Alphanumerics._ import gen.ShortStrings._ val resp = mock[Response] val base = RequestBuilder("mock.socrata.com") forAll { (reqId: Alphanumeric, id: Alphanumeric, param: (ShortString, ShortString), knownHeader: IncomingHeader, unknownHeader: UnknownHeader) => val request = mocks.StaticRequest(param, Map(knownHeader, unknownHeader, "X-Socrata-Host" -> "geo.provider.test")) val info = RequestInfo(request, id, Unused, Unused, Unused) val filter = GeoProvider.filter(info.tile, info.geoColumn, Unused) val augmented = GeoProvider.augmentParams(info, filter) val expected = base. addPath("id"). addPath(s"${id: String}.soqlpack"). addHeader(ReqIdHeader -> info.requestId). addHeader("X-Socrata-Federation" -> "Honey Badger"). addHeader("X-Socrata-Host" -> "geo.provider.test"). addHeader(knownHeader). addParameters(augmented). get.builder val client = testcommon.mocks.StaticCuratedClient { request => val actual = request(base).builder // Assertions are in here, since we only care about what the client sees. actual.url must equal (expected.url) actual.method must equal (expected.method) actual.query.toSet must equal (expected.query.toSet) actual.headers.toSet must equal (expected.headers.toSet) resp } GeoProvider(client).doQuery(info): Unit } } test("Augmenting parameters adds to select, where and group") { import gen.Alphanumerics._ val otherKey = "$other" val groupKey = "$group" val whereKey = "$where" val selectKey = "$select" forAll {(rawOtherValue: Alphanumeric, selectParam: (Alphanumeric, Alphanumeric), whereParam: (Alphanumeric, Alphanumeric), groupParam: Alphanumeric) => val otherValue: String = rawOtherValue val (selectBase, selectValue) = selectParam: (String, String) val (whereBase, whereValue) = whereParam: (String, String) val groupBase = groupParam: String val mondara = mocks.StaticRequest('$' + "mondara" -> "true") val neither = mocks.StaticRequest(otherKey -> otherValue) val select = mocks.StaticRequest(selectKey -> selectBase) val where = mocks.StaticRequest(whereKey -> whereBase) val group = mocks.StaticRequest(groupKey -> groupBase) neither.queryParameters must have size (1) val nParams = GeoProvider.augmentParams(reqInfo(neither ++ mondara, geoColumn=selectValue), whereValue) nParams must have size (7) nParams(selectKey) must include (selectValue) nParams(selectKey) must include ("snap_to_grid") nParams(whereKey) must equal (whereValue) nParams(otherKey) must equal (otherValue) nParams(groupKey) must include ("snap_to_grid") val nfParams = GeoProvider.augmentParams(reqInfo(neither, geoColumn=selectValue), whereValue) nfParams must have size (6) nfParams(selectKey) must equal (selectValue) nfParams(whereKey) must equal (whereValue) nfParams(otherKey) must equal (otherValue) val sParams = GeoProvider.augmentParams(reqInfo(neither ++ select ++ mondara, geoColumn=selectValue), whereValue) sParams must have size (7) sParams(selectKey) must startWith (s"$selectBase,") sParams(selectKey) must include (selectValue) sParams(selectKey) must include ("snap_to_grid") sParams(otherKey) must equal 
(otherValue) sParams(whereKey) must equal (whereValue) sParams(groupKey) must include ("snap_to_grid") val wParams = GeoProvider.augmentParams(reqInfo(neither ++ where ++ mondara, geoColumn=selectValue), whereValue) wParams must have size (7) wParams(otherKey) must equal (otherValue) wParams(selectKey) must include (selectValue) wParams(selectKey) must include ("snap_to_grid") wParams(whereKey) must startWith (s"(${whereBase}) and") wParams(whereKey) must endWith (s"(${whereValue})") wParams(groupKey) must include ("snap_to_grid") val gParams = GeoProvider.augmentParams(reqInfo(neither ++ group ++ mondara, geoColumn=selectValue), whereValue) gParams must have size (7) gParams(otherKey) must equal (otherValue) gParams(selectKey) must include (selectValue) gParams(selectKey) must include ("snap_to_grid") gParams(groupKey) must startWith (s"(${groupBase}),") gParams(groupKey) must include ("snap_to_grid") val allParams = GeoProvider.augmentParams( reqInfo(neither ++ where ++ select ++ group ++ mondara, geoColumn=selectValue), whereValue) allParams must have size (7) allParams(otherKey) must equal (otherValue) allParams(selectKey) must startWith (s"$selectBase,") allParams(selectKey) must include (selectValue) allParams(selectKey) must include ("snap_to_grid") allParams(whereKey) must startWith (s"(${whereBase}) and ") allParams(whereKey) must endWith (s"(${whereValue})") allParams(groupKey) must startWith (s"(${groupBase}),") allParams(groupKey) must include ("snap_to_grid") val allfParams = GeoProvider.augmentParams( reqInfo(neither ++ where ++ select ++ group, geoColumn=selectValue), whereValue) allfParams must have size (7) allfParams(otherKey) must equal (otherValue) allfParams(selectKey) must equal (s"$selectBase, $selectValue") allfParams(whereKey) must equal (s"(${whereBase}) and (${whereValue})") allfParams(groupKey) must equal (s"${groupBase}") } } test("GeoProvider adds info.overscan to $where if present") { val resp = mock[Response] val base = RequestBuilder("mock.socrata.com") forAll { os: Int => val info = mocks.PngInfo(os) val expected = GeoProvider.filter(info.tile, info.geoColumn, os) val client = testcommon.mocks.StaticCuratedClient { request => val actual = request(base).builder val query = actual.query.toMap // Assertions are in here, since we only care about what the client sees. query('$' + "where") must equal (expected) resp } GeoProvider(client).doQuery(info): Unit } } test("GeoProvider adds overscan of zero to $where if absent") { val resp = mock[Response] val base = RequestBuilder("mock.socrata.com") val info = mocks.PngInfo(Unused, None, None) val expected = GeoProvider.filter(info.tile, info.geoColumn, 0) val client = testcommon.mocks.StaticCuratedClient { request => val actual = request(base).builder val query = actual.query.toMap // Assertions are in here, since we only care about what the client sees. query('$' + "where") must equal (expected) resp } GeoProvider(client).doQuery(info): Unit } test("filter uses overscan to adjust corners") { import gen.QuadTiles._ forAll { (tile: QuadTile, geoColumn: String, os: Int) => val actual = GeoProvider.filter(tile, geoColumn, os) tile.corners(os).foreach { case (lat, lon) => actual must include (lat.toString) actual must include (lon.toString) } } } }
socrata-platform/tileserver
src/test/scala/com.socrata.tileserver/util/GeoProviderTest.scala
Scala
apache-2.0
8,410
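The forAll style used throughout this suite generalizes as below. This sketch uses current ScalaTest/ScalaCheck artifact names, which may differ from the versions pinned in this repo, and the property is a generic map law rather than GeoProvider's actual contract.

import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.must.Matchers
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks

class AugmentParamsLawsSpec extends AnyFunSuite
    with Matchers with ScalaCheckDrivenPropertyChecks {

  test("adding a parameter never loses existing entries") {
    forAll { (params: Map[String, String], key: String, value: String) =>
      val augmented = params + (key -> value)
      // The new entry is present...
      augmented(key) must equal (value)
      // ...and every unrelated entry survives unchanged.
      (params - key).foreach { case (k, v) => augmented(k) must equal (v) }
    }
  }
}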
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import org.apache.carbondata.core.metadata.DatabaseLocationProvider /** * environment related code */ object EnvHelper { def isCloud(sparkSession: SparkSession): Boolean = false def isPrivacy(sparkSession: SparkSession, isExternal: Boolean): Boolean = false def setDefaultHeader( sparkSession: SparkSession, optionsFinal: java.util.Map[String, String] ): Unit = { } def isRetainData(sparkSession: SparkSession, retainData: Boolean): Boolean = { true } def getDatabase(database: String): String = { DatabaseLocationProvider.get().provide(database) } }
jackylk/incubator-carbondata
integration/spark/src/main/scala/org/apache/spark/sql/EnvHelper.scala
Scala
apache-2.0
1,430
package com.softwaremill.macwire import com.softwaremill.macwire.internals.* import scala.quoted.* object MacwireMacros { private val log = new Logger() def wireImpl[T: Type](using q: Quotes): Expr[T] = { import q.reflect.* val dependencyResolver = DependencyResolver.throwErrorOnResolutionFailure[q.type, T](log) wire[T](using q)(dependencyResolver) } //TODO build failure path def wireRecImpl[T: Type](using q: Quotes): Expr[T] = { import q.reflect.* // FIXME for some reason `TypeRepr.of[String].typeSymbol.owner` and `defn.JavaLangPackage` have different hash codes def isWireable(tpe: TypeRepr): Boolean = tpe.classSymbol.map(_.owner.fullName != defn.JavaLangPackage.fullName).getOrElse(false) val dependencyResolver = new DependencyResolver[q.type, T](using q)( log, tpe => if !isWireable(tpe) then report.throwError(s"Cannot find a value of type: [${showTypeName(tpe)}]") else tpe.asType match { case '[t] => wireRecImpl[t].asTerm } ) wire[T](using q)(dependencyResolver) } private def wire[T: Type](using q: Quotes)(dependencyResolver: DependencyResolver[q.type, T]): Expr[T] = { import q.reflect.* val constructorCrimper = new ConstructorCrimper[q.type, T](using q)(dependencyResolver, log) val companionCrimper = new CompanionCrimper[q.type, T](using q)(dependencyResolver, log) lazy val whatWasWrong: String = { if ( constructorCrimper.constructor.isEmpty && companionCrimper.applies.isDefined && companionCrimper.applies.get.isEmpty ) s"Cannot find a public constructor and the companion object has no apply methods constructing target type for [${showTypeName[T]}]" else if (companionCrimper.applies.isDefined && companionCrimper.applies.get.size > 1) s"No public primary constructor found for ${showTypeName[T]} and multiple matching apply methods in its companion object were found." else s"Target type not supported for wiring: ${showTypeName[T]}. Please file a bug report with your use-case." } val code: Tree = (constructorCrimper.constructorTree orElse companionCrimper.applyTree) .getOrElse(report.throwError(whatWasWrong)) log(s"Generated code: ${code.show}, ${code}") code.asExprOf[T] } def wireWith_impl[T: Type](using q: Quotes)(factory: Expr[Any]): Expr[T] = { import q.reflect.* val typeCheckUtil = new TypeCheckUtil[q.type](log) val dependencyResolver = DependencyResolver.throwErrorOnResolutionFailure[q.type, T](log) val (params, fun) = factory.asTerm match { case Inlined(_, _, Block(List(DefDef(_, List(p), _, Some(Apply(f, _)))), _)) => (p.params, f) case _ => report.throwError(s"Not supported factory type: [$factory]") } val values = params.map { // case vd@ValDef(_, name, tpt, rhs) => dependencyResolver.resolve(vd.symbol, typeCheckIfNeeded(tpt)) case vd @ ValDef(name, tpt, rhs) => dependencyResolver.resolve(vd.symbol, tpt.tpe) } val code = Apply(fun, values).asExprOf[T] log(s"Generated code: ${code.show}") code } def wireSet_impl[T: Type](using q: Quotes): Expr[Set[T]] = { import q.reflect.* val tpe = TypeRepr.of[T] val dependencyResolver = DependencyResolver.throwErrorOnResolutionFailure[q.type, T](log) val instances = dependencyResolver.resolveAll(tpe) val code = '{ ${ Expr.ofSeq(instances.toSeq.map(_.asExprOf[T])) }.toSet } log(s"Generated code: ${code.show}") code } }
adamw/macwire
macros/src/main/scala-3/com/softwaremill/macwire/MacwireMacros.scala
Scala
apache-2.0
3,550
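At the call site, the macros above are reached through macwire's public wire entry point. A standard usage sketch with made-up class names:

import com.softwaremill.macwire._

class Database()
class UserRepository(db: Database)
class UserService(repo: UserRepository)

object Wiring {
  // wire[T] is expanded at compile time by wireImpl above: it picks T's
  // constructor and resolves each parameter from the enclosing scope.
  lazy val database: Database             = new Database()
  lazy val userRepository: UserRepository = wire[UserRepository] // ~ new UserRepository(database)
  lazy val userService: UserService       = wire[UserService]    // ~ new UserService(userRepository)
}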
package org.jetbrains.plugins.scala.failed.annotator import com.intellij.testFramework.fixtures.CodeInsightTestFixture import org.jetbrains.plugins.scala.PerfCycleTests import org.junit.experimental.categories.Category /** * @author Anton Yalyshev */ @Category(Array(classOf[PerfCycleTests])) class OverloadingBadCodeGreenTest extends BadCodeGreenTestBase { import CodeInsightTestFixture.CARET_MARKER def testScl2117A(): Unit = { val text = s"""object Test extends App{ | class A | class B extends A | def foo(x: A, y: B) = print(1) | object foo { | def apply(x: B, y: B) = print(3) | def apply(x: A, y: A) = print(5) | } | | ${CARET_MARKER}foo(new B, new B) |} """.stripMargin doTest(text) } def testScl2117B(): Unit = { val text = s"""object Test { | def apply[T](x1: T) = "one arg" // A | def apply[T](x1: T, x2: T) = "two args" // B | def apply[T](elems: T*) = "var-args: " + elems.size // C |} | |object Exec { | ${CARET_MARKER}Test(1) |} """.stripMargin doTest(text) } }
ilinum/intellij-scala
test/org/jetbrains/plugins/scala/failed/annotator/OverloadingBadCodeGreenTest.scala
Scala
apache-2.0
1,267
package com.chaordicsystems.cactus import com.chaordicsystems.cactus.Operator._ import com.chaordicsystems.cactus.Validator._ import com.sksamuel.elastic4s.ElasticDsl.{bool, must, should} import com.sksamuel.elastic4s._ import org.json4s._ import org.json4s.jackson.JsonMethods._ object Parser { implicit val formats = DefaultFormats def AND(args: List[QueryDefinition]): QueryDefinition = bool { must(args) } def OR(args: List[QueryDefinition]): QueryDefinition = bool { should(args) minimumShouldMatch 1 } def handleUnary(operation: JValue, typeEnabled: Boolean): QueryDefinition = { val (op, field, args) = validateUnaryOperation(operation) op match { case Operator.NE => field NE (args, typeEnabled) case Operator.EQ => field EQ (args, typeEnabled) case Operator.LT => field LT (args, typeEnabled) case Operator.GT => field GT (args, typeEnabled) case Operator.LE => field LE (args, typeEnabled) case Operator.GE => field GE (args, typeEnabled) case _ => throw OperatorHandlerException(op) } } def handleBinary(operation: JValue, typeEnabled: Boolean): QueryDefinition = { val op = validateOperator(operation) val args = validateBinaryArgs(operation) op match { case Operator.AND => AND(args.map(validateAndTranslate(_, typeEnabled))) case Operator.OR => OR(args.map(validateAndTranslate(_, typeEnabled))) case _ => throw OperatorHandlerException(op) } } def handleMultiary(operation: JValue): QueryDefinition = { val (op, field, args) = validateMultiaryOperation(operation) op match { case Operator.ALL => field ALL args case Operator.ANY => field ANY args case _ => throw OperatorHandlerException(op) } } def validateAndTranslate(operation: JValue, typeEnabled: Boolean): QueryDefinition = { val op = validateOperator(operation) if (isBinary(op)) { handleBinary(operation, typeEnabled) } else if (isUnary(op)) { handleUnary(operation, typeEnabled) } else if (isMultiary(op)) { handleMultiary(operation) } else { throw InvalidOperatorException(op) } } def cactusToES(jsonValue: String, typeEnabled: Boolean = false): QueryDefinition = validateAndTranslate(parse(jsonValue), typeEnabled) }
chaordic/cactus
src/main/scala/com/chaordicsystems/cactus/Parser.scala
Scala
mit
2,326
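A hedged sketch of how the AND/OR combinators compose elastic4s queries. termQuery is a stand-in for what the field operators (EQ, LT, ...) from the DSL would produce, and its exact return type depends on the elastic4s version in use.

import com.chaordicsystems.cactus.Parser
import com.sksamuel.elastic4s.ElasticDsl._

object ParserDemo extends App {
  // Illustrative leaf clauses standing in for translated unary operations.
  val clauses = List(termQuery("brand", "acme"), termQuery("category", "shoes"))

  val conjunction = Parser.AND(clauses) // bool { must(...) }: all clauses required
  val disjunction = Parser.OR(clauses)  // bool { should(...) minimumShouldMatch 1 }: at least one
}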
package de.is24.play.orientdb import scala.language.implicitConversions sealed trait Operation case class UpdateOperation(record: Map[String, String]) extends Operation case class DeleteOperation(record: Map[String, String]) extends Operation case class CreateOperation(record: Map[String, String]) extends Operation case class CommandOperation(language: String, command: String) extends Operation case class ScriptOperation(language: String, script: Seq[String]) extends Operation object Operation { def sqlScript(queries: String*): ScriptOperation = new ScriptOperation(language = "sql", script = queries.toSeq) def sqlCommand(query: String): CommandOperation = new CommandOperation(language = "sql", command = query) class Batchable(queries: Seq[String]) { def asBatch(transaction: Boolean = false): BatchOperation = { new BatchOperation(transaction = transaction, operations = Seq( Operation.sqlScript(queries: _*) )) } def transactionally: BatchOperation = asBatch(transaction = true) } implicit def stringToBatchable(query: String): Batchable = new Batchable(Seq(query)) implicit def seqToBatchable(queries: Seq[String]): Batchable = new Batchable(queries) implicit def queryToBatchable(query: OrientDbQuery): Batchable = new Batchable(Seq(query.query)) implicit def queriesToBatchable(queries: Seq[OrientDbQuery]): Batchable = new Batchable(queries.map(_.query)) }
ImmobilienScout24/play-orientdb-client
app/de/is24/play/orientdb/Operation.scala
Scala
apache-2.0
1,435
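Putting the implicit conversions to work, assuming only the BatchOperation constructor already used by asBatch above:

import de.is24.play.orientdb._
import de.is24.play.orientdb.Operation._

object BatchDemo {
  // A single raw SQL string becomes a non-transactional batch...
  val single: BatchOperation =
    "UPDATE Person SET active = true".asBatch()

  // ...while a sequence of statements can be wrapped in one transaction.
  val tx: BatchOperation = Seq(
    "CREATE VERTEX Person SET name = 'a'",
    "CREATE VERTEX Person SET name = 'b'"
  ).transactionally
}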
/** * The MIT License (MIT) * <p/> * Copyright (c) 2016 ScalateKids * <p/> * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * <p/> * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * <p/> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. * <p/> * * @author Scalatekids * @version 1.0 * @since 1.0 */ package com.actorbase.actorsystem.actors.main import akka.actor.{ Actor, ActorLogging, ActorRef, OneForOneStrategy, PoisonPill, Props } import akka.cluster.sharding.ShardRegion.{ExtractEntityId, ExtractShardId} import akka.actor.SupervisorStrategy._ import akka.pattern.ask import akka.util.Timeout import com.actorbase.actorsystem.messages.AuthActorMessages.{ AddCollectionTo, RemoveCollectionFrom, ListUsers } import com.actorbase.actorsystem.actors.storefinder.Storefinder import com.actorbase.actorsystem.utils.ActorbaseCollection import com.actorbase.actorsystem.utils.ActorbaseCollection.{ Read, ReadWrite } import com.actorbase.actorsystem.messages.MainMessages._ import com.actorbase.actorsystem.messages.StorefinderMessages._ import com.actorbase.actorsystem.messages.ClientActorMessages.{ MapResponse, ListResponse } import com.actorbase.actorsystem.utils.CryptoUtils import com.typesafe.config.ConfigFactory import scala.collection.mutable import scala.concurrent.duration._ import scala.language.postfixOps import scala.concurrent.ExecutionContext.Implicits.global /** * Class that represents a Main actor. This actor is responsible of managing * incoming requests. 
*/ object Main { /** * Read from configuration the number of shards, typically equals to the * (number of nodes) x (a factor of ten) */ lazy val numberOfShards = ConfigFactory.load().getInt("shard-number") /** * Props method, used to build an instance of Main actor * @param authProxy ActorRef representing the Authenticator actor that will be used by the Main actor * @return an object of type Props, usable directly with an actorsystem running */ def props(authProxy: ActorRef) = Props(classOf[Main], authProxy) /** name of the sharded entity */ def shardName = "mainActor" /** * ExtractShardId is a function needed by akka's cluster sharding extension in order to * retrieve shard region ids while addressing messages between sharded actors * * @return a String representing an UUID of a shard-region where the actor belongs to */ val extractShardId: ExtractShardId = { case CreateCollection(_, collection, _) => (collection.getUUID.hashCode % numberOfShards).toString case RemoveFrom(_, uuid, _) => (uuid.hashCode % numberOfShards).toString case InsertTo(_,collection, _, _, _) => (collection.getUUID.hashCode % numberOfShards).toString case GetFrom(_,collection, _) => (collection.getUUID.hashCode % numberOfShards).toString case AddContributor(_, _, _, uuid, _) => (uuid.hashCode % numberOfShards).toString case RemoveContributor(_, _, uuid) => (uuid.hashCode % numberOfShards).toString } /** * ExtractEntityId is a function needed by akka's cluster sharding extension in order to * retrieve entity actors ids while addressing messages * * @return a String representing an UUID of an entity actor inside a shard-region */ val extractEntityId: ExtractEntityId = { case msg: CreateCollection => (msg.collection.getUUID, msg) case msg: RemoveFrom => (msg.uuid, msg) case msg: InsertTo => (msg.collection.getUUID, msg) case msg: GetFrom => (msg.collection.getUUID, msg) case msg: AddContributor => (msg.uuid, msg) case msg: RemoveContributor => (msg.uuid, msg) } } /** * Class that represents a Main actor. This actor is responsible of managing * incoming requests. */ class Main(authProxy: ActorRef) extends Actor with ActorLogging { private var sfMap = Map[ActorbaseCollection, ActorRef]().empty private var requestMap = Map[String, mutable.Map[String, mutable.Map[String, Array[Byte]]]]() // a bit clunky, should switch to a queue /** * Method that overrides the supervisorStrategy method. */ override val supervisorStrategy = OneForOneStrategy(maxNrOfRetries = 10, withinTimeRange = 1 minute) { case _: Exception => Resume } /** * Method that creates a collection in Actorbase. * * @param collection an ActorbaseCollection representing the collection that needs to be created * @return an ActorRef pointing to the Storefinder just created that maps the collection */ private def createCollection(collection: ActorbaseCollection, persist: Boolean = true): Option[ActorRef] = { if (sfMap.contains(collection)) sfMap get collection else { if (collection.getOwner != "admin") authProxy ! AddCollectionTo("admin", collection, ReadWrite, persist) log.debug(s"creating ${collection.getName} for ${collection.getOwner}") val sf = context.actorOf(Storefinder.props(collection, authProxy)) sfMap += (collection -> sf) authProxy ! 
AddCollectionTo(collection.getOwner, collection, ReadWrite, persist) Some(sf) } } private def extractContributors(collection: ActorbaseCollection): Map[String, Boolean] = { collection.getContributors mapValues { c => c match { case Read => false case ReadWrite => true } } } /** * Receive method of the Main actor, it does different things based on the message it receives:<br> * _InsertTo: when the actor receives this message it inserts the item in the collection requested by the user.<br> * _CreateCollection: when the actor receives this message it creates the collection inserted by the user <br> * _GetFrom: when the actor receives this message it sends back the item requested by the user<br> * _CompleteTransaction: when the actor receives this message it awaits for Storefinder response of all storekeepers, expecting * a given number of response, equals to the number of key-value pairs of the collection requested<br> * _RemoveFrom: when the actor receives this message it removes the item requested by the user<br> * _AddContributor: when the actor receives this message it adds the specified user to the contributor list of the defined by the user<br> * _RemoveContributor: when the actor receives this message it removes the specified user to the contributor list of the defined by the user<br> * */ def receive: Receive = { case message: MainMessage => message match { /** * Insert message, insert a key/value into a designed collection, searching * if sfMap contains the collection I need, if it's present search for the * right keyrange * * @param owner a String representing the owner of the collection * @param name a String representing the collection name * @param key a String representing the new key to be inserted * @param value a Any object type representing the value to be inserted * with associated key, default to Array[Byte] type * @param update a Boolean flag, define the insert behavior (with or without * updating the value) */ case InsertTo(requester, collection, key, value, update) => log.debug("MAIN: got work!") sfMap.find(x => x._1 == collection) map { c => if (requester == c._1.getOwner || c._1.containsReadWriteContributor(requester)) c._2 forward Insert(key, value, update) else sender ! "NoPrivileges" } getOrElse { if (requester == "admin" || requester == collection.getOwner) createCollection(collection) map (_ forward Insert(key, value, update)) getOrElse sender ! "UndefinedCollection" else sender ! "NoPrivileges" } /** * Create a collection in the system * * @param name a String representing the name of the collection * @param owner a String representing the owner of the collection */ case CreateCollection(requester, collection, persist) => if (requester == "admin" || requester == collection.getOwner) { createCollection(collection, persist) sender ! "OK" } else sender ! "NoPrivileges" /** * Get item from collection message, given a key of type String, retrieve * a value from a specified collection, if key is empty, remove the * entire collection * * @param collection a String representing the collection name * @param key a String representing the key to be retrieved */ case GetFrom(requester, collection, key) => if (key.nonEmpty) sfMap.find(x => (x._1 == collection) || (x._1.containsReadContributor(requester)) || (x._1.containsReadWriteContributor(requester))) map { c => if (c._1.getOwner == requester || c._1.containsReadWriteContributor(requester) || c._1.containsReadContributor(requester)) c._2 forward Get(key) else sender ! Left("NoPrivileges") } getOrElse sender ! 
Left("UndefinedCollection") else { sfMap.find(x => (x._1 == collection) || (x._1.containsReadContributor(requester)) || (x._1.containsReadWriteContributor(requester))) map { coll => if (coll._1.getOwner == requester || coll._1.containsReadWriteContributor(requester) || coll._1.containsReadContributor(requester)) { requestMap.find(_._1 == requester) map (_._2 += (coll._1.getUUID -> mutable.Map[String, Array[Byte]]())) getOrElse ( requestMap += (requester -> mutable.Map(coll._1.getUUID -> mutable.Map[String, Array[Byte]]()))) if (coll._1.getSize > 0) sfMap find { y => (y._1 == collection) || (y._1.containsReadContributor(requester)) || (y._1.containsReadWriteContributor(requester)) } map (_._2 forward GetAllItems(requester)) getOrElse sender ! Left("UndefinedCollection") else sender ! Right(MapResponse(collection.getOwner, collection.getName, extractContributors(coll._1), Map[String, Array[Byte]]())) } else sender ! Left("UndefinedCollection") } getOrElse sender ! Left("UndefinedCollection") } /** * Await for storefinder response of all storekeeper, expecting * a given number of response, equals to the number of key-value pairs * of the collection requested * * @param requester a String representing the requester of the action * @param clientRef the reference of the client demanding the collection * @param collection an ActorbaseCollection item containing the number of response * expected for the requested collection at the current state * @param items a TreeMap[String, Any] representing a shard of the requested collection, represent a storekeeper payload * @return * @throws */ case CompleteTransaction(requester, clientRef, collection, items) => requestMap.find(_._1 == requester) map { ref => ref._2.find(_._1 == collection.getUUID) map { colMap => colMap._2 ++= items log.debug(s"${colMap._2.size} - ${collection.getSize}") if (colMap._2.size == collection.getSize) { val k = colMap._2.toMap mapValues (v => CryptoUtils.bytesToAny(v)) clientRef ! Right(MapResponse(collection.getOwner, collection.getName, extractContributors(collection), k)) colMap._2.clear ref._2.-(collection.getUUID) } } getOrElse log.warning("GetItemFromResponse: collectionMap not found") } getOrElse log.warning("GetItemFromResponse: refPair not found") /** * Remove item from collection message, given a key of type String, * delete key-value pair from a specified collection * * @param collection a String representing the collection name * @param key a String representing the key to be deleted * */ case RemoveFrom(requester, uuid, key) => if (key.nonEmpty) sfMap.find(_._1.getUUID == uuid) map { c => if (requester == c._1.getOwner || c._1.containsReadWriteContributor(requester)) c._2 forward Remove(key) else sender ! "NoPrivileges" } getOrElse sender ! "UndefinedCollection" else { sfMap find (_._1.getUUID == uuid) map { coll => if (requester == coll._1.getOwner || requester == "admin") { coll._2 ! PoisonPill sfMap = sfMap - coll._1 sender ! "OK" authProxy ! RemoveCollectionFrom("admin", coll._1) authProxy ! RemoveCollectionFrom(requester, coll._1) if (coll._1.getOwner != requester) authProxy ! RemoveCollectionFrom(coll._1.getOwner, coll._1) } else sender ! "NoPrivileges" } getOrElse sender ! 
"UndefinedCollection" } /** * Add Contributor from collection, given username of Contributor and read * or readWrite permission * * @param username a String to identify the contributor to add * @param permission a boolean representing the permission : (true = readWrite , false = readOnly) * @param collection a String representing the collection name * */ case AddContributor(requester, username, permission, uuid, persist) => implicit val timeout = Timeout(5 seconds) val optColl = sfMap find (_._1.getUUID == uuid) optColl map { x => if (x._1.getOwner == requester || requester == "admin") { if (username != "admin") { (authProxy ? ListUsers).mapTo[ListResponse] onSuccess { case users => if (users.list.contains(username)) x._1.addContributor(username, permission) } } authProxy forward AddCollectionTo(username, x._1, permission, persist) } else sender ! "NoPrivileges" } getOrElse sender ! "UndefinedCollection" /** * Remove Contributor from collection, given username of Contributor , and permission * * @param username a String to identify the contributor to remove * @param collection a String representing the collection name * */ case RemoveContributor(requester, username, uuid) => implicit val timeout = Timeout(5 seconds) val optColl = sfMap find (_._1.getUUID == uuid) optColl map { x => if (x._1.getOwner == requester || requester == "admin") { if (username != "admin") { if (!x._1.containsReadContributor(username) && !x._1.containsReadWriteContributor(username)) sender ! "UndefinedUsername" else { (authProxy ? ListUsers).mapTo[ListResponse] onSuccess { case users => if (users.list.contains(username)) x._1.removeContributor(username) } authProxy forward RemoveCollectionFrom(username, x._1) } } else sender ! "NoPrivileges" } else sender ! "NoPrivileges" } getOrElse sender ! "UndefinedCollection" } } }
ScalateKids/Actorbase
src/main/scala/com/actorbase/actorsystem/actors/main/Main.scala
Scala
mit
16,302
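A minimal sketch of how these extractors plug into akka's cluster sharding extension. The start() call is akka's standard API; the surrounding Actorbase bootstrap is not shown here, and authProxy is a stand-in reference.

import akka.actor.{ActorRef, ActorSystem}
import akka.cluster.sharding.{ClusterSharding, ClusterShardingSettings}
import com.actorbase.actorsystem.actors.main.Main

object MainShardRegion {
  // Registers the sharded Main entity type and returns the shard region
  // ActorRef through which all MainMessages should be routed.
  def start(system: ActorSystem, authProxy: ActorRef): ActorRef =
    ClusterSharding(system).start(
      typeName        = Main.shardName,
      entityProps     = Main.props(authProxy),
      settings        = ClusterShardingSettings(system),
      extractEntityId = Main.extractEntityId,
      extractShardId  = Main.extractShardId)
}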
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.api.batch.sql import org.apache.flink.api.scala._ import org.apache.flink.table.api.scala._ import org.apache.flink.table.utils.TableTestBase import org.apache.flink.table.utils.TableTestUtil._ import org.junit.Test class JoinTest extends TableTestBase { @Test def testLeftOuterJoinEquiPred(): Unit = { val util = batchTestUtil() val table = util.addTable[(Int, Long, String)]("t", 'a, 'b, 'c) val table1 = util.addTable[(Long, String, Int)]("s", 'x, 'y, 'z) val query = "SELECT b, y FROM t LEFT OUTER JOIN s ON a = z" val result = util.tableEnv.sqlQuery(query) val expected = unaryNode( "DataSetCalc", binaryNode( "DataSetJoin", unaryNode( "DataSetCalc", batchTableNode(table), term("select", "a", "b") ), unaryNode( "DataSetCalc", batchTableNode(table1), term("select", "y", "z") ), term("where", "=(a, z)"), term("join", "a", "b", "y", "z"), term("joinType", "LeftOuterJoin") ), term("select", "b", "y") ) util.verifyTable(result, expected) } @Test def testLeftOuterJoinEquiAndLocalPred(): Unit = { val util = batchTestUtil() val table = util.addTable[(Int, Long, String)]("t", 'a, 'b, 'c) val table1 = util.addTable[(Long, String, Int)]("s", 'x, 'y, 'z) val query = "SELECT b, y FROM t LEFT OUTER JOIN s ON a = z AND b < 2" val result = util.tableEnv.sqlQuery(query) val expected = unaryNode( "DataSetCalc", binaryNode( "DataSetJoin", unaryNode( "DataSetCalc", batchTableNode(table), term("select", "a", "b", "<(b, 2) AS $f3") ), unaryNode( "DataSetCalc", batchTableNode(table1), term("select", "y", "z") ), term("where", "AND(=(a, z), $f3)"), term("join", "a", "b", "$f3", "y", "z"), term("joinType", "LeftOuterJoin") ), term("select", "b", "y") ) util.verifyTable(result, expected) } @Test def testLeftOuterJoinEquiAndNonEquiPred(): Unit = { val util = batchTestUtil() val table = util.addTable[(Int, Long, String)]("t", 'a, 'b, 'c) val table1 = util.addTable[(Long, String, Int)]("s", 'x, 'y, 'z) val query = "SELECT b, y FROM t LEFT OUTER JOIN s ON a = z AND b < x" val result = util.tableEnv.sqlQuery(query) val expected = unaryNode( "DataSetCalc", binaryNode( "DataSetJoin", unaryNode( "DataSetCalc", batchTableNode(table), term("select", "a", "b") ), batchTableNode(table1), term("where", "AND(=(a, z), <(b, x))"), term("join", "a", "b", "x", "y", "z"), term("joinType", "LeftOuterJoin") ), term("select", "b", "y") ) util.verifyTable(result, expected) } @Test def testRightOuterJoinEquiPred(): Unit = { val util = batchTestUtil() val table = util.addTable[(Int, Long, String)]("t", 'a, 'b, 'c) val table1 = util.addTable[(Long, String, Int)]("s", 'x, 'y, 'z) val query = "SELECT b, y FROM t RIGHT OUTER JOIN s ON a = z" val result = util.tableEnv.sqlQuery(query) val expected = unaryNode( "DataSetCalc", binaryNode( 
"DataSetJoin", unaryNode( "DataSetCalc", batchTableNode(table), term("select", "a", "b") ), unaryNode( "DataSetCalc", batchTableNode(table1), term("select", "y", "z") ), term("where", "=(a, z)"), term("join", "a", "b", "y", "z"), term("joinType", "RightOuterJoin") ), term("select", "b", "y") ) util.verifyTable(result, expected) } @Test def testRightOuterJoinEquiAndLocalPred(): Unit = { val util = batchTestUtil() val table = util.addTable[(Int, Long, String)]("t", 'a, 'b, 'c) val table1 = util.addTable[(Long, String, Int)]("s", 'x, 'y, 'z) val query = "SELECT b, x FROM t RIGHT OUTER JOIN s ON a = z AND x < 2" val result = util.tableEnv.sqlQuery(query) val expected = unaryNode( "DataSetCalc", binaryNode( "DataSetJoin", unaryNode( "DataSetCalc", batchTableNode(table), term("select", "a", "b") ), unaryNode( "DataSetCalc", batchTableNode(table1), term("select", "x", "z", "<(x, 2) AS $f3") ), term("where", "AND(=(a, z), $f3)"), term("join", "a", "b", "x", "z", "$f3"), term("joinType", "RightOuterJoin") ), term("select", "b", "x") ) util.verifyTable(result, expected) } @Test def testRightOuterJoinEquiAndNonEquiPred(): Unit = { val util = batchTestUtil() val table = util.addTable[(Int, Long, String)]("t", 'a, 'b, 'c) val table1 = util.addTable[(Long, String, Int)]("s", 'x, 'y, 'z) val query = "SELECT b, y FROM t RIGHT OUTER JOIN s ON a = z AND b < x" val result = util.tableEnv.sqlQuery(query) val expected = unaryNode( "DataSetCalc", binaryNode( "DataSetJoin", unaryNode( "DataSetCalc", batchTableNode(table), term("select", "a", "b") ), batchTableNode(table1), term("where", "AND(=(a, z), <(b, x))"), term("join", "a", "b", "x", "y", "z"), term("joinType", "RightOuterJoin") ), term("select", "b", "y") ) util.verifyTable(result, expected) } @Test def testFullOuterJoinEquiPred(): Unit = { val util = batchTestUtil() val table = util.addTable[(Int, Long, String)]("t", 'a, 'b, 'c) val table1 = util.addTable[(Long, String, Int)]("s", 'x, 'y, 'z) val query = "SELECT b, y FROM t FULL OUTER JOIN s ON a = z" val result = util.tableEnv.sqlQuery(query) val expected = unaryNode( "DataSetCalc", binaryNode( "DataSetJoin", unaryNode( "DataSetCalc", batchTableNode(table), term("select", "a", "b") ), unaryNode( "DataSetCalc", batchTableNode(table1), term("select", "y", "z") ), term("where", "=(a, z)"), term("join", "a", "b", "y", "z"), term("joinType", "FullOuterJoin") ), term("select", "b", "y") ) util.verifyTable(result, expected) } @Test def testFullOuterJoinEquiAndLocalPred(): Unit = { val util = batchTestUtil() val table = util.addTable[(Int, Long, String)]("t", 'a, 'b, 'c) val table1 = util.addTable[(Long, String, Int)]("s", 'x, 'y, 'z) val query = "SELECT b, y FROM t FULL OUTER JOIN s ON a = z AND b < 2 AND z > 5" val result = util.tableEnv.sqlQuery(query) val expected = unaryNode( "DataSetCalc", binaryNode( "DataSetJoin", unaryNode( "DataSetCalc", batchTableNode(table), term("select", "a", "b", "<(b, 2) AS $f3") ), unaryNode( "DataSetCalc", batchTableNode(table1), term("select", "y", "z", ">(z, 5) AS $f3") ), term("where", "AND(=(a, z), $f3, $f30)"), term("join", "a", "b", "$f3", "y", "z", "$f30"), term("joinType", "FullOuterJoin") ), term("select", "b", "y") ) util.verifyTable(result, expected) } @Test def testFullOuterJoinEquiAndNonEquiPred(): Unit = { val util = batchTestUtil() val table = util.addTable[(Int, Long, String)]("t", 'a, 'b, 'c) val table1 = util.addTable[(Long, String, Int)]("s", 'x, 'y, 'z) val query = "SELECT b, y FROM t FULL OUTER JOIN s ON a = z AND b < x" val result = 
util.tableEnv.sqlQuery(query) val expected = unaryNode( "DataSetCalc", binaryNode( "DataSetJoin", unaryNode( "DataSetCalc", batchTableNode(table), term("select", "a", "b") ), batchTableNode(table1), term("where", "AND(=(a, z), <(b, x))"), term("join", "a", "b", "x", "y", "z"), term("joinType", "FullOuterJoin") ), term("select", "b", "y") ) util.verifyTable(result, expected) } }
fhueske/flink
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/batch/sql/JoinTest.scala
Scala
apache-2.0
9,078
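Following the same pattern for a plain inner join: the expected plan below mirrors the left-outer case with joinType "InnerJoin" and is illustrative only, as it has not been verified against an actual planner run.

@Test
def testInnerJoinEquiPred(): Unit = {
  val util = batchTestUtil()
  val table = util.addTable[(Int, Long, String)]("t", 'a, 'b, 'c)
  val table1 = util.addTable[(Long, String, Int)]("s", 'x, 'y, 'z)

  val query = "SELECT b, y FROM t JOIN s ON a = z"
  val result = util.tableEnv.sqlQuery(query)

  // Illustrative expected plan, built with the same helper DSL as above.
  val expected = unaryNode(
    "DataSetCalc",
    binaryNode(
      "DataSetJoin",
      unaryNode(
        "DataSetCalc",
        batchTableNode(table),
        term("select", "a", "b")
      ),
      unaryNode(
        "DataSetCalc",
        batchTableNode(table1),
        term("select", "y", "z")
      ),
      term("where", "=(a, z)"),
      term("join", "a", "b", "y", "z"),
      term("joinType", "InnerJoin")
    ),
    term("select", "b", "y")
  )

  util.verifyTable(result, expected)
}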
/* * Copyright 2022 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package iht.views.registration import iht.utils._ import iht.views.ViewTestHelper import iht.views.html.registration.completed_registration class CompletedRegistrationViewTest extends ViewTestHelper{ val ihtRef = "A1A1A1" lazy val completedRegistrationView: completed_registration = app.injector.instanceOf[completed_registration] "CompletedRegistrationView" must { "have no message keys in html" in { implicit val request = createFakeRequest() val view = completedRegistrationView(ihtRef).toString noMessageKeysShouldBePresent(view) } "contain the correct title and browser title" in { implicit val request = createFakeRequest() val view = completedRegistrationView(ihtRef).toString titleShouldBeCorrect(view, messagesApi("iht.registration.complete")) browserTitleShouldBeCorrect(view, messagesApi("iht.registration.complete")) } "contain the correct guidance" in { implicit val request = createFakeRequest() val view = completedRegistrationView(ihtRef).toString messagesShouldBePresent(view, messagesApi("page.iht.registration.completedRegistration.ref.title")) messagesShouldBePresent(view, messagesApi("page.iht.registration.completedRegistration.ref.text")) messagesShouldBePresent(view, messagesApi("iht.nextSteps")) messagesShouldBePresent(view, messagesApi("page.iht.registration.completedRegistration.p1")) messagesShouldBePresent(view, messagesApi("page.iht.registration.completedRegistration.p2")) } "contain correct formatted reference number" in { implicit val request = createFakeRequest() val view = completedRegistrationView(ihtRef).toString messagesShouldBePresent(view, formattedIHTReference(ihtRef)) } "contain button with correct text and target as Estate report page" in { implicit val request = createFakeRequest() val view = completedRegistrationView(ihtRef).toString val doc = asDocument(view) val button = doc.getElementById("go-to-inheritance-tax-report") button.text mustBe messagesApi("page.iht.registration.completedRegistration.button") button.attr("href") mustBe iht.controllers.estateReports.routes.YourEstateReportsController.onPageLoad.url } "contain text-link with correct text and target as Save and Exit page" in { implicit val request = createFakeRequest() val view = completedRegistrationView(ihtRef).toString val doc = asDocument(view) val textlink = doc.getElementById("go-to-save-and-exit") textlink.text mustBe messagesApi("page.iht.registration.completedRegistration.link") textlink.attr("href") mustBe iht.controllers.routes.SessionTimeoutController.onSaveAndExitPageLoad.url } } }
hmrc/iht-frontend
test/iht/views/registration/CompletedRegistrationViewTest.scala
Scala
apache-2.0
3,381
/* * Copyright (C) 2014 - 2017 Contributors as noted in the AUTHORS.md file * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.wegtam.tensei.agent.helpers import java.util.Locale import com.wegtam.tensei.agent.DefaultSpec import com.wegtam.tensei.agent.SchemaExtractor.FormatsFormattime import org.scalatest.BeforeAndAfterAll import scala.collection.mutable.ListBuffer class ExtractorHelpersTest extends DefaultSpec with BeforeAndAfterAll with ExtractorHelpers { var oldLocale: Option[Locale] = None override protected def beforeAll(): Unit = { oldLocale = Option(Locale.getDefault) val l = Locale.forLanguageTag("de") Locale.setDefault(l) super.beforeAll() } override protected def afterAll(): Unit = { oldLocale.foreach(l => Locale.setDefault(l)) super.afterAll() } describe("ExtractorHelpers") { describe("createFormatnumRegex") { describe("without length and precision") { it("should work") { val result = createFormatnumRegex(0, 0) result should be("(-?[\\d\\.,βŽ–]+)") } } describe("without length") { it("should work") { val result = createFormatnumRegex(0, 4) result should be(s"(-?\\d*?\\.\\d{0,4})") } } describe("without precision") { it("should work") { val result = createFormatnumRegex(20, 0) result should be("(-?\\d{1,20})") } } describe("with length and precision") { it("should work") { val result = createFormatnumRegex(20, 5) result should be("(-?\\d{0,15}\\.\\d{0,5})") } } describe("with different decimal separator") { it("should work") { val result = createFormatnumRegex(20, 5, ",") result should be("(-?\\d{0,15},\\d{0,5})") } } } } describe("parseLong") { describe("with an empty String") { it("should be false") { parseLong("") should be(false) } } describe("with a real String") { it("should be false") { parseLong("FOO") should be(false) } } describe("with a double and a dot as decimal separator") { it("should be false") { parseLong("3.22") should be(false) } } describe("with a double and a comma as decimal separator") { it("should be false") { parseLong("3,22") should be(false) } } describe("with a long") { it("should be true") { parseLong("3") should be(true) } } } describe("parseDouble") { describe("with an empty String") { it("should be false") { parseDouble("") should be(false) } } describe("with a real String") { it("should be false") { parseDouble("FOO") should be(false) } } describe("with a double and a dot as decimal separator") { it("should be true") { parseDouble("3.22") should be(true) } } describe("with a double and a comma as decimal separator") { it("should be true") { parseDouble("3,22") should be(true) } } describe("with a double and a βŽ– as decimal separator") { it("should be true") { parseDouble("3βŽ–22") should be(true) } } describe("with a long") { it("should be true") { parseDouble("3") should be(true) } } } describe("parseDate") { describe("with an empty String") { it("should be false") { parseDate("") should be(false) } } describe("with a wrong value") { 
it("should be false") { parseDate("am 23.11.2015 um 14:30") should be(false) } } describe("with just a Date value") { it("should be true") { parseDate("1970-01-01") should be(true) } } describe("with just a Time value") { it("should be false") { parseDate("14:30:25") should be(false) } } describe("with DateTime values") { describe("ISO conform") { it("should be false") { parseDate("1970-01-01T14:30:25") should be(false) } } describe("not ISO conform") { it("should be false") { parseDate("1970-01-01 14:30:25") should be(false) } } } } describe("parseTime") { describe("with an empty String") { it("should be false") { parseTime("") should be(false) } } describe("with a wrong value") { it("should be false") { parseTime("am 23.11.2015 um 14:30") should be(false) } } describe("with just a Date value") { it("should be false") { parseTime("1970-01-01") should be(false) } } describe("with just a Time value") { it("should be true") { parseTime("14:30:25") should be(true) } } describe("with DateTime values") { describe("ISO conform") { it("should be false") { parseTime("1970-01-01T14:30:25") should be(false) } } describe("not ISO conform") { it("should be false") { parseTime("1970-01-01 14:30:25") should be(false) } } } } describe("parseTimestamp") { describe("with an empty String") { it("should be false") { parseTimestamp("") should be(false) } } describe("with a wrong value") { it("should be false") { parseTimestamp("am 23.11.2015 um 14:30") should be(false) } } describe("with just a Date value") { it("should be false") { parseTimestamp("1970-01-01") should be(false) } } describe("with just a Time value") { it("should be false") { parseTimestamp("14:30:25") should be(false) } } describe("with DateTime values") { describe("ISO conform") { it("should be true") { parseTimestamp("1970-01-01T14:30:25") should be(true) } } describe("not ISO conform") { it("should be true") { parseTimestamp("1970-01-01 14:30:25") should be(true) } } } } describe("determineSeparator") { describe("with no entries") { it("should return the '.'") { val entries = List[String]() determineSeparator(entries) should be(None) } } describe("with no separators") { it("should return None") { val entries = List[String]("1200", "122", "1200", "1000", "1234567", "1200") determineSeparator(entries) should be(None) } } describe("with a thousand separator") { it("should return None") { val entries = List[String]("1.200", "122", "1.2001", "1000,23", "1.234.567", "1.200") determineSeparator(entries) should be(None) } } describe("with the comma as decimal separator") { describe("without thousand separator") { it("should return the ','") { val entries = List[String]("1200,20", "1,22", "1200,11", "1000,23", "1234567,89", "100,00") determineSeparator(entries) should be(Option(",")) } } describe("with thousand separator") { it("should return the ','") { val entries = List[String]("1.200,20", "1,22", "1.200,11", "4.000,23", "1.234.567,89", "1.200,00") determineSeparator(entries) should be(Option(",")) } } } describe("with the dot as decimal separator") { describe("without thousand separator") { it("should return the '.'") { val entries = List[String]("1200.20", "1.22", "1200.11", "4000.23", "1234567.89", "1200.00") determineSeparator(entries) should be(Option(".")) } } describe("with thousand separator") { it("should return the '.'") { val entries = List[String]("1,200.20", "1.22", "1,200.11", "4,000.23", "1,234,567.89", "1,200.00") determineSeparator(entries) should be(Option(".")) } } } } describe("determinePrecisionLength") { describe("with an 
empty list") { it("should return None") { val entries = List[String]() determinePrecisionLength(entries, ".") should be(None) } } describe("with a wrong decimal separator") { it("should return None") { val entries = List[String]("1.200,20", "1,22", "1.200,11", "4.000,23", "1.234.567,89", "1.200,00") determinePrecisionLength(entries, ".") should be(None) } } describe("with a correct decimal separator") { describe("with unequal decimal precisions") { describe("with max 2") { it("should return 2") { val entries = List[String]("1.200,0", "1,2", "1.200,11", "4.000,3", "1.234.567,89", "1.200,00") determinePrecisionLength(entries, ",") should be(Option(2)) } } describe("with max 3") { it("should return 3") { val entries = List[String]("1.200,20", "1,223", "1.200,11", "4.000,23", "1.234.567,893", "1.200,00") determinePrecisionLength(entries, ",") should be(Option(3)) } } } describe("with equal decimal precsions") { describe("with length 2") { it("should return 2") { val entries = List[String]("1.200,20", "1,23", "1.200,11", "4.000,23", "1.234.567,93", "1.200,00") determinePrecisionLength(entries, ",") should be(Option(2)) } } describe("with length 4") { it("should return 4") { val entries = List[String]("1.200,2077", "1,2237", "1.200,1771", "4.000,2773", "1.234.567,7893", "1.200,7700") determinePrecisionLength(entries, ",") should be(Option(4)) } } } } } describe("parseFormattedTime") { describe("with an empty source and format") { it("should return false") { parseFormattedTime("", "") should be(false) } } describe("with an empty source") { it("should return false") { parseFormattedTime("", "dd.MM.yyyy") should be(false) } } describe("with an empty format") { it("should return false") { parseFormattedTime("12.01.2006", "") should be(false) } } describe("for date formats") { describe("with format `dd.MM.yyyy`") { it("should return true") { parseFormattedTime("12.06.2006", "dd.MM.yyyy") should be(true) } } describe("with format `dd MM yyyy`") { it("should return true") { parseFormattedTime("12 06 2006", "dd MM yyyy") should be(true) } } describe("with format `dd.LLL.yyyy`") { it("should return true") { parseFormattedTime("12.Jul.2006", "dd.LLL.yyyy") should be(true) } } describe("with format `dd LLL yyyy`") { it("should return true") { parseFormattedTime("12 Jul 2006", "dd LLL yyyy") should be(true) } } describe("with format `dd/MM/yyyy`") { it("should return true") { parseFormattedTime("14/06/2006", "dd/MM/yyyy") should be(true) } } describe("with format `dd/LLL/yyyy`") { it("should return true") { parseFormattedTime("14/Nov/2006", "dd/LLL/yyyy") should be(true) } } describe("with format `MM/dd/yyyy`") { it("should return true") { parseFormattedTime("11/14/2006", "MM/dd/yyyy") should be(true) } } describe("with format `LLL/dd/yyyy`") { it("should return true") { parseFormattedTime("Nov/14/2006", "LLL/dd/yyyy") should be(true) } } describe("with format `yyyyMMdd`") { it("should return true") { parseFormattedTime("20060323", "yyyyMMdd") should be(true) } } } describe("for time formats") { describe("with format `HH:mm`") { it("should return true") { parseFormattedTime("14:12", "HH:mm") should be(true) } } describe("with format `HH:mm a` with `am`") { it("should return true") { parseFormattedTime("2:12 AM", "h:mm a") should be(true) } } describe("with format `HH:mm a` with `pm`") { it("should return true") { parseFormattedTime("1:12 PM", "h:mm a") should be(true) } } } describe("for timestamp formats") { describe("with format `yyyy-MM-dd h:mm:ss a` with `am`") { it("should return true") { 
parseFormattedTime("2012-01-12 1:12:00 AM", "yyyy-MM-dd h:mm:ss a") should be(true) } } describe("with format `yyyy-MM-dd h:mm:ss a` with `pm`") { it("should return true") { parseFormattedTime("2012-01-12 1:12:00 PM", "yyyy-MM-dd h:mm:ss a") should be(true) } } describe("with format `yyyy-MM-dd h:mm:ss a` with `am` and zoned-name") { it("should return true") { parseFormattedTime("2012-01-12 1:12:00 AM UTC", "yyyy-MM-dd h:mm:ss a z") should be(true) } } describe("with format `yyyy-MM-dd h:mm:ss a` with `pm` and zoned-name") { it("should return true") { parseFormattedTime("2012-01-12 1:12:00 PM UTC", "yyyy-MM-dd h:mm:ss a z") should be(true) } } describe("with format `EEE, dd LLL yyyy HH:mm:ss z`") { it("should return true") { parseFormattedTime("Mo, 15 Feb 2016 18:18:39 UTC", "EEE, dd LLL yyyy HH:mm:ss z") should be( true ) } } } } describe("determineSpecificFormat") { val formatsFormattime = FormatsFormattime( timestamp = List( "yyyy-MM-dd h:mm:ss a", "yyyy-MM-dd h:mm:ss a z", "EEE, dd LLL yyyy HH:mm:ss z" ), date = List( "yyyyMMdd", "dd.MM.yyyy", "dd MM yyyy", "dd.LLL.yyyy", "dd LLL yyyy", "dd/MM/yyyy", "dd/LLL/yyyy", "MM/dd/yyyy", "LLL/dd/yyyy" ), time = List( "h:mm a", "HH:mm" ) ) describe("timestamps") { describe("for `yyyy-MM-dd h:mm:ss a`") { describe("all given values fit the pattern") { it("should return the `yyyy-MM-dd h:mm:ss a` format") { val values = ListBuffer("2012-12-12 1:12:22 AM", "2001-01-22 7:22:22 PM", "1985-11-01 4:00:00 AM") determineSpecificFormat(values, formatsFormattime.timestamp) should be( Option("yyyy-MM-dd h:mm:ss a") ) } } describe("not all given values fit the pattern") { it("should return None") { val values = ListBuffer("2012-12-12 1:12:22 AM", "2001-01-22 7:22:22 PM", "1985-11-01 4:00:00") determineSpecificFormat(values, formatsFormattime.timestamp) should be(None) } } } } describe("date") { describe("for `yyyyMMdd`") { describe("all given values fit the pattern") { it("should return the `yyyyMMdd` format") { val values = ListBuffer("20120112", "20130322", "20011201") determineSpecificFormat(values, formatsFormattime.date) should be(Option("yyyyMMdd")) } } describe("not all given values fit the pattern") { it("should return None") { val values = ListBuffer("20120112", "2013032", "20011201") determineSpecificFormat(values, formatsFormattime.date) should be(None) } } describe("not all given values fit the pattern and contain timestamp") { it("should return None") { val values = ListBuffer("20120112", "20130321 14:12:00", "20011201") determineSpecificFormat(values, formatsFormattime.date) should be(None) } } } } describe("time") { describe("for `HH:mm`") { describe("all given values fit the pattern") { it("should return the `HH:mm` format") { val values = ListBuffer("12:12", "22:22", "07:35", "21:15") determineSpecificFormat(values, formatsFormattime.time) should be(Option("HH:mm")) } } describe("all given values fit the pattern") { it("should return None") { val values = ListBuffer("12:12", "22:22:12", "07:35", "21:15") determineSpecificFormat(values, formatsFormattime.time) should be(None) } } } } } }
Tensei-Data/tensei-agent
src/test/scala/com/wegtam/tensei/agent/helpers/ExtractorHelpersTest.scala
Scala
agpl-3.0
17,447
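The spec above exercises ExtractorHelpers without including the trait itself. As a hedged illustration only (an assumed implementation, not the project's actual code), boolean parse checks with exactly the semantics the spec asserts can be written like this:

import scala.util.Try

object NumericParseSketch {
  // True only for plain integral strings; "3.22" and "3,22" both fail, "3" succeeds.
  def parseLong(s: String): Boolean =
    Try(s.toLong).isSuccess

  // Accept '.', ',' and the 'βŽ–' placeholder as decimal separators, as the spec expects,
  // by normalizing to '.' before the conversion attempt.
  def parseDouble(s: String): Boolean =
    Try(s.replace(',', '.').replace('βŽ–', '.').toDouble).isSuccess
}

Under these definitions parseLong("3.22") is false while parseDouble("3,22") and parseDouble("3βŽ–22") are true, matching the expectations in the tests.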
/* Copyright 2013 Twitter, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.twitter.summingbird import com.twitter.algebird.{ MapAlgebra, Monoid, Semigroup } import com.twitter.summingbird.option.JobId import org.scalacheck.Arbitrary /** * Helpful functions and test graphs designed to flex Summingbird * planners. */ object TestGraphs { // implicit ordering on the either pair implicit def eitherOrd[T, U]: Ordering[Either[T, U]] = new Ordering[Either[T, U]] { def compare(l: Either[T, U], r: Either[T, U]) = (l, r) match { case (Left(_), Right(_)) => -1 case (Right(_), Left(_)) => 1 case (Left(_), Left(_)) => 0 case (Right(_), Right(_)) => 0 } } // Helpers private def sum[V: Semigroup](opt: Option[V], v: V): V = if (opt.isDefined) Semigroup.plus(opt.get, v) else v private def scanSum[V: Semigroup](it: Iterator[V]): Iterator[(Option[V], V)] = { var prev: Option[V] = None it.map { v => val res = (prev, v) prev = Some(sum(prev, v)) res } } /** * This function simulates the loop join in ScaldingPlatform loopJoin. Used when joining against a store and the store depends on the result of the join. * The function takes an Iterable of Either and a valuesFn function. The Eithers in the Iterable are updates to the store, corresponding to the two TypedPipes * in ScaldingPlatform loopJoin (summingbird-scalding/src/main/scala/com/twitter/summingbird/scalding/ScaldingPlatform.scala). * The result is a join stream and the output stream of the store. */ private def loopJoinInScala[K: Ordering, U, V: Monoid](leftAndRight: Iterable[(K, (Long, Either[U, V]))], valuesFn: ((Long, (U, Option[V]))) => TraversableOnce[(Long, V)]): List[(K, List[(Option[(Long, (U, Option[V]))], Option[(Long, (Option[V], V))])])] = { leftAndRight .groupBy(_._1) .mapValues { _.map(_._2).toList.sortBy(identity) .scanLeft((Option.empty[(Long, (U, Option[V]))], Option.empty[(Long, (Option[V], V))])) { case ((_, None), (time, Left(u))) => /* * This is a lookup, but there is no value for this key */ val joinResult = Some((time, (u, None))) val sumResult = Semigroup.sumOption(valuesFn(time, (u, None))).map(v => (time, (None, v._2))) (joinResult, sumResult) case ((_, Some((_, (optv, v)))), (time, Left(u))) => /* * This is a lookup, and there is an existing value */ val currentV = Some(sum(optv, v)) // isn't u already a sum and optu prev value? val joinResult = Some((time, (u, currentV))) val sumResult = Semigroup.sumOption(valuesFn(time, (u, currentV))).map(v => (time, (currentV, v._2))) (joinResult, sumResult) case ((_, None), (time, Right(v))) => /* * This is merging in new data into the store not coming in from the service * (either from the store history or from a merge after the leftJoin, but * There was previously no data. */ val joinResult = None val sumResult = Some((time, (None, v))) (joinResult, sumResult) case ((_, Some((_, (optv, oldv)))), (time, Right(v))) => /* * This is the case where we are updating a non-empty key. 
This should * only be triggered by a merged data-stream after the join since * store initialization */ val joinResult = None val currentV = Some(sum(optv, oldv)) val sumResult = Some((time, (currentV, v))) (joinResult, sumResult) } }.toList } // Test graphs def diamondJobInScala[T, K, V: Monoid](source: TraversableOnce[T])(fnA: T => TraversableOnce[(K, V)])(fnB: T => TraversableOnce[(K, V)]): Map[K, V] = { val stream = source.toStream val left = stream.flatMap(fnA) val right = stream.flatMap(fnB) MapAlgebra.sumByKey(left ++ right) } def diamondJob[P <: Platform[P], T, K, V: Monoid](source: Producer[P, T], sink: P#Sink[T], store: P#Store[K, V])(fnA: T => TraversableOnce[(K, V)])(fnB: T => TraversableOnce[(K, V)]): TailProducer[P, (K, (Option[V], V))] = { val written = source.write(sink) val left = written.flatMap(fnA) val right = written.flatMap(fnB) left.merge(right).sumByKey(store) } def singleStepInScala[T, K, V: Monoid](source: TraversableOnce[T])(fn: T => TraversableOnce[(K, V)]): Map[K, V] = MapAlgebra.sumByKey( source.flatMap(fn) ) def singleStepJob[P <: Platform[P], T, K, V: Monoid](source: Producer[P, T], store: P#Store[K, V])(fn: T => TraversableOnce[(K, V)]): TailProducer[P, (K, (Option[V], V))] = source .flatMap(fn).name("FM") .sumByKey(store) def twinStepOptionMapFlatMapScala[T1, T2, K, V: Monoid](source: TraversableOnce[T1])(fnA: T1 => Option[T2], fnB: T2 => TraversableOnce[(K, V)]): Map[K, V] = MapAlgebra.sumByKey( source.flatMap(fnA(_).iterator).flatMap(fnB) ) def twinStepOptionMapFlatMapJob[P <: Platform[P], T1, T2, K, V: Monoid](source: Producer[P, T1], store: P#Store[K, V])(fnA: T1 => Option[T2], fnB: T2 => TraversableOnce[(K, V)]): TailProducer[P, (K, (Option[V], V))] = source .optionMap(fnA) .flatMap(fnB) .sumByKey(store) def singleStepMapKeysInScala[T, K1, K2, V: Monoid](source: TraversableOnce[T])(fnA: T => TraversableOnce[(K1, V)], fnB: K1 => TraversableOnce[K2]): Map[K2, V] = MapAlgebra.sumByKey( source.flatMap(fnA).flatMap { x => fnB(x._1).map((_, x._2)) } ) def singleStepMapKeysJob[P <: Platform[P], T, K1, K2, V: Monoid](source: Producer[P, T], store: P#Store[K2, V])(fnA: T => TraversableOnce[(K1, V)], fnB: K1 => TraversableOnce[K2]): TailProducer[P, (K2, (Option[V], V))] = source .flatMap(fnA) .flatMapKeys(fnB) .sumByKey(store) def repeatedTupleLeftJoinInScala[T, U, JoinedU, K, V: Monoid](source: TraversableOnce[T])(service: K => Option[JoinedU])(preJoinFn: T => TraversableOnce[(K, U)])(postJoinFn: ((K, (U, Option[JoinedU]))) => TraversableOnce[(K, V)]): Map[K, V] = MapAlgebra.sumByKey( source .flatMap(preJoinFn) .flatMap { case (k, v) => List((k, v), (k, v)) } .map { case (k, v) => (k, (v, service(k))) } .flatMap(postJoinFn) ) def repeatedTupleLeftJoinJob[P <: Platform[P], T, U, JoinedU, K, V: Monoid]( source: Producer[P, T], service: P#Service[K, JoinedU], store: P#Store[K, V])(preJoinFn: T => TraversableOnce[(K, U)])(postJoinFn: ((K, (U, Option[JoinedU]))) => TraversableOnce[(K, V)]): TailProducer[P, (K, (Option[V], V))] = source .name("My named source") .flatMap(preJoinFn) .flatMap { case (k, v) => List((k, v), (k, v)) } .leftJoin(service) .name("My named flatmap") .flatMap(postJoinFn) .sumByKey(store) def leftJoinInScala[T, U, JoinedU, K, V: Monoid](source: TraversableOnce[T])(service: K => Option[JoinedU])(preJoinFn: T => TraversableOnce[(K, U)])(postJoinFn: ((K, (U, Option[JoinedU]))) => TraversableOnce[(K, V)]): Map[K, V] = MapAlgebra.sumByKey( source .flatMap(preJoinFn) .map { case (k, v) => (k, (v, service(k))) } .flatMap(postJoinFn) ) def leftJoinJob[P 
<: Platform[P], T, U, JoinedU, K, V: Monoid]( source: Producer[P, T], service: P#Service[K, JoinedU], store: P#Store[K, V])(preJoinFn: T => TraversableOnce[(K, U)])(postJoinFn: ((K, (U, Option[JoinedU]))) => TraversableOnce[(K, V)]): TailProducer[P, (K, (Option[V], V))] = source .name("My named source") .flatMap(preJoinFn) .leftJoin(service) .name("My named flatmap") .flatMap(postJoinFn) .sumByKey(store) def leftJoinWithFlatMapValuesInScala[T, U, JoinedU, K, V: Monoid](source: TraversableOnce[T])(service: K => Option[JoinedU])(preJoinFn: T => TraversableOnce[(K, U)])(postJoinFn: ((U, Option[JoinedU])) => TraversableOnce[V]): Map[K, V] = MapAlgebra.sumByKey( source .flatMap(preJoinFn) .map { case (k, v) => (k, (v, service(k))) } .flatMap { case (k, v) => postJoinFn(v).map { v => (k, v) } } ) def leftJoinJobWithFlatMapValues[P <: Platform[P], T, U, JoinedU, K, V: Monoid]( source: Producer[P, T], service: P#Service[K, JoinedU], store: P#Store[K, V])(preJoinFn: T => TraversableOnce[(K, U)])(postJoinFn: ((U, Option[JoinedU])) => TraversableOnce[V]): TailProducer[P, (K, (Option[V], V))] = source .name("My named source") .flatMap(preJoinFn) .leftJoin(service) .name("My named flatmap") .flatMapValues(postJoinFn) .sumByKey(store) def leftJoinWithStoreInScala[T1, T2, U, JoinedU: Monoid, K: Ordering, V: Monoid](source1: TraversableOnce[T1], source2: TraversableOnce[T2])(simpleFM1: T1 => TraversableOnce[(Long, (K, JoinedU))])(simpleFM2: T2 => TraversableOnce[(Long, (K, U))])(postJoinFn: ((Long, (K, (U, Option[JoinedU])))) => TraversableOnce[(Long, (K, V))]): (Map[K, JoinedU], Map[K, V]) = { val firstStore = MapAlgebra.sumByKey( source1 .flatMap(simpleFM1) .map { case (_, kju) => kju } // drop the time from the key for the store ) // create the delta stream val sumStream: Iterable[(Long, (K, (Option[JoinedU], JoinedU)))] = source1 .flatMap(simpleFM1) .toList.groupBy(_._1) .mapValues { _.map { case (time, (k, joinedu)) => (k, joinedu) } .groupBy(_._1) .mapValues { l => scanSum(l.iterator.map(_._2)).toList } .toIterable .flatMap { case (k, lv) => lv.map { case (optju, ju) => (k, (optju, ju)) } } } .toIterable .flatMap { case (time, lv) => lv.map { case (k, (optju, ju)) => (time, (k, (optju, ju))) } } // zip the left and right streams val leftAndRight: Iterable[(K, (Long, Either[U, JoinedU]))] = source2 .flatMap(simpleFM2) .toList .map { case (time, (k, u)) => (k, (time, Left(u))) } .++(sumStream.map { case (time, (k, (optju, ju))) => (k, (time, Right(ju))) }) // scan left to join the left values and the right summing result stream val resultStream: List[(Long, (K, (U, Option[JoinedU])))] = leftAndRight .groupBy(_._1) .mapValues { _.map(_._2).toList.sortBy(identity) .scanLeft(Option.empty[(Long, JoinedU)], Option.empty[(Long, U, Option[JoinedU])]) { case ((None, result), (time, Left(u))) => { // The was no value previously (None, Some((time, u, None))) } case ((prev @ Some((oldt, ju)), result), (time, Left(u))) => { // gate the time for window join? 
(prev, Some((time, u, Some(ju)))) } case ((None, result), (time, Right(joined))) => { (Some((time, joined)), None) } case ((Some((oldt, oldJ)), result), (time, Right(joined))) => { val nextJoined = Semigroup.plus(oldJ, joined) (Some((time, nextJoined)), None) } } }.toList.flatMap { case (k, lv) => lv.map { case ((_, optuju)) => (k, optuju) } } .flatMap { case (k, opt) => opt.map { case (time, u, optju) => (time, (k, (u, optju))) } } // compute the final store result after join val finalStore = MapAlgebra.sumByKey( resultStream .flatMap(postJoinFn) .map { case (time, (k, v)) => (k, v) } // drop the time ) (firstStore, finalStore) } def leftJoinWithStoreJob[P <: Platform[P], T1, T2, U, K, JoinedU: Monoid, V: Monoid]( source1: Producer[P, T1], source2: Producer[P, T2], storeAndService: P#Store[K, JoinedU] with P#Service[K, JoinedU], store: P#Store[K, V])(simpleFM1: T1 => TraversableOnce[(K, JoinedU)])(simpleFM2: T2 => TraversableOnce[(K, U)])(postJoinFn: ((K, (U, Option[JoinedU]))) => TraversableOnce[(K, V)]): TailProducer[P, (K, (Option[V], V))] = { // sum to first store val dag1: Summer[P, K, JoinedU] = source1 .flatMap(simpleFM1) .sumByKey(storeAndService) // join second source with stream from first store val dag2: Summer[P, K, V] = source2 .flatMap(simpleFM2) .leftJoin(storeAndService) .flatMap(postJoinFn) .sumByKey(store) dag1.also(dag2) } def leftJoinWithDependentStoreInScala[T, U, K: Ordering, V: Monoid](source: TraversableOnce[T])(simpleFM: T => TraversableOnce[(Long, (K, U))])(flatMapValuesFn: ((Long, (U, Option[V]))) => TraversableOnce[(Long, V)]): Map[K, V] = { // zip the left and right streams val leftAndRight: Iterable[(K, (Long, Either[U, V]))] = source .flatMap(simpleFM) .toList .map { case (time, (k, u)) => (k, (time, Left(u))) } // scan left to join the left values and the right summing result stream val resultStream = loopJoinInScala(leftAndRight, flatMapValuesFn) // compute the final store result after join val rightStream = resultStream .flatMap { case (k, lopts) => lopts.map { case ((_, optoptv)) => (k, optoptv) } } // compute the final store result after join MapAlgebra.sumByKey( rightStream .flatMap { case (k, opt) => opt.map { case (time, (optv, v)) => (k, v) } } // drop time and opt[v] ) } def leftJoinWithDependentStoreJob[P <: Platform[P], T, V1, U, K, V: Monoid]( source1: Producer[P, T], storeAndService: P#Store[K, V] with P#Service[K, V])(simpleFM1: T => TraversableOnce[(K, U)])(valuesFlatMap1: ((U, Option[V])) => TraversableOnce[V1])(valuesFlatMap2: (V1) => TraversableOnce[V]): TailProducer[P, (K, (Option[V], V))] = { source1 .flatMap(simpleFM1) .leftJoin(storeAndService) .flatMapValues(valuesFlatMap1) .flatMapValues(valuesFlatMap2) .sumByKey(storeAndService) } def leftJoinWithDependentStoreJoinFanoutInScala[T, U, K: Ordering, V: Monoid, V1: Monoid](source: TraversableOnce[T])(simpleFM: T => TraversableOnce[(Long, (K, U))])(flatMapValuesFn: ((Long, (U, Option[V]))) => TraversableOnce[(Long, V)])(flatMapFn: ((Long, (K, (U, Option[V])))) => TraversableOnce[(Long, (K, V1))]): (Map[K, V], Map[K, V1]) = { // zip the left and right streams val leftAndRight: Iterable[(K, (Long, Either[U, V]))] = source .flatMap(simpleFM) .toList .map { case (time, (k, u)) => (k, (time, Left(u))) } // scan left to join the left values and the right summing result stream val resultStream = loopJoinInScala(leftAndRight, flatMapValuesFn) val leftStream = resultStream .flatMap { case (k, lopts) => lopts.map { case ((optuoptv, _)) => (k, optuoptv) } } val rightStream = resultStream .flatMap { 
case (k, lopts) => lopts.map { case ((_, optoptv)) => (k, optoptv) } } // compute the first store using the join stream as input val storeAfterFlatMap = MapAlgebra.sumByKey( leftStream .flatMap { case (k, opt) => opt.map { case (time, (u, optv)) => (time, (k, (u, optv))) } } .flatMap(flatMapFn(_)) .map { case (time, (k, v)) => (k, v) } // drop the time ) // compute the final store result after join val storeAfterJoin = MapAlgebra.sumByKey( rightStream .flatMap { case (k, opt) => opt.map { case (time, (optv, v)) => (k, v) } } // drop time and opt[v] ) (storeAfterJoin, storeAfterFlatMap) } def leftJoinWithDependentStoreJoinFanoutJob[P <: Platform[P], T1, V1: Monoid, U, K, V: Monoid]( source1: Producer[P, T1], storeAndService: P#Store[K, V] with P#Service[K, V], store: P#Store[K, V1])(simpleFM1: T1 => TraversableOnce[(K, U)])(valuesFlatMap1: ((U, Option[V])) => TraversableOnce[V])(flatMapFn: ((K, (U, Option[V]))) => TraversableOnce[(K, V1)]): TailProducer[P, (K, (Option[V], V))] = { val join: KeyedProducer[P, K, (U, Option[V])] = source1 .flatMap(simpleFM1) .leftJoin(storeAndService) val dependentSum: Summer[P, K, V] = join .flatMapValues(valuesFlatMap1) .sumByKey(storeAndService) val indepSum: Summer[P, K, V1] = join .flatMap(flatMapFn) .sumByKey(store) indepSum.also(dependentSum) } def realJoinTestJob[P <: Platform[P], T1, T2, T3, T4, K1, K2, U, JoinedU, V: Monoid]( source1: Producer[P, T1], source2: Producer[P, T2], source3: Producer[P, T3], source4: Producer[P, T4], service: P#Service[K1, JoinedU], store: P#Store[K2, V], simpleFM1: T1 => TraversableOnce[(K2, V)], simpleFM2: T2 => TraversableOnce[(K2, V)], simpleFM3: T3 => TraversableOnce[(K2, V)], preJoin: T4 => (K1, U), postJoin: ((K1, (U, Option[JoinedU]))) => TraversableOnce[(K2, V)]): TailProducer[P, (K2, (Option[V], V))] = { val data1 = source1.flatMap(simpleFM1) val data2 = source2.flatMap(simpleFM2) val data3 = source3.flatMap(simpleFM3) val data4 = source4.map(preJoin).leftJoin(service).flatMap(postJoin) data1.merge(data2).merge(data3).merge(data4).sumByKey(store).name("Customer Supplied Job") } def writtenPostSum[P <: Platform[P], T, K, V: Monoid](source: Producer[P, T], sink: P#Sink[(K, (Option[V], V))], store: P#Store[K, V])(fnA: T => TraversableOnce[(K, V)]): TailProducer[P, (K, (Option[V], V))] = { val left = source.flatMap(fnA) left.sumByKey(store).write(sink) } def realJoinTestJobInScala[P <: Platform[P], T1, T2, T3, T4, K1, K2, U, JoinedU, V: Monoid]( source1: List[T1], source2: List[T2], source3: List[T3], source4: List[T4], service: K1 => Option[JoinedU], simpleFM1: T1 => TraversableOnce[(K2, V)], simpleFM2: T2 => TraversableOnce[(K2, V)], simpleFM3: T3 => TraversableOnce[(K2, V)], preJoin: T4 => (K1, U), postJoin: ((K1, (U, Option[JoinedU]))) => TraversableOnce[(K2, V)]): Map[K2, V] = { val data1 = source1.flatMap(simpleFM1) val data2 = source2.flatMap(simpleFM2) val data3 = source3.flatMap(simpleFM3) val data4 = source4.map(preJoin).map { case (k, v) => (k, (v, service(k))) } .flatMap(postJoin) MapAlgebra.sumByKey(data1 ::: data2 ::: data3 ::: data4) } def multipleSummerJobInScala[T1, T2, K1, V1: Monoid, K2, V2: Monoid](source: List[T1])(fnR: T1 => TraversableOnce[T2], fnA: T2 => TraversableOnce[(K1, V1)], fnB: T2 => TraversableOnce[(K2, V2)]): (Map[K1, V1], Map[K2, V2]) = { val mapA = MapAlgebra.sumByKey(source.flatMap(fnR).flatMap(fnA)) val mapB = MapAlgebra.sumByKey(source.flatMap(fnR).flatMap(fnB)) (mapA, mapB) } def multipleSummerJob[P <: Platform[P], T1, T2, K1, V1: Monoid, K2, V2: Monoid](source: Producer[P, 
T1], store1: P#Store[K1, V1], store2: P#Store[K2, V2])(fnR: T1 => TraversableOnce[T2], fnA: T2 => TraversableOnce[(K1, V1)], fnB: T2 => TraversableOnce[(K2, V2)]): TailProducer[P, (K2, (Option[V2], V2))] = { val combined = source.flatMap(fnR) val calculated = combined.flatMap(fnB).sumByKey(store2) combined.flatMap(fnA).sumByKey(store1).also(calculated) } def mapOnlyJob[P <: Platform[P], T, U]( source: Producer[P, T], sink: P#Sink[U])(mapOp: T => TraversableOnce[U]): TailProducer[P, U] = source .flatMap(mapOp) .write(sink) def lookupJob[P <: Platform[P], T, U]( source: Producer[P, T], srv: P#Service[T, U], sink: P#Sink[(T, U)]): TailProducer[P, (T, U)] = source.lookup(srv).collectValues { case Some(v) => v }.write(sink) def lookupJobInScala[T, U](in: List[T], srv: (T) => Option[U]): List[(T, U)] = in.map { t => (t, srv(t)) }.collect { case (t, Some(u)) => (t, u) } def twoSumByKey[P <: Platform[P], K, V: Monoid, K2]( source: Producer[P, (K, V)], store: P#Store[K, V], fn: K => List[K2], store2: P#Store[K2, V]): TailProducer[P, (K2, (Option[V], V))] = source .sumByKey(store) .mapValues(_._2) .flatMapKeys(fn) .sumByKey(store2) def twoSumByKeyInScala[K1, V: Semigroup, K2](in: List[(K1, V)], fn: K1 => List[K2]): (Map[K1, V], Map[K2, V]) = { val sum1 = MapAlgebra.sumByKey(in) val sumStream = in.groupBy(_._1) .mapValues { l => scanSum(l.iterator.map(_._2)).toList } .toIterable .flatMap { case (k, lv) => lv.map((k, _)) } val v2 = sumStream.map { case (k, (_, v)) => fn(k).map { (_, v) } }.flatten val sum2 = MapAlgebra.sumByKey(v2) (sum1, sum2) } def jobWithStats[P <: Platform[P], T, K, V: Monoid](id: JobId, source: Producer[P, T], store: P#Store[K, V])(fn: T => TraversableOnce[(K, V)]): TailProducer[P, (K, (Option[V], V))] = { implicit val jobID: JobId = id val origCounter = Counter(Group("counter.test"), Name("orig_counter")) val fmCounter = Counter(Group("counter.test"), Name("fm_counter")) val fltrCounter = Counter(Group("counter.test"), Name("fltr_counter")) source .flatMap { x => origCounter.incr; fn(x) }.name("FM") .filter { x => fmCounter.incrBy(2); true } .map { x => fltrCounter.incr; x } .sumByKey(store) } } class TestGraphs[P <: Platform[P], T: Manifest: Arbitrary, K: Arbitrary, V: Arbitrary: Equiv: Monoid](platform: P)( store: () => P#Store[K, V])(sink: () => P#Sink[T])( sourceMaker: TraversableOnce[T] => Producer[P, T])( toLookupFn: P#Store[K, V] => (K => Option[V]))( toSinkChecker: (P#Sink[T], List[T]) => Boolean)( run: (P, P#Plan[_]) => Unit) { def diamondChecker(items: List[T], fnA: T => List[(K, V)], fnB: T => List[(K, V)]): Boolean = { val currentStore = store() val currentSink = sink() // Use the supplied platform to execute the source into the // supplied store. val plan = platform.plan { TestGraphs.diamondJob(sourceMaker(items), currentSink, currentStore)(fnA)(fnB) } run(platform, plan) val lookupFn = toLookupFn(currentStore) TestGraphs.diamondJobInScala(items)(fnA)(fnB).forall { case (k, v) => val lv = lookupFn(k).getOrElse(Monoid.zero) val eqv = Equiv[V].equiv(v, lv) if (!eqv) { println(s"in diamondChecker: $k, $v is scala result, but platform gave $lv") } eqv } && toSinkChecker(currentSink, items) } /** * Accepts a platform, and supplier of an EMPTY store from K -> V * and returns a ScalaCheck property. The property generates a * random function from T => TraversableOnce[(K, V)], wires up * singleStepJob summingbird job using this function and runs the * job using the supplied platform. * * Results are retrieved using the supplied toMap function. 
The * initial data source is generated using the supplied sourceMaker * function. */ def singleStepChecker(items: List[T], fn: T => List[(K, V)]): Boolean = { val currentStore = store() // Use the supplied platform to execute the source into the // supplied store. val plan = platform.plan { TestGraphs.singleStepJob(sourceMaker(items), currentStore)(fn) } run(platform, plan) val lookupFn = toLookupFn(currentStore) TestGraphs.singleStepInScala(items)(fn).forall { case (k, v) => val lv = lookupFn(k).getOrElse(Monoid.zero) Equiv[V].equiv(v, lv) } } /** * Accepts a platform, a service of K -> JoinedU and a supplier of * an EMPTY store from K -> V and returns a ScalaCheck * property. The property generates random functions between the * types required by leftJoinJob, wires up a summingbird job using * those functions and runs the job using the supplied platform. * * Results are retrieved using the supplied toMap function. The * service is tested in scala's in-memory mode using serviceToFn, * and the initial data source is generated using the supplied * sourceMaker function. */ def leftJoinChecker[U: Arbitrary, JoinedU: Arbitrary](service: P#Service[K, JoinedU], serviceToFn: P#Service[K, JoinedU] => (K => Option[JoinedU]), items: List[T], preJoinFn: T => List[(K, U)], postJoinFn: ((K, (U, Option[JoinedU]))) => List[(K, V)]): Boolean = { val currentStore = store() val plan = platform.plan { TestGraphs.leftJoinJob(sourceMaker(items), service, currentStore)(preJoinFn)(postJoinFn) } run(platform, plan) val serviceFn = serviceToFn(service) val lookupFn = toLookupFn(currentStore) MapAlgebra.sumByKey( items .flatMap(preJoinFn) .map { case (k, u) => (k, (u, serviceFn(k))) } .flatMap(postJoinFn) ).forall { case (k, v) => val lv = lookupFn(k).getOrElse(Monoid.zero) Equiv[V].equiv(v, lv) } } }
nabarunnag/Summingbird_dev
summingbird-core-test/src/main/scala/com/twitter/summingbird/TestGraphs.scala
Scala
apache-2.0
25,386
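TestGraphs' private scanSum helper pairs every value with the running sum of everything before it, the same (Option[V], V) shape that sumByKey stores emit. A dependency-free illustration of the pattern (assumed names, not part of TestGraphs, and specialized to Int instead of an algebird Semigroup):

object ScanSumSketch {
  // Pair each element with the optional running sum of all previous elements.
  def scanSum(it: Iterator[Int]): Iterator[(Option[Int], Int)] = {
    var prev: Option[Int] = None
    it.map { v =>
      val res = (prev, v)
      prev = Some(prev.fold(v)(_ + v)) // fold: first element starts the sum
      res
    }
  }

  def main(args: Array[String]): Unit = {
    // Prints List((None,1), (Some(1),2), (Some(3),3))
    println(scanSum(Iterator(1, 2, 3)).toList)
  }
}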
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package kafka.log import java.io.File import java.util.Properties import kafka.api.KAFKA_0_11_0_IV0 import kafka.api.{KAFKA_0_10_0_IV1, KAFKA_0_9_0} import kafka.server.KafkaConfig import kafka.server.checkpoints.OffsetCheckpointFile import kafka.utils._ import org.apache.kafka.common.TopicPartition import org.apache.kafka.common.record._ import org.junit.Assert._ import org.junit._ import org.junit.runner.RunWith import org.junit.runners.Parameterized import org.junit.runners.Parameterized.Parameters import scala.collection._ /** * This is an integration test that tests the fully integrated log cleaner */ @RunWith(value = classOf[Parameterized]) class LogCleanerParameterizedIntegrationTest(compressionCodec: String) extends AbstractLogCleanerIntegrationTest { val codec: CompressionType = CompressionType.forName(compressionCodec) val time = new MockTime() val topicPartitions = Array(new TopicPartition("log", 0), new TopicPartition("log", 1), new TopicPartition("log", 2)) @Test def cleanerTest(): Unit = { val largeMessageKey = 20 val (largeMessageValue, largeMessageSet) = createLargeSingleMessageSet(largeMessageKey, RecordBatch.CURRENT_MAGIC_VALUE) val maxMessageSize = largeMessageSet.sizeInBytes cleaner = makeCleaner(partitions = topicPartitions, maxMessageSize = maxMessageSize) val log = cleaner.logs.get(topicPartitions(0)) val appends = writeDups(numKeys = 100, numDups = 3, log = log, codec = codec) val startSize = log.size cleaner.startup() val firstDirty = log.activeSegment.baseOffset checkLastCleaned("log", 0, firstDirty) val compactedSize = log.logSegments.map(_.size).sum assertTrue(s"log should have been compacted: startSize=$startSize compactedSize=$compactedSize", startSize > compactedSize) checkLogAfterAppendingDups(log, startSize, appends) val appendInfo = log.appendAsLeader(largeMessageSet, leaderEpoch = 0) val largeMessageOffset = appendInfo.firstOffset.get val dups = writeDups(startKey = largeMessageKey + 1, numKeys = 100, numDups = 3, log = log, codec = codec) val appends2 = appends ++ Seq((largeMessageKey, largeMessageValue, largeMessageOffset)) ++ dups val firstDirty2 = log.activeSegment.baseOffset checkLastCleaned("log", 0, firstDirty2) checkLogAfterAppendingDups(log, startSize, appends2) // simulate deleting a partition, by removing it from logs // force a checkpoint // and make sure its gone from checkpoint file cleaner.logs.remove(topicPartitions(0)) cleaner.updateCheckpoints(logDir) val checkpoints = new OffsetCheckpointFile(new File(logDir, cleaner.cleanerManager.offsetCheckpointFile)).read() // we expect partition 0 to be gone assertFalse(checkpoints.contains(topicPartitions(0))) } @Test def testCleansCombinedCompactAndDeleteTopic(): Unit = { val logProps = new Properties() val retentionMs: Integer = 
100000 logProps.put(LogConfig.RetentionMsProp, retentionMs: Integer) logProps.put(LogConfig.CleanupPolicyProp, "compact,delete") def runCleanerAndCheckCompacted(numKeys: Int): (Log, Seq[(Int, String, Long)]) = { cleaner = makeCleaner(partitions = topicPartitions.take(1), propertyOverrides = logProps, backOffMs = 100L) val log = cleaner.logs.get(topicPartitions(0)) val messages = writeDups(numKeys = numKeys, numDups = 3, log = log, codec = codec) val startSize = log.size log.updateHighWatermark(log.logEndOffset) val firstDirty = log.activeSegment.baseOffset cleaner.startup() // should compact the log checkLastCleaned("log", 0, firstDirty) val compactedSize = log.logSegments.map(_.size).sum assertTrue(s"log should have been compacted: startSize=$startSize compactedSize=$compactedSize", startSize > compactedSize) (log, messages) } val (log, _) = runCleanerAndCheckCompacted(100) // Set the last modified time to an old value to force deletion of old segments val endOffset = log.logEndOffset log.logSegments.foreach(_.lastModified = time.milliseconds - (2 * retentionMs)) TestUtils.waitUntilTrue(() => log.logStartOffset == endOffset, "Timed out waiting for deletion of old segments") assertEquals(1, log.numberOfSegments) cleaner.shutdown() // run the cleaner again to make sure there are no issues post deletion val (log2, messages) = runCleanerAndCheckCompacted(20) val read = readFromLog(log2) assertEquals("Contents of the map shouldn't change", toMap(messages), toMap(read)) } @Test def testCleanerWithMessageFormatV0(): Unit = { // zstd compression is not supported with older message formats if (codec == CompressionType.ZSTD) return val largeMessageKey = 20 val (largeMessageValue, largeMessageSet) = createLargeSingleMessageSet(largeMessageKey, RecordBatch.MAGIC_VALUE_V0) val maxMessageSize = codec match { case CompressionType.NONE => largeMessageSet.sizeInBytes case _ => // the broker assigns absolute offsets for message format 0 which potentially causes the compressed size to // increase because the broker offsets are larger than the ones assigned by the client // adding `5` to the message set size is good enough for this test: it covers the increased message size while // still being less than the overhead introduced by the conversion from message format version 0 to 1 largeMessageSet.sizeInBytes + 5 } cleaner = makeCleaner(partitions = topicPartitions, maxMessageSize = maxMessageSize) val log = cleaner.logs.get(topicPartitions(0)) val props = logConfigProperties(maxMessageSize = maxMessageSize) props.put(LogConfig.MessageFormatVersionProp, KAFKA_0_9_0.version) log.config = new LogConfig(props) val appends = writeDups(numKeys = 100, numDups = 3, log = log, codec = codec, magicValue = RecordBatch.MAGIC_VALUE_V0) val startSize = log.size cleaner.startup() val firstDirty = log.activeSegment.baseOffset checkLastCleaned("log", 0, firstDirty) val compactedSize = log.logSegments.map(_.size).sum assertTrue(s"log should have been compacted: startSize=$startSize compactedSize=$compactedSize", startSize > compactedSize) checkLogAfterAppendingDups(log, startSize, appends) val appends2: Seq[(Int, String, Long)] = { val dupsV0 = writeDups(numKeys = 40, numDups = 3, log = log, codec = codec, magicValue = RecordBatch.MAGIC_VALUE_V0) val appendInfo = log.appendAsLeader(largeMessageSet, leaderEpoch = 0) val largeMessageOffset = appendInfo.firstOffset.get // also add some messages with version 1 and version 2 to check that we handle mixed format versions correctly props.put(LogConfig.MessageFormatVersionProp,
KAFKA_0_11_0_IV0.version) log.config = new LogConfig(props) val dupsV1 = writeDups(startKey = 30, numKeys = 40, numDups = 3, log = log, codec = codec, magicValue = RecordBatch.MAGIC_VALUE_V1) val dupsV2 = writeDups(startKey = 15, numKeys = 5, numDups = 3, log = log, codec = codec, magicValue = RecordBatch.MAGIC_VALUE_V2) appends ++ dupsV0 ++ Seq((largeMessageKey, largeMessageValue, largeMessageOffset)) ++ dupsV1 ++ dupsV2 } val firstDirty2 = log.activeSegment.baseOffset checkLastCleaned("log", 0, firstDirty2) checkLogAfterAppendingDups(log, startSize, appends2) } @Test def testCleaningNestedMessagesWithMultipleVersions(): Unit = { // zstd compression is not supported with older message formats if (codec == CompressionType.ZSTD) return val maxMessageSize = 192 cleaner = makeCleaner(partitions = topicPartitions, maxMessageSize = maxMessageSize, segmentSize = 256) val log = cleaner.logs.get(topicPartitions(0)) val props = logConfigProperties(maxMessageSize = maxMessageSize, segmentSize = 256) props.put(LogConfig.MessageFormatVersionProp, KAFKA_0_9_0.version) log.config = new LogConfig(props) // with compression enabled, these messages will be written as a single message containing // all of the individual messages var appendsV0 = writeDupsSingleMessageSet(numKeys = 2, numDups = 3, log = log, codec = codec, magicValue = RecordBatch.MAGIC_VALUE_V0) appendsV0 ++= writeDupsSingleMessageSet(numKeys = 2, startKey = 3, numDups = 2, log = log, codec = codec, magicValue = RecordBatch.MAGIC_VALUE_V0) props.put(LogConfig.MessageFormatVersionProp, KAFKA_0_10_0_IV1.version) log.config = new LogConfig(props) var appendsV1 = writeDupsSingleMessageSet(startKey = 4, numKeys = 2, numDups = 2, log = log, codec = codec, magicValue = RecordBatch.MAGIC_VALUE_V1) appendsV1 ++= writeDupsSingleMessageSet(startKey = 4, numKeys = 2, numDups = 2, log = log, codec = codec, magicValue = RecordBatch.MAGIC_VALUE_V1) appendsV1 ++= writeDupsSingleMessageSet(startKey = 6, numKeys = 2, numDups = 2, log = log, codec = codec, magicValue = RecordBatch.MAGIC_VALUE_V1) val appends = appendsV0 ++ appendsV1 val startSize = log.size cleaner.startup() val firstDirty = log.activeSegment.baseOffset assertTrue(firstDirty > appendsV0.size) // ensure we clean data from V0 and V1 checkLastCleaned("log", 0, firstDirty) val compactedSize = log.logSegments.map(_.size).sum assertTrue(s"log should have been compacted: startSize=$startSize compactedSize=$compactedSize", startSize > compactedSize) checkLogAfterAppendingDups(log, startSize, appends) } @Test def cleanerConfigUpdateTest(): Unit = { val largeMessageKey = 20 val (largeMessageValue, largeMessageSet) = createLargeSingleMessageSet(largeMessageKey, RecordBatch.CURRENT_MAGIC_VALUE) val maxMessageSize = largeMessageSet.sizeInBytes cleaner = makeCleaner(partitions = topicPartitions, backOffMs = 1, maxMessageSize = maxMessageSize, cleanerIoBufferSize = Some(1)) val log = cleaner.logs.get(topicPartitions(0)) writeDups(numKeys = 100, numDups = 3, log = log, codec = codec) val startSize = log.size cleaner.startup() assertEquals(1, cleaner.cleanerCount) // Verify no cleaning with LogCleanerIoBufferSizeProp=1 val firstDirty = log.activeSegment.baseOffset val topicPartition = new TopicPartition("log", 0) cleaner.awaitCleaned(topicPartition, firstDirty, maxWaitMs = 10) assertTrue("Should not have cleaned", cleaner.cleanerManager.allCleanerCheckpoints.isEmpty) def kafkaConfigWithCleanerConfig(cleanerConfig: CleanerConfig): KafkaConfig = { val props = TestUtils.createBrokerConfig(0, "localhost:2181") 
props.put(KafkaConfig.LogCleanerThreadsProp, cleanerConfig.numThreads.toString) props.put(KafkaConfig.LogCleanerDedupeBufferSizeProp, cleanerConfig.dedupeBufferSize.toString) props.put(KafkaConfig.LogCleanerDedupeBufferLoadFactorProp, cleanerConfig.dedupeBufferLoadFactor.toString) props.put(KafkaConfig.LogCleanerIoBufferSizeProp, cleanerConfig.ioBufferSize.toString) props.put(KafkaConfig.MessageMaxBytesProp, cleanerConfig.maxMessageSize.toString) props.put(KafkaConfig.LogCleanerBackoffMsProp, cleanerConfig.backOffMs.toString) props.put(KafkaConfig.LogCleanerIoMaxBytesPerSecondProp, cleanerConfig.maxIoBytesPerSecond.toString) KafkaConfig.fromProps(props) } // Verify cleaning done with larger LogCleanerIoBufferSizeProp val oldConfig = kafkaConfigWithCleanerConfig(cleaner.currentConfig) val newConfig = kafkaConfigWithCleanerConfig(CleanerConfig(numThreads = 2, dedupeBufferSize = cleaner.currentConfig.dedupeBufferSize, dedupeBufferLoadFactor = cleaner.currentConfig.dedupeBufferLoadFactor, ioBufferSize = 100000, maxMessageSize = cleaner.currentConfig.maxMessageSize, maxIoBytesPerSecond = cleaner.currentConfig.maxIoBytesPerSecond, backOffMs = cleaner.currentConfig.backOffMs)) cleaner.reconfigure(oldConfig, newConfig) assertEquals(2, cleaner.cleanerCount) checkLastCleaned("log", 0, firstDirty) val compactedSize = log.logSegments.map(_.size).sum assertTrue(s"log should have been compacted: startSize=$startSize compactedSize=$compactedSize", startSize > compactedSize) } private def checkLastCleaned(topic: String, partitionId: Int, firstDirty: Long): Unit = { // wait until cleaning up to base_offset, note that cleaning happens only when "log dirty ratio" is higher than // LogConfig.MinCleanableDirtyRatioProp val topicPartition = new TopicPartition(topic, partitionId) cleaner.awaitCleaned(topicPartition, firstDirty) val lastCleaned = cleaner.cleanerManager.allCleanerCheckpoints(topicPartition) assertTrue(s"log cleaner should have processed up to offset $firstDirty, but lastCleaned=$lastCleaned", lastCleaned >= firstDirty) } private def checkLogAfterAppendingDups(log: Log, startSize: Long, appends: Seq[(Int, String, Long)]): Unit = { val read = readFromLog(log) assertEquals("Contents of the map shouldn't change", toMap(appends), toMap(read)) assertTrue(startSize > log.size) } private def toMap(messages: Iterable[(Int, String, Long)]): Map[Int, (String, Long)] = { messages.map { case (key, value, offset) => key -> (value, offset) }.toMap } private def readFromLog(log: Log): Iterable[(Int, String, Long)] = { import JavaConverters._ for (segment <- log.logSegments; deepLogEntry <- segment.log.records.asScala) yield { val key = TestUtils.readString(deepLogEntry.key).toInt val value = TestUtils.readString(deepLogEntry.value) (key, value, deepLogEntry.offset) } } private def writeDupsSingleMessageSet(numKeys: Int, numDups: Int, log: Log, codec: CompressionType, startKey: Int = 0, magicValue: Byte): Seq[(Int, String, Long)] = { val kvs = for (_ <- 0 until numDups; key <- startKey until (startKey + numKeys)) yield { val payload = counter.toString incCounter() (key, payload) } val records = kvs.map { case (key, payload) => new SimpleRecord(key.toString.getBytes, payload.toString.getBytes) } val appendInfo = log.appendAsLeader(MemoryRecords.withRecords(magicValue, codec, records: _*), leaderEpoch = 0) val offsets = appendInfo.firstOffset.get to appendInfo.lastOffset kvs.zip(offsets).map { case (kv, offset) => (kv._1, kv._2, offset) } } } object LogCleanerParameterizedIntegrationTest { @Parameters def 
parameters: java.util.Collection[Array[String]] = { val list = new java.util.ArrayList[Array[String]]() for (codec <- CompressionType.values) list.add(Array(codec.name)) list } }
noslowerdna/kafka
core/src/test/scala/unit/kafka/log/LogCleanerParameterizedIntegrationTest.scala
Scala
apache-2.0
15,484
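The integration tests above assert the core compaction contract through their toMap helper: after cleaning, only the most recent (value, offset) per key survives. A minimal model of that property (assumed names; an illustration only, not Kafka's cleaner):

object CompactionModel {
  // Building a Map keeps the last binding for a duplicated key, which is
  // exactly the record a compacted log is required to retain.
  def compact(messages: Seq[(Int, String, Long)]): Map[Int, (String, Long)] =
    messages.map { case (key, value, offset) => key -> (value, offset) }.toMap

  def main(args: Array[String]): Unit = {
    val log = Seq((1, "a", 0L), (2, "b", 1L), (1, "c", 2L))
    // Prints Map(1 -> (c,2), 2 -> (b,1)): the earlier record for key 1 is discarded.
    println(compact(log))
  }
}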
package org.jetbrains.plugins.scala
package lang.psi.applicability

import lang.psi.types._

/**
 * Pavel.Fatin, 18.05.2010
 */

class NamedTest extends ApplicabilityTestBase {
  def testFine {
    assertProblems("(a: A)", "(a = A)") {
      case Nil =>
    }
    assertProblems("(a: A, b: B)", "(a = A, b = B)") {
      case Nil =>
    }
  }

  def testReversed {
    assertProblems("(a: A, b: B)", "(b = B, a = A)") {
      case Nil =>
    }
  }

  def testPositionalWithNamed {
    assertProblems("(a: A, b: B)", "(A, b = B)") {
      case Nil =>
    }
    //TODO compiler allows such calls, they seem to be OK
    // assertProblems("(a: A, b: B)", "(a = A, b)") {
    //   case Nil =>
    // }
  }

  def testPositionalAfterNamed {
    assertProblems("(a: A, b: B)", "(b = B, A)") {
      case PositionalAfterNamedArgument(Expression("A")) :: Nil =>
    }
    assertProblems("(a: A, b: B, c: C)", "(c = C, A, B)") {
      case PositionalAfterNamedArgument(Expression("A")) :: PositionalAfterNamedArgument(Expression("B")) :: Nil =>
    }
    assertProblems("(a: A, b: B, c: C)", "(c = C, A, B)") {
      case PositionalAfterNamedArgument(Expression("A")) :: PositionalAfterNamedArgument(Expression("B")) :: Nil =>
    }
    assertProblems("(a: A, b: B, c: C)", "(A, c = C, B)") {
      case PositionalAfterNamedArgument(Expression("B")) :: Nil =>
    }
  }

  def testNamedDuplicates {
    assertProblems("(a: A)", "(a = A, a = null)") {
      case ParameterSpecifiedMultipleTimes(Assignment("a = A")) :: ParameterSpecifiedMultipleTimes(Assignment("a = null")) :: Nil =>
    }
    assertProblems("(a: A)", "(a = A, a = A, a = A)") {
      case ParameterSpecifiedMultipleTimes(Assignment("a = A")) :: ParameterSpecifiedMultipleTimes(Assignment("a = A")) :: ParameterSpecifiedMultipleTimes(Assignment("a = A")) :: Nil =>
    }
    assertProblems("(a: A, b: B)", "(a = A, a = null, b = B, b = null)") {
      case ParameterSpecifiedMultipleTimes(Assignment("a = A")) :: ParameterSpecifiedMultipleTimes(Assignment("a = null")) :: ParameterSpecifiedMultipleTimes(Assignment("b = B")) :: ParameterSpecifiedMultipleTimes(Assignment("b = null")) :: Nil =>
    }
    assertProblems("(a: A, b: B)", "(A, b = B, b = null)") {
      case ParameterSpecifiedMultipleTimes(Assignment("b = B")) :: ParameterSpecifiedMultipleTimes(Assignment("b = null")) :: Nil =>
    }
  }

  def testUnresolvedParameter {
    assertProblems("()", "(a = A)") {
      case ExcessArgument(Assignment("a = A")) :: Nil =>
    }
    assertProblems("()", "(a = A, b = B)") {
      case ExcessArgument(Assignment("a = A")) :: ExcessArgument(Assignment("b = B")) :: Nil =>
    }
    assertProblems("(a: A)", "(a = A, b = B)") {
      case ExcessArgument(Assignment("b = B")) :: Nil =>
    }
  }

  def testNamedUnresolvedDuplicates {
    assertProblems("(a: A)", "(b = A, b = null)") {
      case ParameterSpecifiedMultipleTimes(Assignment("b = A")) :: ParameterSpecifiedMultipleTimes(Assignment("b = null")) :: Nil =>
    }
  }

  /*def testDoesNotTakeParameters {
    assertProblems("", "(a = A)") {
      case DoesNotTakeParameters() :: Nil =>
    }
    assertProblems("", "(a = A, b = B)") {
      case DoesNotTakeParameters() :: Nil =>
    }
  }*/

  def testTooManyArguments {
    assertProblems("(a: A)", "(A, a = A)") {
      case ExcessArgument(Expression("a = A")) :: Nil =>
    }
    assertProblems("(a: A, b: B)", "(A, B, a = A)") {
      case ExcessArgument(Expression("a = A")) :: Nil =>
    }
    assertProblems("(a: A, b: B)", "(A, B, b = B)") {
      case ExcessArgument(Expression("b = B")) :: Nil =>
    }
    assertProblems("(a: A, b: B)", "(A, B, a = A, b = B)") {
      case ExcessArgument(Expression("a = A")) :: ExcessArgument(Expression("b = B")) :: Nil =>
    }
  }

  def testTypeMismatch {
    assertProblems("(a: A)", "(a = B)") {
      case TypeMismatch(Expression("B"), Type("A")) :: Nil =>
    }
    assertProblems("(a: A, b: B)", "(a = B, b = A)") {
      case TypeMismatch(Expression("B"), Type("A")) :: TypeMismatch(Expression("A"), Type("B")) :: Nil =>
    }
  }
}
consulo/consulo-scala
test/org/jetbrains/plugins/scala/lang/psi/applicability/NamedTest.scala
Scala
apache-2.0
4,247
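The rules NamedTest exercises are plain Scala call-site semantics and can be reproduced with any ordinary method (an assumed example, independent of the test fixtures above):

object NamedArgsDemo {
  def f(a: Int, b: String): String = b * a

  def main(args: Array[String]): Unit = {
    f(2, b = "x")      // fine: positional first, then named, in declaration order
    f(b = "x", a = 2)  // fine: all-named arguments may be reordered freely
    // f(b = "x", 2)   // does not compile: positional argument after a named one
    // f(a = 1, a = 2) // does not compile: parameter 'a' specified twice
  }
}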
/**
 * Copyright (C) 2009-2011 the original author or authors.
 * See the notice.md file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.fusesource.scalate.scuery.support

import _root_.org.fusesource.scalate.FunSuiteSupport
import org.fusesource.scalate.scuery.Selector
import org.fusesource.scalate.scuery.XmlHelper._

import xml.{ Elem, Node, NodeSeq }

abstract class CssParserTestSupport extends FunSuiteSupport {
  var parser = new CssParser

  def xml: Node

  def assertFilter(selector: String, expected: NodeSeq): Unit = {
    test("assertFilter: " + selector) {
      val actual = xml.$(selector)
      debug("filtering selector: %s expected: %s actual: %s", selector, expected, actual)
      assertResult(expected) {
        actual
      }
    }
  }

  def assertMatches(css: String, node: Node): Unit = assertSelector("assertMatches ", css, node, true)

  def assertNotMatches(css: String, node: Node): Unit = assertSelector("assertNotMatches ", css, node, false)

  def assertSelector(message: String, css: String, node: Node, expected: Boolean): Unit = {
    test(message + css + " on " + summary(node)) {
      val selector = Selector(css)
      val ancestors = ancestorsOf(node)
      debug("testing selector: " + selector + " on " + summary(node) + " with ancestors: " + summary(ancestors))
      assertResult(expected) {
        selector.matches(node, ancestors)
      }
    }
  }

  def ancestorsOf(node: Node, ancestor: Node = xml): Seq[Node] = {
    def findChild(node: Node, ancestor: Node): Option[Seq[Node]] = {
      if (node == ancestor) {
        Some(Nil)
      } else if (ancestor.contains(node)) {
        Some(ancestor :: Nil)
      } else {
        var a: Option[Seq[Node]] = None
        for (c <- ancestor.child if a.isEmpty) {
          a = findChild(node, c)
        }
        a match {
          case Some(l) => Some(l ++ ancestor)
          case _ => a
        }
      }
    }
    findChild(node, ancestor).getOrElse(Nil)
  }

  protected def summary(node: Node): String = node match {
    case e: Elem => replaceContent(e, Nil).toString
    case _ => node.toString
  }

  protected def summary(nodes: NodeSeq): String = nodes.map(summary(_)).mkString(" ")
}
maslovalex/scalate
scalate-core/src/test/scala/org/fusesource/scalate/scuery/support/CssParserTestSupport.scala
Scala
apache-2.0
2,782
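A hypothetical concrete suite (assumed markup and assumed selector support in scuery; not part of the project) showing how CssParserTestSupport is meant to be used: supply the document via xml, then register selector expectations in the class body:

class DivSelectorTest extends CssParserTestSupport {
  // The document under test; assertSelector computes ancestors from this root.
  val xml = <body><div id="a" class="note">hi</div><p>bye</p></body>

  assertMatches("div#a", (xml \ "div").head)   // id selector should match the div
  assertMatches(".note", (xml \ "div").head)   // class selector should match too
  assertNotMatches("p.note", (xml \ "p").head) // the p element has no 'note' class
}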
package com.tribbloids.spookystuff.actions

import java.util.Date

import com.tribbloids.spookystuff.{QueryException, SpookyEnvFixture}

/**
  * Created by peng on 08/09/15.
  */
//TODO: test independently for each cache type (after switch for different cache is implemented)
class TestWayback extends SpookyEnvFixture {

  import com.tribbloids.spookystuff.dsl._

  import scala.concurrent.duration._

  it("Wget.waybackTo should work on cache") {
    spooky.spookyConf.cacheWrite = true
    spooky.spookyConf.IgnoreCachedDocsBefore = Some(new Date())

    val dates: Seq[Long] = (0 to 2).map { i =>
      // the 10-second delay keeps the cached snapshots' timestamps clearly apart
      val pages = (Delay(10.seconds) +> Wget("http://www.wikipedia.org")).head.fetch(spooky)
      assert(pages.size == 1)
      pages.head.timeMillis
    }

    spooky.spookyConf.cacheRead = true
    val cachedPages = (Delay(10.seconds) +> Wget("http://www.wikipedia.org").waybackToTimeMillis(dates(1) + 2000)).head.fetch(spooky)
    assert(cachedPages.size == 1)
    assert(cachedPages.head.timeMillis == dates(1))

    spooky.spookyConf.remote = false
    intercept[QueryException] {
      (Delay(10.seconds) +> Wget("http://www.wikipedia.org").waybackToTimeMillis(dates.head - 2000)).head.fetch(spooky)
    }
  }

  it("Snapshot.waybackTo should work on cache") {
    spooky.spookyConf.cacheWrite = true
    spooky.spookyConf.IgnoreCachedDocsBefore = Some(new Date())

    val dates: Seq[Long] = (0 to 2).map { i =>
      // the 10-second delay keeps the cached snapshots' timestamps clearly apart
      val pages = (Delay(10.seconds) +> Visit("http://www.wikipedia.org")).rewriteGlobally(defaultSchema).head.fetch(spooky)
      assert(pages.size == 1)
      pages.head.timeMillis
    }

    spooky.spookyConf.cacheRead = true
    val cachedPages = (Delay(10.seconds) +> Visit("http://www.wikipedia.org") +> Snapshot().waybackToTimeMillis(dates(1) + 2000)).head.fetch(spooky)
    assert(cachedPages.size == 1)
    assert(cachedPages.head.timeMillis == dates(1))

    spooky.spookyConf.remote = false
    intercept[QueryException] {
      (Delay(10.seconds) +> Visit("http://www.wikipedia.org") +> Snapshot().waybackToTimeMillis(dates.head - 2000)).head.fetch(spooky)
    }
  }

  it("Screenshot.waybackTo should work on cache") {
    spooky.spookyConf.cacheWrite = true
    spooky.spookyConf.IgnoreCachedDocsBefore = Some(new Date())

    val dates: Seq[Long] = (0 to 2).map { i =>
      // the 10-second delay keeps the cached snapshots' timestamps clearly apart
      val pages = (Delay(10.seconds) +> Visit("http://www.wikipedia.org") +> Screenshot()).head.fetch(spooky)
      assert(pages.size == 1)
      pages.head.timeMillis
    }

    spooky.spookyConf.cacheRead = true
    val cachedPages = (Delay(10.seconds) +> Visit("http://www.wikipedia.org") +> Screenshot().waybackToTimeMillis(dates(1) + 2000)).head.fetch(spooky)
    assert(cachedPages.size == 1)
    assert(cachedPages.head.timeMillis == dates(1))

    spooky.spookyConf.remote = false
    intercept[QueryException] {
      (Delay(10.seconds) +> Visit("http://www.wikipedia.org") +> Screenshot().waybackToTimeMillis(dates.head - 2000)).head.fetch(spooky)
    }
  }
}
tribbloid/spookystuff
core/src/test/scala/com/tribbloids/spookystuff/actions/TestWayback.scala
Scala
apache-2.0
3,135
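All three tests above rely on the same lookup rule: given several cached snapshots, waybackToTimeMillis(t) resolves to the newest snapshot that is not newer than t, and fails if none qualifies. A minimal sketch of that rule (assumed types; not SpookyStuff's actual cache code):

object WaybackSketch {
  final case class Cached(timeMillis: Long)

  // Newest snapshot with timeMillis <= wayback; None mirrors the
  // QueryException case in the tests (no eligible snapshot at all).
  def resolve(snapshots: Seq[Cached], wayback: Long): Option[Cached] = {
    val eligible = snapshots.filter(_.timeMillis <= wayback)
    if (eligible.isEmpty) None
    else Some(eligible.maxBy(_.timeMillis))
  }
}

With snapshots at dates(0) < dates(1) < dates(2) spaced more than 2 seconds apart, resolve(snapshots, dates(1) + 2000) picks the dates(1) snapshot and resolve(snapshots, dates(0) - 2000) yields None, matching the assertions above.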
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.storage import java.nio.ByteBuffer import java.util.LinkedHashMap import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import org.apache.spark.TaskContext import org.apache.spark.memory.MemoryManager import org.apache.spark.util.{SizeEstimator, Utils} import org.apache.spark.util.collection.SizeTrackingVector private case class MemoryEntry(value: Any, size: Long, deserialized: Boolean) /** * Stores blocks in memory, either as Arrays of deserialized Java objects or as * serialized ByteBuffers. */ private[spark] class MemoryStore(blockManager: BlockManager, memoryManager: MemoryManager) extends BlockStore(blockManager) { // Note: all changes to memory allocations, notably putting blocks, evicting blocks, and // acquiring or releasing unroll memory, must be synchronized on `memoryManager`! private val conf = blockManager.conf private val entries = new LinkedHashMap[BlockId, MemoryEntry](32, 0.75f, true) // A mapping from taskAttemptId to amount of memory used for unrolling a block (in bytes) // All accesses of this map are assumed to have manually synchronized on `memoryManager` private val unrollMemoryMap = mutable.HashMap[Long, Long]() // Same as `unrollMemoryMap`, but for pending unroll memory as defined below. // Pending unroll memory refers to the intermediate memory occupied by a task // after the unroll but before the actual putting of the block in the cache. // This chunk of memory is expected to be released *as soon as* we finish // caching the corresponding block as opposed to until after the task finishes. // This is only used if a block is successfully unrolled in its entirety in // memory (SPARK-4777). private val pendingUnrollMemoryMap = mutable.HashMap[Long, Long]() // Initial memory to request before unrolling any block private val unrollMemoryThreshold: Long = conf.getLong("spark.storage.unrollMemoryThreshold", 1024 * 1024) /** Total amount of memory available for storage, in bytes. */ private def maxMemory: Long = memoryManager.maxStorageMemory if (maxMemory < unrollMemoryThreshold) { logWarning(s"Max memory ${Utils.bytesToString(maxMemory)} is less than the initial memory " + s"threshold ${Utils.bytesToString(unrollMemoryThreshold)} needed to store a block in " + s"memory. Please configure Spark with more memory.") } logInfo("MemoryStore started with capacity %s".format(Utils.bytesToString(maxMemory))) /** Total storage memory used including unroll memory, in bytes. */ private def memoryUsed: Long = memoryManager.storageMemoryUsed /** * Amount of storage memory, in bytes, used for caching blocks. * This does not include memory used for unrolling. 
*/ private def blocksMemoryUsed: Long = memoryManager.synchronized { memoryUsed - currentUnrollMemory } override def getSize(blockId: BlockId): Long = { entries.synchronized { entries.get(blockId).size } } override def putBytes(blockId: BlockId, _bytes: ByteBuffer, level: StorageLevel): PutResult = { // Work on a duplicate - since the original input might be used elsewhere. val bytes = _bytes.duplicate() bytes.rewind() if (level.deserialized) { val values = blockManager.dataDeserialize(blockId, bytes) putIterator(blockId, values, level, returnValues = true) } else { val droppedBlocks = new ArrayBuffer[(BlockId, BlockStatus)] tryToPut(blockId, bytes, bytes.limit, deserialized = false, droppedBlocks) PutResult(bytes.limit(), Right(bytes.duplicate()), droppedBlocks) } } /** * Use `size` to test if there is enough space in MemoryStore. If so, create the ByteBuffer and * put it into MemoryStore. Otherwise, the ByteBuffer won't be created. * * The caller should guarantee that `size` is correct. */ def putBytes(blockId: BlockId, size: Long, _bytes: () => ByteBuffer): PutResult = { // Work on a duplicate - since the original input might be used elsewhere. lazy val bytes = _bytes().duplicate().rewind().asInstanceOf[ByteBuffer] val droppedBlocks = new ArrayBuffer[(BlockId, BlockStatus)] val putSuccess = tryToPut(blockId, () => bytes, size, deserialized = false, droppedBlocks) val data = if (putSuccess) { assert(bytes.limit == size) Right(bytes.duplicate()) } else { null } PutResult(size, data, droppedBlocks) } override def putArray( blockId: BlockId, values: Array[Any], level: StorageLevel, returnValues: Boolean): PutResult = { val droppedBlocks = new ArrayBuffer[(BlockId, BlockStatus)] if (level.deserialized) { val sizeEstimate = SizeEstimator.estimate(values.asInstanceOf[AnyRef]) tryToPut(blockId, values, sizeEstimate, deserialized = true, droppedBlocks) PutResult(sizeEstimate, Left(values.iterator), droppedBlocks) } else { val bytes = blockManager.dataSerialize(blockId, values.iterator) tryToPut(blockId, bytes, bytes.limit, deserialized = false, droppedBlocks) PutResult(bytes.limit(), Right(bytes.duplicate()), droppedBlocks) } } override def putIterator( blockId: BlockId, values: Iterator[Any], level: StorageLevel, returnValues: Boolean): PutResult = { putIterator(blockId, values, level, returnValues, allowPersistToDisk = true) } /** * Attempt to put the given block in memory store. * * There may not be enough space to fully unroll the iterator in memory, in which case we * optionally drop the values to disk if * (1) the block's storage level specifies useDisk, and * (2) `allowPersistToDisk` is true. * * One scenario in which `allowPersistToDisk` is false is when the BlockManager reads a block * back from disk and attempts to cache it in memory. In this case, we should not persist the * block back on disk again, as it is already in disk store. 
*/ private[storage] def putIterator( blockId: BlockId, values: Iterator[Any], level: StorageLevel, returnValues: Boolean, allowPersistToDisk: Boolean): PutResult = { val droppedBlocks = new ArrayBuffer[(BlockId, BlockStatus)] val unrolledValues = unrollSafely(blockId, values, droppedBlocks) unrolledValues match { case Left(arrayValues) => // Values are fully unrolled in memory, so store them as an array val res = putArray(blockId, arrayValues, level, returnValues) droppedBlocks ++= res.droppedBlocks PutResult(res.size, res.data, droppedBlocks) case Right(iteratorValues) => // Not enough space to unroll this block; drop to disk if applicable if (level.useDisk && allowPersistToDisk) { logWarning(s"Persisting block $blockId to disk instead.") val res = blockManager.diskStore.putIterator(blockId, iteratorValues, level, returnValues) PutResult(res.size, res.data, droppedBlocks) } else { PutResult(0, Left(iteratorValues), droppedBlocks) } } } override def getBytes(blockId: BlockId): Option[ByteBuffer] = { val entry = entries.synchronized { entries.get(blockId) } if (entry == null) { None } else if (entry.deserialized) { Some(blockManager.dataSerialize(blockId, entry.value.asInstanceOf[Array[Any]].iterator)) } else { Some(entry.value.asInstanceOf[ByteBuffer].duplicate()) // Doesn't actually copy the data } } override def getValues(blockId: BlockId): Option[Iterator[Any]] = { val entry = entries.synchronized { entries.get(blockId) } if (entry == null) { None } else if (entry.deserialized) { Some(entry.value.asInstanceOf[Array[Any]].iterator) } else { val buffer = entry.value.asInstanceOf[ByteBuffer].duplicate() // Doesn't actually copy data Some(blockManager.dataDeserialize(blockId, buffer)) } } override def remove(blockId: BlockId): Boolean = memoryManager.synchronized { val entry = entries.synchronized { entries.remove(blockId) } if (entry != null) { memoryManager.releaseStorageMemory(entry.size) logDebug(s"Block $blockId of size ${entry.size} dropped " + s"from memory (free ${maxMemory - blocksMemoryUsed})") true } else { false } } override def clear(): Unit = memoryManager.synchronized { entries.synchronized { entries.clear() } unrollMemoryMap.clear() pendingUnrollMemoryMap.clear() memoryManager.releaseAllStorageMemory() logInfo("MemoryStore cleared") } /** * Unroll the given block in memory safely. * * The safety of this operation refers to avoiding potential OOM exceptions caused by * unrolling the entirety of the block in memory at once. This is achieved by periodically * checking whether the memory restrictions for unrolling blocks are still satisfied, * stopping immediately if not. This check is a safeguard against the scenario in which * there is not enough free memory to accommodate the entirety of a single block. * * This method returns either an array with the contents of the entire block or an iterator * containing the values of the block (if the array would have exceeded available memory). */ def unrollSafely( blockId: BlockId, values: Iterator[Any], droppedBlocks: ArrayBuffer[(BlockId, BlockStatus)]) : Either[Array[Any], Iterator[Any]] = { // Number of elements unrolled so far var elementsUnrolled = 0 // Whether there is still enough memory for us to continue unrolling this block var keepUnrolling = true // Initial per-task memory to request for unrolling blocks (bytes). Exposed for testing. 
val initialMemoryThreshold = unrollMemoryThreshold // How often to check whether we need to request more memory val memoryCheckPeriod = 16 // Memory currently reserved by this task for this particular unrolling operation var memoryThreshold = initialMemoryThreshold // Memory to request as a multiple of current vector size val memoryGrowthFactor = 1.5 // Keep track of pending unroll memory reserved by this method. var pendingMemoryReserved = 0L // Underlying vector for unrolling the block var vector = new SizeTrackingVector[Any] // Request enough memory to begin unrolling keepUnrolling = reserveUnrollMemoryForThisTask(blockId, initialMemoryThreshold, droppedBlocks) if (!keepUnrolling) { logWarning(s"Failed to reserve initial memory threshold of " + s"${Utils.bytesToString(initialMemoryThreshold)} for computing block $blockId in memory.") } else { pendingMemoryReserved += initialMemoryThreshold } // Unroll this block safely, checking whether we have exceeded our threshold periodically try { while (values.hasNext && keepUnrolling) { vector += values.next() if (elementsUnrolled % memoryCheckPeriod == 0) { // If our vector's size has exceeded the threshold, request more memory val currentSize = vector.estimateSize() if (currentSize >= memoryThreshold) { val amountToRequest = (currentSize * memoryGrowthFactor - memoryThreshold).toLong keepUnrolling = reserveUnrollMemoryForThisTask( blockId, amountToRequest, droppedBlocks) if (keepUnrolling) { pendingMemoryReserved += amountToRequest } // New threshold is currentSize * memoryGrowthFactor memoryThreshold += amountToRequest } } elementsUnrolled += 1 } if (keepUnrolling) { // We successfully unrolled the entirety of this block Left(vector.toArray) } else { // We ran out of space while unrolling the values for this block logUnrollFailureMessage(blockId, vector.estimateSize()) Right(vector.iterator ++ values) } } finally { // If we return an array, the values returned here will be cached in `tryToPut` later. // In this case, we should release the memory only after we cache the block there. if (keepUnrolling) { val taskAttemptId = currentTaskAttemptId() memoryManager.synchronized { // Since we continue to hold onto the array until we actually cache it, we cannot // release the unroll memory yet. Instead, we transfer it to pending unroll memory // so `tryToPut` can further transfer it to normal storage memory later. // TODO: we can probably express this without pending unroll memory (SPARK-10907) unrollMemoryMap(taskAttemptId) -= pendingMemoryReserved pendingUnrollMemoryMap(taskAttemptId) = pendingUnrollMemoryMap.getOrElse(taskAttemptId, 0L) + pendingMemoryReserved } } else { // Otherwise, if we return an iterator, we can only release the unroll memory when // the task finishes since we don't know when the iterator will be consumed. } } } /** * Return the RDD ID that a given block ID is from, or None if it is not an RDD block. */ private def getRddId(blockId: BlockId): Option[Int] = { blockId.asRDDId.map(_.rddId) } private def tryToPut( blockId: BlockId, value: Any, size: Long, deserialized: Boolean, droppedBlocks: mutable.Buffer[(BlockId, BlockStatus)]): Boolean = { tryToPut(blockId, () => value, size, deserialized, droppedBlocks) } /** * Try to put in a set of values, if we can free up enough space. The value should either be * an Array if deserialized is true or a ByteBuffer otherwise. Its (possibly estimated) size * must also be passed by the caller. * * `value` will be lazily created. 
If it cannot be put into MemoryStore or disk, `value` won't be * created to avoid OOM since it may be a big ByteBuffer. * * Synchronize on `memoryManager` to ensure that all the put requests and their associated block * dropping are done by only one thread at a time. Otherwise while one thread is dropping * blocks to free memory for one block, another thread may use up the freed space for * another block. * * All blocks evicted in the process, if any, will be added to `droppedBlocks`. * * @return whether put was successful. */ private def tryToPut( blockId: BlockId, value: () => Any, size: Long, deserialized: Boolean, droppedBlocks: mutable.Buffer[(BlockId, BlockStatus)]): Boolean = { /* TODO: It's possible to optimize the locking by locking entries only when selecting blocks * to be dropped. Once the to-be-dropped blocks have been selected, and lock on entries has * been released, it must be ensured that those to-be-dropped blocks are not double counted * for freeing up more space for another block that needs to be put. Only then can the actual * dropping of blocks (and writing to disk if necessary) proceed in parallel. */ memoryManager.synchronized { // Note: if we have previously unrolled this block successfully, then pending unroll // memory should be non-zero. This is the amount that we already reserved during the // unrolling process. In this case, we can just reuse this space to cache our block. // The synchronization on `memoryManager` here guarantees that the release and acquire // happen atomically. This relies on the assumption that all memory acquisitions are // synchronized on the same lock. releasePendingUnrollMemoryForThisTask() val enoughMemory = memoryManager.acquireStorageMemory(blockId, size, droppedBlocks) if (enoughMemory) { // We acquired enough memory for the block, so go ahead and put it val entry = new MemoryEntry(value(), size, deserialized) entries.synchronized { entries.put(blockId, entry) } val valuesOrBytes = if (deserialized) "values" else "bytes" logInfo("Block %s stored as %s in memory (estimated size %s, free %s)".format( blockId, valuesOrBytes, Utils.bytesToString(size), Utils.bytesToString(blocksMemoryUsed))) } else { // Tell the block manager that we couldn't put it in memory so that it can drop it to // disk if the block allows disk storage. lazy val data = if (deserialized) { Left(value().asInstanceOf[Array[Any]]) } else { Right(value().asInstanceOf[ByteBuffer].duplicate()) } val droppedBlockStatus = blockManager.dropFromMemory(blockId, () => data) droppedBlockStatus.foreach { status => droppedBlocks += ((blockId, status)) } } enoughMemory } } /** * Try to evict blocks to free up a given amount of space to store a particular block. * Can fail if either the block is bigger than our memory or it would require replacing * another block from the same RDD (which leads to a wasteful cyclic replacement pattern for * RDDs that don't fit into memory that we want to avoid). * * @param blockId the ID of the block we are freeing space for, if any * @param space the size of this block * @param droppedBlocks a holder for blocks evicted in the process * @return whether the requested free space is freed. 
*/ private[spark] def evictBlocksToFreeSpace( blockId: Option[BlockId], space: Long, droppedBlocks: mutable.Buffer[(BlockId, BlockStatus)]): Boolean = { assert(space > 0) memoryManager.synchronized { var freedMemory = 0L val rddToAdd = blockId.flatMap(getRddId) val selectedBlocks = new ArrayBuffer[BlockId] // This is synchronized to ensure that the set of entries is not changed // (because of getValue or getBytes) while traversing the iterator, as that // can lead to exceptions. entries.synchronized { val iterator = entries.entrySet().iterator() while (freedMemory < space && iterator.hasNext) { val pair = iterator.next() val blockId = pair.getKey if (rddToAdd.isEmpty || rddToAdd != getRddId(blockId)) { selectedBlocks += blockId freedMemory += pair.getValue.size } } } if (freedMemory >= space) { logInfo(s"${selectedBlocks.size} blocks selected for dropping") for (blockId <- selectedBlocks) { val entry = entries.synchronized { entries.get(blockId) } // This should never be null as only one task should be dropping // blocks and removing entries. However the check is still here for // future safety. if (entry != null) { val data = if (entry.deserialized) { Left(entry.value.asInstanceOf[Array[Any]]) } else { Right(entry.value.asInstanceOf[ByteBuffer].duplicate()) } val droppedBlockStatus = blockManager.dropFromMemory(blockId, data) droppedBlockStatus.foreach { status => droppedBlocks += ((blockId, status)) } } } true } else { blockId.foreach { id => logInfo(s"Will not store $id as it would require dropping another block " + "from the same RDD") } false } } } override def contains(blockId: BlockId): Boolean = { entries.synchronized { entries.containsKey(blockId) } } private def currentTaskAttemptId(): Long = { // In case this is called on the driver, return an invalid task attempt id. Option(TaskContext.get()).map(_.taskAttemptId()).getOrElse(-1L) } /** * Reserve memory for unrolling the given block for this task. * @return whether the request is granted. */ def reserveUnrollMemoryForThisTask( blockId: BlockId, memory: Long, droppedBlocks: mutable.Buffer[(BlockId, BlockStatus)]): Boolean = { memoryManager.synchronized { val success = memoryManager.acquireUnrollMemory(blockId, memory, droppedBlocks) if (success) { val taskAttemptId = currentTaskAttemptId() unrollMemoryMap(taskAttemptId) = unrollMemoryMap.getOrElse(taskAttemptId, 0L) + memory } success } } /** * Release memory used by this task for unrolling blocks. * If the amount is not specified, remove the current task's allocation altogether. 
*/ def releaseUnrollMemoryForThisTask(memory: Long = Long.MaxValue): Unit = { val taskAttemptId = currentTaskAttemptId() memoryManager.synchronized { if (unrollMemoryMap.contains(taskAttemptId)) { val memoryToRelease = math.min(memory, unrollMemoryMap(taskAttemptId)) if (memoryToRelease > 0) { unrollMemoryMap(taskAttemptId) -= memoryToRelease if (unrollMemoryMap(taskAttemptId) == 0) { unrollMemoryMap.remove(taskAttemptId) } memoryManager.releaseUnrollMemory(memoryToRelease) } } } } /** * Release pending unroll memory for the block this task successfully unrolled, if any. */ def releasePendingUnrollMemoryForThisTask(memory: Long = Long.MaxValue): Unit = { val taskAttemptId = currentTaskAttemptId() memoryManager.synchronized { if (pendingUnrollMemoryMap.contains(taskAttemptId)) { val memoryToRelease = math.min(memory, pendingUnrollMemoryMap(taskAttemptId)) if (memoryToRelease > 0) { pendingUnrollMemoryMap(taskAttemptId) -= memoryToRelease if (pendingUnrollMemoryMap(taskAttemptId) == 0) { pendingUnrollMemoryMap.remove(taskAttemptId) } memoryManager.releaseUnrollMemory(memoryToRelease) } } } } /** * Return the amount of memory currently occupied for unrolling blocks across all tasks. */ def currentUnrollMemory: Long = memoryManager.synchronized { unrollMemoryMap.values.sum + pendingUnrollMemoryMap.values.sum } /** * Return the amount of memory currently occupied for unrolling blocks by this task. */ def currentUnrollMemoryForThisTask: Long = memoryManager.synchronized { unrollMemoryMap.getOrElse(currentTaskAttemptId(), 0L) } /** * Return the number of tasks currently unrolling blocks. */ private def numTasksUnrolling: Int = memoryManager.synchronized { unrollMemoryMap.keys.size } /** * Log information about current memory usage. */ private def logMemoryUsage(): Unit = { logInfo( s"Memory use = ${Utils.bytesToString(blocksMemoryUsed)} (blocks) + " + s"${Utils.bytesToString(currentUnrollMemory)} (scratch space shared across " + s"$numTasksUnrolling task(s)) = ${Utils.bytesToString(memoryUsed)}. " + s"Storage limit = ${Utils.bytesToString(maxMemory)}." ) } /** * Log a warning for failing to unroll a block. * * @param blockId ID of the block we are trying to unroll. * @param finalVectorSize Final size of the vector before unrolling failed. */ private def logUnrollFailureMessage(blockId: BlockId, finalVectorSize: Long): Unit = { logWarning( s"Not enough space to cache $blockId in memory! " + s"(computed ${Utils.bytesToString(finalVectorSize)} so far)" ) logMemoryUsage() } }
chenc10/Spark-PAF
core/src/main/scala/org/apache/spark/storage/MemoryStore.scala
Scala
apache-2.0
24,107
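// Editor's note: a minimal, self-contained sketch (not part of the MemoryStore.scala record
// above) of the unroll-memory policy that `unrollSafely` implements: the estimated size is
// checked every `memoryCheckPeriod` elements, and when it crosses the current threshold the
// reservation grows so that the new threshold becomes `currentSize * memoryGrowthFactor`.
// All names here are hypothetical; real reservations go through reserveUnrollMemoryForThisTask.
object UnrollPolicySketch {
  // Returns the total bytes a task would reserve while unrolling elements of the given sizes.
  def simulate(elementSizes: Iterator[Long], initialThreshold: Long = 1024L * 1024L): Long = {
    val memoryCheckPeriod = 16      // how often to re-check, in elements
    val memoryGrowthFactor = 1.5    // next threshold as a multiple of the current size
    var reserved = initialThreshold // assume the initial reservation was granted
    var threshold = initialThreshold
    var currentSize = 0L
    var elementsUnrolled = 0
    for (size <- elementSizes) {
      currentSize += size
      if (elementsUnrolled % memoryCheckPeriod == 0 && currentSize >= threshold) {
        val amountToRequest = (currentSize * memoryGrowthFactor - threshold).toLong
        reserved += amountToRequest
        threshold += amountToRequest // new threshold is currentSize * memoryGrowthFactor
      }
      elementsUnrolled += 1
    }
    reserved
  }
}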
package scalajsclient import org.scalajs.dom import scala.scalajs.js import scala.scalajs.js.timers._ import scala.scalajs.js.Dynamic.global import org.singlespaced.d3js.d3 import org.singlespaced.d3js.Ops._ import scala.concurrent.duration._ import upickle.default._ import shared._ ////////////////////////////////////////////////////////////////////////// case class EngineMessage( action: String = "", name: String = "", command: String = "", buffer: String = "", available: List[String] = List[String]() ) { } abstract class AbstractEngineSocketActorJS( url: String = "ws://localhost:9000/ws" ) extends JSActor { val socket = new dom.WebSocket(url) socket.onopen = { (e: dom.Event) => self ! SocketOpenedMsg } socket.onmessage = { (e: dom.MessageEvent) => val rawcontent = e.data.toString //println("received: " + rawcontent) try { self ! read[EngineMessage](rawcontent) } catch { case e: Throwable => println("decoding message failed fatally " + rawcontent + " stacktrace : "); e.printStackTrace(); self ! MalformedMessage(rawcontent) } } socket.onclose = (_: dom.raw.CloseEvent) => {} dom.window.onbeforeunload = (_: dom.raw.BeforeUnloadEvent) => { socket.onclose = (_: dom.raw.CloseEvent) => {} socket.close() } def sendStr(str: String) { socket.send(str) } def sendMsg(msg: Any) { msg match { case x: EngineMessage => { val upicklestr = write[EngineMessage](x) socket.send(upicklestr) //println("sent: " + upicklestr) } case _ => println("error: unknown message") } } } ////////////////////////////////////////////////////////////////////////// case object SocketOpenedMsg object SocketActorJS { import ScalaJSUtils._ var domain = "localhost:9000" var prot = "ws" def setupfromurl(url: String) { val pd = getprotanddomain(url) if (pd._1 == "https") prot = "wss" domain = pd._2 //println(s"setting up socket actor from $url , pd: $pd , prot: $prot, domain: $domain, socket url: " + getUrl) } def getUrl: String = { s"$prot://$domain/ws" } } abstract class SocketActorJS( url: String = SocketActorJS.getUrl ) extends JSActor { val socket = new dom.WebSocket(url) socket.onopen = { (e: dom.Event) => self ! SocketOpenedMsg } socket.onmessage = { (e: dom.MessageEvent) => val rawcontent = e.data.toString //println("received: " + rawcontent) try { val firstspace = rawcontent.indexOf(" ") val kind = rawcontent.substring(0, firstspace) val content = rawcontent.substring(firstspace + 1) kind match { case "HelloMessage" => self ! read[HelloMessage](content) case "TableCreationResultMessage" => self ! read[TableCreationResultMessage](content) case "SendTablesResultMessage" => self ! read[SendTablesResultMessage](content) case "SitPlayerResultMessage" => self ! read[SitPlayerResultMessage](content) case "SendTableResultMessage" => self ! read[SendTableResultMessage](content) case "StorePresentationResultMessage" => self ! read[StorePresentationResultMessage](content) case _ => println("unknown message kind " + kind); self ! MalformedMessage(rawcontent) } } catch { case e: Throwable => println("decoding message failed fatally " + rawcontent + " stacktrace : "); e.printStackTrace(); self ! 
MalformedMessage(rawcontent) } } socket.onclose = (_: dom.raw.CloseEvent) => {} dom.window.onbeforeunload = (_: dom.raw.BeforeUnloadEvent) => { socket.onclose = (_: dom.raw.CloseEvent) => {} socket.close() } def sendMsg(msg: Any) { val upicklestr = msg match { case x: HelloMessage => write(x) case x: CreateTableMessage => write(x) case x: StoreViewMessage => write(x) case x: SendTablesMessage => write(x) case x: DeleteTableMessage => write(x) case x: SitPlayerMessage => write(x) case x: SendTableMessage => write(x) case x: SendMoveMessage => write(x) case x: RegisterWebBoardMessage => write(x) case x: StorePresentationMessage => write(x) case _ => println("error: unknown message") } val content = msg.getClass.getSimpleName + " " + upicklestr socket.send(content) //println("sent: " + content) } } case class LoggerStatusProperty( backgroundcolor: String = "#ffffff" ) object Logger { val MAX_ITEMS = 50 val STATUS_PROPERTIES = Map( "ok" -> LoggerStatusProperty("#afffaf"), "failed" -> LoggerStatusProperty("#ffafaf"), "warn" -> LoggerStatusProperty("#ffffaf") ) } case class Logitem( content: String = "", status: String = "" ) { import Logger._ def statusproperty = if (STATUS_PROPERTIES.contains(status)) STATUS_PROPERTIES(status) else LoggerStatusProperty() def bcol = statusproperty.backgroundcolor } case class Logger() extends scala.collection.mutable.ArrayBuffer[Logitem] { import Logger._ def log(li: Logitem) { this += li if (this.length > MAX_ITEMS) this.remove(0) } def reportHTML: String = { val td = """td class="devtd"""" val logcontent = (for (li <- this.reverse) yield { s""" |<tr> |<$td><span style="background-color: ${li.bcol};">${li.content}</span></td> |</tr> """.stripMargin }).mkString("\n") s""" |<table> |$logcontent |</table> """.stripMargin } } object ScalaJSUtils { var implccfg: ChessConfig = ChessConfig() def t(phrase: String) = implccfg.translate(phrase) def log(what: String) = global.console.log(what) def viewportWidth = dom.window.innerWidth def viewportHeight = dom.window.innerHeight def dgebid(id: String) = global.document.getElementById(id) def getpx(pxstr: String) = pxstr.replaceAll("px", "").toDouble def gbcrleft(e: dom.raw.HTMLElement) = e.getBoundingClientRect().left.asInstanceOf[Double] def gbcrleftd(e: scala.scalajs.js.Dynamic) = gbcrleft(e.asInstanceOf[dom.raw.HTMLElement]) def gbcrleftbyid(id: String) = gbcrleftd(dgebid(id)) def gbcrtop(e: dom.raw.HTMLElement) = e.getBoundingClientRect().top.asInstanceOf[Double] def gbcrtopd(e: scala.scalajs.js.Dynamic) = gbcrtop(e.asInstanceOf[dom.raw.HTMLElement]) def gbcrtopbyid(id: String) = gbcrtopd(dgebid(id)) def gbcrwidth(e: dom.raw.HTMLElement) = e.getBoundingClientRect().width.asInstanceOf[Double] def gbcrwidthd(e: scala.scalajs.js.Dynamic) = gbcrwidth(e.asInstanceOf[dom.raw.HTMLElement]) def gbcrwidthbyid(id: String) = gbcrwidthd(dgebid(id)) def gbcrheight(e: dom.raw.HTMLElement) = e.getBoundingClientRect().height.asInstanceOf[Double] def gbcrheightd(e: scala.scalajs.js.Dynamic) = gbcrheight(e.asInstanceOf[dom.raw.HTMLElement]) def gbcrheightbyid(id: String) = gbcrheightd(dgebid(id)) def isincrid(x: Double, y: Double, id: String): Boolean = { val cy = gbcrtopbyid(id) val cx = gbcrleftbyid(id) val w = gbcrwidthbyid(id) val h = gbcrheightbyid(id) if (y < cy) return false if (y > cy + h) return false if (x < cx) return false x < cx + w } def s(id: String) = d3.select("#" + id) def root = s("root") def info = s("info") def roote = dgebid("root") def roottop = gbcrtopd(roote) def rootleft = gbcrleftd(roote) def user = 
root.attr("user") def admin: Boolean = (root.attr("admin") == "true") case class MyFactor(factor: Double = 1.0) { def *(f: Double): MyFactor = MyFactor(this.factor * f) } def scaled(d: Double)(implicit f: MyFactor = MyFactor()): Double = d * f.factor def px(d: Double)(implicit f: MyFactor = MyFactor()): String = scaled(d) + "px" type MouseHandler = js.Function1[dom.MouseEvent, Unit] type FullMouseHandler = js.ThisFunction1[dom.raw.HTMLElement, dom.MouseEvent, Unit] type DragHandler = js.ThisFunction1[dom.raw.HTMLElement, dom.DragEvent, Unit] def svgspinner = s""" <svg version="1.1" id="svg-spinner" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 80 80" xml:space="preserve"> <path id="spinner" fill="#D43B11" d="M40,72C22.4,72,8,57.6,8,40C8,22.4, 22.4,8,40,8c17.6,0,32,14.4,32,32c0,1.1-0.9,2-2,2 s-2-0.9-2-2c0-15.4-12.6-28-28-28S12,24.6,12,40s12.6, 28,28,28c1.1,0,2,0.9,2,2S41.1,72,40,72z" <!-- ANIMATION START --> <animateTransform attributeType="xml" attributeName="transform" type="rotate" from="0 40 40" to="360 40 40" dur="0.8s" repeatCount="indefinite" /> </path> </svg> """ def clearInfo { info.html("") } def coverInfo(msg: String) { info.html("").append("div"). style("position", "fixed"). style("width", viewportWidth + "px"). style("height", viewportHeight + "px"). style("top", "0px"). style("left", "0px"). style("background-color", "#afafaf"). style("border-style", "solid"). style("border-width", "0px"). style("opacity", "0.7"). style("font-size", "40px"). style("text-align", "center"). style("padding-top", "50px"). style("z-index", "500"). html(msg) } def getprotanddomain(url: String): Tuple2[String, String] = { val parts0 = url.split(":\\/\\/") val parts1 = parts0(1).split("\\/") (parts0(0), parts1(0)) } def playsound(which: String) { val id = which + "sound" val el = dgebid(id) el.play() } }
serversideapps/silhmojs
client/src/main/scala/scalajsclient/ScalaJSUtils.scala
Scala
apache-2.0
9,616
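// Editor's note: a tiny JVM-runnable sketch (not part of the repository above) of the URL
// handling in `ScalaJSUtils.getprotanddomain` and `SocketActorJS.setupfromurl`: split
// "proto://host/..." into protocol and domain, then choose "wss" for pages served over
// "https" and plain "ws" otherwise. The object name is hypothetical.
object WsUrlSketch {
  def getprotanddomain(url: String): (String, String) = {
    val parts0 = url.split(":\\/\\/") // regex, matches the literal "://"
    val parts1 = parts0(1).split("\\/")
    (parts0(0), parts1(0))
  }
  def socketUrl(pageUrl: String): String = {
    val (prot, domain) = getprotanddomain(pageUrl)
    val wsProt = if (prot == "https") "wss" else "ws"
    s"$wsProt://$domain/ws"
  }
}
// Usage: WsUrlSketch.socketUrl("https://example.com:9000/play") == "wss://example.com:9000/ws"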
/*
 * SPDX-License-Identifier: Apache-2.0
 *
 * Copyright 2015-2021 Andre White.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.truthencode.ddo.model.feats

import io.truthencode.ddo.model.religions.UndyingCourt
import io.truthencode.ddo.support.requisite.{FeatRequisiteImpl, RequiresAllOfFeat}

trait BelovedOfTheUndyingCourt
  extends FeatRequisiteImpl with EberronReligionNonWarforged with BelovedLevelBase
  with RequiresAllOfFeat with UndyingCourt with TheUndyingCourtFeatBase {
  self: DeityFeat =>

  override def allOfFeats: Seq[Feat] = List(DeityFeat.ChildOfTheUndyingCourt)
}
adarro/ddo-calc
subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/BelovedOfTheUndyingCourt.scala
Scala
apache-2.0
1,139
/*
 * Copyright (C) 2005 - 2019 Schlichtherle IT Services.
 * All rights reserved. Use is subject to license terms.
 */
package global.namespace.truelicense.tests.v2.json

import global.namespace.truelicense.api.LicenseManagementContextBuilder
import global.namespace.truelicense.tests.core.ExtraMapTestContext
import global.namespace.truelicense.tests.v2.core.V2TestContext
import global.namespace.truelicense.v2.json.V2Json

trait V2JsonTestContext extends V2TestContext with ExtraMapTestContext {

  final def managementContextBuilder: LicenseManagementContextBuilder = V2Json.builder
}
christian-schlichtherle/truelicense
tests/src/test/scala/global/namespace/truelicense/tests/v2/json/V2JsonTestContext.scala
Scala
apache-2.0
590
package com.lorandszakacs.sg.exporter

import com.lorandszakacs.sg.exporter.html.HTMLGeneratorAssembly
import com.lorandszakacs.sg.exporter.impl.SGExporterImpl
import com.lorandszakacs.sg.exporter.indexwriter.HTMLIndexWriterAssembly
import com.lorandszakacs.sg.model.SGRepoAssembly

/**
  *
  * @author Lorand Szakacs, lsz@lorandszakacs.com
  * @since 17 Jul 2016
  *
  */
trait SGExporterAssembly extends HTMLIndexWriterAssembly with HTMLGeneratorAssembly {
  this: SGRepoAssembly =>

  def sgExporter: SGExporter = _exporter

  private lazy val _exporter = new SGExporterImpl(
    repo = sgAndHFRepository,
    html = htmlGenerator,
    fileWriter = htmlIndexWriter,
  )
}
lorandszakacs/sg-downloader
sg-harvester/src/main/scala/com/lorandszakacs/sg/exporter/SGExporterAssembly.scala
Scala
apache-2.0
688
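// Editor's note: SGExporterAssembly above is an instance of the cake pattern — a trait
// exposes a lazily wired component and declares its dependencies through a self-type.
// Below is a framework-free sketch of the same wiring style; every name is hypothetical
// and the String-valued components only stand in for real services.
trait RepoModule { def repo: String }
trait ExporterModule { this: RepoModule => // dependency stated via self-type, as in the assembly trait
  def exporter: String = _exporter
  private lazy val _exporter = s"Exporter(over $repo)"
}
// A top-level application object mixes the modules together, satisfying the self-type:
object AssemblySketchApp extends ExporterModule with RepoModule {
  def repo: String = "repo"
}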
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.analysis import org.scalatest.Assertions._ import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.dsl.plans._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, Complete, Count, Max} import org.apache.spark.sql.catalyst.parser.CatalystSqlParser import org.apache.spark.sql.catalyst.plans.{Cross, LeftOuter, RightOuter} import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.catalyst.util.{ArrayBasedMapData, GenericArrayData, MapData} import org.apache.spark.sql.types._ private[sql] case class GroupableData(data: Int) { def getData: Int = data } private[sql] class GroupableUDT extends UserDefinedType[GroupableData] { override def sqlType: DataType = IntegerType override def serialize(groupableData: GroupableData): Int = groupableData.data override def deserialize(datum: Any): GroupableData = { datum match { case data: Int => GroupableData(data) } } override def userClass: Class[GroupableData] = classOf[GroupableData] private[spark] override def asNullable: GroupableUDT = this } private[sql] case class UngroupableData(data: Map[Int, Int]) { def getData: Map[Int, Int] = data } private[sql] class UngroupableUDT extends UserDefinedType[UngroupableData] { override def sqlType: DataType = MapType(IntegerType, IntegerType) override def serialize(ungroupableData: UngroupableData): MapData = { val keyArray = new GenericArrayData(ungroupableData.data.keys.toSeq) val valueArray = new GenericArrayData(ungroupableData.data.values.toSeq) new ArrayBasedMapData(keyArray, valueArray) } override def deserialize(datum: Any): UngroupableData = { datum match { case data: MapData => val keyArray = data.keyArray().array val valueArray = data.valueArray().array assert(keyArray.length == valueArray.length) val mapData = keyArray.zip(valueArray).toMap.asInstanceOf[Map[Int, Int]] UngroupableData(mapData) } } override def userClass: Class[UngroupableData] = classOf[UngroupableData] private[spark] override def asNullable: UngroupableUDT = this } case class TestFunction( children: Seq[Expression], inputTypes: Seq[AbstractDataType]) extends Expression with ImplicitCastInputTypes with Unevaluable { override def nullable: Boolean = true override def dataType: DataType = StringType } case class UnresolvedTestPlan() extends LeafNode { override lazy val resolved = false override def output: Seq[Attribute] = Nil } class AnalysisErrorSuite extends AnalysisTest { import TestRelations._ def errorTest( name: String, plan: LogicalPlan, errorMessages: Seq[String], caseSensitive: Boolean = 
true): Unit = { test(name) { assertAnalysisError(plan, errorMessages, caseSensitive) } } val dateLit = Literal.create(null, DateType) errorTest( "scalar subquery with 2 columns", testRelation.select( (ScalarSubquery(testRelation.select($"a", dateLit.as("b"))) + Literal(1)).as("a")), "Scalar subquery must return only one column, but got 2" :: Nil) errorTest( "scalar subquery with no column", testRelation.select(ScalarSubquery(LocalRelation()).as("a")), "Scalar subquery must return only one column, but got 0" :: Nil) errorTest( "single invalid type, single arg", testRelation.select(TestFunction(dateLit :: Nil, IntegerType :: Nil).as("a")), "cannot resolve" :: "testfunction(CAST(NULL AS DATE))" :: "argument 1" :: "requires int type" :: "'CAST(NULL AS DATE)' is of date type" :: Nil) errorTest( "single invalid type, second arg", testRelation.select( TestFunction(dateLit :: dateLit :: Nil, DateType :: IntegerType :: Nil).as("a")), "cannot resolve" :: "testfunction(CAST(NULL AS DATE), CAST(NULL AS DATE))" :: "argument 2" :: "requires int type" :: "'CAST(NULL AS DATE)' is of date type" :: Nil) errorTest( "multiple invalid type", testRelation.select( TestFunction(dateLit :: dateLit :: Nil, IntegerType :: IntegerType :: Nil).as("a")), "cannot resolve" :: "testfunction(CAST(NULL AS DATE), CAST(NULL AS DATE))" :: "argument 1" :: "argument 2" :: "requires int type" :: "'CAST(NULL AS DATE)' is of date type" :: Nil) errorTest( "invalid window function", testRelation2.select( WindowExpression( Literal(0), WindowSpecDefinition( UnresolvedAttribute("a") :: Nil, SortOrder(UnresolvedAttribute("b"), Ascending) :: Nil, UnspecifiedFrame)).as("window")), "not supported within a window function" :: Nil) errorTest( "distinct aggregate function in window", testRelation2.select( WindowExpression( AggregateExpression(Count(UnresolvedAttribute("b")), Complete, isDistinct = true), WindowSpecDefinition( UnresolvedAttribute("a") :: Nil, SortOrder(UnresolvedAttribute("b"), Ascending) :: Nil, UnspecifiedFrame)).as("window")), "Distinct window functions are not supported" :: Nil) errorTest( "window aggregate function with filter predicate", testRelation2.select( WindowExpression( AggregateExpression( Count(UnresolvedAttribute("b")), Complete, isDistinct = false, filter = Some(UnresolvedAttribute("b") > 1)), WindowSpecDefinition( UnresolvedAttribute("a") :: Nil, SortOrder(UnresolvedAttribute("b"), Ascending) :: Nil, UnspecifiedFrame)).as("window")), "window aggregate function with filter predicate is not supported" :: Nil ) errorTest( "distinct function", CatalystSqlParser.parsePlan("SELECT hex(DISTINCT a) FROM TaBlE"), "DISTINCT or FILTER specified, but hex is not an aggregate function" :: Nil) errorTest( "non aggregate function with filter predicate", CatalystSqlParser.parsePlan("SELECT hex(a) FILTER (WHERE c = 1) FROM TaBlE2"), "DISTINCT or FILTER specified, but hex is not an aggregate function" :: Nil) errorTest( "distinct window function", CatalystSqlParser.parsePlan("SELECT percent_rank(DISTINCT a) OVER () FROM TaBlE"), "DISTINCT or FILTER specified, but percent_rank is not an aggregate function" :: Nil) errorTest( "window function with filter predicate", CatalystSqlParser.parsePlan("SELECT percent_rank(a) FILTER (WHERE c > 1) OVER () FROM TaBlE2"), "DISTINCT or FILTER specified, but percent_rank is not an aggregate function" :: Nil) errorTest( "higher order function with filter predicate", CatalystSqlParser.parsePlan("SELECT aggregate(array(1, 2, 3), 0, (acc, x) -> acc + x) " + "FILTER (WHERE c > 1)"), "FILTER 
predicate specified, but aggregate is not an aggregate function" :: Nil) errorTest( "DISTINCT aggregate function with filter predicate", CatalystSqlParser.parsePlan("SELECT count(DISTINCT a) FILTER (WHERE c > 1) FROM TaBlE2"), "DISTINCT and FILTER cannot be used in aggregate functions at the same time" :: Nil) errorTest( "non-deterministic filter predicate in aggregate functions", CatalystSqlParser.parsePlan("SELECT count(a) FILTER (WHERE rand(int(c)) > 1) FROM TaBlE2"), "FILTER expression is non-deterministic, it cannot be used in aggregate functions" :: Nil) errorTest( "nested aggregate functions", testRelation.groupBy($"a")( AggregateExpression( Max(AggregateExpression(Count(Literal(1)), Complete, isDistinct = false)), Complete, isDistinct = false)), "not allowed to use an aggregate function in the argument of another aggregate function." :: Nil ) errorTest( "offset window function", testRelation2.select( WindowExpression( new Lead(UnresolvedAttribute("b")), WindowSpecDefinition( UnresolvedAttribute("a") :: Nil, SortOrder(UnresolvedAttribute("b"), Ascending) :: Nil, SpecifiedWindowFrame(RangeFrame, Literal(1), Literal(2)))).as("window")), "window frame" :: "must match the required frame" :: Nil) errorTest( "too many generators", listRelation.select(Explode($"list").as("a"), Explode($"list").as("b")), "only one generator" :: "explode" :: Nil) errorTest( "unresolved attributes", testRelation.select($"abcd"), "cannot resolve" :: "abcd" :: Nil) errorTest( "unresolved attributes with a generated name", testRelation2.groupBy($"a")(max($"b")) .where(sum($"b") > 0) .orderBy($"havingCondition".asc), "cannot resolve" :: "havingCondition" :: Nil) errorTest( "unresolved star expansion in max", testRelation2.groupBy($"a")(sum(UnresolvedStar(None))), "Invalid usage of '*'" :: "in expression 'sum'" :: Nil) errorTest( "sorting by unsupported column types", mapRelation.orderBy($"map".asc), "sort" :: "type" :: "map<int,int>" :: Nil) errorTest( "sorting by attributes are not from grouping expressions", testRelation2.groupBy($"a", $"c")($"a", $"c", count($"a").as("a3")).orderBy($"b".asc), "cannot resolve" :: "'`b`'" :: "given input columns" :: "[a, a3, c]" :: Nil) errorTest( "non-boolean filters", testRelation.where(Literal(1)), "filter" :: "'1'" :: "not a boolean" :: Literal(1).dataType.simpleString :: Nil) errorTest( "non-boolean join conditions", testRelation.join(testRelation, condition = Some(Literal(1))), "condition" :: "'1'" :: "not a boolean" :: Literal(1).dataType.simpleString :: Nil) errorTest( "missing group by", testRelation2.groupBy($"a")($"b"), "'`b`'" :: "group by" :: Nil ) errorTest( "ambiguous field", nestedRelation.select($"top.duplicateField"), "Ambiguous reference to fields" :: "duplicateField" :: Nil, caseSensitive = false) errorTest( "ambiguous field due to case insensitivity", nestedRelation.select($"top.differentCase"), "Ambiguous reference to fields" :: "differentCase" :: "differentcase" :: Nil, caseSensitive = false) errorTest( "missing field", nestedRelation2.select($"top.c"), "No such struct field" :: "aField" :: "bField" :: "cField" :: Nil, caseSensitive = false) errorTest( "catch all unresolved plan", UnresolvedTestPlan(), "unresolved" :: Nil) errorTest( "union with unequal number of columns", testRelation.union(testRelation2), "union" :: "number of columns" :: testRelation2.output.length.toString :: testRelation.output.length.toString :: Nil) errorTest( "intersect with unequal number of columns", testRelation.intersect(testRelation2, isAll = false), "intersect" :: "number of 
columns" :: testRelation2.output.length.toString :: testRelation.output.length.toString :: Nil) errorTest( "except with unequal number of columns", testRelation.except(testRelation2, isAll = false), "except" :: "number of columns" :: testRelation2.output.length.toString :: testRelation.output.length.toString :: Nil) errorTest( "union with incompatible column types", testRelation.union(nestedRelation), "union" :: "the compatible column types" :: Nil) errorTest( "union with a incompatible column type and compatible column types", testRelation3.union(testRelation4), "union" :: "the compatible column types" :: "map" :: "decimal" :: Nil) errorTest( "intersect with incompatible column types", testRelation.intersect(nestedRelation, isAll = false), "intersect" :: "the compatible column types" :: Nil) errorTest( "intersect with a incompatible column type and compatible column types", testRelation3.intersect(testRelation4, isAll = false), "intersect" :: "the compatible column types" :: "map" :: "decimal" :: Nil) errorTest( "except with incompatible column types", testRelation.except(nestedRelation, isAll = false), "except" :: "the compatible column types" :: Nil) errorTest( "except with a incompatible column type and compatible column types", testRelation3.except(testRelation4, isAll = false), "except" :: "the compatible column types" :: "map" :: "decimal" :: Nil) errorTest( "SPARK-9955: correct error message for aggregate", // When parse SQL string, we will wrap aggregate expressions with UnresolvedAlias. testRelation2.where($"bad_column" > 1).groupBy($"a")(UnresolvedAlias(max($"b"))), "cannot resolve '`bad_column`'" :: Nil) errorTest( "slide duration greater than window in time window", testRelation2.select( TimeWindow(Literal("2016-01-01 01:01:01"), "1 second", "2 second", "0 second").as("window")), s"The slide duration " :: " must be less than or equal to the windowDuration " :: Nil ) errorTest( "start time greater than slide duration in time window", testRelation.select( TimeWindow(Literal("2016-01-01 01:01:01"), "1 second", "1 second", "1 minute").as("window")), "The absolute value of start time " :: " must be less than the slideDuration " :: Nil ) errorTest( "start time equal to slide duration in time window", testRelation.select( TimeWindow(Literal("2016-01-01 01:01:01"), "1 second", "1 second", "1 second").as("window")), "The absolute value of start time " :: " must be less than the slideDuration " :: Nil ) errorTest( "SPARK-21590: absolute value of start time greater than slide duration in time window", testRelation.select( TimeWindow(Literal("2016-01-01 01:01:01"), "1 second", "1 second", "-1 minute").as("window")), "The absolute value of start time " :: " must be less than the slideDuration " :: Nil ) errorTest( "SPARK-21590: absolute value of start time equal to slide duration in time window", testRelation.select( TimeWindow(Literal("2016-01-01 01:01:01"), "1 second", "1 second", "-1 second").as("window")), "The absolute value of start time " :: " must be less than the slideDuration " :: Nil ) errorTest( "negative window duration in time window", testRelation.select( TimeWindow(Literal("2016-01-01 01:01:01"), "-1 second", "1 second", "0 second").as("window")), "The window duration " :: " must be greater than 0." :: Nil ) errorTest( "zero window duration in time window", testRelation.select( TimeWindow(Literal("2016-01-01 01:01:01"), "0 second", "1 second", "0 second").as("window")), "The window duration " :: " must be greater than 0." 
:: Nil ) errorTest( "negative slide duration in time window", testRelation.select( TimeWindow(Literal("2016-01-01 01:01:01"), "1 second", "-1 second", "0 second").as("window")), "The slide duration " :: " must be greater than 0." :: Nil ) errorTest( "zero slide duration in time window", testRelation.select( TimeWindow(Literal("2016-01-01 01:01:01"), "1 second", "0 second", "0 second").as("window")), "The slide duration" :: " must be greater than 0." :: Nil ) errorTest( "generator nested in expressions", listRelation.select(Explode($"list") + 1), "Generators are not supported when it's nested in expressions, but got: (explode(list) + 1)" :: Nil ) errorTest( "SPARK-30998: unsupported nested inner generators", { val nestedListRelation = LocalRelation( AttributeReference("nestedList", ArrayType(ArrayType(IntegerType)))()) nestedListRelation.select(Explode(Explode($"nestedList"))) }, "Generators are not supported when it's nested in expressions, but got: " + "explode(explode(nestedList))" :: Nil ) errorTest( "SPARK-30998: unsupported nested inner generators for aggregates", testRelation.select(Explode(Explode( CreateArray(CreateArray(min($"a") :: max($"a") :: Nil) :: Nil)))), "Generators are not supported when it's nested in expressions, but got: " + "explode(explode(array(array(min(a), max(a)))))" :: Nil ) errorTest( "generator nested in expressions for aggregates", testRelation.select(Explode(CreateArray(min($"a") :: max($"a") :: Nil)) + 1), "Generators are not supported when it's nested in expressions, but got: " + "(explode(array(min(a), max(a))) + 1)" :: Nil ) errorTest( "generator appears in operator which is not Project", listRelation.sortBy(Explode($"list").asc), "Generators are not supported outside the SELECT clause, but got: Sort" :: Nil ) errorTest( "an evaluated limit class must not be null", testRelation.limit(Literal(null, IntegerType)), "The evaluated limit expression must not be null, but got " :: Nil ) errorTest( "num_rows in limit clause must be equal to or greater than 0", listRelation.limit(-1), "The limit expression must be equal to or greater than 0, but got -1" :: Nil ) errorTest( "more than one generators in SELECT", listRelation.select(Explode($"list"), Explode($"list")), "Only one generator allowed per select clause but found 2: explode(list), explode(list)" :: Nil ) errorTest( "more than one generators for aggregates in SELECT", testRelation.select(Explode(CreateArray(min($"a") :: Nil)), Explode(CreateArray(max($"a") :: Nil))), "Only one generator allowed per select clause but found 2: " + "explode(array(min(a))), explode(array(max(a)))" :: Nil ) test("SPARK-6452 regression test") { // CheckAnalysis should throw AnalysisException when Aggregate contains missing attribute(s) // Since we manually construct the logical plan at here and Sum only accept // LongType, DoubleType, and DecimalType. We use LongType as the type of a. val attrA = AttributeReference("a", LongType)(exprId = ExprId(1)) val otherA = AttributeReference("a", LongType)(exprId = ExprId(2)) val attrC = AttributeReference("c", LongType)(exprId = ExprId(3)) val aliases = Alias(sum(attrA), "b")() :: Alias(sum(attrC), "d")() :: Nil val plan = Aggregate( Nil, aliases, LocalRelation(otherA)) assert(plan.resolved) val resolved = s"${attrA.toString},${attrC.toString}" val errorMsg = s"Resolved attribute(s) $resolved missing from ${otherA.toString} " + s"in operator !Aggregate [${aliases.mkString(", ")}]. " + s"Attribute(s) with the same name appear in the operation: a. 
" + "Please check if the right attribute(s) are used." assertAnalysisError(plan, errorMsg :: Nil) } test("error test for self-join") { val join = Join(testRelation, testRelation, Cross, None, JoinHint.NONE) val error = intercept[AnalysisException] { SimpleAnalyzer.checkAnalysis(join) } assert(error.message.contains("Failure when resolving conflicting references in Join")) assert(error.message.contains("Conflicting attributes")) } test("check grouping expression data types") { def checkDataType(dataType: DataType, shouldSuccess: Boolean): Unit = { val plan = Aggregate( AttributeReference("a", dataType)(exprId = ExprId(2)) :: Nil, Alias(sum(AttributeReference("b", IntegerType)(exprId = ExprId(1))), "c")() :: Nil, LocalRelation( AttributeReference("a", dataType)(exprId = ExprId(2)), AttributeReference("b", IntegerType)(exprId = ExprId(1)))) if (shouldSuccess) { assertAnalysisSuccess(plan, true) } else { assertAnalysisError(plan, "expression `a` cannot be used as a grouping expression" :: Nil) } } val supportedDataTypes = Seq( StringType, BinaryType, NullType, BooleanType, ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType, DecimalType(25, 5), DecimalType(6, 5), DateType, TimestampType, ArrayType(IntegerType), new StructType() .add("f1", FloatType, nullable = true) .add("f2", StringType, nullable = true), new StructType() .add("f1", FloatType, nullable = true) .add("f2", ArrayType(BooleanType, containsNull = true), nullable = true), new GroupableUDT()) supportedDataTypes.foreach { dataType => checkDataType(dataType, shouldSuccess = true) } val unsupportedDataTypes = Seq( MapType(StringType, LongType), new StructType() .add("f1", FloatType, nullable = true) .add("f2", MapType(StringType, LongType), nullable = true), new UngroupableUDT()) unsupportedDataTypes.foreach { dataType => checkDataType(dataType, shouldSuccess = false) } } test("we should fail analysis when we find nested aggregate functions") { val plan = Aggregate( AttributeReference("a", IntegerType)(exprId = ExprId(2)) :: Nil, Alias(sum(sum(AttributeReference("b", IntegerType)(exprId = ExprId(1)))), "c")() :: Nil, LocalRelation( AttributeReference("a", IntegerType)(exprId = ExprId(2)), AttributeReference("b", IntegerType)(exprId = ExprId(1)))) assertAnalysisError( plan, "It is not allowed to use an aggregate function in the argument of " + "another aggregate function." 
:: Nil) } test("Join can work on binary types but can't work on map types") { val left = LocalRelation(Symbol("a").binary, Symbol("b").map(StringType, StringType)) val right = LocalRelation(Symbol("c").binary, Symbol("d").map(StringType, StringType)) val plan1 = left.join( right, joinType = Cross, condition = Some(Symbol("a") === Symbol("c"))) assertAnalysisSuccess(plan1) val plan2 = left.join( right, joinType = Cross, condition = Some(Symbol("b") === Symbol("d"))) assertAnalysisError(plan2, "EqualTo does not support ordering on type map" :: Nil) } test("PredicateSubQuery is used outside of a filter") { val a = AttributeReference("a", IntegerType)() val b = AttributeReference("b", IntegerType)() val plan = Project( Seq(a, Alias(InSubquery(Seq(a), ListQuery(LocalRelation(b))), "c")()), LocalRelation(a)) assertAnalysisError(plan, "Predicate sub-queries can only be used" + " in Filter" :: Nil) } test("PredicateSubQuery is used is a nested condition") { val a = AttributeReference("a", IntegerType)() val b = AttributeReference("b", IntegerType)() val c = AttributeReference("c", BooleanType)() val plan1 = Filter(Cast(Not(InSubquery(Seq(a), ListQuery(LocalRelation(b)))), BooleanType), LocalRelation(a)) assertAnalysisError(plan1, "Null-aware predicate sub-queries cannot be used in nested conditions" :: Nil) val plan2 = Filter( Or(Not(InSubquery(Seq(a), ListQuery(LocalRelation(b)))), c), LocalRelation(a, c)) assertAnalysisError(plan2, "Null-aware predicate sub-queries cannot be used in nested conditions" :: Nil) } test("PredicateSubQuery correlated predicate is nested in an illegal plan") { val a = AttributeReference("a", IntegerType)() val b = AttributeReference("b", IntegerType)() val c = AttributeReference("c", IntegerType)() val plan1 = Filter( Exists( Join( LocalRelation(b), Filter(EqualTo(UnresolvedAttribute("a"), c), LocalRelation(c)), LeftOuter, Option(EqualTo(b, c)), JoinHint.NONE)), LocalRelation(a)) assertAnalysisError(plan1, "Accessing outer query column is not allowed in" :: Nil) val plan2 = Filter( Exists( Join( Filter(EqualTo(UnresolvedAttribute("a"), c), LocalRelation(c)), LocalRelation(b), RightOuter, Option(EqualTo(b, c)), JoinHint.NONE)), LocalRelation(a)) assertAnalysisError(plan2, "Accessing outer query column is not allowed in" :: Nil) val plan3 = Filter( Exists(Union(LocalRelation(b), Filter(EqualTo(UnresolvedAttribute("a"), c), LocalRelation(c)))), LocalRelation(a)) assertAnalysisError(plan3, "Accessing outer query column is not allowed in" :: Nil) val plan4 = Filter( Exists( Limit(1, Filter(EqualTo(UnresolvedAttribute("a"), b), LocalRelation(b))) ), LocalRelation(a)) assertAnalysisError(plan4, "Accessing outer query column is not allowed in" :: Nil) val plan5 = Filter( Exists( Sample(0.0, 0.5, false, 1L, Filter(EqualTo(UnresolvedAttribute("a"), b), LocalRelation(b))).select("b") ), LocalRelation(a)) assertAnalysisError(plan5, "Accessing outer query column is not allowed in" :: Nil) } test("Error on filter condition containing aggregate expressions") { val a = AttributeReference("a", IntegerType)() val b = AttributeReference("b", IntegerType)() val plan = Filter(Symbol("a") === UnresolvedFunction("max", Seq(b), true), LocalRelation(a, b)) assertAnalysisError(plan, "Aggregate/Window/Generate expressions are not valid in where clause of the query" :: Nil) } test("SPARK-30811: CTE should not cause stack overflow when " + "it refers to non-existent table with same name") { val plan = With( UnresolvedRelation(TableIdentifier("t")), Seq("t" -> SubqueryAlias("t", Project( 
Alias(Literal(1), "x")() :: Nil, UnresolvedRelation(TableIdentifier("t", Option("nonexist"))))))) assertAnalysisError(plan, "Table or view not found:" :: Nil) } }
goldmedal/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
Scala
apache-2.0
26,492
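// Editor's note: the AnalysisErrorSuite record above is table-driven — each `errorTest(name,
// plan, messages)` call registers one test asserting that analysis of `plan` fails and that
// the error mentions every expected fragment. A framework-free sketch of that pattern (the
// toy `analyze` and all names here are hypothetical, not Catalyst's API):
object ErrorTestSketch {
  final case class AnalysisFailure(message: String)

  // Toy analyzer: rejects any "plan" mentioning a bad column.
  def analyze(plan: String): Either[AnalysisFailure, String] =
    if (plan.contains("bad_column")) Left(AnalysisFailure(s"cannot resolve '$plan'"))
    else Right(plan)

  def errorTest(name: String, plan: String, fragments: Seq[String]): Unit =
    analyze(plan) match {
      case Left(AnalysisFailure(msg)) =>
        assert(fragments.forall(msg.contains), s"$name: '$msg' is missing an expected fragment")
      case Right(_) =>
        sys.error(s"$name: expected analysis to fail")
    }

  def main(args: Array[String]): Unit =
    errorTest("unresolved attribute", "select bad_column", Seq("cannot resolve", "bad_column"))
}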
package pl.touk.nussknacker.engine.process.runner

import pl.touk.nussknacker.engine.graph.EspProcess
import pl.touk.nussknacker.engine.marshall.ScenarioParser
import pl.touk.nussknacker.engine.util.Implicits.SourceIsReleasable

import java.nio.charset.StandardCharsets
import scala.util.Using

trait FlinkRunner {

  protected def readProcessFromArg(arg: String): EspProcess = {
    val canonicalJson = if (arg.startsWith("@")) {
      Using.resource(scala.io.Source.fromFile(arg.substring(1), StandardCharsets.UTF_8.name()))(_.mkString)
    } else {
      arg
    }
    ScenarioParser.parseUnsafe(canonicalJson)
  }
}
TouK/nussknacker
engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/runner/FlinkRunner.scala
Scala
apache-2.0
621
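// Editor's note: `readProcessFromArg` above follows a common CLI convention — an argument
// beginning with "@" names a file whose contents are the payload, anything else is the
// payload itself. A dependency-free sketch of just that convention (the parse step is left
// out; assumes Scala 2.13+, where scala.io.Source is Closeable so no custom Releasable is
// needed; the object name is hypothetical):
object AtFileArgSketch {
  import java.nio.charset.StandardCharsets
  import scala.util.Using

  def readArg(arg: String): String =
    if (arg.startsWith("@")) {
      // "@/tmp/scenario.json" -> read the file at /tmp/scenario.json
      Using.resource(scala.io.Source.fromFile(arg.substring(1), StandardCharsets.UTF_8.name()))(_.mkString)
    } else {
      arg // the argument itself is the payload
    }
}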
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.deploy.worker

import java.io._
import java.nio.charset.StandardCharsets

import scala.collection.JavaConverters._

import com.google.common.io.Files

import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.{ApplicationDescription, ExecutorState}
import org.apache.spark.deploy.DeployMessages.ExecutorStateChanged
import org.apache.spark.deploy.StandaloneResourceUtils.prepareResourcesFile
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.SPARK_EXECUTOR_PREFIX
import org.apache.spark.internal.config.UI._
import org.apache.spark.resource.ResourceInformation
import org.apache.spark.rpc.RpcEndpointRef
import org.apache.spark.util.{ShutdownHookManager, Utils}
import org.apache.spark.util.logging.FileAppender

/**
 * Manages the execution of one executor process.
 * This is currently only used in standalone mode.
 */
private[deploy] class ExecutorRunner(
    val appId: String,
    val execId: Int,
    val appDesc: ApplicationDescription,
    val cores: Int,
    val memory: Int,
    val worker: RpcEndpointRef,
    val workerId: String,
    val webUiScheme: String,
    val host: String,
    val webUiPort: Int,
    val publicAddress: String,
    val sparkHome: File,
    val executorDir: File,
    val workerUrl: String,
    conf: SparkConf,
    val appLocalDirs: Seq[String],
    @volatile var state: ExecutorState.Value,
    val resources: Map[String, ResourceInformation] = Map.empty)
  extends Logging {

  private val fullId = appId + "/" + execId
  private var workerThread: Thread = null
  private var process: Process = null
  private var stdoutAppender: FileAppender = null
  private var stderrAppender: FileAppender = null

  // Timeout to wait for when trying to terminate an executor.
  private val EXECUTOR_TERMINATE_TIMEOUT_MS = 10 * 1000

  // NOTE: This is now redundant with the automated shut-down enforced by the Executor. It might
  // make sense to remove this in the future.
  private var shutdownHook: AnyRef = null

  private[worker] def start(): Unit = {
    workerThread = new Thread("ExecutorRunner for " + fullId) {
      override def run(): Unit = { fetchAndRunExecutor() }
    }
    workerThread.start()
    // Shutdown hook that kills actors on shutdown.
    shutdownHook = ShutdownHookManager.addShutdownHook { () =>
      // It's possible that we arrive here before calling `fetchAndRunExecutor`, then `state` will
      // be `ExecutorState.LAUNCHING`. In this case, we should set `state` to `FAILED`.
      if (state == ExecutorState.LAUNCHING) {
        state = ExecutorState.FAILED
      }
      killProcess(Some("Worker shutting down"))
    }
  }

  /**
   * Kill executor process, wait for exit and notify worker to update resource status.
   *
   * @param message the exception message which caused the executor's death
   */
  private def killProcess(message: Option[String]): Unit = {
    var exitCode: Option[Int] = None
    if (process != null) {
      logInfo("Killing process!")
      if (stdoutAppender != null) {
        stdoutAppender.stop()
      }
      if (stderrAppender != null) {
        stderrAppender.stop()
      }
      exitCode = Utils.terminateProcess(process, EXECUTOR_TERMINATE_TIMEOUT_MS)
      if (exitCode.isEmpty) {
        logWarning("Failed to terminate process: " + process +
          ". This process will likely be orphaned.")
      }
    }
    try {
      worker.send(ExecutorStateChanged(appId, execId, state, message, exitCode))
    } catch {
      case e: IllegalStateException => logWarning(e.getMessage(), e)
    }
  }

  /** Stop this executor runner, including killing the process it launched */
  private[worker] def kill(): Unit = {
    if (workerThread != null) {
      // the workerThread will kill the child process when interrupted
      workerThread.interrupt()
      workerThread = null
      state = ExecutorState.KILLED
      try {
        ShutdownHookManager.removeShutdownHook(shutdownHook)
      } catch {
        case e: IllegalStateException => None
      }
    }
  }

  /** Replace variables such as {{EXECUTOR_ID}} and {{CORES}} in a command argument passed to us */
  private[worker] def substituteVariables(argument: String): String = argument match {
    case "{{WORKER_URL}}" => workerUrl
    case "{{EXECUTOR_ID}}" => execId.toString
    case "{{HOSTNAME}}" => host
    case "{{CORES}}" => cores.toString
    case "{{APP_ID}}" => appId
    case other => other
  }

  /**
   * Download and run the executor described in our ApplicationDescription
   */
  private def fetchAndRunExecutor(): Unit = {
    try {
      val resourceFileOpt = prepareResourcesFile(SPARK_EXECUTOR_PREFIX, resources, executorDir)
      // Launch the process
      val arguments = appDesc.command.arguments ++ resourceFileOpt.map(f =>
        Seq("--resourcesFile", f.getAbsolutePath)).getOrElse(Seq.empty)
      val subsOpts = appDesc.command.javaOpts.map {
        Utils.substituteAppNExecIds(_, appId, execId.toString)
      }
      val subsCommand = appDesc.command.copy(arguments = arguments, javaOpts = subsOpts)
      val builder = CommandUtils.buildProcessBuilder(subsCommand, new SecurityManager(conf),
        memory, sparkHome.getAbsolutePath, substituteVariables)
      val command = builder.command()
      val redactedCommand = Utils.redactCommandLineArgs(conf, command.asScala.toSeq)
        .mkString("\"", "\" \"", "\"")
      logInfo(s"Launch command: $redactedCommand")

      builder.directory(executorDir)
      builder.environment.put("SPARK_EXECUTOR_DIRS", appLocalDirs.mkString(File.pathSeparator))
      // In case we are running this from within the Spark Shell, avoid creating a "scala"
      // parent process for the executor command
      builder.environment.put("SPARK_LAUNCH_WITH_SCALA", "0")

      // Add webUI log urls
      val baseUrl =
        if (conf.get(UI_REVERSE_PROXY)) {
          s"/proxy/$workerId/logPage/?appId=$appId&executorId=$execId&logType="
        } else {
          s"$webUiScheme$publicAddress:$webUiPort/logPage/?appId=$appId&executorId=$execId&logType="
        }
      builder.environment.put("SPARK_LOG_URL_STDERR", s"${baseUrl}stderr")
      builder.environment.put("SPARK_LOG_URL_STDOUT", s"${baseUrl}stdout")

      process = builder.start()
      val header = "Spark Executor Command: %s\n%s\n\n".format(
        redactedCommand, "=" * 40)

      // Redirect its stdout and stderr to files
      val stdout = new File(executorDir, "stdout")
      stdoutAppender = FileAppender(process.getInputStream, stdout, conf)

      val stderr = new File(executorDir, "stderr")
      Files.write(header, stderr, StandardCharsets.UTF_8)
      stderrAppender = FileAppender(process.getErrorStream, stderr, conf)

      state = ExecutorState.RUNNING
      worker.send(ExecutorStateChanged(appId, execId, state, None, None))
      // Wait for it to exit; executor may exit with code 0 (when driver instructs it to shutdown)
      // or with nonzero exit code
      val exitCode = process.waitFor()

      state = ExecutorState.EXITED
      val message = "Command exited with code " + exitCode
      worker.send(ExecutorStateChanged(appId, execId, state, Some(message), Some(exitCode)))
    } catch {
      case interrupted: InterruptedException =>
        logInfo("Runner thread for executor " + fullId + " interrupted")
        state = ExecutorState.KILLED
        killProcess(None)
      case e: Exception =>
        logError("Error running executor", e)
        state = ExecutorState.FAILED
        killProcess(Some(e.toString))
    }
  }
}
dbtsai/spark
core/src/main/scala/org/apache/spark/deploy/worker/ExecutorRunner.scala
Scala
apache-2.0
8,358
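The {{...}} placeholder handling above is a whole-argument, token-for-token substitution. A self-contained sketch of the same idea (all values below are made up):

// Mirrors ExecutorRunner.substituteVariables: an argument that is exactly a
// known {{TOKEN}} is replaced; everything else passes through unchanged.
def substitute(argument: String): String = argument match {
  case "{{EXECUTOR_ID}}" => "7"
  case "{{CORES}}"       => "4"
  case "{{HOSTNAME}}"    => "worker-1.example.com"
  case other             => other
}

val cmd = Seq("--executor-id", "{{EXECUTOR_ID}}", "--cores", "{{CORES}}")
// cmd.map(substitute) == Seq("--executor-id", "7", "--cores", "4")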
package models.party

import net.scalytica.symbiotic.api.types.PartyBaseTypes.PartyId
import net.scalytica.symbiotic.api.types.PersistentType

/**
 * An abstraction defining a Party (person or organisation)
 */
abstract class Party extends PersistentType {
  val id: Option[PartyId]
}
kpmeen/symbiotic
examples/symbiotic-server/app/models/party/Party.scala
Scala
apache-2.0
285
package com.criteo.dev.cluster.s3

import com.criteo.dev.cluster.config.GlobalConfig
import com.criteo.dev.cluster.{CliAction, NodeFactory, Public}
import com.criteo.dev.cluster.docker.{DockerConstants, DockerRunning, DockerUtilities}
import org.slf4j.LoggerFactory

/**
 * Points a local docker dev cluster to be able to run queries against tables in a given s3 bucket.
 */
@Public
object AttachBucketLocalCliAction extends CliAction[Unit] {

  private val logger = LoggerFactory.getLogger(AttachBucketLocalCliAction.getClass)

  override def command: String = "attach-bucket-local"

  override def usageArgs: List[Any] = List("bucket-id", "container.id")

  override def help: String = "Attaches the given local docker cluster to Hive tables located in given S3 bucket. " +
    "Any existing Hive metadata on cluster is not overriden, be aware to maintain consistency."

  override def applyInternal(args: List[String], config: GlobalConfig): Unit = {
    val bucketId = args(0)
    val dockerContainerId = args(1)
    val conf = config.backCompat

    //check if docker container id matches a running docker instance.
    val dockerMeta = DockerUtilities.getDockerContainerMetadata(
      DockerConstants.localClusterContainerLabel,
      Some(dockerContainerId))
    val runningDockerMeta = dockerMeta.filter(_.dockerState == DockerRunning)
    require(runningDockerMeta.length == 1, s"Cannot find running docker container with id $dockerContainerId")

    //add some conf arguments expected by the SshHive command to construct the target node.
    val target = NodeFactory.getDockerNode(config.target.local, runningDockerMeta(0))
    RunS3DdlAction(target, bucketId, copiedLocally = false, conf)

    DockerUtilities.printClusterDockerContainerInfo(conf, runningDockerMeta)
  }
}
criteo/berilia
src/main/scala/com/criteo/dev/cluster/s3/AttachBucketLocalCliAction.scala
Scala
apache-2.0
1,787
package com.rocketfuel.sdbc.cassandra

import cats.effect.Async
import com.datastax.oss.driver.api.core.{CqlSession, CqlSessionBuilder}
import com.rocketfuel.sdbc.base.Logger
import fs2.Stream

object StreamUtils extends Logger {

  /**
   * Create a stream from a managed CqlSession.
   */
  def session[F[_], O](
    builder: CqlSessionBuilder
  )(use: CqlSession => Stream[F, O]
  )(implicit async: Async[F]
  ): Stream[F, O] = {
    val req = toAsync {
      builder.buildAsync()
    }
    def release(session: CqlSession): F[Unit] = {
      async.map(toAsync(session.closeAsync()))(Function.const(()))
    }
    Stream.bracket(req)(release).flatMap(use)
  }
}
rocketfuel/sdbc
cassandra/src/main/scala/com/rocketfuel/sdbc/cassandra/StreamUtils.scala
Scala
bsd-3-clause
666
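For context, Stream.bracket is the fs2 primitive doing the work above: acquire once, release exactly once when the stream terminates, however it terminates. A sketch with a dummy resource in place of a real CqlSession (assumes fs2 3.x and cats-effect 3 on the classpath):

import cats.effect.IO
import cats.effect.unsafe.implicits.global
import fs2.Stream

final class FakeSession { def close(): Unit = println("closed") }

val out: List[String] =
  Stream
    .bracket(IO(new FakeSession))(s => IO(s.close())) // acquire / guaranteed release
    .flatMap(_ => Stream("row1", "row2"))             // "use" the session
    .compile
    .toList
    .unsafeRunSync()
// prints "closed"; out == List("row1", "row2")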
package com.blogspot.ramannanda.scala.algorithms.cp3.medium

import scala.util.control.Breaks._

/**
 * UVA 10911 Problem is to minimize the overall cost of distance between pair of points
 */
object FormingQuizTeams {

  type Point = (Int, Int)

  def doMatching(bitmask: Int, n: Int, distanceTable: Array[Array[Double]]): Double = {
    val dp = Array.fill(1 << 2 * n)(-1.0)
    val target = (1 << 2 * n) - 1

    /**
     * Computes the minimum distance for bitmask until all are matched
     *
     * @param bitmask the bitmask
     * @return the minimum distance
     */
    def doMatchingRec(bitmask: Int): Double = {
      if (dp(bitmask) > -0.5) {
        return dp(bitmask)
      }
      if (bitmask == target) {
        dp(bitmask) = 0
        return dp(bitmask)
      }
      var p1 = 0
      //Some high value
      var cost = 50000000.0
      breakable {
        for (i <- 0 until 2 * n) {
          p1 = i
          if ((bitmask & (1 << p1)) == 0) break
        }
      }
      for (p2 <- p1 + 1 until 2 * n) {
        if ((bitmask & (1 << p2)) == 0) {
          cost = Math.min(cost, distanceTable(p1)(p2) + doMatchingRec(bitmask | 1 << p1 | 1 << p2))
        }
      }
      dp(bitmask) = cost
      cost
    }

    doMatchingRec(bitmask)
  }

  def main(args: Array[String]): Unit = {
    val numPoints = scala.io.StdIn.readLine().trim.toInt
    var points = Vector[Point]()
    for (i <- 0 until numPoints) {
      val xy = scala.io.StdIn.readLine().split("\\s+")
      points = points :+ (xy(0).toInt, xy(1).toInt)
    }
    val distanceTable = Array.ofDim[Double](numPoints, numPoints)
    for (i <- 0 until numPoints - 1) {
      for (j <- i + 1 until numPoints) {
        val distance = Math.hypot(points(i)._1 - points(j)._1, points(i)._2 - points(j)._2)
        distanceTable(i)(j) = distance
        distanceTable(j)(i) = distance
      }
    }
    println(s"distance is ${doMatching(0, points.size / 2, distanceTable)}")
  }
}
ramannanda9/algorithms-in-scala
src/main/scala/com/blogspot/ramannanda/scala/algorithms/cp3/medium/FormingQuizTeams.scala
Scala
gpl-3.0
1,961
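The DP above keys its memo table on a bitmask where bit i set means point i is already paired; the breakable loop locates the first unmatched point. A tiny self-contained illustration of that encoding for 2n = 4 points:

// With 4 points there are 1 << 4 = 16 states; 0b1111 means everyone is paired.
val n = 2
val target = (1 << (2 * n)) - 1             // 15, i.e. 0b1111

def firstFree(mask: Int): Int =             // same scan the breakable loop does
  (0 until 2 * n).find(i => (mask & (1 << i)) == 0).get

// Pairing point 0 with point 2 from the empty state:
val next = 0 | (1 << 0) | (1 << 2)          // 0b0101
assert(firstFree(next) == 1)                // point 1 is the next unmatched one
assert((next | (1 << 1) | (1 << 3)) == target)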
package io.scalac.amqp

import akka.util.ByteString
import com.google.common.net.MediaType
import io.scalac.amqp.Message.{PriorityMax, PriorityMin}
import org.joda.time.DateTime

import scala.concurrent.duration.Duration

object Message {
  val PriorityMin = 0
  val PriorityMax = 9
}

final case class Message(
  /** Message body. */
  body: ByteString = ByteString.empty,

  /** The RFC-2046 MIME type for the messages application-data section (body).
    * Can contain a charset parameter defining the character encoding
    * used: e.g., ’text/plain; charset=“utf-8”’. Where the content type is unknown
    * the content-type SHOULD NOT be set, allowing the recipient to determine the actual type.
    * Where the section is known to be truly opaque binary data, the content-type SHOULD be set
    * to application/octet-stream. */
  contentType: Option[MediaType] = None,

  /** When present, describes additional content encodings applied to the application-data,
    * and thus what decoding mechanisms need to be applied in order to obtain the media-type
    * referenced by the content-type header field. Primarily used to allow a document to be
    * compressed without losing the identity of its underlying content type. A modifier to
    * the content-type, interpreted as per section 3.5 of RFC 2616. Valid content-encodings
    * are registered at IANA. Implementations SHOULD NOT use the compress encoding, except
    * as to remain compatible with messages originally sent with other protocols, e.g. HTTP or SMTP.
    * Implementations SHOULD NOT specify multiple content-encoding values except as to be
    * compatible with messages originally sent with other protocols, e.g. HTTP or SMTP. */
  contentEncoding: Option[String] = None,

  /** An application-specified list of header parameters and their values.
    * These may be setup for application-only use. Additionally, it is possible to create
    * queues with "Header Exchange Type" - when the queue is created, it is given a series
    * of header property names to match, each with optional values to be matched, so that
    * routing to this queue occurs via header-matching. */
  headers: Map[String, String] = Map(),

  /** Whether message should be persisted to disk.
    * Only works for queues that implement persistence. A persistent message is held securely
    * on disk and guaranteed to be delivered even if there is a serious network failure,
    * server crash, overflow etc. */
  mode: DeliveryMode = Persistent,

  /** The relative message priority (0 to 9).
    * A high priority message is sent ahead of lower priority messages waiting
    * in the same message queue. When messages must be discarded in order to maintain
    * a specific service quality level the server will first discard low-priority messages.
    * Only works for queues that implement priorities. */
  priority: Option[Int] = None,

  /** Message correlated to this one, e.g. what request this message is a reply to.
    * Applications are encouraged to use this attribute instead of putting this information
    * into the message payload. */
  correlationId: Option[String] = None,

  /** Queue name other apps should send the response to.
    * Commonly used to name a reply queue (or any other identifier that helps a consumer
    * application to direct its response). Applications are encouraged to use this attribute
    * instead of putting this information into the message payload. */
  replyTo: Option[String] = None,

  /** Expiration time after which the message will be deleted.
    * The value of the expiration field describes the TTL period in milliseconds.
    * When both a per-queue and a per-message TTL are specified, the lower value between the two will be chosen. */
  expiration: Duration = Duration.Inf,

  /** Message identifier as a string. If applications need to identify messages,
    * it is recommended that they use this attribute instead of putting it into the message payload. */
  messageId: Option[String] = None,

  /** Timestamp of the moment when message was sent. */
  timestamp: Option[DateTime] = None,

  /** Message type, e.g. what type of event or command this message represents.
    * Recommended to be used by applications instead of including this information
    * into the message payload. */
  `type`: Option[String] = None,

  /** Optional user ID. Verified by RabbitMQ against the actual connection username. */
  userId: Option[String] = None,

  /** Identifier of the application that produced the message. */
  appId: Option[String] = None) {

  priority.foreach(priority ⇒ require(priority >= PriorityMin && priority <= PriorityMax,
    s"priority < $PriorityMin || priority > $PriorityMax"))
}
davidwrpayne/reactive-rabbit
src/main/scala/io/scalac/amqp/Message.scala
Scala
apache-2.0
4,741
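A hedged construction sketch for the case class above; it assumes the reactive-rabbit artifact (plus Akka and Guava) is on the classpath. Because of the require in the class body, an out-of-range priority fails fast at construction time:

import akka.util.ByteString
import com.google.common.net.MediaType
import io.scalac.amqp.Message

val msg = Message(
  body        = ByteString("{\"ok\":true}"),
  contentType = Some(MediaType.JSON_UTF_8), // RFC-2046 type with charset
  priority    = Some(5),                    // must stay within 0..9
  replyTo     = Some("replies"))

// Message(priority = Some(42)) would throw IllegalArgumentException
// from the require(...) in the class body.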
package hu.frankdavid.ranking.gui

import java.util.concurrent.atomic.AtomicInteger
import javafx.beans.property.SimpleListProperty
import javafx.concurrent.Task
import javafx.scene.Group
import javafx.scene.chart.XYChart
import javafx.scene.control.Alert.AlertType
import javafx.scene.control.ButtonBar.ButtonData
import javafx.scene.control.{Alert, ButtonType}
import javafx.scene.text.Text

import hu.frankdavid.ranking.TournamentStrategy
import hu.frankdavid.ranking.strategy._
import hu.frankdavid.ranking.workbench.{SingleTestResult, TestResultLike, TestRunner}
import myjavafx.spinner.NumberSpinner
import org.controlsfx.dialog.Dialogs

import scalafx.Includes._
import scalafx.application.Platform
import scalafx.beans.property.ObjectProperty
import scalafx.collections.ObservableBuffer
import scalafx.geometry.{Insets, Pos}
import scalafx.scene.Scene
import scalafx.scene.chart.XYChart.{Data, Series}
import scalafx.scene.chart.{BarChart, CategoryAxis, NumberAxis}
import scalafx.scene.control._
import scalafx.scene.layout._
import scalafx.stage.{Screen, Stage}

class ChartScene(stage: Stage) extends Scene {

  val results = new SimpleListProperty[(TournamentStrategy, TestResultLike)](
    ObservableBuffer.empty[(TournamentStrategy, TestResultLike)])

  val strategyPicker = new StrategyPicker()

  val numberOfPlayersSpinner = new NumberSpinner()
  numberOfPlayersSpinner.setMinValue(1)
  numberOfPlayersSpinner.setValue(16)

  Platform.runLater {
    window().width = Screen.primary.bounds.width
    window().height = Screen.primary.bounds.height
    window().centerOnScreen()
    window().opacity = 1
  }

  val resultPredictionInaccuracySlider = new Slider(1e-10, 1, 0) {
    blockIncrement = 0.1
    majorTickUnit = 0.1
    showTickLabels = true
    hgrow = Priority.Always
  }

  val maxParallelismSlider = new Slider(0, 1, 1) {
    blockIncrement = 0.1
    majorTickUnit = 0.1
    showTickLabels = true
    hgrow = Priority.Always
  }

  val playerPerformanceDeviationSlider = new Slider(1e-10, 1, 0) {
    blockIncrement = 0.1
    majorTickUnit = 0.1
    showTickLabels = true
    hgrow = Priority.Always
  }

  val topNSpinner = new NumberSpinner()
  topNSpinner.setMinValue(1)
  topNSpinner.setValue(3)
  topNSpinner.maxValueProperty() <== numberOfPlayersSpinner.valueProperty()

  val testCasesSpinner = new NumberSpinner()
  testCasesSpinner.setMinValue(1)
  testCasesSpinner.setValue(1)

  val selectedStrategyResult = new ObjectProperty[TestResultLike](this, "selectedStrategyResult")

  root = new SplitPane {
    prefWidth = Region.USE_COMPUTED_SIZE
    padding = Insets(10)
    dividerPositions = 0.3
    items ++= Seq(
      new VBox {
        padding = Insets(10)
        spacing = 10
        alignment = Pos.CenterRight
        content = Seq(
          new GridPane {
            spacing = 10
            vgap = 10
            hgap = 5
            add(new Label("Number of test cases: "), 0, 0)
            add(jfxControl2sfx(testCasesSpinner), 1, 0)
            add(new Label("Number of players: "), 0, 1)
            add(jfxControl2sfx(numberOfPlayersSpinner), 1, 1)
            add(new Label("Performance deviation: "), 0, 2)
            add(playerPerformanceDeviationSlider, 1, 2)
            add(new Label("Result prediction inaccuracy: "), 0, 3)
            add(resultPredictionInaccuracySlider, 1, 3)
            add(new Label("Awarded players: "), 0, 4)
            add(jfxControl2sfx(topNSpinner), 1, 4)
            add(new Label("Maximum parallelism: "), 0, 5)
            add(maxParallelismSlider, 1, 5)
            add(strategyPicker, 0, 6, 2, 1)
          },
          new Button("Organize matches") {onAction = simulate _; hgrow = Priority.Always}
        )
      }.delegate,
      new ScrollPane {
        maxHeight = 800
        prefWidth = Screen.primary.bounds.width
        content = new GridPane() {
          hgrow = Priority.Always
          add(new MatchStatisticsBarChart(
            "Max number of matches per player", "Number of matches", _.maxNumberOfGamesPerPlayer), 0, 0)
          add(new MatchStatisticsBarChart("Number of matches", "Number of matches", _.numberOfGames), 1, 0)
          add(new MatchStatisticsBarChart("Number of rounds", "Number of rounds", _.numberOfRounds), 0, 1)
          add(NumberOfGamesPerPlayerBarChart, 1, 1)
          add(new MatchStatisticsBarChart(
            "Result difference (lower is better)", "Result squared distance", _.resultDistance), 0, 2)
          add(new MatchStatisticsBarChart("Silver is correct", "Number of matches", _.placeGuessedCorrectly(1)), 0, 3)
        }
      }.delegate
    )
  }

  stylesheets += getClass.getResource("common.css").toExternalForm

  def simulate(): Unit = {
    val numberOfPlayers = numberOfPlayersSpinner.getValue.intValue()
    val testRunner = new TestRunner(
      numberOfPlayers = numberOfPlayers,
      awardedPlayers = topNSpinner.getValue.intValue(),
      playerPerformanceDeviation = playerPerformanceDeviationSlider.value(),
      resultPredictionDeviation = resultPredictionInaccuracySlider.value(),
      maxParallelism = (maxParallelismSlider.value() * numberOfPlayers / 2).toInt max 1
    )
    val task = new Task[Unit] {
      def call() = {
        val progress = new AtomicInteger()
        val strategies = strategyPicker.strategies
        val newResults = strategies.par.flatMap { strategy =>
          try {
            val runResult = testRunner.runMany(strategy, testCasesSpinner.getValue.intValue())
            Some(strategy -> runResult)
          } catch {
            case StrategyException(message) =>
              handleException(strategy, message)
              None
            case e: Throwable => e.printStackTrace(); None
          } finally {
            updateProgress(progress.incrementAndGet(), strategies.size())
          }
        }.seq
        if (!isCancelled) {
          Platform.runLater {
            results.clear()
            results ++= newResults
            stage.centerOnScreen()
            window().opacity = 1
          }
        }
      }
    }
    new Thread(task).start()
    Dialogs.create().showWorkerProgress(task)
  }

  private def handleException(strategy: TournamentStrategy, message: String) {
    Platform.runLater {
      val alert = new Alert(AlertType.WARNING,
        s"Could not execute simulation of $strategy because of the following error:\n$message",
        new ButtonType("OK", ButtonData.OK_DONE))
      alert.showAndWait()
      alert.setHeight(300)
    }
  }

  private def displayLabelForData(data: XYChart.Data[String, Number]) {
    val node = data.getNode
    val dataString = data.getYValue match {
      case x: java.lang.Double => x.formatted("%.2f")
      case x: java.lang.Float => x.formatted("%.2f")
      case o => o.toString
    }
    val dataText = new Text(dataString)
    node.parentProperty().onChange { (_, old, parent) =>
      if (old != null) {
        val oldParentGroup = old.asInstanceOf[Group]
        oldParentGroup.getChildren.collect {
          case t: Text => Platform.runLater(oldParentGroup.getChildren.remove(t))
        }
      }
      if (parent != null) {
        val parentGroup = parent.asInstanceOf[Group]
        parentGroup.children += dataText
      }
    }
    node.boundsInParentProperty().onChange { (_, _, bounds) =>
      dataText.layoutX = math.round(bounds.getMinX + bounds.getWidth / 2 - dataText.prefWidth(-1) / 2)
      val minY = math.round(bounds.getMaxY - dataText.prefHeight(-1))
      val Y = math.round(bounds.getMinY + dataText.prefHeight(-1))
      dataText.layoutY = math.min(minY, Y)
    }
  }

  private class MatchStatisticsBarChart(chartTitle: String, categoryTitle: String,
                                        attribute: TestResultLike => Number) extends Pane {
    observableList2ObservableBuffer(results).onChange { (newResults, _) =>
      content = new BarChart(CategoryAxis("Strategy"), NumberAxis(categoryTitle)) {
        prefHeight = 300
        title = chartTitle
        legendVisible = false
        val seriesBuffer = ObservableBuffer[XYChart.Data[String, Number]]()
        seriesBuffer ++= newResults.zipWithIndex.map { case ((strategy, result), index) => {
          val label = (index + 1) + ". " + strategy.typeName.replaceAll("(?i)strategy", "")
          val data = Data[String, Number](label, attribute(result))
          data.nodeProperty().onChange { (_, _, node) =>
            displayLabelForData(data)
            node.onMouseEntered = { () => selectedStrategyResult() = result}
          }
          data
        }
        }
        data() = ObservableBuffer(Series(seriesBuffer))
      }
    }
  }

  private object NumberOfGamesPerPlayerBarChart extends Pane {
    selectedStrategyResult.onChange { (_, _, strategyResult) =>
      content = strategyResult match {
        case singleResult: SingleTestResult =>
          new BarChart(CategoryAxis("Player"), NumberAxis("Number of matches")) {
            prefHeight = 300
            title = s"Number of matches per player (${singleResult.strategy.name})"
            legendVisible = false
            categoryGap = 0
            val seriesBuffer = ObservableBuffer[XYChart.Data[String, Number]]()
            seriesBuffer ++= singleResult.expectedResult.map { player =>
              Data[String, Number](player.name, singleResult.games(player))
            }
            data() = ObservableBuffer(Series(seriesBuffer))
          }
        case _ => new Pane()
      }
    }
  }
}
frankdavid/ranking
src/main/scala/hu/frankdavid/ranking/gui/ChartScene.scala
Scala
apache-2.0
9,475
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.hbase

import org.apache.hadoop.hbase.TableName

import org.apache.spark.sql.hbase.{TestHbase, TestBaseWithSplitData}

/**
 * Test insert / query against the table
 */
class HBaseBasicOperationSuite extends TestBaseWithSplitData {
  import org.apache.spark.sql.hbase.TestHbase._

  override def afterAll() = {
    if (TestHbase.hbaseAdmin.tableExists(TableName.valueOf("ht0"))) {
      TestHbase.hbaseAdmin.disableTable(TableName.valueOf("ht0"))
      TestHbase.hbaseAdmin.deleteTable(TableName.valueOf("ht0"))
    }
    if (TestHbase.hbaseAdmin.tableExists(TableName.valueOf("ht1"))) {
      TestHbase.hbaseAdmin.disableTable(TableName.valueOf("ht1"))
      TestHbase.hbaseAdmin.deleteTable(TableName.valueOf("ht1"))
    }
    super.afterAll()
  }

  test("Insert Into table in StringFormat") {
    sql( """CREATE TABLE tb0 (column2 INTEGER, column1 INTEGER, column4 FLOAT, column3 SHORT,
          PRIMARY KEY(column1))
          MAPPED BY (default.ht0, COLS=[column2=family0.qualifier0, column3=family1.qualifier1,
          column4=family2.qualifier2]) IN StringFormat"""
    )
    assert(sql( """SELECT * FROM tb0""").collect().length == 0)
    sql( """INSERT INTO TABLE tb0 SELECT col4,col4,col6,col3 FROM ta""")
    assert(sql( """SELECT * FROM tb0""").collect().length == 14)
    sql( """SELECT * FROM tb0""").show
    sql( """SELECT * FROM tb0 where column2 > 200""").show
    sql( """DROP TABLE tb0""")
    dropNativeHbaseTable("ht0")
  }

  test("Insert and Query Single Row") {
    sql( """CREATE TABLE tb1 (column1 INTEGER, column2 STRING, PRIMARY KEY(column1))
          MAPPED BY (ht1, COLS=[column2=cf.cq])"""
    )
    assert(sql( """SELECT * FROM tb1""").collect().length == 0)
    sql( """INSERT INTO TABLE tb1 VALUES (1024, "abc")""")
    sql( """INSERT INTO TABLE tb1 VALUES (1028, "abd")""")
    assert(sql( """SELECT * FROM tb1""").collect().length == 2)
    assert(
      sql( """SELECT * FROM tb1 WHERE (column1 = 1023 AND column2 ="abc")""").collect().length == 0)
    assert(sql(
      """SELECT * FROM tb1 WHERE (column1 = 1024)
        |OR (column1 = 1028 AND column2 ="abd")""".stripMargin).collect().length == 2)
    sql( """DROP TABLE tb1""")
    dropNativeHbaseTable("ht1")
  }

  test("Insert and Query Single Row in StringFormat") {
    sql(
      """CREATE TABLE tb1 (col1 STRING, col2 BOOL, col3 SHORT, col4 INTEGER,
        | col5 LONG, col6 FLOAT, col7 DOUBLE,
        | PRIMARY KEY(col1))
        | MAPPED BY (ht2, COLS=[col2=cf1.cq11, col3=cf1.cq12, col4=cf1.cq13,
        | col5=cf2.cq21, col6=cf2.cq22, col7=cf2.cq23]) In StringFormat""".stripMargin
    )
    assert(sql( """SELECT * FROM tb1""").collect().length == 0)
    sql( """INSERT INTO TABLE tb1 VALUES ("row1", false, 1000, 5050 , 50000 , 99.99 , 999.999)""")
    sql( """INSERT INTO TABLE tb1 VALUES ("row2", false, 99  , 10000, 9999  , 1000.1, 5000.5)""")
    sql( """INSERT INTO TABLE tb1 VALUES ("row3", true , 555 , 999  , 100000, 500.05, 10000.01)""")
    sql( """SELECT col1 FROM tb1 where col2<true order by col2""")
      .collect().zip(Seq("row1", "row2")).foreach{case (r,s) => assert(r.getString(0) == s)}
    sql( """SELECT * FROM tb1 where col3>500 order by col3""")
      .collect().zip(Seq("row3", "row1")).foreach{case (r,s) => assert(r.getString(0) == s)}
    sql( """SELECT * FROM tb1 where col4>5000 order by col4""")
      .collect().zip(Seq("row1", "row2")).foreach{case (r,s) => assert(r.getString(0) == s)}
    sql( """SELECT * FROM tb1 where col5>50000 order by col5""")
      .collect().zip(Seq("row3")).foreach{case (r,s) => assert(r.getString(0) == s)}
    sql( """SELECT * FROM tb1 where col6>500 order by col6""")
      .collect().zip(Seq("row3", "row2")).foreach{case (r,s) => assert(r.getString(0) == s)}
    sql( """SELECT * FROM tb1 where col7>5000 order by col7""")
      .collect().zip(Seq("row2", "row3")).foreach{case (r,s) => assert(r.getString(0) == s)}
    sql( """DROP TABLE tb1""")
    dropNativeHbaseTable("ht2")
  }

  test("Select test 0") {
    assert(sql( """SELECT * FROM ta""").count() == 14)
  }

  test("Count(*/1) and Non-Key Column Query") {
    assert(sql( """SELECT count(*) FROM ta""").collect()(0).get(0) == 14)
    assert(sql( """SELECT count(*) FROM ta where col2 < 8""").collect()(0).get(0) == 7)
    assert(sql( """SELECT count(*) FROM ta where col4 < 0""").collect()(0).get(0) == 7)
    assert(sql( """SELECT count(1) FROM ta where col2 < 8""").collect()(0).get(0) == 7)
    assert(sql( """SELECT count(1) FROM ta where col4 < 0""").collect()(0).get(0) == 7)
  }

  test("InSet Query") {
    assert(sql( """SELECT count(*) FROM ta where col2 IN (1, 2, 3)""").collect()(0).get(0) == 3)
    assert(sql( """SELECT count(*) FROM ta where col4 IN (1, 2, 3)""").collect()(0).get(0) == 1)
  }

  test("Point Aggregate Query") {
    sql( """CREATE TABLE tb2 (column2 INTEGER, column1 INTEGER, column4 FLOAT, column3 SHORT,
          PRIMARY KEY(column1, column2))
          MAPPED BY (default.ht0, COLS=[column3=family1.qualifier1,
          column4=family2.qualifier2])"""
    )
    sql( """INSERT INTO TABLE tb2 SELECT col4,col4,col6,col3 FROM ta""")
    val result = sql( """SELECT count(*) FROM tb2 where column1=1 AND column2=1""").collect()
    assert(result.size == 1)
    assert(result(0).get(0) == 1)
  }

  test("Select test 1 (AND, OR)") {
    assert(sql( """SELECT * FROM ta WHERE col7 = 255 OR col7 = 127""").collect().length == 2)
    assert(sql( """SELECT * FROM ta WHERE col7 < 0 AND col4 < -255""").collect().length == 4)
  }

  test("Select test 2 (WHERE)") {
    assert(sql( """SELECT * FROM ta WHERE col7 > 128""").count() == 3)
    assert(sql( """SELECT * FROM ta WHERE (col7 - 10 > 128) AND col1 = ' p255 '""").collect().length == 1)
    assert(sql( """SELECT * FROM ta WHERE (col7 > 1) AND (col7 < 1)""").collect().length == 0)
    assert(sql( """SELECT * FROM ta WHERE (col7 > 1) OR (col7 < 1)""").collect().length == 13)
    assert(sql(
      """SELECT * FROM ta WHERE
        |((col7 = 1) AND (col1 < ' p255 ') AND (col1 > ' p255 ')) OR
        |((col7 = 2) AND (col1 < ' p255 ') AND (col1 > ' p255 '))
      """.stripMargin).collect().length == 0)
    assert(sql(
      """SELECT * FROM ta WHERE
        |((col7 = 1) AND (col3 < 128) AND (col3 > 128)) OR
        |((col7 = 2) AND (col3 < 127) AND (col3 > 127))
      """.stripMargin).collect().length == 0)
  }

  test("Select test 3 (ORDER BY)") {
    val result = sql( """SELECT col1, col7 FROM ta ORDER BY col7 DESC""").collect()
    val sortedResult = result.sortWith(
      (r1, r2) => r1(1).asInstanceOf[Int] > r2(1).asInstanceOf[Int])
    for ((r1, r2) <- result zip sortedResult) {
      assert(r1.equals(r2))
    }
  }

  test("Select test 4 (join)") {
    assert(sql( """SELECT ta.col2 FROM ta join tb on ta.col4=tb.col7""").collect().length == 2)
    assert(sql( """SELECT * FROM ta FULL OUTER JOIN tb WHERE tb.col7 = 1""").collect().length == 14)
    assert(sql( """SELECT * FROM ta LEFT JOIN tb WHERE tb.col7 = 1""").collect().length == 14)
    assert(sql( """SELECT * FROM ta RIGHT JOIN tb WHERE tb.col7 = 1""").collect().length == 14)
  }

  test("Alter Add column and Alter Drop column") {
    assert(sql( """SELECT * FROM ta""").collect()(0).size == 7)
    sql( """ALTER TABLE ta ADD col8 STRING MAPPED BY (col8 = cf1.cf13)""")
    assert(sql( """SELECT * FROM ta""").collect()(0).size == 8)
    sql( """ALTER TABLE ta DROP col8""")
    assert(sql( """SELECT * FROM ta""").collect()(0).size == 7)
  }
}
yzhou2001/HSpark
src/test/scala/org/apache/spark/sql/hbase/HBaseBasicOperationSuite.scala
Scala
apache-2.0
8,349
package dk.tennis.compare.rating.multiskill.model.perfdiff.skillsfactor.cov.opponentseiso

import dk.tennis.compare.rating.multiskill.model.perfdiff.skillsfactor.cov.CovFunc
import dk.tennis.compare.rating.multiskill.model.perfdiff.Player
import dk.tennis.compare.rating.multiskill.model.perfdiff.skillsfactor.cov.opponent.PlayerSkill
import dk.bayes.math.covfunc.CovSEiso

import scala.math._
import breeze.linalg.DenseMatrix

case class OpponentSeIsoCovFunc(params: Seq[Double]) extends CovFunc {

  private val Seq(logSf, logEll) = params

  private val opponentCovFunc = CovSEiso(logSf, logEll)

  def withParams(newParams: Seq[Double]): CovFunc = OpponentSeIsoCovFunc(newParams)

  def withPlayerSkills(getPlayerSkill: (Player) => PlayerSkill): CovFunc =
    throw new UnsupportedOperationException("Not implemented yet")

  def getParams(): Seq[Double] = params

  def covariance(player1: Player, player2: Player): Double = {
    val (player1Vec, player2Vec) =
      if (player1.opponentName.equals(player2.opponentName)) (Array(0d), Array(0d))
      else (Array(0d, 1), Array(1d, 0))

    val cov = opponentCovFunc.cov(player1Vec, player2Vec)
    cov
  }

  def covarianceD(player1: Player, player2: Player, paramIndex: Int): Double = {
    val (player1Vec, player2Vec) =
      if (player1.opponentName.equals(player2.opponentName)) (Array(0d), Array(0d))
      else (Array(0d, 1), Array(1d, 0))

    val covD = paramIndex match {
      case 0 => opponentCovFunc.df_dSf(player1Vec, player2Vec)
      case 1 => opponentCovFunc.df_dEll(player1Vec, player2Vec)
    }
    covD
  }

  def save(file: String) = throw new UnsupportedOperationException("Not implemented yet")
}
danielkorzekwa/tennis-player-compare
multiskill/src/main/scala/dk/tennis/compare/rating/multiskill/model/perfdiff/skillsfactor/cov/opponentseiso/OpponentSeIsoCovFunc.scala
Scala
bsd-2-clause
1,642
package synereo.client.rootmodels

// scalastyle:off
case class SessionRootModel(sessionUri: String = "")
LivelyGig/ProductWebUI
sclient/src/main/scala/synereo/client/rootmodels/SessionRootModel.scala
Scala
apache-2.0
106
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.parser

import scala.language.implicitConversions
import scala.util.matching.Regex
import scala.util.parsing.combinator.PackratParsers
import scala.util.parsing.combinator.syntactical.StandardTokenParsers

import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.catalyst.SqlLexical
import org.apache.spark.sql.catalyst.plans.logical.{Command, LogicalPlan}
import org.apache.spark.sql.optimizer.MVRewriteRule
import org.apache.spark.sql.util.SparkSQLUtil

import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
import org.apache.carbondata.view.MVFunctions

class MVQueryParser extends StandardTokenParsers with PackratParsers {

  // Keywords used in this parser
  protected val SELECT: Regex = carbonKeyWord("SELECT")

  /**
   * This will convert key word to regular expression.
   */
  private def carbonKeyWord(keys: String): Regex = {
    ("(?i)" + keys).r
  }

  implicit def regexToParser(regex: Regex): Parser[String] = {
    import lexical.Identifier
    acceptMatch(
      s"identifier matching regex ${ regex }",
      { case Identifier(str) if regex.unapplySeq(str).isDefined => str }
    )
  }

  // By default, use Reflection to find the reserved words defined in the sub class.
  // NOTICE, Since the Keyword properties defined by sub class, we couldn't call this
  // method during the parent class instantiation, because the sub class instance
  // isn't created yet.
  protected lazy val reservedWords: Seq[String] =
    this
      .getClass
      .getMethods
      .filter(_.getReturnType == classOf[Keyword])
      .map(_.invoke(this).asInstanceOf[Keyword].normalize)

  // Set the keywords as empty by default, will change that later.
  override val lexical = new SqlLexical

  protected case class Keyword(str: String) {
    def normalize: String = lexical.normalizeKeyword(str)
    def parser: Parser[String] = normalize
  }

  // Returns the rest of the input string that are not parsed yet
  private lazy val query: Parser[String] = new Parser[String] {
    def apply(input: Input): ParseResult[String] =
      Success(
        input.source.subSequence(input.offset, input.source.length()).toString,
        input.drop(input.source.length()))
  }

  private lazy val queryAppendDummyFunction: Parser[String] =
    SELECT ~> query <~ opt(";") ^^ {
      case query =>
        "SELECT " + MVFunctions.DUMMY_FUNCTION + "() as " + MVFunctions.DUMMY_FUNCTION + ", " + query
    }

  def parseAndAppendDummyFunction(sql: String): String = {
    queryAppendDummyFunction(new lexical.Scanner(sql)) match {
      case Success(query, _) => query
      case _ => throw new MalformedCarbonCommandException(s"Unsupported query")
    }
  }
}

object MVQueryParser {

  def getQuery(query: String, session: SparkSession): DataFrame = {
    SparkSQLUtil
      .execute(getQueryPlan(query, session), session)
      .drop(MVFunctions.DUMMY_FUNCTION)
  }

  def getQueryPlan(query: String, session: SparkSession): LogicalPlan = {
    val updatedQuery = new MVQueryParser().parseAndAppendDummyFunction(query)
    val analyzedPlan = session.sql(updatedQuery).queryExecution.analyzed
    analyzedPlan match {
      case _: Command => analyzedPlan
      case _ =>
        val optimizedRule = new MVRewriteRule(session)
        if (optimizedRule != null) {
          optimizedRule.apply(analyzedPlan)
        } else {
          analyzedPlan
        }
    }
  }
}
zzcclp/carbondata
integration/spark/src/main/scala/org/apache/spark/sql/parser/MVQueryParser.scala
Scala
apache-2.0
4,260
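The net effect of parseAndAppendDummyFunction is a textual rewrite: splice a marker column in right after SELECT, so it can be dropped again after execution. A self-contained approximation (the real marker name comes from MVFunctions.DUMMY_FUNCTION; "dummy_fn" below is a stand-in, and the real parser keeps the query remainder verbatim via the `query` parser rather than string surgery):

// Stand-in for MVFunctions.DUMMY_FUNCTION; the actual name is project-defined.
val DummyFunction = "dummy_fn"

def appendDummy(query: String): String = {
  val rest = query.trim.stripSuffix(";")
  require(rest.toUpperCase.startsWith("SELECT"), "Unsupported query")
  s"SELECT $DummyFunction() as $DummyFunction, " + rest.drop("SELECT".length).trim
}

// appendDummy("SELECT a FROM t") ==
//   "SELECT dummy_fn() as dummy_fn, a FROM t"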
package org.bitcoins.core.util

import org.bitcoins.core.gen.NumberGenerator
import org.bitcoins.core.number.{ UInt32, UInt8 }
import org.scalacheck.{ Gen, Prop, Properties }

/**
 * Created by chris on 6/20/16.
 */
class NumberUtilSpec extends Properties("NumberUtilSpec") {
  private val logger = BitcoinSLogger.logger

  property("Serialization symmetry for BigInt") =
    Prop.forAll(NumberGenerator.bigInts) { bigInt: BigInt =>
      NumberUtil.toBigInt(BitcoinSUtil.encodeHex(bigInt)) == bigInt
    }

  property("serialization symmetry for ints") =
    Prop.forAll { int: Int =>
      NumberUtil.toInt(BitcoinSUtil.encodeHex(int)) == int
    }

  property("serialization symmetry for longs") =
    Prop.forAll { long: Long =>
      NumberUtil.toLong(BitcoinSUtil.encodeHex(long)) == long
    }

  property("convertBits symmetry") = {
    Prop.forAllNoShrink(Gen.choose(1, 8), NumberGenerator.uInt8s) {
      case (to, u8s: Seq[UInt8]) =>
        //TODO: in the future make this a generated value instead of fixed to 8
        //but the trick is we need to make sure that the u8s generated are valid numbers in the 'from' base
        val u32From = UInt32(8.toShort)
        val u32To = UInt32(to.toShort)
        val converted = NumberUtil.convertUInt8s(u8s, u32From, u32To, true)
        val original = converted.flatMap(c => NumberUtil.convertUInt8s(c, u32To, u32From, false))
        if (original.isFailure) {
          throw original.failed.get
        } else {
          original.get == u8s
        }
    }
  }
}
Christewart/bitcoin-s-core
src/test/scala/org/bitcoins/core/util/NumberUtilSpec.scala
Scala
mit
1,524
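A self-contained sketch of the base conversion the convertBits property round-trips: regroup a stream of 8-bit units into smaller units and back, MSB first (the same regrouping Bech32 uses). This mirrors the idea only; it is not bitcoin-s's exact convertUInt8s signature:

// Regroup `data` from `from`-bit units into `to`-bit units, MSB first.
def convertBits(data: Seq[Int], from: Int, to: Int, pad: Boolean): Seq[Int] = {
  var acc = 0; var bits = 0
  val out = Seq.newBuilder[Int]
  for (d <- data) {
    acc = (acc << from) | d
    bits += from
    while (bits >= to) { bits -= to; out += (acc >> bits) & ((1 << to) - 1) }
  }
  if (pad && bits > 0) out += (acc << (to - bits)) & ((1 << to) - 1)
  out.result()
}

val u8s = Seq(0xff, 0x00)
val u5s = convertBits(u8s, 8, 5, pad = true)        // Seq(31, 28, 0, 0)
assert(convertBits(u5s, 5, 8, pad = false) == u8s)  // round-trips, padding dropped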
/*
 * Copyright (c) 2009 Sony Pictures Imageworks Inc.
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met: * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the
 * distribution.  Neither the name of Sony Pictures Imageworks nor the
 * names of its contributors may be used to endorse or promote
 * products derived from this software without specific prior written
 * permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package com.imageworks.migration.tests.duplicate_versions

import com.imageworks.migration.Migration

class Migrate_20081118191214_Foo extends Migration {
  def up() {}

  def down() {}
}
imageworks/scala-migrations
src/test/scala/com/imageworks/migration/tests/duplicate_versions/Migrate_20081118191214_Foo.scala
Scala
bsd-3-clause
1,778
/*
 * Copyright 2014 IBM Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.ibm.spark.kernel.protocol.v5.kernel.socket

import akka.actor.{Props, ActorRef, ActorSystem}
import com.ibm.spark.communication.actors.{RouterSocketActor, RepSocketActor, PubSocketActor}

object SocketFactory {
  def apply(socketConfig: SocketConfig) = {
    new SocketFactory(socketConfig)
  }
}

/**
 * A Factory class to provide various socket connections for IPython Kernel Spec
 * @param socketConfig The configuration for the sockets to be properly
 *                     instantiated
 */
class  SocketFactory(socketConfig: SocketConfig) {
  val HeartbeatConnection = SocketConnection(
    socketConfig.transport, socketConfig.ip, socketConfig.hb_port)
  val ShellConnection = SocketConnection(
    socketConfig.transport, socketConfig.ip, socketConfig.shell_port)
  val IOPubConnection = SocketConnection(
    socketConfig.transport, socketConfig.ip, socketConfig.iopub_port)
  val StdinConnection = SocketConnection(
    socketConfig.transport, socketConfig.ip, socketConfig.stdin_port)

  /**
   * Creates a ZeroMQ reply socket representing the server endpoint for
   * heartbeat messages
   * @param system The actor system the socket actor will belong
   * @param listener The actor who will receive
   * @return The ActorRef created for the socket connection
   */
  def Heartbeat(system: ActorSystem, listener: ActorRef) : ActorRef =
    system.actorOf(Props(classOf[RepSocketActor], HeartbeatConnection.toString, listener))
//    ZeroMQExtension(system).newRepSocket(
//      Array(Listener(listener), Bind(HeartbeatConnection.toString))
//    )

  /**
   * Creates a ZeroMQ reply socket representing the server endpoint for shell
   * messages
   * @param system The actor system the socket actor will belong
   * @param listener The actor who will receive
   * @return The ActorRef created for the socket connection
   */
  def Shell(system: ActorSystem, listener: ActorRef) : ActorRef =
    system.actorOf(Props(classOf[RouterSocketActor], ShellConnection.toString, listener))
//    ZeroMQExtension(system).newRouterSocket(
//      Array(Listener(listener), Bind(ShellConnection.toString))
//    )

  /**
   * Creates a ZeroMQ reply socket representing the server endpoint for stdin
   * messages
   * @param system The actor system the socket actor will belong
   * @param listener The actor who will receive
   * @return The ActorRef created for the socket connection
   */
  def Stdin(system: ActorSystem, listener: ActorRef) : ActorRef =
    system.actorOf(Props(classOf[RouterSocketActor], StdinConnection.toString, listener))
//    ZeroMQExtension(system).newRouterSocket(
//      Array(Listener(listener), Bind(StdinConnection.toString))
//    )

  /**
   * Creates a ZeroMQ reply socket representing the server endpoint for IOPub
   * messages
   * @param system The actor system the socket actor will belong
   * @return The ActorRef created for the socket connection
   */
  def IOPub(system: ActorSystem) : ActorRef =
    system.actorOf(Props(classOf[PubSocketActor], IOPubConnection.toString))
//    ZeroMQExtension(system).newPubSocket(
//      Bind(IOPubConnection.toString)
//    )
}
yeghishe/spark-kernel
kernel/src/main/scala/com/ibm/spark/kernel/protocol/v5/kernel/socket/SocketFactory.scala
Scala
apache-2.0
3,720
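A usage sketch under the assumption that SocketConfig is a plain case class exposing exactly the fields the factory reads above (transport, ip and the port fields); the port values and actor names are made up for illustration:

import akka.actor.ActorSystem

// Assumes a SocketConfig constructor with these named fields; only the field
// names referenced by the factory are known from the source.
val config = SocketConfig(
  transport = "tcp", ip = "127.0.0.1",
  hb_port = 45321, shell_port = 45322, iopub_port = 45323, stdin_port = 45324)

val system  = ActorSystem("kernel")
val factory = SocketFactory(config)

// Heartbeat needs a listener actor; IOPub only publishes.
val heartbeat = factory.Heartbeat(system, listener = system.deadLetters)
val iopub     = factory.IOPub(system)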
/*
 * Copyright 2016 The BigDL Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.intel.analytics.bigdl.dllib.keras.objectives

import com.intel.analytics.bigdl.dllib.keras.layers.KerasBaseSpec

class MeanAbsoluteErrorSpec extends KerasBaseSpec {

  "AbsCriterion" should "be the same as Keras mae" in {
    val kerasCode =
      """
        |input_tensor = Input(shape=[3, 4])
        |target_tensor = Input(shape=[3, 4])
        |loss = mean_absolute_error(target_tensor, input_tensor)
        |input = np.random.random([2, 3, 4])
        |Y = np.random.random([2, 3, 4])
      """.stripMargin
    val loss = mae[Float]()
    checkOutputAndGradForLoss(loss, kerasCode)
  }
}
intel-analytics/BigDL
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/keras/objectives/MeanAbsoluteErrorSpec.scala
Scala
apache-2.0
1,211
package scala.pickling.staticonlyfail1

import scala.pickling._
import NegativeCompilation._
import org.scalatest.FunSuite

class StaticOnlyFail1Test extends FunSuite {
  test("main") {
    expectError("Cannot generate") {
      """
        | import _root_.scala.pickling._
        | import _root_.scala.pickling.Defaults.{ pickleOps, unpickleOps }
        | import _root_.scala.pickling.json._
        | import _root_.scala.pickling.static.StaticOnly
        |
        | class C(val fld: Any)
        |
        | val x: C = new C(1)
        | val pickle: JSONPickle = x.pickle
      """.stripMargin
    }
  }
}
scala/pickling
core/src/test/scala/scala/pickling/neg/StaticOnlyFail1Test.scala
Scala
bsd-3-clause
612
package io.getquill.context.sql.norm

import io.getquill.Spec
import io.getquill.context.sql.testContext._
import io.getquill.Query

class ExpandMappedInfixSpec extends Spec {

  "expand infix out of map body if first part is empty" in {
    val forUpdate = quote {
      q: Query[TestEntity] => infix"$q FOR UPDATE".as[Query[TestEntity]]
    }
    val q = quote {
      forUpdate(qr1).map(x => x)
    }
    q.ast.toString mustEqual s"""infix"$${querySchema("TestEntity")} FOR UPDATE".map(x => x)"""
    ExpandMappedInfix(q.ast).toString mustEqual s"""infix"$${querySchema("TestEntity").map(x => x)} FOR UPDATE""""
  }

  "do not expand other cases" in {
    val forUpdate = quote {
      q: Query[TestEntity] => infix"SELECT $q FOR UPDATE".as[Query[TestEntity]]
    }
    val q = quote {
      forUpdate(qr1).map(x => x)
    }
    ExpandMappedInfix(q.ast) mustEqual q.ast
  }
}
getquill/quill
quill-sql/src/test/scala/io/getquill/context/sql/norm/ExpandMappedInfixSpec.scala
Scala
apache-2.0
892
/*
 * Copyright 2015 Nicolas Rinaudo
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package kantan.csv
package scalaz

import _root_.scalaz.Show
import _root_.scalaz.scalacheck.ScalazProperties.{equal => equ}
import arbitrary._
import kantan.codecs.scalaz.laws.discipline.ScalazDisciplineSuite

class ErrorTests extends ScalazDisciplineSuite {
  checkAll("ReadError", equ.laws[ReadError])
  checkAll("DecodeError", equ.laws[DecodeError])
  checkAll("DecodeError.OutOfbounds", equ.laws[DecodeError.OutOfBounds])
  checkAll("DecodeError.TypeError", equ.laws[DecodeError.TypeError])
  checkAll("ParseError", equ.laws[ParseError])
  checkAll("ParseError.NoSuchElement", equ.laws[ParseError.NoSuchElement.type])
  checkAll("ParseError.IOError", equ.laws[ParseError.IOError])

  test("Show[DecodeError.OutOfBounds] should yield a string containing the expected index") {
    forAll { error: DecodeError.OutOfBounds =>
      Show[DecodeError.OutOfBounds].shows(error) should include(error.index.toString)
      Show[DecodeError].shows(error) should include(error.index.toString)
      Show[ReadError].shows(error) should include(error.index.toString)
    }
  }

  test("Show[DecodeError.TypeError] should yield a string containing the expected message") {
    forAll { error: DecodeError.TypeError =>
      Show[DecodeError.TypeError].shows(error) should include(error.message)
      Show[DecodeError].shows(error) should include(error.message)
      Show[ReadError].shows(error) should include(error.message)
    }
  }

  test("Show[ParseError.IOError] should yield a string containing the expected message") {
    forAll { error: ParseError.IOError =>
      Show[ParseError.IOError].shows(error) should include(error.message)
      Show[ParseError].shows(error) should include(error.message)
      Show[ReadError].shows(error) should include(error.message)
    }
  }

  test("Show[ParseError.NoSuchElement] should yield a string containing 'trying to read from an empty reader'") {
    val expected = "trying to read from an empty reader"
    Show[ParseError.NoSuchElement.type].shows(ParseError.NoSuchElement) should include(expected)
    Show[ParseError].shows(ParseError.NoSuchElement) should include(expected)
    Show[ReadError].shows(ParseError.NoSuchElement) should include(expected)
  }
}
nrinaudo/scala-csv
scalaz/shared/src/test/scala/kantan/csv/scalaz/ErrorTests.scala
Scala
mit
2,813
package com.github.swwjf

import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.boot.builder.SpringApplicationBuilder
import org.springframework.boot.context.web.SpringBootServletInitializer
import org.springframework.context.annotation.ComponentScan

@SpringBootApplication
@ComponentScan(Array("com.github.swwjf"))
class WebServicesApplication extends SpringBootServletInitializer {
  override def configure(builder: SpringApplicationBuilder): SpringApplicationBuilder =
    builder.sources(classOf[WebServicesApplication])
}

object WebServicesApplication {
  def main(args: Array[String]) {
    new SpringApplicationBuilder(classOf[WebServicesApplication]).run(args: _*)
  }
}
andrei-l/scala-webapp-with-java-frameworks
webservices/src/main/scala/com/github/swwjf/WebServicesApplication.scala
Scala
mit
722
package rsyntaxEdit

import scalaExec.Interpreter.GlobalValues

// Processes the word under the mouse cursor position for the JSyntaxPane editor;
// the behavior depends on whether we are in ScalaSci mode (the last execution key stroke is F6).
object ProcessWordAtCursorJSyntaxPane {

  def processWordAtCursorJSyntaxPane(wd: String) = {
    var editor = scalaExec.Interpreter.GlobalValues.editorPane // the jSyntaxPane based ScalaLab editor
    var wordAtCursor = wd
    var sI = scalaExec.Interpreter.GlobalValues.globalInterpreter // the global Scala interpreter

    // suppose, as an example, that the word at the cursor comes from: var aa = 10; then wordAtCursor == "aa"
    var typeOfId = sI.typeOfTerm(wordAtCursor).toString()
    if (typeOfId != "<notype>") {
      // remove the stray parentheses the interpreter returns around the type
      typeOfId = typeOfId filter (_ != '(') filter (_ != ')')

      if (typeOfId.contains(":") == false) { // not a function: avoid displaying values for functions
        // take in the $$dummy synthetic variable the identifier as a string, e.g. for: var aa = 10, it is $$dummy = "aa"
        // var $$dummy = ""+wordAtCursor
        // construct the command to extract the value of the variable, e.g. var $$dummy = aa
        // var execString = "var $$dummy = "+$$dummy
        // sI.quietRun(execString) // execute quietly; the required value is assigned to the synthetic variable $$dummy
        // var valueOfId = scalaExec.Interpreter.GlobalValues.globalInterpreter.valueOfTerm("$$dummy").getOrElse("none")
        var valueOfId = scalaExec.Interpreter.GlobalValues.globalInterpreter.eval(wordAtCursor)

        if (GlobalValues.getValuesForAllRSyntax == true) {
          if (valueOfId != "none")
            editor.setToolTipText(wordAtCursor + " [ " + typeOfId + " ] " + valueOfId)
          else
            editor.setToolTipText(wordAtCursor + " [ " + typeOfId + " ] ")
        }
        else { // values for controlled types only
          var isPrimitiveType =
            ((typeOfId.contains("Double") || typeOfId.contains("Int") || typeOfId.contains("Long") ||
              typeOfId.contains("Char") || typeOfId.contains("Short") || typeOfId.contains("Boolean") ||
              typeOfId.contains("String")) && (typeOfId.contains("[") == false))

          // for scalaSci types there is a provision to truncate large strings in toString
          var isScalaSciType = typeOfId.contains("scalaSci")

          // display also the size for scalaSci types
          if (isScalaSciType) {
            val sizeOfId = scalaExec.Interpreter.GlobalValues.globalInterpreter.eval(wordAtCursor + ".size")
            wordAtCursor = wordAtCursor + " (" + sizeOfId + ") "
          }

          if (valueOfId != "none") {
            if (isScalaSciType == false && isPrimitiveType == false) {
              // not a scalaSci or primitive type: avoid displaying the value
              editor.setToolTipText(wordAtCursor + " [ " + typeOfId + " ] ")
            }
            else {
              // var valueOfId = scalaExec.Interpreter.GlobalValues.globalInterpreter.valueOfTerm("$$dummy").getOrElse("none")
              editor.setToolTipText(wordAtCursor + " [ " + typeOfId + " ] " + valueOfId)
            }
          } // valueOfId != "none"
          else
            editor.setToolTipText(wordAtCursor + " [ " + typeOfId + " ] ")
        } // values for controlled types only
      } // not a function: avoid displaying values for functions
      else
        editor.setToolTipText(wordAtCursor + " [ " + typeOfId + " ] ")
    } // <notype>
    else
      editor.setToolTipText("")
  }
}
scalalab/scalalab
source/src/main/scala/rsyntaxEdit/ProcessWordAtCursorJSyntaxPane.scala
Scala
mit
3,685
package rere.ql.data

import rere.ql.types.{ReqlPoint, ReqlPolygon}
import rere.ql.values.ReqlPolygonQuery

case class GeoLinearRing(point1: GeoPoint, point2: GeoPoint, point3: GeoPoint, otherPoints: GeoPoint*)

object GeoLinearRing {
  implicit def toReqlPolygon(implicit ring: GeoLinearRing): ReqlPolygon = {
    new ReqlPolygonQuery(ring.point1, ring.point2, ring.point3, ring.otherPoints.map(x => x: ReqlPoint): _*)
  }
}
pbaun/rere
modules/ql/src/main/scala/rere/ql/data/GeoLinearRing.scala
Scala
apache-2.0
426
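A usage sketch: the case class enforces a three-point minimum at the type level, and the companion's implicit materializes a ReqlPolygon from an implicit ring in scope. GeoPoint's constructor shape (longitude, latitude) is an assumption here, not confirmed by the source:

import rere.ql.data.{GeoLinearRing, GeoPoint}
import rere.ql.types.ReqlPolygon

// GeoPoint(longitude, latitude) is assumed; only the three-point minimum
// is guaranteed by the signature above.
implicit val ring: GeoLinearRing = GeoLinearRing(
  GeoPoint(-122.42, 37.77),
  GeoPoint(-122.40, 37.77),
  GeoPoint(-122.41, 37.79))

// With an implicit ring in scope, the companion supplies a ReqlPolygon
// wherever one is demanded.
val polygon: ReqlPolygon = GeoLinearRing.toReqlPolygon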
/**
 * tuProlog - Copyright (C) 2001-2002  aliCE team at deis.unibo.it
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 3.0 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
package com.szadowsz.gospel.core.db

import java.io.File
import java.net.{URL, URLClassLoader}

import com.szadowsz.gospel.core.PrologEngine
import com.szadowsz.gospel.core.error.InvalidLibraryException
import com.szadowsz.gospel.core.event.interpreter.LibraryEvent

import scala.util.control.NonFatal

final case class DefaultLibraryManager(override protected val wam: PrologEngine) extends LibraryManager {

  private def getClassResource(klass: Class[_]): URL = Option(klass) match {
    case None => null
    case Some(c) => c.getClassLoader.getResource(c.getName.replace('.', '/') + ".class")
  }

  override def loadLibrary(className: String, paths: Array[String]): Library = {
    try {
      val urls = paths.map(p =>
        (if (!p.contains(".class")) new File(p)
         else new File(p.substring(0, p.lastIndexOf(File.separator) + 1))).toURI.toURL)
      val loader = URLClassLoader.newInstance(urls, getClass.getClassLoader)
      val lib = Class.forName(className, true, loader).newInstance.asInstanceOf[Library]

      Option(getLibrary(lib.getName)) match {
        case Some(oldLib) =>
          logger.warn(s"Library ${oldLib.getName} already loaded.")
          oldLib
        case None =>
          externalLibs = externalLibs + (className -> getClassResource(lib.getClass))
          bindLibrary(lib)
          logger.info(s"Loaded Library ${lib.getName}")
          val ev = new LibraryEvent(wam, lib.getName)
          wam.notifyLoadedLibrary(ev)
          lib
      }
    } catch {
      case NonFatal(_) => throw new InvalidLibraryException(className, -1, -1)
    }
  }
}
zakski/project-soisceal
gospel-core/src/main/scala/com/szadowsz/gospel/core/db/DefaultLibraryManager.scala
Scala
lgpl-3.0
2,421
/*
 * Copyright 2018 Analytics Zoo Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.intel.analytics.zoo.pipeline.api.keras.layers

import com.intel.analytics.bigdl.nn.abstractnn.AbstractModule
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.models.Sequential
import com.intel.analytics.zoo.pipeline.api.keras.serializer.ModuleSerializationTest

class MaxPooling3DSpec extends KerasBaseSpec {

  "MaxPooling3D" should "be the same as Keras" in {
    val kerasCode =
      """
        |input_tensor = Input(shape=[3, 20, 15, 35])
        |input = np.random.random([2, 3, 20, 15, 35])
        |output_tensor = MaxPooling3D((2, 2, 3), dim_ordering="th")(input_tensor)
        |model = Model(input=input_tensor, output=output_tensor)
      """.stripMargin
    val seq = Sequential[Float]()
    val layer = MaxPooling3D[Float](poolSize = (2, 2, 3), inputShape = Shape(3, 20, 15, 35))
    seq.add(layer)
    seq.getOutputShape().toSingle().toArray should be (Array(-1, 3, 10, 7, 11))
    checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
      kerasCode)
  }
}

class MaxPooling3DSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    val layer = MaxPooling3D[Float](inputShape = Shape(3, 20, 15, 35))
    layer.build(Shape(2, 3, 20, 15, 35))
    val input = Tensor[Float](2, 3, 20, 15, 35).rand()
    runSerializationTest(layer, input)
  }
}
intel-analytics/analytics-zoo
zoo/src/test/scala/com/intel/analytics/zoo/pipeline/api/keras/layers/MaxPooling3DSpec.scala
Scala
apache-2.0
2,033
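The shape assertion in the spec above follows from "valid" pooling arithmetic: with the default stride equal to the pool size, each spatial dimension becomes floor(dim / pool). A standalone sketch of that rule (not part of the test suite):

// Sketch: spatial dims (20, 15, 35) pooled by (2, 2, 3) with stride == pool size.
def pooledDim(dim: Int, pool: Int): Int = dim / pool // integer division == floor here
val out = Seq(pooledDim(20, 2), pooledDim(15, 2), pooledDim(35, 3))
// out == Seq(10, 7, 11), matching the expected shape Array(-1, 3, 10, 7, 11)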
package objektwerks

import org.apache.spark.sql.{DataFrame, Dataset, Row}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

class DataframeTest extends AnyFunSuite with Matchers {
  import SparkInstance._
  import org.apache.spark.sql.expressions._
  import org.apache.spark.sql.functions._
  import sparkSession.implicits._

  val dataframe = sparkSession.read.json("./data/person/person.json").cache
  dataframe.write.json("./target/dataframe/person.json")

  test("dataframe") {
    dataframe.count shouldBe 4
    assert(dataframe.isInstanceOf[Dataset[Row]])
    assert(dataframe.as[Person].isInstanceOf[Dataset[Person]])
  }

  test("column") {
    val idColumn = dataframe.col("id")
    val nameColumn = col("name")
    val ageColumn = column("age")
    val roleColumn = expr("role")
    dataframe.select(idColumn, nameColumn, ageColumn, roleColumn).count shouldBe 4
  }

  test("selectExpr") {
    dataframe.selectExpr("id", "name", "age", "role").count shouldBe 4
  }

  test("extend") {
    dataframe.withColumn("dog_age", $"age" * 7).head.getLong(4) shouldBe 168
  }

  test("update") {
    val incrementAgeNameToUpper = dataframe
      .withColumn("age", $"age" + 1)
      .withColumn("name", upper($"name"))
      .cache
    incrementAgeNameToUpper.count shouldBe 4
    incrementAgeNameToUpper.head.getLong(0) shouldBe 25
    incrementAgeNameToUpper.head.getString(2) shouldBe "FRED"
  }

  test("transform") {
    def incrementAge(df: DataFrame): DataFrame = df.withColumn("age", $"age" + 1)
    def nameToUpper(df: DataFrame): DataFrame = df.withColumn("name", upper($"name"))
    val incrementAgeNameToUpper = dataframe
      .transform(incrementAge)
      .transform(nameToUpper)
      .cache
    incrementAgeNameToUpper.count shouldBe 4
    incrementAgeNameToUpper.head.getLong(0) shouldBe 25
    incrementAgeNameToUpper.head.getString(2) shouldBe "FRED"
  }

  test("filter") {
    val filterByName = dataframe.filter("name == 'barney'").cache
    filterByName.count shouldBe 1
    filterByName.head.getLong(0) shouldBe 22
    filterByName.head.getString(2) shouldBe "barney"
    filterByName.head.getString(3) shouldBe "husband"
  }

  test("select > where") {
    val selectByName = dataframe.select("name").where("name == 'barney'").cache
    selectByName.count shouldBe 1
    selectByName.head.getString(0) shouldBe "barney"

    val selectByAge = dataframe.select("age").where("age > 23").cache
    selectByAge.count shouldBe 1
    selectByAge.head.getLong(0) shouldBe 24
  }

  test("select > orderBy") {
    val orderByName = dataframe.select("name").orderBy("name").cache
    orderByName.count shouldBe 4
    orderByName.head.getString(0) shouldBe "barney"
  }

  test("sort") {
    val sortByName = dataframe.sort("name").cache
    sortByName.count shouldBe 4
    sortByName.head.getLong(0) shouldBe 22
    sortByName.head.getString(2) shouldBe "barney"
    sortByName.head.getString(3) shouldBe "husband"
  }

  test("agg") {
    dataframe.agg("age" -> "min").head.getLong(0) shouldBe 21
    dataframe.agg("age" -> "avg").head.getDouble(0) shouldBe 22.5
    dataframe.agg("age" -> "max").head.getLong(0) shouldBe 24
    dataframe.agg("age" -> "sum").head.getLong(0) shouldBe 90
  }

  test("select > agg") {
    dataframe.select(min(col("age"))).head.getLong(0) shouldBe 21
    dataframe.select(max(col("age"))).head.getLong(0) shouldBe 24
    dataframe.select(avg(col("age"))).head.getDouble(0) shouldBe 22.5
    dataframe.select(sum(col("age"))).head.getLong(0) shouldBe 90
  }

  test("select > agg > case class") {
    dataframe.select(min(col("age"))).map(row => Age(row.getLong(0))).head shouldBe Age(21)
    dataframe.select(max(col("age"))).map(row => Age(row.getLong(0))).head shouldBe Age(24)
  }

  test("groupBy > avg") {
    val groupByRole = dataframe
      .groupBy("role")
      .avg("age")
      .cache
    groupByRole.count shouldBe 2
    groupByRole.collect.foreach {
      case Row("husband", avgAge) => avgAge shouldBe 23.0
      case Row("wife", avgAge) => avgAge shouldBe 22.0
    }
  }

  test("groupBy > agg(min, avg, max)") {
    val groupByRole = dataframe
      .groupBy("role")
      .agg(
        min("age"),
        avg("age"),
        max("age")
      )
      .cache
    groupByRole.count shouldBe 2
    groupByRole.collect.foreach {
      case Row("husband", minAge, avgAge, maxAge) =>
        minAge shouldBe 22
        avgAge shouldBe 23.0
        maxAge shouldBe 24
      case Row("wife", minAge, avgAge, maxAge) =>
        minAge shouldBe 21
        avgAge shouldBe 22.0
        maxAge shouldBe 23
    }
  }

  test("when > otherwise") {
    val personsWithGender = dataframe.withColumn("gender", when($"role" === "husband", "male").otherwise("female"))
    personsWithGender.collect.foreach {
      case Row(_, _, _, "husband", gender) => gender shouldBe "male"
      case Row(_, _, _, "wife", gender) => gender shouldBe "female"
    }
  }

  test("window") {
    val window = Window.partitionBy("role").orderBy($"age".desc)
    val ranking = rank.over(window).as("rank")
    val result = dataframe.select(col("role"), col("name"), col("age"), ranking).as[(String, String, Long, Int)].cache
    ("wife", "wilma", 23, 1) shouldEqual result.head
  }

  test("join") {
    val persons = sparkSession.read.json("./data/person/person.json").cache
    val tasks = sparkSession.read.json("./data/task/task.json").cache
    persons.count shouldBe 4
    tasks.count shouldBe 4
    val joinBy = persons.col("id") === tasks.col("pid")
    val personsTasks = persons.join(tasks, joinBy)
    personsTasks.count shouldBe 4
  }
}
objektwerks/spark
src/test/scala/objektwerks/DataframeTest.scala
Scala
apache-2.0
5,624
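The transform test above shows the idiomatic way to chain reusable DataFrame-to-DataFrame functions. A small sketch of the same pattern with extra hypothetical stages (the column names are assumptions, not taken from the test data):

// Sketch: composing independently testable stages with Dataset.transform.
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._

def adultsOnly(df: DataFrame): DataFrame = df.filter(col("age") >= 21)
def withDogAge(df: DataFrame): DataFrame = df.withColumn("dog_age", col("age") * 7)

// The chain reads top to bottom, and each stage can be unit-tested alone:
// dataframe.transform(adultsOnly).transform(withDogAge)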
package scommons.client.app

import scommons.api.ApiResponse
import scommons.client.ui.popup._
import scommons.react._
import scommons.react.redux.task.TaskManagerUiProps

import scala.util.{Success, Try}

/**
  * Displays status of running tasks.
  */
object AppTaskManagerUi extends FunctionComponent[TaskManagerUiProps] {

  var errorHandler: PartialFunction[Try[_], (Option[String], Option[String])] = {
    case Success(result) => result match {
      case res: ApiResponse if res.status.nonSuccessful =>
        (Some(res.status.error.getOrElse("Non-successful response")), res.status.details)
      case _ => (None, None)
    }
  }

  protected def render(compProps: Props): ReactElement = {
    val props = compProps.wrapped
    val showStatus = props.status.isDefined
    val statusMessage = props.status.getOrElse("")
    val showError = props.error.isDefined
    val errorMessage = props.error.getOrElse("")

    <.>()(
      if (showStatus) Some(
        <(StatusPopup())(^.wrapped := StatusPopupProps(
          statusMessage,
          onHide = props.onHideStatus
        ))()
      ) else None,

      if (props.showLoading) Some(
        <(LoadingPopup())()()
      ) else None,

      if (showError) Some(
        <(ErrorPopup())(^.wrapped := ErrorPopupProps(
          errorMessage,
          details = props.errorDetails,
          onClose = props.onCloseErrorPopup
        ))()
      ) else None
    )
  }
}
viktor-podzigun/scommons
ui/src/main/scala/scommons/client/app/AppTaskManagerUi.scala
Scala
apache-2.0
1,436
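Because errorHandler above is a mutable PartialFunction, applications can prepend their own failure mapping and fall back to the default ApiResponse handling. A hypothetical override (the Failure case and the message wiring are illustrative, not part of the library):

import scala.util.{Failure, Try}

val handleFailures: PartialFunction[Try[_], (Option[String], Option[String])] = {
  // Map any failed task to (error message, no details).
  case Failure(e) => (Option(e.getMessage), None)
}
// Try the custom mapping first, then the default handler.
AppTaskManagerUi.errorHandler = handleFailures orElse AppTaskManagerUi.errorHandler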
package reactivemongo

import java.util.Arrays

import akka.util.ByteString
import org.specs2.mutable._
import reactivemongo.bson._
import reactivemongo.bson.BSONObjectID
import reactivemongo.bson.utils.Converters
import reactivemongo.core.netty._

class BsonSpec extends Specification {

  val simple = Array[Byte](0x16, 0x00, 0x00, 0x00, 0x02, 'h', 'e', 'l', 'l', 'o', 0x00, 0x06, 0x00, 0x00, 0x00, 'w', 'o', 'r', 'l', 'd', 0x00, 0x00)

  val embeddingArray = Array[Byte](70, 0, 0, 0, 7, 95, 105, 100, 0, 80, 55, -110, -63, -104, 69, -121, -105, 27, 20, 83, 14, 4, 66, 83, 79, 78, 0, 42, 0, 0, 0, 2, 48, 0, 8, 0, 0, 0, 97, 119, 101, 115, 111, 109, 101, 0, 1, 49, 0, 51, 51, 51, 51, 51, 51, 20, 64, 1, 50, 0, 0, 0, 0, 0, 0, 8, -97, 64, 0, 0)

  val bsonArray = Array[Byte](42, 0, 0, 0, 2, 48, 0, 8, 0, 0, 0, 97, 119, 101, 115, 111, 109, 101, 0, 1, 49, 0, 51, 51, 51, 51, 51, 51, 20, 64, 1, 50, 0, 0, 0, 0, 0, 0, 8, -97, 64, 0)

  "ReactiveMongo" should {
    "produce a simple doc" in {
      val doc = BSONDocument("hello" -> BSONString("world"))
      val buffer = doc.makeBuffer
      compare(simple, buffer)
    }

    "produce a simple doc through a traversable" in {
      val buffer = BSONDocument("hello" -> BSONString("world")).makeBuffer
      val buffer2 = buffer.makeDocument.makeBuffer
      compare(simple, buffer2)
    }

    "produce a document embedding an array" in {
      val buffer = BSONDocument(
        "_id" -> BSONObjectID("503792c1984587971b14530e"),
        "BSON" -> BSONArray(
          BSONString("awesome"),
          BSONDouble(5.05),
          BSONDouble(1986))).makeBuffer
      compare(embeddingArray, buffer)
    }

    "produce a document embedding an array through traversable" in {
      val buffer = BSONDocument(
        "_id" -> BSONObjectID("503792c1984587971b14530e"),
        "BSON" -> BSONArray(
          BSONString("awesome"),
          BSONDouble(5.05),
          BSONDouble(1986))).makeBuffer
      embeddingArray.length mustEqual buffer.size
      val buffer2 = buffer.makeDocument.makeBuffer
      compare(embeddingArray, buffer2)
    }

    "nested subdocuments and arrays" in {
      val expected = Array[Byte](72, 0, 0, 0, 3, 112, 117, 115, 104, 65, 108, 108, 0, 58, 0, 0, 0, 4, 99, 111, 110, 102, 105, 103, 0, 45, 0, 0, 0, 3, 48, 0, 37, 0, 0, 0, 2, 110, 97, 109, 101, 0, 7, 0, 0, 0, 102, 111, 111, 98, 97, 114, 0, 2, 118, 97, 108, 117, 101, 0, 4, 0, 0, 0, 98, 97, 114, 0, 0, 0, 0, 0)
      // {"pushAll":{"config":[{"name":"foobar","value":"bar"}]}}
      val subsubdoc = BSONDocument("name" -> BSONString("foobar"), "value" -> BSONString("bar"))
      val arr = BSONArray(subsubdoc)
      val subdoc = BSONDocument("config" -> arr)
      val doc = BSONDocument("pushAll" -> subdoc)
      compare(expected, doc.makeBuffer)
    }

    "concat two arrays" in {
      val array1 = BSONArray(BSONInteger(1), BSONInteger(2))
      val array2 = BSONArray(BSONString("a"), BSONString("b"))
      val mergedArray = array1 ++ array2
      val str = mergedArray.values.map {
        case BSONString(value) => value.toString
        case BSONInteger(value) => value.toString
        case _ => "NOELEM"
      }.mkString(",")
      str must equalTo("1,2,a,b")
    }

    "build arrays with mixed values and optional values" in {
      val array = BSONArray(
        BSONInteger(1),
        Some(BSONInteger(2)),
        None,
        Some(BSONInteger(4)))
      val str = array.values.map {
        case BSONInteger(value) => value.toString
        case _ => "NOELEM"
      }.mkString(",")
      str mustEqual "1,2,4"
    }

    val docLike = BSONDocument(
      "likeFalseInt" -> BSONInteger(0),
      "likeFalseLong" -> BSONLong(0),
      "likeFalseDouble" -> BSONDouble(0.0),
      "likeFalseUndefined" -> BSONUndefined,
      "likeFalseNull" -> BSONNull,
      "likeTrueInt" -> BSONInteger(1),
      "likeTrueLong" -> BSONLong(2),
      "likeTrueDouble" -> BSONDouble(-0.1),
      "anInt" -> BSONInteger(200),
      "aLong" -> BSONLong(12345678912L),
      "aDouble" -> BSONDouble(9876543210.98))

    "abstract booleans and numbers" in {
      docLike.getAs[BSONBooleanLike]("likeFalseInt").get.toBoolean mustEqual false
      docLike.getAs[BSONBooleanLike]("likeFalseLong").get.toBoolean mustEqual false
      docLike.getAs[BSONBooleanLike]("likeFalseDouble").get.toBoolean mustEqual false
      docLike.getAs[BSONBooleanLike]("likeFalseUndefined").get.toBoolean mustEqual false
      docLike.getAs[BSONBooleanLike]("likeFalseNull").get.toBoolean mustEqual false
      docLike.getAs[BSONBooleanLike]("likeTrueInt").get.toBoolean mustEqual true
      docLike.getAs[BSONBooleanLike]("likeTrueLong").get.toBoolean mustEqual true
      docLike.getAs[BSONBooleanLike]("likeTrueDouble").get.toBoolean mustEqual true
      docLike.getAs[BSONNumberLike]("anInt").get.toDouble mustEqual 200
      docLike.getAs[BSONNumberLike]("aLong").get.toDouble mustEqual 12345678912L
      docLike.getAs[BSONNumberLike]("aDouble").get.toDouble mustEqual 9876543210.98
    }
  }

  def compare(origin: Array[Byte], buffer: ByteString) = {
    val array = buffer.toArray
    val result = array.corresponds(origin)(_ == _)
    if (!result) {
      log(origin, array)
      failure
    } else success
  }

  def log(origin: Array[Byte], test: Array[Byte]) = {
    println(Arrays.toString(origin))
    println(Arrays.toString(test))
  }
}

class BSONObjectIDSpec extends Specification {
  "BSONObjectID" should {
    "equal when created with string" in {
      val objectID = BSONObjectID.generate
      val sameObjectID = BSONObjectID(objectID.stringify)
      objectID.valueAsArray must equalTo(sameObjectID.valueAsArray)
    }

    "equal another instance of BSONObjectID with the same value" in {
      val objectID = BSONObjectID.generate
      val sameObjectID = BSONObjectID(objectID.stringify)
      objectID must equalTo(sameObjectID)
    }

    "not equal another newly generated instance of BSONObjectID" in {
      val objectID = BSONObjectID.generate
      val nextObjectID = BSONObjectID(BSONObjectID.generate.stringify)
      objectID must not equalTo (nextObjectID)
    }
  }

  "Converters" should {
    "strings equal each other" in {
      val objectID = "506fff5bb8f6b133007b5bcf"
      val hex = Converters.str2Hex(objectID)
      val string = Converters.hex2Str(hex)
      string must equalTo(objectID)
    }

    "bytes generated equal bytes converted from string" in {
      val objectID = BSONObjectID.generate
      val bytes = Converters.str2Hex(objectID.stringify)
      objectID.valueAsArray must equalTo(bytes)
    }
  }
}
sh1ng/ReactiveMongo
driver/src/test/scala/BsonSpec.scala
Scala
apache-2.0
6,610
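The `simple` fixture above is the canonical BSON encoding of {"hello": "world"}: a little-endian int32 document length (0x16 = 22), element type 0x02 (UTF-8 string), the NUL-terminated key, an int32 string length (6, counting the trailing NUL), the value bytes, and a final 0x00 document terminator. A small sketch of that layout, independent of the test suite and built by hand for illustration:

// Sketch: hand-rolled BSON bytes for {"hello": "world"}, mirroring `simple`.
val docLen   = Array[Byte](0x16, 0, 0, 0)                 // total document size: 22 bytes
val strField = Array[Byte](0x02)                          // element type: UTF-8 string
val key      = "hello".getBytes("UTF-8") :+ 0.toByte      // cstring key
val valLen   = Array[Byte](0x06, 0, 0, 0)                 // "world" + NUL = 6 bytes
val value    = "world".getBytes("UTF-8") :+ 0.toByte
val doc      = docLen ++ strField ++ key ++ valLen ++ value :+ 0.toByte // terminator
// doc.length == 22, byte for byte the same as the `simple` fixture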
package org.higherstate.jameson.parsers

import org.higherstate.jameson.tokenizers.Tokenizer
import org.higherstate.jameson.{Selector, Path}
import org.higherstate.jameson.failures.Valid
import scala.collection.mutable.ListBuffer

case class Tuple2MapParser[T1, T2](s1: Selector[String, T1], s2: Selector[String, T2]) extends ObjectArgumentsParser[(T1, T2)] {
  protected val (arguments, groups) = TupleMapParser.getArgumentsAndGroup(s1, s2)
  protected lazy val template: Array[Any] = Array(NoArgFound(s1.parser), NoArgFound(s2.parser))

  def parse(tokenizer: Tokenizer, path: Path): Valid[(T1, T2)] =
    getArgs(tokenizer, path).map { x =>
      (x: @unchecked) match {
        case Array(_1: T1 @unchecked, _2: T2 @unchecked) => (_1, _2)
      }
    }

  def schema = Map(
    "type" -> "object",
    "properties" -> Map(
      s1.keys.head -> s1.parser.schema,
      s2.keys.head -> s2.parser.schema))
}

case class Tuple3MapParser[T1, T2, T3](s1: Selector[String, T1], s2: Selector[String, T2], s3: Selector[String, T3]) extends ObjectArgumentsParser[(T1, T2, T3)] {
  protected val (arguments, groups) = TupleMapParser.getArgumentsAndGroup(s1, s2, s3)
  protected lazy val template: Array[Any] = Array(NoArgFound(s1.parser), NoArgFound(s2.parser), NoArgFound(s3.parser))

  def parse(tokenizer: Tokenizer, path: Path): Valid[(T1, T2, T3)] =
    getArgs(tokenizer, path).map { x =>
      (x: @unchecked) match {
        case Array(_1: T1 @unchecked, _2: T2 @unchecked, _3: T3 @unchecked) => (_1, _2, _3)
      }
    }

  def schema = Map(
    "type" -> "object",
    "properties" -> Map(
      s1.keys.head -> s1.parser.schema,
      s2.keys.head -> s2.parser.schema,
      s3.keys.head -> s3.parser.schema))
}

case class Tuple4MapParser[T1, T2, T3, T4](s1: Selector[String, T1], s2: Selector[String, T2], s3: Selector[String, T3], s4: Selector[String, T4]) extends ObjectArgumentsParser[(T1, T2, T3, T4)] {
  protected val (arguments, groups) = TupleMapParser.getArgumentsAndGroup(s1, s2, s3, s4)
  protected lazy val template: Array[Any] = Array(NoArgFound(s1.parser), NoArgFound(s2.parser), NoArgFound(s3.parser), NoArgFound(s4.parser))

  def parse(tokenizer: Tokenizer, path: Path): Valid[(T1, T2, T3, T4)] =
    getArgs(tokenizer, path).map { x =>
      (x: @unchecked) match {
        case Array(_1: T1 @unchecked, _2: T2 @unchecked, _3: T3 @unchecked, _4: T4 @unchecked) => (_1, _2, _3, _4)
      }
    }

  def schema = Map(
    "type" -> "object",
    "properties" -> Map(
      s1.keys.head -> s1.parser.schema,
      s2.keys.head -> s2.parser.schema,
      s3.keys.head -> s3.parser.schema,
      s4.keys.head -> s4.parser.schema))
}

case class Tuple5MapParser[T1, T2, T3, T4, T5](s1: Selector[String, T1], s2: Selector[String, T2], s3: Selector[String, T3], s4: Selector[String, T4], s5: Selector[String, T5]) extends ObjectArgumentsParser[(T1, T2, T3, T4, T5)] {
  protected val (arguments, groups) = TupleMapParser.getArgumentsAndGroup(s1, s2, s3, s4, s5)
  protected lazy val template: Array[Any] = Array(NoArgFound(s1.parser), NoArgFound(s2.parser), NoArgFound(s3.parser), NoArgFound(s4.parser), NoArgFound(s5.parser))

  def parse(tokenizer: Tokenizer, path: Path): Valid[(T1, T2, T3, T4, T5)] =
    getArgs(tokenizer, path).map { x =>
      (x: @unchecked) match {
        case Array(_1: T1 @unchecked, _2: T2 @unchecked, _3: T3 @unchecked, _4: T4 @unchecked, _5: T5 @unchecked) => (_1, _2, _3, _4, _5)
      }
    }

  def schema = Map(
    "type" -> "object",
    "properties" -> Map(
      s1.keys.head -> s1.parser.schema,
      s2.keys.head -> s2.parser.schema,
      s3.keys.head -> s3.parser.schema,
      s4.keys.head -> s4.parser.schema,
      s5.keys.head -> s5.parser.schema))
}

object TupleMapParser {

  def getArgumentsAndGroup(selectors: Selector[String, _]*) = {
    val argsBuffer = new ListBuffer[(String, (Parser[_], Int))]()
    val groupBuffer = new ListBuffer[(Int, Parser[_], Set[String])]()
    for ((selector, index) <- selectors.zipWithIndex) {
      if (selector.isGroup) groupBuffer += ((index, selector.parser, selector.keys))
      else argsBuffer ++= selector.keys.map(k => (k, (selector.parser, index)))
    }
    argsBuffer.toMap -> groupBuffer.result
  }

  def getSchema(selectors: Selector[String, _]*) = {
    val m = Map(
      "type" -> "object",
      "properties" -> selectors.map(s => s.keys.head -> s.parser.schema)
    )
    val r = selectors.filter(!_.parser.hasDefault)
    if (r.isEmpty) m
    else m + ("required" -> r.map(r => r.keys.head))
  }
}
HigherState/jameson
src/main/scala/org/higherstate/jameson/parsers/TupleMapParser.scala
Scala
apache-2.0
4,404
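getArgumentsAndGroup above splits the selectors into two lookup structures: a key -> (parser, tuple-position) map for plain selectors, and a (position, parser, keys) list for group selectors. A stripped-down sketch of the same bookkeeping, with (isGroup, parser-name, keys) triples standing in for real selectors (hypothetical, just to show the shape):

// Sketch: plain strings stand in for parsers; same split as getArgumentsAndGroup.
val selectors = Seq((false, "intParser", Set("a", "b")), (true, "groupParser", Set("g")))
val (arguments, groups) =
  selectors.zipWithIndex.foldLeft(Map.empty[String, (String, Int)] -> List.empty[(Int, String, Set[String])]) {
    case ((args, grps), ((isGroup, parser, keys), index)) =>
      if (isGroup) (args, grps :+ ((index, parser, keys)))           // group selector: keep position + keys
      else (args ++ keys.map(_ -> (parser -> index)), grps)          // plain selector: one map entry per key
  }
// arguments == Map("a" -> ("intParser", 0), "b" -> ("intParser", 0))
// groups    == List((1, "groupParser", Set("g")))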
/*
 * Copyright (C) 2012 The Regents of The University California.
 * All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package shark.parse

import java.io.Serializable
import java.util.ArrayList

import org.apache.hadoop.fs.Path
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.ql.exec._
import org.apache.hadoop.hive.ql.parse._

import shark.execution.SharkExplainWork

class SharkExplainSemanticAnalyzer(conf: HiveConf) extends ExplainSemanticAnalyzer(conf) {

  var sem: BaseSemanticAnalyzer = null

  /**
   * This is basically the same as Hive's except we invoke
   * SharkSemanticAnalyzerFactory. We need to do this to get
   * SharkSemanticAnalyzer for SELECT and CTAS queries.
   */
  override def analyzeInternal(ast: ASTNode): Unit = {
    ctx.setExplain(true)

    // Create a semantic analyzer for the query
    val childNode = ast.getChild(0).asInstanceOf[ASTNode]
    sem = SharkSemanticAnalyzerFactory.get(conf, childNode)
    sem.analyze(childNode, ctx)

    val extended = (ast.getChildCount() > 1)

    ctx.setResFile(new Path(ctx.getLocalTmpFileURI()))
    var tasks = sem.getRootTasks()
    val fetchTask = sem.getFetchTask()
    if (tasks == null) {
      if (fetchTask != null) {
        tasks = new ArrayList[Task[_ <: Serializable]]()
        tasks.add(fetchTask)
      }
    } else if (fetchTask != null) {
      tasks.add(fetchTask)
    }

    val task = TaskFactory.get(
      new SharkExplainWork(ctx.getResFile().toString(), tasks, childNode.toStringTree(), sem.getInputs(), extended),
      conf)
    rootTasks.add(task)
  }
}
KrishnaVamsiKV/Shark
src/main/scala/shark/parse/SharkExplainSemanticAnalyzer.scala
Scala
apache-2.0
2,130
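The task-assembly logic in analyzeInternal reduces to a simple rule: if there are no root tasks, the fetch task (when present) becomes the task list; otherwise it is appended. A tiny standalone sketch of that rule, with illustrative types only:

// Sketch: merging an optional fetch task into the root task list.
def mergeTasks[T](rootTasks: Option[List[T]], fetchTask: Option[T]): List[T] =
  (rootTasks, fetchTask) match {
    case (None, Some(f))        => List(f)    // only the fetch task to run
    case (Some(tasks), Some(f)) => tasks :+ f // append fetch task after the roots
    case (Some(tasks), None)    => tasks
    case (None, None)           => Nil
  }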
package coursier package test import utest._ import scala.async.Async.{async, await} import coursier.core.{Classifier, Configuration, Extension, Type} import coursier.graph.{Conflict, ModuleTree} import coursier.test.compatibility._ import coursier.util.{Artifact, Print, Tree} import scala.concurrent.Future object CentralTests extends CentralTests abstract class CentralTests extends TestSuite { def central = Repositories.central private def centralBase = central.root private final def isActualCentral = centralBase == Repositories.central.root private lazy val runner = new TestRunner(repositories = Seq(central)) def tests = Tests { test("logback") { async { val dep = dep"ch.qos.logback:logback-classic:1.1.3" val res = await(runner.resolve(Seq(dep))).clearCaches val expected = Resolution() .withRootDependencies(Seq(dep)) .withDependencies( Set( dep.withCompileScope, dep"ch.qos.logback:logback-core:1.1.3".withCompileScope, dep"org.slf4j:slf4j-api:1.7.7".withCompileScope ) ) assert(res == expected) } } test("asm") { async { val dep = dep"org.ow2.asm:asm-commons:5.0.2" val res = await(runner.resolve(Seq(dep))).clearCaches val expected = Resolution() .withRootDependencies(Seq(dep)) .withDependencies( Set( dep.withCompileScope, dep"org.ow2.asm:asm-tree:5.0.2".withCompileScope, dep"org.ow2.asm:asm:5.0.2".withCompileScope ) ) assert(res == expected) } } test("jodaVersionInterval") { async { val dep = dep"joda-time:joda-time:[2.2,2.8]" val res0 = await(runner.resolve(Seq(dep))) val res = res0.clearCaches val expected = Resolution() .withRootDependencies(Seq(dep)) .withDependencies(Set(dep.withCompileScope)) assert(res == expected) assert(res0.projectCache.contains(dep.moduleVersion)) val proj = res0.projectCache(dep.moduleVersion)._2 assert(proj.version == "2.8") } } test("spark") { test - runner.resolutionCheck( mod"org.apache.spark:spark-core_2.11", "1.3.1", profiles = Some(Set("hadoop-2.2")) ) test("scala210") - runner.resolutionCheck( mod"org.apache.spark:spark-core_2.10", "2.1.1", profiles = Some(Set("hadoop-2.6", "scala-2.10", "!scala-2.11")) ) } test("argonautShapeless") { runner.resolutionCheck( mod"com.github.alexarchambault:argonaut-shapeless_6.1_2.11", "0.2.0" ) } test("snapshotMetadata") { test("simple") { val mod = mod"com.github.fommil:java-logging" val version = "1.2-SNAPSHOT" val extraRepo = MavenRepository("https://oss.sonatype.org/content/repositories/public/") test - runner.resolutionCheck( mod, version, configuration = Configuration.runtime, extraRepos = Seq(extraRepo) ) test - runner.ensureHasArtifactWithExtension( mod, version, Extension.jar, Attributes(Type.jar), extraRepos = Seq(extraRepo) ) } test { val mod = mod"org.jitsi:jitsi-videobridge" val version = "1.0-SNAPSHOT" val extraRepos = Seq( MavenRepository("https://github.com/jitsi/jitsi-maven-repository/raw/master/releases"), MavenRepository("https://github.com/jitsi/jitsi-maven-repository/raw/master/snapshots"), MavenRepository("https://jitpack.io") ) test - runner.resolutionCheck( mod, version, extraRepos = extraRepos, forceVersions = Map(mod"commons-codec:commons-codec" -> "1.6") ) } } test("versionProperty") { // nasty one - in its POM, its version contains "${parent.project.version}" runner.resolutionCheck( mod"org.bytedeco.javacpp-presets:opencv", "3.0.0-1.1" ) } test("parentProjectProperties") { runner.resolutionCheck( mod"com.github.fommil.netlib:all", "1.1.2" ) } test("projectProperties") { runner.resolutionCheck( mod"org.glassfish.jersey.core:jersey-client", "2.19" ) } test("parentDependencyManagementProperties") 
{ runner.resolutionCheck( mod"com.nativelibs4java:jnaerator-runtime", "0.12" ) } test("propertySubstitution") { runner.resolutionCheck( mod"org.drools:drools-compiler", "7.0.0.Final" ) } test("artifactIdProperties") { runner.resolutionCheck( mod"cc.factorie:factorie_2.11", "1.2" ) } test("versionInterval") { if (isActualCentral) // that one involves version intervals, thus changing versions, so only // running it against our cached Central stuff runner.resolutionCheck( mod"org.webjars.bower:malihu-custom-scrollbar-plugin", "3.1.5" ) else Future.successful(()) } test("latestRevision") { test - runner.resolutionCheck( mod"com.chuusai:shapeless_2.11", "[2.2.0,2.3-a1)" ) test - runner.resolutionCheck( mod"com.chuusai:shapeless_2.11", "2.2.+" ) test - runner.resolutionCheck( mod"com.googlecode.libphonenumber:libphonenumber", "[7.0,7.1)" ) test - runner.resolutionCheck( mod"com.googlecode.libphonenumber:libphonenumber", "7.0.+" ) } test("versionFromDependency") { val mod = mod"org.apache.ws.commons:XmlSchema" val version = "1.1" val expectedArtifactUrl = s"$centralBase/org/apache/ws/commons/XmlSchema/1.1/XmlSchema-1.1.jar" test - runner.resolutionCheck(mod, version) test - runner.withArtifacts(mod, version, Attributes(Type.jar)) { artifacts => assert(artifacts.exists(_.url == expectedArtifactUrl)) } } test("fixedVersionDependency") { val mod = mod"io.grpc:grpc-netty" val version = "0.14.1" runner.resolutionCheck(mod, version) } test("mavenScopes") { def check(config: Configuration) = runner.resolutionCheck( mod"com.android.tools:sdklib", "24.5.0", configuration = config ) test("compile") - check(Configuration.compile) test("runtime") - check(Configuration.runtime) } test("optionalScope") { def intransitiveCompiler(config: Configuration) = dep"org.scala-lang:scala-compiler:2.11.8" .withConfiguration(config) .withAttributes(Attributes(Type.jar)) .withTransitive(false) runner.withArtifacts( Seq( intransitiveCompiler(Configuration.default), intransitiveCompiler(Configuration.optional) ), extraRepos = Nil, classifierOpt = None ) { case Seq() => throw new Exception("Expected one JAR") case Seq(jar) => () // ok case other => throw new Exception(s"Got too many JARs (${other.mkString})") } } test("packaging") { test("aar") { // random aar-based module found on Central val module = mod"com.yandex.android:speechkit" val version = "2.5.0" test - runner.ensureHasArtifactWithExtension( module, version, Extension("aar"), attributes = Attributes(Type("aar")) ) test - runner.ensureHasArtifactWithExtension( module, version, Extension("aar") ) } test("bundle") { // has packaging bundle - ensuring coursier gives its artifact the .jar extension test - runner.ensureHasArtifactWithExtension( mod"com.google.guava:guava", "17.0", Extension.jar ) // even though packaging is bundle, depending on attribute type "jar" should still find // an artifact test - runner.ensureHasArtifactWithExtension( mod"com.google.guava:guava", "17.0", Extension.jar, attributes = Attributes(Type.jar) ) } test("mavenPlugin") { // has packaging maven-plugin - ensuring coursier gives its artifact the .jar extension runner.ensureHasArtifactWithExtension( mod"org.bytedeco:javacpp", "1.1", Extension.jar, Attributes(Type("maven-plugin")) ) } } test("classifier") { test("vanilla") { async { val deps = Seq(dep"org.apache.avro:avro:1.8.1") val res = await(runner.resolve(deps)) val filenames: Set[String] = res.artifacts().map(_.url.split("/").last).toSet assert(filenames.contains("avro-1.8.1.jar")) assert(!filenames.contains("avro-1.8.1-tests.jar")) } } 
test("tests") { async { val deps = Seq( dep"org.apache.avro:avro:1.8.1" .withAttributes(Attributes(Type.empty, Classifier.tests)) ) val res = await(runner.resolve(deps)) val filenames: Set[String] = res.artifacts().map(_.url.split("/").last).toSet assert(!filenames.contains("avro-1.8.1.jar")) assert(filenames.contains("avro-1.8.1-tests.jar")) } } test("mixed") { async { val deps = Seq( dep"org.apache.avro:avro:1.8.1", dep"org.apache.avro:avro:1.8.1" .withAttributes(Attributes(Type.empty, Classifier.tests)) ) val res = await(runner.resolve(deps)) val filenames: Set[String] = res.artifacts().map(_.url.split("/").last).toSet assert(filenames.contains("avro-1.8.1.jar")) assert(filenames.contains("avro-1.8.1-tests.jar")) } } } test("artifacts") { test("uniqueness") { async { val deps = Seq( dep"org.scala-lang:scala-compiler:2.11.8", dep"org.scala-js:scalajs-compiler_2.11.8:0.6.8" ) val res = await(runner.resolve(deps)) val metadataErrors = res.errors val conflicts = res.conflicts val isDone = res.isDone assert(metadataErrors.isEmpty) assert(conflicts.isEmpty) assert(isDone) val artifacts = res.artifacts() val map = artifacts.groupBy(a => a) val nonUnique = map.filter { case (_, l) => l.length > 1 } if (nonUnique.nonEmpty) println( "Found non unique artifacts:" + System.lineSeparator() + nonUnique.keys.toVector.map(" " + _).mkString(System.lineSeparator()) ) assert(nonUnique.isEmpty) } } test("testJarType") { // dependencies with type "test-jar" should be given the classifier "tests" by default async { val deps = Seq(dep"org.apache.hadoop:hadoop-yarn-server-resourcemanager:2.7.1") val res = await(runner.resolve(deps)) val metadataErrors = res.errors val conflicts = res.conflicts val isDone = res.isDone assert(metadataErrors.isEmpty) assert(conflicts.isEmpty) assert(isDone) val dependencyArtifacts = res.dependencyArtifacts() val zookeeperTestArtifacts = dependencyArtifacts.collect { case (dep, pub, artifact) if dep.module == mod"org.apache.zookeeper:zookeeper" && pub.`type` == Type.testJar => (pub, artifact) } assert(zookeeperTestArtifacts.length == 1) val (pub, artifact) = zookeeperTestArtifacts.head assert(pub.`type` == Type.testJar) assert(pub.classifier == Classifier.tests) assert(artifact.url.endsWith("-tests.jar")) } } } test("ignoreUtf8Bom") - { runner.resolutionCheck( mod"dk.brics.automaton:automaton", "1.11-8" ) } test("ignoreWhitespaces") { runner.resolutionCheck( mod"org.jboss.resteasy:resteasy-jaxrs", "3.0.9.Final" ) } test("nd4jNative") - { // In particular: // - uses OS-based activation, // - requires converting a "x86-64" to "x86_64" in it, and // - uses "project.packaging" property runner.resolutionCheck( mod"org.nd4j:nd4j-native", "0.5.0" ) } test("scalaCompilerJLine") { // optional should bring jline test - runner.resolutionCheck( mod"org.scala-lang:scala-compiler", "2.11.8" ) test - runner.resolutionCheck( mod"org.scala-lang:scala-compiler", "2.11.8", configuration = Configuration.optional ) } test("deepLearning4j") - { runner.resolutionCheck( mod"org.deeplearning4j:deeplearning4j-core", "0.8.0" ) } test("tarGzZipArtifacts") { val mod = mod"org.apache.maven:apache-maven" val version = "3.3.9" test - runner.resolutionCheck(mod, version) val mainTarGzUrl = s"$centralBase/org/apache/maven/apache-maven/3.3.9/apache-maven-3.3.9-bin.tar.gz" val mainZipUrl = s"$centralBase/org/apache/maven/apache-maven/3.3.9/apache-maven-3.3.9-bin.zip" test("tarGz") { test { runner.withArtifacts( mod, version, attributes = Attributes(Type("tar.gz"), Classifier("bin")), transitive = true ) { artifacts 
=> assert(artifacts.nonEmpty) val urls = artifacts.map(_.url).toSet assert(urls.contains(mainTarGzUrl)) } } test { runner.withArtifacts( mod, version, attributes = Attributes(Type("tar.gz"), Classifier("bin")), classifierOpt = Some(Classifier("bin")), transitive = true ) { artifacts => assert(artifacts.nonEmpty) val urls = artifacts.map(_.url).toSet assert(urls.contains(mainTarGzUrl)) } } } test("zip") { test { runner.withArtifacts( mod, version, attributes = Attributes(Type("zip"), Classifier("bin")), transitive = true ) { artifacts => assert(artifacts.nonEmpty) val urls = artifacts.map(_.url).toSet assert(urls.contains(mainZipUrl)) } } test { runner.withArtifacts( mod, version, attributes = Attributes(Type("zip"), Classifier("bin")), classifierOpt = Some(Classifier("bin")), transitive = true ) { artifacts => assert(artifacts.nonEmpty) val urls = artifacts.map(_.url).toSet assert(urls.contains(mainZipUrl)) } } } } test("groupIdVersionProperties") { runner.resolutionCheck( mod"org.apache.directory.shared:shared-ldap", "0.9.19" ) } test("relocation") { test - runner.resolutionCheck( mod"bouncycastle:bctsp-jdk14", "138" ) test("ignoreRelocationJars") { val mod = mod"org.apache.commons:commons-io" val ver = "1.3.2" val expectedUrl = s"$centralBase/commons-io/commons-io/1.3.2/commons-io-1.3.2.jar" test - runner.resolutionCheck(mod, ver) test - runner.withArtifacts(mod, ver, transitive = true) { artifacts => assert(artifacts.exists(_.url == expectedUrl)) } } } test("entities") { test("odash") - runner.resolutionCheck( mod"org.codehaus.plexus:plexus", "1.0.4" ) } test("parentVersionInPom") { runner.resolutionCheck( mod"io.swagger.parser.v3:swagger-parser-v3", "2.0.1" ) } test("parentBeforeImports") { runner.resolutionCheck( mod"org.kie:kie-api", "6.5.0.Final", extraRepos = Seq(MavenRepository("https://repository.jboss.org/nexus/content/repositories/public")) ) } test("signaturesOfSignatures") { val mod = mod"org.yaml:snakeyaml" val ver = "1.17" def hasSha1(a: Artifact) = a.checksumUrls.contains("SHA-1") def hasMd5(a: Artifact) = a.checksumUrls.contains("MD5") def hasSig(a: Artifact) = a.extra.contains("sig") test - runner.resolutionCheck(mod, ver) test - runner.withDetailedArtifacts( Seq(Dependency(mod, ver).withAttributes(Attributes(Type.bundle))), Nil, None ) { artifacts => val jarOpt = artifacts.collect { case (attr, artifact) if attr.`type` == Type.bundle || attr.`type` == Type.jar => artifact } assert(jarOpt.nonEmpty) assert(jarOpt.forall(hasSha1)) assert(jarOpt.forall(hasMd5)) assert(jarOpt.forall(hasSig)) } test - runner.withDetailedArtifacts( Seq(Dependency(mod, ver).withAttributes(Attributes(Type.pom))), Nil, None ) { artifacts => val pomOpt = artifacts.collect { case (attr, artifact) if attr.`type` == Type.pom => artifact } assert(pomOpt.nonEmpty) assert(pomOpt.forall(hasSha1)) assert(pomOpt.forall(hasMd5)) assert(pomOpt.forall(hasSig)) } } test("sbtPluginVersionRange") { val mod = mod"org.ensime:sbt-ensime;scalaVersion=2.10;sbtVersion=0.13" val ver = "1.12.+" test { // doesn't work via proxies, which don't list all the upstream available versions if (isActualCentral) runner.resolutionCheck(mod, ver) else Future.successful(()) } } test("multiVersionRanges") { val mod = mod"org.webjars.bower:dgrid" val ver = "1.0.0" test { // if false, the tests rely on things straight from Central, which can be updated sometimes… if (isActualCentral) runner.resolutionCheck(mod, ver) else Future.successful(()) } } test("dependencyManagementScopeOverriding") { val mod = mod"org.apache.tika:tika-app" val 
ver = "1.13" test - runner.resolutionCheck(mod, ver) } test("optionalArtifacts") { val mod = mod"io.monix:monix_2.12" val ver = "2.3.0" val mainUrl = s"$centralBase/io/monix/monix_2.12/2.3.0/monix_2.12-2.3.0.jar" test - runner.resolutionCheck(mod, ver) test - runner.withArtifacts(mod, ver) { artifacts => val mainArtifactOpt = artifacts.find(_.url == mainUrl) assert(mainArtifactOpt.nonEmpty) assert(mainArtifactOpt.forall(_.optional)) } test - runner.withArtifacts(mod"com.lihaoyi:scalatags_2.12", "0.6.2", transitive = true) { artifacts => val urls = artifacts.map(_.url).toSet val expectedUrls = Seq( s"$centralBase/org/scala-lang/scala-library/2.12.0/scala-library-2.12.0.jar", s"$centralBase/com/lihaoyi/sourcecode_2.12/0.1.3/sourcecode_2.12-0.1.3.jar", s"$centralBase/com/lihaoyi/scalatags_2.12/0.6.2/scalatags_2.12-0.6.2.jar" ) assert(expectedUrls.forall(urls)) } } test("packagingTpe") { val mod = mod"android.arch.lifecycle:extensions" val ver = "1.0.0-alpha3" val extraRepo = MavenRepository("https://maven.google.com") test - runner.resolutionCheck(mod, ver, extraRepos = Seq(extraRepo)) test - runner.withArtifacts( mod, ver, Attributes(Type("aar")), extraRepos = Seq(extraRepo), transitive = true ) { artifacts => val urls = artifacts.map(_.url).toSet val expectedUrls = Set( "https://maven.google.com/com/android/support/support-fragment/25.3.1/support-fragment-25.3.1.aar", "https://maven.google.com/android/arch/core/core/1.0.0-alpha3/core-1.0.0-alpha3.aar", "https://maven.google.com/android/arch/lifecycle/runtime/1.0.0-alpha3/runtime-1.0.0-alpha3.aar", "https://maven.google.com/android/arch/lifecycle/extensions/1.0.0-alpha3/extensions-1.0.0-alpha3.aar", "https://maven.google.com/com/android/support/support-compat/25.3.1/support-compat-25.3.1.aar", "https://maven.google.com/com/android/support/support-media-compat/25.3.1/support-media-compat-25.3.1.aar", "https://maven.google.com/com/android/support/support-core-ui/25.3.1/support-core-ui-25.3.1.aar", "https://maven.google.com/com/android/support/support-core-utils/25.3.1/support-core-utils-25.3.1.aar", "https://maven.google.com/com/android/support/support-annotations/25.3.1/support-annotations-25.3.1.jar", "https://maven.google.com/android/arch/lifecycle/common/1.0.0-alpha3/common-1.0.0-alpha3.jar" ) assert(expectedUrls.forall(urls)) } } test("noArtifactIdExclusion") { val mod = mod"org.datavec:datavec-api" val ver = "0.9.1" test - runner.resolutionCheck(mod, ver) } test("snapshotVersioningBundlePackaging") { val mod = mod"org.talend.daikon:daikon" val ver = "0.19.0-SNAPSHOT" val extraRepos = Seq( MavenRepository( "https://artifacts-oss.talend.com/nexus/content/repositories/TalendOpenSourceRelease" ), MavenRepository( "https://artifacts-oss.talend.com/nexus/content/repositories/TalendOpenSourceSnapshot" ) ) test - runner.resolutionCheck(mod, ver, extraRepos = extraRepos) test - runner.withArtifacts( mod, ver, Attributes(Type.jar), extraRepos = extraRepos, transitive = true ) { artifacts => val urls = artifacts.map(_.url).toSet val expectedUrls = Set( "https://artifacts-oss.talend.com/nexus/content/repositories/TalendOpenSourceRelease/com/cedarsoftware/json-io/4.9.9-TALEND/json-io-4.9.9-TALEND.jar", "https://artifacts-oss.talend.com/nexus/content/repositories/TalendOpenSourceSnapshot/org/talend/daikon/daikon/0.19.0-SNAPSHOT/daikon-0.19.0-20171201.100416-43.jar", s"$centralBase/com/fasterxml/jackson/core/jackson-annotations/2.5.3/jackson-annotations-2.5.3.jar", s"$centralBase/com/fasterxml/jackson/core/jackson-core/2.5.3/jackson-core-2.5.3.jar", 
s"$centralBase/com/fasterxml/jackson/core/jackson-databind/2.5.3/jackson-databind-2.5.3.jar", s"$centralBase/com/thoughtworks/paranamer/paranamer/2.7/paranamer-2.7.jar", s"$centralBase/commons-codec/commons-codec/1.6/commons-codec-1.6.jar", s"$centralBase/javax/inject/javax.inject/1/javax.inject-1.jar", s"$centralBase/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar", s"$centralBase/org/apache/avro/avro/1.8.1/avro-1.8.1.jar", s"$centralBase/org/apache/commons/commons-compress/1.8.1/commons-compress-1.8.1.jar", s"$centralBase/org/apache/commons/commons-lang3/3.4/commons-lang3-3.4.jar", s"$centralBase/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar", s"$centralBase/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar", s"$centralBase/org/slf4j/slf4j-api/1.7.12/slf4j-api-1.7.12.jar", s"$centralBase/org/tukaani/xz/1.5/xz-1.5.jar", s"$centralBase/org/xerial/snappy/snappy-java/1.1.1.3/snappy-java-1.1.1.3.jar" ) assert(expectedUrls.forall(urls)) } } test("trees") { test("cycle") { async { val res = await(runner.resolution( mod"edu.illinois.cs.cogcomp:illinois-pos", "2.0.2", Seq(mvn"https://cogcomp.seas.upenn.edu/m2repo"), configuration = Configuration.compile )) val expectedTree = """└─ edu.illinois.cs.cogcomp:illinois-pos:2.0.2 | β”œβ”€ edu.illinois.cs.cogcomp:LBJava:1.0.3 | β”‚ β”œβ”€ de.bwaldvogel:liblinear:1.94 | β”‚ └─ nz.ac.waikato.cms.weka:weka-stable:3.6.10 | β”‚ └─ net.sf.squirrel-sql.thirdparty-non-maven:java-cup:0.11a | └─ edu.illinois.cs.cogcomp:illinois-pos:2.0.2 | └─ edu.illinois.cs.cogcomp:LBJava:1.0.3 | β”œβ”€ de.bwaldvogel:liblinear:1.94 | └─ nz.ac.waikato.cms.weka:weka-stable:3.6.10 | └─ net.sf.squirrel-sql.thirdparty-non-maven:java-cup:0.11a""".stripMargin val tree = Print.dependencyTree(res, colors = false) assert(tree.replace("\r\n", "\n") == expectedTree) } } test("reverse") { async { val res = await(runner.resolution(mod"io.get-coursier:coursier-cli_2.12", "1.1.0-M10")) // not sure the leftmost 'β”œβ”€ io.get-coursier:coursier-cli_2.12:1.1.0-M10' should be there… val expectedTree = """β”œβ”€ com.chuusai:shapeless_2.12:2.3.3 |β”‚ β”œβ”€ com.github.alexarchambault:argonaut-shapeless_6.2_2.12:1.2.0-M8 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ └─ com.github.alexarchambault:case-app-util_2.12:2.0.0-M5 |β”‚ └─ com.github.alexarchambault:case-app_2.12:2.0.0-M5 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ com.github.alexarchambault:argonaut-shapeless_6.2_2.12:1.2.0-M8 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ com.github.alexarchambault:case-app-annotations_2.12:2.0.0-M5 |β”‚ └─ com.github.alexarchambault:case-app_2.12:2.0.0-M5 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ com.github.alexarchambault:case-app-util_2.12:2.0.0-M5 |β”‚ └─ com.github.alexarchambault:case-app_2.12:2.0.0-M5 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ com.github.alexarchambault:case-app_2.12:2.0.0-M5 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ io.argonaut:argonaut_2.12:6.2.1 |β”‚ └─ com.github.alexarchambault:argonaut-shapeless_6.2_2.12:1.2.0-M8 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ io.get-coursier:coursier-bootstrap_2.12:1.1.0-M10 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ io.get-coursier:coursier-cache_2.12:1.1.0-M10 |β”‚ β”œβ”€ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ └─ io.get-coursier:coursier-extra_2.12:1.1.0-M10 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ 
io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ io.get-coursier:coursier-core_2.12:1.1.0-M10 |β”‚ β”œβ”€ io.get-coursier:coursier-cache_2.12:1.1.0-M10 |β”‚ β”‚ β”œβ”€ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ └─ io.get-coursier:coursier-extra_2.12:1.1.0-M10 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ └─ io.get-coursier:coursier-extra_2.12:1.1.0-M10 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ io.get-coursier:coursier-extra_2.12:1.1.0-M10 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ org.scala-lang:scala-library:2.12.8 |β”‚ β”œβ”€ com.chuusai:shapeless_2.12:2.3.3 org.scala-lang:scala-library:2.12.4 -> 2.12.8 |β”‚ β”‚ β”œβ”€ com.github.alexarchambault:argonaut-shapeless_6.2_2.12:1.2.0-M8 |β”‚ β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ └─ com.github.alexarchambault:case-app-util_2.12:2.0.0-M5 |β”‚ β”‚ └─ com.github.alexarchambault:case-app_2.12:2.0.0-M5 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ com.github.alexarchambault:argonaut-shapeless_6.2_2.12:1.2.0-M8 org.scala-lang:scala-library:2.12.4 -> 2.12.8 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ com.github.alexarchambault:case-app-annotations_2.12:2.0.0-M5 org.scala-lang:scala-library:2.12.7 -> 2.12.8 |β”‚ β”‚ └─ com.github.alexarchambault:case-app_2.12:2.0.0-M5 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ com.github.alexarchambault:case-app-util_2.12:2.0.0-M5 org.scala-lang:scala-library:2.12.7 -> 2.12.8 |β”‚ β”‚ └─ com.github.alexarchambault:case-app_2.12:2.0.0-M5 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ com.github.alexarchambault:case-app_2.12:2.0.0-M5 org.scala-lang:scala-library:2.12.7 -> 2.12.8 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ io.get-coursier:coursier-bootstrap_2.12:1.1.0-M10 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ io.get-coursier:coursier-cache_2.12:1.1.0-M10 |β”‚ β”‚ β”œβ”€ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ └─ io.get-coursier:coursier-extra_2.12:1.1.0-M10 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ io.get-coursier:coursier-core_2.12:1.1.0-M10 |β”‚ β”‚ β”œβ”€ io.get-coursier:coursier-cache_2.12:1.1.0-M10 |β”‚ β”‚ β”‚ β”œβ”€ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ β”‚ └─ io.get-coursier:coursier-extra_2.12:1.1.0-M10 |β”‚ β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ β”œβ”€ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ └─ io.get-coursier:coursier-extra_2.12:1.1.0-M10 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ io.get-coursier:coursier-extra_2.12:1.1.0-M10 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ org.scala-lang:scala-reflect:2.12.6 org.scala-lang:scala-library:2.12.6 -> 2.12.8 |β”‚ β”‚ β”œβ”€ io.argonaut:argonaut_2.12:6.2.1 org.scala-lang:scala-reflect:2.12.4 -> 2.12.6 |β”‚ β”‚ β”‚ └─ com.github.alexarchambault:argonaut-shapeless_6.2_2.12:1.2.0-M8 |β”‚ β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ └─ org.typelevel:machinist_2.12:0.6.6 |β”‚ β”‚ β”œβ”€ org.typelevel:cats-core_2.12:1.5.0 |β”‚ β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ └─ org.typelevel:cats-macros_2.12:1.5.0 |β”‚ β”‚ └─ org.typelevel:cats-core_2.12:1.5.0 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ 
org.scala-lang.modules:scala-xml_2.12:1.1.1 org.scala-lang:scala-library:2.12.6 -> 2.12.8 |β”‚ β”‚ └─ io.get-coursier:coursier-core_2.12:1.1.0-M10 |β”‚ β”‚ β”œβ”€ io.get-coursier:coursier-cache_2.12:1.1.0-M10 |β”‚ β”‚ β”‚ β”œβ”€ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ β”‚ └─ io.get-coursier:coursier-extra_2.12:1.1.0-M10 |β”‚ β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ β”œβ”€ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ └─ io.get-coursier:coursier-extra_2.12:1.1.0-M10 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ org.typelevel:cats-core_2.12:1.5.0 org.scala-lang:scala-library:2.12.7 -> 2.12.8 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ org.typelevel:cats-kernel_2.12:1.5.0 org.scala-lang:scala-library:2.12.7 -> 2.12.8 |β”‚ β”‚ └─ org.typelevel:cats-core_2.12:1.5.0 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ org.typelevel:cats-macros_2.12:1.5.0 org.scala-lang:scala-library:2.12.7 -> 2.12.8 |β”‚ β”‚ └─ org.typelevel:cats-core_2.12:1.5.0 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ org.typelevel:machinist_2.12:0.6.6 org.scala-lang:scala-library:2.12.6 -> 2.12.8 |β”‚ β”‚ β”œβ”€ org.typelevel:cats-core_2.12:1.5.0 |β”‚ β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ └─ org.typelevel:cats-macros_2.12:1.5.0 |β”‚ β”‚ └─ org.typelevel:cats-core_2.12:1.5.0 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ └─ org.typelevel:macro-compat_2.12:1.1.1 org.scala-lang:scala-library:2.12.0 -> 2.12.8 |β”‚ └─ com.chuusai:shapeless_2.12:2.3.3 |β”‚ β”œβ”€ com.github.alexarchambault:argonaut-shapeless_6.2_2.12:1.2.0-M8 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ └─ com.github.alexarchambault:case-app-util_2.12:2.0.0-M5 |β”‚ └─ com.github.alexarchambault:case-app_2.12:2.0.0-M5 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ org.scala-lang:scala-reflect:2.12.6 |β”‚ β”œβ”€ io.argonaut:argonaut_2.12:6.2.1 org.scala-lang:scala-reflect:2.12.4 -> 2.12.6 |β”‚ β”‚ └─ com.github.alexarchambault:argonaut-shapeless_6.2_2.12:1.2.0-M8 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ └─ org.typelevel:machinist_2.12:0.6.6 |β”‚ β”œβ”€ org.typelevel:cats-core_2.12:1.5.0 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ └─ org.typelevel:cats-macros_2.12:1.5.0 |β”‚ └─ org.typelevel:cats-core_2.12:1.5.0 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ org.scala-lang.modules:scala-xml_2.12:1.1.1 |β”‚ └─ io.get-coursier:coursier-core_2.12:1.1.0-M10 |β”‚ β”œβ”€ io.get-coursier:coursier-cache_2.12:1.1.0-M10 |β”‚ β”‚ β”œβ”€ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”‚ └─ io.get-coursier:coursier-extra_2.12:1.1.0-M10 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ β”œβ”€ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ └─ io.get-coursier:coursier-extra_2.12:1.1.0-M10 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ org.typelevel:cats-core_2.12:1.5.0 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ org.typelevel:cats-kernel_2.12:1.5.0 |β”‚ └─ org.typelevel:cats-core_2.12:1.5.0 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ org.typelevel:cats-macros_2.12:1.5.0 |β”‚ └─ org.typelevel:cats-core_2.12:1.5.0 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”œβ”€ org.typelevel:machinist_2.12:0.6.6 |β”‚ β”œβ”€ org.typelevel:cats-core_2.12:1.5.0 |β”‚ β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |β”‚ └─ org.typelevel:cats-macros_2.12:1.5.0 |β”‚ └─ 
org.typelevel:cats-core_2.12:1.5.0 |β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 |└─ org.typelevel:macro-compat_2.12:1.1.1 | └─ com.chuusai:shapeless_2.12:2.3.3 | β”œβ”€ com.github.alexarchambault:argonaut-shapeless_6.2_2.12:1.2.0-M8 | β”‚ └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 | └─ com.github.alexarchambault:case-app-util_2.12:2.0.0-M5 | └─ com.github.alexarchambault:case-app_2.12:2.0.0-M5 | └─ io.get-coursier:coursier-cli_2.12:1.1.0-M10""".stripMargin val tree = Print.dependencyTree(res, reverse = true, colors = false) assert(tree.replace("\r\n", "\n") == expectedTree) } } test("module") - async { val res = await(runner.resolution(mod"io.get-coursier:coursier-cli_2.12", "1.1.0-M10")) val tree = ModuleTree(res) val str = Tree(tree.toVector)(_.children).render { t => s"${t.module}:${t.reconciledVersion}" } val expectedStr = """└─ io.get-coursier:coursier-cli_2.12:1.1.0-M10 | β”œβ”€ com.github.alexarchambault:argonaut-shapeless_6.2_2.12:1.2.0-M8 | β”‚ β”œβ”€ com.chuusai:shapeless_2.12:2.3.3 | β”‚ β”‚ β”œβ”€ org.scala-lang:scala-library:2.12.8 | β”‚ β”‚ └─ org.typelevel:macro-compat_2.12:1.1.1 | β”‚ β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”‚ β”œβ”€ io.argonaut:argonaut_2.12:6.2.1 | β”‚ β”‚ └─ org.scala-lang:scala-reflect:2.12.6 | β”‚ β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”œβ”€ com.github.alexarchambault:case-app_2.12:2.0.0-M5 | β”‚ β”œβ”€ com.github.alexarchambault:case-app-annotations_2.12:2.0.0-M5 | β”‚ β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”‚ β”œβ”€ com.github.alexarchambault:case-app-util_2.12:2.0.0-M5 | β”‚ β”‚ β”œβ”€ com.chuusai:shapeless_2.12:2.3.3 | β”‚ β”‚ β”‚ β”œβ”€ org.scala-lang:scala-library:2.12.8 | β”‚ β”‚ β”‚ └─ org.typelevel:macro-compat_2.12:1.1.1 | β”‚ β”‚ β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”‚ β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”œβ”€ io.get-coursier:coursier-bootstrap_2.12:1.1.0-M10 | β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”œβ”€ io.get-coursier:coursier-cache_2.12:1.1.0-M10 | β”‚ β”œβ”€ io.get-coursier:coursier-core_2.12:1.1.0-M10 | β”‚ β”‚ β”œβ”€ org.scala-lang:scala-library:2.12.8 | β”‚ β”‚ └─ org.scala-lang.modules:scala-xml_2.12:1.1.1 | β”‚ β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”œβ”€ io.get-coursier:coursier-core_2.12:1.1.0-M10 | β”‚ β”œβ”€ org.scala-lang:scala-library:2.12.8 | β”‚ └─ org.scala-lang.modules:scala-xml_2.12:1.1.1 | β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”œβ”€ io.get-coursier:coursier-extra_2.12:1.1.0-M10 | β”‚ β”œβ”€ io.get-coursier:coursier-cache_2.12:1.1.0-M10 | β”‚ β”‚ β”œβ”€ io.get-coursier:coursier-core_2.12:1.1.0-M10 | β”‚ β”‚ β”‚ β”œβ”€ org.scala-lang:scala-library:2.12.8 | β”‚ β”‚ β”‚ └─ org.scala-lang.modules:scala-xml_2.12:1.1.1 | β”‚ β”‚ β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”‚ β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”‚ β”œβ”€ io.get-coursier:coursier-core_2.12:1.1.0-M10 | β”‚ β”‚ β”œβ”€ org.scala-lang:scala-library:2.12.8 | β”‚ β”‚ └─ org.scala-lang.modules:scala-xml_2.12:1.1.1 | β”‚ β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”œβ”€ org.scala-lang:scala-library:2.12.8 | └─ org.typelevel:cats-core_2.12:1.5.0 | β”œβ”€ org.scala-lang:scala-library:2.12.8 | β”œβ”€ org.typelevel:cats-kernel_2.12:1.5.0 | β”‚ └─ org.scala-lang:scala-library:2.12.8 | β”œβ”€ org.typelevel:cats-macros_2.12:1.5.0 | β”‚ β”œβ”€ org.scala-lang:scala-library:2.12.8 | β”‚ └─ 
org.typelevel:machinist_2.12:0.6.6 | β”‚ β”œβ”€ org.scala-lang:scala-library:2.12.8 | β”‚ └─ org.scala-lang:scala-reflect:2.12.6 | β”‚ └─ org.scala-lang:scala-library:2.12.8 | └─ org.typelevel:machinist_2.12:0.6.6 | β”œβ”€ org.scala-lang:scala-library:2.12.8 | └─ org.scala-lang:scala-reflect:2.12.6 | └─ org.scala-lang:scala-library:2.12.8""".stripMargin assert(str.replace("\r\n", "\n") == expectedStr) } test("conflicts") { async { val res = await(runner.resolution(mod"io.get-coursier:coursier-cli_2.12", "1.1.0-M10")) val conflicts = Conflict(res).toSet val expectedConflicts = Set( Conflict( mod"org.scala-lang:scala-library", "2.12.8", "2.12.4", wasExcluded = false, mod"com.chuusai:shapeless_2.12", "2.3.3" ), Conflict( mod"org.scala-lang:scala-library", "2.12.8", "2.12.4", wasExcluded = false, mod"com.github.alexarchambault:argonaut-shapeless_6.2_2.12", "1.2.0-M8" ), Conflict( mod"org.scala-lang:scala-library", "2.12.8", "2.12.7", wasExcluded = false, mod"com.github.alexarchambault:case-app-annotations_2.12", "2.0.0-M5" ), Conflict( mod"org.scala-lang:scala-library", "2.12.8", "2.12.7", wasExcluded = false, mod"com.github.alexarchambault:case-app-util_2.12", "2.0.0-M5" ), Conflict( mod"org.scala-lang:scala-library", "2.12.8", "2.12.7", wasExcluded = false, mod"com.github.alexarchambault:case-app_2.12", "2.0.0-M5" ), Conflict( mod"org.scala-lang:scala-library", "2.12.8", "2.12.6", wasExcluded = false, mod"org.scala-lang:scala-reflect", "2.12.6" ), Conflict( mod"org.scala-lang:scala-library", "2.12.8", "2.12.6", wasExcluded = false, mod"org.scala-lang.modules:scala-xml_2.12", "1.1.1" ), Conflict( mod"org.scala-lang:scala-library", "2.12.8", "2.12.7", wasExcluded = false, mod"org.typelevel:cats-core_2.12", "1.5.0" ), Conflict( mod"org.scala-lang:scala-library", "2.12.8", "2.12.7", wasExcluded = false, mod"org.typelevel:cats-kernel_2.12", "1.5.0" ), Conflict( mod"org.scala-lang:scala-library", "2.12.8", "2.12.7", wasExcluded = false, mod"org.typelevel:cats-macros_2.12", "1.5.0" ), Conflict( mod"org.scala-lang:scala-library", "2.12.8", "2.12.6", wasExcluded = false, mod"org.typelevel:machinist_2.12", "0.6.6" ), Conflict( mod"org.scala-lang:scala-library", "2.12.8", "2.12.0", wasExcluded = false, mod"org.typelevel:macro-compat_2.12", "1.1.1" ), Conflict( mod"org.scala-lang:scala-reflect", "2.12.6", "2.12.4", wasExcluded = false, mod"io.argonaut:argonaut_2.12", "6.2.1" ) ) assert(conflicts == expectedConflicts) } } } test("overrideScalaModule") { val sharedDeps = Set( "co.fs2:fs2-core_2.12:0.10.7", "com.chuusai:shapeless_2.12:2.3.3", "com.fasterxml.jackson.core:jackson-annotations:2.8.0", "com.fasterxml.jackson.core:jackson-core:2.8.4", "com.fasterxml.jackson.core:jackson-databind:2.8.4", "com.github.alexarchambault:argonaut-shapeless_6.2_2.12:1.2.0-M9", "com.github.alexarchambault:case-app-annotations_2.12:2.0.0-M5", "com.github.alexarchambault:case-app-util_2.12:2.0.0-M5", "com.github.alexarchambault:case-app_2.12:2.0.0-M5", "com.github.javaparser:javaparser-core:3.2.5", "com.github.jupyter:jvm-repr:0.4.0", "com.github.pathikrit:better-files_2.12:3.6.0", "com.github.scopt:scopt_2.12:3.5.0", "com.google.protobuf:protobuf-java:3.6.0", "com.lihaoyi:acyclic_2.12:0.1.5", "com.lihaoyi:ammonite-ops_2.12:1.5.0-4-6296f20", "com.lihaoyi:ammonite-runtime_2.12:1.5.0-4-6296f20", "com.lihaoyi:ammonite-terminal_2.12:1.5.0-4-6296f20", "com.lihaoyi:ammonite-util_2.12:1.5.0-4-6296f20", "com.lihaoyi:fastparse_2.12:2.0.5", "com.lihaoyi:geny_2.12:0.1.5", "com.lihaoyi:os-lib_2.12:0.2.6", 
"com.lihaoyi:scalaparse_2.12:2.0.5", "com.lihaoyi:scalatags_2.12:0.6.7", "com.lihaoyi:sourcecode_2.12:0.1.5", "com.lihaoyi:ujson_2.12:0.7.1", "com.lihaoyi:upack_2.12:0.7.1", "com.lihaoyi:upickle-core_2.12:0.7.1", "com.lihaoyi:upickle-implicits_2.12:0.7.1", "com.lihaoyi:upickle_2.12:0.7.1", "com.lihaoyi:utest_2.12:0.6.4", "com.thesamet.scalapb:lenses_2.12:0.8.0", "com.thesamet.scalapb:scalapb-json4s_2.12:0.7.1", "com.thesamet.scalapb:scalapb-runtime_2.12:0.8.0", "com.thoughtworks.paranamer:paranamer:2.8", "com.thoughtworks.qdox:qdox:2.0-M9", "io.argonaut:argonaut_2.12:6.2.2", "io.get-coursier:coursier-cache_2.12:1.1.0-M7", "io.get-coursier:coursier_2.12:1.1.0-M7", "io.github.soc:directories:11", "io.undertow:undertow-core:2.0.13.Final", "net.java.dev.jna:jna:4.2.2", "org.javassist:javassist:3.21.0-GA", "org.jboss.logging:jboss-logging:3.3.2.Final", "org.jboss.threads:jboss-threads:2.3.0.Beta2", "org.jboss.xnio:xnio-api:3.6.5.Final", "org.jboss.xnio:xnio-nio:3.6.5.Final", "org.jline:jline-reader:3.6.2", "org.jline:jline-terminal-jna:3.6.2", "org.jline:jline-terminal:3.6.2", "org.json4s:json4s-ast_2.12:3.5.1", "org.json4s:json4s-core_2.12:3.5.1", "org.json4s:json4s-jackson_2.12:3.5.1", "org.json4s:json4s-scalap_2.12:3.5.1", "org.scala-lang.modules:scala-xml_2.12:1.1.0", "org.scala-sbt:test-interface:1.0", "org.scalaj:scalaj-http_2.12:2.4.0", "org.scalameta:cli_2.12:4.1.4", "org.scalameta:common_2.12:4.1.4", "org.scalameta:dialects_2.12:4.1.4", "org.scalameta:fastparse-utils_2.12:1.0.0", "org.scalameta:fastparse_2.12:1.0.0", "org.scalameta:inputs_2.12:4.1.4", "org.scalameta:io_2.12:4.1.4", "org.scalameta:metabrowse-cli_2.12:0.2.1", "org.scalameta:metabrowse-core_2.12:0.2.1", "org.scalameta:metabrowse-server_2.12:0.2.1", "org.scalameta:metacp_2.12:4.1.4", "org.scalameta:mtags_2.12:0.2.0", "org.scalameta:parsers_2.12:4.1.4", "org.scalameta:quasiquotes_2.12:4.1.4", "org.scalameta:scalameta_2.12:4.1.4", "org.scalameta:semanticdb_2.12:4.1.4", "org.scalameta:tokenizers_2.12:4.1.4", "org.scalameta:tokens_2.12:4.1.4", "org.scalameta:transversers_2.12:4.1.4", "org.scalameta:trees_2.12:4.1.4", "org.slf4j:slf4j-api:1.8.0-beta2", "org.slf4j:slf4j-nop:1.7.25", "org.typelevel:cats-core_2.12:1.1.0", "org.typelevel:cats-effect_2.12:0.10", "org.typelevel:cats-kernel_2.12:1.1.0", "org.typelevel:cats-macros_2.12:1.1.0", "org.typelevel:machinist_2.12:0.6.2", "org.typelevel:macro-compat_2.12:1.1.1", "org.wildfly.client:wildfly-client-config:1.0.0.Final", "org.wildfly.common:wildfly-common:1.3.0.Final", "org.zeromq:jeromq:0.4.3", "org.zeromq:jnacl:0.1.0", "sh.almond:channels_2.12:0.2.2", "sh.almond:interpreter-api_2.12:0.2.2", "sh.almond:interpreter_2.12:0.2.2", "sh.almond:kernel_2.12:0.2.2", "sh.almond:logger_2.12:0.2.2", "sh.almond:protocol_2.12:0.2.2" ) def assertSameElements(expected: Set[String], got: Set[String]): Unit = if (expected != got) { val missing = expected -- got for (x <- missing.toVector.sorted) System.err.println(s"Missing element $x") val extra = got -- expected for (x <- extra.toVector.sorted) System.err.println(s"Extra element $x") assert(missing.nonEmpty || extra.nonEmpty) sys.error("sets differ") } test("force") { "2.12.7" - async { val res = await( runner.resolve( Seq( dep"sh.almond:scala-kernel_2.12.7:0.2.2", dep"org.scalameta:interactive_2.12.7:4.1.4" ), extraRepos = Seq(Repositories.jitpack), mapDependencies = Some(coursier.core.Resolution.overrideScalaModule("2.12.7")) ) ) val deps = res.dependencies.map { dep => s"${dep.module}:${dep.version}" } val expectedDeps = sharedDeps ++ Seq( 
"com.lihaoyi:ammonite-interp_2.12.7:1.5.0-4-6296f20", "com.lihaoyi:ammonite-repl_2.12.7:1.5.0-4-6296f20", "com.lihaoyi:fansi_2.12:0.2.4", "com.lihaoyi:pprint_2.12:0.5.2", "org.scala-lang:scala-compiler:2.12.7", "org.scala-lang:scala-library:2.12.7", "org.scala-lang:scala-reflect:2.12.7", "org.scala-lang:scalap:2.12.7", "org.scalameta:interactive_2.12.7:4.1.4", "org.scalameta:semanticdb-scalac-core_2.12.7:4.1.4", "sh.almond:scala-interpreter_2.12.7:0.2.2", "sh.almond:scala-kernel-api_2.12.7:0.2.2", "sh.almond:scala-kernel_2.12.7:0.2.2" ) assertSameElements(expectedDeps, deps) } "overrideFullSuffix" - async { val res = await( runner.resolve( Seq( dep"sh.almond:scala-kernel_2.12.8:0.2.2", dep"org.scalameta:interactive_2.12.8:4.1.4" ), extraRepos = Seq(Repositories.jitpack), mapDependencies = Some(coursier.core.Resolution.overrideFullSuffix("2.12.8")) ) ) val deps = res.dependencies.map { dep => s"${dep.module}:${dep.version}" } val expectedDeps = sharedDeps ++ Seq( "com.lihaoyi:ammonite-interp_2.12.8:1.5.0-4-6296f20", "com.lihaoyi:ammonite-repl_2.12.8:1.5.0-4-6296f20", "com.lihaoyi:fansi_2.12:0.2.4", "com.lihaoyi:pprint_2.12:0.5.2", "org.scala-lang:scala-compiler:2.12.8", "org.scala-lang:scala-library:2.12.8", "org.scala-lang:scala-reflect:2.12.8", "org.scala-lang:scalap:2.12.8", "org.scalameta:interactive_2.12.8:4.1.4", "org.scalameta:semanticdb-scalac-core_2.12.8:4.1.4", "sh.almond:scala-interpreter_2.12.8:0.2.2", "sh.almond:scala-kernel-api_2.12.8:0.2.2", "sh.almond:scala-kernel_2.12.8:0.2.2" ) assertSameElements(expectedDeps, deps) } } test("dontForce") { "2.12.7" - async { val res = await( runner.resolve( Seq( dep"sh.almond:scala-kernel_2.12.7:0.2.2", dep"org.scalameta:interactive_2.12.7:4.1.4" ), extraRepos = Seq(Repositories.jitpack) ) ) val deps = res.dependencies.map { dep => s"${dep.module}:${dep.version}" } val expectedDeps = sharedDeps ++ Seq( "com.lihaoyi:ammonite-interp_2.12.7:1.5.0-4-6296f20", "com.lihaoyi:ammonite-repl_2.12.7:1.5.0-4-6296f20", "com.lihaoyi:fansi_2.12:0.2.4", "com.lihaoyi:pprint_2.12:0.5.2", // borked classpath - 2.12.7 full cross-versioned stuff, along scala 2.12.8 JARs "org.scala-lang:scala-compiler:2.12.8", "org.scala-lang:scala-library:2.12.8", "org.scala-lang:scala-reflect:2.12.8", "org.scala-lang:scalap:2.12.8", "org.scalameta:interactive_2.12.7:4.1.4", "org.scalameta:semanticdb-scalac-core_2.12.7:4.1.4", "sh.almond:scala-interpreter_2.12.7:0.2.2", "sh.almond:scala-kernel-api_2.12.7:0.2.2", "sh.almond:scala-kernel_2.12.7:0.2.2" ) assertSameElements(expectedDeps, deps) } "2.12.8" - async { val res = await( runner.resolve( Seq( dep"sh.almond:scala-kernel_2.12.8:0.2.2", dep"org.scalameta:interactive_2.12.8:4.1.4" ), extraRepos = Seq(Repositories.jitpack) ) ) val deps = res.dependencies.map { dep => s"${dep.module}:${dep.version}" } val expectedDeps = sharedDeps ++ Seq( "com.lihaoyi:ammonite-interp_2.12.8:1.5.0-4-6296f20", "com.lihaoyi:ammonite-repl_2.12.8:1.5.0-4-6296f20", // not sure why fansi and pprint differ from the others here "com.lihaoyi:fansi_2.12:0.2.5", "com.lihaoyi:pprint_2.12:0.5.3", "org.scala-lang:scala-compiler:2.12.8", "org.scala-lang:scala-library:2.12.8", "org.scala-lang:scala-reflect:2.12.8", "org.scala-lang:scalap:2.12.8", // not forcing the scala version -> borked classpath, with both 2.12.7 and 2.12.8 stuff "org.scalameta:interactive_2.12.7:4.0.0", "org.scalameta:semanticdb-scalac-core_2.12.7:4.0.0", "org.scalameta:interactive_2.12.8:4.1.4", "org.scalameta:semanticdb-scalac-core_2.12.8:4.1.4", 
"sh.almond:scala-interpreter_2.12.8:0.2.2", "sh.almond:scala-kernel-api_2.12.8:0.2.2", "sh.almond:scala-kernel_2.12.8:0.2.2" ) assertSameElements(expectedDeps, deps) } } } } }
coursier/coursier
modules/tests/shared/src/test/scala/coursier/test/CentralTests.scala
Scala
apache-2.0
56,491