code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.badrecordloger
import java.io.File
import org.apache.spark.sql.Row
import org.apache.spark.sql.common.util.QueryTest
import org.apache.spark.sql.hive.HiveContext
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
/**
* Test Class for detailed query on timestamp dataDataTypes
*
*
*/
/**
 * Tests loading of bad (unparsable) records into tables whose `weight` column
 * uses various data types, with BAD_RECORDS_ACTION=IGNORE so offending rows
 * are silently dropped. Each test then verifies the surviving row count.
 */
class NumericDimensionBadRecordTest extends QueryTest with BeforeAndAfterAll {

  var hiveContext: HiveContext = _

  // Every table created by this suite; dropped before and after the run.
  private val tableNames =
    Seq("intDataType", "longDataType", "doubleDataType", "floatDataType",
      "bigDecimalDataType", "stringDataType")

  /** Drops every table used by this suite, ignoring absent ones. */
  private def dropAllTables(): Unit =
    tableNames.foreach(name => sql(s"drop table IF EXISTS $name"))

  /**
   * Creates a table whose `weight` column has the given data type and loads
   * the shared bad-records CSV into it, ignoring unparsable rows.
   */
  private def createAndLoadTable(tableName: String, weightType: String): Unit = {
    sql(s"create table $tableName(name String, dob timestamp, weight $weightType)" +
      " STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='weight')")
    val csvFilePath = s"$resourcesPath/badrecords/dummy.csv"
    sql(s"LOAD DATA local inpath '$csvFilePath' INTO table $tableName options " +
      "('BAD_RECORDS_LOGGER_ENABLE'='true','BAD_RECORDS_ACTION'='IGNORE')")
  }

  override def beforeAll {
    try {
      dropAllTables()
      // Redirect bad-record logs to the build output folder.
      CarbonProperties.getInstance()
        .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC,
          new File("./target/test/badRecords").getCanonicalPath)
      CarbonProperties.getInstance()
        .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
      // One table per data type under test, all fed from the same CSV.
      createAndLoadTable("intDataType", "int")
      createAndLoadTable("longDataType", "long")
      createAndLoadTable("doubleDataType", "double")
      createAndLoadTable("floatDataType", "float")
      createAndLoadTable("bigDecimalDataType", "decimal(3,1)")
      createAndLoadTable("stringDataType", "String")
    } catch {
      // Setup failure: report it and restore the default timestamp format so
      // other suites are not affected; individual tests will then fail.
      case x: Throwable =>
        System.out.println(x.getMessage)
        CarbonProperties.getInstance()
          .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
    }
  }

  // For each numeric type, the unparsable row is ignored, leaving 2 rows.
  test("select count(*) from intDataType") {
    checkAnswer(sql("select count(*) from intDataType"), Seq(Row(2)))
  }

  test("select count(*) from longDataType") {
    checkAnswer(sql("select count(*) from longDataType"), Seq(Row(2)))
  }

  test("select count(*) from doubleDataType") {
    checkAnswer(sql("select count(*) from doubleDataType"), Seq(Row(2)))
  }

  test("select count(*) from floatDataType") {
    checkAnswer(sql("select count(*) from floatDataType"), Seq(Row(2)))
  }

  test("select count(*) from bigDecimalDataType") {
    checkAnswer(sql("select count(*) from bigDecimalDataType"), Seq(Row(2)))
  }

  // String accepts every value, so all 3 rows load.
  test("select count(*) from stringDataType") {
    checkAnswer(sql("select count(*) from stringDataType"), Seq(Row(3)))
  }

  override def afterAll {
    dropAllTables()
    // Restore the default timestamp format changed in beforeAll.
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
  }
}
} | Sephiroth-Lin/incubator-carbondata | integration/spark2/src/test/scala/org/apache/spark/carbondata/datatype/NumericDimensionBadRecordTest.scala | Scala | apache-2.0 | 6,379 |
package org.hammerlab.guacamole.assembly
import org.hammerlab.guacamole.loci.partitioning.HalfWindowConfig
import org.kohsuke.args4j.{Option => Args4jOption}
/**
 * Command-line arguments shared by assembly-based callers.
 *
 * Mixes in [[HalfWindowConfig]] so that `assemblyWindowRange` doubles as the
 * half-window size (see `halfWindowSize` below).
 */
trait AssemblyArgs
  extends HalfWindowConfig {

  // Length of the kmers used to build the DeBruijn assembly graph.
  @Args4jOption(
    name = "--kmer-size",
    usage = "Length of kmer used for DeBruijn Graph assembly"
  )
  var kmerSize: Int = 45

  // Number of bases on either side considered during assembly.
  @Args4jOption(
    name = "--assembly-window-range",
    usage = "Number of bases before and after to check for additional matches or deletions"
  )
  var assemblyWindowRange: Int = 20

  // The assembly window range is reused as the sliding half-window size.
  override def halfWindowSize: Int = assemblyWindowRange

  // Kmers occurring fewer than this many times are excluded.
  @Args4jOption(
    name = "--min-occurrence",
    usage = "Minimum occurrences to include a kmer "
  )
  var minOccurrence: Int = 3

  // Minimum variant allele frequency required before a region is investigated
  // (units not stated here — presumably a percentage; TODO confirm at call site).
  @Args4jOption(
    name = "--min-area-vaf",
    usage = "Minimum variant allele frequency to investigate area"
  )
  var minAreaVaf: Int = 5

  // Kmers whose mean base quality falls below this threshold are excluded.
  @Args4jOption(
    name = "--min-mean-kmer-quality",
    usage = "Minimum mean base quality to include a kmer"
  )
  var minMeanKmerQuality: Int = 0

  // When true, the assembly process is skipped in inactive regions.
  @Args4jOption(
    name = "--shortcut-assembly",
    usage = "Skip assembly process in inactive regions"
  )
  var shortcutAssembly: Boolean = false
}
| hammerlab/guacamole | src/main/scala/org/hammerlab/guacamole/assembly/AssemblyArgs.scala | Scala | apache-2.0 | 1,174 |
package org.pgscala.embedded
import java.io._
import java.nio.file.Files
import java.nio.file.attribute.PosixFilePermission
import java.util.EnumSet
import java.util.zip._
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream
import org.apache.commons.io.IOUtils
import scala.annotation.tailrec
/** ArchiveProcessor has the ability to filter out files completely
* or modify their content while streaming the archive.
*
* In case archive entries were modified, their checksum and
* modifiedAt timestamp will be mutated. */
object ArchiveUnpacker {

  /**
   * Extracts `archive` into `targetFolder`, creating the folder if needed.
   * The archive type (ZIP vs. gzipped TAR) is detected by peeking at the
   * first two bytes of content, not from the file name.
   */
  def unpack(archive: File, targetFolder: File): Unit = {
    val is = new BufferedInputStream(new FileInputStream(archive))
    // peek into the archive to detect archive type via magic number (first 2 bytes)
    val magicNumber = {
      is.mark(2)
      val header = new Array[Byte](2)
      is.read(header)
      is.reset()
      // ISO-8859-1 maps each byte 1:1 to a char, so the match below is
      // effectively a raw byte comparison.
      new String(header, "ISO-8859-1")
    }
    if (!targetFolder.isDirectory) {
      targetFolder.mkdirs()
    }
    magicNumber match {
      case "PK" =>
        // ZIP local file headers start with the ASCII bytes "PK".
        unpackZipArchive(is, targetFolder)
      case "\\u001f\\u008b" =>
        // NOTE(review): gzip's magic bytes are 0x1f 0x8b — confirm this
        // literal encodes those two characters rather than a literal
        // backslash-u sequence, which would never match.
        unpackTarGZipArchive(is, targetFolder)
      case _ =>
        // NOTE(review): message says "from filename" but detection above is
        // content-based — consider rewording.
        sys.error(s"""Could not detect archive type from filename - needed ".zip" or ".tar.gz""")
    }
  }

  /** Streams a ZIP archive entry by entry into `targetFolder`. */
  private[this] def unpackZipArchive(is: InputStream, targetFolder: File): Unit = {
    val zis = new ZipInputStream(new BufferedInputStream(is))
    try {
      @tailrec
      def unpack(): Unit = zis.getNextEntry() match {
        case null => // end of archive
        case ze if ze.isDirectory =>
          unpack() // do not create empty folders
        case ze =>
          val name = ze.getName
          val body = IOUtils.toByteArray(zis)
          val target = new File(targetFolder, name)
          // File entries may arrive before their parent directory entry.
          if (!target.getParentFile.isDirectory) {
            target.getParentFile.mkdirs()
          }
          val fos = new FileOutputStream(target)
          try {
            fos.write(body)
          } finally {
            fos.close()
          }
          // Preserve the entry's modification timestamp on the extracted file.
          target.setLastModified(ze.getLastModifiedTime.toMillis)
          if (Util.isUnix) {
            // TODO: this means we're on a Mac, unpacking a zip archive
            // We need to read the actual permissions form the "extra"
            // property of each zip entry and convert them insteaf of
            // flipping on all permissions
            val allPermissions = EnumSet.allOf(classOf[PosixFilePermission])
            Files.setPosixFilePermissions(target.toPath, allPermissions)
          }
          unpack()
      }
      unpack()
    } finally {
      zis.close()
    }
  }

  // POSIX permissions in reverse declaration order so that index i in this
  // sequence corresponds to bit i of a numeric Unix mode.
  private[this] val PosixFilePermissions = PosixFilePermission.values.reverse
  private[this] val PosixFileIndices = PosixFilePermissions.indices.toSet

  /** Converts a numeric Unix mode (e.g. 0755) into a set of POSIX permissions. */
  private[this] def mode2Posix(mode: Int): Set[PosixFilePermission] =
    PosixFileIndices filter { i =>
      val mask = 1 << i
      (mode & mask) == mask
    } map PosixFilePermissions

  /** Streams a gzipped TAR archive entry by entry into `targetFolder`. */
  private[this] def unpackTarGZipArchive(is: InputStream, targetFolder: File): Unit = {
    val tgis = new TarArchiveInputStream(new GzipCompressorInputStream(is))
    try {
      @tailrec
      def unpack(): Unit = tgis.getNextTarEntry() match {
        case null => // end of archive
        case tge if tge.isDirectory =>
          unpack() // do not create empty folders
        case tge =>
          val name = tge.getName
          val target = new File(targetFolder, name)
          if (!target.getParentFile.isDirectory) {
            target.getParentFile.mkdirs()
          }
          if (Util.isUnix && tge.isSymbolicLink) {
            // Recreate symlinks instead of copying their target's bytes.
            val destination = tge.getLinkName
            Files.createSymbolicLink(target.toPath, new File(destination).toPath)
            // TODO: Change symlink date (setting lastModified does not affect it)
          } else {
            val body = IOUtils.toByteArray(tgis)
            val fos = new FileOutputStream(target)
            try {
              fos.write(body)
            } finally {
              fos.close()
            }
            target.setLastModified(tge.getModTime.getTime)
            if (Util.isUnix) {
              // Restore the permission bits recorded in the TAR entry.
              import scala.collection.JavaConverters._
              Files.setPosixFilePermissions(target.toPath, mode2Posix(tge.getMode).asJava)
            }
          }
          unpack()
      }
      unpack()
    } finally {
      tgis.close()
    }
  }
}
| melezov/pgscala-embedded | src/main/scala/org/pgscala/embedded/ArchiveUnpacker.scala | Scala | mit | 4,519 |
package ch.epfl.bluebrain.nexus.iam.routes
import java.time.Instant
import java.util.regex.Pattern.quote
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.testkit.ScalatestRouteTest
import ch.epfl.bluebrain.nexus.commons.test.{Randomness, Resources}
import ch.epfl.bluebrain.nexus.iam.auth.AccessToken
import ch.epfl.bluebrain.nexus.iam.config.{AppConfig, Settings}
import ch.epfl.bluebrain.nexus.iam.marshallers.instances._
import ch.epfl.bluebrain.nexus.iam.permissions._
import ch.epfl.bluebrain.nexus.iam.realms.Realms
import ch.epfl.bluebrain.nexus.iam.testsyntax._
import ch.epfl.bluebrain.nexus.iam.types.Identity.Anonymous
import ch.epfl.bluebrain.nexus.iam.types.{Caller, Permission, ResourceF}
import com.typesafe.config.{Config, ConfigFactory}
import io.circe.Json
import monix.eval.Task
import org.mockito.matchers.MacroBasedMatchers
import org.mockito.{IdiomaticMockito, Mockito}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfter, Matchers, WordSpecLike}
import scala.concurrent.duration._
//noinspection TypeAnnotation,RedundantDefaultArgument
/**
 * Route-level tests for the permissions endpoints: fetching (current and by
 * revision), replacing, appending, subtracting and deleting permissions,
 * backed by mocked `Permissions` and `Realms` services.
 */
class PermissionsRoutesSpec
    extends WordSpecLike
    with Matchers
    with ScalatestRouteTest
    with BeforeAndAfter
    with MacroBasedMatchers
    with Resources
    with ScalaFutures
    with IdiomaticMockito
    with Randomness {

  override implicit def patienceConfig: PatienceConfig = PatienceConfig(3 second, 100 milliseconds)

  override def testConfig: Config = ConfigFactory.load("test.conf")

  private val appConfig: AppConfig = Settings(system).appConfig
  private implicit val http = appConfig.http

  // Mocked collaborators, reset before every test.
  private val perms: Permissions[Task] = mock[Permissions[Task]]
  private val realms: Realms[Task] = mock[Realms[Task]]

  before {
    Mockito.reset(perms, realms)
    // All requests resolve to the anonymous caller.
    realms.caller(any[AccessToken]) shouldReturn Task.pure(Caller.anonymous)
  }

  /** Expected JSON body for a full permissions resource at revision `rev`. */
  def response(rev: Long): Json =
    jsonContentOf(
      "/permissions/permissions-template.json",
      Map(quote("{createdBy}") -> Anonymous.id.asString, quote("{updatedBy}") -> Anonymous.id.asString)
    ) deepMerge Json.obj("_rev" -> Json.fromLong(rev))

  /** A permissions resource at revision `rev` carrying the permission set `set`. */
  def resource(rev: Long, set: Set[Permission]): Resource =
    ResourceF(id, rev, types, Instant.EPOCH, Anonymous, Instant.EPOCH, Anonymous, set)

  /** Expected JSON metadata body (no permission values) at revision `rev`. */
  def metaResponse(rev: Long): Json =
    jsonContentOf(
      "/permissions/permissions-meta-template.json",
      Map(quote("{createdBy}") -> Anonymous.id.asString, quote("{updatedBy}") -> Anonymous.id.asString)
    ) deepMerge Json.obj("_rev" -> Json.fromLong(rev))

  /** Resource metadata (unit value) at revision `rev`. */
  def meta(rev: Long): ResourceF[Unit] =
    ResourceF.unit(id, rev, types, Instant.EPOCH, Anonymous, Instant.EPOCH, Anonymous)

  /** Expected error body when the mandatory `rev` query param is missing. */
  def missingParams: Json =
    jsonContentOf("/permissions/missing-rev.json")

  "A PermissionsRoute" should {
    val routes = Routes.wrap(new PermissionsRoutes(perms, realms).routes)

    "return the default minimum permissions" in {
      perms.fetch(any[Caller]) shouldReturn Task.pure(resource(0L, appConfig.permissions.minimum))
      Get("/permissions") ~> routes ~> check {
        responseAs[Json].sort shouldEqual response(0L).sort
        status shouldEqual StatusCodes.OK
      }
    }

    "return missing rev params" when {
      "attempting to delete" in {
        // DELETE without ?rev must be rejected up front.
        Delete("/permissions") ~> routes ~> check {
          responseAs[Json].sort shouldEqual missingParams.sort
          status shouldEqual StatusCodes.BadRequest
        }
      }
    }

    "replace permissions" in {
      perms.replace(any[Set[Permission]], 2L)(any[Caller]) shouldReturn Task.pure(Right(meta(0L)))
      val json = Json.obj("permissions" -> Json.arr(Json.fromString("random/a")))
      Put("/permissions?rev=2", json) ~> routes ~> check {
        responseAs[Json].sort shouldEqual metaResponse(0L).sort
        status shouldEqual StatusCodes.OK
      }
    }

    // When no ?rev is given, PUT is treated as acting on revision 0.
    "default rev to 0L for replace permissions" in {
      perms.replace(any[Set[Permission]], 0L)(any[Caller]) shouldReturn Task.pure(Right(meta(0L)))
      val json = Json.obj("permissions" -> Json.arr(Json.fromString("random/a")))
      Put("/permissions", json) ~> routes ~> check {
        responseAs[Json].sort shouldEqual metaResponse(0L).sort
        status shouldEqual StatusCodes.OK
      }
    }

    "append new permissions" in {
      perms.append(any[Set[Permission]], 2L)(any[Caller]) shouldReturn Task.pure(Right(meta(0L)))
      val json = Json.obj("@type" -> Json.fromString("Append"), "permissions" -> Json.arr(Json.fromString("random/a")))
      Patch("/permissions?rev=2", json) ~> routes ~> check {
        responseAs[Json].sort shouldEqual metaResponse(0L).sort
        status shouldEqual StatusCodes.OK
      }
    }

    "subtract permissions" in {
      perms.subtract(any[Set[Permission]], 2L)(any[Caller]) shouldReturn Task.pure(Right(meta(0L)))
      val json =
        Json.obj("@type" -> Json.fromString("Subtract"), "permissions" -> Json.arr(Json.fromString("random/a")))
      Patch("/permissions?rev=2", json) ~> routes ~> check {
        responseAs[Json].sort shouldEqual metaResponse(0L).sort
        status shouldEqual StatusCodes.OK
      }
    }

    "delete permissions" in {
      perms.delete(2L)(any[Caller]) shouldReturn Task.pure(Right(meta(0L)))
      Delete("/permissions?rev=2") ~> routes ~> check {
        responseAs[Json].sort shouldEqual metaResponse(0L).sort
        status shouldEqual StatusCodes.OK
      }
    }

    "return 404 for wrong revision" in {
      perms.fetchAt(any[Long])(any[Caller]) shouldReturn Task.pure(None)
      Get("/permissions?rev=2") ~> routes ~> check {
        status shouldEqual StatusCodes.NotFound
        responseAs[Json] shouldEqual jsonContentOf("/resources/not-found.json")
      }
    }

    "return 200 for correct revision" in {
      perms.fetchAt(any[Long])(any[Caller]) shouldReturn Task.pure(Some(resource(3L, appConfig.permissions.minimum)))
      Get("/permissions?rev=2") ~> routes ~> check {
        status shouldEqual StatusCodes.OK
        responseAs[Json].sort shouldEqual response(3L).sort
      }
    }

    // A randomly generated (long) permission name is rejected with 400.
    "return 400 when trying to create permission which is too long" in {
      perms.append(any[Set[Permission]], 2L)(any[Caller]) shouldReturn Task.pure(Right(meta(0L)))
      val json =
        Json.obj("@type" -> Json.fromString("Append"),
                 "permissions" -> Json.arr(Json.fromString(s"${genString()}/${genString()}")))
      Patch("/permissions?rev=2", json) ~> routes ~> check {
        status shouldEqual StatusCodes.BadRequest
      }
    }
  }
}
| hygt/nexus-iam | src/test/scala/ch/epfl/bluebrain/nexus/iam/routes/PermissionsRoutesSpec.scala | Scala | apache-2.0 | 6,536 |
package actions
import play.api.Play
import play.api.mvc._
import scala.concurrent.Future
import org.apache.commons.codec.binary.Base64.decodeBase64
/**
 * Action builder enforcing HTTP Basic authentication against credentials
 * configured under `auth.username` / `auth.password` (default: admin/admin).
 */
object AuthenticationAction extends ActionBuilder[Request] {

  // Challenge header returned on failed or missing authentication.
  private val headers = "WWW-Authenticate" -> "Basic realm=\\"whosmad\\""

  private val login = Play.current.configuration.getString("auth.username").getOrElse("admin")
  private val password = Play.current.configuration.getString("auth.password").getOrElse("admin")

  /**
   * Extracts the (username, password) pair from a Basic `Authorization`
   * header, if present and well-formed.
   */
  private def getUser(request: RequestHeader): Option[(String, String)] = {
    request.headers.get("Authorization").flatMap { authorization =>
      authorization.split(" ").drop(1).headOption.flatMap { encoded =>
        // Split on the first ':' only so passwords containing ':' still work
        // (RFC 7617 allows colons within the password part).
        new String(decodeBase64(encoded.getBytes)).split(":", 2) match {
          case Array(user, pass) => Some((user, pass))
          case _                 => None
        }
      }
    }
  }

  /** Runs `block` when valid credentials are supplied, otherwise replies 401. */
  def invokeBlock[A](request: Request[A], block: (Request[A]) => Future[Result]) = {
    getUser(request) match {
      case Some((user, pass)) if user == login && pass == password => block(request)
      case _ => Future.successful(Results.Unauthorized.withHeaders(headers))
    }
  }
}
| Rydgel/Shorty | app/actions/AuthenticationAction.scala | Scala | mit | 1,163 |
package net.liftmodules.ng
package test.snippet
import Angular._
import net.liftweb.actor.LAFuture
import net.liftmodules.ng.test.model.StringInt
import net.liftweb.common.{Empty, Failure, Full, Box}
import net.liftweb.util.Schedule
import net.liftweb.util.Helpers._
import net.liftweb.http.S
import scala.concurrent.{ Future, Promise => ScalaPromise }
import scala.util.Try
/**
 * Model whose fields are Lift futures at several nesting levels, used to
 * exercise resolution of futures embedded in Angular models.
 */
case class EmbeddedFutures(
  resolved: LAFuture[Box[String]],  // satisfied immediately in buildModel
  failed: LAFuture[Box[String]],    // eventually satisfied with a Failure
  string: LAFuture[Box[String]],
  obj: LAFuture[Box[StringInt]],
  arr: List[LAFuture[Box[String]]],
  fobj: LAFuture[Box[EmbeddedObj]]  // resolves to an object holding more futures
) extends NgModel

/** Object nested inside an [[EmbeddedFutures]] future, itself holding futures. */
case class EmbeddedObj(
  resolved: LAFuture[Box[String]],
  failed: LAFuture[Box[String]],
  string: LAFuture[Box[String]],
  obj: LAFuture[Box[StringInt]]
) extends NgModel

/** Same shape as [[EmbeddedFutures]] (minus arr/fobj) built from standard Scala futures. */
case class EmbeddedScalaFutures(
  resolved: Future[String],
  failed: Future[String],
  string: Future[String],
  obj: Future[StringInt]
) extends NgModel
/**
 * Snippet backing the embedded-futures test page: builds models containing
 * LAFutures / Scala Futures that are satisfied asynchronously after random
 * delays, so client-side future resolution can be exercised.
 */
object EmbeddedFuturesSnips {

  /** Registers the Angular module/factory serving the test models. */
  def services = renderIfNotAlreadyDefined(
    angular.module("EmbeddedFutures")
      .factory("embeddedFutureServices", jsObjFactory()
        .future("fetch", {
          // Also poke the comet actor so it can push updates independently.
          S.session.map(_.sendCometActorMessage("EmbeddedFutureActor", Empty, "go"))
          buildFuture
        })
        .jsonCall("sfetch", { Full(buildScalaModel) })
      )
  )

  /** Builds the LAFuture-based model; most futures resolve after a random delay. */
  def buildModel = {
    val resolved = new LAFuture[Box[String]]
    resolved.satisfy(Full("resolved")) // satisfied immediately, not scheduled

    val failed = new LAFuture[Box[String]]
    satisfy(failed, Failure("failed"))

    val string = new LAFuture[Box[String]]
    satisfy(string, Full("future"))

    val obj = new LAFuture[Box[StringInt]]
    satisfy(obj, Full(StringInt("string", 42)))

    // Named vals instead of positional arr(0)/arr(1) indexing.
    val first = new LAFuture[Box[String]]
    val second = new LAFuture[Box[String]]
    val arr = List(first, second)
    satisfy(first, Full("Roll"))
    satisfy(second, Full("Tide!"))

    // A future resolving to an object that itself contains futures.
    val fobj = new LAFuture[Box[EmbeddedObj]]
    val fobjResolved = new LAFuture[Box[String]]
    val fobjFailed = new LAFuture[Box[String]]
    val fobjString = new LAFuture[Box[String]]
    val fobjObj = new LAFuture[Box[StringInt]]
    satisfy(fobj, Full(EmbeddedObj(fobjResolved, fobjFailed, fobjString, fobjObj)))
    fobjResolved.satisfy(Full("sub resolved"))
    satisfy(fobjFailed, Failure("sub fail"))
    satisfy(fobjString, Full("sub string"))
    satisfy(fobjObj, Full(StringInt("sub obj string", 44)))

    EmbeddedFutures(resolved, failed, string, obj, arr, fobj)
  }

  /** Wraps [[buildModel]] in an already-satisfied LAFuture. */
  def buildFuture = {
    val f = new LAFuture[Box[EmbeddedFutures]]
    f.satisfy(Full(buildModel))
    f
  }

  /** Runs `f` after a random delay of up to 3 seconds. */
  def sched(f: => Unit) = {
    def delay = (Math.random() * 3000).toInt.millis
    Schedule(() => { f }, delay)
  }

  /** Satisfies `future` with `value` after a random delay. */
  def satisfy[T](future: LAFuture[Box[T]], value: Box[T]): Unit = {
    sched(future.satisfy(value))
  }

  /** Completes `p` with `value` after a random delay. */
  def satisfy[T](p: ScalaPromise[T], value: T): Unit = {
    sched(p.complete(Try(value)))
  }

  /** Builds the Scala-Future-based model; values complete after random delays. */
  def buildScalaModel = {
    import scala.concurrent.ExecutionContext.Implicits.global
    val resolved = Future("resolved")

    val failed = ScalaPromise[String]()
    sched(failed.failure(new Exception("failed")))

    val string = ScalaPromise[String]()
    satisfy(string, "future")

    val obj = ScalaPromise[StringInt]()
    satisfy(obj, StringInt("string", 42))

    EmbeddedScalaFutures(resolved, failed.future, string.future, obj.future)
  }
}
| janheise/lift-ng-test-project | src/main/scala/net/liftmodules/ng/test/snippet/EmbeddedFuturesSnips.scala | Scala | apache-2.0 | 3,353 |
package com.atomist.rug.kind.grammar
import java.nio.charset.StandardCharsets
import com.atomist.source.FileArtifact
import com.atomist.tree.content.text.PositionedTreeNode
import com.atomist.tree.content.text.grammar.antlr.{AntlrGrammar, AstNodeCreationStrategy}
import com.atomist.util.Utils.withCloseable
import org.apache.commons.io.IOUtils
import org.springframework.core.io.DefaultResourceLoader
/**
* Convenient superclass for Antlr grammars.
*
* @param grammars g4 files
* @param topLevelProduction name of the top level production
*/
abstract class AntlrRawFileType(
  topLevelProduction: String,
  nodeCreationStrategy: AstNodeCreationStrategy,
  grammars: String*
)
  extends TypeUnderFile {

  // Load each grammar definition from the classpath as UTF-8 text.
  private val g4s: Seq[String] = {
    val resourceLoader = new DefaultResourceLoader()
    grammars map { grammarLocation =>
      val resource = resourceLoader.getResource(grammarLocation)
      withCloseable(resource.getInputStream) { stream =>
        IOUtils.toString(stream, StandardCharsets.UTF_8)
      }
    }
  }

  // Grammar construction is deferred until first use.
  private lazy val antlrGrammar = new AntlrGrammar(topLevelProduction, nodeCreationStrategy, g4s: _*)

  private[kind] def parser = antlrGrammar

  override def fileToRawNode(f: FileArtifact): Option[PositionedTreeNode] =
    antlrGrammar.parse(f.content)
}
| atomist/rug | src/main/scala/com/atomist/rug/kind/grammar/AntlrRawFileType.scala | Scala | gpl-3.0 | 1,373 |
package com.ajjpj.adiagram.render.text
import com.ajjpj.adiagram.ADiagramSpec
/** Unit tests for `FixedWidthTypeSetter.splitToWords`. */
class FixedWidthTypeSetterSpec extends ADiagramSpec {

  "FixedWidthTypeSetter.splitIntoWords" should "split a sentence into words" in {
    FixedWidthTypeSetter.splitToWords("The dog jumps") shouldBe Vector("The", " ", "dog", " ", "jumps")
  }

  // Fixture must contain two blanks to match the two blank tokens expected.
  it should "separate duplicate blanks" in {
    FixedWidthTypeSetter.splitToWords("a  b") shouldBe Vector("a", " ", " ", "b")
  }

  // Fixed typo ("leadinog") and fixture: two leading and two trailing blanks
  // are required by the expected Vector.
  it should "keep leading and trailing blanks" in {
    FixedWidthTypeSetter.splitToWords("  a  ") shouldBe Vector(" ", " ", "a", " ", " ")
  }

  it should "handle the empty string" in {
    FixedWidthTypeSetter.splitToWords("") shouldBe Vector()
  }
}
| arnohaase/a-diagram | src/test/scala/com/ajjpj/adiagram/render/text/FixedWidthTypeSetterSpec.scala | Scala | apache-2.0 | 713 |
/**
* Copyright (C) 2010 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.processor.handlers.xhtml
import org.orbeon.oxf.util.StringUtils._
import org.orbeon.oxf.xforms.analysis.ElementAnalysis
import org.orbeon.oxf.xforms.analysis.controls.{LHHA, StaticLHHASupport}
import org.orbeon.oxf.xforms.control.controls.XFormsOutputControl
import org.orbeon.oxf.xforms.control.{XFormsControl, XFormsSingleNodeControl}
import org.orbeon.oxf.xforms.processor.handlers.XFormsBaseHandler.isStaticReadonly
import org.orbeon.oxf.xforms.processor.handlers.{HandlerContext, HandlerSupport, XFormsBaseHandler}
import org.orbeon.oxf.xml.XMLConstants.{FORMATTING_URL_TYPE_QNAME, XHTML_NAMESPACE_URI}
import org.orbeon.oxf.xml.XMLReceiverHelper._
import org.orbeon.oxf.xml.{XMLReceiver, XMLReceiverHelper, XMLReceiverSupport, XMLUtils}
import org.orbeon.xforms.Constants.DUMMY_IMAGE_URI
import org.orbeon.xforms.XFormsNames._
import org.orbeon.xforms.XFormsCrossPlatformSupport
import org.xml.sax.Attributes
import org.xml.sax.helpers.AttributesImpl
/** Shared helper for the concrete xf:output handlers below. */
trait XFormsOutputHandler extends XFormsControlLifecyleHandler with HandlerSupport {

  /**
   * Builds the attribute set for the nested container element, adding the
   * appropriate CSS class: "xforms-field" when rendered as a field,
   * "xforms-output-output" otherwise.
   */
  protected def getContainerAttributes(
    effectiveId   : String,
    outputControl : XFormsSingleNodeControl,
    isField       : Boolean
  ): AttributesImpl = {
    // Add custom class
    val containerAttributes = super.getEmptyNestedControlAttributesMaybeWithId(effectiveId, outputControl, addId = true)
    val nestedCssClass = if (isField) "xforms-field" else "xforms-output-output"
    containerAttributes.addAttribute("", "class", "class", XMLReceiverHelper.CDATA, nestedCssClass)
    containerAttributes
  }
}
// Default xf:output handler
/**
 * Default xf:output handler: renders the control's external value as text
 * inside an xhtml:output (when the control has a label) or xhtml:span.
 */
class XFormsOutputDefaultHandler(
  uri             : String,
  localname       : String,
  qName           : String,
  localAtts       : Attributes,
  elementAnalysis : ElementAnalysis,
  handlerContext  : HandlerContext
) extends
  XFormsControlLifecyleHandler(
    uri,
    localname,
    qName,
    localAtts,
    elementAnalysis,
    handlerContext,
    repeating  = false,
    forwarding = false
  ) with XFormsOutputHandler {

  protected def handleControlStart(): Unit = {

    implicit val xmlReceiver: XMLReceiver = handlerContext.controller.output

    val outputControl = currentControl.asInstanceOf[XFormsOutputControl]

    val hasLabel =
      elementAnalysis.asInstanceOf[StaticLHHASupport].hasLHHA(LHHA.Label)

    val isMinimal =
      XFormsControl.appearances(elementAnalysis)(XFORMS_MINIMAL_APPEARANCE_QNAME)

    // Render as a "field" only when labeled and not using the minimal appearance.
    val containerAttributes =
      getContainerAttributes(getEffectiveId, outputControl, isField = hasLabel && ! isMinimal)

    // Handle accessibility attributes on control element
    XFormsBaseHandler.handleAccessibilityAttributes(attributes, containerAttributes)
    handleAriaByAtts(containerAttributes)

    // See https://github.com/orbeon/orbeon-forms/issues/3583
    // Labeled, non-static-readonly outputs are exposed as focusable readonly
    // textboxes for keyboard and assistive-technology users.
    if (hasLabel && ! isStaticReadonly(outputControl)) {
      containerAttributes.addAttribute("", "tabindex", "tabindex", XMLReceiverHelper.CDATA, "0")
      containerAttributes.addAttribute("", "aria-readonly", "aria-readonly", XMLReceiverHelper.CDATA, "true")
      containerAttributes.addAttribute("", "role", "role", XMLReceiverHelper.CDATA, "textbox")
    }

    // Use <output> only when a label exists to associate with it.
    val elementName = if (getStaticLHHA(getPrefixedId, LHHA.Label) ne null) "output" else "span"

    withElement(elementName, prefix = handlerContext.findXHTMLPrefix, uri = XHTML_NAMESPACE_URI, atts = containerAttributes) {
      val mediatypeValue = attributes.getValue("mediatype")
      val textValue = XFormsOutputControl.getExternalValueOrDefault(outputControl, mediatypeValue)
      if ((textValue ne null) && textValue.nonEmpty)
        xmlReceiver.characters(textValue.toCharArray, 0, textValue.length)
    }
  }
}
// xf:output[@mediatype = 'text/html']
/** xf:output[@mediatype = 'text/html']: streams the value as an HTML fragment within a div. */
class XFormsOutputHTMLHandler(
  uri            : String,
  localname      : String,
  qName          : String,
  localAtts      : Attributes,
  matched        : ElementAnalysis,
  handlerContext : HandlerContext
) extends XFormsControlLifecyleHandler(
  uri,
  localname,
  qName,
  localAtts,
  matched,
  handlerContext,
  repeating  = false,
  forwarding = false
) with XFormsOutputHandler {

  protected def handleControlStart(): Unit = {

    implicit val xmlReceiver: XMLReceiver = handlerContext.controller.output

    val outputControl = currentControl.asInstanceOf[XFormsOutputControl]
    val xhtmlPrefix = handlerContext.findXHTMLPrefix
    val containerAttributes = getContainerAttributes(getEffectiveId, outputControl, isField = false)

    // Handle accessibility attributes on <div>
    XFormsBaseHandler.handleAccessibilityAttributes(attributes, containerAttributes)

    withElement("div", prefix = xhtmlPrefix, uri = XHTML_NAMESPACE_URI, atts = containerAttributes) {
      val mediatypeValue = attributes.getValue("mediatype")
      val htmlValue = XFormsOutputControl.getExternalValueOrDefault(outputControl, mediatypeValue)
      // Parse the HTML fragment and re-emit it as SAX events.
      XFormsCrossPlatformSupport.streamHTMLFragment(htmlValue, outputControl.getLocationData, xhtmlPrefix)
    }
  }

  // Don't use @for as we are not pointing to an HTML control
  override def getForEffectiveIdWithNs(effectiveId: String): Option[String] = None

  // HTML output is block content, hence a div container rather than a span.
  override def getContainingElementName: String = "div"
}
// xf:output[starts-with(@appearance, 'image/')]
/** xf:output with an image mediatype: renders an xhtml:img element. */
class XFormsOutputImageHandler(
  uri            : String,
  localname      : String,
  qName          : String,
  localAtts      : Attributes,
  matched        : ElementAnalysis,
  handlerContext : HandlerContext
) extends XFormsControlLifecyleHandler(
  uri,
  localname,
  qName,
  localAtts,
  matched,
  handlerContext,
  repeating  = false,
  forwarding = false
) with XFormsOutputHandler {

  protected def handleControlStart(): Unit = {

    implicit val xmlReceiver: XMLReceiver = handlerContext.controller.output

    val outputControl = currentControl.asInstanceOf[XFormsOutputControl]
    val xhtmlPrefix = handlerContext.findXHTMLPrefix
    val mediatypeValue = attributes.getValue("mediatype")

    val containerAttributes = getContainerAttributes(getEffectiveId, outputControl, isField = false)

    // @src="..."
    // NOTE: If producing a template, or if the image URL is blank, we point to an existing dummy image
    val srcValue = XFormsOutputControl.getExternalValueOrDefault(outputControl, mediatypeValue)
    containerAttributes.addAttribute("", "src", "src", XMLReceiverHelper.CDATA, if (srcValue ne null) srcValue else DUMMY_IMAGE_URI)

    XFormsBaseHandler.handleAccessibilityAttributes(attributes, containerAttributes)
    // Forward xxf:* extension attributes (except class/accept) to the img element.
    currentControl.addExtensionAttributesExceptClassAndAcceptForHandler(containerAttributes, XXFORMS_NAMESPACE_URI)

    element("img", prefix = xhtmlPrefix, uri = XHTML_NAMESPACE_URI, atts = containerAttributes)
  }

  // Don't use @for as we are not pointing to an HTML control
  override def getForEffectiveIdWithNs(effectiveId: String): Option[String] = None
}
// xf:output[@appearance = 'xxf:text']
/** xf:output[@appearance = 'xxf:text']: writes the external value as bare text, no wrapper element. */
class XFormsOutputTextHandler(
  uri            : String,
  localname      : String,
  qName          : String,
  localAtts      : Attributes,
  matched        : ElementAnalysis,
  handlerContext : HandlerContext
) extends XFormsControlLifecyleHandler(
  uri,
  localname,
  qName,
  localAtts,
  matched,
  handlerContext,
  repeating  = false,
  forwarding = false
) with XFormsOutputHandler {

  protected def handleControlStart(): Unit = {

    val outputControl = currentControl.asInstanceOf[XFormsOutputControl]
    val xmlReceiver   = handlerContext.controller.output

    // Emit the value directly as character content.
    val externalValue = outputControl.getExternalValue()
    if ((externalValue ne null) && externalValue.nonEmpty)
      xmlReceiver.characters(externalValue.toCharArray, 0, externalValue.length)
  }

  // Don't use @for as we are not pointing to an HTML control.
  // Fixed: was `null`, which breaks the Option contract and can NPE callers;
  // return None, consistent with the other handlers in this file.
  override def getForEffectiveIdWithNs(effectiveId: String): Option[String] = None
}
// xf:output[@appearance = 'xxf:download']
class XFormsOutputDownloadHandler(
uri : String,
localname : String,
qName : String,
localAtts : Attributes,
matched : ElementAnalysis,
handlerContext : HandlerContext
) extends XFormsControlLifecyleHandler(
uri,
localname,
qName,
localAtts,
matched,
handlerContext,
repeating = false,
forwarding = false
) with XFormsOutputHandler {
// NOP because the label is output as the text within <a>
protected override def handleLabel(): Unit = ()
// Render the download control as an <a> element whose text is the label and
// whose @href is the control's external value.
protected def handleControlStart(): Unit = {

  implicit val context    : HandlerContext = handlerContext
  implicit val xmlReceiver: XMLReceiver    = handlerContext.controller.output

  val outputControl       = currentControl.asInstanceOf[XFormsOutputControl]
  val containerAttributes = getContainerAttributes(getEffectiveId, outputControl, isField = false)
  val xhtmlPrefix         = handlerContext.findXHTMLPrefix

  // For f:url-type="resource"
  withFormattingPrefix { formattingPrefix =>

    // Build the attributes for the <a>; mutates and returns `containerAttributes`
    def anchorAttributes = {

      val hrefValue = XFormsOutputControl.getExternalValueOrDefault(outputControl, null)

      if (hrefValue.isAllBlank) {
        // No URL so make sure a click doesn't cause navigation, and add class
        containerAttributes.addAttribute("", "href", "href", CDATA, "#")
        XMLReceiverSupport.addOrAppendToAttribute(containerAttributes, "class", "xforms-readonly")
      } else {
        // URL value
        containerAttributes.addAttribute("", "href", "href", CDATA, hrefValue)
      }

      // Specify resource URL type for proxy portlet
      containerAttributes.addAttribute(
        FORMATTING_URL_TYPE_QNAME.namespace.uri,
        FORMATTING_URL_TYPE_QNAME.localName,
        XMLUtils.buildQName(formattingPrefix, FORMATTING_URL_TYPE_QNAME.localName),
        CDATA, "resource")

      // Add _blank target in order to prevent:
      // 1. The browser replacing the current page, and
      // 2. The browser displaying the "Are you sure you want to navigate away from this page?" warning dialog
      // This, as of 2009-05, seems to be how most sites handle this
      containerAttributes.addAttribute("", "target", "target", CDATA, "_blank")

      // Output xxf:* extension attributes
      if (outputControl ne null)
        outputControl.addExtensionAttributesExceptClassAndAcceptForHandler(containerAttributes, XXFORMS_NAMESPACE_URI)

      containerAttributes
    }

    val aAttributes = anchorAttributes
    XFormsBaseHandler.handleAccessibilityAttributes(attributes, aAttributes)

    // The label becomes the anchor's text content (hence handleLabel is a NOP)
    withElement(localName = "a", prefix = xhtmlPrefix, uri = XHTML_NAMESPACE_URI, atts = aAttributes) {
      val labelValue             = currentControl.getLabel
      val mustOutputHTMLFragment = currentControl.isHTMLLabel
      XFormsBaseHandlerXHTML.outputLabelTextIfNotEmpty(labelValue, xhtmlPrefix, mustOutputHTMLFragment, Option(currentControl.getLocationData))
    }
  }
}
// Don't use @for as we are not pointing to an HTML control.
// Return `None` rather than `null`: the declared type is `Option[String]` and
// returning `null` defeats Option safety (the text handler above uses `None`).
override def getForEffectiveIdWithNs(effectiveId: String): Option[String] = None
} | orbeon/orbeon-forms | xforms-runtime/shared/src/main/scala/org/orbeon/oxf/xforms/processor/handlers/xhtml/XFormsOutputHandler.scala | Scala | lgpl-2.1 | 11,716 |
package pipelines
import org.slf4j.{Logger, LoggerFactory}
/**
* Utility trait for Logging
*/
/**
 * Utility trait for logging.
 *
 * Mixing this in provides a lazily created SLF4J [[Logger]] named after the
 * concrete class, plus level-guarded convenience methods. Message arguments
 * are by-name, so the message string is only built when the level is enabled.
 */
trait Logging {

  // Make the log field transient so that objects with Logging can
  // be serialized and used on another machine
  @transient private var log_ : Logger = null

  /** Get or lazily create the logger for this object. */
  protected def log: Logger = {
    if (log_ == null) {
      var className = this.getClass.getName
      // Ignore trailing $'s in the class names for Scala objects
      if (className.endsWith("$")) {
        className = className.substring(0, className.length - 1)
      }
      log_ = LoggerFactory.getLogger(className)
    }
    log_
  }

  // Log methods that take only a String.
  // Note: explicit `: Unit =` result types replace the deprecated procedure
  // syntax (`def f(...) { ... }`), which is removed in Scala 3.
  protected def logInfo(msg: => String): Unit = {
    if (log.isInfoEnabled) log.info(msg)
  }

  protected def logDebug(msg: => String): Unit = {
    if (log.isDebugEnabled) log.debug(msg)
  }

  protected def logTrace(msg: => String): Unit = {
    if (log.isTraceEnabled) log.trace(msg)
  }

  protected def logWarning(msg: => String): Unit = {
    if (log.isWarnEnabled) log.warn(msg)
  }

  protected def logError(msg: => String): Unit = {
    if (log.isErrorEnabled) log.error(msg)
  }

  // Log methods that take Throwables (Exceptions/Errors) too
  protected def logInfo(msg: => String, throwable: Throwable): Unit = {
    if (log.isInfoEnabled) log.info(msg, throwable)
  }

  protected def logDebug(msg: => String, throwable: Throwable): Unit = {
    if (log.isDebugEnabled) log.debug(msg, throwable)
  }

  protected def logTrace(msg: => String, throwable: Throwable): Unit = {
    if (log.isTraceEnabled) log.trace(msg, throwable)
  }

  protected def logWarning(msg: => String, throwable: Throwable): Unit = {
    if (log.isWarnEnabled) log.warn(msg, throwable)
  }

  protected def logError(msg: => String, throwable: Throwable): Unit = {
    if (log.isErrorEnabled) log.error(msg, throwable)
  }
}
| o0neup/keystone | src/main/scala/pipelines/Logging.scala | Scala | apache-2.0 | 1,848 |
// Copyright 2014 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package au.com.cba.omnia.maestro.core.upload
import scala.util.Random
import java.io.File
import java.io.FileNotFoundException
import org.specs2.execute.AsResult
import org.specs2.specification.Fixture
import org.apache.hadoop.fs.Path
/** Isolated directories for upload tests */
/**
 * Isolated directories for upload tests: input, archive and HDFS-side
 * directories all rooted under `rootDir`, exposed as File, String and
 * (for the HDFS archive) Hadoop Path forms.
 */
case class IsolatedDirs(rootDir: File) {

  // All test directories are direct children of the root
  private def child(name: String): File = new File(rootDir, name)

  val testDir         = child("testInputDirectory")
  val testDirS        = testDir.toString

  val archiveDir      = child("testArchiveDirectory")
  val archiveDirS     = archiveDir.toString

  val hdfsArchiveDir  = child("testHdfsArchiveDirectory")
  val hdfsArchiveDirS = hdfsArchiveDir.toString
  val hdfsArchiveDirP = new Path(hdfsArchiveDirS)

  val hdfsDir         = child("testHdfsDirectory")
  val hdfsDirS        = hdfsDir.toString
}
/** Provides clean test directories for Upload tests */
/**
 * Provides clean test directories for Upload tests.
 *
 * Each invocation creates a uniquely named root under the system temp dir,
 * with input/archive/HDFS subdirectories, and removes the whole tree after
 * the test runs — even when directory creation or the test body throws.
 */
object isolatedTest extends Fixture[IsolatedDirs] {

  val random = new Random() // Random is thread-safe

  val tmpDir = System.getProperty("java.io.tmpdir")

  /** Create a fresh, uniquely named root directory; retries on name collision. */
  def findUniqueRoot(): File = {
    val possibleDir = new File(tmpDir, "isolated-test-" + random.nextInt(Int.MaxValue)).getAbsoluteFile
    val unique = possibleDir.mkdir
    if (unique) possibleDir else findUniqueRoot() // don't expect to recurse often
  }

  // delete directory plus contents
  // WARNING: deleteAll will follow symlinks!
  // Java 7 supports symlinks, so once we drop Java 6 support we should be
  // able to do this properly, or use a library function that does it properly
  def deleteAll(file: File): Unit = {
    if (file.isDirectory)
      file.listFiles.foreach(deleteAll)
    if (file.exists) {
      file.delete
      ()
    }
  }

  def apply[R: AsResult](test: IsolatedDirs => R) = {
    val dirs = IsolatedDirs(findUniqueRoot())
    // try/finally guarantees the temp tree is removed even if mkdir fails or
    // the test body throws (previously such failures leaked the directories)
    try {
      List(dirs.testDir, dirs.archiveDir, dirs.hdfsDir) foreach (dir =>
        if (!dir.mkdir) throw new FileNotFoundException(dir.toString)
      )
      AsResult(test(dirs))
    } finally deleteAll(dirs.rootDir)
  }
}
| CommBank/maestro | maestro-core/src/test/scala/au/com/cba/omnia/maestro/core/upload/IsolatedTest.scala | Scala | apache-2.0 | 2,657 |
package hephaestus
import cats._, cats.data._
import monocle._
/** Implicit enrichments bridging cats `StateT` with Monocle lenses. */
trait CatsInstances {
  // Enables `stateT.transformLens(lens)` syntax on any StateT value
  implicit def toStateTLensOps[F[_], S, A](s: StateT[F, S, A]): StateTLensOps[F, S, A] = new StateTLensOps(s)
}
/** Value-class wrapper adding lens-based state widening to `StateT`. */
final class StateTLensOps[F[_], S, A](val s: StateT[F, S, A]) extends AnyVal {
  /** Run a `StateT` over `S` against a larger state `R`, reading and writing `S` through `lens`. */
  def transformLens[R](lens: Lens[R, S])(implicit F: Functor[F]): StateT[F, R, A] =
    s.transformS(r => lens.get(r), (r, s) => lens.set(s)(r))
}
| to-ithaca/hephaestus | core/src/main/scala/hephaestus/cats.scala | Scala | apache-2.0 | 427 |
package com.twitter.finagle.http
import com.twitter.finagle.builder.{ClientBuilder, ServerBuilder}
import com.twitter.finagle.tracing._
import com.twitter.finagle.Service
import com.twitter.util.{Await, Closable, Future}
import java.net.{InetAddress, InetSocketAddress}
import org.scalatest.FunSuite
// Minimal echo service: replies to every request with that request's response object
private object Svc extends Service[Request, Response] {
  def apply(req: Request) = Future.value(req.response)
}
/** Verifies that a trace started on the client is propagated to the server over HTTP. */
class TraceInitializationTest extends FunSuite {

  // A fresh GET request for each call (requests are not reusable across dispatches)
  def req = RequestBuilder().url("http://foo/this/is/a/uri/path").buildGet()

  // Asserts that, after filtering to `annos`, the recorded annotations appear
  // in exactly the given order
  def assertAnnotationsInOrder(records: Seq[Record], annos: Seq[Annotation]): Unit = {
    assert(records.collect { case Record(_, _, ann, _) if annos.contains(ann) => ann } == annos)
  }

  /**
   * Ensure all annotations have the same TraceId (it should be passed between client and server)
   * Ensure core annotations are present and properly ordered
   */
  def testTraces(f: (Tracer, Tracer) => (Service[Request, Response], Closable)): Unit = {
    // The same BufferingTracer is used for both sides so the recorded
    // annotations can be checked in one sequence
    val tracer = new BufferingTracer
    val (svc, closable) = f(tracer, tracer)
    try Await.result(svc(req))
    finally {
      Closable.all(svc, closable).close()
    }

    assertAnnotationsInOrder(
      tracer.toSeq,
      Seq(
        Annotation.Rpc("GET"),
        Annotation.BinaryAnnotation("http.uri", "/this/is/a/uri/path"),
        Annotation.ServiceName("theClient"),
        Annotation.ClientSend,
        Annotation.Rpc("GET"),
        Annotation.BinaryAnnotation("http.uri", "/this/is/a/uri/path"),
        Annotation.ServiceName("theServer"),
        Annotation.ServerRecv,
        Annotation.ServerSend,
        Annotation.ClientRecv
      )
    )

    // A single TraceId across all records proves propagation worked
    assert(tracer.map(_.traceId).toSet.size == 1)
  }

  test("TraceId is propagated through the protocol") {
    testTraces { (serverTracer, clientTracer) =>
      import com.twitter.finagle
      val server = finagle.Http.server
        .withTracer(serverTracer)
        .withLabel("theServer")
        .serve(":*", Svc)

      // Bind to an ephemeral port, then point the client at it
      val port = server.boundAddress.asInstanceOf[InetSocketAddress].getPort
      val client = finagle.Http.client
        .withTracer(clientTracer)
        .newService(":" + port, "theClient")
      (client, server)
    }
  }

  test("TraceId is propagated through the protocol (builder)") {
    import com.twitter.finagle
    // Same scenario via the legacy ServerBuilder/ClientBuilder API
    testTraces { (serverTracer, clientTracer) =>
      val server = ServerBuilder()
        .name("theServer")
        .bindTo(new InetSocketAddress(InetAddress.getLoopbackAddress, 0))
        .stack(finagle.Http.server)
        .tracer(serverTracer)
        .build(Svc)

      val port = server.boundAddress.asInstanceOf[InetSocketAddress].getPort
      val client = ClientBuilder()
        .name("theClient")
        .hosts(s"localhost:$port")
        .stack(finagle.Http.client)
        .hostConnectionLimit(1)
        .tracer(clientTracer)
        .build()
      (client, server)
    }
  }
}
| luciferous/finagle | finagle-http/src/test/scala/com/twitter/finagle/http/TraceInitializationTest.scala | Scala | apache-2.0 | 2,909 |
package com.github.tarao
package slickjdbc
package getresult
import scala.util.DynamicVariable
import scala.annotation.implicitNotFound
import slick.jdbc.{GetResult => GR, PositionedResult}
import java.sql.{ResultSet}
/**
 * DSL for reading columns from a Slick `PositionedResult`.
 *
 * The current `PositionedResult` is threaded through a `DynamicVariable`, so
 * `column`, `skip`, `<<` and `<<?` may only be called inside `getResult { ... }`.
 */
trait GetResult {
  // Holds the PositionedResult for the duration of a getResult block
  private val positionedResult = new DynamicVariable[PositionedResult](null)
  // Fails fast when column access happens outside getResult
  private[getresult] def positionedResultValue =
    Option(positionedResult.value) getOrElse {
      throw new RuntimeException("Column access must be in getResult method")
    }
  /** Wrap `block` as a Slick `GetResult`, making column access available inside it. */
  def getResult[T](block: => T): GR[T] =
    GR[T] { r => positionedResult.withValue(r) { block } }
  /** Read the column at the given (1-based) index. */
  def column[T](index: Int)(implicit
    check: CheckGetter[T],
    binder: TypeBinder[T]
  ): T = binder(positionedResultValue.rs, index)
  /** Read the named column. */
  def column[T](field: String)(implicit
    check: CheckGetter[T],
    binder: TypeBinder[T]
  ): T = binder(positionedResultValue.rs, field)
  /** Skip the next column; returns `this` for chaining. */
  def skip = { positionedResultValue.skip; this }
  /** Advance to the next column and read it. */
  def <<[T](implicit
    check: CheckGetter[T],
    binder: TypeBinder[T]
  ): T = column[T](positionedResultValue.skip.currentPos)
  /** Advance to the next column and read it as an `Option`. */
  def <<?[T](implicit
    check: CheckGetter[Option[T]],
    binder: TypeBinder[Option[T]]
  ): Option[T] = <<[Option[T]]
}
object GetResult {
  /** Build a Slick `GetResult` from a block that receives the column-access DSL directly. */
  def apply[T](block: GetResult => T): GR[T] =
    GR[T] { r => block(new GetResult {
      // Bypass the DynamicVariable: this instance is bound to a single row
      override def positionedResultValue = r
    }) }
}
// Evidence that a TypeBinder[T] exists; its only purpose is to surface the
// custom @implicitNotFound message when no conversion rule is available.
@implicitNotFound(msg = "No conversion rule for type ${T}\\n" +
  "[NOTE] You need an implicit of getresult.TypeBinder[${T}] to convert the result.")
sealed trait CheckGetter[+T]
object CheckGetter {
  implicit def valid[T](implicit binder: TypeBinder[T]): CheckGetter[T] =
    new CheckGetter[T] {}
}
/**
 * Extracts a value of type `T` from a JDBC `ResultSet`, either by 1-based
 * column index or by column name. Covariant in `T`.
 */
trait TypeBinder[+T] { self =>
  /** Read the value at the given column index. */
  def apply(rs: ResultSet, index: Int): T
  /** Read the value of the named column. */
  def apply(rs: ResultSet, field: String): T
  /** Derive a binder for `S` by post-processing this binder's result with `f`. */
  def map[S](f: T => S): TypeBinder[S] = new TypeBinder[S] {
    def apply(rs: ResultSet, index: Int): S = f(self(rs, index))
    def apply(rs: ResultSet, field: String): S = f(self(rs, field))
  }
}
/** Built-in binder instances for JDBC/Java/Scala types. */
object TypeBinder {
  // Getter function shape: (ResultSet, index-or-name) => value
  type Get[X, R] = (ResultSet, X) => R

  /** Build a binder from an index-based getter and a name-based getter. */
  def apply[T](byIndex: Get[Int, T])(byField: Get[String, T]): TypeBinder[T] =
    new TypeBinder[T] {
      def apply(rs: ResultSet, index: Int): T = byIndex(rs, index)
      def apply(rs: ResultSet, field: String): T = byField(rs, field)
    }

  // Raw, untyped column access; basis for the coercing binders below
  val any: TypeBinder[Any] =
    TypeBinder[Any](_ getObject _)(_ getObject _)

  // Boolean coercion: numbers are true iff non-zero; strings parse as numbers
  // when possible, otherwise non-empty means true; SQL NULL becomes None
  implicit val javaBoolean: TypeBinder[Option[java.lang.Boolean]] = any.map {
    case b if b == null => None
    case b: java.lang.Boolean => Some(b)
    case s: String => Some({
      try s.toInt != 0
      catch { case e: NumberFormatException => !s.isEmpty }
    }.asInstanceOf[java.lang.Boolean])
    case n: Number => Some((n.intValue != 0).asInstanceOf[java.lang.Boolean])
    case v => Some((v != 0).asInstanceOf[java.lang.Boolean])
  }
  implicit val scalaBoolean: TypeBinder[Option[Boolean]] =
    javaBoolean.map(_.map(_.asInstanceOf[Boolean]))

  // Floating-point style numbers: parse from the string form; NULL or
  // unparsable values become None
  def javaNumber[T](valueOf: String => T): TypeBinder[Option[T]] =
    any.map {
      case v if v == null => None
      case v => try {
        Some(valueOf(v.toString))
      } catch { case e: NumberFormatException => None }
    }
  // Integral numbers: convert directly from Number when possible, otherwise
  // parse the string form; NULL or unparsable values become None
  def javaFixedNumber[T](
    to: Number => T,
    valueOf: String => T
  ): TypeBinder[Option[T]] = any.map {
    case v if v == null => None
    case v: Number => Some(to(v))
    case v => try {
      Some(valueOf(v.toString))
    } catch { case e: NumberFormatException => None }
  }

  // Boxed Java numeric binders and their Scala unboxed counterparts
  implicit val javaByte: TypeBinder[Option[java.lang.Byte]] =
    javaFixedNumber({ n => n.byteValue }, { s => java.lang.Byte.valueOf(s) })
  implicit val scalaByte: TypeBinder[Option[Byte]] =
    javaByte.map(_.map(_.asInstanceOf[Byte]))
  implicit val javaShort: TypeBinder[Option[java.lang.Short]] =
    javaFixedNumber({ n => n.shortValue }, { s => java.lang.Short.valueOf(s) })
  implicit val scalaShort: TypeBinder[Option[Short]] =
    javaShort.map(_.map(_.asInstanceOf[Short]))
  implicit val javaInt: TypeBinder[Option[java.lang.Integer]] =
    javaFixedNumber({ n => n.intValue }, { s => java.lang.Integer.valueOf(s) })
  implicit val scalaInt: TypeBinder[Option[Int]] =
    javaInt.map(_.map(_.asInstanceOf[Int]))
  implicit val javaLong: TypeBinder[Option[java.lang.Long]] =
    javaFixedNumber({ n => n.longValue }, { s => java.lang.Long.valueOf(s) })
  implicit val scalaLong: TypeBinder[Option[Long]] =
    javaLong.map(_.map(_.asInstanceOf[Long]))
  implicit val javaDouble: TypeBinder[Option[java.lang.Double]] =
    javaNumber { s => java.lang.Double.valueOf(s) }
  implicit val scalaDouble: TypeBinder[Option[Double]] =
    javaDouble.map(_.map(_.asInstanceOf[Double]))
  implicit val javaFloat: TypeBinder[Option[java.lang.Float]] =
    javaNumber { s => java.lang.Float.valueOf(s) }
  implicit val scalaFloat: TypeBinder[Option[Float]] =
    javaFloat.map(_.map(_.asInstanceOf[Float]))
  implicit val javaBigDecimal: TypeBinder[Option[java.math.BigDecimal]] =
    TypeBinder(_ getBigDecimal _)(_ getBigDecimal _).map(Option(_))
  implicit val scalaBigDecimal: TypeBinder[Option[BigDecimal]] =
    javaBigDecimal.map(_.map(BigDecimal(_)))

  // Binders delegating to the dedicated ResultSet getters; Option(_) maps
  // SQL NULL (returned as Java null) to None
  implicit val string: TypeBinder[Option[String]] =
    TypeBinder(_ getString _)(_ getString _).map(Option(_))
  implicit val bytes: TypeBinder[Option[Array[Byte]]] =
    TypeBinder(_ getBytes _)(_ getBytes _).map(Option(_))
  implicit val characterStream: TypeBinder[Option[java.io.Reader]] =
    TypeBinder(_ getCharacterStream _)(_ getCharacterStream _).map(Option(_))
  implicit val binaryStream: TypeBinder[Option[java.io.InputStream]] =
    TypeBinder(_ getBinaryStream _)(_ getBinaryStream _).map(Option(_))
  implicit val blob: TypeBinder[Option[java.sql.Blob]] =
    TypeBinder(_ getBlob _)(_ getBlob _).map(Option(_))
  implicit val clob: TypeBinder[Option[java.sql.Clob]] =
    TypeBinder(_ getClob _)(_ getClob _).map(Option(_))
  implicit val nClob: TypeBinder[Option[java.sql.NClob]] =
    TypeBinder(_ getNClob _)(_ getNClob _).map(Option(_))
  implicit val array: TypeBinder[Option[java.sql.Array]] =
    TypeBinder(_ getArray _)(_ getArray _).map(Option(_))
  implicit val url: TypeBinder[Option[java.net.URL]] =
    TypeBinder(_ getURL _)(_ getURL _).map(Option(_))
  implicit val date: TypeBinder[Option[java.sql.Date]] =
    TypeBinder(_ getDate _)(_ getDate _).map(Option(_))
  implicit val time: TypeBinder[Option[java.sql.Time]] =
    TypeBinder(_ getTime _)(_ getTime _).map(Option(_))
  implicit val timestamp: TypeBinder[Option[java.sql.Timestamp]] =
    TypeBinder(_ getTimestamp _)(_ getTimestamp _).map(Option(_))
  implicit val sqlxml: TypeBinder[Option[java.sql.SQLXML]] =
    TypeBinder(_ getSQLXML _)(_ getSQLXML _).map(Option(_))
  implicit val ref: TypeBinder[Option[java.sql.Ref]] =
    TypeBinder(_ getRef _)(_ getRef _).map(Option(_))
  implicit val rowId: TypeBinder[Option[java.sql.RowId]] =
    TypeBinder(_ getRowId _)(_ getRowId _).map(Option(_))
}
/**
 * Mix in to derive `TypeBinder[T]` from `TypeBinder[Option[T]]` automatically.
 * Caution: the derived binder calls `Option.get` and therefore throws
 * `NoSuchElementException` when the column is SQL NULL.
 */
trait AutoUnwrapOption {
  implicit def some[T](implicit
    check: NoOption[T], // We do this to enable a diagnostic type
                        // error by CheckGetter. Otherwise an
                        // implicit expansion of an unknown type fails
                        // on divergence.
    option: TypeBinder[Option[T]]
  ): TypeBinder[T] = option.map(_.get) // throws
}
object AutoUnwrapOption extends AutoUnwrapOption
/** Evidence that `T` is not itself an `Option` type. */
sealed trait NoOption[+T]
object NoOption {
  implicit def some[T]: NoOption[T] = new NoOption[T] {}
  // $COVERAGE-OFF$
  // Deliberately ambiguous pair: implicit search for NoOption[Option[T]]
  // always fails, so these bodies are never executed.
  implicit def ambig1[T]: NoOption[Option[T]] = sys.error("unexpected")
  implicit def ambig2[T]: NoOption[Option[T]] = sys.error("unexpected")
  // $COVERAGE-ON$
}
| TimothyKlim/slick-jdbc-extension-scala | src/main/scala/com/github/tarao/slickjdbc/getresult/GetResult.scala | Scala | mit | 7,646 |
package com.github.opengrabeso.mixtio
package moveslink2
import java.io._
import java.time.{ZoneId, ZoneOffset, ZonedDateTime}
import java.time.format.DateTimeFormatter
import Main._
import common.model._
import scala.collection.immutable.SortedMap
import common.Util._
import scala.collection.mutable.ArrayBuffer
import scala.util.Try
object XMLParser {
private val PositionConstant = 57.2957795131
private val dateFormatNoZone = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(ZoneId.systemDefault)
private val dateFormatNoZoneUTC = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss").withZone(ZoneOffset.UTC)
/** Parse a space-separated list of R-R interval values into integers. */
def getRRArray(rrData: String): Seq[Int] =
  rrData.split(" ").map(_.toInt).toSeq
/**
 * Parse a Suunto SML/XML activity file into [[ActivityEvents]].
 *
 * Returns None when the GPS track contains no usable range. `digest` is
 * stored in the resulting ActivityId; `fileName` identifies the source file.
 */
def parseXML(fileName: String, inputStream: InputStream, digest: String): Option[ActivityEvents] = {
  import SAXParser._

  // Grammar-driven SAX handler: the vars below are mutated as elements are visited
  object parsed extends SAXParserWithGrammar {
    var rrData = Seq.empty[Int]
    var deviceName = Option.empty[String]
    var startTime = Option.empty[ZonedDateTime]
    var distance: Int = 0
    var durationMs: Int = 0
    var paused: Boolean = false
    var pauseStartTime = Option.empty[ZonedDateTime]

    // One <Sample> element; all fields optional because sample kinds differ
    class Sample {
      /* GPS Track Pod example:
      <Sample>
        <Latitude>0.86923005364868888</Latitude>
        <Longitude>0.24759951117797119</Longitude>
        <GPSAltitude>416</GPSAltitude>
        <GPSHeading>1.4116222990130136</GPSHeading>
        <EHPE>4</EHPE>
        <Time>2534</Time>
        <UTC>2016-10-21T07:28:14Z</UTC>
      </Sample>
      <Sample>
        <VerticalSpeed>0</VerticalSpeed>
        <Distance>7868</Distance>
        <Speed>3.9399999999999999</Speed>
        <Time>2534.6120000000001</Time>
        <SampleType>periodic</SampleType>
        <UTC>2016-10-21T07:28:14.612Z</UTC>
      </Sample>
      */
      var time = Option.empty[ZonedDateTime]
      var distance = Option.empty[Double]
      var latitude = Option.empty[Double]
      var longitude = Option.empty[Double]
      var accuracy = Option.empty[Double]
      var elevation: Option[Int] = None
      var heartRate: Option[Int] = None
    }

    val samples = ArrayBuffer.empty[Sample]
    val laps = ArrayBuffer.empty[Lap]

    /**
     * When there is no zone, assume UTC
     * */
    def safeParse(s: String): Try[ZonedDateTime] = {
      Try {
        ZonedDateTime.parse(s)
      } orElse Try {
        ZonedDateTime.parse(s, dateFormatNoZoneUTC)
      }
    }

    def grammar = root(
      "Device" tag ("Name" text (text => deviceName = Some(text))),
      "Header" tag (
        "Distance" text (text => distance = text.toInt),
        // caution: GPS Track POD <Header>/<DateTime> is given in local timezone with no designation - better ignore it
        "DateTime" text (text => startTime = Some(timeToUTC(ZonedDateTime.parse(text, dateFormatNoZone)))),
        "Duration" text (text => durationMs = (text.toDouble * 1000).toInt)
      ),
      "R-R" tag ("Data" text (text => rrData = getRRArray(text))),
      "Samples" tag (
        "Sample" tagWithOpen (
          samples += new Sample,
          // Latitude/longitude arrive in radians; convert to degrees
          "Latitude" text (text => samples.last.latitude = Some(text.toDouble * XMLParser.PositionConstant)),
          "Longitude" text (text => samples.last.longitude = Some(text.toDouble * XMLParser.PositionConstant)),
          "GPSAltitude" text (text => samples.last.elevation = Some(text.toInt)),
          "EHPE" text (text => samples.last.accuracy = Some(text.toInt)),
          // TODO: handle relative time when UTC is not present
          "UTC" text (text => samples.last.time = safeParse(text).toOption),
          "Distance" text (text => samples.last.distance = Some(text.toDouble)),
          "HR" text (text => samples.last.heartRate = Some(text.toInt)),
          // TODO: add other properties (power, cadence, temperature ...)
          "Events" tag (
            "Pause" tag ("State" text (text => paused = text.equalsIgnoreCase("true"))),
            "Lap" tagWithOpen {
              // caution: lap time is bad for GPS Track Pod. It is marked as <UTC>, but in fact it is written in local time zone
              // use time of the last previous sample instead, or we might consider using duration inside of the lap event
              val lastTime = samples.reverseIterator.toIterable.tail.find(_.time.isDefined).flatMap(_.time)
              for (timestamp <- lastTime) {
                laps += Lap("Lap", timestamp)
                println(s"SML lap $timestamp")
              }
            }
            //"Type" text { text =>}
            // we are not interested about any other Lap properties
            //"Duration" text { text => text.toDouble }
            //"Duration" text {text => }
            //"Distance" text {text => }
          )
        )
      )
    )
  }

  SAXParser.parse(inputStream)(parsed)

  // always check time last, as this is present in almost each entry. We want first check to filter out as much as possible
  val gpsSamples = for {
    s <- parsed.samples
    longitude <- s.longitude
    latitude <- s.latitude
    time <- s.time
  } yield {
    time -> GPSPoint(latitude, longitude, s.elevation)(s.accuracy)
  }

  // Restrict all streams to the "interesting" GPS range; bail out (None) when
  // the track is essentially empty
  val ret = for (gpsInterestingRange <- DataStreamGPS.dropAlmostEmpty(gpsSamples.toList)) yield {
    def inRange(t: ZonedDateTime) = t >= gpsInterestingRange._1 && t <= gpsInterestingRange._2

    val distSamples = for {
      s <- parsed.samples
      distance <- s.distance
      time <- s.time if inRange(time)
    } yield {
      time -> distance
    }
    val hrSamples = for {
      s <- parsed.samples
      v <- s.heartRate if v != 0
      time <- s.time if inRange(time)
    } yield {
      time -> v
    }

    val gpsStream = new DataStreamGPS(SortedMap(gpsSamples.filter(s => inRange(s._1)): _*))
    val distStream = new DataStreamDist(SortedMap(distSamples: _*))
    // Only keep a heart-rate stream when it carries actual (non-zero) values
    val hrStream = if (hrSamples.exists(_._2 != 0)) Some(new DataStreamHR(SortedMap(hrSamples: _*))) else None

    val lapTimes = parsed.laps.map(_.timestamp).filter(inRange)

    // TODO: read ActivityType from XML
    val sport = Event.Sport.Workout

    // Derive the activity envelope (start/end/total distance) from the streams
    val allStreams = Seq(distStream, gpsStream) ++ hrStream
    val activity = for {
      startTime <- allStreams.flatMap(_.startTime).minOpt
      endTime <- allStreams.flatMap(_.endTime).maxOpt
      d <- distStream.stream.lastOption.map(_._2)
    } yield {
      val id = ActivityId(FileId.FilenameId(fileName), digest, "Activity", startTime, endTime, sport, d)
      val events = Array[Event](BegEvent(id.startTime, sport), EndEvent(id.endTime))
      // TODO: avoid duplicate timestamp events
      val lapEvents = lapTimes.map(LapEvent)
      val allEvents = (events ++ lapEvents).sortBy(_.stamp)
      ActivityEvents(id, allEvents, distStream, gpsStream, hrStream.toSeq)
    }
    activity
  }
  ret.flatten
}
} | OndrejSpanel/Stravamat | backend/src/main/scala/com/github/opengrabeso/mixtio/moveslink2/XMLParser.scala | Scala | gpl-2.0 | 7,131 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.scala.batch.utils
import org.apache.flink.table.api.TableConfig
import org.apache.flink.table.api.scala.batch.utils.TableProgramsTestBase.{NO_NULL, TableConfigMode}
import org.apache.flink.test.util.MultipleProgramsTestBase
import org.apache.flink.test.util.MultipleProgramsTestBase.TestExecutionMode
/** Base class for Table API tests parameterized by execution mode and table configuration. */
class TableProgramsTestBase(
  mode: TestExecutionMode,
  tableConfigMode: TableConfigMode)
  extends MultipleProgramsTestBase(mode) {

  /** Build a TableConfig reflecting the requested configuration mode. */
  def config: TableConfig = {
    val conf = new TableConfig
    tableConfigMode match {
      case NO_NULL =>
        // Disable null checking for the NO_NULL mode
        conf.setNullCheck(false)
      case _ => // keep default
    }
    conf
  }
}
object TableProgramsTestBase {
  /** Table configuration flags applied by [[TableProgramsTestBase.config]]. */
  case class TableConfigMode(nullCheck: Boolean)

  val DEFAULT = TableConfigMode(nullCheck = true)
  val NO_NULL = TableConfigMode(nullCheck = false)
}
| hongyuhong/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/scala/batch/utils/TableProgramsTestBase.scala | Scala | apache-2.0 | 1,668 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.thermal
import squants._
import squants.energy.{ Energy, Joules }
/**
* Represents the capacity of some substance or system to hold thermal energy.
*
* Also a representation of Entropy
*
* @author garyKeorkunian
* @since 0.1
*
* @param value the value in [[squants.thermal.JoulesPerKelvin]]
*/
final class ThermalCapacity private (val value: Double, val unit: ThermalCapacityUnit)
  extends Quantity[ThermalCapacity] {

  def dimension = ThermalCapacity

  // Energy = thermal capacity × absolute temperature (Kelvin scale)
  def *(that: Temperature): Energy = Joules(this.toJoulesPerKelvin * that.toKelvinScale)

  def toJoulesPerKelvin = to(JoulesPerKelvin)
}
object ThermalCapacity extends Dimension[ThermalCapacity] {
  // Construction is restricted to the thermal package; use the units to create values
  private[thermal] def apply[A](n: A, unit: ThermalCapacityUnit)(implicit num: Numeric[A]) = new ThermalCapacity(num.toDouble(n), unit)
  // Eta-expanded parse function, e.g. ThermalCapacity("10 J/K")
  def apply = parse _
  def name = "ThermalCapacity"
  def primaryUnit = JoulesPerKelvin
  def siUnit = JoulesPerKelvin
  def units = Set(JoulesPerKelvin)
}
/** A unit of measure for [[ThermalCapacity]]; `unit(n)` constructs a quantity. */
trait ThermalCapacityUnit extends UnitOfMeasure[ThermalCapacity] with UnitConverter {
  def apply[A](n: A)(implicit num: Numeric[A]) = ThermalCapacity(n, this)
}

/** SI unit of thermal capacity: joules per kelvin (J/K). */
object JoulesPerKelvin extends ThermalCapacityUnit with PrimaryUnit with SiUnit {
  def symbol = "J/K"
}
/** Implicit conversions enabling expressions like `3.joulesPerKelvin`. */
object ThermalCapacityConversions {
  lazy val joulePerKelvin = JoulesPerKelvin(1)

  implicit class ThermalCapacityConversions[A](n: A)(implicit num: Numeric[A]) {
    def joulesPerKelvin = JoulesPerKelvin(n)
  }

  // Numeric instance so collections of ThermalCapacity support sum, etc.
  implicit object ThermalCapacityNumeric extends AbstractQuantityNumeric[ThermalCapacity](ThermalCapacity.primaryUnit)
}
| underscorenico/squants | shared/src/main/scala/squants/thermal/ThermalCapacity.scala | Scala | apache-2.0 | 2,111 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.features
import io.fabric8.kubernetes.api.model.PodBuilder
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.k8s.{KubernetesConf, KubernetesExecutorSpecificConf, SecretVolumeUtils, SparkPod}
/** Tests that MountSecretsFeatureStep mounts each requested secret as a pod volume. */
class MountSecretsFeatureStepSuite extends SparkFunSuite {

  private val SECRET_FOO = "foo"
  private val SECRET_BAR = "bar"
  private val SECRET_MOUNT_PATH = "/etc/secrets/driver"

  test("mounts all given secrets") {
    val baseDriverPod = SparkPod.initialPod()
    // Two secrets sharing the same mount path
    val secretNamesToMountPaths = Map(
      SECRET_FOO -> SECRET_MOUNT_PATH,
      SECRET_BAR -> SECRET_MOUNT_PATH)
    val sparkConf = new SparkConf(false)
    val kubernetesConf = KubernetesConf(
      sparkConf,
      KubernetesExecutorSpecificConf("1", Some(new PodBuilder().build())),
      "resource-name-prefix",
      "app-id",
      Map.empty,
      Map.empty,
      secretNamesToMountPaths,
      Map.empty,
      Map.empty,
      Nil,
      hadoopConfSpec = None)

    val step = new MountSecretsFeatureStep(kubernetesConf)
    val driverPodWithSecretsMounted = step.configurePod(baseDriverPod).pod
    val driverContainerWithSecretsMounted = step.configurePod(baseDriverPod).container

    // Each secret yields a "<name>-volume" volume on the pod...
    Seq(s"$SECRET_FOO-volume", s"$SECRET_BAR-volume").foreach { volumeName =>
      assert(SecretVolumeUtils.podHasVolume(driverPodWithSecretsMounted, volumeName))
    }
    // ...and a matching volume mount on the container at the requested path
    Seq(s"$SECRET_FOO-volume", s"$SECRET_BAR-volume").foreach { volumeName =>
      assert(SecretVolumeUtils.containerHasVolume(
        driverContainerWithSecretsMounted, volumeName, SECRET_MOUNT_PATH))
    }
  }
}
| ahnqirage/spark | resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/MountSecretsFeatureStepSuite.scala | Scala | apache-2.0 | 2,414 |
// Compiler regression test (t5907): exercises `copy()` on case classes with
// implicit and multiple parameter lists. Output is compared against a check
// file, so the printed values must not change.
object Test extends App {
  t

  def t: Unit = {
    // implicit-only constructor: copy takes the implicit explicitly or from scope
    val c1 = C1()(1)
    println(c1.copy()(2))
    {
      implicit val i: Int = 2873
      println(c1.copy())
    }

    // empty first list + explicit second list
    val c2 = C2()(1)
    println(c2.copy()(37))

    // named-argument copy across multiple lists
    val c3 = C3(1,2)(3)
    println(c3.copy()(27))
    println(c3.copy(y = 22)(27))
    println(c3.copy(y = 7, x = 11)(27))

    // plain single-list case class
    val c4 = C4(1)
    println(c4.copy())
    println(c4.copy(x = 23))

    // multiple implicits in the second list
    val c5 = C5(1,2)(3,"a")
    println(c5.copy()(33,"b"))
    println(c5.copy(y = 19)(33,"b"))
    {
      implicit val i: Int = 193
      implicit val s: String = "c"
      println(c5.copy())
      println(c5.copy(y = 371))
      println(c5.copy(x = -1)(-2, "lken"))
    }

    // three parameter lists, last implicit
    val c6 = C6(1)(2)(3)
    println(c6.copy(29)(18)(-12))
    {
      implicit val i: Int = 2892
      println(c6.copy(x = 1)(93))
      println(c6.copy(x = 1)(93)(761))
    }

    // four parameter lists, last implicit
    val c7 = C7(1)(2)(3)("h")
    println(c7.copy()(22)(33)("elkj"))
    {
      implicit val s: String = "me"
      println(c7.copy()(283)(29872))
      println(c7.copy(37)(298)(899)("ekjr"))
    }

    // empty middle parameter list
    val c8 = C8(1)(2,3)()("els")
    println(c8.copy(x = 172)(989, 77)()("eliurna"))
    {
      implicit val s: String = "schtring"
      println(c8.copy()(82,2111)())
      println(c8.copy(x = -1)(92,29)()("lken"))
    }

    // several empty parameter lists
    val c9 = C9(1)(2)()()("u")
    println(c9.copy()(271)()()("ehebab"))
    {
      implicit val s: String = "enag"
      println(c9.copy()(299))
      println(c9.copy()(299)())
      println(c9.copy()(299)()())
      println(c9.copy(x = -42)(99)()()("flae"))
    }

    // generic case class with bounded type parameters and implicit list
    class KA { override def toString = "ka" }
    class KB extends KA { override def toString = "kb" }
    val c10 = C10(10)(3)(19)
    println(c10.copy()(298)(27))
    println(c10.copy("elkn")("en")("emn"))
    println(c10.copy(new KA)(new KB)(new KB))
    {
      implicit val k: KA = new KA
      println(c10.copy(new KA)(new KB))
    }
  }
}
// Case-class shapes used by Test above; each combination of ordinary, empty
// and implicit parameter lists exercises a different copy() code path.
case class C1(implicit x: Int) {
  override def toString = s"c1: $x"
}
case class C2()(y: Int) {
  override def toString = s"c2: $y"
}
case class C3(x: Int, y: Int)(z: Int) {
  override def toString = s"c3: $x, $y, $z"
}
case class C4(x: Int) {
  override def toString = s"c4: $x"
}
case class C5(x: Int, y: Int)(implicit z: Int, s: String) {
  override def toString = s"c5: $x, $y, $z, $s"
}
case class C6(x: Int)(y: Int)(implicit z: Int) {
  override def toString = s"c6: $x, $y, $z"
}
case class C7(x: Int)(y: Int)(z: Int)(implicit s: String) {
  override def toString = s"c7: $x, $y, $z, $s"
}
case class C8(x: Int)(y: Int, z: Int)()(implicit s: String) {
  override def toString = s"c8: $x, $y, $z, $s"
}
case class C9(x: Int)(y: Int)()()(implicit s: String) {
  override def toString = s"c9: $x, $y, $s"
}
case class C10[T,U <: T](x: T)(y: U)(implicit z: T) {
  override def toString = s"c9: $x, $y, $z"
}
| som-snytt/dotty | tests/pending/run/t5907.scala | Scala | apache-2.0 | 2,825 |
package org.plummtw.jinrou.enum
import org.plummtw.jinrou.data._
// Enumeration of every in-game message type. The string passed to Value(...)
// is the persisted/encoded form, so each code must stay unique and stable.
object MTypeEnum extends Enumeration {
type MTypeEnum = Value
// --- talk channels (T*) ---
val TALK = Value("T")
val TALK_ADMIN = Value("TA")
val TALK_ADMIN_PRIVATE = Value("TAP")
val TALK_DAY = Value("TD")
val TALK_DAY_FOG = Value("TDF")
val TALK_SECRET = Value("TS")
val TALK_VENTRILOQUIST = Value("TV")
val TALK_SEALED = Value("TZ")
val TALK_NIGHT = Value("TN")
val TALK_WEREWOLF = Value("TW")
val TALK_WOLFCUB = Value("TX")
val TALK_GEMINI = Value("TG")
val TALK_LOVER = Value("TL")
val TALK_GEMINI_DAY = Value("TGD")
val TALK_FOX = Value("TF")
val TALK_DISGUISED = Value("TB")
val TALK_PONTIFF = Value("TJ")
val TALK_HEAVEN = Value("TH")
val TALK_END = Value("TE")
// --- system messages (S*) ---
val MESSAGE = Value("S")
val MESSAGE_GENERAL = Value("S0")
val MESSAGE_GENERAL_NORUNNER = Value("S01")
val MESSAGE_COME = Value("S1")
val MESSAGE_LEAVE = Value("S2")
val MESSAGE_KICKED = Value("S3")
val MESSAGE_REVOTE0 = Value("S4")
val MESSAGE_LAST2MIN = Value("S5")
val MESSAGE_DEATHSUDDEN = Value("S6")
val MESSAGE_REVOTE = Value("S7")
val MESSAGE_NIGHT = Value("S8")
val MESSAGE_EVIL = Value("S9")
val MESSAGE_FOX = Value("SF")
// --- objections and bite notifications ---
val OBJECTION_MALE = Value("OM")
val OBJECTION_FEMALE = Value("OF")
val BITED_FOX = Value("BF")
val BITED_DEMON = Value("BD")
// --- death causes (D*); display text lives in DEATH_MAP below ---
val DEATH = Value("D")
val DEATH_BETRAYER = Value("DB")
val DEATH_CLERIC = Value("DC")
val DEATH_DEMON = Value("DD")
val DEATH_FOX = Value("DF")
val DEATH_SORCEROR = Value("DU")
val DEATH_HANGED = Value("DH")
val DEATH_HUNTER = Value("DHH")
val DEATH_HUNTER_KILL = Value("DHK")
val DEATH_MADMAN = Value("DM")
val DEATH_POISON_D = Value("DP")
val DEATH_POISON_N = Value("DQ")
val DEATH_POISON_H = Value("DJ")
val DEATH_RUNNER = Value("DR")
val DEATH_SUDDEN = Value("DS")
val DEATH_EATEN = Value("DE")
val DEATH_GODFAT = Value("DT")
val DEATH_WOLFCUB = Value("DX")
val DEATH_WOLFCUB_EATEN = Value("DXE")
val DEATH_FALLENANGEL = Value("DFF")
val DEATH_PENGUIN_ICE = Value("DKI")
val DEATH_SUBPONTIFF = Value("DSP")
val DEATH_LINKS = Value("DL")
val DEATH_LOVER = Value("DLL")
val DEATH_DEATH_NOTE = Value("DDN")
// --- item messages: codes are "I" + the ItemEnum code ---
val ITEM_PREFIX = "I"
val ITEM_NO_ITEM = Value(ITEM_PREFIX + ItemEnum.ITEM_NO_ITEM.toString)
val ITEM_UNLUCKY_PURSE = Value(ITEM_PREFIX + ItemEnum.UNLUCKY_PURSE.toString)
val ITEM_BLESS_STAFF = Value(ITEM_PREFIX + ItemEnum.BLESS_STAFF.toString)
val ITEM_BLACK_FEATHER = Value(ITEM_PREFIX + ItemEnum.BLACK_FEATHER.toString)
val ITEM_THIEF_SECRET = Value(ITEM_PREFIX + ItemEnum.THIEF_SECRET.toString)
val ITEM_VENTRILOQUIST = Value(ITEM_PREFIX + ItemEnum.VENTRILOQUIST.toString)
val ITEM_DMESSAGE_SEAL = Value(ITEM_PREFIX + ItemEnum.DMESSAGE_SEAL.toString)
val ITEM_MIRROR_SHIELD = Value(ITEM_PREFIX + ItemEnum.MIRROR_SHIELD.toString)
val ITEM_SHAMAN_CROWN = Value(ITEM_PREFIX + ItemEnum.SHAMAN_CROWN.toString)
val ITEM_WEATHER_ROD = Value(ITEM_PREFIX + ItemEnum.WEATHER_ROD.toString)
val ITEM_DEATH_NOTE = Value(ITEM_PREFIX + ItemEnum.DEATH_NOTE.toString)
val ITEM_PANDORA_BOX = Value(ITEM_PREFIX + ItemEnum.PANDORA_BOX.toString)
val ITEM_CUBIC_ARROW = Value(ITEM_PREFIX + ItemEnum.CUBIC_ARROW.toString)
val ITEM_POPULATION_CENSUS = Value(ITEM_PREFIX + ItemEnum.POPULATION_CENSUS.toString)
// --- votes / night actions (V*); note some codes are case-sensitive ---
val VOTE = Value("V")
val VOTE_STARTGAME = Value("V_")
val VOTE_BECOMEMOB = Value("VBM")
val VOTE_HANG = Value("VV")
val VOTE_KICK = Value("VK")
val VOTE_VILLAGER = Value("VE")
val VOTE_HIDE = Value("VYH")
val VOTE_REVERSEVOTE = Value("VYR")
val VOTE_AUGURER = Value("VA")
val VOTE_HUNTER = Value("VH")
val VOTE_CLERIC_BLESS = Value("VCB")
val VOTE_CLERIC_SANCTURE = Value("VCS")
val VOTE_HERBALIST_ELIXIR = Value("VLE")
val VOTE_HERBALIST_POISON = Value("VLP")
val VOTE_HERBALIST_MIX = Value("VLM")
val VOTE_HERBALIST_DROP = Value("VLD")
val VOTE_ALCHEMIST_ELIXIR = Value("VLF")
val VOTE_ALCHEMIST_POISON = Value("VLQ")
val VOTE_RUNNER = Value("VR")
val VOTE_SCHOLAR_EXAMINE = Value("VOE")
val VOTE_SCHOLAR_EXAMINE2 = Value("VOF")
val VOTE_SCHOLAR_ANALYZE = Value("VOA")
val VOTE_SCHOLAR_REPORT = Value("VOR")
val VOTE_ARCHMAGE_DISPELL = Value("VZD")
val VOTE_ARCHMAGE_SUMMON = Value("VZS")
val VOTE_WEREWOLF = Value("VW")
val VOTE_WOLFCUB = Value("VX")
val VOTE_MADMAN_STUN1 = Value("VM1")
val VOTE_MADMAN_STUN3 = Value("VM3")
val VOTE_MADMAN_STUN = Value("VMT")
val VOTE_MADMAN_SUICIDE = Value("VMS")
val VOTE_MADMAN_DUEL = Value("VMD")
val VOTE_SORCEROR_AUGURE = Value("VSA")
val VOTE_SORCEROR_WHISPER = Value("VSW")
val VOTE_SORCEROR_CONJURE = Value("VSC")
val VOTE_SORCEROR_SHOUT = Value("VSS")
val VOTE_SORCEROR_BELIEVE = Value("VSB")
val VOTE_SORCEROR_SEAR = Value("VSE")
val VOTE_SORCEROR_SUMMON = Value("VSM")
val VOTE_FOX = Value("VF")
val VOTE_FOX1 = Value("VF1")
val VOTE_FOX2 = Value("VF2")
val VOTE_BETRAYER_DISGUISE = Value("VBD")
val VOTE_BETRAYER_CHANGE = Value("VBC")
val VOTE_BETRAYER_FOG = Value("VBF")
val VOTE_GODFAT_SPECIAL1 = Value("VT1")
val VOTE_GODFAT_SPECIAL2 = Value("VT2")
val VOTE_GODFAT_SPECIAL3 = Value("VT3")
val VOTE_GODFAT_SPECIAL4 = Value("VT4")
val VOTE_GODFAT_DEATHGAZE = Value("VTD")
val VOTE_GODFAT_HELLWORD = Value("VTH")
val VOTE_GODFAT_COLORSPRAY = Value("VTC")
val VOTE_GODFAT_BLIND = Value("VTB")
val VOTE_GODFAT_BLIND2 = Value("VTL")
val VOTE_GODFAT_EXCHANGE = Value("VTE")
val VOTE_GODFAT_NECROMANCER = Value("VT!")
val VOTE_GODFAT_HUNTER = Value("VT@")
val VOTE_GODFAT_HERBALIST = Value("VT#")
val VOTE_GODFAT_POISONER = Value("VT$")
val VOTE_GODFAT_SCHOLAR = Value("VT%")
val VOTE_DEMON_CHAOS = Value("VDV")
val VOTE_DEMON_CURSE = Value("VDC")
val VOTE_DEMON_CURSE2 = Value("VDO")
val VOTE_DEMON_DOMINATE = Value("VDD")
val VOTE_DEMON_VORTEX = Value("VDR")
val VOTE_FALLENANGEL_FALLEN = Value("VFF")
val VOTE_PENGUIN_ICE = Value("VKI")
val VOTE_PENGUIN_CHILL = Value("VKC")
val VOTE_INHERITER = Value("VI")
val VOTE_SHIFTER = Value("VS")
val VOTE_SHIFTER2 = Value("VS2")
val VOTE_PONTIFF = Value("VJ")
val VOTE_PONTIFF_COMMAND = Value("VJC")
val VOTE_PONTIFF_AURA = Value("VJA")
// tarot-card votes use lower-case codes to stay distinct from the above
val VOTE_CARD_FOOL = Value("Vr")
val VOTE_CARD_MAGICIAN = Value("Va")
val VOTE_CARD_CHARIOT = Value("Vh")
val VOTE_CARD_STRENGTH = Value("Vm3")
val VOTE_CARD_HERMIT = Value("Vbd")
val VOTE_CARD_JUSTICE = Value("Voe")
val VOTE_CARD_TOWER = Value("Vdv")
val VOTE_CARD_SUN = Value("VXS")
val VOTE_NO_ACTION = Value("VN")
val SPECIAL_VOTE_ABANDON = Value("SVA")
// Display text for each death cause, used by get_death_text.
// Changed from `def` to `val`: the mapping is constant, so the previous
// `def` needlessly rebuilt the whole TreeMap on every lookup. Death types
// without an entry (e.g. DEATH_DEMON) fall back to "死因不明" at the call
// site.
val DEATH_MAP = scala.collection.immutable.TreeMap(
  DEATH -> "死因不明",
  DEATH_BETRAYER -> "跟隨妖狐死亡",
  DEATH_CLERIC -> "牧師聖域術犧牲",
  DEATH_FOX -> "被占卜師咒殺",
  DEATH_SORCEROR -> "被狂巫咒殺",
  DEATH_HANGED -> "被吊死",
  DEATH_HUNTER -> "獵人特殊死亡",
  DEATH_HUNTER_KILL -> "被獵人擊殺",
  DEATH_MADMAN -> "狂人自爆死亡",
  DEATH_POISON_D -> "被埋毒者毒死",
  DEATH_POISON_N -> "被埋毒者毒死",
  DEATH_POISON_H -> "被藥師毒死",
  DEATH_RUNNER -> "逃亡者特殊死亡",
  DEATH_SUDDEN -> "暴斃死亡",
  DEATH_EATEN -> "被人狼襲擊",
  DEATH_GODFAT -> "被哥德法逆咒殺",
  DEATH_WOLFCUB -> "跟隨人狼死亡",
  DEATH_WOLFCUB_EATEN -> "被幼狼襲擊",
  DEATH_FALLENANGEL -> "被墮天使生贅",
  DEATH_PENGUIN_ICE -> "被企鵝冰凍",
  DEATH_SUBPONTIFF -> "跟隨教主死亡",
  DEATH_LINKS -> "跟隨生命連繫者死亡",
  DEATH_LOVER -> "跟隨戀人死亡",
  DEATH_DEATH_NOTE -> "被死亡筆記寫上")
// Icon filename for each death cause, used by get_death_gif.
// Changed from `def` to `val` for the same reason as DEATH_MAP: the map is
// constant and should be built exactly once. Unknown types fall back to
// "death_unknown.gif" at the call site.
val DEATH_MAP_GIF = scala.collection.immutable.TreeMap(
  DEATH -> "death_unknown.gif",
  DEATH_BETRAYER -> "death_betrayer.gif",
  DEATH_CLERIC -> "death_cleric.gif",
  DEATH_FOX -> "death_fox.gif",
  DEATH_SORCEROR -> "death_sorceror.gif",
  DEATH_HANGED -> "death_hanged.gif",
  DEATH_HUNTER -> "death_hunter.gif",
  DEATH_HUNTER_KILL -> "death_hunter_kill.gif",
  DEATH_MADMAN -> "death_madman.gif",
  DEATH_POISON_D -> "death_poison.gif",
  DEATH_POISON_N -> "death_poison.gif",
  DEATH_POISON_H -> "death_herbalist.gif",
  DEATH_RUNNER -> "death_runner.gif",
  DEATH_SUDDEN -> "death_sudden.gif",
  DEATH_EATEN -> "death_eaten.gif",
  DEATH_GODFAT -> "death_godfat.gif",
  DEATH_WOLFCUB -> "death_wolfcub.gif",
  DEATH_WOLFCUB_EATEN -> "death_wolfcub_eaten.gif",
  DEATH_FALLENANGEL -> "death_fallenangel.gif",
  DEATH_PENGUIN_ICE -> "death_penguin.gif",
  DEATH_SUBPONTIFF -> "death_subpontiff.gif",
  DEATH_LINKS -> "death_links.gif",
  DEATH_LOVER -> "death_lovers.gif",
  DEATH_DEATH_NOTE -> "death_death_note.gif")
/** Human-readable death-cause text; types missing from DEATH_MAP yield "死因不明". */
def get_death_text(death : MTypeEnum.Value) : String =
  DEATH_MAP.getOrElse(death, "死因不明")
/** String-code overload: unparseable codes are treated as the generic DEATH. */
def get_death_text(mtype_string : String) : String =
  get_death_text(valueOf(mtype_string).getOrElse(DEATH))
/** Icon filename for a death cause; unmapped types yield "death_unknown.gif". */
def get_death_gif(death : MTypeEnum.Value) : String =
  DEATH_MAP_GIF.getOrElse(death, "death_unknown.gif")
/** String-code overload: unparseable codes are treated as the generic DEATH. */
def get_death_gif(mtype_string : String) : String =
  get_death_gif(valueOf(mtype_string).getOrElse(DEATH))
} | Plummtw/jinrou_Lift | src/main/scala/org/plummtw/jinrou/enum/MTypeEnum.scala | Scala | apache-2.0 | 10,271 |
package skinny.mailer.feature
import skinny.mailer.{ SkinnyMessage, SkinnyMailerBase }
import javax.mail.{ Session, Transport }
/**
* Provides SkinnyMessage builder.
*/
trait MessageBuilderFeature extends SkinnyMailerBase {
self: ConfigFeature with SmtpConfigFeature with ExtraConfigFeature with JavaMailSessionFeature =>
// Each entry point below creates a fresh message via mail(), pre-populated
// with a single attribute, and wraps it in a SkinnyMessageBuilder so the
// remaining attributes can be chained fluently.
def from(from: String)(implicit s: Session = session): SkinnyMessageBuilder = SkinnyMessageBuilder(mail(from = from))
def to(to: String*)(implicit s: Session = session): SkinnyMessageBuilder = SkinnyMessageBuilder(mail(to = to))
def subject(subject: String)(implicit s: Session = session): SkinnyMessageBuilder = SkinnyMessageBuilder(mail(subject = subject))
def body(body: String)(implicit s: Session = session): SkinnyMessageBuilder = SkinnyMessageBuilder(mail(body = body))
// Starts a builder whose body is delivered as text/html in the given
// charset (defaults to the configured charset).
def htmlBody(body: String, charset: String = config.charset)(implicit s: Session = session): SkinnyMessageBuilder = {
val msg = mail(body = body)
msg.charset = charset
msg.contentType = "text/html"
msg.setText(body, charset, "html")
SkinnyMessageBuilder(msg)
}
def bcc(bcc: String)(implicit s: Session = session): SkinnyMessageBuilder = SkinnyMessageBuilder({
val m = mail()
m.bcc = bcc
m
})
def cc(cc: String)(implicit s: Session = session): SkinnyMessageBuilder = SkinnyMessageBuilder({
val m = mail()
m.cc = cc
m
})
/**
* SkinnyMessage builder.
*
* Fluent wrapper around a single SkinnyMessage: each setter mutates the
* wrapped message in place and returns this builder for further chaining.
*
* @param message underlying message (mutable)
*/
case class SkinnyMessageBuilder(message: SkinnyMessage) {
def from(from: String): SkinnyMessageBuilder = {
message.from = from
this
}
def to(to: String*): SkinnyMessageBuilder = {
message.to = to
this
}
// Subject is always encoded with the configured charset.
def subject(subject: String): SkinnyMessageBuilder = {
message.setSubject(subject, config.charset)
this
}
def body(body: String): SkinnyMessageBuilder = {
message.body = body
this
}
// Switches the message to text/html content in the given charset.
def htmlBody(body: String, charset: String = config.charset): SkinnyMessageBuilder = {
message.charset = charset
message.contentType = "text/html"
message.setText(body, charset, "html")
this
}
def bcc(bcc: String*): SkinnyMessageBuilder = {
message.bcc = bcc
this
}
def cc(cc: String*): SkinnyMessageBuilder = {
message.cc = cc
this
}
// Appends one attachment as a (filename, content, MIME type) triple.
def attachment(filename: String, content: AnyRef, mimeType: String): SkinnyMessageBuilder = {
message.attachments ++= (filename, content, mimeType)
this
}
// Fails fast when from/to are missing. The null guards look redundant but
// are kept deliberately — see the TODO about NPEs under `sbt test`.
def validate() = {
// TODO NPE in sbt test (sbt mailer/test works)
if (message != null && message.from != null && message.to != null) {
if (message.from.isEmpty) throw new IllegalStateException("from address is absent")
else if (message.to.isEmpty) throw new IllegalStateException("to addresses are empty")
}
}
// Validates and sends; opens a new transport unless one is supplied implicitly.
def deliver()(implicit t: Transport = message.connect()): Unit = {
validate()
message.deliver()
}
// Validates and sends over the given transport, optionally keeping it open.
def deliver(t: Transport, keepConnection: Boolean): Unit = {
validate()
message.deliver(t, keepConnection)
}
}
}
| BlackPrincess/skinny-framework | mailer/src/main/scala/skinny/mailer/feature/MessageBuilderFeature.scala | Scala | mit | 3,158 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package system.basic
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import common.TestHelpers
import common.TestUtils
import common.WskOperations
import common.WskProps
import common.WskTestHelpers
import common.RuleActivationResult
import spray.json._
import spray.json.DefaultJsonProtocol._
import java.time.Instant
import scala.concurrent.duration.DurationInt
@RunWith(classOf[JUnitRunner])
abstract class WskRuleTests extends TestHelpers with WskTestHelpers {
// Credentials/properties picked up implicitly by every wsk invocation below.
implicit val wskprops = WskProps()
// Concrete wsk operations (CLI- or REST-backed) are supplied by subclasses.
val wsk: WskOperations
val defaultAction = TestUtils.getTestActionFilename("wc.js")
val secondAction = TestUtils.getTestActionFilename("hello.js")
val testString = "this is a test"
// Expected result of running wc.js over testString: its word count.
val testResult = JsObject("count" -> testString.split(" ").length.toJson)
// Flaky-environment mitigation: each scenario is retried this many times.
val retriesOnTestFailures = 5
val waitBeforeRetry = 1.second
/**
* Sets up trigger -> rule -> action triplets. Deduplicates triggers and rules
* and links it all up.
*
* @param rules Tuple3s containing
* (rule, trigger, (action name for created action, action name for the rule binding, actionFile))
* where the action name for the created action is allowed to differ from that used by the rule binding
* for cases that reference actions in a package binding.
*/
def ruleSetup(rules: Seq[(String, String, (String, String, String))], assetHelper: AssetCleaner): Unit = {
  // Create each distinct trigger exactly once, even when rules share it.
  for (triggerName <- rules.map(_._2).distinct) {
    assetHelper.withCleaner(wsk.trigger, triggerName) { (trigger, name) =>
      trigger.create(name)
    }
  }
  // Likewise create each distinct action exactly once.
  for ((createName, _, file) <- rules.map(_._3).distinct) {
    assetHelper.withCleaner(wsk.action, createName) { (action, name) =>
      action.create(name, Some(file))
    }
  }
  // Finally bind every rule to its trigger and (possibly package-qualified)
  // action name.
  for ((ruleName, triggerName, (_, boundActionName, _)) <- rules) {
    assetHelper.withCleaner(wsk.rule, ruleName) { (rule, name) =>
      rule.create(name, triggerName, boundActionName)
    }
  }
}
val behaviorname = "Whisk rules"
behavior of s"$behaviorname"
it should "invoke the action attached on trigger fire, creating an activation for each entity including the cause" in withAssetCleaner(
wskprops) { (wp, assetHelper) =>
org.apache.openwhisk.utils
.retry(
{
assetHelper.deleteAssets()
val ruleName = withTimestamp("r1to1")
val triggerName = withTimestamp("t1to1")
val actionName = withTimestamp("a1 to 1") // spaces in name intended for greater test coverage
ruleSetup(Seq((ruleName, triggerName, (actionName, actionName, defaultAction))), assetHelper)
val run = wsk.trigger.fire(triggerName, Map("payload" -> testString.toJson))
withActivation(wsk.activation, run) {
triggerActivation =>
triggerActivation.cause shouldBe None
val ruleActivations = triggerActivation.logs.get.map(_.parseJson.convertTo[RuleActivationResult])
ruleActivations should have size 1
val ruleActivation = ruleActivations.head
ruleActivation.success shouldBe true
ruleActivation.statusCode shouldBe 0
withActivation(wsk.activation, ruleActivation.activationId) { actionActivation =>
actionActivation.response.result shouldBe Some(testResult)
actionActivation.cause shouldBe None
}
}
},
retriesOnTestFailures,
Some(waitBeforeRetry),
Some(
s"${this.getClass.getName} > $behaviorname should invoke the action attached on trigger fire, creating an activation for each entity including the cause not successful, retrying.."))
}
it should "invoke the action from a package attached on trigger fire, creating an activation for each entity including the cause" in withAssetCleaner(
wskprops) { (wp, assetHelper) =>
org.apache.openwhisk.utils
.retry(
{
assetHelper.deleteAssets()
val ruleName = withTimestamp("pr1to1")
val triggerName = withTimestamp("pt1to1")
val pkgName = withTimestamp("rule pkg") // spaces in name intended to test uri path encoding
val actionName = withTimestamp("a1 to 1")
val pkgActionName = s"$pkgName/$actionName"
assetHelper.withCleaner(wsk.pkg, pkgName) { (pkg, name) =>
pkg.create(name)
}
ruleSetup(Seq((ruleName, triggerName, (pkgActionName, pkgActionName, defaultAction))), assetHelper)
val now = Instant.now
val run = wsk.trigger.fire(triggerName, Map("payload" -> testString.toJson))
withActivation(wsk.activation, run) {
triggerActivation =>
triggerActivation.cause shouldBe None
val ruleActivations = triggerActivation.logs.get.map(_.parseJson.convertTo[RuleActivationResult])
ruleActivations should have size 1
val ruleActivation = ruleActivations.head
ruleActivation.success shouldBe true
ruleActivation.statusCode shouldBe 0
withActivation(wsk.activation, ruleActivation.activationId) { actionActivation =>
actionActivation.response.result shouldBe Some(testResult)
}
}
},
retriesOnTestFailures,
Some(waitBeforeRetry),
Some(
s"${this.getClass.getName} > $behaviorname should invoke the action from a package attached on trigger fire, creating an activation for each entity including the cause not successful, retrying.."))
}
it should "invoke the action from a package binding attached on trigger fire, creating an activation for each entity including the cause" in withAssetCleaner(
wskprops) { (wp, assetHelper) =>
org.apache.openwhisk.utils
.retry(
{
assetHelper.deleteAssets()
val ruleName = withTimestamp("pr1to1")
val triggerName = withTimestamp("pt1to1")
val pkgName = withTimestamp("rule pkg") // spaces in name intended to test uri path encoding
val pkgBindingName = withTimestamp("rule pkg binding")
val actionName = withTimestamp("a1 to 1")
val pkgActionName = s"$pkgName/$actionName"
assetHelper.withCleaner(wsk.pkg, pkgName) { (pkg, name) =>
pkg.create(name)
}
assetHelper.withCleaner(wsk.pkg, pkgBindingName) { (pkg, name) =>
pkg.bind(pkgName, pkgBindingName)
}
ruleSetup(
Seq((ruleName, triggerName, (pkgActionName, s"$pkgBindingName/$actionName", defaultAction))),
assetHelper)
val run = wsk.trigger.fire(triggerName, Map("payload" -> testString.toJson))
withActivation(wsk.activation, run) {
triggerActivation =>
triggerActivation.cause shouldBe None
val ruleActivations = triggerActivation.logs.get.map(_.parseJson.convertTo[RuleActivationResult])
ruleActivations should have size 1
val ruleActivation = ruleActivations.head
ruleActivation.success shouldBe true
ruleActivation.statusCode shouldBe 0
withActivation(wsk.activation, ruleActivation.activationId) { actionActivation =>
actionActivation.response.result shouldBe Some(testResult)
actionActivation.cause shouldBe None
}
}
},
retriesOnTestFailures,
Some(waitBeforeRetry),
Some(
s"${this.getClass.getName} > $behaviorname should invoke the action from a package binding attached on trigger fire, creating an activation for each entity including the cause not successful, retrying.."))
}
it should "not activate an action if the rule is deleted when the trigger is fired" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
org.apache.openwhisk.utils
.retry(
{
assetHelper.deleteAssets()
val ruleName = withTimestamp("ruleDelete")
val triggerName = withTimestamp("ruleDeleteTrigger")
val actionName = withTimestamp("ruleDeleteAction")
assetHelper.withCleaner(wsk.trigger, triggerName) { (trigger, name) =>
trigger.create(name)
}
assetHelper.withCleaner(wsk.action, actionName) { (action, name) =>
action.create(name, Some(defaultAction))
}
// confirmDelete = false because the rule is deleted mid-test below.
assetHelper.withCleaner(wsk.rule, ruleName, confirmDelete = false) { (rule, name) =>
rule.create(name, triggerName, actionName)
}
val first = wsk.trigger.fire(triggerName, Map("payload" -> "bogus".toJson))
wsk.rule.delete(ruleName)
// `second` is intentionally unused: firing after the rule's deletion must
// produce no activation (asserted implicitly by the comment below).
val second = wsk.trigger.fire(triggerName, Map("payload" -> "bogus2".toJson))
withActivation(wsk.activation, first)(activation => activation.logs.get should have size 1)
// there won't be an activation for the second fire since there is no rule
},
retriesOnTestFailures,
Some(waitBeforeRetry),
Some(
s"${this.getClass.getName} > $behaviorname should not activate an action if the rule is deleted when the trigger is fired not successful, retrying.."))
}
it should "enable and disable a rule and check action is activated only when rule is enabled" in withAssetCleaner(
wskprops) { (wp, assetHelper) =>
org.apache.openwhisk.utils
.retry(
{
assetHelper.deleteAssets()
val ruleName = withTimestamp("ruleDisable")
val triggerName = withTimestamp("ruleDisableTrigger")
val actionName = withTimestamp("ruleDisableAction")
ruleSetup(Seq((ruleName, triggerName, (actionName, actionName, defaultAction))), assetHelper)
val first = wsk.trigger.fire(triggerName, Map("payload" -> testString.toJson))
wsk.rule.disable(ruleName)
val second = wsk.trigger.fire(triggerName, Map("payload" -> s"$testString with added words".toJson))
wsk.rule.enable(ruleName)
val third = wsk.trigger.fire(triggerName, Map("payload" -> testString.toJson))
withActivation(wsk.activation, first) { triggerActivation =>
val ruleActivations = triggerActivation.logs.get.map(_.parseJson.convertTo[RuleActivationResult])
ruleActivations should have size 1
val ruleActivation = ruleActivations.head
withActivation(wsk.activation, ruleActivation.activationId) { actionActivation =>
actionActivation.response.result shouldBe Some(testResult)
}
}
// second fire will not write an activation
withActivation(wsk.activation, third) { triggerActivation =>
val ruleActivations = triggerActivation.logs.get.map(_.parseJson.convertTo[RuleActivationResult])
ruleActivations should have size 1
val ruleActivation = ruleActivations.head
withActivation(wsk.activation, ruleActivation.activationId) { actionActivation =>
actionActivation.response.result shouldBe Some(testResult)
}
}
},
retriesOnTestFailures,
Some(waitBeforeRetry),
Some(
s"${this.getClass.getName} > $behaviorname should enable and disable a rule and check action is activated only when rule is enabled not successful, retrying.."))
}
it should "be able to recreate a rule with the same name and match it successfully" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
org.apache.openwhisk.utils
.retry(
{
assetHelper.deleteAssets()
val ruleName = withTimestamp("ruleRecreate")
val triggerName1 = withTimestamp("ruleRecreateTrigger1")
val triggerName2 = withTimestamp("ruleRecreateTrigger2")
val actionName = withTimestamp("ruleRecreateAction")
assetHelper.withCleaner(wsk.trigger, triggerName1) { (trigger, name) =>
trigger.create(name)
}
assetHelper.withCleaner(wsk.action, actionName) { (action, name) =>
action.create(name, Some(defaultAction))
}
assetHelper.withCleaner(wsk.rule, ruleName, confirmDelete = false) { (rule, name) =>
rule.create(name, triggerName1, actionName)
}
wsk.rule.delete(ruleName)
assetHelper.withCleaner(wsk.trigger, triggerName2) { (trigger, name) =>
trigger.create(name)
}
assetHelper.withCleaner(wsk.rule, ruleName) { (rule, name) =>
rule.create(name, triggerName2, actionName)
}
val first = wsk.trigger.fire(triggerName2, Map("payload" -> testString.toJson))
withActivation(wsk.activation, first) { triggerActivation =>
val ruleActivations = triggerActivation.logs.get.map(_.parseJson.convertTo[RuleActivationResult])
ruleActivations should have size 1
val ruleActivation = ruleActivations.head
withActivation(wsk.activation, ruleActivation.activationId) { actionActivation =>
actionActivation.response.result shouldBe Some(testResult)
}
}
},
retriesOnTestFailures,
Some(waitBeforeRetry),
Some(
s"${this.getClass.getName} > $behaviorname should be able to recreate a rule with the same name and match it successfully not successful, retrying.."))
}
it should "connect two triggers via rules to one action and activate it accordingly" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
org.apache.openwhisk.utils
.retry(
{
assetHelper.deleteAssets()
val triggerName1 = withTimestamp("t2to1a")
val triggerName2 = withTimestamp("t2to1b")
val actionName = withTimestamp("a2to1")
ruleSetup(
Seq(
("r2to1a", triggerName1, (actionName, actionName, defaultAction)),
("r2to1b", triggerName2, (actionName, actionName, defaultAction))),
assetHelper)
val testPayloads = Seq("got three words", "got four words, period")
val runs = testPayloads.map(payload => wsk.trigger.fire(triggerName1, Map("payload" -> payload.toJson)))
runs.zip(testPayloads).foreach {
case (run, payload) =>
withActivation(wsk.activation, run) { triggerActivation =>
val ruleActivations = triggerActivation.logs.get.map(_.parseJson.convertTo[RuleActivationResult])
ruleActivations should have size 1
val ruleActivation = ruleActivations.head
withActivation(wsk.activation, ruleActivation.activationId) { actionActivation =>
actionActivation.response.result shouldBe Some(
JsObject("count" -> payload.split(" ").length.toJson))
}
}
}
},
retriesOnTestFailures,
Some(waitBeforeRetry),
Some(
s"${this.getClass.getName} > $behaviorname should connect two triggers via rules to one action and activate it accordingly not successful, retrying.."))
}
it should "connect one trigger to two different actions, invoking them both eventually" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
org.apache.openwhisk.utils
.retry(
{
assetHelper.deleteAssets()
val triggerName = withTimestamp("t1to2")
val actionName1 = withTimestamp("a1to2a")
val actionName2 = withTimestamp("a1to2b")
ruleSetup(
Seq(
("r1to2a", triggerName, (actionName1, actionName1, defaultAction)),
("r1to2b", triggerName, (actionName2, actionName2, secondAction))),
assetHelper)
val run = wsk.trigger.fire(triggerName, Map("payload" -> testString.toJson))
withActivation(wsk.activation, run) {
triggerActivation =>
val ruleActivations = triggerActivation.logs.get.map(_.parseJson.convertTo[RuleActivationResult])
ruleActivations should have size 2
val action1Result = ruleActivations.find(_.action.contains(actionName1)).get
val action2Result = ruleActivations.find(_.action.contains(actionName2)).get
withActivation(wsk.activation, action1Result.activationId) { actionActivation =>
actionActivation.response.result shouldBe Some(testResult)
}
withActivation(wsk.activation, action2Result.activationId) { actionActivation =>
actionActivation.logs.get.mkString(" ") should include(s"hello, $testString")
}
}
},
retriesOnTestFailures,
Some(waitBeforeRetry),
Some(
s"${this.getClass.getName} > $behaviorname should connect one trigger to two different actions, invoking them both eventually not successful, retrying.."))
}
it should "connect two triggers to two different actions, invoking them both eventually" in withAssetCleaner(wskprops) {
(wp, assetHelper) =>
org.apache.openwhisk.utils
.retry(
{
assetHelper.deleteAssets()
val triggerName1 = withTimestamp("t1to1a")
val triggerName2 = withTimestamp("t1to1b")
val actionName1 = withTimestamp("a1to1a")
val actionName2 = withTimestamp("a1to1b")
ruleSetup(
Seq(
("r2to2a", triggerName1, (actionName1, actionName1, defaultAction)),
("r2to2b", triggerName1, (actionName2, actionName2, secondAction)),
("r2to2c", triggerName2, (actionName1, actionName1, defaultAction)),
("r2to2d", triggerName2, (actionName2, actionName2, secondAction))),
assetHelper)
val testPayloads = Seq("got three words", "got four words, period")
val runs = Seq(triggerName1, triggerName2).zip(testPayloads).map {
case (trigger, payload) =>
payload -> wsk.trigger.fire(trigger, Map("payload" -> payload.toJson))
}
runs.foreach {
case (payload, run) =>
withActivation(wsk.activation, run) {
triggerActivation =>
val ruleActivations = triggerActivation.logs.get.map(_.parseJson.convertTo[RuleActivationResult])
ruleActivations should have size 2 // each trigger has 2 actions attached
val action1Result = ruleActivations.find(_.action.contains(actionName1)).get
val action2Result = ruleActivations.find(_.action.contains(actionName2)).get
withActivation(wsk.activation, action1Result.activationId) { actionActivation =>
actionActivation.response.result shouldBe Some(
JsObject("count" -> payload.split(" ").length.toJson))
}
withActivation(wsk.activation, action2Result.activationId) { actionActivation =>
actionActivation.logs.get.mkString(" ") should include(s"hello, $payload")
}
}
}
},
retriesOnTestFailures,
Some(waitBeforeRetry),
Some(
s"${this.getClass.getName} > $behaviorname should connect two triggers to two different actions, invoking them both eventually not successful, retrying.."))
}
it should "disable a rule and check its status is displayed when listed" in withAssetCleaner(wskprops) {
  (wp, assetHelper) =>
    org.apache.openwhisk.utils
      .retry(
        {
          assetHelper.deleteAssets()
          val ruleName = withTimestamp("ruleDisable")
          val ruleName2 = withTimestamp("ruleEnable")
          val triggerName = withTimestamp("ruleDisableTrigger")
          val actionName = withTimestamp("ruleDisableAction")
          // Two rules on the same trigger/action; only the first is disabled.
          ruleSetup(
            Seq(
              (ruleName, triggerName, (actionName, actionName, defaultAction)),
              (ruleName2, triggerName, (actionName, actionName, defaultAction))),
            assetHelper)
          wsk.rule.disable(ruleName)
          // verifyRuleList performs its own `wsk.rule.list()`; the unused
          // extra listing that used to sit here has been removed.
          verifyRuleList(ruleName2, ruleName)
        },
        retriesOnTestFailures,
        Some(waitBeforeRetry),
        Some(
          s"${this.getClass.getName} > $behaviorname should disable a rule and check its status is displayed when listed not successful, retrying.."))
}
// Asserts that `ruleNameEnable` is listed as active and `ruleName` as
// inactive in the `wsk rule list` output.
def verifyRuleList(ruleNameEnable: String, ruleName: String) = {
  val ruleListResult = wsk.rule.list()
  val ruleList = ruleListResult.stdout
  // Materialize the lines before searching: the previous code reused a single
  // Iterator for both `find` calls, so the second search only scanned the
  // lines *after* the first match and failed whenever the inactive rule was
  // listed before the active one.
  val listOutput = ruleList.linesIterator.toList
  listOutput.find(_.contains(ruleNameEnable)).get should (include(ruleNameEnable) and include("active"))
  listOutput.find(_.contains(ruleName)).get should (include(ruleName) and include("inactive"))
  ruleList should not include "Unknown"
}
}
| RSulzmann/openwhisk | tests/src/test/scala/system/basic/WskRuleTests.scala | Scala | apache-2.0 | 22,192 |
package domain.strategy.other
import domain.Side._
import domain.models.{Ticker, Ordering}
import domain.strategy.{StrategyState, Strategy}
import repository.model.scalatrader.User
/**
 * Test-only strategy: places exactly one Buy entry over its lifetime, closes
 * whatever order is open, and otherwise stays idle.
 */
class OnceForTestStrategy(st: StrategyState, user: User) extends Strategy(st, user) {
  // Set once the single test entry has been issued; never reset.
  var done = false

  override def judgeByTicker(ticker: Ticker): Option[Ordering] = {
    if (state.order.nonEmpty) {
      // An order is open — close it regardless of the ticker.
      close()
    } else if (done) {
      // Already entered once; remain idle for the rest of the run.
      None
    } else {
      done = true
      entry(Buy)
    }
  }
}
| rysh/scalatrader | scalatrader/app/domain/strategy/other/OnceForTestStrategy.scala | Scala | mit | 522 |
/*******************************************************************************
* Copyright (c) 2012-2013
* - Bruno C.d.S. Oliveira (oliveira@comp.nus.edu.sg)
* - Tijs van der Storm (storm@cwi.nl)
* - Alex Loh (alexloh@cs.utexas.edu)
* - William R. Cook (wcook@cs.utexas.edu)
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
********************************************************************************/
package oalg.algebra.paper
object Exp {
// Object-algebra interface for the Exp language: a factory abstracted over
// the carrier type `Exp`. New semantics are added by implementing this trait
// with a different carrier; new variants would be added by extending it.
trait ExpAlg[Exp] {
def Lit(x : Int) : Exp
def Add(e1 : Exp, e2 : Exp) : Exp
}
// Carrier for the evaluation semantics.
trait IEval {
def eval() : Int
}
// Evaluation algebra: builds expressions that compute their integer value.
trait ExpEval extends ExpAlg[IEval] {
def Lit(x : Int) : IEval = new IEval {
def eval() : Int = x
}
def Add(e1 : IEval, e2 : IEval) : IEval = new IEval {
def eval() : Int = e1.eval() + e2.eval()
}
}
object ExpEval extends ExpEval
// Carrier for the pretty-printing semantics.
trait IPrint {
def print() : String
}
// Printing algebra: builds expressions that render themselves as text.
trait ExpPrint extends ExpAlg[IPrint] {
def Lit(x : Int) : IPrint = new IPrint {
def print() : String = x.toString()
}
def Add(e1 : IPrint, e2 : IPrint) : IPrint = new IPrint {
def print() : String = e1.print() + " + " + e2.print()
}
}
object ExpPrint extends ExpPrint
object OATesting {
// A sample term, 5 + (6 + 6), built against an arbitrary algebra.
def exp[Exp](f : ExpAlg[Exp]) : Exp =
f.Add(f.Lit(5), f.Add(f.Lit(6),f.Lit(6)))
// NOTE: evaluated (and prints) as a side effect of object initialization,
// i.e. the first time OATesting is referenced.
val test1 = {
val o1 : IEval = exp(ExpEval)
val o2 : IPrint = exp(ExpPrint)
println("Eval: " + o1.eval() + "\\nPrint: " + o2.print())
}
}
// Combinators using pairs: run two algebras in lockstep by pairing carriers.
trait ExpMergePair[A,B] extends ExpAlg[(A,B)] {
val alg1 : ExpAlg[A]
val alg2 : ExpAlg[B]
def Lit(x : Int) : (A,B) =
(alg1.Lit(x), alg2.Lit(x))
def Add(e1 : (A,B), e2 : (A,B)) : (A,B) =
(alg1.Add(e1._1,e2._1), alg2.Add(e1._2,e2._2))
}
object OACTesting {
def exp[Exp](f : ExpAlg[Exp]) : Exp =
f.Add(f.Lit(5), f.Add(f.Lit(6),f.Lit(6)))
// Print and eval semantics merged into one pair-carrier algebra.
object ExpPrintEval extends ExpMergePair[IPrint,IEval] {
val alg1 = ExpPrint
val alg2 = ExpEval
}
// NOTE: like test1, runs at object-initialization time.
val test2 = {
val o : (IPrint,IEval) = exp(ExpPrintEval)
println("Eval: " + o._2.eval() + "\\nPrint: " + o._1.print())
}
}
} | tvdstorm/oalgcomp | src/oalg/algebra/paper/Basic.scala | Scala | epl-1.0 | 2,373 |
package bita
import akka.actor.{ ActorSystem, Actor, Props, ActorRef }
import akka.bita.{ RandomScheduleHelper, Scheduler }
import akka.bita.pattern.Patterns._
import akka.util.duration._
import akka.util.Timeout
import akka.dispatch.Await
import bita.util.FileHelper
import bita.criteria._
import bita.ScheduleOptimization._
import org.scalatest._
import java.util.concurrent.TimeUnit
import akka.testkit.TestProbe
import util._
import java.io.File
import scala.io.Source
/**
 * Base battery of schedule-generation tests for actor programs instrumented with Bita.
 *
 * Concrete subclasses override `name` (used for the result directories) and
 * `expectFailures` (whether the generated schedules are expected to expose a bug).
 * State such as `random`, `numShedules` and `numFaulty`, and helpers such as
 * `testRandomByTime` / `runGenerateSchedulesAndTest`, come from `ImprovedTestHelper`.
 */
abstract class BitaTests extends FunSuite with ImprovedTestHelper with BeforeAndAfterEach {

  // The name of this test battery
  def name = "unkown"

  // Are we expecting certain shedules to fail?
  def expectFailures = false

  // The delay to wait Futures/Awaits/...
  implicit val timeout = Timeout(5000, TimeUnit.MILLISECONDS)

  // delay between start and end message
  def delay = 1000

  // Available criterions in Bita: PRCriterion, PCRCriterion, PMHRCriterion
  def criteria = Array[Criterion](PRCriterion, PCRCriterion, PMHRCriterion)

  // Folders where we need to store the test results
  val resultDir = "test-results/%s/".format(this.name)
  val randomTracesDir = resultDir + "random/"
  val randomTracesTestDir = resultDir + "random-test/"

  // Configuration knobs; overwritten from the scalatest config map in beforeEach.
  var verbose = 0
  var randomTime = 0
  var randomTraces = 1

  // This test will keep on generating random schedules for 5 minutes or until an bug is found.
  test("Test with random sheduler within a timeout", Tag("random-schedule")) {
    random = true
    if (randomTime > 0) {
      testRandomByTime(name, randomTracesTestDir, randomTime)
    }
    random = false
  }

  // Generate and test schedules at once.
  test("Generate and test schedules with criterion", Tag("test")) {
    // NOTE(review): this listing is never used below — runGenerateSchedulesAndTest
    // receives the `randomTraces` count instead; confirm whether passing these
    // trace files was the original intent.
    var randomTrace = FileHelper.getFiles(randomTracesDir, (name ⇒ name.contains("-trace.txt")))
    for (criterion ← criteria) {
      for (opt ← criterion.optimizations.-(NONE)) {
        var scheduleDir = resultDir + "%s-%s/".format(criterion.name, opt)
        FileHelper.emptyDir(scheduleDir)
        runGenerateSchedulesAndTest(name, scheduleDir, randomTraces, criterion, opt)
      }
    }
    measure();
    summary();
    validate();
  }

  // This will count how many bugs there were found with a certain schedule.
  // Giving you an indication of how good a shedule is.
  private def measure() = {
    if (verbose >= 3) {
      // The number of traces after which the coverage should be measured.
      var interval = 5
      for (criterion ← criteria) {
        for (opt ← criterion.optimizations.-(NONE)) {
          var scheduleDir = resultDir + "%s-%s/".format(criterion.name, opt)
          if (new java.io.File(scheduleDir).exists) {
            var randomTraces = FileHelper.getFiles(randomTracesDir, (name ⇒ name.contains("-trace.txt")))
            FileHelper.copyFiles(randomTraces, scheduleDir)
            var resultFile = scheduleDir + "%s-%s-result.txt".format(criterion.name, opt)
            var traceFiles = FileHelper.getFiles(scheduleDir, (name ⇒ name.contains("-trace.txt")))
            traceFiles = FileHelper.sortTracesByName(traceFiles, "-%s-")
            criterion.measureCoverage(traceFiles, resultFile, interval)
          }
        }
      }
    }
  }

  // Give a summary of where the bugs
  // This is tool dependendant information
  private def summary() = {
    if (verbose >= 2) {
      for (path ← new File(resultDir).listFiles if path.isDirectory()) { // Iterate over all directories
        // File(parent, child) keeps the path separator portable (was a hard-coded "\\").
        val file: File = new File(path, "time-bug-report.txt")
        if (file.isFile()) { // Check if they contain a bug report file from Bita
          // Read the report once and always close the stream. Previously the file
          // was read *before* the isFile() check (throwing when the report was
          // missing) and the Source was opened twice without ever being closed.
          val source = Source.fromFile(file)
          val lines = try source.getLines().toList finally source.close()
          if (lines.size <= 4) { // Check if the shedule was faulty shedules (should be more then 4 lines then)
            print(Console.GREEN)
          } else {
            print(Console.RED)
          }
          lines.foreach(println) // Iterate over the content and print it
          println(Console.RESET)
        }
      }
    }
  }

  // This will validate if we have found a valid race condition.
  private def validate() = {
    var msg = ""
    if (verbose >= 1) {
      if (numShedules != 0) {
        if (expectFailures) {
          if ((numFaulty == 0)) { // Show the info
            print(Console.RED + Console.BOLD)
            msg = "**FAILURE** Generated %d shedules and %d of them failed (Failures expected).".format(numShedules, numFaulty)
          } else {
            print(Console.GREEN + Console.BOLD)
            msg = "**SUCCESS** Generated %d shedules and %d of them failed.".format(numShedules, numFaulty)
          }
        } else {
          if ((numFaulty == 0)) { // Show the info
            print(Console.GREEN + Console.BOLD)
            msg = "**SUCCESS** Generated %d shedules and %d of them failed.".format(numShedules, numFaulty)
          } else {
            print(Console.RED + Console.BOLD)
            msg = "**FAILURE** Generated %d shedules and %d of them failed (No failures expected).".format(numShedules, numFaulty)
          }
        }
      } else {
        print(Console.RED + Console.BOLD)
        // (fixed: the format string takes a single argument)
        msg = "**FAILURE** Something went wrong, generated %d shedules".format(numShedules)
      }
      println("*=========================================================================================*")
      println("|                                                                                         |")
      println("| " + msg.padTo(87, ' ') + " |")
      println("|                                                                                         |")
      println("*=========================================================================================*" + Console.RESET)
    }
    // Assert to make the test fail or succeed, for showing it in the testrunner
    assert(numShedules != 0, "Generated %d shedules.".format(numShedules))
    assert((numFaulty != 0) == expectFailures, msg)
  }

  // Reads the knobs passed on the scalatest command line (-Dkey=value config map).
  override def beforeEach(td: TestData) {
    val config: Map[String, Any] = td.configMap
    verbose = config.getOrElse("verbose", "1").asInstanceOf[String].toInt // read out the config passed via scalatest options
    randomTime = config.getOrElse("randomTime", "60").asInstanceOf[String].toInt
    randomTraces = config.getOrElse("randomTraces", "1").asInstanceOf[String].toInt
  }
}
/*
* Build.scala
* The Figaro project SBT build program.
*
* Created By: Michael Reposa (mreposa@cra.com)
* Creation Date: Feb 17, 2014
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email figaro@cra.com for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
import sbt._
import Keys._
import sbtassembly.Plugin._
import AssemblyKeys._
import sbt.Package.ManifestAttributes
import scoverage.ScoverageSbtPlugin._
import com.typesafe.sbteclipse.plugin.EclipsePlugin.EclipseKeys
object FigaroBuild extends Build {

  // Build-wide settings (organization, version, POM/publishing metadata)
  // inherited by every project in this build.
  override val settings = super.settings ++ Seq(
    organization := "com.cra.figaro",
    description := "Figaro: a language for probablistic programming",
    version := "3.3.0.0",
    scalaVersion := "2.11.6",
    crossPaths := true,
    publishMavenStyle := true,
    pomExtra :=
      <url>http://www.github.com/p2t2/figaro</url>
      <developers>
        <developer>
          <name>Avrom J. Pfeffer</name>
          <email>apfeffer@cra.com</email>
          <organization>Charles River Analytics, Inc.</organization>
          <organizationUrl>http://www.cra.com</organizationUrl>
        </developer>
      </developers>
      <licenses>
        <license>
          <name>Figaro License</name>
          <url>https://github.com/p2t2/figaro/blob/master/LICENSE</url>
        </license>
      </licenses>
      <scm>
        <connection>scm:git:git@github.com:p2t2/figaro.git</connection>
        <developerConnection>scm:git:git@github.com:p2t2/figaro.git</developerConnection>
        <url>git@github.com:p2t2/figaro.git</url>
      </scm>
  )

  // Scala binary version; used when naming the assembly ("fat") jar below.
  lazy val scalaMajorMinor = "2.11"

  // Read existing Figaro MANIFEST.MF from file
  lazy val figaroManifest = Using.fileInputStream(file("Figaro/META-INF/MANIFEST.MF")) {
    in => new java.util.jar.Manifest(in)
  }

  // Read existing FigaroExamples MANIFEST.MF from file
  lazy val examplesManifest = Using.fileInputStream(file("FigaroExamples/META-INF/MANIFEST.MF")) {
    in => new java.util.jar.Manifest(in)
  }

  // Aggregate project: builds figaro and examples together, publishes nothing itself.
  lazy val root = Project("root", file("."))
    .settings(publishLocal := {})
    .settings(publish := {})
    .dependsOn(figaro, examples)
    .aggregate(figaro, examples)

  // Main library project.
  lazy val figaro = Project("Figaro", file("Figaro"))
    .settings (scalacOptions ++= Seq(
	"-feature",
	"-language:existentials",
	"-deprecation",
	"-language:postfixOps"
    ))
    .settings(packageOptions := Seq(Package.JarManifest(figaroManifest)))
    .settings(libraryDependencies ++= Seq(
      "org.scala-lang" % "scala-reflect" % scalaVersion.value,
      "asm" % "asm" % "3.3.1",
      "org.apache.commons" % "commons-math3" % "3.3",
      "net.sf.jsci" % "jsci" % "1.2",
      "com.typesafe.akka" %% "akka-actor" % "2.3.8",
      "org.scalanlp" %% "breeze" % "0.10",
      "io.argonaut" %% "argonaut" % "6.0.4",
      "org.prefuse" % "prefuse" % "beta-20071021",
      "org.scala-lang.modules" %% "scala-swing" % "1.0.1",
      "com.storm-enroute" %% "scalameter" % "0.6" % "provided",
      "org.scalatest" %% "scalatest" % "2.2.4" % "provided, test"
    ))
    // Copy all managed dependencies to \lib_managed directory
    .settings(retrieveManaged := true)
    // Enable forking
    .settings(fork := true)
    // Increase max memory for JVM for both testing and runtime
    .settings(javaOptions in (Test,run) += "-Xmx8G")
    // test settings
    .settings(parallelExecution in Test := false)
    .settings(testOptions in Test += Tests.Argument("-oD"))
    // `det` runs only tests NOT tagged nonDeterministic (-l = exclude tag)...
    .configs(detTest)
    .settings(inConfig(detTest)(Defaults.testTasks): _*)
    .settings(testOptions in detTest := Seq(Tests.Argument("-l", "com.cra.figaro.test.nonDeterministic")))
    // ...while `nonDet` runs only tests WITH that tag (-n = include tag).
    .configs(nonDetTest)
    .settings(inConfig(nonDetTest)(Defaults.testTasks): _*)
    .settings(testOptions in nonDetTest := Seq(Tests.Argument("-n", "com.cra.figaro.test.nonDeterministic")))
    // sbt-assembly settings
    .settings(assemblySettings: _*)
    .settings(test in assembly := {})
    .settings(jarName in assembly := "figaro_" + scalaMajorMinor + "-" + version.value + "-fat.jar")
    .settings(assemblyOption in assembly ~= { _.copy(includeScala = false) })
    .settings(excludedJars in assembly := {
      val cp = (fullClasspath in assembly).value
      cp filter {_.data.getName == "arpack_combined_all-0.1-javadoc.jar"}
    })
    // ScalaMeter settings
    .settings(testFrameworks += new TestFramework("org.scalameter.ScalaMeterFramework"))
    .settings(logBuffered := false)
    // SBTEclipse settings
    .settings(EclipseKeys.eclipseOutput := Some("target/scala-2.11/classes"))

  // Examples project: depends on the library, reuses its own OSGi-style manifest.
  lazy val examples = Project("FigaroExamples", file("FigaroExamples"))
    .dependsOn(figaro)
    .settings(packageOptions := Seq(Package.JarManifest(examplesManifest)))
    // SBTEclipse settings
    .settings(EclipseKeys.eclipseOutput := Some("target/scala-2.11/classes"))

  // Custom test configurations referenced by the settings above.
  lazy val detTest = config("det") extend(Test)
  lazy val nonDetTest = config("nonDet") extend(Test)
}
| jyuhuan/figaro | project/Build.scala | Scala | bsd-3-clause | 4,926 |
package withOrb
import leon._
import lang._
import annotation._
import instrumentation._
import invariant._
import leon.collection._
import mem._
import higherorder._
import stats._
/**
* Computing the kthe min using a version of merge sort that operates bottom-up.
* This allows accessing the first element in the sorted list in O(n) time,
* and kth element in O(kn) time.
* Needs unrollfactor = 3
*/
object BottomUpMergeSort {

  // NOTE: `steps`, the hole `?`, `cached(...)` and the postfix `*` used below are
  // Leon/Orb verification constructs (resource bounds, memoization predicates and
  // "value-of" projections), not plain Scala. Do not edit bodies or contracts
  // without re-running the verifier.

  // A lazy list of BigInts; `size` is the spec-level length measure.
  private sealed abstract class LList {
    def size: BigInt = {
      this match {
        case SCons(_, t) => 1 + t.size
        case _ => BigInt(0)
      }
    } ensuring (_ >= 0)
  }
  private case class SCons(x: BigInt, tailFun: Stream) extends LList
  private case class SNil() extends LList
  // Memoizing thunk of an LList: `list` forces `lfun` at most once (lazy val).
  private case class Stream(lfun: () => LList) {
    @inline
    def size = (list*).size
    lazy val list: LList = lfun()
  }

  // Invariant: every stream in the list is non-empty.
  private def valid(sl: List[Stream]): Boolean = {
    sl match {
      case Cons(s, tail) => s.size > 0 && valid(tail)
      case Nil() => true
    }
  }

  // Total number of elements across all streams in the list.
  private def fullSize(sl: List[Stream]): BigInt = {
    sl match {
      case Nil() => BigInt(0)
      case Cons(l, t) => l.size + fullSize(t)
    }
  } ensuring (_ >= 0)

  /**
   * A function that given a list of (lazy) sorted lists,
   * groups them into pairs and lazily invokes the 'merge' function on each pair.
   * Takes time linear in the size of the input list.
   */
  @invisibleBody
  private def pairs(l: List[Stream]): List[Stream] = {
    require(valid(l))
    l match {
      case Nil() => Nil[Stream]()
      case Cons(_, Nil()) => l
      case Cons(l1, Cons(l2, rest)) =>
        Cons(Stream(() => forceAndMerge(l1, l2)), pairs(rest))
    }
  } ensuring (res => res.size <= (l.size + 1) / 2 &&
    fullSize(l) == fullSize(res) &&
    valid(res) &&
    steps <= ? * l.size + ? // 2 * steps <= 15 * l.size + 6
    )

  /**
   * Create a linearized tree of merges e.g. merge(merge(2, 1), merge(17, 19)).
   * Takes time linear in the size of the input list.
   */
  @invisibleBody
  private def constructMergeTree(l: List[Stream]): List[Stream] = {
    require(valid(l))
    l match {
      case Nil() => Nil[Stream]()
      case Cons(_, Nil()) => l
      case _ =>
        constructMergeTree(pairs(l))
    }
  } ensuring {res =>
    res.size <= 1 && fullSize(res) == fullSize(l) &&
    (res match {
      case Cons(il, Nil()) =>
        fullSize(res) == il.size // this is implied by the previous conditions
      case _ => true
    }) &&
    valid(res) &&
    steps <= ? * l.size + ? // 32 * l.size + 3
  }

  // Merges two already-forced streams; constant time because the recursive
  // merges are suspended inside new Stream thunks.
  // @invisibleBody
  private def merge(a: Stream, b: Stream): LList = {
    require((cached(a.list) && cached(b.list)))
    b.list match {
      case SNil() => a.list
      case SCons(x, xs) =>
        a.list match {
          case SNil() => b.list
          case SCons(y, ys) =>
            if (y < x)
              SCons(y, Stream(() => forceAndMerge(ys, b)))
            else
              SCons(x, Stream(() => forceAndMerge(a, xs)))
        }
    }
  } ensuring(_ => steps <= ?) // steps <= 21

  /**
   * A function that merges two sorted streams of integers.
   * Note: the sorted stream of integers may by recursively constructed using merge.
   * Takes time linear in the size of the streams (non-trivial to prove due to cascading of lazy calls)
   */
  @invisibleBody
  @usePost
  private def forceAndMerge(a: Stream, b: Stream): LList = {
    require {
      val alist = (a.list*)
      val blist = (b.list*)
      (alist != SNil() || cached(b.list)) && // if one of the arguments is Nil then the other is evaluated
      (blist != SNil() || cached(a.list)) &&
      (alist != SNil() || blist != SNil()) // at least one of the arguments is not Nil
    }
    // Forcing both thunks via the tuple pattern establishes merge's `cached` precondition.
    (a.list, b.list) match {
      case _ => merge(a, b)
    }
  } ensuring {res =>
    val rsize = res.size
    a.size + b.size == rsize && rsize >= 1 &&
    steps <= 123 * rsize - 86 // steps <= 111 * rsize - 86 // Orb cannot infer this due to issues with CVC4 set solving !
  }

  /**
   * Converts a list of integers to a list of streams of integers
   */
  @inline
  private val nilStream: Stream = Stream(lift(SNil()))

  @invisibleBody
  private def ListToStreamList(l: List[BigInt]): List[Stream] = {
    l match {
      case Nil() => Nil[Stream]()
      case Cons(x, xs) =>
        Cons[Stream](Stream(lift(SCons(x, nilStream))), ListToStreamList(xs))
    }
  } ensuring { res =>
    fullSize(res) == l.size &&
    res.size == l.size &&
    valid(res) &&
    steps <= ? * l.size + ? // steps <= 13 * l.size + 3
  }

  /**
   * Takes list of integers and returns a sorted stream of integers.
   * Takes time linear in the size of the input since it sorts lazily.
   */
  @invisibleBody
  private def mergeSort(l: List[BigInt]): Stream = {
    l match {
      case Nil() => Stream(lift(SNil()))
      case _ =>
        constructMergeTree(ListToStreamList(l)) match {
          case Cons(r, Nil()) => r
        }
    }
  } ensuring (res => l.size == res.size && steps <= ? * l.size + ?) // steps <= 45 * l.size + 15

  // Forces the first k+1 cells of the sorted stream; returns 0 past the end.
  private def kthMinStream(s: Stream, k: BigInt): BigInt = {
    require(k >= 0)
    s.list match {
      case SCons(x, xs) =>
        if (k == 0) x
        else
          kthMinStream(xs, k - 1)
      case SNil() => BigInt(0)
    }
  } ensuring (_ => steps <= ? * (k * s.size) + ? * (s.size) + ?) // steps <= (123 * (k * s.list-mem-time(uiState)._1._1.size) + 123 * s.list-mem-time(uiState)._1._1.size) + 9

  /**
   * A function that accesses the kth element of a list using lazy sorting.
   */
  def kthMin(l: List[BigInt], k: BigInt): BigInt = {
    kthMinStream(mergeSort(l), k)
  } ensuring(_ => steps <= ? * (k * l.size) + ? * (l.size) + ?)

  // Benchmark driver; @ignore keeps it out of verification.
  @ignore
  def main(args: Array[String]) {
    //import eagerEval.MergeSort
    import scala.util.Random
    import scala.math.BigInt
    import stats._
    import collection._

    println("Running merge sort test...")
    val length = 3000000
    val maxIndexValue = 100
    val randomList = Random.shuffle((0 until length).toList)
    val l1 = randomList.foldRight(List[BigInt]()){
      case (i, acc) => BigInt(i) :: acc
    }
    val l2 = randomList.foldRight(Nil[BigInt](): List[BigInt]){
      case (i, acc) => Cons(BigInt(i), acc)
    }
    println(s"Created inputs of size (${l1.size},${l2.size}), starting operations...")
    val sort2 = timed{ mergeSort(l2) }{t => println(s"Lazy merge sort completed in ${t/1000.0} sec") }
    //val sort1 = timed{ MergeSort.msort((x: BigInt, y: BigInt) => x <= y)(l1) } {t => println(s"Eager merge sort completed in ${t/1000.0} sec") }

    // sample 10 elements from a space of [0-100]
    val rand = Random
    var totalTime1 = 0L
    var totalTime2 = 0L
    for(i <- 0 until 10) {
      val idx = rand.nextInt(maxIndexValue)
      //val e1 = timed { sort1(idx) } { totalTime1 +=_ }
      //val e2 = timed { kthMin(sort2, idx) }{ totalTime2 += _ }
      //println(s"Element at index $idx - Eager: $e1 Lazy: $e2")
      //assert(e1 == e2)
    }
    println(s"Time-taken to pick first 10 minimum elements - Eager: ${totalTime1/1000.0}s Lazy: ${totalTime2/1000.0}s")
  }
}
| regb/leon | testcases/lazy-datastructures/withOrb/BottomUpMegeSort.scala | Scala | gpl-3.0 | 7,153 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
* Contributors:
* Hao Jiang - initial API and implementation
*/
package edu.uchicago.cs.encsel.ptnmining
import edu.uchicago.cs.encsel.dataset.persist.Persistence
import edu.uchicago.cs.encsel.model.DataType
import edu.uchicago.cs.encsel.ptnmining.parser.Tokenizer
import scala.collection.mutable
import scala.io.Source
/**
 * Mine Pattern from string type data.
 *
 * For every STRING column known to the persistence layer, trains a pattern on
 * the column's first 1000 rows, tests it on the next 1000, and histograms the
 * columns by their match-failure rate in 10% buckets.
 */
object MinePattern extends App {

  // Bucket i counts columns whose failure rate falls in [i*10%, (i+1)*10%).
  val histogram = new mutable.HashMap[Int, Int]

  Persistence.get.lookup(DataType.STRING).foreach(column => {
    val source = Source.fromFile(column.colFile)
    try {
      // Materialize eagerly (Iterator#toSeq is lazy) so the file can be closed
      // safely below; previously the Source was never closed at all.
      val rows = source.getLines().slice(0, 2000).toList
      val train = rows.slice(0, 1000)
      val test = rows.slice(1000, 2000)
      val pattern = Pattern.generate(train.map(Tokenizer.tokenize(_).toSeq))
      // Number of test rows the mined pattern fails to match.
      val failed = test.map(Tokenizer.tokenize).count(tokens => pattern.matchon(tokens.toSeq).isEmpty)
      // Rate is computed against the nominal test size of 1000, matching the
      // original behavior even when fewer test rows exist.
      val failure = failed.toDouble / 1000
      val index = (failure / 0.1).toInt
      histogram.put(index, histogram.getOrElse(index, 0) + 1)
    } finally {
      source.close()
    }
  })
  println(histogram)
}
| harperjiang/enc-selector | src/main/scala/edu/uchicago/cs/encsel/ptnmining/MinePattern.scala | Scala | apache-2.0 | 1,960 |
package com.github.romangrebennikov.columnize.tools
import org.slf4j.LoggerFactory
/**
* Created by shutty on 10/13/15.
*/
/** Mix-in supplying an SLF4J logger named after the concrete runtime class. */
trait Logging {
  // Lazy so the logger is only materialized on first use, once the concrete
  // class (and therefore the logger name) is known.
  lazy val log: org.slf4j.Logger = LoggerFactory.getLogger(this.getClass)
}
| shuttie/columnize | src/main/scala/com/github/romangrebennikov/columnize/tools/Logging.scala | Scala | bsd-2-clause | 196 |
/*
* Copyright (c) 2018 OVO Energy
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.ovoenergy.comms.model
import org.scalacheck.{Arbitrary, Gen}
import shapeless.{:+:, CNil, Coproduct, Lazy, Nat}
import shapeless._
import shapeless.ops.nat.ToInt
/**
 * ScalaCheck `Arbitrary` instances for shapeless coproducts, derived inductively
 * so that every branch of the coproduct is generated with equal probability.
 */
object ArbitraryCoproduct {

  // Base case: CNil is uninhabited, so this generator exists only to terminate
  // the induction; the null it yields is unreachable for well-formed coproducts.
  implicit def cNilArb: Arbitrary[CNil] = Arbitrary(Gen.const[CNil](null))

  // Inductive case: `Lazy` on the head instance breaks implicit-resolution cycles
  // for recursive types; the Nat evidence supplies the tail's branch count.
  implicit def cConsArb[H, T <: Coproduct, L <: Nat](
      implicit hArb: Lazy[Arbitrary[H]],
      tArb: Arbitrary[T],
      tLength: shapeless.ops.coproduct.Length.Aux[T, L],
      tLengthAsInt: ToInt[L]
  ): Arbitrary[H :+: T] = {
    // Generate an element of coproduct with even distribution:
    // weight 1 for the head vs. (length - 1) for the remaining branches.
    val length = tLengthAsInt() + 1
    Arbitrary {
      Gen.frequency(
        1 -> hArb.value.arbitrary.map(t => Inl(t)),
        (length - 1) -> tArb.arbitrary.map(t => Inr(t))
      )
    }
  }
}
| ovotech/comms-kafka-messages | modules/core/src/test/scala/com/ovoenergy/comms/model/ArbitraryCoproduct.scala | Scala | mit | 1,892 |
package com.asksunny.demotest
import org.specs2._
import org.openqa.selenium._
import org.openqa.selenium.chrome._
//http://chromedriver.storage.googleapis.com/index.html
/**
 * Selenium smoke test driving Chrome through a Google search.
 *
 * Requires a local chromedriver binary at the hard-coded Windows path below
 * (download: http://chromedriver.storage.googleapis.com/index.html).
 */
class Spec2DemoTest extends mutable.Specification
{
  "The chrome Driver test" should {
    "launch chrome broswer" in {
      System.setProperty("webdriver.chrome.driver", "c:/java/webdriver/chromedriver.exe")
      val driver = new ChromeDriver()
      // Always quit the driver, even when a lookup or assertion throws;
      // previously a failure anywhere above quit() leaked a running Chrome.
      val title =
        try {
          driver.get("http://www.google.com/xhtml")
          Thread.sleep(5000)
          val searchBox = driver.findElement(By.name("q"))
          searchBox.sendKeys("ChromeDriver")
          searchBox.submit()
          Thread.sleep(5000); // Let the user actually see something!
          driver.getTitle()
        } finally {
          driver.quit()
        }
      println("****************************************************Page title is: " + title)
      title must startWith("ChromeDriver")
    }
  }
}
| devsunny/common-tools | src/test/scala/com/asksunny/demotest/Spec2TestDemoSpec.scala | Scala | mit | 914 |
package memnets.fx
import com.sun.javafx.geom.Vec3d
import scalafx.scene._
import scalafx.scene.paint._
import scalafx.scene.shape._
/** FX 3D helpers: JavaFX type aliases, syntax extensions and plane tiling. */
package object fx3d {
  // Short aliases for the underlying JavaFX/com.sun classes.
  type JVec3d = com.sun.javafx.geom.Vec3d
  type JGroup = javafx.scene.Group
  type JNode = javafx.scene.Node

  // Lets a Tickable3DFX be passed where an Option[Tickable3DFX] is expected.
  implicit def t3dfx2Option[T <: Tickable3DFX](fx: T): Option[Tickable3DFX] = Some(fx)

  /** Adds `copyTo`, copying every material channel of `src` onto another PhongMaterial. */
  implicit class PhongExt(val src: PhongMaterial) extends AnyVal {
    def copyTo(tgt: PhongMaterial): Unit = {
      tgt.diffuseColor = src.diffuseColor.value
      tgt.diffuseMap = src.diffuseMap.value
      tgt.specularColor = src.specularColor.value
      tgt.specularPower = src.specularPower.value
      tgt.specularMap = src.specularMap.value
      tgt.bumpMap = src.bumpMap.value
      tgt.selfIlluminationMap = src.selfIlluminationMap.value
    }
  }
  // NOTE : need this on Node, not Shape3D
  /** Bounds-in-parent intersection/containment checks and uniform scaling for nodes. */
  implicit class NodeExt[T <: Node](val src: T) extends AnyVal {
    def intersects3D(other: Node): Boolean = {
      src.boundsInParent.value.intersects(other.boundsInParent.value)
    }
    def contains3D(other: Node): Boolean = {
      src.boundsInParent.value.contains(other.boundsInParent.value)
    }
    // Sets the same scale factor on all three axes of the delegate.
    def scaleAll(scale: Double): Unit = {
      val del = src.delegate
      del.setScaleX(scale)
      del.setScaleY(scale)
      del.setScaleZ(scale)
    }
  }
  /** Zeroes all three components of a mutable Vec3d in place. */
  implicit class Vec3dExt(val vec: Vec3d) extends AnyVal {
    def clear(): Unit = {
      vec.x = 0.0
      vec.y = 0.0
      vec.z = 0.0
    }
  }
  //  case class Pt3D(var x : Double, var y : Double, var z : Double) {
  //    def clear { x = 0.0; y = 0.0; z = 0.0 }
  //  }
  /**
   * Builds a 3x3 grid of boxes (produced by `f`) centered on the origin in the XZ plane.
   * Auto-sizing of children is disabled so the tiles keep their explicit translations.
   */
  def tiledPlane(f: => Box): Group = {
    val tileGroup = new Group()
    tileGroup.autoSizeChildren = false
    val n = 3
    val off = Math.floor(n / 2.0)
    for {
      r <- 0 until n
      c <- 0 until n
    } {
      val tile = f
      val xdim = (-off + r) * (tile.width.value)
      // NOTE(review): the Z offset also uses width, not depth — correct only for
      // square tiles; verify for non-square boxes.
      val zdim = (-off + c) * (tile.width.value)
      tile.translateX = xdim
      tile.translateZ = zdim
      tileGroup.children.add(tile)
    }
    tileGroup
  }
}
| MemoryNetworks/memnets | fx/src/main/scala/memnets/fx/fx3d/package.scala | Scala | apache-2.0 | 2,054 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.typedarray
import org.junit.Assert._
import org.junit.Test
import org.scalajs.testsuite.utils.Requires
import scala.scalajs.js.typedarray._
object ArrayBufferTest extends Requires.TypedArray

/** Behavioral checks for the typed-array ArrayBuffer facade. */
class ArrayBufferTest {

  /** A buffer constructed with a byte count reports exactly that byteLength. */
  @Test def lengthConstructor(): Unit = {
    val buffer = new ArrayBuffer(100)
    assertTrue(buffer.isInstanceOf[ArrayBuffer])
    assertEquals(100, buffer.byteLength)
  }

  /** One-argument slice keeps everything from the offset to the end. */
  @Test def sliceWithOneArg(): Unit = {
    val buffer = new ArrayBuffer(100)
    val tail = buffer.slice(10)
    assertEquals(90, tail.byteLength)
  }

  /** Two-argument slice keeps exactly the half-open byte range [begin, end). */
  @Test def sliceWithTwoArgs(): Unit = {
    val buffer = new ArrayBuffer(100)
    val window = buffer.slice(10, 20)
    assertEquals(10, window.byteLength)
  }
}
| scala-js/scala-js | test-suite/js/src/test/scala/org/scalajs/testsuite/typedarray/ArrayBufferTest.scala | Scala | apache-2.0 | 966 |
/*
* Copyright (C) 2012 Pavel Fatin <http://pavelfatin.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.pavelfatin.fs
package internal
package toyfs
private class MockChunk(val id: Int, content: String) extends Data with Chunk {
val data: ByteData with DataStats = new ByteData(content, extendable = true, lazyLength = true) with DataStats
private var _deleted = false
def length: Long = throw new UnsupportedOperationException()
def read(position: Long, length: Int, buffer: Array[Byte], offset: Int) {
data.read(position, length, buffer, offset)
}
def write(position: Long, length: Int, buffer: Array[Byte], offset: Int) {
data.write(position, length, buffer, offset)
}
def projection(position: Long, length: Long) = data.projection(position, length)
def truncate(threshold: Long) {
data.truncate(threshold)
}
def deleted: Boolean = _deleted
def delete() {
_deleted = true
}
def presentation: String = data.presentation
override def toString = s"${getClass.getSimpleName}($id: $presentation)"
}
| pavelfatin/toyfs | src/test/scala/com/pavelfatin/fs/internal/toyfs/MockChunk.scala | Scala | gpl-3.0 | 1,677 |
/*
* Copyright 2016 Coursera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.coursera.naptime
import akka.util.ByteString
import org.coursera.common.jsonformat.JsonFormats
import org.coursera.common.jsonformat.JsonFormats.Implicits.optionalReads
import play.api.http.HttpEntity
import play.api.http.MimeTypes
import play.api.libs.json.JsValue
import play.api.libs.json.Json
import play.api.libs.json.OFormat
import play.api.libs.json.__
import play.api.mvc.ResponseHeader
import play.api.mvc.Result
/**
 * Throw to break out of a Naptime action body on error.
 *
 * The HTTP status and the optional error code / message / details are rendered
 * into the JSON error body sent to the client (see `NaptimeActionException.Body`);
 * `cause`, when present, is attached as the underlying exception cause.
 */
case class NaptimeActionException(
    httpCode: Int,
    errorCode: Option[String],
    message: Option[String],
    details: Option[JsValue] = None,
    cause: Option[Throwable] = None)
    extends RuntimeException(s"Naptime error $httpCode [$errorCode]: $message", cause.orNull) {

  /** Renders this error as an HTTP result: carried status code, JSON error body. */
  def result: Result = {
    val bodyJson = Json.toJson(NaptimeActionException.Body(errorCode, message, details))
    val bodyString = Json.stringify(bodyJson)
    Result(
      ResponseHeader(httpCode),
      HttpEntity.Strict(ByteString.fromString(bodyString), Some(MimeTypes.JSON)))
  }

  /**
   * Add exception details which can be used by downstream services for debugging to
   * NaptimeActionException.
   *
   * @return A new NaptimeActionException with exception details formatted as JSON.
   */
  def withExceptionDetails[T](details: Option[T])(
      implicit format: OFormat[T]): NaptimeActionException =
    this.copy(
      details = details.map(format.writes)
    )
}
object NaptimeActionException {

  /** JSON shape of the error body; all three fields are always emitted (possibly null). */
  case class Body(errorCode: Option[String], message: Option[String], details: Option[JsValue])

  object Body {

    /**
     * Note: We use a custom format instead of the case class format macro so that empty options
     * become null values in the JSON object instead of being omitted.
     */
    implicit val format: OFormat[Body] = {
      // `unapply` is excluded from the wildcard so the companion's own unapply
      // (needed by `unlift` below) is not shadowed by the functional-syntax helper.
      import play.api.libs.functional.syntax.{unapply => _, _}
      val builder =
        (__ \ "errorCode").format[Option[String]] and
          (__ \ "message").format[Option[String]] and
          (__ \ "details").format[Option[JsValue]]
      builder(apply, unlift(unapply))
    }

    object Extract extends JsonFormats.Extract[Body]
  }
}
/**
* Error responses.
* Note: These error responses are special exceptions that can be caught by the framework.
* They are not designed to work outside the framework.
*/
trait Errors {
import play.api.http.Status._
/**
* Error out with a BadRequest (400) response.
*
* Note: Only use this within a Rest Action, and not a general action.
*/
def BadRequest(
errorCode: String = null,
msg: String = null,
details: Option[JsValue] = None,
cause: Option[Throwable] = None): NaptimeActionException =
new NaptimeActionException(BAD_REQUEST, Option(errorCode), Option(msg), details, cause)
/**
* Error out with a BadRequest (400) response.
* Detail will be converted to a json object.
*
* Note: Only use this within a Rest Action, and not a general action.
*/
def BadRequestT[T](
errorCode: String = null,
msg: String = null,
details: Option[T] = None,
cause: Option[Throwable] = None)(implicit format: OFormat[T]): NaptimeActionException =
new NaptimeActionException(BAD_REQUEST, Option(errorCode), Option(msg), cause = cause)
.withExceptionDetails(details)
/**
* Error out with an Unauthorized (401) response.
*
* Note: Only use this within a Rest Action, and not a general action.
*/
def Unauthorized(
errorCode: String = null,
msg: String = null,
details: Option[JsValue] = None,
cause: Option[Throwable] = None): NaptimeActionException =
new NaptimeActionException(UNAUTHORIZED, Option(errorCode), Option(msg), details, cause)
/**
* Error out with an Unauthorized (401) response.
* Detail will be converted to a json object.
*
* Note: Only use this within a Rest Action, and not a general action.
*/
def UnauthorizedT[T](
errorCode: String = null,
msg: String = null,
details: Option[T] = None,
cause: Option[Throwable] = None)(implicit format: OFormat[T]): NaptimeActionException =
new NaptimeActionException(UNAUTHORIZED, Option(errorCode), Option(msg), cause = cause)
.withExceptionDetails(details)
/**
* Error out with an Forbidden (403) response.
*
* Note: Only use this within a Rest Action, and not a general action.
*/
def Forbidden(
errorCode: String = null,
msg: String = null,
details: Option[JsValue] = None,
cause: Option[Throwable] = None): NaptimeActionException =
new NaptimeActionException(FORBIDDEN, Option(errorCode), Option(msg), details, cause)
/**
* Error out with an Not Found (404) response.
*
* Note: Only use this within a Rest Action, and not a general action.
*/
def NotFound(
errorCode: String = null,
msg: String = null,
details: Option[JsValue] = None,
cause: Option[Throwable] = None): NaptimeActionException =
new NaptimeActionException(NOT_FOUND, Option(errorCode), Option(msg), details, cause)
/**
 * Error out with a Not Found (404) response.
 * Detail will be converted to a json object via the implicit OFormat.
 *
 * Note: Only use this within a Rest Action, and not a general action.
 */
def NotFoundT[T](
    errorCode: String = null,
    msg: String = null,
    details: Option[T] = None,
    cause: Option[Throwable] = None)(implicit format: OFormat[T]): NaptimeActionException = {
  // Build the base exception first, then attach the typed details payload.
  val base = new NaptimeActionException(NOT_FOUND, Option(errorCode), Option(msg), cause = cause)
  base.withExceptionDetails(details)
}
import scala.reflect.runtime.universe.TypeTag

/**
 * Error out with a Not Found (404) response for a missing entity; the
 * entity's type name is recovered reflectively from the TypeTag.
 */
def NotFound[MissingType](id: Int)(
    implicit typeTag: TypeTag[MissingType]): NaptimeActionException = {
  val typeName = typeTag.tpe.toString
  NotFound("missing", s"No $typeName with id: $id")
}
/**
 * Error out with a conflict (409) response.
 *
 * Note: Only use this within a Rest Action, and not a general action.
 */
def Conflict(
    errorCode: String = null,
    msg: String = null,
    details: Option[JsValue] = None,
    cause: Option[Throwable] = None): NaptimeActionException = {
  // Nullable string arguments are normalized to Option before construction.
  val code = Option(errorCode)
  val message = Option(msg)
  new NaptimeActionException(CONFLICT, code, message, details, cause)
}
/**
 * Error out with a Gone (410) response.
 *
 * Note: Only use this within a Rest Action, and not a general action.
 */
def Gone(
    errorCode: String = null,
    msg: String = null,
    details: Option[JsValue] = None,
    cause: Option[Throwable] = None): NaptimeActionException = {
  // Nullable string arguments are normalized to Option before construction.
  val code = Option(errorCode)
  val message = Option(msg)
  new NaptimeActionException(GONE, code, message, details, cause)
}
/**
 * Error out with a precondition failed (412) response.
 *
 * Note: Only use this within a Rest Action, and not a general action.
 */
def PreconditionFailed(
    errorCode: String = null,
    msg: String = null,
    details: Option[JsValue] = None,
    cause: Option[Throwable] = None): NaptimeActionException = {
  // Nullable string arguments are normalized to Option before construction.
  val code = Option(errorCode)
  val message = Option(msg)
  new NaptimeActionException(PRECONDITION_FAILED, code, message, details, cause)
}
/**
 * Error out with an internal server error (500) response.
 *
 * Note: Only use this within a Rest Action, and not a general action.
 */
def InternalServerError(
    errorCode: String = null,
    msg: String = null,
    details: Option[JsValue] = None,
    cause: Option[Throwable] = None): NaptimeActionException = {
  // Nullable string arguments are normalized to Option before construction.
  val code = Option(errorCode)
  val message = Option(msg)
  new NaptimeActionException(INTERNAL_SERVER_ERROR, code, message, details, cause)
}
/**
 * Error out with a bad gateway error (502) response.
 *
 * Note: Only use this within a Rest Action, and not a general action.
 */
def BadGateway(
    errorCode: String = null,
    msg: String = null,
    details: Option[JsValue] = None,
    cause: Option[Throwable] = None): NaptimeActionException = {
  // Nullable string arguments are normalized to Option before construction.
  val code = Option(errorCode)
  val message = Option(msg)
  new NaptimeActionException(BAD_GATEWAY, code, message, details, cause)
}
/**
 * Error out with a service unavailable (503) response.
 *
 * Note: Only use this within a Rest Action, and not a general action.
 */
def ServiceUnavailable(
    errorCode: String = null,
    msg: String = null,
    details: Option[JsValue] = None,
    cause: Option[Throwable] = None): NaptimeActionException = {
  // Nullable string arguments are normalized to Option before construction.
  val code = Option(errorCode)
  val message = Option(msg)
  new NaptimeActionException(SERVICE_UNAVAILABLE, code, message, details, cause)
}
/**
 * Error out with a gateway timeout (504) response.
 *
 * Note: Only use this within a Rest Action, and not a general action.
 */
def GatewayTimeout(
    errorCode: String = null,
    msg: String = null,
    details: Option[JsValue] = None,
    cause: Option[Throwable] = None): NaptimeActionException = {
  // Nullable string arguments are normalized to Option before construction.
  val code = Option(errorCode)
  val message = Option(msg)
  new NaptimeActionException(GATEWAY_TIMEOUT, code, message, details, cause)
}
/**
 * Generate your own HTTP 4XX or 5XX response, specifying your own HTTP code.
 *
 * @param httpCode the HTTP status code to return to the client
 */
def error(
    httpCode: Int,
    errorCode: String = null,
    msg: String = null,
    details: Option[JsValue] = None,
    cause: Option[Throwable] = None): NaptimeActionException = {
  // Nullable string arguments are normalized to Option before construction.
  val code = Option(errorCode)
  val message = Option(msg)
  new NaptimeActionException(httpCode, code, message, details, cause)
}
}
object Errors extends Errors
| coursera/naptime | naptime/src/main/scala/org/coursera/naptime/errors.scala | Scala | apache-2.0 | 9,878 |
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package org.apache.spark.sql.streaming
import java.io.InputStream
import scala.io.Codec
/**
 * Converts a raw [[java.io.InputStream]] into an iterator of decoded elements
 * whose runtime class is reported by [[StreamConverter#getTargetType]].
 */
trait StreamConverter extends Serializable {
  /** Decode the stream into individual elements. */
  def convert(inputStream: InputStream): Iterator[Any]
  /** Runtime class of the elements produced by `convert`. */
  def getTargetType: scala.Predef.Class[_]
}

/**
 * Sample converter that decodes a UTF-8 input stream into its lines.
 */
class MyStreamConverter extends StreamConverter with Serializable {
  override def convert(inputStream: java.io.InputStream): Iterator[Any] = {
    // Bug fix: getTargetType declares String, but Source itself iterates over
    // Chars. Use getLines() so each produced element really is a String.
    scala.io.Source.fromInputStream(inputStream)(Codec.UTF8).getLines()
  }
  override def getTargetType: scala.Predef.Class[_] = classOf[String]
}
/*
* Vector2Spec.scala
*
* Copyright (c) 2013 Lonnie Pryor III
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fulcrum.math.mutable
import org.scalatest.FunSpec
import org.scalatest.matchers.ShouldMatchers
/**
* Test case for [[fulcrum.math.mutable.Vector2]].
*
* @author Lonnie Pryor III (lonnie@pryor.us.com)
*/
@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
class Vector2Spec extends FunSpec with ShouldMatchers {
  describe("mutable.Vector2") {
    it("should create direct copies") {
      val v = Vector2(1, 2)
      v.copy() should equal(Vector2(1, 2))
      v.copy(x = 5) should equal(Vector2(5, 2))
      v.copy(y = 6) should equal(Vector2(1, 6))
      v.copy(5, 6) should equal(Vector2(5, 6))
    }
    it("should support updating individual elements and assigning all elements") {
      val v = Vector2(1, 2)
      v should equal(Vector2(1, 2))
      // Element update must mutate in place; order of the checks matters.
      v(0) = 0
      v(1) = 3
      v should equal(Vector2(0, 3))
      v := Vector2(1, 2)
      v should equal(Vector2(1, 2))
    }
    // Fixed typo in the user-visible test description: "patern" -> "pattern".
    it("should support pattern matching") {
      val v = Vector2(1, 2)
      val Vector2(x, y) = v
      x should be(1)
      y should be(2)
    }
  }
}
// Copyright 2011-2012 James Michael Callahan
// See LICENSE-2.0 file for licensing information.
package org.scalagfx.math
/** Micro-benchmark of Math3 vector operations (add, subtract, dot, cross, ...). */
object Benchmark
{
  /** Timing results: wall-clock duration (ms) and number of operations performed. */
  class Stats(val duration: Long, val ops: Long)
  {
    val timeInSec = duration.toDouble / 1000.0
    val timePerOp = duration.toDouble / ops.toDouble
    val opsPerSec = (ops*1000L) / duration

    /** Print a detailed timing report. */
    def report() {
      println("Total Time: %.2f sec".format(timeInSec))
      println(" Total Ops: " + ops)
      println(" Time / Op: %.8f ms".format(timePerOp))
      println(" Ops / Sec: " + opsPerSec)
    }

    /** Print a one-line throughput summary in millions of operations per second. */
    def summary(title: String) {
      println("%s (MOps/Sec): %.2f".format(title, opsPerSec.toDouble / 1000000.0))
    }

    /** Combine two result sets by summing durations and operation counts. */
    def + (that: Stats) = new Stats(duration+that.duration, ops+that.ops)
  }

  /**
   * Run `test` for `iters` iterations, timing the whole run.
   *
   * @param title label printed for this test
   * @param iters number of iterations to execute
   * @param ops   number of logical vector operations performed per iteration
   * @param test  by-name benchmark body; returns a Double so the optimizer
   *              cannot eliminate the computation
   */
  def timeTest(title: String, iters: Int, ops: Long)(test: => Double): Stats = {
    println("----------------------------------------------")
    print("Starting " + title + " Test: ")
    val stamp = System.currentTimeMillis
    var result = 0.0
    for(_ <- 0 until iters)
      result = result + test
    val duration = System.currentTimeMillis - stamp
    // The accumulated result is printed so the JIT cannot dead-code the loop.
    println(" Done (%.2f)".format(result))
    println

    val stats = new Stats(duration, iters.toLong * ops)
    stats.report
    stats
  }

  /** The top-level entry method. */
  def main(args: Array[String]) {
    try {
      println("Benchmark of Math3 Vectors.")
      println

      print("Generating Test Data: ")
      val size = 100000
      val vecA = Array.fill(size)(Vec3d.random)
      val vecB = Array.fill(size)(Vec3d.random)
      val vecC = Array.fill(size)(Vec3d.random)
      println(" Done.")

      val iters = 10000

      // Warmup run lets the JIT compile the hot paths before measuring.
      timeTest("Warmup", iters/2, size*3) {
        var result = Vec3d(0.0)
        for(i <- 0 until size)
          result = (vecA(i) cross vecB(i)) * (vecB(i) dot vecC(i)) - (vecA(i) / vecB(i))
        result.lengthSq
      }

      val add = timeTest("Add", iters, size*3) {
        var result = Vec3d(0.0)
        for(i <- 0 until size)
          result = result + vecA(i) + vecB(i) + vecC(i)
        result.lengthSq
      }

      val subtract = timeTest("Subtract", iters, size*3) {
        var result = Vec3d(0.0)
        for(i <- 0 until size)
          result = result - vecA(i) - vecB(i) - vecC(i)
        result.lengthSq
      }

      val multiply = timeTest("Multiply", iters, size*3) {
        var result = Vec3d(1.0)
        for(i <- 0 until size)
          result = result * vecA(i) * vecB(i) * vecC(i)
        result.lengthSq
      }

      val divide = timeTest("Divide", iters, size*3) {
        var result = Vec3d(1.0)
        for(i <- 0 until size)
          result = result / vecA(i) / vecB(i) / vecC(i)
        result.lengthSq
      }

      val dot = timeTest("Dot", iters, size*3) {
        var result = 0.0
        for(i <- 0 until size)
          result = result + (vecA(i) dot vecB(i)) + (vecB(i) dot vecC(i)) + (vecC(i) dot vecA(i))
        result
      }

      val cross = timeTest("Cross", iters, size*3) {
        var result = Vec3d(1.0)
        for(i <- 0 until size)
          result = result cross vecA(i) cross vecB(i) cross vecC(i)
        result.lengthSq
      }

      val mag = timeTest("Mag", iters, size*2) {
        var result = 0.0
        for(i <- 0 until size)
          result = vecA(i).length + vecB(i).length + vecC(i).length
        result
      }

      println("----------------------------------------------")
      add.summary     ("     Add")
      subtract.summary(" Subract")
      multiply.summary("Multiply")
      divide.summary  ("  Divide")
      dot.summary     ("     Dot")
      cross.summary   ("   Cross")
      mag.summary     ("     Mag")
      println

      (add + subtract + multiply + divide + dot + cross + mag).summary("   TOTAL")

      sys.exit
    }
    catch {
      // Bug fix: the original bare `case ex =>` caught every Throwable,
      // including fatal VM errors such as OutOfMemoryError. Handle only
      // non-fatal exceptions and let fatal ones propagate.
      case scala.util.control.NonFatal(ex) =>
        println("Uncaught Exception: " + ex.getMessage + "\\n" +
                "Stack Trace:\\n" + ex.getStackTraceString)
    }
  }
}
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.storehaus.algebra.reporting
import com.twitter.util.{ Await, Future }
import org.scalacheck.{ Arbitrary, Properties }
import org.scalacheck.Gen.choose
import org.scalacheck.Prop._
import com.twitter.storehaus._
import com.twitter.storehaus.algebra._
/** Property tests verifying that reporting wrappers observe every put. */
object ReportingStoreProperties extends Properties("ReportingStore") {
  // Fresh in-memory store for each property evaluation.
  def newStore[K, V] = new JMapStore[K, V]
  /**
   * Store proxy whose trace hooks are no-ops; anonymous subclasses below
   * override individual hooks to count how often they fire.
   *
   * NOTE(review): mapValues returns a lazy view, so the per-key trace thunks
   * run only when (and as often as) the resulting map's values are forced —
   * confirm the reporter machinery forces each value exactly once.
   */
  class DummyReporter[K, V](val self: Store[K, V]) extends StoreProxy[K, V] with StoreReporter[Store[K, V], K, V] {
    def traceMultiGet[K1 <: K](ks: Set[K1], request: Map[K1, Future[Option[V]]]) = request.mapValues(_.unit)
    def traceGet(k: K, request: Future[Option[V]]) = request.unit
    def tracePut(kv: (K, Option[V]), request: Future[Unit]) = request.unit
    def traceMultiPut[K1 <: K](kvs: Map[K1, Option[V]], request: Map[K1, Future[Unit]]) = request.mapValues(_.unit)
  }
  // Single-key puts: the tracePut hook must fire once per insert, split by
  // whether the value was Some (a write) or None (a delete).
  property("Put Some/None count matches") = forAll { (inserts: Map[Int, Option[Int]]) =>
    var putSomeCount = 0
    var putNoneCount = 0
    val baseStore = newStore[Int, Int]
    val wrappedStore = new DummyReporter[Int, Int](baseStore) {
      override def tracePut(kv: (Int, Option[Int]), request: Future[Unit]) = {
        Future {
          kv._2 match {
            case Some(_) => putSomeCount += 1
            case None => putNoneCount += 1
          }
        }.unit
      }
    }
    inserts.foreach{ i =>
      wrappedStore.put(i._1, i._2)
    }
    inserts.map(_._2).collect{case Some(b) => b}.size == putSomeCount &&
    inserts.map(_._2).collect{case None => 1}.size == putNoneCount
  }
  // Bulk puts: traceMultiPut must observe every key of the batch exactly once.
  property("MultiPut Some/None count matches") = forAll { (inserts: Map[Int, Option[Int]]) =>
    var multiPutSomeCount = 0
    var multiPutNoneCount = 0
    val baseStore = newStore[Int, Int]
    val wrappedStore = new DummyReporter[Int, Int](baseStore) {
      override def traceMultiPut[K1 <: Int](kvs: Map[K1, Option[Int]], request: Map[K1, Future[Unit]]): Map[K1, Future[Unit]] = {
        kvs.mapValues {v =>
          Future {
            v match {
              case Some(_) => multiPutSomeCount += 1
              case None => multiPutNoneCount += 1
            }
          }.unit
        }
      }
    }
    wrappedStore.multiPut(inserts)
    inserts.map(_._2).collect{case Some(b) => b}.size == multiPutSomeCount &&
    inserts.map(_._2).collect{case None => 1}.size == multiPutNoneCount
  }
}
| tresata/storehaus | storehaus-algebra/src/test/scala/com/twitter/storehaus/algebra/reporting/ReportingStoreProperties.scala | Scala | apache-2.0 | 3,134 |
package org.bitcoins.script.constant
import org.bitcoins.script.ScriptOperationFactory
/**
 * Created by chris on 1/9/16.
 * Represents the number of bytes that need to be pushed onto the stack
 * (script opcodes 0x00 through 0x4b encode the push length directly).
 */
trait BytesToPushOntoStack extends ScriptOperation
object BytesToPushOntoStack extends ScriptOperationFactory[BytesToPushOntoStack] {

  /**
   * Represents that zero bytes need to be pushed onto the stack;
   * this really means we need to push an empty byte vector on the stack.
   */
  lazy val zero : BytesToPushOntoStack = apply(0)

  private case class BytesToPushOntoStackImpl(num : Int) extends BytesToPushOntoStack {
    // See the 'Constants' section in https://en.bitcoin.it/wiki/Script:
    // push opcodes encode a length between 0 and 75 bytes.
    require(num >= 0, "BytesToPushOntoStackImpl cannot be negative")
    override def opCode = num
  }

  /** All valid byte-push operations, in ascending order: lengths 0 through 75. */
  override def operations : Seq[BytesToPushOntoStack] =
    (for { i <- 0 to 75 } yield BytesToPushOntoStackImpl(i)).toList

  /**
   * Looks up the push operation for the given byte count.
   *
   * @throws IllegalArgumentException if num is negative or greater than 75
   */
  def fromNumber(num : Int) : BytesToPushOntoStack = {
    // Bug fix: the original only rejected num > 75 up front and reported a
    // misleading "greater than 75 bytes" error for negative inputs; it also
    // scanned the operation list linearly. operations is built in ascending
    // opCode order, so index == opCode and the lookup is O(1).
    if (num < 0 || num > 75)
      throw new IllegalArgumentException(
        "A BytesToPushOntoStack must be between 0 and 75 bytes, got: " + num)
    else operations(num)
  }

  def apply(num : Int) : BytesToPushOntoStack = fromNumber(num)
}
| Christewart/scalacoin | src/main/scala/org/bitcoins/script/constant/BytesToPushOntoStack.scala | Scala | mit | 1,618 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.stream.sql
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.config.ExecutionConfigOptions
import org.apache.flink.table.functions.ScalarFunction
import org.apache.flink.table.planner.plan.`trait`.RelModifiedMonotonicity
import org.apache.flink.table.planner.plan.metadata.FlinkRelMetadataQuery
import org.apache.flink.table.planner.plan.utils.JavaUserDefinedAggFunctions.WeightedAvgWithMerge
import org.apache.flink.table.planner.utils.{StreamTableTestUtil, TableTestBase, TableTestUtil}
import org.apache.calcite.sql.validate.SqlMonotonicity.{CONSTANT, DECREASING, INCREASING, NOT_MONOTONIC}
import org.junit.Assert.assertEquals
import org.junit.Test
/**
 * Tests for [[RelModifiedMonotonicity]] inference: each case either verifies
 * the optimized changelog plan for a query or asserts the monotonicity
 * metadata derived from its optimized RelNode tree.
 */
class ModifiedMonotonicityTest extends TableTestBase {

  val util: StreamTableTestUtil = streamTestUtil()
  // Source tables and helper functions shared by every test case below.
  util.addDataStream[(Int, Long, Long)]("A", 'a1, 'a2, 'a3)
  util.addDataStream[(Int, Long, Long)]("B", 'b1, 'b2, 'b3)
  util.addDataStream[(Int, String, Long)](
    "MyTable", 'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)
  util.addFunction("weightedAvg", new WeightedAvgWithMerge)
  util.addTableSource[(Int, Long, String)]("AA", 'a1, 'a2, 'a3)
  util.addTableSource[(Int, Long, Int, String, Long)]("BB", 'b1, 'b2, 'b3, 'b4, 'b5)

  // MAX over a MAX sub-aggregate is monotonic, so retraction can be avoided.
  @Test
  def testMaxWithRetractOptimize(): Unit = {
    val query =
      "SELECT a1, MAX(a3) FROM (SELECT a1, a2, MAX(a3) AS a3 FROM A GROUP BY a1, a2) t GROUP BY a1"
    util.verifyPlan(query, ExplainDetail.CHANGELOG_MODE)
  }

  @Test
  def testMinWithRetractOptimize(): Unit = {
    val query =
      "SELECT a1, MIN(a3) FROM (SELECT a1, a2, MIN(a3) AS a3 FROM A GROUP BY a1, a2) t GROUP BY a1"
    util.verifyPlan(query, ExplainDetail.CHANGELOG_MODE)
  }

  // MIN over a MAX sub-aggregate is not monotonic; the optimization must not fire.
  @Test
  def testMinCanNotOptimize(): Unit = {
    val query =
      "SELECT a1, MIN(a3) FROM (SELECT a1, a2, MAX(a3) AS a3 FROM A GROUP BY a1, a2) t GROUP BY a1"
    util.verifyPlan(query, ExplainDetail.CHANGELOG_MODE)
  }

  // Same scenarios, but with mini-batch enabled so local/global aggregation applies.
  @Test
  def testMaxWithRetractOptimizeWithLocalGlobal(): Unit = {
    util.tableEnv.getConfig.getConfiguration.setBoolean(
      ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED, true)
    util.tableEnv.getConfig.getConfiguration
      .setString(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "100 ms")
    val query = "SELECT a1, max(a3) from (SELECT a1, a2, max(a3) as a3 FROM A GROUP BY a1, a2) " +
      "group by a1"
    util.verifyPlan(query, ExplainDetail.CHANGELOG_MODE)
  }

  @Test
  def testMinWithRetractOptimizeWithLocalGlobal(): Unit = {
    util.tableEnv.getConfig.getConfiguration.setBoolean(
      ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED, true)
    util.tableEnv.getConfig.getConfiguration
      .setString(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "100 ms")
    val query = "SELECT min(a3) from (SELECT a1, a2, min(a3) as a3 FROM A GROUP BY a1, a2)"
    util.verifyPlan(query, ExplainDetail.CHANGELOG_MODE)
  }

  @Test
  def testMinCanNotOptimizeWithLocalGlobal(): Unit = {
    util.tableEnv.getConfig.getConfiguration.setBoolean(
      ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED, true)
    util.tableEnv.getConfig.getConfiguration
      .setString(ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, "100 ms")
    val query =
      "SELECT a1, MIN(a3) FROM (SELECT a1, a2, MAX(a3) AS a3 FROM A GROUP BY a1, a2) t GROUP BY a1"
    util.verifyPlan(query, ExplainDetail.CHANGELOG_MODE)
  }

  // Grouping on a window-start column keeps the group keys constant per group.
  @Test
  def testTumbleFunAndRegularAggFunInGroupBy(): Unit = {
    val sql = "SELECT b, d, weightedAvg(c, a) FROM " +
      " (SELECT a, b, c, count(*) d," +
      "   TUMBLE_START(rowtime, INTERVAL '15' MINUTE) as ping_start " +
      "  FROM MyTable " +
      "  GROUP BY a, b, c, TUMBLE(rowtime, INTERVAL '15' MINUTE)) AS t1 " +
      "GROUP BY b, d, ping_start"
    verifyMonotonicity(sql, new RelModifiedMonotonicity(Array(CONSTANT, CONSTANT, NOT_MONOTONIC)))
  }

  // Anti joins can retract arbitrarily: no monotonicity can be derived (null).
  @Test
  def testAntiJoin(): Unit = {
    val sql = "SELECT * FROM AA WHERE NOT EXISTS (SELECT b1 from BB WHERE a1 = b1)"
    verifyMonotonicity(sql, null)
  }

  @Test
  def testSemiJoinWithNonEqual(): Unit = {
    val query1 = "SELECT MAX(a2) AS a2, a1 FROM AA group by a1"
    val query2 = "SELECT SUM(b2) AS b2, b1 FROM BB group by b1"
    val sql = s"SELECT * FROM ($query1) WHERE a1 in (SELECT b1 from ($query2) WHERE a2 < b2)"
    verifyMonotonicity(sql, null)
  }

  @Test
  def testSemiJoin(): Unit = {
    val query1 = "SELECT MAX(a2) AS a2, a1 FROM AA group by a1"
    val query2 = "SELECT SUM(b2) AS b2, b1 FROM BB group by b1"
    val sql = s"SELECT a1, a2 FROM ($query1) WHERE a1 in (SELECT b1 from ($query2))"
    verifyMonotonicity(sql, new RelModifiedMonotonicity(Array(CONSTANT, INCREASING)))
  }

  // Joining on a non-constant aggregate result destroys monotonicity entirely.
  @Test
  def testInnerJoinOnAggResult(): Unit = {
    val query1 = "SELECT SUM(a2) AS a2, a1 FROM AA group by a1"
    val query2 = "SELECT SUM(b2) AS b2, b1 FROM BB group by b1"
    val sql = s"SELECT a1, a2, b1, b2 FROM ($query1) JOIN ($query2) ON a2 = b2"
    verifyMonotonicity(sql, null)
  }

  @Test
  def testInnerJoin(): Unit = {
    val query1 = "SELECT SUM(a2) AS a2, a1 FROM AA group by a1"
    val query2 = "SELECT SUM(b2) AS b2, b1 FROM BB group by b1"
    val sql = s"SELECT a1, a2, b1, b2 FROM ($query1) JOIN ($query2) ON a1 = b1"
    verifyMonotonicity(
      sql,
      new RelModifiedMonotonicity(Array(CONSTANT, NOT_MONOTONIC, CONSTANT, NOT_MONOTONIC)))
  }

  @Test
  def testUnionAll(): Unit = {
    val query1 = "SELECT SUM(a2) AS a2, a1 FROM AA group by a1"
    val query2 = "SELECT SUM(b2) AS b2, b1 FROM BB group by b1"
    val sql = s"SELECT a1, a2 FROM ($query1) union all ($query2)"
    verifyMonotonicity(sql, new RelModifiedMonotonicity(Array(NOT_MONOTONIC, NOT_MONOTONIC)))
  }

  // Append-only over-window output is constant in every column.
  @Test
  def testOver(): Unit = {
    val sql = "SELECT a, " +
      "  SUM(c) OVER (" +
      "    PARTITION BY a ORDER BY proctime ROWS BETWEEN 4 PRECEDING AND CURRENT ROW), " +
      "  MIN(c) OVER (" +
      "    PARTITION BY a ORDER BY proctime ROWS BETWEEN 4 PRECEDING AND CURRENT ROW) " +
      "FROM MyTable"
    verifyMonotonicity(sql, new RelModifiedMonotonicity(Array(CONSTANT, CONSTANT, CONSTANT)))
  }

  // A scalar function applied to non-monotonic inputs stays non-monotonic.
  @Test
  def testMultiOperandsForCalc(): Unit = {
    util.addFunction("func1", new Func1)
    val sql = "SELECT func1(func1(a1, a3)) from " +
      "(SELECT last_value(a1) as a1, last_value(a3) as a3 FROM AA group by a2) "
    verifyMonotonicity(sql, new RelModifiedMonotonicity(Array(NOT_MONOTONIC)))
  }

  @Test
  def testTopNDesc(): Unit = {
    val sql =
      """
        |SELECT *
        |FROM (
        |  SELECT b, c, a,
        |      ROW_NUMBER() OVER (PARTITION BY a ORDER BY b DESC, c ASC) as rank_num
        |  FROM MyTable)
        |WHERE rank_num <= 10
      """.stripMargin
    verifyMonotonicity(
      sql,
      new RelModifiedMonotonicity(Array(INCREASING, NOT_MONOTONIC, CONSTANT, CONSTANT)))
  }

  @Test
  def testTopNAsc(): Unit = {
    val sql =
      """
        |SELECT *
        |FROM (
        |  SELECT a, b, c,
        |      ROW_NUMBER() OVER (PARTITION BY a ORDER BY b ASC) as rank_num
        |  FROM MyTable)
        |WHERE rank_num <= 10
      """.stripMargin
    verifyMonotonicity(
      sql,
      new RelModifiedMonotonicity(Array(CONSTANT, DECREASING, NOT_MONOTONIC, CONSTANT)))
  }

  @Test
  def testTopNAfterGroupBy(): Unit = {
    val subquery =
      """
        |SELECT a, b, COUNT(*) as count_c
        |FROM MyTable
        |GROUP BY a, b
      """.stripMargin
    val sql =
      s"""
         |SELECT *
         |FROM (
         |  SELECT a, b, count_c,
         |      ROW_NUMBER() OVER (PARTITION BY b ORDER BY count_c DESC) as rank_num
         |  FROM ($subquery))
         |WHERE rank_num <= 10
      """.stripMargin
    verifyMonotonicity(
      sql,
      new RelModifiedMonotonicity(Array(NOT_MONOTONIC, CONSTANT, INCREASING, CONSTANT)))
  }

  // Partitioning the rank by an updating aggregate leaves nothing derivable.
  @Test
  def testTopNAfterGroupBy2(): Unit = {
    val subquery =
      """
        |SELECT a, b, COUNT(*) as count_c
        |FROM MyTable
        |GROUP BY a, b
      """.stripMargin
    val sql =
      s"""
         |SELECT *
         |FROM (
         |  SELECT a, b, count_c,
         |      ROW_NUMBER() OVER (PARTITION BY count_c ORDER BY a DESC) as rank_num
         |  FROM ($subquery))
         |WHERE rank_num <= 10
      """.stripMargin
    verifyMonotonicity(sql, null)
  }

  /**
   * Optimizes the given query and asserts the monotonicity metadata computed
   * for the root of the optimized plan equals `expect` (null means "none").
   */
  def verifyMonotonicity(sql: String, expect: RelModifiedMonotonicity): Unit = {
    val table = util.tableEnv.sqlQuery(sql)
    val relNode = TableTestUtil.toRelNode(table)
    val optimized = util.getPlanner.optimize(relNode)
    val actualMono = FlinkRelMetadataQuery.reuseOrCreate(optimized.getCluster.getMetadataQuery)
      .getRelModifiedMonotonicity(optimized)
    assertEquals(expect, actualMono)
  }
}
/** Simple scalar UDF used by the monotonicity tests; echoes its arguments. */
@SerialVersionUID(1L)
class Func1 extends ScalarFunction {
  /** Returns the input string unchanged (null renders as "null"). */
  def eval(str: String): String = String.valueOf(str)

  /** Returns "<index> and <str>". */
  def eval(index: Integer, str: String): String =
    String.valueOf(index) + " and " + String.valueOf(str)
}
| GJL/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/ModifiedMonotonicityTest.scala | Scala | apache-2.0 | 9,785 |
package com.danielasfregola.twitter4s.http.clients.rest.v2.tweets.paramaters
import com.danielasfregola.twitter4s.entities.v2.enums.expansions.TweetExpansions.TweetExpansions
import com.danielasfregola.twitter4s.entities.v2.enums.fields.MediaFields.MediaFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.PlaceFields.PlaceFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.PollFields.PollFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.TweetFields.TweetFields
import com.danielasfregola.twitter4s.entities.v2.enums.fields.UserFields.UserFields
import com.danielasfregola.twitter4s.http.marshalling.Parameters
// Query-string parameters for Twitter API v2 tweet lookup requests. The
// backtick-quoted field names map directly onto the dotted HTTP parameter
// names (e.g. "media.fields") expected by the API; empty sequences omit the
// corresponding parameter from the request.
private[twitter4s] final case class TweetParameters(expansions: Seq[TweetExpansions] = Seq.empty[TweetExpansions],
                                                    `media.fields`: Seq[MediaFields] = Seq.empty[MediaFields],
                                                    `place.fields`: Seq[PlaceFields] = Seq.empty[PlaceFields],
                                                    `poll.fields`: Seq[PollFields] = Seq.empty[PollFields],
                                                    `tweet.fields`: Seq[TweetFields] = Seq.empty[TweetFields],
                                                    `user.fields`: Seq[UserFields] = Seq.empty[UserFields])
    extends Parameters
| DanielaSfregola/twitter4s | src/main/scala/com/danielasfregola/twitter4s/http/clients/rest/v2/tweets/paramaters/TweetParameters.scala | Scala | apache-2.0 | 1,355 |
package ai
/**
 * Builds one optic-nerve neuron per pixel of a test image and wires each to
 * its LGN (lateral geniculate nucleus) counterparts: excitatory connections
 * to the same pixel's middle-right ("MR") and bottom-middle ("BM") border
 * detectors, and inhibitory connections to the left and upper neighbours.
 * Neuron keys look like "ON_<x>_<y>"; insertion order follows the pixel scan.
 */
class OpticNerve {
  // LinkedHashMap preserves the pixel-scan insertion order of the neurons.
  val neurons = collection.mutable.LinkedHashMap.empty[String, Neuron]
  val image = new TestImage
  for(x <- 0 until image.width; y <- 0 until image.height){
    val neuronReference = Array[String]("ON", x.toString, y.toString).mkString("_")
    val postSynapticConnectionsBuffer = collection.mutable.ListBuffer.empty[PostSynapticConnection]
    // Excites LGN neuron on same pixel for middle-right border
    postSynapticConnectionsBuffer += PostSynapticConnection("E", 1F,
      Array[String]("LGN", x.toString, y.toString, "MR").mkString("_"))
    // Excites LGN neuron on same pixel for bottom-middle border
    postSynapticConnectionsBuffer += PostSynapticConnection("E", 1F,
      Array[String]("LGN", x.toString, y.toString, "BM").mkString("_"))
    // Inhibits LGN neuron on pixel to the middle-left
    if (!image.pixelOutOfBounds(x - 1, y)) {
      postSynapticConnectionsBuffer += PostSynapticConnection("I", 1F,
        Array[String]("LGN", (x-1).toString, y.toString, "MR").mkString("_"))
    }
    // Inhibits LGN neuron on pixel to the top-middle
    if (!image.pixelOutOfBounds(x, y - 1)) {
      postSynapticConnectionsBuffer += PostSynapticConnection("I", 1F,
        Array[String]("LGN", x.toString, (y-1).toString, "BM").mkString("_"))
    }
    val postSynapticConnections = postSynapticConnectionsBuffer.toList
    val neuron = new Neuron(postSynapticConnections)
    // NOTE(review): firing rate seeded with the pixel coordinates' product —
    // presumably placeholder test stimulus; confirm intended values.
    neuron.firingRate = x*y
    neurons += (neuronReference -> neuron)
  }
}
/** Fixed-size 16x16 synthetic image used to drive the optic-nerve wiring. */
class TestImage {
  val width = 16
  val height = 16

  /** True when the (x, y) coordinate lies outside the image bounds. */
  def pixelOutOfBounds(x: Int, y: Int): Boolean =
    x < 0 || y < 0 || x >= width || y >= height
}
package com.sksamuel.scapegoat.inspections.collections
import com.sksamuel.scapegoat.PluginRunner
import org.scalatest.{ FreeSpec, Matchers, OneInstancePerTest }
/** @author Stephen Samuel */
/**
 * Verifies the AvoidSizeEqualsZero inspection: `size == 0` / `length == 0` on
 * collections should be reported (prefer isEmpty), while `length`/`isEmpty`
 * members of arbitrary user types must not trigger false positives.
 * NOTE: the embedded code snippets are compiled verbatim by the test harness.
 */
class AvoidSizeEqualsZeroTest extends FreeSpec with Matchers with PluginRunner with OneInstancePerTest {

  override val inspections = Seq(new AvoidSizeEqualsZero)

  "collection.size == 0" - {
    "should report warning" in {
      // Five distinct size/length-equals-zero comparisons: expect five warnings.
      val code = """object Test {
                     val isEmpty1 = List(1).size == 0
                     val isEmpty2 = List(1).length == 0
                     val isEmpty3 = Set(1).size == 0
                     val isEmpty5 = Seq(1).size == 0
                     val isEmpty6 = Seq(1).length == 0
                    } """.stripMargin
      compileCodeSnippet(code)
      compiler.scapegoat.feedback.warnings.size shouldBe 5
    }
    // github issue #94
    "should ignore durations" in {
      // A user-defined `length` member is not a collection size: no warnings.
      val code = """object Test {
                   |case class Duration(start: Long, stop: Long) {
                   |  def length: Long = stop - start
                   |  def isEmpty: Boolean = length == 0
                   | }
                    } """.stripMargin
      compileCodeSnippet(code)
      compiler.scapegoat.feedback.warnings.size shouldBe 0
    }
  }
}
| jasonchaffee/scalac-scapegoat-plugin | src/test/scala/com/sksamuel/scapegoat/inspections/collections/AvoidSizeEqualsZeroTest.scala | Scala | apache-2.0 | 1,302 |
/*
* Copyright 2016 Dennis Vriend
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package akka.stream.integration
import akka.camel.CamelMessage
import com.google.protobuf.Message
/** Mixin that derives CamelMessage builders for protobuf-serializable payloads. */
trait ProtobufCamelMessageBuilder {
  // NOTE(review): the implicit parameter defaults to null and is wrapped in
  // Option below; confirm that "no HeadersBuilder in scope" is meant to
  // resolve to the null default rather than failing implicit search.
  implicit def protoMessageBuilder[In: ProtobufWriter](implicit headersBuilder: HeadersBuilder[In] = null) = new CamelMessageBuilder[In] {
    override def build(in: In): CamelMessage = {
      // Serialize the payload through its ProtobufWriter type class instance.
      val protobufMessage: Message = implicitly[ProtobufWriter[In]].write(in)
      // Headers are optional; a missing builder yields an empty header map.
      val headers: Map[String, Any] = Option(headersBuilder).map(_.build(in)).getOrElse(Map.empty[String, Any])
      CamelMessage(protobufMessage, headers)
    }
  }
} | dnvriend/reactive-activemq | src/main/scala/akka/stream/integration/ProtobufCamelMessageBuilder.scala | Scala | apache-2.0 | 1,168 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.admin
import java.util.concurrent.ExecutorService
import java.util.concurrent.Executors
import java.util.concurrent.TimeUnit
import java.util.Collections
import java.util.Properties
import org.easymock.EasyMock
import org.junit.Assert._
import org.junit.Before
import org.junit.Test
import kafka.admin.ConsumerGroupCommand.ConsumerGroupCommandOptions
import kafka.admin.ConsumerGroupCommand.KafkaConsumerGroupService
import kafka.admin.ConsumerGroupCommand.ZkConsumerGroupService
import kafka.consumer.OldConsumer
import kafka.consumer.Whitelist
import kafka.integration.KafkaServerTestHarness
import kafka.server.KafkaConfig
import kafka.utils.TestUtils
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.common.errors.GroupCoordinatorNotAvailableException
import org.apache.kafka.common.errors.TimeoutException
import org.apache.kafka.common.errors.WakeupException
import org.apache.kafka.common.serialization.StringDeserializer
/**
 * Integration tests for `kafka-consumer-groups --describe` against the
 * ZooKeeper-based consumer group service; runs on an embedded broker.
 */
class DescribeConsumerGroupTest extends KafkaServerTestHarness {

  val overridingProps = new Properties()
  val topic = "foo"
  // Whitelist filter matching only the test topic for the old consumer.
  val topicFilter = Whitelist(topic)
  val group = "test.group"
  // Consumer properties; group.id is filled in by setUp().
  val props = new Properties

  // configure the servers and clients
  override def generateConfigs() = TestUtils.createBrokerConfigs(1, zkConnect, enableControlledShutdown = false).map(KafkaConfig.fromProps(_, overridingProps))
  @Before
  override def setUp() {
    super.setUp()
    // Create the test topic (1 partition, replication factor 1) up front.
    AdminUtils.createTopic(zkUtils, topic, 1, 1)
    props.setProperty("group.id", group)
  }
  // Describing a group that never committed offsets must yield no rows.
  @Test
  def testDescribeNonExistingGroup() {
    // mocks
    props.setProperty("zookeeper.connect", zkConnect)
    val consumerMock = EasyMock.createMockBuilder(classOf[OldConsumer]).withConstructor(topicFilter, props).createMock()

    // stubs
    val opts = new ConsumerGroupCommandOptions(Array("--zookeeper", zkConnect, "--describe", "--group", "missing.group"))
    val consumerGroupCommand = new ZkConsumerGroupService(opts)

    // simulation
    EasyMock.replay(consumerMock)

    // action/test
    TestUtils.waitUntilTrue(() => consumerGroupCommand.describeGroup()._2.isEmpty, "Expected no rows in describe group results.")

    // cleanup
    consumerGroupCommand.close()
    consumerMock.stop()
  }
  // A group with a live member must show exactly one row with a consumer id.
  @Test
  def testDescribeExistingGroup() {
    // mocks
    props.setProperty("zookeeper.connect", zkConnect)
    val consumerMock = EasyMock.createMockBuilder(classOf[OldConsumer]).withConstructor(topicFilter, props).createMock()

    // stubs
    val opts = new ConsumerGroupCommandOptions(Array("--zookeeper", zkConnect, "--describe", "--group", group))
    val consumerGroupCommand = new ZkConsumerGroupService(opts)

    // simulation
    EasyMock.replay(consumerMock)

    // action/test
    TestUtils.waitUntilTrue(() => {
      val (_, assignments) = consumerGroupCommand.describeGroup()
      assignments.isDefined &&
        assignments.get.count(_.group == group) == 1 &&
        assignments.get.filter(_.group == group).head.consumerId.exists(_.trim != ConsumerGroupCommand.MISSING_COLUMN_VALUE)
    }, "Expected rows and a consumer id column in describe group results.")

    // cleanup
    consumerGroupCommand.close()
    consumerMock.stop()
  }
@Test
def testDescribeExistingGroupWithNoMembers() {
// mocks
props.setProperty("zookeeper.connect", zkConnect)
val consumerMock = EasyMock.createMockBuilder(classOf[OldConsumer]).withConstructor(topicFilter, props).createMock()
// stubs
val opts = new ConsumerGroupCommandOptions(Array("--zookeeper", zkConnect, "--describe", "--group", group))
val consumerGroupCommand = new ZkConsumerGroupService(opts)
// simulation
EasyMock.replay(consumerMock)
// action/test
TestUtils.waitUntilTrue(() => {
val (_, assignments) = consumerGroupCommand.describeGroup()
assignments.isDefined &&
assignments.get.count(_.group == group) == 1 &&
assignments.get.filter(_.group == group).head.consumerId.exists(_.trim != ConsumerGroupCommand.MISSING_COLUMN_VALUE)
}, "Expected rows and a consumer id column in describe group results.")
consumerMock.stop()
TestUtils.waitUntilTrue(() => {
val (_, assignments) = consumerGroupCommand.describeGroup()
assignments.isDefined &&
assignments.get.count(_.group == group) == 1 &&
assignments.get.filter(_.group == group).head.consumerId.exists(_.trim == ConsumerGroupCommand.MISSING_COLUMN_VALUE) // the member should be gone
}, "Expected no active member in describe group results.")
// cleanup
consumerGroupCommand.close()
}
@Test
def testDescribeConsumersWithNoAssignedPartitions() {
// mocks
props.setProperty("zookeeper.connect", zkConnect)
val consumer1Mock = EasyMock.createMockBuilder(classOf[OldConsumer]).withConstructor(topicFilter, props).createMock()
val consumer2Mock = EasyMock.createMockBuilder(classOf[OldConsumer]).withConstructor(topicFilter, props).createMock()
// stubs
val opts = new ConsumerGroupCommandOptions(Array("--zookeeper", zkConnect, "--describe", "--group", group))
val consumerGroupCommand = new ZkConsumerGroupService(opts)
EasyMock.replay(consumer1Mock)
EasyMock.replay(consumer2Mock)
// action/test
TestUtils.waitUntilTrue(() => {
val (_, assignments) = consumerGroupCommand.describeGroup()
assignments.isDefined &&
assignments.get.count(_.group == group) == 2 &&
assignments.get.count { x => x.group == group && x.partition.isDefined } == 1 &&
assignments.get.count { x => x.group == group && !x.partition.isDefined } == 1
}, "Expected rows for consumers with no assigned partitions in describe group results.")
// cleanup
consumerGroupCommand.close()
consumer1Mock.stop()
consumer2Mock.stop()
}
@Test
def testDescribeNonExistingGroupWithNewConsumer() {
// run one consumer in the group consuming from a single-partition topic
val executor = new ConsumerGroupExecutor(brokerList, 1, group, topic)
// note the group to be queried is a different (non-existing) group
val cgcArgs = Array("--bootstrap-server", brokerList, "--describe", "--group", "missing.group")
val opts = new ConsumerGroupCommandOptions(cgcArgs)
val consumerGroupCommand = new KafkaConsumerGroupService(opts)
val (state, assignments) = consumerGroupCommand.describeGroup()
assertTrue("Expected the state to be 'Dead' with no members in the group.", state == Some("Dead") && assignments == Some(List()))
consumerGroupCommand.close()
}
@Test
def testDescribeExistingGroupWithNewConsumer() {
// run one consumer in the group consuming from a single-partition topic
val executor = new ConsumerGroupExecutor(brokerList, 1, group, topic)
val cgcArgs = Array("--bootstrap-server", brokerList, "--describe", "--group", group)
val opts = new ConsumerGroupCommandOptions(cgcArgs)
val consumerGroupCommand = new KafkaConsumerGroupService(opts)
TestUtils.waitUntilTrue(() => {
val (state, assignments) = consumerGroupCommand.describeGroup()
state == Some("Stable") &&
assignments.isDefined &&
assignments.get.count(_.group == group) == 1 &&
assignments.get.filter(_.group == group).head.consumerId.exists(_.trim != ConsumerGroupCommand.MISSING_COLUMN_VALUE) &&
assignments.get.filter(_.group == group).head.clientId.exists(_.trim != ConsumerGroupCommand.MISSING_COLUMN_VALUE) &&
assignments.get.filter(_.group == group).head.host.exists(_.trim != ConsumerGroupCommand.MISSING_COLUMN_VALUE)
}, "Expected a 'Stable' group status, rows and valid values for consumer id / client id / host columns in describe group results.")
consumerGroupCommand.close()
}
@Test
def testDescribeExistingGroupWithNoMembersWithNewConsumer() {
// run one consumer in the group consuming from a single-partition topic
val executor = new ConsumerGroupExecutor(brokerList, 1, group, topic)
val cgcArgs = Array("--bootstrap-server", brokerList, "--describe", "--group", group)
val opts = new ConsumerGroupCommandOptions(cgcArgs)
val consumerGroupCommand = new KafkaConsumerGroupService(opts)
TestUtils.waitUntilTrue(() => {
val (state, _) = consumerGroupCommand.describeGroup()
state == Some("Stable")
}, "Expected the group to initially become stable.")
// stop the consumer so the group has no active member anymore
executor.shutdown()
TestUtils.waitUntilTrue(() => {
val (state, assignments) = consumerGroupCommand.describeGroup()
state == Some("Empty") &&
assignments.isDefined &&
assignments.get.count(_.group == group) == 1 &&
assignments.get.filter(_.group == group).head.consumerId.exists(_.trim == ConsumerGroupCommand.MISSING_COLUMN_VALUE) && // the member should be gone
assignments.get.filter(_.group == group).head.clientId.exists(_.trim == ConsumerGroupCommand.MISSING_COLUMN_VALUE) &&
assignments.get.filter(_.group == group).head.host.exists(_.trim == ConsumerGroupCommand.MISSING_COLUMN_VALUE)
}, "Expected no active member in describe group results.")
consumerGroupCommand.close()
}
@Test
def testDescribeConsumersWithNoAssignedPartitionsWithNewConsumer() {
// run two consumers in the group consuming from a single-partition topic
val executor = new ConsumerGroupExecutor(brokerList, 2, group, topic)
val cgcArgs = Array("--bootstrap-server", brokerList, "--describe", "--group", group)
val opts = new ConsumerGroupCommandOptions(cgcArgs)
val consumerGroupCommand = new KafkaConsumerGroupService(opts)
TestUtils.waitUntilTrue(() => {
val (state, assignments) = consumerGroupCommand.describeGroup()
state == Some("Stable") &&
assignments.isDefined &&
assignments.get.count(_.group == group) == 2 &&
assignments.get.count{ x => x.group == group && x.partition.isDefined} == 1 &&
assignments.get.count{ x => x.group == group && !x.partition.isDefined} == 1
}, "Expected rows for consumers with no assigned partitions in describe group results.")
consumerGroupCommand.close()
}
@Test
def testDescribeWithMultiPartitionTopicAndMultipleConsumersWithNewConsumer() {
val topic2 = "foo2"
AdminUtils.createTopic(zkUtils, topic2, 2, 1)
// run two consumers in the group consuming from a two-partition topic
val executor = new ConsumerGroupExecutor(brokerList, 2, group, topic2)
val cgcArgs = Array("--bootstrap-server", brokerList, "--describe", "--group", group)
val opts = new ConsumerGroupCommandOptions(cgcArgs)
val consumerGroupCommand = new KafkaConsumerGroupService(opts)
TestUtils.waitUntilTrue(() => {
val (state, assignments) = consumerGroupCommand.describeGroup()
state == Some("Stable") &&
assignments.isDefined &&
assignments.get.count(_.group == group) == 2 &&
assignments.get.count{ x => x.group == group && x.partition.isDefined} == 2 &&
assignments.get.count{ x => x.group == group && !x.partition.isDefined} == 0
}, "Expected two rows (one row per consumer) in describe group results.")
consumerGroupCommand.close()
}
@Test
def testDescribeGroupWithNewConsumerWithShortInitializationTimeout() {
// run one consumer in the group consuming from a single-partition topic
val executor = new ConsumerGroupExecutor(brokerList, 1, group, topic)
// set the group initialization timeout too low for the group to stabilize
val cgcArgs = Array("--bootstrap-server", brokerList, "--describe", "--group", "group", "--timeout", "10")
val opts = new ConsumerGroupCommandOptions(cgcArgs)
val consumerGroupCommand = new KafkaConsumerGroupService(opts)
try {
val (state, assignments) = consumerGroupCommand.describeGroup()
fail("The consumer group command should fail due to low initialization timeout")
} catch {
case e: TimeoutException =>
// OK
case e: Throwable =>
fail("An unexpected exception occurred: " + e.getMessage)
throw e
} finally {
consumerGroupCommand.close()
}
}
}
/**
 * Runnable that subscribes one new-style KafkaConsumer to `topic` as a member of
 * group `groupId` and polls indefinitely until woken up via shutdown().
 *
 * @param broker  bootstrap servers string
 * @param id      numeric id of this consumer within its executor (informational)
 * @param groupId consumer group to join
 * @param topic   topic to subscribe to
 */
class ConsumerThread(broker: String, id: Int, groupId: String, topic: String) extends Runnable {
  val props = new Properties
  props.put("bootstrap.servers", broker)
  props.put("group.id", groupId)
  props.put("key.deserializer", classOf[StringDeserializer].getName)
  props.put("value.deserializer", classOf[StringDeserializer].getName)
  // Fix: give the consumer explicit type parameters matching the configured
  // StringDeserializer instances instead of relying on the raw/inferred type.
  val consumer = new KafkaConsumer[String, String](props)
  def run() {
    try {
      consumer.subscribe(Collections.singleton(topic))
      // block in poll() until wakeup() interrupts us with a WakeupException
      while (true)
        consumer.poll(Long.MaxValue)
    } catch {
      case e: WakeupException => // OK: expected path out of the poll loop on shutdown()
    } finally {
      consumer.close()
    }
  }
  /** Interrupts the blocking poll; run() then closes the consumer. */
  def shutdown() {
    consumer.wakeup()
  }
}
/**
 * Runs `numConsumers` ConsumerThread instances in a fixed-size thread pool, all
 * joining `groupId` and subscribing to `topic`. A JVM shutdown hook guarantees
 * the consumers are woken up and the pool drained even on abnormal exit.
 */
class ConsumerGroupExecutor(broker: String, numConsumers: Int, groupId: String, topic: String) {
  val executor: ExecutorService = Executors.newFixedThreadPool(numConsumers)
  // kept as a public var for interface compatibility; only appended to during construction
  var consumers = List[ConsumerThread]()
  for (i <- 1 to numConsumers) {
    val consumer = new ConsumerThread(broker, i, groupId, topic)
    consumers ++= List(consumer)
    executor.submit(consumer)
  }
  Runtime.getRuntime().addShutdownHook(new Thread() {
    override def run() {
      shutdown()
    }
  })
  /** Wakes up every consumer and waits briefly for the pool to terminate. */
  def shutdown() {
    // shutdown() is side-effecting, so call it with explicit parentheses
    consumers.foreach(_.shutdown())
    executor.shutdown()
    try {
      executor.awaitTermination(5000, TimeUnit.MILLISECONDS)
    } catch {
      case e: InterruptedException =>
        e.printStackTrace()
    }
  }
}
| ijuma/kafka | core/src/test/scala/unit/kafka/admin/DescribeConsumerGroupTest.scala | Scala | apache-2.0 | 14,466 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package router.connectors
import config.AppConfig
import javax.inject.{Inject, Singleton}
import router.httpParsers.SelfAssessmentHttpParser
import uk.gov.hmrc.http.HttpClient
import scala.concurrent.ExecutionContext
/**
 * Connector for the savings-account downstream API.
 *
 * Wiring only: HTTP transport, response parsing and configuration are injected,
 * and all request logic lives in [[BaseConnector]]. This class merely supplies
 * the base service URL taken from application configuration.
 */
@Singleton
class SavingsAccountConnector @Inject()(val http: HttpClient,
                                        val httpParser: SelfAssessmentHttpParser,
                                        val appConfig: AppConfig)
                                       (implicit val ec: ExecutionContext) extends BaseConnector {
  // base URL for all savings-account requests issued via BaseConnector
  override val serviceUrl: String = appConfig.savingsAccountApiUrl
}
| hmrc/self-assessment-api | app/router/connectors/SavingsAccountConnector.scala | Scala | apache-2.0 | 1,214 |
package com.wbillingsley.handy
import com.wbillingsley.handy.reactivestreams.TakeWhileR
import scala.annotation.tailrec
import scala.language.{higherKinds, postfixOps}
/**
* A reference to a collection of items.
*/
/**
 * A RefMany backed by an in-memory collection: every combinator is evaluated
 * synchronously by iterating `items`.
 *
 * NOTE(review): several members call `items.iterator` independently. For a
 * re-iterable collection (List, Vector, ...) that is fine, but if `items` is a
 * genuinely single-pass IterableOnce (e.g. an Iterator), a second traversal
 * would see an exhausted source — TODO confirm intended usage.
 */
case class RefIterableOnce[T, C[TT] <: IterableOnce[TT]](items: C[T]) extends RefManySync[T] {
  // first element wrapped as a RefOpt (RefNone when empty)
  override def first:RefOpt[T] = RefOpt(headOption)
  // LazyList conversion lets us take the head without forcing the whole source
  def headOption:Option[T] = items.iterator.to(LazyList).headOption
  override def map[B](f: T => B):RefManySync[B] = {
    val result = for (item <- items.iterator) yield f(item)
    RefIterableOnce(result)
  }
  // each flatMap variant wraps the mapped iterator in the matching Ref* container
  override def flatMapMany[B](f: T => RefMany[B]) = RefIterableRefMany(items.iterator map f)
  override def flatMapOpt[B](f: T => RefOpt[B]):RefMany[B] = RefIterableRefOpt(items.iterator map f)
  override def flatMapOne[B](f: T => Ref[B]) = RefIterableRef(items.iterator map f)
  override def foreach[U](f: T => U):Unit = { items.iterator.foreach(f) }
  // a plain collection can never be in a failed state, so recovery is a no-op
  override def recoverManyWith[B >: T](pf: PartialFunction[Throwable, RefMany[B]]):RefMany[B] = this
  override def withFilter(p: T => Boolean) = RefIterableOnce(items.iterator withFilter p)
  // synchronous fold, immediately wrapped in a resolved Ref
  override def foldLeft[B](initial: =>B)(each:(B, T) => B) = RefItself(items.iterator.foldLeft(initial)(each))
  // the data is already available, so the block runs immediately
  override def whenReady[B](block: RefMany[T] => B):Ref[B] = RefItself(block(this))
  override def collect = RefItself(items.iterator.toSeq)
  override def toSeq: Ref[Seq[T]] = collect
}
/**
 * A RefMany over a collection of single-valued Refs: combinators are applied
 * element-wise to each inner Ref, deferring any asynchrony to those Refs.
 */
case class RefIterableRef[T, C[TT] <: IterableOnce[TT]](refs: IterableOnce[Ref[T]]) extends RefMany[T] {
  // head Ref (if any) flattened into a RefOpt of its value
  override def first:RefOpt[T] = RefOpt(refs.iterator.to(Iterable).headOption).flatMapOne(identity)
  override def map[B](f: T => B):RefMany[B] = {
    val result = refs.iterator.map(_.map(f))
    RefIterableRef(result)
  }
  override def flatMapOne[B](f: T => Ref[B]):RefMany[B] = {
    val result = refs.iterator.map(_.flatMap(f))
    RefIterableRef(result)
  }
  // flatMapping into RefOpt changes the element container, hence RefIterableRefOpt
  override def flatMapOpt[B](func: T => RefOpt[B]): RefMany[B] = {
    val result = refs.iterator.map(_.flatMap(func))
    RefIterableRefOpt(result)
  }
  override def flatMapMany[B](f: T => RefMany[B]):RefMany[B] = {
    val result = refs.iterator.map(_.flatMap(f))
    RefIterableRefMany(result)
  }
  override def foreach[U](f: T => U):Unit = { refs.iterator.foreach(_.foreach(f)) }
  // filtering is expressed as a flatMap into RefSome/RefNone
  override def withFilter(p: T => Boolean):RefMany[T] = {
    flatMapOpt(x => if (p(x)) RefSome(x) else RefNone)
  }
  // sequences the inner Refs: each step awaits the accumulator and the next element
  override def foldLeft[B](initial: =>B)(each:(B, T) => B):Ref[B] = {
    refs.iterator.foldLeft[Ref[B]](RefItself(initial)){(ar, br) =>
      for (a <- ar; ob <- br) yield each(a, ob)
    }
  }
  override def whenReady[B](block: RefMany[T] => B):Ref[B] = RefItself(block(this))
  // the outer collection itself cannot fail; inner Ref failures are not recovered here
  override def recoverManyWith[B >: T](pf: PartialFunction[Throwable, RefMany[B]]):RefMany[T] = this
}
/**
 * A RefMany over a collection of optional Refs (RefOpt): empty elements are
 * carried through each combinator and simply contribute nothing to the result.
 */
case class RefIterableRefOpt[+T](refs: IterableOnce[RefOpt[T]]) extends RefMany[T] {
  /** map, as in functors — applied inside each RefOpt */
  override def map[B](func: T => B): RefMany[B] = {
    RefIterableRefOpt(refs.iterator.map(_.map(func)))
  }
  // foreach is implemented via map; the mapped result is discarded
  override def foreach[U](func: T => U): Unit = map(func)
  override def flatMapOne[B](func: T => Ref[B]): RefMany[B] = {
    RefIterableRefOpt(refs.iterator.map(_.flatMapOne(func)))
  }
  override def flatMapOpt[B](func: T => RefOpt[B]): RefMany[B] = {
    RefIterableRefOpt(refs.iterator.map(_.flatMapOpt(func)))
  }
  override def flatMapMany[B](func: T => RefMany[B]): RefMany[B] = {
    RefIterableRefMany(refs.iterator.map(_.flatMapMany(func)))
  }
  override def withFilter(func: T => Boolean): RefMany[T] = {
    RefIterableRefOpt(refs.iterator.map(_.withFilter(func)))
  }
  /** TODO: this consumes the whole stream */
  // chains orElse across every element to find the first non-empty RefOpt
  override def first: RefOpt[T] = refs.iterator.foldLeft[RefOpt[T]](RefNone) { (opt, ro) => opt orElse ro }
  /**
   * A fold across this (possibly asynchronous) collection
   * initial will only be evaluated in the success case.
   */
  override def foldLeft[B](initial: => B)(each: (B, T) => B): Ref[B] = {
    refs.iterator.foldLeft[Ref[B]](RefItself(initial)) { (soFar, ro) =>
      soFar.flatMapOpt { b =>
        // empty elements leave the accumulator unchanged via the orElse branch
        ro map { t =>
          each(b, t)
        } orElse RefSome(b)
      }.require
    }
  }
  /**
   * Recovers from failures producing the list -- for instance if this is a RefFailed, or a RefFutureRefMany that fails.
   * Note that it does not recover from individual elements within the list failing.
   */
  override def recoverManyWith[B >: T](pf: PartialFunction[Throwable, RefMany[B]]): RefMany[B] = this
  /**
   * Called when the RefMany is "ready". This is equivalent to
   * fold(initial){ (_,_) => initial } but without calling the empty folder for each value
   */
  override def whenReady[B](f: RefMany[T] => B): Ref[B] = RefItself(f(this))
}
/**
 * A RefMany over a collection of RefMany values — effectively a nested
 * many-of-many. Combinators are applied to each inner RefMany; folds flatten
 * the nesting by folding each inner collection in sequence.
 */
case class RefIterableRefMany[T, C[TT] <: IterableOnce[TT]](refs: IterableOnce[RefMany[T]]) extends RefMany[T] {
  // first non-empty element across all inner collections, searched left to right
  def first:RefOpt[T] = refs.iterator.foldLeft[RefOpt[T]](RefNone) { (opt, rm) => opt orElse rm.first }
  def map[B](f: T => B):RefMany[B] = {
    val result = refs.iterator.map(_.map(f))
    RefIterableRefMany(result)
  }
  def flatMapOne[B](f: T => Ref[B]):RefMany[B] = {
    val result = refs.iterator.map(_.flatMap(f))
    RefIterableRefMany(result)
  }
  override def flatMapOpt[B](func: T => RefOpt[B]): RefMany[B] = {
    val result = refs.iterator.map(_.flatMap(func))
    RefIterableRefMany(result)
  }
  def flatMapMany[B](f: T => RefMany[B]):RefMany[B] = {
    val result = refs.iterator.map(_.flatMap(f))
    RefIterableRefMany(result)
  }
  def foreach[U](f: T => U):Unit = { refs.iterator.foreach(_.foreach(f)) }
  def withFilter(p: T => Boolean) = RefIterableRefMany(refs.iterator map (_ withFilter p))
  // folds each inner RefMany in turn, threading the accumulator through a Ref
  override def foldLeft[B](initial: =>B)(each:(B, T) => B):Ref[B] = {
    refs.iterator.foldLeft[Ref[B]](RefItself(initial)){(ar, refMany) =>
      ar.flatMapOne(a => refMany.foldLeft(a)(each))
    }
  }
  def whenReady[B](block: RefMany[T] => B):Ref[B] = RefItself(block(this))
  // the outer collection cannot fail; inner failures are not recovered here
  def recoverManyWith[B >: T](pf: PartialFunction[Throwable, RefMany[B]]):RefMany[B] = this
}
| wbillingsley/handy | handy/src/main/scala/com/wbillingsley/handy/RefIterableOnce.scala | Scala | mit | 6,071 |
// cats core
// Scastie demo snippet: brings the cats type classes, data types and syntax
// into scope, then exercises the Show type class.
import cats._, cats.data._, cats.implicits._
// NOTE(review): the three imports below duplicate the combined import above —
// redundant but harmless (kept to illustrate both import styles).
import cats._
import cats.data._
import cats.implicits._
// Show[Int] syntax from cats.implicits: renders the Int as the String "3"
3.show
"hello".show | OlegYch/scastie | demo/cats.scala | Scala | apache-2.0 | 136 |
/*
* Copyright (c) 2014 Paul Bernard
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Spectrum Finance is based in part on:
* QuantLib. http://quantlib.org/
*
*/
package org.quantintel.ql.util
import scala.collection.JavaConverters._
import java.lang.ref.{WeakReference => JWeakReference}
/**
* @author Paul Bernard
*/
/**
 * An Observable that holds its observers through weak references, so that
 * registering with it does not prevent an observer from being garbage
 * collected. Collected observers are pruned lazily: on update() dispatch and
 * on explicit deleteObserver calls.
 */
class WeakReferenceObservable(observable: Observable) extends DefaultObservable(observable) {
  // wrap the real observer in a weak proxy before registering it
  override def addObserver(referent: Observer) {
    super.addObserver(new WeakReferenceObserver(referent))
  }
  // removes the given observer, and opportunistically drops any wrappers whose
  // referent has already been garbage collected (referent == null)
  // NOTE(review): this deletes from the observer collection while iterating it —
  // safety depends on DefaultObservable's underlying collection; verify it
  // tolerates concurrent removal.
  override def deleteObserver(observer: Observer){
    getObservers.asScala.foreach(
      (weakObserver : Observer) => {
        val weakReference: WeakReferenceObserver = weakObserver.asInstanceOf[WeakReferenceObserver]
        val referent : Observer = weakReference.get()
        if (referent == null || referent == observer) deleteWeakReference(weakReference)
      }
    )
  }
  private def deleteWeakReference(observer: WeakReferenceObserver) {
    super.deleteObserver(observer)
  }
  /**
   * Weak wrapper around an Observer: forwards update() while the referent is
   * alive, and unregisters itself once the referent has been collected.
   */
  private class WeakReferenceObserver(observer: Observer) extends JWeakReference(observer) with Observer {
    override def update(){
      val referent : Observer = get()
      if (referent != null) referent.update() else deleteWeakReference(this)
    }
  }
}
| quantintel/spectrum | financial/src/main/scala/org/quantintel/ql/util/WeakReferenceObservable.scala | Scala | apache-2.0 | 1,812 |
import scala.reflect.runtime.universe._
// Regression test: prints the raw tree of a reified trait so the expected
// output can check how modifiers (private[this] val, var, lazy val) are
// represented. The reified body must stay exactly as written — it IS the
// test fixture.
object Test extends App {
  val tree = reify{trait C { private[this] val x = 2; var y = x; lazy val z = y }}
  println(showRaw(tree.tree))
}
| som-snytt/dotty | tests/disabled/macro/run/showraw_mods.scala | Scala | apache-2.0 | 182 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.dstream
import org.apache.spark.Logging
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{ Time, DStreamCheckpointData, StreamingContext }
import java.util.Properties
import java.util.concurrent.Executors
import java.io.IOException
import org.eclipse.paho.client.mqttv3.MqttCallback
import org.eclipse.paho.client.mqttv3.MqttClient
import org.eclipse.paho.client.mqttv3.MqttClientPersistence
import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence
import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken
import org.eclipse.paho.client.mqttv3.MqttException
import org.eclipse.paho.client.mqttv3.MqttMessage
import org.eclipse.paho.client.mqttv3.MqttTopic
import scala.collection.Map
import scala.collection.mutable.HashMap
import scala.collection.JavaConversions._
import scala.reflect.ClassTag
/**
* Input stream that subscribe messages from a Mqtt Broker.
* Uses eclipse paho as MqttClient http://www.eclipse.org/paho/
* @param brokerUrl Url of remote mqtt publisher
* @param topic topic name to subscribe to
* @param storageLevel RDD storage level.
*/
private[streaming]
class MQTTInputDStream[T: ClassTag](
    @transient ssc_ : StreamingContext,
    brokerUrl: String,
    topic: String,
    storageLevel: StorageLevel
  ) extends NetworkInputDStream[T](ssc_) with Logging {

  // Factory for the receiver that actually connects to the broker; the cast is
  // needed because MQTTReceiver is a NetworkReceiver[Any], not [T].
  def getReceiver(): NetworkReceiver[T] = {
    new MQTTReceiver(brokerUrl, topic, storageLevel)
      .asInstanceOf[NetworkReceiver[T]]
  }
}
/**
 * Receiver that connects to an MQTT broker (via Eclipse Paho), subscribes to a
 * single topic, and pushes each message payload (decoded as a String) into the
 * block generator for Spark Streaming.
 *
 * @param brokerUrl    URL of the MQTT broker to connect to
 * @param topic        topic to subscribe to
 * @param storageLevel storage level for received blocks
 */
private[streaming]
class MQTTReceiver(brokerUrl: String,
  topic: String,
  storageLevel: StorageLevel
  ) extends NetworkReceiver[Any] {
  lazy protected val blockGenerator = new BlockGenerator(storageLevel)

  def onStop() {
    blockGenerator.stop()
  }

  def onStart() {
    blockGenerator.start()

    // Set up persistence for messages (fix: spelling, and val since never reassigned)
    val persistence: MqttClientPersistence = new MemoryPersistence()

    // Initializing Mqtt Client specifying brokerUrl, clientID and MqttClientPersistance
    val client: MqttClient = new MqttClient(brokerUrl, "MQTTSub", persistence)

    // Callback automatically triggers as and when new message arrives on specified topic
    val callback: MqttCallback = new MqttCallback() {

      // Handles Mqtt message
      override def messageArrived(arg0: String, arg1: MqttMessage) {
        blockGenerator += new String(arg1.getPayload())
      }

      override def deliveryComplete(arg0: IMqttDeliveryToken) {
      }

      override def connectionLost(arg0: Throwable) {
        logInfo("Connection lost " + arg0)
      }
    }

    // Fix: register the callback BEFORE connecting/subscribing so messages that
    // arrive immediately after the subscription cannot be silently dropped.
    client.setCallback(callback)

    // Connect to MqttBroker
    client.connect()

    // Subscribe to Mqtt topic
    client.subscribe(topic)
  }
}
| mkolod/incubator-spark | streaming/src/main/scala/org/apache/spark/streaming/dstream/MQTTInputDStream.scala | Scala | apache-2.0 | 3,580 |
package doc.examples.school
import java.util.concurrent.atomic.AtomicLong
import org.apache.commons.lang3.StringUtils
import org.mentha.tools.archimate.model.Model
import org.mentha.tools.archimate.model._
import org.mentha.tools.archimate.model.nodes.dsl.Business._
import org.mentha.tools.archimate.model.nodes.dsl.Motivation._
import org.mentha.tools.archimate.model.nodes.dsl._
import org.mentha.tools.archimate.model.view._
import org.mentha.tools.archimate.model.view.dsl._
/**
 * Builds an ArchiMate model of a school domain, step by step, using the
 * mentha DSL. Each numbered view adds a little more of the picture, so the
 * sequence of views reads as a narrated walk-through of the model.
 *
 * NOTE(review): the DSL calls (`withName`, `associated with`, etc.) mutate the
 * implicit model as a side effect, and views are numbered by a shared counter —
 * declaration order is significant throughout this object.
 */
object MkSchool {
  // Dependencies every model-building trait needs: the model under construction
  // and the canvas size used for view layout.
  trait base {
    implicit def model: Model
    implicit def space: Size
  }
  // The "learning" storyline: learner, goals, skills, learning/teaching processes.
  trait learning {
    this: base =>
    // shared counter that numbers the generated views sequentially
    private val counter = new AtomicLong()
    // creates a new view whose id and title embed the next zero-padded number
    // (the '#' placeholder in `title` is replaced by that number)
    private def learningView(title: String): View = {
      val num = StringUtils.leftPad(counter.incrementAndGet().toString, 2, '0')
      model.add(s"v-learning-${num}") { new View() withName { title.replace("#", num) } }
    }
    // core elements: the learner role, their motivating goal, and the skills requirement
    val $learner = businessRole withName "Learner"
    val $learnerGoal = driver withName "Overall Learner Goal / Driver"
    val $learnerGoalAssoc = $learnerGoal `associated with` $learner
    val $learnerSkillsReq = requirement withName "Specific skills are required"
    val $learnerSkillsReqAssoc = $learnerSkillsReq `associated with` $learner
    val $learnerSkillReqInfluencesLearnerGoal = $learnerSkillsReq `influences` "+" `in` $learnerGoal
    // View 01: the learner and their goal
    {
      in { learningView("Learning: #. Basics: Learner has a Goal") }
        .add { $learnerGoalAssoc }
        .addNotes { $learnerGoal } { "The Goal (Driver) which motivates the Learner." }
        .placeLikeBefore()
        .resizeNodesToTitle()
        .layout()
    }
    // View 02: skills influence the goal
    {
      in { learningView("Learning: #. Basics: Skills are required") }
        .add { $learnerGoalAssoc }.addNotes { $learnerGoal } { "The motivator." }
        .add { $learnerSkillsReqAssoc }.addNotes { $learnerSkillsReq } { "Skills, Knowledge and Experience could help." }
        .add { $learnerSkillReqInfluencesLearnerGoal }
        .placeLikeBefore()
        .resizeNodesToTitle()
        .layout()
    }
    // the learning process produces the required skills
    val $learning = businessProcess withName "Learning"
    val $learnerAssignedToLearning = $learner `assigned to` $learning
    val $learningInfluencesLearnerSkillsReq = $learning `influences` "+" `in` $learnerSkillsReq
    // View 03: learning as the means to acquire skills
    {
      in { learningView("Learning: #. Learning could help") }
        .add { $learnerAssignedToLearning }
        .add { $learnerGoalAssoc }
        .add { $learnerSkillsReqAssoc }
        .add { $learnerSkillReqInfluencesLearnerGoal }
        .add { $learningInfluencesLearnerSkillsReq }
        .addNotes { $learning } { "Learning produces necessary Skills, Knowledge and Experience." }
        .placeLikeBefore()
        .resizeNodesToTitle()
        .layout()
    }
    // education is the broader process that contains learning
    val $educationProcess = businessProcess withName "Education"
    val $educationProcessComposesLearning = $educationProcess `composes` $learning
    val $learnerAssignedToEduProcess = $learner `assigned to` $educationProcess
    // View 04: education composes learning
    {
      in { learningView("Learning: #. Education does the Learning") }
        .add { $educationProcessComposesLearning }
        .add { $learnerAssignedToLearning }
        .add { $learnerSkillsReqAssoc }
        .add { $learningInfluencesLearnerSkillsReq }
        .placeLikeBefore()
        .resizeNodesToTitle()
        .layout()
    }
    // the teacher role joins the education process
    val $teacher = businessRole withName "Teacher"
    val $teacherAssignedToEduProcess = $teacher `assigned to` $educationProcess
    // View 05: teacher and learner both assigned to education
    {
      in { learningView("Learning: #. Education Overview (1)") }
        .add { $teacherAssignedToEduProcess }
        .add { $learnerAssignedToEduProcess }
        .placeLikeBefore()
        .resizeNodesToTitle()
        .layout()
    }
    // teaching is the teacher's half of education; knowledge flows to learning
    val $teaching = businessProcess withName "Teaching"
    val $educationProcessComposesTeaching = $educationProcess `composes` $teaching
    val $teachingFlowsKnowledgeToLearning = $teaching `flows` "Knowledge" `to` $learning
    val $teacherAssignedToTeaching = $teacher `assigned to` $teaching
    // View 06: teaching and learning inside education, with the knowledge flow
    {
      in { learningView("Learning: #. Education Overview (2)") }
        .add { $educationProcessComposesTeaching }
        .add { $educationProcessComposesLearning }
        .add { $teacherAssignedToTeaching }
        .add { $learnerAssignedToLearning }
        .add { $teachingFlowsKnowledgeToLearning }
        .placeLikeBefore()
        .resizeNodesToTitle()
        .layout()
    }
    // View 07: the full storyline — teaching feeds learning, which satisfies the skills requirement
    {
      in { learningView("Learning: #. Education Overview (3)") }
        .add { $teacherAssignedToTeaching }
        .add { $learnerAssignedToLearning }
        .add { $teachingFlowsKnowledgeToLearning }
        .add { $learnerSkillsReqAssoc }
        .add { $learningInfluencesLearnerSkillsReq }
        .placeLikeBefore()
        .resizeNodesToTitle()
        .layout()
    }
  }
}
| zhuj/mentha-web-archimate | junk/src/test/scala/doc/examples/school/MkSchool.scala | Scala | mit | 4,781 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This code is copied from here:
* https://github.com/apache/spark/blob/master/examples/src/main/scala/org/apache/spark/examples/pythonconverters/AvroConverters.scala
*
* For context and usage, please see: https://databricks.com/blog/2014/09/17/spark-1-1-bringing-hadoop-inputoutput-formats-to-pyspark.html
*/
package io.divolte.spark.pyspark.avro
import java.util.{Collection => JCollection, Map => JMap}
import scala.collection.JavaConversions._
import org.apache.avro.generic.{GenericFixed, IndexedRecord}
import org.apache.avro.mapred.AvroWrapper
import org.apache.avro.Schema
import org.apache.avro.Schema.Type._
import org.apache.spark.api.python.Converter
import org.apache.spark.SparkException
/**
 * Converts Avro-decoded values into plain Java objects (maps, collections,
 * byte arrays and boxed primitives) that PySpark can serialize back to Python.
 */
object AvroConversionUtil extends Serializable {

  /**
   * Recursively converts an Avro value to its plain-Java representation,
   * dispatching on the Avro schema type. A null input maps to null.
   * (Idiom fix: expression form instead of an early `return null`.)
   */
  def fromAvro(obj: Any, schema: Schema): Any = {
    if (obj == null) null
    else schema.getType match {
      case UNION => unpackUnion(obj, schema)
      case ARRAY => unpackArray(obj, schema)
      case FIXED => unpackFixed(obj, schema)
      case MAP => unpackMap(obj, schema)
      case BYTES => unpackBytes(obj)
      case RECORD => unpackRecord(obj)
      case STRING |
           ENUM => obj.toString
      case NULL |
           BOOLEAN |
           DOUBLE |
           FLOAT |
           INT |
           LONG => obj
      case other => throw new SparkException(
        s"Unknown Avro schema type ${other.getName}")
    }
  }

  /** Converts an Avro record to a mutable java.util.HashMap keyed by field name. */
  def unpackRecord(obj: Any): JMap[String, Any] = {
    val map = new java.util.HashMap[String, Any]
    obj match {
      case record: IndexedRecord =>
        // fields are accessed positionally; each value is converted with its field schema
        record.getSchema.getFields.zipWithIndex.foreach { case (f, i) =>
          map.put(f.name, fromAvro(record.get(i), f.schema))
        }
      case other => throw new SparkException(
        s"Unsupported RECORD type ${other.getClass.getName}")
    }
    map
  }

  /** Converts an Avro map, stringifying keys and converting values by the value schema. */
  def unpackMap(obj: Any, schema: Schema): JMap[String, Any] = {
    obj.asInstanceOf[JMap[_, _]].map { case (key, value) =>
      (key.toString, fromAvro(value, schema.getValueType))
    }
  }

  /** Converts an Avro FIXED value to a defensive copy of its bytes. */
  def unpackFixed(obj: Any, schema: Schema): Array[Byte] = {
    unpackBytes(obj.asInstanceOf[GenericFixed].bytes())
  }

  /**
   * Converts an Avro BYTES value (ByteBuffer or byte array) to a defensive
   * copy, so later mutation of the source buffer cannot leak through.
   */
  def unpackBytes(obj: Any): Array[Byte] = {
    val bytes: Array[Byte] = obj match {
      case buf: java.nio.ByteBuffer => buf.array()
      case arr: Array[Byte] => arr
      case other => throw new SparkException(
        s"Unknown BYTES type ${other.getClass.getName}")
    }
    val byteArray = new Array[Byte](bytes.length)
    System.arraycopy(bytes, 0, byteArray, 0, bytes.length)
    byteArray
  }

  /** Converts an Avro ARRAY to a Java collection, converting each element. */
  def unpackArray(obj: Any, schema: Schema): JCollection[Any] = obj match {
    case c: JCollection[_] =>
      c.map(fromAvro(_, schema.getElementType))
    case arr: Array[_] if arr.getClass.getComponentType.isPrimitive =>
      arr.toSeq
    case arr: Array[_] =>
      arr.map(fromAvro(_, schema.getElementType)).toSeq
    case other => throw new SparkException(
      s"Unknown ARRAY type ${other.getClass.getName}")
  }

  /**
   * Converts a UNION value. Only trivial unions (a single type, or a nullable
   * type expressed as [null, T] / [T, null]) are supported.
   */
  def unpackUnion(obj: Any, schema: Schema): Any = {
    schema.getTypes.toList match {
      case List(s) => fromAvro(obj, s)
      case List(n, s) if n.getType == NULL => fromAvro(obj, s)
      case List(s, n) if n.getType == NULL => fromAvro(obj, s)
      case _ => throw new SparkException(
        "Unions may only consist of a concrete type and null")
    }
  }
}
/**
* Implementation of [[org.apache.spark.api.python.Converter]] that converts
* an Avro IndexedRecord (e.g., derived from AvroParquetInputFormat) to a Java Map.
*/
/**
 * Implementation of [[org.apache.spark.api.python.Converter]] that converts
 * an Avro IndexedRecord (e.g., derived from AvroParquetInputFormat) to a Java Map.
 */
class IndexedRecordToJavaConverter extends Converter[IndexedRecord, JMap[String, Any]]{
  /**
   * Converts a record to a java.util.Map; null maps to null.
   * (Fix: removed a dead HashMap local that was allocated and never used,
   * and replaced the early `return` with expression form.)
   */
  override def convert(record: IndexedRecord): JMap[String, Any] = {
    if (record == null) null
    else AvroConversionUtil.unpackRecord(record)
  }
}
/**
* Implementation of [[org.apache.spark.api.python.Converter]] that converts
* an Avro Record wrapped in an AvroKey (or AvroValue) to a Java Map. It tries
* to work with all 3 Avro data mappings (Generic, Specific and Reflect).
*/
/**
 * Implementation of [[org.apache.spark.api.python.Converter]] that converts
 * an Avro Record wrapped in an AvroKey (or AvroValue) to a Java Map. It tries
 * to work with all 3 Avro data mappings (Generic, Specific and Reflect).
 */
class AvroWrapperToJavaConverter extends Converter[Any, Any] {
  /**
   * Unwraps the AvroWrapper and converts its datum; null (wrapper or datum)
   * maps to null. (Idiom fix: expression form instead of an early `return`.)
   */
  override def convert(obj: Any): Any = {
    if (obj == null) null
    else obj.asInstanceOf[AvroWrapper[_]].datum() match {
      case null => null
      case record: IndexedRecord => AvroConversionUtil.unpackRecord(record)
      case other => throw new SparkException(
        s"Unsupported top-level Avro data type ${other.getClass.getName}")
    }
  }
}
| divolte/divolte-spark | src/main/scala/io/divolte/spark/pyspark/avro/AvroConversionUtil.scala | Scala | apache-2.0 | 5,345 |
package com.github.puscala
import java.io.{ByteArrayOutputStream, DataOutputStream, IOException}
import java.{util => JUtil}
/** Contract for a notification that can be sent to the APNS gateway. */
trait ApnsNotification {
  /** Binary device token identifying the target device. */
  def getDeviceToken: Array[Byte]
  /** Binary payload (the JSON message body) to deliver. */
  def getPayload: Array[Byte]
  /** Notification identifier, echoed back by APNS in error responses. */
  def getIdentifier: Int
  /** Expiry timestamp after which APNS discards the notification. */
  def getExpiry: Int
}
// Immutable holder for the four wire-level fields of an enhanced APNS
// notification: id, expiry, raw device token and raw payload bytes.
sealed case class EnhancedNotificationFields(identifier: Int, expiry: Int, deviceToken: Array[Byte], payload: Array[Byte])
/**
* APNS notification to be sent to Apple server
* @param fields instance fields
*/
class EnhancedApnsNotification(fields: EnhancedNotificationFields) extends ApnsNotification {
  // Wire-format command byte identifying the "enhanced" notification frame.
  private val command: Byte = 1
  private val identifier = fields.identifier
  private val expiry = fields.expiry
  private val deviceTokenBytes = fields.deviceToken
  private val payloadBytes = fields.payload
  // marshallEnhanced returns a freshly allocated array, so the defensive
  // clone() the previous version performed here was redundant.
  private lazy val marshall = marshallEnhanced(command, identifier, expiry, deviceTokenBytes, payloadBytes)
  /**
   * constructs an instance of ApnsNotification
   * message encodes the payload with UTF-8 encoding
   * @param identifier notification id
   * @param expiry expire time
   * @param deviceToken hex-encoded device token to be sent
   * @param payload payload message to be sent
   */
  def this(identifier: Int, expiry: Int, deviceToken: String, payload: String) =
    this(EnhancedNotificationFields(identifier, expiry, Utilities.decodeHex(deviceToken), Utilities.toUTF8Bytes(payload)))
  /**
   * constructs an instance of ApnsNotification
   * @param identifier notification id
   * @param expiry expire time
   * @param deviceTokenBytes binary representation of the device token
   * @param payloadBytes binary representation of the payload message
   */
  def this(identifier: Int, expiry: Int, deviceTokenBytes: Array[Byte], payloadBytes: Array[Byte]) =
    this(EnhancedNotificationFields(identifier, expiry, deviceTokenBytes, payloadBytes))
  /**
   * Return a defensive copy of the binary device token.
   */
  def getDeviceToken: Array[Byte] = Utilities.copyOf(deviceTokenBytes)
  /**
   * Return a defensive copy of the binary payload message.
   */
  def getPayload: Array[Byte] = Utilities.copyOf(payloadBytes)
  /**
   * Return notification id.
   */
  def getIdentifier: Int = identifier
  /**
   * Return expiry time.
   */
  def getExpiry: Int = expiry
  /**
   * Return the length of the message in bytes as it is encoded on the wire:
   * command(1) + identifier(4) + expiry(4) + token length(2) + token +
   * payload length(2) + payload.
   */
  def length: Int = {
    val l = 1 + 4 + 4 + 2 + deviceTokenBytes.length + 2 + payloadBytes.length
    val marshalledLength = marshall.length
    assert(marshalledLength == l)
    l
  }
  override def hashCode: Int =
    21 + 31 * identifier + 31 * expiry + 31 * JUtil.Arrays.hashCode(deviceTokenBytes) + 31 * JUtil.Arrays.hashCode(payloadBytes)
  override def equals(obj: Any): Boolean = obj match {
    case o: EnhancedApnsNotification =>
      identifier == o.identifier &&
      expiry == o.expiry &&
      JUtil.Arrays.equals(deviceTokenBytes, o.deviceTokenBytes) &&
      JUtil.Arrays.equals(payloadBytes, o.payloadBytes)
    case _ => false
  }
  override def toString: String = {
    val payloadString = try {
      new String(payloadBytes, "UTF-8")
    } catch {
      // Narrowed from Throwable so fatal errors (e.g. OutOfMemoryError)
      // are never swallowed; "UTF-8" is always supported, so in practice
      // this branch is unreachable.
      case _: Exception => "???"
    }
    // Fixed: the closing parenthesis of "Message(...)" was missing.
    s"Message(id=$identifier, deviceToken=${Utilities.encodeHex(deviceTokenBytes)}, payload=$payloadString)"
  }
  /**
   * Return the binary representation of the message as expected by the APNS server.
   * @param command notification command
   * @param identifier notification id
   * @param expiryTime expire time
   * @param tokenBytes binary representation of the device token
   * @param payloadBytes binary representation of the payload message
   * @return freshly allocated byte array in APNS "enhanced" wire format
   */
  private def marshallEnhanced(
    command: Byte,
    identifier: Int,
    expiryTime: Int,
    tokenBytes: Array[Byte],
    payloadBytes: Array[Byte]
  ): Array[Byte] = {
    val baos = new ByteArrayOutputStream()
    val dos = new DataOutputStream(baos)
    try {
      dos.writeByte(command)
      dos.writeInt(identifier)
      // Fixed: previously wrote the enclosing class field `expiry`, silently
      // ignoring the `expiryTime` parameter.
      dos.writeInt(expiryTime)
      dos.writeShort(tokenBytes.length)
      dos.write(tokenBytes)
      dos.writeShort(payloadBytes.length)
      dos.write(payloadBytes)
      baos.toByteArray
    } catch {
      // In-memory streams never raise IOException; if one ever occurs it is a
      // programming error — keep the cause for diagnostics.
      case e: IOException => throw new AssertionError(e)
    } finally {
      dos.close()
      baos.close()
    }
  }
}
| uriborn/puscala | src/main/scala/com/github/puscala/ApnsNotification.scala | Scala | mit | 4,452 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import java.util
import java.util.Locale
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.util.Random
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalog.v2._
import org.apache.spark.sql.catalog.v2.expressions.{FieldReference, IdentityTransform, Transform}
import org.apache.spark.sql.catalyst._
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.encoders.OuterScopes
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.SubExprUtils._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.expressions.objects._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.logical.sql._
import org.apache.spark.sql.catalyst.rules._
import org.apache.spark.sql.catalyst.trees.TreeNodeRef
import org.apache.spark.sql.catalyst.util.toPrettySQL
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.{PartitionOverwriteMode, StoreAssignmentPolicy}
import org.apache.spark.sql.sources.v2.Table
import org.apache.spark.sql.sources.v2.internal.V1Table
import org.apache.spark.sql.types._
import org.apache.spark.sql.util.CaseInsensitiveStringMap
/**
* A trivial [[Analyzer]] with a dummy [[SessionCatalog]] and [[EmptyFunctionRegistry]].
* Used for testing when all relations are already filled in and the analyzer needs only
* to resolve attribute references.
*/
object SimpleAnalyzer extends Analyzer(
  new SessionCatalog(
    new InMemoryCatalog,
    EmptyFunctionRegistry,
    // Case-sensitive resolution keeps test expectations deterministic.
    new SQLConf().copy(SQLConf.CASE_SENSITIVE -> true)) {
    // No-op: this test-only catalog never needs real databases created.
    override def createDatabase(dbDefinition: CatalogDatabase, ignoreIfExists: Boolean) {}
  },
  new SQLConf().copy(SQLConf.CASE_SENSITIVE -> true))
// Placeholder v2 session catalog used when no real one is configured.
// Table lookup reports "no such table" (so resolution can fall through to
// other mechanisms); every other operation is unsupported.
object FakeV2SessionCatalog extends TableCatalog {
  // Single point for signalling that an operation is not implemented here.
  private def unsupported() = throw new UnsupportedOperationException
  override def listTables(namespace: Array[String]): Array[Identifier] = unsupported()
  override def loadTable(ident: Identifier): Table = {
    throw new NoSuchTableException(ident.toString)
  }
  override def createTable(
      ident: Identifier,
      schema: StructType,
      partitions: Array[Transform],
      properties: util.Map[String, String]): Table = unsupported()
  override def alterTable(ident: Identifier, changes: TableChange*): Table = unsupported()
  override def dropTable(ident: Identifier): Boolean = unsupported()
  override def renameTable(oldIdent: Identifier, newIdent: Identifier): Unit = unsupported()
  override def initialize(name: String, options: CaseInsensitiveStringMap): Unit = unsupported()
  override def name(): String = unsupported()
}
/**
* Provides a way to keep state during the analysis, this enables us to decouple the concerns
* of analysis environment from the catalog.
* The state that is kept here is per-query.
*
* Note this is thread local.
*
* @param defaultDatabase The default database used in the view resolution, this overrules the
* current catalog database.
* @param nestedViewDepth The nested depth in the view resolution, this enables us to limit the
* depth of nested views.
*/
case class AnalysisContext(
    // Database used to resolve relations inside a view body; overrides the
    // session's current database while that view is being resolved.
    defaultDatabase: Option[String] = None,
    // Current depth of nested view resolution, checked against the
    // configured maximum to abort runaway nesting.
    nestedViewDepth: Int = 0)
object AnalysisContext {
  // Per-thread storage: each query thread resolves with an independent context.
  private val value = new ThreadLocal[AnalysisContext]() {
    override def initialValue: AnalysisContext = AnalysisContext()
  }
  def get: AnalysisContext = value.get()
  def reset(): Unit = value.remove()
  private def set(context: AnalysisContext): Unit = value.set(context)
  /**
   * Evaluates `f` one view-nesting level deeper, with `database` as the view's
   * default database, restoring the previous context afterwards.
   */
  def withAnalysisContext[A](database: Option[String])(f: => A): A = {
    val saved = value.get()
    set(AnalysisContext(defaultDatabase = database, nestedViewDepth = saved.nestedViewDepth + 1))
    try {
      f
    } finally {
      set(saved)
    }
  }
}
/**
* Provides a logical query plan analyzer, which translates [[UnresolvedAttribute]]s and
* [[UnresolvedRelation]]s into fully typed objects using information in a [[SessionCatalog]].
*/
class Analyzer(
catalog: SessionCatalog,
v2SessionCatalog: TableCatalog,
conf: SQLConf,
maxIterations: Int)
extends RuleExecutor[LogicalPlan] with CheckAnalysis with LookupCatalog {
  // Only for tests: substitutes a stub v2 session catalog.
  def this(catalog: SessionCatalog, conf: SQLConf) = {
    this(catalog, FakeV2SessionCatalog, conf, conf.optimizerMaxIterations)
  }
  // Convenience constructor deriving the iteration cap from the conf.
  def this(catalog: SessionCatalog, v2SessionCatalog: TableCatalog, conf: SQLConf) = {
    this(catalog, v2SessionCatalog, conf, conf.optimizerMaxIterations)
  }
  // Catalog manager tracking the current catalog/namespace for v2 resolution.
  override val catalogManager: CatalogManager = new CatalogManager(conf, v2SessionCatalog)
def executeAndCheck(plan: LogicalPlan, tracker: QueryPlanningTracker): LogicalPlan = {
AnalysisHelper.markInAnalyzer {
val analyzed = executeAndTrack(plan, tracker)
try {
checkAnalysis(analyzed)
analyzed
} catch {
case e: AnalysisException =>
val ae = new AnalysisException(e.message, e.line, e.startPosition, Option(analyzed))
ae.setStackTrace(e.getStackTrace)
throw ae
}
}
}
override def execute(plan: LogicalPlan): LogicalPlan = {
AnalysisContext.reset()
try {
executeSameContext(plan)
} finally {
AnalysisContext.reset()
}
}
private def executeSameContext(plan: LogicalPlan): LogicalPlan = super.execute(plan)
  // Name-matching function honoring the session's case-sensitivity setting.
  def resolver: Resolver = conf.resolver
  // Shared fixed-point strategy used by the iterative rule batches below.
  protected val fixedPoint = FixedPoint(maxIterations)
  /**
   * Override to provide additional rules for the "Resolution" batch.
   */
  val extendedResolutionRules: Seq[Rule[LogicalPlan]] = Nil
  /**
   * Override to provide rules to do post-hoc resolution. Note that these rules will be executed
   * in an individual batch. This batch is to run right after the normal resolution batch and
   * execute its rules in one pass.
   */
  val postHocResolutionRules: Seq[Rule[LogicalPlan]] = Nil
  // Ordered rule batches. Batch order is semantic: substitution must precede
  // resolution, post-hoc rules run once after resolution, and cleanup last.
  lazy val batches: Seq[Batch] = Seq(
    Batch("Hints", fixedPoint,
      new ResolveHints.ResolveJoinStrategyHints(conf),
      ResolveHints.ResolveCoalesceHints,
      new ResolveHints.RemoveAllHints(conf)),
    Batch("Simple Sanity Check", Once,
      LookupFunctions),
    Batch("Substitution", fixedPoint,
      CTESubstitution,
      WindowsSubstitution,
      EliminateUnions,
      new SubstituteUnresolvedOrdinals(conf)),
    // The main resolution batch; rule order within the list matters.
    Batch("Resolution", fixedPoint,
      ResolveTableValuedFunctions ::
      ResolveAlterTable ::
      ResolveDescribeTable ::
      ResolveInsertInto ::
      ResolveTables ::
      ResolveRelations ::
      ResolveReferences ::
      ResolveCreateNamedStruct ::
      ResolveDeserializer ::
      ResolveNewInstance ::
      ResolveUpCast ::
      ResolveGroupingAnalytics ::
      ResolvePivot ::
      ResolveOrdinalInOrderByAndGroupBy ::
      ResolveAggAliasInGroupBy ::
      ResolveMissingReferences ::
      ExtractGenerator ::
      ResolveGenerate ::
      ResolveFunctions ::
      ResolveAliases ::
      ResolveSubquery ::
      ResolveSubqueryColumnAliases ::
      ResolveWindowOrder ::
      ResolveWindowFrame ::
      ResolveNaturalAndUsingJoin ::
      ResolveOutputRelation ::
      ExtractWindowExpressions ::
      GlobalAggregates ::
      ResolveAggregateFunctions ::
      TimeWindowing ::
      ResolveInlineTables(conf) ::
      ResolveHigherOrderFunctions(catalog) ::
      ResolveLambdaVariables(conf) ::
      ResolveTimeZone(conf) ::
      ResolveRandomSeed ::
      TypeCoercion.typeCoercionRules(conf) ++
      extendedResolutionRules : _*),
    Batch("Post-Hoc Resolution", Once, postHocResolutionRules: _*),
    Batch("Nondeterministic", Once,
      PullOutNondeterministic),
    Batch("UDF", Once,
      HandleNullInputsForUDF),
    Batch("UpdateNullability", Once,
      UpdateAttributeNullability),
    Batch("Subquery", Once,
      UpdateOuterReferences),
    Batch("Cleanup", fixedPoint,
      CleanupAliases)
  )
/**
* Substitute child plan with WindowSpecDefinitions.
*/
object WindowsSubstitution extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
// Lookup WindowSpecDefinitions. This rule works with unresolved children.
case WithWindowDefinition(windowDefinitions, child) => child.resolveExpressions {
case UnresolvedWindowExpression(c, WindowSpecReference(windowName)) =>
val errorMessage =
s"Window specification $windowName is not defined in the WINDOW clause."
val windowSpecDefinition =
windowDefinitions.getOrElse(windowName, failAnalysis(errorMessage))
WindowExpression(c, windowSpecDefinition)
}
}
}
/**
* Replaces [[UnresolvedAlias]]s with concrete aliases.
*/
  object ResolveAliases extends Rule[LogicalPlan] {
    // Turns each UnresolvedAlias into a concrete NamedExpression. The case
    // order below is semantic: named expressions pass through, generators get
    // MultiAlias, unresolved children are left for a later pass, and
    // everything else gets an Alias named from its pretty-printed SQL.
    private def assignAliases(exprs: Seq[NamedExpression]) = {
      exprs.map(_.transformUp { case u @ UnresolvedAlias(child, optGenAliasFunc) =>
          child match {
            case ne: NamedExpression => ne
            case go @ GeneratorOuter(g: Generator) if g.resolved => MultiAlias(go, Nil)
            case e if !e.resolved => u
            case g: Generator => MultiAlias(g, Nil)
            case c @ Cast(ne: NamedExpression, _, _) => Alias(c, ne.name)()
            case e: ExtractValue => Alias(e, toPrettySQL(e))()
            case e if optGenAliasFunc.isDefined =>
              Alias(child, optGenAliasFunc.get.apply(e))()
            case e => Alias(e, toPrettySQL(e))()
          }
        }
      ).asInstanceOf[Seq[NamedExpression]]
    }
    // True if any expression still contains an UnresolvedAlias node.
    private def hasUnresolvedAlias(exprs: Seq[NamedExpression]) =
      exprs.exists(_.find(_.isInstanceOf[UnresolvedAlias]).isDefined)
    def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
      case Aggregate(groups, aggs, child) if child.resolved && hasUnresolvedAlias(aggs) =>
        Aggregate(groups, assignAliases(aggs), child)
      case g: GroupingSets if g.child.resolved && hasUnresolvedAlias(g.aggregations) =>
        g.copy(aggregations = assignAliases(g.aggregations))
      case Pivot(groupByOpt, pivotColumn, pivotValues, aggregates, child)
        if child.resolved && groupByOpt.isDefined && hasUnresolvedAlias(groupByOpt.get) =>
        Pivot(Some(assignAliases(groupByOpt.get)), pivotColumn, pivotValues, aggregates, child)
      case Project(projectList, child) if child.resolved && hasUnresolvedAlias(projectList) =>
        Project(assignAliases(projectList), child)
    }
  }
  /**
   * Rewrites CUBE/ROLLUP/GROUPING SETS aggregates into Expand + Aggregate, and
   * resolves grouping()/grouping_id() calls against the expanded grouping id.
   */
  object ResolveGroupingAnalytics extends Rule[LogicalPlan] {
    /*
     * GROUP BY a, b, c WITH ROLLUP
     * is equivalent to
     * GROUP BY a, b, c GROUPING SETS ( (a, b, c), (a, b), (a), ( ) ).
     * Group Count: N + 1 (N is the number of group expressions)
     *
     * We need to get all of its subsets for the rule described above, the subset is
     * represented as sequence of expressions.
     */
    def rollupExprs(exprs: Seq[Expression]): Seq[Seq[Expression]] = exprs.inits.toIndexedSeq
    /*
     * GROUP BY a, b, c WITH CUBE
     * is equivalent to
     * GROUP BY a, b, c GROUPING SETS ( (a, b, c), (a, b), (b, c), (a, c), (a), (b), (c), ( ) ).
     * Group Count: 2 ^ N (N is the number of group expressions)
     *
     * We need to get all of its subsets for a given GROUPBY expression, the subsets are
     * represented as sequence of expressions.
     */
    def cubeExprs(exprs: Seq[Expression]): Seq[Seq[Expression]] = {
      // `cubeExprs0` is recursive and returns a lazy Stream. Here we call `toIndexedSeq` to
      // materialize it and avoid serialization problems later on.
      cubeExprs0(exprs).toIndexedSeq
    }
    // Recursive powerset construction; each element either appears or not.
    def cubeExprs0(exprs: Seq[Expression]): Seq[Seq[Expression]] = exprs.toList match {
      case x :: xs =>
        val initial = cubeExprs0(xs)
        initial.map(x +: _) ++ initial
      case Nil =>
        Seq(Seq.empty)
    }
    // True if the expression tree contains grouping() or grouping_id().
    private[analysis] def hasGroupingFunction(e: Expression): Boolean = {
      e.collectFirst {
        case g: Grouping => g
        case g: GroupingID => g
      }.isDefined
    }
    // Rewrites grouping()/grouping_id() calls in `expr` into bit operations on
    // the grouping id column `gid`, validating they refer to actual group-by
    // expressions.
    private def replaceGroupingFunc(
        expr: Expression,
        groupByExprs: Seq[Expression],
        gid: Expression): Expression = {
      expr transform {
        case e: GroupingID =>
          if (e.groupByExprs.isEmpty ||
              e.groupByExprs.map(_.canonicalized) == groupByExprs.map(_.canonicalized)) {
            Alias(gid, toPrettySQL(e))()
          } else {
            throw new AnalysisException(
              s"Columns of grouping_id (${e.groupByExprs.mkString(",")}) does not match " +
                s"grouping columns (${groupByExprs.mkString(",")})")
          }
        case e @ Grouping(col: Expression) =>
          val idx = groupByExprs.indexWhere(_.semanticEquals(col))
          if (idx >= 0) {
            // grouping(col) is bit (N-1-idx) of the grouping id.
            Alias(Cast(BitwiseAnd(ShiftRight(gid, Literal(groupByExprs.length - 1 - idx)),
              Literal(1)), ByteType), toPrettySQL(e))()
          } else {
            throw new AnalysisException(s"Column of grouping ($col) can't be found " +
              s"in grouping columns ${groupByExprs.mkString(",")}")
          }
      }
    }
    /*
     * Create new alias for all group by expressions for `Expand` operator.
     */
    private def constructGroupByAlias(groupByExprs: Seq[Expression]): Seq[Alias] = {
      groupByExprs.map {
        case e: NamedExpression => Alias(e, e.name)()
        case other => Alias(other, other.toString)()
      }
    }
    /*
     * Construct [[Expand]] operator with grouping sets.
     */
    private def constructExpand(
        selectedGroupByExprs: Seq[Seq[Expression]],
        child: LogicalPlan,
        groupByAliases: Seq[Alias],
        gid: Attribute): LogicalPlan = {
      // Change the nullability of group by aliases if necessary. For example, if we have
      // GROUPING SETS ((a,b), a), we do not need to change the nullability of a, but we
      // should change the nullabilty of b to be TRUE.
      // TODO: For Cube/Rollup just set nullability to be `true`.
      val expandedAttributes = groupByAliases.map { alias =>
        if (selectedGroupByExprs.exists(!_.contains(alias.child))) {
          alias.toAttribute.withNullability(true)
        } else {
          alias.toAttribute
        }
      }
      val groupingSetsAttributes = selectedGroupByExprs.map { groupingSetExprs =>
        groupingSetExprs.map { expr =>
          val alias = groupByAliases.find(_.child.semanticEquals(expr)).getOrElse(
            failAnalysis(s"$expr doesn't show up in the GROUP BY list $groupByAliases"))
          // Map alias to expanded attribute.
          expandedAttributes.find(_.semanticEquals(alias.toAttribute)).getOrElse(
            alias.toAttribute)
        }
      }
      Expand(groupingSetsAttributes, groupByAliases, expandedAttributes, gid, child)
    }
    /*
     * Construct new aggregate expressions by replacing grouping functions.
     */
    private def constructAggregateExprs(
        groupByExprs: Seq[Expression],
        aggregations: Seq[NamedExpression],
        groupByAliases: Seq[Alias],
        groupingAttrs: Seq[Expression],
        gid: Attribute): Seq[NamedExpression] = aggregations.map {
      // collect all the found AggregateExpression, so we can check an expression is part of
      // any AggregateExpression or not.
      val aggsBuffer = ArrayBuffer[Expression]()
      // Returns whether the expression belongs to any expressions in `aggsBuffer` or not.
      def isPartOfAggregation(e: Expression): Boolean = {
        aggsBuffer.exists(a => a.find(_ eq e).isDefined)
      }
      replaceGroupingFunc(_, groupByExprs, gid).transformDown {
        // AggregateExpression should be computed on the unmodified value of its argument
        // expressions, so we should not replace any references to grouping expression
        // inside it.
        case e: AggregateExpression =>
          aggsBuffer += e
          e
        case e if isPartOfAggregation(e) => e
        case e =>
          // Replace expression by expand output attribute.
          val index = groupByAliases.indexWhere(_.child.semanticEquals(e))
          if (index == -1) {
            e
          } else {
            groupingAttrs(index)
          }
      }.asInstanceOf[NamedExpression]
    }
    /*
     * Construct [[Aggregate]] operator from Cube/Rollup/GroupingSets.
     */
    private def constructAggregate(
        selectedGroupByExprs: Seq[Seq[Expression]],
        groupByExprs: Seq[Expression],
        aggregationExprs: Seq[NamedExpression],
        child: LogicalPlan): LogicalPlan = {
      val gid = AttributeReference(VirtualColumn.groupingIdName, IntegerType, false)()
      // In case of ANSI-SQL compliant syntax for GROUPING SETS, groupByExprs is optional and
      // can be null. In such case, we derive the groupByExprs from the user supplied values for
      // grouping sets.
      val finalGroupByExpressions = if (groupByExprs == Nil) {
        selectedGroupByExprs.flatten.foldLeft(Seq.empty[Expression]) { (result, currentExpr) =>
          // Only unique expressions are included in the group by expressions and is determined
          // based on their semantic equality. Example. grouping sets ((a * b), (b * a)) results
          // in grouping expression (a * b)
          if (result.find(_.semanticEquals(currentExpr)).isDefined) {
            result
          } else {
            result :+ currentExpr
          }
        }
      } else {
        groupByExprs
      }
      // Expand works by setting grouping expressions to null as determined by the
      // `selectedGroupByExprs`. To prevent these null values from being used in an aggregate
      // instead of the original value we need to create new aliases for all group by expressions
      // that will only be used for the intended purpose.
      val groupByAliases = constructGroupByAlias(finalGroupByExpressions)
      val expand = constructExpand(selectedGroupByExprs, child, groupByAliases, gid)
      val groupingAttrs = expand.output.drop(child.output.length)
      val aggregations = constructAggregateExprs(
        finalGroupByExpressions, aggregationExprs, groupByAliases, groupingAttrs, gid)
      Aggregate(groupingAttrs, aggregations, expand)
    }
    // Extracts the grouping expressions of the nearest Aggregate descendant,
    // failing if the plan does not come from GroupingSets/Cube/Rollup.
    private def findGroupingExprs(plan: LogicalPlan): Seq[Expression] = {
      plan.collectFirst {
        case a: Aggregate =>
          // this Aggregate should have grouping id as the last grouping key.
          val gid = a.groupingExpressions.last
          if (!gid.isInstanceOf[AttributeReference]
            || gid.asInstanceOf[AttributeReference].name != VirtualColumn.groupingIdName) {
            failAnalysis(s"grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup")
          }
          a.groupingExpressions.take(a.groupingExpressions.length - 1)
      }.getOrElse {
        failAnalysis(s"grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup")
      }
    }
    // This require transformUp to replace grouping()/grouping_id() in resolved Filter/Sort
    def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperatorsUp {
      case a if !a.childrenResolved => a // be sure all of the children are resolved.
      // Ensure group by expressions and aggregate expressions have been resolved.
      case Aggregate(Seq(c @ Cube(groupByExprs)), aggregateExpressions, child)
        if (groupByExprs ++ aggregateExpressions).forall(_.resolved) =>
        constructAggregate(cubeExprs(groupByExprs), groupByExprs, aggregateExpressions, child)
      case Aggregate(Seq(r @ Rollup(groupByExprs)), aggregateExpressions, child)
        if (groupByExprs ++ aggregateExpressions).forall(_.resolved) =>
        constructAggregate(rollupExprs(groupByExprs), groupByExprs, aggregateExpressions, child)
      // Ensure all the expressions have been resolved.
      case x: GroupingSets if x.expressions.forall(_.resolved) =>
        constructAggregate(x.selectedGroupByExprs, x.groupByExprs, x.aggregations, x.child)
      // We should make sure all expressions in condition have been resolved.
      case f @ Filter(cond, child) if hasGroupingFunction(cond) && cond.resolved =>
        val groupingExprs = findGroupingExprs(child)
        // The unresolved grouping id will be resolved by ResolveMissingReferences
        val newCond = replaceGroupingFunc(cond, groupingExprs, VirtualColumn.groupingIdAttribute)
        f.copy(condition = newCond)
      // We should make sure all [[SortOrder]]s have been resolved.
      case s @ Sort(order, _, child)
        if order.exists(hasGroupingFunction) && order.forall(_.resolved) =>
        val groupingExprs = findGroupingExprs(child)
        val gid = VirtualColumn.groupingIdAttribute
        // The unresolved grouping id will be resolved by ResolveMissingReferences
        val newOrder = order.map(replaceGroupingFunc(_, groupingExprs, gid).asInstanceOf[SortOrder])
        s.copy(order = newOrder)
    }
  }
  /**
   * Rewrites a resolved [[Pivot]] operator into Aggregate (and optionally
   * Project) plans. When every aggregate's type is supported by PivotFirst a
   * faster two-phase aggregation is used; otherwise each aggregate is
   * duplicated per pivot value behind a null-preserving If.
   */
  object ResolvePivot extends Rule[LogicalPlan] {
    def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
      // Wait until the child, aggregates, optional group-by, pivot column and
      // pivot values are all resolved.
      case p: Pivot if !p.childrenResolved || !p.aggregates.forall(_.resolved)
        || (p.groupByExprsOpt.isDefined && !p.groupByExprsOpt.get.forall(_.resolved))
        || !p.pivotColumn.resolved || !p.pivotValues.forall(_.resolved) => p
      case Pivot(groupByExprsOpt, pivotColumn, pivotValues, aggregates, child) =>
        if (!RowOrdering.isOrderable(pivotColumn.dataType)) {
          throw new AnalysisException(
            s"Invalid pivot column '${pivotColumn}'. Pivot columns must be comparable.")
        }
        // Check all aggregate expressions.
        aggregates.foreach(checkValidAggregateExpression)
        // Check all pivot values are literal and match pivot column data type.
        val evalPivotValues = pivotValues.map { value =>
          val foldable = value match {
            case Alias(v, _) => v.foldable
            case _ => value.foldable
          }
          if (!foldable) {
            throw new AnalysisException(
              s"Literal expressions required for pivot values, found '$value'")
          }
          if (!Cast.canCast(value.dataType, pivotColumn.dataType)) {
            throw new AnalysisException(s"Invalid pivot value '$value': " +
              s"value data type ${value.dataType.simpleString} does not match " +
              s"pivot column data type ${pivotColumn.dataType.catalogString}")
          }
          Cast(value, pivotColumn.dataType, Some(conf.sessionLocalTimeZone)).eval(EmptyRow)
        }
        // Group-by expressions coming from SQL are implicit and need to be deduced.
        val groupByExprs = groupByExprsOpt.getOrElse {
          val pivotColAndAggRefs = pivotColumn.references ++ AttributeSet(aggregates)
          child.output.filterNot(pivotColAndAggRefs.contains)
        }
        val singleAgg = aggregates.size == 1
        // Output column naming: just the pivot value when there is a single
        // aggregate, otherwise "<value>_<aggregate name>".
        def outputName(value: Expression, aggregate: Expression): String = {
          val stringValue = value match {
            case n: NamedExpression => n.name
            case _ =>
              val utf8Value =
                Cast(value, StringType, Some(conf.sessionLocalTimeZone)).eval(EmptyRow)
              Option(utf8Value).map(_.toString).getOrElse("null")
          }
          if (singleAgg) {
            stringValue
          } else {
            val suffix = aggregate match {
              case n: NamedExpression => n.name
              case _ => toPrettySQL(aggregate)
            }
            stringValue + "_" + suffix
          }
        }
        if (aggregates.forall(a => PivotFirst.supportsDataType(a.dataType))) {
          // Since evaluating |pivotValues| if statements for each input row can get slow this is an
          // alternate plan that instead uses two steps of aggregation.
          val namedAggExps: Seq[NamedExpression] = aggregates.map(a => Alias(a, a.sql)())
          val namedPivotCol = pivotColumn match {
            case n: NamedExpression => n
            case _ => Alias(pivotColumn, "__pivot_col")()
          }
          val bigGroup = groupByExprs :+ namedPivotCol
          val firstAgg = Aggregate(bigGroup, bigGroup ++ namedAggExps, child)
          val pivotAggs = namedAggExps.map { a =>
            Alias(PivotFirst(namedPivotCol.toAttribute, a.toAttribute, evalPivotValues)
              .toAggregateExpression()
            , "__pivot_" + a.sql)()
          }
          val groupByExprsAttr = groupByExprs.map(_.toAttribute)
          val secondAgg = Aggregate(groupByExprsAttr, groupByExprsAttr ++ pivotAggs, firstAgg)
          val pivotAggAttribute = pivotAggs.map(_.toAttribute)
          val pivotOutputs = pivotValues.zipWithIndex.flatMap { case (value, i) =>
            aggregates.zip(pivotAggAttribute).map { case (aggregate, pivotAtt) =>
              Alias(ExtractValue(pivotAtt, Literal(i), resolver), outputName(value, aggregate))()
            }
          }
          Project(groupByExprsAttr ++ pivotOutputs, secondAgg)
        } else {
          val pivotAggregates: Seq[NamedExpression] = pivotValues.flatMap { value =>
            def ifExpr(e: Expression) = {
              If(
                EqualNullSafe(
                  pivotColumn,
                  Cast(value, pivotColumn.dataType, Some(conf.sessionLocalTimeZone))),
                e, Literal(null))
            }
            aggregates.map { aggregate =>
              val filteredAggregate = aggregate.transformDown {
                // Assumption is the aggregate function ignores nulls. This is true for all current
                // AggregateFunction's with the exception of First and Last in their default mode
                // (which we handle) and possibly some Hive UDAF's.
                case First(expr, _) =>
                  First(ifExpr(expr), Literal(true))
                case Last(expr, _) =>
                  Last(ifExpr(expr), Literal(true))
                case a: AggregateFunction =>
                  a.withNewChildren(a.children.map(ifExpr))
              }.transform {
                // We are duplicating aggregates that are now computing a different value for each
                // pivot value.
                // TODO: Don't construct the physical container until after analysis.
                case ae: AggregateExpression => ae.copy(resultId = NamedExpression.newExprId)
              }
              Alias(filteredAggregate, outputName(value, aggregate))()
            }
          }
          Aggregate(groupByExprs, groupByExprs ++ pivotAggregates, child)
        }
    }
    // Support any aggregate expression that can appear in an Aggregate plan except Pandas UDF.
    // TODO: Support Pandas UDF.
    private def checkValidAggregateExpression(expr: Expression): Unit = expr match {
      case _: AggregateExpression => // OK and leave the argument check to CheckAnalysis.
      case expr: PythonUDF if PythonUDF.isGroupedAggPandasUDF(expr) =>
        failAnalysis("Pandas UDF aggregate expressions are currently not supported in pivot.")
      case e: Attribute =>
        failAnalysis(
          s"Aggregate expression required for pivot, but '${e.sql}' " +
            s"did not appear in any aggregate function.")
      case e => e.children.foreach(checkValidAggregateExpression)
    }
  }
/**
* Resolve table relations with concrete relations from v2 catalog.
*
* [[ResolveRelations]] still resolves v1 tables.
*/
object ResolveTables extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
case u: UnresolvedRelation =>
val v2TableOpt = lookupV2Relation(u.multipartIdentifier) match {
case scala.Left((_, _, tableOpt)) => tableOpt
case scala.Right(tableOpt) => tableOpt
}
v2TableOpt.map(DataSourceV2Relation.create).getOrElse(u)
}
}
/**
* Replaces [[UnresolvedRelation]]s with concrete relations from the catalog.
*/
object ResolveRelations extends Rule[LogicalPlan] {
// If the unresolved relation is running directly on files, we just return the original
// UnresolvedRelation, the plan will get resolved later. Else we look up the table from catalog
// and change the default database name(in AnalysisContext) if it is a view.
// We usually look up a table from the default database if the table identifier has an empty
// database part, for a view the default database should be the currentDb when the view was
// created. When the case comes to resolving a nested view, the view may have different default
// database with that the referenced view has, so we need to use
// `AnalysisContext.defaultDatabase` to track the current default database.
// When the relation we resolve is a view, we fetch the view.desc(which is a CatalogTable), and
// then set the value of `CatalogTable.viewDefaultDatabase` to
// `AnalysisContext.defaultDatabase`, we look up the relations that the view references using
// the default database.
// For example:
// |- view1 (defaultDatabase = db1)
// |- operator
// |- table2 (defaultDatabase = db1)
// |- view2 (defaultDatabase = db2)
// |- view3 (defaultDatabase = db3)
// |- view4 (defaultDatabase = db4)
// In this case, the view `view1` is a nested view, it directly references `table2`, `view2`
// and `view4`, the view `view2` references `view3`. On resolving the table, we look up the
// relations `table2`, `view2`, `view4` using the default database `db1`, and look up the
// relation `view3` using the default database `db2`.
//
// Note this is compatible with the views defined by older versions of Spark(before 2.2), which
// have empty defaultDatabase and all the relations in viewText have database part defined.
def resolveRelation(plan: LogicalPlan): LogicalPlan = plan match {
case u @ UnresolvedRelation(AsTemporaryViewIdentifier(ident))
if catalog.isTemporaryTable(ident) =>
resolveRelation(lookupTableFromCatalog(ident, u, AnalysisContext.get.defaultDatabase))
case u @ UnresolvedRelation(AsTableIdentifier(ident)) if !isRunningDirectlyOnFiles(ident) =>
val defaultDatabase = AnalysisContext.get.defaultDatabase
val foundRelation = lookupTableFromCatalog(ident, u, defaultDatabase)
if (foundRelation != u) {
resolveRelation(foundRelation)
} else {
u
}
// The view's child should be a logical plan parsed from the `desc.viewText`, the variable
// `viewText` should be defined, or else we throw an error on the generation of the View
// operator.
case view @ View(desc, _, child) if !child.resolved =>
// Resolve all the UnresolvedRelations and Views in the child.
val newChild = AnalysisContext.withAnalysisContext(desc.viewDefaultDatabase) {
if (AnalysisContext.get.nestedViewDepth > conf.maxNestedViewDepth) {
view.failAnalysis(s"The depth of view ${view.desc.identifier} exceeds the maximum " +
s"view resolution depth (${conf.maxNestedViewDepth}). Analysis is aborted to " +
s"avoid errors. Increase the value of ${SQLConf.MAX_NESTED_VIEW_DEPTH.key} to work " +
"around this.")
}
executeSameContext(child)
}
view.copy(child = newChild)
case p @ SubqueryAlias(_, view: View) =>
val newChild = resolveRelation(view)
p.copy(child = newChild)
case _ => plan
}
    /**
     * Rule entry point: resolves [[UnresolvedRelation]]s bottom-up, and rejects INSERT
     * statements whose target resolves to a view.
     */
    def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
      // Only rewrite the INSERT target once the query producing the data is resolved.
      case i @ InsertIntoTable(u @ UnresolvedRelation(AsTableIdentifier(ident)), _, child, _, _)
          if child.resolved =>
        EliminateSubqueryAliases(lookupTableFromCatalog(ident, u)) match {
          case v: View =>
            u.failAnalysis(s"Inserting into a view is not allowed. View: ${v.desc.identifier}.")
          case other => i.copy(table = other)
        }
      case u: UnresolvedRelation => resolveRelation(u)
    }
// Look up the table with the given name from catalog. The database we used is decided by the
// precedence:
// 1. Use the database part of the table identifier, if it is defined;
// 2. Use defaultDatabase, if it is defined(In this case, no temporary objects can be used,
// and the default database is only used to look up a view);
// 3. Use the currentDb of the SessionCatalog.
private def lookupTableFromCatalog(
tableIdentifier: TableIdentifier,
u: UnresolvedRelation,
defaultDatabase: Option[String] = None): LogicalPlan = {
val tableIdentWithDb = tableIdentifier.copy(
database = tableIdentifier.database.orElse(defaultDatabase))
try {
catalog.lookupRelation(tableIdentWithDb)
} catch {
case _: NoSuchTableException | _: NoSuchDatabaseException =>
u
}
}
// If the database part is specified, and we support running SQL directly on files, and
// it's not a temporary view, and the table does not exist, then let's just return the
// original UnresolvedRelation. It is possible we are matching a query like "select *
// from parquet.`/path/to/query`". The plan will get resolved in the rule `ResolveDataSource`.
// Note that we are testing (!db_exists || !table_exists) because the catalog throws
// an exception from tableExists if the database does not exist.
private def isRunningDirectlyOnFiles(table: TableIdentifier): Boolean = {
table.database.isDefined && conf.runSQLonFile && !catalog.isTemporaryTable(table) &&
(!catalog.databaseExists(table.database.get) || !catalog.tableExists(table))
}
}
  /**
   * Resolves an [[InsertIntoStatement]] whose target is an [[UnresolvedRelation]]. When the
   * target is a DSv2 [[Table]] (found either via an explicit v2 catalog or via the session
   * catalog), the statement is planned as a v2 write (AppendData / OverwritePartitionsDynamic /
   * OverwriteByExpression); otherwise it falls back to the v1 [[InsertIntoTable]] node.
   */
  object ResolveInsertInto extends Rule[LogicalPlan] {
    override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
      // Only plan the write once the query producing the rows is fully resolved.
      case i @ InsertIntoStatement(u: UnresolvedRelation, _, _, _, _) if i.query.resolved =>
        lookupV2Relation(u.multipartIdentifier) match {
          case scala.Left((_, _, Some(v2Table: Table))) =>
            resolveV2Insert(i, v2Table)
          case scala.Right(Some(v2Table: Table)) =>
            resolveV2Insert(i, v2Table)
          case _ =>
            InsertIntoTable(i.table, i.partitionSpec, i.query, i.overwrite, i.ifPartitionNotExists)
        }
    }
    /** Plans a v2 insert into `table`, validating the PARTITION clause along the way. */
    private def resolveV2Insert(i: InsertIntoStatement, table: Table): LogicalPlan = {
      val relation = DataSourceV2Relation.create(table)
      // ifPartitionNotExists is append with validation, but validation is not supported
      if (i.ifPartitionNotExists) {
        throw new AnalysisException(
          s"Cannot write, IF NOT EXISTS is not supported for table: ${relation.table.name}")
      }
      val partCols = partitionColumnNames(relation.table)
      validatePartitionSpec(partCols, i.partitionSpec)
      // Keep only PARTITION entries with an explicit value (static partitions).
      // NOTE(review): `mapValues` is non-strict (returns a view) — the map is re-evaluated on
      // each access. Harmless here since `_.get` is cheap, but worth confirming on upgrade.
      val staticPartitions = i.partitionSpec.filter(_._2.isDefined).mapValues(_.get)
      val query = addStaticPartitionColumns(relation, i.query, staticPartitions)
      // Dynamic overwrite applies when at least one partition column has no static value and
      // the session is configured for DYNAMIC partition overwrite mode.
      val dynamicPartitionOverwrite = partCols.size > staticPartitions.size &&
        conf.partitionOverwriteMode == PartitionOverwriteMode.DYNAMIC
      if (!i.overwrite) {
        AppendData.byPosition(relation, query)
      } else if (dynamicPartitionOverwrite) {
        OverwritePartitionsDynamic.byPosition(relation, query)
      } else {
        OverwriteByExpression.byPosition(
          relation, query, staticDeleteExpression(relation, staticPartitions))
      }
    }
    /** Returns the names of columns usable in a PARTITION clause for a v2 table. */
    private def partitionColumnNames(table: Table): Seq[String] = {
      // get partition column names. in v2, partition columns are columns that are stored using an
      // identity partition transform because the partition values and the column values are
      // identical. otherwise, partition values are produced by transforming one or more source
      // columns and cannot be set directly in a query's PARTITION clause.
      table.partitioning.flatMap {
        case IdentityTransform(FieldReference(Seq(name))) => Some(name)
        case _ => None
      }
    }
    /** Fails analysis if the PARTITION clause names a column that is not a partition column. */
    private def validatePartitionSpec(
        partitionColumnNames: Seq[String],
        partitionSpec: Map[String, Option[String]]): Unit = {
      // check that each partition name is a partition column. otherwise, it is not valid
      partitionSpec.keySet.foreach { partitionName =>
        partitionColumnNames.find(name => conf.resolver(name, partitionName)) match {
          case Some(_) =>
          case None =>
            throw new AnalysisException(
              s"PARTITION clause cannot contain a non-partition column name: $partitionName")
        }
      }
    }
    /**
     * Rewrites `query` so that every static partition value appears as a literal column in the
     * table's column order, interleaving the query's own output columns for the rest.
     */
    private def addStaticPartitionColumns(
        relation: DataSourceV2Relation,
        query: LogicalPlan,
        staticPartitions: Map[String, String]): LogicalPlan = {
      if (staticPartitions.isEmpty) {
        query
      } else {
        // add any static value as a literal column
        val withStaticPartitionValues = {
          // for each static name, find the column name it will replace and check for unknowns.
          val outputNameToStaticName = staticPartitions.keySet.map(staticName =>
            relation.output.find(col => conf.resolver(col.name, staticName)) match {
              case Some(attr) =>
                attr.name -> staticName
              case _ =>
                throw new AnalysisException(
                  s"Cannot add static value for unknown column: $staticName")
            }).toMap
          val queryColumns = query.output.iterator
          // for each output column, add the static value as a literal, or use the next input
          // column. this does not fail if input columns are exhausted and adds remaining columns
          // at the end. both cases will be caught by ResolveOutputRelation and will fail the
          // query with a helpful error message.
          relation.output.flatMap { col =>
            outputNameToStaticName.get(col.name).flatMap(staticPartitions.get) match {
              case Some(staticValue) =>
                Some(Alias(Cast(Literal(staticValue), col.dataType), col.name)())
              case _ if queryColumns.hasNext =>
                Some(queryColumns.next)
              case _ =>
                None
            }
          } ++ queryColumns
        }
        Project(withStaticPartitionValues, query)
      }
    }
    /**
     * Builds the delete condition for a static-partition overwrite: a conjunction of
     * `col = value` over all static partitions, or literal true when there are none.
     */
    private def staticDeleteExpression(
        relation: DataSourceV2Relation,
        staticPartitions: Map[String, String]): Expression = {
      if (staticPartitions.isEmpty) {
        Literal(true)
      } else {
        staticPartitions.map { case (name, value) =>
          relation.output.find(col => conf.resolver(col.name, name)) match {
            case Some(attr) =>
              // the delete expression must reference the table's column names, but these attributes
              // are not available when CheckAnalysis runs because the relation is not a child of
              // the logical operation. instead, expressions are resolved after
              // ResolveOutputRelation runs, using the query's column names that will match the
              // table names at that point. because resolution happens after a future rule, create
              // an UnresolvedAttribute.
              EqualTo(UnresolvedAttribute(attr.name), Cast(Literal(value), attr.dataType))
            case None =>
              throw new AnalysisException(s"Unknown static partition column: $name")
          }
        }.reduce(And)
      }
    }
  }
  /**
   * Resolve ALTER TABLE statements that use a DSv2 catalog.
   *
   * This rule converts unresolved ALTER TABLE statements to v2 when a v2 catalog is responsible
   * for the table identifier. A v2 catalog is responsible for an identifier when the identifier
   * has a catalog specified, like prod_catalog.db.table, or when a default v2 catalog is set and
   * the table identifier does not include a catalog.
   */
  object ResolveAlterTable extends Rule[LogicalPlan] {
    import org.apache.spark.sql.catalog.v2.CatalogV2Implicits._
    override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
      case alter @ AlterTableAddColumnsStatement(tableName, cols) =>
        // New columns are always added as nullable (the `true` argument).
        val changes = cols.map { col =>
          TableChange.addColumn(col.name.toArray, col.dataType, true, col.comment.orNull)
        }
        resolveV2Alter(tableName, changes).getOrElse(alter)
      case alter @ AlterTableAlterColumnStatement(tableName, colName, dataType, comment) =>
        // Type and comment updates are independent optional changes; emit one TableChange each.
        val typeChange = dataType.map { newDataType =>
          TableChange.updateColumnType(colName.toArray, newDataType, true)
        }
        val commentChange = comment.map { newComment =>
          TableChange.updateColumnComment(colName.toArray, newComment)
        }
        resolveV2Alter(tableName, typeChange.toSeq ++ commentChange.toSeq).getOrElse(alter)
      case alter @ AlterTableRenameColumnStatement(tableName, col, newName) =>
        val changes = Seq(TableChange.renameColumn(col.toArray, newName))
        resolveV2Alter(tableName, changes).getOrElse(alter)
      case alter @ AlterTableDropColumnsStatement(tableName, cols) =>
        val changes = cols.map(col => TableChange.deleteColumn(col.toArray))
        resolveV2Alter(tableName, changes).getOrElse(alter)
      case alter @ AlterTableSetPropertiesStatement(tableName, props) =>
        val changes = props.map { case (key, value) =>
          TableChange.setProperty(key, value)
        }
        resolveV2Alter(tableName, changes.toSeq).getOrElse(alter)
      case alter @ AlterTableUnsetPropertiesStatement(tableName, keys, _) =>
        resolveV2Alter(tableName, keys.map(key => TableChange.removeProperty(key))).getOrElse(alter)
      // SET LOCATION is modeled as setting the reserved "location" table property.
      case alter @ AlterTableSetLocationStatement(tableName, newLoc) =>
        resolveV2Alter(tableName, Seq(TableChange.setProperty("location", newLoc))).getOrElse(alter)
    }
    /**
     * Builds an [[AlterTable]] node when a v2 catalog is responsible for `tableName`;
     * returns None so the caller can keep the original (v1) statement otherwise.
     */
    private def resolveV2Alter(
        tableName: Seq[String],
        changes: Seq[TableChange]): Option[AlterTable] = {
      lookupV2Relation(tableName) match {
        case scala.Left((v2Catalog, ident, tableOpt)) =>
          Some(AlterTable(
            v2Catalog.asTableCatalog,
            ident,
            // Fall back to an UnresolvedRelation when the table could not be loaded, so the
            // failure surfaces later with a proper error.
            tableOpt.map(DataSourceV2Relation.create).getOrElse(UnresolvedRelation(tableName)),
            changes
          ))
        case scala.Right(tableOpt) =>
          tableOpt.map { table =>
            AlterTable(
              sessionCatalog.asTableCatalog,
              Identifier.of(tableName.init.toArray, tableName.last),
              DataSourceV2Relation.create(table),
              changes
            )
          }
      }
    }
  }
  /**
   * Resolve DESCRIBE TABLE statements that use a DSv2 catalog.
   *
   * This rule converts unresolved DESCRIBE TABLE statements to v2 when a v2 catalog is responsible
   * for the table identifier. A v2 catalog is responsible for an identifier when the identifier
   * has a catalog specified, like prod_catalog.db.table, or when a default v2 catalog is set and
   * the table identifier does not include a catalog.
   */
  object ResolveDescribeTable extends Rule[LogicalPlan] {
    override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
      // The extractor only gates on a v2 catalog being present; the bound catalog/ident are
      // not needed because the relation is left unresolved for later rules to look up.
      case describe @ DescribeTableStatement(
          CatalogObjectIdentifier(Some(v2Catalog), ident), _, isExtended) =>
        DescribeTable(UnresolvedRelation(describe.tableName), isExtended)
    }
  }
  /**
   * Replaces [[UnresolvedAttribute]]s with concrete [[AttributeReference]]s from
   * a logical plan node's children.
   */
  object ResolveReferences extends Rule[LogicalPlan] {
    /**
     * Generate a new logical plan for the right child with different expression IDs
     * for all conflicting attributes.
     */
    private def dedupRight (left: LogicalPlan, right: LogicalPlan): LogicalPlan = {
      val conflictingAttributes = left.outputSet.intersect(right.outputSet)
      logDebug(s"Conflicting attributes ${conflictingAttributes.mkString(",")} " +
        s"between $left and $right")
      right.collect {
        // Handle base relations that might appear more than once.
        case oldVersion: MultiInstanceRelation
            if oldVersion.outputSet.intersect(conflictingAttributes).nonEmpty =>
          val newVersion = oldVersion.newInstance()
          (oldVersion, newVersion)
        case oldVersion: SerializeFromObject
            if oldVersion.outputSet.intersect(conflictingAttributes).nonEmpty =>
          (oldVersion, oldVersion.copy(serializer = oldVersion.serializer.map(_.newInstance())))
        // Handle projects that create conflicting aliases.
        case oldVersion @ Project(projectList, _)
            if findAliases(projectList).intersect(conflictingAttributes).nonEmpty =>
          (oldVersion, oldVersion.copy(projectList = newAliases(projectList)))
        case oldVersion @ Aggregate(_, aggregateExpressions, _)
            if findAliases(aggregateExpressions).intersect(conflictingAttributes).nonEmpty =>
          (oldVersion, oldVersion.copy(aggregateExpressions = newAliases(aggregateExpressions)))
        case oldVersion @ FlatMapGroupsInPandas(_, _, output, _)
            if oldVersion.outputSet.intersect(conflictingAttributes).nonEmpty =>
          (oldVersion, oldVersion.copy(output = output.map(_.newInstance())))
        case oldVersion: Generate
            if oldVersion.producedAttributes.intersect(conflictingAttributes).nonEmpty =>
          val newOutput = oldVersion.generatorOutput.map(_.newInstance())
          (oldVersion, oldVersion.copy(generatorOutput = newOutput))
        case oldVersion @ Window(windowExpressions, _, _, child)
            if AttributeSet(windowExpressions.map(_.toAttribute)).intersect(conflictingAttributes)
              .nonEmpty =>
          (oldVersion, oldVersion.copy(windowExpressions = newAliases(windowExpressions)))
      }
      // Only handle first case, others will be fixed on the next pass.
        .headOption match {
        case None =>
          /*
           * No result implies that there is a logical plan node that produces new references
           * that this rule cannot handle. When that is the case, there must be another rule
           * that resolves these conflicts. Otherwise, the analysis will fail.
           */
          right
        case Some((oldRelation, newRelation)) =>
          // Replace the conflicting node, then rewrite every reference to its old output,
          // including references hidden inside subquery plans.
          val attributeRewrites = AttributeMap(oldRelation.output.zip(newRelation.output))
          right transformUp {
            case r if r == oldRelation => newRelation
          } transformUp {
            case other => other transformExpressions {
              case a: Attribute =>
                dedupAttr(a, attributeRewrites)
              case s: SubqueryExpression =>
                s.withNewPlan(dedupOuterReferencesInSubquery(s.plan, attributeRewrites))
            }
          }
      }
    }
    /** Rewrites `attr` to carry the de-duplicated expression ID, if one was assigned. */
    private def dedupAttr(attr: Attribute, attrMap: AttributeMap[Attribute]): Attribute = {
      val exprId = attrMap.getOrElse(attr, attr).exprId
      attr.withExprId(exprId)
    }
    /**
     * The outer plan may have been de-duplicated and the function below updates the
     * outer references to refer to the de-duplicated attributes.
     *
     * For example (SQL):
     * {{{
     *   SELECT * FROM t1
     *   INTERSECT
     *   SELECT * FROM t1
     *   WHERE EXISTS (SELECT 1
     *                 FROM t2
     *                 WHERE t1.c1 = t2.c1)
     * }}}
     * Plan before resolveReference rule.
     *    'Intersect
     *    :- Project [c1#245, c2#246]
     *    :  +- SubqueryAlias t1
     *    :     +- Relation[c1#245,c2#246] parquet
     *    +- 'Project [*]
     *       +- Filter exists#257 [c1#245]
     *       :  +- Project [1 AS 1#258]
     *       :     +- Filter (outer(c1#245) = c1#251)
     *       :        +- SubqueryAlias t2
     *       :           +- Relation[c1#251,c2#252] parquet
     *       +- SubqueryAlias t1
     *          +- Relation[c1#245,c2#246] parquet
     * Plan after the resolveReference rule.
     *    Intersect
     *    :- Project [c1#245, c2#246]
     *    :  +- SubqueryAlias t1
     *    :     +- Relation[c1#245,c2#246] parquet
     *    +- Project [c1#259, c2#260]
     *       +- Filter exists#257 [c1#259]
     *       :  +- Project [1 AS 1#258]
     *       :     +- Filter (outer(c1#259) = c1#251) => Updated
     *       :        +- SubqueryAlias t2
     *       :           +- Relation[c1#251,c2#252] parquet
     *       +- SubqueryAlias t1
     *          +- Relation[c1#259,c2#260] parquet => Outer plan's attributes are de-duplicated.
     */
    private def dedupOuterReferencesInSubquery(
        plan: LogicalPlan,
        attrMap: AttributeMap[Attribute]): LogicalPlan = {
      plan transformDown { case currentFragment =>
        currentFragment transformExpressions {
          case OuterReference(a: Attribute) =>
            OuterReference(dedupAttr(a, attrMap))
          case s: SubqueryExpression =>
            // Nested subqueries may also hold outer references; recurse into them.
            s.withNewPlan(dedupOuterReferencesInSubquery(s.plan, attrMap))
        }
      }
    }
    /**
     * Resolves the attribute and extract value expressions(s) by traversing the
     * input expression in top down manner. The traversal is done in top-down manner as
     * we need to skip over unbound lamda function expression. The lamda expressions are
     * resolved in a different rule [[ResolveLambdaVariables]]
     *
     * Example :
     * SELECT transform(array(1, 2, 3), (x, i) -> x + i)"
     *
     * In the case above, x and i are resolved as lamda variables in [[ResolveLambdaVariables]]
     *
     * Note : In this routine, the unresolved attributes are resolved from the input plan's
     * children attributes.
     */
    private def resolveExpressionTopDown(e: Expression, q: LogicalPlan): Expression = {
      if (e.resolved) return e
      e match {
        case f: LambdaFunction if !f.bound => f
        case u @ UnresolvedAttribute(nameParts) =>
          // Leave unchanged if resolution fails. Hopefully will be resolved next round.
          val result =
            withPosition(u) {
              q.resolveChildren(nameParts, resolver)
                .orElse(resolveLiteralFunction(nameParts, u, q))
                .getOrElse(u)
            }
          logDebug(s"Resolving $u to $result")
          result
        case UnresolvedExtractValue(child, fieldExpr) if child.resolved =>
          ExtractValue(child, fieldExpr, resolver)
        case _ => e.mapChildren(resolveExpressionTopDown(_, q))
      }
    }
    /**
     * Rule entry point: expands stars, de-duplicates attributes of binary operators, and
     * resolves unresolved attributes bottom-up. The case order is significant — earlier,
     * more specific cases must fire before the generic fallthrough at the end.
     */
    def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
      case p: LogicalPlan if !p.childrenResolved => p
      // If the projection list contains Stars, expand it.
      case p: Project if containsStar(p.projectList) =>
        p.copy(projectList = buildExpandedProjectList(p.projectList, p.child))
      // If the aggregate function argument contains Stars, expand it.
      case a: Aggregate if containsStar(a.aggregateExpressions) =>
        if (a.groupingExpressions.exists(_.isInstanceOf[UnresolvedOrdinal])) {
          failAnalysis(
            "Star (*) is not allowed in select list when GROUP BY ordinal position is used")
        } else {
          a.copy(aggregateExpressions = buildExpandedProjectList(a.aggregateExpressions, a.child))
        }
      // If the script transformation input contains Stars, expand it.
      case t: ScriptTransformation if containsStar(t.input) =>
        t.copy(
          input = t.input.flatMap {
            case s: Star => s.expand(t.child, resolver)
            case o => o :: Nil
          }
        )
      case g: Generate if containsStar(g.generator.children) =>
        failAnalysis("Invalid usage of '*' in explode/json_tuple/UDTF")
      // To resolve duplicate expression IDs for Join and Intersect
      case j @ Join(left, right, _, _, _) if !j.duplicateResolved =>
        j.copy(right = dedupRight(left, right))
      // intersect/except will be rewritten to join at the begininng of optimizer. Here we need to
      // deduplicate the right side plan, so that we won't produce an invalid self-join later.
      case i @ Intersect(left, right, _) if !i.duplicateResolved =>
        i.copy(right = dedupRight(left, right))
      case e @ Except(left, right, _) if !e.duplicateResolved =>
        e.copy(right = dedupRight(left, right))
      case u @ Union(children) if !u.duplicateResolved =>
        // Use projection-based de-duplication for Union to avoid breaking the checkpoint sharing
        // feature in streaming.
        val newChildren = children.foldRight(Seq.empty[LogicalPlan]) { (head, tail) =>
          head +: tail.map {
            case child if head.outputSet.intersect(child.outputSet).isEmpty =>
              child
            case child =>
              // Wrapping in fresh Aliases gives every attribute a new expression ID.
              val projectList = child.output.map { attr =>
                Alias(attr, attr.name)()
              }
              Project(projectList, child)
          }
        }
        u.copy(children = newChildren)
      // When resolve `SortOrder`s in Sort based on child, don't report errors as
      // we still have chance to resolve it based on its descendants
      case s @ Sort(ordering, global, child) if child.resolved && !s.resolved =>
        val newOrdering =
          ordering.map(order => resolveExpressionBottomUp(order, child).asInstanceOf[SortOrder])
        Sort(newOrdering, global, child)
      // A special case for Generate, because the output of Generate should not be resolved by
      // ResolveReferences. Attributes in the output will be resolved by ResolveGenerate.
      case g @ Generate(generator, _, _, _, _, _) if generator.resolved => g
      case g @ Generate(generator, join, outer, qualifier, output, child) =>
        val newG = resolveExpressionBottomUp(generator, child, throws = true)
        if (newG.fastEquals(generator)) {
          g
        } else {
          Generate(newG.asInstanceOf[Generator], join, outer, qualifier, output, child)
        }
      // Skips plan which contains deserializer expressions, as they should be resolved by another
      // rule: ResolveDeserializer.
      case plan if containsDeserializer(plan.expressions) => plan
      // SPARK-25942: Resolves aggregate expressions with `AppendColumns`'s children, instead of
      // `AppendColumns`, because `AppendColumns`'s serializer might produce conflict attribute
      // names leading to ambiguous references exception.
      case a @ Aggregate(groupingExprs, aggExprs, appendColumns: AppendColumns) =>
        a.mapExpressions(resolveExpressionTopDown(_, appendColumns))
      case o: OverwriteByExpression if !o.outputResolved =>
        // do not resolve expression attributes until the query attributes are resolved against the
        // table by ResolveOutputRelation. that rule will alias the attributes to the table's names.
        o
      case q: LogicalPlan =>
        logTrace(s"Attempting to resolve ${q.simpleString(SQLConf.get.maxToStringFields)}")
        q.mapExpressions(resolveExpressionTopDown(_, q))
    }
    /** Re-creates each Alias with a fresh expression ID; other expressions pass through. */
    def newAliases(expressions: Seq[NamedExpression]): Seq[NamedExpression] = {
      expressions.map {
        case a: Alias => Alias(a.child, a.name)()
        case other => other
      }
    }
    /** Collects the attributes produced by the Aliases in a projection list. */
    def findAliases(projectList: Seq[NamedExpression]): AttributeSet = {
      AttributeSet(projectList.collect { case a: Alias => a.toAttribute })
    }
    /**
     * Build a project list for Project/Aggregate and expand the star if possible
     */
    private def buildExpandedProjectList(
      exprs: Seq[NamedExpression],
      child: LogicalPlan): Seq[NamedExpression] = {
      exprs.flatMap {
        // Using Dataframe/Dataset API: testData2.groupBy($"a", $"b").agg($"*")
        case s: Star => s.expand(child, resolver)
        // Using SQL API without running ResolveAlias: SELECT * FROM testData2 group by a, b
        case UnresolvedAlias(s: Star, _) => s.expand(child, resolver)
        case o if containsStar(o :: Nil) => expandStarExpression(o, child) :: Nil
        case o => o :: Nil
      }.map(_.asInstanceOf[NamedExpression])
    }
    /**
     * Returns true if `exprs` contains a [[Star]].
     */
    def containsStar(exprs: Seq[Expression]): Boolean =
      exprs.exists(_.collect { case _: Star => true }.nonEmpty)
    /**
     * Expands the matching attribute.*'s in `child`'s output.
     */
    def expandStarExpression(expr: Expression, child: LogicalPlan): Expression = {
      expr.transformUp {
        case f1: UnresolvedFunction if containsStar(f1.children) =>
          f1.copy(children = f1.children.flatMap {
            case s: Star => s.expand(child, resolver)
            case o => o :: Nil
          })
        case c: CreateNamedStruct if containsStar(c.valExprs) =>
          // Children come in (name, value) pairs; only a Star in value position is expanded.
          val newChildren = c.children.grouped(2).flatMap {
            case Seq(k, s : Star) => CreateStruct(s.expand(child, resolver)).children
            case kv => kv
          }
          c.copy(children = newChildren.toList )
        case c: CreateArray if containsStar(c.children) =>
          c.copy(children = c.children.flatMap {
            case s: Star => s.expand(child, resolver)
            case o => o :: Nil
          })
        case p: Murmur3Hash if containsStar(p.children) =>
          p.copy(children = p.children.flatMap {
            case s: Star => s.expand(child, resolver)
            case o => o :: Nil
          })
        case p: XxHash64 if containsStar(p.children) =>
          p.copy(children = p.children.flatMap {
            case s: Star => s.expand(child, resolver)
            case o => o :: Nil
          })
        // count(*) has been replaced by count(1)
        case o if containsStar(o.children) =>
          failAnalysis(s"Invalid usage of '*' in expression '${o.prettyName}'")
      }
    }
  }
private def containsDeserializer(exprs: Seq[Expression]): Boolean = {
exprs.exists(_.find(_.isInstanceOf[UnresolvedDeserializer]).isDefined)
}
/**
* Literal functions do not require the user to specify braces when calling them
* When an attributes is not resolvable, we try to resolve it as a literal function.
*/
private def resolveLiteralFunction(
nameParts: Seq[String],
attribute: UnresolvedAttribute,
plan: LogicalPlan): Option[Expression] = {
if (nameParts.length != 1) return None
val isNamedExpression = plan match {
case Aggregate(_, aggregateExpressions, _) => aggregateExpressions.contains(attribute)
case Project(projectList, _) => projectList.contains(attribute)
case Window(windowExpressions, _, _, _) => windowExpressions.contains(attribute)
case _ => false
}
val wrapper: Expression => Expression =
if (isNamedExpression) f => Alias(f, toPrettySQL(f))() else identity
// support CURRENT_DATE and CURRENT_TIMESTAMP
val literalFunctions = Seq(CurrentDate(), CurrentTimestamp())
val name = nameParts.head
val func = literalFunctions.find(e => caseInsensitiveResolution(e.prettyName, name))
func.map(wrapper)
}
  /**
   * Resolves the attribute, column value and extract value expressions(s) by traversing the
   * input expression in bottom-up manner. In order to resolve the nested complex type fields
   * correctly, this function makes use of `throws` parameter to control when to raise an
   * AnalysisException.
   *
   * Example :
   * SELECT a.b FROM t ORDER BY b[0].d
   *
   * In the above example, in b needs to be resolved before d can be resolved. Given we are
   * doing a bottom up traversal, it will first attempt to resolve d and fail as b has not
   * been resolved yet. If `throws` is false, this function will handle the exception by
   * returning the original attribute. In this case `d` will be resolved in subsequent passes
   * after `b` is resolved.
   */
  protected[sql] def resolveExpressionBottomUp(
      expr: Expression,
      plan: LogicalPlan,
      throws: Boolean = false): Expression = {
    if (expr.resolved) return expr
    // Resolve expression in one round.
    // If throws == false or the desired attribute doesn't exist
    // (like try to resolve `a.b` but `a` doesn't exist), fail and return the origin one.
    // Else, throw exception.
    try {
      expr transformUp {
        // GetColumnByOrdinal refers to the plan's output by position.
        case GetColumnByOrdinal(ordinal, _) => plan.output(ordinal)
        case u @ UnresolvedAttribute(nameParts) =>
          val result =
            withPosition(u) {
              plan.resolve(nameParts, resolver)
                .orElse(resolveLiteralFunction(nameParts, u, plan))
                .getOrElse(u)
            }
          logDebug(s"Resolving $u to $result")
          result
        case UnresolvedExtractValue(child, fieldName) if child.resolved =>
          ExtractValue(child, fieldName, resolver)
      }
    } catch {
      // Deliberate best-effort: swallow the failure and return the original expression so a
      // later analyzer pass can retry once more of the plan is resolved.
      case a: AnalysisException if !throws => expr
    }
  }
  /**
   * In many dialects of SQL it is valid to use ordinal positions in order/sort by and group by
   * clauses. This rule is to convert ordinal positions to the corresponding expressions in the
   * select list. This support is introduced in Spark 2.0.
   *
   * - When the sort references or group by expressions are not integer but foldable expressions,
   *   just ignore them.
   * - When spark.sql.orderByOrdinal/spark.sql.groupByOrdinal is set to false, ignore the position
   *   numbers too.
   *
   * Before the release of Spark 2.0, the literals in order/sort by and group by clauses
   * have no effect on the results.
   */
  object ResolveOrdinalInOrderByAndGroupBy extends Rule[LogicalPlan] {
    def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
      case p if !p.childrenResolved => p
      // Replace the index with the related attribute for ORDER BY,
      // which is a 1-base position of the projection list.
      case Sort(orders, global, child)
        if orders.exists(_.child.isInstanceOf[UnresolvedOrdinal]) =>
        val newOrders = orders map {
          case s @ SortOrder(UnresolvedOrdinal(index), direction, nullOrdering, _) =>
            // Ordinals are 1-based; anything outside [1, output.size] is a user error.
            if (index > 0 && index <= child.output.size) {
              SortOrder(child.output(index - 1), direction, nullOrdering, Set.empty)
            } else {
              s.failAnalysis(
                s"ORDER BY position $index is not in select list " +
                  s"(valid range is [1, ${child.output.size}])")
            }
          case o => o
        }
        Sort(newOrders, global, child)
      // Replace the index with the corresponding expression in aggregateExpressions. The index is
      // a 1-base position of aggregateExpressions, which is output columns (select expression)
      case Aggregate(groups, aggs, child) if aggs.forall(_.resolved) &&
        groups.exists(_.isInstanceOf[UnresolvedOrdinal]) =>
        val newGroups = groups.map {
          case u @ UnresolvedOrdinal(index) if index > 0 && index <= aggs.size =>
            aggs(index - 1)
          case ordinal @ UnresolvedOrdinal(index) =>
            ordinal.failAnalysis(
              s"GROUP BY position $index is not in select list " +
                s"(valid range is [1, ${aggs.size}])")
          case o => o
        }
        Aggregate(newGroups, aggs, child)
    }
  }
  /**
   * Replace unresolved expressions in grouping keys with resolved ones in SELECT clauses.
   * This rule is expected to run after [[ResolveReferences]] applied.
   */
  object ResolveAggAliasInGroupBy extends Rule[LogicalPlan] {
    // This is a strict check though, we put this to apply the rule only if the expression is not
    // resolvable by child.
    private def notResolvableByChild(attrName: String, child: LogicalPlan): Boolean = {
      !child.output.exists(a => resolver(a.name, attrName))
    }
    /**
     * Replaces unresolved attributes in `exprs` with the matching (by name) aggregate
     * expression, but only when the child plan cannot resolve the name itself.
     */
    private def mayResolveAttrByAggregateExprs(
        exprs: Seq[Expression], aggs: Seq[NamedExpression], child: LogicalPlan): Seq[Expression] = {
      exprs.map { _.transform {
        case u: UnresolvedAttribute if notResolvableByChild(u.name, child) =>
          aggs.find(ne => resolver(ne.name, u.name)).getOrElse(u)
      }}
    }
    override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
      // Only attempt this after the child and all select-list expressions are resolved, and
      // the feature is enabled via spark.sql.groupByAliases.
      case agg @ Aggregate(groups, aggs, child)
          if conf.groupByAliases && child.resolved && aggs.forall(_.resolved) &&
            groups.exists(!_.resolved) =>
        agg.copy(groupingExpressions = mayResolveAttrByAggregateExprs(groups, aggs, child))
      case gs @ GroupingSets(selectedGroups, groups, child, aggs)
          if conf.groupByAliases && child.resolved && aggs.forall(_.resolved) &&
            groups.exists(_.isInstanceOf[UnresolvedAttribute]) =>
        gs.copy(
          selectedGroupByExprs = selectedGroups.map(mayResolveAttrByAggregateExprs(_, aggs, child)),
          groupByExprs = mayResolveAttrByAggregateExprs(groups, aggs, child))
    }
  }
  /**
   * In many dialects of SQL it is valid to sort by attributes that are not present in the SELECT
   * clause. This rule detects such queries and adds the required attributes to the original
   * projection, so that they will be available during sorting. Another projection is added to
   * remove these attributes after sorting.
   *
   * The HAVING clause could also used a grouping columns that is not presented in the SELECT.
   */
  object ResolveMissingReferences extends Rule[LogicalPlan] {
    def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
      // Skip sort with aggregate. This will be handled in ResolveAggregateFunctions
      case sa @ Sort(_, _, child: Aggregate) => sa
      case s @ Sort(order, _, child)
          if (!s.resolved || s.missingInput.nonEmpty) && child.resolved =>
        val (newOrder, newChild) = resolveExprsAndAddMissingAttrs(order, child)
        val ordering = newOrder.map(_.asInstanceOf[SortOrder])
        if (child.output == newChild.output) {
          // Nothing was added below; just keep the (possibly further resolved) ordering.
          s.copy(order = ordering)
        } else {
          // Add missing attributes and then project them away.
          val newSort = s.copy(order = ordering, child = newChild)
          Project(child.output, newSort)
        }
      case f @ Filter(cond, child) if (!f.resolved || f.missingInput.nonEmpty) && child.resolved =>
        val (newCond, newChild) = resolveExprsAndAddMissingAttrs(Seq(cond), child)
        if (child.output == newChild.output) {
          f.copy(condition = newCond.head)
        } else {
          // Add missing attributes and then project them away.
          val newFilter = Filter(newCond.head, newChild)
          Project(child.output, newFilter)
        }
    }
    /**
     * This method tries to resolve expressions and find missing attributes recursively. Specially,
     * when the expressions used in `Sort` or `Filter` contain unresolved attributes or resolved
     * attributes which are missed from child output. This method tries to find the missing
     * attributes out and add into the projection.
     */
    private def resolveExprsAndAddMissingAttrs(
        exprs: Seq[Expression], plan: LogicalPlan): (Seq[Expression], LogicalPlan) = {
      // Missing attributes can be unresolved attributes or resolved attributes which are not in
      // the output attributes of the plan.
      if (exprs.forall(e => e.resolved && e.references.subsetOf(plan.outputSet))) {
        (exprs, plan)
      } else {
        plan match {
          case p: Project =>
            // Resolving expressions against current plan.
            val maybeResolvedExprs = exprs.map(resolveExpressionBottomUp(_, p))
            // Recursively resolving expressions on the child of current plan.
            val (newExprs, newChild) = resolveExprsAndAddMissingAttrs(maybeResolvedExprs, p.child)
            // If some attributes used by expressions are resolvable only on the rewritten child
            // plan, we need to add them into original projection.
            val missingAttrs = (AttributeSet(newExprs) -- p.outputSet).intersect(newChild.outputSet)
            (newExprs, Project(p.projectList ++ missingAttrs, newChild))
          case a @ Aggregate(groupExprs, aggExprs, child) =>
            val maybeResolvedExprs = exprs.map(resolveExpressionBottomUp(_, a))
            val (newExprs, newChild) = resolveExprsAndAddMissingAttrs(maybeResolvedExprs, child)
            val missingAttrs = (AttributeSet(newExprs) -- a.outputSet).intersect(newChild.outputSet)
            if (missingAttrs.forall(attr => groupExprs.exists(_.semanticEquals(attr)))) {
              // All the missing attributes are grouping expressions, valid case.
              (newExprs, a.copy(aggregateExpressions = aggExprs ++ missingAttrs, child = newChild))
            } else {
              // Need to add non-grouping attributes, invalid case.
              (exprs, a)
            }
          case g: Generate =>
            // Clearing unrequiredChildIndex keeps all child attributes available downstream.
            val maybeResolvedExprs = exprs.map(resolveExpressionBottomUp(_, g))
            val (newExprs, newChild) = resolveExprsAndAddMissingAttrs(maybeResolvedExprs, g.child)
            (newExprs, g.copy(unrequiredChildIndex = Nil, child = newChild))
          // For `Distinct` and `SubqueryAlias`, we can't recursively resolve and add attributes
          // via its children.
          case u: UnaryNode if !u.isInstanceOf[Distinct] && !u.isInstanceOf[SubqueryAlias] =>
            val maybeResolvedExprs = exprs.map(resolveExpressionBottomUp(_, u))
            val (newExprs, newChild) = resolveExprsAndAddMissingAttrs(maybeResolvedExprs, u.child)
            (newExprs, u.withNewChildren(Seq(newChild)))
          // For other operators, we can't recursively resolve and add attributes via its children.
          case other =>
            (exprs.map(resolveExpressionBottomUp(_, other)), other)
        }
      }
    }
  }
/**
 * Checks whether a function identifier referenced by an [[UnresolvedFunction]] is defined in the
 * function registry. Note that this rule doesn't try to resolve the [[UnresolvedFunction]]. It
 * only performs a simple existence check according to the function identifier to quickly identify
 * undefined functions without triggering relation resolution, which may incur a potentially
 * expensive partition/schema discovery process in some cases.
 * In order to avoid duplicate external function lookups, each external function identifier is
 * stored in the local hash set externalFunctionNameSet.
 * @see [[ResolveFunctions]]
 * @see https://issues.apache.org/jira/browse/SPARK-19737
 */
object LookupFunctions extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = {
    // Per-traversal cache of external (persistent) functions that have already been verified,
    // so each one hits the external catalog at most once per plan.
    val verifiedExternalFuncs = new mutable.HashSet[FunctionIdentifier]()
    plan.resolveExpressions {
      case f: UnresolvedFunction if verifiedExternalFuncs.contains(normalizeFuncName(f.name)) =>
        f
      case f: UnresolvedFunction if catalog.isRegisteredFunction(f.name) =>
        f
      case f: UnresolvedFunction if catalog.isPersistentFunction(f.name) =>
        // Remember this external function so repeated references skip the catalog lookup.
        verifiedExternalFuncs += normalizeFuncName(f.name)
        f
      case f: UnresolvedFunction =>
        // Not registered and not persistent: fail fast without resolving relations.
        withPosition(f) {
          throw new NoSuchFunctionException(
            f.name.database.getOrElse(catalog.getCurrentDatabase), f.name.funcName)
        }
    }
  }

  /**
   * Normalizes a function identifier for use as a cache key: applies case folding according
   * to `conf.caseSensitiveAnalysis` and fills in the current database when none is given.
   */
  def normalizeFuncName(name: FunctionIdentifier): FunctionIdentifier = {
    val fn =
      if (conf.caseSensitiveAnalysis) name.funcName else name.funcName.toLowerCase(Locale.ROOT)
    val db = name.database.map(formatDatabaseName).getOrElse(catalog.getCurrentDatabase)
    FunctionIdentifier(fn, Some(db))
  }

  // Case-folds a database name unless the analysis is case sensitive.
  protected def formatDatabaseName(name: String): String =
    if (conf.caseSensitiveAnalysis) name else name.toLowerCase(Locale.ROOT)
}
/**
* Replaces [[UnresolvedFunction]]s with concrete [[Expression]]s.
*/
object ResolveFunctions extends Rule[LogicalPlan] {
  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    case q: LogicalPlan =>
      q transformExpressions {
        // Wait until all children of the expression are resolved.
        case u if !u.childrenResolved => u

        // A bare reference to the Hive grouping-id virtual column becomes GroupingID().
        case u: UnresolvedAttribute if resolver(u.name, VirtualColumn.hiveGroupingIdName) =>
          withPosition(u) {
            Alias(GroupingID(Nil), VirtualColumn.hiveGroupingIdName)()
          }

        // A generator reference must resolve to an actual Generator implementation.
        case u @ UnresolvedGenerator(name, children) =>
          withPosition(u) {
            val resolved = catalog.lookupFunction(name, children)
            resolved match {
              case g: Generator => g
              case other =>
                failAnalysis(s"$name is expected to be a generator. However, " +
                  s"its class is ${other.getClass.getCanonicalName}, which is not a generator.")
            }
          }

        case u @ UnresolvedFunction(funcId, children, isDistinct) =>
          withPosition(u) {
            catalog.lookupFunction(funcId, children) match {
              // AggregateWindowFunctions can only be evaluated within a Window clause and
              // are never wrapped in an AggregateExpression; DISTINCT is meaningless here.
              case wf: AggregateWindowFunction if isDistinct =>
                failAnalysis(
                  s"DISTINCT specified, but ${wf.prettyName} is not an aggregate function")
              case wf: AggregateWindowFunction =>
                wf
              // A plain aggregate function must be wrapped in an AggregateExpression.
              case agg: AggregateFunction =>
                AggregateExpression(agg, Complete, isDistinct)
              // Any other (non-aggregate) function: DISTINCT does not apply.
              case other if isDistinct =>
                failAnalysis(
                  s"DISTINCT specified, but ${other.prettyName} is not an aggregate function")
              case other =>
                other
            }
          }
      }
  }
}
/**
* This rule resolves and rewrites subqueries inside expressions.
*
* Note: CTEs are handled in CTESubstitution.
*/
object ResolveSubquery extends Rule[LogicalPlan] with PredicateHelper {
  /**
   * Resolve the correlated expressions in a subquery by using the outer plan's references. All
   * resolved outer references are wrapped in an [[OuterReference]].
   */
  private def resolveOuterReferences(plan: LogicalPlan, outer: LogicalPlan): LogicalPlan = {
    plan resolveOperatorsDown {
      // Only visit nodes whose children are fully resolved but which themselves still
      // contain unresolved (potentially correlated) attributes.
      case q: LogicalPlan if q.childrenResolved && !q.resolved =>
        q transformExpressions {
          case u @ UnresolvedAttribute(nameParts) =>
            withPosition(u) {
              try {
                outer.resolve(nameParts, resolver) match {
                  case Some(outerAttr) => OuterReference(outerAttr)
                  case None => u
                }
              } catch {
                // Resolution against the outer plan failed (e.g. ambiguity); leave the
                // attribute unresolved so a later iteration or a later error can handle it.
                case _: AnalysisException => u
              }
            }
        }
    }
  }

  /**
   * Resolves the subquery plan that is referenced in a subquery expression. The normal
   * attribute references are resolved using the regular analyzer and the outer references are
   * resolved from the outer plans using the resolveOuterReferences method.
   *
   * Outer references from the correlated predicates are updated as children of the
   * Subquery expression.
   */
  private def resolveSubQuery(
      e: SubqueryExpression,
      plans: Seq[LogicalPlan])(
      f: (LogicalPlan, Seq[Expression]) => SubqueryExpression): SubqueryExpression = {
    // Step 1: Resolve the outer expressions. Runs to a fixed point: each round first applies
    // the regular analyzer, then (if still unresolved) tries the outer plans one by one.
    var previous: LogicalPlan = null
    var current = e.plan
    do {
      // Try to resolve the subquery plan using the regular analyzer.
      previous = current
      current = executeSameContext(current)
      // Use the outer references to resolve the subquery plan if it isn't resolved yet.
      val i = plans.iterator
      val afterResolve = current
      // Try each outer plan in turn; stop as soon as one of them changes the subquery plan
      // (the enclosing do/while then re-runs the analyzer on the updated plan).
      while (!current.resolved && current.fastEquals(afterResolve) && i.hasNext) {
        current = resolveOuterReferences(current, i.next())
      }
    } while (!current.resolved && !current.fastEquals(previous))
    // Step 2: If the subquery plan is fully resolved, pull the outer references and record
    // them as children of SubqueryExpression.
    if (current.resolved) {
      // Record the outer references as children of subquery expression.
      f(current, SubExprUtils.getOuterReferences(current))
    } else {
      // Not resolvable yet; keep the partially-resolved plan so progress is not lost.
      e.withNewPlan(current)
    }
  }

  /**
   * Resolves the subquery. Apart from resolving the subquery and outer references (if any)
   * in the subquery plan, the children of the subquery expression are updated to record the
   * outer references. This is needed to make sure
   * (1) The column(s) referred from the outer query are not pruned from the plan during
   *     optimization.
   * (2) Any aggregate expression(s) that reference outer attributes are pushed down to the
   *     outer plan to get evaluated.
   */
  private def resolveSubQueries(plan: LogicalPlan, plans: Seq[LogicalPlan]): LogicalPlan = {
    plan transformExpressions {
      case s @ ScalarSubquery(sub, _, exprId) if !sub.resolved =>
        resolveSubQuery(s, plans)(ScalarSubquery(_, _, exprId))
      case e @ Exists(sub, _, exprId) if !sub.resolved =>
        resolveSubQuery(e, plans)(Exists(_, _, exprId))
      case InSubquery(values, l @ ListQuery(_, _, exprId, _))
          if values.forall(_.resolved) && !l.resolved =>
        // ListQuery additionally records the subquery's output as its childOutputs.
        val expr = resolveSubQuery(l, plans)((plan, exprs) => {
          ListQuery(plan, exprs, exprId, plan.output)
        })
        InSubquery(values, expr.asInstanceOf[ListQuery])
    }
  }

  /**
   * Resolve and rewrite all subqueries in an operator tree.
   */
  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    // In case of HAVING (a filter after an aggregate) we use both the aggregate and
    // its child for resolution.
    case f @ Filter(_, a: Aggregate) if f.childrenResolved =>
      resolveSubQueries(f, Seq(a, a.child))
    // Only a few unary nodes (Project/Filter/Aggregate) can contain subqueries.
    case q: UnaryNode if q.childrenResolved =>
      resolveSubQueries(q, q.children)
    case d: DeleteFromTable if d.childrenResolved =>
      resolveSubQueries(d, d.children)
  }
}
/**
* Replaces unresolved column aliases for a subquery with projections.
*/
object ResolveSubqueryColumnAliases extends Rule[LogicalPlan] {
  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    case u @ UnresolvedSubqueryColumnAliases(columnNames, child) if child.resolved =>
      // A subquery with alias names, e.g. SELECT * FROM (SELECT 1 AS a, 1 AS b) t(col1, col2),
      // is rewritten into a Project that renames each output column of the subquery.
      val outputAttrs = child.output
      // The alias list must cover the subquery's output exactly, one name per column.
      if (columnNames.size != outputAttrs.size) {
        u.failAnalysis("Number of column aliases does not match number of columns. " +
          s"Number of column aliases: ${columnNames.size}; " +
          s"number of columns: ${outputAttrs.size}.")
      }
      Project(
        outputAttrs.zip(columnNames).map { case (attr, name) => Alias(attr, name)() },
        child)
  }
}
/**
* Turns projections that contain aggregate expressions into aggregations.
*/
object GlobalAggregates extends Rule[LogicalPlan] {
  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperators {
    case Project(projectList, child) if containsAggregates(projectList) =>
      Aggregate(Nil, projectList, child)
  }

  /**
   * Returns true when `exprs` contains an aggregate expression (or grouped-agg pandas UDF)
   * that is NOT inside a window expression, i.e. a global aggregate.
   */
  def containsAggregates(exprs: Seq[Expression]): Boolean = {
    // Aggregates wrapped in a WindowExpression are evaluated by the Window operator,
    // so they must not trigger the Project -> Aggregate conversion.
    val windowedAggExprs: Set[Expression] = exprs.flatMap { expr =>
      expr.collect {
        case WindowExpression(ae: AggregateExpression, _) => ae
        case WindowExpression(e: PythonUDF, _) if PythonUDF.isGroupedAggPandasUDF(e) => e
      }
    }.toSet

    // Look for any aggregate occurrence that is outside of every window expression.
    exprs.exists { expr =>
      expr.find {
        case ae: AggregateExpression => !windowedAggExprs.contains(ae)
        case e: PythonUDF => PythonUDF.isGroupedAggPandasUDF(e) && !windowedAggExprs.contains(e)
        case _ => false
      }.isDefined
    }
  }
}
/**
* This rule finds aggregate expressions that are not in an aggregate operator. For example,
* those in a HAVING clause or ORDER BY clause. These expressions are pushed down to the
* underlying aggregate operator and then projected away after the original operator.
*/
object ResolveAggregateFunctions extends Rule[LogicalPlan] {
  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    // HAVING clause: a Filter directly on top of an already-resolved Aggregate.
    case f @ Filter(cond, agg @ Aggregate(grouping, originalAggExprs, child)) if agg.resolved =>
      // Try resolving the condition of the filter as though it is in the aggregate clause
      try {
        val aggregatedCondition =
          Aggregate(
            grouping,
            Alias(cond, "havingCondition")() :: Nil,
            child)
        val resolvedOperator = executeSameContext(aggregatedCondition)
        def resolvedAggregateFilter =
          resolvedOperator
            .asInstanceOf[Aggregate]
            .aggregateExpressions.head
        // If resolution was successful and we see the filter has an aggregate in it, add it to
        // the original aggregate operator.
        if (resolvedOperator.resolved) {
          // Try to replace all aggregate expressions in the filter by an alias.
          // Collected expressions are appended to the Aggregate below so the Filter can
          // reference them by attribute.
          val aggregateExpressions = ArrayBuffer.empty[NamedExpression]
          val transformedAggregateFilter = resolvedAggregateFilter.transform {
            case ae: AggregateExpression =>
              val alias = Alias(ae, ae.toString)()
              aggregateExpressions += alias
              alias.toAttribute
            // Grouping functions are handled in the rule [[ResolveGroupingAnalytics]].
            case e: Expression if grouping.exists(_.semanticEquals(e)) &&
              !ResolveGroupingAnalytics.hasGroupingFunction(e) &&
              !agg.output.exists(_.semanticEquals(e)) =>
              e match {
                case ne: NamedExpression =>
                  aggregateExpressions += ne
                  ne.toAttribute
                case _ =>
                  val alias = Alias(e, e.toString)()
                  aggregateExpressions += alias
                  alias.toAttribute
              }
          }
          // Push the aggregate expressions into the aggregate (if any). The outer Project
          // restores the original output so the extra columns stay internal.
          if (aggregateExpressions.nonEmpty) {
            Project(agg.output,
              Filter(transformedAggregateFilter,
                agg.copy(aggregateExpressions = originalAggExprs ++ aggregateExpressions)))
          } else {
            f
          }
        } else {
          f
        }
      } catch {
        // Attempting to resolve in the aggregate can result in ambiguity. When this happens,
        // just return the original plan.
        case ae: AnalysisException => f
      }

    // ORDER BY on top of an already-resolved Aggregate.
    case sort @ Sort(sortOrder, global, aggregate: Aggregate) if aggregate.resolved =>
      // Try resolving the ordering as though it is in the aggregate clause.
      try {
        // If a sort order is unresolved, containing references not in aggregate, or containing
        // `AggregateExpression`, we need to push down it to the underlying aggregate operator.
        val unresolvedSortOrders = sortOrder.filter { s =>
          !s.resolved || !s.references.subsetOf(aggregate.outputSet) || containsAggregate(s)
        }
        val aliasedOrdering =
          unresolvedSortOrders.map(o => Alias(o.child, "aggOrder")())
        val aggregatedOrdering = aggregate.copy(aggregateExpressions = aliasedOrdering)
        val resolvedAggregate: Aggregate =
          executeSameContext(aggregatedOrdering).asInstanceOf[Aggregate]
        val resolvedAliasedOrdering: Seq[Alias] =
          resolvedAggregate.aggregateExpressions.asInstanceOf[Seq[Alias]]
        // If we pass the analysis check, then the ordering expressions should only reference to
        // aggregate expressions or grouping expressions, and it's safe to push them down to
        // Aggregate.
        checkAnalysis(resolvedAggregate)
        val originalAggExprs = aggregate.aggregateExpressions.map(
          CleanupAliases.trimNonTopLevelAliases(_).asInstanceOf[NamedExpression])
        // If the ordering expression is same with original aggregate expression, we don't need
        // to push down this ordering expression and can reference the original aggregate
        // expression instead.
        val needsPushDown = ArrayBuffer.empty[NamedExpression]
        val evaluatedOrderings = resolvedAliasedOrdering.zip(unresolvedSortOrders).map {
          case (evaluated, order) =>
            val index = originalAggExprs.indexWhere {
              case Alias(child, _) => child semanticEquals evaluated.child
              case other => other semanticEquals evaluated.child
            }
            if (index == -1) {
              needsPushDown += evaluated
              order.copy(child = evaluated.toAttribute)
            } else {
              order.copy(child = originalAggExprs(index).toAttribute)
            }
        }
        // Keys are identity-based (TreeNodeRef) so syntactically equal but distinct sort
        // orders map back to their own rewritten forms.
        val sortOrdersMap = unresolvedSortOrders
          .map(new TreeNodeRef(_))
          .zip(evaluatedOrderings)
          .toMap
        val finalSortOrders = sortOrder.map(s => sortOrdersMap.getOrElse(new TreeNodeRef(s), s))
        // Since we don't rely on sort.resolved as the stop condition for this rule,
        // we need to check this and prevent applying this rule multiple times
        if (sortOrder == finalSortOrders) {
          sort
        } else {
          Project(aggregate.output,
            Sort(finalSortOrders, global,
              aggregate.copy(aggregateExpressions = originalAggExprs ++ needsPushDown)))
        }
      } catch {
        // Attempting to resolve in the aggregate can result in ambiguity. When this happens,
        // just return the original plan.
        case ae: AnalysisException => sort
      }
  }

  // True if `condition` contains at least one aggregate function call.
  def containsAggregate(condition: Expression): Boolean = {
    condition.find(_.isInstanceOf[AggregateExpression]).isDefined
  }
}
/**
* Extracts [[Generator]] from the projectList of a [[Project]] operator and creates [[Generate]]
* operator under [[Project]].
*
* This rule will throw [[AnalysisException]] for following cases:
* 1. [[Generator]] is nested in expressions, e.g. `SELECT explode(list) + 1 FROM tbl`
* 2. more than one [[Generator]] is found in projectList,
* e.g. `SELECT explode(list), explode(list) FROM tbl`
* 3. [[Generator]] is found in other operators that are not [[Project]] or [[Generate]],
* e.g. `SELECT * FROM tbl SORT BY explode(list)`
*/
object ExtractGenerator extends Rule[LogicalPlan] {
  // True if any sub-expression of `expr` is a Generator.
  private def hasGenerator(expr: Expression): Boolean = {
    expr.find(_.isInstanceOf[Generator]).isDefined
  }

  // True if a generator appears somewhere other than at the top level of the (possibly
  // aliased) named expression, e.g. `explode(list) + 1`. Top-level aliased generators
  // are the supported form and return false.
  private def hasNestedGenerator(expr: NamedExpression): Boolean = {
    CleanupAliases.trimNonTopLevelAliases(expr) match {
      case UnresolvedAlias(_: Generator, _) => false
      case Alias(_: Generator, _) => false
      case MultiAlias(_: Generator, _) => false
      case other => hasGenerator(other)
    }
  }

  // Strips a single top-level alias, if present, to get at the underlying expression
  // (used only for error-message rendering below).
  private def trimAlias(expr: NamedExpression): Expression = expr match {
    case UnresolvedAlias(child, _) => child
    case Alias(child, _) => child
    case MultiAlias(child, _) => child
    case _ => expr
  }

  private object AliasedGenerator {
    /**
     * Extracts a [[Generator]] expression, any names assigned by aliases to the outputs
     * and the outer flag. The outer flag is used when joining the generator output.
     * @param e the [[Expression]]
     * @return (the [[Generator]], seq of output names, outer flag)
     */
    def unapply(e: Expression): Option[(Generator, Seq[String], Boolean)] = e match {
      case Alias(GeneratorOuter(g: Generator), name) if g.resolved => Some((g, name :: Nil, true))
      case MultiAlias(GeneratorOuter(g: Generator), names) if g.resolved => Some((g, names, true))
      case Alias(g: Generator, name) if g.resolved => Some((g, name :: Nil, false))
      case MultiAlias(g: Generator, names) if g.resolved => Some((g, names, false))
      case _ => None
    }
  }

  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    // Error cases first: nested generators and multiple generators per clause.
    case Project(projectList, _) if projectList.exists(hasNestedGenerator) =>
      val nestedGenerator = projectList.find(hasNestedGenerator).get
      throw new AnalysisException("Generators are not supported when it's nested in " +
        "expressions, but got: " + toPrettySQL(trimAlias(nestedGenerator)))

    case Project(projectList, _) if projectList.count(hasGenerator) > 1 =>
      val generators = projectList.filter(hasGenerator).map(trimAlias)
      throw new AnalysisException("Only one generator allowed per select clause but found " +
        generators.size + ": " + generators.map(toPrettySQL).mkString(", "))

    case Aggregate(_, aggList, _) if aggList.exists(hasNestedGenerator) =>
      val nestedGenerator = aggList.find(hasNestedGenerator).get
      throw new AnalysisException("Generators are not supported when it's nested in " +
        "expressions, but got: " + toPrettySQL(trimAlias(nestedGenerator)))

    case Aggregate(_, aggList, _) if aggList.count(hasGenerator) > 1 =>
      val generators = aggList.filter(hasGenerator).map(trimAlias)
      throw new AnalysisException("Only one generator allowed per aggregate clause but found " +
        generators.size + ": " + generators.map(toPrettySQL).mkString(", "))

    // Aggregate with a single top-level generator: evaluate the generator's inputs inside
    // the Aggregate, then run the generator in a Project above it.
    case agg @ Aggregate(groupList, aggList, child) if aggList.forall {
        case AliasedGenerator(_, _, _) => true
        case other => other.resolved
      } && aggList.exists(hasGenerator) =>
      // If generator in the aggregate list was visited, set the boolean flag true.
      var generatorVisited = false
      // projectExprs(i) is what the outer Project emits in place of aggList(i).
      val projectExprs = Array.ofDim[NamedExpression](aggList.length)
      val newAggList = aggList
        .map(CleanupAliases.trimNonTopLevelAliases(_).asInstanceOf[NamedExpression])
        .zipWithIndex
        .flatMap {
          case (AliasedGenerator(generator, names, outer), idx) =>
            // It's a sanity check, this should not happen as the previous case will throw
            // exception earlier.
            assert(!generatorVisited, "More than one generator found in aggregate.")
            generatorVisited = true
            // Non-foldable generator inputs are aliased so the Aggregate computes them;
            // the generator in the Project then references those aliases by attribute.
            val newGenChildren: Seq[Expression] = generator.children.zipWithIndex.map {
              case (e, idx) => if (e.foldable) e else Alias(e, s"_gen_input_${idx}")()
            }
            val newGenerator = {
              val g = generator.withNewChildren(newGenChildren.map { e =>
                if (e.foldable) e else e.asInstanceOf[Alias].toAttribute
              }).asInstanceOf[Generator]
              if (outer) GeneratorOuter(g) else g
            }
            val newAliasedGenerator = if (names.length == 1) {
              Alias(newGenerator, names(0))()
            } else {
              MultiAlias(newGenerator, names)
            }
            projectExprs(idx) = newAliasedGenerator
            newGenChildren.filter(!_.foldable).asInstanceOf[Seq[NamedExpression]]
          case (other, idx) =>
            projectExprs(idx) = other.toAttribute
            other :: Nil
        }
      val newAgg = Aggregate(groupList, newAggList, child)
      Project(projectExprs.toList, newAgg)

    // Project with a single top-level generator: pull it into a Generate operator under
    // the Project and splice the generator's output attributes into the project list.
    case p @ Project(projectList, child) =>
      // Holds the resolved generator, if one exists in the project list.
      var resolvedGenerator: Generate = null
      val newProjectList = projectList
        .map(CleanupAliases.trimNonTopLevelAliases(_).asInstanceOf[NamedExpression])
        .flatMap {
          case AliasedGenerator(generator, names, outer) if generator.childrenResolved =>
            // It's a sanity check, this should not happen as the previous case will throw
            // exception earlier.
            assert(resolvedGenerator == null, "More than one generator found in SELECT.")
            resolvedGenerator =
              Generate(
                generator,
                unrequiredChildIndex = Nil,
                outer = outer,
                qualifier = None,
                generatorOutput = ResolveGenerate.makeGeneratorOutput(generator, names),
                child)
            resolvedGenerator.generatorOutput
          case other => other :: Nil
        }
      if (resolvedGenerator != null) {
        Project(newProjectList, resolvedGenerator)
      } else {
        p
      }

    // A Generate operator already hosts its generator; nothing to extract.
    case g: Generate => g

    // Generators anywhere else (e.g. SORT BY explode(list)) are unsupported.
    case p if p.expressions.exists(hasGenerator) =>
      throw new AnalysisException("Generators are not supported outside the SELECT clause, but " +
        "got: " + p.simpleString(SQLConf.get.maxToStringFields))
  }
}
/**
 * Rewrites table generating expressions that need one or more of the following in order
 * to be resolved:
 *  - concrete attribute references for their output.
 *  - to be relocated from a SELECT clause (i.e. from a [[Project]]) into a [[Generate]].
 *
 * Names for the output [[Attribute]]s are extracted from [[Alias]] or [[MultiAlias]] expressions
 * that wrap the [[Generator]].
 */
object ResolveGenerate extends Rule[LogicalPlan] {
  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    // Wait until both the child plan and the generator itself are resolved.
    case g: Generate if !g.child.resolved || !g.generator.resolved => g
    // Rebuild the generator's output attributes from the (possibly user-supplied) names.
    case g: Generate if !g.resolved =>
      g.copy(generatorOutput = makeGeneratorOutput(g.generator, g.generatorOutput.map(_.name)))
  }

  /**
   * Construct the output attributes for a [[Generator]], given a list of names. If the list of
   * names is empty, names are assigned from the field names in the generator's element schema.
   */
  private[analysis] def makeGeneratorOutput(
      generator: Generator,
      names: Seq[String]): Seq[Attribute] = {
    val elementAttrs = generator.elementSchema.toAttributes
    if (names.isEmpty) {
      // No aliases supplied: keep the schema-derived attribute names.
      elementAttrs
    } else if (names.length == elementAttrs.length) {
      // One alias per output column: rename each attribute accordingly.
      names.zip(elementAttrs).map { case (n, attr) => attr.withName(n) }
    } else {
      failAnalysis(
        "The number of aliases supplied in the AS clause does not match the number of columns " +
          s"output by the UDTF expected ${elementAttrs.size} aliases but got " +
          s"${names.mkString(",")} ")
    }
  }
}
/**
* Extracts [[WindowExpression]]s from the projectList of a [[Project]] operator and
* aggregateExpressions of an [[Aggregate]] operator and creates individual [[Window]]
* operators for every distinct [[WindowSpecDefinition]].
*
* This rule handles three cases:
* - A [[Project]] having [[WindowExpression]]s in its projectList;
* - An [[Aggregate]] having [[WindowExpression]]s in its aggregateExpressions.
* - A [[Filter]]->[[Aggregate]] pattern representing GROUP BY with a HAVING
* clause and the [[Aggregate]] has [[WindowExpression]]s in its aggregateExpressions.
* Note: If there is a GROUP BY clause in the query, aggregations and corresponding
* filters (expressions in the HAVING clause) should be evaluated before any
* [[WindowExpression]]. If a query has SELECT DISTINCT, the DISTINCT part should be
* evaluated after all [[WindowExpression]]s.
*
* For every case, the transformation works as follows:
* 1. For a list of [[Expression]]s (a projectList or an aggregateExpressions), partitions
* it two lists of [[Expression]]s, one for all [[WindowExpression]]s and another for
* all regular expressions.
* 2. For all [[WindowExpression]]s, groups them based on their [[WindowSpecDefinition]]s
* and [[WindowFunctionType]]s.
* 3. For every distinct [[WindowSpecDefinition]] and [[WindowFunctionType]], creates a
* [[Window]] operator and inserts it into the plan tree.
*/
object ExtractWindowExpressions extends Rule[LogicalPlan] {
  // True if any expression in the list contains a WindowExpression.
  private def hasWindowFunction(exprs: Seq[Expression]): Boolean =
    exprs.exists(hasWindowFunction)

  // True if `expr` contains a WindowExpression anywhere in its subtree.
  private def hasWindowFunction(expr: Expression): Boolean = {
    expr.find {
      case window: WindowExpression => true
      case _ => false
    }.isDefined
  }

  /**
   * From a Seq of [[NamedExpression]]s, extract expressions containing window expressions and
   * other regular expressions that do not contain any window expression. For example, for
   * `col1, Sum(col2 + col3) OVER (PARTITION BY col4 ORDER BY col5)`, we will extract
   * `col1`, `col2 + col3`, `col4`, and `col5` out and replace their appearances in
   * the window expression as attribute references. So, the first returned value will be
   * `[Sum(_w0) OVER (PARTITION BY _w1 ORDER BY _w2)]` and the second returned value will be
   * [col1, col2 + col3 as _w0, col4 as _w1, col5 as _w2].
   *
   * @return (seq of expressions containing at least one window expression,
   *         seq of non-window expressions)
   */
  private def extract(
      expressions: Seq[NamedExpression]): (Seq[NamedExpression], Seq[NamedExpression]) = {
    // First, we partition the input expressions into two parts. Every expression in the
    // first part contains at least one WindowExpression; expressions in the second part
    // do not have any WindowExpression.
    val (expressionsWithWindowFunctions, regularExpressions) =
      expressions.partition(hasWindowFunction)
    // Then, we need to extract those regular expressions used in the WindowExpression.
    // For example, when we have col1 - Sum(col2 + col3) OVER (PARTITION BY col4 ORDER BY col5),
    // we need to make sure that col1 to col5 are all projected from the child of the Window
    // operator.
    val extractedExprBuffer = new ArrayBuffer[NamedExpression]()
    def extractExpr(expr: Expression): Expression = expr match {
      case ne: NamedExpression =>
        // If a named expression is not in regularExpressions, add it to
        // extractedExprBuffer and replace it with an AttributeReference.
        val missingExpr =
          AttributeSet(Seq(expr)) -- (regularExpressions ++ extractedExprBuffer)
        if (missingExpr.nonEmpty) {
          extractedExprBuffer += ne
        }
        // alias will be cleaned in the rule CleanupAliases
        ne
      case e: Expression if e.foldable =>
        e // No need to create an attribute reference if it will be evaluated as a Literal.
      case e: Expression =>
        // For other expressions, we extract it and replace it with an AttributeReference (with
        // an internal column name, e.g. "_w0").
        val withName = Alias(e, s"_w${extractedExprBuffer.length}")()
        extractedExprBuffer += withName
        withName.toAttribute
    }
    // Now, we extract regular expressions from expressionsWithWindowFunctions
    // by using extractExpr.
    val seenWindowAggregates = new ArrayBuffer[AggregateExpression]
    val newExpressionsWithWindowFunctions = expressionsWithWindowFunctions.map {
      _.transform {
        // Extracts children expressions of a WindowFunction (input parameters of
        // a WindowFunction).
        case wf: WindowFunction =>
          val newChildren = wf.children.map(extractExpr)
          wf.withNewChildren(newChildren)
        // Extracts expressions from the partition spec and order spec.
        case wsc @ WindowSpecDefinition(partitionSpec, orderSpec, _) =>
          val newPartitionSpec = partitionSpec.map(extractExpr)
          val newOrderSpec = orderSpec.map { so =>
            val newChild = extractExpr(so.child)
            so.copy(child = newChild)
          }
          wsc.copy(partitionSpec = newPartitionSpec, orderSpec = newOrderSpec)
        // Extract Windowed AggregateExpression. Recorded in seenWindowAggregates so the
        // plain-aggregate case below does not re-extract it.
        case we @ WindowExpression(
            ae @ AggregateExpression(function, _, _, _),
            spec: WindowSpecDefinition) =>
          val newChildren = function.children.map(extractExpr)
          val newFunction = function.withNewChildren(newChildren).asInstanceOf[AggregateFunction]
          val newAgg = ae.copy(aggregateFunction = newFunction)
          seenWindowAggregates += newAgg
          WindowExpression(newAgg, spec)
        case AggregateExpression(aggFunc, _, _, _) if hasWindowFunction(aggFunc.children) =>
          failAnalysis("It is not allowed to use a window function inside an aggregate " +
            "function. Please use the inner window function in a sub-query.")
        // Extracts AggregateExpression. For example, for SUM(x) - Sum(y) OVER (...),
        // we need to extract SUM(x).
        case agg: AggregateExpression if !seenWindowAggregates.contains(agg) =>
          val withName = Alias(agg, s"_w${extractedExprBuffer.length}")()
          extractedExprBuffer += withName
          withName.toAttribute
        // Extracts other attributes
        case attr: Attribute => extractExpr(attr)
      }.asInstanceOf[NamedExpression]
    }
    (newExpressionsWithWindowFunctions, regularExpressions ++ extractedExprBuffer)
  } // end of extract

  /**
   * Adds operators for Window Expressions. Every Window operator handles a single Window Spec.
   */
  private def addWindow(
      expressionsWithWindowFunctions: Seq[NamedExpression],
      child: LogicalPlan): LogicalPlan = {
    // First, we need to extract all WindowExpressions from expressionsWithWindowFunctions
    // and put those extracted WindowExpressions to extractedWindowExprBuffer.
    // This step is needed because it is possible that an expression contains multiple
    // WindowExpressions with different Window Specs.
    // After extracting WindowExpressions, we need to construct a project list to generate
    // expressionsWithWindowFunctions based on extractedWindowExprBuffer.
    // For example, for "sum(a) over (...) / sum(b) over (...)", we will first extract
    // "sum(a) over (...)" and "sum(b) over (...)" out, and assign "_we0" as the alias to
    // "sum(a) over (...)" and "_we1" as the alias to "sum(b) over (...)".
    // Then, the projectList will be [_we0/_we1].
    val extractedWindowExprBuffer = new ArrayBuffer[NamedExpression]()
    val newExpressionsWithWindowFunctions = expressionsWithWindowFunctions.map {
      // We need to use transformDown because we want to trigger
      // "case alias @ Alias(window: WindowExpression, _)" first.
      _.transformDown {
        case alias @ Alias(window: WindowExpression, _) =>
          // If a WindowExpression has an assigned alias, just use it.
          extractedWindowExprBuffer += alias
          alias.toAttribute
        case window: WindowExpression =>
          // If there is no alias assigned to the WindowExpressions, we create an
          // internal column.
          val withName = Alias(window, s"_we${extractedWindowExprBuffer.length}")()
          extractedWindowExprBuffer += withName
          withName.toAttribute
      }.asInstanceOf[NamedExpression]
    }
    // Second, we group extractedWindowExprBuffer based on their Partition and Order Specs.
    val groupedWindowExpressions = extractedWindowExprBuffer.groupBy { expr =>
      val distinctWindowSpec = expr.collect {
        case window: WindowExpression => window.windowSpec
      }.distinct
      // We do a final check and see if we only have a single Window Spec defined in an
      // expression.
      if (distinctWindowSpec.isEmpty) {
        failAnalysis(s"$expr does not have any WindowExpression.")
      } else if (distinctWindowSpec.length > 1) {
        // newExpressionsWithWindowFunctions only have expressions with a single
        // WindowExpression. If we reach here, we have a bug.
        failAnalysis(s"$expr has multiple Window Specifications ($distinctWindowSpec)." +
          s"Please file a bug report with this error message, stack trace, and the query.")
      } else {
        val spec = distinctWindowSpec.head
        (spec.partitionSpec, spec.orderSpec, WindowFunctionType.functionType(expr))
      }
    }.toSeq
    // Third, we aggregate them by adding each Window operator for each Window Spec and then
    // setting this to the child of the next Window operator.
    val windowOps =
      groupedWindowExpressions.foldLeft(child) {
        case (last, ((partitionSpec, orderSpec, _), windowExpressions)) =>
          Window(windowExpressions, partitionSpec, orderSpec, last)
      }
    // Finally, we create a Project to output windowOps's output
    // newExpressionsWithWindowFunctions.
    Project(windowOps.output ++ newExpressionsWithWindowFunctions, windowOps)
  } // end of addWindow

  // We have to use transformDown at here to make sure the rule of
  // "Aggregate with Having clause" will be triggered.
  def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperatorsDown {
    case Filter(condition, _) if hasWindowFunction(condition) =>
      failAnalysis("It is not allowed to use window functions inside WHERE and HAVING clauses")
    // Aggregate with Having clause. This rule works with an unresolved Aggregate because
    // a resolved Aggregate will not have Window Functions.
    case f @ Filter(condition, a @ Aggregate(groupingExprs, aggregateExprs, child))
        if child.resolved &&
          hasWindowFunction(aggregateExprs) &&
          a.expressions.forall(_.resolved) =>
      val (windowExpressions, aggregateExpressions) = extract(aggregateExprs)
      // Create an Aggregate operator to evaluate aggregation functions.
      val withAggregate = Aggregate(groupingExprs, aggregateExpressions, child)
      // Add a Filter operator for conditions in the Having clause.
      val withFilter = Filter(condition, withAggregate)
      val withWindow = addWindow(windowExpressions, withFilter)
      // Finally, generate output columns according to the original projectList.
      val finalProjectList = aggregateExprs.map(_.toAttribute)
      Project(finalProjectList, withWindow)
    case p: LogicalPlan if !p.childrenResolved => p
    // Aggregate without Having clause.
    case a @ Aggregate(groupingExprs, aggregateExprs, child)
        if hasWindowFunction(aggregateExprs) &&
          a.expressions.forall(_.resolved) =>
      val (windowExpressions, aggregateExpressions) = extract(aggregateExprs)
      // Create an Aggregate operator to evaluate aggregation functions.
      val withAggregate = Aggregate(groupingExprs, aggregateExpressions, child)
      // Add Window operators.
      val withWindow = addWindow(windowExpressions, withAggregate)
      // Finally, generate output columns according to the original projectList.
      val finalProjectList = aggregateExprs.map(_.toAttribute)
      Project(finalProjectList, withWindow)
    // We only extract Window Expressions after all expressions of the Project
    // have been resolved.
    case p @ Project(projectList, child)
        if hasWindowFunction(projectList) && !p.expressions.exists(!_.resolved) =>
      val (windowExpressions, regularExpressions) = extract(projectList)
      // We add a project to get all needed expressions for window expressions from the child
      // of the original Project operator.
      val withProject = Project(regularExpressions, child)
      // Add Window operators.
      val withWindow = addWindow(windowExpressions, withProject)
      // Finally, generate output columns according to the original projectList.
      val finalProjectList = projectList.map(_.toAttribute)
      Project(finalProjectList, withWindow)
  }
}
/**
* Pulls out nondeterministic expressions from LogicalPlan which is not Project or Filter,
* put them into an inner Project and finally project them away at the outer Project.
*/
object PullOutNondeterministic extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    case p if !p.resolved => p // Skip unresolved nodes.
    // Project and Filter are allowed to evaluate nondeterministic expressions
    // directly, so they are left alone.
    case p: Project => p
    case f: Filter => f
    // Nondeterministic grouping expressions: evaluate each one exactly once in a
    // Project under the Aggregate and group on the resulting attribute instead.
    case a: Aggregate if a.groupingExpressions.exists(!_.deterministic) =>
      val nondeterToAttr = getNondeterToAttr(a.groupingExpressions)
      val newChild = Project(a.child.output ++ nondeterToAttr.values, a.child)
      a.transformExpressions { case e =>
        nondeterToAttr.get(e).map(_.toAttribute).getOrElse(e)
      }.copy(child = newChild)
    // todo: It's hard to write a general rule to pull out nondeterministic expressions
    // from LogicalPlan, currently we only do it for UnaryNode which has same output
    // schema with its child.
    case p: UnaryNode if p.output == p.child.output && p.expressions.exists(!_.deterministic) =>
      val nondeterToAttr = getNondeterToAttr(p.expressions)
      val newPlan = p.transformExpressions { case e =>
        nondeterToAttr.get(e).map(_.toAttribute).getOrElse(e)
      }
      val newChild = Project(p.child.output ++ nondeterToAttr.values, p.child)
      // Outer Project restores the original schema (projects the helper columns away).
      Project(p.output, newPlan.withNewChildren(newChild :: Nil))
  }

  // Maps each nondeterministic leaf expression to a NamedExpression that can be
  // evaluated once in a child Project and then referenced by attribute.
  private def getNondeterToAttr(exprs: Seq[Expression]): Map[Expression, NamedExpression] = {
    exprs.filterNot(_.deterministic).flatMap { expr =>
      val leafNondeterministic = expr.collect { case n: Nondeterministic => n }
      leafNondeterministic.distinct.map { e =>
        val ne = e match {
          case n: NamedExpression => n
          case _ => Alias(e, "_nondeterministic")()
        }
        e -> ne
      }
    }.toMap
  }
}
/**
* Set the seed for random number generation.
*/
object ResolveRandomSeed extends Rule[LogicalPlan] {
  // One shared generator: every missing seed in this analysis run is drawn from it.
  private lazy val random = new Random()

  override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    // Already-resolved operators keep the seeds they have.
    case resolvedPlan if resolvedPlan.resolved => resolvedPlan
    // Unresolved operators: fill in a concrete seed wherever one is absent.
    case unresolvedPlan =>
      unresolvedPlan.transformExpressionsUp {
        case Uuid(None) => Uuid(Some(random.nextLong()))
        case Shuffle(child, None) => Shuffle(child, Some(random.nextLong()))
      }
  }
}
/**
* Correctly handle null primitive inputs for UDF by adding extra [[If]] expression to do the
* null check. When user defines a UDF with primitive parameters, there is no way to tell if the
* primitive parameter is null or not, so here we assume the primitive input is null-propagatable
* and we should return null if the input is null.
*/
object HandleNullInputsForUDF extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    case p if !p.resolved => p // Skip unresolved nodes.
    case p => p transformExpressionsUp {
      // Only rewrite UDFs that declare at least one primitive-typed parameter.
      case udf @ ScalaUDF(_, _, inputs, inputPrimitives, _, _, _, _)
        if inputPrimitives.contains(true) =>
        // Otherwise, add special handling of null for fields that can't accept null.
        // The result of operations like this, when passed null, is generally to return null.
        assert(inputPrimitives.length == inputs.length)

        val inputPrimitivesPair = inputPrimitives.zip(inputs)
        // Build "in1 IS NULL OR in2 IS NULL OR ..." over the nullable primitive
        // inputs only; None when every primitive input is already non-nullable.
        val inputNullCheck = inputPrimitivesPair.collect {
          case (isPrimitive, input) if isPrimitive && input.nullable =>
            IsNull(input)
        }.reduceLeftOption[Expression](Or)

        if (inputNullCheck.isDefined) {
          // Once we add an `If` check above the udf, it is safe to mark those checked inputs
          // as null-safe (i.e., wrap with `KnownNotNull`), because the null-returning
          // branch of `If` will be called if any of these checked inputs is null. Thus we can
          // prevent this rule from being applied repeatedly.
          val newInputs = inputPrimitivesPair.map {
            case (isPrimitive, input) =>
              if (isPrimitive && input.nullable) {
                KnownNotNull(input)
              } else {
                input
              }
          }
          val newUDF = udf.copy(children = newInputs)
          // null result when any checked input is null; run the UDF otherwise.
          If(inputNullCheck.get, Literal.create(null, udf.dataType), newUDF)
        } else {
          udf
        }
    }
  }
}
/**
* Check and add proper window frames for all window functions.
*/
object ResolveWindowFrame extends Rule[LogicalPlan] {
  def apply(plan: LogicalPlan): LogicalPlan = plan resolveExpressions {
    // A window function that mandates its own frame must not be given a
    // conflicting user-specified frame.
    case WindowExpression(wf: WindowFunction, WindowSpecDefinition(_, _, f: SpecifiedWindowFrame))
      if wf.frame != UnspecifiedFrame && wf.frame != f =>
      failAnalysis(s"Window Frame $f must match the required frame ${wf.frame}")
    // No frame given: adopt the frame the window function itself requires.
    case WindowExpression(wf: WindowFunction, s @ WindowSpecDefinition(_, _, UnspecifiedFrame))
      if wf.frame != UnspecifiedFrame =>
      WindowExpression(wf, s.copy(frameSpecification = wf.frame))
    // Default frame: when an ORDER BY is present use
    // RANGE UNBOUNDED PRECEDING .. CURRENT ROW, otherwise the entire partition
    // (ROWS UNBOUNDED PRECEDING .. UNBOUNDED FOLLOWING).
    case we @ WindowExpression(e, s @ WindowSpecDefinition(_, o, UnspecifiedFrame))
      if e.resolved =>
      val frame = if (o.nonEmpty) {
        SpecifiedWindowFrame(RangeFrame, UnboundedPreceding, CurrentRow)
      } else {
        SpecifiedWindowFrame(RowFrame, UnboundedPreceding, UnboundedFollowing)
      }
      we.copy(windowSpec = s.copy(frameSpecification = frame))
  }
}
/**
* Check and add order to [[AggregateWindowFunction]]s.
*/
object ResolveWindowOrder extends Rule[LogicalPlan] {
  def apply(plan: LogicalPlan): LogicalPlan = plan resolveExpressions {
    // Window functions require an ORDER BY in their window specification.
    case WindowExpression(wf: WindowFunction, spec) if spec.orderSpec.isEmpty =>
      failAnalysis(s"Window function $wf requires window to be ordered, please add ORDER BY " +
        s"clause. For example SELECT $wf(value_expr) OVER (PARTITION BY window_partition " +
        s"ORDER BY window_ordering) from table")
    // Rank-like functions take the ordering expressions as their children.
    case WindowExpression(rank: RankLike, spec) if spec.resolved =>
      val order = spec.orderSpec.map(_.child)
      WindowExpression(rank.withOrder(order), spec)
  }
}
/**
* Removes natural or using joins by calculating output columns based on output from two sides,
* Then apply a Project on a normal Join to eliminate natural or using join.
*/
object ResolveNaturalAndUsingJoin extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    // USING join: the join column names are listed explicitly.
    case j @ Join(left, right, UsingJoin(joinType, usingCols), _, hint)
      if left.resolved && right.resolved && j.duplicateResolved =>
      commonNaturalJoinProcessing(left, right, joinType, usingCols, None, hint)
    // NATURAL join: join on every column name the two sides have in common.
    case j @ Join(left, right, NaturalJoin(joinType), condition, hint)
      if j.resolvedExceptNatural =>
      // find common column names from both sides
      val joinNames = left.output.map(_.name).intersect(right.output.map(_.name))
      commonNaturalJoinProcessing(left, right, joinType, joinNames, condition, hint)
  }
}
/**
* Resolves columns of an output table from the data in a logical plan. This rule will:
*
* - Reorder columns when the write is by name
* - Insert casts when data types do not match
* - Insert aliases when column names do not match
* - Detect plans that are not compatible with the output table and throw AnalysisException
*/
object ResolveOutputRelation extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperators {
    case append @ AppendData(table, query, isByName)
      if table.resolved && query.resolved && !append.outputResolved =>
      val projection = resolveOutput(table, query, isByName)
      if (projection != query) {
        append.copy(query = projection)
      } else {
        append
      }

    case overwrite @ OverwriteByExpression(table, _, query, isByName)
      if table.resolved && query.resolved && !overwrite.outputResolved =>
      val projection = resolveOutput(table, query, isByName)
      if (projection != query) {
        overwrite.copy(query = projection)
      } else {
        overwrite
      }

    case overwrite @ OverwritePartitionsDynamic(table, query, isByName)
      if table.resolved && query.resolved && !overwrite.outputResolved =>
      val projection = resolveOutput(table, query, isByName)
      if (projection != query) {
        overwrite.copy(query = projection)
      } else {
        overwrite
      }
  }

  // Shared by all three v2 write commands above: resolves the query's output
  // columns against the target table's schema (reordering, casting and aliasing
  // as needed), using the session's store assignment policy. Previously this
  // call was duplicated verbatim in each case.
  private def resolveOutput(
      table: NamedRelation,
      query: LogicalPlan,
      isByName: Boolean): LogicalPlan = {
    TableOutputResolver.resolveOutputColumns(
      table.name, table.output, query, isByName, conf, storeAssignmentPolicy)
  }
}
// Resolves the effective store assignment policy (defaulting to STRICT),
// rejecting LEGACY which is not allowed for data source v2 writes.
private def storeAssignmentPolicy: StoreAssignmentPolicy.Value =
  conf.storeAssignmentPolicy.getOrElse(StoreAssignmentPolicy.STRICT) match {
    // SPARK-28730: LEGACY store assignment policy is disallowed in data source v2.
    case StoreAssignmentPolicy.LEGACY =>
      val configKey = SQLConf.STORE_ASSIGNMENT_POLICY.key
      throw new AnalysisException(s"""
|"LEGACY" store assignment policy is disallowed in Spark data source V2.
|Please set the configuration $configKey to other values.""".stripMargin)
    case allowedPolicy =>
      allowedPolicy
  }
// Rewrites a natural/using join into an ordinary Join plus a Project that
// arranges the output: join keys first, then the non-key columns of the left
// side, then those of the right side (per join type below).
private def commonNaturalJoinProcessing(
    left: LogicalPlan,
    right: LogicalPlan,
    joinType: JoinType,
    joinNames: Seq[String],
    condition: Option[Expression],
    hint: JoinHint) = {
  // Resolve each join column name on the left side, failing analysis if absent.
  val leftKeys = joinNames.map { keyName =>
    left.output.find(attr => resolver(attr.name, keyName)).getOrElse {
      throw new AnalysisException(s"USING column `$keyName` cannot be resolved on the left " +
        s"side of the join. The left-side columns: [${left.output.map(_.name).mkString(", ")}]")
    }
  }
  // Same for the right side.
  val rightKeys = joinNames.map { keyName =>
    right.output.find(attr => resolver(attr.name, keyName)).getOrElse {
      throw new AnalysisException(s"USING column `$keyName` cannot be resolved on the right " +
        s"side of the join. The right-side columns: [${right.output.map(_.name).mkString(", ")}]")
    }
  }
  val joinPairs = leftKeys.zip(rightKeys)

  // AND the implied equi-join predicates onto any user-supplied condition.
  val newCondition = (condition ++ joinPairs.map(EqualTo.tupled)).reduceOption(And)

  // columns not in joinPairs
  val lUniqueOutput = left.output.filterNot(att => leftKeys.contains(att))
  val rUniqueOutput = right.output.filterNot(att => rightKeys.contains(att))

  // the output list looks like: join keys, columns from left, columns from right
  val projectList = joinType match {
    case LeftOuter =>
      // Right-side columns may be null-padded, so they become nullable.
      leftKeys ++ lUniqueOutput ++ rUniqueOutput.map(_.withNullability(true))
    case LeftExistence(_) =>
      // Semi/anti joins only expose the left side.
      leftKeys ++ lUniqueOutput
    case RightOuter =>
      rightKeys ++ lUniqueOutput.map(_.withNullability(true)) ++ rUniqueOutput
    case FullOuter =>
      // in full outer join, joinCols should be non-null if there is.
      val joinedCols = joinPairs.map { case (l, r) => Alias(Coalesce(Seq(l, r)), l.name)() }
      joinedCols ++
        lUniqueOutput.map(_.withNullability(true)) ++
        rUniqueOutput.map(_.withNullability(true))
    case _ : InnerLike =>
      leftKeys ++ lUniqueOutput ++ rUniqueOutput
    case _ =>
      sys.error("Unsupported natural join type " + joinType)
  }
  // use Project to trim unnecessary fields
  Project(projectList, Join(left, right, joinType, newCondition, hint))
}
/**
* Replaces [[UnresolvedDeserializer]] with the deserialization expression that has been resolved
* to the given input attributes.
*/
object ResolveDeserializer extends Rule[LogicalPlan] {
  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    case p if !p.childrenResolved => p
    case p if p.resolved => p

    case p => p transformExpressions {
      case UnresolvedDeserializer(deserializer, inputAttributes) =>
        // When no explicit input attributes were recorded, resolve against the
        // combined output of the operator's children.
        val inputs = if (inputAttributes.isEmpty) {
          p.children.flatMap(_.output)
        } else {
          inputAttributes
        }

        validateTopLevelTupleFields(deserializer, inputs)
        val resolved = resolveExpressionBottomUp(
          deserializer, LocalRelation(inputs), throws = true)
        val result = resolved transformDown {
          // Materialize MapObjects once the element type of the input
          // collection is known, resolving field extractions inside it.
          case UnresolvedMapObjects(func, inputData, cls) if inputData.resolved =>
            inputData.dataType match {
              case ArrayType(et, cn) =>
                MapObjects(func, inputData, et, cn, cls) transformUp {
                  case UnresolvedExtractValue(child, fieldName) if child.resolved =>
                    ExtractValue(child, fieldName, resolver)
                }
              case other =>
                throw new AnalysisException("need an array field but got " + other.catalogString)
            }
          // Same treatment for catalyst-to-external map conversions.
          case u: UnresolvedCatalystToExternalMap if u.child.resolved =>
            u.child.dataType match {
              case _: MapType =>
                CatalystToExternalMap(u) transformUp {
                  case UnresolvedExtractValue(child, fieldName) if child.resolved =>
                    ExtractValue(child, fieldName, resolver)
                }
              case other =>
                throw new AnalysisException("need a map field but got " + other.catalogString)
            }
        }
        validateNestedTupleFields(result)
        result
    }
  }

  // Common error for a Tuple-arity / field-count mismatch.
  private def fail(schema: StructType, maxOrdinal: Int): Unit = {
    throw new AnalysisException(s"Try to map ${schema.catalogString} to Tuple${maxOrdinal + 1}" +
      ", but failed as the number of fields does not line up.")
  }

  /**
   * For each top-level Tuple field, we use [[GetColumnByOrdinal]] to get its corresponding column
   * by position. However, the actual number of columns may be different from the number of Tuple
   * fields. This method is used to check the number of columns and fields, and throw an
   * exception if they do not match.
   */
  private def validateTopLevelTupleFields(
      deserializer: Expression, inputs: Seq[Attribute]): Unit = {
    val ordinals = deserializer.collect {
      case GetColumnByOrdinal(ordinal, _) => ordinal
    }.distinct.sorted

    // The referenced ordinals must be exactly 0..n-1 for the n input columns.
    if (ordinals.nonEmpty && ordinals != inputs.indices) {
      fail(inputs.toStructType, ordinals.last)
    }
  }

  /**
   * For each nested Tuple field, we use [[GetStructField]] to get its corresponding struct field
   * by position. However, the actual number of struct fields may be different from the number
   * of nested Tuple fields. This method is used to check the number of struct fields and nested
   * Tuple fields, and throw an exception if they do not match.
   */
  private def validateNestedTupleFields(deserializer: Expression): Unit = {
    val structChildToOrdinals = deserializer
      // There are 2 kinds of `GetStructField`:
      // 1. resolved from `UnresolvedExtractValue`, and it will have a `name` property.
      // 2. created when we build deserializer expression for nested tuple, no `name` property.
      // Here we want to validate the ordinals of nested tuple, so we should only catch
      // `GetStructField` without the name property.
      .collect { case g: GetStructField if g.name.isEmpty => g }
      .groupBy(_.child)
      .mapValues(_.map(_.ordinal).distinct.sorted)

    structChildToOrdinals.foreach { case (expr, ordinals) =>
      val schema = expr.dataType.asInstanceOf[StructType]
      // The accessed ordinals must cover exactly every field of the struct.
      if (ordinals != schema.indices) {
        fail(schema, ordinals.last)
      }
    }
  }
}
/**
* Resolves [[NewInstance]] by finding and adding the outer scope to it if the object being
* constructed is an inner class.
*/
object ResolveNewInstance extends Rule[LogicalPlan] {
  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    case p if !p.childrenResolved => p
    case p if p.resolved => p

    case p => p transformExpressions {
      // A NewInstance whose children are resolved but which is itself unresolved
      // is constructing an inner class and still needs its outer pointer.
      case n: NewInstance if n.childrenResolved && !n.resolved =>
        val outer = OuterScopes.getOuterScope(n.cls)
        if (outer == null) {
          throw new AnalysisException(
            s"Unable to generate an encoder for inner class `${n.cls.getName}` without " +
              "access to the scope that this class was defined in.\\n" +
              "Try moving this class out of its parent class.")
        }
        n.copy(outerPointer = Some(outer))
    }
  }
}
/**
* Replace the [[UpCast]] expression by [[Cast]], and throw exceptions if the cast may truncate.
*/
object ResolveUpCast extends Rule[LogicalPlan] {
  // Builds the analysis error for an unsafe up-cast. A LambdaVariable source
  // means the cast was generated for a collection element rather than a column,
  // so it is reported as "array element" instead of SQL text.
  private def fail(from: Expression, to: DataType, walkedTypePath: Seq[String]) = {
    val fromStr = from match {
      // Idiom fix: the binding `l` was unused; use a wildcard type pattern.
      case _: LambdaVariable => "array element"
      case e => e.sql
    }
    throw new AnalysisException(s"Cannot up cast $fromStr from " +
      s"${from.dataType.catalogString} to ${to.catalogString}.\\n" +
      "The type path of the target object is:\\n" + walkedTypePath.mkString("", "\\n", "\\n") +
      "You can either add an explicit cast to the input data or choose a higher precision " +
      "type of the field in the target object")
  }

  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    case p if !p.childrenResolved => p
    case p if p.resolved => p

    case p => p transformExpressions {
      // Keep the UpCast until its child resolves and its data type is known.
      case u @ UpCast(child, _, _) if !child.resolved => u

      // Legacy mode: allow the historical loose String -> atomic-type cast.
      case UpCast(child, dt: AtomicType, _)
        if SQLConf.get.getConf(SQLConf.LEGACY_LOOSE_UPCAST) &&
          child.dataType == StringType =>
        Cast(child, dt.asNullable)

      // Reject casts that may truncate or lose precision.
      case UpCast(child, dataType, walkedTypePath) if !Cast.canUpCast(child.dataType, dataType) =>
        fail(child, dataType, walkedTypePath)

      case UpCast(child, dataType, _) => Cast(child, dataType.asNullable)
    }
  }
}
/**
* Performs the lookup of DataSourceV2 Tables. The order of resolution is:
* 1. Check if this relation is a temporary table
* 2. Check if it has a catalog identifier. Here we try to load the table. If we find the table,
* we can return the table. The result returned by an explicit catalog will be returned on
* the Left projection of the Either.
* 3. Try resolving the relation using the V2SessionCatalog if that is defined. If the
* V2SessionCatalog returns a V1 table definition (UnresolvedTable), then we return a `None`
* on the right side so that we can fallback to the V1 code paths.
* The basic idea is, if a value is returned on the Left, it means a v2 catalog is defined and
* must be used to resolve the table. If a value is returned on the right, then we can try
* creating a V2 relation if a V2 Table is defined. If it isn't defined, then we should defer
* to V1 code paths.
*/
private def lookupV2Relation(
    identifier: Seq[String]
): Either[(CatalogPlugin, Identifier, Option[Table]), Option[Table]] = {
  import org.apache.spark.sql.catalog.v2.utils.CatalogV2Util._

  identifier match {
    // Temporary views take precedence; Right(None) defers to the V1 code path.
    case AsTemporaryViewIdentifier(ti) if catalog.isTemporaryTable(ti) =>
      scala.Right(None)
    // Explicit v2 catalog: Left means this catalog must be used for resolution,
    // carrying the (possibly absent) loaded table along.
    case CatalogObjectIdentifier(Some(v2Catalog), ident) =>
      scala.Left((v2Catalog, ident, loadTable(v2Catalog, ident)))
    // No catalog part: try the v2 session catalog; a V1 table definition falls
    // back to the V1 path via Right(None).
    case CatalogObjectIdentifier(None, ident) =>
      loadTable(catalogManager.v2SessionCatalog, ident) match {
        case Some(_: V1Table) => scala.Right(None)
        case other => scala.Right(other)
      }
    case _ => scala.Right(None)
  }
}
}
/**
* Removes [[SubqueryAlias]] operators from the plan. Subqueries are only required to provide
* scoping information for attributes and can be removed once analysis is complete.
*/
object EliminateSubqueryAliases extends Rule[LogicalPlan] {
  // This rule also runs at the start of optimization, so it must use a plain
  // transformUp (wrapped to stay legal inside the analyzer) instead of
  // resolveOperators.
  def apply(plan: LogicalPlan): LogicalPlan =
    AnalysisHelper.allowInvokingTransformsInAnalyzer {
      plan.transformUp {
        case alias: SubqueryAlias => alias.child
      }
    }
}
/**
* Removes [[Union]] operators from the plan if it just has one child.
*/
object EliminateUnions extends Rule[LogicalPlan] {
  // A Union over a single child is a no-op; replace it with that child.
  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperators {
    case union: Union if union.children.lengthCompare(1) == 0 =>
      union.children.head
  }
}
/**
* Cleans up unnecessary Aliases inside the plan. Basically we only need Alias as a top level
* expression in Project(project list) or Aggregate(aggregate expressions) or
* Window(window expressions). Notice that if an expression has other expression parameters which
* are not in its `children`, e.g. `RuntimeReplaceable`, the transformation for Aliases in this
* rule can't work for those parameters.
*/
object CleanupAliases extends Rule[LogicalPlan] {
  // Recursively strips every Alias/MultiAlias wrapper from an expression tree.
  def trimAliases(e: Expression): Expression = {
    e.transformDown {
      case Alias(child, _) => child
      case MultiAlias(child, _) => child
    }
  }

  // Keeps a top-level Alias/MultiAlias (it names the output column) but strips
  // aliases nested inside its child.
  def trimNonTopLevelAliases(e: Expression): Expression = e match {
    case a: Alias =>
      // Rebuild the alias while preserving its identity (exprId), qualifier and
      // metadata so references elsewhere keep resolving to it.
      a.copy(child = trimAliases(a.child))(
        exprId = a.exprId,
        qualifier = a.qualifier,
        explicitMetadata = Some(a.metadata))
    case a: MultiAlias =>
      a.copy(child = trimAliases(a.child))
    case other => trimAliases(other)
  }

  override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    // Project/Aggregate/Window keep top-level aliases (they define output names)
    // and lose only the nested ones.
    case Project(projectList, child) =>
      val cleanedProjectList =
        projectList.map(trimNonTopLevelAliases(_).asInstanceOf[NamedExpression])
      Project(cleanedProjectList, child)

    case Aggregate(grouping, aggs, child) =>
      val cleanedAggs = aggs.map(trimNonTopLevelAliases(_).asInstanceOf[NamedExpression])
      Aggregate(grouping.map(trimAliases), cleanedAggs, child)

    case Window(windowExprs, partitionSpec, orderSpec, child) =>
      val cleanedWindowExprs =
        windowExprs.map(e => trimNonTopLevelAliases(e).asInstanceOf[NamedExpression])
      Window(cleanedWindowExprs, partitionSpec.map(trimAliases),
        orderSpec.map(trimAliases(_).asInstanceOf[SortOrder]), child)

    // Operators that operate on objects should only have expressions from encoders, which should
    // never have extra aliases.
    case o: ObjectConsumer => o
    case o: ObjectProducer => o
    case a: AppendColumns => a

    // All remaining operators need no aliases at all, so strip them everywhere.
    case other =>
      other transformExpressionsDown {
        case Alias(child, _) => child
      }
  }
}
/**
* Ignore event time watermark in batch query, which is only supported in Structured Streaming.
* TODO: add this rule into analyzer rule list.
*/
object EliminateEventTimeWatermark extends Rule[LogicalPlan] {
  // Watermarks only make sense on streaming children; drop the node otherwise.
  override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperators {
    case watermark: EventTimeWatermark if !watermark.child.isStreaming =>
      watermark.child
  }
}
/**
* Maps a time column to multiple time windows using the Expand operator. Since it's non-trivial to
* figure out how many windows a time column can map to, we over-estimate the number of windows and
* filter out the rows where the time column is not inside the time window.
*/
object TimeWindowing extends Rule[LogicalPlan] {
  import org.apache.spark.sql.catalyst.dsl.expressions._

  // Names of the generated struct column and its fields.
  private final val WINDOW_COL_NAME = "window"
  private final val WINDOW_START = "start"
  private final val WINDOW_END = "end"

  /**
   * Generates the logical plan for generating window ranges on a timestamp column. Without
   * knowing what the timestamp value is, it's non-trivial to figure out deterministically how many
   * window ranges a timestamp will map to given all possible combinations of a window duration,
   * slide duration and start time (offset). Therefore, we express and over-estimate the number of
   * windows there may be, and filter the valid windows. We use last Project operator to group
   * the window columns into a struct so they can be accessed as `window.start` and `window.end`.
   *
   * The windows are calculated as below:
   * maxNumOverlapping <- ceil(windowDuration / slideDuration)
   * for (i <- 0 until maxNumOverlapping)
   * windowId <- ceil((timestamp - startTime) / slideDuration)
   * windowStart <- windowId * slideDuration + (i - maxNumOverlapping) * slideDuration + startTime
   * windowEnd <- windowStart + windowDuration
   * return windowStart, windowEnd
   *
   * This behaves as follows for the given parameters for the time: 12:05. The valid windows are
   * marked with a +, and invalid ones are marked with a x. The invalid ones are filtered using the
   * Filter operator.
   * window: 12m, slide: 5m, start: 0m :: window: 12m, slide: 5m, start: 2m
   * 11:55 - 12:07 + 11:52 - 12:04 x
   * 12:00 - 12:12 + 11:57 - 12:09 +
   * 12:05 - 12:17 + 12:02 - 12:14 +
   *
   * @param plan The logical plan
   * @return the logical plan that will generate the time windows using the Expand operator, with
   * the Filter operator for correctness and Project for usability.
   */
  def apply(plan: LogicalPlan): LogicalPlan = plan.resolveOperatorsUp {
    case p: LogicalPlan if p.children.size == 1 =>
      val child = p.children.head
      // Collect all distinct TimeWindow expressions in this operator.
      val windowExpressions =
        p.expressions.flatMap(_.collect { case t: TimeWindow => t }).toSet

      val numWindowExpr = windowExpressions.size
      // Only support a single window expression for now
      if (numWindowExpr == 1 &&
          windowExpressions.head.timeColumn.resolved &&
          windowExpressions.head.checkInputDataTypes().isSuccess) {

        val window = windowExpressions.head
        // Carry the time column's metadata over to the generated window column.
        val metadata = window.timeColumn match {
          case a: Attribute => a.metadata
          case _ => Metadata.empty
        }

        // Builds the i-th of `overlappingWindows` candidate windows for a row as
        // a struct<start, end>, following the formulas documented above.
        def getWindow(i: Int, overlappingWindows: Int): Expression = {
          val division = (PreciseTimestampConversion(
            window.timeColumn, TimestampType, LongType) - window.startTime) / window.slideDuration
          val ceil = Ceil(division)
          // if the division is equal to the ceiling, our record is the start of a window
          val windowId = CaseWhen(Seq((ceil === division, ceil + 1)), Some(ceil))
          val windowStart = (windowId + i - overlappingWindows) *
            window.slideDuration + window.startTime
          val windowEnd = windowStart + window.windowDuration

          CreateNamedStruct(
            Literal(WINDOW_START) ::
              PreciseTimestampConversion(windowStart, LongType, TimestampType) ::
              Literal(WINDOW_END) ::
              PreciseTimestampConversion(windowEnd, LongType, TimestampType) ::
              Nil)
        }

        val windowAttr = AttributeReference(
          WINDOW_COL_NAME, window.dataType, metadata = metadata)()

        if (window.windowDuration == window.slideDuration) {
          // Tumbling windows: each row belongs to exactly one window, so a
          // single Project suffices (no Expand).
          val windowStruct = Alias(getWindow(0, 1), WINDOW_COL_NAME)(
            exprId = windowAttr.exprId, explicitMetadata = Some(metadata))

          val replacedPlan = p transformExpressions {
            case t: TimeWindow => windowAttr
          }

          // For backwards compatibility we add a filter to filter out nulls
          val filterExpr = IsNotNull(window.timeColumn)

          replacedPlan.withNewChildren(
            Filter(filterExpr,
              Project(windowStruct +: child.output, child)) :: Nil)
        } else {
          // Sliding windows: emit every candidate window via Expand, then keep
          // only the windows that actually contain the row's timestamp.
          val overlappingWindows =
            math.ceil(window.windowDuration * 1.0 / window.slideDuration).toInt
          val windows =
            Seq.tabulate(overlappingWindows)(i => getWindow(i, overlappingWindows))

          val projections = windows.map(_ +: child.output)

          val filterExpr =
            window.timeColumn >= windowAttr.getField(WINDOW_START) &&
              window.timeColumn < windowAttr.getField(WINDOW_END)

          val substitutedPlan = Filter(filterExpr,
            Expand(projections, windowAttr +: child.output, child))

          val renamedPlan = p transformExpressions {
            case t: TimeWindow => windowAttr
          }

          renamedPlan.withNewChildren(substitutedPlan :: Nil)
        }
      } else if (numWindowExpr > 1) {
        p.failAnalysis("Multiple time window expressions would result in a cartesian product " +
          "of rows, therefore they are currently not supported.")
      } else {
        p // Return unchanged. Analyzer will throw exception later
      }
  }
}
/**
* Resolve a [[CreateNamedStruct]] if it contains [[NamePlaceholder]]s.
*/
object ResolveCreateNamedStruct extends Rule[LogicalPlan] {
  override def apply(plan: LogicalPlan): LogicalPlan = plan.resolveExpressions {
    case struct: CreateNamedStruct if !struct.resolved =>
      // Children alternate (name, value). Replace each NamePlaceholder whose
      // value expression is resolved with a literal of that expression's name;
      // leave every other pair untouched.
      val filledChildren = struct.children.grouped(2).flatMap {
        case Seq(NamePlaceholder, value: NamedExpression) if value.resolved =>
          Literal(value.name) :: value :: Nil
        case pair =>
          pair
      }
      CreateNamedStruct(filledChildren.toList)
  }
}
/**
* The aggregate expressions from subquery referencing outer query block are pushed
* down to the outer query block for evaluation. This rule below updates such outer references
* as AttributeReference referring attributes from the parent/outer query block.
*
* For example (SQL):
* {{{
* SELECT l.a FROM l GROUP BY 1 HAVING EXISTS (SELECT 1 FROM r WHERE r.d < min(l.b))
* }}}
* Plan before the rule.
* Project [a#226]
* +- Filter exists#245 [min(b#227)#249]
* : +- Project [1 AS 1#247]
* : +- Filter (d#238 < min(outer(b#227))) <-----
* : +- SubqueryAlias r
* : +- Project [_1#234 AS c#237, _2#235 AS d#238]
* : +- LocalRelation [_1#234, _2#235]
* +- Aggregate [a#226], [a#226, min(b#227) AS min(b#227)#249]
* +- SubqueryAlias l
* +- Project [_1#223 AS a#226, _2#224 AS b#227]
* +- LocalRelation [_1#223, _2#224]
* Plan after the rule.
* Project [a#226]
* +- Filter exists#245 [min(b#227)#249]
* : +- Project [1 AS 1#247]
* : +- Filter (d#238 < outer(min(b#227)#249)) <-----
* : +- SubqueryAlias r
* : +- Project [_1#234 AS c#237, _2#235 AS d#238]
* : +- LocalRelation [_1#234, _2#235]
* +- Aggregate [a#226], [a#226, min(b#227) AS min(b#227)#249]
* +- SubqueryAlias l
* +- Project [_1#223 AS a#226, _2#224 AS b#227]
* +- LocalRelation [_1#223, _2#224]
*/
object UpdateOuterReferences extends Rule[LogicalPlan] {
  // Only ever invoked on elements of `refExprs` below, which are all Aliases
  // (collected via `case a: Alias`), so the single-case match cannot fail here.
  private def stripAlias(expr: Expression): Expression = expr match { case a: Alias => a.child }

  // Rewrites outer references in a subquery plan so they point to the Alias the
  // outer Aggregate produces for the same (pushed-down) aggregate expression.
  private def updateOuterReferenceInSubquery(
      plan: LogicalPlan,
      refExprs: Seq[Expression]): LogicalPlan = {
    plan resolveExpressions { case e =>
      val outerAlias =
        refExprs.find(stripAlias(_).semanticEquals(stripOuterReference(e)))
      outerAlias match {
        case Some(a: Alias) => OuterReference(a.toAttribute)
        case _ => e
      }
    }
  }

  def apply(plan: LogicalPlan): LogicalPlan = {
    plan resolveOperators {
      // Only the HAVING shape (Filter directly above an Aggregate) is affected.
      case f @ Filter(_, a: Aggregate) if f.resolved =>
        f transformExpressions {
          case s: SubqueryExpression if s.children.nonEmpty =>
            // Collect the aliases from output of aggregate.
            val outerAliases = a.aggregateExpressions collect { case a: Alias => a }
            // Update the subquery plan to record the OuterReference to point to outer query plan.
            s.withNewPlan(updateOuterReferenceInSubquery(s.plan, outerAliases))
        }
    }
  }
}
| pgandhi999/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala | Scala | apache-2.0 | 142,171 |
package com.ponkotuy.value
/**
*
* @author ponkotuy
* Date: 15/07/24.
*/
/** Numeric identifiers for individual ships, keyed by ship name. */
object ShipIds {
  val Akebono: Int = 15
  val Mochizuki: Int = 31
  val Hibiki: Int = 35
  val Yudachi: Int = 45
  val Tenryu: Int = 51
  val Sendai: Int = 54
  val Naka: Int = 56
  val Chokai: Int = 69
  val Tone: Int = 71
  val Kirishima: Int = 85
  val Sazanami: Int = 94
  val Mikuma: Int = 120
  val Kumano: Int = 125
  val I8: Int = 128
  val Yamato: Int = 131
  val Makigumo: Int = 134
  val Noshiro: Int = 138
  val Yahagi: Int = 139
  val Sakawa: Int = 140
  val Musashi: Int = 143
  val Vernyj: Int = 147 // "Vernyj" (Верный); original comment gave the Japanese reading
  val Taiho: Int = 153
  val Katori: Int = 154
  val I401: Int = 155
  val AkitsuMaru: Int = 161
  val Tanikaze: Int = 169
  val Bismarck: Int = 171
  val Ooyodo: Int = 183
  val Taigei: Int = 184
  val Ryuho: Int = 185
  val Hatsukaze: Int = 190
  val Unryu: Int = 404
  val Harusame: Int = 405
  val Hayashimo: Int = 409
  val Kiyoshimo: Int = 410
  val Asagumo: Int = 413
  val Nowaki: Int = 415
  val Asashimo: Int = 425
  val U511: Int = 431
  val Ro500: Int = 436
  val Littorio: Int = 441
  val Roma: Int = 442
  val Italia: Int = 446

  /** True for ids in the enemy range 501..900 (inclusive). */
  def isEnemy(id: Int): Boolean = id > 500 && id <= 900
}
| nekoworkshop/MyFleetGirls | library/src/main/scala/com/ponkotuy/value/ShipIds.scala | Scala | mit | 999 |
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.server.db.io;
import java.io.{File, RandomAccessFile}
import scouter.io.{DataInputX, DataOutputX}
import scouter.util.IClose;
// Mutable holder for one key-file record, populated by RealKeyFile.getRecord.
// Fields mirror the on-disk record layout read in that method.
class ITEM {
  var deleted = false;              // tombstone flag: record has been logically removed
  var prevPos = 0L;                 // file position of the previous record (chain link; see RealKeyFile.setHashLink)
  var timeKey: Array[Byte] = null;  // the record's index key bytes
  var dataPos: Array[Byte] = null;  // opaque value blob (presumably an encoded data position — see name; defined by DataOutputX.writeBlob)
  var offset = 0L                   // file offset just past this record, i.e. where the next record starts
}
// Random-access key file for an index. On-disk record layout, as established by
// the read/write code below:
//   offset+0 : 1 byte   deleted flag (readBoolean/writeBoolean)
//   offset+1 : 5 bytes  prevPos link (readLong5/writeLong5)
//   offset+6 : 2-byte length-prefixed key bytes (readShortBytes/writeShortBytes)
//   then     : value blob (readBlob/writeBlob; exact framing defined by the
//              project's DataInputX/DataOutputX classes)
// A fresh file begins with the 2-byte magic 0xCA 0xFE, so the first record sits
// at offset 2 (see getFirstPos). Every accessor synchronizes on `this` because
// all of them share the single RandomAccessFile cursor.
class RealKeyFile(_path: String) extends IClose {
  val path = _path;
  val file = new File(path + ".kfile");
  protected var raf = new RandomAccessFile(file, "rw");
  // Empty (newly created) file: write the magic header so data starts at offset 2.
  if (this.raf.length() == 0) {
    this.raf.write(Array[Byte](0xCA.toByte, 0xFE.toByte));
  }

  // Reads the complete record at `pos`; ITEM.offset ends up pointing just past
  // the record (the start of the next one).
  def getRecord(pos: Long): ITEM = {
    this.synchronized {
      this.raf.seek(pos);
      val in = new DataInputX(this.raf);
      val r = new ITEM();
      r.deleted = in.readBoolean();
      r.prevPos = in.readLong5();
      r.timeKey = in.readShortBytes();
      r.dataPos = in.readBlob();
      r.offset = this.raf.getFilePointer();
      return r;
    }
  }

  // Reads only the 1-byte deleted flag at the start of the record.
  def isDeleted(pos: Long): Boolean = {
    this.synchronized {
      this.raf.seek(pos);
      return new DataInputX(this.raf).readBoolean();
    }
  }

  // Reads the 5-byte previous-record link (skips the deleted flag).
  def getPrevPos(pos: Long): Long = {
    this.synchronized {
      this.raf.seek(pos + 1);
      return new DataInputX(this.raf).readLong5();
    }
  }

  // Reads the key bytes (skips the deleted flag and prevPos link).
  def getTimeKey(pos: Long): Array[Byte] = {
    this.synchronized {
      this.raf.seek(pos + 1 + 5);
      return new DataInputX(this.raf).readShortBytes();
    }
  }

  // Reads the value blob: skips the flag, the link, then the length-prefixed key.
  def getDataPos(pos: Long): Array[Byte] = {
    this.synchronized {
      this.raf.seek(pos + 1 + 5);
      val in = new DataInputX(raf);
      val keyLen = this.raf.readShort();
      in.skipBytes(keyLen);
      return in.readBlob()
    }
  }

  // Overwrites the deleted flag of the record at `pos`.
  def setDelete(pos: Long, bool: Boolean) {
    this.synchronized {
      this.raf.seek(pos);
      new DataOutputX(this.raf).writeBoolean(bool);
    }
  }

  // Overwrites the record's prevPos field, linking it into a chain.
  def setHashLink(pos: Long, value: Long) {
    this.synchronized {
      this.raf.seek(pos + 1);
      new DataOutputX(this.raf).writeLong5(value);
    }
  }

  // Writes a full (non-deleted) record at `pos` in the layout described above.
  def write(pos: Long, prevPos: Long, indexKey: Array[Byte], dataPos: Array[Byte]) {
    this.synchronized {
      this.raf.seek(pos);
      val out = new DataOutputX();
      out.writeBoolean(false);
      out.writeLong5(prevPos);
      out.writeShortBytes(indexKey);
      out.writeBlob(dataPos);
      this.raf.write(out.toByteArray())
    }
  }

  // In-place update of the record's value blob. Only succeeds when the new value
  // is no longer than the existing one (records are fixed in place); returns
  // false so the caller can fall back to appending a new record.
  def update(pos: Long, key: Array[Byte], value: Array[Byte]): Boolean = {
    this.synchronized {
      this.raf.seek(pos + 1 + 5);
      val in = new DataInputX(raf);
      val keylen = this.raf.readShort();
      in.skipBytes(keylen);
      val org = in.readBlob();
      if (org.length < value.length)
        return false;
      // Rewind to the blob's start and overwrite it in place.
      this.raf.seek(pos + 1 + 5 + 2 + keylen);
      val out = new DataOutputX(this.raf);
      out.writeBlob(value);
      return true;
    }
  }

  // Appends a new record at the end of the file and returns its position.
  // NOTE(review): not synchronized between length() and write(), but write()
  // itself synchronizes — concurrent appends could interleave; confirm callers
  // serialize appends externally.
  def append(prevPos: Long, indexKey: Array[Byte], datePos: Array[Byte]): Long = {
    this.synchronized {
      val pos = this.raf.length();
      write(pos, prevPos, indexKey, datePos);
      return pos;
    }
  }

  // Closes the underlying file; subsequent getLength() returns 0. Idempotent.
  def close() {
    if (this.raf == null)
      return ;
    try {
      this.raf.close();
    } catch {
      case t: Throwable => t.printStackTrace();
    }
    this.raf = null;
  }

  // Closes and deletes the backing .kfile.
  def terminate() {
    close();
    try {
      file.delete();
    } catch {
      case t: Throwable => t.printStackTrace();
    }
  }

  // Position of the first record: right after the 2-byte magic header.
  def getFirstPos(): Long = {
    return 2;
  }

  // Current file length in bytes, or 0 once the file has been closed.
  def getLength(): Long = {
    return if (raf == null) 0 else raf.length()
  }
} | scouter-project/scouter | scouter.server/src/main/scala/scouter/server/db/io/RealKeyFile.scala | Scala | apache-2.0 | 4,678 |
package net.machinemuse.numina.death
import cpw.mods.fml.common.eventhandler.SubscribeEvent
import net.minecraftforge.event.entity.living.LivingDeathEvent
import net.minecraft.entity.player.EntityPlayer
import net.machinemuse.numina.scala.OptionCast
import net.machinemuse.numina.basemod.Numina
import net.machinemuse.numina.general.MuseLogger
import net.minecraftforge.client.event.GuiOpenEvent
import net.minecraft.client.gui.GuiGameOver
import net.minecraft.client.Minecraft
/**
* Author: MachineMuse (Claire Semple)
* Created: 6:31 PM, 10/15/13
*/
/**
 * Forge event handlers replacing the vanilla death flow with Numina's own
 * death GUI (GUI id 0 on the Numina mod container).
 */
object DeathEventHandler {

  /**
   * On a player's death: cancel vanilla handling, open the replacement death
   * screen and log the event. Deaths of non-player entities are ignored.
   */
  @SubscribeEvent def onLivingDeath(e: LivingDeathEvent) {
    for (player <- OptionCast[EntityPlayer](e.entityLiving)) {
      e.setCanceled(true)
      openDeathScreen(player)
      MuseLogger.logDebug("Death")
      // player.setHealth(10f)
    }
  }

  /**
   * Intercepts the vanilla "game over" screen on the client and shows
   * Numina's death GUI for the local player instead.
   */
  @SubscribeEvent def onOpenGui(e: GuiOpenEvent) {
    e.gui match {
      case _: GuiGameOver =>
        e.setCanceled(true)
        openDeathScreen(Minecraft.getMinecraft.thePlayer)
      case _ =>
    }
  }

  /** Opens Numina's death GUI at the player's current block coordinates. */
  private def openDeathScreen(player: EntityPlayer) {
    player.openGui(Numina, 0, player.worldObj, player.posX.toInt, player.posY.toInt, player.posZ.toInt)
  }
}
| MachineMuse/Numina | src/main/scala/net/machinemuse/numina/death/DeathEventHandler.scala | Scala | bsd-2-clause | 1,216 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import scala.concurrent.duration._
import org.apache.spark.SparkConf
import org.apache.spark.internal.config
import org.apache.spark.internal.config.Network._
import org.apache.spark.rpc.{RpcAddress, RpcEndpointRef, RpcEnv, RpcTimeout}
/**
 * Helpers for reading RPC-related settings from a [[SparkConf]] and for
 * locating driver-side RPC endpoints.
 */
private[spark] object RpcUtils {

  /**
   * Retrieve a `RpcEndpointRef` which is located in the driver via its name.
   */
  def makeDriverRef(name: String, conf: SparkConf, rpcEnv: RpcEnv): RpcEndpointRef = {
    val host = conf.get(config.DRIVER_HOST_ADDRESS.key, "localhost")
    val port = conf.getInt(config.DRIVER_PORT.key, 7077)
    Utils.checkHost(host)
    rpcEnv.setupEndpointRef(RpcAddress(host, port), name)
  }

  /** Returns the configured number of times to retry connecting. */
  def numRetries(conf: SparkConf): Int = conf.get(RPC_NUM_RETRIES)

  /** Returns the configured number of milliseconds to wait on each retry. */
  def retryWaitMs(conf: SparkConf): Long = conf.get(RPC_RETRY_WAIT)

  /** Returns the default Spark timeout to use for RPC ask operations. */
  def askRpcTimeout(conf: SparkConf): RpcTimeout =
    RpcTimeout(conf, Seq(RPC_ASK_TIMEOUT.key, NETWORK_TIMEOUT.key), "120s")

  /** Returns the default Spark timeout to use for RPC remote endpoint lookup. */
  def lookupRpcTimeout(conf: SparkConf): RpcTimeout =
    RpcTimeout(conf, Seq(RPC_LOOKUP_TIMEOUT.key, NETWORK_TIMEOUT.key), "120s")

  /**
   * Infinite timeout is used internally, so there's no timeout configuration
   * property that controls it. Its (never-read) property name is simply
   * "infinite", and the timeout itself should never fire.
   */
  val INFINITE_TIMEOUT = new RpcTimeout(Long.MaxValue.nanos, "infinite")

  // Upper bound keeping the byte count within Int range (Int.MaxValue bytes ≈ 2047 MB).
  private val MAX_MESSAGE_SIZE_IN_MB = Int.MaxValue / 1024 / 1024

  /** Returns the configured max message size for messages in bytes. */
  def maxMessageSizeBytes(conf: SparkConf): Int = {
    val sizeInMB = conf.get(RPC_MESSAGE_MAX_SIZE)
    if (sizeInMB > MAX_MESSAGE_SIZE_IN_MB) {
      throw new IllegalArgumentException(
        s"${RPC_MESSAGE_MAX_SIZE.key} should not be greater than $MAX_MESSAGE_SIZE_IN_MB MB")
    }
    sizeInMB * 1024 * 1024
  }
}
| shaneknapp/spark | core/src/main/scala/org/apache/spark/util/RpcUtils.scala | Scala | apache-2.0 | 3,133 |
import sbt._
/** Central sbt dependency declarations shared by the build definition. */
object Dependencies {
  // Versions
  lazy val scalatestVersion = "3.0.0"
  // Libraries
  // NOTE(review): the val name "scalatic" looks like a typo for "scalactic",
  // but renaming it would break build code referencing it; the artifact id is correct.
  val scalatic = "org.scalactic" %% "scalactic" % scalatestVersion
  val scalatest = "org.scalatest" %% "scalatest" % scalatestVersion
  // Projects
  // Backend project dependencies: scalactic at compile scope, scalatest test-only.
  val backendDeps = Seq(scalatic, scalatest % Test)
}
| luax/scala-immutability-plugin | project/Dependencies.scala | Scala | mit | 308 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar
import slamdata.Predef._
import quasar.RenderTree.ops._
import quasar.fp.ski._
import quasar.fp._
import org.specs2.matcher._
import scalaz._, Scalaz._
/** specs2 matchers that compare values via their RenderTree representation. */
trait TreeMatchers {
  // TODO remove in favor of `beTreeEqual`
  // uses `==`
  /** Succeeds when `actual == expected`; messages show a rendered-tree diff. */
  def beTree[A: RenderTree](expected: A): Matcher[A] = new Matcher[A] {
    def apply[S <: A](ex: Expectable[S]) = {
      val actual: A = ex.value
      // Diff of the rendered trees, included in both the ok and ko messages.
      val diff: String = (RenderTree[A].render(actual) diff expected.render).shows
      result(actual == expected, s"trees match:\\n$diff", s"trees do not match:\\n$diff", ex)
    }
  }
  // uses `scalaz.Equal`
  /** Like [[beTree]] but compares with `scalaz.Equal` (`≟`) instead of `==`. */
  def beTreeEqual[A: Equal: RenderTree](expected: A): Matcher[A] = new Matcher[A] {
    def apply[S <: A](s: Expectable[S]) = {
      val v: A = s.value
      // TODO: these are unintuitively reversed b/c of the `diff` implementation, should be fixed
      val diff = (RenderTree[A].render(v) diff expected.render).shows
      result(v ≟ expected, s"trees match:\\n$diff", s"trees do not match:\\n$diff", s)
    }
  }
}
/** specs2 matchers for scalaz `Validation` values. */
trait ValidationMatchers {
  /**
   * Succeeds when both sides failed (failure contents are not compared), or
   * when both succeeded with `==`-equal values; fails on any mixed outcome.
   */
  def beEqualIfSuccess[E, A](expected: Validation[E, A]) =
    new Matcher[Validation[E, A]] {
      def apply[S <: Validation[E, A]](s: Expectable[S]) = {
        val v = s.value
        v.fold(
          // actual failed: pass exactly when `expected` is a failure too
          κ(result(
            expected.fold(κ(true), κ(false)),
            "both failed",
            s"$v is not $expected",
            s)),
          // actual succeeded: `expected` must also be a success with an equal value
          a => expected.fold(
            κ(result(false, "", "expected failure", s)),
            ex => result(a == ex, "both are equal", s"$a is not $ex", s)))
      }
    }
}
/** specs2 matchers for scalaz disjunctions. */
trait DisjunctionMatchers {
  /**
   * Succeeds when the value is a right whose content is `Equal` (`≟`) to
   * `expected`; messages include a rendered-tree diff of actual vs expected.
   */
  def beRightDisjOrDiff[A, B: Equal](expected: B)(implicit rb: RenderTree[B]): Matcher[A \\/ B] = new Matcher[A \\/ B] {
    def apply[S <: A \\/ B](s: Expectable[S]) = {
      val v = s.value
      v.fold(
        // left side: always a failure
        a => result(false, s"$v is right", s"$v is not right", s),
        b => {
          // rendered-tree diff between the actual right value and the expectation
          val d = (b.render diff expected.render).shows
          result(b ≟ expected,
            s"\\n$v is right and equals:\\n$d",
            s"\\n$v is right but does not equal:\\n$d",
            s)
        })
    }
  }
}
/** Single import point aggregating all custom specs2 matchers above. */
object QuasarMatchers extends ValidationMatchers with DisjunctionMatchers with TreeMatchers
| slamdata/slamengine | foundation/src/test/scala/quasar/matchers.scala | Scala | apache-2.0 | 2,823 |
package org.jetbrains.plugins.scala.codeInspection.methodSignature
import com.intellij.codeInspection._
import org.jetbrains.plugins.scala.codeInspection.methodSignature.quickfix.AddCallParentheses
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.types.result.{TypeResult, TypingContext}
import org.jetbrains.plugins.scala.lang.psi.types.{ScFunctionType, ScType}
import org.jetbrains.plugins.scala.util.IntentionAvailabilityChecker
/**
* Pavel Fatin
*
* TODO test:
* {{{
* object A {
* def foo(): Int = 1
* foo // warn
*
* def goo(x: () => Int) = 1
* goo(foo) // okay
*
* foo : () => Int // okay
*
* def bar[A]() = 0
* bar[Int] // warn
* bar[Int]: () => Any // okay
* }
* }}}
*/
class EmptyParenMethodAccessedAsParameterlessInspection extends AbstractMethodSignatureInspection(
  "ScalaEmptyParenMethodAccessedAsParameterless", "Empty-paren method accessed as parameterless") {

  /**
   * Visitor: flags reference expressions that denote an empty-paren method
   * used without parentheses, skipping positions where parens are not needed.
   */
  def actionFor(holder: ProblemsHolder) = {
    case e: ScReferenceExpression if e.isValid && IntentionAvailabilityChecker.checkInspection(this, e) =>
      e.getParent match {
        case gc: ScGenericCall =>
          // `ref[T]`: only report when the generic call is not itself part of a call.
          ScalaPsiUtil.findCall(gc) match {
            case None => check(e, holder, gc.getType(TypingContext.empty))
            case Some(_) =>
          }
        // Already a call / operator / underscore section — parentheses not required here.
        case _: ScMethodCall | _: ScInfixExpr | _: ScPrefixExpr | _: ScUnderscoreSection => // okay
        case _ => check(e, holder, e.getType(TypingContext.empty))
      }
  }

  /**
   * Registers the problem (with an "add parentheses" quick-fix) when `e`
   * resolves to a source-defined empty-paren function — unless the expression
   * type is `() => A`, which suggests eta-expansion.
   */
  private def check(e: ScReferenceExpression, holder: ProblemsHolder, callType: TypeResult[ScType]) {
    e.resolve() match {
      case (f: ScFunction) if !f.isInCompiledFile && f.isEmptyParen =>
        callType.toOption match {
          case Some(ScFunctionType(_, Seq())) =>
            // might have been eta-expanded to () => A, so don't warn.
            // this avoids false positives. To be more accurate, we would need an 'etaExpanded'
            // flag in ScalaResolveResult.
          case _ => holder.registerProblem(e.nameId, getDisplayName, new AddCallParentheses(e))
        }
      case _ =>
    }
  }
} | LPTK/intellij-scala | src/org/jetbrains/plugins/scala/codeInspection/methodSignature/EmptyParenMethodAccessedAsParameterlessInspection.scala | Scala | apache-2.0 | 2,280 |
package nozzle.routing
import spray.http._
import spray.routing._
import spray.routing.directives._
import spray.routing.Directives._
import spray.httpx.encoding._
// Logging
/** Spray directives that log error-ish HTTP responses through the supplied logger. */
class LoggingRouterDirectives private[routing] (logger: ingredients.logging.PlainOldLogger) {

  /**
   * Side-effecting response logger, by status:
   *  - 200 / 304 / 206: silent
   *  - 404: debug
   *  - 302 / 301: info, including the redirect target from the Location header
   *  - any other HttpResponse: error with the full response
   *  - anything that is not an HttpResponse at all: error, tagged UNKNOWN
   */
  private def showErrorResponses(request: HttpRequest): Any => Unit = {
    case HttpResponse(StatusCodes.OK | StatusCodes.NotModified | StatusCodes.PartialContent, _, _, _) => ()
    case response @ HttpResponse(StatusCodes.NotFound, _, _, _) =>
      logger.debug(s"${response.status.intValue} ${request.method} ${request.uri}")
    case response @ HttpResponse(StatusCodes.Found | StatusCodes.MovedPermanently, _, _, _) =>
      logger.info(s"${response.status.intValue} ${request.method} ${request.uri} -> ${response.header[HttpHeaders.Location].map(_.uri.toString).getOrElse("")}")
    case response @ HttpResponse(_, _, _, _) =>
      logger.error(s"${response.status.intValue} ${request.method} ${request.uri}: ${response}")
    case response =>
      logger.error(s"UNKNOWN ${request.method} ${request.uri}: ${response}")
  }

  /**
   * Directive tapping every response through [[showErrorResponses]].
   * It always yields `None` as the LogEntry, so spray itself logs nothing —
   * all output goes through the injected logger as a side effect.
   */
  def withErrorResponseLogger: Directive0 = {
    logRequestResponse({ request: HttpRequest =>
      (resp: Any) => {
        showErrorResponses(request)(resp)
        (None : Option[spray.routing.directives.LogEntry])
      }
    })
  }
}
/** Mixin entry point for building [[LoggingRouterDirectives]] around a logger. */
trait Logging {
  /** Creates the logging directives bound to the given logger. */
  def logging(logger: ingredients.logging.PlainOldLogger) =
    new LoggingRouterDirectives(logger)
}
/** Standalone instance for use without mixing in [[Logging]]. */
object logging extends Logging
// Gzip and cors
/** Directives for web-facing concerns: request/response compression and CORS. */
trait WebRouterDirectives {
  /** Decompresses incoming requests and compresses responses when the client requests it. */
  def compressRequestResponse(magnet: RefFactoryMagnet): Directive0 = decompressRequest & compressResponseIfRequested(magnet)

  /**
   * Wraps `innerRoute` with the CORS response headers and answers OPTIONS
   * preflight requests with 204 No Content.
   * NOTE(review): `Access-Control-Allow-Credentials: true` combined with a
   * wildcard origin (see [[corsWildcard]] / AllOrigins) is rejected by
   * browsers per the CORS spec — confirm the wildcard variant is only used
   * for credential-less requests.
   */
  private[this] def corsHandler(allowedOrigins: AllowedOrigins, allowedHeaders: Set[String], innerRoute: Route): Route = {
    respondWithHeaders(
      HttpHeaders.`Access-Control-Allow-Origin`(allowedOrigins),
      HttpHeaders.`Access-Control-Allow-Credentials`(true),
      HttpHeaders.`Access-Control-Allow-Headers`(allowedHeaders.toSeq),
      HttpHeaders.`Access-Control-Allow-Methods`(List(HttpMethods.POST, HttpMethods.PUT, HttpMethods.GET, HttpMethods.DELETE, HttpMethods.OPTIONS))
    ) ((options (complete(StatusCodes.NoContent))) ~ innerRoute)
  }

  /**
   * Applies CORS handling only when the request carries an Origin header whose
   * hostname is in `allowedHostnames` (the matched origin is echoed back);
   * otherwise the inner route runs untouched, with no CORS headers.
   */
  def cors(allowedHostnames: Set[String], allowedHeaders: Set[String]): Directive0 = mapInnerRoute { innerRoute =>
    optionalHeaderValueByType[HttpHeaders.Origin]() { originOption =>
      originOption.flatMap { case HttpHeaders.Origin(origins) =>
        origins.find {
          case HttpOrigin(_, HttpHeaders.Host(hostname, _)) => allowedHostnames.contains(hostname)
        }
      }.map(allowedOrigin => corsHandler(SomeOrigins(Seq(allowedOrigin)), allowedHeaders, innerRoute)).getOrElse(innerRoute)
    }
  }

  /** Unconditionally applies CORS handling, allowing every origin. */
  def corsWildcard(allowedHeaders: Set[String]): Directive0 = mapInnerRoute { innerRoute =>
    corsHandler(AllOrigins, allowedHeaders, innerRoute)
  }

  // Small wrapper types so the typed `cors` overload below reads unambiguously.
  case class AllowedHeaders(headers: Set[String])
  sealed abstract trait AllowOriginsFrom
  object AllowOriginsFrom {
    case class TheseHostnames(hostnames: Set[String]) extends AllowOriginsFrom
    case object AllHostnames extends AllowOriginsFrom
  }

  /** Typed front-end dispatching to the whitelist or wildcard variant above. */
  def cors(
    allowedHostnames: AllowOriginsFrom,
    allowedHeaders: AllowedHeaders
  ): Directive0 = allowedHostnames match {
    case AllowOriginsFrom.TheseHostnames(hostnames) => cors(hostnames, allowedHeaders.headers)
    case AllowOriginsFrom.AllHostnames => corsWildcard(allowedHeaders.headers)
  }
}
/** Standalone instance for use without mixing in the trait. */
object WebRouterDirectives extends WebRouterDirectives
// Rejection handling
/**
 * Wrapper rejection emitted by `!!`: marks an inner malformed-request
 * rejection as "committed", i.e. the request should fail as malformed
 * instead of falling through to alternative routes.
 */
case class CommitRejection(innerRejection: Rejection) extends Rejection
trait RejectionHandling {
  // Rejections indicating a malformed request (bad query param, bad body,
  // failed validation) — these are the ones `!!` commits.
  private[this] val jsendMalformedRequestParamRejectionHandlerPF: PartialFunction[List[Rejection], Route] = {
    case (innerRejection@(_: MalformedQueryParamRejection |
      _: MalformedRequestContentRejection |
      _: ValidationRejection)) :: _ =>
      reject(CommitRejection(innerRejection))
  }
  /*
   * When `!!` ("commit") is matched, any subsequent match failure,
   * generating one of the Rejections listed in `jsendMalformedRequestParamRejectionHandlerPF`,
   * will make the request fail as malformed instead of falling back.
   */
  val `!!`: Directive0 = handleRejections(RejectionHandler (jsendMalformedRequestParamRejectionHandlerPF))
}
/** Standalone instance for use without mixing in [[RejectionHandling]]. */
object RejectionHandling extends RejectionHandling
/** Single import point bundling all router directive groups defined above. */
object RouterDirectives extends
  Logging
  with WebRouterDirectives
  with RejectionHandling
| utaal/nozzle | src/main/scala/routing/routerDirectives.scala | Scala | mit | 4,478 |
package be.angelcorp.glsl.ast
import be.angelcorp.glsl.util.GlslType
/**
 * Singleton AST node representing the absence of a GLSL statement;
 * its GLSL type is `Unit`.
 */
object GlslEmptyNode extends GlslEmpty with GlslStatementLike {
  // Computed once — the node is a singleton, so the simple class name never changes.
  override lazy val toString: String = getClass.getSimpleName
  override def typ: GlslType = GlslType.Unit
}
| AODtorusan/scala-glsl | macros/src/main/scala/be/angelcorp/glsl/ast/GlslEmptyNode.scala | Scala | mit | 246 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.physical.couchbase.planner
import quasar.fp.ski.κ
import quasar.physical.couchbase._
import quasar.Planner.{PlannerErrorME, InternalError}
import matryoshka._
/**
 * Planner whose algebra always fails with InternalError("unreachable").
 * Used for query-plan constructors that should never occur by the time
 * planning runs; reaching it indicates a bug upstream.
 */
final class UnreachablePlanner[T[_[_]], F[_]: PlannerErrorME, QS[_]] extends Planner[T, F, QS] {
  // κ ignores the input entirely and raises unconditionally.
  def plan: AlgebraM[F, QS, T[N1QL]] =
    κ(PlannerErrorME[F].raiseError(InternalError.fromMsg("unreachable")))
}
| jedesah/Quasar | couchbase/src/main/scala/quasar/physical/couchbase/planner/UnreachablePlanner.scala | Scala | apache-2.0 | 996 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model
import enumeratum.{Enum, EnumEntry}
import io.truthencode.ddo.support.naming.DisplayProperties
/** Ties an Enumeratum enum to entries that also carry display properties. */
trait DisplayHelper {
  // An entry is an EnumEntry that additionally exposes DisplayProperties.
  type Entry = EnumEntry with DisplayProperties
  // The enum companion type enumerating such entries.
  type E = Enum[_ <: Entry]
  // Concrete implementations supply the enum companion to enumerate over.
  val displayEnum: E
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/specs/scala/io/truthencode/ddo/model/DisplayHelper.scala | Scala | apache-2.0 | 896 |
package frameless
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, SparkSession}
import org.scalactic.anyvals.PosZInt
import org.scalatest.BeforeAndAfterAll
import org.scalatestplus.scalacheck.Checkers
import org.scalacheck.Prop
import org.scalacheck.Prop._
import scala.util.{Properties, Try}
import org.scalatest.funsuite.AnyFunSuite
/**
 * Mixes a per-suite SparkSession lifecycle into a test suite:
 * the session is created in beforeAll and stopped (and nulled) in afterAll.
 */
trait SparkTesting { self: BeforeAndAfterAll =>
  // Unique app id per suite run: timestamp plus a random numeric suffix
  // (math.random * 10E4, i.e. up to 1e5 — presumably only needs to be unique).
  val appID: String = new java.util.Date().toString + math.floor(math.random * 10E4).toLong.toString

  // Local-mode configuration with the web UI disabled to keep tests lightweight.
  val conf: SparkConf = new SparkConf()
    .setMaster("local[*]")
    .setAppName("test")
    .set("spark.ui.enabled", "false")
    .set("spark.app.id", appID)

  // Single mutable slot holding the suite's session; non-null only between
  // beforeAll and afterAll.
  private var s: SparkSession = _

  implicit def session: SparkSession = s
  implicit def sc: SparkContext = session.sparkContext
  implicit def sqlContext: SQLContext = session.sqlContext

  override def beforeAll(): Unit = {
    // Guard against double initialization of the shared session.
    assert(s == null)
    s = SparkSession.builder().config(conf).getOrCreate()
  }

  override def afterAll(): Unit = {
    if (s != null) {
      s.stop()
      s = null
    }
  }
}
/** Base class for frameless dataset test suites (ScalaCheck + shared Spark session). */
class TypedDatasetSuite extends AnyFunSuite with Checkers with BeforeAndAfterAll with SparkTesting {
  // Limit size of generated collections and number of checks to avoid OutOfMemoryError
  implicit override val generatorDrivenConfig: PropertyCheckConfiguration = {
    // Defaults can be overridden via FRAMELESS_GEN_SIZE_RANGE / FRAMELESS_GEN_MIN_SIZE
    // environment variables; non-numeric or negative values fall back to the default.
    def getPosZInt(name: String, default: PosZInt) = Properties.envOrNone(s"FRAMELESS_GEN_${name}")
      .flatMap(s => Try(s.toInt).toOption)
      .flatMap(PosZInt.from)
      .getOrElse(default)
    PropertyCheckConfiguration(
      sizeRange = getPosZInt("SIZE_RANGE", PosZInt(20)),
      minSize = getPosZInt("MIN_SIZE", PosZInt(0))
    )
  }

  implicit val sparkDelay: SparkDelay[Job] = Job.framelessSparkDelayForJob

  /**
   * Approximate numeric comparison for properties: passes when both values are
   * NaN/infinite, or when |a - b| < 1e-6, or when the difference is within 1%
   * of |a|; fails otherwise with a descriptive message.
   */
  def approximatelyEqual[A](a: A, b: A)(implicit numeric: Numeric[A]): Prop = {
    val da = numeric.toDouble(a)
    val db = numeric.toDouble(b)
    val epsilon = 1E-6
    // Spark has a weird behaviour concerning expressions that should return Inf
    // Most of the time they return NaN instead, for instance stddev of Seq(-7.827553978923477E227, -5.009124275715786E153)
    if((da.isNaN || da.isInfinity) && (db.isNaN || db.isInfinity)) proved
    else if (
      (da - db).abs < epsilon ||
      (da - db).abs < da.abs / 100)
      proved
    else falsified :| s"Expected $a but got $b, which is more than 1% off and greater than epsilon = $epsilon."
  }
}
| adelbertc/frameless | dataset/src/test/scala/frameless/TypedDatasetSuite.scala | Scala | apache-2.0 | 2,463 |
package org.jetbrains.plugins.scala
// tests marked with this category will be run as a separate step
// Empty marker traits used purely as test categories/tags; they carry no members.
trait SlowTests
trait PerfCycleTests
trait HighlightingTests
trait DebuggerTests
trait ScalacTests
| triplequote/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/testCategories.scala | Scala | apache-2.0 | 206 |
// (458,1023)
// Generative ScalaCollider patch — presumably auto-generated from a quadtree
// point (see the coordinate above). The magic constants define the sound
// itself; do not "clean up" or round them.
play {
  RandID.ir(4)
  RandSeed.ir(1, 12) // highly dependent on seed
  val leastChange = LeastChange.ar(a = 63.436977, b = 5.908494)
  val adParam = FreeVerb2.ar(inL = 9456.961, inR = 9456.961, mix = 81.04753, room = 97.025475, damp = leastChange)
  val durScale = DetectSilence.ar(63.436977, amp = leastChange, dur = 224.32455, doneAction = doNothing)
  val peakFollower = PeakFollower.ar(15.453109, decay = 17.015131)
  val setResetFF = SetResetFF.ar(trig = 17.015131, reset = 5.908494)
  val gendy2 = Gendy2.ar(ampDist = 1.3109143, durDist = 5.908494, adParam = adParam, ddParam = 63.436977, minFreq = -468.03256, maxFreq = 6414.315, ampScale = 17.015131, durScale = durScale, initCPs = 0.017860591, kNum = 17.015131, a = 0.017860591, c = 1.3109143)
  // Sum the three generators into a single signal.
  val mix_0 = Mix(Seq[GE](gendy2, setResetFF, peakFollower))
  // Output safety chain: per SuperCollider semantics CheckBadValues outputs 0
  // for normal samples, so the gate passes the mix only while it is free of
  // NaN/inf; LeakDC removes DC offset and Limiter caps the peaks before
  // writing to bus 0.
  val bad = CheckBadValues.ar(Mix.Mono(mix_0), id = 0.0, post = 0.0)
  val lim = LeakDC.ar(Limiter.ar(LeakDC.ar(Gate.ar(Mix.Mono(mix_0), gate = bad sig_== 0.0))))
  Out.ar(0, lim)
}
| Sciss/Grenzwerte | individual_sounds/quadtree_458_1023.scala | Scala | gpl-3.0 | 1,047 |
// scalac: -Werror -Wunused
object Test extends App {
  // Regression test (t11938): the literal pattern `x @ 1` against a List[Double]
  // must compile cleanly under -Werror -Wunused (flags in the header comment)
  // and bind x as a Double at runtime.
  for (x @ 1 <- List(1.0)) {
    assert(x.isInstanceOf[Double])
    assert(!x.isNaN)
  }
}
| scala/scala | test/files/run/t11938.scala | Scala | apache-2.0 | 145 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package whisk.core.controller.test
import java.time.Instant
import scala.concurrent.duration.DurationInt
import scala.language.postfixOps
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport.sprayJsonMarshaller
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport.sprayJsonUnmarshaller
import akka.http.scaladsl.server.Route
import spray.json._
import spray.json.DefaultJsonProtocol._
import whisk.core.controller.WhiskActionsApi
import whisk.core.entity._
import whisk.core.entity.size._
import whisk.http.ErrorResponse
import whisk.http.Messages
/**
* Tests Actions API.
*
* Unit tests of the controller service as a standalone component.
* These tests exercise a fresh instance of the service object in memory -- these
* tests do NOT communication with a whisk deployment.
*
*
* @Idioglossia
* "using Specification DSL to write unit tests, as in should, must, not, be"
* "using Specs2RouteTest DSL to chain HTTP requests for unit testing, as in ~>"
*/
@RunWith(classOf[JUnitRunner])
class ActionsApiTests extends ControllerTestCommon with WhiskActionsApi {
/** Actions API tests */
behavior of "Actions API"
val creds = WhiskAuthHelpers.newIdentity()
val namespace = EntityPath(creds.subject.asString)
val collectionPath = s"/${EntityPath.DEFAULT}/${collection.path}"
def aname = MakeName.next("action_tests")
val actionLimit = Exec.sizeLimit
val parametersLimit = Parameters.sizeLimit
//// GET /actions
it should "list actions by default namespace" in {
implicit val tid = transid()
val actions = (1 to 2).map { i =>
WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
}.toList
actions foreach { put(entityStore, _) }
waitOnView(entityStore, WhiskAction, namespace, 2)
Get(s"$collectionPath") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[List[JsObject]]
actions.length should be(response.length)
actions forall { a => response contains a.summaryAsJson } should be(true)
}
}
// ?docs disabled
ignore should "list action by default namespace with full docs" in {
implicit val tid = transid()
val actions = (1 to 2).map { i =>
WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
}.toList
actions foreach { put(entityStore, _) }
waitOnView(entityStore, WhiskAction, namespace, 2)
Get(s"$collectionPath?docs=true") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[List[WhiskAction]]
actions.length should be(response.length)
actions forall { a => response contains a } should be(true)
}
}
it should "list action with explicit namespace" in {
implicit val tid = transid()
val actions = (1 to 2).map { i =>
WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
}.toList
actions foreach { put(entityStore, _) }
waitOnView(entityStore, WhiskAction, namespace, 2)
Get(s"/$namespace/${collection.path}") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[List[JsObject]]
actions.length should be(response.length)
actions forall { a => response contains a.summaryAsJson } should be(true)
}
// it should "reject list action with explicit namespace not owned by subject" in {
val auser = WhiskAuthHelpers.newIdentity()
Get(s"/$namespace/${collection.path}") ~> Route.seal(routes(auser)) ~> check {
status should be(Forbidden)
}
}
it should "list should reject request with post" in {
implicit val tid = transid()
Post(s"$collectionPath") ~> Route.seal(routes(creds)) ~> check {
status should be(MethodNotAllowed)
}
}
//// GET /actions/name
it should "get action by name in default namespace" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
put(entityStore, action)
Get(s"$collectionPath/${action.name}") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[WhiskAction]
response should be(action)
}
}
it should "get action by name in explicit namespace" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
put(entityStore, action)
Get(s"/$namespace/${collection.path}/${action.name}") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[WhiskAction]
response should be(action)
}
// it should "reject get action by name in explicit namespace not owned by subject" in
val auser = WhiskAuthHelpers.newIdentity()
Get(s"/$namespace/${collection.path}/${action.name}") ~> Route.seal(routes(auser)) ~> check {
status should be(Forbidden)
}
}
it should "report NotFound for get non existent action" in {
implicit val tid = transid()
Get(s"$collectionPath/xyz") ~> Route.seal(routes(creds)) ~> check {
status should be(NotFound)
}
}
it should "report Conflict if the name was of a different type" in {
implicit val tid = transid()
val trigger = WhiskTrigger(namespace, aname)
put(entityStore, trigger)
Get(s"/$namespace/${collection.path}/${trigger.name}") ~> Route.seal(routes(creds)) ~> check {
status should be(Conflict)
}
}
it should "reject long entity names" in {
implicit val tid = transid()
val longName = "a" * (EntityName.ENTITY_NAME_MAX_LENGTH + 1)
Get(s"/$longName/${collection.path}/$longName") ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
responseAs[String] shouldBe {
Messages.entityNameTooLong(
SizeError(namespaceDescriptionForSizeError, longName.length.B, EntityName.ENTITY_NAME_MAX_LENGTH.B))
}
}
Seq(s"/$namespace/${collection.path}/$longName",
s"/$namespace/${collection.path}/pkg/$longName",
s"/$namespace/${collection.path}/$longName/a",
s"/$namespace/${collection.path}/$longName/$longName").
foreach { p =>
Get(p) ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
responseAs[String] shouldBe {
Messages.entityNameTooLong(
SizeError(segmentDescriptionForSizeError, longName.length.B, EntityName.ENTITY_NAME_MAX_LENGTH.B))
}
}
}
}
//// DEL /actions/name
it should "delete action by name" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
put(entityStore, action)
// it should "reject delete action by name not owned by subject" in
val auser = WhiskAuthHelpers.newIdentity()
Get(s"/$namespace/${collection.path}/${action.name}") ~> Route.seal(routes(auser)) ~> check {
status should be(Forbidden)
}
Delete(s"$collectionPath/${action.name}") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[WhiskAction]
response should be(action)
}
}
it should "report NotFound for delete non existent action" in {
implicit val tid = transid()
Delete(s"$collectionPath/xyz") ~> Route.seal(routes(creds)) ~> check {
status should be(NotFound)
}
}
//// PUT /actions/name
it should "put should reject request missing json content" in {
implicit val tid = transid()
Put(s"$collectionPath/xxx", "") ~> Route.seal(routes(creds)) ~> check {
val response = responseAs[String]
status should be(UnsupportedMediaType)
}
}
it should "put should reject request missing property exec" in {
implicit val tid = transid()
val content = """|{"name":"name","publish":true}""".stripMargin.parseJson.asJsObject
Put(s"$collectionPath/xxx", content) ~> Route.seal(routes(creds)) ~> check {
val response = responseAs[String]
status should be(BadRequest)
}
}
it should "put should reject request with malformed property exec" in {
implicit val tid = transid()
val content = """|{"name":"name",
|"publish":true,
|"exec":""}""".stripMargin.parseJson.asJsObject
Put(s"$collectionPath/xxx", content) ~> Route.seal(routes(creds)) ~> check {
val response = responseAs[String]
status should be(BadRequest)
}
}
it should "reject create with exec which is too big" in {
implicit val tid = transid()
val code = "a" * (actionLimit.toBytes.toInt + 1)
val exec: Exec = jsDefault(code)
val content = JsObject("exec" -> exec.toJson)
Put(s"$collectionPath/${aname}", content) ~> Route.seal(routes(creds)) ~> check {
status should be(RequestEntityTooLarge)
responseAs[String] should include {
Messages.entityTooBig(SizeError(WhiskAction.execFieldName, exec.size, Exec.sizeLimit))
}
}
}
it should "reject update with exec which is too big" in {
implicit val tid = transid()
val oldCode = "function main()"
val code = "a" * (actionLimit.toBytes.toInt + 1)
val action = WhiskAction(namespace, aname, jsDefault("??"))
val exec: Exec = jsDefault(code)
val content = JsObject("exec" -> exec.toJson)
put(entityStore, action)
Put(s"$collectionPath/${action.name}?overwrite=true", content) ~> Route.seal(routes(creds)) ~> check {
status should be(RequestEntityTooLarge)
responseAs[String] should include {
Messages.entityTooBig(SizeError(WhiskAction.execFieldName, exec.size, Exec.sizeLimit))
}
}
}
it should "reject create with parameters which are too big" in {
implicit val tid = transid()
val keys: List[Long] = List.range(Math.pow(10, 9) toLong, (parametersLimit.toBytes / 20 + Math.pow(10, 9) + 2) toLong)
val parameters = keys map { key =>
Parameters(key.toString, "a" * 10)
} reduce (_ ++ _)
val content = s"""{"exec":{"kind":"nodejs","code":"??"},"parameters":$parameters}""".stripMargin
Put(s"$collectionPath/${aname}", content.parseJson.asJsObject) ~> Route.seal(routes(creds)) ~> check {
status should be(RequestEntityTooLarge)
responseAs[String] should include {
Messages.entityTooBig(SizeError(WhiskEntity.paramsFieldName, parameters.size, Parameters.sizeLimit))
}
}
}
it should "reject create with annotations which are too big" in {
implicit val tid = transid()
val keys: List[Long] = List.range(Math.pow(10, 9) toLong, (parametersLimit.toBytes / 20 + Math.pow(10, 9) + 2) toLong)
val annotations = keys map { key =>
Parameters(key.toString, "a" * 10)
} reduce (_ ++ _)
val content = s"""{"exec":{"kind":"nodejs","code":"??"},"annotations":$annotations}""".stripMargin
Put(s"$collectionPath/${aname}", content.parseJson.asJsObject) ~> Route.seal(routes(creds)) ~> check {
status should be(RequestEntityTooLarge)
responseAs[String] should include {
Messages.entityTooBig(SizeError(WhiskEntity.annotationsFieldName, annotations.size, Parameters.sizeLimit))
}
}
}
it should "reject activation with entity which is too big" in {
implicit val tid = transid()
val code = "a" * (allowedActivationEntitySize.toInt + 1)
val content = s"""{"a":"$code"}""".stripMargin
Post(s"$collectionPath/${aname}", content.parseJson.asJsObject) ~> Route.seal(routes(creds)) ~> check {
status should be(RequestEntityTooLarge)
responseAs[String] should include {
Messages.entityTooBig(SizeError(fieldDescriptionForSizeError, (content.length).B, allowedActivationEntitySize.B))
}
}
}
it should "put should accept request with missing optional properties" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"))
val content = WhiskActionPut(Some(action.exec))
Put(s"$collectionPath/${action.name}", content) ~> Route.seal(routes(creds)) ~> check {
deleteAction(action.docid)
status should be(OK)
val response = responseAs[WhiskAction]
response should be(WhiskAction(action.namespace, action.name, action.exec,
action.parameters, action.limits, action.version,
action.publish, action.annotations ++ Parameters(WhiskAction.execFieldName, NODEJS6)))
}
}
it should "put should accept blackbox exec with empty code property" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, bb("bb"))
val content = Map("exec" -> Map("kind" -> "blackbox", "code" -> "", "image" -> "bb")).toJson.asJsObject
Put(s"$collectionPath/${action.name}", content) ~> Route.seal(routes(creds)) ~> check {
deleteAction(action.docid)
status should be(OK)
val response = responseAs[WhiskAction]
response should be(WhiskAction(action.namespace, action.name, action.exec,
action.parameters, action.limits, action.version,
action.publish, action.annotations ++ Parameters(WhiskAction.execFieldName, Exec.BLACKBOX)))
response.exec shouldBe an[BlackBoxExec]
response.exec.asInstanceOf[BlackBoxExec].code shouldBe empty
}
}
it should "put should accept blackbox exec with non-empty code property" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, bb("bb", "cc"))
val content = Map("exec" -> Map("kind" -> "blackbox", "code" -> "cc", "image" -> "bb")).toJson.asJsObject
Put(s"$collectionPath/${action.name}", content) ~> Route.seal(routes(creds)) ~> check {
deleteAction(action.docid)
status should be(OK)
val response = responseAs[WhiskAction]
response should be(WhiskAction(action.namespace, action.name, action.exec,
action.parameters, action.limits, action.version,
action.publish, action.annotations ++ Parameters(WhiskAction.execFieldName, Exec.BLACKBOX)))
response.exec shouldBe an[BlackBoxExec]
val bb = response.exec.asInstanceOf[BlackBoxExec]
bb.code shouldBe Some("cc")
bb.binary shouldBe false
}
}
// JSON serialization for fully qualified entity names, needed by seqParameters below.
private implicit val fqnSerdes = FullyQualifiedEntityName.serdes
// Builds the "_actions" parameter that a sequence action carries: the list of component names.
private def seqParameters(seq: Vector[FullyQualifiedEntityName]) = Parameters("_actions", seq.toJson)
// this test is sneaky; the installation of the sequence is done directly in the db
// and api checks are skipped
it should "reset parameters when changing sequence action to non sequence" in {
implicit val tid = transid()
val components = Vector("x/a", "x/b").map(stringToFullyQualifiedName(_))
val action = WhiskAction(namespace, aname, sequence(components), seqParameters(components))
val content = WhiskActionPut(Some(jsDefault("")))
put(entityStore, action, false)
// create an action sequence
Put(s"$collectionPath/${action.name}?overwrite=true", content) ~> Route.seal(routes(creds)) ~> check {
deleteAction(action.docid)
status should be(OK)
val response = responseAs[WhiskAction]
response.exec.kind should be(NODEJS6)
// overwriting a sequence with a non-sequence must drop the internal "_actions" parameter
response.parameters shouldBe Parameters()
}
}
// this test is sneaky; the installation of the sequence is done directly in the db
// and api checks are skipped
it should "preserve new parameters when changing sequence action to non sequence" in {
implicit val tid = transid()
val components = Vector("x/a", "x/b").map(stringToFullyQualifiedName(_))
val action = WhiskAction(namespace, aname, sequence(components), seqParameters(components))
val content = WhiskActionPut(Some(jsDefault("")), parameters = Some(Parameters("a", "A")))
put(entityStore, action, false)
// create an action sequence
Put(s"$collectionPath/${action.name}?overwrite=true", content) ~> Route.seal(routes(creds)) ~> check {
deleteAction(action.docid)
status should be(OK)
val response = responseAs[WhiskAction]
response.exec.kind should be(NODEJS6)
// explicitly supplied parameters replace the sequence's internal ones
response.parameters should be(Parameters("a", "A"))
}
}
// PUT with a parameters property: also checks that a different subject cannot
// write into a namespace it does not own (Forbidden) before the owner's PUT succeeds.
it should "put should accept request with parameters property" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
val content = WhiskActionPut(Some(action.exec), Some(action.parameters))
// it should "reject put action in namespace not owned by subject" in
val auser = WhiskAuthHelpers.newIdentity()
Put(s"/$namespace/${collection.path}/${action.name}", content) ~> Route.seal(routes(auser)) ~> check {
status should be(Forbidden)
}
Put(s"$collectionPath/${action.name}", content) ~> Route.seal(routes(creds)) ~> check {
deleteAction(action.docid)
status should be(OK)
val response = responseAs[WhiskAction]
response should be(WhiskAction(action.namespace, action.name, action.exec,
action.parameters, action.limits, action.version,
action.publish, action.annotations ++ Parameters(WhiskAction.execFieldName, NODEJS6)))
}
}
// A "parameters" property given as a plain JSON object (instead of the expected
// key/value array encoding) must be rejected with BadRequest.
it should "put should reject request with parameters property as jsobject" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
val content = WhiskActionPut(Some(action.exec), Some(action.parameters))
val params = """{ "parameters": { "a": "b" } }""".parseJson.asJsObject
// splice the malformed parameters field over the otherwise valid payload
val json = JsObject(WhiskActionPut.serdes.write(content).asJsObject.fields ++ params.fields)
Put(s"$collectionPath/${action.name}", json) ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
}
}
// PUT with an explicit limits property (timeout, memory, logs) echoing the defaults.
it should "put should accept request with limits property" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
val content = WhiskActionPut(Some(action.exec), Some(action.parameters), Some(ActionLimitsOption(Some(action.limits.timeout), Some(action.limits.memory), Some(action.limits.logs))))
Put(s"$collectionPath/${action.name}", content) ~> Route.seal(routes(creds)) ~> check {
deleteAction(action.docid)
status should be(OK)
val response = responseAs[WhiskAction]
response should be(WhiskAction(action.namespace, action.name, action.exec,
action.parameters, action.limits, action.version,
action.publish, action.annotations ++ Parameters(WhiskAction.execFieldName, NODEJS6)))
}
}
// Exercises the entity cache: PUT populates it, GET is served from it, DELETE invalidates it.
// Cache activity is observed by scraping the log stream for the cache markers.
it should "put and then get action from cache" in {
val action = WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
val content = WhiskActionPut(Some(action.exec), Some(action.parameters), Some(ActionLimitsOption(Some(action.limits.timeout), Some(action.limits.memory), Some(action.limits.logs))))
val name = action.name
// first request invalidates any previous entries and caches new result
Put(s"$collectionPath/$name", content) ~> Route.seal(routes(creds)(transid())) ~> check {
status should be(OK)
val response = responseAs[WhiskAction]
response should be(WhiskAction(action.namespace, action.name, action.exec,
action.parameters, action.limits, action.version,
action.publish, action.annotations ++ Parameters(WhiskAction.execFieldName, NODEJS6)))
}
// NOTE(review): "caching*" as a regex means "cachin" plus zero-or-more 'g' — it
// matches the intended log line anyway, but ".*" alone would be the intended pattern.
stream.toString should include regex (s"caching*.*${action.docid.asDocInfo}")
stream.reset()
// second request should fetch from cache
Get(s"$collectionPath/$name") ~> Route.seal(routes(creds)(transid())) ~> check {
status should be(OK)
val response = responseAs[WhiskAction]
response should be(WhiskAction(action.namespace, action.name, action.exec,
action.parameters, action.limits, action.version,
action.publish, action.annotations ++ Parameters(WhiskAction.execFieldName, NODEJS6)))
}
stream.toString should include regex (s"serving from cache:*.*${action.docid.asDocInfo}")
stream.reset()
// delete should invalidate cache
Delete(s"$collectionPath/$name") ~> Route.seal(routes(creds)(transid())) ~> check {
status should be(OK)
val response = responseAs[WhiskAction]
response should be(WhiskAction(action.namespace, action.name, action.exec,
action.parameters, action.limits, action.version,
action.publish, action.annotations ++ Parameters(WhiskAction.execFieldName, NODEJS6)))
}
stream.toString should include regex (s"invalidating*.*${action.docid.asDocInfo}")
stream.reset()
}
// PUT without ?overwrite=true on an existing action must fail with Conflict.
it should "reject put with conflict for pre-existing action" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
val content = WhiskActionPut(Some(action.exec))
put(entityStore, action)
Put(s"$collectionPath/${action.name}", content) ~> Route.seal(routes(creds)) ~> check {
status should be(Conflict)
}
}
// Overwriting an action replaces exec and parameters and bumps the patch version.
it should "update action with a put" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
val content = WhiskActionPut(Some(jsDefault("_")), Some(Parameters("x", "X")))
put(entityStore, action)
Put(s"$collectionPath/${action.name}?overwrite=true", content) ~> Route.seal(routes(creds)) ~> check {
deleteAction(action.docid)
status should be(OK)
val response = responseAs[WhiskAction]
response should be {
WhiskAction(action.namespace, action.name, content.exec.get, content.parameters.get, version = action.version.upPatch,
annotations = action.annotations ++ Parameters(WhiskAction.execFieldName, NODEJS6))
}
}
}
// A parameters-only PUT keeps the existing exec and only replaces parameters.
it should "update action parameters with a put" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
val content = WhiskActionPut(parameters = Some(Parameters("x", "X")))
put(entityStore, action)
Put(s"$collectionPath/${action.name}?overwrite=true", content) ~> Route.seal(routes(creds)) ~> check {
deleteAction(action.docid)
status should be(OK)
val response = responseAs[WhiskAction]
response should be {
WhiskAction(action.namespace, action.name, action.exec, content.parameters.get, version = action.version.upPatch,
annotations = action.annotations ++ Parameters(WhiskAction.execFieldName, NODEJS6))
}
}
}
//// POST /actions/name
// Nonblocking invoke with arguments: checks namespace ownership (Forbidden for a
// foreign subject), then that invokes return Accepted with an activation id, and
// that &result is ignored for nonblocking requests.
it should "invoke an action with arguments, nonblocking" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"), Parameters("x", "b"))
val args = JsObject("xxx" -> "yyy".toJson)
put(entityStore, action)
// it should "reject post to action in namespace not owned by subject"
val auser = WhiskAuthHelpers.newIdentity()
Post(s"/$namespace/${collection.path}/${action.name}", args) ~> Route.seal(routes(auser)) ~> check {
status should be(Forbidden)
}
Post(s"$collectionPath/${action.name}", args) ~> Route.seal(routes(creds)) ~> check {
status should be(Accepted)
val response = responseAs[JsObject]
response.fields("activationId") should not be None
}
// it should "ignore &result when invoking nonblocking action"
Post(s"$collectionPath/${action.name}?result=true", args) ~> Route.seal(routes(creds)) ~> check {
status should be(Accepted)
val response = responseAs[JsObject]
response.fields("activationId") should not be None
}
}
// Nonblocking invoke without arguments: Accepted with an activation id.
it should "invoke an action, nonblocking" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"))
put(entityStore, action)
Post(s"$collectionPath/${action.name}") ~> Route.seal(routes(creds)) ~> check {
status should be(Accepted)
val response = responseAs[JsObject]
response.fields("activationId") should not be None
}
}
// Blocking invoke with no active ack and no stored activation: falls back to Accepted.
it should "invoke an action, blocking with default timeout" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"), limits = ActionLimits(TimeLimit(1 second), MemoryLimit(), LogLimit()))
put(entityStore, action)
Post(s"$collectionPath/${action.name}?blocking=true") ~> Route.seal(routes(creds)) ~> check {
// status should be accepted because there is no active ack response and
// db polling will fail since there is no record of the activation
status should be(Accepted)
val response = responseAs[JsObject]
response.fields("activationId") should not be None
}
}
// Blocking invoke where the result arrives via db polling: the activation record is
// pre-stored so the poller finds it; with &result=true only the result JSON is returned.
it should "invoke an action, blocking and retrieve result via db polling" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"))
val activation = WhiskActivation(action.namespace, action.name, creds.subject, activationIdFactory.make(),
start = Instant.now,
end = Instant.now,
response = ActivationResponse.success(Some(JsObject("test" -> "yes".toJson))))
put(entityStore, action)
// storing the activation in the db will allow the db polling to retrieve it
// the test harness makes sure the activation id observed by the test matches
// the one generated by the api handler
put(activationStore, activation)
try {
Post(s"$collectionPath/${action.name}?blocking=true") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response should be(activation.withoutLogs.toExtendedJson)
}
// repeat invoke, get only result back
Post(s"$collectionPath/${action.name}?blocking=true&result=true") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response should be(activation.resultAsJson)
}
} finally {
deleteActivation(activation.docid)
}
}
// Same as above but the activation is delivered through the load balancer's active
// ack stub rather than the database.
it should "invoke an action, blocking and retrieve result via active ack" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"))
val activation = WhiskActivation(action.namespace, action.name, creds.subject, activationIdFactory.make(),
start = Instant.now,
end = Instant.now,
response = ActivationResponse.success(Some(JsObject("test" -> "yes".toJson))))
put(entityStore, action)
try {
// do not store the activation in the db, instead register it as the response to generate on active ack
loadBalancer.whiskActivationStub = Some((1.milliseconds, activation))
Post(s"$collectionPath/${action.name}?blocking=true") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response should be(activation.withoutLogs.toExtendedJson)
}
// repeat invoke, get only result back
Post(s"$collectionPath/${action.name}?blocking=true&result=true") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response should be(activation.resultAsJson)
}
} finally {
loadBalancer.whiskActivationStub = None
}
}
// Blocking invoke with an explicit &timeout: out-of-range timeouts (0 and 65000ms)
// are rejected; a timeout shorter than the stubbed active-ack delay yields Accepted,
// a longer one yields the full OK result.
it should "invoke an action, blocking up to specified timeout and retrieve result via active ack" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"))
val activation = WhiskActivation(action.namespace, action.name, creds.subject, activationIdFactory.make(),
start = Instant.now,
end = Instant.now,
response = ActivationResponse.success(Some(JsObject("test" -> "yes".toJson))))
put(entityStore, action)
try {
// do not store the activation in the db, instead register it as the response to generate on active ack
loadBalancer.whiskActivationStub = Some((300.milliseconds, activation))
Post(s"$collectionPath/${action.name}?blocking=true&timeout=0") ~> Route.seal(routes(creds)) ~> check {
status shouldBe BadRequest
responseAs[String] should include(Messages.invalidTimeout(WhiskActionsApi.maxWaitForBlockingActivation))
}
Post(s"$collectionPath/${action.name}?blocking=true&timeout=65000") ~> Route.seal(routes(creds)) ~> check {
status shouldBe BadRequest
responseAs[String] should include(Messages.invalidTimeout(WhiskActionsApi.maxWaitForBlockingActivation))
}
// will not wait long enough should get accepted status
Post(s"$collectionPath/${action.name}?blocking=true&timeout=100") ~> Route.seal(routes(creds)) ~> check {
status shouldBe Accepted
}
// repeat this time wait longer than active ack delay
Post(s"$collectionPath/${action.name}?blocking=true&timeout=500") ~> Route.seal(routes(creds)) ~> check {
status shouldBe OK
val response = responseAs[JsObject]
response shouldBe activation.withoutLogs.toExtendedJson
}
} finally {
loadBalancer.whiskActivationStub = None
}
}
// A blocking invoke whose activation failed with a whisk error must surface as
// InternalServerError while still returning the activation record.
it should "invoke a blocking action and return error response when activation fails" in {
implicit val tid = transid()
val action = WhiskAction(namespace, aname, jsDefault("??"))
val activation = WhiskActivation(action.namespace, action.name, creds.subject, activationIdFactory.make(),
start = Instant.now,
end = Instant.now,
response = ActivationResponse.whiskError("test"))
put(entityStore, action)
// storing the activation in the db will allow the db polling to retrieve it
// the test harness makes sure the activation id observed by the test matches
// the one generated by the api handler
put(activationStore, activation)
try {
Post(s"$collectionPath/${action.name}?blocking=true") ~> Route.seal(routes(creds)) ~> check {
status should be(InternalServerError)
val response = responseAs[JsObject]
response should be(activation.withoutLogs.toExtendedJson)
}
} finally {
deleteActivation(activation.docid)
}
}
// The next three tests plant a deliberately undeserializable entity (BadEntity) and
// verify each verb maps the failure to InternalServerError with the corruptedEntity message.
it should "report proper error when record is corrupted on delete" in {
implicit val tid = transid()
val entity = BadEntity(namespace, aname)
put(entityStore, entity)
Delete(s"$collectionPath/${entity.name}") ~> Route.seal(routes(creds)) ~> check {
status should be(InternalServerError)
responseAs[ErrorResponse].error shouldBe Messages.corruptedEntity
}
}
it should "report proper error when record is corrupted on get" in {
implicit val tid = transid()
val entity = BadEntity(namespace, aname)
put(entityStore, entity)
Get(s"$collectionPath/${entity.name}") ~> Route.seal(routes(creds)) ~> check {
status should be(InternalServerError)
responseAs[ErrorResponse].error shouldBe Messages.corruptedEntity
}
}
// Corruption must also be reported when the bad record is read indirectly, here as
// a component of a sequence being created.
it should "report proper error when record is corrupted on put" in {
implicit val tid = transid()
val entity = BadEntity(namespace, aname)
put(entityStore, entity)
val components = Vector(stringToFullyQualifiedName(s"$namespace/${entity.name}"))
val content = WhiskActionPut(Some(sequence(components)))
Put(s"$collectionPath/$aname", content) ~> Route.seal(routes(creds)) ~> check {
status should be(InternalServerError)
responseAs[ErrorResponse].error shouldBe Messages.corruptedEntity
}
}
// get and delete allowed, create/update with deprecated exec not allowed, post/invoke not allowed
it should "report proper error when runtime is deprecated" in {
implicit val tid = transid()
// swift is the deprecated kind here; swift3 is its allowed successor
val action = WhiskAction(namespace, aname, swift("??"))
val okUpdate = WhiskActionPut(Some(swift3("_")))
val badUpdate = WhiskActionPut(Some(swift("_")))
// create and overwrite with the deprecated kind are both rejected
Put(s"$collectionPath/${action.name}", WhiskActionPut(Some(action.exec))) ~> Route.seal(routes(creds)) ~> check {
status shouldBe BadRequest
responseAs[ErrorResponse].error shouldBe Messages.runtimeDeprecated(action.exec)
}
Put(s"$collectionPath/${action.name}?overwrite=true", WhiskActionPut(Some(action.exec))) ~> Route.seal(routes(creds)) ~> check {
status shouldBe BadRequest
responseAs[ErrorResponse].error shouldBe Messages.runtimeDeprecated(action.exec)
}
put(entityStore, action)
// even a no-op overwrite of an existing deprecated action is rejected
Put(s"$collectionPath/${action.name}?overwrite=true", JsObject()) ~> Route.seal(routes(creds)) ~> check {
status shouldBe BadRequest
responseAs[ErrorResponse].error shouldBe Messages.runtimeDeprecated(action.exec)
}
Put(s"$collectionPath/${action.name}?overwrite=true", badUpdate) ~> Route.seal(routes(creds)) ~> check {
status shouldBe BadRequest
responseAs[ErrorResponse].error shouldBe Messages.runtimeDeprecated(action.exec)
}
// invoking a deprecated action is also rejected
Post(s"$collectionPath/${action.name}") ~> Route.seal(routes(creds)) ~> check {
status shouldBe BadRequest
responseAs[ErrorResponse].error shouldBe Messages.runtimeDeprecated(action.exec)
}
// reading and deleting remain allowed
Get(s"$collectionPath/${action.name}") ~> Route.seal(routes(creds)) ~> check {
status shouldBe OK
}
Delete(s"$collectionPath/${action.name}") ~> Route.seal(routes(creds)) ~> check {
status shouldBe OK
}
put(entityStore, action)
// upgrading the deprecated action to the supported kind is allowed
Put(s"$collectionPath/${action.name}?overwrite=true", okUpdate) ~> Route.seal(routes(creds)) ~> check {
deleteAction(action.docid)
status shouldBe OK
}
}
}
| prccaraujo/openwhisk | tests/src/test/scala/whisk/core/controller/test/ActionsApiTests.scala | Scala | apache-2.0 | 37,256 |
package pipelines.text
import evaluation.BinaryClassifierEvaluator
import loaders.{AmazonReviewsDataLoader, LabeledData}
import nodes.learning.LogisticRegressionEstimator
import nodes.nlp._
import nodes.stats.TermFrequency
import nodes.util.CommonSparseFeatures
import org.apache.spark.{SparkConf, SparkContext}
import pipelines.Logging
import scopt.OptionParser
import workflow.Optimizer
/**
 * Trains and evaluates a binary sentiment classifier on Amazon review data.
 *
 * Pipeline: trim -> lowercase -> tokenize -> n-grams -> binary term frequency ->
 * most-common sparse features -> logistic regression. Evaluation is reported as a
 * binary-classification summary on the held-out test set.
 */
object AmazonReviewsPipeline extends Logging {
  val appName = "AmazonReviewsPipeline"

  /**
   * Runs the full train/evaluate cycle.
   *
   * @param sc   active Spark context
   * @param conf parsed pipeline configuration
   */
  def run(sc: SparkContext, conf: AmazonReviewsConfig) {
    val amazonTrainData = AmazonReviewsDataLoader(sc, conf.trainLocation, conf.threshold).labeledData
    val trainData = LabeledData(amazonTrainData.repartition(conf.numParts).cache())

    val training = trainData.data
    val labels = trainData.labels

    // Build the classifier estimator. TermFrequency(x => 1) yields binary presence
    // features rather than raw counts.
    val predictor = Trim andThen
      LowerCase() andThen
      Tokenizer() andThen
      NGramsFeaturizer(1 to conf.nGrams) andThen
      TermFrequency(x => 1) andThen
      (CommonSparseFeatures(conf.commonFeatures), training) andThen
      (LogisticRegressionEstimator(numClasses = 2, numIters = conf.numIters), training, labels)

    // Evaluate the classifier on the test set using the same threshold for labels.
    val amazonTestData = AmazonReviewsDataLoader(sc, conf.testLocation, conf.threshold).labeledData
    val testData = LabeledData(amazonTestData.repartition(conf.numParts).cache())
    val testLabels = testData.labels
    val testResults = predictor(testData.data)
    val eval = BinaryClassifierEvaluator(testResults.map(_ > 0), testLabels.map(_ > 0))

    logInfo("\n" + eval.summary())
  }

  // Configuration defaults mirror the original pipeline settings.
  case class AmazonReviewsConfig(
    trainLocation: String = "",
    testLocation: String = "",
    threshold: Double = 3.5,
    nGrams: Int = 2,
    commonFeatures: Int = 100000,
    numIters: Int = 20,
    numParts: Int = 512)

  /**
   * Parses command-line flags into an [[AmazonReviewsConfig]].
   *
   * Note: `.get` will throw if required flags are missing; scopt has already
   * printed usage by then.
   */
  def parse(args: Array[String]): AmazonReviewsConfig = new OptionParser[AmazonReviewsConfig](appName) {
    head(appName, "0.1")
    opt[String]("trainLocation") required() action { (x,c) => c.copy(trainLocation=x) }
    opt[String]("testLocation") required() action { (x,c) => c.copy(testLocation=x) }
    opt[Double]("threshold") action { (x,c) => c.copy(threshold=x)}
    opt[Int]("nGrams") action { (x,c) => c.copy(nGrams=x) }
    opt[Int]("commonFeatures") action { (x,c) => c.copy(commonFeatures=x) }
    // Bug fix: this flag previously wrote into numParts, silently ignoring --numIters.
    opt[Int]("numIters") action { (x,c) => c.copy(numIters=x) }
    opt[Int]("numParts") action { (x,c) => c.copy(numParts=x) }
  }.parse(args, AmazonReviewsConfig()).get

  /**
   * The actual driver receives its configuration parameters from spark-submit usually.
   * @param args
   */
  def main(args: Array[String]) = {
    val conf = new SparkConf().setAppName(appName)
    conf.setIfMissing("spark.master", "local[2]") // This is a fallback if things aren't set via spark submit.
    val sc = new SparkContext(conf)
    val appConfig = parse(args)
    run(sc, appConfig)
    sc.stop()
  }
}
| zhaozhang/keystone | src/main/scala/pipelines/text/AmazonReviewsPipeline.scala | Scala | apache-2.0 | 2,943 |
import sbt._
import Keys._
import com.typesafe.sbt.sbtghpages.GhpagesPlugin
import com.typesafe.sbt.sbtghpages.GhpagesPlugin.autoImport._
import com.typesafe.sbt.SbtGit.{git, GitKeys}
import com.typesafe.sbt.git.GitRunner
import com.typesafe.sbt.site.pamflet.PamfletPlugin
import com.typesafe.sbt.site.SitePlugin
/**
 * sbt AutoPlugin that publishes a Pamflet-generated site to GitHub Pages from CI,
 * pushing only when the docs directory actually changed in the commit range.
 */
object TravisSitePlugin extends sbt.AutoPlugin {
override def requires = PamfletPlugin && GhpagesPlugin
import PamfletPlugin.autoImport._
import SitePlugin.autoImport._
object autoImport {
// push the site only if docs/ changed in the CI commit range
lazy val pushSiteIfChanged = taskKey[Unit]("push the site if changed")
// GitHub "owner/repo" slug the site is pushed to
lazy val siteGitHubRepo = settingKey[String]("")
// committer email used for the gh-pages commit
lazy val siteEmail = settingKey[String]("")
}
import autoImport._
override lazy val projectSettings = Seq(
Pamflet / sourceDirectory := { baseDirectory.value / "docs" },
// ghpagesBranch in ghpagesUpdatedRepository := Some("gh-pages"),
// This task is responsible for updating the master branch on some temp dir.
// On the branch there are files that was generated in some other ways such as:
// - CNAME file
//
// This task's job is to call "git rm" on files and directories that this project owns
// and then copy over the newly generated files.
ghpagesSynchLocal := {
// sync the generated site
val repo = ghpagesUpdatedRepository.value
val s = streams.value
val r = GitKeys.gitRunner.value
gitConfig(repo, siteEmail.value, r, s.log)
// remove only the HTML files this plugin owns; other generated files are preserved
gitRemoveFiles(repo, (repo * "*.html").get.toList, r, s)
val mappings = for {
(file, target) <- siteMappings.value
} yield (file, repo / target)
IO.copy(mappings)
repo
},
// https://gist.github.com/domenic/ec8b0fc8ab45f39403dd
// 1. generate a new SSH key: `ssh-keygen -t rsa -b 4096 -C "foo@example.com"` and
// name it ~/.ssh/yourprojectname_deploy_rsa
// 2. add the public key ~/.ssh/yourprojectname_deploy_rsa.pub to GitHub: https://github.com/foo/bar/settings/keys
// 3. copy the private key ~/.ssh/yourprojectname_deploy_rsa to ./deploy_rsa
// 4. encrypt the token: `travis encrypt-file deploy_rsa`
// 5. remove the private key ./deploy_rsa
// 4. rename it to deploy_rsa.enc
pushSiteIfChanged := (Def.taskDyn {
val repo = (LocalRootProject / baseDirectory).value
val r = GitKeys.gitRunner.value
val s = streams.value
val changed = gitDocsChanged(repo, r, s.log)
if (changed) {
ghpagesPushSite
} else {
Def.task {
s.log.info("skip push site")
}
}
}).value,
git.remoteRepo := s"git@github.com:${siteGitHubRepo.value}.git"
)
// Removes the given files from the git index/worktree; no-op for an empty list.
def gitRemoveFiles(dir: File, files: List[File], git: GitRunner, s: TaskStreams): Unit = {
if (!files.isEmpty)
git(("rm" :: "-r" :: "-f" :: "--ignore-unmatch" :: files.map(_.getAbsolutePath)): _*)(dir, s.log)
()
}
// Returns true when the docs/ directory changed in TRAVIS_COMMIT_RANGE
// (falling back to the last commit when the variable is unset).
def gitDocsChanged(dir: File, git: GitRunner, log: Logger): Boolean = {
// git diff --shortstat HEAD^..HEAD docs
val range = sys.env.get("TRAVIS_COMMIT_RANGE") match {
case Some(x) => x
case _ => "HEAD^..HEAD"
}
val stat = git(("diff" :: "--shortstat" :: range :: "--" :: "docs" :: Nil): _*)(dir, log)
stat.trim.nonEmpty
}
// Sets the bot committer identity, but only on CI (detected via the CI env var).
def gitConfig(dir: File, email: String, git: GitRunner, log: Logger): Unit =
sys.env.get("CI") match {
case Some(_) =>
git(("config" :: "user.name" :: "foundweekends-bot[bot]" :: Nil): _*)(dir, log)
git(("config" :: "user.email" :: email :: Nil): _*)(dir, log)
case _ => ()
}
}
| foundweekends/giter8 | project/TravisSitePlugin.scala | Scala | apache-2.0 | 3,600 |
/* Copyright (C) 2008-2016 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.app.nlp.phrase
import cc.factorie.app.nlp.{Document, DocumentAnnotator, Token}
import scala.collection.mutable
import cc.factorie.app.nlp.parse.ParseTree
import cc.factorie.app.nlp.coref.{AnyNerPhraseFinder, MentionList, Mention}
import cc.factorie.app.nlp.ner.NerTag
/** A MentionList whose mentions were derived from parse trees. */
class ParseBasedMentionList(spans:Iterable[Mention]) extends MentionList(spans)
/** Phrase finder driven by the parse tree only (NER tags not used). */
object ParseBasedPhraseFinder extends ParseBasedPhraseFinder(false)
/** Phrase finder that additionally converts existing NER tags into phrases. */
object ParseAndNerBasedPhraseFinder extends ParseBasedPhraseFinder(true)
/**
 * DocumentAnnotator that extracts noun phrases (mentions) from dependency parse trees,
 * optionally seeded with NER spans. Phrases are typed as NAM (proper noun), NOM
 * (common noun) or PRO (pronoun) via a [[NounPhraseType]] attribute, deduplicated,
 * and stored on the document as a [[PhraseList]].
 *
 * @param useNER when true, NER tags are also converted into phrase spans
 */
class ParseBasedPhraseFinder(val useNER: Boolean) extends DocumentAnnotator {

  /** Requires a parse tree; additionally NER tags when useNER is set. */
  def prereqAttrs: Iterable[Class[_]] = if (!useNER) List(classOf[ParseTree]) else List(classOf[ParseTree], classOf[NerTag])

  def postAttrs: Iterable[Class[_]] = List(classOf[PhraseList])

  /** Annotates the document with a deduplicated, typed PhraseList. */
  def process(doc: Document): Document = {
    // Filter Mentions that have no MentionType and that are longer than 5 words -akm
    doc.attr += new PhraseList(dedup(getPhrases(doc)).filter(phrase => phrase.attr[NounPhraseType] ne null))
    doc
  }

  /** Collects candidate phrases from NER spans (optional), pronouns, common nouns,
    * proper nouns, and maximal NNP runs; each phrase is tagged with its type. */
  def getPhrases(doc: Document): Seq[Phrase] = {
    // `val` suffices: the buffer is only mutated, never reassigned (was a `var`).
    val docPhrases = mutable.ArrayBuffer[Phrase]()
    // if NER has already been done, then convert the NER tags to NER spans.
    // Note that this doesn't change the postAttrs for the annotator, since it may not necessarily add spans.
    if (useNER) docPhrases ++= AnyNerPhraseFinder(doc)
    // NAM = proper noun, NOM = common noun, PRO = pronoun
    docPhrases ++= personalPronounSpans(doc).map { phrase => phrase.attr += new NounPhraseType(phrase, "PRO"); phrase }
    docPhrases ++= nounPhraseSpans(doc, isCommonNoun).map { phrase => phrase.attr += new NounPhraseType(phrase, "NOM"); phrase }
    docPhrases ++= nounPhraseSpans(doc, isProperNoun).map { phrase => phrase.attr += new NounPhraseType(phrase, "NAM"); phrase }
    docPhrases ++= NNPSpans(doc).map { phrase => phrase.attr += new NounPhraseType(phrase, "NAM"); phrase }
    // Explicit result: previously the method implicitly returned the value of the
    // final `++=` call, which is fragile under reordering.
    docPhrases
  }

  private final val PERSONAL_PRONOUNS = Seq("PRP", "PRP$")
  private final val COMMON_NOUNS = Seq("NN" , "NNS")
  private final val PROPER_NOUNS = Seq("NNP", "NNPS")

  // POS-tag predicates (comparisons are case-insensitive on the tag category)
  private def isPersonalPronoun(t: Token) = PERSONAL_PRONOUNS.contains(t.posTag.categoryValue.toUpperCase)
  private def isCommonNoun   (t: Token) = COMMON_NOUNS.contains(t.posTag.categoryValue.toUpperCase)
  private def isProperNoun   (t: Token) = PROPER_NOUNS.contains(t.posTag.categoryValue.toUpperCase)

  /** Mention type implied by a token's POS tag, if any. */
  def predictMentionType(t: Token): Option[String] =
    if (isPersonalPronoun(t)) Some("PRO")
    else if (isCommonNoun(t)) Some("NOM")
    else if (isProperNoun(t)) Some("NAM")
    else None

  var FILTER_APPOS = true /* This flag is so that appositive filtering can be turned off.
                             If the mentions that we are extracting do not include the appositives as part of a mention
                             we want to make sure that we are extracting the appositives separately
                             default behavior is that we do filter the appositives. */

  /** Maximal runs of consecutive proper-noun tokens (within a sentence), one Phrase per run. */
  private def NNPSpans(doc: Document): Seq[Phrase] = {
    val spans = mutable.ArrayBuffer[mutable.ArrayBuffer[Token]]()
    spans += mutable.ArrayBuffer[Token]()
    for (section <- doc.sections; sentence <- section.sentences; token <- sentence.tokens) {
      // start a new run when the current token does not directly follow the previous one
      if (spans.last.nonEmpty && spans.last.last.next != token) spans += mutable.ArrayBuffer[Token]()
      if (isProperNoun(token)) spans.last += token
    }
    if (spans.nonEmpty && spans.last.isEmpty) spans.remove(spans.length-1)
    (for (span <- spans) yield
      new Phrase(span.head.section, span.head.positionInSection, span.last.positionInSection-span.head.positionInSection+1, span.last.positionInSection-span.head.positionInSection)).toSeq
  }

  // [Assumes personal pronouns are single tokens.]
  private def personalPronounSpans(doc: Document): Seq[Phrase] =
    for (section <- doc.sections; s <- section.sentences;
         (t,i) <- s.tokens.zipWithIndex if isPersonalPronoun(t)) yield
      new Phrase(section, s.start + i, 1,0)

  // this expects as input indices in the **document section** not the sentence
  // note that this never returns the root as the head, it always returns a pointer to an actual token in the sentence
  // it will either return the root of a parse tree span, or a token that is a child of the root
  def getHead(parse: ParseTree, subtree: Seq[Int]): Int = {
    val sentenceLevelIndices = subtree.map(i => i - parse.sentence.start)
    var curr = sentenceLevelIndices.head
    val leftBoundary = sentenceLevelIndices.head
    val rightBoundary = sentenceLevelIndices.last
    // climb toward the root while the parent stays inside the subtree interval
    while (parse.parentIndex(curr) > 0 && containedInInterval(leftBoundary, rightBoundary, parse.parentIndex(curr))) {
      curr = parse.parentIndex(curr)
    }
    curr + parse.sentence.start // this shifts it back to have section-level indices
  }

  // true iff testIndex lies within [left, right]
  private def containedInInterval(left: Int, right: Int, testIndex: Int): Boolean = {
    testIndex >= left && testIndex <= right
  }

  final val copularVerbs = collection.immutable.HashSet[String]() ++ Seq("is","are","was","'m")
  // dependency labels whose subtrees are kept as part of a noun phrase
  final val allowedChildLabels = Set("amod", "det", "nn", "num", "hmod", "hyph", "possessive", "poss", "predet", "nmod", "dep")
  // labels explicitly excluded; anything in neither set is reported as unexpected
  final val disallowedChildLabels = Set("conj", "punct", "prep", "cc", "appos", "npadvmod", "advmod", "quantmod", "partmod", "rcmod", "dobj", "nsubj", "infmod", "ccomp", "advcl", "aux", "intj", "neg", "preconj", "prt", "meta", "parataxis", "complm", "mark")

  /** Builds a Phrase around each head noun satisfying nounFilter, attaching allowed
    * dependents (and "of" prepositions); the head offset is recorded on the Phrase. */
  private def nounPhraseSpans(doc: Document, nounFilter: Token => Boolean): Seq[Phrase] = {
    val phrases = mutable.ArrayBuffer[Phrase]()
    for (section <- doc.sections; s <- section.sentences; (t, si) <- s.tokens.zipWithIndex if nounFilter(t);
         label = s.parse.label(t.positionInSentence).categoryValue
         if label != "nn" && label != "hmod") { // skip tokens that are themselves modifiers of another noun
      val children = s.parse.children(t.positionInSentence)
      children.foreach(c => {
        val cat = s.parse.label(c.positionInSentence).categoryValue
        if (!(allowedChildLabels.contains(cat) || disallowedChildLabels.contains(cat))) {
          // NOTE(review): debug output left in production code; consider routing
          // through a logger or removing once the label inventory is complete.
          println("BAD LABEL: " + cat)
          // println(doc.owplString(DepParser1))
        }
      })
      val goodChildren = children.filter { c =>
        val parseNode = s.parse.label(c.positionInSentence)
        allowedChildLabels.contains(parseNode.categoryValue) || (parseNode.categoryValue == "prep" && c.string.toLowerCase == "of")
      }
      val tokens = Seq(t) ++ goodChildren.flatMap(c => s.parse.subtree(c.positionInSentence))
      val sTokens = tokens.sortBy(_.positionInSection)
      val start = sTokens.head.positionInSection
      val end = sTokens.last.positionInSection
      phrases += new Phrase(section, start, end-start+1, sTokens.zipWithIndex.filter(i => i._1 eq t).head._2)
    }
    phrases
  }

  /** Collapses phrases sharing (section, start, length), preferring a NAM-typed one;
    * result is sorted by character offset then length. */
  private def dedup(phrases: Seq[Phrase]): Seq[Phrase] = {
    // Pick the NAM phrase among exact duplicates, else the first encountered.
    def dedupOverlappingMentions(phrases: Seq[Phrase]): Phrase = {
      if (phrases.length == 1) {
        phrases.head
      } else {
        phrases.find(_.attr[NounPhraseType].categoryValue == "NAM").getOrElse(phrases.head)
      }
    }
    phrases
      .groupBy(phrase => (phrase.section, phrase.start, phrase.length))
      .values.map(phraseSet => dedupOverlappingMentions(phraseSet)).toSeq
      .sortBy(phrase => (phrase.tokens.head.stringStart, phrase.length))
  }

  /** One-word-per-line debug string: mention type and token offset for mentions covering the token. */
  override def tokenAnnotationString(token:Token): String = token.document.attr[MentionList].filter(mention => mention.phrase.contains(token)) match { case ms:Seq[Mention] if ms.nonEmpty => ms.map(m => m.phrase.attr[NounPhraseType].categoryValue+":"+m.phrase.indexOf(token)).mkString(","); case _ => "_" }
}
| strubell/factorie | src/main/scala/cc/factorie/app/nlp/phrase/ParseBasedMention.scala | Scala | apache-2.0 | 8,407 |
package com.nutomic.ensichat.core.util
import java.io.File
import com.nutomic.ensichat.core.interfaces.SettingsInterface
import com.nutomic.ensichat.core.messages.MessageTest
import com.nutomic.ensichat.core.util
import junit.framework.TestCase
import org.junit.Assert._
object CryptoTest {

/** In-memory SettingsInterface backed by a plain Map; no persistence. */
class TestSettings extends SettingsInterface {
private var map = Map[String, Any]()
override def get[T](key: String, default: T): T = map.getOrElse(key, default).asInstanceOf[T]
override def put[T](key: String, value: T): Unit = map += (key -> value)
}

// Returns a Crypto instance rooted under the test directory, generating a local
// key pair on first use (subsequent calls reuse the keys on disk).
def getCrypto: util.Crypto = {
val tempFolder = new File(System.getProperty("testDir"), "/crypto/")
val crypto = new util.Crypto(new TestSettings(), tempFolder)
if (!crypto.localKeysExist) {
crypto.generateLocalKeys()
}
crypto
}

}
class CryptoTest extends TestCase {

  private lazy val crypto = CryptoTest.getCrypto

  // Signing must leave header and body untouched while producing a verifiable signature.
  def testSignVerify(): Unit = {
    for (message <- MessageTest.messages) {
      val signed = crypto.sign(message)
      assertTrue(crypto.verify(signed, Option(crypto.getLocalPublicKey)))
      assertEquals(message.header, signed.header)
      assertEquals(message.body, signed.body)
    }
  }

  // Encrypt/decrypt must round-trip the body; the header stays readable on the ciphertext.
  def testEncryptDecrypt(): Unit = {
    for (message <- MessageTest.messages) {
      val encrypted = crypto.encryptAndSign(message, Option(crypto.getLocalPublicKey))
      assertTrue(crypto.verify(encrypted, Option(crypto.getLocalPublicKey)))
      val decrypted = crypto.decrypt(encrypted)
      assertEquals(message.body, decrypted.body)
      assertEquals(message.header, encrypted.header)
    }
  }

}
| Nutomic/ensichat | core/src/test/scala/com/nutomic/ensichat/core/util/CryptoTest.scala | Scala | mpl-2.0 | 1,576 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.scalar.examples
import org.apache.ignite.scalar.scalar
import org.apache.ignite.scalar.scalar._
import scala.util.Random
import scala.util.control.Breaks._
/**
* Scalar-based Monte-Carlo example.
* <p/>
* Remote nodes should always be started with special configuration file which
* enables P2P class loading: `'ignite.{sh|bat} examples/config/example-ignite.xml'`.
* <p/>
* Alternatively you can run `ExampleNodeStartup` in another JVM which will
* start node with `examples/config/example-ignite.xml` configuration.
*/
object ScalarCreditRiskExample {
    /**
     * Example entry point: builds a random 5000-item credit portfolio and
     * estimates its credit risk on the cluster, averaging the per-node
     * Monte-Carlo results.
     *
     * @param args Command line arguments (unused).
     */
    def main(args: Array[String]) {
        scalar("examples/config/example-ignite.xml") {
            // Create portfolio.
            var portfolio = Seq.empty[Credit]
            val rnd = new Random
            // Generate some test portfolio items.
            (0 until 5000).foreach(i =>
                portfolio +:= Credit(
                    50000 * rnd.nextDouble, // Remaining crediting amount.
                    rnd.nextInt(1000), // Remaining term in days.
                    rnd.nextDouble / 10, // Annual percentage rate.
                    rnd.nextDouble / 20 + 0.02 // Expected annual default probability.
                )
            )
            // Forecast horizon in days.
            val horizon = 365
            // Number of Monte-Carlo iterations.
            val iter = 10000
            // Percentile.
            val percentile = 0.95
            // Mark the stopwatch.
            val start = System.currentTimeMillis
            // Calculate credit risk and print it out.
            // As you can see the ignite cluster enabling is completely hidden from the caller
            // and it is fully transparent to him. In fact, the caller is never directly
            // aware if method was executed just locally or on the 100s of cluster nodes.
            // Credit risk crdRisk is the minimal amount that creditor has to have
            // available to cover possible defaults.
            // `@<` distributes one closure per node and reduces the per-node
            // risks with the given averaging function.
            val crdRisk = ignite$ @< (closures(ignite$.cluster().nodes().size(), portfolio.toArray, horizon, iter, percentile),
                (s: Seq[Double]) => s.sum / s.size, null)
            println("Credit risk [crdRisk=" + crdRisk + ", duration=" +
                (System.currentTimeMillis - start) + "ms]")
        }
    }
    /**
     * Creates closures for calculating credit risks.
     *
     * @param clusterSize Size of the cluster.
     * @param portfolio Portfolio.
     * @param horizon Forecast horizon in days.
     * @param iter Number of Monte-Carlo iterations.
     * @param percentile Percentile.
     * @return Collection of closures.
     */
    private def closures(clusterSize: Int, portfolio: Array[Credit], horizon: Int, iter: Int,
        percentile: Double): Seq[() => Double] = {
        // Spread iterations evenly across nodes; the last node takes the remainder.
        val iterPerNode: Int = math.round(iter / clusterSize.asInstanceOf[Float])
        val lastNodeIter: Int = iter - (clusterSize - 1) * iterPerNode
        var cls = Seq.empty[() => Double]
        (0 until clusterSize).foreach(i => {
            val nodeIter = if (i == clusterSize - 1) lastNodeIter else iterPerNode
            cls +:= (() => new CreditRiskManager().calculateCreditRiskMonteCarlo(
                portfolio, horizon, nodeIter, percentile))
        })
        cls
    }
}
/**
* This class provides a simple model for a credit contract (or a loan). It is basically
* defines as remaining crediting amount to date, credit remaining term, APR and annual
* probability on default. Although this model is simplified for the purpose
* of this example, it is close enough to emulate the real-life credit
* risk assessment application.
*/
private case class Credit(
    remAmnt: Double, // Remaining crediting amount.
    remTerm: Int, // Remaining crediting remTerm.
    apr: Double, // Annual percentage rate (APR).
    edf: Double // Expected annual probability of default (EaDF).
) {
    /**
     * Gets either credit probability of default for the given period of time
     * if remaining term is less than crediting time or probability of default
     * for whole remained crediting time.
     *
     * @param term Default term.
     * @return Credit probability of default in relative percents
     *     (percentage / 100).
     */
    def getDefaultProbability(term: Int): Double = {
        // Effective number of days is capped by the remaining term.
        val days = math.min(remTerm, term)
        1 - math.exp(math.log(1 - edf) * days / 365.0)
    }
}
/**
* This class abstracts out the calculation of risk for a credit portfolio.
*/
private class CreditRiskManager {
    /**
     * Default randomizer with normal distribution.
     * Note that since every JVM on the ignite cluster will have its own random
     * generator (independently initialized) the Monte-Carlo simulation
     * will be slightly skewed when performed on the ignite cluster due to skewed
     * normal distribution of the sub-jobs comparing to execution on the
     * local node only with single random generator. Real-life applications
     * may want to provide its own implementation of distributed random
     * generator.
     */
    private val rndGen = new Random
    /**
     * Calculates credit risk for a given credit portfolio. This calculation uses
     * Monte-Carlo Simulation to produce risk value.
     *
     * @param portfolio Credit portfolio.
     * @param horizon Forecast horizon (in days).
     * @param num Number of Monte-Carlo iterations.
     * @param percentile Cutoff level.
     * @return Credit risk value, i.e. the minimal amount that creditor has to
     *      have available to cover possible defaults.
     */
    def calculateCreditRiskMonteCarlo(portfolio: Seq[Credit], horizon: Int, num:
        Int, percentile: Double): Double = {
        println(">>> Calculating credit risk for portfolio [size=" + portfolio.length + ", horizon=" +
            horizon + ", percentile=" + percentile + ", iterations=" + num + "] <<<")
        val start = System.currentTimeMillis
        // Simulated losses, ascending; lossProbs(i) is the cumulative probability
        // of observing a loss <= losses(i).
        val losses = calculateLosses(portfolio, horizon, num).sorted
        val lossProbs = new Array[Double](losses.size)
        (0 until losses.size).foreach(i => {
            if (i == 0)
                // Probability of the smallest loss.
                lossProbs(i) = getLossProbability(losses, 0)
            else if (losses(i) != losses(i - 1))
                // New distinct loss value: add its probability to the running total.
                lossProbs(i) = getLossProbability(losses, i) + lossProbs(i - 1)
            else
                // Duplicate loss value: cumulative probability unchanged.
                lossProbs(i) = lossProbs(i - 1)
        })
        var crdRisk = 0.0
        breakable {
            (0 until lossProbs.size).foreach(i => {
                if (lossProbs(i) > percentile) {
                    // BUGFIX: guard i == 0 with math.max — the original read
                    // losses(i - 1) unconditionally and threw
                    // ArrayIndexOutOfBoundsException whenever the very first
                    // cumulative probability already exceeded the percentile
                    // (e.g. an empty or constant-loss portfolio, where
                    // lossProbs(0) == 1.0).
                    crdRisk = losses(math.max(i - 1, 0))
                    break()
                }
            })
        }
        println(">>> Finished calculating portfolio risk [risk=" + crdRisk +
            ", time=" + (System.currentTimeMillis - start) + "ms]")
        crdRisk
    }
    /**
     * Calculates losses for the given credit portfolio using Monte-Carlo Simulation.
     * Simulates probability of default only.
     *
     * @param portfolio Credit portfolio.
     * @param horizon Forecast horizon.
     * @param num Number of Monte-Carlo iterations.
     * @return Losses array simulated by Monte Carlo method.
     */
    private def calculateLosses(portfolio: Seq[Credit], horizon: Int, num: Int): Array[Double] = {
        val losses = new Array[Double](num)
        // Count losses using Monte-Carlo method. We generate random probability of default,
        // if it exceeds certain credit default value we count losses - otherwise count income.
        (0 until num).foreach(i => {
            portfolio.foreach(crd => {
                val remDays = math.min(crd.remTerm, horizon)
                if (rndGen.nextDouble >= 1 - crd.getDefaultProbability(remDays))
                    // (1 + 'r' * min(H, W) / 365) * S.
                    // Where W is a horizon, H is a remaining crediting term, 'r' is an annual credit rate,
                    // S is a remaining credit amount.
                    losses(i) += (1 + crd.apr * math.min(horizon, crd.remTerm) / 365) * crd.remAmnt
                else
                    // - 'r' * min(H,W) / 365 * S
                    // Where W is a horizon, H is a remaining crediting term, 'r' is a annual credit rate,
                    // S is a remaining credit amount.
                    losses(i) -= crd.apr * math.min(horizon, crd.remTerm) / 365 * crd.remAmnt
            })
        })
        losses
    }
    /**
     * Calculates probability of certain loss in array of losses.
     *
     * @param losses Array of losses.
     * @param i Index of certain loss in array.
     * @return Probability of loss with given index.
     */
    private def getLossProbability(losses: Array[Double], i: Int): Double = {
        var count = 0.0
        losses.foreach(tmp => {
            if (tmp == losses(i))
                count += 1
        })
        count / losses.size
    }
}
| samaitra/ignite | examples/src/main/scala/org/apache/ignite/scalar/examples/ScalarCreditRiskExample.scala | Scala | apache-2.0 | 9,626 |
package org.jetbrains.plugins.scala
package lang
package completion
package filters.other
import psi.api.base.types.ScTypeElement
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiErrorElement;
import com.intellij.psi.filters.ElementFilter;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.plugins.scala.lang.psi._
import com.intellij.psi._
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates._
import org.jetbrains.plugins.scala.lang.parser._
import org.jetbrains.plugins.scala.lang.completion.ScalaCompletionUtil._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.types._
/**
* @author Alexander Podkhalyuzin
* Date: 28.05.2008
*/
class WithFilter extends ElementFilter {
  /** Decides whether the 'with' keyword may be completed at the given position.
   * It looks backwards past whitespace for either a type definition, a type
   * element or a `new` template and, if found, checks that inserting "with A"
   * there still parses. Never acceptable inside comments. */
  def isAcceptable(element: Object, context: PsiElement): Boolean = {
    if (context.isInstanceOf[PsiComment]) return false
    val leaf = getLeafByOffset(context.getTextRange().getStartOffset(), context)
    if (leaf != null) {
      // Walk left over spaces to the token preceding the caret.
      var i = context.getTextRange().getStartOffset() - 1
      while (i >= 0 && context.getContainingFile.getText.charAt(i) == ' ') i = i - 1
      if (i >= 0) {
        // First attempt: is the preceding element (or an ancestor) a type definition?
        var leaf1 = getLeafByOffset(i, context)
        while (leaf1 != null &&
                !leaf1.isInstanceOf[ScTypeDefinition]) {
          leaf1 = leaf1.getParent
        }
        // Only accept the candidate when it actually ends right before the caret
        // (otherwise we climbed past the relevant construct).
        if (leaf1 != null && leaf1.getTextRange.getEndOffset != i+1 && leaf1.getTextRange.getEndOffset != leaf.getTextRange.getEndOffset &&
                leaf1.getTextRange.getEndOffset != leaf.getTextRange.getStartOffset) leaf1 = null
        leaf1 match {
          case null =>
          case x: ScTypeDefinition => {
            // Verify the synthetic "with A" clause parses after the class/trait.
            return checkClassWith(x, "with A", x.getManager)
          }
        }
        // Second attempt: type element or `new` template expression.
        leaf1 = getLeafByOffset(i, context)
        while (leaf1 != null && !leaf1.isInstanceOf[ScTypeElement] &&
                !leaf1.isInstanceOf[ScNewTemplateDefinition]) {
          leaf1 = leaf1.getParent
        }
        // Same end-offset sanity check as above.
        if (leaf1 != null && leaf1.getTextRange.getEndOffset != i+1 && leaf1.getTextRange.getEndOffset != leaf.getTextRange.getEndOffset &&
                leaf1.getTextRange.getEndOffset != leaf.getTextRange.getStartOffset) leaf1 = null
        leaf1 match {
          case null => return false
          case x: ScTypeElement => {
            return checkTypeWith(x, "with A", x.getManager)
          }
          case x: ScNewTemplateDefinition => {
            return checkNewWith(x, "with A", x.getManager)
          }
        }
      }
    }
    return false
  }
def isClassAcceptable(hintClass: java.lang.Class[_]): Boolean = {
return true;
}
@NonNls
override def toString(): String = {
return "'with' keyword filter"
}
} | consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/completion/filters/other/WithFilter.scala | Scala | apache-2.0 | 2,808 |
package jp.mwsoft.wikipedia.totext
import org.junit.runner.RunWith
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner
import org.joda.time.format.DateTimeFormat
@RunWith(classOf[JUnitRunner])
class UtilsSpec extends FlatSpec with ShouldMatchers {

  "removeWikiSymbols" should "remove all wiki syntax" in {
    // Template braces and link brackets are stripped, inner text preserved.
    val templateInput = """{{WikipediaPage|ウィキペディアでのテスト編集や試し書きには、[[Wikipedia:サンドボックス]]をご利用ください。}}"""
    val templateExpected = """WikipediaPage ウィキペディアでのテスト編集や試し書きには、Wikipedia:サンドボックスをご利用ください。"""
    Utils.removeWikiSymbols(templateInput) should be === templateExpected
    // List markers and nested links are removed as well.
    val listInput = """* [[TEST (x86命令)]] - [[x86]][[アセンブリ言語]]の命令。"""
    val listExpected = """TEST (x86命令) - x86アセンブリ言語の命令。"""
    Utils.removeWikiSymbols(listInput) should be === listExpected
  }

  "removeTag" should "remove html tag" in {
    // HTML elements disappear but wiki links inside them are untouched.
    val htmlInput = """<ul><li>14:46 2004年4月30日 [[利用者:Oxhop|Oxhop]] "[[:画像:LocationMacedonia.png|LocationMacedonia.png]]"をアップロードしました。 <em>(マケドニアの位置 - 英語版より)</em></li>"""
    val htmlExpected = """14:46 2004年4月30日 [[利用者:Oxhop|Oxhop]] "[[:画像:LocationMacedonia.png|LocationMacedonia.png]]"をアップロードしました。 (マケドニアの位置 - 英語版より)"""
    Utils.removeTags(htmlInput) should be === htmlExpected
  }

  "removeDisusedText" should "remove DEFAULTSORT line" in {
    val sortInput = """DEFAULTSORT:けんこCategory:言語 *"""
    Utils.removeDisusedText(sortInput) should be === ""
  }

  "removeDisusedText" should "remove Category line" in {
    val categoryInput = """Category:言語学Category:民族"""
    Utils.removeDisusedText(categoryInput) should be === "言語学民族"
  }

  "removeDisusedText" should "remove url" in {
    val urlInput = """生物学的な観点から言語の起源を探ろうという試みもある。最近の分子生物学的研究によれば、FOXP2と名づけられている遺伝子に生じたある種の変異が言語能力の獲得につながった可能性があるhttp://www.ncbi.nlm.nih.gov/pubmed/11586359?dopt=Abstract Nature. 413(6855):519-23.。"""
    val urlExpected = "生物学的な観点から言語の起源を探ろうという試みもある。最近の分子生物学的研究によれば、FOXP2と名づけられている遺伝子に生じたある種の変異が言語能力の獲得につながった可能性がある Nature. 413(6855):519-23.。"
    Utils.removeDisusedText(urlInput) should be === urlExpected
  }

}
| mwsoft/wikipedia2text | src/test/scala/jp/mwsoft/wikipedia/totext/UtilsSpec.scala | Scala | mit | 2,745 |
package com.socrata.pg.server
class SoQLNumberFunctionsTest extends SoQLTest {

  // (test label, SoQL query, expected-result fixture); one test is registered
  // per row, preserving the original names and order.
  private val numberCases = Seq(
    ("c < x", "select make, name, v_max where v_max < 50 order by v_max, name", "where-num-lt.json"),
    ("c <= x", "select make, name, v_max where v_max <= 50 order by v_max, name", "where-num-le.json"),
    ("c = x", "select make, name, v_max where v_max = 50 order by v_max, name", "where-num-eq.json"),
    ("c != x", "select make, name, v_max where v_max != 50 order by v_max, name", "where-num-ne.json"),
    ("c > x", "select make, name, v_max where v_max > 50 order by v_max, name", "where-num-gt.json"),
    ("c >= x", "select make, name, v_max where v_max >= 50 order by v_max, name", "where-num-ge.json"),
    ("c + x", "select make, name, v_max where v_max + 50 = 100 order by v_max, name", "where-num-add.json"),
    ("c - x", "select make, name, v_max where v_max - 50 = 0 order by v_max, name", "where-num-sub.json"),
    ("+c", "select make, name, v_max where +v_max = +50 order by v_max, name", "where-num-plus.json"),
    ("-c", "select make, name, v_max where -v_max = -50 order by v_max, name", "where-num-neg.json"),
    ("c * x", "select make, name, v_max where v_max*2 = 100 order by v_max, name", "where-num-mul.json"),
    ("c / x", "select make, name, v_max where v_max/2 = 25 order by v_max, name", "where-num-div.json"),
    ("c ^ x and ^'s precedence is higher than *'s", "select make, name, v_max, 10 * 2 ^ 3 = 10 * (2 ^ 3) as t1, 10 * 2 ^ 3 = 80 as t2 where v_max ^ 2 = 2500 order by name", "where-num-exp.json"),
    ("c % x", "select make, name, v_max where v_max % 10 = 2 order by name", "where-num-mod.json")
  )

  for ((label, soql, fixture) <- numberCases) {
    test(label) {
      compareSoqlResult(soql, fixture)
    }
  }

}
| socrata-platform/soql-postgres-adapter | soql-server-pg/src/test/scala/com/socrata/pg/server/SoQLNumberFunctionsTest.scala | Scala | apache-2.0 | 2,049 |
package org.openurp.edu.eams.teach.schedule.service.impl
import java.text.SimpleDateFormat
import java.util.Date
import org.beangle.commons.collection.Collections
import org.beangle.commons.dao.impl.BaseServiceImpl
import org.beangle.data.jpa.dao.OqlBuilder
import org.beangle.commons.lang.Strings
import org.beangle.security.blueprint.service.UserService
import org.openurp.edu.base.Teacher
import org.openurp.edu.teach.lesson.CourseTake
import org.openurp.edu.teach.lesson.Lesson
import org.openurp.edu.eams.teach.schedule.model.CourseArrangeAlteration
import org.openurp.edu.eams.teach.schedule.model.CourseMailSetting
import org.openurp.edu.eams.teach.schedule.service.CourseTableMailService
import org.openurp.base.User
import org.beangle.commons.logging.Logging
class CourseTableMailServiceImpl extends BaseServiceImpl with CourseTableMailService {

  var mailService: MailService = _

  var userService: UserService = _

  /**
   * Renders the notification body shared by every recipient of an arrangement
   * change: fills all placeholders except $(username), which must be
   * substituted per recipient on a copy of the returned template.
   */
  private def buildMessageTemplate(alteration: CourseArrangeAlteration,
                                   setting: CourseMailSetting,
                                   lesson: Lesson): String = {
    val sdf = new SimpleDateFormat("yyyy-MM-dd")
    var msg = setting.module
    // The stored template keeps literal "\n"/"\t" escapes; render them as HTML.
    msg = msg.replaceAll("\\\\n", "<br/>").replaceAll("\\\\t", " ")
    msg = msg.replace("$(time)", sdf.format(new Date()))
    msg = msg.replace("$(lesson)", lesson.no + "(" + lesson.course.name + ")")
    msg = msg.replace("$(alterTime)", sdf.format(alteration.alterationAt))
    msg.replace("$(content)", "<table style='width:100%;border-collapse: collapse;border:solid;border-width:1px;border-color:#006CB2;vertical-align: middle;table-layout:fixed'><tr align='center'><td style='border-color:#006CB2;border-style:solid;border-width:0 1px 1px 0;overflow:hidden;word-wrap:break-word;'>" +
      alteration.alterationBefore.replaceAll(",", "<br/>") +
      "</td><td style='border-color:#006CB2;border-style:solid;border-width:0 1px 1px 0;overflow:hidden;word-wrap:break-word;'>" +
      alteration.alterationAfter.replaceAll(",", "<br/>") +
      "</td></tr></table>")
  }

  /**
   * Notifies an explicit list of users about a course arrangement change.
   *
   * @return accumulated failure message, empty string when every mail was sent.
   */
  def sendCourseTableChangeMsg(courseArrangeAlteration: CourseArrangeAlteration, courseMailSetting: CourseMailSetting, userIds: Array[java.lang.Long]): String = {
    val lesson = entityDao.get(classOf[Lesson], courseArrangeAlteration.lessonId)
    val title = courseMailSetting.title
    val template = buildMessageTemplate(courseArrangeAlteration, courseMailSetting, lesson)
    val users = entityDao.find(classOf[User], userIds)
    var errorMsg = ""
    if (!users.isEmpty) {
      for (user <- users) {
        // BUGFIX: substitute on the pristine template for every user. The
        // original re-assigned a shared `msg`, so after the first iteration
        // the $(username) placeholder was gone and every later recipient was
        // greeted with the first user's name.
        val msg = template.replace("$(username)", user.person.name)
        try {
          mailService.sendMimeMail(title, msg, null, user.getMail)
        } catch {
          case e: Exception =>
            logger.info("info.sendMail.failure", e)
            errorMsg += user.person.name + " 邮件发送失败"
        }
      }
      errorMsg
    } else {
      "没有找到发送用户"
    }
  }

  /**
   * Notifies all teachers of the affected lesson.
   *
   * @return accumulated failure message, empty string when every mail was sent.
   */
  def sendCourseTableChangeMsgToTeacher(courseArrangeAlteration: CourseArrangeAlteration, courseMailSetting: CourseMailSetting): String = {
    val lesson = entityDao.get(classOf[Lesson], courseArrangeAlteration.lessonId)
    val title = courseMailSetting.title
    val msgTemplate = buildMessageTemplate(courseArrangeAlteration, courseMailSetting, lesson)
    val teachers = lesson.teachers
    var userNames = ""
    for (teacher <- teachers) {
      userNames += teacher.code + ","
    }
    var errorMsg = ""
    if (Strings.isNotEmpty(userNames)) {
      val mailAddresses = this.getUseMailByUserNames(userNames)
      for (username <- mailAddresses.keySet) {
        val msg = msgTemplate.replace("$(username)", username + "老师")
        try {
          mailService.sendMimeMail(title, msg, null, mailAddresses.get(username))
        } catch {
          case e: Exception =>
            logger.info("info.sendMail.failure", e)
            errorMsg += username + " 邮件发送失败"
        }
      }
      errorMsg
    } else {
      "该课程没有指定教师,发送失败"
    }
  }

  /**
   * Notifies every student enrolled in the affected lesson.
   *
   * @return accumulated failure message, empty string when every mail was sent.
   */
  def sendCourseTableChangeMsgToStd(courseArrangeAlteration: CourseArrangeAlteration, courseMailSetting: CourseMailSetting): String = {
    val lesson = entityDao.get(classOf[Lesson], courseArrangeAlteration.lessonId)
    val title = courseMailSetting.title
    val template = buildMessageTemplate(courseArrangeAlteration, courseMailSetting, lesson)
    val courseTakeSet = lesson.teachClass.courseTakes
    var userNames = ""
    for (take <- courseTakeSet) {
      userNames += take.std.code + ","
    }
    var errorMsg = ""
    if (Strings.isNotEmpty(userNames)) {
      val mailAddresses = this.getUseMailByUserNames(userNames)
      for (username <- mailAddresses.keySet) {
        // BUGFIX: same placeholder bug as sendCourseTableChangeMsg — render
        // from the template per student instead of mutating a shared string.
        val msg = template.replace("$(username)", username + "同学")
        try {
          mailService.sendMimeMail(title, msg, null, mailAddresses.get(username))
        } catch {
          case e: Exception =>
            logger.info("info.sendMail.failure", e)
            errorMsg += username + " 邮件发送失败"
        }
      }
      errorMsg
    } else {
      "该课程没有学生选课,发送失败"
    }
  }

  /**
   * Maps person name -> mail address for the given comma separated user names;
   * users without a mail address are skipped.
   */
  def getUseMailByUserNames(userNames: String): collection.mutable.Map[String, String] = {
    val userList = getUserList(userNames)
    val mailMap = Collections.newMap[String, String]
    for (user <- userList if Strings.isNotEmpty(user.email)) {
      mailMap.put(user.person.name, user.email)
    }
    mailMap
  }

  /** Loads the users whose login names appear in the comma separated string. */
  def getUserList(userNames: String): Seq[User] = {
    val query = OqlBuilder.from(classOf[User], "user")
    query.where("user.name in(:userNames)", userNames.split(","))
    entityDao.search(query)
  }

}
| openurp/edu-eams-webapp | schedule/src/main/scala/org/openurp/edu/eams/teach/schedule/service/impl/CourseTableMailServiceImpl.scala | Scala | gpl-3.0 | 7,520 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.crossdata.catalog.persistent
import java.sql.{Connection, DriverManager, ResultSet}
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.catalyst.{CatalystConf, TableIdentifier}
import org.apache.spark.sql.crossdata.{CrossdataVersion, XDContext}
import org.apache.spark.sql.crossdata.catalog.interfaces.XDAppsCatalog
import org.apache.spark.sql.crossdata.catalog.{IndexIdentifierNormalized, StringNormalized, TableIdentifierNormalized, XDCatalog, persistent}
import scala.annotation.tailrec
import scala.util.Try
/**
 * Configuration keys, default metadata-table names and column names used by the
 * JDBC-backed crossdata catalog below.
 */
object MySQLXDCatalog {
  // SQLConfig — keys read from the catalog configuration.
  val Driver = "jdbc.driver"
  val Url = "jdbc.url"
  val Database = "jdbc.db.name"
  val User = "jdbc.db.user"
  val Pass = "jdbc.db.pass"
  val PrefixConfig = "prefix"
  //Default tables — may be prefixed with "<prefix>_" at runtime.
  val DefaultTablesMetadataTable = "crossdataTables"
  val DefaultViewsMetadataTable = "crossdataViews"
  val DefaultAppsMetadataTable = "crossdataJars"
  val DefaultIndexesMetadataTable = "crossdataIndexes"
  // CatalogFields — column names of the tables metadata table.
  val DatabaseField = "db"
  val TableNameField = "tableName"
  val SchemaField = "tableSchema"
  val DatasourceField = "datasource"
  val PartitionColumnField = "partitionColumn"
  val OptionsField = "options"
  val CrossdataVersionField = "crossdataVersion"
  //IndexMetadataFields
  val IndexNameField = "indexName"
  val IndexTypeField = "indexType"
  val IndexedColsField = "indexedCols"
  val PKField = "pk"
  // ViewMetadataFields (databaseField, tableNameField, sqlViewField, CrossdataVersionField
  val SqlViewField = "sqlView"
  //App values — columns of the application-jar registry.
  val JarPath = "jarPath"
  val AppAlias = "alias"
  val AppClass = "class"
}
/**
* Default implementation of the [[persistent.PersistentCatalogWithCache]] with persistence using
* Jdbc.
* Supported MySQL and PostgreSQL
*
* @param catalystConf An implementation of the [[CatalystConf]].
*/
class MySQLXDCatalog(override val catalystConf: CatalystConf)
extends PersistentCatalogWithCache(catalystConf) {
import MySQLXDCatalog._
import XDCatalog._
  // Catalog persistence settings (the jdbc.* keys) from the XDContext configuration.
  protected lazy val config = XDContext.catalogConfig
  // Database (schema) that holds all crossdata metadata tables.
  private lazy val db = config.getString(Database)
  // Optional table-name prefix "<prefix>_"; empty when no prefix is configured.
  protected lazy val tablesPrefix = Try(s"${config.getString(PrefixConfig)}_") getOrElse ("") //prefix_
  // Fully prefixed metadata table names.
  protected lazy val tableWithTableMetadata = s"$tablesPrefix$DefaultTablesMetadataTable"
  protected lazy val tableWithViewMetadata = s"$tablesPrefix$DefaultViewsMetadataTable"
  protected lazy val tableWithAppJars = s"$tablesPrefix$DefaultAppsMetadataTable"
  protected lazy val tableWithIndexMetadata = s"$tablesPrefix$DefaultIndexesMetadataTable"
  // Lazily opened JDBC connection. On first access it also bootstraps the
  // metadata schema and the tables/views/apps/indexes metadata tables.
  @transient lazy val connection: Connection = {
    val driver = config.getString(Driver)
    val user = config.getString(User)
    val pass = config.getString(Pass)
    val url = config.getString(Url)
    Class.forName(driver)
    try {
      val jdbcConnection = DriverManager.getConnection(url, user, pass)
      // CREATE PERSISTENT METADATA TABLE
      jdbcConnection.createStatement().executeUpdate(s"CREATE SCHEMA IF NOT EXISTS $db")
      jdbcConnection.createStatement().executeUpdate(
        s"""|CREATE TABLE IF NOT EXISTS $db.$tableWithTableMetadata (
            |$DatabaseField VARCHAR(50),
            |$TableNameField VARCHAR(50),
            |$SchemaField TEXT,
            |$DatasourceField TEXT,
            |$PartitionColumnField TEXT,
            |$OptionsField TEXT,
            |$CrossdataVersionField TEXT,
            |PRIMARY KEY ($DatabaseField,$TableNameField))""".stripMargin)
      jdbcConnection.createStatement().executeUpdate(
        s"""|CREATE TABLE IF NOT EXISTS $db.$tableWithViewMetadata (
            |$DatabaseField VARCHAR(50),
            |$TableNameField VARCHAR(50),
            |$SqlViewField TEXT,
            |$CrossdataVersionField VARCHAR(30),
            |PRIMARY KEY ($DatabaseField,$TableNameField))""".stripMargin)
      // NOTE(review): unlike the others, this CREATE TABLE has no IF NOT EXISTS,
      // so a second bootstrap against an existing schema would fail here and
      // fall into the catch below — confirm whether that is intended.
      jdbcConnection.createStatement().executeUpdate(
        s"""|CREATE TABLE $db.$tableWithAppJars (
            |$JarPath VARCHAR(100),
            |$AppAlias VARCHAR(50),
            |$AppClass VARCHAR(100),
            |PRIMARY KEY ($AppAlias))""".stripMargin)
      //Index support
      jdbcConnection.createStatement().executeUpdate(
        s"""|CREATE TABLE IF NOT EXISTS $db.$tableWithIndexMetadata (
            |$DatabaseField VARCHAR(50),
            |$TableNameField VARCHAR(50),
            |$IndexNameField VARCHAR(50),
            |$IndexTypeField VARCHAR(50),
            |$IndexedColsField TEXT,
            |$PKField VARCHAR(100),
            |$DatasourceField TEXT,
            |$OptionsField TEXT,
            |$CrossdataVersionField VARCHAR(30),
            |UNIQUE ($IndexNameField, $IndexTypeField),
            |PRIMARY KEY ($DatabaseField,$TableNameField))""".stripMargin)
      jdbcConnection
    } catch {
      case e: Exception =>
        logError(e.getMessage)
        // NOTE(review): failures are swallowed and a null connection is
        // returned; every later catalog call will then NPE. Consider
        // rethrowing — confirm callers' expectations before changing.
        null
    }
  }
override def lookupTable(tableIdentifier: TableIdentifierNormalized): Option[CrossdataTable] = {
val resultSet = selectMetadata(tableWithTableMetadata, tableIdentifier)
if (!resultSet.isBeforeFirst) {
None
} else {
resultSet.next()
val database = resultSet.getString(DatabaseField)
val table = resultSet.getString(TableNameField)
val schemaJSON = resultSet.getString(SchemaField)
val partitionColumn = resultSet.getString(PartitionColumnField)
val datasource = resultSet.getString(DatasourceField)
val optsJSON = resultSet.getString(OptionsField)
val version = resultSet.getString(CrossdataVersionField)
Some(
CrossdataTable(TableIdentifierNormalized(table, Some(database)), Option(deserializeUserSpecifiedSchema(schemaJSON)), datasource, deserializePartitionColumn(partitionColumn), deserializeOptions(optsJSON), version)
)
}
}
override def allRelations(databaseName: Option[StringNormalized]): Seq[TableIdentifierNormalized] = {
@tailrec
def getSequenceAux(resultset: ResultSet, next: Boolean, set: Set[TableIdentifierNormalized] = Set.empty): Set[TableIdentifierNormalized] = {
if (next) {
val database = resultset.getString(DatabaseField)
val table = resultset.getString(TableNameField)
val tableId = if (database.trim.isEmpty) TableIdentifierNormalized(table) else TableIdentifierNormalized(table, Option(database))
getSequenceAux(resultset, resultset.next(), set + tableId)
} else {
set
}
}
val statement = connection.createStatement
val dbFilter = databaseName.fold("")(dbName => s"WHERE $DatabaseField ='${dbName.normalizedString}'")
val resultSet = statement.executeQuery(s"SELECT $DatabaseField, $TableNameField FROM $db.$tableWithTableMetadata $dbFilter")
getSequenceAux(resultSet, resultSet.next).toSeq
}
override def persistTableMetadata(crossdataTable: CrossdataTable): Unit =
try {
val tableSchema = serializeSchema(crossdataTable.schema.getOrElse(schemaNotFound()))
val tableOptions = serializeOptions(crossdataTable.opts)
val partitionColumn = serializePartitionColumn(crossdataTable.partitionColumn)
connection.setAutoCommit(false)
// check if the database-table exist in the persisted catalog
val resultSet = selectMetadata(tableWithTableMetadata, crossdataTable.tableIdentifier)
if (!resultSet.isBeforeFirst) {
resultSet.close()
val prepped = connection.prepareStatement(
s"""|INSERT INTO $db.$tableWithTableMetadata (
| $DatabaseField, $TableNameField, $SchemaField, $DatasourceField, $PartitionColumnField, $OptionsField, $CrossdataVersionField
|) VALUES (?,?,?,?,?,?,?)
""".stripMargin)
prepped.setString(1, crossdataTable.tableIdentifier.database.getOrElse(""))
prepped.setString(2, crossdataTable.tableIdentifier.table)
prepped.setString(3, tableSchema)
prepped.setString(4, crossdataTable.datasource)
prepped.setString(5, partitionColumn)
prepped.setString(6, tableOptions)
prepped.setString(7, CrossdataVersion)
prepped.execute()
prepped.close()
} else {
resultSet.close()
val prepped =
connection.prepareStatement(
s"""|UPDATE $db.$tableWithTableMetadata
|SET $SchemaField=?, $DatasourceField=?,$PartitionColumnField=?,$OptionsField=?,$CrossdataVersionField=?
|WHERE $DatabaseField='${crossdataTable.tableIdentifier.database.getOrElse("")}' AND $TableNameField='${crossdataTable.tableIdentifier.table}';
""".stripMargin.replaceAll("\\n", " "))
prepped.setString(1, tableSchema)
prepped.setString(2, crossdataTable.datasource)
prepped.setString(3, partitionColumn)
prepped.setString(4, tableOptions)
prepped.setString(5, CrossdataVersion)
prepped.execute()
prepped.close()
}
connection.commit()
} finally {
connection.setAutoCommit(true)
}
override def dropTableMetadata(tableIdentifier: ViewIdentifierNormalized): Unit =
connection.createStatement.executeUpdate(s"DELETE FROM $db.$tableWithTableMetadata WHERE tableName='${tableIdentifier.table}' AND db='${tableIdentifier.database.getOrElse("")}'")
override def dropAllTablesMetadata(): Unit =
connection.createStatement.executeUpdate(s"TRUNCATE $db.$tableWithTableMetadata")
override def lookupView(tableIdentifier: TableIdentifierNormalized): Option[String] = {
val resultSet = selectMetadata(tableWithViewMetadata, tableIdentifier)
if (!resultSet.isBeforeFirst) {
None
} else {
resultSet.next()
Option(resultSet.getString(SqlViewField))
}
}
override def persistViewMetadata(tableIdentifier: TableIdentifierNormalized, sqlText: String): Unit =
try {
connection.setAutoCommit(false)
val resultSet = selectMetadata(tableWithViewMetadata, tableIdentifier)
if (!resultSet.isBeforeFirst) {
resultSet.close()
val prepped = connection.prepareStatement(
s"""|INSERT INTO $db.$tableWithViewMetadata (
| $DatabaseField, $TableNameField, $SqlViewField, $CrossdataVersionField
|) VALUES (?,?,?,?)
""".stripMargin)
prepped.setString(1, tableIdentifier.database.getOrElse(""))
prepped.setString(2, tableIdentifier.table)
prepped.setString(3, sqlText)
prepped.setString(4, CrossdataVersion)
prepped.execute()
prepped.close()
} else {
resultSet.close()
val prepped =
connection.prepareStatement(
s"""|UPDATE $db.$tableWithViewMetadata SET $SqlViewField=?
|WHERE $DatabaseField='${tableIdentifier.database.getOrElse("")}' AND $TableNameField='${tableIdentifier.table}'
""".stripMargin.replaceAll("\\n", " "))
prepped.setString(1, sqlText)
prepped.execute()
prepped.close()
}
connection.commit()
} finally {
connection.setAutoCommit(true)
}
/**
 * Runs `SELECT * FROM <db>.<targetTable>` filtered by database and table name and
 * returns the open ResultSet positioned before the first row.
 *
 * NOTE(review): the PreparedStatement backing the returned ResultSet is not closed
 * here, and callers only receive the ResultSet; they must release the statement via
 * `resultSet.getStatement` or it leaks — TODO confirm the intended lifecycle.
 */
private def selectMetadata(targetTable: String, tableIdentifier: TableIdentifierNormalized): ResultSet = {
  val preparedStatement = connection.prepareStatement(s"SELECT * FROM $db.$targetTable WHERE $DatabaseField= ? AND $TableNameField= ?")
  preparedStatement.setString(1, tableIdentifier.database.getOrElse(""))
  preparedStatement.setString(2, tableIdentifier.table)
  preparedStatement.executeQuery()
}
/**
 * Removes the persisted metadata row of the given view.
 *
 * Fixes vs. original: identifier values are bound as parameters instead of being
 * interpolated into the SQL (injection-safe), and the statement is closed.
 * The literal column names `tableName`/`db` of the original query are preserved.
 */
override def dropViewMetadata(viewIdentifier: ViewIdentifierNormalized): Unit = {
  val statement = connection.prepareStatement(
    s"DELETE FROM $db.$tableWithViewMetadata WHERE tableName=? AND db=?")
  try {
    statement.setString(1, viewIdentifier.table)
    statement.setString(2, viewIdentifier.database.getOrElse(""))
    statement.executeUpdate()
  } finally statement.close()
}
/**
 * Deletes all persisted view metadata.
 *
 * Fix vs. original: the Statement was never closed; it is now released in `finally`.
 */
override def dropAllViewsMetadata(): Unit = {
  val statement = connection.createStatement
  try statement.executeUpdate(s"DELETE FROM $db.$tableWithViewMetadata")
  finally statement.close()
}
/**
 * Inserts or updates the metadata (jar path, alias, main class) of a Crossdata app,
 * keyed by its alias, inside an explicit transaction.
 *
 * Fixes vs. original:
 *  - the SELECT's PreparedStatement was closed *before* `resultSet.next()` was called;
 *    per JDBC, closing a Statement also closes its current ResultSet, so the existence
 *    check ran on a closed cursor. Resources are now closed only after being consumed,
 *    and always closed (finally) even when an exception escapes.
 *  - the UPDATE binds the alias as a parameter instead of interpolating it into the SQL.
 */
override def saveAppMetadata(crossdataApp: CrossdataApp): Unit =
  try {
    connection.setAutoCommit(false)
    val preparedStatement = connection.prepareStatement(s"SELECT * FROM $db.$tableWithAppJars WHERE $AppAlias= ?")
    try {
      preparedStatement.setString(1, crossdataApp.appAlias)
      val resultSet = preparedStatement.executeQuery()
      // Consume the cursor before closing anything.
      val exists = resultSet.next()
      resultSet.close()
      if (!exists) {
        // App unknown: insert a new row.
        val prepped = connection.prepareStatement(
          s"""|INSERT INTO $db.$tableWithAppJars (
              | $JarPath, $AppAlias, $AppClass
              |) VALUES (?,?,?)
           """.stripMargin)
        try {
          prepped.setString(1, crossdataApp.jar)
          prepped.setString(2, crossdataApp.appAlias)
          prepped.setString(3, crossdataApp.appClass)
          prepped.execute()
        } finally prepped.close()
      } else {
        // App already registered: refresh its jar path and main class.
        val prepped = connection.prepareStatement(
          s"""|UPDATE $db.$tableWithAppJars SET $JarPath=?, $AppClass=?
              |WHERE $AppAlias=?
           """.stripMargin)
        try {
          prepped.setString(1, crossdataApp.jar)
          prepped.setString(2, crossdataApp.appClass)
          prepped.setString(3, crossdataApp.appAlias)
          prepped.execute()
        } finally prepped.close()
      }
    } finally preparedStatement.close()
    connection.commit()
  } finally {
    connection.setAutoCommit(true)
  }
/**
 * Looks up a persisted Crossdata application by its alias.
 *
 * Returns Some(app) with the stored jar path, alias and main class, or None when no
 * row matches. Closes the ResultSet and the PreparedStatement before returning.
 */
override def getApp(alias: String): Option[CrossdataApp] = {
  val statement = connection.prepareStatement(s"SELECT * FROM $db.$tableWithAppJars WHERE $AppAlias= ?")
  statement.setString(1, alias)
  val rows = statement.executeQuery()
  val found =
    if (rows.next) {
      // Read every column before releasing the cursor.
      val jarPath = rows.getString(JarPath)
      val storedAlias = rows.getString(AppAlias)
      val mainClass = rows.getString(AppClass)
      Some(CrossdataApp(jarPath, storedAlias, mainClass))
    } else {
      None
    }
  rows.close()
  statement.close()
  found
}
override def isAvailable: Boolean = Option(connection).isDefined
/**
 * Persists a global index definition for a table. Fails with `sys.error` if an index
 * is already registered for that table (changing index metadata is not supported yet).
 *
 * Fixes vs. original (and consistency with the other persist* methods of this class):
 *  - the INSERT is now followed by an explicit `connection.commit()` instead of relying
 *    on the implicit commit performed when auto-commit is re-enabled;
 *  - the ResultSet of the existence check and the INSERT's PreparedStatement are closed.
 */
override def persistIndexMetadata(crossdataIndex: CrossdataIndex): Unit =
  try {
    connection.setAutoCommit(false)
    // check if the database-table exist in the persisted catalog
    val resultSet = selectMetadata(tableWithIndexMetadata, crossdataIndex.tableIdentifier)
    val alreadyIndexed = resultSet.next()
    resultSet.close()
    val serializedIndexedCols = serializeSeq(crossdataIndex.indexedCols)
    val serializedOptions = serializeOptions(crossdataIndex.opts)
    if (!alreadyIndexed) {
      val prepped = connection.prepareStatement(
        s"""|INSERT INTO $db.$tableWithIndexMetadata (
            | $DatabaseField, $TableNameField, $IndexNameField, $IndexTypeField, $IndexedColsField,
            | $PKField, $DatasourceField, $OptionsField, $CrossdataVersionField
            |) VALUES (?,?,?,?,?,?,?,?,?)
         """.stripMargin)
      try {
        prepped.setString(1, crossdataIndex.tableIdentifier.database.getOrElse(""))
        prepped.setString(2, crossdataIndex.tableIdentifier.table)
        prepped.setString(3, crossdataIndex.indexIdentifier.indexName)
        prepped.setString(4, crossdataIndex.indexIdentifier.indexType)
        prepped.setString(5, serializedIndexedCols)
        prepped.setString(6, crossdataIndex.pk)
        prepped.setString(7, crossdataIndex.datasource)
        prepped.setString(8, serializedOptions)
        prepped.setString(9, CrossdataVersion)
        prepped.execute()
      } finally prepped.close()
      connection.commit()
    } else {
      //TODO: Support change index metadata?
      sys.error(s"A global index already exists in table ${crossdataIndex.tableIdentifier.unquotedString}")
    }
  } finally {
    connection.setAutoCommit(true)
  }
/**
 * Removes the persisted metadata of the index identified by (indexType, indexName).
 *
 * Fixes vs. original: values are bound as parameters instead of interpolated into the
 * SQL string (injection-safe), and the statement is closed.
 */
override def dropIndexMetadata(indexIdentifier: IndexIdentifierNormalized): Unit = {
  val statement = connection.prepareStatement(
    s"DELETE FROM $db.$tableWithIndexMetadata WHERE $IndexTypeField=? AND $IndexNameField=?")
  try {
    statement.setString(1, indexIdentifier.indexType)
    statement.setString(2, indexIdentifier.indexName)
    statement.executeUpdate()
  } finally statement.close()
}
/**
 * Deletes all persisted index metadata.
 *
 * Fix vs. original: the Statement was never closed; it is now released in `finally`.
 */
override def dropAllIndexesMetadata(): Unit = {
  val statement = connection.createStatement
  try statement.executeUpdate(s"DELETE FROM $db.$tableWithIndexMetadata")
  finally statement.close()
}
/**
 * Retrieves a persisted index by its (indexType, indexName) identifier, or None when
 * the index does not exist.
 *
 * Fix vs. original: the ResultSet (and the PreparedStatement created by selectIndex,
 * reachable via `getStatement`) was never closed — it is now always released.
 */
override def lookupIndex(indexIdentifier: IndexIdentifierNormalized): Option[CrossdataIndex] = {
  val resultSet = selectIndex(indexIdentifier)
  try {
    if (!resultSet.next) {
      None
    } else {
      val database = resultSet.getString(DatabaseField)
      val table = resultSet.getString(TableNameField)
      val indexName = resultSet.getString(IndexNameField)
      val indexType = resultSet.getString(IndexTypeField)
      val indexedCols = resultSet.getString(IndexedColsField)
      val pk = resultSet.getString(PKField)
      val datasource = resultSet.getString(DatasourceField)
      val optsJSON = resultSet.getString(OptionsField)
      val version = resultSet.getString(CrossdataVersionField)
      Option(
        CrossdataIndex(TableIdentifierNormalized(table, Option(database)), IndexIdentifierNormalized(indexType, indexName),
          deserializeSeq(indexedCols), pk, datasource, deserializeOptions(optsJSON), version)
      )
    }
  } finally {
    val statement = resultSet.getStatement
    resultSet.close()
    if (statement != null) statement.close()
  }
}
/**
 * Runs `SELECT * FROM <db>.<index table>` filtered by index name and type and returns
 * the open ResultSet positioned before the first row.
 *
 * NOTE(review): the PreparedStatement backing the returned ResultSet is not closed
 * here; callers should release it via `resultSet.getStatement` — TODO confirm the
 * intended resource lifecycle.
 */
private def selectIndex(indexIdentifier: IndexIdentifierNormalized): ResultSet = {
  val preparedStatement = connection.prepareStatement(s"SELECT * FROM $db.$tableWithIndexMetadata WHERE $IndexNameField= ? AND $IndexTypeField= ?")
  preparedStatement.setString(1, indexIdentifier.indexName)
  preparedStatement.setString(2, indexIdentifier.indexType)
  preparedStatement.executeQuery()
}
/**
 * Removes the persisted metadata of any index defined on the given table.
 *
 * Fixes vs. original: values are bound as parameters instead of interpolated into the
 * SQL string (injection-safe), and the statement is closed.
 */
override def dropIndexMetadata(tableIdentifier: TableIdentifierNormalized): Unit = {
  val statement = connection.prepareStatement(
    s"DELETE FROM $db.$tableWithIndexMetadata WHERE $TableNameField=? AND $DatabaseField=?")
  try {
    statement.setString(1, tableIdentifier.table)
    statement.setString(2, tableIdentifier.database.getOrElse(""))
    statement.executeUpdate()
  } finally statement.close()
}
/**
 * Retrieves the index (if any) defined on the given table.
 *
 * Fixes vs. original:
 *  - although the original used prepareStatement, it interpolated the identifier values
 *    directly into the SQL string — they are now bound as parameters (injection-safe);
 *  - the ResultSet and the PreparedStatement were never closed — both are now released
 *    in `finally` blocks.
 */
override def lookupIndexByTableIdentifier(tableIdentifier: TableIdentifierNormalized): Option[CrossdataIndex] = {
  val preparedStatement = connection.prepareStatement(
    s"SELECT * FROM $db.$tableWithIndexMetadata WHERE $TableNameField=? AND $DatabaseField=?")
  try {
    preparedStatement.setString(1, tableIdentifier.table)
    preparedStatement.setString(2, tableIdentifier.database.getOrElse(""))
    val resultSet = preparedStatement.executeQuery()
    try {
      if (!resultSet.next) {
        None
      } else {
        val database = resultSet.getString(DatabaseField)
        val table = resultSet.getString(TableNameField)
        val indexName = resultSet.getString(IndexNameField)
        val indexType = resultSet.getString(IndexTypeField)
        val indexedCols = resultSet.getString(IndexedColsField)
        val pk = resultSet.getString(PKField)
        val datasource = resultSet.getString(DatasourceField)
        val optsJSON = resultSet.getString(OptionsField)
        val version = resultSet.getString(CrossdataVersionField)
        Option(
          CrossdataIndex(TableIdentifierNormalized(table, Option(database)), IndexIdentifierNormalized(indexType, indexName),
            deserializeSeq(indexedCols), pk, datasource, deserializeOptions(optsJSON), version)
        )
      }
    } finally resultSet.close()
  } finally preparedStatement.close()
}
} | Stratio/crossdata | core/src/main/scala/org/apache/spark/sql/crossdata/catalog/persistent/MySQLXDCatalog.scala | Scala | apache-2.0 | 19,526 |
package org.scalatra
package auth
package strategy
import javax.servlet.http.HttpServletRequest
import org.scalatra.test.specs2._
import org.specs2.mock.Mockito
/**
 * Regression spec for scalatra issue #143: reading `params` from a request that
 * carries no Authorization header must yield None instead of throwing.
 */
class BasicAuthStrategySpec extends MutableScalatraSpec with Mockito {
  "params on a request with no auth headers" should {
    // Unstubbed Mockito mock: header lookups return null, i.e. no auth header present.
    val httpRequest = mock[HttpServletRequest]
    val basicAuthRequest = new BasicAuthStrategy.BasicAuthRequest(httpRequest)
    "return None" in { // https://github.com/scalatra/scalatra/issues/143
      basicAuthRequest.params must_== None
    }
  }
}
| lightvector/scalatra | auth/src/test/scala/org/scalatra/auth/strategy/BasicAuthRequestSpec.scala | Scala | bsd-2-clause | 544 |
package com.lynbrookrobotics.potassium.logging
import com.lynbrookrobotics.potassium.clock.Clock
import squants.time.Milliseconds
import scala.collection.mutable
/**
 * Mixin that buffers log messages in a thread-safe queue and flushes them to
 * [[Logger]] on a periodic clock tick, so calling threads never block on the
 * logging backend.
 *
 * Fix vs. original: `scala.collection.mutable.SynchronizedQueue` has been deprecated
 * since Scala 2.11 and removed in 2.13; it is replaced with the lock-free
 * `java.util.concurrent.ConcurrentLinkedQueue`, which provides the same thread-safe
 * FIFO semantics. Public behavior (buffer, then drain every 20 ms) is unchanged.
 */
trait AsyncLogger {
  /** Clock used to schedule the periodic flush; supplied by the concrete implementation. */
  val clock: Clock

  /** A buffered message bound to the level it must be emitted at. */
  abstract private class Loggable {
    def log(): Unit
  }

  private final class InfoLog(message: String) extends Loggable {
    override def log(): Unit = Logger.info(message)
  }

  private final class DebugLog(message: String) extends Loggable {
    override def log(): Unit = Logger.debug(message)
  }

  private final class ErrorLog(message: String) extends Loggable {
    override def log(): Unit = Logger.error(message)
  }

  private final class WarningLog(message: String) extends Loggable {
    override def log(): Unit = Logger.warning(message)
  }

  // Interval between two drains of the buffer.
  private val gapBetweenFlush = Milliseconds(20)

  // Thread-safe FIFO buffer of pending log entries (lock-free, non-blocking).
  private val toLog = new java.util.concurrent.ConcurrentLinkedQueue[Loggable]()

  def debug(msg: String): Unit = toLog.add(new DebugLog(msg))

  def warn(msg: String): Unit = toLog.add(new WarningLog(msg))

  def error(msg: String): Unit = toLog.add(new ErrorLog(msg))

  def info(msg: String): Unit = toLog.add(new InfoLog(msg))

  // Periodically drain the queue; poll() returns null once the queue is empty.
  clock.apply(gapBetweenFlush) { _ =>
    var next = toLog.poll()
    while (next != null) {
      next.log()
      next = toLog.poll()
    }
  }
}
| Team846/potassium | core/shared/src/main/scala/com/lynbrookrobotics/potassium/logging/AsyncLogger.scala | Scala | mit | 1,340 |
package org.odfi.indesign.core.module.ui.www
import com.idyria.osi.wsb.webapp.localweb.DefaultLocalWebHTMLBuilder
import org.odfi.indesign.core.module.ui.www.external.ExternalBuilder
import java.net.URI
import com.idyria.osi.vui.html.HTMLNode
import org.w3c.dom.html.HTMLElement
trait IndesignUIHtmlBuilder extends ExternalBuilder {
/**
 * Contributes the Indesign UI client-side resources to the page being built.
 *
 * First delegates to the external builder chain, then appends a <script> element
 * pointing at the module's JavaScript helper, served under the special "resources"
 * path. NOTE(review): switchToNode/script come from the ExternalBuilder HTML DSL;
 * assumed to append children to `targetNode` — confirm against the DSL docs.
 */
override def externalAdd(targetNode: HTMLNode[HTMLElement, Any]): Unit = {
  // Let the mixed-in external builders contribute their resources first.
  super.externalAdd(targetNode)
  switchToNode(targetNode, {
    // Inject the module's client-side helper script.
    script(new URI(createSpecialPath("resources", "modules/wwwui/indesign.js"))) {
    }
  })
}
} | opendesignflow/indesign | indesign-wwwui/src/main/scala/org/odfi/indesign/core/module/ui/www/IndesignUIHtmlBuilder.scala | Scala | gpl-3.0 | 612 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.boot
import akka.actor.{ActorRef, ActorSystem}
import com.ibm.spark.boot.layer._
import com.ibm.spark.interpreter.Interpreter
import com.ibm.spark.kernel.protocol.v5.KernelStatusType._
import com.ibm.spark.kernel.protocol.v5._
import com.ibm.spark.kernel.protocol.v5.kernel.ActorLoader
import com.ibm.spark.security.KernelSecurityManager
import com.ibm.spark.utils.LogLike
import com.typesafe.config.Config
import org.apache.spark.SparkContext
import org.zeromq.ZMQ
/**
 * Orchestrates the start-up, shutdown, and termination-wait of the Spark kernel.
 *
 * The concrete instantiation must mix in the four initialization layers (see the
 * self-type below); they provide initializeBare / initializeComponents /
 * initializeHandlers / initializeHooks, which are invoked in that order by
 * [[initialize]]. Fields are populated as each layer completes.
 */
class KernelBootstrap(config: Config) extends LogLike {
  this: BareInitialization with ComponentInitialization
    with HandlerInitialization with HookInitialization =>

  // Defaults used when wiring the Spark application and the actor system.
  private val DefaultAppName                    = SparkKernelInfo.banner
  private val DefaultActorSystemName            = "spark-kernel-actor-system"

  // Populated step-by-step during initialize(); null until then.
  private var actorSystem: ActorSystem = _
  private var actorLoader: ActorLoader = _
  private var kernelMessageRelayActor: ActorRef = _
  private var statusDispatch: ActorRef = _
  private var sparkContext: SparkContext = _
  private var interpreters: Seq[Interpreter] = Nil

  /**
   * Initializes all kernel systems.
   */
  def initialize() = {
    // TODO: Investigate potential to initialize System out/err/in to capture
    //       Console DynamicVariable initialization (since takes System fields)
    //       and redirect it to a workable location (like an actor) with the
    //       thread's current information attached
    //
    //       E.G. System.setOut(customPrintStream) ... all new threads will have
    //            customPrintStream as their initial Console.out value
    //

    displayVersionInfo()

    // Initialize the bare minimum to report a starting message
    val (actorSystem, actorLoader, kernelMessageRelayActor, statusDispatch) =
      initializeBare(
        config = config,
        actorSystemName = DefaultActorSystemName
      )
    this.actorSystem = actorSystem
    this.actorLoader = actorLoader
    this.kernelMessageRelayActor = kernelMessageRelayActor
    this.statusDispatch = statusDispatch

    // Indicate that the kernel is now starting
    publishStatus(KernelStatusType.Starting)

    // Initialize components needed elsewhere
    val (commStorage, commRegistrar, commManager, interpreter,
      kernel, sparkContext, dependencyDownloader,
      magicLoader, responseMap) =
      initializeComponents(
        config = config,
        appName = DefaultAppName,
        actorLoader = actorLoader
      )
    this.sparkContext = sparkContext
    this.interpreters ++= Seq(interpreter)

    // Initialize our handlers that take care of processing messages
    initializeHandlers(
      actorSystem = actorSystem,
      actorLoader = actorLoader,
      kernel = kernel,
      interpreter = interpreter,
      commRegistrar = commRegistrar,
      commStorage = commStorage,
      magicLoader = magicLoader,
      responseMap = responseMap
    )

    // Initialize our hooks that handle various JVM events
    initializeHooks(
      interpreter = interpreter
    )

    // Install the restrictive security manager last, once setup is complete.
    logger.debug("Initializing security manager")
    System.setSecurityManager(new KernelSecurityManager)

    logger.info("Marking relay as ready for receiving messages")
    kernelMessageRelayActor ! true

    this
  }

  /**
   * Shuts down all kernel systems.
   */
  def shutdown() = {
    // Teardown order: Spark context, then interpreters, then the actor system.
    logger.info("Shutting down Spark Context")
    sparkContext.stop()

    logger.info("Shutting down interpreters")
    interpreters.foreach(_.stop())

    logger.info("Shutting down actor system")
    actorSystem.shutdown()

    this
  }

  /**
   * Waits for the main actor system to terminate.
   */
  def waitForTermination() = {
    // Blocks the calling thread until the actor system has fully terminated.
    logger.debug("Waiting for actor system to terminate")
    actorSystem.awaitTermination()

    this
  }

  // Sends a kernel status update, optionally correlated to a request via its
  // parent header, to the status dispatch actor.
  private def publishStatus(
    status: KernelStatusType,
    parentHeader: Option[ParentHeader] = None
  ): Unit = {
    parentHeader match {
      case Some(header) => statusDispatch ! ((status, header))
      case None         => statusDispatch ! status
    }
  }

  // Logs kernel/Scala/ZeroMQ version information at start-up.
  @inline private def displayVersionInfo() = {
    logger.info("Kernel version: " + SparkKernelInfo.implementationVersion)
    logger.info("Scala version: " + SparkKernelInfo.languageVersion)
    logger.info("ZeroMQ (JeroMQ) version: " + ZMQ.getVersionString)
  }
}
| malcolmgreaves/spark-kernel | kernel/src/main/scala/com/ibm/spark/boot/KernelBootstrap.scala | Scala | apache-2.0 | 4,952 |
package org.jetbrains.plugins.scala
package highlighter
import com.intellij.internal.statistic.UsageTrigger
import com.intellij.lang.annotation.AnnotationHolder
import com.intellij.openapi.editor.colors.TextAttributesKey
import com.intellij.psi._
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.ScalaStubBasedElementImpl
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScConstructor, ScReferenceElement, ScStableCodeReferenceElement}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScTypeParam}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportExpr
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScTemplateBody
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScMember, ScObject, ScTrait}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.{ScEarlyDefinitions, ScModifierListOwner}
import org.jetbrains.plugins.scala.lang.psi.types.api.FunctionType
import org.jetbrains.plugins.scala.lang.psi.types.{ScType, ScTypeExt, ScalaType}
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
import org.jetbrains.plugins.scala.project.ProjectContext
import org.jetbrains.plugins.scala.settings.ScalaProjectSettings
/**
* User: Alexander Podkhalyuzin
* Date: 17.07.2008
*/
/**
 * Resolve-based (semantic) highlighter for Scala PSI elements.
 *
 * Given a reference or a declaration, it picks a text-attributes key from
 * [[DefaultHighlighter]] according to what the element resolves to: class vs trait vs
 * object, val/var (member vs local, lazy), parameter (named vs anonymous), method
 * (local / instance / object), type alias, pattern binding, generator, annotation, and
 * the collection family of a resolved type (Scala immutable / mutable / Java).
 */
object AnnotatorHighlighter {
  // Java interfaces used to recognise Java collections by type conformance.
  private val JAVA_COLLECTIONS_BASES = List("java.util.Map", "java.util.Collection")
  // Factory-method names that keep the highlighting of the type they construct.
  private val SCALA_FACTORY_METHODS_NAMES = Set("make", "apply")
  // Canonical-text prefixes identifying the Scala collection packages.
  private val SCALA_COLLECTION_MUTABLE_BASE = "_root_.scala.collection.mutable."
  private val SCALA_COLLECTION_IMMUTABLE_BASE = "_root_.scala.collection.immutable."
  private val SCALA_COLLECTION_GENERIC_BASE = "_root_.scala.collection.generic."
  private val SCALA_PREDEFINED_OBJECTS = Set("scala", "scala.Predef")
  // Aliases exposed through scala / scala.Predef that denote immutable collections.
  private val SCALA_PREDEF_IMMUTABLE_BASES = Set("_root_.scala.PredefMap", "_root_.scala.PredefSet", "scalaList",
    "scalaNil", "scalaStream", "scalaVector", "scalaSeq")

  // Stub-safe parent lookup: stub-based elements expose their parent directly,
  // everything else goes through the context chain.
  private def getParentByStub(x: PsiElement): PsiElement = {
    x match {
      case el: ScalaStubBasedElementImpl[_, _] => el.getParent
      case _ => x.getContext
    }
  }

  /**
   * Highlights a reference element according to what it resolves to.
   * Also annotates collection types (immutable/mutable/Java) based on the
   * project-level collection-highlighting setting.
   */
  def highlightReferenceElement(refElement: ScReferenceElement, holder: AnnotationHolder) {
    implicit val project: ProjectContext = refElement.projectContext

    // Annotates the reference when the resolved type belongs to a known
    // collection family, honouring the configured highlighting level.
    def annotateCollectionByType(resolvedType: ScType) {
      if (ScalaNamesUtil.isOperatorName(
        resolvedType.presentableText.substring(0, resolvedType.presentableText.prefixLength(_ != '.')))) return
      val scalaProjectSettings: ScalaProjectSettings = ScalaProjectSettings.getInstance(project)
      scalaProjectSettings.getCollectionTypeHighlightingLevel match {
        case ScalaProjectSettings.COLLECTION_TYPE_HIGHLIGHTING_NONE => return
        case ScalaProjectSettings.COLLECTION_TYPE_HIGHLIGHTING_NOT_QUALIFIED =>
          // Only highlight unqualified references at this level.
          refElement.qualifier match {
            case None =>
            case _ => return
          }
        case ScalaProjectSettings.COLLECTION_TYPE_HIGHLIGHTING_ALL =>
      }
      UsageTrigger.trigger("scala.collection.pack.highlighting")

      // True when resolvedType conforms to any of the classes named in qn.
      def conformsByNames(tp: ScType, qn: List[String]): Boolean =
        qn.flatMap {
          refElement.elementScope.getCachedClass(_)
        }.map {
          ScalaType.designator
        }.exists {
          tp.conforms
        }

      // Creates the info annotation unless the reference is a factory method
      // (make/apply), which keeps its own highlighting.
      def simpleAnnotate(annotationText: String, annotationAttributes: TextAttributesKey) {
        if (SCALA_FACTORY_METHODS_NAMES.contains(refElement.nameId.getText)) {
          return
        }
        val annotation = holder.createInfoAnnotation(refElement.nameId, annotationText)
        annotation.setTextAttributes(annotationAttributes)
      }

      val text = resolvedType.canonicalText
      if (text == null) return

      if (text.startsWith(SCALA_COLLECTION_IMMUTABLE_BASE) || SCALA_PREDEF_IMMUTABLE_BASES.contains(text)) {
        simpleAnnotate(ScalaBundle.message("scala.immutable.collection"), DefaultHighlighter.IMMUTABLE_COLLECTION)
      } else if (text.startsWith(SCALA_COLLECTION_MUTABLE_BASE)) {
        simpleAnnotate(ScalaBundle.message("scala.mutable.collection"), DefaultHighlighter.MUTABLE_COLLECTION)
      } else if (conformsByNames(resolvedType, JAVA_COLLECTIONS_BASES)) {
        simpleAnnotate(ScalaBundle.message("java.collection"), DefaultHighlighter.JAVA_COLLECTION)
      } else if (resolvedType.canonicalText.startsWith(SCALA_COLLECTION_GENERIC_BASE) && refElement.isInstanceOf[ScReferenceExpression]) {
        // Generic companion (e.g. CanBuildFrom): decide by the collection the
        // function type eventually returns.
        refElement.asInstanceOf[ScReferenceExpression].`type`().foreach {
          case FunctionType(returnType, _) => Option(returnType).foreach(a =>
            if (a.canonicalText.startsWith(SCALA_COLLECTION_MUTABLE_BASE)) {
              simpleAnnotate(ScalaBundle.message("scala.mutable.collection"), DefaultHighlighter.MUTABLE_COLLECTION)
            } else if (a.canonicalText.startsWith(SCALA_COLLECTION_IMMUTABLE_BASE)) {
              simpleAnnotate(ScalaBundle.message("scala.immutable.collection"), DefaultHighlighter.IMMUTABLE_COLLECTION)
            })
          case _ =>
        }
      }
    }

    def annotateCollection(resolvedClazz: PsiClass) {
      annotateCollectionByType(ScalaType.designator(resolvedClazz))
    }

    // True when the member belongs to a ScalaTest class and its name is a
    // ScalaTest DSL keyword worth highlighting.
    def isHighlightableScalaTestKeyword(fun: ScMember): Boolean = {
      fun.getContainingClass != null &&
        ScalaTestHighlighterUtil.isHighlightableScalaTestKeyword(fun.getContainingClass.getQualifiedName, fun match {
          case p: ScPatternDefinition => p.bindings.headOption.map(_.getName).orNull
          case _ => fun.getName
        }, fun.getProject)
    }

    // Skip references inside annotation constructor invocations.
    if (refElement.parentOfType(classOf[ScConstructor], strict = false)
      .exists(_.getParent.isInstanceOf[ScAnnotationExpr])) return

    val resolvedElement = refElement.resolve()
    if (refElement.parentOfType(classOf[ScImportExpr]).isEmpty && resolvedElement.isInstanceOf[PsiClass]) {
      annotateCollection(resolvedElement.asInstanceOf[PsiClass])
    }

    val annotation = holder.createInfoAnnotation(refElement.nameId, null)
    val QualNameToType = project.stdTypes.QualNameToType

    // Dispatch on the kind of the resolve target.
    resolvedElement match {
      case c: PsiClass if QualNameToType.contains(c.qualifiedName) => //this is td, it's important!
        annotation.setTextAttributes(DefaultHighlighter.PREDEF)
      case x: ScClass if x.getModifierList.has(ScalaTokenTypes.kABSTRACT) =>
        annotation.setTextAttributes(DefaultHighlighter.ABSTRACT_CLASS)
      case _: ScTypeParam =>
        annotation.setTextAttributes(DefaultHighlighter.TYPEPARAM)
      case x: ScTypeAlias =>
        // Also propagate collection highlighting through the aliased type.
        x.getOriginalElement match {
          case originalElement: ScTypeAliasDefinition =>
            originalElement.aliasedType.foreach(annotateCollectionByType)
          case _ =>
        }
        annotation.setTextAttributes(DefaultHighlighter.TYPE_ALIAS)
      case _: ScClass if referenceIsToCompanionObjectOfClass(refElement) =>
        annotation.setTextAttributes(DefaultHighlighter.OBJECT)
      case _: ScClass =>
        annotation.setTextAttributes(DefaultHighlighter.CLASS)
      case _: ScObject =>
        annotation.setTextAttributes(DefaultHighlighter.OBJECT)
      case _: ScTrait =>
        annotation.setTextAttributes(DefaultHighlighter.TRAIT)
      case x: PsiClass if x.isInterface =>
        annotation.setTextAttributes(DefaultHighlighter.TRAIT)
      case x: PsiClass if x.getModifierList != null && x.getModifierList.hasModifierProperty("abstract") =>
        annotation.setTextAttributes(DefaultHighlighter.ABSTRACT_CLASS)
      case _: PsiClass if refElement.isInstanceOf[ScStableCodeReferenceElement] =>
        annotation.setTextAttributes(DefaultHighlighter.CLASS)
      case _: PsiClass if refElement.isInstanceOf[ScReferenceExpression] =>
        annotation.setTextAttributes(DefaultHighlighter.OBJECT)
      case x: ScBindingPattern =>
        // val/var/pattern bindings: colouring depends on where the binding lives
        // (member vs local) and on the `lazy` modifier.
        val parent = x.nameContext
        parent match {
          case r@(_: ScValue | _: ScVariable) =>
            Option(x.containingClass).foreach(a => if (SCALA_PREDEFINED_OBJECTS.contains(a.qualifiedName)) {
              x.`type`().foreach(annotateCollectionByType)
            })
            getParentByStub(parent) match {
              case _: ScTemplateBody | _: ScEarlyDefinitions =>
                // Member-level binding.
                r match {
                  case mod: ScModifierListOwner if mod.hasModifierProperty("lazy") =>
                    annotation.setTextAttributes(DefaultHighlighter.LAZY)
                  case v: ScValue if isHighlightableScalaTestKeyword(v) =>
                    annotation.setTextAttributes(DefaultHighlighter.SCALATEST_KEYWORD)
                  case _: ScValue => annotation.setTextAttributes(DefaultHighlighter.VALUES)
                  case _: ScVariable => annotation.setTextAttributes(DefaultHighlighter.VARIABLES)
                  case _ =>
                }
              case _ =>
                // Local binding.
                r match {
                  case mod: ScModifierListOwner if mod.hasModifierProperty("lazy") =>
                    annotation.setTextAttributes(DefaultHighlighter.LOCAL_LAZY)
                  case _: ScValue => annotation.setTextAttributes(DefaultHighlighter.LOCAL_VALUES)
                  case _: ScVariable => annotation.setTextAttributes(DefaultHighlighter.LOCAL_VARIABLES)
                  case _ =>
                }
            }
          case _: ScCaseClause =>
            annotation.setTextAttributes(DefaultHighlighter.PATTERN)
          case _: ScGenerator | _: ScEnumerator =>
            annotation.setTextAttributes(DefaultHighlighter.GENERATOR)
          case _ =>
        }
      case x: PsiField =>
        if (!x.hasModifierProperty("final")) annotation.setTextAttributes(DefaultHighlighter.VARIABLES)
        else annotation.setTextAttributes(DefaultHighlighter.VALUES)
      case x: ScParameter if x.isAnonymousParameter => annotation.setTextAttributes(DefaultHighlighter.ANONYMOUS_PARAMETER)
      case _: ScParameter => annotation.setTextAttributes(DefaultHighlighter.PARAMETER)
      case x@(_: ScFunctionDefinition | _: ScFunctionDeclaration | _: ScMacroDefinition) =>
        // Scala methods: factory methods/constructors propagate collection
        // highlighting; otherwise colour by where the method is defined.
        if (SCALA_FACTORY_METHODS_NAMES.contains(x.asInstanceOf[PsiMethod].getName) || x.asInstanceOf[PsiMethod].isConstructor) {
          x.parentOfType(classOf[PsiClass]).foreach(annotateCollection)
        }
        if (isHighlightableScalaTestKeyword(x.asInstanceOf[ScFunction])) {
          annotation.setTextAttributes(DefaultHighlighter.SCALATEST_KEYWORD)
        } else {
          val fun = x.asInstanceOf[ScFunction]
          val clazz = fun.containingClass
          clazz match {
            case o: ScObject if o.allSynthetics.contains(fun) =>
              annotation.setTextAttributes(DefaultHighlighter.OBJECT_METHOD_CALL)
              return
            case _ =>
          }
          getParentByStub(x) match {
            case _: ScTemplateBody | _: ScEarlyDefinitions =>
              getParentByStub(getParentByStub(getParentByStub(x))) match {
                case _: ScClass | _: ScTrait =>
                  annotation.setTextAttributes(DefaultHighlighter.METHOD_CALL)
                case _: ScObject =>
                  annotation.setTextAttributes(DefaultHighlighter.OBJECT_METHOD_CALL)
                case _ =>
              }
            case _ =>
              annotation.setTextAttributes(DefaultHighlighter.LOCAL_METHOD_CALL)
          }
        }
      case x: PsiMethod =>
        // Java methods: static vs instance colouring.
        if (x.isConstructor) {
          x.parentOfType(classOf[PsiClass]).foreach(annotateCollection)
        }
        if (x.getModifierList != null && x.getModifierList.hasModifierProperty("static")) {
          annotation.setTextAttributes(DefaultHighlighter.OBJECT_METHOD_CALL)
        } else {
          annotation.setTextAttributes(DefaultHighlighter.METHOD_CALL)
        }
      case _ => //println("" + x + " " + x.getText)
    }
  }

  /**
   * Highlights a declaration-site element (not a reference): annotations, parameters,
   * case clauses, generators, enumerators, type aliases, the `inline` keyword, and
   * identifiers of the various definition kinds.
   */
  def highlightElement(element: PsiElement, holder: AnnotationHolder) {
    element match {
      case x: ScAnnotation => visitAnnotation(x, holder)
      case x: ScParameter => visitParameter(x, holder)
      case x: ScCaseClause => visitCaseClause(x, holder)
      case x: ScGenerator => visitGenerator(x, holder)
      case x: ScEnumerator => visitEnumerator(x, holder)
      case x: ScTypeAlias => visitTypeAlias(x, holder)
      case _ if element.getNode.getElementType == ScalaTokenTypes.kINLINE =>
        val annotation = holder.createInfoAnnotation(element, null)
        annotation.setTextAttributes(DefaultHighlighter.KEYWORD)
      case _ if element.getNode.getElementType == ScalaTokenTypes.tIDENTIFIER =>
        // Colour a bare identifier by the kind of definition it names.
        getParentByStub(element) match {
          case _: ScNameValuePair =>
            val annotation = holder.createInfoAnnotation(element, null)
            annotation.setTextAttributes(DefaultHighlighter.ANNOTATION_ATTRIBUTE)
          case _: ScTypeParam =>
            val annotation = holder.createInfoAnnotation(element, null)
            annotation.setTextAttributes(DefaultHighlighter.TYPEPARAM)
          case clazz: ScClass =>
            // NOTE(review): duplicates the logic of the private (and otherwise
            // unused) visitClass helper below.
            if (clazz.getModifierList.has(ScalaTokenTypes.kABSTRACT)) {
              val annotation = holder.createInfoAnnotation(clazz.nameId, null)
              annotation.setTextAttributes(DefaultHighlighter.ABSTRACT_CLASS)
            } else {
              val annotation = holder.createInfoAnnotation(clazz.nameId, null)
              annotation.setTextAttributes(DefaultHighlighter.CLASS)
            }
          case _: ScObject =>
            val annotation = holder.createInfoAnnotation(element, null)
            annotation.setTextAttributes(DefaultHighlighter.OBJECT)
          case _: ScTrait =>
            val annotation = holder.createInfoAnnotation(element, null)
            annotation.setTextAttributes(DefaultHighlighter.TRAIT)
          case x: ScBindingPattern =>
            x.nameContext match {
              case r@(_: ScValue | _: ScVariable) =>
                getParentByStub(r) match {
                  case _: ScTemplateBody | _: ScEarlyDefinitions =>
                    // Member-level val/var.
                    val annotation = holder.createInfoAnnotation(element, null)
                    r match {
                      case mod: ScModifierListOwner if mod.hasModifierProperty("lazy") =>
                        annotation.setTextAttributes(DefaultHighlighter.LAZY)
                      case _: ScValue => annotation.setTextAttributes(DefaultHighlighter.VALUES)
                      case _: ScVariable => annotation.setTextAttributes(DefaultHighlighter.VARIABLES)
                      case _ =>
                    }
                  case _ =>
                    // Local val/var.
                    val annotation = holder.createInfoAnnotation(element, null)
                    r match {
                      case mod: ScModifierListOwner if mod.hasModifierProperty("lazy") =>
                        annotation.setTextAttributes(DefaultHighlighter.LOCAL_LAZY)
                      case _: ScValue => annotation.setTextAttributes(DefaultHighlighter.LOCAL_VALUES)
                      case _: ScVariable => annotation.setTextAttributes(DefaultHighlighter.LOCAL_VARIABLES)
                      case _ =>
                    }
                }
              case _: ScCaseClause =>
                val annotation = holder.createInfoAnnotation(element, null)
                annotation.setTextAttributes(DefaultHighlighter.PATTERN)
              case _: ScGenerator | _: ScEnumerator =>
                val annotation = holder.createInfoAnnotation(element, null)
                annotation.setTextAttributes(DefaultHighlighter.GENERATOR)
              case _ =>
            }
          case _: ScFunctionDefinition | _: ScFunctionDeclaration =>
            val annotation = holder.createInfoAnnotation(element, null)
            annotation.setTextAttributes(DefaultHighlighter.METHOD_DECLARATION)
          case _ =>
        }
      case _ =>
    }
  }

  // Highlights the `@` and the annotation's type element.
  private def visitAnnotation(annotation: ScAnnotation, holder: AnnotationHolder): Unit = {
    val annotation1 = holder.createInfoAnnotation(annotation.getFirstChild, null)
    annotation1.setTextAttributes(DefaultHighlighter.ANNOTATION)
    val element = annotation.annotationExpr.constr.typeElement
    val annotation2 = holder.createInfoAnnotation(element, null)
    annotation2.setTextAttributes(DefaultHighlighter.ANNOTATION)
  }

  private def visitTypeAlias(typeAlias: ScTypeAlias, holder: AnnotationHolder): Unit = {
    val annotation = holder.createInfoAnnotation(typeAlias.nameId, null)
    annotation.setTextAttributes(DefaultHighlighter.TYPE_ALIAS)
  }

  // NOTE(review): this private helper is not referenced anywhere in this object;
  // highlightElement re-implements the same logic inline — consider reusing or removing.
  private def visitClass(clazz: ScClass, holder: AnnotationHolder): Unit = {
    if (clazz.getModifierList.has(ScalaTokenTypes.kABSTRACT)) {
      val annotation = holder.createInfoAnnotation(clazz.nameId, null)
      annotation.setTextAttributes(DefaultHighlighter.ABSTRACT_CLASS)
    } else {
      val annotation = holder.createInfoAnnotation(clazz.nameId, null)
      annotation.setTextAttributes(DefaultHighlighter.CLASS)
    }
  }

  private def visitParameter(param: ScParameter, holder: AnnotationHolder): Unit = {
    val annotation = holder.createInfoAnnotation(param.nameId, null)
    val attributesKey =
      if (param.isAnonymousParameter) DefaultHighlighter.ANONYMOUS_PARAMETER
      else DefaultHighlighter.PARAMETER
    annotation.setTextAttributes(attributesKey)
  }

  // Applies `attribute` to every named (non-wildcard) binding of the pattern.
  private def visitPattern(pattern: ScPattern, holder: AnnotationHolder, attribute: TextAttributesKey): Unit = {
    for (binding <- pattern.bindings if !binding.isWildcard) {
      val annotation = holder.createInfoAnnotation(binding.nameId, null)
      annotation.setTextAttributes(attribute)
    }
  }

  private def visitCaseClause(clause: ScCaseClause, holder: AnnotationHolder): Unit = {
    clause.pattern match {
      case Some(x) => visitPattern(x, holder, DefaultHighlighter.PATTERN)
      case None =>
    }
  }

  private def visitGenerator(generator: ScGenerator, holder: AnnotationHolder): Unit = {
    visitPattern(generator.pattern, holder, DefaultHighlighter.GENERATOR)
  }

  private def visitEnumerator(enumerator: ScEnumerator, holder: AnnotationHolder): Unit = {
    visitPattern(enumerator.pattern, holder, DefaultHighlighter.GENERATOR)
  }

  // A reference like `Foo(1)` or `Foo.apply(1)` on a case class actually targets the
  // companion object, so it should be coloured as an object.
  private def referenceIsToCompanionObjectOfClass(r: ScReferenceElement): Boolean = {
    Option(r.getContext) exists {
      case _: ScMethodCall | _: ScReferenceExpression => true // These references to 'Foo' should be 'object' references: case class Foo(a: Int); Foo(1); Foo.apply(1).
      case _ => false
    }
  }
}
| triplequote/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/highlighter/AnnotatorHighlighter.scala | Scala | apache-2.0 | 18,737 |
/*
* Copyright 2009-2016 DigitalGlobe, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*
*/
package org.mrgeo.mapalgebra
import org.apache.spark.{Logging, SparkConf, SparkContext}
import org.mrgeo.job.JobArguments
import org.mrgeo.mapalgebra.parser._
object MapOp {
/**
 * Attempts to read a parser node as a double constant.
 *
 * Boxed Java numeric values are widened to Double; string constants are parsed,
 * yielding None when they do not represent a number. Non-constant nodes and
 * non-numeric constant values also yield None.
 */
def decodeDouble(node: ParserNode): Option[Double] = node match {
  case constant: ParserConstantNode =>
    constant.getValue match {
      case d: java.lang.Double => Some(d)
      case f: java.lang.Float => Some(f.toDouble)
      case l: java.lang.Long => Some(l.toDouble)
      case i: java.lang.Integer => Some(i.toDouble)
      case s: java.lang.Short => Some(s.toDouble)
      case b: java.lang.Byte => Some(b.toDouble)
      case str: String =>
        try {
          Some(str.toDouble)
        } catch {
          // Not a parsable number: treat as "no double value".
          case _: Exception => None
        }
      case _ => None
    }
  case _ => None
}
/**
 * Resolves `node` to a double, following one level of variable indirection:
 * constants are decoded directly, variables are looked up and their bound constant
 * decoded. Throws ParserException for any other node kind or a non-constant binding.
 *
 * NOTE(review): `decodeVariable(...).get` will throw if the variable cannot be
 * resolved, and the `const` binding of the first case is unused (the whole node is
 * forwarded) — TODO confirm both are intended.
 */
def decodeDouble(node:ParserNode, variables: String => Option[ParserNode]): Option[Double] = {
  node match {
    case const: ParserConstantNode => decodeDouble(node)
    case variable: ParserVariableNode =>
      MapOp.decodeVariable(variable, variables).get match {
        case const: ParserConstantNode => decodeDouble(const)
        case _ => throw new ParserException("Term \\"" + node + "\\" is not a double")
      }
    case _ => throw new ParserException("Term \\"" + node + "\\" is not a double")
  }
}
def decodeFloat(node: ParserNode): Option[Float] = {
val value = decodeDouble(node)
if (value.isDefined) {
Some(value.get.toFloat)
}
else {
None
}
}
def decodeFloat(node:ParserNode, variables: String => Option[ParserNode]): Option[Float] = {
node match {
case const: ParserConstantNode => decodeFloat(node)
case variable: ParserVariableNode =>
MapOp.decodeVariable(variable, variables).get match {
case const: ParserConstantNode => decodeFloat(const)
case _ => throw new ParserException("Term \\"" + node + "\\" is not a float")
}
case _ => throw new ParserException("Term \\"" + node + "\\" is not a float")
}
}
def decodeLong(node: ParserNode): Option[Long] = {
decodeDouble(node) match {
case Some(value) => Some(value.toLong)
case _ => None
}
}
def decodeLong(node:ParserNode, variables: String => Option[ParserNode]): Option[Long] = {
node match {
case const: ParserConstantNode => decodeLong(node)
case variable: ParserVariableNode =>
MapOp.decodeVariable(variable, variables).get match {
case const: ParserConstantNode => decodeLong(const)
case _ => throw new ParserException("Term \\"" + node + "\\" is not a long")
}
case _ => throw new ParserException("Term \\"" + node + "\\" is not a long")
}
}
def decodeInt(node: ParserNode): Option[Int] = {
decodeDouble(node) match {
case Some(value) => Some(value.toInt)
case _ => None
}
}
def decodeInt(node:ParserNode, variables: String => Option[ParserNode]): Option[Int] = {
node match {
case const: ParserConstantNode => decodeInt(node)
case variable: ParserVariableNode =>
MapOp.decodeVariable(variable, variables).get match {
case const: ParserConstantNode => decodeInt(const)
case _ => throw new ParserException("Term \\"" + node + "\\" is not a integer")
}
case _ => throw new ParserException("Term \\"" + node + "\\" is not a integer")
}
}
def decodeShort(node: ParserNode): Option[Short] = {
decodeDouble(node) match {
case Some(value) => Some(value.toShort)
case _ => None
}
}
def decodeShort(node:ParserNode, variables: String => Option[ParserNode]): Option[Short] = {
node match {
case const: ParserConstantNode => decodeShort(node)
case variable: ParserVariableNode =>
MapOp.decodeVariable(variable, variables).get match {
case const: ParserConstantNode => decodeShort(const)
case _ => throw new ParserException("Term \\"" + node + "\\" is not a short")
}
case _ => throw new ParserException("Term \\"" + node + "\\" is not a short")
}
}
def decodeByte(node: ParserNode): Option[Byte] = {
decodeDouble(node) match {
case Some(value) => Some(value.toByte)
case _ => None
}
}
def decodeByte(node:ParserNode, variables: String => Option[ParserNode]): Option[Byte] = {
node match {
case const: ParserConstantNode => decodeByte(node)
case variable: ParserVariableNode =>
MapOp.decodeVariable(variable, variables).get match {
case const: ParserConstantNode => decodeByte(const)
case _ => throw new ParserException("Term \\"" + node + "\\" is not a byte")
}
case _ => throw new ParserException("Term \\"" + node + "\\" is not a byte")
}
}
def decodeString(node: ParserNode): Option[String] = {
node match {
case c: ParserConstantNode => Some(c.getValue.toString)
case _ => None
}
}
def decodeString(node:ParserNode, variables: String => Option[ParserNode]): Option[String] = {
node match {
case const: ParserConstantNode => decodeString(node)
case variable: ParserVariableNode =>
MapOp.decodeVariable(variable, variables).get match {
case const: ParserConstantNode => decodeString(const)
case _ => throw new ParserException("Term \\"" + node + "\\" is not a string")
}
case _ => throw new ParserException("Term \\"" + node + "\\" is not a string")
}
}
def decodeBoolean(node: ParserNode): Option[Boolean] = {
decodeString(node) match {
case Some(value) => value.toLowerCase match {
case "true" | "1" | "yes" => Some(true)
case "false" | "0" | "no" => Some(false)
case _ => None
}
case _ => None
}
}
def decodeBoolean(node:ParserNode, variables: String => Option[ParserNode]): Option[Boolean] = {
node match {
case const: ParserConstantNode => decodeBoolean(node)
case variable: ParserVariableNode =>
MapOp.decodeVariable(variable, variables).get match {
case const: ParserConstantNode => decodeBoolean(const)
case _ => throw new ParserException("Term \\"" + node + "\\" is not a boolean")
}
case _ => throw new ParserException("Term \\"" + node + "\\" is not a boolean")
}
}
def decodeVariable(node: ParserVariableNode, variables: String => Option[ParserNode]): Option[ParserNode] = {
variables(node.getName) match {
case Some(value) =>
value match {
case v1: ParserVariableNode => decodeVariable(v1, variables)
case p: ParserNode => Some(p)
case _ => None
}
case _ => None
}
}
}
/**
 * Base class for all map-algebra operations. Concrete operations implement the
 * setup/execute/teardown lifecycle and receive their SparkContext via
 * `context(cont)`.
 */
abstract class MapOp extends Logging {
  // SparkContext handed to this operation; stays null until context(...) is called.
  private var sc: SparkContext = null

  /** Records the SparkContext this operation will run under. */
  def context(cont: SparkContext) = sc = cont

  /** The SparkContext previously supplied via context(cont); null if none was set. */
  def context(): SparkContext = sc

  /** Classes this operation wants registered for serialization; none by default. */
  def registerClasses(): Array[Class[_]] = Array.empty[Class[_]]

  /** Lifecycle hook invoked before execution; the Boolean result presumably
    * signals success — confirm against the job runner. */
  def setup(job: JobArguments, conf: SparkConf): Boolean

  /** Runs the operation under the given SparkContext. */
  def execute(context: SparkContext): Boolean

  /** Lifecycle hook invoked after execution. */
  def teardown(job: JobArguments, conf: SparkConf): Boolean
}
| akarmas/mrgeo | mrgeo-core/src/main/scala/org/mrgeo/mapalgebra/MapOp.scala | Scala | apache-2.0 | 7,522 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.stream.table.validation
import java.sql.Timestamp
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.planner.utils.{EmptyTableAggFunc, TableTestBase}
import org.junit.Test
/**
 * Negative tests for `flatAggregate`: every case builds an invalid query and
 * expects a ValidationException whose message is asserted exactly, so the
 * literal strings below must stay in sync with the validator's wording.
 */
class TableAggregateValidationTest extends TableTestBase {
  // Calling the table aggregate with more arguments than any eval signature accepts.
  @Test
  def testInvalidParameterNumber(): Unit = {
    expectedException.expect(classOf[ValidationException])
    expectedException.expectMessage("Given parameters do not match any signature. \\n" +
      "Actual: (java.lang.Long, java.lang.Integer, java.lang.String) \\n" +
      "Expected: (int), (java.sql.Timestamp, java.sql.Timestamp), " +
      "(long, int), (long, java.sql.Timestamp)")
    val util = streamTestUtil()
    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
    val func = new EmptyTableAggFunc
    table
      .groupBy('c)
      // must fail. func does not take 3 parameters
      .flatAggregate(func('a, 'b, 'c))
      .select('_1, '_2, '_3)
  }
  // Right arity but argument types that match no eval signature.
  @Test
  def testInvalidParameterType(): Unit = {
    expectedException.expect(classOf[ValidationException])
    expectedException.expectMessage("Given parameters do not match any signature. \\n" +
      "Actual: (java.lang.Long, java.lang.String) \\n" +
      "Expected: (int), (java.sql.Timestamp, java.sql.Timestamp), " +
      "(long, int), (long, java.sql.Timestamp)")
    val util = streamTestUtil()
    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
    val func = new EmptyTableAggFunc
    table
      .groupBy('c)
      // must fail. func take 2 parameters of type Long and Timestamp or Long Int
      .flatAggregate(func('a, 'c))
      .select('_1, '_2, '_3)
  }
  // Window properties (e.g. 'x.start) require a windowed aggregation.
  @Test
  def testInvalidWithWindowProperties(): Unit = {
    expectedException.expect(classOf[ValidationException])
    expectedException.expectMessage("Window properties can only be used on windowed tables.")
    val util = streamTestUtil()
    val table = util.addTableSource[(Long, Int, Timestamp)]('a, 'b, 'c)
    val func = new EmptyTableAggFunc
    table
      .groupBy('b)
      .flatAggregate(func('a, 'b) as ('x, 'y))
      .select('x.start, 'y)
  }
  // No aggregate calls allowed in the select directly following flatAggregate.
  @Test
  def testInvalidWithAggregation(): Unit = {
    expectedException.expect(classOf[ValidationException])
    expectedException.expectMessage("Aggregate functions are not supported in the " +
      "select right after the aggregate or flatAggregate operation.")
    val util = streamTestUtil()
    val table = util.addTableSource[(Long, Int, Timestamp)]('a, 'b, 'c)
    val func = new EmptyTableAggFunc
    table
      .groupBy('b)
      .flatAggregate(func('a, 'b) as ('x, 'y))
      .select('x.count)
  }
  // Aggregate expressions may not be nested inside the table aggregate's arguments.
  @Test
  def testInvalidParameterWithAgg(): Unit = {
    expectedException.expect(classOf[ValidationException])
    expectedException.expectMessage(
      "It's not allowed to use an aggregate function as input of another aggregate function")
    val util = streamTestUtil()
    val table = util.addTableSource[(Long, Int, Timestamp)]('a, 'b, 'c)
    val func = new EmptyTableAggFunc
    table
      .groupBy('b)
      // must fail. func take agg function as input
      .flatAggregate(func('a.sum, 'c))
      .select('_1, '_2, '_3)
  }
  // The alias list must match the function's output arity exactly.
  @Test
  def testInvalidAliasWithWrongNumber(): Unit = {
    expectedException.expect(classOf[ValidationException])
    expectedException.expectMessage("List of column aliases must have same degree as " +
      "table; the returned table of function " +
      "'org.apache.flink.table.planner.utils.EmptyTableAggFunc' has 2 columns, " +
      "whereas alias list has 3 columns")
    val util = streamTestUtil()
    val table = util.addTableSource[(Long, Int, Timestamp)]('a, 'b, 'c)
    val func = new EmptyTableAggFunc
    table
      .groupBy('b)
      // must fail. alias with wrong number of fields
      .flatAggregate(func('a, 'b) as ('a, 'b, 'c))
      .select('*)
  }
  // Aliases may not collide with the grouping key column names.
  @Test
  def testAliasWithNameConflict(): Unit = {
    expectedException.expect(classOf[ValidationException])
    expectedException.expectMessage("Ambiguous column name: b")
    val util = streamTestUtil()
    val table = util.addTableSource[(Long, Int, Timestamp)]('a, 'b, 'c)
    val func = new EmptyTableAggFunc
    table
      .groupBy('b)
      // must fail. alias with name conflict
      .flatAggregate(func('a, 'b) as ('a, 'b))
      .select('*)
  }
}
| GJL/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/TableAggregateValidationTest.scala | Scala | apache-2.0 | 5,181 |
package org.jetbrains.plugins.scala.codeInsight.intention.types
import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.ScalaBundle
import org.jetbrains.plugins.scala.codeInsight.intention.IntentionUtil
import org.jetbrains.plugins.scala.lang.psi.TypeAdjuster
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScBindingPattern, ScTypedPattern, ScWildcardPattern}
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameter
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunctionDefinition, ScPatternDefinition, ScVariableDefinition}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.types.{BaseTypes, ScType, ScTypeText}
import org.jetbrains.plugins.scala.util.IntentionAvailabilityChecker
/**
* Author: Svyatoslav Ilinskiy
* Date: 22.12.15.
*/
class MakeTypeMoreSpecificIntention extends PsiElementBaseIntentionAction {
  // Performs the rewrite by delegating to the strategy for the element's owner
  // (variable, value, or function definition).
  override def invoke(project: Project, editor: Editor, element: PsiElement): Unit = {
    ToggleTypeAnnotation.complete(new MakeTypeMoreSpecificStrategy(Option(editor)), element)
  }
  override def isAvailable(project: Project, editor: Editor, element: PsiElement): Boolean = {
    if (element == null || !IntentionAvailabilityChecker.checkIntention(this, element)) false
    else {
      // Flipped (and the intention text set) as a side effect by the dry-run
      // adapter below when a more specific type actually exists.
      var isAvailable = false
      def text(s: String): Unit = {
        setText(s)
        isAvailable = true
      }
      // Dry-run strategy: each callback only checks whether the declared type
      // can be narrowed (computeBaseTypes non-empty) instead of rewriting.
      val desc = new StrategyAdapter {
        override def removeFromVariable(variable: ScVariableDefinition): Unit = {
          for {
            declared <- variable.declaredType
            expr <- variable.expr
            tp <- expr.getType()
            if MakeTypeMoreSpecificStrategy.computeBaseTypes(declared, tp).nonEmpty
          } text(ScalaBundle.message("make.type.more.specific"))
        }
        override def removeFromValue(value: ScPatternDefinition): Unit = {
          for {
            declared <- value.declaredType
            expr <- value.expr
            tp <- expr.getType()
            if MakeTypeMoreSpecificStrategy.computeBaseTypes(declared, tp).nonEmpty
          } text(ScalaBundle.message("make.type.more.specific"))
        }
        override def removeFromFunction(function: ScFunctionDefinition): Unit = {
          for {
            declared <- function.returnType
            expr <- function.body
            tp <- expr.getType()
            if MakeTypeMoreSpecificStrategy.computeBaseTypes(declared, tp).nonEmpty
          } text(ScalaBundle.message("make.type.more.specific.fun"))
        }
      }
      ToggleTypeAnnotation.complete(desc, element)
      isAvailable
    }
  }
  override def getFamilyName: String = ScalaBundle.message("make.type.more.specific")
}
class MakeTypeMoreSpecificStrategy(editor: Option[Editor]) extends Strategy {
  import MakeTypeMoreSpecificStrategy._
  // Replaces type element `te` with a more specific candidate. With exactly one
  // candidate the replacement is applied directly; otherwise an in-editor
  // template lets the user pick from the sorted candidates.
  // NOTE(review): sortWith uses `conforms`, which is not guaranteed to be a
  // strict total ordering; java's TimSort may reject inconsistent comparators
  // ("Comparison method violates its general contract") — worth confirming.
  def doTemplate(te: ScTypeElement, declaredType: ScType, dynamicType: ScType, context: PsiElement, editor: Editor): Unit = {
    val types = computeBaseTypes(declaredType, dynamicType).sortWith((t1, t2) => t1.conforms(t2))
    if (types.size == 1) {
      val replaced = te.replace(ScalaPsiElementFactory.createTypeElementFromText(types.head.canonicalText, te.getContext, te))
      TypeAdjuster.markToAdjust(replaced)
    } else {
      val texts = types.map(ScTypeText)
      val expr = new ChooseTypeTextExpression(texts, ScTypeText(declaredType))
      IntentionUtil.startTemplate(te, context, expr, editor)
    }
  }
  // Narrows a function's declared return type toward its body's actual type.
  override def removeFromFunction(function: ScFunctionDefinition): Unit = {
    for {
      edit <- editor
      te <- function.returnTypeElement
      body <- function.body
      tp <- body.getType()
      declared <- te.getType()
    } doTemplate(te, declared, tp, function.getParent, edit)
  }
  // Narrows a val's declared type toward its initializer's actual type.
  override def removeFromValue(value: ScPatternDefinition): Unit = {
    for {
      edit <- editor
      te <- value.typeElement
      body <- value.expr
      tp <- body.getType()
      declared <- te.getType()
    } doTemplate(te, declared, tp, value.getParent, edit)
  }
  // Narrows a var's declared type toward its initializer's actual type.
  override def removeFromVariable(variable: ScVariableDefinition): Unit = {
    for {
      edit <- editor
      te <- variable.typeElement
      body <- variable.expr
      tp <- body.getType()
      declared <- te.getType()
    } doTemplate(te, declared, tp, variable.getParent, edit)
  }
  // This strategy only narrows existing annotations; all add/other-remove
  // operations are intentionally no-ops.
  override def addToPattern(pattern: ScBindingPattern): Unit = ()
  override def addToWildcardPattern(pattern: ScWildcardPattern): Unit = ()
  override def addToValue(value: ScPatternDefinition): Unit = ()
  override def addToFunction(function: ScFunctionDefinition): Unit = ()
  override def removeFromPattern(pattern: ScTypedPattern): Unit = ()
  override def addToVariable(variable: ScVariableDefinition): Unit = ()
  override def removeFromParameter(param: ScParameter): Unit = ()
  override def addToParameter(param: ScParameter): Unit = ()
}
object MakeTypeMoreSpecificStrategy {
  /** Candidate replacement types: the expression's own type plus its base
    * types, keeping only those that conform to the declared type without being
    * equivalent to it. */
  def computeBaseTypes(declaredType: ScType, dynamicType: ScType): Seq[ScType] = {
    val candidates = dynamicType +: BaseTypes.get(dynamicType)
    for {
      candidate <- candidates
      if candidate.conforms(declaredType)
      if !candidate.equiv(declaredType)
    } yield candidate
  }
} | jeantil/intellij-scala | src/org/jetbrains/plugins/scala/codeInsight/intention/types/MakeTypeMoreSpecificIntention.scala | Scala | apache-2.0 | 5,441 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.codegen.calls
import org.apache.commons.lang3.ClassUtils
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.table.codegen.CodeGenUtils._
import org.apache.flink.table.codegen.GeneratedExpression.NEVER_NULL
import org.apache.flink.table.codegen.{CodeGenException, CodeGenerator, GeneratedExpression}
import org.apache.flink.table.functions.TableFunction
import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils._
import org.apache.flink.table.typeutils.TypeCheckUtils
import scala.collection.mutable
/**
* Generates a call to user-defined [[TableFunction]].
*
* @param tableFunction user-defined [[TableFunction]] that might be overloaded
* @param signature actual signature with which the function is called
* @param returnType actual return type required by the surrounding
*/
class TableFunctionCallGen(
    tableFunction: TableFunction[_],
    signature: Seq[TypeInformation[_]],
    returnType: TypeInformation[_])
  extends CallGenerator {
  override def generate(
      codeGenerator: CodeGenerator,
      operands: Seq[GeneratedExpression])
    : GeneratedExpression = {
    // determine function method
    val matchingSignature = getEvalMethodSignature(tableFunction, signature)
      .getOrElse(throw new CodeGenException("No matching signature found."))
    // get the expanded parameter types
    // (a trailing array parameter is treated as a vararg: every remaining
    // operand takes the array's component type)
    var paramClasses = new mutable.ArrayBuffer[Class[_]]
    for (i <- operands.indices) {
      if (i < matchingSignature.length - 1) {
        paramClasses += matchingSignature(i)
      } else if (matchingSignature.last.isArray) {
        // last argument is an array type
        paramClasses += matchingSignature.last.getComponentType
      } else {
        // last argument is not an array type
        paramClasses += matchingSignature.last
      }
    }
    // convert parameters for function (output boxing)
    val parameters = paramClasses.zip(operands).map { case (paramClass, operandExpr) =>
      if (paramClass.isPrimitive) {
        // primitive formal parameter: internal term can be passed through as-is
        operandExpr
      } else if (ClassUtils.isPrimitiveWrapper(paramClass)
          && TypeCheckUtils.isTemporal(operandExpr.resultType)) {
        // we use primitives to represent temporal types internally, so no casting needed here
        val exprOrNull: String = if (codeGenerator.nullCheck) {
          s"${operandExpr.nullTerm} ? null : " +
            s"(${paramClass.getCanonicalName}) ${operandExpr.resultTerm}"
        } else {
          operandExpr.resultTerm
        }
        operandExpr.copy(resultTerm = exprOrNull)
      } else {
        // reference type: box the field and, when null checking is enabled,
        // guard the boxed term against null
        val boxedTypeTerm = boxedTypeTermForTypeInfo(operandExpr.resultType)
        val boxedExpr = codeGenerator.generateOutputFieldBoxing(operandExpr)
        val exprOrNull: String = if (codeGenerator.nullCheck) {
          s"${boxedExpr.nullTerm} ? null : ($boxedTypeTerm) ${boxedExpr.resultTerm}"
        } else {
          boxedExpr.resultTerm
        }
        boxedExpr.copy(resultTerm = exprOrNull)
      }
    }
    // generate function call: emit each parameter's preparation code, then the
    // eval(...) invocation on the reusable function instance
    val functionReference = codeGenerator.addReusableFunction(tableFunction)
    val functionCallCode =
      s"""
        |${parameters.map(_.code).mkString("\\n")}
        |$functionReference.eval(${parameters.map(_.resultTerm).mkString(", ")});
        |""".stripMargin
    // has no result
    GeneratedExpression(
      functionReference,
      NEVER_NULL,
      functionCallCode,
      returnType)
  }
}
| zimmermatt/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/codegen/calls/TableFunctionCallGen.scala | Scala | apache-2.0 | 4,356 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.oap.expression
import org.apache.arrow.gandiva.evaluator._
import org.apache.arrow.gandiva.exceptions.GandivaException
import org.apache.arrow.gandiva.expression._
import org.apache.arrow.vector.types.pojo.ArrowType
import org.apache.arrow.vector.types.pojo.Field
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types._
import scala.collection.mutable.ListBuffer
// Columnar counterpart of Alias: code generation is delegated entirely to the
// aliased child expression (the alias itself contributes no Gandiva node).
class ColumnarAlias(child: Expression, name: String)(
    override val exprId: ExprId,
    override val qualifier: Seq[String],
    override val explicitMetadata: Option[Metadata])
    extends Alias(child, name)(exprId, qualifier, explicitMetadata)
    with ColumnarExpression {
  override def doColumnarCodeGen(args: java.lang.Object): (TreeNode, ArrowType) = {
    // NOTE(review): assumes `child` mixes in ColumnarExpression; a
    // non-columnar child would fail this cast at runtime — confirm upstream
    // guarantees.
    child.asInstanceOf[ColumnarExpression].doColumnarCodeGen(args)
  }
}
class ColumnarAttributeReference(
    name: String,
    dataType: DataType,
    nullable: Boolean = true,
    override val metadata: Metadata = Metadata.empty)(
    override val exprId: ExprId,
    override val qualifier: Seq[String])
    extends AttributeReference(name, dataType, nullable, metadata)(exprId, qualifier)
    with ColumnarExpression {
  /** Emits the Gandiva tree node for this attribute: the Spark data type is
    * mapped to its Arrow equivalent and the column is referenced as a nullable
    * field named "<name>#<exprId>". */
  override def doColumnarCodeGen(args: java.lang.Object): (TreeNode, ArrowType) = {
    val arrowType = CodeGeneration.getResultType(dataType)
    val arrowField = Field.nullable(s"${name}#${exprId.id}", arrowType)
    val fieldNode = TreeBuilder.makeField(arrowField)
    (fieldNode, arrowType)
  }
}
| Intel-bigdata/OAP | oap-native-sql/core/src/main/scala/com/intel/oap/expression/ColumnarNamedExpressions.scala | Scala | apache-2.0 | 2,275 |
// Neg-test fixture: a top-level class with a nested Inner class; neither
// declares a companion or an apply method.
class Outer(x: Int) {
  class Inner(y: Int) {
  }
}
// Second fixture of the same shape, used via Test.outer2.
class Outer2(x: Int) {
  class Inner(y: Int) {
  }
}
object Test {
  // Creator applications: calling the class name without `new`.
  def outer = Outer(1)
  def outer2 = Outer2(1)
  // Expected to be rejected by the compiler (this file is a neg test); the
  // inline marker on the next line must stay exactly as written.
  val x = outer.Inner(2) // error (at posttyper)
} | dotty-staging/dotty | tests/neg/i8569.scala | Scala | apache-2.0 | 218 |
//package codesniffer.deckard.search
//
//import java.io.File
//import java.lang.reflect.Modifier
//import java.util
//
//import codesniffer.core._
//import codesniffer.deckard.{ArrayVec, Indexer, CharacVec, MemWriter}
//import codesniffer.deckard.vgen.{Context, DirScanConfig}
//import codesniffer.api.Node
//import codesniffer.api.body.MethodDeclaration
//import codesniffer.api.expr.ThisExpr
//import codesniffer.api.stmt.EmptyStmt
//
//import scala.collection.convert.wrapAsScala._
//import scala.collection.mutable
//import scala.concurrent._
//import scala.util.{Failure, Success}
//
///**
// * Created by Bowen Cai on 5/22/2015.
// */
//object LibSearch {
//
// type SortedList = util.TreeMap[Double, (CharacVec[String], CharacVec[String])]
//
// def main (args: Array[String]): Unit = {
//
// val rt = Runtime.getRuntime
// val procNum = rt.availableProcessors() * 3 / 2
//
// var path2lib: String = "E:\\research\\top\\guava\\guava\\src"
//// var path2lib = "E:\\research\\top\\guava\\guava\\src\\com\\google\\common\\util\\concurrent"
//// var path2lib = "E:\\research\\top\\jdk-1.7\\java\\util\\concurrent"
//// var path2App: String = "E:\\research\\top\\derby\\java"
// var path2App = "E:\\research\\top\\h2-1.4.187-sources"
// var resultSizePerFunc = 8
// var resultSizeTotal = 200
//
// val _nodeFilter = (node: Node)=>node.isInstanceOf[EmptyStmt] || node.isInstanceOf[ThisExpr]
// val _libConfig = new DirScanConfig
// _libConfig.filterDirName = (name: String) => (
// name.equals("package-info.java") // filter out package file
// // || name.endsWith("Tests.java") // filter out test file
// || name.endsWith("Test.java") // filter out test file
// )
// _libConfig.filterNode = _nodeFilter
// _libConfig.filterMethod = (m: MethodDeclaration) => !Modifier.isPublic(m.getModifiers)
// val _appCfg = new DirScanConfig
//// _appCfg.filterDirName = (name: String) => (
//// name.equals("package-info.java") // filter out package file
//// || name.endsWith("Test.java") // filter out test file
//// )
// _appCfg.filterNode = _nodeFilter
//
// val _indexer = new Indexer[String]
//
// val vsLib = CrossMatch.vgen(path2lib, _indexer, _libConfig)
// val useCache = rt.maxMemory() > 1073741824 * (3 + vsLib.size / 1024)
//
// println(s"Library $path2lib processed, ${vsLib.size} generated. Cache ${if (useCache) "opened" else "closed" }")
//
// val searchedApp = if (useCache) {
// val hs = new mutable.HashMap[(Int, Int), MemWriter[String]]
// hs.sizeHint(9999)
// hs
// } else null
//
//
// val threshold = _appCfg.distThreshold
// val appDir = new File(path2App)
// val summary = new SortedList
//
// implicit val _exe = ExecutionContext.fromExecutor(java.util.concurrent.Executors.newFixedThreadPool(procNum)).prepare()
// println(s"searching applications with $procNum threads")
// val length = vsLib.length
// val step = length / procNum
//
// val t0 = System.currentTimeMillis()
// val tasks = for (left <- Range(0, length, step)) yield future[Unit] {
// val right = if (length - left < step) length else left + step
// for (i <- left until right) {
// val libFunc = vsLib(i)
// val libC = libFunc.count
// val lower: Int = libC * 8 / 10
// val upper: Int = libC * 5 / 3
//
// def newSearch(): MemWriter[String] = {
// val ls = new MemWriter[String]
// ls.sizeHint(1024)
// val scanner = new SlicerScanner[String](new Context(_appCfg, null, null, _indexer, ls))
// scanner.methodVisitor.lowerBound = lower
// scanner.methodVisitor.upperBound = upper
// appDir match {
// case where if where.isDirectory => scanner.scanDir(where, recursive = true)
// case src if src.isFile => scanner.scanFile(src)
// }
// ls
// }
// val appVecList = if (useCache) {
// searchedApp.getOrElse((lower, upper), {
// val nls = newSearch()
// searchedApp.put((lower, upper), nls)
// nls
// })
// } else newSearch()
//
// if (appVecList.size > 0) {
// val result = new SortedList
// for (appFunc <- appVecList) {
// val appC = appFunc.count
// if (math.abs(appC - libC) < 40) {
// val v1 = libFunc.asInstanceOf[ArrayVec[String]]
//// val v2: ArrayVec[String] = appFunc match {
//// case a: ArrayVec[String] =>a
//// case w: WeightedVec[String] => w.underlying.asInstanceOf[ArrayVec[String]]
//// }
//// val dist = v1.math.EuclideanDist(v2)
// val dist = v1.math.EuclideanDist(appFunc.asInstanceOf[ArrayVec[String]])
// if (dist < threshold)
// result.put(dist, (libFunc, appFunc))
// }
// }
// while (result.size() > resultSizePerFunc) result.pollLastEntry()
// println(s"app searched for ${libFunc.location.scope}")
// summary.putAll(result)
// }
// rt.gc()
// } // for apps
// rt.gc()
// rt.gc()
// println(s"lib $left to $right searched\r\n")
// }
//
// Future.sequence(tasks) onComplete{
// case Success(r) =>
// while (summary.size() > resultSizeTotal) summary.pollLastEntry()
// val t1 = System.currentTimeMillis()
// var rank = 1
// println(s"find ${summary.size()} clone pair, time ${t1 - t0} ms")
//
// for ((dist, pair) <- summary) {
// println(s"Rank $rank, distance $dist\r\nnode count: ${pair._1.count}, ${pair._1.location}\r\n${pair._1.data.get}")
// println(s"node count: ${pair._2.count}, ${pair._2.location}\r\n${pair._2.data.get}")
// println()
// rank += 1
// }
// System.exit(0)
// case Failure(t) =>
// println(s"Search failed")
// t.printStackTrace()
// System.exit(0)
// }
//
// }
//
//}
//
///// WARN: racing!!!
//// val appVecList = searchedApp.getOrElse((lower, upper),{
//// val ls = new MemWriter[String]
//// ls.sizeHint(9999)
//// val scanner = new SlicerScanner[String](new Context(_appCfg, null, null, _indexer, ls))
//// scanner.methodVisitor.vecGen.after =
//// (m: MethodDeclaration, v: CharacVec[String], ctx: Context[String]) => {
//// if (v.count > 20) {
////// v.data = Some(m.toString.intern())
//// ctx.vecWriter.write(v)
//// }
//// }
//// scanner.methodVisitor.lowerBound = lower
//// scanner.methodVisitor.upperBound = upper
//// appDir match {
//// case where if where.isDirectory => scanner.scanDir(where, recursive = true)
//// case src if src.isFile => scanner.scanFile(src)
//// }
//// searchedApp.put((lower, upper), ls)
//// ls
//// }) | xkommando/CodeSniffer | deckard/src/main/scala/codesniffer/deckard/search/LibSearch.scala | Scala | lgpl-3.0 | 7,235 |
package com.hyenawarrior.OldNorseGrammar.grammar.morphophonology.ProductiveTransforms
import com.hyenawarrior.OldNorseGrammar.grammar.phonology.Vowel
import com.hyenawarrior.OldNorseGrammar.grammar.phonology.Vowel.{LONG, SHORT, isBackVowel, isFrontVowel, isLabialVowel, isLong, isLow}
/**
* Created by HyenaWarrior on 2017.10.23..
*/
object VowelDeletion {
  /** Drops characters that are absorbed by their predecessor:
    * 'i' after a long front vowel, 'a' after a low back vowel, 'u' after a
    * labial back vowel, and a short vowel immediately following the same
    * vowel in its long form. */
  def apply(str: String): String = {
    // Pair each character with its predecessor; the first character is paired
    // with a space, so none of the deletion cases can match it.
    val withPredecessor = (' ' +: str) zip str
    val kept = withPredecessor filterNot {
      case (prev, 'i') => isLong(prev) && isFrontVowel(prev)
      case (prev, 'a') => isBackVowel(prev) && isLow(prev)
      case (prev, 'u') => isBackVowel(prev) && isLabialVowel(prev)
      case (Vowel(prev, LONG), Vowel(curr, SHORT)) => curr == prev
      case _ => false
    }
    kept.map { case (_, current) => current }.mkString
  }
}
| HyenaSoftware/IG-Dictionary | OldNorseGrammarEngine/src/main/scala/com/hyenawarrior/OldNorseGrammar/grammar/morphophonology/ProductiveTransforms/VowelDeletion.scala | Scala | lgpl-3.0 | 798 |
/*
* Monkeyman static web site generator
* Copyright (C) 2013 Wilfred Springer
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package nl.flotsam.monkeyman.decorator.markdown
import nl.flotsam.monkeyman.Resource
import nl.flotsam.monkeyman.decorator.ResourceDecoration
import nl.flotsam.monkeyman.util.Closeables._
import org.apache.commons.io.{FilenameUtils, IOUtils}
import org.pegdown.ast.{SimpleNode, TextNode, HeaderNode}
import org.pegdown.{LinkRenderer, ToHtmlSerializer, PegDownProcessor}
import nl.flotsam.monkeyman.util.Logging
case class MarkdownDecoration(resource: Resource, sections: Boolean)
  extends ResourceDecoration(resource) with Logging {
  /** Title taken from the first markdown header, if any. */
  def extractedTitle = extract._1
  /** The markdown rendered to an HTML fragment. */
  def html = extract._2
  /** Parses the markdown and yields (extracted title, rendered html).
    *
    * Cached as a lazy val: title, html, open and asHtmlFragment all derive
    * from this value, and the previous `def` re-opened and re-parsed the
    * resource on every access. Assumes the underlying resource content is
    * stable for the lifetime of this decorator.
    */
  lazy val extract = using(resource.open) {
    in =>
      val markdown = IOUtils.toString(in, "UTF-8")
      val processor = new PegDownProcessor
      val rootNode = processor.parseMarkdown(markdown.toCharArray)
      val visitor = new TitleExtractingToHtmlSerializer(new LinkRenderer)
      // When sections are requested, the whole document is wrapped in a
      // <section> and horizontal rules become section boundaries (see the
      // SimpleNode visit below).
      val html =
        if (sections) "<section>" + visitor.toHtml(rootNode) + "</section>"
        else visitor.toHtml(rootNode)
      val title = visitor.title
      (title, html)
  }
  /** An explicitly set resource title wins; otherwise fall back to the first
    * markdown header. */
  override def title = resource.title.orElse(extractedTitle)
  // The decorated resource is an HTML fragment, not a complete document.
  override val contentType = "text/x-html-fragment"
  /** Swaps the original file extension for ".frag"; extension-less paths are
    * left untouched. */
  override def path = {
    if (!FilenameUtils.getExtension(resource.path).isEmpty) FilenameUtils.removeExtension(resource.path) + ".frag"
    else resource.path
  }
  override def open = IOUtils.toInputStream(html, "UTF-8")
  override def asHtmlFragment = Some(html)
  /** Serializer that renders HTML while capturing the text of the first header
    * node as the document title; when `sections` is set, horizontal rules are
    * rendered as section boundaries. */
  class TitleExtractingToHtmlSerializer(linkRenderer: LinkRenderer) extends ToHtmlSerializer(linkRenderer) {
    // True while visiting the children of the first header node.
    var inheader = false
    var title: Option[String] = None
    override def visit(node: HeaderNode) {
      if (title.isEmpty) {
        // First header: visit only the children so the title TextNode can be
        // intercepted below.
        inheader = true
        visitChildren(node)
        inheader = false
      } else {
        super.visit(node)
      }
    }
    override def visit(node: TextNode) {
      if (inheader && title.isEmpty) {
        // Capture (and do not render) the first text of the first header.
        title = Some(node.getText)
      } else {
        super.visit(node)
      }
    }
    override def visit(node: SimpleNode) {
      node.getType match {
        case SimpleNode.Type.HRule if (sections) =>
          printer.println.print("</section><section>")
        case _ => super.visit(node)
      }
    }
  }
}
| wspringer/monkeyman | src/main/scala/nl/flotsam/monkeyman/decorator/markdown/MarkdownDecoration.scala | Scala | gpl-2.0 | 3,046 |
package me.panavtec
import cats.free.Free
import cats.{Id, ~>}
import scala.io.StdIn
object UserInteractionApplication {
  // Algebra of console interactions, to be interpreted over Free.
  sealed trait UserInteraction[A]
  // Read one line from stdin.
  case object Ask extends UserInteraction[String]
  // Print `what` to stdout.
  case class Tell(what: String) extends UserInteraction[Unit]
  // Lifts a plain instruction into the Free monad so instructions compose in
  // for-comprehensions.
  implicit def liftFree[A](u: UserInteraction[A]): Free[UserInteraction, A] = Free.liftF(u)
  // Effectful interpreter: executes each instruction directly in the Id monad.
  def interpreter = new (UserInteraction ~> Id) {
    override def apply[A](fa: UserInteraction[A]): Id[A] = fa match {
      case Ask => StdIn.readLine
      case Tell(what: String) => println(what)
    }
  }
  // Prompt for a line of input and echo it back.
  def program: Free[UserInteraction, Unit] = for {
    _ <- Tell("Introduce any input")
    input <- Ask
    _ <- Tell(input)
  } yield ()
  def main(args: Array[String]): Unit = repeat(program)(3).foldMap(interpreter)
  // Sequences `free` with itself repeatedly.
  // NOTE(review): the program runs once before `times` is checked, so
  // repeat(p)(n) executes p a total of n + 1 times — confirm whether that
  // off-by-one is intended.
  def repeat[A[_], B](free: Free[A, B])(times: Int): Free[A, B] =
    free.flatMap(a => if (times == 0) Free.pure(a) else repeat(free)(times - 1))
}
| PaNaVTEC/Katas | tic-tac-toe/scala/src/main/scala-2.12/me/panavtec/UserInteractionApplication.scala | Scala | apache-2.0 | 947 |
package dedep.bonobo.tournament
import dedep.bonobo.Common._
import dedep.bonobo.round.Round
import dedep.bonobo.round.group.GroupRound
import dedep.bonobo.round.pair.PlayoffRound
import dedep.bonobo.team.Team
import dedep.bonobo.utils.MathUtils
/**
 * Default [[Tournament]] implementation. Rounds are kept newest-first: the
 * head of `rounds` is always the round currently in progress.
 *
 * @param teams  all participating teams (at least two)
 * @param rounds rounds played or in progress, most recent first
 */
case class TournamentImpl(override val teams: List[Team], override val rounds: List[Round] = Nil) extends Tournament {
  require(teams.size >= 2)

  /**
   * Advances the tournament one step: creates the first round when none
   * exists, starts the next round when the current one has finished, or
   * otherwise steps the current round forward.
   */
  def doStep(): Tournament = rounds.headOption match {
    case None =>
      createFirstRound()
    case Some(last) if last.isFinished() =>
      last match {
        // After a preliminary playoff, teams that received a bye re-join
        // the teams promoted out of that round.
        case round: PlayoffRound if round.preliminary =>
          createNextRound(teams.diff(round.teams) ++ round.getPromotedTeams)
        case round =>
          createNextRound(round.getPromotedTeams)
      }
    case Some(last) =>
      // BUG FIX: previously `rounds.updated(rounds.indexOf(lastRound), ...)`
      // compared Round elements against an Option[Round], so indexOf always
      // returned -1 and updated(-1, ...) threw IndexOutOfBoundsException.
      // The in-progress round is by construction the head of the list.
      TournamentImpl(teams, last.doStep() :: rounds.tail)
  }

  // A preliminary playoff trims the field down to a power of two; otherwise
  // the first real round starts with every team.
  private def createFirstRound(): Tournament =
    if (isPreliminaryRoundRequired)
      TournamentImpl(teams, PlayoffRound(getPreliminaryRoundTeams, preliminary = true) :: rounds)
    else
      createNextRound(teams)

  // 32+ teams play a group stage; smaller fields go straight to playoffs.
  private def createNextRound(teams: List[Team]): Tournament =
    if (teams.length >= 32)
      TournamentImpl(teams, GroupRound(teams) :: rounds)
    else
      TournamentImpl(teams, new PlayoffRound(teams) :: rounds)

  // A preliminary round is needed whenever the field size is not a power of two.
  private def isPreliminaryRoundRequired: Boolean = !MathUtils.isPowerOfTwo(teams.size)

  // The lowest-ranked teams (by points) must play their way in.
  private def getPreliminaryRoundTeams: List[Team] = teams.sortBy(_.points).take(getPreliminaryRoundTeamsNumber)

  // Twice the surplus over the next-lower power of two: each preliminary
  // match eliminates one team, shrinking the field to an exact power of two.
  private def getPreliminaryRoundTeamsNumber: Int = (teams.length - MathUtils.getFloorPowerOfTwoNumber(teams.length)) * 2

  /** The tournament is over once its final round has finished. */
  override def isFinished(): Boolean = rounds.headOption match {
    case None => false
    case Some(r: Round) => r.isFinished() && r.isFinalRound()
  }
}
| dedep/bonobo-core | src/main/scala/dedep/bonobo/tournament/TournamentImpl.scala | Scala | mit | 1,942 |
package com.harrys.hyppo.config
import com.typesafe.config.{Config, ConfigValue, ConfigValueFactory}
/**
* Created by jpetty on 8/27/15.
*/
/**
 * Coordinator-side configuration. Extends the shared [[HyppoConfig]] with the
 * settings only the coordinator needs, plus immutable `withValue` overloads
 * that return an updated copy of the configuration.
 */
final class CoordinatorConfig(config: Config) extends HyppoConfig(config) {

  /** Name of the bucket where executable code (jar files) is stored. */
  val codeBucketName = config.getString("hyppo.code-bucket-name")

  /** Returns a copy of this config with `path` replaced by `value`. */
  def withValue(path: String, value: ConfigValue): CoordinatorConfig =
    new CoordinatorConfig(underlying.withValue(path, value))

  /** Convenience overload for string values. */
  def withValue(path: String, value: String): CoordinatorConfig =
    withValue(path, ConfigValueFactory.fromAnyRef(value))

  /** Convenience overload for integer values (boxed for the Java config API). */
  def withValue(path: String, value: Int): CoordinatorConfig =
    withValue(path, ConfigValueFactory.fromAnyRef(Int.box(value)))

  /** Convenience overload for boolean values (boxed for the Java config API). */
  def withValue(path: String, value: Boolean): CoordinatorConfig =
    withValue(path, ConfigValueFactory.fromAnyRef(Boolean.box(value)))

  /** Convenience overload for double values (boxed for the Java config API). */
  def withValue(path: String, value: Double): CoordinatorConfig =
    withValue(path, ConfigValueFactory.fromAnyRef(Double.box(value)))
}
| harrystech/hyppo-worker | worker/src/main/scala/com/harrys/hyppo/config/CoordinatorConfig.scala | Scala | mit | 1,080 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.process.knn
import org.geotools.factory.Hints
import org.geotools.feature.simple.SimpleFeatureBuilder
import org.junit.runner.RunWith
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
// specs2 spec for the GeoHashSpiral priority queue: verifies GeoHash ordering
// by distance from a query feature, the mutable distance filter, and geometry
// validation.
@RunWith(classOf[JUnitRunner])
class GeoHashSpiralTest extends Specification {

  // Builds a point feature at Charlottesville, VA — the query origin for the
  // nearest-neighbour tests below.
  def generateCvilleSF = {
    val sftName = "geomesaKNNTestQueryFeature"
    val sft = SimpleFeatureTypes.createType(sftName, "geom:Point:srid=4326,dtg:Date,dtg_end_time:Date")
    val cvilleSF = SimpleFeatureBuilder.build(sft, List(), "charlottesville")
    cvilleSF.setDefaultGeometry(WKTUtils.read(f"POINT(-78.4953560 38.0752150 )"))
    // Use the feature ID supplied above instead of a generated one.
    cvilleSF.getUserData()(Hints.USE_PROVIDED_FID) = java.lang.Boolean.TRUE
    cvilleSF
  }

  // Builds a LineString feature (Route 29) — used to check that non-point
  // geometries are rejected.
  def generateLineSF = {
    val sftName = "geomesaKNNTestQueryFeature"
    val sft = SimpleFeatureTypes.createType(sftName, "geom:LineString:srid=4326,dtg:Date,dtg_end_time:Date")
    val lineSF = SimpleFeatureBuilder.build(sft, List(), "route 29")
    lineSF.setDefaultGeometry(WKTUtils.read(f"LINESTRING(-78.491 38.062, -78.474 38.082)"))
    lineSF.getUserData()(Hints.USE_PROVIDED_FID) = java.lang.Boolean.TRUE
    lineSF
  }

  "Geomesa GeoHashSpiral PriorityQueue" should {
    "order GeoHashes correctly around Charlottesville" in {
      val cvilleSF = generateCvilleSF
      // NOTE(review): 500.0 / 5000.0 presumably are the search and max
      // distances in meters — confirm against GeoHashSpiral's signature.
      val cvillePQ = GeoHashSpiral(cvilleSF, 500.0, 5000.0)
      val cvillePQ2List = cvillePQ.toList
      val nearest9ByCalculation = cvillePQ2List.take(9).map{_.hash}
      // the below are ordered by geodetic distances
      val nearest9ByVisualInspection = List (
        "dqb0tg",
        "dqb0te",
        "dqb0tf",
        "dqb0td",
        "dqb0tu",
        "dqb0ts",
        "dqb0w5",
        "dqb0w4",
        "dqb0tc")
      nearest9ByCalculation must equalTo(nearest9ByVisualInspection)
    }

    "use the statefulDistanceFilter around Charlottesville correctly before pulling GeoHashes" in {
      val cvilleSF = generateCvilleSF
      val cvillePQ = GeoHashSpiral(cvilleSF, 500.0, 10000.0)
      // Tightening the filter before any traversal limits what the queue yields.
      cvillePQ.mutateFilterDistance(1000.0) // units are meters
      val numHashesAfterFilter = cvillePQ.toList.length
      numHashesAfterFilter must equalTo(12)
    }

    "use the statefulDistanceFilter around Charlottesville correctly after pulling GeoHashes " in {
      val cvilleSF = generateCvilleSF
      val cvillePQ = GeoHashSpiral(cvilleSF, 500.0, 10000.0)
      // take the 20 closest GeoHashes
      val ghBeforeFilter = cvillePQ.take(20)
      ghBeforeFilter.length must equalTo(20)
      // now mutate the filter -- this is restrictive enough that no further GeoHashes should pass
      cvillePQ.mutateFilterDistance(1000.0) // units are meters
      // attempt to take five more
      val ghAfterFilter = cvillePQ.take(5)
      ghAfterFilter.length must equalTo(0)
    }

    "throw an exception if given a non-point geometry" in {
      val route29SF = generateLineSF
      GeoHashSpiral(route29SF, 500.0, 10000.0) should throwAn[RuntimeException]
    }
  }
}
| ddseapy/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/process/knn/GeoHashSpiralTest.scala | Scala | apache-2.0 | 3,657 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets matching specific criteria, giving a basic overview of the dataset's contents without deeper analysis.