code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package tormenta
import sbt._
import Keys._
import com.typesafe.tools.mima.plugin.MimaPlugin.mimaDefaultSettings
import com.typesafe.tools.mima.plugin.MimaKeys.previousArtifact
import scalariform.formatter.preferences._
import com.typesafe.sbt.SbtScalariform._
object TormentaBuild extends Build {

  // Pinned versions of third-party dependencies shared by all sub-modules.
  val avroVersion = "1.7.5"
  val bijectionVersion = "0.8.1"
  val chillVersion = "0.7.0"
  val scalaCheckVersion = "1.11.5"
  val scalaTestVersion = "2.2.2"
  val slf4jVersion = "1.6.6"
  val stormKafkaVersion = "0.9.0-wip6-scala292-multischeme"
  val stormKestrelVersion = "0.9.0-wip5-multischeme"
  val stormVersion = "0.9.0-wip15"
  val twitter4jVersion = "3.0.3"

  // Base settings: sbt defaults plus MiMa binary-compatibility checks and
  // scalariform source formatting.
  val extraSettings =
    Project.defaultSettings ++ mimaDefaultSettings ++ scalariformSettings

  // Quieter build logging when running on Travis CI (detected via TRAVIS env var).
  def ciSettings: Seq[Project.Setting[_]] =
    if (sys.env.getOrElse("TRAVIS", "false").toBoolean) Seq(
      ivyLoggingLevel := UpdateLogging.Quiet,
      logLevel in Global := Level.Warn,
      logLevel in Compile := Level.Warn,
      logLevel in Test := Level.Info
    ) else Seq.empty[Project.Setting[_]]

  // Settings applied to every sub-project: cross builds, compiler flags,
  // shared dependencies, resolvers, and Sonatype publishing configuration.
  val sharedSettings = extraSettings ++ ciSettings ++ Seq(
    organization := "com.twitter",
    scalaVersion := "2.10.5",
    crossScalaVersions := Seq("2.10.5", "2.11.7"),
    javacOptions ++= Seq("-source", "1.6", "-target", "1.6"),
    javacOptions in doc := Seq("-source", "1.6"),
    libraryDependencies ++= Seq(
      "org.slf4j" % "slf4j-api" % slf4jVersion,
      "storm" % "storm" % stormVersion % "provided",
      "org.scalacheck" %% "scalacheck" % scalaCheckVersion % "test",
      "org.scalatest" %% "scalatest" % scalaTestVersion % "test"
    ),
    scalacOptions ++= Seq("-unchecked", "-deprecation", "-Yresolve-term-conflict:package"),
    resolvers ++= Seq(
      Opts.resolver.sonatypeSnapshots,
      Opts.resolver.sonatypeReleases,
      "Clojars Repository" at "http://clojars.org/repo",
      "Conjars Repository" at "http://conjars.org/repo"
    ),
    parallelExecution in Test := false,
    scalacOptions ++= Seq("-unchecked", "-deprecation", "-language:implicitConversions", "-language:higherKinds", "-language:existentials"),
    // -Xdivergence211 is only understood by the 2.10 compiler; skip it elsewhere.
    scalacOptions <++= (scalaVersion) map { sv =>
      if (sv startsWith "2.10")
        Seq("-Xdivergence211")
      else
        Seq()
    },
    // Publishing options:
    // snapshots go to the Sonatype snapshot repo, releases to staging.
    publishMavenStyle := true,
    publishArtifact in Test := false,
    pomIncludeRepository := { x => false },
    publishTo <<= version { v =>
      Some(
        if (v.trim.toUpperCase.endsWith("SNAPSHOT"))
          Opts.resolver.sonatypeSnapshots
        else
          Opts.resolver.sonatypeStaging
      )
    },
    // POM metadata required by Maven Central.
    pomExtra := (
      <url>https://github.com/twitter/tormenta</url>
      <licenses>
        <license>
          <name>Apache 2</name>
          <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
          <distribution>repo</distribution>
          <comments>A business-friendly OSS license</comments>
        </license>
      </licenses>
      <scm>
        <url>git@github.com:twitter/tormenta.git</url>
        <connection>scm:git:git@github.com:twitter/tormenta.git</connection>
      </scm>
      <developers>
        <developer>
          <id>oscar</id>
          <name>Oscar Boykin</name>
          <url>http://twitter.com/posco</url>
        </developer>
        <developer>
          <id>sritchie</id>
          <name>Sam Ritchie</name>
          <url>http://twitter.com/sritchie</url>
        </developer>
      </developers>)
  )

  // scalariform formatting preferences used by scalariformSettings above.
  lazy val formattingPreferences = {
    import scalariform.formatter.preferences._
    FormattingPreferences().
      setPreference(AlignParameters, false).
      setPreference(PreserveSpaceBeforeArguments, true)
  }

  // Modules listed here have no released artifact yet, so MiMa is skipped for them.
  val unreleasedModules = Set[String]()

  /**
   * This returns the youngest jar we released that is compatible
   * with the current.
   */
  def youngestForwardCompatible(subProj: String) =
    Some(subProj)
      .filterNot(unreleasedModules.contains(_))
      .map { s => "com.twitter" % ("tormenta-" + s + "_2.10") % "0.11.0" }

  // Root aggregate project; runs no tests and publishes nothing itself.
  lazy val tormenta = Project(
    id = "tormenta",
    base = file("."),
    settings = sharedSettings ++ DocGen.publishSettings
  ).settings(
    test := { },
    publish := { }, // skip publishing for this root project.
    publishLocal := { }
  ).aggregate(
    tormentaCore,
    tormentaKestrel,
    tormentaKafka,
    tormentaTwitter,
    tormentaAvro
  )

  // Creates a sub-module named "tormenta-<name>" with the shared settings and
  // its MiMa previous-artifact reference.
  def module(name: String) = {
    val id = "tormenta-%s".format(name)
    Project(id = id, base = file(id), settings = sharedSettings ++ Seq(
      Keys.name := id,
      previousArtifact := youngestForwardCompatible(name))
    )
  }

  lazy val tormentaCore = module("core").settings(
    // chill's transitive kryo dependency is explicitly excluded.
    libraryDependencies += "com.twitter" %% "chill" % chillVersion
      exclude("com.esotericsoftware.kryo", "kryo")
  )
  lazy val tormentaTwitter = module("twitter").settings(
    libraryDependencies += "org.twitter4j" % "twitter4j-stream" % twitter4jVersion
  ).dependsOn(tormentaCore % "test->test;compile->compile")
  lazy val tormentaKafka = module("kafka").settings(
    libraryDependencies += "storm" % "storm-kafka" % stormKafkaVersion
  ).dependsOn(tormentaCore % "test->test;compile->compile")
  lazy val tormentaKestrel = module("kestrel").settings(
    libraryDependencies += "storm" % "storm-kestrel" % stormKestrelVersion
  ).dependsOn(tormentaCore % "test->test;compile->compile")
  lazy val tormentaAvro = module("avro").settings(
    libraryDependencies ++= Seq(
      "org.apache.avro" % "avro" % avroVersion,
      "com.twitter" %% "bijection-core" % bijectionVersion,
      "com.twitter" %% "bijection-avro" % bijectionVersion)
  ).dependsOn(tormentaCore % "test->test;compile->compile")
}
| nkhuyu/tormenta | project/Build.scala | Scala | apache-2.0 | 5,713 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.batch.sql
import org.apache.flink.table.planner.utils.TableTestBase
import org.junit.Test
/** Plan tests for SQL VALUES clauses in batch mode. */
class ValuesTest extends TableTestBase {

  // Batch-mode planner test harness shared by all cases.
  private val util = batchTestUtil()

  @Test
  def testNullValues(): Unit = {
    // A bare NULL literal is given an explicit CAST so a row type can be derived.
    util.verifyExecPlan("SELECT * FROM (VALUES CAST(NULL AS INT))")
  }

  @Test
  def testSingleRow(): Unit = {
    util.verifyExecPlan("SELECT * FROM (VALUES (1, 2, 3)) AS T(a, b, c)")
  }

  @Test
  def testMultiRows(): Unit = {
    // Mixes non-null values and a typed NULL in the same column across rows.
    util.verifyExecPlan("SELECT * FROM (VALUES (1, 2), (3, CAST(NULL AS INT)), (4, 5)) AS T(a, b)")
  }

  @Test
  def testDiffTypes(): Unit = {
    // Also checks the derived row type, since columns mix decimal and BIGINT literals.
    util.verifyRelPlanWithType("SELECT * FROM (VALUES (1, 2.0), (3, CAST(4 AS BIGINT))) AS T(a, b)")
  }

  @Test
  def testEmptyValuesWithSort(): Unit = {
    // The contradictory predicate (a = 1 AND a = 2) leaves no rows for the ORDER BY.
    util.verifyExecPlan("SELECT * FROM (VALUES 1, 2, 3) AS T (a) WHERE a = 1 and a = 2 ORDER BY a")
  }
}
| lincoln-lil/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/ValuesTest.scala | Scala | apache-2.0 | 1,705 |
/*
* Copyright (c) 2018. Fengguo Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0
* which accompanies this distribution, and is available at
* https://www.apache.org/licenses/LICENSE-2.0
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.jawa.flow.dfa
import org.argus.jawa.flow.{AlirLoc, Context}
import org.argus.jawa.flow.cfg._
import org.argus.jawa.flow.interprocedural.CallResolver
import org.argus.jawa.core.ast.{Location, MethodDeclaration, ResolvedBody, Statement}
import org.argus.jawa.core.Global
import org.argus.jawa.core.elements.Signature
import org.argus.jawa.core.util._
/**
 * Ingredient provider for intra-procedural data flow analysis: program points
 * are addressed by (locationUri, locationIndex) pairs inside a single method.
 * Created by fgwei on 4/16/17.
 */
class IntraIngredientProvider[LatticeElement](md: MethodDeclaration, cfg: IntraProceduralControlFlowGraph[CFGNode]) extends IngredientProvider[CFGNode, LatticeElement, (String, Int)] {

  // Only one method is analyzed, so the signature argument is ignored.
  override def getBody(sig: Signature): ResolvedBody = md.resolvedBody

  override def newLoc(currentNode: CFGNode with AlirLoc, newl: Location): (String, Int) = {
    (newl.locationUri, newl.locationIndex)
  }

  // CFG node for the location textually following the current one.
  override def next(currentNode: CFGNode with AlirLoc, body: ResolvedBody): CFGNode = {
    val nl = body.locations(currentNode.locIndex + 1)
    cfg.getNode(nl)
  }

  override def node(l: Location, loc: (String, Int)): CFGNode = cfg.getNode(loc._1, loc._2)

  override def exitNode(currentNode: CFGNode): CFGNode = cfg.exitNode

  // Intra-procedural analysis has no call/return edges.
  override def returnNode(currentNode: CFGNode with AlirLoc): CFGNode = throw new RuntimeException("Should not be called.")

  /**
   * Standard worklist fixed-point iteration over the intra-procedural CFG.
   * The call resolver is unused in the intra-procedural case.
   */
  def process(
      startNode: CFGNode,
      mdaf: MonotoneDataFlowAnalysisBuilder[CFGNode, LatticeElement],
      callr: Option[CallResolver[CFGNode, LatticeElement]]): Unit = {
    val workList = mlistEmpty[CFGNode]
    workList += startNode
    while (workList.nonEmpty) {
      val n = workList.remove(0)
      n match {
        case ln: CFGLocationNode =>
          // visit returns true when the fact set changed, so successors must be re-examined.
          if (mdaf.visit(ln)) {
            workList ++= cfg.successors(n)
          }
        case _ =>
          // Virtual (non-location) nodes: propagate facts unchanged to all successors.
          for (succ <- cfg.successors(n)) {
            mdaf.update(mdaf.entrySet(n), succ)
            workList += succ
          }
      }
    }
  }

  // No pre/post hooks are needed for the intra-procedural case.
  def preProcess(node: CFGNode, statement: Statement, s: ISet[LatticeElement]): Unit = {}
  def postProcess(node: CFGNode, statement: Statement, s: ISet[LatticeElement]): Unit = {}
  override def onPreVisitNode(node: CFGNode, preds: CSet[CFGNode]): Unit = {}
  override def onPostVisitNode(node: CFGNode, succs: CSet[CFGNode]): Unit = {}
}
/**
 * Ingredient provider for inter-procedural data flow analysis: program points
 * are addressed by calling Contexts on the inter-procedural CFG (ICFG).
 */
class InterIngredientProvider[LatticeElement](global: Global, icfg: InterProceduralControlFlowGraph[ICFGNode]) extends IngredientProvider[ICFGNode, LatticeElement, Context] {

  // Resolve any method's body through the global resolver.
  def getBody(sig: Signature): ResolvedBody = {
    global.getMethod(sig).get.getBody.resolvedBody
  }

  // Build a context for a new location within the current node's method.
  override def newLoc(currentNode: ICFGNode with AlirLoc, newl: Location): Context =
    currentNode.getContext.copy.removeTopContext().setContext(currentNode.getOwner, newl.locationUri)

  // ICFG node for the location textually following the current one.
  override def next(currentNode: ICFGNode with AlirLoc, body: ResolvedBody): ICFGNode = {
    val newLoc = body.locations(currentNode.locIndex + 1)
    val newContext = currentNode.getContext.copy.removeTopContext()
    newContext.setContext(currentNode.getOwner, newLoc.locationUri)
    // Call sites map to call nodes, everything else to normal nodes.
    if(icfg.isCall(newLoc))
      icfg.getICFGCallNode(newContext)
    else
      icfg.getICFGNormalNode(newContext)
  }

  override def node(l: Location, loc: Context): ICFGNode = {
    if(icfg.isCall(l))
      icfg.getICFGCallNode(loc)
    else
      icfg.getICFGNormalNode(loc)
  }

  // Exit node of the method the current node belongs to.
  override def exitNode(currentNode: ICFGNode): ICFGNode = {
    val exitContext = currentNode.getContext.copy.removeTopContext().setContext(currentNode.getOwner, "Exit")
    icfg.getICFGExitNode(exitContext)
  }

  override def returnNode(currentNode: ICFGNode with AlirLoc): ICFGNode = {
    icfg.getICFGReturnNode(currentNode.getContext)
  }

  /**
   * Worklist fixed-point iteration over the ICFG, with a second phase that
   * re-propagates facts across exit->caller edges until nothing changes.
   * Node revisits are bounded by ConvergeEnsurer as a safety valve.
   */
  def process(
      startNode: ICFGNode,
      mdaf: MonotoneDataFlowAnalysisBuilder[ICFGNode, LatticeElement],
      callr: Option[CallResolver[ICFGNode, LatticeElement]]): Unit = {
    // Processes one node; returns the successors that need (re)visiting.
    def doProcess(n: ICFGNode): ISet[ICFGNode] = {
      var result = isetEmpty[ICFGNode]
      n match {
        case en: ICFGEntryNode =>
          for (succ <- icfg.successors(en)) {
            if (mdaf.update(mdaf.entrySet(en), succ)) {
              result += succ
            }
          }
        case xn: ICFGExitNode =>
          // Map callee facts back into each caller, when a resolver is available.
          if (callr.isDefined) {
            for (succ <- icfg.successors(n)) {
              val factsForCaller = callr.get.getAndMapFactsForCaller(mdaf.entrySet(xn), succ, xn)
              mdaf.update(mdaf.confluence(mdaf.entrySet(succ), factsForCaller), succ)
              result += succ
            }
          }
        case cn: ICFGCallNode =>
          if (mdaf.visit(cn)) {
            result ++= icfg.successors(n)
          }
        case _: ICFGReturnNode =>
          for (succ <- icfg.successors(n)) {
            if (mdaf.update(mdaf.entrySet(n), succ)) {
              result += succ
            }
          }
        case nn: ICFGNormalNode =>
          if (mdaf.visit(nn)) {
            result ++= icfg.successors(n)
          }
        case a => throw new RuntimeException("unexpected node type: " + a)
      }
      result
    }
    val workList = mlistEmpty[ICFGNode]
    workList += startNode
    val ensurer = new ConvergeEnsurer[ICFGNode]
    // NOTE(review): `i` counts processed nodes but is never read — looks like a
    // leftover debugging counter.
    var i = 0
    while(workList.nonEmpty){
      // Phase 1: drain the worklist, skipping nodes that exceeded the visit limit.
      while (workList.nonEmpty) {
        val n = workList.remove(0)
        i += 1
        if(ensurer.checkNode(n)) {
          ensurer.updateNodeCount(n)
          onPreVisitNode(n, icfg.predecessors(n))
          val newWorks = doProcess(n)
          workList ++= {newWorks -- workList}
          onPostVisitNode(n, icfg.successors(n))
        }
      }
      // Phase 2: re-propagate every exit node's facts to its callers; any node
      // whose facts changed re-seeds the worklist for another phase-1 pass.
      // NOTE(review): unlike doProcess, this uses callr.get without an
      // isDefined check — verify callers always pass a resolver here.
      val nodes = icfg.nodes
      workList ++= nodes.map{ node =>
        var newnodes = isetEmpty[ICFGNode]
        node match {
          case xn: ICFGExitNode =>
            if(ensurer.checkNode(xn)) {
              onPreVisitNode(xn, icfg.predecessors(xn))
              val succs = icfg.successors(xn)
              for (succ <- succs) {
                val factsForCaller = callr.get.getAndMapFactsForCaller(mdaf.entrySet(xn), succ, xn)
                if (mdaf.update(mdaf.confluence(mdaf.entrySet(succ), factsForCaller), succ))
                  newnodes += succ
              }
              onPostVisitNode(xn, succs)
            }
          case _ =>
        }
        newnodes
      }.reduce(iunion[ICFGNode])
    }
  }

  // No pre/post hooks by default; subclasses may override.
  def preProcess(node: ICFGNode, statement: Statement, s: ISet[LatticeElement]): Unit = {}
  def postProcess(node: ICFGNode, statement: Statement, s: ISet[LatticeElement]): Unit = {}
  override def onPreVisitNode(node: ICFGNode, preds: CSet[ICFGNode]): Unit = {}
  override def onPostVisitNode(node: ICFGNode, succs: CSet[ICFGNode]): Unit = {}
}
/**
 * Safety valve for the fixed-point iteration: theoretically the algorithm
 * should converge if it's implemented correctly, but just in case, a node is
 * no longer processed once it has been visited `limit` times.
 */
class ConvergeEnsurer[N] {
  // Maximum number of times any single node may be processed.
  private val limit: Int = 10
  // Visit counter per node.
  private val usageMap: MMap[N, Int] = mmapEmpty
  // Nodes that hit the limit without converging.
  private val nonConvergeNodes: MSet[N] = msetEmpty

  /**
   * Returns true while the node is still under the visit limit; once the
   * limit is reached the node is recorded as non-converging and refused.
   */
  def checkNode(n: N): Boolean = {
    val visits = this.usageMap.getOrElseUpdate(n, 0)
    if (visits < limit) true
    else {
      this.nonConvergeNodes += n
      false
    }
  }

  /** Increments the visit counter of the given node. */
  def updateNodeCount(n: N): Unit = {
    val current = this.usageMap.getOrElseUpdate(n, 0)
    this.usageMap(n) = current + 1
  }
}
package com.github.marklister.base64
import scala.collection.mutable.ArrayBuilder
/**
 * Base64 encoder
 * @author Mark Lister
 * This software is distributed under the 2-Clause BSD license. See the
 * LICENSE file in the root of the repository.
 *
 * Copyright (c) 2014 - 2015 Mark Lister
 *
 * The repo for this Base64 encoder lives at https://github.com/marklister/base64
 * Please send your issues, suggestions and pull requests there.
 */
object Base64 {

  /**
   * Describes a Base64 variant: its 64-character alphabet, whether padding is
   * strictly enforced on decode, and optional post-encode / pre-decode string
   * transformations (used e.g. to URL-escape the '=' padding).
   */
  case class B64Scheme(encodeTable: Array[Char], strictPadding: Boolean = true,
                       postEncode: String => String = identity,
                       preDecode: String => String = identity) {
    // Reverse lookup: character -> 6-bit value. Characters absent from the
    // alphabet map to 0, so such input may decode silently; characters with
    // code points >= 256 instead raise ArrayIndexOutOfBoundsException, which
    // is NOT translated by the Decoder's NoSuchElementException catch —
    // NOTE(review): verify whether that catch is still reachable.
    lazy val decodeTable = {
      val b: Array[Int] = new Array[Int](256)
      for (x <- encodeTable.zipWithIndex) {
        b(x._1) = x._2.toInt
      }
      b
    }
  }

  /** Standard Base64 alphabet (RFC 4648 section 4). */
  val base64 = new B64Scheme((('A' to 'Z') ++ ('a' to 'z') ++ ('0' to '9') ++ Seq('+', '/')).toArray)

  /** URL-safe alphabet (RFC 4648 section 5), with '=' escaped as "%3D". */
  val base64Url = new B64Scheme(base64.encodeTable.dropRight(2) ++ Seq('-', '_'), false,
    _.replaceAllLiterally("=", "%3D"),
    _.replaceAllLiterally("%3D", "="))

  implicit class SeqEncoder(s: Seq[Byte]) {
    /** Encodes the byte sequence with the given (or standard) scheme. */
    def toBase64(implicit scheme: B64Scheme = base64): String = Encoder(s.toArray).toBase64
  }

  implicit class Encoder(b: Array[Byte]) {
    // Number of '=' characters needed to pad the input to a multiple of 3 bytes.
    lazy val pad = (3 - b.length % 3) % 3

    /**
     * Encodes the wrapped byte array.
     *
     * The output StringBuilder is created per call: previously it was an
     * instance field, so a second toBase64 call on the same Encoder appended
     * to the first result instead of producing a fresh encoding.
     */
    def toBase64(implicit scheme: B64Scheme = base64): String = {
      val r = new java.lang.StringBuilder((b.length + 3) * 4 / 3)
      // Emits four alphabet characters for one 24-bit group (x, y, z).
      def sixBits(x: Byte, y: Byte, z: Byte): Unit = {
        val zz = (x & 0xff) << 16 | (y & 0xff) << 8 | (z & 0xff)
        r append scheme.encodeTable(zz >> 18)
        r append scheme.encodeTable(zz >> 12 & 0x3f)
        r append scheme.encodeTable(zz >> 6 & 0x3f)
        r append scheme.encodeTable(zz & 0x3f)
      }
      for (p <- 0 until b.length - 2 by 3) {
        sixBits(b(p), b(p + 1), b(p + 2))
      }
      // Handle a 1- or 2-byte tail by zero-filling the missing bytes.
      pad match {
        case 0 =>
        case 1 => sixBits(b(b.length - 2), b(b.length - 1), 0)
        case 2 => sixBits(b(b.length - 1), 0, 0)
      }
      // Replace the characters produced from the zero fill with '=' padding.
      r setLength (r.length - pad)
      r append "=" * pad
      scheme.postEncode(r.toString())
    }
  }

  implicit class Decoder(s: String) {
    /**
     * Decodes the wrapped Base64 string to bytes.
     * Throws IllegalArgumentException on length/padding problems.
     */
    def toByteArray(implicit scheme: B64Scheme = base64): Array[Byte] = {
      val pre = scheme.preDecode(s)
      // Strip trailing '=' and remember how much padding was present.
      val cleanS = pre.replaceAll("=+$", "")
      val pad = pre.length - cleanS.length
      val computedPad = (4 - (cleanS.length % 4)) % 4
      val r = new ArrayBuilder.ofByte
      // Appends the three bytes packed in four 6-bit values.
      def threeBytes(a: Int, b: Int, c: Int, d: Int): Unit = {
        val i = a << 18 | b << 12 | c << 6 | d
        r += ((i >> 16).toByte)
        r += ((i >> 8).toByte)
        r += (i.toByte)
      }
      if (scheme.strictPadding) {
        if (pad > 2) throw new java.lang.IllegalArgumentException("Invalid Base64 String: (excessive padding) " + s)
        if (s.length % 4 != 0) throw new java.lang.IllegalArgumentException("Invalid Base64 String: (padding problem) " + s)
      }
      if (computedPad == 3) throw new java.lang.IllegalArgumentException("Invalid Base64 String: (string length) " + s)
      try {
        val s = (cleanS + "A" * computedPad)
        for (x <- 0 until s.length - 1 by 4) {
          // Uses the threeBytes helper (previously defined but unused — the
          // loop duplicated its body inline).
          threeBytes(
            scheme.decodeTable(s.charAt(x)),
            scheme.decodeTable(s.charAt(x + 1)),
            scheme.decodeTable(s.charAt(x + 2)),
            scheme.decodeTable(s.charAt(x + 3)))
        }
      } catch {
        case e: NoSuchElementException => throw new java.lang.IllegalArgumentException("Invalid Base64 String: (invalid character)" + e.getMessage + s)
      }
      // Drop the bytes contributed by the "A" zero fill.
      val res = r.result
      res.slice(0, res.length - computedPad)
    }
  }
}
package org.scalacoin.script.constant
import org.scalacoin.util.{Factory, BitcoinSUtil}
/**
 * Factory for [[ScriptConstant]] values.
 * Created by chris on 2/26/16.
 */
trait ScriptConstantFactory extends Factory[ScriptConstant] {
  /**
   * Creates a script constant from a sequence of bytes.
   * @param bytes the raw bytes backing the constant
   * @return a constant wrapping the hex encoding of the bytes
   */
  def fromBytes(bytes : Seq[Byte]) : ScriptConstant = ScriptConstantImpl(BitcoinSUtil.encodeHex(bytes))
}

// Companion-style singleton so the factory can be used without mixing in the trait.
object ScriptConstantFactory extends ScriptConstantFactory
| TomMcCabe/scalacoin | src/main/scala/org/scalacoin/script/constant/ScriptConstantFactory.scala | Scala | mit | 458 |
package grinder
import java.nio.file._
/**
 * Compares a new test-result directory against a baseline directory and
 * derives progressions (newly passing tests) and regressions (newly failing).
 */
class BaseLine(newPathStr: String, basePathStr: String) {
  // Normalized absolute paths of the two result directories.
  val newPath = Paths.get(newPathStr).toAbsolutePath().normalize()
  val basePath = Paths.get(basePathStr).toAbsolutePath().normalize()
  println("New path: " + newPath)
  println("Base path: " + basePath)

  // Baseline results, split by outcome (pass is stored as the string "true").
  val baseResults = ResultParser.getResults(basePath)
  val (basePassed, baseFailed) = baseResults.partition(_.pass == "true")

  val newResults = ResultParser.getResults(newPath)
  // Results present in both runs vs. tests that only exist in the new run.
  val (existingResults, newlyAdded) = newResults.partition(n => baseResults.exists(_.id == n.id))
  val (newPassed, newFailed) = existingResults.partition(_.pass == "true")

  // Passing now but not in the baseline / failing now but not in the baseline.
  // NOTE(review): `contains` compares entire result objects rather than ids —
  // verify the result type's equality makes this the intended comparison.
  val progressions = newPassed.filterNot(n => basePassed.contains(n))
  val regressions = newFailed.filterNot(n => baseFailed.contains(n))

  /** Screenshot path for a test id in the new run. */
  def resolveNewScreenshot(id: String) = {
    newPath.resolve("screenshot").resolve(id + ".png")
  }

  /** Screenshot path for a test id in the baseline run. */
  def resolveBaseScreenshot(id: String) = {
    basePath.resolve("screenshot").resolve(id + ".png")
  }
}
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package scray.querying
import java.util.UUID
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.locks.ReentrantLock
import java.util.concurrent.locks.ReentrantReadWriteLock
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
import com.twitter.util.Duration
import com.twitter.util.JavaTimer
import com.twitter.util.Time
import com.twitter.util.Try
import com.typesafe.scalalogging.LazyLogging
import scray.querying.caching.Cache
import scray.querying.caching.MonitoringInfos
import scray.querying.description.Column
import scray.querying.description.ColumnConfiguration
import scray.querying.description.QueryspaceConfiguration
import scray.querying.description.TableConfiguration
import scray.querying.description.TableIdentifier
import scray.querying.description.internal.MaterializedView
import scray.querying.monitoring.Monitor
import scray.querying.monitoring.MonitorQuery
import scray.querying.planning.PostPlanningActions
import scray.querying.queries.DomainQuery
import scray.querying.queries.QueryInformation
import scray.querying.source.Source
/**
 * default trait to represent get operations on the registry
 */
trait Registry {
  /**
   * returns the current queryspace configuration for the given name and version
   */
  @inline def getQuerySpace(space: String, version: Int): Option[QueryspaceConfiguration]
  /**
   * returns a column configuration registered for the given column in the
   * given queryspace version, if any
   */
  @inline def getQuerySpaceColumn(space: String, version: Int, column: Column): Option[ColumnConfiguration]
  /**
   * returns a table configuration registered for the given table identifier in
   * the given queryspace version, if any
   */
  @inline def getQuerySpaceTable(space: String, version: Int, ti: TableIdentifier): Option[TableConfiguration[_ <: DomainQuery, _ <: DomainQuery, _]]
  /**
   * returns the latest version of a given query space, or None if the space
   * has no registered versions
   */
  @inline def getLatestVersion(space: String): Option[Int]
  /**
   * returns all available versions of a given query space
   */
  @inline def getVersions(space: String): Set[Int]
  /**
   * returns a materialized view registered for the given table identifier, if any
   */
  @inline def getMaterializedView(space: String, version: Integer, ti: TableIdentifier): Option[MaterializedView]
}
/**
 * Registry for tables and resources.
 * Singleton implementation of the [[Registry]] trait. Reads and writes are
 * guarded by read-write locks so the registry can be queried while query
 * spaces are being registered or updated. Entries in the internal maps are
 * keyed by the concatenation of queryspace name and version number.
 */
object Registry extends LazyLogging with Registry {

  // Object to send monitor information to
  private val monitor = new Monitor

  // Listeners notified whenever a new QueryInformation is created.
  private val createQueryInformationListeners = new ArrayBuffer[QueryInformation => Unit]
  private val querymonitor = new MonitorQuery

  // makes registry thread safe at the cost of some performance;
  // however, reads should not be blocking each other
  private val rwlock = new ReentrantReadWriteLock

  // all querySpaces, that can be queried; keyed by name + version
  private val querySpaces = new HashMap[String, QueryspaceConfiguration]
  // registered version numbers per queryspace name
  private val querySpaceVersions = new HashMap[String, Set[Int]]
  // global on/off switch for the caching subsystem
  private val enableCaches = new AtomicBoolean(true)

  // information about queries, keyed by query id
  private val queryMonitor = new HashMap[UUID, QueryInformation]
  private val queryMonitorRwLock = new ReentrantReadWriteLock

  /** returns the names of all registered query spaces */
  @inline def getQuerySpaceNames(): List[String] = querySpaceVersions.map(_._1).toList

  /**
   * returns the latest version of a given query space
   * (Try absorbs the exception thrown by max on an empty set)
   */
  @inline override def getLatestVersion(space: String): Option[Int] = Try(getVersions(space).max).toOption

  /**
   * returns all available version of a given query space
   */
  @inline override def getVersions(space: String): Set[Int] = {
    rwlock.readLock.lock
    try {
      querySpaceVersions.get(space).getOrElse(Set())
    } finally {
      rwlock.readLock().unlock()
    }
  }

  /**
   * returns the current queryspace configuration
   * Cannot be used to query the Registry for tables or columns of a queryspace,
   * because of concurrent updates. Use more specific methods instead.
   */
  @inline override def getQuerySpace(space: String, version: Int): Option[QueryspaceConfiguration] = {
    rwlock.readLock.lock
    try {
      querySpaces.get(space + version)
    } finally {
      rwlock.readLock.unlock
    }
  }

  // shortcut to find table-configurations; keyed by name + version
  private val querySpaceTables = new HashMap[String, HashMap[TableIdentifier, TableConfiguration[_ <: DomainQuery, _ <: DomainQuery, _]]]

  /** returns an immutable snapshot of all table configurations of the given queryspace version */
  @inline def getQuerySpaceTables(space: String, version: Int): Map[TableIdentifier, TableConfiguration[_ <: DomainQuery, _ <: DomainQuery, _]] = {
    rwlock.readLock.lock
    try {
      logger.trace("Query table" + space + version + " Existing tables: " + querySpaceTables.keySet)
      querySpaceTables.get(space + version).map(_.toMap).getOrElse(Map())
    } finally {
      rwlock.readLock.unlock
    }
  }

  /**
   * returns a table configuration
   */
  @inline def getQuerySpaceTable(space: String, version: Int, ti: TableIdentifier): Option[TableConfiguration[_ <: DomainQuery, _ <: DomainQuery, _]] = {
    rwlock.readLock.lock
    try {
      val qt = querySpaceTables.get(space + version)
      logger.trace(s"Search table with identifier ${ti} in dataset ${qt}")
      qt.flatMap(_.get(ti))
    } finally {
      rwlock.readLock.unlock
    }
  }

  // shortcut to find column-configurations; keyed by name + version
  private val querySpaceColumns = new HashMap[String, HashMap[Column, ColumnConfiguration]]

  /**
   * returns a column configuration
   */
  @inline override def getQuerySpaceColumn(space: String, version: Int, column: Column): Option[ColumnConfiguration] = {
    rwlock.readLock.lock
    try {
      querySpaceColumns.get(space + version).flatMap(_.get(column))
    } finally {
      rwlock.readLock.unlock
    }
  }

  /**
   * Register a querySpace, producing a new version.
   * If no version is supplied, latest + 1 is used (or 0 for a new space).
   * Returns the version that was registered.
   */
  def registerQuerySpace(querySpace: QueryspaceConfiguration, version: Option[Int] = None): Int = {
    rwlock.writeLock.lock
    try {
      val newVersion = version.orElse(getLatestVersion(querySpace.name).map(_ + 1)).getOrElse(0)
      querySpaces.put(querySpace.name + newVersion, querySpace)
      querySpaceColumns.put(querySpace.name + newVersion, new HashMap[Column, ColumnConfiguration])
      querySpaceTables.put(querySpace.name + newVersion, new HashMap[TableIdentifier, TableConfiguration[_ <: DomainQuery, _ <: DomainQuery, _]])
      querySpace.getColumns(newVersion).foreach(col => querySpaceColumns.get(querySpace.name + newVersion).map(_.put(col.column, col)))
      querySpace.getTables(newVersion).foreach(table => querySpaceTables.get(querySpace.name + newVersion).map(_.put(table.table, table)))
      querySpaceVersions.put(querySpace.name, querySpaceVersions.get(querySpace.name).getOrElse(Set()) + newVersion)
      logger.debug(s"Registered query space ${querySpaces.get(querySpace.name + newVersion)}")
      // Register materialized views
      this.materializedViews.put(querySpace.name + newVersion, new HashMap[TableIdentifier, MaterializedView])
      querySpace.getMaterializedViews().map { mv => this.materializedViews.get(querySpace.name + newVersion).get.put(mv.table, mv)}
      newVersion
    } finally {
      rwlock.writeLock.unlock
      monitor.monitor(querySpaceTables)
    }
  }

  /**
   * Must be called to update tables and columns information. It suffices to update columns which
   * actually have been updated. Does not update the queryspace-object itself - only the information
   * that is really used by the planner is given a new version.
   * TODO: find a mechanism to throw out old versions
   */
  def updateQuerySpace(querySpace: String, tables: Set[(TableIdentifier, TableConfiguration[_ <: DomainQuery, _ <: DomainQuery, _], List[ColumnConfiguration])]): Unit = {
    if(tables.size > 0) {
      rwlock.writeLock.lock
      try {
        // get is o.k. since this method may not be called if the qs has not been previously created
        val oldVersion = getLatestVersion(querySpace).get
        val newVersion = oldVersion + 1
        logger.debug(s"Creating new version for query-space $querySpace, updating $oldVersion to $newVersion by providing ${tables.size} new tables.")
        // copy the stuff over...
        querySpaceTables.get(querySpace + oldVersion).map { qtables =>
          val newQuerySpaceTables = new HashMap[TableIdentifier, TableConfiguration[_ <: DomainQuery, _ <: DomainQuery, _]]
          newQuerySpaceTables ++= qtables
          querySpaceTables.put(querySpace + newVersion, newQuerySpaceTables)
        }
        querySpaceColumns.get(querySpace + oldVersion).map { qcolumns =>
          val newQuerySpaceColumns = new HashMap[Column, ColumnConfiguration]
          newQuerySpaceColumns ++= qcolumns
          querySpaceColumns.put(querySpace + newVersion, newQuerySpaceColumns)
        }
        // replace old with new ones
        tables.foreach(table => updateTableInformation(querySpace, newVersion, table._1, table._2, table._3))
      } finally {
        rwlock.writeLock.unlock
      }
    }
  }

  /**
   * Must be called to update the table and columns information. It suffices to update columns which
   * actually have been updated. Does not update the queryspace-object itself - only the information
   * that is really used by the planner.
   */
  private def updateTableInformation(
    querySpace: String,
    version: Int,
    tableid: TableIdentifier,
    tableconfig: TableConfiguration[_ <: DomainQuery, _ <: DomainQuery, _],
    columConfigsToUpdate: List[ColumnConfiguration] = List()): Unit = {
    rwlock.writeLock.lock
    try {
      querySpaceTables.get(querySpace + version).map(_.put(tableid, tableconfig))
      // NOTE(review): this lookup uses the bare queryspace name while every
      // other access uses name + version — verify whether this is intentional.
      columConfigsToUpdate.foreach(col => querySpaceColumns.get(querySpace).map(_.put(col.column, col)))
      // TODO: invalidate relevant caches, if these exist in the future :)
    } finally {
      rwlock.writeLock.unlock
    }
  }

  // planner post-processor action, applied after planning
  var queryPostProcessor: PostPlanningActions.PostPlanningAction = PostPlanningActions.doNothing

  // guards the caches map below
  private val cachelock = new ReentrantLock
  private val caches = new HashMap[String, Cache[_]]

  /**
   * retrieve an off-heap cache for reading; creates and registers one for the
   * source's discriminant if none exists yet
   */
  def getCache[T, C <: Cache[T]](source: Source[_, _]): C = {
    cachelock.lock
    try {
      caches.get(source.getDiscriminant).getOrElse {
        val newCache = source.createCache
        caches.put(source.getDiscriminant, newCache)
        newCache
      }.asInstanceOf[C]
    } finally {
      cachelock.unlock
    }
  }

  /**
   * return cache for given discriminant if it exists
   */
  def getCache[T, C <: Cache[T]](cacheID: String): Option[C] = {
    cachelock.lock
    try {
      caches.get(cacheID).asInstanceOf[Option[C]]
    } finally {
      cachelock.unlock
    }
  }

  /**
   * replace the cache with a new one; the old cache (if supplied) is closed
   */
  def replaceCache[T](cacheID: String, oldCache: Option[Cache[T]], newCache: Cache[T]): Unit = {
    cachelock.lock
    try {
      oldCache.map(_.close)
      caches.put(cacheID, newCache)
    } finally {
      cachelock.unlock
    }
  }

  /**
   * Get cache information
   */
  def getCacheCounter[T, C <: Cache[T]](cacheID: String): Option[MonitoringInfos] = {
    cachelock.lock
    try {
      caches.get(cacheID).map(_.report)
    } finally {
      cachelock.unlock
    }
  }

  /**
   * en- or disable caching of column family values. Disable in case of memory pressure.
   */
  def setCachingEnabled(enabled: Boolean) = enableCaches.set(enabled)

  /** whether caching is currently enabled */
  def getCachingEnabled = enableCaches.get

  /** registers a callback fired for every newly created QueryInformation */
  def addCreateQueryInformationListener(listener: QueryInformation => Unit) =
    createQueryInformationListeners += listener

  /** creates, registers and returns monitoring information for the given query */
  def createQueryInformation(query: Query): QueryInformation = {
    queryMonitorRwLock.writeLock().lock()
    try {
      val info = new QueryInformation(query.getQueryID, query.getTableIdentifier, query.getWhereAST)
      queryMonitor += ((query.getQueryID, info))
      createQueryInformationListeners.foreach(_(info))
      info
    } finally {
      queryMonitorRwLock.writeLock().unlock()
    }
  }

  /** looks up monitoring information for a query id */
  def getQueryInformation(qid: UUID): Option[QueryInformation] = {
    queryMonitorRwLock.readLock().lock()
    try {
      queryMonitor.get(qid)
    } finally {
      queryMonitorRwLock.readLock().unlock()
    }
  }

  // Periodic task (every 15 minutes, on a daemon timer) that destroys and
  // removes monitoring entries for queries that finished, or appear to have
  // died, more than an hour ago.
  val cleanupQueryInformation = new JavaTimer(true).schedule(Duration.fromTimeUnit(15, TimeUnit.MINUTES)) {
    queryMonitorRwLock.writeLock().lock()
    try {
      val cutoffTime = Time.now - Duration.fromTimeUnit(1, TimeUnit.HOURS)
      val qMon = queryMonitor.filter { entry =>
        ((entry._2.finished.get > 0) && (entry._2.finished.get < cutoffTime.inMillis)) || // finished more than one hour ago
        ((entry._2.pollingTime.get > 0) && (entry._2.pollingTime.get < cutoffTime.inMillis)) || // probably query has died
        ((entry._2.pollingTime.get == -1) && (entry._2.startTime < cutoffTime.inMillis)) } // probably query has died without a single result
      qMon.foreach{ entry =>
        entry._2.destroy()
        queryMonitor -= entry._1
      }
    } finally {
      queryMonitorRwLock.writeLock().unlock()
    }
  }

  // shortcut to find materialized views; keyed by name + version
  private val materializedViews = new HashMap[String, HashMap[TableIdentifier, MaterializedView]]

  /** returns the materialized view registered for the given table, if any */
  @inline def getMaterializedView(space: String, version: Integer, ti: TableIdentifier): Option[MaterializedView] = {
    rwlock.readLock.lock
    try {
      val mv = materializedViews.get(space + version)
      logger.debug(s"Search materialized view table with identifier ${ti} in dataset ${mv}")
      mv.flatMap(_.get(ti))
    } finally {
      rwlock.readLock.unlock
    }
  }
}
| scray/scray | scray-querying/modules/scray-querying/src/main/scala/scray/querying/Registry.scala | Scala | apache-2.0 | 14,144 |
package chandu0101.scalajs.react.components
package materialui
import chandu0101.macros.tojs.JSMacro
import japgolly.scalajs.react._
import scala.scalajs.js
import scala.scalajs.js.`|`
/**
 * This file is generated - submit issues instead of PR against it
 *
 * Typed props facade for the material-ui FontIcon React component. Each field
 * mirrors one React prop; apply() serialises them into a raw JS props object
 * and instantiates the underlying JS component.
 */
case class MuiFontIcon(
  key: js.UndefOr[String] = js.undefined,
  ref: js.UndefOr[String] = js.undefined,
  /* This is the font color of the font icon. If not specified,
   this component will default to muiTheme.palette.textColor.*/
  color: js.UndefOr[MuiColor] = js.undefined,
  /* This is the icon color when the mouse hovers over the icon.*/
  hoverColor: js.UndefOr[MuiColor] = js.undefined,
  /* Function called when mouse enters this element.*/
  onMouseEnter: js.UndefOr[ReactMouseEventH => Callback] = js.undefined,
  /* Function called when mouse leaves this element.*/
  onMouseLeave: js.UndefOr[ReactMouseEventH => Callback] = js.undefined,
  /* Override the inline-styles of the root element.*/
  style: js.UndefOr[CssProperties] = js.undefined){

  /** Builds the React element for this icon from the current props. */
  def apply() = {
    // Macro-derived conversion of this case class into a plain JS props object.
    val props = JSMacro[MuiFontIcon](this)
    // Fetch the untyped factory for Mui.FontIcon and instantiate it with the props.
    val f = React.asInstanceOf[js.Dynamic].createFactory(Mui.FontIcon)
    f(props).asInstanceOf[ReactComponentU_]
  }
}
| elacin/scalajs-react-components | core/src/main/scala/chandu0101/scalajs/react/components/materialui/MuiFontIcon.scala | Scala | apache-2.0 | 1,352 |
package com.epidata.lib.models.util
import com.datastax.driver.core.Row
import java.util.Date
import java.lang.{ Double => JDouble, Long => JLong }
import scala.util.{ Success, Try }
object TypeUtils {

  // Buckets a timestamp into a coarse epoch index. 1e9 ms is roughly 11.6 days,
  // matching the "approximately 12 days" note below.
  def epochForTs(ts: Date): Int =
    // Divide the timeline into epochs approximately 12 days in duration.
    (ts.getTime() / (1000L * 1000L * 1000L)).toInt

  /** Helpers for converting empty strings to None. */
  def blankToNone(string: String): Option[String] = string match {
    case "" => None
    case string => Some(string)
  }

  // Like blankToNone, but for values already wrapped in Option.
  def optionBlankToNone(string: Option[String]): Option[String] = string match {
    case Some("") => None
    case _ => string
  }

  // Null-safe wrapper: None for null, Some otherwise (empty strings are kept).
  def stringToOption(string: String): Option[String] = string match {
    case s if (s != null) => Some(s)
    case _ => None
  }

  // Reads a double column from a Cassandra row; None when the cell is null or NaN.
  def getOptionDouble(row: Row, field: String): Option[Double] = {
    if (!row.isNull(field) && !JDouble.isNaN(row.getDouble(field)))
      Option(row.getDouble(field))
    else None
  }

  // Reads a long column; None when the cell is null.
  def getOptionLong(row: Row, field: String): Option[Long] = {
    if (!row.isNull(field))
      Option(row.getLong(field))
    else None
  }

  // Reads a string column; None when the cell is null or the empty string.
  def getOptionString(row: Row, field: String): Option[String] = {
    if (!row.isNull(field) && row.getString(field).compareTo("") != 0)
      Option(row.getString(field))
    else None
  }

  // Reads a blob column into a Binary wrapper; None when the cell is null.
  def getOptionBinary(row: Row, field: String): Option[Binary] = {
    val binaryBuf = row.getBytes(field)
    binaryBuf match {
      case null => None
      case _ =>
        // Copy only the readable window [position, limit) of the byte buffer.
        val valueBytes = new Array[Byte](binaryBuf.limit - binaryBuf.position)
        binaryBuf.get(valueBytes)
        val binary = new Binary(valueBytes)
        Option(binary)
    }
  }

  // Parses via Double first (so scientific notation like "1.5e3" is accepted),
  // then truncates to a Long.
  def convertToLongNumber(value: String): Long = {
    value.toDouble.asInstanceOf[Number].longValue
  }

  // Coerces a raw JSON measurement value plus its optional lower/upper limits
  // into typed values. Returns (value, lowerLimit, upperLimit, isInvalid). The
  // effective datatype may be re-detected from the JSON node itself and widened
  // (e.g. Long -> Double) when a limit forces it.
  // NOTE(review): parse failures are swallowed and degrade the value to None
  // rather than raising - confirm this best-effort behaviour is intended.
  def getMeasValues(datatype: Datatype.Value, meas_value_jsonObject: Any, meas_lower_limit_jsonObject: Any, meas_upper_limit_jsonObject: Any): (Any, Option[AnyVal], Option[AnyVal], Boolean) = {
    var datatype_from_value = datatype
    var isInvalid = false
    // Step 1: coerce the value, recording the datatype actually observed.
    var meas_value =
      try {
        datatype match {
          case Datatype.Double if meas_value_jsonObject != null =>
            if (meas_value_jsonObject.isInstanceOf[String]) {
              datatype_from_value = Datatype.String
              meas_value_jsonObject.toString
            } else if (meas_value_jsonObject.isInstanceOf[java.lang.Long] || meas_value_jsonObject.isInstanceOf[java.lang.Double]) {
              datatype_from_value = Datatype.Double
              meas_value_jsonObject.toString.toDouble
            } else None
          case Datatype.Long if meas_value_jsonObject != null =>
            if (meas_value_jsonObject.isInstanceOf[String]) {
              datatype_from_value = Datatype.String
              meas_value_jsonObject.toString
            } else if (meas_value_jsonObject.isInstanceOf[java.lang.Long]) {
              datatype_from_value = Datatype.Long
              meas_value_jsonObject.toString.toLong
            } else if (meas_value_jsonObject.isInstanceOf[java.lang.Double]) {
              datatype_from_value = Datatype.Double
              meas_value_jsonObject.toString.toDouble
            } else {
              datatype_from_value = Datatype.String
              None
            }
          case Datatype.String if meas_value_jsonObject != null =>
            if (meas_value_jsonObject.isInstanceOf[String]) {
              datatype_from_value = Datatype.String
              meas_value_jsonObject.toString
            } else if (meas_value_jsonObject.isInstanceOf[java.lang.Long]) {
              datatype_from_value = Datatype.Long
              meas_value_jsonObject.toString.toLong
            } else if (meas_value_jsonObject.isInstanceOf[java.lang.Double]) {
              datatype_from_value = Datatype.Double
              meas_value_jsonObject.toString.toDouble
            } else None
          // Array/waveform payloads arrive base64-encoded.
          case Datatype.DoubleArray | Datatype.Waveform if meas_value_jsonObject != null => Binary.fromBase64(datatype, meas_value_jsonObject.asInstanceOf[String])
          case _ if meas_value_jsonObject == null => None
          case _ if meas_value_jsonObject != null =>
            if (meas_value_jsonObject.isInstanceOf[String]) {
              datatype_from_value = Datatype.String
              meas_value_jsonObject.toString
            } else if (meas_value_jsonObject.isInstanceOf[java.lang.Long] || meas_value_jsonObject.isInstanceOf[java.lang.Double]) {
              datatype_from_value = Datatype.Double
              meas_value_jsonObject.toString.toDouble
            } else None
        }
      } catch {
        // Unparsable input: fall back to the declared datatype with no value.
        case _: Throwable => {
          datatype_from_value = datatype
          None
        }
      }
    // No usable value means the observed datatype is unknown.
    if (meas_value == None)
      datatype_from_value = null
    // Step 2: coerce the lower limit; a Double limit on a Long value widens both.
    val meas_lower_limit =
      try {
        datatype_from_value match {
          case Datatype.Long if meas_lower_limit_jsonObject != null && meas_lower_limit_jsonObject.isInstanceOf[java.lang.Long] => Some(meas_lower_limit_jsonObject.toString.toLong)
          case Datatype.Long if meas_lower_limit_jsonObject != null && meas_lower_limit_jsonObject.isInstanceOf[java.lang.Double] =>
            datatype_from_value = Datatype.Double
            if (meas_value != null) {
              meas_value = meas_value.toString.toDouble
            }
            Some(meas_lower_limit_jsonObject.toString.toDouble)
          case _ if meas_lower_limit_jsonObject != null && !meas_lower_limit_jsonObject.isInstanceOf[java.lang.Long] && !meas_lower_limit_jsonObject.isInstanceOf[java.lang.Double] =>
            // Non-numeric limit: flag the record as invalid.
            isInvalid = true
            None
          case _ if meas_lower_limit_jsonObject != null && (meas_lower_limit_jsonObject.isInstanceOf[java.lang.Long] || meas_lower_limit_jsonObject.isInstanceOf[java.lang.Double]) => Some(meas_lower_limit_jsonObject.toString.toDouble)
          case _ if meas_lower_limit_jsonObject == null => None
        }
      } catch {
        // Includes a possible MatchError from the guards above; treated as "no limit".
        case e: Exception => None
      }
    // Step 3: coerce the upper limit with the same rules (no Long->Double widening here).
    val meas_upper_limit =
      try {
        datatype_from_value match {
          case Datatype.Long if meas_upper_limit_jsonObject != null && meas_upper_limit_jsonObject.isInstanceOf[java.lang.Long] => Some(meas_upper_limit_jsonObject.toString.toLong)
          case _ if meas_upper_limit_jsonObject != null && !meas_upper_limit_jsonObject.isInstanceOf[java.lang.Long] && !meas_upper_limit_jsonObject.isInstanceOf[java.lang.Double] =>
            isInvalid = true
            None
          case _ if meas_upper_limit_jsonObject != null && (meas_upper_limit_jsonObject.isInstanceOf[java.lang.Long] || meas_upper_limit_jsonObject.isInstanceOf[java.lang.Double]) => Some(meas_upper_limit_jsonObject.toString.toDouble)
          case _ if meas_upper_limit_jsonObject == null => None
        }
      } catch {
        case e: Exception => None
      }
    (meas_value, meas_lower_limit, meas_upper_limit, isInvalid)
  }
}
| epidataio/epidata-community | models/src/main/scala/com/epidata/lib/models/util/TypeUtils.scala | Scala | apache-2.0 | 6,938 |
/*
* Copyright (C) 2007-2008 Artima, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Example code from:
*
* Programming in Scala (First Edition, Version 6)
* by Martin Odersky, Lex Spoon, Bill Venners
*
* http://booksites.artima.com/programming_in_scala
*/
/* causes unchecked warning */
// Example from "Programming in Scala" (1st ed.): demonstrates why matching on a
// parameterised type is "unchecked" - the type argument T is erased at runtime,
// so `case that: Branch[T]` only tests the class, never T.
object Tree2 {

  // Covariant immutable binary tree interface.
  trait Tree[+T] {
    def elem: T
    def left: Tree[T]
    def right: Tree[T]
  }

  // Sentinel for the empty tree; every accessor fails by design.
  object EmptyTree extends Tree[Nothing] {
    def elem =
      throw new NoSuchElementException("EmptyTree.elem")
    def left =
      throw new NoSuchElementException("EmptyTree.left")
    def right =
      throw new NoSuchElementException("EmptyTree.right")
  }

  class Branch[T](
    val elem: T,
    val left: Tree[T],
    val right: Tree[T]
  ) extends Tree[T] {
    // NOTE: equals is deliberately defined without a matching hashCode (and
    // without canEqual) to keep the book's example minimal; production code
    // should always override both together.
    override def equals(other: Any) = other match {
      case that: Branch[T] => this.elem == that.elem &&
        this.left == that.left &&
        this.right == that.right
      case _ => false
    }
  }
}
| peachyy/scalastu | equality/Tree2.scala | Scala | apache-2.0 | 1,572 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
/**
 * A client that communicates with the cluster manager to request or kill executors.
 * This is currently supported only in YARN mode.
 */
private[spark] trait ExecutorAllocationClient {

  /** Get the list of currently active executors */
  private[spark] def getExecutorIds(): Seq[String]

  /**
   * Whether an executor is active. An executor is active when it can be used to execute tasks
   * for jobs submitted by the application.
   *
   * @return whether the executor with the given ID is currently active.
   */
  def isExecutorActive(id: String): Boolean

  /**
   * Update the cluster manager on our scheduling needs. Three bits of information are included
   * to help it make decisions.
   * @param numExecutors The total number of executors we'd like to have. The cluster manager
   *                     shouldn't kill any running executor to reach this number, but,
   *                     if all existing executors were to die, this is the number of executors
   *                     we'd want to be allocated.
   * @param localityAwareTasks The number of tasks in all active stages that have a locality
   *                           preferences. This includes running, pending, and completed tasks.
   * @param hostToLocalTaskCount A map of hosts to the number of tasks from all active stages
   *                             that would like to like to run on that host.
   *                             This includes running, pending, and completed tasks.
   * @return whether the request is acknowledged by the cluster manager.
   */
  private[spark] def requestTotalExecutors(
      numExecutors: Int,
      localityAwareTasks: Int,
      hostToLocalTaskCount: Map[String, Int]): Boolean

  /**
   * Request an additional number of executors from the cluster manager.
   * @return whether the request is acknowledged by the cluster manager.
   */
  def requestExecutors(numAdditionalExecutors: Int): Boolean

  /**
   * Request that the cluster manager kill the specified executors.
   *
   * @param executorIds identifiers of executors to kill
   * @param adjustTargetNumExecutors whether the target number of executors will be adjusted down
   *                                 after these executors have been killed
   * @param countFailures if there are tasks running on the executors when they are killed, whether
   *                      to count those failures toward task failure limits
   * @param force whether to force kill busy executors, default false
   * @return the ids of the executors acknowledged by the cluster manager to be removed.
   */
  def killExecutors(
    executorIds: Seq[String],
    adjustTargetNumExecutors: Boolean,
    countFailures: Boolean,
    force: Boolean = false): Seq[String]

  /**
   * Request that the cluster manager kill every executor on the specified host.
   *
   * @return whether the request is acknowledged by the cluster manager.
   */
  def killExecutorsOnHost(host: String): Boolean

  /**
   * Request that the cluster manager kill the specified executor.
   * @return whether the request is acknowledged by the cluster manager.
   */
  def killExecutor(executorId: String): Boolean = {
    val killedExecutors = killExecutors(Seq(executorId), adjustTargetNumExecutors = true,
      countFailures = false)
    // Succeeds iff the cluster manager acknowledged exactly this executor.
    // headOption.contains avoids the partial indexed access killedExecutors(0).
    killedExecutors.headOption.contains(executorId)
  }
}
| pgandhi999/spark | core/src/main/scala/org/apache/spark/ExecutorAllocationClient.scala | Scala | apache-2.0 | 4,197 |
/**
* Created by peter_v on 22/02/15.
*/
package base
import common.{AMD_Subject, newUUID}
/** A logical representation of all facts for one subject.
 *
 * @param subject stable AMD subject identifier; defaults to a freshly
 *                generated UUID when none is supplied.
 */
case class Resource(subject: AMD_Subject = newUUID)
| petervandenabeele/AllMyData | src/main/scala/base/Resource.scala | Scala | mit | 212 |
package uk.gov.gds.ier.transaction.ordinary.openRegister
import uk.gov.gds.ier.test._
import uk.gov.gds.ier.transaction.ordinary.InprogressOrdinary
class OpenRegisterMustacheTests
  extends MustacheTestSuite
  with OpenRegisterForms
  with OpenRegisterMustache {

  // An unfilled form should still render the question, with the checkbox value
  // defaulting to "false".
  it should "empty progress form should produce empty Model" in runningApp {
    val emptyApplicationForm = openRegisterForm
    val openRegisterModel = mustache.data(
      emptyApplicationForm,
      Call("POST", "/foo"),
      InprogressOrdinary()
    ).asInstanceOf[OpenRegisterModel]

    openRegisterModel.question.title should be(
      "Do you want to include your name and address on the open register?")
    openRegisterModel.question.postUrl should be("/foo")
    openRegisterModel.openRegister.value should be("false")
  }

  // NOTE(review): with optin = Some(true) the checkbox renders with NO "checked"
  // attribute, and with Some(false) it renders checked (see next test) -
  // presumably because the checkbox expresses "do NOT include me"; confirm
  // against the Mustache template before changing either assertion.
  it should "progress form with open register marked should produce Mustache Model with open " +
    "register value present (true)" in runningApp {
    val partiallyFilledApplicationForm = openRegisterForm.fill(
      InprogressOrdinary(
        openRegisterOptin = Some(true)
      )
    )
    val openRegisterModel = mustache.data(
      partiallyFilledApplicationForm,
      Call("POST", "/foo"),
      InprogressOrdinary()
    ).asInstanceOf[OpenRegisterModel]

    openRegisterModel.question.title should be(
      "Do you want to include your name and address on the open register?")
    openRegisterModel.question.postUrl should be("/foo")
    openRegisterModel.openRegister.attributes should be("")
  }

  // Counterpart of the previous test: opting out marks the checkbox checked.
  it should "progress form with open register marked should produce Mustache Model with open " +
    "register value present (false)" in runningApp {
    val partiallyFilledApplicationForm = openRegisterForm.fill(
      InprogressOrdinary(
        openRegisterOptin = Some(false)
      )
    )
    val openRegisterModel = mustache.data(
      partiallyFilledApplicationForm,
      Call("POST", "/foo"),
      InprogressOrdinary()
    ).asInstanceOf[OpenRegisterModel]

    openRegisterModel.question.title should be(
      "Do you want to include your name and address on the open register?")
    openRegisterModel.question.postUrl should be("/foo")
    openRegisterModel.openRegister.attributes should be("checked=\"checked\"")
  }
}
| michaeldfallen/ier-frontend | test/uk/gov/gds/ier/transaction/ordinary/openRegister/OpenRegisterMustacheTests.scala | Scala | mit | 2,259 |
/*
* ScalaCL - putting Scala on the GPU with JavaCL / OpenCL
* http://scalacl.googlecode.com/
*
* Copyright (c) 2009-2013, Olivier Chafik (http://ochafik.com/)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Olivier Chafik nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY OLIVIER CHAFIK AND CONTRIBUTORS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package scalacl
package impl
import com.nativelibs4java.opencl.{ CLMem, CLEvent }
import org.bridj.{ Pointer, PointerIO }
import scala.collection.mutable.ArrayBuffer
// DataIO implementation for tuple types: each tuple component is delegated to
// one of the per-element DataIOs, which occupy consecutive slices of the
// flattened buffer array.
private[scalacl] class TupleDataIO[T <: Product: Manifest](
  ioArgs: DataIO[_]*)(
  newTuple: PartialFunction[Array[Any], T])
    extends DataIO[T] {

  // Per-component IOs in tuple order; each may itself span several buffers.
  private val ios = ioArgs.toArray

  // Reads all element buffers, waits once for every pending CL event, then
  // materialises the tuples in parallel.
  override def toArray(length: Int, buffers: Array[ScheduledBuffer[_]]): Array[T] = {
    val eventsToWaitFor = new ArrayBuffer[CLEvent]
    val pointers = buffers.map(_.read(eventsToWaitFor).withoutValidityInformation) // unsafe, but faster
    val context = buffers(0).context
    // assert(buffers.map(_.context).toSet.length == 1)
    // NOTE(review): the commented assertion above would not compile as written
    // (Set exposes `size`, not `length`); all buffers are assumed to share one context.
    context.waitFor(eventsToWaitFor)
    (0 until length.toInt).par.map(i => get(i, pointers, 0)).toArray // TODO check
  }

  override def typeString = ios.map(_.typeString).mkString("(", ", ", ")")

  // Total number of CL buffers consumed by the whole tuple.
  override val bufferCount = ios.map(_.bufferCount).sum

  private[scalacl] override def foreachScalar(f: ScalarDataIO[_] => Unit) {
    ios.foreach(_.foreachScalar(f))
  }

  override def allocateBuffers(length: Long, out: ArrayBuffer[ScheduledBuffer[_]])(implicit context: Context) = {
    ios.foreach(_.allocateBuffers(length, out))
  }

  // protected def newTuple(values: Array[Any]): T

  // Reads the tuple at `index`, advancing the running offset by each element's
  // bufferCount so every component IO addresses its own buffer slice.
  // NOTE(review): the bufferOffset parameter is ignored here and in set();
  // offsets always start at 0 - confirm callers never pass a non-zero offset.
  override def get(index: Long, buffers: Array[Pointer[_]], bufferOffset: Int) = {
    var offset = 0
    newTuple(for (io <- ios) yield {
      val v = io.get(index, buffers, offset)
      offset += io.bufferCount
      v
    })
  }

  // Writes the tuple's components into their respective buffer slices.
  override def set(index: Long, buffers: Array[Pointer[_]], bufferOffset: Int, value: T) = {
    val values = value.productIterator.toIterable
    var offset = 0
    for ((io, v) <- ios.zip(values)) {
      io.asInstanceOf[DataIO[Any]].set(index, buffers, offset, v)
      offset += io.bufferCount
    }
  }
}
| nativelibs4java/ScalaCL | src/main/scala/scalacl/impl/dataio/TupleDataIO.scala | Scala | bsd-3-clause | 3,534 |
package uk.gov.dvla.vehicles.presentation.common.clientsidesession
import uk.gov.dvla.vehicles.presentation.common.{UnitSpec, TestWithApplication}
class NoHashSpec extends UnitSpec {

  "NoHash" should {
    // The no-op hash generator must return its input unchanged...
    "return a clear text string" in new TestWithApplication {
      noHash.hash(ClearText) should equal(ClearText)
    }

    // ...and report a zero-length digest, since it adds nothing to the input.
    "return expected length for the digest" in new TestWithApplication {
      noHash.digestStringLength should equal(0)
    }
  }

  // System under test; stateless, so safe to share across examples.
  private val noHash = new NoHashGenerator

  // Sharing immutable fixture objects via instance variables
  private final val ClearText = "qwerty"
}
package nl.soqua.lcpi.interpreter.transformation
import nl.soqua.lcpi.ast.lambda.{Application, Expression, LambdaAbstraction, Variable}
import nl.soqua.lcpi.interpreter.transformation.Names.unused
object AlphaReduction {

  import Substitution.substitute

  /**
   * Alpha reduction. Rewrites the expression so that every bound variable
   * name is unique.
   *
   * @param e the expression to rewrite
   * @return an alpha-equivalent expression with unique binders
   */
  def α(e: Expression): Expression = rename(e, Nil)._1

  /**
   * Alias for [[α]].
   *
   * @param e the expression to rewrite
   * @return a rewritten expression
   */
  def alpha(e: Expression): Expression = α(e)

  /**
   * Depth-first traversal that freshens clashing binders, threading the list
   * of binders encountered so far through the recursion.
   */
  private def rename(e: Expression, seen: List[Variable]): (Expression, List[Variable]) = e match {
    case v: Variable =>
      (v, seen)
    case Application(fun, arg) =>
      val (fun2, seenAfterFun) = rename(fun, seen)
      val (arg2, seenAfterArg) = rename(arg, seenAfterFun)
      (Application(fun2, arg2), seenAfterArg)
    case LambdaAbstraction(x, body) =>
      if (seen contains x) {
        // Binder clash: pick a fresh name, substitute it through the body,
        // and re-process the rewritten abstraction.
        val fresh = unused(x, seen)
        rename(LambdaAbstraction(fresh, substitute(body, x, fresh)), seen)
      } else {
        val (body2, seenAfterBody) = rename(body, x :: seen)
        (LambdaAbstraction(x, body2), seenAfterBody)
      }
  }
}
| kevinvandervlist/lcpi | interpreter/src/main/scala/nl/soqua/lcpi/interpreter/transformation/AlphaReduction.scala | Scala | mit | 1,368 |
package org.json4s
import scala.language.dynamics
// Wraps a JValue so that JSON object fields can be selected with plain member
// syntax via scala.Dynamic (e.g. json.name instead of json \ "name").
class DynamicJValue(val raw: JValue) extends Dynamic {

  /**
   * Adds dynamic style to JValues. Only meaningful for JObjects
   * <p>
   * Example:<pre>
   * JObject(JField("name",JString("joe"))::Nil).name == JString("joe")
   * </pre>
   */
  def selectDynamic(name:String) = new DynamicJValue(raw \\ name)

  // Delegate hashing to the wrapped value so wrapper and raw JValue hash alike.
  override def hashCode():Int = raw.hashCode

  // Equal to another wrapper with the same raw value, or to the raw JValue itself.
  // NOTE(review): equality against a bare JValue is not symmetric
  // (jvalue == dynamicJValue is false) - confirm callers only compare in this direction.
  override def equals(p1: Any): Boolean = p1 match {
    case j: DynamicJValue => raw == j.raw
    case j: JValue => raw == j
    case _ => false
  }
}
trait DynamicJValueImplicits {
  // Explicit result types on the implicit conversions: implicit definitions
  // without a declared type trigger compiler warnings, make inference fragile,
  // and are rejected in Scala 3.

  /** Widens a [[DynamicJValue]] back to its underlying raw JValue. */
  implicit def dynamic2Jv(dynJv: DynamicJValue): JValue = dynJv.raw

  /** Exposes the monadic JValue operations on a DynamicJValue. */
  implicit def dynamic2monadic(dynJv: DynamicJValue): MonadicJValue = new MonadicJValue(dynJv.raw)

  /** Entry point: wraps a JValue for dynamic member selection. */
  def dyn(jv: JValue): DynamicJValue = new DynamicJValue(jv)
}

object DynamicJValue extends DynamicJValueImplicits
| dozed/json4s | core/src/main/scala/org/json4s/DynamicJValue.scala | Scala | apache-2.0 | 839 |
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.check.extractor
import java.util.regex.Matcher
import scala.annotation.tailrec
package object regex {

  /** Enriches java.util.regex.Matcher with fold and extraction helpers. */
  implicit class RichMatcher(val matcher: Matcher) extends AnyVal {

    /** Folds over every remaining match of the underlying matcher. */
    def foldLeft[T](zero: T)(f: (Matcher, T) => T): T = {
      @tailrec
      def loop(acc: T): T =
        if (matcher.find) loop(f(matcher, acc)) else acc
      loop(zero)
    }

    /** Extracts the value of the (n+1)-th match, if the matcher finds that many. */
    def findMatchN[X: GroupExtractor](n: Int): Option[X] = {
      @tailrec
      def advance(remaining: Int): Boolean =
        matcher.find && (remaining == 0 || advance(remaining - 1))
      if (advance(n)) Some(value[X]) else None
    }

    /** Extracts the current match via the implicit GroupExtractor. */
    def value[X: GroupExtractor]: X = GroupExtractor[X].extract(matcher)
  }
}
| thkluge/gatling | gatling-core/src/main/scala/io/gatling/core/check/extractor/regex/package.scala | Scala | apache-2.0 | 1,315 |
/*******************************************************************************
* Copyright 2017 Capital One Services, LLC and Bitwise, Inc.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package hydrograph.engine.spark.components
import java.util
import scala.collection.JavaConversions.asScalaBuffer
import org.apache.spark.sql.Column
import org.apache.spark.sql.SaveMode
import org.apache.spark.sql.functions.col
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import hydrograph.engine.core.component.entity.OutputFileAvroEntity
import hydrograph.engine.core.component.entity.elements.SchemaField
import hydrograph.engine.spark.components.base.SparkFlow
import hydrograph.engine.spark.components.platform.BaseComponentParams
/**
 * The Class OutputFileAvroComponent.
 *
 * Writes the upstream DataFrame to an Avro file at the entity's configured
 * path, selecting only the columns declared in the entity's schema.
 *
 * @author Bitwise
 *
 */
class OutputFileAvroComponent(outputFileAvroEntity: OutputFileAvroEntity, baseComponentParams: BaseComponentParams) extends SparkFlow{

  private val LOG: Logger = LoggerFactory.getLogger(classOf[OutputFileAvroComponent])

  /** Maps the entity's schema fields to Spark Column references, preserving field order. */
  private def createSchema(fields: util.List[SchemaField]): Array[Column] = {
    val schema = new Array[Column](fields.size())
    fields.zipWithIndex.foreach {
      case (f, i) =>
        schema(i) = col(f.getFieldName)
    }
    LOG.debug("Schema created for Output File Avro Component : " + schema.mkString)
    schema
  }

  override def execute() = {
    val filePathToWrite = outputFileAvroEntity.getPath()
    try {
      val df = baseComponentParams.getDataFrame()
      df.select(createSchema(outputFileAvroEntity.getFieldsList): _*).write
        .mode( if (outputFileAvroEntity.isOverWrite) SaveMode.Overwrite else SaveMode.ErrorIfExists)
        .format("hydrograph.engine.spark.datasource.avro").save(filePathToWrite)
      LOG.info("Created Output File Avro Component "+ outputFileAvroEntity.getComponentId
        + " in Batch "+ outputFileAvroEntity.getBatch +" with path " + outputFileAvroEntity.getPath)
      LOG.debug("Component Id: '"+ outputFileAvroEntity.getComponentId
        +"' in Batch: " + outputFileAvroEntity.getBatch
        + " at Path: " + outputFileAvroEntity.getPath)
    } catch {
      case e: Exception =>
        // Build the message once instead of duplicating it between the log call
        // and the thrown exception; also closes the previously unterminated
        // "[...]" bracket after the path.
        val errorMessage = "\\nException in Output File Avro Component with - \\nComponent Id : [" +
          outputFileAvroEntity.getComponentId + "]\\nComponent Name : [" +
          outputFileAvroEntity.getComponentName + "]\\nBatch : [" +
          outputFileAvroEntity.getBatch + "]\\nPath : [" + outputFileAvroEntity.getPath + "]"
        LOG.error(errorMessage, e)
        // Propagate with the original exception as cause instead of discarding it.
        throw new RuntimeException(errorMessage, e)
    }
  }
}
package datacollector
/**
* @author Emre Çelikten
*/
/**
 * Shared string helpers for the data collector.
 */
object Utils {
  /** Renders an exception as its message followed by its stack trace, one frame per line. */
  def getStackTraceString(ex: Exception): String = {
    val frames = ex.getStackTrace.mkString("\\n")
    s"${ex.getMessage}\\n$frames"
  }
}
| emrecelikten/foursquare-data-collector | src/main/scala/datacollector/Utils.scala | Scala | gpl-3.0 | 240 |
package com.themillhousegroup.gasket.integration
import org.specs2.mutable.Specification
import com.themillhousegroup.gasket.Account
import scala.concurrent.Await
import com.themillhousegroup.gasket.test.{ TestHelpers, GasketIntegrationSettings }
import com.themillhousegroup.gasket.test.ExampleSpreadsheetFetcher._
/**
* For the purposes of these examples, there exists a spreadsheet
* called "Example Spreadsheet" with worksheets, "Sheet2" and "Sheet3".
* On each worksheet is 1 populated cell, with contents as follows:
*
* Cell A1
*
* See GasketIntegrationSettings for information about how to set
* up a suitable file on your local system to hold credentials.
*/
class CellUpdateSpec extends Specification with GasketIntegrationSettings with TestHelpers {
  // `isolated` gives each example its own spec instance; `sequential` stops the
  // two examples racing on the same remote spreadsheet.
  isolated
  sequential

  // The remote fixture cells are expected to hold this value between runs.
  val defaultCellValue = "Cell A1"
  val moddedValue = "MODIFIED"

  "Cell Update example" should {
    // Round-trips one cell on Sheet2: verify the fixture value, modify it,
    // assert the modification, then restore the default.
    "make a single change" in IntegrationScope { (username, p12File) =>
      val result = fetchSheetAndCells(username, p12File, "Sheet2")._2
      result must not beEmpty

      result must haveSize(1)
      result.head.value must beEqualTo(defaultCellValue)

      val futureModResult = result.head.update(moddedValue)
      val modResult = Await.result(futureModResult, shortWait)
      modResult.value must beEqualTo(moddedValue)

      // Roll back so the next run starts from the expected fixture state.
      val futureRollbackResult = modResult.update(defaultCellValue)
      val rollbackResult = Await.result(futureRollbackResult, shortWait)
      rollbackResult.value must beEqualTo(defaultCellValue)
    }

    // Same round-trip on Sheet3, expressed as for-comprehensions over Futures.
    "make a single change as part of a for-comprehension" in IntegrationScope { (clientId, p12File) =>
      val futureCell =
        for {
          acct <- Account(clientId, p12File)
          ss <- acct.spreadsheets
          ws <- ss("Example Spreadsheet").worksheets
          cells <- ws("Sheet3").cells
          newCell <- cells.head.update(moddedValue)
        } yield newCell

      Await.result(futureCell, shortWait).value must beEqualTo(moddedValue)

      // Restore the default value for subsequent runs.
      val rolledBackCell =
        for {
          acct <- Account(clientId, p12File)
          ss <- acct.spreadsheets
          ws <- ss("Example Spreadsheet").worksheets
          cells <- ws("Sheet3").cells
          newCell <- cells.head.update(defaultCellValue)
        } yield newCell

      Await.result(rolledBackCell, shortWait).value must beEqualTo(defaultCellValue)
    }
  }
}
| themillhousegroup/gasket | src/test/scala/com/themillhousegroup/gasket/integration/CellUpdateSpec.scala | Scala | mit | 2,406 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.effect
import com.typesafe.scalalogging.LazyLogging
import io.truthencode.ddo.api.model.effect.DetailedEffect
import io.truthencode.ddo.enhancement.BonusType
import io.truthencode.ddo.model.effect.Feature.printFeature
import io.truthencode.ddo.model.effect.features.SkillEffect
import io.truthencode.ddo.model.feats.{Feat, GeneralFeat}
import io.truthencode.ddo.model.item.weapon.WeaponCategory.{filterByWeaponClass, icPlus1, icPlus2, icPlus3}
import io.truthencode.ddo.model.item.weapon.WeaponClass
import io.truthencode.ddo.model.skill.Skill.{Listen, Spot}
import io.truthencode.ddo.model.stats.{BasicStat, MissChance}
import io.truthencode.ddo.support.naming.UsingSearchPrefix
import io.truthencode.ddo.test.tags.{FeatTest, FeatureTest, SkillTest}
import org.scalatest.TryValues._
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.mockito.MockitoSugar
import scala.collection.immutable
import scala.util.{Success, Try}
class FeatureTest extends AnyFunSpec with Matchers with MockitoSugar with LazyLogging {
val featEffects: Unit = {
val fList = for {
feat <- Feat.values
features <- feat.namedFeatures
feature <- features._2
} yield (feat, feature)
}
def fixture = new {
val sourceInfo: SourceInfo = SourceInfo("TestContext", this)
}
def optPlus[T](t: T, n: Int): Option[(T, Int)] = {
val x = Option(t)
logger.info(s"Maybe add $n to $x")
Option(x) match {
case Some(_) => Some(t, n)
case _ => None
}
}
describe("A feature") {
  it("should be able to affect a dodge chance ", FeatureTest, FeatTest) {
    val param = EffectParameter.BonusType(BonusType.Feat)
    val part = EffectPart.MissChanceEffect(BasicStat.DodgeChance)
    val mockDetailedEffect = mock[DetailedEffect]
    // Hand-built PartModifier so the test controls every collaborator explicitly.
    val partMod = new PartModifier[Int, BasicStat with MissChance] with UsingSearchPrefix {

      /**
       * Used when qualifying a search with a prefix. Examples include finding "HalfElf" from
       * qualified "Race:HalfElf"
       *
       * @return
       *   A default or applied prefix
       */
      override def searchPrefixSource: String = partToModify.searchPrefixSource

      override lazy val part: Try[EffectPart] = Success(EffectPart.MissChanceEffect(partToModify))

      /**
       * The General Description should be just that. This should not include specific values
       * unless all instances will share that value. I.e. a Dodge Effect might state it increases
       * your miss-chance, but omit any value such as 20%. Those values will be displayed in the
       * effectText of a specific implementation such as the Dodge Feat or Uncanny Dodge
       */
      override val generalDescription: String =
        "Increases your chance to completely dodge an attack"

      /**
       * a list of Categories useful for menu / UI placement and also for searching / querying for
       * Miss-Chance or other desired effects.
       *
       * This list might be constrained or filtered by an Enumeration or CSV file. The goal is to
       * enable quick and advanced searching for specific categories from general (Miss-Chance) to
       * specific (evasion). In addition, it may be useful for deep searching such as increasing
       * Spot, which should suggest not only +Spot items, but +Wisdom or eventually include a feat
       * or enhancement that allows the use of some other value as your spot score.
       */
      override def categories: Seq[String] =
        Seq(EffectCategories.General, EffectCategories.MissChance).map(_.toString)

      override protected[this] lazy val partToModify: BasicStat with MissChance =
        BasicStat.DodgeChance

      private val eb = EffectParameterBuilder()
        .toggleOffValue(TriggerEvent.OnDeath)
        .toggleOnValue(TriggerEvent.Passive)
        .addBonusType(BonusType.Feat)
        .build

      override protected[this] def effectParameters: Seq[ParameterModifier[_]] = eb.modifiers
      override val effectDetail: DetailedEffect = mockDetailedEffect
      override val value: Int = 3
      override val source: SourceInfo = SourceInfo("ExampleDodge", this)
    }
    logger.info(s"found Feature Effect with Name ${partMod.name}")
    partMod.parameters should contain(Success(param))
    // partMod.parameters.foreach(_.success.value should be(param))
    partMod.part.success.value should be(part)
    // f.parameter should be a 'success //Be Success(BonusType(ActionBoost)) (EffectParameter.BonusType)
    // f.part should be a 'success //shouldBe (EffectPart.Feat)
    partMod.value shouldEqual 3
  }
  it("Should support Skill Augmentation", FeatTest, SkillTest, FeatureTest) {
    // val part = EffectPart.SkillPart
    val feat = GeneralFeat.Alertness
    val ff: immutable.Seq[SkillEffect] = feat.features.collect { case y: SkillEffect =>
      y
    }
    ff.map(_.skill) should contain allOf (Listen, Spot)
  }
  it("should contain relevant source information", FeatTest, SkillTest, FeatureTest) {
    val feat = GeneralFeat.Alertness
    val ff: Option[SkillEffect] = feat.features.collectFirst { case y: SkillEffect =>
      y
    }
    ff should not be empty
    val effect = ff.get
    val source = effect.source
    source shouldEqual feat
    logger.info(Feature.printFeature(effect))
  }
  it("Should be extractable using a filter like Basic Stat or BonusType", FeatureTest, FeatTest) {
    val param = EffectParameter.BonusType(BonusType.Feat)
    val part = EffectPart.MissChanceEffect(BasicStat.DodgeChance)
    val feat = GeneralFeat.Dodge
    // NOTE: the type arguments of PartModifier are erased at runtime, so this pattern
    // is an unchecked match on the raw PartModifier type.
    feat.features.collectFirst { case y: PartModifier[Int, BasicStat] =>
      y
    } match {
      case Some(x) =>
        x.parameters.foreach(_ should be a Symbol("Success"))
        x.parameters
          .flatten(_.toOption)
          .filter(_.entryName.contains("Bonus"))
          .map(_ should be(param))
        (x.part should be).a(Symbol("success"))
        x.part.success.value should be(part)
      case None =>
        // Previously this match was non-exhaustive and a missing feature surfaced as a
        // MatchError; fail with a descriptive message instead.
        fail("GeneralFeat.Dodge should expose a PartModifier feature")
    }
  }
  it("Should be able to sort a part and parameter with a value") {
    val param = EffectParameter.BonusType(BonusType.ActionBoost)
    val part = EffectPart.Feat(GeneralFeat.Trip)
    val mDetail = mock[DetailedEffect]
    val f = fixture
    val pm = new PartModifier[Int, GeneralFeat] with UsingSearchPrefix {

      override protected[this] lazy val partToModify: GeneralFeat =
        GeneralFeat.Trip

      /**
       * The General Description should be just that. This should not include specific values
       * unless all instances will share that value. I.e. a Dodge Effect might state it increases
       * your miss-chance, but omit any value such as 20%. Those values will be displayed in the
       * effectText of a specific implementation such as the Dodge Feat or Uncanny Dodge
       */
      override val generalDescription: String = "Not an ability to do illegal drugs"

      /**
       * a list of Categories useful for menu / UI placement and also for searching / querying for
       * Miss-Chance or other desired effects.
       *
       * This list might be constrained or filtered by an Enumeration or CSV file. The goal is to
       * enable quick and advanced searching for specific categories from general (Miss-Chance) to
       * specific (evasion). In addition, it may be useful for deep searching such as increasing
       * Spot, which should suggest not only +Spot items, but +Wisdom or eventually include a feat
       * or enhancement that allows the use of some other value as your spot score.
       */
      override def categories: Seq[String] = Seq(EffectCategories.SpecialAttack).map(_.toString)

      /**
       * Used when qualifying a search with a prefix. Examples include finding "HalfElf" from
       * qualified "Race:HalfElf"
       *
       * @return
       *   A default or applied prefix
       */
      override def searchPrefixSource: String = GeneralFeat.searchPrefixSource

      override lazy val part: Try[EffectPart] = Success(EffectPart.Feat(partToModify))

      private val eb = EffectParameterBuilder()
        .toggleOffValue(TriggerEvent.OnAttackRoll)
        .toggleOnValue(TriggerEvent.OnRest)
        .addBonusType(BonusType.ActionBoost)
        .build

      override protected[this] def effectParameters: Seq[ParameterModifier[_]] = eb.modifiers
      override val value: Int = 3
      override val source: SourceInfo = f.sourceInfo
      override val effectDetail: DetailedEffect = mDetail
    }
    val bonuses: Seq[EffectParameter] =
      pm.parameters.flatMap(_.toOption).filter(_.entryName.contains("Bonus"))
    bonuses should contain(param)
    // pm.parameters.foreach(_.success.value should be(param))
    pm.part.success.value should be(part)
    pm.value shouldEqual 3
  }
}
describe("Improved Critical Features") {
  they("Should be broken down by weapon type") {
    // Every weapon of every weapon class must resolve through exactly one of the
    // icPlus1 / icPlus2 / icPlus3 buckets (enforced by `squish should have size 1`).
    val result = WeaponClass.values.flatMap { weaponClass =>
      filterByWeaponClass(weaponClass).map { weapon =>
        icPlus1 shouldNot be(empty)
        icPlus2 shouldNot be(empty)
        icPlus3 shouldNot be(empty)
        // The item we are looking for is in one of these lists
        val a1 = icPlus1.filter(_ == weapon).flatMap(optPlus(_, 1))
        val a2 = icPlus2.filter(_ == weapon).flatMap(optPlus(_, 2))
        val a3 = icPlus3.filter(_ == weapon).flatMap(optPlus(_, 3))
        val squish = a1 ++ a2 ++ a3
        squish should have size 1
        logger.info(s"squish is ${squish.size}")
        val squished = squish.head
        squished._1 shouldEqual weapon
        squished
      }
    }
  }
  they("Should have name") {
    // Smoke test: printing every feature of the first Improved Critical sub-feat must not throw.
    val icFeat = GeneralFeat.ImprovedCritical.subFeats.head
    icFeat.features.foreach { f =>
      logger.info(printFeature(f))
    }
  }
}
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/test/scala/io/truthencode/ddo/model/effect/FeatureTest.scala | Scala | apache-2.0 | 11,077 |
package org.odfi.indesign.core.harvest.fs
import java.nio.file._
import org.odfi.tea.logging.TLogSource
import org.odfi.tea.thread.ThreadLanguage
import java.io.File
import java.lang.ref.WeakReference
import org.odfi.indesign.core.heart.HeartTask
import org.odfi.indesign.core.harvest.HarvestedResource
import org.odfi.indesign.core.heart.Heart
import scala.jdk.CollectionConverters._
/**
 * A single file-system change notification: the affected file plus the watch key
 * on whose directory the event was observed.
 */
trait IDFileEvent {
  val f: File
  val k: IDWatchKey
}

/** A file or directory was created under the watched directory (ENTRY_CREATE). */
class IDAddedEvent(val f: File, val k: IDWatchKey) extends IDFileEvent {
}

/** A file or directory was deleted from the watched directory (ENTRY_DELETE). */
class IDDeletedEvent(val f: File, val k: IDWatchKey) extends IDFileEvent {
}

/** A file under the watched directory was modified (ENTRY_MODIFY). */
class IDModifiedEvent(val f: File, val k: IDWatchKey) extends IDFileEvent {
}
object IDAdded {

  /** Matches creation events, yielding the created file. */
  def unapply(arg: IDFileEvent): Option[File] =
    PartialFunction.condOpt(arg) { case ev: IDAddedEvent => ev.f }
}
object IDModified {

  /** Matches modification events, yielding the modified file. */
  def unapply(arg: IDFileEvent): Option[File] =
    PartialFunction.condOpt(arg) { case ev: IDModifiedEvent => ev.f }
}
object IDDeleted {

  /** Matches deletion events, yielding the deleted file. */
  def unapply(arg: IDFileEvent): Option[File] =
    PartialFunction.condOpt(arg) { case ev: IDDeletedEvent => ev.f }
}
// Interface Sub classes
//------------------
/**
 * A registered listener for file events: holds the accepted file name ("*" for any),
 * a weak reference to the owning listener, and the callback to run on each event.
 */
class IDWatcher(val fileAccept: String, val listener: WeakReference[Any], var closure: Function1[IDFileEvent, Unit]) extends HarvestedResource {

  /** The key of the directory this watcher is currently attached to, if any. */
  var watchKey: Option[IDWatchKey] = None

  def getId = s"IDFileWatcher:${hashCode()}:${listener.hashCode()}"

  /**
   * Release resources: drop the weak listener reference and swap the callback for a
   * no-op so late dispatches cannot reach stale user code.
   */
  override def clean = {
    super.clean
    listener.clear()
    closure = {
      ev =>
    }
  }

  /**
   * Clean if the listener is not available anymore
   *
   * @return true when the listener is still reachable, false when this watcher detached itself
   */
  def check = {
    listener.get match {
      case null =>
        // Listener was garbage-collected: detach from the watch key if attached.
        // (Previously `watchKey.get` threw NoSuchElementException for a watcher that was
        // never attached to, or already removed from, a key.)
        watchKey.foreach(_.removeWatcher(this))
        false
      case other =>
        true
    }
  }

  /**
   * Run the callback for `ev` as a named heart task; the event is skipped while an
   * identically-named task (same watcher, event type and file) is still running.
   */
  def dispatch(ev: IDFileEvent) = {
    val taskId = s"IDFileWatcher:${hashCode()}:${listener.hashCode()}:${ev.getClass.getName}:${ev.f}"
    Heart.running(taskId) match {
      case Some(task) =>
        logFine[IDFileWatcher](s"[W] Task ${taskId} is running")
      case None =>
        logFine[IDFileWatcher](s"[V] Task ${taskId} starting")
        val task = new HeartTask[Any] {
          def getId = taskId
          def doTask = {
            closure(ev)
          }
        }
        Heart.pump(task)
    }
  }

  /** "*" accepts every path; any other configuration requires an exact match. */
  def accept(path: String) = fileAccept match {
    case "*" => true
    case exact => path == exact
  }
}
/**
 * Pairs a watched directory with its JDK [[WatchKey]] and maintains the watchers that
 * are interested in events under that directory.
 */
class IDWatchKey(val directory: File, val key: WatchKey) extends HarvestedResource {

  // Last-modified timestamp seen for the most recent ENTRY_MODIFY target; used by
  // filterDuplicateModifies to collapse duplicate modify events.
  var fileLastModify = -1L

  def getId = s"WatchKey:${hashCode()}:directory:$key"

  // Cancel the underlying JDK key and detach every watcher.
  override def clean = {
    super.clean
    key.cancel()
    cleanupWatchers
  }

  //-- Watchers
  var watchers = List[IDWatcher]()

  /** Attaches a watcher (idempotent) and points it back at this key. */
  def addWatcher(w: IDWatcher) = {
    watchers.contains(w) match {
      case false =>
        watchers = w :: watchers
        w.watchKey = Some(this)
      case true =>
    }
  }

  /** Detaches a watcher and clears its back-reference; no-op when not attached. */
  def removeWatcher(w: IDWatcher) = watchers.contains(w) match {
    case false =>
    case true =>
      w.watchKey = None
      watchers = watchers.filter(_ != w)
  }

  // Iterates a snapshot of the immutable list, so removing while looping is safe.
  def cleanupWatchers = {
    watchers.foreach {
      w =>
        w.clean
        removeWatcher(w)
    }
  }

  /**
   * Returns file instance resolved from this event
   * Don't return canonical path because file may have been deleted
   */
  def resolveFile(e: WatchEvent[Path]) = {
    directory.toPath().resolve(e.context()).toFile()
  }

  def isWatching(f: File) = directory.getCanonicalPath == f.getCanonicalPath

  def isKey(k: WatchKey) = key.equals(k)

  /**
   * Save the last modification date of a file to filter out double events send
   *
   * @return true when the event should be processed, false when it duplicates the last one
   */
  def filterDuplicateModifies(e: WatchEvent[Path]) = {
    val targetFile = resolveFile(e)
    if (fileLastModify != targetFile.lastModified()) {
      fileLastModify = targetFile.lastModified()
      true
    } else {
      false
    }
  }
}
/**
 * Watches files and directories through a JDK [[java.nio.file.WatchService]] and dispatches
 * typed [[IDFileEvent]]s to registered [[IDWatcher]]s from a daemon thread.
 *
 * NOTE(review): `watchedDirectories` is a plain var mutated both by caller threads
 * (register/delete) and by the watcher thread, with no synchronization — presumably
 * benign in current usage, but worth confirming or guarding.
 */
class IDFileWatcher extends ThreadLanguage with TLogSource {

  // Lifecycle
  //-----------------

  /** Starts the daemon thread that drains the watch service. */
  def start = {
    watcherThread.start
  }

  /** Cleans all registrations and closes the service, which stops the thread. */
  def stop = {
    clean
    watcher.close()
    ///watcherThread.join
  }

  /**
   * Deregister everything
   */
  def clean = {
    this.watchedDirectories.foreach {
      idKey =>
        idKey.clean
    }
    // Filter that keeps nothing == clear the list.
    this.watchedDirectories = this.watchedDirectories.filter(k => false)
  }

  // Watching Low Level
  //------------
  val watcher = FileSystems.getDefault().newWatchService();

  /**
   * Watching Keys are only defined for folders
   * Watching a file is thus required to watch the folder above
   */
  var watchedDirectories = List[IDWatchKey]()

  def deleteWatchingKey(k: IDWatchKey) = watchedDirectories.contains(k) match {
    case true =>
      watchedDirectories = watchedDirectories.filter(_ != k)
      k.clean
    case false =>
  }

  /**
   * Register provided directory to watch for events inside
   * If the file is not a directory, then use parent
   */
  def registerDirectoryForWatching(directory: File) = {
    val targetFile = directory.exists() match {
      case true if (directory.isDirectory) => directory.getCanonicalFile
      case true if (!directory.isDirectory) => directory.getParentFile.getCanonicalFile
      case other =>
        sys.error("Cannot Watch non existing file/directory..." + directory.getCanonicalPath)
    }
    watchedDirectories.find(_.isWatching(targetFile)) match {
      //-- Found -> Return
      case Some(key) => key
      //-- Not Found -> add
      case None =>
        logFine[IDFileWatcher](s"Start watching: " + targetFile)
        //-- Register
        val path = targetFile.toPath()
        val key = path.register(watcher, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_DELETE, StandardWatchEventKinds.ENTRY_MODIFY)
        //-- Create IDkey
        val IDKey = new IDWatchKey(targetFile, key)
        watchedDirectories = IDKey :: watchedDirectories
        IDKey
    }
  }

  // Single File Watch
  //--------------------

  /**
   * Invokes `closure` for changes to `file`: the parent directory is registered and
   * events are filtered down to this file name.
   *
   * @param listener held only weakly; the watcher detaches once it is collected
   * @param file the file to watch
   * @param closure the event callback
   */
  def onFileChange(listener: Any, file: File)(closure: IDFileEvent => Unit) = {
    //-- Register for watching
    val registeredKey = registerDirectoryForWatching(file)
    //-- Create a new Watcher
    val watcher = new IDWatcher(file.getName, new WeakReference(listener), closure)
    registeredKey.addWatcher(watcher)
    watcher
  }

  // Single Directory Watch
  //-------------------

  /**
   * Invokes `closure` for any change inside the directory `file`. With `modifyOnParent`,
   * the parent directory is registered as well so deletion of the folder itself is seen.
   *
   * @param listener held only weakly; the watcher detaches once it is collected
   * @param file the directory to watch
   * @param closure the event callback
   */
  def onDirectoryChange(listener: Any, file: File, modifyOnParent: Boolean = false)(closure: IDFileEvent => Unit) = {
    //-- Register for watching
    val registeredKey = registerDirectoryForWatching(file)
    //-- Create a new Watcher for new sub files
    val watcher = new IDWatcher("*", new WeakReference(listener), closure)
    registeredKey.addWatcher(watcher)
    //-- Watch parent to be able to catch deleting of the folder
    if (modifyOnParent) {
      val parentKey = registerDirectoryForWatching(file.getParentFile)
      val parentWatcher = new IDWatcher(file.getName, new WeakReference(listener), closure)
      parentWatcher.deriveFrom(watcher)
      parentKey.addWatcher(parentWatcher)
    }
    watcher
  }

  // Recursive Watch
  //------------------------

  /**
   * Watches `directory` and every directory below it; newly created sub-directories are
   * added to the watch on the fly, deleted ones are deregistered.
   */
  def onRecursiveDirectoryChange(listener: Any, directory: File)(closure: IDFileEvent => Unit) = directory match {
    case dir if (dir.exists() && dir.isDirectory) =>
      //-- Watch base directory
      val topWatcher = this.onDirectoryChange(listener, directory)(closure)
      // var idKey = this.registerDirectoryForWatching(directory)
      //-- Prepare the closure to be used on all listeners, to make sure we add new directories to the watching
      def listeningClosure: (IDFileEvent => Unit) = {
        //-- If directory is removed, make sure no watcher for key are present
        case deleted if (IDDeleted.unapply(deleted).isDefined) =>
          //logFine[IDFileWatcher]("Seen delete")
          //-- Delete derived watch key which has a watcher with same name
          this.watchedDirectories.find {
            k =>
              //k.watchers.find { w => w.accept(deleted.f.getName)}.isDefined
              k.directory.getCanonicalPath == deleted.f.getCanonicalPath
          } match {
            case Some(key) =>
              deleteWatchingKey(key)
            case None =>
          }
          //-- Dispatch
          closure(deleted)
        //-- If a directory is added, add to watch list
        case added if (IDAdded.unapply(added).isDefined && added.f.isDirectory) =>
          logFine[IDFileWatcher]("Created Directory, so add it to watch")
          val newWatcher = onDirectoryChange(listener, added.f)(listeningClosure)
          newWatcher.deriveFrom(topWatcher)
          //-- Make new watcher and key derived from top watcher key
          //newWatcher.watchKey.get.deriveFrom(topWatcher.watchKey.get)
          //topWatcher.dispatch(new IDAddedEvent(newDirectory))
          closure(added)
        //-- Ignore events on directories, already handled
        case other if (other.f.isDirectory) =>
          logFine[IDFileWatcher]("Ignoring directory modify..." + other.f)
        case other =>
          closure(other)
        //topWatcher.dispatch(other)
      }
      topWatcher.closure = listeningClosure
      //-- Now walk the files and register watchers for everything
      // Files.walk returns a Stream backed by open directory handles; close it when done
      // (it was previously leaked).
      val filesStream = Files.walk(directory.toPath)
      try {
        filesStream.forEach {
          file =>
            //-- Only listen to directories
            file.toFile.isDirectory match {
              case true if (file.toFile.getCanonicalPath != directory.getCanonicalPath) =>
                /**
                 * When a change happens on a directory, call the same closure
                 * Link with top watcher so that top watcher cleans everything
                 * When new resources are added, listen to them too
                 */
                val dirWatcher = onDirectoryChange(listener, file.toFile)(listeningClosure)
                dirWatcher.deriveFrom(topWatcher)
              case other =>
            }
        }
      } finally {
        filesStream.close()
      }
      topWatcher
    case other =>
      sys.error(s"Cannot Watch sub directory changes if the provided file ${directory.getCanonicalPath} is not a directory")
  }

  // Watcher Thread
  //--------------------
  // Daemon loop: block on the watch service, translate raw WatchEvents into typed
  // IDFileEvents, and dispatch to the matching key's watchers. Stops when the service
  // is closed (ClosedWatchServiceException).
  val watcherThread = createDaemonThread {
    var stop = false
    while (!stop) {
      // Get Key on which an event has happened
      //----------
      try {
        val key = watcher.take()
        // Get IDKey
        //-------------
        this.watchedDirectories.find(_.isKey(key)) match {
          //-- Process
          case Some(idKey) =>
            // Try and always reset the key
            logFine[IDFileWatcher](s"Watcher triggered on Key: " + key)
            try {
              //-- get events
              val polledEvents = key.pollEvents().asScala
              polledEvents.filter { ev => ev.kind() != StandardWatchEventKinds.OVERFLOW }.foreach {
                case ben: WatchEvent[_] if (ben.count() <= 1) =>
                  val be = ben.asInstanceOf[WatchEvent[Path]]
                  logFine[IDFileWatcher](s"Change detected on key ${idKey}: " + be.kind() + " -> " + be.context() + " -> " + be.count())
                  //-- Create event
                  val event = be.kind() match {
                    case StandardWatchEventKinds.ENTRY_DELETE =>
                      Some(new IDDeletedEvent(idKey.resolveFile(be), idKey))
                    case StandardWatchEventKinds.ENTRY_CREATE =>
                      Some(new IDAddedEvent(idKey.resolveFile(be), idKey))
                    case StandardWatchEventKinds.ENTRY_MODIFY if (idKey.filterDuplicateModifies(be)) =>
                      logFine[IDFileWatcher](s"File modified: " + idKey.resolveFile(be).lastModified())
                      Some(new IDModifiedEvent(idKey.resolveFile(be), idKey))
                    case other =>
                      None
                  }
                  // Dispatch
                  //---------------
                  if (event.isDefined) {
                    idKey.watchers.filter { w => w.accept(be.context().toString) } foreach {
                      watcher =>
                        watcher.check match {
                          case true =>
                            logFine[IDFileWatcher]("Watcher acccepted request: " + watcher.fileAccept)
                            watcher.dispatch(event.get)
                          case false =>
                        }
                    }
                  }
                case other =>
              }
            } finally {
              key.reset()
            }
          //-- Deregister
          case None =>
            key.cancel()
        }
      } catch {
        case e: ClosedWatchServiceException =>
          stop = true
      }
    }
  }
} | opendesignflow/indesign | indesign-core/src/main/scala/org/odfi/indesign/core/harvest/fs/IDFileWatcher.scala | Scala | gpl-3.0 | 13,311 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions.aggregate
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types._
/**
* A central moment is the expected value of a specified power of the deviation of a random
* variable from the mean. Central moments are often used to characterize the properties of about
* the shape of a distribution.
*
* This class implements online, one-pass algorithms for computing the central moments of a set of
* points.
*
* Behavior:
* - null values are ignored
* - returns `Double.NaN` when the column contains `Double.NaN` values
*
* References:
* - Xiangrui Meng. "Simpler Online Updates for Arbitrary-Order Central Moments."
* 2015. http://arxiv.org/abs/1510.04923
*
* @see [[https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
* Algorithms for calculating variance (Wikipedia)]]
*
* @param child to compute central moments of.
*/
abstract class CentralMomentAgg(child: Expression) extends DeclarativeAggregate {

  /**
   * The central moment order to be computed.
   */
  protected def momentOrder: Int

  override def children: Seq[Expression] = Seq(child)
  override def nullable: Boolean = true
  override def dataType: DataType = DoubleType
  override def inputTypes: Seq[AbstractDataType] = Seq(DoubleType)

  // Aggregation buffer slots: row count, running mean, and 2nd..4th central moments.
  protected val n = AttributeReference("n", DoubleType, nullable = false)()
  protected val avg = AttributeReference("avg", DoubleType, nullable = false)()
  protected val m2 = AttributeReference("m2", DoubleType, nullable = false)()
  protected val m3 = AttributeReference("m3", DoubleType, nullable = false)()
  protected val m4 = AttributeReference("m4", DoubleType, nullable = false)()

  // Only the first `momentOrder + 1` buffer slots are needed; drop the rest.
  private def trimHigherOrder[T](expressions: Seq[T]) = expressions.take(momentOrder + 1)

  override val aggBufferAttributes = trimHigherOrder(Seq(n, avg, m2, m3, m4))

  override val initialValues: Seq[Expression] = Array.fill(momentOrder + 1)(Literal(0.0))

  // One-pass update of (n, avg, m2[, m3[, m4]]) for a single input row, following the
  // online formulas referenced in the class doc; a null input row leaves the whole
  // buffer unchanged via the If(IsNull(child), ...) guards below.
  override val updateExpressions: Seq[Expression] = {
    val newN = n + Literal(1.0)
    val delta = child - avg
    val deltaN = delta / newN
    val newAvg = avg + deltaN
    val newM2 = m2 + delta * (delta - deltaN)

    val delta2 = delta * delta
    val deltaN2 = deltaN * deltaN
    val newM3 = if (momentOrder >= 3) {
      m3 - Literal(3.0) * deltaN * newM2 + delta * (delta2 - deltaN2)
    } else {
      Literal(0.0)
    }
    val newM4 = if (momentOrder >= 4) {
      m4 - Literal(4.0) * deltaN * newM3 - Literal(6.0) * deltaN2 * newM2 +
        delta * (delta * delta2 - deltaN * deltaN2)
    } else {
      Literal(0.0)
    }

    trimHigherOrder(Seq(
      If(IsNull(child), n, newN),
      If(IsNull(child), avg, newAvg),
      If(IsNull(child), m2, newM2),
      If(IsNull(child), m3, newM3),
      If(IsNull(child), m4, newM4)
    ))
  }

  // Combines two partial buffers (left/right) into one; deltaN is guarded against
  // division by zero when both sides are empty.
  override val mergeExpressions: Seq[Expression] = {

    val n1 = n.left
    val n2 = n.right
    val newN = n1 + n2
    val delta = avg.right - avg.left
    val deltaN = If(newN === Literal(0.0), Literal(0.0), delta / newN)
    val newAvg = avg.left + deltaN * n2

    // higher order moments computed according to:
    // https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Higher-order_statistics
    val newM2 = m2.left + m2.right + delta * deltaN * n1 * n2
    // `m3.right` is not available if momentOrder < 3
    val newM3 = if (momentOrder >= 3) {
      m3.left + m3.right + deltaN * deltaN * delta * n1 * n2 * (n1 - n2) +
        Literal(3.0) * deltaN * (n1 * m2.right - n2 * m2.left)
    } else {
      Literal(0.0)
    }
    // `m4.right` is not available if momentOrder < 4
    val newM4 = if (momentOrder >= 4) {
      m4.left + m4.right +
        deltaN * deltaN * deltaN * delta * n1 * n2 * (n1 * n1 - n1 * n2 + n2 * n2) +
        Literal(6.0) * deltaN * deltaN * (n1 * n1 * m2.right + n2 * n2 * m2.left) +
        Literal(4.0) * deltaN * (n1 * m3.right - n2 * m3.left)
    } else {
      Literal(0.0)
    }

    trimHigherOrder(Seq(newN, newAvg, newM2, newM3, newM4))
  }
}
// Compute the population standard deviation of a column
// scalastyle:off line.size.limit
@ExpressionDescription(
  usage = "_FUNC_(x) - Returns the population standard deviation calculated from values of a group.")
// scalastyle:on line.size.limit
case class StddevPop(child: Expression) extends CentralMomentAgg(child) {

  override protected def momentOrder = 2

  override def prettyName: String = "stddev_pop"

  // sqrt(M2 / n); an empty group (n == 0) evaluates to SQL NULL.
  override val evaluateExpression: Expression = {
    val whenEmpty = Literal.create(null, DoubleType)
    If(n === Literal(0.0), whenEmpty, Sqrt(m2 / n))
  }
}
// Compute the sample standard deviation of a column
@ExpressionDescription(
  usage = "_FUNC_(x) - Returns the sample standard deviation calculated from values of a group.")
case class StddevSamp(child: Expression) extends CentralMomentAgg(child) {

  override protected def momentOrder = 2

  override def prettyName: String = "stddev_samp"

  // sqrt(M2 / (n - 1)); NULL for an empty group, NaN for a single row.
  override val evaluateExpression: Expression = {
    val whenEmpty = Literal.create(null, DoubleType)
    val whenSingle = Literal(Double.NaN)
    If(n === Literal(0.0), whenEmpty,
      If(n === Literal(1.0), whenSingle, Sqrt(m2 / (n - Literal(1.0)))))
  }
}
// Compute the population variance of a column
@ExpressionDescription(
  usage = "_FUNC_(x) - Returns the population variance calculated from values of a group.")
case class VariancePop(child: Expression) extends CentralMomentAgg(child) {

  override protected def momentOrder = 2

  override def prettyName: String = "var_pop"

  // M2 / n; an empty group (n == 0) evaluates to SQL NULL.
  override val evaluateExpression: Expression = {
    val whenEmpty = Literal.create(null, DoubleType)
    If(n === Literal(0.0), whenEmpty, m2 / n)
  }
}
// Compute the sample variance of a column
@ExpressionDescription(
  usage = "_FUNC_(x) - Returns the sample variance calculated from values of a group.")
case class VarianceSamp(child: Expression) extends CentralMomentAgg(child) {

  override protected def momentOrder = 2

  override def prettyName: String = "var_samp"

  // M2 / (n - 1); NULL for an empty group, NaN for a single row.
  override val evaluateExpression: Expression = {
    val whenEmpty = Literal.create(null, DoubleType)
    val whenSingle = Literal(Double.NaN)
    If(n === Literal(0.0), whenEmpty,
      If(n === Literal(1.0), whenSingle, m2 / (n - Literal(1.0))))
  }
}
@ExpressionDescription(
  usage = "_FUNC_(x) - Returns the Skewness value calculated from values of a group.")
case class Skewness(child: Expression) extends CentralMomentAgg(child) {

  override protected def momentOrder = 3

  override def prettyName: String = "skewness"

  // sqrt(n) * M3 / M2^(3/2); NULL for an empty group, NaN when the variance is zero.
  override val evaluateExpression: Expression = {
    val whenEmpty = Literal.create(null, DoubleType)
    val whenNoVariance = Literal(Double.NaN)
    If(n === Literal(0.0), whenEmpty,
      If(m2 === Literal(0.0), whenNoVariance, Sqrt(n) * m3 / Sqrt(m2 * m2 * m2)))
  }
}
@ExpressionDescription(
  usage = "_FUNC_(x) - Returns the Kurtosis value calculated from values of a group.")
case class Kurtosis(child: Expression) extends CentralMomentAgg(child) {

  override protected def momentOrder = 4

  override def prettyName: String = "kurtosis"

  // Excess kurtosis: n * M4 / M2^2 - 3; NULL for an empty group, NaN when the variance is zero.
  override val evaluateExpression: Expression = {
    val whenEmpty = Literal.create(null, DoubleType)
    val whenNoVariance = Literal(Double.NaN)
    If(n === Literal(0.0), whenEmpty,
      If(m2 === Literal(0.0), whenNoVariance, n * m4 / (m2 * m2) - Literal(3.0)))
  }
}
| gioenn/xSpark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CentralMomentAgg.scala | Scala | apache-2.0 | 7,968 |
package dielectric.spark
/**
 * Aggregates the DStream and RDD instance traits into a single mix-in, so users can
 * pull in every instance with one trait.
 */
trait AllInstances
  extends DStreamInstances
  with RDDInstances
| adelbertc/dielectric | src/main/scala/dielectric/spark/All.scala | Scala | apache-2.0 | 95 |
package scalaSci.math.plot
// object to plot Complex numbers
object ComplexArrayPlots {
  import java.awt.Color
  import scalaSci.math.plot.plot._

  /** The sample indices 0, 1, ..., n - 1 as a Double-valued "time" axis. */
  private def timeAxis(n: Int): Array[Double] = Array.tabulate(n)(_.toDouble)

  /**
   * Plots the real parts of an Apache Commons Math complex array against the sample index.
   *
   * @param y complex samples whose real parts are plotted
   * @param yname axis label for the real values (currently unused; kept for API compatibility)
   * @param plotType plot type, e.g. "l" for line
   * @param color line color
   * @param lineWidth line width
   * @param plotName plot title
   * @return the panel the plot was rendered on
   */
  // Apache Commons plotting
  def plotApacheCommonsComplexArray(
      y: Array[org.apache.commons.math3.complex.Complex], yname: String = "Real " +
        "Values", plotType: String = "l",
      color: Color = Color.GREEN, lineWidth: Int = 1, plotName: String = "Plotting of " +
        "Real Parts"): scalaSci.math.plot.PlotPanel = {
    // Replaced the duplicated manual while-loops with tabulate/map: same arrays, clearer intent.
    val t = timeAxis(y.length)
    val yreals = y.map(_.getReal())
    scalaSci.math.plot.plotTypes.plot(t, yreals, color, plotType, plotName, lineWidth)
  }

  /**
   * Plots the real parts of a ScalaSci complex array against the sample index.
   * Same contract as [[plotApacheCommonsComplexArray]], for [[scalaSci.Complex]] input.
   */
  // ScalaSci complex plotting
  def plotScalaSciComplexArray(
      y: Array[scalaSci.Complex], yname: String = "Real Values", plotType: String = "l",
      color: Color = Color.GREEN, lineWidth: Int = 1, plotName: String = "Plotting of Real " +
        "Parts"): scalaSci.math.plot.PlotPanel = {
    val t = timeAxis(y.length)
    val yreals = y.map(_.getReal())
    scalaSci.math.plot.plotTypes.plot(t, yreals, color, plotType, plotName, lineWidth)
  }
}
/*
var x = vrand(1024)
var y = fft(x)
ComplexArrayPlots.plotApacheCommonsComplexArray(y)
*/
| scalalab/scalalab | source/src/main/scala/scalaSci/math/plot/ComplexArrayPlots.scala | Scala | mit | 1,789 |
package com.collective.analytics
import com.collective.analytics.schema.ImpressionLog
import org.apache.spark.sql.hyperloglog.HyperLogLogConfig
/**
 * In-memory reference implementation of [[AudienceAnalytics]]: scans the full impression
 * log on every query instead of using a pre-aggregated store.
 */
class InMemoryAudienceAnalytics(impressions: Vector[ImpressionLog]) extends AudienceAnalytics {

  // Unique segments in impression log
  private val segments: Set[String] = impressions.flatMap(_.segments.toVector).toSet

  /**
   * Folds every impression matching `matches` into a (cookie HLL counter, impression
   * count, click count) triple. Shared by both estimate methods, which previously
   * duplicated this aggregation verbatim.
   */
  private def aggregate(matches: ImpressionLog => Boolean) =
    impressions.filter(matches).foldLeft((HyperLogLogConfig.zeroHLL, 0L, 0L)) {
      case ((counter, imp, clk), log) =>
        counter.put(log.cookieId)
        (counter, imp + log.impressions, clk + log.clicks)
    }

  /** Audience reached by any of the given ads OR any of the given sites. */
  def audienceEstimate(ads: Vector[String], sites: Vector[String]): AudienceEstimate = {
    val (cnt, i, c) = aggregate(log => ads.contains(log.adId) || sites.contains(log.siteId))
    AudienceEstimate(ads, sites, cnt, i, c)
  }

  /** Audience whose impression carries at least one of the given segments. */
  def segmentsEstimate(segments: Vector[String]): SegmentsEstimate = {
    val (cnt, i, c) = aggregate(log => segments.intersect(log.segments).nonEmpty)
    SegmentsEstimate(segments, cnt, i, c)
  }

  /** For every known segment, the estimated overlap with the given audience. */
  def segmentsIntersection(audience: AudienceEstimate): Vector[(String, Long)] =
    segments.toVector.map { segment =>
      (segment, intersection(audience, segmentsEstimate(Vector(segment))))
    }
}
| collectivemedia/spark-hyperloglog | src/main/scala/com/collective/analytics/InMemoryAudienceAnalytics.scala | Scala | mit | 1,451 |
package org.skycastle.client.rendering
import com.jme.math.Vector3f
import com.jme.system.DisplaySystem
/**
*
*
* @author Hans Haggstrom
*/
class CameraWrapper(display: DisplaySystem, width: Int, height: Int) {

  // Initial camera frame: world position plus left/up/direction axes.
  private val loc = new Vector3f( 0, 0, 0 )
  private val left = new Vector3f( -0.5f, 0, 0.5f )
  private val up = new Vector3f( 0, 1, 0 )
  private val dir = new Vector3f( -0.5f, 0, -0.5f )

  //initialize the camera
  private val cam = display.getRenderer().createCamera(width, height)

  def camera = cam

  cam.setFrustumPerspective(45.0f, width.toFloat / height.toFloat, 1, 1000)

  // Move our camera to a correct place and orientation.
  cam.setFrame(loc, left, up, dir)

  /**Signal that we've changed our camera's location/frustum. */
  cam.update()

  display.getRenderer().setCamera(cam)

  def setPosition(position: Vector3f) : Unit = setPosition(position.x, position.y, position.z)

  def setPosition(x: Float, y: Float, z: Float) {
    // Mutates the same Vector3f instance that was handed to setFrame above, then signals
    // the change via cam.update(); assumes the jME camera retains a reference to `loc`
    // rather than copying it — NOTE(review): confirm against the jME Camera API.
    loc.setX(x)
    loc.setY(y)
    loc.setZ(z)
    cam.update()
  }
} | weimingtom/skycastle | src/main/scala/org/skycastle/client/rendering/CameraWrapper.scala | Scala | gpl-2.0 | 1,040 |
package org.jetbrains.plugins.scala.lang.completion.lookups
import java.util
import com.intellij.codeInsight.completion.InsertionContext
import com.intellij.codeInsight.lookup.{LookupElementDecorator, LookupElementPresentation}
import gnu.trove.THashSet
import org.jetbrains.plugins.scala.lang.completion.handlers.ScalaInsertHandler
/**
* @author Alefas
* @since 31.03.12
*/
class ScalaChainLookupElement(val prefix: ScalaLookupItem, val element: ScalaLookupItem) extends LookupElementDecorator[ScalaLookupItem](element) {

  override def getAllLookupStrings: util.Set[String] = {
    // Expose both the delegate's own strings and the chained "prefix.element" form so
    // completion matching works against either.
    val strings: util.Set[String] = getDelegate.getAllLookupStrings
    val result: THashSet[String] = new THashSet[String]
    result.addAll(strings)
    result.add(getLookupString)
    result
  }

  override def getLookupString: String = prefix.getLookupString + "." + element.getLookupString

  override def toString: String = getLookupString

  override def renderElement(presentation: LookupElementPresentation) {
    val prefixPresentation: LookupElementPresentation = new LookupElementPresentation
    prefix.renderElement(prefixPresentation)
    // Render the element with its smart-completion decoration suppressed; restore the
    // flag in a finally block so an exception thrown by renderElement can no longer
    // leak the cleared state (previously the flag simply stayed false in that case).
    val old = element.someSmartCompletion
    element.someSmartCompletion = false
    try {
      element.renderElement(presentation)
    } finally {
      element.someSmartCompletion = old
    }
    presentation.setItemText(prefixPresentation.getItemText + "." + presentation.getItemText)
    if (element.someSmartCompletion) {
      presentation.setItemText("Some(" + presentation.getItemText + ")")
    }
  }

  override def handleInsert(context: InsertionContext) {
    val editor = context.getEditor
    val caretModel = editor.getCaretModel
    // Offset where the prefix text ends once the element has been inserted; the +5
    // presumably compensates for the 5-char "Some(" wrapper added under smart
    // completion — NOTE(review): confirm against the insert handler.
    val offsetForPrefix = caretModel.getOffset + (if (element.someSmartCompletion) 5 else 0) - element.getLookupString.length - 1
    element.handleInsert(context)
    val document = context.getDocument
    val status = ScalaInsertHandler.getItemParametersAndAccessorStatus(prefix)
    // Append "()" after the prefix when it is a method that takes parameters
    // (or has them and is not a parameterless accessor).
    val addParams = status._1 >= 0 && (status._1 > 0 || !status._3)
    if (addParams) {
      document.insertString(offsetForPrefix, "()")
      // val offset = editor.getCaretModel.getOffset
      // editor.getCaretModel.moveToOffset(offset + 2)
    }
  }
}
| whorbowicz/intellij-scala | src/org/jetbrains/plugins/scala/lang/completion/lookups/ScalaChainLookupElement.scala | Scala | apache-2.0 | 2,203 |
import org.scalatest.Spec
import org.scalatest.matchers.ShouldMatchers
import java.io.File
import org.apache.commons.io._
import com.s21g.rubyist._
// NOTE(review): the class is named HathnameTest while the file and the subject under
// test are "Hashname" — looks like a typo; renaming would change how the suite is
// referenced by the test runner, so it is only flagged here.
class HathnameTest extends Spec with ShouldMatchers {

  // Deletes `path` recursively, then recreates it as an empty directory.
  def cleanDir(path:String): Unit = {
    FileUtils.deleteDirectory(new File(path))
    val dir = new File(path)
    dir.mkdirs
  }

  describe("Hashname") {
    describe("should hash directory") {
      // [given] file is 'data/users/910.xml'
      val file = "data/users/910.xml"
      // [when] digest of '910' is 'e205ee2a5de471a70c1fd1b46033a75f'
      val hash = Hashname(file)
      // [then] path is "data/users/e/20/5ee/e205ee2a5de471a70c1fd1b46033a75f/910.xml"
      hash.path should equal("data/users/e/20/5ee/e205ee2a5de471a70c1fd1b46033a75f/910.xml")
    }
    describe("should write and read physical files") {
      // Round-trip through the hashed path: the plain path must never exist on disk,
      // only the hashed one.
      cleanDir("tmp")
      val file = "tmp/users/910.xml"
      val path = Pathname(file)
      val hash = Hashname(file)
      path.exists should be(false)
      hash.exists should be(false)
      hash.write("rubyist")
      path.exists should be(false)
      hash.exists should be(true)
      hash.read should equal("rubyist")
    }
  }
}
| s21g/rubyist | src/test/scala/com/s21g/rubyist/HashnameTest.scala | Scala | mit | 1,179 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.sparkfy.scheduler
import java.util.concurrent.CopyOnWriteArrayList
import com.github.sparkfy.Logging
import com.github.sparkfy.util.Utils
import scala.collection.JavaConverters._
import scala.reflect.ClassTag
import scala.util.control.NonFatal
/**
 * An event bus which posts events to its listeners.
 *
 * @tparam L listener type
 * @tparam E event type
 */
trait ListenerBus[L <: AnyRef, E] extends Logging {

  // CopyOnWriteArrayList: iteration (the hot path in postToAll) needs no lock;
  // add/remove copy the backing array, which is fine for rare (un)registration.
  private[this] val listeners = new CopyOnWriteArrayList[L]

  /**
   * Add a listener to listen events. This method is thread-safe and can be called in any thread.
   */
  final def addListener(listener: L): Unit = {
    listeners.add(listener)
  }

  /**
   * Remove a listener and it won't receive any events. This method is thread-safe and can be called
   * in any thread.
   */
  final def removeListener(listener: L): Unit = {
    listeners.remove(listener)
  }

  /**
   * Post the event to all registered listeners. The `postToAll` caller should guarantee calling
   * `postToAll` in the same thread for all events.
   */
  final def postToAll(event: E): Unit = {
    // JavaConverters can create a JIterableWrapper if we use asScala.
    // However, this method will be called frequently. To avoid the wrapper cost, here we use
    // Java Iterator directly.
    val iter = listeners.iterator
    while (iter.hasNext) {
      val listener = iter.next()
      try {
        doPostEvent(listener, event)
      } catch {
        // NonFatal: a misbehaving listener is logged but must not kill the
        // bus; fatal errors (OOM etc.) still propagate.
        case NonFatal(e) =>
          logError(s"Listener ${Utils.getFormattedClassName(listener)} threw an exception", e)
      }
    }
  }

  /**
   * Post an event to the specified listener. `onPostEvent` is guaranteed to be called in the same
   * thread.
   */
  protected def doPostEvent(listener: L, event: E): Unit

  /** Returns the registered listeners whose runtime class is exactly `T`. */
  def findListenersByClass[T <: L : ClassTag](): Seq[T] = {
    val c = implicitly[ClassTag[T]].runtimeClass
    listeners.asScala.filter(_.getClass == c).map(_.asInstanceOf[T]).toSeq
  }
}
| sparkfy/sparkfy | sparkfy-common/src/main/scala/com/github/sparkfy/scheduler/ListenerBus.scala | Scala | apache-2.0 | 2,777 |
package test
// NOTE(review): this file does not appear to compile as written:
//   * ComponentBastiImpl's self type references `FloComponent`, which is not
//     defined anywhere here — the flo component trait below is `ComponentFlo`;
//   * none of the *Impl traits extend their Component* counterparts, so
//     ComponentRegistry does not satisfy ComponentBastiImpl's self type.
// This lives in an sbt plugin's test project, so the breakage may be a
// deliberate fixture for compile failures — confirm before "fixing" it.
class ComponentRegistry extends ComponentBastiImpl with ComponentMukiImpl

// Marker service type provided by the Basti component.
trait Basti

// Cake-pattern declaration of the `basti` dependency; requires a muki provider.
trait ComponentBasti { self: ComponentMuki =>
  val basti: Basti
}

// Concrete provider of `basti`; self type demands muki and flo providers.
trait ComponentBastiImpl { self: ComponentMuki with FloComponent =>
  val basti = new Basti {
  }
}

// Marker service type provided by the Muki component.
trait Muki

trait ComponentMuki {
  val muki: Muki
}

trait ComponentMukiImpl {
  val muki = new Muki {
  }
}

// Marker service type provided by the Flo component.
trait Flo

trait ComponentFlo {
  val flo: Flo
}

trait ComponentFloImpl {
  val flo = new Flo {
  }
}
| lunaryorn/sbt-poop | test-project/src/main/scala/test/CakePattern.scala | Scala | mit | 483 |
/*
* Copyright 2013 The SIRIS Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The SIRIS Project is a cooperation between Beuth University, Berlin and the
* HCI Group at the University of Würzburg. The project is funded by the German
* Federal Ministry of Education and Research (grant no. 17N4409).
*/
package simx.components.ai.feature.collection
import simx.core.entity.description.{Providing, SValNotFound, SVal}
import scala.collection.mutable
import simx.core.entity.typeconversion.{TypeInfo, ConvertibleTrait}
/**
* Created by IntelliJ IDEA.
* User: martin
* Date: 9/5/13
* Time: 1:02 PM
*/
/** A set of typed ring buffers, keyed by the type description of their contents. */
class BufferedSValSet(
    accessErrorMsg: (TypeInfo[_, _]) => String =
      (tpe: TypeInfo[_, _]) => "[error][BufferedSValSet] Requested type '" + tpe + "' not in set."
) {
  // Flag toggled by the owning component to signal that recording is active.
  private[feature] var _rec = false

  // Backing store: one ring buffer per registered type description.
  private val _data = mutable.HashMap[TypeInfo[_, _], RingBuffer[_]]()

  // Registers (or replaces) the buffer associated with `description`.
  private[feature] def add[T](buffer : RingBuffer[T])(implicit description: TypeInfo[T, T]) {
    _data += (description -> buffer)
  }

  override def toString: String =
    "BufferedSValSet\\n\\t" + _data.mkString("\\n\\t")

  /** Looks up the buffer for `description`; throws using `accessErrorMsg` when absent. */
  def apply[T](description: TypeInfo[T, T]): RingBuffer[T] =
    _data.getOrElse(description, throw new Exception(accessErrorMsg(description))).asInstanceOf[RingBuffer[T]]

  /** Alias for `apply`. */
  def of[T](description: TypeInfo[T, T]): RingBuffer[T] = apply(description)

  /** Whether recording is currently enabled. */
  def record = _rec
} | simulator-x/feature | src/simx/components/ai/feature/collection/BufferedSValSet.scala | Scala | apache-2.0 | 1,942 |
package techex.cases
import java.util.concurrent.Executors
import com.typesafe.config.Config
import doobie.util.process
import doobie.util.transactor.Transactor
import org.http4s.Header
import org.http4s.server._
import org.joda.time.Instant
import techex._
import streams._
import techex.data._
import techex.domain._
import techex.web.test
import scalaz._, Scalaz._
import scalaz.concurrent.Task
import scalaz.stream._
object startup {

  // Dedicated single-thread executor used while wiring up the streams.
  val streamRunner =
    namedSingleThreadExecutor("Streamsetup")

  // Per-message pipeline: the message handlers first (activity tracking,
  // scheduling, signup; anything unmatched falls through to noOp), then two
  // accumulating stages deriving time-boundary facts and achievements/badges.
  val messagehandlerPipe =
    process1.lift(trackPlayer.calcActivity orElse updateSchedule.handleScheduling orElse playerSignup.toFact orElse noOp) pipe
      appendAccumP1(locateOnSessionTimeBoundaries.handleTimeBoundsFacts) pipe
      appendAccumP1(calculatAchievements.calcAchievementsAndAwardBadges)

  /**
   * Wires up all long-running stream processes (tick producers, event fan-out
   * queues, the message pipeline, database persistence and the notifiers).
   * The returned Task, when run, starts everything fire-and-forget via
   * runAsync; results of the async runs are discarded (_.toString).
   */
  def setupStream(txor: Transactor[Task]): Task[Unit] = {

    // Tick producers feeding the shared event topic.
    val handleTicks =
      produceTicks.days to eventstreams.events.publish

    val handleTenSecs =
      produceTicks.tenSecs to eventstreams.events.publish

    // Two independent queues fan the event topic out to the message handler
    // and to the database writer.
    val inputHandlerQueue =
      async.unboundedQueue[InputMessage]

    val storeToDatabaseQueue =
      async.unboundedQueue[InputMessage]

    val enqueueToInputHandlerProcess =
      eventstreams.events.subscribe to inputHandlerQueue.enqueue

    val enqeueToDatabaseQueueProcess =
      eventstreams.events.subscribe to storeToDatabaseQueue.enqueue

    // Persists every incoming message inside a transaction.
    val handleStoreToDatabase =
      storeToDatabaseQueue.dequeue to
        process.sink[Task, InputMessage]((input: InputMessage) => {
          txor.transact(InputMessageDAO.storeObservation(input))
        })

    // Runs the message pipeline, applies the Storage state updates, and
    // publishes the resulting facts one at a time.
    val handleInputStream =
      inputHandlerQueue.dequeue pipe
        messagehandlerPipe through
        Storage.updates[List[Fact]] pipe
        process1.unchunk[Fact] to eventstreams.factUdpates.publish

    Task {
      // Each process is restarted with logging via printAndReset on failure
      // (where a handler is attached); async results are discarded.
      handleTicks.run.runAsync(_.toString)
      handleTenSecs.run.runAsync(_.toString)
      notifyGCM.setup(eventstreams.factUdpates.subscribe).runAsync(_.toString)
      notifySlack.setup(eventstreams.factUdpates.subscribe).runAsync(_.toString)
      printFactsToLog.setup(eventstreams.factUdpates.subscribe).runAsync(_.toString)
      notifyAPNS.setup(eventstreams.factUdpates.subscribe).runAsync(_.toString)
      enqueueToInputHandlerProcess.handle(streams.printAndReset(enqueueToInputHandlerProcess)).run.runAsync(_.toString)
      handleInputStream.handle(streams.printAndReset(handleInputStream)).run.runAsync(_.toString)
      enqeueToDatabaseQueueProcess.handle(streams.printAndReset(enqeueToDatabaseQueueProcess)).run.runAsync(_.toString)
      handleStoreToDatabase.handle(streams.printAndReset(handleStoreToDatabase)).run.runAsync(_.toString)
      println("Streams set up")
    }
  }

  // Fallback handler: facts pass through unchanged, everything else yields no
  // facts; never touches the Storage state.
  def noOp: PartialFunction[InputMessage, State[Storage, List[Fact]]] = {
    case m: Fact => State.state(List(m))
    case _ => State.state(List())
  }

  /** Publishes an AddEntry command for every static schedule entry. */
  def setupSchedule: Task[Unit] = {
    val commands =
      scheduling.scheduleEntries.map(entry => AddEntry(entry,Instant.now())).toSeq

    val t: Process[Task, Command] =
      Process.emitAll(commands)

    val tt =
      t to eventstreams.events.publish

    tt.run
  }

  /**
   * Full application bootstrap: selects the database backend from `cfg`
   * ("db_type": "mysql" or in-memory default), creates the schema, starts the
   * streams and schedule, and returns the HTTP routes plus the websocket
   * handler. All REST responses get a permissive CORS header.
   */
  def setup(cfg: Config): Task[(HttpService, WSHandler)] = {

    val dbConfig =
      if (getStringOr(cfg, "db_type", "mem") == "mysql")
        db.mysqlConfig(cfg.getString("db_username"), cfg.getString("db_password"))
      else
        db.inMemConfig

    val task = for {
      _ <- slack.sendMessage("Starting up server for venue "+ getStringOr(cfg,"venue","technoport") +" with database " + getStringOr(cfg, "db_type", "mem"), Attention)
      ds <- db.ds(dbConfig)
      _ <- ds.transact(InputMessageDAO.createObservationtable)
      _ <- setupStream(ds)
      _ <- setupSchedule
    } yield (HttpService(
      (playerSignup.restApi(getStringOr(cfg,"venue","technoport"),eventstreams.events) orElse
        test.testApi orElse
        listPersonalAchievements.restApi orElse
        listPersonalQuests.restApi orElse
        listTotalProgress.restApi orElse
        listTotalAchievements.restApi orElse
        trackPlayer.restApi(eventstreams.events) orElse
        unregisterPlayer.restApi orElse
        startSession.restApi(eventstreams.events) orElse
        endSession.restApi(eventstreams.events) orElse
        listSchedule.restApi orElse
        serveHelptext.restApi orElse
        getBeaconRegions.restApi orElse
        getAreas.restApi).andThen(resp => resp.map(r => r.withHeaders(Header("Access-Control-Allow-Origin","*"))))
    ), getUpdateStream.wsApi(eventstreams.factUdpates))

    task
  }
}
| kantega/tech-ex-2015 | backend/src/main/scala/techex/cases/startup.scala | Scala | mit | 4,584 |
/**
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.crossdata.driver
import com.stratio.crossdata.common.SQLCommand
import com.stratio.crossdata.driver.metadata.FieldMetadata
import org.apache.spark.sql.types._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class FlattenedTablesIT extends MongoWithSharedContext {

  "The Driver" should " List table's description with nested and array fields flattened" in {
    assumeCrossdataUpAndRunning
    val flattenedDriver = Driver(true)

    //Experimentation
    val result: Seq[FieldMetadata] = flattenedDriver.describeTable(Some(Database), Collection)

    //Expectations: nested leaves appear as dot-separated flattened columns.
    // (A duplicated "account.details.bank" assertion was removed here.)
    result should contain (new FieldMetadata("address.zip", IntegerType))
    result should contain (new FieldMetadata("account.details.bank", StringType))
    result should contain (new FieldMetadata("grades.FP", DoubleType))
  }

  it should " List table's description with nested fields Not flattened" in {
    assumeCrossdataUpAndRunning
    val driver = Driver()

    //Experimentation
    val result: Seq[FieldMetadata] = driver.describeTable(Some(Database), Collection)

    //Expectations: nested fields keep their StructType shape.
    val addressType = StructType(Seq(StructField("street", StringType), StructField("city", StringType), StructField("zip", IntegerType)))
    val detailAccount = StructType(Seq(StructField("bank", StringType), StructField("office", IntegerType)))
    val accountType = StructType(Seq(StructField("number", IntegerType), StructField("details", detailAccount)))
    result should contain (new FieldMetadata("address", addressType))
    result should contain (new FieldMetadata("account", accountType))
  }

  it should " Query with Flattened Fields" in {
    assumeCrossdataUpAndRunning
    val flattenedDriver = Driver(true)

    //Experimentation
    val result = flattenedDriver.syncQuery(SQLCommand(s"SELECT address.street from $Database.$Collection")).resultSet

    //Expectations
    result.head.toSeq(0).toString should fullyMatch regex "[0-9]+th Avenue"
  }

  it should " Query with Flattened Fields On Filters" in {
    assumeCrossdataUpAndRunning
    val flattenedDriver = Driver(true)

    //Experimentation
    val result = flattenedDriver.syncQuery(SQLCommand(s"SELECT description FROM $Database.$Collection WHERE address.street = '5th Avenue'")).resultSet

    //Expectations
    // Fixed: `x should be equals "description5"` parses as
    // `(x should be).equals("description5")`, discarding the Boolean — the
    // assertion was never actually checked. Assert the value for real:
    result.head.toSeq(0).toString should be ("description5")
  }
}
| luismcl/crossdata | driver/src/test/scala/com/stratio/crossdata/driver/FlattenedTablesIT.scala | Scala | apache-2.0 | 3,112 |
import java.sql.PreparedStatement
import play.twirl.api.Html
import util.ControlUtil._
import scala.collection.mutable.ListBuffer
package object plugin {

  /** Result type asking the host application to redirect to `path`. */
  case class Redirect(path: String)
  /** Result type carrying an HTML fragment to embed in a page. */
  case class Fragment(html: Html)
  /** Result type carrying raw bytes with an explicit content type. */
  case class RawData(contentType: String, content: Array[Byte])

  /** Minimal JDBC helpers bound to the per-thread plugin connection. */
  object db {

    // TODO labelled place holder support
    /**
     * Runs `sql` with positional `params` and materializes every row as a
     * column-name -> string map (values read via ResultSet.getString).
     */
    def select(sql: String, params: Any*): Seq[Map[String, String]] = {
      defining(PluginConnectionHolder.threadLocal.get){ conn =>
        using(conn.prepareStatement(sql)){ stmt =>
          setParams(stmt, params: _*)
          using(stmt.executeQuery()){ rs =>
            val list = new ListBuffer[Map[String, String]]()
            while(rs.next){
              defining(rs.getMetaData){ meta =>
                // JDBC columns are 1-based; duplicate column names collapse
                // to the last value in the map.
                val map = Range(1, meta.getColumnCount + 1).map { i =>
                  val name = meta.getColumnName(i)
                  (name, rs.getString(name))
                }.toMap
                list += map
              }
            }
            list
          }
        }
      }
    }

    // TODO labelled place holder support
    /** Runs an INSERT/UPDATE/DELETE and returns the affected row count. */
    def update(sql: String, params: Any*): Int = {
      defining(PluginConnectionHolder.threadLocal.get){ conn =>
        using(conn.prepareStatement(sql)){ stmt =>
          setParams(stmt, params: _*)
          stmt.executeUpdate()
        }
      }
    }

    /**
     * Binds positional parameters (1-based JDBC indices).
     *
     * Previously only String/Int/Boolean were handled and any other value
     * threw a MatchError; now Long and null are handled explicitly and any
     * other value falls back to setObject, letting the driver decide.
     */
    private def setParams(stmt: PreparedStatement, params: Any*): Unit = {
      params.zipWithIndex.foreach { case (p, i) =>
        p match {
          case x: String  => stmt.setString(i + 1, x)
          case x: Int     => stmt.setInt(i + 1, x)
          case x: Long    => stmt.setLong(i + 1, x)
          case x: Boolean => stmt.setBoolean(i + 1, x)
          case null       => stmt.setObject(i + 1, null)
          case x          => stmt.setObject(i + 1, x)
        }
      }
    }
  }
}
| tily/gitbucket2 | src/main/scala/plugin/package.scala | Scala | apache-2.0 | 1,693 |
package de.unihamburg.vsis.sddf.indexing.blocking
import org.apache.spark.rdd.RDD
import org.apache.spark.rdd.RDD.rddToPairRDDFunctions
import de.unihamburg.vsis.sddf.Parameterized
import de.unihamburg.vsis.sddf.indexing.blocking.keygeneration.BlockingKeyBuilder
import de.unihamburg.vsis.sddf.logging.Logging
import de.unihamburg.vsis.sddf.pipe.context.AbstractPipeContext
import de.unihamburg.vsis.sddf.reading.Tuple
import de.unihamburg.vsis.sddf.visualisation.model.AlgoAnalysable
/**
 * Suffix-array blocking: for every tuple a blocking key value (BKV) is built
 * via `bkvBuilder`; tuples sharing any key suffix of at least
 * `minimumSuffixLength` characters end up in the same candidate block.
 * Blocks with fewer than 2 or more than `maximumBlockSize` tuples are dropped.
 */
class PipeBlockerSuffixArray(
    minimumSuffixLength: Int = 6,
    maximumBlockSize: Int = 12)(
    implicit bkvBuilder: BlockingKeyBuilder)
  extends BlockingPipe
  with Parameterized
  with Logging {

  @transient override val _analysable = new AlgoAnalysable
  _analysable.algo = this
  _analysable.name = this.name

  override val name = "SuffixArrayBlocker"
  override val paramMap = Map[String, Any]("minimumSuffixLength" -> minimumSuffixLength,
    "maximumBlockSize" -> maximumBlockSize, "BlockingKeyBuilder" -> bkvBuilder)

  /**
   * Computes the candidate blocks for the given tuples.
   * NOTE: the debug logging below forces four count() actions on the lineage;
   * kept to preserve the original output.
   */
  def step(input: RDD[Tuple])(implicit pipeContext: AbstractPipeContext): RDD[Seq[Tuple]] = {
    val keyedTuples: RDD[(String, Tuple)] =
      input.map(tuple => (bkvBuilder.buildBlockingKey(tuple), tuple))
    val suffixPairs: RDD[(String, Tuple)] = keyedTuples.flatMap(calcSuffixes)
    val groupedBySuffix: RDD[(String, Seq[Tuple])] =
      suffixPairs.groupByKey().map { case (suffix, tuples) => (suffix, tuples.toSeq) }
    val sizedBlocks: RDD[(String, Seq[Tuple])] = groupedBySuffix.filter(filterBlocks)
    log.debug("bkvTuplePairs count " + keyedTuples.count)
    log.debug("SuffixTuplePairs count " + suffixPairs.count)
    log.debug("SuffixBlocks count " + groupedBySuffix.count)
    log.debug("filteredSuffixBlocks count " + sizedBlocks.count)
    sizedBlocks.map(_._2)
  }

  /**
   * Calculates the suffixes of a given BKV, shortest first.
   * The longest suffix is the BKV itself; the shortest has exactly
   * `minimumSuffixLength` characters. Empty when the BKV is shorter than that.
   */
  def calcSuffixes(bkvTuplePair: (String, Tuple)): Seq[(String, Tuple)] = {
    val (bkv, tuple) = bkvTuplePair
    val maxOffset = bkv.length() - minimumSuffixLength
    (maxOffset to 0 by -1).map(offset => (bkv.substring(offset), tuple))
  }

  /** Keeps only blocks with 2 to `maximumBlockSize` tuples. */
  def filterBlocks(suffixTuplePair: (String, Seq[Tuple])): Boolean = {
    val blockSize = suffixTuplePair._2.length
    blockSize >= 2 && blockSize <= maximumBlockSize
  }
}
}
/** Factory mirroring the class constructor and its default arguments. */
object PipeBlockerSuffixArray {
  def apply(minimumSuffixLength: Int = 6, maximumBlockSize: Int = 12)(
    implicit bkvBuilder: BlockingKeyBuilder) = {
    new PipeBlockerSuffixArray(minimumSuffixLength, maximumBlockSize)
  }
}
| numbnut/sddf | src/main/scala/de/unihamburg/vsis/sddf/indexing/blocking/PipeBlockerSuffixArray.scala | Scala | gpl-3.0 | 3,183 |
/***********************************************************************
* Crown Copyright (c) 2017 Dstl
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.tools.authentication
import java.io.File
import com.typesafe.config.{ConfigFactory, ConfigRenderOptions}
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.tools.{AccumuloDataStoreCommand, AccumuloRunner}
import org.locationtech.geomesa.utils.geotools.Conversions._
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class AuthenticationTest extends Specification {

  sequential

  "GeoMesa Accumulo Commands" should {
    // These tests all invoke an ingest command using a mock instance
    // The authentication parameters aren't actually used by the mock instance, but these tests serve to test parameter
    // validation in AccumuloRunner and associated classes.

    val conf = ConfigFactory.load("examples/example1-csv.conf")
    val sft = conf.root().render(ConfigRenderOptions.concise())
    val converter = conf.root().render(ConfigRenderOptions.concise())
    val dataFile = new File(this.getClass.getClassLoader.getResource("examples/example1.csv").getFile)

    // Only test ingest, other commands use the same Accumulo Parameters anyway
    val cmd = Array("ingest")

    "fail without user" >> {
      // A single empty-string argument: no --user supplied at all.
      val authArgs = Array("")
      val args = cmd ++ authArgs ++ Array("--instance", "instance", "--zookeepers", "zoo", "--mock", "--catalog", "z", "--converter", converter, "-s", sft, dataFile.getPath)
      // specs2's `must throwA` takes the expression by name, so the exception
      // thrown while parsing is what satisfies the matcher; the `command` val
      // itself is never used afterwards.
      val command = AccumuloRunner.parseCommand(args).asInstanceOf[AccumuloDataStoreCommand] must throwA[com.beust.jcommander.ParameterException]
      ok
    }

    // With just user parameter, the user is prompted to enter a password at the command line which we can't test here

    "work with user and password" >> {
      val authArgs = Array("--user", "root", "--password", "secret")
      val args = cmd ++ authArgs ++ Array("--instance", "instance", "--zookeepers", "zoo", "--mock", "--catalog", "userandpassword", "--converter", converter, "-s", sft, dataFile.getPath)
      val command = AccumuloRunner.parseCommand(args).asInstanceOf[AccumuloDataStoreCommand]
      command.execute()
      // Verify the ingest actually landed the three example rows.
      val features = command.withDataStore(_.getFeatureSource("renegades").getFeatures.features().toList)
      features.size mustEqual 3
      features.map(_.get[String]("name")) must containTheSameElementsAs(Seq("Hermione", "Harry", "Severus"))
    }

    "work with user and keytab" >> {
      // Keytab path is never touched by the mock instance, only validated.
      val authArgs = Array("--user", "root", "--keytab", "/path/to/some/file")
      val args = cmd ++ authArgs ++ Array("--instance", "instance", "--zookeepers", "zoo", "--mock", "--catalog", "userandkeytab", "--converter", converter, "-s", sft, dataFile.getPath)
      val command = AccumuloRunner.parseCommand(args).asInstanceOf[AccumuloDataStoreCommand]
      command.execute()
      val features = command.withDataStore(_.getFeatureSource("renegades").getFeatures.features().toList)
      features.size mustEqual 3
      features.map(_.get[String]("name")) must containTheSameElementsAs(Seq("Hermione", "Harry", "Severus"))
    }

    "fail with user and password and keytab" >> {
      // Password and keytab are mutually exclusive.
      val authArgs = Array("--instance", "instance", "--zookeepers", "zoo", "--user", "user", "--password", "secret", "--keytab", "/path/to/some/file")
      val args = cmd ++ authArgs ++ Array("--mock", "--catalog", "z", "--converter", converter, "-s", sft, dataFile.getPath)
      val command = AccumuloRunner.parseCommand(args).asInstanceOf[AccumuloDataStoreCommand] must throwA[com.beust.jcommander.ParameterException]
      ok
    }
  }
}
| tkunicki/geomesa | geomesa-accumulo/geomesa-accumulo-tools/src/test/scala/org/locationtech/geomesa/accumulo/tools/authentication/AuthenticationTest.scala | Scala | apache-2.0 | 3,999 |
//
// Random.scala -- Orc methods Random and URandom
// Project OrcScala
//
// Copyright (c) 2018 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.lib.util
import java.math.BigInteger
import java.util.concurrent.ThreadLocalRandom
import orc.{ DirectInvoker, OrcRuntime }
import orc.values.sites.{ DirectSite, FunctionalSite, IllegalArgumentInvoker, LocalSingletonSite, Range, SiteMetadata }
/**
 * Orc site `Random`: with no arguments publishes a uniformly distributed Int;
 * with one integral argument n publishes a uniform Int in [0, n).
 */
object Random extends DirectSite with SiteMetadata with FunctionalSite with Serializable with LocalSingletonSite {

  /** Invoker for `Random(n)`: publishes a uniform value in [0, n). */
  class ArgInvoker extends DirectInvoker {
    def canInvoke(target: AnyRef, arguments: Array[AnyRef]): Boolean = {
      target == Random && arguments.length == 1 && arguments(0).isInstanceOf[Number]
    }

    def invokeDirect(target: AnyRef, arguments: Array[AnyRef]): AnyRef = {
      orc.run.StopWatches.implementation {
        val n = arguments(0).asInstanceOf[Number]
        // Check that the bound fits in an Int *before* narrowing. The previous
        // check (n.longValue() <= Int.MaxValue) could be fooled by BigInt /
        // BigInteger values whose longValue() wraps around.
        val fitsInInt = n match {
          case b: BigInt => b.isValidInt
          case b: BigInteger => b.bitLength() <= 31
          case _ => n.longValue() <= Int.MaxValue
        }
        if (fitsInInt)
          // nextInt itself rejects non-positive bounds with IllegalArgumentException.
          ThreadLocalRandom.current().nextInt(n.intValue()).asInstanceOf[AnyRef]
        else
          // (typo fixed: "much be" -> "must be")
          throw new IllegalArgumentException(s"$Random($n): bound must be less than 2**31.")
      }
    }
  }

  /** Invoker for `Random()`: publishes a uniform Int over the full range. */
  class NoArgInvoker extends DirectInvoker {
    def canInvoke(target: AnyRef, arguments: Array[AnyRef]): Boolean = {
      target == Random && arguments.length == 0
    }

    def invokeDirect(target: AnyRef, arguments: Array[AnyRef]): AnyRef = {
      orc.run.StopWatches.implementation {
        ThreadLocalRandom.current().nextInt().asInstanceOf[AnyRef]
      }
    }
  }

  /** Selects an invoker by arity; only integral bounds are accepted for arity 1. */
  def getInvoker(runtime: OrcRuntime, args: Array[AnyRef]) = {
    args.length match {
      case 0 => new NoArgInvoker()
      case 1 =>
        args(0) match {
          case _: BigInt | _: BigInteger | _: Integer | _: java.lang.Long =>
            new ArgInvoker()
          case _ =>
            IllegalArgumentInvoker(this, args)
        }
      case _ =>
        IllegalArgumentInvoker(this, args)
    }
  }

  // Publishes at most one value.
  override def publications: Range = Range(0, 1)
}
/** Orc site `URandom`: publishes a uniformly distributed Double in [0, 1). */
object URandom extends DirectSite with SiteMetadata with FunctionalSite with Serializable with LocalSingletonSite {

  // Zero-argument invoker: URandom() -> ThreadLocalRandom.nextDouble().
  class NoArgInvoker extends DirectInvoker {
    def canInvoke(target: AnyRef, arguments: Array[AnyRef]): Boolean = {
      target == URandom && arguments.length == 0
    }

    def invokeDirect(target: AnyRef, arguments: Array[AnyRef]): AnyRef = {
      orc.run.StopWatches.implementation {
        ThreadLocalRandom.current().nextDouble().asInstanceOf[AnyRef]
      }
    }
  }

  /** Any call with arguments is rejected via IllegalArgumentInvoker. */
  def getInvoker(runtime: OrcRuntime, args: Array[AnyRef]) = {
    args.length match {
      case 0 => new NoArgInvoker()
      case _ =>
        IllegalArgumentInvoker(this, args)
    }
  }

  // Publishes at most one value.
  override def publications: Range = Range(0, 1)
}
| orc-lang/orc | OrcScala/src/orc/lib/util/Random.scala | Scala | bsd-3-clause | 2,952 |
package com.twitter.finagle.tracing.opencensus
import com.twitter.finagle.context.Contexts
import com.twitter.finagle._
import com.twitter.util.{Future, Try}
import io.opencensus.trace.{SpanContext, Tracing}
import io.opencensus.trace.propagation.TextFormat
/**
* Syntax enhancements to Finagle servers to attach OpenCensus tracing
* headers from requests.
*
* HTTP and ThriftMux protocols are supported.
*
* Clients should also participate by using [[StackClientOps]].
*
* @see [[StackClientOps]]
*
* @example
* Scala:
* {{{
* import com.twitter.finagle.Http
* import com.twitter.finagle.tracing.opencensus.StackServerOps._
*
* val serverWithOpenCensusTracing = Http.server.withOpenCensusTracing
* }}}
*
* Java users can explicitly use a [[StackServerOps]] class:
* {{{
* import com.twitter.finagle.Http;
* import com.twitter.finagle.tracing.opencensus.StackServerOps.HttpOpenCensusTracing;
*
* Http.Server serverWithOpenCensusTracing =
* new HttpOpenCensusTracing(Http.server()).withOpenCensusTracing();
* }}}
*/
object StackServerOps {

  implicit final class ThriftMuxOpenCensusTracing(private val server: ThriftMux.Server)
    extends AnyVal {

    // ThriftMux carries the span context via the broadcast request context,
    // so only the context-restoring filter needs to be prepended.
    def withOpenCensusTracing: ThriftMux.Server = {
      server.withStack(_.prepend(ServerTraceContextFilter.module))
    }
  }

  implicit final class HttpOpenCensusTracing(private val server: Http.Server) extends AnyVal {

    // Defaults to B3 header propagation.
    def withOpenCensusTracing: Http.Server =
      withOpenCensusTracing(Tracing.getPropagationComponent.getB3Format)

    def withOpenCensusTracing(textFormat: TextFormat): Http.Server = {
      server.withStack { stack =>
        stack
          .prepend(ServerTraceContextFilter.module)
          .prepend(httpDeserModule(textFormat)) // attach to broadcast ctx before setting OC
      }
    }
  }

  // Extracts the OpenCensus span context from the request's HTTP headers (if
  // present) and exposes it on the broadcast context for downstream filters.
  private def httpDeserFilter(textFormat: TextFormat): SimpleFilter[http.Request, http.Response] =
    new SimpleFilter[http.Request, http.Response] {
      private[this] val getter = new TextFormat.Getter[http.Request] {
        def get(carrier: http.Request, key: String): String =
          carrier.headerMap.getOrNull(key)
      }

      def apply(
        request: http.Request,
        service: Service[http.Request, http.Response]
      ): Future[http.Response] = {
        // Absent or malformed headers degrade to SpanContext.INVALID and the
        // request proceeds untraced instead of failing.
        val spanContext = Try(textFormat.extract(request, getter)).getOrElse(SpanContext.INVALID)
        if (spanContext != SpanContext.INVALID) {
          Contexts.broadcast.let(TraceContextFilter.SpanContextKey, spanContext) {
            service(request)
          }
        } else {
          service(request)
        }
      }
    }

  /** exposed for testing */
  private[opencensus] val HttpDeserializationStackRole: Stack.Role =
    Stack.Role("OpenCensusHeaderDeserialization")

  // Stack module wrapping httpDeserFilter so it can be prepended to a server stack.
  private def httpDeserModule(
    textFormat: TextFormat
  ): Stackable[ServiceFactory[http.Request, http.Response]] =
    new Stack.Module0[ServiceFactory[http.Request, http.Response]] {
      def make(
        next: ServiceFactory[http.Request, http.Response]
      ): ServiceFactory[http.Request, http.Response] =
        httpDeserFilter(textFormat).andThen(next)

      def role: Stack.Role = HttpDeserializationStackRole

      def description: String = "Attaches OpenCensus HTTP Headers to the broadcast context"
    }
}
| twitter/finagle | finagle-opencensus-tracing/src/main/scala/com/twitter/finagle/tracing/opencensus/StackServerOps.scala | Scala | apache-2.0 | 3,301 |
package com.twitter.finatra.json.internal.caseclass.validation.validators
import com.twitter.finatra.json.internal.caseclass.validation.validators.PastTimeValidator._
import com.twitter.finatra.validation.{ErrorCode, PastTime, ValidationMessageResolver, ValidationResult, Validator}
import org.joda.time.DateTime
object PastTimeValidator {

  /** Resolves the error message for a datetime that is not in the past. */
  def errorMessage(
    resolver: ValidationMessageResolver,
    value: DateTime) = {

    resolver.resolve(classOf[PastTime], value)
  }
}
/**
 * Validates if a datetime is in the past.
 *
 * A value is valid iff it is strictly before the current instant (system
 * clock, via `isBeforeNow`); otherwise the result carries the resolved
 * message and `ErrorCode.TimeNotPast`.
 */
class PastTimeValidator(
  validationMessageResolver: ValidationMessageResolver,
  annotation: PastTime)
  extends Validator[PastTime, DateTime](
    validationMessageResolver,
    annotation) {

  override def isValid(value: DateTime) = {
    ValidationResult(
      value.isBeforeNow,
      errorMessage(validationMessageResolver, value),
      ErrorCode.TimeNotPast(value))
  }
}
| nkhuyu/finatra | jackson/src/main/scala/com/twitter/finatra/json/internal/caseclass/validation/validators/PastTimeValidator.scala | Scala | apache-2.0 | 919 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.online
import org.apache.spark._
import org.apache.spark.rdd.{BlockRDD, RDD, ZippedPartitionsBaseRDD, ZippedPartitionsPartition}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.storage.{BlockId, OLABlockId, StorageLevel}
import scala.collection.mutable.ArrayBuffer
import scala.language.implicitConversions
import scala.reflect.ClassTag
object IolapUtils {

  /**
   * Returns the storage level to actually use: the requested `level` when
   * iolap caching is enabled (`spark.approx.iolapCacheEnabled`, default true),
   * otherwise DISK_ONLY.
   */
  def checkStorageLevel(level: StorageLevel): StorageLevel = {
    val conf = SparkContext.getOrCreate().getConf
    val cachingEnabled = conf.getBoolean("spark.approx.iolapCacheEnabled", true)
    if (cachingEnabled) level else StorageLevel.DISK_ONLY
  }
}
/**
 * Pass-through iterator that copies every row it yields into an in-memory
 * buffer; `cache(blockId)` persists that buffer via the block manager.
 */
class CachedIterator(val iterator: Iterator[Row]) extends Iterator[Row] {
  protected[this] val cache = new ArrayBuffer[Row]()

  override def hasNext: Boolean = iterator.hasNext

  override def next(): Row = {
    val row = iterator.next()
    // copy(): presumably because the upstream operator reuses Row instances —
    // TODO(review) confirm against the producing operators.
    cache += row.copy()
    row
  }

  /** Stores the buffered rows as one array block under `blockId`. */
  def cache(blockId: BlockId): Unit = {
    SparkEnv.get.blockManager.putArray(blockId, cache.toArray,
      IolapUtils.checkStorageLevel(StorageLevel.MEMORY_AND_DISK))
  }
}
/**
 * Stackable mix-in for CachedIterator: once the underlying iterator reports
 * exhaustion, persists the buffered rows under `blockId`.
 * NOTE(review): the persist fires on *every* hasNext call that returns false,
 * so repeated hasNext after exhaustion re-writes the block.
 */
trait AutoSave {
  self: CachedIterator =>
  val blockId: OLABlockId

  override def hasNext: Boolean = {
    val open = iterator.hasNext
    if (!open) cache(blockId)
    open
  }
}

/**
 * Stackable mix-in for CachedIterator: only rows satisfying `predicate` are
 * copied into the buffer; all rows are still passed through to the caller.
 */
trait SelectiveCache {
  self: CachedIterator =>
  val predicate: (Row => Boolean)

  override def next(): Row = {
    val row = iterator.next()
    if (predicate(row)) {
      cache += row.copy()
    }
    row
  }
}
/** CachedIterator that persists its buffer automatically on exhaustion. */
class AutoCachedIterator(val blockId: OLABlockId, override val iterator: Iterator[Row])
  extends CachedIterator(iterator) with AutoSave

/** CachedIterator that buffers only the rows matching `predicate`. */
class SelectiveCachedIterator(val predicate: (Row => Boolean), override val iterator: Iterator[Row])
  extends CachedIterator(iterator) with SelectiveCache

/** Combines both: buffers matching rows and persists them on exhaustion. */
class SelectiveAutoCachedIterator(
  val predicate: (Row => Boolean),
  val blockId: OLABlockId,
  override val iterator: Iterator[Row])
  extends CachedIterator(iterator) with AutoSave with SelectiveCache
/**
 * Zips the partitions of two RDDs, additionally passing the partition index
 * to the zip function `f` (like zipPartitions + partition index).
 */
class ZippedPartitionsWithIndexRDD2[U: ClassTag, V: ClassTag, T: ClassTag](
  sc: SparkContext,
  var f: (Int, Iterator[U], Iterator[V]) => Iterator[T],
  var rdd1: RDD[U],
  var rdd2: RDD[V])
  extends ZippedPartitionsBaseRDD[T](sc, Seq(rdd1, rdd2)) {

  override def compute(partition: Partition, context: TaskContext): Iterator[T] = {
    val partitions = partition.asInstanceOf[ZippedPartitionsPartition].partitions
    f(partition.index, rdd1.iterator(partitions(0), context), rdd2.iterator(partitions(1), context))
  }

  // Drop references to the parents and the closure so they can be GC'd once
  // dependencies are cleared (mirrors Spark's own Zipped*RDD classes).
  override def clearDependencies() {
    super.clearDependencies()
    rdd1 = null
    rdd2 = null
    f = null
  }
}
/** Implicit conversion enabling `rdd.zipPartitionsWithIndex(other)(f)`. */
object ComposeRDDFunctions {
  implicit def rddToComposeRDDFunctions[T: ClassTag](rdd: RDD[(T)]): ComposeRDDFunctions[T] =
    new ComposeRDDFunctions(rdd)
}

/** Extra RDD combinators used by the online query engine. */
class ComposeRDDFunctions[U](self: RDD[U])(implicit ut: ClassTag[U]) {
  /** Like zipPartitions, but `f` also receives the partition index. */
  def zipPartitionsWithIndex[V: ClassTag, T: ClassTag]
  (other: RDD[V])
  (f: (Int, Iterator[U], Iterator[V]) => Iterator[T]): RDD[T] = {
    val sc = self.context
    new ZippedPartitionsWithIndexRDD2(sc, sc.clean(f), self, other)
  }
}
// Minimal Partition implementation carrying only its index.
class EmptyPartition(val index: Int) extends Partition

/** An RDD with `numParts` partitions, each yielding an empty iterator. */
class EmptyIteratorRDD[T: ClassTag](sc: SparkContext, numParts: Int) extends RDD[T](sc, Nil) {
  override protected def getPartitions: Array[Partition] =
    Array.tabulate(numParts)(i => new EmptyPartition(i))
  override def compute(split: Partition, context: TaskContext): Iterator[T] = Iterator()
}
/**
 * Narrow dependency in which result partition `i` declares parent partition
 * `mapping(i)` as its single parent.
 */
class PartShuffledDependency[T](rdd: RDD[T], @transient mapping: Array[Int])
  extends NarrowDependency[T](rdd) {
  override def getParents(partitionId: Int): Seq[Int] = List(mapping(partitionId))
}

// Pairs a partition index with the parent split it reads from.
class PartShuffledPartition(val index: Int, val parentSplit: Partition) extends Partition

/**
 * RDD whose declared dependency graph is permuted by `mapping`.
 * NOTE(review): compute() reads parent partition `i` (getPartitions stores
 * `rdd.partitions(i)` as parentSplit) while the dependency declares parent
 * `mapping(i)` — so the permutation appears to affect only the declared
 * lineage/locality, not which data each partition yields. Confirm this
 * asymmetry is intentional before touching it.
 */
class PartShuffledRDD[T: ClassTag](sc: SparkContext, @transient rdd: RDD[T], mapping: Array[Int])
  extends RDD[T](sc, new PartShuffledDependency(rdd, mapping) :: Nil) {
  override protected def getPartitions: Array[Partition] =
    Array.tabulate(rdd.partitions.length)(i => new PartShuffledPartition(i, rdd.partitions(i)))
  override def compute(split: Partition, context: TaskContext): Iterator[T] =
    firstParent[T].iterator(split.asInstanceOf[PartShuffledPartition].parentSplit, context)
}
object OLABlockRDD {

  /**
   * Builds an RDD over the OLA blocks of operator `opId`, padded/coalesced to
   * exactly `reference.partitions.length` partitions and permuted so its
   * partitions line up (by preferred location) with those of `reference`.
   */
  def create[T: ClassTag](
      sc: SparkContext,
      opId: Int,
      batchSizes: Array[(Int, Int)],
      reference: RDD[_]): RDD[T] = {
    // One BlockId per (block index, batch) pair.
    val blockIds = batchSizes.flatMap { case (size, batchId) =>
      (0 until size).map(OLABlockId(opId, batchId, _).asInstanceOf[BlockId])
    }
    val numParts = reference.partitions.length
    val rdd = new BlockRDD[T](sc, blockIds)
    val padLength = numParts - rdd.partitions.length
    // NOTE(review): the original expression was
    //   if (padLength > 0) { rdd.union(...) } else { rdd }.coalesce(numParts)
    // where `.coalesce` binds only to the *else* branch. That is what actually
    // ran — and it is sound, since the union branch already has exactly
    // numParts partitions — so the same behavior is spelled out explicitly:
    val coalesced =
      if (padLength > 0) {
        rdd.union(new EmptyIteratorRDD(sc, padLength))
      } else {
        rdd.coalesce(numParts)
      }
    // Greedy matching between the partitions of `coalesced` and those of
    // `reference`, by preferred location.
    // Since numParts is usually small, we use a simple O(n^2) algorithm here
    val locations = reference.partitions.map(reference.preferredLocations)
    val mapping: Array[Int] = new Array[Int](numParts)
    var i = 0
    while (i < numParts) {
      val preferred = coalesced.preferredLocations(coalesced.partitions(i))
      var j = 0
      while (j < numParts && (locations(j) == null || locations(j).intersect(preferred).isEmpty)) {
        j += 1
      }
      if (j < numParts) {
        locations(j) = null // slot claimed
        mapping(i) = j
      } else {
        mapping(i) = -1 // no co-located partition found; assigned below
      }
      i += 1
    }
    // Second pass: assign each unmatched partition the first unclaimed slot.
    i = 0
    while (i < numParts) {
      if (mapping(i) == -1) {
        var j = 0
        while (j < numParts && locations(j) == null) {
          j += 1
        }
        locations(j) = null
        mapping(i) = j
      }
      i += 1
    }
    new PartShuffledRDD(sc, coalesced, mapping)
  }

  /**
   * Builds an RDD over the OLA blocks of operator `opId` without a reference
   * RDD: partition i of the result concatenates partition i of every batch.
   * Empty (with `numPartitions` partitions) when there are no batches.
   */
  def create[T: ClassTag](
      sc: SparkContext,
      opId: Int,
      batchSizes: Array[(Int, Int)],
      numPartitions: Int): RDD[T] = {
    if (batchSizes.length == 0) {
      new EmptyIteratorRDD[T](sc, numPartitions)
    } else {
      // One BlockRDD per batch; all zipped partition-wise below.
      val rdds = batchSizes.map { case (size, batchId) =>
        val blockIds = Array.tabulate(size)(OLABlockId(opId, batchId, _).asInstanceOf[BlockId])
        new BlockRDD[T](sc, blockIds)
      }
      new ZippedPartitionsBaseRDD[T](sc, rdds) {
        override def compute(split: Partition, context: TaskContext): Iterator[T] = {
          val partitions = split.asInstanceOf[ZippedPartitionsPartition].partitions
          rdds.zip(partitions).toIterator
            .flatMap { case (rdd, part) => rdd.iterator(part, context) }.asInstanceOf[Iterator[T]]
        }
      }
    }
  }
}
| andrewor14/iolap | sql/hive/src/main/scala/org/apache/spark/sql/hive/online/utils.scala | Scala | apache-2.0 | 7,563 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.database.test.behavior
import java.time.Instant
import common.{StreamLogging, WskActorSystem}
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.database.{ActivationStore, CacheChangeNotification, UserContext}
import org.apache.openwhisk.core.database.test.behavior.ArtifactStoreTestUtil.storeAvailable
import org.apache.openwhisk.core.entity._
import org.scalatest.concurrent.{IntegrationPatience, ScalaFutures}
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers, Outcome}
import scala.collection.mutable.ListBuffer
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.concurrent.duration.DurationInt
import scala.language.postfixOps
import scala.util.{Random, Try}
/**
 * Shared behavior for [[ActivationStore]] test suites: skips tests when the
 * store is unavailable, tracks stored activations so they can be deleted again
 * after each test, and dumps captured logs when a test fails.
 */
trait ActivationStoreBehaviorBase
    extends FlatSpec
    with ScalaFutures
    with Matchers
    with StreamLogging
    with WskActorSystem
    with IntegrationPatience
    with BeforeAndAfterEach {

  protected implicit val notifier: Option[CacheChangeNotification] = None

  /** User context activations are stored under. */
  def context: UserContext

  /** Store implementation under test. */
  def activationStore: ActivationStore

  // Activations written during a test; removed again by cleanup() in afterEach.
  private val docsToDelete = ListBuffer[(UserContext, ActivationId)]()

  /** Human-readable name of the store implementation (used in skip messages). */
  def storeType: String

  protected def transId() = TransactionId(Random.alphanumeric.take(32).mkString)

  override def afterEach(): Unit = {
    cleanup()
    stream.reset()
  }

  override protected def withFixture(test: NoArgTest): Outcome = {
    assume(storeAvailable(storeAvailableCheck), s"$storeType not configured or available")
    val outcome = super.withFixture(test)
    if (outcome.isFailed) {
      // Join with a real newline so each captured log line prints on its own
      // line (previously "\\n" emitted a literal backslash-n separator).
      println(logLines.mkString("\n"))
    }
    outcome
  }

  protected def storeAvailableCheck: Try[Any] = Try(true)

  //~----------------------------------------< utility methods >

  /** Stores an activation and remembers it for deletion during cleanup. */
  protected def store(activation: WhiskActivation, context: UserContext)(
    implicit transid: TransactionId,
    notifier: Option[CacheChangeNotification]): DocInfo = {
    val doc = activationStore.store(activation, context).futureValue
    docsToDelete.append((context, ActivationId(activation.docid.asString)))
    doc
  }

  /** Creates an activation with a fixed 1s duration starting at `start`. */
  protected def newActivation(ns: String, actionName: String, start: Long): WhiskActivation = {
    WhiskActivation(
      EntityPath(ns),
      EntityName(actionName),
      Subject(),
      ActivationId.generate(),
      Instant.ofEpochMilli(start),
      Instant.ofEpochMilli(start + 1000))
  }

  /** Like [[newActivation]] but annotated with the given package binding. */
  protected def newBindingActivation(ns: String, actionName: String, binding: String, start: Long): WhiskActivation = {
    WhiskActivation(
      EntityPath(ns),
      EntityName(actionName),
      Subject(),
      ActivationId.generate(),
      Instant.ofEpochMilli(start),
      Instant.ofEpochMilli(start + 1000),
      annotations = Parameters(WhiskActivation.bindingAnnotation, binding))
  }

  /**
   * Deletes all documents added to gc queue.
   */
  def cleanup()(implicit timeout: Duration = 10 seconds): Unit = {
    implicit val tid: TransactionId = transId()
    // foreach (not map): results are discarded, and each delete is wrapped in
    // Try on purpose so one failing delete cannot abort cleanup of the rest.
    docsToDelete.foreach { e =>
      Try {
        Await.result(activationStore.delete(e._2, e._1), timeout)
      }
    }
    docsToDelete.clear()
  }
}
| style95/openwhisk | tests/src/test/scala/org/apache/openwhisk/core/database/test/behavior/ActivationStoreBehaviorBase.scala | Scala | apache-2.0 | 3,944 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*package org.scalatest.fixture
import org.scalatest._
import SharedHelpers.EventRecordingReporter
import scala.concurrent.{Promise, ExecutionContext, Future}
import org.scalatest.concurrent.SleepHelper
import scala.util.Success
class AsyncPropSpecSpec extends org.scalatest.FunSpec {
describe("AsyncPropSpec") {
it("can be used for tests that return Future under parallel async test execution") {
class ExampleSpec extends AsyncPropSpec with ParallelTestExecution {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
//SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
val a = 1
property("test 1") { fixture =>
Future {
assert(a == 1)
}
}
property("test 2") { fixture =>
Future {
assert(a == 2)
}
}
property("test 3") { fixture =>
Future {
pending
}
}
property("test 4") { fixture =>
Future {
cancel
}
}
ignore("test 5") { fixture =>
Future {
cancel
}
}
override def newInstance = new ExampleSpec
}
val rep = new EventRecordingReporter
val spec = new ExampleSpec
val status = spec.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
}
it("can be used for tests that did not return Future under parallel async test execution") {
class ExampleSpec extends AsyncPropSpec with ParallelTestExecution {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
//SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
val a = 1
property("test 1") { fixture =>
assert(a == 1)
}
property("test 2") { fixture =>
assert(a == 2)
}
property("test 3") { fixture =>
pending
}
property("test 4") { fixture =>
cancel
}
ignore("test 5") { fixture =>
cancel
}
override def newInstance = new ExampleSpec
}
val rep = new EventRecordingReporter
val spec = new ExampleSpec
val status = spec.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
}
it("should run tests that return Future in serial by default") {
@volatile var count = 0
class ExampleSpec extends AsyncPropSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
//SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
property("test 1") { fixture =>
Future {
SleepHelper.sleep(30)
assert(count == 0)
count = 1
succeed
}
}
property("test 2") { fixture =>
Future {
assert(count == 1)
SleepHelper.sleep(50)
count = 2
succeed
}
}
property("test 3") { fixture =>
Future {
assert(count == 2)
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived.length == 3)
}
it("should run tests that does not return Future in serial by default") {
@volatile var count = 0
class ExampleSpec extends AsyncPropSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
//SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
property("test 1") { fixture =>
SleepHelper.sleep(30)
assert(count == 0)
count = 1
succeed
}
property("test 2") { fixture =>
assert(count == 1)
SleepHelper.sleep(50)
count = 2
succeed
}
property("test 3") { fixture =>
assert(count == 2)
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived.length == 3)
}
// SKIP-SCALATESTJS,NATIVE-START
it("should run tests and its future in same main thread when use SerialExecutionContext") {
var mainThread = Thread.currentThread
var test1Thread: Option[Thread] = None
var test2Thread: Option[Thread] = None
var onCompleteThread: Option[Thread] = None
class ExampleSpec extends AsyncPropSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
property("test 1") { fixture =>
Future {
test1Thread = Some(Thread.currentThread)
succeed
}
}
property("test 2") { fixture =>
Future {
test2Thread = Some(Thread.currentThread)
succeed
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
status.whenCompleted { s =>
onCompleteThread = Some(Thread.currentThread)
}
status.waitUntilCompleted()
assert(test1Thread.isDefined)
assert(test1Thread.get == mainThread)
assert(test2Thread.isDefined)
assert(test2Thread.get == mainThread)
assert(onCompleteThread.isDefined)
assert(onCompleteThread.get == mainThread)
}
it("should run tests and its true async future in the same thread when use SerialExecutionContext") {
var mainThread = Thread.currentThread
@volatile var test1Thread: Option[Thread] = None
@volatile var test2Thread: Option[Thread] = None
var onCompleteThread: Option[Thread] = None
class ExampleSpec extends AsyncPropSpec {
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
property("test 1") { fixture =>
val promise = Promise[Assertion]
val timer = new java.util.Timer
timer.schedule(
new java.util.TimerTask {
def run(): Unit = {
promise.complete(Success(succeed))
}
},
1000
)
promise.future.map { s =>
test1Thread = Some(Thread.currentThread)
s
}
}
property("test 2") { fixture =>
val promise = Promise[Assertion]
val timer = new java.util.Timer
timer.schedule(
new java.util.TimerTask {
def run(): Unit = {
promise.complete(Success(succeed))
}
},
500
)
promise.future.map { s =>
test2Thread = Some(Thread.currentThread)
s
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
status.whenCompleted { s =>
onCompleteThread = Some(Thread.currentThread)
}
status.waitUntilCompleted()
assert(test1Thread.isDefined)
assert(test1Thread.get == mainThread)
assert(test2Thread.isDefined)
assert(test2Thread.get == mainThread)
assert(onCompleteThread.isDefined)
assert(onCompleteThread.get == mainThread)
}
it("should not run out of stack space with nested futures when using SerialExecutionContext") {
class ExampleSpec extends AsyncPropSpec {
// Note we get a StackOverflowError with the following execution
// context.
// override implicit def executionContext: ExecutionContext = new ExecutionContext { def execute(runnable: Runnable) = runnable.run; def reportFailure(cause: Throwable) = () }
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
def sum(xs: List[Int]): Future[Int] =
xs match {
case Nil => Future.successful(0)
case x :: xs => Future(x).flatMap(xx => sum(xs).map(xxx => xx + xxx))
}
property("test 1") { fixture =>
val fut: Future[Int] = sum((1 to 50000).toList)
fut.map(total => assert(total == 1250025000))
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
status.waitUntilCompleted()
assert(!rep.testSucceededEventsReceived.isEmpty)
}
// SKIP-SCALATESTJS,NATIVE-END
it("should run tests that returns Future and report their result in serial") {
class ExampleSpec extends AsyncPropSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
//SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
property("test 1") { fixture =>
Future {
SleepHelper.sleep(60)
succeed
}
}
property("test 2") { fixture =>
Future {
SleepHelper.sleep(30)
succeed
}
}
property("test 3") { fixture =>
Future {
succeed
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testStartingEventsReceived(0).testName == "test 1")
assert(rep.testStartingEventsReceived(1).testName == "test 2")
assert(rep.testStartingEventsReceived(2).testName == "test 3")
assert(rep.testSucceededEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testSucceededEventsReceived(1).testName == "test 2")
assert(rep.testSucceededEventsReceived(2).testName == "test 3")
}
it("should run tests that does not return Future and report their result in serial") {
class ExampleSpec extends AsyncPropSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
//SCALATESTNATIVE-ONLY implicit override def executionContext = scala.concurrent.ExecutionContext.Implicits.global
type FixtureParam = String
def withFixture(test: OneArgAsyncTest): FutureOutcome =
test("testing")
property("test 1") { fixture =>
SleepHelper.sleep(60)
succeed
}
property("test 2") { fixture =>
SleepHelper.sleep(30)
succeed
}
property("test 3") { fixture =>
succeed
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testStartingEventsReceived(0).testName == "test 1")
assert(rep.testStartingEventsReceived(1).testName == "test 2")
assert(rep.testStartingEventsReceived(2).testName == "test 3")
assert(rep.testSucceededEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testSucceededEventsReceived(1).testName == "test 2")
assert(rep.testSucceededEventsReceived(2).testName == "test 3")
}
}
}*/
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/fixture/AsyncPropSpecSpec.scala | Scala | apache-2.0 | 15,250 |
package scife.enumeration.testcases.scalacheck
// we will use this deprecated Scala class
/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
//package scala
//package collection
//package immutable
/** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`.
*
* Deprecated due to various performance bugs (see [[https://issues.scala-lang.org/browse/SI-5331 SI-5331]] for more information).
*
* @since 2.3
*/
//@deprecated("use `TreeMap` or `TreeSet` instead", "2.10.0")
//@SerialVersionUID(8691885935445612921L)
/** Purely functional red-black tree keyed by `A`, ordered through the abstract
 *  `isSmaller` relation.  This is a copy of Scala's deprecated `RedBlack` base
 *  class (pre-2.10 `TreeMap`/`TreeSet`, deprecated for performance bugs, see
 *  SI-5331), kept here as an enumeration test case.
 */
abstract class RedBlack[A] extends Serializable {
  /** Strict "less than" on keys; supplied by the concrete subclass. */
  def isSmaller(x: A, y: A): Boolean

  // Repaint the root black; restores the "root is black" invariant after upd/del.
  private def blacken[B](t: Tree[B]): Tree[B] = t match {
    case RedTree(k, v, l, r) => BlackTree(k, v, l, r)
    case t => t
  }
  // Build a node of the requested color.
  private def mkTree[B](isBlack: Boolean, k: A, v: B, l: Tree[B], r: Tree[B]) =
    if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r)

  /** Common interface of empty and non-empty trees. */
  abstract class Tree[+B] extends Serializable {
    def isEmpty: Boolean
    def isBlack: Boolean
    def lookup(x: A): Tree[B]
    // Public operations blacken the result of the internal recursive workers.
    def update[B1 >: B](k: A, v: B1): Tree[B1] = blacken(upd(k, v))
    def delete(k: A): Tree[B] = blacken(del(k))
    def range(from: Option[A], until: Option[A]): Tree[B] = blacken(rng(from, until))
    def foreach[U](f: (A, B) => U)
    def toStream: Stream[(A,B)]
    def iterator: Iterator[(A, B)]
    def upd[B1 >: B](k: A, v: B1): Tree[B1]
    def del(k: A): Tree[B]
    def smallest: NonEmpty[B]
    def rng(from: Option[A], until: Option[A]): Tree[B]
    def first : A
    def last : A
    def count : Int
  }
  /** A node with a key, a value and two subtrees. */
  abstract class NonEmpty[+B] extends Tree[B] with Serializable {
    def isEmpty = false
    def key: A
    def value: B
    def left: Tree[B]
    def right: Tree[B]
    def lookup(k: A): Tree[B] =
      if (isSmaller(k, key)) left.lookup(k)
      else if (isSmaller(key, k)) right.lookup(k)
      else this
    // Okasaki-style rebalancing after an insert into the left subtree:
    // resolve a red-red violation in l, else rebuild the node unchanged.
    private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[B1], d: Tree[B1])/*: NonEmpty[B1]*/ = l match {
      case RedTree(y, yv, RedTree(x, xv, a, b), c) =>
        RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
      case RedTree(x, xv, a, RedTree(y, yv, b, c)) =>
        RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
      case _ =>
        mkTree(isBlack, z, zv, l, d)
    }
    // Mirror image of balanceLeft for inserts into the right subtree.
    private[this] def balanceRight[B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[B1], r: Tree[B1])/*: NonEmpty[B1]*/ = r match {
      case RedTree(z, zv, RedTree(y, yv, b, c), d) =>
        RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
      case RedTree(y, yv, b, RedTree(z, zv, c, d)) =>
        RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
      case _ =>
        mkTree(isBlack, x, xv, a, r)
    }
    def upd[B1 >: B](k: A, v: B1): Tree[B1] = {
      if (isSmaller(k, key)) balanceLeft(isBlack, key, value, left.upd(k, v), right)
      else if (isSmaller(key, k)) balanceRight(isBlack, key, value, left, right.upd(k, v))
      else mkTree(isBlack, k, v, left, right)
    }
    // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees
    // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html
    def del(k: A): Tree[B] = {
      // Re-establish invariants when one of tl/tr has an extra red.
      def balance(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
        case (RedTree(y, yv, a, b), RedTree(z, zv, c, d)) =>
          RedTree(x, xv, BlackTree(y, yv, a, b), BlackTree(z, zv, c, d))
        case (RedTree(y, yv, RedTree(z, zv, a, b), c), d) =>
          RedTree(y, yv, BlackTree(z, zv, a, b), BlackTree(x, xv, c, d))
        case (RedTree(y, yv, a, RedTree(z, zv, b, c)), d) =>
          RedTree(z, zv, BlackTree(y, yv, a, b), BlackTree(x, xv, c, d))
        case (a, RedTree(y, yv, b, RedTree(z, zv, c, d))) =>
          RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d))
        case (a, RedTree(y, yv, RedTree(z, zv, b, c), d)) =>
          RedTree(z, zv, BlackTree(x, xv, a, b), BlackTree(y, yv, c, d))
        case (a, b) =>
          BlackTree(x, xv, a, b)
      }
      // "Sub-blacken": a black node becomes red; anything else is a defect.
      def subl(t: Tree[B]) = t match {
        case BlackTree(x, xv, a, b) => RedTree(x, xv, a, b)
        case _ => sys.error("Defect: invariance violation; expected black, got "+t)
      }
      // Fix up after the left subtree lost one black node.
      def balLeft(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
        case (RedTree(y, yv, a, b), c) =>
          RedTree(x, xv, BlackTree(y, yv, a, b), c)
        case (bl, BlackTree(y, yv, a, b)) =>
          balance(x, xv, bl, RedTree(y, yv, a, b))
        case (bl, RedTree(y, yv, BlackTree(z, zv, a, b), c)) =>
          RedTree(z, zv, BlackTree(x, xv, bl, a), balance(y, yv, b, subl(c)))
        case _ => sys.error("Defect: invariance violation at "+right)
      }
      // Fix up after the right subtree lost one black node.
      def balRight(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match {
        case (a, RedTree(y, yv, b, c)) =>
          RedTree(x, xv, a, BlackTree(y, yv, b, c))
        case (BlackTree(y, yv, a, b), bl) =>
          balance(x, xv, RedTree(y, yv, a, b), bl)
        case (RedTree(y, yv, a, BlackTree(z, zv, b, c)), bl) =>
          RedTree(z, zv, balance(y, yv, subl(a), b), BlackTree(x, xv, c, bl))
        case _ => sys.error("Defect: invariance violation at "+left)
      }
      def delLeft = left match {
        case _: BlackTree[_] => balLeft(key, value, left.del(k), right)
        case _ => RedTree(key, value, left.del(k), right)
      }
      def delRight = right match {
        case _: BlackTree[_] => balRight(key, value, left, right.del(k))
        case _ => RedTree(key, value, left, right.del(k))
      }
      // Join two subtrees whose keys are already correctly ordered.
      def append(tl: Tree[B], tr: Tree[B]): Tree[B] = (tl, tr) match {
        case (Empty, t) => t
        case (t, Empty) => t
        case (RedTree(x, xv, a, b), RedTree(y, yv, c, d)) =>
          append(b, c) match {
            case RedTree(z, zv, bb, cc) => RedTree(z, zv, RedTree(x, xv, a, bb), RedTree(y, yv, cc, d))
            case bc => RedTree(x, xv, a, RedTree(y, yv, bc, d))
          }
        case (BlackTree(x, xv, a, b), BlackTree(y, yv, c, d)) =>
          append(b, c) match {
            case RedTree(z, zv, bb, cc) => RedTree(z, zv, BlackTree(x, xv, a, bb), BlackTree(y, yv, cc, d))
            case bc => balLeft(x, xv, a, BlackTree(y, yv, bc, d))
          }
        case (a, RedTree(x, xv, b, c)) => RedTree(x, xv, append(a, b), c)
        case (RedTree(x, xv, a, b), c) => RedTree(x, xv, a, append(b, c))
      }
      // RedBlack is neither A : Ordering[A], nor A <% Ordered[A]
      k match {
        case _ if isSmaller(k, key) => delLeft
        case _ if isSmaller(key, k) => delRight
        case _ => append(left, right)
      }
    }
    def smallest: NonEmpty[B] = if (left.isEmpty) this else left.smallest
    def toStream: Stream[(A,B)] =
      left.toStream ++ Stream((key,value)) ++ right.toStream
    def iterator: Iterator[(A, B)] =
      left.iterator ++ Iterator.single(Tuple2(key, value)) ++ right.iterator
    // In-order traversal.
    def foreach[U](f: (A, B) => U) {
      left foreach f
      f(key, value)
      right foreach f
    }
    // Restrict the tree to keys in [from, until); None means unbounded.
    override def rng(from: Option[A], until: Option[A]): Tree[B] = {
      if (from == None && until == None) return this
      if (from != None && isSmaller(key, from.get)) return right.rng(from, until);
      if (until != None && (isSmaller(until.get,key) || !isSmaller(key,until.get)))
        return left.rng(from, until);
      val newLeft = left.rng(from, None)
      val newRight = right.rng(None, until)
      if ((newLeft eq left) && (newRight eq right)) this
      else if (newLeft eq Empty) newRight.upd(key, value);
      else if (newRight eq Empty) newLeft.upd(key, value);
      else rebalance(newLeft, newRight)
    }
    // The zipper returned might have been traversed left-most (always the left child)
    // or right-most (always the right child). Left trees are traversed right-most,
    // and right trees are traversed leftmost.
    // Returns the zipper for the side with deepest black nodes depth, a flag
    // indicating whether the trees were unbalanced at all, and a flag indicating
    // whether the zipper was traversed left-most or right-most.
    // If the trees were balanced, returns an empty zipper
    private[this] def compareDepth(left: Tree[B], right: Tree[B]): (List[NonEmpty[B]], Boolean, Boolean, Int) = {
      // Once a side is found to be deeper, unzip it to the bottom
      def unzip(zipper: List[NonEmpty[B]], leftMost: Boolean): List[NonEmpty[B]] = {
        val next = if (leftMost) zipper.head.left else zipper.head.right
        next match {
          case node: NonEmpty[_] => unzip(node :: zipper, leftMost)
          case Empty => zipper
        }
      }
      // Unzip left tree on the rightmost side and right tree on the leftmost side until one is
      // found to be deeper, or the bottom is reached
      def unzipBoth(left: Tree[B],
                    right: Tree[B],
                    leftZipper: List[NonEmpty[B]],
                    rightZipper: List[NonEmpty[B]],
                    smallerDepth: Int): (List[NonEmpty[B]], Boolean, Boolean, Int) = (left, right) match {
        case (l @ BlackTree(_, _, _, _), r @ BlackTree(_, _, _, _)) =>
          unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth + 1)
        case (l @ RedTree(_, _, _, _), r @ RedTree(_, _, _, _)) =>
          unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth)
        case (_, r @ RedTree(_, _, _, _)) =>
          unzipBoth(left, r.left, leftZipper, r :: rightZipper, smallerDepth)
        case (l @ RedTree(_, _, _, _), _) =>
          unzipBoth(l.right, right, l :: leftZipper, rightZipper, smallerDepth)
        case (Empty, Empty) =>
          (Nil, true, false, smallerDepth)
        case (Empty, r @ BlackTree(_, _, _, _)) =>
          val leftMost = true
          (unzip(r :: rightZipper, leftMost), false, leftMost, smallerDepth)
        case (l @ BlackTree(_, _, _, _), Empty) =>
          val leftMost = false
          (unzip(l :: leftZipper, leftMost), false, leftMost, smallerDepth)
      }
      unzipBoth(left, right, Nil, Nil, 0)
    }
    // Rejoin the two halves produced by rng() into a valid red-black tree.
    private[this] def rebalance(newLeft: Tree[B], newRight: Tree[B]) = {
      // This is like drop(n-1), but only counting black nodes
      def findDepth(zipper: List[NonEmpty[B]], depth: Int): List[NonEmpty[B]] = zipper match {
        case BlackTree(_, _, _, _) :: tail =>
          if (depth == 1) zipper else findDepth(tail, depth - 1)
        case _ :: tail => findDepth(tail, depth)
        case Nil => sys.error("Defect: unexpected empty zipper while computing range")
      }
      // Blackening the smaller tree avoids balancing problems on union;
      // this can't be done later, though, or it would change the result of compareDepth
      val blkNewLeft = blacken(newLeft)
      val blkNewRight = blacken(newRight)
      val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight)
      if (levelled) {
        BlackTree(key, value, blkNewLeft, blkNewRight)
      } else {
        val zipFrom = findDepth(zipper, smallerDepth)
        val union = if (leftMost) {
          RedTree(key, value, blkNewLeft, zipFrom.head)
        } else {
          RedTree(key, value, zipFrom.head, blkNewRight)
        }
        val zippedTree = zipFrom.tail.foldLeft(union: Tree[B]) { (tree, node) =>
          if (leftMost)
            balanceLeft(node.isBlack, node.key, node.value, tree, node.right)
          else
            balanceRight(node.isBlack, node.key, node.value, node.left, tree)
        }
        zippedTree
      }
    }
    def first = if (left .isEmpty) key else left.first
    def last = if (right.isEmpty) key else right.last
    def count = 1 + left.count + right.count
  }
  /** The empty tree; considered black by convention. */
  case object Empty extends Tree[Nothing] {
    def isEmpty = true
    def isBlack = true
    def lookup(k: A): Tree[Nothing] = this
    def upd[B](k: A, v: B): Tree[B] = RedTree(k, v, Empty, Empty)
    def del(k: A): Tree[Nothing] = this
    def smallest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map")
    def iterator: Iterator[(A, Nothing)] = Iterator.empty
    def toStream: Stream[(A,Nothing)] = Stream.empty
    def foreach[U](f: (A, Nothing) => U) {}
    def rng(from: Option[A], until: Option[A]) = this
    def first = throw new NoSuchElementException("empty map")
    def last = throw new NoSuchElementException("empty map")
    def count = 0
  }
  /** A red node. */
  case class RedTree[+B](override val key: A,
                         override val value: B,
                         override val left: Tree[B],
                         override val right: Tree[B]) extends NonEmpty[B] {
    def isBlack = false
  }
  /** A black node. */
  case class BlackTree[+B](override val key: A,
                           override val value: B,
                           override val left: Tree[B],
                           override val right: Tree[B]) extends NonEmpty[B] {
    def isBlack = true
  }
}
package combinators
/*
Copyright [2013] [Tindr Solutions]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import scala.scalajs.js
//import org.scalajs.dom._
import js.Dynamic.{ global => g }
import Html._
import Markup.{static, addMarkup}
import js.annotation.JSExport
/** Declarative description of a form producing a value of type `A`;
 *  interpreted into markup by [[Form.render]].
 */
sealed trait Form[A] {
  /** Combines two forms into one whose value is the pair of both values. */
  def +[B](b: Form[B]) = Pair(this, b)
  /** Fires `onChange` whenever this form's value changes. */
  def listener(onChange: A => Unit) = Listener(onChange, this)
  /** Changes the value type via an initialize/validate pair. */
  def map[B](initialize: B => A, validate: A => B) = Map(initialize, validate, this)
  /** Attaches a value-dependent note (e.g. a validation hint) to the form. */
  def note(html: A => Html) = Note(this, html)
}
/** A labeled primitive input element. */
case class Labeled[A](label: String, element: Element[A]) extends Form[A]
/** Two forms side by side; value is the pair of their values. */
case class Pair[A, B](a: Form[A], b: Form[B]) extends Form[(A,B)]
/** Wraps a form, invoking `onChange` on every value change. */
case class Listener[A](onChange: A => Unit, a: Form[A]) extends Form[A]
/** Bidirectional mapping between the inner form's value type `A` and `B`. */
case class Map[A, B](initialize: B => A, validate: A => B, a: Form[A]) extends Form[B]
/** A form plus a value-dependent note rendered next to it. */
case class Note[A](a: Form[A], html: A => Html) extends Form[A]
/** A primitive input element yielding a value of type `A`. */
sealed trait Element[A]
case class TextField(numChars: Int) extends Element[String]
case class PasswordField(numChars: Int) extends Element[String]
/** Interpreter turning a [[Form]] description into live [[Markup]]. */
object Form {
  /** Recursively renders `f`, starting from `initial` and reporting new values
   *  to `onChange`.
   */
  def render[A](f: Form[A], initial: A, onChange: A => Unit) : Markup = f match {
    case Labeled(l, e) => static(row(text(l), e match {
      case TextField(n) => textInput(n, initial, onChange)
      case PasswordField(n) => passwordInput(n, initial, onChange)
    }))
    case Pair(a, b) => renderPair(a, b, initial, onChange)
    case Listener(l, a) => render(a, initial, (n : A) => {l(n); onChange(n)})
    case Map(i, v, a) => renderMap(i, v, a, initial, onChange)
    case Note(a, html) => renderNote(a, html, initial, onChange)
  }
  // Lay out the given cells as one table row.
  def row(e: Html*) = tr()(e.map(td()(_)) : _*)
  def renderMap[A, B](i: B => A, v: A => B, a: Form[A], initial: B, onChange: B => Unit) =
    render(a, i(initial), onChange compose v)
  // Renders both halves; the shared mutable `v` keeps the latest value of each
  // side so a change on either side can report the complete pair.
  def renderPair[A, B](a: Form[A], b: Form[B], initial: (A, B), onChange: ((A, B)) => Unit) = {
    var v = initial;
    Markup.div()(
      render(a, initial._1, (n : A) => {v = (n, v._2); onChange(v)}),
      render(b, initial._2, (n : B) => {v = (v._1, n); onChange(v)}))
  }
  // Renders the form followed by its note row; on each change, the note row
  // (child 1 of 2 in the div) is patched in place via an HtmlDiff.
  def renderNote[A](a: Form[A], html: A => Html, initial: A, onChange: A => Unit) = {
    def note(a: A) = row(text(""), html(a))
    Markup.withOnDiff(onDiff => Markup.div()(
      render(a, initial, (n:A) => {
        onDiff(HtmlDiff.atChild(HtmlDiff(Some(note(n)), Nil), 1, 2))
        onChange(n)
      }),
      static(note(initial))))
  }
  /** For a pair form of equal types: value is Some(x) iff both sides agree. */
  implicit class Same[A](f: Form[(A, A)]) {
    def same(default: A) = Map[(A, A), Option[A]](
      oa => {val a = oa.getOrElse(default); (a, a)},
      t => if (t._1 == t._2) Some(t._1) else None, f)
  }
  /** Convenience render with a no-op change handler. */
  implicit class Render[A](f: Form[A]) {
    def render(initial: A) : Markup = Form.render(f, initial, (_:A) => {})
  }
}
/** The value assembled by the demo registration form. */
case class User(name: String, password:String, email: String)
@JSExport
object Combinators {
  /**
   * A registration form built from the combinators: username + password (with
   * a strength note), and e-mail typed twice (with a mismatch note). The
   * resulting value is Some(User) only when both e-mail fields agree.
   */
  val form : Form[Option[User]] =
    ((Labeled("Username", TextField(20)) +
      Labeled("Password", PasswordField(20)).note(passwordStrength _)) +
      (Labeled("E-Mail", TextField(30)) +
        Labeled("Retype E-Mail", TextField(30)))
        .same("").note(m => text(if (m.isEmpty) "E-Mails do not match" else "")))
      .map(
        user => {
          val u = user.getOrElse(User("","",""))
          ((u.name, u.password), Some(u.email))},
        {case ((name, pw), email) => email.map(User(name, pw,_))})
  /** Maps a password to a rough strength label based on length and distinct characters. */
  def passwordStrength(p: String) = text(
    List((p.length < 8) -> "Too short",
         (p.toSet.size < 5) -> "Weak",
         (p.toSet.size < 7) -> "Fair",
         (p.toSet.size < 9) -> "Good")
      .find(_._1).fold("Strong")(_._2))
  /** The full demo page: a heading, the form (logging each change), and a live value display. */
  val page : Markup = Markup.p()(
    static(p()(text("A simple form:"))),
    form.listener(v => g.console.log(v.toString))
      .note(v => div()(br(), text("Current form value: " + v.toString)))
      .render(None)
  )
  @JSExport
  def main() : Unit = {
    // NOTE(review): the handle returned by addMarkup is never used — the val
    // could be dropped unless a later change needs it.
    val p = addMarkup(doc.getElementById("playground"), page)
  }
}
| boldradius/form-combinators | src/main/scala/example/Combinators.scala | Scala | apache-2.0 | 4,384 |
/*
* Copyright 2014 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt.Keys._
import sbt.{Configuration, _}
import uk.gov.hmrc.gitstamp.GitStampPlugin
object DefaultBuildSettings {
  /** Setting key naming the JVM bytecode target handed to scalac (e.g. "jvm-1.8"). */
  lazy val targetJvm = settingKey[String]("The version of the JVM the build targets")
  /**
   * Common Scala compiler settings.
   *
   * Fix: the original evaluated `targetJvm := "jvm-1.8"` as a standalone
   * expression inside the block. `:=` merely *builds* a `Setting` value, so the
   * result was silently discarded and the assignment never reached the build,
   * leaving `targetJvm.value` below unresolved. The setting now travels in the
   * returned sequence like the others.
   */
  lazy val scalaSettings : Seq[Setting[_]] =
    Seq(
      targetJvm := "jvm-1.8",
      scalaVersion := "2.12.10",
      scalacOptions ++= Seq(
        "-unchecked",
        "-deprecation",
        "-Xlint",
        "-language:_",
        "-target:" + targetJvm.value,
        "-Xmax-classfile-name", "100",
        "-encoding", "UTF-8"
      )
    )
  /**
   * Organisation-wide defaults: organisation id, REPL preamble, sequential
   * tests, snapshot detection, git-stamp settings and (optionally) ScalaTest
   * report options for the Test configuration.
   */
  def defaultSettings(addScalaTestReports: Boolean = true) : Seq[Setting[_]] =
    Seq(
      organization := "uk.gov.hmrc",
      // Fix: the original wrote `"import " + organization + "._"`, which
      // concatenates the SettingKey's toString, not its value — the REPL would
      // receive a nonsensical import line. Use the resolved value instead.
      console / initialCommands := "import " + organization.value + "._",
      Test / parallelExecution := false,
      // NOTE(review): the doubled backslashes below look like an over-escaping
      // artifact — verify the intended pattern before altering the string.
      isSnapshot := version.value.matches("([\\\\w]+\\\\-SNAPSHOT)|([\\\\.\\\\w]+)\\\\-([\\\\d]+)\\\\-([\\\\w]+)")
    ) ++ GitStampPlugin.gitStampSettings ++
    (if (addScalaTestReports) addTestReportOption(Test) else Seq.empty)
  /**
   * Adds ScalaTest stdout/XML/HTML report arguments for the given
   * configuration; reports land under `target/<directory>`.
   */
  def addTestReportOption(conf: Configuration, directory: String = "test-reports") = {
    val testResultDir = "target/" + directory
    conf / testOptions += Tests.Argument("-o", "-u", testResultDir, "-h", testResultDir + "/html-report")
  }
}
| hmrc/sbt-settings | project/DefaultBuildSettings.scala | Scala | apache-2.0 | 1,867 |
package jt.littlepieces
object Combine4 {
  /**
   * An arithmetic operation over two pooled numbers. `f` yields None when the
   * operation is not legal for the operands (non-exact or zero division).
   */
  sealed class Operator(val f: (Int,Int) => Option[Int])
  case object Plus extends Operator((a,b) => Some(a + b))
  case object Times extends Operator((a,b) => Some(a * b))
  case object RDiv extends Operator((a,b) => if (a != 0 && b % a == 0) Some(b / a) else None)
  case object Div extends Operator((a,b) => if (b != 0 && a % b == 0) Some(a / b) else None)
  case object RMinus extends Operator((a,b) => Some(b - a))
  case object Minus extends Operator((a,b) => Some(a - b))
  /** All candidate operations, tried in this fixed order. */
  val operators = List(Plus, Times, RDiv, Div, RMinus, Minus)
  /** One applied operation: both operands, the operator, and the pool it was drawn from. */
  case class Step(left: Int, right: Int, op: Operator, items: Seq[Int])
  /**
   * Depth-first search for a sequence of operations that combines `items` into
   * `target`. Returns the steps in application order, or None when no
   * combination of the available operators reaches the target.
   */
  def find(items: Seq[Int], target: Int, result: Seq[Step] = Seq()) : Option[Seq[Step]] = {
    assert(items.nonEmpty)
    if (items.size == 1) {
      if (items.head == target) Some(result) else None
    } else {
      // The pool with the values at the two picked indices removed.
      def without(pool: Seq[Int], picked: Seq[Int]): Seq[Int] =
        pool.zipWithIndex.collect { case (value, idx) if !picked.contains(idx) => value }
      val attempts =
        for {
          picked <- items.indices.combinations(2)
          left = items(picked(0))
          right = items(picked(1))
          op <- operators.iterator
          combined <- op.f(left, right).iterator
          if combined != 0 // zero intermediate results are discarded
        } yield find(combined +: without(items, picked), target, Step(left, right, op, items) +: result)
      // Lazily take the first successful branch; steps were accumulated in reverse.
      attempts.collectFirst { case Some(steps) => steps }.map(_.reverse)
    }
  }
  def main(args: Array[String]): Unit = {
    println(find(Seq(1,2,4,6), 24))
    println(find(Seq(1,1,1,1), 24))
    println(find(Seq(7,8,8,13), 24))
    println(find(Seq(2,3,12,12),24))
  }
}
| jaety/little-pieces | scala/snippets/src/main/scala/jt/littlepieces/Combine4.scala | Scala | bsd-3-clause | 1,541 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx.impl
import org.apache.spark.graphx._
import org.apache.spark.graphx.util.collection.GraphXPrimitiveKeyOpenHashMap
import org.apache.spark.util.collection.{BitSet, PrimitiveVector}
private[graphx]
object RoutingTablePartition {
  /**
   * A message from an edge partition to a vertex specifying the position in which the edge
   * partition references the vertex (src, dst, or both). The edge partition is encoded in the lower
   * 30 bits of the Int, and the position is encoded in the upper 2 bits of the Int.
   */
  type RoutingTableMessage = (VertexId, Int)
  // Packs (pid, position) into one Int: position in the top 2 bits, pid in the low 30.
  private def toMessage(vid: VertexId, pid: PartitionID, position: Byte): RoutingTableMessage = {
    val positionUpper2 = position << 30
    val pidLower30 = pid & 0x3FFFFFFF
    (vid, positionUpper2 | pidLower30)
  }
  // Accessors for the packed message fields (inverse of toMessage).
  private def vidFromMessage(msg: RoutingTableMessage): VertexId = msg._1
  private def pidFromMessage(msg: RoutingTableMessage): PartitionID = msg._2 & 0x3FFFFFFF
  private def positionFromMessage(msg: RoutingTableMessage): Byte = (msg._2 >> 30).toByte
  /** A routing table with no entries (zero edge partitions). */
  val empty: RoutingTablePartition = new RoutingTablePartition(Array.empty)
  /** Generate a `RoutingTableMessage` for each vertex referenced in `edgePartition`. */
  def edgePartitionToMsgs(pid: PartitionID, edgePartition: EdgePartition[_, _])
    : Iterator[RoutingTableMessage] = {
    // Determine which positions each vertex id appears in using a map where the low 2 bits
    // represent src and dst
    val map = new GraphXPrimitiveKeyOpenHashMap[VertexId, Byte]
    edgePartition.iterator.foreach { e =>
      map.changeValue(e.srcId, 0x1, (b: Byte) => (b | 0x1).toByte)
      map.changeValue(e.dstId, 0x2, (b: Byte) => (b | 0x2).toByte)
    }
    map.iterator.map { vidAndPosition =>
      val vid = vidAndPosition._1
      val position = vidAndPosition._2
      toMessage(vid, pid, position)
    }
  }
  /** Build a `RoutingTablePartition` from `RoutingTableMessage`s. */
  def fromMsgs(numEdgePartitions: Int, iter: Iterator[RoutingTableMessage])
    : RoutingTablePartition = {
    // Per edge partition: the referenced vertex ids plus parallel src/dst flags.
    val pid2vid = Array.fill(numEdgePartitions)(new PrimitiveVector[VertexId])
    val srcFlags = Array.fill(numEdgePartitions)(new PrimitiveVector[Boolean])
    val dstFlags = Array.fill(numEdgePartitions)(new PrimitiveVector[Boolean])
    for (msg <- iter) {
      val vid = vidFromMessage(msg)
      val pid = pidFromMessage(msg)
      val position = positionFromMessage(msg)
      pid2vid(pid) += vid
      srcFlags(pid) += (position & 0x1) != 0
      dstFlags(pid) += (position & 0x2) != 0
    }
    // Compact each entry: trimmed vid array plus src/dst membership as BitSets
    // indexed by the vid's position in that array.
    new RoutingTablePartition(pid2vid.zipWithIndex.map {
      case (vids, pid) => (vids.trim().array, toBitSet(srcFlags(pid)), toBitSet(dstFlags(pid)))
    })
  }
  /** Compact the given vector of Booleans into a BitSet. */
  private def toBitSet(flags: PrimitiveVector[Boolean]): BitSet = {
    val bitset = new BitSet(flags.size)
    var i = 0
    while (i < flags.size) {
      if (flags(i)) {
        bitset.set(i)
      }
      i += 1
    }
    bitset
  }
}
/**
 * Stores the locations of edge-partition join sites for each vertex attribute in a particular
 * vertex partition. This provides routing information for shipping vertex attributes to edge
 * partitions.
 */
private[graphx]
class RoutingTablePartition(
    private val routingTable: Array[(Array[VertexId], BitSet, BitSet)]) extends Serializable {
  /** The maximum number of edge partitions this `RoutingTablePartition` is built to join with. */
  val numEdgePartitions: Int = routingTable.length
  /** Returns the number of vertices that will be sent to the specified edge partition. */
  def partitionSize(pid: PartitionID): Int = {
    val (vids, _, _) = routingTable(pid)
    vids.length
  }
  /** Returns an iterator over all vertex ids stored in this `RoutingTablePartition`. */
  def iterator: Iterator[VertexId] =
    for {
      entry <- routingTable.iterator
      vid <- entry._1.iterator
    } yield vid
  /** Returns a new RoutingTablePartition reflecting a reversal of all edge directions. */
  def reverse: RoutingTablePartition =
    // Swapping the src and dst bit sets per entry reverses every edge's role.
    new RoutingTablePartition(routingTable.map(entry => (entry._1, entry._3, entry._2)))
  /**
   * Runs `f` on each vertex id to be sent to the specified edge partition. Vertex ids can be
   * filtered by the position they have in the edge partition.
   */
  def foreachWithinEdgePartition
      (pid: PartitionID, includeSrc: Boolean, includeDst: Boolean)
      (f: VertexId => Unit): Unit = {
    val (vids, srcPositions, dstPositions) = routingTable(pid)
    (includeSrc, includeDst) match {
      case (true, true) =>
        // No position filtering required — visit every id directly (fast path).
        vids.iterator.foreach(f)
      case (false, false) =>
        () // Neither role requested: nothing to do.
      case _ =>
        // Exactly one role requested: walk only the matching positions.
        val positions = if (includeSrc) srcPositions else dstPositions
        positions.iterator.foreach(idx => f(vids(idx)))
    }
  }
}
| maropu/spark | graphx/src/main/scala/org/apache/spark/graphx/impl/RoutingTablePartition.scala | Scala | apache-2.0 | 5,668 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.stream.table
import org.apache.flink.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.api.{DataTypes, TableException, Tumble}
import org.apache.flink.table.planner.factories.TestValuesTableFactory
import org.apache.flink.table.planner.factories.TestValuesTableFactory.changelogRow
import org.apache.flink.table.planner.runtime.utils.StreamingTestBase
import org.apache.flink.table.planner.runtime.utils.TestData.{nullData4, smallTupleData3, tupleData3, tupleData5}
import org.apache.flink.util.ExceptionUtils
import org.junit.Assert.{assertEquals, assertFalse, assertTrue, fail}
import org.junit.Test
import java.lang.{Long => JLong}
import java.math.{BigDecimal => JBigDecimal}
import scala.collection.JavaConversions._
/**
 * Integration tests for streaming table sinks: append-only, retract and upsert
 * sinks backed by the test "values" connector, plus changelog round-trips,
 * multi-rowtime rejection, decimal handling and NOT NULL enforcement.
 */
class TableSinkITCase extends StreamingTestBase {
  // Insert-only sink fed by a tumbling-window aggregation.
  @Test
  def testAppendSinkOnAppendTable(): Unit = {
    val t = env.fromCollection(tupleData3)
      .assignAscendingTimestamps(_._1.toLong)
      .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime)
    tEnv.executeSql(
      s"""
         |CREATE TABLE appendSink (
         |  `t` TIMESTAMP(3),
         |  `icnt` BIGINT,
         |  `nsum` BIGINT
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'true'
         |)
         |""".stripMargin)
    t.window(Tumble over 5.millis on 'rowtime as 'w)
      .groupBy('w)
      .select('w.end as 't, 'id.count as 'icnt, 'num.sum as 'nsum)
      .insertInto("appendSink")
    tEnv.execute("job name")
    val result = TestValuesTableFactory.getResults("appendSink")
    val expected = List(
      "1970-01-01T00:00:00.005,4,8",
      "1970-01-01T00:00:00.010,5,18",
      "1970-01-01T00:00:00.015,5,24",
      "1970-01-01T00:00:00.020,5,29",
      "1970-01-01T00:00:00.025,2,12")
    assertEquals(expected.sorted, result.sorted)
  }
  // Verifies a nested ROW type survives the insert-only sink unchanged.
  @Test
  def testAppendSinkWithNestedRow(): Unit = {
    val t = env.fromCollection(smallTupleData3)
      .toTable(tEnv, 'id, 'num, 'text)
    tEnv.createTemporaryView("src", t)
    tEnv.executeSql(
      s"""
         |CREATE TABLE appendSink (
         |  `t` INT,
         |  `item` ROW<`num` BIGINT, `text` STRING>
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'true'
         |)
         |""".stripMargin)
    tEnv.sqlUpdate("INSERT INTO appendSink SELECT id, ROW(num, text) FROM src")
    tEnv.execute("job name")
    val result = TestValuesTableFactory.getResults("appendSink")
    val expected = List(
      "1,1,Hi",
      "2,2,Hello",
      "3,2,Hello world")
    assertEquals(expected.sorted, result.sorted)
  }
  // An inner join of two append streams still produces an append-only result.
  @Test
  def testAppendSinkOnAppendTableForInnerJoin(): Unit = {
    val ds1 = env.fromCollection(smallTupleData3).toTable(tEnv, 'a, 'b, 'c)
    val ds2 = env.fromCollection(tupleData5).toTable(tEnv, 'd, 'e, 'f, 'g, 'h)
    tEnv.executeSql(
      s"""
         |CREATE TABLE appendSink (
         |  `c` STRING,
         |  `g` STRING
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'true'
         |)
         |""".stripMargin)
    ds1.join(ds2).where('b === 'e)
      .select('c, 'g)
      .insertInto("appendSink")
    tEnv.execute("job name")
    val result = TestValuesTableFactory.getResults("appendSink")
    val expected = List("Hi,Hallo", "Hello,Hallo Welt", "Hello world,Hallo Welt")
    assertEquals(expected.sorted, result.sorted)
  }
  // Non-windowed group-by produces updates; the retract sink must absorb them.
  @Test
  def testRetractSinkOnUpdatingTable(): Unit = {
    val t = env.fromCollection(tupleData3)
      .assignAscendingTimestamps(_._1.toLong)
      .toTable(tEnv, 'id, 'num, 'text)
    tEnv.executeSql(
      s"""
         |CREATE TABLE retractSink (
         |  `len` INT,
         |  `icnt` BIGINT,
         |  `nsum` BIGINT
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'false'
         |)
         |""".stripMargin)
    t.select('id, 'num, 'text.charLength() as 'len)
      .groupBy('len)
      .select('len, 'id.count as 'icnt, 'num.sum as 'nsum)
      .insertInto("retractSink")
    tEnv.execute("job name")
    val result = TestValuesTableFactory.getResults("retractSink")
    val expected = List(
      "2,1,1", "5,1,2", "11,1,2",
      "25,1,3", "10,7,39", "14,1,3", "9,9,41")
    assertEquals(expected.sorted, result.sorted)
  }
  // A windowed aggregation is append-only; the retract sink must see no retractions.
  @Test
  def testRetractSinkOnAppendTable(): Unit = {
    val t = env.fromCollection(tupleData3)
      .assignAscendingTimestamps(_._1.toLong)
      .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime)
    tEnv.executeSql(
      s"""
         |CREATE TABLE retractSink (
         |  `t` TIMESTAMP(3),
         |  `icnt` BIGINT,
         |  `nsum` BIGINT
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'false'
         |)
         |""".stripMargin)
    t.window(Tumble over 5.millis on 'rowtime as 'w)
      .groupBy('w)
      .select('w.end as 't, 'id.count as 'icnt, 'num.sum as 'nsum)
      .insertInto("retractSink")
    tEnv.execute("job name")
    val rawResult = TestValuesTableFactory.getRawResults("retractSink")
    assertFalse(
      "Received retraction messages for append only table",
      rawResult.exists(_.startsWith("-"))) // maybe -U or -D
    val result = TestValuesTableFactory.getResults("retractSink")
    val expected = List(
      "1970-01-01T00:00:00.005,4,8",
      "1970-01-01T00:00:00.010,5,18",
      "1970-01-01T00:00:00.015,5,24",
      "1970-01-01T00:00:00.020,5,29",
      "1970-01-01T00:00:00.025,2,12")
    assertEquals(expected.sorted, result.sorted)
  }
  // Nested aggregations generate deletes; the upsert sink must receive them.
  @Test
  def testUpsertSinkOnNestedAggregation(): Unit = {
    val t = env.fromCollection(tupleData3)
      .assignAscendingTimestamps(_._1.toLong)
      .toTable(tEnv, 'id, 'num, 'text)
    tEnv.executeSql(
      s"""
         |CREATE TABLE upsertSink (
         |  `cnt` BIGINT,
         |  `lencnt` BIGINT,
         |  `cTrue` BOOLEAN,
         |  PRIMARY KEY (cnt, cTrue) NOT ENFORCED
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'false'
         |)
         |""".stripMargin)
    t.select('id, 'num, 'text.charLength() as 'len, ('id > 0) as 'cTrue)
      .groupBy('len, 'cTrue)
      // test query field name is different with registered sink field name
      .select('len, 'id.count as 'count, 'cTrue)
      .groupBy('count, 'cTrue)
      .select('count, 'len.count as 'lencnt, 'cTrue)
      .insertInto("upsertSink")
    tEnv.execute("job name")
    val rawResult = TestValuesTableFactory.getRawResults("upsertSink")
    assertTrue(
      "Results must include delete messages",
      rawResult.exists(_.startsWith("-D(")))
    val result = TestValuesTableFactory.getResults("upsertSink")
    val expected = List("1,5,true", "7,1,true", "9,1,true")
    assertEquals(expected.sorted, result.sorted)
  }
  // Windowed aggregation into an upsert sink whose key covers the full output.
  @Test
  def testUpsertSinkOnAppendingTable(): Unit = {
    val t = env.fromCollection(tupleData3)
      .assignAscendingTimestamps(_._1.toLong)
      .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime)
    tEnv.executeSql(
      s"""
         |CREATE TABLE upsertSink (
         |  `num` BIGINT,
         |  `wend` TIMESTAMP(3),
         |  `icnt` BIGINT,
         |  PRIMARY KEY (num, wend, icnt) NOT ENFORCED
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'false'
         |)
         |""".stripMargin)
    t.window(Tumble over 5.millis on 'rowtime as 'w)
      .groupBy('w, 'num)
      // test query field name is different with registered sink field name
      .select('num, 'w.end as 'window_end, 'id.count as 'icnt)
      .insertInto("upsertSink")
    tEnv.execute("job name")
    val rawResult = TestValuesTableFactory.getRawResults("upsertSink")
    assertFalse(
      "Received retraction messages for append only table",
      rawResult.exists(_.startsWith("-"))) // maybe -D or -U
    val result = TestValuesTableFactory.getResults("upsertSink")
    val expected = List(
      "1,1970-01-01T00:00:00.005,1",
      "2,1970-01-01T00:00:00.005,2",
      "3,1970-01-01T00:00:00.005,1",
      "3,1970-01-01T00:00:00.010,2",
      "4,1970-01-01T00:00:00.010,3",
      "4,1970-01-01T00:00:00.015,1",
      "5,1970-01-01T00:00:00.015,4",
      "5,1970-01-01T00:00:00.020,1",
      "6,1970-01-01T00:00:00.020,4",
      "6,1970-01-01T00:00:00.025,2")
    assertEquals(expected.sorted, result.sorted)
  }
  // Upsert key does not cover the grouping key; raw +I records are checked directly.
  @Test
  def testUpsertSinkOnAppendingTableWithoutFullKey1(): Unit = {
    val t = env.fromCollection(tupleData3)
      .assignAscendingTimestamps(_._1.toLong)
      .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime)
    tEnv.executeSql(
      s"""
         |CREATE TABLE upsertSink (
         |  `wend` TIMESTAMP(3),
         |  `icnt` BIGINT,
         |  PRIMARY KEY (wend, icnt) NOT ENFORCED
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'false'
         |)
         |""".stripMargin)
    t.window(Tumble over 5.millis on 'rowtime as 'w)
      .groupBy('w, 'num)
      .select('w.end as 'wend, 'id.count as 'cnt)
      .insertInto("upsertSink")
    tEnv.execute("job name")
    val rawResult = TestValuesTableFactory.getRawResults("upsertSink")
    assertFalse(
      "Received retraction messages for append only table",
      rawResult.exists(_.startsWith("-"))) // may -D or -U
    val rawExpected = List(
      "+I(1970-01-01T00:00:00.005,1)",
      "+I(1970-01-01T00:00:00.005,2)",
      "+I(1970-01-01T00:00:00.005,1)",
      "+I(1970-01-01T00:00:00.010,2)",
      "+I(1970-01-01T00:00:00.010,3)",
      "+I(1970-01-01T00:00:00.015,1)",
      "+I(1970-01-01T00:00:00.015,4)",
      "+I(1970-01-01T00:00:00.020,1)",
      "+I(1970-01-01T00:00:00.020,4)",
      "+I(1970-01-01T00:00:00.025,2)")
    assertEquals(rawExpected.sorted, rawResult.sorted)
  }
  // As above but keyed on num only; window end is dropped from the output.
  @Test
  def testUpsertSinkOnAppendingTableWithoutFullKey2(): Unit = {
    val t = env.fromCollection(tupleData3)
      .assignAscendingTimestamps(_._1.toLong)
      .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime)
    tEnv.executeSql(
      s"""
         |CREATE TABLE upsertSink (
         |  `num` BIGINT,
         |  `cnt` BIGINT,
         |  PRIMARY KEY (num) NOT ENFORCED
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'false'
         |)
         |""".stripMargin)
    t.window(Tumble over 5.millis on 'rowtime as 'w)
      .groupBy('w, 'num)
      .select('num, 'id.count as 'cnt)
      .insertInto("upsertSink")
    tEnv.execute("job name")
    val rawResult = TestValuesTableFactory.getRawResults("upsertSink")
    assertFalse(
      "Received retraction messages for append only table",
      rawResult.exists(_.startsWith("-"))) // may -D or -U
    val expected = List(
      "+I(1,1)",
      "+I(2,2)",
      "+I(3,1)",
      "+I(3,2)",
      "+I(4,3)",
      "+I(4,1)",
      "+I(5,4)",
      "+I(5,1)",
      "+I(6,4)",
      "+I(6,2)")
    assertEquals(expected.sorted, rawResult.sorted)
  }
  // Filter on an aggregated value before writing into the upsert sink.
  @Test
  def testUpsertSinkWithFilter(): Unit = {
    val t = env.fromCollection(tupleData3)
      .assignAscendingTimestamps(_._1.toLong)
      .toTable(tEnv, 'id, 'num, 'text)
    tEnv.executeSql(
      s"""
         |CREATE TABLE upsertSink (
         |  `num` BIGINT,
         |  `cnt` BIGINT,
         |  PRIMARY KEY (num) NOT ENFORCED
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'false'
         |)
         |""".stripMargin)
    // num, cnt
    //   1, 1
    //   2, 2
    //   3, 3
    //   4, 4
    //   5, 5
    //   6, 6
    t.groupBy('num)
      .select('num, 'id.count as 'cnt)
      .where('cnt <= 3)
      .insertInto("upsertSink")
    tEnv.execute("job name")
    val result = TestValuesTableFactory.getResults("upsertSink")
    val expected = List("1,1", "2,2", "3,3")
    assertEquals(expected.sorted, result.sorted)
  }
  // Two rowtime columns in one insert must be rejected with a TableException.
  @Test
  def testMultiRowtime(): Unit = {
    val t = env.fromCollection(tupleData3)
      .assignAscendingTimestamps(_._1.toLong)
      .toTable(tEnv, 'id, 'num, 'text, 'rowtime.rowtime)
    tEnv.executeSql(
      s"""
         |CREATE TABLE sink (
         |  `num` BIGINT,
         |  `ts1` TIMESTAMP(3),
         |  `ts2` TIMESTAMP(3)
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'true'
         |)
         |""".stripMargin)
    t.window(Tumble over 5.milli on 'rowtime as 'w)
      .groupBy('num, 'w)
      .select('num, 'w.rowtime as 'rowtime1, 'w.rowtime as 'rowtime2)
      .insertInto("sink")
    thrown.expect(classOf[TableException])
    thrown.expectMessage("Found more than one rowtime field: [rowtime1, rowtime2] " +
      "in the query when insert into 'default_catalog.default_database.sink'")
    tEnv.execute("job name")
  }
  // DECIMAL/CHAR casts through the SinkFunction-based runtime sink.
  @Test
  def testDecimalOnSinkFunctionTableSink(): Unit = {
    tEnv.executeSql(
      s"""
         |CREATE TABLE sink (
         |  `c` VARCHAR(5),
         |  `b` DECIMAL(10, 0),
         |  `d` CHAR(5)
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'true'
         |)
         |""".stripMargin)
    env.fromCollection(tupleData3)
      .toTable(tEnv, 'a, 'b, 'c)
      .where('a > 20)
      .select("12345", 55.cast(DataTypes.DECIMAL(10, 0)), "12345".cast(DataTypes.CHAR(5)))
      .insertInto("sink")
    tEnv.execute("job name")
    val result = TestValuesTableFactory.getResults("sink")
    val expected = Seq("12345,55,12345")
    assertEquals(expected.sorted, result.sorted)
  }
  // Same as above but through the OutputFormat-based runtime sink.
  @Test
  def testDecimalOnOutputFormatTableSink(): Unit = {
    tEnv.executeSql(
      s"""
         |CREATE TABLE sink (
         |  `c` VARCHAR(5),
         |  `b` DECIMAL(10, 0),
         |  `d` CHAR(5)
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'true',
         |  'runtime-sink' = 'OutputFormat'
         |)
         |""".stripMargin)
    env.fromCollection(tupleData3)
      .toTable(tEnv, 'a, 'b, 'c)
      .where('a > 20)
      .select("12345", 55.cast(DataTypes.DECIMAL(10, 0)), "12345".cast(DataTypes.CHAR(5)))
      .insertInto("sink")
    tEnv.execute("job name")
    val result = TestValuesTableFactory.getResults("sink")
    val expected = Seq("12345,55,12345")
    assertEquals(expected.sorted, result.sorted)
  }
  /**
   * Writing changelog of an aggregation into a memory sink, and read it again as a
   * changelog source, and apply another aggregation, then verify the result.
   */
  @Test
  def testChangelogSourceAndChangelogSink(): Unit = {
    val orderData = List(
      rowOf(1L, "user1", new JBigDecimal("10.02")),
      rowOf(1L, "user2", new JBigDecimal("71.2")),
      rowOf(1L, "user1", new JBigDecimal("8.1")),
      rowOf(2L, "user3", new JBigDecimal("11.3")),
      rowOf(2L, "user4", new JBigDecimal("9.99")),
      rowOf(2L, "user1", new JBigDecimal("10")),
      rowOf(2L, "user3", new JBigDecimal("21.03")))
    val dataId = TestValuesTableFactory.registerData(orderData)
    tEnv.executeSql(
      s"""
         |CREATE TABLE orders (
         |  product_id BIGINT,
         |  user_name STRING,
         |  order_price DECIMAL(18, 2)
         |) WITH (
         |  'connector' = 'values',
         |  'data-id' = '$dataId'
         |)
         |""".stripMargin)
    tEnv.executeSql(
      """
        |CREATE TABLE changelog_sink (
        |  product_id BIGINT,
        |  user_name STRING,
        |  order_price DECIMAL(18, 2)
        |) WITH (
        |  'connector' = 'values',
        |  'sink-insert-only' = 'false'
        |)
        |""".stripMargin)
    tEnv.sqlUpdate(
      """
        |INSERT INTO changelog_sink
        |SELECT product_id, user_name, SUM(order_price)
        |FROM orders
        |GROUP BY product_id, user_name
        |""".stripMargin)
    tEnv.execute("job name")
    val rawResult = TestValuesTableFactory.getRawResults("changelog_sink")
    val expected = List(
      "+I(1,user2,71.20)",
      "+I(1,user1,10.02)",
      "-U(1,user1,10.02)",
      "+U(1,user1,18.12)",
      "+I(2,user4,9.99)",
      "+I(2,user1,10.00)",
      "+I(2,user3,11.30)",
      "-U(2,user3,11.30)",
      "+U(2,user3,32.33)")
    assertEquals(expected.sorted, rawResult.sorted)
    // register the changelog sink as a changelog source again
    val changelogData = expected.map { s =>
      // Parse "+I(a,b,c)"-style strings back into typed changelog rows.
      val kindString = s.substring(0, 2)
      val fields = s.substring(3, s.length - 1).split(",")
      changelogRow(kindString, JLong.valueOf(fields(0)), fields(1), new JBigDecimal(fields(2)))
    }
    val dataId2 = TestValuesTableFactory.registerChangelogData(changelogData)
    tEnv.executeSql(
      s"""
         |CREATE TABLE changelog_source (
         |  product_id BIGINT,
         |  user_name STRING,
         |  price DECIMAL(18, 2)
         |) WITH (
         |  'connector' = 'values',
         |  'data-id' = '$dataId2',
         |  'changelog-mode' = 'I,UB,UA,D'
         |)
         |""".stripMargin)
    tEnv.executeSql(
      """
        |CREATE TABLE final_sink (
        |  user_name STRING,
        |  total_pay DECIMAL(18, 2),
        |  PRIMARY KEY (user_name) NOT ENFORCED
        |) WITH (
        |  'connector' = 'values',
        |  'sink-insert-only' = 'false'
        |)
        |""".stripMargin)
    tEnv.sqlUpdate(
      """
        |INSERT INTO final_sink
        |SELECT user_name, SUM(price) as total_pay
        |FROM changelog_source
        |GROUP BY user_name
        |""".stripMargin)
    tEnv.execute("job name")
    val finalResult = TestValuesTableFactory.getResults("final_sink")
    val finalExpected = List(
      "user1,28.12", "user2,71.20", "user3,32.33", "user4,9.99")
    assertEquals(finalExpected.sorted, finalResult.sorted)
  }
  // NOT NULL columns: default mode fails on nulls; 'drop' mode discards them.
  @Test
  def testNotNullEnforcer(): Unit = {
    val dataId = TestValuesTableFactory.registerData(nullData4)
    tEnv.executeSql(
      s"""
         |CREATE TABLE nullable_src (
         |  category STRING,
         |  shopId INT,
         |  num INT
         |) WITH (
         |  'connector' = 'values',
         |  'data-id' = '$dataId'
         |)
         |""".stripMargin)
    tEnv.executeSql(
      s"""
         |CREATE TABLE not_null_sink (
         |  category STRING,
         |  shopId INT,
         |  num INT NOT NULL
         |) WITH (
         |  'connector' = 'values',
         |  'sink-insert-only' = 'true'
         |)
         |""".stripMargin)
    tEnv.sqlUpdate("INSERT INTO not_null_sink SELECT * FROM nullable_src")
    // default should fail, because there are null values in the source
    try {
      tEnv.execute("job name")
      fail("Execution should fail.")
    } catch {
      case t: Throwable =>
        val exception = ExceptionUtils.findThrowableWithMessage(
          t,
          "Column 'num' is NOT NULL, however, a null value is being written into it. " +
            "You can set job configuration 'table.exec.sink.not-null-enforcer'='drop' " +
            "to suppress this exception and drop such records silently.")
        assertTrue(exception.isPresent)
    }
    // enable drop enforcer to make the query can run
    tEnv.getConfig.getConfiguration.setString("table.exec.sink.not-null-enforcer", "drop")
    tEnv.sqlUpdate("INSERT INTO not_null_sink SELECT * FROM nullable_src")
    tEnv.execute("job name")
    val result = TestValuesTableFactory.getResults("not_null_sink")
    val expected = List("book,1,12", "book,4,11", "fruit,3,44")
    assertEquals(expected.sorted, result.sorted)
  }
}
| hequn8128/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/TableSinkITCase.scala | Scala | apache-2.0 | 20,192 |
package be.cmpg.qtl
import be.cmpg.graph.Gene
import be.cmpg.graph.Node
import be.cmpg.graph.interaction.NetworkManager
import be.cmpg.walk.SubNetworkSelector
import be.cmpg.walk.Path
import be.cmpg.graph.Interaction
import scala.collection.mutable.LinkedList
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.ListBuffer
import scala.collection.mutable.TreeSet
import scala.util.Random
import scala.collection.mutable.HashSet
import be.cmpg.graph.Node
/*
* The selected subnetwork is None if the start or end Gene does not have any marker
* that affect it
*
*
* FIXME What do we do if there are no other markers around the gene marker?
*/
class QtlWalker(startGene: Gene,
    qtlHalfRegionSize: Int,
    pathSize: Int,
    network: NetworkManager[_]) extends SubNetworkSelector(network) {
  // Walk state: the path grown from the start gene and the interactions
  // collected so far for the candidate subnetwork.
  val path = new Path(startGene)
  val startNode = network.getNetwork().getNode(startGene)
  val interactions = new HashSet[Interaction]
  override def getStartGene(): Gene = startGene
  override def getStartNode(): Node = startNode
  override def getVisitedGenes() = interactions.map(_.genes).flatten
  /** Outgoing interactions from the current endpoint that the path may still take. */
  override def getPossibleInteractions(): List[Interaction] = {
    network.getOutgoingInteractionsFor(path.currentEndpoint).filter(interaction => path.canTakeInteraction(interaction)).toList
  }
  /**
   * Performs one random walk of `pathSize` steps from the start gene and
   * returns the selected subnetwork, or None when either endpoint has no
   * flanking markers or the walk ends back at the start markers.
   *
   * Fix: the original evaluated bare `None` expressions for the failure cases;
   * in Scala such an expression is simply a discarded value, so the method
   * never aborted and continued with empty marker lists. Explicit early
   * returns restore the behaviour documented in the file's header comment.
   */
  def selectSubNetwork(): Option[Set[Interaction]] = {
    val (startGene2Markers, startMarkers) = getFlankingMarkers(startGene)
    if (startGene2Markers.isEmpty) return None
    startGene2Markers.foreach(interactions += _)
    startMarkers.sliding(2).foreach(mt => interactions += Interaction(mt(0), mt(1), QtlConstants.startLoci))
    // Grow the path by up to pathSize random steps.
    var steps = 0
    while (steps < pathSize) {
      val next = network.getRandomInteraction(this)
      if (next.isDefined)
        path.expand(next.get)
      steps += 1
    }
    // Abort if the random path returned to the original set of markers.
    if (startMarkers.contains(path.currentEndpoint)) return None
    val (endGene2Markers, endMarkers) = getFlankingMarkers(path.currentEndpoint)
    if (endGene2Markers.isEmpty) return None
    endGene2Markers.foreach(interactions += _)
    endMarkers.sliding(2).foreach(mt => interactions += Interaction(mt(0), mt(1), QtlConstants.endLoci))
    Some(interactions.view.toSet)
  }
  /**
   * Markers directly linked to `geneOfInterest` ("marker" interactions) plus
   * every marker gene within `qtlHalfRegionSize` of the first such marker.
   *
   * Fix: the original indexed `gene2markers(0)` unconditionally, so a gene with
   * no marker interactions threw IndexOutOfBoundsException here before the
   * caller's emptiness check could ever run; an empty result is now returned.
   */
  private def getFlankingMarkers(geneOfInterest: be.cmpg.graph.Gene): (List[be.cmpg.graph.Interaction], List[be.cmpg.graph.Gene]) = {
    val gene2markers = network.getOutgoingInteractionsFor(geneOfInterest).filter(_.typ == "marker").toList.sortBy(_.to.name)
    if (gene2markers.isEmpty) return (Nil, Nil)
    val chromosome = gene2markers(0).to.get(QtlConstants.chromosome).get
    // NOTE(review): region bounds are built by string concatenation
    // (chromosome + numeric position) and compared lexicographically below;
    // this looks fragile for positions with different digit counts — confirm
    // the marker naming scheme guarantees a consistent ordering.
    val startPosition = chromosome + math.max(0, gene2markers(0).to.get(QtlConstants.position).get.toInt - qtlHalfRegionSize)
    val endPosition = chromosome + (gene2markers(0).to.get(QtlConstants.position).get.toInt + qtlHalfRegionSize)
    val markers = network.getGenes.filter(node => startPosition <= node.name && node.name <= endPosition).toList.sortBy(_.name)
    (gene2markers, markers)
  }
}
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package types
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
import org.jetbrains.plugins.scala.lang.parser.util.ParserUtils
/**
* @author Alexander Podkhalyuzin
* Date: 08.02.2008
*/
/*
* SelfType ::= id [':' Type] '=>' |
* ['this' | '_'] ':' Type '=>'
*/
object SelfType {
  /**
   * Parses a self-type declaration at the builder's current position. On
   * success the consumed tokens are wrapped in a SELF_TYPE element; on any
   * mismatch the marker is rolled back so the builder is left untouched.
   */
  def parse(builder: ScalaPsiBuilder) {
    val selfTypeMarker = builder.mark
    // Consumes the '=>' and closes the SELF_TYPE element (success path).
    def handleFunArrow() {
      builder.advanceLexer() //Ate '=>'
      selfTypeMarker.done(ScalaElementTypes.SELF_TYPE)
    }
    // Consumes ':' and a type; succeeds only when '=>' follows the type.
    def handleColon() {
      builder.advanceLexer() //Ate ':'
      if (!parseType(builder)) selfTypeMarker.rollbackTo()
      else {
        builder.getTokenType match {
          case ScalaTokenTypes.tFUNTYPE => handleFunArrow()
          case _ => selfTypeMarker.rollbackTo()
        }
      }
    }
    // After an identifier (or balanced parentheses): either ':' Type '=>' or a bare '=>'.
    def handleLastPart() {
      builder.getTokenType match {
        case ScalaTokenTypes.tCOLON => handleColon()
        case ScalaTokenTypes.tFUNTYPE => handleFunArrow()
        case _ => selfTypeMarker.rollbackTo()
      }
    }
    // Dispatch on the first token: 'this' / '_' require ':' Type '=>';
    // an identifier or parenthesized form may also use the bare '=>' shape.
    builder.getTokenType match {
      case ScalaTokenTypes.kTHIS | ScalaTokenTypes.tUNDER =>
        builder.advanceLexer() // Ate this or _
        builder.getTokenType match {
          case ScalaTokenTypes.tCOLON => handleColon()
          case _ => selfTypeMarker.rollbackTo()
        }
      case ScalaTokenTypes.tIDENTIFIER =>
        builder.advanceLexer() //Ate identifier
        handleLastPart()
      case ScalaTokenTypes.tLPARENTHESIS =>
        if (ParserUtils.parseBalancedParenthesis(builder, TokenSets.SELF_TYPE_ID))
          handleLastPart() else selfTypeMarker.rollbackTo()
      case _ => selfTypeMarker.rollbackTo()
    }
  }
  /**
   * Parses an infix type (pattern position, '*' disallowed), optionally
   * followed by a 'forSome' existential clause. Returns false when no type
   * could be parsed; the builder is then unchanged apart from error marks.
   */
  def parseType(builder : ScalaPsiBuilder) : Boolean = {
    val typeMarker = builder.mark
    if (!InfixType.parse(builder, star = false, isPattern = true)) {
      typeMarker.drop()
      return false
    }
    builder.getTokenType match {
      case ScalaTokenTypes.kFOR_SOME =>
        ExistentialClause parse builder
        typeMarker.done(ScalaElementTypes.EXISTENTIAL_TYPE)
      case _ => typeMarker.drop()
    }
    true
  }
}
package sprawler.crawler
import sprawler.CrawlerExceptions._
import sprawler.CrawlerExceptions.JsonImplicits._
import play.api.libs.iteratee.{ Input, Concurrent }
import play.api.libs.json._
import spray.http.HttpResponse
import sprawler.CrawlerException
/**
* For pushing JSON results and exceptions into a [[play.api.libs.iteratee.Concurrent.Channel]].
*/
trait Streams {

  /** Sink that serialized crawl results and errors are pushed into. */
  def channel: Concurrent.Channel[JsValue]

  /**
   * Pushes a JSON value into the channel.
   *
   * @param json value to stream to the client
   * @param eof  when true, an EOF is pushed after the value, closing the stream
   */
  def streamJson(json: JsValue, eof: Boolean = false) {
    channel push json
    if (eof) {
      play.Logger.debug("channel is closing....")
      channel.push(Input.EOF)
    }
  }

  /** Wraps the error json in {"error": ...} before streaming; closes the stream by default. */
  def streamJsonError(jsError: JsValue, eof: Boolean = true) {
    streamJson(JsObject(Seq("error" -> jsError)), eof)
  }

  /** Streams the JSON representation of a crawled HTTP response. */
  def streamJsonResponse(fromUrl: String, toUrl: String, response: HttpResponse, eof: Boolean = false) {
    streamJson(responseToJson(fromUrl, toUrl, response), eof)
  }

  /**
   * Streams the json representation of a crawler exception. Crawler exceptions
   * are defined in [[sprawler.CrawlerException]].
   *
   * @param error The throwable to convert into json and stream.
   * @param eof   when true the channel is closed after the error is streamed
   */
  def streamJsonErrorFromException(error: Throwable, eof: Boolean = false) {
    error match {
      case e: CrawlerException => e match {
        // NOTE: this code is left in this verbose manner, because Json.toJson doesn't work
        // when trying to use this shortened version, due to the type inference getting generalized to
        // "Throwable": http://stackoverflow.com/a/8481924/1093160. This should at least
        // throw a compiler warning when a CrawlerException type is not matched, since CrawlerException
        // is sealed.
        case error @ UnprocessableUrlException(_, _, _, _) =>
          streamJsonError(Json.toJson(error), eof)
        case error @ FailedHttpRequestException(_, _, _, _) =>
          streamJsonError(Json.toJson(error), eof)
        case error @ UrlNotAllowedException(_, _, _, _) =>
          streamJsonError(Json.toJson(error), eof)
        case error @ RedirectLimitReachedException(_, _, _, _, _) =>
          streamJsonError(Json.toJson(error), eof)
        case error @ MissingRedirectUrlException(_, _, _) =>
          streamJsonError(Json.toJson(error), eof)
        case error @ UnknownException(_, _) =>
          play.Logger.error(error.getStackTraceString)
          streamJsonError(Json.toJson(error), eof)
      }
      case e: Throwable =>
        // Bug fix: the original recursed with the default eof=false, silently
        // dropping the caller's `eof` -- a stream that should have been closed
        // on a non-crawler error would stay open. Propagate it.
        streamJsonErrorFromException(UnknownException(e.getMessage), eof)
        play.Logger.error(error.getStackTraceString)
    }
  }

  /**
   * Converts a [[spray.http.HttpResponse]] to a [[play.api.libs.json.JsValue]] for streaming
   * to the client.
   *
   * @param fromUrl The origin website's url where toUrl was found on.
   * @param toUrl The url that was crawled -- the url that was used to retrive response.
   * @param response This gets serialized into json for streaming back to the user.
   * @return The response in json form.
   */
  def responseToJson(fromUrl: String, toUrl: String, response: HttpResponse): JsValue = {
    val status = response.status
    val json = JsObject(
      Seq(
        "status" -> JsNumber(status.intValue),
        "reason" -> JsString(status.reason),
        "to_url" -> JsString(toUrl),
        "from_url" -> JsString(fromUrl)
      )
    )
    json
  }
}
package controllers
import actors.{GameActor, GamesActor, SessionActor}
import akka.actor._
import akka.pattern.ask
import akka.util.Timeout
import model.{GameInstance, GameQuestion, UserSession}
import play.Logger
import play.api.Play.current
import play.api.libs.concurrent.Akka
import play.api.libs.json.{JsObject, JsString, JsValue, Json}
import play.api.mvc.{Action, Controller, Result, WebSocket}
import scala.concurrent.Future
import scala.concurrent.duration._
object GamePlayer extends Controller {
  // gamesActor / gameActor / sessionActor are not defined in this file chunk --
  // presumably provided by a package-level object; verify before refactoring.
  implicit val ec = Akka.system.dispatcher
  implicit val timeout = Timeout(5, SECONDS)

  /** Lists all running games as a JSON array of {id, title, description}. */
  def games = Action.async { req =>
    gamesActor.ask(GamesActor.GetGames).mapTo[Set[GameInstance]].map {
      // NOTE(review): after mapTo[Set[GameInstance]] the first case always matches
      // (the element type is erased), so the BadRequest branch is unreachable.
      case s: Set[GameInstance] =>
        Ok(Json.toJson(s.map(gi => Json.obj("id" -> JsString(gi.id), "title" -> JsString(gi.game.title), "description" -> JsString(gi.game.description)))))
      case _ => BadRequest
    }
  }

  /** Dispatches a JSON command for game `id`; only the "join" command is supported. */
  def gameAction(id: String) = Action.async(parse.json) { req =>
    req.body \ "command" match {
      case JsString("join") =>
        (req.body \ "nickname", req.body \ "email") match {
          case (JsString(nickname), JsString(email)) =>
            joinGame(id, nickname, email)
          case _ => Future.successful(BadRequest)
        }
      case _ => Future.successful(BadRequest("Unknown command"))
    }
  }

  /**
   * Creates a player session, then registers the player with the game actor.
   * NOTE(review): `session.get` below assumes session creation always yields Some;
   * a None would throw inside the Future -- confirm SessionActor's guarantees.
   */
  def joinGame(id: String, nickname: String, email: String): Future[Result] = {
    for {
      session <- sessionActor.ask(SessionActor.CreatePlayerSession(nickname, email)).mapTo[Option[UserSession]]
      joinResp <- gameActor(id).ask(GameActor.RegisterPlayer(nickname)).recover({
        case t => Logger.error("join error", t)
      })
    } yield joinResp match {
      case GameActor.PlayerJoined =>
        Ok(Json.obj("id" -> session.get.id))
      case msg =>
        Logger.debug(s"Unexpected response to JoinGame: $msg")
        ExpectationFailed
    }
  }

  /** Upgrades the request to a websocket backed by a PlayerWebSocketActor. */
  def webSocket(gameId: String) = WebSocket.acceptWithActor[JsValue, JsValue] { request => out =>
    PlayerWebSocketActor.props(out, gameId)
  }
}
/** Props factory so Play's WebSocket plumbing can create the per-connection actor. */
object PlayerWebSocketActor {
  def props(out: ActorRef, gameId: String) = Props(new PlayerWebSocketActor(out, gameId))
}
/**
 * Per-connection websocket actor: waits for a session-id handshake, then
 * relays player commands to the game actor and game events to the client.
 *
 * @param out    actor representing the client side of the websocket
 * @param gameId id of the game this connection belongs to
 */
class PlayerWebSocketActor(out: ActorRef, gameId: String) extends Actor with ActorLogging {
  implicit val ec = Akka.system.dispatcher
  implicit val timeout = Timeout(5, SECONDS)

  // Populated once the client has presented a valid session id.
  var session: Option[UserSession] = None

  // Subscribe to game events immediately, before authentication completes.
  gameActor(gameId) ! GameActor.AddEventSubscriber(self)

  /** Initial behaviour: expect a single {"sessionId": "..."} handshake object. */
  def receive = {
    case JsObject(Seq(("sessionId", JsString(sessionId)))) =>
      val ctx = context
      // NOTE(review): mutating `session` and calling become from a future
      // callback runs outside the actor's message-processing thread -- confirm
      // this race is acceptable (a pipeTo-based handshake would be safer).
      sessionActor ? SessionActor.LookupSession(sessionId) onSuccess {
        case Some(s: UserSession) =>
          session = Some(s)
          ctx.become(authenticated)
        case o =>
          Logger.error("No session, closing socket")
          self ! PoisonPill
      }
  }

  /** Behaviour after authentication: client commands and game-event fan-out. */
  def authenticated: Receive = {
    case msg: JsObject if msg.keys.contains("command") => (msg \ "command").as[String] match {
      case "answerQuestion" =>
        val JsString(answer) = msg \ "answer"
        gameActor(gameId) ? GameActor.AnswerQuestion(session.get.userId, answer) recover {
          case t => Logger.error(s"Error answering question $t")
        }
      case other => Logger.warn(s"Unknown command $other")
    }
    case GameActor.PlayerJoinedEvent(game) =>
      out ! message("playerJoined", Json.obj("players" -> playerObject(game.players)))
    case GameActor.QuestionOpenEvent(currentQuestion, game) =>
      out ! message("questionOpen", questionWithoutPoints(currentQuestion))
    case GameActor.ReviewingQuestionEvent(currentQuestion, game) =>
      out ! message("reviewingQuestion", Json.obj("question" -> Json.toJson(currentQuestion), "playerScores" -> playerObject(game.players)))
    case GameActor.GameOverEvent(game) =>
      out ! message("gameOver", Json.obj("playerScores" -> playerObject(game.players)))
      gameActor(gameId) ! GameActor.RemoveEventSubscriber(self)
      self ! PoisonPill
  }

  /** Standard envelope for all outbound websocket messages. */
  def message(msgType: String, payload: JsValue) = Json.obj("messageType" -> msgType, "payload" -> payload)

  /** Serialises the player -> score map as a list of {player, score} objects. */
  def playerObject(players: Map[String, Int]) = players.map { p =>
    Json.obj("player" -> p._1, "score" -> p._2)
  }

  /** Question payload with answer point values stripped (players must not see them). */
  def questionWithoutPoints(question: GameQuestion) = Json.obj("question" -> question.question, "answers" ->
    question.answers.map(a => Json.obj("id" -> a.id, "answer" -> a.answer)))

  // Log anything neither behaviour understood, then fall back to Akka's default
  // handling. (The original wrapped this in a pointless single-case match.)
  override def unhandled(message: Any) = {
    Logger.debug(s"Ignoring message $message")
    super.unhandled(message)
  }
}
| chariotsolutions/reactive-quizzo-code-sample | app/controllers/GamePlayer.scala | Scala | mit | 4,579 |
package org.bone.ircballoon
import org.eclipse.swt._
import org.eclipse.swt.widgets.Display
import org.eclipse.swt.graphics._
import org.xnap.commons.i18n.I18nFactory
import java.util.Locale
import javax.sound.sampled.AudioInputStream
import javax.sound.sampled.AudioSystem
import javax.sound.sampled.Clip
import scala.util.control.Exception._
/** Shared i18n handle for the application, using the default locale. */
object I18N
{
  // READ_PROPERTIES reads translations from .properties bundles; FALLBACK
  // presumably falls back to the untranslated key when no bundle matches -- confirm
  // against the xnap-commons I18nFactory docs.
  val flags = I18nFactory.FALLBACK|I18nFactory.READ_PROPERTIES
  val i18n = I18nFactory.getI18n(getClass(), Locale.getDefault, flags)
}
object SoundUtils
{
  import javax.sound.sampled.{LineEvent, LineListener}

  /**
   * Plays a classpath audio resource asynchronously on a throwaway thread.
   *
   * The original implementation never closed the Clip or the stream, leaking
   * an audio line on every call; the clip is now released when playback stops
   * and the input stream is closed once the clip has loaded it.
   *
   * @param file classpath-absolute resource path, e.g. "/ding.wav"
   */
  def playSound(file: String)
  {
    val thread = new Thread() {
      override def run() {
        val audioIn = AudioSystem.getAudioInputStream(getClass.getResource(file))
        try {
          val clip = AudioSystem.getClip()
          // Release the audio line once the clip stops (end of media or stop()).
          clip.addLineListener(new LineListener {
            def update(event: LineEvent) {
              if (event.getType == LineEvent.Type.STOP) event.getLine.close()
            }
          })
          clip.open(audioIn)
          clip.start()
        } finally {
          // Clip.open loads the whole stream into memory, so it is safe to
          // close the stream here even while playback continues.
          audioIn.close()
        }
      }
    }
    thread.start()
  }
}
/** Application icons loaded eagerly from classpath resources. */
object MyIcon
{
  import ImageUtil.loadFromResource

  // NOTE(review): loadFromResource returns an Option and `.get` is called on
  // every entry, so a missing resource throws during object initialisation.
  val appIcon = loadFromResource("/appIcon.png").get
  val ircOP = loadFromResource("/opIcon.png").get
  val preference = loadFromResource("/preference.png").get
  val close = loadFromResource("/close.png").get
  val add = loadFromResource("/add.png").get
  val remove = loadFromResource("/remove.png").get
  val vote = loadFromResource("/vote.png").get
}
/** Shared SWT colours, created lazily on the default display.
  * NOTE(review): SWT Color objects are OS resources; these are app-lifetime
  * singletons and are never disposed -- confirm that is intentional. */
object MyColor
{
  lazy val Black = new Color(Display.getDefault, 0, 0, 0)
  lazy val White = new Color(Display.getDefault, 255, 255, 255)
  lazy val Blue = new Color(Display.getDefault, 100, 100, 255)
}
/** Shared SWT fonts derived from the system font. */
object MyFont
{
  lazy val DefaultFont = Display.getDefault.getSystemFont
  // Name/size/style of the first FontData entry of the system font.
  lazy val DefaultFontName = DefaultFont.getFontData()(0).getName
  lazy val DefaultFontSize = DefaultFont.getFontData()(0).getHeight
  lazy val DefaultFontStyle = DefaultFont.getFontData()(0).getStyle
  // System font family, 3pt larger and bold. Never disposed (app-lifetime).
  lazy val LargeFont = new Font(
    Display.getDefault, DefaultFontName,
    DefaultFontSize + 3,
    SWT.BOLD
  )
}
object MessageSample
{
  import scala.util.Random

  /** Canned chat lines used to preview the balloon rendering. */
  val samples = List(
    "guest: 這是第一個測試",
    "user: 哈囉,大家好,我是 user",
    "guest: This is a test.",
    "long: 這是非常非常非常非常長的一段文字,一二三四五六七八九十,甲乙丙丁戊己庚辛",
    "tester: Another test.",
    "beta: This is a beta test"
  )

  /** Returns `size` sample lines in random order; the sample pool is
    * replicated as many times as needed, so lines repeat for large sizes. */
  def random(size: Int) = {
    val copies = size / samples.length + 1
    val pool = List.fill(copies)(samples).flatten
    Random.shuffle(pool).take(size)
  }
}
| brianhsu/IRCBalloon | src/main/scala/utils/Util.scala | Scala | gpl-3.0 | 2,359 |
package com.socrata.datacoordinator.id
import org.scalatest.FunSuite
import org.scalatest.MustMatchers
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
class GlobalLogEntryIdTest extends FunSuite with MustMatchers with ScalaCheckPropertyChecks {
  // Property: for any underlying Long, the id's toString mentions both the
  // value and the wrapper class name.
  test("toString must include underlying value") {
    forAll { (underlying: Long) =>
      val id = new GlobalLogEntryId(underlying)
      val rendered = id.toString
      rendered must include (underlying.toString)
      rendered must include (id.getClass.getSimpleName)
    }
  }
}
| socrata-platform/data-coordinator | coordinatorlib/src/test/scala/com/socrata/datacoordinator/id/GlobalLogEntryIdTest.scala | Scala | apache-2.0 | 522 |
// A tricky case of overriding behavior
// Note: It might be acceptable if this produced an error instead.
// But testing this is tricky.
// Single abstract method that C[T] below overrides; the interesting question is
// which of C's two overloads ends up overriding it when T = D.
abstract class Base[T] {
  def foo(x: T): String
}
// When C is instantiated at T = D, both overloads accept a D at the call site;
// the assertions in Test pin down which one dispatch actually selects.
class C[T] extends Base[T] {
  def foo(x: D): String = "D foo"
  def foo(x: T): String = "T foo"
}
/** Exercises overload resolution vs. bridge methods for C[T] instantiated at T = D. */
object Test {
  def main(args: Array[String]) = {
    val b1: Base[D] = new C[D] // which of the two foo's in C overrides the one in B?
    assert(b1.foo(new D) == "T foo")
    val b2: Base[D] = new C[D] {}
    // In Java, this gives an error like this:
    // methods foo(A) from C[D] and foo(String) from C[D] are inherited with the same signature
    // But the analogous example with `b1` compiles OK in Java.
    assert(b2.foo(new D) == "D foo")
    // Here we get "D foo" since a bridge method for foo(x: D) was inserted
    // in the anonymous class of b2.
  }
}
class D
| dotty-staging/dotty | tests/run/i1240.scala | Scala | apache-2.0 | 887 |
package core.exceptions
import reactivemongo.core.commands.LastError
/**
* Created by inakov on 12.01.15.
*/
/** Catch-all service failure with no more specific category. */
case class UnexpectedServiceException(message: String, nestedException: Throwable = null) extends ServiceException
/** Database-layer failure, optionally carrying the ReactiveMongo LastError. */
case class DBServiceException(
  message: String,
  lastError: Option[LastError] = None,
  nestedException: Throwable = null
) extends ServiceException
object DBServiceException {
  /** Builds a DBServiceException from a LastError, preferring errMsg over message. */
  def apply(lastError: LastError): ServiceException = {
    DBServiceException(lastError.errMsg.getOrElse(lastError.message), Some(lastError))
  }
}
/** Raised when creating a resource that already exists; message is an i18n key. */
case class DuplicateResourceException(
  message: String = "error.duplicate.resource",
  nestedException: Throwable = null
) extends ServiceException
/** Raised when the caller lacks permission for the operation; message is an i18n key. */
case class OperationNotAllowedException(
  message: String = "error.operation.not.allowed",
  nestedException: Throwable = null
) extends ServiceException
/** Raised on lookup of a missing resource; `id` identifies what was requested. */
case class ResourceNotFoundException(
  id: String,
  message: String = "error.resource.not.found",
  nestedException: Throwable = null
) extends ServiceException | inakov/reactive-twitter | app/models/core/exceptions/CoreExceptions.scala | Scala | apache-2.0 | 1,095 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.demo
import akka.actor.{ActorPath, ActorLogging, Actor, ActorRef}
import com.rabbitmq.client.AMQP.BasicProperties
import com.rabbitmq.client.{Channel, ConnectionFactory, DefaultConsumer, Envelope}
import com.thenewmotion.akka.rabbitmq._
import org.apache.spark.streaming.receiver._
import scala.concurrent.duration.DurationInt
/**
* A Spark Receiver that subscribes to a RabbitMQ stream.
*/
class RMQReceiver(host : String,
                  username : Option[String],
                  password : Option[String],
                  qname : String,
                  exchange: String)
  extends Actor with ActorHelper {

  implicit val timeout = 5 seconds

  val factory = new ConnectionFactory()
  // foreach, not map: these calls are pure side effects on the factory and the
  // original `map` silently discarded the resulting Option[Unit].
  username.foreach(factory.setUsername(_))
  password.foreach(factory.setPassword(_))
  factory.setHost(host)

  /** Opens the AMQP connection/channel and starts an auto-acknowledging consumer. */
  override def preStart() = {
    log.debug("Starting Consumer Actor")
    val connectionActor: ActorRef = context.actorOf(ConnectionActor.props(factory))

    // Binds the queue to the exchange and registers the delivery callback.
    def setupChannel(channel: Channel, self: ActorRef) {
      channel.queueBind(qname, exchange, "")
      val consumer = new DefaultConsumer(channel) {
        override def handleDelivery(consumerTag: String,
                                    envelope: Envelope,
                                    properties: BasicProperties,
                                    body: Array[Byte]): Unit = {
          //Minor hack placing args in a sequence because slf4j has an ambiguous vararg .debug(..) call
          log.trace("Message received: {} - {}", Seq(fromBytes(body), properties.getMessageId):_*)
          // Hand the decoded message to Spark Streaming.
          store(fromBytes(body))
        }
      }
      //Start the consumer with auto-acknowledge on
      channel.basicConsume(qname, true, consumer)
    }

    val channelActor: ActorRef = connectionActor.createChannel(ChannelActor.props(setupChannel))
  }

  /** Decodes a raw AMQP message body as UTF-8 text. */
  def fromBytes(msg: Array[Byte]) : String = new String(msg, "UTF-8")

  def receive: Receive = {
    // Bug fix: the original used a plain string literal (no `s` interpolator),
    // logging the text "$unknownMsg" verbatim instead of the actual message.
    case unknownMsg => logInfo(s"Actor received $unknownMsg message but was not expecting it...")
  }
}
/*
Copyright 2013 Stephen K Samuel
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.sksamuel.scrimage.filter
import com.sksamuel.scrimage.BufferedOpFilter
/** @author Stephen Samuel */
/** Scrimage wrapper around the bundled JHLabs "rays" image effect. */
class RaysFilter(opacity: Float, threshold: Float, strength: Float) extends BufferedOpFilter {
  // Settings are applied eagerly at construction time.
  val op = new thirdparty.jhlabs.image.RaysFilter()
  op.setOpacity(opacity)
  op.setThreshold(threshold)
  op.setStrength(strength)
}
object RaysFilter {
  // Defaults presumably mirror the JHLabs filter's own defaults -- TODO confirm.
  def apply(opacity: Float = 1.0f, threshold: Float = 0, strength: Float = 0.5f) = new RaysFilter(opacity, threshold, strength)
}
| carlosFattor/scrimage | scrimage-filters/src/main/scala/com/sksamuel/scrimage/filter/RaysFilter.scala | Scala | apache-2.0 | 1,089 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.crud.rest.model.instance.response
import com.bwsw.sj.common.dal.model.instance.{ExecutionPlan, FrameworkStage}
/**
 * REST response body describing an output-module instance.
 *
 * Pure data carrier: extends the common [[InstanceApiResponse]] fields and adds
 * the output-specific ones (checkpointing, execution plan, start position, and
 * the input/output stream names).
 */
class OutputInstanceApiResponse(moduleName: String,
                                moduleVersion: String,
                                moduleType: String,
                                stage: FrameworkStage,
                                status: String,
                                name: String,
                                description: String,
                                parallelism: Any,
                                options: Map[String, Any],
                                perTaskCores: Double,
                                perTaskRam: Int,
                                jvmOptions: Map[String, String],
                                nodeAttributes: Map[String, String],
                                coordinationService: String,
                                environmentVariables: Map[String, String],
                                performanceReportingInterval: Long,
                                engine: String,
                                restAddress: String,
                                val checkpointMode: String,
                                val checkpointInterval: Long,
                                val executionPlan: ExecutionPlan,
                                val startFrom: String,
                                val input: String,
                                val output: String)
  extends InstanceApiResponse(
    moduleName,
    moduleVersion,
    moduleType,
    stage,
    status,
    name,
    description,
    parallelism,
    options,
    perTaskCores,
    perTaskRam,
    jvmOptions,
    nodeAttributes,
    coordinationService,
    environmentVariables,
    performanceReportingInterval,
    engine,
    restAddress)
| bwsw/sj-platform | core/sj-crud-rest/src/main/scala/com/bwsw/sj/crud/rest/model/instance/response/OutputInstanceApiResponse.scala | Scala | apache-2.0 | 2,645 |
import scala.language.experimental.macros
object Csv extends CsvImplicits {
  /** Renders each value as a row of cells via its CsvFormat instance, then
    * joins cells with commas and rows with the separator. */
  def writeCsv[A: CsvFormat](values: Traversable[A]): String = {
    val format = implicitly[CsvFormat[A]]
    val rows = values map format
    rows.map(_.mkString(",")).mkString("\\n")
  }
}
| underscoreio/essential-macros | csv/lib/src/main/scala/Csv.scala | Scala | apache-2.0 | 225 |
package walfie.gbf.raidfinder.server.controller
import akka.actor._
import akka.stream.scaladsl.Flow
import akka.stream.{Materializer, OverflowStrategy}
import monix.execution.Scheduler
import play.api.http.websocket.Message
import play.api.libs.streams._
import play.api.mvc._
import play.api.mvc.WebSocket.MessageFlowTransformer
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.Future
import walfie.gbf.raidfinder.domain._
import walfie.gbf.raidfinder.protocol._
import walfie.gbf.raidfinder.RaidFinder
import walfie.gbf.raidfinder.server.actor.WebsocketRaidsHandler
import walfie.gbf.raidfinder.server.util.MessageFlowTransformerUtil
import walfie.gbf.raidfinder.server.{BossNameTranslator, MetricsCollector}
/**
 * Play controller exposing the raid stream over a websocket, speaking either
 * binary or JSON protobuf depending on the negotiated subprotocol.
 */
class WebsocketController(
  raidFinder: RaidFinder[BinaryProtobuf],
  translator: BossNameTranslator,
  keepAliveInterval: FiniteDuration,
  metricsCollector: MetricsCollector
)(implicit system: ActorSystem, materializer: Materializer, scheduler: Scheduler) extends Controller {
  private val jsonTransformer = MessageFlowTransformerUtil.protobufJsonMessageFlowTransformer
  private val binaryTransformer = MessageFlowTransformerUtil.protobufBinaryMessageFlowTransformer
  // Clients that do not request a subprotocol get JSON.
  private val defaultTransformer = jsonTransformer

  /**
   * Open a websocket channel, communicating in either binary or JSON protobuf.
   * Accepts subprotocols "binary" and "json" (default to "json" if subprotocol unspecified).
   * If an unknown subprotocol is specified, return status code 400.
   */
  def raids(keepAlive: Boolean) = WebSocket { request =>
    // Subprotocols can either be comma-separated in the same header value,
    // or specified across different header values.
    val requestedProtocols = for {
      headerValue <- request.headers.getAll("Sec-WebSocket-Protocol")
      value <- headerValue.split(",")
    } yield value.trim

    val interval = if (keepAlive) Some(keepAliveInterval) else None

    // First recognised protocol wins; None means only unknown protocols were offered.
    val transformerOpt: Option[MessageFlowTransformer[RequestMessage, BinaryProtobuf]] =
      if (requestedProtocols.isEmpty) {
        Some(defaultTransformer)
      } else requestedProtocols.collectFirst {
        case "binary" => binaryTransformer
        case "json" => jsonTransformer
      }

    val result: Either[Result, Flow[Message, Message, _]] = transformerOpt match {
      case Some(transformer) => Right {
        // One handler actor per connection; `out` is the client-bound actor.
        val props = { out: ActorRef =>
          WebsocketRaidsHandler.props(out, raidFinder, translator, interval, metricsCollector)
        }
        /**
         * By default, `ActorFlow.actorRef` buffers 16 messages and drops new messages if
         * the buffer is full. We'll likely hit that limit when returning backfill tweets.
         *
         * See: https://github.com/playframework/playframework/issues/6246
         *
         * There's not a great way to handle this server-side without increasing buffer
         * size, so instead I'm going to throttle the startup requests client-side.
         */
        val flow = ActorFlow.actorRef(props = props)
        transformer.transform(flow)
      }
      case None => Left {
        val unsupportedProtocols = requestedProtocols.mkString("[", ", ", "]")
        Results.BadRequest("Unsupported websocket subprotocols " + unsupportedProtocols)
      }
    }
    Future.successful(result)
  }
}
| gnawnoraa/GBF-Raider-Copy | server/src/main/scala/walfie/gbf/raidfinder/server/controller/WebsocketController.scala | Scala | mit | 3,349 |
package com.sksamuel.elastic4s.analyzers
import com.sksamuel.elastic4s.json.XContentBuilder
import com.sksamuel.exts.OptionImplicits._
import scala.collection.JavaConverters._
/** A token filter referenced by name only (Elasticsearch built-ins, no settings). */
trait TokenFilter extends AnalyzerFilter
/** A configurable token filter whose settings are serialised via `build`. */
trait TokenFilterDefinition extends TokenFilter with AnalyzerFilterDefinition
// Parameterless built-in filters; `name` is the identifier written into the
// analyzer definition.
case object ReverseTokenFilter extends TokenFilter {
  val name = "reverse"
}
case object ApostropheTokenFilter extends TokenFilter {
  val name = "apostrophe"
}
case object TrimTokenFilter extends TokenFilter {
  val name = "trim"
}
case object StandardTokenFilter extends TokenFilter {
  val name = "standard"
}
case object AsciiFoldingTokenFilter extends TokenFilter {
  val name = "asciifolding"
}
case object LowercaseTokenFilter extends TokenFilter {
  val name = "lowercase"
}
case object UppercaseTokenFilter extends TokenFilter {
  val name = "uppercase"
}
case object KStemTokenFilter extends TokenFilter {
  val name = "kstem"
}
case object PorterStemTokenFilter extends TokenFilter {
  val name = "porterStem"
}
// NOTE(review): this case object shares its name with the configurable
// `case class UniqueTokenFilter` defined later in this file (it becomes the
// case class's companion) -- confirm both are intended to be public.
case object UniqueTokenFilter extends TokenFilter {
  val name = "unique"
}
/**
 * Configuration for the Elasticsearch "synonym" token filter. Synonyms are
 * supplied either inline via `synonyms` or from a file via `path`
 * (serialised as `synonyms_path`); at least one must be provided.
 */
case class SynonymTokenFilter(name: String,
                              path: Option[String] = None,
                              synonyms: Set[String] = Set.empty,
                              ignoreCase: Option[Boolean] = None,
                              format: Option[String] = None,
                              expand: Option[Boolean] = None,
                              tokenizer: Option[Tokenizer] = None)
  extends TokenFilterDefinition {

  require(path.isDefined || synonyms.nonEmpty, "synonym requires either `synonyms` or `synonyms_path` to be configured")

  val filterType = "synonym"

  /** Emits only the settings that were actually configured. */
  override def build(source: XContentBuilder): Unit = {
    path.foreach(source.field("synonyms_path", _))
    // Guard the inline list like every sibling filter in this file does, so a
    // path-only configuration does not also emit an empty "synonyms": [].
    if (synonyms.nonEmpty)
      source.array("synonyms", synonyms.toArray)
    format.foreach(source.field("format", _))
    ignoreCase.foreach(source.field("ignore_case", _))
    expand.foreach(source.field("expand", _))
    tokenizer.foreach(t => source.field("tokenizer", t.name))
  }

  def path(path: String): SynonymTokenFilter = copy(path = Some(path))
  def synonyms(synonyms: Iterable[String]): SynonymTokenFilter = copy(synonyms = synonyms.toSet)
  def tokenizer(tokenizer: Tokenizer): SynonymTokenFilter = copy(tokenizer = Some(tokenizer))
  def format(format: String): SynonymTokenFilter = copy(format = Some(format))
  def ignoreCase(ignoreCase: Boolean): SynonymTokenFilter = copy(ignoreCase = Some(ignoreCase))
  def expand(expand: Boolean): SynonymTokenFilter = copy(expand = Some(expand))
}
/** Configuration for the Elasticsearch "truncate" token filter. */
case class TruncateTokenFilter(name: String, length: Option[Int] = None) extends TokenFilterDefinition {
  val filterType = "truncate"
  override def build(source: XContentBuilder): Unit =
    length.foreach(source.field("length", _))
  def length(length: Int): TruncateTokenFilter = copy(length = length.some)
}
/** Configuration for the Elasticsearch "length" token filter (min/max token length). */
case class LengthTokenFilter(name: String, min: Option[Int] = None, max: Option[Int] = None)
  extends TokenFilterDefinition {
  val filterType = "length"
  override def build(source: XContentBuilder): Unit = {
    min.foreach(source.field("min", _))
    max.foreach(source.field("max", _))
  }
  def min(min: Int): LengthTokenFilter = copy(min = min.some)
  def max(max: Int): LengthTokenFilter = copy(max = max.some)
}
/** Configuration for the Elasticsearch "unique" token filter.
  * See also the parameterless `case object UniqueTokenFilter` above. */
case class UniqueTokenFilter(name: String, onlyOnSamePosition: Option[Boolean] = None) extends TokenFilterDefinition {
  val filterType = "unique"
  override def build(source: XContentBuilder): Unit =
    onlyOnSamePosition.foreach(source.field("only_on_same_position", _))
  def onlyOnSamePosition(onlyOnSamePosition: Boolean): UniqueTokenFilter =
    copy(onlyOnSamePosition = onlyOnSamePosition.some)
}
/** Configuration for the Elasticsearch "keyword_marker" token filter.
  * Keywords may come inline, from a file (`keywordsPath`) or a regex (`keywordsPattern`). */
case class KeywordMarkerTokenFilter(name: String,
                                    keywords: Seq[String] = Nil,
                                    keywordsPath: Option[String] = None,
                                    keywordsPattern: Option[String] = None,
                                    ignoreCase: Option[Boolean] = None)
  extends TokenFilterDefinition {
  val filterType = "keyword_marker"
  override def build(source: XContentBuilder): Unit = {
    // The inline list is only emitted when non-empty, so a path/pattern-only
    // configuration stays clean.
    if (keywords.nonEmpty)
      source.array("keywords", keywords.toArray)
    keywordsPath.foreach(source.field("keywords_path", _))
    keywordsPattern.foreach(source.field("keywords_pattern", _))
    ignoreCase.foreach(source.field("ignore_case", _))
  }
  def keywords(keywords: Seq[String]): KeywordMarkerTokenFilter = copy(keywords = keywords)
  def keywords(first: String, rest: String*): KeywordMarkerTokenFilter = copy(keywords = first +: rest)
  def keywordsPath(path: String): KeywordMarkerTokenFilter = copy(keywordsPath = path.some)
  def keywordsPattern(pattern: String): KeywordMarkerTokenFilter = copy(keywordsPattern = pattern.some)
  def ignoreCase(ignoreCase: Boolean): KeywordMarkerTokenFilter = copy(ignoreCase = ignoreCase.some)
}
/** Configuration for the Elasticsearch "elision" token filter (articles to elide). */
case class ElisionTokenFilter(name: String, articles: Seq[String] = Nil) extends TokenFilterDefinition {
  val filterType = "elision"
  override def build(source: XContentBuilder): Unit =
    source.array("articles", articles.toArray)
  def articles(articles: Seq[String]): ElisionTokenFilter = copy(articles = articles)
  def articles(first: String, rest: String*): ElisionTokenFilter = copy(articles = first +: rest)
}
/** Configuration for the Elasticsearch "limit" token count filter. */
case class LimitTokenCountTokenFilter(name: String,
                                      maxTokenCount: Option[Int] = None,
                                      consumeAllTokens: Option[Boolean] = None)
  extends TokenFilterDefinition {
  val filterType = "limit"
  override def build(source: XContentBuilder): Unit = {
    maxTokenCount.foreach(source.field("max_token_count", _))
    consumeAllTokens.foreach(source.field("consume_all_tokens", _))
  }
  def maxTokenCount(maxTokenCount: Int): LimitTokenCountTokenFilter = copy(maxTokenCount = maxTokenCount.some)
  def consumeAllTokens(consumeAllTokens: Boolean): LimitTokenCountTokenFilter =
    copy(consumeAllTokens = consumeAllTokens.some)
}
/** Configuration for the Elasticsearch "stop" token filter.
  * Stopwords may be given inline, by language set (see [[NamedStopTokenFilter]])
  * or from a file. NOTE(review): if both `stopwords` and `language` are set,
  * the "stopwords" key is written twice -- confirm callers never do both. */
case class StopTokenFilter(name: String,
                           language: Option[String] = None,
                           stopwords: Iterable[String] = Nil,
                           stopwordsPath: Option[String] = None,
                           enablePositionIncrements: Option[Boolean] = None, // ignored now as of 1.4.0
                           removeTrailing: Option[Boolean] = None,
                           ignoreCase: Option[Boolean] = None)
  extends TokenFilterDefinition {
  val filterType = "stop"
  override def build(source: XContentBuilder): Unit = {
    if (stopwords.nonEmpty)
      source.array("stopwords", stopwords.toArray)
    language.foreach(source.field("stopwords", _))
    stopwordsPath.foreach(source.field("stopwords_path", _))
    enablePositionIncrements.foreach(source.field("enable_position_increments", _))
    ignoreCase.foreach(source.field("ignore_case", _))
    removeTrailing.foreach(source.field("remove_trailing", _))
  }
  def ignoreCase(boolean: Boolean): StopTokenFilter = copy(ignoreCase = boolean.some)
  def removeTrailing(boolean: Boolean): StopTokenFilter = copy(removeTrailing = boolean.some)
  def enablePositionIncrements(boolean: Boolean): StopTokenFilter = copy(enablePositionIncrements = boolean.some)
  def language(language: String): StopTokenFilter = copy(language = language.some)
  def stopwords(stopwords: Iterable[String]): StopTokenFilter = copy(stopwords = stopwords)
  def stopwords(stopwords: String, rest: String*): StopTokenFilter = copy(stopwords = stopwords +: rest)
  def stopwordsPath(path: String): StopTokenFilter = copy(stopwordsPath = path.some)
}
/** Identifiers of the predefined language stopword sets accepted by the
  * "stop" filter's `stopwords` setting (e.g. `StopTokenFilter(...).language(English)`). */
object NamedStopTokenFilter {
  val Arabic = "_arabic_"
  val Armenian = "_armenian_"
  val Basque = "_basque_"
  val Brazilian = "_brazilian_"
  val Bulgarian = "_bulgarian_"
  val Catalan = "_catalan_"
  val Czech = "_czech_"
  val Danish = "_danish_"
  val Dutch = "_dutch_"
  val English = "_english_"
  val Finnish = "_finnish_"
  val French = "_french_"
  val Galician = "_galician_"
  val German = "_german_"
  val Greek = "_greek_"
  val Hindi = "_hindi_"
  val Hungarian = "_hungarian_"
  val Indonesian = "_indonesian_"
  val Italian = "_italian_"
  val Norwegian = "_norwegian_"
  val Persian = "_persian_"
  val Portuguese = "_portuguese_"
  val Romanian = "_romanian_"
  val Russian = "_russian_"
  val Spanish = "_spanish_"
  val Swedish = "_swedish_"
  val Turkish = "_turkish_"
}
/** Configuration for the Elasticsearch "pattern_capture" token filter.
  * Unlike most options in this file, `preserve_original` is always emitted. */
case class PatternCaptureTokenFilter(name: String, patterns: Seq[String] = Nil, preserveOriginal: Boolean = true)
  extends TokenFilterDefinition {
  val filterType = "pattern_capture"
  override def build(source: XContentBuilder): Unit = {
    if (patterns.nonEmpty)
      source.array("patterns", patterns.toArray)
    source.field("preserve_original", preserveOriginal)
  }
  def patterns(patterns: Seq[String]): PatternCaptureTokenFilter = copy(patterns = patterns)
  def patterns(first: String, rest: String*): PatternCaptureTokenFilter = copy(patterns = first +: rest)
  def preserveOriginal(preserveOriginal: Boolean): PatternCaptureTokenFilter = copy(preserveOriginal = preserveOriginal)
}
/** Configuration for the Elasticsearch "pattern_replace" token filter; both settings are mandatory. */
case class PatternReplaceTokenFilter(name: String, pattern: String, replacement: String) extends TokenFilterDefinition {
  val filterType = "pattern_replace"
  override def build(source: XContentBuilder): Unit = {
    source.field("pattern", pattern)
    source.field("replacement", replacement)
  }
  def pattern(p: String): PatternReplaceTokenFilter = copy(pattern = p)
  def replacement(r: String): PatternReplaceTokenFilter = copy(replacement = r)
}
/** Configuration for the Elasticsearch "common_grams" token filter.
  * Common words may be supplied inline or from a file via `commonWordsPath`. */
case class CommonGramsTokenFilter(name: String,
                                  commonWords: Iterable[String] = Nil,
                                  commonWordsPath: Option[String] = None,
                                  ignoreCase: Option[Boolean] = None,
                                  queryMode: Option[Boolean] = None)
  extends TokenFilterDefinition {
  val filterType = "common_grams"
  override def build(source: XContentBuilder): Unit = {
    if (commonWords.nonEmpty)
      source.array("common_words", commonWords.toArray)
    commonWordsPath.foreach(source.field("common_words_path", _))
    ignoreCase.foreach(source.field("ignore_case", _))
    queryMode.foreach(source.field("query_mode", _))
  }
  def commonWords(words: Iterable[String]): CommonGramsTokenFilter = copy(commonWords = words)
  def commonWords(first: String, rest: String*): CommonGramsTokenFilter = copy(commonWords = first +: rest)
  def ignoreCase(ignoreCase: Boolean): CommonGramsTokenFilter = copy(ignoreCase = ignoreCase.some)
  def queryMode(queryMode: Boolean): CommonGramsTokenFilter = copy(queryMode = queryMode.some)
  def commonWordsPath(path: String): CommonGramsTokenFilter = copy(commonWordsPath = path.some)
}
/** Definition of the `edgeNGram` token filter. Only explicitly-set options are
  * serialised into the analysis settings.
  */
case class EdgeNGramTokenFilter(name: String,
                                minGram: Option[Int] = None,
                                maxGram: Option[Int] = None,
                                side: Option[String] = None)
  extends TokenFilterDefinition {

  val filterType = "edgeNGram"

  override def build(source: XContentBuilder): Unit = {
    for (min <- minGram) source.field("min_gram", min)
    for (max <- maxGram) source.field("max_gram", max)
    for (s <- side) source.field("side", s)
  }

  // Fluent setters returning a new immutable definition.
  def minMaxGrams(min: Int, max: Int): EdgeNGramTokenFilter = copy(minGram = Some(min), maxGram = Some(max))
  def minGram(min: Int): EdgeNGramTokenFilter = copy(minGram = Some(min))
  def maxGram(max: Int): EdgeNGramTokenFilter = copy(maxGram = Some(max))
  def side(side: String): EdgeNGramTokenFilter = copy(side = Some(side))
}
/** Definition of the `nGram` token filter. Only explicitly-set options are
  * serialised into the analysis settings.
  */
case class NGramTokenFilter(name: String, minGram: Option[Int] = None, maxGram: Option[Int] = None)
  extends TokenFilterDefinition {

  val filterType = "nGram"

  override def build(source: XContentBuilder): Unit = {
    for (min <- minGram) source.field("min_gram", min)
    for (max <- maxGram) source.field("max_gram", max)
  }

  // Fluent setters returning a new immutable definition.
  def minMaxGrams(min: Int, max: Int): NGramTokenFilter = copy(minGram = Some(min), maxGram = Some(max))
  def minGram(min: Int): NGramTokenFilter = copy(minGram = Some(min))
  def maxGram(max: Int): NGramTokenFilter = copy(maxGram = Some(max))
}
/** Definition of the `snowball` stemming token filter for the given language. */
case class SnowballTokenFilter(name: String, language: String) extends TokenFilterDefinition {

  val filterType = "snowball"

  override def build(source: XContentBuilder): Unit = {
    source.field("language", language)
  }

  // Fluent setter returning a new immutable definition.
  def lang(l: String): SnowballTokenFilter = this.copy(language = l)
}
/** Definition of the `stemmer` token filter for the given language. */
case class StemmerTokenFilter(name: String, lang: String) extends TokenFilterDefinition {
  val filterType = "stemmer"
  // NOTE(review): the language is written under the "name" key rather than
  // "language"; Elasticsearch historically accepts both for the stemmer filter —
  // confirm against the target ES version before changing.
  override def build(source: XContentBuilder): Unit =
    source.field("name", lang)
  // Fluent setter returning a new immutable definition.
  def lang(l: String): StemmerTokenFilter = copy(lang = l)
}
/** Definition of the `stemmer_override` token filter. Rules may be supplied
  * inline via `rules` or from a file via `rulesPath`; unset options are omitted.
  */
case class StemmerOverrideTokenFilter(name: String, rules: Seq[String] = Nil, rulesPath: Option[String] = None)
  extends TokenFilterDefinition {

  val filterType = "stemmer_override"

  override def build(source: XContentBuilder): Unit = {
    if (rules.nonEmpty) {
      source.array("rules", rules.toArray)
    }
    for (path <- rulesPath) source.field("rules_path", path)
  }

  // Fluent setters returning a new immutable definition.
  def rules(rules: Array[String]): StemmerOverrideTokenFilter = copy(rules = rules)
  def rulesPath(path: String): StemmerOverrideTokenFilter = copy(rulesPath = Some(path))
}
/** Definition of the `word_delimiter` token filter. Every option is optional;
  * only explicitly-set options are serialised, so unset ones fall back to the
  * Elasticsearch defaults.
  *
  * NOTE(review): `stemEnglishPossesive` is misspelled (should be "Possessive")
  * but is part of the public API and the `copy` signature, so it cannot be
  * renamed without breaking callers. The serialised JSON key below is spelled
  * correctly.
  */
case class WordDelimiterTokenFilter(name: String,
                                    generateWordParts: Option[Boolean] = None,
                                    generateNumberParts: Option[Boolean] = None,
                                    catenateWords: Option[Boolean] = None,
                                    catenateNumbers: Option[Boolean] = None,
                                    catenateAll: Option[Boolean] = None,
                                    splitOnCaseChange: Option[Boolean] = None,
                                    preserveOriginal: Option[Boolean] = None,
                                    splitOnNumerics: Option[Boolean] = None,
                                    stemEnglishPossesive: Option[Boolean] = None)
  extends TokenFilterDefinition {
  val filterType = "word_delimiter"
  // Serialise only the options that were explicitly set.
  override def build(source: XContentBuilder): Unit = {
    generateWordParts.foreach(source.field("generate_word_parts", _))
    generateNumberParts.foreach(source.field("generate_number_parts", _))
    catenateWords.foreach(source.field("catenate_words", _))
    catenateNumbers.foreach(source.field("catenate_numbers", _))
    catenateAll.foreach(source.field("catenate_all", _))
    splitOnCaseChange.foreach(source.field("split_on_case_change", _))
    preserveOriginal.foreach(source.field("preserve_original", _))
    splitOnNumerics.foreach(source.field("split_on_numerics", _))
    stemEnglishPossesive.foreach(source.field("stem_english_possessive", _))
  }
  // Fluent setters, each returning a new immutable definition.
  def generateWordParts(bool: Boolean): WordDelimiterTokenFilter = copy(generateWordParts = bool.some)
  def generateNumberParts(bool: Boolean): WordDelimiterTokenFilter = copy(generateNumberParts = bool.some)
  def catenateWords(bool: Boolean): WordDelimiterTokenFilter = copy(catenateWords = bool.some)
  def catenateNumbers(bool: Boolean): WordDelimiterTokenFilter = copy(catenateNumbers = bool.some)
  def catenateAll(bool: Boolean): WordDelimiterTokenFilter = copy(catenateAll = bool.some)
  def splitOnCaseChange(bool: Boolean): WordDelimiterTokenFilter = copy(splitOnCaseChange = bool.some)
  def preserveOriginal(bool: Boolean): WordDelimiterTokenFilter = copy(preserveOriginal = bool.some)
  def splitOnNumerics(bool: Boolean): WordDelimiterTokenFilter = copy(splitOnNumerics = bool.some)
  def stemEnglishPossesive(bool: Boolean): WordDelimiterTokenFilter = copy(stemEnglishPossesive = bool.some)
}
/** Definition of the `shingle` token filter. Only explicitly-set options are
  * serialised into the analysis settings.
  */
case class ShingleTokenFilter(name: String,
                              maxShingleSize: Option[Int] = None,
                              minShingleSize: Option[Int] = None,
                              outputUnigrams: Option[Boolean] = None,
                              outputUnigramsIfNoShingles: Option[Boolean] = None,
                              tokenSeparator: Option[String] = None,
                              fillerToken: Option[String] = None)
  extends TokenFilterDefinition {

  val filterType = "shingle"

  override def build(source: XContentBuilder): Unit = {
    for (max <- maxShingleSize) source.field("max_shingle_size", max)
    for (min <- minShingleSize) source.field("min_shingle_size", min)
    for (unigrams <- outputUnigrams) source.field("output_unigrams", unigrams)
    for (noShingles <- outputUnigramsIfNoShingles) source.field("output_unigrams_if_no_shingles", noShingles)
    for (sep <- tokenSeparator) source.field("token_separator", sep)
    for (filler <- fillerToken) source.field("filler_token", filler)
  }

  // Fluent setters returning a new immutable definition.
  def maxShingleSize(max: Int): ShingleTokenFilter = copy(maxShingleSize = Some(max))
  def minShingleSize(min: Int): ShingleTokenFilter = copy(minShingleSize = Some(min))
  def outputUnigrams(b: Boolean): ShingleTokenFilter = copy(outputUnigrams = Some(b))
  def outputUnigramsIfNoShingles(b: Boolean): ShingleTokenFilter = copy(outputUnigramsIfNoShingles = Some(b))
  def tokenSeparator(sep: String): ShingleTokenFilter = copy(tokenSeparator = Some(sep))
  def fillerToken(filler: String): ShingleTokenFilter = copy(fillerToken = Some(filler))
}
/** The two decompounder flavours supported by the compound-word token filter;
  * `name` is the literal filter type string sent to Elasticsearch.
  */
sealed trait CompoundWordTokenFilterType {
  def name: String
}
/** Decompounds using a hyphenation pattern file plus a word list. */
case object HyphenationDecompounder extends CompoundWordTokenFilterType {
  override def name = "hyphenation_decompounder"
}
/** Decompounds by brute-force dictionary lookup against a word list. */
case object DictionaryDecompounder extends CompoundWordTokenFilterType {
  override def name = "dictionary_decompounder"
}
/** Definition of a compound-word token filter (either the hyphenation or the
  * dictionary decompounder, selected by `type`). Only explicitly-set options
  * are serialised, so unset ones fall back to the Elasticsearch defaults.
  */
case class CompoundWordTokenFilter(name: String,
                                   `type`: CompoundWordTokenFilterType,
                                   wordList: Iterable[String] = Nil,
                                   wordListPath: Option[String] = None,
                                   hyphenationPatternsPath: Option[String] = None,
                                   minWordSize: Option[Int] = None,
                                   minSubwordSize: Option[Int] = None,
                                   maxSubwordSize: Option[Int] = None,
                                   onlyLongestMatch: Option[Boolean] = None)
  extends TokenFilterDefinition {
  // The serialised filter type comes from the chosen decompounder variant.
  val filterType = `type`.name
  override def build(source: XContentBuilder): Unit = {
    if (wordList.nonEmpty) {
      source.array("word_list", wordList.toArray)
    }
    wordListPath.foreach(source.field("word_list_path", _))
    hyphenationPatternsPath.foreach(source.field("hyphenation_patterns_path", _))
    minWordSize.foreach(source.field("min_word_size", _))
    minSubwordSize.foreach(source.field("min_subword_size", _))
    maxSubwordSize.foreach(source.field("max_subword_size", _))
    onlyLongestMatch.foreach(source.field("only_longest_match", _))
  }
  // Fluent setters, each returning a new immutable definition.
  def wordList(wordList: Iterable[String]): CompoundWordTokenFilter =
    copy(wordList = wordList)
  def wordList(word: String, rest: String*): CompoundWordTokenFilter =
    copy(wordList = word +: rest)
  def wordListPath(wordListPath: String): CompoundWordTokenFilter =
    copy(wordListPath = wordListPath.some)
  def hyphenationPatternsPath(hyphenationPatternsPath: String): CompoundWordTokenFilter =
    copy(hyphenationPatternsPath = hyphenationPatternsPath.some)
  def minWordSize(minWordSize: Int): CompoundWordTokenFilter =
    copy(minWordSize = minWordSize.some)
  def minSubwordSize(minSubwordSize: Int): CompoundWordTokenFilter =
    copy(minSubwordSize = minSubwordSize.some)
  def maxSubwordSize(maxSubwordSize: Int): CompoundWordTokenFilter =
    copy(maxSubwordSize = maxSubwordSize.some)
  def onlyLongestMatch(onlyLongestMatch: Boolean): CompoundWordTokenFilter =
    copy(onlyLongestMatch = onlyLongestMatch.some)
}
| Tecsisa/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/analyzers/TokenFilter.scala | Scala | apache-2.0 | 20,340 |
package com.socrata.querycoordinator.caching
import com.socrata.soql.{Leaf, SoQLAnalysis}
import com.socrata.soql.typed._
import com.socrata.soql.collection.OrderedMap
import com.socrata.soql.environment.ColumnName
import scala.util.parsing.input.NoPosition
/** Rewrites a [[SoQLAnalysis]] so that every embedded expression carries
  * `NoPosition` instead of its original source position. Two analyses that are
  * structurally identical then compare equal regardless of where they appeared
  * in the query text, which makes the result usable as a cache key.
  */
object SoQLAnalysisDepositioner {
  /** Returns a copy of `sa` with all source positions stripped, recursing into
    * selection, joins, where/having, group-bys, order-bys and hints.
    */
  def apply[ColumnId,Type](sa: SoQLAnalysis[ColumnId,Type]): SoQLAnalysis[ColumnId,Type] = {
    val SoQLAnalysis(isGrouped, distinct, selection, from, joins, where, groupBys, having, orderBys, limit, offset, search, hints) = sa
    SoQLAnalysis(isGrouped = isGrouped,
      distinct = distinct,
      selection = depositionSelection(selection),
      from = from,
      joins = depositionOptJoins(joins),
      where = depositionOptExpr(where),
      groupBys = depositionGroupBys(groupBys),
      having = depositionOptExpr(having),
      orderBys = depositionOrderBys(orderBys),
      limit = limit,
      offset = offset,
      search = search,
      hints = hints.map(depositionHint))
  }

  private def depositionSelection[ColumnId,Type](selection: OrderedMap[ColumnName, CoreExpr[ColumnId, Type]]) = {
    selection.mapValues(depositionExpr)
  }

  // Rebuilds each expression node with NoPosition, recursing into function-call
  // parameters and filters. Window definitions are passed through unchanged.
  private def depositionExpr[ColumnId,Type](expr: CoreExpr[ColumnId, Type]): CoreExpr[ColumnId, Type] = {
    expr match {
      case ColumnRef(qual, column, typ) => ColumnRef(qual, column, typ)(NoPosition)
      case NumberLiteral(value, typ) => NumberLiteral(value, typ)(NoPosition)
      case StringLiteral(value, typ) => StringLiteral(value, typ)(NoPosition)
      case BooleanLiteral(value, typ) => BooleanLiteral(value, typ)(NoPosition)
      case NullLiteral(typ) => NullLiteral(typ)(NoPosition)
      case fc: FunctionCall[ColumnId, Type] => FunctionCall[ColumnId, Type](fc.function, fc.parameters.map(depositionExpr), fc.filter.map(depositionExpr), fc.window)(NoPosition, NoPosition)
    }
  }

  private def depositionOptExpr[ColumnId,Type](expr: Option[CoreExpr[ColumnId, Type]]) = expr.map(depositionExpr)

  private def depositionGroupBys[ColumnId,Type](expr: Seq[CoreExpr[ColumnId, Type]]) = expr.map(depositionExpr)

  private def depositionOrderBys[ColumnId,Type](expr: Seq[OrderBy[ColumnId, Type]]) = expr.map(depositionOrderBy)

  private def depositionOrderBy[ColumnId,Type](ob: OrderBy[ColumnId, Type]) = ob.copy(expression = depositionExpr(ob.expression))

  // Renamed from the misspelled "depsoitionOptJoins"; private, so no external
  // callers are affected. Recursively depositions each join's sub-analyses and
  // its ON expression.
  private def depositionOptJoins[ColumnId, Type](joins: Seq[Join[ColumnId, Type]]) = {
    joins.map { join =>
      val mappedSubAna = join.from.subAnalysis.map { sa =>
        val depositioned = sa.analyses.flatMap(analysis => Leaf(SoQLAnalysisDepositioner(analysis)))
        sa.copy(analyses = depositioned)
      }
      Join(join.typ, join.from.copy(subAnalysis = mappedSubAna), depositionExpr(join.on), join.lateral)
    }
  }

  private def depositionHint[ColumnId,Type](hint: Hint[ColumnId, Type]) = {
    hint match {
      case x@Materialized(_) => x.copy(NoPosition)
      case x@NoRollup(_) => x.copy(NoPosition)
      case x@NoChainMerge(_) => x.copy(NoPosition)
    }
  }
}
| socrata-platform/query-coordinator | query-coordinator/src/main/scala/com/socrata/querycoordinator/caching/SoQLAnalysisDepositioner.scala | Scala | apache-2.0 | 3,148 |
package dk.tennis.compare.rating.multiskill.infer.perfdiffgivenskills
import dk.bayes.math.gaussian.MultivariateGaussian
import dk.tennis.compare.rating.multiskill.model.perfdiff.PerfDiff
import scala.math._
import dk.bayes.math.gaussian.Gaussian
import breeze.linalg.DenseVector
import breeze.linalg.DenseMatrix
import breeze.linalg.Matrix
object inferPerfDiffGivenSkills {
/** Infers the marginal performance difference (player 1 minus player 2) from
  * the joint 2-D skill distribution, adding i.i.d. performance noise with
  * standard deviation exp(logPerfStdDev) to each player's skill.
  */
def apply(gameSkill: MultivariateGaussian, logPerfStdDev: Double): PerfDiff = {
  // Hoist the performance variance so exp() is evaluated once instead of four
  // times; sd * sd keeps the result bit-identical to the original computation.
  val perfStdDev = exp(logPerfStdDev)
  val perfVar = perfStdDev * perfStdDev
  // Difference operator: d = s1 - s2.
  val A_d = DenseMatrix(1d, -1d).t
  // Diagonal performance-noise covariance.
  val V_d = new DenseMatrix(2, 2, Array(perfVar, 0, 0, perfVar)).t
  val perfDiff = MultivariateGaussian((A_d * gameSkill.m), (A_d * (gameSkill.v + V_d) * A_d.t)).toGaussian
  PerfDiff(perfDiff, gameSkill)
}
/** Convenience overload: builds the joint skill distribution from two
  * independent 1-D skill marginals and delegates to the multivariate variant.
  */
def apply(p1Skill: Gaussian, p2Skill: Gaussian, logPerfStdDev: Double): PerfDiff = {
  val mean = DenseVector(p1Skill.m, p2Skill.m)
  // Independent skills: diagonal covariance.
  val variance = new DenseMatrix(2, 2, Array(p1Skill.v, 0, 0, p2Skill.v)).t
  inferPerfDiffGivenSkills(MultivariateGaussian(mean, variance), logPerfStdDev)
}
} | danielkorzekwa/tennis-player-compare | multiskill/src/main/scala/dk/tennis/compare/rating/multiskill/infer/perfdiffgivenskills/inferPerfDiffGivenSkills.scala | Scala | bsd-2-clause | 1,130 |
import sbt._
/** Centralised library dependencies for the sbt build. */
object Dependencies {

  val akkaVersion = "2.2.4"
  val sprayVersion = "1.2.2"

  // Small factories so each artifact's group/version is stated exactly once.
  private def spray(artifact: String) = "io.spray" % artifact % sprayVersion
  private def akka(artifact: String) = "com.typesafe.akka" %% artifact % akkaVersion

  /** Compile/runtime dependencies. */
  val allBuildDependencies = Seq(
    spray("spray-can"),
    spray("spray-io"),
    spray("spray-httpx"),
    spray("spray-routing"),
    akka("akka-actor"),
    akka("akka-slf4j"),
    "ch.qos.logback" % "logback-classic" % "1.0.13"
  )

  /** Test-scoped dependencies. */
  val allTestDependencies = Seq(
    spray("spray-testkit") % "test",
    akka("akka-testkit") % "test",
    "org.scalatest" %% "scalatest" % "2.2.1" % "test"
  )

  /** Everything, build and test scope combined. */
  val allDependencies = allBuildDependencies ++ allTestDependencies
}
| natalinobusa/akka-galore | akka-1/project/dependencies.scala | Scala | apache-2.0 | 955 |
/** Greeter that rejects a null name eagerly, at construction time. */
class GreetingNullCheck(name: String) {

  // Fail fast: without this check a null name would only surface later,
  // inside greet().
  if (name == null) {
    throw new NullPointerException("name is null")
  }

  /** Prints a greeting for the configured name. */
  def greet(): Unit = println(s"hello $name")
}
/*
run the lines below in Scala shell
val g1 = new GreetingNullCheck("sujee")
g1.greet()
val g2 = new GreetingNullCheck(null) // exception will be thrown
g2.greet()
*/ | elephantscale/learning-scala | scala-with-sujee/src/main/scala/com/es/scala/runinshell/GreetingNullCheck.scala | Scala | apache-2.0 | 337 |
package mypipe.pipe
import akka.actor.{ ActorSystem, Cancellable }
import mypipe.api.consumer.{ BinaryLogConsumer, BinaryLogConsumerListener }
import mypipe.api.data.Table
import mypipe.api.event.{ AlterEvent, Mutation }
import mypipe.api.Conf
import mypipe.api.producer.Producer
import mypipe.mysql._
import org.slf4j.LoggerFactory
import com.github.shyiko.mysql.binlog.event.{ Event ⇒ MEvent }
import scala.concurrent.duration._
/** Wires a set of MySQL binary-log consumers to a single producer: mutations
  * seen by any consumer are queued on the producer, and the consumer's binlog
  * position is periodically persisted and the producer flushed.
  */
class Pipe(id: String, consumers: List[MySQLBinaryLogConsumer], producer: Producer) {

  protected val log = LoggerFactory.getLogger(getClass)
  protected var CONSUMER_DISCONNECT_WAIT_SECS = 2
  protected val system = ActorSystem("mypipe")
  implicit val ec = system.dispatcher

  // Set by the listener's connect/disconnect callbacks; read via isConnected.
  @volatile protected var _connected: Boolean = false
  // One thread per consumer, created in connect().
  protected var threads = List.empty[Thread]
  // Periodic position-save/flush task, active while connected.
  protected var flusher: Option[Cancellable] = None

  protected val listener = new BinaryLogConsumerListener[MEvent, BinaryLogFilePosition]() {

    override def onConnect(consumer: BinaryLogConsumer[MEvent, BinaryLogFilePosition]) {
      log.info(s"Pipe $id connected!")
      _connected = true
      // Periodically persist the consumer's binlog position and flush the
      // producer so progress survives a crash.
      flusher = Some(system.scheduler.schedule(Conf.FLUSH_INTERVAL_SECS.seconds,
        Conf.FLUSH_INTERVAL_SECS.seconds) {
          Conf.binlogSaveFilePosition(consumer.id,
            consumer.getBinaryLogPosition.get,
            id)
          // TODO: if flush fails, stop and disconnect
          producer.flush()
        })
    }

    override def onDisconnect(consumer: BinaryLogConsumer[MEvent, BinaryLogFilePosition]) {
      log.info(s"Pipe $id disconnected!")
      _connected = false
      flusher.foreach(_.cancel())
      // Save the final position and flush once more before going quiet.
      Conf.binlogSaveFilePosition(
        consumer.id,
        consumer.getBinaryLogPosition.get,
        id)
      producer.flush()
    }

    override def onMutation(consumer: BinaryLogConsumer[MEvent, BinaryLogFilePosition], mutation: Mutation): Boolean = {
      producer.queue(mutation)
    }

    override def onMutation(consumer: BinaryLogConsumer[MEvent, BinaryLogFilePosition], mutations: Seq[Mutation]): Boolean = {
      producer.queueList(mutations.toList)
    }

    override def onTableAlter(consumer: BinaryLogConsumer[MEvent, BinaryLogFilePosition], event: AlterEvent): Boolean = {
      producer.handleAlter(event)
    }
  }

  /** True between the listener's onConnect and onDisconnect callbacks. */
  def isConnected = _connected

  /** Starts one thread per consumer; a no-op (with a warning) if already started. */
  def connect(): Unit = {
    if (threads.nonEmpty) {
      log.warn("Attempting to reconnect pipe while already connected, aborting!")
    } else {
      threads = consumers.map(c ⇒ {
        c.registerListener(listener)
        val t = new Thread() {
          override def run() {
            log.info(s"Connecting pipe between $c -> $producer")
            c.connect()
          }
        }
        t.start()
        t
      })
    }
  }

  /** Disconnects the consumers and waits briefly for their threads to finish. */
  def disconnect(): Unit = {
    // NOTE(review): this iterates the cross product of consumers and threads,
    // so with more than one consumer each consumer is disconnected multiple
    // times — confirm whether zipping consumers with their own threads was the
    // intent.
    for (
      c ← consumers;
      t ← threads
    ) {
      try {
        log.info(s"Disconnecting pipe between $c -> $producer")
        c.disconnect()
        t.join(CONSUMER_DISCONNECT_WAIT_SECS * 1000)
      } catch {
        // Fixed interpolation: the original "$c.id" interpolated the consumer's
        // toString and then appended the literal text ".id". Also pass the
        // exception so the stack trace is logged.
        case e: Exception ⇒ log.error(s"Caught exception while trying to disconnect from ${c.id} at binlog position ${c.getBinaryLogPosition}.", e)
      }
    }
  }

  override def toString: String = id
}
| Asana/mypipe | mypipe-api/src/main/scala/mypipe/pipe/Pipe.scala | Scala | apache-2.0 | 3,217 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.view.rewrite
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
/** Materialized-view rewrite tests: verifies that queries with filters and
  * joins are answered from a matching materialized view (MV hit) and return
  * the expected rows.
  */
class MVFilterAndJoinTest extends QueryTest with BeforeAndAfterAll {

  override def beforeAll(): Unit = {
    defaultConfig()
    // Start from a clean slate, then create the three base tables.
    drop
    sql("create table main_table (name string,age int,height int) STORED AS carbondata")
    sql("create table dim_table (name string,age int,height int) STORED AS carbondata")
    sql("create table sdr_table (name varchar(20),score int) STORED AS carbondata")
  }

  // Drops all tables and materialized views used by this suite; safe to call
  // when they do not exist.
  def drop() {
    sql("drop table if exists main_table")
    sql("drop materialized view if exists main_table_mv")
    sql("drop table if exists dim_table")
    sql("drop table if exists sdr_table")
    sql("drop materialized view if exists main_table_mv1")
  }

  test("test mv no filter and query with filter") {
    // The MV has no filter; the query adds one — the rewrite must still hit the MV.
    val querySQL = "select sum(age),name from main_table where name = 'tom' group by name"
    sql("insert into main_table select 'tom',20,170")
    sql("insert into main_table select 'lily',30,160")
    sql("create materialized view main_table_mv as select sum(age),name " +
        "from main_table group by name")
    sql("refresh materialized view main_table_mv")
    assert(TestUtil.verifyMVHit(sql(querySQL).queryExecution.optimizedPlan, "main_table_mv"))
    checkAnswer(sql(querySQL), Seq(Row(20, "tom")))
  }

  test("test mv rebuild twice and varchar string") {
    // Join of a varchar fact table with a dimension table; the MV is refreshed
    // twice (before and after new fact rows) and must still be hit.
    val querySQL = "select A.sum_score,A.name from (select sum(score) as sum_score,age,sdr.name " +
      "as name from sdr_table sdr left join dim_table dim on sdr.name = dim.name " +
      "where sdr.name in ('tom','lily') group by sdr.name,age) A where name = 'tom'"
    sql("insert into dim_table select 'tom',20,170")
    sql("insert into dim_table select 'lily',30,160")
    sql(
      "create materialized view main_table_mv1 as select count(score),sum(score),count(dim.name)," +
      "age,sdr.name from sdr_table sdr left join dim_table dim on sdr.name = dim.name group by " +
      "sdr.name,age")
    sql("refresh materialized view main_table_mv1")
    sql("insert into sdr_table select 'tom',70")
    sql("insert into sdr_table select 'tom',50")
    sql("insert into sdr_table select 'lily',80")
    sql("refresh materialized view main_table_mv1")
    assert(TestUtil.verifyMVHit(sql(querySQL).queryExecution.optimizedPlan, "main_table_mv1"))
    checkAnswer(sql(querySQL), Seq(Row(120, "tom")))
  }

  override def afterAll(): Unit = {
    drop
  }
}
| zzcclp/carbondata | integration/spark/src/test/scala/org/apache/carbondata/view/rewrite/MVFilterAndJoinTest.scala | Scala | apache-2.0 | 3,347 |
/*
* Copyright 2014-2020 Rik van der Kleij
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package intellij.haskell.external.component
import com.github.blemale.scaffeine.{Cache, LoadingCache, Scaffeine}
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.text.StringUtil
import intellij.haskell.HaskellNotificationGroup
import intellij.haskell.external.execution.CommandLine
import intellij.haskell.util.StringUtil.removePackageQualifier
import intellij.haskell.util.{HaskellProjectUtil, ScalaUtil}
import scala.jdk.CollectionConverters._
/** Caches library package metadata obtained from GHC's `ghc-pkg dump`:
  * which modules each package exposes or hides, and which packages it
  * depends on. Populated eagerly by [[preloadLibraryPackageInfos]].
  */
private[component] object LibraryPackageInfoComponent {

  // Cache key: a package name is only meaningful within one IntelliJ project.
  private case class Key(project: Project, packageName: String)

  private type Result = Option[LibraryPackageInfo]

  type Exposed = Boolean

  // Module name -> exposed flag, across all packages seen so far.
  private final val AllCache: Cache[String, Exposed] = Scaffeine().build()

  // Prelude is always available and exposed.
  AllCache.put(HaskellProjectUtil.Prelude, true)

  private final val Cache: LoadingCache[Key, Result] = Scaffeine().build((k: Key) => findPackageInfo(k))

  // Splits command output into lines after normalising line separators.
  private def splitLines(s: String, excludeEmptyLines: Boolean) = {
    val converted = StringUtil.convertLineSeparators(s)
    StringUtil.split(converted, "\\n", true, excludeEmptyLines).asScala.toSeq
  }

  import scala.concurrent.duration._

  /** Runs `ghc-pkg dump` against the project's global/snapshot/local package
    * databases and fills both caches. Project-local packages and the `rts`
    * package are excluded from the per-package cache.
    */
  def preloadLibraryPackageInfos(project: Project): Unit = {
    val projectPackageNames = HaskellComponentsManager.findProjectModulePackageNames(project).map(_._2)
    val globalProjectInfo = HaskellComponentsManager.getGlobalProjectInfo(project)
    val result = globalProjectInfo.map(info => CommandLine.run(project, info.ghcPkgPath,
      Seq("dump",
        s"--package-db=${info.packageDbPaths.globalPackageDbPath}",
        s"--package-db=${info.packageDbPaths.snapshotPackageDbPath}",
        s"--package-db=${info.packageDbPaths.localPackageDbPath}"), notifyBalloonError = true, timeoutInMillis = 60.seconds.toMillis)).map { processOutput =>
      // ghc-pkg separates packages with a line containing only "---".
      val packageOutputs = processOutput.getStdout.split("(?m)^---\\n")
      packageOutputs.map(o => {
        val outputLines = splitLines(o, excludeEmptyLines = true)
        val packageInfo = findPackageInfo(outputLines)
        packageInfo.foreach(_.exposedModuleNames.foreach(AllCache.put(_, true)))
        packageInfo.foreach(_.hiddenModuleNames.foreach(AllCache.put(_, false)))
        packageInfo
      })
    }
    result match {
      case Some(r) => r.foreach {
        case d@Some(packageInfo) => if (!projectPackageNames.contains(packageInfo.packageName) && packageInfo.packageName != "rts") Cache.put(Key(project, packageInfo.packageName), d)
        case None => HaskellNotificationGroup.logInfoBalloonEvent(project, s"Could not retrieve all package information via `ghc-pkg dump`")
      }
      case None => HaskellNotificationGroup.logErrorBalloonEvent(project, "Executing `ghc-pkg dump` failed")
    }
  }

  /** Whether `moduleName` is exposed (`Some(true)`), hidden (`Some(false)`)
    * or unknown (`None`, i.e. not preloaded yet).
    */
  def findLibraryModuleName(moduleName: String): Option[Boolean] = {
    AllCache.getIfPresent(moduleName)
  }

  /** Cached package info for `packageName`, or None when not preloaded. */
  def findLibraryPackageInfo(project: Project, packageName: String): Result = {
    val key = Key(project, packageName)
    Cache.get(key) match {
      case result@Some(_) => result
      case _ => None
    }
  }

  // NOTE(review): the `project` parameter is not used — this returns cached
  // packages across ALL projects. Confirm whether filtering by project was
  // intended.
  def libraryPackageInfos(project: Project): Iterable[LibraryPackageInfo] = {
    Cache.asMap().values.flatten
  }

  // Loader for cache misses. Preloading should have populated every key, so a
  // miss indicates a bug elsewhere; log and return None rather than re-running
  // ghc-pkg for a single package.
  private def findPackageInfo(key: Key): Result = {
    // Because preloadLibraryModuleNames should already have done all the work, something is wrong if this method is called
    HaskellNotificationGroup.logWarningEvent(key.project, s"Package ${key.packageName} is not in library module names cache")
    None
  }

  // Matches "<name>-<version>" with an optional trailing "-<hash>" segment.
  private final val PackageNameVersionPattern = """([\\w\\-]+)-([\\d\\.]+)(?:\\-.*)?""".r

  // Parses one entry of a "depends" field into a PackageId.
  // NOTE(review): "Nameversion" is a naming typo, but the method is public to
  // the package so renaming could break callers.
  def toPackageNameversion(depends: String): Option[PackageId] = {
    depends match {
      case PackageNameVersionPattern(name, version) => Some(PackageId(name, version))
      case _ => None
    }
  }

  // Builds a LibraryPackageInfo from the key/value lines of one ghc-pkg entry;
  // name, version and id are mandatory, the list fields default to empty.
  private def findPackageInfo(lines: Seq[String]): Option[LibraryPackageInfo] = {
    val packageInfoMap = ScalaUtil.linesToMap(lines)
    for {
      name <- packageInfoMap.get("name")
      version <- packageInfoMap.get("version")
      id <- packageInfoMap.get("id")
      exposedModuleNames = packageInfoMap.get("exposed-modules").map(splitLine).getOrElse(Seq())
      hiddenModuleNames = packageInfoMap.get("hidden-modules").map(splitLine).getOrElse(Seq())
      dependsPackageNames = packageInfoMap.get("depends").map(splitLine).getOrElse(Seq()).flatMap(toPackageNameversion)
    } yield LibraryPackageInfo(name, version, id, exposedModuleNames, hiddenModuleNames, dependsPackageNames)
  }

  // Tokenises a whitespace-separated field value, dropping "from" markers and
  // package qualifiers.
  private def splitLine(s: String): Seq[String] = {
    s.replaceAll("""\\s+""", ",").split(",").map(_.trim).filterNot(_ == "from").map(removePackageQualifier).toSeq
  }

  /** Evicts all cached entries belonging to `project` (e.g. on project close). */
  def invalidate(project: Project): Unit = {
    val keys = Cache.asMap().keys.filter(_.project == project)
    keys.foreach(Cache.invalidate)
  }
}
/** Metadata for one library package as reported by `ghc-pkg dump`. */
case class LibraryPackageInfo(packageName: String, version: String, id: String, exposedModuleNames: Seq[String], hiddenModuleNames: Seq[String], dependsOnPackageIds: Seq[PackageId])
case class PackageId(name: String, version: String) | rikvdkleij/intellij-haskell | src/main/scala/intellij/haskell/external/component/LibraryPackageInfoComponent.scala | Scala | apache-2.0 | 5,638 |
package org.http4s
package client
import cats.effect.IO
/** Runs the shared client route test battery against the JDK `java.net` backed client. */
class JavaNetClientSpec extends ClientRouteTestBattery("JavaNetClient") {
  // Builds the client as a managed Resource using the suite's test Blocker.
  def clientResource = JavaNetClientBuilder[IO](testBlocker).resource
}
| ChristopherDavenport/http4s | client/src/test/scala/org/http4s/client/JavaNetClientSpec.scala | Scala | apache-2.0 | 204 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx
import scala.reflect.ClassTag
import org.apache.spark._
import org.apache.spark.graphx.impl.RoutingTablePartition
import org.apache.spark.graphx.impl.ShippableVertexPartition
import org.apache.spark.graphx.impl.VertexAttributeBlock
import org.apache.spark.graphx.impl.VertexRDDImpl
import org.apache.spark.rdd._
import org.apache.spark.storage.StorageLevel
/**
* Extends `RDD[(VertexId, VD)]` by ensuring that there is only one entry for each vertex and by
* pre-indexing the entries for fast, efficient joins. Two VertexRDDs with the same index can be
* joined efficiently. All operations except [[reindex]] preserve the index. To construct a
* `VertexRDD`, use the [[org.apache.spark.graphx.VertexRDD$ VertexRDD object]].
*
* Additionally, stores routing information to enable joining the vertex attributes with an
* [[EdgeRDD]].
*
* @example Construct a `VertexRDD` from a plain RDD:
* {{{
* // Construct an initial vertex set
* val someData: RDD[(VertexId, SomeType)] = loadData(someFile)
* val vset = VertexRDD(someData)
* // If there were redundant values in someData we would use a reduceFunc
* val vset2 = VertexRDD(someData, reduceFunc)
* // Finally we can use the VertexRDD to index another dataset
* val otherData: RDD[(VertexId, OtherType)] = loadData(otherFile)
* val vset3 = vset2.innerJoin(otherData) { (vid, a, b) => b }
* // Now we can construct very fast joins between the two sets
* val vset4: VertexRDD[(SomeType, OtherType)] = vset.leftJoin(vset3)
* }}}
*
* @tparam VD the vertex attribute associated with each vertex in the set.
*/
abstract class VertexRDD[VD](
sc: SparkContext,
deps: Seq[Dependency[_]]) extends RDD[(VertexId, VD)](sc, deps) {
implicit protected def vdTag: ClassTag[VD]
private[graphx] def partitionsRDD: RDD[ShippableVertexPartition[VD]]
override protected def getPartitions: Array[Partition] = partitionsRDD.partitions
/**
* Provides the `RDD[(VertexId, VD)]` equivalent output.
*/
override def compute(part: Partition, context: TaskContext): Iterator[(VertexId, VD)] = {
firstParent[ShippableVertexPartition[VD]].iterator(part, context).next().iterator
}
/**
* Construct a new VertexRDD that is indexed by only the visible vertices. The resulting
* VertexRDD will be based on a different index and can no longer be quickly joined with this
* RDD.
*/
def reindex(): VertexRDD[VD]
/**
* Applies a function to each `VertexPartition` of this RDD and returns a new VertexRDD.
*/
private[graphx] def mapVertexPartitions[VD2: ClassTag](
f: ShippableVertexPartition[VD] => ShippableVertexPartition[VD2])
: VertexRDD[VD2]
/**
* Restricts the vertex set to the set of vertices satisfying the given predicate. This operation
* preserves the index for efficient joins with the original RDD, and it sets bits in the bitmask
* rather than allocating new memory.
*
* It is declared and defined here to allow refining the return type from `RDD[(VertexId, VD)]` to
* `VertexRDD[VD]`.
*
* @param pred the user defined predicate, which takes a tuple to conform to the
* `RDD[(VertexId, VD)]` interface
*/
override def filter(pred: Tuple2[VertexId, VD] => Boolean): VertexRDD[VD] =
this.mapVertexPartitions(_.filter(Function.untupled(pred)))
/**
* Maps each vertex attribute, preserving the index.
*
* @tparam VD2 the type returned by the map function
*
* @param f the function applied to each value in the RDD
* @return a new VertexRDD with values obtained by applying `f` to each of the entries in the
* original VertexRDD
*/
def mapValues[VD2: ClassTag](f: VD => VD2): VertexRDD[VD2]
/**
* Maps each vertex attribute, additionally supplying the vertex ID.
*
* @tparam VD2 the type returned by the map function
*
* @param f the function applied to each ID-value pair in the RDD
* @return a new VertexRDD with values obtained by applying `f` to each of the entries in the
* original VertexRDD. The resulting VertexRDD retains the same index.
*/
  /**
   * Maps each vertex attribute, additionally supplying the vertex ID to the map function.
   *
   * @tparam VD2 the attribute type of the resulting VertexRDD
   * @param f the function applied to each (vertex id, attribute) pair
   */
  def mapValues[VD2: ClassTag](f: (VertexId, VD) => VD2): VertexRDD[VD2]

  /**
   * For each VertexId present in both `this` and `other`, minus will act as a set difference
   * operation returning only those unique VertexId's present in `this`.
   *
   * @param other an RDD to run the set operation against
   */
  def minus(other: RDD[(VertexId, VD)]): VertexRDD[VD]

  /**
   * For each VertexId present in both `this` and `other`, minus will act as a set difference
   * operation returning only those unique VertexId's present in `this`.
   *
   * @param other a VertexRDD to run the set operation against
   */
  def minus(other: VertexRDD[VD]): VertexRDD[VD]

  /**
   * For each vertex present in both `this` and `other`, `diff` returns only those vertices with
   * differing values; for values that are different, keeps the values from `other`. This is
   * only guaranteed to work if the VertexRDDs share a common ancestor.
   *
   * @param other the other RDD[(VertexId, VD)] with which to diff against.
   */
  def diff(other: RDD[(VertexId, VD)]): VertexRDD[VD]

  /**
   * For each vertex present in both `this` and `other`, `diff` returns only those vertices with
   * differing values; for values that are different, keeps the values from `other`. This is
   * only guaranteed to work if the VertexRDDs share a common ancestor.
   *
   * @param other the other VertexRDD with which to diff against.
   */
  def diff(other: VertexRDD[VD]): VertexRDD[VD]

  /**
   * Left joins this RDD with another VertexRDD with the same index. This function will fail if
   * both VertexRDDs do not share the same index. The resulting vertex set contains an entry for
   * each vertex in `this`.
   * If `other` is missing any vertex in this VertexRDD, `f` is passed `None`.
   *
   * @tparam VD2 the attribute type of the other VertexRDD
   * @tparam VD3 the attribute type of the resulting VertexRDD
   *
   * @param other the other VertexRDD with which to join.
   * @param f the function mapping a vertex id and its attributes in this and the other vertex set
   * to a new vertex attribute.
   * @return a VertexRDD containing the results of `f`
   */
  def leftZipJoin[VD2: ClassTag, VD3: ClassTag]
      (other: VertexRDD[VD2])(f: (VertexId, VD, Option[VD2]) => VD3): VertexRDD[VD3]

  /**
   * Left joins this VertexRDD with an RDD containing vertex attribute pairs. If the other RDD is
   * backed by a VertexRDD with the same index then the efficient [[leftZipJoin]] implementation is
   * used. The resulting VertexRDD contains an entry for each vertex in `this`. If `other` is
   * missing any vertex in this VertexRDD, `f` is passed `None`. If there are duplicates,
   * the vertex is picked arbitrarily.
   *
   * @tparam VD2 the attribute type of the other VertexRDD
   * @tparam VD3 the attribute type of the resulting VertexRDD
   *
   * @param other the other VertexRDD with which to join
   * @param f the function mapping a vertex id and its attributes in this and the other vertex set
   * to a new vertex attribute.
   * @return a VertexRDD containing all the vertices in this VertexRDD with the attributes emitted
   * by `f`.
   */
  def leftJoin[VD2: ClassTag, VD3: ClassTag]
      (other: RDD[(VertexId, VD2)])
      (f: (VertexId, VD, Option[VD2]) => VD3)
    : VertexRDD[VD3]

  /**
   * Efficiently inner joins this VertexRDD with another VertexRDD sharing the same index. See
   * [[innerJoin]] for the behavior of the join.
   */
  def innerZipJoin[U: ClassTag, VD2: ClassTag](other: VertexRDD[U])
      (f: (VertexId, VD, U) => VD2): VertexRDD[VD2]

  /**
   * Inner joins this VertexRDD with an RDD containing vertex attribute pairs. If the other RDD is
   * backed by a VertexRDD with the same index then the efficient [[innerZipJoin]] implementation
   * is used.
   *
   * @param other an RDD containing vertices to join. If there are multiple entries for the same
   * vertex, one is picked arbitrarily. Use [[aggregateUsingIndex]] to merge multiple entries.
   * @param f the join function applied to corresponding values of `this` and `other`
   * @return a VertexRDD co-indexed with `this`, containing only vertices that appear in both
   * `this` and `other`, with values supplied by `f`
   */
  def innerJoin[U: ClassTag, VD2: ClassTag](other: RDD[(VertexId, U)])
      (f: (VertexId, VD, U) => VD2): VertexRDD[VD2]

  /**
   * Aggregates vertices in `messages` that have the same ids using `reduceFunc`, returning a
   * VertexRDD co-indexed with `this`.
   *
   * @param messages an RDD containing messages to aggregate, where each message is a pair of its
   * target vertex ID and the message data
   * @param reduceFunc the associative aggregation function for merging messages to the same vertex
   * @return a VertexRDD co-indexed with `this`, containing only vertices that received messages.
   * For those vertices, their values are the result of applying `reduceFunc` to all received
   * messages.
   */
  def aggregateUsingIndex[VD2: ClassTag](
      messages: RDD[(VertexId, VD2)], reduceFunc: (VD2, VD2) => VD2): VertexRDD[VD2]

  /**
   * Returns a new `VertexRDD` reflecting a reversal of all edge directions in the corresponding
   * [[EdgeRDD]].
   */
  def reverseRoutingTables(): VertexRDD[VD]

  /** Prepares this VertexRDD for efficient joins with the given EdgeRDD. */
  def withEdges(edges: EdgeRDD[_]): VertexRDD[VD]

  /** Replaces the vertex partitions while preserving all other properties of the VertexRDD. */
  private[graphx] def withPartitionsRDD[VD2: ClassTag](
      partitionsRDD: RDD[ShippableVertexPartition[VD2]]): VertexRDD[VD2]

  /**
   * Changes the target storage level while preserving all other properties of the
   * VertexRDD. Operations on the returned VertexRDD will preserve this storage level.
   *
   * This does not actually trigger a cache; to do this, call
   * [[org.apache.spark.graphx.VertexRDD#cache]] on the returned VertexRDD.
   */
  private[graphx] def withTargetStorageLevel(
      targetStorageLevel: StorageLevel): VertexRDD[VD]

  /** Generates an RDD of vertex attributes suitable for shipping to the edge partitions. */
  private[graphx] def shipVertexAttributes(
      shipSrc: Boolean, shipDst: Boolean): RDD[(PartitionID, VertexAttributeBlock[VD])]

  /** Generates an RDD of vertex IDs suitable for shipping to the edge partitions. */
  private[graphx] def shipVertexIds(): RDD[(PartitionID, Array[VertexId])]
} // end of VertexRDD
/**
 * The VertexRDD singleton is used to construct VertexRDDs.
 */
object VertexRDD {

  /**
   * Constructs a standalone `VertexRDD` (one that is not set up for efficient joins with an
   * [[EdgeRDD]]) from an RDD of vertex-attribute pairs. Duplicate entries are removed arbitrarily.
   *
   * @tparam VD the vertex attribute type
   *
   * @param vertices the collection of vertex-attribute pairs
   */
  def apply[VD: ClassTag](vertices: RDD[(VertexId, VD)]): VertexRDD[VD] = {
    // Reuse an existing partitioning if present; otherwise hash-partition by vertex id so
    // that each id lands in exactly one partition.
    val vPartitioned: RDD[(VertexId, VD)] = vertices.partitioner match {
      case Some(p) => vertices
      case None => vertices.partitionBy(new HashPartitioner(vertices.partitions.length))
    }
    val vertexPartitions = vPartitioned.mapPartitions(
      iter => Iterator(ShippableVertexPartition(iter)),
      preservesPartitioning = true)
    new VertexRDDImpl(vertexPartitions)
  }

  /**
   * Constructs a `VertexRDD` from an RDD of vertex-attribute pairs. Duplicate vertex entries are
   * removed arbitrarily. The resulting `VertexRDD` will be joinable with `edges`, and any missing
   * vertices referred to by `edges` will be created with the attribute `defaultVal`.
   *
   * @tparam VD the vertex attribute type
   *
   * @param vertices the collection of vertex-attribute pairs
   * @param edges the [[EdgeRDD]] that these vertices may be joined with
   * @param defaultVal the vertex attribute to use when creating missing vertices
   */
  def apply[VD: ClassTag](
      vertices: RDD[(VertexId, VD)], edges: EdgeRDD[_], defaultVal: VD): VertexRDD[VD] = {
    // Delegate to the merging overload, arbitrarily keeping the first of any duplicate values.
    VertexRDD(vertices, edges, defaultVal, (a, b) => a)
  }

  /**
   * Constructs a `VertexRDD` from an RDD of vertex-attribute pairs. Duplicate vertex entries are
   * merged using `mergeFunc`. The resulting `VertexRDD` will be joinable with `edges`, and any
   * missing vertices referred to by `edges` will be created with the attribute `defaultVal`.
   *
   * @tparam VD the vertex attribute type
   *
   * @param vertices the collection of vertex-attribute pairs
   * @param edges the [[EdgeRDD]] that these vertices may be joined with
   * @param defaultVal the vertex attribute to use when creating missing vertices
   * @param mergeFunc the commutative, associative duplicate vertex attribute merge function
   */
  def apply[VD: ClassTag](
      vertices: RDD[(VertexId, VD)], edges: EdgeRDD[_], defaultVal: VD, mergeFunc: (VD, VD) => VD
    ): VertexRDD[VD] = {
    val vPartitioned: RDD[(VertexId, VD)] = vertices.partitioner match {
      case Some(p) => vertices
      case None => vertices.partitionBy(new HashPartitioner(vertices.partitions.length))
    }
    // Build one routing table per vertex partition recording which edge partitions reference
    // each vertex, then zip it with the co-partitioned vertex data.
    val routingTables = createRoutingTables(edges, vPartitioned.partitioner.get)
    val vertexPartitions = vPartitioned.zipPartitions(routingTables, preservesPartitioning = true) {
      (vertexIter, routingTableIter) =>
        val routingTable =
          if (routingTableIter.hasNext) routingTableIter.next() else RoutingTablePartition.empty
        Iterator(ShippableVertexPartition(vertexIter, routingTable, defaultVal, mergeFunc))
    }
    new VertexRDDImpl(vertexPartitions)
  }

  /**
   * Constructs a `VertexRDD` containing all vertices referred to in `edges`. The vertices will be
   * created with the attribute `defaultVal`. The resulting `VertexRDD` will be joinable with
   * `edges`.
   *
   * @tparam VD the vertex attribute type
   *
   * @param edges the [[EdgeRDD]] referring to the vertices to create
   * @param numPartitions the desired number of partitions for the resulting `VertexRDD`
   * @param defaultVal the vertex attribute to use when creating missing vertices
   */
  def fromEdges[VD: ClassTag](
      edges: EdgeRDD[_], numPartitions: Int, defaultVal: VD): VertexRDD[VD] = {
    val routingTables = createRoutingTables(edges, new HashPartitioner(numPartitions))
    val vertexPartitions = routingTables.mapPartitions({ routingTableIter =>
      val routingTable =
        if (routingTableIter.hasNext) routingTableIter.next() else RoutingTablePartition.empty
      // No explicit vertex data here: every vertex named by the routing table gets defaultVal.
      Iterator(ShippableVertexPartition(Iterator.empty, routingTable, defaultVal))
    }, preservesPartitioning = true)
    new VertexRDDImpl(vertexPartitions)
  }

  /** Builds the vertex-partition routing tables describing vertex-to-edge-partition membership. */
  private[graphx] def createRoutingTables(
      edges: EdgeRDD[_], vertexPartitioner: Partitioner): RDD[RoutingTablePartition] = {
    // Determine which vertices each edge partition needs by creating a mapping from vid to pid.
    val vid2pid = edges.partitionsRDD.mapPartitions(_.flatMap(
      Function.tupled(RoutingTablePartition.edgePartitionToMsgs)))
      .setName("VertexRDD.createRoutingTables - vid2pid (aggregation)")

    val numEdgePartitions = edges.partitions.length
    // Shuffle the messages onto the vertex partitioner and fold each partition's messages
    // into a single RoutingTablePartition.
    vid2pid.partitionBy(vertexPartitioner).mapPartitions(
      iter => Iterator(RoutingTablePartition.fromMsgs(numEdgePartitions, iter)),
      preservesPartitioning = true)
  }
}
| bravo-zhang/spark | graphx/src/main/scala/org/apache/spark/graphx/VertexRDD.scala | Scala | apache-2.0 | 16,215 |
package ccf.messaging
import org.specs.Specification
import ccf.tree.operation.{TreeOperationDecoder, NoOperation}
/**
 * Specs for the Message factory method and for the encode/decode round trip of
 * each concrete message type (OperationContext, ErrorMessage, ChannelShutdown).
 */
class MessageSpec extends Specification {
  // Decoder shared by all examples; the factory needs it to rebuild tree operations.
  val operationDecoder = TreeOperationDecoder

  "Message factory method with default decoder" should {
    "create operation context from map" in {
      val operationContext = OperationContext(new NoOperation, 1, 2)
      Message(operationContext.encode.asInstanceOf[Map[String, String]], operationDecoder) must equalTo(operationContext)
    }
    // FIX: corrected the "messsage" typo in the example description.
    "create error message from map" in {
      val errorMessage = ErrorMessage("not so critical error")
      Message(errorMessage.encode.asInstanceOf[Map[String, String]], operationDecoder) must equalTo(errorMessage)
    }
    "create channel shutdown from map" in {
      val channelShutdown = ChannelShutdown("critical error")
      Message(channelShutdown.encode.asInstanceOf[Map[String, String]], operationDecoder) must equalTo(channelShutdown)
    }
    "throw exception for unknown type" in {
      Message(Map("abc" -> "123"), operationDecoder) must throwAn[Exception]
    }
  }

  "OperationContext" should {
    "be encoded to and from map of strings using default decoder" in {
      val operation = new NoOperation()
      val operationContext = OperationContext(operation, 1, 2)
      val encodedOperation: Map[String, String] = operationContext.encode.asInstanceOf[Map[String, String]]
      OperationContext(encodedOperation, operationDecoder) must equalTo(operationContext)
    }
  }

  "Error message" should {
    "be encodable" in {
      val errorMessage = ErrorMessage("some kind of error")
      ErrorMessage(errorMessage.encode) must equalTo(errorMessage)
    }
  }

  "Shutdown message" should {
    "be encodable" in {
      val shutdownMessage = ChannelShutdown("shut it down")
      ChannelShutdown(shutdownMessage.encode) must equalTo(shutdownMessage)
    }
  }
}
} | akisaarinen/ccf | ccf/src/test/scala/ccf/messaging/MessageSpec.scala | Scala | apache-2.0 | 1,902 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.transport.netty
import com.typesafe.config.Config
import org.apache.gearpump.util.Constants
/**
 * Netty transport settings resolved from the given Typesafe config.
 * All values are read eagerly at construction time.
 */
class NettyConfig(conf: Config) {

  // Buffer / retry / back-off / batching knobs for the netty transport layer.
  val buffer_size = conf.getInt(Constants.NETTY_BUFFER_SIZE)
  val max_retries = conf.getInt(Constants.NETTY_MAX_RETRIES)
  val base_sleep_ms = conf.getInt(Constants.NETTY_BASE_SLEEP_MS)
  val max_sleep_ms = conf.getInt(Constants.NETTY_MAX_SLEEP_MS)
  val messageBatchSize = conf.getInt(Constants.NETTY_MESSAGE_BATCH_SIZE)
  val flushCheckInterval = conf.getInt(Constants.NETTY_FLUSH_CHECK_INTERVAL)

  /**
   * Instantiates the serializer class named by GEARPUMP_TRANSPORT_SERIALIZER via its
   * no-arg constructor; a fresh instance is created on every call.
   *
   * NOTE(review): Class#newInstance is deprecated since Java 9; consider
   * getDeclaredConstructor().newInstance() once the supported JDK range allows it
   * (beware: that variant wraps constructor exceptions in InvocationTargetException).
   */
  def newTransportSerializer: ITransportMessageSerializer = {
    Class.forName(
      conf.getString(Constants.GEARPUMP_TRANSPORT_SERIALIZER))
      .newInstance().asInstanceOf[ITransportMessageSerializer]
  }
}
| manuzhang/incubator-gearpump | core/src/main/scala/org/apache/gearpump/transport/netty/NettyConfig.scala | Scala | apache-2.0 | 1,580 |
package jsky.app.ot.tpe
import java.time.Instant
import javax.swing._
import scalaz._
import Scalaz._
import edu.gemini.catalog.image._
import edu.gemini.catalog.ui.image.{ImageLoadingListener, ObsWavelengthExtractor, ObservationCatalogOverrides}
import edu.gemini.catalog.ui.tpe.CatalogImageDisplay
import edu.gemini.pot.sp.ISPObservation
import edu.gemini.shared.util.immutable.ScalaConverters._
import edu.gemini.ui.miglayout.MigPanel
import edu.gemini.ui.miglayout.constraints._
import jsky.app.ot.userprefs.images.ImageCatalogPreferencesPanel
import jsky.util.gui.Resources
import scala.swing.event.{ButtonClicked, MouseClicked}
import scala.swing.{Button, Component, Dialog, Label, RadioButton, Swing}
import scalaz.concurrent.Task
object ImageCatalogPanel {

  /**
   * Tries to reset the catalog panel state if open
   */
  private def resetCatalogPanel: Task[Unit] = Task.delay {
    // All three lookups must succeed (TPE window, context, observation shell)
    // before the toolbar is asked to refresh.
    for {
      tpe <- Option(TpeManager.get())
      ctx <- TpeContext.fromTpeManager
      obs <- ctx.obsShell
    } {
      tpe.getTpeToolBar.updateImageCatalogState(obs)
    }
  }

  // Listener whose three callbacks (presumably start/success/error — confirm against
  // ImageLoadingListener) each schedule a catalog-panel reset on the Swing EDT.
  def resetListener: ImageLoadingListener[Unit] =
    ImageLoadingListener(
      Task.delay(Swing.onEDT(ImageCatalogPanel.resetCatalogPanel.unsafePerformSync)),
      Task.delay(Swing.onEDT(ImageCatalogPanel.resetCatalogPanel.unsafePerformSync)),
      Task.delay(Swing.onEDT(ImageCatalogPanel.resetCatalogPanel.unsafePerformSync)))

  // True when a TPE window exists and its toolbar reports `catalog` as selected.
  def isCatalogSelected(catalog: ImageCatalog): Boolean =
    Option(TpeManager.get()).exists(_.getTpeToolBar.isCatalogSelected(catalog))
}
/**
 * Panel of radio buttons of Image catalogs offered by the TPE.
 */
final class ImageCatalogPanel(imageDisplay: CatalogImageDisplay) {

  // Label reporting the per-catalog download state: spinner while downloading,
  // error icon (clickable, to retry) on failure, empty when idle.
  case class ImageLoadingFeedback(catalog: ImageCatalog) extends Label {
    def markDownloading(): Unit = {
      icon = ImageLoadingFeedback.spinnerIcon
      tooltip = "Downloading..."
      deafTo(mouse.clicks)
    }

    def markIdle(): Unit = {
      icon = null
      tooltip = ""
      deafTo(mouse.clicks)
    }

    // On error the label becomes clickable so the user can retry the download.
    def markError(): Unit = {
      icon = ImageLoadingFeedback.errorIcon
      tooltip = "Error when downloading"
      listenTo(mouse.clicks)
      reactions += {
        case MouseClicked(_, _, _, _, _) =>
          requestImage(catalog)
      }
    }
  }

  // One row of the panel: the catalog's radio button plus its feedback label.
  case class CatalogRow(button: RadioButton, feedback: ImageLoadingFeedback)

  object ImageLoadingFeedback {
    val spinnerIcon: ImageIcon = Resources.getIcon("spinner16.gif")
    val warningIcon: ImageIcon = Resources.getIcon("eclipse/alert.gif")
    val errorIcon: ImageIcon = Resources.getIcon("error_tsk.gif")
  }

  private lazy val buttonGroup = new ButtonGroup
  // One row per known catalog.
  private lazy val catalogRows = ImageCatalog.all.map(mkRow)

  // Button that opens a modal dialog hosting the image-catalog preferences panel.
  private lazy val toolsButton = new Button("") {
    tooltip = "Preferences..."
    icon = new ImageIcon(getClass.getResource("/resources/images/eclipse/engineering.gif"))
    reactions += {
      case ButtonClicked(_) =>
        new Dialog() {
          title = "Image Catalog Preferences"
          Resources.setOTFrameIcon(this.peer)
          val closeButton = new Button("Close") {
            reactions += {
              case ButtonClicked(_) =>
                close()
            }
          }

          contents = new MigPanel(LC().fill().insets(5)) {
            add(new ImageCatalogPreferencesPanel().component, CC())
            add(closeButton, CC().alignX(RightAlign).newline())
          }

          defaultButton = closeButton
          modal = true
          setLocationRelativeTo(this)
        }.open()
    }
  }

  // The panel itself: header row, then one (radio button, feedback) pair per catalog.
  lazy val panel: Component = new MigPanel(LC().fill().insets(0.px).gridGap(0.px, 0.px)) {
    add(new Label("Image Catalog:"), CC())
    add(toolsButton, CC().alignX(RightAlign))

    catalogRows.foreach { row =>
      add(row.button, CC().newline())
      add(row.feedback, CC().alignX(RightAlign))
      buttonGroup.add(row.button.peer)
      row.button.reactions += {
        case ButtonClicked(_) =>
          requestImage(row.feedback.catalog)
      }
    }
  }

  // Stores a per-observation catalog override and reloads the sky image.
  private def requestImage(catalog: ImageCatalog) = {
    // Read the current key and wavelength on the tpe
    for {
      tpe <- TpeContext.fromTpeManager
      key <- tpe.obsKey
      wv <- ObsWavelengthExtractor.extractObsWavelength(tpe)
    } {
      // Update the image and store the override
      val actions =
        for {
          _ <- ObservationCatalogOverrides.storeOverride(key, catalog, wv)
          _ <- Task.delay(imageDisplay.loadSkyImage())
        } yield ()
      actions.unsafePerformSync
    }
  }

  // Selects the row whose catalog matches (radio-group exclusivity deselects the rest).
  private def updateSelection(catalog: ImageCatalog): Unit =
    catalogRows.find(_.feedback.catalog === catalog).foreach { _.button.selected = true }

  // NOTE(review): currently unreferenced within this file; kept for API symmetry?
  private def selectedCatalog: Option[ImageCatalog] =
    catalogRows.find(_.button.selected).map(_.feedback.catalog)

  // Applies downloading/error/idle markers to each row from the given catalog sets.
  private def showAsLoading(catalogues: CataloguesInUse): Unit = {
    val cataloguesInProgress = catalogRows.filter(r => catalogues.inProgress.contains(r.feedback.catalog))
    val cataloguesInError = catalogRows.filter(r => catalogues.failed.contains(r.feedback.catalog))
    // Everything neither failed nor in progress is idle.
    val cataloguesIdle = catalogRows.filterNot(u => cataloguesInError.contains(u) || cataloguesInProgress.contains(u))
    cataloguesInProgress.foreach(_.feedback.markDownloading())
    cataloguesInError.foreach(_.feedback.markError())
    cataloguesIdle.foreach(_.feedback.markIdle())
  }

  /**
   * Updates the UI to reflect the state of downloading images
   *
   * Must be called from the EDT
   */
  private def resetCatalogProgressState: Task[Option[Unit]] = {
    // Verify we are on the EDT. We don't want to use Swing.onEDT inside
    assert(SwingUtilities.isEventDispatchThread)

    val tpeContext = TpeContext.fromTpeManager

    // Resolve the base position's coordinates at the scheduling block start
    // (falling back to "now"), then mark the rows for the catalogues in use.
    val catalogButtonsUpdate = for {
      tpe <- tpeContext
      ctx <- tpe.obsContext
      base <- tpe.targets.base
      when = ctx.getSchedulingBlockStart.asScalaOpt | Instant.now.toEpochMilli
      coords <- base.getTarget.coords(when)
    } yield KnownImagesSets.cataloguesInUse(coords).map(showAsLoading)

    catalogButtonsUpdate.sequenceU
  }

  // Re-syncs both the selected radio button (from stored overrides) and the
  // per-catalog progress indicators for the given observation.
  def resetCatalogue(observation: ISPObservation): Unit = {
    // Verify we are on the EDT. We don't want to use Swing.onEDT
    assert(SwingUtilities.isEventDispatchThread)
    val wavelength = TpeContext.fromTpeManager.flatMap(ObsWavelengthExtractor.extractObsWavelength)
    val updateSelectedCatalog = ObservationCatalogOverrides.catalogFor(observation.getNodeKey, wavelength).map(updateSelection)
    // run both side effects synchronously inside EDT
    Nondeterminism[Task].both(updateSelectedCatalog, resetCatalogProgressState).unsafePerformSync
  }

  def isCatalogSelected(catalog: ImageCatalog): Boolean = {
    catalogRows.find(_.button.selected).exists(_.feedback.catalog === catalog)
  }

  // Builds one panel row; the radio button shows the short name, the tooltip the full name.
  private def mkRow(c: ImageCatalog): CatalogRow =
    CatalogRow(new RadioButton(s"${c.shortName}") <| {_.tooltip = c.displayName} , new ImageLoadingFeedback(c))
}
| spakzad/ocs | bundle/jsky.app.ot/src/main/scala/jsky/app/ot/tpe/ImageCatalogPanel.scala | Scala | bsd-3-clause | 7,006 |
package redis
import akka.actor.{Props, ActorRef, ActorRefFactory}
import scala.concurrent.stm._
import redis.actors.RedisClientActor
import java.net.InetSocketAddress
import scala.concurrent.{Future, ExecutionContext}
import redis.protocol.RedisReply
import redis.commands.Transactions
// Connection coordinates for one redis server. `active` is an STM cell tracking
// whether the connection to this server is currently established.
case class RedisServer(host: String = "localhost",
                       port: Int = 6379,
                       password: Option[String] = None,
                       db: Option[Int] = None,
                       active: Ref[Boolean] = Ref(false))
/**
 * Round-robin pool of redis connections: one client actor per configured server.
 *
 * Only servers whose `active` flag is set participate in request routing; the
 * routable subset is cached in an STM Ref and rebuilt on connection status changes.
 */
abstract class RedisClientPoolLike(system: ActorRefFactory) extends RoundRobinPoolRequest {

  val redisServers: Seq[RedisServer]
  val name: String
  implicit val executionContext = system.dispatcher

  // One client actor per configured server, whether currently connected or not.
  val redisConnectionPoolAll: Seq[ActorRef] = redisServers.map(server => {
    system.actorOf(
      Props(classOf[RedisClientActor], new InetSocketAddress(server.host, server.port), getConnectOperations(server), onConnectStatus(server))
        .withDispatcher(Redis.dispatcher),
      name + '-' + Redis.tempName()
    )
  })

  // Actors belonging to the servers currently marked active.
  def getConnectionsActive: Seq[ActorRef] = {
    val redisConnectionZip = redisServers zip redisConnectionPoolAll
    redisConnectionZip.collect {
      case (server, actorRef) if server.active.single.get => actorRef
    }
  }

  // Cached snapshot of the routable connections.
  val redisConnectionRef: Ref[Seq[ActorRef]] = Ref(getConnectionsActive)

  // Connections eligible for round-robin routing.
  def redisConnectionPool: Seq[ActorRef] = {
    redisConnectionRef.single.get
  }

  // Commands issued on every (re)connect: AUTH and SELECT, as configured per server.
  def onConnect(redis: RedisCommands, server: RedisServer): Unit = {
    server.password.foreach(redis.auth(_)) // TODO log on auth failure
    server.db.foreach(redis.select)
  }

  // Connection-status callback: flips the server's active flag and, only when the
  // flag actually changed (CAS succeeded), rebuilds the routable connection list.
  def onConnectStatus(server: RedisServer): (Boolean) => Unit = (status: Boolean) => {
    if (server.active.single.compareAndSet(!status, status)) {
      redisConnectionRef.single.set(getConnectionsActive)
    }
  }

  // Records the onConnect commands into a buffer so the client actor can replay
  // them each time the connection is (re)established.
  def getConnectOperations(server: RedisServer): () => Seq[Operation[_, _]] = () => {
    val self = this
    val redis = new BufferedRequest with RedisCommands {
      implicit val executionContext: ExecutionContext = self.executionContext
    }
    onConnect(redis, server)
    redis.operations.result()
  }

  /**
   * Disconnect from the server (stop the actor)
   */
  def stop() {
    redisConnectionPoolAll.foreach(redisConnection => {
      system stop redisConnection
    })
  }
}
// Plain pool: every command is load-balanced round-robin over the active servers.
case class RedisClientPool(redisServers: Seq[RedisServer],
                           name: String = "RedisClientPool")
                          (implicit _system: ActorRefFactory) extends RedisClientPoolLike(_system) with RedisCommands
// Master/slaves topology: master-only commands always go to the master; other
// commands are spread over the slave pool whenever slaves are configured.
case class RedisClientMasterSlaves(master: RedisServer,
                                   slaves: Seq[RedisServer])
                                  (implicit _system: ActorRefFactory) extends RedisCommands with Transactions {
  implicit val executionContext = _system.dispatcher

  // Dedicated client for the master, pool for the slaves.
  val masterClient = RedisClient(master.host, master.port, master.password, master.db)
  val slavesClients = RedisClientPool(slaves)

  // Route by command kind: master-only (or no slaves at all) -> master, else slave pool.
  override def send[T](redisCommand: RedisCommand[_ <: RedisReply, T]): Future[T] = {
    if (redisCommand.isMasterOnly || slaves.isEmpty) {
      masterClient.send(redisCommand)
    } else {
      slavesClients.send(redisCommand)
    }
  }

  // Transactions (from the Transactions trait) run against the master connection.
  def redisConnection: ActorRef = masterClient.redisConnection
}
| actsasbuffoon/rediscala | src/main/scala/redis/RedisPool.scala | Scala | apache-2.0 | 3,317 |
package org.dsa.utils
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/**
 * Demo of aggregating the values that share a key in an RDD of (key, count) pairs.
 * Created by xubo on 2016/10/31.
 */
object ReduceTest {
  def main(args: Array[String]) {
    val conf = new SparkConf().setAppName("ReduceTest Application").setMaster("local[4]")
    val sc = new SparkContext(conf)
    val rdd = sc.parallelize(Array(("hello", 1), ("hello", 2), ("world", 5)))
    val result = reduceByKeyTest(sc, rdd)
    // Expected: ("hello", 3) — the summed counts for the first element's key.
    println(result)
    sc.stop()
  }

  /**
   * Sums the values belonging to the key of the RDD's first element.
   *
   * FIX: the previous implementation used `rdd.reduce` with a function that was
   * neither associative nor commutative (it dropped `b` whenever the keys
   * differed), so the result depended on partitioning and merge order, as
   * `RDD.reduce` only guarantees a deterministic result for commutative,
   * associative functions. This version is deterministic: it picks the key of
   * the first element and sums all values carried by that key.
   *
   * @param sc the active SparkContext (unused, kept for interface compatibility)
   * @param rdd non-empty RDD of (key, count) pairs
   * @return the first element's key paired with the total of its counts
   */
  def reduceByKeyTest(sc: SparkContext, rdd: RDD[(String, Int)]): (String, Int) = {
    val key = rdd.first()._1
    val total = rdd.filter(_._1 == key).map(_._2).reduce(_ + _)
    (key, total)
  }
}
| xubo245/CloudSW | src/main/scala/org/dsa/utils/ReduceTest.scala | Scala | gpl-2.0 | 780 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.sbt
/**
 * ANSI color helpers for sbt console output. Every member simply delegates to
 * play.runsupport.Colors so the sbt plugin and run support share one implementation.
 */
object Colors {

  import play.runsupport.{ Colors => RunColors }

  // Delegated flag (presumably terminal ANSI support); see RunColors for the actual logic.
  lazy val isANSISupported = RunColors.isANSISupported

  def red(str: String): String = RunColors.red(str)
  def blue(str: String): String = RunColors.blue(str)
  def cyan(str: String): String = RunColors.cyan(str)
  def green(str: String): String = RunColors.green(str)
  def magenta(str: String): String = RunColors.magenta(str)
  def white(str: String): String = RunColors.white(str)
  def black(str: String): String = RunColors.black(str)
  def yellow(str: String): String = RunColors.yellow(str)
}
| wsargent/playframework | framework/src/sbt-plugin/src/main/scala/play/sbt/Colors.scala | Scala | apache-2.0 | 667 |
import org.scalatest.{Matchers, FunSuite}
/** @version 1.3.0 */
class NucleotideCountTest extends FunSuite with Matchers {

  // Exercism convention: only the first test is enabled; each subsequent test is
  // `pending` and gets unlocked by the solver one at a time.
  test("empty strand") {
    new DNA("").nucleotideCounts should be(
      Right(Map('A' -> 0, 'C' -> 0, 'G' -> 0, 'T' -> 0)))
  }

  test("can count one nucleotide in single-character input") {
    pending
    new DNA("G").nucleotideCounts should be(
      Right(Map('A' -> 0, 'C' -> 0, 'G' -> 1, 'T' -> 0)))
  }

  test("strand with repeated nucleotide") {
    pending
    new DNA("GGGGGGG").nucleotideCounts should be(
      Right(Map('A' -> 0, 'C' -> 0, 'G' -> 7, 'T' -> 0)))
  }

  test("strand with multiple nucleotides") {
    pending
    new DNA(
      "AGCTTTTCATTCTGACTGCAACGGGCAATATGTCTCTGTGTGGATTAAAAAAAGAGTGTCTGATAGCAGC").nucleotideCounts should be(
      Right(Map('A' -> 20, 'C' -> 12, 'G' -> 17, 'T' -> 21)))
  }

  // Invalid characters are reported through the Left channel.
  test("strand with invalid nucleotides") {
    pending
    new DNA("AGXXACT").nucleotideCounts.isLeft should be(true)
  }
}
| exercism/xscala | exercises/practice/nucleotide-count/src/test/scala/NucleotideCountTest.scala | Scala | mit | 980 |
/*package com.example
import org.specs2.mutable.Specification
import spray.testkit.Specs2RouteTest
import spray.http._
import StatusCodes._
class MyServiceSpec extends Specification with Specs2RouteTest with MyService {
def actorRefFactory = system
"MyService" should {
"return a greeting for GET requests to the root path" in {
Get() ~> myRoute ~> check {
entityAs[String] must contain("Say hello")
}
}
"leave GET requests to other paths unhandled" in {
Get("/kermit") ~> myRoute ~> check {
handled must beFalse
}
}
"return a MethodNotAllowed error for PUT requests to the root path" in {
Put() ~> sealRoute(myRoute) ~> check {
status === MethodNotAllowed
entityAs[String] === "HTTP method not allowed, supported methods: GET"
}
}
}
}*/ | jfmwz/SprayLearning | src/test/scala/com/example/MyServiceSpec.scala | Scala | mit | 843 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.thriftserver
import java.io.IOException
import java.util.concurrent.RejectedExecutionException
import org.apache.hive.service.ServiceException
import org.apache.hive.service.cli.{HiveSQLException, OperationType}
import org.apache.spark.SparkThrowable
/**
 * Factory methods for the exceptions raised while running hive statements in the
 * Thrift server; groups (most) of those error messages in a single place.
 */
object HiveThriftServerErrors {

  /** Raised when the background execution pool rejects a new task. */
  def taskExecutionRejectedError(rejected: RejectedExecutionException): Throwable =
    new HiveSQLException("The background threadpool cannot accept" +
      " new task for execution, please retry the operation", rejected)

  /**
   * Wraps a query failure. For SparkThrowable the message is prefixed with the
   * error class (when one is attached) and the SQL state is propagated.
   */
  def runningQueryError(e: Throwable): Throwable = e match {
    case st: SparkThrowable =>
      val prefix = Option(st.getErrorClass).fold("")(cls => s"[$cls] ")
      new HiveSQLException(s"Error running query: $prefix${st.toString}", st.getSqlState, st)
    case other =>
      new HiveSQLException(s"Error running query: ${other.toString}", other)
  }

  /** Failure while performing the given operation (fetch, cancel, close, ...). */
  def hiveOperatingError(operationType: OperationType, e: Throwable): Throwable =
    new HiveSQLException(s"Error operating $operationType ${e.getMessage}", e)

  /** Failure while opening a client session. */
  def failedToOpenNewSessionError(e: Throwable): Throwable =
    new HiveSQLException(s"Failed to open new session: $e", e)

  /** Kerberos login failure with the configured principal/keytab. */
  def cannotLoginToKerberosError(e: Throwable): Throwable =
    new ServiceException("Unable to login to kerberos with given principal/keytab", e)

  /** SPNEGO login failure for the given principal and keytab file. */
  def cannotLoginToSpnegoError(
      principal: String, keyTabFile: String, e: IOException): Throwable =
    new ServiceException("Unable to login to spnego with given principal " +
      s"$principal and keytab $keyTabFile: $e", e)

  /** Startup failure of a contained service. */
  def failedToStartServiceError(serviceName: String, e: Throwable): Throwable =
    new ServiceException(s"Failed to Start $serviceName", e)
}
| ueshin/apache-spark | sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServerErrors.scala | Scala | apache-2.0 | 2,695 |
package com.scalableminds.util.datastructures
import scala.annotation.tailrec
// Disjoint-set (union-find) forest using union-by-rank plus path compression on lookups,
// based on https://codereview.stackexchange.com/questions/17621/disjoint-sets-implementation
class UnionFind[T](initialElements: Seq[T] = Nil) {

  /**
   * Add a new single-node forest to the disjoint-set forests. The element
   * starts out in a set of its own.
   */
  def add(elem: T): Unit =
    forest.update(elem, UnionFind.Node(elem, 0, None))

  /**
   * Merge the two disjoint sets that `elem1` and `elem2` belong to, keeping
   * the trees shallow via union-by-rank. Throws a `MatchError` when either
   * element was never added (same failure mode as the original pattern match).
   */
  def union(elem1: T, elem2: T): Unit = {
    val representatives =
      for {
        r1 <- forest.get(elem1).map(_.getRepresentative)
        r2 <- forest.get(elem2).map(_.getRepresentative)
      } yield (r1, r2)

    representatives match {
      case Some((r1, r2)) =>
        if (r1 == r2) {
          // already members of the same set - nothing to do
        } else if (r1.rank > r2.rank) {
          r2.parent = Some(r1)
        } else if (r1.rank < r2.rank) {
          r1.parent = Some(r2)
        } else {
          // equal ranks: keep r1 as the representative and bump its rank
          r1.rank += 1
          r2.parent = Some(r1)
        }
      case None =>
        throw new MatchError("either element could not be found")
    }
  }

  /**
   * Find the representative element of the set containing `elem`, or `None`
   * when `elem` was never added. Compresses the queried node's path to the
   * root as a side effect.
   */
  def find(elem: T): Option[T] =
    forest.get(elem).map { node =>
      val root = node.getRepresentative
      // path compression: point the queried node directly at its root
      if (node != root) node.parent = Some(root)
      root.elem
    }

  /**
   * Number of disjoint sets currently managed. This is a non-vital operation,
   * so it is recomputed on every call by counting parentless roots.
   */
  def size: Int =
    forest.values.count(_.parent.isEmpty)

  ////
  // Internal state: one tree node per element ever added.
  private val forest: scala.collection.mutable.Map[T, UnionFind.Node[T]] =
    scala.collection.mutable.Map.empty

  // Seed the structure: each initial element begins in its own singleton set.
  initialElements.foreach(add)
}

object UnionFind {
  def apply[T](initialElements: Seq[T] = Nil) = new UnionFind[T](initialElements)

  private case class Node[T](elem: T, var rank: Int, var parent: Option[Node[T]]) {

    /**
     * Walk the parent links up to the root of this node's tree.
     * @return root element of the set
     */
    @tailrec
    final def getRepresentative: Node[T] = parent match {
      case None => this
      case Some(p) => p.getRepresentative
    }
  }
}
| scalableminds/webknossos | util/src/main/scala/com/scalableminds/util/datastructures/UnionFind.scala | Scala | agpl-3.0 | 3,057 |
/*
* This file is part of EasyForger which is released under GPLv3 License.
* See file LICENSE.txt or go to http://www.gnu.org/licenses/gpl-3.0.en.html for full license details.
*/
package com.easyforger.creatures
import net.minecraft.entity.EntityLiving
import net.minecraft.item.ItemStack
import net.minecraft.util.{EnumHand, ResourceLocation}
// Shared creature settings: optional held item for each hand, plus an optional
// resource path to a drop (loot table) JSON.
case class CommonEntityConfig(heldItemMainHand: Option[ItemStack],
                              heldItemOffHand: Option[ItemStack],
                              dropJson: Option[String])
// Mixin applying a CommonEntityConfig to a living entity: held items on init,
// and an overridden loot table when a drop JSON is configured.
trait CommonCustomMonster extends EntityLiving {
  // Supplied by the concrete creature class.
  val config: CommonEntityConfig

  // Loot-table location derived once (lazily) from the configured drop JSON path.
  lazy val dropJsonResource: Option[ResourceLocation] =
    config.dropJson.map(new ResourceLocation(_))

  /**
   * TODO: this is bad because implies "memory-based" programming. Can we do this better?
   * issue: https://github.com/easyforger/easyforger/issues/64
   *
   * Subclasses should call this method as their constructor's last statement.
   */
  def init() {
    config.heldItemMainHand.foreach(setHeldItem(EnumHand.MAIN_HAND, _))
    config.heldItemOffHand.foreach(setHeldItem(EnumHand.OFF_HAND, _))
  }

  // Prefer the configured loot table; otherwise fall back to the entity's default.
  override def getLootTable: ResourceLocation =
    dropJsonResource.getOrElse(super.getLootTable)
}
| easyforger/easyforger | src/main/scala/com/easyforger/creatures/CommonEntityConfig.scala | Scala | gpl-3.0 | 1,247 |
/*
* Copyright 2014 Alan Rodas Bonjour
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.alanrodas.fronttier
import com.alanrodas.fronttier.io._
import com.alanrodas.scaland.logging._
import rapture.core.strategy.throwExceptions
import rapture.fs._
import rapture.io._
import rapture.net._
/** Strategy interface for fetching a dependency's files from a repository URL
  * into a local destination directory.
  */
trait Downloader {
  /** Downloads `dependency` from `url` into `destination`.
    *
    * @return the dependency's parsed configuration together with any non-fatal
    *         per-file download errors, or `None` when nothing could be fetched
    */
  def download(destination: FileUrl, url: String, dependency: Dependency)
      (implicit fronttier :Fronttier): Option[(Configuration, Seq[FronttierException])]
}
/** [[Downloader]] that fetches a dependency's declaration file and declared
  * payload files over plain HTTP, trying each configured parser in turn.
  */
object HttpDownloader extends Downloader with LazyLogging {
  /*
  def download(destination: FileUrl, url: String): Boolean = {
    try {
      Http.parse(url) > (destination / url.split("/").last)
      true
    } catch {
      case e: java.io.FileNotFoundException => false
    }
  }
  */
  def download(destination: FileUrl, url: String, dependency: Dependency)
      (implicit fronttier :Fronttier): (Option[(Configuration, Seq[FronttierException])]) = {
    fronttier.configureLogger(logger)
    logger.info(s"Searching for $dependency declaration file")
    // Base URL of the dependency inside the repository.
    val remoteUrl = Http.parse(Http.parse(url).toString() + dependency.path)
    // Try every known declaration-file parser; the first one whose file exists
    // remotely and parses wins (see .find(_.isDefined) at the bottom).
    fronttier.parsers.map { parser =>
      val remoteDeclarationFile = remoteUrl / parser.fileName
      val localDeclarationFile = destination / parser.fileName
      try {
        // `>` copies the remote file to the local path (rapture-io operator).
        remoteDeclarationFile > localDeclarationFile
        if (parser.existsFileAt(destination)) {
          logger.info("Found " + parser.name + " declaration file at: " + remoteDeclarationFile)
          val configuration = parser.parseAt(destination)
          logger.info("Downloading declared files:")
          // Download every declared file, collecting (not aborting on) the
          // ones that are missing remotely.
          val errors = configuration.files.foldLeft(Seq[FronttierException]()){ (sum, file) =>
            try {
              logger.info(" downloading: " + (remoteUrl / file))
              // Ensure the file's parent directory exists locally first.
              val folder = destination / file.substring(0, file.lastIndexOf("/"))
              if (!folder.exists) folder.mkdir(makeParents = true)
              (remoteUrl / file).>(destination / file)
              sum
            }
            catch { case e : java.io.FileNotFoundException =>
              logger.info(" download fail: " + e.getMessage)
              sum :+ UnavailableFileException(dependency, file)
            }
          }
          logger.info("===============")
          // Recursively fetch transitive dependencies.
          // NOTE(review): the result of this recursive call (and any errors it
          // produced) is discarded — confirm this is intentional.
          fronttier.download(configuration)
          // The declaration file is only needed for parsing; remove it.
          localDeclarationFile.delete()
          Some((configuration, errors))
        } else None
      }
      catch { case e : java.io.FileNotFoundException =>
        // This parser's declaration file does not exist remotely; try the next.
        None
      }
    }.find(_.isDefined).flatten
  }
}
/** [[Downloader]] for git-hosted repositories.
  *
  * NOTE: not yet implemented — always answers `None` (nothing downloaded).
  */
object GitDownloader extends Downloader {
  def download(destination: FileUrl, url: String, dependency: Dependency)
      (implicit fronttier :Fronttier): Option[(Configuration, Seq[FronttierException])] = {
    None
  }
}
/** [[Downloader]] for subversion-hosted repositories.
  *
  * NOTE: not yet implemented — always answers `None` (nothing downloaded).
  */
object SvnDownloader extends Downloader {
  def download(destination: FileUrl, url: String, dependency: Dependency)
      (implicit fronttier :Fronttier): Option[(Configuration, Seq[FronttierException])] = {
    None
  }
} | alanrodas/Fronttier | core/src/main/scala/com/alanrodas/fronttier/downloaders.scala | Scala | apache-2.0 | 3,505 |
package models.view
import org.joda.time.DateTime
import play.api.libs.json._
/** View-layer representation of a card comment, (de)serialised as JSON via the
  * companion's `format`. Field names are part of the JSON contract.
  *
  * @param id      identifier; `None` presumably means not yet persisted — verify against callers
  * @param card_id id of the card this comment belongs to
  * @param author  author of the comment
  * @param date    timestamp of the comment
  * @param message comment text
  */
case class Comment( id: Option[Int],
                    card_id: Int,
                    author: String,
                    date: DateTime,
                    message: String)
object Comment {
implicit val format = Json.format[Comment]
implicit def fromDM(dm: models.domain.Comment): Comment = {
import dm._
Comment(id, card_id, author, date, message)
}
} | PureHacks/KudosBoard | app/models/view/Comment.scala | Scala | mit | 455 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.data.view
import org.apache.predictionio.data.storage.{DataMap, Event, EventValidation, Storage}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.joda.time.DateTime
import org.json4s.JValue
// each JValue data associated with the time it is set
private[predictionio] case class PropTime(val d: JValue, val t: Long) extends Serializable
/** Accumulated `$set` operations: each field's value with its set time, plus
  * the time of the most recent `$set` event. Note: `fields` may be empty while
  * `t` is still a valid set time.
  */
private[predictionio] case class SetProp (
  val fields: Map[String, PropTime],
  val t: Long) extends Serializable {

  /** Merges two accumulators, keeping for every field the value with the
    * latest set time (a tie keeps `that`'s value) and the latest overall
    * set time.
    */
  def ++ (that: SetProp): SetProp = {
    val allKeys = this.fields.keySet union that.fields.keySet
    val mergedFields = allKeys.iterator.map { key =>
      // At least one side defines `key`; reduceLeft over [this, that] picks
      // the strictly newer value and falls back to `that` on equal times,
      // exactly like the original pairwise comparison.
      val candidates = this.fields.get(key).toList ::: that.fields.get(key).toList
      key -> candidates.reduceLeft((a, b) => if (a.t > b.t) a else b)
    }.toMap
    SetProp(fields = mergedFields, t = math.max(this.t, that.t))
  }
}
/** Accumulated `$unset` operations: for every unset field, the latest time at
  * which it was unset.
  */
private[predictionio] case class UnsetProp (fields: Map[String, Long]) extends Serializable {

  /** Merges two accumulators, keeping the latest unset time per field. */
  def ++ (that: UnsetProp): UnsetProp = {
    // Start from this side's entries and fold the other side in, taking the
    // maximum timestamp whenever both sides unset the same field.
    val merged = that.fields.foldLeft(this.fields) { case (acc, (key, time)) =>
      acc.updated(key, acc.get(key).fold(time)(known => math.max(known, time)))
    }
    UnsetProp(fields = merged)
  }
}
/** A `$delete` operation, carrying the time of the latest delete event. */
private[predictionio] case class DeleteEntity (t: Long) extends Serializable {
  /** Keeps whichever delete happened later; a tie keeps `that`. */
  def ++ (that: DeleteEntity): DeleteEntity =
    if (that.t >= this.t) that else this
}
/** Accumulated effect of the special events (`$set`, `$unset`, `$delete`) seen
  * for one entity. Combining two ops with `++` keeps, per component, the state
  * implied by the latest events.
  */
private[predictionio] case class EventOp (
  val setProp: Option[SetProp] = None,
  val unsetProp: Option[UnsetProp] = None,
  val deleteEntity: Option[DeleteEntity] = None
) extends Serializable {

  /** Merges two accumulators component-wise; a component present on only one
    * side is taken as-is.
    */
  def ++ (that: EventOp): EventOp = {
    EventOp(
      setProp = (setProp ++ that.setProp).reduceOption(_ ++ _),
      unsetProp = (unsetProp ++ that.unsetProp).reduceOption(_ ++ _),
      deleteEntity = (deleteEntity ++ that.deleteEntity).reduceOption(_ ++ _)
    )
  }

  /** Resolves the accumulated operations into the entity's current properties.
    *
    * @return `None` when the entity was never `$set` or was deleted at/after
    *         its last `$set`; otherwise the surviving fields as a [[DataMap]]
    */
  def toDataMap(): Option[DataMap] = {
    setProp.flatMap { set =>
      // Fields whose unset happened at or after their last set are dropped.
      // Fix: an unset key may be absent from `set.fields` (never set in this
      // window); the original `set.fields(k)` threw NoSuchElementException
      // there. Treat absent keys as removable — removing them below is a no-op.
      val unsetKeys: Set[String] = unsetProp.map( unset =>
        unset.fields.filter{ case (k, v) => set.fields.get(k).forall(v >= _.t) }.keySet
      ).getOrElse(Set())
      val combinedFields = deleteEntity.map { delete =>
        if (delete.t >= set.t) {
          // Deleted at or after the last $set: the entire entity is gone.
          None
        } else {
          // Deleted before the last $set: discard only fields whose set time
          // is not newer than the delete.
          val deleteKeys: Set[String] = set.fields
            .filter { case (k, PropTime(kv, t)) =>
              (delete.t >= t)
            }.keySet
          Some(set.fields -- unsetKeys -- deleteKeys)
        }
      }.getOrElse{
        Some(set.fields -- unsetKeys)
      }
      // Note: mapValues() doesn't return concrete Map and causes
      // NotSerializableException issue. Use map(identity) to work around this.
      // see https://issues.scala-lang.org/browse/SI-7005
      combinedFields.map(f => DataMap(f.mapValues(_.d).map(identity)))
    }
  }
}
/** Translates a raw [[Event]] into the corresponding [[EventOp]]. Events other
  * than `$set`, `$unset` and `$delete` map to an empty (identity) op.
  */
private[predictionio] object EventOp {
  def apply(e: Event): EventOp = {
    val millis = e.eventTime.getMillis
    e.event match {
      case "$set" =>
        // Build a strict Map directly; this sidesteps the lazy view that
        // mapValues(...) would otherwise require a map(identity) to force.
        val props = e.properties.fields.map { case (k, jv) => k -> PropTime(jv, millis) }
        EventOp(setProp = Some(SetProp(fields = props, t = millis)))
      case "$unset" =>
        val unset = e.properties.fields.map { case (k, _) => k -> millis }
        EventOp(unsetProp = Some(UnsetProp(fields = unset)))
      case "$delete" =>
        EventOp(deleteEntity = Some(DeleteEntity(millis)))
      case _ =>
        EventOp()
    }
  }
}
/** Spark-backed batch view over an app's events, restricted to the given time
  * window at construction.
  */
@deprecated("Use PEvents or PEventStore instead.", "0.9.2")
class PBatchView(
  val appId: Int,
  val startTime: Option[DateTime],
  val untilTime: Option[DateTime],
  val sc: SparkContext) {

  // NOTE: parallel Events DB interface
  // transient + lazy: the storage handle is re-created on each executor
  // instead of being serialized with the closure.
  @transient lazy val eventsDb = Storage.getPEvents()

  // All events of this app within [startTime, untilTime), any entity.
  @transient lazy val _events: RDD[Event] =
    eventsDb.getByAppIdAndTimeAndEntity(
      appId = appId,
      startTime = startTime,
      untilTime = untilTime,
      entityType = None,
      entityId = None)(sc)

  // TODO: change to use EventSeq?
  @transient lazy val events: RDD[Event] = _events

  /** Folds the special events ($set/$unset/$delete) of every entity of
    * `entityType` into that entity's current property map.
    *
    * NOTE(review): `startTimeOpt` and `untilTimeOpt` are never read — the
    * window is fixed by the constructor's `startTime`/`untilTime`. Confirm
    * whether they were meant to further narrow the aggregation.
    *
    * @return pairs of (entityId, aggregated properties); entities whose
    *         properties resolve to nothing are omitted
    */
  def aggregateProperties(
    entityType: String,
    startTimeOpt: Option[DateTime] = None,
    untilTimeOpt: Option[DateTime] = None
  ): RDD[(String, DataMap)] = {
    _events
      .filter( e => ((e.entityType == entityType) &&
        (EventValidation.isSpecialEvents(e.event))) )
      .map( e => (e.entityId, EventOp(e) ))
      .aggregateByKey[EventOp](EventOp())(
        // within same partition
        seqOp = { case (u, v) => u ++ v },
        // across partition
        combOp = { case (accu, u) => accu ++ u }
      )
      .mapValues(_.toDataMap)
      .filter{ case (k, v) => v.isDefined }
      .map{ case (k, v) => (k, v.get) }
  }
}
| PredictionIO/PredictionIO | data/src/main/scala/org/apache/predictionio/data/view/PBatchView.scala | Scala | apache-2.0 | 6,190 |
/*
* Copyright (C) 2005, The OpenURP Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openurp.std.info.model
import org.beangle.data.model.LongId
import org.beangle.data.model.pojo.Updated
import org.openurp.base.edu.model.Student
import org.openurp.code.edu.model.{DisciplineCategory, Institution}
/** 学生的主修信息
*/
/** A student's first-major (primary programme) record. */
class MajorStudent extends LongId with Updated {
  /** Student number under the first major. */
  var code: String = _
  /** The student. */
  var std: Student = _
  /** Institution where the first major is taken. */
  var school: Institution = _
  /** Name of the first major. */
  var majorName: String = _
  /** English name of the first major, if any. */
  var enMajorName: Option[String] = None
  /** Discipline category of the first major. */
  var majorCategory: DisciplineCategory = _
}
| openurp/api | std/src/main/scala/org/openurp/std/info/model/MajorStudent.scala | Scala | lgpl-3.0 | 1,371 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.cluster.sdv.generated
import org.apache.spark.sql.Row
import org.apache.spark.sql.common.util._
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.spark.sql.test.TestQueryExecutor
/**
* Test Class for singlepassTestCase to verify all scenerios
*/
class SinglepassTestCase extends QueryTest with BeforeAndAfterAll {
//To check data loading with OPTIONS ‘SINGLE_PASS’=’true’
test("Loading-004-01-01-01_001-TC_001", Include) {
sql(s"""drop table if exists test1""").collect
sql(s"""create table test1(imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId double,productionDate Timestamp,deliveryDate timestamp,deliverycharge double) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('table_blocksize'='1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/vardhandaterestruct.csv' INTO TABLE test1 OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='TRUE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
checkAnswer(s"""select count(*) from test1""",
Seq(Row(99)), "singlepassTestCase_Loading-004-01-01-01_001-TC_001")
sql(s"""drop table test1""").collect
}
//To check data loading with OPTIONS ‘SINGLE_PASS’=’false’
test("Loading-004-01-01-01_001-TC_002", Include) {
sql(s"""create table test1(imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId double,productionDate Timestamp,deliveryDate timestamp,deliverycharge double) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('table_blocksize'='1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/vardhandaterestruct.csv' INTO TABLE test1 OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='FALSE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
checkAnswer(s"""select count(*) from test1""",
Seq(Row(99)), "singlepassTestCase_Loading-004-01-01-01_001-TC_002")
}
//To check data loading from CSV with incomplete data
test("Loading-004-01-01-01_001-TC_003", Include) {
intercept[Exception] {
sql(s"""drop table if exists uniqdata""").collect
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_incomplete.csv' INTO TABLE uniqdata OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='TRUE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
}
}
//To check data loading from CSV with bad records
test("Loading-004-01-01-01_001-TC_004", Include) {
intercept[Exception] {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_badrec.csv' INTO TABLE uniqdata OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='TRUE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
}
}
//To check data loading from CSV with no data
test("Loading-004-01-01-01_001-TC_005", Include) {
intercept[Exception] {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_nodata.csv' INTO TABLE uniqdata OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='TRUE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
}
}
//To check data loading from CSV with incomplete data
test("Loading-004-01-01-01_001-TC_006", Include) {
intercept[Exception] {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_incomplete.csv' INTO TABLE uniqdata OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='FALSE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
}
}
//To check data loading from CSV with wrong data
test("Loading-004-01-01-01_001-TC_007", Include) {
intercept[Exception] {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_incomplete.csv' INTO TABLE uniqdata OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='FALSE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
}
}
//To check data loading from CSV with no data and 'SINGLEPASS' = 'FALSE'
test("Loading-004-01-01-01_001-TC_008", Include) {
intercept[Exception] {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_nodata.csv.csv' INTO TABLE uniqdata OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='FALSE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
}
}
//To check data loading using 'SINGLE_PASS'='NULL/any invalid string'
test("Loading-004-01-01-01_001-TC_009", Include) {
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/vardhandaterestruct.csv' INTO TABLE test1 OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='NULL', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
checkAnswer(s"""select count(*) from test1""",
Seq(Row(198)), "singlepassTestCase_Loading-004-01-01-01_001-TC_009")
sql(s"""drop table test1""").collect
}
//To check data load using multiple CSV from folder into table with single_pass=true
test("Loading-004-01-01-01_001-TC_010", Include) {
sql(s"""drop table if exists emp_record12""").collect
sql(s"""create table emp_record12 (ID int,Name string,DOJ timestamp,Designation string,Salary double,Dept string,DOB timestamp,Addr string,Gender string,Mob bigint) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/data' into table emp_record12 options('DELIMITER'=',', 'QUOTECHAR'='"','SINGLE_PASS'='TRUE','FILEHEADER'='ID,Name,DOJ,Designation,Salary,Dept,DOB,Addr,Gender,Mob','BAD_RECORDS_ACTION'='FORCE')""").collect
sql(s"""select count(*) from emp_record12""").collect
sql(s"""drop table emp_record12""").collect
}
//To check data load using CSV from multiple level of folders into table
test("Loading-004-01-01-01_001-TC_011", Include) {
sql(s"""create table emp_record12 (ID int,Name string,DOJ timestamp,Designation string,Salary double,Dept string,DOB timestamp,Addr string,Gender string,Mob bigint) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/data' into table emp_record12 options('DELIMITER'=',', 'QUOTECHAR'='"','SINGLE_PASS'='TRUE','FILEHEADER'='ID,Name,DOJ,Designation,Salary,Dept,DOB,Addr,Gender,Mob','BAD_RECORDS_ACTION'='FORCE')""").collect
sql(s"""select count(*) from emp_record12""").collect
sql(s"""drop table emp_record12""").collect
}
//To check data load using multiple CSV from folder into table with single_pass=false
test("Loading-004-01-01-01_001-TC_012", Include) {
sql(s"""create table emp_record12 (ID int,Name string,DOJ timestamp,Designation string,Salary double,Dept string,DOB timestamp,Addr string,Gender string,Mob bigint) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/data' into table emp_record12 options('DELIMITER'=',', 'QUOTECHAR'='"','SINGLE_PASS'='FALSE','FILEHEADER'='ID,Name,DOJ,Designation,Salary,Dept,DOB,Addr,Gender,Mob','BAD_RECORDS_ACTION'='FORCE')""").collect
sql(s"""select count(*) from emp_record12""").collect
sql(s"""drop table emp_record12""").collect
}
//To check data load using CSV from multiple level of folders into table
test("Loading-004-01-01-01_001-TC_013", Include) {
sql(s"""create table emp_record12 (ID int,Name string,DOJ timestamp,Designation string,Salary double,Dept string,DOB timestamp,Addr string,Gender string,Mob bigint) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/data' into table emp_record12 options('DELIMITER'=',', 'QUOTECHAR'='"','SINGLE_PASS'='FALSE','FILEHEADER'='ID,Name,DOJ,Designation,Salary,Dept,DOB,Addr,Gender,Mob','BAD_RECORDS_ACTION'='FORCE')""").collect
sql(s"""select count(*) from emp_record12""").collect
sql(s"""drop table emp_record12""").collect
}
//To check Data loading in proper CSV format with .dat
test("Loading-004-01-01-01_001-TC_014", Include) {
sql(s"""drop table if exists uniqdata_file_extn""").collect
sql(s"""CREATE TABLE if not exists uniqdata_file_extn (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/2000_UniqData.dat' into table uniqdata_file_extn OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_file_extn""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_014")
sql(s"""drop table uniqdata_file_extn""").collect
}
//To check Data loading in proper CSV format with .xls
test("Loading-004-01-01-01_001-TC_015", Include) {
sql(s"""CREATE TABLE if not exists uniqdata_file_extn (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/2000_UniqData.xls' into table uniqdata_file_extn OPTIONS('DELIMITER'='\\001' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_file_extn""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_015")
sql(s"""drop table uniqdata_file_extn""").collect
}
//To check Data loading in proper CSV format with .doc
test("Loading-004-01-01-01_001-TC_016", Include) {
sql(s"""CREATE TABLE if not exists uniqdata_file_extn (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/2000_UniqData.dat' into table uniqdata_file_extn OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_file_extn""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_016")
sql(s"""drop table uniqdata_file_extn""").collect
}
//To check Data loading in proper CSV format with .txt
test("Loading-004-01-01-01_001-TC_017", Include) {
sql(s"""CREATE TABLE if not exists uniqdata_file_extn (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/2000_UniqData.txt' into table uniqdata_file_extn OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_file_extn""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_017")
sql(s"""drop table uniqdata_file_extn""").collect
}
//To check Data loading in proper CSV format wiithout any extension
test("Loading-004-01-01-01_001-TC_020", Include) {
sql(s"""CREATE TABLE if not exists uniqdata_file_extn (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/2000_UniqData' into table uniqdata_file_extn OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_file_extn""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_020")
sql(s"""drop table uniqdata_file_extn""").collect
}
//To check Data loading in proper CSV format with .dat with single_pass=false
test("Loading-004-01-01-01_001-TC_021", Include) {
sql(s"""CREATE TABLE if not exists uniqdata_file_extn (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/2000_UniqData.dat' into table uniqdata_file_extn OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_file_extn""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_021")
sql(s"""drop table uniqdata_file_extn""").collect
}
//To check Data loading in proper CSV format with .xls with single_pass=false
test("Loading-004-01-01-01_001-TC_022", Include) {
sql(s"""CREATE TABLE if not exists uniqdata_file_extn (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/2000_UniqData.xls' into table uniqdata_file_extn OPTIONS('DELIMITER'='\\001' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_file_extn""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_022")
sql(s"""drop table uniqdata_file_extn""").collect
}
//To check Data loading in proper CSV format with .txt with single_pass=false
test("Loading-004-01-01-01_001-TC_024", Include) {
sql(s"""CREATE TABLE if not exists uniqdata_file_extn (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/2000_UniqData.txt' into table uniqdata_file_extn OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_file_extn""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_024")
sql(s"""drop table uniqdata_file_extn""").collect
}
//To check Data loading in proper CSV format wiithout any extension with single_pass=false
test("Loading-004-01-01-01_001-TC_027", Include) {
sql(s"""CREATE TABLE if not exists uniqdata_file_extn (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA inpath '$resourcesPath/Data/singlepass/2000_UniqData' into table uniqdata_file_extn OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_file_extn""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_027")
sql(s"""drop table uniqdata_file_extn""").collect
}
//To check Data loading with delimiters as / [slash]
test("Loading-004-01-01-01_001-TC_028", Include) {
sql(s"""drop table if exists uniqdata_slash""").collect
sql(s"""CREATE TABLE if not exists uniqdata_slash(CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_slash.csv' into table uniqdata_slash OPTIONS('DELIMITER'='/' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_slash""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_028")
sql(s"""drop table uniqdata_slash""").collect
}
//To check Data loading with delimiters as " [double quote]
test("Loading-004-01-01-01_001-TC_029", Include) {
sql(s"""drop table if exists uniqdata_doublequote""").collect
sql(s"""CREATE TABLE if not exists uniqdata_doublequote (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_quote.csv' into table uniqdata_doublequote OPTIONS('DELIMITER'='"' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_doublequote""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_029")
sql(s"""drop table uniqdata_doublequote""").collect
}
//To check Data loading with delimiters as ! [exclamation]
// Single-pass load with '!' as the field delimiter; expects the full 2013-row count.
test("Loading-004-01-01-01_001-TC_030", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata_exclamation""").collect
sql(s"""CREATE TABLE if not exists uniqdata_exclamation (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_exclamation.csv' into table uniqdata_exclamation OPTIONS('DELIMITER'='!' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_exclamation""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_030")
sql(s"""drop table uniqdata_exclamation""").collect
}
//To check Data loading with delimiters as | [pipe]
// Single-pass load with '|' as the field delimiter; expects the full 2013-row count.
test("Loading-004-01-01-01_001-TC_031", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata_pipe""").collect
sql(s"""CREATE TABLE if not exists uniqdata_pipe (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_pipe.csv' into table uniqdata_pipe OPTIONS('DELIMITER'='|' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_pipe""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_031")
sql(s"""drop table uniqdata_pipe""").collect
}
//To check Data loading with delimiters as ' [single quote]
// Single-pass load with ' as the field delimiter. The option value is written with
// double quotes ("'") so the single quote survives inside the SQL string.
test("Loading-004-01-01-01_001-TC_032", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata_singleQuote""").collect
sql(s"""CREATE TABLE if not exists uniqdata_singleQuote (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_singlequote.csv' into table uniqdata_singleQuote OPTIONS('DELIMITER'="'" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_singleQuote""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_032")
sql(s"""drop table uniqdata_singleQuote""").collect
}
//To check Data loading with delimiters as \\017
// Single-pass load with the octal escape \017 (SI control char) as the delimiter;
// expects the full 2013-row count.
test("Loading-004-01-01-01_001-TC_033", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata_017""").collect
sql(s"""CREATE TABLE if not exists uniqdata_017 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_017.csv' into table uniqdata_017 OPTIONS('DELIMITER'="\\017" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_017""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_033")
sql(s"""drop table uniqdata_017""").collect
}
//To check Data loading with delimiters as \\001
// Single-pass load with the octal escape \001 (SOH, Hive's default delimiter) as the
// field delimiter; expects the full 2013-row count.
test("Loading-004-01-01-01_001-TC_034", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata_001""").collect
sql(s"""CREATE TABLE if not exists uniqdata_001 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_001.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_001""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_034")
sql(s"""drop table uniqdata_001""").collect
}
//To check Data loading with delimiters as / [slash] and SINGLE_PASS= FALSE
// Two-pass variant of the slash-delimiter test; same data, same expected 2013 rows.
test("Loading-004-01-01-01_001-TC_035", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata_slash""").collect
sql(s"""CREATE TABLE if not exists uniqdata_slash(CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_slash.csv' into table uniqdata_slash OPTIONS('DELIMITER'='/' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_slash""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_035")
sql(s"""drop table uniqdata_slash""").collect
}
//To check Data loading with delimiters as " [double quote] and SINGLE_PASS= FALSE
// Two-pass variant of TC_029: DELIMITER and QUOTECHAR are both '"', bad records
// are forced in, and the full 2013-row count is still expected.
test("Loading-004-01-01-01_001-TC_036", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata_doublequote""").collect
sql(s"""CREATE TABLE if not exists uniqdata_doublequote (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_quote.csv' into table uniqdata_doublequote OPTIONS('DELIMITER'='"' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_doublequote""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_036")
sql(s"""drop table uniqdata_doublequote""").collect
}
//To check Data loading with delimiters as ! [exclamation] and SINGLE_PASS= FALSE
// Two-pass variant of TC_030; same data file, same expected 2013-row count.
test("Loading-004-01-01-01_001-TC_037", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata_exclamation""").collect
sql(s"""CREATE TABLE if not exists uniqdata_exclamation (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_exclamation.csv' into table uniqdata_exclamation OPTIONS('DELIMITER'='!' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_exclamation""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_037")
sql(s"""drop table uniqdata_exclamation""").collect
}
//To check Data loading with delimiters as | [pipe] and SINGLE_PASS= FALSE
// Two-pass variant of TC_031; same data file, same expected 2013-row count.
test("Loading-004-01-01-01_001-TC_038", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata_pipe""").collect
sql(s"""CREATE TABLE if not exists uniqdata_pipe (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_pipe.csv' into table uniqdata_pipe OPTIONS('DELIMITER'='|' , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_pipe""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_038")
sql(s"""drop table uniqdata_pipe""").collect
}
//To check Data loading with delimiters as ' [single quote] and SINGLE_PASS= FALSE
// Two-pass variant of TC_032; delimiter value uses double quotes ("'") so the single
// quote survives inside the SQL string.
test("Loading-004-01-01-01_001-TC_039", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata_singleQuote""").collect
sql(s"""CREATE TABLE if not exists uniqdata_singleQuote (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_singlequote.csv' into table uniqdata_singleQuote OPTIONS('DELIMITER'="'" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_singleQuote""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_039")
sql(s"""drop table uniqdata_singleQuote""").collect
}
//To check Data loading with delimiters as \\017 and SINGLE_PASS= FALSE
// Two-pass variant of TC_033; same data file, same expected 2013-row count.
test("Loading-004-01-01-01_001-TC_040", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata_017""").collect
sql(s"""CREATE TABLE if not exists uniqdata_017 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_017.csv' into table uniqdata_017 OPTIONS('DELIMITER'="\\017" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_017""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_040")
sql(s"""drop table uniqdata_017""").collect
}
//To check Data loading with delimiters as \\001 and SINGLE_PASS= FALSE
// Two-pass variant of TC_034; same data file, same expected 2013-row count.
test("Loading-004-01-01-01_001-TC_041", Include) {
// Fix: drop any leftover table first, matching every sibling test. Without this,
// a table left behind by a failed earlier run would survive the CREATE ... if not
// exists and its stale rows would break the count(*) assertion below.
sql(s"""drop table if exists uniqdata_001""").collect
sql(s"""CREATE TABLE if not exists uniqdata_001 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData_001.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
checkAnswer(s"""select count(*) from uniqdata_001""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_041")
sql(s"""drop table uniqdata_001""").collect
}
//To check Auto compaction is successful with carbon.enable.auto.load.merge= True & SINGLE_PASS=TRUE
// Loads the same CSV three times (three segments), then triggers a minor compaction
// and lists the segments. No assertion on the segment list — the test only verifies
// that the statements complete without error.
test("Loading-004-01-01-01_001-TC_043", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata_001""").collect
sql(s"""CREATE TABLE if not exists uniqdata_001 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""alter table uniqdata_001 compact 'minor'""").collect
sql(s"""show segments for table uniqdata_001""").collect
sql(s"""drop table uniqdata_001""").collect
}
//To check Auto compaction is successful with carbon.enable.auto.load.merge= True & SINGLE_PASS=FALSE
// Loads the same CSV three times (three segments), then triggers a minor compaction
// and lists the segments. No assertion — only verifies the statements complete.
test("Loading-004-01-01-01_001-TC_044", Include) {
// Fix: drop any leftover table first, matching TC_043 and the other tests in this
// suite; CREATE ... if not exists alone would silently reuse a stale table.
sql(s"""drop table if exists uniqdata_001""").collect
sql(s"""CREATE TABLE if not exists uniqdata_001 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""alter table uniqdata_001 compact 'minor'""").collect
sql(s"""show segments for table uniqdata_001""").collect
sql(s"""drop table uniqdata_001""").collect
}
//To check Auto compaction is successful with carbon.enable.auto.load.merge= false & SINGLE_PASS=TRUE
// Loads the same CSV three times (three segments), then triggers a major compaction
// and lists the segments. No assertion — only verifies the statements complete.
test("Loading-004-01-01-01_001-TC_045", Include) {
// Fix: drop any leftover table first, matching TC_043 and the other tests.
sql(s"""drop table if exists uniqdata_001""").collect
sql(s"""CREATE TABLE if not exists uniqdata_001 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
// Fix: the third load previously used 'SINGLE_PASS'='FALSE' (copy-paste from the
// SINGLE_PASS=FALSE variant TC_046), contradicting this test's stated scenario of
// SINGLE_PASS=TRUE; aligned with the first two loads.
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""alter table uniqdata_001 compact 'major'""").collect
sql(s"""show segments for table uniqdata_001""").collect
sql(s"""drop table uniqdata_001""").collect
}
//To check Auto compaction is successful with carbon.enable.auto.load.merge= false & SINGLE_PASS=FALSE
// Loads the same CSV three times (three segments), then triggers a major compaction
// and lists the segments. No assertion — only verifies the statements complete.
test("Loading-004-01-01-01_001-TC_046", Include) {
// Fix: drop any leftover table first, matching TC_043 and the other tests.
sql(s"""drop table if exists uniqdata_001""").collect
sql(s"""CREATE TABLE if not exists uniqdata_001 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/2000_UniqData.csv' into table uniqdata_001 OPTIONS('DELIMITER'="\\001" , 'QUOTECHAR'='"', 'BAD_RECORDS_ACTION'='FORCE','SINGLE_PASS'='FALSE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
sql(s"""alter table uniqdata_001 compact 'major'""").collect
sql(s"""show segments for table uniqdata_001""").collect
sql(s"""drop table uniqdata_001""").collect
}
//To check Data loading is success with 'SINGLE_PASS'='TRUE' with already created table with Include dictionary
// Runs inside a dedicated database to exercise DICTIONARY_INCLUDE with single-pass load.
test("Loading-004-01-01-01_001-TC_051", Include) {
// Fix: tolerate a database left behind by a previous failed run.
sql(s"""create database if not exists includeexclude""").collect
sql(s"""use includeexclude""").collect
sql(s"""drop table if exists test2""").collect
sql(s"""create table test2 (imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId int,productionDate Timestamp,deliveryDate timestamp,deliverycharge decimal(10,2)) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='gamePointId,deviceInformationId')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/vardhandaterestruct.csv' INTO TABLE test2 OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='TRUE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
checkAnswer(s"""select count(*) from test2""",
Seq(Row(99)), "singlepassTestCase_Loading-004-01-01-01_001-TC_051")
sql(s"""drop table includeexclude.test2""").collect
// Fix: switch back to the default database before dropping includeexclude
// (TC_052 already does this). Previously the session stayed on the dropped
// database, which can break every subsequent test that issues unqualified SQL.
sql(s"""use default""").collect
sql(s"""drop database includeexclude cascade""").collect
}
//To check Data loading is success with 'SINGLE_PASS'='FALSE' with already created table with Include dictionary
// Two-pass counterpart of TC_051; same dedicated database and DICTIONARY_INCLUDE table.
test("Loading-004-01-01-01_001-TC_052", Include) {
// Fix: make setup idempotent — a database/table left behind by a previous failed
// run (e.g. TC_051 aborting before its cascade drop) would make a plain
// CREATE DATABASE / CREATE TABLE fail here.
sql(s"""create database if not exists includeexclude""").collect
sql(s"""use includeexclude""").collect
sql(s"""drop table if exists test2""").collect
sql(s"""create table test2 (imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId int,productionDate Timestamp,deliveryDate timestamp,deliverycharge decimal(10,2)) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='gamePointId,deviceInformationId')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/vardhandaterestruct.csv' INTO TABLE test2 OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='FALSE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
checkAnswer(s"""select count(*) from test2""",
Seq(Row(99)), "singlepassTestCase_Loading-004-01-01-01_001-TC_052")
sql(s"""drop table includeexclude.test2""").collect
sql(s"""use default""").collect
sql(s"""drop database includeexclude cascade""").collect
}
//To check Data loading is success with 'SINGLE_PASS'='TRUE' with already created table with Exclude dictionary
// Single-pass load into a table whose string columns use DICTIONARY_EXCLUDE;
// expects all 99 rows of vardhandaterestruct.csv.
test("Loading-004-01-01-01_001-TC_053", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists test2""").collect
sql(s"""create table test2 (imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId int,productionDate Timestamp,deliveryDate timestamp,deliverycharge decimal(10,2)) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='imei,channelsId,AMSize,ActiveCountry,Activecity')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/vardhandaterestruct.csv' INTO TABLE test2 OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='TRUE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
checkAnswer(s"""select count(*) from test2""",
Seq(Row(99)), "singlepassTestCase_Loading-004-01-01-01_001-TC_053")
sql(s"""drop table test2""").collect
}
//To check Data loading is success with 'SINGLE_PASS'='FALSE' with already created table with Exclude dictionary
// Two-pass counterpart of TC_053; expects all 99 rows of vardhandaterestruct.csv.
test("Loading-004-01-01-01_001-TC_054", Include) {
// Fix: drop any leftover table first, matching TC_053. Without this, a table left
// behind by a failed earlier run makes the plain CREATE TABLE fail.
sql(s"""drop table if exists test2""").collect
sql(s"""create table test2 (imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId int,productionDate Timestamp,deliveryDate timestamp,deliverycharge decimal(10,2)) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='imei,channelsId,AMSize,ActiveCountry,Activecity')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/vardhandaterestruct.csv' INTO TABLE test2 OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='FALSE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
checkAnswer(s"""select count(*) from test2""",
Seq(Row(99)), "singlepassTestCase_Loading-004-01-01-01_001-TC_054")
sql(s"""drop table test2""").collect
}
//To check data loading is success when loading from Carbon Table using 'SINGLE_PASS'=TRUE
// Single-pass load into a small-blocksize (1 MB) table; expects all 99 rows.
test("Loading-004-01-01-01_001-TC_061", Include) {
// Fix: drop any leftover table first, consistent with the rest of the suite.
// Without this, a table left behind by a failed earlier run makes CREATE TABLE fail.
sql(s"""drop table if exists test1""").collect
sql(s"""create table test1(imei string,AMSize string,channelsId string,ActiveCountry string, Activecity string,gamePointId double,deviceInformationId double,productionDate Timestamp,deliveryDate timestamp,deliverycharge double) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('table_blocksize'='1')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/vardhandaterestruct.csv' INTO TABLE test1 OPTIONS('DELIMITER'=',', 'QUOTECHAR'= '"','SINGLE_PASS'='TRUE', 'FILEHEADER'= 'imei,deviceInformationId,AMSize,channelsId,ActiveCountry,Activecity,gamePointId,productionDate,deliveryDate,deliverycharge')""").collect
checkAnswer(s"""select count(*) from test1""",
Seq(Row(99)), "singlepassTestCase_Loading-004-01-01-01_001-TC_061")
sql(s"""drop table test1""").collect
}
//Verifying load data with single Pass true and BAD_RECORDS_ACTION='FAIL'
// Expects an exception: the CSV contains bad boolean records and the load uses
// BAD_RECORDS_ACTION=FAIL, so the LOAD statement should throw inside intercept.
// NOTE(review): the checkAnswer at the bottom of the intercept block is unreachable
// when the load throws as intended; it only runs (and then fails the intercept by
// not throwing... unless it throws itself) if the load unexpectedly succeeds.
// Consider moving it outside the intercept or removing it — confirm intent.
test("Loading-004-01-01-01_001-TC_067", Include) {
sql(s"""drop table if exists uniqdata""").collect
intercept[Exception] {
sql(s"""
| CREATE TABLE uniqdata(
| shortField SHORT,
| booleanField BOOLEAN,
| intField INT,
| bigintField LONG,
| doubleField DOUBLE,
| stringField STRING,
| decimalField DECIMAL(18,2),
| charField CHAR(5),
| floatField FLOAT,
| complexData ARRAY<STRING>,
| booleanField2 BOOLEAN
| )
| STORED BY 'carbondata'
""".stripMargin)
.collect
sql(
s"""LOAD DATA INPATH '${TestQueryExecutor
.integrationPath}/spark2/src/test/resources/bool/supportBooleanBadRecords.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FAIL','FILEHEADER'='shortField,booleanField,intField,bigintField,doubleField,stringField,timestampField,decimalField,dateField,charField,floatField,complexData,booleanField2','SINGLE_Pass'='true')""".stripMargin)
.collect
checkAnswer(
s"""select count(*) from uniqdata""",
Seq(Row(2013)),
"singlepassTestCase_Loading-004-01-01-01_001-TC_067")
}
sql(s"""drop table uniqdata""").collect
}
//Verifying load data with single Pass true and BAD_RECORDS_ACTION='REDIRECT'
// With REDIRECT, bad records are diverted (presumably to the bad-records store —
// confirm against CarbonData docs) yet the expected count here is still 2013.
test("Loading-004-01-01-01_001-TC_071", Include) {
// Clean slate in case a previous (failed) run left the table behind.
sql(s"""drop table if exists uniqdata""").collect
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='REDIRECT','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1','SINGLE_Pass'='true')""").collect
checkAnswer(s"""select count(*) from uniqdata""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_071")
sql(s"""drop table uniqdata""").collect
}
//Verifying load data with single Pass false and BAD_RECORDS_ACTION='REDIRECT'
// Two-pass counterpart of TC_071; same data file and expected 2013-row count.
test("Loading-004-01-01-01_001-TC_072", Include) {
// Clean slate via the suite helper (equivalent to drop table if exists).
dropTable("uniqdata")
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='REDIRECT','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1','SINGLE_Pass'='false')""").collect
checkAnswer(s"""select count(*) from uniqdata""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_072")
sql(s"""drop table uniqdata""").collect
}
//Verifying load data with single Pass true and BAD_RECORDS_ACTION= ='IGNORE'
test("Loading-004-01-01-01_001-TC_073", Include) {
dropTable("uniqdata")
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='IGNORE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1','SINGLE_Pass'='true')""").collect
checkAnswer(s"""select count(*) from uniqdata""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_073")
sql(s"""drop table uniqdata""").collect
}
// TC_074: single pass = false with BAD_RECORDS_ACTION = 'IGNORE'; expects 2013 rows after load.
test("Loading-004-01-01-01_001-TC_074", Include) {
  // Consistency fix: every sibling test uses the shared dropTable helper rather
  // than an inline "drop table if exists" statement.
  dropTable("uniqdata")
  sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
  sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='IGNORE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1','SINGLE_Pass'='false')""").collect
  checkAnswer(s"""select count(*) from uniqdata""",
    Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_074")
  sql(s"""drop table uniqdata""").collect
}
// TC_075: single pass = true with BAD_RECORDS_ACTION = 'FORCE'; expects 2013 rows after load.
test("Loading-004-01-01-01_001-TC_075", Include) {
dropTable("uniqdata")
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1','SINGLE_Pass'='true')""").collect
checkAnswer(s"""select count(*) from uniqdata""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_075")
sql(s"""drop table uniqdata""").collect
}
// TC_076: single pass = false with BAD_RECORDS_ACTION = 'FORCE'; expects 2013 rows after load.
test("Loading-004-01-01-01_001-TC_076", Include) {
dropTable("uniqdata")
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1','SINGLE_Pass'='false')""").collect
checkAnswer(s"""select count(*) from uniqdata""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_076")
sql(s"""drop table uniqdata""").collect
}
// TC_077: single pass = false with BAD_RECORDS_LOGGER_ENABLE = 'TRUE'; expects 2013 rows after load.
test("Loading-004-01-01-01_001-TC_077", Include) {
dropTable("uniqdata")
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1','SINGLE_Pass'='false')""").collect
checkAnswer(s"""select count(*) from uniqdata""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_077")
sql(s"""drop table uniqdata""").collect
}
// TC_078: single pass = false with BAD_RECORDS_LOGGER_ENABLE = 'FALSE'; expects 2013 rows after load.
test("Loading-004-01-01-01_001-TC_078", Include) {
dropTable("uniqdata")
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='FALSE', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1','SINGLE_Pass'='false')""").collect
checkAnswer(s"""select count(*) from uniqdata""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_078")
sql(s"""drop table uniqdata""").collect
}
// TC_079: single pass = true with BAD_RECORDS_LOGGER_ENABLE = 'TRUE'; expects 2013 rows after load.
test("Loading-004-01-01-01_001-TC_079", Include) {
dropTable("uniqdata")
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1','SINGLE_Pass'='true')""").collect
checkAnswer(s"""select count(*) from uniqdata""",
Seq(Row(2013)), "singlepassTestCase_Loading-004-01-01-01_001-TC_079")
sql(s"""drop table uniqdata""").collect
}
// TC_080: single pass = true on a table with NO_INVERTED_INDEX and dictionary_exclude on
// the same column (CUST_NAME); expects all 10 rows of the small fixture to load.
test("Loading-004-01-01-01_001-TC_080", Include) {
dropTable("uniqdata")
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String, DOB timestamp) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('NO_INVERTED_INDEX'='CUST_NAME','dictionary_exclude'='CUST_NAME')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/10_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,DOB','SINGLE_Pass'='true')""").collect
checkAnswer(s"""select count(*) from uniqdata""",
Seq(Row(10)), "singlepassTestCase_Loading-004-01-01-01_001-TC_080")
sql(s"""drop table uniqdata""").collect
}
// TC_081: single pass = true on a table with NO_INVERTED_INDEX and DICTIONARY_INCLUDE on a
// measure column (CUST_ID); expects all 10 rows to load.
test("Loading-004-01-01-01_001-TC_081", Include) {
dropTable("uniqdata")
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String, DOB timestamp) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_INCLUDE'='CUST_ID','NO_INVERTED_INDEX'='CUST_ID')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/10_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,DOB','SINGLE_Pass'='true')""").collect
checkAnswer(s"""select count(*) from uniqdata""",
Seq(Row(10)), "singlepassTestCase_Loading-004-01-01-01_001-TC_081")
sql(s"""drop table uniqdata""").collect
}
// TC_084: single pass = false combined with a COLUMNDICT dictionary path.
// The whole create/load/check sequence is wrapped in intercept[Exception], so the test
// passes as soon as any of those statements throws — presumably the LOAD is the one
// expected to fail (COLUMNDICT with SINGLE_PASS=false); TODO confirm which statement throws.
test("Loading-004-01-01-01_001-TC_084", Include) {
dropTable("uniqdata")
intercept[Exception] {
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String, DOB timestamp) STORED BY 'org.apache.carbondata.format'""")
.collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/10_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FAIL','FILEHEADER'='CUST_ID,CUST_NAME,DOB','SINGLE_PASS'='false','COLUMNDICT'='CUST_NAME:$resourcesPath/Data/singlepass/data/cust_name.txt')""")
.collect
checkAnswer(
s"""select count(*) from uniqdata""",
Seq(Row(10)),
"singlepassTestCase_Loading-004-01-01-01_001-TC_084")
}
sql(s"""drop table uniqdata""").collect
}
// TC_085: single pass = true combined with a COLUMNDICT dictionary path for CUST_NAME;
// expects all 10 rows to load (no exception, unlike the single-pass=false case in TC_084).
test("Loading-004-01-01-01_001-TC_085", Include) {
dropTable("uniqdata")
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String, DOB timestamp) STORED BY 'org.apache.carbondata.format'""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/10_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FAIL','FILEHEADER'='CUST_ID,CUST_NAME,DOB','SINGLE_PASS'='true','COLUMNDICT'='CUST_NAME:$resourcesPath/Data/singlepass/data/cust_name.txt')""").collect
checkAnswer(s"""select count(*) from uniqdata""",
Seq(Row(10)), "singlepassTestCase_Loading-004-01-01-01_001-TC_085")
sql(s"""drop table uniqdata""").collect
}
// TC_088: single pass = false with all dimensions covered by DICTIONARY_EXCLUDE (CUST_NAME)
// and DICTIONARY_INCLUDE (CUST_ID); expects all 10 rows to load.
test("Loading-004-01-01-01_001-TC_088", Include) {
dropTable("uniqdata")
sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String, DOB timestamp) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='CUST_NAME','DICTIONARY_INCLUDE'='CUST_ID')""").collect
sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/10_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FAIL','FILEHEADER'='CUST_ID,CUST_NAME,DOB','SINGLE_PASS'='false')""").collect
checkAnswer(s"""select count(*) from uniqdata""",
Seq(Row(10)), "singlepassTestCase_Loading-004-01-01-01_001-TC_088")
sql(s"""drop table uniqdata""").collect
}
// TC_089: single pass = true with all dimensions covered by DICTIONARY_EXCLUDE (CUST_NAME)
// and DICTIONARY_INCLUDE (CUST_ID); expects all 10 rows to load.
test("Loading-004-01-01-01_001-TC_089", Include) {
  dropTable("uniqdata")
  sql(s"""CREATE TABLE if not exists uniqdata (CUST_ID int,CUST_NAME String, DOB timestamp) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='CUST_NAME','DICTIONARY_INCLUDE'='CUST_ID')""").collect
  // Fix: this test is the single-pass=TRUE variant of TC_088, but it previously loaded with
  // 'SINGLE_PASS'='false', duplicating TC_088 exactly and never exercising the intended path.
  sql(s"""LOAD DATA INPATH '$resourcesPath/Data/singlepass/data/10_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FAIL','FILEHEADER'='CUST_ID,CUST_NAME,DOB','SINGLE_PASS'='true')""").collect
  checkAnswer(s"""select count(*) from uniqdata""",
    Seq(Row(10)), "singlepassTestCase_Loading-004-01-01-01_001-TC_089")
  sql(s"""drop table uniqdata""").collect
}
// Handle to the global CarbonProperties singleton shared by all tests in this suite.
val prop = CarbonProperties.getInstance()
// Snapshot of the pre-suite auto-load-merge value so afterAll can restore it.
val p1 = prop.getProperty("carbon.enable.auto.load.merge", CarbonCommonConstants.DEFAULT_ENABLE_AUTO_LOAD_MERGE)
override protected def beforeAll() {
// Enable auto load merge for the duration of this suite.
prop.addProperty("carbon.enable.auto.load.merge", "true")
}
override def afterAll: Unit = {
// Restore the original value captured in p1.
prop.addProperty("carbon.enable.auto.load.merge", p1)
}
} | sgururajshetty/carbondata | integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/SinglepassTestCase.scala | Scala | apache-2.0 | 69,515 |
package com.eevolution.context.dictionary.infrastructure.repository
import com.eevolution.context.dictionary.domain.model.TaskAccess
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: emeris.hernandez@e-evolution.com, http://www.e-evolution.com , http://github.com/EmerisScala
* Created by emeris.hernandez@e-evolution.com , www.e-evolution.com on 27/10/17.
*/
/**
* Task Access Mapping
*/
trait TaskAccessMapping {
// Quill schema mapping: binds each TaskAccess field to its column name in the
// AD_Task_Access table (ADempiere naming convention, e.g. tenantId -> AD_Client_ID).
val queryTaskAccess = quote {
querySchema[TaskAccess]("AD_Task_Access",
_.taskId-> "AD_Task_ID",
_.roleId-> "AD_Role_ID",
_.tenantId-> "AD_Client_ID",
_.organizationId-> "AD_Org_ID",
_.isActive-> "IsActive",
_.created-> "Created",
_.createdBy-> "CreatedBy",
_.updated-> "Updated",
_.updatedBy-> "UpdatedBy",
_.isReadWrite-> "IsReadWrite",
_.uuid-> "UUID")
}
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/TaskAccessMapping.scala | Scala | gpl-3.0 | 1,634 |
// scalac: -Xsource:2.13
//
import scala.language.higherKinds
/** Universally quantified value: can produce an `F[A]` for every type `A`. */
trait Forall[F[_]] {
def instantiate[A]: F[A]
}
object Forall {
/** Syntax enrichment so a Forall can be applied directly (`f[A]`) instead of via `instantiate`. */
implicit class Ops[F[_]](f: Forall[F]) {
def apply[A]: F[A] = f.instantiate[A]
}
}
/** Two-parameter analogue of [[Forall]]: can produce an `F[A, B]` for all `A`, `B`. */
trait Forall2[F[_, _]] {
def instantiate[A, B]: F[A, B]
}
object Forall2 {
/** Syntax enrichment so a Forall2 can be applied directly (`f[A, B]`). */
implicit class Ops[F[_, _]](f: Forall2[F]) {
def apply[A, B]: F[A, B] = f.instantiate[A, B]
}
}
/** Minimal type class exposing monadic bind for a type constructor `F`. */
trait FlatMap[F[_]] {
  def flatMap[A, B](fa: F[A])(f: A => F[B]): F[B]
}
object FlatMap {
  /** Canonical instance for Option, spelled out with an explicit pattern match. */
  implicit val optionInstance: FlatMap[Option] = new FlatMap[Option] {
    def flatMap[A, B](fa: Option[A])(f: A => Option[B]): Option[B] =
      fa match {
        case Some(a) => f(a)
        case None    => None
      }
  }
}
object Test extends App {
// natural transformation
type ~>[F[_], G[_]] = Forall[({ type L[A] = F[A] => G[A] })#L]
// binatural transformation
type ~~>[F[_, _], G[_, _]] = Forall2[({ type L[A, B] = F[A, B] => G[A, B] })#L]
// Right/left actions of a binary type constructor F on a unary one G.
type RightAction[G[_], F[_, _]] = Forall2[({ type L[A, B] = (G[A], F[A, B]) => G[B] })#L]
type LeftAction[G[_], F[_, _]] = Forall2[({ type L[A, B] = (F[A, B], G[B]) => G[A] })#L]
val headOpt = new (List ~> Option) {
def instantiate[A]: List[A] => Option[A] = _.headOption
}
// tests that implicit Forall.Ops is found (requires unifying the HK type variable
// against the type-lambda alias); both call forms must compile
println(headOpt.apply(List(1, 2, 3)))
println(headOpt[Int](List(1, 2, 3)))
val someEntry = new (Map ~~> ({ type L[K, V] = Option[(K, V)] })#L) {
def instantiate[K, V]: Map[K, V] => Option[(K, V)] = _.headOption
}
// tests that implicit Forall2.Ops is found
println(someEntry.apply(Map(("hi", 5))))
println(someEntry[String, Int](Map(("hi", 5))))
// Kleisli composition expressed as a right action on (Z => F[_]).
def kleisliPostCompose[F[_], Z](implicit F: FlatMap[F]) =
new RightAction[({ type L[A] = Z => F[A] })#L, ({ type L[A, B] = A => F[B] })#L] {
def instantiate[A, B]: (Z => F[A], A => F[B]) => (Z => F[B]) = (f, g) => (z => F.flatMap(f(z))(g))
}
// Dual: Kleisli composition expressed as a left action on (_ => F[C]).
def kleisliPreCompose[F[_], C](implicit F: FlatMap[F]) =
new LeftAction[({ type L[B] = B => F[C] })#L, ({ type L[A, B] = A => F[B] })#L] {
def instantiate[A, B]: (A => F[B], B => F[C]) => (A => F[C]) = (f, g) => (a => F.flatMap(f(a))(g))
}
// Stub Kleisli arrows; the values are irrelevant, only the types matter here.
def parseInt(s: String): Option[Int] = Some(42)
def toChar(i: Int): Option[Char] = Some('X')
val ra = kleisliPostCompose[Option, String]
val la = kleisliPreCompose[Option, Char]
// tests that implicit Forall2.Ops is found
println( ra.apply(parseInt(_), toChar(_)).apply("") )
println( ra[Int, Char](parseInt(_), toChar(_))("") )
println( la.apply(parseInt(_), toChar(_))("") )
println( la[String, Int](parseInt(_), toChar(_))("") )
}
| martijnhoekstra/scala | test/files/run/hk-typevar-unification.scala | Scala | apache-2.0 | 2,564 |
/*
* Copyright 2016 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.regex
import kantan.regex.ops._
import org.scalacheck.{Arbitrary, Gen}
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.scalacheck.ScalaCheckPropertyChecks
class MatchTests extends AnyFunSuite with ScalaCheckPropertyChecks with Matchers {
test("length should return the expected value") {
// Decoder that asserts on the group count of every Match before delegating to the default.
def validating(length: Int): MatchDecoder[List[Int]] = MatchDecoder[List[Int]].contramapEncoded { (m: Match) =>
m.length should be(length)
m
}
forAll(Gen.nonEmptyListOf(Arbitrary.arbitrary[Int])) { (is: List[Int]) =>
implicit val decoder: MatchDecoder[List[Int]] = validating(is.length)
// One capture group per generated Int, so Match.length must equal is.length.
val regex = is.map(_ => "(-?\\\\d+)").mkString(" ").asUnsafeRegex[List[Int]].map(_.fold(e => throw e, identity))
regex.eval(is.mkString(" ")).next() should be(is)
}
}
test("Out of bound groups should generate a NoSuchGroupId") {
// Decoder that reads a fixed (invalid) group index from the match.
def outOfBounds(i: Int): MatchDecoder[Int] = MatchDecoder.from(_.decode[Int](i))
forAll(Gen.nonEmptyListOf(Arbitrary.arbitrary[Int]), Arbitrary.arbitrary[Int].suchThat(_ != -1)) { (is, offset) =>
// Index derived from the group count plus a non -1 offset, aimed past the valid groups.
val index = is.length + 1 + offset
implicit val decoder: MatchDecoder[Int] = outOfBounds(index)
val regex = is.map(_ => "(-?\\\\d+)").mkString(" ").asUnsafeRegex[Int]
regex.eval(is.mkString(" ")).next() should be(DecodeResult.noSuchGroupId(index))
}
}
}
| nrinaudo/kantan.regex | core/shared/src/test/scala/kantan/regex/MatchTests.scala | Scala | apache-2.0 | 2,057 |
package code
package snippet
import net.liftweb.util.Helpers._
import java.util.Date
class HelloWorld {
  // Computed once on first access and reused for every subsequent render.
  lazy val date = new Date()

  /** CSS selector transform: replaces the children of the element with id "time"
   *  with the string form of `date`.
   */
  def howdy = {
    val selector = "#time *"
    selector #> date.toString
  }
}
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package org.apache.spark.sql.streaming
import scala.collection.immutable
import org.apache.spark.rdd.{EmptyRDD, RDD}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.streaming.StreamBaseRelation._
import org.apache.spark.streaming.StreamUtils
import org.apache.spark.streaming.dstream.DStream
/**
* A PhysicalPlan wrapper of SchemaDStream, inject the validTime and
* generate an effective RDD of current batchDuration.
*
* @param output
* @param rowStream
*/
case class PhysicalDStreamPlan(output: Seq[Attribute],
    @transient rowStream: DStream[InternalRow])
    extends SparkPlan with StreamPlan {

  // Leaf node: declare the conventional Seq[SparkPlan] return type rather than
  // the exotic singleton type immutable.Nil.type (behavior is unchanged).
  def children: Seq[SparkPlan] = Nil

  /** Looks up (or computes) the RDD of `rowStream` for the injected `validTime`;
   *  falls back to an empty RDD when the batch produced nothing.
   */
  override def doExecute(): RDD[InternalRow] = {
    assert(validTime != null)
    StreamUtils.getOrCompute(rowStream, validTime)
      .getOrElse(new EmptyRDD[InternalRow](sparkContext))
  }
}
| vjr/snappydata | core/src/main/scala/org/apache/spark/sql/streaming/PhysicalDStreamPlan.scala | Scala | apache-2.0 | 1,646 |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2015-2021 Andre White.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.truthencode.ddo.model.feats
import io.truthencode.ddo.model.attribute.Attribute
import io.truthencode.ddo.support.requisite.{
AttributeRequisiteImpl,
FeatRequisiteImpl,
RequiresAllOfAttribute,
RequiresBaB
}
/**
* Icon Feat Brutal Throw.png Brutal Throw Passive You can use your Strength bonus instead of
* Dexterity bonus to determine bonus to attack with Thrown weapons if it is higher.
*
* Strength 13 Base Attack Bonus +1
*/
trait BrutalThrow
extends FeatRequisiteImpl with Passive with AttributeRequisiteImpl with RequiresAllOfAttribute
with RequiresBaB with FighterBonusFeat {
  self: GeneralFeat =>

  // Prerequisite: Base Attack Bonus of at least +1.
  override def requiresBaB: Int = 1

  // Prerequisite: Strength 13.
  override def allOfAttributes: Seq[(Attribute, Int)] = Seq(Attribute.Strength -> 13)
}
| adarro/ddo-calc | subprojects/common/ddo-core/src/main/scala/io/truthencode/ddo/model/feats/BrutalThrow.scala | Scala | apache-2.0 | 1,446 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.energy
import squants.mass.Kilograms
import squants.QuantityParseException
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
/**
* @author garyKeorkunian
* @since 0.1
*
*/
class SpecificEnergySpec extends AnyFlatSpec with Matchers {
behavior of "SpecificEnergy and its Units of Measure"
it should "create values using UOM factories" in {
Grays(1).toGrays should be(1)
Rads(1).toRads should be(1)
ErgsPerGram(1).toErgsPerGram should be(1)
}
it should "create values from properly formatted Strings" in {
SpecificEnergy("10.22 Gy").get should be(Grays(10.22))
SpecificEnergy("8.47 rad").get should be(Rads(8.47))
SpecificEnergy("6.79 erg/g").get should be(ErgsPerGram(6.79))
// Malformed inputs come back as a failed Try carrying QuantityParseException.
SpecificEnergy("10.22 zz").failed.get should be(QuantityParseException("Unable to parse SpecificEnergy", "10.22 zz"))
SpecificEnergy("ZZ Gy").failed.get should be(QuantityParseException("Unable to parse SpecificEnergy", "ZZ Gy"))
}
it should "properly convert to all supported Units of Measure" in {
// Conversion factors exercised below: 1 Gy = 100 rad = 10000 erg/g.
val x = Grays(1)
x.toGrays should be(1)
x.toRads should be(100)
x.toErgsPerGram should be(10000)
val y = Rads(1)
y.toRads should be(1)
y.toGrays should be(0.01)
y.toErgsPerGram should be(100)
val z = ErgsPerGram(1)
z.toErgsPerGram should be(1)
z.toGrays should be(0.0001)
z.toRads should be(0.01)
}
it should "return properly formatted strings for all supported Units of Measure" in {
Grays(1).toString(Grays) should be("1.0 Gy")
Rads(1).toString(Rads) should be("1.0 rad")
ErgsPerGram(1).toString(ErgsPerGram) should be("1.0 erg/g")
}
it should "return Energy when multiplied by Mass" in {
Grays(1) * Kilograms(10) should be(Joules(10))
Rads(1) * Kilograms(10) should be(Joules(0.1))
ErgsPerGram(1) * Kilograms(10) should be(Joules(0.001))
}
behavior of "Conversions"
it should "provide aliases for single unit values" in {
import SpecificEnergyConversions._
gray should be(Grays(1))
rad should be(Rads(1))
ergsPerGram should be(ErgsPerGram(1))
}
it should "provide implicit conversion from Double" in {
import SpecificEnergyConversions._
val d = 10d
d.grays should be(Grays(d))
d.rads should be(Rads(d))
d.ergsPerGram should be(ErgsPerGram(d))
}
it should "provide Numeric support" in {
import SpecificEnergyConversions.SpecificEnergyNumeric
val ses = List(Grays(100), Grays(10))
ses.sum should be(Grays(110))
val sesRad = List(Rads(100), Rads(10))
sesRad.sum should be(Rads(110))
// The Grays(0) value ensures we get the sum in Grays, otherwise unit depends on Scala version
// due to changed .sum implementation in 2.13
val sesErg = List(Grays(0), ErgsPerGram(100), ErgsPerGram(10))
sesErg.sum should be(Grays(0.011))
}
}
| garyKeorkunian/squants | shared/src/test/scala/squants/energy/SpecificEnergySpec.scala | Scala | apache-2.0 | 3,419 |
package cn.gridx.logback.custom.layouts
import org.slf4j.LoggerFactory
/**
* Created by tao on 10/9/16.
*/
/** Manual smoke test for the custom highlight layout: emits two rounds of
 *  DEBUG/INFO/WARN/ERROR records so the formatted console output can be inspected.
 */
object TestMyHighlight extends App {
  println("开始")
  val log = LoggerFactory.getLogger(this.getClass)
  println(log)
  // The round counter is now the (1-based) loop variable itself; previously it was
  // tracked with a mutable `var c` alongside an unused index `i`. Output is identical.
  for (c <- 1 to 2) {
    log.debug(s"$c : " + "DEBUG")
    log.info(s"$c : " + "Info")
    log.warn(s"$c : " + "WARN")
    log.error(s"$c : " + "Error")
  }
}
| TaoXiao/Scala | logback/src/main/scala/cn/gridx/logback/custom/layouts/TestMyHighlight.scala | Scala | apache-2.0 | 426 |
/* scala-stm - (c) 2009-2010, Stanford University, PPL */
package scala.concurrent.stm.ccstm
import java.util.concurrent.atomic.AtomicLong
import scala.annotation.tailrec
/** A counter with a linearizable increment operator and adaptive contention
* avoidance. Reading the counter with `apply()` is not linearizable (unless
* the only delta passed to += is 1) and is not optimized.
*/
/** A striped counter: increments land on one of several AtomicLong stripes
 *  (chosen per thread), and the stripe array doubles — up to MaxStripes —
 *  whenever a CAS collision is observed.
 */
private[ccstm] class Counter {

  // A constant value, not a method: evaluated once instead of on every use.
  // (The class previously opened with a vestigial `extends { ... }` template
  // body, which is equivalent to a plain body and has been dropped.)
  private final val MaxStripes = 64

  // this doesn't need to be volatile because when we grow it we retain all of
  // the old AtomicLong-s
  private var _stripes = Array(new AtomicLong)

  /** Doubles the stripe array (bounded by MaxStripes), keeping the existing
   *  AtomicLong instances so no previously recorded counts are lost.
   */
  private def grow(): Unit = {
    synchronized {
      if (_stripes.length < MaxStripes) {
        val repl = new Array[AtomicLong](_stripes.length * 2)
        System.arraycopy(_stripes, 0, repl, 0, _stripes.length)
        var i = _stripes.length
        while (i < repl.length) {
          repl(i) = new AtomicLong
          i += 1
        }
        _stripes = repl
      }
    }
  }

  /** Adds `delta` to the counter; zero deltas are skipped entirely. */
  def += (delta: Int): Unit = {
    if (delta != 0)
      incr(delta)
  }

  @tailrec private def incr(delta: Int): Unit = {
    val s = _stripes
    val i = CCSTM.hash(Thread.currentThread) & (s.length - 1)
    val prev = s(i).get
    if (!s(i).compareAndSet(prev, prev + delta)) {
      // Contention detected: try to add stripes, then retry on the
      // (possibly replaced) stripe array.
      grow()
      incr(delta)
    }
  }

  /** Sum of all stripes. Not linearizable with respect to concurrent +=
   *  unless all deltas are 1 (see class note).
   */
  def apply(): Long = _stripes.foldLeft(0L)( _ + _.get )

  override def toString = apply().toString
}
package com.ponkotuy.data
import org.json4s._
import scala.util.Try
import com.ponkotuy.tool.Pretty
/**
*
* @author ponkotuy
* Date: 14/04/17.
*/
case class BattleResult(
    enemies: List[Int], winRank: String, exp: Int, mvp: Int,
    baseExp: Int, shipExp: List[Int], lostFlag: List[Boolean], questName: String, questLevel: Int, enemyDeck: String,
    firstClear: Boolean, getShip: Option[GetShip]) {

  /** Human-readable digest of the battle outcome (rank, MVP, enemy, drop). */
  def summary: String = {
    val fields = Map(
      "Rank" -> winRank,
      "MVP" -> mvp,
      "Enemy" -> enemyDeck,
      "ドロップ艦娘" -> getShip.map(_.name).getOrElse("なし")
    )
    Pretty(fields)
  }

  /** True when the rank counts as a win: S, A or B. */
  def win: Boolean = Set("S", "A", "B").contains(winRank)
}
object BattleResult {
implicit val format = DefaultFormats
/** Builds a BattleResult from the battle-result JSON payload.
* NOTE(review): ship-indexed arrays are consumed with drop(1)/tail — presumably the
* first element is an API padding entry; confirm against the upstream payload format.
*/
def fromJson(obj: JValue): BattleResult = {
val enemies = toIntList(obj \\ "api_ship_id").drop(1)
// Missing rank is mapped to the sentinel "N" rather than failing extraction.
val winRank = (obj \\ "api_win_rank").extractOrElse("N")
val exp = (obj \\ "api_get_exp").extract[Int]
val mvp = (obj \\ "api_mvp").extract[Int]
val baseExp = (obj \\ "api_get_base_exp").extract[Int]
val shipExp = toIntList(obj \\ "api_get_ship_exp").drop(1)
val lostFlag = {
val result = toIntList(obj \\ "api_lost_flag")
// Guard the tail call: the field may be absent, in which case toIntList yields Nil.
if(result.isEmpty) Nil
else result.tail.map(_ != 0)
}
val JString(questName) = obj \\ "api_quest_name"
val questLevel = (obj \\ "api_quest_level").extract[Int]
val JString(enemyDeck) = obj \\ "api_enemy_info" \\ "api_deck_name"
val firstClear = (obj \\ "api_first_clear").extract[Int] != 0
val getShip = GetShip.fromJson(obj \\ "api_get_ship")
BattleResult(
enemies, winRank, exp, mvp, baseExp, shipExp, lostFlag, questName, questLevel, enemyDeck, firstClear, getShip
)
}
/** Extracts a JSON array as Ints; non-array values (or failures) yield Nil. */
def toIntList(obj: JValue): List[Int] = {
Try {
val JArray(xs) = obj
xs.flatMap(_.extractOpt[Int])
}.getOrElse(Nil)
}
}
// Ship drop obtained from a battle: master id, ship type label and display name.
case class GetShip(id: Int, stype: String, name: String)
object GetShip {
implicit val format = DefaultFormats
/** Parses the drop section of the payload; None when the fields are absent or malformed. */
def fromJson(obj: JValue): Option[GetShip] = {
Try {
val id = (obj \\ "api_ship_id").extract[Int]
val JString(stype) = obj \\ "api_ship_type"
val JString(name) = obj \\ "api_ship_name"
GetShip(id, stype, name)
}.toOption
}
// Sentinel value representing "no drop".
val empty = GetShip(0, "-", "無し")
}
| ttdoda/MyFleetGirls | library/src/main/scala/com/ponkotuy/data/BattleResult.scala | Scala | mit | 2,269 |
package com.ing.baker.runtime.akka.actor.process_instance
import com.ing.baker.petrinet.api._
import com.ing.baker.runtime.akka.actor.serialization.BakerSerializable
/**
* Describes the messages to and from a PetriNetInstance actor.
*/
object ProcessInstanceProtocol {
/**
* A common trait for all commands to a petri net instance.
*/
sealed trait Command extends BakerSerializable
/**
* Command to request the current state of the petri net instance.
*/
case object GetState extends Command
/**
* Command to stop and optionally delete the process instance.
*/
case class Stop(delete: Boolean = false) extends Command
object Initialize {
// Convenience constructors: marshal a typed Marking[P] into its id-based wire form.
// The single-argument overload leaves the initial state as null.
def apply[P : Identifiable](marking: Marking[P]): Initialize = Initialize(marking.marshall, null)
def apply[P : Identifiable](marking: Marking[P], state: Any): Initialize = Initialize(marking.marshall, state)
}
/**
* Command to initialize a petri net instance with an initial marking and state.
*/
case class Initialize(marking: Marking[Id], state: Any) extends Command
/**
* Command to fire a specific transition with input.
*/
case class FireTransition(
transitionId: Long,
input: Any,
correlationId: Option[String] = None) extends Command
/**
* Overrides the chosen exception strategy of a job (running transition)
*
* @param jobId The id of the job.
* @param failureStrategy The new failure strategy
*/
case class OverrideExceptionStrategy(jobId: Long, failureStrategy: ExceptionStrategy) extends Command
/**
* A common trait for all responses coming from a petri net instance.
*/
sealed trait Response extends BakerSerializable
/**
* A response send in case any other command then 'Initialize' is sent to the actor in unitialized state.
*
* @param recipeInstanceId The identifier of the uninitialized actor.
*/
case class Uninitialized(recipeInstanceId: String) extends Response
/**
* Returned in case a second Initialize is send after a first is processed
*/
case class AlreadyInitialized(recipeInstanceId: String) extends Response
/**
* Indicates that the received FireTransition command with a specific correlation id was already received.
*/
case class AlreadyReceived(correlationId: String) extends Response
object Initialized {
def apply[P : Identifiable](marking: Marking[P]): Initialized = Initialized(marking.marshall, null)
def apply[P : Identifiable](marking: Marking[P], state: Any): Initialized = Initialized(marking.marshall, state)
}
/**
* A response indicating that the instance has been initialized in a certain state.
*
* This message is only send in response to an Initialize message.
*/
case class Initialized(
marking: Marking[Id],
state: Any) extends Response
/**
* Any message that is a response to a FireTransition command.
*/
sealed trait TransitionResponse extends Response {
val transitionId: Long
}
/**
* Response indicating that a transition has fired successfully
*/
case class TransitionFired(
jobId: Long,
override val transitionId: Id,
correlationId: Option[String],
consumed: Marking[Id],
produced: Marking[Id],
newJobsIds: Set[Long],
output: Any) extends TransitionResponse
/**
* Response indicating that a transition has failed.
*/
case class TransitionFailed(
jobId: Long,
override val transitionId: Id,
correlationId: Option[String],
consume: Marking[Id],
input: Any,
reason: String,
strategy: ExceptionStrategy) extends TransitionResponse
/**
* Response indicating that the transition could not be fired because it is not enabled.
*/
case class TransitionNotEnabled(
override val transitionId: Id,
reason: String) extends TransitionResponse
/**
* General response indicating that the send command was invalid.
*
* @param reason The invalid reason.
*/
case class InvalidCommand(reason: String) extends Response
/**
* The exception state of a transition.
*/
case class ExceptionState(
failureCount: Int,
failureReason: String,
failureStrategy: ExceptionStrategy)
sealed trait ExceptionStrategy {
def isBlock: Boolean
def isRetry: Boolean
def isContinue: Boolean
}
object ExceptionStrategy {
case object BlockTransition extends ExceptionStrategy {
def isBlock: Boolean = true
def isRetry: Boolean = false
def isContinue: Boolean = false
}
case class RetryWithDelay(delay: Long) extends ExceptionStrategy {
require(delay >= 0, "Delay must be greater then zero")
def isBlock: Boolean = false
def isRetry: Boolean = true
def isContinue: Boolean = false
}
case class Continue(marking: Marking[Id], output: Any) extends ExceptionStrategy {
def isBlock: Boolean = false
def isRetry: Boolean = false
def isContinue: Boolean = true
}
}
/**
* Response containing the state of the `Job`.
*/
case class JobState(
id: Long,
transitionId: Long,
consumedMarking: Marking[Id],
input: Any,
exceptionState: Option[ExceptionState]) {
def isActive: Boolean = exceptionState match {
case Some(ExceptionState(_, _, ExceptionStrategy.RetryWithDelay(_))) => true
case None => true
case _ => false
}
}
/**
* Response containing the state of the process.
*/
case class InstanceState(
sequenceNr: Long,
marking: Marking[Id],
state: Any,
jobs: Map[Long, JobState]) extends Response
}
| ing-bank/baker | core/akka-runtime/src/main/scala/com/ing/baker/runtime/akka/actor/process_instance/ProcessInstanceProtocol.scala | Scala | mit | 5,668 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.