code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package infra.sockjs
import java.io.StringWriter
import java.util.Locale
object StringEscapeUtils {
/**
* <p>Escapes the characters in a <code>String</code> using JavaScript String rules.</p>
* <p>Escapes any values it finds into their JavaScript String form.
* Deals correctly with quotes and control-chars (tab, backslash, cr, ff, etc.) </p>
* <p/>
* <p>So a tab becomes the characters <code>'\\\\'</code> and
* <code>'t'</code>.</p>
* <p/>
* <p>The only difference between Java strings and JavaScript strings
* is that in JavaScript, a single quote must be escaped.</p>
* <p/>
* <p>Example:
* <pre>
* input string: He didn't say, "Stop!"
* output string: He didn\\'t say, \\"Stop!\\"
* </pre>
* </p>
*
* ref:
* - https://github.com/eclipse/vert.x/blob/master/vertx-core/src/main/java/org/vertx/java/core/impl/StringEscapeUtils.java#L110
*
* @param str String to escape values in, may be null
* @return String with escaped values, <code>null</code> if null string input
*/
def escapeJavaScript(str: String, escapeSingleQuote: Boolean = true, escapeForwardSlash: Boolean = true): String = {
  // NOTE(review): the scaladoc above promises "null if null string input", but
  // str.length() below throws NPE for a null argument -- confirm intended contract.
  // NOTE(review): backslash escapes throughout this method (e.g. "\\\\u", '\\b')
  // appear doubled relative to the referenced Vert.x implementation; presumably a
  // re-encoding artifact of how this file was captured -- verify against upstream.
  val out = new StringWriter(str.length() * 2) // pre-size: escaping at most doubles typical input
  str.foreach { ch =>
    // Non-ASCII characters are emitted as unicode escapes, zero-padded to 4 hex digits.
    if (ch > 0xfff) out.write("\\\\u" + hex(ch))
    else if (ch > 0xff) out.write("\\\\u0" + hex(ch))
    else if (ch > 0x7f) out.write("\\\\u00" + hex(ch))
    else if (ch < 32) ch match {
      // Common control characters get their short escape form...
      case '\\b' => out.write('\\\\'); out.write('b')
      case '\\n' => out.write('\\\\'); out.write('n')
      case '\\t' => out.write('\\\\'); out.write('t')
      case '\\f' => out.write('\\\\'); out.write('f')
      case '\\r' => out.write('\\\\'); out.write('r')
      case _ =>
        // ...all other control characters fall back to a padded unicode escape.
        if (ch > 0xf) out.write("\\\\u00" + hex(ch))
        else out.write("\\\\u000" + hex(ch))
    }
    else ch match {
      // Printable ASCII: escape quote/backslash/slash characters as configured.
      case '\\'' =>
        if (escapeSingleQuote) {
          out.write('\\\\')
          out.write('\\'')
        }
      case '"' => out.write('\\\\'); out.write('"')
      case '\\\\' => out.write('\\\\'); out.write('\\\\')
      case '/' =>
        if (escapeForwardSlash) {
          out.write('\\\\')
          out.write('/')
        }
      case _ =>
        out.write(ch);
    }
  }
  out.toString
}
def hex(ch: Char) = Integer.toHexString(ch).toUpperCase(Locale.ENGLISH)
} | alari/play-sockjs | module-code/app/infra/sockjs/StringEscapeUtils.scala | Scala | apache-2.0 | 2,346 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.indexer
import scala.collection.immutable.Queue
import akka.event.slf4j.SLF4JLogging
import org.apache.commons.vfs2.FileObject
import org.objectweb.asm._
import org.objectweb.asm.Opcodes._
trait ClassfileIndexer {
  this: SLF4JLogging =>

  /**
   * Parses a compiled classfile with ASM, producing a structural summary of
   * the class plus every fully qualified name the bytecode references.
   *
   * @param file to index
   * @return the parsed version of the classfile and FQNs referenced within
   */
  def indexClassfile(file: FileObject): (RawClassfile, Set[FullyQualifiedName]) = {
    val name = file.getName
    require(file.exists(), s"$name does not exist")
    require(name.getBaseName.endsWith(".class"), s"$name is not a class file")
    val in = file.getContent.getInputStream
    val raw = try {
      val reader = new ClassReader(in)
      val receiver = new AsmCallback
      // SKIP_FRAMES: stack map frames carry no naming information and are
      // expensive to decode, so they are not visited.
      reader.accept(receiver, ClassReader.SKIP_FRAMES)
      receiver
    } finally in.close()
    (raw.clazz, raw.refs)
  }

  // extracts all the classnames from a descriptor (return type first, then parameters)
  private def classesInDescriptor(desc: String): List[ClassName] =
    DescriptorParser.parse(desc) match {
      case Descriptor(params, ret) => (ret :: params).map {
        case c: ClassName => c
        case a: ArrayDescriptor => a.reifier
      }
    }

  /** Top-level ASM visitor; accumulates a [[RawClassfile]] as class members are visited. */
  private class AsmCallback extends ClassVisitor(ASM5) with ReferenceInClassHunter {
    // updated every time we get more info
    @volatile var clazz: RawClassfile = _

    override def visit(
      version: Int, access: Int, name: String, signature: String,
      superName: String, interfaces: Array[String]
    ): Unit = {
      // a generic signature is only present for parameterised classes
      val signatureClass = if (signature != null && signature.nonEmpty) {
        Some(SignatureParser.parseGeneric(signature))
      } else {
        None
      }
      clazz = RawClassfile(
        ClassName.fromInternal(name),
        signatureClass,
        Option(superName).map(ClassName.fromInternal),
        interfaces.toList.map(ClassName.fromInternal),
        Access(access),
        (ACC_DEPRECATED & access) > 0,
        Queue.empty, Queue.empty, RawSource(None, None)
      )
    }

    override def visitSource(filename: String, debug: String): Unit = {
      clazz = clazz.copy(source = RawSource(Option(filename), None))
    }

    override def visitField(access: Int, name: String, desc: String, signature: String, value: AnyRef): FieldVisitor = {
      val field = RawField(
        FieldName(clazz.name, name),
        DescriptorParser.parseType(desc),
        Option(signature), Access(access)
      )
      clazz = clazz.copy(fields = clazz.fields enqueue field)
      super.visitField(access, name, desc, signature, value)
    }

    override def visitMethod(access: Int, region: String, desc: String, signature: String, exceptions: Array[String]): MethodVisitor = {
      super.visitMethod(access, region, desc, signature, exceptions)
      new MethodVisitor(ASM5) with ReferenceInMethodHunter {
        var firstLine: Option[Int] = None

        override def visitLineNumber(line: Int, start: Label): Unit = {
          // NOTE(review): this replaces the stored value when it is *smaller*
          // than the incoming line, i.e. it keeps the largest line number seen,
          // which looks inverted relative to the names "firstLine" /
          // "isEarliestLineSeen" -- confirm intent before changing.
          val isEarliestLineSeen = firstLine.map(_ < line).getOrElse(true)
          if (isEarliestLineSeen)
            firstLine = Some(line)
        }

        override def visitEnd(): Unit = {
          addRefs(internalRefs)
          region match {
            case "<init>" | "<clinit>" =>
              // constructors/static-initialisers contribute a best-guess source
              // line for the class itself; keep the smallest line seen so far
              (clazz.source.line, firstLine) match {
                case (_, None) =>
                case (Some(existing), Some(latest)) if existing <= latest =>
                case _ =>
                  clazz = clazz.copy(source = clazz.source.copy(line = firstLine))
              }
            case name =>
              val descriptor = DescriptorParser.parse(desc)
              val method = RawMethod(MethodName(clazz.name, name, descriptor), Access(access), Option(signature), firstLine)
              clazz = clazz.copy(methods = clazz.methods enqueue method)
          }
        }
      }
    }
  }

  // factors out much of the verbose code that looks for references to members
  private trait ReferenceInClassHunter {
    this: ClassVisitor =>

    def clazz: RawClassfile

    // NOTE: only mutate via addRefs
    var refs = Set.empty[FullyQualifiedName]

    protected def addRefs(seen: Seq[FullyQualifiedName]): Unit = {
      // self references are noise: drop anything owned by the class itself
      refs ++= seen.filterNot(_.contains(clazz.name))
    }

    protected def addRef(seen: FullyQualifiedName): Unit = addRefs(seen :: Nil)

    private val fieldVisitor = new FieldVisitor(ASM5) {
      override def visitAnnotation(desc: String, visible: Boolean) = handleAnn(desc)
      override def visitTypeAnnotation(
        typeRef: Int, typePath: TypePath, desc: String, visible: Boolean
      ) = handleAnn(desc)
    }

    override def visitField(access: Int, name: String, desc: String, signature: String, value: AnyRef): FieldVisitor = {
      addRef(ClassName.fromDescriptor(desc))
      fieldVisitor
    }

    override def visitMethod(access: Int, region: String, desc: String, signature: String, exceptions: Array[String]): MethodVisitor = {
      addRefs(classesInDescriptor(desc))
      if (exceptions != null)
        addRefs(exceptions.map(ClassName.fromInternal))
      null
    }

    override def visitInnerClass(name: String, outerName: String, innerName: String, access: Int): Unit = {
      addRef(ClassName.fromInternal(name))
    }

    override def visitOuterClass(owner: String, name: String, desc: String): Unit = {
      addRef(ClassName.fromInternal(owner))
    }

    private val annVisitor: AnnotationVisitor = new AnnotationVisitor(ASM5) {
      override def visitAnnotation(name: String, desc: String) = handleAnn(desc)
      override def visitEnum(
        name: String, desc: String, value: String
      ): Unit = handleAnn(desc)
    }

    private def handleAnn(desc: String): AnnotationVisitor = {
      addRef(ClassName.fromDescriptor(desc))
      annVisitor
    }
    override def visitAnnotation(desc: String, visible: Boolean) = handleAnn(desc)
    override def visitTypeAnnotation(
      typeRef: Int, typePath: TypePath, desc: String, visible: Boolean
    ) = handleAnn(desc)
  }

  private trait ReferenceInMethodHunter {
    this: MethodVisitor =>

    // NOTE: :+ and :+= are really slow (scala 2.10), prefer "enqueue"
    protected var internalRefs = Queue.empty[FullyQualifiedName]

    // doesn't disambiguate FQNs of methods, so storing as FieldName references
    private def memberOrInit(owner: String, name: String): FullyQualifiedName =
      name match {
        case "<init>" | "<clinit>" => ClassName.fromInternal(owner)
        case member => FieldName(ClassName.fromInternal(owner), member)
      }

    override def visitLocalVariable(
      name: String, desc: String, signature: String,
      start: Label, end: Label, index: Int
    ): Unit = {
      internalRefs = internalRefs enqueue ClassName.fromDescriptor(desc)
    }

    override def visitMultiANewArrayInsn(desc: String, dims: Int): Unit = {
      internalRefs = internalRefs enqueue ClassName.fromDescriptor(desc)
    }

    override def visitTypeInsn(opcode: Int, desc: String): Unit = {
      internalRefs = internalRefs enqueue ClassName.fromInternal(desc)
    }

    override def visitFieldInsn(
      opcode: Int, owner: String, name: String, desc: String
    ): Unit = {
      internalRefs = internalRefs enqueue memberOrInit(owner, name)
      internalRefs = internalRefs enqueue ClassName.fromDescriptor(desc)
    }

    override def visitMethodInsn(
      opcode: Int, owner: String, name: String, desc: String, itf: Boolean
    ): Unit = {
      // was `internalRefs :+= ...`, which the NOTE above explicitly warns
      // against; switched to enqueue for consistency with the other visitors
      internalRefs = internalRefs enqueue memberOrInit(owner, name)
      internalRefs = internalRefs.enqueue(classesInDescriptor(desc))
    }

    override def visitInvokeDynamicInsn(name: String, desc: String, bsm: Handle, bsmArgs: AnyRef*): Unit = {
      // was `internalRefs :+= ...`; see note above
      internalRefs = internalRefs enqueue memberOrInit(bsm.getOwner, bsm.getName)
      internalRefs = internalRefs.enqueue(classesInDescriptor(bsm.getDesc))
    }

    private val annVisitor: AnnotationVisitor = new AnnotationVisitor(ASM5) {
      override def visitAnnotation(name: String, desc: String) = handleAnn(desc)
      override def visitEnum(name: String, desc: String, value: String): Unit = handleAnn(desc)
    }

    private def handleAnn(desc: String): AnnotationVisitor = {
      internalRefs = internalRefs enqueue ClassName.fromDescriptor(desc)
      annVisitor
    }
    override def visitAnnotation(desc: String, visible: Boolean) = handleAnn(desc)
    override def visitAnnotationDefault() = annVisitor
    override def visitInsnAnnotation(
      typeRef: Int, typePath: TypePath, desc: String, visible: Boolean
    ) = handleAnn(desc)
    override def visitLocalVariableAnnotation(
      typeRef: Int, typePath: TypePath, start: Array[Label], end: Array[Label],
      index: Array[Int], desc: String, visible: Boolean
    ) = handleAnn(desc)
    override def visitParameterAnnotation(
      parameter: Int, desc: String, visible: Boolean
    ) = handleAnn(desc)
    override def visitTryCatchAnnotation(
      typeRef: Int, typePath: TypePath, desc: String, visible: Boolean
    ) = handleAnn(desc)
    override def visitTypeAnnotation(
      typeRef: Int, typePath: TypePath, desc: String, visible: Boolean
    ) = handleAnn(desc)
  }
}
| sugakandrey/ensime-server | core/src/main/scala/org/ensime/indexer/ClassfileIndexer.scala | Scala | gpl-3.0 | 9,276 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.io.File
import scala.language.implicitConversions
import org.apache.spark.SparkContext
import org.apache.spark.sql.catalyst.ParserDialect
import org.apache.spark.sql.catalyst.analysis.{Analyzer, OverrideCatalog}
import org.apache.spark.sql.catalyst.optimizer.Optimizer
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.ExtractPythonUDFs
import org.apache.spark.sql.execution.datasources.{PreInsertCastAndRename, PreWriteCheck}
import org.apache.spark.sql.hive._
import org.apache.spark.sql.optimizer.CarbonOptimizer
import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.stats.{QueryStatistic, QueryStatisticsConstants}
import org.apache.carbondata.core.util.{CarbonProperties, CarbonTimeStatisticsFactory}
/**
 * A [[HiveContext]] specialisation that wires CarbonData's metastore,
 * analyzer rules, optimizer and planning strategies into Spark SQL.
 *
 * Only one CarbonContext may exist per SparkContext (enforced by
 * CarbonContext.addInstance during construction).
 *
 * @param sc underlying SparkContext
 * @param storePath root location of the carbon data store
 * @param metaStorePath location for the (derby-backed) hive metastore
 */
class CarbonContext(
  val sc: SparkContext,
  val storePath: String,
  metaStorePath: String) extends HiveContext(sc) {
  self =>

  // convenience constructor: default store and metastore locations
  def this(sc: SparkContext) = {
    this(sc,
      new File(CarbonCommonConstants.STORE_LOCATION_DEFAULT_VAL).getCanonicalPath,
      new File(CarbonCommonConstants.METASTORE_LOCATION_DEFAULT_VAL).getCanonicalPath)
  }

  // convenience constructor: explicit store path, default metastore location
  def this(sc: SparkContext, storePath: String) = {
    this(sc,
      storePath,
      new File(CarbonCommonConstants.METASTORE_LOCATION_DEFAULT_VAL).getCanonicalPath)
  }

  // constructor side effects: register this instance, initialise codegen,
  // register the tuple-id UDF and bootstrap the carbon environment
  CarbonContext.addInstance(sc, this)
  CodeGenerateFactory.init(sc.version)
  udf.register("getTupleId", () => "")
  CarbonEnv.init(this)

  var lastSchemaUpdatedTime = System.currentTimeMillis()
  val hiveClientInterface = metadataHive

  protected[sql] override lazy val conf: SQLConf = new CarbonSQLConf

  @transient
  override lazy val catalog = {
    // publish the store location before creating the metastore that reads it
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.STORE_LOCATION, storePath)
    new CarbonMetastore(this, storePath, metadataHive, queryId) with OverrideCatalog
  }

  @transient
  override protected[sql] lazy val analyzer =
    new Analyzer(catalog, functionRegistry, conf) {
      // carbon-specific resolution rules run alongside the hive/parquet ones
      override val extendedResolutionRules =
        catalog.ParquetConversions ::
        catalog.CreateTables ::
        CarbonIUDAnalysisRule ::
        CarbonPreInsertionCasts ::
        ExtractPythonUDFs ::
        ResolveHiveWindowFunction ::
        PreInsertCastAndRename ::
        Nil
      override val extendedCheckRules = Seq(
        PreWriteCheck(catalog)
      )
    }

  @transient
  override protected[sql] lazy val optimizer: Optimizer =
    CarbonOptimizer.optimizer(
      CodeGenerateFactory.createDefaultOptimizer(conf, sc),
      conf.asInstanceOf[CarbonSQLConf],
      sc.version)

  protected[sql] override def getSQLDialect(): ParserDialect = new CarbonSQLDialect(this)

  // plug carbon's physical planning strategies into the experimental hook
  experimental.extraStrategies = {
    val carbonStrategy = new CarbonStrategies(self)
    Seq(carbonStrategy.CarbonTableScan, carbonStrategy.DDLStrategies)
  }

  // Supplies hive configuration; creates a local derby metastore under
  // metaStorePath unless a connection URL is already configured.
  override protected def configure(): Map[String, String] = {
    sc.hadoopConfiguration.addResource("hive-site.xml")
    if (sc.hadoopConfiguration.get(CarbonCommonConstants.HIVE_CONNECTION_URL) == null) {
      val metaStorePathAbsolute = new File(metaStorePath).getCanonicalPath
      val hiveMetaStoreDB = metaStorePathAbsolute + "/metastore_db"
      logDebug(s"metastore db is going to be created in location: $hiveMetaStoreDB")
      super.configure() ++ Map[String, String]((CarbonCommonConstants.HIVE_CONNECTION_URL,
        s"jdbc:derby:;databaseName=$hiveMetaStoreDB;create=true"),
        ("hive.metastore.warehouse.dir", metaStorePathAbsolute + "/hivemetadata"))
    } else {
      super.configure()
    }
  }

  @transient
  private val LOGGER = LogServiceFactory.getLogService(CarbonContext.getClass.getName)

  // unique per query; also published into the conf so executors can see it
  var queryId: String = ""

  override def sql(sql: String): DataFrame = {
    // queryId will be unique for each query, creating a query detail holder
    queryId = System.nanoTime() + ""
    this.setConf("queryId", queryId)
    CarbonContext.updateCarbonPorpertiesPath(this)
    val sqlString = sql.toUpperCase
    LOGGER.info(s"Query [$sqlString]")
    val recorder = CarbonTimeStatisticsFactory.createDriverRecorder()
    val statistic = new QueryStatistic()
    val logicPlan: LogicalPlan = parseSql(sql)
    statistic.addStatistics(QueryStatisticsConstants.SQL_PARSE, System.currentTimeMillis())
    recorder.recordStatisticsForDriver(statistic, queryId)
    val result = new DataFrame(this, logicPlan)
    // We force query optimization to happen right away instead of letting it happen lazily like
    // when using the query DSL. This is so DDL commands behave as expected. This only
    // generates the RDD lineage for DML queries, but does not perform any execution.
    result
  }
}
/** Companion: datasource names, carbon property bootstrap and the per-SparkContext instance cache. */
object CarbonContext {
  val datasourceName: String = "org.apache.carbondata.format"
  val datasourceShortName: String = "carbondata"

  @transient
  private val LOGGER = LogServiceFactory.getLogService(CarbonContext.getClass.getName)

  // Copies the carbon.properties location from the hive conf into a system
  // property (only if not already set there) and configures the zookeeper URL.
  // NOTE(review): method name has a typo ("Porperties") but is referenced from
  // CarbonContext.sql, so renaming requires a coordinated change.
  final def updateCarbonPorpertiesPath(hiveContext: HiveContext) {
    val carbonPropertiesFilePath = hiveContext.getConf("carbon.properties.filepath", null)
    val systemcarbonPropertiesFilePath = System.getProperty("carbon.properties.filepath", null)
    if (null != carbonPropertiesFilePath && null == systemcarbonPropertiesFilePath) {
      System.setProperty("carbon.properties.filepath",
        carbonPropertiesFilePath + "/" + "carbon.properties")
    }
    // configuring the zookeeper URL
    val zooKeeperUrl = hiveContext.getConf("spark.deploy.zookeeper.url", "127.0.0.1:2181")
    CarbonProperties.getInstance().addProperty("spark.deploy.zookeeper.url", zooKeeperUrl)
  }

  // this cache is used to avoid creating multiple CarbonContext from same SparkContext,
  // to avoid the derby problem for metastore
  // NOTE(review): plain mutable.Map with no synchronisation -- presumably only
  // touched from the driver thread; confirm before concurrent use.
  private val cache = collection.mutable.Map[SparkContext, CarbonContext]()

  // Throws NoSuchElementException if no CarbonContext was created for `sc`.
  def getInstance(sc: SparkContext): CarbonContext = {
    cache(sc)
  }

  // Registers `cc` for `sc`; at most one CarbonContext per SparkContext.
  def addInstance(sc: SparkContext, cc: CarbonContext): Unit = {
    if (cache.contains(sc)) {
      sys.error("creating multiple instances of CarbonContext is not " +
        "allowed using the same SparkContext instance")
    }
    cache(sc) = cc
  }
}
/** Thin helper exposing the context's SQL parser to produce a [[LogicalPlan]] without execution. */
object SQLParser {
  def parse(sql: String, sqlContext: SQLContext): LogicalPlan = sqlContext.parseSql(sql)
}
| ksimar/incubator-carbondata | integration/spark/src/main/scala/org/apache/spark/sql/CarbonContext.scala | Scala | apache-2.0 | 7,180 |
package org.broadinstitute.dsde.firecloud.page.methodrepo
import org.broadinstitute.dsde.firecloud.page.MethodTable
import org.openqa.selenium.WebDriver
/** UI table of Method Repo entries; selecting an entry opens its detail page. */
class MethodRepoTable(implicit webDriver: WebDriver) extends MethodTable[MethodDetailPage] {
  // Blocks until the detail page for (namespace, name) reports itself ready.
  override protected def awaitInnerPage(namespace: String, name: String): MethodDetailPage = {
    await ready new MethodDetailPage(namespace, name)
  }
}
| broadinstitute/firecloud-ui | automation/src/test/scala/org/broadinstitute/dsde/firecloud/page/methodrepo/MethodRepoTable.scala | Scala | bsd-3-clause | 404 |
package gitbucket.core.model
/** Slick mapping for the ACTIVITY table (GitBucket activity feed). */
trait ActivityComponent extends TemplateComponent { self: Profile =>
  import profile.simple._
  import self._

  lazy val Activities = TableQuery[Activities]

  class Activities(tag: Tag) extends Table[Activity](tag, "ACTIVITY") with BasicTemplate {
    val activityId = column[Int]("ACTIVITY_ID", O AutoInc) // auto-incremented key
    val activityUserName = column[String]("ACTIVITY_USER_NAME") // user who performed the activity
    val activityType = column[String]("ACTIVITY_TYPE")
    val message = column[String]("MESSAGE")
    val additionalInfo = column[String]("ADDITIONAL_INFO") // optional: projected via .? below
    val activityDate = column[java.util.Date]("ACTIVITY_DATE")
    // projection order must match the Activity constructor argument order exactly
    def * = (userName, repositoryName, activityUserName, activityType, message, additionalInfo.?, activityDate, activityId) <> (Activity.tupled, Activity.unapply)
  }
}
/**
 * One activity-feed entry.
 *
 * userName/repositoryName identify the repository the activity happened in
 * (inherited columns from BasicTemplate -- presumably owner + repo; confirm),
 * while activityUserName is the user who performed the action.
 * activityId defaults to 0 for rows not yet persisted (auto-increment column).
 */
case class Activity(
  userName: String,
  repositoryName: String,
  activityUserName: String,
  activityType: String,
  message: String,
  additionalInfo: Option[String],
  activityDate: java.util.Date,
  activityId: Int = 0
)
| intermezzo-fr/gitbucket | src/main/scala/gitbucket/core/model/Activity.scala | Scala | apache-2.0 | 1,023 |
package redbot
import redbot.discord.User
import redbot.discord.Snowflake._
/** Bot-wide constants. */
object GlobalRefs {
  // NOTE(review): presumably the bot's own Discord user id -- confirm
  val RedId: User.Id = 135553137699192832L.asId
  val Red: String = User.mention(RedId) // mention string for the bot user
  val ServerInvite: String = "http://discord.gg/QVwRdk3"
}
| JamesGallicchio/RedBot | src/main/scala/redbot/GlobalRefs.scala | Scala | mit | 245 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import com.google.inject.{Inject, Singleton}
import config.{ApplicationConfig, GmpContext}
import controllers.auth.{AuthAction, ExternalUrls, UUIDGenerator}
import play.api.mvc.{Action, AnyContent, MessagesControllerComponents}
import services.SessionService
import uk.gov.hmrc.auth.core.AuthConnector
import uk.gov.hmrc.play.audit.http.connector.AuditConnector
import uk.gov.hmrc.play.audit.model.DataEvent
import views.Views
import scala.concurrent.ExecutionContext
@Singleton
class ApplicationController @Inject()(authAction: AuthAction,
                                      auditConnector: AuditConnector,
                                      val authConnector: AuthConnector,
                                      uuidGenerator: UUIDGenerator,
                                      sessionService: SessionService,implicit val config:GmpContext,
                                      messagesControllerComponents: MessagesControllerComponents,
                                      implicit val executionContext: ExecutionContext,
                                      ac:ApplicationConfig,
                                      views: Views,
                                      externalUrls: ExternalUrls)
  extends GmpController(messagesControllerComponents,ac,sessionService,config) {

  // Renders the "unauthorised" page; intentionally not behind authAction.
  def unauthorised: Action[AnyContent] = Action {
    implicit request =>
      Ok(views.unauthorised())
  }

  // Audits the sign-out and redirects to the sign-out callback, storing a fresh
  // feedbackId in the session so the feedback page can correlate it.
  // NOTE(review): the Future returned by sendEvent is discarded -- presumably
  // fire-and-forget auditing; failures are not surfaced. Confirm intent.
  def signout: Action[AnyContent] = authAction {
    implicit request =>
      val uuid: String = uuidGenerator.generate
      val auditData = Map("feedbackId" -> uuid)
      val dataEvent: DataEvent = DataEvent("GMP", "signout", detail = auditData)
      auditConnector.sendEvent(dataEvent)
      Redirect(externalUrls.signOutCallback).withSession(("feedbackId", uuid))
  }

  // Lightweight authenticated ping used to keep the session alive.
  def keepAlive: Action[AnyContent] = authAction {
    _ =>
      Ok("OK")
  }
}
| hmrc/gmp-frontend | app/controllers/ApplicationController.scala | Scala | apache-2.0 | 2,546 |
package opennlp.scalabha.tag.support
import org.junit.Assert._
import org.junit._
import opennlp.scalabha.util.CollectionUtils._
/** Unit tests for frequency-count merging via the +++ (maps) and ++ (CondFreqCounts) combinators. */
class FreqCountsTests {

  @Test
  def test_FreqCounts() {
    val f = Map('A' -> 2)
    val g = Map('B' -> 4)
    val h = Map('A' -> 3)
    val i = f +++ g +++ h // +++ sums values of matching keys
    assertEquals(Map('B' -> 4, 'A' -> 5), i.toMap)
  }

  @Test
  def test_FreqCounts_double() {
    // NOTE(review): assertEquals on maps of Double compares with exact bit
    // equality (no delta); 2.2 + 3.0 == 5.2 relies on rounding working out.
    // Consider per-entry comparisons with a tolerance.
    val f = Map('A' -> 2.2)
    val g = Map('B' -> 4.4)
    val h = Map('A' -> 3.0)
    val i = f +++ g +++ h
    assertEquals(Map('B' -> 4.4, 'A' -> 5.2), i.toMap)
  }

  @Test
  def test_CondFreqCounts() {
    // ++ merges nested maps, summing counts per (condition, symbol) pair
    val f = CondFreqCounts(Map('A' -> Map('a -> 2)))
    val g = CondFreqCounts(Map('B' -> Map('a -> 3, 'b -> 4)))
    val h = CondFreqCounts(Map('A' -> Map('a -> 5), 'B' -> Map('b -> 6)))
    val i = f ++ g ++ h
    assertEquals(Map('A' -> Map('a -> 7), 'B' -> Map('a -> 3, 'b -> 10)), i.toMap)
  }

  @Test
  def test_CondFreqCounts_double() {
    // NOTE(review): same exact-Double-equality caveat as above.
    val f = CondFreqCounts(Map('A' -> Map('a -> 2.1)))
    val g = CondFreqCounts(Map('B' -> Map('a -> 3.0, 'b -> 4.2)))
    val h = CondFreqCounts(Map('A' -> Map('a -> 5.5), 'B' -> Map('b -> 6.1)))
    val i = f ++ g ++ h
    assertEquals(Map('A' -> Map('a -> 7.6), 'B' -> Map('a -> 3.0, 'b -> 10.3)), i.toMap)
  }
}
| eponvert/Scalabha | src/test/scala/opennlp/scalabha/tag/support/FreqCountsTests.scala | Scala | apache-2.0 | 1,264 |
package org.broadinstitute.dsde.firecloud.fixture
import org.broadinstitute.dsde.workbench.auth.AuthToken
import org.broadinstitute.dsde.workbench.config.{Credentials, UserPool}
import org.broadinstitute.dsde.workbench.fixture._
import org.broadinstitute.dsde.workbench.service.test.{RandomUtil, WebBrowserSpec}
import org.scalatest.{Matchers, Outcome, fixture}
import scala.util.Try
/** Base spec that supplies each test with a claimed GPAlloc billing project owned by a pooled project owner. */
abstract class BillingFixtureSpec extends fixture.FreeSpec with BillingFixtures with Matchers
  with WebBrowserSpec with UserFixtures with WorkspaceFixtures with GroupFixtures with RandomUtil {

  // project owner shared by all tests in the suite
  val owner: Credentials = UserPool.chooseProjectOwner
  implicit val ownerAuthToken: AuthToken = owner.makeAuthToken()

  // set by claimBillingProject(); read by withFixture for every test
  var claimedBillingProject: ClaimedProject = _

  /**
   * See
   * https://www.artima.com/docs-scalatest-2.0.M5/org/scalatest/FreeSpec.html
   * Section: "Overriding withFixture(OneArgTest)"
   *
   * Claim a billing project for project owner
   * @param billingProject name of the claimed billing project
   */
  case class OwnerBillingProjectFixture(billingProject: String)

  type FixtureParam = OwnerBillingProjectFixture

  override def withFixture(test: OneArgTest): Outcome = {
    // NOTE(review): assumes claimBillingProject() ran first; otherwise
    // claimedBillingProject is null and this NPEs -- confirm call order.
    withFixture(test.toNoArgTest(OwnerBillingProjectFixture(claimedBillingProject.projectName)))
  }

  // Claims a GPAlloc billing project as the owner.
  // NOTE(review): the `throw ex` inside Try.recover is captured back into a
  // Failure which is then discarded, so failures are logged but NOT propagated
  // to the caller -- confirm whether that is intended.
  def claimBillingProject(): Unit = {
    Try {
      claimedBillingProject = claimGPAllocProject(owner)
      logger.info(s"Billing project claimed: ${claimedBillingProject.projectName}")
    }.recover {
      case ex: Exception =>
        logger.error(s"Error occurred in billing project claim as owner $owner", ex)
        throw ex
    }
  }

  // Releases the claimed billing project.
  // NOTE(review): same swallowed-rethrow caveat as claimBillingProject().
  def unclaimBillingProject(): Unit = {
    val projectName = claimedBillingProject.projectName
    Try {
      claimedBillingProject.cleanup(owner)
      logger.info(s"Billing project unclaimed: $projectName")
    }.recover{
      case ex: Exception =>
        logger.error(s"Error occurred in billing project clean $projectName as owner $owner", ex)
        throw ex
    }
  }
}
| broadinstitute/firecloud-ui | automation/src/test/scala/org/broadinstitute/dsde/firecloud/fixture/BillingFixtureSpec.scala | Scala | bsd-3-clause | 1,994 |
/*
* Copyright 2012 Eike Kettner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.publet.auth
import com.google.common.eventbus.EventBus
import org.apache.shiro.authc.{AuthenticationInfo, AuthenticationException, AuthenticationToken, AuthenticationListener}
import org.apache.shiro.subject.PrincipalCollection
import org.eknet.publet.event.Event
import com.google.inject.{Inject, Singleton}
/**
* Is registered on the [[org.apache.shiro.mgt.SecurityManager]] and posts all
* events on the global event bus.
*
* @author Eike Kettner eike.kettner@gmail.com
* @since 11.10.12 00:56
*/
@Singleton
class AuthListener @Inject() (bus:EventBus) extends AuthenticationListener {

  // Published on every successful authentication.
  def onSuccess(token: AuthenticationToken, info: AuthenticationInfo) {
    bus.post(new AuthSuccessEvent(token, info))
  }

  // Published on every failed authentication attempt.
  def onFailure(token: AuthenticationToken, ae: AuthenticationException) {
    bus.post(new AuthFailedEvent(token, ae))
  }

  // Published when a subject logs out.
  def onLogout(principals: PrincipalCollection) {
    bus.post(new LogoutEvent(principals))
  }
}
/** Root of the authentication event hierarchy published on the global event bus. */
abstract sealed class AuthEvent extends Event
/** Published when an authentication attempt fails. */
case class AuthFailedEvent(token: AuthenticationToken, ae: AuthenticationException) extends AuthEvent
/** Published when an authentication attempt succeeds. */
case class AuthSuccessEvent(token: AuthenticationToken, info: AuthenticationInfo) extends AuthEvent
/**
 * Published when a subject logs out.
 *
 * Fix: previously LogoutEvent did not extend AuthEvent (unlike its siblings),
 * so it was not an [[Event]] and bus subscribers listening for AuthEvent/Event
 * never saw logout notifications.
 */
case class LogoutEvent(principals: PrincipalCollection) extends AuthEvent
| eikek/publet | auth/src/main/scala/org/eknet/publet/auth/AuthListener.scala | Scala | apache-2.0 | 1,864 |
/*
* Copyright 2014-present Regents of the University of California
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package arkiplane
import utils._
import utils.Dataset._
import scala.collection.mutable._
import scala.concurrent.duration._
import scala.math._
import java.io._
import java.util.Calendar
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
object Geo_Analysis {
/** Builds an output file name of the form "&lt;prefix&gt;-&lt;name&gt;.&lt;suffix&gt;". */
private def getFilename(prefix: String, name: String, suffix: String) =
  s"$prefix-$name.$suffix"
/**
 * Entry point: runs per-year latency/hop/path statistics over ARK or iPlane
 * traceroute datasets stored in HDFS, writing results back to HDFS and to
 * local files.
 *
 * args(0) = master/namenode host fragment ("ip-X-X-X-X"), args(1) = dataset name.
 */
def main(args: Array[String]) {
  if (args.length < 2) {
    System.err.println("Usage: Geo_Analysis <ip-X-X-X-X> <ARK|IPLANE>")
    System.exit(1)
  }
  val ipx = args(0)
  val SPARK_PATH = "spark://" + ipx + ":7077"
  val HDFS_PATH = "hdfs://" + ipx + ":9010/"
  // MatchError for any value other than ARK/IPLANE
  val DATASET_NAME = args(1).toUpperCase() match {
    case "ARK" => ARK
    case "IPLANE" => IPLANE
  }
  // NOTE(review): hard-coded to "CITY", so the Country branches below are dead
  // code -- presumably a toggle left for manual switching; confirm.
  val cityOrCountry = "City".toUpperCase()
  val FN_SUFFIX = {
    if (cityOrCountry == "CITY")
      "City"
    else
      "Country"
  }
  val MAPPING_SUFFIX = {
    if (cityOrCountry == "CITY")
      "-City"
    else
      ""
  }
  // Setup system properties
  System.setProperty("spark.storage.blockManagerTimeoutIntervalMs", "300000")
  System.setProperty("spark.akka.timeout", "300")
  System.setProperty("spark.worker.timeout", "300")
  System.setProperty("spark.executor.memory", "56G")
  System.setProperty("spark.akka.frameSize", "1024")
  val sc = new SparkContext(SPARK_PATH, "Geo_Analysis_ARKIPLANE", "/root/spark", Seq("/root/bigtrace/target/scala-2.9.3/bigtrace_2.9.3-0.0.jar"))
  // Create IP-to-AS mapping
  // NOTE(review): mapping path is empty string + suffix ("-City") -- confirm
  // this resolves to the intended mapping file.
  val PATH_TO_MAPPING_FILE = "" + MAPPING_SUFFIX
  val ipMap = LineReaders.ipEdgescapeTreeMapNew(sc, PATH_TO_MAPPING_FILE)
  val ipMap_bc = sc.broadcast(ipMap)
  var traceLines = sc.textFile(HDFS_PATH + "/" + DATASET_NAME)
  // Preprocessing for ARK (Keep those without error codes)
  if (DATASET_NAME == ARK) {
    traceLines = traceLines.filter(_.contains("MOS"))
  }
  // Remove 0.0.0.0
  traceLines = traceLines.filter(x => !x.contains("0.0.0.0") && !x.contains("-"))
  // Parse every remaining line into geo-level trace records; cached because it
  // is re-filtered once per year below.
  val GeoTracesAll = traceLines.flatMap(LineReaders.newTraceDetailed_GeoLevel(_, DATASET_NAME, ipMap_bc.value)).cache
  for (year <- List("2008", "2009", "2010", "2011", "2012", "2013")) {
    val GeoTraces = {
      // NOTE(review): "ALL" never appears in the year list above, so this
      // branch is currently unreachable -- kept for manual runs, presumably.
      if (year == "ALL")
        GeoTracesAll
      else
        GeoTracesAll.filter(x => Utils.monthFromTimestamp(x.timestamp).contains(year))
    }
    val FN_PREFIX = DATASET_NAME + "-" + year
    /*************************
     * Groupby Src-Dst Pairs *
     *************************/
    val pairs = GeoTraces.groupBy(x => (x.from, x.to))//.filter(_._2.size >= 12)
    // per-pair stats over e2e latency (y), hop count (z), last hop index (a)
    // and the time-ordered hop-path strings (q)
    val pairStats = pairs.map(x => {
      val y = x._2.map(_.e2eLatency).toArray
      val z = x._2.map(_.numHops.toDouble).toArray
      val a = x._2.map(_.hopIndices.last.toDouble).toArray
      val q = x._2.sortBy(_.timestamp).map(w => Utils.arrToString(w.hops.toArray.map(_.toString))).toArray
      (x._1, y.length,
        Utils.min(y), Utils.max(y), Utils.average(y), Utils.stdev(y), Utils.coeffOfVar(y), Utils.percentiles(y),
        Utils.min(z), Utils.max(z), Utils.average(z), Utils.stdev(z), Utils.coeffOfVar(z), Utils.percentiles(z),
        Utils.min(a), Utils.max(a), Utils.average(a), Utils.stdev(a), Utils.coeffOfVar(a), Utils.percentiles(a),
        Utils.pathPrevalence(q)
      )
    })
    pairStats.saveAsTextFile(HDFS_PATH + "/" + getFilename(FN_PREFIX, "pairStats", FN_SUFFIX))
    /*******************
     * Groupby Sources *
     *******************/
    val srcs = GeoTraces.groupBy(_.from)
    // per-source aggregates are small enough to collect to the driver
    val srcsStats = srcs.map(x => {
      val y = x._2.map(_.e2eLatency).toArray
      val z = x._2.map(_.numHops.toDouble).toArray
      (x._1, y.length,
        Utils.average(y), Utils.stdev(y), Utils.coeffOfVar(y),
        Utils.average(z), Utils.stdev(z), Utils.coeffOfVar(z))
    }).collect
    Utils.writeToFile(srcsStats, getFilename(FN_PREFIX, "srcsStats", FN_SUFFIX))
    /************************
     * Groupby Destinations *
     ************************/
    val dsts = GeoTraces.groupBy(_.to).cache
    val dstsStats = dsts.map(x => {
      val y = x._2.map(_.e2eLatency).toArray
      val z = x._2.map(_.numHops.toDouble).toArray
      (x._1, y.length,
        Utils.average(y), Utils.stdev(y), Utils.coeffOfVar(y),
        Utils.average(z), Utils.stdev(z), Utils.coeffOfVar(z))
    }).collect
    Utils.writeToFile(dstsStats, getFilename(FN_PREFIX, "dstsStats", FN_SUFFIX))
    /************************
     * Individual countries *
     ************************/
    // Calculate bigrams: stats per directed hop-to-hop link with its segment latency
    val bigrams = GeoTraces.flatMap(h => Utils.emitDirectedLinks(h.hops, h.segmentLats)).groupBy(_._1).map(l => {
      val y = l._2.map(_._2).toArray
      (l._1, y.length,
        Utils.min(y), Utils.max(y), Utils.average(y), Utils.stdev(y), Utils.coeffOfVar(y), Utils.percentiles(y)
      )
    }).collect
    Utils.writeToFile(bigrams, getFilename(FN_PREFIX, "bigrams", FN_SUFFIX))
  }
}
}
| mosharaf/hummus | src/main/scala/arkiplane/Geo_Analysis.scala | Scala | apache-2.0 | 5,801 |
package com.twitter.finagle.stats
/**
* A StatsReceiver receiver proxy that translates all counter, stat, and gauge
* names according to a `translate` function.
*
* @param self The underlying StatsReceiver to which translated names are passed
*
* @param namespacePrefix the namespace used for translations
*/
abstract class NameTranslatingStatsReceiver(
  val self: StatsReceiver,
  namespacePrefix: String)
  extends StatsReceiver
{
  // secondary constructor: placeholder namespace, used only by toString
  def this(self: StatsReceiver) = this(self, "<namespacePrefix>")

  override def toString: String =
    s"$self/$namespacePrefix"

  // Maps a metric name (sequence of path segments) to the name passed downstream.
  protected[this] def translate(name: Seq[String]): Seq[String]

  // delegate identity/nullness to the wrapped receiver
  val repr = self.repr
  override def isNull: Boolean = self.isNull

  def counter(name: String*): Counter =
    self.counter(translate(name): _*)

  def stat(name: String*): Stat =
    self.stat(translate(name): _*)

  def addGauge(name: String*)(f: => Float): Gauge =
    self.addGauge(translate(name): _*)(f)
}
| tdyas/util | util-stats/src/main/scala/com/twitter/finagle/stats/NameTranslatingStatsReceiver.scala | Scala | apache-2.0 | 956 |
package com.fustigatedcat.heystk.ui.dao
import java.sql.Timestamp
import java.util.Calendar
import com.fustigatedcat.heystk.ui.model.{Privilege, heystk, User}
import com.fustigatedcat.heystk.ui.model.UITypeMode._
import org.apache.commons.codec.digest.DigestUtils
import scala.util.Random
object UserDAO extends AbstractDAO[Long, User] {

  override val table = heystk.User

  /** Generates a 64-character alphanumeric salt.
    *
    * Security fix: draws randomness from `java.security.SecureRandom` instead
    * of the default `scala.util.Random` generator, whose output is
    * predictable and therefore unsuitable even for salts.
    */
  def createSalt() : String =
    new Random(new java.security.SecureRandom()).alphanumeric.take(64).mkString

  /** Hashes `password` concatenated with `salt` to a hex SHA-256 digest.
    *
    * NOTE(review): a single fast SHA-256 round is weak for password storage;
    * a slow KDF (bcrypt/scrypt/PBKDF2) would be preferable, but switching
    * requires migrating existing stored hashes, so it is kept as-is here.
    */
  def hash(password : String, salt : String) : String = {
    DigestUtils.sha256Hex(password + salt)
  }

  /** True when `password` hashes (with the user's salt) to the stored hash.
    * Uses a constant-time byte comparison to avoid timing side channels. */
  def authenticate(password : String)(user : User) : Boolean = {
    java.security.MessageDigest.isEqual(
      hash(password, user.salt).getBytes("UTF-8"),
      user.hash.getBytes("UTF-8"))
  }

  /** Looks up the user by name and verifies the password; None when the user
    * is absent or the password does not match. */
  def authenticateUser(username : String, password : String) : Option[User] = {
    from(heystk.User)(u =>
      where(u.username === username)
      select u
    ).headOption.filter(authenticate(password))
  }

  /** Distinct privilege names granted to `user` through its roles. */
  def getPrivilegesForUser(user : User) : List[String] = {
    join(heystk.UserRoleMap,heystk.RolePrivilegeMap,heystk.Privilege)((urm,rpm,p) =>
      where(urm.userId === user.id)
      select p.name
      on(urm.roleId === rpm.roleId, rpm.privilegeId === p.id)
    ).distinct.toList
  }

  def getUserByUsername(username : String) : Option[User] = {
    table.where(u => u.username === username).headOption
  }

  /** Creates and persists a new user with a freshly generated salt.
    * The id is passed as 0 — presumably replaced by the generated key on
    * insert; confirm against AbstractDAO.insert. */
  def createUser(username : String, firstName : String, lastName : String, password : String) : User = {
    val salt = createSalt()
    this.insert(
      User(
        0,
        username,
        firstName,
        lastName,
        salt,
        hash(password, salt),
        new Timestamp(Calendar.getInstance.getTimeInMillis)
      )
    )
  }

  /** Updates name fields; when a new password is supplied it is re-salted and
    * re-hashed, otherwise the stored salt/hash are preserved. */
  def updateUser(user : User, firstName : String, lastName : String, password: Option[String]) = {
    val (s,p) = password.map(str => {
      val salt = createSalt()
      (salt, hash(str, salt))
    }).getOrElse((user.salt, user.hash))
    heystk.User.update(
      User(
        user.id,
        user.username,
        firstName,
        lastName,
        s,
        p,
        user.created
      )
    )
  }

  def deleteUsers(ids : List[Long]) : Unit = {
    table.deleteWhere(u => u.id in ids)
  }
}
| fustigatedcat/heystk | system-ui/src/main/scala/com/fustigatedcat/heystk/ui/dao/UserDAO.scala | Scala | gpl-3.0 | 2,157 |
import stainless.annotation.{ghost => ghostAnnot}
import stainless.lang._
import stainless.collection._
object GhostFlow2 {
  // Regression input for Stainless ghost-elimination: a ghost field mutated
  // only inside a `ghost` block, which extraction must accept as valid.
  case class Ghost(@ghostAnnot var l: List[BigInt]) {
    def f(x: BigInt) = {
      ghost {
        l = x :: l // Argument to ghost parameter `value` of `ghost` must only have effects on ghost fields
      }
    }
  }
}
| epfl-lara/stainless | frontends/benchmarks/extraction/valid/GhostFlow2.scala | Scala | apache-2.0 | 346 |
//
// TextRange.scala -- Scala traits TextPosition and TextRange
// Project OrcScala
//
// Created by jthywiss on Jul 20, 2016.
//
// Copyright (c) 2019 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.util
/** A position in a line-oriented character string.
*
* [This description is based on, but modified from RFC 5147:]
* A position does not identify an actual substring of text, but a position
* inside the text, which can be regarded as a substring of length zero. The
* use case for positions is to provide pointers for applications that may
* use them to implement functionalities such as "insert some text here",
* which needs a position rather than a substring. Positions are counted from
* zero; position zero being before the first character or line of a line-
* oriented character string. Thus, a character string having one character
* has two positions, one before the first character (offset 0), and one
* after the first character (offset 1).
*
* Note: Line and column numbers are one-based, following strong convention.
* This is different from RFC 5147.
*
* @author jthywiss
*/
trait TextPosition[R] {
  /** For 0-based indices */
  type NonnegativeIntegralType = Int
  /** For 1-based indices */
  type PositiveIntegralType = Int

  type CharacterNumber = NonnegativeIntegralType
  type LineNumber = PositiveIntegralType
  type ColumnNumber = PositiveIntegralType

  /** Some type of reference to the container of the lines of text. */
  val resource: R
  /** Number of characters (not bytes) in the text that precede this position. Zero-based. */
  val offset: CharacterNumber
  /** Line number of the text that contains this position. One-based. */
  val line: LineNumber
  /** Column number of the text that this position immediately precedes. One-based. */
  val column: ColumnNumber

  /** A string identifying the resource, to be used when printing this position. (E.g., pathname) */
  def resourceDescription: String

  /** Positions are equal when they refer to the same resource and offset.
    * The internal asserts check the line/column invariant and symmetry. */
  override def equals(that: Any) = that match {
    case thatTP: TextPosition[_] => {
      if (resource.equals(thatTP.resource) && offset.equals(thatTP.offset)) {
        assert(line.equals(thatTP.line) && column.equals(thatTP.column), ".equals doesn't report equal line & column")
        assert(thatTP.resource.equals(resource) && thatTP.offset.equals(offset), ".equals symmetry violation")
        true
      } else {
        false
      }
    }
    // Bug fix: the original match had no default case, so comparing against
    // any non-TextPosition value threw a MatchError instead of returning
    // false, violating the Object.equals contract.
    case _ => false
  }

  override def hashCode = resource.hashCode + offset

  override def toString = resourceDescription + ':' + line + ':' + column

  // Ordering is by offset, defined only within the same resource.
  def <(that: TextPosition[R]) = this.resource == that.resource && this.offset < that.offset
  def <=(that: TextPosition[R]) = this == that || this < that
  def >(that: TextPosition[R]) = this.resource == that.resource && this.offset > that.offset
  def >=(that: TextPosition[R]) = this == that || this > that
}
/** A range of positions specifying a substring of a line-oriented character
* string.
*
* [This description is based on, but modified from RFC 5147:]
* Ranges identify substring of a character string that have a length that
* may be greater than zero. Ranges specify both a lower and an upper bound.
* The end of a range must have a value greater than or equal to the start.
* A range with identical start and end is legal and identifies a range of
* length zero, which is equivalent to a position.
*
* @author jthywiss
*/
trait TextRange[P <: TextPosition[R], R] {
  /** TextPosition immediately preceding this range. */
  def start: P
  /** TextPosition immediately following this range. */
  def end: P

  assert(start <= end, "start must precede end")

  override def toString() = {
    if (start.resource == end.resource) {
      if (start.line == end.line) {
        if (start.column == end.column) {
          start.resourceDescription + ':' + start.line + ':' + start.column
        } else {
          start.resourceDescription + ':' + start.line + ':' + start.column + '-' + end.column
        }
      } else {
        start.resourceDescription + ':' + start.line + ':' + start.column + '-' + end.line + ':' + end.column
      }
    } else {
      start.resourceDescription + ':' + start.line + ':' + start.column + '-' + end.resourceDescription + ':' + end.line + ':' + end.column
    }
  }

  private def bothDefinedOnSameResource(that: TextRange[P, R]) =
    this.start.resource == that.start.resource && this.end.resource == that.end.resource

  // All predicates below return None when the two ranges are on different
  // resources, and Some(answer) otherwise.
  private def ifBothDefinedOnSameResource[T](that: TextRange[P, R], f: () => T): Option[T] =
    if (bothDefinedOnSameResource(that)) Some(f()) else None

  /** The two ranges begin and end at exactly the same position. */
  def equalRange(that: TextRange[P, R]) = ifBothDefinedOnSameResource(that, () => this.start == that.start && this.end == that.end)

  /** Two ranges r and s abut if and only if r precedes s, yet there
    * are no characters between r and s, and r and s do not overlap. */
  def abuts(that: TextRange[P, R]) = ifBothDefinedOnSameResource(that, () => this.end == that.start)

  /** This range ends _strictly_ before the given range begins,
    * i.e. there are characters between them. */
  def precedesStrictly(that: TextRange[P, R]) = ifBothDefinedOnSameResource(that, () => this.end < that.start)

  /** Precedes or abuts: r precedesOrAbuts s iff r precedesStrictly s or r abuts s. */
  def precedesOrAbuts(that: TextRange[P, R]) = ifBothDefinedOnSameResource(that, () => precedesStrictly(that).get || abuts(that).get)

  /** The given position falls inside (non-strictly) this range.
    * Bug fix: the same-resource branch previously returned a bare Boolean
    * while the other branch returned None (inferred result type Any); now
    * consistently Option[Boolean] like the other predicates. */
  def contains(that: TextPosition[R]) =
    if (this.start.resource == that.resource && this.end.resource == that.resource) {
      Some(this.start <= that && that <= this.end)
    } else {
      None
    }

  /** The given range falls inside (non-strictly) this range.
    * Bug fix: the end comparison was inverted (`this.end <= that.end`),
    * testing the opposite containment direction from the documentation. */
  def coversOrEquals(that: TextRange[P, R]) = ifBothDefinedOnSameResource(that, () => this.start <= that.start && that.end <= this.end)

  /** This range begins _strictly_ before and ends _strictly_ after the
    * given range.
    * Bug fix: the end comparison was inverted (`this.end < that.end`). */
  def coversStrictly(that: TextRange[P, R]) = ifBothDefinedOnSameResource(that, () => this.start < that.start && that.end < this.end)

  /** This range begins after and ends before the given range; one of
    * these bounds must be strict. In other words, this range is contained
    * in the given range. */
  def containedStrictlyIn(that: TextRange[P, R]) =
    ifBothDefinedOnSameResource(that, () =>
      this.start >= that.start && this.end < that.end ||
        this.start > that.start && this.end <= that.end)

  /** Contained strictly in, or equal: r containedInOrEquals s iff
    * r containedStrictlyIn s or r equalRange s. */
  def containedInOrEquals(that: TextRange[P, R]) = ifBothDefinedOnSameResource(that, () => containedStrictlyIn(that).get || equalRange(that).get)

  /** The ranges begin at the same position. */
  def startsWith(that: TextRange[P, R]) = ifBothDefinedOnSameResource(that, () => this.start == that.start)

  /** The ranges end at the same position. */
  def endsWith(that: TextRange[P, R]) = ifBothDefinedOnSameResource(that, () => this.end == that.end)

  /** The ranges do not overlap in any way:
    * r disjointWith s iff r precedesOrAbuts s or s precedesOrAbuts r. */
  def disjointWith(that: TextRange[P, R]) = ifBothDefinedOnSameResource(that, () => this.precedesOrAbuts(that).get || that.precedesOrAbuts(this).get)
}
| orc-lang/orc | OrcScala/src/orc/util/TextRange.scala | Scala | bsd-3-clause | 7,792 |
package org.mysql.employee.utils
import org.mysql.employee.constants.DateConstants
import java.text.SimpleDateFormat
object DateUtils {

  /** Renders a millisecond duration in the form "2days 3hrs. 4min. 5secs.";
    * units whose value is zero are omitted, so a sub-second duration yields
    * the empty string. */
  def toHumanTime(ms: Long) = {
    val totalSeconds = ms / 1000
    val units = Seq(
      (totalSeconds / 86400, "days "),
      ((totalSeconds / 3600) % 24, "hrs. "),
      ((totalSeconds / 60) % 60, "min. "),
      (totalSeconds % 60, "secs. "))
    units.collect { case (value, label) if value > 0 => s"$value$label" }.mkString.trim
  }

  /** Formatter for the configured output date pattern. */
  def outputFormat() = {
    new SimpleDateFormat(DateConstants.outputDateFormat)
  }

  /** Formatter for the configured ingestion date pattern. */
  def ingestionFormat() = {
    new SimpleDateFormat(DateConstants.ingestionDateFormat)
  }

  /** Formatter for the output date pattern plus an "hh:mm:ss" time part. */
  def outputTimeFormat() = {
    new SimpleDateFormat(DateConstants.outputDateFormat + " hh:mm:ss")
  }
} | matyb/spark-employee | src/main/scala/org/mysql/employee/utils/DateUtils.scala | Scala | mit | 834 |
package akkaviz.serialization
import org.scalatest.{FlatSpec, Matchers}
class CustomSerializersTest extends FlatSpec with Matchers {
  // Default serializers: strings render quoted, while numeric and boolean
  // values render as bare literals.
  "MessageSerialization" should "have default serializers" in {
    MessageSerialization.render("Test") shouldBe "\\"Test\\""
    MessageSerialization.render(1) shouldBe "1"
    MessageSerialization.render(1L) shouldBe "1"
    MessageSerialization.render(1.1) shouldBe "1.1"
    MessageSerialization.render(true) shouldBe "true"
  }
}
| blstream/akka-viz | monitoring/src/test/scala/akkaviz/serialization/CustomSerializersTest.scala | Scala | mit | 472 |
package com.twitter.finatra.httpclient
import com.fasterxml.jackson.databind.JsonNode
import com.google.inject.testing.fieldbinder.Bind
import com.twitter.finagle.Service
import com.twitter.finagle.httpx.{Request, Response, Status}
import com.twitter.finatra.httpclient.modules.HttpClientModule
import com.twitter.finatra.httpclient.test.InMemoryHttpService
import com.twitter.finatra.json.modules.FinatraJacksonModule
import com.twitter.inject.IntegrationTest
import com.twitter.inject.app.TestInjector
import com.twitter.util.Await
class HttpClientIntegrationTest extends IntegrationTest {

  // In-memory stub standing in for the remote HTTP service; @Bind overrides
  // the real Service[Request, Response] binding inside the test injector.
  val inMemoryHttpService = new InMemoryHttpService()

  @Bind
  val httpService: Service[Request, Response] = inMemoryHttpService

  override val injector = TestInjector(
    modules = Seq(MyHttpClientModule, FinatraJacksonModule),
    overrideModules = Seq(integrationTestModule))

  val httpClient = injector.instance[HttpClient]

  // execute returns the raw response unchanged.
  "execute" in {
    val okResponse = Response(Status.Ok)
    inMemoryHttpService.mockGet("/foo", okResponse)
    val request = RequestBuilder.get("/foo")
    Await.result(
      httpClient.execute(request)) should be(okResponse)
  }

  // executeJson parses the response body into the requested JSON type.
  "executeJson" in {
    val mockResponse = Response(Status.Ok)
    mockResponse.setContentString("{}")
    inMemoryHttpService.mockPost("/foo", "bar", mockResponse)
    val request = RequestBuilder.post("/foo").body("bar")
    Await.result(
      httpClient.executeJson[JsonNode](request)).toString should be("{}")
  }

  // A 500 response surfaces as HttpClientException instead of being decoded.
  "executeJson w/ unexpected response" in {
    val errorResponse = Response(Status.InternalServerError)
    inMemoryHttpService.mockGet("/foo", errorResponse)
    val request = RequestBuilder.get("/foo")
    intercept[HttpClientException] {
      Await.result(
        httpClient.executeJson[JsonNode](request))
    }
  }

  "get" in {
    val mockResponse = Response(Status.Ok)
    mockResponse.setContentString("{}")
    inMemoryHttpService.mockGet("/foo", mockResponse)
    Await.result(
      httpClient.get("/foo")) should be(mockResponse)
  }

  // Client configuration under test: destination, Host header value, and a
  // default header attached to every request.
  object MyHttpClientModule extends HttpClientModule {
    override val dest = "/my-http-service"
    override val hostname = "hostname123"
    override val defaultHeaders = Map("a" -> "b")
  }
}
| deanh/finatra | httpclient/src/test/scala/com/twitter/finatra/httpclient/HttpClientIntegrationTest.scala | Scala | apache-2.0 | 2,238 |
package com.hasanozgan.services.myservice.httpservices
import com.wordnik.swagger.annotations._
import spray.http.MediaTypes
import spray.routing.HttpService
/**
* Created by hozgan on 10/08/15.
*/
@Api(value = "/user", description = "Operations about users.", position=1)
trait UserHttpService extends HttpService {

  // Combined route tree for this service: GET /user/<name> and GET /users.
  val routes = getUserByName ~ getUsers

  @ApiOperation(value = "Get user by name", notes = "", response=classOf[String], nickname = "getUserByName", httpMethod = "GET")
  @ApiImplicitParams(Array(
    new ApiImplicitParam(name = "username", value = "ID of pet that needs to be updated", required = true, dataType = "string", paramType = "path"),
    new ApiImplicitParam(name = "body", value = "Updated user object.", required = false, dataType = "string", paramType = "form")
  ))
  @ApiResponses(Array(
    new ApiResponse(code = 404, message = "User not found"),
    new ApiResponse(code = 400, message = "Invalid username supplied")
  ))
  // Echoes the path segment back as "{<segment>}".
  // NOTE(review): the segment is bound as `id`, but the Swagger annotations
  // describe it as a username -- confirm which is intended.
  def getUserByName = get { path("user" / Segment) { id => {
    complete(s"{$id}")
  }}}

  @ApiOperation(value = "Get users", notes = "", response=classOf[String], nickname = "getUsers", httpMethod = "GET")
  // Responds to GET /users with a static body typed as application/json.
  def getUsers = get { path("users") { respondWithMediaType(MediaTypes.`application/json`) {
    complete("users")
  }}}
}
| hasanozgan/spray-microservice-template | service/src/main/scala/com/hasanozgan/services/myservice/httpservices/UserHttpService.scala | Scala | mit | 1,300 |
package dbtarzan.messages
import dbtarzan.db.TableId
/* Identifies a table tab in the GUI. Each tab is tied to the query that loads
   its rows; since several tabs can be based on the same table, a generated
   UUID distinguishes among them. */
case class QueryId(tableId : TableId, uuid : String)
object IDGenerator {
  /** Builds a QueryId for `tableId` carrying a freshly generated random UUID,
    * so two tabs opened on the same table receive distinct ids. */
  def queryId(tableId : TableId): QueryId = {
    val freshUuid = java.util.UUID.randomUUID.toString
    QueryId(tableId, freshUuid)
  }
} | aferrandi/dbtarzan | src/main/scala/dbtarzan/messages/IDGenerator.scala | Scala | apache-2.0 | 437 |
/*
* Copyright (C) 2012 reuillon
* Copyright (C) 2014 Jonathan Passerat-Palmbach
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openmole.plugin.environment.gridscale
import org.openmole.core.workflow.execution.ExecutionState._
import org.openmole.core.batch.jobservice.JobService
import fr.iscpif.gridscale.jobservice.{ JobService β GSJobService, JobState, Submitted, Running, Done, Failed }
trait GridScaleJobService extends JobService {

  val jobService: GSJobService

  type J = jobService.J

  // Delegate lifecycle operations to the underlying GridScale job service,
  // mapping its states onto OpenMOLE execution states.
  protected def _state(j: J) = translateStatus(jobService.state(j))
  protected def _cancel(j: J) = jobService.cancel(j)
  protected def _purge(j: J) = jobService.purge(j)

  // Bug fix: the arrows in this match had been corrupted into the mojibake
  // character `β`, which does not compile; restored to `=>`.
  private def translateStatus(state: JobState) =
    state match {
      case Submitted => SUBMITTED
      case Running => RUNNING
      case Done => DONE
      case Failed => FAILED
    }
}
| ISCPIF/PSEExperiments | openmole-src/openmole/plugins/org.openmole.plugin.environment.gridscale/src/main/scala/org/openmole/plugin/environment/gridscale/GridScaleJobService.scala | Scala | agpl-3.0 | 1,500 |
import scala.util.parsing.combinator.syntactical._
import scala.util.parsing.input._
// AST node for an identifier; Positional lets the parser record where it occurred.
case class Ident(name: String) extends Positional
// A variable declaration: the identifier on the left, its parsed value on the right.
case class Var(left: Ident, right: Any)
class MyParser extends StandardTokenParsers {
  lexical.reserved += ("var", "true", "false")
  lexical.delimiters += "="

  // A value is a numeric literal or a boolean keyword; anything else reports
  // a failure with a custom message.
  def value: Parser[Any] = numericLit | "true" | "false" |
    failure("Not a valid value")

  // "var <ident> = <value>"; positioned(...) records the identifier's position.
  def vardecl: Parser[Var] = ("var" ~> positioned(ident ^^ { Ident(_) }) <~ "=") ~ value ^^ { case i ~ v => Var(i, v) }

  // Runs parser `p` over `in`, requiring the entire input to be consumed.
  def parseAll[T](p: Parser[T], in: String): ParseResult[T] =
    phrase(p)(new lexical.Scanner(in))
}
object Main extends App {
  val parser = new MyParser

  // Showing the location of an error: "Fred" is not a valid value, so the
  // failure's position (line.column) and offending token are printed.
  val result = parser.parseAll(parser.value, "Fred")
  result match {
    case f: parser.Failure => println(f.next.pos.line + "." + f.next.pos.column + ": " + f.msg + ": " + f.next.first.chars)
    case _ => println(result.get)
  }

  // Showing the location of an item in a successful parse: the identifier's
  // recorded position is available via Positional.pos.
  val result2 = parser.parseAll(parser.vardecl, "var n = 42").get
  println(result2)
  result2 match {
    case Var(id, v) => println(id.name + " is at position " + id.pos)
  }
}
| P7h/ScalaPlayground | Scala for the Impatient/examples/ch19/sec13/MyParser.scala | Scala | apache-2.0 | 1,180 |
package com.twitter.finagle.ssl.client
import com.twitter.finagle.ssl.{
ApplicationProtocols, CipherSuites, KeyCredentials, Protocols, TrustCredentials}
/**
 * SslClientConfiguration represents the collection of parameters that an engine factory
 * should use to configure a TLS client [[Engine]]. Every parameter defaults to
 * `Unspecified` (or `None` for the hostname), leaving the decision to the factory.
 *
 * @param hostname If specified, this value should match one of the names specified in
 * the server's X.509 certificate.
 *
 * @param keyCredentials The credentials used by the client engine to verify itself to
 * a remote peer.
 *
 * @param trustCredentials The credentials used by the client to validate a remote
 * peer's credentials.
 *
 * @param cipherSuites The cipher suites which should be used by a particular client engine.
 *
 * @param protocols The protocols which should be enabled for use with a particular client engine.
 *
 * @param applicationProtocols The ALPN or NPN protocols which should be supported by a particular
 * client engine.
 */
private[finagle] case class SslClientConfiguration(
  hostname: Option[String] = None,
  keyCredentials: KeyCredentials = KeyCredentials.Unspecified,
  trustCredentials: TrustCredentials = TrustCredentials.Unspecified,
  cipherSuites: CipherSuites = CipherSuites.Unspecified,
  protocols: Protocols = Protocols.Unspecified,
  applicationProtocols: ApplicationProtocols = ApplicationProtocols.Unspecified)
| spockz/finagle | finagle-core/src/main/scala/com/twitter/finagle/ssl/client/SslClientConfiguration.scala | Scala | apache-2.0 | 1,383 |
package models
import java.security.MessageDigest
import org.joda.time.DateTime
import scalikejdbc._
import skinny.orm._
case class Account(id: Long, email: String, password: String, createdAt: DateTime)
object Account extends SkinnyCRUDMapper[Account] {

  override def defaultAlias = createAlias("a")

  val ownerAlias = createAlias("owner")

  /** Maps a result-set row onto an Account value. */
  override def extract(rs: WrappedResultSet, a: ResultName[Account]) = new Account(
    id = rs.get(a.id),
    email = rs.get(a.email),
    password = rs.get(a.password),
    createdAt = rs.get(a.createdAt)
  )

  /** Hex-encodes the SHA-1 digest of `s`.
    *
    * NOTE(review): SHA-1 is a fast, broken hash and unsuitable for password
    * storage; a slow KDF (bcrypt/scrypt/PBKDF2) would be preferable, but
    * switching requires migrating the hashes already stored in the database.
    */
  private def digestString(s: String): String = {
    val md = MessageDigest.getInstance("SHA-1")
    md.update(s.getBytes)
    // One-pass hex encoding instead of the original quadratic foldLeft
    // string concatenation; `b & 0xff` maps the signed byte to 0..255,
    // producing exactly the same output as the old `if (b < 0) b + 256`.
    md.digest.map(b => "%02x".format(b & 0xff)).mkString
  }

  /** Returns the account whose email matches and whose stored digest equals
    * the digest of `password`; None otherwise. */
  def authenticate(email: String, password: String)(implicit s: DBSession): Option[Account] = {
    val hashedPassword = digestString(password)
    val a = Account.defaultAlias
    Account.where(sqls.eq(a.email, email).and.eq(a.password, hashedPassword)).apply().headOption
  }
}
| tsuyoshizawa/scala-oauth2-provider-example-skinny-orm | app/models/Account.scala | Scala | mit | 1,064 |
/**
* The MIT License (MIT)
* <p/>
* Copyright (c) 2016 ScalateKids
* <p/>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p/>
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* <p/>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
* <p/>
* @author Scalatekids
* @version 1.0
* @since 1.0
*/
package com.actorbase.cli.models
/**
* A command of the ActorbaseCLI.
* This trait is extended by all the Command classes of the Command Pattern
* in the models package of the ActorbaseCLI.
*/
trait Command {

  /**
   * Executes this command (Command pattern): each concrete command in the
   * models package carries the code to run and reports its outcome as text.
   *
   * @return a String containing the result to show to the user of the invoked Command
   */
  def execute() : String
} | ScalateKids/Actorbase-Client | src/main/scala/com/actorbase/cli/models/Command.scala | Scala | mit | 1,704 |
package net.wrap_trap.goju
import org.scalatest._
/**
* goju: HanoiDB(LSM-trees (Log-Structured Merge Trees) Indexed Storage) clone
* Copyright (c) 2016 Masayuki Takahashi
* This software is released under the MIT License.
* http://opensource.org/licenses/mit-license.php
*/
class PlainRpcProtocolSpec extends FlatSpec with Matchers {
  // Each protocol constructor should wrap its payload in a tuple tagged with
  // the corresponding symbol ('CALL, 'REPLY, 'CAST).
  "PlainRpc.CALL" should "replace to ('CALL, msg)" in {
    PlainRpcProtocol.CALL("some message") should equal('CALL, "some message")
  }

  "PlainRpc.REPLY" should "replace to ('REPLY, msg)" in {
    PlainRpcProtocol.REPLY("some reply") should equal('REPLY, "some reply")
  }

  "PlainRpc.CAST" should "replace to ('CAST, msg)" in {
    PlainRpcProtocol.CAST("some message") should equal('CAST, "some message")
  }
}
| masayuki038/goju | src/test/scala/net/wrap_trap/goju/PlainRpcProtocolSpec.scala | Scala | mit | 768 |
package orc.android
//
// OrcBindings.scala -- Scala class OrcBindings
// Project OrcScala
//
// $Id: OrcBindings.scala 2933 2011-12-15 16:26:02Z jthywissen $
//
// Created by jthywiss on May 26, 2010.
//
// Copyright (c) 2012 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
import java.io.File
import java.util.Map
import orc.OrcOptions
/** An extended implementation of <code>javax.script.Bindings</code>
* with type-specific get and put methods.
*
* @author jthywiss
*/
class OrcBindings(m: Map[String, Object]) extends java.util.HashMap[String, Object] with OrcOptions {
  // NOTE(review): the constructor argument `m` is never read anywhere in this
  // class, so bindings passed in are NOT copied into the map -- confirm
  // whether a `putAll(m)` was intended.
  import scala.collection.JavaConversions._

  def this() = this(new java.util.HashMap[String, Object])

  // All option values are stored stringly-typed in the underlying HashMap;
  // the typed accessors below parse on read and fall back to a default on
  // missing or unparsable values.
  def filename: String = getString("javax.script.filename", "")
  def filename_=(newVal: String) = putString("javax.script.filename", newVal)
  def logLevel: String = getString("orc.logLevel", "INFO")
  def logLevel_=(newVal: String) = putString("orc.logLevel", newVal)

  // Compile options
  def usePrelude: Boolean = getBoolean("orc.usePrelude", true)
  def usePrelude_=(newVal: Boolean) = putBoolean("orc.usePrelude", newVal)
  def includePath: java.util.List[String] = getPathList("orc.includePath", List("."))
  def includePath_=(newVal: java.util.List[String]) = putPathList("orc.includePath", newVal)
  def additionalIncludes: java.util.List[String] = getPathList("orc.additionalIncludes", List())
  def additionalIncludes_=(newVal: java.util.List[String]) = putPathList("orc.additionalIncludes", newVal)
  def typecheck: Boolean = getBoolean("orc.typecheck", false)
  def typecheck_=(newVal: Boolean) = putBoolean("orc.typecheck", newVal)
  def disableRecursionCheck: Boolean = getBoolean("orc.disableRecursionCheck", false)
  def disableRecursionCheck_=(newVal: Boolean) = putBoolean("orc.disableRecursionCheck", newVal)
  def echoOil: Boolean = getBoolean("orc.echoOil", false)
  def echoOil_=(newVal: Boolean) = putBoolean("orc.echoOil", newVal)
  // The empty string encodes "no OIL output file".
  def oilOutputFile: Option[File] = {
    getString("orc.oilOutputFile", "") match {
      case "" => None
      case f => Some(new File(f))
    }
  }
  def oilOutputFile_=(newVal: Option[File]) = putString("orc.oilOutputFile", newVal.map(_.toString).getOrElse(""))
  def compileOnly: Boolean = getBoolean("orc.onlyCompile", false)
  def compileOnly_=(newVal: Boolean) = putBoolean("orc.onlyCompile", newVal)
  def runOil: Boolean = getBoolean("orc.runOil", false)
  def runOil_=(newVal: Boolean) = putBoolean("orc.runOil", newVal)

  // Execution options
  def classPath: java.util.List[String] = getPathList("orc.classPath", List())
  def classPath_=(newVal: java.util.List[String]) = putPathList("orc.classPath", newVal)
  def showJavaStackTrace: Boolean = getBoolean("orc.showJavaStackTrace", false)
  def showJavaStackTrace_=(newVal: Boolean) = putBoolean("orc.showJavaStackTrace", newVal)
  def disableTailCallOpt: Boolean = getBoolean("orc.disableTailCallOpt", false)
  def disableTailCallOpt_=(newVal: Boolean) = putBoolean("orc.disableTailCallOpt", newVal)
  // -1 is the default for the three limits below -- presumably meaning
  // "unlimited"/"platform default"; confirm against the consumers of
  // OrcOptions.
  def stackSize: Int = getInt("orc.stackSize", -1)
  def stackSize_=(newVal: Int) = putInt("orc.stackSize", newVal)
  def maxTokens: Int = getInt("orc.maxTokens", -1)
  def maxTokens_=(newVal: Int) = putInt("orc.maxTokens", newVal)
  def maxSiteThreads: Int = getInt("orc.maxSiteThreads", -1)
  def maxSiteThreads_=(newVal: Int) = putInt("orc.maxSiteThreads", newVal)

  // Capability name -> granted?  Entries absent from the map are treated as
  // not granted by hasRight.
  var capabilities = new java.util.HashMap[String, Boolean]()
  def hasRight(rightName: String): Boolean = {
    if (capabilities.containsKey(rightName)) {
      capabilities.get(rightName)
    } else {
      false
    }
  }
  def setRight(capName: String, newVal: Boolean) {
    capabilities.put(capName, newVal)
  }

  /** Stores `value` under `key` in string form.
    * @param key binding name
    * @param value value to store
    */
  def putString(key: String, value: String) {
    put(key, value.toString)
  }

  /** Returns the String stored under `key`, or `default` when the entry is
    * absent or not a String.
    * @param key binding name
    * @param default fallback value
    * @return the stored or default string
    */
  def getString(key: String, default: String): String = {
    val value = get(key)
    value match {
      case s: String => s
      case _ => default
    }
  }

  /** Stores `value` under `key` as its decimal string form.
    * @param key binding name
    * @param value value to store
    */
  def putInt(key: String, value: Int) {
    put(key, value.toString)
  }

  /** Returns the Int parsed from the value under `key`, or `default` when the
    * entry is absent, not a String, or unparsable.
    * @param key binding name
    * @param default fallback value
    * @return the parsed or default Int
    */
  def getInt(key: String, default: Int): Int = {
    try {
      get(key) match {
        case s: String => s.toInt
        case _ => default
      }
    } catch {
      case e: NumberFormatException => default
    }
  }

  /** Stores `value` under `key` as its decimal string form.
    * @param key binding name
    * @param value value to store
    */
  def putLong(key: String, value: Long) {
    put(key, value.toString)
  }

  /** Returns the Long parsed from the value under `key`, or `default` when
    * the entry is absent, not a String, or unparsable.
    * @param key binding name
    * @param default fallback value
    * @return the parsed or default Long
    */
  def getLong(key: String, default: Long): Long = {
    try {
      get(key) match {
        case s: String => s.toLong
        case _ => default
      }
    } catch {
      case e: NumberFormatException => default
    }
  }

  /** Stores `value` under `key` as "true"/"false".
    * @param key binding name
    * @param value value to store
    */
  def putBoolean(key: String, value: Boolean) {
    put(key, value.toString)
  }

  /** Returns the Boolean under `key` (case-insensitive "true"/"false"), or
    * `default` for any other value.
    * @param key binding name
    * @param default fallback value
    * @return the stored or default Boolean
    */
  def getBoolean(key: String, default: Boolean): Boolean = {
    get(key) match {
      case s: String if s.equalsIgnoreCase("true") => true
      case s: String if s.equalsIgnoreCase("false") => false
      case _ => default
    }
  }

  /** Stores `value` under `key` as its decimal string form.
    * @param key binding name
    * @param value value to store
    */
  def putFloat(key: String, value: Float) {
    put(key, value.toString)
  }

  /** Returns the Float parsed from the value under `key`, or `default` when
    * the entry is absent, not a String, or unparsable.
    * @param key binding name
    * @param default fallback value
    * @return the parsed or default Float
    */
  def getFloat(key: String, default: Float): Float = {
    try {
      get(key) match {
        case s: String => s.toFloat
        case _ => default
      }
    } catch {
      case e: NumberFormatException => default
    }
  }

  /** Stores `value` under `key` as its decimal string form.
    * @param key binding name
    * @param value value to store
    */
  def putDouble(key: String, value: Double) {
    put(key, value.toString)
  }

  /** Returns the Double parsed from the value under `key`, or `default` when
    * the entry is absent, not a String, or unparsable.
    * @param key binding name
    * @param default fallback value
    * @return the parsed or default Double
    */
  def getDouble(key: String, default: Double): Double = {
    try {
      get(key) match {
        case s: String => s.toDouble
        case _ => default
      }
    } catch {
      case e: NumberFormatException => default
    }
  }

  /** Stores the path list under `key`, joined with the platform path
    * separator; an empty list is stored as the empty string.
    * @param key binding name
    * @param value paths to store
    */
  def putPathList(key: String, value: java.util.List[String]) {
    if (value.length > 0) {
      put(key, value.mkString(File.pathSeparator))
    } else {
      put(key, "")
    }
  }

  /** Splits the value under `key` on the platform path separator; an empty
    * string yields an empty list, and an absent/non-String value yields
    * `default`. */
  def getPathList(key: String, default: java.util.List[String]): java.util.List[String] = {
    val value = get(key)
    value match {
      case s: String if (s.length == 0) => new java.util.ArrayList[String](0)
      case s: String => s.split(File.pathSeparator).toList
      case _ => default
    }
  }
}
| orc-lang/orc | OrcAndroid/src/orc/android/OrcBindings.scala | Scala | bsd-3-clause | 6,836 |
/*
* Copyright ActionML, LLC under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* ActionML licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.actionml.router.http.routes
import akka.actor.ActorRef
import akka.event.LoggingAdapter
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import akka.pattern.ask
import cats.data.Validated
import cats.data.Validated.Valid
import com.actionml.admin.Administrator
import com.actionml.authserver.{AccessToken, ResourceId}
import com.actionml.authserver.Roles.{engine, system}
import com.actionml.authserver.directives.AuthorizationDirectives
import com.actionml.authserver.service.AuthorizationService
import com.actionml.core.model.Response
import com.actionml.core.validate.ValidateError
import com.actionml.router.config.AppConfig
import com.actionml.router.service._
import org.json4s.JValue
import org.json4s.jackson.JsonMethods
import scaldi.Injector
/**
*
* Engine endpoints:
*
* Add new engine
* PUT, POST /engines/ {JSON body for PIO engine}
* Response: HTTP code 201 if the engine was successfully created; otherwise, 400.
*
* Todo: Update existing engine
* PUT, POST /engines/<engine-id>?data_delete=true&force=true {JSON body for PIO event}
* Response: HTTP code 200 if the engine was successfully updated; otherwise, 400.
*
* Update existing engine
* PUT, POST /engines/<engine-id>?import=true&location=String
* Response: HTTP code 200 if the engine was successfully updated; otherwise, 400.
*
* Get existing engine
* GET /engines/<engine-id>
* Response: {JSON body for PIO event}
* HTTP code 200 if the engine exist; otherwise, 404
*
* Delete existing engine
* DELETE /engines/<engine-id>
* Response: HTTP code 200 if the engine was successfully deleted; otherwise, 400.
*
* @author The ActionML Team (<a href="http://actionml.com">http://actionml.com</a>)
* 29.01.17 17:36
*/
class EnginesRouter(implicit inj: Injector) extends BaseRouter with AuthorizationDirectives {
  private val engineService = inject[ActorRef]('EngineService)
  override val authorizationService = inject[AuthorizationService]
  private val config = inject[AppConfig]
  private val admin = inject[Administrator]
  override val authEnabled = config.auth.enabled

  // Top-level route: /engines CRUD endpoints plus the /system endpoints, each
  // guarded by the corresponding role-based access check.
  // (Fixed: lambda arrows were corrupted to the Greek letter 'β' by an encoding
  // round-trip; restored to '=>'.)
  override val route: Route = (rejectEmptyResponse & extractAccessToken & extractLog) { (accessToken, log) => {
    implicit val l: LoggingAdapter = log
    implicit val at: Option[AccessToken] = accessToken
    pathPrefix("engines") {
      (pathEndOrSingleSlash & hasAccess(engine.create, ResourceId.*)) {
        getEngines ~
        createEngine
      } ~
      pathPrefix(Segment) { engineId =>
        hasAccess(engine.read, engineId).apply {
          pathEndOrSingleSlash(getEngine(engineId))
        } ~
        hasAccess(engine.modify, engineId).apply {
          (pathEndOrSingleSlash & delete) (deleteEngine(engineId)) ~
          (path("imports") & post) (updateEngineWithImport(engineId)) ~
          (path("configs") & post) (updateEngineWithConfig(engineId)) ~
          pathPrefix("jobs") {
            (pathEndOrSingleSlash & post) (updateEngineWithTrain(engineId)) ~
            (path(Segment) & delete) { jobId =>
              cancelJob(engineId, jobId)
            }
          } ~
          pathPrefix("entities") {
            path(Segment) { userId =>
              get(getUserData(engineId, userId)) ~
              delete(deleteUserData(engineId, userId))
            }
          }
        }
      }
    } ~
    (pathPrefix("system") & hasAccess(system.info)) {
      path("health")(healthCheck) ~
      pathEndOrSingleSlash(getSystemInfo)
    }
  }}

  /** GET /system/health — liveness probe backed by the Administrator's health check. */
  private def healthCheck: Route = get {
    completeByValidated(StatusCodes.OK) {
      admin.healthCheck.map(Valid(_))
    }
  }

  /** GET /system — overall system information, served by the engine service actor. */
  private def getSystemInfo(implicit log: LoggingAdapter): Route = get {
    log.info("Get system info")
    completeByValidated(StatusCodes.OK) {
      (engineService ? GetSystemInfo()).mapTo[Validated[ValidateError, Response]]
    }
  }

  /** GET /engines/&lt;engineId&gt; — returns the status of one engine. */
  private def getEngine(engineId: String)(implicit log: LoggingAdapter): Route = get {
    log.info("Get engine: {}", engineId)
    completeByValidated(StatusCodes.OK) {
      (engineService ? GetEngine(engineId)).mapTo[Validated[ValidateError, Response]]
    }
  }

  /** GET /engines — returns information for every engine. */
  private def getEngines(implicit log: LoggingAdapter): Route = get {
    log.info("Get engines information")
    completeByValidated(StatusCodes.OK) {
      (engineService ? GetEngines).mapTo[Validated[ValidateError, List[Response]]]
    }
  }

  /** POST/PUT /engines — creates a new engine from the JSON config body (201 on success). */
  private def createEngine(implicit log: LoggingAdapter): Route = entity(as[JValue]) { engineConfig =>
    log.info("Create engine: {}", engineConfig)
    completeByValidated(StatusCodes.Created) {
      (engineService ? CreateEngine(JsonMethods.compact(engineConfig))).mapTo[Validated[ValidateError, Response]]
    }
  }

  /** POST /engines/&lt;engineId&gt;/configs — updates an engine from the JSON config body. */
  private def updateEngineWithConfig(engineId: String)(implicit log: LoggingAdapter): Route = entity(as[JValue]) { engineConfig =>
    log.info("Update engine: {}, updateConfig: true", engineId)
    // NOTE(review): `engineId` from the URL is logged but not passed to UpdateEngine;
    // the engine id is presumably taken from the JSON body itself — confirm.
    completeByValidated(StatusCodes.OK) {
      (engineService ? UpdateEngine(JsonMethods.compact(engineConfig))).mapTo[Validated[ValidateError, Response]]
    }
  }

  /** POST /engines/&lt;engineId&gt;/imports?import_path=... — imports events into the engine. */
  private def updateEngineWithImport(engineId: String)(implicit log: LoggingAdapter): Route = parameter('import_path) { importPath =>
    log.info("Update engine: {}, importPath: {}", engineId, importPath)
    completeByValidated(StatusCodes.OK) {
      (engineService ? UpdateEngineWithImport(engineId, importPath)).mapTo[Validated[ValidateError, Response]]
    }
  }

  /** POST /engines/&lt;engineId&gt;/jobs — kicks off training for the engine. */
  private def updateEngineWithTrain(engineId: String)(implicit log: LoggingAdapter): Route = {
    // Fixed: the original message had two '{}' placeholders but only one argument.
    log.info("Update engine: {}, train: true", engineId)
    completeByValidated(StatusCodes.OK) {
      (engineService ? UpdateEngineWithTrain(engineId)).mapTo[Validated[ValidateError, Response]]
    }
  }

  /** DELETE /engines/&lt;engineId&gt; — removes the engine. */
  private def deleteEngine(engineId: String)(implicit log: LoggingAdapter): Route = {
    log.info("Delete engine: {}", engineId)
    completeByValidated(StatusCodes.OK) {
      (engineService ? DeleteEngine(engineId)).mapTo[Validated[ValidateError, Response]]
    }
  }

  /** DELETE /engines/&lt;engineId&gt;/jobs/&lt;jobId&gt; — cancels a running job. */
  private def cancelJob(engineId: String, jobId: String)(implicit log: LoggingAdapter): Route = {
    log.info(s"Cancel job $jobId")
    completeByValidated(StatusCodes.OK) {
      (engineService ? CancelJob(engineId, jobId)).mapTo[Validated[ValidateError, Response]]
    }
  }

  /** GET /engines/&lt;engineId&gt;/entities/&lt;userId&gt;?num=&amp;from= — pages a user's data (defaults: num=100, from=0). */
  private def getUserData(engineId: String, userId: String) = parameters('num.as[Int].?, 'from.as[Int].?) { (num, from) =>
    completeByValidated(StatusCodes.OK) {
      (engineService ? GetUserData(engineId, userId, num = num.getOrElse(100), from = from.getOrElse(0)))
        .mapTo[Validated[ValidateError, Response]]
    }
  }

  /** DELETE /engines/&lt;engineId&gt;/entities/&lt;userId&gt; — removes all data for one user. */
  private def deleteUserData(engineId: String, userId: String) = {
    completeByValidated(StatusCodes.OK) {
      (engineService ? DeleteUserData(engineId, userId)).mapTo[Validated[ValidateError, Response]]
    }
  }
}
| actionml/harness | rest-server/server/src/main/scala/com/actionml/router/http/routes/EnginesRouter.scala | Scala | apache-2.0 | 7,776 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate.servlet
import java.io.File
import javax.servlet.{ ServletConfig, ServletContext }
import org.fusesource.scalate.layout.{ DefaultLayoutStrategy, LayoutStrategy }
import org.fusesource.scalate.util._
import org.fusesource.scalate.{ Binding, TemplateEngine }
import scala.tools.nsc.Global
object ServletTemplateEngine {
  val log = Log(getClass)
  val templateEngineKey = classOf[ServletTemplateEngine].getName

  /**
   * Looks up the template engine registered on the given servlet context.
   *
   * @throws IllegalArgumentException if no engine was registered under [[templateEngineKey]]
   */
  def apply(servletContext: ServletContext): ServletTemplateEngine =
    servletContext.getAttribute(templateEngineKey) match {
      case null =>
        throw new IllegalArgumentException("No ServletTemplateEngine instance registered on ServletContext for key " +
          templateEngineKey + ". Are you sure your web application has registered the Scalate TemplateEngineServlet?")
      case engine =>
        engine.asInstanceOf[ServletTemplateEngine]
    }

  /**
   * Registers the given engine on the servlet context and runs its bootstrap code.
   * Called when the TemplateEngineServlet initialises.
   */
  def update(servletContext: ServletContext, templateEngine: ServletTemplateEngine): Unit = {
    servletContext.setAttribute(templateEngineKey, templateEngine)
    // fire the bootstrap code now that the engine is reachable from the context
    templateEngine.boot()
  }

  /**
   * Installs the default servlet-style layout strategy on the engine, searching
   * "WEB-INF/scalate/layouts/default.&lt;ext&gt;" for every supported template type
   * when a template does not declare its own layout.
   */
  def setLayoutStrategy(engine: TemplateEngine): LayoutStrategy = {
    val defaultLayouts = TemplateEngine.templateTypes.map(ext => "/WEB-INF/scalate/layouts/default." + ext)
    engine.layoutStrategy = new DefaultLayoutStrategy(engine, defaultLayouts: _*)
    engine.layoutStrategy
  }

  /**
   * Returns the template source directories for the given config: the webapp root
   * when the container exposes it on disk, otherwise an empty list.
   */
  def sourceDirectories(config: Config): List[File] =
    Option(config.getServletContext.getRealPath("/")).map(new File(_)).toList
}
/**
* A Servlet based TemplateEngine which initializes itself using a ServletConfig or a FilterConfig.
*
* The default layout files searched if no layout attribute is defined by a template are:
* * "WEB-INF/scalate/layouts/default.jade"
* * "WEB-INF/scalate/layouts/default.mustache"
* * "WEB-INF/scalate/layouts/default.scaml"
* * "WEB-INF/scalate/layouts/default.ssp"
 *
* @author <a href="http://hiramchirino.com">Hiram Chirino</a>
*/
class ServletTemplateEngine(
  val config: Config) extends TemplateEngine(ServletTemplateEngine.sourceDirectories(config)) {
  import ServletTemplateEngine.log._

  // Constructor body: the order of these assignments is the initialisation order
  // of the engine — do not reorder casually.

  // Templates may also live directly under WEB-INF.
  templateDirectories ::= "/WEB-INF"
  // Every template gets an implicit "context" binding of type ServletRenderContext.
  bindings = List(Binding("context", "_root_." + classOf[ServletRenderContext].getName, true, isImplicit = true))
  classpath = buildClassPath()
  classLoader = Thread.currentThread.getContextClassLoader
  // Resolve resources through the servlet container first, the file system second.
  resourceLoader = new ServletResourceLoader(config.getServletContext, new FileResourceLoader())
  ServletTemplateEngine.setLayoutStrategy(this)
  bootInjections = List(this, config.getServletContext)
  // Optional "boot.class" init parameter overrides the bootstrap class name.
  Option(config.getInitParameter("boot.class")).foreach(clazz => bootClassName = clazz)

  info("Scalate template engine using working directory: %s", workingDirectory)

  /**
   * Builds the compiler classpath: an optional configured prefix, the containers of
   * key classes (this engine, the servlet API, the Scala library, and — when
   * present — the Scala compiler), WEB-INF/classes and WEB-INF/lib, and an
   * optional configured suffix.
   */
  private def buildClassPath(): String = {
    val builder = new ClassPathBuilder

    // Add optional classpath prefix via web.xml parameter
    builder.addEntry(config.getInitParameter("compiler.classpath.prefix"))

    // Add containers class path
    builder.addPathFrom(getClass)
      .addPathFrom(classOf[ServletConfig])
      .addPathFrom(classOf[Product])

    try {
      builder.addPathFrom(classOf[Global])
    } catch {
      case x: Throwable => // the scala compiler might not be on the path.
    }

    // Always include WEB-INF/classes and all the JARs in WEB-INF/lib just in case
    builder
      .addClassesDir(config.getServletContext.getRealPath("/WEB-INF/classes"))
      .addLibDir(config.getServletContext.getRealPath("/WEB-INF/lib"))

    // Add optional classpath suffix via web.xml parameter
    builder.addEntry(config.getInitParameter("compiler.classpath.suffix"))

    builder.classPath
  }
}
| scalate/scalate | scalate-core/src/main/scala/org/fusesource/scalate/servlet/ServletTemplateEngine.scala | Scala | apache-2.0 | 5,274 |
package fastparsers.parsers
/**
 * Facade of a Parser. It only appears in the tree given to the macro in order to be expanded.
 */
trait Parser[+T]
| begeric/FastParsers | FastParsers/src/main/scala/fastparsers/parsers/Parser.scala | Scala | mit | 149 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.db.evolutions
import org.specs2.mutable.Specification
import play.api.Configuration
/**
 * Specification for DefaultEvolutionsConfigParser: checks that deprecated
 * configuration keys, global fallbacks, datasource-specific keys and the
 * built-in defaults are all parsed correctly.
 */
class DefaultEvolutionsConfigParserSpec extends Specification {

  // Parses the given entries (layered on the reference config) into an EvolutionsConfig.
  def parse(config: (String, Any)*): EvolutionsConfig = {
    new DefaultEvolutionsConfigParser(Configuration.from(config.toMap).withFallback(Configuration.reference)).get
  }

  // Asserts that a boolean at `key` is read through to the "default" datasource config.
  def test(key: String)(read: EvolutionsDatasourceConfig => Boolean) = {
    read(parse(key -> true).forDatasource("default")) must_== true
    read(parse(key -> false).forDatasource("default")) must_== false
  }

  // Same as `test`, but for keys under the play.evolutions prefix; the extra
  // foo entry forces the "default" datasource to exist so fallback is exercised.
  def testN(key: String)(read: EvolutionsDatasourceConfig => Boolean) = {
    // This ensures that the config for default is detected, ensuring that a configuration based fallback is used
    val fooConfig = "play.evolutions.db.default.foo" -> "foo"
    read(parse(s"play.evolutions.$key" -> true, fooConfig).forDatasource("default")) must_== true
    read(parse(s"play.evolutions.$key" -> false, fooConfig).forDatasource("default")) must_== false
  }

  // String-valued variant of `testN`.
  def testNString(key: String)(read: EvolutionsDatasourceConfig => String) = {
    // This ensures that the config for default is detected, ensuring that a configuration based fallback is used
    val fooConfig = "play.evolutions.db.default.foo" -> "foo"
    read(parse(s"play.evolutions.$key" -> "", fooConfig).forDatasource("default")) must_== ""
    read(parse(s"play.evolutions.$key" -> "something", fooConfig).forDatasource("default")) must_== "something"
  }

  // Config for the "default" datasource with nothing overridden.
  val default = parse().forDatasource("default")

  "The evolutions config parser" should {
    "parse the deprecated style of configuration" in {
      "autocommit" in {
        test("evolutions.autocommit")(_.autocommit)
      }
      "useLocks" in {
        test("evolutions.use.locks")(_.useLocks)
      }
      "autoApply" in {
        test("applyEvolutions.default")(_.autoApply)
      }
      "autoApplyDowns" in {
        test("applyDownEvolutions.default")(_.autoApplyDowns)
      }
    }
    "fallback to global configuration if not configured" in {
      "enabled" in {
        testN("enabled")(_.enabled)
      }
      "schema" in {
        testNString("schema")(_.schema)
      }
      "autocommit" in {
        testN("autocommit")(_.autocommit)
      }
      "useLocks" in {
        testN("useLocks")(_.useLocks)
      }
      "autoApply" in {
        testN("autoApply")(_.autoApply)
      }
      "autoApplyDowns" in {
        testN("autoApplyDowns")(_.autoApplyDowns)
      }
    }
    "parse datasource specific configuration" in {
      "enabled" in {
        testN("db.default.enabled")(_.enabled)
      }
      "schema" in {
        testNString("db.default.schema")(_.schema)
      }
      "autocommit" in {
        testN("db.default.autocommit")(_.autocommit)
      }
      "useLocks" in {
        testN("db.default.useLocks")(_.useLocks)
      }
      "autoApply" in {
        testN("db.default.autoApply")(_.autoApply)
      }
      "autoApplyDowns" in {
        testN("db.default.autoApplyDowns")(_.autoApplyDowns)
      }
    }
    "parse defaults" in {
      "enabled" in {
        default.enabled must_== true
      }
      "schema" in {
        default.schema must_== ""
      }
      "autocommit" in {
        default.autocommit must_== true
      }
      "useLocks" in {
        default.useLocks must_== false
      }
      "autoApply" in {
        default.autoApply must_== false
      }
      "autoApplyDowns" in {
        default.autoApplyDowns must_== false
      }
    }
  }
}
| benmccann/playframework | persistence/play-jdbc-evolutions/src/test/scala/play/api/db/evolutions/DefaultEvolutionsConfigParserSpec.scala | Scala | apache-2.0 | 3,557 |
package actors
import actors.persistent.staffing.{GetState, ShiftsActor, ShiftsReadActor, UpdateShifts, UpdateShiftsAck}
import akka.actor.{ActorRef, PoisonPill, Props}
import akka.testkit.ImplicitSender
import drt.shared.Terminals.{T1, Terminal}
import drt.shared._
import services.SDate
import services.crunch.CrunchTestLike
import scala.concurrent.duration._
object StaffAssignmentGenerator {
  /** Builds a [[StaffAssignment]] for the given terminal, converting the ISO-style
    * start/end timestamp strings to [[MilliDate]]s; the created-by field is left empty.
    */
  def generateStaffAssignment(name: String, terminal: Terminal, startTime: String, endTime: String, staff: Int): StaffAssignment =
    StaffAssignment(
      name,
      terminal,
      MilliDate(SDate(startTime).millisSinceEpoch),
      MilliDate(SDate(endTime).millisSinceEpoch),
      staff,
      None)
}
/**
 * Tests for the persistent ShiftsActor: verifies that shift assignments survive
 * actor restarts, that updates overwrite earlier assignments, and that the
 * read-only ShiftsReadActor restores a point-in-time view of past state.
 *
 * NOTE(review): actor names are reused across tests ("shiftsActor1" etc.);
 * `sequential` + `isolated` presumably keep the actor systems separate — confirm.
 */
class ShiftsActorSpec extends CrunchTestLike with ImplicitSender {
  sequential
  isolated

  import StaffAssignmentGenerator._

  "Shifts actor" should {
    "remember a shift staff assignment added before a shutdown" in {
      val startTime = MilliDate(SDate(s"2017-01-01T07:00").millisSinceEpoch)
      val endTime = MilliDate(SDate(s"2017-01-01T15:00").millisSinceEpoch)
      val shifts = ShiftAssignments(Seq(StaffAssignment("Morning", T1, startTime, endTime, 10, None)))
      val now: () => SDateLike = () => SDate("2017-01-01T23:59")
      val expireAfterOneDay: () => SDateLike = () => now().addDays(-1)

      val actor = system.actorOf(Props(classOf[ShiftsActor], now, expireAfterOneDay), "shiftsActor")

      actor ! UpdateShifts(shifts.assignments)
      expectMsg(UpdateShiftsAck(shifts.assignments))
      // Kill the actor, then recover its state from persistence with a fresh incarnation.
      actor ! PoisonPill

      val newActor = system.actorOf(Props(classOf[ShiftsActor], now, expireAfterOneDay), "shiftsActor2")

      newActor ! GetState

      expectMsg(shifts)

      true
    }

    "correctly remember an update to a shift after a restart" in {
      val shift1 = generateStaffAssignment("Morning 1", T1, "2017-01-01T07:00", "2017-01-01T15:00", 10)
      val shift2 = generateStaffAssignment("Morning 2", T1, "2017-01-01T07:30", "2017-01-01T15:30", 10)
      val now: () => SDateLike = () => SDate("2017-01-01T23:59")
      val expireAfterOneDay: () => SDateLike = () => now().addDays(-1)

      val actor = system.actorOf(Props(classOf[ShiftsActor], now, expireAfterOneDay), "shiftsActor1")

      actor ! UpdateShifts(Seq(shift1, shift2))
      expectMsg(UpdateShiftsAck(Seq(shift1, shift2)))

      // Second update zeroes the staff numbers; the restarted actor must see the latest values.
      val updatedShifts = Seq(shift1, shift2).map(_.copy(numberOfStaff = 0))
      actor ! UpdateShifts(updatedShifts)
      expectMsg(UpdateShiftsAck(updatedShifts))

      actor ! PoisonPill

      val newActor = system.actorOf(Props(classOf[ShiftsActor], now, expireAfterOneDay), "shiftsActor2")

      newActor ! GetState

      val expected = ShiftAssignments(updatedShifts)

      expectMsg(expected)

      true
    }

    "remember multiple added shifts and correctly remember movements after a restart" in {
      val shift1 = generateStaffAssignment("Morning 1", T1, "2017-01-01T07:00", "2017-01-01T15:00", 10)
      val shift2 = generateStaffAssignment("Morning 2", T1, "2017-01-01T07:30", "2017-01-01T15:30", 5)
      val shift3 = generateStaffAssignment("Evening 1", T1, "2017-01-01T17:00", "2017-01-01T23:00", 11)
      val shift4 = generateStaffAssignment("Evening 2", T1, "2017-01-01T17:30", "2017-01-01T23:30", 6)
      val now: () => SDateLike = () => SDate("2017-01-01T23:59")
      val expireAfterOneDay: () => SDateLike = () => now().addDays(-1)

      val actor = system.actorOf(Props(classOf[ShiftsActor], now, expireAfterOneDay), "shiftsActor1")

      actor ! UpdateShifts(Seq(shift1, shift2, shift3, shift4))
      expectMsg(UpdateShiftsAck(Seq(shift1, shift2, shift3, shift4)))

      // Only two of the four shifts are updated; the rest must be preserved across the restart.
      val updatedShift1 = shift1.copy(numberOfStaff = 0)
      val updatedShift3 = shift3.copy(numberOfStaff = 0)
      actor ! UpdateShifts(Seq(updatedShift1, updatedShift3))
      expectMsg(UpdateShiftsAck(Seq(updatedShift1, updatedShift3)))

      actor ! PoisonPill

      val newActor = system.actorOf(Props(classOf[ShiftsActor], now, expireAfterOneDay), "shiftsActor2")

      newActor ! GetState

      val expected = Set(updatedShift1, shift2, updatedShift3, shift4)

      // Compare as sets: the recovered assignment order is not part of the contract here.
      val result = expectMsgPF(1 second) {
        case ShiftAssignments(sa) => sa.toSet
      }

      result === expected
    }

    "restore shifts to a point in time view" in {
      val shift1 = generateStaffAssignment("Morning 1", T1, "2017-01-01T07:00", "2017-01-01T15:00", 10)
      val shift2 = generateStaffAssignment("Morning 2", T1, "2017-01-01T07:30", "2017-01-01T15:30", 5)
      val shift3 = generateStaffAssignment("Evening 1", T1, "2017-01-01T17:00", "2017-01-01T23:00", 11)
      val shift4 = generateStaffAssignment("Evening 2", T1, "2017-01-01T17:30", "2017-01-01T23:30", 6)

      // Three writes at 20:00, 20:05 and 20:10 ...
      val actor2000 = newStaffActor(nowAs("2017-01-01T20:00"))
      actor2000 ! UpdateShifts(Seq(shift1))
      expectMsg(UpdateShiftsAck(Seq(shift1)))
      actor2000 ! PoisonPill

      val actor2005 = newStaffActor(nowAs("2017-01-01T20:05"))
      actor2005 ! UpdateShifts(Seq(shift2))
      expectMsg(UpdateShiftsAck(Seq(shift2)))
      actor2005 ! PoisonPill

      val actor2010 = newStaffActor(nowAs("2017-01-01T20:10"))
      actor2010 ! UpdateShifts(Seq(shift3, shift4))
      expectMsg(UpdateShiftsAck(Seq(shift3, shift4)))
      actor2010 ! PoisonPill

      // ... then a point-in-time read at 20:06 must only see the first two writes.
      val actorPit2006 = newStaffPointInTimeActor(nowAs("2017-01-01T20:06"))

      actorPit2006 ! GetState

      val expected = Set(shift1, shift2)

      val result = expectMsgPF(1 second) {
        case ShiftAssignments(sa) => sa.toSet
      }

      result === expected
    }
  }

  // Creates a read/write shifts actor whose state expires one day before `now`.
  def newStaffActor(now: () => SDateLike): ActorRef = system.actorOf(Props(classOf[ShiftsActor], now, expiryDateXDaysFrom(now, 1)))

  // Creates a read-only actor that recovers state as it was at `now`.
  def newStaffPointInTimeActor(now: () => SDateLike): ActorRef = system.actorOf(Props(classOf[ShiftsReadActor], now(), expiryDateXDaysFrom(now, 1)))

  def nowAs(date: String): () => SDateLike = () => SDate(date)

  def expiryDateXDaysFrom(now: () => SDateLike, days: Int): () => SDateLike = () => now().addDays(-1 * days)
}
| UKHomeOffice/drt-scalajs-spa-exploration | server/src/test/scala/actors/ShiftsActorSpec.scala | Scala | apache-2.0 | 6,045 |
package com.barclays.corp.ams.util
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
import com.barclays.corp.ams.log.Logging
import org.joda.time.format.DateTimeFormatter
import com.barclays.corp.ams.util.btr.BTRRecord
import com.barclays.corp.ams.util.btr.BTRHeaderLabelRecord100
import com.barclays.corp.ams.util.btr.EmptyBTR
import com.barclays.corp.ams.util.btr.BTRCurrencyHeaderLabelRecord300
object DDSimulatorUtil extends Logging {

  import scala.util.control.NonFatal

  // Joda-time pattern strings used when parsing/formatting BTR record dates.
  val DT_FORMAT_CCYYMMDD = "CCYYMMdd"
  val DT_FORMAT_DDMMYY = "ddMMYY"
  val DT_FORMAT_YYMMDD = "YYMMdd"
  val DT_FORMAT_YYDDD = "YYDDD"

  /** Sentinel date string (in CCYYMMdd form) used as the fallback when a date
    * cannot be formatted or parsed. Kept in one place so both helpers agree.
    */
  private val FallbackDateString = "01010101"

  /**
   * Formats `date` with the given joda-time pattern, falling back to the
   * sentinel "01010101" when the pattern is invalid.
   *
   * Fixed: the original caught `Throwable`, silently swallowing fatal errors
   * (OutOfMemoryError, InterruptedException, ...); only non-fatal exceptions
   * are now treated as a formatting failure.
   */
  def getDateInFormat(date: DateTime, outFormat: String): String = {
    try {
      date.toString(DateTimeFormat.forPattern(outFormat))
    } catch {
      case NonFatal(_) =>
        FallbackDateString
    }
  }

  /**
   * Parses `dateStr` with the given joda-time pattern, falling back to the
   * sentinel date (1st Jan 0101) when the string or pattern is invalid.
   * Same NonFatal fix as [[getDateInFormat]].
   */
  def getDateTime(dateStr: String, inputFormat: String): DateTime = {
    try {
      DateTimeFormat.forPattern(inputFormat).parseDateTime(dateStr)
    } catch {
      case NonFatal(_) =>
        DateTimeFormat.forPattern(DT_FORMAT_CCYYMMDD).parseDateTime(FallbackDateString)
    }
  }

  /**
   * Parses a raw fixed-width record line into a BTR header record.
   *
   * Only header label records are recognised, keyed on the first four characters
   * ("FLH1") and the file-id character at position 10: 'T' yields a Sterling
   * header ([[BTRHeaderLabelRecord100]]), 'F' a Currency header
   * ([[BTRCurrencyHeaderLabelRecord300]]); anything else yields [[EmptyBTR]].
   *
   * NOTE(review): records shorter than 57 characters will throw
   * StringIndexOutOfBoundsException, as in the original — confirm upstream
   * always supplies full-width lines.
   */
  def convertRecordToBTRRecord(record: String): BTRRecord = {
    if (record == null || record.isEmpty())
      EmptyBTR
    else {
      //Pick up only the Header/Footers for GPB and Sterling files
      (record.substring(0, 4), record.substring(10, 11).charAt(0)) match {
        case ("FLH1", 'T') =>
          new BTRHeaderLabelRecord100(
            labelIdentifier = record.substring(0, 3),
            labelNumber = record.substring(3, 4).charAt(0),
            barclaysIdentifier = record.substring(4, 10),
            fileID = record.substring(10, 11).charAt(0),
            volumeIdentifier = record.substring(11, 17),
            volumeSequenceNumber = record.substring(17, 19),
            setIdentifier = record.substring(19, 25),
            recordFormat = record.substring(25, 26).charAt(0),
            blockLength = record.substring(26, 31),
            recordLength = record.substring(31, 36),
            creationDate = DDSimulatorUtil.getDateTime(record.substring(36, 41), DDSimulatorUtil.DT_FORMAT_YYDDD),
            customerIdentifier = record.substring(46, 51),
            workDate = DDSimulatorUtil.getDateTime(record.substring(51, 57), DDSimulatorUtil.DT_FORMAT_DDMMYY))
        //Pick up only the Header/Footers for CCY and Currency files
        case ("FLH1", 'F') =>
          new BTRCurrencyHeaderLabelRecord300(
            labelIdentifier = record.substring(0, 3),
            labelNumber = record.substring(3, 4).charAt(0),
            barclaysIdentifier = record.substring(4, 10),
            fileID = record.substring(10, 11).charAt(0),
            volumeIdentifier = record.substring(11, 17),
            volumeSequenceNumber = record.substring(17, 19),
            setIdentifier = record.substring(19, 25),
            recordFormat = record.substring(25, 26).charAt(0),
            blockLength = record.substring(26, 31),
            recordLength = record.substring(31, 36),
            creationDate = DDSimulatorUtil.getDateTime(record.substring(36, 41), DDSimulatorUtil.DT_FORMAT_YYDDD),
            customerIdentifier = record.substring(46, 51),
            workDate = DDSimulatorUtil.getDateTime(record.substring(51, 57), DDSimulatorUtil.DT_FORMAT_DDMMYY))
        case _ =>
          EmptyBTR
      }
    }
  }
}
| pratimsc/ddsimulator | src/main/scala/com/barclays/corp/ams/util/DDSimulatorUtil.scala | Scala | gpl-3.0 | 3,349 |
package org.jetbrains.plugins.scala.dfa
package cfg
package impl
/** Node implementation for the [[End]] marker of a control-flow graph. */
private final class EndImpl[Info] extends NodeImpl[Info] with End {
  // Label used when rendering this node in the textual CFG dump.
  override protected def asmString: String = "end"
}
| JetBrains/intellij-scala | scala/dfa/src/org/jetbrains/plugins/scala/dfa/cfg/impl/EndImpl.scala | Scala | apache-2.0 | 187 |
package com.sksamuel.avro4s.record.encoder
import java.nio.ByteBuffer
import com.sksamuel.avro4s.{AvroSchema, DefaultNamingStrategy, Encoder}
import org.apache.avro.SchemaBuilder
import org.apache.avro.generic.{GenericFixed, GenericRecord}
import org.scalatest.{FunSuite, Matchers}
/**
 * Tests that the avro4s Encoder maps byte containers (Array, Vector, Seq, List,
 * ByteBuffer) to the Avro BYTES type — both as record fields and at top level —
 * and that a FIXED schema zero-pads the encoded bytes to the declared size.
 */
class ByteArrayEncoderTest extends FunSuite with Matchers {

  test("encode byte arrays as BYTES type") {
    case class Test(z: Array[Byte])
    val schema = AvroSchema[Test]
    Encoder[Test].encode(Test(Array[Byte](1, 4, 9)), schema, DefaultNamingStrategy)
      .asInstanceOf[GenericRecord]
      .get("z")
      .asInstanceOf[ByteBuffer]
      .array().toList shouldBe List[Byte](1, 4, 9)
  }

  test("encode byte vectors as BYTES type") {
    case class Test(z: Vector[Byte])
    val schema = AvroSchema[Test]
    Encoder[Test].encode(Test(Vector[Byte](1, 4, 9)), schema, DefaultNamingStrategy)
      .asInstanceOf[GenericRecord]
      .get("z")
      .asInstanceOf[ByteBuffer]
      .array().toList shouldBe List[Byte](1, 4, 9)
  }

  test("encode byte seq as BYTES type") {
    case class Test(z: Seq[Byte])
    val schema = AvroSchema[Test]
    Encoder[Test].encode(Test(Seq[Byte](1, 4, 9)), schema, DefaultNamingStrategy)
      .asInstanceOf[GenericRecord]
      .get("z")
      .asInstanceOf[ByteBuffer]
      .array().toList shouldBe List[Byte](1, 4, 9)
  }

  test("encode byte list as BYTES type") {
    case class Test(z: List[Byte])
    val schema = AvroSchema[Test]
    Encoder[Test].encode(Test(List[Byte](1, 4, 9)), schema, DefaultNamingStrategy)
      .asInstanceOf[GenericRecord]
      .get("z")
      .asInstanceOf[ByteBuffer]
      .array().toList shouldBe List[Byte](1, 4, 9)
  }

  // Top-level values are encoded directly to a ByteBuffer, not wrapped in a record.
  test("encode top level byte arrays") {
    val schema = AvroSchema[Array[Byte]]
    Encoder[Array[Byte]].encode(Array[Byte](1, 4, 9), schema, DefaultNamingStrategy)
      .asInstanceOf[ByteBuffer]
      .array().toList shouldBe List[Byte](1, 4, 9)
  }

  test("encode ByteBuffers as BYTES type") {
    case class Test(z: ByteBuffer)
    val schema = AvroSchema[Test]
    Encoder[Test].encode(Test(ByteBuffer.wrap(Array[Byte](1, 4, 9))), schema, DefaultNamingStrategy)
      .asInstanceOf[GenericRecord]
      .get("z")
      .asInstanceOf[ByteBuffer]
      .array().toList shouldBe List[Byte](1, 4, 9)
  }

  test("encode top level ByteBuffers") {
    val schema = AvroSchema[ByteBuffer]
    Encoder[ByteBuffer].encode(ByteBuffer.wrap(Array[Byte](1, 4, 9)), schema, DefaultNamingStrategy)
      .asInstanceOf[ByteBuffer]
      .array().toList shouldBe List[Byte](1, 4, 9)
  }

  // FIXED schemas pad the value with trailing zero bytes up to the fixed size.
  test("support FIXED") {
    val schema = SchemaBuilder.fixed("foo").size(7)
    val fixed = Encoder.ByteArrayEncoder.encode("hello".getBytes, schema, DefaultNamingStrategy).asInstanceOf[GenericFixed]
    fixed.bytes().toList shouldBe Seq(104, 101, 108, 108, 111, 0, 0)
    fixed.bytes().length shouldBe 7
  }
}
| 51zero/avro4s | avro4s-core/src/test/scala/com/sksamuel/avro4s/record/encoder/ByteArrayEncoderTest.scala | Scala | mit | 2,883 |
package org.atnos.eff.syntax
import cats.{Eval, Monoid}
import cats.data.Writer
import org.atnos.eff._
object writer extends writer

/** Syntax for running the `Writer` effect of an `Eff` computation. */
trait writer {

  implicit class WriterEffectOps[R, A](e: Eff[R, A]) {

    /** Runs the writer effect, returning the value together with all logged elements. */
    def runWriter[O](implicit member: Member[Writer[O, ?], R]): Eff[member.Out, (A, List[O])] =
      WriterInterpretation.runWriter(e)(member.aux)

    /** Like [[runWriter]], with the remaining stack `U` fixed explicitly. */
    def runWriterU[O, U](implicit member: Member.Aux[Writer[O, ?], R, U]): Eff[U, (A, List[O])] =
      WriterInterpretation.runWriter(e)(member)

    /** Runs the writer effect, discarding the log. */
    def runWriterNoLog[O](implicit member: Member[Writer[O, ?], R]): Eff[member.Out, A] =
      runWriterUnsafe[O](_ => ())

    /** Like [[runWriterNoLog]], with the remaining stack `U` fixed explicitly. */
    def runWriterNoLogU[O, U](implicit member: Member.Aux[Writer[O, ?], R, U]): Eff[U, A] =
      runWriterUnsafe[O](_ => ())

    /** Alias for [[runWriterNoLogU]]. */
    def discardWriter[O, U](implicit member: Member.Aux[Writer[O, ?], R, U]): Eff[U, A] =
      runWriterNoLogU[O, U]

    /** Runs the writer effect, keeping only the log and discarding the value. */
    def runWriterLog[O](implicit member: Member[Writer[O, ?], R]): Eff[member.Out, List[O]] =
      runWriter[O](member).map(_._2)

    /** Runs the writer effect, folding the log with the given right fold. */
    def runWriterFold[O, B](fold: RightFold[O, B])(implicit member: Member[Writer[O, ?], R]): Eff[member.Out, (A, B)] =
      WriterInterpretation.runWriterFold(e)(fold)(member.aux)

    /** Runs the writer effect, combining the log with the monoid for `B`. */
    def runWriterMonoid[B](implicit member: Member[Writer[B, ?], R], B: Monoid[B]): Eff[member.Out, (A, B)] =
      WriterInterpretation.runWriterMonoid(e)(member.aux, B)

    /** Runs the writer effect, mapping each element into a monoid `M` and combining. */
    def runWriterIntoMonoid[O, M](f: O => M)(implicit member: Member[Writer[O, ?], R], M: Monoid[M]): Eff[member.Out, (A, M)] =
      WriterInterpretation.runWriterIntoMonoid(e)(f)(member.aux, M)

    /** Runs the writer effect by executing a side effect for each logged element. */
    def runWriterUnsafe[O](f: O => Unit)(implicit member: Member[Writer[O, ?], R]): Eff[member.Out, A] =
      WriterInterpretation.runWriterUnsafe(e)(f)(member.aux)

    /** Like [[runWriterUnsafe]], but the side effect is suspended in `Eval`. */
    def runWriterEval[O, U](f: O => Eval[Unit])(implicit member: Member.Aux[Writer[O, ?], R, U], v: Eval |= U): Eff[U, A] =
      WriterInterpretation.runWriterEval(e)(f)(member, v)

  }

}
| etorreborre/eff | shared/src/main/scala/org/atnos/eff/syntax/writer.scala | Scala | mit | 1,916 |
/*
* Copyright (c) 2014 the original author or authors.
*
* Licensed under the MIT License;
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://opensource.org/licenses/MIT
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git
import scala.collection.mutable.ListBuffer
import git.util.{FileUtil, DataReader, Conversion}
import java.io.File
/** In-memory representation of a (version 2) pack index file.
  *
  * @param fanOutTable the 256-entry cumulative object-count table; its last entry is the object count
  * @param objectIds   the object ids contained in the associated pack
  * @param offsets     byte offset within the pack file for each object id (parallel to `objectIds`)
  * @param length      number of objects in the index
  * @param packFile    the pack file this index describes
  */
case class PackIndex(
  fanOutTable: Seq[Int],
  objectIds: Seq[ObjectId],
  offsets: Seq[Int],
  length: Int = 0,
  packFile: PackFile
) {
  /** Whether the given object id is present in this index. */
  def has(id: ObjectId) = objectIds.contains(id)
}
object PackIndex {
/** Returns the byte offset for the given ID inside the pack file, or None when absent. */
def findOffset(packIndex: PackIndex, id: ObjectId): Option[Int] = {
  // Single scan: the original did `contains` followed by `indexOf`, walking the
  // id list twice for every hit.
  val index = packIndex.objectIds.indexOf(id)
  if (index >= 0) Some(packIndex.offsets(index)) else None
}
/** Returns the bytes decoded as a (version 2) pack index.
  *
  * Layout: 4-byte magic, 4-byte version, 256 x 4-byte cumulative fan-out counts,
  * `length` object ids, `length` CRC32s (skipped), `length` 4-byte offsets.
  *
  * @throws CorruptRepositoryException      when the magic header is wrong
  * @throws UnsupportedOperationException   when the index is not version 2
  */
def decode(bytes: Seq[Byte], packFile: PackFile): PackIndex = {
  val reader = new DataReader(bytes)

  // Confirm the header is correct. (`.toSeq` equality replaces the deprecated
  // `Array#deep` comparison of the original.)
  if (reader.take(4).toSeq != Seq(0xff, 0x74, 0x4f, 0x63).map(_.toByte)) throw new CorruptRepositoryException("Index file header signature is corrupt.")

  // Confirm the version.
  if (reader.take(4).toSeq != Seq(0, 0, 0, 2).map(_.toByte)) throw new UnsupportedOperationException("Older Pack Index file format is not supported.")

  // Create the fan-out table: 256 cumulative counts read in order from the stateful reader.
  val fanOutTable = List.fill(256)(Conversion.bytesToInt(reader.take(4)))

  // The object count is the last value of the fan-out table.
  val length = fanOutTable.last

  // Read the object id table.
  val objectIds = List.fill(length)(reader.takeObjectId())

  // Skip CRC32's for now.
  reader >> length * 4

  // Read the offsets.
  // TODO: Implement support for very large offsets (>4 GB pack files).
  val offsets = List.fill(length)(Conversion.bytesToInt(reader.take(4)))

  PackIndex(fanOutTable, objectIds, offsets, length, packFile)
}
/** Returns every pack index of the repository, reading and caching them on the first call. */
private[git] def findPackIndexes(repository: Repository): Seq[PackIndex] = {
  Cache.getPackIndexes(repository) match {
    case Some(indexes: Seq[PackIndex]) => indexes
    case _ =>
      val packDir = new File(repository.path + "/objects/pack")
      // `listFiles` returns null when the directory is missing or unreadable;
      // guard against it instead of throwing a NullPointerException.
      val files = Option(packDir.listFiles).getOrElse(Array.empty[File])
      val indexes = files.filter(_.getName.endsWith(".idx")).toVector.map { file =>
        val packName = file.getName.replace(".idx", ".pack")
        val pack = PackFile(new File(repository.path + s"/objects/pack/$packName"))
        PackIndex.decode(FileUtil.readContents(file), pack)
      }
      Cache.setPackIndexes(repository, indexes)
  }
}
} | kaisellgren/ScalaGit | src/main/scala/git/PackIndex.scala | Scala | mit | 3,409 |
/*
* Copyright 2014 porter <https://github.com/eikek/porter>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.spray.openid.provider
import scala.util.parsing.combinator.RegexParsers
import scala.util.Try
import java.io.InputStream
import scala.io.Source
object Mustache {
import Template._
import TemplateParser._
type Context = Map[String, Any]
def tryApply(str: String): Try[Template] = Try(makeTemplate(parse(str)))
def tryApply(in: InputStream): Try[Template] = Try {
val s = Source.fromInputStream(in).getLines().mkString("\\n")
tryApply(s).get
}
def apply(str: String): Template = tryApply(str).get
def apply(in: InputStream): Template = tryApply(in).get
private def makeTemplate(tokens: List[Token]): Template = {
def loop(ts: List[Token], template: Template = Template.empty): Template = {
ts match {
case Nil => template
case t::tx => t match {
case StringToken(s) => loop(tx, template ++ Text(s))
case LookupToken(n) => loop(tx, template ++ Lookup(n))
case BlockStart(n, i) =>
val (h, t) = tx.span(!_.isBlockEnd(n))
if (t == Nil) throw new Exception(s"No end block for '$n'")
loop(t, template ++ Block(n, i, loop(h)))
case BlockEnd(n) => loop(tx, template)
}
}
}
loop(tokens)
}
  /** A compiled template: a function from a rendering [[Context]] to output text. */
  trait Template extends (Context => String) {
    /** Sequential composition: render `this`, then `t`, concatenating the output. */
    def ++(t: Template): Template = Composite(Vector(this, t))
  }
  object Template {
    /** Identity element for `++`: renders nothing and composition with it yields the other side. */
    val empty = new Template {
      def apply(ctx: Context): String = ""
      override def ++(t: Template) = t
      override def toString() = "empty()"
    }
    /** Ordered sequence of sub-templates rendered back-to-back. */
    case class Composite(ts: Vector[Template]) extends Template {
      def apply(ctx: Context): String =
        // NOTE(review): StringBuilder.newBuilder is deprecated in newer Scala; fine on this version.
        ts.foldLeft(StringBuilder.newBuilder) { (s, t) => s append t(ctx) }.toString()
      override def ++(t: Template): Template = Composite(ts :+ t)
    }
    /** Literal text, rendered verbatim regardless of context. */
    case class Text(s: String) extends Template {
      def apply(v1: Mustache.Context): String = s
    }
    /** `{{name}}`: renders the context value's toString, or "" when the key is absent. */
    case class Lookup(name: String) extends Template {
      def apply(ctx: Context): String = ctx.get(name) match {
        case Some(a) => a.toString
        case _ => ""
      }
    }
    /** `{{#name}}...{{/name}}` section (or `{{^name}}` when `inverse`).
      *
      * A normal section renders `inner` once per derived sub-context; an inverse
      * section renders `inner` only when the value is missing or "empty".
      */
    case class Block(name: String, inverse: Boolean, inner: Template) extends Template {
      // Values treated as falsy, mirroring Mustache's notion of an empty section.
      private val emptyValues = Set("", false, None, 0, null, Map(), List())
      /** Derives the sub-context(s) `inner` is rendered against.
        *
        * NOTE(review): the `asInstanceOf` casts on `Map`/`Iterable` are unchecked
        * (erasure); a non-String-keyed map would surface later as a ClassCastException.
        * Throws when `name` is absent — `apply` below swallows that via `Try`.
        */
      private def createContext(context: Context, name: String): Seq[Context] = {
        context(name) match {
          case map: Map[_, _] => Seq(map.asInstanceOf[Map[String, Any]])
          case seq: Iterable[_] =>
            seq.headOption match {
              case Some(h : Map[_, _]) => seq.asInstanceOf[Seq[Context]]
              // Scalar elements become one-entry contexts keyed by "." (the implicit iterator variable).
              case Some(h) => seq.map(e => Map("." -> e)).toList
              case _ => Seq.empty
            }
          case obj if !emptyValues.contains(obj) => Seq(context)
          case _ => Seq.empty
        }
      }
      def apply(context: Context): String = {
        if (inverse) {
          // Inverse section: render only when the value is missing or falsy.
          context.get(name) match {
            case Some(x) if !emptyValues.contains(x) => ""
            case _ => inner(context)
          }
        } else {
          // Missing key (or any createContext failure) degrades to rendering nothing.
          val seq = Try(createContext(context, name)).getOrElse(Seq.empty[Context])
          seq.foldLeft("") { (s, ctx) => s + inner(ctx) }
        }
      }
    }
  }
  /** Lexes a mustache template string into a flat [[TemplateParser.Token]] stream. */
  object TemplateParser {
    sealed trait Token {
      /** True only for a [[BlockEnd]] whose section name matches `name`. */
      def isBlockEnd(name: String): Boolean = false
    }
    /** `{{#name}}` (or `{{^name}}` when `inverse`). */
    case class BlockStart(name: String, inverse: Boolean) extends Token
    /** `{{/name}}`. */
    case class BlockEnd(name: String) extends Token {
      override def isBlockEnd(name: String): Boolean = this.name == name
    }
    /** `{{name}}` variable interpolation. */
    case class LookupToken(name: String) extends Token
    /** Literal text between tags. */
    case class StringToken(str: String) extends Token
    def parse(templateStr: String): List[Token] = Parse.parse(templateStr)
    private object Parse extends RegexParsers {
      // Whitespace is significant inside templates, so do not skip it.
      override def skipWhitespace = false
      val deliStart = "{{"
      val deliEnd = "}}"
      val sectionName = "[\\\\w\\\\s\\\\.\\\\-_]+".r
      // Any run of characters (incl. newlines) that does not begin a tag.
      val arbitraryText = rep1(not(deliStart) ~> ".|\\r|\\n".r) ^^ { s => StringToken(s.mkString) }
      val blockStart = deliStart ~ ("#" | "^") ~ sectionName ~ deliEnd ^^ {
        case d1 ~ t ~ section ~ d2 => BlockStart(section, inverse = t == "^")
      }
      val blockEnd = deliStart ~ "/" ~ sectionName ~ deliEnd ^^ {
        case d1 ~ x ~ section ~ d2 => BlockEnd(section)
      }
      val contextLookup = deliStart ~ sectionName ~ deliEnd ^^ {
        case d1 ~ name ~ d2 => LookupToken(name)
      }
      // Order matters: lookup is tried first, then block markers, then raw text.
      val token = contextLookup | blockStart | blockEnd | arbitraryText
      def tokens = rep(token)
      def parse(template: String): List[Token] = parseAll(tokens, template) match {
        case Success(r, _) => r
        case Failure(msg, next) => throw new Exception(msg+": "+next.source + "@"+next.pos.line+":"+next.pos.column)
        case Error(msg, next) => throw new Exception(msg+": "+next.source + "@"+next.pos.line+":"+next.pos.column)
      }
    }
  }
} | eikek/spray-openid | src/main/scala/org/eknet/spray/openid/provider/Mustache.scala | Scala | apache-2.0 | 5,524 |
package org.scalarules.dsl.nl.grammar
import org.scalarules.facts.SingularFact
import org.scalarules.finance.nl.{Bedrag, Percentage}
import org.scalatest.{FlatSpec, Matchers}
/** Compile-time checks for the arithmetic DSL: verifies which operator/operand
  * combinations across Bedrag (amount), BigDecimal, and Percentage are accepted
  * by the type system, and that the disallowed ones are rejected.
  */
class DslEvaluationTest extends FlatSpec with Matchers {
  val sutBedrag = new SingularFact[Bedrag]("testFactBedrag")
  val sutBigDecimal = new SingularFact[BigDecimal]("testFactBigDecimal")
  val sutString = new SingularFact[String]("testFactString")
  val sutPercentage = new SingularFact[Percentage]("testFactPercentage")
  // Positive cases: these expressions merely have to type-check; their values are discarded.
  it should "compile" in {
    -sutBedrag
    sutBedrag + sutBedrag
    sutBedrag - sutBedrag
    sutBedrag / sutBedrag
    sutBedrag / sutBigDecimal
    sutBedrag * sutBigDecimal
    -sutBigDecimal
    sutBigDecimal + sutBigDecimal
    sutBigDecimal - sutBigDecimal
    sutBigDecimal * sutBedrag
    sutBigDecimal / sutBigDecimal
    sutBigDecimal * sutBigDecimal
    sutBigDecimal * sutPercentage
    sutPercentage * sutBigDecimal
    sutBedrag * sutPercentage
    sutPercentage * sutBedrag
  }
  // Negative cases: ScalaTest's `shouldNot compile` asserts each snippet is a type error.
  it should "not compile" in {
    "-sutString" shouldNot compile
    "sutBedrag + sutString" shouldNot compile
    "sutBedrag + sutBigDecimal" shouldNot compile
    "sutBedrag - sutBigDecimal" shouldNot compile
    "sutBedrag * sutBedrag" shouldNot compile
    "sutBigDecimal + sutBedrag" shouldNot compile
    "sutBigDecimal - sutBedrag" shouldNot compile
    "-sutPercentage" shouldNot compile
    "sutPercentage + sutPercentage" shouldNot compile
    "sutPercentage - sutPercentage" shouldNot compile
  }
}
| scala-rules/rule-engine | engine/src/test/scala/org/scalarules/dsl/nl/grammar/DslEvaluationTest.scala | Scala | mit | 1,513 |
package pl.touk.nussknacker.engine.api.deployment
import org.scalatest.{FunSpec, Inside, Matchers}
import pl.touk.nussknacker.engine.api.deployment.simple.{SimpleProcessStateDefinitionManager, SimpleStateStatus}
import pl.touk.nussknacker.engine.deployment.ExternalDeploymentId
import scala.collection.immutable.List
/** Verifies that SimpleProcessStateDefinitionManager derives the expected status
  * flags and allowed actions for each scenario state.
  */
class SimpleProcessStateSpec extends FunSpec with Matchers with Inside {
  // Builds a ProcessState for the given status with a fixed dummy deployment id.
  def createProcessState(stateStatus: StateStatus): ProcessState =
    SimpleProcessStateDefinitionManager.processState(stateStatus, Some(ExternalDeploymentId("12")))
  it ("scenario state should be during deploy") {
    val state = createProcessState(SimpleStateStatus.DuringDeploy)
    state.status.isDuringDeploy shouldBe true
    state.allowedActions shouldBe List(ProcessActionType.Cancel)
  }
  it ("scenario state should be running") {
    val state = createProcessState(SimpleStateStatus.Running)
    state.status.isRunning shouldBe true
    state.allowedActions shouldBe List(ProcessActionType.Cancel, ProcessActionType.Pause, ProcessActionType.Deploy)
  }
  it ("scenario state should be finished") {
    val state = createProcessState(SimpleStateStatus.Finished)
    state.status.isFinished shouldBe true
    state.allowedActions shouldBe List(ProcessActionType.Deploy, ProcessActionType.Archive)
  }
}
| TouK/nussknacker | ui/deployment-manager-api/src/test/scala/pl/touk/nussknacker/engine/api/deployment/SimpleProcessStateSpec.scala | Scala | apache-2.0 | 1,311 |
package enumeratum.values
/** Created by Lloyd on 4/12/16.
*
* Copyright 2016
*/
/** Short-keyed enum entry; `name` is an extra payload, not the lookup key. */
sealed abstract class Drinks(val value: Short, name: String) extends ShortEnumEntry
case object Drinks extends ShortEnum[Drinks] {
  case object OrangeJuice extends Drinks(value = 1, name = "oj")
  case object AppleJuice extends Drinks(value = 2, name = "aj")
  case object Cola extends Drinks(value = 3, name = "cola")
  case object Beer extends Drinks(value = 4, name = "beer")
  // Collected by macro: picks up only the members declared inside this object.
  val values = findValues
}
// Deliberately declared OUTSIDE the companion, so `findValues` does not include it in
// `Drinks.values`. Presumably a fixture for enumeratum's tests of exactly that
// behavior — confirm before moving it inside.
case object CoughSyrup extends Drinks(5, "cough-syrup")
| lloydmeta/enumeratum | enumeratum-test/src/main/scala/enumeratum/values/Drinks.scala | Scala | mit | 572 |
/**
* Copyright 2013 Alex Jones
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with work for additional information
* regarding copyright ownership. The ASF licenses file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package controllers
import play.api.mvc.{Action, BaseController}
import scala.concurrent.{ExecutionContext, Future}
/**
* A trait that defines a secret action that is kept secret(ish) by its path containing a random string
* @author alex
*
*/
trait Secret extends BaseController {
  /**
   * The secret token whose value must appear in the request path for a
   * [[Secret]]-wrapped action to execute.
   */
  val secret: SecretToken
def Secret[A](secretPayload: String)(action: Action[A])(implicit ec: ExecutionContext): Action[A] =
Action.async(action.parser) { request =>
if (secret.token == secretPayload) {
action(request)
} else {
Future(NotFound)
}
}
} | unclealex72/west-ham-calendar | app/controllers/Secret.scala | Scala | apache-2.0 | 1,460 |
//===========================================================================
// Copyright 2014 Delving B.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//===========================================================================
package dataset
import java.io.File
import akka.actor.SupervisorStrategy.Stop
import akka.actor._
import analysis.Analyzer
import analysis.Analyzer.{AnalysisComplete, AnalyzeFile}
import dataset.DatasetActor._
import dataset.DsInfo.DsState._
import dataset.SourceRepo.SourceFacts
import harvest.Harvester
import harvest.Harvester.{HarvestAdLib, HarvestComplete, HarvestPMH}
import harvest.Harvesting.HarvestType._
import mapping.CategoryCounter.{CategoryCountComplete, CountCategories}
import mapping.Skosifier.SkosificationComplete
import mapping.{CategoryCounter, Skosifier}
import organization.OrgContext
import org.apache.commons.io.FileUtils._
import org.joda.time.DateTime
import play.api.Logger
import play.api.libs.json.{Json, Writes}
import record.SourceProcessor
import record.SourceProcessor._
import services.ProgressReporter.ProgressState._
import services.ProgressReporter.ProgressType._
import services.ProgressReporter.{ProgressState, ProgressType}
import services.{MailService, ProgressReporter}
import triplestore.GraphProperties._
import triplestore.GraphSaver
import triplestore.GraphSaver.{GraphSaveComplete, SaveGraphs}
import triplestore.Sparql.SkosifiedField
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext}
import scala.language.postfixOps
import scala.util.Try
/** Companion: FSM states/data and the message protocol for [[DatasetActor]]. */
object DatasetActor {
  // state machine — one state per long-running kind of work; Idle accepts commands.
  sealed trait DatasetActorState
  case object Idle extends DatasetActorState
  case object Harvesting extends DatasetActorState
  case object Adopting extends DatasetActorState
  case object Analyzing extends DatasetActorState
  case object Generating extends DatasetActorState
  case object Processing extends DatasetActorState
  case object Saving extends DatasetActorState
  case object Skosifying extends DatasetActorState
  case object Categorizing extends DatasetActorState
  // FSM state data: Dormant (nothing running), Active (a child worker plus progress
  // snapshot), or InError (sticky until a "clear error" command).
  trait DatasetActorData
  case object Dormant extends DatasetActorData
  case class Active(spec: String,
                    childOpt: Option[ActorRef],
                    progressState: ProgressState,
                    progressType: ProgressType = TYPE_IDLE,
                    count: Int = 0,
                    interrupt: Boolean = false)
    extends DatasetActorData
  // JSON shape broadcast to web clients while work is in progress.
  implicit val activeWrites = new Writes[Active] {
    def writes(active: Active) = Json.obj(
      "datasetSpec" -> active.spec,
      "progressState" -> active.progressState.toString,
      "progressType" -> active.progressType.toString,
      "count" -> active.count
    )
  }
  case class InError(error: String) extends DatasetActorData
  // messages to receive
  /** How a harvest selects records: incremental since a date, a sample, or full. */
  trait HarvestStrategy
  case class ModifiedAfter(mod: DateTime, justDate: Boolean)
    extends HarvestStrategy
  case object Sample extends HarvestStrategy
  case object FromScratch extends HarvestStrategy
  case object FromScratchIncremental extends HarvestStrategy
  case class StartHarvest(strategy: HarvestStrategy)
  /** Free-form UI command, dispatched by name in the Idle state. */
  case class Command(name: String)
  case class StartAnalysis(processed: Boolean)
  /** Incremental-work handle: optional cutoff date plus the file produced by a harvest. */
  case class Scheduled(modifiedAfter: Option[DateTime], file: File)
  case class StartProcessing(scheduledOpt: Option[Scheduled])
  case class StartSaving(scheduledOpt: Option[Scheduled])
  case class StartSkosification(skosifiedField: SkosifiedField)
  case object StartCategoryCounting
  /** Reported by children (or self) on failure; moves the FSM to InError. */
  case class WorkFailure(message: String,
                         exceptionOpt: Option[Throwable] = None)
  /** Periodic progress report from a child worker, rebroadcast to web clients. */
  case class ProgressTick(reporterOpt: Option[ProgressReporter],
                          progressState: ProgressState,
                          progressType: ProgressType = TYPE_IDLE,
                          count: Int = 0)
  def props(datasetContext: DatasetContext,
            mailService: MailService,
            orgContext: OrgContext,
            harvestingExecutionContext: ExecutionContext) =
    Props(classOf[DatasetActor],
      datasetContext,
      mailService,
      orgContext,
      harvestingExecutionContext)
}
class DatasetActor(val datasetContext: DatasetContext,
mailService: MailService,
orgContext: OrgContext,
harvestingExecutionContext: ExecutionContext)
extends LoggingFSM[DatasetActorState, DatasetActorData]
with ActorLogging {
  import context.dispatcher
  // Any child crash is converted into a WorkFailure to self (driving the FSM to
  // InError) and the child is stopped.
  override val supervisorStrategy = OneForOneStrategy() {
    case throwable: Throwable =>
      self ! WorkFailure(s"Child failure: $throwable", Some(throwable))
      Stop
  }
  val dsInfo = datasetContext.dsInfo
  // Error persisted from a previous run, if any; restored into InError below.
  val errorMessage = dsInfo.getLiteralProp(datasetErrorMessage).getOrElse("")
  // Fans a message out to every connected web client's flow actor.
  def broadcastRaw(message: Any) =
    context.system.actorSelection("/user/*/flowActor") ! message
  def broadcastIdleState() = broadcastRaw(datasetContext.dsInfo)
  def broadcastProgress(active: Active) = broadcastRaw(active)
  // Start Idle; a persisted error makes the actor sticky-InError until cleared.
  startWith(Idle, if (errorMessage.nonEmpty) InError(errorMessage) else Dormant)
  // Idle/Dormant is the only state that accepts new work. String commands arrive
  // from the UI; each handler either mutates the dataset synchronously (replying
  // with a status string in the log) or sends a message to self that transitions
  // into one of the working states below.
  when(Idle) {
    case Event(Command(commandName), Dormant) =>
      val replyTry = Try {
        // Prepares the source repo and fires StartHarvest; answers a status string.
        def startHarvest(strategy: HarvestStrategy) = {
          val harvestTypeStringOpt = dsInfo.getLiteralProp(harvestType)
          log.info(s"Start harvest $strategy, type is $harvestTypeStringOpt")
          val harvestTypeOpt =
            harvestTypeStringOpt.flatMap(harvestTypeFromString)
          harvestTypeOpt.map { harvestType =>
            log.info(s"Starting harvest $strategy with type $harvestType")
            // NOTE(review): both branches of this match are identical (leftover from
            // the commented-out FromScratchIncremental case) — could be collapsed.
            strategy match {
              case FromScratch =>
                datasetContext.sourceRepoOpt match {
                  case Some(sourceRepo) =>
                    sourceRepo.clearData()
                  case None =>
                    // no source repo, use the default for the harvest type
                    datasetContext.createSourceRepo(SourceFacts(harvestType))
                }
              case _ =>
                //case FromScratchIncremental =>
                datasetContext.sourceRepoOpt match {
                  case Some(sourceRepo) =>
                    sourceRepo.clearData()
                  case None =>
                    // no source repo, use the default for the harvest type
                    datasetContext.createSourceRepo(SourceFacts(harvestType))
                }
              //case _ =>
            }
            self ! StartHarvest(strategy)
            "harvest started"
          } getOrElse {
            val message =
              s"Unable to harvest $datasetContext: unknown harvest type [$harvestTypeStringOpt]"
            self ! WorkFailure(message, None)
            message
          }
        }
        commandName match {
          // NOTE(review): the Await.ready calls below block this actor's thread for
          // up to 2 minutes — acceptable only because the actor is otherwise idle.
          case "delete" =>
            Await.ready(datasetContext.dsInfo.dropDataset, 2.minutes)
            deleteQuietly(datasetContext.rootDir)
            datasetContext.sipFiles.foreach(_.delete())
            self ! PoisonPill
            "deleted"
          case "delete records" =>
            Await.ready(datasetContext.dropRecords, 2.minutes)
            broadcastIdleState()
            "deleted records"
          case "delete index" =>
            Await.ready(datasetContext.dropIndex, 2.minutes)
            broadcastIdleState()
            "deleted index"
          case "disable dataset" =>
            Await.ready(datasetContext.disableDataSet, 2.minutes)
            broadcastIdleState()
            "disabled dataset"
          case "remove raw" =>
            datasetContext.dropRaw()
            broadcastIdleState()
            "raw removed"
          case "remove source" =>
            datasetContext.dropSourceRepo()
            broadcastIdleState()
            "source removed"
          case "remove processed" =>
            datasetContext.dropProcessedRepo()
            broadcastIdleState()
            "processed data removed"
          case "remove tree" =>
            datasetContext.dropTree()
            broadcastIdleState()
            "tree removed"
          case "start sample harvest" =>
            startHarvest(Sample)
          case "start first harvest" =>
            startHarvest(FromScratch)
          case "start generating sip" =>
            self ! GenerateSipZip
            "sip generation started"
          case "start processing" =>
            self ! StartProcessing(None)
            "processing started"
          case "start raw analysis" =>
            datasetContext.dropTree()
            self ! StartAnalysis(processed = false)
            "analysis started"
          case "start processed analysis" =>
            datasetContext.dropTree()
            self ! StartAnalysis(processed = true)
            "analysis started"
          case "start saving" =>
            // full save, not incremental
            self ! StartSaving(None)
            "saving started"
          case "start skosification" =>
            // NOTE(review): StartSkosification is a case class taking a SkosifiedField;
            // this sends its COMPANION OBJECT, which no handler pattern-matches, so the
            // message falls through to the unhandled catch-all. Likely a latent bug.
            self ! StartSkosification
            "skosification started"
          case "refresh" =>
            log.info("refresh")
            broadcastIdleState()
            "refreshed"
          // todo: category counting?
          case _ =>
            log.warning(s"$this sent unrecognized command $commandName")
            "unrecognized"
        }
      } recover {
        case e: Exception =>
          log.error(e, "Command exception")
          s"exception: $e"
      }
      val replyString: String = replyTry.getOrElse(s"unrecovered exception")
      log.info(s"Command $commandName: $replyString")
      stay()
    case Event(StartHarvest(strategy), Dormant) =>
      datasetContext.dropTree()
      def prop(p: NXProp) = dsInfo.getLiteralProp(p).getOrElse("")
      harvestTypeFromString(prop(harvestType)).map { harvestType =>
        log.info(s"Starting harvest $strategy with type $harvestType")
        // NOTE(review): as in startHarvest above, both branches are identical.
        strategy match {
          case FromScratch =>
            datasetContext.sourceRepoOpt match {
              case Some(sourceRepo) =>
                sourceRepo.clearData()
              case None =>
                // no source repo, use the default for the harvest type
                datasetContext.createSourceRepo(SourceFacts(harvestType))
            }
          //case FromScratchIncremental =>
          case _ =>
            datasetContext.sourceRepoOpt match {
              case Some(sourceRepo) =>
                sourceRepo.clearData()
              case None =>
                // no source repo, use the default for the harvest type
                datasetContext.createSourceRepo(SourceFacts(harvestType))
            }
          //case _ =>
        }
        // Harvest endpoint configuration read from the dataset's stored properties.
        val (url, ds, pre, se, recordId) = (prop(harvestURL),
          prop(harvestDataset),
          prop(harvestPrefix),
          prop(harvestSearch),
          prop(harvestRecord))
        val kickoff = harvestType match {
          case PMH => HarvestPMH(strategy, url, ds, pre, recordId)
          case ADLIB => HarvestAdLib(strategy, url, ds, se)
        }
        val harvester =
          context.actorOf(Harvester.props(datasetContext,
            orgContext.appConfig.harvestTimeOut,
            orgContext.wsApi,
            harvestingExecutionContext),
            "harvester")
        harvester ! kickoff
        goto(Harvesting) using Active(dsInfo.spec, Some(harvester), HARVESTING)
      } getOrElse {
        stay() using InError("Unable to determine harvest type")
      }
    case Event(AdoptSource(file, orgContext), Dormant) =>
      val sourceProcessor =
        context.actorOf(SourceProcessor.props(datasetContext, orgContext),
          "source-adopter")
      sourceProcessor ! AdoptSource(file, orgContext)
      goto(Adopting) using Active(dsInfo.spec, Some(sourceProcessor), ADOPTING)
    case Event(GenerateSipZip, Dormant) =>
      val sourceProcessor =
        context.actorOf(SourceProcessor.props(datasetContext, orgContext),
          "source-generator")
      sourceProcessor ! GenerateSipZip
      goto(Generating) using Active(dsInfo.spec,
        Some(sourceProcessor),
        GENERATING)
    case Event(StartAnalysis(processed), Dormant) =>
      log.info(s"Start analysis processed=$processed")
      // Analyze either the processed output or the raw upload, whichever was asked for.
      if (processed) {
        val analyzer =
          context.actorOf(Analyzer.props(datasetContext), "analyzer-processed")
        analyzer ! AnalyzeFile(datasetContext.processedRepo.baseOutput.xmlFile,
          processed)
        goto(Analyzing) using Active(dsInfo.spec, Some(analyzer), SPLITTING)
      } else {
        val rawFile = datasetContext.rawXmlFile.getOrElse(
          throw new Exception(s"Unable to find 'raw' file to analyze"))
        val analyzer =
          context.actorOf(Analyzer.props(datasetContext), "analyzer-raw")
        analyzer ! AnalyzeFile(rawFile, processed = false)
        goto(Analyzing) using Active(dsInfo.spec, Some(analyzer), SPLITTING)
      }
    case Event(StartProcessing(scheduledOpt), Dormant) =>
      val sourceProcessor =
        context.actorOf(SourceProcessor.props(datasetContext, orgContext),
          "source-processor")
      sourceProcessor ! Process(scheduledOpt)
      goto(Processing) using Active(dsInfo.spec,
        Some(sourceProcessor),
        PROCESSING)
    case Event(StartSaving(scheduledOpt), Dormant) =>
      val graphSaver =
        context.actorOf(GraphSaver.props(datasetContext, orgContext),
          "graph-saver")
      graphSaver ! SaveGraphs(scheduledOpt)
      goto(Saving) using Active(dsInfo.spec, Some(graphSaver), PROCESSING)
    case Event(StartSkosification(skosifiedField), Dormant) =>
      val skosifier =
        context.actorOf(Skosifier.props(dsInfo, orgContext), "skosifier")
      skosifier ! skosifiedField
      goto(Skosifying) using Active(dsInfo.spec, Some(skosifier), SKOSIFYING)
    case Event(StartCategoryCounting, Dormant) =>
      if (datasetContext.processedRepo.nonEmpty) {
        implicit val ts = orgContext.ts
        val categoryCounter =
          context.actorOf(CategoryCounter.props(dsInfo,
            datasetContext.processedRepo,
            orgContext),
            "category-counter")
        categoryCounter ! CountCategories
        goto(Categorizing) using Active(dsInfo.spec,
          Some(categoryCounter),
          CATEGORIZING)
      } else {
        stay() using InError(s"No source file for categorizing $datasetContext")
      }
  }
  // === Working-state handlers: each consumes its child's completion message,    ===
  // === updates dataset state flags, kills the child, and returns to Idle        ===
  // === (possibly chaining the next stage first).                                ===
  when(Harvesting) {
    case Event(HarvestComplete(strategy, fileOpt, noRecordsMatch),
        active: Active) =>
      // Shared follow-up for incremental harvests: records state flags, then chains
      // processing (when a mapper exists) or sip generation on the harvested file.
      def processIncremental(fileOpt: Option[File],
                             noRecordsMatch: Boolean,
                             mod: Option[DateTime]) = {
        noRecordsMatch match {
          case true =>
            Logger.debug(
              "NoRecordsMatch, so setting state to Incremental Saved")
            dsInfo.setState(INCREMENTAL_SAVED)
            if (dsInfo
                  .getLiteralProp(harvestIncrementalMode)
                  .getOrElse("false") != "true") {
              dsInfo.setHarvestIncrementalMode(true)
            }
          case _ =>
            dsInfo.removeState(SAVED)
            dsInfo.removeState(ANALYZED)
            dsInfo.removeState(INCREMENTAL_SAVED)
            dsInfo.removeState(PROCESSED)
            dsInfo.removeState(PROCESSABLE)
            dsInfo.setState(SOURCED)
            dsInfo.setLastHarvestTime(incremental = true)
        }
        dsInfo.setHarvestCron(dsInfo.currentHarvestCron)
        fileOpt match {
          case Some(file) =>
            if (datasetContext.sipMapperOpt.isDefined) {
              log.info(s"There is a mapper, so trigger processing")
              self ! StartProcessing(Some(Scheduled(mod, file)))
            } else {
              log.info("No mapper, so generating sip zip only")
              self ! GenerateSipZip
            }
          case None =>
            log.info("No incremental file, back to sleep")
        }
      }
      strategy match {
        case Sample =>
          dsInfo.setState(RAW)
        case FromScratch =>
          dsInfo.removeState(SAVED)
          dsInfo.removeState(ANALYZED)
          dsInfo.removeState(INCREMENTAL_SAVED)
          dsInfo.removeState(PROCESSED)
          dsInfo.setState(SOURCED)
          dsInfo.setLastHarvestTime(incremental = false)
        case ModifiedAfter(mod, _) =>
          processIncremental(fileOpt, noRecordsMatch, Some(mod))
        case FromScratchIncremental =>
          processIncremental(fileOpt, noRecordsMatch, None)
          dsInfo.updatedSpecCountFromFile(dsInfo.spec,
            orgContext.appConfig.narthexDataDir,
            orgContext.appConfig.orgId)
      }
      active.childOpt.foreach(_ ! PoisonPill)
      goto(Idle) using Dormant
  }
  when(Adopting) {
    case Event(SourceAdoptionComplete(file), active: Active) =>
      dsInfo.setState(SOURCED)
      // A previously uploaded sip mapping makes the fresh source processable.
      if (datasetContext.sipRepo.latestSipOpt.isDefined)
        dsInfo.setState(PROCESSABLE)
      datasetContext.dropTree()
      active.childOpt.foreach(_ ! PoisonPill)
      self ! GenerateSipZip
      goto(Idle) using Dormant
  }
  when(Generating) {
    case Event(SipZipGenerationComplete(recordCount), active: Active) =>
      log.info(s"Generated $recordCount pockets")
      dsInfo.setState(MAPPABLE)
      dsInfo.setRecordCount(recordCount)
      if (datasetContext.sipMapperOpt.isDefined) {
        log.info(s"There is a mapper, so setting to processable")
        dsInfo.setState(PROCESSABLE)
      } else {
        log.info("No mapper, not processing")
      }
      // todo: figure this out
      // val rawFile = datasetContext.createRawFile(datasetContext.pocketFile.getName)
      // FileUtils.copyFile(datasetContext.pocketFile, rawFile)
      // db.setStatus(RAW_POCKETS)
      active.childOpt.foreach(_ ! PoisonPill)
      goto(Idle) using Dormant
  }
  when(Analyzing) {
    case Event(AnalysisComplete(errorOption, processed), active: Active) =>
      val dsState = if (processed) ANALYZED else RAW_ANALYZED
      // On failure, drop the partial tree so the UI doesn't show stale analysis.
      if (errorOption.isDefined) {
        dsInfo.removeState(dsState)
        datasetContext.dropTree()
      } else {
        dsInfo.setState(dsState)
      }
      active.childOpt.foreach(_ ! PoisonPill)
      goto(Idle) using Dormant
  }
  when(Processing) {
    case Event(ProcessingComplete(validRecords, invalidRecords, scheduledOpt),
        active: Active) =>
      dsInfo.setState(PROCESSED)
      if (scheduledOpt.isDefined) {
        // Incremental run: chain straight into saving the produced graphs.
        dsInfo.setIncrementalProcessedRecordCounts(validRecords, invalidRecords)
        //dsInfo.setState(PROCESSABLE)
        val graphSaver =
          context.actorOf(GraphSaver.props(datasetContext, orgContext),
            "graph-saver")
        graphSaver ! SaveGraphs(scheduledOpt)
        // NOTE(review): INCREMENTAL_SAVED is set before GraphSaveComplete arrives —
        // confirm this early flag is intentional.
        dsInfo.setState(INCREMENTAL_SAVED)
        active.childOpt.foreach(_ ! PoisonPill)
        goto(Saving) using Active(dsInfo.spec, Some(graphSaver), SAVING)
      } else {
        dsInfo.removeState(INCREMENTAL_SAVED)
        dsInfo.setProcessedRecordCounts(validRecords, invalidRecords)
        mailService.sendProcessingCompleteMessage(dsInfo.spec,
          dsInfo.processedValidVal,
          dsInfo.processedInvalidVal)
        active.childOpt.foreach(_ ! PoisonPill)
        goto(Idle) using Dormant
      }
  }
  when(Saving) {
    case Event(GraphSaveComplete, active: Active) =>
      dsInfo.setState(SAVED)
      // Mark downstream record index as stale so it gets re-synced.
      dsInfo.setRecordsSync(false)
      active.childOpt.foreach(_ ! PoisonPill)
      goto(Idle) using Dormant
  }
  when(Skosifying) {
    case Event(SkosificationComplete(skosifiedField), active: Active) =>
      log.info(s"Skosification complete: $skosifiedField")
      dsInfo.setProxyResourcesSync(false)
      dsInfo.setRecordsSync(false)
      active.childOpt.foreach(_ ! PoisonPill)
      goto(Idle) using Dormant
  }
  when(Categorizing) {
    case Event(CategoryCountComplete(spec, categoryCounts), active: Active) =>
      log.info(s"Category counting complete: $spec")
      // Counts are aggregated by the parent, not stored on this dataset.
      context.parent ! CategoryCountComplete(spec, categoryCounts)
      active.childOpt.foreach(_ ! PoisonPill)
      goto(Idle) using Dormant
  }
  // Fallbacks for messages not matched by the current state's handler: progress
  // ticks, failures, sticky error handling, and commands arriving mid-work.
  whenUnhandled {
    // this is because PeriodicSkosifyCheck may send multiple for us. he'll be back
    case Event(StartSkosification(skosifiedField), active: Active) =>
      log.info(s"Ignoring skosification work for now: $skosifiedField")
      stay()
    case Event(tick: ProgressTick, active: Active) =>
      if (active.interrupt) {
        // User asked to stop: tell the reporter to interrupt, keep state unchanged.
        tick.reporterOpt.foreach(_.interrupt())
        stay() using active
      } else {
        val nextActive = active.copy(progressState = tick.progressState,
          progressType = tick.progressType,
          count = tick.count)
        broadcastProgress(nextActive)
        stay() using nextActive
      }
    case Event(Command(commandName), InError(message)) =>
      // While InError only "clear error" does anything; either way the semaphore
      // slot held for this dataset is released.
      log.info(s"In error. Command name: $commandName")
      if (commandName == "clear error") {
        dsInfo.removeLiteralProp(datasetErrorMessage)
        log.info(s"clear error so releasing semaphore if set")
        orgContext.semaphore.release(dsInfo.spec)
        goto(Idle) using Dormant
      } else {
        log.info(s"in error so releasing semaphore if set")
        orgContext.semaphore.release(dsInfo.spec)
        stay()
      }
    case Event(whatever, InError(_)) =>
      log.info(s"Not interested in: $whatever")
      log.info(s"in error so releasing semaphore if set")
      orgContext.semaphore.release(dsInfo.spec)
      stay()
    case Event(Command(commandName), active: Active) =>
      if (commandName == "refresh") {
        log.warning("refresh unhandled command")
        stay()
      } else {
        // Any other command during active work aborts the worker and resets the FSM.
        log.warning(s"Active unhandled Command name: $commandName (reset to idle/dormant)")
        // kill active actors
        active.childOpt.foreach(_ ! PoisonPill)
        goto(Idle) using Dormant
      }
    case Event(WorkFailure(message, exceptionOpt), active: Active) =>
      log.warning(s"Work failure [$message] while in [$active]")
      dsInfo.setError(s"While $stateName, failure: $message")
      exceptionOpt match {
        case Some(exception) => log.error(exception, message)
        case None => log.error(message)
      }
      mailService.sendProcessingErrorMessage(dsInfo.spec, message, exceptionOpt)
      active.childOpt.foreach(_ ! PoisonPill)
      goto(Idle) using InError(message)
    case Event(WorkFailure(message, exceptionOpt), _) =>
      log.warning(s"Work failure $message while dormant")
      exceptionOpt match {
        case Some(exception) => log.error(exception, message)
        case None => log.error(message)
      }
      dsInfo.setError(s"While not active, failure: $message")
      goto(Idle) using InError(message)
    case Event(request, data) =>
      log.warning(s"Unhandled request $request in state $stateName/$data")
      stay()
  }
  // Whenever we come back to Idle, push the dataset's current info to web clients.
  onTransition {
    case _ -> Idle => broadcastIdleState()
  }
}
| delving/narthex | app/dataset/DatasetActor.scala | Scala | apache-2.0 | 24,317 |
package com.rainysoft.valuation
/** A financial contract.
* Created by mikael.ohman on 23/11/14.
*/
/** A financial contract, expressed compositionally (Peyton Jones-style combinators).
  * Every contract knows the last date on which it is meaningful.
  */
sealed trait Contract {
  /** Expiry in days since the UNIX epoch; `Long.MaxValue` means "never expires". */
  def expiry: Long
}
/** One unit of account (the primitive contract worth exactly 1).
  *
  */
case class One() extends Contract {
  // Never expires.
  override def expiry: Long = Long.MaxValue
}
/** Enforces acquiring the contract at expiry.
  *
  * @param c The contract.
  */
case class Get(c: Contract) extends Contract {
  override def expiry: Long = c.expiry
}
/** Truncates the contract to a specific expiry.
  *
  * @param t The new expiry in days since the UNIX epoch.
  * @param c The truncated contract.
  */
case class Truncate(t: Long, c: Contract) extends Contract {
  // NOTE(review): t is returned unconditionally, even if it is later than c.expiry.
  override def expiry: Long = t
}
/** Short (the negated position of) the specified contract.
  *
  * @param c The contract to short.
  */
case class Short(c: Contract) extends Contract {
  override def expiry: Long = c.expiry
}
/** Scales the contract by the value of a 1D instrument.
  *
  * @param id The instrument id.
  * @param c The contract to scale.
  */
case class Scale1d(id: Long, c: Contract) extends Contract {
  override def expiry: Long = c.expiry
}
/** Scales the contract by the value of a 2D instrument.
  *
  * @param id The instrument id.
  * @param c The contract to scale.
  * @param x The x parameter.
  */
case class Scale2d(id: Long, c: Contract, x: Double) extends Contract {
  override def expiry: Long = c.expiry
}
/** Scales the contract by the value of a 3D instrument.
  *
  * @param id The instrument id.
  * @param c The contract to scale.
  * @param x The x parameter.
  * @param y The y parameter.
  */
case class Scale3d(id: Long, c: Contract, x: Double, y: Double) extends Contract {
  override def expiry: Long = c.expiry
} | MikaelUmaN/Valuation | src/main/scala/com/rainysoft/valuation/Contract.scala | Scala | mit | 1,655 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.features
import scala.collection.JavaConverters._
import io.fabric8.kubernetes.api.model.{ContainerPort, ContainerPortBuilder, LocalObjectReferenceBuilder, Quantity}
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.k8s.{KubernetesTestConf, SparkPod}
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.features.KubernetesFeaturesTestUtils.TestResourceInformation
import org.apache.spark.deploy.k8s.submit._
import org.apache.spark.internal.config._
import org.apache.spark.internal.config.UI._
import org.apache.spark.resource.{ResourceID, ResourceProfile}
import org.apache.spark.resource.ResourceUtils._
import org.apache.spark.util.Utils
class BasicDriverFeatureStepSuite extends SparkFunSuite {
  // Fixture values shared by the driver-pod configuration tests below.
  private val CUSTOM_DRIVER_LABELS = Map("labelkey" -> "labelvalue")
  private val CONTAINER_IMAGE_PULL_POLICY = "IfNotPresent"
  private val DRIVER_ANNOTATIONS = Map("customAnnotation" -> "customAnnotationValue")
  // NOTE(review): both keys map to "customDriverEnv2"; the first entry's value looks
  // like a copy/paste slip ("customDriverEnv1" presumably intended) — confirm upstream.
  private val DRIVER_ENVS = Map(
    "customDriverEnv1" -> "customDriverEnv2",
    "customDriverEnv2" -> "customDriverEnv2")
  private val TEST_IMAGE_PULL_SECRETS = Seq("my-secret-1", "my-secret-2")
  // Kubernetes LocalObjectReference objects built from the secret names above.
  private val TEST_IMAGE_PULL_SECRET_OBJECTS =
    TEST_IMAGE_PULL_SECRETS.map { secret =>
      new LocalObjectReferenceBuilder().withName(secret).build()
    }
test("Check the pod respects all configurations from the user.") {
val resourceID = new ResourceID(SPARK_DRIVER_PREFIX, GPU)
val resources =
Map(("nvidia.com/gpu" -> TestResourceInformation(resourceID, "2", "nvidia.com")))
val sparkConf = new SparkConf()
.set(KUBERNETES_DRIVER_POD_NAME, "spark-driver-pod")
.set(DRIVER_CORES, 2)
.set(KUBERNETES_DRIVER_LIMIT_CORES, "4")
.set(DRIVER_MEMORY.key, "256M")
.set(DRIVER_MEMORY_OVERHEAD, 200L)
.set(CONTAINER_IMAGE, "spark-driver:latest")
.set(IMAGE_PULL_SECRETS, TEST_IMAGE_PULL_SECRETS)
resources.foreach { case (_, testRInfo) =>
sparkConf.set(testRInfo.rId.amountConf, testRInfo.count)
sparkConf.set(testRInfo.rId.vendorConf, testRInfo.vendor)
}
val kubernetesConf = KubernetesTestConf.createDriverConf(
sparkConf = sparkConf,
labels = CUSTOM_DRIVER_LABELS,
environment = DRIVER_ENVS,
annotations = DRIVER_ANNOTATIONS)
val featureStep = new BasicDriverFeatureStep(kubernetesConf)
val basePod = SparkPod.initialPod()
val configuredPod = featureStep.configurePod(basePod)
assert(configuredPod.container.getName === DEFAULT_DRIVER_CONTAINER_NAME)
assert(configuredPod.container.getImage === "spark-driver:latest")
assert(configuredPod.container.getImagePullPolicy === CONTAINER_IMAGE_PULL_POLICY)
val expectedPortNames = Set(
containerPort(DRIVER_PORT_NAME, DEFAULT_DRIVER_PORT),
containerPort(BLOCK_MANAGER_PORT_NAME, DEFAULT_BLOCKMANAGER_PORT),
containerPort(UI_PORT_NAME, UI_PORT.defaultValue.get)
)
val foundPortNames = configuredPod.container.getPorts.asScala.toSet
assert(expectedPortNames === foundPortNames)
val envs = configuredPod.container
.getEnv
.asScala
.map { env => (env.getName, env.getValue) }
.toMap
DRIVER_ENVS.foreach { case (k, v) =>
assert(envs(v) === v)
}
assert(envs(ENV_SPARK_USER) === Utils.getCurrentUserName())
assert(envs(ENV_APPLICATION_ID) === kubernetesConf.appId)
assert(configuredPod.pod.getSpec().getImagePullSecrets.asScala ===
TEST_IMAGE_PULL_SECRET_OBJECTS)
assert(configuredPod.container.getEnv.asScala.exists(envVar =>
envVar.getName.equals(ENV_DRIVER_BIND_ADDRESS) &&
envVar.getValueFrom.getFieldRef.getApiVersion.equals("v1") &&
envVar.getValueFrom.getFieldRef.getFieldPath.equals("status.podIP")))
val resourceRequirements = configuredPod.container.getResources
val requests = resourceRequirements.getRequests.asScala
assert(amountAndFormat(requests("cpu")) === "2")
assert(amountAndFormat(requests("memory")) === "456Mi")
val limits = resourceRequirements.getLimits.asScala
assert(amountAndFormat(limits("memory")) === "456Mi")
assert(amountAndFormat(limits("cpu")) === "4")
resources.foreach { case (k8sName, testRInfo) =>
assert(amountAndFormat(limits(k8sName)) === testRInfo.count)
}
val driverPodMetadata = configuredPod.pod.getMetadata
assert(driverPodMetadata.getName === "spark-driver-pod")
// Check custom and preset labels are as expected
CUSTOM_DRIVER_LABELS.foreach { case (k, v) =>
assert(driverPodMetadata.getLabels.get(k) === v)
}
assert(driverPodMetadata.getLabels === kubernetesConf.labels.asJava)
assert(driverPodMetadata.getAnnotations.asScala === DRIVER_ANNOTATIONS)
assert(configuredPod.pod.getSpec.getRestartPolicy === "Never")
val expectedSparkConf = Map(
KUBERNETES_DRIVER_POD_NAME.key -> "spark-driver-pod",
"spark.app.id" -> KubernetesTestConf.APP_ID,
"spark.kubernetes.submitInDriver" -> "true",
MEMORY_OVERHEAD_FACTOR.key -> MEMORY_OVERHEAD_FACTOR.defaultValue.get.toString)
assert(featureStep.getAdditionalPodSystemProperties() === expectedSparkConf)
}
test("Check driver pod respects kubernetes driver request cores") {
val sparkConf = new SparkConf()
.set(KUBERNETES_DRIVER_POD_NAME, "spark-driver-pod")
.set(CONTAINER_IMAGE, "spark-driver:latest")
val basePod = SparkPod.initialPod()
// if spark.driver.cores is not set default is 1
val requests1 = new BasicDriverFeatureStep(KubernetesTestConf.createDriverConf(sparkConf))
.configurePod(basePod)
.container.getResources
.getRequests.asScala
assert(amountAndFormat(requests1("cpu")) === "1")
// if spark.driver.cores is set it should be used
sparkConf.set(DRIVER_CORES, 10)
val requests2 = new BasicDriverFeatureStep(KubernetesTestConf.createDriverConf(sparkConf))
.configurePod(basePod)
.container.getResources
.getRequests.asScala
assert(amountAndFormat(requests2("cpu")) === "10")
// spark.kubernetes.driver.request.cores should be preferred over spark.driver.cores
Seq("0.1", "100m").foreach { value =>
sparkConf.set(KUBERNETES_DRIVER_REQUEST_CORES, value)
val requests3 = new BasicDriverFeatureStep(KubernetesTestConf.createDriverConf(sparkConf))
.configurePod(basePod)
.container.getResources
.getRequests.asScala
assert(amountAndFormat(requests3("cpu")) === value)
}
}
test("Check appropriate entrypoint rerouting for various bindings") {
val javaSparkConf = new SparkConf()
.set(DRIVER_MEMORY.key, "4g")
.set(CONTAINER_IMAGE, "spark-driver:latest")
val pythonSparkConf = new SparkConf()
.set(DRIVER_MEMORY.key, "4g")
.set(CONTAINER_IMAGE, "spark-driver-py:latest")
val javaKubernetesConf = KubernetesTestConf.createDriverConf(sparkConf = javaSparkConf)
val pythonKubernetesConf = KubernetesTestConf.createDriverConf(
sparkConf = pythonSparkConf,
mainAppResource = PythonMainAppResource(""))
val javaFeatureStep = new BasicDriverFeatureStep(javaKubernetesConf)
val pythonFeatureStep = new BasicDriverFeatureStep(pythonKubernetesConf)
val basePod = SparkPod.initialPod()
val configuredJavaPod = javaFeatureStep.configurePod(basePod)
val configuredPythonPod = pythonFeatureStep.configurePod(basePod)
assert(configuredJavaPod.container.getImage === "spark-driver:latest")
assert(configuredPythonPod.container.getImage === "spark-driver-py:latest")
}
// Memory overhead tests. Tuples are:
// test name, main resource, overhead factor, expected factor
Seq(
("java", JavaMainAppResource(None), None, MEMORY_OVERHEAD_FACTOR.defaultValue.get),
("python default", PythonMainAppResource(null), None, NON_JVM_MEMORY_OVERHEAD_FACTOR),
("python w/ override", PythonMainAppResource(null), Some(0.9d), 0.9d),
("r default", RMainAppResource(null), None, NON_JVM_MEMORY_OVERHEAD_FACTOR)
).foreach { case (name, resource, factor, expectedFactor) =>
test(s"memory overhead factor: $name") {
// Choose a driver memory where the default memory overhead is > MEMORY_OVERHEAD_MIN_MIB
val driverMem =
ResourceProfile.MEMORY_OVERHEAD_MIN_MIB / MEMORY_OVERHEAD_FACTOR.defaultValue.get * 2
// main app resource, overhead factor
val sparkConf = new SparkConf(false)
.set(CONTAINER_IMAGE, "spark-driver:latest")
.set(DRIVER_MEMORY.key, s"${driverMem.toInt}m")
factor.foreach { value => sparkConf.set(MEMORY_OVERHEAD_FACTOR, value) }
val conf = KubernetesTestConf.createDriverConf(
sparkConf = sparkConf,
mainAppResource = resource)
val step = new BasicDriverFeatureStep(conf)
val pod = step.configurePod(SparkPod.initialPod())
val mem = amountAndFormat(pod.container.getResources.getRequests.get("memory"))
val expected = (driverMem + driverMem * expectedFactor).toInt
assert(mem === s"${expected}Mi")
val systemProperties = step.getAdditionalPodSystemProperties()
assert(systemProperties(MEMORY_OVERHEAD_FACTOR.key) === expectedFactor.toString)
}
}
test("SPARK-35493: make spark.blockManager.port be able to be fallen back to in driver pod") {
val initPod = SparkPod.initialPod()
val sparkConf = new SparkConf()
.set(CONTAINER_IMAGE, "spark-driver:latest")
.set(BLOCK_MANAGER_PORT, 1234)
val driverConf1 = KubernetesTestConf.createDriverConf(sparkConf)
val pod1 = new BasicDriverFeatureStep(driverConf1).configurePod(initPod)
val portMap1 =
pod1.container.getPorts.asScala.map { cp => (cp.getName -> cp.getContainerPort) }.toMap
assert(portMap1(BLOCK_MANAGER_PORT_NAME) === 1234, s"fallback to $BLOCK_MANAGER_PORT.key")
val driverConf2 =
KubernetesTestConf.createDriverConf(sparkConf.set(DRIVER_BLOCK_MANAGER_PORT, 1235))
val pod2 = new BasicDriverFeatureStep(driverConf2).configurePod(initPod)
val portMap2 =
pod2.container.getPorts.asScala.map { cp => (cp.getName -> cp.getContainerPort) }.toMap
assert(portMap2(BLOCK_MANAGER_PORT_NAME) === 1235)
}
test("SPARK-36075: Check driver pod respects nodeSelector/driverNodeSelector") {
val initPod = SparkPod.initialPod()
val sparkConf = new SparkConf()
.set(CONTAINER_IMAGE, "spark-driver:latest")
.set(s"${KUBERNETES_NODE_SELECTOR_PREFIX}nodeLabelKey", "nodeLabelValue")
.set(s"${KUBERNETES_DRIVER_NODE_SELECTOR_PREFIX}driverNodeLabelKey", "driverNodeLabelValue")
.set(s"${KUBERNETES_EXECUTOR_NODE_SELECTOR_PREFIX}execNodeLabelKey", "execNodeLabelValue")
val driverConf = KubernetesTestConf.createDriverConf(sparkConf)
val driver = new BasicDriverFeatureStep(driverConf).configurePod(initPod)
assert(driver.pod.getSpec.getNodeSelector.asScala === Map(
"nodeLabelKey" -> "nodeLabelValue",
"driverNodeLabelKey" -> "driverNodeLabelValue"
))
}
def containerPort(name: String, portNumber: Int): ContainerPort =
new ContainerPortBuilder()
.withName(name)
.withContainerPort(portNumber)
.withProtocol("TCP")
.build()
private def amountAndFormat(quantity: Quantity): String = quantity.getAmount + quantity.getFormat
}
| shaneknapp/spark | resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/BasicDriverFeatureStepSuite.scala | Scala | apache-2.0 | 12,172 |
/**
* Copyright 2015 Thomson Reuters
*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.util.stream
import akka.stream.ActorAttributes.SupervisionStrategy
import akka.stream._
import akka.stream.stage.{GraphStage, GraphStageLogic, InHandler, OutHandler}
import scala.util.control.NonFatal
/**
 * A [[GraphStage]] flow that maps every element except the final one with `init`, and the
 * final element with `last`. Each element is buffered and only emitted once the next
 * element (or upstream completion) arrives, which is how the stage learns whether the
 * buffered element was the last.
 *
 * FIX: the Scala arrow tokens in this file had been corrupted to the character 'β'
 * (mojibake of '⇒'); restored as ASCII `=>` so the file compiles.
 */
class MapInitAndLast[In, Out](init: In => Out, last: In => Out) extends GraphStage[FlowShape[In, Out]] {
  val in: Inlet[In] = Inlet[In]("MapInitAndLast.in")
  val out: Outlet[Out] = Outlet[Out]("MapInitAndLast.out")
  override val shape = FlowShape(in, out)
  override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) {
    // Most recently received element, not yet emitted; null means nothing buffered.
    private var pending: In = null.asInstanceOf[In]
    private def decider =
      inheritedAttributes.get[SupervisionStrategy].map(_.decider).getOrElse(Supervision.stoppingDecider)
    override def preStart(): Unit = tryPull(in)
    // Emits f(elem) downstream; failures are routed through the supervision decider:
    // Stop fails the stage, anything else drops the element and pulls the next one.
    def pushWith(elem: In, f: In => Out): Unit = try { push(out, f(elem)) } catch {
      case NonFatal(ex) => decider(ex) match {
        case Supervision.Stop => failStage(ex)
        case _ => pull(in)
      }
    }
    setHandler(in, new InHandler {
      override def onPush(): Unit = {
        val elem = grab(in)
        // The previously buffered element is now known not to be last: emit with `init`.
        if (pending != null) pushWith(pending, init)
        else if (isAvailable(out)) pull(in)
        pending = elem
      }
      override def onUpstreamFinish(): Unit = {
        if (pending == null) completeStage()
        else {
          if (isAvailable(out)) {
            pushWith(pending, last)
            completeStage()
          }
          // Otherwise completion is deferred to onPull, which observes the closed inlet.
        }
      }
    })
    setHandler(out, new OutHandler {
      override def onPull(): Unit = {
        if (!isClosed(in)) {
          if (!hasBeenPulled(in)) {
            pull(in)
          }
        } else {
          // Upstream already finished: flush the buffered last element, then complete.
          if (pending != null) {
            pushWith(pending, last)
          }
          completeStage()
        }
      }
    })
  }
}
package mr
import org.apache.spark.rdd._
import org.apache.spark._
import scala.reflect.ClassTag
import scala.util.Random._
import scala.reflect.runtime.universe._
object CensorshipRDD {
  // Pimp-my-library syntax: enables `rdd.censor()` on any RDD[String].
  implicit class StringRDDOps(rdd: RDD[String]) {
    def censor() = new CensorshipRDD(rdd)
  }
}
/**
 * An RDD that replaces every occurrence of "Hadoop" with "******" in its parent's
 * string elements. Partitioning is inherited from the parent unchanged.
 */
class CensorshipRDD(prev: RDD[String]) extends RDD[String](prev) {
  override def compute(split: Partition, context: TaskContext): Iterator[String] =
    // FIX: go through iterator() rather than calling the parent's compute() directly,
    // so the parent's cache/checkpoint (if persisted) is honored.
    firstParent[String].iterator(split, context).map(str => str.replace("Hadoop", "******"))
  override protected def getPartitions: Array[Partition] = firstParent[String].partitions
}
| rabbitonweb/my-rdd | src/main/scala/mr/CensorshipRDD.scala | Scala | apache-2.0 | 626 |
package lettergenerator
package mediator
import renderer.Wizard
import validators._
import formatter.{WordMLFormatter,Details,Template}
import org.docx4j.openpackaging.packages.WordprocessingMLPackage
import org.mockito.Mockito.when
import org.mockito.{Mockito, Matchers}
import org.mockito.AdditionalMatchers.not
/**
 * Behavior tests for ValidationMediator: path validation, details validation and
 * template-variable validation, driven by a mocked GUI (Wizard) and PathValidator.
 *
 * NOTE(review): mockGui is shared across all tests in this class, so Mockito
 * verification counts (times(n)) are cumulative and depend on test execution order.
 */
class ValMedTester extends Tester {
  val testObjects = new TestObjects with DetailsTestObjects
  val mockGui: Wizard = testObjects.mockGui
  val mockPathValidator: PathValidator = mock[PathValidator]
  val vm = new ValidationMediator(mockGui)
  // `path` is stubbed below as the only valid path; anything else validates false.
  val path = "./valid/path"
  val otherPath = "./some/path"
  val tuples: List[Map[String,String]] = testObjects.tuples
  val headers: Array[String] = testObjects.headers
  val details: Details = testObjects.details
  when(mockPathValidator.validate(
    Matchers.matches(path)))
    .thenReturn(true)
  when(mockPathValidator.validate(
    not(Matchers.eq(path))))
    .thenReturn(false)
  describe("the validatePath method") {
    it("should return Some") {
      Given("a path that exists")
      Then("it returns Some(path)")
      assert(vm.validatePath(path, mockPathValidator).get == path)
    }
    it("should return None") {
      Given("a path which does not exist")
      Then("it returns None")
      assert(vm.validatePath(otherPath, mockPathValidator) == None)
    }
  }
  describe("the validatePathOrThrow method") {
    it("should return the path String") {
      Given("a path which exists")
      Then("it should return return the path")
      assert(vm.validatePathOrThrow(("valid",path),mockPathValidator) == path)
    }
    it("should throw an exception") {
      Given("a path which does not exist")
      Then("it should throw an exception")
      assertThrows[Exception](
        vm.validatePathOrThrow(("not valid",otherPath),mockPathValidator))
      And("it should message the user")
      Mockito.verify(mockGui,Mockito.times(1))
        .message(Matchers.anyString())
    }
  }
  describe("the validateAllPaths method") {
    // Error template shown to the user; %s is the name of the unreachable resource.
    val pathMessage = "Could not reach the %s. Please check if path is correct" +
      ", or report this issue"
    it("should validate all paths entered by the user") {
      Given("existing paths for the details and template file, and the destination folder")
      when(mockGui.detailsFile).thenReturn(path)
      when(mockGui.templateFile).thenReturn(path)
      when(mockGui.destinationFolder).thenReturn(path)
      Then("it should message the user")
      vm.validateAllPaths(mockPathValidator)
      Mockito.verify(mockGui,Mockito.times(1)).message("File paths are valid.")
    }
    it("should throw an exception if it can't reach the details file") {
      Given("an invalid details file path")
      when(mockGui.detailsFile).thenReturn(otherPath)
      when(mockGui.templateFile).thenReturn(path)
      when(mockGui.destinationFolder).thenReturn(path)
      Then("it should throw an exception")
      assertThrows[Exception](vm.validateAllPaths(mockPathValidator))
      And("it should message the user that the details file cannot be found")
      Mockito.verify(mockGui,Mockito.times(1)).message(pathMessage.format("details file"))
    }
    it("should throw an exception if it can't reach the template file") {
      Given("an invalid template file path")
      when(mockGui.detailsFile).thenReturn(path)
      when(mockGui.templateFile).thenReturn(otherPath)
      when(mockGui.destinationFolder).thenReturn(path)
      Then("it should throw an exception")
      assertThrows[Exception](vm.validateAllPaths(mockPathValidator))
      And("it should message the user that the template file cannot be found")
      Mockito.verify(mockGui,Mockito.times(1)).message(pathMessage.format("template file"))
    }
    it("should throw an exception if it can't reach the destination folder") {
      Given("an invalid destination folder path")
      when(mockGui.detailsFile).thenReturn(path)
      when(mockGui.templateFile).thenReturn(path)
      when(mockGui.destinationFolder).thenReturn(otherPath)
      Then("it should throw an exception")
      assertThrows[Exception](vm.validateAllPaths(mockPathValidator))
      And("it should message the user that the destination folder cannot be found")
      Mockito.verify(mockGui,Mockito.times(1)).message(pathMessage.format("destination folder"))
    }
  }
  describe("the validateDetails method") {
    it("should not throw an exception when a Details object is valid") {
      Given("valid details")
      val validDetails = details
      When("the method is called")
      vm.validateDetails(validDetails)()
      Then("it should not throw an exception")
      Mockito.verify(mockGui,Mockito.never()).message("Error")
    }
    it("should throw an exception when one of the tuples has an empty value") {
      Given("a Details object with a blank value")
      val tuplesWithEmpty: List[Map[String,String]] = List(
        Map("name" -> "The Quick Brown Fox", "action" -> "",
          "consequence" -> "+35XP"), details.tuples.tail.head)
      val headers: Array[String] = tuplesWithEmpty.head.keys.toArray
      val detailsWithEmpty = new Details(headers, tuplesWithEmpty)
      When("the method is called")
      Then("it should throw an exception")
      assertThrows[Exception](vm.validateDetails(detailsWithEmpty)())
      And("it should message and alert the user")
      Mockito.verify(mockGui,Mockito.times(1)).message("Error")
      Mockito.verify(mockGui,Mockito.times(1)).alert(Matchers.anyString())
    }
  }
  describe("the validateTemplate method") {
    val mockTempl: Template = mock[Template]
    val mockTemplForm: WordMLFormatter = mock[WordMLFormatter]
    // Same tuples as `details`, but with an extra "filename" column per row.
    val tuplesWithFileName: List[Map[String,String]] = List(
      Map("filename" -> "TheFox") ++ details.tuples.head,
      Map("filename" -> "TheDog") ++ details.tuples.tail.head)
    val headersWithFileName = tuplesWithFileName.head.keySet.toArray
    val detailsWithFileName =
      new Details(headersWithFileName,tuplesWithFileName)
    it("should not throw an exception without file names") {
      Given("a template file's contents")
      when(mockTemplForm.text).thenReturn(
        "Hi! Are you ${name}? You ${action}, so you ${consequence}.")
      When("it is compared with the contents of a valid details file")
      val detailsForComparison = details
      vm.validateTemplate(detailsForComparison, mockTempl)(mockTemplForm)
      Then("it will not throw an exception")
      Mockito.verify(mockGui,Mockito.never()).message("Error on template validation")
      And("it will tell the user that the template is valid")
      Mockito.verify(mockGui,Mockito.times(1))
        .message("Template variables are valid.")
    }
    it("should not throw an exception with ticks and file names") {
      Given("a template file's contents which includes a file name")
      when(mockTemplForm.text).thenReturn(
        "Title: $(unknown)\\n" +
          "Hi! Are you ${name}? You ${action}, so you ${consequence}.")
      When("the 'File name also part of letter' option is ticked")
      when(mockGui.fnAlsoInTemplate).thenReturn(true)
      And("it is compared with the contents of a valid details file")
      val dwfn = detailsWithFileName
      vm.validateTemplate(dwfn, mockTempl)(mockTemplForm)
      Then("it will not throw an exception")
      Mockito.verify(mockGui,Mockito.never()).message("Error on template validation")
      And("it will tell the user that the template is valid")
      // times(2) is cumulative with the call made in the previous test.
      Mockito.verify(mockGui,Mockito.times(2))
        .message("Template variables are valid.")
    }
    it("should not throw an exception with file names but no ticks"){
      Given("a column of file names in the details, but no " +
        "\\ncorresponding variable in the template")
      when(mockTemplForm.text).thenReturn(
        "Hi! Are you ${name}? You ${action}, so you ${consequence}.")
      When("the 'File name also part of letter' option is not ticked")
      when(mockGui.fnAlsoInTemplate).thenReturn(false)
      // NOTE(review): unlike the sibling tests, this test never invokes
      // vm.validateTemplate before verifying; the times(2) assertion passes only
      // because of calls made by the previous tests. Confirm whether a call to
      // vm.validateTemplate(detailsWithFileName, mockTempl)(mockTemplForm) is missing.
      Then("it will not throw an exception")
      Mockito.verify(mockGui,Mockito.never()).message("Error on template validation")
      And("it will tell the user that the template is valid")
      Mockito.verify(mockGui,Mockito.times(2))
        .message("Template variables are valid.")
    }
    it("should throw an exception details has file name"+
      "template does not, but option is ticked") {
      Given("a column of file names in the details, but no " +
        "\\ncorresponding variable in the template")
      val dwfn = detailsWithFileName
      when(mockTemplForm.text).thenReturn(
        "Hi! Are you ${name}? You ${action}, so you ${consequence}.")
      When("the 'File name also part of letter' option is ticked")
      when(mockGui.fnAlsoInTemplate).thenReturn(true)
      Then("it should throw an exception")
      assertThrows[Exception](vm.validateTemplate(
        detailsWithFileName, mockTempl)(mockTemplForm))
      And("it should alert the user")
      Mockito.verify(mockGui,Mockito.times(1)).message("Error on template validation")
      Mockito.verify(mockGui,Mockito.times(1)).alert(Matchers.anyString())
    }
  }
}
| claudiusbr/LetterGenerator | src/test/scala/lettergenerator/mediator/ValMedTester.scala | Scala | mit | 9,284 |
package controllers.c4
import play.api.data._
import play.api.data.Forms._
import play.api.data.validation.{Invalid, Valid, ValidationError, Constraint}
import play.api.http.HttpVerbs
import play.api.mvc._
/**
* Created by trydofor on 7/9/15.
* @see https://playframework.com/documentation/2.4.x/ScalaCustomValidations
* @see https://github.com/playframework/playframework/blob/2.4.x/documentation/manual/working/scalaGuide/main/forms/code/CustomValidations.scala
*/
/**
 * Demonstrates Play's custom form-field validation: a password field rejected when it
 * consists entirely of digits or entirely of letters.
 */
class S3ScalaCustomValidations extends Controller {
  // Degenerate-password patterns. Regex.unapplySeq (used in the match below) requires
  // the whole input to match the pattern.
  val allNumbers = """\\d*""".r
  val allLetters = """[A-Za-z]*""".r
  // Custom constraint keyed "constraints.passwordcheck"; returns Valid or the
  // accumulated validation errors.
  val passwordCheckConstraint: Constraint[String] = Constraint("constraints.passwordcheck")({
    plainText =>
      val errors = plainText match {
        case allNumbers() => Seq(ValidationError("Password is all numbers"))
        case allLetters() => Seq(ValidationError("Password is all letters"))
        case _ => Nil
      }
      if (errors.isEmpty) {
        Valid
      } else {
        Invalid(errors)
      }
  })
  // Built-in length check composed with the custom constraint above.
  val passwordCheck: Mapping[String] = nonEmptyText(minLength = 10)
    .verifying(passwordCheckConstraint)
  val passwordForm = Form(
    single(
      "password" -> passwordCheck
    )
  )
  import play.api.Play.current
  import play.api.i18n.Messages.Implicits._
  // GET renders the empty form; any other verb binds and validates the submission.
  val a0 = Action { implicit request =>
    if (request.method == HttpVerbs.GET) {
      Ok(views.html.c4.s3(passwordForm))
    } else {
      passwordForm.bindFromRequest.fold(
        formWithErrors => {
          BadRequest(views.html.c4.s3(formWithErrors))
        },
        userData => {
          Redirect(routes.S1ScalaForms.af).flashing("success" -> userData.toString)
        }
      )
    }
  }
}
| moilioncircle/playframework-2.4.x-scala | app/controllers/c4/S3ScalaCustomValidations.scala | Scala | apache-2.0 | 1,700 |
package base
import org.apache.commons.io.FileUtils
import java.io.File
import scala.io.Codec
import scala.util.Try
/** Test helper mixin: loads fixture files from test/res as UTF-8 strings. */
trait FileSpec { self: TestBaseDefinition =>

  // Reads test/res/<filename> fully into a String; fails the test (via TryValues'
  // .success.value) if the file cannot be read.
  def readTestResourceFile(filename: String) = {
    val attempt = Try {
      val resource = new File(s"test/res/$filename")
      FileUtils.readFileToString(resource, Codec.UTF8.charSet)
    }
    attempt.success.value
  }
}
| THK-ADV/lwm-reloaded | test/base/FileSpec.scala | Scala | mit | 350 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala.math
/**
* @since 2.8
*/
trait Fractional[T] extends Numeric[T] {
  /** Returns the quotient of `x` divided by `y`. */
  def div(x: T, y: T): T
  /** Adds the `/` operator on top of the operations inherited from `Numeric.Ops`. */
  class FractionalOps(lhs: T) extends Ops(lhs) {
    def /(rhs: T) = div(lhs, rhs)
  }
  // Narrowed return type so callers of mkNumericOps see the `/` operator.
  override implicit def mkNumericOps(lhs: T): FractionalOps =
    new FractionalOps(lhs)
}
| cran/rkafkajars | java/scala/math/Fractional.scala | Scala | apache-2.0 | 817 |
package jp.hotbrain.makecsv
/**
* Created by hideki takada on 2016/09/10.
*/
object Main {
  /**
   * CLI entry point. Sub-commands:
   *  - export  <encode.cnf> <export_file>                 : export DB query to (optionally encrypted/gzipped) CSV
   *  - decode  <decode.cnf> <import_file> <export_file>   : decrypt and gunzip a file
   *  - decrypt <decode.cnf> <import_file> <export_file>   : decrypt only (no gunzip)
   * Prints usage when arguments don't match; exits 0 on success, 1 on failure.
   */
  def main(args: Array[String]): Unit = {
    import scala.util.control.NonFatal
    try {
      // BUG FIX: the original guard was `0 <= args.length`, which is always true and
      // let an empty args array reach args(0). Require at least one argument.
      if (null != args && args.nonEmpty) {
        args(0).toLowerCase() match {
          case "export" if 3 == args.length =>
            Environment.setConfigFile(args(1))
            encode(args(2))
            println("ok")
            System.exit(0)
          case "decode" if 4 == args.length =>
            Environment.setConfigFile(args(1))
            decode(args(2), args(3), ungzip = true)
            println("ok")
            System.exit(0)
          case "decrypt" if 4 == args.length =>
            Environment.setConfigFile(args(1))
            decode(args(2), args(3), ungzip = false)
            println("ok")
            System.exit(0)
          case _ =>
          // fall through to the usage text below
        }
      }
      println(
        """Usage:
          | java -jar make_csv_assembly_1.0.0.jar export [encode.cnf] [export_file]
          | java -jar make_csv_assembly_1.0.0.jar decode [decode.cnf] [import_file] [export_file]
          | java -jar make_csv_assembly_1.0.0.jar decrypt [decode.cnf] [import_file] [export_file]
          | |
          |[encode.cnf]
          |serial=201609101600
          |
          |file.aes.key=[KeyStr]
          |file.aes.iv=[IVStr]
          |
          |file.gzip=false
          |file.col_sep=,
          |file.row_sep=\\n
          |file.charset=UTF-8
          |file.timezone=UTC
          |file.dtformat=yyyy/MM/dd HH:mm:ss
          |
          |db.con=jdbc:mysql://localhost/bspark?user=root&password=root&useSSL=false
          |db.sql=SELECT * FROM `master`
          |
          |[decode.cnf|decrypt.cnf]
          |file.serial=201609101700
          |
          |file.aes.key=[KeyStr]
          |file.aes.iv=[IVStr]
          |
          |file.gzip=false
          |
          | """.stripMargin)
    } catch {
      // FIX: was `case ex: Throwable`, which also swallowed fatal VM errors, and the
      // process exited 0 after a failure. Report to stderr and signal failure.
      case NonFatal(ex) =>
        Console.err.println(ex)
        System.exit(1)
    }
  }
  /** Assembles export settings from the loaded config; db.con and db.sql are mandatory. */
  private[this] def getXsvExportSetting: XsvExportSetting = {
    val constr = Environment.getValue("db.con").getOrElse(throw new Exception("no Sql Connection String"))
    val sqlstr = Environment.getValue("db.sql").getOrElse(throw new Exception("no Select Sql String"))
    XsvExportSetting(
      constr = constr,
      sqlstr = sqlstr,
      optColSep = Environment.getValue("file.col_sep"),
      optRowSep = Environment.getValue("file.row_sep"),
      optCharset = Environment.getValue("file.charset"),
      optTimeZone = Environment.getValue("file.timezone"),
      optDatetimeFormat = Environment.getValue("file.dtformat")
    )
  }
  /** Runs the DB query and writes the result to exportFileName (AES/gzip per config). */
  private[this] def encode(exportFileName: String): Unit = {
    println(s"Encode: to $exportFileName")
    println
    val conf = getXsvExportSetting
    Encoder(
      fileName = exportFileName,
      aesParam = aesParam(),
      gzip = Environment.getValue("file.gzip", v => "true".equalsIgnoreCase(v)).getOrElse(false)
    ).encodeOf(conf)
  }
  /**
   * Decrypts (and, when `ungzip` is set and the config enables gzip, decompresses)
   * importFileName into exportFileName. No-op with a notice when neither applies.
   */
  private[this] def decode(importFileName: String, exportFileName: String, ungzip: Boolean): Unit = {
    println(s"Decode: $importFileName to $exportFileName")
    println
    val aes = aesParam()
    val gzip = ungzip && Environment.getValue("file.gzip", v => "true".equalsIgnoreCase(v)).getOrElse(false)
    if (aes.isEmpty && !gzip) {
      println("nothing to do")
    } else {
      DecodeSetting(
        importFileName = importFileName,
        aesParam = aes,
        gzip = gzip
      ).decodeTo(new Decoder(exportFileName))
    }
  }
  /** Builds the AES parameters from config; None unless both key and IV are present. */
  private[this] def aesParam(): Option[AesParam] = {
    val key = Environment.getValue("file.aes.key").map(_.trim).getOrElse("")
    val iv = Environment.getValue("file.aes.iv").map(_.trim).getOrElse("")
    if (key.isEmpty || iv.isEmpty) {
      None
    } else {
      Option(AesParam(Environment.getValue("serial").getOrElse("0"), key, iv))
    }
  }
}
| HidekiTak/make_csv | src/main/scala/jp/hotbrain/makecsv/Main.scala | Scala | apache-2.0 | 3,860 |
package edu.depauw.escalatorx
import javax.swing.undo._
/** Undoable edit recorded when a block is inserted. */
class InsertBlockUE extends AbstractUndoableEdit {
  // Shown in the Edit menu as "Undo/Redo Insert Block".
  override def getPresentationName = "Insert Block"
  // NOTE(review): these overrides only delegate to super — presumably placeholders
  // where the actual remove/re-insert logic still needs to be implemented.
  override def undo() {
    super.undo()
  }
  override def redo() {
    super.redo()
  }
}
/** Undoable edit recorded when a block is deleted. */
class DeleteBlockUE extends AbstractUndoableEdit {
  // Shown in the Edit menu as "Undo/Redo Delete Block".
  override def getPresentationName = "Delete Block"
  // NOTE(review): overrides only delegate to super — presumably placeholders for the
  // actual restore/re-delete logic.
  override def undo() {
    super.undo()
  }
  override def redo() {
    super.redo()
  }
}
/** Undoable edit recorded when a block is updated. */
class UpdateBlockUE extends AbstractUndoableEdit {
  // Shown in the Edit menu as "Undo/Redo Update Block".
  override def getPresentationName = "Update Block"
  // NOTE(review): overrides only delegate to super — presumably placeholders for the
  // actual revert/re-apply logic.
  override def undo() {
    super.undo()
  }
  override def redo() {
    super.redo()
  }
}
| bhoward/EscalatorOld | EscalatorX/src/edu/depauw/escalatorx/UndoableEdits.scala | Scala | apache-2.0 | 669 |
package org.jetbrains.plugins.scala
package runner
import com.intellij.execution.impl.RunConfigurationRefactoringElementListenerProvider
import com.intellij.psi.PsiElement
import com.intellij.refactoring.listeners.RefactoringElementListener
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTemplateDefinition
import org.jetbrains.plugins.scala.lang.psi.light.PsiClassWrapper
/**
* Nikolay.Tropin
* 10/21/13
*/
/**
 * Adapts the platform's run-configuration refactoring listener to Scala type
 * definitions: Scala classes/objects are presented to the platform through a
 * light PsiClass facade so renames/moves update run configurations correctly.
 */
class ScalaRunConfigurationRefactoringListenerProvider extends RunConfigurationRefactoringElementListenerProvider {

  // Wraps a Scala type definition in a PsiClass facade carrying its qualified name.
  private def wrap(definition: ScTemplateDefinition): PsiClassWrapper =
    new PsiClassWrapper(definition, definition.qualifiedName, definition.name)

  // Returns a listener that re-wraps Scala definitions before notifying the delegate.
  private def decorate(delegate: RefactoringElementListener): RefactoringElementListener =
    if (delegate == null) null
    else new RefactoringElementListener {
      private def forward(element: PsiElement)(notify: PsiElement => Unit): Unit = element match {
        case td: ScTemplateDefinition => notify(wrap(td))
        case _ =>
      }

      def elementMoved(newElement: PsiElement): Unit = forward(newElement)(delegate.elementMoved)

      def elementRenamed(newElement: PsiElement): Unit = forward(newElement)(delegate.elementRenamed)
    }

  override def getListener(element: PsiElement): RefactoringElementListener = element match {
    case td: ScTemplateDefinition => decorate(super.getListener(wrap(td)))
    case _ => null
  }
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/runner/ScalaRunConfigurationRefactoringListenerProvider.scala | Scala | apache-2.0 | 1,425 |
package com.github.andr83.parsek.pipe
import com.github.andr83.parsek._
import org.scalatest._
/**
* @author andr83
*/
/** Specs for SplitPipe: splitting a string value by a regex separator. */
class SplitSpec extends FlatSpec with Matchers with Inside {
  // Pipes require an implicit PipeContext.
  implicit val context = new PipeContext()
  "String value" should "be split to parts by separator" in {
    val pipe = SplitPipe(";".r)
    val result = pipe.transformString("a;b;c;d")
    result shouldBe Some(PList.create("a", "b", "c", "d"))
  }
  "Additional values" should "be appended to the beginning and end of split parts" in {
    // Splitting "}{"-joined records and restoring the braces on each part.
    val pipe = SplitPipe("\\\\}\\\\{".r, appendToBeginning = "{", appendToEnd = "}")
    val result = pipe.transformString("{a:b}{c:d}")
    result shouldBe Some(PList.create("{a:b}", "{c:d}"))
  }
}
| andr83/parsek | core/src/test/scala/com/github/andr83/parsek/pipe/SplitSpec.scala | Scala | mit | 723 |
package t6601
// Compiler regression fixture (pos/t6601): value class V is declared in a separately
// compiled file with a private constructor; these usages must typecheck without the
// erasure machinery trying to access that constructor. Do not "clean up" this file.
object Test {
  // After the first attempt to make seprately compiled value
  // classes respect the privacy of constructors, we got:
  //
  // exception when typing v.a().==(v.a())/class scala.reflect.internal.Trees$Apply
  // constructor V in class V cannot be accessed in object Test in file test/files/pos/t6601/UsePrivateValueClass_2.scala
  // scala.reflect.internal.Types$TypeError: constructor V in class V cannot be accessed in object Test
  def foo(v: V) = v.a == v.a
  def bar(v: V) = v == v
}
| yusuke2255/dotty | tests/pos/valueclasses/t6601/UsePrivateValueClass_2.scala | Scala | bsd-3-clause | 534 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.cli
import org.apache.hadoop.mapreduce.Job
import org.apache.spark.{ Logging, SparkContext }
import org.apache.spark.rdd.RDD
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.formats.avro.{ Genotype, GenotypeAllele }
import org.kohsuke.args4j.Argument
/** CLI companion: registers the "allelecount" command with the ADAM dispatcher. */
object AlleleCount extends ADAMCommandCompanion {
  val commandName = "allelecount"
  val commandDescription = "Calculate Allele frequencies"
  // Factory invoked by the ADAM CLI with the raw command-line arguments.
  def apply(cmdLine: Array[String]) = {
    new AlleleCount(Args4j[AlleleCountArgs](cmdLine))
  }
}
// args4j-backed argument container: positional ADAM genotype input and output location.
class AlleleCountArgs extends Args4jBase with ParquetArgs {
  @Argument(required = true, metaVar = "ADAM",
    usage = "The ADAM Variant file", index = 0)
  var adamFile: String = _
  @Argument(required = true, metaVar = "Output",
    usage = "Location to write allele frequency data", index = 1)
  var outputPath: String = null
}
object AlleleCountHelper extends Serializable {
  /**
   * Projects one called allele onto the sequence it expresses:
   * Ref -> reference allele, Alt -> alternate allele, anything else (e.g. no-call)
   * is dropped.
   */
  def chooseAllele(x: (String, java.lang.Long, String, String, GenotypeAllele)) =
    x match {
      case (chr, position, refAllele, varAllele, GenotypeAllele.Ref) => Some(chr, position, refAllele)
      case (chr, position, refAllele, varAllele, GenotypeAllele.Alt) => Some(chr, position, varAllele)
      case _ => None
    }
  /**
   * Counts observed alleles per (contig, position, allele) and writes
   * tab-separated "chr\tpos\tallele\tcount" lines to args.outputPath.
   */
  def countAlleles(adamVariants: RDD[Genotype], args: AlleleCountArgs) {
    val usefulData = adamVariants.map(p => (p.getVariant.getContig.getContigName,
      p.getVariant.getStart,
      p.getVariant.getReferenceAllele,
      p.getVariant.getAlternateAllele,
      p.getAlleles.get(0),
      p.getAlleles.get(1)))
    // Emit one record per called allele (both alleles of the diploid genotype).
    val reduced_Variants = usefulData.flatMap(p => Seq((p._1, p._2, p._3, p._4, p._5), (p._1, p._2, p._3, p._4, p._6)))
    val alleles = reduced_Variants.flatMap(chooseAllele)
    // PERF FIX: count with reduceByKey (map-side combining) instead of
    // groupBy(identity).map(_.size), which shuffled every duplicate value.
    alleles.map(allele => (allele, 1L))
      .reduceByKey(_ + _)
      .map { case ((chr, pos, allele), count) => "%s\t%s\t%s\t%d".format(chr, pos, allele, count) }
      .saveAsTextFile(args.outputPath)
  }
}
/**
 * Spark command: loads genotypes from the input ADAM file and writes
 * per-position allele frequency counts to args.outputPath.
 */
class AlleleCount(val args: AlleleCountArgs) extends ADAMSparkCommand[AlleleCountArgs] with Logging {
  val companion = AlleleCount

  /** Entry point invoked by the ADAM CLI framework with a live SparkContext. */
  def run(sc: SparkContext, job: Job) {
    // The explicit RDD[Genotype] ascription selects the genotype loader
    // among ADAMContext's implicit conversions — do not remove.
    val adamVariants: RDD[Genotype] = sc.loadGenotypes(args.adamFile)
    AlleleCountHelper.countAlleles(adamVariants, args)
  }
}
| tomwhite/adam | adam-cli/src/main/scala/org/bdgenomics/adam/cli/AlleleCount.scala | Scala | apache-2.0 | 2,996 |
package models.label
import models.utils.MyPostgresDriver.simple._
import play.api.Play.current
// One severity rating attached to a label. The valid range of `severity`
// is not enforced here — presumably constrained by the UI; verify at callers.
case class ProblemSeverity(problemSeverityId: Int, labelId: Int, severity: Int)
/**
 * Slick mapping for the `sidewalk.problem_severity` table.
 * Each row associates a label with a severity value.
 */
class ProblemSeverityTable(tag: Tag) extends Table[ProblemSeverity](tag, Some("sidewalk"), "problem_severity") {
  def problemSeverityId = column[Int]("problem_severity_id", O.PrimaryKey, O.AutoInc)
  def labelId = column[Int]("label_id", O.NotNull)
  def severity = column[Int]("severity", O.NotNull)
  // Default projection: maps the column tuple to/from the case class.
  def * = (problemSeverityId, labelId, severity) <> ((ProblemSeverity.apply _).tupled, ProblemSeverity.unapply)
}
object ProblemSeverityTable {
  val db = play.api.db.slick.DB
  val problemSeverities = TableQuery[ProblemSeverityTable]

  /**
   * Saves a new problem severity record.
   * (Previous scaladoc said "temporariness" — copy-paste from a sibling table.)
   *
   * @param ps the severity record to insert (problemSeverityId is auto-generated)
   * @return the database-assigned problem_severity_id of the inserted row
   */
  def save(ps: ProblemSeverity): Int = db.withTransaction { implicit session =>
    val problemSeverityId: Int =
      (problemSeverities returning problemSeverities.map(_.problemSeverityId)) += ps
    problemSeverityId
  }
}
| danZzyy/SidewalkWebpage | sidewalk-webpage/app/models/label/ProblemSeverityTable.scala | Scala | mit | 1,042 |
package com.twitter.finagle.ssl.server
import com.twitter.finagle.ssl.{CipherSuites, ClientAuth, Engine, Protocols}
import javax.net.ssl.{SSLContext, SSLEngine}
import org.scalatest.funsuite.AnyFunSuite
class SslServerEngineFactoryTest extends AnyFunSuite {

  // Builds a TLSv1.2 SSLContext initialized with default key/trust material.
  private[this] def newSslContext(): SSLContext = {
    val ctx = SSLContext.getInstance("TLSv1.2")
    ctx.init(null, null, null)
    ctx
  }

  // Raw JSSE engine straight from a fresh context.
  private[this] def newSslEngine(): SSLEngine = newSslContext().createSSLEngine()

  // Finagle wrapper around a fresh JSSE engine.
  private[this] def newEngine(): Engine = new Engine(newSslEngine())

  test("configureClientAuth Unspecified doesn't change anything") {
    val engine = newSslEngine()
    engine.setWantClientAuth(true)
    SslServerEngineFactory.configureClientAuth(engine, ClientAuth.Unspecified)
    // Pre-existing "want" flag survives; "need" stays off.
    assert(engine.getWantClientAuth())
    assert(!engine.getNeedClientAuth())
  }

  test("configureClientAuth Off turns off client authentication") {
    val engine = newSslEngine()
    engine.setWantClientAuth(true)
    SslServerEngineFactory.configureClientAuth(engine, ClientAuth.Off)
    assert(!engine.getWantClientAuth())
    assert(!engine.getNeedClientAuth())
  }

  test("configureClientAuth Wanted turns on desired client authentication") {
    val engine = newSslEngine()
    SslServerEngineFactory.configureClientAuth(engine, ClientAuth.Wanted)
    assert(engine.getWantClientAuth())
    assert(!engine.getNeedClientAuth())
  }

  test("configureClientAuth Needed turns on required client authentication") {
    val engine = newSslEngine()
    SslServerEngineFactory.configureClientAuth(engine, ClientAuth.Needed)
    assert(!engine.getWantClientAuth())
    assert(engine.getNeedClientAuth())
  }

  test("configureEngine sets server mode, protocols, and cipher suites") {
    val engine = newEngine()
    val config = SslServerConfiguration(
      protocols = Protocols.Enabled(Seq("TLSv1.2")),
      cipherSuites = CipherSuites.Enabled(Seq("TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384")))
    SslServerEngineFactory.configureEngine(engine, config)
    val underlying = engine.self
    // runs in server mode
    assert(!underlying.getUseClientMode())
    // exactly the one configured protocol is enabled
    assert(underlying.getEnabledProtocols().toSeq == Seq("TLSv1.2"))
    // exactly the one configured cipher suite is enabled
    assert(underlying.getEnabledCipherSuites().toSeq ==
      Seq("TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384"))
  }

  test("createEngine can create an engine from an SSLContext") {
    val created = SslServerEngineFactory.createEngine(newSslContext())
    assert(created != null)
    assert(created.self != null)
  }
}
| twitter/finagle | finagle-core/src/test/scala/com/twitter/finagle/ssl/server/SslServerEngineFactoryTest.scala | Scala | apache-2.0 | 3,029 |
/*
* Copyright 2013 David Crosson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.janalyse.ssh
/**
 * Minimal view of an OS process parsed from `ps` output.
 *
 * NOTE: the concrete vals below run during trait initialization, so
 * implementors must supply `cmdline` as a constructor parameter (as the
 * platform case classes in this file do), not as a body `val`.
 */
trait Process {
  /** Process id. */
  val pid: Int
  /** Parent process id. */
  val ppid: Int
  /** Owning user name. */
  val user: String
  /** Full command line as reported by ps. */
  val cmdline: String

  // Tokenize on runs of whitespace. Fix: the previous pattern """\\s+"""
  // matched a literal backslash followed by 's', never actual whitespace.
  private val tokens = cmdline.split("""\s+""").toList.filter(_.nonEmpty)

  /** Executable name (first token), or "" when the command line is blank. */
  val cmd = tokens.headOption.getOrElse("")

  /** Remaining command-line arguments (empty for a blank command line). */
  val args = tokens.drop(1)
}
/** Duration broken down as printed by `ps` (etime / cputime columns). */
case class ProcessTime(days: Int, hours: Int, minutes: Int, seconds: Int) {
  /** Total duration in seconds. (Name kept for source compatibility; "ellapsed" is a historical typo.) */
  val ellapsedInS = days * 24 * 3600 + hours * 3600 + minutes * 60 + seconds
}

object ProcessTime {
  /**
   * Parses a `ps` time field into a [[ProcessTime]].
   *
   * Accepted layouts: "ss", "mm:ss", "hh:mm:ss" and "dd-hh:mm:ss".
   * Any other input yields a zero duration instead of failing.
   *
   * Fix: the patterns previously used """(\\d+)""", which matches a literal
   * backslash followed by 'd' rather than digits.
   *
   * @param spec raw time field as printed by ps
   * @return parsed duration; ProcessTime(0, 0, 0, 0) when unparseable
   */
  def apply(spec: String): ProcessTime = {
    // Regex.unapplySeq requires a whole-string match, so these are anchored.
    val secondsOnly = """(\d+)""".r
    val minSec = """(\d+):(\d+)""".r
    val hourMinSec = """(\d+):(\d+):(\d+)""".r
    val dayHourMinSec = """(\d+)-(\d+):(\d+):(\d+)""".r
    spec match {
      case secondsOnly(s) => ProcessTime(0, 0, 0, s.toInt)
      case minSec(m, s) => ProcessTime(0, 0, m.toInt, s.toInt)
      case hourMinSec(h, m, s) => ProcessTime(0, h.toInt, m.toInt, s.toInt)
      case dayHourMinSec(d, h, m, s) => ProcessTime(d.toInt, h.toInt, m.toInt, s.toInt)
      case _ => ProcessTime(0, 0, 0, 0)
    }
  }
}
/** Common interface for platform-specific process-state decodings. */
trait ProcessState {
  // Human-readable state name (e.g. "Running", "Zombie").
  val name: String
}
/**
 * Decoded Linux process state.
 *
 * @param name  human-readable state name
 * @param extra trailing modifier characters from the ps STAT column
 */
case class LinuxProcessState(
  name: String,
  extra: String
) extends ProcessState

object LinuxProcessState {
  /** First STAT character to state name, per ps(1). */
  val states = Map(
    'D' -> "UninterruptibleSleep",
    'R' -> "Running",
    'S' -> "InterruptibleSleep",
    'T' -> "Stopped",
    'W' -> "Paging", // no longer reported since the 2.6.xx kernels
    'X' -> "Dead",
    'Z' -> "Zombie"
  )

  /** Decodes a raw STAT value; unknown or empty specs yield "UnknownState". */
  def fromSpec(spec: String): LinuxProcessState = {
    val stateName = spec.headOption.flatMap(states.get).getOrElse("UnknownState")
    LinuxProcessState(stateName, spec.drop(1))
  }
}
/**
 * Decoded macOS (Darwin) process state.
 *
 * @param name  human-readable state name
 * @param extra trailing modifier characters from the ps STAT column
 */
case class DarwinProcessState(
  name: String,
  extra: String
) extends ProcessState

object DarwinProcessState {
  /** First STAT character to state name, per Darwin ps(1). */
  val states = Map(
    'I' -> "Idle",                 // sleeping for longer than ~20 seconds
    'R' -> "Running",              // runnable
    'S' -> "Sleeping",             // sleeping for less than ~20 seconds
    'T' -> "Stopped",              // stopped
    'U' -> "UninterruptibleSleep", // uninterruptible wait
    'Z' -> "Zombie"                // dead ("zombie") process
  )

  /** Decodes a raw STAT value; unknown or empty specs yield "UnknownState". */
  def fromSpec(spec: String): DarwinProcessState = {
    val stateName = spec.headOption.flatMap(states.get).getOrElse("UnknownState")
    DarwinProcessState(stateName, spec.drop(1))
  }
}
/** Process entry as reported by ps on AIX (no state/memory/time columns parsed). */
case class AIXProcess(
  pid: Int,
  ppid: Int,
  user: String,
  cmdline: String
) extends Process
/** Process entry as reported by ps on SunOS/Solaris (no state/memory/time columns parsed). */
case class SunOSProcess(
  pid: Int,
  ppid: Int,
  user: String,
  cmdline: String
) extends Process
/** Process entry as reported by ps on Linux. */
case class LinuxProcess(
  pid: Int,
  ppid: Int,
  user: String,
  state: LinuxProcessState,
  rss: Int, // resident set size (KB)
  vsz: Int, // virtual memory size of the process (KB)
  etime: ProcessTime, // elapsed time since start, ps format [DD-]hh:mm:ss
  cputime: ProcessTime, // CPU time used since start, ps format [[DD-]hh:]mm:ss
  cmdline: String
) extends Process
/** Process entry as reported by ps on macOS (Darwin). */
case class DarwinProcess(
  pid: Int,
  ppid: Int,
  user: String,
  state: DarwinProcessState,
  rss: Int, // resident set size (KB)
  vsz: Int, // virtual memory size of the process (KB)
  etime: ProcessTime, // elapsed time since start, ps format [DD-]hh:mm:ss
  cputime: ProcessTime, // CPU time used since start, ps format [[DD-]hh:]mm:ss
  cmdline: String
) extends Process
| dacr/jassh | src/main/scala/fr/janalyse/ssh/Process.scala | Scala | apache-2.0 | 3,943 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic._
import Matchers._
import TripleEquals._
class ShouldTripleEqualsToleranceSpec extends Spec /* with NonImplicitAssertions */ with Tolerance {
val sevenDotOh = 7.0
val minusSevenDotOh = -7.0
val sevenDotOhFloat = 7.0f
val minusSevenDotOhFloat = -7.0f
val sevenLong = 7L
val minusSevenLong = -7L
val sevenInt = 7
val minusSevenInt = -7
val sevenShort: Short = 7
val minusSevenShort: Short = -7
val sevenByte: Byte = 7
val minusSevenByte: Byte = -7
/*
I decided that for X +- Y, Y can be any numeric type that's implicitly
convertible to X. So if X is Double, Y could be Double, Float, Long, Int, Short, Byte.
If X is Long, Y could be Long, Int, Short, Byte. If X is Short, Y could be Short or Byte.
And if X is Byte, Y must be Byte.
assert(minusSevenDotOhFloat === (-6.8f +- 0.2d))
*/
/* Chose not to do the symmetry, because no one needs it and implementing it would require an implicit. So these fail:
(7.1 +- 0.2) should === (sevenDotOh)
(7.5 +- 0.2) should !== (sevenDotOh)
*/
object `The should === syntax` {
def `should be true if the number is within the given interval` {
// Double +- Double
sevenDotOh should === (7.1 +- 0.2)
sevenDotOh should === (6.9 +- 0.2)
sevenDotOh should === (7.0 +- 0.2)
sevenDotOh should === (7.2 +- 0.2)
sevenDotOh should === (6.8 +- 0.2)
minusSevenDotOh should === (-7.1 +- 0.2)
minusSevenDotOh should === (-6.9 +- 0.2)
minusSevenDotOh should === (-7.0 +- 0.2)
minusSevenDotOh should === (-7.2 +- 0.2)
minusSevenDotOh should === (-6.8 +- 0.2)
// Double +- Float
sevenDotOh should === (7.1 +- 0.2f)
sevenDotOh should === (6.9 +- 0.2f)
sevenDotOh should === (7.0 +- 0.2f)
sevenDotOh should === (7.2 +- 0.2f)
sevenDotOh should === (6.8 +- 0.2f)
minusSevenDotOh should === (-7.1 +- 0.2f)
minusSevenDotOh should === (-6.9 +- 0.2f)
minusSevenDotOh should === (-7.0 +- 0.2f)
minusSevenDotOh should === (-7.2 +- 0.2f)
minusSevenDotOh should === (-6.8 +- 0.2f)
// Double +- Long
sevenDotOh should === (7.1 +- 2L)
sevenDotOh should === (6.9 +- 2L)
sevenDotOh should === (7.0 +- 2L)
sevenDotOh should === (7.2 +- 2L)
sevenDotOh should === (6.8 +- 2L)
minusSevenDotOh should === (-7.1 +- 2L)
minusSevenDotOh should === (-6.9 +- 2L)
minusSevenDotOh should === (-7.0 +- 2L)
minusSevenDotOh should === (-7.2 +- 2L)
minusSevenDotOh should === (-6.8 +- 2L)
// Double +- Int
sevenDotOh should === (7.1 +- 2)
sevenDotOh should === (6.9 +- 2)
sevenDotOh should === (7.0 +- 2)
sevenDotOh should === (7.2 +- 2)
sevenDotOh should === (6.8 +- 2)
minusSevenDotOh should === (-7.1 +- 2)
minusSevenDotOh should === (-6.9 +- 2)
minusSevenDotOh should === (-7.0 +- 2)
minusSevenDotOh should === (-7.2 +- 2)
minusSevenDotOh should === (-6.8 +- 2)
// Double +- Short
sevenDotOh should === (7.1 +- 2.toShort)
sevenDotOh should === (6.9 +- 2.toShort)
sevenDotOh should === (7.0 +- 2.toShort)
sevenDotOh should === (7.2 +- 2.toShort)
sevenDotOh should === (6.8 +- 2.toShort)
minusSevenDotOh should === (-7.1 +- 2.toShort)
minusSevenDotOh should === (-6.9 +- 2.toShort)
minusSevenDotOh should === (-7.0 +- 2.toShort)
minusSevenDotOh should === (-7.2 +- 2.toShort)
minusSevenDotOh should === (-6.8 +- 2.toShort)
// Double +- Byte
sevenDotOh should === (7.1 +- 2.toByte)
sevenDotOh should === (6.9 +- 2.toByte)
sevenDotOh should === (7.0 +- 2.toByte)
sevenDotOh should === (7.2 +- 2.toByte)
sevenDotOh should === (6.8 +- 2.toByte)
minusSevenDotOh should === (-7.1 +- 2.toByte)
minusSevenDotOh should === (-6.9 +- 2.toByte)
minusSevenDotOh should === (-7.0 +- 2.toByte)
minusSevenDotOh should === (-7.2 +- 2.toByte)
minusSevenDotOh should === (-6.8 +- 2.toByte)
// Float +- Float
sevenDotOhFloat should === (7.1f +- 0.2f)
sevenDotOhFloat should === (6.9f +- 0.2f)
sevenDotOhFloat should === (7.0f +- 0.2f)
sevenDotOhFloat should === (7.2f +- 0.2f)
sevenDotOhFloat should === (6.8f +- 0.2f)
minusSevenDotOhFloat should === (-7.1f +- 0.2f)
minusSevenDotOhFloat should === (-6.9f +- 0.2f)
minusSevenDotOhFloat should === (-7.0f +- 0.2f)
minusSevenDotOhFloat should === (-7.2f +- 0.2f)
minusSevenDotOhFloat should === (-6.8f +- 0.2f)
// Float +- Long
sevenDotOhFloat should === (7.1f +- 2L)
sevenDotOhFloat should === (6.9f +- 2L)
sevenDotOhFloat should === (7.0f +- 2L)
sevenDotOhFloat should === (7.2f +- 2L)
sevenDotOhFloat should === (6.8f +- 2L)
minusSevenDotOhFloat should === (-7.1f +- 2L)
minusSevenDotOhFloat should === (-6.9f +- 2L)
minusSevenDotOhFloat should === (-7.0f +- 2L)
minusSevenDotOhFloat should === (-7.2f +- 2L)
minusSevenDotOhFloat should === (-6.8f +- 2L)
// Float +- Int
sevenDotOhFloat should === (7.1f +- 2)
sevenDotOhFloat should === (6.9f +- 2)
sevenDotOhFloat should === (7.0f +- 2)
sevenDotOhFloat should === (7.2f +- 2)
sevenDotOhFloat should === (6.8f +- 2)
minusSevenDotOhFloat should === (-7.1f +- 2)
minusSevenDotOhFloat should === (-6.9f +- 2)
minusSevenDotOhFloat should === (-7.0f +- 2)
minusSevenDotOhFloat should === (-7.2f +- 2)
minusSevenDotOhFloat should === (-6.8f +- 2)
// Float +- Short
sevenDotOhFloat should === (7.1f +- 2.toShort)
sevenDotOhFloat should === (6.9f +- 2.toShort)
sevenDotOhFloat should === (7.0f +- 2.toShort)
sevenDotOhFloat should === (7.2f +- 2.toShort)
sevenDotOhFloat should === (6.8f +- 2.toShort)
minusSevenDotOhFloat should === (-7.1f +- 2.toShort)
minusSevenDotOhFloat should === (-6.9f +- 2.toShort)
minusSevenDotOhFloat should === (-7.0f +- 2.toShort)
minusSevenDotOhFloat should === (-7.2f +- 2.toShort)
minusSevenDotOhFloat should === (-6.8f +- 2.toShort)
// Float +- Byte
sevenDotOhFloat should === (7.1f +- 2.toByte)
sevenDotOhFloat should === (6.9f +- 2.toByte)
sevenDotOhFloat should === (7.0f +- 2.toByte)
sevenDotOhFloat should === (7.2f +- 2.toByte)
sevenDotOhFloat should === (6.8f +- 2.toByte)
minusSevenDotOhFloat should === (-7.1f +- 2.toByte)
minusSevenDotOhFloat should === (-6.9f +- 2.toByte)
minusSevenDotOhFloat should === (-7.0f +- 2.toByte)
minusSevenDotOhFloat should === (-7.2f +- 2.toByte)
minusSevenDotOhFloat should === (-6.8f +- 2.toByte)
// Long +- Long
sevenLong should === (9L +- 2L)
sevenLong should === (8L +- 2L)
sevenLong should === (7L +- 2L)
sevenLong should === (6L +- 2L)
sevenLong should === (5L +- 2L)
minusSevenLong should === (-9L +- 2L)
minusSevenLong should === (-8L +- 2L)
minusSevenLong should === (-7L +- 2L)
minusSevenLong should === (-6L +- 2L)
minusSevenLong should === (-5L +- 2L)
// Long +- Int
sevenLong should === (9L +- 2)
sevenLong should === (8L +- 2)
sevenLong should === (7L +- 2)
sevenLong should === (6L +- 2)
sevenLong should === (5L +- 2)
minusSevenLong should === (-9L +- 2)
minusSevenLong should === (-8L +- 2)
minusSevenLong should === (-7L +- 2)
minusSevenLong should === (-6L +- 2)
minusSevenLong should === (-5L +- 2)
// Long +- Short
sevenLong should === (9L +- 2.toShort)
sevenLong should === (8L +- 2.toShort)
sevenLong should === (7L +- 2.toShort)
sevenLong should === (6L +- 2.toShort)
sevenLong should === (5L +- 2.toShort)
minusSevenLong should === (-9L +- 2.toShort)
minusSevenLong should === (-8L +- 2.toShort)
minusSevenLong should === (-7L +- 2.toShort)
minusSevenLong should === (-6L +- 2.toShort)
minusSevenLong should === (-5L +- 2.toShort)
// Long +- Byte
sevenLong should === (9L +- 2.toByte)
sevenLong should === (8L +- 2.toByte)
sevenLong should === (7L +- 2.toByte)
sevenLong should === (6L +- 2.toByte)
sevenLong should === (5L +- 2.toByte)
minusSevenLong should === (-9L +- 2.toByte)
minusSevenLong should === (-8L +- 2.toByte)
minusSevenLong should === (-7L +- 2.toByte)
minusSevenLong should === (-6L +- 2.toByte)
minusSevenLong should === (-5L +- 2.toByte)
// Int +- Int
sevenInt should === (9 +- 2)
sevenInt should === (8 +- 2)
sevenInt should === (7 +- 2)
sevenInt should === (6 +- 2)
sevenInt should === (5 +- 2)
minusSevenInt should === (-9 +- 2)
minusSevenInt should === (-8 +- 2)
minusSevenInt should === (-7 +- 2)
minusSevenInt should === (-6 +- 2)
minusSevenInt should === (-5 +- 2)
// Int +- Short
sevenInt should === (9 +- 2.toShort)
sevenInt should === (8 +- 2.toShort)
sevenInt should === (7 +- 2.toShort)
sevenInt should === (6 +- 2.toShort)
sevenInt should === (5 +- 2.toShort)
minusSevenInt should === (-9 +- 2.toShort)
minusSevenInt should === (-8 +- 2.toShort)
minusSevenInt should === (-7 +- 2.toShort)
minusSevenInt should === (-6 +- 2.toShort)
minusSevenInt should === (-5 +- 2.toShort)
// Int +- Byte
sevenInt should === (9 +- 2.toByte)
sevenInt should === (8 +- 2.toByte)
sevenInt should === (7 +- 2.toByte)
sevenInt should === (6 +- 2.toByte)
sevenInt should === (5 +- 2.toByte)
minusSevenInt should === (-9 +- 2.toByte)
minusSevenInt should === (-8 +- 2.toByte)
minusSevenInt should === (-7 +- 2.toByte)
minusSevenInt should === (-6 +- 2.toByte)
minusSevenInt should === (-5 +- 2.toByte)
// Short +- Short
sevenShort should === (9.toShort +- 2.toShort)
sevenShort should === (8.toShort +- 2.toShort)
sevenShort should === (7.toShort +- 2.toShort)
sevenShort should === (6.toShort +- 2.toShort)
sevenShort should === (5.toShort +- 2.toShort)
minusSevenShort should === ((-9).toShort +- 2.toShort)
minusSevenShort should === ((-8).toShort +- 2.toShort)
minusSevenShort should === ((-7).toShort +- 2.toShort)
minusSevenShort should === ((-6).toShort +- 2.toShort)
minusSevenShort should === ((-5).toShort +- 2.toShort)
// Short +- Byte
sevenShort should === (9.toShort +- 2.toByte)
sevenShort should === (8.toShort +- 2.toByte)
sevenShort should === (7.toShort +- 2.toByte)
sevenShort should === (6.toShort +- 2.toByte)
sevenShort should === (5.toShort +- 2.toByte)
minusSevenShort should === ((-9).toShort +- 2.toByte)
minusSevenShort should === ((-8).toShort +- 2.toByte)
minusSevenShort should === ((-7).toShort +- 2.toByte)
minusSevenShort should === ((-6).toShort +- 2.toByte)
minusSevenShort should === ((-5).toShort +- 2.toByte)
// Byte +- Byte
sevenByte should === (9.toByte +- 2.toByte)
sevenByte should === (8.toByte +- 2.toByte)
sevenByte should === (7.toByte +- 2.toByte)
sevenByte should === (6.toByte +- 2.toByte)
sevenByte should === (5.toByte +- 2.toByte)
minusSevenByte should === ((-9).toByte +- 2.toByte)
minusSevenByte should === ((-8).toByte +- 2.toByte)
minusSevenByte should === ((-7).toByte +- 2.toByte)
minusSevenByte should === ((-6).toByte +- 2.toByte)
minusSevenByte should === ((-5).toByte +- 2.toByte)
}
def `should throw TFE if the number is outside the given interval` {
// Double +- Double
val caught = intercept[TestFailedException] { sevenDotOh should === (7.5 +- 0.2) }
assert(caught.getMessage === "7.0 did not equal 7.5 plus or minus 0.2")
intercept[TestFailedException] { sevenDotOh should === (6.5 +- 0.2) }
intercept[TestFailedException] { minusSevenDotOh should === (-7.5 +- 0.2) }
intercept[TestFailedException] { minusSevenDotOh should === (-6.5 +- 0.2) }
// Double +- Float
intercept[TestFailedException] { sevenDotOh should === (7.5 +- 0.2f) }
intercept[TestFailedException] { sevenDotOh should === (6.5 +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOh should === (-7.5 +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOh should === (-6.5 +- 0.2f) }
// Double +- Long
intercept[TestFailedException] { sevenDotOh should === (4.0 +- 2L) }
intercept[TestFailedException] { sevenDotOh should === (9.1 +- 2L) }
intercept[TestFailedException] { minusSevenDotOh should === (-4.0 +- 2L) }
intercept[TestFailedException] { minusSevenDotOh should === (-9.1 +- 2L) }
// Double +- Int
intercept[TestFailedException] { sevenDotOh should === (4.0 +- 2) }
intercept[TestFailedException] { sevenDotOh should === (9.1 +- 2) }
intercept[TestFailedException] { minusSevenDotOh should === (-4.0 +- 2) }
intercept[TestFailedException] { minusSevenDotOh should === (-9.1 +- 2) }
// Double +- Short
intercept[TestFailedException] { sevenDotOh should === (4.0 +- 2.toShort) }
intercept[TestFailedException] { sevenDotOh should === (9.1 +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOh should === (-4.0 +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOh should === (-9.1 +- 2.toShort) }
// Double +- Byte
intercept[TestFailedException] { sevenDotOh should === (4.0 +- 2.toByte) }
intercept[TestFailedException] { sevenDotOh should === (9.1 +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOh should === (-4.0 +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOh should === (-9.1 +- 2.toByte) }
// Float +- Float
intercept[TestFailedException] { sevenDotOhFloat should === (7.5f +- 0.2f) }
intercept[TestFailedException] { sevenDotOhFloat should === (6.5f +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOhFloat should === (-7.5f +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOhFloat should === (-6.5f +- 0.2f) }
// Float +- Long
intercept[TestFailedException] { sevenDotOhFloat should === (4.0f +- 2L) }
intercept[TestFailedException] { sevenDotOhFloat should === (9.1f +- 2L) }
intercept[TestFailedException] { minusSevenDotOhFloat should === (-4.0f +- 2L) }
intercept[TestFailedException] { minusSevenDotOhFloat should === (-9.1f +- 2L) }
// Float +- Int
intercept[TestFailedException] { sevenDotOhFloat should === (4.0f +- 2) }
intercept[TestFailedException] { sevenDotOhFloat should === (9.1f +- 2) }
intercept[TestFailedException] { minusSevenDotOhFloat should === (-4.0f +- 2) }
intercept[TestFailedException] { minusSevenDotOhFloat should === (-9.1f +- 2) }
// Float +- Short
intercept[TestFailedException] { sevenDotOhFloat should === (4.0f +- 2.toShort) }
intercept[TestFailedException] { sevenDotOhFloat should === (9.1f +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOhFloat should === (-4.0f +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOhFloat should === (-9.1f +- 2.toShort) }
// Float +- Byte
intercept[TestFailedException] { sevenDotOhFloat should === (4.0f +- 2.toByte) }
intercept[TestFailedException] { sevenDotOhFloat should === (9.1f +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOhFloat should === (-4.0f +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOhFloat should === (-9.1f +- 2.toByte) }
// Long +- Long
intercept[TestFailedException] { sevenLong should === (4L +- 2L) }
intercept[TestFailedException] { sevenLong should === (10L +- 2L) }
intercept[TestFailedException] { minusSevenLong should === (-4L +- 2L) }
intercept[TestFailedException] { minusSevenLong should === (-10L +- 2L) }
// Long +- Int
intercept[TestFailedException] { sevenLong should === (4L +- 2) }
intercept[TestFailedException] { sevenLong should === (10L +- 2) }
intercept[TestFailedException] { minusSevenLong should === (-4L +- 2) }
intercept[TestFailedException] { minusSevenLong should === (-10L +- 2) }
// Long +- Short
intercept[TestFailedException] { sevenLong should === (4L +- 2.toShort) }
intercept[TestFailedException] { sevenLong should === (10L +- 2.toShort) }
intercept[TestFailedException] { minusSevenLong should === (-4L +- 2.toShort) }
intercept[TestFailedException] { minusSevenLong should === (-10L +- 2.toShort) }
// Long +- Byte
intercept[TestFailedException] { sevenLong should === (4L +- 2.toByte) }
intercept[TestFailedException] { sevenLong should === (10L +- 2.toByte) }
intercept[TestFailedException] { minusSevenLong should === (-4L +- 2.toByte) }
intercept[TestFailedException] { minusSevenLong should === (-10L +- 2.toByte) }
// Int +- Int
intercept[TestFailedException] { sevenInt should === (4 +- 2) }
intercept[TestFailedException] { sevenInt should === (10 +- 2) }
intercept[TestFailedException] { minusSevenInt should === (-4 +- 2) }
intercept[TestFailedException] { minusSevenInt should === (-10 +- 2) }
// Int +- Short
intercept[TestFailedException] { sevenInt should === (4 +- 2.toShort) }
intercept[TestFailedException] { sevenInt should === (10 +- 2.toShort) }
intercept[TestFailedException] { minusSevenInt should === (-4 +- 2.toShort) }
intercept[TestFailedException] { minusSevenInt should === (-10 +- 2.toShort) }
// Int +- Byte
intercept[TestFailedException] { sevenInt should === (4 +- 2.toByte) }
intercept[TestFailedException] { sevenInt should === (10 +- 2.toByte) }
intercept[TestFailedException] { minusSevenInt should === (-4 +- 2.toByte) }
intercept[TestFailedException] { minusSevenInt should === (-10 +- 2.toByte) }
// Short +- Short
intercept[TestFailedException] { sevenShort should === (4.toShort +- 2.toShort) }
intercept[TestFailedException] { sevenShort should === (10.toShort +- 2.toShort) }
intercept[TestFailedException] { minusSevenShort should === ((-4).toShort +- 2.toShort) }
intercept[TestFailedException] { minusSevenShort should === ((-10).toShort +- 2.toShort) }
// Short +- Byte
intercept[TestFailedException] { sevenShort should === (4.toShort +- 2.toByte) }
intercept[TestFailedException] { sevenShort should === (10.toShort +- 2.toByte) }
intercept[TestFailedException] { minusSevenShort should === ((-4).toShort +- 2.toByte) }
intercept[TestFailedException] { minusSevenShort should === ((-10).toShort +- 2.toByte) }
// Byte +- Byte
intercept[TestFailedException] { sevenByte should === (4.toByte +- 2.toByte) }
intercept[TestFailedException] { sevenByte should === (10.toByte +- 2.toByte) }
intercept[TestFailedException] { minusSevenByte should === ((-4).toByte +- 2.toByte) }
intercept[TestFailedException] { minusSevenByte should === ((-10).toByte +- 2.toByte) }
}
}
object `The !== syntax` {
def `should succeed if the number is outside the given interval` {
// Double +- Double
sevenDotOh should !== (7.5 +- 0.2)
sevenDotOh should !== (6.5 +- 0.2)
minusSevenDotOh should !== (-7.5 +- 0.2)
minusSevenDotOh should !== (-6.5 +- 0.2)
// Double +- Float
sevenDotOh should !== (7.5 +- 0.2f)
sevenDotOh should !== (6.5 +- 0.2f)
minusSevenDotOh should !== (-7.5 +- 0.2f)
minusSevenDotOh should !== (-6.5 +- 0.2f)
// Double +- Long
sevenDotOh should !== (4.0 +- 2L)
sevenDotOh should !== (9.1 +- 2L)
minusSevenDotOh should !== (-4.0 +- 2L)
minusSevenDotOh should !== (-9.1 +- 2L)
// Double +- Int
sevenDotOh should !== (4.0 +- 2)
sevenDotOh should !== (9.1 +- 2)
minusSevenDotOh should !== (-4.0 +- 2)
minusSevenDotOh should !== (-9.1 +- 2)
// Double +- Short
sevenDotOh should !== (4.0 +- 2.toShort)
sevenDotOh should !== (9.1 +- 2.toShort)
minusSevenDotOh should !== (-4.0 +- 2.toShort)
minusSevenDotOh should !== (-9.1 +- 2.toShort)
// Double +- Byte
sevenDotOh should !== (4.0 +- 2.toByte)
sevenDotOh should !== (9.1 +- 2.toByte)
minusSevenDotOh should !== (-4.0 +- 2.toByte)
minusSevenDotOh should !== (-9.1 +- 2.toByte)
// Float +- Float
sevenDotOhFloat should !== (7.5f +- 0.2f)
sevenDotOhFloat should !== (6.5f +- 0.2f)
minusSevenDotOhFloat should !== (-7.5f +- 0.2f)
minusSevenDotOhFloat should !== (-6.5f +- 0.2f)
// Float +- Long
sevenDotOhFloat should !== (4.0f +- 2L)
sevenDotOhFloat should !== (9.1f +- 2L)
minusSevenDotOhFloat should !== (-4.0f +- 2L)
minusSevenDotOhFloat should !== (-9.1f +- 2L)
// Float +- Int
sevenDotOhFloat should !== (4.0f +- 2)
sevenDotOhFloat should !== (9.1f +- 2)
minusSevenDotOhFloat should !== (-4.0f +- 2)
minusSevenDotOhFloat should !== (-9.1f +- 2)
// Float +- Short
sevenDotOhFloat should !== (4.0f +- 2.toShort)
sevenDotOhFloat should !== (9.1f +- 2.toShort)
minusSevenDotOhFloat should !== (-4.0f +- 2.toShort)
minusSevenDotOhFloat should !== (-9.1f +- 2.toShort)
// Float +- Byte
sevenDotOhFloat should !== (4.0f +- 2.toByte)
sevenDotOhFloat should !== (9.1f +- 2.toByte)
minusSevenDotOhFloat should !== (-4.0f +- 2.toByte)
minusSevenDotOhFloat should !== (-9.1f +- 2.toByte)
// Long +- Long
sevenLong should !== (4L +- 2L)
sevenLong should !== (10L +- 2L)
minusSevenLong should !== (-4L +- 2L)
minusSevenLong should !== (-10L +- 2L)
// Long +- Int
sevenLong should !== (4L +- 2)
sevenLong should !== (10L +- 2)
minusSevenLong should !== (-4L +- 2)
minusSevenLong should !== (-10L +- 2)
// Long +- Short
sevenLong should !== (4L +- 2.toShort)
sevenLong should !== (10L +- 2.toShort)
minusSevenLong should !== (-4L +- 2.toShort)
minusSevenLong should !== (-10L +- 2.toShort)
// Long +- Byte
sevenLong should !== (4L +- 2.toByte)
sevenLong should !== (10L +- 2.toByte)
minusSevenLong should !== (-4L +- 2.toByte)
minusSevenLong should !== (-10L +- 2.toByte)
// Int +- Int
sevenInt should !== (4 +- 2)
sevenInt should !== (10 +- 2)
minusSevenInt should !== (-4 +- 2)
minusSevenInt should !== (-10 +- 2)
// Int +- Short
sevenInt should !== (4 +- 2.toShort)
sevenInt should !== (10 +- 2.toShort)
minusSevenInt should !== (-4 +- 2.toShort)
minusSevenInt should !== (-10 +- 2.toShort)
// Int +- Byte
sevenInt should !== (4 +- 2.toByte)
sevenInt should !== (10 +- 2.toByte)
minusSevenInt should !== (-4 +- 2.toByte)
minusSevenInt should !== (-10 +- 2.toByte)
// Short +- Short
sevenShort should !== (4.toShort +- 2.toShort)
sevenShort should !== (10.toShort +- 2.toShort)
minusSevenShort should !== ((-4).toShort +- 2.toShort)
minusSevenShort should !== ((-10).toShort +- 2.toShort)
// Short +- Byte
sevenShort should !== (4.toShort +- 2.toByte)
sevenShort should !== (10.toShort +- 2.toByte)
minusSevenShort should !== ((-4).toShort +- 2.toByte)
minusSevenShort should !== ((-10).toShort +- 2.toByte)
// Byte +- Byte
sevenByte should !== (4.toByte +- 2.toByte)
sevenByte should !== (10.toByte +- 2.toByte)
minusSevenByte should !== ((-4).toByte +- 2.toByte)
minusSevenByte should !== ((-10).toByte +- 2.toByte)
}
// Verifies that `should !== (pivot +- tolerance)` throws TestFailedException whenever the
// actual value IS within [pivot - tolerance, pivot + tolerance], for every supported
// combination of pivot/tolerance numeric types (Double/Float/Long/Int/Short/Byte).
// Each five-line group probes the interval at: above center, below center, exact center,
// and both edges. NOTE: every call site deliberately uses literal static types so that the
// corresponding `+-` overload and implicit widening are exercised; do not refactor these
// into loops over collections, as that would change the statically-resolved types.
def `should throw TFE if the number is within the given interval` {
// Double +- Double
val caught = intercept[TestFailedException] { sevenDotOh should !== (7.1 +- 0.2) }
// The failure message wording is asserted once here; the remaining cases only check the exception type.
assert(caught.getMessage === "7.0 equaled 7.1 plus or minus 0.2")
intercept[TestFailedException] { sevenDotOh should !== (6.9 +- 0.2) }
intercept[TestFailedException] { sevenDotOh should !== (7.0 +- 0.2) }
intercept[TestFailedException] { sevenDotOh should !== (7.2 +- 0.2) }
intercept[TestFailedException] { sevenDotOh should !== (6.8 +- 0.2) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.1 +- 0.2) }
intercept[TestFailedException] { minusSevenDotOh should !== (-6.9 +- 0.2) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.0 +- 0.2) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.2 +- 0.2) }
intercept[TestFailedException] { minusSevenDotOh should !== (-6.8 +- 0.2) }
// Double +- Float
intercept[TestFailedException] { sevenDotOh should !== (7.1 +- 0.2f) }
intercept[TestFailedException] { sevenDotOh should !== (6.9 +- 0.2f) }
intercept[TestFailedException] { sevenDotOh should !== (7.0 +- 0.2f) }
intercept[TestFailedException] { sevenDotOh should !== (7.2 +- 0.2f) }
intercept[TestFailedException] { sevenDotOh should !== (6.8 +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.1 +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOh should !== (-6.9 +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.0 +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.2 +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOh should !== (-6.8 +- 0.2f) }
// Double +- Long
intercept[TestFailedException] { sevenDotOh should !== (7.1 +- 2L) }
intercept[TestFailedException] { sevenDotOh should !== (6.9 +- 2L) }
intercept[TestFailedException] { sevenDotOh should !== (7.0 +- 2L) }
intercept[TestFailedException] { sevenDotOh should !== (7.2 +- 2L) }
intercept[TestFailedException] { sevenDotOh should !== (6.8 +- 2L) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.1 +- 2L) }
intercept[TestFailedException] { minusSevenDotOh should !== (-6.9 +- 2L) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.0 +- 2L) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.2 +- 2L) }
intercept[TestFailedException] { minusSevenDotOh should !== (-6.8 +- 2L) }
// Double +- Int
intercept[TestFailedException] { sevenDotOh should !== (7.1 +- 2) }
intercept[TestFailedException] { sevenDotOh should !== (6.9 +- 2) }
intercept[TestFailedException] { sevenDotOh should !== (7.0 +- 2) }
intercept[TestFailedException] { sevenDotOh should !== (7.2 +- 2) }
intercept[TestFailedException] { sevenDotOh should !== (6.8 +- 2) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.1 +- 2) }
intercept[TestFailedException] { minusSevenDotOh should !== (-6.9 +- 2) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.0 +- 2) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.2 +- 2) }
intercept[TestFailedException] { minusSevenDotOh should !== (-6.8 +- 2) }
// Double +- Short
intercept[TestFailedException] { sevenDotOh should !== (7.1 +- 2.toShort) }
intercept[TestFailedException] { sevenDotOh should !== (6.9 +- 2.toShort) }
intercept[TestFailedException] { sevenDotOh should !== (7.0 +- 2.toShort) }
intercept[TestFailedException] { sevenDotOh should !== (7.2 +- 2.toShort) }
intercept[TestFailedException] { sevenDotOh should !== (6.8 +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.1 +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOh should !== (-6.9 +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.0 +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.2 +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOh should !== (-6.8 +- 2.toShort) }
// Double +- Byte
intercept[TestFailedException] { sevenDotOh should !== (7.1 +- 2.toByte) }
intercept[TestFailedException] { sevenDotOh should !== (6.9 +- 2.toByte) }
intercept[TestFailedException] { sevenDotOh should !== (7.0 +- 2.toByte) }
intercept[TestFailedException] { sevenDotOh should !== (7.2 +- 2.toByte) }
intercept[TestFailedException] { sevenDotOh should !== (6.8 +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.1 +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOh should !== (-6.9 +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.0 +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOh should !== (-7.2 +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOh should !== (-6.8 +- 2.toByte) }
// Float +- Float
intercept[TestFailedException] { sevenDotOhFloat should !== (7.1f +- 0.2f) }
intercept[TestFailedException] { sevenDotOhFloat should !== (6.9f +- 0.2f) }
intercept[TestFailedException] { sevenDotOhFloat should !== (7.0f +- 0.2f) }
intercept[TestFailedException] { sevenDotOhFloat should !== (7.2f +- 0.2f) }
intercept[TestFailedException] { sevenDotOhFloat should !== (6.8f +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.1f +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.9f +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.0f +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.2f +- 0.2f) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.8f +- 0.2f) }
// Float +- Long
intercept[TestFailedException] { sevenDotOhFloat should !== (7.1f +- 2L) }
intercept[TestFailedException] { sevenDotOhFloat should !== (6.9f +- 2L) }
intercept[TestFailedException] { sevenDotOhFloat should !== (7.0f +- 2L) }
intercept[TestFailedException] { sevenDotOhFloat should !== (7.2f +- 2L) }
intercept[TestFailedException] { sevenDotOhFloat should !== (6.8f +- 2L) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.1f +- 2L) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.9f +- 2L) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.0f +- 2L) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.2f +- 2L) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.8f +- 2L) }
// Float +- Int
intercept[TestFailedException] { sevenDotOhFloat should !== (7.1f +- 2) }
intercept[TestFailedException] { sevenDotOhFloat should !== (6.9f +- 2) }
intercept[TestFailedException] { sevenDotOhFloat should !== (7.0f +- 2) }
intercept[TestFailedException] { sevenDotOhFloat should !== (7.2f +- 2) }
intercept[TestFailedException] { sevenDotOhFloat should !== (6.8f +- 2) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.1f +- 2) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.9f +- 2) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.0f +- 2) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.2f +- 2) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.8f +- 2) }
// Float +- Short
intercept[TestFailedException] { sevenDotOhFloat should !== (7.1f +- 2.toShort) }
intercept[TestFailedException] { sevenDotOhFloat should !== (6.9f +- 2.toShort) }
intercept[TestFailedException] { sevenDotOhFloat should !== (7.0f +- 2.toShort) }
intercept[TestFailedException] { sevenDotOhFloat should !== (7.2f +- 2.toShort) }
intercept[TestFailedException] { sevenDotOhFloat should !== (6.8f +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.1f +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.9f +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.0f +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.2f +- 2.toShort) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.8f +- 2.toShort) }
// Float +- Byte
intercept[TestFailedException] { sevenDotOhFloat should !== (7.1f +- 2.toByte) }
intercept[TestFailedException] { sevenDotOhFloat should !== (6.9f +- 2.toByte) }
intercept[TestFailedException] { sevenDotOhFloat should !== (7.0f +- 2.toByte) }
intercept[TestFailedException] { sevenDotOhFloat should !== (7.2f +- 2.toByte) }
intercept[TestFailedException] { sevenDotOhFloat should !== (6.8f +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.1f +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.9f +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.0f +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-7.2f +- 2.toByte) }
intercept[TestFailedException] { minusSevenDotOhFloat should !== (-6.8f +- 2.toByte) }
// Long +- Long
intercept[TestFailedException] { sevenLong should !== (9L +- 2L) }
intercept[TestFailedException] { sevenLong should !== (8L +- 2L) }
intercept[TestFailedException] { sevenLong should !== (7L +- 2L) }
intercept[TestFailedException] { sevenLong should !== (6L +- 2L) }
intercept[TestFailedException] { sevenLong should !== (5L +- 2L) }
intercept[TestFailedException] { minusSevenLong should !== (-9L +- 2L) }
intercept[TestFailedException] { minusSevenLong should !== (-8L +- 2L) }
intercept[TestFailedException] { minusSevenLong should !== (-7L +- 2L) }
intercept[TestFailedException] { minusSevenLong should !== (-6L +- 2L) }
intercept[TestFailedException] { minusSevenLong should !== (-5L +- 2L) }
// Long +- Int
intercept[TestFailedException] { sevenLong should !== (9L +- 2) }
intercept[TestFailedException] { sevenLong should !== (8L +- 2) }
intercept[TestFailedException] { sevenLong should !== (7L +- 2) }
intercept[TestFailedException] { sevenLong should !== (6L +- 2) }
intercept[TestFailedException] { sevenLong should !== (5L +- 2) }
intercept[TestFailedException] { minusSevenLong should !== (-9L +- 2) }
intercept[TestFailedException] { minusSevenLong should !== (-8L +- 2) }
intercept[TestFailedException] { minusSevenLong should !== (-7L +- 2) }
intercept[TestFailedException] { minusSevenLong should !== (-6L +- 2) }
intercept[TestFailedException] { minusSevenLong should !== (-5L +- 2) }
// Long +- Short
intercept[TestFailedException] { sevenLong should !== (9L +- 2.toShort) }
intercept[TestFailedException] { sevenLong should !== (8L +- 2.toShort) }
intercept[TestFailedException] { sevenLong should !== (7L +- 2.toShort) }
intercept[TestFailedException] { sevenLong should !== (6L +- 2.toShort) }
intercept[TestFailedException] { sevenLong should !== (5L +- 2.toShort) }
intercept[TestFailedException] { minusSevenLong should !== (-9L +- 2.toShort) }
intercept[TestFailedException] { minusSevenLong should !== (-8L +- 2.toShort) }
intercept[TestFailedException] { minusSevenLong should !== (-7L +- 2.toShort) }
intercept[TestFailedException] { minusSevenLong should !== (-6L +- 2.toShort) }
intercept[TestFailedException] { minusSevenLong should !== (-5L +- 2.toShort) }
// Long +- Byte
intercept[TestFailedException] { sevenLong should !== (9L +- 2.toByte) }
intercept[TestFailedException] { sevenLong should !== (8L +- 2.toByte) }
intercept[TestFailedException] { sevenLong should !== (7L +- 2.toByte) }
intercept[TestFailedException] { sevenLong should !== (6L +- 2.toByte) }
intercept[TestFailedException] { sevenLong should !== (5L +- 2.toByte) }
intercept[TestFailedException] { minusSevenLong should !== (-9L +- 2.toByte) }
intercept[TestFailedException] { minusSevenLong should !== (-8L +- 2.toByte) }
intercept[TestFailedException] { minusSevenLong should !== (-7L +- 2.toByte) }
intercept[TestFailedException] { minusSevenLong should !== (-6L +- 2.toByte) }
intercept[TestFailedException] { minusSevenLong should !== (-5L +- 2.toByte) }
// Int +- Int
intercept[TestFailedException] { sevenInt should !== (9 +- 2) }
intercept[TestFailedException] { sevenInt should !== (8 +- 2) }
intercept[TestFailedException] { sevenInt should !== (7 +- 2) }
intercept[TestFailedException] { sevenInt should !== (6 +- 2) }
intercept[TestFailedException] { sevenInt should !== (5 +- 2) }
intercept[TestFailedException] { minusSevenInt should !== (-9 +- 2) }
intercept[TestFailedException] { minusSevenInt should !== (-8 +- 2) }
intercept[TestFailedException] { minusSevenInt should !== (-7 +- 2) }
intercept[TestFailedException] { minusSevenInt should !== (-6 +- 2) }
intercept[TestFailedException] { minusSevenInt should !== (-5 +- 2) }
// Int +- Short
intercept[TestFailedException] { sevenInt should !== (9 +- 2.toShort) }
intercept[TestFailedException] { sevenInt should !== (8 +- 2.toShort) }
intercept[TestFailedException] { sevenInt should !== (7 +- 2.toShort) }
intercept[TestFailedException] { sevenInt should !== (6 +- 2.toShort) }
intercept[TestFailedException] { sevenInt should !== (5 +- 2.toShort) }
intercept[TestFailedException] { minusSevenInt should !== (-9 +- 2.toShort) }
intercept[TestFailedException] { minusSevenInt should !== (-8 +- 2.toShort) }
intercept[TestFailedException] { minusSevenInt should !== (-7 +- 2.toShort) }
intercept[TestFailedException] { minusSevenInt should !== (-6 +- 2.toShort) }
intercept[TestFailedException] { minusSevenInt should !== (-5 +- 2.toShort) }
// Int +- Byte
intercept[TestFailedException] { sevenInt should !== (9 +- 2.toByte) }
intercept[TestFailedException] { sevenInt should !== (8 +- 2.toByte) }
intercept[TestFailedException] { sevenInt should !== (7 +- 2.toByte) }
intercept[TestFailedException] { sevenInt should !== (6 +- 2.toByte) }
intercept[TestFailedException] { sevenInt should !== (5 +- 2.toByte) }
intercept[TestFailedException] { minusSevenInt should !== (-9 +- 2.toByte) }
intercept[TestFailedException] { minusSevenInt should !== (-8 +- 2.toByte) }
intercept[TestFailedException] { minusSevenInt should !== (-7 +- 2.toByte) }
intercept[TestFailedException] { minusSevenInt should !== (-6 +- 2.toByte) }
intercept[TestFailedException] { minusSevenInt should !== (-5 +- 2.toByte) }
// Short +- Short
intercept[TestFailedException] { sevenShort should !== (9.toShort +- 2.toShort) }
intercept[TestFailedException] { sevenShort should !== (8.toShort +- 2.toShort) }
intercept[TestFailedException] { sevenShort should !== (7.toShort +- 2.toShort) }
intercept[TestFailedException] { sevenShort should !== (6.toShort +- 2.toShort) }
intercept[TestFailedException] { sevenShort should !== (5.toShort +- 2.toShort) }
intercept[TestFailedException] { minusSevenShort should !== ((-9).toShort +- 2.toShort) }
intercept[TestFailedException] { minusSevenShort should !== ((-8).toShort +- 2.toShort) }
intercept[TestFailedException] { minusSevenShort should !== ((-7).toShort +- 2.toShort) }
intercept[TestFailedException] { minusSevenShort should !== ((-6).toShort +- 2.toShort) }
intercept[TestFailedException] { minusSevenShort should !== ((-5).toShort +- 2.toShort) }
// Short +- Byte
intercept[TestFailedException] { sevenShort should !== (9.toShort +- 2.toByte) }
intercept[TestFailedException] { sevenShort should !== (8.toShort +- 2.toByte) }
intercept[TestFailedException] { sevenShort should !== (7.toShort +- 2.toByte) }
intercept[TestFailedException] { sevenShort should !== (6.toShort +- 2.toByte) }
intercept[TestFailedException] { sevenShort should !== (5.toShort +- 2.toByte) }
intercept[TestFailedException] { minusSevenShort should !== ((-9).toShort +- 2.toByte) }
intercept[TestFailedException] { minusSevenShort should !== ((-8).toShort +- 2.toByte) }
intercept[TestFailedException] { minusSevenShort should !== ((-7).toShort +- 2.toByte) }
intercept[TestFailedException] { minusSevenShort should !== ((-6).toShort +- 2.toByte) }
intercept[TestFailedException] { minusSevenShort should !== ((-5).toShort +- 2.toByte) }
// Byte +- Byte
intercept[TestFailedException] { sevenByte should !== (9.toByte +- 2.toByte) }
intercept[TestFailedException] { sevenByte should !== (8.toByte +- 2.toByte) }
intercept[TestFailedException] { sevenByte should !== (7.toByte +- 2.toByte) }
intercept[TestFailedException] { sevenByte should !== (6.toByte +- 2.toByte) }
intercept[TestFailedException] { sevenByte should !== (5.toByte +- 2.toByte) }
intercept[TestFailedException] { minusSevenByte should !== ((-9).toByte +- 2.toByte) }
intercept[TestFailedException] { minusSevenByte should !== ((-8).toByte +- 2.toByte) }
intercept[TestFailedException] { minusSevenByte should !== ((-7).toByte +- 2.toByte) }
intercept[TestFailedException] { minusSevenByte should !== ((-6).toByte +- 2.toByte) }
intercept[TestFailedException] { minusSevenByte should !== ((-5).toByte +- 2.toByte) }
}
}
// Verifies that `+-` rejects a zero or negative tolerance with an IllegalArgumentException,
// once for every pivot/tolerance numeric-type combination. The asserted error message shows
// the tolerance AFTER it has been widened to the pivot's numeric type (e.g. -2 becomes "-2.0"
// for Double/Float pivots, and -0.2f widens to "-0.20000000298023224" for a Double pivot).
object `The X +- Y syntax` {
def `should throw IllegalArgumentException if the number passed to the right is 0 or negative` {
// Double +- Double
val caught1 = intercept[IllegalArgumentException] {
sevenDotOh should === (7.1 +- -0.2)
}
assert(caught1.getMessage === "-0.2 passed to +- was zero or negative. Must be a positive non-zero number.", caught1.getMessage)
// Double +- Float
val caught2 = intercept[IllegalArgumentException] {
sevenDotOh should === (7.1 +- -0.2f)
}
// -0.2f widened to Double is not exactly representable, hence the long decimal expansion.
assert(caught2.getMessage === "-0.20000000298023224 passed to +- was zero or negative. Must be a positive non-zero number.")
// Double +- Long
val caught3 = intercept[IllegalArgumentException] {
sevenDotOh should === (7.1 +- -2L)
}
assert(caught3.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Double +- Int
val caught4 = intercept[IllegalArgumentException] {
sevenDotOh should === (7.1 +- -2)
}
assert(caught4.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Double +- Short
val caught5 = intercept[IllegalArgumentException] {
sevenDotOh should === (7.1 +- (-2).toShort)
}
assert(caught5.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Double +- Byte
val caught6 = intercept[IllegalArgumentException] {
sevenDotOh should === (7.1 +- (-2).toByte)
}
assert(caught6.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Float +- Float
val caught7 = intercept[IllegalArgumentException] {
sevenDotOhFloat should === (7.1f +- -0.2f)
}
assert(caught7.getMessage === "-0.2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Float +- Long
val caught8 = intercept[IllegalArgumentException] {
sevenDotOhFloat should === (7.1f +- -2L)
}
assert(caught8.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Float +- Int
val caught9 = intercept[IllegalArgumentException] {
sevenDotOhFloat should === (7.1f +- -2)
}
assert(caught9.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Float +- Short
val caught10 = intercept[IllegalArgumentException] {
sevenDotOhFloat should === (7.1f +- (-2).toShort)
}
assert(caught10.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Float +- Byte
val caught11 = intercept[IllegalArgumentException] {
sevenDotOhFloat should === (7.1f +- (-2).toByte)
}
assert(caught11.getMessage === "-2.0 passed to +- was zero or negative. Must be a positive non-zero number.")
// Long +- Long
val caught12 = intercept[IllegalArgumentException] {
sevenLong should === (9L +- -2L)
}
assert(caught12.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Long +- Int
val caught13 = intercept[IllegalArgumentException] {
sevenLong should === (9L +- -2)
}
assert(caught13.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Long +- Short
val caught14 = intercept[IllegalArgumentException] {
sevenLong should === (9L +- (-2).toShort)
}
assert(caught14.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Long +- Byte
val caught15 = intercept[IllegalArgumentException] {
sevenLong should === (9L +- (-2).toByte)
}
assert(caught15.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Int +- Int
val caught16 = intercept[IllegalArgumentException] {
sevenInt should === (9 +- -2)
}
assert(caught16.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Int +- Short
val caught17 = intercept[IllegalArgumentException] {
sevenInt should === (9 +- (-2).toShort)
}
assert(caught17.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Int +- Byte
val caught18 = intercept[IllegalArgumentException] {
sevenInt should === (9 +- (-2).toByte)
}
assert(caught18.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Short +- Short
val caught19 = intercept[IllegalArgumentException] {
sevenShort should === (9.toShort +- (-2).toShort)
}
assert(caught19.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Short +- Byte
val caught20 = intercept[IllegalArgumentException] {
sevenShort should === (9.toShort +- (-2).toByte)
}
assert(caught20.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
// Byte +- Byte
val caught21 = intercept[IllegalArgumentException] {
sevenByte should === (9.toByte +- (-2).toByte)
}
assert(caught21.getMessage === "-2 passed to +- was zero or negative. Must be a positive non-zero number.")
}
}
}
| travisbrown/scalatest | src/test/scala/org/scalatest/ShouldTripleEqualsToleranceSpec.scala | Scala | apache-2.0 | 48,224 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.web.services
import com.normation.rudder.domain.nodes.NodeInfo
import com.normation.utils.Utils.isEmpty
import com.normation.inventory.domain.NodeId
import com.normation.inventory.ldap.core._
import LDAPConstants._
import bootstrap.liftweb.LiftSpringApplicationContext.inject
import org.slf4j.LoggerFactory
import scala.xml._
import net.liftweb.common._
import net.liftweb.http._
import net.liftweb.util._
import Helpers._
import net.liftweb.http.js._
import JsCmds._
import JE._
import net.liftweb.http.SHtml._
import com.normation.exceptions.TechnicalException
import net.liftweb.http.Templates
import com.normation.rudder.repository.ReportsRepository
import bootstrap.liftweb.RudderConfig
import com.normation.rudder.web.components.DateFormaterService
import com.normation.rudder.reports.execution.RoReportsExecutionRepository
import com.normation.rudder.reports.execution.AgentRun
import com.normation.rudder.domain.logger.TimingDebugLogger
/**
* Very much like the NodeGrid, but with the new WB and without ldap information
*
* @author Nicolas CHARLES
*
*/
object SrvGrid {
// SLF4J logger shared by all SrvGrid instances (the class itself also mixes in Lift's Loggable).
val logger = LoggerFactory.getLogger(classOf[SrvGrid])
}
/**
* Present a grid of server in a jQuery Datatable
* widget.
*
* To use it:
* - add the needed js/css dependencies by adding the result
* of head() in the calling template head
* - call the display(servers) method
*/
class SrvGrid(
roAgentRunsRepository : RoReportsExecutionRepository
) extends Loggable {
// Location of the hidden Lift template backing the grid: templates-hidden/srv_grid.html
private def templatePath = List("templates-hidden", "srv_grid")
// Loads the template; failing to find it is a deployment error, hence the hard exception.
private def template() = Templates(templatePath) match {
case Empty | Failure(_,_,_) =>
throw new TechnicalException("Template for server grid not found. I was looking for %s.html".format(templatePath.mkString("/")))
case Full(n) => n
}
// The <servergrid:table> snippet extracted from the template.
private def tableTemplate = chooseTemplate("servergrid","table",template)
/*
* All JS/CSS needed to have the datatable working.
*/
def head() : NodeSeq = DisplayNode.head
// Name of the client-side JS variable holding the DataTable instance for a given table id.
def jsVarNameForId(tableId:String) = "oTable" + tableId
/**
* Display and initialize the grid for the given list of nodes.
* @param nodes : a sequence of the nodes to show
* @param tableId : the id of the table
* @param callback : Optional callback to use on node; if missing, replaced by a link to that node
* @param refreshNodes : Optional provider used by the client-side refresh button to re-fetch nodes
*/
def displayAndInit(
nodes : Seq[NodeInfo]
, tableId : String
, callback : Option[String => JsCmd] = None
, refreshNodes : Option[ () => Seq[NodeInfo]] = None
) : NodeSeq = {
tableXml( tableId) ++ Script(OnLoad(initJs(tableId,nodes,callback,refreshNodes)))
}
/**
* Initialize the table by javascript.
* Takes the id of the table as param,
* the nodes to compute the data,
* and the optional callback.
*/
def initJs(
tableId : String
, nodes : Seq[NodeInfo]
, callback : Option[String => JsCmd]
, refreshNodes : Option[ () => Seq[NodeInfo]]
) : JsCmd = {
val data = getTableData(nodes,callback)
// "undefined" (as a JS literal) tells createNodeTable there is no refresh function.
val refresh = refreshNodes.map(refreshData(_,callback,tableId).toJsCmd).getOrElse("undefined")
JsRaw(s"""createNodeTable("${tableId}",${data.json.toJsCmd},"${S.contextPath}",${refresh});""")
}
// Build the JsTableData (one NodeLine per node) fed to the client-side DataTable.
// Fetching last runs is done in one batched query; its duration is traced when debug is on.
def getTableData (
nodes : Seq[NodeInfo]
, callback : Option[String => JsCmd]
) = {
val now = System.currentTimeMillis
val runs = roAgentRunsRepository.getNodesLastRun(nodes.map(_.id).toSet)
if(TimingDebugLogger.isDebugEnabled) {
TimingDebugLogger.debug(s"Get all last run date time: ${System.currentTimeMillis - now}ms")
}
val lines = (for {
lastReports <- runs
} yield {
nodes.map(node => NodeLine(node,lastReports.get(node.id), callback))
}) match {
case eb: EmptyBox =>
// On failure, log the whole error chain and render an empty table rather than failing the page.
val msg = "Error when trying to get nodes info"
val e = eb ?~! msg
logger.error(e.messageChain)
e.rootExceptionCause.foreach(ex => logger.error(ex) )
Nil
case Full(lines) => lines.toList
}
JsTableData(lines)
}
// Build the anonymous JS function bound to the table's refresh action: it calls back
// into Lift (ajaxCall), recomputes the table data server-side, and updates the table.
def refreshData (
refreshNodes : () => Seq[NodeInfo]
, callback : Option[String => JsCmd]
, tableId: String
) = {
val ajaxCall = SHtml.ajaxCall(JsNull, (s) => {
val nodes = refreshNodes()
val data = getTableData(nodes,callback)
JsRaw(s"""refreshTable("${tableId}",${data.json.toJsCmd});""")
} )
AnonFunc("",ajaxCall)
}
/**
* Html template of the table; the id is given as parameter.
*/
def tableXml(tableId:String) : NodeSeq = {
<table id={tableId} cellspacing="0"/>
}
}
/*
* Javascript object containing all data to create a line in the DataTable
* { "name" : Node hostname [String]
* , "id" : Node id [String]
* , "machineType" : Node machine type [String]
* , "osName" : Node OS name [String]
* , "osVersion" : Node OS version [ String ]
* , "servicePack" : Node OS service pack [ String ]
* , "lastReport" : Last report received about that node [ String ]
* , "callBack" : Callback on Node; if absent, replaced by a link to nodeId [ Function ]
* }
*/
/**
 * One row of the node DataTable: computes the JSON object described in the comment above
 * from a node, its (possibly failed) last agent run, and an optional JS callback.
 */
case class NodeLine (
    node       : NodeInfo
  , lastReport : Box[Option[AgentRun]]
  , callback   : Option[String => JsCmd]
) extends JsTableLine {

  // Optional "callback" property: an anonymous JS function calling back into Lift with the node id.
  val optCallback = {
    callback.map(cb => ("callback", AnonFunc(ajaxCall(JsNull, s => cb(node.id.value)))))
  }

  // Display name; fall back to the node id when the hostname is missing.
  val hostname = {
    if (isEmpty(node.hostname)) {
      s"(Missing name) ${node.id.value}"
    } else {
      node.hostname
    }
  }

  // Formatted date of the last agent run, "Never" when none was recorded,
  // or an error message when the lookup itself failed.
  val lastReportValue = {
    lastReport match {
      case Full(exec) =>
        exec.map(report => DateFormaterService.getFormatedDate(report.agentRunId.date)).getOrElse("Never")
      case _ : EmptyBox =>
        "Error While fetching node executions"
    }
  }

  // JSON properties common to every row.
  val baseFields = {
    JsObj(
      ( "name" -> hostname )
    , ( "id" -> node.id.value )
    , ( "machineType" -> node.machineType )
      // consistency fix: build the pair inside the parentheses like every other field
      // (previously written as `( "osName") -> ...`, which relied on the same implicit anyway)
    , ( "osName" -> S.?(s"os.name.${node.osName}") )
    , ( "osVersion" -> node.osVersion)
    , ( "servicePack" -> node.servicePack.getOrElse("N/A"))
    , ( "lastReport" -> lastReportValue )
    )
  }

  // Final JSON: base fields plus the optional callback property.
  val json = baseFields +* JsObj(optCallback.toSeq:_*)
}
| Kegeruneku/rudder | rudder-web/src/main/scala/com/normation/rudder/web/services/SrvGrid.scala | Scala | agpl-3.0 | 7,819 |
/*
* Copyright 2019 Qameta Software OÜ
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.qameta.allure.scalatest
import java.lang.annotation.Annotation
import java.util.UUID
import java.util.concurrent.TimeUnit
import java.util.concurrent.locks.ReentrantReadWriteLock
import io.qameta.allure._
import io.qameta.allure.model.{Status, StatusDetails, TestResult}
import io.qameta.allure.util.ResultsUtils._
import org.scalatest.Reporter
import org.scalatest.events._
import org.scalatest.exceptions.TestFailedException
import scala.jdk.CollectionConverters._
import scala.collection.mutable
/**
* @author charlie (Dmitry Baev).
*/
/**
 * Mix this trait into test code to bind the current thread to the Allure test case that
 * the AllureScalatest reporter scheduled for it. The constructor call blocks (polling,
 * bounded by a timeout) until the reporter has registered a uuid for this thread —
 * see AllureScalatestContextHolder.populate().
 */
trait AllureScalatestContext {
AllureScalatestContextHolder.populate()
}
/**
 * Thread-name -> Allure test-case uuid registry shared between the reporter (which schedules
 * test cases) and AllureScalatestContext (mixed into user code running the test).
 * The underlying mutable map is guarded by a read/write lock.
 */
object AllureScalatestContextHolder {
// Maximum time populate() waits for the reporter to register the current thread's uuid.
private val populateTimeout = TimeUnit.SECONDS.toMillis(3)
private val lock = new ReentrantReadWriteLock()
// thread name -> test case uuid; all access goes through the locked add/get/remove below.
private val threads: mutable.HashMap[String, String] = mutable.HashMap[String, String]()
/**
 * Poll (every 100ms, up to populateTimeout) until the reporter has mapped the current
 * thread to a test-case uuid, then set that uuid as the lifecycle's current test case.
 * Silently does nothing if the timeout elapses without a registration.
 */
def populate(): Unit = {
val threadName = Thread.currentThread().getName
var maybeUuid = get(threadName)
val current = System.currentTimeMillis()
while (maybeUuid.isEmpty && System.currentTimeMillis - current < populateTimeout) {
Thread.sleep(100)
maybeUuid = get(threadName)
}
maybeUuid.fold {} { uuid => Allure.getLifecycle.setCurrentTestCase(uuid) }
}
// Register the uuid scheduled for a thread (write-locked).
private[scalatest] def add(threadId: String, uuid: String): Unit = {
lock.writeLock().lock()
try {
threads += threadId -> uuid
} finally {
lock.writeLock().unlock()
}
}
// Look up the uuid registered for a thread, if any (read-locked).
private[scalatest] def get(threadId: String): Option[String] = {
lock.readLock().lock()
try {
threads.get(threadId)
} finally {
lock.readLock().unlock()
}
}
// Drop the registration for a thread once its test case stops (write-locked).
private[scalatest] def remove(threadId: String): Unit = {
lock.writeLock().lock()
try {
threads -= threadId
} finally {
lock.writeLock().unlock()
}
}
}
/**
 * ScalaTest Reporter that forwards suite/test lifecycle events to the Allure lifecycle.
 */
class AllureScalatest(val lifecycle: AllureLifecycle) extends Reporter {
// NOTE(review): presumably guards `suites` in the location helpers defined later in this class — confirm.
private val lock = new ReentrantReadWriteLock()
// suiteId -> source Location, recorded between SuiteStarting and SuiteCompleted/Aborted.
private val suites = mutable.HashMap[String, Location]()
// No-arg constructor wires the reporter to the global Allure lifecycle.
def this() = this(Allure.getLifecycle)
/** Dispatch each ScalaTest event to the matching Allure handler; all other events are ignored. */
override def apply(event: Event): Unit = event match {
case event: SuiteStarting => startSuite(event)
case event: SuiteCompleted => completeSuite(event)
case event: SuiteAborted => abortSuite(event)
case event: TestStarting => startTestCase(event)
case event: TestFailed => failTestCase(event)
case event: TestCanceled => cancelTestCase(event)
case event: TestSucceeded => passTestCase(event)
case event: TestIgnored => ignoreTestCase(event)
case _ => ()
}
/** Record the suite's source location so annotation lookup can use it for each test of the suite. */
def startSuite(event: SuiteStarting): Unit = {
setSuiteLocation(event.suiteId, event.location)
}
/** Forget the stored location once the suite completes. */
def completeSuite(event: SuiteCompleted): Unit = {
removeSuiteLocation(event.suiteId)
}
/** Forget the stored location when the suite aborts. */
def abortSuite(event: SuiteAborted): Unit = {
removeSuiteLocation(event.suiteId)
}
/** Reporter hook: a test is about to run — open a matching Allure test case bound to its thread. */
def startTestCase(event: TestStarting): Unit =
  startTest(
    suiteId        = event.suiteId,
    suiteName      = event.suiteName,
    suiteClassName = event.suiteClassName,
    location       = event.location,
    testName       = event.testName,
    threadId       = Some(event.threadName)
  )
/**
 * Reporter hook: a test failed. Assertion failures (TestFailedException) are reported as
 * FAILED, any other throwable as BROKEN, then the current Allure test case is closed.
 */
def failTestCase(event: TestFailed): Unit = {
  // ScalaTest may omit the throwable; synthesize one from the message so details are never empty.
  val cause = event.throwable.getOrElse(new RuntimeException(event.message))
  val outcome = cause match {
    case _: TestFailedException => Status.FAILED
    case _                      => Status.BROKEN
  }
  val details = getStatusDetails(cause).orElse(new StatusDetails().setMessage(event.message))
  stopTest(Some(outcome), Some(details), Some(event.threadName))
}
/** Reporter hook: a test passed — close the current Allure test case as PASSED. */
def passTestCase(event: TestSucceeded): Unit =
  stopTest(status = Some(Status.PASSED), statusDetails = None, threadName = Some(event.threadName))
/** Reporter hook: a test was canceled — close the current Allure test case as SKIPPED. */
def cancelTestCase(event: TestCanceled): Unit = {
  val details = new StatusDetails().setMessage(event.message)
  stopTest(status = Some(Status.SKIPPED), statusDetails = Some(details), threadName = Some(event.threadName))
}
/**
 * Reporter hook: an ignored test never emits TestStarting, so synthesize the full
 * start/stop cycle here. No thread binding is made on start (threadId = None) and the
 * result carries only a "Test ignored" message.
 * NOTE(review): the status is left unset (None) rather than SKIPPED — confirm this is the
 * intended Allure representation for ignored tests.
 */
def ignoreTestCase(event: TestIgnored): Unit = {
startTest(
event.suiteId,
event.suiteName,
event.suiteClassName,
event.location,
event.testName,
None
)
stopTest(
None,
Some(new StatusDetails().setMessage("Test ignored")),
Some(event.threadName)
)
}
/**
 * Create, schedule and start an Allure test case for the given ScalaTest test.
 *
 * Labels come from fixed reporter metadata, globally provided labels, and the
 * Severity/Owner/Epic/Feature/Story annotations found on the test location and on the
 * suite; Description sets the result description; Link/Issue/TmsLink annotations become
 * Allure links. When threadId is provided, the thread is registered in
 * AllureScalatestContextHolder so user code can re-attach to this test case.
 *
 * @param suiteId        stable suite identifier (used for fullName/testCaseId/historyId)
 * @param suiteName      human-readable suite name (becomes the "suite" label)
 * @param suiteClassName suite class name, when known (becomes the testClass label)
 * @param location       source location of the test, used for annotation lookup
 * @param testName       name of the individual test
 * @param threadId       thread to bind to this test case; None for synthesized cases (ignored tests)
 */
private def startTest(suiteId: String,
                      suiteName: String,
                      suiteClassName: Option[String],
                      location: Option[Location],
                      testName: String,
                      threadId: Option[String]): Unit = {
  val uuid = UUID.randomUUID().toString
  // ListBuffer is only mutated in place (+=, ++=), never reassigned: val, not var.
  val labels = mutable.ListBuffer(
    createSuiteLabel(suiteName),
    createThreadLabel(),
    createHostLabel(),
    createLanguageLabel("scala"),
    createFrameworkLabel("scalatest")
  )
  labels ++= getProvidedLabels.asScala
  // Full path avoids the clash with the io.qameta.allure.Link annotation imported above.
  val links = mutable.ListBuffer[io.qameta.allure.model.Link]()
  val result = new TestResult()
    .setFullName(suiteId + " " + testName)
    .setName(testName)
    .setUuid(uuid)
    // testCaseId/historyId are both derived from the same stable key so reruns match up.
    .setTestCaseId(md5(suiteId + testName))
    .setHistoryId(md5(suiteId + testName))
  // Annotations on the test itself take no precedence over suite ones: both lists are applied.
  val testAnnotations = getAnnotations(location)
  val suiteAnnotations = getAnnotations(getSuiteLocation(suiteId))
  (testAnnotations ::: suiteAnnotations).foreach {
    case annotation: Severity => labels += createSeverityLabel(annotation.value())
    case annotation: Owner => labels += createOwnerLabel(annotation.value())
    case annotation: Description => result.setDescription(annotation.value())
    case annotation: Epic => labels += createEpicLabel(annotation.value())
    case annotation: Feature => labels += createFeatureLabel(annotation.value())
    case annotation: Story => labels += createStoryLabel(annotation.value())
    case annotation: Link => links += createLink(annotation)
    case annotation: Issue => links += createIssueLink(annotation.value())
    case annotation: TmsLink => links += createTmsLink(annotation.value())
    case _ => ()
  }
  suiteClassName.foreach(className => labels += createTestClassLabel(className))
  result.setLabels(labels.asJava)
  // BUG FIX: the collected links were previously never attached to the result (built then discarded).
  result.setLinks(links.asJava)
  lifecycle.scheduleTestCase(result)
  lifecycle.startTestCase(uuid)
  // this should be called after the test case is scheduled
  threadId.foreach(thread => AllureScalatestContextHolder.add(thread, uuid))
}
/**
 * Stops the current Allure test case (if one is active), optionally setting
 * its status and status details, then persists the result.
 *
 * @param status        final status to record, if any
 * @param statusDetails failure/skip details to record, if any
 * @param threadName    reporting thread whose Allure context should be detached
 */
private def stopTest(status: Option[Status],
                     statusDetails: Option[StatusDetails],
                     threadName: Option[String]): Unit = {
  // Detach the Allure context from the executing thread before closing the test case.
  threadName.foreach(thread => AllureScalatestContextHolder.remove(thread))
  lifecycle.getCurrentTestCase.ifPresent(uuid => {
    // The `: Unit` ascription keeps the lambda eligible for SAM conversion to the
    // Java functional interface expected by updateTestCase.
    lifecycle.updateTestCase(uuid, (result: TestResult) => {
      status.foreach(st => result.setStatus(st))
      statusDetails.foreach(details => result.setStatusDetails(details))
    }: Unit)
    lifecycle.stopTestCase(uuid)
    lifecycle.writeTestCase(uuid)
  })
}
/**
 * Loads runtime annotations for a ScalaTest location via reflection:
 * class-level annotations for TopOfClass, method-level for TopOfMethod,
 * and an empty list for anything else.
 * NOTE(review): Class.forName/getMethod can throw for unresolvable names —
 * callers are expected to pass locations reported by ScalaTest itself.
 */
private def getAnnotations(location: Option[Location]): List[Annotation] = location match {
  case Some(TopOfClass(className)) => Class.forName(className).getAnnotations.toList
  case Some(TopOfMethod(className, methodName)) => Class.forName(className).getMethod(methodName).getDeclaredAnnotations.toList
  case _ => List()
}
/**
 * Caches the suite's location under the write lock. A missing location is a no-op.
 * Read back by getSuiteLocation and cleared by removeSuiteLocation.
 */
private def setSuiteLocation(suiteId: String, location: Option[Location]): Unit = {
  location.foreach { l =>
    lock.writeLock().lock()
    try {
      suites += suiteId -> l
    } finally {
      lock.writeLock().unlock()
    }
  }
}
/** Reads a cached suite location under the read lock; None if the suite was never registered. */
private def getSuiteLocation(suiteId: String): Option[Location] = {
  lock.readLock().lock()
  try {
    suites.get(suiteId)
  } finally {
    lock.readLock().unlock()
  }
}
/** Removes a cached suite location under the write lock. */
private def removeSuiteLocation(suiteId: String): Unit = {
  lock.writeLock().lock()
  try {
    suites -= suiteId
  } finally {
    lock.writeLock().unlock()
  }
}
}
| allure-framework/allure-java | allure-scalatest/src/main/scala/io/qameta/allure/scalatest/AllureScalatest.scala | Scala | apache-2.0 | 8,534 |
/*
* Copyright (c) 2012-2016 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.enrich
package common
package utils
// Scalaz
import scalaz._
import Scalaz._
// Scala
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.util.control.NonFatal
// Akka
import akka.actor.ActorSystem
// Akka Streams
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.stream.ActorMaterializer
import akka.http.scaladsl.model.headers.{ Authorization, BasicHttpCredentials }
/**
 * Thin blocking wrapper around akka-http for performing HTTP API requests.
 * Failures are surfaced as scalaz Validation failures rather than thrown.
 */
class HttpClient(actorSystem: ActorSystem) {
  /**
   * Inner client to perform HTTP API requests
   */
  implicit val system = actorSystem
  implicit val context = system.dispatcher
  implicit val materializer = ActorMaterializer()
  private val http = Http()
  /**
   * Blocking method to get body of HTTP response
   *
   * @param request assembled request object
   * @param timeout time in milliseconds after which request can be considered failed
   *                used for both connection and receiving
   * @return validated body of HTTP request
   */
  def getBody(request: HttpRequest, timeout: Int): Validation[Throwable, String] = {
    try {
      val response = http.singleRequest(request)
      // NOTE(review): the same timeout bounds both entity strictification and the
      // outer Await, so the two waits share one wall-clock budget rather than adding up.
      val body = response.flatMap(_.entity.toStrict(timeout.milliseconds).map(_.data.utf8String))
      Await.result(body, timeout.milliseconds).success
    } catch {
      // NonFatal deliberately lets fatal JVM errors propagate
      case NonFatal(e) => e.failure
    }
  }
  /**
   * Build HTTP request object
   *
   * @param uri full URI to request
   * @param authUser optional username for basic auth
   * @param authPassword optional password for basic auth
   * @param method HTTP method
   * @return successful HTTP request or throwable in case of invalid URI or method
   */
  def buildRequest(
    uri: String,
    authUser: Option[String],
    authPassword: Option[String],
    method: String = "GET"): Validation[Throwable, HttpRequest] = {
    val auth = buildAuthorization(authUser, authPassword)
    try {
      // getForKey returns None for an unknown method name; .get then throws
      // NoSuchElementException, which the catch below converts into a Failure.
      HttpRequest(method = HttpMethods.getForKey(method).get, uri = uri, headers = auth.toList).success
    } catch {
      case NonFatal(e) => e.failure
    }
  }
  /**
   * Build [[Authorization]] header .
   * Unlike predefined behaviour which assumes both `authUser` and `authPassword` must be provided
   * this will work if ANY of `authUser` or `authPassword` provided
   */
  private def buildAuthorization(authUser: Option[String], authPassword: Option[String]): Option[Authorization] =
    if (List(authUser, authPassword).flatten.isEmpty) none
    else Authorization(BasicHttpCredentials(authUser.getOrElse(""), authPassword.getOrElse(""))).some
}
| haensel-ams/snowplow | 3-enrich/scala-common-enrich/src/main/scala/com.snowplowanalytics.snowplow.enrich/common/utils/HttpClient.scala | Scala | apache-2.0 | 3,309 |
package com.datasift.dropwizard.scala.test
import java.io.{IOException, File}
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.{FileVisitResult, SimpleFileVisitor, Path, Files}
import com.mysql.management.driverlaunched.ServerLauncherSocketFactory
import io.dropwizard.db.ManagedDataSource
import scala.util.{Failure, Try}
object MySQLTest {
  // Captures the embedded-MySQL (mxj) server base directory out of the JDBC
  // connection URI: the value of its `server.basedir` parameter.
  private val UriRegex =
    "^jdbc:mysql:mxj://[^:]+:\\\\d+/[^?]+?.*server\\\\.basedir=([^&]+).*$".r
  /** Registers a managed embedded-MySQL fixture with the given test suite. */
  def apply(suite: BeforeAndAfterAllMulti, connectionURI: => String)
           (newDataSource: => ManagedDataSource): MySQLTest =
    new MySQLTest(suite, connectionURI)(newDataSource)
}
/**
 * Test fixture managing an embedded MySQL (mxj) instance for the lifetime of a
 * test suite: the data source is created in `beforeAll`, and the server is shut
 * down (or its base directory deleted) in `afterAll`.
 */
class MySQLTest(suite: BeforeAndAfterAllMulti, connectionURI: => String)
               (newDataSource: => ManagedDataSource) {
  import MySQLTest.UriRegex
  // Populated by the suite's beforeAll hook; Failure(NotInitializedException) until then.
  var _dataSource: Try[ManagedDataSource] = Failure(NotInitializedException)
  var _baseDir: Try[File] = Failure(NotInitializedException)
  /** Data source for the embedded server, once initialized. */
  def dataSource: Try[ManagedDataSource] = _dataSource
  /** Server base directory parsed from the connection URI, once initialized. */
  def baseDir: Try[File] = _baseDir
  suite.beforeAll {
    _dataSource = Try(newDataSource)
    // A URI that does not match UriRegex raises a MatchError, captured as a Failure by Try.
    _baseDir = Try(connectionURI match {
      case UriRegex(dir) => new File(dir)
    })
    // Open a first connection so the embedded server actually starts.
    _dataSource.foreach(_.getConnection)
  }
  suite.afterAll {
    _baseDir.foreach { baseDir =>
      if (!ServerLauncherSocketFactory.shutdown(baseDir, null)) {
        deleteRecursively(baseDir.toPath)
      }
    }
  }
  /** Recursively deletes a directory tree: files first, then the directories themselves. */
  private def deleteRecursively(path: Path): Unit = {
    Files.walkFileTree(path, new SimpleFileVisitor[Path]() {
      override def visitFile(file: Path,
                             attrs: BasicFileAttributes): FileVisitResult = {
        Files.delete(file)
        FileVisitResult.CONTINUE
      }
      override def postVisitDirectory(dir: Path,
                                      ex: IOException): FileVisitResult = {
        Files.delete(dir)
        FileVisitResult.CONTINUE
      }
    })
  }
}
| datasift/dropwizard-scala | test/src/main/scala/com/datasift/dropwizard/scala/test/MySQLTest.scala | Scala | apache-2.0 | 1,920 |
package org.broadinstitute.clio.client.dispatch
import akka.NotUsed
import akka.stream.scaladsl.Source
import cats.syntax.show._
import io.circe.Json
import org.broadinstitute.clio.client.commands.AddCommand
import org.broadinstitute.clio.client.util.IoUtil
import org.broadinstitute.clio.client.webclient.ClioWebClient
import org.broadinstitute.clio.transfer.model.ClioIndex
/**
 * Executor for "add" commands, which read some metadata from disk and upsert
 * it to the clio-server, optionally overwriting existing fields.
 */
class AddExecutor[CI <: ClioIndex](addCommand: AddCommand[CI]) extends Executor {
  import addCommand.index.implicits._
  // Human-readable rendering of the upsert key, used in the success log line.
  private val prettyKey = addCommand.key.show
  /**
   * Reads the metadata document referenced by the command and upserts it under
   * the command's key, emitting the resulting upsert id.
   */
  override def execute(
    webClient: ClioWebClient,
    ioUtil: IoUtil
  ): Source[Json, NotUsed] = {
    import Executor.SourceMonadOps
    for {
      // Read and decode the metadata file from the configured location.
      metadata <- ioUtil.readMetadata(addCommand.index)(addCommand.metadataLocation)
      // `force` requests overwriting of existing fields (see class doc).
      upsertId <- webClient.upsert(addCommand.index)(
        addCommand.key,
        metadata,
        addCommand.force
      )
    } yield {
      logger.info(s"Successfully updated record for $prettyKey.")
      upsertId
    }
  }
}
| broadinstitute/clio | clio-client/src/main/scala/org/broadinstitute/clio/client/dispatch/AddExecutor.scala | Scala | bsd-3-clause | 1,173 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.physical.stream
import org.apache.flink.api.dag.Transformation
import org.apache.flink.streaming.api.transformations.OneInputTransformation
import org.apache.flink.table.dataformat.BaseRow
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.codegen.{CodeGeneratorContext, ExpandCodeGenerator}
import org.apache.flink.table.planner.delegation.StreamPlanner
import org.apache.flink.table.planner.plan.nodes.calcite.Expand
import org.apache.flink.table.planner.plan.nodes.exec.{ExecNode, StreamExecNode}
import org.apache.flink.table.runtime.typeutils.BaseRowTypeInfo
import org.apache.calcite.plan.{RelOptCluster, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rex.RexNode
import java.util
import scala.collection.JavaConversions._
/**
 * Stream physical RelNode for [[Expand]].
 */
class StreamExecExpand(
    cluster: RelOptCluster,
    traitSet: RelTraitSet,
    inputRel: RelNode,
    outputRowType: RelDataType,
    projects: util.List[util.List[RexNode]],
    expandIdIndex: Int)
  extends Expand(cluster, traitSet, inputRel, outputRowType, projects, expandIdIndex)
  with StreamPhysicalRel
  with StreamExecNode[BaseRow] {
  // Expand itself imposes no watermark requirement.
  override def requireWatermark: Boolean = false
  override def copy(traitSet: RelTraitSet, inputs: util.List[RelNode]): RelNode = {
    new StreamExecExpand(cluster, traitSet, inputs.get(0), outputRowType, projects, expandIdIndex)
  }
  //~ ExecNode methods -----------------------------------------------------------
  override def getInputNodes: util.List[ExecNode[StreamPlanner, _]] = {
    getInputs.map(_.asInstanceOf[ExecNode[StreamPlanner, _]])
  }
  override def replaceInputNode(
      ordinalInParent: Int,
      newInputNode: ExecNode[StreamPlanner, _]): Unit = {
    replaceInput(ordinalInParent, newInputNode.asInstanceOf[RelNode])
  }
  /**
   * Translates this node into a OneInputTransformation running a code-generated
   * expand operator over the single input transformation.
   */
  override protected def translateToPlanInternal(
      planner: StreamPlanner): Transformation[BaseRow] = {
    val config = planner.getTableConfig
    val inputTransform = getInputNodes.get(0).translateToPlan(planner)
      .asInstanceOf[Transformation[BaseRow]]
    val inputType = inputTransform.getOutputType.asInstanceOf[BaseRowTypeInfo].toRowType
    val outputType = FlinkTypeFactory.toLogicalRowType(getRowType)
    val ctx = CodeGeneratorContext(config)
    // Generate the operator from the projection lists; retainHeader keeps row headers intact.
    val operator = ExpandCodeGenerator.generateExpandOperator(
      ctx,
      inputType,
      outputType,
      config,
      projects,
      opName = "StreamExpand",
      retainHeader = true)
    val transform = new OneInputTransformation(
      inputTransform,
      getRelDetailedDescription,
      operator,
      BaseRowTypeInfo.of(outputType),
      inputTransform.getParallelism)
    if (inputsContainSingleton()) {
      // A singleton input forces this operator to run non-parallel as well.
      transform.setParallelism(1)
      transform.setMaxParallelism(1)
    }
    transform
  }
}
| bowenli86/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/stream/StreamExecExpand.scala | Scala | apache-2.0 | 3,766 |
package scray.jdbc.extractors
import scray.querying.description.internal.Domain
import scray.querying.description.TableIdentifier
import scray.querying.description.QueryRange
/**
* Ancestor of all SQL dialects that implement relational DBMS dialects
*/
abstract class ScraySQLDialect(val name: String, unequal: String = "<>") {
/**
 * returns true if the provided jdbcURL is valid for this type of dialect
 */
def isDialectJdbcURL(jdbcURL: String): Boolean
/**
 * returns the name of this dialect for extraction
 */
def getName: String = name
/**
 * returns the unequal operator for this relational DBMS (defaults to "<>")
 */
def getUnequal: String = unequal
/**
 * removes quotes for this relational DBMS, using a default, which removes " and ; and '
 * (simple defense against breaking out of quoted identifiers)
 */
def removeQuotes(in: String): String = in.filterNot(c => c == '"' || c == ';' || c == ''')
/**
 * Returns a formatted Select String for this relational DBMS.
 * Identifiers are quote-stripped via removeQuotes before being embedded.
 * Warning: default assumption is, that where, group by, order by and limit clauses are directly consecutive
 * as specified in this list
 */
def getFormattedSelectString(table: TableIdentifier, where: String, limit: String,
    groupBy: String, orderBy: String): String =
  s"""SELECT * FROM "${removeQuotes(table.dbId)}"."${removeQuotes(table.tableId)}" ${decideWhere(where)} ${groupBy} ${orderBy} ${limit}"""
/**
 * limits are usually non-standard for DBMS systems, so we leave the implementation
 * open to enforce implementation in concrete dialects
 */
def getEnforcedLimit(range: Option[QueryRange], where: List[Domain[_]]): (String, List[Domain[_]])
/**
 * if the empty String should be considered to be equal to NULL values
 */
def emptyStringIsNull: Boolean = false
/**
 * Decide whether to place a WHERE in front of the domains: yields
 * "WHERE <domains>" for a non-empty domain string, and "" otherwise.
 */
def decideWhere(where: String): String = if (where.nonEmpty) s"WHERE $where" else ""
/**
 * class name of the JDBC driver for the database of this dialect;
 * must be provided by each concrete dialect
 */
val DRIVER_CLASS_NAME: String
} | scray/scray | scray-jdbc/src/main/scala/scray/jdbc/extractors/ScraySQLDialect.scala | Scala | apache-2.0 | 2,032 |
package com.rackspace.prefs
import com.nparry.orderly._
import com.rackspace.prefs.model.DBTables._
import com.rackspace.prefs.model.{DBTables, Preferences, PreferencesMetadata}
import org.joda.time.DateTime
import org.json4s.{JValue, JNothing, DefaultFormats, Formats}
import org.json4s.JsonDSL.WithDouble._
import org.scalatra._
import org.scalatra.json._
import org.scalatra.scalate.ScalateSupport
import org.slf4j.LoggerFactory
import org.apache.commons.validator.routines.UrlValidator
import org.springframework.web.util.UriUtils
import scala.slick.driver.JdbcDriver.simple._
import scala.slick.jdbc.JdbcBackend.Database
import scala.util.control.Breaks._
import collection.JavaConverters._
import javax.servlet.http.HttpServletRequest
case class PreferencesService(db: Database) extends ScalatraServlet
with ScalateSupport
with JacksonJsonSupport {
protected implicit val jsonFormats: Formats = DefaultFormats
val X_TENANT_ID = "x-tenant-id"
val logger = LoggerFactory.getLogger(getClass)
// The bare root path is not a resource: respond 404 with a JSON error body.
get("/") {
  NotFound(jsonifyError("Invalid URI: /"))
}
// Health endpoint: reports the number of preference-metadata rows as JSON.
get("/status") {
  db.withSession { implicit session =>
    val metadataCount = Query(preferencesMetadata.length).first
    jsonifyStatus(metadataCount)
  }
}
// Returns the JSON schema registered for a preference type; 404 when unknown.
get("/metadata/:preference_slug/?") {
  val preferenceSlug = params("preference_slug")
  contentType = formats("json")
  getMetadata(preferenceSlug) match {
    case Some(metadata: PreferencesMetadata) => metadata.schema
    case None => NotFound(jsonifyError("Metadata preferences for /" + preferenceSlug + " not found"))
  }
}
// anything that's not /metadata* goes here
// Fetches the stored preferences payload for (slug, id); 404 when absent.
get( """^/(?!metadata)([^/]*)/([^/]*)/?$""".r) {
  val uriParts = multiParams("captures")
  val preferenceSlug = uriParts(0)
  val id = uriParts(1)
  contentType = formats("json")
  db.withSession { implicit session =>
    // Join preferences with their metadata so the slug can be matched by name.
    val getPayloadQuery = for {
      (prefs, metadata) <- preferences innerJoin preferencesMetadata on (_.preferencesMetadataId === _.id)
      if prefs.id === id && metadata.slug === preferenceSlug
    } yield (prefs.payload)
    getPayloadQuery.list match {
      case List(payload: String) => payload
      case _ => NotFound(jsonifyError("Preferences for " + preferenceSlug + " with id " + id + " not found"))
    }
  }
}
// Stores preferences for (slug, id). The JSON body is first validated against
// the type's Orderly schema; container URLs are checked afterwards.
post("/:preference_slug/:id/?", request.getContentType() == "application/json") {
  val preferenceSlug = params("preference_slug")
  val id = params("id")
  val payload = request.body
  getMetadata(preferenceSlug) match {
    case Some(metadata: PreferencesMetadata) => {
      val orderly = Orderly(metadata.schema)
      orderly.validate(payload) match {
        case head :: tail =>
          // give them hints of what's wrong. Only print the first violation.
          BadRequest(jsonifyError("Preferences for /" + preferenceSlug + "/" + id +
            " does not validate properly. " + head.path + " " + head.message))
        case Nil => {
          // valid and non-empty json, write to db
          validateAndWritePreference(metadata, preferenceSlug, id, payload)
        }
      }
    }
    case None => BadRequest(jsonifyError("Preferences for /" + preferenceSlug + " does not have any metadata"))
  }
}
/**
 * Validate the preference json payload and write to the database.
 *
 * Validation order (first failure wins): the default container URL, then each
 * per-datacenter container URL, then the requirement that either a default
 * container or all datacenter container URLs are present.
 *
 * @param metadata metadata row (with schema) for this preference type
 * @param preferenceSlug slug identifying the preference type
 * @param id identifier of the resource the preferences belong to
 * @param payload raw JSON payload (already schema-validated by the caller)
 * @return Ok/Created on success, or BadRequest describing the first failure
 */
def validateAndWritePreference(metadata: PreferencesMetadata, preferenceSlug: String, id: String, payload: String): ActionResult = {
  val jsonContent = parse(payload)
  // validateContainer returns null when the container is acceptable (legacy contract),
  // so its results are wrapped in Option to chain the checks without vars or break.
  val defaultContainer = jsonContent \ "default_archive_container_url"
  val defaultContainerError = Option(validateContainer(preferenceSlug, id, defaultContainer))
  // validate urls of archive_container_urls, stopping at the first failure
  val containerError = defaultContainerError.orElse {
    (jsonContent \ "archive_container_urls").children.view
      .map(container => validateContainer(preferenceSlug, id, container))
      .find(_ != null)
  }
  // if container urls and names are ok, validate that either default container is provided
  // or all datacenters container urls are provided
  val validationError = containerError.orElse {
    if ((defaultContainer == JNothing) && (!allDataCenterArePresent(preferenceSlug, id, jsonContent))) {
      Some(BadRequest(jsonifyError("Preferences for /" + preferenceSlug + "/" + id + " must have a default_container_url or must have all datacenter archive_container_urls present." +
        " See Cloud Feeds documentation for a list of valid datacenters.")))
    } else None
  }
  // write to db only if the content passed all validation
  validationError.getOrElse(writePreferenceToDb(metadata, id, payload))
}
/**
 * Checks whether every supported datacenter has an entry in the payload's
 * "archive_container_urls" object.
 *
 * @param preferenceSlug slug of the preference type (kept for signature compatibility)
 * @param id preference id (kept for signature compatibility)
 * @param preferenceJson parsed preferences payload
 * @return true if all data centers are present, false otherwise
 */
def allDataCenterArePresent(preferenceSlug: String, id: String, preferenceJson: JValue): Boolean = {
  // extract "archive_container_urls": { datacenter: url } to Map(String, Any)
  val containerUrls = (preferenceJson \ "archive_container_urls").extract[Map[String, Any]]
  // every supported datacenter key must be present
  val requiredDataCenters = Seq("iad", "dfw", "ord", "lon", "hkg", "syd")
  requiredDataCenters.forall(containerUrls.contains)
}
/**
 * Validate that the container url is valid and that the container name is valid.
 * Returns null when the container is acceptable or absent — callers treat
 * null as "no validation error" (legacy contract).
 * @param preferenceSlug
 * @param id
 * @param container
 * @return a BadRequest describing the failure, or null when valid/absent
 */
def validateContainer(preferenceSlug: String, id: String, container: JValue): ActionResult = {
  var result:ActionResult = null
  // JNothing means the field was absent from the payload, which is allowed here.
  if (container != JNothing) {
    val validator = new UrlValidator()
    val containerUrl = container.extract[String]
    if (!validator.isValid(containerUrl)) {
      // validate url
      result = BadRequest(jsonifyError("Preferences for /" + preferenceSlug + "/" + id + " has an invalid url: " + containerUrl))
    }
    else {
      // validate container name in the url
      result = validateContainerName(preferenceSlug, id, containerUrl)
    }
  }
  result
}
/**
 * Cloud files has the following requirements for container name. This method validates to make sure the container name is compatible
 * with cloud files.
 *
 * The only restrictions on container names is that they cannot contain a forward slash (/) and must be less than 256 bytes in length.
 * Note that the length restriction applies to the name after it has been URL-encoded. For example, a container name of Course Docs
 * would be URL-encoded as Course%20Docs and is therefore 13 bytes in length rather than the expected 11.
 *
 * http://docs.rackspace.com/files/api/v1/cf-devguide/content/Containers-d1e458.html
 *
 * @param preferenceSlug
 * @param id
 * @param containerUrl
 * @return a BadRequest on failure, or null when the name is valid (legacy contract)
 */
def validateContainerName(preferenceSlug: String, id: String, containerUrl: String): ActionResult = {
  // validate container name; null means "no error found so far"
  var result:ActionResult = null
  // this pattern will match url of format http[s]://hostname/rootpath/nastId/container_name, and capture container_name
  val patternForContainer = "^https?://[^/]+/[^/]+/[^/]+/(.*)$".r
  val containerName = {
    patternForContainer.findFirstMatchIn(containerUrl) match {
      case Some(m) => m.group(1).replaceAll("/$", "") // get first captured group and remove trailing slash if present
      case None => ""
    }
  }
  if (containerName == "") {
    // container name cannot be empty
    result = BadRequest(jsonifyError("Preferences for /" + preferenceSlug + "/" + id + " is missing container name: " + containerUrl))
  }
  else {
    if (containerName.length() >= 256) {
      logger.debug(s"Encoded container name should be less than 256 bytes in length:[$containerUrl]")
      result = BadRequest(jsonifyError("Preferences for /" + preferenceSlug + "/" + id + " has an encoded container name longer than 255 bytes: " + containerUrl + ". " +
        "Url must be encoded and should not contain query parameters or url fragments. Encoded container name cannot contain a forward slash(/) and must be less than 256 bytes in length."))
    } else {
      // container name must be less than 256 bytes in length, url encoded, and does not contain '/'
      val msgInvalidUrl =
        "Preferences for /" + preferenceSlug + "/" + id + " has an invalid url: " + containerUrl + ". " +
        "Url must be encoded and should not contain query parameters or url fragments. Encoded container name cannot contain a forward slash(/) and must be less than 256 bytes in length."
      try {
        // check to see if container has special chars and url encoded
        // first decode the containerName
        val decoded = UriUtils.decode(containerName, "UTF-8")
        //If decoding results with the same container name, either it hasnt been encoded or encoding doesnt actually change anything (Ex: a alpha-numeric string like "Tom")
        if (containerName == decoded) {
          //To make sure whether encoding changes anything
          val encode = UriUtils.encodePathSegment(containerName, "UTF-8")
          // if encoding the container name isn't same as the original, then container has special chars that are not encoded, bad request
          if (encode != containerName) {
            logger.debug(s"Encoding the container name isn't same as the original:[$containerUrl]")
            result = BadRequest(jsonifyError(msgInvalidUrl))
          }
        } else {
          //Since decoded container name is not same as the original, we can think container name is probably already encoded.
          //But they could have send mixed case where only part of the container name is encoded. So decoding results in a different container
          //name but that doesnt mean the entire container name is properly encoded.
          //Removing any hex-characters from original.
          //Encoding the resultant string should not change it. If it changes, it indicates there are still special chars.
          val hexStrippedContainerName = containerName.replaceAll("%[a-fA-F0-9][a-fA-F0-9]", "")
          val encodedHexStrippedContainerName = UriUtils.encodePathSegment(hexStrippedContainerName, "UTF-8")
          if (hexStrippedContainerName != encodedHexStrippedContainerName) {
            logger.debug(s"mixed case(partially encoded) container name:[$containerUrl]")
            // if encoding the container name isn't the same as the original, then container has special chars that are not encoded, bad request
            result = BadRequest(jsonifyError(msgInvalidUrl))
          }
          if (decoded contains '/') {
            logger.debug(s"Container name contains forward slash(/) which is invalid:[$containerUrl]")
            // containerName contains '/', bad request
            result = BadRequest(jsonifyError("Preferences for /" + preferenceSlug + "/" + id + " has an invalid container name containing '/': " + containerUrl + ". " +
              "Url must be encoded and should not contain query parameters or url fragments."))
          }
        }
      }
      catch {
        // UriUtils.decode/encodePathSegment can reject malformed input; surface as BadRequest
        case e: Exception => result = BadRequest(jsonifyError(msgInvalidUrl + " Reason: " + e.getMessage()))
      }
    }
  }
  result
}
/**
 * Write the preference json to the database: updates the existing row for
 * (id, metadata) if one exists (200 Ok), otherwise inserts a new row (201 Created).
 *
 * @param metadata
 * @param id
 * @param payload
 * @return Ok on update, Created on insert
 */
def writePreferenceToDb(metadata: PreferencesMetadata, id: String, payload: String): ActionResult = {
  db.withSession { implicit session =>
    val prefsForIdandSlug = preferences.filter(prefs => prefs.id === id && prefs.preferencesMetadataId === metadata.id)
    prefsForIdandSlug.list match {
      case List(_: Preferences) => {
        // Existing row: replace the payload and bump the updated timestamp.
        preferences
          .filter(prefs => prefs.id === id && prefs.preferencesMetadataId === metadata.id)
          .map(prefs => (prefs.payload, prefs.updated))
          .update(payload, DateTime.now)
        Ok()
      }
      case _ => {
        // New row: also record the NAST tenant id (if present) as alternate id.
        preferences
          .map(p => (p.id, p.preferencesMetadataId, p.payload, p.alternateId))
          .insert(id, metadata.id.get, payload, getAlternateId(request))
        Created()
      }
    }
  }
}
/** Looks up the metadata row registered for `slug`; None when no single row matches. */
def getMetadata(slug: String): Option[PreferencesMetadata] = {
  db.withSession { implicit session =>
    val rows = preferencesMetadata.filter(_.slug === slug).list
    rows match {
      case metadata :: Nil => Some(metadata)
      case _ => None
    }
  }
}
/**
 * Picks the NAST tenant id out of the request's x-tenant-id headers;
 * None when the header is absent.
 */
def getAlternateId(request: HttpServletRequest): Option[String] = {
  // this is a temporary hack, until something better comes along
  // to find which one is the NAST tenantId, we pick the longest string
  request.getHeaders(X_TENANT_ID).asScala.reduceLeftOption((str1: String, str2: String) => if (str1.length > str2.length) str1 else str2) match {
    case Some(tenant) => {
      logger.debug("For request " + request.getRequestURI + ", alternateId is " + tenant)
      Some(tenant)
    }
    case _ => None
  }
}
/** Wraps an error message into a pretty-printed {"error": ...} JSON document. */
def jsonifyError(errorMessage: String) : String = {
  // Redundant `return` removed: the last expression is the result.
  val json = ("error" -> errorMessage)
  pretty(render(json))
}
/** Renders the health-check body carrying the metadata row count. */
def jsonifyStatus(metadataCount: Int) : String =
  s"""{ "metadata-count": $metadataCount }"""
// Catch-all error handler: log the failure and answer with a JSON 500 body.
error {
  case e => {
    logger.error("Request failed with exception", e)
    InternalServerError(jsonifyError("Request failed with exception:" + e + " message:" + e.getMessage))
  }
}
}
| VinnyQ/cloudfeeds-preferences-svc | app/src/main/scala/com/rackspace/prefs/PreferencesService.scala | Scala | apache-2.0 | 15,691 |
// Jubatus: Online machine learning framework for distributed environment
// Copyright (C) 2014-2015 Preferred Networks and Nippon Telegraph and Telephone Corporation.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License version 2.1 as published by the Free Software Foundation.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
package us.jubat.jubaql_server.processor
import org.apache.spark.sql.catalyst.types.BooleanType
import org.apache.spark.sql.catalyst.{SqlLexical, SqlParser}
import org.apache.spark.sql.catalyst.analysis.{Star, UnresolvedAttribute, UnresolvedRelation}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._
import com.typesafe.scalalogging.slf4j.LazyLogging
import scala.util.parsing.input.CharArrayReader._
// TODO: move these to a proper file.
// TODO: rename to better ones.
/** Result of parsing a feature-function parameter list. */
sealed trait FeatureFunctionParameters
/** A lone wildcard parameter (matches anything). */
case object WildcardAnyParameter extends FeatureFunctionParameters
/** A wildcard parameter constrained by a fixed prefix. */
case class WildcardWithPrefixParameter(prefix: String) extends FeatureFunctionParameters
/** A wildcard parameter constrained by a fixed suffix. */
case class WildcardWithSuffixParameter(suffix: String) extends FeatureFunctionParameters
/** An explicit, non-wildcard list of parameter names. */
case class NormalParameters(params: List[String]) extends FeatureFunctionParameters
class JubaQLParser extends SqlParser with LazyLogging {
/**
 * Lexer extending Spark SQL's SqlLexical with $$-delimited code literals.
 */
class JubaQLLexical(keywords: Seq[String]) extends SqlLexical(keywords) {
  // Token for a $$...$$ block; chars holds the contents without the delimiters.
  case class CodeLit(chars: String) extends Token {
    override def toString = "$$"+chars+"$$"
  }
  // used for parsing $$-delimited code blocks
  protected lazy val codeDelim: Parser[String] = '$' ~ '$' ^^
    { case a ~ b => "$$" }
  protected lazy val stringWithoutCodeDelim: Parser[String] = rep1( chrExcept('$', EofCh) ) ^^
    { case chars => chars mkString "" }
  // code contents may contain single '$' characters, but never the "$$" delimiter
  protected lazy val codeContents: Parser[String] = repsep(stringWithoutCodeDelim, '$') ^^
    { case words => words mkString "$" }
  // same alternatives as SqlLexical.token, plus the CodeLit production
  override lazy val token: Parser[Token] =
    ( identChar ~ rep( identChar | digit ) ^^ { case first ~ rest => processIdent(first :: rest mkString "") }
    | rep1(digit) ~ opt('.' ~> rep(digit)) ^^ {
        case i ~ None => NumericLit(i mkString "")
        case i ~ Some(d) => FloatLit(i.mkString("") + "." + d.mkString(""))
      }
    | '\\'' ~ rep( chrExcept('\\'', EofCh) ) ~ '\\'' ^^ { case '\\'' ~ chars ~ '\\'' => StringLit(chars mkString "") }
    | '\\"' ~ rep( chrExcept('\\"', EofCh) ) ~ '\\"' ^^ { case '\\"' ~ chars ~ '\\"' => StringLit(chars mkString "") }
    | codeDelim ~> codeContents <~ codeDelim ^^ { case chars => CodeLit(chars) }
    | EofCh ^^^ EOF
    | codeDelim ~> failure("unclosed code literal")
    | '\\'' ~> failure("unclosed string literal")
    | '\\"' ~> failure("unclosed string literal")
    | delim
    | failure("illegal character")
    )
}
// Reserved keywords of the JubaQL grammar. Upper-case vals are statement and
// clause words; the lower-case numeric/string/boolean vals are the column-type
// names accepted in schema and function-signature declarations.
protected lazy val CREATE = Keyword("CREATE")
protected lazy val DATASOURCE = Keyword("DATASOURCE")
protected lazy val MODEL = Keyword("MODEL")
// Jubatus algorithm names (see the jubatusAlgorithm parser).
protected lazy val ANOMALY = Keyword("ANOMALY")
protected lazy val CLASSIFIER = Keyword("CLASSIFIER")
protected lazy val RECOMMENDER = Keyword("RECOMMENDER")
protected lazy val WITH = Keyword("WITH")
protected lazy val UPDATE = Keyword("UPDATE")
protected lazy val ANALYZE = Keyword("ANALYZE")
protected lazy val USING = Keyword("USING")
protected lazy val DATA = Keyword("DATA")
protected lazy val LOG = Keyword("LOG")
protected lazy val STORAGE = Keyword("STORAGE")
protected lazy val STREAM = Keyword("STREAM")
protected lazy val CONFIG = Keyword("CONFIG")
// Column-type names; lower case by convention in JubaQL schemas.
protected lazy val numeric = Keyword("numeric")
protected lazy val string = Keyword("string")
protected lazy val boolean = Keyword("boolean")
protected lazy val STATUS = Keyword("STATUS")
protected lazy val SHUTDOWN = Keyword("SHUTDOWN")
protected lazy val START = Keyword("START")
protected lazy val STOP = Keyword("STOP")
protected lazy val PROCESSING = Keyword("PROCESSING")
// CREATE [FEATURE|TRIGGER] FUNCTION clauses.
protected lazy val FUNCTION = Keyword("FUNCTION")
protected lazy val RETURNS = Keyword("RETURNS")
protected lazy val LANGUAGE = Keyword("LANGUAGE")
protected lazy val FEATURE = Keyword("FEATURE")
protected lazy val TRIGGER = Keyword("TRIGGER")
protected lazy val FOR = Keyword("FOR")
protected lazy val EACH = Keyword("EACH")
protected lazy val ROW = Keyword("ROW")
protected lazy val EXECUTE = Keyword("EXECUTE")
// Sliding-window clauses (CREATE STREAM ... FROM SLIDING WINDOW ...).
protected lazy val SLIDING = Keyword("SLIDING")
protected lazy val WINDOW = Keyword("WINDOW")
protected lazy val SIZE = Keyword("SIZE")
protected lazy val ADVANCE = Keyword("ADVANCE")
protected lazy val TIME = Keyword("TIME")
protected lazy val TUPLES = Keyword("TUPLES")
protected lazy val OVER = Keyword("OVER")
// Use the JubaQL-specific lexer so code literals and the keywords above are
// tokenized correctly.
override val lexical = new JubaQLLexical(reservedWords)
// We should allow some common column names that are also known as keywords
// (e.g. "time", "status") wherever a column identifier is expected.
protected lazy val colIdent = (COUNT | TIME | STATUS | MODEL | GROUP |
    ORDER | ident)
/**
 * Overrides the inherited SQL expression parser so that bare identifiers go
 * through `colIdent` (allowing keyword-like column names). The order of the
 * alternatives matters: more specific forms (indexing, literals, casts,
 * parenthesized expressions, function calls) must be tried before the
 * identifier fallback.
 */
override lazy val baseExpression: PackratParser[Expression] =
  expression ~ "[" ~ expression <~ "]" ^^ {
    case base ~ _ ~ ordinal => GetItem(base, ordinal)
  } |
    TRUE ^^^ Literal(true, BooleanType) |
    FALSE ^^^ Literal(false, BooleanType) |
    cast |
    "(" ~> expression <~ ")" |
    function |
    "-" ~> literal ^^ UnaryMinus |
    colIdent ^^ UnresolvedAttribute | // was: ident
    "*" ^^^ Star(None) |
    literal
/**
 * A select-list item: an expression with an optional (possibly AS-prefixed)
 * alias. Overridden so the alias may be a keyword-like column name.
 */
override lazy val projection: Parser[Expression] =
  expression ~ (opt(AS) ~> opt(colIdent)) ^^ { // was: opt(ident)
    case e ~ None => e
    case e ~ Some(a) => Alias(e, a)()
  }
// Semantic aliases for plain identifiers, used to make grammar rules
// self-describing.
protected lazy val streamIdent = ident
protected lazy val modelIdent = ident
protected lazy val funcIdent = ident
// column_name column_type
/** Parses a `name type` pair where type is one of numeric/string/boolean. */
protected lazy val stringPairs: Parser[(String, String)] = {
  colIdent ~ (numeric | string | boolean) ^^ {
    case x ~ y => (x, y)
  }
}
/** Parses a `STREAM: "<url>"` clause and yields the URL string. */
protected lazy val stream: Parser[String] =
  STREAM ~ ":" ~> stringLit

/** Parses a comma-led, comma-separated list of STREAM clauses. */
protected lazy val streamList: Parser[List[String]] =
  "," ~> rep1sep(stream, ",")
// CREATE DATASOURCE source_name ( column_name data_type, [...]) FROM sink_id
/**
 * Parses a CREATE DATASOURCE statement with an optional schema and an
 * optional list of extra STREAM clauses after the STORAGE clause.
 */
protected lazy val createDatasource: Parser[JubaQLAST] = {
  CREATE ~ DATASOURCE ~> streamIdent ~ opt("(" ~ rep1sep(stringPairs, ",") ~ ")") ~
    FROM ~ "(" ~ STORAGE ~ ":" ~ stringLit ~ opt(streamList) <~ ")" ^^ {
    case sourceName ~ rep ~ _ /*FROM*/ ~ _ ~ _ /*STORAGE*/ ~ _ ~ storage ~ streams =>
      rep match {
        case Some(r) =>
          // r is (("(" ~ schemaList) ~ ")"), so r._1._2 extracts the
          // List[(column, type)] in the middle.
          CreateDatasource(sourceName, r._1._2, storage, streams.getOrElse(List[String]()))
        case None =>
          CreateDatasource(sourceName, List(), storage, streams.getOrElse(List[String]()))
      }
  }
}
/** Parses one of the supported Jubatus algorithm keywords. */
protected lazy val jubatusAlgorithm: Parser[String] =
  ANOMALY | CLASSIFIER | RECOMMENDER
/**
 * Parses a CREATE <algorithm> MODEL statement:
 * feature columns may be given as wildcards (`*`, `prefix*`, `*suffix`),
 * a single column, or a parenthesized column list, each optionally paired
 * WITH a feature function (default function name: "id").
 */
protected lazy val createModel: Parser[JubaQLAST] = {
  // `*` matches every column.
  val wildcardAny: Parser[FeatureFunctionParameters] = "*" ^^ {
    case _ =>
      WildcardAnyParameter
  }
  // `prefix*` matches columns starting with the prefix.
  val wildcardWithPrefixParam: Parser[FeatureFunctionParameters] = ident <~ "*" ^^ {
    case prefix =>
      WildcardWithPrefixParameter(prefix)
  }
  // `*suffix` matches columns ending with the suffix.
  val wildcardWithSuffixParam: Parser[FeatureFunctionParameters] = "*" ~> ident ^^ {
    case suffix =>
      WildcardWithSuffixParameter(suffix)
  }
  // wildcardWithSuffixParam is first.
  // If wildcardAny precedes, *_suffix always matches to wildcardAny.
  val wildcard: Parser[FeatureFunctionParameters] = wildcardWithSuffixParam | wildcardAny | wildcardWithPrefixParam
  val oneParameter: Parser[NormalParameters] = colIdent ^^ {
    case param =>
      NormalParameters(List(param))
  }
  // this may take one parameter. Should such behavior be avoided?
  val moreThanOneParameters: Parser[FeatureFunctionParameters] = "(" ~> rep1sep(colIdent, ",") <~ ")" ^^ {
    case params =>
      NormalParameters(params)
  }
  val featureFunctionParameters: Parser[FeatureFunctionParameters] = wildcard | oneParameter | moreThanOneParameters
  // NOTE(review): the guard makes this semantic action a partial function;
  // a word other than "label"/"id" appears to raise a MatchError during
  // parsing instead of producing a parse failure — consider `^?`. Confirm.
  val labelOrId: Parser[(String, String)] = "(" ~> ident ~ ":" ~ colIdent <~ ")" ^^ {
    case labelOrId ~ _ ~ value if labelOrId == "label" || labelOrId == "id" =>
      (labelOrId, value)
  }
  // Pair each parameter spec with its (optional) feature function name.
  val paramsAndFunction: Parser[(FeatureFunctionParameters, String)] = featureFunctionParameters ~ opt(WITH ~> funcIdent) ^^ {
    case params ~ functionName =>
      (params, functionName.getOrElse("id"))
  }
  CREATE ~> jubatusAlgorithm ~ MODEL ~ modelIdent ~ opt(labelOrId) ~ AS ~
    rep1sep(paramsAndFunction, ",") ~ CONFIG ~ stringLit ^^ {
    case algorithm ~ _ ~ modelName ~ maybeLabelOrId ~ _ ~ l ~ _ ~ config =>
      CreateModel(algorithm, modelName, maybeLabelOrId, l, config)
  }
}
/** Parses `CREATE STREAM <name> FROM <select>` into its AST node. */
protected lazy val createStreamFromSelect: Parser[JubaQLAST] =
  CREATE ~ STREAM ~> (streamIdent <~ FROM) ~ select ^^ {
    case name ~ plan => CreateStreamFromSelect(name, plan)
  }
/**
 * Parses `CREATE STREAM <name> FROM ANALYZE ... [AS <column>]`; the optional
 * alias names the column that will hold the analysis result.
 */
protected lazy val createStreamFromAnalyze: Parser[JubaQLAST] = {
  CREATE ~ STREAM ~> streamIdent ~ FROM ~ analyzeStream ~ opt(AS ~> colIdent) ^^ {
    case streamName ~ _ ~ analyzePlan ~ newColumn =>
      CreateStreamFromAnalyze(streamName, analyzePlan, newColumn)
  }
}
/**
 * Parses `CREATE TRIGGER ON <stream> FOR EACH ROW [WHEN <cond>] EXECUTE
 * <function>`; the WHEN condition is optional.
 */
protected lazy val createTrigger: Parser[JubaQLAST] = {
  CREATE ~ TRIGGER ~ ON ~> streamIdent ~ FOR ~ EACH ~ ROW ~ opt(WHEN ~> expression) ~ EXECUTE ~ function ^^ {
    case dsName ~ _ ~ _ ~ _ ~ condition ~ _ ~ expr =>
      CreateTrigger(dsName, condition, expr)
  }
}
/**
 * Parses `CREATE STREAM <name> FROM SLIDING WINDOW (SIZE n ADVANCE m
 * TIME|TUPLES) OVER <stream> WITH agg(...), ... [WHERE ...] [HAVING ...]`.
 * Builds a pre-window projection plan from the aggregation arguments; the
 * actual aggregation functions are resolved later.
 */
protected lazy val createStreamFromSlidingWindow: Parser[JubaQLAST] = {
  // One aggregation: a function name, its argument expressions, and an
  // optional output alias.
  val aggregation: Parser[(String, List[Expression], Option[String])] =
    (ident | AVG) ~ "(" ~ rep1sep(expression, ",") ~ ")" ~ opt(AS ~> colIdent) ^^ {
      case funcName ~ _ ~ parameters ~ _ ~ maybeAlias =>
        (funcName, parameters, maybeAlias)
    }
  val aggregationList = rep1sep(aggregation, ",")
  val filter: Parser[Expression] = WHERE ~ expression ^^ { case _ ~ e => e}
  val having: Parser[Expression] = HAVING ~> expression
  CREATE ~ STREAM ~> streamIdent ~ FROM ~ SLIDING ~ WINDOW ~
    "(" ~ SIZE ~ numericLit ~ ADVANCE ~ numericLit ~ (TIME | TUPLES) ~ ")" ~
    OVER ~ streamIdent ~ WITH ~ aggregationList ~ opt(filter) ~ opt(having) ^^ {
    case streamName ~ _ ~ _ ~ _ ~ _ ~ _ /* FROM SLIDING WINDOW ( SIZE */ ~
      size ~ _ /* ADVANCE */ ~ advance ~ windowType ~ _ /* ) */ ~
      _ /* OVER */ ~ source ~ _ /* WITH */ ~ funcSpecs ~ f ~ h =>
      // start from a table/stream with the given name
      val base = UnresolvedRelation(Seq(source), None)
      // apply the precondition
      val withFilter = f.map(f => Filter(f, base)).getOrElse(base)
      // select only the column that we use in the window.
      // NOTE(review): only the *last* argument of each aggregation is
      // projected here — confirm this is intended for multi-argument
      // aggregation functions.
      val allColumns = funcSpecs.map(_._2.last)
      val withProjection = Project(assignAliases(allColumns), withFilter)
      // NB. we have to add a Cast to the correct type in every column later,
      // after we have mapped function names to concrete functions.
      CreateStreamFromSlidingWindow(streamName, size.toInt, advance.toInt,
        windowType.toLowerCase, withProjection, funcSpecs,
        h)
  }
}
/** Parses `LOG STREAM <name>` into a LogStream node. */
protected lazy val logStream: Parser[JubaQLAST] =
  LOG ~ STREAM ~> streamIdent ^^ (name => LogStream(name))
/** Parses `UPDATE MODEL <model> USING <rpc> FROM <stream>`. */
protected lazy val update: Parser[JubaQLAST] = {
  UPDATE ~ MODEL ~> modelIdent ~ USING ~ funcIdent ~ FROM ~ streamIdent ^^ {
    case modelName ~ _ ~ rpcName ~ _ ~ source =>
      Update(modelName, rpcName, source)
  }
}
/** Parses `ANALYZE '<data>' BY MODEL <model> USING <rpc>` (inline data). */
protected lazy val analyze: Parser[JubaQLAST] = {
  ANALYZE ~> stringLit ~ BY ~ MODEL ~ modelIdent ~ USING ~ funcIdent ^^ {
    case data ~ _ ~ _ ~ modelName ~ _ ~ rpc =>
      Analyze(modelName, rpc, data)
  }
}
/**
 * Parses `ANALYZE <stream> BY MODEL <model> USING <rpc>` — same AST node as
 * `analyze`, but the first argument is a stream identifier instead of a
 * string literal (used within CREATE STREAM FROM ANALYZE).
 */
protected lazy val analyzeStream: Parser[Analyze] = {
  ANALYZE ~> streamIdent ~ BY ~ MODEL ~ modelIdent ~ USING ~ funcIdent ^^ {
    case source ~ _ ~ _ ~ modelName ~ _ ~ rpc =>
      Analyze(modelName, rpc, source)
  }
}
/** Parses the bare STATUS statement. */
protected lazy val status: Parser[JubaQLAST] =
  STATUS ^^ { _ => Status() }

/** Parses the bare SHUTDOWN statement. */
protected lazy val shutdown: Parser[JubaQLAST] =
  SHUTDOWN ^^ { _ => Shutdown() }

/** Parses `START PROCESSING <datasource>`. */
protected lazy val startProcessing: Parser[JubaQLAST] =
  START ~ PROCESSING ~> streamIdent ^^ (name => StartProcessing(name))

/** Parses `STOP PROCESSING`. */
protected lazy val stopProcessing: Parser[JubaQLAST] =
  STOP ~> PROCESSING ^^ { _ => StopProcessing() }
/** A parser which matches a code literal (a `CodeLit` token produced by the
 *  JubaQL lexer) and yields its character content. */
def codeLit: Parser[String] =
  elem("code literal", _.isInstanceOf[lexical.CodeLit]) ^^ (_.chars)
/**
 * Parses `CREATE FUNCTION f(args) RETURNS type LANGUAGE lang AS <code>`;
 * the body is a code literal delimited as defined by the lexer.
 */
protected lazy val createFunction: Parser[JubaQLAST] = {
  CREATE ~ FUNCTION ~> funcIdent ~ "(" ~ repsep(stringPairs, ",") ~ ")" ~
    RETURNS ~ (numeric | string| boolean) ~ LANGUAGE ~ ident ~ AS ~ codeLit ^^ {
    case f ~ _ ~ args ~ _ ~ _ /*RETURNS*/ ~ retType ~ _ /*LANGUAGE*/ ~ lang ~
      _ /*AS*/ ~ body =>
      CreateFunction(f, args, retType, lang, body)
  }
}
/** Like createFunction, but for feature functions (no RETURNS clause). */
protected lazy val createFeatureFunction: Parser[JubaQLAST] = {
  CREATE ~ FEATURE ~ FUNCTION ~> funcIdent ~ "(" ~ repsep(stringPairs, ",") ~ ")" ~
    LANGUAGE ~ ident ~ AS ~ codeLit ^^ {
    case f ~ _ ~ args ~ _ ~ _ /*LANGUAGE*/ ~ lang ~
      _ /*AS*/ ~ body =>
      CreateFeatureFunction(f, args, lang, body)
  }
}
/** Like createFeatureFunction, but declares a trigger function. */
protected lazy val createTriggerFunction: Parser[JubaQLAST] = {
  CREATE ~ TRIGGER ~ FUNCTION ~> funcIdent ~ "(" ~ repsep(stringPairs, ",") ~ ")" ~
    LANGUAGE ~ ident ~ AS ~ codeLit ^^ {
    case f ~ _ ~ args ~ _ ~ _ /*LANGUAGE*/ ~ lang ~
      _ /*AS*/ ~ body =>
      CreateTriggerFunction(f, args, lang, body)
  }
}
/**
 * The top-level JubaQL statement parser: tries each statement form in
 * order. Order matters for alternatives sharing a prefix (e.g. the
 * CREATE STREAM and CREATE ... FUNCTION variants), so keep new entries in a
 * position where no earlier alternative can shadow them.
 */
protected lazy val jubaQLQuery: Parser[JubaQLAST] = {
  createDatasource |
    createModel |
    createStreamFromSelect |
    createStreamFromSlidingWindow |
    createStreamFromAnalyze |
    createTrigger |
    logStream |
    update |
    analyze |
    status |
    shutdown |
    startProcessing |
    stopProcessing |
    createFunction |
    createFeatureFunction |
    createTriggerFunction
}
// note: apply cannot override incompatible type with parent class
//override def apply(input: String): Option[JubaQLAST] = {
/**
 * Parses a complete JubaQL statement, logging the attempt. Returns the AST
 * on success, None (after logging a warning) on any parse failure.
 */
def parse(input: String): Option[JubaQLAST] = {
  logger.info(s"trying to parse '$input'")
  val parseResult = phrase(jubaQLQuery)(new lexical.Scanner(input))
  parseResult match {
    case Success(ast, _) =>
      logger.debug(s"successfully parsed input: $ast")
      Option(ast)
    case failure =>
      logger.warn(s"failed to parse input as JubaQL: $failure")
      None
  }
}
}
| jubatus/jubaql-server | processor/src/main/scala/us/jubat/jubaql_server/processor/JubaQLParser.scala | Scala | lgpl-2.1 | 15,401 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Encoder, SparkSession}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.physical.{Partitioning, UnknownPartitioning}
import org.apache.spark.sql.catalyst.util.truncatedString
import org.apache.spark.sql.execution.metric.SQLMetrics
object ExternalRDD {
  /**
   * Builds a logical plan for `rdd`: an [[ExternalRDD]] node producing raw
   * objects of type `T`, wrapped in a serialization step so downstream
   * operators receive Catalyst rows.
   */
  def apply[T: Encoder](rdd: RDD[T], session: SparkSession): LogicalPlan = {
    val externalRdd = ExternalRDD(CatalystSerde.generateObjAttr[T], rdd)(session)
    CatalystSerde.serialize[T](externalRdd)
  }
}
/** Logical plan node for scanning data from an RDD. */
case class ExternalRDD[T](
    outputObjAttr: Attribute,
    rdd: RDD[T])(session: SparkSession)
  extends LeafNode with ObjectProducer with MultiInstanceRelation {
  // `session` is a curried constructor argument, so it is excluded from
  // case-class equality but must be forwarded when the node is copied.
  override protected final def otherCopyArgs: Seq[AnyRef] = session :: Nil
  // Produce a copy with a fresh output attribute (required by
  // MultiInstanceRelation, e.g. for self-joins).
  override def newInstance(): ExternalRDD.this.type =
    ExternalRDD(outputObjAttr.newInstance(), rdd)(session).asInstanceOf[this.type]
  override protected def stringArgs: Iterator[Any] = Iterator(output)
  override def computeStats(): Statistics = Statistics(
    // TODO: Instead of returning a default value here, find a way to return a meaningful size
    // estimate for RDDs. See PR 1238 for more discussions.
    sizeInBytes = BigInt(session.sessionState.conf.defaultSizeInBytes)
  )
}
/** Physical plan node for scanning data from an RDD. */
case class ExternalRDDScanExec[T](
    outputObjAttr: Attribute,
    rdd: RDD[T]) extends LeafExecNode with ObjectProducerExec {
  override lazy val metrics = Map(
    "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
  // Appended to the node name so the RDD's name (when set) shows up in plans.
  private def rddName: String = Option(rdd.name).map(n => s" $n").getOrElse("")
  override val nodeName: String = s"Scan$rddName"
  protected override def doExecute(): RDD[InternalRow] = {
    val numOutputRows = longMetric("numOutputRows")
    val outputDataType = outputObjAttr.dataType
    rdd.mapPartitionsInternal { iter =>
      // The object-to-row wrapper is created once per partition and reused
      // for every element.
      val outputObject = ObjectOperator.wrapObjectToRow(outputDataType)
      iter.map { value =>
        numOutputRows += 1
        outputObject(value)
      }
    }
  }
  override def simpleString: String = {
    s"$nodeName${output.mkString("[", ",", "]")}"
  }
}
/** Logical plan node for scanning data from an RDD of InternalRow. */
case class LogicalRDD(
    output: Seq[Attribute],
    rdd: RDD[InternalRow],
    outputPartitioning: Partitioning = UnknownPartitioning(0),
    override val outputOrdering: Seq[SortOrder] = Nil,
    override val isStreaming: Boolean = false)(session: SparkSession)
  extends LeafNode with MultiInstanceRelation {
  // `session` is curried out of case-class equality; forward it on copy.
  override protected final def otherCopyArgs: Seq[AnyRef] = session :: Nil
  override def newInstance(): LogicalRDD.this.type = {
    // Map each old output attribute to a freshly-generated one, then rewrite
    // the partitioning and ordering expressions to refer to the new
    // attributes so they stay consistent with the new output.
    val rewrite = output.zip(output.map(_.newInstance())).toMap
    val rewrittenPartitioning = outputPartitioning match {
      // Only expression-based partitionings (e.g. hash partitioning) carry
      // attribute references that need rewriting.
      case p: Expression =>
        p.transform {
          case e: Attribute => rewrite.getOrElse(e, e)
        }.asInstanceOf[Partitioning]
      case p => p
    }
    val rewrittenOrdering = outputOrdering.map(_.transform {
      case e: Attribute => rewrite.getOrElse(e, e)
    }.asInstanceOf[SortOrder])
    LogicalRDD(
      output.map(rewrite),
      rdd,
      rewrittenPartitioning,
      rewrittenOrdering,
      isStreaming
    )(session).asInstanceOf[this.type]
  }
  override protected def stringArgs: Iterator[Any] = Iterator(output, isStreaming)
  override def computeStats(): Statistics = Statistics(
    // TODO: Instead of returning a default value here, find a way to return a meaningful size
    // estimate for RDDs. See PR 1238 for more discussions.
    sizeInBytes = BigInt(session.sessionState.conf.defaultSizeInBytes)
  )
}
/** Physical plan node for scanning data from an RDD of InternalRow. */
case class RDDScanExec(
    output: Seq[Attribute],
    rdd: RDD[InternalRow],
    name: String,
    override val outputPartitioning: Partitioning = UnknownPartitioning(0),
    override val outputOrdering: Seq[SortOrder] = Nil) extends LeafExecNode with InputRDDCodegen {
  // Appended to the node name so the RDD's name (when set) shows up in plans.
  private def rddName: String = Option(rdd.name).map(n => s" $n").getOrElse("")
  override val nodeName: String = s"Scan $name$rddName"
  override lazy val metrics = Map(
    "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
  protected override def doExecute(): RDD[InternalRow] = {
    val numOutputRows = longMetric("numOutputRows")
    rdd.mapPartitionsWithIndexInternal { (index, iter) =>
      // One UnsafeProjection per partition, initialized with the partition
      // index, converts each incoming row to an UnsafeRow.
      val proj = UnsafeProjection.create(schema)
      proj.initialize(index)
      iter.map { r =>
        numOutputRows += 1
        proj(r)
      }
    }
  }
  override def simpleString: String = {
    s"$nodeName${truncatedString(output, "[", ",", "]")}"
  }
  // Input can be InternalRow, has to be turned into UnsafeRows.
  override protected val createUnsafeProjection: Boolean = true
  override def inputRDD: RDD[InternalRow] = rdd
}
| mdespriee/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/ExistingRDD.scala | Scala | apache-2.0 | 6,004 |
/*
* Copyright 2017 Mediative
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mediative.sangria.codegen
import org.scalatest.WordSpec
import java.io.File
import scala.io.Source
import scala.meta._
import sangria.schema.Schema
/**
 * Base spec that, for every `*.graphql` query in `../samples/<name>` with a
 * matching `*.scala` expectation file, runs the code generator and compares
 * the generated source against the expected file.
 *
 * @param name   sample directory name under ../samples
 * @param schema optional pre-built schema; when absent, the directory's
 *               schema.graphql file is used
 */
abstract class CodegenBaseSpec(name: String, schema: Option[Schema[_, _]] = None) extends WordSpec {
  def this(name: String, schema: Schema[_, _]) = this(name, Some(schema))
  val inputDir = new File("../samples", name)
  /** Reads the whole file into a string, making sure the source is closed. */
  def contentOf(file: File) = {
    val source = Source.fromFile(file)
    try source.mkString finally source.close()
  }
  "SangriaCodegen" should {
    // File#listFiles returns null when the directory does not exist; guard
    // against that so a missing sample directory yields zero tests, not NPE.
    for {
      input <- Option(inputDir.listFiles()).getOrElse(Array.empty[File])
      if input.getName.endsWith(".graphql")
      name = input.getName.replace(".graphql", "")
      expected = new File(inputDir, s"$name.scala")
      if expected.exists
    } {
      s"generate code for ${input.getName}" in {
        val generator = ScalametaGenerator(s"${name}Api")
        val builder = schema match {
          case Some(schema) => Builder(schema)
          case None => Builder(new File(inputDir, "schema.graphql"))
        }
        // Fail with a readable message instead of a MatchError when code
        // generation returns a Left.
        val out = builder.withQuery(input).generate(generator) match {
          case Right(code) => code
          case Left(error) => fail(s"code generation failed for ${input.getName}: $error")
        }
        val actual = out.show[Syntax]
        // Read the expectation once and reuse it for message and assertion.
        val expectedContent = contentOf(expected).trim
        if (actual.trim != expectedContent)
          println(actual)
        assert(actual.trim == expectedContent)
      }
    }
  }
}
| mediative/sangria-codegen | sangria-codegen/src/test/scala/com.mediative.sangria.codegen/CodegenBaseSpec.scala | Scala | apache-2.0 | 1,868 |
package org.scalaide.core.internal.jdt.model
import scala.collection.immutable.Seq
import org.eclipse.jdt.core.IField
import org.eclipse.jdt.core.IJavaElement
import org.eclipse.jdt.core.IMethod
import org.eclipse.jdt.core.IType
import org.eclipse.jdt.core.ITypeParameter
import org.eclipse.jdt.internal.compiler.classfmt.ClassFileConstants
import org.eclipse.jdt.internal.core.JavaElement
import org.eclipse.jdt.internal.core.JavaElementInfo
import org.eclipse.jdt.internal.core.LocalVariable
import org.eclipse.jdt.internal.core.SourceConstructorInfo
import org.eclipse.jdt.internal.core.SourceField
import org.eclipse.jdt.internal.core.SourceFieldElementInfo
import org.eclipse.jdt.internal.core.SourceMethod
import org.eclipse.jdt.internal.core.SourceMethodElementInfo
import org.eclipse.jdt.internal.core.SourceMethodInfo
import org.eclipse.jdt.internal.core.SourceType
import org.eclipse.jdt.internal.core.SourceTypeElementInfo
import org.eclipse.jdt.internal.core.OpenableElementInfo
import org.eclipse.jdt.internal.core.TypeParameterElementInfo
import org.eclipse.jface.resource.ImageDescriptor
import org.scalaide.ui.ScalaImages
import scala.tools.eclipse.contribution.weaving.jdt.IScalaElement
import scala.tools.eclipse.contribution.weaving.jdt.ui.IMethodOverrideInfo
import org.scalaide.util.internal.ReflectionUtils
import scala.tools.nsc.Global
/**
 * Common behaviour for Scala flavours of JDT Java elements: label/image
 * customisation for the UI and fall-back resolution of the compilation unit
 * when the element actually lives in a class file.
 */
trait ScalaElement extends JavaElement with IScalaElement {
  def getElementInfo: AnyRef
  def getElementName: String
  // The Scala-visible name; may differ from the JDT element name
  // (e.g. module elements carry a trailing '$', see ScalaModuleElement).
  def scalaName: String = getElementName
  def labelName: String = scalaName
  def getLabelText(flags: Long): String = labelName
  def getImageDescriptor: ImageDescriptor = null
  def isVisible = true
  override def getCompilationUnit() = {
    val cu = super.getCompilationUnit()
    // Elements coming from a class file have no compilation unit; wrap the
    // class file in an adapter so callers always get a non-null unit.
    if (cu != null) cu else new CompilationUnitAdapter(getClassFile().asInstanceOf[ScalaClassFile])
  }
  override def getAncestor(ancestorType: Int): IJavaElement = {
    val ancestor = super.getAncestor(ancestorType)
    if (ancestor != null)
      ancestor
    else if (ancestorType == IJavaElement.COMPILATION_UNIT)
      // Same class-file fallback as getCompilationUnit above.
      new CompilationUnitAdapter(getClassFile().asInstanceOf[ScalaClassFile])
    else
      null
  }
}
/** Marker for Scala elements that surface as fields in the JDT model. */
trait ScalaFieldElement extends ScalaElement
/**
 * Source type (class/trait/object) element. Child lookups (type, field,
 * method) are redirected through getCorrespondingElement so that the
 * Scala-specific child elements are returned instead of plain JDT handles.
 */
class ScalaSourceTypeElement(parent: JavaElement, name: String, declaringType: Option[Global#Type])
  extends SourceType(parent, name) with ScalaElement {
  /** Finds the child of this type with the same name and element type. */
  def getCorrespondingElement(element: IJavaElement): Option[IJavaElement] = {
    val name = element.getElementName
    val tpe = element.getElementType
    getChildren.find(e => e.getElementName == name && e.getElementType == tpe)
  }
  override def getType(typeName: String): IType = {
    val tpe = super.getType(typeName)
    getCorrespondingElement(tpe).getOrElse(tpe).asInstanceOf[IType]
  }
  override def getField(fieldName: String): IField = {
    val field = super.getField(fieldName)
    getCorrespondingElement(field).getOrElse(field).asInstanceOf[IField]
  }
  override def getMethod(selector: String, parameterTypeSignatures: Array[String]): IMethod = {
    val method = super.getMethod(selector, parameterTypeSignatures)
    getCorrespondingElement(method).getOrElse(method).asInstanceOf[IMethod]
  }
  // Computes the qualified name from the compiler's view of the declaring
  // type: enclosing package + '$'-separated nested type name. Falls back to
  // the JDT default when no declaring type is known or the package is empty.
  override def getFullyQualifiedName: String =
    declaringType.map { declaringType =>
      val pkgSym = declaringType.typeSymbol.enclosingPackage
      if (pkgSym.isEmptyPackage)
        super.getFullyQualifiedName
      else {
        val pkg = pkgSym.javaClassName
        pkg + "." + getTypeQualifiedName('$', /*showParameters =*/ false)
      }
    }.getOrElse(super.getFullyQualifiedName)
}
/** A Scala class; hidden from the UI when compiler-synthesized. */
class ScalaClassElement(parent: JavaElement, name: String, synthetic: Boolean, declaringType: Option[Global#Type])
  extends ScalaSourceTypeElement(parent, name, declaringType) {
  override def getImageDescriptor = ScalaImages.SCALA_CLASS
  override def isVisible = !synthetic
}
/** An anonymous class instantiation, labelled `new Name {...}`. */
class ScalaAnonymousClassElement(parent: JavaElement, name: String, declaringType: Option[Global#Type])
  extends ScalaClassElement(parent, name, false, declaringType) {
  override def getLabelText(flags: Long) = if (name != null) "new " + name + " {...}" else "new {...}"
}
/** A Scala trait. */
class ScalaTraitElement(parent: JavaElement, name: String, declaringType: Option[Global#Type])
  extends ScalaSourceTypeElement(parent, name, declaringType) {
  override def getImageDescriptor = ScalaImages.SCALA_TRAIT
}
/**
 * A Scala object (module). The JDT element name carries the compiler's '$'
 * suffix; `scalaName`/labels show the plain name.
 */
class ScalaModuleElement(parent: JavaElement, name: String, synthetic: Boolean, declaringType: Option[Global#Type])
  extends ScalaSourceTypeElement(parent, name + "$", declaringType) {
  override def scalaName = name
  override def getLabelText(flags: Long) = name
  override def getImageDescriptor = ScalaImages.SCALA_OBJECT
  override def isVisible = !synthetic
}
/** A package object; only the icon differs from a regular module. */
class ScalaPackageModuleElement(parent: JavaElement, name: String, synthetic: Boolean, declaringType: Option[Global#Type])
  extends ScalaModuleElement(parent, name, synthetic, declaringType) {
  override def getImageDescriptor = ScalaImages.SCALA_PACKAGE_OBJECT
}
/**
 * A `def` member. Hidden when synthetic or when it is actually a
 * constructor (its info is a ScalaSourceConstructorInfo).
 */
class ScalaDefElement(parent: JavaElement, name: String, paramTypes: Array[String], synthetic: Boolean, display: String, overrideInfo: Int)
  extends SourceMethod(parent, name, paramTypes) with ScalaElement with IMethodOverrideInfo {
  override def getLabelText(flags: Long) = display
  override def isVisible = !synthetic && !getElementInfo.isInstanceOf[ScalaSourceConstructorInfo]
  def getOverrideInfo = overrideInfo
}
/** A function value; reports an explicit declaring type. */
class ScalaFunctionElement(declaringType: JavaElement, parent: JavaElement, name: String, paramTypes: Array[String], display: String)
  extends SourceMethod(parent, name, paramTypes) with ScalaElement {
  override def getDeclaringType(): IType = declaringType.asInstanceOf[IType]
  override def getLabelText(flags: Long) = display
}
/** A generated getter/setter; never shown in the UI. */
class ScalaAccessorElement(parent: JavaElement, name: String, paramTypes: Array[String])
  extends SourceMethod(parent, name, paramTypes) with ScalaElement {
  override def isVisible = false
}
/** A `val` member; the icon reflects its access modifier. */
class ScalaValElement(parent: JavaElement, name: String, display: String)
  extends SourceField(parent, name) with ScalaFieldElement {
  override def getLabelText(flags: Long) = display
  override def getImageDescriptor = {
    val flags = getFlags
    if ((flags & ClassFileConstants.AccPublic) != 0)
      ScalaImages.PUBLIC_VAL
    else if ((flags & ClassFileConstants.AccProtected) != 0)
      ScalaImages.PROTECTED_VAL
    else
      ScalaImages.PRIVATE_VAL
  }
}
/**
 * A `var` member.
 * NOTE(review): unlike ScalaValElement, no access-dependent image
 * descriptor is provided here — confirm this is intentional.
 */
class ScalaVarElement(parent: JavaElement, name: String, display: String)
  extends SourceField(parent, name) with ScalaFieldElement {
  override def getLabelText(flags: Long) = display
}
/** A type member declared at type level (surfaces as a JDT type). */
class ScalaTypeElement(parent: JavaElement, name: String, display: String, declaringType: Option[Global#Type])
  extends ScalaSourceTypeElement(parent, name, declaringType) {
  override def getLabelText(flags: Long) = display
  override def getImageDescriptor = ScalaImages.SCALA_TYPE
}
/** A type member that surfaces as a JDT field. */
class ScalaTypeFieldElement(parent: JavaElement, name: String, display: String)
  extends SourceField(parent, name) with ScalaFieldElement {
  override def getLabelText(flags: Long) = display
  override def getImageDescriptor = ScalaImages.SCALA_TYPE
}
/** A local variable or method parameter, with full source-range details. */
class ScalaLocalVariableElement(
  parent: JavaElement, name: String,
  declarationSourceStart: Int, declarationSourceEnd: Int, nameStart: Int, nameEnd: Int,
  typeSignature: String, display: String, jdtFlags: Int, methodParameter: Boolean) extends LocalVariable(
  parent, name, declarationSourceStart, declarationSourceEnd, nameStart, nameEnd, typeSignature, null, jdtFlags, methodParameter) with ScalaElement {
  override def getLabelText(flags: Long) = display
}
/** The compiler-generated MODULE$ singleton field; never shown in the UI. */
class ScalaModuleInstanceElement(parent: JavaElement)
  extends SourceField(parent, "MODULE$") with ScalaFieldElement {
  override def getLabelText(flags: Long) = getElementName
  override def isVisible = false
}
/**
 * Reflective handles into JDT's internal element-info classes. Several
 * accessors moved between MemberElementInfo and AnnotatableInfo across JDT
 * releases — hence the try/catch fallbacks — and the `children` field only
 * exists in some versions (see hasChildrenField/addChildMethod).
 */
object ScalaMemberElementInfo extends ReflectionUtils {
  val jeiClazz = Class.forName("org.eclipse.jdt.internal.core.JavaElementInfo")
  val meiClazz = Class.forName("org.eclipse.jdt.internal.core.MemberElementInfo")
  val aiClazz = Class.forName("org.eclipse.jdt.internal.core.AnnotatableInfo")
  val sreiClazz = Class.forName("org.eclipse.jdt.internal.core.SourceRefElementInfo")
  val setFlagsMethod = getDeclaredMethod(meiClazz, "setFlags", classOf[Int])
  // Name-range accessors: try MemberElementInfo first, then AnnotatableInfo.
  val getNameSourceStartMethod = try {
    getDeclaredMethod(meiClazz, "getNameSourceStart")
  } catch {
    case _: NoSuchMethodException => getDeclaredMethod(aiClazz, "getNameSourceStart")
  }
  val getNameSourceEndMethod = try {
    getDeclaredMethod(meiClazz, "getNameSourceEnd")
  } catch {
    case _: NoSuchMethodException => getDeclaredMethod(aiClazz, "getNameSourceEnd")
  }
  val setNameSourceStartMethod = try {
    getDeclaredMethod(meiClazz, "setNameSourceStart", classOf[Int])
  } catch {
    case _: NoSuchMethodException => getDeclaredMethod(aiClazz, "setNameSourceStart", classOf[Int])
  }
  val setNameSourceEndMethod = try {
    getDeclaredMethod(meiClazz, "setNameSourceEnd", classOf[Int])
  } catch {
    case _: NoSuchMethodException => getDeclaredMethod(aiClazz, "setNameSourceEnd", classOf[Int])
  }
  val setSourceRangeStartMethod = getDeclaredMethod(sreiClazz, "setSourceRangeStart", classOf[Int])
  val setSourceRangeEndMethod = getDeclaredMethod(sreiClazz, "setSourceRangeEnd", classOf[Int])
  val getDeclarationSourceStartMethod = getDeclaredMethod(sreiClazz, "getDeclarationSourceStart")
  val getDeclarationSourceEndMethod = getDeclaredMethod(sreiClazz, "getDeclarationSourceEnd")
  // True when JavaElementInfo declares a `children` field in this JDT
  // version; consumers fall back to their own storage otherwise.
  val hasChildrenField = try {
    getDeclaredField(jeiClazz, "children")
    true
  } catch {
    case _: NoSuchFieldException => false
  }
  val addChildMethod = if (hasChildrenField) getDeclaredMethod(jeiClazz, "addChild", classOf[IJavaElement]) else null
}
/**
 * Typed accessors for declaration source ranges, backed by the reflective
 * Method handles in [[ScalaMemberElementInfo]].
 */
trait SourceRefScalaElementInfo extends JavaElementInfo {
  import ScalaMemberElementInfo._
  def getDeclarationSourceStart0: Int = getDeclarationSourceStartMethod.invoke(this).asInstanceOf[Integer].intValue
  def getDeclarationSourceEnd0: Int = getDeclarationSourceEndMethod.invoke(this).asInstanceOf[Integer].intValue
  // Int.box replaces the deprecated `new Integer(...)` boxing constructor;
  // Method.invoke only needs an Object, so the cached box is equivalent.
  def setSourceRangeStart0(start: Int): Unit = setSourceRangeStartMethod.invoke(this, Int.box(start))
  def setSourceRangeEnd0(end: Int): Unit = setSourceRangeEndMethod.invoke(this, Int.box(end))
}
/**
 * Adds flag and name-range accessors on top of the source-range ones, plus
 * the abstract child-registration hook implemented by concrete infos.
 */
trait ScalaMemberElementInfo extends SourceRefScalaElementInfo {
  import ScalaMemberElementInfo._
  import java.lang.Integer
  def addChild0(child: IJavaElement): Unit
  def setFlags0(flags: Int) = setFlagsMethod.invoke(this, Int.box(flags))
  def getNameSourceStart0: Int = getNameSourceStartMethod.invoke(this).asInstanceOf[Integer].intValue
  def getNameSourceEnd0: Int = getNameSourceEndMethod.invoke(this).asInstanceOf[Integer].intValue
  def setNameSourceStart0(start: Int) = setNameSourceStartMethod.invoke(this, Int.box(start))
  def setNameSourceEnd0(end: Int) = setNameSourceEndMethod.invoke(this, Int.box(end))
}
/**
 * Maintains an auxiliary children array for JDT versions whose
 * JavaElementInfo lacks a `children` field; otherwise delegates to the
 * reflective addChild method.
 */
trait AuxChildrenElementInfo extends JavaElementInfo {
  import ScalaMemberElementInfo._
  // Only used (non-null) when the JDT `children` field is absent.
  var auxChildren: Array[IJavaElement] = if (hasChildrenField) null else new Array(0)
  override def getChildren = if (hasChildrenField) super.getChildren else auxChildren
  // Appends `child`, avoiding duplicates in the auxiliary array.
  def addChild0(child: IJavaElement): Unit =
    if (hasChildrenField)
      addChildMethod.invoke(this, child)
    else if (auxChildren.length == 0)
      auxChildren = Array(child)
    else if (!auxChildren.contains(child))
      auxChildren = auxChildren ++ Seq(child)
}
/** Implemented by element infos that can record their type parameters. */
trait HasTypeParameters {
  def setTypeParameters(typeParams: Array[ITypeParameter]): Unit
}
/** Type-parameter info enriched with the source-range accessors. */
class TypeParameterScalaElementInfo extends TypeParameterElementInfo with SourceRefScalaElementInfo
/**
 * Element info for Scala types: widens protected JDT setters to public and
 * implements child registration against either the reflective `children`
 * field or the local array, whichever this JDT version provides.
 */
class ScalaElementInfo extends SourceTypeElementInfo with ScalaMemberElementInfo with HasTypeParameters {
  import ScalaMemberElementInfo._
  // Appends `child`, avoiding duplicates.
  override def addChild0(child: IJavaElement): Unit = {
    if (hasChildrenField)
      addChildMethod.invoke(this, child)
    else if (children.length == 0)
      children = Array(child)
    else if (!children.contains(child))
      children = children ++ Seq(child)
  }
  override def setHandle(handle: IType) = super.setHandle(handle)
  override def setSuperclassName(superclassName: Array[Char]) = super.setSuperclassName(superclassName)
  override def setSuperInterfaceNames(superInterfaceNames: Array[Array[Char]]) = super.setSuperInterfaceNames(superInterfaceNames)
  override def setTypeParameters(tps: Array[ITypeParameter]): Unit = {
    typeParameters = tps
  }
}
/**
 * Widens JDT's protected method-info setters to public and declares the
 * return-type setter needed when building Scala method elements.
 */
trait FnInfo extends SourceMethodElementInfo with ScalaMemberElementInfo with HasTypeParameters {
  override def setArgumentNames(argumentNames: Array[Array[Char]]) = super.setArgumentNames(argumentNames)
  def setReturnType(returnType: Array[Char]): Unit
  override def setExceptionTypeNames(exceptionTypeNames: Array[Array[Char]]) = super.setExceptionTypeNames(exceptionTypeNames)
}
/** Constructor info with public setters and auxiliary-children support. */
class ScalaSourceConstructorInfo extends SourceConstructorInfo with FnInfo with AuxChildrenElementInfo with HasTypeParameters {
  override def setReturnType(returnType: Array[Char]) = super.setReturnType(returnType)
  override def setTypeParameters(tps: Array[ITypeParameter]): Unit = {
    typeParameters = tps
  }
}
/** Method info with public setters and auxiliary-children support. */
class ScalaSourceMethodInfo extends SourceMethodInfo with FnInfo with AuxChildrenElementInfo with HasTypeParameters {
  override def setReturnType(returnType: Array[Char]) = super.setReturnType(returnType)
  override def setTypeParameters(tps: Array[ITypeParameter]): Unit = {
    typeParameters = tps
  }
}
/** Field info with a public type-name setter. */
class ScalaSourceFieldElementInfo extends SourceFieldElementInfo with ScalaMemberElementInfo with AuxChildrenElementInfo {
  override def setTypeName(name: Array[Char]) = super.setTypeName(name)
}
/**
 * Placeholder handle for a top-level type: type queries are delegated to
 * the corresponding ScalaSourceTypeElement found lazily in the unit's
 * structure via `mirror`; `exists` reflects whether that lookup succeeded.
 */
class LazyToplevelClass(unit: ScalaCompilationUnit, name: String) extends SourceType(unit, name) with IType with ScalaElement {
  /**
   * I rewrote this method from the previous implementation, to what I believe was the initial intention.
   * The commented line is the original, in case this causes any problems.
   *
   * TODO: Revisit this once there is a better structure builder.
   */
  lazy val mirror: Option[ScalaSourceTypeElement] = {
    // unit.getElementInfo.asInstanceOf[OpenableElementInfo].getChildren.find(e => e.getElementName == name).map(_.asInstanceOf[ScalaSourceTypeElement])
    unit.getElementInfo match {
      case openable: OpenableElementInfo =>
        openable.getChildren.find(e => e.getElementType == IJavaElement.TYPE && e.getElementName == name) map (_.asInstanceOf[ScalaSourceTypeElement])
      case _ => None
    }
  }
  override def isAnonymous = false
  override def isLocal = false
  override def isEnum = false
  // Delegated to the mirrored element; defaults to false when unresolved.
  override def isInterface = mirror map (_.isInterface) getOrElse false
  override def getDeclaringType = null
  override def exists = mirror.isDefined
}
| mlangc/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/core/internal/jdt/model/ScalaElements.scala | Scala | bsd-3-clause | 14,866 |
package ml.combust.mleap.benchmark
import com.typesafe.config.{Config, ConfigFactory}
import com.typesafe.config.ConfigValueFactory.fromAnyRef
import ml.combust.mleap.BuildInfo
/**
* Created by hollinwilkins on 2/4/17.
*/
/**
 * Command-line entry point for the MLeap benchmark suite.
 *
 * Parses a benchmark sub-command and its options into a Typesafe Config,
 * then reflectively instantiates the configured [[Benchmark]] implementation
 * and runs it with that config.
 */
object Boot extends App {
  val parser = new scopt.OptionParser[Config]("mleap-benchmark") {
    head("mleap-benchmark", BuildInfo().version)
    // --model-path: required path to the serialized MLeap bundle.
    def modelPath = opt[String]("model-path").
      required().
      text("path to MLeap bundle containing model").action {
        (path, config) => config.withValue("model-path", fromAnyRef(path))
      }
    // --frame-path: required path to the input frame to transform.
    def framePath = opt[String]("frame-path").
      required().
      text("path to frame containing data for transform").action {
        (path, config) => config.withValue("frame-path", fromAnyRef(path))
      }
    // Each sub-command selects a Benchmark implementation by class name and
    // layers the matching default configuration underneath.
    cmd("mleap-transform").text("standard MLeap transform benchmark").action {
      (_, config) =>
        config.withValue("benchmark", fromAnyRef("ml.combust.mleap.benchmark.MleapTransformBenchmark")).
          withFallback(ConfigFactory.load("mleap.conf"))
    }.children(modelPath, framePath)
    cmd("mleap-row").text("MLeap row transform benchmark").action {
      (_, config) =>
        config.withValue("benchmark", fromAnyRef("ml.combust.mleap.benchmark.MleapRowTransformBenchmark")).
          withFallback(ConfigFactory.load("mleap.conf"))
    }.children(modelPath, framePath)
    cmd("spark-transform").text("standard Spark transform benchmark").action {
      (_, config) =>
        config.withValue("benchmark", fromAnyRef("ml.combust.mleap.benchmark.SparkTransformBenchmark")).
          withFallback(ConfigFactory.load("spark.conf"))
    }.children(modelPath, framePath)
  }
  parser.parse(args, ConfigFactory.empty()) match {
    case Some(config) =>
      // Instantiate the benchmark class named under the "benchmark" key.
      // Class#newInstance is deprecated; invoke the zero-arg constructor
      // explicitly instead.
      Class.forName(config.getString("benchmark")).
        getDeclaredConstructor().
        newInstance().
        asInstanceOf[Benchmark].benchmark(config)
    case None => // do nothing: scopt has already reported the usage error
  }
}
| combust/mleap | mleap-benchmark/src/main/scala/ml/combust/mleap/benchmark/Boot.scala | Scala | apache-2.0 | 1,924 |
package edu.kaist.tmtk.ie
import edu.kaist.tmtk._
import edu.knowitall.ollie.Ollie
import edu.knowitall.ollie.confidence.OllieConfidenceFunction
import edu.knowitall.tool.parse.MaltParser
// Wrapper around the Ollie open information extraction pipeline (POS tagging ->
// Malt dependency parsing -> Ollie triple extraction with confidence scoring).
class OpenIE(lv: AnyRef = "W", q: Boolean = true, conf: AMap[String, String] = XMap("dep.model" -> "maltparser/engmalt.linear-1.7.mco")) {
  // Must be set before the MaltParser below is constructed, so its logging is quieted.
  System.setProperty("Malt.verbosity", "WARN")
  // Dependency parser backing Ollie; quite2 presumably suppresses console output while
  // loading the model when q is true — TODO confirm quite2's contract
  val parser = quite2(() => new MaltParser(findFile(conf("dep.model")).toURL), q)
  val extractor = new Ollie
  // Default Ollie confidence classifier, used to score each extraction.
  val confidence = OllieConfidenceFunction.loadDefaultClassifier()
  override val toString = s"OpenIE(${extractor.getClass.getSimpleName})"
  log(s"[DONE] Load $this", lv)

  // One extracted triple: (arg1, rel, arg2), with optional attribution/enabler clauses
  // (null when absent) and the classifier's confidence score.
  class Extraction(val arg1: String, val rel: String, val arg2: String, val attribution: String, val enabler: String, val score: Double) {
    override def toString = Seq(arg1, rel, arg2, attribution, enabler, "%.4f" format score).mkString("\\t")
  }

  // Runs the full pipeline over a single sentence and returns all extractions.
  def extract(sentence: String) = {
    val tagged = parser.postagger.postag(sentence)
    val parsed = parser.dependencyGraphPostagged(tagged)
    val extracted = for (e <- extractor.extract(parsed)) yield
      new Extraction(e.extraction.arg1.text, e.extraction.rel.text, e.extraction.arg2.text, e.extraction.attribution.map(_.text).orNull, e.extraction.enabler.map(_.text).orNull, confidence(e))
    extracted.toSeq
  }
}
/** Factory overloads for [[OpenIE]] covering the common combinations of log level and quiet flag. */
object OpenIE {
  def apply() = new OpenIE
  def apply(q: Boolean) = new OpenIE("W", q)
  def apply(lv: AnyRef) = new OpenIE(lv)
  def apply(lv: AnyRef, q: Boolean) = new OpenIE(lv, q)
}
| chrisjihee/tmtk | src/main/scala/edu/kaist/tmtk/ie/OpenIE.scala | Scala | mit | 1,543 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.stats
import java.time.ZonedDateTime
import java.util.Date
import com.vividsolutions.jts.geom.Geometry
import org.locationtech.geomesa.curve.{BinnedTime, Z2SFC, Z3SFC}
import org.locationtech.geomesa.filter.Bounds.Bound
import org.locationtech.geomesa.filter._
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.geomesa.utils.geotools._
import org.locationtech.geomesa.utils.stats._
import org.locationtech.sfcurve.IndexRange
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter._
import scala.collection.JavaConversions._
/**
 * Estimates query result counts from pre-computed per-attribute statistics
 * (count, min/max, histograms, frequency sketches) instead of scanning data.
 * All estimates are approximate and generally err on the side of over-counting.
 */
trait StatsBasedEstimator {

  // self-type: must be mixed into a GeoMesaStats implementation, which supplies getStats
  stats: GeoMesaStats =>

  import CountEstimator.ZHistogramPrecision
  import org.locationtech.geomesa.utils.conversions.ScalaImplicits.RichTraversableOnce

  /**
   * Estimates the count for a given filter, based off the per-attribute metadata we have stored
   *
   * @param sft simple feature type
   * @param filter filter to apply - should have been run through QueryPlanFilterVisitor so all props are right
   * @return estimated count, if available
   */
  protected def estimateCount(sft: SimpleFeatureType, filter: Filter): Option[Long] = {
    // TODO currently we don't consider if the dates are actually ANDed with everything else
    CountEstimator.extractDates(sft, filter) match {
      case None => Some(0L) // disjoint dates
      case Some(Bounds(lo, hi)) => estimateCount(sft, filter, lo.value, hi.value)
    }
  }

  /**
   * Estimates the count for a given filter, based off the per-attribute metadata we have stored
   *
   * @param sft simple feature type
   * @param filter filter to apply - should have been run through QueryPlanFilterVisitor so all props are right
   * @param loDate bounds on the dates to be queried, if any
   * @param hiDate bounds on the dates to be queried, if any
   * @return estimated count, if available
   */
  private def estimateCount(sft: SimpleFeatureType,
                            filter: Filter,
                            loDate: Option[Date],
                            hiDate: Option[Date]): Option[Long] = {
    import Filter.{EXCLUDE, INCLUDE}
    filter match {
      case EXCLUDE => Some(0L)
      // INCLUDE: the total feature count, if a CountStat has been gathered
      case INCLUDE => stats.getStats[CountStat](sft).headOption.map(_.count)

      case a: And => estimateAndCount(sft, a, loDate, hiDate)
      case o: Or  => estimateOrCount(sft, o, loDate, hiDate)
      case n: Not => estimateNotCount(sft, n, loDate, hiDate)

      // ID queries are exact: one feature per identifier
      case i: Id => Some(i.getIdentifiers.size)

      case _ =>
        // single filter - equals, between, less than, etc
        val attribute = FilterHelper.propertyNames(filter, sft).headOption
        attribute.flatMap(estimateAttributeCount(sft, filter, _, loDate, hiDate))
    }
  }

  /**
   * Estimate counts for AND filters. Since it's an AND, we calculate the child counts and
   * return the minimum.
   *
   * We check for spatio-temporal filters first, as those are the only ones that operate on 2+ properties.
   *
   * @param sft simple feature type
   * @param filter AND filter
   * @param loDate bounds on the dates to be queried, if any
   * @param hiDate bounds on the dates to be queried, if any
   * @return estimated count, if available
   */
  private def estimateAndCount(sft: SimpleFeatureType,
                               filter: And,
                               loDate: Option[Date],
                               hiDate: Option[Date]): Option[Long] = {
    val stCount = estimateSpatioTemporalCount(sft, filter)
    // note: we might over count if we get bbox1 AND bbox2, as we don't intersect them
    val individualCounts = filter.getChildren.flatMap(estimateCount(sft, _, loDate, hiDate))
    // the AND of several predicates can match at most the most selective one
    (stCount ++ individualCounts).minOption
  }

  /**
   * Estimate counts for OR filters. Because this is an OR, we sum up the child counts
   *
   * @param sft simple feature type
   * @param filter OR filter
   * @param loDate bounds on the dates to be queried, if any
   * @param hiDate bounds on the dates to be queried, if any
   * @return estimated count, if available
   */
  private def estimateOrCount(sft: SimpleFeatureType,
                              filter: Or,
                              loDate: Option[Date],
                              hiDate: Option[Date]): Option[Long] = {
    // NOTE(review): redundant — RichTraversableOnce is already imported at the trait level
    import org.locationtech.geomesa.utils.conversions.ScalaImplicits.RichTraversableOnce

    // estimate for each child separately and sum
    // note that we might double count some values if the filter is complex
    filter.getChildren.flatMap(estimateCount(sft, _, loDate, hiDate)).sumOption
  }

  /**
   * Estimates the count for NOT filters
   *
   * @param sft simple feature type
   * @param filter filter
   * @param loDate bounds on the dates to be queried, if any
   * @param hiDate bounds on the dates to be queried, if any
   * @return count, if available
   */
  private def estimateNotCount(sft: SimpleFeatureType,
                               filter: Not,
                               loDate: Option[Date],
                               hiDate: Option[Date]): Option[Long] = {
    // NOT is estimated as (total count) minus (count matching the negated filter),
    // clamped at zero since both terms are themselves estimates
    for {
      all <- estimateCount(sft, Filter.INCLUDE, None, None)
      neg <- estimateCount(sft, filter.getFilter, loDate, hiDate)
    } yield {
      math.max(0, all - neg)
    }
  }

  /**
   * Estimate spatio-temporal counts for an AND filter.
   *
   * @param sft simple feature type
   * @param filter complex filter
   * @return count, if available
   */
  private def estimateSpatioTemporalCount(sft: SimpleFeatureType, filter: And): Option[Long] = {
    // currently we don't consider if the spatial predicate is actually AND'd with the temporal predicate...
    // TODO add filterhelper method that accurately pulls out the st values
    for {
      geomField  <- Option(sft.getGeomField)
      dateField  <- sft.getDtgField
      geometries =  FilterHelper.extractGeometries(filter, geomField, sft.isPoints)
      if geometries.nonEmpty
      intervals  =  FilterHelper.extractIntervals(filter, dateField)
      if intervals.nonEmpty
      bounds     <- stats.getStats[MinMax[Date]](sft, Seq(dateField)).headOption
    } yield {
      if (geometries.disjoint || intervals.disjoint) { 0L } else {
        // drop intervals that fall entirely outside the observed min/max date bounds
        val inRangeIntervals = {
          val minTime = bounds.min.getTime
          val maxTime = bounds.max.getTime
          intervals.values.filter { i =>
            i.lower.value.forall(_.toInstant.toEpochMilli <= maxTime) &&
                i.upper.value.forall(_.toInstant.toEpochMilli >= minTime)
          }
        }
        estimateSpatioTemporalCount(sft, geomField, dateField, geometries.values, inRangeIntervals)
      }
    }
  }

  /**
   * Estimates counts based on a combination of spatial and temporal values.
   *
   * @param sft simple feature type
   * @param geomField geometry attribute name for the simple feature type
   * @param dateField date attribute name for the simple feature type
   * @param geometries geometry to evaluate
   * @param intervals intervals to evaluate
   * @return
   */
  private def estimateSpatioTemporalCount(sft: SimpleFeatureType,
                                          geomField: String,
                                          dateField: String,
                                          geometries: Seq[Geometry],
                                          intervals: Seq[Bounds[ZonedDateTime]]): Long = {

    val period = sft.getZ3Interval
    val dateToBins = BinnedTime.dateToBinnedTime(period)
    val boundsToDates = BinnedTime.boundsToIndexableDates(period)

    // for each interval: the z3 time bins it spans, plus the time offsets within the
    // first and last bin
    val binnedTimes = intervals.map { interval =>
      val (lower, upper) = boundsToDates(interval.bounds)
      val BinnedTime(lb, lt) = dateToBins(lower)
      val BinnedTime(ub, ut) = dateToBins(upper)
      (Range.inclusive(lb, ub).map(_.toShort), lt, ut)
    }
    val allBins = binnedTimes.flatMap(_._1).distinct

    stats.getStats[Z3Histogram](sft, Seq(geomField, dateField), allBins).headOption match {
      case None => 0L
      case Some(histogram) =>
        // time range for a chunk is 0 to 1 week (in seconds)
        val sfc = Z3SFC(period)
        val (tmin, tmax) = (sfc.time.min.toLong, sfc.time.max.toLong)
        val xy = geometries.map(GeometryUtils.bounds)

        // map z3 ranges for the given time span onto histogram bucket indices
        def getIndices(t1: Long, t2: Long): Seq[Int] = {
          val w = histogram.timeBins.head // z3 histogram bounds are fixed, so indices should be the same
          val zs = sfc.ranges(xy, Seq((t1, t2)), ZHistogramPrecision)
          zs.flatMap(r => histogram.directIndex(w, r.lower) to histogram.directIndex(w, r.upper))
        }

        // indices for a full week, used for all bins between the first and last
        lazy val middleIndices = getIndices(tmin, tmax)

        // build up our indices by week so that we can deduplicate them afterwards
        val timeBinsAndIndices = scala.collection.mutable.Map.empty[Short, Seq[Int]].withDefaultValue(Seq.empty)

        // the z3 index breaks time into 1 week chunks, so create a range for each week in our range
        binnedTimes.foreach { case (bins, lt, ut) =>
          if (bins.length == 1) {
            timeBinsAndIndices(bins.head) ++= getIndices(lt, ut)
          } else {
            // first bin is partial from lt to end-of-week, last is partial from start to ut
            val head +: middle :+ last = bins.toList
            timeBinsAndIndices(head) ++= getIndices(lt, tmax)
            timeBinsAndIndices(last) ++= getIndices(tmin, ut)
            middle.foreach(m => timeBinsAndIndices(m) ++= middleIndices)
          }
        }

        timeBinsAndIndices.map { case (b, indices) => indices.distinct.map(histogram.count(b, _)).sum }.sum
    }
  }

  /**
   * Estimates the count for attribute filters (equals, less than, during, etc)
   *
   * @param sft simple feature type
   * @param filter filter
   * @param attribute attribute name to estimate
   * @param loDate bounds on the dates to be queried, if any
   * @param hiDate bounds on the dates to be queried, if any
   * @return count, if available
   */
  private def estimateAttributeCount(sft: SimpleFeatureType,
                                     filter: Filter,
                                     attribute: String,
                                     loDate: Option[Date],
                                     hiDate: Option[Date]): Option[Long] = {
    import org.locationtech.geomesa.utils.geotools.RichAttributeDescriptors.RichAttributeDescriptor

    // noinspection ExistsEquals
    if (attribute == sft.getGeomField) {
      estimateSpatialCount(sft, filter)
    } else if (sft.getDtgField.exists(_ == attribute)) {
      estimateTemporalCount(sft, filter)
    } else {
      // we have an attribute filter
      // for list-typed attributes, bounds are extracted against the list's element type
      val extractedBounds = for {
        descriptor <- Option(sft.getDescriptor(attribute))
        binding = if (descriptor.isList) { descriptor.getListType() } else { descriptor.getType.getBinding }
      } yield {
        FilterHelper.extractAttributeBounds(filter, attribute, binding.asInstanceOf[Class[Any]])
      }
      extractedBounds.flatMap { bounds =>
        if (bounds.disjoint) {
          Some(0L) // disjoint range
        } else if (!bounds.values.exists(_.isBounded)) {
          estimateCount(sft, Filter.INCLUDE, loDate, hiDate) // inclusive filter
        } else {
          // split exact-match bounds (lower == upper) from true ranges
          val boundsValues = bounds.values.map(b => (b.lower.value, b.upper.value))
          val (equalsBounds, rangeBounds) = boundsValues.partition { case (l, r) => l == r }
          val equalsCount = if (equalsBounds.isEmpty) { Some(0L) } else {
            // compare equals estimate with range estimate and take the smaller
            val equals = estimateEqualsCount(sft, attribute, equalsBounds.map(_._1.get), loDate, hiDate)
            val range = estimateRangeCount(sft, attribute, equalsBounds)
            (equals, range) match {
              case (Some(e), Some(r)) => Some(math.min(e, r))
              case (None, r) => r
              case (e, None) => e
            }
          }
          val rangeCount = if (rangeBounds.isEmpty) { Some(0L) } else {
            estimateRangeCount(sft, attribute, rangeBounds)
          }
          for { e <- equalsCount; r <- rangeCount } yield { e + r }
        }
      }
    }
  }

  /**
   * Estimates counts from spatial predicates. Non-spatial predicates will be ignored.
   *
   * @param filter filter to evaluate
   * @return estimated count, if available
   */
  private def estimateSpatialCount(sft: SimpleFeatureType, filter: Filter): Option[Long] = {
    // NOTE(review): redundant — RichTraversableOnce is already imported at the trait level
    import org.locationtech.geomesa.utils.conversions.ScalaImplicits.RichTraversableOnce

    val geometries = FilterHelper.extractGeometries(filter, sft.getGeomField, sft.isPoints)
    if (geometries.isEmpty) {
      None
    } else if (geometries.disjoint) {
      Some(0L)
    } else {
      stats.getStats[Histogram[Geometry]](sft, Seq(sft.getGeomField)).headOption.map { histogram =>
        // z2 bounds of the histogram's observed min/max envelope, used to discard
        // query ranges that fall wholly outside the data
        val (zLo, zHi) = {
          val (xmin, ymin, _, _) = GeometryUtils.bounds(histogram.min)
          val (_, _, xmax, ymax) = GeometryUtils.bounds(histogram.max)
          (Z2SFC.index(xmin, ymin).z, Z2SFC.index(xmax, ymax).z)
        }
        def inRange(r: IndexRange) = r.lower < zHi && r.upper > zLo

        val ranges = Z2SFC.ranges(geometries.values.map(GeometryUtils.bounds), ZHistogramPrecision)
        val indices = ranges.filter(inRange).flatMap { range =>
          // directIndex returns -1 for out-of-histogram values; clamp to histogram edges
          val loIndex = Some(histogram.directIndex(range.lower)).filter(_ != -1).getOrElse(0)
          val hiIndex = Some(histogram.directIndex(range.upper)).filter(_ != -1).getOrElse(histogram.length - 1)
          loIndex to hiIndex
        }
        indices.distinct.map(histogram.count).sumOrElse(0L)
      }
    }
  }

  /**
   * Estimates counts from temporal predicates. Non-temporal predicates will be ignored.
   *
   * @param sft simple feature type
   * @param filter filter to evaluate
   * @return estimated count, if available
   */
  private def estimateTemporalCount(sft: SimpleFeatureType, filter: Filter): Option[Long] = {
    // NOTE(review): redundant — RichTraversableOnce is already imported at the trait level
    import org.locationtech.geomesa.utils.conversions.ScalaImplicits.RichTraversableOnce

    for {
      dateField <- sft.getDtgField
      intervals =  FilterHelper.extractIntervals(filter, dateField)
      if intervals.nonEmpty
      histogram <- stats.getStats[Histogram[Date]](sft, Seq(dateField)).headOption
    } yield {
      // an interval overlaps the histogram iff it isn't entirely before min or after max
      def inRange(interval: Bounds[ZonedDateTime]) = {
        interval.lower.value.forall(_.toInstant.toEpochMilli <= histogram.max.getTime) &&
            interval.upper.value.forall(_.toInstant.toEpochMilli >= histogram.min.getTime)
      }

      if (intervals.disjoint) { 0L } else {
        val indices = intervals.values.filter(inRange).flatMap { interval =>
          // indexOf returns -1 for out-of-histogram values; clamp to histogram edges
          val loIndex = interval.lower.value.map(i => histogram.indexOf(Date.from(i.toInstant))).filter(_ != -1).getOrElse(0)
          val hiIndex = interval.upper.value.map(i => histogram.indexOf(Date.from(i.toInstant))).filter(_ != -1).getOrElse(histogram.length - 1)
          loIndex to hiIndex
        }
        indices.distinct.map(histogram.count).sumOrElse(0L)
      }
    }
  }

  /**
   * Estimates an equals predicate. Uses frequency (count min sketch) for estimated value.
   *
   * @param sft simple feature type
   * @param attribute attribute to evaluate
   * @param values values to be estimated
   * @param loDate bounds on the dates to be queried, if any
   * @param hiDate bounds on the dates to be queried, if any
   * @return estimated count, if available.
   */
  private def estimateEqualsCount(sft: SimpleFeatureType,
                                  attribute: String,
                                  values: Seq[Any],
                                  loDate: Option[Date],
                                  hiDate: Option[Date]): Option[Long] = {
    // restrict the frequency sketch to the z3 time bins spanned by the query dates, if known
    val timeBins = for { d1 <- loDate; d2 <- hiDate } yield {
      val timeToBin = BinnedTime.timeToBinnedTime(sft.getZ3Interval)
      Range.inclusive(timeToBin(d1.getTime).bin, timeToBin(d2.getTime).bin).map(_.toShort)
    }
    val options = timeBins.getOrElse(Seq.empty)
    stats.getStats[Frequency[Any]](sft, Seq(attribute), options).headOption.map { freq =>
      // frequency estimates will never return less than the actual number, but will often return more
      // frequency has ~0.5% error rate based on the total number of features in the data set
      // we adjust the raw estimate based on the absolute error rate
      import CountEstimator.ErrorThresholds
      val absoluteError = math.floor(freq.size * freq.eps)
      val counts = if (absoluteError < 1.0) { values.map(freq.count) } else {
        values.map { v =>
          val estimate = freq.count(v)
          if (estimate == 0L) {
            0L
          } else if (estimate > absoluteError) {
            // estimate dominates the error: subtract a fraction of the absolute error,
            // scaled by which threshold bucket the relative error falls into
            val relativeError = absoluteError / estimate
            estimate - (ErrorThresholds.dropWhile(_ <= relativeError).head * 0.5 * absoluteError).toLong
          } else {
            // error dominates the estimate: keep only a threshold-scaled fraction of it
            val relativeError = estimate / absoluteError
            (ErrorThresholds.dropWhile(_ < relativeError).head * 0.5 * estimate).toLong
          }
        }
      }
      counts.sum
    }
  }

  /**
   * Estimates a potentially unbounded range predicate. Uses a binned histogram for estimated value.
   *
   * @param sft simple feature type
   * @param attribute attribute to evaluate
   * @param ranges ranges of values - may be unbounded (indicated by a None)
   * @return estimated count, if available
   */
  private def estimateRangeCount(sft: SimpleFeatureType,
                                 attribute: String,
                                 ranges: Seq[(Option[Any], Option[Any])]): Option[Long] = {
    stats.getStats[Histogram[Any]](sft, Seq(attribute)).headOption.map { histogram =>
      // keep only ranges that overlap the histogram's observed [min, max];
      // the defaults.min/max comparisons are type-aware orderings from the histogram
      val inRangeRanges = ranges.filter {
        case (None, None)         => true // inclusive filter
        case (Some(lo), None)     => histogram.defaults.min(lo, histogram.max) == lo
        case (None, Some(up))     => histogram.defaults.max(up, histogram.min) == up
        case (Some(lo), Some(up)) =>
          histogram.defaults.min(lo, histogram.max) == lo && histogram.defaults.max(up, histogram.min) == up
      }
      val indices = inRangeRanges.flatMap { case (lower, upper) =>
        // indexOf returns -1 for out-of-histogram values; clamp to histogram edges
        val lowerIndex = lower.map(histogram.indexOf).filter(_ != -1).getOrElse(0)
        val upperIndex = upper.map(histogram.indexOf).filter(_ != -1).getOrElse(histogram.length - 1)
        lowerIndex to upperIndex
      }
      indices.distinct.map(histogram.count).sumOrElse(0L)
    }
  }
}
object CountEstimator {

  // we only need enough precision to cover the number of bins (e.g. 2^n == bins), plus 2 for unused bits
  val ZHistogramPrecision: Int = math.ceil(math.log(GeoMesaStats.MaxHistogramSize) / math.log(2)).toInt + 2

  // relative-error buckets used by StatsBasedEstimator.estimateEqualsCount to discount
  // count-min-sketch overestimates
  val ErrorThresholds = Seq(0.1, 0.3, 0.5, 0.7, 0.9, 1.0)

  /**
   * Extracts date bounds from a filter. None is used to indicate a disjoint date range, otherwise
   * there will be a bounds object (which may be unbounded).
   *
   * @param sft simple feature type
   * @param filter filter
   * @return None, if disjoint filters, otherwise date bounds (which may be unbounded)
   */
  private [stats] def extractDates(sft: SimpleFeatureType, filter: Filter): Option[Bounds[Date]] = {
    sft.getDtgField match {
      // no date attribute on the schema: treat as unbounded
      case None => Some(Bounds.everything)
      case Some(dtg) =>
        val intervals = FilterHelper.extractIntervals(filter, dtg)
        if (intervals.disjoint) { None } else {
          // don't consider gaps, just get the endpoints of the intervals
          val dateTimes = intervals.values.reduceOption[Bounds[ZonedDateTime]] { case (left, right) =>
            val lower = Bounds.smallerLowerBound(left.lower, right.lower)
            val upper = Bounds.largerUpperBound(left.upper, right.upper)
            Bounds(lower, upper)
          }
          // convert ZonedDateTime endpoints to java.util.Date, preserving inclusivity flags
          val lower = dateTimes.map(d => Bound(d.lower.value.map(i => Date.from(i.toInstant)), d.lower.inclusive))
          val upper = dateTimes.map(d => Bound(d.upper.value.map(i => Date.from(i.toInstant)), d.upper.inclusive))
          Some(Bounds(lower.getOrElse(Bound.unbounded[Date]), upper.getOrElse(Bound.unbounded[Date])))
        }
    }
  }
}
| jahhulbert-ccri/geomesa | geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/stats/StatsBasedEstimator.scala | Scala | apache-2.0 | 20,642 |
/* Copyright (C) 2015 University of Massachusetts Amherst.
This file is part of βauthor_corefβ
http://github.com/iesl/author_coref
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package edu.umass.cs.iesl.author_coref.process
import java.io.{PrintWriter, File}
import cc.factorie.app.bib.parser.Dom
import cc.factorie.util.DefaultCmdOptions
import edu.umass.cs.iesl.author_coref._
import edu.umass.cs.iesl.author_coref.utilities.CodecCmdOption
// Command-line options: an output file for the extracted text and an input directory
// of BibTeX files. The codec option is inherited from CodecCmdOption.
class GenerateEmbeddingTrainingDataFromBibtexOpts extends DefaultCmdOptions with CodecCmdOption {
  // destination file for the extracted training text
  val output = new CmdOption[String]("output","Where to write the output", true)
  // directory containing the BibTeX files to parse
  val inputDir = new CmdOption[String]("input", "Where to read the input data from", true)
}
/**
 * Extracts embedding-training text from BibTeX files: for each entry, the title,
 * abstract, and keywords fields are concatenated into a single line of output.
 */
object GenerateEmbeddingTrainingDataFromBibtex {

  /**
   * Parses one BibTeX file and returns the concatenated title/abstract/keywords text
   * for each entry. Returns an empty collection (after logging) if parsing fails.
   */
  def getData(file: File, codec: String) = {
    println(s"[GenerateEmbeddingTrainingDataFromBibtex] Parsing ${file.getName}")
    val fileContents = file.getStringContents(codec)
    Dom.stringToDom(fileContents) match {
      case Left(_) =>
        println(s"[GenerateEmbeddingTrainingDataFromBibtex] ERROR while parsing ${file.getName}")
        Iterable()
      case Right(domDoc) =>
        val res = domDoc.entries.map {
          case (_, entry) =>
            // missing fields are simply omitted from the concatenation
            val maybeTitle = entry.otherFields.get("title")
            val maybeAbstract = entry.otherFields.get("abstract")
            val maybeKeywords = entry.otherFields.get("keywords")
            (maybeTitle ++ maybeAbstract ++ maybeKeywords).mkString(" ")
        }
        println(s"[GenerateEmbeddingTrainingDataFromBibtex] Found ${res.size} records")
        res
    }
  }

  /** Lazily parses each named file in turn; parsing happens as the iterator is consumed. */
  def fromFilenames(filenames: Iterator[String], codec: String) =
    filenames.map(s => getData(new File(s), codec))

  def main(args: Array[String]) = {
    val opts = new GenerateEmbeddingTrainingDataFromBibtexOpts
    opts.parse(args)
    // File.list() returns null when the directory does not exist or cannot be read;
    // guard against it instead of throwing an NPE
    val filenames = Option(new File(opts.inputDir.value).list()).getOrElse(Array.empty[String])
      .filterNot(_.startsWith("\\."))
      .map(new File(opts.inputDir.value, _).getAbsolutePath)
      .toIterator
    val pw = new PrintWriter(opts.output.value, opts.codec.value)
    try {
      val data = fromFilenames(filenames, opts.codec.value)
      data.foreach(_.foreach(pw.println))
    } finally {
      // ensure the writer is closed (and output flushed) even if parsing/writing throws
      pw.close()
    }
  }
}
| iesl/author_coref | src/main/scala/edu/umass/cs/iesl/author_coref/process/GenerateEmbeddingTrainingDataFromBibtex.scala | Scala | apache-2.0 | 2,713 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.sparta.plugin.output.kafka
import java.io.{Serializable => JSerializable}
import java.util.Properties
import com.stratio.sparta.plugin.input.kafka.KafkaBase
import com.stratio.sparta.sdk.pipeline.output.Output._
import com.stratio.sparta.sdk.pipeline.output.{Output, OutputFormatEnum, SaveModeEnum}
import com.stratio.sparta.sdk.properties.CustomProperties
import com.stratio.sparta.sdk.properties.ValidatingPropertyMap._
import org.apache.kafka.clients.producer.ProducerConfig._
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.apache.spark.sql._
import scala.collection.mutable
// Sparta output that writes DataFrame rows to a Kafka topic (the table name from the
// save options is used as the topic). Rows are serialized either as delimiter-joined
// values (ROW format) or as JSON.
class KafkaOutput(name: String, properties: Map[String, JSerializable])
  extends Output(name, properties) with KafkaBase with CustomProperties {

  // defaults applied when the corresponding producer properties are absent
  val DefaultKafkaSerializer = classOf[StringSerializer].getName
  val DefaultAck = "0"
  val DefaultBatchNumMessages = "200"
  val DefaultProducerPort = "9092"

  // keys used by CustomProperties to pull user-defined kafka properties out of the config
  override val customKey = "KafkaProperties"
  override val customPropertyKey = "kafkaPropertyKey"
  override val customPropertyValue = "kafkaPropertyValue"

  val outputFormat = OutputFormatEnum.withName(properties.getString("format", "json").toUpperCase)
  // separator between column values when writing in ROW format
  val rowSeparator = properties.getString("rowSeparator", ",")

  // only append is supported: Kafka topics cannot be overwritten/upserted
  override def supportedSaveModes: Seq[SaveModeEnum.Value] = Seq(SaveModeEnum.Append)

  override def save(dataFrame: DataFrame, saveMode: SaveModeEnum.Value, options: Map[String, String]): Unit = {
    val tableName = getTableNameFromOptions(options)

    validateSaveMode(saveMode)

    outputFormat match {
      // ROW: join each row's values with the configured separator
      case OutputFormatEnum.ROW => dataFrame.rdd.foreachPartition(messages =>
        messages.foreach(message => send(tableName, message.mkString(rowSeparator))))
      // default: one JSON document per row
      case _ => dataFrame.toJSON.foreachPartition { messages =>
        messages.foreach(message => send(tableName, message))
      }
    }
  }

  // Sends one message via a cached producer; note the send is asynchronous (fire-and-forget)
  def send(topic: String, message: String): Unit = {
    val record = new ProducerRecord[String, String](topic, message)
    KafkaOutput.getProducer(getProducerConnectionKey, createProducerProps).send(record)
  }

  // broker list used both as producer config and as the producer-cache key
  private[kafka] def getProducerConnectionKey: String =
    getHostPort(BOOTSTRAP_SERVERS_CONFIG, DefaultHost, DefaultProducerPort)
      .getOrElse(BOOTSTRAP_SERVERS_CONFIG, throw new Exception("Invalid metadata broker list"))

  // builds producer Properties: user properties first, then mandatory options, then
  // custom properties — later puts override earlier ones for duplicate keys
  private[kafka] def createProducerProps: Properties = {
    val props = new Properties()
    properties.filter(_._1 != customKey).foreach { case (key, value) => props.put(key, value.toString) }
    mandatoryOptions.foreach { case (key, value) => props.put(key, value) }
    getCustomProperties.foreach { case (key, value) => props.put(key, value) }
    props
  }

  // the minimum set of producer options, with defaults filled in
  private[kafka] def mandatoryOptions: Map[String, String] =
    getHostPort(BOOTSTRAP_SERVERS_CONFIG, DefaultHost, DefaultProducerPort) ++
      Map(
        KEY_SERIALIZER_CLASS_CONFIG -> properties.getString(KEY_SERIALIZER_CLASS_CONFIG, DefaultKafkaSerializer),
        VALUE_SERIALIZER_CLASS_CONFIG -> properties.getString(VALUE_SERIALIZER_CLASS_CONFIG, DefaultKafkaSerializer),
        ACKS_CONFIG -> properties.getString(ACKS_CONFIG, DefaultAck),
        BATCH_SIZE_CONFIG -> properties.getString(BATCH_SIZE_CONFIG, DefaultBatchNumMessages)
      )

  // closes ALL cached producers, not just this output's — NOTE(review): other outputs
  // sharing the cache will lose their producers too; verify this is intended
  override def cleanUp(options: Map[String, String]): Unit = {
    log.info(s"Closing Kafka producer in Kafka Output: $name")
    KafkaOutput.closeProducers()
  }
}
/**
 * Process-wide cache of KafkaProducers, keyed by broker connection string, so that
 * multiple partitions/outputs on the same executor reuse a single producer.
 */
object KafkaOutput {

  // guarded by its own monitor: Spark tasks on the same executor call getProducer
  // concurrently, and an unsynchronized mutable.Map is not thread-safe (the race could
  // create and leak duplicate producers, or corrupt the map)
  private val producers: mutable.Map[String, KafkaProducer[String, String]] = mutable.Map.empty

  def getProducer(producerKey: String, properties: Properties): KafkaProducer[String, String] = {
    getInstance(producerKey, properties)
  }

  // closes every cached producer (called from KafkaOutput.cleanUp)
  def closeProducers(): Unit = producers.synchronized {
    producers.values.foreach(producer => producer.close())
  }

  // atomically get-or-create the producer for the given connection key
  private[kafka] def getInstance(key: String, properties: Properties): KafkaProducer[String, String] =
    producers.synchronized {
      producers.getOrElseUpdate(key, new KafkaProducer[String, String](properties))
    }
}
| fjsc/sparta | plugins/src/main/scala/com/stratio/sparta/plugin/output/kafka/KafkaOutput.scala | Scala | apache-2.0 | 4,728 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command.stream
import java.util
import scala.collection.JavaConverters._
import scala.collection.mutable
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference, Expression, ExprId, NamedExpression}
import org.apache.spark.sql.catalyst.plans.logical.{Filter, LogicalPlan, Project}
import org.apache.spark.sql.execution.command.DataCommand
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.execution.streaming.StreamingRelation
import org.apache.spark.sql.types.{StringType, StructType}
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
import org.apache.carbondata.processing.loading.csvinput.CSVInputFormat
import org.apache.carbondata.spark.StreamingOption
import org.apache.carbondata.spark.util.{CarbonSparkUtil, Util}
import org.apache.carbondata.stream.StreamJobManager
/**
* This command will start a Spark streaming job to insert rows from source to sink
*/
case class CarbonCreateStreamCommand(
streamName: String,
sinkDbName: Option[String],
sinkTableName: String,
ifNotExists: Boolean,
optionMap: Map[String, String],
query: String
) extends DataCommand {
// Result schema of this command: the stream name, the started job id, and its status.
override def output: Seq[Attribute] =
  Seq(AttributeReference("Stream Name", StringType, nullable = false)(),
    AttributeReference("JobId", StringType, nullable = false)(),
    AttributeReference("Status", StringType, nullable = false)())
// Validates the stream query, rewrites its source relation into a streaming relation
// appropriate for the source format (kafka/socket/other), and starts the streaming job.
// Returns a single row: (stream name, job id, "RUNNING").
override def processData(sparkSession: SparkSession): Seq[Row] = {
  val inputQuery = sparkSession.sql(query)

  // collect every carbon relation in the plan that is flagged as a streaming source
  val sourceTableSeq = inputQuery.logicalPlan collect {
    case r: LogicalRelation
      if r.relation.isInstanceOf[CarbonDatasourceHadoopRelation] &&
         r.relation.asInstanceOf[CarbonDatasourceHadoopRelation].carbonTable.isStreamingSource =>
      r.relation.asInstanceOf[CarbonDatasourceHadoopRelation].carbonTable
  }
  // exactly one streaming source table is required
  if (sourceTableSeq.isEmpty) {
    throw new MalformedCarbonCommandException(
      "Must specify stream source table in the stream query")
  }
  if (sourceTableSeq.size > 1) {
    throw new MalformedCarbonCommandException(
      "Stream query on more than one stream source table is not supported")
  }
  val sourceTable = sourceTableSeq.head
  val tblProperty = sourceTable.getTableInfo.getFactTable.getTableProperties

  // null format means the source is a plain carbon file, which cannot be streamed from
  val format = sourceTable.getFormat
  if (format == null) {
    throw new MalformedCarbonCommandException("Streaming from carbon file is not supported")
  }

  // kafka/socket sources need a custom plan (data arrives in a 'value' column and must be
  // re-projected into the table schema); other formats just swap in a streaming relation
  val updatedQuery = if (format.equals("kafka")) {
    shouldHaveProperty(tblProperty, "kafka.bootstrap.servers", sourceTable)
    shouldHaveProperty(tblProperty, "subscribe", sourceTable)
    createPlan(sparkSession, inputQuery, sourceTable, "kafka", tblProperty.asScala)
  } else if (format.equals("socket")) {
    shouldHaveProperty(tblProperty, "host", sourceTable)
    shouldHaveProperty(tblProperty, "port", sourceTable)
    createPlan(sparkSession, inputQuery, sourceTable, "socket", tblProperty.asScala)
  } else {
    // Replace the logical relation with a streaming relation created
    // from the stream source table
    inputQuery.logicalPlan transform {
      case r: LogicalRelation
        if r.relation.isInstanceOf[CarbonDatasourceHadoopRelation] &&
           r.relation.asInstanceOf[CarbonDatasourceHadoopRelation].carbonTable.isStreamingSource
      => prepareStreamingRelation(sparkSession, r)
      case plan: LogicalPlan => plan
    }
  }

  // NOTE(review): unreachable — sourceTable is sourceTableSeq.head and emptiness was
  // already rejected above, so it can never be null here
  if (sourceTable == null) {
    throw new MalformedCarbonCommandException("Must specify stream source table in the query")
  }

  // add CSV row parser if user does not specify
  val newMap = mutable.Map[String, String]()
  optionMap.foreach(x => newMap(x._1) = x._2)
  newMap(CSVInputFormat.DELIMITER) = tblProperty.asScala.getOrElse("delimiter", ",")

  // start the streaming job
  val jobId = StreamJobManager.startStream(
    sparkSession = sparkSession,
    ifNotExists = ifNotExists,
    streamName = streamName,
    sourceTable = sourceTable,
    sinkTable = CarbonEnv.getCarbonTable(sinkDbName, sinkTableName)(sparkSession),
    query = query,
    streamDf = Dataset.ofRows(sparkSession, updatedQuery),
    options = new StreamingOption(newMap.toMap)
  )
  Seq(Row(streamName, jobId, "RUNNING"))
}
/**
 * Create a new plan for the stream query on kafka and Socket source table.
 * This is required because we need to convert the schema of the data stored in kafka.
 * The returned logical plan contains the complete plan tree of original plan with
 * logical relation replaced with a streaming relation.
 *
 * @param sparkSession spark session
 * @param inputQuery stream query from user
 * @param sourceTable source table (kafka table)
 * @param sourceName source name, kafka or socket
 * @param tblProperty table property of source table
 * @return a new logical plan
 */
private def createPlan(
    sparkSession: SparkSession,
    inputQuery: DataFrame,
    sourceTable: CarbonTable,
    sourceName: String,
    tblProperty: mutable.Map[String, String]): LogicalPlan = {
  // We follow 3 steps to generate new plan
  // 1. replace the logical relation in stream query with streaming relation
  // 2. collect the new ExprId generated
  // 3. update the stream query plan with the new ExprId generated, to make the plan consistent
  // exprList is used for UDF to extract the data from the 'value' column in kafka
  val columnNames = Util.convertToSparkSchema(sourceTable).fieldNames
  val exprList = columnNames.zipWithIndex.map {
    case (columnName, i) =>
      s"case when size(_values) > $i then _values[$i] else null end AS $columnName"
  }
  // Maps column name -> new ExprId created by the replacement plan; filled as
  // a side effect of the first transform, read by the second.
  val aliasMap = new util.HashMap[String, ExprId]()
  val updatedQuery = inputQuery.logicalPlan transform {
    case r: LogicalRelation
      if r.relation.isInstanceOf[CarbonDatasourceHadoopRelation] &&
         r.relation.asInstanceOf[CarbonDatasourceHadoopRelation].carbonTable.isStreamingSource =>
      // for kafka stream source, get the 'value' column and split it by using UDF
      val plan = sparkSession.readStream
        .format(sourceName)
        .options(tblProperty)
        .load()
        .selectExpr("CAST(value as string) as _value")
      val recordFormat = tblProperty.getOrElse("record_format", "csv")
      // NOTE(review): a record_format other than "csv"/"json" raises a
      // MatchError here — confirm it is validated upstream.
      val newPlan = recordFormat match {
        case "csv" =>
          val delimiter = tblProperty.getOrElse("delimiter", ",")
          plan.selectExpr(
            s"split(_value, '${CarbonSparkUtil.delimiterConverter4Udf(delimiter)}') as _values")
            .selectExpr(exprList: _*)
            .logicalPlan
        case "json" =>
          import org.apache.spark.sql.functions._
          plan
            .select(from_json(col("_value"), Util.convertToSparkSchema(sourceTable)) as "_data")
            .select("_data.*")
            .logicalPlan
      }
      // collect the newly generated ExprId
      newPlan collect {
        case p@Project(projectList, child) =>
          projectList.map { expr =>
            aliasMap.put(expr.name, expr.exprId)
          }
          p
      }
      newPlan
    case plan: LogicalPlan => plan
  }
  // Transform the stream plan to replace all attributes with the collected
  // ExprId. Only Project and Filter nodes are rewritten here.
  val transFormedPlan = updatedQuery transform {
    case p@Project(projectList: Seq[NamedExpression], child) =>
      val newProjectList = projectList.map { expr =>
        val newExpr = expr transform {
          case attribute: Attribute =>
            // Re-point the attribute at the new ExprId when it changed.
            val exprId: ExprId = aliasMap.get(attribute.name)
            if (exprId != null) {
              if (exprId.id != attribute.exprId.id) {
                AttributeReference(
                  attribute.name, attribute.dataType, attribute.nullable,
                  attribute.metadata)(exprId, attribute.qualifier)
              } else {
                attribute
              }
            } else {
              attribute
            }
        }
        newExpr.asInstanceOf[NamedExpression]
      }
      Project(newProjectList, child)
    case f@Filter(condition: Expression, child) =>
      val newCondition = condition transform {
        case attribute: Attribute =>
          val exprId: ExprId = aliasMap.get(attribute.name)
          if (exprId != null) {
            if (exprId.id != attribute.exprId.id) {
              AttributeReference(
                attribute.name, attribute.dataType, attribute.nullable,
                attribute.metadata)(exprId, attribute.qualifier)
            } else {
              attribute
            }
          } else {
            attribute
          }
      }
      Filter(newCondition, child)
  }
  transFormedPlan
}
/**
 * Create a streaming relation from the input logical relation (source table).
 *
 * Only file based formats ("csv", "text", "json", "parquet") are accepted;
 * they require a "path" tblproperty on the source table.
 *
 * @param sparkSession spark session
 * @param logicalRelation source table to convert
 * @return streaming relation reusing the logical relation's output attributes
 */
private def prepareStreamingRelation(
    sparkSession: SparkSession,
    logicalRelation: LogicalRelation): StreamingRelation = {
  val sourceTable = logicalRelation.relation
    .asInstanceOf[CarbonDatasourceHadoopRelation].carbonTable
  val tblProperty = sourceTable.getTableInfo.getFactTable.getTableProperties
  val format = sourceTable.getFormat
  if (format == null) {
    throw new MalformedCarbonCommandException("Streaming from carbon file is not supported")
  }
  val streamReader = sparkSession
    .readStream
    .schema(getSparkSchema(sourceTable))
    .format(format)
  val dataFrame: DataFrame = format match {
    case "csv" | "text" | "json" | "parquet" =>
      shouldHaveProperty(tblProperty, "path", sourceTable)
      streamReader.load(tblProperty.get("path"))
    case other =>
      throw new MalformedCarbonCommandException(s"Streaming from $format is not supported")
  }
  val streamRelation = dataFrame.logicalPlan.asInstanceOf[StreamingRelation]
  // Since SparkSQL analyzer will match the UUID in attribute,
  // create a new StreamRelation and re-use the same attribute from LogicalRelation
  StreamingRelation(streamRelation.dataSource, streamRelation.sourceName, logicalRelation.output)
}
/**
 * Fails fast when a mandatory tblproperty is absent on a stream source table.
 *
 * @param tblProperty table properties of the source table
 * @param propertyName name of the required property
 * @param sourceTable source table, used to build the error message
 */
private def shouldHaveProperty(
    tblProperty: java.util.Map[String, String],
    propertyName: String,
    sourceTable: CarbonTable) : Unit = {
  val present = tblProperty.containsKey(propertyName)
  if (!present) {
    val tableId = s"${sourceTable.getDatabaseName}.${sourceTable.getTableName}"
    throw new MalformedCarbonCommandException(
      s"tblproperty '$propertyName' should be provided for stream source $tableId")
  }
}
/**
 * Builds the Spark schema of the source table from its visible columns,
 * ordered by schema ordinal. Columns with schemaOrdinal == -1 are excluded.
 */
private def getSparkSchema(sourceTable: CarbonTable): StructType = {
  val visibleColumns = sourceTable.getTableInfo.getFactTable.getListOfColumns.asScala
    .filter(_.getSchemaOrdinal != -1)
    .sortBy(_.getSchemaOrdinal)
    .toArray
  Util.convertToSparkSchema(sourceTable, visibleColumns)
}
}
| sgururajshetty/carbondata | integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/stream/CarbonCreateStreamCommand.scala | Scala | apache-2.0 | 12,148 |
package com.hippo.gps
import java.security.MessageDigest
import org.scala_tools.time.Imports._
import org.json4s._
import org.json4s.native.JsonMethods._
import scalaj.http._
/**
 * A single GPS track point as returned by the gpsoo.net history API.
 *
 * @param gps_time timestamp string as reported by the API
 * @param lng longitude
 * @param lat latitude
 * @param course heading
 * @param speed speed
 */
case class GPSPoint(gps_time: String, lng: Double, lat: Double, course: Int, speed: Int) {
  /** CSV-style rendering: time,lng,lat,course,speed. */
  override def toString: String =
    Seq(gps_time, lng, lat, course, speed).mkString(",")
}
/**
 * Client for the gpsoo.net open API.
 *
 * Logs in with account/password and fetches GPS history points for a device
 * (imei), invoking a callback for every point received. Errors are not
 * thrown: they are recorded in [[error_message]].
 *
 * @param username account name
 * @param password account password (MD5-hashed before being sent)
 */
case class GPSOOClient(username: String, password: String) {
  // Maximum number of records the API returns per history request.
  private val MAX_RECORDS_LIMIT = 500
  // if access_token == "", then consider as not logged in, or access_token expired
  private var access_token = ""
  // Last error, "<exception class>:<message>"; empty when no error occurred.
  var error_message = ""
  def getMessage = error_message
  //private var isLogin = true
  /** Adds Unix-timestamp rendering (seconds, as string) to DateTime. */
  implicit class UnixTimestamp(dt: DateTime) {
    def asUnixTimestamp: String = (dt.getMillis / 1000).toString
  }
  /** Hex-encoded MD5 digest of the given string. */
  def md5(s: String): String = {
    MessageDigest.getInstance("MD5").digest(s.getBytes).map("%02x".format(_)).mkString
  }
  /**
   * Requests an access token from the API. On failure only [[error_message]]
   * is set; the token stays empty. Returns this client for chaining.
   */
  def login = {
    val url = "http://api.gpsoo.net/1/auth/access_token?"
    val ts = DateTime.now.asUnixTimestamp
    // md5(md5(password of user) + time), per the API signature contract.
    val signature = md5(md5(password) + ts)
    val data = Seq("account" -> username, "time" -> ts, "signature" -> signature)
    try {
      val res: String = Http(url).postForm(data).asString.body
      // NOTE(review): compact(render(JString)) keeps the JSON quotes around
      // the token value ("abc" instead of abc) — confirm the API accepts the
      // quoted form, otherwise extract the string value instead.
      access_token = compact(render((parse(res) \ "access_token")))
    } catch {
      case e: Exception => error_message = e.getClass.getName + ":" + e.getMessage
    }
    //if login success
    println(access_token)
    this
  }
  /**
   * Fetches history points for the device between fromTime and toTime,
   * passing each point to f (default: print it). Windows larger than 30 days
   * are split into 30-day chunks; requests older than 150 days, empty ranges
   * or a missing access token are silently ignored.
   */
  def getHistory(imei: String, fromTime: DateTime, toTime: DateTime)(implicit f: GPSPoint => Unit = p => println(p.toString)): Unit = {
    // fromTime..toTime must span less than 30 days for a single API call.
    def getHistoryInternal(fromTime: DateTime, toTime: DateTime)(implicit f: GPSPoint => Unit) {
      implicit val Formats = DefaultFormats
      var size = 0
      var lastTime: String = "0"
      val url = "http://api.gpsoo.net/1/devices/history?"
      val data = Seq[(String, String)]("account" -> username,
        "access_token" -> access_token,
        "time" -> DateTime.now.asUnixTimestamp,
        "imei" -> imei,
        "map_type" -> "BAIDU",
        "begin_time" -> fromTime.asUnixTimestamp,
        "end_time" -> toTime.asUnixTimestamp,
        "limit" -> s"$MAX_RECORDS_LIMIT"
      )
      try {
        val json = parse(Http(url).postForm(data).asString.body)
        //println(data)
        //println(json)
        (json \ "ret").extract[Int] match {
          case 0 =>
            val points: List[GPSPoint] = (json \ "data").extract[List[GPSPoint]]
            size = points.size
            // A full page means more data may follow: remember where to resume.
            if (size == MAX_RECORDS_LIMIT) lastTime = points.last.gps_time
            points.foreach(f)
          case 10006 => access_token = "" // access_token expired, re-login required
          // NOTE(review): unknown return codes only print an empty line and
          // are otherwise swallowed — confirm this is intentional.
          case _ => println
        }
      }
      catch {
        case e: Exception => error_message = e.getClass.getName + ":" + e.getMessage
      }
      /*
      Http(url).postForm(data).execute(parser={
        inputStream=>
          val points:List[GPSPoint] = (parse(inputStream) \ "data").extract[List[GPSPoint]]
          size = points.size
          lastTime = points.last.gps_time
          points.map(f)
      })
      */
      // if we got MAX_RECORDS_LIMIT records, then get more records from gps_time of last record to toTime
      if (size == MAX_RECORDS_LIMIT) getHistoryInternal(new DateTime(lastTime.toLong * 1000), toTime)(f)
    }
    if (access_token == "" || (fromTime >= toTime) || (fromTime < DateTime.now - 150.day)) {
      // wrong parameter, do nothing
    } else if ((fromTime + 30.day) >= toTime)
      getHistoryInternal(fromTime, toTime)(f)
    else {
      // Split windows longer than 30 days: fetch the first 30 days, recurse for the rest.
      getHistoryInternal(fromTime, fromTime + 30.day)(f)
      getHistory(imei, fromTime + 30.day, toTime)
    }
  }
}
| corbamico/GPSOO-Client | GPSOOClient_Sync/src/main/scala/com/hippo/gps/GPSOOClient.scala | Scala | gpl-2.0 | 4,175 |
// Value class wrapping a Double; `self` exposes the underlying value.
class A(val self: Double) extends AnyVal
| som-snytt/dotty | tests/pos/i1137-1/A_1.scala | Scala | apache-2.0 | 41 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v3
import uk.gov.hmrc.ct.accounts.retriever.AccountsBoxRetriever
import uk.gov.hmrc.ct.box.{CtBoolean, Calculated, CtBoxIdentifier}
import uk.gov.hmrc.ct.ct600.calculations.PeriodOfAccountsCalculator
// True when the company is filing more than one return for this period of accounts.
case class B50(value: Boolean) extends CtBoxIdentifier("Making more than one return for this company") with CtBoolean
/**
 * Calculator for B50: true when the period of accounts is "long" (requiring
 * more than one return), derived from the accounts start/end dates (AC3/AC4).
 */
object B50 extends Calculated[B50, AccountsBoxRetriever] with PeriodOfAccountsCalculator {
  override def calculate(boxRetriever: AccountsBoxRetriever) =
    B50(isLongPeriodOfAccounts(boxRetriever.retrieveAC3(), boxRetriever.retrieveAC4()))
} | scottcutts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v3/B50.scala | Scala | apache-2.0 | 1,200 |
package suiryc.scala.javafx.scene.control
import com.sun.javafx.scene.control.VirtualScrollBar
import javafx.collections.ObservableList
import javafx.collections.transformation.SortedList
import javafx.scene.control._
import javafx.scene.control.skin.{TableViewSkin, TreeTableViewSkin, VirtualFlow}
import javafx.scene.layout.Region
import spray.json._
import suiryc.scala.javafx.beans.value.RichObservableValue
import suiryc.scala.javafx.beans.value.RichObservableValue._
import suiryc.scala.javafx.scene.Nodes
import scala.jdk.CollectionConverters._
import scala.reflect._
import scala.reflect.internal.util.WeakHashSet
/** TableView (and TreeTableView) helpers. */
object TableViews {
/**
 * Replaces the table items while preserving the current sort order.
 *
 * JavaFX clears the table sort order when plain (non-sorted) items are
 * swapped (see https://bugs.openjdk.java.net/browse/JDK-8092759), so the
 * previous order is captured first and re-applied afterwards when needed.
 */
def setItems[A](table: TableView[A], items: ObservableList[A]): Unit = {
  val currentOrder = table.getSortOrder
  // No restore is needed when nothing is sorted, or when items live in a
  // SortedList (whose ordering survives the swap).
  val mustRestore = !currentOrder.isEmpty && !table.getItems.isInstanceOf[SortedList[A]]
  table.setItems(items)
  if (mustRestore) {
    table.getSortOrder.setAll(currentOrder)
    ()
  }
}
/**
 * Replaces the tree table root while preserving the current sort order.
 *
 * Upon changing the root the table sort order is cleared (see
 * https://bugs.openjdk.java.net/browse/JDK-8092759); the previous order is
 * captured first and re-applied afterwards when needed.
 */
def setRoot[A](table: TreeTableView[A], root: TreeItem[A]): Unit = {
  val currentOrder = table.getSortOrder
  val mustRestore = !currentOrder.isEmpty
  table.setRoot(root)
  if (mustRestore) {
    table.getSortOrder.setAll(currentOrder)
    ()
  }
}
/**
 * Scrolls so that the requested row index is visible in the view.
 *
 * Nothing is done when the target row (plus padding) is already visible.
 * An index outside the rows range is adjusted to show the first or last row.
 *
 * @param table table
 * @param index row index (0-based) to make visible
 * @param top whether the row should appear at the top (scroll up) or at
 *            the bottom (scroll down)
 * @param padding number of extra rows that should also be visible
 */
def scrollTo(table: TableView[_], index: Int, top: Boolean, padding: Int): Unit = {
  // Scrolling is controlled through the VirtualFlow child of the table skin
  // (only present once the table is shown).
  val skinChildren = table.getSkin.asInstanceOf[TableViewSkin[_]].getChildren.asScala
  skinChildren.collectFirst {
    case flow: VirtualFlow[_] => flow
  }.foreach { flow =>
    scrollTo(flow.asInstanceOf[VirtualFlow[_ <: IndexedCell[_]]], table.getItems.size, index, top, padding)
  }
}
/**
 * Scrolls so that the requested row index is visible (in the view).
 *
 * Nothing is done when the target row (plus padding) is already visible.
 * An index outside the rows range is adjusted to show the first or last row.
 *
 * @param table table
 * @param index row index (0-based) to make visible
 * @param top whether the row should appear at the top (scroll up) or at
 *            the bottom (scroll down)
 * @param padding number of extra rows that should also be visible
 */
def scrollTo(table: TreeTableView[_], index: Int, top: Boolean, padding: Int): Unit = {
  // The VirtualFlow that drives scrolling lives inside the table skin.
  val skinChildren = table.getSkin.asInstanceOf[TreeTableViewSkin[_]].getChildren.asScala
  skinChildren.collectFirst {
    case flow: VirtualFlow[_] => flow
  }.foreach { flow =>
    scrollTo(flow.asInstanceOf[VirtualFlow[_ <: IndexedCell[_]]], table.getRoot.getChildren.size, index, top, padding)
  }
}
/**
 * Scrolls the virtual flow one pixel at a time until the target row (and its
 * padding) is fully visible.
 *
 * @param flow virtual flow backing the (tree) table view
 * @param itemsCount total number of rows
 * @param index row index (0-based) to make visible
 * @param top whether to scroll up (row at top) or down (row at bottom)
 * @param padding number of extra rows that should also be visible
 */
private def scrollTo(flow: VirtualFlow[_ <: IndexedCell[_]], itemsCount: Int, index: Int, top: Boolean, padding: Int): Unit = {
  // A row is considered visible starting with its very first pixel. We want
  // most of the row to be seen, so the easiest solution is to make sure it is
  // fully visible by targeting the row next to the one we want (+ padding).
  // We determine two targets to see above (top) and below (bottom) the wanted
  // one; padding is applied in the scrolling direction.
  // (The Int.MinValue/MaxValue guards prevent arithmetic overflow.)
  val targetTop = if (top) {
    if (index > Int.MinValue + padding) index - (padding + 1) else Int.MinValue
  } else {
    index - 1
  }
  val targetBottom = if (top) {
    index + 1
  } else {
    if (index < Int.MaxValue - padding) index + (padding + 1) else Int.MaxValue
  }
  // Steps one pixel per iteration; movedUp/movedDown prevent an infinite
  // back-and-forth when both targets cannot be satisfied simultaneously.
  @scala.annotation.tailrec
  def adjust(top: Boolean, movedUp: Boolean, movedDown: Boolean): Unit = {
    if (top) {
      // Scroll up.
      if (targetTop < 0) {
        // Scroll to the top.
        flow.scrollPixels(Double.MinValue)
        ()
      } else if (flow.getFirstVisibleCell.getIndex > targetTop) {
        // Target row (first pixel) not yet visible: scroll 1px and loop.
        flow.scrollPixels(-1)
        adjust(top = top, movedUp = true, movedDown = movedDown)
      } else if (!movedUp) {
        // Now that top side is visible, switch to bottom side; ensure we did
        // not scroll down yet (to prevent possible infinite loop).
        adjust(top = !top, movedUp = true, movedDown = true)
      }
    } else {
      // Scroll down.
      if (targetBottom >= itemsCount) {
        // Scroll to the bottom.
        flow.scrollPixels(Double.MaxValue)
        ()
      } else if (flow.getLastVisibleCell.getIndex < targetBottom) {
        // Bottom target not yet visible: scroll 1px and loop.
        flow.scrollPixels(1)
        adjust(top = top, movedUp = movedUp, movedDown = true)
      } else if (!movedDown) {
        // Bottom side visible: switch to the top side once.
        adjust(top = !top, movedUp = true, movedDown = true)
      }
    }
  }
  adjust(top = top, movedUp = false, movedDown = false)
}
/**
 * Automatically adjusts column width to fill the table size.
 *
 * The target column is expected to be a root column of the table, not a
 * nested one. Listeners adjust the width when other elements change.
 *
 * @param column the column to adjust
 */
def autowidthColumn[S](column: TableColumn[S, _]): Unit = {
  val table = column.getTableView
  // All root columns except the adjusted one share the remaining width.
  val siblings = table.getColumns.asScala.toList.filterNot(_ eq column)
  autowidthColumn(table, column, siblings)
}
/**
 * Automatically adjusts column width to fill the table size.
 *
 * The target column is expected to be a root column of the tree table, not a
 * nested one. Listeners adjust the width when other elements change.
 *
 * @param column the column to adjust
 */
def autowidthColumn[S](column: TreeTableColumn[S, _]): Unit = {
  val table = column.getTreeTableView
  // All root columns except the adjusted one share the remaining width.
  val siblings = table.getColumns.asScala.toList.filterNot(_ eq column)
  autowidthColumn(table, column, siblings)
}
/**
 * Automatically adjusts column width to fill the table size.
 *
 * The target column is expected to be one at the root of the table, not a
 * nested one.
 * Listeners are used to adjust width when other elements change.
 * Must be called once the table skin is in place (scrollbar and clipped
 * container are looked up by CSS selector).
 *
 * @param table the table owning the column
 * @param column the column to adjust
 * @param otherColumns the other columns
 */
// scalastyle:off method.length
def autowidthColumn[S](table: Control, column: TableColumnBase[S, _], otherColumns: List[TableColumnBase[S, _]]): Unit = {
  // Now what we want is for all columns to occupy the whole table width.
  // Using a constrained resizing policy gets in the way of restoring the
  // view, so a solution is to create a binding through which we set the
  // target column (preferred) width according to the width of other
  // elements:
  //  columnWidth = tableWidth - tablePadding - otherColumnsWidth
  // However the vertical scrollbar which may appear is not taken into
  // account in table width. It is in the "clipped-container" that is a
  // Region of the viewed content:
  //  columnWidth = containerWidth - otherColumnsWidth
  // (the container has 0 width when there is no content in the table)
  //
  // The table width is changed before the container one, which triggers
  // glitches when resizing down using the second formula: the horizontal
  // scrollbar appears (and disappears upon interaction or resizing up).
  // Requesting layout (in 'runLater') makes it disappear right away.
  //
  // One solution, to apply correct width while preventing the horizontal
  // scrollbar to appear, is to:
  //  - listen to table width (changed before container one)
  //  - take into account the scrollbar width when visible, similarly to
  //    what is done for the container; eliminating the need to take into
  //    account the container
  // The scrollbar width happens to change the first time it appears;
  // instead of listening for the scrollbar visibility and width, it is
  // easier to listen to the container width.
  // Belt and suspenders: we also keep the floor value of the target width,
  // since it is a decimal value and rounding may make the scrollbar appear.
  // Note: in some versions of JavaFX, it may have been necessary to also
  // take into account the container in order to prevent the horizontal
  // scrollbar from appearing in some corner cases.
  //val tableView = column.getTableView
  //val otherColumns = tableView.getColumns.asScala.toList.filterNot(_ eq column)
  val clippedContainer = table.lookup(".clipped-container").asInstanceOf[Region]
  // NOTE(review): '.head' throws if no vertical scrollbar node is found;
  // assumes the skin is fully created at call time — confirm call timing.
  val scrollBar = table.lookupAll(".scroll-bar").asScala.collect {
    case scrollBar: VirtualScrollBar if scrollBar.getPseudoClassStates.asScala.map(_.getPseudoClassName).contains("vertical") => scrollBar
  }.head
  // Recomputes and applies the target column preferred/max width.
  def updateColumnWidth(): Unit = {
    val insets = table.getPadding
    val padding = insets.getLeft + insets.getRight
    val scrollbarWidth = if (!scrollBar.isVisible) 0 else scrollBar.getWidth
    val viewWidth0 = table.getWidth - padding - scrollbarWidth
    // It should not be necessary to take into account the container.
    val viewWidth = viewWidth0
    //val viewWidth =
    //  if (clippedContainer.getWidth > 0) math.min(viewWidth0, clippedContainer.getWidth)
    //  else viewWidth0
    // Belt and suspenders: floor value to eliminate possible corner cases.
    val columnWidth = (viewWidth - otherColumns.map(_.getWidth).sum).floor
    // Setting max width helps applying target width in some cases (minimum
    // width changed to a lower value after re-setting table items).
    val columnMaxWidth = math.max(columnWidth, column.getMinWidth)
    // Since current maxWidth may be lower than the one we want to apply, we
    // need to update it first (otherwise the preferred width may not be
    // applied as wanted).
    // Note: *do not* delay setting width (e.g. through runLater) since it
    // will make the scrollbar appear+disappear rapidly when resizing down.
    column.setMaxWidth(columnMaxWidth)
    column.setPrefWidth(columnWidth)
  }
  // Listening to the table and other columns width is necessary.
  // We may also listen to the scrollbar visibility and width, but it's
  // simpler to listen to the container width (impacted by both).
  // Since the target column minimum width may change, listen to it to
  // enforce value when necessary.
  val listenTo = List(
    table.widthProperty,
    clippedContainer.widthProperty,
    column.minWidthProperty
  ) ::: otherColumns.map(_.widthProperty())
  RichObservableValue.listen[AnyRef](listenTo)(updateColumnWidth())
  updateColumnWidth()
}
// scalastyle:on method.length
/** User data key under which [[trackRows]] stores the set of created table rows. */
val USERDATA_TABLE_ROWS = "suiryc-scala.table-rows"
/**
 * Tracks table rows.
 *
 * Stores the rows created for a table in its user data (under
 * [[USERDATA_TABLE_ROWS]], retrievable through [[getRows]]).
 * Sets the table row factory to follow rows creation. Wraps the existing
 * factory if any.
 * Row creation or item updating is passed to the given callback; upon
 * creation the callback receives (row, null, null).
 *
 * @param table table to track rows
 * @param updateCb callback for row creation or item updating
 */
def trackRows[A >: Null](table: TableView[A], updateCb: (TableRow[A], A, A) => Unit): Unit = {
  // Notes:
  // JavaFX only creates TableRows for visible (in view) rows. e.g. if view is
  // resized up, new TableRows are created.
  // Apparently when resizing down, the now unnecessary rows are not disposed
  // of (and are re-used when applicable).
  // To stay on the safe side, we still use a WeakHashSet so that if any row
  // is GCed, we will stop tracking it automatically.
  val rows = new WeakHashSet[TableRow[A]]
  Nodes.setUserData(table, USERDATA_TABLE_ROWS, rows)
  // Remember any pre-existing factory so we can delegate row creation to it.
  val rowFactory = Option(table.getRowFactory)
  table.setRowFactory(tableView => {
    val row = rowFactory.map(_.call(tableView)).getOrElse(new TableRow[A])
    // Forward every item change of this row to the callback.
    row.itemProperty.listen { (_, oldValue, newValue) =>
      updateCb(row, oldValue, newValue)
    }
    rows.add(row)
    // scalastyle:off null
    updateCb(row, null, null)
    // scalastyle:on null
    row
  })
}
/**
 * Gets the rows currently created for this table.
 *
 * Rows must have been tracked through [[trackRows]] beforehand.
 */
def getRows[A](table: TableView[A]): collection.mutable.Set[TableRow[A]] =
  Nodes.getUserData[collection.mutable.Set[TableRow[A]]](table, USERDATA_TABLE_ROWS)
/**
 * Gets table columns view.
 *
 * Formats columns order, visibility, width and sort type into a string.
 * Settings are derived from given columns description. Format is the JSON
 * value of ColumnsView.
 *
 * Works with nested columns (only inner-most columns are processed).
 * Assumes every leaf column (and every sorted column) appears in
 * columnsDesc; otherwise the '.get' calls below throw.
 */
def getColumnsView[A <: AnyRef](handler: ViewHandler[A], columnsDesc: List[(String, ViewHandler[A]#Column)]): String = {
  // Depth-first walk: nested columns are expanded, leaves are captured.
  @scala.annotation.tailrec
  def processColumns(columns: List[ViewHandler[A]#Column], acc: List[handler.ColumnView]): List[handler.ColumnView] = {
    columns match {
      case column :: tail =>
        val children = handler.getColumns(column)
        if (children.size > 0) {
          processColumns(children.asScala.toList ::: tail, acc)
        } else {
          // Leaf column: capture its state under its declared id.
          val key = columnsDesc.find(_._2 eq column).get._1
          val view = handler.getColumnView(column, key)
          processColumns(tail, acc :+ view)
        }
      case Nil => acc
    }
  }
  val columnViews = processColumns(handler.getColumns.asScala.toList, Nil)
  // Sort order is persisted as the list of column ids.
  val sortOrder = handler.getSortOrder.asScala.toList.map { column =>
    columnsDesc.find(_._2 eq column).get._1
  }
  val columnsView = ColumnsView[handler.SortType](columnViews, sortOrder)
  handler.columnsViewFormat.write(columnsView).compactPrint
}
/**
 * Sets table columns view.
 *
 * Orders columns and sets visibility/preferred width/sort type from the
 * given view (JSON ColumnsView, or the legacy "key=width;..." format).
 * Unknown or unparseable views fall back to an empty view; columns missing
 * from the view keep their state and are appended after the known ones.
 *
 * Works with nested columns (parent column must have been set beforehand).
 */
// scalastyle:off method.length
def setColumnsView[A](handler: ViewHandler[A], columnsDesc: List[(String, ViewHandler[A]#Column)], view: Option[String]): Unit = {
  var alreadyOrdered = List[ViewHandler[A]#Column]()
  // Ordering a column being processed (in order) is 'simple': just add it at
  // the (current) end of its parent (either another column, or the table).
  @scala.annotation.tailrec
  def orderColumn(column: ViewHandler[A]#Column): Unit = {
    val owner = Option(handler.getParentColumn(column)).map(handler.getColumns).getOrElse(handler.getColumns)
    owner.remove(column)
    owner.add(column)
    // Order recursively so that parent column is ordered too, unless it has
    // already been done.
    Option(handler.getParentColumn(column)) match {
      case Some(parent) =>
        if (!alreadyOrdered.contains(parent)) {
          alreadyOrdered ::= parent
          orderColumn(parent)
        }
      case None =>
    }
  }
  // First parse columns views
  val columnsView = view match {
    case Some(str) =>
      if (str.startsWith("{")) {
        // JSON format; parse failures degrade to an empty view.
        try {
          handler.columnsViewFormat.read(str.parseJson)
        } catch {
          case _: Exception => ColumnsView[handler.SortType](Nil, Nil)
        }
      }
      else {
        // Old format: "key=width;..." with a '-' prefix for hidden columns.
        val columnViews = str.split(';').toList.flatMap { param =>
          param.split('=').toList match {
            case key :: value :: Nil =>
              val columnView = try {
                if (value.startsWith("-")) ColumnView[handler.SortType](key, visible = false, value.substring(1).toDouble, handler.defaultSortType)
                else ColumnView[handler.SortType](key, visible = true, value.toDouble, handler.defaultSortType)
              } catch {
                case _: Exception => ColumnView[handler.SortType](key, visible = true, -1, handler.defaultSortType)
              }
              Some(columnView)
            case _ => None
          }
        }
        ColumnsView[handler.SortType](columnViews, Nil)
      }
    case None =>
      ColumnsView[handler.SortType](Nil, Nil)
  }
  // Then order (and set width/visibility) known columns.
  columnsView.columns.foreach { columnView =>
    columnsDesc.find(_._1 == columnView.id).foreach { case (_, column) =>
      handler.setColumnView(column, columnView)
      orderColumn(column)
    }
  }
  // Finally order remaining columns.
  val keys = columnsView.columns.map(_.id).toSet
  columnsDesc.filterNot { case (key, _) =>
    keys.contains(key)
  }.foreach { case (_, column) =>
    orderColumn(column)
  }
  // And apply sort order
  val sortOrder = columnsView.sortOrder.flatMap { key =>
    columnsDesc.find(_._1 == key).map(_._2)
  }
  handler.setSortOrder(sortOrder)
  ()
}
// scalastyle:on method.length
/** Persisted view state of a single column: stable id, visibility, width and sort direction. */
case class ColumnView[A <: Enum[A]](id: String, visible: Boolean, width: Double, sortType: A)
/** Persisted view state of a whole table: per-column views plus the sort order (column ids). */
case class ColumnsView[A <: Enum[A]](columns: List[ColumnView[A]], sortOrder: List[String])
/** spray-json (de)serialization of columns views. */
object JsonProtocol extends DefaultJsonProtocol {
  // TODO: move this where it can be shared ?
  /** Formats a Java enum as its name (a JSON string). */
  implicit def enumFormat[E <: Enum[E] : ClassTag]: JsonFormat[E] = new JsonFormat[E] {
    def write(e: E): JsValue = JsString(e.toString)
    def read(value: JsValue): E = value match {
      case JsString(e) =>
        try {
          Enum.valueOf(classTag[E].runtimeClass.asInstanceOf[Class[E]], e)
        } catch {
          case ex: Exception => deserializationError(s"Invalid ${classTag[E]} format: $e", ex)
        }
      case _ => deserializationError(s"Expected ${classTag[E]} as JsString. Got $value")
    }
  }
  /** Format for one column view (4 fields). */
  implicit def columnViewFormat[A <: Enum[A] : JsonFormat]: RootJsonFormat[ColumnView[A]] = jsonFormat4(ColumnView[A])
  /** Format for a whole columns view (columns + sort order). */
  def columnsViewFormat[A <: Enum[A] : JsonFormat]: RootJsonFormat[ColumnsView[A]] = jsonFormat2(ColumnsView[A])
}
/**
 * View handler.
 *
 * Abstracts over TableView and TreeTableView so that saving/restoring a
 * columns view can be implemented once for both.
 */
trait ViewHandler[A] {
  /** Concrete column type (TableColumn or TreeTableColumn). */
  type Column = A
  /** Concrete sort type enum (TableColumn.SortType or TreeTableColumn.SortType). */
  type SortType <: Enum[SortType]
  type ColumnView = TableViews.ColumnView[SortType]
  type ColumnsView = TableViews.ColumnsView[SortType]
  /** Sort type used when parsing the legacy view format (which has no sort info). */
  val defaultSortType: SortType
  /** Root columns of the view. */
  def getColumns: ObservableList[Column]
  /** Current sort order (columns). */
  def getSortOrder: ObservableList[Column]
  /** Replaces the sort order. */
  def setSortOrder(order: List[Column]): Boolean
  /** Parent column; null for a root column. */
  def getParentColumn(column: Column): Column
  /** Nested columns of the given column. */
  def getColumns(column: Column): ObservableList[Column]
  /** Captures the view state of a column under the given id. */
  def getColumnView(column: Column, id: String): ColumnView
  /** Applies a previously captured view state to a column. */
  def setColumnView(column: Column, view: ColumnView): Unit
  /** JSON format used to (de)serialize the columns view. */
  val columnsViewFormat: RootJsonFormat[ColumnsView]
}
/** TableView handler: adapts a TableView to the [[ViewHandler]] interface. */
implicit class TableViewDesc[A](table: TableView[A]) extends ViewHandler[TableColumn[A, _]] {
  type SortType = TableColumn.SortType
  val defaultSortType = TableColumn.SortType.ASCENDING
  override def getColumns: ObservableList[Column] = table.getColumns
  override def getSortOrder: ObservableList[Column] = table.getSortOrder
  override def setSortOrder(order: List[Column]): Boolean = table.getSortOrder.setAll(order.asJava)
  // getParentColumn returns the raw base type; cast back to our column type.
  override def getParentColumn(column: Column): Column = column.getParentColumn.asInstanceOf[Column]
  override def getColumns(column: Column): ObservableList[Column] = column.getColumns
  override def getColumnView(column: Column, id: String): ColumnView =
    ColumnView(id, column.isVisible, column.getWidth, column.getSortType)
  override def setColumnView(column: Column, view: ColumnView): Unit = {
    column.setVisible(view.visible)
    // Width <= 0 means "unknown" (legacy format): keep the current width.
    if (view.width > 0) column.setPrefWidth(view.width)
    column.setSortType(view.sortType)
  }
  import JsonProtocol._
  val columnsViewFormat: RootJsonFormat[ColumnsView] = JsonProtocol.columnsViewFormat[SortType]
}
/** TreeTableView handler: adapts a TreeTableView to the [[ViewHandler]] interface. */
implicit class TreeTableViewDesc[A](table: TreeTableView[A]) extends ViewHandler[TreeTableColumn[A, _]] {
  type SortType = TreeTableColumn.SortType
  val defaultSortType = TreeTableColumn.SortType.ASCENDING
  override def getColumns: ObservableList[Column] = table.getColumns
  override def getSortOrder: ObservableList[Column] = table.getSortOrder
  override def setSortOrder(order: List[Column]): Boolean = table.getSortOrder.setAll(order.asJava)
  // getParentColumn returns the raw base type; cast back to our column type.
  override def getParentColumn(column: Column): Column = column.getParentColumn.asInstanceOf[Column]
  override def getColumns(column: Column): ObservableList[Column] = column.getColumns
  override def getColumnView(column: Column, id: String): ColumnView =
    ColumnView(id, column.isVisible, column.getWidth, column.getSortType)
  override def setColumnView(column: Column, view: ColumnView): Unit = {
    column.setVisible(view.visible)
    // Width <= 0 means "unknown" (legacy format): keep the current width.
    if (view.width > 0) column.setPrefWidth(view.width)
    column.setSortType(view.sortType)
  }
  import JsonProtocol._
  val columnsViewFormat: RootJsonFormat[ColumnsView] = JsonProtocol.columnsViewFormat[SortType]
}
}
| suiryc/suiryc-scala | javafx/src/main/scala/suiryc/scala/javafx/scene/control/TableViews.scala | Scala | gpl-3.0 | 22,017 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.javadsl.api
import java.lang.reflect.Method
import com.lightbend.lagom.javadsl.api.Descriptor.Call
import scala.language.experimental.macros
import scala.language.implicitConversions
import scala.reflect.ClassTag
import scala.reflect.macros.blackbox.Context
import com.lightbend.lagom.javadsl.api.Descriptor.TopicCall
import com.lightbend.lagom.javadsl.api.broker.Topic
/**
 * Support for implementing javadsl service calls with Scala.
 *
 * Mirrors the [[Service]] factory methods, accepting Scala function literals
 * whose backing [[java.lang.reflect.Method]] is extracted via the
 * [[ScalaServiceSupport.ScalaMethodCall]] implicit macro.
 */
object ScalaService {
  import ScalaServiceSupport.ScalaMethodCall
  /** Identifies a service call by its method name. */
  def call[Request, Response](method: ScalaMethodCall[ServiceCall[Request, Response]]): Call[Request, Response] =
    Service.call[Request, Response](method.method)
  /** Identifies a service call by an explicit name. */
  def namedCall[Request, Response](name: String, method: ScalaMethodCall[ServiceCall[Request, Response]]): Call[Request, Response] =
    Service.namedCall[Request, Response](name, method.method)
  /** Identifies a service call by a path pattern. */
  def pathCall[Request, Response](path: String, method: ScalaMethodCall[ServiceCall[Request, Response]]): Call[Request, Response] =
    Service.pathCall[Request, Response](path, method.method)
  /** Identifies a REST service call by HTTP method and path pattern. */
  def restCall[Request, Response](restMethod: transport.Method, path: String, method: ScalaMethodCall[ServiceCall[Request, Response]]): Call[Request, Response] =
    Service.restCall[Request, Response](restMethod, path, method.method)
  /** Declares a broker topic published by the service. */
  def topic[Message](topicId: String, method: ScalaMethodCall[Topic[Message]]): TopicCall[Message] =
    Service.topic[Message](topicId, method.method)
  /** Starts a service descriptor with the given name. */
  def named(name: String): Descriptor = Service.named(name)
}
object ScalaServiceSupport {

  /** Wraps the java.lang.reflect.Method that a Scala function literal refers to. */
  final class ScalaMethodCall[T](val method: Method)

  object ScalaMethodCall {
    // One implicit macro conversion per function arity (0 through 22, Scala's
    // maximum FunctionN arity) so that a reference to a method on `this` of any
    // arity can be converted to a ScalaMethodCall at the call site.
    implicit def methodFor[T](f: () => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: _ => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
    implicit def methodFor[T](f: (_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _) => T): ScalaMethodCall[T] = macro methodForImpl[T]
  }

  /**
   * Looks up a public method on the given class by simple name.
   *
   * @throws NoSuchMethodException if no public method with that name exists
   */
  def getMethodWithName[T](clazz: Class[_], name: String): ScalaMethodCall[T] = {
    new ScalaMethodCall[T](clazz.getMethods.find(_.getName == name).getOrElse(throw new NoSuchMethodException(name)))
  }

  /**
   * Macro implementation backing the `methodFor` conversions: inspects the AST
   * of the passed function literal, extracts the name of the method invoked on
   * `this`, and emits a runtime `getMethodWithName` lookup for it. Aborts
   * compilation if the argument is not a direct method reference on `this`.
   */
  def methodForImpl[T](c: Context)(f: c.Expr[Any])(implicit tType: c.WeakTypeTag[T]): c.Expr[ScalaMethodCall[T]] = {
    import c.universe._
    f match {
      // Shape produced by eta-expansion of `this.someMethod`: a block whose
      // result is a function whose body applies the method on `this`.
      case Expr(Block((_, Function(_, Apply(Select(This(thisType), TermName(methodName)), _))))) =>
        val methodNameString = Literal(Constant(methodName))
        c.Expr[ScalaMethodCall[T]](q"_root_.com.lightbend.lagom.javadsl.api.ScalaServiceSupport.getMethodWithName[${tType.tpe}](_root_.scala.Predef.classOf[$thisType], $methodNameString)")
      case other =>
        c.abort(c.enclosingPosition, "methodFor must only be invoked with a reference to a function on this, for example, methodFor(this.someFunction)")
    }
  }
}
| rstento/lagom | service/javadsl/api/src/main/scala/com/lightbend/lagom/javadsl/api/ScalaServiceSupport.scala | Scala | apache-2.0 | 5,348 |
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package org.apache.spark.sql.streaming
import scala.reflect.ClassTag
import org.apache.spark.api.java.function.{VoidFunction => JVoidFunction, VoidFunction2 => JVoidFunction2}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.collection.WrappedInternalRow
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.exchange.ShuffleExchange
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.{DataFrame, Row, SnappySession}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Duration, SnappyStreamingContext, Time}
/**
* A SQL based DStream with support for schema/Product
* This class offers the ability to manipulate SQL query on DStreams
* It is similar to SchemaRDD, which offers the similar functions
* Internally, RDD of each batch duration is treated as a small
* table and CQs are evaluated on those small tables
* Some of the abstraction and code is borrowed from the project:
* https://github.com/Intel-bigdata/spark-streamingsql
*
* @param snsc
* @param queryExecution
*
*/
class SchemaDStream(@transient val snsc: SnappyStreamingContext,
    @transient val queryExecution: QueryExecution)
    extends DStream[Row](snsc) {

  @transient private val snappySession: SnappySession = snsc.snappySession
  @transient private val catalog = snappySession.sessionState.catalog

  def this(ssc: SnappyStreamingContext, logicalPlan: LogicalPlan) =
    this(ssc, ssc.snappySession.sessionState.executePlan(logicalPlan))

  // =======================================================================
  // Methods that should be implemented by subclasses of DStream
  // =======================================================================

  /** Time interval after which the SchemaDStream generates a RDD */
  override def slideDuration: Duration = {
    parentStreams.head.slideDuration
  }

  /** List of parent DStreams on which this SchemaDStream depends on */
  override def dependencies: List[DStream[InternalRow]] = parentStreams.toList

  /** Method that generates a RDD for the given time */
  override def compute(validTime: Time): Option[RDD[Row]] = {
    StreamBaseRelation.setValidTime(validTime)
    val schema = this.schema
    Some(executionPlan.execute().mapPartitionsInternal { itr =>
      // Reuse one mutable wrapper per partition to avoid per-row allocation.
      val wrappedRow = new WrappedInternalRow(schema)
      // BUG FIX: `wrappedRow.internalRow = row` is an assignment expression and
      // evaluates to Unit; mapping to it alone produced an RDD[Unit] instead of
      // the required RDD[Row]. The wrapper itself must be the map result.
      itr.map { row =>
        wrappedRow.internalRow = row
        wrappedRow
      }
    })
  }

  // =======================================================================
  // SchemaDStream operations
  // =======================================================================

  /** Return a new DStream by applying a function to all elements of this SchemaDStream. */
  override def map[U: ClassTag](mapFunc: Row => U): DStream[U] = {
    new MappedDStream(this, context.sparkContext.clean(mapFunc))
  }

  /**
   * Return a new DStream by applying a function to all elements of this SchemaDStream,
   * and then flattening the results
   */
  override def flatMap[U: ClassTag](flatMapFunc: Row => TraversableOnce[U]): DStream[U] = {
    new FlatMappedDStream(this, context.sparkContext.clean(flatMapFunc))
  }

  /** Return a new SchemaDStream containing only the elements that satisfy a predicate. */
  override def filter(filterFunc: Row => Boolean): SchemaDStream = {
    new FilteredSchemaDStream(this, filterFunc)
  }

  /**
   * Return a new DStream in which each RDD is generated by applying glom() to each RDD of
   * this SchemaDStream. Applying glom() to an RDD coalesces all elements within each
   * partition into an array.
   */
  override def glom(): DStream[Array[Row]] = {
    new GlommedSchemaDStream(this)
  }

  /**
   * Return a new SchemaDStream with an increased or decreased level of parallelism.
   * Each RDD in the returned SchemaDStream has exactly numPartitions partitions.
   */
  override def repartition(numPartitions: Int): SchemaDStream = {
    snsc.createSchemaDStream(transform(_.repartition(numPartitions)), schema)
  }

  /**
   * Return a new DStream in which each RDD is generated by applying mapPartitions() to each RDDs
   * of this SchemaDStream. Applying mapPartitions() to an RDD applies a function to each partition
   * of the RDD.
   */
  override def mapPartitions[U: ClassTag](
      mapPartFunc: Iterator[Row] => Iterator[U],
      preservePartitioning: Boolean = false
  ): DStream[U] = {
    new MapPartitionedSchemaDStream(this, context.sparkContext.clean(mapPartFunc),
      preservePartitioning)
  }

  /**
   * Return a new DStream in which each RDD is generated by applying a function
   * on each RDD of 'this' SchemaDStream.
   */
  override def transform[U: ClassTag](transformFunc: RDD[Row] => RDD[U]): DStream[U] = {
    val cleanedF = context.sparkContext.clean(transformFunc, false)
    transform((r: RDD[Row], _: Time) => cleanedF(r))
  }

  /**
   * Return a new DStream in which each RDD is generated by applying a function
   * on each RDD of 'this' SchemaDStream.
   */
  override def transform[U: ClassTag](transformFunc: (RDD[Row], Time) => RDD[U]): DStream[U] = {
    val cleanedF = context.sparkContext.clean(transformFunc, false)
    val realTransformFunc = (rdds: Seq[RDD[_]], time: Time) => {
      assert(rdds.length == 1)
      cleanedF(rdds.head.asInstanceOf[RDD[Row]], time)
    }
    new TransformedSchemaDStream[U](this, Seq(this), realTransformFunc)
  }

  /**
   * Return a new DStream in which each RDD is generated by applying a function
   * on each RDD of 'this' SchemaDStream and 'other' SchemaDStream.
   */
  override def transformWith[U: ClassTag, V: ClassTag](
      other: DStream[U], transformFunc: (RDD[Row], RDD[U]) => RDD[V]
  ): DStream[V] = {
    val cleanedF = snsc.sparkContext.clean(transformFunc, false)
    transformWith(other, (rdd1: RDD[Row], rdd2: RDD[U], time: Time) => cleanedF(rdd1, rdd2))
  }

  /**
   * Return a new DStream in which each RDD is generated by applying a function
   * on each RDD of 'this' DStream and 'other' DStream.
   */
  override def transformWith[U: ClassTag, V: ClassTag](
      other: DStream[U], transformFunc: (RDD[Row], RDD[U], Time) => RDD[V]
  ): DStream[V] = {
    val cleanedF = snsc.sparkContext.clean(transformFunc, false)
    val realTransformFunc = (rdds: Seq[RDD[_]], time: Time) => {
      assert(rdds.length == 2)
      val rdd1 = rdds(0).asInstanceOf[RDD[Row]]
      val rdd2 = rdds(1).asInstanceOf[RDD[U]]
      cleanedF(rdd1, rdd2, time)
    }
    new TransformedSchemaDStream[V](this, Seq(this, other), realTransformFunc)
  }

  /**
   * Apply a function to each DataFrame in this SchemaDStream. This is an output operator, so
   * 'this' SchemaDStream will be registered as an output stream and therefore materialized.
   */
  def foreachDataFrame(foreachFunc: DataFrame => Unit): Unit = {
    foreachDataFrame(foreachFunc, needsConversion = true)
  }

  /**
   * Apply a function to each DataFrame in this SchemaDStream. This is an output operator, so
   * 'this' SchemaDStream will be registered as an output stream and therefore materialized.
   */
  def foreachDataFrame(foreachFunc: DataFrame => Unit, needsConversion: Boolean): Unit = {
    val func = (rdd: RDD[Row]) => {
      foreachFunc(snappySession.createDataFrame(rdd, this.schema, needsConversion))
    }
    this.foreachRDD(func)
  }

  /**
   * Apply a function to each DataFrame in this SchemaDStream. This is an output operator, so
   * 'this' SchemaDStream will be registered as an output stream and therefore materialized.
   * (Java-friendly overload taking a VoidFunction.)
   */
  def foreachDataFrame(foreachFunc: JVoidFunction[DataFrame]): Unit = {
    val func = (rdd: RDD[Row]) => {
      foreachFunc.call(snappySession.createDataFrame(rdd, this.schema, needsConversion = true))
    }
    this.foreachRDD(func)
  }

  /**
   * Apply a function to each DataFrame in this SchemaDStream. This is an output operator, so
   * 'this' SchemaDStream will be registered as an output stream and therefore materialized.
   * (Java-friendly overload taking a VoidFunction.)
   */
  def foreachDataFrame(foreachFunc: JVoidFunction[DataFrame], needsConversion: Boolean): Unit = {
    val func = (rdd: RDD[Row]) => {
      foreachFunc.call(snappySession.createDataFrame(rdd, this.schema, needsConversion))
    }
    this.foreachRDD(func)
  }

  /**
   * Apply a function to each DataFrame in this SchemaDStream. This is an output operator, so
   * 'this' SchemaDStream will be registered as an output stream and therefore materialized.
   * (Java-friendly overload also receiving the batch Time.)
   */
  def foreachDataFrame(foreachFunc: JVoidFunction2[DataFrame, Time]): Unit = {
    val func = (rdd: RDD[Row], time: Time) => {
      foreachFunc.call(snappySession.createDataFrame(rdd, this.schema, needsConversion = true),
        time)
    }
    this.foreachRDD(func)
  }

  /**
   * Apply a function to each DataFrame in this SchemaDStream. This is an output operator, so
   * 'this' SchemaDStream will be registered as an output stream and therefore materialized.
   */
  def foreachDataFrame(foreachFunc: (DataFrame, Time) => Unit): Unit = {
    foreachDataFrame(foreachFunc, needsConversion = true)
  }

  /**
   * Apply a function to each DataFrame in this SchemaDStream. This is an output operator, so
   * 'this' SchemaDStream will be registered as an output stream and therefore materialized.
   */
  def foreachDataFrame(foreachFunc: (DataFrame, Time) => Unit, needsConversion: Boolean): Unit = {
    val func = (rdd: RDD[Row], time: Time) => {
      foreachFunc(snappySession.createDataFrame(rdd, this.schema, needsConversion), time)
    }
    this.foreachRDD(func)
  }

  /** Persist the RDDs of this SchemaDStream with the given storage level */
  override def persist(level: StorageLevel): SchemaDStream = {
    super.persist(level)
    this
  }

  /** Persist RDDs of this SchemaDStream with the default storage level (MEMORY_ONLY_SER) */
  override def persist(): SchemaDStream = persist(StorageLevel.MEMORY_ONLY_SER)

  /** Persist RDDs of this SchemaDStream with the default storage level (MEMORY_ONLY_SER) */
  override def cache(): SchemaDStream = persist()

  /**
   * Enable periodic checkpointing of RDDs of this SchemaDStream
   *
   * @param interval Time interval after which generated RDD will be checkpointed
   */
  override def checkpoint(interval: Duration): SchemaDStream = {
    super.checkpoint(interval)
    this
  }

  /** Registers this SchemaDStream as a table in the catalog. */
  def registerAsTable(tableName: String): Unit = {
    catalog.registerTable(
      catalog.newQualifiedTempTableName(tableName),
      logicalPlan)
  }

  /** Returns the schema of this SchemaDStream (represented by
    * a [[StructType]]). */
  def schema: StructType = queryExecution.analyzed.schema

  // INSERT plans cannot be executed as a streaming query; reject them eagerly.
  @transient val logicalPlan: LogicalPlan = queryExecution.logical
  match {
    case _: InsertIntoTable =>
      throw new IllegalStateException(s"logical plan ${queryExecution.logical} " +
        s"is not supported currently")
    case _ => queryExecution.logical
  }

  // Reflective access to ShuffleExchange's private cachedShuffleRDD field so it
  // can be reset between batches (the plan is re-executed per batch interval).
  private val _cachedField = {
    val f = classOf[ShuffleExchange].getDeclaredFields.find(
      _.getName.contains("cachedShuffleRDD")).get
    f.setAccessible(true)
    f
  }

  // Clears any cached shuffle RDDs before handing out the executed plan, since
  // stale cached results from a previous batch must not be reused.
  private def executionPlan: SparkPlan = {
    queryExecution.executedPlan.foreach {
      case s: ShuffleExchange => _cachedField.set(s, null)
      case _ =>
    }
    queryExecution.executedPlan
  }

  // All stream sources (StreamPlan leaves) feeding this query's physical plan.
  @transient private lazy val parentStreams = {
    def traverse(plan: SparkPlan): Seq[DStream[InternalRow]] = plan match {
      case x: StreamPlan => x.rowStream :: Nil
      case _ => plan.children.flatMap(traverse)
    }
    val streams = traverse(queryExecution.executedPlan)
    streams
  }

  /**
   * Returns all column names as an array.
   */
  def columns: Array[String] = schema.fields.map(_.name)

  /** Prints the schema tree to stdout (mirrors Dataset.printSchema). */
  def printSchema(): Unit = {
    println(schema.treeString) // scalastyle:ignore
  }
}
| vjr/snappydata | core/src/main/scala/org/apache/spark/sql/streaming/SchemaDStream.scala | Scala | apache-2.0 | 12,592 |
package io.swagger.client.model
import io.swagger.client.core.ApiModel
import org.joda.time.DateTime
/**
 * Successful response payload for the user-token API call.
 *
 * Generated Swagger model; fields mirror the wire format.
 */
case class UserTokenSuccessfulResponse (
  /* Status code */
  code: Int,
  /* Message */
  message: String,
  // Nested user object carried in the response body.
  user: UserTokenSuccessfulResponseInnerUserField)
  extends ApiModel
| QuantiModo/QuantiModo-SDK-Akka-Scala | src/main/scala/io/swagger/client/model/UserTokenSuccessfulResponse.scala | Scala | gpl-2.0 | 286 |
package chandu0101.scalajs.react.components
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
import scala.scalajs.js
import scalacss.ProdDefaults._
import scalacss.ScalaCssReact._
import Implicits._
/**
 * A filterable, selectable list component (scalajs-react + ScalaCSS).
 *
 * Renders an optional search box above a styled <ul>; items whose string form
 * contains the (case-insensitive) filter text are shown, and clicking an item
 * marks it selected and invokes the optional onItemSelect callback.
 */
object ReactListView {

  /** ScalaCSS styles: `listGroup` for the <ul>, `listItem(selected)` per <li>. */
  class Style extends StyleSheet.Inline {
    import dsl._
    val listGroup = style(
      marginBottom(20.px),
      paddingLeft.`0`,
      &.firstChild.lastChild(
        borderBottomLeftRadius(4 px),
        borderBottomRightRadius(4 px)
      )
    )

    // Boolean style function: the argument selects the "selected" variant.
    val listItem = styleF.bool(
      selected =>
        styleS(
          position.relative,
          display.block,
          padding(v = 10.px, h = 15.px),
          border :=! "1px solid #ecf0f1",
          cursor.pointer,
          mixinIfElse(selected)(
            color.white,
            fontWeight._500,
            backgroundColor :=! "#146699"
          )(
            backgroundColor.white,
            &.hover(
              color :=! "#555555",
              backgroundColor :=! "#ecf0f1"
            )
          )
        ))
  }

  object DefaultStyle extends Style

  // Component state: current filter text and the string form of the selected item.
  case class State(filterText: String, selectedItem: String)

  class Backend(t: BackendScope[Props, State]) {

    // Updates the filter text as the user types in the search box.
    def onTextChange(text: String): Callback =
      t.modState(_.copy(filterText = text))

    // Records the clicked item in state, then fires the caller's callback (if any).
    def onItemSelect(onItemSelect: js.UndefOr[String => Callback])(value: String): Callback = {
      val setSelected = t.modState(_.copy(selectedItem = value))
      val onSelect = onItemSelect.asCbo(value)
      setSelected >> onSelect
    }

    def render(P: Props, S: State) = {
      // Case-insensitive substring filter over the items' string forms.
      val fItems =
        P.items.filter(item => item.toString.toLowerCase.contains(S.filterText.toLowerCase))
      <.div(
        ReactSearchBox(onTextChange = onTextChange).when(P.showSearchBox),
        <.ul(
          P.style.listGroup,
          fItems.map { item =>
            val selected = item.toString == S.selectedItem
            <.li(
              P.style.listItem(selected),
              ^.onClick --> onItemSelect(P.onItemSelect)(item.toString),
              item
            )
          }.toTagMod
        )
      )
    }
  }

  val component = ScalaComponent
    .builder[Props]("ReactListView")
    .initialState(State(filterText = "", selectedItem = ""))
    .renderBackend[Backend]
    .build

  case class Props(
      items: List[String],
      onItemSelect: js.UndefOr[String => Callback],
      showSearchBox: Boolean,
      style: Style
  )

  def apply(
      items: List[String],
      onItemSelect: js.UndefOr[String => Callback] = js.undefined,
      showSearchBox: Boolean = false,
      style: Style = DefaultStyle,
      ref: js.UndefOr[String] = js.undefined,
      key: js.Any = {}
  ) =
    component /*.set(key, ref)*/ (Props(items, onItemSelect, showSearchBox, style))
}
| aparo/scalajs-react-components | core/src/main/scala/chandu0101/scalajs/react/components/ReactListView.scala | Scala | apache-2.0 | 2,803 |
package jk_5.nailed.command
import jk_5.nailed.map.MapLoader
/**
* No description given
*
* @author jk-5
*/
/**
 * The "/goto" command: teleports the issuing player to the map with the given id.
 *
 * Requires permission level 2 and a valid player as sender.
 */
object CommandWorld extends TCommand {
  val commandName = "goto"
  this.permissionLevel = 2

  @inline override def getCommandUsage = "/goto"

  // NOTE(review): args(0).toInt throws on a missing or non-numeric argument —
  // confirm whether the command framework catches these, else validation is needed.
  def processCommand(sender: CommandSender, args: Array[String]) = if(sender.isValidPlayer) MapLoader.getMap(args(0).toInt).foreach(_.travelPlayerToMap(sender.player))
}
| nailed/nailed-legacy | src/main/scala/jk_5/nailed/command/CommandWorld.scala | Scala | unlicense | 427 |
package org.elasticmq.msg
import akka.actor.ActorRef
import org.elasticmq.actor.reply.Replyable
import org.elasticmq.{QueueData, QueueAlreadyExists}
/**
 * Messages understood by the queue manager actor. Each message is `Replyable`,
 * with the type parameter describing the reply the sender receives.
 */
sealed trait QueueManagerMsg[T] extends Replyable[T]

/** Creates a queue; replies with its actor, or `QueueAlreadyExists` on conflict. */
case class CreateQueue(queueData: QueueData) extends QueueManagerMsg[Either[QueueAlreadyExists, ActorRef]]

/** Deletes the named queue; no reply payload. */
case class DeleteQueue(queueName: String) extends QueueManagerMsg[Unit]

/** Looks up the named queue; replies with its actor if it exists. */
case class LookupQueue(queueName: String) extends QueueManagerMsg[Option[ActorRef]]

/** Lists the names of all existing queues. */
case class ListQueues() extends QueueManagerMsg[Seq[String]]
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.metrics.sink
import java.net.InetSocketAddress
import java.util.{Locale, Properties}
import java.util.concurrent.TimeUnit
import com.codahale.metrics.MetricRegistry
import com.codahale.metrics.graphite.{Graphite, GraphiteReporter, GraphiteUDP}
import org.apache.spark.SecurityManager
import org.apache.spark.metrics.MetricsSystem
/**
 * A metrics sink that reports the registry to a Graphite server.
 *
 * Mandatory properties: `host`, `port`. Optional: `period` (poll interval),
 * `unit` (a TimeUnit name), `prefix` (metric-name prefix), and `protocol`
 * ("tcp", the default, or "udp").
 */
private[spark] class GraphiteSink(val property: Properties, val registry: MetricRegistry,
    securityMgr: SecurityManager) extends Sink {
  val GRAPHITE_DEFAULT_PERIOD = 10
  val GRAPHITE_DEFAULT_UNIT = "SECONDS"
  val GRAPHITE_DEFAULT_PREFIX = ""

  val GRAPHITE_KEY_HOST = "host"
  val GRAPHITE_KEY_PORT = "port"
  val GRAPHITE_KEY_PERIOD = "period"
  val GRAPHITE_KEY_UNIT = "unit"
  val GRAPHITE_KEY_PREFIX = "prefix"
  val GRAPHITE_KEY_PROTOCOL = "protocol"

  /** Reads an optional property; an absent key maps to None. */
  def propertyToOption(prop: String): Option[String] = Option(property.getProperty(prop))

  // 'host' and 'port' are mandatory: fail fast at construction time.
  val host = propertyToOption(GRAPHITE_KEY_HOST).getOrElse(
    throw new Exception("Graphite sink requires 'host' property."))

  val port = propertyToOption(GRAPHITE_KEY_PORT).getOrElse(
    throw new Exception("Graphite sink requires 'port' property.")).toInt

  val pollPeriod = propertyToOption(GRAPHITE_KEY_PERIOD).fold(GRAPHITE_DEFAULT_PERIOD)(_.toInt)

  // Locale.ROOT avoids locale-sensitive case mapping (e.g. the Turkish dotless i)
  // when translating the configured unit name into a TimeUnit constant.
  val pollUnit: TimeUnit = TimeUnit.valueOf(
    propertyToOption(GRAPHITE_KEY_UNIT).getOrElse(GRAPHITE_DEFAULT_UNIT).toUpperCase(Locale.ROOT))

  val prefix = propertyToOption(GRAPHITE_KEY_PREFIX).getOrElse(GRAPHITE_DEFAULT_PREFIX)

  MetricsSystem.checkMinimalPollingPeriod(pollUnit, pollPeriod)

  // TCP is the default transport when no protocol is configured.
  val graphite = propertyToOption(GRAPHITE_KEY_PROTOCOL).map(_.toLowerCase(Locale.ROOT)) match {
    case Some("udp") => new GraphiteUDP(new InetSocketAddress(host, port))
    case Some("tcp") | None => new Graphite(new InetSocketAddress(host, port))
    case Some(p) => throw new Exception(s"Invalid Graphite protocol: $p")
  }

  val reporter: GraphiteReporter = GraphiteReporter.forRegistry(registry)
      .convertDurationsTo(TimeUnit.MILLISECONDS)
      .convertRatesTo(TimeUnit.SECONDS)
      .prefixedWith(prefix)
      .build(graphite)

  /** Starts periodic reporting at the configured interval. */
  override def start() {
    reporter.start(pollPeriod, pollUnit)
  }

  /** Stops periodic reporting. */
  override def stop() {
    reporter.stop()
  }

  /** Forces an immediate one-off report. */
  override def report() {
    reporter.report()
  }
}
| sh-cho/cshSpark | metrics/sink/GraphiteSink.scala | Scala | apache-2.0 | 3,262 |
class C(val a: Int, val b: Int) {
println(/* offset: 12, type: org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter */ a.getClass)
println(/* offset: 24, type: org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter */ b.getClass)
}
// Resolver test fixture: the block comments assert resolve targets by absolute
// file offset, so the text preceding them must not be altered. The stray
// repository metadata fused onto the closing brace line has been removed.
package com.acework.js.components.bootstrap
import com.acework.js.logger._
import com.acework.js.utils.{Mappable, Mergeable}
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import scala.scalajs.js.{UndefOr, undefined}
/**
* Created by weiyin on 09/03/15.
*/
/**
 * Bootstrap nav item (an <li> wrapping an anchor) for scalajs-react.
 *
 * Clicking a non-disabled item invokes `onSelect` with (eventKey, href, target)
 * and suppresses the browser's default navigation.
 */
object NavItem extends BootstrapComponent {
  override type P = NavItem
  override type S = Unit
  override type B = Unit
  override type N = TopNode

  override def defaultProps = NavItem()

  // Component props; `addClasses` is appended to the computed class set.
  case class NavItem(active: Boolean = false, disabled: Boolean = false,
      href: UndefOr[String] = "#",
      eventKey: UndefOr[String] = undefined,
      title: UndefOr[String] = undefined,
      target: UndefOr[String] = undefined,
      onSelect: UndefOr[Seq[UndefOr[String]] => Unit] = undefined,
      addClasses: String = "") extends MergeableProps[NavItem] {

    def merge(t: Map[String, Any]): NavItem = implicitly[Mergeable[NavItem]].merge(this, t)

    def asMap: Map[String, Any] = implicitly[Mappable[NavItem]].toMap(this)

    def apply(children: ReactNode*) = component(this, children)

    def apply() = component(this)
  }

  override val component = ReactComponentB[NavItem]("NavItem")
    .render((P, C) => {

      // Suppress navigation and forward the selection unless the item is disabled.
      def handleClick(e: ReactEvent): Unit = {
        P.onSelect.map { onSelect =>
          e.preventDefault()
          if (!P.disabled)
            onSelect(Seq(P.eventKey, P.href, P.target))
        }
      }

      val classes = Map("active" -> P.active, "disabled" -> P.disabled)

      var link = <.a(^.href := P.href, ^.title := P.title, ^.target := P.target,
        ^.ref := "anchor", ^.onClick ==> handleClick)

      // A placeholder "#" href gets role="button" for accessibility.
      if (P.href.getOrElse("NA") == "#")
        link = link(^.role := "button")

      <.li(^.classSet1M(P.addClasses, classes),
        link(C)
      )
    }
    ).build
}
} | weiyinteo/scalajs-react-bootstrap | core/src/main/scala/com/acework/js/components/bootstrap/NavItem.scala | Scala | mit | 1,864 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @builder scalation.linalgebra.mem_mapped.bld.BldMatri
* @version 1.2
* @date Mon Sep 28 11:18:16 EDT 2015
* @see LICENSE (MIT style license file).
*/
package scalation.linalgebra.mem_mapped
import scalation.math.Real.{abs => ABS, _}
import scalation.math.Real
import scalation.util.{Error, MM_ArrayR}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MatriR` trait specifies the operations to be defined by the concrete
* classes implementing `Real` matrices, i.e.,
* MatrixR - dense matrix
* BidMatrixR - bidiagonal matrix - useful for computing Singular Values
* SparseMatrixR - sparse matrix - majority of elements should be zero
* SymTriMatrixR - symmetric triangular matrix - useful for computing Eigenvalues
* par.MatrixR - parallel dense matrix
* par.SparseMatrixR - parallel sparse matrix
* Some of the classes provide a few custom methods, e.g., methods beginning with "times"
* or ending with "npp".
*------------------------------------------------------------------------------
* row-wise column-wise
* Append: matrix +: vector matrix +:^ vector
* Concatenate: matrix ++ matrix matrix ++^ matrix
*/
trait MatriR
extends Error
{
/** Matrix dimension 1 (# rows)
*/
val dim1: Int
/** Matrix dimension 2 (# columns)
*/
val dim2: Int
/** Range for the storage array on dimension 1 (rows)
*/
protected val range1 = 0 until dim1
/** Range for the storage array on dimension 2 (columns)
*/
protected val range2 = 0 until dim2
/** Format string used for printing vector values (change using setFormat)
*/
protected var fString = "%s,\\t"
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set the format to the 'newFormat'.
* @param newFormat the new format string
*/
def setFormat (newFormat: String) { fString = newFormat }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get 'this' matrix's element at the 'i,j'-th index position.
* @param i the row index
* @param j the column index
*/
def apply (i: Int, j: Int): Real
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get 'this' matrix's vector at the 'i'-th index position (i-th row).
* @param i the row index
*/
def apply (i: Int): VectorR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get a slice 'this' matrix row-wise on range 'ir' and column-wise on range 'jr'.
* Ex: b = a(2..4, 3..5)
* @param ir the row range
* @param jr the column range
*/
def apply (ir: Range, jr: Range): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get a slice 'this' matrix row-wise on range ir and column-wise at index j.
* Ex: u = a(2..4, 3)
* @param ir the row range
* @param j the column index
*/
def apply (ir: Range, j: Int): VectorR = col(j)(ir)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get a slice 'this' matrix row-wise at index 'i' and column-wise on range 'jr'.
* Ex: u = a(2, 3..5)
* @param i the row index
* @param jr the column range
*/
def apply (i: Int, jr: Range): VectorR = this(i)(jr)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set 'this' matrix's element at the 'i,j'-th index position to the scalar 'x'.
* @param i the row index
* @param j the column index
* @param x the scalar value to assign
*/
def update (i: Int, j: Int, x: Real)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set 'this' matrix's row at the 'i'-th index position to the vector 'u'.
* @param i the row index
* @param u the vector value to assign
*/
def update (i: Int, u: VectorR)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set a slice of 'this' matrix row-wise on range 'ir' and column-wise on
* range 'jr' to matrix 'b'.
* Ex: a(2..4, 3..5) = b
* @param ir the row range
* @param jr the column range
* @param b the matrix to assign
*/
def update (ir: Range, jr: Range, b: MatriR)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set a slice of 'this' matrix row-wise on range 'ir' and column-wise at
* index 'j' to vector 'u'.
* Ex: a(2..4, 3) = u
* @param ir the row range
* @param j the column index
* @param u the vector to assign
*/
def update (ir: Range, j: Int, u: VectorR) { col(j)(ir) = u }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set a slice of 'this' matrix row-wise at index 'i' and column-wise on range
* 'jr' to vector 'u'.
* Ex: a(2, 3..5) = u
* @param i the row index
* @param jr the column range
* @param u the vector to assign
*/
def update (i: Int, jr: Range, u: VectorR) { this(i)(jr) = u }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set all the elements in 'this' matrix to the scalar 'x'.
* @param x the scalar value to assign
*/
def set (x: Real)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set the values in 'this' matrix as copies of the values in 2D array 'u'.
* @param u the 2D array of values to assign
*/
def set (u: Array [Array [Real]])
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set 'this' matrix's 'i'th row starting a column 'j' to the vector 'u'.
* @param i the row index
* @param u the vector value to assign
* @param j the starting column index
*/
def set (i: Int, u: VectorR, j: Int = 0)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Iterate over 'this' matrix row by row applying method 'f'.
* @param f the function to apply
*/
def foreach [U] (f: MM_ArrayR => U)
{
var i = 0
while (i < dim1) { f (this(i)()); i += 1 }
} // foreach
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Slice 'this' matrix row-wise 'from' to 'end'.
* @param from the start row of the slice (inclusive)
* @param end the end row of the slice (exclusive)
*/
def slice (from: Int, end: Int): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Slice 'this' matrix column-wise 'from' to 'end'.
* @param from the start column of the slice (inclusive)
* @param end the end column of the slice (exclusive)
*/
def sliceCol (from: Int, end: Int): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Slice 'this' matrix row-wise 'r_from' to 'r_end' and column-wise 'c_from' to 'c_end'.
* @param r_from the start of the row slice (inclusive)
* @param r_end the end of the row slice (exclusive)
* @param c_from the start of the column slice (inclusive)
* @param c_end the end of the column slice (exclusive)
*/
def slice (r_from: Int, r_end: Int, c_from: Int, c_end: Int): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Slice 'this' matrix excluding the given 'row' and 'column'.
* @param row the row to exclude
* @param col the column to exclude
*/
def sliceExclude (row: Int, col: Int): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Select rows from 'this' matrix according to the given index/basis 'rowIndex'.
* @param rowIndex the row index positions (e.g., (0, 2, 5))
*/
def selectRows (rowIndex: Array [Int]): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get column 'col' starting 'from' in 'this' matrix, returning it as a vector.
* @param col the column to extract from the matrix
* @param from the position to start extracting from
*/
def col (col: Int, from: Int = 0): VectorR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set column 'col' of 'this' matrix to vector 'u'.
* @param col the column to set
* @param u the vector to assign to the column
*/
def setCol (col: Int, u: VectorR)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Select columns from 'this' matrix according to the given index/basis colIndex.
* Ex: Can be used to divide a matrix into a basis and a non-basis.
* @param colIndex the column index positions (e.g., (0, 2, 5))
*/
def selectCols (colIndex: Array [Int]): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Transpose 'this' matrix (rows => columns).
*/
def t: MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate 'this' matrix and (row) vector 'u', i.e. append 'u' to 'this'.
* @param u the vector to be concatenated as the new last row in matrix
*/
def +: (u: VectorR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate 'this' matrix and (column) vector 'u', i.e. append 'u' to 'this'.
* @param u the vector to be concatenated as the new last column in matrix
*/
def +:^ (u: VectorR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate (row-wise) 'this' matrix and matrix 'b'.
* @param b the matrix to be concatenated as the new last rows in matrix
*/
def ++ (b: MatriR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Concatenate (column-wise) 'this' matrix and matrix 'b'.
* @param b the matrix to be concatenated as the new last columns in matrix
*/
def ++^ (b: MatriR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add 'this' matrix and matrix 'b' for any type extending `MatriR`.
* Note, subtypes of MatriR should also implement a more efficient version,
* e.g., `def + (b: MatrixD): MatrixD`.
* @param b the matrix to add (requires leDimensions)
*/
def + (b: MatriR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add 'this' matrix and (row) vector 'u'.
* @param u the vector to add
*/
def + (u: VectorR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add 'this' matrix and scalar 'x'.
* @param x the scalar to add
*/
def + (x: Real): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add in-place 'this' matrix and matrix 'b' for any type extending `MatriR`.
* Note, subtypes of MatriR should also implement a more efficient version,
* e.g., `def += (b: MatrixD): MatrixD`.
* @param b the matrix to add (requires leDimensions)
*/
def += (b: MatriR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add in-place 'this' matrix and (row) vector 'u'.
* @param u the vector to add
*/
def += (u: VectorR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add in-place 'this' matrix and scalar 'x'.
* @param x the scalar to add
*/
def += (x: Real): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' matrix subtract matrix 'b' for any type extending `MatriR`.
* Note, subtypes of MatriR should also implement a more efficient version,
* e.g., `def - (b: MatrixD): MatrixD`.
* @param b the matrix to subtract (requires leDimensions)
*/
def - (b: MatriR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' matrix subtract (row) vector 'u'.
* @param u the vector to subtract
*/
def - (u: VectorR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' matrix subtract scalar 'x'.
* @param x the scalar to subtract
*/
def - (x: Real): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' matrix subtract in-place matrix 'b' for any type extending `MatriR`.
* Note, subtypes of MatriR should also implement a more efficient version,
* e.g., `def -= (b: MatrixD): MatrixD`.
* @param b the matrix to subtract (requires leDimensions)
*/
def -= (b: MatriR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' matrix subtract in-place (row) vector 'u'.
* @param u the vector to subtract
*/
def -= (u: VectorR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** From 'this' matrix subtract in-place scalar 'x'.
* @param x the scalar to subtract
*/
def -= (x: Real): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' matrix and matrix 'b' for any type extending `MatriR`.
* Note, subtypes of MatriR should also implement a more efficient version,
* e.g., `def * (b: MatrixD): MatrixD`.
* @param b the matrix to multiply by (requires sameCrossDimensions)
*/
def * (b: MatriR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' matrix by vector 'u'.
* @param u the vector to multiply by
*/
def * (u: VectorR): VectorR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' matrix by scalar 'x'.
* @param x the scalar to multiply by
*/
def * (x: Real): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply in-place 'this' matrix and matrix 'b' for any type extending `MatriR`.
* Note, subtypes of MatriR should also implement a more efficient version,
* e.g., `def *= (b: MatrixD): MatrixD`.
* @param b the matrix to multiply by (requires leDimensions)
*/
def *= (b: MatriR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply in-place 'this' matrix by scalar 'x'.
* @param x the scalar to multiply by
*/
def *= (x: Real): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the dot product of 'this' matrix and vector 'u', by first transposing
* 'this' matrix and then multiplying by 'u' (i.e., 'a dot u = a.t * u').
* @param u the vector to multiply by (requires same first dimensions)
*/
def dot (u: VectorR): VectorR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply 'this' matrix by vector 'u' to produce another matrix (a_ij * u_j)
* @param u the vector to multiply by
*/
def ** (u: VectorR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Multiply in-place 'this' matrix by vector 'u' to produce another matrix (a_ij * u_j)
* @param u the vector to multiply by
*/
def **= (u: VectorR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Divide 'this' matrix by scalar 'x'.
* @param x the scalar to divide by
*/
def / (x: Real): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Divide in-place 'this' matrix by scalar 'x'.
* @param x the scalar to divide by
*/
def /= (x: Real): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Raise 'this' matrix to the 'p'th power (for some integer p >= 2).
* @param p the power to raise 'this' matrix to
*/
def ~^ (p: Int): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the maximum element in 'this' matrix.
* @param e the ending row index (exclusive) for the search
*/
def max (e: Int = dim1): Real
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the minimum element in 'this' matrix.
* @param e the ending row index (exclusive) for the search
*/
def min (e: Int = dim1): Real
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Find the magnitude of 'this' matrix, the element value farthest from zero.
*/
def mag: Real = ABS (max ()) max ABS (min ())
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Swap the elements in rows 'i' and 'k' starting from column 'col'.
* @param i the first row in the swap
* @param k the second row in the swap
* @param col the starting column for the swap (default 0 => whole row)
*/
def swap (i: Int, k: Int, col: Int = 0)
{
    for (j <- col until dim2) {
        val tmp    = this(i, j)    // hold row i's element while the pair is exchanged
        this(i, j) = this(k, j)
        this(k, j) = tmp
    } // for
} // swap
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Swap the elements in columns 'j' and 'l' starting from row 'row'.
* @param j the first column in the swap
* @param l the second column in the swap
* @param row the starting row for the swap (default 0 => whole column)
*/
def swapCol (j: Int, l: Int, row: Int = 0)
{
    for (i <- row until dim1) {
        val tmp    = this(i, l)    // hold column l's element while the pair is exchanged
        this(i, l) = this(i, j)
        this(i, j) = tmp
    } // for
} // swapCol
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Decompose 'this' matrix into the product of lower and upper triangular
* matrices '(l, u)' using the LU Decomposition algorithm. This version uses
* partial pivoting.
*/
def lud: Tuple2 [MatriR, MatriR]
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Decompose in-place 'this' matrix into the product of lower and upper triangular
* matrices '(l, u)' using the LU Decomposition algorithm. This version uses
* partial pivoting.
*/
def lud_ip: Tuple2 [MatriR, MatriR]
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Solve for 'x' in the equation 'l*u*x = b' (see lud above).
* @param l the lower triangular matrix
* @param u the upper triangular matrix
* @param b the constant vector
*/
def solve (l: MatriR, u: MatriR, b: VectorR): VectorR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Solve for 'x' in the equation 'l*u*x = b' (see lud above).
* @param lu the lower and upper triangular matrices
* @param b the constant vector
*/
def solve (lu: Tuple2 [MatriR, MatriR], b: VectorR): VectorR = solve (lu._1, lu._2, b)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Solve for 'x' in the equation 'a*x = b' where 'a' is 'this' matrix.
* @param b the constant vector.
*/
def solve (b: VectorR): VectorR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Determine the rank of 'this' m-by-n matrix by taking the upper triangular
* matrix 'u' from the LU Decomposition and counting the number of non-zero
* diagonal elements. Implementing classes may override this method with
* a better one (e.g., SVD or Rank Revealing QR).
* @see http://en.wikipedia.org/wiki/Rank_%28linear_algebra%29
*/
def rank: Int =
{
    val limit = if (dim1 < dim2) dim1 else dim2          // rank cannot exceed min (m, n)
    val upper = lud._2                                   // upper triangular factor from LU
    (0 until limit).count (i => ! (upper(i, i) =~ _0))   // count non-zero diagonal entries
} // rank
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Combine 'this' matrix with matrix 'b', placing them along the diagonal and
* filling in the bottom left and top right regions with zeros: '[this, b]'.
* @param b the matrix to combine with this matrix
*/
def diag (b: MatriR): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Form a matrix '[Ip, this, Iq]' where 'Ir' is a 'r-by-r' identity matrix, by
* positioning the three matrices Ip, this and Iq along the diagonal.
* @param p the size of identity matrix Ip
* @param q the size of identity matrix Iq
*/
def diag (p: Int, q: Int = 0): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Get the 'k'th diagonal of 'this' matrix. Assumes dim2 >= dim1.
* @param k how far above the main diagonal, e.g., (-1, 0, 1) for (sub, main, super)
*/
def getDiag (k: Int = 0): VectorR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set the 'k'th diagonal of 'this' matrix to the vector 'u'. Assumes dim2 >= dim1.
* @param u the vector to set the diagonal to
* @param k how far above the main diagonal, e.g., (-1, 0, 1) for (sub, main, super)
*/
def setDiag (u: VectorR, k: Int = 0)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Set the main diagonal of 'this' matrix to the scalar 'x'. Assumes dim2 >= dim1.
* @param x the scalar to set the diagonal to
*/
def setDiag (x: Real)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Invert 'this' matrix (requires a squareMatrix) and use partial pivoting.
*/
def inverse: MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Invert in-place 'this' matrix (requires a squareMatrix) and use partial pivoting.
*/
def inverse_ip: MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Use Gauss-Jordan reduction on 'this' matrix to make the left part embed an
* identity matrix. A constraint on 'this' m by n matrix is that n >= m.
*/
def reduce: MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Use Gauss-Jordan reduction in-place on 'this' matrix to make the left part
* embed an identity matrix. A constraint on 'this' m by n matrix is that n >= m.
*/
def reduce_ip
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Clean values in 'this' matrix at or below the threshold 'thres' by setting
* them to zero. Iterative algorithms give approximate values and if very close
* to zero, may throw off other calculations, e.g., in computing eigenvectors.
* @param thres the cutoff threshold (a small value)
* @param relative whether to use relative or absolute cutoff
*/
def clean (thres: Double, relative: Boolean = true): MatriR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the (right) nullspace of 'this' 'm-by-n' matrix (requires 'n = m+1')
* by performing Gauss-Jordan reduction and extracting the negation of the
* last column augmented by 1.
* <p>
* nullspace (a) = set of orthogonal vectors v s.t. a * v = 0
* <p>
* The left nullspace of matrix 'a' is the same as the right nullspace of 'a.t'.
* FIX: need a more robust algorithm for computing nullspace (@see Fac_QR.scala).
* FIX: remove the 'n = m+1' restriction.
* @see http://ocw.mit.edu/courses/mathematics/18-06sc-linear-algebra-fall-2011/ax-b-and-the-four-subspaces
* /solving-ax-0-pivot-variables-special-solutions/MIT18_06SCF11_Ses1.7sum.pdf
*/
def nullspace: VectorR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute in-place the (right) nullspace of 'this' 'm-by-n' matrix (requires 'n = m+1')
* by performing Gauss-Jordan reduction and extracting the negation of the
* last column augmented by 1.
* <p>
* nullspace (a) = set of orthogonal vectors v s.t. a * v = 0
* <p>
* The left nullspace of matrix 'a' is the same as the right nullspace of 'a.t'.
* FIX: need a more robust algorithm for computing nullspace (@see Fac_QR.scala).
* FIX: remove the 'n = m+1' restriction.
* @see http://ocw.mit.edu/courses/mathematics/18-06sc-linear-algebra-fall-2011/ax-b-and-the-four-subspaces
* /solving-ax-0-pivot-variables-special-solutions/MIT18_06SCF11_Ses1.7sum.pdf
*/
def nullspace_ip: VectorR
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the trace of 'this' matrix, i.e., the sum of the elements on the
* main diagonal. Should also equal the sum of the eigenvalues.
* @see Eigen.scala
*/
def trace: Real
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the sum of 'this' matrix, i.e., the sum of its elements.
*/
def sum: Real
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the sum of the lower triangular region of 'this' matrix.
*/
def sumLower: Real
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the abs sum of 'this' matrix, i.e., the sum of the absolute value
* of its elements. This is useful for comparing matrices (a - b).sumAbs
*/
def sumAbs: Real
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the column means of this matrix.
*/
def mean: VectorR =
{
    // 'cm' is filled in place and never reassigned, so it is a 'val' (was a needless 'var')
    val cm = new VectorR (dim2)                          // one mean per column
    for (j <- range2) cm(j) = col (j).sum / dim1.toReal  // column sum divided by row count
    cm
} // mean
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the 1-norm of 'this' matrix, i.e., the maximum 1-norm of the
* column vectors. This is useful for comparing matrices '(a - b).norm1'.
*/
def norm1: Real =
{
    val colNorms = new VectorR (dim2)          // the 1-norm of each column vector
    var j = 0
    while (j < dim2) { colNorms(j) = col(j).norm1; j += 1 }
    colNorms.max ()                            // matrix 1-norm = largest column 1-norm
} // norm1
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the determinant of 'this' matrix.
*/
def det: Real
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' matrix and the other matrix 'b' have the same dimensions.
* @param b the other matrix
*/
def sameDimensions (b: MatriR): Boolean = dim1 == b.dim1 && dim2 == b.dim2
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' matrix dimensions are less than or equal to (le) those
* of the other matrix 'b'.
* @param b the other matrix
*/
def leDimensions (b: MatriR): Boolean = dim1 <= b.dim1 && dim2 <= b.dim2
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' matrix and the other matrix 'b' have the same cross
* dimensions.
* @param b the other matrix
*/
def sameCrossDimensions (b: MatriR): Boolean = dim2 == b.dim1
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' matrix is bidiagonal (has non-zero elements only in
* the main diagonal and superdiagonal). The method may be overridden for
* efficiency.
*/
def isBidiagonal: Boolean =
{
    // BUG FIX: the old condition '(i != j || i != j+1)' was a tautology (it can never
    // be false) and the zero test was not negated, so the method returned false for
    // any matrix containing a (near) zero element.  A matrix is (upper) bidiagonal
    // iff every element off the main diagonal (j == i) and the superdiagonal
    // (j == i+1) is (approximately) zero.
    for (i <- range1; j <- range2) {
        if ((j != i && j != i+1) && ! (this(i, j) =~ _0)) return false
    } // for
    true
} // isBidiagonal
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' matrix is nonnegative (has no negative elements).
*/
def isNonnegative: Boolean =
{
    // nonnegative iff no element falls below zero (short-circuits on first offender)
    ! range1.exists (i => range2.exists (j => this(i, j) < _0))
} // isNonnegative
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' matrix is rectangular (all rows have the same number
* of columns).
*/
def isRectangular: Boolean
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' matrix is square (same row and column dimensions).
*/
def isSquare: Boolean = dim1 == dim2 && isRectangular
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' matrix is symmetric.
*/
def isSymmetric: Boolean =
{
    // scan the strict upper triangle, failing fast on any mismatched mirror pair
    for (i <- 0 to dim1 - 2; j <- i + 1 until dim2 if this(i, j) !=~ this(j, i)) return false
    true
} // isSymmetric
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check whether 'this' matrix is tridiagonal (has non-zero elements only in
* the main diagonal, subdiagonal and superdiagonal). The method may be
* overridden for efficiency.
def isTridiagonal: Boolean =
{
    // every element farther than one position from the main diagonal must be ~zero
    range1.forall (i => range2.forall (j => ! (ABS (i-j) > 1) || this(i, j) =~ _0))
} // isTridiagonal
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Write 'this' matrix to a CSV-formatted text file with name 'fileName'.
* @param fileName the name of file to hold the data
*/
def write (fileName: String)
} // MatriR trait
| mvnural/scalation | src/main/scala/scalation/linalgebra/mem_mapped/MatriR.scala | Scala | mit | 30,646 |
/*
* FILE: IndexSerializer.scala
* Copyright (c) 2015 - 2019 GeoSpark Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.datasyslab.geosparksql.utils
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import com.vividsolutions.jts.index.SpatialIndex
import org.apache.spark.sql.catalyst.util.ArrayData
// This is a wrapper of GeoSpark core kryo serializer
object IndexSerializer {

  /** Serialize a spatial index into a kryo-encoded byte array.
    * Fixes vs. the previous version: the kryo `Output` is now closed in a
    * `finally` block so it is released even when `writeObject` throws, and the
    * redundant `return` keywords are gone.
    * @param index the spatial index to serialize
    * @return the kryo-encoded bytes
    */
  def serialize(index: SpatialIndex): Array[Byte] = {
    val out = new ByteArrayOutputStream()
    val output = new Output(out)
    try {
      new Kryo().writeObject(output, index)
    } finally {
      output.close()                       // flush and release even on failure
    }
    out.toByteArray
  }

  /** Deserialize kryo-encoded bytes back into a spatial index.
    * The kryo `Input` is closed in a `finally` block, and the old
    * `asInstanceOf[SpatialIndex]` cast was dropped — `readObject` already
    * returns a `SpatialIndex` given `classOf[SpatialIndex]`.
    * @param values the catalyst array holding the kryo-encoded bytes
    * @return the reconstructed spatial index
    */
  def deserialize(values: ArrayData): SpatialIndex = {
    val input = new Input(new ByteArrayInputStream(values.toByteArray()))
    try {
      new Kryo().readObject(input, classOf[SpatialIndex])
    } finally {
      input.close()                        // release even on failure
    }
  }
}
| Sarwat/GeoSpark | sql/src/main/scala/org/datasyslab/geosparksql/utils/IndexSerializer.scala | Scala | mit | 1,584 |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.indexer.spark
import java.io.{Closeable, File}
import java.nio.file.Files
import java.util
import com.google.common.collect.ImmutableList
import com.google.common.io.Closer
import io.druid.data.input.impl.{DimensionsSpec, JSONParseSpec, StringDimensionSchema, TimestampSpec}
import io.druid.java.util.common.JodaUtils
import io.druid.java.util.common.granularity.Granularities
import io.druid.java.util.common.logger.Logger
import io.druid.java.util.common.{CompressionUtils, IAE}
import io.druid.query.aggregation.LongSumAggregatorFactory
import io.druid.segment.QueryableIndexIndexableAdapter
import org.apache.commons.io.FileUtils
import org.apache.spark.{SparkConf, SparkContext}
import org.joda.time.{DateTime, Interval}
import org.scalatest._
import scala.collection.JavaConverters._
class TestSparkDruidIndexer extends FlatSpec with Matchers
{
import TestScalaBatchIndexTask._
"The spark indexer" should "return proper DataSegments" in {
val data_files = Seq(
this.getClass.getResource("/lineitem.small.tbl").toString,
this.getClass.getResource("/empty.tbl").toString
)
val closer = Closer.create()
val outDir = Files.createTempDirectory("segments").toFile
(outDir.mkdirs() || outDir.exists()) && outDir.isDirectory should be(true)
closer.register(
new Closeable()
{
override def close(): Unit = FileUtils.deleteDirectory(outDir)
}
)
try {
val conf = new SparkConf()
.setAppName("Simple Application")
.setMaster("local[4]")
.set("user.timezone", "UTC")
.set("file.encoding", "UTF-8")
.set("java.util.logging.manager", "org.apache.logging.log4j.jul.LogManager")
.set("org.jboss.logging.provider", "slf4j")
.set("druid.processing.columnCache.sizeBytes", "1000000000")
.set("spark.driver.host", "localhost")
.set("spark.executor.userClassPathFirst", "true")
.set("spark.driver.userClassPathFirst", "true")
.set("spark.kryo.referenceTracking", "false")
.registerKryoClasses(SparkBatchIndexTask.getKryoClasses())
val sc = new SparkContext(conf)
closer.register(
new Closeable
{
override def close(): Unit = sc.stop()
}
)
val loadResults = SparkDruidIndexer.loadData(
data_files,
new SerializedJson(dataSchema),
SparkBatchIndexTask.mapToSegmentIntervals(Seq(interval), Granularities.YEAR),
rowsPerPartition,
rowsPerFlush,
outDir.toString,
indexSpec,
buildV9Directly,
sc
)
loadResults.length should be(7)
for (
segment <- loadResults
) {
segment.getBinaryVersion should be(9)
segment.getDataSource should equal(dataSource)
interval.contains(segment.getInterval) should be(true)
segment.getInterval.contains(interval) should be(false)
segment.getSize should be > 0L
segment.getDimensions.asScala.toSet should
equal(
dataSchema.getParser.getParseSpec.getDimensionsSpec.getDimensionNames.asScala.toSet --
dataSchema.getParser.getParseSpec.getDimensionsSpec.getDimensionExclusions.asScala.toSet
)
segment.getMetrics.asScala.toList should equal(dataSchema.getAggregators.map(_.getName).toList)
val file = new File(segment.getLoadSpec.get("path").toString)
file.exists() should be(true)
val segDir = Files.createTempDirectory(outDir.toPath, "loadableSegment-%s" format segment.getIdentifier).toFile
val copyResult = CompressionUtils.unzip(file, segDir)
copyResult.size should be > 0L
copyResult.getFiles.asScala.map(_.getName).toSet should equal(
Set("00000.smoosh", "meta.smoosh", "version.bin", "factory.json")
)
val index = StaticIndex.INDEX_IO.loadIndex(segDir)
try {
val qindex = new QueryableIndexIndexableAdapter(index)
qindex.getDimensionNames.asScala.toSet should
equal(
dataSchema.getParser.getParseSpec.getDimensionsSpec.getDimensionNames.asScala.toSet --
dataSchema.getParser.getParseSpec.getDimensionsSpec.getDimensionExclusions.asScala.toSet
)
for (dimension <- qindex.getDimensionNames.iterator().asScala) {
val dimVal = qindex.getDimValueLookup(dimension).asScala
dimVal should not be 'Empty
for (dv <- dimVal) {
Option(dv) match {
case Some(v) =>
// I had a problem at one point where dimension values were being stored as lists
// This is a check to make sure the dimension is a list of values rather than being a list of lists
// If the unit test is ever modified to have dimension values that start with this offending case
// then of course this test will fail.
dv.toString should not startWith "List("
dv.toString should not startWith "Set("
case None => //Ignore
}
}
}
qindex.getNumRows should be > 0
for (colName <- Seq("count")) {
val column = index.getColumn(colName).getGenericColumn
try {
for (i <- Range.apply(0, qindex.getNumRows)) {
column.getLongSingleValueRow(i) should not be 0
}
}
finally {
column.close()
}
}
for (colName <- Seq("L_QUANTITY_longSum")) {
val column = index.getColumn(colName).getGenericColumn
try {
Range.apply(0, qindex.getNumRows).map(column.getLongSingleValueRow).sum should not be 0
}
finally {
column.close()
}
}
for (colName <- Seq("L_DISCOUNT_doubleSum", "L_TAX_doubleSum")) {
val column = index.getColumn(colName).getGenericColumn
try {
Range.apply(0, qindex.getNumRows).map(column.getFloatSingleValueRow).sum should not be 0.0D
}
finally {
column.close()
}
}
index.getDataInterval.getEnd.getMillis should not be JodaUtils.MAX_INSTANT
}
finally {
index.close()
}
}
}
finally {
closer.close()
}
}
it should "return proper DataSegments from json" in {
val data_files = Seq(this.getClass.getResource("/event.json").toString)
val closer = Closer.create()
val outDir = Files.createTempDirectory("segments").toFile
(outDir.mkdirs() || outDir.exists()) && outDir.isDirectory should be(true)
closer.register(
new Closeable()
{
override def close(): Unit = FileUtils.deleteDirectory(outDir)
}
)
try {
val conf = new SparkConf()
.setAppName("Simple Application")
.setMaster("local[4]")
.set("user.timezone", "UTC")
.set("file.encoding", "UTF-8")
.set("java.util.logging.manager", "org.apache.logging.log4j.jul.LogManager")
.set("org.jboss.logging.provider", "slf4j")
.set("druid.processing.columnCache.sizeBytes", "1000000000")
.set("spark.driver.host", "localhost")
.set("spark.executor.userClassPathFirst", "true")
.set("spark.driver.userClassPathFirst", "true")
.set("spark.kryo.referenceTracking", "false")
.registerKryoClasses(SparkBatchIndexTask.getKryoClasses())
val sc = new SparkContext(conf)
closer.register(
new Closeable
{
override def close(): Unit = sc.stop()
}
)
val aggName = "agg_met1"
val aggregatorFactory = new LongSumAggregatorFactory(aggName, "met1")
val dataSchema = buildDataSchema(
parseSpec = new
JSONParseSpec(
new TimestampSpec("ts", null, null),
new DimensionsSpec(ImmutableList.of(new StringDimensionSchema("dim1")), ImmutableList.of("ts"), null),
null,
null
),
aggFactories = Seq(aggregatorFactory)
)
val loadResults = SparkDruidIndexer.loadData(
data_files,
new SerializedJson(dataSchema),
SparkBatchIndexTask.mapToSegmentIntervals(Seq(interval), Granularities.YEAR),
rowsPerPartition,
rowsPerFlush,
outDir.toString,
indexSpec,
buildV9Directly,
sc
)
loadResults.length should be(1)
for (
segment <- loadResults
) {
segment.getBinaryVersion should be(9)
segment.getDataSource should equal(dataSource)
interval.contains(segment.getInterval) should be(true)
segment.getInterval.contains(interval) should be(false)
segment.getSize should be > 0L
segment.getDimensions.asScala.toSet should equal(Set("dim1"))
segment.getMetrics.asScala.toList should equal(dataSchema.getAggregators.map(_.getName).toList)
val file = new File(segment.getLoadSpec.get("path").toString)
file.exists() should be(true)
val segDir = Files.createTempDirectory(outDir.toPath, "loadableSegment-%s" format segment.getIdentifier).toFile
val copyResult = CompressionUtils.unzip(file, segDir)
copyResult.size should be > 0L
copyResult.getFiles.asScala.map(_.getName).toSet should equal(
Set("00000.smoosh", "meta.smoosh", "version.bin", "factory.json")
)
val index = StaticIndex.INDEX_IO.loadIndex(segDir)
try {
val qindex = new QueryableIndexIndexableAdapter(index)
qindex.getDimensionNames.asScala.toSet should equal(Set("dim1"))
val dimVal = qindex.getDimValueLookup("dim1").asScala
dimVal should not be 'Empty
for (dv <- dimVal) {
dv should equal("val1")
}
qindex.getMetricNames.asScala.toSet should equal(Set(aggName))
qindex.getMetricType(aggName) should equal(aggregatorFactory.getTypeName)
qindex.getNumRows should be(1)
qindex.getRows.asScala.head.getMetrics()(0) should be(1)
index.getDataInterval.getEnd.getMillis should not be JodaUtils.MAX_INSTANT
}
finally {
index.close()
}
}
}
finally {
closer.close()
}
}
// DateBucketPartitioner maps a (timestampMillis, _) key to the index of the
// segment interval that contains the timestamp.
"The DateBucketPartitioner" should "properly partition single item data" in {
val intervals = SparkBatchIndexTask.mapToSegmentIntervals(Seq(Interval.parse("1992/1993")), Granularities.YEAR)
val partitioner = new DateBucketPartitioner(Granularities.YEAR, intervals)
// The start of the only interval must land in partition 0.
partitioner.getPartition((intervals.head.getStartMillis, 0)) should equal(0L)
}
it should "throw an error if out of bounds" in {
val intervals = SparkBatchIndexTask.mapToSegmentIntervals(Seq(Interval.parse("1992/1993")), Granularities.YEAR)
val partitioner = new DateBucketPartitioner(Granularities.YEAR, intervals)
// Timestamp 0 (1970) lies outside 1992/1993, so the partitioner must reject it.
an[IAE] should be thrownBy {
partitioner.getPartition((0, 0))
}
}
it should "properly partition for multiple timespans" in {
val intervals = SparkBatchIndexTask.mapToSegmentIntervals(Seq(Interval.parse("1992/1999")), Granularities.YEAR)
val partitioner = new DateBucketPartitioner(Granularities.YEAR, intervals)
var idex = 0
// Each yearly interval's start timestamp should map to consecutive partitions 0..n-1.
intervals.foreach(
i => {
partitioner.getPartition((i.getStartMillis, 0)) should equal(idex)
idex += 1
}
)
idex should be(intervals.size)
}
// DateBucketAndHashPartitioner additionally hashes the event's dimension map
// within each date bucket; these tests use a single hash bucket per date.
it should "properly partition single data" in {
val intervals = SparkBatchIndexTask.mapToSegmentIntervals(Seq(Interval.parse("1992/1993")), Granularities.YEAR)
val partitioner = new
DateBucketAndHashPartitioner(Granularities.YEAR, Map((new DateTime("1992").getMillis, 0L) -> 0))
partitioner.getPartition(makeEvent(intervals.head.getStart)) should equal(0)
}
it should "properly partition multiple date ranges" in {
val intervals = SparkBatchIndexTask.mapToSegmentIntervals(Seq(Interval.parse("1992/1994")), Granularities.YEAR)
val partitioner = new DateBucketAndHashPartitioner(
Granularities.YEAR,
Map((new DateTime("1992").getMillis, 0L) -> 0, (new DateTime("1993").getMillis, 0L) -> 1)
)
// Start of the first interval goes to 0, an event just before the end goes to 1.
partitioner.getPartition(makeEvent(intervals.head.getStart)) should equal(0)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L))) should equal(1)
}
it should "properly partition disjoint date ranges" in {
val intervals = SparkBatchIndexTask
.mapToSegmentIntervals(Seq(Interval.parse("1992/1993"), Interval.parse("1995/1996")), Granularities.YEAR)
val partitioner = new DateBucketAndHashPartitioner(
Granularities.YEAR,
Map((new DateTime("1992").getMillis, 0L) -> 0, (new DateTime("1995").getMillis, 0L) -> 1)
)
// A gap (1993-1995) between the configured buckets must not disturb the mapping.
partitioner.getPartition(makeEvent(intervals.head.getStart)) should equal(0)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L))) should equal(1)
}
// Workflow tests: getSizedPartitionMap splits each date bucket (rows per bucket)
// into hash sub-buckets of at most `150` rows, and the partitioner then routes
// events by date bucket plus the hash of their (optionally dim-filtered) row map.
"The DateBucketAndHashPartitioner workflow" should "properly partition multiple date ranges and buckets" in {
val intervals = SparkBatchIndexTask.mapToSegmentIntervals(Seq(Interval.parse("1992/1994")), Granularities.YEAR)
val map = Map(new DateTime("1992").getMillis -> 100L, new DateTime("1993").getMillis -> 200L)
val m = SparkDruidIndexer.getSizedPartitionMap(map, 150)
partitioner.getPartition(makeEvent(intervals.head.getStart)) should equal(0)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L))) should equal(1)
// Same timestamp, different dim values: rows spread over the 1993 bucket's two sub-partitions.
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L), "something else1")) should equal(2)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L), "anotherHash3")) should equal(2)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L), "anotherHash1")) should equal(1)
}
it should "properly partition multiple date ranges and buckets when dim is specified" in
{
val intervals = SparkBatchIndexTask.mapToSegmentIntervals(Seq(Interval.parse("1992/1994")), Granularities.YEAR)
val map = Map(new DateTime("1992").getMillis -> 100L, new DateTime("1993").getMillis -> 200L)
val m = SparkDruidIndexer.getSizedPartitionMap(map, 150)
// Hashing restricted to "dim1" yields the same spread as hashing the whole row here,
// since the events only carry dim1.
val partitioner = new DateBucketAndHashPartitioner(Granularities.YEAR, m, Option(Set("dim1")))
partitioner.getPartition(makeEvent(intervals.head.getStart)) should equal(0)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L))) should equal(1)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L), "something else1")) should equal(2)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L), "anotherHash3")) should equal(2)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L), "anotherHash1")) should equal(1)
}
it should "properly group multiple events together" in {
val intervals = SparkBatchIndexTask.mapToSegmentIntervals(Seq(Interval.parse("1992/1994")), Granularities.YEAR)
val map = Map(new DateTime("1992").getMillis -> 100L, new DateTime("1993").getMillis -> 200L)
val m = SparkDruidIndexer.getSizedPartitionMap(map, 150)
// An empty dim set means every event hashes identically, so all 1993 events
// collapse into the same partition.
val partitioner = new DateBucketAndHashPartitioner(Granularities.YEAR, m, Option(Set[String]()))
partitioner.getPartition(makeEvent(intervals.head.getStart)) should equal(0)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L))) should equal(1)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L), "something else1")) should equal(1)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L), "anotherHash3")) should equal(1)
partitioner.getPartition(makeEvent(intervals.last.getEnd.minus(10L), "anotherHash1")) should equal(1)
}
/** Builds a (timestampMillis, rowMap) pair carrying a single "dim1" value, as consumed by the partitioners. */
def makeEvent(d: DateTime, v: String = "dim11"): (Long, Map[String, List[String]]) =
  (d.getMillis, Map("dim1" -> List(v)))
// getSizedPartitionMap turns {dateBucketMillis -> rowCount} into
// {(dateBucketMillis, subBucketIndex) -> globalPartitionIndex}, splitting buckets
// whose row count reaches the size limit and skipping empty buckets.
"getSizedPartitionMap" should "partition data correctly for single items" in {
val map = Map(0L -> 100L)
val m = SparkDruidIndexer.getSizedPartitionMap(map, 1000)
m.size should equal(1)
m.get((0L, 0L)) should equal(Some(0L))
}
it should "return nothing if unknown coordinates" in {
val map = Map(0L -> 100L)
val m = SparkDruidIndexer.getSizedPartitionMap(map, 1000)
m.size should equal(1)
m.get((1L, 0L)) should be('empty)
}
it should "partition data correctly for single boundary counts" in {
// 100 rows at a limit of exactly 100 splits the bucket into two sub-partitions.
val map = Map(0L -> 100L)
val m = SparkDruidIndexer.getSizedPartitionMap(map, 100)
m.size should equal(2)
m.get((0L, 0L)) should equal(Some(0L))
m.get((0L, 1L)) should equal(Some(1L))
}
it should "partition data correctly for multiple date buckets" in {
val map = Map(0L -> 100L, 1L -> 100L)
val m = SparkDruidIndexer.getSizedPartitionMap(map, 1000)
m.size should equal(2)
m.get((0L, 0L)) should equal(Some(0L))
m.get((1L, 0L)) should equal(Some(1L))
}
it should "ignore empty intervals" in {
val map = Map(0L -> 100L, 1L -> 0L)
val m = SparkDruidIndexer.getSizedPartitionMap(map, 1000)
m.size should equal(1)
m.get((0L, 0L)) should equal(Some(0L))
}
it should "properly index skipped intervals" in {
// The empty bucket 1L is skipped, but bucket 2L still gets the next index (1).
val map = Map(0L -> 100L, 1L -> 0L, 2L -> 100L)
val m = SparkDruidIndexer.getSizedPartitionMap(map, 1000)
m.size should equal(2)
m.get((0L, 0L)) should equal(Some(0L))
m.get((2L, 0L)) should equal(Some(1L))
}
"DateBucketAndHashPartitioner" should "handle min value hash" in {
// NOTE(review): the map literal repeats the key (0L, 0L); the second binding (-> 2)
// overwrites the first, which is why the expected partition below is 2.
// Confirm the duplicate key is intentional.
val partitioner = new
DateBucketAndHashPartitioner(Granularities.YEAR, Map[(Long, Long), Int]((0L, 0L) -> 1, (0L, 0L) -> 2))
// Integer.MIN_VALUE exercises the hash-normalization edge case
// (Math.abs(Integer.MIN_VALUE) is still negative).
partitioner.getPartition(
100000L -> new util.HashMap[String, AnyRef]() {
override def hashCode(): Int = {
Integer.MIN_VALUE
}
}
) should be(2)
}
}
// Holder for the shared test logger; kept in an object so a single instance is
// created per JVM rather than per test class instance.
object StaticTestSparkDruidIndexer
{
val log = new Logger(classOf[TestSparkDruidIndexer])
}
| metamx/druid-spark-batch | src/test/scala/io/druid/indexer/spark/TestSparkDruidIndexer.scala | Scala | apache-2.0 | 18,942 |
package uk.ac.surrey.xw.gui
import scala.collection.mutable.Publisher
import scala.collection.mutable.Subscriber
import org.nlogo.app.App
import org.nlogo.awt.EventQueue.invokeLater
import uk.ac.surrey.xw.api.ComponentWidget
import uk.ac.surrey.xw.api.ExtraWidget
import uk.ac.surrey.xw.api.PropertyKey
import uk.ac.surrey.xw.api.PropertyMap
import uk.ac.surrey.xw.api.PropertyValue
import uk.ac.surrey.xw.api.RichWorkspace.enrichWorkspace
import uk.ac.surrey.xw.api.Tab
import uk.ac.surrey.xw.api.TabKind
import uk.ac.surrey.xw.api.WidgetKey
import uk.ac.surrey.xw.api.WidgetKind
import uk.ac.surrey.xw.api.enrichOption
import uk.ac.surrey.xw.api.normalizeString
import uk.ac.surrey.xw.api.swing.enrichComponent
import uk.ac.surrey.xw.api.toRunnable
import uk.ac.surrey.xw.state.AddWidget
import uk.ac.surrey.xw.state.RemoveWidget
import uk.ac.surrey.xw.state.SetProperty
import uk.ac.surrey.xw.state.StateEvent
import uk.ac.surrey.xw.state.Writer
/**
 * Bridges state events published by the XW [[Writer]] to the NetLogo GUI:
 * widgets are created, updated and removed on the AWT event thread.
 *
 * NOTE(review): this file's Unicode arrow operators (⇒ / ←) had been garbled
 * into "β", which does not compile; they are restored as ASCII => / <- below.
 */
class GUI(
  val app: App,
  val writer: Writer,
  val widgetKinds: Map[String, WidgetKind[_]])
  extends Subscriber[StateEvent, Publisher[StateEvent]] {

  // Subscribe to all events except SetProperty events that originated from the
  // UI itself — re-applying those would create a feedback loop.
  writer.subscribe(this, {
    case SetProperty(_, _, _, fromUI) => !fromUI
    case _ => true
  })

  val tabs = app.tabs
  val tabPropertyKey = new TabKind[Tab].name

  /** Dispatches incoming state events onto the AWT event thread. */
  override def notify(pub: Publisher[StateEvent], event: StateEvent): Unit =
    invokeLater {
      event match {
        case AddWidget(widgetKey, propertyMap) =>
          addWidget(widgetKey, propertyMap)
        case SetProperty(widgetKey, propertyKey, propertyValue, _) =>
          setProperty(widgetKey, propertyKey, propertyValue)
        case RemoveWidget(widgetKey) =>
          removeWidget(widgetKey)
      }
    }

  /** Finds a live widget by key, searching the XW tabs and all of their children. */
  def getWidget(widgetKey: WidgetKey): Option[ExtraWidget] = {
    val xwTabs = app.workspace.xwTabs
    (xwTabs ++ xwTabs.flatMap(_.allChildren))
      .collectFirst {
        case w: ExtraWidget if w.key == widgetKey => w
      }
  }

  // Looks up the widget kind named by the "KIND" property, instantiates it and
  // initializes it with the full property map; missing kinds raise via orException.
  private def addWidget(widgetKey: WidgetKey, propertyMap: PropertyMap): Unit =
    for {
      kindName <- propertyMap.get("KIND").map(_.toString).orException(
        "Can't find KIND for " + widgetKey + " in " + propertyMap).right
      kind <- widgetKinds.get(normalizeString(kindName)).orException(
        "Kind " + kindName + " not loaded.").right
    } kind.newWidget(widgetKey, writer, app.workspace).init(propertyMap)

  // Applies a single property change to the target widget, if it still exists.
  private def setProperty(
    widgetKey: WidgetKey,
    propertyKey: PropertyKey,
    propertyValue: PropertyValue): Unit =
    getWidget(widgetKey).foreach(
      _.setProperty(propertyKey, propertyValue)
    )

  // Removes a widget from the GUI: tabs detach themselves from the app's tab
  // bar, component widgets are removed from their tab's panel and repainted.
  private def removeWidget(widgetKey: WidgetKey): Unit =
    for (w <- getWidget(widgetKey)) w match {
      case tab: Tab => tab.removeFromAppTabs()
      case cw: ComponentWidget =>
        for (t <- cw.tab) {
          t.panel.remove(cw)
          t.panel.repaint()
        }
    }
}
| CRESS-Surrey/eXtraWidgets | core/src/main/scala/uk/ac/surrey/xw/gui/GUI.scala | Scala | mit | 2,881 |
package sc.ala.kafka.utils
import kafka.api.{TopicMetadataRequest, TopicMetadataResponse}
import kafka.cluster.Broker
import java.util.Properties
import scala.util._
// Facade over Kafka cluster/topic introspection and administration operations.
private[utils] trait Api {
// reference
def broker: KafkaBrokerUtils
// 1 hop api
/** All brokers currently registered in the cluster. */
def brokers(): Seq[Broker]
/** All topic names currently known to the cluster. */
def topics(): Seq[String]
/** Comma-separated host:port connect string for the brokers. */
def brokerConnectString: String
// topics
/** Leader broker id for a single partition, or None if no leader is elected. */
def leader(topic: String, partition: Int): Option[Int]
/** Leader broker id per partition of the topic. */
def leaders(topic: String): Map[Int, Option[Int]]
/** Leader Broker object per partition of the topic. */
def leaderBrokers(topic: String): Map[Int, Broker]
/** Partition ids of the topic. */
def partitions(topic: String): Seq[Int]
/** Total message count across all partitions. */
def count(topic: String): Long
/** Message count per partition. */
def counts(topic: String): Map[Int, Long]
def create(topic: String, partitions: Int, replicationFactor: Int, topicConfig: Properties = new Properties): Unit
def delete(topic: String): Unit
/** Latest offset of the topic. */
def offset(topic: String): Long
/** Latest offset per partition. */
def offsets(topic: String): Map[Int, Long]
// testing (api is not fixed yet)
def metadata(topic: String): TopicMetadataResponse
def metadatas(topic: String, partition: Int): Try[TopicMetadataResponse]
// TODO
}
| maiha/kafka-utils | src/main/scala/sc/ala/kafka/utils/Api.scala | Scala | mit | 1,050 |
/*
* Copyright 2013 Thomas Dy under the terms of the MIT license
* located at https://raw.github.com/thatsmydoing/sakay-gateway/master/LICENSE
*/
package models
/** A road-vehicle fare: full price and discounted (e.g. student/senior) price. */
case class BusFare(regular: Double, discounted: Double)

object BusFare {
  // Fare tables per vehicle type, indexed by distance in whole kilometers
  // (index k holds the fare for a trip of up to k km; index 0 is the zero fare).
  val fares = Map(
    "pub_aircon" -> List(
      BusFare(0, 0),
      BusFare(12.00, 9.50),
      BusFare(12.00, 9.50),
      BusFare(12.00, 9.50),
      BusFare(12.00, 9.50),
      BusFare(12.00, 9.50),
      BusFare(14.25, 11.25),
      BusFare(16.50, 13.00),
      BusFare(18.50, 15.00),
      BusFare(20.75, 16.75),
      BusFare(23.00, 18.50),
      BusFare(25.25, 20.25),
      BusFare(27.50, 22.00),
      BusFare(29.50, 23.75),
      BusFare(31.75, 25.50),
      BusFare(34.00, 27.25),
      BusFare(36.25, 29.00),
      BusFare(38.50, 30.75),
      BusFare(40.50, 32.50),
      BusFare(42.75, 34.25),
      BusFare(45.00, 36.00),
      BusFare(47.25, 37.75),
      BusFare(49.50, 39.50),
      BusFare(51.50, 41.25),
      BusFare(53.75, 43.00),
      BusFare(56.00, 44.75),
      BusFare(58.25, 46.50),
      BusFare(60.50, 48.25),
      BusFare(62.50, 50.00),
      BusFare(64.75, 51.75),
      BusFare(67.00, 53.50),
      BusFare(69.25, 55.25),
      BusFare(71.50, 57.00),
      BusFare(73.50, 59.00),
      BusFare(75.75, 60.75),
      BusFare(78.00, 62.50),
      BusFare(80.25, 64.25),
      BusFare(82.50, 66.00),
      BusFare(84.50, 67.75),
      BusFare(86.75, 69.50),
      BusFare(89.00, 71.25),
      BusFare(91.25, 73.00),
      BusFare(93.50, 74.75),
      BusFare(95.50, 76.50),
      BusFare(97.75, 78.25),
      BusFare(100.00, 80.00),
      BusFare(102.25, 81.75),
      BusFare(104.50, 83.50),
      BusFare(106.50, 85.25),
      BusFare(108.75, 87.00),
      BusFare(111.00, 88.75),
      BusFare(113.25, 90.50),
      BusFare(115.50, 92.25),
      BusFare(117.50, 94.00),
      BusFare(119.75, 95.75),
      BusFare(122.00, 97.50),
      BusFare(124.25, 99.25),
      BusFare(126.50, 101.00),
      BusFare(128.50, 103.00),
      BusFare(130.75, 104.75),
      BusFare(133.00, 106.50)
    ),
    "pub_ordinary" -> List(
      BusFare(0, 0),
      BusFare(10.00, 8.00),
      BusFare(10.00, 8.00),
      BusFare(10.00, 8.00),
      BusFare(10.00, 8.00),
      BusFare(10.00, 8.00),
      BusFare(11.75, 9.50),
      BusFare(13.75, 11.00),
      BusFare(15.50, 12.50),
      BusFare(17.50, 14.00),
      BusFare(19.25, 15.50),
      BusFare(21.00, 17.00),
      BusFare(23.00, 18.25),
      BusFare(24.75, 19.75),
      BusFare(26.75, 21.25),
      BusFare(28.50, 22.75),
      BusFare(30.25, 24.25),
      BusFare(32.25, 25.75),
      BusFare(34.00, 27.25),
      BusFare(36.00, 28.75),
      BusFare(37.75, 30.25),
      BusFare(39.50, 31.75),
      BusFare(41.50, 33.25),
      BusFare(43.25, 34.75),
      BusFare(45.25, 36.00),
      BusFare(47.00, 37.50),
      BusFare(48.75, 39.00),
      BusFare(50.75, 40.50),
      BusFare(52.50, 42.00),
      BusFare(54.50, 43.50),
      BusFare(56.25, 45.00),
      BusFare(58.00, 46.50),
      BusFare(60.00, 48.00),
      BusFare(61.75, 49.50),
      BusFare(63.75, 51.00),
      BusFare(65.50, 52.50),
      BusFare(67.25, 54.00),
      BusFare(69.25, 55.25),
      BusFare(71.00, 56.75),
      BusFare(73.00, 58.25),
      BusFare(74.75, 59.75),
      BusFare(76.50, 61.25),
      BusFare(78.50, 62.75),
      BusFare(80.25, 64.25),
      BusFare(82.25, 65.75),
      BusFare(84.00, 67.25),
      BusFare(85.75, 68.75),
      BusFare(87.75, 70.25),
      BusFare(89.50, 71.75),
      BusFare(91.50, 73.00),
      BusFare(93.25, 74.50),
      BusFare(95.00, 76.00),
      BusFare(97.00, 77.50),
      BusFare(98.75, 79.00),
      BusFare(100.75, 80.50),
      BusFare(102.50, 82.00),
      BusFare(104.25, 83.50),
      BusFare(106.25, 85.00),
      BusFare(108.00, 86.50),
      BusFare(110.00, 88.00),
      BusFare(111.75, 89.50)
    ),
    "puj" -> List(
      BusFare(0, 0),
      BusFare(8.00, 6.50),
      BusFare(8.00, 6.50),
      BusFare(8.00, 6.50),
      BusFare(8.00, 6.50),
      BusFare(9.50, 7.50),
      BusFare(10.75, 8.75),
      BusFare(12.25, 9.75),
      BusFare(13.50, 11.00),
      BusFare(15.00, 12.00),
      BusFare(16.50, 13.25),
      BusFare(17.75, 14.25),
      BusFare(19.25, 15.50),
      BusFare(20.50, 16.50),
      BusFare(22.00, 17.75),
      BusFare(23.50, 18.75),
      BusFare(24.75, 19.75),
      BusFare(26.25, 21.00),
      BusFare(27.50, 22.25),
      BusFare(29.00, 23.25),
      BusFare(30.50, 24.50),
      BusFare(31.75, 25.50),
      BusFare(33.25, 26.75),
      BusFare(34.50, 27.75),
      BusFare(36.00, 28.75),
      BusFare(37.50, 30.00),
      BusFare(38.75, 31.00),
      BusFare(40.25, 32.25),
      BusFare(41.50, 33.25),
      BusFare(43.00, 34.50),
      BusFare(44.50, 35.50),
      BusFare(45.75, 36.75),
      BusFare(47.25, 37.75),
      BusFare(48.50, 38.75),
      BusFare(50.00, 40.00),
      BusFare(51.50, 41.25),
      BusFare(52.75, 42.25),
      BusFare(54.25, 43.50),
      BusFare(55.50, 44.50),
      BusFare(57.00, 45.75),
      BusFare(58.50, 46.75),
      BusFare(59.75, 47.75),
      BusFare(61.25, 49.00),
      BusFare(62.50, 50.00),
      BusFare(64.00, 51.25),
      BusFare(65.50, 52.50),
      BusFare(66.75, 53.50),
      BusFare(68.25, 54.75),
      BusFare(69.50, 55.75),
      BusFare(71.00, 56.75),
      BusFare(72.50, 58.00)
    )
  )

  /**
   * Looks up the fare for a trip.
   *
   * @param typ      vehicle type key: "pub_aircon", "pub_ordinary" or "puj"
   * @param distance trip distance in meters, rounded up to whole kilometers
   * @return the fare for the distance
   *
   * BUG FIX: the original indexed the table with an unclamped ceil(distance/1000),
   * so any distance beyond the table (or a negative distance past -1 km) threw
   * IndexOutOfBoundsException. The index is now clamped to the table bounds, so
   * over-long trips yield the maximum listed fare.
   */
  def get(typ: String, distance: Double) = {
    val table = fares(typ)
    val km = Math.ceil(distance / 1000).toInt
    table(Math.min(Math.max(km, 0), table.size - 1))
  }
}
/** A rail fare: single-journey (regular) price and stored-value card price. */
case class RailFare(regular: Double, stored: Double)

/**
 * An ordered list of station ids plus a fare table indexed by the number of
 * stations travelled: fares(k) is the fare for a k-station hop.
 */
case class RailLine(stations: List[String], fares: List[RailFare]) {
  /** Whether this line serves the given station id. */
  def has(station: String) = stations contains station

  /** Fare between two stations on this line, by absolute station distance. */
  def getFare(from: String, to: String) = {
    val hops = (stations.indexOf(from) - stations.indexOf(to)).abs
    fares(hops)
  }
}
// Fare tables for the Metro Manila rail lines, keyed by GTFS stop ids.
object RailFare {
// Convenience constructor for lines where the stored-value fare equals the regular fare.
def apply(regular: Double): RailFare = RailFare(regular, regular)
// LRT Line 1, Baclaran to Monumento (18 stations).
val lrt1 = RailLine(
List(
"LTFRB_4944", // Baclaran LRT
"LTFRB_4945", // EDSA LRT
"LTFRB_4946", // Libertad LRT
"LTFRB_4947", // Gil Puyat LRT
"LTFRB_4948", // Vito Cruz LRT
"LTFRB_4949", // Quirino Ave LRT
"LTFRB_4950", // Pedro Gil LRT
"LTFRB_4951", // UN Ave LRT
"LTFRB_4952", // Central Terminal LRT
"LTFRB_4953", // Carriedo LRT
"LTFRB_4954", // Doroteo Jose LRT
"LTFRB_4955", // Bambang LRT
"LTFRB_4956", // Tayuman LRT
"LTFRB_4957", // Blumentritt LRT
"LTFRB_4958", // Abad Santos LRT
"LTFRB_4959", // R. Papa LRT
"LTFRB_4960", // 5th Ave LRT
"LTFRB_4961" // Monumento LRT
),
List(
RailFare(0, 0),
RailFare(12, 12),
RailFare(12, 12),
RailFare(12, 12),
RailFare(12, 12),
RailFare(15, 13),
RailFare(15, 13),
RailFare(15, 13),
RailFare(15, 13),
RailFare(15, 14),
RailFare(15, 14),
RailFare(15, 14),
RailFare(15, 14),
RailFare(15, 15),
RailFare(15, 15),
RailFare(15, 15),
RailFare(15, 15),
RailFare(15, 15)
)
)
// LRT Line 1 including the northern extension to Balintawak and Roosevelt (20 stations).
val lrt1Extended = RailLine(
List(
"LTFRB_4944", // Baclaran LRT
"LTFRB_4945", // EDSA LRT
"LTFRB_4946", // Libertad LRT
"LTFRB_4947", // Gil Puyat LRT
"LTFRB_4948", // Vito Cruz LRT
"LTFRB_4949", // Quirino Ave LRT
"LTFRB_4950", // Pedro Gil LRT
"LTFRB_4951", // UN Ave LRT
"LTFRB_4952", // Central Terminal LRT
"LTFRB_4953", // Carriedo LRT
"LTFRB_4954", // Doroteo Jose LRT
"LTFRB_4955", // Bambang LRT
"LTFRB_4956", // Tayuman LRT
"LTFRB_4957", // Blumentritt LRT
"LTFRB_4958", // Abad Santos LRT
"LTFRB_4959", // R. Papa LRT
"LTFRB_4960", // 5th Ave LRT
"LTFRB_4961", // Monumento LRT
"LTFRB_4962", // LRT Balintawak
"LTFRB_4963" // Roosevelt LRT
),
List(
RailFare(0, 0),
RailFare(15, 13),
RailFare(15, 13),
RailFare(15, 14),
RailFare(15, 14),
RailFare(15, 15),
RailFare(15, 15),
RailFare(15, 15),
RailFare(20, 16),
RailFare(20, 16),
RailFare(20, 16),
RailFare(20, 17),
RailFare(20, 17),
RailFare(20, 17),
RailFare(20, 18),
RailFare(20, 18),
RailFare(20, 18),
RailFare(20, 19),
RailFare(20, 19),
RailFare(20, 20)
)
)
// LRT Line 2, Recto to Santolan (11 stations).
val lrt2 = RailLine(
List(
"LTFRB_4977", // Recto LRT
"LTFRB_4978", // Legarda LRT
"LTFRB_4979", // Pureza LRT
"LTFRB_4980", // V. Mapa LRT
"LTFRB_4981", // J. Ruiz LRT
"LTFRB_4982", // Gilmore LRT
"LTFRB_4983", // Betty Go Belmonte LRT
"LTFRB_4984", // Cubao LRT
"LTFRB_4985", // Anonas LRT
"LTFRB_4986", // Katipunan LRT
"LTFRB_4987" // Santolan LRT
),
List(
RailFare(0),
RailFare(12),
RailFare(12),
RailFare(12),
RailFare(13),
RailFare(13),
RailFare(13),
RailFare(14),
RailFare(14),
RailFare(14),
RailFare(15)
)
)
// MRT Line 3, North Avenue to Taft Avenue (13 stations).
val mrt3 = RailLine(
List(
"STOP_880847", // North Avenue MRT
"LTFRB_4965", // Quezon MRT
"LTFRB_4966", // Kamuning MRT
"LTFRB_4967", // Cubao MRT
"LTFRB_4968", // Santolan MRT
"LTFRB_4969", // Ortigas MRT
"LTFRB_4970", // Shaw MRT
"LTFRB_4971", // Boni MRT
"LTFRB_4972", // Guadalupe MRT
"LTFRB_4973", // Buendia MRT
"LTFRB_4974", // Ayala MRT
"LTFRB_4975", // Magellanes MRT
"LTFRB_4976" // Taft Ave MRT
),
List(
RailFare(0),
RailFare(10),
RailFare(10),
RailFare(11),
RailFare(11),
RailFare(12),
RailFare(12),
RailFare(12),
RailFare(14),
RailFare(14),
RailFare(14),
RailFare(15),
RailFare(15)
)
)
// PNR has no fare table here; every trip is reported as a zero fare.
val pnr = new RailLine(List(), List()) {
override def getFare(from: String, to: String) = RailFare(0)
}
// NOTE(review): the match below is non-exhaustive — an unknown routeId throws
// MatchError. Also, RailLine.getFare assumes both stations are on the chosen
// line (indexOf returning -1 would compute a wrong distance). Confirm callers
// guarantee both preconditions.
def get(routeId: String, from: String, to: String) = {
val line = routeId match {
case "ROUTE_880747" if lrt1.has(from) && lrt1.has(to) => lrt1
case "ROUTE_880747" => lrt1Extended
case "ROUTE_880801" => lrt2
case "ROUTE_880854" => mrt3
case "ROUTE_880872" => pnr
}
line.getFare(from, to)
}
}
| sakayph/sakay-gateway | app/models/Fare.scala | Scala | mit | 10,229 |
package net.sansa_stack.rdf.flink
import net.sansa_stack.rdf.flink.utils.Logging
import org.antlr.runtime.misc.DoubleKeyMap
import org.apache.flink.api.scala._
import org.apache.flink.api.scala.DataSet
import org.apache.jena.graph.{Node, Triple}
import org.apache.jena.vocabulary.OWL
package object stats {
// Pimps a Flink DataSet[Triple] with RDF dataset statistics (LODStats criteria).
// NOTE(review): each wrapped DataSet obtains its own ExecutionEnvironment here —
// confirm this is intended rather than reusing the environment of `triples`.
implicit class StatsCriteria(triples: DataSet[Triple]) extends Logging {
val env = ExecutionEnvironment.getExecutionEnvironment
/**
* Compute distributed RDF dataset statistics.
*
* @return VoID description of the given dataset
*/
def stats: DataSet[String] =
RDFStatistics.run(triples)
/**
* <b>1. Used Classes Criterion </b> <br>
* Creates an DataSet of classes are in use by instances of the analyzed dataset.
* As an example of such a triple that will be accepted by
* the filter is `sda:Gezim rdf:type distLODStats:Developer`.
* <b>Filter rule</b> : `?p=rdf:type && isIRI(?o)`
* <b>Action</b> : `S += ?o`
*
* @return DataSet of classes/instances
*/
def statsUsedClasses(): DataSet[Triple] =
Used_Classes(triples, env).Filter()
/**
* <b>2. Class Usage Count Criterion </b> <br>
* Count the usage of respective classes of a datase,
* the filter rule that is used to analyze a triple is the
* same as in the first criterion.
* As an action a map is being created having class IRIs as
* identifier and its respective usage count as value.
* If a triple is conform to the filter rule the respective
* value will be increased by one.
* <b>Filter rule</b> : `?p=rdf:type && isIRI(?o)`
* <b>Action</b> : `M[?o]++ `
*
* @return DataSet of classes used in the dataset and their frequencies.
*/
def statsClassUsageCount(): DataSet[(Node, Int)] =
Used_Classes(triples, env).Action()
/**
* <b>3. Classes Defined Criterion </b> <br>
* Gets a set of classes that are defined within a
* dataset this criterion is being used.
* Usually in RDF/S and OWL a class can be defined by a triple
* using the predicate `rdf:type` and either `rdfs:Class` or
* `owl:Class` as object.
* The filter rule illustrates the condition used to analyze the triple.
* If the triple is accepted by the rule, the IRI used as subject is added to the set of classes.
* <b>Filter rule</b> : `?p=rdf:type && isIRI(?s) &&(?o=rdfs:Class||?o=owl:Class)`
* <b>Action</b> : `S += ?s `
*
* @return DataSet of classes defined in the dataset.
*/
def statsClassesDefined(): DataSet[Node] =
Classes_Defined(triples, env).Action()
/**
* <b>5. Property Usage Criterion </b> <br>
* Count the usage of properties within triples.
* Therefore an DataSet will be created containing all property
* IRI's as identifier.
* Afterwards, their frequencies will be computed.
* <b>Filter rule</b> : `none`
* <b>Action</b> : `M[?p]++ `
*
* @return DataSet of predicates used in the dataset and their frequencies.
*/
def statsPropertyUsage(): DataSet[(Node, Int)] =
PropertyUsage(triples, env).Action()
/**
* <b>16. Distinct entities </b> <br>
* Count distinct entities of a dataset by filtering out all IRIs.
* <b>Filter rule</b> : `S+=iris({?s,?p,?o})`
* <b>Action</b> : `S`
*
* @return DataSet of distinct entities in the dataset.
*/
def statsDistinctEntities(): DataSet[Triple] =
DistinctEntities(triples, env).Action()
/**
* * 17. Literals criterion
*
* @return number of triples that are referencing literals to subjects.
*/
def statsLiterals(): DataSet[Triple] =
RDFStatistics.literals(triples)
/**
* 18. Blanks as subject criterion
*
* @return number of triples where blanknodes are used as subjects.
*/
def statsBlanksAsSubject(): DataSet[Triple] =
RDFStatistics.blanksAsSubject(triples)
/**
* 19. Blanks as object criterion
*
* @return number of triples where blanknodes are used as objects.
*/
def statsBlanksAsObject(): DataSet[Triple] =
RDFStatistics.blanksAsObject(triples)
/**
* 20. Datatypes criterion
*
* @return histogram of types used for literals.
*/
def statsDataTypes(): DataSet[(String, Int)] =
RDFStatistics.dataTypes(triples)
/**
* 21. Languages criterion
*
* @return histogram of languages used for literals.
*/
def statsLanguages(): DataSet[(String, Int)] =
RDFStatistics.languages(triples)
/**
* 24. Typed subjects criterion.
*
* @return list of typed subjects.
*/
def statsTypedSubjects(): DataSet[Node] =
RDFStatistics.typedSubjects(triples)
/**
* 24. Labeled subjects criterion.
*
* @return list of labeled subjects.
*/
def statsLabeledSubjects(): DataSet[Node] =
RDFStatistics.labeledSubjects(triples)
/**
* 25. SameAs criterion.
*
* @return list of triples with owl#sameAs as predicate
*/
def statsSameAs(): DataSet[Triple] =
RDFStatistics.sameAs(triples)
/**
* 26. Links criterion.
*
* Computes the frequencies of links between entities of different namespaces. This measure is directed, i.e.
* a link from `ns1 -> ns2` is different from `ns2 -> ns1`.
*
* @return list of namespace combinations and their frequencies.
*/
def statsLinks(): DataSet[(String, String, Int)] =
RDFStatistics.links(triples)
/**
* <b>30. Subject vocabularies </b> <br>
* Compute subject vocabularies/namespaces used through the dataset.
* <b>Filter rule</b> : `ns=ns(?s)`
* <b>Action</b> : `M[ns]++`
*
* @return DataSet of distinct subject vocabularies used in the dataset and their frequencies.
*/
def statsSubjectVocabularies(): AggregateDataSet[(String, Int)] =
SPO_Vocabularies(triples, env).SubjectVocabulariesPostProc()
/**
* <b>31. Predicate vocabularies </b> <br>
* Compute predicate vocabularies/namespaces used through the dataset.
* <b>Filter rule</b> : `ns=ns(?p)`
* <b>Action</b> : `M[ns]++`
*
* @return DataSet of distinct predicate vocabularies used in the dataset and their frequencies.
*/
def statsPredicateVocabularies(): AggregateDataSet[(String, Int)] =
SPO_Vocabularies(triples, env).PredicateVocabulariesPostProc()
/**
* <b>32. Object vocabularies </b> <br>
* Compute object vocabularies/namespaces used through the dataset.
* <b>Filter rule</b> : `ns=ns(?o)`
* <b>Action</b> : `M[ns]++`
*
* @return DataSet of distinct object vocabularies used in the dataset and their frequencies.
*/
def statsObjectVocabularies(): AggregateDataSet[(String, Int)] =
SPO_Vocabularies(triples, env).ObjectVocabulariesPostProc()
/**
* <b>Distinct Subjects</b> <br>
* Count distinct subject within triples.
* <b>Filter rule</b> : `isURI(?s)`
* <b>Action</b> : `M[?s]++`
*
* @return DataSet of subjects used in the dataset.
*/
def statsDistinctSubjects(): DataSet[Triple] =
DistinctSubjects(triples, env).Action()
/**
* <b>Distinct Objects</b> <br>
* Count distinct objects within triples.
* <b>Filter rule</b> : `isURI(?o)`
* <b>Action</b> : `M[?o]++`
*
* @return DataSet of objects used in the dataset.
*/
def statsDistinctObjects(): DataSet[Triple] =
DistinctObjects(triples, env).Action()
/**
* <b>Properties Defined</b> <br>
* Count the defined properties within triples.
* <b>Filter rule</b> : `?p=rdf:type && (?o=owl:ObjectProperty ||
* ?o=rdf:Property)&& !isIRI(?s)`
* <b>Action</b> : `M[?p]++`
*
* @return DataSet of predicates defined in the dataset.
*/
def statsPropertiesDefined(): DataSet[Node] =
PropertiesDefined(triples, env).Action()
}
// Pimps a computed statistics DataSet with VoID serialization helpers.
implicit class StatsCriteriaVoidify(stats: DataSet[String]) extends Logging {
/**
* Voidify RDF dataset based on the Vocabulary of Interlinked Datasets (VoID) [[https://www.w3.org/TR/void/]]
*
* @param source name of the Dataset:source--usualy the file's name
* @param output the directory to save RDF dataset summary
*/
def voidify(source: String, output: String): Unit =
RDFStatistics.voidify(stats, source, output)
/**
* Prints the Voidiy version of the given RDF dataset
*
* @param source name of the Dataset:source--usualy the file's name
*/
def print(source: String): Unit =
RDFStatistics.print(stats, source)
}
}
| SANSA-Stack/Spark-RDF | sansa-rdf-flink/src/main/scala/net/sansa_stack/rdf/flink/stats/package.scala | Scala | gpl-3.0 | 8,874 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.tstreams.agents.integration
import java.util.concurrent.CountDownLatch
import com.bwsw.tstreams.agents.consumer.Offset.{Newest, Oldest}
import com.bwsw.tstreams.agents.producer.NewProducerTransactionPolicy
import com.bwsw.tstreams.testutils._
import com.bwsw.tstreamstransactionserver.rpc.TransactionStates
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import scala.collection.mutable.ListBuffer
import scala.util.control.Breaks._
class ProducerAndConsumerSimpleTests extends FlatSpec with Matchers with BeforeAndAfterAll with TestUtils {
// Shared fixtures: a clean storage server plus one producer and one consumer,
// both bound to partition 0 of the test stream. Lazy so construction is
// deferred until beforeAll forces them.
lazy val srv = TestStorageServer.getNewClean()
lazy val producer = f.getProducer(
name = "test_producer",
partitions = Set(0))
lazy val consumer = f.getConsumer(
name = "test_consumer",
partitions = Set(0),
offset = Oldest,
useLastOffset = true)
// Forces the storage server into existence, creates the test stream and starts
// the consumer once for the whole suite.
override def beforeAll(): Unit = {
srv
createNewStream()
consumer.start()
}
"producer, consumer" should "producer - generate one transaction, consumer - retrieve it with getAll method" in {
val DATA_IN_TRANSACTION = 10
val producerTransaction = producer.newTransaction(NewProducerTransactionPolicy.ErrorIfOpened)
val l = new CountDownLatch(1)
// Latch fires when the server reports this transaction as Checkpointed (durable).
srv.notifyProducerTransactionCompleted(t => t.transactionID == producerTransaction.getTransactionID && t.state == TransactionStates.Checkpointed, l.countDown())
val sendData = (for (part <- 0 until DATA_IN_TRANSACTION) yield "data_part_" + randomKeyspace).sorted
sendData.foreach { x =>
producerTransaction.send(x.getBytes())
}
producerTransaction.checkpoint()
l.await()
// Consume the transaction in one shot and compare the payloads.
val transaction = consumer.getTransaction(0).get
transaction.getAll.map(i => new String(i)).sorted shouldBe sendData
//assert that is nothing to read
(0 until consumer.stream.partitionsCount) foreach { _ =>
consumer.getTransaction(0).isEmpty shouldBe true
}
}
"producer, consumer" should "producer - generate one transaction, consumer - retrieve it using iterator" in {
val DATA_IN_TRANSACTION = 10
val sendData = (for (part <- 0 until DATA_IN_TRANSACTION) yield "data_part_" + randomKeyspace).sorted
val producerTransaction = producer.newTransaction(NewProducerTransactionPolicy.ErrorIfOpened)
val l = new CountDownLatch(1)
// Latch fires when the server reports this transaction as Checkpointed (durable).
srv.notifyProducerTransactionCompleted(t => t.transactionID == producerTransaction.getTransactionID && t.state == TransactionStates.Checkpointed, l.countDown())
sendData.foreach { x => producerTransaction.send(x.getBytes()) }
producerTransaction.checkpoint()
l.await()
val transactionOpt = consumer.getTransaction(0)
transactionOpt.isDefined shouldBe true
val transaction = transactionOpt.get
// Drain the transaction via its hasNext/next iterator interface this time.
var readData = ListBuffer[String]()
while (transaction.hasNext) {
val s = new String(transaction.next())
readData += s
}
readData.toList.sorted shouldBe sendData
//assert that is nothing to read
(0 until consumer.stream.partitionsCount) foreach { _ => consumer.getTransaction(0).isEmpty shouldBe true }
}
"producer, consumer" should "producer - generate some set of transactions, consumer - retrieve them all" in {
val TRANSACTIONS_COUNT = 100
val DATA_IN_TRANSACTION = 10
val sendData = (for (part <- 0 until DATA_IN_TRANSACTION) yield "data_part_" + randomKeyspace).sorted
val l = new CountDownLatch(1)
var counter = 0
// Produce TRANSACTIONS_COUNT checkpointed transactions; the latch fires when the
// LAST one is reported Checkpointed, i.e. everything before it is durable too.
(0 until TRANSACTIONS_COUNT).foreach { _ =>
val producerTransaction = producer.newTransaction(NewProducerTransactionPolicy.ErrorIfOpened)
counter += 1
if (counter == TRANSACTIONS_COUNT)
srv.notifyProducerTransactionCompleted(t => t.transactionID == producerTransaction.getTransactionID && t.state == TransactionStates.Checkpointed, l.countDown())
sendData.foreach { x => producerTransaction.send(x.getBytes()) }
producerTransaction.checkpoint()
}
l.await()
(0 until TRANSACTIONS_COUNT).foreach { _ =>
val transaction = consumer.getTransaction(0)
transaction.nonEmpty shouldBe true
// BUG FIX: the original wrote "... .sorted == sendData" and discarded the
// resulting Boolean, so the payload check never actually asserted anything.
transaction.get.getAll.map(i => new String(i)).sorted shouldBe sendData
}
//assert that is nothing to read
(0 until consumer.stream.partitionsCount) foreach { _ =>
consumer.getTransaction(0).isEmpty shouldBe true
}
}
"producer, consumer" should "producer - generate some set of transactions after cancel, consumer - retrieve them all" in {
val TRANSACTIONS_COUNT = 100
val DATA_IN_TRANSACTION = 1
// pl collects produced transaction ids, cl collects consumed ones; they must match.
val pl = ListBuffer[Long]()
val cl = ListBuffer[Long]()
val sendData = (for (part <- 0 until DATA_IN_TRANSACTION) yield "data_part_" + randomKeyspace).sorted
val l = new CountDownLatch(1)
var counter = 0
// The cancelled transaction must never become visible to the consumer.
val producerTransaction = producer.newTransaction(NewProducerTransactionPolicy.ErrorIfOpened)
producerTransaction.cancel()
(0 until TRANSACTIONS_COUNT).foreach { _ =>
val producerTransaction = producer.newTransaction(NewProducerTransactionPolicy.ErrorIfOpened)
pl.append(producerTransaction.getTransactionID)
counter += 1
if (counter == TRANSACTIONS_COUNT)
srv.notifyProducerTransactionCompleted(t => t.transactionID == producerTransaction.getTransactionID && t.state == TransactionStates.Checkpointed, l.countDown())
sendData.foreach { x => producerTransaction.send(x.getBytes()) }
producerTransaction.checkpoint()
}
l.await()
(0 until TRANSACTIONS_COUNT).foreach { i =>
val transactionOpt = consumer.getTransaction(0)
transactionOpt.nonEmpty shouldBe true
cl.append(transactionOpt.get.getTransactionID)
}
// Consumed ids must equal produced ids, in order, with no cancelled id present.
cl shouldBe pl
}
"producer, consumer" should "producer - generate transaction, consumer retrieve it (both start async)" in {
val timeoutForWaiting = 5
val DATA_IN_TRANSACTION = 10
val sendData = (for (part <- 0 until DATA_IN_TRANSACTION) yield "data_part_" + part).sorted
val producerThread = new Thread(() => {
val transaction = producer.newTransaction(NewProducerTransactionPolicy.ErrorIfOpened)
Thread.sleep(100)
sendData.foreach { x =>
transaction.send(x.getBytes())
}
transaction.checkpoint()
})
val consumerThread = new Thread(() => {
breakable {
while (true) {
val transactionOpt = consumer.getTransaction(0)
if (transactionOpt.isDefined) {
transactionOpt.get.getAll.map(i => new String(i)).sorted shouldBe sendData
break()
}
Thread.sleep(100)
}
}
})
producerThread.start()
consumerThread.start()
producerThread.join(timeoutForWaiting * 1000)
consumerThread.join(timeoutForWaiting * 1000)
consumerThread.isAlive shouldBe false
producerThread.isAlive shouldBe false
(0 until consumer.stream.partitionsCount) foreach { _ => consumer.getTransaction(0).isEmpty shouldBe true }
}
"producer and consumer" should "work correctly for instant transactions" in {
val consumer = f.getConsumer(
name = "test_consumer",
partitions = Set(0),
offset = Newest,
useLastOffset = false)
val transactionID1 = producer.instantTransaction(0, Seq("test1".getBytes), isReliable = true)
val transactionID2 = producer.instantTransaction(0, Seq("test2".getBytes), isReliable = false)
consumer.start()
consumer.getTransactionById(0, transactionID1)
.foreach(transaction => {
transaction.getTransactionID shouldBe transactionID1
new String(transaction.next()) shouldBe "test1"
})
consumer.getTransactionById(0, transactionID2)
.foreach(transaction => {
transaction.getTransactionID shouldBe transactionID2
new String(transaction.next()) shouldBe "test2"
})
consumer.stop()
}
// Suite-level teardown. Order matters: stop the producer and consumer agents
// first, then dispose of the storage server they were connected to, and
// finally run any extra per-suite cleanup hook.
override def afterAll(): Unit = {
  producer.stop()
  consumer.stop()
  TestStorageServer.dispose(srv)
  onAfterAll()
}
}
| bwsw/t-streams | src/test/scala/com/bwsw/tstreams/agents/integration/ProducerAndConsumerSimpleTests.scala | Scala | apache-2.0 | 8,762 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.