code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.kudu.index
import com.github.benmanes.caffeine.cache.{CacheLoader, Caffeine}
import org.apache.kudu.client.{CreateTableOptions, KuduTable, PartialRow}
import org.apache.kudu.{ColumnSchema, Schema}
import org.locationtech.geomesa.index.api._
import org.locationtech.geomesa.index.index.attribute.AttributeIndex
import org.locationtech.geomesa.index.index.id.IdIndex
import org.locationtech.geomesa.index.index.z2.{XZ2Index, Z2Index}
import org.locationtech.geomesa.index.index.z3.{XZ3Index, Z3Index}
import org.locationtech.geomesa.kudu.schema.KuduIndexColumnAdapter.VisibilityAdapter
import org.locationtech.geomesa.kudu.schema.{KuduColumnAdapter, KuduSimpleFeatureSchema}
import org.locationtech.geomesa.kudu.{KuduSplitterOptions, KuduValue, Partitioning}
import org.locationtech.geomesa.utils.text.KVPairParser
import org.opengis.feature.simple.SimpleFeatureType
object KuduColumnMapper {

  // Cache of one mapper instance per feature index; the loader selects the
  // concrete implementation based on the index name.
  private val mappers = Caffeine.newBuilder().build(
    new CacheLoader[GeoMesaFeatureIndex[_, _], KuduColumnMapper]() {
      override def load(key: GeoMesaFeatureIndex[_, _]): KuduColumnMapper = {
        val indexName = key.name
        if (indexName == IdIndex.name) {
          new IdColumnMapper(key)
        } else if (indexName == Z3Index.name || indexName == XZ3Index.name) {
          new Z3ColumnMapper(key)
        } else if (indexName == Z2Index.name || indexName == XZ2Index.name) {
          new Z2ColumnMapper(key)
        } else if (indexName == AttributeIndex.name) {
          new AttributeColumnMapper(key)
        } else {
          throw new IllegalArgumentException(s"Unexpected index: ${key.name}")
        }
      }
    }
  )

  /** Returns the (cached) column mapper for the given feature index */
  def apply(index: GeoMesaFeatureIndex[_, _]): KuduColumnMapper = mappers.get(index)

  /** Parses any table splitter options out of the feature type user data */
  private def splitters(sft: SimpleFeatureType): Map[String, String] = {
    val configured = Option(sft.getUserData.get(KuduSplitterOptions).asInstanceOf[String])
    configured.map(KVPairParser.parse).getOrElse(Map.empty)
  }
}
/**
 * Maps GeoMesa index keys and simple feature attributes to Kudu table columns
 *
 * @param index feature index
 * @param keyColumns columns that are part of the primary key, used for range planning
 */
abstract class KuduColumnMapper(val index: GeoMesaFeatureIndex[_, _], val keyColumns: Seq[KuduColumnAdapter[_]]) {

  // table splitting configuration pulled from the feature type user data
  protected val splitters: Map[String, String] = KuduColumnMapper.splitters(index.sft)

  // column schema for the feature type attributes
  val schema: KuduSimpleFeatureSchema = KuduSimpleFeatureSchema(index.sft)

  /**
   * Table schema, which includes all the primary key columns, the visibility
   * column, and the feature type columns, in that order
   *
   * @return
   */
  val tableSchema: Schema = {
    val columns = new java.util.ArrayList[ColumnSchema]()
    val ordered = keyColumns.flatMap(_.columns) ++ Seq(VisibilityAdapter.column) ++ schema.writeSchema
    ordered.foreach(columns.add)
    new Schema(columns)
  }

  /**
   * Create initial partitions based on the table splitting config
   */
  def configurePartitions(): CreateTableOptions

  /**
   * Creates a new range partition covering the given time period. Only implemented by indices
   * with a leading time period, as otherwise partitioning must be done up front. Note that Kudu
   * only supports adding new range partitions that don't overlap existing ones - existing
   * partitions can't be modified or split
   *
   * @param table kudu table
   * @param bin time period being covered (e.g. week)
   * @return
   */
  def createPartition(table: KuduTable, bin: Short): Option[Partitioning] = None

  /**
   * Turns scan ranges into kudu row ranges
   *
   * @param ranges scan ranges
   * @param tieredKeyRanges tiered ranges
   * @return
   */
  def toRowRanges(ranges: Seq[ScanRange[_]],
                  tieredKeyRanges: Seq[ByteRange]): Seq[(Option[PartialRow], Option[PartialRow])]

  /**
   * Creates key values for an insert
   *
   * @param value index key
   * @return
   */
  def createKeyValues(value: SingleRowKeyValue[_]): Seq[KuduValue[_]]
}
| locationtech/geomesa | geomesa-kudu/geomesa-kudu-datastore/src/main/scala/org/locationtech/geomesa/kudu/index/KuduColumnMapper.scala | Scala | apache-2.0 | 4,200 |
package org.geneticmachine.navigation
import org.geneticmachine._
import org.geneticmachine.common.graph.{GraphBuilder, Graph}
import scala.concurrent.Future
import org.geneticmachine.navigation.vision._
import org.geneticmachine.navigation.generators._
import org.geneticmachine.navigation.feedback._
/**
 * Factory for [[NavigationEnvironment]] instances, capturing the labyrinth
 * generator, vision model, feedback strategy, and metrics to apply once an
 * execution context is available.
 */
case class NavigationEnvironmentGen(labGen: LabyrinthGenerator)
                                   (vision: Vision)
                                   (feedbackStrategy: FeedbackStrategyGenerator)
                                   (metrics: List[Metric[NavigationState]])
                                   (continuousMetrics: List[ContinuousMetric[NavigationState]])
  extends EnvironmentGen[NavigationInput, NavigationOutput, NavigationState, ExecutionContext] {

  /** Builds the environment bound to the supplied execution context */
  override def apply(c: ExecutionContext): NavigationEnvironment =
    new NavigationEnvironment(c)(labGen, vision, feedbackStrategy, metrics, continuousMetrics)

  override def toString: String = s"Navigation environment [$labGen, $vision, $feedbackStrategy]"
}
/**
 * Environment driving a navigation experiment: generates a labyrinth, feeds the
 * algorithm partial observations via `vision`, applies its commands, and scores
 * each step with a feedback strategy.
 */
class NavigationEnvironment(protected val context: ExecutionContext)
                           (labyrinthGen: LabyrinthGenerator, val vision: Vision,
                            feedbackStrategyGen: FeedbackStrategyGenerator,
                            override val metrics: List[Metric[NavigationState]],
                            override val continuousMetrics: List[ContinuousMetric[NavigationState]])
  extends Environment[NavigationInput, NavigationOutput, NavigationState] {

  import context.futureExecutionContext

  // Assigned in `init` once the initial state exists (the strategy generator
  // needs a state). NOTE(review): remains null until `init` completes, and
  // `serialize`/`process` dereference it - callers must run `init` first.
  var feedbackStrategy: FeedbackStrategy = null // NULLL!!!!

  /** Generates the labyrinth and the first (partially observed) input for the algorithm. */
  override def init: Future[(NavigationState, Option[NavigationInput])] = Future {
    val (lab, start, goal) = labyrinthGen()
    val obs = vision(lab, start)
    // start from a fully-unknown map and impose the first observation onto it
    val visionMap = obs.impose(Labyrinth.unknown(lab.rows, lab.cols))
    val path = List(start)
    val history = List()
    val state = NavigationState(visionMap, lab, start, goal, path, history)
    val initialInput = NavigationInput(visionMap, start, goal, 0.0)
    feedbackStrategy = feedbackStrategyGen(state)
    (state, Some(initialInput))
  }

  /** Applies one algorithm command; returns the new state and the next input (None when the goal is reached). */
  def process(state: NavigationState, algorithmAction: NavigationCommand) = Future {
    context.logger.info {
      s"\n${utils.NavigationInfo.formatNavigationState(state)}"
    }
    val newPosition = state.robotPosition.action(state.labyrinth)(algorithmAction)
    val path = newPosition :: state.path
    val history = algorithmAction :: state.history
    val obs = vision(state.labyrinth, newPosition)
    // NOTE(review): impose appears to mutate state.visionMap in place - the
    // "new" status below reuses the same map object; confirm this is intended.
    obs.impose(state.visionMap)
    val newStatus = NavigationState(state.visionMap, state.labyrinth, newPosition, state.goal, path, history)
    // feedback is computed against the *previous* state and the action taken
    val feedback: Double = feedbackStrategy(state, algorithmAction)
    val newInput = NavigationInput(state.visionMap, newPosition, state.goal, feedback)
    (newStatus, if (newPosition.point == state.goal) { None } else { Some(newInput) })
  }

  /** Serializes the experiment setup and trajectory into a result graph. */
  override def serialize(status: NavigationState): Future[Graph] = Future.successful {
    val builder = GraphBuilder(Graph.environmentLabel).withType("Navigation environment")
    val input = builder.node("RobotInput").asInput()
    val output = builder.node("BrainScore").asOutput()
    val robotNode = builder.node("Experiment")
    robotNode("Labyrinth generator" -> labyrinthGen.toString)
    robotNode("Vision" -> vision.toString)
    robotNode("Feedback" -> feedbackStrategy.toString)
    val (labRepr, rows, cols) = Labyrinth.toArray(status.labyrinth)
    robotNode("Labyrinth" -> labRepr)
    robotNode("Rows" -> rows)
    robotNode("Cols" -> cols)
    robotNode("Commands" -> status.history.toArray)
    // path was built by prepending, so these arrays are in reverse chronological order
    robotNode("TrajectoryX" -> status.path.map { _.point.x }.toArray)
    robotNode("TrajectoryY" -> status.path.map { _.point.y }.toArray)
    robotNode("TrajectoryDir" -> status.path.map { rp => Direction.char(rp.direction) }.toArray)
    input --> robotNode
    robotNode --> output
    builder.toGraph
  }
}
} | ZloVechno/genetic-machine | src/main/scala/org/geneticmachine/navigation/NavigationEnvironment.scala | Scala | mit | 3,984 |
/**
* Log analyzer and summary builder written in Scala built for JVM projects
*
* @package LogAnalyzer
* @copyright Apache V2 License (see LICENSE)
* @url https://github.com/mcross1882/LogAnalyzer
*/
package mcross1882.loganalyzer.parser
import mcross1882.loganalyzer.analyzer.Analyzer
import scala.collection.mutable.HashMap
import scala.collection.mutable.ListBuffer
import scala.io.Source
import scala.xml.XML
/**
 * SimpleParser allows you to define a parser
 * programatically by providing helper methods
 * for setting it up
 *
 * @since 1.0
 * @author Matthew Cross <blacklightgfx@gmail.com>
 * @param n the name to reference this parser by
 * @param files files to parse
 * @param analyzers analyzers to use on the specified files
 */
class SimpleParser(n: String, files: List[String], analyzers: List[Analyzer]) extends Parser {

  /**
   * Temporary storage maintaining the number of occurrences
   * a given log pattern appears
   *
   * @since 1.0
   */
  protected val _records = new HashMap[String,Int]

  /**
   * Analyzers used to extract the timestamp from a log line.
   * NOTE(review): selected by analyzer *name*, whereas groupAnalyzersByCategory
   * excludes by *category* - confirm both are intentionally "timestamp".
   *
   * @since 1.0
   */
  protected val _timestamps = analyzers.filter(x => "timestamp".equals(x.name))

  /**
   * {@inheritdoc}
   */
  def parseFiles(dates: List[String]): Unit = {
    for (file <- files) {
      parseLinesFromFiles(file, dates)
    }
  }

  /**
   * {@inheritdoc}
   */
  def results: String = {
    val builder = new StringBuilder
    val groups = groupAnalyzersByCategory
    // NOTE(review): iterating over all analyzers means a category shared by
    // several analyzers is appended once per analyzer; preserved as-is since
    // the report ordering may be relied upon downstream.
    for (analyzer <- analyzers) {
      val category = analyzer.category
      if (groups.contains(category)) {
        appendAnalyzerSubset(builder, category, groups(category))
      }
    }
    builder.toString
  }

  /**
   * {@inheritdoc}
   */
  def name: String = n

  /**
   * Opens a source file and parses it line-by-line, always closing the
   * underlying file handle (previously the Source was leaked on every call)
   *
   * @since 1.0
   * @param filename file to read
   * @param dates dates to filter on
   */
  protected def parseLinesFromFiles(filename: String, dates: List[String]): Unit = {
    try {
      val source = Source.fromFile(filename)
      try {
        source.getLines.foreach { line =>
          parseLine(line, dates)
        }
      } finally {
        source.close()
      }
    } catch {
      case e: Exception => println("An error occurred while reading %s. (%s)".format(filename, e.getMessage))
    }
  }

  /**
   * Parses a single line from the files
   *
   * @since 1.0
   * @param line to parse
   * @param dates to filter on
   */
  protected def parseLine(line: String, dates: List[String]): Unit = {
    if (dates.isEmpty || isTimestampInRange(line, dates)) {
      storeAnalyzerMatches(line)
    }
  }

  /**
   * Determines if the line contains a valid timestamp. If the line
   * does contain a timestamp it will be validated against the dates array
   *
   * @since 1.0
   * @param line the line to be parsed
   * @param dates valid dates in the range
   * @return true if the timestamp is in range; false otherwise
   */
  protected def isTimestampInRange(line: String, dates: List[String]): Boolean = {
    var currentDate = ""
    for (timestamp <- _timestamps if timestamp.isMatch(line)) {
      currentDate = extractTextFromMessage(timestamp)
      if (!dates.exists(_.equals(currentDate))) {
        return false
      }
    }
    true
  }

  /**
   * Parses the line argument and increments the count for any
   * analyzer category contained within the line
   *
   * @since 1.0
   * @param line the line to be parsed
   */
  protected def storeAnalyzerMatches(line: String): Unit = {
    for (analyzer <- analyzers if analyzer.isMatch(line)) {
      _records.put(analyzer.category, _records.getOrElse(analyzer.category, 0) + 1)
    }
  }

  /**
   * Extracts the text of a analyzer message (everything before the first colon)
   *
   * @since 1.0
   * @param analyzer the analyzer with a message to split
   * @return the extracted text trimmed
   */
  protected def extractTextFromMessage(analyzer: Analyzer): String =
    analyzer.message.split(":").headOption.map(_.trim).getOrElse("")

  /**
   * Groups analyzers by their category while ignoring
   * any timestamp analyzers
   *
   * @since 1.0
   * @return a list of analyzers grouped by category
   */
  protected def groupAnalyzersByCategory: Map[String, List[Analyzer]] =
    analyzers.filter(!_.category.equals("timestamp")).groupBy(_.category)

  /**
   * Appends all messages from the analyzer subset into the builder
   *
   * @since 1.0
   * @param builder the string builder to append text too
   * @param category the analyzer category
   * @param subset of filtered analyzers
   */
  protected def appendAnalyzerSubset(builder: StringBuilder, category: String, subset: List[Analyzer]): Unit = {
    builder.append("%s\\n".format(category))
    for (analyzer <- subset if _records.contains(analyzer.category)) {
      builder.append(analyzer.message)
    }
    builder.append("\\n")
  }
}
| mcross1882/LogAnalyzer | src/main/scala/mcross1882/loganalyzer/parser/SimpleParser.scala | Scala | apache-2.0 | 5,242 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.ui
import java.util.Properties
import java.util.concurrent.atomic.AtomicInteger
import scala.collection.mutable
import scala.concurrent.duration._
import org.apache.spark.{SparkConf, TaskState}
import org.apache.spark.benchmark.{Benchmark, BenchmarkBase}
import org.apache.spark.executor.ExecutorMetrics
import org.apache.spark.internal.config.Status._
import org.apache.spark.resource.ResourceProfile
import org.apache.spark.scheduler._
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.metric.SQLMetricInfo
import org.apache.spark.status.ElementTrackingStore
import org.apache.spark.util.{AccumulatorMetadata, LongAccumulator, Utils}
import org.apache.spark.util.kvstore.InMemoryStore
/**
* Benchmark for metrics aggregation in the SQL listener.
* {{{
* To run this benchmark:
* 1. without sbt:
* bin/spark-submit --class <this class> --jars <core test jar> <spark sql test jar>
* 2. build/sbt "core/test:runMain <this class>"
* 3. generate result: SPARK_GENERATE_BENCHMARK_FILES=1 build/sbt "core/test:runMain <this class>"
* Results will be written to "benchmarks/MetricsAggregationBenchmark-results.txt".
* }}}
*/
object MetricsAggregationBenchmark extends BenchmarkBase {

  /**
   * Runs one benchmark iteration: replays a synthetic SQL execution (job/stage/task
   * events carrying `numMetrics` accumulators per task) through a live
   * SQLAppStatusListener, then aggregates the metrics. Timing covers everything
   * between executionStart and the aggregation assert.
   */
  private def metricTrackingBenchmark(
      timer: Benchmark.Timer,
      numMetrics: Int,
      numTasks: Int,
      numStages: Int): Measurements = {
    // disable async tracking + periodic updates so the listener work is synchronous
    val conf = new SparkConf()
      .set(LIVE_ENTITY_UPDATE_PERIOD, 0L)
      .set(ASYNC_TRACKING_ENABLED, false)
    val kvstore = new ElementTrackingStore(new InMemoryStore(), conf)
    val listener = new SQLAppStatusListener(conf, kvstore, live = true)
    val store = new SQLAppStatusStore(kvstore, Some(listener))
    val metrics = (0 until numMetrics).map { i =>
      new SQLMetricInfo(s"metric$i", i.toLong, "average")
    }
    val planInfo = new SparkPlanInfo(
      getClass().getName(),
      getClass().getName(),
      Nil,
      Map.empty,
      metrics)
    // idgen hands out unique ids for the execution, stages, jobs and task offsets
    val idgen = new AtomicInteger()
    val executionId = idgen.incrementAndGet()
    val executionStart = SparkListenerSQLExecutionStart(
      executionId,
      getClass().getName(),
      getClass().getName(),
      getClass().getName(),
      planInfo,
      System.currentTimeMillis(),
      Map.empty)
    val executionEnd = SparkListenerSQLExecutionEnd(executionId, System.currentTimeMillis())
    val properties = new Properties()
    properties.setProperty(SQLExecution.EXECUTION_ID_KEY, executionId.toString)
    timer.startTiming()
    listener.onOtherEvent(executionStart)
    // replay numStages stages of numTasks tasks each; collect per-stage task-event times
    val taskEventsTime = (0 until numStages).map { _ =>
      val stageInfo = new StageInfo(idgen.incrementAndGet(), 0, getClass().getName(),
        numTasks, Nil, Nil, getClass().getName(),
        resourceProfileId = ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID)
      val jobId = idgen.incrementAndGet()
      val jobStart = SparkListenerJobStart(
        jobId = jobId,
        time = System.currentTimeMillis(),
        stageInfos = Seq(stageInfo),
        properties)
      val stageStart = SparkListenerStageSubmitted(stageInfo)
      val taskOffset = idgen.incrementAndGet().toLong
      val taskEvents = (0 until numTasks).map { i =>
        val info = new TaskInfo(
          taskId = taskOffset + i.toLong,
          index = i,
          attemptNumber = 0,
          // The following fields are not used.
          launchTime = 0,
          executorId = "",
          host = "",
          taskLocality = null,
          speculative = false)
        info.markFinished(TaskState.FINISHED, 1L)
        // one accumulator per metric, attributed to this task
        val accumulables = (0 until numMetrics).map { mid =>
          val acc = new LongAccumulator
          acc.metadata = AccumulatorMetadata(mid, None, false)
          acc.toInfo(Some(i.toLong), None)
        }
        info.setAccumulables(accumulables)
        val start = SparkListenerTaskStart(stageInfo.stageId, stageInfo.attemptNumber, info)
        val end = SparkListenerTaskEnd(stageInfo.stageId, stageInfo.attemptNumber,
          taskType = "",
          reason = null,
          info,
          new ExecutorMetrics(),
          null)
        (start, end)
      }
      val jobEnd = SparkListenerJobEnd(
        jobId = jobId,
        time = System.currentTimeMillis(),
        JobSucceeded)
      listener.onJobStart(jobStart)
      listener.onStageSubmitted(stageStart)
      // measure only the task start/end event processing
      val (_, _taskEventsTime) = Utils.timeTakenMs {
        taskEvents.foreach { case (start, end) =>
          listener.onTaskStart(start)
          listener.onTaskEnd(end)
        }
      }
      listener.onJobEnd(jobEnd)
      _taskEventsTime
    }
    // measure the final aggregation triggered by the execution-end event
    val (_, aggTime) = Utils.timeTakenMs {
      listener.onOtherEvent(executionEnd)
      val metrics = store.executionMetrics(executionId)
      assert(metrics.size == numMetrics, s"${metrics.size} != $numMetrics")
    }
    timer.stopTiming()
    kvstore.close()
    Measurements(taskEventsTime, aggTime)
  }

  override def runBenchmarkSuite(mainArgs: Array[String]): Unit = {
    val metricCount = 50
    val taskCount = 100000
    val stageCounts = Seq(1, 2, 3)
    val benchmark = new Benchmark(
      s"metrics aggregation ($metricCount metrics, $taskCount tasks per stage)", 1,
      warmupTime = 0.seconds, output = output)
    // Run this outside the measurement code so that classes are loaded and JIT is triggered,
    // otherwise the first run tends to be much slower than others. Also because this benchmark is a
    // bit weird and doesn't really map to what the Benchmark class expects, so it's a bit harder
    // to use warmupTime and friends effectively.
    stageCounts.foreach { count =>
      metricTrackingBenchmark(new Benchmark.Timer(-1), metricCount, taskCount, count)
    }
    val measurements = mutable.HashMap[Int, Seq[Measurements]]()
    stageCounts.foreach { count =>
      benchmark.addTimerCase(s"$count stage(s)") { timer =>
        val m = metricTrackingBenchmark(timer, metricCount, taskCount, count)
        val all = measurements.getOrElse(count, Nil)
        measurements(count) = all ++ Seq(m)
      }
    }
    benchmark.run()
    // append a small table of averaged fine-grained measurements to the output
    benchmark.out.printf("Stage Count    Stage Proc. Time    Aggreg. Time\\n")
    stageCounts.foreach { count =>
      val data = measurements(count)
      val eventsTimes = data.flatMap(_.taskEventsTimes)
      val aggTimes = data.map(_.aggregationTime)
      val msg = "    %d            %d                 %d\\n".format(
        count,
        eventsTimes.sum / eventsTimes.size,
        aggTimes.sum / aggTimes.size)
      benchmark.out.printf(msg)
    }
  }

  /**
   * Finer-grained measurements of how long it takes to run some parts of the benchmark. This is
   * collected by the benchmark method, so this collection slightly affects the overall benchmark
   * results, but this data helps with seeing where the time is going, since this benchmark is
   * triggering a whole lot of code in the listener class.
   */
  case class Measurements(
      taskEventsTimes: Seq[Long],
      aggregationTime: Long)
}
| ueshin/apache-spark | sql/core/src/test/scala/org/apache/spark/sql/execution/ui/MetricsAggregationBenchmark.scala | Scala | apache-2.0 | 7,852 |
package org.openapitools.client.model
// Generated OpenAPI client model for the Jenkins API. Fields mirror the remote
// JSON properties "classes" and "class" (underscore-prefixed to avoid clashing
// with the Scala `class` keyword); both are optional in responses.
case class ClassesByClass (
  _classes: Option[List[String]],
  _class: Option[String]
)
object ClassesByClass {
  // Renders a JSON-shaped request body for Gatling simulations.
  // NOTE(review): arguments are interpolated verbatim with no JSON quoting or
  // escaping, so callers must pass already-serialized JSON fragments - confirm
  // against the generated simulation code before changing this.
  def toStringBody(var_classes: Object, var_class: Object) =
s"""
| {
| "classes":$var_classes,"class":$var_class
| }
""".stripMargin
}
| cliffano/swaggy-jenkins | clients/scala-gatling/generated/src/gatling/scala/org/openapitools/client/model/ClassesByClass.scala | Scala | mit | 336 |
/*
* Copyright (C) 2016-2017 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.persistence.cassandra.journal
import akka.actor._
import akka.persistence._
import akka.persistence.cassandra.journal.CassandraSslSpec._
import akka.persistence.cassandra.{ CassandraLifecycle, CassandraSpec }
import akka.testkit._
import com.typesafe.config.ConfigFactory
import javax.crypto.Cipher
import org.scalatest._
object CassandraSslSpec {

  /**
   * Builds the test actor-system config. The truststore settings are always
   * applied; keystore settings (for 2-way SSL / client auth) are added only
   * when `keyStore` is true. Keyspace names differ per mode so the two specs
   * don't share Cassandra state.
   */
  def config(keyStore: Boolean) = {
    val trustStoreConfig =
s"""
|akka.persistence.journal.max-deletion-batch-size = 3
|akka.persistence.publish-confirmations = on
|akka.persistence.publish-plugin-commands = on
|cassandra-journal.target-partition-size = 5
|cassandra-journal.max-result-size = 3
|cassandra-journal.keyspace=CassandraSslSpec${if (keyStore) 1 else 2}
|cassandra-snapshot-store.keyspace=CassandraSslSpec${if (keyStore) 1 else 2}Snapshot
|cassandra-snapshot-store.ssl.truststore.path="core/src/test/resources/security/cts_truststore.jks"
|cassandra-snapshot-store.ssl.truststore.password="hbbUtqn3Y1D4Tw"
|cassandra-journal.ssl.truststore.path="core/src/test/resources/security/cts_truststore.jks"
|cassandra-journal.ssl.truststore.password="hbbUtqn3Y1D4Tw"
""".stripMargin
    val keyStoreConfig = if (keyStore) {
s"""
|cassandra-snapshot-store.ssl.keystore.path="core/src/test/resources/security/cts_keystore.jks"
|cassandra-snapshot-store.ssl.keystore.password="5zsGJ0LxnpozNQ"
|cassandra-journal.ssl.keystore.path="core/src/test/resources/security/cts_keystore.jks"
|cassandra-journal.ssl.keystore.password="5zsGJ0LxnpozNQ"
""".stripMargin
    } else ""
    ConfigFactory.parseString(trustStoreConfig + keyStoreConfig).withFallback(CassandraLifecycle.config)
  }

  /**
   * Minimal persistent actor used by the specs: persists each String command
   * and echoes the payload, its sequence number, and the recovery flag back
   * to the sender.
   */
  class ProcessorA(val persistenceId: String) extends PersistentActor {
    def receiveRecover: Receive = handle
    def receiveCommand: Receive = {
      case payload: String =>
        persist(payload)(handle)
    }
    def handle: Receive = {
      case payload: String =>
        sender ! payload
        sender ! lastSequenceNr
        sender ! recoveryRunning
    }
  }
}
/**
 * Shared helpers for the SSL specs: detecting whether unlimited-strength
 * Java Cryptography Extensions (JCE) are installed, and skipping tests
 * when they are not.
 */
trait CassandraSslSpec extends WordSpecLike {

  /** True when unlimited-strength JCE policy files are installed */
  def hasJCESupport: Boolean = Cipher.getMaxAllowedKeyLength("AES") == Int.MaxValue

  /** Marks the current test as pending (with an explanatory note) when JCE is unavailable */
  def skipIfNoJCESupport(): Unit = {
    if (!hasJCESupport) {
      info("Skipping test because Java Cryptography Extensions (JCE) not installed")
      pending
    }
  }
}
/**
 * Exercises the journal against a Cassandra server configured for 2-way SSL
 * (mutual auth), using the keystore-enabled config. Skipped entirely when
 * unlimited-strength JCE is not installed.
 */
class CassandraSslSpecWithClientAuth extends TestKit(ActorSystem("CassandraSslSpecWithClientAuth", config(true)))
  with ImplicitSender
  with WordSpecLike
  with Matchers
  with CassandraLifecycle
  with CassandraSslSpec {

  override def cassandraConfigResource: String = "test-embedded-cassandra-ssl-server-client.yaml"
  override def systemName: String = "CassandraSslSpec"

  // don't start embedded Cassandra at all if the JVM can't do the required crypto
  override protected def beforeAll(): Unit = {
    if (hasJCESupport)
      super.beforeAll()
  }

  "A Cassandra journal with 2-way SSL setup" must {
    "write messages over SSL" in {
      skipIfNoJCESupport()
      val processor1 = system.actorOf(Props(classOf[ProcessorA], "p1"))
      // each persist echoes back: payload, sequence number, recoveryRunning=false
      1L to 16L foreach { i =>
        processor1 ! s"a-${i}"
        expectMsgAllOf(s"a-${i}", i, false)
      }
    }
  }
}
/**
 * Exercises the journal against a Cassandra server configured for 1-way SSL
 * (server auth only - no client keystore). Skipped entirely when
 * unlimited-strength JCE is not installed.
 */
class CassandraSslSpecWithoutClientAuth extends CassandraSpec(config(false)) with CassandraSslSpec {

  override def cassandraConfigResource: String = "test-embedded-cassandra-ssl-server.yaml"

  // don't start embedded Cassandra at all if the JVM can't do the required crypto
  override protected def beforeAll(): Unit = {
    if (hasJCESupport)
      super.beforeAll()
  }

  "A Cassandra journal with 1-way SSL setup" must {
    "write messages over SSL" in {
      skipIfNoJCESupport()
      val processor1 = system.actorOf(Props(classOf[ProcessorA], "p1"))
      // each persist echoes back: payload, sequence number, recoveryRunning=false
      1L to 16L foreach { i =>
        processor1 ! s"a-${i}"
        expectMsgAllOf(s"a-${i}", i, false)
      }
    }
  }
}
| ktoso/akka-persistence-cassandra | core/src/test/scala/akka/persistence/cassandra/journal/CassandraSslSpec.scala | Scala | apache-2.0 | 3,953 |
package io.floyd.web
import spray.routing.authentication.{Authentication, ContextAuthenticator}
import spray.routing.{AuthenticationFailedRejection, RequestContext}
import scala.concurrent.{ExecutionContext, Future}
/** Token based authentication for Spray Routing.
*
* Extracts an API key from the header or querystring and authenticates requests.
*
* TokenAuthenticator[T] takes arguments for the named header/query string containing the API key and
* an authenticator that returns an Option[T]. If None is returned from the authenticator, the request
* is rejected.
*
* Usage:
*
* val authenticator = TokenAuthenticator[User](
* headerName = "My-Api-Key",
* queryStringParameterName = "api_key"
* ) { key =>
* User.findByAPIKey(key)
* }
*
* def auth: Directive1[User] = authenticate(authenticator)
*
* val home = path("home") {
* auth { user =>
* get {
* complete("OK")
* }
* }
* }
*/
object TokenAuthenticator {

  /** Strategies for pulling an API token out of a request */
  object TokenExtraction {
    type TokenExtractor = RequestContext => Option[String]

    // NOTE(review): this compares header names with ==, which is
    // case-sensitive, while HTTP header names are case-insensitive -
    // confirm whether clients always send the exact casing expected here.
    def fromHeader(headerName: String): TokenExtractor = { context: RequestContext =>
      context.request.headers.find(_.name == headerName).map(_.value)
    }

    /** Reads the token from the named query-string parameter */
    def fromQueryString(parameterName: String): TokenExtractor = { context: RequestContext =>
      context.request.uri.query.get(parameterName)
    }
  }

  /**
   * ContextAuthenticator that extracts a token via `extractor` and resolves it
   * with `authenticator`. Missing token -> CredentialsMissing rejection;
   * token that resolves to None -> CredentialsRejected rejection.
   */
  class TokenAuthenticator[T](extractor: TokenExtraction.TokenExtractor, authenticator: (String => Future[Option[T]]))
                             (implicit executionContext: ExecutionContext) extends ContextAuthenticator[T] {
    import spray.routing.AuthenticationFailedRejection._
    def apply(context: RequestContext): Future[Authentication[T]] =
      extractor(context) match {
        case None =>
          Future(
            Left(AuthenticationFailedRejection(CredentialsMissing, List()))
          )
        case Some(token) =>
          authenticator(token) map {
            case Some(t) =>
              Right(t)
            case None =>
              Left(AuthenticationFailedRejection(CredentialsRejected, List()))
          }
      }
  }

  /**
   * Builds an authenticator that looks for the token first in the named header,
   * then falls back to the named query-string parameter.
   */
  def apply[T](headerName: String, queryStringParameterName: String)(authenticator: (String => Future[Option[T]]))
              (implicit executionContext: ExecutionContext) = {
    def extractor(context: RequestContext) =
      TokenExtraction.fromHeader(headerName)(context) orElse
        TokenExtraction.fromQueryString(queryStringParameterName)(context)
    new TokenAuthenticator(extractor, authenticator)
  }
} | floyd-io/floyd-scala | src/main/scala/io/floyd/web/TokenAuthenticator.scala | Scala | lgpl-3.0 | 2,643 |
package org.dbpedia.spotlight.db.memory
import org.dbpedia.spotlight.db.model.QuantizedCountStore
import scala.collection.mutable
/**
 * In-memory store that maps raw counts to compact quantized Short keys.
 * The forward map (quantized -> count) is the serialized state; the reverse
 * map is a transient lookup rebuilt lazily on first use.
 */
class MemoryQuantizedCountStore extends MemoryStore with QuantizedCountStore {

  // quantized key -> raw count; this is the state that gets serialized
  var countMap: java.util.Map[Short, Int] = new java.util.HashMap[Short, Int]()

  /** Returns the raw count for a quantized key */
  def getCount(quantized: Short): Int = countMap.get(quantized)

  // reverse lookup (raw count -> quantized key); transient, so it is null
  // after deserialization and re-created on the next addCount call
  @transient
  var countLookup: mutable.HashMap[Int, Short] = null

  /** Registers a raw count (if new) and returns its quantized key */
  def addCount(count: Int): Short = {
    if (countLookup == null) {
      countLookup = mutable.HashMap[Int, Short]()
    }
    countLookup.getOrElseUpdate(count, {
      // next free key: offset from Short.MinValue + 100 by the current map size
      val quantized = (Short.MinValue + 100 + countMap.size()).toShort
      countMap.put(quantized, count)
      quantized
    })
  }

  /** Number of distinct quantized counts stored */
  def size: Int = countMap.size()
}
| Skunnyk/dbpedia-spotlight-model | core/src/main/scala/org/dbpedia/spotlight/db/memory/MemoryQuantizedCountStore.scala | Scala | apache-2.0 | 822 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.io.{DataInputStream, DataOutputStream}
import java.net.Socket
import java.nio.ByteBuffer
import java.util.Properties
import kafka.api.IntegrationTestHarness
import kafka.network.SocketServer
import kafka.utils.NotNothing
import org.apache.kafka.common.network.ListenerName
import org.apache.kafka.common.protocol.ApiKeys
import org.apache.kafka.common.requests.{AbstractRequest, AbstractResponse, RequestHeader, RequestTestUtils, ResponseHeader}
import org.apache.kafka.common.utils.Utils
import org.apache.kafka.metadata.BrokerState
import scala.annotation.nowarn
import scala.collection.Seq
import scala.reflect.ClassTag
/**
 * Base harness for tests that talk to brokers over raw sockets: provides
 * broker/controller discovery, request serialization/sending, and typed
 * response parsing.
 */
abstract class BaseRequestTest extends IntegrationTestHarness {

  // monotonically increasing correlation id for requests built by this test
  private var correlationId = 0

  // If required, set number of brokers
  override def brokerCount: Int = 3

  // If required, override properties by mutating the passed Properties object
  protected def brokerPropertyOverrides(properties: Properties): Unit = {}

  override def modifyConfigs(props: Seq[Properties]): Unit = {
    props.foreach { p =>
      p.put(KafkaConfig.ControlledShutdownEnableProp, "false")
      brokerPropertyOverrides(p)
    }
  }

  /** Socket server of any broker that is currently running */
  def anySocketServer: SocketServer = {
    servers.find { server =>
      val state = server.brokerState
      state != BrokerState.NOT_RUNNING && state != BrokerState.SHUTTING_DOWN
    }.map(_.socketServer).getOrElse(throw new IllegalStateException("No live broker is available"))
  }

  /** Socket server of the broker that is currently the active controller */
  def controllerSocketServer: SocketServer = {
    servers.find { server =>
      server.kafkaController.isActive
    }.map(_.socketServer).getOrElse(throw new IllegalStateException("No controller broker is available"))
  }

  /** Socket server of some broker that is NOT the active controller */
  def notControllerSocketServer: SocketServer = {
    servers.find { server =>
      !server.kafkaController.isActive
    }.map(_.socketServer).getOrElse(throw new IllegalStateException("No non-controller broker is available"))
  }

  /** Socket server of the broker with the given id */
  def brokerSocketServer(brokerId: Int): SocketServer = {
    servers.find { server =>
      server.config.brokerId == brokerId
    }.map(_.socketServer).getOrElse(throw new IllegalStateException(s"Could not find broker with id $brokerId"))
  }

  /** Opens a plain socket to the given server/listener; caller is responsible for closing it */
  def connect(socketServer: SocketServer = anySocketServer,
              listenerName: ListenerName = listenerName): Socket = {
    new Socket("localhost", socketServer.boundPort(listenerName))
  }

  // writes a size-prefixed request frame to the socket
  private def sendRequest(socket: Socket, request: Array[Byte]): Unit = {
    val outgoing = new DataOutputStream(socket.getOutputStream)
    outgoing.writeInt(request.length)
    outgoing.write(request)
    outgoing.flush()
  }

  /**
   * Reads one size-prefixed response frame and parses it as the expected
   * response type T, failing with ClassCastException on a mismatch.
   */
  def receive[T <: AbstractResponse](socket: Socket, apiKey: ApiKeys, version: Short)
                                    (implicit classTag: ClassTag[T], @nowarn("cat=unused") nn: NotNothing[T]): T = {
    val incoming = new DataInputStream(socket.getInputStream)
    val len = incoming.readInt()
    val responseBytes = new Array[Byte](len)
    incoming.readFully(responseBytes)
    val responseBuffer = ByteBuffer.wrap(responseBytes)
    // header is parsed (and consumed from the buffer) but not returned
    ResponseHeader.parse(responseBuffer, apiKey.responseHeaderVersion(version))
    AbstractResponse.parseResponse(apiKey, responseBuffer, version) match {
      case response: T => response
      case response =>
        throw new ClassCastException(s"Expected response with type ${classTag.runtimeClass}, but found ${response.getClass}")
    }
  }

  /** Sends a request on an existing socket and waits for the typed response */
  def sendAndReceive[T <: AbstractResponse](request: AbstractRequest,
                                            socket: Socket,
                                            clientId: String = "client-id",
                                            correlationId: Option[Int] = None)
                                           (implicit classTag: ClassTag[T], nn: NotNothing[T]): T = {
    send(request, socket, clientId, correlationId)
    receive[T](socket, request.apiKey, request.version)
  }

  /** One-shot convenience: connect, send, receive typed response, close */
  def connectAndReceive[T <: AbstractResponse](request: AbstractRequest,
                                               destination: SocketServer = anySocketServer,
                                               listenerName: ListenerName = listenerName)
                                              (implicit classTag: ClassTag[T], nn: NotNothing[T]): T = {
    val socket = connect(destination, listenerName)
    try sendAndReceive[T](request, socket)
    finally socket.close()
  }

  /**
   * Serializes and sends the request to the given api.
   */
  def send(request: AbstractRequest,
           socket: Socket,
           clientId: String = "client-id",
           correlationId: Option[Int] = None): Unit = {
    val header = nextRequestHeader(request.apiKey, request.version, clientId, correlationId)
    sendWithHeader(request, header, socket)
  }

  /** Serializes the request with an explicit header and sends it */
  def sendWithHeader(request: AbstractRequest, header: RequestHeader, socket: Socket): Unit = {
    val serializedBytes = Utils.toArray(RequestTestUtils.serializeRequestWithHeader(header, request))
    sendRequest(socket, serializedBytes)
  }

  /** Builds the next request header, auto-incrementing the correlation id unless one is supplied */
  def nextRequestHeader[T <: AbstractResponse](apiKey: ApiKeys,
                                               apiVersion: Short,
                                               clientId: String = "client-id",
                                               correlationIdOpt: Option[Int] = None): RequestHeader = {
    val correlationId = correlationIdOpt.getOrElse {
      this.correlationId += 1
      this.correlationId
    }
    new RequestHeader(apiKey, apiVersion, clientId, correlationId)
  }
}
| Chasego/kafka | core/src/test/scala/unit/kafka/server/BaseRequestTest.scala | Scala | apache-2.0 | 6,321 |
package outwatch.helpers
import outwatch._
/**
 * Conditional modifier application: `ops(m1, m2, ...)` yields
 * `VDomModifier(m1, m2, ...)` when `condition` is true and
 * `VDomModifier.empty` otherwise. All arguments are by-name, so they are
 * only evaluated when the condition holds. Implemented as an `AnyVal`
 * value class with `@inline` members to avoid any allocation/call overhead.
 */
@inline class ModifierBooleanOps(val condition: Boolean) extends AnyVal {
  @inline def apply(m: => VDomModifier):VDomModifier = if(condition) VDomModifier(m) else VDomModifier.empty
  @inline def apply(m: => VDomModifier, m2: => VDomModifier):VDomModifier = if(condition) VDomModifier(m,m2) else VDomModifier.empty
  @inline def apply(m: => VDomModifier, m2: => VDomModifier, m3: => VDomModifier):VDomModifier = if(condition) VDomModifier(m,m2,m3) else VDomModifier.empty
  @inline def apply(m: => VDomModifier, m2: => VDomModifier, m3: => VDomModifier, m4: => VDomModifier):VDomModifier = if(condition) VDomModifier(m,m2,m3,m4) else VDomModifier.empty
  @inline def apply(m: => VDomModifier, m2: => VDomModifier, m3: => VDomModifier, m4: => VDomModifier, m5: => VDomModifier):VDomModifier = if(condition) VDomModifier(m,m2,m3,m4,m5) else VDomModifier.empty
  @inline def apply(m: => VDomModifier, m2: => VDomModifier, m3: => VDomModifier, m4: => VDomModifier, m5: => VDomModifier, m6: => VDomModifier):VDomModifier = if(condition) VDomModifier(m,m2,m3,m4,m5,m6) else VDomModifier.empty
  @inline def apply(m: => VDomModifier, m2: => VDomModifier, m3: => VDomModifier, m4: => VDomModifier, m5: => VDomModifier, m6: => VDomModifier, m7: => VDomModifier):VDomModifier = if(condition) VDomModifier(m,m2,m3,m4,m5,m6,m7) else VDomModifier.empty
}
| OutWatch/outwatch | outwatch/src/main/scala/outwatch/helpers/ModifierBooleanOps.scala | Scala | apache-2.0 | 1,390 |
/*
* scala-swing (https://www.scala-lang.org)
*
* Copyright EPFL, Lightbend, Inc., contributors
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.swing
package event
/** Event published by a [[TextField]] when an edit completes; a specialization of [[ValueChanged]]. */
case class EditDone(override val source: TextField) extends ValueChanged(source)
| scala/scala-swing | src/main/scala/scala/swing/event/EditDone.scala | Scala | apache-2.0 | 423 |
package com.overviewdocs.database
import com.overviewdocs.models.tables.FileGroups
import com.overviewdocs.test.DbSpecification
/** DB round-trip test: deleting a FileGroup soft-deletes it (row kept, flag set). */
class FileGroupDeleterSpec extends DbSpecification {
  "FileGroupDeleter" should {
    "mark file_group deleted" in new DbScope {
      val fileGroup = factory.fileGroup()
      val deleter = FileGroupDeleter
      await(deleter.delete(fileGroup.id))
      import database.api._
      // Soft delete: the row must still exist with deleted = true.
      blockingDatabase.option(FileGroups.filter(_.id === fileGroup.id).map(_.deleted)) must beSome(true)
    }
  }
}
| overview/overview-server | worker/src/test/scala/com/overviewdocs/database/FileGroupDeleterSpec.scala | Scala | agpl-3.0 | 529 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.helpers
/** Enrichment helpers for `Iterator`. */
object IteratorSupport {

  /** Adds `toSingleOption` to any `Iterator` via an implicit wrapper. */
  implicit final class RichIterator[T](iterator: Iterator[T]) {

    /**
     * Returns `Some(x)` when the iterator yields exactly one element `x`,
     * and `None` when it is empty or yields two or more elements.
     * Consumes at most one element plus one `hasNext` probe.
     */
    def toSingleOption: Option[T] =
      if (iterator.hasNext) {
        val only = iterator.next()
        if (iterator.hasNext) None else Some(only)
      } else {
        None
      }
  }
}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/helpers/IteratorSupport.scala | Scala | apache-2.0 | 1,179 |
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt.test
import org.scalacheck._, Prop._, util.Pretty
import sbt.internal.util.AttributeKey
import sbt.util.NoJsonWriter
import sbt.{ InputTask, Scope, Task }
import sbt.{ InputKey, Scoped, SettingKey, TaskKey }
import BuildSettingsInstances._
/**
 * ScalaCheck properties verifying that sbt's scoped keys (SettingKey,
 * TaskKey, InputKey) are structurally equal when built from the same
 * label/manifest/scope, and unequal across key kinds.
 */
object ScopedSpec extends Properties("Scoped") {
  val intManifest = manifest[Int]
  val stringManifest = manifest[String]
  // Restrict generated manifests to two concrete ones so equality is decidable.
  implicit val arbManifest: Arbitrary[Manifest[_]] =
    Arbitrary(Gen.oneOf(intManifest, stringManifest))
  property("setting keys are structurally equal") = {
    forAll { (label: Label, manifest: Manifest[_], scope: Scope) =>
      val k1 = settingKey(label, manifest, scope)
      val k2 = settingKey(label, manifest, scope)
      expectEq(k1, k2)
    }
  }
  property("task keys are structurally equal") = {
    forAll { (label: Label, manifest: Manifest[_], scope: Scope) =>
      val k1 = taskKey(label, manifest, scope)
      val k2 = taskKey(label, manifest, scope)
      expectEq(k1, k2)
    }
  }
  property("input keys are structurally equal") = {
    forAll { (label: Label, manifest: Manifest[_], scope: Scope) =>
      val k1 = inputKey(label, manifest, scope)
      val k2 = inputKey(label, manifest, scope)
      expectEq(k1, k2)
    }
  }
  property("different key types are not equal") = {
    forAll { (label: Label, manifest: Manifest[_], scope: Scope) =>
      val settingKey1 = settingKey(label, manifest, scope)
      val taskKey1 = taskKey(label, manifest, scope)
      val inputKey1 = inputKey(label, manifest, scope)
      all(
        expectNe(settingKey1, taskKey1),
        expectNe(settingKey1, inputKey1),
        expectNe(taskKey1, inputKey1),
      )
    }
  }
  property("different key types, with the same manifest, are not equal") = {
    forAll { (label: Label, scope: Scope) =>
      // Even when both keys share the exact same AttributeKey (and therefore
      // the same manifest), the wrapping key kind must break equality.
      val prop1 = {
        val manifest1 = manifest[Task[String]]
        val attrKey = attributeKey(label, manifest1)
        val k1 = SettingKey(attrKey) in scope
        val k2 = TaskKey(attrKey) in scope
        expectNeSameManifest(k1, k2)
      }
      val prop2 = {
        val manifest1 = manifest[InputTask[String]]
        val attrKey = attributeKey(label, manifest1)
        val k1 = SettingKey(attrKey) in scope
        val k2 = InputKey(attrKey) in scope
        expectNeSameManifest(k1, k2)
      }
      all(prop1, prop2)
    }
  }
  /// Key construction helpers.
  def settingKey[A](label: Label, manifest: Manifest[A], scope: Scope): SettingKey[A] = {
    val noJsonWriter = NoJsonWriter[A]()
    SettingKey[A](label.value)(manifest, noJsonWriter) in scope
  }
  def taskKey[A](label: Label, manifest: Manifest[A], s: Scope): TaskKey[A] =
    TaskKey[A](label.value)(manifest) in s
  def inputKey[A](label: Label, manifest: Manifest[A], scope: Scope): InputKey[A] =
    InputKey[A](label.value)(manifest) in scope
  def attributeKey[A](label: Label, manifest: Manifest[A]): AttributeKey[A] = {
    val jsonWriter = NoJsonWriter[A]()
    AttributeKey[A](label.value)(manifest, jsonWriter)
  }
  /// Assertion helpers: equality is checked symmetrically in both directions.
  def expectEq(k1: Scoped, k2: Scoped): Prop =
    ?=(k1, k2) && ?=(k2, k1) map eqLabels(k1, k2)
  def expectNe(k1: Scoped, k2: Scoped): Prop =
    !=(k1, k2) && !=(k2, k1) map eqLabels(k1, k2)
  def expectNeSameManifest(k1: Scoped, k2: Scoped) = {
    all(
      ?=(k1.key.manifest, k2.key.manifest), // sanity check the manifests are the same
      expectNe(k1, k2),
    )
  }
  // Attaches label/manifest/scope comparison details to a property result
  // so failures report which component differed.
  def eqLabels(k1: Scoped, k2: Scoped): Prop.Result => Prop.Result = r => {
    val eqLabel = k1.key.label == k2.key.label
    val eqManifest = k1.key.manifest == k2.key.manifest
    val eqScope = k1.scope == k2.scope
    r.label(s"label equality: ${k1.key.label} == ${k2.key.label} : $eqLabel")
      .label(s"manifest equality: ${k1.key.manifest} == ${k2.key.manifest} : $eqManifest")
      .label(s"scope equality: ${k1.scope} == ${k2.scope} : $eqScope")
  }
  // Custom `proved`/`falsified` wrappers with pretty-printed messages.
  def ?=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
    if (x == y) proved
    else
      falsified :| {
        val act = Pretty.pretty[T](x, Pretty.Params(0))
        val exp = Pretty.pretty[T](y, Pretty.Params(0))
        s"Expected $act to be equal to $exp"
      }
  def !=[T](x: T, y: T)(implicit pp: T => Pretty): Prop =
    if (x == y) falsified
    else
      proved :| {
        val act = Pretty.pretty[T](x, Pretty.Params(0))
        val exp = Pretty.pretty[T](y, Pretty.Params(0))
        s"Expected $act to NOT be equal to $exp"
      }
}
| xuwei-k/xsbt | main-settings/src/test/scala/sbt/ScopedSpec.scala | Scala | apache-2.0 | 4,552 |
import sbt._
import sbt.Keys._
import Path.flat
object LibNotifyBuild extends Build {
import LibNotifyBuildKeys._
  /** sbt keys for the native (javah + gcc) build pipeline. */
  object LibNotifyBuildKeys {
    val generateNativeHeaders = taskKey[Unit]("Generates native headers from the compiled classes (triggers compile if project not compiled)")
    val javahClasses = settingKey[Seq[String]]("Defines full qualified names of the classes, which will be passed to the javah")
    val javahOutputDirectory = settingKey[File]("Directory, where the javah generated files are placed")
    val jvmHeaders = settingKey[Seq[File]]("Header provided by the JVM")
    val compileNative = taskKey[File]("Executes compilation of the native code")
    val compileNativeLibs = settingKey[Seq[String]]("Names of external libraries which will be linked during native compilation")
    val compileNativeArtifactName = settingKey[String]("Name of the generated library file")
    val nativeDirectory = settingKey[File]("Directory where the native sources are located")
    val nativeSources = taskKey[Seq[File]]("Returns all native source files")
    val nativeHeaders = taskKey[Seq[File]]("Returns all native header files")
  }
  /** Single-module project carrying all native-build settings. */
  lazy val root = Project(
    id = "sbt-libnotify-plugin",
    base = file(".")
  ).settings(libNotifySettings: _*)
  /** Wires key defaults, hooks native compilation into `compile`, and packages C sources. */
  lazy val libNotifySettings: Seq[Setting[_]] = Seq(
    javahClasses := Seq("it.paperdragon.sbt.LibNotify$"),
    compileNativeLibs := Seq("glib-2.0", "libnotify"),
    nativeDirectory := baseDirectory.value / "src" / "main" / "native",
    javahOutputDirectory := nativeDirectory.value,
    // Fall back to the running JVM's home when no javaHome is configured.
    jvmHeaders := standardJavaIncludes((javaHome in Compile).value.getOrElse(file(System.getProperty("java.home")))),
    compileNativeArtifactName := "libLibNotify.so",
    generateNativeHeaders := generateNativeHeadersTask.value,
    compileNative := compileNativeTask.value,
    // Piggy-back native compilation onto `compile`; the compile analysis
    // itself is returned unchanged, the native task runs for its side effect.
    (compile in Compile) := {
      val analysis = (compile in Compile).value
      val extensionLocation = compileNative.value
      analysis
    },
    products in Compile += (target in Compile).value / "native",
    nativeSources := nativeSourcesTask.value,
    nativeHeaders := nativeHeadersTask.value,
    // Include the C sources and headers in the source artifact.
    (unmanagedSources in(Compile, packageSrc)) ++= nativeHeaders.value ++ nativeSources.value,
    (mappings in(Compile, packageSrc)) ++= (nativeHeaders.value ++ nativeSources.value) pair flat
  )
  /** All `.c` files under `nativeDirectory` (recursive glob). */
  lazy val nativeSourcesTask = Def.task[Seq[File]] {
    (nativeDirectory.value ** "*.c").get
  }
  /** All `.h` files under `nativeDirectory` (recursive glob). */
  lazy val nativeHeadersTask = Def.task[Seq[File]] {
    (nativeDirectory.value ** "*.h").get
  }
lazy val generateNativeHeadersTask = Def.task[Unit] {
val fullClasspathInCompile = (fullClasspath in Compile).value.files.mkString(java.io.File.pathSeparator)
val outputDirectory = javahOutputDirectory.value.getAbsolutePath
streams.value.log.info(s"Generated header files to $outputDirectory")
("javah" :: "-cp" :: fullClasspathInCompile :: "-d" :: outputDirectory :: javahClasses.value.mkString(" ") :: Nil).!
} dependsOn (compile in Compile)
  /**
   * Compiles all C sources into one shared library with gcc.
   * Compiler and linker flags are obtained from `pkg-config` for each entry
   * in `compileNativeLibs`; the backtick substitution is why the final
   * command must be run through `sh -c`. Returns the produced library file,
   * or fails the task on a non-zero exit code.
   */
  lazy val compileNativeTask = Def.task {
    val log = streams.value.log
    val cFiles = nativeSources.value.mkString(" ")
    log.debug(s"Discovered C sources: $cFiles")
    val compilerFlags = compileNativeLibs.value.map { lib => s"`pkg-config --cflags $lib`"}.mkString(" ")
    log.debug(s"Generated compiler flags: $compilerFlags")
    val linkerOptions = compileNativeLibs.value.map { lib => s"`pkg-config --libs $lib`"}.mkString(" ")
    log.debug(s"Generated linked options: $linkerOptions")
    val jvmIncludes = jvmHeaders.value.map(header => s"-I ${header.getAbsolutePath}").mkString(" ")
    log.debug(s"Using JVM includes: $jvmIncludes")
    val outputDir = (target in Compile).value / "native"
    IO.createDirectory(outputDir)
    val outputFile = outputDir / compileNativeArtifactName.value
    log.debug(s"Compilation output $outputFile")
    val command = s"gcc -Wall -shared -fPIC $jvmIncludes $compilerFlags $cFiles $linkerOptions -o $outputFile"
    log.debug(s"Compilation command: $command")
    // this is a bit tricky as we want to evaluate `pkg-config` in bash
    val exitCode = ("sh" :: "-c" :: command :: Nil).!
    if (exitCode != 0) sys.error(s"Compilation failed. Exit code: $exitCode") else outputFile
  }
  /**
   * Gets standard java includes for the given path.
   * Covers both JDK-root and JRE-root layouts (hence the `../include` variants)
   * plus the Linux-specific subdirectories.
   * @param javaHome the java home
   * @return the includes which should be included in the native compilation process
   */
  private def standardJavaIncludes(javaHome: File): Seq[File] =
    List("include", "../include", "include/linux", "../include/linux").map(javaHome / _)
} | lpiepiora/sbt-libnotify-plugin | project/LibNotifyBuild.scala | Scala | apache-2.0 | 4,617 |
/*
* Copyright (c) 2015 Lucas Satabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package toolxit
/** Reading states used while scanning input lines (see per-value comments). */
object ReadingState extends Enumeration {
  // reading state for input reading
  // N = new line (at the beginning of a line)
  // M = middle of a line
  // S = skipping blanks
  val N, M, S = Value
}
| satabin/toolxit-ng | core/src/main/scala/toolxit/ReadingState.scala | Scala | apache-2.0 | 772 |
package io.github.rlazoti.servicestats.infrastructure
import io.github.rlazoti.servicestats.utils.ExecutionContextProvider
import redis.RedisClient
/**
 * Mixin supplying a shared, implicitly-available Redis client.
 * NOTE(review): host/port are hard-coded to localhost:6379; the TODO below
 * about reading them from configuration still applies.
 */
trait Redis extends ExecutionContextProvider {
  //TODO use config to get host and port from a properties file
  private val redisHost = "localhost"
  private val redisPort = 6379
  implicit val client = RedisClient(host = redisHost, port = redisPort)
}
| rlazoti/microservice-dependency-graph | service-stats/src/main/scala/io/github/rlazoti/servicestats/infrastructure/Redis.scala | Scala | mit | 404 |
/*
* Copyright 2014 – 2018 Paul Horn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalax.transducers
package internal
/** Composition of two transducers: wraps `rf` with `right` first, then `left`. */
private[transducers] final class CombinedTransducer[A, B, C](left: Transducer[A, B], right: Transducer[B, C]) extends Transducer[A, C] {
  def apply[R](rf: Reducer[C, R]): Reducer[A, R] =
    left(right(rf))
  override def toString: String = s"$left.$right"
}
/** Produces no output: wraps `rf` in an [[EmptyReducer]]. */
private[transducers] final class EmptyTransducer[A, B] extends Transducer[A, B] {
  def apply[R](rf: Reducer[B, R]): Reducer[A, R] =
    new EmptyReducer[A, R](rf)
  override def toString: String = "(empty)"
}
/** Identity transducer; combining with it simply returns the other transducer. */
private[transducers] final class NoOpTransducer[A] extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new NoOpReducer[A, R](rf)
  override private[transducers] def combineWith[C](that: Transducer[A, C]): Transducer[A, C] =
    that
  override def toString: String = "(noop)"
}
/** Supplies the by-name fallback `cont` via an [[OrElseReducer]]. */
private[transducers] final class OrElseTransducer[A](cont: ⇒ A) extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new OrElseReducer[A, R](rf, cont)
  override def toString: String = "(orElse)"
}
/** Runs `f` on each element for its side effect; delegates to [[ForeachReducer]]. */
private[transducers] final class ForeachTransducer[A](f: A ⇒ Unit) extends Transducer[A, Unit] {
  def apply[R](rf: Reducer[Unit, R]): Reducer[A, R] =
    new ForeachReducer[A, R](rf, f)
  override def toString: String = "(foreach)"
}
/** Transforms each element with `f`; delegates to [[MapReducer]]. */
private[transducers] final class MapTransducer[A, B](f: A ⇒ B) extends Transducer[A, B] {
  def apply[R](rf: Reducer[B, R]): Reducer[A, R] =
    new MapReducer[B, A, R](rf, f)
  override def toString: String = "(map)"
}
/** Maps each element to a source `F[B]` and flattens; delegates to [[FlatMapReducer]]. */
private[transducers] final class FlatMapTransducer[A, B, F[_]: AsSource](f: A ⇒ F[B]) extends Transducer[A, B] {
  def apply[R](rf: Reducer[B, R]): Reducer[A, R] =
    new FlatMapReducer[A, B, R, F](rf, f)
  override def toString: String = "(flatMap)"
}
/** Keeps only elements satisfying `f`; delegates to [[FilterReducer]]. */
private[transducers] final class FilterTransducer[A](f: A ⇒ Boolean) extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new FilterReducer[A, R](rf, f)
  override def toString: String = "(filter)"
}
/** Filter + map in one pass via a partial function; delegates to [[CollectReducer]]. */
private[transducers] final class CollectTransducer[A, B](pf: PartialFunction[A, B]) extends Transducer[A, B] {
  def apply[R](rf: Reducer[B, R]): Reducer[A, R] =
    new CollectReducer[A, B, R](rf, pf)
  override def toString: String = "(collect)"
}
/** Emits running accumulations starting from `z`; delegates to [[ScanReducer]]. */
private[transducers] final class ScanTransducer[A, B](z: B, f: (B, A) ⇒ B) extends Transducer[A, B] {
  def apply[R](rf: Reducer[B, R]): Reducer[A, R] =
    new ScanReducer[A, B, R](rf, z, f)
  override def toString: String = "(scan)"
}
/** Threads a state `S` through the stream, emitting the `B` of each step; delegates to [[FoldAlongReducer]]. */
private[transducers] final class FoldAlongTransducer[A, B, S](z: S, f: (S, A) ⇒ (S, B)) extends Transducer[A, B] {
  def apply[R](rf: Reducer[B, R]): Reducer[A, R] =
    new FoldAlongReducer[A, B, S, R](rf, z, f)
  override def toString: String = "(fold-along)"
}
/** Passes through the first `n` elements; delegates to [[TakeReducer]]. */
private[transducers] final class TakeTransducer[A](n: Long) extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new TakeReducer[A, R](rf, n)
  override def toString: String = s"(take $n)"
}
/** Passes elements while `f` holds; delegates to [[TakeWhileReducer]]. */
private[transducers] final class TakeWhileTransducer[A](f: A ⇒ Boolean) extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new TakeWhileReducer[A, R](rf, f)
  override def toString: String = "(takeWhile)"
}
/** Keeps only the last `n` elements; delegates to [[TakeRightReducer]]. */
private[transducers] final class TakeRightTransducer[A](n: Int) extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new TakeRightReducer[A, R](rf, n)
  override def toString: String = s"(takeRight $n)"
}
/** Keeps only the final element; delegates to [[LastReducer]]. */
private[transducers] final class LastTransducer[A] extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new LastReducer[A, R](rf)
  override def toString: String = s"(last)"
}
/** Keeps every n-th element; delegates to [[TakeNthReducer]]. */
private[transducers] final class TakeNthTransducer[A](n: Long) extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new TakeNthReducer[A, R](rf, n)
  override def toString: String = s"(takeNth $n)"
}
/** Skips the first `n` elements; delegates to [[DropReducer]]. */
private[transducers] final class DropTransducer[A](n: Long) extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new DropReducer[A, R](rf, n)
  override def toString: String = s"(drop $n)"
}
/** Skips elements while `f` holds; delegates to [[DropWhileReducer]]. */
private[transducers] final class DropWhileTransducer[A](f: A ⇒ Boolean) extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new DropWhileReducer[A, R](rf, f)
  override def toString: String = "(dropWhile)"
}
/** Skips the last `n` elements; delegates to [[DropRightReducer]]. */
private[transducers] final class DropRightTransducer[A](n: Int) extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new DropRightReducer[A, R](rf, n)
  override def toString: String = s"(dropRight $n)"
}
/** Skips every n-th element; delegates to [[DropNthReducer]]. */
private[transducers] final class DropNthTransducer[A](n: Long) extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new DropNthReducer[A, R](rf, n)
  override def toString: String = s"(dropNth $n)"
}
/** Suppresses duplicate elements; delegates to [[DistinctReducer]]. */
private[transducers] final class DistinctTransducer[A] extends Transducer[A, A] {
  def apply[R](rf: Reducer[A, R]): Reducer[A, R] =
    new DistinctReducer[A, R](rf)
  override def toString: String = "(distinct)"
}
/** Pairs each element with its index; delegates to [[ZipWithIndexReducer]]. */
private[transducers] final class ZipWithIndexTransducer[A] extends Transducer[A, (A, Int)] {
  def apply[R](rf: Reducer[(A, Int), R]): Reducer[A, R] =
    new ZipWithIndexReducer[A, R](rf)
  override def toString: String = "(zipWithIndex)"
}
/** Emits fixed-size groups of `n` into target containers `F`; delegates to [[GroupedReducer]]. */
private[transducers] final class GroupedTransducer[A, F[_]: AsTarget](n: Int) extends Transducer[A, F[A]] {
  def apply[R](rf: Reducer[F[A], R]): Reducer[A, R] =
    new GroupedReducer[A, R, F](rf, n)
  override def toString: String = s"(grouped $n)"
}
/** Groups consecutive elements by key `f` into containers `F`; delegates to [[GroupByReducer]]. */
private[transducers] final class GroupByTransducer[A, B <: AnyRef, F[_]: AsTarget](f: A ⇒ B) extends Transducer[A, F[A]] {
  def apply[R](rf: Reducer[F[A], R]): Reducer[A, R] =
    new GroupByReducer[A, B, R, F](rf, f)
  override def toString: String = "(groupBy)"
}
| knutwalker/transducers-scala | core/src/main/scala/scalax/transducers/internal/transducers.scala | Scala | apache-2.0 | 6,436 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.operator
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.TupleTypeInfoBase
import org.apache.flink.api.scala.{SelectByMaxFunction, SelectByMinFunction}
import org.apache.flink.api.scala._
import org.junit.{Assert, Test}
/**
*
*/
/**
 * Unit tests for [[SelectByMaxFunction]] and [[SelectByMinFunction]] on
 * 5-field Scala tuples. Each function is exercised through `reduce` directly,
 * so no Flink runtime is needed.
 */
class SelectByFunctionTest {

  // Type information for the tuple shape shared by every fixture below.
  val tupleTypeInfo = implicitly[TypeInformation[(Int, Long, String, Long, Int)]]
    .asInstanceOf[TupleTypeInfoBase[(Int, Long, String, Long, Int)]]

  private val bigger = (10, 100L, "HelloWorld", 200L, 20)
  private val smaller = (5, 50L, "Hello", 50L, 15)

  // Special case where only the last value determines if bigger or smaller
  private val specialCaseBigger = (10, 100L, "HelloWorld", 200L, 17)
  private val specialCaseSmaller = (5, 50L, "Hello", 50L, 17)

  // ----------------------- MAXIMUM FUNCTION TESTS --------------------------

  /**
   * Validates that the order of the arguments has no impact on the outcome
   * and that the bigger tuple is returned.
   */
  @Test
  def testMaxByComparison(): Unit = {
    val a1 = Array(0)
    val maxByTuple = new SelectByMaxFunction(tupleTypeInfo, a1)
    try {
      Assert.assertSame("SelectByMax must return bigger tuple",
        bigger, maxByTuple.reduce(smaller, bigger))
      Assert.assertSame("SelectByMax must return bigger tuple",
        bigger, maxByTuple.reduce(bigger, smaller))
    } catch {
      case _: Exception =>
        Assert.fail("No exception should be thrown while comparing both tuples")
    }
  }

  /**
   * When two tuples differ only in one value and that value is NOT among the
   * selected fields, they compare as equal, so reduce() must return its
   * first argument.
   */
  @Test
  def testMaxByComparisonSpecialCase1() : Unit = {
    val a1 = Array(0, 3)
    val maxByTuple = new SelectByMaxFunction(tupleTypeInfo, a1)
    try {
      Assert.assertSame("SelectByMax must return the first given tuple",
        specialCaseBigger, maxByTuple.reduce(specialCaseBigger, bigger))
      Assert.assertSame("SelectByMax must return the first given tuple",
        bigger, maxByTuple.reduce(bigger, specialCaseBigger))
    } catch {
      case _: Exception =>
        Assert.fail("No exception should be thrown while comparing both tuples")
    }
  }

  /**
   * When two tuples differ only in one value that IS among the selected
   * fields, the bigger tuple must win regardless of argument order.
   */
  @Test
  def testMaxByComparisonSpecialCase2() : Unit = {
    val a1 = Array(0, 2, 1, 4, 3)
    val maxByTuple = new SelectByMaxFunction(tupleTypeInfo, a1)
    try {
      Assert.assertSame("SelectByMax must return bigger tuple",
        bigger, maxByTuple.reduce(specialCaseBigger, bigger))
      Assert.assertSame("SelectByMax must return bigger tuple",
        bigger, maxByTuple.reduce(bigger, specialCaseBigger))
    } catch {
      case _: Exception =>
        Assert.fail("No exception should be thrown while comparing both tuples")
    }
  }

  /**
   * Selecting by all indices must still return the bigger tuple,
   * independently of argument order.
   */
  @Test
  def testMaxByComparisonMultiple(): Unit = {
    val a1 = Array(0, 1, 2, 3, 4)
    val maxByTuple = new SelectByMaxFunction(tupleTypeInfo, a1)
    try {
      Assert.assertSame("SelectByMax must return bigger tuple",
        bigger, maxByTuple.reduce(smaller, bigger))
      Assert.assertSame("SelectByMax must return bigger tuple",
        bigger, maxByTuple.reduce(bigger, smaller))
    } catch {
      case _: Exception =>
        Assert.fail("No exception should be thrown while comparing both tuples")
    }
  }

  /**
   * Checks that reduce() behaves as expected when both arguments are the
   * same object: that very object must be returned.
   */
  @Test
  def testMaxByComparisonMustReturnATuple() : Unit = {
    val a1 = Array(0)
    val maxByTuple = new SelectByMaxFunction(tupleTypeInfo, a1)
    try {
      Assert.assertSame("SelectByMax must return bigger tuple",
        bigger, maxByTuple.reduce(bigger, bigger))
      Assert.assertSame("SelectByMax must return smaller tuple",
        smaller, maxByTuple.reduce(smaller, smaller))
    } catch {
      case _: Exception =>
        Assert.fail("No exception should be thrown while comparing both tuples")
    }
  }

  // ----------------------- MINIMUM FUNCTION TESTS --------------------------

  /**
   * Validates that the order of the arguments has no impact on the outcome
   * and that the smaller tuple is returned.
   */
  @Test
  def testMinByComparison() : Unit = {
    val a1 = Array(0)
    val minByTuple = new SelectByMinFunction(tupleTypeInfo, a1)
    try {
      Assert.assertSame("SelectByMin must return smaller tuple",
        smaller, minByTuple.reduce(smaller, bigger))
      Assert.assertSame("SelectByMin must return smaller tuple",
        smaller, minByTuple.reduce(bigger, smaller))
    } catch {
      case _: Exception =>
        Assert.fail("No exception should be thrown while comparing both tuples")
    }
  }

  /**
   * When two tuples differ only in one value and that value is NOT among the
   * selected fields, they compare as equal, so reduce() must return its
   * first argument.
   */
  @Test
  def testMinByComparisonSpecialCase1() : Unit = {
    val a1 = Array(0, 3)
    val minByTuple = new SelectByMinFunction(tupleTypeInfo, a1)
    try {
      Assert.assertSame("SelectByMin must return the first given tuple",
        specialCaseBigger, minByTuple.reduce(specialCaseBigger, bigger))
      Assert.assertSame("SelectByMin must return the first given tuple",
        bigger, minByTuple.reduce(bigger, specialCaseBigger))
    } catch {
      case _: Exception =>
        Assert.fail("No exception should be thrown while comparing both tuples")
    }
  }

  /**
   * When two tuples differ only in one value whose index is among the
   * selected fields, the smaller tuple must be returned regardless of
   * argument order.
   */
  @Test
  def testMinByComparisonSpecialCase2() : Unit = {
    val a1 = Array(0, 2, 1, 4, 3)
    val minByTuple = new SelectByMinFunction(tupleTypeInfo, a1)
    try {
      Assert.assertSame("SelectByMin must return smaller tuple",
        smaller, minByTuple.reduce(specialCaseSmaller, smaller))
      Assert.assertSame("SelectByMin must return smaller tuple",
        smaller, minByTuple.reduce(smaller, specialCaseSmaller))
    } catch {
      case _: Exception =>
        Assert.fail("No exception should be thrown while comparing both tuples")
    }
  }

  /**
   * Selecting by all indices must still return the smaller tuple,
   * independently of argument order.
   */
  @Test
  def testMinByComparisonMultiple() : Unit = {
    val a1 = Array(0, 1, 2, 3, 4)
    val minByTuple = new SelectByMinFunction(tupleTypeInfo, a1)
    try {
      Assert.assertSame("SelectByMin must return smaller tuple",
        smaller, minByTuple.reduce(smaller, bigger))
      Assert.assertSame("SelectByMin must return smaller tuple",
        smaller, minByTuple.reduce(bigger, smaller))
    } catch {
      case _: Exception =>
        Assert.fail("No exception should be thrown while comparing both tuples")
    }
  }
}
| oscarceballos/flink-1.3.2 | flink-scala/src/test/scala/org/apache/flink/api/operator/SelectByFunctionTest.scala | Scala | apache-2.0 | 8,101 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.cli
import org.apache.spark.Logging
import scala.Some
import scala.collection.mutable.ListBuffer
import org.bdgenomics.adam.util.ParquetLogger
import java.util.logging.Level._
/** Command-line entry point that dispatches to the registered ADAM sub-commands. */
object ADAMMain extends Logging {
  // A named group of related sub-commands; used only for help output.
  case class CommandGroup(name: String, commands: List[ADAMCommandCompanion])
  private val commandGroups =
    List(
      CommandGroup(
        "ADAM ACTIONS",
        List(
          CalculateDepth,
          CountReadKmers,
          CountContigKmers,
          Transform,
          Adam2Fastq,
          /* TODO (nealsid): Reimplement in terms of new schema
          ComputeVariants
          */
          PluginExecutor
        )
      ),
      CommandGroup(
        "CONVERSION OPERATIONS",
        List(
          Bam2ADAM,
          Vcf2ADAM,
          VcfAnnotation2ADAM,
          ADAM2Vcf,
          Fasta2ADAM,
          Features2ADAM,
          WigFix2Bed
        )
      ),
      CommandGroup(
        "PRINT",
        List(
          PrintADAM,
          PrintGenes,
          FlagStat,
          PrintTags,
          ListDict,
          AlleleCount,
          BuildInformation,
          View
        )
      )
    )
  // Prints the ASCII banner followed by every command, grouped by category.
  private def printCommands() {
    println("\n")
    println(""" e 888~-_ e e e
 | d8b 888 \ d8b d8b d8b
 | /Y88b 888 | /Y88b d888bdY88b
 | / Y88b 888 | / Y88b / Y88Y Y888b
 | /____Y88b 888 / /____Y88b / YY Y888b
 |/ Y88b 888_-~ / Y88b / Y888b""".stripMargin('|'))
    println("\nChoose one of the following commands:")
    commandGroups.foreach { grp =>
      println("\n%s".format(grp.name))
      grp.commands.foreach(cmd =>
        println("%20s : %s".format(cmd.commandName, cmd.commandDescription)))
    }
    println("\n")
  }
  def main(args: Array[String]) {
    log.info("ADAM invoked with args: %s".format(argsToString(args)))
    if (args.size < 1) {
      printCommands()
    } else {
      // Flatten all groups into one list and look the command up by name (args(0)).
      val commands =
        for {
          grp <- commandGroups
          cmd <- grp.commands
        } yield cmd
      commands.find(_.commandName == args(0)) match {
        case None => printCommands()
        case Some(cmd) =>
          // Remaining args belong to the sub-command itself.
          init(Args4j[InitArgs](args drop 1, ignoreCmdLineExceptions = true))
          cmd.apply(args drop 1).run()
      }
    }
  }
  // Attempts to format the `args` array into a string in a way
  // suitable for copying and pasting back into the shell.
  private def argsToString(args: Array[String]): String = {
    def escapeArg(s: String) = "\"" + s.replaceAll("\\\"", "\\\\\"") + "\""
    args.map(escapeArg).mkString(" ")
  }
  class InitArgs extends Args4jBase with ParquetArgs {}
  private def init(args: InitArgs) {
    // Set parquet logging (default: severe)
    ParquetLogger.hadoopLoggerLevel(parse(args.logLevel))
  }
}
| tomwhite/adam | adam-cli/src/main/scala/org/bdgenomics/adam/cli/ADAMMain.scala | Scala | apache-2.0 | 3,825 |
/*
* Copyright 2017-2022 John Snow Labs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.johnsnowlabs.nlp
import com.johnsnowlabs.nlp.AnnotatorType.TOKEN
import org.apache.spark.sql.Dataset
import scala.collection.mutable
/**
 * Test helpers for extracting [[Annotation]] rows from a Spark Dataset
 * column and comparing them field-by-field against expected annotations.
 */
object AssertAnnotations {

  /**
   * Collects the annotations stored in `columnName` of `dataSet`.
   * Reads the result/metadata/begin/end sub-fields of the column and
   * rebuilds one TOKEN [[Annotation]] per element, preserving row order.
   */
  def getActualResult(dataSet: Dataset[_], columnName: String): Array[Seq[Annotation]] = {
    val result = columnName + ".result"
    val metadata = columnName + ".metadata"
    val begin = columnName + ".begin"
    val end = columnName + ".end"
    dataSet.select(result, metadata, begin, end).rdd.map{ row=>
      val resultSeq: Seq[String] = row.get(0).asInstanceOf[mutable.WrappedArray[String]]
      val metadataSeq: Seq[Map[String, String]] = row.get(1).asInstanceOf[mutable.WrappedArray[Map[String, String]]]
      val beginSeq: Seq[Int] = row.get(2).asInstanceOf[mutable.WrappedArray[Int]]
      val endSeq: Seq[Int] = row.get(3).asInstanceOf[mutable.WrappedArray[Int]]
      resultSeq.zipWithIndex.map{ case (token, index) =>
        Annotation(TOKEN, beginSeq(index), endSeq(index), token, metadataSeq(index))
      }
    }.collect()
  }

  /**
   * Asserts that every actual annotation matches the expected one on
   * result, begin, end and metadata; fails with a message naming the
   * mismatching field.
   */
  def assertFields(expectedResult: Array[Seq[Annotation]], actualResult: Array[Seq[Annotation]]): Unit = {
    expectedResult.zipWithIndex.foreach { case (expectedAnnotationDocument, indexDocument) =>
      val actualDocument = actualResult(indexDocument)
      expectedAnnotationDocument.zipWithIndex.foreach { case (expectedAnnotation, index) =>
        val actualResult = actualDocument(index).result
        val actualBegin = actualDocument(index).begin
        val actualEnd = actualDocument(index).end
        val actualMetadata = actualDocument(index).metadata
        val expectedResult = expectedAnnotation.result
        val expectedBegin = expectedAnnotation.begin
        val expectedEnd = expectedAnnotation.end
        val expectedMetadata = expectedAnnotation.metadata
        assert(actualResult == expectedResult, s"actual result $actualResult != expected result $expectedResult")
        // Fixed copy-paste message bugs: the begin message previously said
        // "expected result", and the metadata message said "actual begin".
        assert(actualBegin == expectedBegin, s"actual begin $actualBegin != expected begin $expectedBegin")
        assert(actualEnd == expectedEnd, s"actual end $actualEnd != expected end $expectedEnd")
        assert(actualMetadata == expectedMetadata, s"actual metadata $actualMetadata != expected metadata $expectedMetadata")
      }
    }
  }
}
| JohnSnowLabs/spark-nlp | src/test/scala/com/johnsnowlabs/nlp/AssertAnnotations.scala | Scala | apache-2.0 | 2,875 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.trees
import scala.collection.mutable.ArrayBuffer
import org.scalatest.FunSuite
import org.apache.spark.sql.catalyst.expressions._
/** Exercises the basic TreeNode operations: transform (down/up), map and collect. */
class TreeNodeSuite extends FunSuite {

  test("top node changed") {
    // Transforming the root itself yields a brand-new root node.
    val transformed = Literal(1).transform { case Literal(1, _) => Literal(2) }
    assert(transformed === Literal(2))
  }

  test("one child changed") {
    // Only the matching child is replaced; the parent is rebuilt around it.
    val original = Add(Literal(1), Literal(2))
    val transformed = original.transform { case Literal(2, _) => Literal(1) }
    assert(transformed === Add(Literal(1), Literal(1)))
  }

  test("no change") {
    // A rule that matches nothing must return a tree equal to the input,
    // with every node id preserved (i.e. no needless copying).
    val original = Add(Literal(1), Add(Literal(2), Add(Literal(3), Literal(4))))
    val transformed = original.transform { case Literal(5, _) => Literal(1) }
    assert(original === transformed)
    assert(original.map(_.id) === transformed.map(_.id))
  }

  test("collect") {
    // collect gathers every node matched by the partial function.
    val tree = Add(Literal(1), Add(Literal(2), Add(Literal(3), Literal(4))))
    val literals = tree.collect { case l: Literal => l }
    assert(literals.size === 4)
    (1 to 4).foreach(i => assert(literals.contains(Literal(i))))
  }

  test("pre-order transform") {
    // transformDown must visit parents before children, left to right.
    val visited = new ArrayBuffer[String]()
    val expectedOrder = Seq("+", "1", "*", "2", "-", "3", "4")
    val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
    expression.transformDown {
      case b: BinaryExpression => visited.append(b.symbol); b
      case l: Literal => visited.append(l.toString); l
    }
    assert(expectedOrder === visited)
  }

  test("post-order transform") {
    // transformUp must visit children before their parents.
    val visited = new ArrayBuffer[String]()
    val expectedOrder = Seq("1", "2", "3", "4", "-", "*", "+")
    val expression = Add(Literal(1), Multiply(Literal(2), Subtract(Literal(3), Literal(4))))
    expression.transformUp {
      case b: BinaryExpression => visited.append(b.symbol); b
      case l: Literal => visited.append(l.toString); l
    }
    assert(expectedOrder === visited)
  }
}
| zhangjunfang/eclipse-dir | spark/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/trees/TreeNodeSuite.scala | Scala | bsd-2-clause | 2,696 |
package com.ru.waka
import com.google.api.client.auth.oauth2.Credential
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport
import com.google.api.client.json.jackson2.JacksonFactory
import com.google.api.client.util.DateTime
import com.google.api.services.calendar.Calendar
import com.ru.waka.repository.Event
import org.joda.time.{DateTime => JodaDateTime}
import scala.collection.JavaConverters._
import scalaz.\\/
/**
 * Thin wrapper around the Google Calendar API client.
 *
 * @param credential OAuth2 credential used to authorize requests
 * @param calendarId id of the calendar whose events are fetched
 */
class CalendarClient(credential: Credential, calendarId: String) {
  private val jsonFactory = JacksonFactory.getDefaultInstance
  private val transport = GoogleNetHttpTransport.newTrustedTransport()
  private val applicationName = "dark-api"
  private val service =
    new Calendar.Builder(transport, jsonFactory, credential).setApplicationName(applicationName).build()

  /**
   * Fetches up to 10 upcoming events (time minimum = now) from `calendarId`,
   * mapped to domain [[Event]]s. Any thrown exception is captured on the left
   * of the disjunction.
   *
   * Fix: the original called `list("primary")` and then immediately overrode
   * the id with `.setCalendarId(calendarId)`; the misleading hard-coded
   * "primary" literal is gone and the id is passed directly.
   */
  def getEvents: Throwable \\/ Seq[Event] = \\/.fromTryCatchNonFatal {
    service.events().list(calendarId)
      .setMaxResults(10)
      .setTimeMin(new DateTime(System.currentTimeMillis()))
      .execute().getItems.asScala.map { e =>
        Event(
          e.getSummary,
          e.getDescription,
          new JodaDateTime(e.getStart.getDateTime.getValue),
          e.getLocation)
      }
  }
}
| ngineerxiv/dark-api | src/main/scala/com/ru/waka/CalendarClient.scala | Scala | mit | 1,257 |
package domain
import play.api.libs.json.Json
import org.specs2.mutable._
// Verifies that the InstagramAuth JSON codec round-trips: parsing the expected
// payload shape into the case class and re-serializing yields the same JSON.
class InstagramAuthSpec extends Specification {
"InstagramAuth" >> {
"parse json into case class" >> {
// Payload mirroring Instagram's auth response: an access token plus a
// nested user object with snake_case field names.
val json = Json.obj(
"accessToken" -> "test123",
"user" -> Json.obj(
"id" -> "testid",
"username" -> "testusername",
"full_name" -> "testfullname",
"profile_picture" -> "testprofilepicture"
)
)
// Round-trip: read then write must reproduce the original JSON exactly.
val caseClass = json.as[InstagramAuth]
json mustEqual Json.toJson(caseClass)
}
}
} | sofiaaacole/socialDashboard | test/domain/InstagramAuthSpec.scala | Scala | mit | 585 |
package org.objectify.responders.serializers
/**
* @author Joe Gaudet - (joe@learndot.com)
*/
/**
 * Global registry of the serializers used by responders.
 *
 * The two `var`s hold the active implementations and may be reassigned by the
 * application to swap serialization strategies at runtime.
 */
object SerializerRegistry {
  // Default implementations; mutable on purpose so callers can override them.
  var jsonSerializer: JsonSerializer = new JerksonJsonSerializers
  var xmlSerializer: XMLSerializer = new NotImplementedXmlSerializer

  /** Serializes `any` to a JSON string via the currently registered serializer. */
  def toJson(any: Any): String = jsonSerializer(any)

  /** Serializes `any` to an XML string via the currently registered serializer. */
  def toXML(any: Any): String = xmlSerializer(any)
}
| learndot/Objectify.scala | src/main/scala/org/objectify/responders/serializers/SerializerRegistry.scala | Scala | mit | 404 |
package testfeature
import feature._
import org.scalatest.FunSuite
/**
* Created by prussell on 9/30/16.
*/
class RegionUnionSuite extends FunSuite {
unionEmpty()
unionBlockBlock()
unionBlockBlockSet()
unionBlockSetBlockSet()
// Empty is the identity for union: the non-empty operand comes back (the
// assertions call this "self renamed"), and Empty union Empty stays Empty.
def unionEmpty(): Unit = {
test("Empty union") {
assert(Empty.union(chr1_1500_2500_both) === Block("1", 1500, 2500, Unstranded), "Union with empty should be self renamed")
assert(Empty.union(chr1_100_200_300_400_plus) === BlockSet(List(
Block("1", 100, 200, Plus),
Block("1", 300, 400, Plus)
)), "Union with empty should be self renamed")
assert(chr1_1500_2500_both.union(Empty) === Block("1", 1500, 2500, Unstranded), "Union with empty should be self renamed")
assert(chr1_100_200_300_400_plus.union(Empty) === BlockSet(List(
Block("1", 100, 200, Plus),
Block("1", 300, 400, Plus)
)), "Union with empty should be self renamed")
assert(Empty.union(Empty) === Empty, "Empty union with Empty should be Empty")
}
}
// Block-with-block unions: overlapping/adjacent/nested blocks merge into a
// single block carrying the consensus orientation; disjoint blocks produce a
// BlockSet; incompatible orientations or chromosomes are rejected.
def unionBlockBlock(): Unit = {
test("Block union with block") {
// Same span
assert(chr1_1000_2000_minus.union(chr1_1000_2000_both) === Block("1", 1000, 2000, Minus),
"Union of blocks with same span and different orientations should have consensus orientation")
assert(chr1_1000_2000_both.union(chr1_1000_2000_minus) === Block("1", 1000, 2000, Minus),
"Union of blocks with same span and different orientations should have consensus orientation")
// Adjacent blocks
assert(chr1_1000_2000_both.union(chr1_2000_3000_plus) === Block("1", 1000, 3000, Plus),
"Union of adjacent blocks should be single block with consensus orientation")
assert(chr1_2000_3000_plus.union(chr1_1000_2000_both) === Block("1", 1000, 3000, Plus),
"Union of adjacent blocks should be single block with consensus orientation")
// Overlapping blocks
assert(chr1_1500_2500_both.union(chr1_2000_3000_plus) === Block("1", 1500, 3000, Plus),
"Union of overlapping blocks should be single block with consensus orientation")
assert(chr1_2000_3000_plus.union(chr1_1500_2500_both) === Block("1", 1500, 3000, Plus),
"Union of overlapping blocks should be single block with consensus orientation")
// Block with self
assert(chr1_1500_2500_both.union(chr1_1500_2500_both) === chr1_1500_2500_both,
"Union of block with self should be block renamed")
// Nested blocks
assert(chr1_4_6_minus.union(chr1_5_6_minus) === chr1_4_6_minus,
"Union of nested blocks should be outer block with consensus orientation")
assert(chr1_5_6_minus.union(chr1_4_6_minus) === chr1_4_6_minus,
"Union of nested blocks should be outer block with consensus orientation")
assert(chr1_1000_2000_both.union(chr1_1200_1300_plus) === chr1_1000_2000_plus_1,
"Union of nested blocks should be outer block with consensus orientation")
assert(chr1_1200_1300_plus.union(chr1_1000_2000_both) === chr1_1000_2000_plus_1,
"Union of nested blocks should be outer block with consensus orientation")
// Non-overlapping blocks
assert(chr1_2000_3000_plus.union(chr1_1200_1300_plus) === BlockSet(List(
Block("1", 1200, 1300, Plus),
Block("1", 2000, 3000, Plus))),
"Union of non-overlapping blocks should be block set with consensus orientation")
assert(chr1_1200_1300_plus.union(chr1_2000_3000_plus) === BlockSet(List(
Block("1", 1200, 1300, Plus),
Block("1", 2000, 3000, Plus))),
"Union of non-overlapping blocks should be block set with consensus orientation")
assert(chr2_3000_4000_both.union(chr2_1000_2000_plus) === BlockSet(List(
Block("2", 1000, 2000, Plus),
Block("2", 3000, 4000, Plus))),
"Union of non-overlapping blocks should be block set with consensus orientation")
assert(chr2_1000_2000_plus.union(chr2_3000_4000_both) === BlockSet(List(
Block("2", 1000, 2000, Plus),
Block("2", 3000, 4000, Plus))),
"Union of non-overlapping blocks should be block set with consensus orientation")
// Illegal unions: opposite orientations or different chromosomes must throw.
assert(true) // (comment anchor only — assertions below use intercept)
intercept[IllegalArgumentException](chr1_1000_2000_minus.union(chr1_1000_2000_plus_1))
intercept[IllegalArgumentException](chr1_1000_2000_plus_1.union(chr2_1000_2000_plus))
}
}
// Block-with-BlockSet unions, covering every positional relationship:
// nesting (either direction, with or without shared endpoints), overlap of
// one/two/three member blocks, overlap spanning introns, spans equal or
// adjacent to a block or an intron, 1bp gaps, and the illegal cases
// (incompatible orientation, different chromosome).
def unionBlockBlockSet(): Unit = {
test("Union: Block and BlockSet") {
// Strictly nested both directions
assert(chr1_1000_2000_both.union(chr1_900_2100_3000_4000_plus) === BlockSet(List(
Block("1", 900, 2100, Plus),
Block("1", 3000, 4000, Plus)
)),
"One block of block set completely contains the other block")
assert(chr1_1000_2000_both.union(chr1_1100_1200_1300_1400_plus) ===
Block("1", 1000, 2000, Plus),
"Block completely contains block set")
// Block set nested inside block sharing one endpoint
assert(chr1_1000_2000_both.union(chr1_1000_1100_1200_1300_plus) ===
Block("1", 1000, 2000, Plus),
"Block completely contains block set, sharing one endpoint")
// Block set nested inside block sharing two endpoints
assert(chr1_1000_2000_both.union(chr1_1000_1100_1900_2000_plus) ===
Block("1", 1000, 2000, Plus),
"Block completely contains block set, sharing two endpoints")
assert(chr1_1000_2000_both.union(chr1_1000_1100_1200_1300_1900_2000_plus) ===
Block("1", 1000, 2000, Plus),
"Block completely contains block set, sharing two endpoints")
// Block nested inside block set sharing one endpoint
assert(chr1_1000_1100_1200_1300_1900_2000_plus.union(Block("1", 1000, 1050, Plus)) ===
BlockSet(List(
Block("1", 1000, 1100, Plus),
Block("1", 1200, 1300, Plus),
Block("1", 1900, 2000, Plus)
)),
"Block nested inside block set sharing one endpoint")
assert(chr1_1000_1100_1200_1300_1900_2000_plus.union(Block("1", 1050, 1100, Plus)) ===
BlockSet(List(
Block("1", 1000, 1100, Plus),
Block("1", 1200, 1300, Plus),
Block("1", 1900, 2000, Plus)
)),
"Block nested inside block set sharing one endpoint")
assert(chr1_1000_1100_1200_1300_1900_2000_plus.union(Block("1", 1200, 1250, Plus)) ===
BlockSet(List(
Block("1", 1000, 1100, Plus),
Block("1", 1200, 1300, Plus),
Block("1", 1900, 2000, Plus)
)),
"Block nested inside block set sharing one endpoint")
assert(chr1_1000_1100_1200_1300_1900_2000_plus.union(Block("1", 1250, 1300, Plus)) ===
BlockSet(List(
Block("1", 1000, 1100, Plus),
Block("1", 1200, 1300, Plus),
Block("1", 1900, 2000, Plus)
)),
"Block nested inside block set sharing one endpoint")
assert(chr1_1000_1100_1200_1300_1900_2000_plus.union(Block("1", 1900, 1950, Plus)) ===
BlockSet(List(
Block("1", 1000, 1100, Plus),
Block("1", 1200, 1300, Plus),
Block("1", 1900, 2000, Plus)
)),
"Block nested inside block set sharing one endpoint")
assert(chr1_1000_1100_1200_1300_1900_2000_plus.union(Block("1", 1950, 2000, Plus)) ===
BlockSet(List(
Block("1", 1000, 1100, Plus),
Block("1", 1200, 1300, Plus),
Block("1", 1900, 2000, Plus)
)),
"Block nested inside block set sharing one endpoint")
// Overlapping one block off end
assert(chr1_1000_2000_both.union(chr1_1900_2100_3000_4000_plus) === BlockSet(List(
Block("1", 1000, 2100, Plus),
Block("1", 3000, 4000, Plus)
)), "Overlapping one block off end")
assert(chr1_1500_2500_plus.union(chr1_1900_2100_3000_4000_plus) === BlockSet(List(
Block("1", 1500, 2500, Plus),
Block("1", 3000, 4000, Plus)
)), "Overlapping one block off end")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1400, 1550, Plus)) === BlockSet(List(
Block("1", 1400, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Overlapping one block off end")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1550, 1650, Plus)) === BlockSet(List(
Block("1", 1500, 1650, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Overlapping one block off end")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1650, 1750, Plus)) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1650, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Overlapping one block off end")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1750, 1850, Plus)) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1850, Plus),
Block("1", 2100, 2300, Plus)
)), "Overlapping one block off end")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 2000, 2150, Plus)) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2000, 2300, Plus)
)), "Overlapping one block off end")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 2200, 2350, Plus)) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2350, Plus)
)), "Overlapping one block off end")
// Overlapping two blocks off end
assert(chr1_1000_2000_both.union(chr1_1700_1800_1900_2100_plus) === Block("1", 1000, 2100, Plus),
"Overlapping two blocks off end")
assert(chr1_1700_1800_1900_2100_plus.union(Block("1", 1750, 2200, Plus)) === Block("1", 1700, 2200, Plus),
"Overlapping two blocks off end")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1400, 1750, Plus)) === BlockSet(List(
Block("1", 1400, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Overlapping two blocks off end")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1550, 1850, Plus)) === BlockSet(List(
Block("1", 1500, 1850, Plus),
Block("1", 2100, 2300, Plus)
)), "Overlapping two blocks off end")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1650, 2200, Plus)) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1650, 2300, Plus)
)), "Overlapping two blocks off end")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1750, 2400, Plus)) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 2400, Plus)
)), "Overlapping two blocks off end")
// Overlapping one block and intron
assert(chr1_1000_2000_both.union(chr1_1500_1600_2100_2300_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2100, 2300, Plus)
)), "Overlapping one block and an intron")
assert(chr1_1500_1600_2100_2300_plus.union(chr1_2000_3000_plus) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 2000, 3000, Plus)
)), "Overlapping one block and an intron")
// Overlapping two blocks and intron
assert(chr1_1000_2000_both.union(chr1_1500_1600_1700_1800_2100_2300_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2100, 2300, Plus)
)), "Overlapping two blocks and an intron")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1650, 2400, Plus)) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1650, 2400, Plus)
)), "Overlapping two blocks and an intron")
// Overlapping three blocks and intron
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_plus.union(Block("1", 500, 6500, Plus)) === BlockSet(List(
Block("1", 500, 6500, Plus),
Block("1", 7000, 8000, Plus)
)), "Overlapping three blocks and an intron")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_plus.union(Block("1", 2500, 8500, Plus)) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 8500, Plus)
)), "Overlapping three blocks and an intron")
// Same span as one block
assert(chr1_1000_2000_both.union(chr1_1000_2000_3000_4000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus)
)), "Same span as one block")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1500, 1600, Plus)) ===
BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Same span as one block")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1700, 1800, Plus)) ===
BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Same span as one block")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 2100, 2300, Plus)) ===
BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Same span as one block")
// Contains one block and overlaps two adjacent blocks
assert(chr1_1000_2000_both.union(chr1_900_1100_1500_1600_1900_2100_plus) === Block("1", 900, 2100, Plus),
"Contains one block and overlaps two adjacent blocks")
// Contains two blocks and overlaps two adjacent blocks
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_plus.union(Block("1", 1500, 7500, Plus)) ===
Block("1", 1000, 8000, Plus), "Contains two blocks and overlaps two adjacent blocks")
// Contains one block sharing an endpoint
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1400, 1600, Plus)) === BlockSet(List(
Block("1", 1400, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Contains one block sharing an endpoint")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1500, 1650, Plus)) === BlockSet(List(
Block("1", 1500, 1650, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Contains one block sharing an endpoint")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1650, 1800, Plus)) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1650, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Contains one block sharing an endpoint")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1700, 1850, Plus)) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1850, Plus),
Block("1", 2100, 2300, Plus)
)), "Contains one block sharing an endpoint")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 2000, 2300, Plus)) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2000, 2300, Plus)
)), "Contains one block sharing an endpoint")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 2100, 2400, Plus)) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2400, Plus)
)), "Contains one block sharing an endpoint")
// Non-overlapping because different spans
assert(chr1_1000_2000_plus_1.union(chr1_100_200_300_400_plus) === BlockSet(List(
Block("1", 100, 200, Plus),
Block("1", 300, 400, Plus),
Block("1", 1000, 2000, Plus)
)), "Non-overlapping because different spans")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 1000, 1100, Unstranded)) === BlockSet(List(
Block("1", 1000, 1100, Plus),
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Non-overlapping because different spans")
assert(chr1_1500_1600_1700_1800_2100_2300_plus.union(Block("1", 2400, 2500, Unstranded)) === BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus),
Block("1", 2400, 2500, Plus)
)), "Non-overlapping because different spans")
// Non-overlapping because different orientations
intercept[IllegalArgumentException](chr1_1000_2000_minus.union(chr1_1500_1600_1700_1800_2100_2300_plus))
// Different chromosomes
intercept[IllegalArgumentException](chr2_1000_2000_plus.union(chr1_2000_3000_plus))
// Same span as an intron
// Two blocks
assert(Block("1", 200, 300, Plus).union(chr1_100_200_300_400_plus) ===
Block("1", 100, 400, Plus), "Same span as an intron")
// Three blocks
assert(Block("1", 1600, 1700, Plus).union(chr1_1500_1600_1700_1800_2100_2300_plus) ===
BlockSet(List(
Block("1", 1500, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Same span as an intron")
assert(Block("1", 1800, 2100, Plus).union(chr1_1500_1600_1700_1800_2100_2300_plus) ===
BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 2300, Plus)
)), "Same span as an intron")
// Adjacent to span
// Two blocks
assert(Block("1", 50, 100, Plus).union(chr1_100_200_300_400_plus) ===
BlockSet(List(
Block("1", 50, 200, Plus),
Block("1", 300, 400, Plus)
)), "Adjacent to span")
assert(Block("1", 400, 500, Plus).union(chr1_100_200_300_400_plus) ===
BlockSet(List(
Block("1", 100, 200, Plus),
Block("1", 300, 500, Plus)
)), "Adjacent to span")
// Three blocks
assert(Block("1", 1400, 1500, Plus).union(chr1_1500_1600_1700_1800_2100_2300_plus) ===
BlockSet(List(
Block("1", 1400, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "Adjacent to span")
assert(Block("1", 2300, 2400, Plus).union(chr1_1500_1600_1700_1800_2100_2300_plus) ===
BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2400, Plus)
)), "Adjacent to span")
// 1bp gap between blocks
// In intron
assert(Block("1", 201, 300, Plus).union(chr1_100_200_300_400_plus) ===
BlockSet(List(
Block("1", 100, 200, Plus),
Block("1", 201, 400, Plus)
)), "1bp gap between blocks")
assert(Block("1", 200, 299, Plus).union(chr1_100_200_300_400_plus) ===
BlockSet(List(
Block("1", 100, 299, Plus),
Block("1", 300, 400, Plus)
)), "1bp gap between blocks")
assert(Block("1", 201, 299, Plus).union(chr1_100_200_300_400_plus) ===
BlockSet(List(
Block("1", 100, 200, Plus),
Block("1", 201, 299, Plus),
Block("1", 300, 400, Plus)
)), "1bp gap between blocks")
assert(Block("1", 1800, 2099, Plus).union(chr1_1500_1600_1700_1800_2100_2300_plus) ===
BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 2099, Plus),
Block("1", 2100, 2300, Plus)
)), "1bp gap between blocks")
assert(Block("1", 1801, 2100, Plus).union(chr1_1500_1600_1700_1800_2100_2300_plus) ===
BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 1801, 2300, Plus)
)), "1bp gap between blocks")
assert(Block("1", 1801, 2099, Plus).union(chr1_1500_1600_1700_1800_2100_2300_plus) ===
BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 1801, 2099, Plus),
Block("1", 2100, 2300, Plus)
)), "1bp gap between blocks")
assert(Block("1", 1600, 1699, Plus).union(chr1_1500_1600_1700_1800_2100_2300_plus) ===
BlockSet(List(
Block("1", 1500, 1699, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "1bp gap between blocks")
assert(Block("1", 1601, 1700, Plus).union(chr1_1500_1600_1700_1800_2100_2300_plus) ===
BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1601, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "1bp gap between blocks")
assert(Block("1", 1601, 1699, Plus).union(chr1_1500_1600_1700_1800_2100_2300_plus) ===
BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1601, 1699, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "1bp gap between blocks")
// Outside span
assert(Block("1", 1400, 1499, Plus).union(chr1_1500_1600_1700_1800_2100_2300_plus) ===
BlockSet(List(
Block("1", 1400, 1499, Plus),
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus)
)), "1bp gap between blocks")
assert(Block("1", 2301, 2400, Plus).union(chr1_1500_1600_1700_1800_2100_2300_plus) ===
BlockSet(List(
Block("1", 1500, 1600, Plus),
Block("1", 1700, 1800, Plus),
Block("1", 2100, 2300, Plus),
Block("1", 2301, 2400, Plus)
)), "1bp gap between blocks")
}
}
def unionBlockSetBlockSet(): Unit = {
test("Union of BlockSet and BlockSet") {
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_1500_1600_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_800_1500_1600_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 700, 800, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_800_3500_3600_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 700, 800, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_800_5500_5600_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 700, 800, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_2500_2600_3500_3600_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_2500_2600_5500_5600_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_2600_3500_3600_4500_4600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 4500, 4600, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_2600_3500_3600_6500_6600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 6600, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_2600_5500_5600_6500_6600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 6600, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_5500_5600_6500_6600_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 6600, Plus),
Block("1", 7000, 8000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1000_1500_1600_plus) === BlockSet(List(
Block("1", 500, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_1000_1500_1600_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 700, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_1000_3500_3600_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 700, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_1000_5500_5600_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 700, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_2000_3000_3500_3600_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_2500_3000_5500_5600_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 2500, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2000_3000_3500_3600_4500_4600_plus) === BlockSet(List(
Block("1", 1000, 4000, Plus),
Block("1", 4500, 4600, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2000_2600_3500_3600_6500_6600_plus) === BlockSet(List(
Block("1", 1000, 2600, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 6600, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_3000_5500_5600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_4000_4600_5500_5600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4600, Plus),
Block("1", 5000, 6000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_4000_5000_5500_5600_6000_6600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 6600, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_5500_5600_6000_6600_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6600, Plus),
Block("1", 7000, 8000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_plus.union(chr1_500_600_3500_3600_both) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_plus.union(chr1_1100_1200_2500_3000_both) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 4000, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_plus.union(chr1_1100_1200_4000_4600_both) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4600, Plus)
)), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_plus.union(chr1_2000_3000_3500_3600_both) ===
Block("1", 1000, 4000, Plus), "One block nested, others non-overlapping")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_800_900_1100_both) === BlockSet(List(
Block("1", 500, 600, Unstranded),
Block("1", 700, 800, Unstranded),
Block("1", 900, 2000, Unstranded),
Block("1", 3000, 4000, Unstranded),
Block("1", 5000, 6000, Unstranded)
)), "One block overlapping off end of span")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_800_1000_2000_both) === BlockSet(List(
Block("1", 500, 600, Unstranded),
Block("1", 700, 800, Unstranded),
Block("1", 1000, 2000, Unstranded),
Block("1", 3000, 4000, Unstranded),
Block("1", 5000, 6000, Unstranded)
)), "One block overlapping off end of span")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_800_900_2000_both) === BlockSet(List(
Block("1", 500, 600, Unstranded),
Block("1", 700, 800, Unstranded),
Block("1", 900, 2000, Unstranded),
Block("1", 3000, 4000, Unstranded),
Block("1", 5000, 6000, Unstranded)
)), "One block overlapping off end of span")
assert(chr1_1000_2000_3000_4000_plus.union(chr1_500_600_900_1100_both) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 900, 2000, Plus),
Block("1", 3000, 4000, Plus)
)), "One block overlapping off end of span")
assert(chr1_1000_2000_3000_4000_plus.union(chr1_500_600_1000_2000_both) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus)
)), "One block overlapping off end of span")
assert(chr1_1000_2000_3000_4000_plus.union(chr1_500_600_900_2000_both) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 900, 2000, Plus),
Block("1", 3000, 4000, Plus)
)), "One block overlapping off end of span")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1500_2500_4000_5000_plus) === BlockSet(List(
Block("1", 1000, 2500, Plus),
Block("1", 3000, 6000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_3500_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3000_4000_4500_5500_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 4500, 6000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_2600_2700_3500_6000_7000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 2700, 4000, Plus),
Block("1", 5000, 7000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_3500_4000_7000_8000_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_4000_6000_7000_8000_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 3000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_2000_5000_7000_8000_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3500_5000_6000_7000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 7000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_5000_6000_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1000_5000_7000_8000_9000_plus) === BlockSet(List(
Block("1", 500, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 7000, Plus),
Block("1", 8000, 9000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_4500_4600_5500_6500_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 4500, 4600, Plus),
Block("1", 5000, 6500, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2000_3000_4000_5000_5500_5600_plus) ===
Block("1", 1000, 6000, Plus), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2100_2200_2300_2400_2500_3100_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2100, 2200, Plus),
Block("1", 2300, 2400, Plus),
Block("1", 2500, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2100_2200_2300_3000_4000_5000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2100, 2200, Plus),
Block("1", 2300, 6000, Plus)
)), "One block overlapping one block of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1500_3500_3600_7000_8000_plus) === BlockSet(List(
Block("1", 500, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1000_2000_3500_3600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3500_4500_5500_5600_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4500, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1500_2500_2600_3500_3600_plus) === BlockSet(List(
Block("1", 500, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_2600_3500_3600_3800_4500_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4500, Plus),
Block("1", 5000, 6000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_1500_1600_2500_2600_4500_4600_5000_6000_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 4500, 4600, Plus),
Block("1", 5000, 6000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3500_3600_4500_6000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 4500, 6000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_3000_3500_3600_7000_8000_plus) === BlockSet(List(
Block("1", 500, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1000_3000_3500_3600_plus) === BlockSet(List(
Block("1", 1000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3000_4500_5500_5600_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4500, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1500_2500_2600_3500_3600_plus) === BlockSet(List(
Block("1", 500, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2000_2600_3500_3600_3800_4500_plus) === BlockSet(List(
Block("1", 1000, 2600, Plus),
Block("1", 3000, 4500, Plus),
Block("1", 5000, 6000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1000_1500_1600_2500_2600_4500_4600_5000_6000_plus) === BlockSet(List(
Block("1", 500, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 4500, 4600, Plus),
Block("1", 5000, 6000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3500_3600_4000_6000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 6000, Plus)
)), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_6500_7000_8000_plus) === BlockSet(List(
Block("1", 500, 6500, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1500_5500_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_1000_7000_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 7000, Plus)
)), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_1500_5500_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 6000, Plus)
)), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_5500_6500_6600_plus) === BlockSet(List(
Block("1", 500, 6000, Plus),
Block("1", 6500, 6600, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_600_3500_9000_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 3000, 9000, Plus)
)), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_2600_2800_9000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 2800, 9000, Plus)
)), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_8500_9000_10000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 8500, Plus),
Block("1", 9000, 10000, Plus)
)), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1500_5500_6000_8000_plus) ===
Block("1", 1000, 8000, Plus), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1000_1500_7000_plus) ===
Block("1", 500, 7000, Plus), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1000_1500_5500_plus) ===
Block("1", 500, 6000, Plus), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_5500_6500_7000_plus) === BlockSet(List(
Block("1", 500, 6000, Plus),
Block("1", 6500, 8000, Plus)
)), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_600_2000_9000_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 9000, Plus)
)), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_2600_2800_9000_plus) === BlockSet(List(
Block("1", 1000, 2600, Plus),
Block("1", 2800, 9000, Plus)
)), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_8500_9000_10000_plus) === BlockSet(List(
Block("1", 1000, 8500, Plus),
Block("1", 9000, 10000, Plus)
)), "One block overlapping three blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_4500_7000_8000_plus) === BlockSet(List(
Block("1", 500, 4500, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_3500_5500_5600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3500_5500_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3000_6000_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_3500_4500_4600_6500_6600_plus) === BlockSet(List(
Block("1", 500, 4000, Plus),
Block("1", 4500, 4600, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 6600, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_2600_3500_5500_6500_6600_8500_8600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 6000, Plus),
Block("1", 6500, 6600, Plus),
Block("1", 7000, 8000, Plus),
Block("1", 8500, 8600, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_3500_4500_5500_6500_9000_10000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4500, Plus),
Block("1", 5000, 6500, Plus),
Block("1", 7000, 8000, Plus),
Block("1", 9000, 10000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_4500_5500_6500_7500_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 4500, 6000, Plus),
Block("1", 6500, 8000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_4500_6500_8500_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 4500, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 8500, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_5000_7000_8000_plus) === BlockSet(List(
Block("1", 500, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_5000_5500_5600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 6000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3500_5500_6000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 8000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2000_6000_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_4000_4500_4600_6500_6600_plus) === BlockSet(List(
Block("1", 500, 4000, Plus),
Block("1", 4500, 4600, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 6600, Plus),
Block("1", 7000, 8000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_3000_3500_5500_6500_6600_8500_8600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 6000, Plus),
Block("1", 6500, 6600, Plus),
Block("1", 7000, 8000, Plus),
Block("1", 8500, 8600, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_3500_5000_5500_6500_9000_10000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 6500, Plus),
Block("1", 7000, 8000, Plus),
Block("1", 9000, 10000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_4500_6000_6500_7500_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 4500, 6000, Plus),
Block("1", 6500, 8000, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_4500_6500_8500_plus) === BlockSet(List(
Block("1", 1000, 4500, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 8500, Plus)
)), "One block overlapping two blocks of other")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_1000_2000_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "One block same")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3000_4000_5500_6500_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6500, Plus)
)), "One block same")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_2600_4500_4600_5000_6000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 4500, 4600, Plus),
Block("1", 5000, 6000, Plus)
)), "One block same")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1500_2500_3500_4500_5000_6000_plus) === BlockSet(List(
Block("1", 1000, 2500, Plus),
Block("1", 3000, 4500, Plus),
Block("1", 5000, 6000, Plus)
)), "One block same")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_5000_6000_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "One block same")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_3000_4500_4600_5000_6000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 4000, Plus),
Block("1", 4500, 4600, Plus),
Block("1", 5000, 6000, Plus)
)), "One block same")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1500_2500_3000_4500_5000_6000_plus) === BlockSet(List(
Block("1", 1000, 2500, Plus),
Block("1", 3000, 4500, Plus),
Block("1", 5000, 6000, Plus)
)), "One block same")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1000_2000_3000_4000_5000_6000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "Same blocks compatible orientation")
intercept[IllegalArgumentException](chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_7000_8000_9000_10000_minus))
intercept[IllegalArgumentException](chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_1000_4000_7000_8000_minus))
assert(chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_6000_7000_8000_9000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 7000, Plus),
Block("1", 8000, 9000, Plus)
)), "Same chromosome, same orientation, adjacent spans")
assert(chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_2000_3000_4000_5000_plus) ===
Block("1", 1000, 6000, Plus), "Same chromosome, same orientation, interleaved exons completely filling span")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_3000_4000_5000_6000_7000_both) ===
Block("1", 1000, 8000, Unstranded), "Same chromosome, same orientation, interleaved exons completely filling span")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_3000_8000_9000_both) === BlockSet(List(
Block("1", 1000, 4000, Unstranded),
Block("1", 5000, 6000, Unstranded),
Block("1", 7000, 9000, Unstranded)
)), "Same chromosome, same orientation, interleaved exons filling some introns not all")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_3000_4000_5000_8000_9000_both) === BlockSet(List(
Block("1", 1000, 6000, Unstranded),
Block("1", 7000, 9000, Unstranded)
)), "Same chromosome, same orientation, interleaved exons filling some introns not all")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_900_1000_4000_5000_6000_7000_8000_9000_both) === BlockSet(List(
Block("1", 900, 2000, Unstranded),
Block("1", 3000, 9000, Unstranded)
)), "Same chromosome, same orientation, interleaved exons filling some introns not all")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_2600_4500_4600_6500_6600_both) === BlockSet(List(
Block("1", 1000, 2000, Unstranded),
Block("1", 2500, 2600, Unstranded),
Block("1", 3000, 4000, Unstranded),
Block("1", 4500, 4600, Unstranded),
Block("1", 5000, 6000, Unstranded),
Block("1", 6500, 6600, Unstranded),
Block("1", 7000, 8000, Unstranded)
)), "Same chromosome, same orientation, interleaved exons none adjacent")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_4500_4600_6500_6600_8500_8600_both) === BlockSet(List(
Block("1", 1000, 2000, Unstranded),
Block("1", 3000, 4000, Unstranded),
Block("1", 4500, 4600, Unstranded),
Block("1", 5000, 6000, Unstranded),
Block("1", 6500, 6600, Unstranded),
Block("1", 7000, 8000, Unstranded),
Block("1", 8500, 8600, Unstranded)
)), "Same chromosome, same orientation, interleaved exons none adjacent")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1000_2500_2600_6000_6500_8000_9000_both) === BlockSet(List(
Block("1", 500, 2000, Unstranded),
Block("1", 2500, 2600, Unstranded),
Block("1", 3000, 4000, Unstranded),
Block("1", 5000, 6500, Unstranded),
Block("1", 7000, 9000, Unstranded)
)), "Same chromosome, same orientation, interleaved exons some adjacent")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_3000_8000_9000_both) === BlockSet(List(
Block("1", 1000, 2000, Unstranded),
Block("1", 2500, 4000, Unstranded),
Block("1", 5000, 6000, Unstranded),
Block("1", 7000, 9000, Unstranded)
)), "Same chromosome, same orientation, interleaved exons some adjacent")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_4500_5000_6500_6600_both) === BlockSet(List(
Block("1", 1000, 2000, Unstranded),
Block("1", 3000, 4000, Unstranded),
Block("1", 4500, 6000, Unstranded),
Block("1", 6500, 6600, Unstranded),
Block("1", 7000, 8000, Unstranded)
)), "Same chromosome, same orientation, interleaved exons some adjacent")
assert(chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_7000_8000_9000_10000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus),
Block("1", 9000, 10000, Plus)
)), "Same chromosome, same orientation, non-overlapping spans")
assert(chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_6000_8000_9000_10000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 8000, Plus),
Block("1", 9000, 10000, Plus)
)), "Same chromosome, same orientation, non-overlapping spans")
assert(chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus)
)), "Self")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_1600_3500_3600_5500_6500_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6500, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks nested in three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_600_1500_1600_3500_4500_5500_6500_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4500, Plus),
Block("1", 5000, 6500, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks nested in three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_3500_3600_5500_5600_7500_7600_8500_9500_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus),
Block("1", 8500, 9500, Plus)
)), "Three blocks nested in three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_1600_2500_4500_7500_7600_9000_10000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 4500, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus),
Block("1", 9000, 10000, Plus)
)), "Three blocks nested in three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1000_1500_1600_3500_3600_5500_6500_plus) === BlockSet(List(
Block("1", 500, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6500, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks nested in three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_3000_3500_3600_5500_5600_7500_7600_8500_9500_plus) === BlockSet(List(
Block("1", 1000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus),
Block("1", 8500, 9500, Plus)
)), "Three blocks nested in three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_1600_2500_4500_7500_7600_8000_10000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 4500, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 10000, Plus)
)), "Three blocks nested in three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1100_1200_1300_1400_3500_3600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks nested in two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_3500_3600_5100_5200_5300_5400_6500_6600_8500_8600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 6600, Plus),
Block("1", 7000, 8000, Plus),
Block("1", 8500, 8600, Plus)
)), "Three blocks nested in two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_1600_2500_6500_7500_7600_7700_7800_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 6500, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks nested in two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1100_1200_1300_1400_3500_3600_6000_7000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 8000, Plus)
)), "Three blocks nested in two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1000_3500_3600_5100_5200_5300_5400_6500_6600_8500_8600_plus) === BlockSet(List(
Block("1", 500, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 6600, Plus),
Block("1", 7000, 8000, Plus),
Block("1", 8500, 8600, Plus)
)), "Three blocks nested in two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_1600_2000_6500_7500_7600_7700_7800_plus) === BlockSet(List(
Block("1", 1000, 6500, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks nested in two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1500_2500_3500_4500_5500_plus) === BlockSet(List(
Block("1", 500, 2000, Plus),
Block("1", 2500, 4000, Plus),
Block("1", 4500, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks overlapping three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1500_2500_3500_4500_5500_6500_6600_plus) === BlockSet(List(
Block("1", 500, 2000, Plus),
Block("1", 2500, 4000, Plus),
Block("1", 4500, 6000, Plus),
Block("1", 6500, 6600, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks overlapping three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_600_900_1100_2500_2600_3500_4500_6500_6600_7500_8500_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 900, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4500, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 6600, Plus),
Block("1", 7000, 8500, Plus)
)), "Three blocks overlapping three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1500_2000_3500_4500_5500_plus) === BlockSet(List(
Block("1", 500, 4000, Plus),
Block("1", 4500, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks overlapping three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1500_2500_3500_4500_5500_6000_7000_plus) === BlockSet(List(
Block("1", 500, 2000, Plus),
Block("1", 2500, 4000, Plus),
Block("1", 4500, 8000, Plus)
)), "Three blocks overlapping three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_600_900_1100_2500_2600_3500_4000_6500_6600_7500_8500_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 900, 2000, Plus),
Block("1", 2500, 2600, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 6600, Plus),
Block("1", 7000, 8500, Plus)
)), "Three blocks overlapping three blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_900_1100_1900_2100_2900_4100_plus) === BlockSet(List(
Block("1", 900, 2100, Plus),
Block("1", 2900, 4100, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks overlapping two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_3500_3600_3700_3800_3900_plus) === BlockSet(List(
Block("1", 1000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks overlapping two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_3500_4500_5000_5100_5900_6000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4500, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks overlapping two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1000_2500_7000_7100_7500_8000_9000_10000_plus) === BlockSet(List(
Block("1", 1000, 2500, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus),
Block("1", 9000, 10000, Plus)
)), "Three blocks overlapping two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_900_1100_1900_2100_2900_6000_plus) === BlockSet(List(
Block("1", 900, 2100, Plus),
Block("1", 2900, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks overlapping two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_3500_3600_3700_3800_5000_plus) === BlockSet(List(
Block("1", 1000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks overlapping two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_4500_5000_5100_5900_6000_plus) === BlockSet(List(
Block("1", 1000, 4500, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 7000, 8000, Plus)
)), "Three blocks overlapping two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1000_2500_6000_7100_7500_8000_9000_10000_plus) === BlockSet(List(
Block("1", 1000, 2500, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 8000, Plus),
Block("1", 9000, 10000, Plus)
)), "Three blocks overlapping two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3000_3100_3200_3300_4500_4600_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 4500, 4600, Plus),
Block("1", 5000, 6000, Plus)
)), "Two blocks nested in one block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_4500_5100_5200_5300_5400_plus) === BlockSet(List(
Block("1", 500, 4500, Plus),
Block("1", 5000, 6000, Plus)
)), "Two blocks nested in one block")
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3000_3100_3200_3300_4500_5000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 4500, 6000, Plus)
)), "Two blocks nested in one block")
// NOTE(review): unlike every surrounding assertion, the expected value here is a
// bare Block rather than a BlockSet -- presumably union returns a single Block
// when the merged result is one contiguous interval (500-5000 bridges all three
// source blocks and extends to 6000). Confirm this asymmetry is intended and not
// a missing BlockSet(List(...)) wrapper.
assert(chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_5000_5100_5200_5300_5400_plus) ===
Block("1", 500, 6000, Plus), "Two blocks nested in one block")
assert(chr1_1000_2000_3000_4000_plus.union(chr1_500_1500_3500_4500_plus) === BlockSet(List(
Block("1", 500, 2000, Plus),
Block("1", 3000, 4500, Plus)
)), "Two blocks overlapping two blocks")
assert(chr1_1000_2000_3000_4000_plus.union(chr1_1500_2400_2600_3500_plus) === BlockSet(List(
Block("1", 1000, 2400, Plus),
Block("1", 2600, 4000, Plus)
)), "Two blocks overlapping two blocks")
assert(chr1_1000_2000_3000_4000_plus.union(chr1_1500_2500_3500_4500_plus) === BlockSet(List(
Block("1", 1000, 2500, Plus),
Block("1", 3000, 4500, Plus)
)), "Two blocks overlapping two blocks")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_600_3000_4000_6500_6600_7000_8000_plus) === BlockSet(List(
Block("1", 500, 600, Plus),
Block("1", 1000, 2000, Plus),
Block("1", 3000, 4000, Plus),
Block("1", 5000, 6000, Plus),
Block("1", 6500, 6600, Plus),
Block("1", 7000, 8000, Plus)
)), "Two blocks same")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1000_2000_2500_6500_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 6500, Plus),
Block("1", 7000, 8000, Plus)
)), "Two blocks same")
assert(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1000_2000_2500_3000_4000_5000_6000_6500_7000_8000_plus) === BlockSet(List(
Block("1", 1000, 2000, Plus),
Block("1", 2500, 6500, Plus),
Block("1", 7000, 8000, Plus)
)), "Two blocks same")
assert(chr1_500_600_1500_1600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_1500_1600_plus), "One block nested, others non-overlapping")
assert(chr1_500_600_700_800_1500_1600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_800_1500_1600_plus), "One block nested, others non-overlapping")
assert(chr1_500_600_700_800_3500_3600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_800_3500_3600_plus), "One block nested, others non-overlapping")
assert(chr1_500_600_700_800_5500_5600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_800_5500_5600_plus), "One block nested, others non-overlapping")
assert(chr1_500_600_2500_2600_3500_3600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_2500_2600_3500_3600_plus), "One block nested, others non-overlapping")
assert(chr1_500_600_2500_2600_5500_5600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_2500_2600_5500_5600_plus), "One block nested, others non-overlapping")
assert(chr1_2500_2600_3500_3600_4500_4600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_2600_3500_3600_4500_4600_plus), "One block nested, others non-overlapping")
assert(chr1_2500_2600_3500_3600_6500_6600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_2600_3500_3600_6500_6600_plus), "One block nested, others non-overlapping")
assert(chr1_2500_2600_5500_5600_6500_6600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_2600_5500_5600_6500_6600_plus), "One block nested, others non-overlapping")
assert(chr1_5500_5600_6500_6600_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_5500_5600_6500_6600_7000_8000_plus), "One block nested, others non-overlapping")
assert(chr1_500_1000_1500_1600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1000_1500_1600_plus), "One block nested, others non-overlapping")
assert(chr1_500_600_700_1000_1500_1600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_1000_1500_1600_plus), "One block nested, others non-overlapping")
assert(chr1_500_600_700_1000_3500_3600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_1000_3500_3600_plus), "One block nested, others non-overlapping")
assert(chr1_500_600_700_1000_5500_5600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_1000_5500_5600_plus), "One block nested, others non-overlapping")
assert(chr1_500_600_2000_3000_3500_3600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_2000_3000_3500_3600_plus), "One block nested, others non-overlapping")
assert(chr1_500_600_2500_3000_5500_5600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_2500_3000_5500_5600_plus), "One block nested, others non-overlapping")
assert(chr1_2000_3000_3500_3600_4500_4600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2000_3000_3500_3600_4500_4600_plus), "One block nested, others non-overlapping")
assert(chr1_2000_2600_3500_3600_6500_6600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2000_2600_3500_3600_6500_6600_plus), "One block nested, others non-overlapping")
assert(chr1_2500_3000_5500_5600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_3000_5500_5600_plus), "One block nested, others non-overlapping")
assert(chr1_4000_4600_5500_5600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_4000_4600_5500_5600_plus), "One block nested, others non-overlapping")
assert(chr1_4000_5000_5500_5600_6000_6600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_4000_5000_5500_5600_6000_6600_plus), "One block nested, others non-overlapping")
assert(chr1_5500_5600_6000_6600_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_5500_5600_6000_6600_7000_8000_plus), "One block nested, others non-overlapping")
assert(chr1_500_600_3500_3600_both.union(chr1_1000_2000_3000_4000_plus) ===
chr1_1000_2000_3000_4000_plus.union(chr1_500_600_3500_3600_both), "One block nested, others non-overlapping")
assert(chr1_1100_1200_2500_3000_both.union(chr1_1000_2000_3000_4000_plus) ===
chr1_1000_2000_3000_4000_plus.union(chr1_1100_1200_2500_3000_both), "One block nested, others non-overlapping")
assert(chr1_1100_1200_4000_4600_both.union(chr1_1000_2000_3000_4000_plus) ===
chr1_1000_2000_3000_4000_plus.union(chr1_1100_1200_4000_4600_both), "One block nested, others non-overlapping")
assert(chr1_2000_3000_3500_3600_both.union(chr1_1000_2000_3000_4000_plus) ===
chr1_1000_2000_3000_4000_plus.union(chr1_2000_3000_3500_3600_both), "One block nested, others non-overlapping")
assert(chr1_500_600_700_800_900_1100_both.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_800_900_1100_both), "One block overlapping off end of span")
assert(chr1_500_600_700_800_1000_2000_both.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_800_1000_2000_both), "One block overlapping off end of span")
assert(chr1_500_600_700_800_900_2000_both.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_700_800_900_2000_both), "One block overlapping off end of span")
assert(chr1_500_600_900_1100_both.union(chr1_1000_2000_3000_4000_plus) ===
chr1_1000_2000_3000_4000_plus.union(chr1_500_600_900_1100_both), "One block overlapping off end of span")
assert(chr1_500_600_1000_2000_both.union(chr1_1000_2000_3000_4000_plus) ===
chr1_1000_2000_3000_4000_plus.union(chr1_500_600_1000_2000_both), "One block overlapping off end of span")
assert(chr1_500_600_900_2000_both.union(chr1_1000_2000_3000_4000_plus) ===
chr1_1000_2000_3000_4000_plus.union(chr1_500_600_900_2000_both), "One block overlapping off end of span")
assert(chr1_1500_2500_4000_5000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1500_2500_4000_5000_plus), "One block overlapping one block of other")
assert(chr1_2500_3500_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_3500_7000_8000_plus), "One block overlapping one block of other")
assert(chr1_3000_4000_4500_5500_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3000_4000_4500_5500_plus), "One block overlapping one block of other")
assert(chr1_2500_2600_2700_3500_6000_7000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_2600_2700_3500_6000_7000_plus), "One block overlapping one block of other")
assert(chr1_500_600_3500_4000_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_3500_4000_7000_8000_plus), "One block overlapping one block of other")
assert(chr1_500_600_4000_6000_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_4000_6000_7000_8000_plus), "One block overlapping one block of other")
assert(chr1_500_600_2000_5000_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_2000_5000_7000_8000_plus), "One block overlapping one block of other")
assert(chr1_3500_5000_6000_7000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3500_5000_6000_7000_plus), "One block overlapping one block of other")
assert(chr1_5000_6000_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_5000_6000_7000_8000_plus), "One block overlapping one block of other")
assert(chr1_500_1000_5000_7000_8000_9000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1000_5000_7000_8000_9000_plus), "One block overlapping one block of other")
assert(chr1_4500_4600_5500_6500_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_4500_4600_5500_6500_7000_8000_plus), "One block overlapping one block of other")
assert(chr1_2000_3000_4000_5000_5500_5600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2000_3000_4000_5000_5500_5600_plus), "One block overlapping one block of other")
assert(chr1_2100_2200_2300_2400_2500_3100_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2100_2200_2300_2400_2500_3100_plus), "One block overlapping one block of other")
assert(chr1_2100_2200_2300_3000_4000_5000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2100_2200_2300_3000_4000_5000_plus), "One block overlapping one block of other")
assert(chr1_500_1500_3500_3600_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1500_3500_3600_7000_8000_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_2000_3500_3600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1000_2000_3500_3600_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_3500_4500_5500_5600_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3500_4500_5500_5600_7000_8000_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_500_1500_2500_2600_3500_3600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1500_2500_2600_3500_3600_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_2500_2600_3500_3600_3800_4500_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_2600_3500_3600_3800_4500_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_500_600_1500_1600_2500_2600_4500_4600_5000_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_1500_1600_2500_2600_4500_4600_5000_6000_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_3500_3600_4500_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3500_3600_4500_6000_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_500_3000_3500_3600_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_3000_3500_3600_7000_8000_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_1000_3000_3500_3600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1000_3000_3500_3600_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_3000_4500_5500_5600_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3000_4500_5500_5600_7000_8000_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_500_1500_2500_2600_3500_3600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1500_2500_2600_3500_3600_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_2000_2600_3500_3600_3800_4500_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2000_2600_3500_3600_3800_4500_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_500_1000_1500_1600_2500_2600_4500_4600_5000_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1000_1500_1600_2500_2600_4500_4600_5000_6000_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_3500_3600_4000_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3500_3600_4000_6000_plus), "One block overlapping one block, another block nested in another block")
assert(chr1_500_6500_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_6500_7000_8000_plus), "One block overlapping three blocks of other")
assert(chr1_1500_5500_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1500_5500_7000_8000_plus), "One block overlapping three blocks of other")
assert(chr1_500_600_1000_7000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_1000_7000_plus), "One block overlapping three blocks of other")
assert(chr1_500_600_1500_5500_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_1500_5500_plus), "One block overlapping three blocks of other")
assert(chr1_500_5500_6500_6600_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_5500_6500_6600_plus), "One block overlapping three blocks of other")
assert(chr1_500_600_3500_9000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_600_3500_9000_plus), "One block overlapping three blocks of other")
assert(chr1_2500_2600_2800_9000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_2600_2800_9000_plus), "One block overlapping three blocks of other")
assert(chr1_2500_8500_9000_10000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_8500_9000_10000_plus), "One block overlapping three blocks of other")
assert(chr1_1500_5500_6000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1500_5500_6000_8000_plus), "One block overlapping three blocks of other")
assert(chr1_500_1000_1500_7000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1000_1500_7000_plus), "One block overlapping three blocks of other")
assert(chr1_500_1000_1500_5500_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_1000_1500_5500_plus), "One block overlapping three blocks of other")
assert(chr1_500_5500_6500_7000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_5500_6500_7000_plus), "One block overlapping three blocks of other")
assert(chr1_500_600_2000_9000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_600_2000_9000_plus), "One block overlapping three blocks of other")
assert(chr1_2000_2600_2800_9000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_2600_2800_9000_plus), "One block overlapping three blocks of other")
assert(chr1_2000_8500_9000_10000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_8500_9000_10000_plus), "One block overlapping three blocks of other")
assert(chr1_500_4500_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_4500_7000_8000_plus), "One block overlapping two blocks of other")
assert(chr1_2500_3500_5500_5600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_3500_5500_5600_plus), "One block overlapping two blocks of other")
assert(chr1_3500_5500_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3500_5500_7000_8000_plus), "One block overlapping two blocks of other")
assert(chr1_3000_6000_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3000_6000_7000_8000_plus), "One block overlapping two blocks of other")
assert(chr1_500_3500_4500_4600_6500_6600_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_3500_4500_4600_6500_6600_plus), "One block overlapping two blocks of other")
assert(chr1_2500_2600_3500_5500_6500_6600_8500_8600_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_2600_3500_5500_6500_6600_8500_8600_plus), "One block overlapping two blocks of other")
assert(chr1_3500_4500_5500_6500_9000_10000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_3500_4500_5500_6500_9000_10000_plus), "One block overlapping two blocks of other")
assert(chr1_4500_5500_6500_7500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_4500_5500_6500_7500_plus), "One block overlapping two blocks of other")
assert(chr1_2500_4500_6500_8500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_4500_6500_8500_plus), "One block overlapping two blocks of other")
assert(chr1_500_5000_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_5000_7000_8000_plus), "One block overlapping two blocks of other")
assert(chr1_2500_5000_5500_5600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_5000_5500_5600_plus), "One block overlapping two blocks of other")
assert(chr1_3500_5500_6000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3500_5500_6000_8000_plus), "One block overlapping two blocks of other")
assert(chr1_2000_6000_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2000_6000_7000_8000_plus), "One block overlapping two blocks of other")
assert(chr1_500_4000_4500_4600_6500_6600_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_4000_4500_4600_6500_6600_plus), "One block overlapping two blocks of other")
assert(chr1_2500_3000_3500_5500_6500_6600_8500_8600_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_3000_3500_5500_6500_6600_8500_8600_plus), "One block overlapping two blocks of other")
assert(chr1_3500_5000_5500_6500_9000_10000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_3500_5000_5500_6500_9000_10000_plus), "One block overlapping two blocks of other")
assert(chr1_4500_6000_6500_7500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_4500_6000_6500_7500_plus), "One block overlapping two blocks of other")
assert(chr1_2000_4500_6500_8500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_4500_6500_8500_plus), "One block overlapping two blocks of other")
assert(chr1_500_600_1000_2000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_600_1000_2000_plus), "One block same")
assert(chr1_3000_4000_5500_6500_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3000_4000_5500_6500_plus), "One block same")
assert(chr1_2500_2600_4500_4600_5000_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_2600_4500_4600_5000_6000_plus), "One block same")
assert(chr1_1500_2500_3500_4500_5000_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1500_2500_3500_4500_5000_6000_plus), "One block same")
assert(chr1_5000_6000_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_5000_6000_7000_8000_plus), "One block same")
assert(chr1_2500_3000_4500_4600_5000_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_2500_3000_4500_4600_5000_6000_plus), "One block same")
assert(chr1_1500_2500_3000_4500_5000_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1500_2500_3000_4500_5000_6000_plus), "One block same")
// Compatible orientations (Plus vs Both) union successfully, and union is
// symmetric in its arguments.
assert(chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_1000_2000_3000_4000_5000_6000_plus), "Same blocks compatible orientation")
// Incompatible orientations (Minus vs Plus) are rejected: union throws
// IllegalArgumentException regardless of whether the spans overlap.
intercept[IllegalArgumentException](chr1_7000_8000_9000_10000_minus.union(chr1_1000_2000_3000_4000_5000_6000_plus))
intercept[IllegalArgumentException](chr1_1000_4000_7000_8000_minus.union(chr1_1000_2000_3000_4000_5000_6000_plus))
assert(chr1_6000_7000_8000_9000_plus.union(chr1_1000_2000_3000_4000_5000_6000_plus) ===
chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_6000_7000_8000_9000_plus), "Same chromosome, same orientation, adjacent spans")
assert(chr1_2000_3000_4000_5000_plus.union(chr1_1000_2000_3000_4000_5000_6000_plus) ===
chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_2000_3000_4000_5000_plus), "Same chromosome, same orientation, interleaved exons completely filling span")
assert(chr1_2000_3000_4000_5000_6000_7000_both.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_3000_4000_5000_6000_7000_both), "Same chromosome, same orientation, interleaved exons completely filling span")
assert(chr1_2000_3000_8000_9000_both.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_3000_8000_9000_both), "Same chromosome, same orientation, interleaved exons filling some introns not all")
assert(chr1_2000_3000_4000_5000_8000_9000_both.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_3000_4000_5000_8000_9000_both), "Same chromosome, same orientation, interleaved exons filling some introns not all")
assert(chr1_900_1000_4000_5000_6000_7000_8000_9000_both.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_900_1000_4000_5000_6000_7000_8000_9000_both), "Same chromosome, same orientation, interleaved exons filling some introns not all")
assert(chr1_2500_2600_4500_4600_6500_6600_both.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2500_2600_4500_4600_6500_6600_both), "Same chromosome, same orientation, interleaved exons none adjacent")
assert(chr1_4500_4600_6500_6600_8500_8600_both.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_4500_4600_6500_6600_8500_8600_both), "Same chromosome, same orientation, interleaved exons none adjacent")
assert(chr1_500_1000_2500_2600_6000_6500_8000_9000_both.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1000_2500_2600_6000_6500_8000_9000_both), "Same chromosome, same orientation, interleaved exons some adjacent")
assert(chr1_4500_5000_6500_6600_both.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_4500_5000_6500_6600_both), "Same chromosome, same orientation, interleaved exons some adjacent")
assert(chr1_7000_8000_9000_10000_plus.union(chr1_1000_2000_3000_4000_5000_6000_plus) ===
chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_7000_8000_9000_10000_plus), "Same chromosome, same orientation, non-overlapping spans")
assert(chr1_6000_8000_9000_10000_plus.union(chr1_1000_2000_3000_4000_5000_6000_plus) ===
chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_6000_8000_9000_10000_plus), "Same chromosome, same orientation, non-overlapping spans")
assert(chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_plus) ===
chr1_1000_2000_3000_4000_5000_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_plus), "Self")
assert(chr1_1500_1600_3500_3600_5500_6500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_1600_3500_3600_5500_6500_plus), "Three blocks nested in three blocks")
assert(chr1_500_600_1500_1600_3500_4500_5500_6500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_600_1500_1600_3500_4500_5500_6500_plus), "Three blocks nested in three blocks")
assert(chr1_3500_3600_5500_5600_7500_7600_8500_9500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_3500_3600_5500_5600_7500_7600_8500_9500_plus), "Three blocks nested in three blocks")
assert(chr1_1500_1600_2500_4500_7500_7600_9000_10000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_1600_2500_4500_7500_7600_9000_10000_plus), "Three blocks nested in three blocks")
assert(chr1_500_1000_1500_1600_3500_3600_5500_6500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1000_1500_1600_3500_3600_5500_6500_plus), "Three blocks nested in three blocks")
assert(chr1_2000_3000_3500_3600_5500_5600_7500_7600_8500_9500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_3000_3500_3600_5500_5600_7500_7600_8500_9500_plus), "Three blocks nested in three blocks")
assert(chr1_1500_1600_2500_4500_7500_7600_8000_10000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_1600_2500_4500_7500_7600_8000_10000_plus), "Three blocks nested in three blocks")
assert(chr1_1100_1200_1300_1400_3500_3600_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1100_1200_1300_1400_3500_3600_plus), "Three blocks nested in two blocks")
assert(chr1_3500_3600_5100_5200_5300_5400_6500_6600_8500_8600_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_3500_3600_5100_5200_5300_5400_6500_6600_8500_8600_plus), "Three blocks nested in two blocks")
assert(chr1_1500_1600_2500_6500_7500_7600_7700_7800_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_1600_2500_6500_7500_7600_7700_7800_plus), "Three blocks nested in two blocks")
assert(chr1_1100_1200_1300_1400_3500_3600_6000_7000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1100_1200_1300_1400_3500_3600_6000_7000_plus), "Three blocks nested in two blocks")
assert(chr1_500_1000_3500_3600_5100_5200_5300_5400_6500_6600_8500_8600_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1000_3500_3600_5100_5200_5300_5400_6500_6600_8500_8600_plus), "Three blocks nested in two blocks")
assert(chr1_1500_1600_2000_6500_7500_7600_7700_7800_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_1600_2000_6500_7500_7600_7700_7800_plus), "Three blocks nested in two blocks")
assert(chr1_500_1500_2500_3500_4500_5500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1500_2500_3500_4500_5500_plus), "Three blocks overlapping three blocks")
assert(chr1_500_1500_2500_3500_4500_5500_6500_6600_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1500_2500_3500_4500_5500_6500_6600_plus), "Three blocks overlapping three blocks")
assert(chr1_500_600_900_1100_2500_2600_3500_4500_6500_6600_7500_8500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_600_900_1100_2500_2600_3500_4500_6500_6600_7500_8500_plus), "Three blocks overlapping three blocks")
assert(chr1_500_1500_2000_3500_4500_5500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1500_2000_3500_4500_5500_plus), "Three blocks overlapping three blocks")
assert(chr1_500_1500_2500_3500_4500_5500_6000_7000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_1500_2500_3500_4500_5500_6000_7000_plus), "Three blocks overlapping three blocks")
assert(chr1_500_600_900_1100_2500_2600_3500_4000_6500_6600_7500_8500_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_600_900_1100_2500_2600_3500_4000_6500_6600_7500_8500_plus), "Three blocks overlapping three blocks")
assert(chr1_900_1100_1900_2100_2900_4100_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_900_1100_1900_2100_2900_4100_plus), "Three blocks overlapping two blocks")
assert(chr1_1500_3500_3600_3700_3800_3900_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_3500_3600_3700_3800_3900_plus), "Three blocks overlapping two blocks")
assert(chr1_3500_4500_5000_5100_5900_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_3500_4500_5000_5100_5900_6000_plus), "Three blocks overlapping two blocks")
assert(chr1_1000_2500_7000_7100_7500_8000_9000_10000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1000_2500_7000_7100_7500_8000_9000_10000_plus), "Three blocks overlapping two blocks")
assert(chr1_900_1100_1900_2100_2900_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_900_1100_1900_2100_2900_6000_plus), "Three blocks overlapping two blocks")
assert(chr1_1500_3500_3600_3700_3800_5000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1500_3500_3600_3700_3800_5000_plus), "Three blocks overlapping two blocks")
assert(chr1_2000_4500_5000_5100_5900_6000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_2000_4500_5000_5100_5900_6000_plus), "Three blocks overlapping two blocks")
assert(chr1_1000_2500_6000_7100_7500_8000_9000_10000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1000_2500_6000_7100_7500_8000_9000_10000_plus), "Three blocks overlapping two blocks")
assert(chr1_3000_3100_3200_3300_4500_4600_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3000_3100_3200_3300_4500_4600_plus), "Two blocks nested in one block")
assert(chr1_500_4500_5100_5200_5300_5400_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_4500_5100_5200_5300_5400_plus), "Two blocks nested in one block")
assert(chr1_3000_3100_3200_3300_4500_5000_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_3000_3100_3200_3300_4500_5000_plus), "Two blocks nested in one block")
assert(chr1_500_5000_5100_5200_5300_5400_plus.union(chr1_1000_2000_3000_4000_5000_6000_both) ===
chr1_1000_2000_3000_4000_5000_6000_both.union(chr1_500_5000_5100_5200_5300_5400_plus), "Two blocks nested in one block")
assert(chr1_500_1500_3500_4500_plus.union(chr1_1000_2000_3000_4000_plus) ===
chr1_1000_2000_3000_4000_plus.union(chr1_500_1500_3500_4500_plus), "Two blocks overlapping two blocks")
assert(chr1_1500_2400_2600_3500_plus.union(chr1_1000_2000_3000_4000_plus) ===
chr1_1000_2000_3000_4000_plus.union(chr1_1500_2400_2600_3500_plus), "Two blocks overlapping two blocks")
assert(chr1_1500_2500_3500_4500_plus.union(chr1_1000_2000_3000_4000_plus) ===
chr1_1000_2000_3000_4000_plus.union(chr1_1500_2500_3500_4500_plus), "Two blocks overlapping two blocks")
assert(chr1_500_600_3000_4000_6500_6600_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_500_600_3000_4000_6500_6600_7000_8000_plus), "Two blocks same")
assert(chr1_1000_2000_2500_6500_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1000_2000_2500_6500_7000_8000_plus), "Two blocks same")
assert(chr1_1000_2000_2500_3000_4000_5000_6000_6500_7000_8000_plus.union(chr1_1000_2000_3000_4000_5000_6000_7000_8000_both) ===
chr1_1000_2000_3000_4000_5000_6000_7000_8000_both.union(chr1_1000_2000_2500_3000_4000_5000_6000_6500_7000_8000_plus), "Two blocks same")
intercept[IllegalArgumentException](chr1_900_1100_1900_2100_2900_6000_plus.union(chr2_900_1100_1900_2100_2900_6000_plus))
}
}
}
| pamelarussell/sgxlib | src/test/scala/testfeature/RegionUnionSuite.scala | Scala | mit | 106,801 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.data.storage.elasticsearch
import java.io.IOException
import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext
import scala.concurrent.Future
import org.apache.http.entity.{ContentType, StringEntity}
import org.apache.http.nio.entity.NStringEntity
import org.apache.http.util.EntityUtils
import org.apache.predictionio.data.storage.Event
import org.apache.predictionio.data.storage.LEvents
import org.apache.predictionio.data.storage.StorageClientConfig
import org.elasticsearch.client.RestClient
import org.joda.time.DateTime
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.native.JsonMethods._
import org.json4s.native.Serialization.write
import org.json4s.ext.JodaTimeSerializers
import grizzled.slf4j.Logging
import org.apache.http.message.BasicHeader
/**
 * [[LEvents]] implementation backed by Elasticsearch via the low-level REST
 * client. Events for one app (and optional channel) live in a dedicated index
 * named "<eventdataName>_<appId>" or "<eventdataName>_<appId>_<channelId>".
 * All public operations build that index name from (appId, channelId) and talk
 * to Elasticsearch over HTTP; failures are logged and mapped to neutral return
 * values ("" / false / None / empty iterator) rather than thrown.
 */
class ESLEvents(val client: RestClient, config: StorageClientConfig, val eventdataName: String)
extends LEvents with Logging {
// Lossless JSON formats plus Joda serializers so Event timestamps round-trip.
implicit val formats = DefaultFormats.lossless ++ JodaTimeSerializers.all
/**
 * Index-name suffix for an app/channel pair: "<appId>_<channelId>" when a
 * channel is given, otherwise just "<appId>".
 */
def eventdataKey(appId: Int, channelId: Option[Int] = None): String = {
channelId.map { ch =>
s"${appId}_${ch}"
}.getOrElse {
s"${appId}"
}
}
/**
 * Create the event index for (appId, channelId) with an explicit mapping.
 * Always returns true; any failure surfaces from ESUtils.createIndex.
 */
override def init(appId: Int, channelId: Option[Int] = None): Boolean = {
val index = eventdataName + "_" + eventdataKey(appId, channelId)
// Mapping: identifiers as non-analyzed keywords, times as dates, and the
// free-form "properties" payload stored but not mapped (enabled -> false).
val json =
("mappings" ->
("properties" ->
("name" -> ("type" -> "keyword")) ~
("eventId" -> ("type" -> "keyword")) ~
("event" -> ("type" -> "keyword")) ~
("entityType" -> ("type" -> "keyword")) ~
("entityId" -> ("type" -> "keyword")) ~
("targetEntityType" -> ("type" -> "keyword")) ~
("targetEntityId" -> ("type" -> "keyword")) ~
("properties" -> ("enabled" -> false)) ~
("eventTime" -> ("type" -> "date")) ~
("tags" -> ("type" -> "keyword")) ~
("prId" -> ("type" -> "keyword")) ~
("creationTime" -> ("type" -> "date"))))
ESUtils.createIndex(client, index, compact(render(json)))
true
}
/**
 * Drop the whole index for (appId, channelId).
 * Returns true on HTTP 200; logs and returns false on any other status or
 * on exception.
 */
override def remove(appId: Int, channelId: Option[Int] = None): Boolean = {
val index = eventdataName + "_" + eventdataKey(appId, channelId)
try {
client.performRequest(
"DELETE",
s"/$index",
Map.empty[String, String].asJava
).getStatusLine.getStatusCode match {
case 200 => true
case _ =>
error(s"Failed to remove $index")
false
}
} catch {
case e: Exception =>
error(s"Failed to remove $index", e)
false
}
}
// No-op: the RestClient's lifecycle is owned by the caller, not this class.
override def close(): Unit = {}
/**
 * Index a single event asynchronously.
 * Uses the event's own id when present, otherwise a generated base64 UUID.
 * Returns the id on success, or "" (with an error logged) on failure.
 * NOTE(review): a PUT over an existing id yields result "updated", which this
 * match treats as a failure — confirm that re-inserts are not expected.
 */
override def futureInsert(
event: Event,
appId: Int,
channelId: Option[Int])(implicit ec: ExecutionContext): Future[String] = {
Future {
val index = eventdataName + "_" + eventdataKey(appId, channelId)
val estype = ESUtils.esType(client, index)
try {
val id = event.eventId.getOrElse {
ESEventsUtil.getBase64UUID
}
// Flatten the Event into the documented mapping; "properties" is written
// as a JSON string blob since the field is not mapped.
val json =
("eventId" -> id) ~
("event" -> event.event) ~
("entityType" -> event.entityType) ~
("entityId" -> event.entityId) ~
("targetEntityType" -> event.targetEntityType) ~
("targetEntityId" -> event.targetEntityId) ~
("eventTime" -> ESUtils.formatUTCDateTime(event.eventTime)) ~
("tags" -> event.tags) ~
("prId" -> event.prId) ~
("creationTime" -> ESUtils.formatUTCDateTime(event.creationTime)) ~
("properties" -> write(event.properties.toJObject))
val entity = new NStringEntity(compact(render(json)), ContentType.APPLICATION_JSON)
// "refresh" is taken from config so visibility semantics are tunable.
val response = client.performRequest(
"PUT",
s"/$index/$estype/$id",
Map("refresh" -> ESUtils.getEventDataRefresh(config)).asJava,
entity)
val jsonResponse = parse(EntityUtils.toString(response.getEntity))
val result = (jsonResponse \\ "result").extract[String]
result match {
case "created" => id
case _ =>
error(s"[$result] Failed to update $index/$estype/$id")
""
}
} catch {
case e: IOException =>
error(s"Failed to update $index/$estype/<id>", e)
""
}
}
}
/**
 * Index a batch of events with one _bulk request (newline-delimited JSON:
 * a "create" command line followed by the document line, per event).
 * Returns the ids in input order; an event that failed contributes "".
 */
override def futureInsertBatch(
events: Seq[Event],
appId: Int,
channelId: Option[Int])(implicit ec: ExecutionContext): Future[Seq[String]] = {
Future {
val index = eventdataName + "_" + eventdataKey(appId, channelId)
val estype = ESUtils.esType(client, index)
try {
val ids = events.map { event =>
event.eventId.getOrElse(ESEventsUtil.getBase64UUID)
}
val json = events.zip(ids).map { case (event, id) =>
val commandJson =
("create" -> (
("_index" -> index) ~
("_type" -> estype) ~
("_id" -> id)
))
val documentJson =
("eventId" -> id) ~
("event" -> event.event) ~
("entityType" -> event.entityType) ~
("entityId" -> event.entityId) ~
("targetEntityType" -> event.targetEntityType) ~
("targetEntityId" -> event.targetEntityId) ~
("eventTime" -> ESUtils.formatUTCDateTime(event.eventTime)) ~
("tags" -> event.tags) ~
("prId" -> event.prId) ~
("creationTime" -> ESUtils.formatUTCDateTime(event.creationTime)) ~
("properties" -> write(event.properties.toJObject))
compact(render(commandJson)) + "\\n" + compact(render(documentJson))
}.mkString("", "\\n", "\\n")
// Bulk bodies must be sent as ndjson, hence the explicit Content-Type.
val entity = new StringEntity(json)
val response = client.performRequest(
"POST",
"/_bulk",
Map("refresh" -> ESUtils.getEventDataRefresh(config)).asJava,
entity,
new BasicHeader("Content-Type", "application/x-ndjson"))
val responseJson = parse(EntityUtils.toString(response.getEntity))
val items = (responseJson \\ "items").asInstanceOf[JArray]
// NOTE(review): this match assumes every bulk item is a JObject; any other
// shape would throw a MatchError inside the Future.
items.arr.map { case value: JObject =>
val result = (value \\ "create" \\ "result").extract[String]
val id = (value \\ "create" \\ "_id").extract[String]
result match {
case "created" => id
case _ =>
error(s"[$result] Failed to update $index/$estype/$id")
""
}
}
} catch {
case e: IOException =>
error(s"Failed to update $index/$estype/<id>", e)
Nil
}
}
}
/**
 * Fetch one event by its eventId via a term query.
 * Returns the first hit, None when there is no match or the request fails.
 */
override def futureGet(
eventId: String,
appId: Int,
channelId: Option[Int])(implicit ec: ExecutionContext): Future[Option[Event]] = {
Future {
val index = eventdataName + "_" + eventdataKey(appId, channelId)
try {
val json =
("query" ->
("term" ->
("eventId" -> eventId)))
val entity = new NStringEntity(compact(render(json)), ContentType.APPLICATION_JSON)
val response = client.performRequest(
"POST",
s"/$index/_search",
Map.empty[String, String].asJava,
entity)
val jsonResponse = parse(EntityUtils.toString(response.getEntity))
val results = (jsonResponse \\ "hits" \\ "hits").extract[Seq[JValue]]
results.headOption.map { jv =>
(jv \\ "_source").extract[Event]
}
} catch {
case e: IOException =>
error(s"Failed to access to /$index/_search", e)
None
}
}
}
/**
 * Delete all documents matching eventId via _delete_by_query.
 * Returns true iff at least one document was deleted; false on failure.
 */
override def futureDelete(
eventId: String,
appId: Int,
channelId: Option[Int])(implicit ec: ExecutionContext): Future[Boolean] = {
Future {
val index = eventdataName + "_" + eventdataKey(appId, channelId)
try {
val json =
("query" ->
("term" ->
("eventId" -> eventId)))
val entity = new NStringEntity(compact(render(json)), ContentType.APPLICATION_JSON)
val response = client.performRequest(
"POST",
s"/$index/_delete_by_query",
Map("refresh" -> ESUtils.getEventDataRefresh(config)).asJava,
entity)
val jsonResponse = parse(EntityUtils.toString(response.getEntity))
(jsonResponse \\ "deleted").extract[Int] > 0
} catch {
case e: IOException =>
error(s"Failed to delete $index:$eventId", e)
false
}
}
}
/**
 * Query events with optional time/entity/name filters.
 * limit defaults to 20 when absent; a limit of -1 streams the full result
 * set. Errors are logged and collapse to an empty iterator.
 */
override def futureFind(
appId: Int,
channelId: Option[Int] = None,
startTime: Option[DateTime] = None,
untilTime: Option[DateTime] = None,
entityType: Option[String] = None,
entityId: Option[String] = None,
eventNames: Option[Seq[String]] = None,
targetEntityType: Option[Option[String]] = None,
targetEntityId: Option[Option[String]] = None,
limit: Option[Int] = None,
reversed: Option[Boolean] = None)
(implicit ec: ExecutionContext): Future[Iterator[Event]] = {
Future {
val index = eventdataName + "_" + eventdataKey(appId, channelId)
try {
val query = ESUtils.createEventQuery(
startTime, untilTime, entityType, entityId,
eventNames, targetEntityType, targetEntityId, reversed)
limit.getOrElse(20) match {
case -1 => ESUtils.getEventAll(client, index, query).toIterator
case size => ESUtils.getEvents(client, index, query, size).toIterator
}
} catch {
case e: IOException =>
error(e.getMessage)
Iterator.empty
}
}
}
}
| PredictionIO/PredictionIO | storage/elasticsearch/src/main/scala/org/apache/predictionio/data/storage/elasticsearch/ESLEvents.scala | Scala | apache-2.0 | 10,413 |
package org.gg.play.authentication.auth
import scala.concurrent.Future
import play.api.mvc._
import play.api.libs.iteratee.{Enumerator, Iteratee}
import play.api.libs.json.{Json, JsValue}
import play.api.libs.concurrent.Execution.Implicits._
import org.gg.play.authentication.misc.Loggable
/**
* User: luigi
* Date: 05/09/13
* Time: 11:50
*/
/**
 * Authentication/authorization building blocks for Play controllers.
 * Resolves the current user from the Play session or, failing that, from a
 * remember-me cookie, and wraps actions (HTML, REST, JSON, WebSocket) so that
 * they only run for an authenticated — and optionally admin — user.
 *
 * @tparam U the concrete user type, which must expose id/email/isAdmin
 */
trait Secured[U <: SecureUser] extends BodyParsers with Loggable {
// Name of the remember-me cookie consulted when the session has no username.
val CookieRememberMe: String = "PLAY_REMEMBER_ME"
// Body returned by onUnauthorizedRest for rejected REST calls.
val UnauthorizedRest = "not authorized"
// DAO used to resolve users by e-mail or remember-me token.
def secureUsersRetriever: SecureUsersRetriever[U]
/**
 * Try to retrieve the username of the user:
 * first from the standard Play session, then — if absent — by resolving the
 * remember-me cookie through [[secureUsersRetriever.findByRemember]].
 *
 * @param request the incoming request carrying session and cookies
 * @return the user's e-mail, or None when neither source identifies a user
 */
def username(request: RequestHeader): Option[String] = {
val remoteAddress = s" ip : ${request.remoteAddress}"
request.session.get(Security.username) match {
case username: Some[String] =>
// log debug "username from session : " + username.get + remoteAddress
username
case None =>
request.cookies.get(CookieRememberMe) match {
case None => None
case Some(cookie) =>
val email = secureUsersRetriever.findByRemember(cookie.value).map(_.email)
// log debug "username from cookie : " + email.getOrElse("email not found") + " cookie : " + cookie.value + remoteAddress
email
}
}
}
/**
 * Called when a browser request is not authorized.
 * Left abstract because the controller usually wants a redirect.
 *
 * @param request the rejected request
 * @return the result to send back (typically a redirect to login)
 */
def onUnauthorized(request: RequestHeader): Result
/**
 * Called when a REST call is not authorized; returns 401 with the default
 * [[UnauthorizedRest]] message body.
 *
 * @param request the rejected request (logged with its remote address)
 * @return an Unauthorized (401) result
 */
def onUnauthorizedRest(request: RequestHeader): Result = {
log.debug(s"on onUnauthorized ip : ${request.remoteAddress}")
Results.Unauthorized(UnauthorizedRest)
}
/**
 * Create a WebSocket for the authenticated user, passing the user's id to f.
 * If no user can be resolved, the socket sends a single "not authorized"
 * JSON message and closes the connection.
 *
 * @param f factory producing the (input, output) pair for a given user id
 * @return a JSON WebSocket
 */
def withAuthWS(f: => Int => Future[(Iteratee[JsValue, Unit], Enumerator[JsValue])]): WebSocket[JsValue, JsValue] = {
def errorFuture = {
// Just consume and ignore the input
val in = Iteratee.ignore[JsValue]
// Send a single "not authorized" message and close
val out = Enumerator(Json.toJson("not authorized")).andThen(Enumerator.eof)
Future {
(in, out)
}
}
WebSocket.async[JsValue] {
request =>
username(request) match {
case None =>
errorFuture
case Some(username) =>
secureUsersRetriever.findByEmail(username).map { user =>
f(user.id)
}.getOrElse(errorFuture)
}
}
}
/**
 * Wrap an asynchronous action so it only runs with the username of the
 * current user (resolved from session or remember cookie).
 *
 * @param f       action body, given the username
 * @param unauthF handler invoked when no user is authenticated
 * @param parser  body parser for the request
 * @return the guarded action
 */
def withAuthBase[T](parser: BodyParser[T] = parse.anyContent,
unauthF: RequestHeader => Result = onUnauthorized)
(f: => String => Request[T] => Future[Result]): EssentialAction = {
Security.Authenticated(username, unauthF) { user =>
Action.async(parser)(request => f(user)(request))
}
}
/**
 * Building block on top of [[withAuthBase]] that loads the full user object
 * from [[secureUsersRetriever]] and hands it to the wrapped action.
 *
 * Don't use this function directly; use one of the withUser/withAdmin
 * variants below.
 *
 * @param unauthF    handler for unauthenticated or filtered-out users
 * @param parser     body parser for the request
 * @param userFilter extra predicate the loaded user must satisfy (e.g. admin)
 * @param f          action body, given the loaded user
 * @tparam T body type produced by the parser
 * @return the guarded action
 */
def withUserBase[T](unauthF: RequestHeader => Result = onUnauthorized,
parser: BodyParser[T] = parse.anyContent,
userFilter: U => Boolean = _ => true)
(f: U => Request[T] => Future[Result]): EssentialAction = {
withAuthBase(parser, unauthF) { username => implicit request: Request[T] =>
secureUsersRetriever.findByEmail(username).filter(userFilter).map { user =>
f(user)(request)
}.getOrElse(Future.successful(unauthF(request)))
}
}
/**
 * Default variant for HTML actions: any authenticated user, and
 * [[onUnauthorized]] (typically a redirect) when there is none.
 */
def withUser: (U => Request[AnyContent] => Future[Result]) => EssentialAction = withUserBase() _
/**
 * Variant for standard REST GET calls: any authenticated user, 401 via
 * [[onUnauthorizedRest]] otherwise.
 */
def withRestUser: (U => Request[AnyContent] => Future[Result]) => EssentialAction = withUserBase(onUnauthorizedRest) _
/**
 * Variant for REST calls whose request body is JSON (parsed with parse.json);
 * rejects with [[onUnauthorizedRest]].
 */
def withJsonUser: (U => Request[JsValue] => Future[Result]) => EssentialAction = withUserBase(onUnauthorizedRest, parse.json) _
/**
 * Same as [[withUser]], but additionally requires the user to be an admin.
 */
def withAdmin: (U => Request[AnyContent] => Future[Result]) => EssentialAction = withUserBase(userFilter = _.isAdmin) _
/**
 * Same as [[withRestUser]], but additionally requires the user to be an admin.
 */
def withRestAdmin: (U => Request[AnyContent] => Future[Result]) => EssentialAction = withUserBase(onUnauthorizedRest, userFilter = _.isAdmin) _
/**
 * Same as [[withJsonUser]], but additionally requires the user to be an admin.
 * NOTE(review): unlike its siblings this one omits the trailing `_`; the
 * expected function type makes eta-expansion kick in anyway, but the
 * inconsistency is worth normalizing.
 */
def withJsonAdmin: (U => Request[JsValue] => Future[Result]) => EssentialAction = withUserBase(onUnauthorizedRest, parse.json, userFilter = _.isAdmin)
}
/**
 * User-store abstraction required by [[Secured]]: mix this trait into the DAO
 * so the authentication trait can load users from the database, either by the
 * e-mail held in the Play session or by the remember-me cookie token.
 */
trait SecureUsersRetriever[U <: SecureUser] {
// Resolve a user by the e-mail stored in the session.
def findByEmail(email: String): Option[U]
// Resolve a user by the value of the remember-me cookie.
def findByRemember(cookie: String): Option[U]
}
/**
 * Mix this trait into the application's User type so [[Secured]] can read the
 * identity and role information it needs.
 */
trait SecureUser {
// Numeric user id (passed to WebSocket handlers by Secured.withAuthWS).
def id: Int
// E-mail address; used as the session username.
def email: String
// Whether the user may use the admin-guarded actions.
def isAdmin: Boolean
} | gigiigig/play2-authentication | app/auth/Secured.scala | Scala | gpl-3.0 | 6,318 |
package sexamples.virtualnetworking.pingpongselectors
import se.sics.kompics.sl._
import se.sics.kompics.network.Network
/**
 * Kompics component that receives Ping messages on its network port, counts
 * them, and answers each one with a Pong addressed back to the sender.
 */
class Ponger extends ComponentDefinition {
// Own network address, read from the "pingpong.ponger.addr" config key.
val self = cfg.getValue[TAddress]("pingpong.ponger.addr");
// Required network port: source of incoming Pings and sink for outgoing Pongs.
val net = requires[Network];
// Number of Pings received so far (incremented per message).
private var counter: Long = 0L;
net uponEvent {
case TMessage(header, Ping) => {
counter += 1L;
log.info(s"Got Ping #${counter}!");
// Reply to the original sender, swapping source and destination.
trigger(TMessage(self, header.getSource(), Pong) -> net);
}
}
}
| kompics/kompics-scala | docs/src/main/scala/sexamples/virtualnetworking/pingpongselectors/Ponger.scala | Scala | gpl-2.0 | 490 |
package com.scaledaction.core.spark
import com.scaledaction.core.cassandra.CassandraConfig
import org.apache.spark.{ SparkContext, SparkConf, Logging }
import org.apache.spark.streaming.{ StreamingContext, Seconds }
/**
 * Helpers for obtaining shared Spark contexts wired to a Cassandra cluster.
 * Each method reuses an already-active context when one exists (via
 * SparkContext.getOrCreate / StreamingContext.getActiveOrCreate) so callers
 * never end up with two competing contexts in the same JVM.
 */
object SparkUtils extends Logging {

  //TODO - Need to replace the "sparkMaster" and "sparkAppName" arguments with a SparkConfig argument
  /**
   * Get the active SparkContext, or create one configured with the given
   * Cassandra seed nodes, master URL and application name.
   *
   * @param cassandraConfig supplies the Cassandra seed nodes for the connector
   * @param sparkMaster     Spark master URL
   * @param sparkAppName    application name shown in the Spark UI
   */
  def getActiveOrCreateSparkContext(cassandraConfig: CassandraConfig, sparkMaster: String, sparkAppName: String): SparkContext = {
    val conf = new SparkConf()
      .set("spark.cassandra.connection.host", cassandraConfig.seednodes)
      .setMaster(sparkMaster)
      .setAppName(sparkAppName)
    SparkContext.getOrCreate(conf)
  }

  /**
   * Get the active StreamingContext, or create one over the given
   * SparkContext with a 2-second batch interval.
   *
   * @param sc the underlying SparkContext to build the streaming context on
   */
  def getActiveOrCreateStreamingContext(sc: SparkContext): StreamingContext = {
    def createStreamingContext(): StreamingContext = {
      @transient val newSsc = new StreamingContext(sc, Seconds(2))
      logInfo(s"Creating new StreamingContext $newSsc")
      newSsc
    }
    StreamingContext.getActiveOrCreate(createStreamingContext)
  }

  /**
   * Convenience overload: resolve (or create) the SparkContext from the
   * Cassandra/master/app-name settings, then the StreamingContext on top of
   * it. Delegates to the two methods above instead of duplicating their
   * bodies, so the configuration logic lives in exactly one place.
   *
   * @param cassandraConfig supplies the Cassandra seed nodes for the connector
   * @param sparkMaster     Spark master URL
   * @param sparkAppName    application name shown in the Spark UI
   */
  def getActiveOrCreateStreamingContext(cassandraConfig: CassandraConfig, sparkMaster: String, sparkAppName: String): StreamingContext = {
    val sc = getActiveOrCreateSparkContext(cassandraConfig, sparkMaster, sparkAppName)
    getActiveOrCreateStreamingContext(sc)
  }
}
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600e.v2
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600e.v2.retriever.CT600EBoxRetriever
/**
 * CT600E (v2) box E22: "Shares in, and loans to, controlled companies".
 * Optional integer amount entered by the user; validation delegates to the
 * shared validateZeroOrPositiveInteger helper (its name indicates the value,
 * when present, must be zero or positive — confirm against ValidatableBox).
 */
case class E22(value: Option[Int]) extends CtBoxIdentifier("Shares in, and loans to, controlled companies") with CtOptionalInteger with Input with ValidatableBox[CT600EBoxRetriever] {
override def validate(boxRetriever: CT600EBoxRetriever): Set[CtValidation] = validateZeroOrPositiveInteger(this)
}
| liquidarmour/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600e/v2/E22.scala | Scala | apache-2.0 | 1,030 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.serializer
import java.io._
import scala.annotation.meta.param
import org.scalatest.BeforeAndAfterEach
import org.apache.spark.SparkFunSuite
/**
 * Tests for `SerializationDebugger`, which walks an object graph and reports
 * the path leading to a non-serializable object.
 *
 * `find` returns an empty list when its argument serializes cleanly;
 * otherwise it returns a trace with the offending object first and the root
 * object last. The fixture classes used below are defined at the end of this
 * file.
 */
class SerializationDebuggerSuite extends SparkFunSuite with BeforeAndAfterEach {
  import SerializationDebugger.find
  override def beforeEach(): Unit = {
    super.beforeEach()
    // Ensure the debugger is active for every test.
    SerializationDebugger.enableDebugging = true
  }
  test("primitives, strings, and nulls") {
    assert(find(1) === List.empty)
    assert(find(1L) === List.empty)
    assert(find(1.toShort) === List.empty)
    assert(find(1.0) === List.empty)
    assert(find("1") === List.empty)
    assert(find(null) === List.empty)
  }
  test("primitive arrays") {
    assert(find(Array[Int](1, 2)) === List.empty)
    assert(find(Array[Long](1, 2)) === List.empty)
  }
  test("non-primitive arrays") {
    assert(find(Array("aa", "bb")) === List.empty)
    assert(find(Array(new SerializableClass1)) === List.empty)
  }
  test("serializable object") {
    assert(find(new Foo(1, "b", 'c', 'd', null, null, null)) === List.empty)
  }
  test("nested arrays") {
    val foo1 = new Foo(1, "b", 'c', 'd', null, null, null)
    val foo2 = new Foo(1, "b", 'c', 'd', null, Array(foo1), null)
    assert(find(new Foo(1, "b", 'c', 'd', null, Array(foo2), null)) === List.empty)
  }
  test("nested objects") {
    val foo1 = new Foo(1, "b", 'c', 'd', null, null, null)
    val foo2 = new Foo(1, "b", 'c', 'd', null, null, foo1)
    assert(find(new Foo(1, "b", 'c', 'd', null, null, foo2)) === List.empty)
  }
  test("cycles (should not loop forever)") {
    // A self-referencing graph must terminate cleanly.
    val foo1 = new Foo(1, "b", 'c', 'd', null, null, null)
    foo1.g = foo1
    assert(find(new Foo(1, "b", 'c', 'd', null, null, foo1)) === List.empty)
  }
  test("root object not serializable") {
    val s = find(new NotSerializable)
    assert(s.size === 1)
    assert(s.head.contains("NotSerializable"))
  }
  // The trace lists the offending object first, then each containing level
  // up to the root object passed to find().
  test("array containing not serializable element") {
    val s = find(new SerializableArray(Array(new NotSerializable)))
    assert(s.size === 5)
    assert(s(0).contains("NotSerializable"))
    assert(s(1).contains("element of array"))
    assert(s(2).contains("array"))
    assert(s(3).contains("arrayField"))
    assert(s(4).contains("SerializableArray"))
  }
  test("object containing not serializable field") {
    val s = find(new SerializableClass2(new NotSerializable))
    assert(s.size === 3)
    assert(s(0).contains("NotSerializable"))
    assert(s(1).contains("objectField"))
    assert(s(2).contains("SerializableClass2"))
  }
  test("externalizable class writing out not serializable object") {
    val s = find(new ExternalizableClass(new SerializableClass2(new NotSerializable)))
    assert(s.size === 5)
    assert(s(0).contains("NotSerializable"))
    assert(s(1).contains("objectField"))
    assert(s(2).contains("SerializableClass2"))
    assert(s(3).contains("writeExternal"))
    assert(s(4).contains("ExternalizableClass"))
  }
  test("externalizable class writing out serializable objects") {
    assert(find(new ExternalizableClass(new SerializableClass1)).isEmpty)
  }
  test("object containing writeReplace() which returns not serializable object") {
    val s = find(new SerializableClassWithWriteReplace(new NotSerializable))
    assert(s.size === 3)
    assert(s(0).contains("NotSerializable"))
    assert(s(1).contains("writeReplace"))
    assert(s(2).contains("SerializableClassWithWriteReplace"))
  }
  test("object containing writeReplace() which returns serializable object") {
    assert(find(new SerializableClassWithWriteReplace(new SerializableClass1)).isEmpty)
  }
  test("no infinite loop with writeReplace() which returns class of its own type") {
    assert(find(new SerializableClassWithRecursiveWriteReplace).isEmpty)
  }
  test("object containing writeObject() and not serializable field") {
    val s = find(new SerializableClassWithWriteObject(new NotSerializable))
    assert(s.size === 3)
    assert(s(0).contains("NotSerializable"))
    assert(s(1).contains("writeObject data"))
    assert(s(2).contains("SerializableClassWithWriteObject"))
  }
  test("object containing writeObject() and serializable field") {
    assert(find(new SerializableClassWithWriteObject(new SerializableClass1)).isEmpty)
  }
  test("object of serializable subclass with more fields than superclass (SPARK-7180)") {
    // This should not throw ArrayIndexOutOfBoundsException
    find(new SerializableSubclass(new SerializableClass1))
  }
  test("crazy nested objects") {
    // Helper: asserts either a clean trace or one rooted at NotSerializable.
    def findAndAssert(shouldSerialize: Boolean, obj: Any): Unit = {
      val s = find(obj)
      if (shouldSerialize) {
        assert(s.isEmpty)
      } else {
        assert(s.nonEmpty)
        assert(s.head.contains("NotSerializable"))
      }
    }
    findAndAssert(false,
      new SerializableClassWithWriteReplace(new ExternalizableClass(new SerializableSubclass(
        new SerializableArray(
          Array(new SerializableClass1, new SerializableClass2(new NotSerializable))
        )
      )))
    )
    findAndAssert(true,
      new SerializableClassWithWriteReplace(new ExternalizableClass(new SerializableSubclass(
        new SerializableArray(
          Array(new SerializableClass1, new SerializableClass2(new SerializableClass1))
        )
      )))
    )
  }
  test("improveException") {
    val e = SerializationDebugger.improveException(
      new SerializableClass2(new NotSerializable), new NotSerializableException("someClass"))
    assert(e.getMessage.contains("someClass")) // original exception message should be present
    assert(e.getMessage.contains("SerializableClass2")) // found debug trace should be present
  }
  test("improveException with error in debugger") {
    // Object that throws exception in the SerializationDebugger
    val o = new SerializableClass1 {
      private def writeReplace(): Object = {
        throw new Exception()
      }
    }
    withClue("requirement: SerializationDebugger should fail trying debug this object") {
      intercept[Exception] {
        SerializationDebugger.find(o)
      }
    }
    val originalException = new NotSerializableException("someClass")
    // verify that original exception is returned on failure
    assert(SerializationDebugger.improveException(o, originalException).eq(originalException))
  }
}
class SerializableClass1 extends Serializable
class SerializableClass2(val objectField: Object) extends Serializable
class SerializableArray(val arrayField: Array[Object]) extends Serializable
class SerializableSubclass(val objectField: Object) extends SerializableClass1
// Fixture with a custom writeObject hook; default field data (including
// objectField) is still written via defaultWriteObject().
class SerializableClassWithWriteObject(val objectField: Object) extends Serializable {
  val serializableObjectField = new SerializableClass1
  @throws(classOf[IOException])
  private def writeObject(oos: ObjectOutputStream): Unit = {
    oos.defaultWriteObject()
  }
}
// writeReplace substitutes the (transient) replacement object at
// serialization time, so only the replacement's serializability matters.
class SerializableClassWithWriteReplace(@(transient @param) replacementFieldObject: Object)
  extends Serializable {
  private def writeReplace(): Object = {
    replacementFieldObject
  }
}
// writeReplace returns a fresh instance of the same class; the debugger must
// not recurse forever on it (see the "no infinite loop" test).
class SerializableClassWithRecursiveWriteReplace extends Serializable {
  private def writeReplace(): Object = {
    new SerializableClassWithRecursiveWriteReplace
  }
}
// Externalizable fixture: writeExternal manually writes an int, a known
// serializable object, and the constructor-supplied object.
class ExternalizableClass(objectField: Object) extends java.io.Externalizable {
  val serializableObjectField = new SerializableClass1
  override def writeExternal(out: ObjectOutput): Unit = {
    out.writeInt(1)
    out.writeObject(serializableObjectField)
    out.writeObject(objectField)
  }
  override def readExternal(in: ObjectInput): Unit = {}
}
// Serializable fixture exercising primitive fields, reference fields, arrays,
// and a mutable reference (g) used to build cyclic object graphs.
class Foo(
    a: Int,
    b: String,
    c: Char,
    d: Byte,
    e: Array[Int],
    f: Array[Object],
    var g: Foo) extends Serializable
class NotSerializable
| akopich/spark | core/src/test/scala/org/apache/spark/serializer/SerializationDebuggerSuite.scala | Scala | apache-2.0 | 8,565 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600j.v3
import org.joda.time.LocalDate
import org.mockito.Mockito._
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.box.CtValidation
import uk.gov.hmrc.ct.ct600.v3.retriever.CT600BoxRetriever
/**
 * Validation tests for box J10 (an 8-digit scheme reference, per these
 * fixtures): when B140 is true the box is mandatory and must match the
 * expected reference-number format; when B140 is false no validation fires.
 *
 * NOTE: the test descriptions previously said "J10A is blank/invalid/valid"
 * although each case actually varies J10 (J10A is held constant) — fixed.
 */
class J10Spec extends WordSpec with MockitoSugar with Matchers {
  "J10 validate" should {
    "not return errors when B140 is false" in {
      val mockBoxRetriever = mock[TaxAvoidanceBoxRetrieverForTest]
      when(mockBoxRetriever.b140()).thenReturn(B140(Some(false)))
      when(mockBoxRetriever.j5()).thenReturn(J5(None))
      when(mockBoxRetriever.j5A()).thenReturn(J5A(None))
      when(mockBoxRetriever.j10A()).thenReturn(J10A(None))
      J10(None).validate(mockBoxRetriever) shouldBe Set()
    }
    "not return errors when B140 is true and J5 and J5A are present and J10 is valid" in {
      val mockBoxRetriever = mock[TaxAvoidanceBoxRetrieverForTest]
      when(mockBoxRetriever.b140()).thenReturn(B140(Some(true)))
      when(mockBoxRetriever.j5()).thenReturn(J5(Some("12345678")))
      when(mockBoxRetriever.j5A()).thenReturn(J5A(Some(LocalDate.parse("2013-02-01"))))
      when(mockBoxRetriever.j10A()).thenReturn(J10A(None))
      J10(Some("12345678")).validate(mockBoxRetriever) shouldBe Set()
    }
    // J10(None): the box itself is absent, so the "required" error fires.
    "return required error when B140 is true and J10 is blank" in {
      val mockBoxRetriever = mock[TaxAvoidanceBoxRetrieverForTest]
      when(mockBoxRetriever.b140()).thenReturn(B140(Some(true)))
      when(mockBoxRetriever.j5()).thenReturn(J5(Some("12345678")))
      when(mockBoxRetriever.j5A()).thenReturn(J5A(Some(LocalDate.parse("2013-02-01"))))
      when(mockBoxRetriever.j10A()).thenReturn(J10A(Some(LocalDate.parse("2013-02-01"))))
      J10(None).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("J10"), "error.J10.required", None))
    }
    // "xyz" does not match the reference-number pattern, so the regex error fires.
    "return regex error when B140 is true and J10 is invalid" in {
      val mockBoxRetriever = mock[TaxAvoidanceBoxRetrieverForTest]
      when(mockBoxRetriever.b140()).thenReturn(B140(Some(true)))
      when(mockBoxRetriever.j5()).thenReturn(J5(Some("12345678")))
      when(mockBoxRetriever.j5A()).thenReturn(J5A(Some(LocalDate.parse("2013-02-01"))))
      when(mockBoxRetriever.j10A()).thenReturn(J10A(Some(LocalDate.parse("2013-02-01"))))
      J10(Some("xyz")).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("J10"), "error.J10.regexFailure", None))
    }
  }
}
| hmrc/ct-calculations | src/test/scala/uk/gov/hmrc/ct/ct600j/v3/J10Spec.scala | Scala | apache-2.0 | 3,058 |
package controllers
import scala.annotation.implicitNotFound
import play.api.i18n.Lang
import play.api.mvc.RequestHeader
import org.squeryl.PrimitiveTypeMode.inTransaction
import securesocial.core.SecureSocial
class LangAwareController extends BaseController {
  /**
   * Resolves the language for the current request.
   *
   * In production the language is always Polish. Otherwise an explicit
   * choice stored in the session (under `LangAwareController.SESSION_LANG_KEY`)
   * wins, falling back to the resolution inherited from the superclass.
   */
  implicit override def lang(implicit request: RequestHeader): Lang =
    if (play.Play.isProd) {
      Lang("pl")
    } else {
      request.session
        .get(LangAwareController.SESSION_LANG_KEY)
        .map(Lang(_))
        .getOrElse(super.lang)
    }
}
object LangAwareController {
  // Session key under which the user's explicitly chosen language is stored.
  val SESSION_LANG_KEY = "lang"
  // Session key flagging a language change after login — usage not visible
  // here; confirm against the login flow.
  val SESSION_LANG_CHANGED_AFTER_LOGIN = "lang-changed-after-login"
  // Fallback language code.
  val DEFAULT_LANG = "en"
}
| lukaszbudnik/hackaton-portal | app/controllers/LangAwareController.scala | Scala | apache-2.0 | 756 |
package eventstore
package core
package cluster
import scala.collection.immutable.SortedSet
/**
 * State of a node in an EventStoreDB cluster. States are totally ordered by
 * their numeric `id`; `isAllowedToConnect` tells whether a client may connect
 * to a node in this state.
 */
sealed trait NodeState extends Ordered[NodeState] {
  def id: Int
  def isAllowedToConnect: Boolean
  // Ordering follows the numeric id (see the .NET-client link in the companion).
  def compare(that: NodeState) = this.id compare that.id
}
object NodeState {
  // Every known state, sorted by id (NodeState is Ordered).
  val values: SortedSet[NodeState] = SortedSet(
    Initializing,
    ReadOnlyLeaderless,
    DiscoverLeader,
    Unknown,
    PreReadOnlyReplica,
    PreReplica,
    CatchingUp,
    Clone,
    ReadOnlyReplica,
    Follower,
    PreLeader,
    Leader,
    Manager,
    ShuttingDown,
    Shutdown,
    ResigningLeader
  )
  // Pre-rename server terminology, still accepted when parsing.
  final val oldTerminology: Map[String, NodeState] = Map(
    "Slave" -> NodeState.Follower,
    "Master" -> NodeState.Leader,
    "PreMaster" -> NodeState.PreLeader
  )
  // Lookup by name, covering both current and old terminology.
  private val map: Map[String, NodeState] =
    values.map(x => x.toString -> x).toMap ++ oldTerminology
  // Parses a state name; throws IllegalArgumentException for unknown names.
  def apply(x: String): NodeState =
    map.getOrElse(x, throw new IllegalArgumentException(s"No NodeState found for $x"))
  // id value and order derived from:
  // https://github.com/EventStore/EventStoreDB-Client-Dotnet-Legacy/blob/24cacecb6f003df5015258b05d8356e12c6b694f/src/EventStore.ClientAPI/Messages/ClusterMessages.cs#L74
  @SerialVersionUID(1L) case object Initializing extends NodeState {
    def id = 0
    def isAllowedToConnect = true
  }
  @SerialVersionUID(1L) case object ReadOnlyLeaderless extends NodeState {
    def id = 1
    def isAllowedToConnect = true
  }
  @SerialVersionUID(1L) case object DiscoverLeader extends NodeState {
    def id = 2
    def isAllowedToConnect = true
  }
  @SerialVersionUID(1L) case object Unknown extends NodeState {
    def id = 3
    def isAllowedToConnect = true
  }
  @SerialVersionUID(1L) case object PreReadOnlyReplica extends NodeState {
    def id = 4
    def isAllowedToConnect = true
  }
  @SerialVersionUID(1L) case object PreReplica extends NodeState {
    def id = 5
    def isAllowedToConnect = true
  }
  @SerialVersionUID(1L) case object CatchingUp extends NodeState {
    def id = 6
    def isAllowedToConnect = true
  }
  @SerialVersionUID(1L) case object Clone extends NodeState {
    def id = 7
    def isAllowedToConnect = true
  }
  @SerialVersionUID(1L) case object ReadOnlyReplica extends NodeState {
    def id = 8
    def isAllowedToConnect = true
  }
  @SerialVersionUID(1L) case object Follower extends NodeState {
    def id = 9
    def isAllowedToConnect = true
  }
  @SerialVersionUID(1L) case object PreLeader extends NodeState {
    def id = 10
    def isAllowedToConnect = true
  }
  @SerialVersionUID(1L) case object Leader extends NodeState {
    def id = 11
    def isAllowedToConnect = true
  }
  @SerialVersionUID(1L) case object Manager extends NodeState {
    def id = 12
    def isAllowedToConnect = false
  }
  @SerialVersionUID(1L) case object ShuttingDown extends NodeState {
    def id = 13
    def isAllowedToConnect = false
  }
  @SerialVersionUID(1L) case object Shutdown extends NodeState {
    def id = 14
    def isAllowedToConnect = false
  }
  @SerialVersionUID(1L) case object ResigningLeader extends NodeState {
    def id = 15
    def isAllowedToConnect = false
  }
} | EventStore/EventStore.JVM | core/src/main/scala/eventstore/core/cluster/NodeState.scala | Scala | bsd-3-clause | 3,176 |
package com.thoughtworks.datacommons.prepbuddy.imputations
import com.thoughtworks.datacommons.prepbuddy.rdds.TransformableRDD
import com.thoughtworks.datacommons.prepbuddy.utils.RowRecord
/**
 * A contract for an imputation strategy: fills in missing values in one
 * column of an RDD.
 */
trait ImputationStrategy extends Serializable {
  // Pre-computes whatever substitute data the strategy needs from the given
  // column (e.g. an aggregate) before individual records are handled.
  def prepareSubstitute(rdd: TransformableRDD, missingDataColumn: Int)
  // Produces the value to substitute for the missing field of the given
  // record — presumably assumes prepareSubstitute ran first (TODO confirm).
  def handleMissingData(record: RowRecord): String
}
| data-commons/prep-buddy | src/main/scala/com/thoughtworks/datacommons/prepbuddy/imputations/ImputationStrategy.scala | Scala | apache-2.0 | 423 |
package sp.widgetservice
import sp.domain._
import sp.domain.Logic._
import scala.util.Try
package API_Patient {
  // Domain model for the patient-overview widget. Most properties carry the
  // timestamp (as a string) of the event that produced them.
  sealed trait PatientProperty
  case class Priority(color: String, timestamp: String) extends PatientProperty
  case class Attended(attended: Boolean, doctorId: String, timestamp: String) extends PatientProperty
  case class Location(roomNr: String, timestamp: String) extends PatientProperty
  case class Team(team: String, clinic: String, reasonForVisit: String, timestamp: String) extends PatientProperty
  case class Examination(isOnExam: Boolean, timestamp: String) extends PatientProperty
  case class LatestEvent(latestEvent: String, timeDiff: Long, needsAttention: Boolean, timestamp: String) extends PatientProperty
  case class Plan(hasPlan: Boolean, timestamp: String) extends PatientProperty
  case class ArrivalTime(timeDiff: String, timestamp: String) extends PatientProperty
  case class Finished(finished: Boolean, finishedStillPresent: Boolean, timestamp: String) extends PatientProperty
  case class Debugging(clinic: String, reasonForVisit: String, location: String) extends PatientProperty
  case class Removed(timestamp: String) extends PatientProperty
  case class Undefined() extends PatientProperty
  // NOTE(review): fields are vars, so Patient instances are mutated in place
  // by consumers; kept mutable to preserve the existing interface.
  case class Patient(
    var careContactId: String,
    var priority: Priority,
    var attended: Attended,
    var location: Location,
    var team: Team,
    var examination: Examination,
    var latestEvent: LatestEvent,
    var plan: Plan,
    var arrivalTime: ArrivalTime,
    var debugging: Debugging,
    var finished: Finished
  )
}
package API_PatientEvent {
  import sp.widgetservice.{API_Patient => api}
  // Messages exchanged by the widget service: patient lifecycle events plus
  // state queries/responses.
  sealed trait Event
  sealed trait PatientEvent
  case class NewPatient(careContactId: String, patientData: Map[String, String], events: List[Map[String, String]]) extends PatientEvent with Event
  case class DiffPatient(careContactId: String, patientData: Map[String, String], newEvents: List[Map[String, String]], removedEvents: List[Map[String, String]]) extends PatientEvent with Event
  case class RemovedPatient(careContactId: String, timestamp: String) extends PatientEvent with Event
  sealed trait StateEvent
  case class GetState() extends StateEvent with Event
  case class State(patients: Map[String, api.Patient]) extends StateEvent with Event
  case class Tick() extends StateEvent with Event
  object attributes {
    // Service name used in message attributes.
    val service = "widgetservice"
  }
}
import sp.widgetservice.{API_PatientEvent => api}
object WidgetComm {
  /** Extracts the header and typed event body from a parsed SP message. */
  def extractEvent(mess: Try[SPMessage]) =
    mess.flatMap { message =>
      message.getHeaderAs[SPHeader].flatMap { header =>
        message.getBodyAs[api.Event].map(body => (header, body))
      }
    }
  /** Serialises a header plus state event into a JSON SP message. */
  def makeMess(h: SPHeader, b: api.StateEvent) =
    SPMessage.makeJson[SPHeader, api.StateEvent](h, b)
}
| kristoferB/SP | spservices/widgetService/src/main/scala/widgetService/WidgetComm.scala | Scala | mit | 2,756 |
package org.openmole.web.db.tables
import slick.driver.H2Driver.simple._
/**
* Created with IntelliJ IDEA.
* User: luft
* Date: 6/14/13
* Time: 1:39 PM
*/
// Slick table mapping for per-mole execution statistics; one row per mole id
// with a counter for each execution state.
class MoleStats(tag: Tag) extends Table[(String, Int, Int, Int, Int, Int)](tag, "MoleStats") {
  type Stats = Map[String, Int]
  // All-zero counter map, keyed by state name.
  lazy val empty = Map("Ready" -> 0,
    "Running" -> 0,
    "Completed" -> 0,
    "Failed" -> 0,
    "Cancelled" -> 0)
  def id = column[String]("ID", O.PrimaryKey)
  def ready = column[Int]("READY")
  def completed = column[Int]("COMPLETED")
  def running = column[Int]("RUNNING")
  def failed = column[Int]("FAILED")
  def cancelled = column[Int]("CANCELLED")
  // Default projection; tuple order matches the Table type parameter.
  def * = (id, ready, completed, running, failed, cancelled)
}
object MoleStats {
  // Shared TableQuery entry point for the MoleStats table.
  lazy val instance = TableQuery[MoleStats]
}
| ISCPIF/PSEExperiments | openmole-src/openmole/web/core/src/main/scala/org/openmole/web/db/tables/MoleStats.scala | Scala | agpl-3.0 | 783 |
package cromwell.database.migration.liquibase
import java.sql.Connection

import liquibase.database.jvm.{HsqlConnection, JdbcConnection}
import liquibase.database.{Database, DatabaseConnection, DatabaseFactory}
import liquibase.diff.compare.CompareControl
import liquibase.diff.{DiffGeneratorFactory, DiffResult}
import liquibase.resource.ClassLoaderResourceAccessor
import liquibase.{Contexts, LabelExpression, Liquibase}
import org.hsqldb.persist.HsqlDatabaseProperties

import scala.util.control.NonFatal
object LiquibaseUtils {
  // Empty contexts/labels: apply every changeset unconditionally.
  private val DefaultContexts = new Contexts()
  private val DefaultLabelExpression = new LabelExpression()

  /**
   * Updates a liquibase schema to the latest version.
   *
   * @param settings The liquibase settings.
   * @param jdbcConnection A jdbc connection to the database.
   */
  def updateSchema(settings: LiquibaseSettings)(jdbcConnection: Connection): Unit = {
    val liquibaseConnection = newConnection(jdbcConnection)
    try {
      val database = DatabaseFactory.getInstance.findCorrectDatabaseImplementation(liquibaseConnection)
      database.setDatabaseChangeLogLockTableName(settings.databaseChangeLogLockTableName.toUpperCase)
      database.setDatabaseChangeLogTableName(settings.databaseChangeLogTableName.toUpperCase)
      val liquibase = new Liquibase(settings.changeLogResourcePath, new ClassLoaderResourceAccessor(), database)
      updateSchema(liquibase)
    } finally {
      closeConnection(liquibaseConnection)
    }
  }

  /**
   * Wraps a jdbc connection in the database with the appropriate liquibase connection.
   * As of 3.4.x, liquibase uses a custom connection for Hsql, Sybase, and Derby, although only Hsql is supported by
   * cromwell.
   *
   * @param jdbcConnection The raw jdbc connection to wrap.
   * @return The liquibase connection wrapper suited to the underlying database.
   */
  def newConnection(jdbcConnection: Connection): DatabaseConnection = {
    jdbcConnection.getMetaData.getDatabaseProductName match {
      case HsqlDatabaseProperties.PRODUCT_NAME => new HsqlConnection(jdbcConnection)
      case _ => new JdbcConnection(jdbcConnection)
    }
  }

  /**
   * Updates the liquibase database.
   *
   * @param liquibase The facade for interacting with liquibase.
   */
  def updateSchema(liquibase: Liquibase): Unit = {
    liquibase.update(DefaultContexts, DefaultLabelExpression)
  }

  /**
   * Converts a liquibase connection to a liquibase database.
   *
   * @param liquibaseConnection The liquibase connection.
   * @return The liquibase database.
   */
  def toDatabase(liquibaseConnection: DatabaseConnection): Database = {
    DatabaseFactory.getInstance().findCorrectDatabaseImplementation(liquibaseConnection)
  }

  /**
   * Compares a reference to a comparison liquibase database.
   *
   * @param referenceDatabase The reference liquibase database.
   * @param comparisonDatabase The comparison liquibase database.
   * @return The complete diff results.
   */
  def compare(referenceDatabase: Database, comparisonDatabase: Database): DiffResult = {
    DiffGeneratorFactory.getInstance().compare(referenceDatabase, comparisonDatabase, CompareControl.STANDARD)
  }

  /**
   * Compares a reference to a comparison JDBC connection.
   *
   * @param referenceJdbc The reference connection.
   * @param comparisonJdbc The comparison connection.
   * @param block Block of code to run before closing the connections.
   * @return The complete diff results.
   */
  def compare[T](referenceJdbc: Connection, comparisonJdbc: Connection)(block: DiffResult => T): T = {
    withConnection(referenceJdbc) { referenceLiquibase =>
      withConnection(comparisonJdbc) { comparisonLiquibase =>
        val diffResult = compare(toDatabase(referenceLiquibase), toDatabase(comparisonLiquibase))
        block(diffResult)
      }
    }
  }

  /**
   * Provides a connection to a block of code, closing the connection afterwards.
   *
   * @param jdbcConnection The connection.
   * @param block The block to run.
   * @tparam T The return type of the block.
   * @return The result of running the block.
   */
  def withConnection[T](jdbcConnection: Connection)(block: DatabaseConnection => T): T = {
    val liquibaseConnection = newConnection(jdbcConnection)
    try {
      block(liquibaseConnection)
    } finally {
      closeConnection(liquibaseConnection)
    }
  }

  /**
   * Attempts to close a liquibase connection, ignoring non-fatal failures so
   * that a close error raised from a finally block cannot mask the original
   * exception.
   *
   * @param connection The liquibase connection.
   */
  def closeConnection(connection: DatabaseConnection): Unit = {
    try {
      connection.close()
    } catch {
      // Bug fix: the previous `try { close() } finally { /* ignore */ }`
      // swallowed nothing — an exception from close() propagated anyway.
      case NonFatal(_) => // closing is best-effort
    }
  }
}
| ohsu-comp-bio/cromwell | database/migration/src/main/scala/cromwell/database/migration/liquibase/LiquibaseUtils.scala | Scala | bsd-3-clause | 4,577 |
package us.stivers.blue.codec
import scala.annotation.{implicitNotFound}
import scalax.util.{Try,Success,Failure}
import us.stivers.blue.http.{Content}
/**
 * Type class encoding a value of type A into an HTTP Content body.
 */
@implicitNotFound("Cannot find ContentEncoder for ${A}")
trait ContentEncoder[A] extends Converter[A,Content]
/**
 * Type class decoding a value of type A from an HTTP Content body.
 */
@implicitNotFound("Cannot find ContentDecoder for ${A}")
trait ContentDecoder[A] extends Converter[Content,A]
/**
 * Bidirectional codec for A: pairs a ContentEncoder with a ContentDecoder.
 */
@implicitNotFound("Cannot find ContentCodec for ${A}")
trait ContentCodec[A] extends Codec[A,Content]
/**
 * Built-in ContentEncoders
 */
object ContentEncoder {
  // Lifts a plain A => Content function into a ContentEncoder; exceptions
  // thrown by f are captured in the returned Try.
  def apply[A](f: A => Content) = new ContentEncoder[A] {
    def apply(a: A): Try[Content] = Try(f(a))
  }
  // Serialises XML as a "text/xml" body.
  implicit val NodeSeqContentEncoder = ContentEncoder[scala.xml.NodeSeq] { a =>
    Content("text/xml", a.toString)
  }
}
/**
 * Built-in ContentDecoders
 */
object ContentDecoder {
  // Lifts a plain Content => A function into a ContentDecoder; exceptions
  // thrown by f are captured in the returned Try.
  def apply[A](f: Content => A) = new ContentDecoder[A] {
    def apply(c: Content): Try[A] = Try(f(c))
  }
  // Parses the body bytes as UTF-8 encoded XML.
  implicit val NodeSeqContentDecoder = ContentDecoder[scala.xml.NodeSeq] { c =>
    scala.xml.XML.loadString(new String(c.buffer.array, scala.io.Codec.UTF8))
  }
}
/**
 * Built-in ContentCodecs
 */
object ContentCodec {
  // Assembles a codec from the implicitly available encoder/decoder pair.
  def apply[A](implicit encoder: ContentEncoder[A], decoder: ContentDecoder[A]) = new ContentCodec[A] {
    val encode = encoder
    val decode = decoder
  }
  import ContentEncoder._
  import ContentDecoder._
  implicit val NodeSeqContentCodec = ContentCodec[scala.xml.NodeSeq]
} | cstivers78/blue | blue-core/src/main/scala/us/stivers/blue/codec/ContentCodec.scala | Scala | apache-2.0 | 1,579 |
package breeze.linalg.support
import breeze.math.Complex
/*
Copyright 2012 David Hall
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Marker for being able to transform the keys and values in a value collection
 * to new values.
 *
 * @author dramage
 * @author dlwh
 */
trait CanTransformValues[From, @specialized(Double, Int, Float) A] {
  /** Transforms (in place) every value in the given collection using fn. */
  def transform(from: From, fn: (A => A))
  /** Transforms (in place) only the active values of the given collection. */
  def transformActive(from: From, fn: (A => A))
}
object CanTransformValues {
  type Op[From, A] = CanTransformValues[From, A]
  //
  // Arrays
  //
  class OpArray[@specialized(Double, Int, Float, Long) A]
    extends Op[Array[A], A] {
    /**Transforms all values from the given collection. */
    def transform(from: Array[A], fn: (A) => A) {
      import spire.syntax.cfor._
      cforRange(0 until from.length) { i =>
        from(i) = fn(from(i))
      }
    }
    // Every element of an array is "active", so this is identical to transform.
    /**Transforms all active key-value pairs from the given collection. */
    def transformActive(from: Array[A], fn: (A) => A) { transform(from, fn)}
  }
  // Generic instance; the specialized objects below avoid boxing for the
  // common primitive element types.
  implicit def opArray[@specialized A] =
    new OpArray[A]
  implicit object OpArrayII extends OpArray[Int]
  implicit object OpArraySS extends OpArray[Short]
  implicit object OpArrayLL extends OpArray[Long]
  implicit object OpArrayFF extends OpArray[Float]
  implicit object OpArrayDD extends OpArray[Double]
  implicit object OpArrayCC extends OpArray[Complex]
} | wstcpyt/breeze | math/src/main/scala/breeze/linalg/support/CanTransformValues.scala | Scala | apache-2.0 | 2,022 |
package com.github.akovari.rdfp.api.ql.db
import com.github.akovari.rdfp.api.ql.UQLContext.IllegalUQLFieldException
import com.github.akovari.rdfp.api.ql.UQLContext.UnifiedResult.UnifiedResultFromResourceType
import com.github.akovari.rdfp.api.ql.db.SQLContext._
import com.typesafe.config.ConfigFactory
import scala.collection.JavaConverters._
import org.jooq._
import com.github.akovari.rdfp.api.ql.UQLContext
/**
* Created by akovari on 12.09.14.
*/
object SQLContext {
  // Field-to-column mappings, loaded once from the classpath.
  protected val sqlConfig = ConfigFactory.load("ql/sqlMappings.conf")
  // Selects the mapping config for the given entity type (currently only
  // resource-backed entities are mapped).
  def config(implicit entityType: UQLContext.UnifiedResult.UnifiedEntityType[_]) = entityType match {
    case resourceType: UQLContext.UnifiedResult.UnifiedResultFromResourceType => sqlConfig
  }
  // Resolves a UQL field name to "<tableAlias>.<columnName>" by scanning the
  // configured tables; throws IllegalUQLFieldException for unknown fields.
  def fieldToTableColumn(field: String)(implicit entityType: UQLContext.UnifiedResult.UnifiedEntityType[_]) = {
    val tablesConfs = config.getConfig(entityType.value.toString)
    val tableNames = config.getObject(entityType.value.toString).keySet().asScala
    val tableFound = tableNames.find(table => tablesConfs.hasPath(s"$table.columns.$field"))
    if (tableFound.isDefined) tablesConfs.getString(s"${tableFound.get}.alias") + "." + tablesConfs.getString(s"${tableFound.get}.columns.$field")
    else throw IllegalUQLFieldException(s"""Invalid Field "$field"""")
  }
  // Reverse lookup: maps a jOOQ table column back to its configured UQL field
  // name, if any. The optional suffix selects a suffixed table entry.
  def tableColumnToField[R <: Record, R2 <: Record, T](table: Table[R2], column: TableField[R, T], suffix: Option[String] = None)(implicit entityType: UQLContext.UnifiedResult.UnifiedEntityType[_]) = {
    val cols = config.getConfig(s"${entityType.value.toString}.${table.getName + suffix.getOrElse("")}.columns")
    cols.entrySet().asScala.find(_.getValue.unwrapped == column.getName).map(_.getKey)
  }
  // Returns the configured SQL alias for the given table (and optional suffix).
  def aliasForTable[R <: Record](entityType: UQLContext.UnifiedResult.UnifiedEntityType[_], table: Table[R], suffix: Option[String] = None) =
    config(entityType).getString(s"${entityType.value.toString}.${table.getName + suffix.getOrElse("")}.alias")
}
object SQLTables {
  import com.github.akovari.rdfp.data.schema.public_.Tables._
  // Flattens the columns of all given tables into a single Java list.
  def fields(tables: Table[_]*) = tables.map(_.fields()).flatten.toList.asJava
  // Aliased CASE_LINKS table; the suffixed variant presumably allows the same
  // table to appear more than once in a query — confirm against the config.
  def CASE_LINKS_T(suffix: Option[String] = None)(implicit resourceType: UQLContext.UnifiedResult.UnifiedResultFromResourceType) = CASE_LINKS.as(aliasForTable(resourceType, CASE_LINKS, suffix))
  def CASE_LINKS_T(implicit resourceType: UQLContext.UnifiedResult.UnifiedResultFromResourceType) = CASE_LINKS.as(aliasForTable(resourceType, CASE_LINKS))
}
| akovari/reactive-data-federation-poc | src/main/scala/com/github/akovari/rdfp/api/ql/db/SQLContext.scala | Scala | apache-2.0 | 2,501 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.tools.data
import java.io.IOException
import java.util.regex.Pattern
import com.beust.jcommander.ParameterException
import org.geotools.data.DataStore
import org.locationtech.geomesa.tools._
import org.locationtech.geomesa.tools.utils.Prompt
/**
 * Removes one or more schemas (feature types) from a GeoMesa data store.
 * Exactly one of an explicit type name or a regex pattern must be supplied;
 * removal is confirmed interactively unless the force flag is set.
 */
trait RemoveSchemaCommand[DS <: DataStore] extends DataStoreCommand[DS] {
  override val name = "remove-schema"
  override def params: RemoveSchemaParams
  override def execute(): Unit = {
    (Option(params.pattern), Option(params.featureName)) match {
      case (None, None) => throw new ParameterException("Please provide either featureName or pattern")
      case (Some(_), Some(_)) => throw new ParameterException("Cannot specify both featureName and pattern")
      case (Some(pattern), None) => withDataStore(remove(_, pattern))
      case (None, Some(typeName)) => withDataStore(remove(_, Seq(typeName)))
    }
  }
  // Removes every type name in the store that fully matches the pattern.
  protected def remove(ds: DS, pattern: Pattern): Unit = {
    val typeNames = ds.getTypeNames.filter(pattern.matcher(_).matches)
    if (typeNames.isEmpty) {
      Command.user.warn("No schemas matched the provided pattern")
    } else {
      remove(ds, typeNames)
    }
  }
  // Removes the given type names after (optional) interactive confirmation,
  // verifying afterwards that each schema is really gone.
  protected def remove(ds: DS, typeNames: Seq[String]): Unit = {
    if (params.force || promptConfirm(typeNames)) {
      typeNames.foreach { typeName =>
        // getSchema throwing IOException is treated as "schema absent".
        if (try { ds.getSchema(typeName) == null } catch { case _: IOException => true }) {
          Command.user.warn(s"Schema '$typeName' doesn't exist")
        } else {
          Command.user.info(s"Removing '$typeName'")
          ds.removeSchema(typeName)
          if (try { ds.getSchema(typeName) != null } catch { case _: IOException => false }) {
            Command.user.error(s"Error removing feature type '$typeName'")
          }
        }
      }
    } else {
      Command.user.info(s"Cancelled schema removal")
    }
  }
  // Interactive y/n confirmation listing all schemas about to be removed.
  protected def promptConfirm(featureNames: Seq[String]): Boolean =
    Prompt.confirm(s"Remove schema(s) ${featureNames.mkString(", ")} (y/n)? ")
}
// @Parameters(commandDescription = "Remove a schema and associated features from a GeoMesa catalog")
// CLI parameters: optional type name, optional regex pattern, optional --force.
trait RemoveSchemaParams extends OptionalTypeNameParam with OptionalForceParam with OptionalPatternParam
| aheyne/geomesa | geomesa-tools/src/main/scala/org/locationtech/geomesa/tools/data/RemoveSchemaCommand.scala | Scala | apache-2.0 | 2,702 |
import leon.lang._
import leon.annotation._
import leon.collection._
import leon._
/** AST for a small expression language over integers and booleans. */
object Trees {
  abstract class Expr
  case class Plus(lhs: Expr, rhs: Expr) extends Expr
  case class Minus(lhs: Expr, rhs: Expr) extends Expr
  case class LessThan(lhs: Expr, rhs: Expr) extends Expr
  case class And(lhs: Expr, rhs: Expr) extends Expr
  case class Or(lhs: Expr, rhs: Expr) extends Expr
  case class Not(e : Expr) extends Expr
  case class Eq(lhs: Expr, rhs: Expr) extends Expr
  case class Ite(cond: Expr, thn: Expr, els: Expr) extends Expr
  case class IntLiteral(v: Int) extends Expr
  case class BoolLiteral(b : Boolean) extends Expr
}
/** The two primitive types of the expression language. */
object Types {
  abstract class Type
  case object IntType extends Type
  case object BoolType extends Type
}
object TypeChecker {
  import Trees._
  import Types._
  // Structural type inference: Some(type) when the expression is well-typed,
  // None() otherwise (leon's Option, hence the parentheses).
  def typeOf(e :Expr) : Option[Type] = e match {
    case Plus(l,r) => (typeOf(l), typeOf(r)) match {
      case (Some(IntType), Some(IntType)) => Some(IntType)
      case _ => None()
    }
    case Minus(l,r) => (typeOf(l), typeOf(r)) match {
      case (Some(IntType), Some(IntType)) => Some(IntType)
      case _ => None()
    }
    case LessThan(l,r) => ( typeOf(l), typeOf(r)) match {
      case (Some(IntType), Some(IntType)) => Some(BoolType)
      case _ => None()
    }
    case And(l,r) => ( typeOf(l), typeOf(r)) match {
      case (Some(BoolType), Some(BoolType)) => Some(BoolType)
      case _ => None()
    }
    case Or(l,r) => ( typeOf(l), typeOf(r)) match {
      case (Some(BoolType), Some(BoolType)) => Some(BoolType)
      case _ => None()
    }
    case Not(e) => typeOf(e) match {
      case Some(BoolType) => Some(BoolType)
      case _ => None()
    }
    // Equality is polymorphic: both sides must have the same (defined) type.
    case Eq(lhs, rhs) => (typeOf(lhs), typeOf(rhs)) match {
      case (Some(t1), Some(t2)) if t1 == t2 => Some(BoolType)
      case _ => None()
    }
    // Both branches of an if-expression must agree on their type.
    case Ite(c, th, el) => (typeOf(c), typeOf(th), typeOf(el)) match {
      case (Some(BoolType), Some(t1), Some(t2)) if t1 == t2 => Some(t1)
      case _ => None()
    }
    case IntLiteral(_) => Some(IntType)
    case BoolLiteral(_) => Some(BoolType)
  }
  // An expression type-checks iff a type can be assigned to it.
  def typeChecks(e : Expr) = typeOf(e).isDefined
}
/**
 * Evaluators for the expression language: typed (semI/semB, with type preconditions)
 * and untyped (semUntyped, encoding booleans as 0/1 integers).
 */
object Semantics {
  import Trees._
  import Types._
  import TypeChecker._
  /** Evaluates an expression known to have type Int (enforced by the precondition). */
  def semI(t : Expr) : Int = {
    require( typeOf(t) == ( Some(IntType) : Option[Type] ))
    t match {
      case Plus(lhs , rhs) => semI(lhs) + semI(rhs)
      case Minus(lhs , rhs) => semI(lhs) - semI(rhs)
      case Ite(cond, thn, els) =>
        if (semB(cond)) semI(thn) else semI(els)
      case IntLiteral(v) => v
    }
  }
  /** Evaluates an expression known to have type Bool (enforced by the precondition). */
  def semB(t : Expr) : Boolean = {
    require( (Some(BoolType): Option[Type]) == typeOf(t))
    t match {
      case And(lhs, rhs ) => semB(lhs) && semB(rhs)
      case Or(lhs , rhs ) => semB(lhs) || semB(rhs)
      case Not(e) => !semB(e)
      case LessThan(lhs, rhs) => semI(lhs) < semI(rhs)
      case Ite(cond, thn, els) =>
        if (semB(cond)) semB(thn) else semB(els)
      // equality dispatches on the (equal, by the precondition) operand types
      case Eq(lhs, rhs) => (typeOf(lhs), typeOf(rhs)) match {
        case ( Some(IntType), Some(IntType) ) => semI(lhs) == semI(rhs)
        case ( Some(BoolType), Some(BoolType) ) => semB(lhs) == semB(rhs)
      }
      case BoolLiteral(b) => b
    }
  }
  /** Encodes a boolean as an integer: true -> 1, false -> 0. */
  def b2i(b : Boolean) = if (b) 1 else 0
  /**
   * Untyped evaluator over the 0/1 boolean encoding (0 is false, anything else true).
   * The postcondition states it agrees with semI/semB on well-typed expressions.
   */
  @induct
  def semUntyped( t : Expr) : Int = { t match {
    case Plus (lhs, rhs) => semUntyped(lhs) + semUntyped(rhs)
    case Minus(lhs, rhs) => semUntyped(lhs) - semUntyped(rhs)
    // and: short-circuits to 0 when the left operand is "false" (== 0)
    case And  (lhs, rhs) => if (semUntyped(lhs)!=0) semUntyped(rhs) else 0
    // or: yields 1 when the left operand is "true" (!= 0)
    case Or(lhs, rhs ) =>
      if (semUntyped(lhs) == 0) semUntyped(rhs) else 1
    case Not(e) =>
      b2i(semUntyped(e) == 0)
    case LessThan(lhs, rhs) =>
      b2i(semUntyped(lhs) < semUntyped(rhs))
    case Eq(lhs, rhs) =>
      b2i(semUntyped(lhs) == semUntyped(rhs))
    case Ite(cond, thn, els) =>
      if (semUntyped(cond) == 0) semUntyped(els) else semUntyped(thn)
    case IntLiteral(v) => v
    case BoolLiteral(b) => b2i(b)
  }} ensuring { res => typeOf(t) match {
    case Some(IntType) => res == semI(t)
    case Some(BoolType) => res == b2i(semB(t))
    case None() => true
  }}
}
/**
 * Desugars the full expression language into a simpler integer-only language
 * ([[SimpleE]]), encoding booleans as integers (0 = false, anything else = true).
 */
object Desugar {
  import Types._
  import TypeChecker._
  import Semantics.b2i

  abstract class SimpleE
  case class Plus(lhs : SimpleE, rhs : SimpleE) extends SimpleE
  case class Neg(arg : SimpleE) extends SimpleE
  case class Ite(cond : SimpleE, thn : SimpleE, els : SimpleE) extends SimpleE
  case class Eq(lhs : SimpleE, rhs : SimpleE) extends SimpleE
  case class LessThan(lhs : SimpleE, rhs : SimpleE) extends SimpleE
  case class Literal(i : Int) extends SimpleE

  /**
   * Translates a [[Trees.Expr]] into an equivalent [[SimpleE]].
   *
   * The postcondition checks that the simple semantics agree with the untyped
   * semantics of the original expression, and pins the expected translation of
   * `Minus` via the `passes` clause.
   */
  @induct
  def desugar(e : Trees.Expr) : SimpleE = { e match {
    case Trees.Plus (lhs, rhs) => Plus(desugar(lhs), desugar(rhs))
    // fixed: subtraction desugars to addition of the negated right-hand side
    // (the previous body returned Literal(0) and was marked "FIXME forgot Neg")
    case Trees.Minus(lhs, rhs) => Plus(desugar(lhs), Neg(desugar(rhs)))
    case Trees.LessThan(lhs, rhs) => LessThan(desugar(lhs), desugar(rhs))
    // boolean connectives become conditionals over the 0/1 encoding
    case Trees.And  (lhs, rhs) => Ite(desugar(lhs), desugar(rhs), Literal(0))
    case Trees.Or   (lhs, rhs) => Ite(desugar(lhs), Literal(1), desugar(rhs))
    case Trees.Not(e) => Ite(desugar(e), Literal(0), Literal(1))
    case Trees.Eq(lhs, rhs) =>
      Eq(desugar(lhs), desugar(rhs))
    case Trees.Ite(cond, thn, els) => Ite(desugar(cond), desugar(thn), desugar(els))
    case Trees.IntLiteral(v) => Literal(v)
    case Trees.BoolLiteral(b) => Literal(b2i(b))
  }} ensuring { res =>
    sem(res) == Semantics.semUntyped(e) && ((e,res) passes {
      case Trees.Minus(Trees.IntLiteral(42), Trees.IntLiteral(i)) =>
        Plus(Literal(42), Neg(Literal(i)))
    })
  }

  /** Evaluator for the simple language; booleans are represented as 0/1 integers. */
  def sem(e : SimpleE) : Int = e match {
    case Plus (lhs, rhs) => sem(lhs) + sem(rhs)
    case Ite(cond, thn, els) => if (sem(cond) != 0) sem(thn) else sem(els)
    case Neg(arg) => -sem(arg)
    case Eq(lhs,rhs) => b2i(sem(lhs) == sem(rhs))
    case LessThan(lhs, rhs) => b2i(sem(lhs) < sem(rhs))
    case Literal(i) => i
  }
}
| epfl-lara/leon | testcases/repair/Desugar/Desugar2.scala | Scala | gpl-3.0 | 5,907 |
package views.html
package practice
import play.api.libs.json.Json
import lila.api.Context
import lila.app.templating.Environment._
import lila.app.ui.ScalatagsTemplate._
import lila.common.String.html.safeJsonValue
/** Renders the practice study page, embedding the practice/study/analysis JSON for the client. */
object show {
  def apply(
    us: lila.practice.UserStudy,
    data: lila.practice.JsonView.JsData
  )(implicit ctx: Context) =
    views.html.base.layout(
      title = us.practiceStudy.name,
      moreCss = cssTag("analyse.practice"),
      moreJs = frag(
        analyseTag,
        analyseNvuiTag,
        // safeJsonValue escapes the JSON so it can be inlined into a <script> tag
        embedJsUnsafe(s"""lichess.practice=${safeJsonValue(
          Json.obj(
            "practice" -> data.practice,
            "study" -> data.study,
            "data" -> data.analysis,
            "i18n" -> board.userAnalysisI18n(),
            "explorer" -> views.html.board.bits.explorerConfig
          )
        )}""")
      ),
      csp = defaultCsp.withWebAssembly.some,
      chessground = false,
      zoomable = true
    ) {
      main(cls := "analyse")
    }
}
| luanlv/lila | app/views/practice/show.scala | Scala | mit | 1,017 |
package io.finch
import cats.data.NonEmptyList
import com.twitter.concurrent.AsyncStream
import com.twitter.finagle.http.{Cookie, Method, Request}
import com.twitter.finagle.http.exp.Multipart.FileUpload
import com.twitter.finagle.netty3.ChannelBufferBuf
import com.twitter.io.Buf
import com.twitter.util.{Base64StringEncoder, Future, Try}
import io.catbird.util.Rerunnable
import io.finch.endpoint._
import io.finch.internal._
import java.util.UUID
import scala.reflect.ClassTag
import shapeless._
/**
* A collection of [[Endpoint]] combinators.
*/
trait Endpoints {
// Arity-specific endpoint aliases over shapeless HLists.
type Endpoint0 = Endpoint[HNil]
type Endpoint2[A, B] = Endpoint[A :: B :: HNil]
type Endpoint3[A, B, C] = Endpoint[A :: B :: C :: HNil]
/**
 * An universal [[Endpoint]] that matches the given string.
 */
private[finch] class Matcher(s: String) extends Endpoint[HNil] {
  // matches (and consumes) the head path segment iff it equals `s` exactly
  final def apply(input: Input): Endpoint.Result[HNil] = input.headOption match {
    case Some(`s`) => EndpointResult.Matched(input.drop(1), Rs.OutputHNil)
    case _ => EndpointResult.Skipped
  }
  override final def toString: String = s
}
// Implicit lifts so string/int/boolean literals can be used directly as path matchers.
implicit def stringToMatcher(s: String): Endpoint0 = new Matcher(s)
implicit def intToMatcher(i: Int): Endpoint0 = new Matcher(i.toString)
implicit def booleanToMatcher(b: Boolean): Endpoint0 = new Matcher(b.toString)
/**
 * An universal extractor that extracts some value of type `A` if it's possible to fetch the value
 * from the string.
 */
private[finch] case class Extractor[A](name: String, f: String => Option[A]) extends Endpoint[A] {
  // consumes the head path segment when `f` can decode it; skips otherwise
  final def apply(input: Input): Endpoint.Result[A] = input.headOption match {
    case Some(ss) => f(ss) match {
      case Some(a) => EndpointResult.Matched(input.drop(1), Rs.const(Output.payload(a)))
      case _ => EndpointResult.Skipped
    }
    case _ => EndpointResult.Skipped
  }
  // renames the extractor; only affects the toString representation
  def apply(n: String): Endpoint[A] = copy[A](name = n)
  override final def toString: String = s":$name"
}
// Specialization of the extractor for strings: no decoding step is needed.
private[finch] case class StringExtractor(name: String) extends Endpoint[String] {
  final def apply(input: Input): Endpoint.Result[String] = input.headOption match {
    case Some(s) => EndpointResult.Matched(input.drop(1), Rs.const(Output.payload(s)))
    case _ => EndpointResult.Skipped
  }
  final def apply(n: String): Endpoint[String] = copy(name = n)
  final override def toString: String = s":$name"
}
/**
 * An extractor that extracts a value of type `Seq[A]` from the tail of the route.
 */
private[finch] case class TailExtractor[A](
  name: String,
  f: String => Option[A]) extends Endpoint[Seq[A]] {
  // consumes the entire remaining path, keeping only segments `f` can decode
  final def apply(input: Input): Endpoint.Result[Seq[A]] =
    EndpointResult.Matched(
      input.copy(path = Nil),
      Rs.const(Output.payload(input.path.flatMap(f.andThen(_.toSeq))))
    )
  final def apply(n: String): Endpoint[Seq[A]] = copy[A](name = n)
  final override def toString: String = s":$name*"
}
// Strict UUID parser: rejects strings of the wrong length up front to avoid
// paying for the exception-driven failure path of UUID.fromString.
private[this] def extractUUID(s: String): Option[UUID] =
  if (s.length != 36) None
  else try Some(UUID.fromString(s)) catch { case _: Exception => None }
/**
 * A matching [[Endpoint]] that reads a string value from the current path segment.
 *
 * @note This is an experimental API and might be removed without any notice.
 */
val path: Endpoint[String] = new Endpoint[String] {
  final def apply(input: Input): Endpoint.Result[String] = input.headOption match {
    case Some(s) => EndpointResult.Matched(input.drop(1), Rs.const(Output.payload(s)))
    case _ => EndpointResult.Skipped
  }
  final override def toString: String = ":path"
}
/**
 * A matching [[Endpoint]] that reads a value of type `A` (using the implicit
 * [[DecodePath]] instances defined for `A`) from the current path segment.
 */
def path[A](implicit c: DecodePath[A]): Endpoint[A] = new Endpoint[A] {
  final def apply(input: Input): Endpoint.Result[A] = input.headOption match {
    // only consume the segment when the decoder succeeds
    case Some(s) => c(s) match {
      case Some(a) =>
        EndpointResult.Matched(input.drop(1), Rs.const(Output.payload(a)))
      case _ => EndpointResult.Skipped
    }
    case _ => EndpointResult.Skipped
  }
  final override def toString: String = ":path"
}
// Predefined single-segment extractors and their whole-tail (`*`) counterparts.
// tooInt/tooLong/tooBoolean come from io.finch.internal and return Options
// (non-throwing parsers) -- see the io.finch.internal package.
/**
 * A matching [[Endpoint]] that reads an integer value from the current path segment.
 */
object int extends Extractor("int", _.tooInt)
/**
 * A matching [[Endpoint]] that reads an integer tail from the current path segment.
 */
object ints extends TailExtractor("int", _.tooInt)
/**
 * A matching [[Endpoint]] that reads a long value from the current path segment.
 */
object long extends Extractor("long", _.tooLong)
/**
 * A matching [[Endpoint]] that reads a long tail from the current path segment.
 */
object longs extends TailExtractor("long", _.tooLong)
/**
 * A matching [[Endpoint]] that reads a string value from the current path segment.
 */
object string extends StringExtractor("string")
/**
 * A matching [[Endpoint]] that reads a string tail from the current path segment.
 */
object strings extends TailExtractor("string", s => Some(s))
/**
 * A matching [[Endpoint]] that reads a boolean value from the current path segment.
 */
object boolean extends Extractor("boolean", _.tooBoolean)
/**
 * A matching [[Endpoint]] that reads a boolean tail from the current path segment.
 */
object booleans extends TailExtractor("boolean", _.tooBoolean)
/**
 * A matching [[Endpoint]] that reads an UUID value from the current path segment.
 */
object uuid extends Extractor("uuid", extractUUID)
/**
 * A matching [[Endpoint]] that reads an UUID tail from the current path segment.
 */
object uuids extends TailExtractor("uuid", extractUUID)
/**
 * An [[Endpoint]] that skips all path segments.
 */
object * extends Endpoint[HNil] {
  // always matches, consuming the whole remaining path
  final def apply(input: Input): Endpoint.Result[HNil] =
    EndpointResult.Matched(input.copy(path = Nil), Rs.OutputHNil)
  final override def toString: String = "*"
}
/**
 * An identity [[Endpoint]].
 */
object / extends Endpoint[HNil] {
  // always matches without consuming any input
  final def apply(input: Input): Endpoint.Result[HNil] =
    EndpointResult.Matched(input, Rs.OutputHNil)
  final override def toString: String = ""
}
// Guards `r` with an HTTP-method check: only runs `r` when the request uses method `m`.
private[this] def method[A](m: Method)(r: Endpoint[A]): Endpoint[A] = new Endpoint[A] {
  final def apply(input: Input): Endpoint.Result[A] =
    if (input.request.method == m) r(input)
    else EndpointResult.Skipped
  final override def toString: String = s"${m.toString().toUpperCase} /${r.toString}"
}
/**
 * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The
 * resulting [[Endpoint]] succeeds on the request only if its method is `GET` and the underlying
 * endpoint succeeds on it.
 */
def get[A]: Endpoint[A] => Endpoint[A] = method(Method.Get)
/**
 * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The
 * resulting [[Endpoint]] succeeds on the request only if its method is `POST` and the underlying
 * endpoint succeeds on it.
 */
def post[A]: Endpoint[A] => Endpoint[A] = method(Method.Post)
/**
 * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The
 * resulting [[Endpoint]] succeeds on the request only if its method is `PATCH` and the underlying
 * endpoint succeeds on it.
 */
def patch[A]: Endpoint[A] => Endpoint[A] = method(Method.Patch)
/**
 * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The
 * resulting [[Endpoint]] succeeds on the request only if its method is `DELETE` and the
 * underlying endpoint succeeds on it.
 */
def delete[A]: Endpoint[A] => Endpoint[A] = method(Method.Delete)
/**
 * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The
 * resulting [[Endpoint]] succeeds on the request only if its method is `HEAD` and the underlying
 * endpoint succeeds on it.
 */
def head[A]: Endpoint[A] => Endpoint[A] = method(Method.Head)
/**
 * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The
 * resulting [[Endpoint]] succeeds on the request only if its method is `OPTIONS` and the
 * underlying endpoint succeeds on it.
 */
def options[A]: Endpoint[A] => Endpoint[A] = method(Method.Options)
/**
 * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The
 * resulting [[Endpoint]] succeeds on the request only if its method is `PUT` and the underlying
 * endpoint succeeds on it.
 */
def put[A]: Endpoint[A] => Endpoint[A] = method(Method.Put)
/**
 * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The
 * resulting [[Endpoint]] succeeds on the request only if its method is `CONNECT` and the
 * underlying endpoint succeeds on it.
 */
def connect[A]: Endpoint[A] => Endpoint[A] = method(Method.Connect)
/**
 * A combinator that wraps the given [[Endpoint]] with additional check of the HTTP method. The
 * resulting [[Endpoint]] succeeds on the request only if its method is `TRACE` and the underlying
 * endpoint succeeds on it.
 */
def trace[A]: Endpoint[A] => Endpoint[A] = method(Method.Trace)
// Helper functions.
// Looks up a param in the query-string/form params, falling back to
// multipart attributes for multipart/form-data requests.
private[this] def requestParam(param: String)(req: Request): Option[String] =
  req.params.get(param)
    .orElse(req.multipart.flatMap(m => m.attributes.get(param).flatMap(_.headOption)))
// All values of a multi-value query-string param (possibly empty).
private[this] def requestParams(params: String)(req: Request): Seq[String] =
  req.params.getAll(params).toList
private[this] def requestHeader(header: String)(req: Request): Option[String] =
  req.headerMap.get(header)
private[this] def requestCookie(cookie: String)(req: Request): Option[Cookie] =
  req.cookies.get(cookie)
// Cached Some("") to avoid re-allocating for empty bodies.
private[this] val someEmptyString = Some("")
// Reads the request body as a string using the request charset (UTF-8 default);
// None when no Content-Length is present.
private[this] def requestBodyString(req: Request): Option[String] =
  req.contentLength match {
    case Some(0) => someEmptyString
    case Some(_) =>
      val buffer = ChannelBufferBuf.Owned.extract(req.content)
      val charset = req.charsetOrUtf8
      // Note: We usually have an array underneath the ChannelBuffer (at least on Netty 3).
      // This check is mostly about a safeguard.
      if (buffer.hasArray) Some(new String(buffer.array(), 0, buffer.readableBytes(), charset))
      else Some(buffer.toString(charset))
    case None => None
  }
private[this] val someEmptyBuf = Some(Buf.Empty)
private[this] final def requestBody(req: Request): Option[Buf] =
  req.contentLength match {
    case Some(0) => someEmptyBuf
    case Some(_) => Some(req.content)
    case None => None
  }
private[this] val someEmptyByteArray = Some(Array.empty[Byte])
private[this] def requestBodyByteArray(req: Request): Option[Array[Byte]] =
  req.contentLength match {
    case Some(0) => someEmptyByteArray
    case Some(_) => Some(Buf.ByteArray.Shared.extract(req.content))
    case None => None
  }
// Try guards against req.multipart throwing on malformed multipart payloads.
private[this] def requestUpload(upload: String)(req: Request): Option[FileUpload] =
  Try(req.multipart).getOrElse(None).flatMap(m => m.files.get(upload).flatMap(fs => fs.headOption))
// Building blocks for request-item endpoints:
// option: always matches, producing f(request);
// exists: matches only when f(request) is defined;
// matches: matches only when predicate p holds on the request.
private[this] def option[A](item: items.RequestItem)(f: Request => A): Endpoint[A] =
  Endpoint.embed(item)(input =>
    EndpointResult.Matched(input, Rerunnable(Output.payload(f(input.request))))
  )
private[this] def exists[A](item: items.RequestItem)(f: Request => Option[A]): Endpoint[A] =
  Endpoint.embed(item) { input =>
    f(input.request) match {
      case Some(a) => EndpointResult.Matched(input, Rerunnable(Output.payload(a)))
      case _ => EndpointResult.Skipped
    }
  }
private[this] def matches[A]
  (item: items.RequestItem)
  (p: Request => Boolean)
  (f: Request => A): Endpoint[A] = Endpoint.embed(item)(input =>
  if (p(input.request))
    EndpointResult.Matched(input, Rerunnable(Output.payload(f(input.request))))
  else
    EndpointResult.Skipped
)
/**
 * A root [[Endpoint]] that always matches and extracts the current request.
 */
val root: Endpoint[Request] = option(items.MultipleItems)(identity)
/**
 * An evaluating [[Endpoint]] that reads an optional query-string param `name` from the request
 * into an `Option`.
 */
def paramOption(name: String): Endpoint[Option[String]] =
  option(items.ParamItem(name))(requestParam(name))
/**
 * An evaluating [[Endpoint]] that reads a required query-string param `name` from the
 * request or raises an [[Error.NotPresent]] exception when the param is missing; an
 * [[Error.NotValid]] exception is the param is empty.
 */
def param(name: String): Endpoint[String] =
  paramOption(name).failIfNone
/**
 * A matching [[Endpoint]] that only matches the requests that contain a given query-string
 * param `name`.
 */
def paramExists(name: String): Endpoint[String] =
  exists(items.ParamItem(name))(requestParam(name))
/**
 * An evaluating [[Endpoint]] that reads an optional (in a meaning that a resulting
 * `Seq` may be empty) multi-value query-string param `name` from the request into a `Seq`.
 */
def params(name: String): Endpoint[Seq[String]] =
  option(items.ParamItem(name))(i => requestParams(name)(i))
/**
 * An evaluating [[Endpoint]] that reads a required multi-value query-string param `name`
 * from the request into a `NonEmptyList` or raises a [[Error.NotPresent]] exception
 * when the params are missing or empty.
 */
def paramsNel(name: String): Endpoint[NonEmptyList[String]] =
  option(items.ParamItem(name))(requestParams(name)).mapAsync { values =>
    // empty strings are discarded before the non-emptiness check
    values.filter(_.nonEmpty).toList match {
      case Nil => Future.exception(Error.NotPresent(items.ParamItem(name)))
      case seq => Future.value(NonEmptyList(seq.head, seq.tail))
    }
  }
/**
 * An evaluating [[Endpoint]] that reads a required HTTP header `name` from the request or raises
 * an [[Error.NotPresent]] exception when the header is missing.
 */
def header(name: String): Endpoint[String] =
  option(items.HeaderItem(name))(requestHeader(name)).failIfNone
/**
 * An evaluating [[Endpoint]] that reads an optional HTTP header `name` from the request into an
 * `Option`.
 */
def headerOption(name: String): Endpoint[Option[String]] =
  option(items.HeaderItem(name))(requestHeader(name))
/**
 * A matching [[Endpoint]] that only matches the requests that contain a given header `name`.
 */
def headerExists(name: String): Endpoint[String] =
  exists(items.HeaderItem(name))(requestHeader(name))
/**
 * An evaluating [[Endpoint]] that reads a binary request body, interpreted as a `Array[Byte]`,
 * into an `Option`. The returned [[Endpoint]] only matches non-chunked (non-streamed) requests.
 */
val binaryBodyOption: Endpoint[Option[Array[Byte]]] =
  matches(items.BodyItem)(!_.isChunked)(requestBodyByteArray)
/**
 * An evaluating [[Endpoint]] that reads a required binary request body, interpreted as an
 * `Array[Byte]`, or throws a [[Error.NotPresent]] exception. The returned [[Endpoint]] only
 * matches non-chunked (non-streamed) requests.
 */
val binaryBody: Endpoint[Array[Byte]] = binaryBodyOption.failIfNone
/**
 * An evaluating [[Endpoint]] that reads an optional request body, interpreted as a `String`, into
 * an `Option`. The returned [[Endpoint]] only matches non-chunked (non-streamed) requests.
 */
val stringBodyOption: Endpoint[Option[String]] =
  matches(items.BodyItem)(!_.isChunked)(requestBodyString)
/**
 * An evaluating [[Endpoint]] that reads the required request body, interpreted as a `String`, or
 * throws an [[Error.NotPresent]] exception. The returned [[Endpoint]] only matches non-chunked
 * (non-streamed) requests.
 */
val stringBody: Endpoint[String] = stringBodyOption.failIfNone
/**
 * An [[Endpoint]] that reads an optional request body represented as `CT` (`ContentType`) and
 * interpreted as `A`, into an `Option`. The returned [[Endpoint]] only matches non-chunked
 * (non-streamed) requests.
 */
def bodyOption[A, CT <: String](implicit
  d: Decode.Aux[A, CT], ct: ClassTag[A]): Endpoint[Option[A]] = new OptionalBody[A, CT](d, ct)
/**
 * An [[Endpoint]] that reads the required request body represented as `CT` (`ContentType`) and
 * interpreted as `A`, or throws an [[Error.NotPresent]] exception. The returned [[Endpoint]]
 * only matches non-chunked (non-streamed) requests.
 */
def body[A, CT <: String](implicit
  d: Decode.Aux[A, CT], ct: ClassTag[A]): Endpoint[A] = new RequiredBody[A, CT](d, ct)
/**
 * Alias for `body[A, Application.Json]`.
 */
def jsonBody[A: Decode.Json : ClassTag]: Endpoint[A] = body[A, Application.Json]
/**
 * Alias for `bodyOption[A, Application.Json]`.
 */
def jsonBodyOption[A: Decode.Json : ClassTag]: Endpoint[Option[A]] =
  bodyOption[A, Application.Json]
/**
 * Alias for `body[A, Text.Plain]`
 */
def textBody[A: Decode.Text : ClassTag]: Endpoint[A] = body[A, Text.Plain]
/**
 * Alias for `bodyOption[A, Text.Plain]`
 */
def textBodyOption[A: Decode.Text : ClassTag]: Endpoint[Option[A]] = bodyOption[A, Text.Plain]
/**
 * An evaluating [[Endpoint]] that reads a required chunked streaming binary body, interpreted as
 * an `AsyncStream[Buf]`. The returned [[Endpoint]] only matches chunked (streamed) requests.
 */
val asyncBody: Endpoint[AsyncStream[Buf]] =
  matches(items.BodyItem)(_.isChunked)(req => AsyncStream.fromReader(req.reader))
/**
 * An evaluating [[Endpoint]] that reads an optional HTTP cookie from the request into an
 * `Option`.
 */
def cookieOption(name: String): Endpoint[Option[Cookie]] =
  option(items.CookieItem(name))(requestCookie(name))
/**
 * An evaluating [[Endpoint]] that reads a required cookie from the request or raises an
 * [[Error.NotPresent]] exception when the cookie is missing.
 */
def cookie(name: String): Endpoint[Cookie] = cookieOption(name).failIfNone
/**
 * An evaluating[[Endpoint]] that reads an optional file upload from a multipart/form-data request
 * into an `Option`.
 */
def fileUploadOption(name: String): Endpoint[Option[FileUpload]] =
  matches(items.ParamItem(name))(!_.isChunked)(requestUpload(name))
/**
 * An evaluating [[Endpoint]] that reads a required file upload from a multipart/form-data
 * request.
 */
def fileUpload(name: String): Endpoint[FileUpload] = fileUploadOption(name).failIfNone
/**
 * An [[Exception]] representing a failed authorization with [[BasicAuth]].
 */
object BasicAuthFailed extends Exception {
  override def getMessage: String = "Wrong credentials"
}
/**
 * Maintains Basic HTTP Auth for an arbitrary [[Endpoint]].
 */
case class BasicAuth(realm: String)(authenticate: (String, String) => Future[Boolean]) {
  def apply[A](e: Endpoint[A]): Endpoint[A] = new Endpoint[A] {
    // cached 401 response carrying the WWW-Authenticate challenge for this realm
    private[this] val unauthorized = new Rerunnable[Output[A]] {
      override def run = Future.value(Unauthorized(BasicAuthFailed)
        .withHeader("WWW-Authenticate" -> s"""Basic realm="$realm""""))
    }
    // run the auth check only when the underlying endpoint matched
    final def apply(input: Input): Endpoint.Result[A] = e(input) match {
      case EndpointResult.Matched(rem, out) =>
        EndpointResult.Matched(rem, authenticated(rem).flatMap(if (_) out else unauthorized))
      case _ => EndpointResult.Skipped
    }
    // missing/unparseable Authorization header authenticates as false
    private[this] def authenticated(input: Input): Rerunnable[Boolean] =
      Rerunnable.fromFuture(
        input.request.authorization
          .flatMap(parse)
          .map(authenticate.tupled)
          .getOrElse(Future.False))
    // extracts (username, password) from a "basic <base64>" Authorization value
    private[this] def parse(authorization: String): Option[(String, String)] = for {
      (scheme, params) <- parseAuthorization(authorization)
      if scheme == "basic"
      (username, password) <- parseCredentials(params)
    } yield (username, password)
    private[this] def parseAuthorization(authorization: String): Option[(String, String)] =
      authorization.split(" ", 2) match {
        case Array(scheme, params) => Some((scheme.toLowerCase, params))
        case _ => None
      }
    // base64-decoded credentials are "username:password"; the password may contain ':'
    private[this] def parseCredentials(params: String): Option[(String, String)] =
      new String(Base64StringEncoder.decode(params)).split(":", 2) match {
        case Array(username, password) => Some((username, password))
        case _ => None
      }
    override def toString: String = s"""BasicAuth(realm="$realm", $e)"""
  }
}
}
| yanana/finch | core/src/main/scala/io/finch/Endpoints.scala | Scala | apache-2.0 | 20,782 |
package one.lockstep.vault.sdk.impl
import java.util
import one.lockstep.vault._
import one.lockstep.util._
import one.lockstep.util.japi._
/** Adapts the internal [[one.lockstep.vault.UnlockedVault]] to the Java-friendly SDK interface. */
class UnlockedVaultAdapter(impl: one.lockstep.vault.UnlockedVault) extends sdk.UnlockedVault {
  override def getSecret(id: String): Array[Byte] = {
    // fail fast with a descriptive message rather than a bare lookup failure
    require(impl.index.secretIds.contains(id), s"no secret with id '$id' found in vault")
    impl.unlockedIndex.secret(id)
  }
  // Returns null when the attribute is absent (Java-interop convention).
  override def getEntryAttr(entryId: String, attrId: String): Array[Byte] = {
    impl.index.entryAttrs(entryId).getOrElse(attrId, null)
  }
  override def getVaultAttr(attrId: String): Array[Byte] = {
    impl.index.vaultAttrs(attrId)
  }
  override def getVaultAttrs: JMap[String, Array[Byte]] = {
    impl.index.vaultAttrs.mapValues(_.toArray).asJava
  }
  override def getSignature(entryId: String): sdk.Signature = {
    val signer = impl.unlockedIndex.signature(entryId)
    new sdk.Signature {
      // NOTE(review): signing is unimplemented -- calling this throws NotImplementedError.
      // The intended implementation appears to be the commented-out signer.sign(value).
      override def sign(value: Array[Byte]): Array[Byte] = ??? //signer.sign(value)
    }
  }
  override def getEntryAttrs(entryId: String): util.Map[String, Array[Byte]] =
    impl.entryAttrs(entryId).mapValues(_.toArray).asJava
  override def changePasscode(newPasscode: Array[Byte]): Unit = impl.changePasscode(newPasscode)
  override def put(entryId: String, spec: sdk.Spec): Unit = impl.put(entryId, spec)
  override def putEntryAttr(entryId: String, attrId: String, value: Array[Byte]): Unit = impl.putEntryAttr(entryId, attrId, value)
  override def removeEntryAttr(entryId: String, attrId: String): Unit = impl.removeEntryAttr(entryId, attrId)
  override def putVaultAttr(attrId: String, value: Array[Byte]): Unit = impl.putVaultAttr(attrId, value)
  override def removeVaultAttr(attrId: String): Unit = impl.removeVaultAttr(attrId)
  override def remove(id: String): Unit = impl.remove(id)
  override def commit(): Unit = impl.commit()
  override def dispose(): Unit = impl.dispose()
}
| lockstep-one/vault | vault-client/src/main/scala/one/lockstep/vault/sdk/impl/UnlockedVaultAdapter.scala | Scala | agpl-3.0 | 1,926 |
import sbt.Keys._
import sbt._
//import Dependencies._
import spray.revolver.RevolverPlugin._
/** sbt build settings shared across the project: versioning, compiler flags, resolvers, publishing. */
object BuildSettings {
  val VERSION = "3.1.0"
  val defaultBuildSettings = Defaults.coreDefaultSettings ++ Format.settings ++ /* Revolver.settings ++ */ Seq(
    version := VERSION,
    organization := "com.github.dmrolfs",
    licenses += ("MIT", url("http://opensource.org/licenses/MIT")),
    crossScalaVersions := Seq( "2.12.2" ),
    // build against the first listed cross version by default
    scalaVersion := crossScalaVersions{ (vs: Seq[String]) => vs.head }.value,
    // updateOptions := updateOptions.value.withCachedResolution(true),
    scalacOptions ++= Seq(
      // "-encoding",
      // "utf8",
      "-target:jvm-1.8",
      "-feature",
      "-unchecked",
      "-deprecation",
      "-language:implicitConversions",
      // "-Ylog-classpath",
      // "-Xlog-implicits",
      // "-Ymacro-debug-verbose",
      // "-Ywarn-adapted-args",
      // "-Xfatal-warnings",
      "-Xlog-reflective-calls"
    ),
    javacOptions ++= Seq(
      "-source", "1.8",
      "-target", "1.8"
    ),
    javaOptions ++= Seq(
      "-Dconfig.trace=loads"
    ),
    homepage := Some( url("http://github.com/dmrolfs/spotlight") ),
    // licenses := Seq("Apache 2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0.html")),
    conflictManager := ConflictManager.latestRevision,
    dependencyOverrides := Dependencies.defaultDependencyOverrides,
    resolvers += "Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/",
    resolvers += "krasserm at bintray" at "http://dl.bintray.com/krasserm/maven",
    resolvers += "omen-bintray" at "http://dl.bintray.com/omen/maven",
    resolvers += "dnvriend at bintray" at "http://dl.bintray.com/dnvriend/maven",
    resolvers += "IESL Releases" at "http://dev-iesl.cs.umass.edu/nexus/content/groups/public",
    resolvers += "spray repo" at "http://repo.spray.io",
    resolvers += "Typesafe releases" at "http://repo.typesafe.com/typesafe/releases",
    resolvers += "eaio releases" at "http://eaio.com/maven2",
    resolvers += "Sonatype OSS Releases" at "http://oss.sonatype.org/content/repositories/releases/",
    resolvers += "Scalaz Bintray Repo" at "http://dl.bintray.com/scalaz/releases",
    resolvers += "velvia maven" at "http://dl.bintray.com/velvia/maven",
    // resolvers += "Numerical Method's Repository" at "http://repo.numericalmethod.com/maven/", // don't want to use due to $$$
    resolvers += Resolver.jcenterRepo,
    resolvers += Resolver.sonatypeRepo( "snapshots" ),
    resolvers += Classpaths.sbtPluginReleases,
    resolvers += "OSS JFrog Artifactory" at "http://oss.jfrog.org/artifactory/oss-snapshot-local",
    // SLF4J initializes itself upon the first logging call. Because sbt
    // runs tests in parallel it is likely that a second thread will
    // invoke a second logging call before SLF4J has completed
    // initialization from the first thread's logging call, leading to
    // these messages:
    // SLF4J: The following loggers will not work because they were created
    // SLF4J: during the default configuration phase of the underlying logging system.
    // SLF4J: See also http://www.slf4j.org/codes.html#substituteLogger
    // SLF4J: com.imageworks.common.concurrent.SingleThreadInfiniteLoopRunner
    //
    // As a workaround, load SLF4J's root logger before starting the unit
    // tests [1].
    //
    // [1] http://stackoverflow.com/a/12095245
    testOptions in Test += Tests.Setup( classLoader =>
      classLoader
        .loadClass( "org.slf4j.LoggerFactory" )
        .getMethod( "getLogger", classLoader.loadClass("java.lang.String") )
        .invoke( null, "ROOT" )
    ),
    parallelExecution in Test := false,
    testOptions in Test += Tests.Argument( TestFrameworks.ScalaTest, "-oDFT" ),
    triggeredMessage in ThisBuild := Watched.clearWhenTriggered,
    cancelable in Global := true
  )
  /** Settings for modules that must not be published. */
  def doNotPublishSettings = Seq( publish := {} )
  /** Publishes snapshots to JFrog OSS and releases with full POM metadata. */
  def publishSettings = {
    // if ( (version in ThisBuild).toString.endsWith("-SNAPSHOT") ) {
    if ( VERSION.toString.endsWith("-SNAPSHOT") ) {
      Seq(
        publishTo := Some("Artifactory Realm" at "http://oss.jfrog.org/artifactory/oss-snapshot-local"),
        publishMavenStyle := true,
        // Only setting the credentials file if it exists (#52)
        credentials := List(Path.userHome / ".bintray" / ".artifactory").filter(_.exists).map(Credentials(_))
      )
    } else {
      Seq(
        pomExtra := {
          <scm>
            <url>https://github.com</url>
            <connection>https://github.com/dmrolfs/spotlight.git</connection>
          </scm>
          <developers>
            <developer>
              <id>dmrolfs</id>
              <name>Damon Rolfs</name>
              <url>http://dmrolfs.github.io/</url>
            </developer>
          </developers>
        },
        publishMavenStyle := true,
        resolvers += Resolver.url("omen bintray resolver", url("http://dl.bintray.com/omen/maven"))(Resolver.ivyStylePatterns),
        licenses := ("MIT", url("http://opensource.org/licenses/MIT")) :: Nil // this is required! otherwise Bintray will reject the code
      )
    }
  }
}
| dmrolfs/lineup | project/BuildSettings.scala | Scala | mit | 5,156 |
package com.seanshubin.uptodate.logic
import scala.annotation.tailrec
/**
 * A parsed artifact version, ordered word-by-word.
 *
 * @param originalString the raw version string
 * @param words the version broken into numeric and alphabetic tokens
 */
case class Version(originalString: String, words: List[String]) extends Ordered[Version] {
  def this(version: String) = this(version, Version.breakIntoWords(version))
  // A release version contains only numbers and recognized release words.
  def isRelease: Boolean = words.forall(Version.isNumberOrReleaseWord)
  // Versions containing "$" are unresolved build variables and never participate in upgrades.
  def isVariable: Boolean = originalString.contains("$")
  /**
   * Decides whether `that` is a sensible upgrade target from this version.
   * Releases only upgrade to newer releases; pre-releases upgrade to newer
   * versions unless stripping release-candidate suffixes shows a downgrade.
   */
  def shouldUpgradeTo(that: Version): Boolean = {
    if (this.isVariable || that.isVariable) {
      false
    } else {
      if (this.isRelease) {
        if (that.isRelease) {
          // release -> release: upgrade only when strictly newer
          if (this < that) {
            true
          } else {
            false
          }
        } else {
          // never move from a release to a pre-release
          false
        }
      } else {
        if (that.isRelease) {
          // pre-release -> release: upgrade unless our base version is already newer
          if (this.dropReleaseCandidateParts > that) {
            false
          } else {
            true
          }
        } else {
          // pre-release -> pre-release: must be newer and not a downgrade of the base version
          if (this < that) {
            if (this.dropReleaseCandidateParts < that.dropReleaseCandidateParts) {
              false
            } else {
              true
            }
          } else {
            false
          }
        }
      }
    }
  }
  def shouldUpgradeTo(that: String): Boolean = shouldUpgradeTo(Version(that))
  override def compare(that: Version): Int = {
    val compareResult = Version.compareWordLists(this.words, that.words)
    compareResult
  }
  // Strips trailing release-candidate markers (rc, SNAPSHOT) and everything after them.
  def dropReleaseCandidateParts: Version = {
    copy(words = words.takeWhile(Version.notReleaseCandidate))
  }
  /** Picks the best upgrade from `versions`, preferring release versions when any qualify. */
  def selectUpgrade(versions: Set[Version]): Option[Version] = {
    def shouldUpgrade(that: Version) = shouldUpgradeTo(that)
    val releases = versions.filter(shouldUpgrade).filter(_.isRelease)
    val potentialUpgrades = if (releases.isEmpty) {
      versions.filter(shouldUpgrade)
    } else {
      releases
    }
    // highest-sorting candidate wins
    potentialUpgrades.toSeq.sorted.reverse.headOption
  }
}
/**
 * Companion for [[Version]]: tokenization of version strings and the
 * token-wise comparison rules used for ordering.
 */
object Version {
  def apply(version: String): Version = new Version(version)

  private val WholeNumberPattern = """[0-9]+"""
  private val WordPattern = """[a-zA-Z]+"""
  private val OrPattern = "|"
  private val NumberOrWordPattern = capture(WholeNumberPattern) + OrPattern + capture(WordPattern)
  private val NumberOrWordRegex = NumberOrWordPattern.r

  private def capture(pattern: String) = "(" + pattern + ")"

  // Tokens that still denote a proper release (e.g. "2.0.Final", "1.2-ga").
  private val releaseWords = Set("ga", "final", "patch", "java", "groovy", "r", "v")
  // Tokens that mark a pre-release; dropReleaseCandidateParts truncates at the first one.
  private val releaseCandidateWords = Set("rc", "SNAPSHOT")

  def notReleaseCandidate(word: String): Boolean = !releaseCandidateWords.contains(word)

  /** Compares two tokenized versions word by word; the shorter list sorts first. */
  @tailrec
  def compareWordLists(leftVersionWords: List[String], rightVersionWords: List[String]): Int =
    (leftVersionWords, rightVersionWords) match {
      case (Nil, Nil) => 0
      case (Nil, _) => -1
      case (_, Nil) => 1
      case (leftHead :: leftTail, rightHead :: rightTail) =>
        val comparison = compareWord(leftHead, rightHead)
        if (comparison != 0) comparison
        else compareWordLists(leftTail, rightTail)
    }

  /** Splits a version string into runs of digits and runs of letters. */
  def breakIntoWords(version: String): List[String] =
    NumberOrWordRegex.findAllIn(version).toList

  /** Numeric comparison of two tokens; non-numeric tokens count as -1 and sort below all numbers. */
  def compareWord(leftVersionWord: String, rightVersionWord: String): Int =
    toNumber(leftVersionWord).compareTo(toNumber(rightVersionWord))

  def toNumber(versionWord: String): Long =
    try versionWord.toLong
    catch { case _: NumberFormatException => -1L }

  def isNumberOrReleaseWord(s: String): Boolean =
    releaseWords.contains(s.toLowerCase) || s.matches(WholeNumberPattern)

  /** The highest version overall, preferring proper releases when any exist. */
  def bestAvailableVersionFrom(versions: Set[Version]): Version = {
    val releaseVersions = versions.filter(_.isRelease)
    val pool = if (releaseVersions.isEmpty) versions else releaseVersions
    pool.toSeq.sorted.last
  }

  /** String-level convenience for [[Version.selectUpgrade]]. */
  def selectUpgrade(currentVersionString: String, versionStrings: Seq[String]): Option[String] =
    Version(currentVersionString)
      .selectUpgrade(versionStrings.toSet.map(Version.apply))
      .map(_.originalString)

  /** Ordering predicate: true when `left` is strictly newer than `right`. */
  def stringDescending(left: String, right: String): Boolean =
    Version(left) > Version(right)
}
| SeanShubin/up-to-date | logic/src/main/scala/com/seanshubin/uptodate/logic/Version.scala | Scala | unlicense | 4,373 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.feature
import breeze.linalg.{DenseVector => BDV}
import org.apache.spark.annotation.{Experimental, Since}
import org.apache.spark.api.java.JavaRDD
import org.apache.spark.mllib.linalg.{DenseVector, SparseVector, Vector, Vectors}
import org.apache.spark.rdd.RDD
/**
 * :: Experimental ::
 * Inverse document frequency (IDF).
 * The standard formulation is used: `idf = log((m + 1) / (d(t) + 1))`, where `m` is the total
 * number of documents and `d(t)` is the number of documents that contain term `t`.
 *
 * This implementation supports filtering out terms which do not appear in a minimum number
 * of documents (controlled by the variable `minDocFreq`). For terms that are not in
 * at least `minDocFreq` documents, the IDF is found as 0, resulting in TF-IDFs of 0.
 *
 * @param minDocFreq minimum of documents in which a term
 *                   should appear for filtering
 */
@Since("1.1.0")
@Experimental
class IDF @Since("1.2.0") (@Since("1.2.0") val minDocFreq: Int) {

  @Since("1.1.0")
  def this() = this(0)

  // TODO: Allow different IDF formulations.

  /**
   * Computes the inverse document frequency.
   * @param dataset an RDD of term frequency vectors
   */
  @Since("1.1.0")
  def fit(dataset: RDD[Vector]): IDFModel = {
    // Fold every partition's documents into a frequency aggregator, merge the
    // partial aggregators tree-wise, then convert counts into the IDF vector.
    val aggregator = new IDF.DocumentFrequencyAggregator(minDocFreq = minDocFreq)
    val documentFrequencies = dataset.treeAggregate(aggregator)(
      seqOp = (agg, vector) => agg.add(vector),
      combOp = (left, right) => left.merge(right)
    )
    new IDFModel(documentFrequencies.idf())
  }

  /**
   * Computes the inverse document frequency.
   * @param dataset a JavaRDD of term frequency vectors
   */
  @Since("1.1.0")
  def fit(dataset: JavaRDD[Vector]): IDFModel = fit(dataset.rdd)
}
private object IDF {

  /** Document frequency aggregator: counts, per term, how many documents contain it. */
  class DocumentFrequencyAggregator(val minDocFreq: Int) extends Serializable {

    /** number of documents seen so far */
    private var m = 0L
    /** document frequency vector; allocated lazily when the first document arrives */
    private var df: BDV[Long] = _

    def this() = this(0)

    /** Adds a new document. */
    def add(doc: Vector): this.type = {
      if (isEmpty) {
        // Size the frequency vector from the first document's dimensionality.
        df = BDV.zeros(doc.size)
      }
      doc match {
        case SparseVector(size, indices, values) =>
          // Only stored entries can be non-zero; walk the active indices.
          val nnz = indices.size
          var k = 0
          while (k < nnz) {
            if (values(k) > 0) {
              df(indices(k)) += 1L
            }
            k += 1
          }
        case DenseVector(values) =>
          val n = values.size
          var j = 0
          while (j < n) {
            if (values(j) > 0.0) {
              df(j) += 1L
            }
            j += 1
          }
        case other =>
          throw new UnsupportedOperationException(
            s"Only sparse and dense vectors are supported but got ${other.getClass}.")
      }
      m += 1L
      this
    }

    /** Merges another aggregator's counts into this one (in place). */
    def merge(other: DocumentFrequencyAggregator): this.type = {
      if (!other.isEmpty) {
        m += other.m
        if (df == null) {
          // This side saw no documents yet: adopt a copy of the other's counts.
          df = other.df.copy
        } else {
          df += other.df
        }
      }
      this
    }

    private def isEmpty: Boolean = m == 0L

    /** Returns the current IDF vector. */
    def idf(): Vector = {
      if (isEmpty) {
        throw new IllegalStateException("Haven't seen any document yet.")
      }
      val n = df.length
      val inv = new Array[Double](n)
      var j = 0
      while (j < n) {
        /*
         * If the term is not present in the minimum
         * number of documents, set IDF to 0. This
         * will cause multiplication in IDFModel to
         * set TF-IDF to 0.
         *
         * Since arrays are initialized to 0 by default,
         * we just omit changing those entries.
         */
        if (df(j) >= minDocFreq) {
          inv(j) = math.log((m + 1.0) / (df(j) + 1.0))
        }
        j += 1
      }
      Vectors.dense(inv)
    }
  }
}
/**
 * :: Experimental ::
 * Represents an IDF model that can transform term frequency vectors.
 */
@Experimental
@Since("1.1.0")
class IDFModel private[spark] (@Since("1.1.0") val idf: Vector) extends Serializable {

  /**
   * Transforms term frequency (TF) vectors to TF-IDF vectors.
   *
   * If `minDocFreq` was set for the IDF calculation,
   * the terms which occur in fewer than `minDocFreq`
   * documents will have an entry of 0.
   *
   * @param dataset an RDD of term frequency vectors
   * @return an RDD of TF-IDF vectors
   */
  @Since("1.1.0")
  def transform(dataset: RDD[Vector]): RDD[Vector] = {
    val broadcastIdf = dataset.context.broadcast(idf)
    dataset.mapPartitions { iter =>
      // Resolve the broadcast value once per partition rather than per record.
      val localIdf = broadcastIdf.value
      iter.map(v => IDFModel.transform(localIdf, v))
    }
  }

  /**
   * Transforms a term frequency (TF) vector to a TF-IDF vector
   *
   * @param v a term frequency vector
   * @return a TF-IDF vector
   */
  @Since("1.3.0")
  def transform(v: Vector): Vector = IDFModel.transform(idf, v)

  /**
   * Transforms term frequency (TF) vectors to TF-IDF vectors (Java version).
   * @param dataset a JavaRDD of term frequency vectors
   * @return a JavaRDD of TF-IDF vectors
   */
  @Since("1.1.0")
  def transform(dataset: JavaRDD[Vector]): JavaRDD[Vector] =
    transform(dataset.rdd).toJavaRDD()
}
private object IDFModel {

  /**
   * Scales a term frequency (TF) vector by an IDF vector, producing a TF-IDF vector.
   *
   * @param idf an IDF vector
   * @param v a term frequency vector
   * @return a TF-IDF vector
   */
  def transform(idf: Vector, v: Vector): Vector = v match {
    case SparseVector(size, indices, values) =>
      // Only the stored entries need scaling; the index array is reused as-is.
      val scaled = Array.tabulate(values.length)(k => values(k) * idf(indices(k)))
      Vectors.sparse(size, indices, scaled)
    case DenseVector(values) =>
      Vectors.dense(Array.tabulate(values.length)(j => values(j) * idf(j)))
    case other =>
      throw new UnsupportedOperationException(
        s"Only sparse and dense vectors are supported but got ${other.getClass}.")
  }
}
| practice-vishnoi/dev-spark-1 | mllib/src/main/scala/org/apache/spark/mllib/feature/IDF.scala | Scala | apache-2.0 | 6,998 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils
import scala.annotation.StaticAnnotation
/* Some helpful annotations */
/**
 * Marks the annotated class as thread-safe. For an abstract class this is part of
 * its interface: every implementation must also be thread-safe.
 *
 * Documentation-only: this annotation is not enforced by the compiler.
 */
class threadsafe extends StaticAnnotation
/**
 * Marks the annotated class as NOT thread-safe; callers must provide any
 * required external synchronization.
 *
 * Documentation-only: this annotation is not enforced by the compiler.
 */
class nonthreadsafe extends StaticAnnotation
/**
 * Marks the annotated class as immutable.
 *
 * Documentation-only: this annotation is not enforced by the compiler.
 */
class immutable extends StaticAnnotation
| wangcy6/storm_app | frame/kafka-0.11.0/kafka-0.11.0.1-src/core/src/main/scala/kafka/utils/Annotations.scala | Scala | apache-2.0 | 1,315 |
package com.nodeta.scalandra
/**
 * Describes a range used for range-based limiting of query results.
 *
 * @param start  optional first element (None leaves the range open at the start)
 * @param finish optional last element (None leaves the range open at the end)
 * @param order  ordering of the returned results
 * @param count  maximum number of results
 * @author Ville Lautanala
 */
case class Range[T](start: Option[T], finish: Option[T], order: Order, count: Int)
| nodeta/scalandra | src/main/scala/com/nodeta/scalandra/Range.scala | Scala | mit | 202 |
package org.geoscript.support
package object graph {
  /** True when every pair of vertices in `vs` is connected, i.e. `vs` is a clique. */
  def isClique[V](vs: Set[V], connected: (V, V) => Boolean): Boolean =
    vs.toSeq.combinations(2).forall { case Seq(a, b) => connected(a, b) }

  /**
   * Computes all maximal cliques of the graph given by `vertices` and the
   * symmetric `connected` predicate, using a Bron-Kerbosch style recursion
   * with pivoting.
   */
  def maximalCliques[V]
    (vertices: Set[V], connected: (V, V) => Boolean)
    : Set[Set[V]]
  = {
    // leveraging the fact that Map[A,B] <: (A => B) here to pre-compute
    // neighborhoods
    val neighbors: V => Set[V] = vertices
      .map { v => (v, (vertices - v).filter(connected(_, v))) }
      .toMap

    def degree(v: V) = vertices.count(connected(_, v))

    // r: clique under construction; p: candidates that extend r;
    // x: vertices already fully explored (kept to avoid reporting duplicates).
    def recurse
      (r: Set[V], p: Set[V], x: Set[V], accum: Set[Set[V]])
      : Set[Set[V]]
    = {
      if (p.isEmpty && x.isEmpty)
        accum + r // r cannot be extended any further, so it is maximal
      else {
        // Pivot: only branch on vertices outside the pivot's neighborhood.
        val pivot = (p ++ x).head
        val pruned = (p -- neighbors(pivot)).toSeq
        split(p, pruned, x).foldLeft(accum) {
          case (accum, (v, p1, x1)) =>
            recurse(
              r + v,
              p1 & neighbors(v),
              x1 & neighbors(v),
              accum
            )
        }
      }
    }

    // For each pruned vertex v, the candidate/excluded sets as they stand when
    // v's branch is explored (earlier pruned vertices have moved from p to x).
    def split
      (vs: Set[V], pruned: Seq[V], x: Set[V])
      : Seq[(V, Set[V], Set[V])]
    = {
      val splits =
        ((pruned scanLeft (vs.toSet, x)) {
          case ((p, x), v) => (p - v, x + v)
        })
      (pruned zip splits).map { case (a, (b, c)) => (a, b, c) }
    }

    // lowest-degree vertices first
    val sorted = vertices.toSeq.sortBy(degree)
    split(vertices, sorted, Set.empty).foldLeft(Set.empty[Set[V]]) {
      case (accum, (v, p, x)) =>
        recurse(Set(v), p & neighbors(v), x & neighbors(v), accum)
    }
  }

  // Combinations of vertices containing at most one member of each clique
  def enumerateCombinations[V](cliques: Set[Set[V]]): Set[Set[V]] = {
    def recurse
      (results: Set[Set[V]], accum: Set[V], cliques: Seq[Set[V]])
      : Set[Set[V]]
    = {
      if (cliques.isEmpty)
        results + accum
      else {
        val clique = cliques.head
        val tail = cliques.tail
        // Either take no vertex from this clique, or exactly one; taking v
        // removes every later clique that contains v.
        (clique foldLeft recurse(results, accum, tail)) { (results, v) =>
          recurse(results, accum + v, tail.filterNot(_ contains v))
        }
      }
    }
    // largest cliques first so the search prunes sooner
    val sorted = cliques.toSeq.sortBy(- _.size)
    recurse(Set.empty, Set.empty, sorted)
  }
}
| dwins/geoscript.scala | geocss/src/main/scala/org/geoscript/support/graph/package.scala | Scala | mit | 2,294 |
package de.choffmeister.secpwd
import java.io.File
import java.util.Properties
import de.choffmeister.secpwd.utils.SshConnectionInfo
import de.choffmeister.secpwd.utils.RichFile._
import de.choffmeister.secpwd.utils.SshConnectionInfo
import java.io.StringReader
import java.io.FileNotFoundException
/**
 * Application configuration: optional SSH connection details and optional remote
 * directory used for synchronization. Both default to None when unconfigured.
 */
case class Config(syncConnInfo: Option[SshConnectionInfo] = None, syncRemoteDir: Option[String] = None)
/**
 * Loads the application configuration from the `config` properties file inside
 * the given directory. A missing file yields an empty [[Config]].
 */
object Config {
  def load(dir: File): Config = {
    try {
      val properties = new Properties()
      properties.load(new StringReader(new File(dir, "config").text))

      def prop(name: String): Option[String] = Option(properties.getProperty(name))

      // Host and username are both required before an SSH connection is usable.
      val syncConnInfo = for {
        host <- prop("host")
        userName <- prop("username")
      } yield SshConnectionInfo(
        host = host,
        userName = userName,
        password = prop("password"),
        keyFile = prop("keyfile").map(new File(_)),
        keyFilePass = prop("keyfile_pass").map(_.getBytes("UTF-8")),
        port = prop("port").map(_.toInt).getOrElse(22)
      )

      Config(
        syncConnInfo = syncConnInfo,
        syncRemoteDir = prop("remote_dir")
      )
    } catch {
      case _: FileNotFoundException => Config()
    }
  }
}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @builder scalation.util.bld.BldMM_Sorting
* @version 1.2
* @date Sat Sep 26 20:25:19 EDT 2015
* @see LICENSE (MIT style license file).
*/
package scalation.util
import scala.util.Random
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MM_SortingI` class provides direct and indirect methods to:
 *  <p>
 *      find 'k'-th median ('k'-th smallest element) using QuickSelect
 *      sort large arrays using QuickSort
 *      sort small arrays using SelectionSort
 *  <p>
 *  Direct methods are faster, but modify the array, while indirect methods are
 *  slower, but do not modify the array (they permute a separate rank array
 *  instead). This class is specialized for Int.
 *  @see `Sorting` for a generic version of this class.
 *  @param a the array to operate on
 */
class MM_SortingI (a: MM_ArrayI)
{
    private val n = a.length                         // length of array a

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    // Direct Median and Sorting (these methods reorder 'a' in place)
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Find the 'k'-median of the 'p' to 'r' partition of array 'a' using
     *  the QuickSelect algorithm. NOTE: partially reorders 'a' as a side effect.
     *  @see http://en.wikipedia.org/wiki/Quickselect
     *  @param p the left cursor
     *  @param r the right cursor
     *  @param k the type of median (k-th smallest element)
     */
    def median (p: Int, r: Int, k: Int): Int =
    {
        if (p == r) return a(p)
        swap (r, med3 (p, (p+r)/2, r))               // use median-of-3, comment out for simple pivot
        val q = partition (p, r)                     // partition into left (<=) and right (>=)
        if (q == k-1) return a(q)                    // found k-median
        else if (q > k-1) median (p, q - 1, k)       // recursively find median in left partition
        else median (q + 1, r, k)                    // recursively find median in right partition
    } // median

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Find the 'k'-median ('k'-th smallest element) of array 'a'.
     *  @param k the type of median (e.g., k = (n+1)/2 is the median)
     */
    def median (k: Int = (n+1)/2): Int = median (0, n-1, k)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Partition the array from 'p' to 'q' into a left partition (<= 'x') and
     *  a right partition (>= 'x'). Uses a(r) as the pivot; returns the pivot's
     *  final index.
     *  @param p the left cursor
     *  @param q the right cursor
     */
    def partition (p: Int, r: Int): Int =
    {
        val x = a(r)                                 // pivot
        var i = p - 1
        for (j <- p until r if a(j) <= x) { i += 1; swap (i, j) }
        swap (i + 1, r)
        i + 1
    } // partition

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Recursively sort the 'p' to 'r' partition of array 'a' using QuickSort.
     *  @see http://mitpress.mit.edu/books/introduction-algorithms
     *  @param p the left cursor
     *  @param r the right cursor
     */
    def qsort (p: Int, r: Int)
    {
        if (r - p > 5) {                             // partitions of <= 6 elements fall back to selsort
            swap (r, med3 (p, (p+r)/2, r))           // use median-of-3, comment out for simple pivot
            val q = partition (p, r)                 // partition into left (<=) and right (>=)
            qsort (p, q - 1)                         // recursively sort left partition
            qsort (q + 1, r)                         // recursively sort right partition
        } else {
            selsort (p, r)                           // use simple sort when small
        } // if
    } // qsort

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Sort array 'a' using QuickSort.
     */
    def qsort () { qsort (0, n-1) }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Sort the 'p' to 'r' partition of array 'a' using SelectionSort.
     *  @param p the left cursor
     *  @param r the right cursor
     */
    def selsort (p: Int = 0, r: Int = n-1)
    {
        for (i <- p until r) {
            var k = i
            for (j <- i+1 to r if a(j) < a(k)) k = j
            if (i != k) swap (i, k)
        } // for
    } // selsort

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Swap the elements at 'i' and 'j', i.e., a(i) <-> a(j).
     *  @param i the first index position
     *  @param j the second index position
     */
    @inline private def swap (i: Int, j: Int) { val t = a(i); a(i) = a(j); a(j) = t }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the index of the median of three elements.
     *  @param i element 1
     *  @param j element 2
     *  @param k element 3
     */
    @inline private def med3 (i: Int, j: Int, k: Int): Int =
    {
        if (a(i) < a(j))
            if (a(j) < a(k)) j else if (a(i) < a(k)) k else i
        else
            if (a(j) > a(k)) j else if (a(i) > a(k)) k else i
    } // med3

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Determine whether the array 'a' is sorted in ascending order.
     */
    def isSorted: Boolean =
    {
        for (i <- 1 until n if a(i-1) > a(i)) {
            println ("isSorted: failed @ (i-1, a) = " + (i-1, a(i-1)))
            println ("isSorted: failed @ (i, a)   = " + (i, a(i)))
            return false
        } // for
        true
    } // isSorted

    // Directly sorting in decreasing order ----------------------------------

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Partition the array from 'p' to 'q' into a left partition (<= 'x') and
     *  a right partition (>= 'x').
     *  For sorting in decreasing order.
     *  @param p the left cursor
     *  @param q the right cursor
     */
    def partition2 (p: Int, r: Int): Int =
    {
        val x = a(r)                                 // pivot
        var i = p - 1
        for (j <- p until r if a(j) >= x) { i += 1; swap (i, j) }
        swap (i + 1, r)
        i + 1
    } // partition2

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Recursively sort the 'p' to 'r' partition of array 'a' using QuickSort.
     *  Sort in decreasing order.
     *  @see http://mitpress.mit.edu/books/introduction-algorithms
     *  @param p the left cursor
     *  @param r the right cursor
     */
    def qsort2 (p: Int, r: Int)
    {
        if (r - p > 5) {
            swap (r, med3 (p, (p+r)/2, r))           // use median-of-3, comment out for simple pivot
            val q = partition2 (p, r)                // partition into left (<=) and right (>=)
            qsort2 (p, q - 1)                        // recursively sort left partition
            qsort2 (q + 1, r)                        // recursively sort right partition
        } else {
            selsort2 (p, r)                          // use simple sort when small
        } // if
    } // qsort2

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Sort array 'a' using QuickSort. Sort in decreasing order.
     */
    def qsort2 () { qsort2 (0, n-1) }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Sort the 'p' to 'r' partition of array 'a' using SelectionSort.
     *  Sort in decreasing order.
     *  @param p the left cursor
     *  @param r the right cursor
     */
    def selsort2 (p: Int = 0, r: Int = n-1)
    {
        for (i <- p until r) {
            var k = i
            for (j <- i+1 to r if a(j) > a(k)) k = j
            if (i != k) swap (i, k)
        } // for
    } // selsort2

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Determine whether the array 'a' is sorted in descending order.
     */
    def isSorted2: Boolean =
    {
        for (i <- 1 until n if a(i-1) < a(i)) {
            println ("isSorted2: failed @ (i-1, a) = " + (i-1, a(i-1)))
            println ("isSorted2: failed @ (i, a)   = " + (i, a(i)))
            return false
        } // for
        true
    } // isSorted2

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    // Indirect Median and Sorting (these methods permute the rank array 'rk',
    // leaving the data array 'a' untouched)
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Indirectly find the 'k'-median of the 'p' to 'r' partition of array 'a'
     *  using the QuickSelect algorithm.
     *  @see http://en.wikipedia.org/wiki/Quickselect
     *  @param rk the rank order
     *  @param p the left cursor
     *  @param r the right cursor
     *  @param k the type of median (k-th smallest element)
     */
    def imedian (rk: Array [Int], p: Int, r: Int, k: Int): Int =
    {
        if (p == r) return a(rk(p))
        // NOTE(review): pivot selection uses med3 (direct values) rather than
        // imed3 (ranked values); after rk is permuted the pivot choice may be
        // suboptimal, but correctness is unaffected — ?imed3
        iswap (rk, r, med3 (p, (p+r)/2, r))          // use median-of-3, comment out for simple pivot
        val q = ipartition (rk, p, r)                // partition into left (<=) and right (>=)
        if (q == k-1) return a(rk(q))                // found k-median
        else if (q > k-1) imedian (rk, p, q - 1, k)  // recursively find median in left partition
        else imedian (rk, q + 1, r, k)               // recursively find median in right partition
    } // imedian

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Indirectly find the 'k'-median ('k'-th smallest element) of array 'a'.
     *  @param k the type of median (e.g., k = (n+1)/2 is the median)
     */
    def imedian (k: Int = (n+1)/2): Int =
    {
        val rk = Array.range (0, n)                  // rank order
        imedian (rk, 0, n-1, k)
    } // imedian

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Indirectly partition the array from 'p' to 'r' into a left partition
     *  (<= 'x') and a right partition (>= 'x').
     *  @param rk the rank order
     *  @param p the left cursor
     *  @param r the right cursor
     */
    def ipartition (rk: Array [Int], p: Int, r: Int): Int =
    {
        val x = a(rk(r))                             // pivot
        var i = p - 1
        for (j <- p until r if a(rk(j)) <= x) { i += 1; iswap (rk, i, j) }
        iswap (rk, i + 1, r)
        i + 1
    } // ipartition

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Recursively and indirectly sort the 'p' to 'r' partition of array 'a'
     *  using QuickSort.
     *  @param rk the rank order
     *  @param p the left cursor
     *  @param r the right cursor
     */
    def iqsort (rk: Array [Int], p: Int, r: Int)
    {
        if (r - p > 5) {
            // NOTE(review): see imedian — uses med3 rather than imed3 — ?imed3
            iswap (rk, r, med3 (p, (p+r)/2, r))      // use median-of-3, comment out for simple pivot
            val q = ipartition (rk, p, r)            // partition into left (<=) and right (>=)
            iqsort (rk, p, q - 1)                    // recursively sort left partition
            iqsort (rk, q + 1, r)                    // recursively sort right partition
        } else {
            iselsort (rk, p, r)                      // use simple sort when small
        } // if
    } // iqsort

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Indirectly sort array 'a' using QuickSort, returning the rank order.
     */
    def iqsort (): Array [Int] =
    {
        val rk = Array.range (0, n)                  // rank order
        iqsort (rk, 0, n-1)                          // re-order rank
        rk                                           // return rank
    } // iqsort

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Indirectly sort the 'p' to 'r' partition of array 'a' using SelectionSort.
     *  @param rk the rank order
     *  @param p the left cursor
     *  @param r the right cursor
     */
    def iselsort (rk: Array [Int], p: Int, r: Int)
    {
        for (i <- p to r) {
            var k = i
            for (j <- i+1 to r if a(rk(j)) < a(rk(k))) k = j
            if (i != k) iswap (rk, i, k)
        } // for
    } // iselsort

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Indirectly sort array 'a' using SelectionSort, returning the rank order.
     */
    def iselsort (): Array [Int] =
    {
        val rk = Array.range (0, n)                  // rank order
        iselsort (rk, 0, n-1)                        // re-order rank
        rk                                           // return rank
    } // iselsort

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Indirectly swap the elements at 'i' and 'j', i.e., rk(i) <-> rk(j).
     *  @param rk the rank order
     *  @param i the first index position
     *  @param j the second index position
     */
    @inline private def iswap (rk: Array [Int], i: Int, j: Int)
    {
        val t = rk(i); rk(i) = rk(j); rk(j) = t
    } // iswap

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Return the indirect index of the median of three elements.
     *  NOTE(review): currently unused — the indirect methods call med3 instead.
     *  @param rk the rank order
     *  @param i element 1
     *  @param j element 2
     *  @param k element 3
     */
    @inline private def imed3 (rk: Array [Int], i: Int, j: Int, k: Int): Int =
    {
        if (a(rk(i)) < a(rk(j)))
            if (a(rk(j)) < a(rk(k))) j else if (a(rk(i)) < a(rk(k))) k else i
        else
            if (a(rk(j)) > a(rk(k))) j else if (a(rk(i)) > a(rk(k))) k else i
    } // imed3

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Determine whether the array 'a' is indirectly sorted in ascending order.
     *  @param rk the rank order
     */
    def isiSorted (rk: Array [Int]): Boolean =
    {
        for (i <- 1 until n if a(rk(i-1)) > a(rk(i))) {
            println ("isiSorted: failed @ (i-1, rk, a) = " + (i-1, rk(i-1), a(rk(i-1))))
            println ("isiSorted: failed @ (i, rk, a)   = " + (i, rk(i), a(rk(i))))
            return false
        } // for
        true
    } // isiSorted

    // Indirectly sorting in decreasing order --------------------------------

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Indirectly sort array 'a' using QuickSort.
     *  Sort in decreasing order.
     */
    def iqsort2 (): Array [Int] =
    {
        val rk = Array.range (0, n)                  // rank order
        println ("iqsort2 method not yet implemented") // FIX
        rk
    } // iqsort2

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Indirectly sort the 'p' to 'r' partition of array 'a' using SelectionSort.
     *  Sort in decreasing order.
     *  @param rk the rank order
     *  @param p the left cursor
     *  @param r the right cursor
     */
    def iselsort2 (rk: Array [Int], p: Int, r: Int)
    {
        for (i <- p to r) {
            var k = i
            for (j <- i+1 to r if a(rk(j)) > a(rk(k))) k = j
            if (i != k) iswap (rk, i, k)
        } // for
    } // iselsort2

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Indirectly sort array 'a' using SelectionSort, returning the rank order.
     *  Sort in decreasing order.
     */
    def iselsort2 (): Array [Int] =
    {
        val rk = Array.range (0, n)                  // rank order
        iselsort2 (rk, 0, n-1)                       // re-order rank
        rk                                           // return rank
    } // iselsort2

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Determine whether the array 'a' is indirectly sorted in descending order.
     *  @param rk the rank order
     */
    def isiSorted2 (rk: Array [Int]): Boolean =
    {
        for (i <- 1 until n if a(rk(i-1)) < a(rk(i))) {
            println ("isiSorted2: failed @ (i-1, rk, a) = " + (i-1, rk(i-1), a(rk(i-1))))
            println ("isiSorted2: failed @ (i, rk, a)   = " + (i, rk(i), a(rk(i))))
            return false
        } // for
        true
    } // isiSorted2

} // MM_SortingI class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MM_SortingI` companion object provides shortcuts for calling methods from
 *  the `MM_SortingI` class. Direct variants reorder the given array; indirect
 *  ('i'-prefixed) variants return a rank order and leave the array untouched.
 */
object MM_SortingI
{
    // Direct median and sorting --------------------------------------------

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Find the median value in the array.
     *  @param a the array to be examined
     */
    def median (a: MM_ArrayI, k: Int): Int = (new MM_SortingI (a)).median (k)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Fast, ascending, unstable sort.
     *  @param a the array to be sorted
     */
    def qsort (a: MM_ArrayI) { (new MM_SortingI (a)).qsort () }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Slow, ascending, stable sort.
     *  @param a the array to be sorted
     */
    def selsort (a: MM_ArrayI) { (new MM_SortingI (a)).selsort () }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Fast, descending, unstable sort.
     *  @param a the array to be sorted
     */
    def qsort2 (a: MM_ArrayI) { (new MM_SortingI (a)).qsort2 () }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Slow, descending, stable sort.
     *  @param a the array to be sorted
     */
    def selsort2 (a: MM_ArrayI) { (new MM_SortingI (a)).selsort2 () }

    // Indirect median and sorting -------------------------------------------

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Indirectly find the median value in the array.
     *  @param a the array to be examined
     */
    def imedian (a: MM_ArrayI, k: Int): Int = (new MM_SortingI (a)).imedian (k)

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Fast, ascending, unstable indirect sort.
     *  @param a the array to be sorted
     */
    def iqsort (a: MM_ArrayI): Array [Int] = (new MM_SortingI (a)).iqsort ()

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Slow, ascending, stable indirect sort.
     *  @param a the array to be sorted
     */
    def iselsort (a: MM_ArrayI): Array [Int] = (new MM_SortingI (a)).iselsort ()

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Fast, descending, unstable indirect sort.
     *  @param a the array to be sorted
     */
    // def iqsort2 (a: MM_ArrayI) { (new MM_SortingI (a)).iqsort2 () }  // FIX: implement

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Slow, descending, stable indirect sort.
     *  @param a the array to be sorted
     */
    def iselsort2 (a: MM_ArrayI): Array [Int] = (new MM_SortingI (a)).iselsort2 ()

} // MM_SortingI
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MM_SortingITest` object is used to test the correctness and performance
 *  of the 'median' and 'imedian' methods in the `MM_SortingI` class.
 *  NOTE(review): extends `App` — beware delayed-initialization pitfalls for
 *  non-trivial entry points.
 */
object MM_SortingITest extends App
{
    var md = 0                                       // holds the median found
    val rn = new Random ()
    val n  = 1000000                                 // size of the performance-test array
    val a  = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    val aa = MM_ArrayI.ofDim (n)

    // test direct k-medians (will modify the data array)
    println ("--------------------------------------------------------------")
    println ("Test direct: a = " + a.deep)
    for (k <- 1 to 5) {
        // fresh copy per iteration, since direct median reorders the array
        val med = new MM_SortingI (MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5))
        println ("median (" + k + ") = " + med.median (k))
    } // for
    val med = new MM_SortingI (MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5))
    println ("median () = " + med.median ())

    // test indirect k-medians (will not modify the data array)
    println ("--------------------------------------------------------------")
    println ("Test indirect: a = " + a.deep)
    val imed = new MM_SortingI (a)
    for (k <- 1 to 5) {
        println ("imedian (" + k + ") = " + imed.imedian (k))
    } // for
    println ("imedian () = " + imed.imedian ())
    println ("Unmodified: a = " + a.deep)

    // test the performance of direct k-medians
    println ("--------------------------------------------------------------")
    println ("Performance Test direct: aa.length = " + aa.length)
    for (k <- 0 until 20) {
        for (i <- 0 until n) aa(i) = rn.nextInt ()
        val med = new MM_SortingI (aa)
        print ("median:    "); time { md = med.median () }
        println ("median = " + md)
    } // for

    // test the performance of indirect k-medians
    println ("--------------------------------------------------------------")
    println ("Performance Test indirect: aa.length = " + aa.length)
    for (k <- 0 until 20) {
        for (i <- 0 until n) aa(i) = rn.nextInt ()
        val imed = new MM_SortingI (aa)
        print ("imedian:   "); time { md = imed.imedian () }
        println ("median = " + md)
    } // for

} // MM_SortingITest
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MM_SortingITest2` object is used to test the correctness and performance
 *  of the 'qsort' and 'iqsort' sorting methods in the `MM_SortingI` class.
 */
object MM_SortingITest2 extends App
{
    import scala.util.Sorting.quickSort                 // only used by the commented-out benchmark below

    var rk: Array [Int] = null                          // to hold rank order
    val n = 1000000                                     // size of the performance-test array
    val rn = new Random ()
    val a = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    val aa = MM_ArrayI.ofDim (n)

    // test direct sorting (will modify the data array)
    println ("--------------------------------------------------------------")
    val a1 = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    println ("Test direct: a1 = " + a1.deep)
    val srt = new MM_SortingI (a1)
    srt.qsort ()
    println ("qsort a1 = " + a1.deep)
    println ("isSorted = " + srt.isSorted)

    // test indirect sorting (will not modify the data array)
    println ("--------------------------------------------------------------")
    val a2 = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    println ("Test indirect: a2 = " + a2.deep)
    val isrt = new MM_SortingI (a2)
    rk = isrt.iqsort ()
    println ("iqsort rk = " + rk.deep) // rank order
    println ("isiSorted = " + isrt.isiSorted (rk))

    // test the performance of direct sorting
    println ("--------------------------------------------------------------")
    println ("Performance Test direct: aa.length = " + aa.length)
    for (k <- 0 until 20) {
        // for (i <- 0 until n) aa(i) = rn.nextInt ()
        // print ("quicksort: "); time { quickSort (aa) } // Scala's QuickSort
        for (i <- 0 until n) aa(i) = rn.nextInt ()      // fresh random data each trial
        val srt = new MM_SortingI (aa)
        print ("qsort: "); time { srt.qsort () }
        println ("isSorted = " + srt.isSorted)
    } // for

    // test the performance of indirect sorting
    println ("--------------------------------------------------------------")
    println ("Performance Test indirect: aa.length = " + aa.length)
    for (k <- 0 until 20) {
        for (i <- 0 until n) aa(i) = rn.nextInt ()
        val isrt = new MM_SortingI (aa)
        print ("iqsort: "); time { rk = isrt.iqsort () }
        println ("isiSorted = " + isrt.isiSorted (rk))
    } // for

} // MM_SortingITest2
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MM_SortingITest3` object is used to test the correctness and performance
 *  of the 'selsort' and 'iselsort' sorting methods in the `MM_SortingI` class.
 *  Uses a smaller 'n' than the quicksort tests since selection sort is O(n^2).
 */
object MM_SortingITest3 extends App
{
    var rk: Array [Int] = null                          // to hold rank order
    val n = 10000                                       // size of the performance-test array
    val rn = new Random ()
    val a = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    val aa = MM_ArrayI.ofDim (n)

    // test direct sorting (will modify the data array)
    println ("--------------------------------------------------------------")
    val a1 = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    println ("Test direct: a1 = " + a1.deep)
    val srt = new MM_SortingI (a1)
    srt.selsort ()
    println ("selsort a1 = " + a1.deep)
    println ("isSorted = " + srt.isSorted)

    // test indirect sorting (will not modify the data array)
    println ("--------------------------------------------------------------")
    val a2 = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    println ("Test indirect: a2 = " + a2.deep)
    val isrt = new MM_SortingI (a2)
    rk = isrt.iselsort ()
    println ("iselsort rk = " + rk.deep) // rank order
    println ("isiSorted = " + isrt.isiSorted (rk))

    // test the performance of direct sorting
    println ("--------------------------------------------------------------")
    println ("Performance Test direct: aa.length = " + aa.length)
    for (k <- 0 until 20) {
        for (i <- 0 until n) aa(i) = rn.nextInt ()      // fresh random data each trial
        val srt = new MM_SortingI (aa)
        print ("selsort: "); time { srt.selsort () }
        println ("isSorted = " + srt.isSorted)
    } // for

    // test the performance of indirect sorting
    println ("--------------------------------------------------------------")
    println ("Performance Test indirect: aa.length = " + aa.length)
    for (k <- 0 until 20) {
        for (i <- 0 until n) aa(i) = rn.nextInt ()
        val isrt = new MM_SortingI (aa)
        print ("iselsort: "); time { rk = isrt.iselsort () }
        println ("isiSorted = " + isrt.isiSorted (rk))
    } // for

} // MM_SortingITest3
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MM_SortingITest4` object is used to test the correctness and performance
 *  of the 'qsort2' and 'iqsort2' sorting methods in the `MM_SortingI` class.
 *  These sort in decreasing order (note the '2' suffix and 'isSorted2' checks).
 */
object MM_SortingITest4 extends App
{
    import scala.util.Sorting.quickSort                 // only used by the commented-out benchmark below

    var rk: Array [Int] = null                          // to hold rank order
    val n = 1000000                                     // size of the performance-test array
    val rn = new Random ()
    val a = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    val aa = MM_ArrayI.ofDim (n)

    // test direct sorting (will modify the data array)
    println ("--------------------------------------------------------------")
    val a1 = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    println ("Test direct: a1 = " + a1.deep)
    val srt = new MM_SortingI (a1)
    srt.qsort2 ()
    println ("qsort2 a1 = " + a1.deep)
    println ("isSorted2 = " + srt.isSorted2)

    // test indirect sorting (will not modify the data array)
    println ("--------------------------------------------------------------")
    val a2 = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    println ("Test indirect: a2 = " + a2.deep)
    val isrt = new MM_SortingI (a2)
    rk = isrt.iqsort2 ()
    println ("iqsort2 rk = " + rk.deep) // rank order
    println ("isiSorted2 = " + isrt.isiSorted2 (rk))

    // test the performance of direct sorting
    println ("--------------------------------------------------------------")
    println ("Performance Test direct: aa.length = " + aa.length)
    for (k <- 0 until 20) {
        // for (i <- 0 until n) aa(i) = rn.nextInt ()
        // print ("quicksort: "); time { quickSort (aa) } // Scala's QuickSort
        for (i <- 0 until n) aa(i) = rn.nextInt ()      // fresh random data each trial
        val srt = new MM_SortingI (aa)
        print ("qsort2: "); time { srt.qsort2 () }
        println ("isSorted2 = " + srt.isSorted2)
    } // for

    // test the performance of indirect sorting
    println ("--------------------------------------------------------------")
    println ("Performance Test indirect: aa.length = " + aa.length)
    for (k <- 0 until 20) {
        for (i <- 0 until n) aa(i) = rn.nextInt ()
        val isrt = new MM_SortingI (aa)
        print ("iqsort2: "); time { rk = isrt.iqsort2 () }
        println ("isiSorted2 = " + isrt.isiSorted2 (rk))
    } // for

} // MM_SortingITest4
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `MM_SortingITest5` object is used to test the correctness and performance
 *  of the 'selsort2' and 'iselsort2' sorting methods in the `MM_SortingI` class.
 *  They sort in decreasing order.  Uses a smaller 'n' since selection sort is O(n^2).
 */
object MM_SortingITest5 extends App
{
    var rk: Array [Int] = null                          // to hold rank order
    val n = 10000                                       // size of the performance-test array
    val rn = new Random ()
    val a = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    val aa = MM_ArrayI.ofDim (n)

    // test direct sorting (will modify the data array)
    println ("--------------------------------------------------------------")
    val a1 = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    println ("Test direct: a1 = " + a1.deep)
    val srt = new MM_SortingI (a1)
    srt.selsort2 ()
    println ("selsort2 a1 = " + a1.deep)
    println ("isSorted2 = " + srt.isSorted2)

    // test indirect sorting (will not modify the data array)
    println ("--------------------------------------------------------------")
    val a2 = MM_ArrayI (9, 1, 8, 2, 7, 3, 6, 4, 5)
    println ("Test indirect: a2 = " + a2.deep)
    val isrt = new MM_SortingI (a2)
    rk = isrt.iselsort2 ()
    println ("iselsort2 rk = " + rk.deep) // rank order
    println ("isiSorted2 = " + isrt.isiSorted2 (rk))

    // test the performance of direct sorting
    println ("--------------------------------------------------------------")
    println ("Performance Test direct: aa.length = " + aa.length)
    for (k <- 0 until 20) {
        for (i <- 0 until n) aa(i) = rn.nextInt ()      // fresh random data each trial
        val srt = new MM_SortingI (aa)
        print ("selsort2: "); time { srt.selsort2 () }
        println ("isSorted2 = " + srt.isSorted2)
    } // for

    // test the performance of indirect sorting
    println ("--------------------------------------------------------------")
    println ("Performance Test indirect: aa.length = " + aa.length)
    for (k <- 0 until 20) {
        for (i <- 0 until n) aa(i) = rn.nextInt ()
        val isrt = new MM_SortingI (aa)
        print ("iselsort2: "); time { rk = isrt.iselsort2 () }
        println ("isiSorted2 = " + isrt.isiSorted2 (rk))
    } // for

} // MM_SortingITest5 object
| mvnural/scalation | src/main/scala/scalation/util/MM_SortingI.scala | Scala | mit | 30,825 |
package com.btcontract.wallet.sqlite
import immortan.sqlite._
import android.database.sqlite._
import android.content.Context
/** Android SQLite open-helper for the wallet's "misc" database (schema version 3).
  * Table DDL comes from the immortan.sqlite table objects' `createStatements`.
  */
class DBInterfaceSQLiteAndroidMisc(context: Context, name: String) extends SQLiteOpenHelper(context, name, null, 3) with DBInterfaceSQLiteAndroid {
  // Opened eagerly at construction; per Android semantics this triggers
  // onCreate/onUpgrade before the instance is usable.
  val base: SQLiteDatabase = getWritableDatabase

  // Fresh install: create every table.
  def onCreate(dbs: SQLiteDatabase): Unit = {
    TxTable.createStatements.foreach(dbs.execSQL)
    ChannelTxFeesTable.createStatements.foreach(dbs.execSQL)
    ElectrumHeadersTable.createStatements.foreach(dbs.execSQL)
    ChainWalletTable.createStatements.foreach(dbs.execSQL)
    LNUrlPayTable.createStatements.foreach(dbs.execSQL)
    PaymentTable.createStatements.foreach(dbs.execSQL)
    RelayTable.createStatements.foreach(dbs.execSQL)
    DataTable.createStatements.foreach(dbs.execSQL)
    LogTable.createStatements.foreach(dbs.execSQL)
  }

  // Upgrade path: both migration steps run unconditionally regardless of v0/v1.
  // NOTE(review): assumes createStatements are idempotent (CREATE TABLE IF NOT EXISTS) — confirm.
  def onUpgrade(dbs: SQLiteDatabase, v0: Int, v1: Int): Unit = {
    LNUrlPayTable.createStatements.foreach(dbs.execSQL) // 1 -> 2 migration creates LNURL-PAY table
    LogTable.createStatements.foreach(dbs.execSQL) // 1 | 2 -> 3 migration creates error log table
  }
}
} | btcontract/wallet | app/src/main/java/com/btcontract/wallet/sqlite/DBInterfaceSQLiteAndroidMisc.scala | Scala | apache-2.0 | 1,147 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval
import monix.eval.Task.Context
import monix.execution.Callback
package object internal {
  /** Handy alias for building `Task.Async` nodes: a function receiving the
    * task's run-loop `Context` and the `Callback` used to signal completion.
    */
  private[eval] type Start[+A] = (Context, Callback[Throwable, A]) => Unit

  /** Internal API: A run-loop frame index is a number representing the current
    * run-loop cycle, being incremented whenever a `flatMap` evaluation happens.
    *
    * It gets used for automatically forcing asynchronous boundaries, according to the
    * [[monix.execution.ExecutionModel ExecutionModel]]
    * injected by the [[monix.execution.Scheduler Scheduler]] when
    * the task gets evaluated with `runAsync`.
    *
    * @see [[FrameIndexRef]]
    */
  private[eval] type FrameIndex = Int
}
| monixio/monix | monix-eval/shared/src/main/scala/monix/eval/internal/package.scala | Scala | apache-2.0 | 1,421 |
package knot.data.util
import scala.reflect.ClassTag
/** Mixin providing exception-safe numeric parsing helpers: invalid input
  * yields `None` (or a zero default) instead of throwing.
  */
trait SafeParser {

  /** Parses `s` as an Int, falling back to 0 when it is not a valid integer. */
  def toIntOrDefault(s: String): Int = toIntOption(s).getOrElse(0)

  /** Parses `s` as an Int, yielding None when it is not a valid integer. */
  def toIntOption(s: String): Option[Int] = attempt[Int, NumberFormatException](s.toInt)

  /** Parses `s` as a Float, falling back to 0f when it is not a valid number. */
  def toFloatOrDefault(s: String): Float = toFloatOption(s).getOrElse(0f)

  /** Parses `s` as a Float, yielding None when it is not a valid number. */
  def toFloatOption(s: String): Option[Float] = attempt[Float, NumberFormatException](s.toFloat)

  /** Evaluates `thunk`, converting only exceptions of runtime type `E`
    * into None; any other throwable propagates unchanged.
    */
  private def attempt[T, E <: Throwable : ClassTag](thunk: => T): Option[T] =
    try Some(thunk)
    catch {
      // generic exception types are erased, so match via the ClassTag's runtime class
      case e if implicitly[ClassTag[E]].runtimeClass.isInstance(e) => None
    }
}
| defvar/knot | knot-data/src/main/scala/knot/data/util/SafeParser.scala | Scala | mit | 697 |
import akka.LifeActors
import play.api.GlobalSettings
/** Play application lifecycle hooks.  The actor-system wiring (`LifeActors`)
  * is currently disabled — both hooks are intentionally inert.
  */
object Global extends GlobalSettings {

  override def onStart(application: play.api.Application) {
    //LifeActors
  }

  override def onStop(application: play.api.Application) {
    //LifeActors.system.shutdown()
  }
}
| kirhgoff/life-server | app/Global.scala | Scala | mit | 280 |
// Copyright (c) 2013, Johns Hopkins University. All rights reserved.
// This software is released under the 2-clause BSD license.
// See /LICENSE.txt
// Travis Wolfe, twolfe18@gmail.com, 30 July 2013
package edu.jhu.hlt.parma.input
import edu.jhu.hlt.parma.types._
import edu.jhu.hlt.parma.inference.{ DocMetaAligner, CanonicalMentionFinder }
import edu.jhu.hlt.parma.util.Describe
import edu.jhu.hlt.parma.diagnostics.GeneralDiagnostics
import scala.util.Random
import scala.collection.JavaConversions._
import scala.collection.mutable.{ArrayBuffer, HashSet}
/**
 * the purpose of this code is to make negative examples for pred-arg alignment:
 * gold alignments are degraded by splicing sentences (with anonymized
 * predicates/arguments) from other documents into the passage.  Also contains
 * utilities for selecting low-lexical-overlap subsets of alignments.
 */
object DocAlignmentPerturber {

  // uniform random choice from an indexed sequence
  private def randomElem[T](iseq: IndexedSeq[T]): T = iseq(Random.nextInt(iseq.size))

  val docsSeen = new HashSet[Document]

  // A sentence plus its predicates/arguments, harvested from another document,
  // ready to be substituted into a passage.
  sealed class SubIn(val sentence: Sentence,
      val preds: Seq[Predicate], // these have corefSets begining with "frankenstein"
      val args: Seq[Argument]) // these have corefSets begining with "frankenstein"

  /** Harvests `howManySentences` substitution sentences, at most one per
    * (randomly chosen) source document, anonymizing their preds/args.
    */
  private def chooseIncoming(howManySentences: Int, source: Seq[Document]): Seq[SubIn] = {
    // choose one sentence per document
    val subIns = new ArrayBuffer[SubIn]
    for(d <- Random.shuffle(source.toBuffer).take(howManySentences)) {
      val sentIdx = Random.nextInt(d.sentences.size)
      val sent: Sentence = d.sentences(sentIdx)
      val preds = d.predicates.filter(_.location.getSentenceIdx == sentIdx)
      val args = d.arguments.filter(_.location.getSentenceIdx == sentIdx)
      // coref information is dropped (see commented-out "frankenstein" corefSet args)
      val anonPreds = preds.map(p => new Predicate(p.location))//, "frankenstein"+p.getCorefSet))
      val anonArgs = args.map(a => new Argument(a.location))//, "frankenstein"+a.getCorefSet))
      subIns += new SubIn(sent, anonPreds, anonArgs)
    }
    subIns
  }

  // re-homes a mention into sentence `newSentIdx`, keeping its token offsets
  // (the `isRepresentative` flag is currently ignored)
  private def changeMentionSentence(newSentIdx: Int, m: Mention, isRepresentative: Boolean = false): Mention =
    MentionBuilder.from(newSentIdx, m.getStartTokenIdx, m.getEndTokenIdx, m.getHeadTokenIdx)

  /** Builds a copy of `doc` with `targetSentences` replaced by `substitutions`.
    * Preds/args in the replaced sentences are dropped; for each substitution,
    * with probability (1 - propRemove) its preds/args are moved in.
    */
  private def modifyDocument(doc: DocumentBuilder, targetSentences: Seq[Int], substitutions: Seq[SubIn], propRemove: Double): DocumentBuilder = {
    assert(targetSentences.size == substitutions.size)
    val newDoc = doc.deepCopy
    // remove preds/args in targetSentences
    val predsKeep = doc.predicates.filterNot(pred => targetSentences.contains(pred.location.getSentenceIdx))
    val argsKeep = doc.arguments.filterNot(arg => targetSentences.contains(arg.location.getSentenceIdx))
    for(pred <- predsKeep)
      newDoc.addPredicate(pred)//, doc.corefSet(pred))
    for(arg <- argsKeep)
      newDoc.addArgument(arg)//, doc.corefSet(arg))
    // add preds/args from subIns
    // swap in Sentences from subIns
    for((targetIdx, subIn) <- targetSentences.zip(substitutions)) {
      newDoc.setSentence(targetIdx, subIn.sentence)
      // need to make sure all these preds/args have locations which point to sentenceIdx=targetIdx
      if(Random.nextDouble > propRemove) {
        // move all of the predicates and arguments to the new document
        for(pred <- subIn.preds) {
          val movedPred = new Predicate(changeMentionSentence(targetIdx, pred.location))
          //val corefSet = doc.corefSet(pred)
          newDoc.addPredicate(movedPred)//, corefSet)
        }
        for(arg <- subIn.args) {
          val movedArg = new Argument(changeMentionSentence(targetIdx, arg.location))
          //val corefSet = doc.corefSet(arg)
          newDoc.addArgument(movedArg)//, corefSet)
        }
      }
    }
    // for testing (debug dumps of the resulting document)
    for(s <- substitutions) {
      for(p <- s.preds) println("[modifyDocument] sub.pred=" + Describe.predicate(p, newDoc))
      for(a <- s.args) println("[modifyDocument] sub.arg=" + Describe.argument(a, newDoc))
    }
    for(arg <- argsKeep) println("[modifyDocument] argsKeep=" + Describe.argument(arg, newDoc))
    for(pred <- predsKeep) println("[modifyDocument] predsKeep=" + Describe.predicate(pred, newDoc))
    for(arg <- newDoc.arguments) println("[modifyDocument] arg=" + Describe.argument(arg, newDoc))
    for(pred <- newDoc.predicates) println("[modifyDocument] pred=" + Describe.predicate(pred, newDoc))
    assert(newDoc.predicates.size > 0)
    assert(newDoc.arguments.size > 0)
    newDoc
  }

  /** Rebuilds `da` against `newPassage`, dropping alignments that touch any
    * of the replaced ("zombie") sentences.
    */
  private def modifyAlignment[D <: DocumentBuilder](da: ParametricDocAlignment[D], newPassage: D, targetSentences: Seq[Int]): ParametricDocAlignment[D] = {
    val zombieSents = targetSentences.toSet
    def inZombieSentence(a: Alignment): Boolean = a match {
      case pa: PredicateAlignment =>
        zombieSents.contains(pa.passagePred.location.getSentenceIdx)
      case aca: ArgCorefAlignment =>
        // an arg-coref alignment is dropped if ANY of its passage mentions
        // falls in a replaced sentence
        val ps = aca.passageCoref.map(_.location.getSentenceIdx).toSet
        (zombieSents & ps).size > 0
    }
    val skeep = da.sureAlignments.filterNot(inZombieSentence)
    val pkeep = da.possibleAlignments.filterNot(inZombieSentence)
    //println("[modifyAlignment] da.sure %d => %d, da.possible %d => %d"
    //  .format(da.sureAlignments.size, skeep.size, da.possibleAlignments.size, pkeep.size))
    val newDA = new ParametricDocAlignment[D](da.id, da.domain, da.report, newPassage, skeep, pkeep)
    GeneralDiagnostics.checkDocAlignment(newDA)
    //println("[modifyAlignment] success!")
    //for(a <- newDA.sureAlignments)
    //  println("[modifyAlignment] sure=" + Describe.alignment(a, newDA.report, newDA.passage))
    //for(a <- newDA.possibleAlignments)
    //  println("[modifyAlignment] possible=" + Describe.alignment(a, newDA.report, newDA.passage))
    /*
    assert(newDA.report.predicates.size > 0)
    assert(newDA.report.arguments.size > 0)
    assert(newDA.passage.predicates.size > 0)
    assert(newDA.passage.arguments.size > 0)
    */
    newDA
  }

  /**
   * propSub of the sentences in passage will be swapped out for a random sentence
   * propRemove of the swapped sentences will have their Arguments and Predicates dropped
   * (fewer Arguments/Predicates makes for faster inference)
   *
   * WARNING: currently disabled — always throws (see comment below); the
   * trailing `newDA` is unreachable.
   */
  def degradeDocAlignment(
      da: ParametricDocAlignment[DocumentBuilder],
      outOfDomain: Seq[Document],
      propSub: Double = 0.6,
      propRemove: Double = 0.75): ParametricDocAlignment[DocumentBuilder] = {
    val n = da.passage.sentences.size
    val howManySents = math.ceil(n * propSub).toInt
    val subIns = chooseIncoming(howManySents, outOfDomain)
    val targetSentences = Random.shuffle((0 until n).toBuffer).take(howManySents)
    val newPassage = modifyDocument(da.passage, targetSentences, subIns, propRemove)
    val newDA = modifyAlignment[DocumentBuilder](da, newPassage, targetSentences)
    // i don't think that the way of aligning here works once corefSet
    // is removed from Document. need to use AnnotationAligner.HalfAlignment
    throw new RuntimeException("you need to update this code")
    newDA
  }

  // TODO everything below this <<<<<<<<<<<<<<<<<<<<<<<<<<<
  // should be moved to another class
  // below this is used to select a subset, not perturb a doc alignment

  /** Keeps only alignments whose overlappiness score is at most the threshold. */
  def lowOverlap[DA <: DocAlignment](alignments: Seq[DA], maxOverlappiness: Double): Seq[DA] = {
    val fewer = alignments.filter(da => overlappiness(da) <= maxOverlappiness)
    println("[lowOverlap] thresh=%.1f, went from %d to %d alignments".format(maxOverlappiness, alignments.size, fewer.size))
    fewer
  }

  /** Returns the `howMany` least-overlapping alignments, logging the extremes. */
  def leastOverlapSubset[DA <: DocAlignment](alignments: Seq[DA], howMany: Int): Seq[DA] = {
    val sorted = alignments.sortBy(overlappiness)
    val k = 20   // how many extreme examples to print for diagnostics
    println("[DocAlignmentPerturber leastOverlapSubset] taking %d of %d doc alignments".format(howMany, alignments.size))
    println("[DocAlignmentPerturber leastOverlapSubset] least-overlappy %d alignments:".format(k))
    for((da, idx) <- sorted.take(k).zipWithIndex)
      println("[DocAlignmentPerturber leastOverlapSubset] overlappy=%.3g, da(%d)=%s".format(overlappiness(da), idx, Describe.docAlignment(da)))
    println("[DocAlignmentPerturber leastOverlapSubset] most-overlappy %d alignments:".format(k))
    for((da, idx) <- sorted.reverse.take(k).zipWithIndex)
      println("[DocAlignmentPerturber leastOverlapSubset] overlappy=%.3g, da(%d)=%s".format(overlappiness(da), idx, Describe.docAlignment(da)))
    val as = sorted.take(howMany)
    println("[DocAlignmentPerturber leastOverlapSubset] overlappiness of keep: " + as.map(overlappiness).mkString(", "))
    println("[DocAlignmentPerturber leastOverlapSubset] overlappiness of everything: " + sorted.map(overlappiness).mkString(", "))
    as
  }

  /** Convenience overload: keep the given proportion (rounded up) of alignments. */
  def leastOverlapSubset[DA <: DocAlignment](alignments: Seq[DA], propKeep: Double): Seq[DA] =
    leastOverlapSubset(alignments, math.ceil(alignments.size * propKeep).toInt)

  // counts overlappiness() calls so diagnostics are printed only every 100th call
  var counter = 0

  /** Average per-alignment lexical overlap of a doc alignment; possible-only
    * alignments are discounted relative to sure ones.
    */
  def overlappiness(da: DocAlignment): Double = {
    val sure = da.sureAlignments.toBuffer // don't use set.map because it will collapse repeats
    val possible = da.possibleAlignments.toBuffer
    val s = sure.map(a => overlappiness(a, da.context)).sum
    val p = possible.map(a => overlappiness(a, da.context)).sum
    val eps = 1e-6d // damn you floating point ops!
    // sure alignments are a subset of possible, so p should dominate s
    if(p+eps < s) {
      println("p = " + p)
      println("s = " + s)
      println("sure.size = "+ sure.size)
      println("possible.size = " + possible.size)
      println("da.sureAlignments.size = " + da.sureAlignments.size)
      println("da.possibleAlignments.size = " + da.possibleAlignments.size)
    }
    assert(p+eps >= s)
    assert(da.sureAlignments.size <= da.possibleAlignments.size)
    assert(sure.size <= possible.size)
    val discount = 3d   // weight of possible-only alignments relative to sure ones
    val num = s + (p - s ) / discount
    val denom = sure.size + (possible.size - sure.size) / discount
    //val num = s + (p-s) / discount
    //val denom = DocMetaAligner.allPossibleAlignments(da.report, da.passage).size.toDouble
    if(counter % 100 == 0) {
      println("[overlappiness] da=%s sure.size=%d possible.size=%d s=%.3g p=%.3g overlappy=%.8f"
        .format(da.id, da.sureAlignments.size, da.possibleAlignments.size, s, p, num/denom))
    }
    counter += 1
    if(denom <= 3)
      println("[overlappiness] WARNING: denom=%.1f for da.id=%s".format(denom, da.id))
    assert(denom > 0d)
    num / denom
  }

  /** Lexical overlap of a single alignment: compares the head tokens of the
    * two canonical mentions (lemma match dominates; word/POS matches add a
    * small bonus).
    */
  def overlappiness(a: Alignment, c: Context): Double = {
    val (rCM, pCM) = CanonicalMentionFinder.canonicalMentions(a, c)
    val rt = c.report.getHeadToken(rCM)
    val pt = c.passage.getHeadToken(pCM)
    val lemma = if(rt.getLemma equalsIgnoreCase pt.getLemma) 1d else 0d
    val word = if(rt.getWord equalsIgnoreCase pt.getWord) 1d else 0d
    val pos = if(rt.getPosTag equalsIgnoreCase pt.getPosTag) 1d else 0d
    lemma + 0.1d*word + 0.05d*pos
  }
}
| hltcoe/parma | src/main/scala/edu/jhu/hlt/parma/input/DocAlignmentPerturber.scala | Scala | bsd-2-clause | 10,285 |
package algebra
package instances
import algebra.lattice._
import algebra.ring._
// Bundles all Long instances for wildcard import: `import algebra.instances.long._`
package object long extends LongInstances
/** Algebra instances for `Long`, layered on the cats-kernel instances. */
trait LongInstances extends cats.kernel.instances.LongInstances {
  // commutative ring instance (see LongAlgebra below)
  implicit val longAlgebra: LongAlgebra =
    new LongAlgebra

  // non-implicit on purpose: a lattice under min/max bounded by Long.MinValue/MaxValue
  val LongMinMaxLattice: BoundedDistributiveLattice[Long] =
    BoundedDistributiveLattice.minMax[Long](Long.MinValue, Long.MaxValue)
}
/** Commutative ring structure on `Long` using the primitive arithmetic
  * operators (wrap-around on overflow, as usual for `Long`).
  */
class LongAlgebra extends CommutativeRing[Long] with Serializable {
  def zero: Long = 0   // additive identity
  def one: Long = 1    // multiplicative identity

  def plus(x: Long, y: Long): Long = x + y
  def negate(x: Long): Long = -x
  override def minus(x: Long, y: Long): Long = x - y

  def times(x: Long, y: Long): Long = x * y
  // delegates to the shared pow helper, widening the exponent to Long
  override def pow(x: Long, y: Int): Long = StaticMethods.pow(x, y.toLong)

  override def fromInt(n: Int): Long = n.toLong
  // BigInt.toLong truncates values outside the Long range
  override def fromBigInt(n: BigInt): Long = n.toLong
}
| sritchie/algebra | core/src/main/scala/algebra/instances/long.scala | Scala | mit | 858 |
package org.pdfextractor.algorithm.finder.it
import org.pdfextractor.db.domain.dictionary.PaymentFieldType
import org.pdfextractor.db.domain.dictionary.PaymentFieldType.TOTAL_BEFORE_TAXES
import org.springframework.stereotype.Service
@Service
class ItalianTotalBeforeTaxesFinder extends AbstractItalianTotalFinder(TOTAL_BEFORE_TAXES) {} | kveskimae/pdfalg | src/main/scala/org/pdfextractor/algorithm/finder/it/ItalianTotalBeforeTaxesFinder.scala | Scala | mit | 338 |
package org.apache.spark.mllib.sparselr
import org.apache.spark.mllib.sparselr.Utils._
/** Base class for computing the gradient and loss of a loss function at a
  * single data point, given the current model weights.
  */
abstract class Gradient extends Serializable {
  /**
   * Compute the gradient and loss given the features of a single data point.
   *
   * @param data features for one data point
   * @param label label for this data point
   * @param weights weights/coefficients corresponding to features
   *
   * @return (gradient: Vector, loss: Double)
   */
  def compute(
      data: Vector,
      label: Double,
      weights: Vector): (Vector, Double)

  /**
   * Compute the gradient and loss given the features of a single data point,
   * accumulating the gradient into a caller-provided vector instead of
   * allocating a new one.
   *
   * @param data features for one data point
   * @param label label for this data point
   * @param weights weights/coefficients corresponding to features
   * @param cumGradient accumulator the computed gradient is added into (mutated in place)
   *
   * @return loss: Double
   */
  def compute(
      data: Vector,
      label: Double,
      weights: Vector,
      cumGradient: Vector): Double
}
/** Gradient and loss computation for (binary) logistic regression. */
class LogisticGradient extends Gradient {

  /** Allocates a fresh sparse accumulator and delegates to the in-place variant. */
  override def compute(
      data: Vector,
      label: Double,
      weights: Vector): (Vector, Double) = {
    val cumGradient = org.apache.spark.mllib.sparselr.Utils.Vectors.hashSparseVector()
    val loss = compute(data, label, weights, cumGradient)
    (cumGradient, loss)
  }

  /** Adds this point's gradient into `cumGradient` and returns its loss. */
  override def compute(
      data: Vector,
      label: Double,
      weights: Vector,
      cumGradient: Vector): Double = {
    val margin = -1.0 * BLAS.dot(data, weights)
    val multiplier = (1.0 / (1.0 + math.exp(margin))) - label
    BLAS.axpy(multiplier, data, cumGradient)
    // log1pExp(margin) is equivalent to log(1 + exp(margin)) but numerically stable
    val stableLog = LRUtils.log1pExp(margin)
    if (label > 0) stableLog else stableLog - margin
  }
}
| hhbyyh/SparseML | src/main/scala/org/apache/spark/mllib/sparselr/Gradient.scala | Scala | apache-2.0 | 1,910 |
package biz.jackman.brewmaster.phases
/** A deferred settings-related action: implementors fix the result type
  * and provide the computation in `perform`.
  */
trait SettingsAction {
  // concrete result type chosen by each implementation
  type ReturnType
  // runs the action, yielding a value of the declared ReturnType
  def perform : ReturnType
}
| benjaminjackman/brewmaster | brewmaster-core/src/main/scala/biz/jackman/brewmaster/phases/SettingsAction.scala | Scala | gpl-3.0 | 107 |
package com.tribbloids.spookystuff.testutils
/** Overrides the remote document URLs from [[RemoteDocsFixture]] with local
  * test resources resolved via `TestDocsResolver.unpacked`.
  */
trait LocalPathDocsFixture extends RemoteDocsFixture {

  import TestDocsResolver._

  override def HTML_URL: String = unpacked("testutils/files/Wikipedia.html")
  override def JSON_URL: String = unpacked("testutils/files/tribbloid.json")
  override def PNG_URL: String = unpacked("testutils/files/logo11w.png")
  override def PDF_URL: String = unpacked("testutils/files/Test.pdf")
  override def XML_URL: String = unpacked("testutils/files/example.xml")
  override def CSV_URL: String = unpacked("testutils/files/table.csv")

  // directory fixtures, local-only (no remote counterpart to override)
  def DIR_URL: String = unpacked("testutils/files")
  def DEEP_DIR_URL: String = unpacked("testutils/dir")
}
| tribbloid/spookystuff | core/src/test/scala/com/tribbloids/spookystuff/testutils/LocalPathDocsFixture.scala | Scala | apache-2.0 | 682 |
package de.tototec.cmvn.configfile
import java.io.File
import java.util.List
/** Reads cmvn key/value configuration entries from a file. */
trait ConfigFileReader {
  // parses configFile and returns its entries (java.util.List for Java interop)
  def readKeyValues(configFile: File): List[KeyValue]
}
| jwausle/cmvn | de.tototec.cmvn/src/main/scala/de/tototec/cmvn/configfile/ConfigFileReader.scala | Scala | apache-2.0 | 160 |
// Copyright: 2010 - 2017 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/lgpl-3.0.en.html
package org.ensime.sexp
import org.scalatest._
import org.scalactic.TypeCheckedTripleEquals
import org.slf4j.LoggerFactory
import org.slf4j.bridge.SLF4JBridgeHandler
/**
 * Boilerplate remover and preferred testing style in S-Express:
 * bundles the common scalatest mixins so individual specs only
 * need to extend this one class.
 */
abstract class SexpSpec extends FlatSpec
    with Matchers
    with Inside
    with TryValues
    with Inspectors
    with TypeCheckedTripleEquals {

  // Route java.util.logging through SLF4J; remove the default JUL handlers
  // first so records are not emitted twice.
  SLF4JBridgeHandler.removeHandlersForRootLogger()
  SLF4JBridgeHandler.install()

  // logger named after the concrete spec subclass
  val log = LoggerFactory.getLogger(this.getClass)
}
| VlachJosef/ensime-server | s-express/src/test/scala/org/ensime/sexp/SexpSpec.scala | Scala | gpl-3.0 | 657 |
package com.htc.studio.demo
import anorm._
import play.api.db.DB
import play.api.mvc._
import play.api.libs.json._
import play.api.Play.current
import play.api.libs.concurrent.Execution.Implicits.defaultContext
/** Play controller exposing demo endpoints: H2-backed JSON dumps plus the
  * map-count, word-count and sentiment chart pages.
  */
object Application extends Controller {

  /**
   * Create a json string from a h2 table. Only shows two columns, one is
   * String type and the other is Long type.
   * @param table Table name.
   * @param strCol Name of the String column.
   * @param longCol Name of the Long column.
   * @author Zhongyang Zheng (zhongyang_zheng@htc.com)
   */
  def h2(table: String, strCol: String, longCol: String) = Action {
    DB.withConnection("h2") { implicit conn =>
      // NOTE(review): `table` (and the column names) come straight from the
      // route and are spliced into raw SQL — anorm cannot bind identifiers as
      // parameters, so this is injectable if exposed to untrusted callers.
      // Confirm these endpoints are demo/internal only, or validate the names.
      val data = SQL("select * from " + table)()
        .map(row => Json.obj(strCol -> row[String](strCol), longCol -> row[Long](longCol)))
      Ok(Json.toJson(data))
    }
  }

  /**
   * Create a map graph for map count demo.
   * @param table Table name of the data source. The column names must be word
   *              and count.
   * @param reload The interval (in seconds) to sleep before reload the graph.
   *               Useful for real-time demos. Default is -1 (i.e. no reload).
   * @author Zhongyang Zheng (zhongyang_zheng@htc.com)
   */
  def mapcount(table: String, reload: Int) =
    Action(Ok(html.MapCount(table, reload)))

  /**
   * Create a tag cloud graph for word count demo.
   * @param table Table name of the data source. The column names must be word
   *              and count.
   * @param min Only words with at least min counts are shown.
   *            Default is 1 (all words).
   * @param length Only words not longer than length are shown.
   *               Default is Int.MaxValue (all words).
   * @param reload The interval (in seconds) to sleep before reload the graph.
   *               Useful for real-time demos. Default is -1 (i.e. no reload).
   * @author Zhongyang Zheng (zhongyang_zheng@htc.com)
   */
  def wordcount(table: String, min: Int, length: Int, reload: Int) =
    Action(Ok(html.WordCount(table, min, length, reload)))

  /**
   * Create a json string from a h2 table for sentiment demo.
   * @param table Table name.
   * @param id id of tweet.
   * @param tweet content of tweet.
   * @param sentiment type of tweet.
   * @author Yonglin Fu (yonglin_fu@htc.com)
   */
  def h2Sentiment(table: String, id: String, tweet: String, sentiment: String) =
    Action { DB.withConnection("h2") { implicit conn =>
      // NOTE(review): same raw-SQL identifier splicing as in `h2` above.
      // The WHERE clause keeps only sentiment values below 4, excluding 2.
      val data = SQL("select * from " + table +
        " where SENTIMENT <4 and SENTIMENT != 2 order by ID DESC ")()
        .map(row => Json.obj(id -> row[Long](id).toString, tweet -> row[String](tweet),
          sentiment -> row[Int](sentiment)))
      Ok(Json.toJson(data))
    }}

  /**
   * Charts for tweet sentiment demo.
   * @param table Table name of the data source.
   * @author Yonglin Fu (yonglin_fu@htc.com)
   */
  def sentiment(table: String) = Action(Ok(html.Sentiment(table)))
}
| simonandluna/lama-demo | play/app/com/htc/studio/demo/Application.scala | Scala | agpl-3.0 | 2,875 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail.internal
import cats.effect.Sync
import cats.syntax.all._
import monix.execution.internal.collection.ArrayStack
import monix.tail.Iterant
import monix.tail.Iterant.{Concat, Halt, Last, Next, NextBatch, NextCursor, Scope, Suspend}
import monix.tail.batches.BatchCursor
private[tail] object IterantHeadOptionL {
/**
  * Implementation for `Iterant#headOption`.
  *
  * Strict `Next`/`Last` nodes are answered immediately without entering
  * the run-loop; every other node shape is handled by a fresh `Loop`
  * visitor, deferred via `F.suspend` so no work happens before the
  * effect is run.
  */
def apply[F[_], A](source: Iterant[F, A])
  (implicit F: Sync[F]): F[Option[A]] = {

  source match {
    case Next(a, _) => F.pure(Some(a))   // head immediately available
    case Last(a) => F.pure(Some(a))      // single-element stream
    case _ =>
      F.suspend(new Loop[F, A].apply(source))
  }
}
/** Stateful visitor that walks the iterant until a first element (or the
  * end of the stream) is found.  Not reusable across streams: it carries
  * a mutable stack of pending `Concat` right-hand sides.
  */
private final class Loop[F[_], A](implicit F: Sync[F])
  extends Iterant.Visitor[F, A, F[Option[A]]] {

  //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
  // Used in visit(Concat)
  //
  // Lazily allocated stack of pending right-hand sides of Concat nodes;
  // consulted whenever a left-hand side finishes without yielding a head.
  private[this] var stackRef: ArrayStack[F[Iterant[F, A]]] = _

  private def stackPush(item: F[Iterant[F, A]]): Unit = {
    if (stackRef == null) stackRef = new ArrayStack()
    stackRef.push(item)
  }

  // returns null (not None) when the stack is missing or empty
  private def stackPop(): F[Iterant[F, A]] = {
    if (stackRef != null) stackRef.pop()
    else null.asInstanceOf[F[Iterant[F, A]]]
  }

  // Continuation applied after a Concat's left side completes: if no head
  // was found there, resume with the next saved right-hand side, if any.
  private[this] val concatContinue: (Option[A] => F[Option[A]]) = {
    case None =>
      stackPop() match {
        case null => F.pure(None)
        case xs => xs.flatMap(this)
      }
    case some =>
      F.pure(some)
  }
  //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=

  def visit(ref: Next[F, A]): F[Option[A]] =
    F.pure(Some(ref.item))

  def visit(ref: NextBatch[F, A]): F[Option[A]] =
    processCursor(ref.batch.cursor(), ref.rest)

  def visit(ref: NextCursor[F, A]): F[Option[A]] =
    processCursor(ref.cursor, ref.rest)

  def visit(ref: Suspend[F, A]): F[Option[A]] =
    ref.rest.flatMap(this)

  def visit(ref: Concat[F, A]): F[Option[A]] = {
    // save the right side, fully process the left, then continue
    stackPush(ref.rh)
    ref.lh.flatMap(this).flatMap(concatContinue)
  }

  def visit[S](ref: Scope[F, S, A]): F[Option[A]] =
    ref.runFold(this)

  def visit(ref: Last[F, A]): F[Option[A]] =
    F.pure(Some(ref.item))

  def visit(ref: Halt[F, A]): F[Option[A]] =
    ref.e match {
      case Some(e) => F.raiseError(e)   // stream ended in error
      case None => F.pure(None)         // stream ended empty
    }

  def fail(e: Throwable): F[Option[A]] =
    F.raiseError(e)

  // take the first element from the cursor, or continue with the rest
  private def processCursor(cursor: BatchCursor[A], rest: F[Iterant[F, A]]): F[Option[A]] = {
    if (cursor.hasNext())
      F.pure(Some(cursor.next()))
    else
      rest.flatMap(this)
  }
}
} | ddworak/monix | monix-tail/shared/src/main/scala/monix/tail/internal/IterantHeadOptionL.scala | Scala | apache-2.0 | 3,285 |
import org.scalatest.{Matchers, FlatSpec}
class SquaresTest extends FlatSpec with Matchers {
  // One fixture row per input n:
  // (n, square of sums, sum of squares, difference between the two).
  private val fixtures = Seq(
    (5, 225, 55, 170),
    (10, 3025, 385, 2640),
    (100, 25502500, 338350, 25164150))

  // Register the same three checks per input, preserving the original
  // test names and registration order.
  fixtures.foreach { case (n, squareOfSums, sumOfSquares, difference) =>
    it should s"calc square of sums to $n" in {
      Squares.squareOfSums(n) should equal(squareOfSums)
    }
    it should s"calc sum of squares to $n" in {
      Squares.sumOfSquares(n) should equal(sumOfSquares)
    }
    it should s"calc difference of sums to $n" in {
      Squares.difference(n) should equal(difference)
    }
  }
}
| stanciua/exercism | scala/difference-of-squares/src/test/scala/SquaresTest.scala | Scala | mit | 1,204 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.expressions
import org.apache.flink.table.api._
import org.apache.flink.table.expressions.{Expression, ExpressionParser, TimeIntervalUnit, TimePointUnit}
import org.apache.flink.table.planner.expressions.utils.ScalarTypesTestBase
import org.junit.Test
class ScalarFunctionsTest extends ScalarTypesTestBase {
// ----------------------------------------------------------------------------------------------
// String functions
// ----------------------------------------------------------------------------------------------
@Test
def testOverlay(): Unit = {
// (tableApiString, sqlApiString, expectedResult)
val cases = Seq(
// constants, no length argument
("'xxxxxtest'.overlay('xxxx', 6)",
"OVERLAY('xxxxxtest' PLACING 'xxxx' FROM 6)",
"xxxxxxxxx"),
// constants
("'xxxxxtest'.overlay('xxxx', 6, 2)",
"OVERLAY('xxxxxtest' PLACING 'xxxx' FROM 6 FOR 2)",
"xxxxxxxxxst"),
// invalid position on the constants
("'123456789'.overlay('abc', 100, 2)",
"OVERLAY('123456789' PLACING 'It' FROM -1 FOR 4)",
"123456789"),
// invalid position on the constants
("'123456789'.overlay('abc', -1, 2)",
"OVERLAY('123456789' PLACING 'It' FROM -1 FOR 2)",
"123456789"),
// invalid len on the constants
("'123456789'.overlay('abc', 2, 100)",
"OVERLAY('123456789' PLACING 'abc' FROM 2 FOR 100)",
"1abc"),
// invalid len on the constants
("'123456789'.overlay('abc', 2, -1)",
"OVERLAY('123456789' PLACING 'abc' FROM 2 FOR -1)",
"1abc"),
// invalid start & len on the constants
("'123456789'.overlay('abc', 100, -1)",
"OVERLAY('123456789' PLACING 'abc' FROM 100 FOR -1)",
"123456789"),
// field
("f0.overlay('It', 1, 4)",
"OVERLAY(f0 PLACING 'It' FROM 1 FOR 4)",
"It is a test String."),
// invalid position
("f0.overlay('It', -1, 4)",
"OVERLAY(f0 PLACING 'It' FROM -1 FOR 4)",
"This is a test String."),
// invalid position
("f0.overlay('It', 100, 4)",
"OVERLAY(f0 PLACING 'It' FROM 100 FOR 4)",
"This is a test String."),
// invalid position
("f0.overlay('It', -1, 2)",
"OVERLAY(f0 PLACING 'It' FROM -1 FOR 2)",
"This is a test String."),
// invalid position
("f0.overlay('It', 100, 2)",
"OVERLAY(f0 PLACING 'It' FROM 100 FOR 2)",
"This is a test String."),
// invalid length
("f0.overlay('IS', 6, 100)",
"OVERLAY(f0 PLACING 'IS' FROM 6 FOR 100)",
"This IS"),
// invalid length
("f0.overlay('IS', 6, -1)",
"OVERLAY(f0 PLACING 'IS' FROM 6 FOR -1)",
"This IS"),
// null field. f40 is NULL.
("f40.overlay('It', 1, 4)",
"OVERLAY(f40 PLACING 'It' FROM 1 FOR 2)",
"null")
)
cases.foreach(x => {
testAllApis(
ExpressionParser.parseExpression(x._1),
x._1,
x._2,
x._3
)
})
}
@Test
def testPosition(): Unit = {
testAllApis(
"test".position("xxxtest"),
"'test'.position('xxxtest')",
"POSITION('test' IN 'xxxtest')",
"4")
testAllApis(
"testx".position("xxxtest"),
"'testx'.position('xxxtest')",
"POSITION('testx' IN 'xxxtest')",
"0")
testSqlApi(
"POSITION('aa' IN 'aaads')",
"1")
testSqlApi("position('aa' in 'aaads')", "1")
}
@Test
def testLocate(): Unit = {
testSqlApi(
"locate('test', 'xxxtest')",
"4")
testSqlApi(
"locate('testx', 'xxxtest')",
"0")
testSqlApi("locate('aa', 'aaads')", "1")
testSqlApi("locate('aa', 'aaads', 2)", "2")
}
@Test
def testLeft(): Unit = {
testSqlApi(
"`LEFT`(f0, 2)",
"Th")
testSqlApi(
"`LEFT`(f0, 100)",
"This is a test String.")
testSqlApi(
"`LEFT`(f0, -2)",
"")
testSqlApi(
"`LEFT`(f0, 0)",
"")
testSqlApi(
"`LEFT`(f0, CAST(null as Integer))",
"null")
testSqlApi(
"`LEFT`(CAST(null as VARCHAR), -2)",
"null")
testSqlApi(
"`LEFT`(CAST(null as VARCHAR), 2)",
"null")
}
@Test
def testRight(): Unit = {
testSqlApi(
"`right`(f0, 2)",
"g.")
testSqlApi(
"`right`(f0, 100)",
"This is a test String.")
testSqlApi(
"`right`(f0, -2)",
"")
testSqlApi(
"`right`(f0, 0)",
"")
testSqlApi(
"`right`(f0, CAST(null as Integer))",
"null")
testSqlApi(
"`right`(CAST(null as VARCHAR), -2)",
"null")
testSqlApi(
"`right`(CAST(null as VARCHAR), 2)",
"null")
}
@Test
def testAscii(): Unit = {
testSqlApi(
"ascii('efg')",
"101")
testSqlApi(
"ascii('abcdef')",
"97")
testSqlApi(
"ascii('')",
"0")
testSqlApi(
"ascii(cast (null AS VARCHAR))",
"null"
)
testSqlApi(
"ascii('val_238') = ascii('val_239')",
"true"
)
}
@Test
def testInstr(): Unit = {
testSqlApi(
"instr('Corporate Floor', 'or', 3, 2)",
"14")
testSqlApi(
"instr('Corporate Floor', 'or', -3, 2)",
"2")
testSqlApi(
"instr('Tech on the net', 'e')",
"2")
testSqlApi(
"instr('Tech on the net', 'e', 1, 2)",
"11")
testSqlApi(
"instr('Tech on the net', 'e', 1, 3)",
"14")
testSqlApi(
"instr('Tech on the net', 'e', -3, 2)",
"2")
testSqlApi(
"instr('myteststring', 'st')",
"5")
testSqlApi(
"instr(cast (null AS VARCHAR), 'e')",
"null"
)
testSqlApi(
"instr('e', cast (null AS VARCHAR))",
"null"
)
testSqlApi(
"instr('val_238', '_') = instr('val_239', '_')",
"true"
)
testSqlApi(
"instr('val_239', '_')",
"4"
)
}
@Test
def testSubstring(): Unit = {
testAllApis(
'f0.substring(2),
"f0.substring(2)",
"SUBSTRING(f0, 2)",
"his is a test String.")
testAllApis(
'f0.substring(2, 5),
"f0.substring(2, 5)",
"SUBSTRING(f0, 2, 5)",
"his i")
testAllApis(
'f0.substring(1, 'f7),
"f0.substring(1, f7)",
"SUBSTRING(f0, 1, f7)",
"Thi")
testAllApis(
'f0.substring(1.cast(DataTypes.TINYINT), 'f7),
"f0.substring(1.cast(BYTE), f7)",
"SUBSTRING(f0, CAST(1 AS TINYINT), f7)",
"Thi")
testSqlApi(
"SUBSTRING(f0 FROM 2 FOR 1)",
"h")
testSqlApi(
"SUBSTRING(f0 FROM 2)",
"his is a test String.")
testSqlApi(
"SUBSTRING(f0 FROM -2)",
"g.")
testSqlApi(
"SUBSTRING(f0 FROM -2 FOR 1)",
"g")
testSqlApi(
"SUBSTRING(f0 FROM -2 FOR 0)",
"")
}
@Test
def testReplace(): Unit = {
testAllApis(
'f0.replace(" ", "_"),
"f0.replace(' ', '_')",
"REPLACE(f0, ' ', '_')",
"This_is_a_test_String.")
testAllApis(
'f0.replace("i", ""),
"f0.replace('i', '')",
"REPLACE(f0, 'i', '')",
"Ths s a test Strng.")
testAllApis(
'f33.replace("i", ""),
"f33.replace('i', '')",
"REPLACE(f33, 'i', '')",
"null")
testAllApis(
'f0.replace(nullOf(DataTypes.STRING), ""),
"f0.replace(Null(STRING), '')",
"REPLACE(f0, NULLIF('', ''), '')",
"null")
testAllApis(
'f0.replace(" ", nullOf(DataTypes.STRING)),
"f0.replace(' ', Null(STRING))",
"REPLACE(f0, ' ', NULLIF('', ''))",
"null")
}
@Test
def testTrim(): Unit = {
testAllApis(
'f8.trim(),
"f8.trim()",
"TRIM(f8)",
"This is a test String.")
testAllApis(
'f8.trim(removeLeading = true, removeTrailing = true, " "),
"trim(f8)",
"TRIM(f8)",
"This is a test String.")
testAllApis(
'f8.trim(removeLeading = false, removeTrailing = true, " "),
"f8.trim(TRAILING, ' ')",
"TRIM(TRAILING FROM f8)",
" This is a test String.")
testAllApis(
'f0.trim(removeLeading = true, removeTrailing = true, "."),
"trim(BOTH, '.', f0)",
"TRIM(BOTH '.' FROM f0)",
"This is a test String")
testSqlApi(
"trim(BOTH 'abc' FROM 'abcddcba')",
"dd")
testSqlApi(
"trim(BOTH 'abd' FROM 'abcddcba')",
"cddc")
testSqlApi(
"trim(BOTH '开心' FROM '心情开开心心')",
"情")
testSqlApi(
"trim(BOTH '开心' FROM '心情开开心心')",
"情")
testSqlApi("trim(LEADING from ' example ')", "example ")
testSqlApi("trim(TRAILING from ' example ')", " example")
testSqlApi("trim(BOTH from ' example ')", "example")
testSqlApi("trim(LEADING 'e' from 'example')", "xample")
testSqlApi("trim(TRAILING 'e' from 'example')", "exampl")
testSqlApi("trim(BOTH 'e' from 'example')", "xampl")
testSqlApi("trim(BOTH 'xyz' from 'example')", "example")
}
@Test
def testLTrim(): Unit = {
testAllApis(
'f8.ltrim(),
"f8.ltrim()",
"LTRIM(f8)",
"This is a test String. ")
testSqlApi(
"LTRIM(f8)",
"This is a test String. ")
testSqlApi(
"LTRIM(f0, 'This ')",
"a test String.")
testSqlApi(
"ltrim('abcddcba', 'abc')",
"ddcba")
testSqlApi(
"LTRIM('abcddcba', 'abd')",
"cddcba")
testSqlApi(
"ltrim('心情开开心心', '开心')",
"情开开心心")
testSqlApi(
"LTRIM('abcddcba', CAST(null as VARCHAR))",
"null")
testSqlApi(
"LTRIM(CAST(null as VARCHAR), 'abcddcba')",
"null")
}
@Test
def testRTrim(): Unit = {
testAllApis(
'f8.rtrim(),
"f8.rtrim()",
"rtrim(f8)",
" This is a test String.")
testSqlApi(
"rtrim(f8)",
" This is a test String.")
testSqlApi(
"rtrim(f0, 'String. ')",
"This is a tes")
testSqlApi(
"rtrim('abcddcba', 'abc')",
"abcdd")
testSqlApi(
"rtrim('abcddcba', 'abd')",
"abcddc")
testSqlApi(
"rtrim('心情开开心心', '开心')",
"心情")
testSqlApi(
"rtrim('abcddcba', CAST(null as VARCHAR))",
"null")
testSqlApi(
"rtrim(CAST(null as VARCHAR), 'abcddcba')",
"null")
}
@Test
def testCharLength(): Unit = {
testAllApis(
'f0.charLength(),
"f0.charLength()",
"CHAR_LENGTH(f0)",
"22")
testAllApis(
'f0.charLength(),
"charLength(f0)",
"CHARACTER_LENGTH(f0)",
"22")
}
@Test
def testUpperCase(): Unit = {
testAllApis(
'f0.upperCase(),
"f0.upperCase()",
"UPPER(f0)",
"THIS IS A TEST STRING.")
}
@Test
def testLowerCase(): Unit = {
testAllApis(
'f0.lowerCase(),
"f0.lowerCase()",
"LOWER(f0)",
"this is a test string.")
}
@Test
def testInitCap(): Unit = {
testAllApis(
'f0.initCap(),
"f0.initCap()",
"INITCAP(f0)",
"This Is A Test String.")
testSqlApi("INITCAP('ab')", "Ab")
testSqlApi("INITCAP('a B')", "A B")
testSqlApi("INITCAP('fLinK')", "Flink")
}
@Test
def testConcat(): Unit = {
testAllApis(
'f0 + 'f0,
"f0 + f0",
"f0||f0",
"This is a test String.This is a test String.")
}
@Test
def testLike(): Unit = {
testAllApis(
'f0.like("Th_s%"),
"f0.like('Th_s%')",
"f0 LIKE 'Th_s%'",
"true")
testAllApis(
'f0.like("%is a%"),
"f0.like('%is a%')",
"f0 LIKE '%is a%'",
"true")
testSqlApi("'abcxxxdef' LIKE 'abcx%'", "true")
testSqlApi("'abcxxxdef' LIKE '%%def'", "true")
testSqlApi("'abcxxxdef' LIKE 'abcxxxdef'", "true")
testSqlApi("'abcxxxdef' LIKE '%xdef'", "true")
testSqlApi("'abcxxxdef' LIKE 'abc%def%'", "true")
testSqlApi("'abcxxxdef' LIKE '%abc%def'", "true")
testSqlApi("'abcxxxdef' LIKE '%abc%def%'", "true")
testSqlApi("'abcxxxdef' LIKE 'abc%def'", "true")
// false
testSqlApi("'abcxxxdef' LIKE 'abdxxxdef'", "false")
testSqlApi("'abcxxxdef' LIKE '%xqef'", "false")
testSqlApi("'abcxxxdef' LIKE 'abc%qef%'", "false")
testSqlApi("'abcxxxdef' LIKE '%abc%qef'", "false")
testSqlApi("'abcxxxdef' LIKE '%abc%qef%'", "false")
testSqlApi("'abcxxxdef' LIKE 'abc%qef'", "false")
}
@Test
def testNotLike(): Unit = {
testAllApis(
!'f0.like("Th_s%"),
"!f0.like('Th_s%')",
"f0 NOT LIKE 'Th_s%'",
"false")
testAllApis(
!'f0.like("%is a%"),
"!f0.like('%is a%')",
"f0 NOT LIKE '%is a%'",
"false")
}
@Test
def testLikeWithEscape(): Unit = {
testSqlApi(
"f23 LIKE '&%Th_s%' ESCAPE '&'",
"true")
testSqlApi(
"f23 LIKE '&%%is a%' ESCAPE '&'",
"true")
testSqlApi(
"f0 LIKE 'Th_s%' ESCAPE '&'",
"true")
testSqlApi(
"f0 LIKE '%is a%' ESCAPE '&'",
"true")
}
@Test
def testNotLikeWithEscape(): Unit = {
testSqlApi(
"f23 NOT LIKE '&%Th_s%' ESCAPE '&'",
"false")
testSqlApi(
"f23 NOT LIKE '&%%is a%' ESCAPE '&'",
"false")
testSqlApi(
"f0 NOT LIKE 'Th_s%' ESCAPE '&'",
"false")
testSqlApi(
"f0 NOT LIKE '%is a%' ESCAPE '&'",
"false")
}
@Test
def testSimilar(): Unit = {
testAllApis(
'f0.similar("_*"),
"f0.similar('_*')",
"f0 SIMILAR TO '_*'",
"true")
testAllApis(
'f0.similar("This (is)? a (test)+ Strin_*"),
"f0.similar('This (is)? a (test)+ Strin_*')",
"f0 SIMILAR TO 'This (is)? a (test)+ Strin_*'",
"true")
}
@Test
def testNotSimilar(): Unit = {
testAllApis(
!'f0.similar("_*"),
"!f0.similar('_*')",
"f0 NOT SIMILAR TO '_*'",
"false")
testAllApis(
!'f0.similar("This (is)? a (test)+ Strin_*"),
"!f0.similar('This (is)? a (test)+ Strin_*')",
"f0 NOT SIMILAR TO 'This (is)? a (test)+ Strin_*'",
"false")
}
@Test
def testSimilarWithEscape(): Unit = {
testSqlApi(
"f24 SIMILAR TO '&*&__*' ESCAPE '&'",
"true")
testSqlApi(
"f0 SIMILAR TO '_*' ESCAPE '&'",
"true")
testSqlApi(
"f24 SIMILAR TO '&*&_This (is)? a (test)+ Strin_*' ESCAPE '&'",
"true")
testSqlApi(
"f0 SIMILAR TO 'This (is)? a (test)+ Strin_*' ESCAPE '&'",
"true")
}
@Test
def testNotSimilarWithEscape(): Unit = {
testSqlApi(
"f24 NOT SIMILAR TO '&*&__*' ESCAPE '&'",
"false")
testSqlApi(
"f0 NOT SIMILAR TO '_*' ESCAPE '&'",
"false")
testSqlApi(
"f24 NOT SIMILAR TO '&*&_This (is)? a (test)+ Strin_*' ESCAPE '&'",
"false")
testSqlApi(
"f0 NOT SIMILAR TO 'This (is)? a (test)+ Strin_*' ESCAPE '&'",
"false")
}
@Test
def testMultiConcat(): Unit = {
testAllApis(
concat("xx", 'f33),
"concat('xx', f33)",
"CONCAT('xx', f33)",
"null")
testAllApis(
concat("AA", "BB", "CC", "---"),
"concat('AA','BB','CC','---')",
"CONCAT('AA','BB','CC','---')",
"AABBCC---")
testAllApis(
concat("x~x", "b~b", "c~~~~c", "---"),
"concat('x~x','b~b','c~~~~c','---')",
"CONCAT('x~x','b~b','c~~~~c','---')",
"x~xb~bc~~~~c---")
testSqlApi("concat(f35)", "a")
testSqlApi("concat(f35,f36)", "ab")
testSqlApi("concat(f35,f36,f33)", "null")
}
@Test
def testConcatWs(): Unit = {
testAllApis(
concat_ws('f33, "AA"),
"concat_ws(f33, 'AA')",
"CONCAT_WS(f33, 'AA')",
"null")
testAllApis(
concat_ws("~~~~", "AA"),
"concat_ws('~~~~','AA')",
"concat_ws('~~~~','AA')",
"AA")
testAllApis(
concat_ws("~", "AA", "BB"),
"concat_ws('~','AA','BB')",
"concat_ws('~','AA','BB')",
"AA~BB")
testAllApis(
concat_ws("~", 'f33, "AA", "BB", "", 'f33, "CC"),
"concat_ws('~',f33, 'AA','BB','',f33, 'CC')",
"concat_ws('~',f33, 'AA','BB','',f33, 'CC')",
"AA~BB~~CC")
testAllApis(
concat_ws("~~~~", "Flink", 'f33, "xx", 'f33, 'f33),
"concat_ws('~~~~','Flink', f33, 'xx', f33, f33)",
"CONCAT_WS('~~~~','Flink', f33, 'xx', f33, f33)",
"Flink~~~~xx")
testSqlApi("concat_ws('||', f35, f36, f33)", "a||b")
}
@Test
def testRegexpReplace(): Unit = {
testAllApis(
"foobar".regexpReplace("oo|ar", "abc"),
"'foobar'.regexpReplace('oo|ar', 'abc')",
"regexp_replace('foobar', 'oo|ar', 'abc')",
"fabcbabc")
testAllApis(
"foofar".regexpReplace("^f", ""),
"'foofar'.regexpReplace('^f', '')",
"regexp_replace('foofar', '^f', '')",
"oofar")
testAllApis(
"foobar".regexpReplace("^f*.*r$", ""),
"'foobar'.regexpReplace('^f*.*r$', '')",
"regexp_replace('foobar', '^f*.*r$', '')",
"")
testAllApis(
"foo1bar2".regexpReplace("\\\\d", ""),
"'foo1bar2'.regexpReplace('\\\\d', '')",
"regexp_replace('foobar', '\\\\d', '')",
"foobar")
testAllApis(
"foobar".regexpReplace("\\\\w", ""),
"'foobar'.regexpReplace('\\\\w', '')",
"regexp_replace('foobar', '\\\\w', '')",
"")
testAllApis(
"fooobar".regexpReplace("oo", "$"),
"'fooobar'.regexpReplace('oo', '$')",
"regexp_replace('fooobar', 'oo', '$')",
"f$obar")
testAllApis(
"foobar".regexpReplace("oo", "\\\\"),
"'foobar'.regexpReplace('oo', '\\\\')",
"regexp_replace('foobar', 'oo', '\\\\')",
"f\\\\bar")
testAllApis(
'f33.regexpReplace("oo|ar", ""),
"f33.regexpReplace('oo|ar', '')",
"REGEXP_REPLACE(f33, 'oo|ar', '')",
"null")
testAllApis(
"foobar".regexpReplace('f33, ""),
"'foobar'.regexpReplace(f33, '')",
"REGEXP_REPLACE('foobar', f33, '')",
"null")
testAllApis(
"foobar".regexpReplace("oo|ar", 'f33),
"'foobar'.regexpReplace('oo|ar', f33)",
"REGEXP_REPLACE('foobar', 'oo|ar', f33)",
"null")
// This test was added for the null literal problem in string expression parsing (FLINK-10463).
testAllApis(
nullOf(Types.STRING).regexpReplace("oo|ar", 'f33),
"nullOf(STRING).regexpReplace('oo|ar', f33)",
"REGEXP_REPLACE(CAST(NULL AS VARCHAR), 'oo|ar', f33)",
"null")
}
@Test
def testRegexpExtract(): Unit = {
testAllApis(
"foothebar".regexpExtract("foo(.*?)(bar)", 2),
"'foothebar'.regexpExtract('foo(.*?)(bar)', 2)",
"REGEXP_EXTRACT('foothebar', 'foo(.*?)(bar)', 2)",
"bar")
testAllApis(
"foothebar".regexpExtract("foo(.*?)(bar)", 0),
"'foothebar'.regexpExtract('foo(.*?)(bar)', 0)",
"REGEXP_EXTRACT('foothebar', 'foo(.*?)(bar)', 0)",
"foothebar")
testAllApis(
"foothebar".regexpExtract("foo(.*?)(bar)", 1),
"'foothebar'.regexpExtract('foo(.*?)(bar)', 1)",
"REGEXP_EXTRACT('foothebar', 'foo(.*?)(bar)', 1)",
"the")
testAllApis(
"foothebar".regexpExtract("foo([\\\\w]+)", 1),
"'foothebar'.regexpExtract('foo([\\\\w]+)', 1)",
"REGEXP_EXTRACT('foothebar', 'foo([\\\\w]+)', 1)",
"thebar")
testAllApis(
"foothebar".regexpExtract("foo([\\\\d]+)", 1),
"'foothebar'.regexpExtract('foo([\\\\d]+)', 1)",
"REGEXP_EXTRACT('foothebar', 'foo([\\\\d]+)', 1)",
"null")
testAllApis(
'f33.regexpExtract("foo(.*?)(bar)", 2),
"f33.regexpExtract('foo(.*?)(bar)', 2)",
"REGEXP_EXTRACT(f33, 'foo(.*?)(bar)', 2)",
"null")
testAllApis(
"foothebar".regexpExtract('f33, 2),
"'foothebar'.regexpExtract(f33, 2)",
"REGEXP_EXTRACT('foothebar', f33, 2)",
"null")
//test for optional group index
testAllApis(
"foothebar".regexpExtract("foo(.*?)(bar)"),
"'foothebar'.regexpExtract('foo(.*?)(bar)')",
"REGEXP_EXTRACT('foothebar', 'foo(.*?)(bar)')",
"foothebar")
}
@Test
def testFromBase64(): Unit = {
testSqlApi(
"FROM_BASE64('aGVsbG8gd29ybGQ=')",
"hello world")
testSqlApi(
"CONCAT(FROM_BASE64('5L2g5aW9'), ' flink')",
"你好 flink")
//null test
testSqlApi(
"FROM_BASE64(f33)",
"null")
testSqlApi(
"FROM_BASE64('5L2g5aW9')",
"你好"
)
testSqlApi(
"FROM_BASE64(CAST(x'6147567362473867643239796247513D' AS VARBINARY))",
"hello world")
testSqlApi(
"FROM_BASE64(x'6147567362473867643239796247513D')",
"hello world")
testSqlApi(
"FROM_BASE64(f58)",
"你好")
}
@Test
def testToBase64(): Unit = {
testSqlApi(
"TO_BASE64(f0)",
"VGhpcyBpcyBhIHRlc3QgU3RyaW5nLg==")
testSqlApi(
"TO_BASE64(f8)",
"IFRoaXMgaXMgYSB0ZXN0IFN0cmluZy4g")
testSqlApi(
"TO_BASE64('')",
"")
//null test
testSqlApi(
"TO_BASE64(f33)",
"null")
testSqlApi(
"TO_BASE64('你好')",
"5L2g5aW9"
)
testSqlApi(
"to_base64(f37)",
"AQIDBA==")
testSqlApi(
"to_base64(from_base64(f38))",
"AQIDBA==")
}
@Test
def testSubString(): Unit = {
Array("substring", "substr").foreach {
substr =>
testSqlApi(s"$substr(f0, 2, 3)", "his")
testSqlApi(s"$substr(f0, 2, 100)", "his is a test String.")
testSqlApi(s"$substr(f0, 100, 10)", "")
testSqlApi(s"$substr(f0, 2, -1)", "null")
testSqlApi(s"$substr(f40, 2, 3)", "null")
testSqlApi(s"$substr(CAST(null AS VARCHAR), 2, 3)", "null")
testSqlApi(s"$substr(f0, 2, f14)", "null")
testSqlApi(s"$substr(f0, f30, f7)", "Thi")
testSqlApi(s"$substr(f39, 1, 2)", "1世")
}
}
@Test
def testLPad(): Unit = {
testSqlApi("lpad(f33,1,'??')", "null")
testSqlApi("lpad(f35, 1, '??')", "a")
testSqlApi("lpad(f35, 2, '??')", "?a")
testSqlApi("lpad(f35, 5, '??')", "????a")
testSqlApi("lpad(f35, CAST(null as INT), '??')", "null")
testSqlApi("lpad(f35, 5, CAST(null as VARCHAR))", "null")
testSqlApi("lpad(f40, 1, '??')", "null")
testSqlApi("lpad('hi', 1, '??')", "h")
testSqlApi("lpad('hi', 5, '??')", "???hi")
testSqlApi("lpad(CAST(null as VARCHAR), 5, '??')", "null")
testSqlApi("lpad('hi', CAST(null as INT), '??')", "null")
testSqlApi("lpad('hi', 5, CAST(null as VARCHAR))", "null")
testSqlApi("lpad('',1,'??')", "?")
testSqlApi("lpad('',30,'??')", "??????????????????????????????")
testSqlApi("lpad('111',-2,'??')", "null")
testSqlApi("lpad('\\u0061\\u0062',1,'??')", "a") // the unicode of ab is \\u0061\\u0062
testSqlApi("lpad('⎨⎨',1,'??')", "⎨")
testSqlApi("lpad('äääääääää',2,'??')", "ää")
testSqlApi("lpad('äääääääää',10,'??')", "?äääääääää")
testSqlApi("lpad('Hello', -1, 'x') IS NULL", "true")
testSqlApi("lpad('Hello', -1, 'x') IS NOT NULL", "false")
testSqlApi("lpad('ab', 5, '')", "null")
testAllApis(
"äää".lpad(13, "12345"),
"'äää'.lpad(13, '12345')",
"lpad('äää',13,'12345')",
"1234512345äää")
}
@Test
def testRPad(): Unit = {
testSqlApi("rpad(f33,1,'??')", "null")
testSqlApi("rpad(f35, 1, '??')", "a")
testSqlApi("rpad(f35, 2, '??')", "a?")
testSqlApi("rpad(f35, 5, '??')", "a????")
testSqlApi("rpad(f35, CAST(null as INT), '??')", "null")
testSqlApi("rpad(f35, 5, CAST(null as VARCHAR))", "null")
testSqlApi("rpad(f40, 1, '??')", "null")
testSqlApi("rpad('hi', 1, '??')", "h")
testSqlApi("rpad('hi', 5, '??')", "hi???")
testSqlApi("rpad(CAST(null as VARCHAR), 5, '??')", "null")
testSqlApi("rpad('hi', CAST(null as INT), '??')", "null")
testSqlApi("rpad('hi', 5, CAST(null as VARCHAR))", "null")
testSqlApi("rpad('',1,'??')", "?")
testSqlApi("rpad('111',-2,'??')", "null")
testSqlApi("rpad('\\u0061\\u0062',1,'??')", "a") // the unicode of ab is \\u0061\\u0062
testSqlApi("rpad('üö',1,'??')", "ü")
testSqlApi("rpad('abcd', 5, '')", "null")
testAllApis(
"äää".rpad(13, "12345"),
"'äää'.rpad(13, '12345')",
"rpad('äää',13,'12345')",
"äää1234512345")
}
  @Test
  def testParseUrl(): Unit = {
    // NOTE: parse_url() requires HOST PATH etc. all capitalized
    // Helper: asserts parse_url(url, part) for every URL component, plus
    // the three-argument QUERY lookup for the 'query' key (expected `qv`).
    def testUrl(
        url: String,
        host: String,
        path: String,
        query: String,
        ref: String,
        protocol: String,
        file: String,
        authority: String,
        userInfo: String,
        qv: String)
      : Unit = {
      val parts =
        Map(
          "HOST" -> host,
          "PATH" -> path,
          "QUERY" -> query,
          "REF" -> ref,
          "PROTOCOL" -> protocol,
          "FILE" -> file,
          "AUTHORITY" -> authority,
          "USERINFO" -> userInfo)
      for ((n, v) <- parts) {
        testSqlApi(s"parse_url('$url', '$n')", v)
      }
      testSqlApi(s"parse_url('$url', 'QUERY', 'query')", qv)
    }
    // Fully-populated URL.
    testUrl(
      "http://userinfo@flink.apache.org/path?query=1#Ref",
      "flink.apache.org", "/path", "query=1", "Ref",
      "http", "/path?query=1", "userinfo@flink.apache.org", "userinfo", "1")
    // Percent-encoded components must be returned verbatim (not decoded).
    testUrl(
      "https://use%20r:pas%20s@example.com/dir%20/pa%20th.HTML?query=x%20y&q2=2#Ref%20two",
      "example.com", "/dir%20/pa%20th.HTML", "query=x%20y&q2=2", "Ref%20two",
      "https", "/dir%20/pa%20th.HTML?query=x%20y&q2=2", "use%20r:pas%20s@example.com",
      "use%20r:pas%20s", "x%20y")
    // Missing components come back as "null"; empty ones as "".
    testUrl(
      "http://user:pass@host",
      "host", "", "null", "null", "http", "", "user:pass@host", "user:pass", "null")
    testUrl(
      "http://user:pass@host/",
      "host", "/", "null", "null", "http", "/", "user:pass@host", "user:pass", "null")
    testUrl(
      "http://user:pass@host/?#",
      "host", "/", "", "", "http", "/?", "user:pass@host", "user:pass", "null")
    testUrl(
      "http://user:pass@host/file;param?query;p2",
      "host", "/file;param", "query;p2", "null", "http", "/file;param?query;p2",
      "user:pass@host", "user:pass", "null")
    // An unparsable URL yields "null" for every component.
    testUrl(
      "invalid://user:pass@host/file;param?query;p2",
      "null", "null", "null", "null", "null", "null", "null", "null", "null")
  }
@Test
def testRepeat(): Unit = {
testAllApis(
'f0.repeat(1),
"f0.repeat(1)",
"REPEAT(f0, 1)",
"This is a test String.")
testAllApis(
'f0.repeat(2),
"f0.repeat(2)",
"REPEAT(f0, 2)",
"This is a test String.This is a test String.")
testAllApis(
'f0.repeat(0),
"f0.repeat(0)",
"REPEAT(f0, 0)",
"")
testAllApis(
'f0.repeat(-1),
"f0.repeat(-1)",
"REPEAT(f0, -1)",
"")
testAllApis(
'f33.repeat(2),
"f33.repeat(2)",
"REPEAT(f33, 2)",
"null")
testAllApis(
"".repeat(1),
"''.repeat(1)",
"REPEAT('', 2)",
"")
}
@Test
def testReverse(): Unit = {
testSqlApi("reverse(f38)", "==ABDIQA")
testSqlApi("reverse(f40)", "null")
testSqlApi("reverse('hi')", "ih")
testSqlApi("reverse('hhhi')", "ihhh")
testSqlApi("reverse(CAST(null as VARCHAR))", "null")
}
@Test
def testSplitIndex(): Unit = {
testSqlApi("split_index(f38, 'I', 0)", "AQ")
testSqlApi("split_index(f38, 'I', 2)", "null")
testSqlApi("split_index(f38, 'I', -1)", "null")
testSqlApi("split_index(f38, CAST(null as VARCHAR), 0)", "null")
testSqlApi("split_index(f38, 'I', CAST(null as INT))", "null")
testSqlApi("split_index(f38, 'I', -1)", "null")
testSqlApi("split_index(f40, 'I', 0)", "null")
testSqlApi("split_index(f38, 73, 0)", "AQ")
testSqlApi("split_index(f38, 256, 0)", "null")
testSqlApi("split_index(f38, 0, 0)", "null")
testSqlApi("split_index('Test', 'e', 1)", "st")
testSqlApi("split_index(CAST(null as VARCHAR), 'e', 1)", "null")
testSqlApi("split_index('test', CAST(null as VARCHAR), 1)", "null")
testSqlApi("split_index('test', 'e', -1)", "null")
}
@Test
def testHashCode(): Unit = {
testSqlApi("hash_code('abc')", "96354")
testSqlApi("hash_code(f35)", "97")
testSqlApi("hash_code(f40)", "null")
testSqlApi("hash_code(CAST(null as VARCHAR))", "null")
}
@Test
def testMD5(): Unit = {
testSqlApi("md5('abc')", "900150983cd24fb0d6963f7d28e17f72")
testSqlApi("md5('')", "d41d8cd98f00b204e9800998ecf8427e")
testSqlApi("md5(f35)", "0cc175b9c0f1b6a831c399e269772661")
testSqlApi("md5(f40)", "null")
testSqlApi("md5(CAST(null as VARCHAR))", "null")
}
@Test
def testRegexp(): Unit = {
testSqlApi("regexp('100-200', '(\\\\d+)')", "true")
testSqlApi("regexp('abc-def', '(\\\\d+)')", "false")
testSqlApi("regexp(f35, 'a')", "true")
testSqlApi("regexp(f40, '(\\\\d+)')", "null")
testSqlApi("regexp(CAST(null as VARCHAR), '(\\\\d+)')", "null")
testSqlApi("regexp('100-200', CAST(null as VARCHAR))", "null")
}
@Test
def testHex(): Unit = {
testAllApis(
100.hex(),
"100.hex()",
"HEX(100)",
"64")
testAllApis(
'f2.hex(),
"f2.hex()",
"HEX(f2)",
"2A")
testAllApis(
nullOf(DataTypes.TINYINT).hex(),
"hex(Null(BYTE))",
"HEX(CAST(NULL AS TINYINT))",
"null")
testAllApis(
'f3.hex(),
"f3.hex()",
"HEX(f3)",
"2B")
testAllApis(
'f4.hex(),
"f4.hex()",
"HEX(f4)",
"2C")
testAllApis(
'f7.hex(),
"f7.hex()",
"HEX(f7)",
"3")
testAllApis(
12.hex(),
"12.hex()",
"HEX(12)",
"C")
testAllApis(
10.hex(),
"10.hex()",
"HEX(10)",
"A")
testAllApis(
0.hex(),
"0.hex()",
"HEX(0)",
"0")
testAllApis(
"ö".hex(),
"'ö'.hex()",
"HEX('ö')",
"C3B6")
testAllApis(
'f32.hex(),
"f32.hex()",
"HEX(f32)",
"FFFFFFFFFFFFFFFF")
testAllApis(
'f0.hex(),
"f0.hex()",
"HEX(f0)",
"546869732069732061207465737420537472696E672E")
testAllApis(
'f8.hex(),
"f8.hex()",
"HEX(f8)",
"20546869732069732061207465737420537472696E672E20")
testAllApis(
'f23.hex(),
"f23.hex()",
"HEX(f23)",
"25546869732069732061207465737420537472696E672E")
testAllApis(
'f24.hex(),
"f24.hex()",
"HEX(f24)",
"2A5F546869732069732061207465737420537472696E672E")
}
@Test
def testBin(): Unit = {
testAllApis(
nullOf(DataTypes.TINYINT).bin(),
"bin(Null(BYTE))",
"BIN((CAST(NULL AS TINYINT)))",
"null")
testAllApis(
'f2.bin(),
"f2.bin()",
"BIN(f2)",
"101010")
testAllApis(
'f3.bin(),
"f3.bin()",
"BIN(f3)",
"101011")
testAllApis(
'f4.bin(),
"f4.bin()",
"BIN(f4)",
"101100")
testAllApis(
'f7.bin(),
"f7.bin()",
"BIN(f7)",
"11")
testAllApis(
12.bin(),
"12.bin()",
"BIN(12)",
"1100")
testAllApis(
10.bin(),
"10.bin()",
"BIN(10)",
"1010")
testAllApis(
0.bin(),
"0.bin()",
"BIN(0)",
"0")
testAllApis(
'f32.bin(),
"f32.bin()",
"BIN(f32)",
"1111111111111111111111111111111111111111111111111111111111111111")
}
@Test
def testUUID(): Unit = {
testAllApis(
uuid().charLength(),
"uuid().charLength",
"CHARACTER_LENGTH(UUID())",
"36")
testAllApis(
uuid().substring(9, 1),
"uuid().substring(9, 1)",
"SUBSTRING(UUID(), 9, 1)",
"-")
testAllApis(
uuid().substring(14, 1),
"uuid().substring(14, 1)",
"SUBSTRING(UUID(), 14, 1)",
"-")
testAllApis(
uuid().substring(19, 1),
"uuid().substring(19, 1)",
"SUBSTRING(UUID(), 19, 1)",
"-")
testAllApis(
uuid().substring(24, 1),
"uuid().substring(24, 1)",
"SUBSTRING(UUID(), 24, 1)",
"-")
}
  @Test
  def testTruncate(): Unit = {
    // TRUNCATE(numeric [, n]) drops digits toward zero; a negative n
    // zeroes digits left of the decimal point, and n defaults to 0.
    testAllApis(
      'f29.truncate('f30),
      "f29.truncate(f30)",
      "truncate(f29, f30)",
      "0.4")
    // Truncation of a negative value still moves toward zero.
    testAllApis(
      'f31.truncate('f7),
      "f31.truncate(f7)",
      "truncate(f31, f7)",
      "-0.123")
    testAllApis(
      'f4.truncate('f32),
      "f4.truncate(f32)",
      "truncate(f4, f32)",
      "40")
    testAllApis(
      'f28.cast(DataTypes.DOUBLE).truncate(1),
      "f28.cast(DOUBLE).truncate(1)",
      "truncate(cast(f28 as DOUBLE), 1)",
      "0.4")
    // TODO: ignore TableApiTest for cast to DECIMAL(p, s) is not support now.
    //  see https://issues.apache.org/jira/browse/FLINK-13651
    // testAllApis(
    //   'f31.cast(DataTypes.DECIMAL(38, 18)).truncate(2),
    //   "f31.cast(DECIMAL(10, 10)).truncate(2)",
    //   "truncate(cast(f31 as decimal(38, 18)), 2)",
    //   "-0.12")
    //
    // testAllApis(
    //   'f36.cast(DataTypes.DECIMAL(38, 18)).truncate(),
    //   "f36.cast(DECIMAL(10, 10)).truncate()",
    //   "truncate(42.324)",
    //   "42")
    // SQL-only coverage for the DECIMAL case excluded above.
    testSqlApi("truncate(cast(f31 as decimal(38, 18)), 2)", "-0.12")
    testAllApis(
      'f5.cast(DataTypes.FLOAT).truncate(),
      "f5.cast(FLOAT).truncate()",
      "truncate(cast(f5 as float))",
      "4.0")
    // Negative n: truncate(42, -1) keeps only the tens digit.
    testAllApis(
      42.truncate(-1),
      "42.truncate(-1)",
      "truncate(42, -1)",
      "40")
    testAllApis(
      42.truncate(-3),
      "42.truncate(-3)",
      "truncate(42, -3)",
      "0")
    // The validation parameter is null
    testAllApis(
      'f33.cast(DataTypes.INT).truncate(1),
      "f33.cast(INT).truncate(1)",
      "truncate(cast(null as integer), 1)",
      "null")
    testAllApis(
      43.21.truncate('f33.cast(DataTypes.INT)),
      "43.21.truncate(f33.cast(INT))",
      "truncate(43.21, cast(null as integer))",
      "null")
    testAllApis(
      'f33.cast(DataTypes.DOUBLE).truncate(1),
      "f33.cast(DOUBLE).truncate(1)",
      "truncate(cast(null as double), 1)",
      "null")
    testAllApis(
      'f33.cast(DataTypes.INT).truncate(1),
      "f33.cast(INT).truncate(1)",
      "truncate(cast(null as integer))",
      "null")
    testAllApis(
      'f33.cast(DataTypes.DOUBLE).truncate(),
      "f33.cast(DOUBLE).truncate()",
      "truncate(cast(null as double))",
      "null")
  }
// ----------------------------------------------------------------------------------------------
// Math functions
// ----------------------------------------------------------------------------------------------
@Test
def testExp(): Unit = {
testAllApis(
'f2.exp(),
"f2.exp()",
"EXP(f2)",
math.exp(42.toByte).toString)
testAllApis(
'f3.exp(),
"f3.exp()",
"EXP(f3)",
math.exp(43.toShort).toString)
testAllApis(
'f4.exp(),
"f4.exp()",
"EXP(f4)",
math.exp(44.toLong).toString)
testAllApis(
'f5.exp(),
"f5.exp()",
"EXP(f5)",
math.exp(4.5.toFloat).toString)
testAllApis(
'f6.exp(),
"f6.exp()",
"EXP(f6)",
math.exp(4.6).toString)
testAllApis(
'f7.exp(),
"exp(f7)",
"EXP(f7)",
math.exp(3).toString)
testAllApis(
3.exp(),
"exp(3)",
"EXP(3)",
math.exp(3).toString)
}
@Test
def testLog10(): Unit = {
testAllApis(
'f2.log10(),
"f2.log10()",
"LOG10(f2)",
math.log10(42.toByte).toString)
testAllApis(
'f3.log10(),
"f3.log10()",
"LOG10(f3)",
math.log10(43.toShort).toString)
testAllApis(
'f4.log10(),
"f4.log10()",
"LOG10(f4)",
math.log10(44.toLong).toString)
testAllApis(
'f5.log10(),
"f5.log10()",
"LOG10(f5)",
math.log10(4.5.toFloat).toString)
testAllApis(
'f6.log10(),
"f6.log10()",
"LOG10(f6)",
math.log10(4.6).toString)
}
/** POWER(base, exponent) across mixed numeric operand types (byte/short/long/float
  * base with int/double/float/long exponents), plus BigDecimal operands on either
  * side. Expected values are computed with [[math.pow]].
  * FIX(review): the original repeated the f4^f5 assertion twice verbatim; the
  * duplicate was removed — coverage is unchanged. */
@Test
def testPower(): Unit = {
  // f7: int , f4: long, f6: double
  testAllApis('f2.power('f7), "f2.power(f7)", "POWER(f2, f7)", math.pow(42.toByte, 3).toString)
  testAllApis('f3.power('f6), "f3.power(f6)", "POWER(f3, f6)", math.pow(43.toShort, 4.6D).toString)
  testAllApis('f4.power('f5), "f4.power(f5)", "POWER(f4, f5)", math.pow(44.toLong, 4.5.toFloat).toString)
  // f5: float
  testAllApis('f5.power('f5), "f5.power(f5)", "power(f5, f5)", math.pow(4.5F, 4.5F).toString)
  testAllApis('f5.power('f6), "f5.power(f6)", "power(f5, f6)", math.pow(4.5F, 4.6D).toString)
  testAllApis('f5.power('f7), "f5.power(f7)", "power(f5, f7)", math.pow(4.5F, 3).toString)
  testAllApis('f5.power('f4), "f5.power(f4)", "power(f5, f4)", math.pow(4.5F, 44L).toString)
  // f22: bigDecimal
  // TODO delete casting in SQL when CALCITE-1467 is fixed
  testAllApis('f22.cast(DataTypes.DOUBLE).power('f5), "f22.cast(DOUBLE).power(f5)",
    "power(CAST(f22 AS DOUBLE), f5)", math.pow(2, 4.5F).toString)
  testAllApis('f22.cast(DataTypes.DOUBLE).power('f6), "f22.cast(DOUBLE).power(f6)",
    "power(CAST(f22 AS DOUBLE), f6)", math.pow(2, 4.6D).toString)
  testAllApis('f22.cast(DataTypes.DOUBLE).power('f7), "f22.cast(DOUBLE).power(f7)",
    "power(CAST(f22 AS DOUBLE), f7)", math.pow(2, 3).toString)
  testAllApis('f22.cast(DataTypes.DOUBLE).power('f4), "f22.cast(DOUBLE).power(f4)",
    "power(CAST(f22 AS DOUBLE), f4)", math.pow(2, 44L).toString)
  // BigDecimal as the exponent (cast only needed on the Table API side)
  testAllApis('f6.power('f22.cast(DataTypes.DOUBLE)), "f6.power(f22.cast(DOUBLE))",
    "power(f6, f22)", math.pow(4.6D, 2).toString)
}
/** SQRT() over double/int/long/BigDecimal/float fields and numeric literals.
  * The float case pins the expected value via pow(x, 0.5) — presumably matching
  * how the runtime evaluates SQRT on floats; confirm against the implementation. */
@Test
def testSqrt(): Unit = {
  testAllApis('f6.sqrt(), "f6.sqrt", "SQRT(f6)", math.sqrt(4.6D).toString)
  testAllApis('f7.sqrt(), "f7.sqrt", "SQRT(f7)", math.sqrt(3).toString)
  testAllApis('f4.sqrt(), "f4.sqrt", "SQRT(f4)", math.sqrt(44L).toString)
  testAllApis('f22.cast(DataTypes.DOUBLE).sqrt(), "f22.cast(DOUBLE).sqrt",
    "SQRT(CAST(f22 AS DOUBLE))", math.sqrt(2.0).toString)
  testAllApis('f5.sqrt(), "f5.sqrt", "SQRT(f5)", math.pow(4.5F, 0.5).toString)
  testAllApis(25.sqrt(), "25.sqrt()", "SQRT(25)", "5.0")
  testAllApis(2.2.sqrt(), "2.2.sqrt()",
    "POWER(CAST(2.2 AS DOUBLE), CAST(0.5 AS DOUBLE))", // TODO fix FLINK-4621
    math.sqrt(2.2).toString)
}
/** COSH() on int literals (including a negative) and long/double/int/BigDecimal fields. */
@Test
def testCosh(): Unit = {
  testAllApis(0.cosh(), "0.cosh()", "COSH(0)", math.cosh(0).toString)
  testAllApis(-1.cosh(), "-1.cosh()", "COSH(-1)", math.cosh(-1).toString)
  testAllApis('f4.cosh(), "f4.cosh", "COSH(f4)", math.cosh(44L).toString)
  testAllApis('f6.cosh(), "f6.cosh", "COSH(f6)", math.cosh(4.6D).toString)
  testAllApis('f7.cosh(), "f7.cosh", "COSH(f7)", math.cosh(3).toString)
  // BigDecimal input is accepted directly (no cast needed, unlike POWER/SQRT above)
  testAllApis('f22.cosh(), "f22.cosh", "COSH(f22)", math.cosh(2.0).toString)
}
/** Natural logarithm LN() over every numeric field width, checked through the
  * Scala Table API, the Java string Table API, and SQL in one data-driven pass. */
@Test
def testLn(): Unit = {
  // (Table API expression, string API expression, SQL expression, expected result)
  val cases = Seq(
    ('f2.ln(), "f2.ln()", "LN(f2)", math.log(42.toByte).toString),
    ('f3.ln(), "f3.ln()", "LN(f3)", math.log(43.toShort).toString),
    ('f4.ln(), "f4.ln()", "LN(f4)", math.log(44.toLong).toString),
    ('f5.ln(), "f5.ln()", "LN(f5)", math.log(4.5.toFloat).toString),
    ('f6.ln(), "f6.ln()", "LN(f6)", math.log(4.6).toString))
  cases.foreach { case (expr, tableApi, sql, expected) =>
    testAllApis(expr, tableApi, sql, expected)
  }
}
/** ABS() on positive fields (f2..f6), their negative counterparts (f9..f13), and a
  * high-precision negative BigDecimal (f15). Expected results are exact strings,
  * so they also pin the result type/scale of each case. */
@Test
def testAbs(): Unit = {
  testAllApis('f2.abs(), "f2.abs()", "ABS(f2)", "42")
  testAllApis('f3.abs(), "f3.abs()", "ABS(f3)", "43")
  testAllApis('f4.abs(), "f4.abs()", "ABS(f4)", "44")
  testAllApis('f5.abs(), "f5.abs()", "ABS(f5)", "4.5")
  testAllApis('f6.abs(), "f6.abs()", "ABS(f6)", "4.6")
  // negative inputs — presumably f9..f13 hold the negated values of f2..f6; verify
  testAllApis('f9.abs(), "f9.abs()", "ABS(f9)", "42")
  testAllApis('f10.abs(), "f10.abs()", "ABS(f10)", "43")
  testAllApis('f11.abs(), "f11.abs()", "ABS(f11)", "44")
  testAllApis('f12.abs(), "f12.abs()", "ABS(f12)", "4.5")
  testAllApis('f13.abs(), "f13.abs()", "ABS(f13)", "4.6")
  // BigDecimal: full precision and scale must be preserved
  testAllApis('f15.abs(), "f15.abs()", "ABS(f15)", "1231.1231231321321321111")
}
/** Numeric FLOOR/CEIL on float (f5), short (f3 — identity for integral types), and a
  * negative BigDecimal (f15 — floor moves away from zero, ceil toward zero). */
@Test
def testArithmeticFloorCeil(): Unit = {
  testAllApis('f5.floor(), "f5.floor()", "FLOOR(f5)", "4.0")
  testAllApis('f5.ceil(), "f5.ceil()", "CEIL(f5)", "5.0")
  testAllApis('f3.floor(), "f3.floor()", "FLOOR(f3)", "43")
  testAllApis('f3.ceil(), "f3.ceil()", "CEIL(f3)", "43")
  testAllApis('f15.floor(), "f15.floor()", "FLOOR(f15)", "-1232")
  testAllApis('f15.ceil(), "f15.ceil()", "CEIL(f15)", "-1231")
}
/** SIN() over every numeric field width plus a negative BigDecimal, checked
  * through the Scala Table API, the Java string Table API, and SQL. */
@Test
def testSin(): Unit = {
  // (Table API expression, string API expression, SQL expression, expected result)
  val cases = Seq(
    ('f2.sin(), "f2.sin()", "SIN(f2)", math.sin(42.toByte).toString),
    ('f3.sin(), "f3.sin()", "SIN(f3)", math.sin(43.toShort).toString),
    ('f4.sin(), "f4.sin()", "SIN(f4)", math.sin(44.toLong).toString),
    ('f5.sin(), "f5.sin()", "SIN(f5)", math.sin(4.5.toFloat).toString),
    ('f6.sin(), "f6.sin()", "SIN(f6)", math.sin(4.6).toString),
    // prefix-style call in the string API, BigDecimal input
    ('f15.sin(), "sin(f15)", "SIN(f15)", math.sin(-1231.1231231321321321111).toString))
  cases.foreach { case (expr, tableApi, sql, expected) =>
    testAllApis(expr, tableApi, sql, expected)
  }
}
/** COS() over every numeric field width plus a negative BigDecimal, checked
  * through the Scala Table API, the Java string Table API, and SQL. */
@Test
def testCos(): Unit = {
  // (Table API expression, string API expression, SQL expression, expected result)
  val cases = Seq(
    ('f2.cos(), "f2.cos()", "COS(f2)", math.cos(42.toByte).toString),
    ('f3.cos(), "f3.cos()", "COS(f3)", math.cos(43.toShort).toString),
    ('f4.cos(), "f4.cos()", "COS(f4)", math.cos(44.toLong).toString),
    ('f5.cos(), "f5.cos()", "COS(f5)", math.cos(4.5.toFloat).toString),
    ('f6.cos(), "f6.cos()", "COS(f6)", math.cos(4.6).toString),
    // prefix-style call in the string API, BigDecimal input
    ('f15.cos(), "cos(f15)", "COS(f15)", math.cos(-1231.1231231321321321111).toString))
  cases.foreach { case (expr, tableApi, sql, expected) =>
    testAllApis(expr, tableApi, sql, expected)
  }
}
/** SINH() on int literals (including a negative) and long/double/int/BigDecimal fields. */
@Test
def testSinh(): Unit = {
  testAllApis(0.sinh(), "0.sinh()", "SINH(0)", math.sinh(0).toString)
  testAllApis(-1.sinh(), "-1.sinh()", "SINH(-1)", math.sinh(-1).toString)
  testAllApis('f4.sinh(), "f4.sinh", "SINH(f4)", math.sinh(44L).toString)
  testAllApis('f6.sinh(), "f6.sinh", "SINH(f6)", math.sinh(4.6D).toString)
  testAllApis('f7.sinh(), "f7.sinh", "SINH(f7)", math.sinh(3).toString)
  // BigDecimal input accepted directly
  testAllApis('f22.sinh(), "f22.sinh", "SINH(f22)", math.sinh(2.0).toString)
}
/** TAN() over every numeric field width plus a negative BigDecimal, checked
  * through the Scala Table API, the Java string Table API, and SQL. */
@Test
def testTan(): Unit = {
  // (Table API expression, string API expression, SQL expression, expected result)
  val cases = Seq(
    ('f2.tan(), "f2.tan()", "TAN(f2)", math.tan(42.toByte).toString),
    ('f3.tan(), "f3.tan()", "TAN(f3)", math.tan(43.toShort).toString),
    ('f4.tan(), "f4.tan()", "TAN(f4)", math.tan(44.toLong).toString),
    ('f5.tan(), "f5.tan()", "TAN(f5)", math.tan(4.5.toFloat).toString),
    ('f6.tan(), "f6.tan()", "TAN(f6)", math.tan(4.6).toString),
    // prefix-style call in the string API, BigDecimal input
    ('f15.tan(), "tan(f15)", "TAN(f15)", math.tan(-1231.1231231321321321111).toString))
  cases.foreach { case (expr, tableApi, sql, expected) =>
    testAllApis(expr, tableApi, sql, expected)
  }
}
/** TANH() on int literals (including a negative) and long/double/int/BigDecimal fields. */
@Test
def testTanh(): Unit = {
  testAllApis(0.tanh(), "0.tanh()", "TANH(0)", math.tanh(0).toString)
  testAllApis(-1.tanh(), "-1.tanh()", "TANH(-1)", math.tanh(-1).toString)
  testAllApis('f4.tanh(), "f4.tanh", "TANH(f4)", math.tanh(44L).toString)
  testAllApis('f6.tanh(), "f6.tanh", "TANH(f6)", math.tanh(4.6D).toString)
  testAllApis('f7.tanh(), "f7.tanh", "TANH(f7)", math.tanh(3).toString)
  // BigDecimal input accepted directly
  testAllApis('f22.tanh(), "f22.tanh", "TANH(f22)", math.tanh(2.0).toString)
}
/** COT() over every numeric field width plus a negative BigDecimal.
  * scala.math has no cot, so the expected value is computed as 1/tan(x). */
@Test
def testCot(): Unit = {
  testAllApis('f2.cot(), "f2.cot()", "COT(f2)", (1.0d / math.tan(42.toByte)).toString)
  testAllApis('f3.cot(), "f3.cot()", "COT(f3)", (1.0d / math.tan(43.toShort)).toString)
  testAllApis('f4.cot(), "f4.cot()", "COT(f4)", (1.0d / math.tan(44.toLong)).toString)
  testAllApis('f5.cot(), "f5.cot()", "COT(f5)", (1.0d / math.tan(4.5.toFloat)).toString)
  testAllApis('f6.cot(), "f6.cot()", "COT(f6)", (1.0d / math.tan(4.6)).toString)
  // prefix-style call in the string API, BigDecimal input
  testAllApis('f15.cot(), "cot(f15)", "COT(f15)", (1.0d / math.tan(-1231.1231231321321321111)).toString)
}
/** ASIN() over fields f25..f31 holding values in the valid [-1, 1] domain.
  * Note: 0.42.toByte evaluates to 0, so the f25 expectation is asin(0) — this
  * mirrors how the test data itself is presumably built; verify against the row
  * definition if f25 is ever changed. */
@Test
def testAsin(): Unit = {
  testAllApis('f25.asin(), "f25.asin()", "ASIN(f25)", math.asin(0.42.toByte).toString)
  testAllApis('f26.asin(), "f26.asin()", "ASIN(f26)", math.asin(0.toShort).toString)
  testAllApis('f27.asin(), "f27.asin()", "ASIN(f27)", math.asin(0.toLong).toString)
  testAllApis('f28.asin(), "f28.asin()", "ASIN(f28)", math.asin(0.45.toFloat).toString)
  testAllApis('f29.asin(), "f29.asin()", "ASIN(f29)", math.asin(0.46).toString)
  // domain boundary: asin(1) = pi/2
  testAllApis('f30.asin(), "f30.asin()", "ASIN(f30)", math.asin(1).toString)
  testAllApis('f31.asin(), "f31.asin()", "ASIN(f31)", math.asin(-0.1231231321321321111).toString)
}
/** ACOS() over fields f25..f31 holding values in the valid [-1, 1] domain,
  * including the boundary acos(1) = 0 and a negative BigDecimal input. */
@Test
def testAcos(): Unit = {
  // (Table API expression, string API expression, SQL expression, expected result)
  val cases = Seq(
    ('f25.acos(), "f25.acos()", "ACOS(f25)", math.acos(0.42.toByte).toString),
    ('f26.acos(), "f26.acos()", "ACOS(f26)", math.acos(0.toShort).toString),
    ('f27.acos(), "f27.acos()", "ACOS(f27)", math.acos(0.toLong).toString),
    ('f28.acos(), "f28.acos()", "ACOS(f28)", math.acos(0.45.toFloat).toString),
    ('f29.acos(), "f29.acos()", "ACOS(f29)", math.acos(0.46).toString),
    ('f30.acos(), "f30.acos()", "ACOS(f30)", math.acos(1).toString),
    ('f31.acos(), "f31.acos()", "ACOS(f31)", math.acos(-0.1231231321321321111).toString))
  cases.foreach { case (expr, tableApi, sql, expected) =>
    testAllApis(expr, tableApi, sql, expected)
  }
}
/** ATAN() over fields f25..f31 (byte/short/long/float/double/int/BigDecimal). */
@Test
def testAtan(): Unit = {
  testAllApis('f25.atan(), "f25.atan()", "ATAN(f25)", math.atan(0.42.toByte).toString)
  testAllApis('f26.atan(), "f26.atan()", "ATAN(f26)", math.atan(0.toShort).toString)
  testAllApis('f27.atan(), "f27.atan()", "ATAN(f27)", math.atan(0.toLong).toString)
  testAllApis('f28.atan(), "f28.atan()", "ATAN(f28)", math.atan(0.45.toFloat).toString)
  testAllApis('f29.atan(), "f29.atan()", "ATAN(f29)", math.atan(0.46).toString)
  testAllApis('f30.atan(), "f30.atan()", "ATAN(f30)", math.atan(1).toString)
  testAllApis('f31.atan(), "f31.atan()", "ATAN(f31)", math.atan(-0.1231231321321321111).toString)
}
/** Two-argument ATAN2(y, x) across mixed numeric operand pairs, including the
  * origin case atan2(0, 0) and identical negative BigDecimal operands. */
@Test
def testAtan2(): Unit = {
  testAllApis(atan2('f25, 'f26), "atan2(f25, f26)", "ATAN2(f25, f26)",
    math.atan2(0.42.toByte, 0.toByte).toString)
  testAllApis(atan2('f26, 'f25), "atan2(f26, f25)", "ATAN2(f26, f25)",
    math.atan2(0.toShort, 0.toShort).toString)
  testAllApis(atan2('f27, 'f27), "atan2(f27, f27)", "ATAN2(f27, f27)",
    math.atan2(0.toLong, 0.toLong).toString)
  testAllApis(atan2('f28, 'f28), "atan2(f28, f28)", "ATAN2(f28, f28)",
    math.atan2(0.45.toFloat, 0.45.toFloat).toString)
  testAllApis(atan2('f29, 'f29), "atan2(f29, f29)", "ATAN2(f29, f29)",
    math.atan2(0.46, 0.46).toString)
  testAllApis(atan2('f30, 'f30), "atan2(f30, f30)", "ATAN2(f30, f30)",
    math.atan2(1, 1).toString)
  testAllApis(atan2('f31, 'f31), "atan2(f31, f31)", "ATAN2(f31, f31)",
    math.atan2(-0.1231231321321321111, -0.1231231321321321111).toString)
}
/** DEGREES() (radians-to-degrees) over every numeric field width plus a negative
  * BigDecimal, checked through both Table APIs and SQL. */
@Test
def testDegrees(): Unit = {
  // (Table API expression, string API expression, SQL expression, expected result)
  val cases = Seq(
    ('f2.degrees(), "f2.degrees()", "DEGREES(f2)", math.toDegrees(42.toByte).toString),
    ('f3.degrees(), "f3.degrees()", "DEGREES(f3)", math.toDegrees(43.toShort).toString),
    ('f4.degrees(), "f4.degrees()", "DEGREES(f4)", math.toDegrees(44.toLong).toString),
    ('f5.degrees(), "f5.degrees()", "DEGREES(f5)", math.toDegrees(4.5.toFloat).toString),
    ('f6.degrees(), "f6.degrees()", "DEGREES(f6)", math.toDegrees(4.6).toString),
    // prefix-style call in the string API, BigDecimal input
    ('f15.degrees(), "degrees(f15)", "DEGREES(f15)",
      math.toDegrees(-1231.1231231321321321111).toString))
  cases.foreach { case (expr, tableApi, sql, expected) =>
    testAllApis(expr, tableApi, sql, expected)
  }
}
/** RADIANS() (degrees-to-radians) over every numeric field width plus a negative BigDecimal. */
@Test
def testRadians(): Unit = {
  testAllApis('f2.radians(), "f2.radians()", "RADIANS(f2)", math.toRadians(42.toByte).toString)
  testAllApis('f3.radians(), "f3.radians()", "RADIANS(f3)", math.toRadians(43.toShort).toString)
  testAllApis('f4.radians(), "f4.radians()", "RADIANS(f4)", math.toRadians(44.toLong).toString)
  testAllApis('f5.radians(), "f5.radians()", "RADIANS(f5)", math.toRadians(4.5.toFloat).toString)
  testAllApis('f6.radians(), "f6.radians()", "RADIANS(f6)", math.toRadians(4.6).toString)
  // prefix-style call in the string API, BigDecimal input
  testAllApis('f15.radians(), "radians(f15)", "RADIANS(f15)",
    math.toRadians(-1231.1231231321321321111).toString)
}
/** SIGN() on a positive long, a positive double, and a negative BigDecimal.
  * The decimal case pins the full scale of the result, because Calcite keeps the
  * input precision/scale for SIGN on decimals (see inline comment). */
@Test
def testSign(): Unit = {
  testAllApis('f4.sign(), "f4.sign()", "SIGN(f4)", 1.toString)
  testAllApis('f6.sign(), "f6.sign()", "SIGN(f6)", 1.0.toString)
  testAllApis('f15.sign(), "sign(f15)", "SIGN(f15)",
    "-1.0000000000000000000") // calcite: SIGN(Decimal(p,s)) => Decimal(p,s)
}
/** ROUND() with field-supplied and literal scales, including negative values,
  * scale 0, an implicit scale, and a value already at the requested scale. */
@Test
def testRound(): Unit = {
  testAllApis('f29.round('f30), "f29.round(f30)", "ROUND(f29, f30)", 0.5.toString)
  testAllApis('f4.round('f32), "f4.round(f32)", "ROUND(f4, f32)", 40.toString)
  // single-argument form rounds to scale 0
  testSqlApi("ROUND(125.315)", "125")
  testSqlApi("ROUND(-125.315, 2)", "-125.32")
  testSqlApi("ROUND(125.315, 0)", "125")
  // no-op rounding: value already has the requested scale
  testSqlApi("ROUND(1.4, 1)", "1.4")
}
/** PI() should return scala.math.Pi in all three APIs. */
@Test
def testPi(): Unit = {
  testAllApis(pi(), "pi()", "pi()", math.Pi.toString)
}
/** Seeded RAND(seed) / RAND_INTEGER(seed, bound) with both literal and
  * field-supplied seeds/bounds. Expected values are produced by a fresh
  * java.util.Random with the same seed — this assumes the runtime implements the
  * seeded variants with java.util.Random (verified by the assertions passing). */
@Test
def testRandAndRandInteger(): Unit = {
  // literal seed 1
  val random1 = new java.util.Random(1)
  testAllApis(rand(1), "rand(1)", "RAND(1)", random1.nextDouble().toString)
  // seed taken from int field f7 (= 3, per the inline value below)
  val random2 = new java.util.Random(3)
  testAllApis(rand('f7), "rand(f7)", "RAND(f7)", random2.nextDouble().toString)
  // bounded integer variant, literal seed and bound
  val random3 = new java.util.Random(1)
  testAllApis(randInteger(1, 10), "randInteger(1, 10)", "RAND_INTEGER(1, 10)",
    random3.nextInt(10).toString)
  // seed and bound from fields; f4 (long) must be cast to INT for the bound
  val random4 = new java.util.Random(3)
  testAllApis(randInteger('f7, 'f4.cast(DataTypes.INT)), "randInteger(f7, f4.cast(INT))",
    "RAND_INTEGER(f7, CAST(f4 AS INT))", random4.nextInt(44).toString)
}
/** E() should return scala.math.E; verified with both upper- and lower-case spellings. */
@Test
def testE(): Unit = {
  testAllApis(e(), "E()", "E()", math.E.toString)
  testAllApis(e(), "e()", "e()", math.E.toString)
}
/** LOG(x) (natural log) and LOG(base, x), on fields, computed expressions and
  * literals, plus NULL propagation through either or both arguments (SQL only).
  * Note the argument order flip: Table API is value.log(base), SQL is LOG(base, value). */
@Test
def testLog(): Unit = {
  testAllApis('f6.log(), "f6.log", "LOG(f6)", "1.5260563034950492")
  // f6 - f6 + k forces a non-constant expression with a known value k
  testAllApis(('f6 - 'f6 + 100).log('f6 - 'f6 + 10), "(f6 - f6 + 100).log(f6 - f6 + 10)",
    "LOG(f6 - f6 + 10, f6 - f6 + 100)", "2.0")
  testAllApis(('f6 + 20).log(), "(f6+20).log", "LOG(f6+20)", "3.202746442938317")
  testAllApis(10.log(), "10.log", "LOG(10)", "2.302585092994046")
  testAllApis(100.log(10), "100.log(10)", "LOG(10, 100)", "2.0")
  // NULL in any argument position yields NULL
  testSqlApi("LOG(cast (null AS DOUBLE))", "null")
  testSqlApi("LOG(cast (null AS DOUBLE), 1)", "null")
  testSqlApi("LOG(1, cast (null AS DOUBLE))", "null")
  testSqlApi("LOG(cast (null AS DOUBLE), cast (null AS DOUBLE))", "null")
}
/** LOG2() on a field, computed expressions, a literal, and NULL input (SQL only). */
@Test
def testLog2(): Unit = {
  testAllApis('f6.log2(), "f6.log2", "LOG2(f6)", "2.2016338611696504")
  // f6 - f6 + 100 forces a non-constant expression equal to 100
  testAllApis(('f6 - 'f6 + 100).log2(), "(f6 - f6 + 100).log2()", "LOG2(f6 - f6 + 100)",
    "6.643856189774725")
  testAllApis(('f6 + 20).log2(), "(f6+20).log2", "LOG2(f6+20)", "4.620586410451877")
  testAllApis(10.log2(), "10.log2", "LOG2(10)", "3.3219280948873626")
  // NULL input yields NULL
  testSqlApi("LOG2(cast (null AS DOUBLE))", "null")
}
/** CHR(n): converts a code point to a one-character string (SQL only).
  * The assertions pin the edge behavior: values >= 256 wrap modulo 256
  * (CHR(300) = CHR(44) = "," and CHR(97 + 256) = "a"), negative inputs yield an
  * empty string, 0 yields the NUL character, and NULL yields NULL. */
@Test
def testChr(): Unit = {
  // f4 presumably holds 44 (','); f43 a negative value; f42 zero — confirm with test data
  testSqlApi("CHR(f4)", ",")
  testSqlApi("CHR(f43)", "")
  testSqlApi("CHR(f42)", Character.MIN_VALUE.toString)
  testSqlApi("CHR(65)", "A")
  testSqlApi("CHR(CAST (-10 AS BIGINT))", "")
  testSqlApi("CHR(300)", ",")
  testSqlApi("CHR(97)", "a")
  testSqlApi("CHR(97 + 256)", "a")
  testSqlApi("CHR(-9)", "")
  testSqlApi("CHR(0)", Character.MIN_VALUE.toString)
  testSqlApi("CHR(149)", 149.toChar.toString)
  testSqlApi("CHR(cast (null AS BIGINT))", "null")
}
// ----------------------------------------------------------------------------------------------
// Temporal functions
// ----------------------------------------------------------------------------------------------
/** EXTRACT(unit FROM temporal) over date (f16), time (f17), timestamp (f18),
  * interval-day-time (f19) and interval-year-month (f20) fields, plus the
  * SQL-only units (MILLENNIUM, CENTURY, DOY, DOW) and the shorthand functions
  * (YEAR, QUARTER, MONTH, WEEK, DAYOFYEAR, DAYOFMONTH, DAYOFWEEK, HOUR, MINUTE,
  * SECOND). f16/f18 presumably hold 1996-11-10 (06:55:44 for f18) — confirm
  * against the test data definition. */
@Test
def testExtract(): Unit = {
  // --- date field f16 ---
  testAllApis('f16.extract(TimeIntervalUnit.YEAR), "f16.extract(YEAR)", "EXTRACT(YEAR FROM f16)", "1996")
  testAllApis('f16.extract(TimeIntervalUnit.QUARTER), "f16.extract(QUARTER)", "EXTRACT(QUARTER FROM f16)", "4")
  // prefix-style extract in the string API
  testAllApis('f16.extract(TimeIntervalUnit.MONTH), "extract(f16, MONTH)", "EXTRACT(MONTH FROM f16)", "11")
  testAllApis('f16.extract(TimeIntervalUnit.WEEK), "extract(f16, WEEK)", "EXTRACT(WEEK FROM f16)", "45")
  testAllApis('f16.extract(TimeIntervalUnit.DAY), "f16.extract(DAY)", "EXTRACT(DAY FROM f16)", "10")
  // --- timestamp field f18 ---
  testAllApis('f18.extract(TimeIntervalUnit.YEAR), "f18.extract(YEAR)", "EXTRACT(YEAR FROM f18)", "1996")
  testAllApis('f18.extract(TimeIntervalUnit.QUARTER), "f18.extract(QUARTER)", "EXTRACT(QUARTER FROM f18)", "4")
  // NOTE(review): duplicate of the f16 QUARTER assertion above — could be removed
  testAllApis('f16.extract(TimeIntervalUnit.QUARTER), "f16.extract(QUARTER)", "EXTRACT(QUARTER FROM f16)", "4")
  testAllApis('f18.extract(TimeIntervalUnit.MONTH), "f18.extract(MONTH)", "EXTRACT(MONTH FROM f18)", "11")
  testAllApis('f18.extract(TimeIntervalUnit.WEEK), "f18.extract(WEEK)", "EXTRACT(WEEK FROM f18)", "45")
  testAllApis('f18.extract(TimeIntervalUnit.DAY), "f18.extract(DAY)", "EXTRACT(DAY FROM f18)", "10")
  testAllApis('f18.extract(TimeIntervalUnit.HOUR), "f18.extract(HOUR)", "EXTRACT(HOUR FROM f18)", "6")
  // --- time field f17 interleaved with f18 for sub-day units ---
  testAllApis('f17.extract(TimeIntervalUnit.HOUR), "f17.extract(HOUR)", "EXTRACT(HOUR FROM f17)", "6")
  testAllApis('f18.extract(TimeIntervalUnit.MINUTE), "f18.extract(MINUTE)", "EXTRACT(MINUTE FROM f18)", "55")
  testAllApis('f17.extract(TimeIntervalUnit.MINUTE), "f17.extract(MINUTE)", "EXTRACT(MINUTE FROM f17)", "55")
  testAllApis('f18.extract(TimeIntervalUnit.SECOND), "f18.extract(SECOND)", "EXTRACT(SECOND FROM f18)", "44")
  testAllApis('f17.extract(TimeIntervalUnit.SECOND), "f17.extract(SECOND)", "EXTRACT(SECOND FROM f17)", "44")
  // --- interval day-time field f19 ---
  testAllApis('f19.extract(TimeIntervalUnit.DAY), "f19.extract(DAY)", "EXTRACT(DAY FROM f19)", "16979")
  testAllApis('f19.extract(TimeIntervalUnit.HOUR), "f19.extract(HOUR)", "EXTRACT(HOUR FROM f19)", "7")
  testAllApis('f19.extract(TimeIntervalUnit.MINUTE), "f19.extract(MINUTE)", "EXTRACT(MINUTE FROM f19)", "23")
  testAllApis('f19.extract(TimeIntervalUnit.SECOND), "f19.extract(SECOND)", "EXTRACT(SECOND FROM f19)", "33")
  // --- interval year-month field f20 ---
  testAllApis('f20.extract(TimeIntervalUnit.MONTH), "f20.extract(MONTH)", "EXTRACT(MONTH FROM f20)", "1")
  testAllApis('f20.extract(TimeIntervalUnit.QUARTER), "f20.extract(QUARTER)", "EXTRACT(QUARTER FROM f20)", "1")
  testAllApis('f20.extract(TimeIntervalUnit.YEAR), "f20.extract(YEAR)", "EXTRACT(YEAR FROM f20)", "2")
  // test SQL only time units
  testSqlApi("EXTRACT(MILLENNIUM FROM f18)", "2")
  testSqlApi("EXTRACT(MILLENNIUM FROM f16)", "2")
  testSqlApi("EXTRACT(CENTURY FROM f18)", "20")
  testSqlApi("EXTRACT(CENTURY FROM f16)", "20")
  testSqlApi("EXTRACT(DOY FROM f18)", "315")
  testSqlApi("EXTRACT(DOY FROM f16)", "315")
  testSqlApi("EXTRACT(DOW FROM f18)", "1")
  testSqlApi("EXTRACT(DOW FROM f16)", "1")
  testSqlApi("EXTRACT(QUARTER FROM f18)", "4")
  testSqlApi("EXTRACT(QUARTER FROM f16)", "4")
  testSqlApi("EXTRACT(WEEK FROM f18)", "45")
  testSqlApi("EXTRACT(WEEK FROM f16)", "45")
  // shorthand functions equivalent to EXTRACT
  testSqlApi("YEAR(f18)", "1996")
  testSqlApi("YEAR(f16)", "1996")
  testSqlApi("QUARTER(f18)", "4")
  testSqlApi("QUARTER(f16)", "4")
  testSqlApi("MONTH(f18)", "11")
  testSqlApi("MONTH(f16)", "11")
  testSqlApi("WEEK(f18)", "45")
  testSqlApi("WEEK(f16)", "45")
  testSqlApi("DAYOFYEAR(f18)", "315")
  testSqlApi("DAYOFYEAR(f16)", "315")
  testSqlApi("DAYOFMONTH(f18)", "10")
  testSqlApi("DAYOFMONTH(f16)", "10")
  testSqlApi("DAYOFWEEK(f18)", "1")
  testSqlApi("DAYOFWEEK(f16)", "1")
  testSqlApi("HOUR(f17)", "6")
  testSqlApi("HOUR(f19)", "7")
  testSqlApi("MINUTE(f17)", "55")
  testSqlApi("MINUTE(f19)", "23")
  testSqlApi("SECOND(f17)", "44")
  testSqlApi("SECOND(f19)", "33")
}
/** FLOOR/CEIL of temporal values to a time unit, on timestamp (f18), time (f17)
  * and date (f16) fields. FLOOR truncates toward the start of the unit; CEIL
  * rounds up to the start of the next unit (identity when already aligned, as in
  * the time SECOND case below). */
@Test
def testTemporalFloor(): Unit = {
  // --- FLOOR on timestamp f18 (1996-11-10 06:55:44.333, per the expected values) ---
  testAllApis('f18.floor(TimeIntervalUnit.YEAR), "f18.floor(YEAR)", "FLOOR(f18 TO YEAR)", "1996-01-01 00:00:00.000")
  testAllApis('f18.floor(TimeIntervalUnit.MONTH), "f18.floor(MONTH)", "FLOOR(f18 TO MONTH)", "1996-11-01 00:00:00.000")
  testAllApis('f18.floor(TimeIntervalUnit.DAY), "f18.floor(DAY)", "FLOOR(f18 TO DAY)", "1996-11-10 00:00:00.000")
  testAllApis('f18.floor(TimeIntervalUnit.HOUR), "f18.floor(HOUR)", "FLOOR(f18 TO HOUR)", "1996-11-10 06:00:00.000")
  testAllApis('f18.floor(TimeIntervalUnit.MINUTE), "f18.floor(MINUTE)", "FLOOR(f18 TO MINUTE)", "1996-11-10 06:55:00.000")
  testAllApis('f18.floor(TimeIntervalUnit.SECOND), "f18.floor(SECOND)", "FLOOR(f18 TO SECOND)", "1996-11-10 06:55:44.000")
  // --- FLOOR on time f17 ---
  testAllApis('f17.floor(TimeIntervalUnit.HOUR), "f17.floor(HOUR)", "FLOOR(f17 TO HOUR)", "06:00:00")
  testAllApis('f17.floor(TimeIntervalUnit.MINUTE), "f17.floor(MINUTE)", "FLOOR(f17 TO MINUTE)", "06:55:00")
  testAllApis('f17.floor(TimeIntervalUnit.SECOND), "f17.floor(SECOND)", "FLOOR(f17 TO SECOND)", "06:55:44")
  // --- FLOOR on date f16 ---
  testAllApis('f16.floor(TimeIntervalUnit.YEAR), "f16.floor(YEAR)", "FLOOR(f16 TO YEAR)", "1996-01-01")
  testAllApis('f16.floor(TimeIntervalUnit.MONTH), "f16.floor(MONTH)", "FLOOR(f16 TO MONTH)", "1996-11-01")
  // --- CEIL on timestamp f18 ---
  testAllApis('f18.ceil(TimeIntervalUnit.YEAR), "f18.ceil(YEAR)", "CEIL(f18 TO YEAR)", "1997-01-01 00:00:00.000")
  testAllApis('f18.ceil(TimeIntervalUnit.MONTH), "f18.ceil(MONTH)", "CEIL(f18 TO MONTH)", "1996-12-01 00:00:00.000")
  testAllApis('f18.ceil(TimeIntervalUnit.DAY), "f18.ceil(DAY)", "CEIL(f18 TO DAY)", "1996-11-11 00:00:00.000")
  testAllApis('f18.ceil(TimeIntervalUnit.HOUR), "f18.ceil(HOUR)", "CEIL(f18 TO HOUR)", "1996-11-10 07:00:00.000")
  testAllApis('f18.ceil(TimeIntervalUnit.MINUTE), "f18.ceil(MINUTE)", "CEIL(f18 TO MINUTE)", "1996-11-10 06:56:00.000")
  testAllApis('f18.ceil(TimeIntervalUnit.SECOND), "f18.ceil(SECOND)", "CEIL(f18 TO SECOND)", "1996-11-10 06:55:45.000")
  // --- CEIL on time f17 (SECOND is identity: f17 has no fractional seconds) ---
  testAllApis('f17.ceil(TimeIntervalUnit.HOUR), "f17.ceil(HOUR)", "CEIL(f17 TO HOUR)", "07:00:00")
  testAllApis('f17.ceil(TimeIntervalUnit.MINUTE), "f17.ceil(MINUTE)", "CEIL(f17 TO MINUTE)", "06:56:00")
  testAllApis('f17.ceil(TimeIntervalUnit.SECOND), "f17.ceil(SECOND)", "CEIL(f17 TO SECOND)", "06:55:44")
  // --- CEIL on date f16 ---
  testAllApis('f16.ceil(TimeIntervalUnit.YEAR), "f16.ceil(YEAR)", "CEIL(f16 TO YEAR)", "1997-01-01")
  testAllApis('f16.ceil(TimeIntervalUnit.MONTH), "f16.ceil(MONTH)", "CEIL(f16 TO MONTH)", "1996-12-01")
}
/** CURRENT_DATE / CURRENT_TIME / CURRENT_TIMESTAMP / LOCALTIMESTAMP / LOCALTIME / NOW().
  * Values are non-deterministic, so only the string-length of the cast result is
  * asserted; equality of two LOCALTIMESTAMP calls is deterministic and checked exactly. */
@Test
def testCurrentTimePoint(): Unit = {
  // current time points are non-deterministic
  // we just test the format of the output
  // manual test can be found in NonDeterministicTests
  testAllApis(currentDate().cast(DataTypes.STRING).charLength() >= 5,
    "currentDate().cast(STRING).charLength() >= 5",
    "CHAR_LENGTH(CAST(CURRENT_DATE AS VARCHAR)) >= 5", "true")
  testAllApis(currentTime().cast(DataTypes.STRING).charLength() >= 5,
    "currentTime().cast(STRING).charLength() >= 5",
    "CHAR_LENGTH(CAST(CURRENT_TIME AS VARCHAR)) >= 5", "true")
  testAllApis(currentTimestamp().cast(DataTypes.STRING).charLength() >= 12,
    "currentTimestamp().cast(STRING).charLength() >= 12",
    "CHAR_LENGTH(CAST(CURRENT_TIMESTAMP AS VARCHAR)) >= 12", "true")
  testAllApis(localTimestamp().cast(DataTypes.STRING).charLength() >= 12,
    "localTimestamp().cast(STRING).charLength() >= 12",
    "CHAR_LENGTH(CAST(LOCALTIMESTAMP AS VARCHAR)) >= 12", "true")
  testAllApis(localTime().cast(DataTypes.STRING).charLength() >= 5,
    "localTime().cast(STRING).charLength() >= 5",
    "CHAR_LENGTH(CAST(LOCALTIME AS VARCHAR)) >= 5", "true")
  // NOW() is SQL-only
  testSqlApi("CHAR_LENGTH(CAST(NOW() AS VARCHAR)) >= 12", "true")
  // comparisons are deterministic
  testAllApis(localTimestamp() === localTimestamp(),
    "localTimestamp() === localTimestamp()",
    "LOCALTIMESTAMP = LOCALTIMESTAMP", "true")
}
/** The OVERLAPS predicate on (start, end-or-interval) pairs of times, dates and
  * timestamps, covering overlapping, adjacent-but-disjoint, and zero-length
  * interval cases. Millisecond precision is exercised in the last case. */
@Test
def testOverlaps(): Unit = {
  testAllApis(temporalOverlaps("2:55:00".toTime, 1.hour, "3:30:00".toTime, 2.hours),
    "temporalOverlaps('2:55:00'.toTime, 1.hour, '3:30:00'.toTime, 2.hours)",
    "(TIME '2:55:00', INTERVAL '1' HOUR) OVERLAPS (TIME '3:30:00', INTERVAL '2' HOUR)",
    "true")
  testAllApis(temporalOverlaps("9:00:00".toTime, "9:30:00".toTime, "9:29:00".toTime, "9:31:00".toTime),
    "temporalOverlaps(toTime('9:00:00'), '9:30:00'.toTime, '9:29:00'.toTime, '9:31:00'.toTime)",
    "(TIME '9:00:00', TIME '9:30:00') OVERLAPS (TIME '9:29:00', TIME '9:31:00')",
    "true")
  // disjoint ranges -> false
  testAllApis(temporalOverlaps("9:00:00".toTime, "10:00:00".toTime, "10:15:00".toTime, 3.hours),
    "temporalOverlaps('9:00:00'.toTime, '10:00:00'.toTime, '10:15:00'.toTime, 3.hours)",
    "(TIME '9:00:00', TIME '10:00:00') OVERLAPS (TIME '10:15:00', INTERVAL '3' HOUR)",
    "false")
  testAllApis(temporalOverlaps("2011-03-10".toDate, 10.days, "2011-03-19".toDate, 10.days),
    "temporalOverlaps(toDate('2011-03-10'), 10.days, '2011-03-19'.toDate, 10.days)",
    "(DATE '2011-03-10', INTERVAL '10' DAY) OVERLAPS (DATE '2011-03-19', INTERVAL '10' DAY)",
    "true")
  // zero-length interval against a reversed (end-before-start) range
  testAllApis(temporalOverlaps("2011-03-10 05:02:02".toTimestamp, 0.milli,
    "2011-03-10 05:02:02".toTimestamp, "2011-03-10 05:02:01".toTimestamp),
    "temporalOverlaps(toTimestamp('2011-03-10 05:02:02'), 0.milli, " +
    "'2011-03-10 05:02:02'.toTimestamp, '2011-03-10 05:02:01'.toTimestamp)",
    "(TIMESTAMP '2011-03-10 05:02:02', INTERVAL '0' SECOND) OVERLAPS " +
    "(TIMESTAMP '2011-03-10 05:02:02', TIMESTAMP '2011-03-10 05:02:01')",
    "true")
  // one millisecond apart -> no overlap
  testAllApis(temporalOverlaps("2011-03-10 02:02:02.001".toTimestamp, 0.milli,
    "2011-03-10 02:02:02.002".toTimestamp, "2011-03-10 02:02:02.002".toTimestamp),
    "temporalOverlaps('2011-03-10 02:02:02.001'.toTimestamp, 0.milli, " +
    "'2011-03-10 02:02:02.002'.toTimestamp, '2011-03-10 02:02:02.002'.toTimestamp)",
    "(TIMESTAMP '2011-03-10 02:02:02.001', INTERVAL '0' SECOND) OVERLAPS " +
    "(TIMESTAMP '2011-03-10 02:02:02.002', TIMESTAMP '2011-03-10 02:02:02.002')",
    "false")
}
/** QUARTER() shorthand on date literals covering the first, second and fourth quarters. */
@Test
def testQuarter(): Unit = {
  testSqlApi("QUARTER(DATE '1997-01-27')", "1")
  testSqlApi("QUARTER(DATE '1997-04-27')", "2")
  testSqlApi("QUARTER(DATE '1997-12-31')", "4")
}
/** TIMESTAMPDIFF(unit, t1, t2) for every supported unit in both its plain and
  * SQL_TSI_-prefixed spelling, across all four operand-type combinations.
  *
  * The dataMap key is (plain unit name, Table API TimePointUnit, SQL_TSI alias);
  * each value holds exactly four cases whose POSITION encodes the operand types:
  * index 0 = (timestamp, timestamp), 1 = (date, timestamp), 2 = (timestamp, date),
  * 3 = (date, date). The match below dispatches on that index, so the per-unit
  * Seqs must keep this order. The final two cases check NULL propagation from
  * either operand. */
@Test
def testTimestampDiff(): Unit = {
  val dataMap = Map(
    ("DAY", TimePointUnit.DAY, "SQL_TSI_DAY") -> Seq(
      ("2018-07-03 11:11:11", "2018-07-05 11:11:11", "2"), // timestamp, timestamp
      ("2016-06-15", "2016-06-16 11:11:11", "1"), // date, timestamp
      ("2016-06-15 11:00:00", "2016-06-19", "3"), // timestamp, date
      ("2016-06-15", "2016-06-18", "3") // date, date
    ),
    ("HOUR", TimePointUnit.HOUR, "SQL_TSI_HOUR") -> Seq(
      ("2018-07-03 11:11:11", "2018-07-04 12:12:11", "25"),
      ("2016-06-15", "2016-06-16 11:11:11", "35"),
      ("2016-06-15 11:00:00", "2016-06-19", "85"),
      ("2016-06-15", "2016-06-12", "-72") // negative: second operand earlier
    ),
    ("MINUTE", TimePointUnit.MINUTE, "SQL_TSI_MINUTE") -> Seq(
      ("2018-07-03 11:11:11", "2018-07-03 12:10:11", "59"),
      ("2016-06-15", "2016-06-16 11:11:11", "2111"),
      ("2016-06-15 11:00:00", "2016-06-19", "5100"),
      ("2016-06-15", "2016-06-18", "4320")
    ),
    ("SECOND", TimePointUnit.SECOND, "SQL_TSI_SECOND") -> Seq(
      ("2018-07-03 11:11:11", "2018-07-03 11:12:12", "61"),
      ("2016-06-15", "2016-06-16 11:11:11", "126671"),
      ("2016-06-15 11:00:00", "2016-06-19", "306000"),
      ("2016-06-15", "2016-06-18", "259200")
    ),
    ("WEEK", TimePointUnit.WEEK, "SQL_TSI_WEEK") -> Seq(
      ("2018-05-03 11:11:11", "2018-07-03 11:12:12", "8"),
      ("2016-04-15", "2016-07-16 11:11:11", "13"),
      ("2016-04-15 11:00:00", "2016-09-19", "22"),
      ("2016-08-15", "2016-06-18", "-8")
    ),
    ("MONTH", TimePointUnit.MONTH, "SQL_TSI_MONTH") -> Seq(
      ("2018-07-03 11:11:11", "2018-09-05 11:11:11", "2"),
      ("2016-06-15", "2018-06-16 11:11:11", "24"),
      ("2016-06-15 11:00:00", "2018-05-19", "23"),
      ("2016-06-15", "2018-03-18", "21")
    ),
    ("QUARTER", TimePointUnit.QUARTER, "SQL_TSI_QUARTER") -> Seq(
      ("2018-01-03 11:11:11", "2018-09-05 11:11:11", "2"),
      ("2016-06-15", "2018-06-16 11:11:11", "8"),
      ("2016-06-15 11:00:00", "2018-05-19", "7"),
      ("2016-06-15", "2018-03-18", "7")
    )
  )
  for ((unitParts, dataParts) <- dataMap) {
    for ((data,index) <- dataParts.zipWithIndex) {
      // index encodes the operand-type combination — see the scaladoc above
      index match {
        case 0 => // timestamp, timestamp
          testAllApis(
            timestampDiff(unitParts._2, data._1.toTimestamp, data._2.toTimestamp),
            s"timestampDiff(${unitParts._1}, '${data._1}'.toTimestamp, '${data._2}'.toTimestamp)",
            s"TIMESTAMPDIFF(${unitParts._1}, TIMESTAMP '${data._1}', TIMESTAMP '${data._2}')",
            data._3
          )
          testSqlApi( // sql tsi
            s"TIMESTAMPDIFF(${unitParts._3}, TIMESTAMP '${data._1}', TIMESTAMP '${data._2}')",
            data._3
          )
        case 1 => // date, timestamp
          testAllApis(
            timestampDiff(unitParts._2, data._1.toDate, data._2.toTimestamp),
            s"timestampDiff(${unitParts._1}, '${data._1}'.toDate, '${data._2}'.toTimestamp)",
            s"TIMESTAMPDIFF(${unitParts._1}, DATE '${data._1}', TIMESTAMP '${data._2}')",
            data._3
          )
          testSqlApi( // sql tsi
            s"TIMESTAMPDIFF(${unitParts._3}, DATE '${data._1}', TIMESTAMP '${data._2}')",
            data._3
          )
        case 2 => // timestamp, date
          testAllApis(
            timestampDiff(unitParts._2, data._1.toTimestamp, data._2.toDate),
            s"timestampDiff(${unitParts._1}, '${data._1}'.toTimestamp, '${data._2}'.toDate)",
            s"TIMESTAMPDIFF(${unitParts._1}, TIMESTAMP '${data._1}', DATE '${data._2}')",
            data._3
          )
          testSqlApi( // sql tsi
            s"TIMESTAMPDIFF(${unitParts._3}, TIMESTAMP '${data._1}', DATE '${data._2}')",
            data._3
          )
        case 3 => // date, date
          testAllApis(
            timestampDiff(unitParts._2, data._1.toDate, data._2.toDate),
            s"timestampDiff(${unitParts._1}, '${data._1}'.toDate, '${data._2}'.toDate)",
            s"TIMESTAMPDIFF(${unitParts._1}, DATE '${data._1}', DATE '${data._2}')",
            data._3
          )
          testSqlApi( // sql tsi
            s"TIMESTAMPDIFF(${unitParts._3}, DATE '${data._1}', DATE '${data._2}')",
            data._3
          )
      }
    }
  }
  // NULL in the first operand propagates to a NULL result
  testAllApis(
    timestampDiff(TimePointUnit.DAY, nullOf(DataTypes.TIMESTAMP(3)),
      "2016-02-24 12:42:25".toTimestamp),
    "timestampDiff(DAY, Null(SQL_TIMESTAMP), '2016-02-24 12:42:25'.toTimestamp)",
    "TIMESTAMPDIFF(DAY, CAST(NULL AS TIMESTAMP), TIMESTAMP '2016-02-24 12:42:25')",
    "null"
  )
  // NULL in the second operand propagates to a NULL result
  testAllApis(
    timestampDiff(TimePointUnit.DAY, "2016-02-24 12:42:25".toTimestamp,
      nullOf(DataTypes.TIMESTAMP(3))),
    "timestampDiff(DAY, '2016-02-24 12:42:25'.toTimestamp, Null(SQL_TIMESTAMP))",
    "TIMESTAMPDIFF(DAY, TIMESTAMP '2016-02-24 12:42:25', CAST(NULL AS TIMESTAMP))",
    "null"
  )
}
@Test
def testTimestampAdd(): Unit = {
val data = Seq(
(1, "2017-11-29 22:58:58.998"),
(3, "2017-11-29 22:58:58.998"),
(-1, "2017-11-29 22:58:58.998"),
(-61, "2017-11-29 22:58:58.998"),
(-1000, "2017-11-29 22:58:58.998")
)
val YEAR = Seq(
"2018-11-29 22:58:58.998",
"2020-11-29 22:58:58.998",
"2016-11-29 22:58:58.998",
"1956-11-29 22:58:58.998",
"1017-11-29 22:58:58.998")
val QUARTER = Seq(
"2018-02-28 22:58:58.998",
"2018-08-29 22:58:58.998",
"2017-08-29 22:58:58.998",
"2002-08-29 22:58:58.998",
"1767-11-29 22:58:58.998")
val MONTH = Seq(
"2017-12-29 22:58:58.998",
"2018-02-28 22:58:58.998",
"2017-10-29 22:58:58.998",
"2012-10-29 22:58:58.998",
"1934-07-29 22:58:58.998")
val WEEK = Seq(
"2017-12-06 22:58:58.998",
"2017-12-20 22:58:58.998",
"2017-11-22 22:58:58.998",
"2016-09-28 22:58:58.998",
"1998-09-30 22:58:58.998")
val DAY = Seq(
"2017-11-30 22:58:58.998",
"2017-12-02 22:58:58.998",
"2017-11-28 22:58:58.998",
"2017-09-29 22:58:58.998",
"2015-03-05 22:58:58.998")
val HOUR = Seq(
"2017-11-29 23:58:58.998",
"2017-11-30 01:58:58.998",
"2017-11-29 21:58:58.998",
"2017-11-27 09:58:58.998",
"2017-10-19 06:58:58.998")
val MINUTE = Seq(
"2017-11-29 22:59:58.998",
"2017-11-29 23:01:58.998",
"2017-11-29 22:57:58.998",
"2017-11-29 21:57:58.998",
"2017-11-29 06:18:58.998")
val SECOND = Seq(
"2017-11-29 22:58:59.998",
"2017-11-29 22:59:01.998",
"2017-11-29 22:58:57.998",
"2017-11-29 22:57:57.998",
"2017-11-29 22:42:18.998")
// we do not supported FRAC_SECOND, MICROSECOND, SQL_TSI_FRAC_SECOND, SQL_TSI_MICROSECOND
val intervalMapResults = Map(
"YEAR" -> YEAR,
"SQL_TSI_YEAR" -> YEAR,
"QUARTER" -> QUARTER,
"SQL_TSI_QUARTER" -> QUARTER,
"MONTH" -> MONTH,
"SQL_TSI_MONTH" -> MONTH,
"WEEK" -> WEEK,
"SQL_TSI_WEEK" -> WEEK,
"DAY" -> DAY,
"SQL_TSI_DAY" -> DAY,
"HOUR" -> HOUR,
"SQL_TSI_HOUR" -> HOUR,
"MINUTE" -> MINUTE,
"SQL_TSI_MINUTE" -> MINUTE,
"SECOND" -> SECOND,
"SQL_TSI_SECOND" -> SECOND
)
/** Maps an interval unit name to the corresponding Table API interval expression
  * and its Scala-expression string form, for the given count.
  * A plain unit name and its SQL_TSI_-prefixed spelling are synonyms, so each pair
  * shares one case arm via pattern alternatives (the original duplicated every arm).
  * Intentionally non-exhaustive: callers only pass keys of intervalMapResults, and
  * an unsupported unit (e.g. FRAC_SECOND) should fail loudly with a MatchError. */
def intervalCount(interval: String, count: Int): (Expression, String) = interval match {
  case "YEAR" | "SQL_TSI_YEAR" => (count.years, s"$count.years")
  case "QUARTER" | "SQL_TSI_QUARTER" => (count.quarters, s"$count.quarters")
  case "MONTH" | "SQL_TSI_MONTH" => (count.months, s"$count.months")
  case "WEEK" | "SQL_TSI_WEEK" => (count.weeks, s"$count.weeks")
  case "DAY" | "SQL_TSI_DAY" => (count.days, s"$count.days")
  case "HOUR" | "SQL_TSI_HOUR" => (count.hours, s"$count.hours")
  case "MINUTE" | "SQL_TSI_MINUTE" => (count.minutes, s"$count.minutes")
  case "SECOND" | "SQL_TSI_SECOND" => (count.seconds, s"$count.seconds")
}
for ((interval, result) <- intervalMapResults) {
for (i <- 0 to 4) {
val (offset, ts) = data(i)
val timeInterval = intervalCount(interval, offset)
testAllApis(
timeInterval._1 + ts.toTimestamp,
s"${timeInterval._2} + '$ts'.toTimestamp",
s"TIMESTAMPADD($interval, $offset, TIMESTAMP '$ts')",
result(i))
}
}
testAllApis(
"2016-02-24 12:42:25".toTimestamp + nullOf(DataTypes.INTERVAL(DataTypes.MINUTE())),
"'2016-02-24 12:42:25'.toTimestamp + Null(INTERVAL_MILLIS)",
"TIMESTAMPADD(HOUR, CAST(NULL AS INTEGER), TIMESTAMP '2016-02-24 12:42:25')",
"null")
testAllApis(
nullOf(DataTypes.TIMESTAMP(3)) + -200.hours,
"Null(SQL_TIMESTAMP) + -200.hours",
"TIMESTAMPADD(HOUR, -200, CAST(NULL AS TIMESTAMP))",
"null")
testAllApis(
"2016-06-15".toDate + 1.day,
"'2016-06-15'.toDate + 1.day",
"TIMESTAMPADD(DAY, 1, DATE '2016-06-15')",
"2016-06-16")
testAllApis(
nullOf(DataTypes.TIMESTAMP(3)) + 3.months,
"Null(SQL_TIMESTAMP) + 3.months",
"TIMESTAMPADD(MONTH, 3, CAST(NULL AS TIMESTAMP))",
"null")
testAllApis(
"2016-02-24 12:42:25".toTimestamp + nullOf(Types.INTERVAL_MILLIS),
"'2016-02-24 12:42:25'.toTimestamp + nullOf(INTERVAL_MILLIS)",
"TIMESTAMPADD(HOUR, CAST(NULL AS INTEGER), TIMESTAMP '2016-02-24 12:42:25')",
"null")
testAllApis(
nullOf(Types.SQL_TIMESTAMP) + -200.hours,
"nullOf(SQL_TIMESTAMP) + -200.hours",
"TIMESTAMPADD(HOUR, -200, CAST(NULL AS TIMESTAMP))",
"null")
testAllApis(
nullOf(Types.SQL_TIMESTAMP) + 3.months,
"nullOf(SQL_TIMESTAMP) + 3.months",
"TIMESTAMPADD(MONTH, 3, CAST(NULL AS TIMESTAMP))",
"null")
// TIMESTAMPADD with DATE returns a TIMESTAMP value for sub-day intervals.
testAllApis("2016-06-15".toDate + 1.month,
"'2016-06-15'.toDate + 1.month",
"timestampadd(MONTH, 1, date '2016-06-15')",
"2016-07-15")
testAllApis("2016-06-15".toDate + 1.day,
"'2016-06-15'.toDate + 1.day",
"timestampadd(DAY, 1, date '2016-06-15')",
"2016-06-16")
// There is no timestamp literal function in Java String Table API,
// toTimestamp is casting string to TIMESTAMP(3) which is not the same to timestamp literal.
testTableApi("2016-06-15".toTimestamp - 1.hour,
"'2016-06-15'.toTimestamp - 1.hour",
"2016-06-14 23:00:00.000")
testSqlApi(
"timestampadd(HOUR, -1, date '2016-06-15')",
"2016-06-14 23:00:00.000000")
// There is no timestamp literal function in Java String Table API,
// toTimestamp is casting string to TIMESTAMP(3) which is not the same to timestamp literal.
testTableApi("2016-06-15".toTimestamp + 1.minute,
"'2016-06-15'.toTimestamp + 1.minute",
"2016-06-15 00:01:00.000")
testSqlApi("timestampadd(MINUTE, 1, date '2016-06-15')",
"2016-06-15 00:01:00.000000")
// There is no timestamp literal function in Java String Table API,
// toTimestamp is casting string to TIMESTAMP(3) which is not the same to timestamp literal.
testTableApi("2016-06-15".toTimestamp - 1.second,
"'2016-06-15'.toTimestamp - 1.second",
"2016-06-14 23:59:59.000")
testSqlApi("timestampadd(SQL_TSI_SECOND, -1, date '2016-06-15')",
"2016-06-14 23:59:59.000000")
// There is no timestamp literal function in Java String Table API,
// toTimestamp is casting string to TIMESTAMP(3) which is not the same to timestamp literal.
testTableApi("2016-06-15".toTimestamp + 1.second,
"'2016-06-15'.toTimestamp + 1.second",
"2016-06-15 00:00:01.000")
testSqlApi("timestampadd(SECOND, 1, date '2016-06-15')",
"2016-06-15 00:00:01.000000")
testAllApis(nullOf(Types.SQL_TIMESTAMP) + 1.second,
"nullOf(SQL_TIMESTAMP) + 1.second",
"timestampadd(SECOND, 1, cast(null as date))",
"null")
testAllApis(nullOf(Types.SQL_TIMESTAMP) + 1.day,
"nullOf(SQL_TIMESTAMP) + 1.day",
"timestampadd(DAY, 1, cast(null as date))",
"null")
// Round to the last day of previous month
testAllApis("2016-05-31".toDate + 1.month,
"'2016-05-31'.toDate + 1.month",
"timestampadd(MONTH, 1, date '2016-05-31')",
"2016-06-30")
testAllApis("2016-01-31".toDate + 5.month,
"'2016-01-31'.toDate + 5.month",
"timestampadd(MONTH, 5, date '2016-01-31')",
"2016-06-30")
testAllApis("2016-03-31".toDate - 1.month,
"'2016-03-31'.toDate - 1.month",
"timestampadd(MONTH, -1, date '2016-03-31')",
"2016-02-29")
testAllApis("2016-03-31".toDate - 1.week,
"'2016-03-31'.toDate - 1.week",
"timestampadd(WEEK, -1, date '2016-03-31')",
"2016-03-24")
// test TIMESTAMPADD with positive time interval in various granularity.
testSqlApi("TIMESTAMPADD(SECOND, 1, time '23:59:59')", "00:00:00")
testSqlApi("TIMESTAMPADD(MINUTE, 1, time '00:00:00')", "00:01:00")
testSqlApi("TIMESTAMPADD(MINUTE, 1, time '23:59:59')", "00:00:59")
testSqlApi("TIMESTAMPADD(HOUR, 1, time '23:59:59')", "00:59:59")
testSqlApi("TIMESTAMPADD(DAY, 15, time '23:59:59')", "23:59:59")
testSqlApi("TIMESTAMPADD(WEEK, 3, time '23:59:59')", "23:59:59")
testSqlApi("TIMESTAMPADD(MONTH, 6, time '23:59:59')", "23:59:59")
testSqlApi("TIMESTAMPADD(QUARTER, 1, time '23:59:59')", "23:59:59")
testSqlApi("TIMESTAMPADD(YEAR, 10, time '23:59:59')", "23:59:59")
// test TIMESTAMPADD with negative time interval in various granularity.
testSqlApi("TIMESTAMPADD(SECOND, -1, time '00:00:00')", "23:59:59")
testSqlApi("TIMESTAMPADD(MINUTE, -1, time '00:00:00')", "23:59:00")
testSqlApi("TIMESTAMPADD(MINUTE, -1, time '00:00:59')", "23:59:59")
testSqlApi("TIMESTAMPADD(HOUR, -1, time '00:00:00')", "23:00:00")
testSqlApi("TIMESTAMPADD(DAY, -1, time '23:59:59')", "23:59:59")
testSqlApi("TIMESTAMPADD(WEEK, -1, time '23:59:59')", "23:59:59")
testSqlApi("TIMESTAMPADD(MONTH, -1, time '23:59:59')", "23:59:59")
testSqlApi("TIMESTAMPADD(QUARTER, -1, time '23:59:59')", "23:59:59")
testSqlApi("TIMESTAMPADD(YEAR, -1, time '23:59:59')", "23:59:59")
}
@Test
def testToTimestamp(): Unit = {
testSqlApi("to_timestamp('abc')", "null")
testSqlApi("to_timestamp('2017-09-15 00:00:00')", "2017-09-15 00:00:00.000")
testSqlApi("to_timestamp('20170915000000', 'yyyyMMddHHmmss')", "2017-09-15 00:00:00.000")
testSqlApi("to_timestamp('2017-09-15', 'yyyy-MM-dd')", "2017-09-15 00:00:00.000")
// test with null input
testSqlApi("to_timestamp(cast(null as varchar))", "null")
}
  @Test
  def testToDate(): Unit = {
    // TO_DATE drops the time portion and keeps only the calendar date.
    testSqlApi("to_date('2017-09-15 00:00:00')", "2017-09-15")
  }
  // ----------------------------------------------------------------------------------------------
  // Hash functions
  // ----------------------------------------------------------------------------------------------
  @Test
  def testHashFunctions(): Unit = {
    // Expected digests of the ASCII string "test" for each algorithm.
    val expectedMd5 = "098f6bcd4621d373cade4e832627b4f6"
    val expectedSha1 = "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"
    val expectedSha224 = "90a3ed9e32b2aaf4c61c410eb925426119e1a9dc53d4286ade99a809"
    val expectedSha256 = "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"
    val expectedSha384 = "768412320f7b0aa5812fce428dc4706b3cae50e02a64caa16a7" +
      "82249bfe8efc4b7ef1ccb126255d196047dfedf17a0a9"
    val expectedSha512 = "ee26b0dd4af7e749aa1a8ee3c10ae9923f618980772e473f8819a" +
      "5d4940e0db27ac185f8a0e1d5f84f88bc887fd67b143732c304cc5fa9ad8e6f57f50028a8ff"
    testAllApis(
      "test".md5(),
      "md5('test')",
      "MD5('test')",
      expectedMd5)
    testAllApis(
      "test".sha1(),
      "sha1('test')",
      "SHA1('test')",
      expectedSha1)
    // sha224
    testAllApis(
      "test".sha224(),
      "sha224('test')",
      "SHA224('test')",
      expectedSha224)
    // sha-2 224: SHA2 with an explicit bit length must agree with the dedicated function
    testAllApis(
      "test".sha2(224),
      "sha2('test', 224)",
      "SHA2('test', 224)",
      expectedSha224)
    // sha256
    testAllApis(
      "test".sha256(),
      "sha256('test')",
      "SHA256('test')",
      expectedSha256)
    // sha-2 256
    testAllApis(
      "test".sha2(256),
      "sha2('test', 256)",
      "SHA2('test', 256)",
      expectedSha256)
    // sha384
    testAllApis(
      "test".sha384(),
      "sha384('test')",
      "SHA384('test')",
      expectedSha384)
    // sha-2 384
    testAllApis(
      "test".sha2(384),
      "sha2('test', 384)",
      "SHA2('test', 384)",
      expectedSha384)
    // sha512
    testAllApis(
      "test".sha512(),
      "sha512('test')",
      "SHA512('test')",
      expectedSha512)
    // sha-2 512
    testAllApis(
      "test".sha2(512),
      "sha2('test', 512)",
      "SHA2('test', 512)",
      expectedSha512)
    // null tests: f33 is a field expected to be NULL, so every hash must yield NULL
    testAllApis(
      'f33.md5(),
      "md5(f33)",
      "MD5(f33)",
      "null")
    testAllApis(
      'f33.sha1(),
      "sha1(f33)",
      "SHA1(f33)",
      "null")
    // NOTE(review): the SQL side uses SHA2(f33, 224) while the Table API side uses
    // sha224 — presumably intentional to cover both spellings; confirm.
    testAllApis(
      'f33.sha224(),
      "sha224(f33)",
      "SHA2(f33, 224)",
      "null")
    testAllApis(
      'f33.sha2(224),
      "sha2(f33, 224)",
      "SHA2(f33, 224)",
      "null")
    testAllApis(
      'f33.sha256(),
      "sha256(f33)",
      "SHA2(f33, 256)",
      "null")
    testAllApis(
      'f33.sha384(),
      "sha384(f33)",
      "SHA2(f33, 384)",
      "null")
    testAllApis(
      'f33.sha512(),
      "sha512(f33)",
      "SHA2(f33, 512)",
      "null")
    // NULL bit length propagates to a NULL result
    testAllApis(
      "test".sha2(nullOf(DataTypes.INT)),
      "sha2('test', Null(INT))",
      "SHA2('test', CAST(NULL AS INT))",
      "null")
    // non-constant bit length (f44 presumably holds 256 — expected digest is SHA-256)
    testAllApis(
      "test".sha2('f44),
      "sha2('test', f44)",
      "SHA2('test', f44)",
      expectedSha256)
  }
  // ----------------------------------------------------------------------------------------------
  // Other functions
  // ----------------------------------------------------------------------------------------------
  @Test
  def testIsTrueIsFalse(): Unit = {
    // f1 is presumably a TRUE boolean field and f21 a NULL boolean field — the
    // expected values below are only consistent with that reading (confirm in test data).
    // IS TRUE / IS FALSE are false for NULL; IS NOT TRUE / IS NOT FALSE are true for NULL.
    testAllApis(
      'f1.isTrue,
      "f1.isTrue",
      "f1 IS TRUE",
      "true")
    testAllApis(
      'f21.isTrue,
      "f21.isTrue",
      "f21 IS TRUE",
      "false")
    testAllApis(
      false.isFalse,
      "false.isFalse",
      "FALSE IS FALSE",
      "true")
    testAllApis(
      'f21.isFalse,
      "f21.isFalse",
      "f21 IS FALSE",
      "false")
    testAllApis(
      'f1.isNotTrue,
      "f1.isNotTrue",
      "f1 IS NOT TRUE",
      "false")
    testAllApis(
      'f21.isNotTrue,
      "f21.isNotTrue",
      "f21 IS NOT TRUE",
      "true")
    testAllApis(
      false.isNotFalse,
      "false.isNotFalse",
      "FALSE IS NOT FALSE",
      "false")
    testAllApis(
      'f21.isNotFalse,
      "f21.isNotFalse",
      "f21 IS NOT FALSE",
      "true")
  }
@Test
def testStringFunctionsWithNull(): Unit = {
val functions = List(
("%s.subString(2)", "SUBSTRING(%s, 2)"),
("%s.trim()", "TRIM(%s)"),
("%s.like('%%link')", "%s LIKE '%%link'"),
("%s.charLength()", "CHAR_LENGTH(%s)"),
("%s.lowerCase()", "LOWER(%s)"),
("%s.upperCase()", "UPPER(%s)"),
("%s.initCap()", "INITCAP(%s)"),
("%s.position('aa')", "POSITION('aa' IN %s)"),
("%s.overlay('aa', 2)", "OVERLAY(%s PLACING 'aa' FROM 2 FOR 2)")
)
val field = "f40"
functions.foreach ( x => {
val tableApiString = x._1.format(field)
val sqlApiString = x._2.format(field)
testAllApis(
ExpressionParser.parseExpression(tableApiString),
tableApiString,
sqlApiString,
"null"
)
})
}
  @Test
  def testCodeGenNPE(): Unit = {
    // Guards against NullPointerExceptions in generated code when an inner
    // expression (SUBSTRING with negative length) produces NULL:
    // case 1: non-null field argument, null result
    // case 2: null field argument, null result
    // case 3: constant argument, null result
    val fields = Seq ("f0", "f40", "''")
    fields.foreach(x => {
      val tableApiString =
        """
          |%s.subString(1, -1)
          |.upperCase()
        """.stripMargin.format(x)
      // Equivalent SQL: UPPER(SUBSTRING(<field>, 1, -1)) — built in two format steps.
      val sqlApiString = "UPPER(%s)"
        .format("SUBSTRING(%s, 1, -1)")
        .format(x)
      testAllApis(
        ExpressionParser.parseExpression(tableApiString),
        tableApiString,
        sqlApiString,
        "null"
      )
    })
  }
  @Test
  def testNullBigDecimal(): Unit = {
    // SIGN of a NULL decimal field (f41) must be NULL, not an exception.
    testAllApis(
      'f41.sign(),
      "f41.sign()",
      "SIGN(f41)",
      "null")
  }
@Test
def testEncodeAndDecode(): Unit = {
testSqlApi(
"decode(encode('aabbef', 'UTF-16LE'), 'UTF-16LE')",
"aabbef")
testSqlApi(
"decode(encode('aabbef', 'utf-8'), 'utf-8')",
"aabbef")
testSqlApi(
"decode(encode('', 'utf-8'), 'utf-8')",
"")
testSqlApi(
"encode(cast (null AS VARCHAR), 'utf-8')",
"null"
)
testSqlApi(
"encode(cast (null AS VARCHAR), cast (null AS VARCHAR))",
"null"
)
testSqlApi(
"encode('aabbef', cast (null AS VARCHAR))",
"null"
)
testSqlApi(
"decode(cast (null AS BINARY), 'utf-8')",
"null"
)
testSqlApi(
"decode(cast (null AS BINARY), cast (null AS VARCHAR))",
"null"
)
testSqlApi(
"decode(encode('aabbef', 'utf-8'), cast (null AS VARCHAR))",
"null"
)
testSqlApi(
"decode(encode('中国', 'UTF-16LE'), 'UTF-16LE')",
"中国")
testSqlApi(
"decode(encode('val_238', 'US-ASCII'), 'US-ASCII') =" +
" decode(encode('val_238', 'utf-8'), 'utf-8')",
"true")
}
  @Test
  def testStringToMap(): Unit = {
    // Default delimiters: ',' between pairs and '=' between key and value.
    testSqlApi(
      "STR_TO_MAP('k1=v1,k2=v2')",
      "{k1=v1, k2=v2}")
    // Custom pair/key-value delimiters; note surrounding whitespace is preserved.
    testSqlApi(
      "STR_TO_MAP('k1:v1;k2: v2', ';', ':')",
      "{k1=v1, k2= v2}")
    // test empty
    testSqlApi(
      "STR_TO_MAP('')",
      "{}")
    // test key access
    testSqlApi(
      "STR_TO_MAP('k1=v1,k2=v2')['k1']",
      "v1")
    testSqlApi(
      "STR_TO_MAP('k1:v1;k2:v2', ';', ':')['k2']",
      "v2")
    // test non-exist key access: missing keys yield NULL
    testSqlApi(
      "STR_TO_MAP('k1=v1,k2=v2')['k3']",
      "null")
    // f46 is presumably a field holding "test1=1,test2=2,test3=3" — confirm in test data.
    testSqlApi(
      "STR_TO_MAP(f46)",
      "{test1=1, test2=2, test3=3}")
    // NULL input field produces a NULL map.
    testSqlApi(
      "STR_TO_MAP(f47)",
      "null")
  }
  @Test
  def testIf(): Unit = {
    // Exercises IF(cond, a, b) across all supported operand types. The expected
    // values imply f7 < 5 in the test row (so the "then" branch is taken below);
    // confirm against the test data definition.
    // test IF(BOOL, INT, BIGINT), will do implicit type coercion.
    testSqlApi(
      "IF(f7 > 5, f14, f4)",
      "44")
    // test input with null
    testSqlApi(
      "IF(f7 < 5, cast(null as int), f4)",
      "null")
    // f0 is a STRING, cast(f0 as double) should never be ran
    testSqlApi(
      "IF(1 = 1, f6, cast(f0 as double))",
      "4.6")
    // test STRING, STRING
    testSqlApi(
      "IF(f7 > 5, f0, f8)",
      " This is a test String. ")
    // test BYTE, BYTE
    testSqlApi(
      "IF(f7 < 5, f2, f9)",
      "42")
    // test INT, INT
    testSqlApi(
      "IF(f7 < 5, f14, f7)",
      "-3")
    // test SHORT, SHORT
    testSqlApi(
      "IF(f7 < 5, f3, f10)",
      "43")
    // test Long, Long
    testSqlApi(
      "IF(f7 < 5, f4, f11)",
      "44")
    // test Double, Double
    testSqlApi(
      "IF(f7 < 5, f6, f13)",
      "4.6")
    // test BOOL, BOOL
    testSqlApi(
      "IF(f7 < 5, f1, f48)",
      "true")
    // test DECIMAL, DECIMAL
    testSqlApi(
      "IF(f7 < 5, f15, f49)",
      "-1231.1231231321321321111")
    // test BINARY, BINARY
    // the answer BINARY will cast to STRING in ExpressionTestBase.scala
    testSqlApi(
      "IF(f7 < 5, f53, f54)",
      "hello world") // hello world
    // test DATE, DATE
    testSqlApi(
      "IF(f7 < 5, f16, f50)",
      "1996-11-10")
    // test TIME, TIME
    testSqlApi(
      "IF(f7 < 5, f17, f51)",
      "06:55:44")
    // test TIMESTAMP, TIMESTAMP
    testSqlApi(
      "IF(f7 < 5, f18, f52)",
      "1996-11-10 06:55:44.333")
  }
  @Test
  def testIfDecimal(): Unit = {
    // test DECIMAL, DECIMAL via the expression-DSL ifThenElse, covering all three APIs.
    testAllApis(
      ifThenElse('f7 < 5, 'f31, 'f34),
      "ifThenElse(f7 < 5, f31, f34)",
      "IF(f7 < 5, f31, f34)",
      "-0.1231231321321321111")
  }
@Test
def testIsDecimal(): Unit = {
testSqlApi(
"IS_DECIMAL('1')",
"true")
testSqlApi(
"IS_DECIMAL('123')",
"true")
testSqlApi(
"IS_DECIMAL('2')",
"true")
testSqlApi(
"IS_DECIMAL('11.4445')",
"true")
testSqlApi(
"IS_DECIMAL('3')",
"true")
testSqlApi(
"IS_DECIMAL('abc')",
"false")
// test null string field
testSqlApi(
"IS_DECIMAL(f33)",
"false")
}
@Test
def testIsDigit(): Unit = {
testSqlApi(
"IS_DIGIT('1')",
"true")
testSqlApi(
"IS_DIGIT('123')",
"true")
testSqlApi(
"IS_DIGIT('2')",
"true")
testSqlApi(
"IS_DIGIT('11.4445')",
"false")
testSqlApi(
"IS_DIGIT('3')",
"true")
testSqlApi(
"IS_DIGIT('abc')",
"false")
// test null string field
testSqlApi(
"IS_DIGIT(f33)",
"false")
}
@Test
def testIsAlpha(): Unit = {
testSqlApi(
"IS_ALPHA('1')",
"false")
testSqlApi(
"IS_ALPHA('123')",
"false")
testSqlApi(
"IS_ALPHA('2')",
"false")
testSqlApi(
"IS_ALPHA('11.4445')",
"false")
testSqlApi(
"IS_ALPHA('3')",
"false")
testSqlApi(
"IS_ALPHA('abc')",
"true")
// test null string field
testSqlApi(
"IS_ALPHA(f33)",
"false")
}
  @Test
  def testRawTypeEquality(): Unit = {
    // Equality on RAW-typed fields: f55/f56/f57 are presumably raw-typed test
    // fields where f55 equals f57 but not f56 — confirm in the test data.
    testSqlApi(
      "f55=f56",
      "false")
    testSqlApi(
      "f55=f57",
      "true")
  }
}
| aljoscha/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/expressions/ScalarFunctionsTest.scala | Scala | apache-2.0 | 94,346 |
package net.slozzer.babel
import cats.data.Chain
import cats.implicits._
import io.circe.Json
import io.circe.{parser => CirceParser}
/**
 * Circe-backed [[Parser]] and [[Printer]] for [[Babel]] trees.
 *
 * Only JSON null, strings and objects are representable; booleans, numbers
 * and arrays are rejected with a [[Parser.Error]] that names the offending
 * JSON type and its dotted path.
 */
trait circe {
  // Builds the "unsupported type" error, rendering the path as ".a.b.c".
  private def error(tpe: String, path: Chain[String]): Parser.Error =
    Parser.Error(s"Unsupported JSON type: $tpe (${path.mkString_(".", ".", "")})", cause = None)
  // Recursively converts circe Json to Babel. `path` tracks the object keys
  // traversed so far, for error reporting only. NOTE: `Json.fold` handlers are
  // positional — null, boolean, number, string, array, object, in that order.
  def toBabel(json: Json, path: Chain[String]): Either[Parser.Error, Babel] =
    json.fold(
      Right(Babel.Null),
      _ => Left(error("BOOLEAN", path)),
      _ => Left(error("NUMBER", path)),
      value => Right(Babel.Value(value)),
      _ => Left(error("ARRAY", path)),
      json =>
        // Convert every field; the first failure short-circuits via traverse.
        json.toMap.toList
          .traverse { case (key, json) =>
            toBabel(json, path :+ key).tupleLeft(key)
          }
          .map(values => Babel.Object(values.toMap))
    )
  // Total inverse of toBabel: every Babel tree has a JSON representation.
  def toJson(babel: Babel): Json = babel match {
    case Babel.Object(values) => Json.fromFields(values.fmap(toJson))
    case Babel.Value(value) => Json.fromString(value)
    case Babel.Null => Json.Null
  }
  // Parses a raw string: syntax errors surface as a type-mismatch Parser.Error
  // with the circe failure attached as the cause.
  val parser: Parser = value =>
    CirceParser
      .parse(value)
      .leftMap(failure => Parser.Error.typeMismatch("Json", cause = Some(failure)))
      .flatMap(toBabel(_, Chain.empty))
  // Pretty-prints with two-space indentation.
  val printer: Printer = toJson(_).spaces2
}
// Importable singleton exposing the trait's members directly.
object circe extends circe
| Taig/lokal | modules/circe/src/main/scala/net/slozzer/babel/circe.scala | Scala | mit | 1,295 |
package org.ensime.server
import org.ensime.util._
import scala.collection.{ immutable, mutable }
import scala.tools.nsc.io.AbstractFile
import scala.tools.nsc.interactive.{ Global }
import org.ardverk.collection._
import scala.collection.JavaConversions._
/**
 * Walks the presentation compiler's symbol table, visiting packages and the
 * types they contain.
 *
 * Must be mixed into a [[RichPresentationCompiler]], which supplies the
 * `Symbol`/`NoSymbol`/`NoType` definitions used here.
 */
trait NamespaceTraversal { self: RichPresentationCompiler =>

  /** Callback interface invoked while traversing the namespace tree. */
  trait NamespaceVisitor {
    def visitPackage(sym: Symbol)
    def visitType(sym: Symbol)
  }

  import definitions.EmptyPackage

  /**
   * Visits `sym`: packages trigger `visitPackage` and a recursive walk of
   * their members; classes, traits, modules and their companion classes
   * trigger `visitType`. Synthetic symbols (names containing '$'), NoSymbol
   * and symbols without a type are skipped.
   */
  def traverse(v: NamespaceVisitor, sym: Symbol) {
    import scala.util.control.NonFatal
    try {
      if (sym.isPackage) {
        v.visitPackage(sym)
        traverseMembers(v, sym)
      } else if (!(sym.nameString.contains("$")) && (sym != NoSymbol) && (sym.tpe != NoType)) {
        if (sym.isClass || sym.isTrait || sym.isModule ||
          sym.isModuleClass || sym.isPackageClass) {
          v.visitType(sym)
        }
      }
    } catch {
      // The compiler can throw while resolving partially-loaded symbols; skip
      // the offending symbol but let fatal errors (OutOfMemoryError, etc.)
      // propagate. (Previously `case e => None` swallowed everything.)
      case NonFatal(_) => ()
    }
  }

  /**
   * Traverses the members of `sym`. Members of the root package also include
   * the empty package's members. The owner check prevents infinite loops when
   * a member's fully-qualified name does not place it under `sym`.
   */
  def traverseMembers(v: NamespaceVisitor, sym: Symbol) {
    def isRoot(s: Symbol) = s.isRoot || s.isRootPackage
    def iter(s: Symbol) {
      if (s != EmptyPackage && !isRoot(s) &&
        // This check is necessary to prevent infinite looping..
        ((isRoot(s.owner) && isRoot(sym)) || (s.owner.fullName == sym.fullName))) {
        traverse(v, s)
      }
    }
    if (isRoot(sym)) {
      EmptyPackage.info.members.foreach(iter)
    }
    sym.info.members.foreach(iter)
  }
}
| non/ensime | src/main/scala/org/ensime/server/NamespaceTraversal.scala | Scala | gpl-3.0 | 1,403 |
package app
import util.Directory._
import util.Implicits._
import util.ControlUtil._
import _root_.util._
import service._
import org.scalatra._
import java.io.File
import org.eclipse.jgit.api.Git
import org.eclipse.jgit.lib._
import org.apache.commons.io.FileUtils
import org.eclipse.jgit.treewalk._
import java.util.zip.{ZipEntry, ZipOutputStream}
import scala.Some
/** Concrete controller wiring [[RepositoryViewerControllerBase]] to its service dependencies. */
class RepositoryViewerController extends RepositoryViewerControllerBase
  with RepositoryService with AccountService with ActivityService with ReferrerAuthenticator with CollaboratorsAuthenticator
/**
* The repository viewer.
*/
trait RepositoryViewerControllerBase extends ControllerBase {
self: RepositoryService with AccountService with ActivityService with ReferrerAuthenticator with CollaboratorsAuthenticator =>
  /**
   * Returns converted HTML from Markdown for preview.
   *
   * Expects form parameters: "content" (the Markdown source) plus the boolean
   * flags "enableWikiLink" and "enableRefsLink" controlling link rendering.
   */
  post("/:owner/:repository/_preview")(referrersOnly { repository =>
    contentType = "text/html"
    view.helpers.markdown(params("content"), repository,
      params("enableWikiLink").toBoolean,
      params("enableRefsLink").toBoolean)
  })
  /**
   * Displays the file list of the repository root and the default branch.
   */
  get("/:owner/:repository")(referrersOnly {
    // No revision/path given: fileList falls back to the default branch and root.
    fileList(_)
  })
  /**
   * Displays the file list of the specified path and branch.
   *
   * The splat is "<branch-or-commit>/<directory path>"; an empty path shows
   * the revision root.
   */
  get("/:owner/:repository/tree/*")(referrersOnly { repository =>
    val (id, path) = splitPath(repository, multiParams("splat").head)
    if(path.isEmpty){
      fileList(repository, id)
    } else {
      fileList(repository, id, path)
    }
  })
  /**
   * Displays the commit list of the specified resource.
   *
   * The splat is "<branch>/<optional path>"; the optional "page" query
   * parameter selects a page of 30 commits. Commits are grouped by day for
   * display. An unknown branch yields 404.
   */
  get("/:owner/:repository/commits/*")(referrersOnly { repository =>
    val (branchName, path) = splitPath(repository, multiParams("splat").head)
    val page = params.get("page").flatMap(_.toIntOpt).getOrElse(1)
    using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
      JGitUtil.getCommitLog(git, branchName, page, 30, path) match {
        case Right((logs, hasNext)) =>
          repo.html.commits(if(path.isEmpty) Nil else path.split("/").toList, branchName, repository,
            // splitWith groups consecutive commits that share the same calendar day
            logs.splitWith{ (commit1, commit2) =>
              view.helpers.date(commit1.time) == view.helpers.date(commit2.time)
            }, page, hasNext)
        case Left(_) => NotFound
      }
    }
  })
/**
* Displays the file content of the specified branch or commit.
*/
get("/:owner/:repository/blob/*")(referrersOnly { repository =>
val (id, path) = splitPath(repository, multiParams("splat").head)
val raw = params.get("raw").getOrElse("false").toBoolean
using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
val revCommit = JGitUtil.getRevCommitFromId(git, git.getRepository.resolve(id))
@scala.annotation.tailrec
def getPathObjectId(path: String, walk: TreeWalk): ObjectId = walk.next match {
case true if(walk.getPathString == path) => walk.getObjectId(0)
case true => getPathObjectId(path, walk)
}
val objectId = using(new TreeWalk(git.getRepository)){ treeWalk =>
treeWalk.addTree(revCommit.getTree)
treeWalk.setRecursive(true)
getPathObjectId(path, treeWalk)
}
if(raw){
// Download
defining(JGitUtil.getContent(git, objectId, false).get){ bytes =>
contentType = FileUtil.getContentType(path, bytes)
bytes
}
} else {
// Viewer
val large = FileUtil.isLarge(git.getRepository.getObjectDatabase.open(objectId).getSize)
val viewer = if(FileUtil.isImage(path)) "image" else if(large) "large" else "other"
val bytes = if(viewer == "other") JGitUtil.getContent(git, objectId, false) else None
val content = if(viewer == "other"){
if(bytes.isDefined && FileUtil.isText(bytes.get)){
// text
JGitUtil.ContentInfo("text", bytes.map(StringUtil.convertFromByteArray))
} else {
// binary
JGitUtil.ContentInfo("binary", None)
}
} else {
// image or large
JGitUtil.ContentInfo(viewer, None)
}
repo.html.blob(id, repository, path.split("/").toList, content, new JGitUtil.CommitInfo(revCommit))
}
}
})
  /**
   * Displays details of the specified commit.
   *
   * Resolves the ":id" path parameter to a commit and renders its metadata,
   * the branches/tags containing it, and the diff against its parent.
   */
  get("/:owner/:repository/commit/:id")(referrersOnly { repository =>
    val id = params("id")
    using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
      defining(JGitUtil.getRevCommitFromId(git, git.getRepository.resolve(id))){ revCommit =>
        // getDiffs also yields the parent commit id the diff was computed against
        JGitUtil.getDiffs(git, id) match { case (diffs, oldCommitId) =>
          repo.html.commit(id, new JGitUtil.CommitInfo(revCommit),
            JGitUtil.getBranchesOfCommit(git, revCommit.getName),
            JGitUtil.getTagsOfCommit(git, revCommit.getName),
            repository, diffs, oldCommitId)
        }
      }
    }
  })
  /**
   * Displays branches.
   *
   * For each branch the latest commit's committer date is shown; the delete
   * action is offered only to users with write permission.
   */
  get("/:owner/:repository/branches")(referrersOnly { repository =>
    using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
      // retrieve latest update date of each branch
      val branchInfo = repository.branchList.map { branchName =>
        val revCommit = git.log.add(git.getRepository.resolve(branchName)).setMaxCount(1).call.iterator.next
        (branchName, revCommit.getCommitterIdent.getWhen)
      }
      repo.html.branches(branchInfo, hasWritePermission(repository.owner, repository.name, context.loginAccount), repository)
    }
  })
  /**
   * Deletes branch.
   *
   * The default branch is never deleted; the deletion is recorded in the
   * activity log. NOTE(review): this is a state-changing operation served via
   * GET, which is vulnerable to CSRF/prefetch — consider migrating to POST.
   */
  get("/:owner/:repository/delete/:branchName")(collaboratorsOnly { repository =>
    val branchName = params("branchName")
    val userName = context.loginAccount.get.userName
    if(repository.repository.defaultBranch != branchName){
      using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
        git.branchDelete().setBranchNames(branchName).call()
        recordDeleteBranchActivity(repository.owner, repository.name, userName, branchName)
      }
    }
    redirect(s"/${repository.owner}/${repository.name}/branches")
  })
  /**
   * Displays tags.
   */
  get("/:owner/:repository/tags")(referrersOnly {
    // Tag data is already part of the repository info; the view renders it directly.
    repo.html.tags(_)
  })
  /**
   * Download repository contents as an archive.
   *
   * ":name" must be "<revision>.zip"; any other extension yields 400. The zip
   * is assembled in a per-session work directory (wiped on each request) and
   * streamed back as an attachment. Long (40-char) commit ids are shortened to
   * 10 characters in the file name.
   */
  get("/:owner/:repository/archive/:name")(referrersOnly { repository =>
    val name = params("name")
    if(name.endsWith(".zip")){
      val revision = name.replaceFirst("\\\\.zip$", "")
      val workDir = getDownloadWorkDir(repository.owner, repository.name, session.getId)
      if(workDir.exists){
        FileUtils.deleteDirectory(workDir)
      }
      workDir.mkdirs
      val zipFile = new File(workDir, repository.name + "-" +
        (if(revision.length == 40) revision.substring(0, 10) else revision) + ".zip")
      using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
        val revCommit = JGitUtil.getRevCommitFromId(git, git.getRepository.resolve(revision))
        using(new TreeWalk(git.getRepository)){ walk =>
          val reader = walk.getObjectReader
          val objectId = new MutableObjectId
          using(new ZipOutputStream(new java.io.FileOutputStream(zipFile))){ out =>
            walk.addTree(revCommit.getTree)
            walk.setRecursive(true)
            // Recursive walk yields only blobs and trees; trees are skipped so
            // the archive contains one entry per file.
            while(walk.next){
              val name = walk.getPathString
              val mode = walk.getFileMode(0)
              if(mode != FileMode.TREE){
                walk.getObjectId(objectId, 0)
                val entry = new ZipEntry(name)
                val loader = reader.open(objectId)
                entry.setSize(loader.getSize)
                out.putNextEntry(entry)
                loader.copyTo(out)
              }
            }
          }
        }
      }
      contentType = "application/octet-stream"
      response.setHeader("Content-Disposition", s"attachment; filename=${zipFile.getName}")
      zipFile
    } else {
      BadRequest
    }
  })
  // Displays the fork network: the origin repository (this one, if it is not a
  // fork) together with all repositories forked from that origin.
  get("/:owner/:repository/network/members")(referrersOnly { repository =>
    repo.html.forked(
      getRepository(
        repository.repository.originUserName.getOrElse(repository.owner),
        repository.repository.originRepositoryName.getOrElse(repository.name),
        baseUrl),
      getForkedRepositories(
        repository.repository.originUserName.getOrElse(repository.owner),
        repository.repository.originRepositoryName.getOrElse(repository.name)),
      repository)
  })
private def splitPath(repository: service.RepositoryService.RepositoryInfo, path: String): (String, String) = {
val id = repository.branchList.collectFirst {
case branch if(path == branch || path.startsWith(branch + "/")) => branch
} orElse repository.tags.collectFirst {
case tag if(path == tag.name || path.startsWith(tag.name + "/")) => tag.name
} orElse Some(path.split("/")(0)) get
(id, path.substring(id.length).replaceFirst("^/", ""))
}
private val readmeFiles = Seq("readme.md", "readme.markdown")
/**
* Provides HTML of the file list.
*
* @param repository the repository information
* @param revstr the branch name or commit id(optional)
* @param path the directory path (optional)
* @return HTML of the file list
*/
private def fileList(repository: RepositoryService.RepositoryInfo, revstr: String = "", path: String = ".") = {
if(repository.commitCount == 0){
repo.html.guide(repository)
} else {
using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
val revisions = Seq(if(revstr.isEmpty) repository.repository.defaultBranch else revstr, repository.branchList.head)
// get specified commit
JGitUtil.getDefaultBranch(git, repository, revstr).map { case (objectId, revision) =>
defining(JGitUtil.getRevCommitFromId(git, objectId)){ revCommit =>
// get files
val files = JGitUtil.getFileList(git, revision, path)
// process README.md or README.markdown
val readme = files.find { file =>
readmeFiles.contains(file.name.toLowerCase)
}.map { file =>
StringUtil.convertFromByteArray(JGitUtil.getContent(Git.open(getRepositoryDir(repository.owner, repository.name)), file.id, true).get)
}
repo.html.files(revision, repository,
if(path == ".") Nil else path.split("/").toList, // current path
new JGitUtil.CommitInfo(revCommit), // latest commit
files, readme)
}
} getOrElse NotFound
}
}
}
}
| nvdnkpr/gitbucket | src/main/scala/app/RepositoryViewerController.scala | Scala | apache-2.0 | 10,698 |
package io.leonard
import io.leonard.AnimalSpec._
import io.leonard.TraitFormat.{caseObjectFormat, traitFormat}
import org.scalatest.{FlatSpec, Matchers}
import play.api.libs.json.Json.format
import play.api.libs.json._
/**
 * Verifies that `TraitFormat` round-trips sealed trait hierarchies through
 * play-json, selecting the concrete sub-format via a JSON discriminator
 * property ("type" by default).
 */
class TraitFormatSpec extends FlatSpec with Matchers {
  "TraitFormat" should "serialise" in {
    val animalFormat = traitFormat[Animal] << format[Dog] << format[Cat]
    val doggyJson = Json.parse("""{"s":"woof!","type":"Dog"}""")
    animalFormat.writes(doggy) should be(doggyJson)
    // reads must pick the Dog format from the "type" discriminator
    val animal1: Animal = animalFormat.reads(doggyJson).get
    animal1 should be(doggy)
    val kittyJson = Json.parse("""{"s":"Meow!","type":"Cat"}""")
    animalFormat.writes(kitty) should be(kittyJson)
    val animal2: Animal = animalFormat.reads(kittyJson).get
    animal2 should be(kitty)
  }
  it should "serialise a case object" in {
    // case objects carry no fields, so their JSON is just the discriminator
    val animalFormat = traitFormat[Animal] << format[Dog] << format[Cat] << caseObjectFormat(Nessy)
    val nessyJson = """{"type":"Nessy"}"""
    animalFormat.writes(Nessy) should be(Json.parse(nessyJson))
    animalFormat.reads(Json.parse(nessyJson)).get should be(Nessy)
  }
  it should "serialise nested" in {
    // a trait format can itself be registered inside another trait format
    val mammalFormat = traitFormat[Mammal] << format[Dog] << format[Cat]
    val animalFormat = traitFormat[Animal] << mammalFormat << caseObjectFormat(Nessy)
    val doggyJson = Json.parse("""{"s":"woof!","type":"Dog"}""")
    animalFormat.writes(doggy) should be(doggyJson)
  }
  it should "put discriminator in the JSON" in {
    // custom discriminator property name instead of the default "type"
    val animalFormat = traitFormat[Animal]("animalType") << format[Dog] << format[Cat]
    val doggyJson = """{"s":"woof!","animalType":"Dog"}"""
    animalFormat.writes(doggy).toString() should be(doggyJson)
    val animal1: Animal = animalFormat.reads(Json.parse(doggyJson)).get
    animal1 should be(doggy)
  }
  it should "return a JsError if the discriminator is not there" in {
    val animalFormat = traitFormat[Animal]("animalType") << format[Dog] << format[Cat]
    val doggyJson = """{"s":"woof!"}"""
    val jsResult = animalFormat.reads(Json.parse(doggyJson))
    jsResult should be(JsError(s"No valid discriminator property 'animalType' found in $doggyJson."))
  }
  it should "return a JsError if the discriminator is not a string" in {
    // discriminator present but holding an object, not a string → same error
    val animalFormat = traitFormat[Animal]("animalType") << format[Dog] << format[Cat]
    val doggyJson = """{"s":"woof!","animalType":{"type":"Dog"}}"""
    val jsResult = animalFormat.reads(Json.parse(doggyJson))
    jsResult should be(JsError(s"No valid discriminator property 'animalType' found in $doggyJson."))
  }
  it should "custom name for case class format" in {
    // (name, format) pairs override the default class-name discriminator value
    val animalFormat = traitFormat[Animal] << ("hound", format[Dog]) << ("pussy_cat", format[Cat])
    val houndJson = """{"s":"woof!","type":"hound"}"""
    val jsResult = animalFormat.reads(Json.parse(houndJson))
    jsResult.get should be(doggy)
    val pussyCatJson = """{"s":"Meow!","type":"pussy_cat"}"""
    animalFormat.reads(Json.parse(pussyCatJson)).get should be(kitty)
  }
  it should "write and read custom name for case object format" in {
    val animalFormat = traitFormat[Animal] << ("hound", format[Dog]) << ("pussy_cat", format[Cat]) << ("sea_monster", caseObjectFormat(Nessy))
    val nessyJson = """{"type":"sea_monster"}"""
    val jsResult = animalFormat.reads(Json.parse(nessyJson))
    jsResult.get should be(Nessy)
    animalFormat.writes(Nessy).toString() should be(nessyJson)
  }
}
| leonardehrenfried/play-json-traits | src/test/scala/io/leonard/TraitFormatSpec.scala | Scala | apache-2.0 | 3,481 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.sbt.routes
import play.api.PlayException
import play.core.PlayVersion
import play.routes.compiler.RoutesGenerator
import play.routes.compiler.RoutesCompilationError
import play.routes.compiler.RoutesCompiler.GeneratedSource
import play.routes.compiler.RoutesCompiler.RoutesCompilerTask
import sbt._
import sbt.Keys._
import xsbti.Position
import java.util.Optional
import scala.collection.mutable
import com.typesafe.sbt.web.incremental._
/**
 * sbt setting and task keys exposed by the Play routes compiler plugin.
 * Each key's sbt-visible name and description are given as the arguments.
 */
object RoutesKeys {
  val routesCompilerTasks = TaskKey[Seq[RoutesCompilerTask]]("playRoutesTasks", "The routes files to compile")
  val routes = TaskKey[Seq[File]]("playRoutes", "Compile the routes files")
  val routesImport = SettingKey[Seq[String]]("playRoutesImports", "Imports for the router")
  val routesGenerator = SettingKey[RoutesGenerator]("playRoutesGenerator", "The routes generator")
  val generateReverseRouter = SettingKey[Boolean](
    "playGenerateReverseRouter",
    "Whether the reverse router should be generated. Setting to false may reduce compile times if it's not needed."
  )
  val namespaceReverseRouter = SettingKey[Boolean](
    "playNamespaceReverseRouter",
    "Whether the reverse router should be namespaced. Useful if you have many routers that use the same actions."
  )
  /**
   * This class is used to avoid infinite recursions when configuring aggregateReverseRoutes,
   * since it makes the ProjectReference a thunk.
   */
  class LazyProjectReference(ref: => ProjectReference) {
    def project: ProjectReference = ref
  }
  object LazyProjectReference {
    import scala.language.implicitConversions
    // Implicit conversions let build authors write plain projects/references
    // in aggregateReverseRoutes without wrapping them explicitly.
    implicit def fromProjectReference(ref: => ProjectReference): LazyProjectReference = new LazyProjectReference(ref)
    implicit def fromProject(project: => Project): LazyProjectReference = new LazyProjectReference(project)
  }
  val aggregateReverseRoutes = SettingKey[Seq[LazyProjectReference]](
    "playAggregateReverseRoutes",
    "A list of projects that reverse routes should be aggregated from."
  )
  // Default generator implementation re-exported for convenient use in builds.
  val InjectedRoutesGenerator = play.routes.compiler.InjectedRoutesGenerator
}
/**
 * sbt AutoPlugin that wires Play routes compilation into a build: it turns
 * `.routes` source files into generated Scala sources (the forwards router
 * and, optionally, reverse routers) for the Compile and Test configurations.
 */
object RoutesCompiler extends AutoPlugin {
  import RoutesKeys._

  // Opt-in plugin: never activates automatically.
  override def trigger = noTrigger

  val autoImport = RoutesKeys

  override def projectSettings =
    defaultSettings ++
      inConfig(Compile)(routesSettings) ++
      inConfig(Test)(routesSettings)

  // Per-configuration settings (applied to both Compile and Test above).
  def routesSettings = Seq(
    sources in routes := Nil,

    // taskDyn: the set of compiler tasks depends on the value of
    // `aggregateReverseRoutes`, which is only known at evaluation time.
    routesCompilerTasks := Def.taskDyn {
      val generateReverseRouterValue = generateReverseRouter.value
      val namespaceReverseRouterValue = namespaceReverseRouter.value
      val sourcesInRoutes = (sources in routes).value
      val routesImportValue = routesImport.value

      // Aggregate all the routes file tasks that we want to compile the reverse routers for.
      aggregateReverseRoutes.value
        .map { agg =>
          routesCompilerTasks in (agg.project, configuration.value)
        }
        .join
        .map {
          aggTasks: Seq[Seq[RoutesCompilerTask]] =>
            // Aggregated tasks need to have forwards router compilation disabled and reverse router compilation enabled.
            val reverseRouterTasks = aggTasks.flatten.map { task =>
              task.copy(forwardsRouter = false, reverseRouter = true)
            }

            // Find the routes compile tasks for this project
            val thisProjectTasks = sourcesInRoutes.map { file =>
              RoutesCompilerTask(
                file,
                routesImportValue,
                forwardsRouter = true,
                reverseRouter = generateReverseRouterValue,
                namespaceReverseRouter = namespaceReverseRouterValue
              )
            }

            thisProjectTasks ++ reverseRouterTasks
        }
    }.value,
    // Make `.routes` files trigger recompilation under continuous build (~).
    watchSources in Defaults.ConfigGlobal ++= (sources in routes).value,
    // Generated sources go under e.g. crossTarget/routes/main.
    target in routes := crossTarget.value / "routes" / Defaults.nameForSrc(configuration.value.name),
    routes := compileRoutesFiles.value,
    sourceGenerators += Def.task(routes.value).taskValue,
    managedSourceDirectories += (target in routes).value
  )

  def defaultSettings = Seq(
    routesImport := Nil,
    aggregateReverseRoutes := Nil,

    // Generate reverse router defaults to true if this project is not aggregated by any of the projects it depends on
    // aggregateReverseRoutes projects. Otherwise, it will be false, since another project will be generating the
    // reverse router for it.
    generateReverseRouter := Def.settingDyn {
      val projectRef = thisProjectRef.value
      val dependencies = buildDependencies.value.classpathTransitiveRefs(projectRef)

      // Go through each dependency of this project
      dependencies
        .map { dep =>
          // Get the aggregated reverse routes projects for the dependency, if defined
          Def.optional(aggregateReverseRoutes in dep)(_.map(_.map(_.project)).getOrElse(Nil))
        }
        .join
        .apply { aggregated: Seq[Seq[ProjectReference]] =>
          val localProject = LocalProject(projectRef.project)
          // Return false if this project is aggregated by one of our dependencies
          !aggregated.flatten.contains(localProject)
        }
    }.value,
    namespaceReverseRouter := false,
    routesGenerator := InjectedRoutesGenerator,
    // Map compiler positions in generated routers back to the .routes source.
    sourcePositionMappers += routesPositionMapper
  )

  // Translates a position in a generated router source back to the position
  // in the original .routes file, when the file is one we generated.
  private val routesPositionMapper: Position => Option[Position] = position => {
    position.sourceFile.asScala.collect {
      case GeneratedSource(generatedSource) => new MappedPos(position, generatedSource)
    }
  }

  // A Position whose line number and line content are mapped from the
  // generated source back into the original routes file.
  private final class MappedPos(generatedPosition: Position, generatedSource: GeneratedSource) extends Position {
    private val source = generatedSource.source.get

    // Translate the generated line number to the original routes-file line.
    lazy val line = generatedPosition.line.asScala.flatMap(l => generatedSource.mapLine(l).map(Int.box(_))).asJava
    // Re-reads the routes file to recover the offending line's text.
    lazy val lineContent = line.asScala.flatMap(l => IO.read(source).split('\\n').lift(l - 1)).getOrElse("")

    // Column/offset information is not recoverable from the line mapping.
    val offset = Optional.empty[Integer]
    val pointer = Optional.empty[Integer]
    val pointerSpace = Optional.empty[String]
    val sourcePath = Optional.ofNullable(source.getCanonicalPath)
    val sourceFile = Optional.ofNullable(source)

    override lazy val toString = {
      val sb = new mutable.StringBuilder()
      if (sourcePath.isPresent) sb.append(sourcePath.get)
      if (line.isPresent) sb.append(":").append(line.get)
      if (lineContent.nonEmpty) sb.append("\\n").append(lineContent)
      sb.toString()
    }
  }

  // Task body for `routes`: runs the routes compiler over all collected tasks.
  private val compileRoutesFiles = Def.task[Seq[File]] {
    val log = state.value.log
    compileRoutes(
      routesCompilerTasks.value,
      routesGenerator.value,
      (target in routes).value,
      streams.value.cacheDirectory,
      log
    )
  }

  /**
   * Compiles the given routes tasks incrementally: only operations whose task,
   * generator or Play version changed since the cached run are re-executed.
   * Returns the full, up-to-date set of generated files.
   *
   * On compilation errors, every error is logged and the first is thrown.
   */
  def compileRoutes(
      tasks: Seq[RoutesCompilerTask],
      generator: RoutesGenerator,
      generatedDir: File,
      cacheDirectory: File,
      log: Logger
  ): Seq[File] = {
    val ops = tasks.map(task => RoutesCompilerOp(task, generator.id, PlayVersion.current))
    val (products, errors) = syncIncremental(cacheDirectory, ops) { opsToRun: Seq[RoutesCompilerOp] =>
      val errs = Seq.newBuilder[RoutesCompilationError]
      // breakOut builds the Map directly without an intermediate Seq (2.12 idiom).
      val opResults: Map[RoutesCompilerOp, OpResult] = opsToRun.map { op =>
        play.routes.compiler.RoutesCompiler.compile(op.task, generator, generatedDir) match {
          case Right(inputs) =>
            op -> OpSuccess(Set(op.task.file), inputs.toSet)
          case Left(details) =>
            errs ++= details
            op -> OpFailure
        }
      }(scala.collection.breakOut)
      opResults -> errs.result()
    }

    if (errors.nonEmpty) {
      // Log every error, then fail the task with the first one.
      val exceptions = errors.map {
        case RoutesCompilationError(source, message, line, column) =>
          // Column is shifted by one — presumably 1-based (compiler) to
          // 0-based (PlayException); confirm against PlayException docs.
          reportCompilationError(log, RoutesCompilationException(source, message, line, column.map(_ - 1)))
      }
      throw exceptions.head
    }

    products.to[Seq]
  }

  // Logs a compilation error with its source location, the offending line and
  // a caret under the offending column; returns the error for the caller to throw.
  private def reportCompilationError(log: Logger, error: PlayException.ExceptionSource) = {
    // log the source file and line number with the error message
    log.error(
      Option(error.sourceName).getOrElse("") + Option(error.line).map(":" + _).getOrElse("") + ": " + error.getMessage
    )
    Option(error.interestingLines(0)).map(_.focus).flatMap(_.headOption).map { line =>
      // log the line
      log.error(line)
      Option(error.position).map { pos =>
        // print a carat under the offending character
        // Tabs are kept as tabs so the caret lines up on tab-indented lines.
        val spaces = (line: Seq[Char]).take(pos).map {
          case '\\t' => '\\t'
          case x => ' '
        }
        log.error(spaces.mkString + "^")
      }
    }
    error
  }
}
// Identity of a single routes compilation, used as the incremental-cache key
// in compileRoutes: an op is re-run when the task inputs, the generator, or
// the Play version change.
private case class RoutesCompilerOp(task: RoutesCompilerTask, generatorId: String, playVersion: String)
/**
 * A routes-file compilation error adapted to Play's ExceptionSource so error
 * pages and logs can point at the offending position in the .routes file.
 * `atLine`/`column` are optional; absent values surface as null (Java API).
 */
case class RoutesCompilationException(source: File, message: String, atLine: Option[Int], column: Option[Int])
  extends PlayException.ExceptionSource("Compilation error", message)
  with FeedbackProvidedException {
  // Boxed to java.lang.Integer; null when the location is unknown.
  def line = atLine.map(_.asInstanceOf[java.lang.Integer]).orNull
  def position = column.map(_.asInstanceOf[java.lang.Integer]).orNull
  // Re-reads the routes file on each call.
  def input = IO.read(source)
  def sourceName = source.getAbsolutePath
}
| benmccann/playframework | dev-mode/sbt-plugin/src/main/scala/play/sbt/routes/RoutesCompiler.scala | Scala | apache-2.0 | 9,392 |
package im.actor.server.model
@SerialVersionUID(1L)
// A user's public key record.
// NOTE(review): `data` is an Array[Byte]; the synthesized case-class
// equals/hashCode compare arrays by reference, not by content — confirm
// callers do not rely on structural equality of these values.
case class UserPublicKey(
  userId: Int, // owning user's id
  hash: Long, // key hash/fingerprint — exact semantics defined by callers; verify
  data: Array[Byte] // raw public key bytes
)
| ljshj/actor-platform | actor-server/actor-models/src/main/scala/im/actor/server/model/UserPublicKey.scala | Scala | mit | 134 |
package benchmarks
import org.openjdk.jmh.annotations._
@State(Scope.Thread)
class Step {
  // Step size injected by JMH; benchmarks run with the parameter value "2".
  @Param(Array("2"))
  var step: Int = _

  // Mutable counter advanced by run().
  var count: Int = 0

  /** Advances the counter by one and returns the new value. */
  def run(): Int = {
    count = count + 1
    count
  }

  /** Reads the counter without advancing it. */
  def get(): Int = count

  /** Reads the configured step size. */
  def at(): Int = step

  /** True iff `v` is an exact multiple of the step size. */
  def test(v: Int): Boolean = (v % step) == 0
}
| volkc/REScala | Research/Microbenchmarks/src/main/scala/benchmarks/Step.scala | Scala | apache-2.0 | 300 |
package ca.hyperreal.scalgorithms
/**
 * A machine-word integer: wraps a primitive Long and exposes it as an
 * ordered ring. Arithmetic wraps on overflow, exactly like the underlying Long.
 */
class MachineInt(val a: Long) extends OrderedRing[MachineInt] {
  def isZero = a == 0
  def isOne = a == 1

  def +(that: MachineInt) = MachineInt(a + that.a)
  def *(that: MachineInt) = MachineInt(a * that.a)

  /** Right-associative scaling by a plain Int. */
  def *:(x: Int) = MachineInt(a * x)

  /** Subtraction, defined as addition of the negation. */
  def -(that: MachineInt) = this + -that

  /** Division, defined as multiplication by the inverse (units only). */
  def /(that: MachineInt) = this * that.inv

  def unary_- = MachineInt(-a)

  /** Conjugation is the identity on integers. */
  def conj: MachineInt = this

  /** Multiplicative inverse; only the units 1 and -1 have one (each is self-inverse). */
  def inv =
    if (isUnit) this
    else throw new Exception("no inverse: " + a)

  def isUnit = a == 1 || a == -1
  def isUnital = true

  def compare(that: MachineInt) = a.compare(that.a)
  def isPositive = a > 0
  def abs = if (a < 0) -this else this

  override def toString = a.toString
}
/** Factory methods and constants for [[MachineInt]]. */
object MachineInt {
  /** Additive identity. */
  val ZERO = new MachineInt(0)

  /** Multiplicative identity. */
  val ONE = new MachineInt(1)

  def apply(a: Long) = new MachineInt(a)

  /** Lets plain Ints be used wherever a MachineInt is expected. */
  implicit def int2MachineInt(a: Int): MachineInt = new MachineInt(a)
}
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package xml
package factory
import scala.collection.Seq
/**
 * Factory for XML nodes that hash-conses created elements: structurally
 * equal nodes (same prefix, label, attributes and reference-equal children)
 * are shared via an internal cache keyed by a structural hash.
 */
trait NodeFactory[A <: Node] {
  val ignoreComments = false
  val ignoreProcInstr = false

  /* default behaviour is to use hash-consing */
  val cache = new scala.collection.mutable.HashMap[Int, List[A]]

  /** Builds a brand-new node; implemented by concrete factories. */
  protected def create(pre: String, name: String, attrs: MetaData, scope: NamespaceBinding, children: Seq[Node]): A

  /** Creates a node, prepends it to the cache bucket for `hash`, and returns it. */
  protected def construct(hash: Int, old: List[A], pre: String, name: String, attrSeq: MetaData, scope: NamespaceBinding, children: Seq[Node]): A = {
    val node = create(pre, name, attrSeq, scope, children)
    cache.update(hash, node :: old)
    node
  }

  /** Element-wise reference equality of two child sequences (padding with null on length mismatch). */
  def eqElements(ch1: Seq[Node], ch2: Seq[Node]): Boolean =
    ch1.view.zipAll(ch2.view, null, null).forall { case (a, b) => a eq b }

  /** True when `n` already has exactly the requested shape (scope is not compared). */
  def nodeEquals(n: Node, pre: String, name: String, attrSeq: MetaData, scope: NamespaceBinding, children: Seq[Node]): Boolean =
    n.prefix == pre &&
      n.label == name &&
      n.attributes == attrSeq &&
      // scope?
      eqElements(n.child, children)

  /** Returns a cached structurally-equal node when one exists; otherwise creates and caches one. */
  def makeNode(pre: String, name: String, attrSeq: MetaData, scope: NamespaceBinding, children: Seq[Node]): A = {
    val hash = Utility.hashCode(pre, name, attrSeq.##, scope.##, children)
    val bucket = cache.getOrElse(hash, Nil)
    bucket.find(nodeEquals(_, pre, name, attrSeq, scope, children)) match {
      case Some(existing) => existing
      case None => construct(hash, bucket, pre, name, attrSeq, scope, children)
    }
  }

  def makeText(s: String): Text = Text(s)

  def makePCData(s: String): PCData = PCData(s)

  def makeComment(s: String): Seq[Comment] =
    if (ignoreComments) Nil else Comment(s) :: Nil

  def makeProcInstr(t: String, s: String): Seq[ProcInstr] =
    if (ignoreProcInstr) Nil else ProcInstr(t, s) :: Nil
}
| scala/scala-xml | shared/src/main/scala/scala/xml/factory/NodeFactory.scala | Scala | apache-2.0 | 2,195 |
package org.jetbrains.plugins.hocon.formatting
import java.io.File
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.command.CommandProcessor
import com.intellij.openapi.util.JDOMUtil
import com.intellij.openapi.util.io.FileUtil
import com.intellij.psi.codeStyle.{CodeStyleManager, CodeStyleSettingsManager}
import org.jetbrains.plugins.hocon.CommonUtil.TextRange
import org.jetbrains.plugins.scala.testcases.ScalaFileSetTestCase
import org.jetbrains.plugins.scala.util.TestUtils
import org.junit.Assert
// File-set test: each data file under hocon/formatter/data contains three
// sections separated by a run of 5+ dashes: code-style settings XML, the
// input HOCON, and the expected formatted output.
class HoconFormatterTest extends ScalaFileSetTestCase(TestUtils.getTestDataPath + "/hocon/formatter/data") {
  protected def runTest(file: File) = {
    import org.jetbrains.plugins.hocon.HoconTestUtils._

    // Normalize line endings so expectations are platform-independent.
    val fileContents = new String(FileUtil.loadFileText(file, "UTF-8")).replaceAllLiterally("\\r", "")
    val Array(settingsXml, input, expectedResult) = fileContents.split("-{5,}", 3).map(_.trim)

    // Apply the per-test code style settings before formatting.
    val settings = CodeStyleSettingsManager.getSettings(getProject)
    settings.readExternal(JDOMUtil.loadDocument(settingsXml).getRootElement)
    val psiFile = createPseudoPhysicalHoconFile(getProject, input)

    // Reformat the whole file inside a write action.
    // NOTE(review): whether the try-block is lazily wrapped into a Runnable
    // (SAM conversion) or evaluated eagerly — and thus whether the reformat
    // really runs inside runWriteAction/executeCommand — depends on the Scala
    // version's SAM handling; verify.
    def reformatAction() = ApplicationManager.getApplication.runWriteAction(try {
      val TextRange(start, end) = psiFile.getTextRange
      CodeStyleManager.getInstance(getProject).reformatText(psiFile, start, end)
    } catch {
      case e: Exception => e.printStackTrace()
    })
    CommandProcessor.getInstance.executeCommand(getProject, reformatAction(), null, null)

    Assert.assertEquals(expectedResult, psiFile.getText.replaceAllLiterally("\\r", ""))
  }
}
| triggerNZ/intellij-scala | test/org/jetbrains/plugins/hocon/formatting/HoconFormatterTest.scala | Scala | apache-2.0 | 1,635 |
package web
import core.{CoreActors, Core}
import api.Api
import akka.io.IO
import spray.can.Http
/**
* Provides the web server (spray-can) for the REST api in ``Api``, using the actor system
* defined in ``Core``.
*
* You may sometimes wish to construct separate ``ActorSystem`` for the web server machinery.
* However, for this simple application, we shall use the same ``ActorSystem`` for the
* entire application.
*
* Benefits of separate ``ActorSystem`` include the ability to use completely different
* configuration, especially when it comes to the threading model.
*/
trait Web {
  this: Api with CoreActors with Core =>

  // Boots the spray-can HTTP server, binding the REST routes from ``Api``
  // on all interfaces at port 80 (binding port 80 typically requires
  // elevated privileges).
  IO(Http)(system) ! Http.Bind(rootService, "0.0.0.0", port = 80)
}
| chidakiyo/perf-spray-client | src/main/scala/web/Web.scala | Scala | apache-2.0 | 711 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.fpm
import scala.collection.JavaConverters._
import scala.reflect.ClassTag
import org.apache.spark.annotation.Since
import org.apache.spark.api.java.JavaRDD
import org.apache.spark.api.java.JavaSparkContext.fakeClassTag
import org.apache.spark.internal.Logging
import org.apache.spark.mllib.fpm.AssociationRules.Rule
import org.apache.spark.mllib.fpm.FPGrowth.FreqItemset
import org.apache.spark.rdd.RDD
/**
* Generates association rules from a `RDD[FreqItemset[Item]]`. This method only generates
* association rules which have a single item as the consequent.
*
*/
@Since("1.5.0")
class AssociationRules private[fpm] (
    private var minConfidence: Double) extends Logging with Serializable {

  /**
   * Constructs a default instance with default parameters {minConfidence = 0.8}.
   */
  @Since("1.5.0")
  def this() = this(0.8)

  /**
   * Sets the minimal confidence (default: `0.8`).
   */
  @Since("1.5.0")
  def setMinConfidence(minConfidence: Double): this.type = {
    require(minConfidence >= 0.0 && minConfidence <= 1.0,
      s"Minimal confidence must be in range [0, 1] but got ${minConfidence}")
    this.minConfidence = minConfidence
    this
  }

  /**
   * Computes the association rules with confidence above `minConfidence`.
   * @param freqItemsets frequent itemset model obtained from [[FPGrowth]]
   * @return a `RDD[Rule[Item]]` containing the association rules.
   *
   */
  @Since("1.5.0")
  def run[Item: ClassTag](freqItemsets: RDD[FreqItemset[Item]]): RDD[Rule[Item]] = {
    // No per-item support is provided, so the produced rules cannot compute lift.
    run(freqItemsets, Map.empty[Item, Double])
  }

  /**
   * Computes the association rules with confidence above `minConfidence`.
   * @param freqItemsets frequent itemset model obtained from [[FPGrowth]]
   * @param itemSupport map containing an item and its support
   * @return a `RDD[Rule[Item]]` containing the association rules. The rules will be able to
   *         compute also the lift metric.
   */
  @Since("2.4.0")
  def run[Item: ClassTag](freqItemsets: RDD[FreqItemset[Item]],
      itemSupport: scala.collection.Map[Item, Double]): RDD[Rule[Item]] = {
    // For candidate rule X => Y, generate (X, (Y, freq(X union Y)))
    // Each itemset of size n yields n candidates — one per choice of a
    // single-item consequent; size-1 itemsets yield none (empty antecedent).
    val candidates = freqItemsets.flatMap { itemset =>
      val items = itemset.items
      items.flatMap { item =>
        items.partition(_ == item) match {
          case (consequent, antecedent) if !antecedent.isEmpty =>
            Some((antecedent.toSeq, (consequent.toSeq, itemset.freq)))
          case _ => None
        }
      }
    }

    // Join to get (X, ((Y, freq(X union Y)), freq(X))), generate rules, and filter by confidence
    candidates.join(freqItemsets.map(x => (x.items.toSeq, x.freq)))
      .map { case (antecendent, ((consequent, freqUnion), freqAntecedent)) =>
        new Rule(antecendent.toArray,
          consequent.toArray,
          freqUnion,
          freqAntecedent,
          // the consequent contains always only one element
          itemSupport.get(consequent.head))
      }.filter(_.confidence >= minConfidence)
  }

  /**
   * Java-friendly version of `run`.
   */
  @Since("1.5.0")
  def run[Item](freqItemsets: JavaRDD[FreqItemset[Item]]): JavaRDD[Rule[Item]] = {
    val tag = fakeClassTag[Item]
    run(freqItemsets.rdd)(tag)
  }
}
@Since("1.5.0")
object AssociationRules {

  /**
   * An association rule between sets of items.
   * @param antecedent hypotheses of the rule. Java users should call [[Rule#javaAntecedent]]
   *                   instead.
   * @param consequent conclusion of the rule. Java users should call [[Rule#javaConsequent]]
   *                   instead.
   * @tparam Item item type
   *
   */
  @Since("1.5.0")
  class Rule[Item] private[fpm] (
      @Since("1.5.0") val antecedent: Array[Item],
      @Since("1.5.0") val consequent: Array[Item],
      private[spark] val freqUnion: Double,
      freqAntecedent: Double,
      freqConsequent: Option[Double]) extends Serializable {

    /**
     * Returns the confidence of the rule.
     *
     */
    @Since("1.5.0")
    def confidence: Double = freqUnion / freqAntecedent

    /**
     * Returns the lift of the rule.
     */
    @Since("2.4.0")
    def lift: Option[Double] = freqConsequent.map(fCons => confidence / fCons)

    // Constructor-time sanity check: a valid rule must have a disjoint
    // antecedent and consequent. The intersection is computed once and reused
    // in the failure message (previously it was recomputed when the check failed).
    {
      val sharedItems = antecedent.toSet.intersect(consequent.toSet)
      require(sharedItems.isEmpty,
        s"A valid association rule must have disjoint antecedent and " +
          s"consequent but ${sharedItems} is present in both.")
    }

    /**
     * Returns antecedent in a Java List.
     *
     */
    @Since("1.5.0")
    def javaAntecedent: java.util.List[Item] = {
      antecedent.toList.asJava
    }

    /**
     * Returns consequent in a Java List.
     *
     */
    @Since("1.5.0")
    def javaConsequent: java.util.List[Item] = {
      consequent.toList.asJava
    }

    override def toString: String = {
      s"${antecedent.mkString("{", ",", "}")} => " +
        s"${consequent.mkString("{", ",", "}")}: (confidence: $confidence; lift: $lift)"
    }
  }
}
| dbtsai/spark | mllib/src/main/scala/org/apache/spark/mllib/fpm/AssociationRules.scala | Scala | apache-2.0 | 5,852 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples.mllib
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
// $example on$
import org.apache.spark.mllib.feature.PCA
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.{LabeledPoint, LinearRegressionWithSGD}
// $example off$
@deprecated("Deprecated since LinearRegressionWithSGD is deprecated. Use ml.feature.PCA", "2.0.0")
object PCAExample {
  /**
   * Loads the LPSA regression dataset, trains linear regression models on the
   * raw features and on PCA-projected features, and prints both test-set MSEs.
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("PCAExample")
    val sc = new SparkContext(conf)

    // $example on$
    // Each line is "label,feature1 feature2 ...".
    val data = sc.textFile("data/mllib/ridge-data/lpsa.data").map { line =>
      val parts = line.split(',')
      LabeledPoint(parts(0).toDouble, Vectors.dense(parts(1).split(' ').map(_.toDouble)))
    }.cache()

    val splits = data.randomSplit(Array(0.6, 0.4), seed = 11L)
    val training = splits(0).cache()
    val test = splits(1)

    // Fit PCA on the training split only. The previous version fitted on the
    // full dataset, which leaks test-set information into the projection.
    val pca = new PCA(training.first().features.size / 2).fit(training.map(_.features))
    val training_pca = training.map(p => p.copy(features = pca.transform(p.features)))
    val test_pca = test.map(p => p.copy(features = pca.transform(p.features)))

    val numIterations = 100
    val model = LinearRegressionWithSGD.train(training, numIterations)
    val model_pca = LinearRegressionWithSGD.train(training_pca, numIterations)

    // Score both models on their respective test representations.
    val valuesAndPreds = test.map { point =>
      val score = model.predict(point.features)
      (score, point.label)
    }

    val valuesAndPreds_pca = test_pca.map { point =>
      val score = model_pca.predict(point.features)
      (score, point.label)
    }

    val MSE = valuesAndPreds.map { case (v, p) => math.pow((v - p), 2) }.mean()
    val MSE_pca = valuesAndPreds_pca.map { case (v, p) => math.pow((v - p), 2) }.mean()

    println(s"Mean Squared Error = $MSE")
    println(s"PCA Mean Squared Error = $MSE_pca")
    // $example off$

    sc.stop()
  }
}
| bravo-zhang/spark | examples/src/main/scala/org/apache/spark/examples/mllib/PCAExample.scala | Scala | apache-2.0 | 2,784 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.optimize
import cc.factorie.la._
import cc.factorie.model.{WeightsMap, WeightsSet}
import cc.factorie.util.{DenseDoubleSeq, DoubleSeq, RangeIntSeq}
/**
* This implements an efficient version of the Pegasos SGD algorithm for l2-regularized hinge loss
* it won't necessarily work with other losses because of the aggressive projection steps
* note that adding a learning rate here is nontrivial since the update relies on baseRate / step < 1.0 to avoid zeroing the weights
* but if I don't add a rate <1 here this optimizer does terribly in my tests -luke
* @param baseRate The base learning rate
* @param l2 The l2 regularization constant
*/
class Pegasos(baseRate: Double = 0.1, l2: Double = 0.01) extends GradientOptimizer {
  // 1-based index of the next update, used in the Pegasos step-size schedule.
  private var t = 1
  var initialized = false

  def step(weights: WeightsSet, gradient: WeightsMap, value: Double): Unit = {
    if (!initialized) {
      initializeWeights(weights)
      initialized = true
    }
    // Before the very first update, project the initial weights into the
    // feasible region ||w|| <= 1/sqrt(l2) assumed by the Pegasos analysis.
    if (t == 1 && weights.twoNorm > 1.0 / math.sqrt(l2)) {
      weights *= 1.0 / (weights.twoNorm * math.sqrt(l2))
    }
    // Step size decays as 1/t: shrink, take the gradient step, then
    // re-project onto the ball of radius 1/sqrt(l2).
    val rate = baseRate / (l2 * t)
    weights *= (1.0 - rate * l2)
    weights += (gradient, rate)
    weights *= math.min(1.0, (1.0 / math.sqrt(l2)) / weights.twoNorm)
    t += 1
  }

  def initializeWeights(weights: WeightsSet) =
    if (!initialized) MutableScalableWeights.initializeWeights(weights, cacheTwoNormSq = true)

  def finalizeWeights(weights: WeightsSet) = MutableScalableWeights.finalizeWeights(weights)

  // can we get a good convergence criterion here? since it's not regular sgd, I feel like yes?
  def isConverged = false

  def reset(): Unit = {
    t = 1
  }
}
/**
 * Tensors supporting O(1) scalar multiplication: element values are stored
 * pre-divided by a shared `multiplier`, so `*=` only updates the multiplier.
 * Can optionally maintain the squared two-norm incrementally (used by
 * Pegasos for its projection step).
 */
object MutableScalableWeights {
  /** Replaces each weight tensor in `weights` with a scalable tensor of the same shape. */
  def initializeWeights(weights: WeightsSet, cacheTwoNormSq: Boolean = false): Unit = {
    for (key <- weights.keys) {
      key.value match {
        case t: Tensor1 => weights(key) = new MutableScaledTensor1(t.length, cacheTwoNormSq)
        case t: Tensor2 => weights(key) = new MutableScaledTensor2(t.dim1, t.dim2, cacheTwoNormSq)
        case t: Tensor3 => weights(key) = new MutableScaledTensor3(t.dim1, t.dim2, t.dim3, cacheTwoNormSq)
        case t: Tensor4 => weights(key) = new MutableScaledTensor4(t.dim1, t.dim2, t.dim3, t.dim4, cacheTwoNormSq)
      }
    }
  }

  /** Converts the scalable tensors back to plain dense tensors, folding in the multiplier. */
  def finalizeWeights(weights: WeightsSet): Unit =
    for (key <- weights.keys) {
      val scaledTensor = key.value
      weights(key) = key.newBlankTensor
      scaledTensor.foreachElement((i, v) => key.value(i) = v)
    }

  // Base implementation. Invariant: logical value at i == _values(i) * multiplier.
  abstract class MutableScaledTensor(cacheTwoNormSq: Boolean) extends Tensor with DenseDoubleSeq {
    // Incrementally maintained squared two-norm; only meaningful when
    // cacheTwoNormSq is true (updated by `update` and `+=` below).
    private var cachedTwoNormSq: Double = 0.0
    def activeDomain = new RangeIntSeq(0, length)
    def activeDomainSize = activeDomain.length
    def forallActiveElements(f: (Int,Double) => Boolean) = forallElements(f)
    protected val _values = Array.fill(length)(0.0)
    var multiplier = 1.0
    // When |multiplier| drops below this, it is folded into _values
    // (applyMultiplier) to preserve numeric precision.
    var tolerance = 0.00001
    override def twoNormSquared: Double = {
      // Cached path returns the incrementally maintained value directly.
      if (cacheTwoNormSq) return cachedTwoNormSq
      val myValues = _values
      val myMultiplier = multiplier
      var normSq = 0.0
      var i = 0
      while (i < myValues.length) {
        normSq += myValues(i) * myValues(i)
        i += 1
      }
      normSq * myMultiplier * myMultiplier
    }
    override def update(i: Int, v: Double): Unit = {
      val myValues = _values
      val myMultiplier = multiplier
      if (cacheTwoNormSq) {
        // Replace the old element's contribution with the new one.
        val oldValue = myValues(i) * myMultiplier
        cachedTwoNormSq -= oldValue * oldValue
        cachedTwoNormSq += v * v
      }
      // Store the value pre-divided so that apply() can re-multiply.
      myValues(i) = v / myMultiplier
    }
    override def apply(i: Int): Double = _values(i) * multiplier
    // NOTE(review): neither branch updates cachedTwoNormSq, so when
    // cacheTwoNormSq is true the cached norm appears stale after scaling
    // (true normSq scales by f^2) or zeroing — confirm intended usage.
    override def *=(f: Double): Unit = {
      if (f == 0.0) zero()
      else multiplier *= f
      if (math.abs(multiplier) < tolerance) applyMultiplier()
    }
    // Axpy: this += ds * factor, specialized per sparse/dense representation;
    // keeps cachedTwoNormSq in sync element by element.
    override def +=(ds: DoubleSeq, factor: Double) {
      val myValues = _values
      val myMultiplier = multiplier
      ds match {
        case o: SparseIndexedTensor =>
          val len = o._unsafeActiveDomainSize
          val indices = o._indices
          val values = o._values
          var i = 0
          while (i < len) {
            val idx = indices(i)
            val oldValue = myValues(idx) * myMultiplier
            val newValue = oldValue + values(i) * factor
            if (cacheTwoNormSq) {
              cachedTwoNormSq -= oldValue * oldValue
              cachedTwoNormSq += newValue * newValue
            }
            myValues(idx) = newValue / myMultiplier
            i += 1
          }
        case o: SparseBinaryTensor =>
          // Binary tensor: each active index contributes exactly `factor`.
          val len = o._unsafeActiveDomainSize
          val indices = o._indices
          var i = 0
          while (i < len) {
            val idx = indices(i)
            val oldValue = myValues(idx) * myMultiplier
            val newValue = oldValue + factor
            if (cacheTwoNormSq) {
              cachedTwoNormSq -= oldValue * oldValue
              cachedTwoNormSq += newValue * newValue
            }
            myValues(idx) = newValue / myMultiplier
            i += 1
          }
        case o: DenseTensor =>
          val arr = o.asArray
          var i = 0
          while (i < arr.length) {
            val oldValue = myValues(i) * myMultiplier
            val newValue = oldValue + arr(i) * factor
            if (cacheTwoNormSq) {
              cachedTwoNormSq -= oldValue * oldValue
              cachedTwoNormSq += newValue * newValue
            }
            myValues(i) = newValue / myMultiplier
            i += 1
          }
        case _ => throw new Error("ScaledTensor can't yet handle += from" + ds.getClass.getName)
      }
    }
    override def dot(ds: DoubleSeq) = {
      val myValues = _values
      val myMultiplier = multiplier
      var res = 0.0
      ds.foreachActiveElement((i, x) => res += myValues(i) * myMultiplier * x)
      res
    }
    def copy: Tensor = throw new Error("Method copy not defined on MutableScaledTensor")
    def blankCopy: Tensor = throw new Error("Method blankCopy not defined on MutableScaledTensor")
    def +=(i: Int, v: Double): Unit = update(i, v + apply(i))
    // Resets all elements and the multiplier.
    // NOTE(review): cachedTwoNormSq is not reset here — stale when
    // cacheTwoNormSq is true; confirm.
    def zero(): Unit = {
      for (i <- 0 until length) { _values(i) = 0 }
      multiplier = 1.0
    }
    // Copies the logical (multiplier-applied) values into a dense tensor.
    protected def copyTo[D <: DenseTensor](c: D): D = {
      val cArr = c.asArray
      val myValues = _values
      val myMultiplier = multiplier
      var i = 0
      while (i < myValues.length) {
        cArr(i) = myValues(i) * myMultiplier
        i += 1
      }
      c
    }
    // Folds the multiplier into the stored values and resets it to 1.
    private def applyMultiplier(): Unit = {
      var i = 0
      val myValues = _values
      val myMultiplier = multiplier
      while (i < myValues.length) {
        myValues(i) *= myMultiplier
        i += 1
      }
      multiplier = 1.0
    }
  }
  private class MutableScaledTensor1(val dim1: Int, val cacheTwoNormSq: Boolean) extends MutableScaledTensor(cacheTwoNormSq) with Tensor1 {
    def isDense = false
    override def copy = copyTo(new DenseTensor1(dim1))
  }
  // Row-major matrix: element (row, col) is stored at index row * dim2 + col.
  private class MutableScaledTensor2(val dim1: Int, val dim2: Int, val cacheTwoNormSq: Boolean) extends MutableScaledTensor(cacheTwoNormSq) with Tensor2 {
    def activeDomain1 = new RangeIntSeq(0, dim1)
    def activeDomain2 = new RangeIntSeq(0, dim2)
    def isDense = false
    override def copy = copyTo(new DenseTensor2(dim1, dim2))
    // Computes t^T * this (length-dim2 result), specialized per input representation.
    override def leftMultiply(t: Tensor1): Tensor1 = {
      val myValues = _values
      val myMultiplier = multiplier
      assert(dim1 == t.dim1, "Dimensions don't match: " + dim1 + " " + t.dim1)
      val myDim2 = dim2
      val newT = new DenseTensor1(dim2)
      val newArray = newT.asArray
      t match {
        case t: DenseTensor =>
          val tArr = t.asArray
          var row = 0
          while (row < tArr.length) {
            val v = tArr(row)
            val offset = row * myDim2
            var col = 0
            while (col < myDim2) {
              newArray(col) += (myValues(offset + col) * myMultiplier * v)
              col += 1
            }
            row += 1
          }
        case t: SparseBinaryTensor =>
          // Binary input: active rows contribute with weight 1.
          val tActiveDomainSize = t.activeDomainSize
          val tIndices = t._indices
          var ti = 0
          while (ti < tActiveDomainSize) {
            val row = tIndices(ti)
            val offset = row * myDim2
            var col = 0
            while (col < myDim2) {
              newArray(col) += myValues(offset + col) * myMultiplier
              col += 1
            }
            ti += 1
          }
        case t: SparseIndexedTensor =>
          val tActiveDomainSize = t.activeDomainSize
          val tIndices = t._indices
          val tValues = t._values
          var ti = 0
          while (ti < tActiveDomainSize) {
            val row = tIndices(ti)
            val offset = row * myDim2
            val v = tValues(ti)
            var col = 0
            while (col < myDim2) {
              newArray(col) += (myValues(offset + col) * v * myMultiplier)
              col += 1
            }
            ti += 1
          }
        case _ =>
          throw new Error("tensor type neither dense nor sparse: " + t.getClass.getName)
      }
      newT
    }
    // Computes this * t (length-dim1 result), specialized per input representation.
    override def *(t: Tensor1): Tensor1 = {
      assert(dim2 == t.dim1, "Dimensions don't match: " + dim2 + " " + t.dim1)
      val myValues = _values
      val myMultiplier = multiplier
      val newT = new DenseTensor1(dim1)
      val newArray = newT.asArray
      t match {
        case t: DenseTensor =>
          val tArr = t.asArray
          var col = 0
          while (col < tArr.length) {
            val v = tArr(col)
            var row = 0
            while (row < dim1) {
              val offset = row * dim2
              newArray(row) += (myValues(offset + col) * myMultiplier * v)
              row += 1
            }
            col += 1
          }
        case t: SparseTensor =>
          val tActiveDomainSize = t.activeDomainSize
          val tIndices = t._indices
          val tValues = t._valuesSeq
          var ti = 0
          while (ti < tActiveDomainSize) {
            val col = tIndices(ti)
            val v = tValues(ti)
            var row = 0
            while (row < dim1) {
              val offset = row * dim2
              newArray(row) += (myValues(offset + col) * myMultiplier * v)
              row += 1
            }
            ti += 1
          }
        case _ =>
          throw new Error("tensor type neither dense nor sparse: " + t.getClass.getName)
      }
      newT
    }
  }
  private class MutableScaledTensor3(val dim1: Int, val dim2: Int, val dim3: Int, val cacheTwoNormSq: Boolean) extends MutableScaledTensor(cacheTwoNormSq) with Tensor3 {
    def isDense = false
    def activeDomain1 = new RangeIntSeq(0, dim1)
    def activeDomain2 = new RangeIntSeq(0, dim2)
    def activeDomain3 = new RangeIntSeq(0, dim3)
    override def copy = copyTo(new DenseTensor3(dim1, dim2, dim3))
  }
  private class MutableScaledTensor4(val dim1: Int, val dim2: Int, val dim3: Int, val dim4: Int, val cacheTwoNormSq: Boolean) extends MutableScaledTensor(cacheTwoNormSq) with Tensor4 {
    def isDense = false
    def activeDomain1 = new RangeIntSeq(0, dim1)
    def activeDomain2 = new RangeIntSeq(0, dim2)
    def activeDomain3 = new RangeIntSeq(0, dim3)
    def activeDomain4 = new RangeIntSeq(0, dim4)
    override def copy = copyTo(new DenseTensor4(dim1, dim2, dim3, dim4))
  }
}
package skuber
import org.scalatest.{BeforeAndAfterAll, Matchers}
import org.scalatest.concurrent.Eventually
import skuber.json.format._
import scala.concurrent.duration._
import scala.concurrent.Await
import scala.util.{Failure, Success}
// Integration test against a live Kubernetes cluster (via the K8SFixture):
// exercises pod create/get/readiness/delete through the skuber client.
class PodSpec extends K8SFixture with Eventually with Matchers with BeforeAndAfterAll {
  // Random pod name so repeated runs against the same cluster don't collide.
  val nginxPodName: String = java.util.UUID.randomUUID().toString
  // Label stamped on every pod created by this suite; used for bulk cleanup.
  val defaultLabels = Map("app" -> this.suiteName)

  // Deletes all pods labelled by this suite, regardless of test outcomes.
  override def afterAll() = {
    val k8s = k8sInit
    val requirements = defaultLabels.toSeq.map { case (k, v) => LabelSelector.IsEqualRequirement(k, v) }
    val labelSelector = LabelSelector(requirements: _*)
    Await.result(k8s.deleteAllSelected[PodList](labelSelector), 5.seconds)
  }

  behavior of "Pod"

  // NOTE(review): these tests share nginxPodName and rely on running in
  // declaration order (create -> get -> readiness -> delete).
  it should "create a pod" in { k8s =>
    k8s.create(getNginxPod(nginxPodName, "1.7.9")) map { p =>
      assert(p.name == nginxPodName)
    }
  }

  it should "get the newly created pod" in { k8s =>
    k8s.get[Pod](nginxPodName) map { p =>
      assert(p.name == nginxPodName)
    }
  }

  it should "check for newly created pod and container to be ready" in { k8s =>
    // Poll for up to 100s: scheduling and image pull can take a while.
    eventually(timeout(100.seconds), interval(3.seconds)) {
      val retrievePod = k8s.get[Pod](nginxPodName)
      val podRetrieved = Await.ready(retrievePod, 2.seconds).value.get
      val podStatus = podRetrieved.get.status.get
      val nginxContainerStatus = podStatus.containerStatuses(0)
      podStatus.phase should contain(Pod.Phase.Running)
      nginxContainerStatus.name should be(nginxPodName)
      nginxContainerStatus.state.get shouldBe a[Container.Running]
      // NOTE(review): computed but never asserted below — dead value?
      val isUnschedulable = podStatus.conditions.exists { c =>
        c._type == "PodScheduled" && c.status == "False" && c.reason == Some("Unschedulable")
      }
      val isScheduled = podStatus.conditions.exists { c =>
        c._type == "PodScheduled" && c.status == "True"
      }
      val isInitialised = podStatus.conditions.exists { c =>
        c._type == "Initialized" && c.status == "True"
      }
      val isReady = podStatus.conditions.exists { c =>
        c._type == "Ready" && c.status == "True"
      }
      assert(isScheduled)
      assert(isInitialised)
      assert(isReady)
    }
  }

  it should "delete a pod" in { k8s =>
    k8s.delete[Pod](nginxPodName).map { _ =>
      // Deletion is asynchronous: poll until the GET returns a 404.
      eventually(timeout(100.seconds), interval(3.seconds)) {
        val retrievePod = k8s.get[Pod](nginxPodName)
        val podRetrieved = Await.ready(retrievePod, 2.seconds).value.get
        podRetrieved match {
          case s: Success[_] => assert(false)
          case Failure(ex) => ex match {
            case ex: K8SException if ex.status.code.contains(404) => assert(true)
            case _ => assert(false)
          }
        }
      }
    }
  }

  it should "delete selected pods" in { k8s =>
    for {
      _ <- k8s.create(getNginxPod(nginxPodName + "-foo", "1.7.9", labels = Map("foo" -> "1")))
      _ <- k8s.create(getNginxPod(nginxPodName + "-bar", "1.7.9", labels = Map("bar" -> "2")))
      _ <- k8s.deleteAllSelected[PodList](LabelSelector(LabelSelector.ExistsRequirement("foo")))
    } yield eventually(timeout(100.seconds), interval(3.seconds)) {
      // Only the "foo"-labelled pod should be gone; the "bar" pod survives.
      val retrievePods = k8s.list[PodList]()
      val podsRetrieved = Await.result(retrievePods, 2.seconds)
      val podNamesRetrieved = podsRetrieved.items.map(_.name)
      assert(!podNamesRetrieved.contains(nginxPodName + "-foo") && podNamesRetrieved.contains(nginxPodName + "-bar"))
    }
  }

  // Builds an nginx container at the given image version, exposing port 80.
  def getNginxContainer(name: String, version: String): Container = Container(name = name, image = "nginx:" + version).exposePort(80)

  // Builds a single-container nginx pod carrying the suite's default labels.
  def getNginxPod(name: String, version: String, labels: Map[String, String] = Map()): Pod = {
    val nginxContainer = getNginxContainer(name, version)
    val nginxPodSpec = Pod.Spec(containers = List((nginxContainer)))
    val podMeta=ObjectMeta(name = name, labels = labels ++ defaultLabels)
    Pod(metadata = podMeta, spec = Some(nginxPodSpec))
  }
}
| doriordan/skuber | client/src/it/scala/skuber/PodSpec.scala | Scala | apache-2.0 | 3,971 |
package debop4s.rediscala
/**
* RedisConsts
* Created by debop on 2014. 2. 22.
*/
/** Shared constants for the rediscala integration: connection defaults and encoding. */
object RedisConsts {

  /** Default Redis server port. */
  val DEFAULT_PORT: Int = 6379

  /** Default Redis Sentinel port. */
  val DEFAULT_SENTINEL_PORT: Int = 26379

  /** Default connection timeout in milliseconds. */
  val DEFAULT_TIMEOUT: Int = 2000

  /** Default Redis database index. */
  val DEFAULT_DATABASE: Int = 0

  /** Character set used when encoding keys and values. */
  val CHARSET: String = "UTF-8"

  /** Pub/sub channel used by the logback appender. */
  val DEFAULT_LOGBACK_CHANNEL: String = "channel:debop4s.rediscala.logback:logs"
}
| debop/debop4s | debop4s-rediscala/src/main/scala/debop4s/rediscala/RedisConsts.scala | Scala | apache-2.0 | 355 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.s2graph.core.types
import org.apache.hadoop.hbase.util.Bytes
// Storage-format version tags and low-level encoding constants shared by the HBase schema.
object HBaseType {
  val VERSION4 = "v4"
  val VERSION3 = "v3"
  val VERSION2 = "v2"
  val VERSION1 = "v1"
  // val DEFAULT_VERSION = VERSION2
  // Version assumed whenever callers do not pass one explicitly.
  val DEFAULT_VERSION = VERSION3
  // Sentinel property key used when a value is stored without an explicit key
  // (see HBaseDeserializable.bytesToProps).
  val EMPTY_SEQ_BYTE = Byte.MaxValue
  val DEFAULT_COL_ID = 0
  // Number of bits reserved for the edge direction in packed encodings.
  val bitsForDir = 2
  val maxBytes = Bytes.toBytes(Int.MaxValue)
  val toSeqByte = -5.toByte
  // NOTE(review): a raw null default — callers must guard against it; consider Option. Confirm usage.
  val defaultTgtVertexId = null
}
object HBaseDeserializable {
  import HBaseType._

  /**
   * Unpacks a single byte into (label index sequence, isInverted flag).
   * Bit 0 carries the inverted flag; the remaining bits carry the index sequence,
   * so at most 2^6 index sequences per label are representable.
   */
  def bytesToLabelIndexSeqWithIsInverted(bytes: Array[Byte], offset: Int): (Byte, Boolean) = {
    val packed = bytes(offset)
    val inverted = (packed & 1) != 0
    val seq = (packed >> 1).toByte
    (seq, inverted)
  }

  /**
   * Reads a length-prefixed list of (key, value) pairs starting at `offset`.
   * Layout: [count: 1 byte] then, per pair, [key: 1 byte][value: variable length].
   * Returns the decoded pairs together with the offset just past the last value.
   */
  def bytesToKeyValues(bytes: Array[Byte],
                       offset: Int,
                       length: Int,
                       version: String = DEFAULT_VERSION): (Array[(Byte, InnerValLike)], Int) = {
    var cursor = offset
    val count = bytes(cursor)
    cursor += 1
    val decoded = new Array[(Byte, InnerValLike)](count)
    for (i <- 0 until count) {
      val key = bytes(cursor)
      cursor += 1
      val (value, bytesUsed) = InnerVal.fromBytes(bytes, cursor, 0, version)
      cursor += bytesUsed
      decoded(i) = key -> value
    }
    (decoded, cursor)
  }

  /** Same layout as [[bytesToKeyValues]], but each value carries a timestamp. */
  def bytesToKeyValuesWithTs(bytes: Array[Byte],
                             offset: Int,
                             version: String = DEFAULT_VERSION): (Array[(Byte, InnerValLikeWithTs)], Int) = {
    var cursor = offset
    val count = bytes(cursor)
    cursor += 1
    val decoded = new Array[(Byte, InnerValLikeWithTs)](count)
    for (i <- 0 until count) {
      val key = bytes(cursor)
      cursor += 1
      val (value, bytesUsed) = InnerValLikeWithTs.fromBytes(bytes, cursor, 0, version)
      cursor += bytesUsed
      decoded(i) = key -> value
    }
    (decoded, cursor)
  }

  /**
   * Reads a length-prefixed list of values that were stored without keys;
   * every decoded pair is keyed with the EMPTY_SEQ_BYTE sentinel.
   */
  def bytesToProps(bytes: Array[Byte],
                   offset: Int,
                   version: String = DEFAULT_VERSION): (Array[(Byte, InnerValLike)], Int) = {
    var cursor = offset
    val count = bytes(cursor)
    cursor += 1
    val decoded = new Array[(Byte, InnerValLike)](count)
    for (i <- 0 until count) {
      val (value, bytesUsed) = InnerVal.fromBytes(bytes, cursor, 0, version)
      cursor += bytesUsed
      decoded(i) = EMPTY_SEQ_BYTE -> value
    }
    (decoded, cursor)
  }
}
object HBaseSerializable {

  /** Serializes values only (keys dropped): [count: 1 byte] followed by each value's bytes. */
  def propsToBytes(props: Seq[(Byte, InnerValLike)]): Array[Byte] = {
    val count = props.length
    assert(count < Byte.MaxValue)
    props.foldLeft(Array(count.toByte)) { case (acc, (_, value)) =>
      Bytes.add(acc, value.bytes)
    }
  }

  /** Serializes (key, value) pairs: [count: 1 byte] then [key: 1 byte][value bytes] per pair. */
  def propsToKeyValues(props: Seq[(Byte, InnerValLike)]): Array[Byte] = {
    val count = props.length
    assert(count < Byte.MaxValue)
    props.foldLeft(Array(count.toByte)) { case (acc, (key, value)) =>
      Bytes.add(acc, Array(key), value.bytes)
    }
  }

  /** As [[propsToKeyValues]], but for timestamped values. */
  def propsToKeyValuesWithTs(props: Seq[(Byte, InnerValLikeWithTs)]): Array[Byte] = {
    val count = props.length
    assert(count < Byte.MaxValue)
    props.foldLeft(Array(count.toByte)) { case (acc, (key, value)) =>
      Bytes.add(acc, Array(key), value.bytes)
    }
  }

  /** Packs the label index sequence and inverted flag into one byte (flag in bit 0). */
  def labelOrderSeqWithIsInverted(labelOrderSeq: Byte, isInverted: Boolean): Array[Byte] = {
    assert(labelOrderSeq < (1 << 6))
    val invertedBit = if (isInverted) 1 else 0
    Array(((labelOrderSeq << 1) | invertedBit).toByte)
  }
}
/** A value that knows how to serialize itself into the HBase byte representation. */
trait HBaseSerializable {
  def bytes: Array[Byte]
}
/** Factory-side counterpart of [[HBaseSerializable]]: decodes a value from bytes. */
trait HBaseDeserializable {
  import HBaseType._
  // Second tuple element is the number of bytes consumed; callers add it to their
  // cursor (see e.g. HBaseDeserializable.bytesToKeyValues).
  def fromBytes(bytes: Array[Byte],
                offset: Int,
                len: Int,
                version: String = DEFAULT_VERSION): (HBaseSerializable, Int)
  // def fromBytesWithIndex(bytes: Array[Byte],
  //                        offset: Int,
  //                        len: Int,
  //                        version: String = DEFAULT_VERSION): (HBaseSerializable, Int)
  // Helper for implementations that reject unknown storage-format versions.
  def notSupportedEx(version: String) = new RuntimeException(s"not supported version, $version")
}
/** Variant of [[HBaseDeserializable]] whose decoding differs for vertex ids. */
trait HBaseDeserializableWithIsVertexId {
  import HBaseType._
  // Second tuple element is the number of bytes consumed by the decode.
  def fromBytes(bytes: Array[Byte],
                offset: Int,
                len: Int,
                version: String = DEFAULT_VERSION,
                isVertexId: Boolean = false): (HBaseSerializable, Int)
  // Helper for implementations that reject unknown storage-format versions.
  def notSupportedEx(version: String) = new RuntimeException(s"not supported version, $version")
}
| jongwook/incubator-s2graph | s2core/src/main/scala/org/apache/s2graph/core/types/HBaseType.scala | Scala | apache-2.0 | 5,485 |
package ninja.fangs.github
import org.eclipse.egit.github.core.Repository
import org.eclipse.egit.github.core.client.GitHubClient
import org.eclipse.egit.github.core.service.{IssueService, RepositoryService}
import scalafx.collections.ObservableBuffer
import scalafx.geometry.{Insets, Pos}
import scalafx.scene.Scene
import scalafx.scene.control._
import scalafx.scene.layout.{VBox, Priority}
import scala.collection.JavaConversions._
import scalafx.Includes._
import scalafx.util.StringConverter
/**
 * Scene showing a drop-down of the authenticated user's repositories; selecting a
 * repository fetches its issues and lists their titles below the drop-down.
 */
class RepoScene(client: GitHubClient, repos: RepositoryService) extends Scene {
  stylesheets = Seq("caspian.css")
  // Issue titles for the currently selected repository; repopulated on each selection change.
  val issuesList = new VBox {
    content = Seq()
  }
  val reposList = ObservableBuffer[Repository](repos.getRepositories)
  val repoDropDown = new ChoiceBox[Repository](reposList){
    alignmentInParent = Pos.TOP_LEFT
    // Render repositories by name instead of their default toString.
    converter = StringConverter.toStringConverter[Repository](repo => repo.getName)
    selectionModel().selectedItem.onChange((value, _, _) => {
      // NOTE(review): this handler performs a synchronous GitHub API call, presumably on
      // the UI thread, and assumes `value.value` is non-null (true only while a selection
      // is never cleared) — confirm both before shipping.
      val issueClient = new IssueService(client)
      val issues = issueClient.getIssues(value.value, Map[String, String]())
      issuesList.content = issues.map(issue => new Label(issue.getTitle))
    })
  }
  root = new VBox {
    content = Seq(repoDropDown, issuesList)
    spacing = 10
    padding = Insets(20)
    vgrow = Priority.ALWAYS
    hgrow = Priority.ALWAYS
  }
}
| kfang/scalafx-github | src/main/scala/ninja/fangs/github/RepoScene.scala | Scala | gpl-3.0 | 1,364 |
/*
* Copyright 2010-2011 Vilius Normantas <code@norma.lt>
*
* This file is part of Crossbow library.
*
* Crossbow is free software: you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* Crossbow is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with Crossbow. If not,
* see <http://www.gnu.org/licenses/>.
*/
/*package lt.norma.crossbow.core
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Duration;
class TimeBarSplitter(val barSize: Duration, var sessionOpenTime: Option[DateTime] = None)
extends DataNode {
def dependencies = Empty
var previousBarTime = new DateTime(0)
var firstBar = true
def receive = {
case so @ SessionOpen(time) =>
sessionOpenTime = Some(time)
case data: Data if(sessionOpenTime.isDefined) =>
val periodCount = (data.marketTime.getMillis - sessionOpenTime.getMillis) / barSize.getMillis
val barTime = new DateTime()
zeroMoment + periodCount * barSize.getMillis
if(firstBar) {
//previousBarTime =
}
}
/*
def receive = {
case data: Data =>
// On data message, check for bar open/close events and forward the data message
checkBar(data.marketTime)
dispatch(data)
case m =>
// Forward all other messages
dispatch(m)
}
def checkBar(time: DateTime) {
val barTime = zeroMoment + periodCount * barSize.getMillis
if(firstBar) {
previousBarTime = barTime
firstBar = false
dispatch(BarOpen(new DateTime(barTime, timeZone)))
} else if(barTime != previousBarTime) {
dispatch(BarClose(new DateTime(previousBarTime + barSize, timeZone)))
dispatch(BarOpen(new DateTime(barTime, timeZone)))
previousBarTime = barTime;
}
}*/
}*/
// TODO
| ViliusN/Crossbow | crossbow-core/src/lt/norma/crossbow/core/TimeBarSplitter.scala | Scala | gpl-3.0 | 2,186 |
import Macros.*
// Negative compilation test: the compiler is expected to reject each line marked
// with `// error` (the markers are consumed by the test harness — do not alter them).
object Test {
  def main(args: Array[String]): Unit = {
    val a: String = defaultOf("int") // error
    val b: Int = defaultOf("string") // error
  }
}
| lampepfl/dotty | tests/neg-macros/quote-whitebox/Test_2.scala | Scala | apache-2.0 | 171 |
package com.twitter.finagle.loadbalancer.aperture
import com.twitter.util.Closable
import org.scalacheck.Gen
import org.scalatest.FunSuite
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
class ProcessCoordinateTest extends FunSuite with ScalaCheckDrivenPropertyChecks {
  import ProcessCoordinate._

  test("update coordinate") {
    var coordinate: Option[Coord] = None
    val closable = ProcessCoordinate.changes.respond(coordinate = _)
    try {
      ProcessCoordinate.setCoordinate(2, 10)
      assert(coordinate.isDefined)
      assert(ProcessCoordinate() == coordinate)
      ProcessCoordinate.unsetCoordinate()
      assert(coordinate.isEmpty)
    } finally {
      // Close the subscription so this test does not leak a listener into the
      // process-global ProcessCoordinate state observed by other tests.
      closable.close()
    }
  }

  test("setCoordinate") {
    intercept[Exception] { ProcessCoordinate.setCoordinate(0, 0) }
    try {
      val numInstances = 10
      ProcessCoordinate.setCoordinate(1, numInstances)
      val coord0 = ProcessCoordinate()
      ProcessCoordinate.setCoordinate(2, numInstances)
      val coord1 = ProcessCoordinate()
      assert(coord0.isDefined)
      assert(coord1.isDefined)
      assert(coord0 != coord1)
    } finally {
      // Reset process-global state so later tests start from a known baseline.
      ProcessCoordinate.unsetCoordinate()
    }
  }

  private[this] val IdAndCount = for {
    count <- Gen.choose[Int](1, Int.MaxValue)
    id <- Gen.choose[Int](0, count - 1)
  } yield id -> count

  test("setCoordinate range") {
    try {
      ProcessCoordinate.setCoordinate(0, 1)
      val sample = ProcessCoordinate()
      assert(sample.isDefined)
      assert(1.0 - sample.get.unitWidth <= 1e-6)
      forAll(IdAndCount) {
        case (instanceId, numInstances) =>
          ProcessCoordinate.setCoordinate(instanceId, numInstances)
          val sample = ProcessCoordinate()
          assert(sample.isDefined)
          val offset = sample.get.offset
          val width = sample.get.unitWidth
          assert(offset >= 0 && offset < 1.0)
          assert(width > 0 && width <= 1.0)
      }
    } finally {
      ProcessCoordinate.unsetCoordinate()
    }
  }

  test("apply returns the most current value") {
    val closables = (1 to 100).map { i =>
      val closable = ProcessCoordinate.changes.respond { coord =>
        assert(ProcessCoordinate() == coord)
      }
      ProcessCoordinate.setCoordinate(2, i)
      closable
    }
    // Cleanup global state: close all subscriptions and clear the coordinate.
    Closable.all(closables: _*).close()
    ProcessCoordinate.unsetCoordinate()
  }
}
| luciferous/finagle | finagle-core/src/test/scala/com/twitter/finagle/loadbalancer/aperture/ProcessCoordinateTest.scala | Scala | apache-2.0 | 2,125 |
package gapt.expr.formula.fol
import gapt.expr.Expr
trait FOLPartialTerm extends Expr {
private[expr] def numberOfArguments: Int
}
| gapt/gapt | core/src/main/scala/gapt/expr/formula/fol/FOLPartialTerm.scala | Scala | gpl-3.0 | 135 |
// Two methods that each capture a distinct local `g` inside a local class; the
// companion Test prints A1's declared methods, so the synthetic members the
// compiler lifts out of these bodies are part of the expected golden output.
class A1 {
  def f1 = { def g = 1 ; class A { def a = g } ; new A().a }
  def f2 = { def g = 2 ; class A { def a = g } ; new A().a }
}
object Test extends App {
  // Print A1's declared methods sorted (for a stable golden output), then the
  // values produced by the two local-class captures (1 and 2).
  println(classOf[A1].getDeclaredMethods.map(_.toString).sorted.mkString("\n"))
  println(new A1().f1)
  println(new A1().f2)
}
| lampepfl/dotty | tests/pending/run/t5652c/t5652c.scala | Scala | apache-2.0 | 290 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.jdbc
import java.sql.Types
import java.util.Locale
import org.apache.spark.sql.types.{BooleanType, DataType, LongType, MetadataBuilder}
private case object MySQLDialect extends JdbcDialect {

  /** Handles any JDBC URL using the MySQL scheme. */
  override def canHandle(url : String): Boolean =
    url.toLowerCase(Locale.ROOT).startsWith("jdbc:mysql")

  /**
   * Maps MySQL-specific JDBC type reports onto Catalyst types:
   *  - multi-bit BIT columns (reported as VARBINARY) become LongType, flagged via
   *    the "binarylong" metadata entry;
   *  - TINYINT(1) columns (reported as BIT) become BooleanType.
   */
  override def getCatalystType(
      sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = {
    (sqlType, size) match {
      case (Types.VARBINARY, s) if typeName.equals("BIT") && s != 1 =>
        // This could instead be a BinaryType if we'd rather return bit-vectors of up to
        // 64 bits as byte arrays instead of longs.
        md.putLong("binarylong", 1)
        Some(LongType)
      case (Types.BIT, _) if typeName.equals("TINYINT") =>
        Some(BooleanType)
      case _ =>
        None
    }
  }

  /** MySQL quotes identifiers with backticks. */
  override def quoteIdentifier(colName: String): String =
    s"`$colName`"

  /** Cheap existence probe that avoids scanning the table. */
  override def getTableExistsQuery(table: String): String =
    s"SELECT 1 FROM $table LIMIT 1"

  override def isCascadingTruncateTable(): Option[Boolean] = Some(false)
}
| jkbradley/spark | sql/core/src/main/scala/org/apache/spark/sql/jdbc/MySQLDialect.scala | Scala | apache-2.0 | 1,907 |
package main.scala
import scala.collection.mutable._
/** Registry for remote objects: `register` records them by name, `bind` retrieves them. */
class Lookup {

  /**
   * Registers a remote object under the name carried in the request's first argument.
   * First registration for a name wins; later registrations are ignored — same
   * semantics as before, but the check-then-put now runs under a single lock so two
   * concurrent registrations cannot interleave between `contains` and `put`.
   */
  def register(request: Request) {
    val remoteObjectName = request.remoteObjectArgumentList(0).asInstanceOf[String]
    LookupTableList.lookupTable.synchronized {
      if (!LookupTableList.lookupTable.contains(remoteObjectName)) {
        LookupTableList.lookupTable.put(remoteObjectName, request.remoteObjectArgumentList)
      }
    }
  }

  /**
   * Looks up a previously registered remote object and wraps it in a Response.
   * Throws NoSuchElementException (as the previous bare `Option.get` did) for an
   * unknown name, but with a message that names the missing object.
   */
  def bind(remoteObjectName: String): Response = {
    val response: Response = new Response()
    response.result = LookupTableList.lookupTable.getOrElse(
      remoteObjectName,
      throw new NoSuchElementException("no remote object registered under '" + remoteObjectName + "'"))
    response
  }

  object LookupTableList {
    // Shared registry keyed by object name. SynchronizedMap makes single operations
    // thread-safe; compound sequences still need external locking (see register).
    val lookupTable = new HashMap[String, Array[Any]] with SynchronizedMap[String, Array[Any]]
  }
}
| labs2/FLiMSy | LookupFLiMSy/src/main/scala/Lookup.scala | Scala | apache-2.0 | 727 |
package org.bitcoins.core.number
import org.bitcoins.core.gen.NumberGenerator
import org.bitcoins.core.util.BitcoinSLogger
import org.scalacheck.{ Prop, Properties }
import scala.util.Try
/**
* Created by chris on 6/21/16.
*/
/** Property-based checks for Int32 arithmetic, comparison, and bitwise operators. */
class Int32Spec extends Properties("Int32Spec") {

  // Every property draws from the same arbitrary-Int32 generator.
  private val ints = NumberGenerator.int32s

  property("Serialization symmetry") =
    Prop.forAll(ints) { i: Int32 =>
      Int32(i.hex) == i
    }

  property("Additive identity") =
    Prop.forAll(ints) { i: Int32 =>
      i + Int32.zero == i
    }

  property("Add two arbitrary int32s") =
    Prop.forAll(ints, ints) { (a: Int32, b: Int32) =>
      val sum = a.toLong + b.toLong
      val fits = sum >= Int32.min.toLong && sum <= Int32.max.toLong
      // Overflowing additions must fail rather than wrap around.
      if (fits) a + b == Int32(sum) else Try(a + b).isFailure
    }

  property("Subtractive identity") =
    Prop.forAll(ints) { i: Int32 =>
      i - Int32.zero == i
    }

  property("Subtract two arbitrary int32s") =
    Prop.forAll(ints, ints) { (a: Int32, b: Int32) =>
      val difference = a.toLong - b.toLong
      val fits = difference >= Int32.min.toLong && difference <= Int32.max.toLong
      if (fits) a - b == Int32(difference) else Try(a - b).isFailure
    }

  property("Multiplying by zero") =
    Prop.forAll(ints) { i: Int32 =>
      i * Int32.zero == Int32.zero
    }

  property("Multiplicative identity") =
    Prop.forAll(ints) { i: Int32 =>
      i * Int32.one == i
    }

  property("Multiply two int32s") =
    Prop.forAll(ints, ints) { (a: Int32, b: Int32) =>
      val product = a.toLong * b.toLong
      val fits = product >= Int32.min.toLong && product <= Int32.max.toLong
      if (fits) a * b == Int32(product.toInt) else Try(a * b).isFailure
    }

  property("<= & >") =
    Prop.forAll(ints, ints) { (a: Int32, b: Int32) =>
      if (a.toLong <= b.toLong) a <= b else a > b
    }

  property("< & =>") =
    Prop.forAll(ints, ints) { (a: Int32, b: Int32) =>
      if (a.toLong < b.toLong) a < b else a >= b
    }

  property("== & !=") =
    Prop.forAll(ints, ints) { (a: Int32, b: Int32) =>
      if (a.toLong == b.toLong) a == b else a != b
    }

  property("|") =
    Prop.forAll(ints, ints) { (a: Int32, b: Int32) =>
      Int32(a.toLong | b.toLong) == (a | b)
    }

  property("&") =
    Prop.forAll(ints, ints) { (a: Int32, b: Int32) =>
      Int32(a.toLong & b.toLong) == (a & b)
    }

  property("negation") =
    Prop.forAll(ints) { i =>
      -i == Int32(-i.toLong)
    }
}
| Christewart/bitcoin-s-core | src/test/scala/org/bitcoins/core/number/Int32Spec.scala | Scala | mit | 3,026 |
/**
* Copyright 2013 Gianluca Amato
*
* This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains
* JANDOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JANDOM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty ofa
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JANDOM. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich.jandom.domains.numerical.ppl
import it.unich.jandom.domains.numerical.LinearForm
import it.unich.jandom.domains.numerical.NumericalDomain
import it.unich.jandom.domains.numerical.NumericalProperty
import parma_polyhedra_library._
import it.unich.jandom.domains.CachedTopBottom
/**
* This is the universal PPL numerical property. It encapsulate an object of the PPL library and
* use a `PPLDomain` as a proxy to call the right method.
*
* @constructor creates a new PPLProperty object
* @tparam PPLNativeProperty is the PPL class implementing the abstract property, such as Double_Box,
* Octagonal_Shape_double, etc...
* @param domain refers to the [[it.unich.jandom.domains.PPLDomain]] object which is the proxy for
* the interesting methods in PPLNativeProperty.
* @param pplobject is the PPL property we are encapsulating.
* @author Gianluca Amato <gamato@unich.it>
*/
class PPLProperty[PPLNativeProperty <: AnyRef](val domain: PPLDomain[PPLNativeProperty], val pplobject: PPLNativeProperty)
  extends NumericalProperty[PPLProperty[PPLNativeProperty]] {
  type Domain = PPLDomain[PPLNativeProperty]
  // Widening: join `this` with `that`, then apply the PPL widening of the join against
  // `this` (the previous iterate). The native object is copied first because the PPL
  // *_assign operations mutate their receiver in place.
  def widening(that: PPLProperty[PPLNativeProperty]): PPLProperty[PPLNativeProperty] = {
    val newpplobject = domain.copyConstructor(pplobject)
    domain.upper_bound_assign(newpplobject, that.pplobject)
    domain.widening_assign(newpplobject, pplobject)
    new PPLProperty(domain, newpplobject)
  }
  /**
   * @inheritdoc
   * Since there is no standard narrowing in the PPL library, this try to use the method
   * `CC76_narrowing_assign` when it exists, returns `this` otherwise.
   * @note @inheritdoc
   */
  def narrowing(that: PPLProperty[PPLNativeProperty]): PPLProperty[PPLNativeProperty] = {
    if (domain.supportsNarrowing) {
      val newpplobject = domain.copyConstructor(that.pplobject)
      domain.narrowing_assign(newpplobject, pplobject)
      new PPLProperty(domain, newpplobject)
    } else
      this
  }
  // Least upper bound, delegated to PPL's upper_bound_assign on a copy.
  def union(that: PPLProperty[PPLNativeProperty]): PPLProperty[PPLNativeProperty] = {
    val newpplobject = domain.copyConstructor(pplobject)
    domain.upper_bound_assign(newpplobject, that.pplobject)
    new PPLProperty(domain, newpplobject)
  }
  // Greatest lower bound, delegated to PPL's intersection_assign on a copy.
  def intersection(that: PPLProperty[PPLNativeProperty]): PPLProperty[PPLNativeProperty] = {
    val newpplobject = domain.copyConstructor(pplobject)
    domain.intersection_assign(newpplobject, that.pplobject)
    new PPLProperty(domain, newpplobject)
  }
  // Forgets all information about variable `n` (PPL "unconstrain").
  def nonDeterministicAssignment(n: Int): PPLProperty[PPLNativeProperty] = {
    val newpplobject = domain.copyConstructor(pplobject)
    domain.unconstrain_space_dimension(newpplobject, new Variable(n))
    new PPLProperty(domain, newpplobject)
  }
  // Assigns variable `n` the affine expression `lf` via PPL's affine_image; `den`
  // is the common denominator produced when converting `lf` to PPL's integer form.
  def linearAssignment(n: Int, lf: LinearForm[Double]): PPLProperty[PPLNativeProperty] = {
    val (le, den) = PPLUtils.toPPLLinearExpression(lf)
    val newpplobject = domain.copyConstructor(pplobject)
    domain.affine_image(newpplobject, new Variable(n), le, den)
    new PPLProperty(domain, newpplobject)
  }
  // Refines the property with the constraint `lf <= 0`.
  def linearInequality(lf: LinearForm[Double]): PPLProperty[PPLNativeProperty] = {
    val (le, _) = PPLUtils.toPPLLinearExpression(lf)
    val newpplobject = domain.copyConstructor(pplobject)
    domain.refine_with_constraint(newpplobject, new Constraint(le, Relation_Symbol.LESS_OR_EQUAL, new Linear_Expression_Coefficient(new Coefficient(0))))
    new PPLProperty(domain, newpplobject)
  }
  /**
   * @inheritdoc
   * Implemented by splitting the disequality `lf != 0` into the strict inequalities
   * `lf < 0` and `lf > 0`, refining a copy with each, and joining the two results.
   * @note @inheritdoc
   */
  def linearDisequality(lf: LinearForm[Double]): PPLProperty[PPLNativeProperty] = {
    val (le, _) = PPLUtils.toPPLLinearExpression(lf)
    val newpplobject1 = domain.copyConstructor(pplobject)
    val newpplobject2 = domain.copyConstructor(pplobject)
    domain.refine_with_constraint(newpplobject1, new Constraint(le, Relation_Symbol.LESS_THAN, new Linear_Expression_Coefficient(new Coefficient(0))))
    domain.refine_with_constraint(newpplobject2, new Constraint(le, Relation_Symbol.GREATER_THAN, new Linear_Expression_Coefficient(new Coefficient(0))))
    domain.upper_bound_assign(newpplobject1, newpplobject2)
    new PPLProperty(domain, newpplobject1)
  }
  // Minimum of `lf` over the property: the minimum over an empty property is +inf
  // (except for a constant form, whose constant is returned); when PPL reports the
  // form unbounded below, -inf is returned.
  def minimize(lf: LinearForm[Double]) = {
    if (isEmpty) {
      if (lf.homcoeffs.forall(_ == 0.0))
        lf.known
      else
        Double.PositiveInfinity
    } else {
      val (le, den) = PPLUtils.toPPLLinearExpression(lf)
      val exact = new By_Reference[java.lang.Boolean](false)
      val val_n = new Coefficient(0)
      val val_d = new Coefficient(0)
      val result = domain.minimize(pplobject, le, val_n, val_d, exact)
      if (!result)
        Double.NegativeInfinity
      else
        // NOTE(review): BigDecimal.divide without a MathContext throws ArithmeticException
        // for non-terminating quotients (e.g. 1/3) — confirm this cannot occur here.
        (new java.math.BigDecimal(val_n.getBigInteger()) divide new java.math.BigDecimal(val_d.getBigInteger()) divide new java.math.BigDecimal(den.getBigInteger())).doubleValue()
    }
  }
  // Maximum of `lf` over the property; dual conventions of `minimize`.
  def maximize(lf: LinearForm[Double]) = {
    if (isEmpty) {
      if (lf.homcoeffs.forall(_ == 0.0))
        lf.known
      else
        Double.NegativeInfinity
    } else {
      val (le, den) = PPLUtils.toPPLLinearExpression(lf)
      val exact = new By_Reference[java.lang.Boolean](false)
      val val_n = new Coefficient(0)
      val val_d = new Coefficient(0)
      val result = domain.maximize(pplobject, le, val_n, val_d, exact)
      if (!result)
        Double.PositiveInfinity
      else
        (new java.math.BigDecimal(val_n.getBigInteger()) divide new java.math.BigDecimal(val_d.getBigInteger()) divide new java.math.BigDecimal(den.getBigInteger())).doubleValue()
    }
  }
  // Returns the single value `lf` takes on the property when PPL can prove it is
  // constant there, None otherwise.
  def frequency(lf: LinearForm[Double]) = {
    if (isEmpty) {
      if (lf.homcoeffs.forall(_ == 0.0))
        Option(lf.known)
      else
        None
    } else {
      val (le, den) = PPLUtils.toPPLLinearExpression(lf)
      val freq_n = new Coefficient(0)
      val freq_d = new Coefficient(0)
      val val_n = new Coefficient(0)
      val val_d = new Coefficient(0)
      val result = domain.frequency(pplobject, le, freq_n, freq_d, val_n, val_d)
      if (!result)
        None
      else
        Some((new java.math.BigDecimal(val_n.getBigInteger()) divide new java.math.BigDecimal(val_d.getBigInteger()) divide new java.math.BigDecimal(den.getBigInteger())).doubleValue())
    }
  }
  // Minimized constraint system, converted back to linear forms. flatMap is used
  // because a single PPL constraint may convert to zero or more linear forms.
  def constraints = {
    import collection.JavaConversions._
    val cs = domain.minimized_constraints(pplobject)
    cs flatMap PPLUtils.fromPPLConstraint
  }
  def isPolyhedral = {
    import collection.JavaConversions._
    val cs = domain.minimized_constraints(pplobject)
    // we explicitly check if the object is empty since, in this case, it has a unsatisfiable
    // congruence.
    isEmpty || ((cs forall PPLUtils.isRepresentableAsLinearForms) && domain.minimized_congruences(pplobject).isEmpty())
  }
  // Appends one unconstrained dimension at the end of the space.
  def addVariable = {
    val newpplobject = domain.copyConstructor(pplobject)
    domain.add_space_dimensions_and_embed(newpplobject, 1)
    new PPLProperty(domain, newpplobject)
  }
  // Removes dimension `n` from the space.
  def delVariable(n: Int) = {
    val newpplobject = domain.copyConstructor(pplobject)
    val dims = new Variables_Set
    dims.add(new Variable(n))
    domain.remove_space_dimensions(newpplobject, dims)
    new PPLProperty(domain, newpplobject)
  }
  // Permutes/maps dimensions according to `rho` (converted to a PPL partial function).
  def mapVariables(rho: Seq[Int]) = {
    val newpplobject = domain.copyConstructor(pplobject)
    domain.map_space_dimensions(newpplobject, PPLUtils.sequenceToPartialFunction(rho))
    new PPLProperty(domain, newpplobject)
  }
  def dimension: Int = domain.space_dimension(pplobject).toInt
  def isEmpty = domain.is_empty(pplobject)
  def isTop = domain.is_universe(pplobject)
  def isBottom = isEmpty
  def bottom = domain.bottom(domain.space_dimension(pplobject).toInt)
  def top = domain.top(domain.space_dimension(pplobject).toInt)
  // Partial order: only comparable with properties backed by the same native PPL class.
  def tryCompareTo[B >: PPLProperty[PPLNativeProperty]](other: B)(implicit arg0: (B) => PartiallyOrdered[B]): Option[Int] = other match {
    case other: PPLProperty[_] =>
      if (pplobject.getClass != other.pplobject.getClass)
        return None
      else {
        val other_pplobject = other.pplobject.asInstanceOf[PPLNativeProperty]
        if (pplobject equals other_pplobject) Some(0)
        else if (domain.strictly_contains(pplobject, other_pplobject)) Some(1)
        else if (domain.strictly_contains(other_pplobject, pplobject)) Some(-1)
        else None
      }
    case _ => None
  }
  override def hashCode: Int = pplobject.hashCode
  // Pretty-prints the minimized constraints using `vars` as dimension names.
  def mkString(vars: Seq[String]): String = PPLUtils.constraintsToString(domain.minimized_constraints(pplobject), vars)
}
| francescaScozzari/Jandom | core/src/main/ppl/it/unich/jandom/domains/numerical/ppl/PPLProperty.scala | Scala | lgpl-3.0 | 9,283 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package connectors
import config.FrontendAppConfig
import models.api.Address
import models.external.addresslookup.AddressLookupConfigurationModel
import play.api.http.HeaderNames._
import play.api.http.HttpVerbs._
import play.api.mvc.Call
import uk.gov.hmrc.http.{HeaderCarrier, HttpClient, HttpResponse}
import uk.gov.hmrc.play.bootstrap.config.ServicesConfig
import javax.inject.{Inject, Singleton}
import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NoStackTrace
/** Client for address-lookup-frontend: starts journeys and retrieves confirmed addresses. */
@Singleton
class AddressLookupConnector @Inject()(val http: HttpClient, appConfig: FrontendAppConfig)
                                      (implicit ec: ExecutionContext) {
  // JSON Reads used when deserialising addresses returned by address-lookup-frontend.
  implicit val reads: Address.addressLookupReads.type = Address.addressLookupReads
  /** Fetches the address the user confirmed in the address-lookup journey `id`. */
  def getAddress(id: String)(implicit hc: HeaderCarrier): Future[Address] =
    http.GET[Address](appConfig.addressLookupRetrievalUrl(id))
  /**
   * Initialises an address-lookup journey and returns the on-ramp URL to redirect to.
   * ALF answers 202 Accepted with the journey URL in the Location header; a missing
   * header is a contract violation and raises ALFLocationHeaderNotSetException.
   */
  // NOTE(review): `logger` is not defined or imported in this file's visible scope —
  // presumably provided elsewhere (e.g. a logging trait or import); confirm.
  def getOnRampUrl(alfConfig: AddressLookupConfigurationModel)(implicit hc: HeaderCarrier): Future[Call] =
    http.POST[AddressLookupConfigurationModel, HttpResponse](appConfig.addressLookupJourneyUrl, alfConfig).map { resp =>
      resp.header(LOCATION).map(Call(GET, _)).getOrElse { //here resp will be a 202 Accepted with a Location header
        logger.warn("[getOnRampUrl] - ERROR: Location header not set in ALF response")
        throw new ALFLocationHeaderNotSetException
      }
    }
}
private[connectors] class ALFLocationHeaderNotSetException extends NoStackTrace
| hmrc/vat-registration-frontend | app/connectors/AddressLookupConnector.scala | Scala | apache-2.0 | 2,077 |
/*
* Init.scala
* (Mellite)
*
* Copyright (c) 2012-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* contact@sciss.de
*/
package de.sciss.mellite
import java.io.File
import de.sciss.filecache.Limit
import de.sciss.freesound.lucre.{Retrieval, RetrievalObjView}
import de.sciss.fscape.lucre.{Cache => FScCache}
import de.sciss.proc.FScape
import de.sciss.lucre
import de.sciss.lucre.expr.LucrePi
import de.sciss.lucre.swing.{FScapeViews, LucreSwing}
import de.sciss.mellite.impl.code.{CodeFrameImpl, CodeViewImpl}
import de.sciss.mellite.impl.document.{FolderEditorViewImpl, FolderViewImpl}
import de.sciss.mellite.impl.fscape.{FScapeObjView, FScapeOutputObjView}
import de.sciss.mellite.impl.grapheme.{GraphemeToolImpl, GraphemeToolsImpl, GraphemeViewImpl}
import de.sciss.mellite.impl.markdown.{MarkdownEditorViewImpl, MarkdownFrameImpl, MarkdownObjView, MarkdownRenderViewImpl}
import de.sciss.mellite.impl.objview
import de.sciss.mellite.impl.patterns.{PatternObjView, StreamObjView}
import de.sciss.mellite.impl.proc.{ProcObjView, ProcOutputObjView}
import de.sciss.mellite.impl.timeline.{GlobalProcsViewImpl, TimelineToolImpl, TimelineToolsImpl, TimelineViewImpl}
import de.sciss.mellite.impl.widget.WidgetObjView
import de.sciss.negatum.Negatum
import de.sciss.negatum.gui.NegatumObjView
import de.sciss.nuages.Wolkenpumpe
import de.sciss.proc.Pattern
import de.sciss.proc.{GenView, SoundProcesses, Widget}
import de.sciss.synth.ThirdPartyUGens
import net.harawata.appdirs.AppDirsFactory
trait Init {
  /** Root directory for Mellite's disk cache (used below for the FScape cache). */
  def cacheDir: File = _cacheDir
  // Resolved once on first access; mkdirs() ensures the directory exists.
  private[this] lazy val _cacheDir = {
    val appDirs = AppDirsFactory.getInstance
    val path = appDirs.getUserCacheDir("mellite", /* version */ null, /* author */ "de.sciss")
    val res = new File(path) // new File(new File(sys.props("user.home"), "mellite"), "cache")
    res.mkdirs()
    res
  }
  // Registers all object-view factories; `lazy val _: Unit` ensures this runs at most once.
  private[this] lazy val _initObjViews: Unit = {
    // Factories for list (table row) views.
    val obj = List(
      objview.ActionObjView,
      objview.ArtifactObjView,
      objview.BooleanObjView,
      objview.ColorObjView,
      objview.CodeObjView,
      objview.ControlObjView,
      objview.DoubleObjView,
      objview.DoubleVectorObjView,
      objview.EnvSegmentObjView,
      objview.FadeSpecObjView,
      objview.FolderObjView,
      objview.GraphemeObjView,
      objview.IntObjView,
      objview.IntVectorObjView,
      objview.LongObjView,
      objview.NuagesObjView,
      objview.ParamSpecObjView,
      objview.StringObjView,
      objview.TagObjView,
      objview.TimelineObjView,
      ArtifactLocationObjView,
      AudioCueObjView,
      FScapeObjView,
      FScapeOutputObjView,
      MarkdownObjView,
      NegatumObjView,
      PatternObjView,
      ProcObjView,
      ProcOutputObjView,
      RetrievalObjView,
      StreamObjView,
      WidgetObjView,
    )
    obj.foreach(ObjListView.addFactory)
    // Factories for grapheme views.
    val gr = List(
      objview.DoubleObjView,
      objview.DoubleVectorObjView,
      objview.EnvSegmentObjView,
    )
    gr.foreach(ObjGraphemeView.addFactory)
    // Factories for timeline views.
    val tl = List(
      ProcObjView,
      // ActionRawObjView,
      PatternObjView,
      StreamObjView,
    )
    tl.foreach(ObjTimelineView.addFactory)
  }
  // Installs companion-object factory implementations; `lazy val _: Unit` runs it at most once.
  private[this] lazy val _initCompanionFactories: Unit = {
    objview.ArtifactLocationObjViewImpl .install()
    objview.AudioCueObjViewImpl .install()
    CodeFrameImpl .install()
    CodeViewImpl .install()
    FolderEditorViewImpl .install()
    FolderViewImpl .install()
    GlobalProcsViewImpl .install()
    GraphemeToolImpl .install()
    GraphemeToolsImpl .install()
    GraphemeViewImpl .install()
    MarkdownEditorViewImpl .install()
    MarkdownFrameImpl .install()
    MarkdownRenderViewImpl .install()
    TimelineToolImpl .install()
    TimelineToolsImpl .install()
    TimelineViewImpl .install()
  }
  /**
   * One-time application initialization: registers all type extensions, object views,
   * companion factories, the FScape disk cache, and the FScape GenView factory.
   */
  def initTypes(): Unit = {
    FScape .init()
    FScapeViews .init()
    LucreSwing .init()
    Negatum .init()
    Pattern .init()
    Retrieval .init()
    SoundProcesses.init()
    LucrePi .init()
    Widget .init()
    Wolkenpumpe .init()
    lucre.swing.graph.TimelineView .init()
    _initObjViews
    _initCompanionFactories
    ThirdPartyUGens.init() // not implied by SoundProcesses
    // ---- FScape ----
    // NOTE(review): `2L << 10 << 100` evaluates to 2^47 bytes (Long shift counts are taken
    // mod 64), i.e. ~128 TiB — not the 2 GB stated in the trailing comment. Confirm intent.
    val cacheLim = Limit(count = 8192, space = 2L << 10 << 100) // 2 GB; XXX TODO --- through user preferences
    FScCache.init(folder = cacheDir, capacity = cacheLim)
    // val ctlConf = Control.Config()
    // ctlConf.terminateActors = false
    // we have sane default config now!
    // akka looks for stuff it can't find in IntelliJ plugin.
    // see https://github.com/Sciss/FScape-next/issues/23
    // for testing purposes, simply give up, so we
    // should be able to work with Mellite minus FScape.
    try {
      val fscapeF = FScape.genViewFactory()
      GenView.tryAddFactory(fscapeF)
    } catch {
      case ex if ex.getClass.getName.contains("com.typesafe.config.ConfigException") /* : com.typesafe.config.ConfigException */ =>
        Console.err.println(s"Mellite.init: Failed to initialize Akka.")
    }
  }
} | Sciss/Mellite | app/src/main/scala/de/sciss/mellite/Init.scala | Scala | agpl-3.0 | 5,403 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters the dataset and retrieves a sample of code snippets that meet specific criteria, providing a quick overview of the dataset's content.