code
stringlengths 5
1M
| repo_name
stringlengths 5
109
| path
stringlengths 6
208
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 5
1M
|
|---|---|---|---|---|---|
/*
* Copyright (c) 2017 Magomed Abdurakhmanov, Hypertino
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
*/
package com.hypertino.facade.raml
import com.hypertino.facade.filters.annotated.ResponseFieldFilterAdapter
import com.hypertino.facade.{TestBase, TestBaseWithHyperbus}
import com.hypertino.hyperbus.model
/**
 * Verifies that the RAML configuration builder attaches the expected
 * response-field filters for collection resources declared in
 * raml-collection-config-parser-test.raml.
 */
class RamlConfigurationBuilderCollectionTest extends TestBaseWithHyperbus(ramlConfigFiles=Seq("raml-collection-config-parser-test.raml")) {
  import testServices._

  "Responses" should "have filters on collection fields" in {
    // Navigate to the filter chain configured for the GET response of the
    // collection resource declared in the test RAML file.
    val filterChain = originalRamlConfig
      .resourcesByPattern("/request-collection-annotations")
      .methods(Method(model.Method.GET))
      .responses
      .head
      ._2
      .ramlContentTypes
      .head
      ._2
      .filterChain

    filterChain
      .responseFilters
      .size shouldBe 1

    // The single response filter must be the adapter produced by the
    // collection-field annotations. (The original bound this assertion's
    // result to an unused val `rffa`; the dead assignment is removed.)
    filterChain
      .responseFilters(0) shouldBe a[ResponseFieldFilterAdapter]
  }
}
|
hypertino/hyperfacade
|
src/test/scala/com/hypertino/facade/raml/RamlConfigurationBuilderCollectionTest.scala
|
Scala
|
mpl-2.0
| 1,109
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx
/**
* The direction of a directed edge relative to a vertex.
*/
/**
 * The direction of a directed edge relative to a vertex.
 */
class EdgeDirection private (private val name: String) extends Serializable {

  /**
   * Reverse the direction of an edge: In becomes Out, Out becomes In,
   * while Either and Both are their own reverses.
   */
  def reverse: EdgeDirection =
    if (this == EdgeDirection.In) {
      EdgeDirection.Out
    } else if (this == EdgeDirection.Out) {
      EdgeDirection.In
    } else {
      // Either and Both are symmetric, so reversing them is a no-op.
      this
    }

  override def toString: String = s"EdgeDirection.$name"

  override def equals(o: Any): Boolean = o match {
    case that: EdgeDirection => that.name == name
    case _ => false
  }

  override def hashCode: Int = name.hashCode
}

/**
 * A set of [[EdgeDirection]]s.
 */
object EdgeDirection {
  /** Edges arriving at a vertex. */
  final val In: EdgeDirection = new EdgeDirection("In")

  /** Edges originating from a vertex. */
  final val Out: EdgeDirection = new EdgeDirection("Out")

  /** Edges originating from *or* arriving at a vertex of interest. */
  final val Either: EdgeDirection = new EdgeDirection("Either")

  /** Edges originating from *and* arriving at a vertex of interest. */
  final val Both: EdgeDirection = new EdgeDirection("Both")
}
|
aokolnychyi/spark
|
graphx/src/main/scala/org/apache/spark/graphx/EdgeDirection.scala
|
Scala
|
apache-2.0
| 2,118
|
package com.hyenawarrior.OldNorseGrammar.grammar
import com.hyenawarrior.OldNorseGrammar.grammar.enums.Case.DATIVE
import com.hyenawarrior.OldNorseGrammar.grammar.enums.GNumber.{PLURAL, SINGULAR}
import com.hyenawarrior.OldNorseGrammar.grammar.nouns.stemclasses.{StrongStemClassFeminineA2, StrongStemClassMascA, StrongStemClassNeuter}
import com.hyenawarrior.OldNorseGrammar.grammar.nouns.{NounForm, NounStem}
import org.junit.Assert.assertEquals
import org.junit.Test
/**
* Created by HyenaWarrior on 2018.02.03..
*/
/**
 * Created by HyenaWarrior on 2018.02.03..
 *
 * Checks that NounStem recovers the expected root string from inflected
 * representations across several strong stem classes.
 */
class NounStemTest {

  @Test
  def testStrongMascAStem(): Unit = {
    val stem = NounStem.fromStrRepr("hesta", StrongStemClassMascA)
    assertEquals("hest", stem.rootStr)
  }

  @Test
  def testStrongMascJAStem(): Unit = {
    val stem = NounStem.fromStrRepr("niða", StrongStemClassMascA)
    assertEquals("nið", stem.rootStr)
  }

  @Test
  def testStrongMascWaStem(): Unit = {
    val plainStem = NounStem.fromStrRepr("sǫnga", StrongStemClassMascA)
    assertEquals("sǫng", plainStem.rootStr)
    val waStem = NounStem.fromStrRepr("sǫngva", StrongStemClassMascA)
    assertEquals("sǫngv", waStem.rootStr)
  }

  @Test
  def testStrongFemWoStem(): Unit = {
    // The stem "ǫr" neither ends in a velar consonant nor is short,
    // so there is no reason to restore a 'v'.
    val stem = NounStem.fromStrRepr("ǫra", StrongStemClassFeminineA2)
    assertEquals("ǫr", stem.rootStr)
  }

  @Test
  def testStrongNeuterStemSieversLaw1(): Unit = {
    val nounForm = NounForm("ríki", SINGULAR -> DATIVE, isDefinite = false)
    val stem = NounStem.from(nounForm, StrongStemClassNeuter)
    assertEquals(s"Failed to recover from ${nounForm.strRepr}", "ríkj", stem.rootStr)
  }

  @Test
  def testStrongNeuterStemSieversLaw2(): Unit = {
    val nounForm = NounForm("ríkjum", PLURAL -> DATIVE, isDefinite = false)
    val stem = NounStem.from(nounForm, StrongStemClassNeuter)
    assertEquals(s"Failed to recover from ${nounForm.strRepr}", "ríkj", stem.rootStr)
  }

  @Test
  def testStrongNeuterJaStemKnjam(): Unit = {
    val nounForm = NounForm("knjám", PLURAL -> DATIVE, isDefinite = false)
    val stem = NounStem.from(nounForm, StrongStemClassNeuter)
    assertEquals("kné", stem.rootStr)
  }
}
|
HyenaSoftware/IG-Dictionary
|
OldNorseGrammarEngine/src/test/scala/com/hyenawarrior/OldNorseGrammar/grammar/NounStemTest.scala
|
Scala
|
lgpl-3.0
| 2,019
|
/*
* Copyright 2017-2020 47 Degrees, LLC. <http://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sbtorgpolicies.settings
import sbt.Keys._
import sbt._
import sbtorgpolicies.exceptions.ValidationException
import sbtorgpolicies.model._
import sbtorgpolicies.OrgPoliciesKeys._
import de.heikoseeberger.sbtheader
import scoverage.ScoverageKeys
/**
 * Enforcement checks wired into `orgCheckSettings`: validates the build's
 * Scala versions, scoverage thresholds and sbt-header settings against the
 * organization policies, failing the task with a ValidationException on the
 * first violation.
 */
trait enforcement {
  // Runs all checks sequentially; the first failing check aborts the task.
  lazy val orgEnforcementSettingsTasks = Seq(
    orgCheckSettings := Def
      .sequential(
        checkScalaVersion,
        checkCrossScalaVersion,
        checkScoverageSettings,
        checkFileHeaderSettings
      )
      .value
  )

  // Non-plugin modules must build with the latest supported Scala version;
  // sbt plugins are exempt because sbt fixes their Scala version.
  private[this] def checkScalaVersion = Def.task {
    val scalaVersionValue = scalaVersion.value
    val isSbtPlugin = sbtPlugin.value
    if (!isSbtPlugin && scalaVersionValue != scalac.latestScalaVersion) {
      throw ValidationException(
        s"scalaVersion is $scalaVersionValue. It should be ${scalac.latestScalaVersion}"
      )
    }
  }

  // crossScalaVersions must contain at least every org-mandated version
  // (again, sbt plugins are exempt).
  private[this] def checkCrossScalaVersion = Def.task {
    val crossScalaVersionsValue = crossScalaVersions.value
    val isSbtPlugin = sbtPlugin.value
    if (!isSbtPlugin && !scalac.crossScalaVersions.forall(crossScalaVersionsValue.contains)) {
      throw ValidationException(s"""
        |crossScalaVersions is $crossScalaVersionsValue.
        |It should have at least these versions: ${scalac.crossScalaVersions
        .mkString(",")}""".stripMargin)
    }
  }

  // Coverage must be enforced (fail-on-minimum enabled) and the configured
  // minimum must meet the org-wide `scoverageMinimum` threshold.
  private[this] def checkScoverageSettings = Def.task {
    val coverageFailOnMinimumValue = ScoverageKeys.coverageFailOnMinimum.value
    val coverageMinimumValue = ScoverageKeys.coverageMinimum.value
    if (!coverageFailOnMinimumValue)
      throw ValidationException(
        s"coverageFailOnMinimum is $coverageFailOnMinimumValue, however, it should be enabled."
      )
    if (coverageMinimumValue < scoverageMinimum)
      throw ValidationException(
        s"coverageMinimumValue is $coverageMinimumValue. It should be at least $scoverageMinimum%"
      )
  }

  // sbt-header must be configured: at least one file-type mapping and a
  // license are mandatory.
  private[this] def checkFileHeaderSettings = Def.task {
    val headerMappings: Map[sbtheader.FileType, sbtheader.CommentStyle] =
      sbtheader.HeaderPlugin.autoImport.headerMappings.value
    val headerLicense: Option[sbtheader.License] =
      sbtheader.HeaderPlugin.autoImport.headerLicense.value
    // NOTE(review): a Map size is never negative, so `<= 0` is effectively an
    // emptiness check; `headerMappings.isEmpty` would state the intent directly.
    if (headerMappings.size <= 0) {
      throw ValidationException("headerMappings is empty and it's a mandatory setting")
    }
    if (headerLicense.isEmpty) {
      throw ValidationException("headerLicense is empty and it's a mandatory setting")
    }
  }
}
|
47deg/sbt-org-policies
|
src/main/scala/sbtorgpolicies/settings/enforcement.scala
|
Scala
|
apache-2.0
| 3,166
|
/*
* Copyright 2019 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.emailaddress
import org.scalacheck.Gen._
import org.scalacheck.{Gen, Shrink}
/**
 * ScalaCheck generators for syntactically plausible email addresses,
 * split into mailbox (local part) and domain generators.
 */
trait EmailAddressGenerators {
  // Disables ScalaCheck shrinking so failing samples are reported unmodified.
  def noShrink[T] = Shrink[T](_ => Stream.empty)
  implicit val dontShrinkStrings: Shrink[String] = noShrink[String]
  // Non-empty string drawn from the given character generator.
  // NOTE(review): the trailing `suchThat` looks redundant after nonEmptyListOf,
  // but it also constrains values produced by shrinking — confirm before removing.
  def nonEmptyString(char: Gen[Char]) =
    nonEmptyListOf(char)
      .map(_.mkString)
      .suchThat(!_.isEmpty)
  // Generator picking a single character from the supplied string.
  def chars(chars: String) = Gen.choose(0, chars.length - 1).map(chars.charAt)
  // NOTE(review): the character set below contains both a typographic
  // apostrophe (U+2019 ’) and an ASCII apostrophe (') — the former is not an
  // RFC 5322 atext character; verify whether it is intentional or mojibake.
  val validMailbox = nonEmptyString(oneOf(alphaChar, chars(".!#$%&’'*+/=?^_`{|}~-"))).label("mailbox")
  // Dot-separated sequence of alphabetic labels ending in the top-level domain.
  val validDomain = (for {
    topLevelDomain <- nonEmptyString(alphaChar)
    otherParts <- listOf(nonEmptyString(alphaChar))
  } yield (otherParts :+ topLevelDomain).mkString(".")).label("domain")
  // Full address: `mailbox@domain`, with both parts overridable by callers.
  def validEmailAddresses(mailbox: Gen[String] = validMailbox, domain: Gen[String] = validDomain) =
    for {
      mailbox <- mailbox
      domain <- domain
    } yield s"$mailbox@$domain"
}
|
hmrc/emailaddress
|
src/test/scala/uk/gov/hmrc/emailaddress/EmailAddressGenerators.scala
|
Scala
|
apache-2.0
| 1,548
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import kafka.api.LeaderAndIsr
import kafka.cluster.Broker
import kafka.controller.{ControllerChannelManager, ControllerContext, StateChangeLogger}
import kafka.utils.TestUtils
import kafka.utils.TestUtils.createTopic
import kafka.zk.ZooKeeperTestHarness
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.metrics.Metrics
import org.apache.kafka.common.network.ListenerName
import org.apache.kafka.common.protocol.{ApiKeys, Errors}
import org.apache.kafka.common.requests.UpdateMetadataRequest.EndPoint
import org.apache.kafka.common.requests._
import org.apache.kafka.common.security.auth.SecurityProtocol
import org.apache.kafka.common.utils.Time
import org.junit.Assert._
import org.junit.{After, Before, Test}
import scala.collection.JavaConverters._
/**
 * Integration tests for broker-epoch handling: the epoch cached by brokers
 * and by the controller must match ZooKeeper, and control requests carrying
 * a stale broker epoch must be rejected with STALE_BROKER_EPOCH.
 */
class BrokerEpochIntegrationTest extends ZooKeeperTestHarness {
  val brokerId1 = 0
  val brokerId2 = 1
  // Live servers of the two-broker cluster started in setUp.
  var servers: Seq[KafkaServer] = Seq.empty[KafkaServer]

  @Before
  override def setUp() {
    super.setUp()
    val configs = Seq(
      TestUtils.createBrokerConfig(brokerId1, zkConnect),
      TestUtils.createBrokerConfig(brokerId2, zkConnect))
    // Disable auto leader rebalancing so leadership does not move mid-test.
    configs.foreach { config =>
      config.setProperty(KafkaConfig.AutoLeaderRebalanceEnableProp, false.toString)}
    // start both servers
    servers = configs.map(config => TestUtils.createServer(KafkaConfig.fromProps(config)))
  }

  @After
  override def tearDown() {
    TestUtils.shutdownServers(servers)
    super.tearDown()
  }

  // Each broker's controller-side cached epoch must equal the epoch recorded
  // for it in ZooKeeper.
  @Test
  def testReplicaManagerBrokerEpochMatchesWithZk(): Unit = {
    val brokerAndEpochs = zkClient.getAllBrokerAndEpochsInCluster
    assertEquals(brokerAndEpochs.size, servers.size)
    brokerAndEpochs.foreach {
      case (broker, epoch) =>
        val brokerServer = servers.find(e => e.config.brokerId == broker.id)
        assertTrue(brokerServer.isDefined)
        assertEquals(epoch, brokerServer.get.kafkaController.brokerEpoch)
    }
  }

  // The controller's broker-epoch cache must track ZooKeeper through a broker
  // shutdown and a subsequent restart.
  @Test
  def testControllerBrokerEpochCacheMatchesWithZk(): Unit = {
    val controller = getController
    val otherBroker = servers.find(e => e.config.brokerId != controller.config.brokerId).get
    // Broker epochs cache matches with zk in steady state
    checkControllerBrokerEpochsCacheMatchesWithZk(controller.kafkaController.controllerContext)
    // Shutdown a broker and make sure broker epochs cache still matches with zk state
    otherBroker.shutdown()
    checkControllerBrokerEpochsCacheMatchesWithZk(controller.kafkaController.controllerContext)
    // Restart a broker and make sure broker epochs cache still matches with zk state
    otherBroker.startup()
    checkControllerBrokerEpochsCacheMatchesWithZk(controller.kafkaController.controllerContext)
  }

  @Test
  def testControlRequestWithCorrectBrokerEpoch() {
    testControlRequestWithBrokerEpoch(false)
  }

  @Test
  def testControlRequestWithStaleBrokerEpoch() {
    testControlRequestWithBrokerEpoch(true)
  }

  // Sends LeaderAndIsr, UpdateMetadata and StopReplica to broker2 carrying
  // either its current epoch or a deliberately stale one (current - 1), and
  // verifies the broker accepts or rejects each request accordingly.
  private def testControlRequestWithBrokerEpoch(isEpochInRequestStale: Boolean) {
    val tp = new TopicPartition("new-topic", 0)
    // create topic with 1 partition, 2 replicas, one on each broker
    createTopic(zkClient, tp.topic(), partitionReplicaAssignment = Map(0 -> Seq(brokerId1, brokerId2)), servers = servers)
    // Pose as a third, out-of-cluster controller so the real controller's
    // traffic does not interfere with the requests sent below.
    val controllerId = 2
    val controllerEpoch = zkClient.getControllerEpoch.get._1
    val controllerConfig = KafkaConfig.fromProps(TestUtils.createBrokerConfig(controllerId, zkConnect))
    val securityProtocol = SecurityProtocol.PLAINTEXT
    val listenerName = ListenerName.forSecurityProtocol(securityProtocol)
    val brokerAndEpochs = servers.map(s =>
      (new Broker(s.config.brokerId, "localhost", TestUtils.boundPort(s), listenerName, securityProtocol),
        s.kafkaController.brokerEpoch)).toMap
    val nodes = brokerAndEpochs.keys.map(_.node(listenerName))
    val controllerContext = new ControllerContext
    controllerContext.setLiveBrokerAndEpochs(brokerAndEpochs)
    val metrics = new Metrics
    val controllerChannelManager = new ControllerChannelManager(controllerContext, controllerConfig, Time.SYSTEM,
      metrics, new StateChangeLogger(controllerId, inControllerContext = true, None))
    controllerChannelManager.startup()
    val broker2 = servers(brokerId2)
    // Stale epoch is current - 1; otherwise use broker2's exact current epoch.
    val epochInRequest =
      if (isEpochInRequestStale) broker2.kafkaController.brokerEpoch - 1 else broker2.kafkaController.brokerEpoch
    try {
      // Send LeaderAndIsr request with the chosen broker epoch
      {
        val partitionStates = Map(
          tp -> new LeaderAndIsrRequest.PartitionState(controllerEpoch, brokerId2, LeaderAndIsr.initialLeaderEpoch + 1,
            Seq(brokerId1, brokerId2).map(Integer.valueOf).asJava, LeaderAndIsr.initialZKVersion,
            Seq(0, 1).map(Integer.valueOf).asJava, false)
        )
        val requestBuilder = new LeaderAndIsrRequest.Builder(
          ApiKeys.LEADER_AND_ISR.latestVersion, controllerId, controllerEpoch,
          epochInRequest,
          partitionStates.asJava, nodes.toSet.asJava)
        if (isEpochInRequestStale) {
          sendAndVerifyStaleBrokerEpochInResponse(controllerChannelManager, ApiKeys.LEADER_AND_ISR, requestBuilder)
        }
        else {
          sendAndVerifySuccessfulResponse(controllerChannelManager, ApiKeys.LEADER_AND_ISR, requestBuilder)
          TestUtils.waitUntilLeaderIsKnown(Seq(broker2), tp, 10000)
        }
      }
      // Send UpdateMetadata request with the chosen broker epoch
      {
        val partitionStates = Map(
          tp -> new UpdateMetadataRequest.PartitionState(controllerEpoch, brokerId2, LeaderAndIsr.initialLeaderEpoch + 1,
            Seq(brokerId1, brokerId2).map(Integer.valueOf).asJava, LeaderAndIsr.initialZKVersion,
            Seq(0, 1).map(Integer.valueOf).asJava, Seq.empty.asJava)
        )
        // NOTE(review): local name `liverBrokers` looks like a typo for
        // `liveBrokers` (purely cosmetic; not changed here).
        val liverBrokers = brokerAndEpochs.map { brokerAndEpoch =>
          val broker = brokerAndEpoch._1
          val securityProtocol = SecurityProtocol.PLAINTEXT
          val listenerName = ListenerName.forSecurityProtocol(securityProtocol)
          val node = broker.node(listenerName)
          val endPoints = Seq(new EndPoint(node.host, node.port, securityProtocol, listenerName))
          new UpdateMetadataRequest.Broker(broker.id, endPoints.asJava, broker.rack.orNull)
        }
        val requestBuilder = new UpdateMetadataRequest.Builder(
          ApiKeys.UPDATE_METADATA.latestVersion, controllerId, controllerEpoch,
          epochInRequest,
          partitionStates.asJava, liverBrokers.toSet.asJava)
        if (isEpochInRequestStale) {
          sendAndVerifyStaleBrokerEpochInResponse(controllerChannelManager, ApiKeys.UPDATE_METADATA, requestBuilder)
        }
        else {
          sendAndVerifySuccessfulResponse(controllerChannelManager, ApiKeys.UPDATE_METADATA, requestBuilder)
          TestUtils.waitUntilMetadataIsPropagated(Seq(broker2), tp.topic(), tp.partition(), 10000)
          assertEquals(brokerId2,
            broker2.metadataCache.getPartitionInfo(tp.topic(), tp.partition()).get.basePartitionState.leader)
        }
      }
      // Send StopReplica request with the chosen broker epoch
      {
        val requestBuilder = new StopReplicaRequest.Builder(
          ApiKeys.STOP_REPLICA.latestVersion, controllerId, controllerEpoch,
          epochInRequest, // Correct broker epoch unless the stale case is requested
          true, Set(tp).asJava)
        if (isEpochInRequestStale) {
          sendAndVerifyStaleBrokerEpochInResponse(controllerChannelManager, ApiKeys.STOP_REPLICA, requestBuilder)
        }
        else {
          sendAndVerifySuccessfulResponse(controllerChannelManager, ApiKeys.STOP_REPLICA, requestBuilder)
          assertTrue(broker2.replicaManager.getPartition(tp).isEmpty)
        }
      }
    } finally {
      controllerChannelManager.shutdown()
      metrics.close()
    }
  }

  // Waits for controller election and returns the elected server.
  private def getController: KafkaServer = {
    val controllerId = TestUtils.waitUntilControllerElected(zkClient)
    servers.filter(s => s.config.brokerId == controllerId).head
  }

  // Polls until the controller context's live-broker epochs equal the
  // broker epochs currently stored in ZooKeeper.
  private def checkControllerBrokerEpochsCacheMatchesWithZk(controllerContext: ControllerContext): Unit = {
    val brokerAndEpochs = zkClient.getAllBrokerAndEpochsInCluster
    TestUtils.waitUntilTrue(() => {
      val brokerEpochsInControllerContext = controllerContext.liveBrokerIdAndEpochs
      if (brokerAndEpochs.size != brokerEpochsInControllerContext.size) false
      else {
        brokerAndEpochs.forall {
          case (broker, epoch) => brokerEpochsInControllerContext.get(broker.id).contains(epoch)
        }
      }
    }, "Broker epoch mismatches")
  }

  // Sends the request to broker2 and expects a STALE_BROKER_EPOCH error in
  // the response.
  // NOTE(review): `staleBrokerEpochDetected` is written from the response
  // callback thread but not marked @volatile, unlike `succeed` below —
  // confirm whether that is intentional.
  private def sendAndVerifyStaleBrokerEpochInResponse(controllerChannelManager: ControllerChannelManager, apiKeys: ApiKeys,
                                                      builder: AbstractControlRequest.Builder[_ <: AbstractControlRequest]): Unit = {
    var staleBrokerEpochDetected = false
    controllerChannelManager.sendRequest(brokerId2, apiKeys, builder,
      response => {staleBrokerEpochDetected = response.errorCounts().containsKey(Errors.STALE_BROKER_EPOCH)})
    TestUtils.waitUntilTrue(() => staleBrokerEpochDetected, "Broker epoch should be stale")
    assertTrue("Stale broker epoch not detected by the broker", staleBrokerEpochDetected)
  }

  // Sends the request to broker2 and expects a response with no errors
  // (or only a NONE error count).
  private def sendAndVerifySuccessfulResponse(controllerChannelManager: ControllerChannelManager, apiKeys: ApiKeys,
                                              builder: AbstractControlRequest.Builder[_ <: AbstractControlRequest]): Unit = {
    @volatile var succeed = false
    controllerChannelManager.sendRequest(brokerId2, apiKeys, builder,
      response => {
        succeed = response.errorCounts().isEmpty ||
          (response.errorCounts().containsKey(Errors.NONE) && response.errorCounts().size() == 1)})
    TestUtils.waitUntilTrue(() => succeed, "Should receive response with no errors")
  }
}
|
gf53520/kafka
|
core/src/test/scala/unit/kafka/server/BrokerEpochIntegrationTest.scala
|
Scala
|
apache-2.0
| 10,667
|
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.stacklang
/**
 * Runs the shared example checks from BaseExamplesSuite against the
 * standard (built-in) stack-language vocabulary.
 */
class StandardExamplesSuite extends BaseExamplesSuite {
  override def vocabulary: Vocabulary = StandardVocabulary
}
|
brharrington/atlas
|
atlas-core/src/test/scala/com/netflix/atlas/core/stacklang/StandardExamplesSuite.scala
|
Scala
|
apache-2.0
| 762
|
package frdomain.ch3
package algebra.interpreter
import java.util.{ Date, Calendar }
import util.{ Try, Success, Failure }
import common._
import algebra.AccountService
/**
 * Try-based interpreter of the account-service algebra.
 *
 * Every operation is pure: it either returns an (updated copy of the)
 * account wrapped in Success, or a Failure describing the violated
 * domain rule.
 */
object AccountService extends AccountService[Account, Amount, Balance] {

  /**
   * Opens an account. Fails when the account number or name is blank,
   * or when the requested opening date lies in the past.
   */
  def open(no: String, name: String, openingDate: Option[Date]): Try[Account] = {
    // Resolve the date once: the original evaluated `getOrElse(today)` twice,
    // so the date that was validated could differ from the date stored in the
    // account if `today` is re-evaluated between the two calls.
    val effectiveDate = openingDate.getOrElse(today)
    if (no.isEmpty || name.isEmpty)
      Failure(new Exception("Account no or name cannot be blank"))
    else if (effectiveDate before today)
      Failure(new Exception("Cannot open account in the past"))
    else
      Success(Account(no, name, effectiveDate))
  }

  /**
   * Closes the account on the given date (defaulting to today).
   * Fails when the close date precedes the opening date.
   */
  def close(account: Account, closeDate: Option[Date]): Try[Account] = {
    val cd = closeDate.getOrElse(today)
    if (cd before account.dateOfOpening)
      Failure(new Exception(s"Close date $cd cannot be before opening date ${account.dateOfOpening}"))
    else
      Success(account.copy(dateOfClosing = Some(cd)))
  }

  /** Debits `amount` from the account; fails when the balance does not cover it. */
  def debit(a: Account, amount: Amount): Try[Account] =
    if (a.balance.amount < amount)
      Failure(new Exception("Insufficient balance"))
    else
      Success(a.copy(balance = Balance(a.balance.amount - amount)))

  /** Credits `amount` to the account; always succeeds. */
  def credit(a: Account, amount: Amount): Try[Account] =
    Success(a.copy(balance = Balance(a.balance.amount + amount)))

  /** Returns the account's current balance. */
  def balance(account: Account): Try[Balance] = Success(account.balance)
}
|
debasishg/frdomain
|
src/main/scala/frdomain/ch3/algebra/interpreter/AccountService.scala
|
Scala
|
apache-2.0
| 1,349
|
/**
* Copyright 2011-2013 StackMob
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stackmob.customcode.dev.server.sdk.logger
import com.stackmob.sdkapi.LoggerService
import org.slf4j.LoggerFactory
/**
 * LoggerService implementation that forwards every logging call to an
 * SLF4J logger resolved by the given name.
 */
class LoggerServiceImpl(name: String) extends LoggerService {
  // Lazily resolved so the underlying SLF4J logger is created on first use.
  private lazy val logger = LoggerFactory.getLogger(name)

  def trace(s: String): Unit = logger.trace(s)
  def trace(s: String, t: Throwable): Unit = logger.trace(s, t)

  def debug(s: String): Unit = logger.debug(s)
  def debug(s: String, t: Throwable): Unit = logger.debug(s, t)

  def info(s: String): Unit = logger.info(s)
  def info(s: String, t: Throwable): Unit = logger.info(s, t)

  def warn(s: String): Unit = logger.warn(s)
  def warn(s: String, t: Throwable): Unit = logger.warn(s, t)

  def error(s: String): Unit = logger.error(s)
  def error(s: String, t: Throwable): Unit = logger.error(s, t)
}
|
matthewfarwell/stackmob-customcode-dev
|
src/main/scala/com/stackmob/customcode/dev/server/sdk/logger/LoggerServiceImpl.scala
|
Scala
|
apache-2.0
| 1,421
|
/*
* Copyright (c) 2016, Innoave.com
* All rights reserved.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL INNOAVE.COM OR ITS CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.innoave.soda.l10n.resource
import java.util.{ResourceBundle => JResourceBundle}
import scala.util.control.NonFatal
import com.innoave.soda.l10n.BundleName
import com.innoave.soda.l10n.Locale
import com.innoave.soda.l10n.Message
import com.innoave.soda.l10n.Localized
import com.innoave.soda.l10n.ResourceBundle
import com.innoave.soda.l10n.DefineMessage
import com.innoave.soda.l10n.KeyNamingStrategy
/** Mixin producing resource bundles backed by UTF-8 encoded .properties files. */
trait Utf8PropertiesResourceBundleProducer {
  // Delegates to the companion factory, which installs the UTF-8 bundle control.
  final def resourceBundleFor(bundleName: BundleName, locale: Locale): ResourceBundle =
    Utf8PropertiesResourceBundle(bundleName, locale)
}
/**
 * ResourceBundle adapter around a Java ResourceBundle, mapping message keys
 * to localized strings and degrading gracefully when a key is missing.
 */
final class Utf8PropertiesResourceBundle private[resource](val delegate: JResourceBundle) extends ResourceBundle {

  override def bundleName: BundleName = BundleName(delegate.getBaseBundleName)

  override def locale: Locale = Locale.fromJavaLocale(delegate.getLocale)

  override def stringFor(message: Message): String = lookup(message.key())

  override def stringFor[T](localized: Localized[T]): String = lookup(localized.key())

  // Returns a visibly broken placeholder (!!!key!!!) instead of throwing
  // when the key cannot be resolved in the underlying bundle.
  private def lookup(key: String): String =
    try delegate.getString(key)
    catch {
      case NonFatal(_) => s"!!!$key!!!"
    }
}
object Utf8PropertiesResourceBundle {
  /** Loads the bundle for the given name and locale via the UTF-8 aware control. */
  def apply(bundleName: BundleName, locale: Locale): Utf8PropertiesResourceBundle =
    new Utf8PropertiesResourceBundle(JResourceBundle.getBundle(bundleName.value, locale.asJavaLocale, Utf8ResourceBundleControl))

  /**
   * Renders a .properties stub for the given message definitions: a comment
   * header naming the definitions type, followed by one `key=` entry per
   * defined message.
   */
  def stubFor(messages: DefineMessage): String =
    s"""#
       |# ${KeyNamingStrategy.simpleTypeName(messages.getClass)} : Message definitions
       |#
       |""".stripMargin +
      messages.values.iterator.map(m => m.key + "=\\n").mkString
}
|
innoave/soda
|
l10n/src/main/scala/com/innoave/soda/l10n/resource/Utf8PropertiesResourceBundle.scala
|
Scala
|
apache-2.0
| 2,577
|
package io.getquill.codegen
import io.getquill.codegen.integration.CodegenTestCases._
import io.getquill.codegen.util.ConfigPrefix.TestSqlServerDB
import io.getquill.codegen.util._
import org.scalatest.matchers.should.Matchers._
/**
 * Runs the code-generator integration cases against the SQL Server test
 * database configured under TestSqlServerDB, exercising both the trivial
 * and the composable generators.
 */
class SqlServerCodegenTestCases extends CodegenSpec {
  // NOTE(review): this SQL Server suite imports the *postgres* generated
  // package — confirm whether io.getquill.codegen.generated.sqlserver was
  // intended instead.
  import io.getquill.codegen.generated.postgres._

  type Prefix = TestSqlServerDB
  val prefix = TestSqlServerDB

  "trivial generator tests" - {
    "use trivial snake case schema" in WithContext[Prefix, `1-simple-snake`].run { ctx =>
      import `1-simple-snake-lib`.public._
      import ctx._
      val results = ctx.run(query[Person].filter(_.age > 11)).toSeq
      results should contain theSameElementsAs
        (List(Person(1, "Joe", "Bloggs", 22), Person(2, "Jack", "Ripper", 33)))
    }
    "use trivial literal schema" in WithContext[Prefix, `2-simple-literal`].run { ctx =>
      import `2-simple-literal-lib`.public._
      import ctx._
      val results = ctx.run(query[Person].filter(_.age > 11)).toSeq
      results should contain theSameElementsAs
        (List(Person(1, "Joe", "Bloggs", 22), Person(2, "Jack", "Ripper", 33)))
    }
  }

  "composable generator" - {
    "1-comp-sanity" in WithContext[Prefix, `1-comp-sanity`].run { ctx =>
      import `1-comp-sanity-lib`.public._
      import ctx._
      ctx.run(query[Person].filter(_.age > 11)) should contain theSameElementsAs List(
        Person(1, "Joe", "Bloggs", 22),
        Person(2, "Jack", "Ripper", 33)
      )
    }
    "2-comp-stereo-single" in WithContext[Prefix, `2-comp-stereo-single`].run { ctx =>
      import `2-comp-stereo-single-lib`.public._
      import ctx._
      (ctx.run(PersonDao.query.filter(_.age > 11))) should contain theSameElementsAs
        (List(
          Person(1, "Joe", "Bloggs", 22),
          Person(2, "Jack", "Ripper", 33)
        ))
    }
    "3-comp-stereo-oneschema" in WithContext[Prefix, `3-comp-stereo-oneschema`].run { ctx =>
      import `3-comp-stereo-oneschema-lib`.public._
      import ctx._
      (ctx.run(PublicSchema.PersonDao.alphaPerson.filter(_.age > 11))) should contain theSameElementsAs (
        List(
          Person(1, "Joe", "Bloggs", 22, 55L, "Wonkles"),
          Person(2, "Jack", "Ripper", 33, 66L, "Ginkles")
        )
      )
      (ctx.run(PublicSchema.AddressDao.publicAddress.filter(_.personFk == 1))) should contain theSameElementsAs (
        List(
          Address(1, "123 Someplace", 1001),
          Address(1, "678 Blah", 2002)
        )
      )
    }
    "4-comp-stereo-twoschema" in WithContext[Prefix, `4-comp-stereo-twoschema`].run { ctx =>
      import `4-comp-stereo-twoschema-lib`.common._
      import `4-comp-stereo-twoschema-lib`.public._
      import ctx._
      (ctx.run(PersonDao.alphaPerson.filter(_.age > 11))) should contain theSameElementsAs (
        List(
          Person(1, "Joe", "Bloggs", 22, 55L, "Wonkles"),
          Person(2, "Jack", "Ripper", 33, 66L, "Ginkles")
        )
      )
      (ctx.run(AddressDao.publicAddress.filter(_.personFk == 1))) should contain theSameElementsAs (
        List(
          Address(1, "123 Someplace", 1001),
          Address(1, "678 Blah", 2002)
        )
      )
    }
    "5 - non-stereotyped multiple schemas" in WithContext[Prefix, `5-comp-non-stereo-allschema`].run { ctx =>
      import `5-comp-non-stereo-allschema-lib`.alpha._
      import `5-comp-non-stereo-allschema-lib`.public._
      import ctx._
      (ctx.run(ctx.AlphaSchema.PersonDao.query.filter(_.age > 11))) should contain theSameElementsAs (
        List(
          Person(1, "Joe", "Bloggs", 22, "blah", 55, "Wonkles"),
          Person(2, "Jack", "Ripper", 33, "blah", 66, "Ginkles")
        )
      )
      (ctx.run(ctx.PublicSchema.AddressDao.query.filter(_.personFk == 1))) should contain theSameElementsAs (
        List(
          Address(1, "123 Someplace", 1001),
          Address(1, "678 Blah", 2002)
        )
      )
    }
  }
}
|
getquill/quill
|
quill-codegen-tests/src/test/scala/io/getquill/codegen/SqlServerCodegenTestCases.scala
|
Scala
|
apache-2.0
| 3,931
|
/*
* Copyright 2017-2022 John Snow Labs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.johnsnowlabs.nlp.annotators
import com.johnsnowlabs.nlp.{Annotation, DataBuilder}
import com.johnsnowlabs.tags.FastTest
import org.apache.spark.ml.Pipeline
import org.apache.spark.sql.{Dataset, Row}
import org.joda.time.LocalDateTime
import org.joda.time.format.DateTimeFormat
import org.scalatest.flatspec.AnyFlatSpec
import java.time.LocalDate
import java.time.format.DateTimeFormatter
class DateMatcherMultiLanguageTestSpec extends AnyFlatSpec with DateMatcherBehaviors {
/** ITALIAN * */
"a DateMatcher" should "be catching formatted italian dates" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Sono arrivato in Italia il 15/9/2012.")
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat("MM/dd/yyyy")
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
assert(annotations.head.result == "09/15/2012")
}
"a DateMatcher" should "be catching unformatted italian dates" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Sono arrivato in Italia il 15 Settembre 2012.")
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat("MM/dd/yyyy")
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
assert(annotations.head.result == "09/15/2012")
}
"a DateMatcher" should "be catching unformatted italian language dates" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Sono arrivato in Italia il 15 Settembre 2012.")
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat("MM/dd/yyyy")
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
assert(annotations.head.result == "09/15/2012")
}
"a DateMatcher" should "be catching relative unformatted italian language dates yearly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Sono arrivato in Italia 2 anni fa.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusYears(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted italian language dates weekly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Sono arrivato in Italia 2 settimane fa.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusWeeks(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted italian language dates daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Sono arrivato in Italia 2 giorni fa.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusDays(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted italian language future dates yearly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Il prossimo anno tornerò in Italia.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusYears(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted italian language future dates monthly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Il mese prossimo tornerò in Italia.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusMonths(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted italian language future dates weekly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("La settimana prossima tornerò in Italia.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusWeeks(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted italian language future dates daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Domani andrò in Italia.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusDays(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative numeric date in italian language daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Andrò in Italia tra 4 giorni.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusDays(4L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative numeric date in italian language hourly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Partirò per l'Italia tra 4 ore.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDateTime = LocalDateTime.now.plusHours(4)
val formatter = DateTimeFormat.forPattern(DateFormat)
val formattedDateString = formatter.print(localDateTime)
assert(annotations.head.result == formattedDateString)
}
/** FRENCH * */
"a DateMatcher" should "be catching formatted french dates" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Je suis arrivé en France le 23/5/2019.")
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat("MM/dd/yyyy")
.setSourceLanguage("fr")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
assert(annotations.head.result == "05/23/2019")
}
"a DateMatcher" should "be catching unformatted french dates" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Je suis arrivé en France le 23 avril 2019.")
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat("MM/dd/yyyy")
.setSourceLanguage("fr")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
assert(annotations.head.result == "04/23/2019")
}
"a DateMatcher" should "be catching unformatted french language dates" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Je suis arrivé en France le 23 février 2019.")
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat("MM/dd/yyyy")
.setSourceLanguage("fr")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
assert(annotations.head.result == "02/23/2019")
}
"a DateMatcher" should "be catching relative unformatted french language dates yearly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Je suis arrivé en France il y a 2 ans.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("fr")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusYears(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted french language dates weekly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Je suis arrivé en France il y a 2 semaines.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("fr")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusWeeks(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted french language dates daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Je suis arrivé en France il y a 2 jours.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("fr")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusDays(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted french language future dates yearly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Je retournerai en Italie l'année prochaine.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("fr")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusYears(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted french language future dates monthly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Je retournerai en Italie le mois prochain.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("fr")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusMonths(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted french language future dates weekly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("La semaine prochaine, je retournerai en Italie.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("fr")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusWeeks(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted french language future dates daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Demain j'irai en France.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("fr")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusDays(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative numeric date in french language daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("J'irai en Italie dans 4 jours.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("fr")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusDays(4L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative numeric date in french language hourly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Je pars pour l'Italie dans 4 heures.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("fr")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDateTime = LocalDateTime.now.plusHours(4)
val formatter = DateTimeFormat.forPattern(DateFormat)
val formattedDateString = formatter.print(localDateTime)
assert(annotations.head.result == formattedDateString)
}
/** PORTUGUESE * */
"a DateMatcher" should "be catching formatted portuguese dates" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Cheguei à França no dia 23/5/2019.")
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat("MM/dd/yyyy")
.setSourceLanguage("pt")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
assert(annotations.head.result == "05/23/2019")
}
"a DateMatcher" should "be catching unformatted portuguese dates" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Cheguei à França em 23 de maio de 2019.")
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat("MM/dd/yyyy")
.setSourceLanguage("pt")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
assert(annotations.head.result == "05/23/2019")
}
"a DateMatcher" should "be catching relative unformatted portuguese language dates yearly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Eu cheguei na França 2 anos atrás.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("pt")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusYears(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted portuguese language dates weekly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Eu cheguei na França 2 semanas atrás.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("pt")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusWeeks(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted portuguese language dates daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Eu cheguei na França 2 dias atrás.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("pt")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusDays(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted portuguese language future dates yearly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("No próximo ano, eu voltarei novamente au Portugal.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("pt")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusYears(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted portuguese language future dates monthly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("No próximo mês irei novamente a Portugal.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("pt")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusMonths(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted portuguese language future dates weekly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Na próxima semana vou para portugal.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("pt")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusWeeks(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted portuguese language future dates daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Amanhã vou para portugal.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("pt")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusDays(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative numeric date in portuguese language daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Vou visitar Portugal em 4 dias.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("pt")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusDays(4L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative numeric date in portuguese language hourly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Vou chegar a Portugal em 4 horas.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("pt")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDateTime = LocalDateTime.now.plusHours(4)
val formatter = DateTimeFormat.forPattern(DateFormat)
val formattedDateString = formatter.print(localDateTime)
assert(annotations.head.result == formattedDateString)
}
/** SPANISH * */
"a DateMatcher" should "be catching formatted spanish dates" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Llegué a España el 23/05/2019.")
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat("MM/dd/yyyy")
.setSourceLanguage("es")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
assert(annotations.head.result == "05/23/2019")
}
"a DateMatcher" should "be catching unformatted spanish dates" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Llegué a España el 23 de mayo de 2019.")
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat("MM/dd/yyyy")
.setSourceLanguage("es")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
assert(annotations.head.result == "05/23/2019")
}
"a DateMatcher" should "be catching relative unformatted spanish language dates yearly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Llegué a españa hace 2 años.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("es")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusYears(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted spanish language dates weekly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Llegué a España hace 2 semanas.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("es")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusWeeks(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted spanish language dates daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Llegué a España hace 2 días.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("es")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusDays(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted spanish language future dates yearly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("El año que viene volveré a España.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("es")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusYears(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted spanish language future dates monthly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("El mes que viene volveré a España.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("es")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusMonths(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted spanish language future dates weekly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("La semana que viene me voy a España.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("es")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusWeeks(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted spanish language future dates daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Mañana me voy a España.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("es")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusDays(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative numeric date in spanish language daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Visitaré España en 4 días.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("es")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusDays(4L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative numeric date in spanish language hourly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Visitaré España en 4 horas.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("es")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDateTime = LocalDateTime.now.plusHours(4)
val formatter = DateTimeFormat.forPattern(DateFormat)
val formattedDateString = formatter.print(localDateTime)
assert(annotations.head.result == formattedDateString)
}
  /** GERMAN */
"a DateMatcher" should "be catching formatted german dates" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Ich bin am 23/05/2019 in Deutschland angekommen.")
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat("MM/dd/yyyy")
.setSourceLanguage("de")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
assert(annotations.head.result == "05/23/2019")
}
"a DateMatcher" should "be catching unformatted german dates" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Ich bin am 23 Mai 2019 in Deutschland angekommen.")
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat("MM/dd/yyyy")
.setSourceLanguage("de")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
assert(annotations.head.result == "05/23/2019")
}
"a DateMatcher" should "be catching relative unformatted german language dates yearly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Ich bin vor 2 jahren in Deutschland angekommen.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("de")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusYears(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted german language dates weekly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Ich bin vor 2 wochen in Deutschland angekommen.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("de")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusWeeks(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted german language dates daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Ich bin vor 2 tagen in Deutschland angekommen.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("de")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.minusDays(2L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted german language future dates yearly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Nächstes jahr fahre ich nach Deutschland.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("de")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusYears(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted german language future dates monthly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Nächsten monat fahre ich nach Deutschland.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("de")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusMonths(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted german language future dates weekly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Nächste woche fahre ich nach Deutschland.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("de")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusWeeks(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative unformatted german language future dates daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Morgen fahre ich nach Deutschland.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("de")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusDays(1L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative numeric date in german language daily" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Ich werde Deutschland in 4 tagen besuchen.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("de")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDate = LocalDate.now.plusDays(4L)
val formatter = DateTimeFormatter.ofPattern(DateFormat)
val formattedDateString = localDate.format(formatter)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative numeric date in german language hourly" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild("Ich werde Deutschland in 4 stunden besuchen.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("de")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDateTime = LocalDateTime.now.plusHours(4)
val formatter = DateTimeFormat.forPattern(DateFormat)
val formattedDateString = formatter.print(localDateTime)
assert(annotations.head.result == formattedDateString)
}
"a DateMatcher" should "be catching relative date in italian with complex sentence" taggedAs FastTest in {
val data: Dataset[Row] = DataBuilder.basicDataBuild(
"Così il ct azzurro Roberto Mancini, oggi, poco prima di entrare al Quirinale dove l'Italia campione " +
"d'Europa sta per essere accolta dal Presidente della Repubblica Sergio Mattarella.")
val DateFormat = "MM/dd/yyyy"
val dateMatcher = new DateMatcher()
.setInputCols("document")
.setOutputCol("date")
.setOutputFormat(DateFormat)
.setSourceLanguage("it")
val pipeline = new Pipeline().setStages(Array(dateMatcher))
val annotated = pipeline.fit(data).transform(data)
val annotations: Seq[Annotation] =
Annotation.getAnnotations(
annotated.select("date").collect().head,
"date")
val localDateTime = LocalDateTime.now
val formatter = DateTimeFormat.forPattern(DateFormat)
val formattedDateString = formatter.print(localDateTime)
assert(annotations.head.result == formattedDateString)
}
}
|
JohnSnowLabs/spark-nlp
|
src/test/scala/com/johnsnowlabs/nlp/annotators/DateMatcherMultiLanguageTestSpec.scala
|
Scala
|
apache-2.0
| 51,228
|
package mimir.algebra;
import java.sql._
import mimir.ctables.CTables
/** Signals that an expression's computed type (`found`) is incompatible with
  * the type the surrounding operation requires (`expected`); `context` names
  * the typing rule that failed (e.g., "Numeric", "Compat", "Cast"). */
class TypeException(found: Type.T, expected: Type.T,
                    context:String)
   extends Exception(
    "Type Mismatch ["+context+
    "]: found "+found.toString+
    ", but expected "+expected.toString
  );
class RAException(msg: String) extends Exception(msg);
/** The types supported by the expression language, plus the string
  * conversions used when (de)serializing schemas.  TAny is the wildcard type
  * used for NULLs and otherwise unconstrained expressions. */
object Type extends Enumeration {
  type T = Value
  val TInt, TFloat, TDate, TString, TBool, TRowId, TAny = Value

  /** Render a concrete type as its keyword.  TAny is deliberately
    * unprintable: it should never appear in a materialized schema. */
  def toString(t: T) = t match {
    case TInt => "int"
    case TFloat => "decimal"
    case TDate => "date"
    case TString => "string"
    case TBool => "bool"
    case TRowId => "rowid"
    case TAny => throw new SQLException("Unable to produce string of type TAny");
  }
  def toStringPrimitive(t: T) = StringPrimitive(toString(t))

  /** Inverse of toString (case-insensitive); note that both "float" and
    * "decimal" map to TFloat.  Throws SQLException on unknown names. */
  def fromString(t: String) = t.toLowerCase match {
    case "int" => Type.TInt
    case "float" => Type.TFloat
    case "decimal" => Type.TFloat
    case "date" => Type.TDate
    case "string" => Type.TString
    case "bool" => Type.TBool
    case "rowid" => Type.TRowId
    case _ => throw new SQLException("Invalid Type '" + t + "'");
  }
  def fromStringPrimitive(t: StringPrimitive) = fromString(t.asString)
}
import mimir.algebra.Type._
/** Base class of all expression-tree nodes. */
abstract class Expression {
  /** Compute this expression's type under the given variable-name -> type
    * bindings; throws TypeException / RAException on mistyped or unbound input. */
  def exprType(bindings: Map[String,Type.T]): Type.T
  /** Convenience overload: type the expression with no variable bindings. */
  def exprType: Type.T = exprType(Map[String,Type.T]())
  /** Immediate sub-expressions, in a fixed order that rebuild() relies on. */
  def children: List[Expression]
  /** Rebuild this node with replacement children (same order as children). */
  def rebuild(c: List[Expression]): Expression
}
/** An expression with no sub-expressions; rebuild() returns the node itself. */
abstract class LeafExpression extends Expression {
  def children = List[Expression]();
  def rebuild(c: List[Expression]):Expression = { return this }
}
/** A constant value of type `t`.  The as* accessors perform strict casts and
  * throw TypeException when the underlying type cannot satisfy the request;
  * payload exposes the raw boxed value for interop. */
abstract class PrimitiveValue(t: Type.T)
  extends LeafExpression
{
  def exprType(x: Map[String,Type.T]) = t
  // Cast to integer, or throw TypeException if not representable.
  def asLong: Long;
  // Cast to floating point, or throw TypeException if not representable.
  def asDouble: Double;
  // Render as a string.
  def asString: String;
  // The raw underlying value, boxed as an Object.
  def payload: Object;
}
/** An integer constant; numeric casts always succeed. */
case class IntPrimitive(v: Long)
  extends PrimitiveValue(TInt)
{
  override def toString() = v.toString
  def payload: Object = v.asInstanceOf[Object]
  def asString: String = v.toString
  def asLong: Long = v
  def asDouble: Double = v.toDouble
}
/** A string constant; numeric casts parse the text and may throw
  * NumberFormatException on malformed input.  Renders single-quoted. */
case class StringPrimitive(v: String)
  extends PrimitiveValue(TString)
{
  override def toString() = "'"+v.toString+"'"
  def payload: Object = v.asInstanceOf[Object]
  def asString: String = v
  def asLong: Long = java.lang.Long.parseLong(v)
  def asDouble: Double = java.lang.Double.parseDouble(v)
}
/** A row identifier, stored as a string; behaves like StringPrimitive but
  * carries the TRowId type. */
case class RowIdPrimitive(v: String)
  extends PrimitiveValue(TRowId)
{
  override def toString() = "'"+v.toString+"'"
  def payload: Object = v.asInstanceOf[Object]
  def asString: String = v
  def asLong: Long = java.lang.Long.parseLong(v)
  def asDouble: Double = java.lang.Double.parseDouble(v)
}
/** A floating-point constant; asLong is rejected (no implicit truncation). */
case class FloatPrimitive(v: Double)
  extends PrimitiveValue(TFloat)
{
  override def toString() = v.toString
  def payload: Object = v.asInstanceOf[Object]
  def asString: String = v.toString
  def asDouble: Double = v
  def asLong: Long = throw new TypeException(TFloat, TInt, "Cast")
}
/** A date constant (year, month, day), rendered as DATE 'y-m-d'. */
case class DatePrimitive(y: Int, m: Int, d: Int)
  extends PrimitiveValue(TDate)
{
  override def toString() = "DATE '"+y+"-"+m+"-"+d+"'"
  // Bug fix: failed casts previously reported TString as the *found* type;
  // the value being cast is a date, so report TDate.
  def asLong: Long = throw new TypeException(TDate, TInt, "Cast");
  def asDouble: Double = throw new TypeException(TDate, TFloat, "Cast");
  def asString: String = toString;
  def payload: Object = (y, m, d).asInstanceOf[Object];
  /** Three-way comparison against `c`: -1 when `c` precedes this date, 1 when
    * `c` follows it, 0 when equal.  Note the orientation: the sign describes
    * `c` relative to `this`, which existing callers depend on. */
  def compare(c: DatePrimitive): Integer = {
    if(c.y < y){ -1 }
    else if(c.y > y) { 1 }
    else if(c.m < m) { -1 }
    else if(c.m > m) { 1 }
    else if(c.d < d) { -1 }
    else if(c.d > d) { 1 }
    else { 0 }
  }
}
/** A boolean constant, rendered as TRUE/FALSE; numeric casts are rejected. */
case class BoolPrimitive(v: Boolean)
  extends PrimitiveValue(TBool)
{
  override def toString() = if(v) {"TRUE"} else {"FALSE"}
  def payload: Object = v.asInstanceOf[Object]
  def asString: String = toString
  def asLong: Long = throw new TypeException(TBool, TInt, "Cast")
  def asDouble: Double = throw new TypeException(TBool, TFloat, "Cast")
}
/** SQL NULL: typed TAny; every cast is rejected and payload is null. */
case class NullPrimitive()
  extends PrimitiveValue(TAny)
{
  override def toString() = "NULL"
  def payload: Object = null
  def asString: String = throw new TypeException(TAny, TString, "Cast Null")
  def asLong: Long = throw new TypeException(TAny, TInt, "Cast Null")
  def asDouble: Double = throw new TypeException(TAny, TFloat, "Cast Null")
}
/** Logical negation; types only when the child is boolean-compatible. */
case class Not(child: Expression)
  extends Expression
{
  def exprType(bindings: Map[String,Type.T]): Type.T =
    Arith.escalateCompat(TBool, child.exprType(bindings))
  def children: List[Expression] = List(child)
  def rebuild(x: List[Expression]): Expression = Not(x.head)
}
/** An opaque n-ary operation: subclasses supply the evaluation rule via
  * get(), which receives the already-evaluated argument values. */
abstract class Proc(args: List[Expression]) extends Expression
{
  def getArgs = args
  def children = args
  // Evaluate the procedure over concrete argument values.
  def get(v: List[PrimitiveValue]): PrimitiveValue
}
/** Arithmetic (+, -, *, /) and boolean (&, |) operators, the type-escalation
  * rules they obey, and smart constructors with basic boolean folding. */
object Arith extends Enumeration {
  type Op = Value
  val Add, Sub, Mult, Div, And, Or = Value

  /** Regex recognizing one operator token.
    * Bug fix: the previous pattern double-escaped its backslashes inside a
    * triple-quoted string (e.g. `\\+` — regex "one or more backslashes"),
    * so the `+`, `*`, and `|` operators could never match. */
  def matchRegex = """\+|-|\*|/|\||&""".r

  /** Parse an operator token; throws on anything unrecognized. */
  def fromString(a: String) = {
    a match {
      case "+" => Add
      case "-" => Sub
      case "*" => Mult
      case "/" => Div
      case "&" => And
      case "|" => Or
      case x => throw new Exception("Invalid operand '"+x+"'")
    }
  }

  /** Result type of a numeric operation: int op int = int; anything involving
    * a float widens to float; bool op bool = bool (for the connectives);
    * TAny defers to the other operand.  Throws TypeException otherwise. */
  def escalateNumeric(a: Type.T, b: Type.T): Type.T = {
    (a,b) match {
      case (_, TAny) => a
      case (TAny, _) => b
      case (TInt, TInt) => TInt
      case (TFloat, TInt) => TFloat
      case (TInt, TFloat) => TFloat
      case (TFloat, TFloat) => TFloat
      case (TBool, TBool) => TBool
      case ((TInt | TFloat), _) =>
        throw new TypeException(b, TFloat, "Numeric")
      case _ =>
        throw new TypeException(a, TFloat, "Numeric")
    }
  }

  /** The least common type both operands can widen to (used for equality
    * comparisons and CASE branches); throws if the types are incompatible. */
  def escalateCompat(a: Type.T, b: Type.T): Type.T = {
    (a, b) match {
      case (TAny, _) => b
      case (_, TAny) => a
      case (TInt, TInt) => TInt
      case (TInt, TFloat) => TFloat
      case (TFloat, (TInt | TFloat)) => TFloat
      case (TString, TString) => TString
      case (TRowId, TString) => TString
      case (TString, TRowId) => TString
      case (TRowId, TRowId) => TRowId
      case (TBool, TBool) => TBool
      case _ =>
        throw new TypeException(a, b, "Compat")
    }
  }

  /** Result type of applying operator v to operands typed a and b. */
  def computeType(v: Op, a: Type.T, b: Type.T): Type.T = {
    v match {
      case (Add | Sub | Mult | Div) => escalateNumeric(a, b)
      case (And | Or) =>
        if(a != TBool) {
          throw new TypeException(a, TBool, "BoolOp")
        } else if(b != TBool) {
          throw new TypeException(b, TBool, "BoolOp")
        } else {
          TBool
        }
    }
  }

  /** SQL rendering of the operator. */
  def opString(v: Op): String = {
    v match {
      case Add => "+"
      case Sub => "-"
      case Mult => "*"
      case Div => "/"
      case And => " AND "
      case Or => " OR "
    }
  }

  /** True exactly for the boolean connectives AND / OR. */
  def isBool(v: Op): Boolean = {
    v match {
      case And | Or => true
      case _ => false
    }
  }
  def isNumeric(v: Op): Boolean = !isBool(v)

  /** Conjunction with constant folding (TRUE is dropped, FALSE dominates). */
  def makeAnd(a: Expression, b: Expression): Expression =
    (a, b) match {
      case (BoolPrimitive(true), _) => b
      case (_, BoolPrimitive(true)) => a
      case (BoolPrimitive(false), _) => BoolPrimitive(false)
      case (_, BoolPrimitive(false)) => BoolPrimitive(false)
      case _ => Arithmetic(And, a, b)
    }
  /** Disjunction with constant folding (FALSE is dropped, TRUE dominates). */
  def makeOr(a: Expression, b: Expression): Expression =
    (a, b) match {
      case (BoolPrimitive(false), _) => b
      case (_, BoolPrimitive(false)) => a
      case (BoolPrimitive(true), _) => BoolPrimitive(true)
      case (_, BoolPrimitive(true)) => BoolPrimitive(true)
      case _ => Arithmetic(Or, a, b)
    }
  /** Negation pushed through connectives (De Morgan), comparisons, and
    * IS NULL checks; double negation cancels. */
  def makeNot(e: Expression): Expression =
  {
    e match {
      case BoolPrimitive(b) => BoolPrimitive(!b)
      case Arithmetic(And, a, b) =>
        Arithmetic(Or, makeNot(a), makeNot(b))
      case Arithmetic(Or, a, b) =>
        Arithmetic(And, makeNot(a), makeNot(b))
      case Comparison(c, a, b) =>
        Comparison(Cmp.negate(c), a, b)
      case IsNullExpression(a, n) =>
        IsNullExpression(a, !n)
      case Not(a) => a
      case _ => Not(e)
    }
  }
}
/** Comparison operators: typing rules, logical negation, and SQL rendering. */
object Cmp extends Enumeration {
  type Op = Value
  val Eq, Neq, Gt, Lt, Gte, Lte, Like, NotLike = Value

  /** A comparison always yields TBool, but operands must be compatible:
    * equality needs mutually-compatible types, ordering needs numerics, and
    * LIKE needs strings on both sides. */
  def computeType(v: Op, a: Type.T, b: Type.T): Type.T = {
    v match {
      case (Eq | Neq) =>
        Arith.escalateCompat(a, b); return TBool
      case (Gt | Gte | Lt | Lte) =>
        Arith.escalateNumeric(a, b); return TBool
      case (Like | NotLike) =>
        if(a != TString) {
          throw new TypeException(a, TBool, "Like")
        } else if(b != TString) {
          throw new TypeException(b, TBool, "Like")
        } else {
          TBool
        }
    }
  }

  /** Logical complement of the operator.
    * Bug fix: Like/NotLike were missing, so negating a LIKE comparison
    * (e.g. via Arith.makeNot) crashed with a MatchError. */
  def negate(v: Op): Op = {
    v match {
      case Eq => Neq
      case Neq => Eq
      case Gt => Lte
      case Gte => Lt
      case Lt => Gte
      case Lte => Gt
      case Like => NotLike
      case NotLike => Like
    }
  }

  /** SQL rendering of the operator. */
  def opString(v: Op): String = {
    v match {
      case Eq => "="
      case Neq => "<>"
      case Gt => ">"
      case Gte => ">="
      case Lt => "<"
      case Lte => "<="
      case Like => " LIKE "
      case NotLike => " NOT LIKE "
    }
  }
}
/** A reference to a named variable; its type comes from the binding map and
  * an unbound name raises RAException. */
case class Var(name: String) extends LeafExpression {
  def exprType(bindings: Map[String,Type.T]): T =
    bindings.getOrElse(name,
      throw new RAException("Missing Variable '" + name + "' in "+bindings.toString)
    )
  override def toString = name;
}
/** A binary arithmetic or boolean-connective expression. */
case class Arithmetic(op: Arith.Op, lhs: Expression,
                      rhs: Expression)
  extends Expression
{
  def exprType(bindings: Map[String,Type.T]): T = {
    val leftType = lhs.exprType(bindings)
    val rightType = rhs.exprType(bindings)
    Arith.computeType(op, leftType, rightType)
  }
  override def toString() =
    " (" + lhs.toString + Arith.opString(op) + rhs.toString + ") "
  def children = List(lhs, rhs)
  def rebuild(c: List[Expression]) = Arithmetic(op, c(0), c(1))
}
/** A binary comparison expression; always types to TBool when well-formed. */
case class Comparison(op: Cmp.Op, lhs: Expression,
                      rhs: Expression)
  extends Expression
{
  def exprType(bindings: Map[String,Type.T]): T = {
    val leftType = lhs.exprType(bindings)
    val rightType = rhs.exprType(bindings)
    Cmp.computeType(op, leftType, rightType)
  }
  override def toString() =
    " (" + lhs.toString + Cmp.opString(op) + rhs.toString + ") "
  def children = List(lhs, rhs)
  def rebuild(c: List[Expression]) = Comparison(op, c(0), c(1))
}
/** A named function or aggregate applied to a list of argument expressions.
  * Types for Mimir's built-ins are hard-coded; any other function's type is
  * looked up in the bindings under the pseudo-variable name "__<name>()". */
case class Function(op: String, params: List[Expression]) extends Expression {
  def exprType(bindings: Map[String, Type.T]): T = {
    op match {
      case "JOIN_ROWIDS" => TRowId
      case CTables.ROW_PROBABILITY => TString
      case CTables.VARIANCE | CTables.CONFIDENCE => TFloat
      case "__LIST_MIN" | "__LIST_MAX" => TFloat
      case "__LEFT_UNION_ROWID" | "__RIGHT_UNION_ROWID" => TRowId
      case _ =>
        // Fall back to a type registered for this function name.
        bindings.get("__"+op+"()") match {
          case Some(binding) => binding
          case None => throw new SQLException("Unknown Function: "+op)
        }
    }
  }
  override def toString() = {
    op match {
      // Need to special case COUNT DISTINCT
      // NOTE(review): any COUNT with arguments renders as COUNT(DISTINCT ...);
      // confirm that plain COUNT(expr) is never constructed upstream.
      case "COUNT" if params.size > 0 =>
        "COUNT(DISTINCT " + params.map( _.toString ).mkString(", ") + ")"
      case "COUNT" if params.size == 0 =>
        "COUNT(*)"
      // EXTRACT(field FROM expr) uses non-standard argument rendering; the
      // first argument must be a StringPrimitive naming the field.
      case "EXTRACT" =>
        op + "(" + params(0).asInstanceOf[StringPrimitive].v + " FROM " +
          params(1).toString + ")"
      case _ => op + "(" + params.map( _.toString ).mkString(", ") + ")"
    }
  }
  def children = params
  def rebuild(c: List[Expression]) = Function(op, c)
}
/** One WHEN ... THEN ... branch of a CASE expression. */
case class WhenThenClause(when: Expression,
                          then: Expression)
{
  /** Requires a boolean condition; yields the type of the THEN branch. */
  def exprType(bindings: Map[String,Type.T]): T = {
    val whenType = when.exprType(bindings)
    if(whenType != TBool){
      // Bug fix: the error previously re-typed `when` with EMPTY bindings
      // (`when.exprType`), which threw RAException ("Missing Variable")
      // instead of the intended TypeException whenever the condition
      // referenced a variable.  Reuse the type already computed above.
      throw new TypeException(whenType, TBool, "WHEN")
    }
    return then.exprType(bindings)
  }
  override def toString() = "WHEN " + when.toString + " THEN " + then.toString
}
/** CASE WHEN ... THEN ... [WHEN ... THEN ...] ELSE ... END */
case class CaseExpression(
  whenClauses: List[WhenThenClause],
  elseClause: Expression
) extends Expression
{
  /** The common (escalated) type of every branch.
    * Bug fix: the ELSE branch's type was previously ignored, so a CASE whose
    * WHEN branches were e.g. TInt but whose ELSE was TString typed as TInt
    * instead of raising a TypeException. */
  def exprType(bindings: Map[String,Type.T]): T = {
    (whenClauses.map( _.exprType(bindings) ) ++
      List(elseClause.exprType(bindings))).
      fold(TAny)( Arith.escalateCompat(_,_) )
  }
  override def toString() =
    "CASE "+whenClauses.map( _.toString ).mkString(" ")+
    " ELSE "+elseClause.toString+" END"
  def children =
    whenClauses.map( (w) => List(w.when, w.then) ).flatten ++ List(elseClause)
  /** Inverse of children: consume (when, then) pairs, then the ELSE. */
  def rebuild(c: List[Expression]) = {
    var currC = c
    val w =
      whenClauses.map ( _ => {
        currC match {
          case w :: t :: rest =>
            currC = rest
            WhenThenClause(w, t)
          case _ =>
            throw new SQLException("Invalid Rebuild of a Case: "+c)
        }
      })
    CaseExpression(w, currC(0))
  }
}
/** `child IS NULL`, or `child IS NOT NULL` when neg is set. */
case class IsNullExpression(child: Expression, neg: Boolean = false) extends Expression {
  def exprType(bindings: Map[String, Type.T]): T = {
    // Type the child purely to validate it; the result is always boolean.
    child.exprType(bindings)
    TBool
  }
  override def toString() = {child.toString+" IS"+(if(neg){" NOT"}else{""})+" NULL"}
  def children = List(child)
  def rebuild(c: List[Expression]) = IsNullExpression(c.head, neg)
}
|
Legacy25/mimir
|
mimircore/src/main/scala/mimir/algebra/Expression.scala
|
Scala
|
apache-2.0
| 12,943
|
import javax.crypto.Cipher
import javax.crypto.SecretKey
import javax.crypto.spec.SecretKeySpec
import javax.crypto.spec.IvParameterSpec
/** Minimal AES-128-CBC round-trip demo.
  *
  * WARNING: the key and IV are fixed all-zero example values — never do this
  * in production (derive keys properly and use a fresh random IV per message).
  */
object AesEncryptionExample {
  private val Transformation = "AES/CBC/PKCS5Padding"

  // 16 bytes = AES-128 key size; 16 bytes = AES block size (for the IV).
  private val password: Array[Byte] = "0000000000000000".getBytes
  require(password.length == 16)
  private val ivBytes: Array[Byte] = "0000000000000000".getBytes
  require(ivBytes.length == 16)

  private val key: SecretKey = new SecretKeySpec(password, "AES")
  private val iv = new IvParameterSpec(ivBytes)

  /** Encrypts `clear` with the demo key/IV and returns the ciphertext. */
  def encrypt(clear: Array[Byte]): Array[Byte] = {
    // Cipher instances are stateful; create one per call rather than sharing.
    val cipher: Cipher = Cipher.getInstance(Transformation)
    cipher.init(Cipher.ENCRYPT_MODE, key, iv)
    cipher.doFinal(clear)
  }

  /** Decrypts ciphertext produced by encrypt(). */
  def decrypt(cipherText: Array[Byte]): Array[Byte] = {
    val cipher: Cipher = Cipher.getInstance(Transformation)
    cipher.init(Cipher.DECRYPT_MODE, key, iv)
    cipher.doFinal(cipherText)
  }

  /** Explicit entry point instead of the App trait (avoids the trait's
    * delayed-initialization pitfalls); same observable behavior. */
  def main(args: Array[String]): Unit = {
    val cipherText = encrypt("my secret message".getBytes)
    println(cipherText.toSeq)
    val clearText = new String(decrypt(cipherText))
    println(clearText)
    require(clearText == "my secret message")
  }
}
|
ramn/gists-public
|
src/AesEncryption.scala
|
Scala
|
apache-2.0
| 858
|
package gitbucket.core.service
import fr.brouillard.oss.security.xhub.XHub
import fr.brouillard.oss.security.xhub.XHub.{XHubConverter, XHubDigest}
import gitbucket.core.api._
import gitbucket.core.model.{
Account,
CommitComment,
Issue,
IssueComment,
Label,
PullRequest,
WebHook,
RepositoryWebHook,
RepositoryWebHookEvent,
AccountWebHook,
AccountWebHookEvent
}
import gitbucket.core.model.Profile._
import gitbucket.core.model.Profile.profile.blockingApi._
import org.apache.http.client.utils.URLEncodedUtils
import gitbucket.core.util.JGitUtil.CommitInfo
import gitbucket.core.util.{RepositoryName, StringUtil}
import gitbucket.core.service.RepositoryService.RepositoryInfo
import org.apache.http.NameValuePair
import org.apache.http.client.entity.UrlEncodedFormEntity
import org.apache.http.message.BasicNameValuePair
import org.eclipse.jgit.api.Git
import org.eclipse.jgit.lib.ObjectId
import org.slf4j.LoggerFactory
import scala.concurrent._
import scala.util.{Failure, Success}
import org.apache.http.HttpRequest
import org.apache.http.HttpResponse
import gitbucket.core.model.WebHookContentType
import org.apache.http.client.entity.EntityBuilder
import org.apache.http.entity.ContentType
trait WebHookService {
import WebHookService._
private val logger = LoggerFactory.getLogger(classOf[WebHookService])
  /** Returns every webhook registered on a repository, paired with the set of
    * events each subscribes to, sorted by webhook URL. */
  def getWebHooks(owner: String, repository: String)(
    implicit s: Session
  ): List[(RepositoryWebHook, Set[WebHook.Event])] =
    RepositoryWebHooks
      .filter(_.byRepository(owner, repository))
      .join(RepositoryWebHookEvents)
      .on { (w, t) =>
        t.byRepositoryWebHook(w)
      }
      .map { case (w, t) => w -> t.event }
      .list
      // One row per (webhook, event); collapse to webhook -> event set.
      .groupBy(_._1)
      .mapValues(_.map(_._2).toSet)
      .toList
      .sortBy(_._1.url)
  /** Returns the repository's webhooks that subscribe to `event`
    * (deduplicated). */
  def getWebHooksByEvent(owner: String, repository: String, event: WebHook.Event)(
    implicit s: Session
  ): List[RepositoryWebHook] =
    RepositoryWebHooks
      .filter(_.byRepository(owner, repository))
      .join(RepositoryWebHookEvents)
      .on { (wh, whe) =>
        whe.byRepositoryWebHook(wh)
      }
      .filter { case (wh, whe) => whe.event === event.bind }
      .map { case (wh, whe) => wh }
      .list
      .distinct
  /** Looks up a single repository webhook by (owner, repository, url),
    * together with its subscribed events; None when no such hook exists. */
  def getWebHook(owner: String, repository: String, url: String)(
    implicit s: Session
  ): Option[(RepositoryWebHook, Set[WebHook.Event])] =
    RepositoryWebHooks
      .filter(_.byPrimaryKey(owner, repository, url))
      .join(RepositoryWebHookEvents)
      .on { (w, t) =>
        t.byRepositoryWebHook(w)
      }
      .map { case (w, t) => w -> t.event }
      .list
      // Primary-key filter yields at most one webhook, so headOption suffices.
      .groupBy(_._1)
      .mapValues(_.map(_._2).toSet)
      .headOption
def addWebHook(
owner: String,
repository: String,
url: String,
events: Set[WebHook.Event],
ctype: WebHookContentType,
token: Option[String]
)(implicit s: Session): Unit = {
RepositoryWebHooks insert RepositoryWebHook(owner, repository, url, ctype, token)
events.map { event: WebHook.Event =>
RepositoryWebHookEvents insert RepositoryWebHookEvent(owner, repository, url, event)
}
}
def updateWebHook(
owner: String,
repository: String,
url: String,
events: Set[WebHook.Event],
ctype: WebHookContentType,
token: Option[String]
)(implicit s: Session): Unit = {
RepositoryWebHooks
.filter(_.byPrimaryKey(owner, repository, url))
.map(w => (w.ctype, w.token))
.update((ctype, token))
RepositoryWebHookEvents.filter(_.byRepositoryWebHook(owner, repository, url)).delete
events.map { event: WebHook.Event =>
RepositoryWebHookEvents insert RepositoryWebHookEvent(owner, repository, url, event)
}
}
  /** Removes a repository webhook.
    * NOTE(review): its RepositoryWebHookEvents rows are not deleted here —
    * presumably cleaned up via FK cascade; confirm against the schema. */
  def deleteWebHook(owner: String, repository: String, url: String)(implicit s: Session): Unit =
    RepositoryWebHooks.filter(_.byPrimaryKey(owner, repository, url)).delete
  /** Returns every account-level webhook for a user, paired with the set of
    * events each subscribes to, sorted by webhook URL. */
  def getAccountWebHooks(owner: String)(implicit s: Session): List[(AccountWebHook, Set[WebHook.Event])] =
    AccountWebHooks
      .filter(_.byAccount(owner))
      .join(AccountWebHookEvents)
      .on { (w, t) =>
        t.byAccountWebHook(w)
      }
      .map { case (w, t) => w -> t.event }
      .list
      // One row per (webhook, event); collapse to webhook -> event set.
      .groupBy(_._1)
      .mapValues(_.map(_._2).toSet)
      .toList
      .sortBy(_._1.url)
  /** Returns the user's account-level webhooks that subscribe to `event`
    * (deduplicated). */
  def getAccountWebHooksByEvent(owner: String, event: WebHook.Event)(implicit s: Session): List[AccountWebHook] =
    AccountWebHooks
      .filter(_.byAccount(owner))
      .join(AccountWebHookEvents)
      .on { (wh, whe) =>
        whe.byAccountWebHook(wh)
      }
      .filter { case (wh, whe) => whe.event === event.bind }
      .map { case (wh, whe) => wh }
      .list
      .distinct
  /** Looks up a single account-level webhook by (owner, url), together with
    * its subscribed events; None when no such hook exists. */
  def getAccountWebHook(owner: String, url: String)(implicit s: Session): Option[(AccountWebHook, Set[WebHook.Event])] =
    AccountWebHooks
      .filter(_.byPrimaryKey(owner, url))
      .join(AccountWebHookEvents)
      .on { (w, t) =>
        t.byAccountWebHook(w)
      }
      .map { case (w, t) => w -> t.event }
      .list
      // Primary-key filter yields at most one webhook, so headOption suffices.
      .groupBy(_._1)
      .mapValues(_.map(_._2).toSet)
      .headOption
def addAccountWebHook(
owner: String,
url: String,
events: Set[WebHook.Event],
ctype: WebHookContentType,
token: Option[String]
)(implicit s: Session): Unit = {
AccountWebHooks insert AccountWebHook(owner, url, ctype, token)
events.map { event: WebHook.Event =>
AccountWebHookEvents insert AccountWebHookEvent(owner, url, event)
}
}
def updateAccountWebHook(
owner: String,
url: String,
events: Set[WebHook.Event],
ctype: WebHookContentType,
token: Option[String]
)(implicit s: Session): Unit = {
AccountWebHooks.filter(_.byPrimaryKey(owner, url)).map(w => (w.ctype, w.token)).update((ctype, token))
AccountWebHookEvents.filter(_.byAccountWebHook(owner, url)).delete
events.map { event: WebHook.Event =>
AccountWebHookEvents insert AccountWebHookEvent(owner, url, event)
}
}
  /** Removes an account-level webhook.
    * NOTE(review): its AccountWebHookEvents rows are not deleted here —
    * presumably cleaned up via FK cascade; confirm against the schema. */
  def deleteAccountWebHook(owner: String, url: String)(implicit s: Session): Unit =
    AccountWebHooks.filter(_.byPrimaryKey(owner, url)).delete
def callWebHookOf(owner: String, repository: String, event: WebHook.Event)(
makePayload: => Option[WebHookPayload]
)(implicit s: Session, c: JsonFormat.Context): Unit = {
val webHooks = getWebHooksByEvent(owner, repository, event)
if (webHooks.nonEmpty) {
makePayload.map(callWebHook(event, webHooks, _))
}
val accountWebHooks = getAccountWebHooksByEvent(owner, event)
if (accountWebHooks.nonEmpty) {
makePayload.map(callWebHook(event, accountWebHooks, _))
}
}
/**
 * Posts the given payload to every hook endpoint asynchronously.
 *
 * For each hook an HTTP POST is issued on a Future; an interceptor captures the
 * outgoing request so callers can observe exactly what was sent.
 *
 * @param event    the event being delivered (sent as X-Github-Event)
 * @param webHooks hook endpoints to call
 * @param payload  payload serialized once and shared across all hooks
 * @return one tuple per hook: (hook, serialized JSON, future of the outgoing
 *         request as captured by the interceptor, future of the response)
 */
def callWebHook(event: WebHook.Event, webHooks: List[WebHook], payload: WebHookPayload)(
implicit c: JsonFormat.Context
): List[(WebHook, String, Future[HttpRequest], Future[HttpResponse])] = {
import org.apache.http.impl.client.HttpClientBuilder
import ExecutionContext.Implicits.global // TODO Shouldn't use the default execution context
import org.apache.http.protocol.HttpContext
import org.apache.http.client.methods.HttpPost
if (webHooks.nonEmpty) {
// Serialize the payload once; the same JSON string is reused for every hook.
val json = JsonFormat(payload)
webHooks.map { webHook =>
// Completed by the interceptor with the actual outgoing request, or failed
// if the HTTP call throws before the interceptor runs.
val reqPromise = Promise[HttpRequest]
val f = Future {
val itcp = new org.apache.http.HttpRequestInterceptor {
def process(res: HttpRequest, ctx: HttpContext): Unit = {
reqPromise.success(res)
}
}
try {
val httpClient = HttpClientBuilder.create.useSystemProperties.addInterceptorLast(itcp).build
logger.debug(s"start web hook invocation for ${webHook.url}")
val httpPost = new HttpPost(webHook.url)
logger.debug(s"Content-Type: ${webHook.ctype.ctype}")
httpPost.addHeader("Content-Type", webHook.ctype.ctype)
httpPost.addHeader("X-Github-Event", event.name)
httpPost.addHeader("X-Github-Delivery", java.util.UUID.randomUUID().toString)
// Body encoding and HMAC signature depend on the hook's content type.
webHook.ctype match {
case WebHookContentType.FORM => {
val params: java.util.List[NameValuePair] = new java.util.ArrayList()
params.add(new BasicNameValuePair("payload", json))
def postContent = new UrlEncodedFormEntity(params, "UTF-8")
httpPost.setEntity(postContent)
if (webHook.token.exists(_.trim.nonEmpty)) {
// TODO find a better way and see how to extract content from postContent
// The signature must cover the exact bytes sent, hence re-encoding here.
val contentAsBytes = URLEncodedUtils.format(params, "UTF-8").getBytes("UTF-8")
httpPost.addHeader(
"X-Hub-Signature",
XHub.generateHeaderXHubToken(
XHubConverter.HEXA_LOWERCASE,
XHubDigest.SHA1,
webHook.token.get,
contentAsBytes
)
)
}
}
case WebHookContentType.JSON => {
httpPost.setEntity(
EntityBuilder.create().setContentType(ContentType.APPLICATION_JSON).setText(json).build()
)
if (webHook.token.exists(_.trim.nonEmpty)) {
httpPost.addHeader(
"X-Hub-Signature",
XHub.generateHeaderXHubToken(
XHubConverter.HEXA_LOWERCASE,
XHubDigest.SHA1,
webHook.token.orNull,
json.getBytes("UTF-8")
)
)
}
}
}
val res = httpClient.execute(httpPost)
httpPost.releaseConnection()
logger.debug(s"end web hook invocation for ${webHook}")
res
} catch {
case e: Throwable => {
// Make sure observers of the request future are not left hanging.
if (!reqPromise.isCompleted) {
reqPromise.failure(e)
}
throw e
}
}
}
f.onComplete {
case Success(_) => logger.debug(s"Success: web hook request to ${webHook.url}")
case Failure(t) => logger.error(s"Failed: web hook request to ${webHook.url}", t)
}
(webHook, json, reqPromise.future, f)
}
} else {
Nil
}
// logger.debug("end callWebHook")
}
}
/**
 * Fires web hooks for issue and pull-request events.
 */
trait WebHookPullRequestService extends WebHookService {
self: AccountService with RepositoryService with PullRequestService with IssuesService =>
import WebHookService._
// https://developer.github.com/v3/activity/events/types/#issuesevent
/**
 * Fires the "issues" web hook. The payload is only built when both the
 * repository owner and the issue opener can be resolved to accounts.
 */
def callIssuesWebHook(
action: String,
repository: RepositoryService.RepositoryInfo,
issue: Issue,
sender: Account
)(implicit s: Session, context: JsonFormat.Context): Unit = {
callWebHookOf(repository.owner, repository.name, WebHook.Issues) {
val users = getAccountsByUserNames(Set(repository.owner, issue.openedUserName), Set(sender))
for {
repoOwner <- users.get(repository.owner)
issueUser <- users.get(issue.openedUserName)
} yield {
WebHookIssuesPayload(
action = action,
number = issue.issueId,
repository = ApiRepository(repository, ApiUser(repoOwner)),
issue = ApiIssue(
issue,
RepositoryName(repository),
ApiUser(issueUser),
getIssueLabels(repository.owner, repository.name, issue.issueId)
.map(ApiLabel(_, RepositoryName(repository)))
),
sender = ApiUser(sender)
)
}
}
}
/**
 * Fires the "pull_request" web hook for a single pull request identified by
 * its issue id. Yields no payload (and therefore sends nothing) when any of
 * the involved accounts or the head repository cannot be resolved.
 */
def callPullRequestWebHook(
action: String,
repository: RepositoryService.RepositoryInfo,
issueId: Int,
sender: Account
)(implicit s: Session, c: JsonFormat.Context): Unit = {
import WebHookService._
callWebHookOf(repository.owner, repository.name, WebHook.PullRequest) {
for {
(issue, pullRequest) <- getPullRequest(repository.owner, repository.name, issueId)
users = getAccountsByUserNames(
Set(repository.owner, pullRequest.requestUserName, issue.openedUserName),
Set(sender)
)
baseOwner <- users.get(repository.owner)
headOwner <- users.get(pullRequest.requestUserName)
issueUser <- users.get(issue.openedUserName)
assignee = issue.assignedUserName.flatMap { userName =>
getAccountByUserName(userName, false)
}
headRepo <- getRepository(pullRequest.requestUserName, pullRequest.requestRepositoryName)
labels = getIssueLabels(repository.owner, repository.name, issue.issueId)
.map(ApiLabel(_, RepositoryName(repository)))
} yield {
WebHookPullRequestPayload(
action = action,
issue = issue,
issueUser = issueUser,
assignee = assignee,
pullRequest = pullRequest,
headRepository = headRepo,
headOwner = headOwner,
baseRepository = repository,
baseOwner = baseOwner,
labels = labels,
sender = sender,
mergedComment = getMergedComment(repository.owner, repository.name, issueId)
)
}
}
}
/** @return Map[(issue, issueUser, pullRequest, baseOwner, headOwner), webHooks] */
// Joins open pull requests whose head is (userName, repositoryName, branch)
// with the "pull_request" hooks registered on each base repository.
def getPullRequestsByRequestForWebhook(userName: String, repositoryName: String, branch: String)(
implicit s: Session
): Map[(Issue, Account, PullRequest, Account, Account), List[RepositoryWebHook]] =
(for {
is <- Issues if is.closed === false.bind
pr <- PullRequests if pr.byPrimaryKey(is.userName, is.repositoryName, is.issueId)
if pr.requestUserName === userName.bind
if pr.requestRepositoryName === repositoryName.bind
if pr.requestBranch === branch.bind
bu <- Accounts if bu.userName === pr.userName
ru <- Accounts if ru.userName === pr.requestUserName
iu <- Accounts if iu.userName === is.openedUserName
wh <- RepositoryWebHooks if wh.byRepository(is.userName, is.repositoryName)
wht <- RepositoryWebHookEvents
if wht.event === WebHook.PullRequest.asInstanceOf[WebHook.Event].bind && wht.byRepositoryWebHook(wh)
} yield {
((is, iu, pr, bu, ru), wh)
}).list.groupBy(_._1).mapValues(_.map(_._2))
/**
 * Fires the "pull_request" hook for every open pull request whose head branch
 * is the given branch of the given repository (e.g. after a push to that
 * branch updates the pull requests based on it).
 */
def callPullRequestWebHookByRequestBranch(
action: String,
requestRepository: RepositoryService.RepositoryInfo,
requestBranch: String,
sender: Account
)(implicit s: Session, c: JsonFormat.Context): Unit = {
import WebHookService._
for {
((issue, issueUser, pullRequest, baseOwner, headOwner), webHooks) <- getPullRequestsByRequestForWebhook(
requestRepository.owner,
requestRepository.name,
requestBranch
)
assignee = issue.assignedUserName.flatMap { userName =>
getAccountByUserName(userName, false)
}
baseRepo <- getRepository(pullRequest.userName, pullRequest.repositoryName)
labels = getIssueLabels(pullRequest.userName, pullRequest.repositoryName, issue.issueId)
.map(ApiLabel(_, RepositoryName(pullRequest.userName, pullRequest.repositoryName)))
} yield {
val payload = WebHookPullRequestPayload(
action = action,
issue = issue,
issueUser = issueUser,
assignee = assignee,
pullRequest = pullRequest,
headRepository = requestRepository,
headOwner = headOwner,
baseRepository = baseRepo,
baseOwner = baseOwner,
labels = labels,
sender = sender,
mergedComment = getMergedComment(baseRepo.owner, baseRepo.name, issue.issueId)
)
callWebHook(WebHook.PullRequest, webHooks, payload)
}
}
}
/**
 * Fires the "pull_request_review_comment" web hook for review comments on a
 * pull request's diff.
 */
trait WebHookPullRequestReviewCommentService extends WebHookService {
self: AccountService with RepositoryService with PullRequestService with IssuesService with CommitsService =>
/**
 * Builds and sends the review-comment payload. Produces nothing (so no hook
 * fires) when the base owner, head owner, issue opener, or head repository
 * cannot be resolved.
 */
def callPullRequestReviewCommentWebHook(
action: String,
comment: CommitComment,
repository: RepositoryService.RepositoryInfo,
issue: Issue,
pullRequest: PullRequest,
sender: Account
)(implicit s: Session, c: JsonFormat.Context): Unit = {
import WebHookService._
callWebHookOf(repository.owner, repository.name, WebHook.PullRequestReviewComment) {
val users =
getAccountsByUserNames(Set(repository.owner, pullRequest.requestUserName, issue.openedUserName), Set(sender))
for {
baseOwner <- users.get(repository.owner)
headOwner <- users.get(pullRequest.requestUserName)
issueUser <- users.get(issue.openedUserName)
assignee = issue.assignedUserName.flatMap { userName =>
getAccountByUserName(userName, false)
}
headRepo <- getRepository(pullRequest.requestUserName, pullRequest.requestRepositoryName)
labels = getIssueLabels(pullRequest.userName, pullRequest.repositoryName, issue.issueId)
.map(ApiLabel(_, RepositoryName(pullRequest.userName, pullRequest.repositoryName)))
} yield {
WebHookPullRequestReviewCommentPayload(
action = action,
comment = comment,
issue = issue,
issueUser = issueUser,
assignee = assignee,
pullRequest = pullRequest,
headRepository = headRepo,
headOwner = headOwner,
baseRepository = repository,
baseOwner = baseOwner,
labels = labels,
sender = sender,
mergedComment = getMergedComment(repository.owner, repository.name, issue.issueId)
)
}
}
}
}
/**
 * Fires the "issue_comment" web hook when a comment is added to an issue or
 * pull request.
 */
trait WebHookIssueCommentService extends WebHookPullRequestService {
self: AccountService with RepositoryService with PullRequestService with IssuesService =>
import WebHookService._
/**
 * Builds and sends the issue-comment payload. Produces nothing when the
 * comment, opener, repository owner, or commenter cannot be resolved.
 */
def callIssueCommentWebHook(
repository: RepositoryService.RepositoryInfo,
issue: Issue,
issueCommentId: Int,
sender: Account
)(implicit s: Session, c: JsonFormat.Context): Unit = {
callWebHookOf(repository.owner, repository.name, WebHook.IssueComment) {
for {
issueComment <- getComment(repository.owner, repository.name, issueCommentId.toString())
users = getAccountsByUserNames(
Set(issue.openedUserName, repository.owner, issueComment.commentedUserName),
Set(sender)
)
issueUser <- users.get(issue.openedUserName)
repoOwner <- users.get(repository.owner)
commenter <- users.get(issueComment.commentedUserName)
labels = getIssueLabels(repository.owner, repository.name, issue.issueId)
} yield {
WebHookIssueCommentPayload(
issue = issue,
issueUser = issueUser,
comment = issueComment,
commentUser = commenter,
repository = repository,
repositoryUser = repoOwner,
sender = sender,
labels = labels
)
}
}
}
}
/**
 * Payload model for outgoing web hooks. Each case class mirrors a GitHub
 * event payload (see the linked GitHub API docs per type); the companion
 * objects build the payloads from GitBucket's internal domain objects.
 */
object WebHookService {
// Marker trait implemented by every payload type that can be posted.
trait WebHookPayload
// https://developer.github.com/v3/activity/events/types/#createevent
case class WebHookCreatePayload(
sender: ApiUser,
description: String,
ref: String,
ref_type: String,
master_branch: String,
repository: ApiRepository
) extends FieldSerializable
with WebHookPayload {
// GitBucket hooks are always triggered by a user, never an integration.
val pusher_type = "user"
}
object WebHookCreatePayload {
/** Builds a "create" payload from domain objects. */
def apply(
sender: Account,
repositoryInfo: RepositoryInfo,
repositoryOwner: Account,
ref: String,
refType: String
): WebHookCreatePayload =
WebHookCreatePayload(
sender = ApiUser(sender),
ref = ref,
ref_type = refType,
description = repositoryInfo.repository.description.getOrElse(""),
master_branch = repositoryInfo.repository.defaultBranch,
repository = ApiRepository(repositoryInfo, repositoryOwner)
)
}
// https://developer.github.com/v3/activity/events/types/#pushevent
case class WebHookPushPayload(
pusher: ApiPusher,
sender: ApiUser,
ref: String,
before: String,
after: String,
commits: List[ApiCommit],
repository: ApiRepository
) extends FieldSerializable
with WebHookPayload {
// URL to view the change set: repository root, a single commit, or a
// compare view. An all-zero `before` means the ref was just created, so the
// compare starts from the first pushed commit's parent.
val compare = commits.size match {
case 0 => ApiPath(s"/${repository.full_name}") // maybe test hook on un-initialized repository
case 1 => ApiPath(s"/${repository.full_name}/commit/${after}")
case _ if before.forall(_ == '0') => ApiPath(s"/${repository.full_name}/compare/${commits.head.id}^...${after}")
case _ => ApiPath(s"/${repository.full_name}/compare/${before}...${after}")
}
val head_commit = commits.lastOption
}
object WebHookPushPayload {
/** Builds a "push" payload from domain objects and the old/new ref ids. */
def apply(
git: Git,
sender: Account,
refName: String,
repositoryInfo: RepositoryInfo,
commits: List[CommitInfo],
repositoryOwner: Account,
newId: ObjectId,
oldId: ObjectId
): WebHookPushPayload =
WebHookPushPayload(
pusher = ApiPusher(sender),
sender = ApiUser(sender),
ref = refName,
before = ObjectId.toString(oldId),
after = ObjectId.toString(newId),
commits = commits.map { commit =>
ApiCommit(git, RepositoryName(repositoryInfo), commit)
},
repository = ApiRepository(repositoryInfo, repositoryOwner)
)
/** Fixed-content payload used when testing a hook without a real push. */
def createDummyPayload(sender: Account): WebHookPushPayload =
WebHookPushPayload(
pusher = ApiPusher(sender),
sender = ApiUser(sender),
ref = "refs/heads/master",
before = "adc83b19e793491b1c6ea0fd8b46cd9f32e592fc",
after = "adc83b19e793491b1c6ea0fd8b46cd9f32e592fc",
commits = List.empty,
repository = ApiRepository.forDummyPayload(ApiUser(sender))
)
}
// https://developer.github.com/v3/activity/events/types/#issuesevent
case class WebHookIssuesPayload(
action: String,
number: Int,
repository: ApiRepository,
issue: ApiIssue,
sender: ApiUser
) extends WebHookPayload
// https://developer.github.com/v3/activity/events/types/#pullrequestevent
case class WebHookPullRequestPayload(
action: String,
number: Int,
repository: ApiRepository,
pull_request: ApiPullRequest,
sender: ApiUser
) extends WebHookPayload
object WebHookPullRequestPayload {
/** Builds a "pull_request" payload from domain objects. */
def apply(
action: String,
issue: Issue,
issueUser: Account,
assignee: Option[Account],
pullRequest: PullRequest,
headRepository: RepositoryInfo,
headOwner: Account,
baseRepository: RepositoryInfo,
baseOwner: Account,
labels: List[ApiLabel],
sender: Account,
mergedComment: Option[(IssueComment, Account)]
): WebHookPullRequestPayload = {
val headRepoPayload = ApiRepository(headRepository, headOwner)
val baseRepoPayload = ApiRepository(baseRepository, baseOwner)
val senderPayload = ApiUser(sender)
val pr = ApiPullRequest(
issue = issue,
pullRequest = pullRequest,
headRepo = headRepoPayload,
baseRepo = baseRepoPayload,
user = ApiUser(issueUser),
labels = labels,
assignee = assignee.map(ApiUser.apply),
mergedComment = mergedComment
)
WebHookPullRequestPayload(
action = action,
number = issue.issueId,
repository = pr.base.repo,
pull_request = pr,
sender = senderPayload
)
}
}
// https://developer.github.com/v3/activity/events/types/#issuecommentevent
case class WebHookIssueCommentPayload(
action: String,
repository: ApiRepository,
issue: ApiIssue,
comment: ApiComment,
sender: ApiUser
) extends WebHookPayload
object WebHookIssueCommentPayload {
/** Builds an "issue_comment" payload; the action is always "created". */
def apply(
issue: Issue,
issueUser: Account,
comment: IssueComment,
commentUser: Account,
repository: RepositoryInfo,
repositoryUser: Account,
sender: Account,
labels: List[Label]
): WebHookIssueCommentPayload =
WebHookIssueCommentPayload(
action = "created",
repository = ApiRepository(repository, repositoryUser),
issue = ApiIssue(
issue,
RepositoryName(repository),
ApiUser(issueUser),
labels.map(ApiLabel(_, RepositoryName(repository)))
),
comment =
ApiComment(comment, RepositoryName(repository), issue.issueId, ApiUser(commentUser), issue.isPullRequest),
sender = ApiUser(sender)
)
}
// https://developer.github.com/v3/activity/events/types/#pullrequestreviewcommentevent
case class WebHookPullRequestReviewCommentPayload(
action: String,
comment: ApiPullRequestReviewComment,
pull_request: ApiPullRequest,
repository: ApiRepository,
sender: ApiUser
) extends WebHookPayload
object WebHookPullRequestReviewCommentPayload {
/** Builds a "pull_request_review_comment" payload from domain objects. */
def apply(
action: String,
comment: CommitComment,
issue: Issue,
issueUser: Account,
assignee: Option[Account],
pullRequest: PullRequest,
headRepository: RepositoryInfo,
headOwner: Account,
baseRepository: RepositoryInfo,
baseOwner: Account,
labels: List[ApiLabel],
sender: Account,
mergedComment: Option[(IssueComment, Account)]
): WebHookPullRequestReviewCommentPayload = {
val headRepoPayload = ApiRepository(headRepository, headOwner)
val baseRepoPayload = ApiRepository(baseRepository, baseOwner)
val senderPayload = ApiUser(sender)
WebHookPullRequestReviewCommentPayload(
action = action,
comment = ApiPullRequestReviewComment(
comment = comment,
commentedUser = senderPayload,
repositoryName = RepositoryName(baseRepository),
issueId = issue.issueId
),
pull_request = ApiPullRequest(
issue = issue,
pullRequest = pullRequest,
headRepo = headRepoPayload,
baseRepo = baseRepoPayload,
user = ApiUser(issueUser),
labels = labels,
assignee = assignee.map(ApiUser.apply),
mergedComment = mergedComment
),
repository = baseRepoPayload,
sender = senderPayload
)
}
}
// https://developer.github.com/v3/activity/events/types/#gollumevent
case class WebHookGollumPayload(
pages: Seq[WebHookGollumPagePayload],
repository: ApiRepository,
sender: ApiUser
) extends WebHookPayload
case class WebHookGollumPagePayload(
page_name: String,
title: String,
summary: Option[String] = None,
action: String, // created or edited
sha: String, // SHA of the latest commit
html_url: ApiPath
)
object WebHookGollumPayload {
/** Convenience builder for a single wiki page change. */
def apply(
action: String,
pageName: String,
sha: String,
repository: RepositoryInfo,
repositoryUser: Account,
sender: Account
): WebHookGollumPayload = apply(Seq((action, pageName, sha)), repository, repositoryUser, sender)
/** Builds a "gollum" payload from (action, pageName, sha) triples. */
def apply(
pages: Seq[(String, String, String)],
repository: RepositoryInfo,
repositoryUser: Account,
sender: Account
): WebHookGollumPayload = {
WebHookGollumPayload(
pages = pages.map {
case (action, pageName, sha) =>
WebHookGollumPagePayload(
action = action,
page_name = pageName,
title = pageName,
sha = sha,
html_url = ApiPath(s"/${RepositoryName(repository).fullName}/wiki/${StringUtil.urlDecode(pageName)}")
)
},
repository = ApiRepository(repository, repositoryUser),
sender = ApiUser(sender)
)
}
}
}
|
McFoggy/gitbucket
|
src/main/scala/gitbucket/core/service/WebHookService.scala
|
Scala
|
apache-2.0
| 28,032
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.examples.ecommercerecommendation
import org.apache.predictionio.controller.LServing
/**
 * PredictionIO serving component: selects the final prediction from the
 * results produced by the engine's algorithms. This implementation simply
 * returns the first result.
 */
class Serving
extends LServing[Query, PredictedResult] {
override
def serve(query: Query,
predictedResults: Seq[PredictedResult]): PredictedResult = {
// Assumes at least one algorithm produced a result; .head throws on empty input.
predictedResults.head
}
}
|
dszeto/incubator-predictionio
|
examples/scala-parallel-ecommercerecommendation/train-with-rate-event/src/main/scala/Serving.scala
|
Scala
|
apache-2.0
| 1,114
|
package com.citechnical.model.business
/** Business division entity; currently an empty placeholder with no fields or behavior. */
class Division {
}
|
dlwhitehurst/accounting-model
|
src/main/java/com/citechnical/model/business/Division.scala
|
Scala
|
apache-2.0
| 59
|
package models
import play.api.test._
import play.api.test.Helpers._
import org.specs2.mutable._
import helpers.{InjectorSupport, TokenGenerator}
import play.api.Application
import play.api.db.Database
import play.api.inject.guice.GuiceApplicationBuilder
/** Specs2 test for the FirstSetup model's user-creation behavior. */
class FirstSetupSpec extends Specification with InjectorSupport {
"FirstSetup" should {
"Salt and hash is created by create() method." in {
// Fresh Guice application backed by an in-memory database for isolation.
implicit val app: Application = GuiceApplicationBuilder().configure(inMemoryDatabase()).build()
inject[Database].withConnection { implicit conn =>
implicit val storeUserRepo = inject[StoreUserRepo]
val user = FirstSetup(
"userName", "firstName", Some("middleName"), "lastName", "email", Seq[String](), "password", "companyName",
Some("kanaFirstName"), None, Some("kanaLastName")
).save(
conn
)
// The persisted row, re-read by id, must equal the returned user.
storeUserRepo(user.id.get) === user
}
}
}
}
|
ruimo/store2
|
test/models/FirstSetupSpec.scala
|
Scala
|
apache-2.0
| 936
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.stream.sql
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.config.ExecutionConfigOptions.{TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, TABLE_EXEC_MINIBATCH_ENABLED, TABLE_EXEC_MINIBATCH_SIZE}
import org.apache.flink.table.planner.utils.{StreamTableTestUtil, TableTestBase}
import java.time.Duration
import org.junit.{Before, Test}
/**
 * Plan tests for the deduplicate optimization: a ROW_NUMBER() OVER
 * (PARTITION BY ... ORDER BY time ASC|DESC) query filtered to rank 1 should
 * be translated to a FirstRow/LastRow (Deduplicate) node; any other rank
 * condition stays a regular Rank.
 */
class DeduplicateTest extends TableTestBase {
var util: StreamTableTestUtil = _
@Before
def setUp(): Unit = {
util = streamTestUtil()
// Registers MyTable with both a processing-time and an event-time attribute.
util.addDataStream[(Int, String, Long)](
"MyTable", 'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)
}
@Test
def testInvalidRowNumberConditionOnProctime(): Unit = {
val sql =
"""
|SELECT *
|FROM (
| SELECT a, ROW_NUMBER() OVER (PARTITION BY b ORDER BY proctime DESC) as rank_num
| FROM MyTable)
|WHERE rank_num = 2
""".stripMargin
// the rank condition is not 1, so it will not be translated to LastRow, but Rank
util.verifyExecPlan(sql)
}
@Test
def testInvalidRowNumberConditionOnRowtime(): Unit = {
val sql =
"""
|SELECT *
|FROM (
| SELECT a, ROW_NUMBER() OVER (PARTITION BY b ORDER BY rowtime DESC) as rank_num
| FROM MyTable)
|WHERE rank_num = 3
""".stripMargin
// the rank condition is not 1, so it will not be translated to LastRow, but Rank
util.verifyExecPlan(sql)
}
@Test
def testLastRowWithWindowOnRowtime(): Unit = {
util.tableEnv.getConfig.getConfiguration
.set(TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, Duration.ofMillis(500))
util.addTable(
"""
|CREATE TABLE T (
| `a` INT,
| `b` STRING,
| `ts` TIMESTAMP(3),
| WATERMARK FOR `ts` AS `ts`
|) WITH (
| 'connector' = 'COLLECTION',
| 'is-bounded' = 'false'
|)
""".stripMargin
)
val deduplicateSQl =
"""
|(
|SELECT a, b, ts
|FROM (
| SELECT *,
| ROW_NUMBER() OVER (PARTITION BY a ORDER BY ts DESC) as rowNum
| FROM T
|)
|WHERE rowNum = 1
|)
""".stripMargin
val windowSql =
s"""
|select b, sum(a), TUMBLE_START(ts, INTERVAL '0.004' SECOND)
|FROM $deduplicateSQl
|GROUP BY b, TUMBLE(ts, INTERVAL '0.004' SECOND)
""".stripMargin
// A group window cannot consume the update/delete stream that LastRow emits.
thrown.expect(classOf[TableException])
thrown.expectMessage("GroupWindowAggregate doesn't support consuming update " +
"and delete changes which is produced by node Deduplicate(")
util.verifyExplain(windowSql)
}
@Test
def testSimpleFirstRowOnRowtime(): Unit = {
val sql =
"""
|SELECT a, b, c
|FROM (
| SELECT *,
| ROW_NUMBER() OVER (PARTITION BY a ORDER BY rowtime ASC) as rank_num
| FROM MyTable)
|WHERE rank_num <= 1
""".stripMargin
util.verifyExecPlan(sql)
}
@Test
def testMiniBatchInferFirstRowOnRowtime(): Unit = {
util.tableEnv.getConfig.getConfiguration.setBoolean(TABLE_EXEC_MINIBATCH_ENABLED, true)
util.tableEnv.getConfig.getConfiguration.setLong(TABLE_EXEC_MINIBATCH_SIZE, 3L)
util.tableEnv.getConfig.getConfiguration.set(
TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, Duration.ofSeconds(1))
val ddl =
s"""
|CREATE TABLE T (
| a INT,
| b VARCHAR,
| rowtime TIMESTAMP(3),
| proctime as PROCTIME(),
| WATERMARK FOR rowtime AS rowtime
|) WITH (
| 'connector' = 'COLLECTION',
| 'is-bounded' = 'false'
|)
|""".stripMargin
util.tableEnv.executeSql(ddl)
val sql =
"""
|SELECT COUNT(b) FROM (
| SELECT a, b
| FROM (
| SELECT *,
| ROW_NUMBER() OVER (PARTITION BY a ORDER BY rowtime ASC) as rank_num
| FROM T)
| WHERE rank_num <= 1
|)
""".stripMargin
util.verifyExecPlan(sql)
}
@Test
def testSimpleLastRowOnRowtime(): Unit = {
val sql =
"""
|SELECT a, b, c
|FROM (
| SELECT *,
| ROW_NUMBER() OVER (PARTITION BY a ORDER BY rowtime DESC) as rank_num
| FROM MyTable)
|WHERE rank_num = 1
""".stripMargin
util.verifyExecPlan(sql)
}
@Test
def testMiniBatchInferLastRowOnRowtime(): Unit = {
util.tableEnv.getConfig.getConfiguration.setBoolean(TABLE_EXEC_MINIBATCH_ENABLED, true)
util.tableEnv.getConfig.getConfiguration.setLong(TABLE_EXEC_MINIBATCH_SIZE, 3L)
util.tableEnv.getConfig.getConfiguration.set(
TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, Duration.ofSeconds(1))
val ddl =
s"""
|CREATE TABLE T (
| a INT,
| b VARCHAR,
| rowtime TIMESTAMP(3),
| proctime as PROCTIME(),
| WATERMARK FOR rowtime AS rowtime
|) WITH (
| 'connector' = 'COLLECTION',
| 'is-bounded' = 'false'
|)
|""".stripMargin
util.tableEnv.executeSql(ddl)
val sql =
"""
|SELECT COUNT(b) FROM (
| SELECT a, b
| FROM (
| SELECT *,
| ROW_NUMBER() OVER (PARTITION BY a ORDER BY rowtime DESC) as rank_num
| FROM T)
| WHERE rank_num = 1
|)
""".stripMargin
util.verifyExecPlan(sql)
}
@Test
def testSimpleLastRowOnProctime(): Unit = {
val sql =
"""
|SELECT *
|FROM (
| SELECT *,
| ROW_NUMBER() OVER (PARTITION BY a ORDER BY proctime DESC) as rank_num
| FROM MyTable)
|WHERE rank_num = 1
""".stripMargin
util.verifyExecPlan(sql)
}
@Test
def testSimpleLastRowOnBuiltinProctime(): Unit = {
val sqlQuery =
"""
|SELECT *
|FROM (
| SELECT *,
| ROW_NUMBER() OVER (ORDER BY PROCTIME() DESC) as rowNum
| FROM MyTable
|)
|WHERE rowNum = 1
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
@Test
def testSimpleFirstRowOnProctime(): Unit = {
val sql =
"""
|SELECT a, b, c
|FROM (
| SELECT *,
| ROW_NUMBER() OVER (PARTITION BY a ORDER BY proctime ASC) as rank_num
| FROM MyTable)
|WHERE rank_num = 1
""".stripMargin
util.verifyExecPlan(sql)
}
@Test
def testSimpleFirstRowOnBuiltinProctime(): Unit = {
val sqlQuery =
"""
|SELECT *
|FROM (
| SELECT *,
| ROW_NUMBER() OVER (PARTITION BY a ORDER BY PROCTIME() ASC) as rowNum
| FROM MyTable
|)
|WHERE rowNum = 1
""".stripMargin
util.verifyExecPlan(sqlQuery)
}
}
|
clarkyzl/flink
|
flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/DeduplicateTest.scala
|
Scala
|
apache-2.0
| 7,634
|
// Project: Default (Template) Project
// Module: rxjs/operator/*
// Description: Façades for the RxJS5 oparators
// Copyright (c) 2016. Distributed under the MIT License (see included LICENSE file).
package rxjs
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSImport, JSName}
/**
 * Scala.js facades for individual RxJS 5 operator modules.
 *
 * Each operator object is imported from its `rxjs/operator/...` module and
 * invoked via `@JSName("call")` so the source Observable is passed explicitly
 * as the `this` argument of the underlying prototype method.
 */
object Operators {
@js.native
@JSImport("rxjs/operator/buffer","buffer","Rx.Observable.prototype.buffer")
object buffer extends js.Object {
@JSName("call")
def apply[T](observable: Observable[T], closingNotifier: Observable[Any]): Observable[js.Array[T]] = js.native
}
@js.native
@JSImport("rxjs/operator/catch","_catch","Rx.Observable.prototype._catch")
object _catch extends js.Object {
@JSName("call")
def apply[T,R](observable: Observable[T], selector: js.Function2[js.Any,Observable[T],Observable[R]]): Observable[R] = js.native
}
@js.native
@JSImport("rxjs/operator/concatAll","concatAll","Rx.Observable.prototype.concatAll")
object concatAll extends js.Object {
@JSName("call")
def apply[T](observable: Observable[Observable[T]]): Observable[T] = js.native
}
@js.native
@JSImport("rxjs/operator/count","count","Rx.Observable.prototype.count")
object count extends js.Object {
@JSName("call")
def apply[T](observable: Observable[T], predicate: js.UndefOr[js.Function3[T,Int,Observable[T],Boolean]] = js.undefined): Observable[Int] = js.native
}
@js.native
@JSImport("rxjs/operator/delay","delay","Rx.Observable.prototype.delay")
object delay extends js.Object {
@JSName("call")
def apply[T](observable: Observable[T], delay: Int): Observable[T] = js.native
}
@js.native
@JSImport("rxjs/operator/do","_do","Rx.Observable.prototype._do")
object _do extends js.Object {
@JSName("call")
def apply[T](observable: Observable[T], f: js.Function1[T,_]): Observable[T] = js.native
}
@js.native
@JSImport("rxjs/operator/debounceTime","debounceTime","Rx.Observable.prototype.debounceTime")
object debounceTime extends js.Object {
@JSName("call")
def apply[T](observable: Observable[T], dueTime: Int): Observable[T] = js.native
}
@js.native
@JSImport("rxjs/operator/distinctUntilChanged","distinctUntilChanged","Rx.Observable.prototype.distinctUntilChanged")
object distinctUntilChanged extends js.Object {
@JSName("call")
def apply[T](observable: Observable[T], compare: js.UndefOr[js.Function] = js.undefined): Observable[T] = js.native
}
@js.native
@JSImport("rxjs/operator/elementAt","elementAt","Rx.Observable.prototype.elementAt")
object elementAt extends js.Object {
@JSName("call")
def apply[T](observable: Observable[T], index: Int): Observable[T] = js.native
}
@js.native
@JSImport("rxjs/operator/filter","filter","Rx.Observable.prototype.filter")
object filter extends js.Object {
@JSName("call")
def apply[T](observable: Observable[T], predicate: js.Function2[T,Int,Boolean]): Observable[T] = js.native
}
@js.native
@JSImport("rxjs/operator/find","find","Rx.Observable.prototype.find")
object find extends js.Object {
@JSName("call")
def apply[T](observable: Observable[T], predicate: js.Function2[T,Int,Boolean]): Observable[T] = js.native
}
@js.native
@JSImport("rxjs/operator/first","first","Rx.Observable.prototype.first")
object first extends js.Object {
@JSName("call")
def apply[T](observable: Observable[T], predicate: js.UndefOr[js.Function3[T,Int,Observable[T],Boolean]] = js.undefined): Observable[T] = js.native
}
@js.native
@JSImport("rxjs/operator/map", "map","Rx.Observable.prototype.map")
object map extends js.Object {
@JSName("call")
def apply[T,R](observable: Observable[T], project: js.Function2[T, Int, R]): Observable[R] = js.native
}
@js.native
@JSImport("rxjs/operator/switchMap","switchMap","Rx.Observable.prototype.switchMap")
object switchMap extends js.Object {
@JSName("call")
def apply[T,R](observable: Observable[T], project: js.Function2[T, Int, Observable[R]]): Observable[R] = js.native
}
@js.native
@JSImport("rxjs/operator/take","take","Rx.Observable.prototype.take")
object take extends js.Object {
@JSName("call")
def apply[T](observable: Observable[T], count: Int): Observable[T] = js.native
}
@js.native
@JSImport("rxjs/operator/toPromise","toPromise","Rx.Observable.prototype.toPromise")
object toPromise extends js.Object {
@JSName("call")
def apply[T](observable: Observable[T]): RxPromise[T] = js.native
}
}
|
jokade/scalajs-rxjs
|
src/main/scala/rxjs/Operators.scala
|
Scala
|
mit
| 4,535
|
package leo.datastructures.blackboard
import leo.Configuration
import leo.agents.{Agent, Task}
import leo.datastructures.blackboard.impl.AuctionBlackboard
import leo.datastructures.blackboard.scheduler.{Scheduler, SchedulerImpl}
/** Factory for creating a [[Blackboard]] wired to a started [[Scheduler]]. */
object Blackboard {
  /**
   * Creates a fresh blackboard together with a scheduler bound to it.
   * The scheduler is started and registered on the blackboard before
   * the pair is returned.
   */
  def newBlackboard : (Blackboard, Scheduler) = {
    val blackboard = new AuctionBlackboard
    val scheduler = new SchedulerImpl(Configuration.THREADCOUNT, blackboard)
    scheduler.start()
    blackboard.setScheduler(scheduler)
    (blackboard, scheduler)
  }
}
/**
*
* <p>
* A blackboard is a central data collection object that supports
* synchronized access between multiple processes.
* </p>
*
* <p>
* The implementation decides over the fairness and order of execution of the
* processes.
* </p>
*
* <p>
* IMPORTANT: CHANGE FROM TPTP to the internal used Representation as
* soon as they are ready.
* </p>
*
* @author Max Wisniewski
* @since 29.04.2014
*/
trait Blackboard extends TaskOrganize with DataBlackboard with MessageBlackboard {
/**
* Resets the blackboard to an initial state.
* Exact semantics are implementation-defined (see [[impl.AuctionBlackboard]]).
*/
def clear() : Unit
/**
* Prints some information on the workload performed in the blackboard.
* Intended for debugging / diagnostics only.
*/
def info() : Unit
}
/**
* Subtrait of the Blackboard, responsible for the
* organization of tasks and agents. Not visible outside the
* blackboard package except the agentRegistering.
*/
trait TaskOrganize {
/**
* Gives all agents the chance to react to an event
* and adds the generated tasks.
*
* @param t - Function that generates for each agent a set of tasks.
*/
def filterAll(t : Agent => Unit) : Unit
/**
* Method that filters the whole Blackboard, if a new agent 'a' is added
* to the context.
*
* @param a - New Agent.
*/
protected[blackboard] def freshAgent(a : Agent) : Unit
/**
*
* Starts a new auction for agents to buy computation time
* for their tasks.
*
* The result is a set of tasks, that can be executed in parallel
* and approximate the optimal combinatorial auction.
*
* @return Not yet executed non-colliding set of tasks
*/
protected[blackboard] def getTask : Iterable[(Agent,Task)]
/**
* Allows a force check for new Tasks. Necessary for the DoneEvent to be
* thrown correctly.
*/
protected[blackboard] def forceCheck() : Unit
/**
* Signal Task is called, when a new task is available.
*/
def signalTask() : Unit
/**
* Registers an agent to the blackboard, should only be called by the agent itself
*
* @param a - the new agent
*/
def registerAgent(a : Agent) : Unit
/**
* Removes an agent from the notification lists.
*
* Recommended if the agent will never be used again. Otherwise
* a.setActive(false) should be used.
*
* This method should be called solely from the agent.
*
* @param a the agent to be unregistered.
*/
def unregisterAgent(a : Agent) : Unit
/**
*
* Returns, for debugging and interactive use, the registered agents.
*
* @return all registered agents
*/
def getAgents: Iterable[Agent]
/**
* Submits a new Task to the list of executable tasks.
*
* @param a Agent the tasks belong to
* @param ts Set of new Tasks
*/
def submitTasks(a : Agent, ts : Set[Task]) : Unit
/**
* Declares, that a task has been completely executed.
*
* @param t The finished task.
*/
def finishTask(t : Task) : Unit
}
/**
 * The DataBlackboard handles publishing of data structures
 * through the blackboard and the execution interface.
 */
trait DataBlackboard extends TaskOrganize {
  /**
   * Adds a data structure to the blackboard.
   * After this method the data structure will be
   * manipulated by the action of the agents.
   *
   * @param ds is the data structure to be added.
   */
  def addDS(ds : DataStore): Unit
  /**
   * Removes a data structure from the blackboard.
   * After this method the data structure will
   * no longer be manipulated by the action of the agents.
   *
   * @param ds is the data structure to be removed.
   */
  def rmDS(ds : DataStore): Unit
  /**
   * For the update phase in the executor.
   * Returns a list of all data structures that
   * store any of the given types.
   *
   * @param d is the set of types that we are interested in.
   * @return a list of all data structures, which store these types.
   */
  def getDS(d : Set[DataType[Any]]) : Iterable[DataStore]
  /**
   * Returns a list of all data structures
   * currently registered in the blackboard.
   *
   * @return list of all data structures registered in the blackboard
   */
  def getDS : Iterable[DataStore]
  /**
   *
   * Adds new data to the blackboard and, if the datum was previously
   * unknown, lets every registered agent generate follow-up tasks.
   *
   * @param dataType The type of data to be added
   * @param d the data to be added
   * @return true if successfully added; false if already existing or it could not be added
   */
  def addData[T](dataType : DataType[T])(d : T) : Boolean = {
    val result = Result().insert(dataType)(d)
    var isNew = false
    // Insert into EVERY store handling the type; isNew becomes true if any
    // store accepted the datum as new.
    getDS(Set(dataType)) foreach (ds => isNew |= ds.insertData(dataType)(d))
    if(isNew)
      filterAll{a =>
        submitTasks(a, a.filter(result).toSet)
      }
    isNew
  }
  /**
   *
   * Updates data in the blackboard.
   *
   * @param dataType The type of data to be updated
   * @param d1 the old value
   * @param d2 the new value
   * @return true if successfully updated; false otherwise
   */
  def updateData[T](dataType: DataType[T])(d1 : T)(d2 : T) : Boolean = {
    val result = Result().update(dataType)(d1)(d2)
    // NOTE(review): `exists` short-circuits at the first store that reports an
    // update, so — unlike addData, which visits every store — later stores may
    // never see the update. The original TODO below voices the same doubt;
    // confirm the intended semantics before changing it.
    val isNew = getDS(Set(dataType)) exists {ds => ds.updateData(dataType)(d1)(d2)} // TODO forall or exist?
    if(isNew)
      filterAll{a =>
        submitTasks(a, a.filter(result).toSet)
      }
    isNew
  }
  /**
   *
   * Removes data from the blackboard.
   *
   * @param dataType The type of data to be deleted
   * @param d the value to be deleted
   */
  def removeData[T](dataType: DataType[T])(d : T) : Unit = {
    val result = Result().remove(dataType)(d)
    // NOTE(review): same short-circuit caveat as updateData — only the first
    // store that deletes the value is guaranteed to be visited.
    val wasDel = getDS(Set(dataType)) exists {d1 => d1.deleteData(dataType)(d) }
    if(wasDel)
      filterAll{a =>
        submitTasks(a, a.filter(result).toSet)
      }
  }
  /**
   *
   * Gets all data of type dataType from
   * the blackboard.
   *
   * @param dataType The type of data to be collected
   * @tparam T The concrete type of data
   * @return the set of all data of the given type
   */
  def getData[T](dataType : DataType[T]) : Set[T]
  /**
   * Submits a complete delta to the blackboard
   * and informs registered agents.
   *
   * @param d Delta to be inserted
   */
  def submitDelta(d : Delta) : Unit = {
    // Apply the delta to every data store, merging the per-store results.
    var result : Delta = EmptyDelta
    val dsIt = getDS.iterator
    while(dsIt.hasNext){
      val ds = dsIt.next()
      result = result.merge(ds.updateResult(d))
    }
    // Only notify agents if at least one store actually changed.
    if(!result.isEmpty){
      filterAll{a => submitTasks(a, a.filter(result).toSet)}
    }
  }
}
/**
 * This trait encapsulates the message handling for the blackboard.
 */
trait MessageBlackboard {
  /**
   * Sends a message to an agent.
   *
   * @param m - The message to send
   * @param to - The recipient
   */
  def send(m : Message, to : Agent): Unit
}
|
lex-lex/Leo-III
|
oldsrc/main/scala/leo/datastructures/blackboard/Blackboard.scala
|
Scala
|
bsd-3-clause
| 7,165
|
package org.deepdive.test.unit
import akka.actor._
import akka.testkit._
import org.deepdive.profiling._
import org.scalatest._
class ProfilerSpec(_system: ActorSystem) extends TestKit(_system) with FunSpecLike with ImplicitSender {
  def this() = this(ActorSystem("ProfilerSpec"))
  describe("Assembling reports") {
    it("should work") {
      // Drive the profiler synchronously through a matched start/end pair.
      val profiler = TestActorRef[Profiler]
      profiler.receive(StartReport("1", "Report 1"))
      profiler.receive(EndReport("1", Some("Done!")))
      // Exactly one assembled report must remain afterwards.
      val assembled = profiler.underlyingActor.reports
      assert(assembled.size == 1)
    }
  }
}
|
feiranwang/deepdive
|
src/test/scala/unit/profiling/ProfilerSpec.scala
|
Scala
|
apache-2.0
| 558
|
package im.tox.antox.wrapper
import java.util
import java.util.{Collections, Locale}
import im.tox.antox.utils.{IDUtils, Hex}
//remove if not needed
import scala.collection.JavaConversions._
class GroupList {
  private var groups: util.List[Group] = new util.ArrayList[Group]()
  def this(groups: util.List[Group]) {
    this()
    this.groups = groups
  }
  /** Looks up a group by number; throws NoSuchElementException if absent. */
  def getGroup(groupNumber: Int): Group = {
    groups.find(group => group.groupNumber == groupNumber).get
  }
  /** Looks up a group by id; throws NoSuchElementException if absent. */
  def getGroup(id: String): Group = {
    groups.find(group => group.id == id).get
  }
  def getPeer(groupNumber: Int, peerNumber: Int): GroupPeer = {
    getGroup(groupNumber).peers.getPeer(peerNumber)
  }
  /**
   * Returns all groups whose name equals the given title
   * (case-insensitively when ignorecase is set). A null title matches
   * groups with a null name.
   */
  def getByTitle(title: String, ignorecase: Boolean): util.List[Group] = {
    if (ignorecase) {
      getByTitleIgnoreCase(title)
    } else {
      groups.filter(group => (group.name == null && title == null) || (title != null && title == group.name))
    }
  }
  private def getByTitleIgnoreCase(title: String): util.List[Group] = {
    groups.filter(group => (group.name == null && title == null) || (title != null && title.equalsIgnoreCase(group.name)))
  }
  /**
   * Returns all groups whose name contains the given fragment.
   *
   * Bug fix: the null check used to run AFTER partial.toLowerCase, so a null
   * argument raised NullPointerException instead of the intended
   * IllegalArgumentException.
   */
  def searchGroup(partial: String): util.List[Group] = {
    if (partial == null) {
      throw new IllegalArgumentException("Cannot search for null")
    }
    val partialLowered = partial.toLowerCase(Locale.getDefault)
    // NOTE(review): group names are not lowercased before the contains check,
    // so the match is effectively case-sensitive on the group name — confirm
    // whether a case-insensitive search was intended.
    groups.filter(group => group.name != null && group.name.contains(partialLowered))
  }
  /** Snapshot copy of the group list. */
  def all(): util.List[Group] = {
    new util.ArrayList[Group](this.groups)
  }
  /** Adds a group; throws if a group with the same number already exists. */
  def addGroup(group: Group): Unit = {
    println("group " + group.groupNumber + " added")
    groups.find(existingGroup => existingGroup.groupNumber == group.groupNumber) match {
      case Some(_) => throw new Exception("group " + group.groupNumber + " already exists")
      case None => this.groups.add(group)
    }
  }
  def addGroup(tox: ToxCore, groupNumber: Int): Unit = {
    addGroup(new Group(tox.getGroupChatId(groupNumber),
      groupNumber, IDUtils.trimForUI(tox.getGroupChatId(groupNumber)),
      "", "", new PeerList()))
  }
  /** Adds the group only when no group with the same number is present. */
  def addGroupIfNotExists(group: Group): Unit = {
    // find already yields an Option; the former .headOption was redundant.
    groups.find(existingGroup => existingGroup.groupNumber == group.groupNumber) match {
      case Some(_) => // already present, nothing to do
      case None =>
        this.groups.add(group)
    }
  }
  /** Removes the group; throws NoSuchElementException when it is absent. */
  def removeGroup(groupNumber: Int) {
    // NOTE(review): the println calls below are debug leftovers — consider a logger.
    println("before remove")
    for (group <- groups) {
      println("group " + group.name)
    }
    groups.remove(groups.find(group => group.groupNumber == groupNumber).get)
    println("after remove")
    for (group <- groups) {
      println("group " + group.name)
    }
  }
}
|
afkgeek/Antox
|
app/src/main/scala/im/tox/antox/wrapper/GroupList.scala
|
Scala
|
gpl-3.0
| 2,610
|
package au.id.cxd.math.app.workflow
import au.id.cxd.math.app.model.Project
/**
* Created by cd on 22/1/17.
*/
/** Workflow specialised to carry a [[Project]] payload. */
class ProjectWorkflow[T <: Project](override val data: T) extends Workflow[T](data) {
}

object ProjectWorkflow {
  /** Lifts a project into a workflow. */
  def id[T <: Project](data: T): ProjectWorkflow[T] = new ProjectWorkflow(data)
  /** Convenience constructor, equivalent to [[id]]. */
  def apply[T <: Project](data: T): ProjectWorkflow[T] = id(data)
}

/** Marker for workflow steps operating on a [[Project]]. */
trait ProjectWorkflowStep[T <: Project] extends WorkflowStep[T] {
}
|
cxd/scala-au.id.cxd.math
|
app/src/main/scala/au/id/cxd/math/app/workflow/ProjectWorkflow.scala
|
Scala
|
mit
| 407
|
import imperium.engine.core.position.Area
import org.newdawn.slick.geom.Point
import org.scalatest.{Matchers, FlatSpec}
/*
* Created by DelavaR on 07.12.2014.
*/
class AreaSpec extends FlatSpec with Matchers {
  it should "not accept invalid coordinates/size" in {
    noException shouldBe thrownBy {
      Area(new Point(5, 7), 10, 15)
    }
    val topPoint = new Point(5, 7)
    a[IllegalArgumentException] should be thrownBy {
      Area(topPoint, -10, 15)
    }
    a[IllegalArgumentException] should be thrownBy {
      Area(topPoint, 10, -15)
    }
    a[IllegalArgumentException] should be thrownBy {
      Area(null, 10, 15)
    }
    val area = Area(topPoint, 10, 15)
    assert(area.point == topPoint)
    assert(area.width == 10)
    assert(area.height == 15)
  }
  it should "correctly shows included points" in {
    val topPoint = new Point(2, 2)
    val area = Area(topPoint, 10, 15)
    // Corner, edge and interior points of the 10x15 area anchored at (2, 2).
    val included = List(new Point(2, 2),
      new Point(2, 17),
      new Point(12, 2),
      new Point(12, 17),
      new Point(5, 5),
      new Point(3, 2),
      new Point(10, 10))
    val excluded = List(new Point(1, 1),
      new Point(1, 10),
      new Point(10, 1),
      new Point(13, 5),
      new Point(5, 18))
    // foreach, not map: we only need the assertion side effect, not a
    // discarded list of Unit results.
    included.foreach { p => assert(area.includes(p)) }
    excluded.foreach { p => assert(!area.includes(p)) }
  }
}
|
d3lavar/Imperium
|
src/test/scala/AreaSpec.scala
|
Scala
|
mit
| 1,521
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2.orc
import java.net.URI
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.{JobID, TaskAttemptID, TaskID, TaskType}
import org.apache.hadoop.mapreduce.lib.input.FileSplit
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl
import org.apache.orc.{OrcConf, OrcFile, TypeDescription}
import org.apache.orc.mapred.OrcStruct
import org.apache.orc.mapreduce.OrcInputFormat
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.connector.read.{InputPartition, PartitionReader}
import org.apache.spark.sql.execution.WholeStageCodegenExec
import org.apache.spark.sql.execution.datasources.PartitionedFile
import org.apache.spark.sql.execution.datasources.orc.{OrcColumnarBatchReader, OrcDeserializer, OrcFilters, OrcUtils}
import org.apache.spark.sql.execution.datasources.v2._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.Filter
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.vectorized.ColumnarBatch
import org.apache.spark.util.{SerializableConfiguration, Utils}
/**
 * A factory used to create Orc readers.
 *
 * @param sqlConf SQL configuration.
 * @param broadcastedConf Broadcast serializable Hadoop Configuration.
 * @param dataSchema Schema of orc files.
 * @param readDataSchema Required data schema in the batch scan.
 * @param partitionSchema Schema of partitions.
 * @param filters Data source filters that may be pushed down into the ORC reader.
 */
case class OrcPartitionReaderFactory(
    sqlConf: SQLConf,
    broadcastedConf: Broadcast[SerializableConfiguration],
    dataSchema: StructType,
    readDataSchema: StructType,
    partitionSchema: StructType,
    filters: Array[Filter]) extends FilePartitionReaderFactory {
  // Output schema of the scan: required data columns followed by partition columns.
  private val resultSchema = StructType(readDataSchema.fields ++ partitionSchema.fields)
  private val isCaseSensitive = sqlConf.caseSensitiveAnalysis
  private val capacity = sqlConf.orcVectorizedReaderBatchSize
  private val orcFilterPushDown = sqlConf.orcFilterPushDown
  private val ignoreCorruptFiles = sqlConf.ignoreCorruptFiles
  // Columnar (vectorized) reads are only reported when enabled, whole-stage
  // codegen can handle the field count, and every column type is supported.
  override def supportColumnarReads(partition: InputPartition): Boolean = {
    sqlConf.orcVectorizedReaderEnabled && sqlConf.wholeStageEnabled &&
      !WholeStageCodegenExec.isTooManyFields(sqlConf, resultSchema) &&
      resultSchema.forall(s => OrcUtils.supportColumnarReads(
        s.dataType, sqlConf.orcVectorizedReaderNestedColumnEnabled))
  }
  // Installs the pushed-down filters as an ORC search argument on the Hadoop conf.
  private def pushDownPredicates(filePath: Path, conf: Configuration): Unit = {
    if (orcFilterPushDown && filters.nonEmpty) {
      OrcUtils.readCatalystSchema(filePath, conf, ignoreCorruptFiles).foreach { fileSchema =>
        OrcFilters.createFilter(fileSchema, filters).foreach { f =>
          OrcInputFormat.setSearchArgument(conf, f, fileSchema.fieldNames)
        }
      }
    }
  }
  // Row-based reader for one partitioned file.
  // NOTE(review): shares most of its setup (predicate pushdown, column
  // pruning, task attempt context) with buildColumnarReader below — candidate
  // for factoring into a helper.
  override def buildReader(file: PartitionedFile): PartitionReader[InternalRow] = {
    val conf = broadcastedConf.value.value
    OrcConf.IS_SCHEMA_EVOLUTION_CASE_SENSITIVE.setBoolean(conf, isCaseSensitive)
    val filePath = new Path(new URI(file.filePath))
    pushDownPredicates(filePath, conf)
    val fs = filePath.getFileSystem(conf)
    val readerOptions = OrcFile.readerOptions(conf).filesystem(fs)
    // Column pruning: map the required read schema onto the file's columns.
    val resultedColPruneInfo =
      Utils.tryWithResource(OrcFile.createReader(filePath, readerOptions)) { reader =>
        OrcUtils.requestedColumnIds(
          isCaseSensitive, dataSchema, readDataSchema, reader, conf)
      }
    if (resultedColPruneInfo.isEmpty) {
      // None means the file contains no rows for the requested columns.
      new EmptyPartitionReader[InternalRow]
    } else {
      val (requestedColIds, canPruneCols) = resultedColPruneInfo.get
      OrcUtils.orcResultSchemaString(canPruneCols, dataSchema, resultSchema, partitionSchema, conf)
      assert(requestedColIds.length == readDataSchema.length,
        "[BUG] requested column IDs do not match required schema")
      val taskConf = new Configuration(conf)
      val fileSplit = new FileSplit(filePath, file.start, file.length, Array.empty)
      val attemptId = new TaskAttemptID(new TaskID(new JobID(), TaskType.MAP, 0), 0)
      val taskAttemptContext = new TaskAttemptContextImpl(taskConf, attemptId)
      val orcRecordReader = new OrcInputFormat[OrcStruct]
        .createRecordReader(fileSplit, taskAttemptContext)
      val deserializer = new OrcDeserializer(readDataSchema, requestedColIds)
      // Adapts the Hadoop record reader to Spark's PartitionReader contract.
      val fileReader = new PartitionReader[InternalRow] {
        override def next(): Boolean = orcRecordReader.nextKeyValue()
        override def get(): InternalRow = deserializer.deserialize(orcRecordReader.getCurrentValue)
        override def close(): Unit = orcRecordReader.close()
      }
      // Appends the (constant) partition values to every produced row.
      new PartitionReaderWithPartitionValues(fileReader, readDataSchema,
        partitionSchema, file.partitionValues)
    }
  }
  // Vectorized (columnar batch) reader for one partitioned file.
  override def buildColumnarReader(file: PartitionedFile): PartitionReader[ColumnarBatch] = {
    val conf = broadcastedConf.value.value
    OrcConf.IS_SCHEMA_EVOLUTION_CASE_SENSITIVE.setBoolean(conf, isCaseSensitive)
    val filePath = new Path(new URI(file.filePath))
    pushDownPredicates(filePath, conf)
    val fs = filePath.getFileSystem(conf)
    val readerOptions = OrcFile.readerOptions(conf).filesystem(fs)
    val resultedColPruneInfo =
      Utils.tryWithResource(OrcFile.createReader(filePath, readerOptions)) { reader =>
        OrcUtils.requestedColumnIds(
          isCaseSensitive, dataSchema, readDataSchema, reader, conf)
      }
    if (resultedColPruneInfo.isEmpty) {
      new EmptyPartitionReader
    } else {
      val (requestedDataColIds, canPruneCols) = resultedColPruneInfo.get
      val resultSchemaString = OrcUtils.orcResultSchemaString(canPruneCols,
        dataSchema, resultSchema, partitionSchema, conf)
      // Partition columns are not read from the file: mark them with -1.
      val requestedColIds = requestedDataColIds ++ Array.fill(partitionSchema.length)(-1)
      assert(requestedColIds.length == resultSchema.length,
        "[BUG] requested column IDs do not match required schema")
      val taskConf = new Configuration(conf)
      val fileSplit = new FileSplit(filePath, file.start, file.length, Array.empty)
      val attemptId = new TaskAttemptID(new TaskID(new JobID(), TaskType.MAP, 0), 0)
      val taskAttemptContext = new TaskAttemptContextImpl(taskConf, attemptId)
      val batchReader = new OrcColumnarBatchReader(capacity)
      batchReader.initialize(fileSplit, taskAttemptContext)
      // Inverse mapping: data columns -1, partition columns indexed 0..n-1.
      val requestedPartitionColIds =
        Array.fill(readDataSchema.length)(-1) ++ Range(0, partitionSchema.length)
      batchReader.initBatch(
        TypeDescription.fromString(resultSchemaString),
        resultSchema.fields,
        requestedColIds,
        requestedPartitionColIds,
        file.partitionValues)
      new PartitionRecordReader(batchReader)
    }
  }
}
|
chuckchen/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/orc/OrcPartitionReaderFactory.scala
|
Scala
|
apache-2.0
| 7,615
|
package dao.util
import java.io.File
import com.mongodb._
import com.mongodb.casbah.commons.MongoDBObject
import com.mongodb.casbah.{MongoCollection, MongoConnection, MongoDB}
import com.typesafe.config.{Config, ConfigFactory}
import play.api.Logger
import scala.collection.JavaConverters._
/** Single-field index description: the field name and whether it is unique. */
case class Index(field: String, unique: Boolean=false)

object MongoUtil {
  /** Opens the named collection and ensures every requested index exists. */
  def collection(name: String, indexes: Seq[Index]=Seq.empty) = {
    val coll = MongoInit.mongoDb(name)
    for (idx <- indexes) {
      MongoInit.safeEnsureIndexes(coll, Seq((idx.field, idx.unique)))
    }
    coll
  }
  /** Like [[collection]], but additionally ensures a unique index on "guid". */
  def collectionWithIndexes(name: String, indexes: Seq[Index]=Seq.empty) = {
    val coll = collection(name, indexes)
    MongoInit.safeEnsureIndexes(coll, Seq(("guid", true)))
    coll
  }
}
object MongoInit extends MongoInit {
  // Config file location; overridable with -Dconfig.file, defaults to conf/application.conf.
  private lazy val ConfigFile = new File(sys.props.get("config.file").getOrElse("conf/application.conf"))
  override val mongoConfig = ConfigFactory.parseFile(ConfigFile)
}
object TestMongoInit extends MongoInit {
  // Test configuration loaded from the classpath.
  override val mongoConfig = ConfigFactory.parseResources("test_mongodb.yml")
}
private[util] trait MongoInit {
  // One attribute participating in an index, with its sort direction.
  case class IndexedAttribute(key: String, ascending: Boolean)
  // NOTE(review): this Index is distinct from the top-level dao.util.Index
  // (field/unique) defined earlier in this file — confirm the duplication is intended.
  case class Index(attributes: Seq[IndexedAttribute], name: String, isUnique: Boolean)
  // Parsed configuration; supplied by the concrete object (file- or resource-based).
  val mongoConfig: Config
  // Shared client: one pool (16 connections per host) over all db.hosts entries.
  lazy val mongo: Mongo = {
    val connsPerHost = 16
    val hosts = mongoConfig.getStringList("db.hosts").asScala
    Logger.info(s"Using mongo hosts '$hosts'")
    // Each host entry is expected as "host:port".
    val servers = hosts.map(_.split(":")).map((hp: Array[String]) => new ServerAddress(hp(0), hp(1).toInt))
    new MongoClient(servers.toList.asJava, MongoClientOptions.builder().connectionsPerHost(connsPerHost).build())
  }
  lazy val mongoDb: MongoDB = {
    new MongoConnection(mongo).getDB(mongoConfig.getString("db.name"))
  }
  /**
   * Ensures (or, when db.ensureIndex is false, merely verifies) one
   * single-attribute ascending index per (key, isUnique) pair.
   */
  def safeEnsureIndexes(collection: MongoCollection, indexes: Seq[(String, Boolean)]): Unit = {
    safeEnsureCompoundIndexes(
      collection,
      indexes.map {
        case (key, isUnique) => Index(Seq(IndexedAttribute(key, ascending = true)), key, isUnique)
      })
  }
  /**
   * Ensures compound indexes when db.ensureIndex is true; otherwise fails fast
   * via sys.error if any required index is missing.
   */
  def safeEnsureCompoundIndexes(collection: MongoCollection, indexes: Seq[Index]): Unit = {
    // Mongo index spec: 1 = ascending, -1 = descending.
    def getIndexObject(indexedAttributes: Seq[IndexedAttribute]) = {
      MongoDBObject(indexedAttributes.toList.map(attr => (attr.key, if (attr.ascending) 1 else -1)))
    }
    if (mongoConfig.getBoolean("db.ensureIndex")) {
      indexes.foreach { index =>
        val Index(indexedAttributes, name, isUnique) = index
        collection.ensureIndex(getIndexObject(indexedAttributes), name, unique = isUnique)
      }
    } else {
      // Verify-only mode: abort startup if a required index is absent.
      def checkIndexExists(index: Index): Unit = {
        val Index(indexedAttributes, name, _) = index
        val indexObject = getIndexObject(indexedAttributes)
        collection.getIndexInfo.find { dbo =>
          dbo.get("key") match {
            case key: DBObject => indexObject == key
            case _ => false
          }
        }.getOrElse(sys.error("required mongo index '%s' on collection '%s' not found".format(name, collection.getFullName)))
      }
      indexes.foreach(checkIndexExists)
    }
  }
}
|
grahamar/Giles
|
app/dao/util/MongoUtil.scala
|
Scala
|
apache-2.0
| 3,146
|
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.zipkin.query
import com.twitter.logging.Logger
import com.twitter.ostrich.admin.{ServiceTracker, RuntimeEnvironment}
import com.twitter.util.Eval
import com.twitter.zipkin.builder.Builder
import com.twitter.zipkin.BuildProperties
object Main {
  val log = Logger.get(getClass.getName)
  /**
   * Loads the query-service configuration via Eval, builds the server and
   * starts it.
   *
   * Bug fix: on a startup failure this now exits with a non-zero status so
   * process supervisors can detect the failure (previously exited with 0,
   * signalling success).
   */
  def main(args: Array[String]) {
    log.info("Loading configuration")
    val runtime = RuntimeEnvironment(BuildProperties, args)
    val builder = (new Eval).apply[Builder[RuntimeEnvironment => ZipkinQuery]](runtime.configFile)
    try {
      val server = builder.apply().apply(runtime)
      server.start()
      ServiceTracker.register(server)
    } catch {
      case e: Exception =>
        // log.error records the exception (including its stack trace); the
        // former duplicate printStackTrace call was dropped.
        log.error(e, "Unexpected exception: %s", e.getMessage)
        System.exit(1)
    }
  }
}
|
ajantis/zipkin
|
zipkin-query-service/src/main/scala/com/twitter/zipkin/query/Main.scala
|
Scala
|
apache-2.0
| 1,432
|
package com.obecto.gattakka.genetics
import com.obecto.gattakka.genetics.descriptors.{GeneDescriptor, Gene}
case class Chromosome(byteArray: Array[Byte], descriptor: GeneDescriptor) {
  /** Decoded value of this chromosome, as interpreted by its descriptor. */
  def value: Any = toGene.value
  /** Materialises the raw bytes into a typed [[Gene]]. */
  def toGene: Gene = descriptor.apply(byteArray)
  /**
   * Fraction of differing bits between this chromosome and another, averaged
   * over the zipped byte pairs. Chromosomes whose descriptors are of
   * different classes are considered maximally diverse (1.0).
   */
  def diversity(chromosome: Chromosome): Double = {
    if (this.descriptor.getClass != chromosome.descriptor.getClass) {
      1
    } else {
      // NOTE: zip truncates to the shorter array if the lengths differ.
      val pairedBytes = this.byteArray.zip(chromosome.byteArray)
      val summedDiversity = pairedBytes.map {
        case (left, right) => percentageOfBitsSet((left ^ right).toByte)
      }.sum
      summedDiversity / pairedBytes.length
    }
  }
  /** Ratio of set bits in the byte, in [0, 1]. */
  private def percentageOfBitsSet(num: Byte): Double =
    Integer.bitCount(num & 0xFF) / 8.0
  /** Renders a byte as an 8-character, zero-padded bit string. */
  private def printGeneAsBits(gene: Byte): String = {
    val rawBits = gene.toBinaryString.takeRight(8)
    ("0" * (8 - rawBits.length)) + rawBits
  }
}
|
obecto/gattakka
|
src/main/scala/com/obecto/gattakka/genetics/Chromosome.scala
|
Scala
|
mit
| 1,148
|
/*
* #%L
* WSB Webapp
* %%
* Copyright (C) 2013 - 2017 OpenDesignFlow.org
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package com.idyria.osi.wsb.webapp.http.connector
import com.idyria.osi.wsb.core.network.connectors.tcp.TCPNetworkContext
import com.idyria.osi.wsb.core.network.connectors.tcp.TCPProtocolHandlerConnector
import com.idyria.osi.wsb.core.network.NetworkContext
import com.idyria.osi.tea.listeners.ListeningSupport
import java.nio.ByteBuffer
import com.idyria.osi.wsb.core.network.protocols.ProtocolHandler
import com.idyria.osi.wsb.core.message.Message
import com.idyria.osi.wsb.core.network.connectors.tcp.TCPNetworkContext
import com.idyria.osi.wsb.core.network.connectors.tcp.TCPProtocolHandlerConnector
import com.idyria.osi.wsb.core.network.NetworkContext
import com.idyria.osi.wsb.webapp.http.message.HTTPRequest
import com.idyria.osi.wsb.webapp.mime.DefaultMimePart
import com.idyria.osi.wsb.webapp.mime.MimePart
import com.idyria.osi.tea.logging.TLogSource
import com.idyria.osi.wsb.webapp.http.connector.websocket.WebsocketProtocolhandler
import com.idyria.osi.wsb.core.network.NetworkContext
import com.idyria.osi.wsb.webapp.http.message.HTTPMessage
import com.idyria.osi.wsb.webapp.http.message.HTTPResponse
import com.idyria.osi.wsb.core.network.connectors.ConnectorFactory
import com.idyria.osi.wsb.core.network.connectors.AbstractConnector
import java.net.URL
import com.idyria.osi.wsb.core.network.connectors.tcp.SSLTCPProtocolHandlerConnector
class HTTPConnector(cport: Int) extends TCPProtocolHandlerConnector[MimePart](ctx => new HTTPProtocolHandler(ctx)) with TLogSource {
  // Listen on all interfaces on the supplied port.
  this.address = "0.0.0.0"
  this.port = cport
  this.messageType = "http"
  this.protocolType = "tcp+http"
  // Register the HTTP message factory for the "http" message type.
  Message("http", HTTPMessage)
  /**
   * Sends the response buffer to the client via the TCP layer.
   *
   * NOTE(review): the original comment claimed the socket must be closed
   * after sending, yet every close call below is commented out — confirm
   * whether keep-alive is intended.
   */
  override def send(buffer: ByteBuffer, context: TCPNetworkContext) = {
    super.send(buffer, context)
    //context.socketChannel.socket().flu
    //context.socketChannel.close()
    //context.socket.shutdownOutput()
    //context.socket.close
    //context.socket.socket.close
  }
}
class HTTPSConnector(cport: Int) extends SSLTCPProtocolHandlerConnector[MimePart](ctx => new HTTPProtocolHandler(ctx)) with TLogSource {
  // Listen on all interfaces on the supplied port, over SSL/TLS.
  this.address = "0.0.0.0"
  this.port = cport
  this.messageType = "http"
  this.protocolType = "tcp+https"
  // Register the HTTP message factory for the "http" message type.
  Message("http", HTTPMessage)
  /**
   * Sends the response buffer to the client via the SSL/TCP layer.
   */
  override def send(buffer: ByteBuffer, context: TCPNetworkContext) = {
    super.send(buffer, context)
  }
}
object HTTPSConnector extends ConnectorFactory {
  /**
   * Registers this factory for the "tcp+https" protocol.
   */
  def init = {
    ConnectorFactory("tcp+https", this)
  }
  def apply(port: Int): HTTPSConnector = new HTTPSConnector(port)
  /**
   * Factory for Client mode connectors.
   *
   * The connection string carries no scheme, so "https://" is prepended only
   * to let [[java.net.URL]] parse host and port.
   */
  def newInstance(connectionString: String): AbstractConnector[_ <: NetworkContext] = {
    val url = new URL("https://" + connectionString)
    // Bug fix: honour an explicit port in the connection string; previously
    // 443 was always used. URL.getPort returns -1 when no port was given, and
    // getDefaultPort yields 443 for https.
    val port = if (url.getPort == -1) url.getDefaultPort else url.getPort
    val connector = new HTTPSConnector(port)
    connector.address = url.getHost()
    connector
  }
}
object HTTPConnector extends ConnectorFactory {
  /**
   * Registers this factory for the "tcp+http" protocol.
   */
  def init = {
    ConnectorFactory("tcp+http", this)
  }
  def apply(port: Int): HTTPConnector = new HTTPConnector(port)
  /**
   * Factory for Client mode connectors.
   *
   * The connection string carries no scheme, so "http://" is prepended only
   * to let [[java.net.URL]] parse host and port.
   */
  def newInstance(connectionString: String): AbstractConnector[_ <: NetworkContext] = {
    val url = new URL("http://" + connectionString)
    // Bug fix: honour an explicit port in the connection string; previously
    // 80 was always used. URL.getPort returns -1 when no port was given, and
    // getDefaultPort yields 80 for http.
    val port = if (url.getPort == -1) url.getDefaultPort else url.getPort
    val connector = new HTTPConnector(port)
    connector.address = url.getHost()
    connector
  }
}
class HTTPProtocolHandler(var localContext: NetworkContext) extends ProtocolHandler[MimePart](localContext) with ListeningSupport with TLogSource {
  // Receive
  //-----------------
  /**
   * Current parser state: "line" (headers), "bytes" (fixed-length body) or
   * "chunked" (Transfer-Encoding: chunked body).
   */
  var readMode = "line"
  /**
   * The read mode to switch to for the upcoming message body.
   */
  var nextReadMode = "line"
  // Byte count announced by the Content-Length header (0 = none seen yet).
  var contentLength = 0
  var contentTypeRegexp = """Content-Type: (.*)""".r
  var contentLengthRegexp = """Content-Length: (.*)""".r
  // Mime part currently being assembled from the incoming stream.
  var currentPart = new DefaultMimePart()
  // Protocol Switching
  //-------------------------
  // Handlers to switch to on an Upgrade (currently websocket only).
  var switchingProtocols = Map[String, Class[_ <: ProtocolHandler[_]]]("websocket" -> classOf[WebsocketProtocolhandler])
  // Chunked Transfer Encoding
  //-------------------------------
  // Remaining byte count of the chunk currently being read; persists across
  // receive calls (0 = a chunk-size line is expected next).
  var chunkSize = 0
  var transferEncodingRegexp = """Transfer-Encoding: (.*)""".r
// Send
//---------------------
def storePart(part: MimePart) = {
(this.availableDatas.contains(this.currentPart), this.availableDatas.size) match {
// Part is already stacked, don't do anything
case (true, size) =>
logFine("Part is already stacked, don't do anything")
// Add part
case (false, 0) =>
logFine("Storing mime part")
this.availableDatas += part
// Merge part
case (false, size) =>
logFine("Merging with head")
this.availableDatas.head.append(part)
}
}
  /**
   * Receives some HTTP bytes and feeds the line/bytes/chunked state machine.
   *
   * Completed mime parts are appended to availableDatas along the way;
   * parser state (readMode, contentLength, chunkSize, currentPart) persists
   * across calls so a message may arrive in several buffers.
   */
  def receive(buffer: ByteBuffer): Boolean = {
    @->("http.connector.receive", buffer)
    // Copy the remaining buffer content into a local byte array.
    var bytes = new Array[Byte](buffer.remaining)
    buffer.get(bytes)
    logFine("Got HTTP Datas: " + new String(bytes))
    var stop = false
    do {
      // If no bytes to read, put on hold
      if (bytes.size == 0)
        stop = true
      else
        // Read Mode
        //------------------
        readMode match {
          // Take line
          //---------------
          case "line" =>
            // Read one header line (up to '\n').
            var currentLineBytes = bytes.takeWhile(_ != '\n')
            bytes = bytes.drop(currentLineBytes.size + 1)
            // NOTE(review): the result of bytes.drop(1) is discarded (drop
            // does not mutate), so the leading '\r' is never actually removed
            // here; the trim below only strips it from the decoded line.
            if (bytes.length != 0 && bytes(0) == '\r')
              bytes.drop(1)
            var line = new String(currentLineBytes.toArray).trim
            //-- Parse protocol
            //-------------------------
            line match {
              //-- Content Length header: record it and expect a byte body next
              case line if (contentLengthRegexp.findFirstMatchIn(line) match {
                case Some(matched) =>
                  contentLength = matched.group(1).toInt
                  logFine("Content Type specified to bytes")
                  nextReadMode = "bytes"
                  true
                case None => false
              }) =>
                currentPart.addParameter(line)
              //-- Content Type
              case line if (contentTypeRegexp.findFirstMatchIn(line) match {
                // Multipart form data -> just continue using lines
                case Some(matched) if (matched.group(1).matches("multipart/form-data.*")) => true
                case Some(matched) if (matched.group(1).matches("application/x-www-form-urlencoded.*")) => true
                // Otherwise, don't change anything
                case Some(matched) =>
                  true
                case None => false
              }) =>
                currentPart.addParameter(line)
              //-- Transfer-Encoding Chunked
              case line if (transferEncodingRegexp.findFirstMatchIn(line) match {
                // Chunked
                case Some(matched) if (matched.group(1).matches("chunked")) =>
                  logFine("Content is chunked")
                  nextReadMode = "chunked"
                  true
                // Unsupported
                case Some(matched) =>
                  logFine("Unsupported Transfer-Encoding: " + matched.group(1))
                  true
                case None =>
                  false
              }) => currentPart.addParameter(line)
              //-- Normal Line
              case line if (line != "") =>
                currentPart.addParameter(line)
                //-- If content length is reached, that was the last line
                if (contentLength != 0 && contentLength == currentPart.contentLength) {
                  this.storePart(this.currentPart)
                  this.currentPart = new DefaultMimePart
                  this.contentLength = 0
                }
              //-- Empty Line but content is upcoming
              case line if (line == "" && contentLength != 0 && nextReadMode == "line") =>
                logFine(s"Empty Line but some content is expected")
                //--> Write this message part to output
                this.availableDatas += this.currentPart
                this.currentPart = new DefaultMimePart
              //-- Empty line, content is upcoming and next Read mode is not line
              case line if (line == "" && nextReadMode != "line") =>
                logFine(s"Empty Line but some content is expected in read mode: $nextReadMode, for a length of: $contentLength")
                // Headers finished: switch to the announced body read mode.
                readMode = nextReadMode
              //-- Empty Line and no content
              case line if (line == "" && contentLength == 0 && this.currentPart.contentLength > 0) =>
                logFine(s"Empty Line and no content expected, end of section")
                //--> Write this message part to output
                this.availableDatas += this.currentPart
                this.currentPart = new DefaultMimePart
                this.contentLength = 0
              case _ =>
            }
          // Buffer Bytes
          //---------------
          case "bytes" =>
            // Consume everything available into the current part.
            this.currentPart += bytes
            bytes = bytes.drop(bytes.size)
            // Report read progress
            var progress = this.currentPart.bytes.size * 100.0 / contentLength
            logFine(s"Read state: $progress %, $contentLength expected, and read bytes ${this.currentPart.bytes.size} and content length: ${this.currentPart.contentLength} ")
            // NOTE(review): completion is detected with a tolerance of 10
            // bytes rather than an exact match — confirm this is deliberate.
            if ((contentLength - this.currentPart.contentLength) < 10) {
              (this.availableDatas.contains(this.currentPart), this.availableDatas.size) match {
                // Part is already stacked, don't do anything
                case (true, size) =>
                // Add part
                case (false, 0) =>
                  logFine("Storing mime part")
                  this.availableDatas += this.currentPart
                // Merge part
                case (false, size) =>
                  logFine("Merging with head")
                  this.availableDatas.head.append(this.currentPart)
              }
              // Reset all
              this.currentPart = new DefaultMimePart
              this.contentLength = 0
              this.nextReadMode = "line"
              this.readMode = "line"
            }
          //-- Chunked
          case "chunked" =>
            do {
              //-- If Chunk size is 0 -> try to determine a size from first line
              chunkSize = chunkSize match {
                case 0 =>
                  //-- Read Size Line as HEX value
                  var chunksizeLine = bytes.takeWhile(_.toChar != '\n').map(_.toChar).mkString.trim
                  bytes = bytes.drop(chunksizeLine.getBytes.length + 2) // 2 for CRLF
                  Integer.parseInt(chunksizeLine, 16)
                case _ => chunkSize
              }
              //-- Normal processing
              chunkSize match {
                // END: a zero-size chunk terminates the chunked transfer.
                case 0 =>
                  logFine(s"[Chunked] End of chunked transfer")
                  storePart(this.currentPart)
                  stop = true
                  // Reset all
                  this.currentPart = new DefaultMimePart
                  this.contentLength = 0
                  this.nextReadMode = "line"
                  this.readMode = "line"
                case _ =>
                  //-- Read bytes and add to mime part
                  var readBytes = bytes.size match {
                    case available if (available >= chunkSize) =>
                      // Get byte, then drop from array + 2 bytes for the last CRLF
                      logFine(s"[Chunked] Reading Chunk of $chunkSize")
                      this.currentPart += bytes.take(chunkSize)
                      bytes = bytes.drop(chunkSize + 2)
                      // Chunk size to 0 to reset actual chunk
                      chunkSize = 0
                    case available =>
                      // Get as much as possible until next call
                      logFine(s"[Chunked] Not enough bytes available (${bytes.size} to complete chunk of $chunkSize")
                      this.currentPart += bytes.take(available)
                      // NOTE(review): dropping chunkSize + 2 here looks wrong —
                      // only `available` bytes were consumed from this buffer;
                      // confirm whether this should be bytes.drop(available).
                      bytes = bytes.drop(chunkSize + 2)
                      // Update chunksize to the remaining byte count
                      chunkSize = chunkSize - available
                      stop = true
                      available
                  }
              }
            } while (!stop)
          case mode => throw new RuntimeException(s"HTTP Receive protocol unsupported read mode: $mode")
        }
    } while (!stop)
    logFine("Done")
    true
  }
  /**
   * Sends the given buffer, first inspecting the outgoing HTTP response stored in the
   * network context (under key "message") for a protocol upgrade. When the response
   * carries "Connection: Upgrade" and "Upgrade: websocket", a WebsocketProtocolhandler
   * is installed on the context and the message type switched before sending.
   *
   * @param buffer bytes to write to the wire
   * @param nc     network context holding the last HTTP response under "message"
   * @return the buffer to actually write (currently always `buffer` unchanged — see note below)
   */
  def send(buffer: ByteBuffer, nc: NetworkContext): ByteBuffer = {
    // Try to detect protocol switching based on the response headers
    //----------------
    nc[HTTPResponse]("message") match {
      case Some(httpResponse) =>
        (httpResponse.parameters.find(_._1 == "Connection"), httpResponse.parameters.find(_._1 == "Upgrade")) match {
          //-- Upgrade to Websocket
          case (Some(Tuple2(_, "Upgrade")), Some(Tuple2(_, "websocket"))) =>
            //println(s"Connector Update to websocket protocol detected")
            // Record in Network Context the new protocol handler
            nc("protocol.handler" -> new WebsocketProtocolhandler(nc))
            nc("message.type" -> "json-soap")
          case _ =>
        }
      case _ =>
    }
    // Normal Send
    //-------------------
    // NOTE(review): `post` is set to "\r\n" and immediately overwritten with "",
    // so `post.getBytes.length` is always 0 and the `case l` re-wrapping branch
    // below is dead code. Kept as-is; remove once confirmed intentional.
    var post = "\r\n"
    post = ""
    post.getBytes.length match {
      case 0 =>
        buffer
      case l =>
        // Dead branch (see note above): would append `post` after the payload.
        var newBuffer = ByteBuffer.allocate(buffer.remaining() + post.getBytes().length)
        newBuffer.put(buffer)
        buffer.clear()
        newBuffer.put(post.getBytes())
        newBuffer.flip()
        newBuffer
    }
    // buffer
    /*var newBuffer = ByteBuffer.allocate(buffer.remaining())
    newBuffer.put(buffer)
    newBuffer.flip()))))
    newBuffer*/
  }
}
|
richnou/wsb-webapp
|
src/main/scala/com/idyria/osi/wsb/webapp/http/connector/HTTPConnector.scala
|
Scala
|
agpl-3.0
| 16,740
|
package fr.iscpif.doors.client
import scaladget.api.{BootstrapTags => bs}
import scaladget.stylesheet.all._
import fr.iscpif.doors.client.stylesheet._
import scaladget.stylesheet.{all => sheet}
import scalatags.JsDom.tags
import scalatags.JsDom.all._
import Client.panelInBody
import org.scalajs.dom.html.Element
/*
* Copyright (C) 2017 // ISCPIF CNRS
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
 * Full-page informational panel: shows `aMessage` inside an "Information"
 * panel with an "Ok" link that navigates back to the site root ("/").
 *
 * @param aMessage text displayed inside the panel body
 */
class MessageDisplay(aMessage: String) {

  // Builds and mounts the DOM for the message panel.
  // NOTE(review): the inner `.render` calls convert scalatags trees into raw
  // DOM nodes before embedding them in the enclosing tree — presumably required
  // by `panelInBody`'s signature; confirm the mixed Frag/Element nesting is intended.
  def render = {
    bs.withBootstrapNative(
      tags.div()(
        tags.div(wall +++ Seq(width := "400px", margin := "auto"))(
          panelInBody(
            "Information",
            tags.div(
              tags.p(
                aMessage
              ),
              tags.div(Seq(textAlign := "right")) (
                tags.a(btn +++ btn_default +++ btn_primary +++ Seq(href := "/"))(
                  "Ok"
                )
              ).render
            ).render
          ).render
        ).render
      ).render
    )
  }
}
|
ISCPIF/doors
|
client/src/main/scala/fr/iscpif/doors/client/MessageDisplay.scala
|
Scala
|
agpl-3.0
| 1,617
|
/**
* Copyright 2015 Gianluca Amato <gamato@unich.it>
*
* This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains
* JANDOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JANDOM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of a
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JANDOM. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich.jandom.objectmodels
import scala.annotation.tailrec
/**
* This trait defines concrete methods which may be used to implement an object model. It is not
* particularly fast at the moment, since correctness and readability has been favored rather than
* performance. However, memoization is used to improve performance in some particularly lengthy
* computations.
* @todo make the methods faster
*/
trait ObjectModelHelper {
  // Only mixable into a concrete ObjectModel: the helpers below are built on
  // its primitives (parents, children, declaredFields, typeOf, lteq, isConcrete, ...).
  this: ObjectModel =>

  /**
   * A mutable HashMap used for memoizing sharing information.
   */
  private val sharing = collection.mutable.HashMap[(Type, Type), Boolean]()

  /**
   * A mutable HashMap used for memoizing reachability information.
   */
  private val reachable = collection.mutable.HashMap[Type, Set[Type]]()

  /**
   * A mutable HashMap used for memoizing concreteApproximations
   */
  private val glb = collection.mutable.HashMap[(Type, Type), Option[Type]]()

  // True iff every field in `fs` is a member of `t`.
  // NOTE(review): the recursion passes `t` unchanged rather than the type of the
  // traversed field, so a "path" t.f1.f2 is validated against `t` only. Confirm
  // this is intended and the call is not meant to descend into typeOf(f).
  def pathExists(t: Type, fs: Field*): Boolean = {
    if (fs.isEmpty)
      true
    else {
      val f = fs.head
      (fields(t) contains f) && pathExists(t, fs.tail: _*)
    }
  }

  /**
   * This is an helper methods which visit an implicit graph, collecting values associated to nodes.
   * No visited-set is kept, so nodes reachable through several paths are
   * re-processed; termination relies on the `children` relation being acyclic
   * (as a type hierarchy is).
   * @tparam Node the type for nodes of the graph
   * @tparam Value the values associated to nodes
   * @param start starting node
   * @param children maps each node to its children
   * @param values maps each node to a set of values
   */
  protected def visitGraph[Node, Value](start: Node, children: Node => Set[Node], values: Node => Set[Value]): Set[Value] = {
    val worklist = collection.mutable.Queue[Node](start)
    val result = collection.mutable.Set[Value]()
    while (!worklist.isEmpty) {
      val current = worklist.dequeue()
      result ++= values(current)
      worklist ++= children(current)
    }
    result.toSet
  }

  // All super-types of `t`, including `t` itself.
  def ancestors(t: Type): Set[Type] = visitGraph(t, parents, { (t: Type) => Set(t) })

  // All sub-types of `t`, including `t` itself.
  def descendants(t: Type): Set[Type] = visitGraph(t, children, { (t: Type) => Set(t) })

  // Fields of `t`: its declared fields plus those of all its ancestors.
  def fields(t: Type): Set[Field] = visitGraph(t, parents, declaredFields)

  // Fields which may appear on a runtime value of static type `t`: the union
  // of the fields of all concrete descendants of `t`.
  def possibleFields(t: Type): Set[Field] = {
    for ( k <- descendants(t); if isConcrete(k); f <- fields(k) )
      yield f
  }

  // A type is concretizable when at least one descendant (possibly itself) is concrete.
  def isConcretizable(t: Type): Boolean = {
    descendants(t) exists isConcrete
  }

  // Maximal elements of `ts` with respect to the subtype order `lteq` (an antichain).
  def upperCrown(ts: Iterable[Type]): Set[Type] = {
    @tailrec
    def upperCrownHelper(ts: Iterable[Type], acc: Set[Type]): Set[Type] = {
      if (ts.isEmpty)
        acc
      else {
        val newt = ts.head
        val newacc = collection.mutable.Set[Type]()
        var toAdd = true
        for (t <- acc) {
          // newt is subsumed by an element already in the crown
          if (lteq(newt, t)) toAdd = false
          // keep t unless it is strictly below newt
          if ((!lteq(t, newt)) || t == newt) newacc += t
        }
        if (toAdd) newacc += newt
        upperCrownHelper(ts.tail, newacc.toSet)
      }
    }
    upperCrownHelper(ts, Set())
  }

  // Memoized approximation of the greatest lower bound of t1 and t2 restricted
  // to concretizable types; None when no common concrete subtype exists.
  def concreteApprox(t1: Type, t2: Type): Option[Type] = glb.getOrElseUpdate((t1, t2), {
    if (lteq(t1, t2) && isConcrete(t1))
      Option(t1)
    else {
      val glbs = upperCrown(children(t1) map { concreteApprox(_, t2) } filter { _.isDefined } map { _.get })
      // Actually, we would need to remove elements from glbs which are subsumed by other elements.
      if (glbs.isEmpty)
        None
      else if (glbs.forall(_ == glbs.head))
        Option(glbs.head)
      else
        // several incomparable candidates: fall back to t1 as a safe approximation
        Option(t1)
    }
  })

  /**
   * @inheritdoc
   * It is computed by iterating the binary glbApprox, but may be overriden for performance reasons.
   */
  def concreteApprox(ts: Iterable[Type]): Option[Type] = {
    @tailrec
    def glbhelper(ts: Iterable[Type], current: Option[Type]): Option[Type] = {
      if (ts.isEmpty || current.isEmpty)
        current
      else
        glbhelper(ts.tail, concreteApprox(current.get, ts.head))
    }
    if (ts.isEmpty)
      None
    else
      glbhelper(ts, Option(ts.head))
  }

  // Descends the hierarchy while there is a single concretizable refinement,
  // returning the most specific such type.
  // NOTE(review): `concreteApprox(_).get` assumes the unary approximation is
  // defined for every concretizable child — confirm this invariant holds.
  def concreteApprox(t: Type): Option[Type] = {
    var subs = Set(t)
    var current = t
    do {
      current = subs.head
      subs = upperCrown((children(current) filter isConcretizable) map { concreteApprox(_).get })
    } while (subs.size == 1 && (!isConcrete(current)))
    if (isConcretizable(current))
      Option(current)
    else
      None
  }

  // Fields a value of type `t` certainly needs: the fields of its concrete
  // approximation, or none when `t` has no concrete approximation.
  def neededFields(t: Type): Set[Field] = {
    val glb = concreteApprox(t, t)
    if (glb.isEmpty) Set() else fields(glb.get)
  }

  // Memoized set of concretizable, non-primitive types reachable from `t`
  // through possible fields and element types (arrays/collections).
  def reachablesFrom(t: Type): Set[Type] = reachable.get(t) match {
    case Some(types) =>
      types
    case None =>
      // pre-seed with the empty set; overwritten with the real result below
      reachable(t) = Set()
      val set = collection.mutable.Set[Type]()
      val queue = collection.mutable.Queue[Type](t)
      while (queue.nonEmpty) {
        val t1 = queue.dequeue
        if (isConcretizable(t1) && !isPrimitive(t1)) set += t1
        // Only field targets already in `set` are skipped, so the same type may
        // be enqueued more than once before its first processing; duplicates
        // are finite since processed types either join `set` or yield no fields.
        for { f <- possibleFields(t1); t2 = typeOf(f); if !set.contains(t2) } queue.enqueue(t2)
        for { elt <- elementType(t1) } queue.enqueue(elt)
      }
      val result = set.toSet
      reachable(t) = result
      result
  }

  // `tgt` (or a subtype of it) is reachable from `src`.
  def isReachable(src: Type, tgt: Type) = reachablesFrom(src) exists { lteq(tgt, _) }

  // Two non-primitive types may alias when they admit a common concrete subtype.
  def mayBeAliases(t1: Type, t2: Type): Boolean =
    !isPrimitive(t1) && !isPrimitive(t2) && concreteApprox(t1, t2).isDefined

  // Memoized, symmetric sharing test: t1 and t2 may share when some type
  // reachable from t1 may alias some type reachable from t2.
  def mayShare(t1: Type, t2: Type): Boolean = {
    val doShare = sharing.get((t1, t2)) orElse sharing.get((t2, t1))
    if (doShare.isDefined)
      doShare.get
    else {
      val reach1 = reachablesFrom(t1)
      val reach2 = reachablesFrom(t2)
      val sharable = reach1 exists { t1 => reach2 exists { t2 => mayBeAliases(t1, t2) } }
      sharing((t1, t2)) = sharable
      sharing((t2, t1)) = sharable
      sharable
    }
  }
}
|
francescaScozzari/Jandom
|
core/src/main/scala/it/unich/jandom/objectmodels/ObjectModelHelper.scala
|
Scala
|
lgpl-3.0
| 6,436
|
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations.calculations
import uk.gov.hmrc.ct.box.CtTypeConverters
import uk.gov.hmrc.ct.computations._
trait TotalDeductionsCalculator extends CtTypeConverters {

  /**
   * Computes box CP59 (total deductions) as the sum of boxes
   * CP58, CP505, CP504, CP55 and CP57.
   */
  def totalDeductionsCalculation(cp58: CP58,
                                 cp505: CP505,
                                 cp504: CP504,
                                 cp55: CP55,
                                 cp57: CP57): CP59 = {
    val summedDeductions = cp58 + cp505 + cp504 + cp55 + cp57
    CP59(summedDeductions)
  }
}
|
keithhall/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/computations/calculations/TotalDeductionsCalculator.scala
|
Scala
|
apache-2.0
| 1,086
|
package models.qlkh
import models.core.{AbstractTable, WithId}
import play.api.db.slick.Config.driver.simple._
import play.api.db.slick.DB
import play.api.Play.current
/**
* The Class Station.
*
* @author Nguyen Duc Dung
* @since 4/22/14 9:00 AM
*
*/
// Row model: a station with an optional surrogate id (None before insertion).
case class Station(
                    id: Option[Long] = None,
                    name: String
                    ) extends WithId[Long]

// Slick table mapping for the "station" table.
class Stations(tag: Tag) extends AbstractTable[Station](tag, "station") {

  def name = column[String]("name", O.NotNull)

  override def * = (id.?, name) <>(Station.tupled, Station.unapply)
}

object Stations extends TableQuery[Stations](new Stations(_)) {

  // Stations live in the "qlkh" database.
  val dbName = "qlkh"

  // First station whose name matches exactly, if any.
  // NOTE(review): assumes station names are effectively unique — confirm;
  // otherwise an arbitrary matching row is returned.
  def findByName(name: String) = DB(dbName).withSession(implicit session => {
    where(_.name === name).firstOption()
  })
}
|
SunriseSoftVN/sunerp
|
app/models/qlkh/Station.scala
|
Scala
|
apache-2.0
| 811
|
package java.lang
/**
 * Re-implementation of `java.lang.Enum` for the Scala.js Java library.
 *
 * @param _name    the constant's declared name, returned by `name()`/`toString`
 * @param _ordinal the constant's position, returned by `ordinal()`
 * @tparam E the concrete enum type (F-bounded so `compareTo` is type-safe)
 */
abstract class Enum[E <: Enum[E]] protected (_name: String, _ordinal: Int)
    extends Comparable[E] with java.io.Serializable {
  final def name(): String = _name
  final def ordinal(): Int = _ordinal
  override def toString(): String = _name

  // Enum constants are singletons: reference equality and identity hash,
  // as in the JDK (hence the final delegation to AnyRef's implementations).
  @inline
  override final def equals(that: Any): scala.Boolean = super.equals(that)

  @inline
  override final def hashCode(): Int = super.hashCode()

  // The JDK forbids cloning enum constants to preserve their singleton nature.
  override protected final def clone(): AnyRef =
    throw new CloneNotSupportedException("Enums are not cloneable")

  // Natural order of enum constants is their declaration (ordinal) order.
  final def compareTo(o: E): Int = _ordinal.compareTo(o.ordinal)

  // Not implemented:
  //    final def getDeclaringClass(): Class[E]

  // Enum constants cannot have finalizers; this final no-op enforces that.
  override protected final def finalize(): Unit = ()
}
// Not implemented:
// def valueOf[T <: Enum[T]](enumType: Class[T], name:String): T
|
mdedetrich/scala-js
|
javalanglib/src/main/scala/java/lang/Enum.scala
|
Scala
|
bsd-3-clause
| 813
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes.logical
import org.apache.flink.table.calcite.FlinkRelBuilder.PlannerNamedWindowProperty
import org.apache.flink.table.plan.logical.LogicalWindow
import org.apache.flink.table.plan.nodes.FlinkConventions
import org.apache.flink.table.plan.nodes.calcite.{LogicalWindowAggregate, WindowAggregate}
import org.apache.calcite.plan._
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.calcite.rel.core.Aggregate.Group
import org.apache.calcite.rel.core.{Aggregate, AggregateCall}
import org.apache.calcite.rel.metadata.RelMetadataQuery
import org.apache.calcite.sql.SqlKind
import org.apache.calcite.util.ImmutableBitSet
import java.util
import scala.collection.JavaConverters._
/**
 * Logical RelNode for a group-window aggregate in Flink's LOGICAL convention,
 * carrying the [[LogicalWindow]] definition and its named window properties
 * in addition to the regular Calcite aggregate state.
 */
class FlinkLogicalWindowAggregate(
    cluster: RelOptCluster,
    traitSet: RelTraitSet,
    child: RelNode,
    groupSet: ImmutableBitSet,
    aggCalls: util.List[AggregateCall],
    window: LogicalWindow,
    namedProperties: Seq[PlannerNamedWindowProperty])
  extends WindowAggregate(cluster, traitSet, child, groupSet, aggCalls, window, namedProperties)
  with FlinkLogicalRel {

  // Copies this node, preserving the window and its named properties.
  // The `indicator` and `groupSets` arguments are intentionally ignored:
  // only simple (non-GROUPING SETS) aggregates are supported by this node.
  override def copy(
      traitSet: RelTraitSet,
      input: RelNode,
      indicator: Boolean,
      groupSet: ImmutableBitSet,
      groupSets: util.List[ImmutableBitSet],
      aggCalls: util.List[AggregateCall]): Aggregate = {
    new FlinkLogicalWindowAggregate(
      cluster,
      traitSet,
      input,
      groupSet,
      aggCalls,
      window,
      namedProperties)
  }

  // Cost model: row count drives row/IO cost; CPU combines grouping work
  // (weighted to discourage many group keys) with per-row aggregation work.
  override def computeSelfCost(planner: RelOptPlanner, mq: RelMetadataQuery): RelOptCost = {
    val child = this.getInput
    val rowCnt = mq.getRowCount(child)
    val rowSize = mq.getAverageRowSize(child)
    val aggCnt = this.getAggCallList.size
    // group by CPU cost(multiple by 1.1 to encourage less group keys) + agg call CPU cost
    val cpuCost: Double = rowCnt * getGroupCount * 1.1 + rowCnt * aggCnt
    planner.getCostFactory.makeCost(rowCnt, cpuCost, rowCnt * rowSize)
  }
}
/**
 * Rule converting a [[LogicalWindowAggregate]] from Calcite's NONE convention
 * into a [[FlinkLogicalWindowAggregate]] in Flink's LOGICAL convention.
 */
class FlinkLogicalWindowAggregateConverter
  extends ConverterRule(
    classOf[LogicalWindowAggregate],
    Convention.NONE,
    FlinkConventions.LOGICAL,
    "FlinkLogicalWindowAggregateConverter") {

  override def matches(call: RelOptRuleCall): Boolean = {
    val windowAgg = call.rel(0).asInstanceOf[LogicalWindowAggregate]
    // Plain AVG is supported natively, but every other AVG-family aggregate
    // must first be decomposed by the WindowAggregateReduceFunctionsRule,
    // so reject the match when any such call is present.
    val aggKinds = windowAgg.getAggCallList.asScala.map(_.getAggregation.getKind)
    !aggKinds.exists(kind => kind != SqlKind.AVG && SqlKind.AVG_AGG_FUNCTIONS.contains(kind))
  }

  override def convert(rel: RelNode): RelNode = {
    val windowAgg = rel.asInstanceOf[LogicalWindowAggregate]
    // Only simple (non-indicator, non-GROUPING SETS) aggregates are supported.
    require(!windowAgg.indicator && (windowAgg.getGroupType == Group.SIMPLE))
    val convertedInput = RelOptRule.convert(windowAgg.getInput, FlinkConventions.LOGICAL)
    val logicalTraits = convertedInput.getCluster.traitSet().replace(FlinkConventions.LOGICAL).simplify()
    new FlinkLogicalWindowAggregate(
      rel.getCluster,
      logicalTraits,
      convertedInput,
      windowAgg.getGroupSet,
      windowAgg.getAggCallList,
      windowAgg.getWindow,
      windowAgg.getNamedProperties)
  }
}
object FlinkLogicalWindowAggregate {
  // Single shared instance of the converter rule for planner registration.
  val CONVERTER = new FlinkLogicalWindowAggregateConverter
}
|
shaoxuan-wang/flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/nodes/logical/FlinkLogicalWindowAggregate.scala
|
Scala
|
apache-2.0
| 4,214
|
/*
* Copyright 2015 ligaDATA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ligadata.KamanjaManager
import com.ligadata.InputOutputAdapterInfo.CountersAdapter
/**
 * Trivial in-memory counters adapter: a map of named Long counters,
 * all access guarded by a single lock object.
 */
object SimpleStats extends CountersAdapter {

  private[this] val _lock = new Object()
  private[this] val _statusInfo = scala.collection.mutable.Map[String, Long]()

  /** Adds `cnt` to the counter `key` (starting from 0) and returns the new value. */
  override def addCntr(key: String, cnt: Long): Long = _lock.synchronized {
    val updated = _statusInfo.getOrElse(key, 0L) + cnt
    _statusInfo(key) = updated
    updated
  }

  /** Adds every (key, delta) entry of `cntrs` to the corresponding counter. */
  override def addCntr(cntrs: scala.collection.immutable.Map[String, Long]): Unit = _lock.synchronized {
    for ((key, delta) <- cntrs) {
      _statusInfo(key) = _statusInfo.getOrElse(key, 0L) + delta
    }
  }

  /** Current value of counter `key`, 0 when absent. */
  override def getCntr(key: String): Long = _lock.synchronized {
    _statusInfo.getOrElse(key, 0L)
  }

  /** All counters rendered as "key -> value" pairs joined with `delim`. */
  override def getDispString(delim: String): String = _lock.synchronized {
    _statusInfo.mkString(delim)
  }

  /** Immutable snapshot of all counters. */
  override def copyMap: scala.collection.immutable.Map[String, Long] = _lock.synchronized {
    _statusInfo.toMap
  }
}
|
traytonwhite/Kamanja
|
trunk/KamanjaManager/src/main/scala/com/ligadata/KamanjaManager/SimpleStats.scala
|
Scala
|
apache-2.0
| 1,611
|
package com.bob.scala.webapi.sptutorial
import javax.annotation.{PostConstruct, PreDestroy}
import org.springframework.beans.factory.annotation.Value
import org.springframework.stereotype.Component
import scala.beans.BeanProperty
/**
 * Minimal Spring bean demonstrating lifecycle callbacks and property injection.
 *
 * Created by bob on 16/6/14.
 */
@Component
class HelloWorld {

  // Invoked by Spring once dependency injection is complete.
  @PostConstruct
  def init() {
    println("HelloWorld Bean is going through init.")
  }

  // Invoked by Spring just before the bean is removed from the container.
  // NOTE(review): "destory" is a typo for "destroy"; the @PreDestroy annotation
  // (not the name) drives invocation, but renaming would change the public API.
  @PreDestroy
  def destory() {
    println("HelloWorld Bean is going through destroy.")
  }

  // Injected from the "helloworld.message" property; @BeanProperty generates
  // the Java-style getter/setter Spring expects.
  @BeanProperty
  @Value("${helloworld.message}")
  var message: String = _

  /**
   * Spring Bean definition inheritance has nothing to do with Java class inheritance but inheritance concept is same.
   * You can define a parent bean definition as a template and other child beans can inherit required configuration from the parent bean.
   */
}
|
sevenz/springboot-scala-withswagger
|
webapi/src/main/scala/com/bob/scala/webapi/sptutorial/HelloWorld.scala
|
Scala
|
apache-2.0
| 836
|
package de.tototec.sbuild.eclipse.plugin.container
import org.eclipse.core.runtime.IProgressMonitor
import org.eclipse.core.runtime.Status
import org.eclipse.jdt.core.JavaModelException
import de.tototec.sbuild.eclipse.plugin.Logger.error
import de.tototec.sbuild.eclipse.plugin.SimpleJob
/**
 * Eclipse workspace job that refreshes the SBuild classpath container by
 * recomputing its library paths.
 *
 * @param container the classpath container to refresh
 * @param isUser    whether the job was triggered by an explicit user action
 */
class RefreshContainerJob(container: SBuildClasspathContainer, isUser: Boolean)
  extends SimpleJob("Refresh SBuild Libraries", isUser, None)((monitor: IProgressMonitor) => {
    try {
      monitor.subTask("Updating library pathes")
      container.updateClasspath(monitor)
      Status.OK_STATUS
    } catch {
      // Only JavaModelException is handled here (logged, its status returned
      // as the job result); any other exception propagates to the job framework.
      case e: JavaModelException =>
        error(s"${container.projectName}: Exception in refresh job.", e)
        e.getStatus()
    }
  }
  )
|
SBuild-org/sbuild-eclipse-plugin
|
de.tototec.sbuild.eclipse.plugin/src/main/scala/de/tototec/sbuild/eclipse/plugin/container/RefreshContainerJob.scala
|
Scala
|
apache-2.0
| 749
|
/*******************************************************************************
* Copyright (c) 2019. Carl Minden
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package com.anathema_roguelike
package main.display
import com.anathema_roguelike.main.Game
import com.anathema_roguelike.main.utilities.position.Direction
import com.anathema_roguelike.main.utilities.position.Point
import squidpony.squidgrid.gui.gdx.SColor
import java.util
/**
 * Renderable outline around a set of grid cells: for every valid point an
 * edge is drawn on each side whose neighbouring cell is not itself part of
 * the region, so only the region's border is traced.
 *
 * @param color  colour of the outline
 * @param offset grid offset added to every point before pixel conversion
 */
abstract class Outline(color: SColor, private var offset: Point = Point(0, 0)) extends Renderable {

  /** Grid points forming the outlined region. */
  def getPoints: Iterable[Point]

  /** Whether the given point belongs to the region. */
  def validPoint(point: Point): Boolean

  override def render(): Unit = {
    for (p <- getPoints) {
      if(validPoint(p)) {
        // Draw an edge towards every neighbour that lies outside the region.
        if(!validPoint(Direction.offset(p, Direction.UP))) top(p, color)
        if(!validPoint(Direction.offset(p, Direction.DOWN))) bottom(p, color)
        if(!validPoint(Direction.offset(p, Direction.LEFT))) left(p, color)
        if(!validPoint(Direction.offset(p, Direction.RIGHT))) right(p, color)
      }
    }
  }

  // Pixel-space edge helpers below.
  // NOTE(review): top/bottom start one pixel left of the cell
  // (x * cellWidth - 1) while left/right do not — presumably to close the
  // corners; confirm the asymmetry is intentional.
  private def top(point: Point, color: SColor) = {
    val x = point.getX + offset.getX
    val y = point.getY + offset.getY
    Game.getInstance.getDisplay.drawLine(x * Display.cellWidth - 1, y * Display.cellHeight, (x + 1) * Display.cellWidth, y * Display.cellHeight, color)
  }

  private def bottom(point: Point, color: SColor) = {
    val x = point.getX + offset.getX
    val y = point.getY + offset.getY
    Game.getInstance.getDisplay.drawLine(x * Display.cellWidth - 1, (y + 1) * Display.cellHeight - 1, (x + 1) * Display.cellWidth, (y + 1) * Display.cellHeight - 1, color)
  }

  private def left(point: Point, color: SColor) = {
    val x = point.getX + offset.getX
    val y = point.getY + offset.getY
    Game.getInstance.getDisplay.drawLine(x * Display.cellWidth, y * Display.cellHeight, x * Display.cellWidth, (y + 1) * Display.cellHeight, color)
  }

  private def right(point: Point, color: SColor) = {
    val x = point.getX + offset.getX
    val y = point.getY + offset.getY
    Game.getInstance.getDisplay.drawLine((x + 1) * Display.cellWidth, y * Display.cellHeight, (x + 1) * Display.cellWidth, (y + 1) * Display.cellHeight, color)
  }

  /** Moves the whole outline to a new grid offset. */
  def setOffset(point: Point): Unit = this.offset = point
}
|
carlminden/anathema-roguelike
|
src/com/anathema_roguelike/main/display/Outline.scala
|
Scala
|
gpl-3.0
| 2,928
|
/*
* Copyright 2014 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.server
import java.security.cert.X509Certificate
/**
 * TLS session attributes of the current connection.
 *
 * @param sslSessionId    identifier of the negotiated SSL session
 * @param cipherSuite     negotiated cipher suite name
 * @param keySize         size of the session key, in bits
 * @param X509Certificate peer certificate chain
 *                        (name breaks lowerCamelCase but is public API and must stay)
 */
final case class SecureSession(
    sslSessionId: String,
    cipherSuite: String,
    keySize: Int,
    X509Certificate: List[X509Certificate],
)

object SecureSession {
  /** Convenience constructor taking the chain as the `Array` provided by JSSE. */
  def apply(
      sslSessionId: String,
      cipherSuite: String,
      keySize: Int,
      X509Certificate: Array[X509Certificate],
  ): SecureSession =
    SecureSession(sslSessionId, cipherSuite, keySize, X509Certificate.toList)
}
|
rossabaker/http4s
|
server/jvm/src/main/scala/org/http4s/server/SecureSession.scala
|
Scala
|
apache-2.0
| 1,072
|
/*
* Copyright 2012 Eike Kettner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.publet.gitr
import javax.servlet.http.HttpServletRequest
import org.eknet.publet.web.{Config, RequestAttr, RequestUrl}
import org.eknet.publet.gitr.auth.{RepositoryTag, GitPermissionBuilder, RepositoryModel, GitAction}
import org.eknet.publet.web.shiro.Security
/**
 * Enriches an `HttpServletRequest` with repository-name resolution plus
 * request-url and request-attribute helpers via the mixed-in traits.
 *
 * @author Eike Kettner eike.kettner@gmail.com
 * @since 06.11.12 20:07
 */
class GitRequestUtils(val req: HttpServletRequest) extends RepositoryNameResolver with RequestUrl with RequestAttr {
}
/**
 * Helpers for git requests: implicit enrichment of the servlet request,
 * the configured git mount point, and permission checks for git actions.
 */
object GitRequestUtils extends GitPermissionBuilder {

  implicit def toGitRequestUtils(req: HttpServletRequest): GitRequestUtils = new GitRequestUtils(req)

  /** Mount point for git repositories, "git" unless configured otherwise. */
  def gitMount(config: Config) = config("publet.gitMount").getOrElse("git")

  /**
   * Throws if the current subject may not perform `action` on the repository.
   * Pulls from repositories tagged as open are always allowed.
   */
  def checkGitAction(action: GitAction.Action, model: RepositoryModel) {
    val openPull = model.tag == RepositoryTag.open && action == GitAction.pull
    if (!openPull) {
      Security.checkPerm(git action(action) on model.name)
    }
  }

  /**
   * Whether the current subject may perform `action` on the repository.
   * Pulls from repositories tagged as open are always permitted.
   */
  def hasGitAction(action: GitAction.Action, model: RepositoryModel): Boolean = {
    val openPull = model.tag == RepositoryTag.open && action == GitAction.pull
    openPull || Security.hasPerm(git action(action) on model.name)
  }
}
|
eikek/publet
|
gitr-web/src/main/scala/org/eknet/publet/gitr/GitRequestUtils.scala
|
Scala
|
apache-2.0
| 1,758
|
package com.clemble.query.parser
import com.clemble.query.model.{Empty, Expression, Query}
/**
 * Parses an HTTP query-string map (parameter -> values) into a [[Query]]:
 * filter expression, field projections, pagination and sort order.
 *
 * Created by mavarazy on 9/10/16.
 *
 * @param expressionParser  expression parsers tried in order for each non-reserved parameter
 * @param includeProjection parser for the include-fields parameter
 * @param excludeProjection parser for the exclude-fields parameter
 * @param paginationParser  parser for the from/size parameters
 * @param sortParser        parser for the sort parameter
 */
case class QueryParser(
  expressionParser: List[PartialFunction[(String, Seq[String]), Expression]],
  includeProjection: IncludeProjectionParser,
  excludeProjection: ExcludeProjectionParser,
  paginationParser: PaginationParamsParser,
  sortParser: SortOrderParser
) {

  // Reserved parameter names handled by the dedicated parsers; these are
  // excluded from expression parsing below.
  private val ignore = List(
    sortParser.sortParam,
    paginationParser.fromParam,
    paginationParser.sizeParam,
    includeProjection.includeParam,
    excludeProjection.excludeParam
  )

  // All expression parsers merged into a single partial function (first match wins).
  private val expression = expressionParser.
    foldRight(PartialFunction.empty[(String, Seq[String]), Expression])
    { (a, b) => a orElse b }

  // Builds the Query from the raw parameter map.
  // NOTE(review): `expression(b)` applies the merged partial function directly —
  // a non-reserved parameter matched by no parser throws a MatchError; confirm
  // this is the intended failure mode for unknown parameters.
  def parse(query: Map[String, Seq[String]]): Query = {
    val expressionQuery = query.filterKeys(!ignore.contains(_))
    val where = expressionQuery.foldLeft[Expression](Empty)((a, b) => a and expression(b))
    Query(
      where,
      pagination = paginationParser.toPage(query),
      projection = (query.collect(includeProjection).flatten ++ query.collect(excludeProjection).flatten).toList,
      sort = query.collect(sortParser).flatten.toList
    )
  }
}
object QueryParser {
  // Default parser stack covering >, >=, <=, <, != and = expressions plus
  // projection, pagination and sort parsing.
  // NOTE(review): declared as `def`, so every access constructs a fresh parser
  // stack; if the parsers are stateless this could safely become a `val`.
  def DEFAULT = QueryParser(
    List(
      new GreaterThenExpressionParser(),
      new GreaterThenEqualsExpressionParser(),
      new LessThenEqualsExpressionParser(),
      new LessThenExpressionParser(),
      new NotEqualsExpressionParser(),
      EqualsExpressionParser
    ),
    new IncludeProjectionParser(),
    new ExcludeProjectionParser(),
    new PaginationParamsParser(),
    new SortOrderParser()
  )
}
|
clemble/scala-query-dsl
|
src/main/scala/com/clemble/query/parser/QueryParser.scala
|
Scala
|
apache-2.0
| 1,795
|
package poker.core.handanalyzer
import poker.core.{Hand, HandStatus, HandType, Kickers}
final class FullHouseAnalyzer extends HandAnalyzer {

  /**
   * Detects a full house: three cards of one rank together with two cards
   * of another rank. The pair is recorded as the kicker.
   */
  override def doAnalyze(hand: Hand): HandStatus = {
    val isFullHouse = hand.existsNCardsBySameRank(3) && hand.existsNCardsBySameRank(2)
    if (!isFullHouse) {
      HandStatus.none
    } else {
      val kickerOpt = hand.findNCardsBySameRank(2)
      assert(kickerOpt.nonEmpty, s"A ${HandType.FullHouse} must have two cards of another rank")
      HandStatus(HandType.FullHouse, Kickers(kickerOpt.get))
    }
  }
}
|
kyuksel/poker
|
src/main/scala/poker/core/handanalyzer/FullHouseAnalyzer.scala
|
Scala
|
mit
| 684
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.magic
import org.apache.toree.plugins.dependencies.Dependency
import org.apache.toree.plugins._
import org.mockito.Mockito._
import org.mockito.Matchers.{eq => mockEq, _}
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{FunSpec, Matchers, OneInstancePerTest}
import test.utils
import MagicManagerSpec._
import scala.runtime.BoxedUnit
// Shared expected output used by the cell-magic fixtures below.
object MagicManagerSpec {
  val TestCellMagicOutput = CellMagicOutput("test" -> "value")
}

// No-op line magic fixture.
class SomeLineMagic extends LineMagic {
  override def execute(code: String): Unit = {}
}

// Cell magic fixture that returns the canned TestCellMagicOutput.
class SomeCellMagic extends CellMagic {
  override def execute(code: String): CellMagicOutput = TestCellMagicOutput
}

// Plugin that is not a magic, used to verify non-magic plugins are rejected.
private class SomePlugin extends Plugin

// Magic that is neither a line nor a cell magic; execute is intentionally unimplemented.
private class SomeMagic extends Magic {
  override def execute(code: String): Any = ???
}

// Marker exception so tests can assert on the failure type of a line magic.
class LineMagicException extends Exception

// Line magic fixture that always fails with LineMagicException.
private class ExceptionLineMagic extends LineMagic {
  override def execute(code: String): Unit = throw new LineMagicException
}

// Marker exception so tests can assert on the failure type of a cell magic.
class CellMagicException extends Exception

// Cell magic fixture that always fails with CellMagicException.
private class ExceptionCellMagic extends CellMagic {
  override def execute(code: String): CellMagicOutput = throw new CellMagicException
}
/** Unit tests for MagicManager: magic classification (line vs cell),
  * lookup by name, and dynamic invocation via `applyDynamic`.
  * The PluginManager collaborator is a Mockito mock; MagicManager itself is
  * a spy so its real logic runs while collaborator calls are stubbed.
  */
class MagicManagerSpec
  extends FunSpec with Matchers with MockitoSugar with OneInstancePerTest
{
  private val TestPluginName = "SomePlugin"
  private val TestMagicName = "SomeMagic"
  private val mockPluginManager = mock[PluginManager]
  // Spy wraps a real instance; doReturn(...).when(...) avoids invoking real methods while stubbing.
  private val magicManager = spy(new MagicManager(mockPluginManager))
  describe("MagicManager") {
    describe("#isLineMagic") {
      it("should return true if the magic extends the line magic interface") {
        val expected = true
        val mockLineMagic = mock[LineMagic]
        val actual = magicManager.isLineMagic(mockLineMagic)
        actual should be (expected)
      }
      it("should return false if the magic does not extend the line magic interface") {
        val expected = false
        val mockMagic = mock[Magic]
        val actual = magicManager.isLineMagic(mockMagic)
        actual should be (expected)
      }
      it("should throw an exception if provided null") {
        intercept[NullPointerException] {
          magicManager.isLineMagic(null)
        }
      }
    }
    describe("#isCellMagic") {
      it("should return true if the magic extends the cell magic interface") {
        val expected = true
        val mockCellMagic = mock[CellMagic]
        val actual = magicManager.isCellMagic(mockCellMagic)
        actual should be (expected)
      }
      it("should return false if the magic does not extend the cell magic interface") {
        val expected = false
        val mockMagic = mock[Magic]
        val actual = magicManager.isCellMagic(mockMagic)
        actual should be (expected)
      }
      it("should throw an exception if provided null") {
        intercept[NullPointerException] {
          magicManager.isCellMagic(null)
        }
      }
    }
    describe("#findMagic") {
      it("should throw a MagicNotFoundException if no magic matches the name") {
        intercept[MagicNotFoundException] {
          // A plugin exists, but its class name does not match TestMagicName.
          doReturn(Seq(new Plugin {}).toIterable).when(mockPluginManager).plugins
          magicManager.findMagic(TestMagicName)
        }
      }
      it("should throw a MagicNotFoundException if there are no loaded plugins") {
        intercept[MagicNotFoundException] {
          doReturn(Nil).when(mockPluginManager).plugins
          magicManager.findMagic(TestMagicName)
        }
      }
      it("should throw a MagicNotFoundException if a plugin matches but is not a magic") {
        intercept[MagicNotFoundException] {
          doReturn(Seq(new SomePlugin).toIterable).when(mockPluginManager).plugins
          magicManager.findMagic(TestPluginName)
        }
      }
      it("should return the magic if exactly one is found") {
        val expected = new SomeMagic
        doReturn(Seq(expected).toIterable).when(mockPluginManager).plugins
        val actual = magicManager.findMagic(TestMagicName)
        actual should be (expected)
      }
      it("should return a magic whose name matches even if casing is different") {
        val expected = new SomeMagic
        doReturn(Seq(expected).toIterable).when(mockPluginManager).plugins
        val actual = magicManager.findMagic(TestMagicName.toUpperCase())
        actual should be (expected)
      }
      it("should return the first match if more than one magic matches the name") {
        // test.utils.SomeMagic shares the simple class name with SomeMagic above.
        val expected = new SomeMagic
        doReturn(Seq(expected, new utils.SomeMagic).toIterable)
          .when(mockPluginManager).plugins
        val actual = magicManager.findMagic(TestMagicName)
        actual should be (expected)
      }
    }
    describe("#applyDynamic") {
      it("should return CellMagicOutput if the invocation of a magic throws an exception") {
        doReturn(Some(FailurePluginMethodResult(
          mock[PluginMethod],
          new LineMagicException()
        ))).when(mockPluginManager).fireEventFirstResult(
          anyString(), any(classOf[Dependency[_ <: AnyRef]])
        )
        val result = magicManager.applyDynamic("TEST")()
        // Failures are rendered as a text/plain CellMagicOutput rather than thrown.
        result.asMap.get("text/plain") should not be(empty)
      }
      it("should fire an event with the lowercase of the magic name") {
        val arg: java.lang.String = "some arg"
        val pluginName = "TEST"
        val expected = Dependency.fromValueWithName("input", arg)
        doReturn(Some(FailurePluginMethodResult(
          mock[PluginMethod],
          new LineMagicException()
        ))).when(mockPluginManager).fireEventFirstResult(
          anyString(), any(classOf[Dependency[_ <: AnyRef]])
        )
        magicManager.applyDynamic(pluginName)(arg :: Nil: _*)
        verify(mockPluginManager).fireEventFirstResult(mockEq(pluginName.toLowerCase), any())
      }
      it("should take the first argument and convert it to a string to pass to the magic") {
        val arg: java.lang.String = "some arg"
        val pluginName = "TEST"
        // The magic receives its input as a named "input" dependency.
        val expected = Dependency.fromValueWithName("input", arg)
        doReturn(Some(FailurePluginMethodResult(
          mock[PluginMethod],
          new LineMagicException()
        ))).when(mockPluginManager).fireEventFirstResult(
          anyString(), any(classOf[Dependency[_ <: AnyRef]])
        )
        magicManager.applyDynamic(pluginName)(arg :: Nil: _*)
        verify(mockPluginManager).fireEventFirstResult(anyString(), mockEq(Seq(expected)): _*)
      }
      it("should pass an empty string to the line magic if no arguments are provided") {
        val arg: java.lang.String = ""
        val pluginName = "TEST"
        val expected = Dependency.fromValueWithName("input", arg)
        doReturn(Some(FailurePluginMethodResult(
          mock[PluginMethod],
          new LineMagicException()
        ))).when(mockPluginManager).fireEventFirstResult(
          anyString(), any(classOf[Dependency[_ <: AnyRef]])
        )
        magicManager.applyDynamic(pluginName)(Nil: _*)
        verify(mockPluginManager).fireEventFirstResult(anyString(), mockEq(Seq(expected)): _*)
      }
      it("should return a Right[LineMagicOutput] if line magic execution is successful and returns null") {
        val pluginName = "TEST"
        val expected = LineMagicOutput
        doReturn(Some(SuccessPluginMethodResult(
          mock[PluginMethod],
          null
        ))).when(mockPluginManager).fireEventFirstResult(
          anyString(), any(classOf[Dependency[_ <: AnyRef]])
        )
        val result = magicManager.applyDynamic(pluginName)(Nil: _*)
        result should be(expected)
      }
      it("should return a Right[LineMagicOutput] if line magic execution is successful and returns BoxedUnit") {
        // Scala Unit from a line magic arrives as BoxedUnit through reflection.
        val pluginName = "TEST"
        val expected = LineMagicOutput
        doReturn(Some(SuccessPluginMethodResult(
          mock[PluginMethod],
          BoxedUnit.UNIT
        ))).when(mockPluginManager).fireEventFirstResult(
          anyString(), any(classOf[Dependency[_ <: AnyRef]])
        )
        val result = magicManager.applyDynamic(pluginName)(Nil: _*)
        result should be(expected)
      }
      it("should return a Left[CellMagicOutput] if cell magic execution is successful") {
        val pluginName = "TEST"
        val cellMagicOutput = CellMagicOutput("our/type" -> "TEST CONTENT")
        doReturn(Some(SuccessPluginMethodResult(
          mock[PluginMethod],
          cellMagicOutput
        ))).when(mockPluginManager).fireEventFirstResult(
          anyString(), any(classOf[Dependency[_ <: AnyRef]])
        )
        val result = magicManager.applyDynamic(pluginName)(Nil: _*)
        result should be(cellMagicOutput)
      }
      it("should return a Left[CellMagicOutput] if is a magic but not a line or cell") {
        val pluginName = "TEST"
        doReturn(Some(SuccessPluginMethodResult(
          mock[PluginMethod],
          new AnyRef
        ))).when(mockPluginManager).fireEventFirstResult(
          anyString(), any(classOf[Dependency[_ <: AnyRef]])
        )
        val result = magicManager.applyDynamic(pluginName)(Nil: _*)
        result.asMap.get("text/plain") should not be (empty)
      }
      it("should return a Left[CellMagicOutput] if magic fails") {
        val pluginName = "TEST"
        doReturn(Some(FailurePluginMethodResult(
          mock[PluginMethod],
          new Throwable
        ))).when(mockPluginManager).fireEventFirstResult(
          anyString(), any(classOf[Dependency[_ <: AnyRef]])
        )
        val result = magicManager.applyDynamic(pluginName)(Nil: _*)
        result.asMap.get("text/plain") should not be (empty)
      }
      it("should throw a MagicNotFoundException when a magic cannot be found") {
        val pluginName = "THISMAGICDOESN'TEXIST"
        // No plugin handled the event => None => lookup failure.
        doReturn(None).when(mockPluginManager).fireEventFirstResult(
          anyString(), any(classOf[Dependency[_ <: AnyRef]])
        )
        intercept[MagicNotFoundException] {
          magicManager.applyDynamic(pluginName)(Nil: _*)
        }
      }
    }
  }
}
|
lresende/incubator-toree
|
kernel-api/src/test/scala/org/apache/toree/magic/MagicManagerSpec.scala
|
Scala
|
apache-2.0
| 10,894
|
/*
* Copyright 2017-2018 47 Degrees, LLC. <http://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package examples.todolist.persistence.runtime.queries
import doobie.implicits.toSqlInterpolator
import doobie.util.query.Query0
import examples.todolist.{Tag, TodoItem, TodoList}
/** Doobie queries used by the todo-list example application. */
object AppQueries {
  // Joins lists with their tag (INNER: a list row only appears when its tag
  // exists) and with their items (LEFT: lists without items still appear,
  // yielding None for the item part of the tuple).
  // NOTE(review): the SELECT column order must line up with the field order
  // expected by the (TodoList, Tag, Option[TodoItem]) row decoder — confirm
  // against the case class definitions.
  val listQuery: Query0[(TodoList, Tag, Option[TodoItem])] =
    sql"""
      SELECT lists.title, lists.tag_id, lists.id, tags.name, tags.id, items.item, items.todo_list_id, items.completed, items.id
      FROM todo_lists AS lists
      INNER JOIN tags ON lists.tag_id = tags.id
      LEFT JOIN todo_items AS items
      ON lists.id = items.todo_list_id
    """
      .query[(TodoList, Tag, Option[TodoItem])]
}
|
frees-io/freestyle
|
modules/examples/todolist-lib/src/main/scala/todo/persistence/runtime/queries/AppQueries.scala
|
Scala
|
apache-2.0
| 1,265
|
package fr.inria.spirals.sigma.ttc14.fixml.xmlmm.impl
import fr.inria.spirals.sigma.ttc14.fixml.xmlmm.support.XMLMM
import fr.inria.spirals.sigma.ttc14.fixml.xmlmm.XMLAttribute
class XMLAttributeImplDelegate extends XMLAttributeImpl with XMLAttributeDelegate

/** Mixin that renders an XML attribute in the usual `name=value` form. */
trait XMLAttributeDelegate extends XMLAttribute with XMLMM {
  // Interpolation produces the same string as the original concatenation.
  override def toString = s"${this.name}=${this.value}"
}
|
fikovnik/ttc14-fixml-sigma
|
ttc14-fixml-extension-2/src/fr/inria/spirals/sigma/ttc14/fixml/xmlmm/impl/XMLAttributeImplDelegate.scala
|
Scala
|
epl-1.0
| 383
|
package com.temportalist.morphadditions.server
import com.temportalist.morphadditions.common.ProxyCommon
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.tileentity.TileEntity
import net.minecraft.world.World
/**
*
*
* @author TheTemportalist
*/
/** Server-side proxy implementation. */
class ProxyServer extends ProxyCommon() {
  // NOTE(review): always returns null here — presumably "no server element
  // for any GUI id"; confirm against ProxyCommon's contract for this method.
  override def getServerElement(ID: Int, player: EntityPlayer, world: World, x: Int, y: Int,
      z: Int, tileEntity: TileEntity): AnyRef = null
}
|
TheTemportalist/MorphAdditions
|
src/main/scala/com/temportalist/morphadditions/server/ProxyServer.scala
|
Scala
|
apache-2.0
| 460
|
package eventstore
import akka.testkit.TestProbe
import ExpectedVersion._
/** Integration tests for WriteEvents: expected-version semantics against
  * new, existing and deleted streams, plus batch and concurrent appends.
  */
class WriteEventsITest extends TestConnection {
  "append to stream" should {
    "not fail for zero events" in new WriteEventsScope {
      writeEventsCompleted(Nil, NoStream)
    }
    "create stream with NoStream exp ver on first write if does not exist" in new WriteEventsScope {
      // First event lands at EventNumber.First.
      writeEvent(newEventData, NoStream) must beSome(EventNumber.Range(EventNumber.First))
      streamEvents must haveSize(1)
    }
    "create stream with ANY exp ver on first write if does not exist" in new WriteEventsScope {
      writeEvent(newEventData, Any) must beSome(EventNumber.Range(EventNumber.First))
      streamEvents must haveSize(1)
    }
    "fail create stream with wrong exp ver if does not exist" in new WriteEventsScope {
      writeEventsFailed(newEventData, ExpectedVersion.First) must throwA[WrongExpectedVersionException]
      writeEventsFailed(newEventData, ExpectedVersion(1)) must throwA[WrongExpectedVersionException]
    }
    "fail writing with correct exp ver to deleted stream" in new WriteEventsScope {
      appendEventToCreateStream()
      deleteStream()
      writeEventsFailed(newEventData, ExpectedVersion.First) must throwA[StreamDeletedException]
    }
    "fail writing with any exp ver to deleted stream" in new WriteEventsScope {
      appendEventToCreateStream()
      deleteStream()
      writeEventsFailed(newEventData, Any) must throwA[StreamDeletedException]
    }
    "fail writing with invalid exp ver to deleted stream" in new WriteEventsScope {
      appendEventToCreateStream()
      deleteStream()
      writeEventsFailed(newEventData, ExpectedVersion(1)) must throwA[StreamDeletedException]
    }
    "succeed writing with correct exp ver to existing stream" in new WriteEventsScope {
      appendEventToCreateStream()
      writeEvent(newEventData, ExpectedVersion.First) must beSome(EventNumber.Range(EventNumber.Exact(1)))
      writeEvent(newEventData, ExpectedVersion(1)) must beSome(EventNumber.Range(EventNumber.Exact(2)))
    }
    "succeed writing with any exp ver to existing stream" in new WriteEventsScope {
      appendEventToCreateStream()
      writeEvent(newEventData, Any) must beSome(EventNumber.Range(EventNumber.Exact(1)))
      writeEvent(newEventData, Any) must beSome(EventNumber.Range(EventNumber.Exact(2)))
    }
    "fail writing with wrong exp ver to existing stream" in new WriteEventsScope {
      appendEventToCreateStream()
      writeEventsFailed(newEventData, NoStream) must throwA[WrongExpectedVersionException]
      writeEventsFailed(newEventData, ExpectedVersion(1)) must throwA[WrongExpectedVersionException]
    }
    "be able to append multiple events at once" in new WriteEventsScope {
      val events = appendMany()
      streamEvents mustEqual events
    }
    "be able to append many events at once" in new WriteEventsScope {
      val size = 100
      appendMany(size = size)
      // Read the last event backwards and check its number matches the count.
      actor ! ReadStreamEvents(streamId, EventNumber.Last, 1, ReadDirection.Backward)
      expectMsgType[ReadStreamEventsCompleted].events.head.number mustEqual EventNumber(size - 1)
      deleteStream()
    }
    "be able to append many events at once concurrently" in new WriteEventsScope {
      val n = 10
      val size = 10
      // Each TestProbe appends its own batch; total event count must add up.
      Seq.fill(n)(TestProbe()).foreach(x => appendMany(size = size, testKit = x))
      actor ! ReadStreamEvents(streamId, EventNumber.Last, 1, ReadDirection.Backward)
      expectMsgType[ReadStreamEventsCompleted].events.head.number mustEqual EventNumber(size * n - 1)
      deleteStream()
    }
  }
  // Helpers shared by all examples above.
  private trait WriteEventsScope extends TestConnectionScope {
    // Writes a single event and returns the completed event-number range.
    def writeEvent(event: EventData, expVer: ExpectedVersion = Any) = writeEventsCompleted(List(event), expVer)
    // Writes a single event and expects the server to respond with a failure.
    def writeEventsFailed(event: EventData, expVer: ExpectedVersion = Any) = {
      actor ! WriteEvents(streamId, List(event), expVer)
      expectEsException()
    }
  }
}
|
pawelkaczor/EventStore.JVM
|
src/test/scala/eventstore/WriteEventsITest.scala
|
Scala
|
bsd-3-clause
| 3,940
|
package net.rfc1149.inf355
import scala.language.higherKinds
/** Abstract signature of an Option-like type family: implementations pin the
  * three abstract type members to concrete representations.
  */
trait OptionSig {
  type Option[+_]
  type Some[+A] <: Option[A]
  type None <: Option[Nothing]
}
/** Operations available for a given [[OptionSig]] implementation. */
trait Ops[Sig <: OptionSig] {
  def some[A](x: A): Sig#Some[A]
  def none: Sig#None
  // Single eliminator: consumers observe an Option only through fold.
  def fold[A, B](opt: Sig#Option[A])(ifNone: => B, ifSome: A => B): B
}
/** [[OptionSig]] implementation backed by the standard scala.Option. */
trait ScalaOption extends OptionSig {
  type Option[+A] = scala.Option[A]
  type Some[+A] = scala.Some[A]
  type None = scala.None.type
}
object ScalaOption {
  /** Canonical [[Ops]] instance for the scala.Option representation. */
  implicit object Ops extends Ops[ScalaOption] {
    def some[A](x: A): ScalaOption#Some[A] = scala.Some(x)
    def none: ScalaOption#None = scala.None
    // scala.Option#fold performs exactly the None/Some case analysis that the
    // original pattern match spelled out by hand.
    def fold[A, B](opt: ScalaOption#Option[A])(ifNone: => B, ifSome: A => B): B =
      opt.fold(ifNone)(ifSome)
  }
}
import java.util.Optional
import java.util.function.{ Function => F, Supplier }
/** [[OptionSig]] implementation backed by java.util.Optional.
  * Optional is invariant, so use-site wildcards recover covariance.
  */
trait Java8Option extends OptionSig {
  type Option[+A] = Optional[_ <: A]
  type Some[+A] = Optional[_ <: A]
  type None = Optional[Nothing]
}
object Java8Option {
  /** [[Ops]] instance for the java.util.Optional representation. */
  implicit object Ops extends Ops[Java8Option] {
    def some[A](x: A): Java8Option#Some[A] = Optional.of(x)
    def none: Java8Option#None = Optional.empty()
    def fold[A, B](opt: Java8Option#Option[A])(ifNone: => B, ifSome: A => B): B = {
      // Explicit SAM instances (rather than lambdas) keep this compilable on
      // pre-2.12 compilers; map/orElseGet mirrors Optional's own case split.
      val onPresent = new F[A, B] { override def apply(a: A): B = ifSome(a) }
      val onAbsent = new Supplier[B] { override def get(): B = ifNone }
      opt.map[B](onPresent).orElseGet(onAbsent)
    }
  }
}
/** [[OptionSig]] implementation with no wrapper at all:
  * a "Some" is the bare value and "None" is null.
  */
trait AnyOption extends OptionSig {
  type Option[+A] = Any
  type Some[+A] = Any
  type None = Null
}
object AnyOption {
  /** [[Ops]] instance for the unboxed null-as-None representation. */
  implicit object Ops extends Ops[AnyOption] {
    def some[A](x: A): AnyOption#Some[A] = x
    def none: AnyOption#None = null
    def fold[A, B](opt: AnyOption#Option[A])(ifNone: => B, ifSome: A => B): B =
      opt match {
        // null is this encoding's None marker; anything else is a value.
        case null => ifNone
        case value => ifSome(value.asInstanceOf[A])
      }
  }
}
/** Renders values of any [[OptionSig]] implementation as a string. */
class Show[Sig <: OptionSig](implicit ops: Ops[Sig]) {
  /** Produces "None" for absent values and "Some(<value>)" otherwise. */
  def show[A](opt: Sig#Option[A]): String =
    ops.fold(opt)("None", value => "Some(" + value + ")")
}
object Show {
  /** A Show is derivable for every Sig with an Ops instance in scope. */
  implicit def showInstance[Sig <: OptionSig](implicit ops: Ops[Sig]): Show[Sig] = new Show
}
/** Demo entry point: wraps 42 with the given Ops and prints it via Show.
  * NOTE(review): extends App, so fields initialise lazily in delayedInit
  * order — acceptable for a demo, but an explicit main method is safer for
  * non-trivial programs.
  */
class MyApp[Sig <: OptionSig](implicit ops: Ops[Sig]) extends App {
  val show: Show[Sig] = implicitly[Show[Sig]]
  val opt = ops.some(42)
  println(show.show(opt))
}
// One runnable app per representation; all print "Some(42)".
object ScalaOptionApp extends MyApp[ScalaOption]
object Java8OptionApp extends MyApp[Java8Option]
object AnyOptionApp extends MyApp[AnyOption]
|
betehess/inf355
|
src/main/scala/inf355.scala
|
Scala
|
apache-2.0
| 2,464
|
package component
import core._
import akka.actor.{Actor, Props}
import java.util.UUID
import org.joda.time.DateTime
// Protocol messages understood by the Model actor. The varargs ids are
// hierarchical: a single id addresses a blog, (blogId, commentId) addresses
// a comment (see Model.process).
case class GetEntity[T](ids: String*)
case class ListWithOffset(t: Any, params: Seq[Any], offset: Int, limit: Int)
// Reply message carrying a page of entities.
case class EntityList[T](slice: Iterable[T])
case class AddEntity[T](blog: T, ids: String*)
case class DeleteEntity(ids: String*)
/** In-memory store for blogs and comments exposed as an actor.
  * State lives in the parameters of `process` and is swapped with
  * `context.become`, so the actor needs no mutable fields.
  */
class Model(val mode: Option[String]) extends Actor {
  // Dummy data for illustration purposes, in ascending order by date
  val tableBlog = (for {
    x <- 1 to 100
  } yield Blog(UUID.randomUUID.toString, "jim", new DateTime().minusDays(x),
    s"Title ${x}", s"Description ${x}. Mode: ${mode}")).reverse
  def receive: Receive = process(tableBlog, Map())
  // Behaviour parameterised by the current blog table and per-blog comments.
  def process(tableBlog: IndexedSeq[Blog], tableComment: Map[String, IndexedSeq[Comment]]): Receive = {
    // Single id => blog lookup; replies with Option[Blog].
    case GetEntity(uuid) =>
      sender ! tableBlog.find(_.id == uuid.toString)
    case ListWithOffset(Blog, _, offset, limit) =>
      sender ! EntityList(tableBlog.drop(offset).take(limit))
    case AddEntity(blog: Blog, _*) =>
      // New blogs are prepended, keeping the newest-first order.
      context.become(process(blog +: tableBlog, tableComment))
      sender ! blog
    case DeleteEntity(id) =>
      val entity = tableBlog.find(_.id == id)
      context.become(process(tableBlog.filterNot(_.id == id), tableComment))
      sender ! entity
    // Two ids => comment lookup within a blog.
    // NOTE(review): tableComment(blogId) throws NoSuchElementException when
    // the blog has no comment entry — confirm callers guarantee it exists.
    case GetEntity(blogId, commentId) =>
      sender ! tableComment(blogId).find(_.id == commentId.toString)
    case ListWithOffset(Comment, Seq(blogId: String), offset, limit) =>
      if (tableComment contains blogId) {
        sender ! EntityList(tableComment(blogId).drop(offset).take(limit))
      } else {
        sender ! EntityList(IndexedSeq.empty[Comment])
      }
    case AddEntity(comment: Comment, ids @ _*) =>
      val blogId = ids.head
      if (tableComment contains blogId) {
        val comments = tableComment(blogId)
        context.become(process(tableBlog, tableComment + (blogId -> (comment +: comments))))
      } else {
        context.become(process(tableBlog, tableComment + (blogId -> IndexedSeq(comment))))
      }
      sender ! comment
    // NOTE(review): same missing-key risk as the two-id GetEntity above.
    case DeleteEntity(blogId, commentId) =>
      val entity = tableComment(blogId).find(_.id == commentId)
      val comments = tableComment(blogId).filterNot(_.id == commentId)
      context.become(process(tableBlog, tableComment + (blogId -> comments)))
      sender ! entity
  }
}
object Model {
  // Props factory so callers never instantiate the actor class directly.
  def props(mode: Option[String]) = Props(new Model(mode))
  // Conventional actor name used when spawning this actor.
  def name = "model"
}
// vim: set ts=4 sw=4 et:
|
enpassant/james
|
src/main/scala/component/Model.scala
|
Scala
|
apache-2.0
| 2,717
|
package controllers
import com.mohiva.play.silhouette.api.exceptions.ProviderException
import com.mohiva.play.silhouette.api.util.Credentials
import com.mohiva.play.silhouette.impl.exceptions.IdentityNotFoundException
import com.mohiva.play.silhouette.impl.providers._
import forms.{ SignInForm, TotpForm }
import javax.inject.Inject
import play.api.i18n.Messages
import play.api.mvc.{ AnyContent, Request }
import utils.route.Calls
import scala.concurrent.{ ExecutionContext, Future }
/**
* The `Sign In` controller.
*/
class SignInController @Inject() (
  scc: SilhouetteControllerComponents,
  signIn: views.html.signIn,
  activateAccount: views.html.activateAccount,
  totp: views.html.totp
)(implicit ex: ExecutionContext) extends AbstractAuthController(scc) {
  /**
   * Views the `Sign In` page.
   *
   * @return The result to display.
   */
  def view = UnsecuredAction.async { implicit request: Request[AnyContent] =>
    Future.successful(Ok(signIn(SignInForm.form, socialProviderRegistry)))
  }
  /**
   * Handles the submitted form.
   *
   * Flow: validate the form, authenticate the credentials, then branch on
   * the user's state — not activated => activation page; TOTP configured =>
   * second-factor page; otherwise sign in directly.
   *
   * @return The result to display.
   */
  def submit = UnsecuredAction.async { implicit request: Request[AnyContent] =>
    SignInForm.form.bindFromRequest.fold(
      form => Future.successful(BadRequest(signIn(form, socialProviderRegistry))),
      data => {
        val credentials = Credentials(data.email, data.password)
        credentialsProvider.authenticate(credentials).flatMap { loginInfo =>
          userService.retrieve(loginInfo).flatMap {
            case Some(user) if !user.activated =>
              // Valid credentials but unactivated account: prompt activation.
              Future.successful(Ok(activateAccount(data.email)))
            case Some(user) =>
              // If TOTP is configured, require the second factor before
              // issuing an authenticator.
              authInfoRepository.find[GoogleTotpInfo](user.loginInfo).flatMap {
                case Some(totpInfo) => Future.successful(Ok(totp(TotpForm.form.fill(TotpForm.Data(
                  user.userID, totpInfo.sharedKey, data.rememberMe)))))
                case _ => authenticateUser(user, data.rememberMe)
              }
            case None => Future.failed(new IdentityNotFoundException("Couldn't find user"))
          }
        }.recover {
          // Any provider failure (bad password, unknown identity) maps to a
          // generic flash message to avoid leaking which part failed.
          case _: ProviderException =>
            Redirect(Calls.signin).flashing("error" -> Messages("invalid.credentials"))
        }
      }
    )
  }
}
|
mohiva/play-silhouette-seed
|
app/controllers/SignInController.scala
|
Scala
|
apache-2.0
| 2,275
|
package info.lindblad.prometheus.cloudwatch.proxy.util
import org.slf4j.LoggerFactory
/** Mixes an SLF4J logger into any class; the logger is named after the
  * runtime class, so each concrete subclass logs under its own name.
  */
trait Logging {
  val logger = LoggerFactory.getLogger(this.getClass)
}
|
nlindblad/prometheus-cloudwatch-proxy
|
src/main/scala/info/lindblad/prometheus/cloudwatch/proxy/util/Logging.scala
|
Scala
|
apache-2.0
| 159
|
package scala.collection
package immutable
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import org.junit.Test
import org.junit.Assert._
import scala.annotation.unused
import scala.collection.mutable.{Builder, ListBuffer}
import scala.tools.testkit.AssertUtil
import scala.util.Try
// Regression tests for immutable.LazyList: laziness guarantees, toString
// rendering of the evaluated prefix, cycle detection, builders and tapEach.
// Evaluation order is exactly what is under test here, so the statement
// sequence in each test is load-bearing.
@RunWith(classOf[JUnit4])
class LazyListTest {
  @Test
  def t6727_and_t6440_and_8627(): Unit = {
    assertTrue(LazyList.continually(()).filter(_ => true).take(2) == Seq((), ()))
    assertTrue(LazyList.continually(()).filterNot(_ => false).take(2) == Seq((), ()))
    assertTrue(LazyList(1,2,3,4,5).filter(_ < 4) == Seq(1,2,3))
    assertTrue(LazyList(1,2,3,4,5).filterNot(_ > 4) == Seq(1,2,3,4))
    assertTrue(LazyList.from(1).filter(_ > 4).take(3) == Seq(5,6,7))
    assertTrue(LazyList.from(1).filterNot(_ <= 4).take(3) == Seq(5,6,7))
  }
  @Test // scala/bug#8990
  def withFilter_can_retry_after_exception_thrown_in_filter(): Unit = {
    // use mutable state to control an intermittent failure in filtering the LazyList
    var shouldThrow = true
    val wf = LazyList.from(1).take(10).withFilter { n =>
      if (shouldThrow && n == 5) throw new RuntimeException("n == 5") else n > 5
    }
    assertEquals(true, Try { wf.map(identity).length }.isFailure) // throws on n == 5
    shouldThrow = false // won't throw next time
    assertEquals(5, wf.map(identity).length) // success instead of NPE
  }
  @Test // scala/bug#6881
  def test_reference_equality(): Unit = {
    // Make sure we're tested with reference equality
    val s = LazyList.from(0)
    assert(s == s, "Referentially identical LazyLists should be equal (==)")
    assert(s equals s, "Referentially identical LazyLists should be equal (equals)")
    assert((0 #:: 1 #:: s) == (0 #:: 1 #:: s), "Cons of referentially identical LazyLists should be equal (==)")
    assert((0 #:: 1 #:: s) equals (0 #:: 1 #:: s), "Cons of referentially identical LazyLists should be equal (equals)")
  }
  @Test
  def t9886(): Unit = {
    assertEquals(LazyList(None, Some(1)), None #:: LazyList(Some(1)))
    assertEquals(LazyList(None, Some(1)), LazyList(None) #::: LazyList(Some(1)))
  }
  @Test
  def testLazyListDoesNotForceHead(): Unit = {
    // The counter stays at zero because #:: must not evaluate its head.
    var i = 0
    def f: Int = { i += 1; i }
    @unused val s = LazyList.empty.#::(f).#::(f).#::(f)
    assertEquals(0, i)
  }
  @Test
  def testEmptyLazyListToString(): Unit = {
    assertEquals("LazyList()", LazyList.empty.force.toString)
  }
  // The following toString tests pin the exact "<not computed>" rendering for
  // every combination of evaluated head/tail prefix.
  @Test
  def testLazyListToStringWhenHeadAndTailBothAreNotEvaluated(): Unit = {
    val l = LazyList(1, 2, 3, 4, 5)
    assertEquals("LazyList(<not computed>)", l.toString)
  }
  @Test
  def testLazyListToStringWhenOnlyHeadIsEvaluated(): Unit = {
    val l = LazyList(1, 2, 3, 4, 5)
    l.head
    assertEquals("LazyList(1, <not computed>)", l.toString)
  }
  @Test
  def testLazyListToStringWhenHeadAndTailIsEvaluated(): Unit = {
    val l = LazyList(1, 2, 3, 4, 5)
    l.head
    l.tail
    assertEquals("LazyList(1, <not computed>)", l.toString)
  }
  @Test
  def testLazyListToStringWhenHeadAndTailHeadIsEvaluated(): Unit = {
    val l = LazyList(1, 2, 3, 4, 5)
    l.head
    l.tail.head
    assertEquals("LazyList(1, 2, <not computed>)", l.toString)
  }
  @Test
  def testLazyListToStringWhenHeadIsNotEvaluatedAndOnlyTailIsEvaluated(): Unit = {
    val l = LazyList(1, 2, 3, 4, 5)
    l.tail
    assertEquals("LazyList(1, <not computed>)", l.toString)
  }
  @Test
  def testLazyListToStringWhenHeadIsNotEvaluatedAndTailHeadIsEvaluated(): Unit = {
    val l = LazyList(1, 2, 3, 4, 5)
    l.tail.head
    assertEquals("LazyList(1, 2, <not computed>)", l.toString)
  }
  @Test
  def testLazyListToStringWhenHeadIsNotEvaluatedAndTailTailIsEvaluated(): Unit = {
    val l = LazyList(1, 2, 3, 4, 5)
    l.tail.tail
    assertEquals("LazyList(1, 2, <not computed>)", l.toString)
  }
  @Test
  def testLazyListToStringWhendHeadIsNotEvaluatedAndTailTailHeadIsEvaluated(): Unit = {
    val l = LazyList(1, 2, 3, 4, 5)
    l.tail.tail.head
    assertEquals("LazyList(1, 2, 3, <not computed>)", l.toString)
  }
  @Test
  def testLazyListToStringWhenLazyListIsForcedToList(): Unit = {
    val l = 1 #:: 2 #:: 3 #:: 4 #:: LazyList.empty
    l.toList
    assertEquals("LazyList(1, 2, 3, 4)", l.toString)
  }
  @Test
  def testLazyListToStringWhenLazyListIsEmpty(): Unit = {
    // cached empty
    val l1 = LazyList.empty
    assertEquals("LazyList()", l1.toString)
    // non-cached empty
    val l2 = LazyList.unfold(0)(_ => None)
    assertEquals("LazyList(<not computed>)", l2.toString)
  }
  @Test
  def testLazyListToStringForSingleElementList(): Unit = {
    val l = LazyList(1)
    l.force
    assertEquals("LazyList(1)", l.toString)
  }
  @Test
  def testLazyListToStringWhenLazyListHasCyclicReference(): Unit = {
    // Once evaluation reaches the knot, toString must print "<cycle>" instead
    // of looping forever.
    lazy val cyc: LazyList[Int] = 1 #:: 2 #:: 3 #:: 4 #:: cyc
    assertEquals("LazyList(<not computed>)", cyc.toString)
    cyc.head
    assertEquals("LazyList(1, <not computed>)", cyc.toString)
    cyc.tail
    assertEquals("LazyList(1, <not computed>)", cyc.toString)
    cyc.tail.head
    assertEquals("LazyList(1, 2, <not computed>)", cyc.toString)
    cyc.tail.tail.head
    assertEquals("LazyList(1, 2, 3, <not computed>)", cyc.toString)
    cyc.tail.tail.tail.head
    assertEquals("LazyList(1, 2, 3, 4, <not computed>)", cyc.toString)
    cyc.tail.tail.tail.tail.head
    assertEquals("LazyList(1, 2, 3, 4, <cycle>)", cyc.toString)
  }
  @Test
  def hasCorrectDrop(): Unit = {
    assertEquals(LazyList(), LazyList().drop(2))
    assertEquals(LazyList(), LazyList(1).drop(2))
    assertEquals(LazyList(), LazyList(1, 2).drop(2))
    assertEquals(LazyList(3), LazyList(1, 2, 3).drop(2))
    assertEquals(LazyList(3, 4), LazyList(1, 2, 3, 4).drop(2))
  }
  @Test
  def testForceReturnsEvaluatedLazyList() : Unit = {
    var i = 0
    def f: Int = { i += 1; i }
    val xs = LazyList.from(Iterator.fill(3)(f))
    assertEquals(0, i)
    xs.force
    assertEquals(3, i)
    // it's possible to implement `force` with incorrect string representation
    // (to forget about `tlEvaluated` update)
    assertEquals( "LazyList(1, 2, 3)", xs.toString())
  }
  // Self-referential cyclic lists shared by the sameElements test below.
  val cycle1: LazyList[Int] = 1 #:: 2 #:: cycle1
  val cycle2: LazyList[Int] = 1 #:: 2 #:: 3 #:: cycle2
  @Test(timeout=10000)
  def testSameElements(): Unit = {
    assert(LazyList().sameElements(LazyList()))
    assert(!LazyList().sameElements(LazyList(1)))
    assert(LazyList(1,2).sameElements(LazyList(1,2)))
    assert(!LazyList(1,2).sameElements(LazyList(1)))
    assert(!LazyList(1).sameElements(LazyList(1,2)))
    assert(!LazyList(1).sameElements(LazyList(2)))
    assert(cycle1.sameElements(cycle1))
    assert(!cycle1.sameElements(cycle2))
    assert(!cycle1.sameElements(cycle2))
  }
  @Test
  def toStringIsStackSafe(): Unit = {
    val l = LazyList.from(Range.inclusive(1, 10000))
    l.foreach(_ => ())
    @unused val s = l.toString // No exception thrown
  }
  @Test
  def laziness(): Unit = {
    lazy val fibs: LazyList[Int] = 0 #:: 1 #:: fibs.zip(fibs.tail).map { n => n._1 + n._2 }
    assert(List(0, 1, 1, 2) == fibs.take(4).to(List))
    var lazeCount = 0
    def lazeL(i: Int) = { lazeCount += 1; i }
    @unused val xs21 = lazeL(1) #:: lazeL(2) #:: lazeL(3) #:: LazyList.empty
    assertEquals(0, lazeCount)
  }
  @Test // Strawman issue #529
  def testLazyListMustComputeHeadOnlyOnce(): Unit = {
    // Both the seed and each application of f must be memoised: the counters
    // only advance the first time a cell is forced.
    var seedCounter = 0
    var fCounter = 0
    def seed(): Int = {
      seedCounter += 1
      1
    }
    val f: Int => Int = { x =>
      fCounter += 1
      x + 1
    }
    val xs = LazyList.iterate(seed())(f)
    assertEquals(0, seedCounter)
    assertEquals(0, fCounter)
    xs.head
    assertEquals(1, seedCounter)
    assertEquals(0, fCounter)
    xs.tail
    assertEquals(1, seedCounter)
    assertEquals(0, fCounter)
    xs.tail.head
    assertEquals(1, seedCounter)
    assertEquals(1, fCounter)
    xs.tail.tail
    assertEquals(1, seedCounter)
    assertEquals(1, fCounter)
    xs.tail.tail.head
    assertEquals(1, seedCounter)
    assertEquals(2, fCounter)
    xs.take(10).toList
    assertEquals(1, seedCounter)
    assertEquals(9, fCounter)
  }
  @Test
  def t8680(): Unit = {
    // pre(n): finite prefix; cyc(m): cyclic list of period m; precyc: both.
    def pre(n: Int) = (-n to -1).to(LazyList)
    def cyc(m: Int) = {
      lazy val s: LazyList[Int] = (0 until m).to(LazyList) #::: s
      s
    }
    def precyc(n: Int, m: Int) = pre(n) #::: cyc(m)
    def goal(n: Int, m: Int) = (-n until m).mkString + "<cycle>"
    // Check un-forced cyclic and non-cyclic streams
    assertEquals("LazyList(<not computed>)", pre(2).toString)
    assertEquals("LazyList(<not computed>)", cyc(2).toString)
    assertEquals("LazyList(<not computed>)", precyc(2,2).toString)
    // Check forced cyclic and non-cyclic streams
    assertEquals("LazyList(-2, -1)", pre(2).force.toString)
    assertEquals("LazyList(0, 1, <cycle>)", cyc(2).force.toString)
    assertEquals("LazyList(-2, -1, 0, 1, <cycle>)", precyc(2,2).force.toString)
    // Special cases
    assertEquals("LazyList(0, <cycle>)", cyc(1).force.toString)
    assertEquals("LazyList(-1, 0, 1, 2, 3, 4, 5, <cycle>)", precyc(1,6).force.toString)
    assertEquals("LazyList(-6, -5, -4, -3, -2, -1, 0, <cycle>)", precyc(6,1).force.toString)
    // Make sure there are no odd/even problems
    for (n <- 3 to 4; m <- 3 to 4) {
      assertEquals(s"mkString $n $m", precyc(n,m).mkString, goal(n,m))
    }
    // Make sure there are no cycle/prefix modulus problems
    for (i <- 6 to 8) {
      assertEquals(s"mkString $i 3", goal(i,3), precyc(i,3).mkString)
      assertEquals(s"mkString 3 $i", goal(3,i), precyc(3,i).mkString)
    }
  }
  @Test
  def updated(): Unit = {
    val lazyList = LazyList from 0 take 4
    val list = lazyList.toList
    for (i <- lazyList.indices) {
      assertEquals(list.updated(i, -1), lazyList.updated(i, -1))
    }
    AssertUtil.assertThrows[IndexOutOfBoundsException](lazyList.updated(-1, -1))
  }
  @Test
  def tapEach(): Unit = {
    /** @param makeLL must make a lazylist that evaluates to Seq(1,2,3,4,5) */
    def check(makeLL: => LazyList[Int]): Unit = {
      val lb = ListBuffer[Int]()
      val ll = makeLL.tapEach(lb += _)
      assertEquals(ListBuffer[Int](), lb)
      assertEquals(Vector(1, 2), ll.take(2).to(Vector))
      assertEquals(ListBuffer(1, 2), lb)
      assertEquals(4, ll(3))
      assertEquals(ListBuffer(1, 2, 3, 4), lb)
      assertEquals(Vector(1, 2, 3, 4, 5), ll.to(Vector))
      assertEquals(ListBuffer(1, 2, 3, 4, 5), lb)
    }
    check(LazyList.from(Iterator(1, 2, 3, 4, 5)))
    check(LazyList.from(Vector(1, 2, 3, 4, 5)))
    check(LazyList.tabulate(5)(_ + 1))
  }
  @Test
  def builder(): Unit = {
    def build(init: Builder[Int, LazyList[Int]] => Unit): LazyList[Int] = {
      val b = LazyList.newBuilder[Int]
      init(b)
      b.result()
    }
    assertEquals(Nil, build(_ => ()))
    assertEquals(Nil, build(_ ++= Nil))
    assertEquals(Nil, build(_ ++= LazyList.empty))
    assertEquals(1 to 10, build(_ += 1 ++= (2 to 5) += 6 += 7 ++= (8 to 10)))
    assertEquals(1 to 10, build(_ ++= (1 to 4) ++= (5 to 6) += 7 ++= (8 to 9) += 10))
    assertEquals(1 to 10, build(_ ++= LazyList.from(1).take(10)))
    assertEquals(1 to 10, build(_ ++= Iterator.from(1).take(10)))
  }
  @Test
  def selfReferentialFailure(): Unit = {
    // Forcing a list defined in terms of itself must fail with a clear
    // "self-referential" error, not a stack overflow.
    def assertNoStackOverflow[A](lazyList: LazyList[A]): Unit = {
      // don't hang the test if we've made a programming error in this test
      val finite = lazyList.take(1000)
      AssertUtil.assertThrows[RuntimeException](finite.force, _ contains "self-referential")
    }
    assertNoStackOverflow { class L { val ll: LazyList[Nothing] = LazyList.empty #::: ll }; (new L).ll }
    assertNoStackOverflow { class L { val ll: LazyList[Int] = 1 #:: ll.map(_ + 1).filter(_ % 2 == 0) }; (new L).ll }
    class L {
      lazy val a: LazyList[Nothing] = LazyList.empty #::: b
      lazy val b: LazyList[Nothing] = LazyList.empty #::: a
    }
    assertNoStackOverflow((new L).a)
    assertNoStackOverflow((new L).b)
  }
  // scala/bug#11931
  @Test
  def lazyAppendedAllExecutesOnce(): Unit = {
    var count = 0
    LazyList(1).lazyAppendedAll({ count += 1; Seq(2)}).toList
    assertEquals(1, count)
  }
}
|
lrytz/scala
|
test/junit/scala/collection/immutable/LazyListTest.scala
|
Scala
|
apache-2.0
| 12,237
|
/**
* Copyright (C) 2010-2011 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.kernel.participants
import scala.util.matching.Regex
import scala.collection.JavaConversions._
import net.lshift.diffa.participant.scanning._
/**
 * Category function for partitioning on prefixes of strings.
 */
case class StringPrefixCategoryFunction(attrName: String,
                                        prefixLength: Int,
                                        maxLength: Int,
                                        step: Int)
    extends StringPrefixAggregation(attrName, prefixLength)
    with CategoryFunction {

  def name = "prefix(%d,%d,%d)".format(prefixLength, maxLength, step)

  /** The next, finer-grained partitioning function (prefix length grown by `step`,
   *  capped at `maxLength`), or None once the maximum prefix length is reached. */
  def descend =
    if (prefixLength == maxLength) None
    else Some(StringPrefixCategoryFunction(attrName, math.min(prefixLength + step, maxLength), maxLength, step))

  /** Build a scan constraint for the given partition value. A value shorter than the
   *  current prefix length is treated as a complete value (set membership); a value
   *  exactly the prefix length becomes a prefix constraint; anything longer is invalid. */
  def constrain(parent: Option[ScanConstraint], partition: String) = {
    val len = partition.length
    if (len < prefixLength)
      new SetConstraint(attrName, Set(partition))
    else if (len == prefixLength)
      new StringPrefixConstraint(attrName, partition)
    else
      throw new InvalidAttributeValueException(
        "Partition value must be %d characters in length".format(prefixLength))
  }

  val shouldBucket = true
}
|
aprescott/diffa
|
kernel/src/main/scala/net/lshift/diffa/kernel/participants/StringPrefixCategoryFunction.scala
|
Scala
|
apache-2.0
| 1,941
|
/*
* Copyright 2015 ligaDATA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ligadata.pmml.udfs
import scala.reflect.ClassTag
import scala.collection.GenSeq
import scala.collection.mutable._
import scala.math._
import scala.collection.immutable.StringLike
import scala.collection.immutable.List
import scala.collection.immutable.Map
import scala.collection.immutable.Set
import scala.collection.immutable.Iterable
import scala.collection.mutable.{ Map => MutableMap }
import scala.collection.mutable.{ Set => MutableSet }
import scala.collection.mutable.{ Iterable => MutableIterable }
import scala.collection.mutable.ArraySeq
import scala.collection.mutable.TreeSet
import scala.collection.GenSeq
import scala.reflect.ClassTag
import java.util.UUID
import org.joda.time.base
import org.joda.time.chrono
import org.joda.time.convert
import org.joda.time.field
import org.joda.time.format
import org.joda.time.tz
import org.joda.time.{LocalDate, DateTime, Duration, LocalTime, LocalDateTime}
import org.joda.time.Months
import org.joda.time.Years
import org.joda.time.format.DateTimeFormat
import org.joda.time.format.DateTimeFormatter
import org.joda.time.format.DateTimeFormatterBuilder
import org.joda.time.format.DateTimeParser
import org.joda.time.chrono.JulianChronology
import org.apache.logging.log4j.{ Logger, LogManager }
import com.ligadata.pmml.runtime._
import com.ligadata.Exceptions.StackTrace
import com.ligadata.KamanjaBase._
/**
* These are the udfs supplied with the system.
*/
object Udfs extends LogTrait {
/**
  Answer the version number of the supplied BaseMsg; a null message yields the zero version.
  @param msg : A BaseMsg known to the model.
  @return the version number, a string in the format "NNNNNN.NNNNNN.NNNNNN"
*/
def Version(msg: BaseMsg): String =
  if (msg == null) "000000.000000.000000" else msg.Version

/**
  Answer the version number of the supplied BaseContainer; a null container yields the zero version.
  @param container : A BaseContainer known to the model.
  @return the version number, a string in the format "NNNNNN.NNNNNN.NNNNNN"
*/
def Version(container: BaseContainer): String =
  if (container == null) "000000.000000.000000" else container.Version
/**
  @deprecated("Use Contains(ctx: Context, containerName: String, partKey: List[String], primaryKey: List[String]): Boolean ", "2015-Jun-08")
  Answer whether the supplied container and key exist in the storage managed by the global context.
  @param xId : the transaction id that initialized the model instance that is calling
  @param gCtx : the EnvContext object that initialized the model instance that is calling
  @param containerName : the top level container name that purportedly contains the companion keys
  @param partKey : the partition key of the object of interest
  @param primaryKey : the primary key of the object of interest
  @return true if the object exists (false when gCtx is null)
*/
def Contains(xId: Long, gCtx: EnvContext, containerName: String, partKey: List[String], primaryKey: List[String]): Boolean =
  gCtx != null && gCtx.contains(xId, containerName, partKey, primaryKey)

/**
  Answer whether the supplied container and key exist in the storage managed by the global context.
  @param ctx : the runtime Context for the calling model instance
  @param containerName : the top level container name that purportedly contains the companion keys
  @param partKey : the partition key of the object of interest
  @param primaryKey : the primary key of the object of interest
  @return true if the object exists (false when ctx or its gCtx is null)
*/
def Contains(ctx: Context, containerName: String, partKey: List[String], primaryKey: List[String]): Boolean =
  ctx != null && ctx.gCtx != null && ctx.gCtx.contains(ctx.xId, containerName, partKey, primaryKey)
/**
  @deprecated("Use ContainsAny(ctx: Context, containerName: String, partKeys: Array[List[String]], primaryKeys: Array[List[String]]): Boolean ", "2015-Jun-08")
  Answer whether ANY of the supplied keys exist in the supplied container.
  @param xId : the transaction id that initialized the model instance that is calling
  @param gCtx : the EnvContext object that initialized the model instance that is calling
  @param containerName : the top level container name searched
  @param partKeys : partition keys sought in the container
  @param primaryKeys : primary keys sought in the container
  @return true if any key pair is found (false when gCtx is null)
*/
def ContainsAny(xId: Long, gCtx: EnvContext, containerName: String, partKeys: Array[List[String]], primaryKeys: Array[List[String]]): Boolean =
  gCtx != null && gCtx.containsAny(xId, containerName, partKeys, primaryKeys)

/**
  Answer whether ANY of the supplied keys exist in the supplied container.
  @param ctx : the runtime Context for the calling model instance
  @param containerName : the top level container name searched
  @param partKeys : partition keys sought in the container
  @param primaryKeys : primary keys sought in the container
  @return true if any of the supplied keys are found (false when ctx or its gCtx is null)
*/
def ContainsAny(ctx: Context, containerName: String, partKeys: Array[List[String]], primaryKeys: Array[List[String]]): Boolean =
  ctx != null && ctx.gCtx != null && ctx.gCtx.containsAny(ctx.xId, containerName, partKeys, primaryKeys)
/**
  @deprecated("Use ContainsAll(ctx: Context, containerName: String, partKeys: Array[List[String]], primaryKeys: Array[List[String]]): Boolean ", "2015-Jun-08")
  Answer whether ALL of the supplied keys exist in the supplied container.
  @param xId : the transaction id that initialized the model instance that is calling
  @param gCtx : the EnvContext object that initialized the model instance that is calling
  @param containerName : the top level container name searched
  @param partKeys : partition keys sought in the container
  @param primaryKeys : primary keys sought in the container
  @return true if every key pair is found (false when gCtx is null)
*/
def ContainsAll(xId: Long, gCtx: EnvContext, containerName: String, partKeys: Array[List[String]], primaryKeys: Array[List[String]]): Boolean =
  gCtx != null && gCtx.containsAll(xId, containerName, partKeys, primaryKeys)

/**
  Answer whether ALL of the supplied keys exist in the supplied container.
  @param ctx : the runtime Context for the calling model instance
  @param containerName : the top level container name searched
  @param partKeys : partition keys sought in the container
  @param primaryKeys : primary keys sought in the container
  @return true if every key pair is found (false when ctx or its gCtx is null)
*/
def ContainsAll(ctx: Context, containerName: String, partKeys: Array[List[String]], primaryKeys: Array[List[String]]): Boolean =
  ctx != null && ctx.gCtx != null && ctx.gCtx.containsAll(ctx.xId, containerName, partKeys, primaryKeys)
/** Store a String 'value' in the dictionary field named 'variableName'.
  * @param ctx : the global state singleton for the calling model instance that contains the variables
  * @param variableName : the name of the field in either the data or transaction dictionary found in the ctx
  * @param value : the value to store
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: String): Boolean =
  ctx != null && ctx.valuePut(variableName, new StringDataValue(value))

/** Store an Array[String] 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Array[String]): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))

/** Store an ArrayBuffer[String] 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: ArrayBuffer[String]): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))

/** Store a List[String] 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: List[String]): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))

/** Store a Queue[String] 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Queue[String]): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))

/** Store a Set[String] 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Set[String]): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))

/** Store a MutableSet[String] 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: MutableSet[String]): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))
/** Store an Int 'value' in the dictionary field named 'variableName'.
  * @param ctx : the global state singleton for the calling model instance that contains the variables
  * @param variableName : the name of the field in either the data or transaction dictionary found in the ctx
  * @param value : the value to store
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Int): Boolean =
  ctx != null && ctx.valuePut(variableName, new IntDataValue(value))

/** Store an Array[Int] 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Array[Int]): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))

/** Store a Long 'value' under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Long): Boolean =
  ctx != null && ctx.valuePut(variableName, new LongDataValue(value))

/** Store an Array[Long] 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Array[Long]): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))

/** Store a Float 'value' under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Float): Boolean =
  ctx != null && ctx.valuePut(variableName, new FloatDataValue(value))

/** Store an Array[Float] 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Array[Float]): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))

/** Store a Double 'value' under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Double): Boolean =
  ctx != null && ctx.valuePut(variableName, new DoubleDataValue(value))

/** Store an Array[Double] 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Array[Double]): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))

/** Store a Boolean 'value' under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Boolean): Boolean =
  ctx != null && ctx.valuePut(variableName, new BooleanDataValue(value))

/** Store an Array[Boolean] 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Array[Boolean]): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))

/** Store an arbitrary 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Any): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))

/** Store an Array[Any] 'value' (wrapped as AnyDataValue) under 'variableName'.
  * @return true when the context exists and the value was accepted
  */
def Put(ctx: Context, variableName: String, value: Array[Any]): Boolean =
  ctx != null && ctx.valuePut(variableName, new AnyDataValue(value))
/** runtime state increment function */
/**
  Locate 'variableName' in the supplied context and increment it by the supplied 'value'.
  @param ctx : the global state singleton for the calling model instance that contains the variables
  @param variableName : the name of the field in either the data or transaction dictionary found in the ctx
  @param value : the amount to add
  @return true when the context exists and the increment was applied
*/
def incrementBy(ctx: Context, variableName: String, value: Int): Boolean =
  ctx != null && ctx.valueIncr(variableName, value)
/**
  @deprecated("Use Get(ctx: Context, containerId: String, partKey: List[String], primaryKey: List[String]): MessageContainerBase", "2015-Jun-08")
  Get the MessageContainerBase associated with the supplied containerId and keys from the EnvContext managed stores.
  NOTE(review): unlike the Context variant below, this deprecated form performs no null
  guard on gCtx — confirm callers always supply a live EnvContext.
  @param xId : the transaction id that initialized the model instance that is calling
  @param gCtx : the EnvContext object that initialized the model instance that is calling
  @param containerId : the top level container name
  @param partKey : the partition key of the object sought
  @param primaryKey : the primary key of the object sought
  @return the MessageContainerBase associated with these keys
*/
def Get(xId: Long, gCtx: EnvContext, containerId: String, partKey: List[String], primaryKey: List[String]): MessageContainerBase =
  gCtx.getObject(xId, containerId, partKey, primaryKey)

/**
  Get the MessageContainerBase associated with the supplied containerId and keys from the EnvContext managed stores.
  @param ctx : the Pmml Runtime Context instance for the calling model
  @param containerId : the top level container name
  @param partKey : the partition key of the object sought
  @param primaryKey : the primary key of the object sought
  @return the MessageContainerBase associated with these keys, or null when ctx or its gCtx is null
*/
def Get(ctx: Context, containerId: String, partKey: List[String], primaryKey: List[String]): MessageContainerBase = {
  val haveCtx = ctx != null && ctx.gCtx != null
  if (haveCtx) ctx.gCtx.getObject(ctx.xId, containerId, partKey, primaryKey) else null
}
/**
  @deprecated("Use GetMsgContainerElseNew(ctx: Context, fqClassName : String, containerId: String, partKey: List[String], primaryKey: List[String]): MessageContainerBase", "2015-Jun-08")
  Retrieve the Message or Container stored under the supplied keys; should it not be present
  in the kv store, a new and empty instance of fqClassName is instantiated and returned.

  @param xId : transaction id from Pmml Runtime Context
  @param gCtx : the engine's EnvContext object that possesses the kv stores
  @param fqClassName : the fully qualified class name instantiated when the keys produce no instance
  @param containerId : the name of the kv container
  @param partKey : the partition key within the container
  @param primaryKey : the primary key within the container
  @return either the stored MessageContainerBase or a brand new instance of fqClassName (NO FIELDS FILLED)
*/
def GetMsgContainerElseNew(xId: Long, gCtx: EnvContext, fqClassName: String, containerId: String, partKey: List[String], primaryKey: List[String]): MessageContainerBase = {
  val found = gCtx.getObject(xId, containerId, partKey, primaryKey)
  if (found == null) gCtx.NewMessageOrContainer(fqClassName) else found
}

/**
  Retrieve the Message or Container stored under the supplied keys; should it not be present
  in the kv store, a new and empty instance of fqClassName is instantiated and returned.

  @param ctx : the Pmml Runtime Context instance for the calling model
  @param fqClassName : the fully qualified class name instantiated when the keys produce no instance
  @param containerId : the name of the kv container
  @param partKey : the partition key within the container
  @param primaryKey : the primary key within the container
  @return either the stored MessageContainerBase or a brand new instance of fqClassName (NO FIELDS FILLED)
*/
def GetMsgContainerElseNew(ctx: Context, fqClassName: String, containerId: String, partKey: List[String], primaryKey: List[String]): MessageContainerBase = {
  val found = if (ctx != null && ctx.gCtx != null) ctx.gCtx.getObject(ctx.xId, containerId, partKey, primaryKey) else null
  // NOTE(review): when ctx (or ctx.gCtx) is null the fallback below still dereferences
  // ctx.gCtx, exactly as the original did — confirm callers never pass a null context.
  if (found == null) ctx.gCtx.NewMessageOrContainer(fqClassName) else found
}
/** Given the supplied message, answer its partition key. If there is none, answer an empty List.
  * @param msg : a Message
  * @return the partition key as a List[String]
  */
def GetPartitionKey(msg: BaseMsg): scala.collection.immutable.List[String] = {
  val keyData = msg.PartitionKeyData
  if (keyData == null) scala.collection.immutable.List[String]() else keyData.toList
}

/** Given the supplied container, answer its partition key. If there is none, answer an empty List.
  * @param msg : a Container
  * @return the partition key as a List[String]
  */
def GetPartitionKey(msg: BaseContainer): scala.collection.immutable.List[String] = {
  val keyData = msg.PartitionKeyData
  if (keyData == null) scala.collection.immutable.List[String]() else keyData.toList
}
/** Given the supplied message, answer its primary key. If there is none, answer an empty List.
  * @param msg : a Message
  * @return the primary key as a List[String]
  */
def GetPrimaryKey(msg: BaseMsg): scala.collection.immutable.List[String] = {
  val keyData = msg.PrimaryKeyData
  if (keyData == null) scala.collection.immutable.List[String]() else keyData.toList
}

/** Given the supplied container, answer its primary key. If there is none, answer an empty List.
  * @param msg : a Container
  * @return the primary key as a List[String]
  */
def GetPrimaryKey(msg: BaseContainer): scala.collection.immutable.List[String] = {
  val keyData = msg.PrimaryKeyData
  if (keyData == null) scala.collection.immutable.List[String]() else keyData.toList
}
/**
  @deprecated("Use GetHistory(ctx: Context, containerId: String, partKey: List[String], appendCurrentChanges: Boolean): Array[MessageContainerBase] ", "2015-Jun-08")
  Answer the array of messages or containers for the supplied partition key found in the
  specified container. When appendCurrentChanges is set, the current incoming message of the
  requesting model is appended to the returned history.
  @param xId : the transaction id associated with this model instance
  @param gCtx : the engine's EnvContext portal that gives access to the persisted data
  @param containerId : the name of the container or message
  @param partKey : the partition key identifying which messages are retrieved
  @param appendCurrentChanges : when true (messages only), append the triggering message to the history
  @return Array[MessageContainerBase] to be downcast to the concrete type by the model before use
*/
def GetHistory(xId: Long, gCtx: EnvContext, containerId: String, partKey: List[String], appendCurrentChanges: Boolean): Array[MessageContainerBase] =
  gCtx.getHistoryObjects(xId, containerId, partKey, appendCurrentChanges)

/**
  Answer the array of messages or containers for the supplied partition key found in the
  specified container. When appendCurrentChanges is set, the current incoming message of the
  requesting model is appended to the returned history.
  @param ctx : the model instance's context
  @param containerId : the name of the container or message
  @param partKey : the partition key identifying which messages are retrieved
  @param appendCurrentChanges : when true (messages only), append the triggering message to the history
  @return Array[MessageContainerBase] (empty when ctx or its gCtx is null)
*/
def GetHistory(ctx: Context, containerId: String, partKey: List[String], appendCurrentChanges: Boolean): Array[MessageContainerBase] =
  if (ctx == null || ctx.gCtx == null) Array[MessageContainerBase]()
  else ctx.gCtx.getHistoryObjects(ctx.xId, containerId, partKey, appendCurrentChanges)
/**
  @deprecated("Use GetArray(ctx: Context, gCtx: EnvContext, containerId: String): Array[MessageContainerBase] ", "2015-Jun-08")
  Get an array that contains all of the MessageContainerBase elements for the supplied 'containerId'.
  @param xId : transaction id from Pmml Runtime Context
  @param gCtx : the engine's EnvContext object that possesses the kv stores
  @param containerId : the name of the kv container
  @return the Array[MessageContainerBase] held by the supplied containerId
*/
def GetArray(xId: Long, gCtx: EnvContext, containerId: String): Array[MessageContainerBase] =
  gCtx.getAllObjects(xId, containerId)

/**
  Get an array that contains all of the MessageContainerBase elements for the supplied 'containerId'.
  @param ctx : the model instance's context
  @param containerId : the name of the kv container
  @return the Array[MessageContainerBase] held by the supplied containerId (empty when ctx or its gCtx is null)
*/
def GetArray(ctx: Context, containerId: String): Array[MessageContainerBase] =
  if (ctx == null || ctx.gCtx == null) Array[MessageContainerBase]()
  else ctx.gCtx.getAllObjects(ctx.xId, containerId)
/**
  @deprecated("Use Put(ctx: Context, containerId: String, key: List[String], value: BaseMsg): Boolean ", "2015-Jun-08")
  Add/update the message with the supplied 'containerId' and 'key' in the EnvContext managed storage.
  @param xId : transaction id from Pmml Runtime Context
  @param gCtx : the engine's EnvContext object that possesses the kv stores
  @param containerId : the name of the kv container
  @param key : the key under which the value is stored
  @param value : the message to store
  @return true if it worked
*/
def Put(xId: Long, gCtx: EnvContext, containerId: String, key: List[String], value: BaseMsg): Boolean = {
  gCtx.setObject(xId, containerId, key, value)
  true
}

/**
  Add/update the message with the supplied 'containerId' and 'key' in the EnvContext managed storage.
  @param ctx : the model instance's context
  @param containerId : the name of the kv container
  @param key : the key under which the value is stored
  @param value : the message to store
  @return true if it worked (false when ctx or its gCtx is null)
*/
def Put(ctx: Context, containerId: String, key: List[String], value: BaseMsg): Boolean = {
  val usable = ctx != null && ctx.gCtx != null
  if (usable) ctx.gCtx.setObject(ctx.xId, containerId, key, value)
  usable
}

/**
  @deprecated("Use Put(ctx: Context, containerId: String, key: List[String], value: BaseContainer): Boolean ", "2015-Jun-08")
  Add/update the container with the supplied 'containerId' and 'key' in the EnvContext managed storage.
  @param xId : transaction id from Pmml Runtime Context
  @param gCtx : the engine's EnvContext object that possesses the kv stores
  @param containerId : the name of the kv container
  @param key : the key under which the value is stored
  @param value : the container to store
  @return true if it worked
*/
def Put(xId: Long, gCtx: EnvContext, containerId: String, key: List[String], value: BaseContainer): Boolean = {
  gCtx.setObject(xId, containerId, key, value)
  true
}

/**
  Add/update the container with the supplied 'containerId' and 'key' in the EnvContext managed storage.
  @param ctx : the model instance's context
  @param containerId : the name of the kv container
  @param key : the key under which the value is stored
  @param value : the container to store
  @return true if it worked (false when ctx or its gCtx is null)
*/
def Put(ctx: Context, containerId: String, key: List[String], value: BaseContainer): Boolean = {
  val usable = ctx != null && ctx.gCtx != null
  if (usable) ctx.gCtx.setObject(ctx.xId, containerId, key, value)
  usable
}
/**
@deprecated This function is no longer used by the pmml compiler to implement 'and'. The Scala generated uses the '&&' that supports
short-circuit execution.
And the supplied boolean expressions.
@param boolexpr : one or more Boolean expressions
@return the logical 'and' of the expressions
*/
def And(boolexpr : Boolean*): Boolean = {
boolexpr.reduceLeft(_ && _)
}
/**
And the supplied Int expressions. '0' is false; all other values are considered true
@param boolexpr : one or more Int expressions
@return the logical 'and' of the expressions
*/
def IntAnd(boolexpr : Int*): Boolean = {
if (boolexpr.filter(_ == 0).size == 0) true else false
}
/**
@deprecated This function is no longer used by the pmml compiler to implement 'or'. The Scala generated uses the '||' that supports
short-circuit execution.
Or the supplied boolean expressions.
@param boolexpr : one or more Boolean expressions
@return the logical 'and' of the expressions
*/
def Or(boolexpr : Boolean*): Boolean = {
boolexpr.reduceLeft(_ || _)
}
/**
Or the supplied Int expressions. '0' is false; all other values are considered true
@param boolexpr : one or more Int expressions
@return the logical 'and' of the expressions
*/
def IntOr(boolexpr : Int*): Boolean = {
if (boolexpr.filter(_ != 0).size > 0) true else false
}
/**
Answer whether the supplied String is found in the ArrayBuffer of Strings.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: String, setExprs: ArrayBuffer[String]): Boolean = {
setExprs.filter(_ == fldRefExpr).length > 0
}
/**
Answer whether the supplied Int is found in the ArrayBuffer of Int.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Int, setExprs: ArrayBuffer[Int]): Boolean = {
setExprs.filter(_ == fldRefExpr).length > 0
}
/**
Answer whether the supplied Float is found in the ArrayBuffer of Float.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Float, setExprs: ArrayBuffer[Float]): Boolean = {
setExprs.filter(_ == fldRefExpr).length > 0
}
/**
Answer whether the supplied Double is found in the ArrayBuffer of Double.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Double, setExprs: ArrayBuffer[Double]): Boolean = {
setExprs.filter(_ == fldRefExpr).length > 0
}
/**
Answer whether the supplied String is found in the Array of String.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: String, setExprs: Array[String]): Boolean = {
setExprs.filter(_ == fldRefExpr).length > 0
}
/**
Answer whether the supplied Int is found in the Array of Int.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Int, setExprs: Array[Int]): Boolean = {
setExprs.filter(_ == fldRefExpr).length > 0
}
/**
Answer whether the supplied Float is found in the Array of Float.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Float, setExprs: Array[Float]): Boolean = {
setExprs.filter(_ == fldRefExpr).length > 0
}
/**
Answer whether the supplied Double is found in the Array of Double.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Double, setExprs: Array[Double]): Boolean = {
setExprs.filter(_ == fldRefExpr).length > 0
}
/**
Answer whether the supplied String is found in the List of String.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: String, setExprs: List[String]): Boolean = {
setExprs.filter(_ == fldRefExpr).length > 0
}
/**
Answer whether the supplied Int is found in the List of Int.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Int, setExprs: List[Int]): Boolean = {
setExprs.filter(_ == fldRefExpr).length > 0
}
/**
Answer whether the supplied Float is found in the List of Float.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Float, setExprs: List[Float]): Boolean = {
setExprs.filter(_ == fldRefExpr).length > 0
}
/**
Answer whether the supplied Double is found in the List of Double.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Double, setExprs: List[Double]): Boolean = {
setExprs.filter(_ == fldRefExpr).length > 0
}
/**
Answer whether the supplied String is found in the Set of String.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: String, setExprs: Set[String]): Boolean = {
setExprs.contains(fldRefExpr)
}
/**
Answer whether the supplied Int is found in the Set of Int.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Int, setExprs: Set[Int]): Boolean = {
setExprs.contains(fldRefExpr)
}
/**
Answer whether the supplied Float is found in the Set of Float.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Float, setExprs: Set[Float]): Boolean = {
setExprs.contains(fldRefExpr)
}
/**
Answer whether the supplied Double is found in the Set of Double.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Double, setExprs: Set[Double]): Boolean = {
setExprs.contains(fldRefExpr)
}
/**
Answer whether the supplied String is found in the MutableSet of String.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: String, setExprs: MutableSet[String]): Boolean = {
setExprs.contains(fldRefExpr)
}
/**
Answer whether the supplied Int is found in the MutableSet of Int.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Int, setExprs: MutableSet[Int]): Boolean = {
setExprs.contains(fldRefExpr)
}
/**
Answer whether the supplied Float is found in the MutableSet of Float.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Float, setExprs: MutableSet[Float]): Boolean = {
setExprs.contains(fldRefExpr)
}
/**
Answer whether the supplied Double is found in the MutableSet of Double.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@return true if one of the strings matched the supplied fieldRefExpr
*/
def IsIn(fldRefExpr: Double, setExprs: MutableSet[Double]): Boolean = {
setExprs.contains(fldRefExpr)
}
/**
Answer whether the supplied fieldExpr value lies between any of the collection's value pairs.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@param inclusive : when true, the end values will match; when false, the end values will not match
@return true if one of the strings matched the supplied fieldRefExpr
*/
def FoundInAnyRange(fldRefExpr: String, tuples: Array[(String,String)], inclusive : Boolean): Boolean = {
tuples.filter(tup => {
if (inclusive)
(fldRefExpr >= tup._1 && fldRefExpr <= tup._2)
else
(fldRefExpr > tup._1 && fldRefExpr < tup._2)
}).length > 0
}
/**
Answer whether the supplied fieldExpr value lies between any of the collection's value pairs.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@param inclusive : when true, the end values will match; when false, the end values will not match
@return true if one of the strings matched the supplied fieldRefExpr
*/
def FoundInAnyRange(fldRefExpr: Int, tuples: Array[(Int,Int)], inclusive : Boolean): Boolean = {
tuples.filter(tup => {
if (inclusive)
(fldRefExpr >= tup._1 && fldRefExpr <= tup._2)
else
(fldRefExpr > tup._1 && fldRefExpr < tup._2)
}).length > 0
}
/**
Answer whether the supplied fieldExpr value lies between any of the collection's value pairs.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@param inclusive : when true, the end values will match; when false, the end values will not match
@return true if one of the strings matched the supplied fieldRefExpr
*/
def FoundInAnyRange(fldRefExpr: Long, tuples: Array[(Long,Long)], inclusive : Boolean): Boolean = {
tuples.filter(tup => {
if (inclusive)
(fldRefExpr >= tup._1 && fldRefExpr <= tup._2)
else
(fldRefExpr > tup._1 && fldRefExpr < tup._2)
}).length > 0
}
/**
Answer whether the supplied fieldExpr value lies between any of the collection's value pairs.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@param inclusive : when true, the end values will match; when false, the end values will not match
@return true if one of the strings matched the supplied fieldRefExpr
*/
def FoundInAnyRange(fldRefExpr: Float, tuples: Array[(Float,Float)], inclusive : Boolean): Boolean = {
tuples.filter(tup => {
if (inclusive)
(fldRefExpr >= tup._1 && fldRefExpr <= tup._2)
else
(fldRefExpr > tup._1 && fldRefExpr < tup._2)
}).length > 0
}
/**
Answer whether the supplied fieldExpr value lies between any of the collection's value pairs.
@param fldRefExpr : the key sought in the collection
@param setExprs : a collection of values to be searched
@param inclusive : when true, the end values will match; when false, the end values will not match
@return true if one of the strings matched the supplied fieldRefExpr
*/
def FoundInAnyRange(fldRefExpr: Double, tuples: Array[(Double,Double)], inclusive : Boolean): Boolean = {
tuples.filter(tup => {
if (inclusive)
(fldRefExpr >= tup._1 && fldRefExpr <= tup._2)
else
(fldRefExpr > tup._1 && fldRefExpr < tup._2)
}).length > 0
}
/**
Answer if any of the supplied array buffer values fall between the left and right margin. If inclusive,
the end values are acceptable.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def AnyBetween(arrayExpr: ArrayBuffer[String], leftMargin: String, rightMargin: String, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length > 0
}
/**
Answer if any of the supplied array buffer values fall between the left and right margin. If inclusive,
the end values are acceptable.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def AnyBetween(arrayExpr: ArrayBuffer[Int], leftMargin: Int, rightMargin: Int, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length > 0
}
/**
Answer if any of the supplied array buffer values fall between the left and right margin. If inclusive,
the end values are acceptable.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def AnyBetween(arrayExpr: ArrayBuffer[Long], leftMargin: Long, rightMargin: Long, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length > 0
}
/**
Answer if any of the supplied array buffer values fall between the left and right margin. If inclusive,
the end values are acceptable.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def AnyBetween(arrayExpr: ArrayBuffer[Float], leftMargin: Float, rightMargin: Float, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length > 0
}
/**
Answer if any of the supplied array buffer values fall between the left and right margin. If inclusive,
the end values are acceptable.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def AnyBetween(arrayExpr: ArrayBuffer[Double], leftMargin: Double, rightMargin: Double, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length > 0
}
/**
Answer if any of the supplied array buffer values fall between the left and right margin. If inclusive,
the end values are acceptable.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def AnyBetween(arrayExpr: Array[String], leftMargin: String, rightMargin: String, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length > 0
}
/**
Answer if any of the supplied array buffer values fall between the left and right margin. If inclusive,
the end values are acceptable.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def AnyBetween(arrayExpr: Array[Int], leftMargin: Int, rightMargin: Int, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length > 0
}
/**
Answer if any of the supplied array buffer values fall between the left and right margin. If inclusive,
the end values are acceptable.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def AnyBetween(arrayExpr: Array[Float], leftMargin: Float, rightMargin: Float, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length > 0
}
/**
Answer if any of the supplied array buffer values fall between the left and right margin. If inclusive,
the end values are acceptable.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def AnyBetween(arrayExpr: Array[Double], leftMargin: Double, rightMargin: Double, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length > 0
}
/**
Answer if all of the supplied array buffer values fall outside the left and right margin. If inclusive,
the end values are considered part of the range.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def NotAnyBetween(arrayExpr: ArrayBuffer[String], leftMargin: String, rightMargin: String, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length == 0
}
/**
Answer if all of the supplied array buffer values fall outside the left and right margin. If inclusive,
the end values are considered part of the range.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def NotAnyBetween(arrayExpr: ArrayBuffer[Int], leftMargin: Int, rightMargin: Int, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length == 0
}
/**
Answer if all of the supplied array buffer values fall outside the left and right margin. If inclusive,
the end values are considered part of the range.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def NotAnyBetween(arrayExpr: ArrayBuffer[Long], leftMargin: Long, rightMargin: Long, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length == 0
}
/**
Answer if all of the supplied array buffer values fall outside the left and right margin. If inclusive,
the end values are considered part of the range.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def NotAnyBetween(arrayExpr: ArrayBuffer[Float], leftMargin: Float, rightMargin: Float, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length == 0
}
/**
Answer if all of the supplied array buffer values fall outside the left and right margin. If inclusive,
the end values are considered part of the range.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def NotAnyBetween(arrayExpr: ArrayBuffer[Double], leftMargin: Double, rightMargin: Double, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length == 0
}
/**
Answer if all of the supplied array buffer values fall outside the left and right margin. If inclusive,
the end values are considered part of the range.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def NotAnyBetween(arrayExpr: Array[String], leftMargin: String, rightMargin: String, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length == 0
}
/**
Answer if all of the supplied array buffer values fall outside the left and right margin. If inclusive,
the end values are considered part of the range.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def NotAnyBetween(arrayExpr: Array[Long], leftMargin: Long, rightMargin: Long, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length == 0
}
/**
Answer if all of the supplied array buffer values fall outside the left and right margin. If inclusive,
the end values are considered part of the range.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def NotAnyBetween(arrayExpr: Array[Int], leftMargin: Int, rightMargin: Int, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length == 0
}
/**
Answer if all of the supplied array buffer values fall outside the left and right margin. If inclusive,
the end values are considered part of the range.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def NotAnyBetween(arrayExpr: Array[Float], leftMargin: Float, rightMargin: Float, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length == 0
}
/**
Answer if all of the supplied array buffer values fall outside the left and right margin. If inclusive,
the end values are considered part of the range.
@param arrayExpr : a collection of values to test against the left and right margin values
@param leftMargin : the lower bound
@param rightMargin : the upper bound
@param inclusive : when true the boundary values are acceptable
@return true if at least one of the collection values is found in range
*/
def NotAnyBetween(arrayExpr: Array[Double], leftMargin: Double, rightMargin: Double, inclusive: Boolean): Boolean = {
arrayExpr.filter(Between(_, leftMargin, rightMargin, inclusive)).length == 0
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(arrayExpr: ArrayBuffer[String], key: String): Boolean = {
arrayExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(arrayExpr: ArrayBuffer[Long], key: Long): Boolean = {
arrayExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(arrayExpr: ArrayBuffer[Int], key: Int): Boolean = {
arrayExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(arrayExpr: ArrayBuffer[Float], key: Float): Boolean = {
arrayExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(arrayExpr: ArrayBuffer[Double], key: Double): Boolean = {
arrayExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(arrayExpr: Array[String], key: String): Boolean = {
arrayExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(arrayExpr: Array[Long], key: Long): Boolean = {
arrayExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(arrayExpr: Array[Int], key: Int): Boolean = {
arrayExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(arrayExpr: Array[Float], key: Float): Boolean = {
arrayExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(arrayExpr: Array[Double], key: Double): Boolean = {
arrayExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(setExpr: Set[String], key: String): Boolean = {
setExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(setExpr: Set[Long], key: Long): Boolean = {
setExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(setExpr: Set[Int], key: Int): Boolean = {
setExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(setExpr: Set[Float], key: Float): Boolean = {
setExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(setExpr: Set[Double], key: Double): Boolean = {
setExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(setExpr: MutableSet[String], key: String): Boolean = {
setExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(setExpr: MutableSet[Long], key: Long): Boolean = {
setExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(setExpr: MutableSet[Int], key: Int): Boolean = {
setExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(setExpr: MutableSet[Float], key: Float): Boolean = {
setExpr.contains(key)
}
/**
Answer if the supplied key is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param key : The key sought in the collection
@return true if found, else false
*/
def Contains(setExpr: MutableSet[Double], key: Double): Boolean = {
setExpr.contains(key)
}
/**
Answer if ANY of the supplied keys is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param keys : The keys sought in the collection
@return true if found, else false
*/
def ContainsAny(setExpr: Set[String], keys: Array[String]): Boolean = {
(keys.filter(key => setExpr.contains(key)).length > 0)
}
/**
Answer if ANY of the supplied keys is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param keys : The keys sought in the collection
@return true if found, else false
*/
def ContainsAny(setExpr: Set[Long], keys: Array[Long]): Boolean = {
(keys.filter(key => setExpr.contains(key)).length > 0)
}
/**
Answer if ANY of the supplied keys is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param keys : The keys sought in the collection
@return true if found, else false
*/
def ContainsAny(setExpr: Set[Int], keys: Array[Int]): Boolean = {
(keys.filter(key => setExpr.contains(key)).length > 0)
}
/**
Answer if ANY of the supplied keys is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param keys : The keys sought in the collection
@return true if found, else false
*/
def ContainsAny(setExpr: Set[Float], keys: Array[Float]): Boolean = {
(keys.filter(key => setExpr.contains(key)).length > 0)
}
/**
Answer if ANY of the supplied keys is equivalent to one of the collection's values
@param arrayExpr : The collection to search
@param keys : The keys sought in the collection
@return true if found, else false
*/
def Contains(setExpr: Set[Double], keys: Array[Double]): Boolean = {
(keys.filter(key => setExpr.contains(key)).length > 0)
}
/** Answer if ANY of the supplied keys occurs among the mutable set's values.
  * @param setExpr the collection to search
  * @param keys the keys sought in the collection
  * @return true if at least one key is found, else false
  */
def ContainsAny(setExpr: MutableSet[String], keys: Array[String]): Boolean =
  keys.exists(k => setExpr.contains(k))

/** Answer if ANY of the supplied keys occurs among the mutable set's values. */
def ContainsAny(setExpr: MutableSet[Long], keys: Array[Long]): Boolean =
  keys.exists(k => setExpr.contains(k))

/** Answer if ANY of the supplied keys occurs among the mutable set's values. */
def ContainsAny(setExpr: MutableSet[Int], keys: Array[Int]): Boolean =
  keys.exists(k => setExpr.contains(k))

/** Answer if ANY of the supplied keys occurs among the mutable set's values. */
def ContainsAny(setExpr: MutableSet[Float], keys: Array[Float]): Boolean =
  keys.exists(k => setExpr.contains(k))
/** Answer if ANY of the supplied keys is equivalent to one of the collection's values.
  *
  * NOTE(review): named `Contains` although the sibling overloads are `ContainsAny`
  * — presumably a generator slip; kept as-is for backward compatibility. Verify no
  * caller depends on this name before renaming.
  * @param setExpr the collection to search
  * @param keys the keys sought in the collection
  * @return true if at least one key is found, else false
  */
def Contains(setExpr: MutableSet[Double], keys: Array[Double]): Boolean =
  keys.exists(k => setExpr.contains(k))
/** Answer the intersection of the supplied collections.
  * @param left a collection
  * @param right a collection
  * @return the Set of elements common to both sides, or the empty set when either
  *         side is null or empty
  */
def Intersect[T: ClassTag](left: Array[T], right: Array[T]): Set[T] = {
  if (left == null || right == null || left.isEmpty || right.isEmpty) Set.empty[T]
  else left.toSet.intersect(right.toSet)
}

/** Answer the intersection of the supplied collections (empty set when either side is null/empty). */
def Intersect[T: ClassTag](left: Array[T], right: Set[T]): Set[T] = {
  if (left == null || right == null || left.isEmpty || right.isEmpty) Set.empty[T]
  else left.toSet.intersect(right)
}

/** Answer the intersection of the supplied collections (empty set when either side is null/empty). */
def Intersect[T: ClassTag](left: Set[T], right: Array[T]): Set[T] = {
  if (left == null || right == null || left.isEmpty || right.isEmpty) Set.empty[T]
  else left.intersect(right.toSet)
}

/** Answer the intersection of the supplied collections (empty set when either side is null/empty). */
def Intersect[T: ClassTag](left: Set[T], right: Set[T]): Set[T] = {
  if (left == null || right == null || left.isEmpty || right.isEmpty) Set.empty[T]
  else left.intersect(right)
}
/** Answer the intersection of the supplied collections.
  * @param left a collection
  * @param right a collection
  * @return the Set of elements common to both sides, or the empty set when either
  *         side is null or empty
  */
def Intersect[T: ClassTag](left: Array[T], right: TreeSet[T]): Set[T] = {
  if (left == null || right == null || left.isEmpty || right.isEmpty) Set.empty[T]
  else left.toSet.intersect(right.toSet)
}

/** Answer the intersection of the supplied collections (empty set when either side is null/empty). */
def Intersect[T: ClassTag](left: TreeSet[T], right: Array[T]): Set[T] = {
  if (left == null || right == null || left.isEmpty || right.isEmpty) Set.empty[T]
  else left.toSet.intersect(right.toSet)
}

/** Answer the intersection of the supplied collections (empty set when either side is null/empty). */
def Intersect[T: ClassTag](left: TreeSet[T], right: TreeSet[T]): Set[T] = {
  if (left == null || right == null || left.isEmpty || right.isEmpty) Set.empty[T]
  else left.toSet.intersect(right.toSet)
}

/** Answer the intersection of the supplied collections (empty set when either side is null/empty). */
def Intersect[T: ClassTag](left: Set[T], right: TreeSet[T]): Set[T] = {
  if (left == null || right == null || left.isEmpty || right.isEmpty) Set.empty[T]
  else left.intersect(right.toSet)
}

/** Answer the intersection of the supplied collections (empty set when either side is null/empty). */
def Intersect[T: ClassTag](left: TreeSet[T], right: Set[T]): Set[T] = {
  if (left == null || right == null || left.isEmpty || right.isEmpty) Set.empty[T]
  else left.toSet.intersect(right)
}
/** Answer the union of the supplied collections.
  *
  * Fixed: the original guard `left == null || left.size == 0 && right == null || right.size == 0`
  * parsed as `left == null || (left.size == 0 && right == null) || right.size == 0`
  * (`&&` binds tighter than `||`), so a null `left` with a non-empty `right`
  * wrongly produced the empty set, and a non-empty `left` with a null `right`
  * threw NullPointerException on `right.size`. A null or empty side now simply
  * contributes nothing to the union, matching the intent of the original's
  * subsequent branches.
  * @param left a collection (may be null or empty)
  * @param right a collection (may be null or empty)
  * @return a Set of the elements from both collections
  */
def Union[T: ClassTag](left: ArrayBuffer[T], right: ArrayBuffer[T]): Set[T] = {
  val l: Set[T] = if (left == null) Set.empty[T] else left.toSet
  val r: Set[T] = if (right == null) Set.empty[T] else right.toSet
  l.union(r)
}

/** Answer the union of the supplied collections; a null/empty side contributes nothing. */
def Union[T: ClassTag](left: Array[T], right: Array[T]): Set[T] = {
  val l: Set[T] = if (left == null) Set.empty[T] else left.toSet
  val r: Set[T] = if (right == null) Set.empty[T] else right.toSet
  l.union(r)
}

/** Answer the union of the supplied collections; a null/empty side contributes nothing. */
def Union[T: ClassTag](left: Array[T], right: Set[T]): Set[T] = {
  val l: Set[T] = if (left == null) Set.empty[T] else left.toSet
  val r: Set[T] = if (right == null) Set.empty[T] else right
  l.union(r)
}

/** Answer the union of the supplied collections; a null/empty side contributes nothing. */
def Union[T: ClassTag](left: Set[T], right: Array[T]): Set[T] = {
  val l: Set[T] = if (left == null) Set.empty[T] else left
  val r: Set[T] = if (right == null) Set.empty[T] else right.toSet
  l.union(r)
}

/** Answer the union of the supplied collections; a null/empty side contributes nothing. */
def Union[T: ClassTag](left: Set[T], right: Set[T]): Set[T] = {
  val l: Set[T] = if (left == null) Set.empty[T] else left
  val r: Set[T] = if (right == null) Set.empty[T] else right
  l.union(r)
}
/** Answer the last element in the supplied collection.
  * Note: like the underlying `last`, this throws when the collection is empty.
  * @param coll a collection
  * @return the last element from the collection
  */
def Last(coll: Array[Any]): Any = coll.last

/** Answer the last element in the supplied collection (throws when empty). */
def Last(coll: ArrayBuffer[Any]): Any = coll.last

/** Answer the last element in the supplied collection (throws when empty).
  * NOTE(review): overlaps with the `ArrayBuffer[Any]` overload above — calls on an
  * `ArrayBuffer` may be ambiguous at some call sites; confirm which one callers bind to.
  */
def Last[T: ClassTag](coll: ArrayBuffer[T]): T = coll.last

/** Answer the last element in the supplied collection (throws when empty). */
def Last(coll: Queue[Any]): Any = coll.last

/** Answer the last element in the supplied collection (throws when empty). */
def Last(coll: SortedSet[Any]): Any = coll.last
/** Answer the first element in the supplied collection.
  * Note: like the underlying `head`, this throws when the collection is empty.
  * @param coll a collection
  * @return the first element from the collection
  */
def First(coll: Array[Any]): Any = coll.head

/** Answer the first element in the supplied collection (throws when empty). */
def First(coll: ArrayBuffer[Any]): Any = coll.head

/** Answer the first element in the supplied collection (throws when empty). */
def First(coll: Queue[Any]): Any = coll.head

/** Answer the first element in the supplied collection (throws when empty). */
def First(coll: SortedSet[Any]): Any = coll.head
/** Answer the logical complement of the supplied boolean expression.
  * @param boolexpr a boolean
  * @return true when boolexpr is false; false when it is true
  */
def Not(boolexpr: Boolean): Boolean = !boolexpr
/** Answer whether the supplied value is NOT present in the supplied collection.
  *
  * Fixed documentation: the original scaladoc for these overloads claimed
  * "true if one of the values matched", which is the inverse of the actual
  * behavior (`filter(_ == fldRefExpr).length == 0`). The code below is the
  * equivalent, short-circuiting `!contains`.
  * @param fldRefExpr the key sought in the collection
  * @param setExprs a collection of values to be searched
  * @return true when fldRefExpr is NOT found, else false
  */
def IsNotIn(fldRefExpr: String, setExprs: ArrayBuffer[String]): Boolean =
  !setExprs.contains(fldRefExpr)

/** Answer true when `fldRefExpr` is NOT present in `setExprs`. */
def IsNotIn(fldRefExpr: Int, setExprs: ArrayBuffer[Int]): Boolean =
  !setExprs.contains(fldRefExpr)

/** Answer true when `fldRefExpr` is NOT present in `setExprs`. */
def IsNotIn(fldRefExpr: Float, setExprs: ArrayBuffer[Float]): Boolean =
  !setExprs.contains(fldRefExpr)

/** Answer true when `fldRefExpr` is NOT present in `setExprs`. */
def IsNotIn(fldRefExpr: Double, setExprs: ArrayBuffer[Double]): Boolean =
  !setExprs.contains(fldRefExpr)

/** Answer true when `fldRefExpr` is NOT present in `setExprs`. */
def IsNotIn(fldRefExpr: String, setExprs: List[String]): Boolean =
  !setExprs.contains(fldRefExpr)

/** Answer true when `fldRefExpr` is NOT present in `setExprs`. */
def IsNotIn(fldRefExpr: Int, setExprs: List[Int]): Boolean =
  !setExprs.contains(fldRefExpr)

/** Answer true when `fldRefExpr` is NOT present in `setExprs`. */
def IsNotIn(fldRefExpr: Float, setExprs: List[Float]): Boolean =
  !setExprs.contains(fldRefExpr)

/** Answer true when `fldRefExpr` is NOT present in `setExprs`. */
def IsNotIn(fldRefExpr: Double, setExprs: List[Double]): Boolean =
  !setExprs.contains(fldRefExpr)
/** Between */
/** Answer whether `thisOne` lies between `leftMargin` and `rightMargin`; when
  * `inclusive` is true the margin values themselves are deemed acceptable.
  * @param thisOne the value to test
  * @param leftMargin lower limit value to be considered
  * @param rightMargin upper limit value to be considered
  * @param inclusive when true either limit is deemed acceptable
  * @return true if the value is in range
  */
def Between(thisOne: String, leftMargin: String, rightMargin: String, inclusive: Boolean): Boolean = {
  if (inclusive) leftMargin <= thisOne && thisOne <= rightMargin
  else leftMargin < thisOne && thisOne < rightMargin
}

/** Answer whether `thisOne` lies between the margins (limits accepted when `inclusive`). */
def Between(thisOne: Int, leftMargin: Int, rightMargin: Int, inclusive: Boolean): Boolean = {
  if (inclusive) leftMargin <= thisOne && thisOne <= rightMargin
  else leftMargin < thisOne && thisOne < rightMargin
}

/** Answer whether `thisOne` lies between the margins (limits accepted when `inclusive`). */
def Between(thisOne: Long, leftMargin: Long, rightMargin: Long, inclusive: Boolean): Boolean = {
  if (inclusive) leftMargin <= thisOne && thisOne <= rightMargin
  else leftMargin < thisOne && thisOne < rightMargin
}

/** Answer whether `thisOne` lies between the margins (limits accepted when `inclusive`). */
def Between(thisOne: Int, leftMargin: Long, rightMargin: Long, inclusive: Boolean): Boolean = {
  if (inclusive) leftMargin <= thisOne && thisOne <= rightMargin
  else leftMargin < thisOne && thisOne < rightMargin
}

/** Answer whether `thisOne` lies between the margins (limits accepted when `inclusive`). */
def Between(thisOne: Double, leftMargin: Double, rightMargin: Int, inclusive: Boolean): Boolean = {
  if (inclusive) leftMargin <= thisOne && thisOne <= rightMargin
  else leftMargin < thisOne && thisOne < rightMargin
}

/** Answer whether `thisOne` lies between the margins (limits accepted when `inclusive`). */
def Between(thisOne: Int, leftMargin: Int, rightMargin: Double, inclusive: Boolean): Boolean = {
  if (inclusive) leftMargin <= thisOne && thisOne <= rightMargin
  else leftMargin < thisOne && thisOne < rightMargin
}

/** Answer whether `thisOne` lies between the margins (limits accepted when `inclusive`). */
def Between(thisOne: Double, leftMargin: Double, rightMargin: Double, inclusive: Boolean): Boolean = {
  if (inclusive) leftMargin <= thisOne && thisOne <= rightMargin
  else leftMargin < thisOne && thisOne < rightMargin
}

/** Answer whether `thisOne` lies between the margins (limits accepted when `inclusive`). */
def Between(thisOne: Double, leftMargin: Double, rightMargin: Float, inclusive: Boolean): Boolean = {
  if (inclusive) leftMargin <= thisOne && thisOne <= rightMargin
  else leftMargin < thisOne && thisOne < rightMargin
}

/** Answer whether `thisOne` lies between the margins (limits accepted when `inclusive`). */
def Between(thisOne: Float, leftMargin: Float, rightMargin: Double, inclusive: Boolean): Boolean = {
  if (inclusive) leftMargin <= thisOne && thisOne <= rightMargin
  else leftMargin < thisOne && thisOne < rightMargin
}

/** Answer whether `thisOne` lies between the margins (limits accepted when `inclusive`). */
def Between(thisOne: Float, leftMargin: Float, rightMargin: Int, inclusive: Boolean): Boolean = {
  if (inclusive) leftMargin <= thisOne && thisOne <= rightMargin
  else leftMargin < thisOne && thisOne < rightMargin
}

/** Answer whether `thisOne` lies between the margins (limits accepted when `inclusive`). */
def Between(thisOne: Int, leftMargin: Int, rightMargin: Float, inclusive: Boolean): Boolean = {
  if (inclusive) leftMargin <= thisOne && thisOne <= rightMargin
  else leftMargin < thisOne && thisOne < rightMargin
}

/** Answer whether `thisOne` lies between the margins (limits accepted when `inclusive`). */
def Between(thisOne: Float, leftMargin: Float, rightMargin: Float, inclusive: Boolean): Boolean = {
  if (inclusive) leftMargin <= thisOne && thisOne <= rightMargin
  else leftMargin < thisOne && thisOne < rightMargin
}
/** Answer true if `expr1` is greater than `expr2`.
  * @param expr1 a value
  * @param expr2 a value
  * @return true if this is so, else false
  */
def GreaterThan(expr1: String, expr2: String): Boolean = expr1 > expr2

/** Answer true if `expr1` is greater than `expr2`. */
def GreaterThan(expr1: Int, expr2: Int): Boolean = expr1 > expr2

/** Answer true if `expr1` is greater than `expr2`. */
def GreaterThan(expr1: Double, expr2: Int): Boolean = expr1 > expr2

/** Answer true if `expr1` is greater than `expr2`. */
def GreaterThan(expr1: Int, expr2: Double): Boolean = expr1 > expr2

/** Answer true if `expr1` is greater than `expr2`. */
def GreaterThan(expr1: Double, expr2: Double): Boolean = expr1 > expr2

/** Answer true if `expr1` is greater than `expr2`. */
def GreaterThan(expr1: Double, expr2: Float): Boolean = expr1 > expr2

/** Answer true if `expr1` is greater than `expr2`. */
def GreaterThan(expr1: Float, expr2: Double): Boolean = expr1 > expr2

/** Answer true if `expr1` is greater than `expr2`. */
def GreaterThan(expr1: Float, expr2: Int): Boolean = expr1 > expr2

/** Answer true if `expr1` is greater than `expr2`. */
def GreaterThan(expr1: Int, expr2: Float): Boolean = expr1 > expr2

/** Answer true if `expr1` is greater than `expr2`. */
def GreaterThan(expr1: Float, expr2: Float): Boolean = expr1 > expr2

/** Answer true if `expr1` is greater than `expr2`. */
def GreaterThan(expr1: Int, expr2: Long): Boolean = expr1 > expr2

/** Answer true if `expr1` is greater than `expr2`. */
def GreaterThan(expr1: Long, expr2: Long): Boolean = expr1 > expr2

/** Answer true if `expr1` is greater than `expr2`. */
def GreaterThan(expr1: Long, expr2: Int): Boolean = expr1 > expr2
/** Answer true if `expr1` is greater than or equal to `expr2`.
  * @param expr1 a value
  * @param expr2 a value
  * @return true if this is so, else false
  */
def GreaterOrEqual(expr1: String, expr2: String): Boolean = expr1 >= expr2

/** Answer true if `expr1` is greater than or equal to `expr2`. */
def GreaterOrEqual(expr1: Int, expr2: Int): Boolean = expr1 >= expr2

/** Answer true if `expr1` is greater than or equal to `expr2`. */
def GreaterOrEqual(expr1: Double, expr2: Int): Boolean = expr1 >= expr2

/** Answer true if `expr1` is greater than or equal to `expr2`. */
def GreaterOrEqual(expr1: Int, expr2: Double): Boolean = expr1 >= expr2

/** Answer true if `expr1` is greater than or equal to `expr2`. */
def GreaterOrEqual(expr1: Double, expr2: Double): Boolean = expr1 >= expr2

/** Answer true if `expr1` is greater than or equal to `expr2`. */
def GreaterOrEqual(expr1: Double, expr2: Float): Boolean = expr1 >= expr2

/** Answer true if `expr1` is greater than or equal to `expr2`. */
def GreaterOrEqual(expr1: Float, expr2: Double): Boolean = expr1 >= expr2

/** Answer true if `expr1` is greater than or equal to `expr2`. */
def GreaterOrEqual(expr1: Float, expr2: Int): Boolean = expr1 >= expr2

/** Answer true if `expr1` is greater than or equal to `expr2`. */
def GreaterOrEqual(expr1: Int, expr2: Float): Boolean = expr1 >= expr2

/** Answer true if `expr1` is greater than or equal to `expr2`. */
def GreaterOrEqual(expr1: Float, expr2: Float): Boolean = expr1 >= expr2

/** Answer true if `expr1` is greater than or equal to `expr2`. */
def GreaterOrEqual(expr1: Int, expr2: Long): Boolean = expr1 >= expr2

/** Answer true if `expr1` is greater than or equal to `expr2`. */
def GreaterOrEqual(expr1: Long, expr2: Long): Boolean = expr1 >= expr2

/** Answer true if `expr1` is greater than or equal to `expr2`. */
def GreaterOrEqual(expr1: Long, expr2: Int): Boolean = expr1 >= expr2
/** Answer true if `expr1` is less than or equal to `expr2`.
  *
  * Fixed documentation: the scaladoc of the last three overloads (Int/Long
  * combinations) said "less than", inverting nothing but misdescribing the
  * `<=` comparison actually performed.
  * @param expr1 a value
  * @param expr2 a value
  * @return true if this is so, else false
  */
def LessOrEqual(expr1: String, expr2: String): Boolean = expr1 <= expr2

/** Answer true if `expr1` is less than or equal to `expr2`. */
def LessOrEqual(expr1: Int, expr2: Int): Boolean = expr1 <= expr2

/** Answer true if `expr1` is less than or equal to `expr2`. */
def LessOrEqual(expr1: Double, expr2: Int): Boolean = expr1 <= expr2

/** Answer true if `expr1` is less than or equal to `expr2`. */
def LessOrEqual(expr1: Int, expr2: Double): Boolean = expr1 <= expr2

/** Answer true if `expr1` is less than or equal to `expr2`. */
def LessOrEqual(expr1: Double, expr2: Double): Boolean = expr1 <= expr2

/** Answer true if `expr1` is less than or equal to `expr2`. */
def LessOrEqual(expr1: Double, expr2: Float): Boolean = expr1 <= expr2

/** Answer true if `expr1` is less than or equal to `expr2`. */
def LessOrEqual(expr1: Float, expr2: Double): Boolean = expr1 <= expr2

/** Answer true if `expr1` is less than or equal to `expr2`. */
def LessOrEqual(expr1: Float, expr2: Int): Boolean = expr1 <= expr2

/** Answer true if `expr1` is less than or equal to `expr2`. */
def LessOrEqual(expr1: Int, expr2: Float): Boolean = expr1 <= expr2

/** Answer true if `expr1` is less than or equal to `expr2`. */
def LessOrEqual(expr1: Float, expr2: Float): Boolean = expr1 <= expr2

/** Answer true if `expr1` is less than or equal to `expr2`. */
def LessOrEqual(expr1: Int, expr2: Long): Boolean = expr1 <= expr2

/** Answer true if `expr1` is less than or equal to `expr2`. */
def LessOrEqual(expr1: Long, expr2: Long): Boolean = expr1 <= expr2

/** Answer true if `expr1` is less than or equal to `expr2`. */
def LessOrEqual(expr1: Long, expr2: Int): Boolean = expr1 <= expr2
/** Answer true if `expr1` is less than `expr2`.
  * @param expr1 a value
  * @param expr2 a value
  * @return true if this is so, else false
  */
def LessThan(expr1: String, expr2: String): Boolean = expr1 < expr2

/** Answer true if `expr1` is less than `expr2`. */
def LessThan(expr1: Int, expr2: Int): Boolean = expr1 < expr2

/** Answer true if `expr1` is less than `expr2`. */
def LessThan(expr1: Double, expr2: Int): Boolean = expr1 < expr2

/** Answer true if `expr1` is less than `expr2`. */
def LessThan(expr1: Int, expr2: Double): Boolean = expr1 < expr2

/** Answer true if `expr1` is less than `expr2`. */
def LessThan(expr1: Double, expr2: Double): Boolean = expr1 < expr2

/** Answer true if `expr1` is less than `expr2`. */
def LessThan(expr1: Double, expr2: Float): Boolean = expr1 < expr2

/** Answer true if `expr1` is less than `expr2`. */
def LessThan(expr1: Float, expr2: Double): Boolean = expr1 < expr2

/** Answer true if `expr1` is less than `expr2`. */
def LessThan(expr1: Float, expr2: Int): Boolean = expr1 < expr2

/** Answer true if `expr1` is less than `expr2`. */
def LessThan(expr1: Int, expr2: Float): Boolean = expr1 < expr2

/** Answer true if `expr1` is less than `expr2`. */
def LessThan(expr1: Float, expr2: Float): Boolean = expr1 < expr2

/** Answer true if `expr1` is less than `expr2`. */
def LessThan(expr1: Int, expr2: Long): Boolean = expr1 < expr2

/** Answer true if `expr1` is less than `expr2`. */
def LessThan(expr1: Long, expr2: Long): Boolean = expr1 < expr2

/** Answer true if `expr1` is less than `expr2`. */
def LessThan(expr1: Long, expr2: Int): Boolean = expr1 < expr2
/** Answer true if `expr1` is equivalent to `expr2`.
  * @param expr1 a value
  * @param expr2 a value
  * @return true if this is so, else false
  */
def Equal(expr1: String, expr2: String): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Int, expr2: Int): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Double, expr2: Int): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Int, expr2: Double): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Double, expr2: Double): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Double, expr2: Float): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Float, expr2: Double): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Float, expr2: Int): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Int, expr2: Float): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Float, expr2: Float): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Boolean, expr2: Boolean): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Int, expr2: Long): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Long, expr2: Long): Boolean = expr1 == expr2

/** Answer true if `expr1` is equivalent to `expr2`. */
def Equal(expr1: Long, expr2: Int): Boolean = expr1 == expr2
/** Answer true if the two strings differ. */
def NotEqual(expr1: String, expr2: String): Boolean = expr1 != expr2
/** Answer true if the two values differ. */
def NotEqual(expr1: Int, expr2: Int): Boolean = expr1 != expr2
/** Answer true if the two values differ (Int widened to Double). */
def NotEqual(expr1: Double, expr2: Int): Boolean = expr1 != expr2
/** Answer true if the two values differ (Int widened to Double). */
def NotEqual(expr1: Int, expr2: Double): Boolean = expr1 != expr2
/** Answer true if the two values differ. NOTE(review): exact floating-point comparison, as in the original. */
def NotEqual(expr1: Double, expr2: Double): Boolean = expr1 != expr2
/** Answer true if the two values differ (Float widened to Double). */
def NotEqual(expr1: Double, expr2: Float): Boolean = expr1 != expr2
/** Answer true if the two values differ (Float widened to Double). */
def NotEqual(expr1: Float, expr2: Double): Boolean = expr1 != expr2
/** Answer true if the two values differ (Int widened to Float). */
def NotEqual(expr1: Float, expr2: Int): Boolean = expr1 != expr2
/** Answer true if the two values differ (Int widened to Float). */
def NotEqual(expr1: Int, expr2: Float): Boolean = expr1 != expr2
/** Answer true if the two values differ. NOTE(review): exact floating-point comparison, as in the original. */
def NotEqual(expr1: Float, expr2: Float): Boolean = expr1 != expr2
/** Answer true if the two booleans differ. */
def NotEqual(expr1: Boolean, expr2: Boolean): Boolean = expr1 != expr2
/** Answer true if the two values differ (Int widened to Long). */
def NotEqual(expr1: Int, expr2: Long): Boolean = expr1 != expr2
/** Answer true if the two values differ. */
def NotEqual(expr1: Long, expr2: Long): Boolean = expr1 != expr2
/** Answer true if the two values differ (Int widened to Long). */
def NotEqual(expr1: Long, expr2: Int): Boolean = expr1 != expr2
/** Answer the concatenation of the two strings. */
def Plus(expr1: String, expr2: String): String = expr1 + expr2
/** Answer the sum of the two values. */
def Plus(expr1: Int, expr2: Int): Int = expr1 + expr2
/** Answer the sum of the three values. */
def Plus(expr1: Int, expr2: Int, expr3: Int): Int = expr1 + expr2 + expr3
/** Answer the sum of the three values. */
def Plus(expr1: Long, expr2: Long, expr3: Long): Long = expr1 + expr2 + expr3
/** Answer the sum of the four values. */
def Plus(expr1: Int, expr2: Int, expr3: Int, expr4: Int): Int = expr1 + expr2 + expr3 + expr4
/** Answer the sum of the four values. */
def Plus(expr1: Long, expr2: Long, expr3: Long, expr4: Long): Long = expr1 + expr2 + expr3 + expr4
/** Answer the sum of the five values. */
def Plus(expr1: Int, expr2: Int, expr3: Int, expr4: Int, expr5: Int): Int = expr1 + expr2 + expr3 + expr4 + expr5
/** Answer the sum of the five values. */
def Plus(expr1: Long, expr2: Long, expr3: Long, expr4: Long, expr5: Long): Long = expr1 + expr2 + expr3 + expr4 + expr5
/** Answer the sum of the six values. */
def Plus(expr1: Int, expr2: Int, expr3: Int, expr4: Int, expr5: Int, expr6: Int): Int = expr1 + expr2 + expr3 + expr4 + expr5 + expr6
/** Answer the sum of the six values. */
def Plus(expr1: Long, expr2: Long, expr3: Long, expr4: Long, expr5: Long, expr6: Long): Long = expr1 + expr2 + expr3 + expr4 + expr5 + expr6
/** Answer the sum of the seven values. */
def Plus(expr1: Int, expr2: Int, expr3: Int, expr4: Int, expr5: Int, expr6: Int, expr7: Int): Int = expr1 + expr2 + expr3 + expr4 + expr5 + expr6 + expr7
/** Answer the sum of the seven values. */
def Plus(expr1: Long, expr2: Long, expr3: Long, expr4: Long, expr5: Long, expr6: Long, expr7: Long): Long = expr1 + expr2 + expr3 + expr4 + expr5 + expr6 + expr7
/** Answer the sum of the eight values. */
def Plus(expr1: Int, expr2: Int, expr3: Int, expr4: Int, expr5: Int, expr6: Int, expr7: Int, expr8: Int): Int = expr1 + expr2 + expr3 + expr4 + expr5 + expr6 + expr7 + expr8
/** Answer the sum of the eight values; the Long operand widens the whole sum to Long. */
def Plus(expr1: Int, expr2: Int, expr3: Int, expr4: Int, expr5: Int, expr6: Int, expr7: Int, expr8: Long): Long = expr1 + expr2 + expr3 + expr4 + expr5 + expr6 + expr7 + expr8
/** Answer the sum of the two values. */
def Plus(expr1: Double, expr2: Double): Double = expr1 + expr2
/** Answer the sum of the three values. */
def Plus(expr1: Double, expr2: Double, expr3: Double): Double = expr1 + expr2 + expr3
/** Answer the sum of the four values. */
def Plus(expr1: Double, expr2: Double, expr3: Double, expr4: Double): Double = expr1 + expr2 + expr3 + expr4
/** Answer the sum of the five values. */
def Plus(expr1: Double, expr2: Double, expr3: Double, expr4: Double, expr5: Double): Double = expr1 + expr2 + expr3 + expr4 + expr5
/** Answer the sum of the six values. */
def Plus(expr1: Double, expr2: Double, expr3: Double, expr4: Double, expr5: Double, expr6: Double): Double = expr1 + expr2 + expr3 + expr4 + expr5 + expr6
/** Answer the sum of the two values (Int widened to Double). */
def Plus(expr1: Double, expr2: Int): Double = expr1 + expr2
/** Answer the sum of the two values (Int widened to Double). */
def Plus(expr1: Int, expr2: Double): Double = expr1 + expr2
/** Answer the sum of the two values.
  * NOTE(review): both operands are integral yet the generated return type is Double
  * (the Long sum is widened implicitly); signature kept for caller compatibility — verify generator intent. */
def Plus(expr1: Int, expr2: Long): Double = expr1 + expr2
/** Answer the sum of the two values (Int widened to Long). */
def Plus(expr1: Long, expr2: Int): Long = expr1 + expr2
/** Answer the sum of the two values (Int widened to Float). */
def Plus(expr1: Int, expr2: Float): Float = expr1 + expr2
/** Answer the sum of the two values (Int widened to Float). */
def Plus(expr1: Float, expr2: Int): Float = expr1 + expr2
/** Answer the sum of the two values. */
def Plus(expr1: Long, expr2: Long): Long = expr1 + expr2
/** Answer the sum of the two values (Long widened to Double). */
def Plus(expr1: Double, expr2: Long): Double = expr1 + expr2
/** Answer the sum of the two values (Long widened to Double). */
def Plus(expr1: Long, expr2: Double): Double = expr1 + expr2
/** Answer the sum of the two values (Long widened to Float). */
def Plus(expr1: Long, expr2: Float): Float = expr1 + expr2
/** Answer the sum of the two values (Long widened to Float). */
def Plus(expr1: Float, expr2: Long): Float = expr1 + expr2
/** Answer the sum of the two values (Float widened to Double). */
def Plus(expr1: Double, expr2: Float): Double = expr1 + expr2
/** Answer the sum of the two values (Float widened to Double). */
def Plus(expr1: Float, expr2: Double): Double = expr1 + expr2
/** Answer the sum of the two values. */
def Plus(expr1: Float, expr2: Float): Float = expr1 + expr2
/** Answer the left-to-right concatenation of the strings (throws on an empty buffer, like the original). */
def Plus(exprs: ArrayBuffer[String]): String = exprs.reduceLeft(_ + _)
/** Answer the sum of the values (throws on an empty buffer, like the original). */
def Plus(exprs: ArrayBuffer[Int]): Int = exprs.reduceLeft(_ + _)
/** Answer the sum of the values (throws on an empty buffer, like the original). */
def Plus(exprs: ArrayBuffer[Long]): Long = exprs.reduceLeft(_ + _)
/** Answer the sum of the values (throws on an empty buffer, like the original). */
def Plus(exprs: ArrayBuffer[Double]): Double = exprs.reduceLeft(_ + _)
/** Answer the sum of the values (throws on an empty buffer, like the original). */
def Plus(exprs: ArrayBuffer[Float]): Float = exprs.reduceLeft(_ + _)
/** Answer the left-to-right concatenation of the strings (throws on an empty array, like the original). */
def Plus(exprs: Array[String]): String = exprs.reduceLeft(_ + _)
/** Answer the sum of the values (throws on an empty array, like the original). */
def Plus(exprs: Array[Int]): Int = exprs.reduceLeft(_ + _)
/** Answer the sum of the values (throws on an empty array, like the original). */
def Plus(exprs: Array[Long]): Long = exprs.reduceLeft(_ + _)
/** Answer the sum of the values (throws on an empty array, like the original). */
def Plus(exprs: Array[Double]): Double = exprs.reduceLeft(_ + _)
/** Answer the sum of the values (throws on an empty array, like the original). */
def Plus(exprs: Array[Float]): Float = exprs.reduceLeft(_ + _)
/** Answer expr1 minus expr2. */
def Minus(expr1: Int, expr2: Int): Int = expr1 - expr2
/** Answer expr1 minus expr2 (Int widened to Double). */
def Minus(expr1: Double, expr2: Int): Double = expr1 - expr2
/** Answer expr1 minus expr2 (Int widened to Double). */
def Minus(expr1: Int, expr2: Double): Double = expr1 - expr2
/** Answer expr1 minus expr2.
  * NOTE(review): both operands are integral yet the generated return type is Double
  * (the Long difference is widened implicitly); signature kept for caller compatibility. */
def Minus(expr1: Int, expr2: Long): Double = expr1 - expr2
/** Answer expr1 minus expr2 (Int widened to Long). */
def Minus(expr1: Long, expr2: Int): Long = expr1 - expr2
/** Answer expr1 minus expr2 (Int widened to Float). */
def Minus(expr1: Int, expr2: Float): Float = expr1 - expr2
/** Answer expr1 minus expr2 (Int widened to Float). */
def Minus(expr1: Float, expr2: Int): Float = expr1 - expr2
/** Answer expr1 minus expr2. */
def Minus(expr1: Long, expr2: Long): Long = expr1 - expr2
/** Answer expr1 minus expr2 (Long widened to Double). */
def Minus(expr1: Double, expr2: Long): Double = expr1 - expr2
/** Answer expr1 minus expr2 (Long widened to Double). */
def Minus(expr1: Long, expr2: Double): Double = expr1 - expr2
/** Answer expr1 minus expr2 (Long widened to Float). */
def Minus(expr1: Long, expr2: Float): Float = expr1 - expr2
/** Answer expr1 minus expr2 (Long widened to Float). */
def Minus(expr1: Float, expr2: Long): Float = expr1 - expr2
/** Answer expr1 minus expr2. */
def Minus(expr1: Double, expr2: Double): Double = expr1 - expr2
/** Answer expr1 minus expr2 (Float widened to Double). */
def Minus(expr1: Double, expr2: Float): Double = expr1 - expr2
/** Answer expr1 minus expr2 (Float widened to Double). */
def Minus(expr1: Float, expr2: Double): Double = expr1 - expr2
/** Answer expr1 minus expr2. */
def Minus(expr1: Float, expr2: Float): Float = expr1 - expr2
/** Answer the left-to-right difference of the values (throws on an empty buffer, like the original). */
def Minus(exprs: ArrayBuffer[Int]): Int = exprs.reduceLeft(_ - _)
/** Answer the left-to-right difference of the values (throws on an empty buffer, like the original). */
def Minus(exprs: ArrayBuffer[Long]): Long = exprs.reduceLeft(_ - _)
/** Answer the left-to-right difference of the values (throws on an empty buffer, like the original). */
def Minus(exprs: ArrayBuffer[Double]): Double = exprs.reduceLeft(_ - _)
/** Answer the left-to-right difference of the values (throws on an empty buffer, like the original). */
def Minus(exprs: ArrayBuffer[Float]): Float = exprs.reduceLeft(_ - _)
/** Answer the left-to-right difference of the values (throws on an empty array, like the original). */
def Minus(exprs: Array[Int]): Int = exprs.reduceLeft(_ - _)
/** Answer the left-to-right difference of the values (throws on an empty array, like the original). */
def Minus(exprs: Array[Long]): Long = exprs.reduceLeft(_ - _)
/** Answer the left-to-right difference of the values (throws on an empty array, like the original). */
def Minus(exprs: Array[Double]): Double = exprs.reduceLeft(_ - _)
/** Answer the left-to-right difference of the values (throws on an empty array, like the original). */
def Minus(exprs: Array[Float]): Float = exprs.reduceLeft(_ - _)
/** Answer the product of the two values. */
def Multiply(expr1: Int, expr2: Int): Int = expr1 * expr2
/** Answer the product of the two values (Int widened to Double). */
def Multiply(expr1: Double, expr2: Int): Double = expr1 * expr2
/** Answer the product of the two values (Int widened to Double). */
def Multiply(expr1: Int, expr2: Double): Double = expr1 * expr2
/** Answer the product of the two values.
  * NOTE(review): both operands are integral yet the generated return type is Double
  * (the Long product is widened implicitly); signature kept for caller compatibility. */
def Multiply(expr1: Int, expr2: Long): Double = expr1 * expr2
/** Answer the product of the two values (Int widened to Long). */
def Multiply(expr1: Long, expr2: Int): Long = expr1 * expr2
/** Answer the product of the two values (Int widened to Float). */
def Multiply(expr1: Int, expr2: Float): Float = expr1 * expr2
/** Answer the product of the two values (Int widened to Float). */
def Multiply(expr1: Float, expr2: Int): Float = expr1 * expr2
/** Answer the product of the two values. */
def Multiply(expr1: Long, expr2: Long): Long = expr1 * expr2
/** Answer the product of the two values (Long widened to Double). */
def Multiply(expr1: Double, expr2: Long): Double = expr1 * expr2
/** Answer the product of the two values (Long widened to Double). */
def Multiply(expr1: Long, expr2: Double): Double = expr1 * expr2
/** Answer the product of the two values (Long widened to Float). */
def Multiply(expr1: Long, expr2: Float): Float = expr1 * expr2
/** Answer the product of the two values (Long widened to Float). */
def Multiply(expr1: Float, expr2: Long): Float = expr1 * expr2
/** Answer the product of the two values. */
def Multiply(expr1: Double, expr2: Double): Double = expr1 * expr2
/** Answer the product of the two values (Float widened to Double). */
def Multiply(expr1: Double, expr2: Float): Double = expr1 * expr2
/** Answer the product of the two values (Float widened to Double). */
def Multiply(expr1: Float, expr2: Double): Double = expr1 * expr2
/** Answer the product of the two values. */
def Multiply(expr1: Float, expr2: Float): Float = expr1 * expr2
/** Answer the product of the values (throws on an empty buffer, like the original). */
def Multiply(exprs: ArrayBuffer[Int]): Int = exprs.reduceLeft(_ * _)
/** Answer the product of the values (throws on an empty buffer, like the original). */
def Multiply(exprs: ArrayBuffer[Long]): Long = exprs.reduceLeft(_ * _)
/** Answer the product of the values (throws on an empty buffer, like the original). */
def Multiply(exprs: ArrayBuffer[Double]): Double = exprs.reduceLeft(_ * _)
/** Answer the product of the values (throws on an empty buffer, like the original). */
def Multiply(exprs: ArrayBuffer[Float]): Float = exprs.reduceLeft(_ * _)
/** Answer the product of the values (throws on an empty array, like the original). */
def Multiply(exprs: Array[Int]): Int = exprs.reduceLeft(_ * _)
/** Answer the product of the values (throws on an empty array, like the original). */
def Multiply(exprs: Array[Long]): Long = exprs.reduceLeft(_ * _)
/** Answer the product of the values (throws on an empty array, like the original). */
def Multiply(exprs: Array[Double]): Double = exprs.reduceLeft(_ * _)
/** Answer the product of the values (throws on an empty array, like the original). */
def Multiply(exprs: Array[Float]): Float = exprs.reduceLeft(_ * _)
/** Answer expr1 divided by expr2 (integer division: the result truncates toward zero). */
def Divide(expr1: Int, expr2: Int): Int = expr1 / expr2
/** Answer expr1 divided by expr2 (Int widened to Double). */
def Divide(expr1: Double, expr2: Int): Double = expr1 / expr2
/** Answer expr1 divided by expr2 (Int widened to Double). */
def Divide(expr1: Int, expr2: Double): Double = expr1 / expr2
/** Answer expr1 divided by expr2.
  * NOTE(review): this performs integer (Long) division and only then widens to Double,
  * so e.g. Divide(1, 2L) yields 0.0; behavior kept identical to the original — verify generator intent. */
def Divide(expr1: Int, expr2: Long): Double = expr1 / expr2
/** Answer expr1 divided by expr2 (integer division; Int widened to Long). */
def Divide(expr1: Long, expr2: Int): Long = expr1 / expr2
/** Answer expr1 divided by expr2 (Int widened to Float). */
def Divide(expr1: Int, expr2: Float): Float = expr1 / expr2
/** Answer expr1 divided by expr2 (Int widened to Float). */
def Divide(expr1: Float, expr2: Int): Float = expr1 / expr2
/** Answer expr1 divided by expr2 (integer division: the result truncates toward zero). */
def Divide(expr1: Long, expr2: Long): Long = expr1 / expr2
/** Answer expr1 divided by expr2 (Long widened to Double). */
def Divide(expr1: Double, expr2: Long): Double = expr1 / expr2
/** Answer expr1 divided by expr2 (Long widened to Double). */
def Divide(expr1: Long, expr2: Double): Double = expr1 / expr2
/** Answer expr1 divided by expr2 (Long widened to Float). */
def Divide(expr1: Long, expr2: Float): Float = expr1 / expr2
/** Answer expr1 divided by expr2 (Long widened to Float). */
def Divide(expr1: Float, expr2: Long): Float = expr1 / expr2
/** Answer expr1 divided by expr2. */
def Divide(expr1: Double, expr2: Double): Double = expr1 / expr2
/** Answer expr1 divided by expr2 (Float widened to Double). */
def Divide(expr1: Double, expr2: Float): Double = expr1 / expr2
/** Answer expr1 divided by expr2 (Float widened to Double). */
def Divide(expr1: Float, expr2: Double): Double = expr1 / expr2
/** Answer expr1 divided by expr2. */
def Divide(expr1: Float, expr2: Float): Float = expr1 / expr2
/** Answer the left-to-right quotient of the values (throws on an empty buffer, like the original). */
def Divide(exprs: ArrayBuffer[Int]): Int = exprs.reduceLeft(_ / _)
/** Answer the left-to-right quotient of the values (throws on an empty buffer, like the original). */
def Divide(exprs: ArrayBuffer[Long]): Long = exprs.reduceLeft(_ / _)
/** Answer the left-to-right quotient of the values (throws on an empty buffer, like the original). */
def Divide(exprs: ArrayBuffer[Double]): Double = exprs.reduceLeft(_ / _)
/** Answer the left-to-right quotient of the values (throws on an empty buffer, like the original). */
def Divide(exprs: ArrayBuffer[Float]): Float = exprs.reduceLeft(_ / _)
/** Answer the smaller of the two values. */
def Min(expr1: Int, expr2: Int): Int = min(expr1, expr2)
/** Answer the smaller of the two values (Int widened to Double). */
def Min(expr1: Double, expr2: Int): Double = min(expr1, expr2)
/** Answer the smaller of the two values (Int widened to Double). */
def Min(expr1: Int, expr2: Double): Double = min(expr1, expr2)
/** Answer the smaller of the two values.
  * NOTE(review): both operands are integral yet the generated return type is Double
  * (the Long minimum is widened implicitly); signature kept for caller compatibility. */
def Min(expr1: Int, expr2: Long): Double = min(expr1, expr2)
/** Answer the smaller of the two values (Int widened to Long). */
def Min(expr1: Long, expr2: Int): Long = min(expr1, expr2)
/** Answer the smaller of the two values (Int widened to Float). */
def Min(expr1: Int, expr2: Float): Float = min(expr1, expr2)
/** Answer the smaller of the two values (Int widened to Float). */
def Min(expr1: Float, expr2: Int): Float = min(expr1, expr2)
/** Answer the smaller of the two values. */
def Min(expr1: Long, expr2: Long): Long = min(expr1, expr2)
/** Answer the smaller of the two values (Long widened to Double). */
def Min(expr1: Double, expr2: Long): Double = min(expr1, expr2)
/** Answer the smaller of the two values (Long widened to Double). */
def Min(expr1: Long, expr2: Double): Double = min(expr1, expr2)
/** Answer the smaller of the two values (Long widened to Float). */
def Min(expr1: Long, expr2: Float): Float = min(expr1, expr2)
/** Answer the smaller of the two values (Long widened to Float). */
def Min(expr1: Float, expr2: Long): Float = min(expr1, expr2)
/** Answer the smaller of the two values. */
def Min(expr1: Double, expr2: Double): Double = min(expr1, expr2)
/** Answer the smaller of the two values (Float widened to Double). */
def Min(expr1: Double, expr2: Float): Double = min(expr1, expr2)
/** Answer the smaller of the two values (Float widened to Double). */
def Min(expr1: Float, expr2: Double): Double = min(expr1, expr2)
/** Answer the smaller of the two values. */
def Min(expr1: Float, expr2: Float): Float = min(expr1, expr2)
/** Answer the minimum of the values (throws on an empty buffer, like the original). */
def Min(exprs: ArrayBuffer[Int]): Int = exprs.reduceLeft(min(_, _))
/** Answer the minimum of the values (throws on an empty buffer, like the original). */
def Min(exprs: ArrayBuffer[Long]): Long = exprs.reduceLeft(min(_, _))
/** Answer the minimum of the values (throws on an empty buffer, like the original). */
def Min(exprs: ArrayBuffer[Double]): Double = exprs.reduceLeft(min(_, _))
/** Answer the minimum of the values (throws on an empty buffer, like the original). */
def Min(exprs: ArrayBuffer[Float]): Float = exprs.reduceLeft(min(_, _))
/** Answer the minimum of the values (throws on an empty array, like the original). */
def Min(exprs: Array[Int]): Int = exprs.reduceLeft(min(_, _))
/** Answer the minimum of the values (throws on an empty array, like the original). */
def Min(exprs: Array[Long]): Long = exprs.reduceLeft(min(_, _))
/** Answer the minimum of the values (throws on an empty array, like the original). */
def Min(exprs: Array[Double]): Double = exprs.reduceLeft(min(_, _))
/** Answer the minimum of the values (throws on an empty array, like the original). */
def Min(exprs: Array[Float]): Float = exprs.reduceLeft(min(_, _))
/** Answer the minimum of the values (throws on an empty list, like the original). */
def Min(exprs: List[Int]): Int = exprs.reduceLeft(min(_, _))
/** Answer the minimum of the values (throws on an empty list, like the original). */
def Min(exprs: List[Long]): Long = exprs.reduceLeft(min(_, _))
/** Answer the minimum of the values (throws on an empty list, like the original). */
def Min(exprs: List[Double]): Double = exprs.reduceLeft(min(_, _))
/** Answer the minimum of the values (throws on an empty list, like the original). */
def Min(exprs: List[Float]): Float = exprs.reduceLeft(min(_, _))
/** Answer the larger of the two values.
  * BUG FIX: the generated bodies called min(expr1, expr2), so every Max overload
  * actually returned the MINIMUM; each now delegates to scala.math.max. */
def Max(expr1: Int, expr2: Int): Int = scala.math.max(expr1, expr2)
/** Answer the larger of the two values (Int widened to Double). */
def Max(expr1: Double, expr2: Int): Double = scala.math.max(expr1, expr2)
/** Answer the larger of the two values (Int widened to Double). */
def Max(expr1: Int, expr2: Double): Double = scala.math.max(expr1, expr2)
/** Answer the larger of the two values.
  * NOTE(review): both operands are integral yet the generated return type is Double
  * (the Long maximum is widened implicitly); signature kept for caller compatibility. */
def Max(expr1: Int, expr2: Long): Double = scala.math.max(expr1, expr2)
/** Answer the larger of the two values (Int widened to Long). */
def Max(expr1: Long, expr2: Int): Long = scala.math.max(expr1, expr2)
/** Answer the larger of the two values (Int widened to Float). */
def Max(expr1: Int, expr2: Float): Float = scala.math.max(expr1, expr2)
/** Answer the larger of the two values (Int widened to Float). */
def Max(expr1: Float, expr2: Int): Float = scala.math.max(expr1, expr2)
/** Answer the larger of the two values. */
def Max(expr1: Long, expr2: Long): Long = scala.math.max(expr1, expr2)
/** Answer the larger of the two values (Long widened to Double). */
def Max(expr1: Double, expr2: Long): Double = scala.math.max(expr1, expr2)
/** Answer the larger of the two values (Long widened to Double). */
def Max(expr1: Long, expr2: Double): Double = scala.math.max(expr1, expr2)
/** Answer the larger of the two values (Long widened to Float). */
def Max(expr1: Long, expr2: Float): Float = scala.math.max(expr1, expr2)
/** Answer the larger of the two values (Long widened to Float). */
def Max(expr1: Float, expr2: Long): Float = scala.math.max(expr1, expr2)
/**
Answer the maximum of the exprs
@param expr1 : a value
@param expr2 : a value
@return the maximum of the exprs
*/
def Max(expr1: Double, expr2: Double): Double = {
(min(expr1, expr2))
}
/**
Answer the maximum of the exprs
@param expr1 : a value
@param expr2 : a value
@return the maximum of the exprs
*/
def Max(expr1: Double, expr2: Float): Double = {
(min(expr1, expr2))
}
/**
Answer the maximum of the exprs
@param expr1 : a value
@param expr2 : a value
@return the maximum of the exprs
*/
def Max(expr1: Float, expr2: Double): Double = {
(min(expr1, expr2))
}
/**
Answer the maximum of the exprs
@param expr1 : a value
@param expr2 : a value
@return the maximum of the exprs
*/
def Max(expr1: Float, expr2: Float): Float = {
(min(expr1, expr2))
}
/**
Answer the maximum of the exprs
@param expr1 : a value
@param expr2 : a value
@return the maximum of the exprs
*/
def Max(exprs: ArrayBuffer[Int]): Int = {
exprs.reduceLeft(min(_, _))
}
/**
Answer the maximum of the exprs
@param expr1 : a value
@param expr2 : a value
@return the maximum of the exprs
*/
def Max(exprs: ArrayBuffer[Long]): Long = {
exprs.reduceLeft(min(_, _))
}
/**
Answer the maximum of the exprs
@param expr1 : a value
@param expr2 : a value
@return the maximum of the exprs
*/
def Max(exprs: ArrayBuffer[Double]): Double = {
exprs.reduceLeft(min(_, _))
}
/**
Answer the maximum of the exprs
@param expr1 : a value
@param expr2 : a value
@return the maximum of the exprs
*/
def Max(exprs: ArrayBuffer[Float]): Float = {
exprs.reduceLeft(min(_, _))
}
/**
Answer the maximum of the exprs
@param expr1 : a value
@param expr2 : a value
@return the maximum of the exprs
*/
def Max(exprs: Array[Int]): Int = {
exprs.reduceLeft(min(_, _))
}
/**
Answer the maximum of the exprs
@param exprs : a collection of values
@return the maximum of the exprs
*/
def Max(exprs: Array[Long]): Long = {
exprs.reduceLeft(min(_, _))
}
/**
Answer the maximum of the exprs
@param exprs : a collection of values
@return the maximum of the exprs
*/
def Max(exprs: Array[Double]): Double = {
exprs.reduceLeft(min(_, _))
}
/**
Answer the maximum of the exprs
@param exprs : a collection of values
@return the maximum of the exprs
*/
def Max(exprs: Array[Float]): Float = {
exprs.reduceLeft(min(_, _))
}
/**
Answer the maximum of the exprs
@param exprs : a collection of values
@return the maximum of the exprs
*/
def Max(exprs: List[Int]): Int = {
exprs.reduceLeft(min(_, _))
}
/**
Answer the maximum of the exprs
@param exprs : a collection of values
@return the maximum of the exprs
*/
def Max(exprs: List[Long]): Long = {
exprs.reduceLeft(min(_, _))
}
/**
Answer the maximum of the exprs
@param exprs : a collection of values
@return the maximum of the exprs
*/
def Max(exprs: List[Double]): Double = {
exprs.reduceLeft(min(_, _))
}
/**
Answer the maximum of the exprs
@param exprs : a collection of values
@return the maximum of the exprs
*/
def Max(exprs: List[Float]): Float = {
exprs.reduceLeft(min(_, _))
}
  /**
    Answer the sum of the exprs (0 when exprs is null or empty).
    @param exprs : a collection of values
    @return the sum of the exprs
  */
  def Sum(exprs: ArrayBuffer[Int]): Int = {
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the exprs (0 when exprs is null or empty).
    @param exprs : a collection of values
    @return the sum of the exprs
  */
  def Sum(exprs: ArrayBuffer[Long]): Long = {
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the exprs (0 when exprs is null or empty).
    @param exprs : a collection of values
    @return the sum of the exprs
  */
  def Sum(exprs: ArrayBuffer[Double]): Double = {
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the exprs (0 when exprs is null or empty).
    @param exprs : a collection of values
    @return the sum of the exprs
  */
  def Sum(exprs: ArrayBuffer[Float]): Float = {
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the exprs (0 when exprs is null or empty).
    @param exprs : a collection of values
    @return the sum of the exprs
  */
  def Sum(exprs: Array[Int]): Int = {
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the exprs (0 when exprs is null or empty).
    @param exprs : a collection of values
    @return the sum of the exprs
  */
  def Sum(exprs: Array[Long]): Long = {
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the exprs (0 when exprs is null or empty).
    @param exprs : a collection of values
    @return the sum of the exprs
  */
  def Sum(exprs: Array[Double]): Double = {
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the exprs (0 when exprs is null or empty).
    @param exprs : a collection of values
    @return the sum of the exprs
  */
  def Sum(exprs: Array[Float]): Float = {
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the tuple values.  ToArray is defined elsewhere in
    this file.
    @param tuples : a tuple of values
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple2[Int, Int]): Int = {
    val exprs: Array[Int] = ToArray(tuples)
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple of values
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple3[Int, Int, Int]): Int = {
    val exprs: Array[Int] = ToArray(tuples)
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple of values
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple4[Int, Int, Int, Int]): Int = {
    val exprs: Array[Int] = ToArray(tuples)
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple of values
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple5[Int, Int, Int, Int, Int]): Int = {
    val exprs: Array[Int] = ToArray(tuples)
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple of values
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple6[Int, Int, Int, Int, Int, Int]): Int = {
    val exprs: Array[Int] = ToArray(tuples)
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple of values
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple7[Int, Int, Int, Int, Int, Int, Int]): Int = {
    val exprs: Array[Int] = ToArray(tuples)
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple of values
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple8[Int, Int, Int, Int, Int, Int, Int, Int]): Int = {
    val exprs: Array[Int] = ToArray(tuples)
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple of values
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple9[Int, Int, Int, Int, Int, Int, Int, Int, Int]): Int = {
    val exprs: Array[Int] = ToArray(tuples)
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple of values
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple10[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Int = {
    val exprs: Array[Int] = ToArray(tuples)
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple of values
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple2[Float, Float]): Float = {
    val exprs: Array[Float] = ToArray(tuples)
    if (exprs != null && exprs.size > 0)
      exprs.reduceLeft(_ + _)
    else
      0
  }
  /**
    Answer the sum of the tuple values coerced to Float.  ToArrayOfFloat is
    defined elsewhere in this file.
    @param tuples : a tuple
    @return the sum of the tuple elements
  */
  def SumToFloat(tuples: Tuple2[Any, Any]): Float = {
    val exprs: Array[Float] = ToArrayOfFloat(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Float.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfFloat(tuples: Array[Tuple2[Any, Any]]): Array[Float] = {
    val exprs: Array[Float] = tuples.map(tuple => SumToFloat(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values coerced to Double.
    @param tuples : a tuple
    @return the sum of the tuple elements
  */
  def SumToDouble(tuples: Tuple2[Any, Any]): Double = {
    val exprs: Array[Double] = ToArrayOfDouble(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Double.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfDouble(tuples: Array[Tuple2[Any, Any]]): Array[Double] = {
    val exprs: Array[Double] = tuples.map(tuple => SumToDouble(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values coerced to Int.
    @param tuples : a tuple
    @return the sum of the tuple elements
  */
  def SumToInt(tuples: Tuple2[Any, Any]): Int = {
    val exprs: Array[Int] = ToArrayOfInt(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Int.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfInt(tuples: Array[Tuple2[Any, Any]]): Array[Int] = {
    val exprs: Array[Int] = tuples.map(tuple => SumToInt(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple3[Float, Float, Float]): Float = {
    val exprs: Array[Float] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values coerced to Float.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def SumToFloat(tuples: Tuple3[Any, Any, Any]): Float = {
    val exprs: Array[Float] = ToArrayOfFloat(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Float.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfFloat(tuples: Array[Tuple3[Any, Any, Any]]): Array[Float] = {
    val exprs: Array[Float] = tuples.map(tuple => SumToFloat(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values coerced to Double.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def SumToDouble(tuples: Tuple3[Any, Any, Any]): Double = {
    val exprs: Array[Double] = ToArrayOfDouble(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Double.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfDouble(tuples: Array[Tuple3[Any, Any, Any]]): Array[Double] = {
    val exprs: Array[Double] = tuples.map(tuple => SumToDouble(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values coerced to Int.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def SumToInt(tuples: Tuple3[Any, Any, Any]): Int = {
    val exprs: Array[Int] = ToArrayOfInt(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Int.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfInt(tuples: Array[Tuple3[Any, Any, Any]]): Array[Int] = {
    val exprs: Array[Int] = tuples.map(tuple => SumToInt(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple4[Float, Float, Float, Float]): Float = {
    val exprs: Array[Float] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values coerced to Float.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def SumToFloat(tuples: Tuple4[Any, Any, Any, Any]): Float = {
    val exprs: Array[Float] = ToArrayOfFloat(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Float.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfFloat(tuples: Array[Tuple4[Any, Any, Any, Any]]): Array[Float] = {
    val exprs: Array[Float] = tuples.map(tuple => SumToFloat(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values coerced to Double.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def SumToDouble(tuples: Tuple4[Any, Any, Any, Any]): Double = {
    val exprs: Array[Double] = ToArrayOfDouble(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Double.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfDouble(tuples: Array[Tuple4[Any, Any, Any, Any]]): Array[Double] = {
    val exprs: Array[Double] = tuples.map(tuple => SumToDouble(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values coerced to Int.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def SumToInt(tuples: Tuple4[Any, Any, Any, Any]): Int = {
    val exprs: Array[Int] = ToArrayOfInt(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Int.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfInt(tuples: Array[Tuple4[Any, Any, Any, Any]]): Array[Int] = {
    val exprs: Array[Int] = tuples.map(tuple => SumToInt(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple5[Float, Float, Float, Float, Float]): Float = {
    val exprs: Array[Float] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values coerced to Float.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def SumToFloat(tuples: Tuple5[Any, Any, Any, Any, Any]): Float = {
    val exprs: Array[Float] = ToArrayOfFloat(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Float.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfFloat(tuples: Array[Tuple5[Any, Any, Any, Any, Any]]): Array[Float] = {
    val exprs: Array[Float] = tuples.map(tuple => SumToFloat(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values coerced to Double.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def SumToDouble(tuples: Tuple5[Any, Any, Any, Any, Any]): Double = {
    val exprs: Array[Double] = ToArrayOfDouble(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Double.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfDouble(tuples: Array[Tuple5[Any, Any, Any, Any, Any]]): Array[Double] = {
    val exprs: Array[Double] = tuples.map(tuple => SumToDouble(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values coerced to Int.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def SumToInt(tuples: Tuple5[Any, Any, Any, Any, Any]): Int = {
    val exprs: Array[Int] = ToArrayOfInt(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Int.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfInt(tuples: Array[Tuple5[Any, Any, Any, Any, Any]]): Array[Int] = {
    val exprs: Array[Int] = tuples.map(tuple => SumToInt(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple6[Float, Float, Float, Float, Float, Float]): Float = {
    val exprs: Array[Float] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values coerced to Float.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def SumToFloat(tuples: Tuple6[Any, Any, Any, Any, Any, Any]): Float = {
    val exprs: Array[Float] = ToArrayOfFloat(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Float.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfFloat(tuples: Array[Tuple6[Any, Any, Any, Any, Any, Any]]): Array[Float] = {
    val exprs: Array[Float] = tuples.map(tuple => SumToFloat(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values coerced to Double.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def SumToDouble(tuples: Tuple6[Any, Any, Any, Any, Any, Any]): Double = {
    val exprs: Array[Double] = ToArrayOfDouble(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Double.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfDouble(tuples: Array[Tuple6[Any, Any, Any, Any, Any, Any]]): Array[Double] = {
    val exprs: Array[Double] = tuples.map(tuple => SumToDouble(tuple))
    exprs
  }
  /**
    Answer the sum of the tuple values coerced to Int.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def SumToInt(tuples: Tuple6[Any, Any, Any, Any, Any, Any]): Int = {
    val exprs: Array[Int] = ToArrayOfInt(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer, for each tuple in the array, the sum of its elements coerced
    to Int.
    @param tuples : an array of tuples
    @return an array of the per-tuple sums
  */
  def SumToArrayOfInt(tuples: Array[Tuple6[Any, Any, Any, Any, Any, Any]]): Array[Int] = {
    val exprs: Array[Int] = tuples.map(tuple => SumToInt(tuple))
    exprs
  }
  /** FIXME: Do SumTo<Scalar> and SumToArrayOf<Scalar> for the remaining Tuple<N> */
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple7[Float, Float, Float, Float, Float, Float, Float]): Float = {
    val exprs: Array[Float] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple8[Float, Float, Float, Float, Float, Float, Float, Float]): Float = {
    val exprs: Array[Float] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple9[Float, Float, Float, Float, Float, Float, Float, Float, Float]): Float = {
    val exprs: Array[Float] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple10[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Float = {
    val exprs: Array[Float] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple2[Double, Double]): Double = {
    val exprs: Array[Double] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple3[Double, Double, Double]): Double = {
    val exprs: Array[Double] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple4[Double, Double, Double, Double]): Double = {
    val exprs: Array[Double] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple5[Double, Double, Double, Double, Double]): Double = {
    val exprs: Array[Double] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple6[Double, Double, Double, Double, Double, Double]): Double = {
    val exprs: Array[Double] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple7[Double, Double, Double, Double, Double, Double, Double]): Double = {
    val exprs: Array[Double] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple8[Double, Double, Double, Double, Double, Double, Double, Double]): Double = {
    val exprs: Array[Double] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple9[Double, Double, Double, Double, Double, Double, Double, Double, Double]): Double = {
    val exprs: Array[Double] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
  /**
    Answer the sum of the tuple values.
    @param tuples : a tuple
    @return the sum of the tuple's values
  */
  def Sum(tuples: Tuple10[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Double = {
    val exprs: Array[Double] = ToArray(tuples)
    exprs.reduceLeft(_ + _)
  }
/**
Answer the sum of the collection elements
@param exprs : a collection of scalar
@return the sum of the collections's values
*/
def Sum(exprs: List[Int]): Int = {
exprs.reduceLeft(_ + _)
}
/**
Answer the sum of the collection elements
@param exprs : a collection of scalar
@return the sum of the collections's values
*/
def Sum(exprs: List[Long]): Long = {
exprs.reduceLeft(_ + _)
}
/**
Answer the sum of the collection elements
@param exprs : a collection of scalar
@return the sum of the collections's values
*/
def Sum(exprs: List[Double]): Double = {
exprs.reduceLeft(_ + _)
}
/**
Answer the sum of the collection elements
@param exprs : a collection of scalar
@return the sum of the collections's values
*/
def Sum(exprs: List[Float]): Float = {
exprs.reduceLeft(_ + _)
}
  /** avg (average)*/
  /**
    Answer the arithmetic mean of the collection elements.
    NOTE(review): integer division for Int/Long element types; an empty
    collection divides by zero — confirm callers guard against empty input.
    @param exprs : a collection of scalar
    @return the average of the collection's values
  */
  def Avg(exprs: ArrayBuffer[Int]): Int = {
    Sum(exprs) / exprs.length
  }
  /**
    Answer the arithmetic mean of the collection elements (integer division).
    @param exprs : a collection of scalar
    @return the average of the collection's values
  */
  def Avg(exprs: ArrayBuffer[Long]): Long = {
    Sum(exprs) / exprs.length
  }
  /**
    Answer the arithmetic mean of the collection elements.
    @param exprs : a collection of scalar
    @return the average of the collection's values
  */
  def Avg(exprs: ArrayBuffer[Double]): Double = {
    Sum(exprs) / exprs.length
  }
  /**
    Answer the arithmetic mean of the collection elements.
    @param exprs : a collection of scalar
    @return the average of the collection's values
  */
  def Avg(exprs: ArrayBuffer[Float]): Float = {
    Sum(exprs) / exprs.length
  }
  /**
    Answer the arithmetic mean of the collection elements (integer division).
    @param exprs : a collection of scalar
    @return the average of the collection's values
  */
  def Avg(exprs: Array[Int]): Int = {
    Sum(exprs) / exprs.length
  }
  /**
    Answer the arithmetic mean of the collection elements (integer division).
    @param exprs : a collection of scalar
    @return the average of the collection's values
  */
  def Avg(exprs: Array[Long]): Long = {
    Sum(exprs) / exprs.length
  }
  /**
    Answer the arithmetic mean of the collection elements.
    @param exprs : a collection of scalar
    @return the average of the collection's values
  */
  def Avg(exprs: Array[Double]): Double = {
    Sum(exprs) / exprs.length
  }
  /**
    Answer the arithmetic mean of the collection elements.
    @param exprs : a collection of scalar
    @return the average of the collection's values
  */
  def Avg(exprs: Array[Float]): Float = {
    Sum(exprs) / exprs.length
  }
  /**
    Answer the arithmetic mean of the collection elements (integer division).
    @param exprs : a collection of scalar
    @return the average of the collection's values
  */
  def Avg(exprs: List[Int]): Int = {
    Sum(exprs) / exprs.length
  }
  /**
    Answer the arithmetic mean of the collection elements (integer division).
    @param exprs : a collection of scalar
    @return the average of the collection's values
  */
  def Avg(exprs: List[Long]): Long = {
    Sum(exprs) / exprs.length
  }
  /**
    Answer the arithmetic mean of the collection elements.
    @param exprs : a collection of scalar
    @return the average of the collection's values
  */
  def Avg(exprs: List[Double]): Double = {
    Sum(exprs) / exprs.length
  }
  /**
    Answer the arithmetic mean of the collection elements.
    @param exprs : a collection of scalar
    @return the average of the collection's values
  */
  def Avg(exprs: List[Float]): Float = {
    Sum(exprs) / exprs.length
  }
/**
Answer the count of the collection elements
@param exprs : a collection of scalar
@return the count of the collections's elements
*/
def Count[A](exprs: ArrayBuffer[A]): Int = {
exprs.length
}
/**
Answer the count of the collection elements
@param exprs : a collection of scalar
@return the count of the collections's elements
*/
def Count[A](exprs: Array[A]): Int = {
exprs.length
}
/**
Answer the count of the collection elements
@param exprs : a collection of scalar
@return the count of the collections's elements
*/
def Count[A](exprs: List[A]): Int = {
exprs.length
}
/**
Answer the count of the collection elements
@param exprs : a collection of scalar
@return the count of the collections's elements
*/
def Count[A](exprs: Set[A]): Int = {
exprs.size
}
/**
Answer the count of the collection elements
@param exprs : a collection of scalar
@return the count of the collections's elements
*/
def Count[A](exprs: Queue[A]): Int = {
exprs.size
}
/**
Answer the count of the collection elements
@param exprs : a collection of scalar
@return the count of the collections's elements
*/
def Count[A, B](exprs: Map[A, B]): Int = {
exprs.size
}
/**
Answer the count of the collection elements
@param exprs : a collection of scalar
@return the count of the collections's elements
*/
def Count[A, B](exprs: HashMap[A, B]): Int = {
exprs.size
}
/**
Answer the median of the collection elements, where mean of two middle values is taken for even number of elements
@param exprs : a collection of scalar
@return the median of the collections's elements
*/
def Median(exprs: ArrayBuffer[Int]): Int = {
val mid = exprs.length
if (mid % 2 == 0) {
exprs(mid)
} else {
val midplus = mid + 1
(exprs(mid) + exprs(midplus) / 2)
}
}
/**
Answer the median of the collection elements, where mean of two middle values is taken for even number of elements
@param exprs : a collection of scalar
@return the median of the collections's elements
*/
def Median(exprs: ArrayBuffer[Long]): Long = {
val mid = exprs.length
if (mid % 2 == 0) {
exprs(mid)
} else {
val midplus = mid + 1
(exprs(mid) + exprs(midplus) / 2)
}
}
/**
Answer the median of the collection elements, where mean of two middle values is taken for even number of elements
@param exprs : a collection of scalar
@return the median of the collections's elements
*/
def Median(exprs: ArrayBuffer[Double]): Double = {
val mid = exprs.length
if (mid % 2 == 0) {
exprs(mid)
} else {
val midplus = mid + 1
(exprs(mid) + exprs(midplus) / 2)
}
}
/**
Answer the median of the collection elements, where mean of two middle values is taken for even number of elements
@param exprs : a collection of scalar
@return the median of the collections's elements
*/
def Median(exprs: ArrayBuffer[Float]): Float = {
val mid = exprs.length
if (mid % 2 == 0) {
exprs(mid)
} else {
val midplus = mid + 1
(exprs(mid) + exprs(midplus) / 2)
}
}
/**
Answer the median of the collection elements, where mean of two middle values is taken for even number of elements
@param exprs : a collection of scalar
@return the median of the collections's elements
*/
def Median(exprs: Array[Int]): Int = {
val mid = exprs.length
if (mid % 2 == 0) {
exprs(mid)
} else {
val midplus = mid + 1
(exprs(mid) + exprs(midplus) / 2)
}
}
/**
Answer the median of the collection elements, where mean of two middle values is taken for even number of elements
@param exprs : a collection of scalar
@return the median of the collections's elements
*/
def Median(exprs: Array[Long]): Long = {
val mid = exprs.length
if (mid % 2 == 0) {
exprs(mid)
} else {
val midplus = mid + 1
(exprs(mid) + exprs(midplus) / 2)
}
}
/**
Answer the median of the collection elements, where mean of two middle values is taken for even number of elements
@param exprs : a collection of scalar
@return the median of the collections's elements
*/
def Median(exprs: Array[Double]): Double = {
val mid = exprs.length
if (mid % 2 == 0) {
exprs(mid)
} else {
val midplus = mid + 1
(exprs(mid) + exprs(midplus) / 2)
}
}
/**
Answer the median of the collection elements, where mean of two middle values is taken for even number of elements
@param exprs : a collection of scalar
@return the median of the collections's elements
*/
def Median(exprs: Array[Float]): Float = {
val mid = exprs.length
if (mid % 2 == 0) {
exprs(mid)
} else {
val midplus = mid + 1
(exprs(mid) + exprs(midplus) / 2)
}
}
  /**
    Answer the product of the collection elements (delegates to Multiply,
    defined elsewhere in this file).
    @param exprs : a collection of scalar
    @return the product of the collection's elements
  */
  def Product(exprs: ArrayBuffer[Int]): Int = {
    Multiply(exprs)
  }
  /**
    Answer the product of the collection elements (delegates to Multiply).
    @param exprs : a collection of scalar
    @return the product of the collection's elements
  */
  def Product(exprs: ArrayBuffer[Long]): Long = {
    Multiply(exprs)
  }
  /**
    Answer the product of the collection elements (delegates to Multiply).
    @param exprs : a collection of scalar
    @return the product of the collection's elements
  */
  def Product(exprs: ArrayBuffer[Double]): Double = {
    Multiply(exprs)
  }
  /**
    Answer the product of the collection elements (delegates to Multiply).
    @param exprs : a collection of scalar
    @return the product of the collection's elements
  */
  def Product(exprs: ArrayBuffer[Float]): Float = {
    Multiply(exprs)
  }
  /**
    Answer the product of the array elements (delegates to Multiply).
    @param exprs : a collection of scalar
    @return the product of the collection's elements
  */
  def Product(exprs: Array[Int]): Int = {
    Multiply(exprs)
  }
  /**
    Answer the product of the array elements (delegates to Multiply).
    @param exprs : a collection of scalar
    @return the product of the collection's elements
  */
  def Product(exprs: Array[Long]): Long = {
    Multiply(exprs)
  }
  /**
    Answer the product of the array elements (delegates to Multiply).
    @param exprs : a collection of scalar
    @return the product of the collection's elements
  */
  def Product(exprs: Array[Double]): Double = {
    Multiply(exprs)
  }
  /**
    Answer the product of the array elements (delegates to Multiply).
    @param exprs : a collection of scalar
    @return the product of the collection's elements
  */
  def Product(exprs: Array[Float]): Float = {
    Multiply(exprs)
  }
/** log10, ln, sqrt, abs, exp, pow, threshold, floor, ceil, round */
/**
 * Answer the base-10 logarithm of the supplied double.
 * @param expr : a double
 * @return log10(expr); NaN for negative input, -Infinity for 0.0
 */
def log10(expr: Double): Double = {
  scala.math.log10(expr)
}
/**
 * Answer the natural (base-e) logarithm of the supplied double.
 * @param expr : a double
 * @return ln(expr); NaN for negative input, -Infinity for 0.0
 */
def ln(expr: Double): Double = {
  scala.math.log(expr)
}
/**
 * Answer the square root of the supplied double.
 * FIX: this previously called scala.math.log (natural logarithm) instead of
 * scala.math.sqrt, so every caller received ln(expr) rather than the square root.
 * @param expr : a double
 * @return sqrt(expr); NaN for negative input
 */
def sqrt(expr: Double): Double = {
  scala.math.sqrt(expr)
}
/**
 * Answer the absolute value of the supplied scalar.
 * @param expr : an Int
 * @return |expr| (NOTE: abs(Int.MinValue) overflows and stays negative — JVM semantics)
 */
def abs(expr: Int): Int = {
  scala.math.abs(expr)
}
/**
 * Answer the absolute value of the supplied scalar.
 * @param expr : a Long
 * @return |expr| (NOTE: abs(Long.MinValue) overflows and stays negative — JVM semantics)
 */
def abs(expr: Long): Long = {
  scala.math.abs(expr)
}
/**
 * Answer the absolute value of the supplied scalar.
 * @param expr : a Float
 * @return |expr| (-0.0f maps to 0.0f)
 */
def abs(expr: Float): Float = {
  scala.math.abs(expr)
}
/**
 * Answer the absolute value of the supplied scalar.
 * @param expr : a Double
 * @return |expr| (-0.0 maps to 0.0)
 */
def abs(expr: Double): Double = {
  scala.math.abs(expr)
}
/**
 * Answer Euler's number e raised to the power of the supplied double value.
 * @param expr : a double
 * @return the exponential function result
 */
def exp(expr: Double): Double = {
  scala.math.exp(expr)
}
/**
 * Answer the scalar taken to the supplied power.
 * @param x : a Double base
 * @param y : an Int exponent (widened to Double by scala.math.pow)
 * @return x raised to the y power
 */
def pow(x: Double, y: Int): Double = {
  scala.math.pow(x, y)
}
/**
 * Answer if x has met or exceeded the threshold y.
 * NOTE(review): delegates to GreaterThan (declared elsewhere in this file); if that is a
 * strict '>' comparison, equality does NOT count as "met" — confirm intended semantics.
 * @param x : a scalar
 * @param y : a scalar threshold
 * @return 1 if met else 0
 */
def threshold(x: Int, y: Int): Int = {
  if (GreaterThan(x, y)) 1 else 0
}
/**
 * Answer if x has met or exceeded the threshold y (see the Int overload's note on GreaterThan).
 * @param x : a scalar
 * @param y : a scalar threshold
 * @return 1 if met else 0
 */
def threshold(x: Long, y: Long): Int = {
  if (GreaterThan(x, y)) 1 else 0
}
/**
 * Answer if x has met or exceeded the threshold y (see the Int overload's note on GreaterThan).
 * @param x : a scalar
 * @param y : a scalar threshold
 * @return 1 if met else 0
 */
def threshold(x: Float, y: Float): Int = {
  if (GreaterThan(x, y)) 1 else 0
}
/**
 * Answer if x has met or exceeded the threshold y (see the Int overload's note on GreaterThan).
 * @param x : a scalar
 * @param y : a scalar threshold
 * @return 1 if met else 0
 */
def threshold(x: Double, y: Double): Int = {
  if (GreaterThan(x, y)) 1 else 0
}
/**
 * Answer the floor of the supplied Double.
 * @param expr : a Double
 * @return floor(expr)
 */
def floor(expr: Double): Double = {
  scala.math.floor(expr)
}
/**
 * Answer the ceil of the supplied Double.
 * @param expr : a Double
 * @return ceil(expr)
 */
def ceil(expr: Double): Double = {
  scala.math.ceil(expr)
}
/**
 * Answer the integer value closest to the supplied Double.
 * NOTE: scala.math.round(Double) yields a Long which is implicitly widened back to
 * Double here, so the result has no fractional part and halves round toward +infinity.
 * @param expr : a Double
 * @return round(expr) as a Double
 */
def round(expr: Double): Double = {
  scala.math.round(expr)
}
/**
 * IsMissing determines if the named field DOES NOT exist or EXISTS but with no legal value.
 *
 * @param ctx the runtime context for a given model
 * @param fldName the name of the field being sought... it can be a compound '.' qualified name
 * @return true if it is missing else false
 *
 * FIX: the dictionary-slot casts below previously dereferenced anyValue.Value even when
 * the AnyDataValue cast had failed (anyValue == null), causing a NullPointerException.
 * A null guard now protects both the transformation-dict and data-dict paths.
 *
 * NOTE: Compound names are currently limited to two names (e.g., container.fld). This will change
 * when metadata is made available to the runtime context for the models.
 */
def IsMissing(ctx : Context, fldName : String) : Boolean = {
  val nameParts : Array[String] = if (fldName.contains(".")) {
    fldName.split('.')
  } else {
    Array(fldName)
  }
  /** if just one name, look in dictionaries */
  val notMissing : Boolean = if (nameParts.size == 1) {
    ctx.valueSetFor(fldName)
  } else {
    /**
     * Obtain the MessageContainerBase for the message or container ... for now just the first namePart
     * FIXME: This will change when derived concepts (ModelNamespace.ModelName.field) are introduced
     */
    if (nameParts.size == 2) {
      val msgContainerName : String = nameParts(0)
      val fieldName : String = nameParts(1)
      val msgOrContainer : MessageContainerBase = if (ctx.isFieldInTransformationDict(msgContainerName)) {
        val derivedFld : DataValue = ctx.valueFor(msgContainerName)
        val anyValue : AnyDataValue = if (derivedFld.isInstanceOf[AnyDataValue]) derivedFld.asInstanceOf[AnyDataValue] else null
        /** guard: anyValue is null when the slot is not an AnyDataValue */
        if (anyValue != null && anyValue.Value.isInstanceOf[MessageContainerBase]) anyValue.Value.asInstanceOf[MessageContainerBase] else null
      } else {
        if (ctx.isFieldInDataDict(msgContainerName)) {
          val dataFld : DataValue = ctx.valueFor(msgContainerName)
          val anyValue : AnyDataValue = if (dataFld.isInstanceOf[AnyDataValue]) dataFld.asInstanceOf[AnyDataValue] else null
          /** guard: anyValue is null when the slot is not an AnyDataValue */
          if (anyValue != null && anyValue.Value.isInstanceOf[MessageContainerBase]) anyValue.Value.asInstanceOf[MessageContainerBase] else null
        } else {
          null
        }
      }
      val itsThere : Boolean = if (msgOrContainer != null) {
        /** NOTE(review): for fixed containers the mere presence of the container counts as
            presence of the field; only kv containers inspect the field itself — confirm intended. */
        (msgOrContainer.IsFixed || (msgOrContainer.IsKv && (msgOrContainer.getOrElse(fieldName,null) != null)))
      } else {
        false
      }
      itsThere
    } else {
      logger.error("Unable to handle isMissing tests on container of containers at this time... more complete solution coming...")
      logger.error("... need metadata manager to find type information at runtime to walk down hierarchies > 2 levels.")
      false
    }
  }
  (! notMissing)
}
/**
 * IsNotMissing determines if the named field exists with a legal value.
 * Convenience negation of IsMissing.
 *
 * @param ctx the runtime context for a given model
 * @param fldName the name of the field being sought... it can be a compound '.' qualified name
 * @return true if it is present else false
 *
 * NOTE: Compound names are currently limited to two names (e.g., container.fld). This will change
 * when metadata is made available to the runtime context for the models.
 */
def IsNotMissing(ctx : Context, fldName : String) : Boolean = {
  (! IsMissing(ctx,fldName))
}
/**
 * Answer the current transaction id from the caller's runtime context.
 *
 * @param ctx the runtime context for a given model (may be null)
 * @return ctx.xId, or 0 when the ctx is bogus (null)
 */
def getXid(ctx : Context) : Long = {
  Option(ctx).map(_.xId).getOrElse(0L)
}
/**
  Answer a new string with all characters of the supplied string folded to upper case.
  NOTE: uses the JVM default locale (String.toUpperCase()).
  @param str : a String
  @return a copy of 'str' with all characters folded to upper case.
  */
def uppercase(str: String): String = {
  str.toUpperCase()
}
/**
  Answer a new string with all characters of the supplied string folded to lower case.
  NOTE: uses the JVM default locale (String.toLowerCase()).
  @param str : a String
  @return a copy of 'str' with all characters folded to lower case.
  */
def lowercase(str: String): String = {
  str.toLowerCase()
}
/**
  Answer a portion of the supplied string whose first character is at 'startidx' (0-based)
  for the specified length.
  FIX: the end index was previously computed as (startidx + len - 1), which returned only
  (len - 1) characters, contradicting this function's stated contract.
  @param str : a String
  @param startidx : the first character of interest from the str
  @param len : the number of characters of interest starting with 'startidx'
  @return the substring of exactly 'len' characters (IndexOutOfBoundsException when out of range)
  */
def substring(str: String, startidx: Int, len: Int): String = {
  str.substring(startidx, startidx + len)
}
/**
  Answer a portion of the supplied string from 'startidx' (0-based) through the end of the string.
  Throws IndexOutOfBoundsException when startidx is negative or beyond the string's length.
  @param str : a String
  @param startidx : the first character of interest from the str
  @return the substring.
  */
def substring(str: String, startidx: Int): String = {
  str.substring(startidx)
}
/**
  Answer if 'inThis' string starts with 'findThis'.
  @param inThis : the String searched
  @param findThis : the prefix sought
  @return a Boolean reflecting the answer.
  */
def startsWith(inThis: String, findThis: String): Boolean = {
  inThis.startsWith(findThis)
}
/**
  Answer if 'inThis' string ends with 'findThis'.
  @param inThis : the String searched
  @param findThis : the suffix sought
  @return a Boolean reflecting the answer.
  */
def endsWith(inThis: String, findThis: String): Boolean = {
  inThis.endsWith(findThis)
}
/**
  Answer a copy of the supplied string with the white space trimmed from the beginning and end
  (the previous description here was a copy/paste of startsWith's doc).
  @param str : a String
  @return a new string with the white space trimmed from the beginning and end.
  */
def trimBlanks(str: String): String = {
  str.trim()
}
/**
 * Returns a random (type 4) UUID string.
 * @return random UUID string (36 characters, e.g. "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx")
 */
def idGen() : String = {
  UUID.randomUUID().toString;
}
/**
 * Accept an indefinite number of objects and concatenate their string representations.
 * Null arguments contribute the empty string.
 * @param args : arguments whose string representations will be concatenated
 * @return concatenation of the args' string representations
 */
def concat(args : Any*) : String = {
  val sb = new StringBuilder
  args.foreach(arg => sb.append(if (arg == null) "" else arg.toString))
  sb.toString
}
/**
 * Replace all occurrences of 'inWord' inside 'replacewithin' with 'replacewith',
 * answering the resulting string.
 * NOTE: inWord is interpreted as a regular expression by String.replaceAll, and
 * replacewith may use regex replacement syntax ($1, \\).
 * @param replacewithin : the variable (any type; its toString is used) searched
 * @param inWord : the pattern searched for within 'replacewithin'
 * @param replacewith : the replacement for each match
 * @return the string with all matches replaced
 */
def replace (replacewithin: Any, inWord: Any, replacewith: Any): String = {
  val subject : String = replacewithin.toString
  val pattern : String = inWord.toString
  val replacement : String = replacewith.toString
  subject.replaceAll(pattern, replacement)
}
/**
 * Answer whether an instance of 'matchwith' occurs within 'matchwithin'.
 * NOTE: matchwith is interpreted as a regular expression (String.replaceAll semantics).
 * Implementation trick: every match is doubled via replaceAll; if the string is
 * unchanged afterwards, there was no match.
 * @param matchwithin : the variable (any type; its toString is used) searched
 * @param matchwith : the pattern searched for within 'matchwithin'
 * @return true when at least one match exists, false otherwise
 */
def matches (matchwithin: Any, matchwith: Any): Boolean = {
  val haystack : String = matchwithin.toString
  val needle : String = matchwith.toString
  haystack.replaceAll(needle, needle + needle) != haystack
}
/**
 * Generate a random Double uniformly distributed in [0.0, 1.0).
 * NOTE: nondeterministic; uses the shared scala.util.Random singleton object.
 * @return ranDouble : random Double between 0 and 1
 */
def random(): Double = {
  val r : scala.util.Random = scala.util.Random
  val randouble : Double = r.nextDouble
  randouble
}
/**
 * Answer the supplied string's length; a null argument answers 0.
 * @param str : a String (may be null)
 * @return length of the supplied string, or 0 for null
 */
def length(str : String): Int = {
  Option(str).fold(0)(_.length)
}
/**
 * Accept a number of any type and format it in the specified manner.
 * NOTE: uses java.util.Formatter semantics via StringOps.format, with the JVM
 * default locale (decimal separators may vary by locale for %f patterns).
 * @param num : the number which is to be formatted
 * @param formatting : a standard format spec, e.g. "%.2f" for a 2-decimal-place float
 * @return the number rendered per the supplied format
 */
def formatNumber[T]( num : T, formatting : String) : String = {
  // format already yields a String; the original's trailing .toString was a no-op
  formatting.format(num)
}
/**
 * Print the two strings to the log. The first is some location or context information. The second
 * is the event description.
 *
 * @param severity a string describing log severity level ... any {error, warn, info, debug, trace};
 *        unrecognized severities fall through to trace
 * @param contextMsg a string describing the context of why this message is to be logged (or anything else for that matter)
 * @param eventMsg a string that describes what has actually happened
 * @param bool a Boolean that is returned as this function's result (to play into the pmml logic as desired)
 * @return bool (returned unchanged even when the other arguments are bogus)
 */
def logMsg(severity : String, contextMsg : String, eventMsg : String, bool : Boolean) : Boolean = {
  if (severity != null && contextMsg != null && eventMsg != null) {
    val sev : String = severity.toLowerCase.trim
    sev match {
      case "error" => logger.error(s"$contextMsg...$eventMsg")
      case "warn" => logger.warn(s"$contextMsg...$eventMsg")
      case "info" => logger.info(s"$contextMsg...$eventMsg")
      case "debug" => logger.debug(s"$contextMsg...$eventMsg")
      case "trace" => logger.trace(s"$contextMsg...$eventMsg")
      case _ => logger.trace(s"$contextMsg...$eventMsg")
    }
  } else {
    logger.error("LogMsg called with bogus arguments")
  }
  bool
}
/**
  Date and Time Functions
  */
/**
  Answer the number of days between Jan 1 of the supplied year and today, inclusive
  (the '+1' below makes the count include the current day). Uses the JVM default
  time zone via joda LocalDateTime.
  @param yr : a year of interest
  @return the number of days since that time and now
  */
def dateDaysSinceYear(yr: Int): Long = {
  val dt: org.joda.time.LocalDateTime = new org.joda.time.LocalDateTime(yr, 1, 1, 0, 0, 0)
  var now: org.joda.time.LocalDateTime = new org.joda.time.LocalDateTime()
  val dys: org.joda.time.Days = org.joda.time.Days.daysBetween(dt, now)
  val days : Long = dys.getDays+1
  days
}
/**
  Answer the number of seconds between Jan 1 of the supplied year and now.
  NOTE(review): adds (milliSecondsInDay / 1000) — i.e. one extra day's worth of
  seconds, mirroring the '+1 day' inclusion in dateDaysSinceYear. milliSecondsInDay
  is declared elsewhere in this file — confirm it equals 86,400,000.
  @param yr : a year of interest
  @return the number of seconds since that time and now
  */
def dateSecondsSinceYear(yr: Int): Long = {
  val dt: org.joda.time.LocalDateTime = new org.joda.time.LocalDateTime(yr, 1, 1, 0, 0, 0)
  var now: org.joda.time.LocalDateTime = new org.joda.time.LocalDateTime()
  val scs: org.joda.time.Seconds = org.joda.time.Seconds.secondsBetween(dt, now)
  val secs: Long = scs.getSeconds + (milliSecondsInDay / 1000)
  secs
}
/**
  Answer the number of seconds since midnight today (JVM default time zone).
  NOTE: the final term rounds to the nearest second (millis >= 500 adds one).
  @return the number of seconds since midnight today.
  */
def dateSecondsSinceMidnight(): Long = {
  var now: org.joda.time.LocalTime = new org.joda.time.LocalTime()
  val secs: Long = now.getHourOfDay() * 60 * 60 +
    now.getMinuteOfHour() * 60 +
    now.getSecondOfMinute() +
    (if (now.getMillisOfSecond() >= 500) 1 else 0)
  secs
}
/**
  Answer the number of milliseconds since midnight today.
  NOTE: derived from dateSecondsSinceMidnight(), so the value has only whole-second
  resolution (always a multiple of 1000).
  @return the number of milliseconds since midnight today.
  */
def dateMilliSecondsSinceMidnight(): Long = {
  dateSecondsSinceMidnight() * 1000
}
/**
  Answer the number of milliseconds since the midnight today.
  NOTE: behaves identically to dateMilliSecondsSinceMidnight (same whole-second resolution).
  @return the number of milliseconds since the midnight today
  */
def Timenow(): Long = {
  dateSecondsSinceMidnight() * 1000
}
/**
  Answer the number of seconds for the supplied millisecs value.
  NOTE: integer division truncates toward zero; sub-second remainder is discarded.
  @param milliSecs : number of millisecs
  @return the number of seconds
  */
def AsSeconds(milliSecs: Long): Long = {
  milliSecs / 1000
}
/**
  Answer the number of milliseconds since the epoch as of right now
  (equivalent to joda DateTime().getMillis at the moment of the call).
  @return the number of milliseconds since the epoch as of right now.
  */
def Now(): Long = {
  var now: org.joda.time.DateTime = new org.joda.time.DateTime()
  now.getMillis()
}
/**
 * Answer the number of millisecs from the epoch for the time numYrs ago.
 * @param numYrs : the number of years
 * @return the millisecs from the epoch for that time.
 */
def YearsAgo(numYrs: Int): Long = {
  val rightNow: org.joda.time.DateTime = new org.joda.time.DateTime()
  val someTimeAgo = rightNow.minusYears(numYrs)
  someTimeAgo.getMillis()
}
/**
 * Answer the number of millisecs numYrs before the supplied ISO 8601 compressed int date.
 * @param someDate an ISO8601-style date compressed into an integer (yyyymmdd — see toDateTime;
 *                 the previous doc wrongly described a time format here)
 * @param numYrs number of years to subtract
 * @return millisecs for someDate - numYrs
 */
def YearsAgo(someDate: Int, numYrs: Int): Long = {
  val dateAsMillisecs : Long = toDateTime(someDate).getMillis()
  val someDt : org.joda.time.DateTime = new org.joda.time.DateTime(dateAsMillisecs)
  val someTimeAgo = someDt.minusYears(numYrs)
  someTimeAgo.getMillis()
}
/**
 * Answer an ISO8601 compressed integer for numYrs before the supplied ISO 8601 compressed int date.
 * @param someDate an ISO8601-style date compressed into an integer (yyyymmdd)
 * @param numYrs number of years to subtract
 * @return ISO8601 date for someDate - numYrs
 */
def YearsAgoAsISO8601(someDate: Int, numYrs: Int): Int = {
  AsCompressedDate(YearsAgo(someDate, numYrs))
}
/**
 * Answer the number of millisecs from the epoch for the time numMos months ago.
 * @param numMos : the number of months
 * @return the millisecs from the epoch for that time
 */
def MonthsAgo(numMos: Int): Long = {
  val rightNow: org.joda.time.DateTime = new org.joda.time.DateTime()
  val someTimeAgo = rightNow.minusMonths(numMos)
  someTimeAgo.getMillis()
}
/**
 * Answer the number of millisecs numMos months before the supplied ISO 8601 compressed int date.
 * @param someDate an ISO8601-style date compressed into an integer (yyyymmdd — see toDateTime;
 *                 the previous doc wrongly described a time format here)
 * @param numMos number of months to subtract
 * @return millisecs for someDate - numMos
 */
def MonthsAgo(someDate: Int, numMos: Int): Long = {
  val dateAsMillisecs : Long = toDateTime(someDate).getMillis()
  val someDt : org.joda.time.DateTime = new org.joda.time.DateTime(dateAsMillisecs)
  val someTimeAgo = someDt.minusMonths(numMos)
  someTimeAgo.getMillis()
}
/**
 * Answer an ISO8601 compressed integer for numMos months before the supplied ISO 8601 compressed int date.
 * @param someDate an ISO8601-style date compressed into an integer (yyyymmdd)
 * @param numMos number of months to subtract
 * @return ISO8601 date for someDate - numMos
 */
def MonthsAgoAsISO8601(someDate: Int, numMos: Int): Int = {
  AsCompressedDate(MonthsAgo(someDate, numMos))
}
/**
 * Answer the number of millisecs from the epoch for the time numWks weeks ago.
 * @param numWks an integer
 * @return the time numWks ago in millisecs from the epoch
 */
def WeeksAgo(numWks: Int): Long = {
  val rightNow: org.joda.time.DateTime = new org.joda.time.DateTime()
  val someTimeAgo = rightNow.minusWeeks(numWks)
  someTimeAgo.getMillis()
}
/**
 * Answer the number of millisecs numWks weeks before the supplied ISO 8601 compressed int date.
 * @param someDate an ISO8601-style date compressed into an integer (yyyymmdd — see toDateTime;
 *                 the previous doc wrongly described a time format here)
 * @param numWks number of weeks to subtract
 * @return millisecs for someDate - numWks
 */
def WeeksAgo(someDate: Int, numWks: Int): Long = {
  val dateAsMillisecs : Long = toDateTime(someDate).getMillis()
  val someDt : org.joda.time.DateTime = new org.joda.time.DateTime(dateAsMillisecs)
  val someTimeAgo = someDt.minusWeeks(numWks)
  someTimeAgo.getMillis()
}
/**
 * Answer an ISO8601 compressed integer for numWks weeks before the supplied ISO 8601 compressed int date.
 * @param someDate an ISO8601-style date compressed into an integer (yyyymmdd)
 * @param numWks number of weeks to subtract
 * @return ISO8601 date for someDate - numWks
 */
def WeeksAgoAsISO8601(someDate: Int, numWks: Int): Int = {
  AsCompressedDate(WeeksAgo(someDate, numWks))
}
/**
 * Answer the number of millisecs a numDays ago.
 * @param numDays number of days to subtract
 * @return the time in millisecs from the epoch for the time a numDays ago
 */
def DaysAgo(numDays: Int): Long = {
  val rightNow: org.joda.time.DateTime = new org.joda.time.DateTime()
  val someTimeAgo = rightNow.minusDays(numDays)
  someTimeAgo.getMillis()
}
/**
 * Answer the number of millisecs numDays before the supplied ISO 8601 compressed int date.
 * @param someDate an ISO8601-style date compressed into an integer (yyyymmdd — see toDateTime;
 *                 the previous doc wrongly described a time format here)
 * @param numDays number of days to subtract
 * @return millisecs for someDate - numDays
 */
def DaysAgo(someDate: Int, numDays: Int): Long = {
  val dateAsMillisecs : Long = toDateTime(someDate).getMillis()
  val someDt : org.joda.time.DateTime = new org.joda.time.DateTime(dateAsMillisecs)
  val someTimeAgo = someDt.minusDays(numDays)
  someTimeAgo.getMillis()
}
/**
 * Answer an ISO8601 compressed integer for numDays before the supplied ISO 8601 compressed int date.
 * @param someDate an ISO8601-style date compressed into an integer (yyyymmdd)
 * @param numDays number of days to subtract
 * @return ISO8601 int for someDate - numDays
 */
def DaysAgoAsISO8601(someDate: Int, numDays: Int): Int = {
  AsCompressedDate(DaysAgo(someDate, numDays))
}
/** Coerce the yyyymmdd ISO8601 type compressed in integer to a DateTime.
 *
 * @param yyyymmdd ISO8601 type int compressed into integer (yr * 10000 + mo * 100 + day)
 * @return joda DateTime at midnight of that date (JVM default time zone)
 *
 * NOTE: This function is currently not exported as the joda types are not available (at least for now) in the metadata.
 */
def toDateTime(yyyymmdd: Int): DateTime = {
  val yyyy: Int = yyyymmdd / 10000
  val mm: Int = (yyyymmdd % 10000) / 100
  val day: Int = yyyymmdd % 100
  val someDate: DateTime = new DateTime(yyyy, mm, day, 0, 0)
  someDate
}
/**
 * Coerce the YYDDD Julian date to millisecs. 21st century
 * dates are assumed ("20" is prefixed before parsing). If a bad Julian value is
 * supplied, an error message is logged and the epoch (i.e., 0) is returned.
 * Parsing uses the Julian chronology, not the default ISO chronology.
 * NOTE(review): the 'yy >= 1' validation rejects year 2000 (yy == 00) — confirm intended.
 * @param yyddd 21st century julian date integer
 * @return Long value of millisecs from epoch (0 on validation or parse failure).
 */
def toMillisFromJulian(yyddd: Int): Long = {
  val ydStr : String = yyddd.toString
  /** pad a 4-digit value so yy keeps its leading zero (e.g. 1060 -> 01060) */
  val yydddStr : String = if (ydStr.size == 4) ("0" + ydStr) else ydStr
  val reasonable : Boolean = if (yydddStr.length == 5) {
    val yy : Int = yydddStr.slice(0, 2).toInt
    val ddd : Int = yydddStr.slice(2,5).toInt
    (yy >= 1 && yy <= 99 && ddd >= 1 && ddd <= 366)
  } else {
    false
  }
  val millis : Long = if (! reasonable) {
    logger.error(s"toMillisFromJulian(yyddd = $yydddStr) ... malformed Julian date... expect YYDDD where YY>0 && YY <= 99 && DDD>0 && DDD<366")
    0
  } else {
    val formatter : DateTimeFormatter = DateTimeFormat.forPattern("yyyyDDD").withChronology(JulianChronology.getInstance)
    try {
      val lcd : DateTime = formatter.parseDateTime("20" + yydddStr);
      lcd.getMillis()
    } catch {
      case iae:IllegalArgumentException => {
        logger.error(s"Unable to parse '20 + $yydddStr' with pattern - 'yyyyDDD'")
        0
      }
    }
  }
  millis
}
/**
 * Convert a time compressed into a decimal integer to millisecs since midnight.
 * Format: OHHMMSSCC, WHERE CC REPRESENTS HUNDREDTHS OF A SECOND.
 * @param compressedTime the OHHMMSSCC-encoded time, an Int
 * @return millisecs since midnight, a Long
 */
def CompressedTimeHHMMSSCC2MilliSecs(compressedTime: Int): Long = {
  val hundredths = compressedTime % 100
  val ss = (compressedTime / 100) % 100
  val mm = (compressedTime / 10000) % 100
  val hh = (compressedTime / 1000000) % 100
  val secondsOfDay = ((hh * 60) + mm) * 60 + ss
  secondsOfDay * 1000L + hundredths * 10
}
/**
 * Convert a time compressed into a decimal integer to seconds since midnight,
 * rounded to the nearest second (>= half a second rounds up).
 * Format: OHHMMSSCC, WHERE CC REPRESENTS HUNDREDTHS OF A SECOND.
 * @param compressedTime the OHHMMSSCC-encoded time, an Int
 * @return seconds since midnight, an Int
 */
def CompressedTimeHHMMSSCC2Secs(compressedTime: Int): Int = {
  val hundredths = compressedTime % 100
  val ss = (compressedTime / 100) % 100
  val mm = (compressedTime / 10000) % 100
  val hh = (compressedTime / 1000000) % 100
  // hundredths >= 50 is equivalent to the original (hundredths * 10) >= 500 check
  val roundUp = if (hundredths >= 50) 1 else 0
  ((hh * 60) + mm) * 60 + ss + roundUp
}
/**
  Convert millisecs to an ISO8601-style date compressed into an integer
  (yr * 10000 + mo * 100 + day, i.e. yyyymmdd; JVM default time zone).
  @param milliSecs some time since the epoch expressed in millisecs
  @return an integer with the date encoded in decimal form
  */
def AsCompressedDate(milliSecs: Long): Int = {
  val dt: LocalDate = new LocalDate(milliSecs)
  val dtAtMidnight: DateTime = dt.toDateTimeAtStartOfDay
  val yr: Int = dtAtMidnight.year().get()
  val mo: Int = dtAtMidnight.monthOfYear().get()
  val day: Int = dtAtMidnight.dayOfMonth().get()
  val compressedDate: Int = yr * 10000 + mo * 100 + day
  compressedDate
}
/**
  Extract the month from the ISO8601 compressed date (yyyymmdd).
  FIX: this was previously computed as (dt % 1000) / 100, which drops the tens digit
  of the month — e.g. 20151231 yielded 2 instead of 12. toDateTime (and the yyyymmdd
  encoding produced by AsCompressedDate) use (dt % 10000) / 100.
  @param dt an Int in ISO8601 yyyymmdd form
  @return the month (1..12) portion of that date
  */
def MonthFromISO8601Int(dt: Int): Int = {
  val mm: Int = (dt % 10000) / 100
  mm
}
/**
  Extract the year from the ISO8601 compressed date (yyyymmdd).
  @param dt an Int in ISO8601 yyyymmdd form
  @return the year portion of that date
  */
def YearFromISO8601Int(dt: Int): Int = {
  dt / 10000
}
/**
  Extract the day of the month from the ISO8601 compressed date (yyyymmdd).
  @param dt an Int in ISO8601 yyyymmdd form
  @return the day-of-month portion of that date
  */
def DayOfMonthFromISO8601Int(dt: Int): Int = {
  dt % 100
}
/**
  Calculate age in whole years from a yyyymmdd ISO8601-style birthdate as of right now.
  FIX: the month was previously extracted with (yyyymmdd % 1000) / 100, which drops the
  tens digit of the month (the same defect as the old MonthFromISO8601Int) — an October
  through December birthdate produced a wrong month (e.g. Dec -> 2), a wrong age, or an
  IllegalFieldValueException (e.g. Feb 31 from a Dec 31 birthdate).
  @param yyyymmdd a birth date Int in ISO8601 yyyymmdd form
  @return age in whole years
  */
def AgeCalc(yyyymmdd: Int): Int = {
  val yyyy: Int = yyyymmdd / 10000
  val mm: Int = (yyyymmdd % 10000) / 100
  val day: Int = yyyymmdd % 100
  val birthDate: LocalDate = new LocalDate(yyyy, mm, day)
  val age: Int = Years.yearsBetween(birthDate, new LocalDate).getYears
  age
}
/**
  Convert the supplied iso8601 date integer (yyyymmdd) to a string rendered with the
  supplied joda-time pattern, according to these format codes:
  {{{
  Symbol  Meaning                      Presentation  Examples
  ------  -------                      ------------  -------
  G       era                          text          AD
  C       century of era (>=0)         number        20
  Y       year of era (>=0)            year          1996
  x       weekyear                     year          1996
  w       week of weekyear             number        27
  e       day of week                  number        2
  E       day of week                  text          Tuesday; Tue
  y       year                         year          1996
  D       day of year                  number        189
  M       month of year                month         July; Jul; 07
  d       day of month                 number        10
  a       halfday of day               text          PM
  K       hour of halfday (0~11)       number        0
  h       clockhour of halfday (1~12)  number        12
  H       hour of day (0~23)           number        0
  k       clockhour of day (1~24)      number        24
  m       minute of hour               number        30
  s       second of minute             number        55
  S       fraction of second           number        978
  z       time zone                    text          Pacific Standard Time; PST
  Z       time zone offset/id          zone          -0800; -08:00; America/Los_Angeles
  '       escape for text              delimiter
  ''      single quote                 literal
  example: dateFmt("yyyy-MMM-dd", 20140401) produces 2014-Apr-01
  }}}
  @param fmtStr: a String specifying the desired format.
  @param yyyymmdds: an Int encoded with iso8601 date (yyyymmdd)
  @return string rep of this date
  */
def iso8601DateFmt(fmtStr : String, yyyymmdds : Int): String = {
  val dateTime : DateTime = toDateTime(yyyymmdds)
  val fmt : DateTimeFormatter = DateTimeFormat.forPattern(fmtStr);
  val str : String = fmt.print(dateTime);
  str
}
/**
  Answer a String for the supplied timestamp formatted according to the format string
  argument presented.
  @param fmtStr - instructions on how to format the string. @see iso8601DateFmt for format info
  @param timestamp - the number of millisecs since epoch
  @return a String rendering of the timestamp (the previous doc incorrectly said a Long)
  */
def timestampFmt(fmtStr : String, timestamp : Long): String = {
  val dateTime : DateTime = new DateTime(timestamp);
  val fmt : DateTimeFormatter = DateTimeFormat.forPattern(fmtStr);
  val str : String = fmt.print(dateTime);
  str
}
/**
  Answer the number of millisecs from the epoch for the supplied string that presumably
  has the supplied format. If parse fails (IllegalArgumentException caught),
  the epoch (0) is returned and the failure is logged.
  @param fmtStr - instructions on how to parse the string. @see iso8601DateFmt for format info
  @param timestampStr - the string to parse
  @return a Long representing the timestamp as millisecs since the epoch (1970 based)
  */
def timeStampFromStr(fmtStr : String, timestampStr : String): Long = {
  val fmt : DateTimeFormatter = DateTimeFormat.forPattern(fmtStr);
  try {
    val dateTime : DateTime = fmt.parseDateTime(timestampStr);
    val millis : Long = dateTime.getMillis
    millis
  } catch {
    case iae:IllegalArgumentException => {
      val stackTrace = StackTrace.ThrowableTraceString(iae)
      logger.error(s"Unable to parse '$timestampStr' with pattern - '$fmtStr'")
      logger.error("\\nStackTrace:"+stackTrace)
      0
    }
  }
}
/**
  Answer the number of millisecs from the epoch for the supplied string that presumably
  has one of the supplied formats found in fmtStrArray. If parse fails (IllegalArgumentException caught),
  the epoch (0) is returned; a null/empty format array or null timestamp also answers 0.
  NOTE(review): the error message interpolates fmtStrArray.toString, which prints the
  array reference rather than its contents — consider mkString (left unchanged here).
  @param fmtStrArray - candidate patterns tried in order. @see iso8601DateFmt for format info
  @param timestampStr - the string to parse
  @return a Long representing the timestamp as millisecs since the epoch (1970 based)
  */
def timeStampFromStr(fmtStrArray : Array[String], timestampStr : String): Long = {
  //val fmt : DateTimeFormatter = DateTimeFormat.forPattern(fmtStr);
  val millis : Long = if (fmtStrArray != null && fmtStrArray.size > 0 && timestampStr != null) {
    val parsers : Array[DateTimeParser] = fmtStrArray.map (fmt => {
      DateTimeFormat.forPattern(fmt).getParser()
    })
    val formatter : DateTimeFormatter = new DateTimeFormatterBuilder().append( null, parsers ).toFormatter();
    try {
      val dateTime : DateTime = formatter.parseDateTime(timestampStr)
      val msecs : Long = dateTime.getMillis
      msecs
    } catch {
      case iae:IllegalArgumentException => {
        val stackTrace = StackTrace.ThrowableTraceString(iae)
        logger.error(s"Unable to parse '$timestampStr' with any of the patterns - '${fmtStrArray.toString}'")
        logger.error("\\nStackTrace:"+stackTrace)
        0
      }
    }
  } else {
    0
  }
  millis
}
/**
  Answer the number of millisecs from the epoch for the supplied string that is in one of
  the following <b>builtin</b> formats:
  """
  Pattern                     Example
  "yyyy-MM-dd HH:mm:ss:SSS"   2015-02-28 14:02:31:222
  "yyyy-MM-dd HH:mm:ss"       2015-02-28 14:02:31
  "MM/dd/yy HH:mm:ss"         04/15/15 23:59:59
  "dd-MM-yyyy HH:mm:ss"       04/15/2015 23:59:59
  "dd-MM-yyyy HH:mm:ss:SSS"   04/15/15 23:59:59:999
  "dd-MMM-yyyy HH:mm:ss"      15-Apr-2015 23:59:59
  "dd-MM-yyyy HH:mm:ss:SSS"   15-04-2015 23:59:59:999
  """
  Should your timestamp not be one of these, use the more general forms of this function that allow
  you to supply one or more formats. @see timeStampFromStr(fmtStr : String, timestampStr : String): Long
  and @see timeStampFromStr(fmtStrArray : Array[String], timestampStr : String): Long for details.
  @param timestampStr - the string to parse
  @return a Long representing the timestamp as millisecs since the epoch (1970 based)
  */
def timeStampFromString(timestampStr : String): Long = {
  DateTimeHelpers.timeStampFromString(timestampStr) /** use the builtin helpers in PmmlRuntimeDecls */
}
/**
  Answer the number of millisecs from the epoch for the supplied date string that is in one of
  the following <b>builtin</b> formats:
  """
  Pattern      Example
  yyyy-MM-dd   2015-04-15
  yyyy-MMM-dd  2015-Apr-15
  MM/dd/yy     04/15/15
  MMM-dd-yy    Apr-15-15
  dd-MM-yyyy   15-04-2015
  dd-MMM-yyyy  15-Apr-2015
  """
  Should your date not be one of these, use the more general forms of this function that allow
  you to supply one or more formats. @see timeStampFromStr(fmtStr : String, timestampStr : String): Long
  and @see timeStampFromStr(fmtStrArray : Array[String], timestampStr : String): Long and
  @see dateFromStr(fmtStr : String, timestampStr : String): Long for details.
  @param timestampStr - the string to parse
  @return a Long representing the timestamp as millisecs since the epoch (1970 based)
  */
def dateFromString(timestampStr : String): Long = {
  DateTimeHelpers.dateFromString(timestampStr) /** use the builtin helpers in PmmlRuntimeDecls */
}
/**
  Answer the number of millisecs from the epoch for the supplied time string that is in one of
  the following <b>builtin</b> formats:
  """
  Pattern         Example
  "HH:mm:ss"      23:59:59
  "HH:mm:ss:SSS"  23:59:59:999
  "h:mm:ss"       12:45:59
  "h:mm:ss aa"    12:45:59 AM
  """
  Should your time not be formatted like one of these, use the more general forms of this function that allow
  you to supply one or more formats. @see timeStampFromStr(fmtStr : String, timestampStr : String): Long
  and @see timeStampFromStr(fmtStrArray : Array[String], timestampStr : String): Long for details. @see
  timeFromStr(fmtStr : String, timestampStr : String): Long ... also available.
  @param timeStr - the string to parse
  @return a Long as produced by DateTimeHelpers.timeFromString (declared elsewhere)
  */
def timeFromString(timeStr : String): Long = {
  DateTimeHelpers.timeFromString(timeStr) /** use the builtin helpers in PmmlRuntimeDecls */
}
/**
  Answer the number of millisecs from the epoch for the date portion (time-of-day zeroed)
  of the supplied string that presumably has the supplied format. If parse fails
  (IllegalArgumentException caught), the epoch (0) is returned and the failure is logged.
  @param fmtStr - instructions on how to parse the string. @see iso8601DateFmt for format info
  @param timestampStr - the string to parse
  @return a Long representing the date as millisecs since the epoch (1970 based)
  */
def dateFromStr(fmtStr : String, timestampStr : String): Long = {
  val fmt : DateTimeFormatter = DateTimeFormat.forPattern(fmtStr);
  try {
    val dateTime : DateTime = fmt.parseDateTime(timestampStr);
    val dt : DateTime = new DateTime(dateTime.getYear, dateTime.getMonthOfYear, dateTime.getDayOfMonth, 0, 0)
    val millis : Long = dt.getMillis
    millis
  } catch {
    case iae:IllegalArgumentException => {
      val stackTrace = StackTrace.ThrowableTraceString(iae)
      logger.error(s"Unable to parse '$timestampStr' with pattern - '$fmtStr'")
      logger.error("\\nStackTrace:"+stackTrace)
      0
    }
  }
}
/** Answer the java epoch (1970-01-01T00:00) as a joda LocalDateTime. */
def javaEpoch : LocalDateTime = {
  new LocalDateTime(1970, 1, 1, 0, 0)
}
/**
  Answer the wall time parsed from the supplied string with the supplied format.
  NOTE(review): the original doc claimed millisecs from the epoch, but this delegates to
  dateSecondsSinceMidnight(fmtStr, timestampStr) and therefore answers SECONDS since
  midnight (0 on parse failure) — confirm which contract callers expect.
  @param fmtStr - instructions on how to parse the string. @see iso8601DateFmt for format info
  @param timestampStr - the string to parse
  @return seconds since midnight for the parsed wall time
  */
def timeFromStr(fmtStr : String, timestampStr : String): Long = {
  dateSecondsSinceMidnight(fmtStr, timestampStr)
}
/**
Answer the number of seconds since midnight for the supplied time (HH:mm:ss:SSS) portion given in the supplied format.
@param fmtStr - instructions on how to parse the string. @see ISO860DateFmt for format info
@param timestampStr - the string to parse
@return a Long representing the seconds since midnight for the time portion of the supplied timestamp
*/
def dateSecondsSinceMidnight(fmtStr : String, timestampStr : String): Long = {
val fmt : DateTimeFormatter = DateTimeFormat.forPattern(fmtStr);
try {
val dateTime : DateTime = fmt.parseDateTime(timestampStr);
val hrOfDay : Int = dateTime.getHourOfDay
val minOfHr : Int = dateTime.getMinuteOfHour
val minOfDay : Int = dateTime.getMinuteOfDay
val secOfMin : Int = dateTime.getSecondOfMinute
val secOfDay : Int = dateTime.getSecondOfDay
val tm : LocalDateTime = new LocalDateTime(1970, 1, 1, hrOfDay, minOfHr, secOfMin, dateTime.getMillisOfSecond)
val seconds : Long = new Duration(javaEpoch.toDateTime.getMillis, tm.toDateTime.getMillis).getStandardSeconds()
seconds
} catch {
case iae:IllegalArgumentException => {
val stackTrace = StackTrace.ThrowableTraceString(iae)
logger.error(s"Unable to parse '$timestampStr' with pattern - '$fmtStr'")
logger.error("\\nStackTrace:"+stackTrace)
0
}
}
}
/**
Answer the number of seconds since midnight for the supplied time (HH:mm:ss:SSS) portion given in the supplied Long
@param timestamp - the string to parse
@return a Long representing the seconds since midnight for the time portion of the supplied timestamp
*/
def dateSecondsSinceMidnight(timestamp : Long): Long = {
val dateTime : DateTime = new DateTime(timestamp);
val hrOfDay : Int = dateTime.getHourOfDay
val minOfHr : Int = dateTime.getMinuteOfHour
val minOfDay : Int = dateTime.getMinuteOfDay
val secOfMin : Int = dateTime.getSecondOfMinute
val secOfDay : Int = dateTime.getSecondOfDay
val tm : LocalDateTime = new LocalDateTime(1970, 1, 1, hrOfDay, minOfHr, secOfMin, dateTime.getMillisOfSecond)
val seconds : Long = new Duration(javaEpoch.toDateTime.getMillis, tm.toDateTime.getMillis).getStandardSeconds()
seconds
}
    val milliSecondsInSecond : Long = 1000                    // ms per second
    val milliSecondsInMinute : Long = 1000 * 60               // ms per minute
    val milliSecondsInHour : Long = 1000 * 60 * 60            // ms per hour
    val milliSecondsInDay : Long = 1000 * 60 * 60 * 24        // ms per day
    val milliSecondsInWeek : Long = 1000 * 60 * 60 * 24 * 7   // ms per 7-day week (604,800,000 — fits in Int before widening)
/**
Answer the number of milliseconds between the two time expressions (millisecs since Epoch)
@param time1 : a timestamp
@param time2 : a timestamp
@param inclusive (optional default is false) : when true +1 to the difference
@return number of milliseconds between the timestamps
*/
def millisecsBetween(time1 : Long, time2 : Long, inclusive : Boolean = false) : Long = {
val diff : Long = abs(time1 - time2) + (if (inclusive) 1 else 0)
diff
}
/**
Answer the number of seconds between the two time expressions (millisecs since Epoch)
@param time1 : a timestamp
@param time2 : a timestamp
@param inclusive (optional default is false) : when true +1 to the difference
@return number of seconds between the timestamps
*/
def secondsBetween(time1 : Long, time2 : Long, inclusive : Boolean = false) : Long = {
val diff : Long = abs(time1 - time2) + (if (inclusive) 1 else 0)
val nominalSeconds : Long = diff / milliSecondsInSecond
val rem : Long = diff % milliSecondsInSecond
val seconds : Long = nominalSeconds + (if (rem >= (milliSecondsInSecond / 2)) 1 else 0)
seconds
}
/**
Answer the number of minutes between the two time expressions (millisecs since Epoch). Partial hours are rounded
to the nearest integer value.
@param time1 : a timestamp
@param time2 : a timestamp
@param inclusive (optional default is false) : when true +1 to the difference
@return number of minutes between the timestamps
*/
def minutesBetween(time1 : Long, time2 : Long, inclusive : Boolean = false) : Long = {
val diff : Long = abs(time1 - time2) + (if (inclusive) 1 else 0)
val nominalMinutes : Long = diff / milliSecondsInMinute
val rem : Long = diff % milliSecondsInMinute
val minutes : Long = nominalMinutes + (if (rem >= (milliSecondsInMinute / 2)) 1 else 0)
minutes
}
/**
Answer the number of hours between the two time expressions (millisecs since Epoch). Partial hours are rounded
to the nearest integer value.
@param time1 : a timestamp
@param time2 : a timestamp
@param inclusive (optional default is false) : when true +1 to the difference
@return number of hours between the timestamps
*/
def hoursBetween(time1 : Long, time2 : Long, inclusive : Boolean = false) : Long = {
val diff : Long = abs(time1 - time2) + (if (inclusive) 1 else 0)
val nominalHours : Long = diff / milliSecondsInHour
val rem : Long = diff % milliSecondsInHour
val hours : Long = nominalHours + (if (rem >= (milliSecondsInHour / 2)) 1 else 0)
hours
}
/**
Answer the number of days between the two time expressions (millisecs since Epoch). Partial days are rounded
to the nearest integer value.
@param time1 : a timestamp
@param time2 : a timestamp
@param inclusive (optional default is false) : when true +1 to the difference
@return number of days between the timestamps
*/
def daysBetween(time1 : Long, time2 : Long, inclusive : Boolean = false) : Long = {
val diff : Long = abs(time1 - time2) + (if (inclusive) 1 else 0)
val nominalDays : Long = diff / milliSecondsInDay
val rem : Long = diff % milliSecondsInDay
val days : Long = nominalDays + (if (rem >= (milliSecondsInDay / 2)) 1 else 0)
days
}
/**
Answer the number of weeks between the two time expressions (millisecs since Epoch). Partial weeks are rounded
to the nearest integer value. 7 day week assumed.
@param time1 : a timestamp
@param time2 : a timestamp
@param inclusive (optional default is false) : when true +1 to the difference
@return number of weeks between the timestamps
*/
def weeksBetween(time1 : Long, time2 : Long, inclusive : Boolean = false) : Long = {
val diff : Long = abs(time1 - time2) + (if (inclusive) 1 else 0)
val nominalWeeks : Long = diff / milliSecondsInWeek
val rem : Long = diff % milliSecondsInWeek
val weeks : Long = nominalWeeks + (if (rem >= (milliSecondsInWeek / 2)) 1 else 0)
weeks
}
    /**
      Answer the number of whole months between the two time expressions (millisecs since Epoch),
      computed via joda Months.monthsBetween on the ordered pair.
      NOTE(review): unlike the other *Between helpers, the `inclusive` flag is accepted here but
      never applied — confirm whether callers expect the documented "+1" behavior.
      @param time1 : a timestamp
      @param time2 : a timestamp
      @param inclusive (optional default is false) : currently ignored — see NOTE
      @return number of months between the timestamps
      */
    def monthsBetween(time1 : Long, time2 : Long, inclusive : Boolean = false) : Long = {
        val t1 : Long = if (time1 <= time2) time1 else time2
        val t2 : Long = if (time1 <= time2) time2 else time1
        val dt1 : DateTime = new DateTime(t1)
        val dt2 : DateTime = new DateTime(t2)
        val m : Months = Months.monthsBetween(dt1, dt2)
        val months : Long = m.getMonths
        months
    }
    /**
      Answer the number of whole years between the two time expressions (millisecs since Epoch),
      computed via joda Years.yearsBetween on the ordered pair.
      NOTE(review): unlike the other *Between helpers, the `inclusive` flag is accepted here but
      never applied — confirm whether callers expect the documented "+1" behavior.
      @param time1 : a timestamp
      @param time2 : a timestamp
      @param inclusive (optional default is false) : currently ignored — see NOTE
      @return number of years between the timestamps
      */
    def yearsBetween(time1 : Long, time2 : Long, inclusive : Boolean = false) : Long = {
        val t1 : Long = if (time1 <= time2) time1 else time2
        val t2 : Long = if (time1 <= time2) time2 else time1
        val dt1 : DateTime = new DateTime(t1)
        val dt2 : DateTime = new DateTime(t2)
        val y : Years = Years.yearsBetween(dt1, dt2)
        val years : Long = y.getYears
        years
    }
/**
Create an Array of String tuples from the arguments where the 'left' string
is _._1 and the elements of the 'right' array are _._2 in the tuple array returned.
@param left the first tuple in each array element to be returned
@param right each element becomes the second element in the returned array, each matched with 'left'
@return an Array of String tuples
*/
def MakePairs(left: String, right: Array[String]): Array[(String, String)] = {
if (left == null || right == null || right.size == 0 || left.length == 0)
return new Array[(String, String)](0)
right.filter(v => v != null && v.length > 0).map(v => (left, v))
}
/**
Create an Array of String tuples from the arguments where the 'left' string
is one of the elements and the 'right' Array elements are each the other one of the pair. This is similar to
MakePairs except the left and right candidates are compared and the lesser value becomes the
_._1 and the larger _._2.
@param left the first tuple in each array element to be returned
@param right each element becomes the second element in the returned array, each matched with 'left'
@return an Array of String tuples
*/
def MakeOrderedPairs(left: String, right: Array[String]): Array[(String, String)] = {
if (left == null || right == null || right.size == 0 || left.length == 0)
return new Array[(String, String)](0)
right.filter(v => v != null && v.length > 0).map(v => {
if (left.compareTo(v) > 0)
(v, left)
else
(left, v)
})
}
/**
Create an Array of String tuples from the arguments where the 'left' string
is one of the elements and the 'right' ArrayBuffer elements are each the other one of the pair.
This is similar to MakePairs except the left and right candidates are compared and the
lesser value becomes the _._1 and the larger _._2.
@param left the first tuple in each array element to be returned
@param right each element becomes the second element in the returned array, each matched with 'left'
@return an Array of String tuples
*/
def MakeOrderedPairs(left: String, right: ArrayBuffer[String]): Array[(String, String)] = {
if (left == null || right == null || right.size == 0 || left.length == 0)
return new Array[(String, String)](0)
right.filter(v => v != null && v.length > 0).map(v => {
if (left.compareTo(v) > 0)
(v, left)
else
(left, v)
}).toArray
}
/**
Answer an array that takes an Array of String tuples and returns an Array of String utilizing
the supplied separator as the delimiter between the tuple elements.
@param arr an Array of String tuples
@return an Array of String delimited by the supplied separator
*/
def MakeStrings(arr: Array[(String, String)], separator: String): Array[String] = {
if (arr == null || arr.size == 0)
return new Array[String](0)
arr.filter(v => v != null).map(v => ("(" + v._1 + separator + v._2 + ")"))
}
/**
Coerce the supplied array to a set.
@param arr an Array[T]
@return an Set[T]
*/
def ToSet[T: ClassTag](arr: Array[T]): Set[T] = {
if (arr == null || arr.size == 0)
return Set[T]().toSet
arr.toSet
}
/**
Coerce the supplied array buffer to a set.
@param arr an ArrayBuffer[T]
@return an Set[T]
*/
def ToSet[T: ClassTag](arr: ArrayBuffer[T]): Set[T] = {
if (arr == null || arr.size == 0)
return Set[T]().toSet
arr.toSet
}
/**
Coerce the supplied queue to a set.
@param q a Queue[T]
@return n Set[T]
*/
def ToSet[T: ClassTag](q: Queue[T]): Set[T] = {
if (q == null || q.size == 0)
return Set[T]().toSet
q.toSet
}
/**
Coerce the supplied List to a set.
@param l a List[T]
@return n Set[T]
*/
def ToSet[T: ClassTag](l: List[T]): Set[T] = {
if (l == null || l.size == 0)
return Set[T]().toSet
l.toSet
}
/**
Coerce the supplied mutable set to an Array.
@param set a scala.collection.mutable.Set[T]
@return an Array[T]
*/
def ToArray[T: ClassTag](set: MutableSet[T]): Array[T] = {
if (set == null || set.size == 0)
return Array[T]()
set.toArray
}
/**
Coerce the supplied set to an Array.
@param set a scala.collection.immutable.Set[T]
@return an Array[T]
*/
def ToArray[T: ClassTag](set: Set[T]): Array[T] = {
if (set == null || set.size == 0)
return Array[T]()
set.toArray
}
/**
Coerce the supplied array buffer to an Array.
@param arr a scala.collection.mutable.ArrayBuffer[T]
@return an Array[T]
*/
def ToArray[T: ClassTag](arr: ArrayBuffer[T]): Array[T] = {
if (arr == null || arr.size == 0)
return Array[T]()
arr.toArray
}
/**
Coerce the supplied array to an Array.
@param arr a scala.Array[T]
@return an Array[T]
*/
def ToArray[T: ClassTag](arr: Array[T]): Array[T] = {
if (arr == null || arr.size == 0)
return Array[T]()
arr.toArray
}
/**
Coerce the supplied SortedSet to an Array.
@param set a scala.collection.immutable.SortedSet[T]
@return an Array[T]
*/
def ToArray[T: ClassTag](set: SortedSet[T]): Array[T] = {
if (set == null || set.size == 0)
return Array[T]()
set.toArray
}
/**
Coerce the supplied TreeSet to an Array.
@param ts a scala.collection.mutable.TreeSet[T]
@return an Array[T]
*/
def ToArray[T: ClassTag](ts: TreeSet[T]): Array[T] = {
if (ts == null || ts.size == 0)
return Array[T]()
ts.toArray
}
/**
Coerce the supplied List to an Array.
@param l a scala.collection.mutable.List[T]
@return an Array[T]
*/
def ToArray[T: ClassTag](l: List[T]): Array[T] = {
if (l == null || l.size == 0)
return Array[T]()
l.toArray
}
/**
Coerce the supplied Queue to an Array.
@param q a scala.collection.mutable.Queue[T]
@return an Array[T]
*/
def ToArray[T: ClassTag](q: Queue[T]): Array[T] = {
if (q == null || q.size == 0)
return Array[T]()
q.toArray
}
/**
*
* Suppress stack to array coercion until Stack based types are supported in the MdMgr ....
*
* def ToArray[T : ClassTag](stack: Stack[T]): Array[T] = {
* if (stack == null || stack.size == 0)
* return Array[T]()
* stack.toArray
* }
*
* def ToArray(stack: Stack[Any]): Array[Any] = {
* if (stack == null || stack.size == 0)
* return Array[Any]()
* stack.toArray
* }
*/
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple1[Any]): Array[Any] = {
tuple.productIterator.toArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple2[Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfFloat(tuple: Tuple2[Any, Any]): Array[Float] = {
val arr: Array[Any] = tuple.productIterator.toArray
val fArray: Array[Float] = arr.map(itm => if (itm.isInstanceOf[Float] || itm.isInstanceOf[Int] || itm.isInstanceOf[Long]) itm.asInstanceOf[Float] else 0)
fArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfDouble(tuple: Tuple2[Any, Any]): Array[Double] = {
val arr: Array[Any] = tuple.productIterator.toArray
val dArray: Array[Double] = arr.map(itm => if (itm.isInstanceOf[Double] || itm.isInstanceOf[Float] || itm.isInstanceOf[Int] || itm.isInstanceOf[Long]) itm.asInstanceOf[Double] else 0)
dArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfInt(tuple: Tuple2[Any, Any]): Array[Int] = {
val arr: Array[Any] = tuple.productIterator.toArray
val iArray: Array[Int] = arr.map(itm => if (itm.isInstanceOf[Int]) itm.asInstanceOf[Int] else 0)
iArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple2[Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple3[Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfFloat(tuple: Tuple3[Any, Any, Any]): Array[Float] = {
val arr: Array[Any] = tuple.productIterator.toArray
val fArray: Array[Float] = arr.map(itm => if (itm.isInstanceOf[Float] || itm.isInstanceOf[Int] || itm.isInstanceOf[Long]) itm.asInstanceOf[Float] else 0)
fArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfDouble(tuple: Tuple3[Any, Any, Any]): Array[Double] = {
val arr: Array[Any] = tuple.productIterator.toArray
val dArray: Array[Double] = arr.map(itm => if (itm.isInstanceOf[Double] || itm.isInstanceOf[Float] || itm.isInstanceOf[Int] || itm.isInstanceOf[Long]) itm.asInstanceOf[Double] else 0)
dArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfInt(tuple: Tuple3[Any, Any, Any]): Array[Int] = {
val arr: Array[Any] = tuple.productIterator.toArray
val iArray: Array[Int] = arr.map(itm => if (itm.isInstanceOf[Int]) itm.asInstanceOf[Int] else 0)
iArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple3[Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple4[Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfFloat(tuple: Tuple4[Any, Any, Any, Any]): Array[Float] = {
val arr: Array[Any] = tuple.productIterator.toArray
val fArray: Array[Float] = arr.map(itm => if (itm.isInstanceOf[Float] || itm.isInstanceOf[Int] || itm.isInstanceOf[Long]) itm.asInstanceOf[Float] else 0)
fArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfDouble(tuple: Tuple4[Any, Any, Any, Any]): Array[Double] = {
val arr: Array[Any] = tuple.productIterator.toArray
val dArray: Array[Double] = arr.map(itm => if (itm.isInstanceOf[Double] || itm.isInstanceOf[Float] || itm.isInstanceOf[Int] || itm.isInstanceOf[Long]) itm.asInstanceOf[Double] else 0)
dArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfInt(tuple: Tuple4[Any, Any, Any, Any]): Array[Int] = {
val arr: Array[Any] = tuple.productIterator.toArray
val iArray: Array[Int] = arr.map(itm => if (itm.isInstanceOf[Int]) itm.asInstanceOf[Int] else 0)
iArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple4[Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple5[Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfFloat(tuple: Tuple5[Any, Any, Any, Any, Any]): Array[Float] = {
val arr: Array[Any] = tuple.productIterator.toArray
val fArray: Array[Float] = arr.map(itm => if (itm.isInstanceOf[Float] || itm.isInstanceOf[Int] || itm.isInstanceOf[Long]) itm.asInstanceOf[Float] else 0)
fArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfDouble(tuple: Tuple5[Any, Any, Any, Any, Any]): Array[Double] = {
val arr: Array[Any] = tuple.productIterator.toArray
val dArray: Array[Double] = arr.map(itm => if (itm.isInstanceOf[Double] || itm.isInstanceOf[Float] || itm.isInstanceOf[Int] || itm.isInstanceOf[Long]) itm.asInstanceOf[Double] else 0)
dArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfInt(tuple: Tuple5[Any, Any, Any, Any, Any]): Array[Int] = {
val arr: Array[Any] = tuple.productIterator.toArray
val iArray: Array[Int] = arr.map(itm => if (itm.isInstanceOf[Int]) itm.asInstanceOf[Int] else 0)
iArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple5[Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple6[Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfFloat(tuple: Tuple6[Any, Any, Any, Any, Any, Any]): Array[Float] = {
val arr: Array[Any] = tuple.productIterator.toArray
val fArray: Array[Float] = arr.map(itm => if (itm.isInstanceOf[Float] || itm.isInstanceOf[Int] || itm.isInstanceOf[Long]) itm.asInstanceOf[Float] else 0)
fArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfDouble(tuple: Tuple6[Any, Any, Any, Any, Any, Any]): Array[Double] = {
val arr: Array[Any] = tuple.productIterator.toArray
val dArray: Array[Double] = arr.map(itm => if (itm.isInstanceOf[Double] || itm.isInstanceOf[Float] || itm.isInstanceOf[Int] || itm.isInstanceOf[Long]) itm.asInstanceOf[Double] else 0)
dArray
}
/** if the tuple doesn't contain appropriate numeric values, a 0 is returned at that position */
def ToArrayOfInt(tuple: Tuple6[Any, Any, Any, Any, Any, Any]): Array[Int] = {
val arr: Array[Any] = tuple.productIterator.toArray
val iArray: Array[Int] = arr.map(itm => if (itm.isInstanceOf[Int]) itm.asInstanceOf[Int] else 0)
iArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple6[Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple7[Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple7[Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple8[Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple8[Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple9[Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple9[Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple10[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple10[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple11[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple11[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple12[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple12[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple13[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple13[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple14[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple14[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple15[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple15[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple16[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple16[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple17[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple17[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple18[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple18[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple19[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple19[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple20[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple20[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple21[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple21[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/**
Coerce the elements in the supplied tuple to an Array[Any]
@param tuple TupleN[Any]
@return Array[Any]
*/
def ToArray(tuple: Tuple22[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Any] = {
tuple.productIterator.toArray
}
/** if the tuple is not boolean, a false is returned at that position */
def ToArrayOfBoolean(tuple: Tuple22[Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any, Any]): Array[Boolean] = {
val arr: Array[Any] = tuple.productIterator.toArray
val bArray: Array[Boolean] = arr.map(itm => if (itm.isInstanceOf[Boolean]) itm.asInstanceOf[Boolean] else false)
bArray
}
/** Coerce the Tuple1[Int] element to an Array[Int]. */
def ToArray(tuple: Tuple1[Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple2[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple2[Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple3[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple3[Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple4[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple4[Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple5[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple5[Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple6[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple6[Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple7[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple7[Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple8[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple8[Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple9[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple9[Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple10[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple10[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple11[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple11[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple12[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple12[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple13[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple13[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple14[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple14[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple15[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple15[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple16[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple16[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple17[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple17[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple18[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple18[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple19[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple19[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple20[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple20[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple21[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple21[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray

/** Coerce the Tuple22[Int] elements to an Array[Int]. */
def ToArray(tuple: Tuple22[Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int, Int]): Array[Int] =
  tuple.productIterator.map(_.asInstanceOf[Int]).toArray
/** Coerce the Tuple1[Float] element to an Array[Float]. */
def ToArray(tuple: Tuple1[Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple2[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple2[Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple3[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple3[Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple4[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple4[Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple5[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple5[Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple6[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple6[Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple7[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple7[Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple8[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple8[Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple9[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple9[Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple10[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple10[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple11[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple11[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple12[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple12[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple13[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple13[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple14[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple14[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple15[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple15[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple16[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple16[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple17[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple17[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple18[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple18[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple19[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple19[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple20[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple20[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple21[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple21[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray

/** Coerce the Tuple22[Float] elements to an Array[Float]. */
def ToArray(tuple: Tuple22[Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float, Float]): Array[Float] =
  tuple.productIterator.map(_.asInstanceOf[Float]).toArray
/** Coerce the Tuple1[Double] element to an Array[Double]. */
def ToArray(tuple: Tuple1[Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple2[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple2[Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple3[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple3[Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple4[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple4[Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple5[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple5[Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple6[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple6[Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple7[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple7[Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple8[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple8[Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple9[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple9[Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple10[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple10[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple11[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple11[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple12[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple12[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple13[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple13[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple14[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple14[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple15[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple15[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple16[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple16[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple17[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple17[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple18[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple18[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple19[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple19[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple20[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple20[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple21[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple21[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray

/** Coerce the Tuple22[Double] elements to an Array[Double]. */
def ToArray(tuple: Tuple22[Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double, Double]): Array[Double] =
  tuple.productIterator.map(_.asInstanceOf[Double]).toArray
/** Convert the mutable Set of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param set scala.collection.mutable.Set[(T,U)]
  * @return Map[T, U]
  */
def ToMap[T: ClassTag, U: ClassTag](set: MutableSet[(T, U)]): Map[T, U] =
  if (set == null) Map.empty[T, U] else set.toMap

/** Convert the Set of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param set scala.collection.immutable.Set[(T,U)]
  * @return Map[T, U]
  */
def ToMap[T: ClassTag, U: ClassTag](set: Set[(T, U)]): Map[T, U] =
  if (set == null) Map.empty[T, U] else set.toMap

/** Convert the mutable Set of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param set scala.collection.mutable.Set[(Any, Any)]
  * @return Map[Any, Any]
  */
def ToMap(set: MutableSet[(Any, Any)]): Map[Any, Any] =
  if (set == null) Map.empty[Any, Any] else set.toMap

/** Convert the Set of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param set scala.collection.immutable.Set[(Any, Any)]
  * @return Map[Any, Any]
  */
def ToMap(set: Set[(Any, Any)]): Map[Any, Any] =
  if (set == null) Map.empty[Any, Any] else set.toMap

/** Convert the ArrayBuffer of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param arr scala.collection.mutable.ArrayBuffer[(T, U)]
  * @return Map[T, U]
  */
def ToMap[T: ClassTag, U: ClassTag](arr: ArrayBuffer[(T, U)]): Map[T, U] =
  if (arr == null) Map.empty[T, U] else arr.toMap

/** Convert the ArrayBuffer of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param arr scala.collection.mutable.ArrayBuffer[(Any, Any)]
  * @return Map[Any, Any]
  */
def ToMap(arr: ArrayBuffer[(Any, Any)]): Map[Any, Any] =
  if (arr == null) Map.empty[Any, Any] else arr.toMap

/** Convert the Array of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param arr scala.Array[(T, U)]
  * @return Map[T, U]
  */
def ToMap[T: ClassTag, U: ClassTag](arr: Array[(T, U)]): Map[T, U] =
  if (arr == null) Map.empty[T, U] else arr.toMap

/** Convert the Array of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param arr scala.Array[(Any, Any)]
  * @return Map[Any, Any]
  */
def ToMap(arr: Array[(Any, Any)]): Map[Any, Any] =
  if (arr == null) Map.empty[Any, Any] else arr.toMap

/** Convert the SortedSet of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param set SortedSet[(T, U)]
  * @return Map[T, U]
  */
def ToMap[T: ClassTag, U: ClassTag](set: SortedSet[(T, U)]): Map[T, U] =
  if (set == null) Map.empty[T, U] else set.toMap

/** Convert the SortedSet of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param set SortedSet[(Any, Any)]
  * @return Map[Any, Any]
  */
def ToMap(set: SortedSet[(Any, Any)]): Map[Any, Any] =
  if (set == null) Map.empty[Any, Any] else set.toMap

/** Convert the TreeSet of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param ts TreeSet[(T, U)]
  * @return Map[T, U]
  */
def ToMap[T: ClassTag, U: ClassTag](ts: TreeSet[(T, U)]): Map[T, U] =
  if (ts == null) Map.empty[T, U] else ts.toMap

/** Convert the TreeSet of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param ts TreeSet[(Any, Any)]
  * @return Map[Any, Any]
  */
def ToMap(ts: TreeSet[(Any, Any)]): Map[Any, Any] =
  if (ts == null) Map.empty[Any, Any] else ts.toMap

/** Convert the List of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param l scala.collection.immutable.List[(T, U)]
  * @return Map[T, U]
  */
def ToMap[T: ClassTag, U: ClassTag](l: List[(T, U)]): Map[T, U] =
  if (l == null) Map.empty[T, U] else l.toMap

/** Convert the List of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param l scala.collection.immutable.List[(Any, Any)]
  * @return Map[Any, Any]
  */
def ToMap(l: List[(Any, Any)]): Map[Any, Any] =
  if (l == null) Map.empty[Any, Any] else l.toMap

/** Convert the Queue of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param q scala.collection.mutable.Queue[(T, U)]
  * @return Map[T, U]
  */
def ToMap[T: ClassTag, U: ClassTag](q: Queue[(T, U)]): Map[T, U] =
  if (q == null) Map.empty[T, U] else q.toMap

/** Convert the Queue of pairs to an immutable Map; null or empty input yields an empty Map.
  * @param q scala.collection.mutable.Queue[(Any, Any)]
  * @return Map[Any, Any]
  */
def ToMap(q: Queue[(Any, Any)]): Map[Any, Any] =
  if (q == null) Map.empty[Any, Any] else q.toMap
/**
* Suppress Stack type functions until MdMgr supports them properly
* def ToMap[T : ClassTag, U : ClassTag](stack: Stack[(T,U)]): Map[T,U] = {
* if (stack == null || stack.size == 0)
* return Map[T,U]()
* stack.toMap
* }
*
* def ToMap(stack: Stack[(Any,Any)]): Map[Any,Any] = {
* if (stack == null || stack.size == 0)
* return Map[Any,Any]()
* stack.toMap
* }
*/
/**
   Zip the two arrays together, pairing elements positionally; the result is truncated to
   the shorter input. A null or empty receiver yields an empty result; a null other is
   treated as empty instead of throwing NullPointerException (consistent with the
   receiver handling).
   @param receiver an Array[T]
   @param other an Array[U]
   @return Array[(T, U)]
*/
def Zip[T: ClassTag, U: ClassTag](receiver: Array[T], other: Array[U]): Array[(T, U)] = {
  if (receiver == null || other == null || receiver.isEmpty) Array[(T, U)]()
  else receiver.zip(other)
}
/**
   Zip the two array buffers together, pairing elements positionally; the result is
   truncated to the shorter input. Null or empty receiver, or null other, yields empty.
   @param receiver an ArrayBuffer[T]
   @param other an ArrayBuffer[U]
   @return ArrayBuffer[(T, U)]
*/
def Zip[T: ClassTag, U: ClassTag](receiver: ArrayBuffer[T], other: ArrayBuffer[U]): ArrayBuffer[(T, U)] = {
  if (receiver == null || other == null || receiver.isEmpty) ArrayBuffer[(T, U)]()
  else receiver.zip(other)
}
/**
   Zip the two Lists together, pairing elements positionally; the result is truncated to
   the shorter input. Null or empty receiver, or null other, yields empty.
   @param receiver a List[T]
   @param other a List[U]
   @return List[(T, U)]
*/
def Zip[T: ClassTag, U: ClassTag](receiver: List[T], other: List[U]): List[(T, U)] = {
  if (receiver == null || other == null || receiver.isEmpty) List[(T, U)]()
  else receiver.zip(other)
}
/**
   Zip the two Queues together, pairing elements positionally; the result is truncated to
   the shorter input. Null or empty receiver, or null other, yields empty.
   @param receiver a Queue[T]
   @param other a Queue[U]
   @return Queue[(T, U)]
*/
def Zip[T: ClassTag, U: ClassTag](receiver: Queue[T], other: Queue[U]): Queue[(T, U)] = {
  if (receiver == null || other == null || receiver.isEmpty) Queue[(T, U)]()
  else receiver.zip(other)
}
/**
   Zip the two Sets together. Null or empty receiver, or null other, yields empty.
   @param receiver a Set[T]
   @param other a Set[U]
   @return Set[(T, U)]
   Note: Zipping mutable.Set and/or immutable.Set is typically not a good idea unless the
   pairing done absolutely does not matter (iteration order is unspecified). Use SortedSet
   or TreeSet for predictable pairings.
*/
def Zip[T: ClassTag, U: ClassTag](receiver: Set[T], other: Set[U]): Set[(T, U)] = {
  if (receiver == null || other == null || receiver.isEmpty) Set[(T, U)]()
  else receiver.zip(other)
}
/**
   Zip the two mutable Sets together. Null or empty receiver, or null other, yields empty.
   @param receiver a MutableSet[T]
   @param other a MutableSet[U]
   @return MutableSet[(T, U)]
   Note: Zipping mutable.Set and/or immutable.Set is typically not a good idea unless the
   pairing done absolutely does not matter (iteration order is unspecified). Use SortedSet
   or TreeSet for predictable pairings.
*/
def Zip[T: ClassTag, U: ClassTag](receiver: MutableSet[T], other: MutableSet[U]): MutableSet[(T, U)] = {
  if (receiver == null || other == null || receiver.isEmpty) MutableSet[(T, U)]()
  else receiver.zip(other)
}
/**
   Zip the two SortedSets together in sorted iteration order; the result is truncated to
   the shorter input. Null or empty receiver, or null other, yields empty.
   @param receiver a SortedSet[T]
   @param other a SortedSet[U]
   @return SortedSet[(T, U)]
*/
def Zip[T: ClassTag, U: ClassTag](receiver: SortedSet[T], other: SortedSet[U])(implicit cmp: Ordering[(T, U)]): SortedSet[(T, U)] = {
  if (receiver == null || other == null || receiver.isEmpty) SortedSet[(T, U)]()
  else receiver.zip(other)
}
/**
   Zip the two TreeSets together in sorted iteration order; the result is truncated to
   the shorter input. Null or empty receiver, or null other, yields empty.
   @param receiver a TreeSet[T]
   @param other a TreeSet[U]
   @return TreeSet[(T, U)]
*/
def Zip[T: ClassTag, U: ClassTag](receiver: TreeSet[T], other: TreeSet[U])(implicit cmp: Ordering[(T, U)]): TreeSet[(T, U)] = {
  if (receiver == null || other == null || receiver.isEmpty) TreeSet[(T, U)]()
  else receiver.zip(other)
}
/** Answer the mutable map's keys, collected into an Array (in the map's iteration order).
  * @param receiver a scala.collection.mutable.Map[T,U]
  * @return Array[T]
  */
def MapKeys[T: ClassTag, U: ClassTag](receiver: MutableMap[T, U]): Array[T] =
  receiver.keysIterator.toArray

/** Answer the immutable map's keys, collected into an Array (in the map's iteration order).
  * @param receiver a scala.collection.immutable.Map[T,U]
  * @return Array[T]
  */
def MapKeys[T: ClassTag, U: ClassTag](receiver: Map[T, U]): Array[T] =
  receiver.keysIterator.toArray
/**
Answer the supplied map's keys as an array.
@param receiver a scala.collection.immutable.Map[Any,Any]
@return Array[Any]
def MapKeys(receiver: MutableMap[Any, Any]): Array[Any] = {
receiver.keys.toArray
} */
/**
Answer the supplied map's keys as an array.
@param receiver a scala.collection.immutable.Map[Any,Any]
@return Array[Any]
def MapKeys(receiver: Map[Any, Any]): Array[Any] = {
receiver.keys.toArray
}*/
/**
  Answer the supplied map's values as an array.
  @param receiver a scala.collection.mutable.Map[T,U] (may be null)
  @return Array[U] (empty when receiver is null)
*/
def MapValues[T: ClassTag, U: ClassTag](receiver: MutableMap[T, U]): Array[U] = {
  // Null guard added for consistency with the other collection UDFs in this file.
  if (receiver == null) Array.empty[U] else receiver.values.toArray
}
/**
  Answer the supplied map's values as an array.
  @param receiver a scala.collection.immutable.Map[T,U] (may be null)
  @return Array[U] (empty when receiver is null)
*/
def MapValues[T: ClassTag, U: ClassTag](receiver: Map[T, U]): Array[U] = {
  if (receiver == null) Array.empty[U] else receiver.values.toArray
}
/**
  Answer the supplied map's values as an array.
  @param receiver a scala.collection.mutable.Map[Any,Any] (may be null)
  @return Array[Any] (empty when receiver is null)
*/
def MapValues(receiver: MutableMap[Any, Any]): Array[Any] = {
  if (receiver == null) Array.empty[Any] else receiver.values.toArray
}
/**
  Answer the supplied map's values as an array.
  @param receiver a scala.collection.immutable.Map[Any,Any] (may be null)
  @return Array[Any] (empty when receiver is null)
*/
def MapValues(receiver: Map[Any, Any]): Array[Any] = {
  if (receiver == null) Array.empty[Any] else receiver.values.toArray
}
/**
  Answer collection's length.
  @param coll an Array[T] (may be null)
  @return size, or 0 when coll is null
*/
def CollectionLength[T: ClassTag](coll: Array[T]): Int = {
  // Null guard added: every other UDF in this file tolerates null collections.
  if (coll == null) 0 else coll.length
}
/**
  Answer collection's length.
  @param coll an ArrayBuffer[T] (may be null)
  @return size, or 0 when coll is null
*/
def CollectionLength[T: ClassTag](coll: ArrayBuffer[T]): Int = {
  if (coll == null) 0 else coll.size
}
/**
  Answer collection's length.
  @param coll a MutableSet[T] (may be null)
  @return size, or 0 when coll is null
*/
def CollectionLength[T: ClassTag](coll: MutableSet[T]): Int = {
  if (coll == null) 0 else coll.size
}
/**
  Answer collection's length.
  @param coll a Set[T] (may be null)
  @return size, or 0 when coll is null
*/
def CollectionLength[T: ClassTag](coll: Set[T]): Int = {
  if (coll == null) 0 else coll.size
}
/**
  Answer collection's length.
  @param coll a TreeSet[T] (may be null)
  @return size, or 0 when coll is null
*/
def CollectionLength[T: ClassTag](coll: TreeSet[T]): Int = {
  if (coll == null) 0 else coll.size
}
/**
  Answer collection's length.
  @param coll a SortedSet[T] (may be null)
  @return size, or 0 when coll is null
*/
def CollectionLength[T: ClassTag](coll: SortedSet[T]): Int = {
  if (coll == null) 0 else coll.size
}
/**
  Answer collection's length.
  @param coll a List[T] (may be null)
  @return size, or 0 when coll is null
*/
def CollectionLength[T: ClassTag](coll: List[T]): Int = {
  if (coll == null) 0 else coll.size
}
/**
  Answer collection's length.
  @param coll a Queue[T] (may be null)
  @return size, or 0 when coll is null
*/
def CollectionLength[T: ClassTag](coll: Queue[T]): Int = {
  if (coll == null) 0 else coll.size
}
/**
* Suppress functions that use Stack and Vector until Mdmgr supports it
* def CollectionLength[T : ClassTag](coll : Stack[T]) : Int = {
* coll.size
* }
*
* def CollectionLength[T : ClassTag](coll : Vector[T]) : Int = {
* coll.size
* }
*
*/
/**
  Answer collection's length.
  @param coll a scala.collection.mutable.Map[K,V] (may be null)
  @return size, or 0 when coll is null
*/
def CollectionLength[K: ClassTag, V: ClassTag](coll: MutableMap[K, V]): Int = {
  // Null guard added: every other UDF in this file tolerates null collections.
  if (coll == null) 0 else coll.size
}
/**
  Answer collection's length.
  @param coll an immutable Map[K,V] (may be null)
  @return size, or 0 when coll is null
*/
def CollectionLength[K: ClassTag, V: ClassTag](coll: Map[K, V]): Int = {
  if (coll == null) 0 else coll.size
}
/**
  Answer collection's length.
  @param coll a HashMap[K,V] (may be null)
  @return size, or 0 when coll is null
*/
def CollectionLength[K: ClassTag, V: ClassTag](coll: HashMap[K, V]): Int = {
  if (coll == null) 0 else coll.size
}
/**
  Add the supplied item(s) to a collection.
  @param coll an Array[T] (may be null)
  @param items zero or more items to append
  @return a new Array with the items appended; coll itself when items is null;
          an empty Array when coll is null (NOTE: items are discarded in that case)
*/
def Add[T : ClassTag](coll : Array[T], items : T*) : Array[T] = {
  if (coll == null) {
    Array[T]()
  } else if (items == null) {
    coll
  } else {
    coll ++ items.toArray
  }
}
/**
  Add the supplied item(s) to a collection.
  @param coll an ArrayBuffer[T] (may be null)
  @param items zero or more items to append
  @return a new ArrayBuffer with the items appended; a copy of coll when items is null;
          an empty ArrayBuffer when coll is null
*/
def Add[T : ClassTag](coll : ArrayBuffer[T], items : T*) : ArrayBuffer[T] = {
  if (coll == null) {
    ArrayBuffer[T]()
  } else if (items == null) {
    ArrayBuffer[T]() ++ coll
  } else {
    coll ++ items.toArray
  }
}
/**
  Add the supplied item(s) to a collection.
  @param coll a List[T] (may be null)
  @param items zero or more items to append
  @return a new List with the items appended; a copy of coll when items is null;
          an empty List when coll is null
*/
def Add[T : ClassTag](coll : List[T], items : T*) : scala.collection.immutable.List[T] = {
  if (coll == null) {
    scala.collection.immutable.List[T]()
  } else if (items == null) {
    scala.collection.immutable.List[T]() ++ coll
  } else {
    coll ++ items.toArray
  }
}
/**
  Add the supplied item(s) to a collection.
  @param coll a Queue[T] (may be null)
  @param items zero or more items to append
  @return a new Queue with the items appended; a copy of coll when items is null;
          an empty Queue when coll is null
*/
def Add[T : ClassTag](coll : Queue[T], items : T*) : Queue[T] = {
  if (coll == null) {
    Queue[T]()
  } else if (items == null) {
    Queue[T]() ++ coll
  } else {
    coll ++ items.toArray
  }
}
/**
  Add the supplied item(s) to a collection.
  @param coll a SortedSet[T] (may be null)
  @param items zero or more items to append
  @return a new SortedSet with the items added; a copy of coll when items is null;
          an empty SortedSet when coll is null
*/
def Add[T : ClassTag](coll : SortedSet[T], items : T*)(implicit cmp: Ordering[T]): SortedSet[T] = {
  if (coll == null) {
    SortedSet[T]()
  } else if (items == null) {
    SortedSet[T]() ++ coll
  } else {
    coll ++ items.toArray
  }
}
/**
  Add the supplied item(s) to a collection.
  @param coll a TreeSet[T] (may be null)
  @param items zero or more items to append
  @return a new TreeSet with the items added; a copy of coll when items is null;
          an empty TreeSet when coll is null
*/
def Add[T : ClassTag](coll : TreeSet[T], items : T*)(implicit cmp: Ordering[T]) : TreeSet[T] = {
  if (coll == null) {
    TreeSet[T]()
  } else if (items == null) {
    TreeSet[T]() ++ coll
  } else {
    coll ++ items.toArray
  }
}
/**
  Add the supplied item(s) to a collection.
  @param coll a mutable Set[T] (may be null)
  @param items zero or more items to append
  @return a new mutable Set with the items added; a copy of coll when items is null;
          an empty mutable Set when coll is null
*/
def Add[T : ClassTag](coll : MutableSet[T], items : T*) : MutableSet[T] = {
  if (coll == null) {
    MutableSet[T]()
  } else if (items == null) {
    MutableSet[T]() ++ coll
  } else {
    coll ++ items.toArray
  }
}
/**
  Add the supplied item(s) to a collection.
  @param coll an immutable Set[T] (may be null)
  @param items zero or more items to append
  @return a new Set with the items added; a copy of coll when items is null;
          an empty Set when coll is null
*/
def Add[T : ClassTag](coll : Set[T], items : T*) : Set[T] = {
  if (coll == null) {
    Set[T]()
  } else if (items == null) {
    Set[T]() ++ coll
  } else {
    coll ++ items.toArray
  }
}
/** Accept an indefinite number of objects and answer them as a List of String.
  Null objects produce "" for their values.
  @param args : an indefinite number of objects of any type
  @return a list of their string representations
*/
def ToStringList(args : Any*) : List[String] = {
  val representations = for (arg <- args.toList) yield {
    if (arg == null) "" else arg.toString
  }
  representations
}
/** Convert any value to a string.  A null argument produces "".
  @param arg : any kind of object
  @return its string representation
*/
def ToString(arg : Any) : String = Option(arg).fold("")(_.toString)
/**
 * Evaluate an arbitrary number of expressions and answer the value of the last one
 * when it is a Boolean; otherwise answer false.
 *
 * This function lets a (typically PMML-generated) caller execute several expressions
 * while signalling that the final one must produce a Boolean for the enclosing
 * function or derived field.
 *
 * @param args : variable number of Any
 * @return the last argument when it is a Boolean; false when it is not, or when
 *         args is null or empty
 */
def CompoundStatementBoolean(args : Any*): Boolean = {
  // isEmpty guard added: the original called .last on an empty varargs list,
  // throwing NoSuchElementException for a zero-argument call.
  if (args == null || args.isEmpty) {
    false
  } else {
    args.last match {
      case b: Boolean => b
      case _          => false
    }
  }
}
/**
 * Evaluate an arbitrary number of expressions and answer the value of the last one
 * when it is a String; otherwise answer "".
 *
 * @param args : variable number of Any
 * @return the last argument when it is a String; "" when it is not, or when
 *         args is null or empty
 */
def CompoundStatementString(args : Any*): String = {
  if (args == null || args.isEmpty) {
    ""
  } else {
    args.last match {
      case s: String => s
      case _         => ""
    }
  }
}
/**
 * Evaluate an arbitrary number of expressions and answer the value of the last one
 * when it is an Int; otherwise answer 0.
 *
 * @param args : variable number of Any
 * @return the last argument when it is an Int; 0 when it is not, or when
 *         args is null or empty
 */
def CompoundStatementInt(args : Any*): Int = {
  if (args == null || args.isEmpty) {
    0
  } else {
    args.last match {
      case i: Int => i
      case _      => 0
    }
  }
}
/**
 * Evaluate an arbitrary number of expressions and answer the value of the last one
 * when it is a Long; otherwise answer 0L.
 *
 * @param args : variable number of Any
 * @return the last argument when it is a Long; 0L when it is not, or when
 *         args is null or empty
 */
def CompoundStatementLong(args : Any*): Long = {
  if (args == null || args.isEmpty) {
    0L
  } else {
    args.last match {
      case l: Long => l
      case _       => 0L
    }
  }
}
/**
 * Evaluate an arbitrary number of expressions and answer the value of the last one
 * when it is a Float; otherwise answer 0.0F.
 *
 * @param args : variable number of Any
 * @return the last argument when it is a Float; 0.0F when it is not, or when
 *         args is null or empty
 */
def CompoundStatementFloat(args : Any*): Float = {
  if (args == null || args.isEmpty) {
    0.0F
  } else {
    args.last match {
      case f: Float => f
      case _        => 0.0F
    }
  }
}
/**
 * Evaluate an arbitrary number of expressions and answer the value of the last one
 * when it is a Double; otherwise answer 0.0D.
 *
 * @param args : variable number of Any
 * @return the last argument when it is a Double; 0.0D when it is not, or when
 *         args is null or empty
 */
def CompoundStatementDouble(args : Any*): Double = {
  if (args == null || args.isEmpty) {
    0.0D
  } else {
    args.last match {
      case d: Double => d
      case _         => 0.0D
    }
  }
}
/**
 * Evaluate an arbitrary number of expressions and answer the value of the last one.
 * @param args : variable number of Any
 * @return the last argument, or "" when args is null or empty
 *
 * NOTE: There is no type safety with this function.  It is the responsibility of the
 * caller to recognize the real type of the element and treat it appropriately.  In
 * PMML the result type of the last expression MUST be the one the enclosing
 * DerivedField or Apply function expects; failure to do this will result in a
 * ClassCastException at runtime.
 */
def CompoundStatement(args : Any*): Any = {
  // Empty-args guard added (the original's 'args.toList.last' threw on empty input).
  if (args == null || args.isEmpty) "" else args.last
}
}
|
traytonwhite/Kamanja
|
trunk/Pmml/PmmlUdfs/src/main/scala/com/ligadata/pmml/udfs/PmmlUdfs.scala
|
Scala
|
apache-2.0
| 304,736
|
package org.scalatra
package servlet
import org.scalatra.test.scalatest.ScalatraFunSuite
/** Runs the shared AttributesTest suite against attributes stored on the
  * ServletContext: the mounted AttributesServlet uses the servlet context
  * itself as its attribute store. */
class ServletContextAttributesTest extends ScalatraFunSuite with AttributesTest {
  // Mount at the root so every AttributesTest request reaches this servlet.
  addServlet(new AttributesServlet {
    def attributesMap = servletContext
  }, "/*")
}
|
lightvector/scalatra
|
core/src/test/scala/org/scalatra/servlet/ServletContextAttributesTest.scala
|
Scala
|
bsd-2-clause
| 263
|
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.api
import java.nio.charset.StandardCharsets
import java.util.{Locale, UUID}
import org.apache.commons.codec.binary.Hex
import org.geotools.factory.Hints
import org.locationtech.geomesa.index.geotools.GeoMesaDataStore
import org.locationtech.geomesa.index.stats.GeoMesaStats
import org.locationtech.geomesa.index.utils.{ExplainNull, Explainer}
import org.locationtech.geomesa.utils.index.ByteArrays
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
/**
 * Represents a particular indexing strategy
 *
 * @tparam DS type of related data store
 * @tparam F wrapper around a simple feature - used for caching write calculations
 * @tparam WriteResult feature writers will transform simple features into these
 */
trait GeoMesaFeatureIndex[DS <: GeoMesaDataStore[DS, F, WriteResult], F <: WrappedFeature, WriteResult] {

  type TypedFilterStrategy = FilterStrategy[DS, F, WriteResult]

  // human-readable id combining name and version, e.g. "z3:4"
  lazy val identifier: String = s"$name:$version"

  // key under which this index's table name is stored in the per-type metadata
  // (see configure/getTableName below)
  lazy val tableNameKey: String = s"table.$name.v$version"

  /**
   * The name used to identify the index
   */
  def name: String

  /**
   * Current version of the index
   *
   * @return version number
   */
  def version: Int

  /**
   * Is the index compatible with the given feature type
   *
   * @param sft simple feature type
   * @return true if this index can be used with the feature type
   */
  def supports(sft: SimpleFeatureType): Boolean

  /**
   * Configure the index upon initial creation.
   * Generates a table name and records it in the data store metadata under tableNameKey.
   *
   * @param sft simple feature type
   * @param ds data store
   */
  def configure(sft: SimpleFeatureType, ds: DS): Unit =
    ds.metadata.insert(sft.getTypeName, tableNameKey, generateTableName(sft, ds))

  /**
   * Creates a function to write a feature to the index
   *
   * @param sft simple feature type
   * @param ds data store
   * @return function producing the write mutations for a single wrapped feature
   */
  def writer(sft: SimpleFeatureType, ds: DS): (F) => Seq[WriteResult]

  /**
   * Creates a function to delete a feature from the index
   *
   * @param sft simple feature type
   * @param ds data store
   * @return function producing the delete mutations for a single wrapped feature
   */
  def remover(sft: SimpleFeatureType, ds: DS): (F) => Seq[WriteResult]

  /**
   * Removes all values from the index
   *
   * @param sft simple feature type
   * @param ds data store
   */
  def removeAll(sft: SimpleFeatureType, ds: DS): Unit

  /**
   * Deletes the entire index
   *
   * @param sft simple feature type
   * @param ds data store
   * @param shared true if this index shares physical space with another (e.g. shared tables)
   */
  def delete(sft: SimpleFeatureType, ds: DS, shared: Boolean): Unit

  /**
   * Indicates whether the ID for each feature is serialized with the feature or in the row
   *
   * @return false: IDs are assumed to be encoded in the row (see getIdFromRow)
   */
  // NOTE(review): bare @deprecated carries no message/since - prefer
  // @deprecated("reason", "since") if this member is touched again
  @deprecated
  def serializedWithId: Boolean = false

  /**
   *
   * Retrieve an ID from a row. All indices are assumed to encode the feature ID into the row key.
   *
   * The simple feature in the returned function signature is optional (null ok) - if provided the
   * parsed UUID will be cached in the feature user data, if the sft is marked as using UUIDs
   *
   * @param sft simple feature type
   * @return a function to retrieve an ID from a row - (row: Array[Byte], offset: Int, length: Int, feature: SimpleFeature)
   */
  def getIdFromRow(sft: SimpleFeatureType): (Array[Byte], Int, Int, SimpleFeature) => String

  /**
   * Gets the initial splits for a table
   *
   * @param sft simple feature type
   * @return split points (row key prefixes) for pre-splitting the physical table
   */
  def getSplits(sft: SimpleFeatureType): Seq[Array[Byte]]

  /**
   * Gets options for a 'simple' filter, where each OR is on a single attribute, e.g.
   *   (bbox1 OR bbox2) AND dtg
   *   bbox AND dtg AND (attr1 = foo OR attr = bar)
   * not:
   *   bbox OR dtg
   *
   * Because the inputs are simple, each one can be satisfied with a single query filter.
   * The returned values will each satisfy the query.
   *
   * @param filter input filter
   * @param transform attribute transforms
   * @return sequence of options, any of which can satisfy the query
   */
  def getFilterStrategy(sft: SimpleFeatureType,
                        filter: Filter,
                        transform: Option[SimpleFeatureType]): Seq[TypedFilterStrategy]

  /**
   * Gets the estimated cost of running the query. In general, this is the estimated
   * number of features that will have to be scanned.
   */
  def getCost(sft: SimpleFeatureType,
              stats: Option[GeoMesaStats],
              filter: TypedFilterStrategy,
              transform: Option[SimpleFeatureType]): Long

  /**
   * Plans the query
   */
  def getQueryPlan(sft: SimpleFeatureType,
                   ds: DS,
                   filter: TypedFilterStrategy,
                   hints: Hints,
                   explain: Explainer = ExplainNull): QueryPlan[DS, F, WriteResult]

  /**
   * Gets the table name for this index, as stored in the metadata by configure().
   *
   * @param typeName simple feature type name
   * @param ds data store
   * @return table name
   * @throws RuntimeException if configure() was never run for this type/index
   */
  def getTableName(typeName: String, ds: DS): String = ds.metadata.read(typeName, tableNameKey).getOrElse {
    throw new RuntimeException(s"Could not read table name from metadata for index $identifier")
  }

  /**
   * Creates a valid, unique string for the underlying table
   *
   * @param sft simple feature type
   * @param ds data store
   * @return table name built from the catalog, the (hex-encoded) type name, and the index suffix
   */
  protected def generateTableName(sft: SimpleFeatureType, ds: DS): String =
    GeoMesaFeatureIndex.formatTableName(ds.config.catalog, GeoMesaFeatureIndex.tableSuffix(this), sft)
}
object GeoMesaFeatureIndex {

  // only alphanumeric is safe
  private val SAFE_FEATURE_NAME_PATTERN = "^[a-zA-Z0-9]+$"
  private val alphaNumeric = ('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9')

  /**
   * Format a table name with a namespace. Non alpha-numeric characters present in
   * featureType names will be underscore hex encoded (e.g. _2a) including multibyte
   * UTF8 characters (e.g. _2a_f3_8c) to make them safe for accumulo table names
   * but still human readable.
   */
  def formatTableName(catalog: String, suffix: String, sft: SimpleFeatureType): String = {
    import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
    val shared = sft.isTableSharing
    if (shared) formatSharedTableName(catalog, suffix) else formatSoloTableName(catalog, suffix, sft.getTypeName)
  }

  /** Table name for a feature type with its own (non-shared) tables. */
  def formatSoloTableName(prefix: String, suffix: String, typeName: String): String = {
    val encodedType = hexEncodeNonAlphaNumeric(typeName)
    concatenate(prefix, encodedType, suffix)
  }

  /** Table name for feature types sharing physical tables. */
  def formatSharedTableName(prefix: String, suffix: String): String = concatenate(prefix, suffix)

  /** Index-specific table-name suffix; version 1 omits the version for back-compatibility. */
  def tableSuffix(index: GeoMesaFeatureIndex[_, _, _]): String = {
    index.version match {
      case 1 => index.name
      case v => concatenate(index.name, s"v$v")
    }
  }

  /**
   * Format a table name for the shared tables
   */
  def concatenate(parts: String *): String = parts.mkString("_")

  /**
   * Encode non-alphanumeric characters in a string with
   * underscore plus hex digits representing the bytes. Note
   * that multibyte characters will be represented with multiple
   * underscores and bytes...e.g. _8a_2f_3b
   */
  def hexEncodeNonAlphaNumeric(input: String): String = {
    if (input.matches(SAFE_FEATURE_NAME_PATTERN)) {
      input
    } else {
      // each char maps to itself (safe) or to "_xx" per UTF-8 byte (unsafe)
      val pieces = input.toCharArray.map { c =>
        if (alphaNumeric.contains(c)) {
          c.toString
        } else {
          val hex = Hex.encodeHex(c.toString.getBytes(StandardCharsets.UTF_8))
          hex.grouped(2).map(pair => s"_${pair(0)}${pair(1)}").mkString.toLowerCase(Locale.US)
        }
      }
      pieces.mkString
    }
  }

  /**
   * Converts a feature id to bytes, for indexing or querying
   *
   * @param sft simple feature type
   * @return
   */
  def idToBytes(sft: SimpleFeatureType): (String) => Array[Byte] = {
    import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
    if (sft.isUuidEncoded) uuidToBytes else stringToBytes
  }

  /**
   * Converts a byte array to a feature id. Return method takes an optional (null accepted) simple feature,
   * which will be used to cache the parsed feature ID if it is a UUID.
   *
   * @param sft simple feature type
   * @return (bytes, offset, length, SimpleFeature) => id
   */
  def idFromBytes(sft: SimpleFeatureType): (Array[Byte], Int, Int, SimpleFeature) => String = {
    import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
    if (sft.isUuidEncoded) uuidFromBytes else stringFromBytes
  }

  // parse the string form and serialize the UUID's two 64-bit halves
  private def uuidToBytes(id: String): Array[Byte] = {
    val parsed = UUID.fromString(id)
    ByteArrays.uuidToBytes(parsed.getMostSignificantBits, parsed.getLeastSignificantBits)
  }

  // reconstruct the UUID from the row bytes; cache it on the feature when one is provided
  private def uuidFromBytes(bytes: Array[Byte], offset: Int, ignored: Int, sf: SimpleFeature): String = {
    import org.locationtech.geomesa.utils.geotools.Conversions.RichSimpleFeature
    val bits = ByteArrays.uuidFromBytes(bytes, offset)
    if (sf != null) {
      sf.cacheUuid(bits)
    }
    new UUID(bits._1, bits._2).toString
  }

  private def stringToBytes(id: String): Array[Byte] = id.getBytes(StandardCharsets.UTF_8)

  private def stringFromBytes(bytes: Array[Byte], offset: Int, length: Int, ignored: SimpleFeature): String =
    new String(bytes, offset, length, StandardCharsets.UTF_8)
}
|
ddseapy/geomesa
|
geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/api/GeoMesaFeatureIndex.scala
|
Scala
|
apache-2.0
| 9,907
|
package fuel.moves
import fuel.util.TRandom
import scala.collection.immutable.BitSet
/**
 * Bitstring domain implemented as BitSets
 *
 */
class BitSetMoves(numVars: Int)(rng: TRandom)
    extends AbstractVectorMoves[BitSet] {
  assert(numVars > 0)

  // Random bitstring: each of the numVars bit indices is set iff rng.nextBoolean.
  override def newSolution = BitSet.empty ++
    (for (i <- 0.until(numVars); if (rng.nextBoolean)) yield i)

  // Flip one uniformly-chosen bit index.
  override def onePointMutation = (p: BitSet) => {
    val bitToMutate = rng.nextInt(numVars)
    if (p(bitToMutate)) p - bitToMutate else p + bitToMutate
  }

  // NOTE(review): BitSet.splitAt(n) splits after the first n SET bits (count-based),
  // not at bit index n (value-based). For bitstring one-point crossover a
  // value-based split (e.g. partition(_ < cuttingPoint)) may have been intended -
  // confirm against the fuel library's semantics before changing.
  override def onePointCrossover = (p1: BitSet, p2: BitSet) => {
    val cuttingPoint = rng.nextInt(numVars)
    val (myHead, myTail) = p1.splitAt(cuttingPoint)
    val (hisHead, hisTail) = p2.splitAt(cuttingPoint)
    (myHead ++ hisTail, hisHead ++ myTail)
  }

  // Two cut points c._1 <= c._2; same count-based splitAt caveat as above applies.
  override def twoPointCrossover = (p1: BitSet, p2: BitSet) => {
    val h = (rng.nextInt(numVars), rng.nextInt(numVars))
    val c = if (h._1 <= h._2) h else h.swap
    val (myHead, myRest) = p1.splitAt(c._1)
    val (myMid, myTail) = myRest.splitAt(c._2)
    val (hisHead, hisRest) = p2.splitAt(c._1)
    val (hisMid, hisTail) = hisRest.splitAt(c._2)
    (myHead ++ hisMid ++ myTail, hisHead ++ myMid ++ hisTail)
  }
}

/** Factory taking the random generator implicitly. */
object BitSetMoves {
  def apply(numVars: Int)(implicit rng: TRandom) = new BitSetMoves(numVars)(rng)
}
|
iwob/fuel
|
src/main/scala/fuel/moves/BitSetMoves.scala
|
Scala
|
mit
| 1,316
|
/*
* Copyright (c) 2012-2019 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and
* limitations there under.
*/
package com.snowplowanalytics
package snowplow.enrich
package spark
// Java
import java.net.URI
// Joda
import org.joda.time.DateTime
// Scalaz
import scalaz._
import Scalaz._
// Scopt
import scopt._
// Snowplow
import common.ValidatedNelMessage
import common.enrichments.EnrichmentRegistry
import common.loaders.Loader
import iglu.client.validation.ProcessingMessageMethods._
/** Read-only view of the enrich job configuration, shared by the raw
  * (straight-from-CLI) and parsed (validated) variants. */
sealed trait EnrichJobConfig {
  def inFolder: String    // folder where the input events are located
  def inFormat: String    // collector format in which the data is coming in
  def outFolder: String   // output folder for enriched events
  def badFolder: String   // output folder for malformed events
  def enrichments: String // where the enrichment JSONs are stored
  def igluConfig: String  // Iglu resolver configuration
  def local: Boolean      // whether to build a registry from local data
}
/** Configuration exactly as captured from the command line, before validation;
  * etlTstamp is still the raw milliseconds value (converted to a DateTime during
  * parsing into ParsedEnrichJobConfig). */
private case class RawEnrichJobConfig(
  override val inFolder: String = "",
  override val inFormat: String = "",
  override val outFolder: String = "",
  override val badFolder: String = "",
  override val enrichments: String = "",
  override val igluConfig: String = "",
  override val local: Boolean = false,
  etlTstamp: Long = 0L
) extends EnrichJobConfig
/**
 * Case class representing the configuration for the enrich job.
 * @param inFolder Folder where the input events are located
 * @param inFormat Collector format in which the data is coming in
 * @param outFolder Output folder where the enriched events will be stored
 * @param badFolder Output folder where the malformed events will be stored
 * @param enrichments JSON representing the enrichments that need performing
 * @param igluConfig JSON representing the Iglu configuration
 * @param local Whether to build a registry from local data
 * @param etlTstamp Timestamp at which the job was launched
 * @param filesToCache Enrichment files that need caching, as (URI, name) pairs
 *                     derived from the enrichment registry
 */
case class ParsedEnrichJobConfig(
  override val inFolder: String,
  override val inFormat: String,
  override val outFolder: String,
  override val badFolder: String,
  override val enrichments: String,
  override val igluConfig: String,
  override val local: Boolean,
  etlTstamp: DateTime,
  filesToCache: List[(URI, String)]
) extends EnrichJobConfig
object EnrichJobConfig {
  // CLI parser: every option is required except the hidden --local flag.
  private val parser = new scopt.OptionParser[RawEnrichJobConfig]("EnrichJob") {
    head("EnrichJob")
    opt[String]("input-folder")
      .required()
      .valueName("<input folder>")
      .action((f, c) => c.copy(inFolder = f))
      .text("Folder where the input events are located")
    opt[String]("input-format")
      .required()
      .valueName("<input format>")
      .action((f, c) => c.copy(inFormat = f))
      .text("The format in which the collector is saving data")
    opt[String]("output-folder")
      .required()
      .valueName("<output folder>")
      .action((f, c) => c.copy(outFolder = f))
      .text("Output folder where the enriched events will be stored")
    opt[String]("bad-folder")
      .required()
      .valueName("<bad folder>")
      .action((f, c) => c.copy(badFolder = f))
      .text("Output folder where the malformed events will be stored")
    opt[String]("enrichments")
      .required()
      .valueName("<enrichments>")
      .action((e, c) => c.copy(enrichments = e))
      .text("Directory where the JSONs describing the enrichments are stored")
    opt[String]("iglu-config")
      .required()
      .valueName("<iglu config>")
      .action((i, c) => c.copy(igluConfig = i))
      .text("Iglu resolver configuration")
    opt[Long]("etl-timestamp")
      .required()
      .valueName("<ETL timestamp>")
      .action((t, c) => c.copy(etlTstamp = t))
      .text("Timestamp at which the job was launched, in milliseconds")
    opt[Unit]("local")
      .hidden()
      .action((_, c) => c.copy(local = true))
      .text("Whether to build a local enrichment registry")
    help("help").text("Prints this usage text")
  }

  /** Turn a RawEnrichJobConfig into a ParsedEnrichJobConfig */
  private def transform(
    c: RawEnrichJobConfig
  ): ValidatedNelMessage[ParsedEnrichJobConfig] = {
    // We try to build all the components early to detect failures before starting the job
    import singleton._
    val resolver = ResolverSingleton.getIgluResolver(c.igluConfig)
    // the registry needs the resolver, hence the flatMap chaining
    val registry = resolver
      .flatMap(RegistrySingleton.getEnrichmentRegistry(c.enrichments, c.local)(_))
    val loader = Loader
      .getLoader(c.inFormat)
      .fold(_.toProcessingMessage.failureNel, _.successNel)
    // applicative composition: accumulates errors from all three validations
    (resolver |@| registry |@| loader) { (_, reg, _) =>
      ParsedEnrichJobConfig(
        c.inFolder,
        c.inFormat,
        c.outFolder,
        c.badFolder,
        c.enrichments,
        c.igluConfig,
        c.local,
        new DateTime(c.etlTstamp),
        filesToCache(reg))
    }
  }

  /**
   * Load a EnrichJobConfig from command line arguments.
   * @param args The command line arguments
   * @return The job config or one or more error messages boxed in a Scalaz ValidationNel
   */
  def loadConfigFrom(
    args: Array[String]
  ): ValidatedNelMessage[ParsedEnrichJobConfig] =
    // parser.parse returns None when scopt rejects the arguments (it prints its own usage)
    parser.parse(args, RawEnrichJobConfig()).map(transform) match {
      case Some(c) => c
      case _       => "Parsing of the configuration failed".toProcessingMessage.failureNel
    }

  /**
   * Build the list of enrichment files to cache.
   * @param registry EnrichmentRegistry used to find the files that need caching
   * @return A list of URIs representing the files that need caching
   */
  private def filesToCache(registry: EnrichmentRegistry): List[(URI, String)] =
    registry.filesToCache
}
|
RetentionGrid/snowplow
|
3-enrich/spark-enrich/src/main/scala/com.snowplowanalytics.snowplow.enrich.spark/EnrichJobConfig.scala
|
Scala
|
apache-2.0
| 6,008
|
package com.tothferenc.templateFX.base.attribute
/** A [[RemovableFeature]] whose value can also be written onto its holder.
  * @tparam Holder   type of object the feature is applied to
  * @tparam AttrType type of the value being set
  */
abstract class SettableFeature[-Holder, -AttrType] extends RemovableFeature[Holder] {
  // Assign `value` to this feature on `target`.
  def set(target: Holder, value: AttrType): Unit
}
|
tferi/templateFX
|
base/src/main/scala/com/tothferenc/templateFX/base/attribute/SettableFeature.scala
|
Scala
|
gpl-3.0
| 187
|
/* Copyright 2017 EPFL, Lausanne */
package inox
package parsing
trait ExpressionExtractors { self: Extractors =>
trait ExpressionExtractor { self0: Extractor =>
import ExprIR._
private type MatchObligation = Option[Match]
/** Match a concrete identifier against a template identifier:
  * a hole captures it, a literal name must match exactly. */
protected def toIdObl(pair: (inox.Identifier, Identifier)): MatchObligation = {
  val (id, templateId) = pair
  templateId match {
    case IdentifierName(n)   => if (n == id.name) Some(empty) else None
    case IdentifierHole(idx) => Some(matching(idx, id))
    case _                   => None
  }
}

/** Match a concrete expression against a template expression. */
protected def toExprObl(pair: (trees.Expr, Expression)): MatchObligation = {
  val (expr, template) = pair
  extract(expr, template)
}

/** Match a concrete type against a template type. */
protected def toTypeObl(pair: (trees.Type, Type)): MatchObligation = {
  val (tpe, template) = pair
  extract(tpe, template)
}

/** Match a concrete type against an optional template type;
  * an absent template matches trivially. */
protected def toOptTypeObl(pair: (trees.Type, Option[Type])): MatchObligation = {
  val (tpe, optTemplate) = pair
  optTemplate match {
    case None           => Some(empty)
    case Some(template) => toTypeObl(tpe -> template)
  }
}
/** Match a sequence of concrete expressions against a sequence of template
  * expressions, element-wise. An ExpressionSeqHole at the head of the template
  * greedily captures however many leading expressions are needed for the
  * remaining templates to line up one-to-one with the remaining expressions. */
protected def toExprObls(pair: (Seq[trees.Expr], Seq[Expression])): MatchObligation = {
  pair match {
    case (Seq(), Seq()) => Some(empty)
    case (Seq(), _) => None
    case (_, Seq()) => None
    case (_, Seq(ExpressionSeqHole(i), templateRest @ _*)) => {
      // The hole must absorb exactly n = (#exprs - #remaining templates) expressions.
      val n = pair._1.length - templateRest.length
      if (n < 0) {
        None
      }
      else {
        val (matches, rest) = pair._1.splitAt(n)
        toExprObls(rest -> templateRest) map {
          case matchings => matching(i, matches) ++ matchings
        }
      }
    }
    // No hole: match heads, then recurse on the tails, merging the bindings.
    case (Seq(expr, exprRest @ _*), Seq(template, templateRest @ _*)) => for {
      matchingsHead <- extract(toExprObl(expr -> template))
      matchingsRest <- extract(toExprObls(exprRest -> templateRest))
    } yield matchingsHead ++ matchingsRest
  }
}
/** Match a sequence of concrete types against template types, element-wise. */
protected def toTypeObls(pair: (Seq[trees.Type], Seq[Type])): MatchObligation = {
  val (tpes, templates) = pair
  extractSeq(tpes, templates)
}

/** Match types against optional template types; absent templates match trivially. */
protected def toOptTypeObls(pair: (Seq[trees.Type], Seq[Option[Type]])): MatchObligation = {
  val obligations = for {
    (tpe, optTemplate) <- pair._1.zip(pair._2)
    template <- optTemplate
  } yield toTypeObl(tpe -> template)
  extract(obligations : _*)
}

/** Match identifier sequences pairwise; lengths must agree. */
protected def toIdObls(pair: (Seq[inox.Identifier], Seq[Identifier])): MatchObligation = {
  // TODO: Change this.
  val (ids, templateIds) = pair
  if (ids.length != templateIds.length) None
  else extract(ids.zip(templateIds).map(toIdObl) : _*)
}

/** Combine obligations: all must succeed, and their bindings are merged. */
protected def extract(pairs: MatchObligation*): MatchObligation = {
  pairs.foldLeft(Option(empty)) { (accOpt, obligation) =>
    for {
      acc   <- accOpt
      extra <- obligation
    } yield acc ++ extra
  }
}
def extract(expr: trees.Expr, template: Expression): MatchObligation = {
val success = Some(empty)
template match {
case ExpressionHole(index) =>
return Some(Map(index -> expr))
case TypeAnnotationOperation(templateInner, templateType) =>
return extract(toTypeObl(expr.getType -> templateType), toExprObl(expr -> templateInner))
case _ => ()
}
expr match {
// Variables
case trees.Variable(inoxId, _, _) => template match {
case Variable(templateId) => extract(toIdObl(inoxId -> templateId))
case _ => fail
}
// Control structures.
case trees.IfExpr(cond, thenn, elze) => template match {
case Operation("IfThenElse", Seq(templateCond, templateThenn, templateElze)) =>
extract(toExprObl(cond -> templateCond), toExprObl(thenn -> templateThenn), toExprObl(elze -> templateElze))
case _ => fail
}
case trees.Assume(pred, body) => template match {
case Operation("Assume", Seq(templatePred, templateBody)) =>
extract(toExprObl(pred -> templatePred), toExprObl(body -> templateBody))
case _ => fail
}
case trees.Let(vd, value, body) => template match {
case Let(Seq((templateId, optTemplateType, templateValue), rest @ _*), templateBody) => {
val templateRest = rest match {
case Seq() => templateBody
case _ => Let(rest, templateBody)
}
extract(
toExprObl(value -> templateValue),
toOptTypeObl(vd.getType -> optTemplateType),
toIdObl(vd.id -> templateId),
toExprObl(body -> templateRest))
}
case _ => fail
}
case trees.Lambda(args, body) => template match {
case Abstraction(Lambda, templateArgs, templateBody) =>
extract(
toOptTypeObls(args.map(_.getType) -> templateArgs.map(_._2)),
toIdObls(args.map(_.id) -> templateArgs.map(_._1)),
toExprObl(body -> templateBody))
case _ => fail
}
case trees.Forall(args, body) => template match {
case Abstraction(Forall, templateArgs, templateBody) =>
extract(
toOptTypeObls(args.map(_.getType) -> templateArgs.map(_._2)),
toIdObls(args.map(_.id) -> templateArgs.map(_._1)),
toExprObl(body -> templateBody))
case _ => fail
}
case trees.Choose(arg, pred) => template match {
case Abstraction(Choose, Seq((templateId, optTemplateType), rest @ _*), templatePred) => {
val templateRest = rest match {
case Seq() => templatePred
case _ => Abstraction(Choose, rest, templatePred)
}
extract(
toOptTypeObl(arg.getType -> optTemplateType),
toIdObl(arg.id -> templateId),
toExprObl(pred -> templateRest))
}
case _ => fail
}
// Functions.
case trees.Application(callee, args) => template match {
case Application(templateCallee, templateArgs) =>
extract(toExprObl(callee -> templateCallee), toExprObls(args -> templateArgs))
case _ => fail
}
case trees.FunctionInvocation(id, tpes, args) => template match {
case Application(TypedFunDef(fd, optTemplatesTypes), templateArgs) if (id == fd.id) => {
optTemplatesTypes match {
case None => extract(toExprObls(args -> templateArgs))
case Some(templateTypes) => extract(toExprObls(args -> templateArgs), toTypeObls(tpes -> templateTypes))
case _ => fail
}
}
case Application(TypeApplication(ExpressionHole(index), templateTypes), templateArgs) => for {
matchings <- extract(toTypeObls(tpes -> templateTypes), toExprObls(args -> templateArgs))
} yield matching(index, id) ++ matchings
case Application(ExpressionHole(index), templateArgs) => for {
matchings <- extract(toExprObls(args -> templateArgs))
} yield matching(index, id) ++ matchings
case _ => fail
}
// ADTs.
case trees.ADT(id, tpes, args) => template match {
case Application(TypedConsDef(cons, optTemplatesTypes), templateArgs) if (id == cons.id) => {
optTemplatesTypes match {
case None => extract(toExprObls(args -> templateArgs))
case Some(templateTypes) => extract(toExprObls(args -> templateArgs), toTypeObls(tpes -> templateTypes))
case _ => fail
}
}
case Application(TypeApplication(ExpressionHole(index), templateTypes), templateArgs) => for {
matchings <- extract(toTypeObls(tpes -> templateTypes), toExprObls(args -> templateArgs))
} yield matching(index, id) ++ matchings
case Application(ExpressionHole(index), templateArgs) => for {
matchings <- extract(toExprObls(args -> templateArgs))
} yield matching(index, id) ++ matchings
case _ => fail
}
case trees.ADTSelector(adt, selector) => template match {
case Selection(adtTemplate, FieldHole(index)) => for {
matchings <- extract(toExprObl(adt -> adtTemplate))
} yield matching(index, selector) ++ matchings
case Selection(adtTemplate, Field((cons, vd))) if (vd.id == selector) => // TODO: Handle selectors with the same name.
extract(toExprObl(adt -> adtTemplate))
case _ => fail
}
// Instance checking and casting.
case trees.IsConstructor(inner, id) => template match {
case IsConstructorOperation(templateInner, name) if id.name == name =>
extract(toExprObl(inner -> templateInner))
case _ => fail
}
// Various.
case trees.CharLiteral(char) => template match {
case Literal(CharLiteral(`char`)) => success
case _ => fail
}
case trees.UnitLiteral() => template match {
case Literal(UnitLiteral) => success
case _ => fail
}
case trees.Equals(left, right) => template match {
case Operation("==", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
// Booleans.
case trees.BooleanLiteral(bool) => template match {
case Literal(BooleanLiteral(`bool`)) => success
case _ => fail
}
case trees.And(exprs) => template match {
case BooleanAndOperation(templates) =>
extract(toExprObls(exprs -> templates))
case _ => fail
}
case trees.Or(exprs) => template match {
case BooleanOrOperation(templates) =>
extract(toExprObls(exprs -> templates))
case _ => fail
}
case trees.Implies(left, right) => template match {
case Operation("==>", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.Not(inner) => template match {
case Operation("!", Seq(templateInner)) => extract(toExprObl(inner -> templateInner))
case _ => fail
}
// Strings.
case trees.StringLiteral(string) => template match {
case Literal(StringLiteral(`string`)) => success
case _ => fail
}
case trees.StringConcat(left, right) => template match {
case ConcatenateOperation(templateLeft, templateRight) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.SubString(string, from, to) => template match {
case SubstringOperation(templateString, templateFrom, templateTo) =>
extract(toExprObl(string -> templateString), toExprObl(from -> templateFrom), toExprObl(to -> templateTo))
case _ => fail
}
case trees.StringLength(string) => template match {
case StringLengthOperation(templateString) => extract(toExprObl(string -> templateString))
case _ => fail
}
// Numbers.
case trees.IntegerLiteral(value) => template match {
case Literal(NumericLiteral(string)) if (scala.util.Try(BigInt(string)).toOption == Some(value)) => success
case _ => fail
}
case trees.FractionLiteral(numerator, denominator) => template match {
case Literal(NumericLiteral(string)) if { val n = BigInt(string); n * denominator == numerator } => success
case Literal(DecimalLiteral(w, t, r)) if { val (n, d) = Utils.toFraction(w, t, r); n * denominator == d * numerator } => success
case _ => fail
}
case trees.Plus(left, right) => template match {
case Operation("+", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.Minus(left, right) => template match {
case Operation("-", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.Times(left, right) => template match {
case Operation("*", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.Division(left, right) => template match {
case Operation("/", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.UMinus(inner) => template match {
case Operation("-", Seq(templateInner)) => extract(toExprObl(inner -> templateInner))
case _ => fail
}
case trees.Remainder(left, right) => template match {
case Operation("%", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.Modulo(left, right) => template match {
case Operation("mod", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.LessThan(left, right) => template match {
case Operation("<", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.GreaterThan(left, right) => template match {
case Operation(">", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.LessEquals(left, right) => template match {
case Operation("<=", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.GreaterEquals(left, right) => template match {
case Operation(">=", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
// Bit vectors.
case v@trees.BVLiteral(signed, value, base) => template match {
case Literal(NumericLiteral(string)) if (scala.util.Try(trees.BVLiteral(signed, BigInt(string), base)).toOption == Some(v)) => success
case _ => fail
}
case trees.BVNot(inner) => template match {
case Operation("~", Seq(templateInner)) => extract(toExprObl(inner -> templateInner))
case _ => fail
}
case trees.BVOr(left, right) => template match {
case Operation("|", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
}
case trees.BVAnd(left, right) => template match {
case Operation("&", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.BVXor(left, right) => template match {
case Operation("^", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.BVShiftLeft(left, right) => template match {
case Operation("<<", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.BVAShiftRight(left, right) => template match {
case Operation(">>", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
case trees.BVLShiftRight(left, right) => template match {
case Operation(">>>", Seq(templateLeft, templateRight)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight))
case _ => fail
}
// Tuples.
case trees.Tuple(exprs) => template match {
case Operation("Tuple", templates) =>
extract(toExprObls(exprs -> templates))
case _ => fail
}
case trees.TupleSelect(inner, index) => template match {
case Selection(templateInner, TupleField(`index`)) => extract(toExprObl(inner -> templateInner))
case _ => fail
}
// Sets.
case trees.FiniteSet(elements, tpe) => template match {
case SetConstruction(templatesElements, optTemplateType) =>
extract(toExprObls(elements -> templatesElements), toOptTypeObl(tpe -> optTemplateType))
case _ => fail
}
case trees.SetAdd(set, element) => (set.getType(symbols), template) match {
case (trees.SetType(tpe), SetAddOperation(templateSet, templateElement, optTemplateType)) =>
extract(toExprObl(set -> templateSet), toExprObl(element -> templateElement), toOptTypeObl(tpe -> optTemplateType))
case _ => fail
}
case trees.ElementOfSet(element, set) => (set.getType(symbols), template) match {
case (trees.SetType(tpe), ContainsOperation(templateSet, templateElement, optTemplateType)) =>
extract(toExprObl(set -> templateSet), toExprObl(element -> templateElement), toOptTypeObl(tpe -> optTemplateType))
case _ => fail
}
case trees.SubsetOf(left, right) => (left.getType(symbols), template) match {
case (trees.SetType(tpe), SubsetOperation(templateLeft, templateRight, optTemplateType)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight), toOptTypeObl(tpe -> optTemplateType))
case _ => fail
}
case trees.SetIntersection(left, right) => (left.getType(symbols), template) match {
case (trees.SetType(tpe), SetIntersectionOperation(templateLeft, templateRight, optTemplateType)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight), toOptTypeObl(tpe -> optTemplateType))
case _ => fail
}
case trees.SetDifference(left, right) => (left.getType(symbols), template) match {
case (trees.SetType(tpe), SetDifferenceOperation(templateLeft, templateRight, optTemplateType)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight), toOptTypeObl(tpe -> optTemplateType))
case _ => fail
}
// Bags.
case trees.FiniteBag(mappings, tpe) => template match {
case BagConstruction(Bindings(Seq(), templateMappings), optTemplateType) => {
val (keys, values) = mappings.unzip
val (templatesKeys, templatesValues) = templateMappings.unzip
extract(toExprObls(keys -> templatesKeys), toExprObls(values -> templatesValues), toOptTypeObl(tpe -> optTemplateType))
}
case _ => fail
}
case trees.BagAdd(bag, element) => (bag.getType(symbols), template) match {
case (trees.BagType(tpe), BagAddOperation(templateBag, templateElement, optTemplateType)) =>
extract(toExprObl(bag -> templateBag), toExprObl(element -> templateElement), toOptTypeObl(tpe -> optTemplateType))
case _ => fail
}
case trees.MultiplicityInBag(element, bag) => (bag.getType, template) match {
case (trees.BagType(tpe), BagMultiplicityOperation(templateBag, templateElement, optTemplateType)) =>
extract(toExprObl(element -> templateElement), toExprObl(bag -> templateBag), toOptTypeObl(tpe -> optTemplateType))
case _ => fail
}
case trees.BagIntersection(left, right) => (left.getType, template) match {
case (trees.BagType(tpe), BagIntersectionOperation(templateLeft, templateRight, optTemplateType)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight), toOptTypeObl(tpe -> optTemplateType))
case _ => fail
}
case trees.BagUnion(left, right) => (left.getType, template) match {
case (trees.BagType(tpe), BagUnionOperation(templateLeft, templateRight, optTemplateType)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight), toOptTypeObl(tpe -> optTemplateType))
case _ => fail
}
case trees.BagDifference(left, right) => (left.getType, template) match {
case (trees.BagType(tpe), BagDifferenceOperation(templateLeft, templateRight, optTemplateType)) =>
extract(toExprObl(left -> templateLeft), toExprObl(right -> templateRight), toOptTypeObl(tpe -> optTemplateType))
case _ => fail
}
// Maps.
case trees.FiniteMap(pairs, default, keyType, valueType) => template match {
case MapConstruction(templateDefault, Bindings(Seq(), templatesPairs), optTemplatesTypes) => {
val (optTemplateKeyType, optTemplateValueType) = optTemplatesTypes match {
case Some(Right((k, v))) => (Some(k), Some(v))
case Some(Left(k)) => (Some(k), None)
case None => (None, None)
}
val (keys, values) = pairs.unzip
val (templatesKeys, templatesValues) = templatesPairs.unzip
extract(toExprObls(keys -> templatesKeys), toExprObls(values -> templatesValues),
toOptTypeObl(keyType -> optTemplateKeyType), toOptTypeObl(valueType -> optTemplateValueType), toExprObl(default -> templateDefault))
}
case _ => fail
}
case trees.MapApply(map, key) => (map.getType, template) match {
case (trees.MapType(keyType, valueType), MapApplyOperation(templateMap, templateKey, optTemplatesTypes)) => {
val (optTemplateKeyType, optTemplateValueType) = optTemplatesTypes match {
case Some((k, v)) => (Some(k), Some(v))
case None => (None, None)
}
extract(toExprObl(map -> templateMap), toExprObl(key -> templateKey),
toOptTypeObl(keyType -> optTemplateKeyType), toOptTypeObl(valueType -> optTemplateValueType))
}
case _ => fail
}
case trees.MapUpdated(map, key, value) => (map.getType, template) match {
case (trees.MapType(keyType, valueType), MapUpdatedOperation(templateMap, templateKey, templateValue, optTemplatesTypes)) => {
val (optTemplateKeyType, optTemplateValueType) = optTemplatesTypes match {
case Some((k, v)) => (Some(k), Some(v))
case None => (None, None)
}
extract(toExprObl(map -> templateMap), toExprObl(key -> templateKey), toOptTypeObl(keyType -> optTemplateKeyType),
toExprObl(value -> templateValue), toOptTypeObl(valueType -> optTemplateValueType))
}
case _ => fail
}
case _ => fail
}
}
}
}
|
romac/inox
|
src/main/scala/inox/parsing/ExpressionExtractor.scala
|
Scala
|
apache-2.0
| 23,746
|
/**
* Copyright (C) 2012-2013 Kaj Magnus Lindberg (born 1979)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package controllers
import actions.ApiActions._
import actions.PageActions._
import com.debiki.core._
import com.debiki.core.Prelude._
import debiki._
import java.{util => ju, io => jio}
import play.api._
import play.api.Play.current
import play.api.mvc.{Action => _, _}
import play.api.libs.json.Json.toJson
import play.api.libs.json._
import requests._
import DebikiHttp._
import Utils.ValidationImplicits._
import Utils.{OkHtml, OkXml}
/** Shows pages and individual posts.
*
* Also loads the users permissions on the page, and info on which
* comments the user has authored or rated, and also loads the user's
* comments that are pending approval — although such unapproved comments
* aren't loaded, when other people view the page.
*/
object ViewPageController extends mvc.Controller {

  /** Placeholder the page templates embed where user specific JSON should go;
    * replaced with the real (HTML-encoded) JSON in `viewPostImpl`.
    */
  val HtmlEncodedUserSpecificDataJsonMagicString =
    "__html_encoded_user_specific_data_json__"


  /** Renders the action links (reply, rate, ...) for post `postId`. */
  def showActionLinks(pathIn: PagePath, postId: ActionId) =
    PageGetAction(pathIn) { pageReq =>
      val links = Utils.formHtml(pageReq).actLinks(postId)
      OkHtml(links)
    }


  /** Shows the page at `pathIn`. */
  def viewPost(pathIn: PagePath) = PageGetAction(pathIn) { pageReq =>
    viewPostImpl(pageReq)
  }


  def viewPostImpl(pageReq: PageGetRequest) = {
    val templateHtml = pageReq.dao.renderTemplate(pageReq)

    // Insert user specific data into the HTML.
    // The Scala templates take care to place the <script type="application/json">
    // tag with the magic-string-that-we'll-replace-with-user-specific-data before
    // unsafe data like JSON and HTML for comments and the page title and body.
    // (Uses a match instead of the original `var` + foreach mutation.)
    val pageHtml = ReactJson.userDataJson(pageReq) match {
      case None => templateHtml
      case Some(json) =>
        val htmlEncodedJson = org.owasp.encoder.Encode.forHtmlContent(json.toString)
        org.apache.commons.lang3.StringUtils.replaceOnce(
          templateHtml, HtmlEncodedUserSpecificDataJsonMagicString, htmlEncodedJson)
    }

    Ok(pageHtml) as HTML
  }

}
|
debiki/debiki-server-old
|
app/controllers/ViewPageController.scala
|
Scala
|
agpl-3.0
| 2,715
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.log
import java.io.{File, IOException}
import java.nio.file.{Files, NoSuchFileException}
import java.nio.file.attribute.FileTime
import java.util.concurrent.TimeUnit
import kafka.common.LogSegmentOffsetOverflowException
import kafka.metrics.{KafkaMetricsGroup, KafkaTimer}
import kafka.server.epoch.LeaderEpochCache
import kafka.server.{FetchDataInfo, LogOffsetMetadata}
import kafka.utils._
import org.apache.kafka.common.errors.CorruptRecordException
import org.apache.kafka.common.record.FileRecords.LogOffsetPosition
import org.apache.kafka.common.record._
import org.apache.kafka.common.utils.Time
import scala.collection.JavaConverters._
import scala.math._
/**
* A segment of the log. Each segment has two components: a log and an index. The log is a FileRecords containing
* the actual messages. The index is an OffsetIndex that maps from logical offsets to physical file positions. Each
* segment has a base offset which is an offset <= the least offset of any message in this segment and > any offset in
* any previous segment.
*
* A segment with a base offset of [base_offset] would be stored in two files, a [base_offset].index and a [base_offset].log file.
*
* @param log The file records containing log entries
* @param offsetIndex The offset index
* @param timeIndex The timestamp index
* @param baseOffset A lower bound on the offsets in this segment
* @param indexIntervalBytes The approximate number of bytes between entries in the index
* @param time The time instance
*/
@nonthreadsafe
class LogSegment private[log] (val log: FileRecords,
val offsetIndex: OffsetIndex,
val timeIndex: TimeIndex,
val txnIndex: TransactionIndex,
val baseOffset: Long,
val indexIntervalBytes: Int,
val rollJitterMs: Long,
val maxSegmentMs: Long,
val maxSegmentBytes: Int,
val time: Time) extends Logging {
  // Decides whether a new segment should be rolled before appending
  // `messagesSize` more bytes: roll when the append would push the segment
  // past its byte limit, when the segment is non-empty and the (jittered)
  // time-based roll interval has elapsed, when either index is full, or when
  // `maxOffsetInMessages` can no longer be encoded relative to `baseOffset`.
  def shouldRoll(messagesSize: Int, maxTimestampInMessages: Long, maxOffsetInMessages: Long, now: Long): Boolean = {
    val reachedRollMs = timeWaitedForRoll(now, maxTimestampInMessages) > maxSegmentMs - rollJitterMs
    size > maxSegmentBytes - messagesSize ||
      (size > 0 && reachedRollMs) ||
      offsetIndex.isFull || timeIndex.isFull || !canConvertToRelativeOffset(maxOffsetInMessages)
  }
  // Resizes both the offset index and the time index backing files to
  // `size` bytes.
  def resizeIndexes(size: Int): Unit = {
    offsetIndex.resize(size)
    timeIndex.resize(size)
  }
  // Sanity-checks the offset, time and transaction indexes. A freshly
  // created time index file is truncated to zero entries first so its
  // preallocated contents do not fail the check. Throws NoSuchFileException
  // when the offset index file is missing.
  def sanityCheck(timeIndexFileNewlyCreated: Boolean): Unit = {
    if (offsetIndex.file.exists) {
      offsetIndex.sanityCheck()
      // Resize the time index file to 0 if it is newly created.
      if (timeIndexFileNewlyCreated)
        timeIndex.resize(0)
      timeIndex.sanityCheck()
      txnIndex.sanityCheck()
    }
    else throw new NoSuchFileException(s"Offset index file ${offsetIndex.file.getAbsolutePath} does not exist")
  }
  /* wall-clock time at which this segment object was created */
  private var created = time.milliseconds
  /* the number of bytes since we last added an entry in the offset index */
  private var bytesSinceLastIndexEntry = 0
  /* The timestamp we used for time based log rolling */
  private var rollingBasedTimestamp: Option[Long] = None
  /* The maximum timestamp we see so far */
  @volatile private var maxTimestampSoFar: Long = timeIndex.lastEntry.timestamp
  /* the offset of the record carrying maxTimestampSoFar */
  @volatile private var offsetOfMaxTimestamp: Long = timeIndex.lastEntry.offset
  /* Return the size in bytes of this log segment */
  def size: Int = log.sizeInBytes()
/**
* checks that the argument offset can be represented as an integer offset relative to the baseOffset.
*/
def canConvertToRelativeOffset(offset: Long): Boolean = {
offsetIndex.canAppendOffset(offset)
}
/**
* Append the given messages starting with the given offset. Add
* an entry to the index if needed.
*
* It is assumed this method is being called from within a lock.
*
* @param largestOffset The last offset in the message set
* @param largestTimestamp The largest timestamp in the message set.
* @param shallowOffsetOfMaxTimestamp The offset of the message that has the largest timestamp in the messages to append.
* @param records The log entries to append.
* @return the physical position in the file of the appended records
* @throws LogSegmentOffsetOverflowException if the largest offset causes index offset overflow
*/
  @nonthreadsafe
  def append(largestOffset: Long,
             largestTimestamp: Long,
             shallowOffsetOfMaxTimestamp: Long,
             records: MemoryRecords): Unit = {
    if (records.sizeInBytes > 0) {
      trace(s"Inserting ${records.sizeInBytes} bytes at end offset $largestOffset at position ${log.sizeInBytes} " +
            s"with largest timestamp $largestTimestamp at shallow offset $shallowOffsetOfMaxTimestamp")
      val physicalPosition = log.sizeInBytes()
      // First append to an empty log seeds the time-based-roll reference timestamp.
      if (physicalPosition == 0)
        rollingBasedTimestamp = Some(largestTimestamp)

      // Reject the whole batch if its largest offset would overflow the index.
      ensureOffsetInRange(largestOffset)

      // append the messages
      val appendedBytes = log.append(records)
      trace(s"Appended $appendedBytes to ${log.file} at end offset $largestOffset")
      // Update the in memory max timestamp and corresponding offset.
      if (largestTimestamp > maxTimestampSoFar) {
        maxTimestampSoFar = largestTimestamp
        offsetOfMaxTimestamp = shallowOffsetOfMaxTimestamp
      }
      // append an entry to the index (if needed)
      if (bytesSinceLastIndexEntry > indexIntervalBytes) {
        offsetIndex.append(largestOffset, physicalPosition)
        timeIndex.maybeAppend(maxTimestampSoFar, offsetOfMaxTimestamp)
        bytesSinceLastIndexEntry = 0
      }
      bytesSinceLastIndexEntry += records.sizeInBytes
    }
  }
private def ensureOffsetInRange(offset: Long): Unit = {
if (!canConvertToRelativeOffset(offset))
throw new LogSegmentOffsetOverflowException(this, offset)
}
  // Copies as many whole batches as fit in one read buffer from `records`
  // (starting at `position`) into this segment, stopping early at any batch
  // whose last offset would overflow the offset index. Returns the number of
  // bytes appended (0 when the first batch already overflows).
  private def appendChunkFromFile(records: FileRecords, position: Int, bufferSupplier: BufferSupplier): Int = {
    var bytesToAppend = 0
    var maxTimestamp = Long.MinValue
    var offsetOfMaxTimestamp = Long.MinValue
    var maxOffset = Long.MinValue
    var readBuffer = bufferSupplier.get(1024 * 1024)

    // A batch qualifies while its offsets fit the index and the accumulated
    // bytes fit the buffer (the first batch is always allowed to size it).
    def canAppend(batch: RecordBatch) =
      canConvertToRelativeOffset(batch.lastOffset) &&
        (bytesToAppend == 0 || bytesToAppend + batch.sizeInBytes < readBuffer.capacity)

    // find all batches that are valid to be appended to the current log segment and
    // determine the maximum offset and timestamp
    val nextBatches = records.batchesFrom(position).asScala.iterator
    for (batch <- nextBatches.takeWhile(canAppend)) {
      if (batch.maxTimestamp > maxTimestamp) {
        maxTimestamp = batch.maxTimestamp
        offsetOfMaxTimestamp = batch.lastOffset
      }
      maxOffset = batch.lastOffset
      bytesToAppend += batch.sizeInBytes
    }

    if (bytesToAppend > 0) {
      // Grow buffer if needed to ensure we copy at least one batch
      if (readBuffer.capacity < bytesToAppend)
        readBuffer = bufferSupplier.get(bytesToAppend)

      readBuffer.limit(bytesToAppend)
      records.readInto(readBuffer, position)

      append(maxOffset, maxTimestamp, offsetOfMaxTimestamp, MemoryRecords.readableRecords(readBuffer))
    }

    bufferSupplier.release(readBuffer)
    bytesToAppend
  }
/**
* Append records from a file beginning at the given position until either the end of the file
* is reached or an offset is found which is too large to convert to a relative offset for the indexes.
*
* @return the number of bytes appended to the log (may be less than the size of the input if an
* offset is encountered which would overflow this segment)
*/
  def appendFromFile(records: FileRecords, start: Int): Int = {
    var position = start
    val bufferSupplier: BufferSupplier = new BufferSupplier.GrowableBufferSupplier
    while (position < start + records.sizeInBytes) {
      val bytesAppended = appendChunkFromFile(records, position, bufferSupplier)
      // A zero-byte chunk means the next batch's offset overflows this
      // segment, so stop and report how much was copied.
      if (bytesAppended == 0)
        return position - start
      position += bytesAppended
    }
    position - start
  }
@nonthreadsafe
def updateTxnIndex(completedTxn: CompletedTxn, lastStableOffset: Long) {
if (completedTxn.isAborted) {
trace(s"Writing aborted transaction $completedTxn to transaction index, last stable offset is $lastStableOffset")
txnIndex.append(new AbortedTxn(completedTxn, lastStableOffset))
}
}
  // Replays a record batch into the producer state manager: updates the
  // producer's state for idempotence tracking, records any transaction the
  // batch completes in the transaction index, and advances the state
  // manager's end offset past the batch.
  private def updateProducerState(producerStateManager: ProducerStateManager, batch: RecordBatch): Unit = {
    if (batch.hasProducerId) {
      val producerId = batch.producerId
      val appendInfo = producerStateManager.prepareUpdate(producerId, isFromClient = false)
      val maybeCompletedTxn = appendInfo.append(batch)
      producerStateManager.update(appendInfo)
      maybeCompletedTxn.foreach { completedTxn =>
        val lastStableOffset = producerStateManager.completeTxn(completedTxn)
        updateTxnIndex(completedTxn, lastStableOffset)
      }
    }
    producerStateManager.updateMapEndOffset(batch.lastOffset + 1)
  }
/**
* Find the physical file position for the first message with offset >= the requested offset.
*
* The startingFilePosition argument is an optimization that can be used if we already know a valid starting position
* in the file higher than the greatest-lower-bound from the index.
*
* @param offset The offset we want to translate
* @param startingFilePosition A lower bound on the file position from which to begin the search. This is purely an optimization and
* when omitted, the search will begin at the position in the offset index.
* @return The position in the log storing the message with the least offset >= the requested offset and the size of the
* message or null if no message meets this criteria.
*/
@threadsafe
private[log] def translateOffset(offset: Long, startingFilePosition: Int = 0): LogOffsetPosition = {
val mapping = offsetIndex.lookup(offset)
log.searchForOffsetWithSize(offset, max(mapping.position, startingFilePosition))
}
/**
* Read a message set from this segment beginning with the first offset >= startOffset. The message set will include
* no more than maxSize bytes and will end before maxOffset if a maxOffset is specified.
*
* @param startOffset A lower bound on the first offset to include in the message set we read
* @param maxOffset An optional maximum offset for the message set we read
* @param maxSize The maximum number of bytes to include in the message set we read
* @param maxPosition The maximum position in the log segment that should be exposed for read
* @param minOneMessage If this is true, the first message will be returned even if it exceeds `maxSize` (if one exists)
*
* @return The fetched data and the offset metadata of the first message whose offset is >= startOffset,
* or null if the startOffset is larger than the largest offset in this log
*/
  @threadsafe
  def read(startOffset: Long, maxOffset: Option[Long], maxSize: Int, maxPosition: Long = size,
           minOneMessage: Boolean = false): FetchDataInfo = {
    if (maxSize < 0)
      throw new IllegalArgumentException(s"Invalid max size $maxSize for log read from segment $log")

    val logSize = log.sizeInBytes // this may change, need to save a consistent copy
    val startOffsetAndSize = translateOffset(startOffset)

    // if the start position is already off the end of the log, return null
    if (startOffsetAndSize == null)
      return null

    val startPosition = startOffsetAndSize.position
    val offsetMetadata = new LogOffsetMetadata(startOffset, this.baseOffset, startPosition)

    // When minOneMessage is set, allow the first message to exceed maxSize.
    val adjustedMaxSize =
      if (minOneMessage) math.max(maxSize, startOffsetAndSize.size)
      else maxSize

    // return a log segment but with zero size in the case below
    if (adjustedMaxSize == 0)
      return FetchDataInfo(offsetMetadata, MemoryRecords.EMPTY)

    // calculate the length of the message set to read based on whether or not they gave us a maxOffset
    val fetchSize: Int = maxOffset match {
      case None =>
        // no max offset, just read until the max position
        min((maxPosition - startPosition).toInt, adjustedMaxSize)
      case Some(offset) =>
        // there is a max offset, translate it to a file position and use that to calculate the max read size;
        // when the leader of a partition changes, it's possible for the new leader's high watermark to be less than the
        // true high watermark in the previous leader for a short window. In this window, if a consumer fetches on an
        // offset between new leader's high watermark and the log end offset, we want to return an empty response.
        if (offset < startOffset)
          return FetchDataInfo(offsetMetadata, MemoryRecords.EMPTY, firstEntryIncomplete = false)
        val mapping = translateOffset(offset, startPosition)
        val endPosition =
          if (mapping == null)
            logSize // the max offset is off the end of the log, use the end of the file
          else
            mapping.position
        min(min(maxPosition, endPosition) - startPosition, adjustedMaxSize).toInt
    }

    FetchDataInfo(offsetMetadata, log.slice(startPosition, fetchSize),
      firstEntryIncomplete = adjustedMaxSize < startOffsetAndSize.size)
  }
def fetchUpperBoundOffset(startOffsetPosition: OffsetPosition, fetchSize: Int): Option[Long] =
offsetIndex.fetchUpperBoundOffset(startOffsetPosition, fetchSize).map(_.offset)
  /**
   * Run recovery on the given segment. This will rebuild the index from the log file and lop off any invalid bytes
   * from the end of the log and index.
   *
   * @param producerStateManager Producer state corresponding to the segment's base offset. This is needed to recover
   *                             the transaction index.
   * @param leaderEpochCache Optionally a cache for updating the leader epoch during recovery.
   * @return The number of bytes truncated from the log
   * @throws LogSegmentOffsetOverflowException if the log segment contains an offset that causes the index offset to overflow
   */
  @nonthreadsafe
  def recover(producerStateManager: ProducerStateManager, leaderEpochCache: Option[LeaderEpochCache] = None): Int = {
    // Start from a clean slate: all three indexes are rebuilt from the log file below.
    offsetIndex.reset()
    timeIndex.reset()
    txnIndex.reset()
    var validBytes = 0
    var lastIndexEntry = 0
    maxTimestampSoFar = RecordBatch.NO_TIMESTAMP
    try {
      for (batch <- log.batches.asScala) {
        batch.ensureValid()
        ensureOffsetInRange(batch.lastOffset)

        // The max timestamp is exposed at the batch level, so no need to iterate the records
        if (batch.maxTimestamp > maxTimestampSoFar) {
          maxTimestampSoFar = batch.maxTimestamp
          offsetOfMaxTimestamp = batch.lastOffset
        }

        // Build offset index (sparse: one entry roughly every indexIntervalBytes)
        if (validBytes - lastIndexEntry > indexIntervalBytes) {
          offsetIndex.append(batch.lastOffset, validBytes)
          timeIndex.maybeAppend(maxTimestampSoFar, offsetOfMaxTimestamp)
          lastIndexEntry = validBytes
        }
        validBytes += batch.sizeInBytes()

        // Leader-epoch and producer state are only carried by v2+ message format batches.
        if (batch.magic >= RecordBatch.MAGIC_VALUE_V2) {
          leaderEpochCache.foreach { cache =>
            if (batch.partitionLeaderEpoch > cache.latestEpoch) // this is to avoid unnecessary warning in cache.assign()
              cache.assign(batch.partitionLeaderEpoch, batch.baseOffset)
          }
          updateProducerState(producerStateManager, batch)
        }
      }
    } catch {
      case e: CorruptRecordException =>
        // A corrupt batch ends the scan: everything before it is kept, the rest is truncated below.
        warn("Found invalid messages in log segment %s at byte offset %d: %s."
          .format(log.file.getAbsolutePath, validBytes, e.getMessage))
    }
    val truncated = log.sizeInBytes - validBytes
    if (truncated > 0)
      debug(s"Truncated $truncated invalid bytes at the end of segment ${log.file.getAbsoluteFile} during recovery")

    log.truncateTo(validBytes)
    offsetIndex.trimToValidSize()
    // A normally closed segment always appends the biggest timestamp ever seen into log segment, we do this as well.
    timeIndex.maybeAppend(maxTimestampSoFar, offsetOfMaxTimestamp, skipFullCheck = true)
    timeIndex.trimToValidSize()
    truncated
  }
private def loadLargestTimestamp() {
// Get the last time index entry. If the time index is empty, it will return (-1, baseOffset)
val lastTimeIndexEntry = timeIndex.lastEntry
maxTimestampSoFar = lastTimeIndexEntry.timestamp
offsetOfMaxTimestamp = lastTimeIndexEntry.offset
val offsetPosition = offsetIndex.lookup(lastTimeIndexEntry.offset)
// Scan the rest of the messages to see if there is a larger timestamp after the last time index entry.
val maxTimestampOffsetAfterLastEntry = log.largestTimestampAfter(offsetPosition.position)
if (maxTimestampOffsetAfterLastEntry.timestamp > lastTimeIndexEntry.timestamp) {
maxTimestampSoFar = maxTimestampOffsetAfterLastEntry.timestamp
offsetOfMaxTimestamp = maxTimestampOffsetAfterLastEntry.offset
}
}
/**
* Check whether the last offset of the last batch in this segment overflows the indexes.
*/
def hasOverflow: Boolean = {
val nextOffset = readNextOffset
nextOffset > baseOffset && !canConvertToRelativeOffset(nextOffset - 1)
}
  /** Collect aborted transactions recorded in the transaction index within [fetchOffset, upperBoundOffset). */
  def collectAbortedTxns(fetchOffset: Long, upperBoundOffset: Long): TxnIndexSearchResult =
    txnIndex.collectAbortedTxns(fetchOffset, upperBoundOffset)
override def toString = "LogSegment(baseOffset=" + baseOffset + ", size=" + size + ")"
  /**
   * Truncate off all index and log entries with offsets >= the given offset.
   * If the given offset is larger than the largest message in this segment, do nothing.
   *
   * @param offset The offset to truncate to
   * @return The number of log bytes truncated
   */
  @nonthreadsafe
  def truncateTo(offset: Long): Int = {
    // Do offset translation before truncating the index to avoid needless scanning
    // in case we truncate the full index
    val mapping = translateOffset(offset)
    offsetIndex.truncateTo(offset)
    timeIndex.truncateTo(offset)
    txnIndex.truncateTo(offset)

    // After truncation, reset and allocate more space for the (new currently active) index
    offsetIndex.resize(offsetIndex.maxIndexSize)
    timeIndex.resize(timeIndex.maxIndexSize)

    // mapping == null means the target offset is beyond the end of this segment: no log bytes to cut.
    val bytesTruncated = if (mapping == null) 0 else log.truncateTo(mapping.position)
    if (log.sizeInBytes == 0) {
      // The segment is now empty; treat it as newly created for time-based rolling purposes.
      created = time.milliseconds
      rollingBasedTimestamp = None
    }

    bytesSinceLastIndexEntry = 0
    // The largest timestamp may have been truncated away; recompute it if one was known.
    if (maxTimestampSoFar >= 0)
      loadLargestTimestamp()
    bytesTruncated
  }
/**
* Calculate the offset that would be used for the next message to be append to this segment.
* Note that this is expensive.
*/
@threadsafe
def readNextOffset: Long = {
val fetchData = read(offsetIndex.lastOffset, None, log.sizeInBytes)
if (fetchData == null)
baseOffset
else
fetchData.records.batches.asScala.lastOption
.map(_.nextOffset)
.getOrElse(baseOffset)
}
  /**
   * Flush this log segment to disk.
   *
   * Flushes the log file first, then each index, timing the whole operation
   * for the LogFlushStats metrics.
   */
  @threadsafe
  def flush() {
    LogFlushStats.logFlushTimer.time {
      log.flush()
      offsetIndex.flush()
      timeIndex.flush()
      txnIndex.flush()
    }
  }
/**
* Update the directory reference for the log and indices in this segment. This would typically be called after a
* directory is renamed.
*/
def updateDir(dir: File): Unit = {
log.setFile(new File(dir, log.file.getName))
offsetIndex.file = new File(dir, offsetIndex.file.getName)
timeIndex.file = new File(dir, timeIndex.file.getName)
txnIndex.file = new File(dir, txnIndex.file.getName)
}
/**
* Change the suffix for the index and log file for this log segment
* IOException from this method should be handled by the caller
*/
def changeFileSuffixes(oldSuffix: String, newSuffix: String) {
log.renameTo(new File(CoreUtils.replaceSuffix(log.file.getPath, oldSuffix, newSuffix)))
offsetIndex.renameTo(new File(CoreUtils.replaceSuffix(offsetIndex.file.getPath, oldSuffix, newSuffix)))
timeIndex.renameTo(new File(CoreUtils.replaceSuffix(timeIndex.file.getPath, oldSuffix, newSuffix)))
txnIndex.renameTo(new File(CoreUtils.replaceSuffix(txnIndex.file.getPath, oldSuffix, newSuffix)))
}
  /**
   * Append the largest time index entry to the time index and trim the log and indexes.
   *
   * The time index entry appended will be used to decide when to delete the segment.
   */
  def onBecomeInactiveSegment() {
    // Record the final (largest) timestamp; skipFullCheck because this append is forced.
    timeIndex.maybeAppend(maxTimestampSoFar, offsetOfMaxTimestamp, skipFullCheck = true)
    // Release preallocated space now that the segment will no longer grow.
    offsetIndex.trimToValidSize()
    timeIndex.trimToValidSize()
    log.trim()
  }
/**
* The time this segment has waited to be rolled.
* If the first message batch has a timestamp we use its timestamp to determine when to roll a segment. A segment
* is rolled if the difference between the new batch's timestamp and the first batch's timestamp exceeds the
* segment rolling time.
* If the first batch does not have a timestamp, we use the wall clock time to determine when to roll a segment. A
* segment is rolled if the difference between the current wall clock time and the segment create time exceeds the
* segment rolling time.
*/
def timeWaitedForRoll(now: Long, messageTimestamp: Long) : Long = {
// Load the timestamp of the first message into memory
if (rollingBasedTimestamp.isEmpty) {
val iter = log.batches.iterator()
if (iter.hasNext)
rollingBasedTimestamp = Some(iter.next().maxTimestamp)
}
rollingBasedTimestamp match {
case Some(t) if t >= 0 => messageTimestamp - t
case _ => now - created
}
}
  /**
   * Search the message offset based on timestamp and offset.
   *
   * This method returns an option of TimestampOffset. The returned value is determined using the following ordered list of rules:
   *
   * - If all the messages in the segment have smaller offsets, return None
   * - If all the messages in the segment have smaller timestamps, return None
   * - If all the messages in the segment have larger timestamps, or no message in the segment has a timestamp
   *   the returned offset will be max(the base offset of the segment, startingOffset) and the timestamp will be Message.NoTimestamp.
   * - Otherwise, return an option of TimestampOffset. The offset is the offset of the first message whose timestamp
   *   is greater than or equals to the target timestamp and whose offset is greater than or equals to the startingOffset.
   *
   * This method only returns None when 1) all messages' offset < startingOffset or 2) the log is not empty but we did not
   * see any message when scanning the log from the indexed position. The latter could happen if the log is truncated
   * after we get the indexed position but before we scan the log from there. In this case we simply return None and the
   * caller will need to check on the truncated log and maybe retry or even do the search on another log segment.
   *
   * @param timestamp The timestamp to search for.
   * @param startingOffset The starting offset to search.
   * @return the timestamp and offset of the first message that meets the requirements. None will be returned if there is no such message.
   */
  def findOffsetByTimestamp(timestamp: Long, startingOffset: Long = baseOffset): Option[TimestampOffset] = {
    // Get the index entry with a timestamp less than or equal to the target timestamp
    val timestampOffset = timeIndex.lookup(timestamp)
    // Start the linear scan at whichever is later: the index hint or the caller's lower bound.
    val position = offsetIndex.lookup(math.max(timestampOffset.offset, startingOffset)).position

    // Search the timestamp
    Option(log.searchForTimestamp(timestamp, position, startingOffset)).map { timestampAndOffset =>
      TimestampOffset(timestampAndOffset.timestamp, timestampAndOffset.offset)
    }
  }
  /**
   * Close this log segment.
   *
   * Best-effort: every step is wrapped in `swallow` so that a failure closing one
   * resource does not prevent the others from being closed.
   */
  def close() {
    // Persist the largest timestamp before closing, mirroring onBecomeInactiveSegment.
    CoreUtils.swallow(timeIndex.maybeAppend(maxTimestampSoFar, offsetOfMaxTimestamp, skipFullCheck = true), this)
    CoreUtils.swallow(offsetIndex.close(), this)
    CoreUtils.swallow(timeIndex.close(), this)
    CoreUtils.swallow(log.close(), this)
    CoreUtils.swallow(txnIndex.close(), this)
  }
  /**
   * Close file handlers used by the log segment but don't write to disk. This is used when the disk may have failed.
   */
  def closeHandlers() {
    CoreUtils.swallow(offsetIndex.closeHandler(), this)
    CoreUtils.swallow(timeIndex.closeHandler(), this)
    CoreUtils.swallow(log.closeHandlers(), this)
    // NOTE(review): unlike the indexes above this calls close() rather than a handler-only
    // close — confirm TransactionIndex has no closeHandler equivalent.
    CoreUtils.swallow(txnIndex.close(), this)
  }
  /**
   * Delete this log segment from the filesystem.
   */
  def deleteIfExists() {
    // Delete a single file and log the outcome; IOExceptions are rethrown with the
    // file type and path included for easier diagnosis.
    def delete(delete: () => Boolean, fileType: String, file: File, logIfMissing: Boolean): Unit = {
      try {
        if (delete())
          info(s"Deleted $fileType ${file.getAbsolutePath}.")
        else if (logIfMissing)
          info(s"Failed to delete $fileType ${file.getAbsolutePath} because it does not exist.")
      }
      catch {
        case e: IOException => throw new IOException(s"Delete of $fileType ${file.getAbsolutePath} failed.", e)
      }
    }

    // tryAll attempts every deletion even if an earlier one throws.
    CoreUtils.tryAll(Seq(
      () => delete(log.deleteIfExists _, "log", log.file, logIfMissing = true),
      () => delete(offsetIndex.deleteIfExists _, "offset index", offsetIndex.file, logIfMissing = true),
      () => delete(timeIndex.deleteIfExists _, "time index", timeIndex.file, logIfMissing = true),
      // The transaction index file is optional, so a missing file is not worth logging.
      () => delete(txnIndex.deleteIfExists _, "transaction index", txnIndex.file, logIfMissing = false)
    ))
  }
  /**
   * The last modified time of this log segment as a unix time stamp (in milliseconds),
   * read from the log file on disk.
   */
  def lastModified = log.file.lastModified
  /**
   * The largest timestamp this segment contains.
   * Falls back to the file's last-modified time when no message timestamp is known
   * (maxTimestampSoFar < 0).
   */
  def largestTimestamp = if (maxTimestampSoFar >= 0) maxTimestampSoFar else lastModified
  /**
   * Change the last modified time for this log segment.
   *
   * Updates the mtime of the log file and the offset/time index files.
   */
  def lastModified_=(ms: Long) = {
    val fileTime = FileTime.fromMillis(ms)
    Files.setLastModifiedTime(log.file.toPath, fileTime)
    Files.setLastModifiedTime(offsetIndex.file.toPath, fileTime)
    Files.setLastModifiedTime(timeIndex.file.toPath, fileTime)
    // NOTE(review): txnIndex.file's mtime is not updated here, unlike updateDir /
    // changeFileSuffixes which handle all four files — confirm this is intentional.
  }
}
object LogSegment {

  /**
   * Open (or create) a log segment rooted at `dir` with the given base offset.
   *
   * @param dir directory containing the segment files
   * @param baseOffset the segment's base offset, used to name all of its files
   * @param config log configuration (index sizes, roll settings, ...)
   * @param time clock used for roll timing
   * @param fileAlreadyExists whether the log file is expected to exist already
   * @param initFileSize initial size for a preallocated log file
   * @param preallocate whether to preallocate the log file
   * @param fileSuffix optional suffix (e.g. ".cleaned") applied to all segment files
   */
  def open(dir: File, baseOffset: Long, config: LogConfig, time: Time, fileAlreadyExists: Boolean = false,
           initFileSize: Int = 0, preallocate: Boolean = false, fileSuffix: String = ""): LogSegment = {
    val maxIndexSize = config.maxIndexSize
    new LogSegment(
      FileRecords.open(Log.logFile(dir, baseOffset, fileSuffix), fileAlreadyExists, initFileSize, preallocate),
      new OffsetIndex(Log.offsetIndexFile(dir, baseOffset, fileSuffix), baseOffset = baseOffset, maxIndexSize = maxIndexSize),
      new TimeIndex(Log.timeIndexFile(dir, baseOffset, fileSuffix), baseOffset = baseOffset, maxIndexSize = maxIndexSize),
      new TransactionIndex(baseOffset, Log.transactionIndexFile(dir, baseOffset, fileSuffix)),
      baseOffset,
      indexIntervalBytes = config.indexInterval,
      rollJitterMs = config.randomSegmentJitter,
      maxSegmentMs = config.segmentMs,
      maxSegmentBytes = config.segmentSize,
      time)
  }

  /** Delete all files of the segment with the given base offset and suffix, if they exist. */
  def deleteIfExists(dir: File, baseOffset: Long, fileSuffix: String = ""): Unit = {
    Log.deleteFileIfExists(Log.offsetIndexFile(dir, baseOffset, fileSuffix))
    Log.deleteFileIfExists(Log.timeIndexFile(dir, baseOffset, fileSuffix))
    Log.deleteFileIfExists(Log.transactionIndexFile(dir, baseOffset, fileSuffix))
    Log.deleteFileIfExists(Log.logFile(dir, baseOffset, fileSuffix))
  }
}
/** Metrics holder timing log flushes (rate and duration in milliseconds). */
object LogFlushStats extends KafkaMetricsGroup {
  val logFlushTimer = new KafkaTimer(newTimer("LogFlushRateAndTimeMs", TimeUnit.MILLISECONDS, TimeUnit.SECONDS))
}
|
Esquive/kafka
|
core/src/main/scala/kafka/log/LogSegment.scala
|
Scala
|
apache-2.0
| 28,823
|
package controllers
import java.sql.Timestamp
import java.util.Date
import api.API
import constants.Constants
import http.WS
import models.{User, DBUtils}
import play.api.data.Form
import play.api.data.Forms._
import play.api.libs.json._
import play.api.mvc.{Action, Controller}
import scala.concurrent.Future
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.json.Reads._
import play.api.libs.functional.syntax._
object Application extends Controller {

  /** Form accepting a user-supplied key of 4 to 8 characters. */
  val form = Form(single("key" -> nonEmptyText(minLength = 4, maxLength = 8)))

  /** Render the landing page with the key form. */
  def index = Action {
    Ok(views.html.index(form))
  }

  /** Validate the submitted key and start the OAuth2 flow for it. */
  def key() = Action { implicit request =>
    form.bindFromRequest().fold(
      hasErrors => BadRequest(views.html.index(hasErrors)),
      success => Redirect(routes.Application.oauth2(success))
    )
  }

  /** Converts a parameter map into `name=value` strings, ready to be joined into a query/body string. */
  implicit class MapConverter(rMap: Map[String, String]) {
    def convert: List[String] = rMap.map { case (name, value) => s"$name=$value" }.toList
  }

  /** Redirect the user to Google's OAuth2 consent screen, carrying the user id in `state`. */
  def oauth2(userId: String) = Action {
    // NOTE(review): parameter values are not URL-encoded; this works only while
    // client_id/redirect_uri contain no reserved characters — consider URLEncoder.
    val params = Map[String, String](
      ("scope" -> "https://www.googleapis.com/auth/calendar"),
      ("state" -> userId), // userId is already a String; the previous .toString was redundant
      ("response_type" -> "code"),
      ("client_id" -> s"${Constants.GoogleOauth.client_id}"),
      ("redirect_uri" -> s"${Constants.GoogleOauth.redirectURI}"),
      ("access_type" -> "offline"),
      ("approval_prompt" -> "force")
    ).convert.mkString("?", "&", "")
    Redirect(s"${Constants.GoogleOauth.GoogleOauth2}$params")
  }

  /** OAuth2 callback: dispatch on whether Google returned a code or an error. */
  def oauth2callback(state: String, code: Option[String], error: Option[String]) = Action {
    code match {
      case Some(code) => Redirect(routes.Application.onCode(state, code))
      case None => {
        error match {
          case Some(err) => Redirect(routes.Application.index()).flashing("failure" -> s"Google server error, error: $err")
          case None => Redirect(routes.Application.index())
        }
      }
    }
  }

  /** Exchange the authorization code for tokens and persist them for `state` (the user key). */
  def onCode(state: String, code: String) = Action.async {
    val body = Map[String, String](
      ("code" -> s"$code"),
      ("client_id" -> s"${Constants.GoogleOauth.client_id}"),
      ("client_secret" -> s"${Constants.GoogleOauth.client_secret}"),
      ("redirect_uri" -> s"${Constants.GoogleOauth.redirectURI}"),
      ("grant_type" -> "authorization_code")
    )
    WS.client.url(Constants.GoogleOauth.TokenEndpoint)
      .withHeaders("Content-Type" -> "application/x-www-form-urlencoded; charset=utf-8")
      .post(body.convert.mkString("", "&", ""))
      .flatMap { response =>
        val jsonBody = Json.parse(response.body)
        val user = User(state, (jsonBody \\ "access_token").as[String],
          (jsonBody \\ "refresh_token").as[String], (jsonBody \\ "expires_in").as[Long],
          new Timestamp(new Date().getTime))
        // BUG FIX: the original discarded the Future returned by `recover`, so database
        // failures escaped unhandled. Chain the recovery into the returned Future instead.
        DBUtils.saveNew(user)
          .map(_ => Ok(s"Done ${user.toString}"))
          .recover { case th => Ok(s"Failed, reason: ${th.getMessage}") }
      }
  }

  /** Use the stored refresh token to obtain a new access token and persist it. */
  def refreshToken(state: String, refreshToken: String) = Action.async {
    val body = Map[String, String](
      ("client_id" -> Constants.GoogleOauth.client_id),
      ("client_secret" -> Constants.GoogleOauth.client_secret),
      ("refresh_token" -> refreshToken),
      ("grant_type" -> "refresh_token")
    )
    WS.client.url(Constants.GoogleOauth.TokenEndpoint)
      .withHeaders("Content-Type" -> "application/x-www-form-urlencoded; charset=utf-8")
      .post(body.convert.mkString("", "&", ""))
      .flatMap { response =>
        val jsonBody = Json.parse(response.body)
        val user = User(state, (jsonBody \\ "access_token").as[String],
          refreshToken, (jsonBody \\ "expires_in").as[Long], new Timestamp(new Date().getTime))
        // BUG FIX: chain the recovery instead of discarding the recovered Future.
        DBUtils.saveNew(user)
          .map(_ => Ok(s"refresh done ${user.toString}"))
          .recover { case th => Ok(s"Database operation failed, reason ${th.getMessage}") }
      }
  }

  /** Request payload for `events`: user key plus the time window to query. */
  case class EventQuery(key: String, timeMin: String, timeMax: String)

  implicit val eqReads: Reads[EventQuery] = (
    (JsPath \\ "key").read[String] and
      (JsPath \\ "timeMin").read[String] and (JsPath \\ "timeMax").read[String]
    )(EventQuery.apply _)

  /**
   * Fetch calendar events for the user between timeMin and timeMax.
   * If the stored access token is stale, refresh it first and retry with the new token.
   */
  def events() = Action.async(parse.json) { implicit request =>
    request.body.validate[EventQuery] match {
      case success: JsSuccess[EventQuery] => {
        val eventQuery = success.value

        // Query the Calendar API with the given access token, mapping failures to a 400.
        def fetchEvents(accessToken: String) =
          API.events(accessToken, eventQuery.timeMin, eventQuery.timeMax)
            .map(response => Ok(Json.parse(response.body)))
            .recover { case th => BadRequest(Json.obj("errors" -> th.getMessage)) }

        val result = DBUtils.getUser(eventQuery.key).flatMap { user =>
          DBUtils.checkRefreshRequired(user.key).flatMap { status =>
            if (status) {
              // Token still usable: query the API directly.
              fetchEvents(user.accessToken)
            } else {
              // Token expired: refresh it, reload the user, then query the API.
              API.refresh(user.refreshToken, user.key)
                .flatMap { _ =>
                  DBUtils.getUser(eventQuery.key)
                    .flatMap(refreshed => fetchEvents(refreshed.accessToken))
                    .recover { case th => BadRequest(Json.obj("errors" -> "DB action after refresh failed")) }
                }
                .recover { case th => BadRequest(Json.obj("errors" -> "Refresh action failed")) }
            }
          }
        }
        // BUG FIX: the original discarded the Futures returned by `recover`, so a missing
        // user produced an unhandled failure. Chain the fallback onto the returned Future.
        result.recover { case th => BadRequest(Json.obj("errors" -> s"No user with the key: ${eventQuery.key}")) }
      }
      case error: JsError => Future(BadRequest(Json.obj("errors" -> error.errors.mkString(","))))
    }
  }

  /** Report whether a token refresh is needed for the given key (raw boolean as text). */
  def check(key: String) = Action.async {
    DBUtils.checkRefreshRequired(key).map(status => Ok(status.toString))
  }
}
|
pamu/play-freebusy
|
app/controllers/Application.scala
|
Scala
|
apache-2.0
| 6,283
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.integrationtest.checklist
import org.apache.log4j.Logger
import org.apache.gearpump.integrationtest.hadoop.HadoopCluster._
import org.apache.gearpump.integrationtest.kafka.KafkaCluster._
import org.apache.gearpump.integrationtest.kafka.{ResultVerifier, SimpleKafkaReader}
import org.apache.gearpump.integrationtest.{TestSpecBase, Util}
/**
 * Checks message delivery consistency, like at-least-once, and exactly-once.
 */
class MessageDeliverySpec extends TestSpecBase {

  private val LOG = Logger.getLogger(getClass)

  // The no-op beforeAll()/afterAll() overrides that only delegated to super were
  // removed; inherited behavior is identical without them.

  "Gearpump" should {
    "support exactly-once message delivery" in {
      withKafkaCluster(cluster) { kafkaCluster =>
        // setup
        val sourcePartitionNum = 1
        val sourceTopic = "topic1"
        val sinkTopic = "topic2"

        // Generate number sequence (1, 2, 3, ...) to the topic
        kafkaCluster.createTopic(sourceTopic, sourcePartitionNum)
        withDataProducer(sourceTopic, kafkaCluster.getBrokerListConnectString) { producer =>
          withHadoopCluster { hadoopCluster =>
            // exercise: submit MessageCountApp reading from Kafka and writing counts back
            val args = Array("org.apache.gearpump.streaming.examples.state.MessageCountApp",
              "-defaultFS", hadoopCluster.getDefaultFS,
              "-zookeeperConnect", kafkaCluster.getZookeeperConnectString,
              "-brokerList", kafkaCluster.getBrokerListConnectString,
              "-sourceTopic", sourceTopic,
              "-sinkTopic", sinkTopic,
              "-sourceTask", sourcePartitionNum).mkString(" ")
            val appId = restClient.getNextAvailableAppId()

            val stateJar = cluster.queryBuiltInExampleJars("state").head
            val success = restClient.submitApp(stateJar, executorNum = 1, args = args)
            success shouldBe true

            // verify #1: the app is up and its clock is advancing
            expectAppIsRunning(appId, "MessageCount")
            Util.retryUntil(() => restClient.queryStreamingAppDetail(appId).clock > 0,
              "app is running")

            // wait for checkpoint to take place
            Thread.sleep(1000)

            // verify #2: killing an executor triggers replay and a replacement executor
            LOG.info("Trigger message replay by kill and restart the executors")
            val executorToKill = restClient.queryExecutorBrief(appId).map(_.executorId).max
            restClient.killExecutor(appId, executorToKill) shouldBe true
            Util.retryUntil(() => restClient.queryExecutorBrief(appId)
              .map(_.executorId).max > executorToKill, s"executor $executorToKill killed")

            producer.stop()
            val producedNumbers = producer.producedNumbers
            LOG.info(s"In total, numbers in range[${producedNumbers.start}" +
              s", ${producedNumbers.end - 1}] have been written to Kafka")

            // verify #3: everything produced actually reached the source topic
            val kafkaSourceOffset = kafkaCluster.getLatestOffset(sourceTopic)

            assert(producedNumbers.size == kafkaSourceOffset,
              "produced message should match Kafka queue size")

            LOG.info(s"The Kafka source topic $sourceTopic offset is " + kafkaSourceOffset)

            // The sink processor of this job (MessageCountApp) writes total message
            // count to Kafka Sink periodically (once every checkpoint interval).
            // The detector keeps record of the latest message count.
            val detector = new ResultVerifier {
              var latestMessageCount: Int = 0
              override def onNext(messageCount: Int): Unit = {
                this.latestMessageCount = messageCount
              }
            }

            val kafkaReader = new SimpleKafkaReader(detector, sinkTopic,
              host = kafkaCluster.advertisedHost, port = kafkaCluster.advertisedPort)
            Util.retryUntil(() => {
              kafkaReader.read()
              LOG.info(s"Received message count: ${detector.latestMessageCount}, " +
                s"expect: ${producedNumbers.size}")
              detector.latestMessageCount == producedNumbers.size
            }, "MessageCountApp calculated message count matches " +
              "expected in case of message replay")
          }
        }
      }
    }
  }
}
|
manuzhang/incubator-gearpump
|
integrationtest/core/src/it/scala/org/apache/gearpump/integrationtest/checklist/MessageDeliverySpec.scala
|
Scala
|
apache-2.0
| 5,040
|
package com.yetu.controlcenter.routes
import com.yetu.controlcenter.base.BaseRoutesSpec
import play.api.test.FakeRequest
import play.api.test.Helpers._
/** Verifies that the /health endpoint responds and reports the service identity. */
class HealthCheckSpec extends BaseRoutesSpec {

  val healthUrl = "/health"

  "health controller" must {
    "return name and organization " in {
      val Some(result) = route(FakeRequest(GET, healthUrl))
      status(result) mustEqual (OK)
      // Bind the body once and check each expected fragment against it.
      val body = contentAsString(result)
      body must include("alive")
      body must include("com.yetu")
      body must include("controlcenter")
    }
  }
}
|
yetu/controlcenter
|
test/com/yetu/controlcenter/routes/HealthCheckSpec.scala
|
Scala
|
mit
| 577
|
package name.orhideous.twicher.error
import cats.data.Kleisli
import cats.data.OptionT
import cats.ApplicativeError
import org.http4s.HttpRoutes
import org.http4s.Request
import org.http4s.Response
import cats.implicits._
/** Mixin that converts errors of type E raised by HttpRoutes into normal HTTP responses. */
trait RoutesHttpErrorWrapper[F[_], E <: Throwable] {
  protected type Handler = E => F[Response[F]]

  /**
   * Wraps `routes` so that any error of type `E` raised while handling a request
   * is turned into a response by `handler` instead of propagating.
   */
  protected def wrapWith(handler: Handler)(routes: HttpRoutes[F])(implicit ev: ApplicativeError[F, E]): HttpRoutes[F] =
    Kleisli { request: Request[F] =>
      val attempted = routes.run(request).value.handleErrorWith { error =>
        handler(error).map(response => Option(response))
      }
      OptionT(attempted)
    }
}
|
Orhideous/twicher
|
src/main/scala/name/orhideous/twicher/error/RoutesHttpErrorWrapper.scala
|
Scala
|
gpl-3.0
| 616
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import java.sql.Timestamp
import org.apache.spark.sql.catalyst.analysis.TypeCoercion._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.{Rule, RuleExecutor}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
class TypeCoercionSuite extends AnalysisTest {
import TypeCoercionSuite._
// scalastyle:off line.size.limit
// The following table shows all implicit data type conversions that are not visible to the user.
// +----------------------+----------+-----------+-------------+----------+------------+-----------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// | Source Type\\CAST TO | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | BooleanType | StringType | DateType | TimestampType | ArrayType | MapType | StructType | NullType | CalendarIntervalType | DecimalType | NumericType | IntegralType |
// +----------------------+----------+-----------+-------------+----------+------------+-----------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// | ByteType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(3, 0) | ByteType | ByteType |
// | ShortType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(5, 0) | ShortType | ShortType |
// | IntegerType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(10, 0) | IntegerType | IntegerType |
// | LongType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(20, 0) | LongType | LongType |
// | DoubleType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(30, 15) | DoubleType | IntegerType |
// | FloatType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(14, 7) | FloatType | IntegerType |
// | Dec(10, 2) | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | X | X | StringType | X | X | X | X | X | X | X | DecimalType(10, 2) | Dec(10, 2) | IntegerType |
// | BinaryType | X | X | X | X | X | X | X | BinaryType | X | StringType | X | X | X | X | X | X | X | X | X | X |
// | BooleanType | X | X | X | X | X | X | X | X | BooleanType | StringType | X | X | X | X | X | X | X | X | X | X |
// | StringType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | X | StringType | DateType | TimestampType | X | X | X | X | X | DecimalType(38, 18) | DoubleType | X |
// | DateType | X | X | X | X | X | X | X | X | X | StringType | DateType | TimestampType | X | X | X | X | X | X | X | X |
// | TimestampType | X | X | X | X | X | X | X | X | X | StringType | DateType | TimestampType | X | X | X | X | X | X | X | X |
// | ArrayType | X | X | X | X | X | X | X | X | X | X | X | X | ArrayType* | X | X | X | X | X | X | X |
// | MapType | X | X | X | X | X | X | X | X | X | X | X | X | X | MapType* | X | X | X | X | X | X |
// | StructType | X | X | X | X | X | X | X | X | X | X | X | X | X | X | StructType* | X | X | X | X | X |
// | NullType | ByteType | ShortType | IntegerType | LongType | DoubleType | FloatType | Dec(10, 2) | BinaryType | BooleanType | StringType | DateType | TimestampType | ArrayType | MapType | StructType | NullType | CalendarIntervalType | DecimalType(38, 18) | DoubleType | IntegerType |
// | CalendarIntervalType | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | X | CalendarIntervalType | X | X | X |
// +----------------------+----------+-----------+-------------+----------+------------+-----------+------------+------------+-------------+------------+----------+---------------+------------+----------+-------------+----------+----------------------+---------------------+-------------+--------------+
// Note: StructType* is castable when all the internal child types are castable according to the table.
// Note: ArrayType* is castable when the element type is castable according to the table.
// Note: MapType* is castable when both the key type and the value type are castable according to the table.
// scalastyle:on line.size.limit
  /**
   * Asserts that `from` can be implicitly cast to `to` yielding `expected`, both for a
   * default-valued expression and for a null literal of the source type.
   */
  private def shouldCast(from: DataType, to: AbstractDataType, expected: DataType): Unit = {
    // Check default value
    val castDefault = TypeCoercion.ImplicitTypeCasts.implicitCast(default(from), to)
    assert(DataType.equalsIgnoreCompatibleNullability(
      castDefault.map(_.dataType).getOrElse(null), expected),
      s"Failed to cast $from to $to")

    // Check null value
    // NOTE(review): this branch uses equalsIgnoreCaseAndNullability while the default-value
    // branch uses equalsIgnoreCompatibleNullability — confirm the asymmetry is intentional.
    val castNull = TypeCoercion.ImplicitTypeCasts.implicitCast(createNull(from), to)
    assert(DataType.equalsIgnoreCaseAndNullability(
      castNull.map(_.dataType).getOrElse(null), expected),
      s"Failed to cast $from to $to")
  }
private def shouldNotCast(from: DataType, to: AbstractDataType): Unit = {
// Check default value
val castDefault = TypeCoercion.ImplicitTypeCasts.implicitCast(default(from), to)
assert(castDefault.isEmpty, s"Should not be able to cast $from to $to, but got $castDefault")
// Check null value
val castNull = TypeCoercion.ImplicitTypeCasts.implicitCast(createNull(from), to)
assert(castNull.isEmpty, s"Should not be able to cast $from to $to, but got $castNull")
}
private def default(dataType: DataType): Expression = dataType match {
case ArrayType(internalType: DataType, _) =>
CreateArray(Seq(Literal.default(internalType)))
case MapType(keyDataType: DataType, valueDataType: DataType, _) =>
CreateMap(Seq(Literal.default(keyDataType), Literal.default(valueDataType)))
case _ => Literal.default(dataType)
}
private def createNull(dataType: DataType): Expression = dataType match {
case ArrayType(internalType: DataType, _) =>
CreateArray(Seq(Literal.create(null, internalType)))
case MapType(keyDataType: DataType, valueDataType: DataType, _) =>
CreateMap(Seq(Literal.create(null, keyDataType), Literal.create(null, valueDataType)))
case _ => Literal.create(null, dataType)
}
// Check whether the type `checkedType` can be cast to all the types in `castableTypes`,
// but cannot be cast to the other types in `allTypes`.
private def checkTypeCasting(checkedType: DataType, castableTypes: Seq[DataType]): Unit = {
val nonCastableTypes = allTypes.filterNot(castableTypes.contains)
castableTypes.foreach { tpe =>
shouldCast(checkedType, tpe, tpe)
}
nonCastableTypes.foreach { tpe =>
shouldNotCast(checkedType, tpe)
}
}
private def checkWidenType(
widenFunc: (DataType, DataType) => Option[DataType],
t1: DataType,
t2: DataType,
expected: Option[DataType],
isSymmetric: Boolean = true): Unit = {
var found = widenFunc(t1, t2)
assert(found == expected,
s"Expected $expected as wider common type for $t1 and $t2, found $found")
// Test both directions to make sure the widening is symmetric.
if (isSymmetric) {
found = widenFunc(t2, t1)
assert(found == expected,
s"Expected $expected as wider common type for $t2 and $t1, found $found")
}
}
  // Integral leaf types widen to every other numeric type plus String; against the
  // abstract NumericType/IntegralType they stay as themselves (no needless widening).
  test("implicit type cast - ByteType") {
    val checkedType = ByteType
    checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
    shouldCast(checkedType, DecimalType, DecimalType.ByteDecimal)
    shouldCast(checkedType, NumericType, checkedType)
    shouldCast(checkedType, IntegralType, checkedType)
  }
  test("implicit type cast - ShortType") {
    val checkedType = ShortType
    checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
    shouldCast(checkedType, DecimalType, DecimalType.ShortDecimal)
    shouldCast(checkedType, NumericType, checkedType)
    shouldCast(checkedType, IntegralType, checkedType)
  }
test("implicit type cast - IntegerType") {
val checkedType = IntegerType
checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
shouldCast(IntegerType, DecimalType, DecimalType.IntDecimal)
shouldCast(checkedType, NumericType, checkedType)
shouldCast(checkedType, IntegralType, checkedType)
}
  test("implicit type cast - LongType") {
    val checkedType = LongType
    checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
    shouldCast(checkedType, DecimalType, DecimalType.LongDecimal)
    shouldCast(checkedType, NumericType, checkedType)
    shouldCast(checkedType, IntegralType, checkedType)
  }
  // Fractional types are numeric but must NOT satisfy the abstract IntegralType.
  test("implicit type cast - FloatType") {
    val checkedType = FloatType
    checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
    shouldCast(checkedType, DecimalType, DecimalType.FloatDecimal)
    shouldCast(checkedType, NumericType, checkedType)
    shouldNotCast(checkedType, IntegralType)
  }
  test("implicit type cast - DoubleType") {
    val checkedType = DoubleType
    checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
    shouldCast(checkedType, DecimalType, DecimalType.DoubleDecimal)
    shouldCast(checkedType, NumericType, checkedType)
    shouldNotCast(checkedType, IntegralType)
  }
  // A concrete fixed-precision decimal keeps its own precision/scale against the
  // abstract DecimalType and NumericType.
  test("implicit type cast - DecimalType(10, 2)") {
    val checkedType = DecimalType(10, 2)
    checkTypeCasting(checkedType, castableTypes = numericTypes ++ Seq(StringType))
    shouldCast(checkedType, DecimalType, checkedType)
    shouldCast(checkedType, NumericType, checkedType)
    shouldNotCast(checkedType, IntegralType)
  }
  // Binary and Boolean are only castable to themselves and to String.
  test("implicit type cast - BinaryType") {
    val checkedType = BinaryType
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }
  test("implicit type cast - BooleanType") {
    val checkedType = BooleanType
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }
  // String casts to every atomic type except Boolean/Null/CalendarInterval and the
  // complex types; against abstract numeric categories it takes the system defaults.
  test("implicit type cast - StringType") {
    val checkedType = StringType
    val nonCastableTypes =
      complexTypes ++ Seq(BooleanType, NullType, CalendarIntervalType)
    checkTypeCasting(checkedType, castableTypes = allTypes.filterNot(nonCastableTypes.contains))
    shouldCast(checkedType, DecimalType, DecimalType.SYSTEM_DEFAULT)
    shouldCast(checkedType, NumericType, NumericType.defaultConcreteType)
    shouldNotCast(checkedType, IntegralType)
  }
  // Date and Timestamp are mutually castable (plus String), nothing else.
  test("implicit type cast - DateType") {
    val checkedType = DateType
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType, TimestampType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }
  test("implicit type cast - TimestampType") {
    val checkedType = TimestampType
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType, StringType, DateType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }
  // Array casts follow the element type's castability; additionally, casting must not
  // relax a non-nullable element constraint (Double -> Long could produce nulls).
  test("implicit type cast - ArrayType(StringType)") {
    val checkedType = ArrayType(StringType)
    val nonCastableTypes =
      complexTypes ++ Seq(BooleanType, NullType, CalendarIntervalType)
    checkTypeCasting(checkedType,
      castableTypes = allTypes.filterNot(nonCastableTypes.contains).map(ArrayType(_)))
    nonCastableTypes.map(ArrayType(_)).foreach(shouldNotCast(checkedType, _))
    shouldNotCast(ArrayType(DoubleType, containsNull = false),
      ArrayType(LongType, containsNull = false))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }
  test("implicit type cast between two Map types") {
    val sourceType = MapType(IntegerType, IntegerType, true)
    // NOTE(review): the `.filter` binds only to `Seq(StringType)`, not to
    // `numericTypes ++ Seq(StringType)` — presumably intentional since `targetTypes`
    // below filters the numeric key types separately, but worth confirming.
    val castableTypes = numericTypes ++ Seq(StringType).filter(!Cast.forceNullable(IntegerType, _))
    val targetTypes = numericTypes.filter(!Cast.forceNullable(IntegerType, _)).map { t =>
      MapType(t, sourceType.valueType, valueContainsNull = true)
    }
    val nonCastableTargetTypes = allTypes.filterNot(castableTypes.contains(_)).map {t =>
      MapType(t, sourceType.valueType, valueContainsNull = true)
    }
    // Tests that it's possible to set up implicit casts between two map types when the
    // source map's key type is integer and the target map's key type is either Byte,
    // Short, Long, Double, Float, Decimal(38, 18) or String.
    targetTypes.foreach { targetType =>
      shouldCast(sourceType, targetType, targetType)
    }
    // Tests that it's not possible to set up implicit casts between two map types when
    // the source map's key type is integer and the target map's key type is either
    // Binary, Boolean, Date, Timestamp, Array, Struct, CalendarIntervalType or NullType.
    nonCastableTargetTypes.foreach { targetType =>
      shouldNotCast(sourceType, targetType)
    }
    // Tests that it's not possible to cast from a nullable map type to a
    // non-nullable map type (the null value would have nowhere to go).
    val targetNotNullableTypes = allTypes.filterNot(_ == IntegerType).map { t =>
      MapType(t, sourceType.valueType, valueContainsNull = false)
    }
    val sourceMapExprWithValueNull =
      CreateMap(Seq(Literal.default(sourceType.keyType),
        Literal.create(null, sourceType.valueType)))
    targetNotNullableTypes.foreach { targetType =>
      val castDefault =
        TypeCoercion.ImplicitTypeCasts.implicitCast(sourceMapExprWithValueNull, targetType)
      assert(castDefault.isEmpty,
        s"Should not be able to cast $sourceType to $targetType, but got $castDefault")
    }
  }
  // Structs and CalendarInterval are only castable to themselves; NullType is
  // castable to everything (null fits any type).
  test("implicit type cast - StructType().add(\"a1\", StringType)") {
    val checkedType = new StructType().add("a1", StringType)
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }
  test("implicit type cast - NullType") {
    val checkedType = NullType
    checkTypeCasting(checkedType, castableTypes = allTypes)
    shouldCast(checkedType, DecimalType, DecimalType.SYSTEM_DEFAULT)
    shouldCast(checkedType, NumericType, NumericType.defaultConcreteType)
    shouldCast(checkedType, IntegralType, IntegralType.defaultConcreteType)
  }
  test("implicit type cast - CalendarIntervalType") {
    val checkedType = CalendarIntervalType
    checkTypeCasting(checkedType, castableTypes = Seq(checkedType))
    shouldNotCast(checkedType, DecimalType)
    shouldNotCast(checkedType, NumericType)
    shouldNotCast(checkedType, IntegralType)
  }
  // A TypeCollection accepts the input type itself when it is a member; otherwise the
  // first member the input is castable to wins (so collection order matters).
  test("eligible implicit type cast - TypeCollection") {
    shouldCast(NullType, TypeCollection(StringType, BinaryType), StringType)
    shouldCast(StringType, TypeCollection(StringType, BinaryType), StringType)
    shouldCast(BinaryType, TypeCollection(StringType, BinaryType), BinaryType)
    shouldCast(StringType, TypeCollection(BinaryType, StringType), StringType)
    shouldCast(IntegerType, TypeCollection(IntegerType, BinaryType), IntegerType)
    shouldCast(IntegerType, TypeCollection(BinaryType, IntegerType), IntegerType)
    shouldCast(BinaryType, TypeCollection(BinaryType, IntegerType), BinaryType)
    shouldCast(BinaryType, TypeCollection(IntegerType, BinaryType), BinaryType)
    // Integer is not a member here, so it casts to the first castable member (String).
    shouldCast(IntegerType, TypeCollection(StringType, BinaryType), StringType)
    shouldCast(IntegerType, TypeCollection(BinaryType, StringType), StringType)
    shouldCast(DecimalType.SYSTEM_DEFAULT,
      TypeCollection(IntegerType, DecimalType), DecimalType.SYSTEM_DEFAULT)
    shouldCast(DecimalType(10, 2), TypeCollection(IntegerType, DecimalType), DecimalType(10, 2))
    shouldCast(DecimalType(10, 2), TypeCollection(DecimalType, IntegerType), DecimalType(10, 2))
    shouldCast(IntegerType, TypeCollection(DecimalType(10, 2), StringType), DecimalType(10, 2))
    shouldCast(StringType, TypeCollection(NumericType, BinaryType), DoubleType)
    // An exact array match keeps its original containsNull flag.
    shouldCast(
      ArrayType(StringType, false),
      TypeCollection(ArrayType(StringType), StringType),
      ArrayType(StringType, false))
    shouldCast(
      ArrayType(StringType, true),
      TypeCollection(ArrayType(StringType), StringType),
      ArrayType(StringType, true))
  }
  // Integer is castable to none of the collection's members, so the cast must fail.
  test("ineligible implicit type cast - TypeCollection") {
    shouldNotCast(IntegerType, TypeCollection(DateType, TimestampType))
  }
  // Exhaustive matrix for TypeCoercion.findTightestCommonType: the "tightest" bound
  // widens within a numeric family but never promotes to String and never adjusts
  // decimal precision (that is left to the arithmetic-specific rules).
  test("tightest common bound for types") {
    def widenTest(t1: DataType, t2: DataType, expected: Option[DataType]): Unit =
      checkWidenType(TypeCoercion.findTightestCommonType, t1, t2, expected)
    // Null
    widenTest(NullType, NullType, Some(NullType))
    // Boolean
    widenTest(NullType, BooleanType, Some(BooleanType))
    widenTest(BooleanType, BooleanType, Some(BooleanType))
    widenTest(IntegerType, BooleanType, None)
    widenTest(LongType, BooleanType, None)
    // Integral
    widenTest(NullType, ByteType, Some(ByteType))
    widenTest(NullType, IntegerType, Some(IntegerType))
    widenTest(NullType, LongType, Some(LongType))
    widenTest(ShortType, IntegerType, Some(IntegerType))
    widenTest(ShortType, LongType, Some(LongType))
    widenTest(IntegerType, LongType, Some(LongType))
    widenTest(LongType, LongType, Some(LongType))
    // Floating point
    widenTest(NullType, FloatType, Some(FloatType))
    widenTest(NullType, DoubleType, Some(DoubleType))
    widenTest(FloatType, DoubleType, Some(DoubleType))
    widenTest(FloatType, FloatType, Some(FloatType))
    widenTest(DoubleType, DoubleType, Some(DoubleType))
    // Integral mixed with floating point.
    widenTest(IntegerType, FloatType, Some(FloatType))
    widenTest(IntegerType, DoubleType, Some(DoubleType))
    widenTest(IntegerType, DoubleType, Some(DoubleType))
    widenTest(LongType, FloatType, Some(FloatType))
    widenTest(LongType, DoubleType, Some(DoubleType))
    // No up-casting for fixed-precision decimal (this is handled by arithmetic rules)
    widenTest(DecimalType(2, 1), DecimalType(3, 2), None)
    widenTest(DecimalType(2, 1), DoubleType, None)
    widenTest(DecimalType(2, 1), IntegerType, None)
    widenTest(DoubleType, DecimalType(2, 1), None)
    // StringType: no string promotion in the "tightest" bound.
    widenTest(NullType, StringType, Some(StringType))
    widenTest(StringType, StringType, Some(StringType))
    widenTest(IntegerType, StringType, None)
    widenTest(LongType, StringType, None)
    // TimestampType
    widenTest(NullType, TimestampType, Some(TimestampType))
    widenTest(TimestampType, TimestampType, Some(TimestampType))
    widenTest(DateType, TimestampType, Some(TimestampType))
    widenTest(IntegerType, TimestampType, None)
    widenTest(StringType, TimestampType, None)
    // ComplexType
    widenTest(NullType,
      MapType(IntegerType, StringType, false),
      Some(MapType(IntegerType, StringType, false)))
    widenTest(NullType, StructType(Seq()), Some(StructType(Seq())))
    widenTest(StringType, MapType(IntegerType, StringType, true), None)
    widenTest(ArrayType(IntegerType), StructType(Seq()), None)
    // Structs: field names must match; fields widen recursively and nullability is
    // the union of both sides' nullability.
    widenTest(
      StructType(Seq(StructField("a", IntegerType))),
      StructType(Seq(StructField("b", IntegerType))),
      None)
    widenTest(
      StructType(Seq(StructField("a", IntegerType, nullable = false))),
      StructType(Seq(StructField("a", DoubleType, nullable = false))),
      Some(StructType(Seq(StructField("a", DoubleType, nullable = false)))))
    widenTest(
      StructType(Seq(StructField("a", IntegerType, nullable = false))),
      StructType(Seq(StructField("a", IntegerType, nullable = false))),
      Some(StructType(Seq(StructField("a", IntegerType, nullable = false)))))
    widenTest(
      StructType(Seq(StructField("a", IntegerType, nullable = false))),
      StructType(Seq(StructField("a", IntegerType, nullable = true))),
      Some(StructType(Seq(StructField("a", IntegerType, nullable = true)))))
    widenTest(
      StructType(Seq(StructField("a", IntegerType, nullable = true))),
      StructType(Seq(StructField("a", IntegerType, nullable = false))),
      Some(StructType(Seq(StructField("a", IntegerType, nullable = true)))))
    widenTest(
      StructType(Seq(StructField("a", IntegerType, nullable = true))),
      StructType(Seq(StructField("a", IntegerType, nullable = true))),
      Some(StructType(Seq(StructField("a", IntegerType, nullable = true)))))
    // Field-name matching honors spark.sql.caseSensitive; with case-insensitive
    // resolution the LEFT side's field names win, so the check is not symmetric.
    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
      widenTest(
        StructType(Seq(StructField("a", IntegerType))),
        StructType(Seq(StructField("A", IntegerType))),
        None)
    }
    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
      checkWidenType(
        TypeCoercion.findTightestCommonType,
        StructType(Seq(StructField("a", IntegerType), StructField("B", IntegerType))),
        StructType(Seq(StructField("A", IntegerType), StructField("b", IntegerType))),
        Some(StructType(Seq(StructField("a", IntegerType), StructField("B", IntegerType)))),
        isSymmetric = false)
    }
    // Containers: containsNull / valueContainsNull widen to the union of both sides.
    widenTest(
      ArrayType(IntegerType, containsNull = true),
      ArrayType(IntegerType, containsNull = false),
      Some(ArrayType(IntegerType, containsNull = true)))
    widenTest(
      ArrayType(NullType, containsNull = true),
      ArrayType(IntegerType, containsNull = false),
      Some(ArrayType(IntegerType, containsNull = true)))
    widenTest(
      MapType(IntegerType, StringType, valueContainsNull = true),
      MapType(IntegerType, StringType, valueContainsNull = false),
      Some(MapType(IntegerType, StringType, valueContainsNull = true)))
    widenTest(
      MapType(NullType, NullType, true),
      MapType(IntegerType, StringType, false),
      Some(MapType(IntegerType, StringType, true)))
    widenTest(
      new StructType()
        .add("arr", ArrayType(IntegerType, containsNull = true), nullable = false),
      new StructType()
        .add("arr", ArrayType(IntegerType, containsNull = false), nullable = true),
      Some(new StructType()
        .add("arr", ArrayType(IntegerType, containsNull = true), nullable = true)))
    widenTest(
      new StructType()
        .add("null", NullType, nullable = true),
      new StructType()
        .add("null", IntegerType, nullable = false),
      Some(new StructType()
        .add("null", IntegerType, nullable = true)))
    // A NullType side that is declared non-nullable does not force nullability.
    widenTest(
      ArrayType(NullType, containsNull = false),
      ArrayType(IntegerType, containsNull = false),
      Some(ArrayType(IntegerType, containsNull = false)))
    widenTest(MapType(NullType, NullType, false),
      MapType(IntegerType, StringType, false),
      Some(MapType(IntegerType, StringType, false)))
    widenTest(
      new StructType()
        .add("null", NullType, nullable = false),
      new StructType()
        .add("null", IntegerType, nullable = false),
      Some(new StructType()
        .add("null", IntegerType, nullable = false)))
  }
  // Matrix for the "wider" common type: unlike the tightest bound, this may promote
  // decimals (possibly losing precision, which forces nullability) and — in the
  // string-promotion variant — may widen to StringType.
  test("wider common type for decimal and array") {
    def widenTestWithStringPromotion(
        t1: DataType,
        t2: DataType,
        expected: Option[DataType],
        isSymmetric: Boolean = true): Unit = {
      checkWidenType(TypeCoercion.findWiderTypeForTwo, t1, t2, expected, isSymmetric)
    }
    def widenTestWithoutStringPromotion(
        t1: DataType,
        t2: DataType,
        expected: Option[DataType],
        isSymmetric: Boolean = true): Unit = {
      checkWidenType(
        TypeCoercion.findWiderTypeWithoutStringPromotionForTwo, t1, t2, expected, isSymmetric)
    }
    // Decimal
    widenTestWithStringPromotion(
      DecimalType(2, 1), DecimalType(3, 2), Some(DecimalType(3, 2)))
    widenTestWithStringPromotion(
      DecimalType(2, 1), DoubleType, Some(DoubleType))
    widenTestWithStringPromotion(
      DecimalType(2, 1), IntegerType, Some(DecimalType(11, 1)))
    widenTestWithStringPromotion(
      DecimalType(2, 1), LongType, Some(DecimalType(21, 1)))
    // ArrayType: element types widen recursively; containsNull takes the union.
    widenTestWithStringPromotion(
      ArrayType(ShortType, containsNull = true),
      ArrayType(DoubleType, containsNull = false),
      Some(ArrayType(DoubleType, containsNull = true)))
    widenTestWithStringPromotion(
      ArrayType(TimestampType, containsNull = false),
      ArrayType(StringType, containsNull = true),
      Some(ArrayType(StringType, containsNull = true)))
    widenTestWithStringPromotion(
      ArrayType(ArrayType(IntegerType), containsNull = false),
      ArrayType(ArrayType(LongType), containsNull = false),
      Some(ArrayType(ArrayType(LongType), containsNull = false)))
    widenTestWithStringPromotion(
      ArrayType(MapType(IntegerType, FloatType), containsNull = false),
      ArrayType(MapType(LongType, DoubleType), containsNull = false),
      Some(ArrayType(MapType(LongType, DoubleType), containsNull = false)))
    widenTestWithStringPromotion(
      ArrayType(new StructType().add("num", ShortType), containsNull = false),
      ArrayType(new StructType().add("num", LongType), containsNull = false),
      Some(ArrayType(new StructType().add("num", LongType), containsNull = false)))
    widenTestWithStringPromotion(
      ArrayType(IntegerType, containsNull = false),
      ArrayType(DecimalType.IntDecimal, containsNull = false),
      Some(ArrayType(DecimalType.IntDecimal, containsNull = false)))
    // Decimal widening that cannot hold all digits caps at (38, 35) and forces
    // containsNull = true because overflow yields null.
    widenTestWithStringPromotion(
      ArrayType(DecimalType(36, 0), containsNull = false),
      ArrayType(DecimalType(36, 35), containsNull = false),
      Some(ArrayType(DecimalType(38, 35), containsNull = true)))
    // MapType
    widenTestWithStringPromotion(
      MapType(ShortType, TimestampType, valueContainsNull = true),
      MapType(DoubleType, StringType, valueContainsNull = false),
      Some(MapType(DoubleType, StringType, valueContainsNull = true)))
    widenTestWithStringPromotion(
      MapType(IntegerType, ArrayType(TimestampType), valueContainsNull = false),
      MapType(LongType, ArrayType(StringType), valueContainsNull = true),
      Some(MapType(LongType, ArrayType(StringType), valueContainsNull = true)))
    widenTestWithStringPromotion(
      MapType(IntegerType, MapType(ShortType, TimestampType), valueContainsNull = false),
      MapType(LongType, MapType(DoubleType, StringType), valueContainsNull = false),
      Some(MapType(LongType, MapType(DoubleType, StringType), valueContainsNull = false)))
    widenTestWithStringPromotion(
      MapType(IntegerType, new StructType().add("num", ShortType), valueContainsNull = false),
      MapType(LongType, new StructType().add("num", LongType), valueContainsNull = false),
      Some(MapType(LongType, new StructType().add("num", LongType), valueContainsNull = false)))
    widenTestWithStringPromotion(
      MapType(StringType, IntegerType, valueContainsNull = false),
      MapType(StringType, DecimalType.IntDecimal, valueContainsNull = false),
      Some(MapType(StringType, DecimalType.IntDecimal, valueContainsNull = false)))
    widenTestWithStringPromotion(
      MapType(StringType, DecimalType(36, 0), valueContainsNull = false),
      MapType(StringType, DecimalType(36, 35), valueContainsNull = false),
      Some(MapType(StringType, DecimalType(38, 35), valueContainsNull = true)))
    widenTestWithStringPromotion(
      MapType(IntegerType, StringType, valueContainsNull = false),
      MapType(DecimalType.IntDecimal, StringType, valueContainsNull = false),
      Some(MapType(DecimalType.IntDecimal, StringType, valueContainsNull = false)))
    // Map KEYS may never become nullable, so a lossy key widening is rejected.
    widenTestWithStringPromotion(
      MapType(DecimalType(36, 0), StringType, valueContainsNull = false),
      MapType(DecimalType(36, 35), StringType, valueContainsNull = false),
      None)
    // StructType
    widenTestWithStringPromotion(
      new StructType()
        .add("num", ShortType, nullable = true).add("ts", StringType, nullable = false),
      new StructType()
        .add("num", DoubleType, nullable = false).add("ts", TimestampType, nullable = true),
      Some(new StructType()
        .add("num", DoubleType, nullable = true).add("ts", StringType, nullable = true)))
    widenTestWithStringPromotion(
      new StructType()
        .add("arr", ArrayType(ShortType, containsNull = false), nullable = false),
      new StructType()
        .add("arr", ArrayType(DoubleType, containsNull = true), nullable = false),
      Some(new StructType()
        .add("arr", ArrayType(DoubleType, containsNull = true), nullable = false)))
    widenTestWithStringPromotion(
      new StructType()
        .add("map", MapType(ShortType, TimestampType, valueContainsNull = true), nullable = false),
      new StructType()
        .add("map", MapType(DoubleType, StringType, valueContainsNull = false), nullable = false),
      Some(new StructType()
        .add("map", MapType(DoubleType, StringType, valueContainsNull = true), nullable = false)))
    widenTestWithStringPromotion(
      new StructType().add("num", IntegerType, nullable = false),
      new StructType().add("num", DecimalType.IntDecimal, nullable = false),
      Some(new StructType().add("num", DecimalType.IntDecimal, nullable = false)))
    widenTestWithStringPromotion(
      new StructType().add("num", DecimalType(36, 0), nullable = false),
      new StructType().add("num", DecimalType(36, 35), nullable = false),
      Some(new StructType().add("num", DecimalType(38, 35), nullable = true)))
    // Structs with different field sets never widen.
    widenTestWithStringPromotion(
      new StructType().add("num", IntegerType),
      new StructType().add("num", LongType).add("str", StringType),
      None)
    widenTestWithoutStringPromotion(
      new StructType().add("num", IntegerType),
      new StructType().add("num", LongType).add("str", StringType),
      None)
    // Field-name case handling mirrors the tightest-common-type behavior above.
    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
      widenTestWithStringPromotion(
        new StructType().add("a", IntegerType),
        new StructType().add("A", LongType),
        None)
      widenTestWithoutStringPromotion(
        new StructType().add("a", IntegerType),
        new StructType().add("A", LongType),
        None)
    }
    withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
      widenTestWithStringPromotion(
        new StructType().add("a", IntegerType),
        new StructType().add("A", LongType),
        Some(new StructType().add("a", LongType)),
        isSymmetric = false)
      widenTestWithoutStringPromotion(
        new StructType().add("a", IntegerType),
        new StructType().add("A", LongType),
        Some(new StructType().add("a", LongType)),
        isSymmetric = false)
    }
    // Without string promotion
    widenTestWithoutStringPromotion(IntegerType, StringType, None)
    widenTestWithoutStringPromotion(StringType, TimestampType, None)
    widenTestWithoutStringPromotion(ArrayType(LongType), ArrayType(StringType), None)
    widenTestWithoutStringPromotion(ArrayType(StringType), ArrayType(TimestampType), None)
    widenTestWithoutStringPromotion(
      MapType(LongType, IntegerType), MapType(StringType, IntegerType), None)
    widenTestWithoutStringPromotion(
      MapType(IntegerType, LongType), MapType(IntegerType, StringType), None)
    widenTestWithoutStringPromotion(
      MapType(StringType, IntegerType), MapType(TimestampType, IntegerType), None)
    widenTestWithoutStringPromotion(
      MapType(IntegerType, StringType), MapType(IntegerType, TimestampType), None)
    widenTestWithoutStringPromotion(
      new StructType().add("a", IntegerType),
      new StructType().add("a", StringType),
      None)
    widenTestWithoutStringPromotion(
      new StructType().add("a", StringType),
      new StructType().add("a", IntegerType),
      None)
    // String promotion
    widenTestWithStringPromotion(IntegerType, StringType, Some(StringType))
    widenTestWithStringPromotion(StringType, TimestampType, Some(StringType))
    widenTestWithStringPromotion(
      ArrayType(LongType), ArrayType(StringType), Some(ArrayType(StringType)))
    widenTestWithStringPromotion(
      ArrayType(StringType), ArrayType(TimestampType), Some(ArrayType(StringType)))
    widenTestWithStringPromotion(
      MapType(LongType, IntegerType),
      MapType(StringType, IntegerType),
      Some(MapType(StringType, IntegerType)))
    widenTestWithStringPromotion(
      MapType(IntegerType, LongType),
      MapType(IntegerType, StringType),
      Some(MapType(IntegerType, StringType)))
    widenTestWithStringPromotion(
      MapType(StringType, IntegerType),
      MapType(TimestampType, IntegerType),
      Some(MapType(StringType, IntegerType)))
    widenTestWithStringPromotion(
      MapType(IntegerType, StringType),
      MapType(IntegerType, TimestampType),
      Some(MapType(IntegerType, StringType)))
    widenTestWithStringPromotion(
      new StructType().add("a", IntegerType),
      new StructType().add("a", StringType),
      Some(new StructType().add("a", StringType)))
    widenTestWithStringPromotion(
      new StructType().add("a", StringType),
      new StructType().add("a", IntegerType),
      Some(new StructType().add("a", StringType)))
  }
  // Convenience overload for applying a single analysis rule; delegates to the
  // Seq-based `ruleTest` defined next.
  private def ruleTest(rule: Rule[LogicalPlan],
    initial: Expression, transformed: Expression): Unit = {
    ruleTest(Seq(rule), initial, transformed)
  }
private def ruleTest(
rules: Seq[Rule[LogicalPlan]],
initial: Expression,
transformed: Expression): Unit = {
val testRelation = LocalRelation(AttributeReference("a", IntegerType)())
val analyzer = new RuleExecutor[LogicalPlan] {
override val batches = Seq(Batch("Resolution", FixedPoint(3), rules: _*))
}
comparePlans(
analyzer.execute(Project(Seq(Alias(initial, "a")()), testRelation)),
Project(Seq(Alias(transformed, "a")()), testRelation))
}
  // NullType inputs stay NullType for expressions accepting AnyDataType, but are
  // coerced to the expected concrete type (Double for NumericType) otherwise.
  test("cast NullType for expressions that implement ExpectsInputTypes") {
    ruleTest(TypeCoercion.ImplicitTypeCasts,
      AnyTypeUnaryExpression(Literal.create(null, NullType)),
      AnyTypeUnaryExpression(Literal.create(null, NullType)))
    ruleTest(TypeCoercion.ImplicitTypeCasts,
      NumericTypeUnaryExpression(Literal.create(null, NullType)),
      NumericTypeUnaryExpression(Literal.create(null, DoubleType)))
  }
  test("cast NullType for binary operators") {
    ruleTest(TypeCoercion.ImplicitTypeCasts,
      AnyTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)),
      AnyTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)))
    ruleTest(TypeCoercion.ImplicitTypeCasts,
      NumericTypeBinaryOperator(Literal.create(null, NullType), Literal.create(null, NullType)),
      NumericTypeBinaryOperator(Literal.create(null, DoubleType), Literal.create(null, DoubleType)))
  }
  // FunctionArgumentConversion must cast all Coalesce children to one common wider
  // type (string promotion included when a string argument is present).
  test("coalesce casts") {
    val rule = TypeCoercion.FunctionArgumentConversion
    val intLit = Literal(1)
    val longLit = Literal.create(1L)
    val doubleLit = Literal(1.0)
    val stringLit = Literal.create("c", StringType)
    val nullLit = Literal.create(null, NullType)
    val floatNullLit = Literal.create(null, FloatType)
    val floatLit = Literal.create(1.0f, FloatType)
    val timestampLit = Literal.create(Timestamp.valueOf("2017-04-12 00:00:00"), TimestampType)
    val decimalLit = Literal(new java.math.BigDecimal("1000000000000000000000"))
    val tsArrayLit = Literal(Array(new Timestamp(System.currentTimeMillis())))
    val strArrayLit = Literal(Array("c"))
    val intArrayLit = Literal(Array(1))
    ruleTest(rule,
      Coalesce(Seq(doubleLit, intLit, floatLit)),
      Coalesce(Seq(doubleLit, Cast(intLit, DoubleType), Cast(floatLit, DoubleType))))
    // The 22-digit decimal literal forces the integral arguments up to Decimal(22, 0).
    ruleTest(rule,
      Coalesce(Seq(longLit, intLit, decimalLit)),
      Coalesce(Seq(Cast(longLit, DecimalType(22, 0)),
        Cast(intLit, DecimalType(22, 0)), decimalLit)))
    ruleTest(rule,
      Coalesce(Seq(nullLit, intLit)),
      Coalesce(Seq(Cast(nullLit, IntegerType), intLit)))
    ruleTest(rule,
      Coalesce(Seq(timestampLit, stringLit)),
      Coalesce(Seq(Cast(timestampLit, StringType), stringLit)))
    ruleTest(rule,
      Coalesce(Seq(nullLit, floatNullLit, intLit)),
      Coalesce(Seq(Cast(nullLit, FloatType), floatNullLit, Cast(intLit, FloatType))))
    ruleTest(rule,
      Coalesce(Seq(nullLit, intLit, decimalLit, doubleLit)),
      Coalesce(Seq(Cast(nullLit, DoubleType), Cast(intLit, DoubleType),
        Cast(decimalLit, DoubleType), doubleLit)))
    // A string argument promotes everything to StringType.
    ruleTest(rule,
      Coalesce(Seq(nullLit, floatNullLit, doubleLit, stringLit)),
      Coalesce(Seq(Cast(nullLit, StringType), Cast(floatNullLit, StringType),
        Cast(doubleLit, StringType), stringLit)))
    ruleTest(rule,
      Coalesce(Seq(timestampLit, intLit, stringLit)),
      Coalesce(Seq(Cast(timestampLit, StringType), Cast(intLit, StringType), stringLit)))
    // Array arguments widen element-wise, again promoting to String.
    ruleTest(rule,
      Coalesce(Seq(tsArrayLit, intArrayLit, strArrayLit)),
      Coalesce(Seq(Cast(tsArrayLit, ArrayType(StringType)),
        Cast(intArrayLit, ArrayType(StringType)), strArrayLit)))
  }
  // CreateArray's children are all cast to the widest element type.
  test("CreateArray casts") {
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateArray(Literal(1.0)
        :: Literal(1)
        :: Literal.create(1.0f, FloatType)
        :: Nil),
      CreateArray(Literal(1.0)
        :: Cast(Literal(1), DoubleType)
        :: Cast(Literal.create(1.0f, FloatType), DoubleType)
        :: Nil))
    // A string element promotes every element to StringType.
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateArray(Literal(1.0)
        :: Literal(1)
        :: Literal("a")
        :: Nil),
      CreateArray(Cast(Literal(1.0), StringType)
        :: Cast(Literal(1), StringType)
        :: Literal("a")
        :: Nil))
    // Decimal(5, 3) with an int widens to Decimal(13, 3): 10 integral digits + 3 scale.
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateArray(Literal.create(null, DecimalType(5, 3))
        :: Literal(1)
        :: Nil),
      CreateArray(Literal.create(null, DecimalType(5, 3)).cast(DecimalType(13, 3))
        :: Literal(1).cast(DecimalType(13, 3))
        :: Nil))
    // Mixed decimals cap at the maximum precision/scale (38, 38).
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateArray(Literal.create(null, DecimalType(5, 3))
        :: Literal.create(null, DecimalType(22, 10))
        :: Literal.create(null, DecimalType(38, 38))
        :: Nil),
      CreateArray(Literal.create(null, DecimalType(5, 3)).cast(DecimalType(38, 38))
        :: Literal.create(null, DecimalType(22, 10)).cast(DecimalType(38, 38))
        :: Literal.create(null, DecimalType(38, 38))
        :: Nil))
  }
  // CreateMap takes alternating key/value children; keys and values are widened
  // independently of each other.
  test("CreateMap casts") {
    // type coercion for map keys
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal(1)
        :: Literal("a")
        :: Literal.create(2.0f, FloatType)
        :: Literal("b")
        :: Nil),
      CreateMap(Cast(Literal(1), FloatType)
        :: Literal("a")
        :: Literal.create(2.0f, FloatType)
        :: Literal("b")
        :: Nil))
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal.create(null, DecimalType(5, 3))
        :: Literal("a")
        :: Literal.create(2.0f, FloatType)
        :: Literal("b")
        :: Nil),
      CreateMap(Literal.create(null, DecimalType(5, 3)).cast(DoubleType)
        :: Literal("a")
        :: Literal.create(2.0f, FloatType).cast(DoubleType)
        :: Literal("b")
        :: Nil))
    // type coercion for map values
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal(1)
        :: Literal("a")
        :: Literal(2)
        :: Literal(3.0)
        :: Nil),
      CreateMap(Literal(1)
        :: Literal("a")
        :: Literal(2)
        :: Cast(Literal(3.0), StringType)
        :: Nil))
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal(1)
        :: Literal.create(null, DecimalType(38, 0))
        :: Literal(2)
        :: Literal.create(null, DecimalType(38, 38))
        :: Nil),
      CreateMap(Literal(1)
        :: Literal.create(null, DecimalType(38, 0)).cast(DecimalType(38, 38))
        :: Literal(2)
        :: Literal.create(null, DecimalType(38, 38))
        :: Nil))
    // type coercion for both map keys and values
    ruleTest(TypeCoercion.FunctionArgumentConversion,
      CreateMap(Literal(1)
        :: Literal("a")
        :: Literal(2.0)
        :: Literal(3.0)
        :: Nil),
      CreateMap(Cast(Literal(1), DoubleType)
        :: Literal("a")
        :: Literal(2.0)
        :: Cast(Literal(3.0), StringType)
        :: Nil))
  }
test("greatest/least cast") {
for (operator <- Seq[(Seq[Expression] => Expression)](Greatest, Least)) {
ruleTest(TypeCoercion.FunctionArgumentConversion,
operator(Literal(1.0)
:: Literal(1)
:: Literal.create(1.0f, FloatType)
:: Nil),
operator(Literal(1.0)
:: Cast(Literal(1), DoubleType)
:: Cast(Literal.create(1.0f, FloatType), DoubleType)
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
operator(Literal(1L)
:: Literal(1)
:: Literal(new java.math.BigDecimal("1000000000000000000000"))
:: Nil),
operator(Cast(Literal(1L), DecimalType(22, 0))
:: Cast(Literal(1), DecimalType(22, 0))
:: Literal(new java.math.BigDecimal("1000000000000000000000"))
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
operator(Literal(1.0)
:: Literal.create(null, DecimalType(10, 5))
:: Literal(1)
:: Nil),
operator(Literal(1.0)
:: Literal.create(null, DecimalType(10, 5)).cast(DoubleType)
:: Literal(1).cast(DoubleType)
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
operator(Literal.create(null, DecimalType(15, 0))
:: Literal.create(null, DecimalType(10, 5))
:: Literal(1)
:: Nil),
operator(Literal.create(null, DecimalType(15, 0)).cast(DecimalType(20, 5))
:: Literal.create(null, DecimalType(10, 5)).cast(DecimalType(20, 5))
:: Literal(1).cast(DecimalType(20, 5))
:: Nil))
ruleTest(TypeCoercion.FunctionArgumentConversion,
operator(Literal.create(2L, LongType)
:: Literal(1)
:: Literal.create(null, DecimalType(10, 5))
:: Nil),
operator(Literal.create(2L, LongType).cast(DecimalType(25, 5))
:: Literal(1).cast(DecimalType(25, 5))
:: Literal.create(null, DecimalType(10, 5)).cast(DecimalType(25, 5))
:: Nil))
}
}
test("nanvl casts") {
ruleTest(TypeCoercion.FunctionArgumentConversion,
NaNvl(Literal.create(1.0f, FloatType), Literal.create(1.0, DoubleType)),
NaNvl(Cast(Literal.create(1.0f, FloatType), DoubleType), Literal.create(1.0, DoubleType)))
ruleTest(TypeCoercion.FunctionArgumentConversion,
NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0f, FloatType)),
NaNvl(Literal.create(1.0, DoubleType), Cast(Literal.create(1.0f, FloatType), DoubleType)))
ruleTest(TypeCoercion.FunctionArgumentConversion,
NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0, DoubleType)),
NaNvl(Literal.create(1.0, DoubleType), Literal.create(1.0, DoubleType)))
ruleTest(TypeCoercion.FunctionArgumentConversion,
NaNvl(Literal.create(1.0f, FloatType), Literal.create(null, NullType)),
NaNvl(Literal.create(1.0f, FloatType), Cast(Literal.create(null, NullType), FloatType)))
ruleTest(TypeCoercion.FunctionArgumentConversion,
NaNvl(Literal.create(1.0, DoubleType), Literal.create(null, NullType)),
NaNvl(Literal.create(1.0, DoubleType), Cast(Literal.create(null, NullType), DoubleType)))
}
test("type coercion for If") {
val rule = TypeCoercion.IfCoercion
val intLit = Literal(1)
val doubleLit = Literal(1.0)
val trueLit = Literal.create(true, BooleanType)
val falseLit = Literal.create(false, BooleanType)
val stringLit = Literal.create("c", StringType)
val floatLit = Literal.create(1.0f, FloatType)
val timestampLit = Literal.create(Timestamp.valueOf("2017-04-12 00:00:00"), TimestampType)
val decimalLit = Literal(new java.math.BigDecimal("1000000000000000000000"))
ruleTest(rule,
If(Literal(true), Literal(1), Literal(1L)),
If(Literal(true), Cast(Literal(1), LongType), Literal(1L)))
ruleTest(rule,
If(Literal.create(null, NullType), Literal(1), Literal(1)),
If(Literal.create(null, BooleanType), Literal(1), Literal(1)))
ruleTest(rule,
If(AssertTrue(trueLit), Literal(1), Literal(2)),
If(Cast(AssertTrue(trueLit), BooleanType), Literal(1), Literal(2)))
ruleTest(rule,
If(AssertTrue(falseLit), Literal(1), Literal(2)),
If(Cast(AssertTrue(falseLit), BooleanType), Literal(1), Literal(2)))
ruleTest(rule,
If(trueLit, intLit, doubleLit),
If(trueLit, Cast(intLit, DoubleType), doubleLit))
ruleTest(rule,
If(trueLit, floatLit, doubleLit),
If(trueLit, Cast(floatLit, DoubleType), doubleLit))
ruleTest(rule,
If(trueLit, floatLit, decimalLit),
If(trueLit, Cast(floatLit, DoubleType), Cast(decimalLit, DoubleType)))
ruleTest(rule,
If(falseLit, stringLit, doubleLit),
If(falseLit, stringLit, Cast(doubleLit, StringType)))
ruleTest(rule,
If(trueLit, timestampLit, stringLit),
If(trueLit, Cast(timestampLit, StringType), stringLit))
}
test("type coercion for CaseKeyWhen") {
ruleTest(TypeCoercion.ImplicitTypeCasts,
CaseKeyWhen(Literal(1.toShort), Seq(Literal(1), Literal("a"))),
CaseKeyWhen(Cast(Literal(1.toShort), IntegerType), Seq(Literal(1), Literal("a")))
)
ruleTest(TypeCoercion.CaseWhenCoercion,
CaseKeyWhen(Literal(true), Seq(Literal(1), Literal("a"))),
CaseKeyWhen(Literal(true), Seq(Literal(1), Literal("a")))
)
ruleTest(TypeCoercion.CaseWhenCoercion,
CaseWhen(Seq((Literal(true), Literal(1.2))),
Literal.create(BigDecimal.valueOf(1), DecimalType(7, 2))),
CaseWhen(Seq((Literal(true), Literal(1.2))),
Cast(Literal.create(BigDecimal.valueOf(1), DecimalType(7, 2)), DoubleType))
)
ruleTest(TypeCoercion.CaseWhenCoercion,
CaseWhen(Seq((Literal(true), Literal(100L))),
Literal.create(BigDecimal.valueOf(1), DecimalType(7, 2))),
CaseWhen(Seq((Literal(true), Cast(Literal(100L), DecimalType(22, 2)))),
Cast(Literal.create(BigDecimal.valueOf(1), DecimalType(7, 2)), DecimalType(22, 2)))
)
}
test("type coercion for Stack") {
val rule = TypeCoercion.StackCoercion
ruleTest(rule,
Stack(Seq(Literal(3), Literal(1), Literal(2), Literal(null))),
Stack(Seq(Literal(3), Literal(1), Literal(2), Literal.create(null, IntegerType))))
ruleTest(rule,
Stack(Seq(Literal(3), Literal(1.0), Literal(null), Literal(3.0))),
Stack(Seq(Literal(3), Literal(1.0), Literal.create(null, DoubleType), Literal(3.0))))
ruleTest(rule,
Stack(Seq(Literal(3), Literal(null), Literal("2"), Literal("3"))),
Stack(Seq(Literal(3), Literal.create(null, StringType), Literal("2"), Literal("3"))))
ruleTest(rule,
Stack(Seq(Literal(3), Literal(null), Literal(null), Literal(null))),
Stack(Seq(Literal(3), Literal(null), Literal(null), Literal(null))))
ruleTest(rule,
Stack(Seq(Literal(2),
Literal(1), Literal("2"),
Literal(null), Literal(null))),
Stack(Seq(Literal(2),
Literal(1), Literal("2"),
Literal.create(null, IntegerType), Literal.create(null, StringType))))
ruleTest(rule,
Stack(Seq(Literal(2),
Literal(1), Literal(null),
Literal(null), Literal("2"))),
Stack(Seq(Literal(2),
Literal(1), Literal.create(null, StringType),
Literal.create(null, IntegerType), Literal("2"))))
ruleTest(rule,
Stack(Seq(Literal(2),
Literal(null), Literal(1),
Literal("2"), Literal(null))),
Stack(Seq(Literal(2),
Literal.create(null, StringType), Literal(1),
Literal("2"), Literal.create(null, IntegerType))))
ruleTest(rule,
Stack(Seq(Literal(2),
Literal(null), Literal(null),
Literal(1), Literal("2"))),
Stack(Seq(Literal(2),
Literal.create(null, IntegerType), Literal.create(null, StringType),
Literal(1), Literal("2"))))
ruleTest(rule,
Stack(Seq(Subtract(Literal(3), Literal(1)),
Literal(1), Literal("2"),
Literal(null), Literal(null))),
Stack(Seq(Subtract(Literal(3), Literal(1)),
Literal(1), Literal("2"),
Literal.create(null, IntegerType), Literal.create(null, StringType))))
}
test("type coercion for Concat") {
val rule = TypeCoercion.ConcatCoercion
ruleTest(rule,
Concat(Seq(Literal("ab"), Literal("cde"))),
Concat(Seq(Literal("ab"), Literal("cde"))))
ruleTest(rule,
Concat(Seq(Literal(null), Literal("abc"))),
Concat(Seq(Cast(Literal(null), StringType), Literal("abc"))))
ruleTest(rule,
Concat(Seq(Literal(1), Literal("234"))),
Concat(Seq(Cast(Literal(1), StringType), Literal("234"))))
ruleTest(rule,
Concat(Seq(Literal("1"), Literal("234".getBytes()))),
Concat(Seq(Literal("1"), Cast(Literal("234".getBytes()), StringType))))
ruleTest(rule,
Concat(Seq(Literal(1L), Literal(2.toByte), Literal(0.1))),
Concat(Seq(Cast(Literal(1L), StringType), Cast(Literal(2.toByte), StringType),
Cast(Literal(0.1), StringType))))
ruleTest(rule,
Concat(Seq(Literal(true), Literal(0.1f), Literal(3.toShort))),
Concat(Seq(Cast(Literal(true), StringType), Cast(Literal(0.1f), StringType),
Cast(Literal(3.toShort), StringType))))
ruleTest(rule,
Concat(Seq(Literal(1L), Literal(0.1))),
Concat(Seq(Cast(Literal(1L), StringType), Cast(Literal(0.1), StringType))))
ruleTest(rule,
Concat(Seq(Literal(Decimal(10)))),
Concat(Seq(Cast(Literal(Decimal(10)), StringType))))
ruleTest(rule,
Concat(Seq(Literal(BigDecimal.valueOf(10)))),
Concat(Seq(Cast(Literal(BigDecimal.valueOf(10)), StringType))))
ruleTest(rule,
Concat(Seq(Literal(java.math.BigDecimal.valueOf(10)))),
Concat(Seq(Cast(Literal(java.math.BigDecimal.valueOf(10)), StringType))))
ruleTest(rule,
Concat(Seq(Literal(new java.sql.Date(0)), Literal(new Timestamp(0)))),
Concat(Seq(Cast(Literal(new java.sql.Date(0)), StringType),
Cast(Literal(new Timestamp(0)), StringType))))
withSQLConf(SQLConf.CONCAT_BINARY_AS_STRING.key -> "true") {
ruleTest(rule,
Concat(Seq(Literal("123".getBytes), Literal("456".getBytes))),
Concat(Seq(Cast(Literal("123".getBytes), StringType),
Cast(Literal("456".getBytes), StringType))))
}
withSQLConf(SQLConf.CONCAT_BINARY_AS_STRING.key -> "false") {
ruleTest(rule,
Concat(Seq(Literal("123".getBytes), Literal("456".getBytes))),
Concat(Seq(Literal("123".getBytes), Literal("456".getBytes))))
}
}
test("type coercion for Elt") {
val rule = TypeCoercion.EltCoercion
ruleTest(rule,
Elt(Seq(Literal(1), Literal("ab"), Literal("cde"))),
Elt(Seq(Literal(1), Literal("ab"), Literal("cde"))))
ruleTest(rule,
Elt(Seq(Literal(1.toShort), Literal("ab"), Literal("cde"))),
Elt(Seq(Cast(Literal(1.toShort), IntegerType), Literal("ab"), Literal("cde"))))
ruleTest(rule,
Elt(Seq(Literal(2), Literal(null), Literal("abc"))),
Elt(Seq(Literal(2), Cast(Literal(null), StringType), Literal("abc"))))
ruleTest(rule,
Elt(Seq(Literal(2), Literal(1), Literal("234"))),
Elt(Seq(Literal(2), Cast(Literal(1), StringType), Literal("234"))))
ruleTest(rule,
Elt(Seq(Literal(3), Literal(1L), Literal(2.toByte), Literal(0.1))),
Elt(Seq(Literal(3), Cast(Literal(1L), StringType), Cast(Literal(2.toByte), StringType),
Cast(Literal(0.1), StringType))))
ruleTest(rule,
Elt(Seq(Literal(2), Literal(true), Literal(0.1f), Literal(3.toShort))),
Elt(Seq(Literal(2), Cast(Literal(true), StringType), Cast(Literal(0.1f), StringType),
Cast(Literal(3.toShort), StringType))))
ruleTest(rule,
Elt(Seq(Literal(1), Literal(1L), Literal(0.1))),
Elt(Seq(Literal(1), Cast(Literal(1L), StringType), Cast(Literal(0.1), StringType))))
ruleTest(rule,
Elt(Seq(Literal(1), Literal(Decimal(10)))),
Elt(Seq(Literal(1), Cast(Literal(Decimal(10)), StringType))))
ruleTest(rule,
Elt(Seq(Literal(1), Literal(BigDecimal.valueOf(10)))),
Elt(Seq(Literal(1), Cast(Literal(BigDecimal.valueOf(10)), StringType))))
ruleTest(rule,
Elt(Seq(Literal(1), Literal(java.math.BigDecimal.valueOf(10)))),
Elt(Seq(Literal(1), Cast(Literal(java.math.BigDecimal.valueOf(10)), StringType))))
ruleTest(rule,
Elt(Seq(Literal(2), Literal(new java.sql.Date(0)), Literal(new Timestamp(0)))),
Elt(Seq(Literal(2), Cast(Literal(new java.sql.Date(0)), StringType),
Cast(Literal(new Timestamp(0)), StringType))))
withSQLConf(SQLConf.ELT_OUTPUT_AS_STRING.key -> "true") {
ruleTest(rule,
Elt(Seq(Literal(1), Literal("123".getBytes), Literal("456".getBytes))),
Elt(Seq(Literal(1), Cast(Literal("123".getBytes), StringType),
Cast(Literal("456".getBytes), StringType))))
}
withSQLConf(SQLConf.ELT_OUTPUT_AS_STRING.key -> "false") {
ruleTest(rule,
Elt(Seq(Literal(1), Literal("123".getBytes), Literal("456".getBytes))),
Elt(Seq(Literal(1), Literal("123".getBytes), Literal("456".getBytes))))
}
}
test("BooleanEquality type cast") {
val be = TypeCoercion.BooleanEquality
// Use something more than a literal to avoid triggering the simplification rules.
val one = Add(Literal(Decimal(1)), Literal(Decimal(0)))
ruleTest(be,
EqualTo(Literal(true), one),
EqualTo(Cast(Literal(true), one.dataType), one)
)
ruleTest(be,
EqualTo(one, Literal(true)),
EqualTo(one, Cast(Literal(true), one.dataType))
)
ruleTest(be,
EqualNullSafe(Literal(true), one),
EqualNullSafe(Cast(Literal(true), one.dataType), one)
)
ruleTest(be,
EqualNullSafe(one, Literal(true)),
EqualNullSafe(one, Cast(Literal(true), one.dataType))
)
}
test("BooleanEquality simplification") {
val be = TypeCoercion.BooleanEquality
ruleTest(be,
EqualTo(Literal(true), Literal(1)),
Literal(true)
)
ruleTest(be,
EqualTo(Literal(true), Literal(0)),
Not(Literal(true))
)
ruleTest(be,
EqualNullSafe(Literal(true), Literal(1)),
And(IsNotNull(Literal(true)), Literal(true))
)
ruleTest(be,
EqualNullSafe(Literal(true), Literal(0)),
And(IsNotNull(Literal(true)), Not(Literal(true)))
)
ruleTest(be,
EqualTo(Literal(true), Literal(1L)),
Literal(true)
)
ruleTest(be,
EqualTo(Literal(new java.math.BigDecimal(1)), Literal(true)),
Literal(true)
)
ruleTest(be,
EqualTo(Literal(BigDecimal(0)), Literal(true)),
Not(Literal(true))
)
ruleTest(be,
EqualTo(Literal(Decimal(1)), Literal(true)),
Literal(true)
)
ruleTest(be,
EqualTo(Literal.create(Decimal(1), DecimalType(8, 0)), Literal(true)),
Literal(true)
)
}
/**
 * Asserts that each output attribute of `logical` carries the corresponding
 * expected data type. Pairs are formed positionally; extra elements on either
 * side are ignored (zip truncates to the shorter sequence).
 */
private def checkOutput(logical: LogicalPlan, expectTypes: Seq[DataType]): Unit = {
  val paired = logical.output.iterator.zip(expectTypes.iterator)
  paired.foreach { pair =>
    assert(pair._1.dataType === pair._2)
  }
}
// Mirrors the analyzer's ResolveTimeZone pass so widened plans end up fully
// resolved before their output types are inspected.
private val timeZoneResolver = ResolveTimeZone
// Applies WidenSetOperationTypes followed by time zone resolution to `plan`.
private def widenSetOperationTypes(plan: LogicalPlan): LogicalPlan = {
  timeZoneResolver(TypeCoercion.WidenSetOperationTypes(plan))
}
test("WidenSetOperationTypes for except and intersect") {
val firstTable = LocalRelation(
AttributeReference("i", IntegerType)(),
AttributeReference("u", DecimalType.SYSTEM_DEFAULT)(),
AttributeReference("b", ByteType)(),
AttributeReference("d", DoubleType)())
val secondTable = LocalRelation(
AttributeReference("s", StringType)(),
AttributeReference("d", DecimalType(2, 1))(),
AttributeReference("f", FloatType)(),
AttributeReference("l", LongType)())
val expectedTypes = Seq(StringType, DecimalType.SYSTEM_DEFAULT, FloatType, DoubleType)
val r1 = widenSetOperationTypes(
Except(firstTable, secondTable, isAll = false)).asInstanceOf[Except]
val r2 = widenSetOperationTypes(
Intersect(firstTable, secondTable, isAll = false)).asInstanceOf[Intersect]
checkOutput(r1.left, expectedTypes)
checkOutput(r1.right, expectedTypes)
checkOutput(r2.left, expectedTypes)
checkOutput(r2.right, expectedTypes)
// Check if a Project is added
assert(r1.left.isInstanceOf[Project])
assert(r1.right.isInstanceOf[Project])
assert(r2.left.isInstanceOf[Project])
assert(r2.right.isInstanceOf[Project])
}
test("WidenSetOperationTypes for union") {
val firstTable = LocalRelation(
AttributeReference("i", IntegerType)(),
AttributeReference("u", DecimalType.SYSTEM_DEFAULT)(),
AttributeReference("b", ByteType)(),
AttributeReference("d", DoubleType)())
val secondTable = LocalRelation(
AttributeReference("s", StringType)(),
AttributeReference("d", DecimalType(2, 1))(),
AttributeReference("f", FloatType)(),
AttributeReference("l", LongType)())
val thirdTable = LocalRelation(
AttributeReference("m", StringType)(),
AttributeReference("n", DecimalType.SYSTEM_DEFAULT)(),
AttributeReference("p", FloatType)(),
AttributeReference("q", DoubleType)())
val forthTable = LocalRelation(
AttributeReference("m", StringType)(),
AttributeReference("n", DecimalType.SYSTEM_DEFAULT)(),
AttributeReference("p", ByteType)(),
AttributeReference("q", DoubleType)())
val expectedTypes = Seq(StringType, DecimalType.SYSTEM_DEFAULT, FloatType, DoubleType)
val unionRelation = widenSetOperationTypes(
Union(firstTable :: secondTable :: thirdTable :: forthTable :: Nil)).asInstanceOf[Union]
assert(unionRelation.children.length == 4)
checkOutput(unionRelation.children.head, expectedTypes)
checkOutput(unionRelation.children(1), expectedTypes)
checkOutput(unionRelation.children(2), expectedTypes)
checkOutput(unionRelation.children(3), expectedTypes)
assert(unionRelation.children.head.isInstanceOf[Project])
assert(unionRelation.children(1).isInstanceOf[Project])
assert(unionRelation.children(2).isInstanceOf[Project])
assert(unionRelation.children(3).isInstanceOf[Project])
}
test("Transform Decimal precision/scale for union except and intersect") {
def checkOutput(logical: LogicalPlan, expectTypes: Seq[DataType]): Unit = {
logical.output.zip(expectTypes).foreach { case (attr, dt) =>
assert(attr.dataType === dt)
}
}
val left1 = LocalRelation(
AttributeReference("l", DecimalType(10, 8))())
val right1 = LocalRelation(
AttributeReference("r", DecimalType(5, 5))())
val expectedType1 = Seq(DecimalType(10, 8))
val r1 = widenSetOperationTypes(Union(left1, right1)).asInstanceOf[Union]
val r2 = widenSetOperationTypes(
Except(left1, right1, isAll = false)).asInstanceOf[Except]
val r3 = widenSetOperationTypes(
Intersect(left1, right1, isAll = false)).asInstanceOf[Intersect]
checkOutput(r1.children.head, expectedType1)
checkOutput(r1.children.last, expectedType1)
checkOutput(r2.left, expectedType1)
checkOutput(r2.right, expectedType1)
checkOutput(r3.left, expectedType1)
checkOutput(r3.right, expectedType1)
val plan1 = LocalRelation(AttributeReference("l", DecimalType(10, 5))())
val rightTypes = Seq(ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType)
val expectedTypes = Seq(DecimalType(10, 5), DecimalType(10, 5), DecimalType(15, 5),
DecimalType(25, 5), DoubleType, DoubleType)
rightTypes.zip(expectedTypes).foreach { case (rType, expectedType) =>
val plan2 = LocalRelation(
AttributeReference("r", rType)())
val r1 = widenSetOperationTypes(Union(plan1, plan2)).asInstanceOf[Union]
val r2 = widenSetOperationTypes(
Except(plan1, plan2, isAll = false)).asInstanceOf[Except]
val r3 = widenSetOperationTypes(
Intersect(plan1, plan2, isAll = false)).asInstanceOf[Intersect]
checkOutput(r1.children.last, Seq(expectedType))
checkOutput(r2.right, Seq(expectedType))
checkOutput(r3.right, Seq(expectedType))
val r4 = widenSetOperationTypes(Union(plan2, plan1)).asInstanceOf[Union]
val r5 = widenSetOperationTypes(
Except(plan2, plan1, isAll = false)).asInstanceOf[Except]
val r6 = widenSetOperationTypes(
Intersect(plan2, plan1, isAll = false)).asInstanceOf[Intersect]
checkOutput(r4.children.last, Seq(expectedType))
checkOutput(r5.left, Seq(expectedType))
checkOutput(r6.left, Seq(expectedType))
}
}
test("SPARK-32638: corrects references when adding aliases in WidenSetOperationTypes") {
val t1 = LocalRelation(AttributeReference("v", DecimalType(10, 0))())
val t2 = LocalRelation(AttributeReference("v", DecimalType(11, 0))())
val p1 = t1.select(t1.output.head).as("p1")
val p2 = t2.select(t2.output.head).as("p2")
val union = p1.union(p2)
val wp1 = widenSetOperationTypes(union.select(p1.output.head, $"p2.v"))
assert(wp1.isInstanceOf[Project])
// The attribute `p1.output.head` should be replaced in the root `Project`.
assert(wp1.expressions.forall(_.find(_ == p1.output.head).isEmpty))
val wp2 = widenSetOperationTypes(Aggregate(Nil, sum(p1.output.head).as("v") :: Nil, union))
assert(wp2.isInstanceOf[Aggregate])
assert(wp2.missingInput.isEmpty)
}
/**
 * There are rules that need to not fire before child expressions get resolved.
 * We use this test to make sure those rules do not fire early.
 */
test("make sure rules do not fire early") {
  // InConversion
  val inConversion = TypeCoercion.InConversion
  // unresolved value expression: rule must be a no-op
  ruleTest(inConversion,
    In(UnresolvedAttribute("a"), Seq(Literal(1))),
    In(UnresolvedAttribute("a"), Seq(Literal(1)))
  )
  // unresolved list element: rule must be a no-op
  ruleTest(inConversion,
    In(Literal("test"), Seq(UnresolvedAttribute("a"), Literal(1))),
    In(Literal("test"), Seq(UnresolvedAttribute("a"), Literal(1)))
  )
  // fully resolved: rule fires and unifies everything to string
  ruleTest(inConversion,
    In(Literal("a"), Seq(Literal(1), Literal("b"))),
    In(Cast(Literal("a"), StringType),
      Seq(Cast(Literal(1), StringType), Cast(Literal("b"), StringType)))
  )
}
test("SPARK-15776 Divide expression's dataType should be casted to Double or Decimal " +
"in aggregation function like sum") {
val rules = Seq(FunctionArgumentConversion, Division)
// Casts Integer to Double
ruleTest(rules, sum(Divide(4, 3)), sum(Divide(Cast(4, DoubleType), Cast(3, DoubleType))))
// Left expression is Double, right expression is Int. Another rule ImplicitTypeCasts will
// cast the right expression to Double.
ruleTest(rules, sum(Divide(4.0, 3)), sum(Divide(4.0, 3)))
// Left expression is Int, right expression is Double
ruleTest(rules, sum(Divide(4, 3.0)), sum(Divide(Cast(4, DoubleType), Cast(3.0, DoubleType))))
// Casts Float to Double
ruleTest(
rules,
sum(Divide(4.0f, 3)),
sum(Divide(Cast(4.0f, DoubleType), Cast(3, DoubleType))))
// Left expression is Decimal, right expression is Int. Another rule DecimalPrecision will cast
// the right expression to Decimal.
ruleTest(rules, sum(Divide(Decimal(4.0), 3)), sum(Divide(Decimal(4.0), 3)))
}
test("SPARK-17117 null type coercion in divide") {
val rules = Seq(FunctionArgumentConversion, Division, ImplicitTypeCasts)
val nullLit = Literal.create(null, NullType)
ruleTest(rules, Divide(1L, nullLit), Divide(Cast(1L, DoubleType), Cast(nullLit, DoubleType)))
ruleTest(rules, Divide(nullLit, 1L), Divide(Cast(nullLit, DoubleType), Cast(1L, DoubleType)))
}
test("binary comparison with string promotion") {
val rule = TypeCoercion.PromoteStrings
ruleTest(rule,
GreaterThan(Literal("123"), Literal(1)),
GreaterThan(Cast(Literal("123"), IntegerType), Literal(1)))
ruleTest(rule,
LessThan(Literal(true), Literal("123")),
LessThan(Literal(true), Cast(Literal("123"), BooleanType)))
ruleTest(rule,
EqualTo(Literal(Array(1, 2)), Literal("123")),
EqualTo(Literal(Array(1, 2)), Literal("123")))
ruleTest(rule,
GreaterThan(Literal("1.5"), Literal(BigDecimal("0.5"))),
GreaterThan(Cast(Literal("1.5"), DoubleType), Cast(Literal(BigDecimal("0.5")),
DoubleType)))
// Checks that dates/timestamps are not promoted to strings
val date0301 = Literal(java.sql.Date.valueOf("2017-03-01"))
val timestamp0301000000 = Literal(Timestamp.valueOf("2017-03-01 00:00:00"))
val timestamp0301000001 = Literal(Timestamp.valueOf("2017-03-01 00:00:01"))
// `Date` should be treated as timestamp at 00:00:00 See SPARK-23549
ruleTest(rule, EqualTo(date0301, timestamp0301000000),
EqualTo(Cast(date0301, TimestampType), timestamp0301000000))
ruleTest(rule, LessThan(date0301, timestamp0301000001),
LessThan(Cast(date0301, TimestampType), timestamp0301000001))
}
test("cast WindowFrame boundaries to the type they operate upon") {
// Can cast frame boundaries to order dataType.
ruleTest(WindowFrameCoercion,
windowSpec(
Seq(UnresolvedAttribute("a")),
Seq(SortOrder(Literal(1L), Ascending)),
SpecifiedWindowFrame(RangeFrame, Literal(3), Literal(2147483648L))),
windowSpec(
Seq(UnresolvedAttribute("a")),
Seq(SortOrder(Literal(1L), Ascending)),
SpecifiedWindowFrame(RangeFrame, Cast(3, LongType), Literal(2147483648L)))
)
// Cannot cast frame boundaries to order dataType.
ruleTest(WindowFrameCoercion,
windowSpec(
Seq(UnresolvedAttribute("a")),
Seq(SortOrder(Literal.default(DateType), Ascending)),
SpecifiedWindowFrame(RangeFrame, Literal(10.0), Literal(2147483648L))),
windowSpec(
Seq(UnresolvedAttribute("a")),
Seq(SortOrder(Literal.default(DateType), Ascending)),
SpecifiedWindowFrame(RangeFrame, Literal(10.0), Literal(2147483648L)))
)
// Should not cast SpecialFrameBoundary.
ruleTest(WindowFrameCoercion,
windowSpec(
Seq(UnresolvedAttribute("a")),
Seq(SortOrder(Literal(1L), Ascending)),
SpecifiedWindowFrame(RangeFrame, CurrentRow, UnboundedFollowing)),
windowSpec(
Seq(UnresolvedAttribute("a")),
Seq(SortOrder(Literal(1L), Ascending)),
SpecifiedWindowFrame(RangeFrame, CurrentRow, UnboundedFollowing))
)
}
test("SPARK-29000: skip to handle decimals in ImplicitTypeCasts") {
ruleTest(TypeCoercion.ImplicitTypeCasts,
Multiply(CaseWhen(Seq((EqualTo(1, 2), Cast(1, DecimalType(34, 24)))),
Cast(100, DecimalType(34, 24))), Literal(1)),
Multiply(CaseWhen(Seq((EqualTo(1, 2), Cast(1, DecimalType(34, 24)))),
Cast(100, DecimalType(34, 24))), Literal(1)))
ruleTest(TypeCoercion.ImplicitTypeCasts,
Multiply(CaseWhen(Seq((EqualTo(1, 2), Cast(1, DecimalType(34, 24)))),
Cast(100, DecimalType(34, 24))), Cast(1, IntegerType)),
Multiply(CaseWhen(Seq((EqualTo(1, 2), Cast(1, DecimalType(34, 24)))),
Cast(100, DecimalType(34, 24))), Cast(1, IntegerType)))
}
test("SPARK-31468: null types should be casted to decimal types in ImplicitTypeCasts") {
Seq(AnyTypeBinaryOperator(_, _), NumericTypeBinaryOperator(_, _)).foreach { binaryOp =>
// binaryOp(decimal, null) case
ruleTest(TypeCoercion.ImplicitTypeCasts,
binaryOp(Literal.create(null, DecimalType.SYSTEM_DEFAULT),
Literal.create(null, NullType)),
binaryOp(Literal.create(null, DecimalType.SYSTEM_DEFAULT),
Cast(Literal.create(null, NullType), DecimalType.SYSTEM_DEFAULT)))
// binaryOp(null, decimal) case
ruleTest(TypeCoercion.ImplicitTypeCasts,
binaryOp(Literal.create(null, NullType),
Literal.create(null, DecimalType.SYSTEM_DEFAULT)),
binaryOp(Cast(Literal.create(null, NullType), DecimalType.SYSTEM_DEFAULT),
Literal.create(null, DecimalType.SYSTEM_DEFAULT)))
}
}
test("SPARK-31761: byte, short and int should be cast to long for IntegralDivide's datatype") {
val rules = Seq(FunctionArgumentConversion, Division, ImplicitTypeCasts)
// Casts Byte to Long
ruleTest(TypeCoercion.IntegralDivision, IntegralDivide(2.toByte, 1.toByte),
IntegralDivide(Cast(2.toByte, LongType), Cast(1.toByte, LongType)))
// Casts Short to Long
ruleTest(TypeCoercion.IntegralDivision, IntegralDivide(2.toShort, 1.toShort),
IntegralDivide(Cast(2.toShort, LongType), Cast(1.toShort, LongType)))
// Casts Integer to Long
ruleTest(TypeCoercion.IntegralDivision, IntegralDivide(2, 1),
IntegralDivide(Cast(2, LongType), Cast(1, LongType)))
// should not be any change for Long data types
ruleTest(TypeCoercion.IntegralDivision, IntegralDivide(2L, 1L), IntegralDivide(2L, 1L))
// one of the operand is byte
ruleTest(TypeCoercion.IntegralDivision, IntegralDivide(2L, 1.toByte),
IntegralDivide(2L, Cast(1.toByte, LongType)))
// one of the operand is short
ruleTest(TypeCoercion.IntegralDivision, IntegralDivide(2.toShort, 1L),
IntegralDivide(Cast(2.toShort, LongType), 1L))
// one of the operand is int
ruleTest(TypeCoercion.IntegralDivision, IntegralDivide(2, 1L),
IntegralDivide(Cast(2, LongType), 1L))
}
}
/**
 * Companion fixtures for the coercion tests: representative DataType
 * groupings, plus minimal Unevaluable expression/operator stubs whose input
 * type requirements drive the implicit-cast rules under test.
 */
object TypeCoercionSuite {

  // Whole-number atomic types, narrowest to widest.
  val integralTypes: Seq[DataType] =
    Seq(ByteType, ShortType, IntegerType, LongType)

  // Fractional types, including default and fixed precision/scale decimals.
  val fractionalTypes: Seq[DataType] =
    Seq(DoubleType, FloatType, DecimalType.SYSTEM_DEFAULT, DecimalType(10, 2))

  val numericTypes: Seq[DataType] = integralTypes ++ fractionalTypes

  val atomicTypes: Seq[DataType] =
    numericTypes ++ Seq(BinaryType, BooleanType, StringType, DateType, TimestampType)

  // Nested types: arrays, a map, and structs of increasing width.
  val complexTypes: Seq[DataType] =
    Seq(ArrayType(IntegerType),
      ArrayType(StringType),
      MapType(StringType, StringType),
      new StructType().add("a1", StringType),
      new StructType().add("a1", StringType).add("a2", IntegerType))

  val allTypes: Seq[DataType] =
    atomicTypes ++ complexTypes ++ Seq(NullType, CalendarIntervalType)

  // Accepts any input type; exercises rules that should insert no cast.
  case class AnyTypeUnaryExpression(child: Expression)
    extends UnaryExpression with ExpectsInputTypes with Unevaluable {
    override def inputTypes: Seq[AbstractDataType] = Seq(AnyDataType)
    override def dataType: DataType = NullType
  }

  // Requires a numeric input; exercises implicit numeric casts.
  case class NumericTypeUnaryExpression(child: Expression)
    extends UnaryExpression with ExpectsInputTypes with Unevaluable {
    override def inputTypes: Seq[AbstractDataType] = Seq(NumericType)
    override def dataType: DataType = NullType
  }

  // Binary operator that accepts operands of any type.
  case class AnyTypeBinaryOperator(left: Expression, right: Expression)
    extends BinaryOperator with Unevaluable {
    override def dataType: DataType = NullType
    override def inputType: AbstractDataType = AnyDataType
    override def symbol: String = "anytype"
  }

  // Binary operator that requires numeric operands.
  case class NumericTypeBinaryOperator(left: Expression, right: Expression)
    extends BinaryOperator with Unevaluable {
    override def dataType: DataType = NullType
    override def inputType: AbstractDataType = NumericType
    override def symbol: String = "numerictype"
  }
}
|
shuangshuangwang/spark
|
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
|
Scala
|
apache-2.0
| 75,478
|
/*
* Copyright 2015 bigobject.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.bigobject.spark
import java.util.Properties
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.{Logging, Partition}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SaveMode, DataFrame, Row, SQLContext}
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types.StructType
/**
 * Spark SQL data source provider for BigObject (BO) servers. Supports loading
 * an existing BO table (optionally with a caller-supplied schema) and saving
 * a DataFrame into a BO table under the standard SaveMode semantics.
 *
 * Required options: "url" (comma-separated list of BO servers) and "dbtable".
 */
class DefaultSource
  extends RelationProvider
  with SchemaRelationProvider
  with CreatableRelationProvider
  with Logging {

  /** Returns a new base relation with the given parameters. */
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String]): BaseRelation = {
    // Delegate with a null schema so it gets resolved from the BO server.
    createRelation(sqlContext, parameters, null)
  }

  /** Returns a new base relation with the given parameters and schema. */
  override def createRelation(
      sqlContext: SQLContext,
      parameters: Map[String, String],
      schema: StructType): BaseRelation = {
    val url = parameters.getOrElse("url", sys.error("Option 'url' not specified"))
    val table = parameters.getOrElse("dbtable", sys.error("Option 'dbtable' not specified"))
    // Pass every option through as a Java property for downstream use.
    val properties = new Properties()
    parameters.foreach(kv => properties.setProperty(kv._1, kv._2))
    BORelation(url, table, schema, properties)(sqlContext)
  }

  /** Returns a new base relation with the given parameters and DataFrame. */
  override def createRelation(
      sqlContext: SQLContext,
      mode: SaveMode,
      parameters: Map[String, String],
      data: DataFrame): BaseRelation = {
    val url = parameters.getOrElse("url", sys.error("Option 'url' not specified"))
    val urls = url.split(",")
    val table = parameters.getOrElse("dbtable", sys.error("Option 'dbtable' not specified"))
    val overwrite = (mode == SaveMode.Overwrite)
    // Probe every BO server for the table; under Overwrite, drop it wherever
    // it is found. (Fix: removed a dead `var u = null` that was immediately
    // shadowed by the for-generator binding below and never read.)
    var exist = false
    for (u <- urls) {
      if (BORDD.isTableExist(u, table)) {
        exist = true
        if (overwrite) {
          val boApi = new BOIface(u, "cmd", "post", Array(s"DROP TABLE $table"))
          if (boApi.httpStatus != 200 || boApi.status != 0) {
            // NOTE(review): returning null on failure preserves the original
            // behavior; callers must be prepared for a null relation.
            logError(s"Failed to delete existing $table table. (Http status: ${boApi.httpStatus}, BO status: ${boApi.status})")
            return null
          }
        }
      }
    }
    if (exist) {
      if (mode == SaveMode.ErrorIfExists) {
        sys.error(s"Table $table exists.")
      }
    }
    if (!exist || overwrite) {
      // Create the table on the first server; an optional "key" option adds a
      // KEY (...) clause to the generated DDL.
      val key = parameters.getOrElse("key", "")
      val sb = new StringBuilder(BORDD.schemaString(data))
      if (key.length() > 0)
        sb.append(s", KEY ($key)")
      val schemaStr = sb.toString()
      val boApi = new BOIface(urls(0), "cmd", "post", Array(s"CREATE TABLE $table ($schemaStr)"))
      if (boApi.httpStatus != 200 || boApi.status != 0) {
        logError(s"Failed to create $table table. (Http status: ${boApi.httpStatus}, BO status: ${boApi.status})")
        return null
      }
    }
    val schema = data.schema
    data.foreachPartition { iter =>
      // TODO: write to different partition (BO server) separately.
      BORDD.writeData(iter, urls(0), table, schema)
    }
    val properties = new Properties()
    parameters.foreach(kv => properties.setProperty(kv._1, kv._2))
    BORelation(url, table, schema, properties)(sqlContext)
  }
}
/**
 * BaseRelation backed by one or more BigObject servers, supporting pruned and
 * filtered scans plus DataFrame inserts.
 *
 * @param url        comma-separated list of BO server URLs; the first entry is
 *                   used for schema resolution and writes
 * @param table      BO table name
 * @param sch        caller-supplied schema, or null to resolve it from the server
 * @param properties pass-through options for the scan layer
 */
case class BORelation(
    url: String,
    table: String,
    sch: StructType = null,
    properties: Properties = new Properties())(@transient val sqlContext: SQLContext)
  extends BaseRelation
  with PrunedFilteredScan
  with InsertableRelation
  with Logging {

  // Rows produced by buildScan are used as-is, without Row -> InternalRow conversion.
  override val needConversion: Boolean = false

  private val urls = url.split(",")

  // Validates constructor arguments once, at construction time.
  // (Fix: corrected the "speciffied" typo in the error message.)
  private def checkParams() = {
    if (urls.length == 0 || urls(0).length == 0 || table.length == 0)
      throw new IllegalArgumentException("No BO server is specified.")
  }
  checkParams()

  // Use the caller-provided schema when given; otherwise ask the first server.
  override val schema: StructType = {
    if (sch != null)
      sch
    else
      BORDD.resolveTable(urls(0), table)
  }

  override def buildScan(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
    logInfo(s"buildScan is called.")
    // (Fix: corrected the "colume" typo in the log message.)
    requiredColumns.foreach(c => logInfo(s"required column: $c"))
    filters.foreach(f => logInfo(s"filter: $f"))
    BORDD.scanTable(
      sqlContext.sparkContext,
      schema,
      properties,
      table,
      requiredColumns,
      filters,
      BORDD.getPartition(urls, new Array[String](urls.length))).asInstanceOf[RDD[Row]]
  }

  // TODO: we should do just "INSERT INTO", not whole table??
  override def insert(data: DataFrame, overwrite: Boolean): Unit = {
    logInfo(s"insert is called. data: $data, overwrite: $overwrite.")
    val exist = BORDD.isTableExist(urls(0), table)
    if (exist && overwrite) {
      val boApi = new BOIface(urls(0), "cmd", "post", Array(s"DROP TABLE $table"))
      if (boApi.httpStatus != 200 || boApi.status != 0) {
        logError(s"Failed to delete existing $table table. (Http status: ${boApi.httpStatus}, BO status: ${boApi.status})")
        return
      }
      // TODO: check status code
    }
    if (!exist || overwrite) {
      // TODO: add key
      val schString = BORDD.schemaString(data)
      val boApi = new BOIface(urls(0), "cmd", "post", Array(s"CREATE TABLE $table ($schString)"))
      if (boApi.httpStatus != 200 || boApi.status != 0) {
        logError(s"Failed to create $table table. (Http status: ${boApi.httpStatus}, BO status: ${boApi.status})")
        return
      }
      // TODO: check status code
    }
    // NOTE(review): writes always target the first server; multi-server
    // partitioned writes are not implemented here.
    val sch = data.schema
    data.foreachPartition { iter =>
      BORDD.writeData(iter, urls(0), table, sch)
    }
  }
}
|
macrodatalab/spark-bo
|
spark-1.5.1/bigobject/src/main/scala/com/bigobject/spark/BORelation.scala
|
Scala
|
apache-2.0
| 6,150
|
/*
* Copyright (C) 2016 Christopher Batey and Dogan Narinc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scassandra.server.actors
import akka.actor.ActorRef
import akka.pattern.{ask, pipe}
import akka.util.Timeout
import org.scassandra.codec._
import org.scassandra.server.actors.ExecuteHandler.HandleExecute
import org.scassandra.server.actors.PrepareHandler.{PreparedStatementQuery, PreparedStatementResponse}
import org.scassandra.server.priming._
import org.scassandra.server.priming.prepared.PreparedStoreLookup
import org.scassandra.server.priming.query.Reply
import scala.concurrent.duration._
import scala.language.postfixOps
/** Handles CQL EXECUTE frames for previously prepared statements.
 *
 * The statement text is looked up asynchronously from `prepareHandler`
 * (ask + pipeTo self), so the original sender must be captured eagerly
 * before the Future completes.
 */
class ExecuteHandler(primePreparedStore: PreparedStoreLookup, activityLog: ActivityLog, prepareHandler: ActorRef) extends ProtocolActor {
  import context.dispatcher

  // Timeout for the ask to the PrepareHandler.
  implicit val timeout: Timeout = 1 second

  def receive: Receive = {
    case ProtocolMessage(Frame(header, e: Execute)) =>
      val id = e.id.toInt()
      // Capture sender() now: inside the Future callback below, sender() would
      // no longer refer to the connection that sent this frame.
      val recipient = sender
      // Lookup the associated prepared statement for this execute and on completion
      // send a message to self indicating to handle the request.
      val executeRequest = (prepareHandler ? PreparedStatementQuery(List(id)))
        .mapTo[PreparedStatementResponse]
        .map(res => HandleExecute(res.prepared.get(id), header, e, recipient))
      executeRequest.pipeTo(self)
    case HandleExecute(query, header, execute, connection) =>
      handleExecute(query, header, execute, connection)
  }

  /** Records the execution in the activity log and replies with a primed result,
   * a void result, or an Unprepared error when the statement id is unknown.
   */
  def handleExecute(preparedStatement: Option[(String, Prepared)], header: FrameHeader, execute: Execute, connection: ActorRef) = {
    implicit val protocolVersion = header.version.version
    preparedStatement match {
      case Some((queryText, prepared)) =>
        val prime = primePreparedStore(queryText, execute)
        prime.foreach(p => log.info("Found prime {}", p))
        // Decode query parameters using the prepared statement metadata.
        val dataTypes = prepared.preparedMetadata.columnSpec.map(_.dataType)
        val values = extractQueryVariables(queryText, execute.parameters.values.map(_.map(_.value)), dataTypes)
        values match {
          case Some(v) =>
            activityLog.recordPreparedStatementExecution(queryText, execute.parameters.consistency,
              execute.parameters.serialConsistency, v, dataTypes, execute.parameters.timestamp)
          case None =>
            // Variables could not be decoded; record the execution without them.
            activityLog.recordPreparedStatementExecution(queryText, execute.parameters.consistency,
              execute.parameters.serialConsistency, Nil, Nil, execute.parameters.timestamp)
        }
        writePrime(execute, prime, header, Some(connection), alternative=Some(Reply(VoidResult)), consistency = Some(execute.parameters.consistency))
      case None =>
        val errMsg = s"Could not find prepared statement with id: 0x${execute.id.toHex}"
        activityLog.recordPreparedStatementExecution(errMsg, execute.parameters.consistency,
          execute.parameters.serialConsistency, Nil, Nil, execute.parameters.timestamp)
        val unprepared = Unprepared(errMsg, execute.id)
        write(unprepared, header, Some(connection))
    }
  }
}
object ExecuteHandler {
  /** Self-message carrying the resolved prepared statement (if any) for an Execute frame,
   * together with the connection that must receive the reply.
   */
  case class HandleExecute(query: Option[(String, Prepared)], header: FrameHeader, execute: Execute, connection: ActorRef)
}
|
mikefero/cpp-driver
|
gtests/src/integration/scassandra/server/server/src/main/scala/org/scassandra/server/actors/ExecuteHandler.scala
|
Scala
|
apache-2.0
| 3,845
|
package bootstrap.liftweb
import net.liftweb._
import util._
import Helpers._
import common._
import http._
import js.jquery.JQueryArtifacts
import net.liftmodules.JQueryModule
import net.liftmodules.validate.Validate
import net.liftmodules.validate.options.Bs3Options
import net.liftweb.sitemap.{ SiteMap, Menu }
/**
* A class that's instantiated early and run. It allows the application
* to modify lift's environment
*/
/** Lift bootstrap: registers snippet packages, the sitemap, the Validate module,
 * jQuery artifacts, ajax spinner hooks, request encoding and HTML5 rendering.
 * These LiftRules settings must all be applied here, before requests are served.
 */
class Boot {
  def boot {
    // where to search snippet
    LiftRules.addToPackages("com.github.limansky")

    // Build SiteMap
    def sitemap = SiteMap(
      Menu.i("index") / "index",
      Menu.i("ajax") / "ajax"
    )
    LiftRules.setSiteMap(sitemap)

    // Initialize Validate with Bootstrap-3 display options
    Validate.options.default.set(Bs3Options())
    Validate.init()

    //Init the jQuery module, see http://liftweb.net/jquery for more information.
    LiftRules.jsArtifacts = JQueryArtifacts
    JQueryModule.InitParam.JQuery=JQueryModule.JQuery111Z
    JQueryModule.init()

    //Show the spinny image when an Ajax call starts
    LiftRules.ajaxStart =
      Full(() => LiftRules.jsArtifacts.show("ajax-loader").cmd)

    // Make the spinny image go away when it ends
    LiftRules.ajaxEnd =
      Full(() => LiftRules.jsArtifacts.hide("ajax-loader").cmd)

    // Force the request to be UTF-8
    LiftRules.early.append(_.setCharacterEncoding("UTF-8"))

    // Use HTML5 for rendering
    LiftRules.htmlProperties.default.set((r: Req) =>
      new Html5Properties(r.userAgent))
  }
}
|
limansky/validation-demo
|
src/main/scala/bootstrap/liftweb/Boot.scala
|
Scala
|
apache-2.0
| 1,508
|
package com.sksamuel.elastic4s.requests.searches
/** Explicit lists of values to include and to exclude (used by search requests in this package). */
case class IncludeExclude(include: Seq[String], exclude: Seq[String])
/** Selects partition number `partition` out of `numPartitions` total partitions. */
case class IncludePartition(partition: Int, numPartitions: Int)
|
sksamuel/elastic4s
|
elastic4s-domain/src/main/scala/com/sksamuel/elastic4s/requests/searches/IncludeExclude.scala
|
Scala
|
apache-2.0
| 185
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.mesos.scheduler
import java.util.UUID
import akka.actor._
import akka.testkit._
import org.apache.flink.configuration.Configuration
import org.apache.flink.mesos.TestFSMUtils
import org.apache.flink.mesos.scheduler.ReconciliationCoordinator.Reconcile
import org.apache.flink.mesos.scheduler.TaskMonitor._
import org.apache.flink.mesos.scheduler.messages.{Connected, Disconnected, StatusUpdate}
import org.apache.flink.runtime.akka.AkkaUtils
import org.apache.mesos.Protos.TaskState._
import org.apache.mesos.{Protos, SchedulerDriver}
import org.junit.runner.RunWith
import org.mockito.Mockito._
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
import scala.collection.mutable.{Map => MutableMap}
/** Unit tests for the `Tasks` parent actor, which creates and supervises one
 * task-monitor child per Mesos task.
 *
 * Child monitors are replaced with probe-backed stand-ins via the injected
 * `taskActorCreator`, so message forwarding can be observed directly.
 */
@RunWith(classOf[JUnitRunner])
class TasksTest
    extends WordSpecLike
    with Matchers
    with BeforeAndAfterAll {

  lazy val config = new Configuration()
  implicit lazy val system = AkkaUtils.createLocalActorSystem(config)

  override def afterAll(): Unit = {
    TestKit.shutdownActorSystem(system)
  }

  // A fresh slave ID plus a hostname derived from it.
  def randomSlave = {
    val slaveID = Protos.SlaveID.newBuilder.setValue(UUID.randomUUID.toString).build
    val hostname = s"host-${slaveID.getValue}"
    (slaveID, hostname)
  }

  // A fresh task ID and a TaskStatus builder in TASK_STAGING bound to the given slave.
  def randomTask(slaveID: Protos.SlaveID) = {
    val taskID = Protos.TaskID.newBuilder.setValue(UUID.randomUUID.toString).build
    val taskStatus = Protos.TaskStatus.newBuilder()
      .setTaskId(taskID).setSlaveId(slaveID).setState(TASK_STAGING)
    (taskID, taskStatus)
  }

  // Creates a child actor under `parent` that forwards every message to a new TestProbe.
  def childProbe(parent: ActorRefFactory): (TestProbe, ActorRef) = {
    val probe = TestProbe()
    val childRef = parent.actorOf(Props(
      new Actor {
        override def receive: Receive = {
          case msg @ _ => probe.ref.forward(msg)
        }
      }
    ))
    (probe,childRef)
  }

  // Per-test fixture: a Tasks actor wired to a mock driver and probe-backed child monitors.
  class Context(implicit val system: ActorSystem) extends TestKitBase with ImplicitSender {
    case class MockTaskMonitor(probe: TestProbe, actorRef: ActorRef, task: TaskGoalState)
    val schedulerDriver = mock(classOf[SchedulerDriver])
    val slave = randomSlave
    val task = randomTask(slave._1)

    // Records every child monitor the Tasks actor creates, keyed by task ID.
    val taskActors = MutableMap[Protos.TaskID,MockTaskMonitor]()
    val actor = {
      val taskActorCreator = (factory: ActorRefFactory, task: TaskGoalState) => {
        val (probe, taskActorRef) = childProbe(factory)
        taskActors.put(task.taskID, MockTaskMonitor(probe, taskActorRef, task))
        taskActorRef
      }
      TestActorRef[Tasks](
        Props(classOf[Tasks], testActor, config, schedulerDriver, taskActorCreator),
        testActor,
        TestFSMUtils.randomName)
    }
  }

  def handle = afterWord("handle")

  "Tasks" should handle {
    "(supervision)" which {
      "escalates" in new Context {
        actor ! TaskGoalStateUpdated(Launched(task._1, slave._1))
        watch(actor)
        taskActors(task._1).actorRef ! Kill
        expectTerminated(actor)
      }
    }
    "Connect" which {
      "stores the connected message for later use" in new Context {
        val msg = new Connected() {}
        actor ! msg
        actor.underlyingActor.registered should be (Some(msg))
      }
      "forwards the message to child tasks" in new Context {
        val msg = new Connected() {}
        actor ! TaskGoalStateUpdated(Launched(task._1, slave._1))
        actor ! msg
        taskActors(task._1).probe.expectMsg(msg)
      }
    }
    "Disconnect" which {
      "releases any connected message that was previously stored" in new Context {
        actor.underlyingActor.registered = Some(new Connected() {})
        actor ! new Disconnected()
        actor.underlyingActor.registered should be (None)
      }
      "forwards the message to child tasks" in new Context {
        val msg = new Disconnected() {}
        actor ! TaskGoalStateUpdated(Launched(task._1, slave._1))
        actor ! msg
        taskActors(task._1).probe.expectMsg(msg)
      }
    }
    "TaskGoalStateUpdated" which {
      "creates a task monitor on-demand for a given task" in new Context {
        val goal = Launched(task._1, slave._1)
        actor ! TaskGoalStateUpdated(goal)
        actor.underlyingActor.taskMap.contains(task._1) should be (true)
        taskActors(task._1).task should be (goal)
      }
      "forwards the stored connected message to new monitor actors" in new Context {
        val msg = new Connected() {}
        val goal = Launched(task._1, slave._1)
        actor ! msg
        actor ! TaskGoalStateUpdated(goal)
        taskActors(task._1).probe.expectMsg(msg)
      }
      "forwards the goal state to the task monitor" in new Context {
        actor ! TaskGoalStateUpdated(Launched(task._1, slave._1))
        val updateMsg = TaskGoalStateUpdated(Released(task._1, slave._1))
        actor ! updateMsg
        taskActors(task._1).probe.expectMsg(updateMsg)
      }
    }
    "StatusUpdate" which {
      "forwards the update to a task monitor" in new Context {
        actor ! TaskGoalStateUpdated(Launched(task._1, slave._1))
        val msg = new StatusUpdate(task._2.setState(TASK_RUNNING).build())
        actor ! msg
        taskActors(task._1).probe.expectMsg(msg)
      }
      "resumes monitoring of resurrected tasks" in new Context {
        // in this scenario, no goal state is sent prior to the status update
        actor ! new StatusUpdate(task._2.setState(TASK_RUNNING).build())
        taskActors.contains(task._1) should be (true)
        taskActors(task._1).task should be (Released(task._1, slave._1))
      }
    }
    "Reconcile" which {
      "forwards the message to the parent" in new Context {
        val msg = new Reconcile(Seq(task._2.build()))
        actor ! msg
        expectMsg(msg)
      }
    }
    "TaskTerminated" which {
      "removes the task monitor ref" in new Context {
        actor ! TaskGoalStateUpdated(Launched(task._1, slave._1))
        actor.underlyingActor.taskMap.contains(task._1) should be (true)
        actor ! TaskTerminated(task._1, task._2.setState(TASK_FAILED).build())
        actor.underlyingActor.taskMap.contains(task._1) should be (false)
      }
      "forwards to the parent" in new Context {
        actor ! TaskGoalStateUpdated(Launched(task._1, slave._1))
        val msg = TaskTerminated(task._1, task._2.setState(TASK_FAILED).build())
        actor ! msg
        expectMsg(msg)
      }
    }
  }
}
|
hequn8128/flink
|
flink-mesos/src/test/scala/org/apache/flink/mesos/scheduler/TasksTest.scala
|
Scala
|
apache-2.0
| 7,205
|
package gapt.expr.formula.constants
import gapt.expr.ty.To
object OrC extends MonomorphicLogicalC( "∨", To ->: To ->: To )
|
gapt/gapt
|
core/src/main/scala/gapt/expr/formula/constants/OrC.scala
|
Scala
|
gpl-3.0
| 127
|
package cz.kamenitxan.jakon.core.deploy
import cz.kamenitxan.jakon.core.configuration.Settings
import cz.kamenitxan.jakon.core.deploy.entity.Server
import cz.kamenitxan.jakon.core.template.utils.TemplateUtils
/** Deploys the generated site output to a directory on the local filesystem.
 *
 * The target directory is cleaned first and the rendered output copied in
 * afterwards, so the order of the two calls must be preserved.
 */
class LocalDeploy extends IDeploy {
  override def deploy(server: Server): Unit = {
    TemplateUtils.clean(server.path)
    TemplateUtils.copy(Settings.getOutputDir, server.path)
  }
}
|
kamenitxan/Jakon
|
modules/backend/src/main/scala/cz/kamenitxan/jakon/core/deploy/LocalDeploy.scala
|
Scala
|
bsd-3-clause
| 391
|
package bowhaus
import java.net.URI
import java.util.Date
import com.twitter.bijection._
import com.twitter.util.{ Await, Future }
import com.twitter.storehaus._
import com.twitter.storehaus.redis._
/** A registered package: its name and the URL it resolves to. */
case class Package(name: String, url: String)
/** Factory for [[Packages]] instances. */
object Packages {
  def apply(stores: PackageStores, storePrefix: String) =
    new Packages(stores, storePrefix)
}
/** Store-backed registry of packages, namespaced by `prefix`.
 *
 * Package entries live under keys `<prefix>:bowhaus:packages:<name>`; a
 * per-package hit counter is kept under the key `<prefix>:bowhaus:hits`.
 */
class Packages(stores: PackageStores, prefix: String) {
  import bowhaus.Bijections._

  // Key of the hits structure and prefix for package entry keys.
  private val hk = "%s:bowhaus:hits" format(prefix)
  private val packagePrefix = "%s:bowhaus:packages:" format prefix
  private def pk(name: String) = "%s%s" format(packagePrefix, name)
  // Strips the namespace prefix from a stored key, recovering the package name.
  private def unprefix(key: String) =
    key.replace(packagePrefix, "")

  // for admin eyes only
  object admin {
    /** Deletes both the hit counter and the stored package entry; blocks until done. */
    def unregister(name: String) = Await.result(Future.join(
      // delete hits
      stores.packageHits.put(((hk, pk(name)), None)),
      // delete package
      stores.packages.put((pk(name), None))))
  }

  /** Registers a new package; yields Left when the name is already taken,
   * otherwise stores the url + creation time and seeds the hit counter at 0.
   */
  def create(name: String, url: URI): Future[Either[String, String]] =
    stores.packages.get(pk(name)).flatMap(
      _.map(_ => Future.value(Left("package already exists")))
       .getOrElse(
         stores.packages.put((pk(name), Some(
           Map("url" -> url.toString, "created_at" -> new Date().getTime.toString))))
         .flatMap({
           _ =>
             stores.packageHits.put(((hk, pk(name)), Some(0)))
             Future.value(Right("ok"))
         }))
      )

  /** Looks up a package by name; a hit also bumps its counter (fire-and-forget merge). */
  def get(name: String): Future[Option[Package]] =
    stores.packageUrls.get((pk(name), "url")).map(
      _.map { url =>
        stores.packageHits.merge(((hk, pk(name)), 1))
        Package(name, url)
      }
    )

  /** Lists every package that has an entry in the hits structure. */
  def list: Future[Iterable[Package]] =
    stores.hits.get(hk).flatMap({
      _.map({ hs =>
        FutureOps.mapCollect(stores.packageUrls.multiGet(hs.map { case (key, _) => (key, "url") }.toSet))
          .map((_.map({
            case ((key, _), Some(url)) => Some(Package(unprefix(key), url))
            case _ => None
          }).flatten))
      }).getOrElse(Future.value(Nil))
    })

  /** Fuzzy name search — not implemented yet. */
  def like(name: String): Future[Iterable[Package]] =
    Future.value(Nil) // todo: impl me
}
|
softprops/bowhaus
|
src/main/scala/packages.scala
|
Scala
|
mit
| 2,202
|
package com.tribbloids.spookystuff.utils.io.lock
import com.tribbloids.spookystuff.utils.BypassingRule
import com.tribbloids.spookystuff.utils.io.{URIExecution, URIResolver}
import com.tribbloids.spookystuff.utils.lifespan.{Lifespan, LocalCleanable}
import java.io.FileNotFoundException
import java.nio.file.NoSuchFileException
/** Advisory lock over a URI implemented by moving the source file into a lock
 * location; while moved, the original path is absent and cannot be used by
 * other holders. Released automatically on cleanup if still held.
 */
case class Lock(
    source: URIExecution,
    expired: LockExpired = URIResolver.default.expired, // TODO: use it!
    override val _lifespan: Lifespan = Lifespan.TaskOrJVM()
) extends LockLike
  with LocalCleanable {

  import Lock._

  @volatile var acquiredTimestamp: Long = -1

  // Moves the source into the locked location. If the source is already gone
  // (another holder moved it), tries to reclaim an expired lock from the lock
  // directory; otherwise the original exception propagates.
  protected def acquire(): URIExecution = {
    try {
      source.moveTo(Moved.locked.absolutePathStr)
    } catch {
      case ee @ (_: FileNotFoundException | _: NoSuchFileException) =>
        val canBeUnlocked = expired.scanForUnlocking(Moved.dir)
        canBeUnlocked match {
          case Some(v) =>
            v.exe.moveTo(Moved.locked.absolutePathStr)
          case None =>
            throw ee
        }
    }
    logAcquire(source)
    Moved.locked
  }

  // Restores the locked file back to the source path; if something now exists
  // at the source path it is shelved to the `old` location first.
  protected def release(): Unit = {
    logRelease(Moved.locked)
    if (source.isExisting) {
      source.moveTo(PathStrs.old)
    }
    Moved.locked.moveTo(source.absolutePathStr)
  }

  // One acquire/run/release cycle. Exceptions other than CanReattempt are
  // wrapped so the retry loop in `during` will not retry them.
  protected def duringOnce[T](fn: URIExecution => T): T = {
    val acquired = acquire()
    try {
      fn(acquired)
    } catch {
      case e: CanReattempt =>
        throw e
      case e: Throwable =>
        throw BypassingRule.NoRetry(e)
    } finally {
      release()
    }
  }

  /** Runs `fn` while holding the lock, retrying acquisition per the resolver's policy. */
  final def during[T](fn: URIExecution => T): T = source.synchronized {
    resolver.retry {
      duringOnce(fn)
    }
  }

  /**
   * unlock on cleanup
   */
  override protected def cleanImpl(): Unit = {
    if (Moved.locked.isExisting) release()
  }

  def logAcquire(execution: URIExecution): Unit = {
    // Lock.acquired += execution -> System.currentTimeMillis()
    this.logPrefixed(s"=== ACQUIRED!: ${execution.absolutePathStr}")
  }

  def logRelease(execution: URIExecution): Unit = {
    // Lock.acquired -= execution
    this.logPrefixed(s"=== RELEASED! ${execution.absolutePathStr}")
  }
}
object Lock {
  // val acquired: CachingUtils.ConcurrentCache[URIExecution, Long] = CachingUtils.ConcurrentCache()

  /** Marker trait for exceptions that are safe to retry during lock acquisition. */
  trait CanReattempt extends Exception
}
|
tribbloid/spookystuff
|
mldsl/src/main/scala/com/tribbloids/spookystuff/utils/io/lock/Lock.scala
|
Scala
|
apache-2.0
| 2,335
|
package cats.examples.typeclasses
/**
* Type classes are a powerful tool used in functional programming to enable
* ad-hoc polymorphism, more commonly known as overloading. Where many
* object-oriented languages leverage subtyping for polymorphic code,
* functional programming tends towards a combination of parametric
* polymorphism (think type parameters, like Java generics) and ad-hoc
* polymorphism.
*
* See https://typelevel.org/cats/typeclasses.html
*/
/** Walk-through of the type-class pattern via `Monoid`; each assert below
 * executes when the App body runs and demonstrates the preceding section.
 */
object TypeClassExample extends App {
  // Example - collapsing a list
  // The following code snippets show code that sums a list of integers,
  // concatenates a list of strings, and unions a list of sets.
  def sumInts(list: List[Int]): Int = list.foldRight(0)(_ + _)
  def concatStrings(list: List[String]): String = list.foldRight("")(_ ++ _)
  def unionSets[A](list: List[Set[A]]): Set[A] = list.foldRight(Set.empty[A])(_ union _)

  // All of these follow the same pattern: an initial value (0, empty string,
  // empty set) and a combining function (+, ++, union). We’d like to abstract
  // over this so we can write the function once instead of once for every type
  // so we pull out the necessary pieces into an interface.
  trait Monoid[A] {
    def empty: A
    def combine(x: A, y: A): A
  }

  // Implementation for Int
  val intAdditionMonoid: Monoid[Int] = new Monoid[Int] {
    def empty: Int = 0
    def combine(x: Int, y: Int): Int = x + y
  }

  // The name Monoid is taken from abstract algebra which specifies precisely
  // this kind of structure.
  // We can now write the functions above against this interface.
  def combineAll[A](list: List[A], A: Monoid[A]): A = list.foldRight(A.empty)(A.combine)

  // Type classes vs. subtyping
  // The definition above takes an actual monoid argument instead of doing the
  // usual object-oriented practice of using subtype constraints.
  // Subtyping
  // NOTE: this deliberately overloads combineAll to contrast the two encodings.
  def combineAll[A <: Monoid[A]](list: List[A]): A = ???

  // This has a subtle difference with the earlier explicit example. In order to
  // seed the foldRight with the empty value, we need to get a hold of it given
  // only the type A. Taking Monoid[A] as an argument gives us this by calling
  // the appropriate empty method on it. With the subtype example, the empty
  // method would be on a value of type Monoid[A] itself, which we are only
  // getting from the list argument. If list is empty, we have no values to work
  // with and therefore can’t get the empty value. Not to mention the oddity of
  // getting a constant value from a non-static object.
  // For another motivating difference, consider the simple pair type.
  final case class Pair[A, B](first: A, second: B)

  // Defining a Monoid[Pair[A, B]] depends on the ability to define a Monoid[A]
  // and Monoid[B], where the definition is point-wise, i.e. the first element
  // of the first pair combines with the first element of the second pair and
  // the second element of the first pair combines with the second element of
  // the second pair. With subtyping such a constraint would be encoded as
  // something like...
  final case class PairMonoid[A <: Monoid[A], B <: Monoid[B]](first: A, second: B) extends Monoid[Pair[A, B]] {
    def empty: Pair[A, B] = ???
    def combine(x: Pair[A, B], y: Pair[A, B]): Pair[A, B] = ???
  }

  // Not only is the type signature of Pair now messy but it also forces all
  // instances of Pair to have a Monoid instance, whereas Pair should be able to
  // carry any types it wants and if the types happens to have a Monoid instance
  // then so would it. We could try bubbling down the constraint into the methods
  // themselves...
  /**
  // The following does not compile - uncomment to see this
  final case class PairMonoid2[A, B](first: A, second: B) extends Monoid[Pair[A, B]] {
    def empty(implicit eva: A <:< Monoid[A], evb: B <:< Monoid[B]): Pair[A, B] = ???
    def combine(x: Pair[A, B], y: Pair[A, B])(implicit eva: A <:< Monoid[A], evb: B <:< Monoid[B]): Pair[A, B] = ???
  }
  **/
  // But now these don’t conform to the interface of Monoid due to the implicit constraints.

  // Implicit derivation
  // Note that a Monoid[Pair[A, B]] is derivable given Monoid[A] and Monoid[B]:
  def deriveMonoidPair[A, B](A: Monoid[A], B: Monoid[B]): Monoid[Pair[A, B]] =
    new Monoid[Pair[A, B]] {
      def empty: Pair[A, B] = Pair(A.empty, B.empty)
      def combine(x: Pair[A, B], y: Pair[A, B]): Pair[A, B] =
        Pair(A.combine(x.first, y.first), B.combine(x.second, y.second))
    }

  // One of the most powerful features of type classes is the ability to do this
  // kind of derivation automatically. We can do this through Scala’s implicit
  // mechanism.
  // Instance lives in Pair's companion, so it is found via implicit scope.
  object Pair {
    implicit def tuple2Instance[A, B](implicit A: Monoid[A], B: Monoid[B]): Monoid[Pair[A, B]] =
      new Monoid[Pair[A, B]] {
        def empty: Pair[A, B] = Pair(A.empty, B.empty)
        def combine(x: Pair[A, B], y: Pair[A, B]): Pair[A, B] =
          Pair(A.combine(x.first, y.first), B.combine(x.second, y.second))
      }
  }

  // We also change any functions that have a Monoid constraint on the type
  // parameter to take the argument implicitly, and any instances of the type
  // class to be implicit.
  implicit val intAdditionMonoidImplicit: Monoid[Int] = new Monoid[Int] {
    def empty: Int = 0
    def combine(x: Int, y: Int): Int = x + y
  }
  def combineAll2[A](list: List[A])(implicit A: Monoid[A]): A = list.foldRight(A.empty)(A.combine)

  // Now we can also combineAll a list of Pairs so long as Pair’s type
  // parameters themselves have Monoid instances.
  implicit val stringMonoid: Monoid[String] = new Monoid[String] {
    def empty: String = ""
    def combine(x: String, y: String): String = x ++ y
  }
  import Pair.tuple2Instance
  assert(
    combineAll2(List(Pair(1, "hello"), Pair(2, " "), Pair(3, "world"))) == Pair(6, "hello world")
  )

  // A note on syntax
  // In many cases, including the combineAll function above, the implicit
  // arguments can be written with syntactic sugar.
  def combineAll3[A : Monoid](list: List[A]): A = ???

  // While nicer to read as a user, it comes at a cost for the implementer.
  def combineAll4[A : Monoid](list: List[A]): A =
    list.foldRight(implicitly[Monoid[A]].empty)(implicitly[Monoid[A]].combine)

  // For this reason, many libraries that provide type classes provide a utility
  // method on the companion object of the type class, usually under the name
  // apply, that skirts the need to call implicitly everywhere.
  object Monoid {
    def apply[A : Monoid]: Monoid[A] = implicitly[Monoid[A]]
  }
  def combineAll5[A : Monoid](list: List[A]): A =
    list.foldRight(Monoid[A].empty)(Monoid[A].combine)

  // Cats uses simulacrum for defining type classes which will auto-generate
  // such an apply method.

  // Laws
  // Conceptually, all type classes come with laws. These laws constrain
  // implementations for a given type and can be exploited and used to reason
  // about generic code.
  // For instance, the Monoid type class requires that combine be associative
  // and empty be an identity element for combine. That means the following
  // equalities should hold for any choice of x, y, and z.
  /**
  combine(x, combine(y, z)) = combine(combine(x, y), z)
  combine(x, id) = combine(id, x) = x
  **/
  // With these laws in place, functions parametrized over a Monoid can leverage
  // them for say, performance reasons. A function that collapses a List[A] into
  // a single A can do so with foldLeft or foldRight since combine is assumed to
  // be associative, or it can break apart the list into smaller lists and
  // collapse in parallel, such as
  val list = List(1, 2, 3, 4, 5)
  val (left, right) = list.splitAt(2)
  // Imagine the following two operations run in parallel
  val sumLeft = combineAll4(left)
  assert(sumLeft == 3)
  val sumRight = combineAll4(right)
  assert(sumRight == 12)
  // Now gather the results
  val result = Monoid[Int].combine(sumLeft, sumRight)
  assert(result == 15)
}
|
carwynellis/cats-examples
|
src/main/scala/cats/examples/typeclasses/TypeClassExample.scala
|
Scala
|
mit
| 8,088
|
package org.typedsolutions.aws.kinesis
import akka.actor.Actor
import akka.actor.ActorRef
import akka.actor.Terminated
import akka.event.LoggingReceive
import org.typedsolutions.aws.kinesis.model._
import akka.pattern.pipe
import scala.concurrent.Future
/** Actor facade over an [[AmazonKinesis]] wrapper.
 *
 * Each request message is dispatched to the matching wrapper method; the
 * resulting event (or a CommandFailed on exception) is piped back to the
 * sender. The underlying client is shut down when the owner terminates.
 */
class AmazonKinesisActor(owner: ActorRef, wrapper: AmazonKinesis) extends Actor {
  import context._

  override def preStart(): Unit = {
    super.preStart()
    // Notify the owner that the client is ready and watch it for termination.
    owner ! KinesisClientCreated
    context.watch(owner)
  }

  override def receive: Receive = LoggingReceive {
    case command: CreateStreamRequest => handle(command)(sender())(wrapper.createStream)
    case command: DeleteStreamRequest => handle(command)(sender())(wrapper.deleteStream)
    case command: DescribeStreamRequest => handle(command)(sender())(wrapper.describeStream)
    case command: GetRecordsRequest => handle(command)(sender())(wrapper.getRecords)
    case command: GetShardIteratorRequest => handle(command)(sender())(wrapper.getShardIterator)
    case command: ListStreamsRequest => handle(command)(sender())(wrapper.listStreams)
    case command: MergeShardsRequest => handle(command)(sender())(wrapper.mergeShards)
    case command: PutRecordRequest => handle(command)(sender())(wrapper.putRecord)
    case command: PutRecordsRequest => handle(command)(sender())(wrapper.putRecords)
    case command: SplitShardRequest => handle(command)(sender())(wrapper.splitShard)
    case Terminated(`owner`) => wrapper.underlying.shutdown()
  }

  // Runs the wrapper call, mapping any Exception to CommandFailed, and pipes
  // the outcome to the captured commander. sender() is captured by the caller
  // before the Future completes.
  private def handle[C <: Command, E <: Event](command: C)(commander: ActorRef)(thunk: C => Future[E]): Unit = {
    thunk(command).recover(withCommandFailed(command)).pipeTo(commander)
  }

  private def withCommandFailed(command: Command): PartialFunction[Throwable, CommandFailed] = {
    case exception: Exception => CommandFailed(command, exception)
  }
}
|
mattroberts297/akka-kinesis
|
src/main/scala/org/typedsolutions/aws/kinesis/AmazonKinesisActor.scala
|
Scala
|
mit
| 1,839
|
package org.bitcoins.core.protocol.blockchain
import org.bitcoins.core.crypto.DoubleSha256Digest
import org.bitcoins.core.gen.MerkleGenerator
import org.scalacheck.{ Prop, Properties }
/**
 * Created by chris on 8/12/16.
 *
 * Property-based checks for [[MerkleBlock]]: every inserted txid must be
 * recoverable from the partial merkle tree, and hex serialization must
 * round-trip to an equal block.
 */
class MerkleBlockSpec extends Properties("MerkleBlockSpec") {

  //TODO: This is *extremely* slow, this is currently the longest running property we have taking about 6 minutes to run
  //I think it is the generator MerkleGenerator.merkleBlockWithInsertTxIds
  property("contains all inserted txids when we directly create a merkle block from the txids && " +
    "contains all txids matched by a bloom filter && " +
    "serialization symmetry") =
    Prop.forAllNoShrink(MerkleGenerator.merkleBlockWithInsertedTxIds) {
      case (merkleBlock: MerkleBlock, _, txIds: Seq[DoubleSha256Digest]) =>
        val extractedMatches = merkleBlock.partialMerkleTree.extractMatches
        // All three conditions must hold for the property to pass.
        extractedMatches == txIds &&
          extractedMatches.intersect(txIds) == txIds &&
          MerkleBlock(merkleBlock.hex) == merkleBlock
    }
}
|
Christewart/bitcoin-s-core
|
src/test/scala/org/bitcoins/core/protocol/blockchain/MerkleBlockSpec.scala
|
Scala
|
mit
| 1,054
|
package org.jetbrains.plugins.scala.annotator.createFromUsage
import com.intellij.codeInsight.template.{TemplateBuilderImpl, TemplateManager}
import com.intellij.codeInsight.{CodeInsightUtilCore, FileModificationService}
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.project.Project
import com.intellij.psi._
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.refactoring.util.CommonRefactoringUtil
import org.jetbrains.plugins.scala.annotator.createFromUsage.CreateFromUsageUtil._
import org.jetbrains.plugins.scala.codeInspection.collections.MethodRepr
import org.jetbrains.plugins.scala.console.ScalaLanguageConsoleView
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScSelfTypeElement, ScSimpleTypeElement}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.{ScExtendsBlock, ScTemplateBody}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef._
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory._
import org.jetbrains.plugins.scala.lang.psi.types.api.ExtractClass
import org.jetbrains.plugins.scala.lang.psi.{ScalaPsiElement, ScalaPsiUtil}
import org.jetbrains.plugins.scala.project.ScalaLanguageLevel.Scala_2_10
import org.jetbrains.plugins.scala.project._
import org.jetbrains.plugins.scala.util.TypeAnnotationUtil
import scala.util.{Failure, Success, Try}
/**
* Pavel Fatin
*/
abstract class CreateEntityQuickFix(ref: ScReferenceExpression, entity: String, keyword: String)
extends CreateFromUsageQuickFixBase(ref, entity) {
// TODO add private modifiers for unqualified entities ?
// TODO use Java CFU when needed
// TODO find better place for fields, create methods after
// The fix is offered only when the target extends-block (from the qualifier,
// the base of an infix call, or the reference's own file) is editable source.
// An assignment directly inside an argument list (a named argument) is excluded.
override def isAvailable(project: Project, editor: Editor, file: PsiFile): Boolean = {
  if (!super.isAvailable(project, editor, file)) return false

  // True when the expression's extends-block resolves and is not in a compiled (library) file.
  def checkBlock(expr: ScExpression) = blockFor(expr) match {
    case Success(bl) => !bl.isInCompiledFile
    case _ => false
  }

  ref match {
    case Both(Parent(_: ScAssignStmt), Parent(Parent(_: ScArgumentExprList))) =>
      false
    case exp@Parent(infix: ScInfixExpr) if infix.operation == exp => checkBlock(infix.getBaseExpr)
    case it =>
      it.qualifier match {
        // `super.x` is only supported when the super reference is unambiguous.
        case Some(sup: ScSuperReference) => unambiguousSuper(sup).exists(!_.isInCompiledFile)
        case Some(qual) => checkBlock(qual)
        case None => !it.isInCompiledFile
      }
  }
}
def invokeInner(project: Project, editor: Editor, file: PsiFile) {
def tryToFindBlock(expr: ScExpression): Option[ScExtendsBlock] = {
blockFor(expr) match {
case Success(bl) => Some(bl)
case Failure(e) =>
CommonRefactoringUtil.showErrorHint(project, editor, e.getMessage, "Create entity quickfix", null)
None
}
}
if (!ref.isValid) return
val entityType = typeFor(ref)
val genericParams = genericParametersFor(ref)
val parameters = parametersFor(ref)
val placeholder = if (entityType.isDefined) "%s %s%s: Int" else "%s %s%s"
val unimplementedBody = if (file.scalaLanguageLevel.exists(_ >= Scala_2_10)) " = ???" else ""
val params = (genericParams ++: parameters).mkString
val text = placeholder.format(keyword, ref.nameId.getText, params) + unimplementedBody
val block = ref match {
case it if it.isQualified => ref.qualifier.flatMap(tryToFindBlock)
case Parent(infix: ScInfixExpr) => tryToFindBlock(infix.getBaseExpr)
case _ => None
}
if (!FileModificationService.getInstance.prepareFileForWrite(block.map(_.getContainingFile).getOrElse(file))) return
inWriteAction {
val maybeEntity = block match {
case Some(_ childOf (obj: ScObject)) if obj.isSyntheticObject =>
val bl = materializeSytheticObject(obj).extendsBlock
createEntity(bl, ref, text)
case Some(it) => createEntity(it, ref, text)
case None => createEntity(ref, text)
}
for (entity <- maybeEntity) {
ScalaPsiUtil.adjustTypes(entity)
entity match {
case scalaPsi: ScalaPsiElement => TypeAnnotationUtil.removeTypeAnnotationIfNeeded(scalaPsi)
case _ =>
}
val builder = new TemplateBuilderImpl(entity)
for (aType <- entityType;
typeElement <- entity.children.findByType[ScSimpleTypeElement]) {
builder.replaceElement(typeElement, aType)
}
addTypeParametersToTemplate(entity, builder)
addParametersToTemplate(entity, builder)
addQmarksToTemplate(entity, builder)
CodeInsightUtilCore.forcePsiPostprocessAndRestoreElement(entity)
val template = builder.buildTemplate()
val isScalaConsole = file.getName == ScalaLanguageConsoleView.SCALA_CONSOLE
if (!isScalaConsole) {
val newEditor = positionCursor(entity.getLastChild)
val range = entity.getTextRange
newEditor.getDocument.deleteString(range.getStartOffset, range.getEndOffset)
TemplateManager.getInstance(project).startTemplate(newEditor, template)
}
}
}
}
private def materializeSytheticObject(obj: ScObject): ScObject = {
val clazz = obj.fakeCompanionClassOrCompanionClass
val objText = s"object ${clazz.name} {}"
val fromText = ScalaPsiElementFactory.createTemplateDefinitionFromText(objText, clazz.getParent, clazz)
clazz.getParent.addAfter(fromText, clazz).asInstanceOf[ScObject]
}
private def blockFor(exp: ScExpression): Try[ScExtendsBlock] = {
object ParentExtendsBlock {
def unapply(e: PsiElement): Option[ScExtendsBlock] = Option(PsiTreeUtil.getParentOfType(exp, classOf[ScExtendsBlock]))
}
exp match {
case InstanceOfClass(td: ScTemplateDefinition) => Success(td.extendsBlock)
case th: ScThisReference if PsiTreeUtil.getParentOfType(th, classOf[ScExtendsBlock], true) != null =>
th.refTemplate match {
case Some(ScTemplateDefinition.ExtendsBlock(block)) => Success(block)
case None =>
val parentBl = PsiTreeUtil.getParentOfType(th, classOf[ScExtendsBlock], /*strict = */true, /*stopAt = */classOf[ScTemplateDefinition])
if (parentBl != null) Success(parentBl)
else Failure(new IllegalStateException("Cannot find template definition for `this` reference"))
}
case sup: ScSuperReference =>
unambiguousSuper(sup) match {
case Some(ScTemplateDefinition.ExtendsBlock(block)) => Success(block)
case None => Failure(new IllegalStateException("Cannot find template definition for not-static super reference"))
}
case Both(_: ScThisReference, ParentExtendsBlock(block)) => Success(block)
case Both(ReferenceTarget((_: ScSelfTypeElement)), ParentExtendsBlock(block)) => Success(block)
case _ => Failure(new IllegalStateException("Cannot find a place to create definition"))
}
}
def createEntity(block: ScExtendsBlock, ref: ScReferenceExpression, text: String): Option[PsiElement] = {
if (block.templateBody.isEmpty)
block.add(createTemplateBody(block.getManager))
val children = block.templateBody.get.children.toSeq
for (anchor <- children.find(_.isInstanceOf[ScSelfTypeElement]).orElse(children.headOption)) yield {
val holder = anchor.getParent
val hasMembers = holder.children.containsType[ScMember]
val entity = holder.addAfter(createElementFromText(text), anchor)
if (hasMembers) holder.addAfter(createNewLine(), entity)
entity
}
}
def createEntity(ref: ScReferenceExpression, text: String): Option[PsiElement] = {
for (anchor <- anchorForUnqualified(ref)) yield {
val holder = anchor.getParent
val entity = holder.addBefore(createElementFromText(text), anchor)
holder.addBefore(createNewLine("\n\n"), entity)
holder.addAfter(createNewLine("\n\n"), entity)
entity
}
}
private def typeFor(ref: ScReferenceExpression): Option[String] = ref.getParent match {
case call: ScMethodCall => call.expectedType().map(_.canonicalText)
case _ => ref.expectedType().map(_.canonicalText)
}
private def parametersFor(ref: ScReferenceExpression): Option[String] = {
ref.parent.collect {
case MethodRepr(_, _, Some(`ref`), args) => paramsText(args)
case (_: ScGenericCall) childOf (MethodRepr(_, _, Some(`ref`), args)) => paramsText(args)
}
}
private def genericParametersFor(ref: ScReferenceExpression): Option[String] = ref.parent.collect {
case genCall: ScGenericCall =>
genCall.arguments match {
case args if args.size == 1 => "[T]"
case args => args.indices.map(i => s"T$i").mkString("[", ", ", "]")
}
}
private def anchorForUnqualified(ref: ScReferenceExpression): Option[PsiElement] = {
val parents = ref.parentsInFile
val anchors = ref.withParentsInFile
val place = parents.zip(anchors).find {
case (_ : ScTemplateBody, _) => true
case (_ : ScalaFile, _) => true
case _ => false
}
place.map(_._2)
}
private def unambiguousSuper(supRef: ScSuperReference): Option[ScTypeDefinition] = {
supRef.staticSuper match {
case Some(ExtractClass(clazz: ScTypeDefinition)) => Some(clazz)
case None =>
supRef.parentsInFile.toSeq.collect { case td: ScTemplateDefinition => td } match {
case Seq(td) =>
td.supers match {
case Seq(t: ScTypeDefinition) => Some(t)
case _ => None
}
case _ => None
}
}
}
}
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/annotator/createFromUsage/CreateEntityQuickFix.scala
|
Scala
|
apache-2.0
| 9,817
|
package howitworks.scalaz
class EqualDemo extends wp.Spec {
  // Demonstrates scalaz.Equal, a type class for type-safe (compile-checked) equality.
  // Inside wp.Spec, `===` is taken from org.scalactic.Equalizer.=== instead of
  // scalaz.syntax.EqualOps, so the ≟ and ≠ aliases are used here instead.

  "simple types" in {
    // import guide:
    import scalaz.syntax.all._
    // or more detailed: import scalaz.syntax.equal._
    import scalaz.std.AllInstances._
    // import scalaz.std.anyVal._ or more detailed

    222 ≟ 222
    // "222" ≟ 222 won't compile — the operands must share an Equal instance
    // unsugared version of the above:
    ToEqualOps(222)(scalaz.std.anyVal.intInstance).===(222)
  }

  "customTypeDemo" in {
    class Foo(val bar: String)
    def Foo(bar: String) = new Foo(bar)

    import scalaz.Equal
    import scalaz.syntax.all._

    // first create an Equal instance in scope
    implicit val fooEqual = Equal.equal[Foo]((a, b) => a.bar == b.bar)

    // now you can use its goodies
    // `===` is taken from org.scalactic.Equalizer.=== instead of scalaz.syntax.EqualOps
    // Foo("bar") === Foo("bar")
    Foo("bar") ≟ Foo("bar")
    Foo("bar") =/= Foo("bazzzz")
    Foo("bar") ≠ Foo("bazzzz")
    // Foo("bar") ≠ "fobar" won't compile
  }

  "assert_===" in {
    class Foo(val bar: String)
    def Foo(bar: String) = new Foo(bar)

    import scalaz._
    import scalaz.syntax.all._

    implicit val fooEqual = Equal.equal[Foo]((a, b) => a.bar == b.bar)

    // in order to use assert_=== a Show instance must be in scope
    implicit val FooShow = Show.shows[Foo](foo => s"Foo(${foo.bar})")

    Foo("a") assert_=== Foo("a")
  }
}
|
jawp/wicked-playground
|
modules/server/src/test/scala/howitworks/scalaz/EqualDemo.scala
|
Scala
|
mit
| 1,573
|
package ch.ninecode.cim
/**
 * Logging level enumeration.
 */
object LogLevels extends Enumeration
{
    type LogLevels = Value
    val ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN = Value

    /**
     * Convert a level of this enumeration into the equivalent log4j level.
     *
     * @param level the level to convert
     * @return the corresponding org.apache.log4j.Level
     */
    def toLog4j (level: Value): org.apache.log4j.Level =
        level match
        {
            case ALL => org.apache.log4j.Level.ALL
            case DEBUG => org.apache.log4j.Level.DEBUG
            case ERROR => org.apache.log4j.Level.ERROR
            case FATAL => org.apache.log4j.Level.FATAL
            case INFO => org.apache.log4j.Level.INFO
            // was Level.ALL (copy-paste bug): OFF must disable logging, not enable everything
            case OFF => org.apache.log4j.Level.OFF
            // was Level.ALL (copy-paste bug): log4j has a dedicated TRACE level
            case TRACE => org.apache.log4j.Level.TRACE
            case WARN => org.apache.log4j.Level.WARN
        }
}
/**
 * Output mode enumeration for the CIM difference report.
 */
object Mode extends Enumeration
{
    type Mode = Value
    val Summary, HumanReadable, ChangeSet = Value
}
/**
 * Options for CIM export.
 *
 * @param valid <code>false</code> if either help or version requested (i.e. don't proceed with execution).
 * @param unittest <code>true</code> when running unit tests.
 * @param loglevel Logging level for messages.
 * @param master Spark master URL. See [[https://spark.apache.org/docs/latest/submitting-applications.html#master-urls]].
 * @param sparkopts Spark options. See [[https://spark.apache.org/docs/latest/configuration.html]].
 * @param cimopts CIMReader options. See [[https://github.com/derrickoswald/CIMSpark/tree/master/CIMReader#reader-api]].
 * @param from Files of the first ("from") CIM data set of the comparison — TODO confirm exact semantics against usage.
 * @param to Files of the second ("to") CIM data set of the comparison — TODO confirm exact semantics against usage.
 * @param mode Kind of difference report to produce (Summary, HumanReadable or ChangeSet).
 * @param output Output destination — presumably a file or directory path; verify against usage.
 * @param description Free-form description — presumably embedded in the generated output; verify against usage.
 * @param name Name for the result — presumably embedded in the generated output; verify against usage.
 */
final case class CIMDifferenceOptions (
    var valid: Boolean = true,
    unittest: Boolean = false,
    loglevel: LogLevels.Value = LogLevels.OFF,
    master: String = "",
    sparkopts: Map[String, String] = Map(
        "spark.graphx.pregel.checkpointInterval" -> "8",
        "spark.serializer" -> "org.apache.spark.serializer.KryoSerializer",
        "spark.kryo.registrator" -> "ch.ninecode.cim.CIMRegistrator",
        "spark.ui.showConsoleProgress" -> "false",
        "spark.sql.debug.maxToStringFields" -> "250",
        "spark.sql.catalog.casscatalog" -> "com.datastax.spark.connector.datasource.CassandraCatalog"),
    cimopts: Map[String, String] = Map(),
    from: Seq[String] = Seq(),
    to: Seq[String] = Seq(),
    mode: Mode.Value = Mode.Summary,
    output: String = "",
    description: String = "",
    name: String = ""
)
|
derrickoswald/CIMScala
|
CIMDifference/src/main/scala/ch/ninecode/cim/CIMDifferenceOptions.scala
|
Scala
|
mit
| 2,285
|
import java.util.regex._
import scala.reflect.internal.SymbolTable
import scala.reflect.macros.blackbox._
import language.experimental.macros
import java.lang.invoke._
object Macro {
  /** Macro entry point; the expansion is produced by [[Impl.classNameOf]]. */
  def classNameOf(expr: Class[_]): String = macro Impl.classNameOf
}
class Impl(val c: Context) {
  // Macro implementation: rather than expanding to ordinary code, it emits an
  // ApplyDynamic tree so the backend generates an `invokedynamic` instruction
  // whose bootstrap method is `test.Bootstrap.bootstrap` and whose bootstrap
  // argument is the class constant `expr`.
  def classNameOf(expr: c.Tree): c.Tree = {
    {
      // Drop down to the compiler-internal symbol table: ApplyDynamic and raw
      // symbol creation are not part of the public macro API.
      val symtab = c.universe.asInstanceOf[SymbolTable]
      import symtab._
      val bootstrapMethod = typeOf[test.Bootstrap].companion.member(TermName("bootstrap"))
      // NOTE(review): paramSym appears unused below — presumably leftover; confirm before removing.
      val paramSym = NoSymbol.newTermSymbol(TermName("x")).setInfo(typeOf[String])
      // Fake call-site symbol: a nullary method returning String.
      val dummySymbol = NoSymbol.newTermSymbol(TermName("classNameOf")).setInfo(internal.nullaryMethodType(typeOf[String]))
      // First element is the bootstrap method handle; the rest are bootstrap arguments.
      val bootstrapArgTrees: List[Tree] = List(
        Literal(Constant(bootstrapMethod)).setType(NoType),
        expr.asInstanceOf[Tree],
      )
      val result = ApplyDynamic(Ident(dummySymbol).setType(dummySymbol.info), bootstrapArgTrees)
      result.setType(dummySymbol.info.resultType)
      result.asInstanceOf[c.Tree]
    }
  }
}
|
scala/scala
|
test/files/run/indy-via-macro-class-constant-bsa/macro_1.scala
|
Scala
|
apache-2.0
| 1,130
|
// Type alias to an inner class, instantiated path-dependently below.
// Looks like a compiler regression test — keep the exact shape; do not "clean up".
class AbsCell {
  type T = Node
  class Node
}
object Test {
  def test: Unit = {
    val cell = new AbsCell
    // Instantiates the inner class through the alias on a specific instance (cell.T).
    new cell.T
  }
}
// Variant with a self-referential initializer (`val value: T = value`) — this is
// deliberate test material exercising typing/initialization corner cases, not real code.
class AbsCell2 {
  type T = Node
  val value: T = value
  def set(x: T): Unit = {}
  class Node
}
object init {
  def main = {
    // Anonymous subclass referencing the inner Node of AbsCell2.
    val cell = new AbsCell2 { val init = new Node }
    cell set (new cell.T)
  }
}
|
som-snytt/dotty
|
tests/pos/i1865.scala
|
Scala
|
apache-2.0
| 343
|
package skate
import java.io.File
import java.util.concurrent.ConcurrentHashMap
import javax.servlet.ServletContext
import javax.servlet.http.HttpServlet
import javax.servlet.http.HttpServletRequest
import javax.servlet.http.HttpServletResponse
import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.xml.XML
/**
 * Helper functions for running in a Servlet container.
 */
object ServletSupport {

  /**
   * Evaluates the template indicated by req.requestURI and writes it to
   * the writer supplied resp.getWriter, flushing when done.
   */
  def render(req:HttpServletRequest, resp:HttpServletResponse) {
    XML.write(resp.getWriter, Template.eval(req.getRequestURI).head, "UTF-8", false, null)
    resp.getWriter.flush
  }

  /**
   * Joins a root path and a URI with exactly one '/' between them.
   */
  def cat(root:String, uri:String) = {
    (root.endsWith("/"), uri.startsWith("/")) match {
      case (true, true) => root + uri.substring(1)
      case (false, false) => root + "/" + uri
      case _ => root + uri
    }
  }

  /**
   * Loads templates from /WEB-INF/templates, keeping a cache of previously loaded templates.
   */
  val defaultTemplateFinder = {
    val cache = new ConcurrentHashMap[String, (Long, Template)]()
    findTemplate(_:String, _:ServletContext, cache, "/WEB-INF/templates")
  }

  /**
   * Loads templates from a directory in a web application/WAR file, specified by root. The
   * supplied cache will be searched for the template first, and if the modify stamp of
   * the actual file is newer than the cached copy, it will be reloaded.
   * On failure an error template describing the exception is returned instead.
   */
  def findTemplate(uri:String, sc:ServletContext, cache:mutable.Map[String, (Long, Template)], root:String):Option[Template] = {
    try {
      val path = cat(root, uri)
      val tst = cache.get(path)
      TemplateConfig.debug("defaultTemplateFinder: ", path, " -> ", tst, " ", cache)
      tst match {
        case None => loadTemplate(path, sc, cache).map(_._2)
        // A cached stamp of -1 means the container could not map the resource
        // to a real file (see calcLastModified); never attempt a reload.
        case Some((-1L, t)) => Some(t)
        case Some((ts, t)) if (ts < calcLastModified(path, sc)) => loadTemplate(path, sc, cache).map(_._2)
        case Some((ts, t)) => Some(t)
      }
    }
    catch {
      // Was `case e =>`, which caught every Throwable including fatal VM errors
      // (OutOfMemoryError, etc.). Restrict the handler to ordinary exceptions
      // (use scala.util.control.NonFatal on Scala 2.10+) and render them via
      // the configured error template; fatal errors now propagate.
      case e: Exception => Some(new Template(TemplateConfig.templateError(e.toString, Some(e), uri)))
    }
  }

  /**
   * Last-modified time of the resource at path, or -1 when the servlet
   * container cannot map it to a real file (e.g. an unexploded WAR).
   */
  def calcLastModified(path:String, sc:ServletContext):Long = {
    val realPath = Option(sc.getRealPath(path))
    val lastModified = realPath.map {
      x => new File(x).lastModified
    }.getOrElse(-1L)
    TemplateConfig.debug("calcLastModified: ", path, " (", realPath, ") -> ", lastModified)
    lastModified
  }

  /**
   * Parses the template at path (if the resource exists), stores it in the
   * cache together with its last-modified stamp, and returns the cached pair.
   */
  def loadTemplate(path:String, sc:ServletContext, cache:mutable.Map[String, (Long, Template)]):Option[(Long, Template)] = {
    val is = Option(sc.getResourceAsStream(path))
    is.map {
      x => {
        // Close the stream in all cases: XML.load does not close it, and the
        // original code leaked one InputStream per template (re)load.
        val t = try { new Template(XML.load(x)) } finally { x.close() }
        val ts = calcLastModified(path, sc)
        val tst = (ts, t)
        cache.put(path, tst)
        TemplateConfig.debug("loadTemplate: ", cache)
        tst
      }
    }
  }
}
|
sean8223/skate
|
src/main/scala/skate/ServletSupport.scala
|
Scala
|
bsd-2-clause
| 2,940
|
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package toplevel
package imports
import com.intellij.lang.ASTNode
import com.intellij.psi.stubs.StubElement
import com.intellij.psi.tree.IElementType
import com.intellij.psi.{PsiElement, PsiElementVisitor}
import com.intellij.util.IncorrectOperationException
import org.jetbrains.plugins.scala.extensions.ObjectExt
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports._
import org.jetbrains.plugins.scala.lang.psi.stubs.ScImportExprStub
/**
 * PSI implementation of a single import expression (e.g. `a.b.c`, `a.b._`,
 * or `a.b.{c, d}`), stub-backed so queries can be answered without parsing.
 *
 * @author AlexanderPodkhalyuzin
 * Date: 20.02.2008
 */
class ScImportExprImpl private (stub: StubElement[ScImportExpr], nodeType: IElementType, node: ASTNode)
  extends ScalaStubBasedElementImpl(stub, nodeType, node) with ScImportExpr {
  override def accept(visitor: PsiElementVisitor) {
    visitor match {
      case visitor: ScalaElementVisitor => super.accept(visitor)
      case _ => super.accept(visitor)
    }
  }

  def this(node: ASTNode) = {this(null, null, node)}

  def this(stub: ScImportExprStub) = {this(stub, ScalaElementTypes.IMPORT_EXPR, null)}

  override def toString: String = "ImportExpression"

  // Whether the expression ends in a wildcard, either a direct `_` token or a
  // wildcard inside the selector set. Answered from the stub when available.
  def singleWildcard: Boolean = {
    val stub = getStub
    if (stub != null) {
      return stub.asInstanceOf[ScImportExprStub].isSingleWildcard
    }
    if (findChildByType[PsiElement](ScalaTokenTypes.tUNDER) != null) {
      true
    } else {
      selectorSet match {
        case Some(set) => set.hasWildcard
        case None => false
      }
    }
  }

  // The `_` PSI element itself, either a direct child or inside the selector set.
  def wildcardElement: Option[PsiElement] = {
    if (findChildByType[PsiElement](ScalaTokenTypes.tUNDER) != null) {
      Some(findChildByType[PsiElement](ScalaTokenTypes.tUNDER))
    } else {
      selectorSet match {
        case Some(set) =>
          set.wildcardElement
        case None => None
      }
    }
  }

  // Qualifier of the import: for a plain `a.b.c` this is `a.b`; for a wildcard
  // or selector-set import the whole reference is the qualifier.
  def qualifier: ScStableCodeReferenceElement = {
    if (reference.isEmpty)
      throw new IncorrectOperationException()
    else if (!singleWildcard && selectorSet.isEmpty)
      reference.flatMap(_.qualifier).orNull
    else
      reference.get
  }

  // Removes this expression from its import statement: deletes the whole
  // statement when it is the only expression, otherwise also cleans up the
  // adjacent comma and its trailing whitespace.
  def deleteExpr() {
    val parent = getParent.asInstanceOf[ScImportStmt]
    if (parent.importExprs.length == 1) {
      parent.getParent match {
        case x: ScImportsHolder => x.deleteImportStmt(parent)
        case _ =>
      }
    } else {
      val node = parent.getNode
      val remove = node.removeChild _
      val next = getNextSibling
      if (next != null) {
        // Drop the whitespace that follows a removed comma, but only when it
        // contains no line break (to preserve the statement's layout).
        def removeWhitespaceAfterComma(comma: ASTNode) {
          if (comma.getTreeNext != null && !comma.getTreeNext.getText.contains("\n") &&
            comma.getTreeNext.getText.trim.isEmpty) {
            remove(comma.getTreeNext)
          }
        }
        if (next.getText == ",") {
          val comma = next.getNode
          removeWhitespaceAfterComma(comma)
          remove(comma)
        } else {
          if (next.getNextSibling != null && next.getNextSibling.getText == ",") {
            val comma = next.getNextSibling
            removeWhitespaceAfterComma(comma.getNode)
            remove(next.getNode)
            remove(comma.getNode)
          } else {
            // No comma after this expression: fall back to removing the comma before it.
            val prev = getPrevSibling
            if (prev != null) {
              if (prev.getText == ",") {
                remove(prev.getNode)
              } else {
                if (prev.getPrevSibling != null && prev.getPrevSibling.getText == ",") {
                  remove(prev.getPrevSibling.getNode)
                }
              }
            }
          }
        }
      } else {
        // Last expression in the statement: remove the preceding comma (and the
        // whitespace between it and this expression, when present).
        val prev = getPrevSibling
        if (prev != null) {
          if (prev.getText == ",") {
            remove(prev.getNode)
          } else {
            if (prev.getPrevSibling != null && prev.getPrevSibling.getText == ",") {
              val prevSibling = prev.getPrevSibling
              remove(prev.getNode)
              remove(prevSibling.getNode)
            }
          }
        }
      }
      remove(getNode)
    }
  }

  def selectorSet: Option[ScImportSelectors] = {
    val psi: ScImportSelectors = getStubOrPsiChild(ScalaElementTypes.IMPORT_SELECTORS)
    Option(psi)
  }

  // Stable reference part of the import, served from the stub when available.
  def reference: Option[ScStableCodeReferenceElement] = {
    val stub = getStub
    if (stub != null) stub.asInstanceOf[ScImportExprStub].reference
    else getFirstChild.asOptionOf[ScStableCodeReferenceElement] /*findChild(classOf[ScStableCodeReferenceElement])*/
  }
}
|
whorbowicz/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/imports/ScImportExprImpl.scala
|
Scala
|
apache-2.0
| 4,692
|
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.exceptions.{StackDepthExceptionHelper, TestFailedException}
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
class SeveredStackTracesFailureSpec extends AnyFunSpec with SeveredStackTraces {

  // Every test below fails on purpose. This fixture intercepts the resulting
  // TestFailedException and verifies that SeveredStackTraces trimmed it to
  // stack depth 0 while keeping a valid "FileName.scala:NN" location string,
  // then reports the test as Succeeded.
  override def withFixture(test: NoArgTest): Outcome = {
    super.withFixture(test) match {
      case Failed(e: TestFailedException) =>
        e.failedCodeStackDepth should equal (0)
        e.failedCodeFileNameAndLineNumberString match {
          case Some(s) =>
            checkFileNameAndLineNumber(e, s)
          case None => fail("TestFailedException didn't contain file name and line number string", e)
        }
        Succeeded
      case other => other
    }
  }

  describe("A severed TestFailedException") {
    it("should be properly severed on fail()") {
      fail()
    }
    it("should be properly severed on fail(\"message\")") {
      fail("some message")
    }
    it("should be properly severed on fail(throwable)") {
      fail(new RuntimeException)
    }
    it("should be properly severed on fail(\"some message\", throwable)") {
      fail("some message", new RuntimeException)
    }
    it("should be properly severed on assert(false)") {
      assert(false)
    }
    it("should be properly severed on assert(false, \"some message\")") {
      assert(false, "some message")
    }
    it("should be properly severed on assert(1 === 2)") {
      assert(1 === 2)
    }
    it("should be properly severed on assert(1 === 2, \"some message\")") {
      assert(1 === 2, "some message")
    }
    it("should be properly severed on assertResult(1) { 2 }") {
      assertResult(1) { 2 }
    }
    it("should be properly severed on assertResult(1, \"some message\") { 2 }") {
      assertResult(1, "some message") { 2 }
    }
    it("should be properly severed on intercept[IllegalArgumentException] {}") {
      intercept[IllegalArgumentException] {}
    }
    it("should be properly severed on intercept[IllegalArgumentException] { throw new RuntimeException }") {
      intercept[IllegalArgumentException] { if (false) 1 else throw new RuntimeException }
    }
    it("should be properly severed on 1 should === (2)") {
      1 should === (2)
    }
    it("should be properly severed on an [IllegalArgumentException] should be thrownBy {}") {
      an [IllegalArgumentException] should be thrownBy {}
    }
    it("should be properly severed on an [IllegalArgumentException] should be thrownBy { throw new RuntimeException }") {
      an [IllegalArgumentException] should be thrownBy { if (false) () else throw new RuntimeException }
    }
  }

  // Recomputes the expected "fileName:lineNumber" from the severed stack trace
  // and compares it to the string carried by the exception.
  private def checkFileNameAndLineNumber(e: TestFailedException, failedCodeFileNameAndLineNumberString: String): Unit = {
    val stackTraceElement = e.getStackTrace()(e.failedCodeStackDepth)
    val fileName = StackDepthExceptionHelper.getFailedCodeFileName(stackTraceElement).get
    val lineNumber = stackTraceElement.getLineNumber
    failedCodeFileNameAndLineNumberString should equal (fileName + ":" + lineNumber)
  }
}
|
scalatest/scalatest
|
jvm/scalatest-test/src/test/scala/org/scalatest/SeveredStackTracesFailureSpec.scala
|
Scala
|
apache-2.0
| 3,688
|
package ca.friendlyguacamole.server.providers
import ca.friendlyguacamole.server.models.{PollModel, PollRequest}
import scalaz.concurrent.Task
/**
 * Data-access abstraction for polls. All operations return deferred scalaz
 * Tasks. The optional userId parameters presumably personalize results for
 * the viewing user — confirm against implementations.
 *
 * Created by panagiotis on 04/06/17.
 */
trait PollsProvider {
  /** All polls, optionally personalized for the given user. */
  def getPolls(userId: Option[Int]): Task[Seq[PollModel]]
  /** Polls carrying the given tag. */
  def findByTag(tag: String, userId: Option[Int]): Task[Seq[PollModel]]
  /** Free-text search over polls. */
  def search(q: String, userId: Option[Int]): Task[Seq[PollModel]]
  /** Currently trending tags. */
  def getTrendingTags(): Task[Seq[String]]
  /** A single poll by id, if it exists. */
  def findPoll(id: Int, userId: Option[Int]): Task[Option[PollModel]]
  /** Creates a poll owned by userId; None presumably signals failure — confirm. */
  def createPoll(pollRequest: PollRequest, userId: Int): Task[Option[PollModel]]
  /** Records userId's vote for optionId in pollId, returning the updated poll. */
  def vote(userId: Int, pollId: Int, optionId: Int): Task[Option[PollModel]]
}
|
proubatsis/friendly-guacamole
|
server/friendly-guacamole-server/src/main/scala/ca/friendlyguacamole/server/providers/PollsProvider.scala
|
Scala
|
mit
| 686
|
/**
* Copyright 2013 Gianluca Amato <gamato@unich.it>
*
* This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains
* JANDOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JANDOM is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty ofa
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JANDOM. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich.jandom.targets
import org.scalatest.FunSuite
import it.unich.jandom.domains.numerical.ppl.PPLDomain
import it.unich.jandom.parsers.NumericalPropertyParser
import it.unich.jandom.targets.jvmsoot.SootFrameNumericalDomain
import parma_polyhedra_library.C_Polyhedron
/**
 * Tests for SootFrameNumericalDomain backed by a PPL closed-polyhedra domain.
 * Requires the native Parma Polyhedra Library bindings at run time.
 *
 * @author Gianluca Amato <gamato@unich.it>
 *
 */
class SootFrameNumericalDomainSuite extends FunSuite {
  val numdom = PPLDomain[C_Polyhedron]()
  val dom = new SootFrameNumericalDomain(numdom)
  val classAType = soot.RefType.v("classA")
  // Frame layout used by most tests: an object reference, an int, and a double.
  val types = Seq(classAType, soot.IntType.v(), soot.DoubleType.v())

  test("Bottom and Top") {
    val bottom = dom.bottom(types)
    val top = dom.top(types)
    assert(bottom < top)
  }

  test("Constructors") {
    val env = Environment()
    val parser = new NumericalPropertyParser(env)
    val prop = parser.parseProperty("v0 == v0 && v1 + v2 == 0 && v1 <= 4", dom.numdom).get
    val absframe = dom(prop, types)
    // Type sequences inconsistent with the property's variable usage must be rejected.
    intercept[AssertionError] { dom(prop, Seq(soot.IntType.v(), classAType, soot.DoubleType.v())) }
    intercept[AssertionError] { dom(prop, Seq(soot.IntType.v(), soot.DoubleType.v(), classAType)) }
    intercept[AssertionError] { dom(prop, Seq(classAType, soot.IntType.v())) }
    val fullnumframe = dom(prop, soot.IntType.v())
    intercept[AssertionError] { dom(prop, classAType) }
  }

  test("Connect") {
    val env = Environment()
    val parser = new NumericalPropertyParser(env)
    val prop = parser.parseProperty("v0 == v0 && v1 + v2 == 0 && v1 <= 4", dom.numdom).get
    val prop2 = parser.parseProperty("v0 >= 0 && v1 == v0 && v2 == v2", dom.numdom).get
    val absframe = dom(prop, types)
    val absframe2 = dom(prop2, Seq(soot.DoubleType.v(), soot.DoubleType.v(), classAType))
    // Connect glues two frames, identifying the last variable of the first
    // with the first variable of the second (overlap of 1).
    val conn = absframe.connect(absframe2, 1)
    val prop3 = parser.parseProperty("v0 == v0 && v1 + v2 == 0 && v1 <=4 && v2 >= 0 && v3 == v3", dom.numdom).get
    val absframe3 = dom(prop3, Seq(classAType, soot.IntType.v(), soot.DoubleType.v(), classAType))
    assert(conn === absframe3)
  }

  test("Restrict/Extract") {
    // TODO change the parser API.. is very cumbersome to use since it permanently modifies the environment
    val env = Environment()
    val parser = new NumericalPropertyParser(env)
    val prop = parser.parseProperty("v0 == v0 && v1 + v2 == 0 && v1 <= 4", dom.numdom).get
    val absframe = dom(prop, types)
    // extract(n) keeps the last n frame variables...
    val extr = absframe.extract(2)
    val env2 = Environment()
    val parser2 = new NumericalPropertyParser(env2)
    val prop2 = parser2.parseProperty("v1 + v2 == 0 && v1 <= 4", dom.numdom).get
    val absframe2 = dom(prop2, Seq(soot.IntType.v(), soot.DoubleType.v()))
    assert(extr === absframe2)
    val env3 = Environment()
    val parser3= new NumericalPropertyParser(env3)
    val prop3 = parser3.parseProperty("v0 == v0 && v1 <= 4", dom.numdom).get
    // ...while restrict(n) drops the last n frame variables.
    val restr = absframe.restrict(1)
    val absframe3 = dom(prop3, Seq(classAType, soot.IntType.v()))
    assert(restr === absframe3)
  }
}
|
rubino22/JDBeta
|
core/src/test/ppl/it/unich/jandom/targets/SootFrameNumericalDomainSuite.scala
|
Scala
|
lgpl-3.0
| 3,746
|
package hr.fer.ztel.thesis.measure
/**
 * Cosine similarity measure, a / sqrt((a + b) * (a + c)), with range [0, 1].
 *
 * When the denominator is zero the raw formula yields 0.0 / 0.0 == Double.NaN,
 * and NaN compares as greater than every Double, which would corrupt any
 * subsequent ranking; the neutral value 0.5 is returned instead
 * (e.g. for the (0, 0, 0, 1) contingency table).
 */
class CosineSimilarityMeasure extends ItemPairSimilarityMeasure {
  def compute(a: Int, b: Int, c: Int, d: Int): Double = {
    // Guard every zero-denominator case, not only a == b == c == 0:
    // with non-negative counts, (a + b) * (a + c) == 0 exactly when a == 0 and
    // (b == 0 or c == 0) — e.g. (0, 0, 5, d) previously produced NaN.
    if (a == 0 && (b == 0 || c == 0)) 0.5 // neutral value instead of NaN
    else a / math.sqrt((a + b) * (a + c))
  }
}
|
fpopic/master_thesis
|
src/main/scala/hr/fer/ztel/thesis/measure/CosineSimilarityMeasure.scala
|
Scala
|
mit
| 421
|
package views
import db.scalikejdbc.{Round, User}
import org.intracer.wmua._
import play.api.test.{Helpers, PlaySpecification}
import play.twirl.api.Html
class LargeViewControllerSpec extends PlaySpecification {

  import play.api.i18n._

  implicit val lang = Lang("en-US")

  "large view" should {
    "have correct rating link" in {
      implicit val request = play.api.test.FakeRequest("GET", "/")
      implicit val messages = Helpers.stubMessagesApi().preferred(request)
      // A single image with no jury selections is enough to render the page.
      val files = Seq(
        ImageWithRating(
          Image(pageId = 1, title = "File:1.jpg"),
          selection = Nil)
      )
      // Render the full "large" template with a minimal jury user and round.
      val html = views.html.large.main_large(
        title = "title",
        user = User("name", "email", id = Some(1), roles = Set("jury"), contestId = Some(1)),
        asUserId = 0,
        score = 0.0,
        readOnly = false,
        url = "url",
        files = files,
        index = 0,
        page = 0,
        rate = Some(0),
        region = "all",
        round = Round(id = Some(1), number = 1, contestId = 1, active = true, rates = Round.ratesById(5)),
        module = "byrate")(Html("html"))
      val view = contentAsString(html)
      // Rendered output must not contain this character sequence — presumably
      // guarding against over-escaped ampersands in rating links; confirm intent.
      view must not contain "&"
    }
  }
}
|
intracer/wlxjury
|
test/views/LargeViewControllerSpec.scala
|
Scala
|
apache-2.0
| 1,213
|
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.fs.tools.ingest
import java.util
import com.beust.jcommander.converters.BaseConverter
import com.beust.jcommander.{JCommander, Parameter, ParameterException, Parameters}
import org.locationtech.geomesa.fs.storage.api.StorageMetadata.{PartitionBounds, PartitionMetadata, StorageFile}
import org.locationtech.geomesa.fs.tools.FsDataStoreCommand
import org.locationtech.geomesa.fs.tools.FsDataStoreCommand.FsParams
import org.locationtech.geomesa.fs.tools.ingest.ManageMetadataCommand.{CompactCommand, ManageMetadataParams, RegisterCommand, UnregisterCommand}
import org.locationtech.geomesa.tools.{Command, CommandWithSubCommands, RequiredTypeNameParam, Runner}
import org.locationtech.jts.geom.Envelope
import scala.util.control.NonFatal
// CLI command grouping the metadata-management sub-commands
// (compact, register, unregister — defined in the companion object).
class ManageMetadataCommand(val runner: Runner, val jc: JCommander) extends CommandWithSubCommands {
  override val name: String = "manage-metadata"
  override val params = new ManageMetadataParams
  override val subCommands: Seq[Command] = Seq(new CompactCommand, new RegisterCommand, new UnregisterCommand)
}
object ManageMetadataCommand {
import scala.collection.JavaConverters._
class CompactCommand extends FsDataStoreCommand {
override val name = "compact"
override val params = new CompactParams
override def execute(): Unit = withDataStore { ds =>
val metadata = ds.storage(params.featureName).metadata
metadata.compact(None, params.threads)
val partitions = metadata.getPartitions()
Command.user.info(s"Compacted metadata into ${partitions.length} partitions consisting of " +
s"${partitions.map(_.files.size).sum} files")
}
}
class RegisterCommand extends FsDataStoreCommand {
override val name = "register"
override val params = new RegisterParams
override def execute(): Unit = withDataStore { ds =>
val metadata = ds.storage(params.featureName).metadata
val files = params.files.asScala.map(StorageFile(_, System.currentTimeMillis()))
val count = Option(params.count).map(_.longValue()).getOrElse(0L)
val bounds = new Envelope
Option(params.bounds).foreach { case (xmin, ymin, xmax, ymax) =>
bounds.expandToInclude(xmin, ymin)
bounds.expandToInclude(xmax, ymax)
}
metadata.addPartition(PartitionMetadata(params.partition, files, PartitionBounds(bounds), count))
val partition = metadata.getPartition(params.partition).getOrElse(PartitionMetadata("", Seq.empty, None, 0L))
Command.user.info(s"Registered ${params.files.size} new files. Updated partition: ${partition.files.size} " +
s"files containing ${partition.count} known features")
}
}
class UnregisterCommand extends FsDataStoreCommand {
override val name = "unregister"
override val params = new UnregisterParams
override def execute(): Unit = withDataStore { ds =>
val metadata = ds.storage(params.featureName).metadata
val files = params.files.asScala.map(StorageFile(_, 0L))
val count = Option(params.count).map(_.longValue()).getOrElse(0L)
metadata.removePartition(PartitionMetadata(params.partition, files, None, count))
val partition = metadata.getPartition(params.partition).getOrElse(PartitionMetadata("", Seq.empty, None, 0L))
Command.user.info(s"Unregistered ${params.files.size} files. Updated partition: ${partition.files.size} " +
s"files containing ${partition.count} known features")
}
}
@Parameters(commandDescription = "Manage the metadata for a storage instance")
class ManageMetadataParams
@Parameters(commandDescription = "Compact the metadata for a storage instance")
class CompactParams extends FsParams with RequiredTypeNameParam {
@Parameter(names = Array("-t", "--threads"), description = "Number of threads to use for compaction")
var threads: Integer = 4
}
/** JCommander parameters for the 'register' operation. */
@Parameters(commandDescription = "Register new data files with a storage instance")
class RegisterParams extends FsParams with RequiredTypeNameParam {

  @Parameter(names = Array("--partition"), description = "Partition to update", required = true)
  var partition: String = _

  @Parameter(names = Array("--files"), description = "Names of the files to register, must already exist in the appropriate partition folder", required = true, variableArity = true)
  var files: java.util.List[String] = new util.ArrayList[String]()

  // parsed from "xmin,ymin,xmax,ymax" by BoundsConverter (defined below)
  @Parameter(names = Array("--bounds"), description = "Geographic bounds of the data files being registered, in the form xmin,ymin,xmax,ymax", required = false, converter = classOf[BoundsConverter])
  var bounds: (Double, Double, Double, Double) = _

  @Parameter(names = Array("--count"), description = "Number of features in the data files being registered", required = false)
  var count: java.lang.Long = _
}
/** JCommander parameters for the 'unregister' operation. */
@Parameters(commandDescription = "Unregister data files from a storage instance")
class UnregisterParams extends FsParams with RequiredTypeNameParam {

  @Parameter(names = Array("--partition"), description = "Partition to update", required = true)
  var partition: String = _

  @Parameter(names = Array("--files"), description = "Names of the files to unregister, must already exist in the appropriate partition folder", required = true, variableArity = true)
  var files: java.util.List[String] = new util.ArrayList[String]()

  @Parameter(names = Array("--count"), description = "Number of features in the data files being unregistered", required = false)
  var count: java.lang.Long = _
}
/** JCommander converter parsing "xmin,ymin,xmax,ymax" into a tuple of doubles. */
class BoundsConverter(name: String) extends BaseConverter[(Double, Double, Double, Double)](name) {
  override def convert(value: String): (Double, Double, Double, Double) = {
    try {
      val coords = value.split(",").map(_.trim.toDouble)
      // extractor throws MatchError on any arity other than four
      val Array(xmin, ymin, xmax, ymax) = coords
      (xmin, ymin, xmax, ymax)
    } catch {
      // NumberFormatException, MatchError, etc. all surface as parameter errors
      case NonFatal(e) => throw new ParameterException(getErrorString(value, s"format: $e"))
    }
  }
}
}
|
aheyne/geomesa
|
geomesa-fs/geomesa-fs-tools/src/main/scala/org/locationtech/geomesa/fs/tools/ingest/ManageMetadataCommand.scala
|
Scala
|
apache-2.0
| 6,462
|
package scala.slick.driver
import scala.language.{existentials, implicitConversions, higherKinds}
import scala.collection.mutable.HashMap
import scala.slick.SlickException
import scala.slick.ast._
import scala.slick.ast.Util.nodeToNodeOps
import scala.slick.ast.TypeUtil._
import scala.slick.ast.ExtraUtil._
import scala.slick.compiler.{RewriteBooleans, CodeGen, Phase, CompilerState, QueryCompiler}
import scala.slick.util._
import scala.slick.util.MacroSupport.macroSupportInterpolation
import scala.slick.lifted._
import scala.slick.profile.RelationalProfile
import scala.slick.relational.{ResultConverter, CompiledMapping}
import scala.slick.jdbc.JdbcResultConverterDomain
import scala.slick.util.SQLBuilder.Result
trait JdbcStatementBuilderComponent { driver: JdbcDriver =>
// Create the different builders -- these methods should be overridden by drivers as needed

/** Factory methods for statement and DDL builders; drivers override these to
 * substitute dialect-specific subclasses. */
def createQueryBuilder(n: Node, state: CompilerState): QueryBuilder = new QueryBuilder(n, state)
def createInsertBuilder(node: Insert): InsertBuilder = new InsertBuilder(node)
def createUpsertBuilder(node: Insert): InsertBuilder = new UpsertBuilder(node)
def createCheckInsertBuilder(node: Insert): InsertBuilder = new CheckInsertBuilder(node)
def createUpdateInsertBuilder(node: Insert): InsertBuilder = new UpdateInsertBuilder(node)
def createTableDDLBuilder(table: Table[_]): TableDDLBuilder = new TableDDLBuilder(table)
def createColumnDDLBuilder(column: FieldSymbol, table: Table[_]): ColumnDDLBuilder = new ColumnDDLBuilder(column)
def createSequenceDDLBuilder(seq: Sequence[_]): SequenceDDLBuilder = new SequenceDDLBuilder(seq)
/** Holds lazily-compiled insert artifacts (one per compiler pipeline) for a single source node. */
class JdbcCompiledInsert(source: Node) {

  /** Bundles the compiled tree, the row `ResultConverter` and the builder result (SQL + fields). */
  class Artifacts(val compiled: Node, val converter: ResultConverter[JdbcResultConverterDomain, Any], val ibr: InsertBuilderResult) {
    def table: TableNode = ibr.table
    def sql: String = ibr.sql
    def fields: IndexedSeq[FieldSymbol] = ibr.fields
  }

  /** Run the given compiler on the source node and destructure the expected result shape.
   * Throws a MatchError if the compiler did not produce a statement + mapping pair. */
  protected[this] def compile(compiler: QueryCompiler): Artifacts = {
    val compiled = compiler.run(source).tree
    val ResultSetMapping(_, CompiledStatement(sql, ibr: InsertBuilderResult, _), CompiledMapping(conv, _)) = compiled
    new Artifacts(compiled, conv.asInstanceOf[ResultConverter[JdbcResultConverterDomain, Any]], ibr)
  }

  /** The compiled artifacts for standard insert statements. */
  lazy val standardInsert = compile(insertCompiler)

  /** The compiled artifacts for forced insert statements. */
  lazy val forceInsert = compile(forceInsertCompiler)

  /** The compiled artifacts for upsert statements. */
  lazy val upsert = compile(upsertCompiler)

  /** The compiled artifacts for 'check insert' statements. */
  lazy val checkInsert = compile(checkInsertCompiler)

  /** The compiled artifacts for 'update insert' statements. */
  lazy val updateInsert = compile(updateInsertCompiler)

  /** Build a list of columns and a matching `ResultConverter` for retrieving keys of inserted rows. */
  def buildReturnColumns(node: Node): (IndexedSeq[String], ResultConverter[JdbcResultConverterDomain, _], Boolean) = {
    if(!capabilities.contains(JdbcProfile.capabilities.returnInsertKey))
      throw new SlickException("This DBMS does not allow returning columns from INSERT statements")
    val ResultSetMapping(_, CompiledStatement(_, ibr: InsertBuilderResult, _), CompiledMapping(rconv, _)) =
      forceInsertCompiler.run(node).tree
    // returned key columns must belong to the same table that was inserted into
    if(ibr.table.baseIdentity != standardInsert.table.baseIdentity)
      throw new SlickException("Returned key columns must be from same table as inserted columns ("+
        ibr.table.baseIdentity+" != "+standardInsert.table.baseIdentity+")")
    // "other" = anything beyond a single AutoInc column; requires an extra driver capability
    val returnOther = ibr.fields.size > 1 || !ibr.fields.head.options.contains(ColumnOption.AutoInc)
    if(!capabilities.contains(JdbcProfile.capabilities.returnInsertOther) && returnOther)
      throw new SlickException("This DBMS allows only a single AutoInc column to be returned from an INSERT")
    (ibr.fields.map(_.name), rconv.asInstanceOf[ResultConverter[JdbcResultConverterDomain, _]], returnOther)
  }
}
/** Marker values describing which part of a SQL statement is currently being
 * built; tracked in `currentPart` via the `building` helper so that
 * context-sensitive SQL can be emitted. */
abstract class StatementPart
case object SelectPart extends StatementPart
case object FromPart extends StatementPart
case object WherePart extends StatementPart
case object OtherPart extends StatementPart
/** Create a SQL representation of a literal value.
 * For optional JDBC types, `None` renders as the SQL literal `null`. */
def valueToSQLLiteral(v: Any, tpe: Type): String = tpe match {
  case JdbcType(ti, true) =>
    // optional column: unwrap the Option before delegating to the type's literal form
    v.asInstanceOf[Option[Any]] match {
      case Some(inner) => ti.valueToSQLLiteral(inner)
      case None        => "null"
    }
  case JdbcType(ti, false) =>
    ti.valueToSQLLiteral(v)
}
// Immutable config options (to be overridden by subclasses)
/** The table name for scalar selects (e.g. "select 42 from DUAL;"), or `None` for
* scalar selects without a FROM clause ("select 42;"). */
val scalarFrom: Option[String] = None
/** Builder for SELECT and UPDATE statements. */
class QueryBuilder(val tree: Node, val state: CompilerState) { queryBuilder =>
// Immutable config options (to be overridden by subclasses)
protected val supportsTuples = true
protected val supportsCast = true
protected val supportsEmptyJoinConditions = true
protected val concatOperator: Option[String] = None
protected val hasPiFunction = true
protected val hasRadDegConversion = true
protected val pi = "3.1415926535897932384626433832795"
// Mutable state accessible to subclasses
protected val b = new SQLBuilder
protected var currentPart: StatementPart = OtherPart
protected val symbolName = new QuotingSymbolNamer(Some(state.symbolNamer))
protected val joins = new HashMap[Symbol, Join]
def sqlBuilder = b
final def buildSelect(): SQLBuilder.Result = {
buildComprehension(toComprehension(tree, true))
b.build
}
protected final def newSym = new AnonSymbol
/** Run `f` with `currentPart` temporarily set to `p`.
 * The previous value is restored even if `f` throws (the original version
 * skipped restoration on exceptions, leaving the builder in an inconsistent
 * state for any caller that continued after catching). */
@inline protected final def building(p: StatementPart)(f: => Unit): Unit = {
  val oldPart = currentPart
  currentPart = p
  try f finally currentPart = oldPart
}
protected def toComprehension(n: Node, liftExpression: Boolean = false): Comprehension = n match {
case c : Comprehension => c
case p: Pure =>
Comprehension(select = Some(p))
case t: TableNode =>
Comprehension(from = Seq(newSym -> t))
case u: Union =>
Comprehension(from = Seq(newSym -> u))
case n =>
if(liftExpression) toComprehension(Pure(n))
else throw new SlickException("Unexpected node "+n+" -- SQL prefix: "+b.build.sql)
}
protected def buildComprehension(c: Comprehension): Unit = {
val limit0 = c.fetch match {
case Some(LiteralNode(0L)) => true
case _ => false
}
scanJoins(c.from)
buildSelectClause(c)
buildFromClause(c.from)
if(limit0) b" where 1=0"
else buildWhereClause(c.where)
buildGroupByClause(c.groupBy)
buildOrderByClause(c.orderBy)
if(!limit0) buildFetchOffsetClause(c.fetch, c.offset)
}
protected def buildSelectClause(c: Comprehension) = building(SelectPart) {
b"select "
buildSelectModifiers(c)
c.select match {
case Some(Pure(StructNode(ch), _)) =>
b.sep(ch, ", ") { case (sym, n) =>
buildSelectPart(n)
b" as `$sym"
}
if(ch.isEmpty) b"1"
case Some(Pure(ProductNode(ch), _)) =>
b.sep(ch, ", ")(buildSelectPart)
if(ch.isEmpty) b"1"
case Some(Pure(n, _)) => buildSelectPart(n)
case None =>
if(c.from.length <= 1) b"*"
else b"`${c.from.last._1}.*"
}
}
protected def buildSelectModifiers(c: Comprehension) {}
protected def scanJoins(from: Seq[(Symbol, Node)]) {
for((sym, j: Join) <- from) {
joins += sym -> j
scanJoins(j.nodeGenerators)
}
}
protected def buildFromClause(from: Seq[(Symbol, Node)]) = building(FromPart) {
if(from.isEmpty) scalarFrom.foreach { s => b" from $s" }
else {
b" from "
b.sep(from, ", ") { case (sym, n) => buildFrom(n, Some(sym)) }
}
}
protected def buildWhereClause(where: Seq[Node]) = building(WherePart) {
if(!where.isEmpty) {
b" where "
expr(where.reduceLeft((a, b) => Library.And.typed[Boolean](a, b)), true)
}
}
protected def buildGroupByClause(groupBy: Option[Node]) = building(OtherPart) {
groupBy.foreach { e => b" group by !$e" }
}
protected def buildOrderByClause(order: Seq[(Node, Ordering)]) = building(OtherPart) {
if(!order.isEmpty) {
b" order by "
b.sep(order, ", "){ case (n, o) => buildOrdering(n, o) }
}
}
protected def buildFetchOffsetClause(fetch: Option[Node], offset: Option[Node]) = building(OtherPart) {
(fetch, offset) match {
/* SQL:2008 syntax */
case (Some(t), Some(d)) => b" offset $d row fetch next $t row only"
case (Some(t), None) => b" fetch next $t row only"
case (None, Some(d)) => b" offset $d row"
case _ =>
}
}
protected def buildSelectPart(n: Node): Unit = n match {
case c: Comprehension =>
b"("
buildComprehension(c)
b")"
case n =>
expr(n, true)
}
protected def buildFrom(n: Node, alias: Option[Symbol], skipParens: Boolean = false): Unit = building(FromPart) {
def addAlias = alias foreach { s => b += ' ' += symbolName(s) }
n match {
case t: TableNode =>
b += quoteTableName(t)
addAlias
case j @ Join(leftGen, rightGen, left, right, jt, on) =>
buildFrom(left, Some(leftGen))
b" ${jt.sqlName} join "
buildFrom(right, Some(rightGen))
on match {
case LiteralNode(true) =>
if(!supportsEmptyJoinConditions) b" on 1=1"
case _ => b" on !$on"
}
case Union(left, right, all, _, _) =>
b"\\("
buildFrom(left, None, true)
if(all) b" union all " else b" union "
buildFrom(right, None, true)
b"\\)"
addAlias
case n =>
b"\\("
buildComprehension(toComprehension(n, true))
b"\\)"
addAlias
}
}
def expr(n: Node, skipParens: Boolean = false): Unit = n match {
case (n @ LiteralNode(v)) :@ JdbcType(ti, option) =>
if(n.volatileHint || !ti.hasLiteralForm) b +?= { (p, idx, param) =>
if(option) ti.setOption(v.asInstanceOf[Option[Any]], p, idx)
else ti.setValue(v, p, idx)
} else b += valueToSQLLiteral(v, n.nodeType)
case QueryParameter(extractor, JdbcType(ti, option)) =>
b +?= { (p, idx, param) =>
if(option) ti.setOption(extractor(param).asInstanceOf[Option[Any]], p, idx)
else ti.setValue(extractor(param), p, idx)
}
case Library.Not(Library.==(l, LiteralNode(null))) =>
b"\\($l is not null\\)"
case Library.==(l, LiteralNode(null)) =>
b"\\($l is null\\)"
case Library.==(left: ProductNode, right: ProductNode) =>
b"\\("
if(supportsTuples) b"$left = $right"
else {
val cols = left.nodeChildren zip right.nodeChildren
b.sep(cols, " and "){ case (l,r) => expr(l); b += "="; expr(r) }
}
b"\\)"
case ProductNode(ch) =>
b"\\("
b.sep(ch, ", ")(expr(_))
b"\\)"
case RewriteBooleans.ToFakeBoolean(ch) =>
expr(IfThenElse(Vector(ch, LiteralNode(1), LiteralNode(0))), skipParens)
case RewriteBooleans.ToRealBoolean(ch) =>
expr(Library.==.typed[Boolean](ch, LiteralNode(true)), skipParens)
case Library.Exists(c: Comprehension) if(!supportsTuples) =>
/* If tuples are not supported, selecting multiple individial columns
* in exists(select ...) is probably not supported, either, so we rewrite
* such sub-queries to "select *". */
b"exists(!${c.copy(select = None)})"
case Library.Concat(l, r) if concatOperator.isDefined =>
b"\\($l${concatOperator.get}$r\\)"
case Library.User() if !capabilities.contains(RelationalProfile.capabilities.functionUser) =>
b += "''"
case Library.Database() if !capabilities.contains(RelationalProfile.capabilities.functionDatabase) =>
b += "''"
case Library.Pi() if !hasPiFunction => b += pi
case Library.Degrees(ch) if !hasRadDegConversion => b"(180.0/!${Library.Pi.typed(columnTypes.bigDecimalJdbcType)}*$ch)"
case Library.Radians(ch) if!hasRadDegConversion => b"(!${Library.Pi.typed(columnTypes.bigDecimalJdbcType)}/180.0*$ch)"
case s: SimpleFunction =>
if(s.scalar) b"{fn "
b"${s.name}("
b.sep(s.nodeChildren, ",")(expr(_, true))
b")"
if(s.scalar) b += '}'
case SimpleLiteral(w) => b += w
case s: SimpleExpression => s.toSQL(this)
case Library.Between(left, start, end) => b"$left between $start and $end"
case Library.CountDistinct(e) => b"count(distinct $e)"
case Library.Like(l, r) => b"\\($l like $r\\)"
case Library.Like(l, r, LiteralNode(esc: Char)) =>
if(esc == '\\'' || esc == '%' || esc == '_') throw new SlickException("Illegal escape character '"+esc+"' for LIKE expression")
// JDBC defines an {escape } syntax but the unescaped version is understood by more DBs/drivers
b"\\($l like $r escape '$esc'\\)"
case Library.StartsWith(n, LiteralNode(s: String)) =>
b"\\($n like ${valueToSQLLiteral(likeEncode(s)+'%', ScalaBaseType.stringType)} escape '^'\\)"
case Library.EndsWith(n, LiteralNode(s: String)) =>
b"\\($n like ${valueToSQLLiteral("%"+likeEncode(s), ScalaBaseType.stringType)} escape '^'\\)"
case Library.Trim(n) =>
expr(Library.LTrim.typed[String](Library.RTrim.typed[String](n)), skipParens)
case Library.Substring(n, start, end) =>
b"\\({fn substring($n, ${QueryParameter.constOp[Int]("+")(_ + _)(start, LiteralNode(1))}, ${QueryParameter.constOp[Int]("-")(_ - _)(end, start)})}\\)"
case Library.Substring(n, start) =>
b"\\({fn substring($n, ${QueryParameter.constOp[Int]("+")(_ + _)(start, LiteralNode(1))})}\\)"
case Library.IndexOf(n, str) => b"\\({fn locate($str, $n)} - 1\\)"
case Library.Cast(ch @ _*) =>
val tn =
if(ch.length == 2) ch(1).asInstanceOf[LiteralNode].value.asInstanceOf[String]
else jdbcTypeFor(n.nodeType).sqlTypeName
if(supportsCast) b"cast(${ch(0)} as $tn)"
else b"{fn convert(!${ch(0)},$tn)}"
case Library.SilentCast(ch) => b"$ch"
case s: SimpleBinaryOperator => b"\\(${s.left} ${s.name} ${s.right}\\)"
case Apply(sym: Library.SqlOperator, ch) =>
b"\\("
if(ch.length == 1) {
b"${sym.name} ${ch.head}"
} else b.sep(ch, " " + sym.name + " ")(expr(_))
b"\\)"
case Apply(sym: Library.JdbcFunction, ch) =>
b"{fn ${sym.name}("
b.sep(ch, ",")(expr(_, true))
b")}"
case Apply(sym: Library.SqlFunction, ch) =>
b"${sym.name}("
b.sep(ch, ",")(expr(_, true))
b")"
case c: IfThenElse =>
b"(case"
c.ifThenClauses.foreach { case (l, r) => b" when $l then $r" }
c.elseClause match {
case LiteralNode(null) =>
case n => b" else $n"
}
b" end)"
case RowNumber(by) =>
b"row_number() over("
if(by.isEmpty) b"order by (select 1)"
else buildOrderByClause(by)
b")"
case Path(field :: (rest @ (_ :: _))) =>
val struct = rest.reduceRight[Symbol] {
case (ElementSymbol(idx), z) => joins(z).nodeGenerators(idx-1)._1
}
b += symbolName(struct) += '.' += symbolName(field)
case OptionApply(ch) => expr(ch, skipParens)
case n => // try to build a sub-query
b"\\("
buildComprehension(toComprehension(n))
b"\\)"
}
protected def buildOrdering(n: Node, o: Ordering) {
expr(n)
if(o.direction.desc) b" desc"
if(o.nulls.first) b" nulls first"
else if(o.nulls.last) b" nulls last"
}
/** Build an UPDATE statement from the compiled query tree.
 * Only queries of the shape "single table, optional WHERE, SELECT of plain
 * table columns" can be translated; any other shape is rejected. */
def buildUpdate: SQLBuilder.Result = {
  val (gen, from, where, select) = tree match {
    case Comprehension(Seq((sym, from: TableNode)), where, None, _, Some(Pure(select, _)), None, None) => select match {
      // single column selected directly from the table
      case f @ Select(Ref(struct), _) if struct == sym => (sym, from, where, Seq(f.field))
      // tuple of columns, all selected from the same table reference
      case ProductNode(ch) if ch.forall{ case Select(Ref(struct), _) if struct == sym => true; case _ => false} =>
        (sym, from, where, ch.map{ case Select(Ref(_), field) => field })
      case _ => throw new SlickException("A query for an UPDATE statement must select table columns only -- Unsupported shape: "+select)
    }
    case o => throw new SlickException("A query for an UPDATE statement must resolve to a comprehension with a single table -- Unsupported shape: "+o)
  }
  val qtn = quoteTableName(from)
  symbolName(gen) = qtn // Alias table to itself because UPDATE does not support aliases
  b"update $qtn set "
  // one "col = ?" assignment per selected field
  b.sep(select, ", ")(field => b += symbolName(field) += " = ?")
  if(!where.isEmpty) {
    b" where "
    // multiple filter predicates are AND-ed together
    expr(where.reduceLeft((a, b) => Library.And.typed[Boolean](a, b)), true)
  }
  b.build
}
/** Build a DELETE statement from the compiled query tree.
 * Requires a single-table comprehension without .take/.drop. */
def buildDelete: SQLBuilder.Result = {
  def fail(msg: String) =
    throw new SlickException("Invalid query for DELETE statement: " + msg)
  val (gen, from, where) = tree match {
    case Comprehension(from, where, _, _, Some(Pure(select, _)), fetch, offset) =>
      if(fetch.isDefined || offset.isDefined) fail(".take and .drop are not supported")
      from match {
        case Seq((sym, from: TableNode)) => (sym, from, where)
        case from => fail("A single source table is required, found: "+from)
      }
    case o => fail("Unsupported shape: "+o+" -- A single SQL comprehension is required")
  }
  val qtn = quoteTableName(from)
  symbolName(gen) = qtn // Alias table to itself because DELETE does not support aliases
  b"delete from $qtn"
  if(!where.isEmpty) {
    b" where "
    // multiple filter predicates are AND-ed together
    expr(where.reduceLeft((a, b) => Library.And.typed[Boolean](a, b)), true)
  }
  b.build
}
}
/** QueryBuilder mix-in for pagination based on RowNumber. */
trait RowNumberPagination extends QueryBuilder {
final case class StarAnd(child: Node) extends UnaryNode with SimplyTypedNode {
type Self = StarAnd
protected[this] def nodeRebuild(child: Node) = StarAnd(child)
protected def buildType = UnassignedType
}
override def expr(c: Node, skipParens: Boolean = false): Unit = c match {
case StarAnd(ch) => b"*, !$ch"
case _ => super.expr(c, skipParens)
}
override protected def buildComprehension(c: Comprehension) {
if(c.fetch.isDefined || c.offset.isDefined) {
val r = newSym
val rn = symbolName(r)
val tn = symbolName(newSym)
val c2 = makeSelectPageable(c, r)
val c3 = Phase.fixRowNumberOrdering.fixRowNumberOrdering(c2, None).asInstanceOf[Comprehension]
b"select "
buildSelectModifiers(c)
c3.select match {
case Some(Pure(StructNode(ch), _)) =>
b.sep(ch.filter { case (_, RowNumber(_)) => false; case _ => true }, ", ") {
case (sym, StarAnd(RowNumber(_))) => b"*"
case (sym, _) => b += symbolName(sym)
}
case o => throw new SlickException("Unexpected node "+o+" in SELECT slot of "+c)
}
b" from ("
super.buildComprehension(c3)
b") $tn where $rn"
(c.fetch, c.offset) match {
case (Some(t), Some(d)) => b" between ${QueryParameter.constOp[Long]("+")(_ + _)(d, LiteralNode(1L))} and ${QueryParameter.constOp[Long]("+")(_ + _)(t, d)}"
case (Some(t), None ) => b" between 1 and $t"
case (None, Some(d)) => b" > $d"
case _ => throw new SlickException("Unexpected empty fetch/offset")
}
b" order by $rn"
}
else super.buildComprehension(c)
}
/** Create aliases for all selected rows (unless it is a "select *" query),
* add a RowNumber column, and remove FETCH and OFFSET clauses. The SELECT
* clause of the resulting Comprehension always has the shape
* Some(Pure(StructNode(_))). */
protected def makeSelectPageable(c: Comprehension, rn: AnonSymbol): Comprehension = c.select match {
case Some(Pure(StructNode(ch), _)) =>
c.copy(select = Some(Pure(StructNode(ch :+ (rn -> RowNumber())))), fetch = None, offset = None)
case Some(Pure(ProductNode(ch), _)) =>
c.copy(select = Some(Pure(StructNode(ch.toIndexedSeq.map(n => newSym -> n) :+ (rn -> RowNumber())))), fetch = None, offset = None)
case Some(Pure(n, _)) =>
c.copy(select = Some(Pure(StructNode(IndexedSeq(newSym -> n, rn -> RowNumber())))), fetch = None, offset = None)
case None =>
// should not happen at the outermost layer, so copying an extra row does not matter
c.copy(select = Some(Pure(StructNode(IndexedSeq(rn -> StarAnd(RowNumber()))))), fetch = None, offset = None)
}
}
/** QueryBuilder mix-in for Oracle-style ROWNUM (applied before ORDER BY
* and GROUP BY) instead of the standard SQL ROWNUMBER(). */
trait OracleStyleRowNum extends QueryBuilder {
override protected def toComprehension(n: Node, liftExpression: Boolean = false) =
super.toComprehension(n, liftExpression) match {
case c @ Comprehension(from, _, None, orderBy, Some(sel), _, _) if !orderBy.isEmpty && hasRowNumber(sel) =>
// Pull the SELECT clause with the ROWNUM up into a new query
val paths = findPaths(from.map(_._1).toSet, sel).map(p => (p, new AnonSymbol)).toMap
val inner = c.copy(select = Some(Pure(StructNode(paths.toIndexedSeq.map { case (n,s) => (s,n) }))))
val gen = new AnonSymbol
val newSel = sel.replace {
case s: Select => paths.get(s).fold(s) { sym => Select(Ref(gen), sym) }
}
Comprehension(Seq((gen, inner)), select = Some(newSel))
case c => c
}
override def expr(n: Node, skipParens: Boolean = false) = n match {
case RowNumber(_) => b"rownum"
case _ => super.expr(n, skipParens)
}
}
/** Builder for INSERT statements. */
class InsertBuilder(val ins: Insert) {
  // the insert node must consist of plain field selects only
  protected val Insert(_, table: TableNode, ProductNode(rawColumns)) = ins
  protected val syms: IndexedSeq[FieldSymbol] = rawColumns.map { case Select(_, fs: FieldSymbol) => fs }(collection.breakOut)
  protected lazy val allNames = syms.map(fs => quoteIdentifier(fs.name))
  // one "?" placeholder per column, e.g. "(?,?,?)"
  protected lazy val allVars = syms.map(_ => "?").mkString("(", ",", ")")
  protected lazy val tableName = quoteTableName(table)

  /** Build the standard "insert into t (...) values (?,...)" statement. The returned
   * result also supports Query-based inserts by appending a compiled SELECT to the prefix. */
  def buildInsert: InsertBuilderResult = {
    val start = buildInsertStart
    new InsertBuilderResult(table, s"$start values $allVars", syms) {
      override def buildInsert(compiledQuery: Node) = {
        val (_, sbr: SQLBuilder.Result) = CodeGen.findResult(compiledQuery)
        SQLBuilder.Result(start + sbr.sql, sbr.setter)
      }
    }
  }

  // hook for subclasses to rewrite the mapping Node (e.g. parameter reordering)
  def transformMapping(n: Node) = n

  protected def buildInsertStart: String = allNames.mkString(s"insert into $tableName (", ",", ") ")

  /** Reorder InsertColumn indices in a mapping Node in the order of the given
   * sequence of FieldSymbols (which may contain duplicates).
   * Note: ElementSymbol indices are 1-based, hence the +1 / idx-1 adjustments. */
  protected def reorderColumns(n: Node, order: IndexedSeq[FieldSymbol]): Node = {
    val newIndices = order.zipWithIndex.groupBy(_._1)
    lazy val reordering: IndexedSeq[IndexedSeq[Int]] = syms.map(fs => newIndices(fs).map(_._2 + 1))
    n.replace { case InsertColumn(IndexedSeq(Select(ref, ElementSymbol(idx))), fs, tpe) =>
      val newPaths = reordering(idx-1).map(i => Select(ref, ElementSymbol(i)))
      InsertColumn(newPaths, fs, tpe).nodeWithComputedType()
    }
  }
}
/** Builder for upsert statements, builds standard SQL MERGE statements by default. */
class UpsertBuilder(ins: Insert) extends InsertBuilder(ins) {
  // "pk" = primary-key columns (used in the match condition); "soft" = everything else (updated on match)
  protected lazy val (pkSyms, softSyms) = syms.partition(_.options.contains(ColumnOption.PrimaryKey))
  protected lazy val pkNames = pkSyms.map { fs => quoteIdentifier(fs.name) }
  protected lazy val softNames = softSyms.map { fs => quoteIdentifier(fs.name) }
  // AutoInc columns are excluded from the insert arm of the merge
  protected lazy val nonAutoIncSyms = syms.filterNot(_.options contains ColumnOption.AutoInc)
  protected lazy val nonAutoIncNames = nonAutoIncSyms.map(fs => quoteIdentifier(fs.name))

  /** Build "merge into t using (select ? as c1,... ) s on (...) when matched ... when not matched ...". */
  override def buildInsert: InsertBuilderResult = {
    val start = buildMergeStart
    val end = buildMergeEnd
    // parameters are fed in via a synthetic SELECT aliased to the column names
    val paramSel = "select " + allNames.map(n => "? as "+n).mkString(",") + scalarFrom.map(n => " from "+n).getOrElse("")
    // We'd need a way to alias the column names at the top level in order to support merges from a source Query
    new InsertBuilderResult(table, start + paramSel + end, syms)
  }

  protected def buildMergeStart: String = s"merge into $tableName t using ("
  protected def buildMergeEnd: String = {
    val updateCols = softNames.map(n => s"t.$n=s.$n").mkString(", ")
    val insertCols = nonAutoIncNames /*.map(n => s"t.$n")*/ .mkString(", ")
    val insertVals = nonAutoIncNames.map(n => s"s.$n").mkString(", ")
    val cond = pkNames.map(n => s"t.$n=s.$n").mkString(" and ")
    s") s on ($cond) when matched then update set $updateCols when not matched then insert ($insertCols) values ($insertVals)"
  }
}
/** Builder for SELECT statements that can be used to check for the existence of
 * primary keys supplied to an INSERT operation. Used by the insertOrUpdate emulation
 * on databases that don't support this in a single server-side statement. */
class CheckInsertBuilder(ins: Insert) extends UpsertBuilder(ins) {
  // extends UpsertBuilder to reuse its pkSyms/pkNames partitioning
  override def buildInsert: InsertBuilderResult =
    new InsertBuilderResult(table, pkNames.map(n => s"$n=?").mkString(s"select 1 from $tableName where ", " and ", ""), pkSyms)
}
/** Builder for UPDATE statements used as part of an insertOrUpdate operation
 * on databases that don't support this in a single server-side statement. */
class UpdateInsertBuilder(ins: Insert) extends UpsertBuilder(ins) {
  override def buildInsert: InsertBuilderResult =
    new InsertBuilderResult(table,
      "update " + tableName + " set " + softNames.map(n => s"$n=?").mkString(",") + " where " + pkNames.map(n => s"$n=?").mkString(" and "),
      // parameter order: non-key values first, then the primary key for the WHERE clause
      softSyms ++ pkSyms)
  // reorder the mapping to match the "soft columns then pk columns" parameter order above
  override def transformMapping(n: Node) = reorderColumns(n, softSyms ++ pkSyms)
}
/** Builder for various DDL statements. */
class TableDDLBuilder(val table: Table[_]) { self =>
protected val tableNode = table.toNode.asInstanceOf[TableExpansion].table.asInstanceOf[TableNode]
protected val columns: Iterable[ColumnDDLBuilder] = table.create_*.map(fs => createColumnDDLBuilder(fs, table))
protected val indexes: Iterable[Index] = table.indexes
protected val foreignKeys: Iterable[ForeignKey] = table.foreignKeys
protected val primaryKeys: Iterable[PrimaryKey] = table.primaryKeys
def buildDDL: DDL = {
if(primaryKeys.size > 1)
throw new SlickException("Table "+tableNode.tableName+" defines multiple primary keys ("
+ primaryKeys.map(_.name).mkString(", ") + ")")
DDL(createPhase1, createPhase2, dropPhase1, dropPhase2)
}
protected def createPhase1 = Iterable(createTable) ++ primaryKeys.map(createPrimaryKey) ++ indexes.map(createIndex)
protected def createPhase2 = foreignKeys.map(createForeignKey)
protected def dropPhase1 = foreignKeys.map(dropForeignKey)
protected def dropPhase2 = primaryKeys.map(dropPrimaryKey) ++ Iterable(dropTable)
protected def createTable: String = {
val b = new StringBuilder append "create table " append quoteTableName(tableNode) append " ("
var first = true
for(c <- columns) {
if(first) first = false else b append ","
c.appendColumn(b)
}
addTableOptions(b)
b append ")"
b.toString
}
protected def addTableOptions(b: StringBuilder) {}
protected def dropTable: String = "drop table "+quoteTableName(tableNode)
protected def createIndex(idx: Index): String = {
val b = new StringBuilder append "create "
if(idx.unique) b append "unique "
b append "index " append quoteIdentifier(idx.name) append " on " append quoteTableName(tableNode) append " ("
addIndexColumnList(idx.on, b, idx.table.tableName)
b append ")"
b.toString
}
protected def createForeignKey(fk: ForeignKey): String = {
val sb = new StringBuilder append "alter table " append quoteTableName(tableNode) append " add "
addForeignKey(fk, sb)
sb.toString
}
protected def addForeignKey(fk: ForeignKey, sb: StringBuilder) {
sb append "constraint " append quoteIdentifier(fk.name) append " foreign key("
addForeignKeyColumnList(fk.linearizedSourceColumns, sb, tableNode.tableName)
sb append ") references " append quoteTableName(fk.targetTable) append "("
addForeignKeyColumnList(fk.linearizedTargetColumnsForOriginalTargetTable, sb, fk.targetTable.tableName)
sb append ") on update " append fk.onUpdate.action
sb append " on delete " append fk.onDelete.action
}
protected def createPrimaryKey(pk: PrimaryKey): String = {
val sb = new StringBuilder append "alter table " append quoteTableName(tableNode) append " add "
addPrimaryKey(pk, sb)
sb.toString
}
protected def addPrimaryKey(pk: PrimaryKey, sb: StringBuilder) {
sb append "constraint " append quoteIdentifier(pk.name) append " primary key("
addPrimaryKeyColumnList(pk.columns, sb, tableNode.tableName)
sb append ")"
}
protected def dropForeignKey(fk: ForeignKey): String =
"alter table " + quoteTableName(tableNode) + " drop constraint " + quoteIdentifier(fk.name)
protected def dropPrimaryKey(pk: PrimaryKey): String =
"alter table " + quoteTableName(tableNode) + " drop constraint " + quoteIdentifier(pk.name)
protected def addIndexColumnList(columns: IndexedSeq[Node], sb: StringBuilder, requiredTableName: String) =
addColumnList(columns, sb, requiredTableName, "index")
protected def addForeignKeyColumnList(columns: IndexedSeq[Node], sb: StringBuilder, requiredTableName: String) =
addColumnList(columns, sb, requiredTableName, "foreign key constraint")
/** Append the quoted primary-key column names. The original passed
 * "foreign key constraint" as the error-message label here (a copy-paste
 * from the method above), mislabeling primary-key validation failures. */
protected def addPrimaryKeyColumnList(columns: IndexedSeq[Node], sb: StringBuilder, requiredTableName: String) =
  addColumnList(columns, sb, requiredTableName, "primary key constraint")
protected def addColumnList(columns: IndexedSeq[Node], sb: StringBuilder, requiredTableName: String, typeInfo: String) {
var first = true
for(c <- columns) c match {
case Select(t: TableNode, field: FieldSymbol) =>
if(first) first = false
else sb append ","
sb append quoteIdentifier(field.name)
if(requiredTableName != t.tableName)
throw new SlickException("All columns in "+typeInfo+" must belong to table "+requiredTableName)
case _ => throw new SlickException("Cannot use column "+c+" in "+typeInfo+" (only named columns are allowed)")
}
}
}
/** Builder for column specifications in DDL statements. */
class ColumnDDLBuilder(column: FieldSymbol) {
protected val JdbcType(jdbcType, isOption) = column.tpe
protected var sqlType: String = null
protected var varying: Boolean = false
protected var size: Option[Int] = None
protected var customSqlType: Boolean = false
protected var notNull = !isOption
protected var autoIncrement = false
protected var primaryKey = false
protected var defaultLiteral: String = null
init()
/** Validate column options and derive the SQL type.
 * DBType and Length are mutually exclusive since both determine the SQL type/size.
 * Fixes a missing `s` interpolator: the original error message printed the
 * literal text "${column.name}" instead of the column's name. */
protected def init() {
  // count explicit type-determining options; more than one is ambiguous
  val explicitTypeOptions = column.options.count {
    case _: ColumnOption.DBType    => true
    case _: ColumnOption.Length[_] => true
    case _                         => false
  }
  if(explicitTypeOptions > 1)
    throw new SlickException(s"Please specify either ColumnOption DBType or Length, not both for column ${column.name}.")
  for(o <- column.options) handleColumnOption(o)
  if(sqlType eq null) sqlType = jdbcType.sqlTypeName
  else customSqlType = true
}
protected def handleColumnOption(o: ColumnOption[_]): Unit = o match {
case ColumnOption.DBType(s) => sqlType = s
case ColumnOption.Length(s,v) =>
size = Some(s)
varying = v
case ColumnOption.NotNull => notNull = true
case ColumnOption.Nullable => notNull = false
case ColumnOption.AutoInc => autoIncrement = true
case ColumnOption.PrimaryKey => primaryKey = true
case ColumnOption.Default(v) => defaultLiteral = valueToSQLLiteral(v, column.tpe)
}
/** Append the column's SQL type: an explicit size maps to [VAR]CHAR(n),
 * otherwise the (possibly custom) SQL type name is used.
 * Replaces the non-idiomatic `size == None` check with a pattern match. */
def appendType(sb: StringBuilder): Unit = size match {
  case None =>
    sb append sqlType
  case Some(s) =>
    // TODO: this probably needs to be generalized and unified with defaultSqlTypeName
    sb append (if(varying) "VARCHAR" else "CHAR")
    sb append "("+s+")"
}
def appendColumn(sb: StringBuilder) {
sb append quoteIdentifier(column.name) append ' '
appendType(sb)
appendOptions(sb)
}
protected def appendOptions(sb: StringBuilder) {
if(defaultLiteral ne null) sb append " DEFAULT " append defaultLiteral
if(autoIncrement) sb append " GENERATED BY DEFAULT AS IDENTITY(START WITH 1)"
if(notNull) sb append " NOT NULL"
if(primaryKey) sb append " PRIMARY KEY"
}
}
/** Builder for DDL statements for sequences. */
class SequenceDDLBuilder(seq: Sequence[_]) {
  /** CREATE/DROP SEQUENCE statements; each optional clause is emitted only when configured. */
  def buildDDL: DDL = {
    val b = new StringBuilder append "create sequence " append quoteIdentifier(seq.name)
    seq._increment.foreach { b append " increment " append _ }
    seq._minValue.foreach { b append " minvalue " append _ }
    seq._maxValue.foreach { b append " maxvalue " append _ }
    seq._start.foreach { b append " start " append _ }
    if(seq._cycle) b append " cycle"
    DDL(b.toString, "drop sequence " + quoteIdentifier(seq.name))
  }
}
}
/** Result of building an INSERT statement: the target table, the SQL text and
  * the fields covered by the statement. */
class InsertBuilderResult(val table: TableNode, val sql: String, val fields: IndexedSeq[FieldSymbol]) {
  /** Query-based inserts are unsupported here; subclasses may override. */
  def buildInsert(compiledQuery: Node): SQLBuilder.Result = {
    val message = "Building Query-based inserts from this InsertBuilderResult is not supported"
    throw new SlickException(message)
  }
}
|
nuodb/slick
|
src/main/scala/scala/slick/driver/JdbcStatementBuilderComponent.scala
|
Scala
|
bsd-2-clause
| 34,689
|
/*
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.parse.helper
import laika.parse.core.{CharSequenceReader, ParseResult, Parser, Parsers}
/** Test mixin that runs a single default parser against plain string input. */
trait DefaultParserHelpers[T] { self: Parsers =>

  /** The parser exercised by `Parsing(...)`. */
  def defaultParser: Parser[T]

  /** Entry point: `Parsing("...")` feeds the source to `defaultParser`. */
  object Parsing {
    def apply (source: String): ParseResult[T] = {
      val reader = new CharSequenceReader(source)
      defaultParser(reader)
    }
  }
}
|
amuramatsu/Laika
|
core/src/test/scala/laika/parse/helper/DefaultParserHelpers.scala
|
Scala
|
apache-2.0
| 936
|
package keystoneml.workflow
import org.apache.spark.rdd.RDD
/**
 * A [[TransformerOperator]] that gathers multiple datasets of `T` into a dataset of `Seq[T]`
 * (or individual datums of `T` into a single `Seq[T]`).
 */
private[workflow] case class GatherTransformerOperator[T]() extends TransformerOperator {
  override private[workflow] def singleTransform(inputs: Seq[DatumExpression]): Any =
    inputs.map(input => input.get.asInstanceOf[T])

  override private[workflow] def batchTransform(inputs: Seq[DatasetExpression]): RDD[_] = {
    // Lift every datum into a singleton Seq, then zip the RDDs pairwise,
    // concatenating the accumulated sequences element-wise.
    val wrapped = inputs.map(_.get.asInstanceOf[RDD[T]].map(datum => Seq(datum)))
    wrapped.reduceLeft { (left, right) =>
      left.zip(right).map { case (acc, next) => acc ++ next }
    }
  }
}
|
amplab/keystone
|
src/main/scala/keystoneml/workflow/GatherTransformerOperator.scala
|
Scala
|
apache-2.0
| 668
|
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt.internal.util
import collection.mutable
/** A read-only map whose keys and values share a type parameter:
  * a key `K[T]` is always associated with a value `V[T]`. */
trait RMap[K[_], V[_]] {
  /** Returns the value for `k`; fails when the key is absent. */
  def apply[T](k: K[T]): V[T]
  /** Returns the value for `k`, or `None` when the key is absent. */
  def get[T](k: K[T]): Option[V[T]]
  /** Whether a value is associated with `k`. */
  def contains[T](k: K[T]): Boolean
  /** All entries as untyped pairs. */
  def toSeq: Seq[(K[_], V[_])]
  /** All entries as `TPair`s; the shared type parameter of each pair is
    * restored via an unchecked cast (relies on the map's typing invariant). */
  def toTypedSeq: Seq[TPair[_]] = toSeq.map {
    case (k: K[t], v) => TPair[t](k, v.asInstanceOf[V[t]])
  }
  def keys: Iterable[K[_]]
  def values: Iterable[V[_]]
  def isEmpty: Boolean
  /** An entry whose key and value are statically known to share `T`. */
  sealed case class TPair[T](key: K[T], value: V[T])
}
/** An immutable [[RMap]]: every update returns a new map. Also usable as a
  * natural transformation `K ~> V`. */
trait IMap[K[_], V[_]] extends (K ~> V) with RMap[K, V] {
  /** Returns a new map with `k` bound to `v`. */
  def put[T](k: K[T], v: V[T]): IMap[K, V]
  /** Returns a new map without an entry for `k`. */
  def remove[T](k: K[T]): IMap[K, V]
  /** Returns a new map where `k` is bound to `f` applied to the current value,
    * or to `f(init)` when `k` is absent. */
  def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): IMap[K, V]
  /** Transforms every value with the natural transformation `f`. */
  def mapValues[V2[_]](f: V ~> V2): IMap[K, V2]
  /** Splits this map into two by mapping each value to either `VL` or `VR`. */
  def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l)
    : (IMap[K, VL], IMap[K, VR])
}
/** A mutable [[RMap]]: updates modify the map in place. */
trait PMap[K[_], V[_]] extends (K ~> V) with RMap[K, V] {
  /** Binds `k` to `v`, replacing any previous binding. */
  def update[T](k: K[T], v: V[T]): Unit
  /** Removes `k`, returning the previous value if one was present. */
  def remove[T](k: K[T]): Option[V[T]]
  /** Returns the value for `k`, computing and storing `make` when absent. */
  def getOrUpdate[T](k: K[T], make: => V[T]): V[T]
  /** Rebinds `k` to `f` of its current value (or `f(init)` when absent) and
    * returns the newly stored value. */
  def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T]
}
object PMap {
  /** View a `PMap` as a plain function from keys to values. */
  implicit def toFunction[K[_], V[_]](map: PMap[K, V]): K[_] => V[_] = key => map(key)

  /** A fresh, empty `PMap` backed by a mutable hash map. */
  def empty[K[_], V[_]]: PMap[K, V] = new DelegatingPMap[K, V](mutable.HashMap.empty)
}
object IMap {

  /**
   * Only suitable for K that is invariant in its type parameter.
   * Option and List keys are not suitable, for example,
   * because None <:< Option[String] and None <: Option[Int].
   */
  def empty[K[_], V[_]]: IMap[K, V] = new IMap0[K, V](Map.empty)

  /** [[IMap]] backed by an untyped immutable `Map`. The typed interface is
    * recovered via unchecked casts, which is sound as long as entries are
    * only added through `put` with matching key/value type parameters. */
  private[this] class IMap0[K[_], V[_]](backing: Map[K[_], V[_]])
      extends AbstractRMap[K, V]
      with IMap[K, V] {
    def get[T](k: K[T]): Option[V[T]] = (backing get k).asInstanceOf[Option[V[T]]]
    def put[T](k: K[T], v: V[T]) = new IMap0[K, V](backing.updated(k, v))
    def remove[T](k: K[T]) = new IMap0[K, V](backing - k)
    def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]) =
      put(k, f(this get k getOrElse init))
    // NOTE(review): pre-2.13 `Map.mapValues` returns a lazy view, so `f` may be
    // re-applied on each lookup — confirm this is intended here.
    def mapValues[V2[_]](f: V ~> V2) =
      new IMap0[K, V2](backing.mapValues(x => f(x)))
    def mapSeparate[VL[_], VR[_]](f: V ~> ({ type l[T] = Either[VL[T], VR[T]] })#l) = {
      // Tag each entry Left/Right according to f, then partition into two maps.
      val mapped = backing.iterator.map {
        case (k, v) =>
          f(v) match {
            case Left(l) => Left((k, l))
            case Right(r) => Right((k, r))
          }
      }
      val (l, r) = Util.separateE[(K[_], VL[_]), (K[_], VR[_])](mapped.toList)
      (new IMap0[K, VL](l.toMap), new IMap0[K, VR](r.toMap))
    }
    def toSeq = backing.toSeq
    def keys = backing.keys
    def values = backing.values
    def isEmpty = backing.isEmpty
    override def toString = backing.toString
  }
}
/** Skeletal [[RMap]] that derives `apply` and `contains` from `get`. */
abstract class AbstractRMap[K[_], V[_]] extends RMap[K, V] {
  /** Unconditional lookup; fails with `NoSuchElementException` when absent. */
  def apply[T](k: K[T]): V[T] = get(k).get
  /** True exactly when `get` yields a value for `k`. */
  def contains[T](k: K[T]): Boolean = !get(k).isEmpty
}
/**
 * Only suitable for K that is invariant in its type parameter.
 * Option and List keys are not suitable, for example,
 * because None <:< Option[String] and None <: Option[Int].
 */
class DelegatingPMap[K[_], V[_]](backing: mutable.Map[K[_], V[_]])
    extends AbstractRMap[K, V]
    with PMap[K, V] {
  def get[T](k: K[T]): Option[V[T]] = coerceOpt[T](backing.get(k))
  def update[T](k: K[T], v: V[T]): Unit = { backing.put(k, v); () }
  def remove[T](k: K[T]) = coerceOpt(backing.remove(k))
  def getOrUpdate[T](k: K[T], make: => V[T]) = coerce[T](backing.getOrElseUpdate(k, make))
  def mapValue[T](k: K[T], init: V[T], f: V[T] => V[T]): V[T] = {
    val current = get(k).getOrElse(init)
    val replacement = f(current)
    update(k, replacement)
    replacement
  }
  def toSeq = backing.toSeq
  def keys = backing.keys
  def values = backing.values
  def isEmpty = backing.isEmpty
  // Unchecked casts are sound because entries are only ever stored with
  // matching K[T]/V[T] pairs through the typed interface above.
  private[this] def coerce[T](v: V[_]): V[T] = v.asInstanceOf[V[T]]
  private[this] def coerceOpt[T](o: Option[V[_]]): Option[V[T]] = o map coerce[T]
  override def toString = backing.toString
}
|
Duhemm/sbt
|
internal/util-collection/src/main/scala/sbt/internal/util/PMap.scala
|
Scala
|
bsd-3-clause
| 3,941
|
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.bigquery
import com.spotify.scio.bigquery.client.BigQuery
import org.scalatest.matchers.should.Matchers
import org.scalatest.flatspec.AnyFlatSpec
// scio-test/it:runMain com.spotify.scio.PopulateTestData to re-populate data for integration tests
// Integration tests for BigQueryPartitionUtil: resolving the "$LATEST"
// placeholder in table references, in both legacy ([project:dataset.table])
// and standard SQL (`project.dataset.table`) syntax. Requires access to the
// data-integration-test project.
class BigQueryPartitionUtilIT extends AnyFlatSpec with Matchers {
  // Real BigQuery client — these tests hit live tables.
  val bq: BigQuery = BigQuery.defaultInstance()

  // $LATEST on several legacy references resolves to the latest partition
  // common to all of them (here 20170102).
  "latestQuery" should "work with legacy syntax" in {
    val input =
      """
        |SELECT *
        |FROM [data-integration-test:samples_eu.shakespeare]
        |JOIN [data-integration-test:partition_a.table_$LATEST]
        |JOIN [data-integration-test:partition_b.table_$LATEST]
        |WHERE x = 0
      """.stripMargin
    val expected = input.replace("$LATEST", "20170102")
    BigQueryPartitionUtil.latestQuery(bq, input) shouldBe expected
  }

  // Same resolution for standard SQL backtick syntax.
  it should "work with SQL syntax" in {
    val input =
      """
        |SELECT *
        |FROM `data-integration-test.samples_eu.shakespeare`
        |JOIN `data-integration-test.partition_a.table_$LATEST`
        |JOIN `data-integration-test.partition_b.table_$LATEST`
        |WHERE x = 0
      """.stripMargin
    val expected = input.replace("$LATEST", "20170102")
    BigQueryPartitionUtil.latestQuery(bq, input) shouldBe expected
  }

  // Queries without $LATEST must be passed through unchanged.
  it should "work with legacy syntax without $LATEST" in {
    val input = "SELECT * FROM [data-integration-test:samples_eu.shakespeare]"
    BigQueryPartitionUtil.latestQuery(bq, input) shouldBe input
  }

  it should "work with SQL syntax without $LATEST" in {
    val input = "SELECT * FROM `data-integration-test.samples_eu.shakespeare`"
    BigQueryPartitionUtil.latestQuery(bq, input) shouldBe input
  }

  // When the referenced tables share no common partition, resolution fails
  // with an IllegalArgumentException listing every offending reference.
  it should "fail legacy syntax without latest common partition" in {
    val input =
      """
        |SELECT *
        |FROM [data-integration-test:samples_eu.shakespeare]
        |JOIN [data-integration-test:partition_a.table_$LATEST]
        |JOIN [data-integration-test:partition_b.table_$LATEST]
        |JOIN [data-integration-test:partition_c.table_$LATEST]
        |WHERE x = 0
      """.stripMargin
    val msg = "requirement failed: Cannot find latest common partition for " +
      "[data-integration-test:partition_a.table_$LATEST], " +
      "[data-integration-test:partition_b.table_$LATEST], " +
      "[data-integration-test:partition_c.table_$LATEST]"
    the[IllegalArgumentException] thrownBy {
      BigQueryPartitionUtil.latestQuery(bq, input)
    } should have message msg
  }

  it should "fail SQL syntax without latest common partition" in {
    val input =
      """
        |SELECT *
        |FROM `data-integration-test.samples_eu.shakespeare`
        |JOIN `data-integration-test.partition_a.table_$LATEST`
        |JOIN `data-integration-test.partition_b.table_$LATEST`
        |JOIN `data-integration-test.partition_c.table_$LATEST`
        |WHERE x = 0
      """.stripMargin
    val msg = "requirement failed: Cannot find latest common partition for " +
      "`data-integration-test.partition_a.table_$LATEST`, " +
      "`data-integration-test.partition_b.table_$LATEST`, " +
      "`data-integration-test.partition_c.table_$LATEST`"
    the[IllegalArgumentException] thrownBy {
      BigQueryPartitionUtil.latestQuery(bq, input)
    } should have message msg
  }

  // latestTable resolves $LATEST on a single table specification
  // (here 20170103 for partition_a).
  "latestTable" should "work" in {
    val input = "data-integration-test:partition_a.table_$LATEST"
    val expected = input.replace("$LATEST", "20170103")
    BigQueryPartitionUtil.latestTable(bq, input) shouldBe expected
  }

  it should "work without $LATEST" in {
    val input = "data-integration-test:samples_eu.shakespeare"
    BigQueryPartitionUtil.latestTable(bq, input) shouldBe input
  }

  // A $LATEST reference to a non-partitioned table must fail loudly.
  it should "fail table specification without latest partition" in {
    val input = "data-integration-test:samples_eu.shakespeare_$LATEST"
    val msg = s"requirement failed: Cannot find latest partition for $input"
    the[IllegalArgumentException] thrownBy {
      BigQueryPartitionUtil.latestTable(bq, input)
    } should have message msg
  }
}
|
spotify/scio
|
scio-google-cloud-platform/src/it/scala/com/spotify/scio/bigquery/BigQueryPartitionUtilIT.scala
|
Scala
|
apache-2.0
| 4,655
|
/*
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
package play.api.libs.functional
import scala.language.higherKinds
/** Type class for applicative functors that additionally support choice
  * between alternatives and an empty (failing) value. */
trait Alternative[M[_]] {
  /** The underlying [[Applicative]] instance for `M`. */
  def app: Applicative[M]
  /** Combines two alternatives; the exact choice semantics (e.g. first
    * success wins) are defined by each instance. */
  def |[A, B >: A](alt1: M[A], alt2: M[B]): M[B]
  /** A value representing failure/absence for `M`. */
  def empty: M[Nothing]
  //def some[A](m: M[A]): M[List[A]]
  //def many[A](m: M[A]): M[List[A]]
}
/** Infix syntax for [[Alternative]], enabling `fa | fb` and `fa or fb`. */
class AlternativeOps[M[_], A](alt1: M[A])(implicit a: Alternative[M]) {
  /** Delegates to the type-class instance's choice operator. */
  def |[B >: A](alt2: M[B]): M[B] = a.|(alt1, alt2)
  /** Alphabetic alias for `|`. */
  def or[B >: A](alt2: M[B]): M[B] = a.|(alt1, alt2)
}
|
jyotikamboj/container
|
pf-framework/src/play-functional/src/main/scala/play/api/libs/functional/Alternative.scala
|
Scala
|
mit
| 521
|
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets matching specific criteria, giving a quick overview of the dataset's contents.