code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.streaming.ui

import java.util.TimeZone
import java.util.concurrent.TimeUnit

import org.scalatest.matchers.must.Matchers
import org.scalatest.matchers.should.Matchers._

import org.apache.spark.SparkFunSuite
import org.apache.spark.ui.{UIUtils => SparkUIUtils}

/** Unit tests for the streaming UI's time formatting helpers in [[UIUtils]]. */
class UIUtilsSuite extends SparkFunSuite with Matchers {

  test("shortTimeUnitString") {
    // Expected abbreviation for every supported TimeUnit.
    val expectedAbbreviations = Seq(
      TimeUnit.NANOSECONDS -> "ns",
      TimeUnit.MICROSECONDS -> "us",
      TimeUnit.MILLISECONDS -> "ms",
      TimeUnit.SECONDS -> "sec",
      TimeUnit.MINUTES -> "min",
      TimeUnit.HOURS -> "hrs",
      TimeUnit.DAYS -> "days")
    expectedAbbreviations.foreach { case (unit, abbreviation) =>
      assert(abbreviation === UIUtils.shortTimeUnitString(unit))
    }
  }

  test("normalizeDuration") {
    verifyNormalizedTime(900, TimeUnit.MILLISECONDS, 900)
    verifyNormalizedTime(1.0, TimeUnit.SECONDS, 1000)
    verifyNormalizedTime(1.0, TimeUnit.MINUTES, 60 * 1000)
    verifyNormalizedTime(1.0, TimeUnit.HOURS, 60 * 60 * 1000)
    verifyNormalizedTime(1.0, TimeUnit.DAYS, 24 * 60 * 60 * 1000)
  }

  /**
   * Asserts that `input` milliseconds normalize to the expected (value, unit) pair,
   * comparing the value with a small floating-point tolerance.
   */
  private def verifyNormalizedTime(
      expectedTime: Double,
      expectedUnit: TimeUnit,
      input: Long): Unit = {
    val (normalizedTime, normalizedUnit) = UIUtils.normalizeDuration(input)
    normalizedTime should be (expectedTime +- 1E-6)
    normalizedUnit should be (expectedUnit)
  }

  test("convertToTimeUnit") {
    verifyConvertToTimeUnit(60.0 * 1000 * 1000 * 1000, 60 * 1000, TimeUnit.NANOSECONDS)
    verifyConvertToTimeUnit(60.0 * 1000 * 1000, 60 * 1000, TimeUnit.MICROSECONDS)
    verifyConvertToTimeUnit(60 * 1000, 60 * 1000, TimeUnit.MILLISECONDS)
    verifyConvertToTimeUnit(60, 60 * 1000, TimeUnit.SECONDS)
    verifyConvertToTimeUnit(1, 60 * 1000, TimeUnit.MINUTES)
    verifyConvertToTimeUnit(1.0 / 60, 60 * 1000, TimeUnit.HOURS)
    verifyConvertToTimeUnit(1.0 / 60 / 24, 60 * 1000, TimeUnit.DAYS)
  }

  /** Asserts that `milliseconds` converted into `unit` matches the expected value. */
  private def verifyConvertToTimeUnit(
      expectedTime: Double,
      milliseconds: Long,
      unit: TimeUnit): Unit = {
    val converted = UIUtils.convertToTimeUnit(milliseconds, unit)
    converted should be (expectedTime +- 1E-6)
  }

  test("formatBatchTime") {
    // Pin the time zone so the expected strings do not depend on the host machine.
    val tzForTest = TimeZone.getTimeZone("America/Los_Angeles")
    val batchTime = 1431637480452L // Thu May 14 14:04:40 PDT 2015

    assert("2015/05/14 14:04:40" ===
      SparkUIUtils.formatBatchTime(batchTime, 1000, timezone = tzForTest))
    assert("2015/05/14 14:04:40.452" ===
      SparkUIUtils.formatBatchTime(batchTime, 999, timezone = tzForTest))
    assert("14:04:40" ===
      SparkUIUtils.formatBatchTime(batchTime, 1000, false, timezone = tzForTest))
    assert("14:04:40.452" ===
      SparkUIUtils.formatBatchTime(batchTime, 999, false, timezone = tzForTest))
  }
}
hvanhovell/spark
streaming/src/test/scala/org/apache/spark/streaming/ui/UIUtilsSuite.scala
Scala
apache-2.0
3,633
/***********************************************************************
 * Copyright (c) 2015-2022 Commonwealth Computer Research, Inc.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Apache License, Version 2.0
 * which accompanies this distribution and is available at
 * http://www.opensource.org/licenses/apache2.0.php.
 ***********************************************************************/

package org.geomesa.nifi.processors.kafka

import org.apache.nifi.processor.ProcessContext
import org.geomesa.nifi.datastore.processor.mixins.AbstractDataStoreProcessor
import org.geomesa.nifi.datastore.processor.utils.PropertyDescriptorUtils
import org.locationtech.geomesa.kafka.data.KafkaDataStoreParams.{ProducerConfig, TopicPartitions, TopicReplication}
import org.locationtech.geomesa.kafka.data.{KafkaDataStoreFactory, KafkaDataStoreParams}

/**
 * Base class for Kafka-backed NiFi processors. Configures the underlying Kafka
 * data store as a write-only producer by forcing the consumer count to zero.
 */
abstract class KafkaProcessor extends AbstractDataStoreProcessor(KafkaProcessor.KafkaProperties) {

  override protected def getDataStoreParams(context: ProcessContext): Map[String, _] = {
    // set consumer count to zero to disable consuming
    val producerOnly = Map(KafkaDataStoreParams.ConsumerCount.getName -> Int.box(0))
    super.getDataStoreParams(context) ++ producerOnly
  }
}

object KafkaProcessor extends PropertyDescriptorUtils {
  // note: KafkaDataStoreFactory.ParameterInfo is consumer-oriented, but we want producer properties here
  private val KafkaProperties = {
    val producerProps = Seq(ProducerConfig, TopicPartitions, TopicReplication).map(createPropertyDescriptor)
    createPropertyDescriptors(KafkaDataStoreFactory) ++ producerProps
  }
}
geomesa/geomesa-nifi
geomesa-kafka-bundle/geomesa-kafka-processors/src/main/scala/org/geomesa/nifi/processors/kafka/KafkaProcessor.scala
Scala
apache-2.0
1,604
/*
 * Copyright 2001-2013 Artima, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.scalatest.fixture

import org.scalatest._
import scala.collection.immutable.ListSet
import org.scalatest.Suite.autoTagClassAnnotations

/**
 * Implementation trait for class <code>fixture.PropSpec</code>, which is
 * a sister class to <a href="../PropSpec.html"><code>org.scalatest.PropSpec</code></a> that can pass a
 * fixture object into its tests.
 *
 * <p>
 * <a href="PropSpec.html"><code>fixture.PropSpec</code></a> is a class,
 * not a trait, to minimize compile time given there is a slight compiler
 * overhead to mixing in traits compared to extending classes. If you need
 * to mix the behavior of <code>fixture.PropSpec</code> into some other
 * class, you can use this trait instead, because class
 * <code>fixture.PropSpec</code> does nothing more than extend this trait and add a nice <code>toString</code> implementation.
 * </p>
 *
 * <p>
 * See the documentation of the class for a <a href="PropSpec.html">detailed
 * overview of <code>fixture.PropSpec</code></a>.
 * </p>
 *
 * @author Bill Venners
 */
@Finders(Array("org.scalatest.finders.PropSpecFinder"))
trait PropSpecRegistration extends Suite with TestRegistration with Informing with Notifying with Alerting with Documenting { thisSuite =>

  // Engine that stores registered tests/tags and drives their execution with a FixtureParam.
  private final val engine = new FixtureEngine[FixtureParam](Resources.concurrentFixturePropSpecMod, "FixturePropSpec")

  protected[scalatest] def getEngine: FixtureEngine[FixtureParam] = engine

  import engine._

  // File name reported in stack traces so failure locations can point at user code.
  private[scalatest] val sourceFileName = "PropSpecRegistration.scala"

  /**
   * Returns an <code>Informer</code> that during test execution will forward strings passed to its
   * <code>apply</code> method to the current reporter. If invoked in a constructor, it
   * will register the passed string for forwarding later during test execution. If invoked from inside a scope,
   * it will forward the information to the current reporter immediately. If invoked from inside a test function,
   * it will record the information and forward it to the current reporter only after the test completed, as <code>recordedEvents</code>
   * of the test completed event, such as <code>TestSucceeded</code>. If invoked at any other time, it will print to the standard output.
   * This method can be called safely by any thread.
   */
  protected def info: Informer = atomicInformer.get

  /**
   * Returns a <code>Notifier</code> that during test execution will forward strings (and other objects) passed to its
   * <code>apply</code> method to the current reporter. If invoked in a constructor, it
   * will register the passed string for forwarding later during test execution. If invoked while this
   * <code>fixture.PropSpec</code> is being executed, such as from inside a test function, it will forward the information to
   * the current reporter immediately. If invoked at any other time, it will
   * print to the standard output. This method can be called safely by any thread.
   */
  protected def note: Notifier = atomicNotifier.get

  /**
   * Returns an <code>Alerter</code> that during test execution will forward strings (and other objects) passed to its
   * <code>apply</code> method to the current reporter. If invoked in a constructor, it
   * will register the passed string for forwarding later during test execution. If invoked while this
   * <code>fixture.PropSpec</code> is being executed, such as from inside a test function, it will forward the information to
   * the current reporter immediately. If invoked at any other time, it will
   * print to the standard output. This method can be called safely by any thread.
   */
  protected def alert: Alerter = atomicAlerter.get

  /**
   * Returns a <code>Documenter</code> that during test execution will forward strings passed to its
   * <code>apply</code> method to the current reporter. If invoked in a constructor, it
   * will register the passed string for forwarding later during test execution. If invoked from inside a scope,
   * it will forward the information to the current reporter immediately. If invoked from inside a test function,
   * it will record the information and forward it to the current reporter only after the test completed, as <code>recordedEvents</code>
   * of the test completed event, such as <code>TestSucceeded</code>. If invoked at any other time, it will print to the standard output.
   * This method can be called safely by any thread.
   */
  protected def markup: Documenter = atomicDocumenter.get

  // Registers a test with the engine. NOTE(review): the literal 4 and -1 appear to be
  // stack-depth / position adjustments used to locate the caller's registration site —
  // confirm against FixtureEngine.registerTest before changing.
  final def registerTest(testText: String, testTags: Tag*)(testFun: FixtureParam => Registration) {
    engine.registerTest(testText, transformToOutcome(testFun), Resources.testCannotBeNestedInsideAnotherTest, sourceFileName, "registerTest", 4, -1, None, None, None, testTags: _*)
  }

  // Registers an ignored test; uses a different position adjustment (-3) than registerTest.
  final def registerIgnoredTest(testText: String, testTags: Tag*)(testFun: FixtureParam => Registration) {
    engine.registerIgnoredTest(testText, transformToOutcome(testFun), Resources.testCannotBeNestedInsideAnotherTest, sourceFileName, "registerIgnoredTest", 4, -3, None, testTags: _*)
  }

  /**
   * Register a property-based test with the specified name, optional tags, and function value that takes no arguments.
   * This method will register the test for later execution via an invocation of one of the <code>run</code>
   * methods. The passed test name must not have been registered previously on
   * this <code>PropSpec</code> instance.
   *
   * @param testName the name of the test
   * @param testTags the optional list of tags for this test
   * @param testFun the test function
   * @throws TestRegistrationClosedException if invoked after <code>run</code> has been invoked on this suite
   * @throws DuplicateTestNameException if a test with the same name has been registered previously
   * @throws NotAllowedException if <code>testName</code> had been registered previously
   * @throws NullPointerException if <code>testName</code> or any passed test tag is <code>null</code>
   */
  protected def property(testName: String, testTags: Tag*)(testFun: FixtureParam => Registration) {
    engine.registerTest(testName, transformToOutcome(testFun), Resources.propertyCannotAppearInsideAnotherProperty, sourceFileName, "property", 4, -2, None, None, None, testTags: _*)
  }

  /**
   * Register a property-based test to ignore, which has the specified name, optional tags, and function value that takes no arguments.
   * This method will register the test for later ignoring via an invocation of one of the <code>run</code>
   * methods. This method exists to make it easy to ignore an existing test by changing the call to <code>test</code>
   * to <code>ignore</code> without deleting or commenting out the actual test code. The test will not be run, but a
   * report will be sent that indicates the test was ignored. The passed test name must not have been registered previously on
   * this <code>PropSpec</code> instance.
   *
   * @param testName the name of the test
   * @param testTags the optional list of tags for this test
   * @param testFun the test function
   * @throws TestRegistrationClosedException if invoked after <code>run</code> has been invoked on this suite
   * @throws DuplicateTestNameException if a test with the same name has been registered previously
   * @throws NotAllowedException if <code>testName</code> had been registered previously
   */
  protected def ignore(testName: String, testTags: Tag*)(testFun: FixtureParam => Registration) {
    engine.registerIgnoredTest(testName, transformToOutcome(testFun), Resources.ignoreCannotAppearInsideAProperty, sourceFileName, "ignore", 4, -3, None, testTags: _*)
  }

  /**
   * An immutable <code>Set</code> of test names. If this <code>fixture.PropSpec</code> contains no tests, this method returns an empty <code>Set</code>.
   *
   * <p>
   * This trait's implementation of this method will return a set that contains the names of all registered tests. The set's iterator will
   * return those names in the order in which the tests were registered.
   * </p>
   *
   * @return the <code>Set</code> of test names
   */
  override def testNames: Set[String] = {
    // I'm returning a ListSet here so that they tests will be run in registration order
    ListSet(atomic.get.testNamesList.toArray: _*)
  }

  /**
   * Run a test. This trait's implementation runs the test registered with the name specified by <code>testName</code>.
   *
   * @param testName the name of one test to run.
   * @param args the <code>Args</code> for this run
   * @return a <code>Status</code> object that indicates when the test started by this method has completed, and whether or not it failed .
   * @throws IllegalArgumentException if <code>testName</code> is defined but a test with that name does not exist on this <code>fixture.PropSpec</code>
   * @throws NullPointerException if any of <code>testName</code> or <code>args</code> is <code>null</code>.
   */
  protected override def runTest(testName: String, args: Args): Status = {
    // Unwraps the stored test function and runs it through withFixture: Transformer-wrapped
    // functions are unwrapped first; NoArgTestWrapper signals a fixtureless (no-arg) test.
    def invokeWithFixture(theTest: TestLeaf): AsyncOutcome = {
      PastOutcome(
        theTest.testFun match {
          case transformer: org.scalatest.fixture.Transformer[_] =>
            transformer.exceptionalTestFun match {
              case wrapper: NoArgTestWrapper[_, _] =>
                withFixture(new FixturelessTestFunAndConfigMap(testName, wrapper.test, args.configMap))
              case fun => withFixture(new TestFunAndConfigMap(testName, fun, args.configMap))
            }
          case other =>
            other match {
              case wrapper: NoArgTestWrapper[_, _] =>
                withFixture(new FixturelessTestFunAndConfigMap(testName, wrapper.test, args.configMap))
              case fun => withFixture(new TestFunAndConfigMap(testName, fun, args.configMap))
            }
        }
      )
    }
    runTestImpl(thisSuite, testName, args, true, invokeWithFixture)
  }

  /**
   * A <code>Map</code> whose keys are <code>String</code> tag names to which tests in this <code>fixture.PropSpec</code> belong, and values
   * the <code>Set</code> of test names that belong to each tag. If this <code>fixture.PropSpec</code> contains no tags, this method returns an empty
   * <code>Map</code>.
   *
   * <p>
   * This trait's implementation returns tags that were passed as strings contained in <code>Tag</code> objects passed to
   * methods <code>test</code> and <code>ignore</code>.
   * </p>
   *
   * <p>
   * In addition, this trait's implementation will also auto-tag tests with class level annotations.
   * For example, if you annotate @Ignore at the class level, all test methods in the class will be auto-annotated with @Ignore.
   * </p>
   */
  override def tags: Map[String, Set[String]] = autoTagClassAnnotations(atomic.get.tagsMap, this)

  /**
   * <p>
   * Run zero to many of this <code>fixture.PropSpecRegistration</code>'s tests.
   * </p>
   *
   * <p>
   * This method takes a <code>testName</code> parameter that optionally specifies a test to invoke.
   * If <code>testName</code> is <code>Some</code>, this trait's implementation of this method
   * invokes <code>runTest</code> on this object with passed <code>args</code>.
   * </p>
   *
   * <p>
   * This method takes an <code>args</code> that contains a <code>Set</code> of tag names that should be included (<code>tagsToInclude</code>), and a <code>Set</code>
   * that should be excluded (<code>tagsToExclude</code>), when deciding which of this <code>Suite</code>'s tests to execute.
   * If <code>tagsToInclude</code> is empty, all tests will be executed
   * except those those belonging to tags listed in the <code>tagsToExclude</code> <code>Set</code>. If <code>tagsToInclude</code> is non-empty, only tests
   * belonging to tags mentioned in <code>tagsToInclude</code>, and not mentioned in <code>tagsToExclude</code>
   * will be executed. However, if <code>testName</code> is <code>Some</code>, <code>tagsToInclude</code> and <code>tagsToExclude</code> are essentially ignored.
   * Only if <code>testName</code> is <code>None</code> will <code>tagsToInclude</code> and <code>tagsToExclude</code> be consulted to
   * determine which of the tests named in the <code>testNames</code> <code>Set</code> should be run. For more information on trait tags, see the main documentation for this trait.
   * </p>
   *
   * <p>
   * If <code>testName</code> is <code>None</code>, this trait's implementation of this method
   * invokes <code>testNames</code> on this <code>Suite</code> to get a <code>Set</code> of names of tests to potentially execute.
   * (A <code>testNames</code> value of <code>None</code> essentially acts as a wildcard that means all tests in
   * this <code>Suite</code> that are selected by <code>tagsToInclude</code> and <code>tagsToExclude</code> should be executed.)
   * For each test in the <code>testName</code> <code>Set</code>, in the order
   * they appear in the iterator obtained by invoking the <code>elements</code> method on the <code>Set</code>, this trait's implementation
   * of this method checks whether the test should be run based on the <code>tagsToInclude</code> and <code>tagsToExclude</code> <code>Set</code>s.
   * If so, this implementation invokes <code>runTest</code> with passed <code>args</code>.
   * </p>
   *
   * @param testName an optional name of one test to execute. If <code>None</code>, all relevant tests should be executed.
   *                 I.e., <code>None</code> acts like a wildcard that means execute all relevant tests in this <code>FunSpec</code>.
   * @param args the <code>Args</code> to which results will be reported
   * @return a <code>Status</code> object that indicates when all tests started by this method have completed, and whether or not a failure occurred.
   * @throws NullPointerException if any of <code>testName</code> or <code>args</code> is <code>null</code>.
   */
  protected override def runTests(testName: Option[String], args: Args): Status = {
    runTestsImpl(thisSuite, testName, args, info, true, runTest)
  }

  // Delegates to runImpl, passing super.run so the engine controls the overall run lifecycle.
  override def run(testName: Option[String], args: Args): Status = {
    runImpl(thisSuite, testName, args, super.run)
  }

  /**
   * Registers shared tests.
   *
   * <p>
   * This method enables the following syntax for shared tests in a <code>fixture.PropSpec</code>:
   * </p>
   *
   * <pre class="stHighlight">
   * propertiesFor(nonEmptyStack(lastValuePushed))
   * </pre>
   *
   * <p>
   * This method just provides syntax sugar intended to make the intent of the code clearer.
   * Because the parameter passed to it is
   * type <code>Unit</code>, the expression will be evaluated before being passed, which
   * is sufficient to register the shared tests. For examples of shared tests, see the
   * <a href="../PropSpec.html#SharedTests">Shared tests section</a> in the main documentation for
   * trait <code>PropSpec</code>.
   * </p>
   *
   * @param unit a <code>Unit</code>
   */
  protected def propertiesFor(unit: Unit) {}

  import scala.language.implicitConversions

  /**
   * Implicitly converts a function that takes no parameters and results in <code>PendingNothing</code> to
   * a function from <code>FixtureParam</code> to <code>Any</code>, to enable pending tests to registered as by-name parameters
   * by methods that require a test function that takes a <code>FixtureParam</code>.
   *
   * <p>
   * This method makes it possible to write pending tests as simply <code>(pending)</code>, without needing
   * to write <code>(fixture => pending)</code>.
   * </p>
   *
   * @param f a function
   * @return a function of <code>FixtureParam => Any</code>
   */
  protected implicit def convertPendingToFixtureFunction(f: => PendingNothing): (FixtureParam => Any) = {
    fixture => f
  }

  /**
   * Implicitly converts a function that takes no parameters and results in <code>Any</code> to
   * a function from <code>FixtureParam</code> to <code>Any</code>, to enable no-arg tests to registered
   * by methods that require a test function that takes a <code>FixtureParam</code>.
   *
   * @param fun a function
   * @return a function of <code>FixtureParam => Any</code>
   */
  protected implicit def convertNoArgToFixtureFunction(fun: () => Any): (FixtureParam => Any) =
    new NoArgTestWrapper(fun)

  /**
   * Suite style name.
   *
   * @return <code>org.scalatest.fixture.PropSpec</code>
   */
  final override val styleName: String = "org.scalatest.fixture.PropSpec"

  override def testDataFor(testName: String, theConfigMap: ConfigMap = ConfigMap.empty): TestData = createTestDataFor(testName, theConfigMap, this)
}
cheeseng/scalatest
scalatest/src/main/scala/org/scalatest/fixture/PropSpecRegistration.scala
Scala
apache-2.0
17,192
package net.cucumbersome.rpgRoller.warhammer.infrastructure.repositories

import akka.Done
import net.cucumbersome.rpgRoller.warhammer.infrastructure.repositories.ActorRepository.FilterExpression
import net.cucumbersome.rpgRoller.warhammer.infrastructure.repositories.ActorRepository.FilterExpression._
import net.cucumbersome.rpgRoller.warhammer.player.CombatActor

import scala.concurrent.{ExecutionContext, Future}

/** Repository abstraction over a collection of [[CombatActor]]s. */
trait ActorRepository {
  /** All actors currently stored. */
  def all(implicit ec: ExecutionContext): Future[List[CombatActor]]
  /** Looks up a single actor by id, if present. */
  def find(id: CombatActor.Id)(implicit ec: ExecutionContext): Future[Option[CombatActor]]
  /** Stores a new actor. */
  def add(combatActor: CombatActor)(implicit ec: ExecutionContext): Future[Done]
  /** Returns all actors matching the given filter expression. */
  def filter(expression: FilterExpression)(implicit ec: ExecutionContext): Future[List[CombatActor]]
}

/** In-memory [[ActorRepository]], backed by a plain list. */
class InMemoryActorRepository(initialActors: List[CombatActor]) extends ActorRepository {

  // Public mutable state is part of the existing interface of this class.
  var actors: List[CombatActor] = initialActors

  override def all(implicit ec: ExecutionContext): Future[List[CombatActor]] =
    Future.successful(actors)

  override def find(id: CombatActor.Id)(implicit ec: ExecutionContext): Future[Option[CombatActor]] =
    Future.successful(actors.find(_.id == id))

  override def add(combatActor: CombatActor)(implicit ec: ExecutionContext): Future[Done] = {
    actors = actors :+ combatActor
    Future.successful(Done)
  }

  /** Drops all stored actors (useful between tests). */
  def clear(): Unit = {
    actors = Nil
  }

  override def filter(expression: FilterExpression)(implicit ec: ExecutionContext): Future[List[CombatActor]] = {
    // Translate the expression into a predicate, then filter once.
    val predicate: CombatActor => Boolean = expression match {
      case ByName(value) => _.name.data.contains(value)
      case ByHealth(value) => _.hp.data == value
      case ByIds(ids) => actor => ids.contains(actor.id.data)
    }
    Future.successful(actors.filter(predicate))
  }
}

object ActorRepository {

  /** ADT describing the supported ways of filtering actors. */
  sealed trait FilterExpression

  object FilterExpression {
    case class ByName(value: String) extends FilterExpression
    case class ByHealth(value: Int) extends FilterExpression
    case class ByIds(ids: Seq[String]) extends FilterExpression
  }
}
CucumisSativus/rpgRollerBackend
src/main/scala/net/cucumbersome/rpgRoller/warhammer/infrastructure/repositories/ActorRepository.scala
Scala
mit
2,091
package mesosphere.marathon
package core.storage.backup

import akka.Done
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
import ch.qos.logback.classic.{Level, Logger}
import com.typesafe.scalalogging.StrictLogging
import mesosphere.marathon.core.base.{JvmExitsCrashStrategy, LifecycleState}
import mesosphere.marathon.core.storage.store.impl.zk.RichCuratorFramework
import mesosphere.marathon.storage.{StorageConfig, StorageModule}
import org.rogach.scallop.ScallopConf
import org.slf4j.LoggerFactory

import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future}
import scala.util.control.NonFatal

/**
 * Base class for backup and restore command line utility.
 */
abstract class BackupRestoreAction extends StrictLogging {

  // Command-line configuration; fails fast unless --backup_location is provided.
  class BackupConfig(args: Seq[String]) extends ScallopConf(args) with MarathonConf {
    override def availableFeatures: Set[String] = Set.empty
    verify()
    require(backupLocation.isDefined, "--backup_location needs to be defined!")
  }

  /**
   * Can either run a backup or restore operation.
   *
   * Wires up the actor system, metrics, and storage module, runs `fn` against the
   * persistent-store backup, then tears everything down and terminates the JVM:
   * exit code 1 on a non-fatal failure, otherwise 0 from the finally block.
   * Note: statement order here is deliberate (markOpen/markClosed around the action,
   * sys.exit(1) before the finally-block cleanup) — do not reorder.
   */
  def action(conf: BackupConfig, fn: PersistentStoreBackup => Future[Done]): Unit = {
    implicit val system = ActorSystem("Backup")
    implicit val materializer = ActorMaterializer()
    implicit val scheduler = system.scheduler
    import scala.concurrent.ExecutionContext.Implicits.global

    val metricsModule = MetricsModule(conf)
    metricsModule.start(system)

    try {
      val curatorFramework: RichCuratorFramework = StorageConfig.curatorFramework(conf, JvmExitsCrashStrategy, LifecycleState.WatchingJVM)
      val storageModule = StorageModule(metricsModule.metrics, conf, curatorFramework)
      storageModule.persistenceStore.markOpen()
      val backup = storageModule.persistentStoreBackup
      // Blocks until the backup/restore operation finishes (command line tool, so OK).
      Await.result(fn(backup), Duration.Inf)
      storageModule.persistenceStore.markClosed()
      logger.info("Action complete.")
    } catch {
      case NonFatal(ex) =>
        logger.error(s"Error: ${ex.getMessage}", ex)
        sys.exit(1) // signal a problem to the caller
    } finally {
      Await.result(Http().shutdownAllConnectionPools(), Duration.Inf)
      // akka http has an issue tearing down the connection pool: https://github.com/akka/akka-http/issues/907
      // We will hide the fail message from the user until this is fixed
      LoggerFactory.getLogger("akka.actor.ActorSystemImpl").asInstanceOf[Logger].setLevel(Level.OFF)
      materializer.shutdown()
      Await.ready(system.terminate(), Duration.Inf)
      sys.exit(0)
    }
  }
}

/**
 * Command line utility to backup the current Marathon state to an external storage location.
 *
 * Please note: if you start Marathon with a backup location, it will automatically create a backup,
 * for every new Marathon version, before it runs a migration.
 * This is the preferred way to handle upgrades.
 *
 * Snapshot backups can be created at all time.
 *
 * There are several command line parameters to define the exact behaviour and location.
 * Please use --help to see all command line parameters
 */
object Backup extends BackupRestoreAction {
  def main(args: Array[String]): Unit = {
    val config = new BackupConfig(args.toVector)
    action(config, _.backup(config.backupLocation()))
  }
}

/**
 * Command line utility to restore a Marathon state from an external storage location.
 *
 * Please note: restoring a backup will overwrite all existing data in the store.
 * All changes that were applied between the creation of this snapshot to the current state will be lost!
 *
 * There are several command line parameters to define the exact behaviour and location.
 * Please use --help to see all command line parameters
 */
object Restore extends BackupRestoreAction {
  def main(args: Array[String]): Unit = {
    val config = new BackupConfig(args.toVector)
    action(config, _.restore(config.backupLocation()))
  }
}
mesosphere/marathon
src/main/scala/mesosphere/marathon/core/storage/backup/Backup.scala
Scala
apache-2.0
3,948
package demo.impl.basket

import akka.NotUsed
import com.lightbend.lagom.scaladsl.api.ServiceCall
import com.lightbend.lagom.scaladsl.api.transport.NotFound
import com.lightbend.lagom.scaladsl.persistence.PersistentEntityRegistry
import demo.api.basket.{Basket, BasketService, Item}

import scala.concurrent.{ExecutionContext, Future}

/**
 * In-memory implementation of [[BasketService]].
 *
 * Baskets are held in a plain immutable Map guarded by a dedicated lock object.
 *
 * Fix: the original code synchronized on `baskets` itself, but `baskets` is a `var`
 * that is reassigned inside the synchronized block. Two threads could therefore lock
 * two *different* Map instances and enter the critical section concurrently, losing
 * updates. Synchronizing on a stable, final lock object restores mutual exclusion.
 */
class BasketServiceImpl(persistentEntities: PersistentEntityRegistry)(implicit ec: ExecutionContext)
  extends BasketService {

  // Stable monitor for all access to `baskets`; never reassigned.
  private[this] final val lock = new Object

  private var baskets = Map[String, Basket]()

  /** Returns the basket for `basketId`, or an empty basket if none exists yet. */
  override def getBasket(basketId: String): ServiceCall[NotUsed, Basket] = ServiceCall { req =>
    lock.synchronized {
      Future.successful(baskets.getOrElse(basketId, Basket(Seq(), 0)))
    }
  }

  /** Appends `item` to the basket, recomputing the total price from all items. */
  override def addItem(basketId: String): ServiceCall[Item, NotUsed] = ServiceCall { item =>
    lock.synchronized {
      val newItems = baskets.get(basketId).toSeq.flatMap(_.items) :+ item
      baskets = baskets + (basketId -> Basket(newItems, newItems.map(_.price).sum))
    }
    Future.successful(NotUsed)
  }
}
tommpy/demo-lagom-checkout
exercise_010_basket_service/basket-impl/src/main/scala/demo/impl/basket/BasketServiceImpl.scala
Scala
apache-2.0
1,036
package org.jetbrains.plugins.scala
package lang.refactoring.ui

import com.intellij.psi.{PsiModifierList, PsiModifierListOwner, PsiElement}
import com.intellij.refactoring.classMembers.MemberInfoModel
import com.intellij.refactoring.ui.AbstractMemberSelectionTable
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScObject, ScMember}
import com.intellij.ui.RowIcon
import com.intellij.util.{IconUtil, VisibilityIcons}
import javax.swing.Icon
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import com.intellij.icons.AllIcons

/**
 * Base member-selection table used by Scala refactoring dialogs; supplies the values
 * and icons for the "abstract", visibility, and override columns.
 *
 * Nikolay.Tropin
 * 8/20/13
 */
abstract class ScalaMemberSelectionTableBase[M <: PsiElement, I <: ScalaMemberInfoBase[M]](memberInfos: java.util.Collection[I],
                                                                                           memberInfoModel: MemberInfoModel[M, I],
                                                                                           abstractColumnHeader: String)
  extends AbstractMemberSelectionTable[M, I](memberInfos, memberInfoModel, abstractColumnHeader) {

  // Value shown in the "abstract" column. Returning null hides the checkbox
  // (members of an object can never be abstract); a Boolean renders a checkbox state.
  def getAbstractColumnValue(memberInfo: I): AnyRef = {
    memberInfo.getMember match {
      case member: ScMember if member.containingClass.isInstanceOf[ScObject] => null
      case member: ScMember if member.hasAbstractModifier && myMemberInfoModel.isFixedAbstract(memberInfo) != null =>
        myMemberInfoModel.isFixedAbstract(memberInfo)
      case _ if !myMemberInfoModel.isAbstractEnabled(memberInfo) =>
        val res: java.lang.Boolean = myMemberInfoModel.isAbstractWhenDisabled(memberInfo)
        res
      case _ if memberInfo.isToAbstract => java.lang.Boolean.TRUE
      case _ => java.lang.Boolean.FALSE
    }
  }

  // A cell is editable unless the member is abstract with a model-fixed value;
  // otherwise it is editable only for checked rows where "abstract" is enabled.
  def isAbstractColumnEditable(rowIndex: Int): Boolean = {
    val info: I = myMemberInfos.get(rowIndex)
    info.getMember match {
      case member: ScMember if member.hasAbstractModifier && myMemberInfoModel.isFixedAbstract(info) == java.lang.Boolean.TRUE => false
      case _ => info.isChecked && myMemberInfoModel.isAbstractEnabled(info)
    }
  }

  // Paints the visibility icon for the member; falls back to an empty icon when
  // the member has no modifier list.
  def setVisibilityIcon(memberInfo: I, icon: RowIcon) {
    memberInfo.getMember match {
      case owner: PsiModifierListOwner =>
        owner.getModifierList match {
          case mods: PsiModifierList => VisibilityIcons.setVisibilityIcon(mods, icon)
          case _ => icon.setIcon(IconUtil.getEmptyIcon(true), AbstractMemberSelectionTable.VISIBILITY_ICON_POSITION)
        }
      case _ =>
    }
  }

  // Override-column icon: only functions can override/implement; getOverrides is a
  // tri-state java.lang.Boolean (TRUE = overriding, FALSE = implementing, null = neither).
  def getOverrideIcon(memberInfo: I): Icon = memberInfo.getMember match {
    case fun: ScFunction =>
      if (java.lang.Boolean.TRUE == memberInfo.getOverrides) AllIcons.General.OverridingMethod
      else if (java.lang.Boolean.FALSE == memberInfo.getOverrides) AllIcons.General.ImplementingMethod
      else AbstractMemberSelectionTable.EMPTY_OVERRIDE_ICON
    case _ => AbstractMemberSelectionTable.EMPTY_OVERRIDE_ICON
  }
}
consulo/consulo-scala
src/org/jetbrains/plugins/scala/lang/refactoring/ui/ScalaMemberSelectionTableBase.scala
Scala
apache-2.0
2,798
package org.jetbrains.plugins.scala package lang.psi.types import com.intellij.psi.{PsiTypeParameter, PsiClass} import com.intellij.psi.search.searches.ClassInheritorsSearch import com.intellij.psi.search.LocalSearchScope import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScModifierListOwner import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScTypeParam import extensions.toPsiClassExt import org.jetbrains.plugins.scala.util.ScEquivalenceUtil.areClassesEquivalent /** * Nikolay.Tropin * 2014-04-03 */ object ComparingUtil { //this relation is actually symmetric def isNeverSubClass(clazz1: PsiClass, clazz2: PsiClass): Boolean = { val classes = Seq(clazz1, clazz2) val oneFinal = clazz1.isEffectivelyFinal || clazz2.isEffectivelyFinal val twoNonTraitsOrInterfaces = !classes.exists(_.isInterface) def inheritorsInSameFile(clazz: PsiClass) = ClassInheritorsSearch.search(clazz, new LocalSearchScope(clazz.getContainingFile), false).toArray(PsiClass.EMPTY_ARRAY).collect { case x: ScTypeDefinition => x } def sealedAndAllChildrenAreIrreconcilable = { val areSealed = classes.forall{ case modOwner: ScModifierListOwner => modOwner.hasModifierProperty("sealed") case _ => false } def childrenAreIrreconcilable = inheritorsInSameFile(clazz1).forall { c1 => inheritorsInSameFile(clazz2).forall { c2 => isNeverSubClass(c1, c2) } } areSealed && childrenAreIrreconcilable } val areUnrelatedClasses = !areClassesEquivalent(clazz1, clazz2) && !(clazz1.isInheritor(clazz2, true) || clazz2.isInheritor(clazz1, true)) areUnrelatedClasses && (oneFinal || twoNonTraitsOrInterfaces || sealedAndAllChildrenAreIrreconcilable) } def isNeverSubType(tp1: ScType, tp2: ScType, sameType: Boolean = false): Boolean = { if (tp2.weakConforms(tp1) || tp1.weakConforms(tp2)) return false val Seq(clazzOpt1, clazzOpt2) = Seq(tp1, tp2).map(ScType.extractClass(_)) if (clazzOpt1.isEmpty || clazzOpt2.isEmpty) return false val 
(clazz1, clazz2) = (clazzOpt1.get, clazzOpt2.get) def isNeverSameType(tp1: ScType, tp2: ScType) = isNeverSubType(tp1, tp2, sameType = true) def isNeverSubArgs(tps1: Seq[ScType], tps2: Seq[ScType], tparams: Seq[PsiTypeParameter]): Boolean = { def isNeverSubArg(t1: ScType, t2: ScType, variance: Int) = { if (variance > 0) isNeverSubType(t2, t1) else if (variance < 0) isNeverSubType(t1, t2) else isNeverSameType(t1, t2) } def getVariance(tp: PsiTypeParameter) = tp match { case scParam: ScTypeParam => if (scParam.isCovariant) 1 else if (scParam.isContravariant) -1 else 0 case _ => 0 } tps1.zip(tps2).zip(tparams.map(getVariance)) exists { case ((t1, t2), vr) => isNeverSubArg(t1, t2, vr) case _ => false } } def neverSubArgs() = { (tp1, tp2) match { case (ScParameterizedType(_, args1), ScParameterizedType(_, args2)) => isNeverSubArgs(args1, args2, clazz2.getTypeParameters) case _ => false } } isNeverSubClass(clazz1, clazz2) || ((areClassesEquivalent(clazz1, clazz2) || (!sameType) && clazz1.isInheritor(clazz2, true)) && neverSubArgs()) } }
consulo/consulo-scala
src/org/jetbrains/plugins/scala/lang/psi/types/ComparingUtil.scala
Scala
apache-2.0
3,430
//: ---------------------------------------------------------------------------- //: Copyright (C) 2015 Lech Głowiak. All Rights Reserved. //: //: Licensed under the Apache License, Version 2.0 (the "License"); //: you may not use this file except in compliance with the License. //: You may obtain a copy of the License at //: //: http://www.apache.org/licenses/LICENSE-2.0 //: //: Unless required by applicable law or agreed to in writing, software //: distributed under the License is distributed on an "AS IS" BASIS, //: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //: See the License for the specific language governing permissions and //: limitations under the License. //: //: ---------------------------------------------------------------------------- package remotely.transport.aeron import java.net.SocketAddress import remotely.Monitoring class NegotiatingLogger(val m: Monitoring, val addr: Option[SocketAddress]) { def negotiating(what: String, error: Option[Throwable] = None): Unit = { m.negotiating(addr, what, error) } }
lech-glowiak/remotely-aeron
core/src/main/scala/remotely/transport/aeron/NegotiatingLogger.scala
Scala
apache-2.0
1,104
import _root_.io.gatling.core.scenario.Simulation import ch.qos.logback.classic.{Level, LoggerContext} import io.gatling.core.Predef._ import io.gatling.http.Predef._ import org.slf4j.LoggerFactory import scala.concurrent.duration._ /** * Performance test for the Livro entity. */ class LivroGatlingTest extends Simulation { val context: LoggerContext = LoggerFactory.getILoggerFactory.asInstanceOf[LoggerContext] // Log all HTTP requests //context.getLogger("io.gatling.http").setLevel(Level.valueOf("TRACE")) // Log failed HTTP requests //context.getLogger("io.gatling.http").setLevel(Level.valueOf("DEBUG")) val baseURL = Option(System.getProperty("baseURL")) getOrElse """http://127.0.0.1:8080""" val httpConf = http .baseURL(baseURL) .inferHtmlResources() .acceptHeader("*/*") .acceptEncodingHeader("gzip, deflate") .acceptLanguageHeader("fr,fr-fr;q=0.8,en-us;q=0.5,en;q=0.3") .connectionHeader("keep-alive") .userAgentHeader("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0") val headers_http = Map( "Accept" -> """application/json""" ) val headers_http_authenticated = Map( "Accept" -> """application/json""", "X-XSRF-TOKEN" -> "${xsrf_token}" ) val scn = scenario("Test the Livro entity") .exec(http("First unauthenticated request") .get("/api/account") .headers(headers_http) .check(status.is(401)) .check(headerRegex("Set-Cookie", "XSRF-TOKEN=(.*);[\\\\s]").saveAs("xsrf_token"))).exitHereIfFailed .pause(10) .exec(http("Authentication") .post("/api/authentication") .headers(headers_http_authenticated) .formParam("j_username", "admin") .formParam("j_password", "admin") .formParam("remember-me", "true") .formParam("submit", "Login") .check(headerRegex("Set-Cookie", "XSRF-TOKEN=(.*);[\\\\s]").saveAs("xsrf_token"))).exitHereIfFailed .pause(1) .exec(http("Authenticated request") .get("/api/account") .headers(headers_http_authenticated) .check(status.is(200))) .pause(10) .repeat(2) { exec(http("Get all livros") .get("/api/livros") 
.headers(headers_http_authenticated) .check(status.is(200))) .pause(10 seconds, 20 seconds) .exec(http("Create new livro") .post("/api/livros") .headers(headers_http_authenticated) .body(StringBody("""{"id":null, "nome":"SAMPLE_TEXT", "autor":"SAMPLE_TEXT"}""")).asJSON .check(status.is(201)) .check(headerRegex("Location", "(.*)").saveAs("new_livro_url"))).exitHereIfFailed .pause(10) .repeat(5) { exec(http("Get created livro") .get("${new_livro_url}") .headers(headers_http_authenticated)) .pause(10) } .exec(http("Delete created livro") .delete("${new_livro_url}") .headers(headers_http_authenticated)) .pause(10) } val users = scenario("Users").exec(scn) setUp( users.inject(rampUsers(Integer.getInteger("users", 100)) over (Integer.getInteger("ramp", 1) minutes)) ).protocols(httpConf) }
rafaelmss/WebHipster
src/test/gatling/user-files/simulations/LivroGatlingTest.scala
Scala
mit
3,387
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources import java.io.{File, FileNotFoundException} import java.net.URI import scala.collection.mutable import org.apache.hadoop.fs.{BlockLocation, FileStatus, LocatedFileStatus, Path, RawLocalFileSystem} import org.apache.spark.SparkException import org.apache.spark.metrics.source.HiveCatalogMetrics import org.apache.spark.sql.SparkSession import org.apache.spark.sql.catalyst.util._ import org.apache.spark.sql.functions.col import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType} import org.apache.spark.util.KnownSizeEstimation class FileIndexSuite extends SharedSparkSession { private class TestInMemoryFileIndex( spark: SparkSession, path: Path, fileStatusCache: FileStatusCache = NoopCache) extends InMemoryFileIndex(spark, Seq(path), Map.empty, None, fileStatusCache) { def leafFilePaths: Seq[Path] = leafFiles.keys.toSeq def leafDirPaths: Seq[Path] = leafDirToChildrenFiles.keys.toSeq def leafFileStatuses: Iterable[FileStatus] = leafFiles.values } test("InMemoryFileIndex: leaf files are qualified paths") { withTempDir { dir => val 
file = new File(dir, "text.txt") stringToFile(file, "text") val path = new Path(file.getCanonicalPath) val catalog = new TestInMemoryFileIndex(spark, path) assert(catalog.leafFilePaths.forall(p => p.toString.startsWith("file:/"))) assert(catalog.leafDirPaths.forall(p => p.toString.startsWith("file:/"))) } } test("SPARK-26188: don't infer data types of partition columns if user specifies schema") { withTempDir { dir => val partitionDirectory = new File(dir, "a=4d") partitionDirectory.mkdir() val file = new File(partitionDirectory, "text.txt") stringToFile(file, "text") val path = new Path(dir.getCanonicalPath) val schema = StructType(Seq(StructField("a", StringType, false))) val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema)) val partitionValues = fileIndex.partitionSpec().partitions.map(_.values) assert(partitionValues.length == 1 && partitionValues(0).numFields == 1 && partitionValues(0).getString(0) == "4d") } } test("SPARK-26990: use user specified field names if possible") { withTempDir { dir => val partitionDirectory = new File(dir, "a=foo") partitionDirectory.mkdir() val file = new File(partitionDirectory, "text.txt") stringToFile(file, "text") val path = new Path(dir.getCanonicalPath) val schema = StructType(Seq(StructField("A", StringType, false))) withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema)) assert(fileIndex.partitionSchema.length == 1 && fileIndex.partitionSchema.head.name == "A") } } } test("SPARK-26230: if case sensitive, validate partitions with original column names") { withTempDir { dir => val partitionDirectory = new File(dir, "a=1") partitionDirectory.mkdir() val file = new File(partitionDirectory, "text.txt") stringToFile(file, "text") val partitionDirectory2 = new File(dir, "A=2") partitionDirectory2.mkdir() val file2 = new File(partitionDirectory2, "text.txt") stringToFile(file2, "text") val path = new Path(dir.getCanonicalPath) 
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, None) val partitionValues = fileIndex.partitionSpec().partitions.map(_.values) assert(partitionValues.length == 2) } withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { val msg = intercept[AssertionError] { val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, None) fileIndex.partitionSpec() }.getMessage assert(msg.contains("Conflicting partition column names detected")) assert("Partition column name list #[0-1]: A".r.findFirstIn(msg).isDefined) assert("Partition column name list #[0-1]: a".r.findFirstIn(msg).isDefined) } } } test("SPARK-26263: Throw exception when partition value can't be casted to user-specified type") { withTempDir { dir => val partitionDirectory = new File(dir, "a=foo") partitionDirectory.mkdir() val file = new File(partitionDirectory, "text.txt") stringToFile(file, "text") val path = new Path(dir.getCanonicalPath) val schema = StructType(Seq(StructField("a", IntegerType, false))) withSQLConf(SQLConf.VALIDATE_PARTITION_COLUMNS.key -> "true") { val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema)) val msg = intercept[RuntimeException] { fileIndex.partitionSpec() }.getMessage assert(msg == "Failed to cast value `foo` to `IntegerType` for partition column `a`") } withSQLConf(SQLConf.VALIDATE_PARTITION_COLUMNS.key -> "false") { val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, Some(schema)) val partitionValues = fileIndex.partitionSpec().partitions.map(_.values) assert(partitionValues.length == 1 && partitionValues(0).numFields == 1 && partitionValues(0).isNullAt(0)) } } } test("InMemoryFileIndex: input paths are converted to qualified paths") { withTempDir { dir => val file = new File(dir, "text.txt") stringToFile(file, "text") val unqualifiedDirPath = new Path(dir.getCanonicalPath) val unqualifiedFilePath = new Path(file.getCanonicalPath) 
require(!unqualifiedDirPath.toString.contains("file:")) require(!unqualifiedFilePath.toString.contains("file:")) val fs = unqualifiedDirPath.getFileSystem(spark.sessionState.newHadoopConf()) val qualifiedFilePath = fs.makeQualified(new Path(file.getCanonicalPath)) require(qualifiedFilePath.toString.startsWith("file:")) val catalog1 = new InMemoryFileIndex( spark, Seq(unqualifiedDirPath), Map.empty, None) assert(catalog1.allFiles.map(_.getPath) === Seq(qualifiedFilePath)) val catalog2 = new InMemoryFileIndex( spark, Seq(unqualifiedFilePath), Map.empty, None) assert(catalog2.allFiles.map(_.getPath) === Seq(qualifiedFilePath)) } } test("InMemoryFileIndex: root folders that don't exist don't throw exceptions") { withTempDir { dir => val deletedFolder = new File(dir, "deleted") assert(!deletedFolder.exists()) val catalog1 = new InMemoryFileIndex( spark, Seq(new Path(deletedFolder.getCanonicalPath)), Map.empty, None) // doesn't throw an exception assert(catalog1.listLeafFiles(catalog1.rootPaths).isEmpty) } } test("SPARK-27676: InMemoryFileIndex respects ignoreMissingFiles config for non-root paths") { import DeletionRaceFileSystem._ for ( raceCondition <- Seq( classOf[SubdirectoryDeletionRaceFileSystem], classOf[FileDeletionRaceFileSystem] ); ignoreMissingFiles <- Seq(true, false); parDiscoveryThreshold <- Seq(0, 100) ) { withClue(s"raceCondition=$raceCondition, ignoreMissingFiles=$ignoreMissingFiles, " + s"parDiscoveryThreshold=$parDiscoveryThreshold" ) { withSQLConf( SQLConf.IGNORE_MISSING_FILES.key -> ignoreMissingFiles.toString, SQLConf.PARALLEL_PARTITION_DISCOVERY_THRESHOLD.key -> parDiscoveryThreshold.toString, "fs.mockFs.impl" -> raceCondition.getName, "fs.mockFs.impl.disable.cache" -> "true" ) { def makeCatalog(): InMemoryFileIndex = new InMemoryFileIndex( spark, Seq(rootDirPath), Map.empty, None) if (ignoreMissingFiles) { // We're ignoring missing files, so catalog construction should succeed val catalog = makeCatalog() val leafFiles = 
catalog.listLeafFiles(catalog.rootPaths) if (raceCondition == classOf[SubdirectoryDeletionRaceFileSystem]) { // The only subdirectory was missing, so there should be no leaf files: assert(leafFiles.isEmpty) } else { assert(raceCondition == classOf[FileDeletionRaceFileSystem]) // One of the two leaf files was missing, but we should still list the other: assert(leafFiles.size == 1) assert(leafFiles.head.getPath == nonDeletedLeafFilePath) } } else { // We're NOT ignoring missing files, so catalog construction should fail val e = intercept[Exception] { makeCatalog() } // The exact exception depends on whether we're using parallel listing if (parDiscoveryThreshold == 0) { // The FileNotFoundException occurs in a Spark executor (as part of a job) assert(e.isInstanceOf[SparkException]) assert(e.getMessage.contains("FileNotFoundException")) } else { // The FileNotFoundException occurs directly on the driver assert(e.isInstanceOf[FileNotFoundException]) // Test that the FileNotFoundException is triggered for the expected reason: if (raceCondition == classOf[SubdirectoryDeletionRaceFileSystem]) { assert(e.getMessage.contains(subDirPath.toString)) } else { assert(raceCondition == classOf[FileDeletionRaceFileSystem]) assert(e.getMessage.contains(leafFilePath.toString)) } } } } } } } test("PartitioningAwareFileIndex listing parallelized with many top level dirs") { for ((scale, expectedNumPar) <- Seq((10, 0), (50, 1))) { withTempDir { dir => val topLevelDirs = (1 to scale).map { i => val tmp = new File(dir, s"foo=$i.txt") tmp.mkdir() new Path(tmp.getCanonicalPath) } HiveCatalogMetrics.reset() assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == 0) new InMemoryFileIndex(spark, topLevelDirs, Map.empty, None) assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == expectedNumPar) } } } test("PartitioningAwareFileIndex listing parallelized with large child dirs") { for ((scale, expectedNumPar) <- Seq((10, 0), (50, 1))) { withTempDir { dir => 
for (i <- 1 to scale) { new File(dir, s"foo=$i.txt").mkdir() } HiveCatalogMetrics.reset() assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == 0) new InMemoryFileIndex(spark, Seq(new Path(dir.getCanonicalPath)), Map.empty, None) assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == expectedNumPar) } } } test("PartitioningAwareFileIndex listing parallelized with large, deeply nested child dirs") { for ((scale, expectedNumPar) <- Seq((10, 0), (50, 4))) { withTempDir { dir => for (i <- 1 to 2) { val subdirA = new File(dir, s"a=$i") subdirA.mkdir() for (j <- 1 to 2) { val subdirB = new File(subdirA, s"b=$j") subdirB.mkdir() for (k <- 1 to scale) { new File(subdirB, s"foo=$k.txt").mkdir() } } } HiveCatalogMetrics.reset() assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == 0) new InMemoryFileIndex(spark, Seq(new Path(dir.getCanonicalPath)), Map.empty, None) assert(HiveCatalogMetrics.METRIC_PARALLEL_LISTING_JOB_COUNT.getCount() == expectedNumPar) } } } test("InMemoryFileIndex - file filtering") { assert(!InMemoryFileIndex.shouldFilterOut("abcd")) assert(InMemoryFileIndex.shouldFilterOut(".ab")) assert(InMemoryFileIndex.shouldFilterOut("_cd")) assert(!InMemoryFileIndex.shouldFilterOut("_metadata")) assert(!InMemoryFileIndex.shouldFilterOut("_common_metadata")) assert(InMemoryFileIndex.shouldFilterOut("_ab_metadata")) assert(InMemoryFileIndex.shouldFilterOut("_cd_common_metadata")) assert(InMemoryFileIndex.shouldFilterOut("a._COPYING_")) } test("SPARK-17613 - PartitioningAwareFileIndex: base path w/o '/' at end") { class MockCatalog( override val rootPaths: Seq[Path]) extends PartitioningAwareFileIndex(spark, Map.empty, None) { override def refresh(): Unit = {} override def leafFiles: mutable.LinkedHashMap[Path, FileStatus] = mutable.LinkedHashMap( new Path("mockFs://some-bucket/file1.json") -> new FileStatus() ) override def leafDirToChildrenFiles: Map[Path, Array[FileStatus]] = Map( new 
Path("mockFs://some-bucket/") -> Array(new FileStatus()) ) override def partitionSpec(): PartitionSpec = { PartitionSpec.emptySpec } } withSQLConf( "fs.mockFs.impl" -> classOf[FakeParentPathFileSystem].getName, "fs.mockFs.impl.disable.cache" -> "true") { val pathWithSlash = new Path("mockFs://some-bucket/") assert(pathWithSlash.getParent === null) val pathWithoutSlash = new Path("mockFs://some-bucket") assert(pathWithoutSlash.getParent === null) val catalog1 = new MockCatalog(Seq(pathWithSlash)) val catalog2 = new MockCatalog(Seq(pathWithoutSlash)) assert(catalog1.allFiles().nonEmpty) assert(catalog2.allFiles().nonEmpty) } } test("InMemoryFileIndex with empty rootPaths when PARALLEL_PARTITION_DISCOVERY_THRESHOLD" + "is a nonpositive number") { withSQLConf(SQLConf.PARALLEL_PARTITION_DISCOVERY_THRESHOLD.key -> "0") { new InMemoryFileIndex(spark, Seq.empty, Map.empty, None) } val e = intercept[IllegalArgumentException] { withSQLConf(SQLConf.PARALLEL_PARTITION_DISCOVERY_THRESHOLD.key -> "-1") { new InMemoryFileIndex(spark, Seq.empty, Map.empty, None) } }.getMessage assert(e.contains("The maximum number of paths allowed for listing files at " + "driver side must not be negative")) } test("refresh for InMemoryFileIndex with FileStatusCache") { withTempDir { dir => val fileStatusCache = FileStatusCache.getOrCreate(spark) val dirPath = new Path(dir.getAbsolutePath) val fs = dirPath.getFileSystem(spark.sessionState.newHadoopConf()) val catalog = new TestInMemoryFileIndex(spark, dirPath, fileStatusCache) val file = new File(dir, "text.txt") stringToFile(file, "text") assert(catalog.leafDirPaths.isEmpty) assert(catalog.leafFilePaths.isEmpty) catalog.refresh() assert(catalog.leafFilePaths.size == 1) assert(catalog.leafFilePaths.head == fs.makeQualified(new Path(file.getAbsolutePath))) assert(catalog.leafDirPaths.size == 1) assert(catalog.leafDirPaths.head == fs.makeQualified(dirPath)) } } test("SPARK-20280 - FileStatusCache with a partition with very many files") { /* fake the 
size, otherwise we need to allocate 2GB of data to trigger this bug */ class MyFileStatus extends FileStatus with KnownSizeEstimation { override def estimatedSize: Long = 1000 * 1000 * 1000 } /* files * MyFileStatus.estimatedSize should overflow to negative integer * so, make it between 2bn and 4bn */ val files = (1 to 3).map { i => new MyFileStatus() } val fileStatusCache = FileStatusCache.getOrCreate(spark) fileStatusCache.putLeafFiles(new Path("/tmp", "abc"), files.toArray) } test("SPARK-20367 - properly unescape column names in inferPartitioning") { withTempPath { path => val colToUnescape = "Column/#%'?" spark .range(1) .select(col("id").as(colToUnescape), col("id")) .write.partitionBy(colToUnescape).parquet(path.getAbsolutePath) assert(spark.read.parquet(path.getAbsolutePath).schema.exists(_.name == colToUnescape)) } } test("SPARK-25062 - InMemoryFileIndex stores BlockLocation objects no matter what subclass " + "the FS returns") { withSQLConf("fs.file.impl" -> classOf[SpecialBlockLocationFileSystem].getName) { withTempDir { dir => val file = new File(dir, "text.txt") stringToFile(file, "text") val inMemoryFileIndex = new TestInMemoryFileIndex(spark, new Path(file.getCanonicalPath)) val blockLocations = inMemoryFileIndex.leafFileStatuses.flatMap( _.asInstanceOf[LocatedFileStatus].getBlockLocations) assert(blockLocations.forall(_.getClass == classOf[BlockLocation])) } } } test("Add an option to ignore block locations when listing file") { withTempDir { dir => val partitionDirectory = new File(dir, "a=foo") partitionDirectory.mkdir() for (i <- 1 to 8) { val file = new File(partitionDirectory, i + ".txt") stringToFile(file, "text") } val path = new Path(dir.getCanonicalPath) val fileIndex = new InMemoryFileIndex(spark, Seq(path), Map.empty, None) withSQLConf(SQLConf.IGNORE_DATA_LOCALITY.key -> "false", "fs.file.impl" -> classOf[SpecialBlockLocationFileSystem].getName) { val withBlockLocations = fileIndex. 
listLeafFiles(Seq(new Path(partitionDirectory.getPath))) withSQLConf(SQLConf.IGNORE_DATA_LOCALITY.key -> "true") { val withoutBlockLocations = fileIndex. listLeafFiles(Seq(new Path(partitionDirectory.getPath))) assert(withBlockLocations.size == withoutBlockLocations.size) assert(withBlockLocations.forall(b => b.isInstanceOf[LocatedFileStatus] && b.asInstanceOf[LocatedFileStatus].getBlockLocations.nonEmpty)) assert(withoutBlockLocations.forall(b => b.isInstanceOf[FileStatus] && !b.isInstanceOf[LocatedFileStatus])) assert(withoutBlockLocations.forall(withBlockLocations.contains)) } } } } } object DeletionRaceFileSystem { val rootDirPath: Path = new Path("mockFs:///rootDir/") val subDirPath: Path = new Path(rootDirPath, "subDir") val leafFilePath: Path = new Path(subDirPath, "leafFile") val nonDeletedLeafFilePath: Path = new Path(subDirPath, "nonDeletedLeafFile") val rootListing: Array[FileStatus] = Array(new FileStatus(0, true, 0, 0, 0, subDirPath)) val subFolderListing: Array[FileStatus] = Array( new FileStatus(0, false, 0, 100, 0, leafFilePath), new FileStatus(0, false, 0, 100, 0, nonDeletedLeafFilePath)) } // Used in SPARK-27676 test to simulate a race where a subdirectory is deleted // between back-to-back listing calls. class SubdirectoryDeletionRaceFileSystem extends RawLocalFileSystem { import DeletionRaceFileSystem._ override def getScheme: String = "mockFs" override def listStatus(path: Path): Array[FileStatus] = { if (path == rootDirPath) { rootListing } else if (path == subDirPath) { throw new FileNotFoundException(subDirPath.toString) } else { throw new IllegalArgumentException() } } } // Used in SPARK-27676 test to simulate a race where a file is deleted between // being listed and having its size / file status checked. 
class FileDeletionRaceFileSystem extends RawLocalFileSystem { import DeletionRaceFileSystem._ override def getScheme: String = "mockFs" override def listStatus(path: Path): Array[FileStatus] = { if (path == rootDirPath) { rootListing } else if (path == subDirPath) { subFolderListing } else { throw new IllegalArgumentException() } } override def getFileBlockLocations( file: FileStatus, start: Long, len: Long): Array[BlockLocation] = { if (file.getPath == leafFilePath) { throw new FileNotFoundException(leafFilePath.toString) } else { Array.empty } } } class FakeParentPathFileSystem extends RawLocalFileSystem { override def getScheme: String = "mockFs" override def getUri: URI = { URI.create("mockFs://some-bucket") } } class SpecialBlockLocationFileSystem extends RawLocalFileSystem { class SpecialBlockLocation( names: Array[String], hosts: Array[String], offset: Long, length: Long) extends BlockLocation(names, hosts, offset, length) override def getFileBlockLocations( file: FileStatus, start: Long, len: Long): Array[BlockLocation] = { Array(new SpecialBlockLocation(Array("dummy"), Array("dummy"), 0L, file.getLen)) } }
caneGuy/spark
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileIndexSuite.scala
Scala
apache-2.0
21,500
/* * Copyright 2012-2017 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.edda import java.util.Properties import com.netflix.config.{ConcurrentCompositeConfiguration, DynamicPropertyFactory} import org.apache.commons.configuration.MapConfiguration import org.scalatest.FunSuite import scala.actors.Actor import scala.concurrent._ import scala.concurrent.duration._ import scala.concurrent.ExecutionContext.Implicits.global import org.apache.log4j.Logger import org.apache.log4j.Level class MergedCollectionTest extends FunSuite { import Utils._ import Queryable._ implicit val req = RequestId() val logger = Logger.getRootLogger() //logger.setLevel(Level.DEBUG) def SYNC[T](future: Awaitable[T]): T = { Await.result(future, Duration(5, SECONDS)) } test("query") { DynamicPropertyFactory.getInstance() val composite = DynamicPropertyFactory. 
getBackingConfigurationSource.asInstanceOf[ConcurrentCompositeConfiguration] val config = new MapConfiguration(new Properties); composite.addConfigurationAtFront(config, "testConfig") config.addProperty("edda.collection.jitter.enabled", "false") val collA = new TestCollection("test.A") collA.elector.leader = false val collB = new TestCollection("test.B") collB.elector.leader = false val merged = new MergedCollection("merged.collection", Seq(collA, collB)) merged.start() collA.dataStore.get.recordSet = collA.dataStore.get.recordSet.copy(records = Seq(Record("a", 1), Record("b", 2), Record("c", 3))) collB.dataStore.get.recordSet = collB.dataStore.get.recordSet.copy(records = Seq(Record("A", 1), Record("B", 2), Record("C", 3))) SYNC( collA.addObserver(Actor.self) ) SYNC( collB.addObserver(Actor.self) ) Actor.self receive { case Collection.UpdateOK(`collA`, d, meta) => Unit } Actor.self receive { case Collection.UpdateOK(`collB`, d, meta) => Unit } assertResult(2) { SYNC ( merged.query(Map("data" -> 1)) ).size } assertResult(4) { SYNC ( merged.query(Map("data" -> Map("$gte" -> 2))) ).size } assertResult(2) { SYNC ( merged.query(Map("id" -> Map("$in" -> Seq("A", "a")))) ).size } SYNC( collA.delObserver(Actor.self) ) SYNC( collB.delObserver(Actor.self) ) merged.stop() } }
ralph-tice/edda
src/test/scala/com/netflix/edda/MergedCollectionTest.scala
Scala
apache-2.0
2,875
package priv.sp

/** Helper functions and reactions shared by the card "house" implementations. */
package object house {
  import priv.sp.update._

  // Pairwise selectors for reduce-style scans over slots; ties keep s1.
  def lowestLife(s1: SlotUpdate, s2: SlotUpdate) = if (s2.get.life < s1.get.life) s2 else s1
  def highestLife(s1: SlotUpdate, s2: SlotUpdate) = if (s2.get.life > s1.get.life) s2 else s1
  def strongest(s1: SlotUpdate, s2: SlotUpdate) = if (s2.get.attack > s1.get.attack) s2 else s1

  /** Index of the empty slot closest to `selected`, or None when the board is full. */
  def nearestEmptySlot(selected: Int, player: PlayerUpdate): Option[Int] = {
    val slots = player.slots.slots
    val dists = player.value.slotList collect { case n if slots(n).value.isEmpty ⇒ (n, math.abs(n - selected)) }
    if (dists.isEmpty) None else Some(dists.minBy(_._2)._1)
  }

  /** Nearest empty slot other than `selected` whose opposing slot is occupied
    * (or unoccupied when `opposed = false`); None when no slot qualifies.
    */
  def nearestSlotOpposed(selected: Int, player: PlayerUpdate, opposed: Boolean = true): Option[Int] = {
    val slots = player.slots.slots
    val otherSlots = player.otherPlayer.getSlots
    val dists = player.value.slotList collect { case n if n != selected && slots(n).value.isEmpty && otherSlots.isDefinedAt(n) == opposed ⇒ (n, math.abs(n - selected)) }
    if (dists.isEmpty) None else Some(dists.minBy(_._2)._1)
  }

  // Slots holding a card from a basic house (index 0-3) vs. the special house (index 4).
  @inline def nonSpecial(p: PlayerId, state: GameState): List[Int] = listSlotWhere(p, state)(_.houseIndex < 4)
  @inline def special(p: PlayerId, state: GameState): List[Int] = listSlotWhere(p, state)(_.houseIndex == 4)

  /** Indexes of player `p`'s occupied slots whose card satisfies `f`.
    * Note: built by prepending, so the result is in reverse slot order.
    */
  def listSlotWhere(p: PlayerId, state: GameState)(f : Card => Boolean): List[Int] = {
    state.players(p).slots.foldLeft(List.empty[Int]) { case (acc, (i, s)) ⇒ if (f(s.card)) i :: acc else acc }
  }

  // Factories: grow the owner's mana (ManaGrowthReaction) or the opponent's
  // mana (OppManaGrowthReaction) for the given house indexes.
  object ManaGrowthReaction{
    def apply(amount: Int, houseIndexes: Int*): ManaGrowthReaction = new ManaGrowthReaction(amount, true, houseIndexes)
  }
  object OppManaGrowthReaction{
    def apply(amount: Int, houseIndexes: Int*): ManaGrowthReaction = new ManaGrowthReaction(amount, false, houseIndexes)
  }

  /** Reaction that raises mana growth by `amount` while the attached creature is
    * in play and reverts it on removal. `selected` is presumably the slot the
    * reaction is attached to (inherited from Reaction -- confirm in its definition).
    */
  class ManaGrowthReaction(amount : Int, owner : Boolean, houseIndexes : Seq[Int]) extends Reaction {
    def p = if (owner) selected.player else selected.otherPlayer
    override def onAdd(slot: SlotUpdate): Unit = {
      // Only trigger for the slot this reaction belongs to.
      if (slot.num == selected.num) {
        p.houses.incrGrowth(amount, houseIndexes : _ *)
      }
    }
    override def onMyRemove(dead: Option[Dead]): Unit = {
      // Undo the growth applied in onAdd.
      p.houses.incrGrowth(- amount, houseIndexes : _ *)
    }
  }
}
illim/freespectrogdx
core/src/main/scala/priv/sp/house/package.scala
Scala
gpl-3.0
2,309
/*
 * Copyright 2016 Guy Van den Broeck and Wannes Meert (UCLA and KU Leuven)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.ucla.cs.starai.forclift.cli

import java.io._
import java.lang.System._

import scala.collection.JavaConverters._
import scala.io._

import org.clapper.argot._

import edu.ucla.cs.starai.forclift._
import edu.ucla.cs.starai.forclift.inference._
import edu.ucla.cs.starai.forclift.languages._
import edu.ucla.cs.starai.forclift.learning.structure.StructureLearner
import edu.ucla.cs.starai.forclift.util.ExternalBinaries
import edu.ucla.cs.starai.forclift.languages.mln._
import edu.ucla.cs.starai.forclift.learning.Likelihood
import edu.ucla.cs.starai.forclift.languages.focnf._

/** Command-line entry point for Forclift ("wfomc"): parses flags with Argot and
  * dispatches to the debug, inference, learning, and output sub-CLIs in order.
  */
object CLI extends App {

  // Fails at startup when the build still has JVM-level assertions compiled in:
  // `assert(false, ...)` only throws if assertions were NOT elided. Release
  // builds are expected to elide them (see the compiler flags referenced below).
  assertFalse()

  val argumentParser = new ArgotParser(
    "wfomc",
    false,
    80,
    Some("Version 3.1"),
    Some("""
EXAMPLE

java -jar forclift.jar -q "smokes(Guy)" ./models/friendsmoker.mln
java -jar forclift.jar -q "smokes(Guy)" ./models/friendsmoker.mln
"""),
    true)

  // Sub-CLIs register their own flags on the shared parser at construction time,
  // so construction order here determines help/flag ordering.
  val debugCLI = new DebugCLI(argumentParser)
  val inputCLI = new InputCLI(argumentParser,debugCLI)
  val inferenceCLI = new InferenceCLI(argumentParser,debugCLI,inputCLI)
  val learningCLI = new LearningCLI(argumentParser,debugCLI,inputCLI)
  val outputCLI = new OutputCLI(argumentParser,debugCLI,inputCLI)

  /* PARSE FLAGS AND HANDLE LOGIC */
  try {
    argumentParser.parse(args)
    debugCLI.runDebugging(inputCLI)
    inferenceCLI.runInference()
    learningCLI.runLearning()
    outputCLI.runOutput()
  } catch {
    // Argot signals bad usage via exception; print its message and exit non-zero.
    case e: ArgotUsageException =>
      println(e.message)
      System.exit(1)
  }

  /** Throws iff JVM assertions are enabled; used as a build-configuration guard. */
  def assertFalse() = assert(false, "Assertions are enabled in CLI: check compiler flags")
}
UCLA-StarAI/Forclift
src/main/scala/edu/ucla/cs/starai/forclift/cli/CLI.scala
Scala
apache-2.0
2,238
/*
 * Copyright © 2014 Nemanja Stanarevic <nemanja@alum.mit.edu>
 *
 * Made with ❤ in NYC at Hacker School <http://hackerschool.com>
 *
 * Licensed under the GNU Affero General Public License, Version 3
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at:
 *
 * <http://www.gnu.org/licenses/agpl-3.0.html>
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package gmailapi.oauth2

import gmailapi.resources.GmailResource
import org.json4s.{ DefaultFormats, FieldSerializer }
import spray.httpx.Json4sJacksonSupport

/** OAuth2 token state plus optional profile information for a Gmail user.
  *
  * @param accessToken  OAuth2 bearer token used on API calls
  * @param refreshToken token used to obtain a new access token
  * @param expiration   access-token expiry (epoch value; units not shown here --
  *                     presumably seconds or millis, confirm against the token
  *                     exchange code that populates it)
  * @param userId       Google account id, when known
  * @param email        account email address, when known
  * @param scope        granted OAuth2 scopes (empty until fetched)
  * @param name         full display name from the profile, when fetched
  * @param givenName    first name, when fetched
  * @param familyName   last name, when fetched
  * @param picture      profile picture URL, when fetched
  * @param gender       profile gender, when fetched
  * @param locale       profile locale, when fetched
  */
case class OAuth2Identity(
  accessToken: String,
  refreshToken: String,
  expiration: Long,
  userId: Option[String] = None,
  email: Option[String] = None,
  scope: Seq[String] = Nil,
  name: Option[String] = None,
  givenName: Option[String] = None,
  familyName: Option[String] = None,
  picture: Option[String] = None,
  gender: Option[String] = None,
  locale: Option[String] = None) extends GmailResource
nemanja-stanarevic/gmail-api-scala-client
src/main/scala/gmailapi/oauth2/OAuth2Identity.scala
Scala
agpl-3.0
1,285
/** soar
  *
  * Copyright (c) 2017 Hugo Firth
  * Email: <me@hugofirth.com/>
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at:
  *
  *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
package uk.ac.ncl.la.soar.glance.eval

import java.time.temporal.TemporalAmount
import java.time.{Duration, Instant}
import java.util.UUID

import uk.ac.ncl.la.soar.StudentNumber
import uk.ac.ncl.la.soar.glance.util.Time
import uk.ac.ncl.la.soar.glance.util.Times._

import scala.annotation.tailrec

/**
  * ADT describing session types, describing student activities
  * TODO: Fix Time for JS compilation
  */
sealed trait Session { self =>
  def start: Instant
  def duration: Int
  def student: StudentNumber
}

/** A lab/cluster login session; duration is derived from start/end. */
case class ClusterSession(start: Instant, end: Instant, machine: String, student: StudentNumber, id: UUID)
  extends Session {
  //TODO: Investigate better handling of the time situation
  // Duration in whole seconds between start and end.
  override val duration = Duration.between(start, end).getSeconds.toInt
}

/** A lecture-recording ("recap") viewing session with an explicit duration. */
case class RecapSession(id: UUID, start: Instant, student: StudentNumber, duration: Int) extends Session

object Session {

  /** Summarises `sessions` over [startInstant, endInstant) partitioned into
    * consecutive chunks of length `chunk`: mean duration per chunk plus a
    * per-student duration breakdown.
    */
  def getSummary[S <: Session](sessions: List[S],
                               startInstant: Instant,
                               endInstant: Instant,
                               chunk: TemporalAmount): SessionSummary =
    new SessionSummary {

      //Build list of time chunks
      // Accumulates (chunkStart, chunkEnd) pairs from startInstant until the
      // cursor passes endInstant; result is in reverse chronological order.
      @tailrec
      private def constructChunks(chunks: List[(Instant,Instant)], cursor: Instant): List[(Instant,Instant)] = {
        if (cursor.isBefore(endInstant)) {
          val dCursor = cursor.plus(chunk)
          constructChunks((cursor, dCursor) :: chunks, dCursor)
        } else chunks
      }

      private val chunks = constructChunks(List.empty[(Instant, Instant)], startInstant)

      //TODO: Construct more performant recursive version of this
      // Maps each chunk to `process` applied to the sessions starting strictly
      // inside it. NOTE(review): `isAfter`/`isBefore` are strict, so a session
      // starting exactly on a chunk boundary is dropped -- confirm intended.
      def chunkSessions[A](sessions: List[S], process: List[S] => A) =
        chunks.map({ case c @ (cStart, cEnd) =>
          c -> process(sessions.filter(s => s.start.isAfter(cStart) && s.start.isBefore(cEnd)))
        }).toMap

      // NOTE(review): an empty chunk yields 0.0/0 == NaN here; also `mapValues`
      // is a lazy view in Scala 2.12, so this closure may re-run per access.
      private val meanDurationPerChunk = chunkSessions(sessions, identity).mapValues{ ss =>
        val totDuration = ss.map(_.duration).sum
        val numStudents = ss.groupBy(_.student).size
        totDuration.toDouble / numStudents
      }

      // Convert Instant-keyed chunk maps to the Time-keyed form of the public API.
      private def instantKeysToTime[V](m: Map[(Instant, Instant), V]) = m.map { case ((s, e), v) => (s.toTime, e.toTime) -> v }

      private val perStudentDurationPerChunk: Map[StudentNumber, Map[(Time, Time), Double]] = {
        //Group sessions by student
        val byStudent: Map[StudentNumber, List[S]] = sessions.groupBy(_.student)
        //Chunk Sessions for each student and sum the duration
        byStudent.mapValues { stS =>
          val cS = chunkSessions(stS, ss => ss.map(_.duration.toDouble).sum)
          instantKeysToTime(cS)
        }
      }

      override def meanDuration: Map[(Time, Time), Double] = instantKeysToTime(meanDurationPerChunk)

      override def start: Time = startInstant.toTime

      override def studentDuration: Map[StudentNumber, Map[(Time, Time), Double]] = perStudentDurationPerChunk

      override def end: Time = endInstant.toTime
    }
}
NewcastleComputingScience/student-outcome-accelerator
glance-core/jvm/src/main/scala/uk/ac/ncl/la/soar/glance/eval/Session.scala
Scala
apache-2.0
3,786
/**
 * Copyright (C) 2014 Orbeon, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it under the terms of the
 * GNU Lesser General Public License as published by the Free Software Foundation; either version
 * 2.1 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Lesser General Public License for more details.
 *
 * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
 */
package org.orbeon.oxf.fr.embedding

import java.io.Writer
import javax.servlet.http.{HttpServletRequest, HttpServletResponse}

import org.apache.commons.io.IOUtils
import org.apache.http.client.CookieStore
import org.apache.http.impl.client.BasicCookieStore
import org.orbeon.io.IOUtils._
import org.orbeon.oxf.common.OXFException
import org.orbeon.oxf.externalcontext.ExternalContext
import org.orbeon.oxf.fr.embedding.servlet.ServletEmbeddingContextWithResponse
import org.orbeon.oxf.http.Headers._
import org.orbeon.oxf.http.HttpMethod.{GET, POST}
import org.orbeon.oxf.http._
import org.orbeon.oxf.util.CoreUtils._
import org.orbeon.oxf.util.MarkupUtils._
import org.orbeon.oxf.util.PathUtils._
import org.orbeon.oxf.util.{ContentTypes, PathUtils}
import org.slf4j.LoggerFactory

import scala.jdk.CollectionConverters._
import scala.collection.immutable
import scala.util.{Failure, Success}
import scala.util.matching.Regex

/** Support for embedding Form Runner in a host application: proxies pages,
  * resources and submissions to a remote Orbeon server, rewriting WSRP-encoded
  * URLs/namespaces in returned content and maintaining a per-session cookie store.
  */
object APISupport {

  import Private._

  val Logger = LoggerFactory.getLogger(List("org", "orbeon", "embedding") mkString ".") // so JARJAR doesn't touch this!

  val XFormsServerSubmit = "/xforms-server-submit"

  /** Proxies a full Form Runner page; redirects from the remote side are not supported here. */
  def proxyPage(
    baseURL : String,
    path    : String,
    headers : immutable.Seq[(String, String)] = Nil,
    params  : immutable.Seq[(String, String)] = Nil)(
    implicit ctx : EmbeddingContextWithResponse
  ): Unit = {

    Logger.debug(s"proxying page for path = `$path`")

    val url = formRunnerURL(baseURL, path, embeddable = true)

    callService(RequestDetails(None, url, path, headers, params))._1 match {
      case content: StreamedContent =>
        useAndClose(content)(writeResponseBody(mustRewriteForMediatype))
      case Redirect(_, _) =>
        throw new UnsupportedOperationException
    }
  }

  /** Proxies a whitelisted asset/Ajax resource; returns 404 when the path does
    * not match the configured resource whitelist (see `sanitizeResourceId`).
    */
  def proxyServletResources(
    req          : HttpServletRequest,
    res          : HttpServletResponse,
    namespace    : String,
    resourcePath : String
  ): Unit =
    withSettings(req, res.getWriter) { settings =>

      implicit val ctx = new ServletEmbeddingContextWithResponse(
        req,
        Right(res),
        namespace,
        settings.orbeonPrefix,
        settings.httpClient
      )

      APISupport.sanitizeResourceId(resourcePath, settings.FormRunnerResourcePathRegex) match {
        case Some(sanitizedResourcePath) =>

          val url = formRunnerURL(settings.formRunnerURL, sanitizedResourcePath, embeddable = false)

          // Forward a request body only for POST, and only when a length is known.
          val contentFromRequest =
            req.getMethod == "POST" option
              StreamedContent(
                req.getInputStream,
                Option(req.getContentType),
                Some(req.getContentLength.toLong) filter (_ >= 0L),
                None
              )

          proxyResource(
            RequestDetails(
              content = contentFromRequest,
              url     = url,
              path    = sanitizedResourcePath,
              headers = proxyCapitalizeAndCombineHeaders(requestHeaders(req).toList, request = true).toList,
              params  = Nil
            )
          )

        case None =>
          ctx.setStatusCode(HttpServletResponse.SC_NOT_FOUND)
      }
    }

  /** Proxies an XForms submission POST; follows exit-portal redirects via
    * `sendRedirect`, otherwise streams the response back (rewritten as needed).
    */
  def proxySubmission(
    req : HttpServletRequest,
    res : HttpServletResponse
  ): Unit =
    withSettings(req, res.getWriter) { settings =>

      Logger.debug("proxying submission")

      implicit val ctx = new ServletEmbeddingContextWithResponse(
        req,
        Right(res),
        APISupport.NamespacePrefix + "0",
        settings.orbeonPrefix,
        settings.httpClient
      )

      val contentFromRequest =
        StreamedContent(
          inputStream   = req.getInputStream,
          contentType   = Option(req.getContentType),
          contentLength = Some(req.getContentLength.toLong) filter (_ >= 0),
          title         = None
        )

      val (contentOrRedirect, httpResponse) =
        APISupport.callService(RequestDetails(
          content = Some(contentFromRequest),
          url     = settings.formRunnerURL.dropTrailingSlash + XFormsServerSubmit,
          path    = XFormsServerSubmit,
          headers = proxyCapitalizeAndCombineHeaders(APISupport.requestHeaders(req).toList, request = true).toList,
          params  = Nil
        ))

      contentOrRedirect match {
        case Redirect(location, true) =>
          res.sendRedirect(location)
        case Redirect(_, false) =>
          throw new NotImplementedError
        case content: StreamedContent =>
          ctx.setStatusCode(httpResponse.statusCode)
          httpResponse.content.contentType foreach (ctx.setHeader(Headers.ContentType, _))
          proxyCapitalizeAndCombineHeaders(httpResponse.headers, request = false) foreach (ctx.setHeader _).tupled
          useAndClose(content)(APISupport.writeResponseBody(mustRewriteForMediatype))
      }
    }

  /** Streams a remote resource to the embedding response, copying status and headers. */
  def proxyResource(requestDetails: RequestDetails)(implicit ctx: EmbeddingContextWithResponse): Unit = {

    Logger.debug(s"proxying resource for URL = `${requestDetails.url}`")

    val res = connectURL(requestDetails)

    ctx.setStatusCode(res.statusCode)
    res.content.contentType foreach (ctx.setHeader(Headers.ContentType, _))

    proxyCapitalizeAndCombineHeaders(res.headers, request = false) foreach (ctx.setHeader _).tupled

    // Rewrite when either the mediatype or the specific path requires it.
    useAndClose(res.content)(writeResponseBody(mediatype => mustRewriteForMediatype(mediatype) || mustRewriteForPath(requestDetails.path)))
  }

  // Build Form Runner paths/URLs.
  def formRunnerPath(app: String, form: String, mode: String, documentId: Option[String], query: Option[String]) =
    PathUtils.appendQueryString(s"/fr/$app/$form/$mode${documentId map ("/" +) getOrElse ""}", query getOrElse "")

  def formRunnerHomePath(query: Option[String]) =
    PathUtils.appendQueryString("/fr/", query getOrElse "")

  def formRunnerURL(baseURL: String, path: String, embeddable: Boolean) =
    PathUtils.appendQueryString(baseURL.dropTrailingSlash + path, if (embeddable) s"${ExternalContext.EmbeddableParam}=true" else "")

  /** All incoming request headers as (name, values) pairs. */
  def requestHeaders(req: HttpServletRequest) =
    for {
      name   <- req.getHeaderNames.asScala
      values = req.getHeaders(name).asScala.toList
    } yield name -> values

  // Match on headers in a case-insensitive way, but the header we sent follows the capitalization of the
  // header specified in the init parameter.
  def headersToForward(clientHeaders: List[(String, List[String])], configuredHeaders: Map[String, String]) =
    for {
      (name, value) <- proxyAndCombineRequestHeaders(clientHeaders)
      originalName  <- configuredHeaders.get(name.toLowerCase)
    } yield originalName -> value

  // Call the Orbeon service at the other end
  def callService(requestDetails: RequestDetails)(implicit ctx: EmbeddingContext): (StreamedContentOrRedirect, HttpResponse) = {

    Logger.debug(s"calling service for URL = `${requestDetails.url}`")

    val cx = connectURL(requestDetails)

    val redirectOrContent =
      if (StatusCode.isRedirectCode(cx.statusCode)) {
        // https://github.com/orbeon/orbeon-forms/issues/2967
        val location = cx.headers("Location").head
        // Leave the portal only for absolute redirect URLs.
        Redirect(location, exitPortal = urlHasProtocol(location))
      } else
        cx.content

    redirectOrContent -> cx
  }

  /** Text, JSON and XML bodies get WSRP URL/namespace rewriting. */
  def mustRewriteForMediatype(mediatype: String): Boolean =
    ContentTypes.isTextOrJSONContentType(mediatype) ||
    ContentTypes.isXMLMediatype(mediatype)

  // TODO: Duplicated from `XFormsAssetServer`
  val XFormServerPrefix         = "/xforms-server/"
  val FormDynamicResourcesPath  = XFormServerPrefix + "form/dynamic/"
  val FormDynamicResourcesRegex = s"$FormDynamicResourcesPath(.+).js".r

  /** Dynamic form JS must always be rewritten, whatever its mediatype. */
  def mustRewriteForPath(path: String): Boolean =
    path match {
      case FormDynamicResourcesRegex(_) => true
      case _                            => false
    }

  /** Writes `content` to the embedding response, rewriting WSRP encodings when
    * `doRewrite(mediatype)` holds, otherwise streaming bytes through unchanged.
    */
  def writeResponseBody(doRewrite: String => Boolean)(content: Content)(implicit ctx: EmbeddingContextWithResponse): Unit =
    content.contentType flatMap ContentTypes.getContentTypeMediaType match {
      case Some(mediatype) if doRewrite(mediatype) =>
        // Text/JSON/XML content type: rewrite response content
        val encoding = content.contentType flatMap ContentTypes.getContentTypeCharset getOrElse ExternalContext.StandardCharacterEncoding

        val contentAsString = useAndClose(content.inputStream)(IOUtils.toString(_, encoding))

        val encodeForXML = ContentTypes.isXMLMediatype(mediatype)

        // Decoded URLs going into XML content must be XML-escaped.
        def decodeURL(encoded: String) = {
          val decodedURL = ctx.decodeURL(encoded)
          if (encodeForXML) decodedURL.escapeXmlMinimal else decodedURL
        }

        decodeWSRPContent(
          contentAsString,
          ctx.namespace,
          decodeURL,
          ctx.writer
        )
      case other =>
        // All other types: just copy
        Logger.debug(s"using ctx.outputStream for mediatype = `$other`")
        ctx.outputStream match {
          case Success(os) =>
            useAndClose(content.inputStream)(IOUtils.copy(_, os))
          case Failure(t) =>
            Logger.warn(s"unable to obtain `OutputStream` possibly because of a missing mediatype downstream", t)
            ctx.writer.write("unable to provide content")
        }
    }

  /** Stores `settings` as a request attribute for the duration of `body`. */
  def scopeSettings[T](req: HttpServletRequest, settings: EmbeddingSettings)(body: => T): T = {
    req.setAttribute(SettingsKey, settings)
    try body
    finally req.removeAttribute(SettingsKey)
  }

  /** Runs `body` with the settings placed by `scopeSettings`/the embedding filter,
    * or writes an error message to the response when the filter is missing.
    */
  def withSettings[T](req: HttpServletRequest, writer: => Writer)(body: EmbeddingSettings => T): Unit =
    Option(req.getAttribute(SettingsKey).asInstanceOf[EmbeddingSettings]) match {
      case Some(settings) =>
        body(settings)
      case None =>
        val msg = "ERROR: Orbeon Forms embedding filter is not configured."
        Logger.error(msg)
        writer.write(msg)
    }

  /** Returns the next per-request namespace ("o0", "o1", ...) using a request-scoped counter. */
  def nextNamespace(req: HttpServletRequest) = {

    val newValue =
      Option(req.getAttribute(LastNamespaceIndexKey).asInstanceOf[Integer]) match {
        case Some(value) => value + 1
        case None        => 0
      }

    req.setAttribute(LastNamespaceIndexKey, newValue)
    NamespacePrefix + newValue
  }

  // Extended-mode ((?x)) case-insensitive regex whitelisting proxied resource paths.
  val DefaultFormRunnerResourcePath = """(?xi)
    (
      # XForms server paths
      (?:
        /xforms-server
        (?:
          (?:
            | /upload
            | /dynamic/[^/^.]+
            | -submit
          )
          |
          (?: /.+[.] (?: css|js ) )
        )
      )
      |
      # PDF/TIFF service paths
      (?:
        /fr/service/
        [^/^.]+ / [^/^.]+ /
        (?: pdf|tiff ) /
        [^/^.]+ /
        [0-9A-Za-z\-]+
        (?: /[^/]+ )?
        [.] (?: pdf|tiff )
      )
      |
      # PDF/TIFF resource paths
      (?:
        /fr/
        [^/^.]+ / [^/^.]+ /
        (?: pdf|tiff ) /
        [^/^.]+
      )
      |
      # Other asset paths
      (?:
        # Optional versioned resources token
        (?: / [^/^.]+ )?
        /
        (?: apps/fr/style | ops | xbl | forms/orbeon/builder/images )
        / .+ [.]
        (?: gif|css|pdf|js|map|png|jpg|ico|svg|ttf|eot|woff|woff2 )
      )
    )
  """

  // Resources are whitelisted to prevent unauthorized access to pages
  def sanitizeResourceId(s: String, FormRunnerResourcePath: Regex): Option[String] = {

    // First level of sanitation: parse, normalize and keep the path only
    def sanitizeResourcePath(s: String) =
      new java.net.URI(s).normalize().getPath

    // Reject any remaining parent-directory traversal.
    def hasNoParent(s: String) =
      ! s.contains("/..") && ! s.contains("../")

    Option(s) map sanitizeResourcePath filter hasNoParent collect {
      case FormRunnerResourcePath(resourcePath) => resourcePath
    }
  }

  val NamespacePrefix = "o"

  private object Private {

    val SettingsKey           = "orbeon.form-runner.filter-settings"
    val RemoteSessionIdKey    = "orbeon.form-runner.remote-session-id"
    val LastNamespaceIndexKey = "orbeon.form-runner.last-namespace-index"

    // POST when we get RequestDetails for:
    //
    // - actions requests
    // - resources requests: Ajax requests, form posts, and uploads
    //
    // GET otherwise for:
    //
    // - render requests
    // - resources: typically image, CSS, JavaScript, etc.
    def connectURL(requestDetails: RequestDetails)(implicit ctx: EmbeddingContext): HttpResponse =
      ctx.httpClient.connect(
        url         = recombineQuery(requestDetails.url, requestDetails.params),
        credentials = None,
        cookieStore = getOrCreateCookieStore,
        method      = if (requestDetails.content.isEmpty) GET else POST,
        headers     = requestDetails.headersMapWithContentType + (Headers.OrbeonClient -> List(ctx.client)),
        content     = requestDetails.content
      )

    // Parse a string containing WSRP encodings and encode the URLs and namespaces
    def decodeWSRPContent(content: String, ns: String, decodeURL: String => String, writer: Writer): Unit = {

      val stringLength = content.length
      var currentIndex = 0
      var index        = 0

      import org.orbeon.oxf.externalcontext.WSRPURLRewriter.{decodeURL => _, _}

      // Scan for WSRP base tags; each hit is an escaped tag, an encoded URL,
      // or a namespace placeholder.
      while ({index = content.indexOf(BaseTag, currentIndex); index} != -1) {

        // Write up to the current mark
        writer.write(content, currentIndex, index - currentIndex)

        // Check if escaping is requested
        if (index + BaseTagLength * 2 <= stringLength && content.substring(index + BaseTagLength, index + BaseTagLength * 2) == BaseTag) {
          // Write escaped tag, update index and keep looking
          writer.write(BaseTag)
          currentIndex = index + BaseTagLength * 2
        } else if (index < stringLength - BaseTagLength && content.charAt(index + BaseTagLength) == '?') {
          // URL encoding
          // Find the matching end mark
          val endIndex = content.indexOf(EndTag, index)
          if (endIndex == -1)
            throw new OXFException("Missing end tag for WSRP encoded URL.")
          val encodedURL = content.substring(index + StartTagLength, endIndex)
          currentIndex = endIndex + EndTagLength
          writer.write(decodeURL(encodedURL))
        } else if (index < stringLength - BaseTagLength && content.charAt(index + BaseTagLength) == '_') {
          // Namespace encoding
          writer.write(ns)
          currentIndex = index + PrefixTagLength
        } else
          throw new OXFException("Invalid WSRP rewrite tagging.")
      }

      // Write remainder of string
      if (currentIndex < stringLength)
        writer.write(content, currentIndex, content.length - currentIndex)
    }

    // One cookie store per embedding session, so the remote Orbeon session sticks.
    def getOrCreateCookieStore(implicit ctx: EmbeddingContext): CookieStore =
      ctx.getSessionAttribute(RemoteSessionIdKey) map (_.asInstanceOf[CookieStore]) getOrElse {
        val newCookieStore = new BasicCookieStore
        ctx.setSessionAttribute(RemoteSessionIdKey, newCookieStore)
        newCookieStore
      }
  }
}
orbeon/orbeon-forms
embedding/src/main/scala/org/orbeon/oxf/fr/embedding/APISupport.scala
Scala
lgpl-2.1
16,100
/*
 * Copyright 2018 Analytics Zoo Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intel.analytics.zoo.pipeline.api.keras.layers

import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.zoo.pipeline.api.keras.models.Sequential
import com.intel.analytics.zoo.pipeline.api.keras.serializer.ModuleSerializationTest

/** Tests for the Keras-style Embedding layer: lookup against preset weights. */
class EmbeddingSpec extends KerasBaseSpec {

  // Compared results with Keras on Python side
  "Embedding with weights" should "work properly" in {
    val weights = Tensor[Float](10, 32).rand()
    val seq = Sequential[Float]()
    val layer = Embedding[Float](10, 32, weights = weights, inputLength = 4)
    seq.add(layer)
    // The layer's only parameters are the supplied embedding matrix.
    require(seq.getWeightsBias().sameElements(Array(weights)))
    // Output: (batch, inputLength, outputDim).
    seq.getOutputShape().toSingle().toArray should be (Array(-1, 4, 32))
    val input = Tensor[Float](2, 4)
    input(Array(1, 1)) = 1
    input(Array(1, 2)) = 2
    input(Array(1, 3)) = 4
    input(Array(1, 4)) = 5
    input(Array(2, 1)) = 4
    input(Array(2, 2)) = 3
    input(Array(2, 3)) = 2
    input(Array(2, 4)) = 6
    val output = seq.forward(input).toTensor[Float]
    // Each output row must equal the weight row selected by the input index
    // (+1 converts the 0-based index value to BigDL's 1-based `select`).
    for (i <- 0 to 1) {
      val nonBatchOutput = output.split(1)(i)
      for (j <- 0 to 3) {
        val actual = nonBatchOutput.split(1)(j)
        val expected = weights.select(1, input.valueAt(i + 1, j + 1).toInt + 1)
        require(actual == expected)
      }
    }
    // Smoke-test the backward pass; the gradient values are not asserted here.
    val gradInput = seq.backward(input, output)
  }
}

/** Round-trips an Embedding layer through module serialization. */
class EmbeddingSerialTest extends ModuleSerializationTest {
  override def test(): Unit = {
    val layer = Embedding[Float](1000, 32, inputLength = 4)
    layer.build(Shape(2, 4))
    val input = Tensor[Float](2, 4)
    input(Array(1, 1)) = 1
    input(Array(1, 2)) = 2
    input(Array(1, 3)) = 4
    input(Array(1, 4)) = 5
    input(Array(2, 1)) = 4
    input(Array(2, 2)) = 3
    input(Array(2, 3)) = 2
    input(Array(2, 4)) = 6
    runSerializationTest(layer, input)
  }
}
intel-analytics/analytics-zoo
zoo/src/test/scala/com/intel/analytics/zoo/pipeline/api/keras/layers/EmbeddingSpec.scala
Scala
apache-2.0
2,485
package tfm.examples

import tfm.{fin, local}

/** Tagless-final algebra for a tiny integer language.
  * The `@fin` macro annotation presumably generates a wrapper type
  * (`ExampleInterpreter.Example`) and a `language` object exposing the public
  * operations -- confirm against the tfm macro documentation.
  */
@fin
trait ExampleInterpreter[F[_]] {
  // `@local` keeps this combinator out of the generated public language
  // (it is still callable on an interpreter instance, as add3 below shows).
  @local
  def map2[A, B, C](fa: F[A], fb: F[B])(f: (A, B) => C): F[C]

  def lit(n: Int): F[Int]
  def add(lhs: Int, rhs: Int): F[Int]
}

object ExampleInterpreter {
  type Id[A] = A

  /** Identity interpreter: evaluates the algebra directly to plain values. */
  val id: ExampleInterpreter[Id] =
    new ExampleInterpreter[Id] {
      def map2[A, B, C](fa: A, fb: B)(f: (A, B) => C): C = f(fa, fb)
      def lit(n: Int): Int = n
      def add(lhs: Int, rhs: Int): Int = lhs + rhs
    }
}

object ExampleApp extends App {
  // Generated by macro
  import ExampleInterpreter.Example
  import ExampleInterpreter.language._

  // A program built from the generated language, run with the id interpreter.
  val program1 = add(3, 4)
  println(program1.run(ExampleInterpreter.id))

  // Doesn't exist
  // ExampleInterpreter.language.map2

  // Compose primitives
  def add3(x: Int, y: Int, z: Int): Example[Int] =
    new Example[Int] {
      def run[F[_]](interpreter: ExampleInterpreter[F]): F[Int] =
        interpreter.map2(interpreter.add(x, y), interpreter.lit(z))(_ + _)
    }

  val program2 = add3(1, 2, 3)
  println(program2.run(ExampleInterpreter.id))
}
travisbrown/tfm
examples/src/main/scala/tfm/examples/Example.scala
Scala
mit
1,095
/*
 * Copyright 2014 Commonwealth Computer Research, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.locationtech.geomesa.core.data

import org.apache.accumulo.core.client.mock.MockInstance
import org.apache.accumulo.core.client.security.tokens.PasswordToken
import org.apache.accumulo.core.security.Authorizations
import org.geotools.data._
import org.geotools.data.collection.ListFeatureCollection
import org.geotools.data.simple.SimpleFeatureStore
import org.geotools.factory.{CommonFactoryFinder, Hints}
import org.geotools.feature.simple.SimpleFeatureBuilder
import org.locationtech.geomesa.core.index._
import org.locationtech.geomesa.core.security
import org.locationtech.geomesa.feature.AvroSimpleFeatureFactory
import org.locationtech.geomesa.utils.geotools.Conversions._
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.text.WKTUtils
import org.opengis.feature.simple.SimpleFeatureType
import org.specs2.mutable.Specification

import scala.collection.JavaConversions._

/** Tests Accumulo per-feature visibility labels: features written with
  * "user&admin" must be hidden from a user holding only the "user" auth.
  * Examples share mutable mock-Accumulo state, hence `sequential`.
  */
class VisibilitiesTest extends Specification {

  sequential

  "handle per feature visibilities" should {
    // Mock Accumulo with a privileged user (user+admin auths) and a
    // restricted user (user auth only).
    val mockInstance = new MockInstance("perfeatureinstance")
    val conn = mockInstance.getConnector("myuser", new PasswordToken("mypassword".getBytes("UTF8")))
    conn.securityOperations().changeUserAuthorizations("myuser", new Authorizations("user", "admin"))
    conn.securityOperations().createLocalUser("nonpriv", new PasswordToken("nonpriv".getBytes("UTF8")))
    conn.securityOperations().changeUserAuthorizations("nonpriv", new Authorizations("user"))

    // create the data store
    val ds = DataStoreFinder.getDataStore(Map(
      "instanceId" -> "perfeatureinstance",
      "zookeepers" -> "zoo1:2181,zoo2:2181,zoo3:2181",
      "user" -> "myuser",
      "password" -> "mypassword",
      "tableName" -> "testwrite",
      "useMock" -> "true",
      "featureEncoding" -> "avro")).asInstanceOf[AccumuloDataStore]

    val sftName = "perfeatureauthtest"
    val sft = SimpleFeatureTypes.createType(sftName, s"name:String,dtg:Date,*geom:Point:srid=4326")
    sft.getUserData.put(SF_PROPERTY_START_TIME, "dtg")
    ds.createSchema(sft)

    // write some data
    // Features named "0".."2" get visibility "user&admin"; "3".."5" get "user".
    val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]
    val features = getFeatures(sft).toList
    val privFeatures = features.take(3)
    privFeatures.foreach { f => f.getUserData.put(security.SecurityUtils.FEATURE_VISIBILITY, "user&admin") }
    val nonPrivFeatures = features.drop(3)
    nonPrivFeatures.foreach { f => f.getUserData.put(security.SecurityUtils.FEATURE_VISIBILITY, "user") }
    fs.addFeatures(new ListFeatureCollection(sft, privFeatures ++ nonPrivFeatures))
    fs.flush()

    val ff = CommonFactoryFinder.getFilterFactory2
    import ff.{literal => lit, property => prop, _}

    // Same table, read through the restricted user.
    val unprivDS = DataStoreFinder.getDataStore(Map(
      "instanceId" -> "perfeatureinstance",
      "zookeepers" -> "zoo1:2181,zoo2:2181,zoo3:2181",
      "user" -> "nonpriv",
      "password" -> "nonpriv",
      "tableName" -> "testwrite",
      "useMock" -> "true",
      "featureEncoding" -> "avro")).asInstanceOf[AccumuloDataStore]

    "nonpriv should only be able to read a subset of features" in {
      "using ALL queries" in {
        val reader = unprivDS.getFeatureReader(sftName, Query.ALL)
        val readFeatures = reader.getIterator.toList
        readFeatures.size must be equalTo 3
      }

      "using ST queries" in {
        val filter = bbox(prop("geom"), 44.0, 44.0, 46.0, 46.0, "EPSG:4326")
        val reader = unprivDS.getFeatureReader(new Query(sftName, filter), Transaction.AUTO_COMMIT)
        reader.getIterator.toList.size must be equalTo 3
      }

      "using attribute queries" in {
        // "1" is priv-only, "4" is user-visible: nonpriv sees only "4".
        val filter = or(
          ff.equals(prop("name"), lit("1")),
          ff.equals(prop("name"), lit("4")))
        val reader = unprivDS.getFeatureReader(new Query(sftName, filter), Transaction.AUTO_COMMIT)
        reader.getIterator.toList.size must be equalTo 1
      }
    }

    "priv should be able to read all 6 features" in {
      "using ALL queries" in {
        val reader = ds.getFeatureReader(sftName, Query.ALL)
        val readFeatures = reader.getIterator.toList
        readFeatures.size must be equalTo 6
      }

      "using ST queries" in {
        val filter = bbox(prop("geom"), 44.0, 44.0, 46.0, 46.0, "EPSG:4326")
        val reader = ds.getFeatureReader(new Query(sftName, filter), Transaction.AUTO_COMMIT)
        reader.getIterator.toList.size must be equalTo 6
      }

      "using attribute queries" in {
        val filter = or(
          ff.equals(prop("name"), lit("1")),
          ff.equals(prop("name"), lit("4")))
        val reader = ds.getFeatureReader(new Query(sftName, filter), Transaction.AUTO_COMMIT)
        reader.getIterator.toList.size must be equalTo 2
      }
    }
  }

  "remove should continue to work as expected" in {
    // Fresh mock instance so deletions don't interfere with the section above.
    val instanceId = "removeviz"
    val mockInstance = new MockInstance(instanceId)
    val conn = mockInstance.getConnector("myuser", new PasswordToken("mypassword".getBytes("UTF8")))
    conn.securityOperations().changeUserAuthorizations("myuser", new Authorizations("user", "admin"))
    conn.securityOperations().createLocalUser("nonpriv", new PasswordToken("nonpriv".getBytes("UTF8")))
    conn.securityOperations().changeUserAuthorizations("nonpriv", new Authorizations("user"))

    // create the data store
    val ds = DataStoreFinder.getDataStore(Map(
      "instanceId" -> instanceId,
      "zookeepers" -> "zoo1:2181,zoo2:2181,zoo3:2181",
      "user" -> "myuser",
      "password" -> "mypassword",
      "tableName" -> "testwrite",
      "useMock" -> "true",
      "featureEncoding" -> "avro")).asInstanceOf[AccumuloDataStore]

    val sftName = "perfeatureauthtest"
    val sft = SimpleFeatureTypes.createType(sftName, s"name:String,dtg:Date,*geom:Point:srid=4326")
    sft.getUserData.put(SF_PROPERTY_START_TIME, "dtg")
    ds.createSchema(sft)

    // write some data
    val fs = ds.getFeatureSource(sftName).asInstanceOf[AccumuloFeatureStore]
    val features = getFeatures(sft).toList
    val privFeatures = features.take(3)
    privFeatures.foreach { f => f.getUserData.put(security.SecurityUtils.FEATURE_VISIBILITY, "user&admin") }
    val nonPrivFeatures = features.drop(3)
    nonPrivFeatures.foreach { f => f.getUserData.put(security.SecurityUtils.FEATURE_VISIBILITY, "user") }
    fs.addFeatures(new ListFeatureCollection(sft, privFeatures ++ nonPrivFeatures))
    fs.flush()

    val ff = CommonFactoryFinder.getFilterFactory2
    import ff.{literal => lit, property => prop, _}

    val unprivDS = DataStoreFinder.getDataStore(Map(
      "instanceId" -> instanceId,
      "zookeepers" -> "zoo1:2181,zoo2:2181,zoo3:2181",
      "user" -> "nonpriv",
      "password" -> "nonpriv",
      "tableName" -> "testwrite",
      "useMock" -> "true",
      "featureEncoding" -> "avro")).asInstanceOf[AccumuloDataStore]

    "priv should be able to delete a feature" in {
      fs.removeFeatures(ff.id("1"))
      fs.flush()

      "using ALL queries" in {
        fs.getFeatures(Query.ALL).features().toList.size must be equalTo 5
      }

      "using record id queries" in {
        fs.getFeatures(ff.id(ff.featureId("1"))).features().hasNext must beFalse
      }

      "using attribute queries" in {
        // Only "4" remains of the pair ("1" was deleted above).
        val filter = or(
          ff.equals(prop("name"), lit("1")),
          ff.equals(prop("name"), lit("4")))
        val reader = ds.getFeatureReader(new Query(sftName, filter), Transaction.AUTO_COMMIT)
        reader.getIterator.toList.size must be equalTo 1
      }
    }

    "nonpriv should not be able to delete a priv feature" in {
      val unprivFS = unprivDS.getFeatureSource(sftName).asInstanceOf[SimpleFeatureStore]
      unprivFS.removeFeatures(ff.id("2"))
      unprivFS.flush()

      "priv should still see the feature that was attempted to be deleted" in {
        fs.getFeatures(ff.id(ff.featureId("2"))).features().hasNext must beTrue
      }
    }
  }

  val hints = new Hints(Hints.FEATURE_FACTORY, classOf[AvroSimpleFeatureFactory])
  val featureFactory = CommonFactoryFinder.getFeatureFactory(hints)

  /** Builds six features with fid/name "0".."5" at the same point and date. */
  def getFeatures(sft: SimpleFeatureType) = (0 until 6).map { i =>
    val builder = new SimpleFeatureBuilder(sft, featureFactory)
    builder.set("geom", WKTUtils.read("POINT(45.0 45.0)"))
    builder.set("dtg", "2012-01-02T05:06:07.000Z")
    builder.set("name",i.toString)
    val sf = builder.buildFeature(i.toString)
    // Force the provided fid to be used instead of a generated one.
    sf.getUserData()(Hints.USE_PROVIDED_FID) = java.lang.Boolean.TRUE
    sf
  }
}
kevinwheeler/geomesa
geomesa-core/src/test/scala/org/locationtech/geomesa/core/data/VisibilitiesTest.scala
Scala
apache-2.0
9,359
package org.scalajs.testsuite.utils object Platform { /** Returns `true` if and only if the code is executing on a JVM. * Note: Returns `false` when executing on any JS VM. */ final val executingInJVM = true def executingInJVMOnJDK6: Boolean = jdkVersion == 6 def executingInJVMOnJDK7OrLower: Boolean = jdkVersion <= 7 private lazy val jdkVersion = { val v = System.getProperty("java.version") if (v.startsWith("1.")) Integer.parseInt(v.drop(2).takeWhile(_.isDigit)) else throw new Exception("Unknown java.version format") } def executingInRhino: Boolean = false def executingInNodeJS: Boolean = false def executingInPhantomJS: Boolean = false def hasTypedArrays: Boolean = false def isInFastOpt: Boolean = false def isInFullOpt: Boolean = false def isInProductionMode: Boolean = false def isInDevelopmentMode: Boolean = true def hasCompliantAsInstanceOfs: Boolean = true def hasCompliantModule: Boolean = true def hasStrictFloats: Boolean = true }
mdedetrich/scala-js
test-suite/jvm/src/test/scala/org/scalajs/testsuite/utils/Platform.scala
Scala
bsd-3-clause
1,009
/** * Created by Romain Reuillon on 05/05/16. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * */ package org.openmole.tool import java.io._ import java.nio.file._ import java.util.concurrent.{ Callable, ThreadPoolExecutor, TimeUnit, TimeoutException } import java.util.zip.{ GZIPInputStream, GZIPOutputStream } import squants.time._ package object stream { val DefaultBufferSize = 16 * 1024 def copy(inputStream: InputStream, outputStream: OutputStream) = { val buffer = new Array[Byte](DefaultBufferSize) Iterator.continually(inputStream.read(buffer)).takeWhile(_ != -1).foreach { outputStream.write(buffer, 0, _) } } def copy(inputStream: InputStream, outputStream: OutputStream, bufferSize: Int, timeout: Time)(implicit pool: ThreadPoolExecutor) = { val buffer = new Array[Byte](bufferSize) val reader = new ReaderRunnable(buffer, inputStream, bufferSize) Iterator.continually { val futureRead = pool.submit(reader) try futureRead.get(timeout.millis, TimeUnit.MILLISECONDS) catch { case (e: TimeoutException) ⇒ futureRead.cancel(true) throw new IOException(s"Timeout on reading $bufferSize bytes, read was longer than $timeout ms.", e) } }.takeWhile(_ != -1).foreach { count ⇒ val futureWrite = pool.submit(new WriterRunnable(buffer, outputStream, count)) try futureWrite.get(timeout.millis, TimeUnit.MILLISECONDS) catch { case (e: TimeoutException) ⇒ futureWrite.cancel(true) throw new 
IOException(s"Timeout on writing $count bytes, write was longer than $timeout ms.", e) } } } implicit class OutputStreamDecorator(os: OutputStream) { def flushClose = { try os.flush finally os.close } def toGZ = new GZIPOutputStream(os) def append(content: String) = new PrintWriter(os).append(content).flush def appendLine(line: String) = append(line + "\\n") } implicit class InputStreamDecorator(is: InputStream) { def toByteArray = { val os = new ByteArrayOutputStream() try { copy(os) os.toByteArray } finally is.close() } def copy(to: OutputStream): Unit = stream.copy(is, to) def copy(to: File, maxRead: Int, timeout: Time)(implicit pool: ThreadPoolExecutor): Unit = withClosable(new BufferedOutputStream(new FileOutputStream(to))) { copy(_, maxRead, timeout) } def copy(to: OutputStream, maxRead: Int, timeout: Time)(implicit pool: ThreadPoolExecutor) = stream.copy(is, to, maxRead, timeout) def toGZiped = new GZipedInputStream(is) def toGZ = new GZIPInputStream(is) // this one must have REPLACE_EXISTING enabled but does not support COPY_ATTRIBUTES, nor NOFOLLOW_LINKS def copy(file: File, replace: Boolean = true) = Files.copy( is, file.toPath, (if (replace) Seq(StandardCopyOption.REPLACE_EXISTING) else Seq()): _* ) def mkString = try scala.io.Source.fromInputStream(is).mkString finally is.close() } def withClosable[C <: { def close() }, T](open: ⇒ C)(f: C ⇒ T): T = { val c = open try f(c) finally c.close() } class ReaderRunnable(buffer: Array[Byte], from: InputStream, maxRead: Int) extends Callable[Int] { override def call: Int = from.read(buffer, 0, maxRead) } class WriterRunnable(buffer: Array[Byte], to: OutputStream, amount: Int) extends Callable[Unit] { override def call: Unit = { to.write(buffer, 0, amount) to.flush() } } }
openmole/openmole
openmole/third-parties/org.openmole.tool.stream/src/main/scala/org/openmole/tool/stream/package.scala
Scala
agpl-3.0
4,201
package ems import javax.servlet.http.HttpServletRequest import org.joda.time.{DateTimeZone, DateTime} import unfiltered.directives._ import unfiltered.directives.Result._ import Directives._ import unfiltered.request._ import unfiltered.response._ import unfilteredx._ import net.hamnaberg.json.collection.{Error, JsonCollection} trait EmsDirectives { type ResponseDirective = Directive[HttpServletRequest, ResponseFunction[Any], ResponseFunction[Any]] def baseURIBuilder = request[HttpServletRequest].map(r => BaseURIBuilder(r)) def baseURI = request[HttpServletRequest].map(r => BaseURI(r)) def requestURIBuilder = request[Any].map(r => RequestURIBuilder(r)) def requestURI = request[Any].map(r => RequestURI(r)) def ifModifiedSince(dt: DateTime, res: ResponseFunction[Any]) = Directive[Any, ResponseFunction[Any], ResponseFunction[Any]]{ case IfModifiedSinceString("*") => Result.Error(NotModified) case IfModifiedSince(date) if dt.withMillisOfSecond(0).withZone(DateTimeZone.UTC).toDate == date => Result.Error(NotModified) case _ => Success(res) } def ifUnmodifiedSince(dt: DateTime) = Directive[Any, ResponseFunction[Any], Unit]{ case IfUnmodifiedSinceString("*") => Success(()) case IfUnmodifiedSince(date) if dt.withMillisOfSecond(0).withZone(DateTimeZone.UTC).toDate == date => Success(()) case IfUnmodifiedSince(date) => Result.Error(PreconditionFailed ~> ResponseString(s"${dt.withMillisOfSecond(0).withZone(DateTimeZone.UTC).toDate} is not equal to $date")) case RequestURI(href) => Result.Error(PreconditionRequired ~> CollectionJsonResponse(JsonCollection(href, Error("Wrong response", None, Some("Missing If-Unmodified-Since header"))))) } def contentType(ct: String) = commit(when { case RequestContentType(`ct`) => ct }.orElse(UnsupportedMediaType)) def contentDisposition = commit( for { href <- requestURI res <- when { case RequestContentDisposition(cd) => cd }.orElse(BadRequest ~> CollectionJsonResponse(JsonCollection(href, Error("Wrong response", None, Some("Missing 
Content-Disposition header for binary data"))))) } yield res ) }
chrissearle/ems-redux
src/main/scala/ems/EmsDirectives.scala
Scala
apache-2.0
2,160
/* * Copyright (C) 2014 - 2017 Contributors as noted in the AUTHORS.md file * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package com.wegtam.tensei.agent import akka.testkit.TestActorRef import com.wegtam.tensei.adt.StatsMessages.CalculateStatisticsResult import com.wegtam.tensei.adt.StatsResult.{ BasicStatisticsResult, StatisticErrors, StatsResultNumeric, StatsResultString } import com.wegtam.tensei.adt._ import com.wegtam.tensei.agent.DataTreeDocument.DataTreeDocumentMessages import com.wegtam.tensei.agent.Stats.StatsMessages.FinishAnalysis import com.wegtam.tensei.agent.adt.ParserDataContainer import com.wegtam.tensei.agent.helpers.{ GenericHelpers, XmlHelpers } import scalaz._ import Scalaz._ class StatsTest extends ActorSpec with XmlHelpers with GenericHelpers { describe("Stats") { describe("DataTreeDocumentMessages.SaveData") { describe("with one data message") { val sourceData = getClass.getResource("/com/wegtam/tensei/agent/stats/simple-data.csv").toURI val dfasdl = DFASDL( "SIMPLE-DFASDL", scala.io.Source .fromInputStream( getClass.getResourceAsStream("/com/wegtam/tensei/agent/stats/simple-dfasdl.xml") ) .mkString ) val sourceElements = List( ElementReference(dfasdl.id, "alter"), ElementReference(dfasdl.id, "name") ) val targetElements = List( ElementReference(dfasdl.id, "alter"), ElementReference(dfasdl.id, "name") ) val mapping = 
MappingTransformation(sourceElements, targetElements) val recipe = new Recipe("COPY-COLUMNS", Recipe.MapOneToOne, List(mapping)) val cookbook = Cookbook("COOKBOOK", List(dfasdl), Option(dfasdl), List(recipe)) val source = ConnectionInformation(sourceData, Option(DFASDLReference(cookbook.id, dfasdl.id))) describe("for string data") { describe("that is empty") { it("should work") { val stats = TestActorRef(Stats.props(source, cookbook, List("alter", "name"))) val data = ParserDataContainer( "", "name", Option("ID"), -1L, Option(calculateDataElementStorageHash("name", List.empty[(String, Long)])) ) stats ! DataTreeDocumentMessages.SaveData(data, data.dataElementHash.get) stats ! FinishAnalysis val sResult = new StatsResultString("name", BasicStatisticsResult(1, Option(1), Option(0.0), Option(0.0), Option(0.0))) val cresult = new CalculateStatisticsResult(List(sResult).right[String], source, cookbook, List("alter", "name")) val response = cresult expectMsg(response) } } describe("that is not empty") { it("should work") { val stats = TestActorRef(Stats.props(source, cookbook, List("alter", "name"))) val data = ParserDataContainer( "Augustus", "name", Option("ID"), -1L, Option(calculateDataElementStorageHash("name", List.empty[(String, Long)])) ) stats ! DataTreeDocumentMessages.SaveData(data, data.dataElementHash.get) stats ! FinishAnalysis val sResult = new StatsResultString("name", BasicStatisticsResult(1, Option(1), Option(8.0), Option(8.0), Option(8.0))) val cresult = new CalculateStatisticsResult(List(sResult).right[String], source, cookbook, List("alter", "name")) val response = cresult expectMsg(response) } } } describe("for numerical data") { describe("that is empty") { it("should work") { val stats = TestActorRef(Stats.props(source, cookbook, List("alter", "name"))) val data = ParserDataContainer( "", "alter", Option("ID"), -1L, Option(calculateDataElementStorageHash("alter", List.empty[(String, Long)])) ) stats ! 
DataTreeDocumentMessages.SaveData(data, data.dataElementHash.get) stats ! FinishAnalysis val sResult = new StatsResultNumeric("alter", BasicStatisticsResult(1, Option(0), None, None, None, Option(StatisticErrors(1, 0, 0)))) val cresult = new CalculateStatisticsResult(List(sResult).right[String], source, cookbook, List("alter", "name")) val response = cresult expectMsg(response) } } } describe("that is incorrect") { it("should work") { val stats = TestActorRef(Stats.props(source, cookbook, List("alter", "name"))) val data = ParserDataContainer( "haus", "alter", Option("ID"), -1L, Option(calculateDataElementStorageHash("alter", List.empty[(String, Long)])) ) stats ! DataTreeDocumentMessages.SaveData(data, data.dataElementHash.get) stats ! FinishAnalysis val sResult = new StatsResultNumeric("alter", BasicStatisticsResult(1, Option(0), None, None, None, Option(StatisticErrors(1, 0, 0)))) val cresult = new CalculateStatisticsResult(List(sResult).right[String], source, cookbook, List("alter", "name")) val response = cresult expectMsg(response) } } describe("that is correct") { it("should work") { val stats = TestActorRef(Stats.props(source, cookbook, List("alter", "name"))) val data = ParserDataContainer( "27", "alter", Option("ID"), -1L, Option(calculateDataElementStorageHash("alter", List.empty[(String, Long)])) ) stats ! DataTreeDocumentMessages.SaveData(data, data.dataElementHash.get) stats ! 
FinishAnalysis val sResult = new StatsResultNumeric("alter", BasicStatisticsResult(1, Option(1), Option(27.0), Option(27.0), Option(27.0))) val cresult = new CalculateStatisticsResult(List(sResult).right[String], source, cookbook, List("alter", "name")) val response = cresult expectMsg(response) } } } describe("with multiple data messages") { val sourceData = getClass.getResource("/com/wegtam/tensei/agent/stats/simple-data.csv").toURI val dfasdl = DFASDL( "SIMPLE-DFASDL", scala.io.Source .fromInputStream( getClass.getResourceAsStream("/com/wegtam/tensei/agent/stats/simple-dfasdl.xml") ) .mkString ) val sourceElements = List( ElementReference(dfasdl.id, "alter"), ElementReference(dfasdl.id, "name") ) val targetElements = List( ElementReference(dfasdl.id, "alter"), ElementReference(dfasdl.id, "name") ) val mapping = MappingTransformation(sourceElements, targetElements) val recipe = new Recipe("COPY-COLUMNS", Recipe.MapOneToOne, List(mapping)) val cookbook = Cookbook("COOKBOOK", List(dfasdl), Option(dfasdl), List(recipe)) val source = ConnectionInformation(sourceData, Option(DFASDLReference(cookbook.id, dfasdl.id))) describe("only string data") { it("should work") { val stats = TestActorRef(Stats.props(source, cookbook, List("alter", "name"))) val data = ParserDataContainer( "Mark", "name", Option("ID"), -1L, Option(calculateDataElementStorageHash("name", List.empty[(String, Long)])) ) val data2 = ParserDataContainer( "Karin", "name", Option("ID"), -1L, Option(calculateDataElementStorageHash("name", List.empty[(String, Long)])) ) val data3 = ParserDataContainer( "Augustus", "name", Option("ID"), -1L, Option(calculateDataElementStorageHash("name", List.empty[(String, Long)])) ) stats ! DataTreeDocumentMessages.SaveData(data, data.dataElementHash.get) stats ! DataTreeDocumentMessages.SaveData(data2, data2.dataElementHash.get) stats ! DataTreeDocumentMessages.SaveData(data3, data3.dataElementHash.get) stats ! 
FinishAnalysis val sResult = new StatsResultString("name", BasicStatisticsResult(3, Option(3), Option(4.0), Option(8.0), Option(5.666666666666667))) val cresult = new CalculateStatisticsResult(List(sResult).right[String], source, cookbook, List("alter", "name")) val response = cresult expectMsg(response) } } describe("only numeric data") { it("should work") { val stats = TestActorRef(Stats.props(source, cookbook, List("alter", "name"))) val data = ParserDataContainer( "27", "alter", Option("ID"), -1L, Option(calculateDataElementStorageHash("alter", List.empty[(String, Long)])) ) val data2 = ParserDataContainer( "26", "alter", Option("ID"), -1L, Option(calculateDataElementStorageHash("alter", List.empty[(String, Long)])) ) val data3 = ParserDataContainer( "3", "alter", Option("ID"), -1L, Option(calculateDataElementStorageHash("alter", List.empty[(String, Long)])) ) stats ! DataTreeDocumentMessages.SaveData(data, data.dataElementHash.get) stats ! DataTreeDocumentMessages.SaveData(data2, data2.dataElementHash.get) stats ! DataTreeDocumentMessages.SaveData(data3, data3.dataElementHash.get) stats ! 
FinishAnalysis val sResult = new StatsResultNumeric("alter", BasicStatisticsResult(3, Option(3), Option(3.0), Option(27.0), Option(18.666666666666668))) val cresult = new CalculateStatisticsResult(List(sResult).right[String], source, cookbook, List("alter", "name")) val response = cresult expectMsg(response) } } describe("mixed data") { it("should work") { val stats = TestActorRef(Stats.props(source, cookbook, List("alter", "name"))) val stringData = ParserDataContainer( "Mark", "name", Option("ID"), -1L, Option(calculateDataElementStorageHash("name", List.empty[(String, Long)])) ) val stringData2 = ParserDataContainer( "Karin", "name", Option("ID"), -1L, Option(calculateDataElementStorageHash("name", List.empty[(String, Long)])) ) val stringData3 = ParserDataContainer( "Augustus", "name", Option("ID"), -1L, Option(calculateDataElementStorageHash("name", List.empty[(String, Long)])) ) val numericData = ParserDataContainer( "27", "alter", Option("ID"), -1L, Option(calculateDataElementStorageHash("alter", List.empty[(String, Long)])) ) val numericData2 = ParserDataContainer( "26", "alter", Option("ID"), -1L, Option(calculateDataElementStorageHash("alter", List.empty[(String, Long)])) ) val numericData3 = ParserDataContainer( "3", "alter", Option("ID"), -1L, Option(calculateDataElementStorageHash("alter", List.empty[(String, Long)])) ) stats ! DataTreeDocumentMessages.SaveData(stringData, stringData.dataElementHash.get) stats ! DataTreeDocumentMessages.SaveData(numericData, numericData.dataElementHash.get) stats ! DataTreeDocumentMessages.SaveData(stringData2, stringData2.dataElementHash.get) stats ! DataTreeDocumentMessages.SaveData(numericData2, numericData2.dataElementHash.get) stats ! DataTreeDocumentMessages.SaveData(stringData3, stringData3.dataElementHash.get) stats ! DataTreeDocumentMessages.SaveData(numericData3, numericData3.dataElementHash.get) stats ! 
FinishAnalysis val response = expectMsgType[CalculateStatisticsResult] response.source should be(source) response.cookbook should be(cookbook) response.sourceIds should be(List("alter", "name")) response.results match { case -\\/(failure) => fail(failure) case \\/-(success) => withClue("The number of results should be correct!") { success.size should be(2) } withClue("The result list should contain the correct results!") { success should contain( new StatsResultNumeric("alter", BasicStatisticsResult(3, Option(3), Option(3.0), Option(27.0), Option(18.666666666666668))) ) success should contain( new StatsResultString("name", BasicStatisticsResult(3, Option(3), Option(4.0), Option(8.0), Option(5.666666666666667))) ) } } } } } } } }
Tensei-Data/tensei-agent
src/test/scala/com/wegtam/tensei/agent/StatsTest.scala
Scala
agpl-3.0
18,894
package ly.stealth.shaihulud.reader import java.util.{Properties, UUID} import it.nerdammer.spark.hbase._ import consumer.kafka.MessageAndMetadata import consumer.kafka.client.KafkaReceiver import it.nerdammer.spark.hbase.conversion.FieldWriter import kafka.producer.{KeyedMessage, ProducerConfig, Producer} import org.apache.spark._ import org.apache.spark.storage.StorageLevel import org.apache.spark.streaming._ import org.apache.spark.streaming.dstream.DStream import org.apache.spark.SparkContext import org.apache.spark.SparkContext._ import org.apache.spark.SparkConf import org.apache.hadoop.hbase.util.Bytes object Main extends App with Logging { val parser = new scopt.OptionParser[ReaderConfiguration]("spark-reader") { head("Spark Reader for Kafka client applications", "1.0") opt[String]("source") unbounded() required() action { (value, config) => config.copy(sourceTopic = value) } text ("Source topic with initial set of data") opt[String]("destination") unbounded() required() action { (value, config) => config.copy(destinationTopic = value) } text ("Destination topic with processed set of data") opt[Int]("partitions") unbounded() optional() action { (value, config) => config.copy(partitions = value) } text ("Partitions in topic") opt[String]("zookeeper") unbounded() required() action { (value, config) => config.copy(zookeeper = value) } text ("Zookeeper connection host:port") opt[String]("broker.list") unbounded() required() action { (value, config) => config.copy(brokerList = value) } text ("Comma separated string of host:port") opt[Int]("kafka.fetch.size") unbounded() optional() action { (value, config) => config.copy(kafkaFetchSize = value) } text ("Maximum KBs to fetch from Kafka") checkConfig { c => if (c.testId.isEmpty || c.sourceTopic.isEmpty || c.destinationTopic.isEmpty || c.zookeeper.isEmpty || c.brokerList.isEmpty) { failure("You haven't provided all required parameters") } else { success } } } val config = parser.parse(args, ReaderConfiguration()) 
match { case Some(c) => c case None => sys.exit(1) } val sparkConfig = new SparkConf().setAppName("kafka_client_validator") .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .set("spark.hbase.host", config.zookeeper) val ssc = new StreamingContext(sparkConfig, Seconds(10)) ssc.checkpoint("spark-validator-hbase") implicit def TestFieldWriter: FieldWriter[Test] = new FieldWriter[Test] { override def map(data: Test): HBaseData = Seq( Some(Bytes.toBytes(data.test_id)), Some(Bytes.toBytes(data.source_topic)), Some(Bytes.toBytes(data.destination_topic)) ) override def columns = Seq("source_topic", "destination_topic") } implicit def MessageFieldWriter: FieldWriter[Message] = new FieldWriter[Message] { override def map(data: Message): HBaseData = Seq( Some(Bytes.toBytes(data.id)), Some(Bytes.toBytes(data.test_id)), Some(Bytes.toBytes(data.topic)), Some(Bytes.toBytes(data.partition)), Some(Bytes.toBytes(data.offset)), Some(Bytes.toBytes(data.payload)) ) override def columns = Seq("test_id", "topic", "partition", "offset", "payload") } val poisonPill = UUID.randomUUID().toString.getBytes("UTF8") markStreamEnd(poisonPill) val test = Test(config.testId, config.sourceTopic, config.destinationTopic) ssc.sparkContext.parallelize(Seq(test)) .toHBaseTable("tests") .inColumnFamily("kafka_client_validation") .save() val acc = ssc.sparkContext.accumulator[Int](0, "finishedPartitions") val validator = new Validator(config) startStreamForTopic(test.test_id, config.sourceTopic, config, poisonPill, validator) startStreamForTopic(test.test_id, config.destinationTopic, config, poisonPill, validator) ssc.start() ssc.awaitTermination() def markStreamEnd(poisonPill: Array[Byte]) { //Producing poison pill message to each partition of specified source and destination topics //in order to determine end of the stream val props = new Properties() props.put("metadata.broker.list", config.brokerList) props.put("producer.type", "sync") val producerConfig = new 
ProducerConfig(props) val producer = new Producer[Array[Byte], Array[Byte]](producerConfig) (0 until config.partitions).foreach(partition => { producer.send(new KeyedMessage(config.sourceTopic, null, partition, poisonPill)) producer.send(new KeyedMessage(config.destinationTopic, null, partition, poisonPill)) logInfo("Marked stream end for partition %d with sequence %s".format(partition, poisonPill)) }) } def startStreamForTopic(testId: String, topic: String, config: ReaderConfiguration, poisonPill: Array[Byte], validator: Validator) { val stream = createKafkaStream(config.zookeeper, topic, config.partitions).repartition(config.partitions).persist(StorageLevel.MEMORY_AND_DISK_SER) stream.map(message => { Message(testId, message.getTopic, message.getPartition.partition, message.getOffset, new String(message.getPayload)) }).foreachRDD(rdd => { val filtered = rdd.filter(message => !java.util.Arrays.equals(message.payload.getBytes("UTF8"), poisonPill)) filtered.toHBaseTable("messages") .inColumnFamily("kafka_client_validation") .save() if (rdd.count() > filtered.count()) { logInfo("End of the stream reached") acc.add((rdd.count() - filtered.count()).toInt) if (acc.value == config.partitions * 2) { validator.validate() ssc.stop(true) } } }) } private def createKafkaStream(zkConnect: String, topic: String, partitions: Int): DStream[MessageAndMetadata] = { val zkhosts = zkConnect.split(":")(0) val zkports = zkConnect.split(":")(1) val kafkaParams = Map("zookeeper.hosts" -> zkhosts, "zookeeper.port" -> zkports, "zookeeper.consumer.connection" -> zkConnect, "zookeeper.broker.path" -> "/brokers", "zookeeper.consumer.path" -> "/consumers", "fetch.size.bytes" -> (config.kafkaFetchSize * 1024).toString, "kafka.topic" -> topic, "kafka.consumer.id" -> "%s-%s".format(topic, UUID.randomUUID().toString)) val props = new java.util.Properties() kafkaParams foreach { case (key, value) => props.put(key, value)} val streams = (0 to partitions - 1).map { partitionId => 
ssc.receiverStream(new KafkaReceiver(StorageLevel.MEMORY_AND_DISK_SER, props, partitionId))} ssc.union(streams) } } case class Test(test_id: String = "", source_topic: String = "", destination_topic: String = "") case class Counter(test_id: String = "", topic: String = "", total: Long = 0L) case class Message(test_id: String = "", topic: String = "", partition: Int = 0, offset: Long = 0, payload: String = "") { val id = test_id + topic + partition + offset } case class ReaderConfiguration(testId: String = UUID.randomUUID().toString, sourceTopic: String = "", destinationTopic: String = "", partitions: Int = 1, zookeeper: String = "", brokerList: String = "", kafkaFetchSize: Int = 8)
stealthly/gauntlet
spark-validator-hbase/src/main/scala/ly/stealth/shaihulud/reader/Main.scala
Scala
apache-2.0
7,261
package com.github.mnogu.gatling.kafka.test import io.gatling.core.Predef._ import org.apache.kafka.clients.producer.ProducerConfig import scala.concurrent.duration._ import com.github.mnogu.gatling.kafka.Predef._ class BasicSimulation extends Simulation { val kafkaConf = kafka .topic("test") .properties( Map( ProducerConfig.ACKS_CONFIG -> "1", ProducerConfig.BOOTSTRAP_SERVERS_CONFIG -> "localhost:9092", ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG -> "org.apache.kafka.common.serialization.ByteArraySerializer", ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG -> "org.apache.kafka.clients.producer.ByteArraySerializer")) val scn = scenario("Kafka Test") .exec(kafka("request").send("foo")) // You can also use feeder // //val scn = scenario("Kafka Test") // .feed(csv("test.csv").circular) // .exec(kafka("request").send("${foo}")) setUp( scn .inject(constantUsersPerSec(10) during(90 seconds))) .protocols(kafkaConf) }
mistsys/gatling-kafka
src/test/scala/com/github/mnogu/gatling/kafka/test/BasicSimulation.scala
Scala
apache-2.0
1,027
package com.twitter.algebird import org.scalatest.{ PropSpec, Matchers, WordSpec } import org.scalatest.prop.PropertyChecks import org.scalacheck.{ Gen, Arbitrary } import CMSHasherImplicits._ class CmsLaws extends PropSpec with PropertyChecks with Matchers { import BaseProperties._ val DELTA = 1E-8 val EPS = 0.005 val SEED = 1 private def createArbitrary[K: Numeric](cmsMonoid: CMSMonoid[K]): Arbitrary[CMS[K]] = { val k = implicitly[Numeric[K]] Arbitrary { for (v <- Gen.choose(0, 10000)) yield cmsMonoid.create(k.fromInt(v)) } } property("CountMinSketch[Short] is a Monoid") { implicit val cmsMonoid = CMS.monoid[Short](EPS, DELTA, SEED) implicit val cmsGen = createArbitrary[Short](cmsMonoid) monoidLaws[CMS[Short]] } property("CountMinSketch[Int] is a Monoid") { implicit val cmsMonoid = CMS.monoid[Int](EPS, DELTA, SEED) implicit val cmsGen = createArbitrary[Int](cmsMonoid) monoidLaws[CMS[Int]] } property("CountMinSketch[Long] is a Monoid") { implicit val cmsMonoid = CMS.monoid[Long](EPS, DELTA, SEED) implicit val cmsGen = createArbitrary[Long](cmsMonoid) monoidLaws[CMS[Long]] } property("CountMinSketch[BigInt] is a Monoid") { implicit val cmsMonoid = CMS.monoid[BigInt](EPS, DELTA, SEED) implicit val cmsGen = createArbitrary[BigInt](cmsMonoid) monoidLaws[CMS[BigInt]] } } class TopPctCmsLaws extends PropSpec with PropertyChecks with Matchers { import BaseProperties._ val DELTA = 1E-8 val EPS = 0.005 val SEED = 1 val HEAVY_HITTERS_PCT = 0.1 private def createArbitrary[K: Numeric](cmsMonoid: TopPctCMSMonoid[K]): Arbitrary[TopCMS[K]] = { val k = implicitly[Numeric[K]] Arbitrary { for (v <- Gen.choose(0, 10000)) yield cmsMonoid.create(k.fromInt(v)) } } property("TopPctCms[Short] is a Monoid") { implicit val cmsMonoid = TopPctCMS.monoid[Short](EPS, DELTA, SEED, HEAVY_HITTERS_PCT) implicit val cmsGen = createArbitrary[Short](cmsMonoid) monoidLaws[TopCMS[Short]] } property("TopPctCms[Int] is a Monoid") { implicit val cmsMonoid = TopPctCMS.monoid[Int](EPS, DELTA, SEED, 
HEAVY_HITTERS_PCT) implicit val cmsGen = createArbitrary[Int](cmsMonoid) monoidLaws[TopCMS[Int]] } property("TopPctCms[Long] is a Monoid") { implicit val cmsMonoid = TopPctCMS.monoid[Long](EPS, DELTA, SEED, HEAVY_HITTERS_PCT) implicit val cmsGen = createArbitrary[Long](cmsMonoid) monoidLaws[TopCMS[Long]] } property("TopPctCms[BigInt] is a Monoid") { implicit val cmsMonoid = TopPctCMS.monoid[BigInt](EPS, DELTA, SEED, HEAVY_HITTERS_PCT) implicit val cmsGen = createArbitrary[BigInt](cmsMonoid) monoidLaws[TopCMS[BigInt]] } } class CMSShortTest extends CMSTest[Short] class CMSIntTest extends CMSTest[Int] class CMSLongTest extends CMSTest[Long] class CMSBigIntTest extends CMSTest[BigInt] abstract class CMSTest[K: Ordering: CMSHasher: Numeric] extends WordSpec with Matchers { val DELTA = 1E-10 val EPS = 0.001 val SEED = 1 // We use TopPctCMS for testing CMSCounting functionality. We argue that because TopPctCMS[K] encapsulates CMS[K] // and uses it for all its counting/querying functionality (like an adapter) we can test CMS[K] indirectly through // testing TopPctCMS[K]. val COUNTING_CMS_MONOID = { val ANY_HEAVY_HITTERS_PCT = 0.1 // heavy hitters functionality is not relevant for the tests using this monoid TopPctCMS.monoid[K](EPS, DELTA, SEED, ANY_HEAVY_HITTERS_PCT) } val RAND = new scala.util.Random // Convenience methods to convert from `Int` to the actual `K` type, and we prefer these conversions to be explicit // (cf. JavaConverters vs. JavaConversions). We use the name `toK` to clarify the intent and to prevent name conflicts // with the existing `to[Col]` method in Scala. implicit class IntCast(x: Int) { def toK[A: Numeric]: A = implicitly[Numeric[A]].fromInt(x) } implicit class SeqCast(xs: Seq[Int]) { def toK[A: Numeric]: Seq[A] = xs map { _.toK[A] } } implicit class SetCast(xs: Set[Int]) { def toK[A: Numeric]: Set[A] = xs map { _.toK[A] } } /** * Returns the exact frequency of {x} in {data}. 
*/ def exactFrequency(data: Seq[K], x: K): Long = data.count(_ == x) /** * Returns the exact inner product between two data streams, when the streams * are viewed as count vectors. */ def exactInnerProduct(data1: Seq[K], data2: Seq[K]): Long = { val counts1 = data1.groupBy(x => x).mapValues(_.size) val counts2 = data2.groupBy(x => x).mapValues(_.size) (counts1.keys.toSet & counts2.keys.toSet).map { k => counts1(k) * counts2(k) }.sum } /** * Returns the elements in {data} that appear at least heavyHittersPct * data.size times. */ def exactHeavyHitters(data: Seq[K], heavyHittersPct: Double): Set[K] = { val counts = data.groupBy(x => x).mapValues(_.size) val totalCount = counts.values.sum counts.filter { _._2 >= heavyHittersPct * totalCount }.keys.toSet } "A Count-Min sketch implementing CMSCounting" should { "count total number of elements in a stream" in { val totalCount = 1243 val range = 234 val data = (0 to (totalCount - 1)).map { _ => RAND.nextInt(range) }.toK[K] val cms = COUNTING_CMS_MONOID.create(data) cms.totalCount should be(totalCount) } "estimate frequencies" in { val totalCount = 5678 val range = 897 val data = (0 to (totalCount - 1)).map { _ => RAND.nextInt(range) }.toK[K] val cms = COUNTING_CMS_MONOID.create(data) (0 to 100).foreach { _ => val x = RAND.nextInt(range).toK[K] val exact = exactFrequency(data, x) val approx = cms.frequency(x).estimate val estimationError = approx - exact val maxError = approx - cms.frequency(x).min val beWithinTolerance = be >= 0L and be <= maxError approx should be >= exact estimationError should beWithinTolerance } } "exactly compute frequencies in a small stream" in { val one = COUNTING_CMS_MONOID.create(1.toK[K]) val two = COUNTING_CMS_MONOID.create(2.toK[K]) val cms = COUNTING_CMS_MONOID.plus(COUNTING_CMS_MONOID.plus(one, two), two) cms.frequency(0.toK[K]).estimate should be(0) cms.frequency(1.toK[K]).estimate should be(1) cms.frequency(2.toK[K]).estimate should be(2) val three = COUNTING_CMS_MONOID.create(Seq(1, 1, 
1).toK[K]) three.frequency(1.toK[K]).estimate should be(3) val four = COUNTING_CMS_MONOID.create(Seq(1, 1, 1, 1).toK[K]) four.frequency(1.toK[K]).estimate should be(4) val cms2 = COUNTING_CMS_MONOID.plus(four, three) cms2.frequency(1.toK[K]).estimate should be(7) } "estimate inner products" in { val totalCount = 5234 val range = 1390 val data1 = (0 to (totalCount - 1)).map { _ => RAND.nextInt(range) }.toK[K] val data2 = (0 to (totalCount - 1)).map { _ => RAND.nextInt(range) }.toK[K] val cms1 = COUNTING_CMS_MONOID.create(data1) val cms2 = COUNTING_CMS_MONOID.create(data1) val approxA = cms1.innerProduct(cms2) val approx = approxA.estimate val exact = exactInnerProduct(data1, data2) val estimationError = approx - exact val maxError = approx - approxA.min val beWithinTolerance = be >= 0L and be <= maxError approx should be(cms2.innerProduct(cms1).estimate) approx should be >= exact estimationError should beWithinTolerance } "exactly compute inner product of small streams" in { // Nothing in common. val a1 = List(1, 2, 3).toK[K] val a2 = List(4, 5, 6).toK[K] COUNTING_CMS_MONOID.create(a1).innerProduct(COUNTING_CMS_MONOID.create(a2)).estimate should be(0) // One element in common. val b1 = List(1, 2, 3).toK[K] val b2 = List(3, 5, 6).toK[K] COUNTING_CMS_MONOID.create(b1).innerProduct(COUNTING_CMS_MONOID.create(b2)).estimate should be(1) // Multiple, non-repeating elements in common. val c1 = List(1, 2, 3).toK[K] val c2 = List(3, 2, 6).toK[K] COUNTING_CMS_MONOID.create(c1).innerProduct(COUNTING_CMS_MONOID.create(c2)).estimate should be(2) // Multiple, repeating elements in common. 
val d1 = List(1, 2, 2, 3, 3).toK[K] val d2 = List(2, 3, 3, 6).toK[K] COUNTING_CMS_MONOID.create(d1).innerProduct(COUNTING_CMS_MONOID.create(d2)).estimate should be(6) } "work as an Aggregator when created from a single, small stream" in { val data1 = Seq(1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5).toK[K] val cms = CMS.aggregator[K](EPS, DELTA, SEED).apply(data1) cms.frequency(1.toK[K]).estimate should be(1L) cms.frequency(2.toK[K]).estimate should be(2L) cms.frequency(3.toK[K]).estimate should be(3L) cms.frequency(4.toK[K]).estimate should be(4L) cms.frequency(5.toK[K]).estimate should be(5L) val topPctCMS = { val anyHeavyHittersPct = 0.1 // exact setting not relevant for this test TopPctCMS.aggregator[K](EPS, DELTA, SEED, anyHeavyHittersPct).apply(data1) } topPctCMS.frequency(1.toK[K]).estimate should be(1L) topPctCMS.frequency(2.toK[K]).estimate should be(2L) topPctCMS.frequency(3.toK[K]).estimate should be(3L) topPctCMS.frequency(4.toK[K]).estimate should be(4L) topPctCMS.frequency(5.toK[K]).estimate should be(5L) val topNCMS = { val anyHeavyHittersN = 1 // exact setting not relevant for this test TopNCMS.aggregator[K](EPS, DELTA, SEED, anyHeavyHittersN).apply(data1) } topNCMS.frequency(1.toK[K]).estimate should be(1L) topNCMS.frequency(2.toK[K]).estimate should be(2L) topNCMS.frequency(3.toK[K]).estimate should be(3L) topNCMS.frequency(4.toK[K]).estimate should be(4L) topNCMS.frequency(5.toK[K]).estimate should be(5L) } } "A Top-% Count-Min sketch implementing CMSHeavyHitters" should { "estimate heavy hitters" in { // Simple way of making some elements appear much more often than others. val data1 = (1 to 3000).map { _ => RAND.nextInt(3) }.toK[K] val data2 = (1 to 3000).map { _ => RAND.nextInt(10) }.toK[K] val data3 = (1 to 1450).map { _ => -1 }.toK[K] // element close to being a 20% heavy hitter val data = data1 ++ data2 ++ data3 // Find elements that appear at least 20% of the time. 
val heavyHittersPct = 0.2 val cms = TopPctCMS.monoid[K](EPS, DELTA, SEED, 0.2).create(data) val trueHhs = exactHeavyHitters(data, heavyHittersPct) val estimatedHhs = cms.heavyHitters // All true heavy hitters must be claimed as heavy hitters. trueHhs.intersect(estimatedHhs) should be(trueHhs) // It should be very unlikely that any element with count less than // (heavyHittersPct - eps) * totalCount is claimed as a heavy hitter. val minHhCount = (heavyHittersPct - cms.eps) * cms.totalCount val infrequent = data.groupBy { x => x }.mapValues { _.size }.filter { _._2 < minHhCount }.keys.toSet infrequent.intersect(estimatedHhs) should be('empty) } "(when adding CMS instances) drop old heavy hitters when new heavy hitters replace them" in { val monoid = TopPctCMS.monoid[K](EPS, DELTA, SEED, 0.3) val cms1 = monoid.create(Seq(1, 2, 2).toK[K]) cms1.heavyHitters should be(Set(1, 2)) val cms2 = cms1 ++ monoid.create(2.toK[K]) cms2.heavyHitters should be(Set(2)) val cms3 = cms2 ++ monoid.create(1.toK[K]) cms3.heavyHitters should be(Set(1, 2)) val cms4 = cms3 ++ monoid.create(Seq(0, 0, 0, 0, 0, 0).toK[K]) cms4.heavyHitters should be(Set(0)) } "(when adding individual items) drop old heavy hitters when new heavy hitters replace them" in { val monoid = TopPctCMS.monoid[K](EPS, DELTA, SEED, 0.3) val cms1 = monoid.create(Seq(1, 2, 2).toK[K]) cms1.heavyHitters should be(Set(1, 2)) val cms2 = cms1 + 2.toK[K] cms2.heavyHitters should be(Set(2)) val cms3 = cms2 + 1.toK[K] cms3.heavyHitters should be(Set(1, 2)) val heaviest = 0.toK[K] val cms4 = cms3 + heaviest + heaviest + heaviest + heaviest + heaviest + heaviest cms4.heavyHitters should be(Set(heaviest)) } "(when adding CMS instances) merge heavy hitters correctly [GH-353 regression test]" in { // See https://github.com/twitter/algebird/issues/353 val monoid = TopPctCMS.monoid(EPS, DELTA, SEED, 0.1) val data1 = Seq(1, 1, 1, 2, 2, 3).toK[K] val data2 = Seq(3, 4, 4, 4, 5, 5).toK[K] val data3 = Seq(3, 6, 6, 6, 7, 7).toK[K] val data4 = 
Seq(3, 8, 8, 8, 9, 9).toK[K] val singleData = data1 ++ data2 ++ data3 ++ data4 /* Data sets from above shown in tabular view Item 1 2 3 4 total (= singleData) ---------------------------------------- A (1) 3 - - - 3 B (2) 2 - - - 2 C (3) 1 1 1 1 4 <<< C is global top 1 heavy hitter D (4) - 3 - - 3 E (5) - 2 - - 2 F (6) - - 3 - 3 G (7) - - 2 - 2 H (8) - - - 3 3 I (9) - - - 2 2 */ val cms1 = monoid.create(data1) val cms2 = monoid.create(data2) val cms3 = monoid.create(data3) val cms4 = monoid.create(data4) val aggregated = cms1 ++ cms2 ++ cms3 ++ cms4 val single = monoid.create(singleData) aggregated.heavyHitters should be(single.heavyHitters) aggregated.heavyHitters contains (3.toK[K]) // C=3 is global top 1 heavy hitter } "exactly compute heavy hitters when created from a single, small stream" in { val data1 = Seq(1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5).toK[K] val cms1 = TopPctCMS.monoid[K](EPS, DELTA, SEED, 0.01).create(data1) cms1.heavyHitters should be(Set(1, 2, 3, 4, 5)) val cms2 = TopPctCMS.monoid[K](EPS, DELTA, SEED, 0.1).create(data1) cms2.heavyHitters should be(Set(2, 3, 4, 5)) val cms3 = TopPctCMS.monoid[K](EPS, DELTA, SEED, 0.3).create(data1) cms3.heavyHitters should be(Set(5)) val cms4 = TopPctCMS.monoid[K](EPS, DELTA, SEED, 0.9).create(data1) cms4.heavyHitters should be(Set[K]()) } "work as an Aggregator when created from a single, small stream" in { val data1 = Seq(1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5).toK[K] val cms1 = TopPctCMS.aggregator[K](EPS, DELTA, SEED, 0.01).apply(data1) cms1.heavyHitters should be(Set(1, 2, 3, 4, 5)) val cms2 = TopPctCMS.aggregator[K](EPS, DELTA, SEED, 0.1).apply(data1) cms2.heavyHitters should be(Set(2, 3, 4, 5)) val cms3 = TopPctCMS.aggregator[K](EPS, DELTA, SEED, 0.3).apply(data1) cms3.heavyHitters should be(Set(5)) val cms4 = TopPctCMS.aggregator[K](EPS, DELTA, SEED, 0.9).apply(data1) cms4.heavyHitters should be(Set[K]()) } } "A Top-N Count-Min sketch implementing CMSHeavyHitters" should { // Note: As 
described in https://github.com/twitter/algebird/issues/353, a top-N CMS is, in general, not able to // merge heavy hitters correctly. This is because merging top-N based heavy hitters is not an associative // operation. // This test involves merging of top-N CMS instances, which is not an associative operation. This means that the // success or failure of this test depends on the merging order and/or the test data characteristics. "(when adding CMS instances) drop old heavy hitters when new heavy hitters replace them, if merge order matches data" in { val heavyHittersN = 2 val monoid = TopNCMS.monoid[K](EPS, DELTA, SEED, heavyHittersN) val cms1 = monoid.create(Seq(1, 2, 2).toK[K]) cms1.heavyHitters should be(Set(1, 2)) val cms2 = cms1 ++ monoid.create(Seq(3, 3, 3).toK[K]) cms2.heavyHitters should be(Set(2, 3)) val cms3 = cms2 ++ monoid.create(Seq(1, 1, 1).toK[K]) cms3.heavyHitters should be(Set(3, 1)) val cms4 = cms3 ++ monoid.create(Seq(6, 6, 6, 6, 6, 6).toK[K]) cms4.heavyHitters should be(Set(1, 6)) } "(when adding individual items) drop old heavy hitters when new heavy hitters replace them" in { val monoid = TopPctCMS.monoid[K](EPS, DELTA, SEED, 0.3) val cms1 = monoid.create(Seq(1, 2, 2).toK[K]) cms1.heavyHitters should be(Set(1, 2)) val cms2 = cms1 + 2.toK[K] cms2.heavyHitters should be(Set(2)) val cms3 = cms2 + 1.toK[K] cms3.heavyHitters should be(Set(1, 2)) val heaviest = 0.toK[K] val cms4 = cms3 + heaviest + heaviest + heaviest + heaviest + heaviest + heaviest cms4.heavyHitters should be(Set(heaviest)) } // This test documents the order bias of top-N CMS, i.e. it's a negative test case. 
// See https://github.com/twitter/algebird/issues/353 "(when adding CMS instances) generally FAIL to merge heavy hitters correctly due to order bias" in { val topN = 2 val monoid = TopNCMS.monoid(EPS, DELTA, SEED, topN) val data1 = Seq(1, 1, 1, 2, 2, 3).toK[K] val data2 = Seq(3, 4, 4, 4, 5, 5).toK[K] val data3 = Seq(3, 6, 6, 6, 7, 7).toK[K] val data4 = Seq(3, 8, 8, 8, 9, 9).toK[K] val singleData = data1 ++ data2 ++ data3 ++ data4 /* Data sets from above shown in tabular view Item 1 2 3 4 total (= singleData) ---------------------------------------- A (1) 3 - - - 3 B (2) 2 - - - 2 C (3) 1 1 1 1 4 <<< C is global top 1 heavy hitter D (4) - 3 - - 3 E (5) - 2 - - 2 F (6) - - 3 - 3 G (7) - - 2 - 2 H (8) - - - 3 3 I (9) - - - 2 2 */ val cms1 = monoid.create(data1) val cms2 = monoid.create(data2) val cms3 = monoid.create(data3) val cms4 = monoid.create(data4) val aggregated = cms1 ++ cms2 ++ cms3 ++ cms4 val single = monoid.create(singleData) aggregated.heavyHitters shouldNot be(single.heavyHitters) aggregated.heavyHitters shouldNot contain(3.toK[K]) // C=3 is global top 1 heavy hitter } // Compared to adding top-N CMS instances, which is generally unsafe because of order bias (see test cases above), // adding individual items to a top-N CMS is a safe operation. 
// See https://github.com/twitter/algebird/issues/353 "(when adding individual items) merge heavy hitters correctly [GH-353 regression test]" in { val topN = 2 val monoid = TopNCMS.monoid(EPS, DELTA, SEED, topN) val data1 = Seq(1, 1, 1, 2, 2, 3).toK[K] val data2 = Seq(3, 4, 4, 4, 5, 5).toK[K] val data3 = Seq(3, 6, 6, 6, 7, 7).toK[K] val data4 = Seq(3, 8, 8, 8, 9, 9).toK[K] val singleData = data1 ++ data2 ++ data3 ++ data4 /* Data sets from above shown in tabular view Item 1 2 3 4 total (= singleData) ---------------------------------------- A (1) 3 - - - 3 B (2) 2 - - - 2 C (3) 1 1 1 1 4 <<< C is global top 1 heavy hitter D (4) - 3 - - 3 E (5) - 2 - - 2 F (6) - - 3 - 3 G (7) - - 2 - 2 H (8) - - - 3 3 I (9) - - - 2 2 */ val cms1 = monoid.create(data1) val cms2 = cms1 + 3.toK[K] + 4.toK[K] + 4.toK[K] + 4.toK[K] + 5.toK[K] + 5.toK[K] // effectively "++ data2" val cms3 = cms2 + 3.toK[K] + 6.toK[K] + 6.toK[K] + 6.toK[K] + 7.toK[K] + 7.toK[K] // "++ data3" val aggregated = cms3 + 3.toK[K] + 8.toK[K] + 8.toK[K] + 8.toK[K] + 9.toK[K] + 9.toK[K] // "++ data4" val single = monoid.create(singleData) aggregated.heavyHitters should be(single.heavyHitters) aggregated.heavyHitters should contain(3.toK[K]) // C=3 is global top 1 heavy hitter } "exactly compute heavy hitters when created a from single, small stream" in { val data1 = Seq(1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5).toK[K] val cms1 = TopNCMS.monoid[K](EPS, DELTA, SEED, 5).create(data1) cms1.heavyHitters should be(Set(1, 2, 3, 4, 5)) val cms2 = TopNCMS.monoid[K](EPS, DELTA, SEED, 4).create(data1) cms2.heavyHitters should be(Set(2, 3, 4, 5)) val cms3 = TopNCMS.monoid[K](EPS, DELTA, SEED, 3).create(data1) cms3.heavyHitters should be(Set(3, 4, 5)) val cms4 = TopNCMS.monoid[K](EPS, DELTA, SEED, 2).create(data1) cms4.heavyHitters should be(Set(4, 5)) val cms5 = TopNCMS.monoid[K](EPS, DELTA, SEED, 1).create(data1) cms5.heavyHitters should be(Set(5)) } "work as an Aggregator when created from a single, small stream" in { val 
data1 = Seq(1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5).toK[K] val cms1 = TopNCMS.aggregator[K](EPS, DELTA, SEED, 5).apply(data1) cms1.heavyHitters should be(Set(1, 2, 3, 4, 5)) val cms2 = TopNCMS.aggregator[K](EPS, DELTA, SEED, 4).apply(data1) cms2.heavyHitters should be(Set(2, 3, 4, 5)) val cms3 = TopNCMS.aggregator[K](EPS, DELTA, SEED, 3).apply(data1) cms3.heavyHitters should be(Set(3, 4, 5)) val cms4 = TopNCMS.aggregator[K](EPS, DELTA, SEED, 2).apply(data1) cms4.heavyHitters should be(Set(4, 5)) val cms5 = TopNCMS.aggregator[K](EPS, DELTA, SEED, 1).apply(data1) cms5.heavyHitters should be(Set(5)) } } } class CMSFunctionsSpec extends PropSpec with PropertyChecks with Matchers { property("roundtrips width->eps->width") { forAll { (i: Int) => whenever(i > 0) { CMSFunctions.width(CMSFunctions.eps(i)) should be(i) } } } property("roundtrips depth->delta->depth for common depth values") { // For all i > 709 this test break because of precision limits: For all i > 709 will return 0.0, which is not the // mathematically correct value but rather the asymptote of delta. val maxI = 709 forAll(Gen.choose(0, maxI)) { (i: Int) => CMSFunctions.depth(CMSFunctions.delta(i)) should be(i) } } // Documents a precision error that is exposed by all depths > 709. // For all i > 709, CMSFunctions.delta(i) will return 0.0, which is not the mathematically correct value but rather // the asymptote of the delta function. 
property("throw IAE when deriving delta from invalid depth values") { val maxValidDelta = 709 forAll(Gen.choose(maxValidDelta + 1, 10000)) { (invalidDepth: Int) => val exception = intercept[IllegalArgumentException] { CMSFunctions.delta(invalidDepth) } exception.getMessage should fullyMatch regex """requirement failed: depth must be smaller as it causes precision errors when computing delta \\(\\d+ led to an invalid delta of 0.0\\)""" } } property("throw IAE when deriving depth from invalid delta values") { val invalidDeltas = Table("invalidDelta", 0.0, 1E-330, 1E-400) forAll(invalidDeltas) { (invalidDelta: Double) => val exception = intercept[IllegalArgumentException] { CMSFunctions.depth(invalidDelta) } exception.getMessage should be("requirement failed: delta must lie in (0, 1)") } } } class CMSParamsSpec extends PropSpec with PropertyChecks with Matchers { val AnyEps = 0.001 val AnyDelta = 1E-5 val AnyHashes = { val AnySeed = 1 CMSFunctions.generateHashes[Long](AnyEps, AnyDelta, AnySeed) } property("throw IAE for invalid eps values") { val invalidEpsilons = Table("invalidEps", 0.0, 1.0, 2.0, 100.0) forAll(invalidEpsilons) { (invalidEps: Double) => val exception = intercept[IllegalArgumentException] { CMSParams(AnyHashes, invalidEps, AnyDelta) } exception.getMessage should be("requirement failed: eps must lie in (0, 1)") } } property("throw IAE for invalid delta values") { val invalidDeltas = Table("invalidDelta", 0.0, 1.0, 2.0, 100.0, 1E-330, 1E-400) forAll(invalidDeltas) { (invalidDelta: Double) => val exception = intercept[IllegalArgumentException] { CMSParams(AnyHashes, AnyEps, invalidDelta) } exception.getMessage should be("requirement failed: delta must lie in (0, 1)") } } property("throw IAE when we do not have enough hashes") { val tooFewHashes = Seq.empty[CMSHash[Long]] val exception = intercept[IllegalArgumentException] { CMSParams(tooFewHashes, AnyEps, AnyDelta) } exception.getMessage should fullyMatch regex """requirement failed: we require at least 
(\\d+) hash functions""" } } /** * This spec verifies that we provide legacy types for the CMS and CountMinSketchMonoid classes we had in Algebird * versions < 0.8.1. Note that this spec is not meant to verify their actual functionality. */ class LegacyCMSSpec extends WordSpec with Matchers { import legacy.CountMinSketchMonoid val DELTA = 1E-10 val EPS = 0.001 val SEED = 1 val CMS_MONOID: CountMinSketchMonoid = CountMinSketchMonoid(EPS, DELTA, SEED) "The legacy package" should { "provide a legacy type for the CMS implementation in Algebird versions < 0.8.1" in { val cms: legacy.CMS = CMS_MONOID.create(Seq(0L, 0L)) cms.frequency(0L).estimate should be (2) cms.heavyHitters should be(Set(0L)) } "provide a legacy type for the CMS monoid implementation in Algebird versions < 0.8.1" in { val cmsMonoid: CountMinSketchMonoid = { val eps = 0.001 val delta = 1E-5 val seed = 1 val heavyHittersPct = 0.1 CountMinSketchMonoid(eps, delta, seed, heavyHittersPct) } val cms = cmsMonoid.create(Seq(0L, 0L)) cms.frequency(0L).estimate should be (2) cms.heavyHitters should be(Set(0L)) } } }
jinlee/algebird
algebird-test/src/test/scala/com/twitter/algebird/CountMinSketchTest.scala
Scala
apache-2.0
25,902
package roshan.map

import akka.actor.{Actor, ActorRef}
import roshan.protocols.MapProtocol.{AddCharacter, RemoveCharacter, MoveCharacter, CharacterId}
import scala.collection.mutable
import roshan.protocols.CharacterProtocol.Moved
import roshan.Loaderable

/** Actor mixin that tracks which characters currently occupy this map
  * section and broadcasts add/move/remove events to subscribers.
  *
  * NOTE(review): `grid`, `checkMapCollision` and `publishCharacterChange`
  * come from the MapInfo/EventBox mixins, which are not visible in this
  * file — their exact semantics should be confirmed there.
  */
trait CharacterHandler extends Actor with EventBox with MapInfo {
  // Reverse lookup: character actor -> its stable character id.
  var char_id = mutable.HashMap[ActorRef, CharacterId]()
  val Server: Loaderable

  /** Message handlers for character lifecycle on this map section. */
  def characterActions: Receive = {
    case MoveCharacter(x, y, id, character) =>
      // Accept the move only if the destination tile is free of both other
      // characters and static map obstacles; otherwise the message is dropped.
      if (!grid.checkCharacterCollision(x, y) && !checkMapCollision(x, y)) {
        grid.add(character, x, y)
        char_id += (character -> id)
        // Tell the previous handler (the sender) to drop this character,
        // impersonating the character actor itself.
        // NOTE(review): the RemoveCharacter carries the NEW (x, y), which the
        // old section then republishes — confirm against the RemoveCharacter
        // branch below that this is the intended cross-section handoff.
        sender tell(RemoveCharacter(Some(x, y)), character)
        character ! Moved(x, y)
        publishCharacterChange(id = id, x = x, y = y, walk = true)
      }
    case AddCharacter(id, x, y) =>
      // Sender is the character actor being placed on the grid.
      grid.add(sender, x, y)
      char_id += (sender -> new CharacterId(id))
      publishCharacterChange(id = char_id(sender), x = x, y = y)
    case RemoveCharacter(newXY) =>
      // None => the character left entirely; announce it as gone.
      if (!newXY.isDefined)
        publishCharacterChange(char_id(sender), 0, 0, isGone = true)
      else newXY foreach {
        // If moving across channels post to the old place as well
        xy: (Int, Int) => publishCharacterChange(id = char_id(sender), x = xy._1, y = xy._2)
      }
      // In both cases, forget the character locally.
      grid remove sender
      char_id -= sender
  }
}
andychase/roshan
src/main/scala/roshan/map/CharacterHandler.scala
Scala
mit
1,368
package com.codacy.client.bitbucket.v1

import java.time.LocalDateTime

import play.api.libs.functional.syntax._
import play.api.libs.json._

/** A Bitbucket (REST API v1) issue.
  *
  * Field names follow the local model, not the wire format: see the `Reads`
  * in the companion for the JSON-path -> field mapping (e.g. `id` comes from
  * the JSON "local_id", `owner` from "reported_by.username").
  */
case class Issue(
  id: Long,
  status: String,
  priority: String,
  title: String,
  content: String,
  owner: String,
  created_on: LocalDateTime,
  kind: String
)

object Issue {
  // Bitbucket v1 timestamp layout, e.g. "2015-01-01T12:00:00.000".
  val dateFormat = "yyyy-MM-dd'T'HH:mm:ss.SSS"
  implicit val dateTimeReads: Reads[LocalDateTime] = Reads.localDateTimeReads(dateFormat)

  // JSON deserializer. The read order below must match the positional order
  // of the Issue constructor parameters above.
  // format: off
  implicit val reader: Reads[Issue] = (
    (__ \\ "local_id").read[Long] and
    (__ \\ "status").read[String] and
    (__ \\ "priority").read[String] and
    (__ \\ "title").read[String] and
    (__ \\ "content").read[String] and
    (__ \\ "reported_by" \\ "username").read[String] and
    (__ \\ "created_on").read[LocalDateTime] and
    (__ \\ "metadata" \\ "kind").read[String]
  )(Issue.apply _)
  // format: on
}
codacy/bitbucket-scala-client
src/main/scala/com/codacy/client/bitbucket/v1/Issue.scala
Scala
apache-2.0
923
package collins.models.cache

import org.specs2.mutable

import play.api.test.WithApplication

import collins.util.IpAddress
import collins.models.Asset
import collins.models.AssetMeta
import collins.models.AssetMetaValue
import collins.models.AssetType
import collins.models.IpmiInfo
import collins.models.State
import collins.models.Status
import collins.models.IpAddresses

/*
 * This specification relies heavily on migrations to populate the database
 */
// Verifies that every model lookup populates the Cache under the model's
// documented cache key, including negative (None) results. Runs sequentially
// because the examples share database/cache state.
class CacheSpec extends mutable.Specification {

  "Cache Specification".title

  args(sequential = true)

  "Basic cache operations " should {
    "return None when looking for an element not populated in cache " in new WithApplication {
      val assetFromCache = Cache.get[Option[Asset]](Asset.findByTagKey("notincache"))
      assetFromCache mustEqual None
    }
  }

  // Asset lookups cache both hits (Some(Some(asset))) and misses (Some(None)).
  "Assets must be cached" in {
    "during find for non existing asset cache should be populated with None" in new WithApplication {
      val maybeAsset = Asset.findByTag("cacheasset1")
      maybeAsset mustEqual None
      // The miss itself is cached: Some(None), not a cache miss (None).
      val assetFromCache = Cache.get[Option[Asset]](Asset.findByTagKey("cacheasset1"))
      assetFromCache mustEqual Some(None)
    }
    "after a create asset must be found in cache using tag" in new WithApplication {
      val assetTag = "cacheasset1"
      val maybeAsset = Asset.findByTag(assetTag)
      maybeAsset mustEqual None
      val assetFromCache = Cache.get[Option[Asset]](Asset.findByTagKey(assetTag))
      assetFromCache mustEqual Some(None)
      val asset = Asset.create(Asset(assetTag, Status.Incomplete.get, AssetType.ServerNode.get))
      val afterCreateMaybeAsset = Asset.findByTag(assetTag)
      afterCreateMaybeAsset mustEqual Some(asset)
      val afterCreateAssetFromCache = Cache.get[Option[Asset]](Asset.findByTagKey(assetTag))
      afterCreateAssetFromCache mustEqual Some(Some(asset))
    }
    "after a create asset must be found in cache using id" in new WithApplication {
      val assetTag = "cacheasset1"
      val maybeAsset = Asset.findByTag(assetTag)
      maybeAsset mustEqual None
      val assetFromCache = Cache.get[Option[Asset]](Asset.findByTagKey(assetTag))
      assetFromCache mustEqual Some(None)
      val asset = Asset.create(Asset(assetTag, Status.Incomplete.get, AssetType.ServerNode.get))
      val afterCreateMaybeAsset = Asset.findById(asset.id)
      afterCreateMaybeAsset mustEqual Some(asset)
      val afterCreateAssetFromCache = Cache.get[Option[Asset]](Asset.findByIdKey(asset.id))
      afterCreateAssetFromCache mustEqual Some(Some(asset))
    }
  }

  // AssetMeta rows come from migrations; each finder caches under its own key.
  "AssetMeta must be cached" in {
    "find pre-populated asset meta" in new WithApplication {
      val metas = AssetMeta.findAll()
      val metasFromCache = Cache.get[List[AssetMeta]](AssetMeta.findByAllKey)
      metasFromCache mustEqual Some(metas)
    }
    "find pre-populated asset meta by name" in new WithApplication {
      val meta = AssetMeta.findAll().head
      val sameMeta = AssetMeta.findByName(meta.name)
      sameMeta mustEqual Some(meta)
      val metaFromCache = Cache.get[Option[AssetMeta]](AssetMeta.findByNameKey(meta.name))
      metaFromCache mustEqual Some(Some(meta))
    }
    "find pre-populated asset meta by id" in new WithApplication {
      val meta = AssetMeta.findAll().head
      val sameMeta = AssetMeta.findById(meta.id)
      sameMeta mustEqual Some(meta)
      val metaFromCache = Cache.get[Option[AssetMeta]](AssetMeta.findByIdKey(meta.id))
      metaFromCache mustEqual Some(Some(meta))
    }
    "find pre-populated by viewabled " in new WithApplication {
      val metas = AssetMeta.getViewable()
      val metasFromCache = Cache.get[List[AssetMeta]](AssetMeta.findByViewableKey)
      metasFromCache mustEqual Some(metas)
    }
  }

  "AssetType must be cached" in {
    "find pre-populated asset types" in new WithApplication {
      val types = AssetType.find
      val typesFromCache = Cache.get[Option[List[AssetType]]](AssetType.findKey)
      typesFromCache mustEqual Some(types)
    }
    "find pre-populated asset type by name" in new WithApplication {
      val assetType = AssetType.find.head
      val sameType = AssetType.findByName(assetType.name)
      sameType mustEqual Some(assetType)
      val typeFromCache = Cache.get[Option[AssetType]](AssetType.findByNameKey(assetType.name))
      typeFromCache mustEqual Some(Some(assetType))
    }
    "find pre-populated asset type by id" in new WithApplication {
      val assetType = AssetType.find.head
      val sameType = AssetType.findById(assetType.id)
      sameType mustEqual Some(assetType)
      val typeFromCache = Cache.get[Option[AssetType]](AssetType.findByIdKey(assetType.id))
      typeFromCache mustEqual Some(Some(assetType))
    }
  }

  "State must be cached" in {
    "find pre-populated states" in new WithApplication {
      val states = State.find
      val statesFromCache = Cache.get[List[State]](State.findKey)
      statesFromCache mustEqual Some(states)
    }
    "find pre-populated state by name" in new WithApplication {
      val state = State.find.head
      val sameState = State.findByName(state.name)
      sameState mustEqual Some(state)
      val stateFromCache = Cache.get[Option[State]](State.findByNameKey(state.name))
      stateFromCache mustEqual Some(Some(state))
    }
    "find pre-populated state by id" in new WithApplication {
      val state = State.find.head
      val sameState = State.findById(state.id)
      sameState mustEqual Some(state)
      val stateFromCache = Cache.get[Option[State]](State.findByIdKey(state.id))
      stateFromCache mustEqual Some(Some(state))
    }
    "find pre-populated state by any status" in new WithApplication {
      val states = State.findByAnyStatus()
      // 6 is the number of migration-seeded states.
      // NOTE(review): brittle if migrations add states — confirm intent.
      states.size mustEqual 6
      val statesFromCache = Cache.get[List[State]](State.findByAnyStatusKey)
      statesFromCache mustEqual Some(states)
    }
    "find pre-populated state by status key" in new WithApplication {
      val status = Status.Maintenance.get
      val state = State.findByStatus(status)
      val stateFromCache = Cache.get[Option[State]](State.findByStatusKey(status.id))
      stateFromCache mustEqual Some(state)
    }
  }

  // Asset id 1 and meta id 1 are assumed to exist via migrations.
  "AssetMetaValues must be cached" in {
    "find pre-populated meta value by asset and meta id" in new WithApplication {
      val asset = Asset.findById(1).get
      val assetMeta = AssetMeta.findById(1).get
      val metaValues = AssetMetaValue.findByAssetAndMeta(asset, assetMeta, 10)
      val metaValuesFromCache = Cache.get[List[AssetMetaValue]](AssetMetaValue.findByAssetAndMetaKey(asset.id, assetMeta.id))
      metaValuesFromCache mustEqual Some(metaValues)
      // using method from Asset
      val firstMetaValue = asset.getMetaAttribute(assetMeta.name)
      firstMetaValue mustEqual metaValuesFromCache.get.headOption
      firstMetaValue mustEqual metaValues.headOption
    }
    "find pre-populated meta value by asset" in new WithApplication {
      val asset = Asset.findById(1).get
      val metaValues = AssetMetaValue.findByAsset(asset)
      val metaValuesFromCache = Cache.get[List[AssetMetaValue]](AssetMetaValue.findByAssetKey(asset.id))
      metaValuesFromCache mustEqual Some(metaValues)
    }
    "find pre-populated meta value by meta" in new WithApplication {
      val assetMeta = AssetMeta.findById(1).get
      val metaValues = AssetMetaValue.findByMeta(assetMeta)
      val metaValuesFromCache = Cache.get[List[AssetMetaValue]](AssetMetaValue.findByMetaKey(assetMeta.id))
      metaValuesFromCache mustEqual Some(metaValues)
    }
  }

  "IpmiInfo must be cached" in {
    "find pre-populated ipmi info by asset" in new WithApplication {
      val asset = Asset.findById(1).get
      val ipmiInfo = IpmiInfo.findByAsset(asset)
      val ipmiInfoFromCache = Cache.get[Option[IpmiInfo]](IpmiInfo.findByAssetKey(asset.id))
      ipmiInfoFromCache mustEqual Some(ipmiInfo)
    }
    "find pre-populated all ipmi info by asset" in new WithApplication {
      val asset = Asset.findById(1).get
      val ipmiInfo = IpmiInfo.findAllByAsset(asset)
      val ipmiInfoFromCache = Cache.get[List[IpmiInfo]](IpmiInfo.findAllByAssetKey(asset.id))
      ipmiInfoFromCache mustEqual Some(ipmiInfo)
    }
    "find pre-populated ipmi info by id" in new WithApplication {
      // Numeric literals are packed IPv4 addresses/netmask stored as longs.
      val ipmiInfo = IpmiInfo.get(IpmiInfo(1, "test-user", "", 167772161L, 167772162L, 4294959104L, 1))
      val ipmiInfoFromCache = Cache.get[Option[IpmiInfo]](IpmiInfo.findByIdKey(ipmiInfo.id))
      ipmiInfoFromCache mustEqual Some(ipmiInfo)
    }
  }

  "IpAddress must be cached" in {
    "find ip address by asset" in new WithApplication {
      val asset = Asset.findById(1).get
      IpAddresses.create(IpAddresses(asset.id, IpAddress.toLong("10.0.0.1"), IpAddress.toLong("10.0.0.2"), IpAddress.toLong("255.255.224.0"), "fortesting"))
      val address = IpAddresses.findByAsset(asset)
      val addressFromCache = Cache.get[Option[IpAddresses]](IpAddresses.findByAssetKey(asset.id))
      addressFromCache mustEqual Some(address)
    }
    "find all ip address by asset" in new WithApplication {
      val asset = Asset.findById(1).get
      IpAddresses.create(IpAddresses(asset.id, IpAddress.toLong("10.0.0.1"), IpAddress.toLong("10.0.0.2"), IpAddress.toLong("255.255.224.0"), "fortesting"))
      val addresses = IpAddresses.findAllByAsset(asset)
      val addressesFromCache = Cache.get[Option[List[IpAddresses]]](IpAddresses.findAllByAssetKey(asset.id))
      addressesFromCache mustEqual Some(addresses)
    }
    "find ip address by id" in new WithApplication {
      val asset = Asset.findById(1).get
      val address = IpAddresses.get(IpAddresses.create(IpAddresses(asset.id, IpAddress.toLong("10.0.0.1"), IpAddress.toLong("10.0.0.2"), IpAddress.toLong("255.255.224.0"), "fortesting")))
      val addressFromCache = Cache.get[Option[IpAddresses]](IpAddresses.findByIdKey(address.id))
      addressFromCache mustEqual Some(address)
    }
    "find pools in use" in new WithApplication {
      val asset = Asset.findById(1).get
      IpAddresses.create(IpAddresses(asset.id, IpAddress.toLong("10.0.0.1"), IpAddress.toLong("10.0.0.2"), IpAddress.toLong("255.255.224.0"), "fortesting"))
      val pools = IpAddresses.getPoolsInUse()
      pools mustEqual Set("fortesting")
      val poolsFromCache = Cache.get[Option[IpAddresses]](IpAddresses.findPoolsInUseKey)
      poolsFromCache mustEqual Some(pools)
    }
  }
}
box/collins
test/collins/models/cache/CacheSpec.scala
Scala
apache-2.0
10,462
package com.typesafe.sbt package packager import sbt._ /** * General purpose keys for the native packager */ trait NativePackagerKeys { val packageName = SettingKey[String]("packageName", "Name of the created output package. Used for dirs/scripts.") val packageSummary = SettingKey[String]("package-summary", "Summary of the contents of a linux package.") val packageDescription = SettingKey[String]("package-description", "The description of the package. Used when searching.") val maintainer = SettingKey[String]("maintainer", "The name/email address of a maintainer for the native package.") val executableScriptName = SettingKey[String]("executableScriptName", "Name of the executing script.") } /** * This Keys object can be used for * <ul> * <li>non autoplugin builds</li> * <li>import single keys, which are not inside the autoImport</li> * </ul> * * == Non autoplugin builds == * * {{{ * import com.typesafe.sbt.packager.Keys._ * * packageName := "" * }}} * * == autoplugin builds == * * {{{ * NativePackagerKeys.packageName := "" * }}} */ object Keys extends NativePackagerKeys with universal.UniversalKeys with linux.LinuxKeys with windows.WindowsKeys with docker.DockerKeys with debian.DebianKeys with rpm.RpmKeys with archetypes.JavaAppKeys
bfil/sbt-native-packager
src/main/scala/com/typesafe/sbt/packager/Keys.scala
Scala
bsd-2-clause
1,308
package org.jetbrains.plugins.scala package lang package resolve package processor import com.intellij.psi._ import org.jetbrains.plugins.scala.lang.psi.api.base.ScReferenceElement import org.jetbrains.plugins.scala.lang.psi.api.statements._ import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypedDefinition import org.jetbrains.plugins.scala.lang.psi.types._ import org.jetbrains.plugins.scala.lang.psi.types.api.designator.ScProjectionType import org.jetbrains.plugins.scala.lang.psi.types.result._ import scala.collection.Set import scala.collection.mutable.ArrayBuffer /** * This class is useful for finding actual methods for unapply or unapplySeq, in case for values: * <code> * val a: Regex * z match { * case a() => * } * </code> * This class cannot be used for actual resolve, because reference to value should work to this value, not to * invoked unapply method. */ class ExpandedExtractorResolveProcessor(ref: ScReferenceElement, refName: String, kinds: Set[ResolveTargets.Value], expected: Option[ScType]) extends ExtractorResolveProcessor(ref, refName, kinds, expected) { override def execute(element: PsiElement, state: ResolveState): Boolean = { val named = element.asInstanceOf[PsiNamedElement] if (nameAndKindMatch(named, state)) { val accessible = isAccessible(named, ref) if (accessibility && !accessible) return true named match { case bind: ScTypedDefinition => { val parentSubst = getSubst(state) val parentImports = getImports(state) val typez = getFromType(state) match { case Some(tp) => ScProjectionType(tp, bind, superReference = false) case _ => bind.`type`().getOrAny } var seq = false val buffer = new ArrayBuffer[ScalaResolveResult] val proc = new BaseProcessor(StdKinds.methodRef) { def execute(element: PsiElement, state: ResolveState): Boolean = { val subst = getSubst(state) element match { case fun: ScFunction if fun.name == "unapply" || (seq && fun.name == "unapplySeq") => buffer += new ScalaResolveResult(fun, parentSubst.followed(subst), 
parentImports, parentElement = Some(bind), isAccessible = accessible) case _ => } true } } proc.processType(parentSubst.subst(typez), ref, ResolveState.initial) addResults(buffer.toSeq) if (candidatesSet.isEmpty && levelSet.isEmpty) { buffer.clear() seq = true proc.processType(parentSubst.subst(typez), ref, ResolveState.initial) addResults(buffer.toSeq) } } case _ => return super.execute(element, state) } } true } }
triplequote/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/resolve/processor/ExpandedExtractorResolveProcessor.scala
Scala
apache-2.0
2,989
package commons.repositories import commons.models.{Descending, IdMetaModel, Ordering, Property} import slick.dbio.DBIO import slick.jdbc.MySQLProfile.api.{DBIO => _, MappedTo => _, Rep => _, TableQuery => _, _} import slick.lifted._ import scala.concurrent.ExecutionContext.Implicits._ trait BaseRepo[ModelId <: BaseId[Long], Model <: WithId[Long, ModelId], ModelTable <: IdTable[ModelId, Model]] { protected val mappingConstructor: Tag => ModelTable protected val metaModelToColumnsMapping: Map[Property[_], (ModelTable) => Rep[_]] implicit protected val modelIdMapping: BaseColumnType[ModelId] // lazy required to init table query with concrete mappingConstructor value lazy val query: TableQuery[ModelTable] = TableQuery[ModelTable](mappingConstructor) protected val metaModel: IdMetaModel def all: DBIO[Seq[Model]] = all(List(Ordering(metaModel.id, Descending))) def all(orderings: List[Ordering]): DBIO[Seq[Model]] = { if (orderings == null) all else orderings match { case Nil => all case _ => // multiple sortBy calls are reversed comparing to SQLs order by clause val slickOrderings = orderings.map(toSlickOrderingSupplier).reverse var sortQuery = query.sortBy(slickOrderings.head) slickOrderings.tail.foreach(getSlickOrdering => { sortQuery = sortQuery.sortBy(getSlickOrdering) }) sortQuery.result } } protected def toSlickOrderingSupplier(ordering: Ordering): (ModelTable) => ColumnOrdered[_] = { implicit val Ordering(property, direction) = ordering val getColumn = metaModelToColumnsMapping(ordering.property) getColumn.andThen(RepoHelper.createSlickColumnOrdered) } def byId(modelId: ModelId): DBIO[Option[Model]] = if (modelId == null) DBIO.failed(new NullPointerException) else query.filter(_.id === modelId).result.headOption def create(model: Model): DBIO[Model] = if (model == null) DBIO.failed(new NullPointerException) else query.returning(query.map(_.id)).+=(model) .flatMap(id => byId(id)) .map(_.get) def update(model: Model): DBIO[Model] = if (model == null) DBIO.failed(new 
NullPointerException) else query .filter(_.id === model.id) .update(model) .flatMap(_ => byId(model.id)) .map(_.get) } abstract class IdTable[Id <: BaseId[Long], Entity <: WithId[Long, Id]] (tag: Tag, schemaName: Option[String], tableName: String) (implicit val mapping: BaseColumnType[Id]) extends Table[Entity](tag, schemaName, tableName) { def this(tag: Tag, tableName: String)(implicit mapping: BaseColumnType[Id]) = this(tag, None, tableName) protected val idColumnName: String = "id" final def id: Rep[Id] = column[Id](idColumnName, O.PrimaryKey, O.AutoInc) }
Dasiu/play-framework-test-project
app/commons/repositories/BaseRepo.scala
Scala
mit
2,774
package com.sksamuel.scapegoat.inspections.collections import com.sksamuel.scapegoat._ class ReverseFunc extends Inspection( text = "Unnecessary reverse", defaultLevel = Levels.Info, description = "Checks for use of reverse followed by head/headOption/iterator/map.", explanation = "`reverse` followed by `head`, `headOption`, `iterator`, or `map` can be replaced, respectively, with " + "`last`, `lastOption`, `reverseIterator`, or `reverseMap`." ) { object FuncReplace { private val funcReplace = Map( "head" -> "last", "headOption" -> "lastOption", "iterator" -> "reverseIterator", "map" -> "reverseMap" ) def unapply(func: String): Option[(String, String)] = funcReplace.find(_._1 == func) } def inspector(context: InspectionContext): Inspector = new Inspector(context) { override def postTyperTraverser: context.Traverser = new context.Traverser { import context.global._ override def inspect(tree: Tree): Unit = { tree match { case Select(Select(c, TermName("reverse")), TermName(FuncReplace(_, _))) if c.tpe <:< typeOf[Iterable[Any]] => context.warn(tree.pos, self, tree.toString.take(500)) case Select( Apply(arrayOps1, List(Select(Apply(arrayOps2, List(_)), TermName("reverse")))), TermName(FuncReplace(_, _)) ) if arrayOps1.toString.contains("ArrayOps") && arrayOps2.toString.contains("ArrayOps") => context.warn(tree.pos, self, tree.toString.take(500)) case _ => continue(tree) } } } } }
sksamuel/scapegoat
src/main/scala/com/sksamuel/scapegoat/inspections/collections/ReverseFunc.scala
Scala
apache-2.0
1,737
class ImplicitConversions1 { implicit class any2either1[T](a: T) { def either: Either[T,T] = Left(a) } } object ImplicitConversions2 extends ImplicitConversions1 object ImplicitConversions3 extends ImplicitConversions1 { implicit class any2either3[T](a: T) { def either: Either[T,T] = Right(a) } }
grzegorzbalcerek/scala-book-examples
examples/ImplicitConversions2.scala
Scala
mit
314
/** * Copyright 2015 ICT. * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package cn.ac.ict.acs.netflow trait MasterMessage extends Serializable object MasterMessages { // LeaderElectionAgent to Master case object AppointedAsLeader extends MasterMessage case object RevokedLeadership extends MasterMessage // Actor System to Master case object CompleteRecovery extends MasterMessage case object BoundPortsRequest extends MasterMessage case class BoundPortsResponse(actorPort: Int, webUIPort: Int) extends MasterMessage }
DataSysLab/netflow
common/src/main/scala/cn/ac/ict/acs/netflow/MasterMessages.scala
Scala
apache-2.0
1,299
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.template.similarproduct import org.apache.predictionio.controller.PPreparator import org.apache.spark.SparkContext import org.apache.spark.SparkContext._ import org.apache.spark.rdd.RDD class Preparator extends PPreparator[TrainingData, PreparedData] { def prepare(sc: SparkContext, trainingData: TrainingData): PreparedData = { new PreparedData( users = trainingData.users, items = trainingData.items, rateEvents = trainingData.rateEvents) } } class PreparedData( val users: RDD[(String, User)], val items: RDD[(String, Item)], val rateEvents: RDD[RateEvent] ) extends Serializable
pferrel/PredictionIO
examples/scala-parallel-similarproduct/add-rateevent/src/main/scala/Preparator.scala
Scala
apache-2.0
1,436
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.streaming.sources import scala.collection.mutable import scala.language.implicitConversions import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.encoders.RowEncoder import org.apache.spark.sql.execution.SerializeFromObjectExec import org.apache.spark.sql.execution.streaming.MemoryStream import org.apache.spark.sql.functions._ import org.apache.spark.sql.streaming._ case class KV(key: Int, value: Long) class ForeachBatchSinkSuite extends StreamTest { import testImplicits._ test("foreachBatch with non-stateful query") { val mem = MemoryStream[Int] val ds = mem.toDS.map(_ + 1) val tester = new ForeachBatchTester[Int](mem) val writer = (ds: Dataset[Int], batchId: Long) => tester.record(batchId, ds.map(_ + 1)) import tester._ testWriter(ds, writer)( check(in = 1, 2, 3)(out = 3, 4, 5), // out = in + 2 (i.e. 
1 in query, 1 in writer) check(in = 5, 6, 7)(out = 7, 8, 9)) } test("foreachBatch with non-stateful query - untyped Dataset") { val mem = MemoryStream[Int] val ds = mem.toDF.selectExpr("value + 1 as value") val tester = new ForeachBatchTester[Row](mem)(RowEncoder.apply(ds.schema)) val writer = (df: DataFrame, batchId: Long) => tester.record(batchId, df.selectExpr("value + 1")) import tester._ testWriter(ds, writer)( // out = in + 2 (i.e. 1 in query, 1 in writer) check(in = 1, 2, 3)(out = Row(3), Row(4), Row(5)), check(in = 5, 6, 7)(out = Row(7), Row(8), Row(9))) } test("foreachBatch with stateful query in update mode") { val mem = MemoryStream[Int] val ds = mem.toDF() .select($"value" % 2 as "key") .groupBy("key") .agg(count("*") as "value") .toDF.as[KV] val tester = new ForeachBatchTester[KV](mem) val writer = (batchDS: Dataset[KV], batchId: Long) => tester.record(batchId, batchDS) import tester._ testWriter(ds, writer, outputMode = OutputMode.Update)( check(in = 0)(out = (0, 1L)), check(in = 1)(out = (1, 1L)), check(in = 2, 3)(out = (0, 2L), (1, 2L))) } test("foreachBatch with stateful query in complete mode") { val mem = MemoryStream[Int] val ds = mem.toDF() .select($"value" % 2 as "key") .groupBy("key") .agg(count("*") as "value") .toDF.as[KV] val tester = new ForeachBatchTester[KV](mem) val writer = (batchDS: Dataset[KV], batchId: Long) => tester.record(batchId, batchDS) import tester._ testWriter(ds, writer, outputMode = OutputMode.Complete)( check(in = 0)(out = (0, 1L)), check(in = 1)(out = (0, 1L), (1, 1L)), check(in = 2)(out = (0, 2L), (1, 1L))) } test("foreachBatch with batch specific operations") { val mem = MemoryStream[Int] val ds = mem.toDS.map(_ + 1) val tester = new ForeachBatchTester[Int](mem) val writer: (Dataset[Int], Long) => Unit = { case (df, batchId) => df.persist() val newDF = df .map(_ + 1) .repartition(1) .sort(Column("value").desc) tester.record(batchId, newDF) // just run another simple query against cached DF to confirm they don't 
conflict each other val curValues = df.collect() val newValues = df.map(_ + 2).collect() assert(curValues.map(_ + 2) === newValues) df.unpersist() } import tester._ testWriter(ds, writer)( // out = in + 2 (i.e. 1 in query, 1 in writer), with sorted check(in = 1, 2, 3)(out = 5, 4, 3), check(in = 5, 6, 7)(out = 9, 8, 7)) } test("foreachBatchSink does not affect metric generation") { val mem = MemoryStream[Int] val ds = mem.toDS.map(_ + 1) val tester = new ForeachBatchTester[Int](mem) val writer = (ds: Dataset[Int], batchId: Long) => tester.record(batchId, ds.map(_ + 1)) import tester._ testWriter(ds, writer)( check(in = 1, 2, 3)(out = 3, 4, 5), checkMetrics) } test("throws errors in invalid situations") { val ds = MemoryStream[Int].toDS val ex1 = intercept[IllegalArgumentException] { ds.writeStream.foreachBatch(null.asInstanceOf[(Dataset[Int], Long) => Unit]).start() } assert(ex1.getMessage.contains("foreachBatch function cannot be null")) val ex2 = intercept[AnalysisException] { ds.writeStream.foreachBatch((_: Dataset[Int], _: Long) => {}) .trigger(Trigger.Continuous("1 second")).start() } assert(ex2.getMessage.contains("'foreachBatch' is not supported with continuous trigger")) val ex3 = intercept[AnalysisException] { ds.writeStream.foreachBatch((_: Dataset[Int], _: Long) => {}).partitionBy("value").start() } assert(ex3.getMessage.contains("'foreachBatch' does not support partitioning")) } test("foreachBatch should not introduce object serialization") { def assertPlan[T](stream: MemoryStream[Int], ds: Dataset[T]): Unit = { var planAsserted = false val writer: (Dataset[T], Long) => Unit = { case (df, _) => assert(df.queryExecution.executedPlan.find { p => p.isInstanceOf[SerializeFromObjectExec] }.isEmpty, "Untyped Dataset should not introduce serialization on object!") planAsserted = true } stream.addData(1, 2, 3, 4, 5) val query = ds.writeStream.trigger(Trigger.Once()).foreachBatch(writer).start() query.awaitTermination() assert(planAsserted, "ForeachBatch writer 
should be called!") } // typed val mem = MemoryStream[Int] val ds = mem.toDS.map(_ + 1) assertPlan(mem, ds) // untyped val mem2 = MemoryStream[Int] val dsUntyped = mem2.toDF().selectExpr("value + 1 as value") assertPlan(mem2, dsUntyped) } // ============== Helper classes and methods ================= private class ForeachBatchTester[T: Encoder](memoryStream: MemoryStream[Int]) { trait Test private case class Check(in: Seq[Int], out: Seq[T]) extends Test private case object CheckMetrics extends Test private val recordedOutput = new mutable.HashMap[Long, Seq[T]] def testWriter( ds: Dataset[T], outputBatchWriter: (Dataset[T], Long) => Unit, outputMode: OutputMode = OutputMode.Append())(tests: Test*): Unit = { try { var expectedBatchId = -1 val query = ds.writeStream.outputMode(outputMode).foreachBatch(outputBatchWriter).start() tests.foreach { case Check(in, out) => expectedBatchId += 1 memoryStream.addData(in) query.processAllAvailable() assert(recordedOutput.contains(expectedBatchId)) val ds: Dataset[T] = spark.createDataset[T](recordedOutput(expectedBatchId)) checkDataset[T](ds, out: _*) case CheckMetrics => assert(query.recentProgress.exists(_.numInputRows > 0)) } } finally { sqlContext.streams.active.foreach(_.stop()) } } def check(in: Int*)(out: T*): Test = Check(in, out) def checkMetrics: Test = CheckMetrics def record(batchId: Long, ds: Dataset[T]): Unit = recordedOutput.put(batchId, ds.collect()) implicit def conv(x: (Int, Long)): KV = KV(x._1, x._2) } }
ueshin/apache-spark
sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/sources/ForeachBatchSinkSuite.scala
Scala
apache-2.0
8,002
/* * Copyright 2015 TouchType Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.databricks.spark.redshift import java.io.File import java.sql.{Connection, PreparedStatement, SQLException} import scala.util.matching.Regex import org.apache.hadoop.conf.Configuration import org.apache.hadoop.mapreduce.InputFormat import org.scalamock.scalatest.MockFactory import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers} import org.apache.spark.SparkContext import org.apache.spark.rdd.RDD import org.apache.spark.sql.jdbc.JDBCWrapper import org.apache.spark.sql.sources._ import org.apache.spark.sql.{Row, SQLContext, SaveMode} class TestContext extends SparkContext("local", "RedshiftSourceSuite") { /** * A text file containing fake unloaded Redshift data of all supported types */ val testData = new File("src/test/resources/redshift_unload_data.txt").toURI.toString override def newAPIHadoopFile[K, V, F <: InputFormat[K, V]] (path: String, fClass: Class[F], kClass: Class[K], vClass: Class[V], conf: Configuration = hadoopConfiguration): RDD[(K, V)] = { super.newAPIHadoopFile[K, V, F](testData, fClass, kClass, vClass, conf) } } /** * Tests main DataFrame loading and writing functionality */ class RedshiftSourceSuite extends FunSuite with Matchers with MockFactory with BeforeAndAfterAll { /** * Temporary folder for unloading data to */ val tempDir = { var dir = File.createTempFile("spark_redshift_tests", "") dir.delete() dir.mkdirs() dir.toURI.toString } /** * Expected parsed 
output corresponding to the output of testData. */ val expectedData = Array( Row(1.toByte, true, TestUtils.toTimestamp(2015, 6, 1, 0, 0, 0), 1234152.123124981, 1.0f, 42, 1239012341823719L, 23, "Unicode是樂趣", TestUtils.toTimestamp(2015, 6, 1, 0, 0, 0, 1)), Row(1.toByte, false, TestUtils.toTimestamp(2015, 6, 2, 0, 0, 0), 0.0, 0.0f, 42, 1239012341823719L, -13, "asdf", TestUtils.toTimestamp(2015, 6, 2, 0, 0, 0, 0)), Row(0.toByte, null, TestUtils.toTimestamp(2015, 6, 3, 0, 0, 0), 0.0, -1.0f, 4141214, 1239012341823719L, null, "f", TestUtils.toTimestamp(2015, 6, 3, 0, 0, 0)), Row(0.toByte, false, null, -1234152.123124981, 100000.0f, null, 1239012341823719L, 24, "___|_123", null), Row(List.fill(10)(null): _*)) /** * Spark Context with hadoop file overridden to point at our local test data file for this suite, * no-matter what temp directory was generated and requested. */ private var sc: SparkContext = _ override def beforeAll(): Unit = { super.beforeAll() sc = new TestContext } override def afterAll(): Unit = { val temp = new File(tempDir) val tempFiles = temp.listFiles() if(tempFiles != null) tempFiles foreach { case f => if(f != null) f.delete() } temp.delete() sc.stop() super.afterAll() } /** * Set up a mocked JDBCWrapper instance that expects a sequence of queries matching the given * regular expressions will be executed, and that the connection returned will be closed. 
*/ def mockJdbcWrapper(expectedUrl: String, expectedQueries: Seq[Regex]): JDBCWrapper = { val jdbcWrapper = mock[JDBCWrapper] val mockedConnection = mock[Connection] (jdbcWrapper.getConnector _).expects(*, expectedUrl, *).returning(() => mockedConnection) inSequence { expectedQueries foreach { r => val mockedStatement = mock[PreparedStatement] (mockedConnection.prepareStatement(_: String)) .expects(where {(sql: String) => r.findFirstMatchIn(sql).nonEmpty}) .returning(mockedStatement) (mockedStatement.execute _).expects().returning(true) } (mockedConnection.close _).expects() } jdbcWrapper } /** * Prepare the JDBC wrapper for an UNLOAD test. */ def prepareUnloadTest(params: Map[String, String]) = { val jdbcUrl = params("url") val jdbcWrapper = mockJdbcWrapper(jdbcUrl, Seq("UNLOAD.*".r)) // We expect some extra calls to the JDBC wrapper, // to register the driver and retrieve the schema. (jdbcWrapper.registerDriver _) .expects(*) .anyNumberOfTimes() (jdbcWrapper.resolveTable _) .expects(jdbcUrl, "test_table", *) .returning(TestUtils.testSchema) .anyNumberOfTimes() jdbcWrapper } test("DefaultSource can load Redshift UNLOAD output to a DataFrame") { val params = Map("url" -> "jdbc:postgresql://foo/bar", "tempdir" -> "tmp", "dbtable" -> "test_table", "aws_access_key_id" -> "test1", "aws_secret_access_key" -> "test2") val jdbcWrapper = prepareUnloadTest(params) val testSqlContext = new SQLContext(sc) // Assert that we've loaded and converted all data in the test file val source = new DefaultSource(jdbcWrapper) val relation = source.createRelation(testSqlContext, params) val df = testSqlContext.baseRelationToDataFrame(relation) df.rdd.collect() zip expectedData foreach { case (loaded, expected) => loaded shouldBe expected } } test("DefaultSource supports simple column filtering") { val params = Map("url" -> "jdbc:postgresql://foo/bar", "tempdir" -> "tmp", "dbtable" -> "test_table", "aws_access_key_id" -> "test1", "aws_secret_access_key" -> "test2") val jdbcWrapper = 
prepareUnloadTest(params) val testSqlContext = new SQLContext(sc) // Construct the source with a custom schema val source = new DefaultSource(jdbcWrapper) val relation = source.createRelation(testSqlContext, params, TestUtils.testSchema) val rdd = relation.asInstanceOf[PrunedFilteredScan].buildScan(Array("testByte", "testBool"), Array.empty[Filter]) val prunedExpectedValues = Array(Row(1.toByte, true), Row(1.toByte, false), Row(0.toByte, null), Row(0.toByte, false), Row(null, null)) rdd.collect() zip prunedExpectedValues foreach { case (loaded, expected) => loaded shouldBe expected } } test("DefaultSource supports user schema, pruned and filtered scans") { val params = Map("url" -> "jdbc:postgresql://foo/bar", "tempdir" -> "tmp", "dbtable" -> "test_table", "aws_access_key_id" -> "test1", "aws_secret_access_key" -> "test2") val jdbcWrapper = prepareUnloadTest(params) val testSqlContext = new SQLContext(sc) // Construct the source with a custom schema val source = new DefaultSource(jdbcWrapper) val relation = source.createRelation(testSqlContext, params, TestUtils.testSchema) // Define a simple filter to only include a subset of rows val filters: Array[Filter] = Array(EqualTo("testBool", true), EqualTo("testString", "Unicode是樂趣"), GreaterThan("testDouble", 1000.0), LessThan("testDouble", Double.MaxValue), GreaterThanOrEqual("testFloat", 1.0f), LessThanOrEqual("testInt", 43)) val rdd = relation.asInstanceOf[PrunedFilteredScan].buildScan(Array("testByte", "testBool"), filters) // We should now only have one matching row, with two columns val filteredExpectedValues = Array(Row(1, true)) rdd.collect() zip filteredExpectedValues foreach { case (loaded, expected) => loaded shouldBe expected } } test("DefaultSource serializes data as Avro, then sends Redshift COPY command") { val testSqlContext = new SQLContext(sc) val jdbcUrl = "jdbc:postgresql://foo/bar" val params = Map("url" -> jdbcUrl, "tempdir" -> tempDir, "dbtable" -> "test_table", "aws_access_key_id" -> "test1", 
"aws_secret_access_key" -> "test2", "postactions" -> "GRANT SELECT ON %s TO jeremy", "diststyle" -> "KEY", "distkey" -> "testInt") val rdd = sc.parallelize(expectedData.toSeq) val df = testSqlContext.createDataFrame(rdd, TestUtils.testSchema) val expectedCommands = Seq("DROP TABLE IF EXISTS test_table_staging_.*".r, "CREATE TABLE IF NOT EXISTS test_table_staging.* DISTSTYLE KEY DISTKEY \\(testInt\\).*".r, "COPY test_table_staging_.*".r, "GRANT SELECT ON test_table_staging.+ TO jeremy".r, "ALTER TABLE test_table RENAME TO test_table_backup_.*".r, "ALTER TABLE test_table_staging_.* RENAME TO test_table".r, "DROP TABLE test_table_backup.*".r) val jdbcWrapper = mockJdbcWrapper(jdbcUrl, expectedCommands) (jdbcWrapper.tableExists _) .expects(*, "test_table") .returning(true) .anyNumberOfTimes() (jdbcWrapper.schemaString _) .expects(*, jdbcUrl) .returning("schema") .anyNumberOfTimes() val relation = RedshiftRelation(jdbcWrapper, Parameters.mergeParameters(params), None)(testSqlContext) relation.asInstanceOf[InsertableRelation].insert(df, true) // Make sure we wrote the data out ready for Redshift load, in the expected formats val written = testSqlContext.read.format("com.databricks.spark.avro").load(tempDir) written.collect() zip expectedData foreach { case (loaded, expected) => loaded shouldBe expected } } test("Failed copies are handled gracefully when using a staging table") { val testSqlContext = new SQLContext(sc) val jdbcUrl = "jdbc:postgresql://foo/bar" val params = Map("url" -> jdbcUrl, "tempdir" -> tempDir, "dbtable" -> "test_table", "aws_access_key_id" -> "test1", "aws_secret_access_key" -> "test2", "usestagingtable" -> "true") val rdd = sc.parallelize(expectedData.toSeq) val df = testSqlContext.createDataFrame(rdd, TestUtils.testSchema) val jdbcWrapper = mock[JDBCWrapper] val mockedConnection = mock[Connection] (jdbcWrapper.getConnector _) .expects(*, jdbcUrl, *) .returning(() => mockedConnection) def successfulStatement(pattern: Regex): PreparedStatement = { 
val mockedStatement = mock[PreparedStatement] (mockedConnection.prepareStatement(_: String)) .expects(where {(sql: String) => pattern.findFirstMatchIn(sql).nonEmpty}) .returning(mockedStatement) (mockedStatement.execute _).expects().returning(true) mockedStatement } def failedStatement(pattern: Regex) : PreparedStatement = { val mockedStatement = mock[PreparedStatement] (mockedConnection.prepareStatement(_: String)) .expects(where {(sql: String) => pattern.findFirstMatchIn(sql).nonEmpty}) .returning(mockedStatement) (mockedStatement.execute _) .expects() .throwing(new SQLException("Mocked Error")) mockedStatement } (jdbcWrapper.tableExists _) .expects(*, "test_table") .returning(true) .anyNumberOfTimes() (jdbcWrapper.schemaString _) .expects(*, jdbcUrl) .anyNumberOfTimes() inSequence { // Initial staging table setup succeeds successfulStatement("DROP TABLE IF EXISTS test_table_staging_.*".r) successfulStatement("CREATE TABLE IF NOT EXISTS test_table_staging.*".r) // Simulate COPY failure failedStatement("COPY test_table_staging_.*".r) // Expect recovery operations (jdbcWrapper.tableExists _) .expects(where {(conn: Connection, sql: String) => "test_table_staging.*".r.findFirstIn(sql).nonEmpty}) .returning(true) successfulStatement("DROP TABLE test_table_staging.*".r) (jdbcWrapper.tableExists _) .expects(where {(conn: Connection, sql: String) => "test_table_backup.*".r.findFirstIn(sql).nonEmpty}) .returning(true) successfulStatement("ALTER TABLE test_table_backup.+ RENAME TO test_table".r) (mockedConnection.close _).expects() } val source = new DefaultSource(jdbcWrapper) intercept[Exception] { source.createRelation(testSqlContext, SaveMode.Overwrite, params, df) } } test("Append SaveMode doesn't destroy existing data") { val testSqlContext = new SQLContext(sc) val jdbcUrl = "jdbc:postgresql://foo/bar" val params = Map("url" -> jdbcUrl, "tempdir" -> tempDir, "dbtable" -> "test_table", "aws_access_key_id" -> "test1", "aws_secret_access_key" -> "test2") val rdd = 
sc.parallelize(expectedData.toSeq) val df = testSqlContext.createDataFrame(rdd, TestUtils.testSchema) val expectedCommands = Seq("CREATE TABLE IF NOT EXISTS test_table .*".r, "COPY test_table .*".r) val jdbcWrapper = mockJdbcWrapper(jdbcUrl, expectedCommands) (jdbcWrapper.tableExists _) .expects(*, "test_table") .returning(true) .anyNumberOfTimes() (jdbcWrapper.schemaString _) .expects(*, jdbcUrl) .returning("schema") .anyNumberOfTimes() val source = new DefaultSource(jdbcWrapper) val savedDf = source.createRelation(testSqlContext, SaveMode.Append, params, df) // This test is "appending" to an empty table, so we expect all our test data to be // the only content in the returned data frame val written = testSqlContext.read.format("com.databricks.spark.avro").load(tempDir) written.collect() zip expectedData foreach { case (loaded, expected) => loaded shouldBe expected } } test("Respect SaveMode.ErrorIfExists when table exists") { val testSqlContext = new SQLContext(sc) val jdbcUrl = "jdbc:postgresql://foo/bar" val params = Map("url" -> jdbcUrl, "tempdir" -> tempDir, "dbtable" -> "test_table", "aws_access_key_id" -> "test1", "aws_secret_access_key" -> "test2") val rdd = sc.parallelize(expectedData.toSeq) val df = testSqlContext.createDataFrame(rdd, TestUtils.testSchema) // Check that SaveMode.ErrorIfExists throws an exception val errIfExistsWrapper = mockJdbcWrapper(jdbcUrl, Seq.empty[Regex]) (errIfExistsWrapper.tableExists _) .expects(*, "test_table") .returning(true) val errIfExistsSource = new DefaultSource(errIfExistsWrapper) intercept[Exception] { errIfExistsSource.createRelation(testSqlContext, SaveMode.ErrorIfExists, params, df) } } test("Do nothing when table exists if SaveMode = Ignore") { val testSqlContext = new SQLContext(sc) val jdbcUrl = "jdbc:postgresql://foo/bar" val params = Map("url" -> jdbcUrl, "tempdir" -> tempDir, "dbtable" -> "test_table", "aws_access_key_id" -> "test1", "aws_secret_access_key" -> "test2") val rdd = 
sc.parallelize(expectedData.toSeq) val df = testSqlContext.createDataFrame(rdd, TestUtils.testSchema) // Check that SaveMode.Ignore does nothing val ignoreWrapper = mockJdbcWrapper(jdbcUrl, Seq.empty[Regex]) (ignoreWrapper.tableExists _) .expects(*, "test_table") .returning(true) // Note: Assertions covered by mocks val ignoreSource = new DefaultSource(ignoreWrapper) ignoreSource.createRelation(testSqlContext, SaveMode.Ignore, params, df) } test("Public Scala API rejects invalid parameter maps") { val invalid = Map("dbtable" -> "foo") // missing tempdir and url val rdd = sc.parallelize(expectedData) val testSqlContext = new SQLContext(sc) val df = testSqlContext.createDataFrame(rdd, TestUtils.testSchema) intercept[Exception] { df.saveAsRedshiftTable(invalid) } intercept[Exception] { testSqlContext.redshiftTable(invalid) } } test("DefaultSource has default constructor, required by Data Source API") { new DefaultSource() } }
methodmill/spark-redshift
src/test/scala/com/databricks/spark/redshift/RedshiftSourceSuite.scala
Scala
apache-2.0
16,206
/*
 * Copyright ixias.net All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license
 * For the full copyright and license information,
 * please view the LICENSE file that was distributed with this source code.
 */

package ixias.play.api.json

import play.api.libs.json._
import play.api.libs.functional.syntax._

// The Error response
//~~~~~~~~~~~~~~~~~~~~
/**
 * JSON payload describing an API error response.
 *
 * @param error   application-defined numeric error code
 * @param message optional human-readable detail for the error
 */
case class JsValueError(
  val error:   Int,            // Error code
  val message: Option[String]  // Error message
)

object JsValueError {
  // Combinator-based serializer producing the error/message fields.
  // NOTE(review): `\\` is play-json's recursive-search path operator; a plain
  // child path `\` is the conventional choice for a Writes — confirm `\\` is
  // intended here (it may be an artifact of source escaping).
  implicit val writes: Writes[JsValueError] = (
    (__ \\ "error"  ).write[Int] and
    (__ \\ "message").write[Option[String]]
  )(unlift(JsValueError.unapply))
}
sp1rytus/ixias
framework/ixias-play-core/src/main/scala/ixias/play/api/json/JsValueError.scala
Scala
mit
695
package com.example.locationservice

import scala.concurrent.ExecutionContext
import scala.concurrent.Future

import akka.event.LoggingAdapter
import spray.http.StatusCodes.BadRequest
import spray.http.StatusCodes.InternalServerError
import spray.json.DefaultJsonProtocol

import com.glueware.glue._

import GeocodingStatusCodes.OK
import akka.actor.ActorRefFactory
// NOTE(review): the four imports below duplicate imports already present above
// (BadRequest, InternalServerError, DefaultJsonProtocol, LoggingAdapter) —
// harmless but worth cleaning up in a follow-up.
import spray.http.StatusCodes.BadRequest
import spray.http.StatusCodes.InternalServerError
import spray.json.DefaultJsonProtocol
import akka.event.LoggingAdapter

/**
 * Output class for ServiceLocation
 */
case class ServiceLocation(latitude: Double, longitude: Double)

/**
 * Implicit conversions needed when wrapping GoogleLocate by a ApiComponent
 * Unused in our example
 *
 * see package object
 */
trait LocateJsonProtocol extends DefaultJsonProtocol {
  // JSON format for the two-field ServiceLocation case class.
  implicit def serviceLocationJson = jsonFormat2(ServiceLocation)
}

/**
 * Abstract interface of ILocate
 */
abstract class ILocate(geocodingLocate: FutureFunction1[Address, GeocodingResult])(implicit functionContext: FunctionContext)
  extends FutureFunction1[Address, ServiceLocation]

/**
 * Delivers a location for an address.
 * If there is corresponding location for address it fails.
 * So in case of success one can rely that there is location.
 */
case class Locate(geocodingLocate: FutureFunction1[Address, GeocodingResult])(implicit functionContext: FunctionContext)
    extends ILocate(geocodingLocate) {
  import functionContext._

  // Members declared in scala.Function1
  // Chains the wrapped geocoding call and converts its result, failing the
  // returned Future when no location can be derived.
  def _apply(address: Future[Address]): Future[ServiceLocation] = {

    // Maps a GeocodingResult to a ServiceLocation, or a failed Future when the
    // status is not OK or (contract violation) OK arrives without a location.
    def geocodingResultToLocation(geocodingResult: GeocodingResult): Future[ServiceLocation] = {
      log.debug(geocodingResult.toString)
      import GeocodingStatusCodes._
      val status = geocodingResult.status
      val location = geocodingResult.location
      status match {
        case OK =>
          if (location.isDefined)
            Future.successful(ServiceLocation(location.get.lat, location.get.lng))
          else
            // OK status but no location violates the expected Google contract.
            // NOTE(review): `address.value` reads Future.value (an
            // Option[Try[Address]]); it is None if the future is not yet
            // completed — confirm FunctionException1 tolerates that.
            Future.failed(FunctionException1(address.value, InternalServerError, "Google Geocoding contract not correctly checked"))
        case _ =>
          Future.failed(FunctionException1(address.value, BadRequest, s"Google Geocoding no location found. Geocoding Status Code: ${status}"))
      }
    }

    geocodingLocate(address).flatMap(geocodingResultToLocation)
  }
}
glueware/locationservice
src/main/scala/com/example/locationservice/Locate.scala
Scala
apache-2.0
2,389
package org.otw.open.listeners

import com.badlogic.gdx.Input
import com.badlogic.gdx.scenes.scene2d.{InputEvent, InputListener}
import org.otw.open.controllers.{Event, ScreenController}

/**
 * Created by eilievska on 2/19/2016.
 *
 * Input listener that forwards a fixed screen-change event to the
 * [[ScreenController]] whenever the primary (left) button is pressed.
 */
class DispatchEventListener(val screenChangeEvent: Event) extends InputListener {

  /**
   * Dispatches the configured event on a left-button touch-down; any other
   * button is ignored. Always returns true so the touch-down counts as handled.
   */
  override def touchDown(event: InputEvent, x: Float, y: Float, pointer: Int, button: Int): Boolean = {
    button match {
      case Input.Buttons.LEFT => ScreenController.dispatchEvent(screenChangeEvent)
      case _                  => // non-left button: no event dispatched
    }
    true
  }
}
eilievska/OPEN
core/src/org/otw/open/listeners/DispatchEventListener.scala
Scala
apache-2.0
532
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/

package org.locationtech.geomesa.accumulo.data

import java.io.Flushable
import java.util.concurrent.atomic.AtomicLong

import com.typesafe.scalalogging.LazyLogging
import org.apache.accumulo.core.client.BatchWriter
import org.apache.accumulo.core.data.{Key, Mutation, Value}
import org.apache.accumulo.core.security.ColumnVisibility
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapred.RecordWriter
import org.geotools.data.simple.SimpleFeatureWriter
import org.geotools.data.{Query, Transaction}
import org.geotools.factory.Hints
import org.geotools.filter.identity.FeatureIdImpl
import org.locationtech.geomesa.accumulo.GeomesaSystemProperties.FeatureIdProperties.FEATURE_ID_GENERATOR
import org.locationtech.geomesa.accumulo.data.AccumuloFeatureWriter.{FeatureToWrite, FeatureWriterFn}
import org.locationtech.geomesa.accumulo.data.tables._
import org.locationtech.geomesa.accumulo.index._
import org.locationtech.geomesa.accumulo.util.{GeoMesaBatchWriterConfig, Z3FeatureIdGenerator}
import org.locationtech.geomesa.features.kryo.KryoFeatureSerializer
import org.locationtech.geomesa.features.{ScalaSimpleFeature, ScalaSimpleFeatureFactory, SimpleFeatureSerializer}
import org.locationtech.geomesa.security.SecurityUtils.FEATURE_VISIBILITY
import org.locationtech.geomesa.utils.uuid.FeatureIdGenerator
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter

import scala.collection.JavaConversions._
import scala.util.hashing.MurmurHash3

/**
 * Companion holding shared write machinery: function type aliases, the
 * per-feature write payload (FeatureToWrite), helpers that pair each index
 * table with its mutation function, and feature-id generation.
 */
object AccumuloFeatureWriter extends LazyLogging {

  // A function producing the mutations for one feature destined for one table.
  type FeatureToMutations = (FeatureToWrite) => Seq[Mutation]
  // A function that writes one feature to all of its tables.
  type FeatureWriterFn    = (FeatureToWrite) => Unit
  // Table name paired with the mutation function for that table.
  type TableAndWriter     = (String, FeatureToMutations)

  type AccumuloRecordWriter = RecordWriter[Key, Value]

  // Monotonic counter for temporary feature ids (replaced just before write).
  val tempFeatureIds = new AtomicLong(0)

  /**
   * Bundles one simple feature with the lazily-computed encodings needed to
   * build mutations: visibility, index value, serialized data value, optional
   * bin value, id hash, and per-attribute values. Lazy vals mean each encoding
   * is computed at most once and only if some table actually needs it.
   */
  class FeatureToWrite(val feature: SimpleFeature,
                       defaultVisibility: String,
                       serializer: SimpleFeatureSerializer,
                       indexValueEncoder: IndexValueEncoder,
                       binEncoder: Option[BinEncoder]) {

    import org.locationtech.geomesa.utils.geotools.Conversions.RichSimpleFeature

    // Visibility comes from feature user data when set, else the default.
    lazy val visibility = new Text(feature.userData[String](FEATURE_VISIBILITY).getOrElse(defaultVisibility))
    lazy val columnVisibility = new ColumnVisibility(visibility)
    // the index value is the encoded date/time/fid
    lazy val indexValue = new Value(indexValueEncoder.encode(feature))
    // the data value is the encoded SimpleFeature
    lazy val dataValue = new Value(serializer.serialize(feature))
    // bin formatted value
    lazy val binValue = binEncoder.map(e => new Value(e.encode(feature)))
    // hash value of the feature id
    lazy val idHash = Math.abs(MurmurHash3.stringHash(feature.getID))

    // TODO GEOMESA-1254 optimize for case where all vis are the same
    // One RowValue per distinct visibility, each packing the serialized
    // attributes that share that visibility (indices stored in the CQ).
    lazy val perAttributeValues: Seq[RowValue] = {
      val count = feature.getFeatureType.getAttributeCount
      // Comma-separated per-attribute visibilities, or the default for all.
      val visibilities = feature.userData[String](FEATURE_VISIBILITY).map(_.split(","))
          .getOrElse(Array.fill(count)(defaultVisibility))
      require(visibilities.length == count, "Per-attribute visibilities do not match feature type")
      // Group attribute indices by their visibility string.
      val groups = visibilities.zipWithIndex.groupBy(_._1).mapValues(_.map(_._2.toByte).sorted).toSeq
      groups.map { case (vis, indices) =>
        val cq = new Text(indices)
        val values = indices.map(i => serializer.serialize(i, feature.getAttribute(i)))
        val output = KryoFeatureSerializer.getOutput() // note: same output object used in serializer.serialize
        // Length-prefix each serialized attribute so they can be split apart on read.
        values.foreach { value =>
          output.writeInt(value.length, true)
          output.write(value)
        }
        val value = new Value(output.toBytes)
        RowValue(GeoMesaTable.AttributeColumnFamily, cq, new ColumnVisibility(vis), value)
      }
    }
  }

  /**
   * Builds a single write function fanning one feature out to every
   * (batch writer, mutation function) pair.
   */
  def featureWriter(writers: Seq[(BatchWriter, FeatureToMutations)]): FeatureWriterFn =
    feature => {
      // calculate all the mutations first, so that if something fails we won't have a partially written feature
      val mutations = writers.map { case (bw, fToM) => (bw, fToM(feature)) }
      mutations.foreach { case (bw, m) => bw.addMutations(m) }
    }

  /**
   * Gets writers and table names for each table (e.g. index) that supports the sft
   */
  def getTablesAndWriters(sft: SimpleFeatureType, ds: AccumuloConnectorCreator): Seq[TableAndWriter] =
    GeoMesaTable.getTables(sft).map(table => (ds.getTableName(sft.getTypeName, table), table.writer(sft)))

  /**
   * Gets removers and table names for each table (e.g. index) that supports the sft
   */
  def getTablesAndRemovers(sft: SimpleFeatureType, ds: AccumuloConnectorCreator): Seq[TableAndWriter] =
    GeoMesaTable.getTables(sft).map(table => (ds.getTableName(sft.getTypeName, table), table.remover(sft)))

  // Feature-id generator loaded from the configured system property; falls
  // back to Z3FeatureIdGenerator if the class cannot be instantiated.
  private val idGenerator: FeatureIdGenerator =
    try {
      logger.debug(s"Using feature id generator '${FEATURE_ID_GENERATOR.get}'")
      Class.forName(FEATURE_ID_GENERATOR.get).newInstance().asInstanceOf[FeatureIdGenerator]
    } catch {
      case e: Throwable =>
        logger.error(s"Could not load feature id generator class '${FEATURE_ID_GENERATOR.get}'", e)
        new Z3FeatureIdGenerator
    }

  /**
   * Sets the feature ID on the feature. If the user has requested a specific ID, that will be used,
   * otherwise one will be generated. If possible, the original feature will be modified and returned.
   */
  def featureWithFid(sft: SimpleFeatureType, feature: SimpleFeature): SimpleFeature = {
    if (feature.getUserData.containsKey(Hints.PROVIDED_FID)) {
      // Caller supplied an explicit fid via hints.
      withFid(sft, feature, feature.getUserData.get(Hints.PROVIDED_FID).toString)
    } else if (feature.getUserData.containsKey(Hints.USE_PROVIDED_FID) &&
        feature.getUserData.get(Hints.USE_PROVIDED_FID).asInstanceOf[Boolean]) {
      // Keep the fid already on the feature.
      feature
    } else {
      // Generate a fid.
      withFid(sft, feature, idGenerator.createId(sft, feature))
    }
  }

  // Mutates the fid in place when the identifier impl allows it; otherwise
  // rebuilds the feature with the new fid.
  private def withFid(sft: SimpleFeatureType, feature: SimpleFeature, fid: String): SimpleFeature =
    feature.getIdentifier match {
      case f: FeatureIdImpl =>
        f.setID(fid)
        feature
      case f =>
        logger.warn(s"Unknown feature ID implementation found, rebuilding feature: ${f.getClass} $f")
        ScalaSimpleFeatureFactory.copyFeature(sft, feature, fid)
    }

  // One key/value cell destined for a row: column family, qualifier, visibility, value.
  case class RowValue(cf: Text, cq: Text, vis: ColumnVisibility, value: Value)
}

/**
 * Base writer: owns the MultiTableBatchWriter, the composed per-index write
 * function, and the stats updater. Subclasses decide how features are
 * produced (append vs modify).
 */
abstract class AccumuloFeatureWriter(sft: SimpleFeatureType,
                                     encoder: SimpleFeatureSerializer,
                                     ds: AccumuloDataStore,
                                     defaultVisibility: String)
    extends SimpleFeatureWriter with Flushable with LazyLogging {

  protected val multiBWWriter = ds.connector.createMultiTableBatchWriter(GeoMesaBatchWriterConfig())

  protected val binEncoder = BinEncoder(sft)
  protected val indexValueEncoder = IndexValueEncoder(sft)

  // A "writer" is a function that takes a simple feature and writes it to an index or table
  protected val writer: FeatureWriterFn = {
    val writers = AccumuloFeatureWriter.getTablesAndWriters(sft, ds).map {
      case (table, write) => (multiBWWriter.getBatchWriter(table), write)
    }
    AccumuloFeatureWriter.featureWriter(writers)
  }

  protected val statUpdater = ds.stats.statUpdater(sft)

  // returns a temporary id - we will replace it just before write
  protected def nextFeatureId = AccumuloFeatureWriter.tempFeatureIds.getAndIncrement().toString

  // Resolves the final fid, writes the feature to all tables, updates stats.
  protected def writeToAccumulo(feature: SimpleFeature): Unit = {
    // see if there's a suggested ID to use for this feature, else create one based on the feature
    val featureWithFid = AccumuloFeatureWriter.featureWithFid(sft, feature)
    writer(new FeatureToWrite(featureWithFid, defaultVisibility, encoder, indexValueEncoder, binEncoder))
    statUpdater.add(featureWithFid)
  }

  override def getFeatureType: SimpleFeatureType = sft

  override def hasNext: Boolean = false

  override def flush(): Unit = {
    multiBWWriter.flush()
    statUpdater.flush()
  }

  override def close(): Unit = {
    multiBWWriter.close()
    statUpdater.close()
  }
}

/**
 * Appends new features - can't modify or delete existing features.
 */
class AppendAccumuloFeatureWriter(sft: SimpleFeatureType,
                                  encoder: SimpleFeatureSerializer,
                                  ds: AccumuloDataStore,
                                  defaultVisibility: String)
    extends AccumuloFeatureWriter(sft, encoder, ds, defaultVisibility) {

  // Feature handed out by next() and consumed by write().
  var currentFeature: SimpleFeature = null

  override def write(): Unit =
    if (currentFeature != null) {
      writeToAccumulo(currentFeature)
      currentFeature = null
    }

  override def remove(): Unit =
    throw new UnsupportedOperationException("Use getFeatureWriter instead of getFeatureWriterAppend")

  override def next(): SimpleFeature = {
    // Temporary id; replaced in writeToAccumulo via featureWithFid.
    currentFeature = new ScalaSimpleFeature(nextFeatureId, sft)
    currentFeature
  }
}

/**
 * Modifies or deletes existing features. Per the data store api, does not allow appending new features.
 */
class ModifyAccumuloFeatureWriter(sft: SimpleFeatureType,
                                  encoder: SimpleFeatureSerializer,
                                  ds: AccumuloDataStore,
                                  defaultVisibility: String,
                                  filter: Filter)
    extends AccumuloFeatureWriter(sft, encoder, ds, defaultVisibility) {

  val reader = ds.getFeatureReader(new Query(sft.getTypeName, filter), Transaction.AUTO_COMMIT)

  var live: SimpleFeature = null      /* feature to let user modify */
  var original: SimpleFeature = null  /* feature returned from reader */

  // A remover is a function that removes a feature from an
  // index or table. This list is configured to match the
  // version of the datastore (i.e. single table vs catalog
  // table + index tables)
  val remover: FeatureWriterFn = {
    val writers = AccumuloFeatureWriter.getTablesAndRemovers(sft, ds).map {
      case (table, write) => (multiBWWriter.getBatchWriter(table), write)
    }
    AccumuloFeatureWriter.featureWriter(writers)
  }

  override def remove() =
    if (original != null) {
      remover(new FeatureToWrite(original, defaultVisibility, encoder, indexValueEncoder, binEncoder))
      statUpdater.remove(original)
    }

  override def hasNext = reader.hasNext

  /* only write if non null and it hasn't changed...*/
  /* original should be null only when reader runs out */
  override def write() =
    // comparison of feature ID and attributes - doesn't consider concrete class used
    if (!ScalaSimpleFeature.equalIdAndAttributes(live, original)) {
      // Remove the old version before writing the modified one.
      remove()
      writeToAccumulo(live)
    }

  override def next: SimpleFeature = {
    original = reader.next()
    // set the use provided FID hint - allows user to update fid if desired,
    // but if not we'll use the existing one
    original.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
    live = ScalaSimpleFeatureFactory.copyFeature(sft, original, original.getID) // this copies user data as well
    live
  }

  override def close() = {
    super.close() // closes writer
    reader.close()
  }
}
mdzimmerman/geomesa
geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/data/AccumuloFeatureWriter.scala
Scala
apache-2.0
11,742
/**
 * Copyright 2011-2017 GatlingCorp (http://gatling.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.gatling.core.action

import io.gatling.core.stats.StatsEngine
import io.gatling.core.session.{ Expression, Session }

/**
 * Action that resolves its successor dynamically: evaluates `nextAction`
 * against the session and forwards the session to the resolved action.
 *
 * @param nextAction  expression yielding the action to hand the session to
 * @param statsEngine the stats engine
 * @param name        this action's name
 * @param next        statically-configured next action (exit fallback)
 */
class Switch(nextAction: Expression[Action], val statsEngine: StatsEngine, val name: String, val next: Action) extends ExitableAction {

  // recover reports a failed expression resolution instead of crashing the flow.
  override def execute(session: Session): Unit = recover(session) {
    nextAction(session).map(_ ! session)
  }
}
timve/gatling
gatling-core/src/main/scala/io/gatling/core/action/Switch.scala
Scala
apache-2.0
999
/*
 * Copyright (c) 2016, Innoave.com
 * All rights reserved.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL INNOAVE.COM OR ITS CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package com.innoave.abacus.fxui.view

import Orientation._
import com.innoave.abacus.domain.model.Bead
import com.innoave.abacus.domain.model.BeadRod
import com.innoave.abacus.domain.model.Parameter
import com.innoave.abacus.domain.service.AbacusService
import com.innoave.abacus.domain.service.EventBus
import com.innoave.abacus.domain.service.event.BeadsMoved
import scalafx.Includes._
import scalafx.scene.layout.GridPane
import scalafx.scene.Node

/**
 * JavaFX grid pane rendering one deck of the abacus: one RodView per BeadRod,
 * laid out as a row or column depending on orientation. On construction it
 * also registers for BeadsMoved events so the view tracks model changes.
 *
 * @param abacusService the abacus domain service backing this deck
 * @param beadRods      the rods to render, in display order
 * @param orientation   layout direction for the rods
 * @param params        display parameters (implicit)
 */
class DeckView[T <: Bead](
  val abacusService: AbacusService[T],
  val beadRods: Seq[BeadRod[T]],
  val orientation: Orientation
  )(
  implicit val params: Parameter
) extends GridPane {

  styleClass += "deck-view"

  // One RodView per rod; every third rod (position % 3 == 2) with more than
  // two cleared beads gets its last bead styled as a group marker.
  val rodViews: Seq[RodView[T]] = {
    for {
      beadRod <- beadRods
    } yield {
      val rodView = new RodView(abacusService, beadRod, orientation)
      if (beadRod.clearedBeads.size > 2 && beadRod.position % 3 == 2) {
        rodView.beads.last.styleClass += "group-marker-bead"
        rodView.beads.take(rodView.beads.size - 1).foreach { bv => bv.styleClass += "bead" }
      } else {
        rodView.beads.foreach { bv => bv.styleClass += "bead" }
      }
      rodView
    }
  }

  /** Pushes an updated BeadRod into the matching RodView's observable property. */
  def setBeadRod(value: BeadRod[T]) {
    rodViewFor(value.position, value.beadValue).foreach {
      rodView => rodView.beadRod() = value
    }
  }

  /** Current BeadRod at the given position/beadValue, if any. */
  def beadRodFor(position: Int, beadValue: Int): Option[BeadRod[T]] =
    rodViewFor(position, beadValue).map { x => x.beadRod() }

  /** Finds the RodView whose rod matches the given position and beadValue. */
  def rodViewFor(position: Int, beadValue: Int): Option[RodView[T]] =
    rodViews.find { x =>
      x.beadRod().position == position && x.beadRod().beadValue == beadValue
    }

  // Lay the rod views out in the grid. NOTE(review): both vertical
  // orientations call addRow identically and both horizontal ones call
  // addColumn identically — confirm any mirroring/reversal is handled inside
  // RodView rather than here.
  orientation match {
    case TopToBottom => addRow(0, rodViews.map { v => Node.sfxNode2jfx(v) }: _*)
    case BottomToTop => addRow(0, rodViews.map { v => Node.sfxNode2jfx(v) }: _*)
    case LeftToRight => addColumn(0, rodViews.map { v => Node.sfxNode2jfx(v) }: _*)
    case RightToLeft => addColumn(0, rodViews.map { v => Node.sfxNode2jfx(v) }: _*)
  }

  // Keep the view in sync with the model: apply each BeadsMoved event's new rod.
  EventBus.of(abacusService).register(classOf[BeadsMoved[T]],
    { (ev: BeadsMoved[T]) => setBeadRod(ev.newBeadRod) })
}
haraldmaida/AbacusSFX
src/main/scala/com/innoave/abacus/fxui/view/DeckView.scala
Scala
apache-2.0
3,009
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.metrics.source

import com.codahale.metrics.MetricRegistry

/**
 * A named provider of metrics. Implementations expose a Codahale
 * MetricRegistry that holds the metrics to be reported under `sourceName`.
 */
private[spark] trait Source {
  /** Name identifying this source in reported metrics. */
  def sourceName: String

  /** Registry containing all metrics exposed by this source. */
  def metricRegistry: MetricRegistry
}
yelshater/hadoop-2.3.0
spark-core_2.10-1.0.0-cdh5.1.0/src/main/scala/org/apache/spark/metrics/source/Source.scala
Scala
apache-2.0
981
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.table.planner.plan.batch.sql

import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.config.ExecutionConfigOptions
import org.apache.flink.table.planner.plan.rules.physical.batch.BatchPhysicalSortRule.TABLE_EXEC_RANGE_SORT_ENABLED
import org.apache.flink.table.planner.utils.TableTestBase

import org.junit.Test

/**
 * Plan tests for batch ORDER BY with LIMIT/OFFSET, exercised with range sort
 * both disabled and enabled.
 */
class SortLimitTest extends TableTestBase {

  private val util = batchTestUtil()
  util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
  util.tableEnv.getConfig.getConfiguration.setInteger(
    ExecutionConfigOptions.TABLE_EXEC_SORT_DEFAULT_LIMIT, 200)

  /** Sets the range-sort flag, then verifies the physical plan for the query. */
  private def verifyPlanWithRangeSort(rangeSortEnabled: Boolean, sql: String): Unit = {
    util.tableEnv.getConfig.getConfiguration.setBoolean(TABLE_EXEC_RANGE_SORT_ENABLED, rangeSortEnabled)
    util.verifyExecPlan(sql)
  }

  @Test
  def testNonRangeSortWithoutOffset(): Unit =
    verifyPlanWithRangeSort(rangeSortEnabled = false, "SELECT * FROM MyTable ORDER BY a DESC LIMIT 5")

  @Test
  def testNonRangeSortWithLimit0(): Unit =
    verifyPlanWithRangeSort(rangeSortEnabled = false, "SELECT * FROM MyTable ORDER BY a DESC LIMIT 0")

  @Test
  def testNonRangeSortOnlyWithOffset(): Unit =
    verifyPlanWithRangeSort(rangeSortEnabled = false, "SELECT * FROM MyTable ORDER BY a DESC OFFSET 10 ROWS")

  @Test
  def testNoneRangeSortWithOffsetLimit(): Unit =
    verifyPlanWithRangeSort(rangeSortEnabled = false, "SELECT * FROM MyTable ORDER BY a DESC, b LIMIT 10 OFFSET 1")

  @Test
  def testNoneRangeSortWithOffsetLimit0(): Unit =
    verifyPlanWithRangeSort(rangeSortEnabled = false, "SELECT * FROM MyTable ORDER BY a DESC, b LIMIT 0 OFFSET 1")

  @Test
  def testRangeSortOnWithoutOffset(): Unit =
    verifyPlanWithRangeSort(rangeSortEnabled = true, "SELECT * FROM MyTable ORDER BY a DESC LIMIT 5")

  @Test
  def testRangeSortOnWithLimit0(): Unit =
    verifyPlanWithRangeSort(rangeSortEnabled = true, "SELECT * FROM MyTable ORDER BY a DESC LIMIT 0")

  @Test
  def testRangeSortOnlyWithOffset(): Unit =
    verifyPlanWithRangeSort(rangeSortEnabled = true, "SELECT * FROM MyTable ORDER BY a DESC OFFSET 10 ROWS")

  @Test
  def testRangeSortWithOffsetLimit(): Unit =
    verifyPlanWithRangeSort(rangeSortEnabled = true, "SELECT * FROM MyTable ORDER BY a DESC, b LIMIT 10 OFFSET 1")

  @Test
  def testRangeSortWithOffsetLimit0(): Unit =
    verifyPlanWithRangeSort(rangeSortEnabled = true, "SELECT * FROM MyTable ORDER BY a DESC, b LIMIT 0 OFFSET 1")
}
lincoln-lil/flink
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/SortLimitTest.scala
Scala
apache-2.0
3,800
package com.github.zenpie import scala.annotation.compileTimeOnly import scala.language.implicitConversions import scala.util.matching.Regex package object macrowave extends RuleActions { type HList = shapeless.HList type ::[H, T <: HList] = shapeless.::[H, T] type HNil = shapeless.HNil val HNil = shapeless.HNil type Prepend[P <: HList, S <: HList] = shapeless.ops.hlist.Prepend[P, S] type Rule1[+T] = Rule[T :: HNil] def compileTime: Nothing = throw new UnsupportedOperationException("") @compileTimeOnly("Calls to function 'literal' have to be inside a macro invocation!") implicit def literal(string: String): RegExp = compileTime @compileTimeOnly("Calls to function 'regex' have to be inside a macro invocation!") def regex(string: String): RegExp = compileTime @compileTimeOnly("Calls to function 'regex' have to be inside a macro invocation!") implicit def regex(regex: Regex): RegExp = compileTime @compileTimeOnly("Calls to function 'token' have to be inside a macro invocation!") implicit def token(regex: RegExp): Token = compileTime @compileTimeOnly("Calls to function 'singletonRule' have to be inside a macro invocation!") implicit def singletonRule(token: Token): Rule1[String] = compileTime @compileTimeOnly("Calls to function 'epsilon' have to be inside a macro invocation!") def epsilon: Rule[HNil] = compileTime }
zen-pie/macrowave
src/main/scala/com/github/zenpie/macrowave/package.scala
Scala
mit
1,394
/* * GNU GENERAL PUBLIC LICENSE * Version 2, June 1991 * * Copyright (C) 1989, 1991 Free Software Foundation, Inc., <http://fsf.org/> * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * Everyone is permitted to copy and distribute verbatim copies * of this license document, but changing it is not allowed. * * Preamble * * The licenses for most software are designed to take away your * freedom to share and change it. By contrast, the GNU General Public * License is intended to guarantee your freedom to share and change free * software--to make sure the software is free for all its users. This * General Public License applies to most of the Free Software * Foundation's software and to any other program whose authors commit to * using it. (Some other Free Software Foundation software is covered by * the GNU Lesser General Public License instead.) You can apply it to * your programs, too. * * When we speak of free software, we are referring to freedom, not * price. Our General Public Licenses are designed to make sure that you * have the freedom to distribute copies of free software (and charge for * this service if you wish), that you receive source code or can get it * if you want it, that you can change the software or use pieces of it * in new free programs; and that you know you can do these things. * * To protect your rights, we need to make restrictions that forbid * anyone to deny you these rights or to ask you to surrender the rights. * These restrictions translate to certain responsibilities for you if you * distribute copies of the software, or if you modify it. * * For example, if you distribute copies of such a program, whether * gratis or for a fee, you must give the recipients all the rights that * you have. You must make sure that they, too, receive or can get the * source code. And you must show them these terms so they know their * rights. 
* * We protect your rights with two steps: (1) copyright the software, and * (2) offer you this license which gives you legal permission to copy, * distribute and/or modify the software. * * Also, for each author's protection and ours, we want to make certain * that everyone understands that there is no warranty for this free * software. If the software is modified by someone else and passed on, we * want its recipients to know that what they have is not the original, so * that any problems introduced by others will not reflect on the original * authors' reputations. * * Finally, any free program is threatened constantly by software * patents. We wish to avoid the danger that redistributors of a free * program will individually obtain patent licenses, in effect making the * program proprietary. To prevent this, we have made it clear that any * patent must be licensed for everyone's free use or not licensed at all. * * The precise terms and conditions for copying, distribution and * modification follow. * * GNU GENERAL PUBLIC LICENSE * TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION * * 0. This License applies to any program or other work which contains * a notice placed by the copyright holder saying it may be distributed * under the terms of this General Public License. The "Program", below, * refers to any such program or work, and a "work based on the Program" * means either the Program or any derivative work under copyright law: * that is to say, a work containing the Program or a portion of it, * either verbatim or with modifications and/or translated into another * language. (Hereinafter, translation is included without limitation in * the term "modification".) Each licensee is addressed as "you". * * Activities other than copying, distribution and modification are not * covered by this License; they are outside its scope. 
The act of * running the Program is not restricted, and the output from the Program * is covered only if its contents constitute a work based on the * Program (independent of having been made by running the Program). * Whether that is true depends on what the Program does. * * 1. You may copy and distribute verbatim copies of the Program's * source code as you receive it, in any medium, provided that you * conspicuously and appropriately publish on each copy an appropriate * copyright notice and disclaimer of warranty; keep intact all the * notices that refer to this License and to the absence of any warranty; * and give any other recipients of the Program a copy of this License * along with the Program. * * You may charge a fee for the physical act of transferring a copy, and * you may at your option offer warranty protection in exchange for a fee. * * 2. You may modify your copy or copies of the Program or any portion * of it, thus forming a work based on the Program, and copy and * distribute such modifications or work under the terms of Section 1 * above, provided that you also meet all of these conditions: * * a) You must cause the modified files to carry prominent notices * stating that you changed the files and the date of any change. * * b) You must cause any work that you distribute or publish, that in * whole or in part contains or is derived from the Program or any * part thereof, to be licensed as a whole at no charge to all third * parties under the terms of this License. * * c) If the modified program normally reads commands interactively * when run, you must cause it, when started running for such * interactive use in the most ordinary way, to print or display an * announcement including an appropriate copyright notice and a * notice that there is no warranty (or else, saying that you provide * a warranty) and that users may redistribute the program under * these conditions, and telling the user how to view a copy of this * License. 
(Exception: if the Program itself is interactive but * does not normally print such an announcement, your work based on * the Program is not required to print an announcement.) * * These requirements apply to the modified work as a whole. If * identifiable sections of that work are not derived from the Program, * and can be reasonably considered independent and separate works in * themselves, then this License, and its terms, do not apply to those * sections when you distribute them as separate works. But when you * distribute the same sections as part of a whole which is a work based * on the Program, the distribution of the whole must be on the terms of * this License, whose permissions for other licensees extend to the * entire whole, and thus to each and every part regardless of who wrote it. * * Thus, it is not the intent of this section to claim rights or contest * your rights to work written entirely by you; rather, the intent is to * exercise the right to control the distribution of derivative or * collective works based on the Program. * * In addition, mere aggregation of another work not based on the Program * with the Program (or with a work based on the Program) on a volume of * a storage or distribution medium does not bring the other work under * the scope of this License. * * 3. 
You may copy and distribute the Program (or a work based on it, * under Section 2) in object code or executable form under the terms of * Sections 1 and 2 above provided that you also do one of the following: * * a) Accompany it with the complete corresponding machine-readable * source code, which must be distributed under the terms of Sections * 1 and 2 above on a medium customarily used for software interchange; or, * * b) Accompany it with a written offer, valid for at least three * years, to give any third party, for a charge no more than your * cost of physically performing source distribution, a complete * machine-readable copy of the corresponding source code, to be * distributed under the terms of Sections 1 and 2 above on a medium * customarily used for software interchange; or, * * c) Accompany it with the information you received as to the offer * to distribute corresponding source code. (This alternative is * allowed only for noncommercial distribution and only if you * received the program in object code or executable form with such * an offer, in accord with Subsection b above.) * * The source code for a work means the preferred form of the work for * making modifications to it. For an executable work, complete source * code means all the source code for all modules it contains, plus any * associated interface definition files, plus the scripts used to * control compilation and installation of the executable. However, as a * special exception, the source code distributed need not include * anything that is normally distributed (in either source or binary * form) with the major components (compiler, kernel, and so on) of the * operating system on which the executable runs, unless that component * itself accompanies the executable. 
* * If distribution of executable or object code is made by offering * access to copy from a designated place, then offering equivalent * access to copy the source code from the same place counts as * distribution of the source code, even though third parties are not * compelled to copy the source along with the object code. * * 4. You may not copy, modify, sublicense, or distribute the Program * except as expressly provided under this License. Any attempt * otherwise to copy, modify, sublicense or distribute the Program is * void, and will automatically terminate your rights under this License. * However, parties who have received copies, or rights, from you under * this License will not have their licenses terminated so long as such * parties remain in full compliance. * * 5. You are not required to accept this License, since you have not * signed it. However, nothing else grants you permission to modify or * distribute the Program or its derivative works. These actions are * prohibited by law if you do not accept this License. Therefore, by * modifying or distributing the Program (or any work based on the * Program), you indicate your acceptance of this License to do so, and * all its terms and conditions for copying, distributing or modifying * the Program or works based on it. * * 6. Each time you redistribute the Program (or any work based on the * Program), the recipient automatically receives a license from the * original licensor to copy, distribute or modify the Program subject to * these terms and conditions. You may not impose any further * restrictions on the recipients' exercise of the rights granted herein. * You are not responsible for enforcing compliance by third parties to * this License. * * 7. 
If, as a consequence of a court judgment or allegation of patent * infringement or for any other reason (not limited to patent issues), * conditions are imposed on you (whether by court order, agreement or * otherwise) that contradict the conditions of this License, they do not * excuse you from the conditions of this License. If you cannot * distribute so as to satisfy simultaneously your obligations under this * License and any other pertinent obligations, then as a consequence you * may not distribute the Program at all. For example, if a patent * license would not permit royalty-free redistribution of the Program by * all those who receive copies directly or indirectly through you, then * the only way you could satisfy both it and this License would be to * refrain entirely from distribution of the Program. * * If any portion of this section is held invalid or unenforceable under * any particular circumstance, the balance of the section is intended to * apply and the section as a whole is intended to apply in other * circumstances. * * It is not the purpose of this section to induce you to infringe any * patents or other property right claims or to contest validity of any * such claims; this section has the sole purpose of protecting the * integrity of the free software distribution system, which is * implemented by public license practices. Many people have made * generous contributions to the wide range of software distributed * through that system in reliance on consistent application of that * system; it is up to the author/donor to decide if he or she is willing * to distribute software through any other system and a licensee cannot * impose that choice. * * This section is intended to make thoroughly clear what is believed to * be a consequence of the rest of this License. * * 8. 
If the distribution and/or use of the Program is restricted in * certain countries either by patents or by copyrighted interfaces, the * original copyright holder who places the Program under this License * may add an explicit geographical distribution limitation excluding * those countries, so that distribution is permitted only in or among * countries not thus excluded. In such case, this License incorporates * the limitation as if written in the body of this License. * * 9. The Free Software Foundation may publish revised and/or new versions * of the General Public License from time to time. Such new versions will * be similar in spirit to the present version, but may differ in detail to * address new problems or concerns. * * Each version is given a distinguishing version number. If the Program * specifies a version number of this License which applies to it and "any * later version", you have the option of following the terms and conditions * either of that version or of any later version published by the Free * Software Foundation. If the Program does not specify a version number of * this License, you may choose any version ever published by the Free Software * Foundation. * * 10. If you wish to incorporate parts of the Program into other free * programs whose distribution conditions are different, write to the author * to ask for permission. For software which is copyrighted by the Free * Software Foundation, write to the Free Software Foundation; we sometimes * make exceptions for this. Our decision will be guided by the two goals * of preserving the free status of all derivatives of our free software and * of promoting the sharing and reuse of software generally. * * NO WARRANTY * * 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY * FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. 
EXCEPT WHEN * OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES * PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED * OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS * TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE * PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, * REPAIR OR CORRECTION. * * 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING * WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR * REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, * INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING * OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED * TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY * YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER * PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE * POSSIBILITY OF SUCH DAMAGES. * * END OF TERMS AND CONDITIONS * * How to Apply These Terms to Your New Programs * * If you develop a new program, and you want it to be of the greatest * possible use to the public, the best way to achieve this is to make it * free software which everyone can redistribute and change under these terms. * * To do so, attach the following notices to the program. It is safest * to attach them to the start of each source file to most effectively * convey the exclusion of warranty; and each file should have at least * the "copyright" line and a pointer to where the full notice is found. 
* * {description} * Copyright (C) {year} {fullname} * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program; if not, write to the Free Software Foundation, Inc., * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. * * Also add information on how to contact you by electronic and paper mail. * * If the program is interactive, make it output a short notice like this * when it starts in an interactive mode: * * Gnomovision version 69, Copyright (C) year name of author * Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. * This is free software, and you are welcome to redistribute it * under certain conditions; type `show c' for details. * * The hypothetical commands `show w' and `show c' should show the appropriate * parts of the General Public License. Of course, the commands you use may * be called something other than `show w' and `show c'; they could even be * mouse-clicks or menu items--whatever suits your program. * * You should also get your employer (if you work as a programmer) or your * school, if any, to sign a "copyright disclaimer" for the program, if * necessary. Here is a sample; alter the names: * * Yoyodyne, Inc., hereby disclaims all copyright interest in the program * `Gnomovision' (which makes passes at compilers) written by James Hacker. 
* * {signature of Ty Coon}, 1 April 1989 * Ty Coon, President of Vice * * This General Public License does not permit incorporating your program into * proprietary programs. If your program is a subroutine library, you may * consider it more useful to permit linking proprietary applications with the * library. If this is what you want to do, use the GNU Lesser General * Public License instead of this License. */ package controllers import javax.inject.Inject import controllers.traits.TController import models.fhs.pages.editdocents.MEditDocents._ import models.fhs.pages.editdocents._ import models.persistence.docents.Docent import play.api.Logger import play.api.cache.Cached import play.api.data.Forms._ import play.api.data._ import play.api.i18n._ import play.api.libs.json._ import play.api.mvc._ import play.twirl.api.Html import views.html.editdocents._ /** * @author fabian * on 13.04.14. */ class CEditDocents @Inject()(cached: Cached)(val messagesApi: MessagesApi) extends TController { val newDocentForm: Form[MDocent] = Form( mapping( "lastName" -> nonEmptyText(minLength = 3), "userId" -> text )(MDocent.apply)(MDocent.unapply) ) val expireDateForm: Form[MExpireDate] = Form( mapping("date" -> date("yyyy-MM-dd"))(MExpireDate.apply)(MExpireDate.unapply) ) val existingDocentForm: Form[MExistingDocent] = Form( mapping( "id" -> longNumber, "lastName" -> nonEmptyText(minLength = 3), "userId" -> text, "comments" -> text, "timeslots" -> list( mapping( "timeKind" -> nonEmptyText, "duration" -> nonEmptyText, "weekday" -> number, "startHour" -> number, "startMinute" -> number, "stopHour" -> number, "stopMinute" -> number )(MDocentTimeWhish.apply)(MDocentTimeWhish.unapply) ), "houseCriterias" -> list( longNumber ), "roomAttr" -> list( nonEmptyText ), "roomCrit" -> list( longNumber ) )(MExistingDocent.apply)(MExistingDocent.unapply) ) private def getDocentList(implicit request: Request[AnyContent]) = { val allDocents = findAllDocents() val session = request.session /** docents 
will only see their own stuff */ var docentList: List[Docent] = null if (!session.get(IS_ADMIN).getOrElse("false").toBoolean) { docentList = allDocents.filter(d => session.get(CURRENT_USER).getOrElse("").equals(d.getUserId)) if (docentList.isEmpty) { docentList = allDocents.filter(d => session.get(CURRENT_USER).getOrElse("").equalsIgnoreCase(d.getLastName)) } } else { docentList = allDocents } docentList } def page = Action { implicit request => val docentList = getDocentList val expireDate = findExpireDate() val semesters = findSemesters() val currentExpireDateForm = if (expireDate != null) { expireDateForm.fill(expireDate) } else { expireDateForm } Ok(editDocents("Dozenten", newDocentForm, currentExpireDateForm, docentList, semesters)) } def sendDocentFields(id: Long) = Action { implicit request => val docent = findDocentById(id) val session = request.session val timeWishExpireDate = findExpireDate() val expireDate = if (timeWishExpireDate != null) { timeWishExpireDate.getExpiredate } else { null } val (allTimeSlots, timeRanges) = timeRange Ok(Json.stringify(Json.obj("htmlresult" -> docentfields(existingDocentForm.fill(docent), findHouses(), findAllRooms(), timeRanges, allTimeSlots, expireDate, findSemesters()).toString().trim))) .withSession(session + ("docentName" -> docent.getLastName)) } def editDocent = Action(parse.json) { implicit request => val session = request.session val jsVal = request.body val lastName = (jsVal \\ existingDocentForm("lastName").name).as[String] val id = (jsVal \\ existingDocentForm("id").name).as[String].toLong val houseCriterias = (jsVal \\ existingDocentForm("houseCriterias").name).as[JsArray].value.map(_.as[String].toLong).toList val roomAttr = (jsVal \\ existingDocentForm("roomAttr").name).as[JsArray].value.map(_.as[String]).toList val roomCrit = (jsVal \\ existingDocentForm("roomCrit").name).as[JsArray].value.map(_.as[String].toLong).toList val comments = (jsVal \\ existingDocentForm("comments").name).as[String].trim val userId 
= (jsVal \\ existingDocentForm("userId").name).as[String].trim val timeslots = (jsVal \\ "timeslots").as[JsArray].value.par.map { slot => val rangeString = (slot \\ "timerange").as[String].split(",") val startTime = rangeString(0).split("-") val startHour = startTime(0).toInt val startMinute = startTime(1).toInt val stopTime = rangeString(1).split("-") val stopHour = stopTime(0).toInt val stopMinute = stopTime(1).toInt val weekday = rangeString(2).toInt val timeKind = rangeString(3).trim val duration = (slot \\ "duration").as[String].trim MDocentTimeWhish(timeKind, duration, weekday, startHour, startMinute, stopHour, stopMinute) }.toList.filter { case w => // Logger.debug("" + !w.timeKind.equalsIgnoreCase("n")) !w.timeKind.equalsIgnoreCase("n") } val mDocent = MExistingDocent(id, lastName, userId, comments, timeslots, houseCriterias, roomAttr, roomCrit) Logger.debug(mDocent.toString) val docent = persistEditedDocent(mDocent) val flashing = request.flash +("submitResult", "true") val (allTimeSlots, timeRanges) = timeRange val timeWishExpireDate = findExpireDate() val expireDate = if (timeWishExpireDate != null) { timeWishExpireDate.getExpiredate } else { null } Ok(Json.obj("htmlresult" -> docentfields(existingDocentForm.fill(docent), findHouses(), findAllRooms(), timeRanges, allTimeSlots, expireDate, findSemesters())(flashing, session, request2Messages).toString)) } def saveExpireDate = Action { implicit request => val result = expireDateForm.bindFromRequest() Logger.debug("expireform:" + result) result.fold( error => { BadRequest(editDocents("Dozenten", newDocentForm, error, getDocentList, findSemesters())) }, mExpireDate => { persistExpireDate(mExpireDate) Redirect(routes.CEditDocents.page) } ) } def deleteDocent(id: Long) = Action { implicit request => val session = request.session val (docentName, connectedSubjects) = removeDocent(id) Redirect(routes.CEditDocents.page).flashing("connectedSubjects" -> connectedSubjects.mkString(" "), "docentName" -> 
docentName).withSession(session + ("docentName" -> docentName)) } def saveNewDocent = Action { implicit request => //Logger.debug("add docent header: " + request.headers) val docentResult = newDocentForm.bindFromRequest docentResult.fold( errors => { BadRequest(editDocents("Dozenten", errors, expireDateForm, findAllDocents(), findSemesters())) }, mDocent => { persistNewDocent(docentResult.get) Redirect(routes.CEditDocents.page) } ) } def getStatisticFields(semesterId: Long, targetContainer: String) = Action { val semester = findSemesterById(semesterId) val docentList = findAllDocents() val docentStatisticList = docentList.par.map { d => val neededSws = calculateNeededSws(semesterId, d.getId) val givenSws = calculateDocentSwsForStatistic(d) (d, neededSws, givenSws) }.toList Ok.chunked(enumeratorContent(targetContainer, docentStatistics(semester, docentStatisticList))) } def calculaterequiredSWS(semesterId:Long, docentId:Long) = Action { implicit request => val sws = calculateNeededSws(semesterId,docentId) Ok( Json.obj( "htmlresult" -> sws) ) } }
P1tt187/fhs-schedule-generator
app/controllers/CEditDocents.scala
Scala
gpl-2.0
26,745
/* * Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com> */ package play.utils import java.sql._ import java.util.logging.Logger class ProxyDriver(proxied: Driver) extends Driver { def acceptsURL(url: String) = proxied.acceptsURL(url) def connect(user: String, properties: java.util.Properties) = proxied.connect(user, properties) def getMajorVersion() = proxied.getMajorVersion def getMinorVersion() = proxied.getMinorVersion def getPropertyInfo(user: String, properties: java.util.Properties) = proxied.getPropertyInfo(user, properties) def jdbcCompliant() = proxied.jdbcCompliant def getParentLogger(): Logger = null }
jyotikamboj/container
pf-framework/src/play/src/main/scala/play/utils/ProxyDriver.scala
Scala
mit
654
package com.twitter.hashing import org.specs.SpecificationWithJUnit import scala.collection.mutable.ListBuffer import org.apache.commons.codec.binary.Base64 import com.twitter.io.TempFile class KeyHasherSpec extends SpecificationWithJUnit { def readResource(name: String) = { var lines = new ListBuffer[String]() val src = scala.io.Source.fromFile(TempFile.fromResourcePath(getClass, "/"+name)) src.getLines } val base64 = new Base64() def decode(str: String) = base64.decode(str) def testHasher(name: String, hasher: KeyHasher) { val sources = readResource(name + "_source") map { decode(_) } val hashes = readResource(name + "_hashes") sources.size must beGreaterThan(0) sources zip hashes foreach { case (source, hashAsString) => val hash = BigInt(hashAsString).toLong hasher.hashKey(source) mustEqual hash } } "KeyHasher" should { "correctly hash fnv1_32" in { testHasher("fnv1_32", KeyHasher.FNV1_32) } "correctly hash fnv1_64" in { testHasher("fnv1_64", KeyHasher.FNV1_64) } "correctly hash fnv1a_32" in { testHasher("fnv1a_32", KeyHasher.FNV1A_32) } "correctly hash fnv1a_64" in { testHasher("fnv1a_64", KeyHasher.FNV1A_64) } "correctly hash jenkins" in { testHasher("jenkins", KeyHasher.JENKINS) } "correctly hash crc32 itu" in { testHasher("crc32", KeyHasher.CRC32_ITU) } } }
mosesn/util
util-hashing/src/test/scala/com/twitter/hashing/KeyHasherSpec.scala
Scala
apache-2.0
1,441
/*********************************************************************** * Copyright (c) 2013-2019 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.utils import java.time.ZoneOffset import java.time.format.DateTimeFormatter import org.geotools.data.FeatureReader import org.geotools.data.collection.DelegateFeatureReader import org.geotools.feature.collection.DelegateFeatureIterator import org.geotools.geometry.jts.ReferencedEnvelope import org.geotools.referencing.CRS import org.geotools.referencing.crs.DefaultGeographicCRS import org.locationtech.geomesa.utils.text.WKTUtils import org.locationtech.jts.geom.{Geometry, Polygon} import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType} import org.opengis.referencing.crs.CoordinateReferenceSystem package object geotools { // use the epsg jar if it's available (e.g. 
in geoserver), otherwise use the less-rich constant val CRS_EPSG_4326: CoordinateReferenceSystem = try { CRS.decode("EPSG:4326", true) } catch { case t: Throwable => DefaultGeographicCRS.WGS84 } val CrsEpsg4326: CoordinateReferenceSystem = CRS_EPSG_4326 // we make this a function, as envelopes are mutable def wholeWorldEnvelope = new ReferencedEnvelope(-180, 180, -90, 90, CRS_EPSG_4326) val WholeWorldPolygon: Polygon = WKTUtils.read("POLYGON((-180 -90, 180 -90, 180 90, -180 90, -180 -90))").asInstanceOf[Polygon] val EmptyGeometry: Geometry = WKTUtils.read("POLYGON EMPTY") // date format with geotools pattern val GeoToolsDateFormat: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").withZone(ZoneOffset.UTC) type FR = FeatureReader[SimpleFeatureType, SimpleFeature] type DFR = DelegateFeatureReader[SimpleFeatureType, SimpleFeature] type DFI = DelegateFeatureIterator[SimpleFeature] }
elahrvivaz/geomesa
geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/geotools/package.scala
Scala
apache-2.0
2,150
package com.github.pockethub.accounts

import android.app.Activity
import android.content.Intent
import android.graphics.Bitmap
import android.net.Uri
import android.os.Bundle
import android.support.v7.app.AppCompatActivity
import android.webkit.WebViewClient

import com.github.pockethub.R
import com.github.pockethub.ui.{LightProgressDialog, WebView}

/**
 * Web view activity driving the GitHub OAuth login flow.
 *
 * Loads the OAuth url supplied via the `LoginActivity.INTENT_EXTRA_URL` intent extra and
 * watches navigation: once the page redirects to the app's registered OAuth callback scheme,
 * the callback uri is handed back to the caller as the activity result and this activity
 * finishes. A progress dialog is shown while pages are loading.
 *
 * @author chentaov5@gmail.com
 */
class LoginWebViewActivity extends AppCompatActivity { outer =>

  protected override def onCreate(savedInstanceState: Bundle) {
    super.onCreate(savedInstanceState)
    val webView = new WebView(this)
    webView.loadUrl(getIntent.getStringExtra(LoginActivity.INTENT_EXTRA_URL))
    webView.setWebViewClient(new WebViewClient() {
      // indeterminate progress dialog shown between onPageStarted and onPageFinished
      val dialog = LightProgressDialog.create(outer, R.string.loading)

      override def onPageStarted(view: android.webkit.WebView, url: String, favicon: Bitmap): Unit = {
        dialog.show()
      }

      override def onPageFinished(view: android.webkit.WebView, url: String): Unit = {
        dialog.dismiss()
      }

      override def shouldOverrideUrlLoading(view: android.webkit.WebView, url: String): Boolean = {
        val uri = Uri.parse(url)
        // Uri.getScheme may return null (e.g. for relative urls); compare with the known
        // non-null scheme on the left to avoid a NullPointerException
        if (getString(R.string.github_oauth_scheme).equals(uri.getScheme)) {
          // OAuth redirect reached: return the callback uri to the calling activity
          val data = new Intent()
          data.setData(uri)
          setResult(Activity.RESULT_OK, data)
          finish()
          true
        } else {
          super.shouldOverrideUrlLoading(view, url)
        }
      }
    })
    setContentView(webView)
  }
}
JLLK/PocketHub-scala
app/src/main/scala/com/github/pockethub/accounts/LoginWebViewActivity.scala
Scala
apache-2.0
2,580
package slide import java.net.NetworkInterface import enums.OperatingSystem object SystemInfo { private val os: String = System.getProperty("os.name").toUpperCase var operatingSystem: OperatingSystem = OperatingSystem.UNKNOWN var systemExtension: String = "" var chmod: String = "+x" if (os.toUpperCase.contains("WIN")) { chmod = "" systemExtension = ".exe" operatingSystem = OperatingSystem.WINDOWS } else if (os.toUpperCase.contains("MAC")) operatingSystem = OperatingSystem.OSX else operatingSystem = OperatingSystem.NIX /** * @return Whether or not the system has a NIC */ def isNetworkIsAvailable: Boolean = { val interfaces: java.util.Enumeration[NetworkInterface] = NetworkInterface.getNetworkInterfaces while (interfaces.hasMoreElements) { val interf: NetworkInterface = interfaces.nextElement() if (interf.isUp && !interf.isLoopback) return true } false } }
LorenK96/slide-desktop
src/main/scala/slide/SystemInfo.scala
Scala
gpl-2.0
1,038
package mesosphere.marathon
package core.deployment.impl

import akka.Done
import akka.actor._
import akka.event.EventStream
import akka.pattern._
import com.typesafe.scalalogging.StrictLogging
import mesosphere.marathon.core.async.ExecutionContexts.global
import mesosphere.marathon.core.event._
import mesosphere.marathon.core.instance.Instance
import mesosphere.marathon.core.instance.Instance.Id
import mesosphere.marathon.core.launchqueue.LaunchQueue
import mesosphere.marathon.core.readiness.ReadinessCheckExecutor
import mesosphere.marathon.core.task.termination.InstanceChangedPredicates.considerTerminal
import mesosphere.marathon.core.task.termination.{ KillReason, KillService }
import mesosphere.marathon.core.task.tracker.InstanceTracker
import mesosphere.marathon.state.RunSpec

import scala.collection.{ SortedSet, mutable }
import scala.concurrent.{ Future, Promise }

/**
 * Replaces the instances of a run spec with instances of a new version.
 *
 * Old-version instances are killed while new-version instances are launched, constrained by
 * the restart strategy computed from the run spec's upgrade strategy (minimumHealthCapacity /
 * maximumOverCapacity, see [[TaskReplaceActor.computeRestartStrategy]]). The given `promise`
 * is completed once the target instance count is reached and every old instance is killed;
 * the actor then stops itself.
 *
 * NOTE(review): `version`, `pathId`, `healthyInstances`, `readyInstances`,
 * `instanceTerminated`, `reconcileHealthAndReadinessCheck` and `targetCountReached` are
 * presumably provided by the mixed-in `ReadinessBehavior` — confirm against that trait.
 */
class TaskReplaceActor(
    val deploymentManagerActor: ActorRef,
    val status: DeploymentStatus,
    val killService: KillService,
    val launchQueue: LaunchQueue,
    val instanceTracker: InstanceTracker,
    val eventBus: EventStream,
    val readinessCheckExecutor: ReadinessCheckExecutor,
    val runSpec: RunSpec,
    promise: Promise[Unit]) extends Actor with ReadinessBehavior with StrictLogging {
  import TaskReplaceActor._

  // compute all values ====================================================================================

  // All running instances of this app
  //
  // Killed resident tasks are not expunged from the instances list. Ignore
  // them. LaunchQueue takes care of launching instances against reservations
  // first
  val currentRunningInstances = instanceTracker.specInstancesSync(runSpec.id).filter(_.isActive)

  // In case previous master was abdicated while the deployment was still running we might have
  // already started some new tasks.
  // All already started and active tasks are filtered while the rest is considered
  private[this] val (instancesAlreadyStarted, instancesToKill) = {
    // partition by run spec version: matching version == already the new instances
    currentRunningInstances.partition(_.runSpecVersion == runSpec.version)
  }

  // The ignition strategy for this run specification
  private[this] val ignitionStrategy = computeRestartStrategy(runSpec, currentRunningInstances.size)

  // compute all variables maintained in this actor =========================================================

  // All instances to kill queued up
  private[this] val toKill: mutable.Queue[Instance] = instancesToKill.to[mutable.Queue]

  // All instances to kill as set for quick lookup
  private[this] var oldInstanceIds: SortedSet[Id] = instancesToKill.map(_.instanceId).to[SortedSet]

  // The number of started instances. Defaults to the number of already started instances.
  var instancesStarted: Int = instancesAlreadyStarted.size

  override def preStart(): Unit = {
    super.preStart()
    // subscribe to all needed events
    eventBus.subscribe(self, classOf[InstanceChanged])
    eventBus.subscribe(self, classOf[InstanceHealthChanged])

    // reconcile the state from a possible previous run
    reconcileAlreadyStartedInstances()

    // kill old instances to free some capacity
    for (_ <- 0 until ignitionStrategy.nrToKillImmediately) killNextOldInstance()

    // start new instances, if possible
    launchInstances()

    // reset the launch queue delay
    logger.info("Resetting the backoff delay before restarting the runSpec")
    launchQueue.resetDelay(runSpec)

    // it might be possible, that we come here, but nothing is left to do
    checkFinished()
  }

  override def postStop(): Unit = {
    eventBus.unsubscribe(self)
    super.postStop()
  }

  // readiness handling first; anything it does not handle falls through to the replace logic
  override def receive: Receive = readinessBehavior orElse replaceBehavior

  def replaceBehavior: Receive = {
    // New instance failed to start, restart it
    case InstanceChanged(id, `version`, `pathId`, condition, instance) if !oldInstanceIds(id) && considerTerminal(condition) =>
      logger.error(s"New instance $id failed on agent ${instance.agentInfo.agentId} during app $pathId restart")
      instanceTerminated(id)
      // decrement so launchInstances() sees the freed slot and re-queues a replacement
      instancesStarted -= 1
      launchInstances()

    // Old instance successfully killed
    case InstanceChanged(id, _, `pathId`, condition, _) if oldInstanceIds(id) && considerTerminal(condition) =>
      oldInstanceIds -= id
      // a kill frees capacity: launch more if allowed, then re-evaluate completion
      launchInstances().foreach(_ => checkFinished())

    // Ignore change events, that are not handled in parent receives
    case _: InstanceChanged =>

    case Status.Failure(e) =>
      // This is the result of failed launchQueue.addAsync(...) call. Log the message and
      // restart this actor. Next reincarnation should try to start from the beginning.
      logger.warn("Failed to launch instances: ", e)
      throw e

    case Done => // This is the result of successful launchQueue.addAsync(...) call. Nothing to do here
  }

  // Called by ReadinessBehavior: once a new instance is both healthy and ready,
  // retire the next old instance in its place.
  override def instanceConditionChanged(instanceId: Instance.Id): Unit = {
    if (healthyInstances(instanceId) && readyInstances(instanceId)) killNextOldInstance(Some(instanceId))
    checkFinished()
  }

  // Re-attach health/readiness tracking for new-version instances found at startup.
  def reconcileAlreadyStartedInstances(): Unit = {
    logger.info(s"reconcile: found ${instancesAlreadyStarted.size} already started instances " +
      s"and ${oldInstanceIds.size} old instances")
    instancesAlreadyStarted.foreach(reconcileHealthAndReadinessCheck)
  }

  // Careful not to make this method completely asynchronous - it changes local actor's state `instancesStarted`.
  // Only launching new instances needs to be asynchronous.
  def launchInstances(): Future[Done] = {
    // capacity left under maxCapacity, counting both surviving old instances and new ones
    val leftCapacity = math.max(0, ignitionStrategy.maxCapacity - oldInstanceIds.size - instancesStarted)
    val instancesNotStartedYet = math.max(0, runSpec.instances - instancesStarted)
    val instancesToStartNow = math.min(instancesNotStartedYet, leftCapacity)
    if (instancesToStartNow > 0) {
      logger.info(s"Reconciling instances during app $pathId restart: queuing $instancesToStartNow new instances")
      instancesStarted += instancesToStartNow
      // result (Done or Status.Failure) comes back to this actor via pipeTo
      launchQueue.addAsync(runSpec, instancesToStartNow).pipeTo(self)
    } else {
      Future.successful(Done)
    }
  }

  // Kill the next queued old instance, logging which new instance (if any) triggered it.
  def killNextOldInstance(maybeNewInstanceId: Option[Instance.Id] = None): Unit = {
    if (toKill.nonEmpty) {
      val nextOldInstance = toKill.dequeue()
      maybeNewInstanceId match {
        case Some(newInstanceId: Instance.Id) =>
          logger.info(s"Killing old ${nextOldInstance.instanceId} because $newInstanceId became reachable")
        case _ =>
          logger.info(s"Killing old ${nextOldInstance.instanceId}")
      }
      killService.killInstance(nextOldInstance, KillReason.Upgrading)
    }
  }

  // Complete the promise and stop once the target count is reached and all old instances are gone.
  def checkFinished(): Unit = {
    if (targetCountReached(runSpec.instances) && oldInstanceIds.isEmpty) {
      logger.info(s"All new instances for $pathId are ready and all old instances have been killed")
      promise.trySuccess(())
      context.stop(self)
    } else {
      logger.debug(s"For run spec: [${runSpec.id}] there are [${healthyInstances.size}] healthy and " +
        s"[${readyInstances.size}] ready new instances and " +
        s"[${oldInstanceIds.size}] old instances.")
    }
  }
}

object TaskReplaceActor extends StrictLogging {

  //scalastyle:off
  def props(
    deploymentManagerActor: ActorRef,
    status: DeploymentStatus,
    killService: KillService,
    launchQueue: LaunchQueue,
    instanceTracker: InstanceTracker,
    eventBus: EventStream,
    readinessCheckExecutor: ReadinessCheckExecutor,
    app: RunSpec,
    promise: Promise[Unit]): Props = Props(
    new TaskReplaceActor(deploymentManagerActor, status, killService, launchQueue, instanceTracker,
      eventBus, readinessCheckExecutor, app, promise)
  )

  /** Encapsulates the logic how to get a Restart going */
  private[impl] case class RestartStrategy(nrToKillImmediately: Int, maxCapacity: Int)

  /**
   * Derives how many old instances may be killed immediately and the maximum number of
   * instances (old + new) allowed to exist concurrently during the restart, from the run
   * spec's upgrade strategy and the current running count.
   */
  private[impl] def computeRestartStrategy(runSpec: RunSpec, runningInstancesCount: Int): RestartStrategy = {
    // in addition to a spec which passed validation, we require:
    require(runSpec.instances > 0, s"instances must be > 0 but is ${runSpec.instances}")
    require(runningInstancesCount >= 0, s"running instances count must be >=0 but is $runningInstancesCount")

    // minimum number of healthy instances that must be kept alive throughout
    val minHealthy = (runSpec.instances * runSpec.upgradeStrategy.minimumHealthCapacity).ceil.toInt
    var maxCapacity = (runSpec.instances * (1 + runSpec.upgradeStrategy.maximumOverCapacity)).toInt
    var nrToKillImmediately = math.max(0, runningInstancesCount - minHealthy)

    // degenerate case: no headroom at all to start a new instance before killing an old one
    if (minHealthy == maxCapacity && maxCapacity <= runningInstancesCount) {
      if (runSpec.isResident) {
        // Kill enough instances so that we end up with one instance below minHealthy.
        // TODO: We need to do this also while restarting, since the kill could get lost.
        nrToKillImmediately = runningInstancesCount - minHealthy + 1
        logger.info(
          "maxCapacity == minHealthy for resident app: " +
            s"adjusting nrToKillImmediately to $nrToKillImmediately in order to prevent over-capacity for resident app"
        )
      } else {
        logger.info("maxCapacity == minHealthy: Allow temporary over-capacity of one instance to allow restarting")
        maxCapacity += 1
      }
    }

    logger.info(s"For minimumHealthCapacity ${runSpec.upgradeStrategy.minimumHealthCapacity} of ${runSpec.id.toString} leave " +
      s"$minHealthy instances running, maximum capacity $maxCapacity, killing $nrToKillImmediately of " +
      s"$runningInstancesCount running instances immediately. (RunSpec version ${runSpec.version})")

    assume(nrToKillImmediately >= 0, s"nrToKillImmediately must be >=0 but is $nrToKillImmediately")
    assume(maxCapacity > 0, s"maxCapacity must be >0 but is $maxCapacity")
    def canStartNewInstances: Boolean = minHealthy < maxCapacity || runningInstancesCount - nrToKillImmediately < maxCapacity
    assume(canStartNewInstances, "must be able to start new instances")

    RestartStrategy(nrToKillImmediately = nrToKillImmediately, maxCapacity = maxCapacity)
  }
}
janisz/marathon
src/main/scala/mesosphere/marathon/core/deployment/impl/TaskReplaceActor.scala
Scala
apache-2.0
10,053
package edu.iitd.nlp.ListExtraction

import org.allenai.common.LoggingWithUncaughtExceptions
import org.scalatest._

import java.io._

import scala.collection.mutable
import scala.io.Source

class TestRuleBasedExtractor extends FlatSpec with LoggingWithUncaughtExceptions {
  val extractor = new RuleBasedExtractor

  "RuleBasedExtractor" should "run correctly on a simple sentence" ignore {
    val sent = "I like playing hockey, cricket and football."
    val (tokens, parse, listRanges) = extractor.extractListRange(sent)
    val goldListRanges = Seq(ListRange(6, mutable.ArrayBuffer((3, 3), (5, 5), (7, 7)), 1.0))
    assert(listRanges == goldListRanges)
  }

  it should "give correct score on a simple sentence with MaxMatchScorer" ignore {
    val sent = "I like playing hockey, cricket and football."
    val (tokens, parse, listRanges) = extractor.extractListRange(sent)
    val goldListRanges = Seq(ListRange(6, mutable.ArrayBuffer((3, 3), (5, 5), (7, 7)), 1.0))
    val scorer = new MaxMatchScorer
    scorer.addSentence(sent, listRanges, goldListRanges)
    val score = scorer.getAverageScore
    logger.info(s"Cand: $listRanges\nGold: $goldListRanges\nScore: ${scorer.getAverageScore}")
    assert(score == Score(1, 1))
  }

  it should "give >= 70% score on British News Tree Bank dataset with MaxMatchScorer" in {
    // truncate the log file for the first dataset run
    evaluateDataset("data/british_news_treebank_dataset", "British News Tree Bank", appendLog = false)
  }

  it should "give >= 70% score on Penn Tree Bank dataset with MaxMatchScorer" ignore {
    // append so the British News results written earlier are preserved
    evaluateDataset("data/penn_treebank_dataset", "Penn Tree Bank", appendLog = true)
  }

  /**
   * Shared evaluation for the treebank-format datasets (the two dataset tests previously
   * duplicated this code; the Penn test also mislabeled its log output as "British News").
   *
   * Parses the dataset file (sentence count, then per sentence: the sentence, its gold list
   * ranges), scores the extractor's candidate lists against the gold lists with a
   * MaxMatchScorer, writes a per-sentence report to the log file (now reliably closed), and
   * asserts average precision >= 0.7. Sentences whose tokenization disagrees with the dataset's
   * token count are skipped.
   *
   * @param file        path of the dataset file to evaluate
   * @param datasetName human-readable dataset name, used in log messages
   * @param appendLog   whether to append to (vs truncate) the report log file
   */
  private def evaluateDataset(file: String, datasetName: String, appendLog: Boolean): Unit = {
    val source = Source.fromFile(file)
    val scorer = new MaxMatchScorer
    var skippedSentencesCount = 0
    val logFileName = "logs/" + this.getClass.getName + ".txt"
    val writer = new PrintWriter(new FileOutputStream(new File(logFileName), appendLog))
    try {
      val data = source.getLines()
      val numSentences = data.next().toInt
      for (i <- 0 until numSentences) {
        val sent = data.next()
        val sentTokenCount = sent.split(" ").size
        val listCount = data.next().toInt
        val goldListsRange = mutable.ArrayBuffer[ListRange]()
        for (j <- 0 until listCount) {
          val Seq(ccId, elemCount) = data.next().split(" ").map(_.toInt).toSeq
          val elemPos = mutable.ArrayBuffer[(Int, Int)]()
          for (k <- 0 until elemCount) {
            // the element-size line is part of the file format but unused here
            data.next()
            val elemRange = data.next().split(" ").map(_.toInt)
            elemPos += ((elemRange.head, elemRange.last))
          }
          goldListsRange += ListRange(ccId, elemPos, 1.0)
        }
        val (tokens, parse, candListsRange) = extractor.extractListRange(sent)
        if (tokens.size != sentTokenCount) skippedSentencesCount += 1
        else {
          val sentResult = scorer.addSentence(sent, candListsRange, goldListsRange)
          val matchedGoldListsRange = sentResult.map(_._3)
          val scores = sentResult.map(_._1)
          val goldLists = extractor.extractLists(tokens, goldListsRange)
          val matchedGoldLists = extractor.extractLists(tokens, matchedGoldListsRange)
          val candLists = extractor.extractLists(tokens, candListsRange)
          writer.write(s"Sentence: $sent\n\nGold Lists Range: $goldListsRange\nGold Lists: $goldLists\n\n" +
            s"Matched Gold Lists Range: $matchedGoldListsRange\nMatched Gold Lists: $matchedGoldLists\n\n" +
            s"Candidate Lists Range: $candListsRange\nCandidate Lists: $candLists\n\nScores: $scores\n\n\n")
        }
      }
    } finally {
      // previously neither resource was closed, so buffered report output could be lost
      writer.close()
      source.close()
    }
    val avgScore = scorer.getAverageScore
    val avgScoreByLength = scorer.getAverageScoreByLength.toSeq.sortBy(_._1)
    logger.info(s"Average score on $datasetName dataset: $avgScore with $skippedSentencesCount sentences skipped")
    logger.info(s"Average score by max length of list elements: $avgScoreByLength")
    assert(avgScore.precision >= 0.7)
  }
}
satwantrana/list-extractor
src/test/scala/edu/iitd/nlp/ListExtraction/TestRuleBasedExtractor.scala
Scala
mit
6,105
package helloscalaexecutablejar import javax.swing.JFrame object Main { def main(args: Array[String]) { val frame = new JFrame("Scala Executable Jar") frame.setBounds(300, 200, 250, 100) frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.setVisible(true) } }
AlexHolly/helloScalaExecutableJar
src/main/scala/helloscalaexecutablejar/Main.scala
Scala
mit
290
/*
 * Copyright 2013 - 2015, Daniel Krzywicki <daniel.krzywicki@agh.edu.pl>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package pl.edu.agh.scalamas.emas

import pl.edu.agh.scalamas.genetic.GeneticProblem
import pl.edu.agh.scalamas.solutions.{Solutions, SolutionsComponent, SolutionsFactoryComponent}

/**
 * Solutions component for EMAS: tracks a (solution, evaluation) pair, keeping whichever
 * pair has the greater evaluation according to the problem's ordering.
 *
 * Created by mateusz on 14.11.16.
 */
trait EmasSolutions extends SolutionsComponent {
  this: GeneticProblem with SolutionsFactoryComponent =>

  // a candidate solution paired with its fitness evaluation
  type SolutionsType = (Genetic#Solution, Genetic#Evaluation)

  lazy val solutions: Solutions[SolutionsType] = {
    // Fix: seed the factory with the generated solution and ITS OWN evaluation.
    // Previously a second, unrelated `genetic.generate` was paired with `fitness`,
    // so the stored evaluation did not belong to the stored solution.
    val solution = genetic.generate
    val fitness = genetic.evaluate(solution)
    solutionsFactory((solution, fitness)) {
      case (oldSolution, newSolution) => fixedMax(oldSolution, newSolution)(genetic.ordering)
    }
  }

  // renders a (solution, fitness) pair for logging/reporting
  def solutionsFormatter = {
    case (solution, fitness) => s"$solution $fitness"
  }

  // returns x if its evaluation is strictly greater than y's, otherwise y
  private[this] def fixedMax(x: SolutionsType, y: SolutionsType)(implicit ordering: Ordering[Genetic#Evaluation]) =
    if (ordering.gt(x._2, y._2)) x else y
}
ros3n/IntOb
emas/src/main/scala/pl/edu/agh/scalamas/emas/EmasSolutions.scala
Scala
mit
2,086
package be.cmpg.walk.neighbourhoodScoring

import java.nio.file.Paths
import be.cmpg.graph.Gene
import be.cmpg.graph.NetworkReader
import be.cmpg.graph.Network
import collection.immutable.ListMap
import java.io.FileWriter
import be.cmpg.graph.interaction.NodeCostNetworkManager
import scala.util.Random
import be.cmpg.walk.Path
import be.cmpg.utils.StatUtils
import be.cmpg.expression.ExpressionNetworkManager

/**
 * Simulation script: plants a random "important" gene subnetwork via a random walk, assigns
 * Poisson-distributed scores (lower mean for the planted genes), then ranks genes by
 * neighbourhood relevance and writes the selected genes, the planted golden set, and all
 * candidate genes to files.
 *
 * NOTE(review): `networkManager` is dereferenced inside simulateGeneScores(), but the val is
 * only initialized AFTER that method runs (from its own result). Under `App`'s sequential
 * initialization this looks like a guaranteed null dereference at startup — a line that builds
 * the network manager from `interactions` (currently unused) appears to be missing. TODO confirm.
 */
object NeighbourhoodscanByBestSubnetWork extends App {

  // NOTE(review): read but never used below — presumably intended to construct the network manager
  val interactions = NetworkReader.fromFile("src/test/resources/be/cmpg/graph/network_small_connected.txt")

  // output files: hard-coded absolute Windows paths (mixed \ and / separators)
  val selectedGenesSet = new FileWriter(raw"C:\Users\Bram\Documents\doctoraat\Projecten/PhAc/selected.txt")
  val goldenSet = new FileWriter(raw"C:\Users\Bram\Documents\doctoraat\Projecten/PhAc/goldenSet.txt")
  val possibleGenes = new FileWriter(raw"C:\Users\Bram\Documents\doctoraat\Projecten/PhAc/possibleGenes.txt")

  // Plants a 7-gene connected "important" set via a restarting random walk, then gives every
  // node a Poisson score (mean 2 for planted genes, 5 otherwise). Returns the planted set and
  // the (shared) network manager.
  def simulateGeneScores(): (Set[Gene], ExpressionNetworkManager) = {

    /*
     * Select randomly the genes that are going to be important
     */
    var randomImportantGene = networkManager.getGenes.toList(Random.nextInt(networkManager.getGenes.size))
    //val randomImportantGene = Gene("STM4175")
    var resetcounter = 0
    var path = new Path(randomImportantGene)

    // random walk until 7 genes are collected; 20% chance per step to restart the walk,
    // and after 20 dead-ends a new random start gene is drawn
    while (path.size < 7) {
      if (Random.nextDouble <= 0.2) {
        path.reset
      } else {
        val possibleInteractions = networkManager.getOutgoingInteractionsFor(path.currentEndpoint).filter(interaction => path.canTakeInteraction(interaction)).toList
        if (!possibleInteractions.isEmpty) {
          val next = possibleInteractions(Random.nextInt(possibleInteractions.size))
          path.expand(next)
        } else {
          resetcounter = resetcounter + 1
          if (resetcounter > 20) {
            randomImportantGene = networkManager.getGenes.toList(Random.nextInt(networkManager.getGenes.size))
            path = new Path(randomImportantGene)
            resetcounter = 0
          }
          path.reset
        }
      }
    }

    val importantGenes = path.visitedGenes.toSet

    /*
     * Give scores to all genes.
     */
    val importantGeneMean = 2
    val nonImportantGeneMean = 5
    networkManager.getNetwork.getNodes.foreach(node => node.score = StatUtils.getRandomPoisson(if (importantGenes contains node.gene) importantGeneMean else nonImportantGeneMean))

    importantGenes.foreach(g => println(g + "\t" + networkManager.getNetwork().getNode(g).score))
    println("random scores generated")
    (importantGenes, networkManager)
  }

  val randomGeneratedInput = simulateGeneScores()
  val importantGenes = randomGeneratedInput._1
  println("importantGenes:" + importantGenes)
  // NOTE(review): initialized here, AFTER simulateGeneScores() already read it (see class note)
  val networkManager = randomGeneratedInput._2

  // persist the planted ("golden") gene set and the full candidate gene universe
  importantGenes.foreach(gene => goldenSet.write(gene.name + "\n"))
  goldenSet.close
  networkManager.getNetwork().getNodes().foreach(node => possibleGenes.write(node.gene.name + "\n"))
  possibleGenes.close

  val mutationsScore = networkManager.getNetwork().getNodes().map(node => (node.gene.name, node.score.toString))

  // neighbourhood (depth 2) relevance per gene: gene-set -> relevance score
  var relevanceMap = new scala.collection.mutable.HashMap[scala.collection.mutable.HashSet[Gene], Double]()
  println("mutationScores:" + mutationsScore)
  networkManager.getGenes.foreach(mutatedGene => {
    val MutationDisRelevance = new NeighbourhoodTreeGenerator(mutatedGene, 2, networkManager).expand
    relevanceMap.put(MutationDisRelevance._2, MutationDisRelevance._1)
  })

  // sort by relevance ascending, keep only the first entry
  val orderedMap = ListMap(relevanceMap.toList.sortBy { _._2 }: _*)
  // NOTE(review): extracts gene names by parsing the toString of the first map entry
  // (drop "HashSet(" prefix, cut at ")", split on ", ") — very fragile; consider mapping
  // over the HashSet[Gene] directly instead. TODO confirm intended output format.
  val selectedGenes = orderedMap.map(input => input._1 + "\t" + input._2.toString).toList.dropRight(orderedMap.size - 1).toString.drop(9).split("\\)")(0).split(", ")
  println("selectedGenes: " + selectedGenes.deep)
  selectedGenes.foreach(gene => selectedGenesSet.write(gene + "\n"))
  selectedGenesSet.close
}
spulido99/SSA
src/main/scala/be/cmpg/walk/neighbourhoodScoring/NeighbourhoodscanByBestSubnetWork.scala
Scala
gpl-2.0
3,844
package org.openurp.edu.eams.time.web.dwr import org.beangle.commons.entity.metadata.Model import org.beangle.data.jpa.hibernate.HibernateEntityDao import org.openurp.edu.eams.base.Calendar class SemesterDaoDwrHibernate extends HibernateEntityDao with SemesterDaoDwr { def getTermsOrderByDistance(calendarId: java.lang.Integer, year: String): List[_] = { val calendar = Model.newInstance(classOf[Calendar]).asInstanceOf[Calendar] calendar.setId(calendarId) val params = new HashMap() params.put("calendar", calendar) params.put("schoolYear", year) val rs = search("@getTermsOrderByDistance", params, true) rs } def getYearsOrderByDistance(calendarId: java.lang.Integer): List[_] = { val calendar = Model.newInstance(classOf[Calendar]).asInstanceOf[Calendar] calendar.setId(calendarId) val params = new HashMap() params.put("calendar", calendar) val rawYears = search("@getYearsOrderByDistance", params, true) val newYears = new ArrayList() val distinctYears = new HashSet() var iter = rawYears.iterator() while (iter.hasNext) { val schoolYear = iter.next().asInstanceOf[String] if (!distinctYears.contains(schoolYear)) { distinctYears.add(schoolYear) newYears.add(schoolYear) } } newYears } }
openurp/edu-eams-webapp
web/src/main/scala/org/openurp/edu/eams/time/web/dwr/SemesterDaoDwrHibernate.scala
Scala
gpl-3.0
1,320
/*
Copyright 2012 Twitter, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.algebird

import java.lang.{
  Boolean => JBool,
  Double => JDouble,
  Float => JFloat,
  Integer => JInt,
  Long => JLong,
  Short => JShort
}
import java.util.{ArrayList => JArrayList, HashMap => JHashMap, List => JList, Map => JMap}

import scala.collection.JavaConverters._

// Ring instances for Java's boxed primitive types. Arithmetic is performed on the unboxed
// values (Scala's implicit unboxing) and the results are re-boxed on return.

/** Ring for java.lang.Integer under standard integer arithmetic. */
object JIntRing extends Ring[JInt] {
  override val zero: JInt = JInt.valueOf(0)
  override val one: JInt = JInt.valueOf(1)
  override def plus(x: JInt, y: JInt): JInt = x + y
  override def negate(x: JInt): JInt = -x
  override def minus(x: JInt, y: JInt): JInt = x - y
  override def times(x: JInt, y: JInt): JInt = x * y
}

/** Ring for java.lang.Short; intermediate math widens to Int, so each result is truncated back via toShort. */
object JShortRing extends Ring[JShort] {
  override val zero: JShort = Short.box(0)
  override val one: JShort = Short.box(1)
  override def plus(x: JShort, y: JShort): JShort = (x + y).toShort
  override def negate(x: JShort): JShort = (-x).toShort
  override def minus(x: JShort, y: JShort): JShort = (x - y).toShort
  override def times(x: JShort, y: JShort): JShort = (x * y).toShort
}

/** Ring for java.lang.Long under standard long arithmetic. */
object JLongRing extends Ring[JLong] {
  override val zero: JLong = JLong.valueOf(0L)
  override val one: JLong = JLong.valueOf(1L)
  override def plus(x: JLong, y: JLong): JLong = x + y
  override def negate(x: JLong): JLong = -x
  override def minus(x: JLong, y: JLong): JLong = x - y
  override def times(x: JLong, y: JLong): JLong = x * y
}

/** Ring for java.lang.Float under standard float arithmetic. */
object JFloatRing extends Ring[JFloat] {
  override val zero: JFloat = JFloat.valueOf(0.0f)
  override val one: JFloat = JFloat.valueOf(1.0f)
  override def plus(x: JFloat, y: JFloat): JFloat = x + y
  override def negate(x: JFloat): JFloat = -x
  override def minus(x: JFloat, y: JFloat): JFloat = x - y
  override def times(x: JFloat, y: JFloat): JFloat = x * y
}

/** Ring for java.lang.Double under standard double arithmetic. */
object JDoubleRing extends Ring[JDouble] {
  override val zero: JDouble = JDouble.valueOf(0.0)
  override val one: JDouble = JDouble.valueOf(1.0)
  override def plus(x: JDouble, y: JDouble): JDouble = x + y
  override def negate(x: JDouble): JDouble = -x
  override def minus(x: JDouble, y: JDouble): JDouble = x - y
  override def times(x: JDouble, y: JDouble): JDouble = x * y
}

/**
 * Ring for java.lang.Boolean: addition is XOR and multiplication is AND.
 * Since every element is its own additive inverse under XOR, negate is the
 * identity and minus coincides with plus.
 */
object JBoolRing extends Ring[JBool] {
  override val zero: JBool = JBool.FALSE
  override val one: JBool = JBool.TRUE
  override def plus(x: JBool, y: JBool): JBool = JBool.valueOf(x.booleanValue ^ y.booleanValue)
  override def negate(x: JBool): JBool = x
  override def minus(x: JBool, y: JBool): JBool = plus(x, y)
  override def times(x: JBool, y: JBool): JBool = JBool.valueOf(x.booleanValue & y.booleanValue)
}

/**
 * Concatenation monoid for java.util.List.
 *
 * Since Lists are mutable, this always makes a full copy: plus returns a fresh
 * JArrayList containing the elements of x followed by the elements of y, leaving
 * both arguments untouched. Prefer scala immutable Lists where possible (with
 * immutable lists, the tail of the result of plus can share the right argument).
 */
class JListMonoid[T] extends Monoid[JList[T]] {
  override def isNonZero(x: JList[T]): Boolean = !x.isEmpty
  override lazy val zero: JArrayList[T] = new JArrayList[T](0)
  override def plus(x: JList[T], y: JList[T]): JArrayList[T] = {
    // pre-size for the upper bound of the combined size
    val res = new JArrayList[T](x.size + y.size)
    res.addAll(x)
    res.addAll(y)
    res
  }
}

/**
 * Value-merging monoid for java.util.Map, combining values under the Semigroup[V].
 *
 * Since maps are mutable, this always makes a full copy; prefer scala immutable
 * maps where possible (this operation is much faster there).
 * TODO extend this to Group, Ring
 */
class JMapMonoid[K, V: Semigroup] extends Monoid[JMap[K, V]] {
  override lazy val zero: JHashMap[K, V] = new JHashMap[K, V](0)

  // predicate used to drop merged values that collapse to the monoid zero;
  // when V has no Monoid, every value is kept
  val nonZero: (V => Boolean) = implicitly[Semigroup[V]] match {
    case mon: Monoid[_] => mon.isNonZero(_)
    case _ => (_ => true)
  }

  // a map is non-zero if it has at least one value that is non-zero under V's Monoid
  // (or simply non-empty when V has no Monoid)
  override def isNonZero(x: JMap[K, V]): Boolean =
    !x.isEmpty && (implicitly[Semigroup[V]] match {
      case mon: Monoid[_] => x.values.asScala.exists(v => mon.isNonZero(v))
      case _ => true
    })
  override def plus(x: JMap[K, V], y: JMap[K, V]): JHashMap[K, V] = {
    // copy the larger map and fold the smaller one into it; track which side was
    // bigger so values are still combined in left-to-right (x, y) argument order
    val (big, small, bigOnLeft) =
      if (x.size > y.size) {
        (x, y, true)
      } else {
        (y, x, false)
      }
    val vsemi = implicitly[Semigroup[V]]
    val result = new JHashMap[K, V](big.size + small.size)
    result.putAll(big)
    small.entrySet.asScala.foreach { kv =>
      val smallK = kv.getKey
      val smallV = kv.getValue
      if (big.containsKey(smallK)) {
        val bigV = big.get(smallK)
        val newV = if (bigOnLeft) vsemi.plus(bigV, smallV) else vsemi.plus(smallV, bigV)
        // drop entries whose combined value is the zero of V's Monoid
        if (nonZero(newV)) result.put(smallK, newV) else result.remove(smallK)
      } else {
        // No need to explicitly add with zero on V, just put in the small value
        result.put(smallK, smallV)
      }
    }
    result
  }
}
twitter/algebird
algebird-core/src/main/scala/com/twitter/algebird/JavaMonoids.scala
Scala
apache-2.0
5,180
package lila.tournament import org.joda.time.DateTime import reactivemongo.bson._ import reactivemongo.core.commands._ import BSONHandlers._ import lila.db.BSON._ import lila.db.dsl._ import lila.rating.Perf import lila.user.{ User, Perfs } object PlayerRepo { private lazy val coll = Env.current.playerColl private def selectId(id: String) = $doc("_id" -> id) private def selectTour(tourId: String) = $doc("tid" -> tourId) private def selectUser(userId: String) = $doc("uid" -> userId) private def selectTourUser(tourId: String, userId: String) = $doc( "tid" -> tourId, "uid" -> userId) private val selectActive = $doc("w" $ne true) private val selectWithdraw = $doc("w" -> true) private val bestSort = $doc("m" -> -1) def byId(id: String): Fu[Option[Player]] = coll.uno[Player](selectId(id)) def bestByTour(tourId: String, nb: Int, skip: Int = 0): Fu[List[Player]] = coll.find(selectTour(tourId)).sort(bestSort).skip(skip).cursor[Player]().gather[List](nb) def bestByTourWithRank(tourId: String, nb: Int, skip: Int = 0): Fu[RankedPlayers] = bestByTour(tourId, nb, skip).map { res => res.foldRight(List.empty[RankedPlayer] -> (res.size + skip)) { case (p, (res, rank)) => (RankedPlayer(rank, p) :: res, rank - 1) }._1 } def bestByTourWithRankByPage(tourId: String, nb: Int, page: Int): Fu[RankedPlayers] = bestByTourWithRank(tourId, nb, (page - 1) * nb) def countActive(tourId: String): Fu[Int] = coll.count(Some(selectTour(tourId) ++ selectActive)) def count(tourId: String): Fu[Int] = coll.count(Some(selectTour(tourId))) def removeByTour(tourId: String) = coll.remove(selectTour(tourId)).void def remove(tourId: String, userId: String) = coll.remove(selectTourUser(tourId, userId)).void def exists(tourId: String, userId: String) = coll.count(selectTourUser(tourId, userId).some) map (0!=) def existsActive(tourId: String, userId: String) = coll.count(Some( selectTourUser(tourId, userId) ++ selectActive )) map (0!=) def unWithdraw(tourId: String) = coll.update( selectTour(tourId) ++ 
selectWithdraw, $doc("$unset" -> $doc("w" -> true)), multi = true).void def find(tourId: String, userId: String): Fu[Option[Player]] = coll.find(selectTourUser(tourId, userId)).uno[Player] def update(tourId: String, userId: String)(f: Player => Fu[Player]) = find(tourId, userId) flatten s"No such player: $tourId/$userId" flatMap f flatMap { player => coll.update(selectId(player._id), player).void } def playerInfo(tourId: String, userId: String): Fu[Option[PlayerInfo]] = find(tourId, userId) flatMap { _ ?? { player => coll.countSel(selectTour(tourId) ++ $doc( "m" -> $doc("$gt" -> player.magicScore)) ) map { n => PlayerInfo((n + 1), player.withdraw).some } } } def join(tourId: String, user: User, perfLens: Perfs => Perf) = find(tourId, user.id) flatMap { case Some(p) if p.withdraw => coll.update(selectId(p._id), $doc("$unset" -> $doc("w" -> true))) case Some(p) => funit case None => coll.insert(Player.make(tourId, user, perfLens)) } void def withdraw(tourId: String, userId: String) = coll.update( selectTourUser(tourId, userId), $doc("$set" -> $doc("w" -> true))).void def withPoints(tourId: String): Fu[List[Player]] = coll.find( selectTour(tourId) ++ $doc("m" -> $doc("$gt" -> 0)) ).cursor[Player]().gather[List]() private def aggregationUserIdList(res: Stream[Bdoc]): List[String] = res.headOption flatMap { _.getAs[List[String]]("uids") } getOrElse Nil import reactivemongo.api.collections.bson.BSONBatchCommands.AggregationFramework.{ Descending, Group, Match, PushField, Sort } def userIds(tourId: String): Fu[List[String]] = coll.distinct[String, List]("uid", selectTour(tourId).some) def activeUserIds(tourId: String): Fu[List[String]] = coll.distinct[String, List]( "uid", (selectTour(tourId) ++ selectActive).some) def winner(tourId: String): Fu[Option[Player]] = coll.find(selectTour(tourId)).sort(bestSort).uno[Player] // freaking expensive (marathons) private[tournament] def computeRanking(tourId: String): Fu[Ranking] = coll.aggregate(Match(selectTour(tourId)), 
List(Sort(Descending("m")), Group(BSONNull)("uids" -> PushField("uid")))) map { _.firstBatch.headOption.fold(Map.empty: Ranking) { _ get "uids" match { case Some(BSONArray(uids)) => // mutable optimized implementation val b = Map.newBuilder[String, Int] var r = 0 for (u <- uids) { b += (u.get.asInstanceOf[BSONString].value -> r) r = r + 1 } b.result case _ => Map.empty } } } def byTourAndUserIds(tourId: String, userIds: Iterable[String]): Fu[List[Player]] = coll.find(selectTour(tourId) ++ $doc("uid" $in userIds)) .list[Player]() .chronometer.logIfSlow(200, logger) { players => s"PlayerRepo.byTourAndUserIds $tourId ${userIds.size} user IDs, ${players.size} players" }.result def pairByTourAndUserIds(tourId: String, id1: String, id2: String): Fu[Option[(Player, Player)]] = byTourAndUserIds(tourId, List(id1, id2)) map { case List(p1, p2) if p1.is(id1) && p2.is(id2) => Some(p1 -> p2) case List(p1, p2) if p1.is(id2) && p2.is(id1) => Some(p2 -> p1) case _ => none } def setPerformance(player: Player, performance: Int) = coll.update(selectId(player.id), $doc("$set" -> $doc("e" -> performance))).void private def rankPlayers(players: List[Player], ranking: Ranking): RankedPlayers = players.flatMap { p => ranking get p.userId map { RankedPlayer(_, p) } }.sortBy(_.rank) def rankedByTourAndUserIds(tourId: String, userIds: Iterable[String], ranking: Ranking): Fu[RankedPlayers] = byTourAndUserIds(tourId, userIds).map { rankPlayers(_, ranking) } .chronometer .logIfSlow(200, logger) { players => s"PlayerRepo.rankedByTourAndUserIds $tourId ${userIds.size} user IDs, ${ranking.size} ranking, ${players.size} players" }.result }
clarkerubber/lila
modules/tournament/src/main/PlayerRepo.scala
Scala
agpl-3.0
6,271
package net.sansa_stack.rdf.spark.model.hdt import org.apache.jena.graph.Triple import org.apache.spark.rdd.RDD import org.apache.spark.sql.types.{LongType, StringType, StructField, StructType} import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparkSession} object TripleOps { private val spark: SparkSession = SparkSession.builder().getOrCreate() /** * Function returns the Schema of Indexed Triple Fact table. * @return StructType */ def hdtSchema: StructType = { StructType( Seq( StructField(name = "s", dataType = StringType, nullable = false), StructField(name = "o", dataType = StringType, nullable = false), StructField(name = "p", dataType = StringType, nullable = false))) } /** * Function returns the Schema of Dictionary Dataframe. * @return Schema of Dictionary */ def dictionarySchema: StructType = { StructType( Seq( StructField(name = "name", dataType = StringType, nullable = false), StructField(name = "index", dataType = LongType, nullable = false))) } /** * Function converts RDD[graph.Triple] to DataFrame [Subject,Object,Predicate] by extracting SOP value from each record * @param triple: Input raw RDD[graph.Triple] * @return Returns DataFrame [Subject,Object,Predicate] */ def makeHDT(triple: RDD[Triple]): DataFrame = { spark.createDataFrame(triple.map(t => Row(t.getSubject.toString, t.getObject.toString(), t.getPredicate.toString())), hdtSchema) } /** * Return Dataframe of Index + Subject by retrieving the unique subjects from RDD[Triple] and zip it with undex * @param triples RDD[Triple] conversion of input file * @return DataFrame Subject dictionary of [index,subject] */ def getDistinctSubjectDictDF(triples: RDD[Triple]): DataFrame = { spark.createDataFrame(triples.map(_.getSubject.toString()).distinct().zipWithIndex().map(t => Row(t._1, t._2)), dictionarySchema) } /** * Return Dataframe of Index + Predicate by retrieving the unique predicate from RDD[Triple] and zip it with undex * @param triples RDD[Triple] conversion of input file * @return 
DataFrame Predicate dictionary of [index,Prediate] */ def getDistinctPredicateDictDF(triples: RDD[Triple]): DataFrame = { spark.createDataFrame(triples.map(_.getPredicate.toString()).distinct().zipWithIndex().map(t => Row(t._1, t._2)), dictionarySchema) } /** * Return Dataframe of Index + Object by retrieving the unique objects from RDD[Triple] and zip it with undex * @param triples RDD[Triple] conversion of input file * @return DataFrame Object dictionary of [index , object] */ def getDistinctObjectDictDF(triples: RDD[Triple]): DataFrame = { spark.createDataFrame(triples.map(_.getObject.toString()).distinct().zipWithIndex().map(t => Row(t._1, t._2)), dictionarySchema) } /** * Convert an RDD of triples into a DataFrame of hdt. * * @param triples RDD of triples. * @return a DataFrame of hdt triples. */ def asHDT(triples: RDD[Triple]): DataFrame = { val hdtDF = makeHDT(triples) val object_hdt = getDistinctObjectDictDF(triples).createOrReplaceTempView("objects_hdt") val predicate_hdt = getDistinctPredicateDictDF(triples).createOrReplaceTempView("predicates_hdt") val subjectHDT = getDistinctSubjectDictDF(triples).createOrReplaceTempView("subjects_hdt") hdtDF.createOrReplaceTempView("triples_hdt") val sqlQuery = """ SELECT subjects_hdt.index as s, predicates_hdt.index as p, objects_hdt.index as o FROM triples_hdt JOIN subjects_hdt ON triples_hdt.s = subjects_hdt.name JOIN objects_hdt ON triples_hdt.o = objects_hdt.name JOIN predicates_hdt ON triples_hdt.p =predicates_hdt.name """ // Creating Fact table from Subject,Predicate and Object index. Fact table contains unique ID of Subject/Object/Predicate val hdt = spark.sql(sqlQuery) hdt.createOrReplaceTempView("hdt") hdt } /** * Read hdt data from disk. * @param input -- path to hdt data. * @retun DataFrame of hdt, subject, predicate, and object view. 
*/ def readHDTFromDisk(input: String): (DataFrame, DataFrame, DataFrame, DataFrame) = { val hdt = spark.read.schema(hdtSchema).csv(input + "/triples") hdt.createOrReplaceTempView("hdt") val subjectDF = spark.read.schema(dictionarySchema) .csv(input + "/subject") subjectDF.createOrReplaceTempView("subjects_hdt") val objectDF = spark.read.schema(dictionarySchema) .csv(input + "/object") objectDF.createOrReplaceTempView("objects_hdt") val predicateDF = spark.read.schema(dictionarySchema) .csv(input + "/predicate") predicateDF.createOrReplaceTempView("predicates_hdt") (hdt, subjectDF, objectDF, predicateDF) } /** * Function saves the Index and Dictionaries Dataframe into given location * @param output Path to be written * @param mode SaveMode of Write */ def saveAsCSV(hdt: DataFrame, subjectDF: DataFrame, predicateDF: DataFrame, objectDF: DataFrame, output: String, mode: SaveMode): Unit = { hdt.write.mode(mode).csv(output + "/triples") subjectDF.write.mode(mode).csv(output + "/subject") objectDF.write.mode(mode).csv(output + "/object") predicateDF.write.mode(mode).csv(output + "/predicate") } }
SANSA-Stack/SANSA-RDF
sansa-rdf/sansa-rdf-spark/src/main/scala/net/sansa_stack/rdf/spark/model/hdt/TripleOps.scala
Scala
apache-2.0
5,361
/* * Copyright 2016-2017 original author or authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package tap.data /** * Created by andrew@andrewresearch.net on 26/10/17. */ case class TapPosStats( verbNounRatio:Double, futurePastRatio:Double, namedEntityWordRatio:Double, adjectiveWordRatio:Double, nounDistribution:Vector[Double], verbDistribution:Vector[Double], adjectiveDistribution:Vector[Double] )
uts-cic/tap
src/main/scala/tap/data/TapPosStats.scala
Scala
apache-2.0
1,120
package com.rikmuld.camping.registers import com.rikmuld.camping.CampingMod import com.rikmuld.camping.Library.EntityInfo._ import com.rikmuld.camping.Library.PotionInfo import com.rikmuld.camping.Library.SoundInfo._ import com.rikmuld.camping.features.blocks.trap.PotionBleeding import com.rikmuld.camping.features.entities.bear.EntityBear import com.rikmuld.camping.features.entities.camper.EntityCamper import com.rikmuld.camping.features.entities.fox.EntityFox import net.minecraft.entity.Entity import net.minecraft.entity.EntityList.EntityEggInfo import net.minecraft.init.Items import net.minecraft.item.{ItemMonsterPlacer, ItemStack} import net.minecraft.potion.Potion import net.minecraft.util.{ResourceLocation, SoundEvent} import net.minecraftforge.event.RegistryEvent import net.minecraftforge.fml.common.Mod import net.minecraftforge.fml.common.eventhandler.SubscribeEvent import net.minecraftforge.fml.common.registry.EntityEntry @Mod.EventBusSubscriber object Registry { var foxAmb, foxDeath, bearAmb, bearDeath: SoundEvent = _ var bleeding: Potion = _ var bear, fox, camper: EntityEntry = _ @SubscribeEvent def registerSounds(event: RegistryEvent.Register[SoundEvent]): Unit = { foxAmb = registerSound(FOX_SAY) foxDeath = registerSound(FOX_HURT) bearAmb = registerSound(BEAR_SAY) bearDeath = registerSound(BEAR_HURT) event.getRegistry.registerAll( foxAmb, foxDeath, bearAmb, bearDeath ) } @SubscribeEvent def registerPotions(event: RegistryEvent.Register[Potion]): Unit = { bleeding = new PotionBleeding(PotionInfo.BLEEDING) event.getRegistry.registerAll( bleeding ) } @SubscribeEvent def register(event: RegistryEvent.Register[EntityEntry]) { bear = new EntityEntry(classOf[EntityBear], BEAR).setRegistryName(BEAR) fox = new EntityEntry(classOf[EntityFox], FOX).setRegistryName(FOX) camper = new EntityEntry(classOf[EntityCamper], CAMPER).setRegistryName(CAMPER) bear.setEgg(new EntityEggInfo(bear.getRegistryName, 0x583B2D, 0xE2B572)) fox.setEgg(new 
EntityEggInfo(fox.getRegistryName, 0xE0EEEE, 0x362819)) camper.setEgg(new EntityEggInfo(camper.getRegistryName, 0x747B51, 0x70471B)) event.getRegistry.registerAll( bear, fox, camper ) } def registerSound(location: ResourceLocation):SoundEvent = new SoundEvent(location).setRegistryName(location) def mkEntityEntry[A <: Entity](entity: Class[A], name: String, color1: Int, color2: Int): EntityEntry = { val entry = mkEntityEntry(entity, name) val stack = new ItemStack(Items.SPAWN_EGG, 1) ItemMonsterPlacer.applyEntityIdToItemStack(stack, entry.getRegistryName) CampingMod.OBJ.tab.addToTab(stack) entry.setEgg(new EntityEggInfo(entry.getRegistryName, color1, color2)) entry } def mkEntityEntry[A <: Entity](entity: Class[A], name: String): EntityEntry = new EntityEntry(entity, name).setRegistryName(name) }
Rikmuld/MC-Camping
scala/com/rikmuld/camping/registers/Registry.scala
Scala
gpl-3.0
3,014
package org.littlewings.javaee7.rest import java.io.File import java.net.URL import javax.ws.rs.ApplicationPath import org.jboss.arquillian.container.test.api.{Deployment, RunAsClient} import org.jboss.arquillian.junit.Arquillian import org.jboss.arquillian.test.api.ArquillianResource import org.jboss.shrinkwrap.api.ShrinkWrap import org.jboss.shrinkwrap.api.spec.WebArchive import org.junit.Test import org.junit.runner.RunWith import org.scalatest.Matchers._ import org.scalatest.junit.JUnitSuite import scala.io.Source object CalcResourceTest { @Deployment def createDeployment: WebArchive = ShrinkWrap .create(classOf[WebArchive]) .addAsWebInfResource(new File("src/main/webapp/WEB-INF/web.xml")) } @RunWith(classOf[Arquillian]) @RunAsClient class CalcResourceTest extends JUnitSuite { private val resourcePrefix: String = classOf[JaxrsApplication] .getAnnotation(classOf[ApplicationPath]) .value @ArquillianResource private var url: URL = _ @Test def testAdd(): Unit = Source .fromURL(s"${url}${resourcePrefix}/calc/add?a=1&b=2") .mkString should be("3") }
kazuhira-r/javaee7-scala-examples
arquillian-tomcat-embedded-8/src/test/scala/org/littlewings/javaee7/rest/CalcResourceTest.scala
Scala
mit
1,133
package com.codahale.jerkson.deser import com.fasterxml.jackson.databind.JavaType import com.fasterxml.jackson.databind.{DeserializationContext, JsonDeserializer} import com.fasterxml.jackson.core.{JsonToken, JsonParser} import com.fasterxml.jackson.databind.deser.ResolvableDeserializer class OptionDeserializer(elementType: JavaType) extends JsonDeserializer[Object] with ResolvableDeserializer { var elementDeserializer: JsonDeserializer[Object] = _ override def getEmptyValue = None override def getNullValue = None def deserialize(jp: JsonParser, ctxt: DeserializationContext) = { if (jp.getCurrentToken == JsonToken.VALUE_NULL) { None } else { Some(elementDeserializer.deserialize(jp, ctxt)) } } def resolve(ctxt: DeserializationContext): Unit = { elementDeserializer = ctxt.findRootValueDeserializer(elementType) } override def isCachable = true }
rememberthemilk/jerkson
src/main/scala/com/codahale/jerkson/deser/OptionDeserializer.scala
Scala
mit
909
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.jdbc import java.math.BigDecimal import java.sql.{Date, DriverManager, SQLException, Timestamp} import java.time.{Instant, LocalDate} import java.util.{Calendar, GregorianCalendar, Properties, TimeZone} import scala.collection.JavaConverters._ import org.mockito.ArgumentMatchers._ import org.mockito.Mockito._ import org.scalatest.{BeforeAndAfter, PrivateMethodTester} import org.apache.spark.SparkException import org.apache.spark.sql.{AnalysisException, DataFrame, QueryTest, Row} import org.apache.spark.sql.catalyst.{analysis, TableIdentifier} import org.apache.spark.sql.catalyst.parser.CatalystSqlParser import org.apache.spark.sql.catalyst.plans.logical.ShowCreateTable import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateTimeTestUtils} import org.apache.spark.sql.execution.{DataSourceScanExec, ExtendedMode} import org.apache.spark.sql.execution.command.{ExplainCommand, ShowCreateTableCommand} import org.apache.spark.sql.execution.datasources.LogicalRelation import org.apache.spark.sql.execution.datasources.jdbc.{JDBCOptions, JDBCPartition, JDBCRDD, JDBCRelation, JdbcUtils} import org.apache.spark.sql.execution.metric.InputOutputMetricsHelper import 
org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.sources._ import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.types._ import org.apache.spark.util.Utils class JDBCSuite extends QueryTest with BeforeAndAfter with PrivateMethodTester with SharedSparkSession { import testImplicits._ val url = "jdbc:h2:mem:testdb0" val urlWithUserAndPass = "jdbc:h2:mem:testdb0;user=testUser;password=testPass" var conn: java.sql.Connection = null val testBytes = Array[Byte](99.toByte, 134.toByte, 135.toByte, 200.toByte, 205.toByte) ++ Array.fill(15)(0.toByte) val testH2Dialect = new JdbcDialect { override def canHandle(url: String): Boolean = url.startsWith("jdbc:h2") override def getCatalystType( sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = Some(StringType) } val testH2DialectTinyInt = new JdbcDialect { override def canHandle(url: String): Boolean = url.startsWith("jdbc:h2") override def getCatalystType( sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = { sqlType match { case java.sql.Types.TINYINT => Some(ByteType) case _ => None } } } val defaultMetadata = new MetadataBuilder().putLong("scale", 0).build() override def beforeAll(): Unit = { super.beforeAll() Utils.classForName("org.h2.Driver") // Extra properties that will be specified for our database. We need these to test // usage of parameters from OPTIONS clause in queries. 
val properties = new Properties() properties.setProperty("user", "testUser") properties.setProperty("password", "testPass") conn = DriverManager.getConnection(url, properties) conn.prepareStatement("create schema test").executeUpdate() conn.prepareStatement( "create table test.people (name TEXT(32) NOT NULL, theid INTEGER NOT NULL)").executeUpdate() conn.prepareStatement("insert into test.people values ('fred', 1)").executeUpdate() conn.prepareStatement("insert into test.people values ('mary', 2)").executeUpdate() conn.prepareStatement( "insert into test.people values ('joe ''foo'' \\"bar\\"', 3)").executeUpdate() conn.commit() sql( s""" |CREATE OR REPLACE TEMPORARY VIEW foobar |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass') """.stripMargin.replaceAll("\\n", " ")) sql( s""" |CREATE OR REPLACE TEMPORARY VIEW fetchtwo |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass', | ${JDBCOptions.JDBC_BATCH_FETCH_SIZE} '2') """.stripMargin.replaceAll("\\n", " ")) sql( s""" |CREATE OR REPLACE TEMPORARY VIEW parts |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass', | partitionColumn 'THEID', lowerBound '1', upperBound '4', numPartitions '3') """.stripMargin.replaceAll("\\n", " ")) sql( s""" |CREATE OR REPLACE TEMPORARY VIEW partsoverflow |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', dbtable 'TEST.PEOPLE', user 'testUser', password 'testPass', | partitionColumn 'THEID', lowerBound '-9223372036854775808', | upperBound '9223372036854775807', numPartitions '3') """.stripMargin.replaceAll("\\n", " ")) conn.prepareStatement("create table test.inttypes (a INT, b BOOLEAN, c TINYINT, " + "d SMALLINT, e BIGINT)").executeUpdate() conn.prepareStatement("insert into test.inttypes values (1, false, 3, 4, 1234567890123)" ).executeUpdate() conn.prepareStatement("insert into test.inttypes values 
(null, null, null, null, null)" ).executeUpdate() conn.commit() sql( s""" |CREATE OR REPLACE TEMPORARY VIEW inttypes |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', dbtable 'TEST.INTTYPES', user 'testUser', password 'testPass') """.stripMargin.replaceAll("\\n", " ")) conn.prepareStatement("create table test.strtypes (a BINARY(20), b VARCHAR(20), " + "c VARCHAR_IGNORECASE(20), d CHAR(20), e BLOB, f CLOB)").executeUpdate() val stmt = conn.prepareStatement("insert into test.strtypes values (?, ?, ?, ?, ?, ?)") stmt.setBytes(1, testBytes) stmt.setString(2, "Sensitive") stmt.setString(3, "Insensitive") stmt.setString(4, "Twenty-byte CHAR") stmt.setBytes(5, testBytes) stmt.setString(6, "I am a clob!") stmt.executeUpdate() sql( s""" |CREATE OR REPLACE TEMPORARY VIEW strtypes |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', dbtable 'TEST.STRTYPES', user 'testUser', password 'testPass') """.stripMargin.replaceAll("\\n", " ")) conn.prepareStatement("create table test.timetypes (a TIME, b DATE, c TIMESTAMP(7))" ).executeUpdate() conn.prepareStatement("insert into test.timetypes values ('12:34:56', " + "'1996-01-01', '2002-02-20 11:22:33.543543543')").executeUpdate() conn.prepareStatement("insert into test.timetypes values ('12:34:56', " + "null, '2002-02-20 11:22:33.543543543')").executeUpdate() conn.commit() sql( s""" |CREATE OR REPLACE TEMPORARY VIEW timetypes |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', dbtable 'TEST.TIMETYPES', user 'testUser', password 'testPass') """.stripMargin.replaceAll("\\n", " ")) conn.prepareStatement("CREATE TABLE test.timezone (tz TIMESTAMP WITH TIME ZONE) " + "AS SELECT '1999-01-08 04:05:06.543543543-08:00'") .executeUpdate() conn.commit() conn.prepareStatement("CREATE TABLE test.array_table (ar Integer ARRAY) " + "AS SELECT ARRAY[1, 2, 3]") .executeUpdate() conn.commit() conn.prepareStatement("create table test.flttypes (a DOUBLE, b REAL, c DECIMAL(38, 18))" ).executeUpdate() conn.prepareStatement("insert into 
test.flttypes values (" + "1.0000000000000002220446049250313080847263336181640625, " + "1.00000011920928955078125, " + "123456789012345.543215432154321)").executeUpdate() conn.commit() sql( s""" |CREATE OR REPLACE TEMPORARY VIEW flttypes |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', dbtable 'TEST.FLTTYPES', user 'testUser', password 'testPass') """.stripMargin.replaceAll("\\n", " ")) conn.prepareStatement( s""" |create table test.nulltypes (a INT, b BOOLEAN, c TINYINT, d BINARY(20), e VARCHAR(20), |f VARCHAR_IGNORECASE(20), g CHAR(20), h BLOB, i CLOB, j TIME, k DATE, l TIMESTAMP, |m DOUBLE, n REAL, o DECIMAL(38, 18)) """.stripMargin.replaceAll("\\n", " ")).executeUpdate() conn.prepareStatement("insert into test.nulltypes values (" + "null, null, null, null, null, null, null, null, null, " + "null, null, null, null, null, null)").executeUpdate() conn.commit() sql( s""" |CREATE OR REPLACE TEMPORARY VIEW nulltypes |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', dbtable 'TEST.NULLTYPES', user 'testUser', password 'testPass') """.stripMargin.replaceAll("\\n", " ")) conn.prepareStatement( "create table test.emp(name TEXT(32) NOT NULL," + " theid INTEGER, \\"Dept\\" INTEGER)").executeUpdate() conn.prepareStatement( "insert into test.emp values ('fred', 1, 10)").executeUpdate() conn.prepareStatement( "insert into test.emp values ('mary', 2, null)").executeUpdate() conn.prepareStatement( "insert into test.emp values ('joe ''foo'' \\"bar\\"', 3, 30)").executeUpdate() conn.prepareStatement( "insert into test.emp values ('kathy', null, null)").executeUpdate() conn.commit() conn.prepareStatement( "create table test.seq(id INTEGER)").executeUpdate() (0 to 6).foreach { value => conn.prepareStatement( s"insert into test.seq values ($value)").executeUpdate() } conn.prepareStatement( "insert into test.seq values (null)").executeUpdate() conn.commit() sql( s""" |CREATE OR REPLACE TEMPORARY VIEW nullparts |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', dbtable 
'TEST.EMP', user 'testUser', password 'testPass', |partitionColumn '"Dept"', lowerBound '1', upperBound '4', numPartitions '3') """.stripMargin.replaceAll("\\n", " ")) conn.prepareStatement( """create table test."mixedCaseCols" ("Name" TEXT(32), "Id" INTEGER NOT NULL)""") .executeUpdate() conn.prepareStatement("""insert into test."mixedCaseCols" values ('fred', 1)""").executeUpdate() conn.prepareStatement("""insert into test."mixedCaseCols" values ('mary', 2)""").executeUpdate() conn.prepareStatement("""insert into test."mixedCaseCols" values (null, 3)""").executeUpdate() conn.commit() sql( s""" |CREATE OR REPLACE TEMPORARY VIEW mixedCaseCols |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', dbtable 'TEST."mixedCaseCols"', user 'testUser', password 'testPass') """.stripMargin.replaceAll("\\n", " ")) conn.prepareStatement("CREATE TABLE test.partition (THEID INTEGER, `THE ID` INTEGER) " + "AS SELECT 1, 1") .executeUpdate() conn.commit() conn.prepareStatement("CREATE TABLE test.datetime (d DATE, t TIMESTAMP)").executeUpdate() conn.prepareStatement( "INSERT INTO test.datetime VALUES ('2018-07-06', '2018-07-06 05:50:00.0')").executeUpdate() conn.prepareStatement( "INSERT INTO test.datetime VALUES ('2018-07-06', '2018-07-06 08:10:08.0')").executeUpdate() conn.prepareStatement( "INSERT INTO test.datetime VALUES ('2018-07-08', '2018-07-08 13:32:01.0')").executeUpdate() conn.prepareStatement( "INSERT INTO test.datetime VALUES ('2018-07-12', '2018-07-12 09:51:15.0')").executeUpdate() conn.commit() // Untested: IDENTITY, OTHER, UUID, ARRAY, and GEOMETRY types. 
}

  // Close the shared JDBC (H2) connection opened during suite setup before
  // delegating to the parent teardown.
  override def afterAll(): Unit = {
    conn.close()
    super.afterAll()
  }

  // Check whether the tables are fetched in the expected degree of parallelism:
  // the analyzed plan must contain exactly one JDBCRelation whose partition
  // array has the expected length.
  def checkNumPartitions(df: DataFrame, expectedNumPartitions: Int): Unit = {
    val jdbcRelations = df.queryExecution.analyzed.collect {
      case LogicalRelation(r: JDBCRelation, _, _, _) => r
    }
    assert(jdbcRelations.length == 1)
    assert(jdbcRelations.head.parts.length == expectedNumPartitions,
      s"Expecting a JDBCRelation with $expectedNumPartitions partitions, but got:`$jdbcRelations`")
  }

  // Assert that the predicate was pushed down to the JDBC source: the physical
  // plan must be a whole-stage-codegen node directly wrapping a scan of
  // JDBCRelation (no residual Spark-side Filter). Returns `df` for chaining.
  private def checkPushdown(df: DataFrame): DataFrame = {
    val parentPlan = df.queryExecution.executedPlan
    // Check if SparkPlan Filter is removed in a physical plan and
    // the plan only has PhysicalRDD to scan JDBCRelation.
    assert(parentPlan.isInstanceOf[org.apache.spark.sql.execution.WholeStageCodegenExec])
    val node = parentPlan.asInstanceOf[org.apache.spark.sql.execution.WholeStageCodegenExec]
    assert(node.child.isInstanceOf[org.apache.spark.sql.execution.DataSourceScanExec])
    assert(node.child.asInstanceOf[DataSourceScanExec].nodeName.contains("JDBCRelation"))
    df
  }

  // Assert that the predicate was NOT pushed down: a Spark-side FilterExec must
  // remain in the physical plan. Returns `df` for chaining.
  private def checkNotPushdown(df: DataFrame): DataFrame = {
    val parentPlan = df.queryExecution.executedPlan
    // Check if SparkPlan Filter is not removed in a physical plan because JDBCRDD
    // cannot compile given predicates.
    assert(parentPlan.isInstanceOf[org.apache.spark.sql.execution.WholeStageCodegenExec])
    val node = parentPlan.asInstanceOf[org.apache.spark.sql.execution.WholeStageCodegenExec]
    assert(node.child.isInstanceOf[org.apache.spark.sql.execution.FilterExec])
    df
  }

  test("SELECT *") {
    assert(sql("SELECT * FROM foobar").collect().size === 3)
  }

  test("SELECT * WHERE (simple predicates)") {
    // Each comparison/IN/LIKE/IS NULL predicate below is expected to be
    // compiled by JDBCRDD and pushed to the database.
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID < 1")).collect().size == 0)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID != 2")).collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID = 1")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME = 'fred'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME <=> 'fred'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME > 'fred'")).collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME != 'fred'")).collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME IN ('mary', 'fred')"))
      .collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME NOT IN ('fred')"))
      .collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID = 1 OR NAME = 'mary'"))
      .collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE THEID = 1 OR NAME = 'mary' " +
      "AND THEID = 2")).collect().size == 2)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME LIKE 'fr%'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME LIKE '%ed'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM foobar WHERE NAME LIKE '%re%'")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM nulltypes WHERE A IS NULL")).collect().size == 1)
    assert(checkPushdown(sql("SELECT * FROM nulltypes WHERE A IS NOT NULL")).collect().size == 0)

    // This is a test to reflect discussion in SPARK-12218.
    // The older versions of spark have this kind of bugs in parquet data source.
    val df1 = sql("SELECT * FROM foobar WHERE NOT (THEID != 2) OR NOT (NAME != 'mary')")
    assert(df1.collect.toSet === Set(Row("mary", 2)))

    // SPARK-22548: Incorrect nested AND expression pushed down to JDBC data source
    val df2 = sql("SELECT * FROM foobar " +
      "WHERE (THEID > 0 AND TRIM(NAME) = 'mary') OR (NAME = 'fred')")
    assert(df2.collect.toSet === Set(Row("fred", 1), Row("mary", 2)))

    // Arithmetic expressions inside predicates cannot be compiled to SQL
    // filters, so a Spark-side Filter must remain.
    assert(checkNotPushdown(sql("SELECT * FROM foobar WHERE (THEID + 1) < 2")).collect().size == 0)
    assert(checkNotPushdown(sql("SELECT * FROM foobar WHERE (THEID + 2) != 4")).collect().size == 2)
  }

  test("SELECT COUNT(1) WHERE (predicates)") {
    // Check if an answer is correct when Filter is removed from operations such as count() which
    // does not require any columns. In some data sources, e.g., Parquet, `requiredColumns` in
    // org.apache.spark.sql.sources.interfaces is not given in logical plans, but some filters
    // are applied for columns with Filter producing wrong results. On the other hand, JDBCRDD
    // correctly handles this case by assigning `requiredColumns` properly. See PR 10427 for more
    // discussions.
    assert(sql("SELECT COUNT(1) FROM foobar WHERE NAME = 'mary'").collect.toSet === Set(Row(1)))
  }

  test("SELECT * WHERE (quoted strings)") {
    assert(sql("select * from foobar").where('NAME === "joe 'foo' \\"bar\\"").collect().size === 1)
  }

  test("SELECT first field") {
    val names = sql("SELECT NAME FROM foobar").collect().map(x => x.getString(0)).sortWith(_ < _)
    assert(names.size === 3)
    assert(names(0).equals("fred"))
    assert(names(1).equals("joe 'foo' \\"bar\\""))
    assert(names(2).equals("mary"))
  }

  test("SELECT first field when fetchsize is two") {
    // `fetchtwo` is registered with a JDBC fetch size of 2; results must be
    // identical to the default fetch size.
    val names = sql("SELECT NAME FROM fetchtwo").collect().map(x => x.getString(0)).sortWith(_ < _)
    assert(names.size === 3)
    assert(names(0).equals("fred"))
    assert(names(1).equals("joe 'foo' \\"bar\\""))
    assert(names(2).equals("mary"))
  }

  test("SELECT second field") {
    val ids = sql("SELECT THEID FROM foobar").collect().map(x => x.getInt(0)).sortWith(_ < _)
    assert(ids.size === 3)
    assert(ids(0) === 1)
    assert(ids(1) === 2)
    assert(ids(2) === 3)
  }

  test("SELECT second field when fetchsize is two") {
    val ids = sql("SELECT THEID FROM fetchtwo").collect().map(x => x.getInt(0)).sortWith(_ < _)
    assert(ids.size === 3)
    assert(ids(0) === 1)
    assert(ids(1) === 2)
    assert(ids(2) === 3)
  }

  test("SELECT * partitioned") {
    val df = sql("SELECT * FROM parts")
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length == 3)
  }

  test("SELECT WHERE (simple predicates) partitioned") {
    // Filters must not change the partitioning of the scan.
    val df1 = sql("SELECT * FROM parts WHERE THEID < 1")
    checkNumPartitions(df1, expectedNumPartitions = 3)
    assert(df1.collect().length === 0)

    val df2 = sql("SELECT * FROM parts WHERE THEID != 2")
    checkNumPartitions(df2, expectedNumPartitions = 3)
    assert(df2.collect().length === 2)

    val df3 = sql("SELECT THEID FROM parts WHERE THEID = 1")
    checkNumPartitions(df3, expectedNumPartitions = 3)
    assert(df3.collect().length === 1)
  }

  test("SELECT second field partitioned") {
    val ids = sql("SELECT THEID FROM parts").collect().map(x => x.getInt(0)).sortWith(_ < _)
    assert(ids.size === 3)
    assert(ids(0) === 1)
    assert(ids(1) === 2)
    assert(ids(2) === 3)
  }

  test("SPARK-34843: columnPartition should generate the correct stride size" +
    " and also realign the first partition for better distribution") {
    val schema = StructType(Seq(
      StructField("PartitionColumn", DateType)
    ))

    val numPartitions = 1000
    val partitionConfig = Map(
      "lowerBound" -> "1930-01-01",
      "upperBound" -> "2020-12-31",
      "numPartitions" -> numPartitions.toString,
      "partitionColumn" -> "PartitionColumn"
    )

    val partitions = JDBCRelation.columnPartition(
      schema,
      analysis.caseInsensitiveResolution,
      TimeZone.getDefault.toZoneId.toString,
      new JDBCOptions(url, "table", partitionConfig)
    )

    val firstPredicate = partitions.head.asInstanceOf[JDBCPartition].whereClause
    val lastPredicate = partitions(numPartitions - 1).asInstanceOf[JDBCPartition].whereClause

    // 152 days (exclusive) to lower bound
    assert(firstPredicate == """"PartitionColumn" < '1930-06-02' or "PartitionColumn" is null""")
    // 152 days (inclusive) to upper bound
    assert(lastPredicate == """"PartitionColumn" >= '2020-08-02'""")
  }

  test("overflow of partition bound difference does not give negative stride") {
    val df = sql("SELECT * FROM partsoverflow")
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length == 3)
  }

  test("Register JDBC query with renamed fields") {
    // Regression test for bug SPARK-7345
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW renamed
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable '(select NAME as NAME1, NAME as NAME2 from TEST.PEOPLE)',
        |user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))

    val df = sql("SELECT * FROM renamed")
    assert(df.schema.fields.size == 2)
    assert(df.schema.fields(0).name == "NAME1")
    assert(df.schema.fields(1).name == "NAME2")
  }

  test("Basic API") {
    assert(spark.read.jdbc(
      urlWithUserAndPass, "TEST.PEOPLE", new Properties()).collect().length === 3)
  }

  test("Missing partition columns") {
    // Supplying bounds/numPartitions without partitionColumn must be rejected.
    withView("tempPeople") {
      val e = intercept[IllegalArgumentException] {
        sql(
          s"""
            |CREATE OR REPLACE TEMPORARY VIEW tempPeople
            |USING org.apache.spark.sql.jdbc
            |OPTIONS (
            |  url 'jdbc:h2:mem:testdb0;user=testUser;password=testPass',
            |  dbtable 'TEST.PEOPLE',
            |  lowerBound '0',
            |  upperBound '52',
            |  numPartitions '53',
            |  fetchSize '10000' )
          """.stripMargin.replaceAll("\\n", " "))
      }.getMessage
      assert(e.contains("When reading JDBC data sources, users need to specify all or none " +
        "for the following options: 'partitionColumn', 'lowerBound', 'upperBound', and " +
        "'numPartitions'"))
    }
  }

  test("Basic API with FetchSize") {
    // Any non-negative fetch size (including 0, i.e. driver default) is valid.
    (0 to 4).foreach { size =>
      val properties = new Properties()
      properties.setProperty(JDBCOptions.JDBC_BATCH_FETCH_SIZE, size.toString)
      assert(spark.read.jdbc(
        urlWithUserAndPass, "TEST.PEOPLE", properties).collect().length === 3)
    }
  }

  test("Partitioning via JDBCPartitioningInfo API") {
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", "THEID", 0, 4, 3, new Properties())
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length === 3)
  }

  test("Partitioning via list-of-where-clauses API") {
    val parts = Array[String]("THEID < 2", "THEID >= 2")
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts, new Properties())
    checkNumPartitions(df, expectedNumPartitions = 2)
    assert(df.collect().length === 3)
  }

  test("Partitioning on column that might have null values.") {
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.EMP", "theid", 0, 4, 3, new Properties())
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length === 4)

    val df2 = spark.read.jdbc(urlWithUserAndPass, "TEST.EMP", "THEID", 0, 4, 3, new Properties())
    checkNumPartitions(df2, expectedNumPartitions = 3)
    assert(df2.collect().length === 4)

    // partitioning on a nullable quoted column
    assert(
      spark.read.jdbc(urlWithUserAndPass, "TEST.EMP", """"Dept"""", 0, 4, 3, new Properties())
        .collect().length === 4)
  }

  test("Partitioning on column where numPartitions is zero") {
    // A non-positive numPartitions collapses to a single partition.
    val res = spark.read.jdbc(
      url = urlWithUserAndPass,
      table = "TEST.seq",
      columnName = "id",
      lowerBound = 0,
      upperBound = 4,
      numPartitions = 0,
      connectionProperties = new Properties()
    )
    checkNumPartitions(res, expectedNumPartitions = 1)
    assert(res.count() === 8)
  }

  test("Partitioning on column where numPartitions are more than the number of total rows") {
    // numPartitions is capped by the bound range (5 - 1 = 4 partitions).
    val res = spark.read.jdbc(
      url = urlWithUserAndPass,
      table = "TEST.seq",
      columnName = "id",
      lowerBound = 1,
      upperBound = 5,
      numPartitions = 10,
      connectionProperties = new Properties()
    )
    checkNumPartitions(res, expectedNumPartitions = 4)
    assert(res.count() === 8)
  }

  test("Partitioning on column where lowerBound is equal to upperBound") {
    val res = spark.read.jdbc(
      url = urlWithUserAndPass,
      table = "TEST.seq",
      columnName = "id",
      lowerBound = 5,
      upperBound = 5,
      numPartitions = 4,
      connectionProperties = new Properties()
    )
    checkNumPartitions(res, expectedNumPartitions = 1)
    assert(res.count() === 8)
  }

  test("Partitioning on column where lowerBound is larger than upperBound") {
    val e = intercept[IllegalArgumentException] {
      spark.read.jdbc(
        url = urlWithUserAndPass,
        table = "TEST.seq",
        columnName = "id",
        lowerBound = 5,
        upperBound = 1,
        numPartitions = 3,
        connectionProperties = new Properties()
      )
    }.getMessage
    assert(e.contains("Operation not allowed: the lower bound of partitioning column " +
      "is larger than the upper bound. Lower bound: 5; Upper bound: 1"))
  }

  test("SELECT * on partitioned table with a nullable partition column") {
    val df = sql("SELECT * FROM nullparts")
    checkNumPartitions(df, expectedNumPartitions = 3)
    assert(df.collect().length == 4)
  }

  test("H2 integral types") {
    val rows = sql("SELECT * FROM inttypes WHERE A IS NOT NULL").collect()
    assert(rows.length === 1)
    assert(rows(0).getInt(0) === 1)
    assert(rows(0).getBoolean(1) === false)
    assert(rows(0).getInt(2) === 3)
    assert(rows(0).getInt(3) === 4)
    assert(rows(0).getLong(4) === 1234567890123L)
  }

  test("H2 null entries") {
    val rows = sql("SELECT * FROM inttypes WHERE A IS NULL").collect()
    assert(rows.length === 1)
    assert(rows(0).isNullAt(0))
    assert(rows(0).isNullAt(1))
    assert(rows(0).isNullAt(2))
    assert(rows(0).isNullAt(3))
    assert(rows(0).isNullAt(4))
  }

  test("H2 string types") {
    val rows = sql("SELECT * FROM strtypes").collect()
    assert(rows(0).getAs[Array[Byte]](0).sameElements(testBytes))
    assert(rows(0).getString(1).equals("Sensitive"))
    assert(rows(0).getString(2).equals("Insensitive"))
    assert(rows(0).getString(3).equals("Twenty-byte CHAR    "))
    assert(rows(0).getAs[Array[Byte]](4).sameElements(testBytes))
    assert(rows(0).getString(5).equals("I am a clob!"))
  }

  test("H2 time types") {
    val rows = sql("SELECT * FROM timetypes").collect()
    val cal = new GregorianCalendar(java.util.Locale.ROOT)
    // Column 0: TIME — only the time-of-day fields are meaningful.
    cal.setTime(rows(0).getAs[java.sql.Timestamp](0))
    assert(cal.get(Calendar.HOUR_OF_DAY) === 12)
    assert(cal.get(Calendar.MINUTE) === 34)
    assert(cal.get(Calendar.SECOND) === 56)
    // Column 1: DATE — only the date fields are meaningful.
    cal.setTime(rows(0).getAs[java.sql.Timestamp](1))
    assert(cal.get(Calendar.YEAR) === 1996)
    assert(cal.get(Calendar.MONTH) === 0)
    assert(cal.get(Calendar.DAY_OF_MONTH) === 1)
    // Column 2: TIMESTAMP — full precision including sub-millisecond nanos.
    cal.setTime(rows(0).getAs[java.sql.Timestamp](2))
    assert(cal.get(Calendar.YEAR) === 2002)
    assert(cal.get(Calendar.MONTH) === 1)
    assert(cal.get(Calendar.DAY_OF_MONTH) === 20)
    assert(cal.get(Calendar.HOUR) === 11)
    assert(cal.get(Calendar.MINUTE) === 22)
    assert(cal.get(Calendar.SECOND) === 33)
    assert(cal.get(Calendar.MILLISECOND) === 543)
    assert(rows(0).getAs[java.sql.Timestamp](2).getNanos === 543543000)
  }

  test("SPARK-34357: test TIME types") {
    val rows = spark.read.jdbc(
      urlWithUserAndPass, "TEST.TIMETYPES", new Properties()).collect()
    val cachedRows = spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties())
      .cache().collect()
    // TIME values are read as timestamps anchored at the epoch date.
    val expectedTimeAtEpoch = java.sql.Timestamp.valueOf("1970-01-01 12:34:56.0")
    assert(rows(0).getAs[java.sql.Timestamp](0) === expectedTimeAtEpoch)
    assert(rows(1).getAs[java.sql.Timestamp](0) === expectedTimeAtEpoch)
    assert(cachedRows(0).getAs[java.sql.Timestamp](0) === expectedTimeAtEpoch)
  }

  test("test DATE types") {
    val rows = spark.read.jdbc(
      urlWithUserAndPass, "TEST.TIMETYPES", new Properties()).collect()
    val cachedRows = spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties())
      .cache().collect()
    assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
    assert(rows(1).getAs[java.sql.Date](1) === null)
    assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
  }

  test("test DATE types in cache") {
    withTempView("mycached_date") {
      val rows =
        spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties()).collect()
      spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties())
        .cache().createOrReplaceTempView("mycached_date")
      val cachedRows = sql("select * from mycached_date").collect()
      assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
      assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
    }
  }

  test("test types for null value") {
    val rows = spark.read.jdbc(
      urlWithUserAndPass, "TEST.NULLTYPES", new Properties()).collect()
    assert((0 to 14).forall(i => rows(0).isNullAt(i)))
  }

  test("H2 floating-point types") {
    val rows = sql("SELECT * FROM flttypes").collect()
    assert(rows(0).getDouble(0) === 1.00000000000000022)
    assert(rows(0).getDouble(1) === 1.00000011920928955)
    assert(rows(0).getAs[BigDecimal](2) === new BigDecimal("123456789012345.543215432154321000"))
    assert(rows(0).schema.fields(2).dataType === DecimalType(38, 18))
    val result = sql("SELECT C FROM flttypes where C > C - 1").collect()
    assert(result(0).getAs[BigDecimal](0) ===
      new BigDecimal("123456789012345.543215432154321000"))
  }

  test("SQL query as table name") {
    sql(
      s"""
        |CREATE OR REPLACE TEMPORARY VIEW hack
        |USING org.apache.spark.sql.jdbc
        |OPTIONS (url '$url', dbtable '(SELECT B, B*B FROM TEST.FLTTYPES)',
        |         user 'testUser', password 'testPass')
      """.stripMargin.replaceAll("\\n", " "))
    val rows = sql("SELECT * FROM hack").collect()
    assert(rows(0).getDouble(0) === 1.00000011920928955) // Yes, I meant ==.
    // For some reason, H2 computes this square incorrectly...
    assert(math.abs(rows(0).getDouble(1) - 1.00000023841859331) < 1e-12)
  }

  test("Remap types via JdbcDialects") {
    // testH2Dialect maps every column to StringType.
    JdbcDialects.registerDialect(testH2Dialect)
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", new Properties())
    assert(!df.schema.exists(_.dataType != org.apache.spark.sql.types.StringType))
    val rows = df.collect()
    assert(rows(0).get(0).isInstanceOf[String])
    assert(rows(0).get(1).isInstanceOf[String])
    JdbcDialects.unregisterDialect(testH2Dialect)
  }

  test("Map TINYINT to ByteType via JdbcDialects") {
    JdbcDialects.registerDialect(testH2DialectTinyInt)
    val df = spark.read.jdbc(urlWithUserAndPass, "test.inttypes", new Properties())
    val rows = df.collect()
    assert(rows.length === 2)
    assert(rows(0).get(2).isInstanceOf[Byte])
    assert(rows(0).getByte(2) === 3)
    assert(rows(1).isNullAt(2))
    JdbcDialects.unregisterDialect(testH2DialectTinyInt)
  }

  test("Default jdbc dialect registration") {
    assert(JdbcDialects.get("jdbc:mysql://127.0.0.1/db") == MySQLDialect)
    assert(JdbcDialects.get("jdbc:postgresql://127.0.0.1/db") == PostgresDialect)
    assert(JdbcDialects.get("jdbc:db2://127.0.0.1/db") == DB2Dialect)
    assert(JdbcDialects.get("jdbc:sqlserver://127.0.0.1/db") == MsSqlServerDialect)
    assert(JdbcDialects.get("jdbc:derby:db") == DerbyDialect)
    // Unknown URLs fall back to the no-op dialect.
    assert(JdbcDialects.get("test.invalid") == NoopDialect)
  }

  test("quote column names by jdbc dialect") {
    val MySQL = JdbcDialects.get("jdbc:mysql://127.0.0.1/db")
    val Postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
    val Derby = JdbcDialects.get("jdbc:derby:db")

    val columns = Seq("abc", "key")
    val MySQLColumns = columns.map(MySQL.quoteIdentifier(_))
    val PostgresColumns = columns.map(Postgres.quoteIdentifier(_))
    val DerbyColumns = columns.map(Derby.quoteIdentifier(_))
    assert(MySQLColumns === Seq("`abc`", "`key`"))
    assert(PostgresColumns === Seq(""""abc"""", """"key""""))
    assert(DerbyColumns === Seq(""""abc"""", """"key""""))
  }

  test("compile filters") {
    // Drive JDBCRDD's private compileFilter via ScalaTest's PrivateMethodTester.
    val compileFilter = PrivateMethod[Option[String]](Symbol("compileFilter"))
    def doCompileFilter(f: Filter): String =
      JDBCRDD invokePrivate compileFilter(f, JdbcDialects.get("jdbc:")) getOrElse ("")
    // Both bare and pre-quoted column names must compile to the same SQL.
    Seq(("col0", "col1"), ("`col0`", "`col1`")).foreach { case (col0, col1) =>
      assert(doCompileFilter(EqualTo(col0, 3)) === """"col0" = 3""")
      assert(doCompileFilter(Not(EqualTo(col1, "abc"))) === """(NOT ("col1" = 'abc'))""")
      assert(doCompileFilter(And(EqualTo(col0, 0), EqualTo(col1, "def")))
        === """("col0" = 0) AND ("col1" = 'def')""")
      assert(doCompileFilter(Or(EqualTo(col0, 2), EqualTo(col1, "ghi")))
        === """("col0" = 2) OR ("col1" = 'ghi')""")
      assert(doCompileFilter(LessThan(col0, 5)) === """"col0" < 5""")
      assert(doCompileFilter(LessThan(col0, Timestamp.valueOf("1995-11-21 00:00:00.0")))
        === """"col0" < '1995-11-21 00:00:00.0'""")
      assert(doCompileFilter(LessThan(col0, Date.valueOf("1983-08-04")))
        === """"col0" < '1983-08-04'""")
      assert(doCompileFilter(LessThanOrEqual(col0, 5)) === """"col0" <= 5""")
      assert(doCompileFilter(GreaterThan(col0, 3)) === """"col0" > 3""")
      assert(doCompileFilter(GreaterThanOrEqual(col0, 3)) === """"col0" >= 3""")
      assert(doCompileFilter(In(col1, Array("jkl"))) === """"col1" IN ('jkl')""")
      // An empty IN list is always false, except that NULL input yields NULL.
      assert(doCompileFilter(In(col1, Array.empty)) ===
        """CASE WHEN "col1" IS NULL THEN NULL ELSE FALSE END""")
      assert(doCompileFilter(Not(In(col1, Array("mno", "pqr"))))
        === """(NOT ("col1" IN ('mno', 'pqr')))""")
      assert(doCompileFilter(IsNull(col1)) === """"col1" IS NULL""")
      assert(doCompileFilter(IsNotNull(col1)) === """"col1" IS NOT NULL""")
      assert(doCompileFilter(And(EqualNullSafe(col0, "abc"), EqualTo(col1, "def")))
        === """((NOT ("col0" != 'abc' OR "col0" IS NULL OR 'abc' IS NULL) """
          + """OR ("col0" IS NULL AND 'abc' IS NULL))) AND ("col1" = 'def')""")
    }
    // Nested columns are not supported by filter pushdown.
    val e = intercept[AnalysisException] {
      doCompileFilter(EqualTo("col0.nested", 3))
    }.getMessage
    assert(e.contains("Filter push down does not support nested column: col0.nested"))
  }

  test("Dialect unregister") {
    JdbcDialects.unregisterDialect(H2Dialect)
    try {
      JdbcDialects.registerDialect(testH2Dialect)
      JdbcDialects.unregisterDialect(testH2Dialect)
      assert(JdbcDialects.get(urlWithUserAndPass) == NoopDialect)
    } finally {
      // Restore the built-in H2 dialect for the rest of the suite.
      JdbcDialects.registerDialect(H2Dialect)
    }
  }

  test("Aggregated dialects") {
    // First matching dialect in the list wins for each callback.
    val agg = new AggregatedDialect(List(new JdbcDialect {
      override def canHandle(url: String) : Boolean = url.startsWith("jdbc:h2:")
      override def getCatalystType(
          sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] =
        if (sqlType % 2 == 0) {
          Some(LongType)
        } else {
          None
        }
      override def quoteIdentifier(colName: String): String = {
        s"My $colName quoteIdentifier"
      }
      override def getTableExistsQuery(table: String): String = {
        s"My $table Table"
      }
      override def getSchemaQuery(table: String): String = {
        s"My $table Schema"
      }
      override def isCascadingTruncateTable(): Option[Boolean] = Some(true)
    }, testH2Dialect))
    assert(agg.canHandle("jdbc:h2:xxx"))
    assert(!agg.canHandle("jdbc:h2"))
    assert(agg.getCatalystType(0, "", 1, null) === Some(LongType))
    assert(agg.getCatalystType(1, "", 1, null) === Some(StringType))
    assert(agg.isCascadingTruncateTable() === Some(true))
    assert(agg.quoteIdentifier("Dummy") === "My Dummy quoteIdentifier")
    assert(agg.getTableExistsQuery("Dummy") === "My Dummy Table")
    assert(agg.getSchemaQuery("Dummy") === "My Dummy Schema")
  }

  test("Aggregated dialects: isCascadingTruncateTable") {
    def genDialect(cascadingTruncateTable: Option[Boolean]): JdbcDialect = new JdbcDialect {
      override def canHandle(url: String): Boolean = true
      override def getCatalystType(
          sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = None
      override def isCascadingTruncateTable(): Option[Boolean] = cascadingTruncateTable
    }

    def testDialects(cascadings: List[Option[Boolean]], expected: Option[Boolean]): Unit = {
      val dialects = cascadings.map(genDialect(_))
      val agg = new AggregatedDialect(dialects)
      assert(agg.isCascadingTruncateTable() === expected)
    }

    testDialects(List(Some(true), Some(false), None), Some(true))
    testDialects(List(Some(true), Some(true), None), Some(true))
    testDialects(List(Some(false), Some(false), None), None)
    testDialects(List(Some(true), Some(true)), Some(true))
    testDialects(List(Some(false), Some(false)), Some(false))
    testDialects(List(None, None), None)
  }

  test("DB2Dialect type mapping") {
    val db2Dialect = JdbcDialects.get("jdbc:db2://127.0.0.1/db")
    assert(db2Dialect.getJDBCType(StringType).map(_.databaseTypeDefinition).get == "CLOB")
    assert(db2Dialect.getJDBCType(BooleanType).map(_.databaseTypeDefinition).get == "CHAR(1)")
    assert(db2Dialect.getJDBCType(ShortType).map(_.databaseTypeDefinition).get == "SMALLINT")
    assert(db2Dialect.getJDBCType(ByteType).map(_.databaseTypeDefinition).get == "SMALLINT")
    // test db2 dialect mappings on read
    assert(db2Dialect.getCatalystType(java.sql.Types.REAL, "REAL", 1, null) == Option(FloatType))
    assert(db2Dialect.getCatalystType(java.sql.Types.OTHER, "DECFLOAT", 1, null) ==
      Option(DecimalType(38, 18)))
    assert(db2Dialect.getCatalystType(java.sql.Types.OTHER, "XML", 1, null) == Option(StringType))
    assert(db2Dialect.getCatalystType(java.sql.Types.OTHER, "TIMESTAMP WITH TIME ZONE", 1, null) ==
      Option(TimestampType))
  }

  test("MySQLDialect catalyst type mapping") {
    val mySqlDialect = JdbcDialects.get("jdbc:mysql")
    val metadata = new MetadataBuilder()
    // BIT(n) with n > 1 is surfaced as LongType and tagged "binarylong".
    assert(mySqlDialect.getCatalystType(java.sql.Types.VARBINARY, "BIT", 2, metadata) ==
      Some(LongType))
    assert(metadata.build().contains("binarylong"))
    assert(mySqlDialect.getCatalystType(java.sql.Types.VARBINARY, "BIT", 1, metadata) == None)
    assert(mySqlDialect.getCatalystType(java.sql.Types.BIT, "TINYINT", 1, metadata) ==
      Some(BooleanType))
  }

  test("SPARK-35446: MySQLDialect type mapping of float") {
    val mySqlDialect = JdbcDialects.get("jdbc:mysql://127.0.0.1/db")
    assert(mySqlDialect.getJDBCType(FloatType).map(_.databaseTypeDefinition).get == "FLOAT")
  }

  test("PostgresDialect type mapping") {
    val Postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
    val md = new MetadataBuilder().putLong("scale", 0)
    assert(Postgres.getCatalystType(java.sql.Types.OTHER, "json", 1, null) === Some(StringType))
    assert(Postgres.getCatalystType(java.sql.Types.OTHER, "jsonb", 1, null) === Some(StringType))
    assert(Postgres.getCatalystType(java.sql.Types.ARRAY, "_numeric", 0, md) ==
      Some(ArrayType(DecimalType.SYSTEM_DEFAULT)))
    assert(Postgres.getCatalystType(java.sql.Types.ARRAY, "_bpchar", 64, md) ==
      Some(ArrayType(StringType)))
    assert(Postgres.getJDBCType(FloatType).map(_.databaseTypeDefinition).get == "FLOAT4")
    assert(Postgres.getJDBCType(DoubleType).map(_.databaseTypeDefinition).get == "FLOAT8")
    assert(Postgres.getJDBCType(ByteType).map(_.databaseTypeDefinition).get == "SMALLINT")
  }

  test("DerbyDialect jdbc type mapping") {
    val derbyDialect = JdbcDialects.get("jdbc:derby:db")
    assert(derbyDialect.getJDBCType(StringType).map(_.databaseTypeDefinition).get == "CLOB")
    assert(derbyDialect.getJDBCType(ByteType).map(_.databaseTypeDefinition).get == "SMALLINT")
    assert(derbyDialect.getJDBCType(BooleanType).map(_.databaseTypeDefinition).get == "BOOLEAN")
  }

  test("OracleDialect jdbc type mapping") {
    val oracleDialect = JdbcDialects.get("jdbc:oracle")
    val metadata = new MetadataBuilder().putString("name", "test_column").putLong("scale", -127)
    assert(oracleDialect.getCatalystType(java.sql.Types.NUMERIC, "float", 1, metadata) ==
      Some(DecimalType(DecimalType.MAX_PRECISION, 10)))
    assert(oracleDialect.getCatalystType(java.sql.Types.NUMERIC, "numeric", 0, null) ==
      Some(DecimalType(DecimalType.MAX_PRECISION, 10)))
    // Oracle-specific type codes for binary floats/doubles and TZ timestamps.
    assert(oracleDialect.getCatalystType(OracleDialect.BINARY_FLOAT, "BINARY_FLOAT", 0, null) ==
      Some(FloatType))
    assert(oracleDialect.getCatalystType(OracleDialect.BINARY_DOUBLE, "BINARY_DOUBLE", 0, null) ==
      Some(DoubleType))
    assert(oracleDialect.getCatalystType(OracleDialect.TIMESTAMPTZ, "TIMESTAMP", 0, null) ==
      Some(TimestampType))
  }

  test("MsSqlServerDialect jdbc type mapping") {
    val msSqlServerDialect = JdbcDialects.get("jdbc:sqlserver")
    assert(msSqlServerDialect.getJDBCType(TimestampType).map(_.databaseTypeDefinition).get ==
      "DATETIME")
    assert(msSqlServerDialect.getJDBCType(StringType).map(_.databaseTypeDefinition).get ==
      "NVARCHAR(MAX)")
    assert(msSqlServerDialect.getJDBCType(BooleanType).map(_.databaseTypeDefinition).get ==
      "BIT")
    assert(msSqlServerDialect.getJDBCType(BinaryType).map(_.databaseTypeDefinition).get ==
      "VARBINARY(MAX)")
    // ShortType mapping depends on the legacy numeric-mapping flag.
    Seq(true, false).foreach { flag =>
      withSQLConf(SQLConf.LEGACY_MSSQLSERVER_NUMERIC_MAPPING_ENABLED.key -> s"$flag") {
        if (SQLConf.get.legacyMsSqlServerNumericMappingEnabled) {
          assert(msSqlServerDialect.getJDBCType(ShortType).map(_.databaseTypeDefinition).isEmpty)
        } else {
          assert(msSqlServerDialect.getJDBCType(ShortType).map(_.databaseTypeDefinition).get ==
            "SMALLINT")
        }
      }
    }
  }

  test("SPARK-28152 MsSqlServerDialect catalyst type mapping") {
    val msSqlServerDialect = JdbcDialects.get("jdbc:sqlserver")
    val metadata = new MetadataBuilder().putLong("scale", 1)
    Seq(true, false).foreach { flag =>
      withSQLConf(SQLConf.LEGACY_MSSQLSERVER_NUMERIC_MAPPING_ENABLED.key -> s"$flag") {
        if (SQLConf.get.legacyMsSqlServerNumericMappingEnabled) {
          assert(msSqlServerDialect.getCatalystType(java.sql.Types.SMALLINT, "SMALLINT", 1,
            metadata).isEmpty)
          assert(msSqlServerDialect.getCatalystType(java.sql.Types.REAL, "REAL", 1,
            metadata).isEmpty)
        } else {
          assert(msSqlServerDialect.getCatalystType(java.sql.Types.SMALLINT, "SMALLINT", 1,
            metadata).get == ShortType)
          assert(msSqlServerDialect.getCatalystType(java.sql.Types.REAL, "REAL", 1,
            metadata).get == FloatType)
        }
      }
    }
  }

  test("table exists query by jdbc dialect") {
    val MySQL = JdbcDialects.get("jdbc:mysql://127.0.0.1/db")
    val Postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
    val db2 = JdbcDialects.get("jdbc:db2://127.0.0.1/db")
    val h2 = JdbcDialects.get(url)
    val derby = JdbcDialects.get("jdbc:derby:db")
    val table = "weblogs"
    val defaultQuery = s"SELECT * FROM $table WHERE 1=0"
    val limitQuery = s"SELECT 1 FROM $table LIMIT 1"
    assert(MySQL.getTableExistsQuery(table) == limitQuery)
    assert(Postgres.getTableExistsQuery(table) == limitQuery)
    assert(db2.getTableExistsQuery(table) == defaultQuery)
    assert(h2.getTableExistsQuery(table) == defaultQuery)
    assert(derby.getTableExistsQuery(table) == defaultQuery)
  }

  test("truncate table query by jdbc dialect") {
    val mysql = JdbcDialects.get("jdbc:mysql://127.0.0.1/db")
    val postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
    val db2 = JdbcDialects.get("jdbc:db2://127.0.0.1/db")
    val h2 = JdbcDialects.get(url)
    val derby = JdbcDialects.get("jdbc:derby:db")
    val oracle = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
    val teradata = JdbcDialects.get("jdbc:teradata://127.0.0.1/db")

    val table = "weblogs"
    val defaultQuery = s"TRUNCATE TABLE $table"
    val postgresQuery = s"TRUNCATE TABLE ONLY $table"
    val teradataQuery = s"DELETE FROM $table ALL"
    val db2Query = s"TRUNCATE TABLE $table IMMEDIATE"

    Seq(mysql, h2, derby).foreach{ dialect =>
      assert(dialect.getTruncateQuery(table, Some(true)) == defaultQuery)
    }

    assert(postgres.getTruncateQuery(table) == postgresQuery)
    assert(oracle.getTruncateQuery(table) == defaultQuery)
    assert(teradata.getTruncateQuery(table) == teradataQuery)
    assert(db2.getTruncateQuery(table) == db2Query)
  }

  test("SPARK-22880: Truncate table with CASCADE by jdbc dialect") {
    // cascade in a truncate should only be applied for databases that support this,
    // even if the parameter is passed.
    val mysql = JdbcDialects.get("jdbc:mysql://127.0.0.1/db")
    val postgres = JdbcDialects.get("jdbc:postgresql://127.0.0.1/db")
    val db2 = JdbcDialects.get("jdbc:db2://127.0.0.1/db")
    val h2 = JdbcDialects.get(url)
    val derby = JdbcDialects.get("jdbc:derby:db")
    val oracle = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
    val teradata = JdbcDialects.get("jdbc:teradata://127.0.0.1/db")

    val table = "weblogs"
    val defaultQuery = s"TRUNCATE TABLE $table"
    val postgresQuery = s"TRUNCATE TABLE ONLY $table CASCADE"
    val oracleQuery = s"TRUNCATE TABLE $table CASCADE"
    val teradataQuery = s"DELETE FROM $table ALL"
    val db2Query = s"TRUNCATE TABLE $table IMMEDIATE"

    Seq(mysql, h2, derby).foreach{ dialect =>
      assert(dialect.getTruncateQuery(table, Some(true)) == defaultQuery)
    }
    assert(postgres.getTruncateQuery(table, Some(true)) == postgresQuery)
    assert(oracle.getTruncateQuery(table, Some(true)) == oracleQuery)
    assert(teradata.getTruncateQuery(table, Some(true)) == teradataQuery)
    assert(db2.getTruncateQuery(table, Some(true)) == db2Query)
  }

  test("Test DataFrame.where for Date and Timestamp") {
    // Regression test for bug SPARK-11788
    val timestamp = java.sql.Timestamp.valueOf("2001-02-20 11:22:33.543543");
    val date = java.sql.Date.valueOf("1995-01-01")
    val jdbcDf = spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties())
    val rows = jdbcDf.where($"B" > date && $"C" > timestamp).collect()
    assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
    assert(rows(0).getAs[java.sql.Timestamp](2)
      === java.sql.Timestamp.valueOf("2002-02-20 11:22:33.543543"))
  }

  test("SPARK-33867: Test DataFrame.where for LocalDate and Instant") {
    // Test for SPARK-33867
    val timestamp = Instant.parse("2001-02-20T11:22:33.543543Z")
    val date = LocalDate.parse("1995-01-01")
    withSQLConf(SQLConf.DATETIME_JAVA8API_ENABLED.key -> "true") {
      val jdbcDf = spark.read.jdbc(urlWithUserAndPass, "TEST.TIMETYPES", new Properties())
      val rows = jdbcDf.where($"B" > date && $"C" > timestamp).collect()
      assert(rows(0).getAs[LocalDate](1) === LocalDate.parse("1996-01-01"))
      // 8 hour difference since saved time was America/Los_Angeles and Instant is GMT
      assert(rows(0).getAs[Instant](2) === Instant.parse("2002-02-20T19:22:33.543543Z"))
    }
  }

  test("test credentials in the properties are not in plan output") {
    val df = sql("SELECT * FROM parts")
    val explain = ExplainCommand(df.queryExecution.logical, ExtendedMode)
    spark.sessionState.executePlan(explain).executedPlan.executeCollect().foreach { r =>
      assert(!List("testPass", "testUser").exists(r.toString.contains))
    }
    // test the JdbcRelation toString output
    df.queryExecution.analyzed.collect {
      case r: LogicalRelation =>
        assert(r.relation.toString == "JDBCRelation(TEST.PEOPLE) [numPartitions=3]")
    }
  }

  test("test credentials in the connection url are not in the plan output") {
    val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", new Properties())
    val explain = ExplainCommand(df.queryExecution.logical, ExtendedMode)
    spark.sessionState.executePlan(explain).executedPlan.executeCollect().foreach { r =>
      assert(!List("testPass", "testUser").exists(r.toString.contains))
    }
  }

  test("hide credentials in create and describe a persistent/temp table") {
    val password = "testPass"
    val tableName = "tab1"
    Seq("TABLE", "TEMPORARY VIEW").foreach { tableType =>
      withTable(tableName) {
        val df = sql(
          s"""
             |CREATE $tableType $tableName
             |USING org.apache.spark.sql.jdbc
             |OPTIONS (
             | url '$urlWithUserAndPass',
             | dbtable 'TEST.PEOPLE',
             | user 'testUser',
             | password '$password')
           """.stripMargin)

        val explain = ExplainCommand(df.queryExecution.logical, ExtendedMode)
        spark.sessionState.executePlan(explain).executedPlan.executeCollect().foreach { r =>
          assert(!r.toString.contains(password))
        }

        sql(s"DESC FORMATTED $tableName").collect().foreach { r =>
          assert(!r.toString().contains(password))
        }
      }
    }
  }

  test("Hide credentials in show create table") {
    val userName = "testUser"
    val password = "testPass"
    val tableName = "tab1"
    val dbTable = "TEST.PEOPLE"
    withTable(tableName) {
      sql(
        s"""
           |CREATE TABLE $tableName
           |USING org.apache.spark.sql.jdbc
           |OPTIONS (
           | url '$urlWithUserAndPass',
           | dbtable '$dbTable',
           | user '$userName',
           | password '$password')
         """.stripMargin)

      val show = ShowCreateTableCommand(TableIdentifier(tableName),
        ShowCreateTable.getoutputAttrs)
      spark.sessionState.executePlan(show).executedPlan.executeCollect().foreach { r =>
        assert(!r.toString.contains(password))
        assert(r.toString.contains(dbTable))
        assert(r.toString.contains(userName))
      }

      sql(s"SHOW CREATE TABLE $tableName").collect().foreach { r =>
        assert(!r.toString.contains(password))
        assert(r.toString.contains(dbTable))
        assert(r.toString.contains(userName))
      }

      // With a wider redaction pattern, dbtable and user are hidden as well.
      withSQLConf(SQLConf.SQL_OPTIONS_REDACTION_PATTERN.key -> "(?i)dbtable|user") {
        spark.sessionState.executePlan(show).executedPlan.executeCollect().foreach { r =>
          assert(!r.toString.contains(password))
          assert(!r.toString.contains(dbTable))
          assert(!r.toString.contains(userName))
        }
      }
    }
  }

  test("Replace CatalogUtils.maskCredentials with SQLConf.get.redactOptions") {
    val password = "testPass"
    val tableName = "tab1"
    withTable(tableName) {
      sql(
        s"""
           |CREATE TABLE $tableName
           |USING org.apache.spark.sql.jdbc
           |OPTIONS (
           | url '$urlWithUserAndPass',
           | dbtable 'TEST.PEOPLE',
           | user 'testUser',
           | password '$password')
         """.stripMargin)

      val storageProps = sql(s"DESC FORMATTED $tableName")
        .filter("col_name = 'Storage Properties'")
        .select("data_type").collect()
      assert(storageProps.length === 1)
      storageProps.foreach { r =>
        assert(r.getString(0).contains(s"url=${Utils.REDACTION_REPLACEMENT_TEXT}"))
        assert(r.getString(0).contains(s"password=${Utils.REDACTION_REPLACEMENT_TEXT}"))
      }

      val information = sql(s"SHOW TABLE EXTENDED LIKE '$tableName'")
        .select("information").collect()
      assert(information.length === 1)
      information.foreach { r =>
        assert(r.getString(0).contains(s"url=${Utils.REDACTION_REPLACEMENT_TEXT}"))
        assert(r.getString(0).contains(s"password=${Utils.REDACTION_REPLACEMENT_TEXT}"))
      }

      val createTabStmt = sql(s"SHOW CREATE TABLE $tableName")
        .select("createtab_stmt").collect()
      assert(createTabStmt.length === 1)
      createTabStmt.foreach { r =>
        assert(r.getString(0).contains(s"'url' = '${Utils.REDACTION_REPLACEMENT_TEXT}'"))
        assert(r.getString(0).contains(s"'password' = '${Utils.REDACTION_REPLACEMENT_TEXT}'"))
      }
    }
  }

  test("SPARK 12941: The data type mapping for StringType to Oracle") {
    val oracleDialect = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
    assert(oracleDialect.getJDBCType(StringType).
      map(_.databaseTypeDefinition).get == "VARCHAR2(255)")
  }

  test("SPARK-16625: General data types to be mapped to Oracle") {
    // Falls back to the common JDBC type when the dialect has no override.
    def getJdbcType(dialect: JdbcDialect, dt: DataType): String = {
      dialect.getJDBCType(dt).orElse(JdbcUtils.getCommonJDBCType(dt)).
        map(_.databaseTypeDefinition).get
    }

    val oracleDialect = JdbcDialects.get("jdbc:oracle://127.0.0.1/db")
    assert(getJdbcType(oracleDialect, BooleanType) == "NUMBER(1)")
    assert(getJdbcType(oracleDialect, IntegerType) == "NUMBER(10)")
    assert(getJdbcType(oracleDialect, LongType) == "NUMBER(19)")
    assert(getJdbcType(oracleDialect, FloatType) == "NUMBER(19, 4)")
    assert(getJdbcType(oracleDialect, DoubleType) == "NUMBER(19, 4)")
    assert(getJdbcType(oracleDialect, ByteType) == "NUMBER(3)")
    assert(getJdbcType(oracleDialect, ShortType) == "NUMBER(5)")
    assert(getJdbcType(oracleDialect, StringType) == "VARCHAR2(255)")
    assert(getJdbcType(oracleDialect, BinaryType) == "BLOB")
    assert(getJdbcType(oracleDialect, DateType) == "DATE")
    assert(getJdbcType(oracleDialect, TimestampType) == "TIMESTAMP")
  }

  // Helper: run a query and assert it returns no rows.
  private def assertEmptyQuery(sqlString: String): Unit = {
    assert(sql(sqlString).collect().isEmpty)
  }

  test("SPARK-15916: JDBC filter operator push down should respect operator precedence") {
    val TRUE = "NAME != 'non_exists'"
    val
FALSE1 = "THEID > 1000000000" val FALSE2 = "THEID < -1000000000" assertEmptyQuery(s"SELECT * FROM foobar WHERE ($TRUE OR $FALSE1) AND $FALSE2") assertEmptyQuery(s"SELECT * FROM foobar WHERE $FALSE1 AND ($FALSE2 OR $TRUE)") // Tests JDBCPartition whereClause clause push down. withTempView("tempFrame") { val jdbcPartitionWhereClause = s"$FALSE1 OR $TRUE" val df = spark.read.jdbc( urlWithUserAndPass, "TEST.PEOPLE", predicates = Array[String](jdbcPartitionWhereClause), new Properties()) df.createOrReplaceTempView("tempFrame") assertEmptyQuery(s"SELECT * FROM tempFrame where $FALSE2") } } test("SPARK-16387: Reserved SQL words are not escaped by JDBC writer") { val df = spark.createDataset(Seq("a", "b", "c")).toDF("order") val schema = JdbcUtils.schemaString( df.schema, df.sqlContext.conf.caseSensitiveAnalysis, "jdbc:mysql://localhost:3306/temp") assert(schema.contains("`order` TEXT")) } test("SPARK-18141: Predicates on quoted column names in the jdbc data source") { assert(sql("SELECT * FROM mixedCaseCols WHERE Id < 1").collect().size == 0) assert(sql("SELECT * FROM mixedCaseCols WHERE Id <= 1").collect().size == 1) assert(sql("SELECT * FROM mixedCaseCols WHERE Id > 1").collect().size == 2) assert(sql("SELECT * FROM mixedCaseCols WHERE Id >= 1").collect().size == 3) assert(sql("SELECT * FROM mixedCaseCols WHERE Id = 1").collect().size == 1) assert(sql("SELECT * FROM mixedCaseCols WHERE Id != 2").collect().size == 2) assert(sql("SELECT * FROM mixedCaseCols WHERE Id <=> 2").collect().size == 1) assert(sql("SELECT * FROM mixedCaseCols WHERE Name LIKE 'fr%'").collect().size == 1) assert(sql("SELECT * FROM mixedCaseCols WHERE Name LIKE '%ed'").collect().size == 1) assert(sql("SELECT * FROM mixedCaseCols WHERE Name LIKE '%re%'").collect().size == 1) assert(sql("SELECT * FROM mixedCaseCols WHERE Name IS NULL").collect().size == 1) assert(sql("SELECT * FROM mixedCaseCols WHERE Name IS NOT NULL").collect().size == 2) assert(sql("SELECT * FROM 
mixedCaseCols").filter($"Name".isin()).collect().size == 0) assert(sql("SELECT * FROM mixedCaseCols WHERE Name IN ('mary', 'fred')").collect().size == 2) assert(sql("SELECT * FROM mixedCaseCols WHERE Name NOT IN ('fred')").collect().size == 1) assert(sql("SELECT * FROM mixedCaseCols WHERE Id = 1 OR Name = 'mary'").collect().size == 2) assert(sql("SELECT * FROM mixedCaseCols WHERE Name = 'mary' AND Id = 2").collect().size == 1) } test("SPARK-18419: Fix `asConnectionProperties` to filter case-insensitively") { val parameters = Map( "url" -> "jdbc:mysql://localhost:3306/temp", "dbtable" -> "t1", "numPartitions" -> "10") assert(new JDBCOptions(parameters).asConnectionProperties.isEmpty) assert(new JDBCOptions(CaseInsensitiveMap(parameters)).asConnectionProperties.isEmpty) } test("SPARK-16848: jdbc API throws an exception for user specified schema") { val schema = StructType(Seq(StructField("name", StringType, false, defaultMetadata), StructField("theid", IntegerType, false, defaultMetadata))) val parts = Array[String]("THEID < 2", "THEID >= 2") val e1 = intercept[AnalysisException] { spark.read.schema(schema).jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts, new Properties()) }.getMessage assert(e1.contains("User specified schema not supported with `jdbc`")) val e2 = intercept[AnalysisException] { spark.read.schema(schema).jdbc(urlWithUserAndPass, "TEST.PEOPLE", new Properties()) }.getMessage assert(e2.contains("User specified schema not supported with `jdbc`")) } test("jdbc API support custom schema") { val parts = Array[String]("THEID < 2", "THEID >= 2") val customSchema = "NAME STRING, THEID INT" val props = new Properties() props.put("customSchema", customSchema) val df = spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts, props) assert(df.schema.size === 2) val expectedSchema = new StructType(CatalystSqlParser.parseTableSchema(customSchema).map( f => StructField(f.name, f.dataType, f.nullable, defaultMetadata)).toArray) assert(df.schema === expectedSchema) 
assert(df.count() === 3) } test("jdbc API custom schema DDL-like strings.") { withTempView("people_view") { val customSchema = "NAME STRING, THEID INT" sql( s""" |CREATE TEMPORARY VIEW people_view |USING org.apache.spark.sql.jdbc |OPTIONS (uRl '$url', DbTaBlE 'TEST.PEOPLE', User 'testUser', PassWord 'testPass', |customSchema '$customSchema') """.stripMargin.replaceAll("\\n", " ")) val df = sql("select * from people_view") assert(df.schema.length === 2) val expectedSchema = new StructType(CatalystSqlParser.parseTableSchema(customSchema) .map(f => StructField(f.name, f.dataType, f.nullable, defaultMetadata)).toArray) assert(df.schema === expectedSchema) assert(df.count() === 3) } } test("SPARK-15648: teradataDialect StringType data mapping") { val teradataDialect = JdbcDialects.get("jdbc:teradata://127.0.0.1/db") assert(teradataDialect.getJDBCType(StringType). map(_.databaseTypeDefinition).get == "VARCHAR(255)") } test("SPARK-15648: teradataDialect BooleanType data mapping") { val teradataDialect = JdbcDialects.get("jdbc:teradata://127.0.0.1/db") assert(teradataDialect.getJDBCType(BooleanType). 
map(_.databaseTypeDefinition).get == "CHAR(1)") } test("Checking metrics correctness with JDBC") { val foobarCnt = spark.table("foobar").count() val res = InputOutputMetricsHelper.run(sql("SELECT * FROM foobar").toDF()) assert(res === (foobarCnt, 0L, foobarCnt) :: Nil) } test("unsupported types") { var e = intercept[SQLException] { spark.read.jdbc(urlWithUserAndPass, "TEST.TIMEZONE", new Properties()).collect() }.getMessage assert(e.contains("Unsupported type TIMESTAMP_WITH_TIMEZONE")) e = intercept[SQLException] { spark.read.jdbc(urlWithUserAndPass, "TEST.ARRAY_TABLE", new Properties()).collect() }.getMessage assert(e.contains("Unsupported type ARRAY")) } test("SPARK-19318: Connection properties keys should be case-sensitive.") { def testJdbcOptions(options: JDBCOptions): Unit = { // Spark JDBC data source options are case-insensitive assert(options.tableOrQuery == "t1") // When we convert it to properties, it should be case-sensitive. assert(options.asProperties.size == 3) assert(options.asProperties.get("customkey") == null) assert(options.asProperties.get("customKey") == "a-value") assert(options.asConnectionProperties.size == 1) assert(options.asConnectionProperties.get("customkey") == null) assert(options.asConnectionProperties.get("customKey") == "a-value") } val parameters = Map("url" -> url, "dbTAblE" -> "t1", "customKey" -> "a-value") testJdbcOptions(new JDBCOptions(parameters)) testJdbcOptions(new JDBCOptions(CaseInsensitiveMap(parameters))) // test add/remove key-value from the case-insensitive map var modifiedParameters = (CaseInsensitiveMap(Map.empty) ++ parameters).asInstanceOf[Map[String, String]] testJdbcOptions(new JDBCOptions(modifiedParameters)) modifiedParameters -= "dbtable" assert(modifiedParameters.get("dbTAblE").isEmpty) modifiedParameters -= "customkey" assert(modifiedParameters.get("customKey").isEmpty) modifiedParameters += ("customKey" -> "a-value") modifiedParameters += ("dbTable" -> "t1") testJdbcOptions(new 
JDBCOptions(modifiedParameters)) assert ((modifiedParameters -- parameters.keys).size == 0) } test("SPARK-19318: jdbc data source options should be treated case-insensitive.") { val df = spark.read.format("jdbc") .option("Url", urlWithUserAndPass) .option("DbTaBle", "TEST.PEOPLE") .load() assert(df.count() == 3) withTempView("people_view") { sql( s""" |CREATE TEMPORARY VIEW people_view |USING org.apache.spark.sql.jdbc |OPTIONS (uRl '$url', DbTaBlE 'TEST.PEOPLE', User 'testUser', PassWord 'testPass') """.stripMargin.replaceAll("\\n", " ")) assert(sql("select * from people_view").count() == 3) } } test("SPARK-21519: option sessionInitStatement, run SQL to initialize the database session.") { val initSQL1 = "SET @MYTESTVAR 21519" val df1 = spark.read.format("jdbc") .option("url", urlWithUserAndPass) .option("dbtable", "(SELECT NVL(@MYTESTVAR, -1))") .option("sessionInitStatement", initSQL1) .load() assert(df1.collect() === Array(Row(21519))) val initSQL2 = "SET SCHEMA DUMMY" val df2 = spark.read.format("jdbc") .option("url", urlWithUserAndPass) .option("dbtable", "TEST.PEOPLE") .option("sessionInitStatement", initSQL2) .load() val e = intercept[SparkException] {df2.collect()}.getMessage assert(e.contains("""Schema "DUMMY" not found""")) sql( s""" |CREATE OR REPLACE TEMPORARY VIEW test_sessionInitStatement |USING org.apache.spark.sql.jdbc |OPTIONS (url '$urlWithUserAndPass', |dbtable '(SELECT NVL(@MYTESTVAR1, -1), NVL(@MYTESTVAR2, -1))', |sessionInitStatement 'SET @MYTESTVAR1 21519; SET @MYTESTVAR2 1234') """.stripMargin) val df3 = sql("SELECT * FROM test_sessionInitStatement") assert(df3.collect() === Array(Row(21519, 1234))) } test("jdbc data source shouldn't have unnecessary metadata in its schema") { val schema = StructType(Seq(StructField("NAME", StringType, true, defaultMetadata), StructField("THEID", IntegerType, true, defaultMetadata))) val df = spark.read.format("jdbc") .option("Url", urlWithUserAndPass) .option("DbTaBle", "TEST.PEOPLE") .load() 
assert(df.schema === schema) withTempView("people_view") { sql( s""" |CREATE TEMPORARY VIEW people_view |USING org.apache.spark.sql.jdbc |OPTIONS (uRl '$url', DbTaBlE 'TEST.PEOPLE', User 'testUser', PassWord 'testPass') """.stripMargin.replaceAll("\\n", " ")) assert(sql("select * from people_view").schema === schema) } } test("SPARK-23856 Spark jdbc setQueryTimeout option") { val numJoins = 100 val longRunningQuery = s"SELECT t0.NAME AS c0, ${(1 to numJoins).map(i => s"t$i.NAME AS c$i").mkString(", ")} " + s"FROM test.people t0 ${(1 to numJoins).map(i => s"join test.people t$i").mkString(" ")}" val df = spark.read.format("jdbc") .option("Url", urlWithUserAndPass) .option("dbtable", s"($longRunningQuery)") .option("queryTimeout", 1) .load() val errMsg = intercept[SparkException] { df.collect() }.getMessage assert(errMsg.contains("Statement was canceled or the session timed out")) } test("SPARK-24327 verify and normalize a partition column based on a JDBC resolved schema") { def testJdbcPartitionColumn(partColName: String, expectedColumnName: String): Unit = { val df = spark.read.format("jdbc") .option("url", urlWithUserAndPass) .option("dbtable", "TEST.PARTITION") .option("partitionColumn", partColName) .option("lowerBound", 1) .option("upperBound", 4) .option("numPartitions", 3) .load() val quotedPrtColName = testH2Dialect.quoteIdentifier(expectedColumnName) df.logicalPlan match { case LogicalRelation(JDBCRelation(_, parts, _), _, _, _) => val whereClauses = parts.map(_.asInstanceOf[JDBCPartition].whereClause).toSet assert(whereClauses === Set( s"$quotedPrtColName < 2 or $quotedPrtColName is null", s"$quotedPrtColName >= 2 AND $quotedPrtColName < 3", s"$quotedPrtColName >= 3")) } } testJdbcPartitionColumn("THEID", "THEID") testJdbcPartitionColumn("\\"THEID\\"", "THEID") withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") { testJdbcPartitionColumn("ThEiD", "THEID") } testJdbcPartitionColumn("THE ID", "THE ID") def testIncorrectJdbcPartitionColumn(partColName: 
String): Unit = { val errMsg = intercept[AnalysisException] { testJdbcPartitionColumn(partColName, "THEID") }.getMessage assert(errMsg.contains(s"User-defined partition column $partColName not found " + "in the JDBC relation:")) } testIncorrectJdbcPartitionColumn("NoExistingColumn") withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") { testIncorrectJdbcPartitionColumn(testH2Dialect.quoteIdentifier("ThEiD")) } } test("query JDBC option - negative tests") { val query = "SELECT * FROM test.people WHERE theid = 1" // load path val e1 = intercept[RuntimeException] { val df = spark.read.format("jdbc") .option("Url", urlWithUserAndPass) .option("query", query) .option("dbtable", "test.people") .load() }.getMessage assert(e1.contains("Both 'dbtable' and 'query' can not be specified at the same time.")) // jdbc api path val properties = new Properties() properties.setProperty(JDBCOptions.JDBC_QUERY_STRING, query) val e2 = intercept[RuntimeException] { spark.read.jdbc(urlWithUserAndPass, "TEST.PEOPLE", properties).collect() }.getMessage assert(e2.contains("Both 'dbtable' and 'query' can not be specified at the same time.")) val e3 = intercept[RuntimeException] { sql( s""" |CREATE OR REPLACE TEMPORARY VIEW queryOption |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', query '$query', dbtable 'TEST.PEOPLE', | user 'testUser', password 'testPass') """.stripMargin.replaceAll("\\n", " ")) }.getMessage assert(e3.contains("Both 'dbtable' and 'query' can not be specified at the same time.")) val e4 = intercept[RuntimeException] { val df = spark.read.format("jdbc") .option("Url", urlWithUserAndPass) .option("query", "") .load() }.getMessage assert(e4.contains("Option `query` can not be empty.")) // Option query and partitioncolumn are not allowed together. val expectedErrorMsg = s""" |Options 'query' and 'partitionColumn' can not be specified together. 
|Please define the query using `dbtable` option instead and make sure to qualify |the partition columns using the supplied subquery alias to resolve any ambiguity. |Example : |spark.read.format("jdbc") | .option("url", jdbcUrl) | .option("dbtable", "(select c1, c2 from t1) as subq") | .option("partitionColumn", "c1") | .option("lowerBound", "1") | .option("upperBound", "100") | .option("numPartitions", "3") | .load() """.stripMargin val e5 = intercept[RuntimeException] { sql( s""" |CREATE OR REPLACE TEMPORARY VIEW queryOption |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', query '$query', user 'testUser', password 'testPass', | partitionColumn 'THEID', lowerBound '1', upperBound '4', numPartitions '3') """.stripMargin.replaceAll("\\n", " ")) }.getMessage assert(e5.contains(expectedErrorMsg)) } test("query JDBC option") { val query = "SELECT name, theid FROM test.people WHERE theid = 1" // query option to pass on the query string. val df = spark.read.format("jdbc") .option("Url", urlWithUserAndPass) .option("query", query) .load() checkAnswer( df, Row("fred", 1) :: Nil) // query option in the create table path. 
sql( s""" |CREATE OR REPLACE TEMPORARY VIEW queryOption |USING org.apache.spark.sql.jdbc |OPTIONS (url '$url', query '$query', user 'testUser', password 'testPass') """.stripMargin.replaceAll("\\n", " ")) checkAnswer( sql("select name, theid from queryOption"), Row("fred", 1) :: Nil) } test("SPARK-22814 support date/timestamp types in partitionColumn") { val expectedResult = Seq( ("2018-07-06", "2018-07-06 05:50:00.0"), ("2018-07-06", "2018-07-06 08:10:08.0"), ("2018-07-08", "2018-07-08 13:32:01.0"), ("2018-07-12", "2018-07-12 09:51:15.0") ).map { case (date, timestamp) => Row(Date.valueOf(date), Timestamp.valueOf(timestamp)) } // DateType partition column val df1 = spark.read.format("jdbc") .option("url", urlWithUserAndPass) .option("dbtable", "TEST.DATETIME") .option("partitionColumn", "d") .option("lowerBound", "2018-07-06") .option("upperBound", "2018-07-20") .option("numPartitions", 3) .load() df1.logicalPlan match { case LogicalRelation(JDBCRelation(_, parts, _), _, _, _) => val whereClauses = parts.map(_.asInstanceOf[JDBCPartition].whereClause).toSet assert(whereClauses === Set( """"D" < '2018-07-11' or "D" is null""", """"D" >= '2018-07-11' AND "D" < '2018-07-15'""", """"D" >= '2018-07-15'""")) } checkAnswer(df1, expectedResult) // TimestampType partition column val df2 = spark.read.format("jdbc") .option("url", urlWithUserAndPass) .option("dbtable", "TEST.DATETIME") .option("partitionColumn", "t") .option("lowerBound", "2018-07-04 03:30:00.0") .option("upperBound", "2018-07-27 14:11:05.0") .option("numPartitions", 2) .load() df2.logicalPlan match { case LogicalRelation(JDBCRelation(_, parts, _), _, _, _) => val whereClauses = parts.map(_.asInstanceOf[JDBCPartition].whereClause).toSet assert(whereClauses === Set( """"T" < '2018-07-15 20:50:32.5' or "T" is null""", """"T" >= '2018-07-15 20:50:32.5'""")) } checkAnswer(df2, expectedResult) } test("throws an exception for unsupported partition column types") { val errMsg = intercept[AnalysisException] { 
spark.read.format("jdbc") .option("url", urlWithUserAndPass) .option("dbtable", "TEST.PEOPLE") .option("partitionColumn", "name") .option("lowerBound", "aaa") .option("upperBound", "zzz") .option("numPartitions", 2) .load() }.getMessage assert(errMsg.contains( "Partition column type should be numeric, date, or timestamp, but string found.")) } test("SPARK-24288: Enable preventing predicate pushdown") { val table = "test.people" val df = spark.read.format("jdbc") .option("Url", urlWithUserAndPass) .option("dbTable", table) .option("pushDownPredicate", false) .load() .filter("theid = 1") .select("name", "theid") checkAnswer( checkNotPushdown(df), Row("fred", 1) :: Nil) // pushDownPredicate option in the create table path. sql( s""" |CREATE OR REPLACE TEMPORARY VIEW predicateOption |USING org.apache.spark.sql.jdbc |OPTIONS (url '$urlWithUserAndPass', dbTable '$table', pushDownPredicate 'false') """.stripMargin.replaceAll("\\n", " ")) checkAnswer( checkNotPushdown(sql("SELECT name, theid FROM predicateOption WHERE theid = 1")), Row("fred", 1) :: Nil) } test( "SPARK-36574: pushDownPredicate=false should prevent push down filters to JDBC data source") { val df = spark.read.format("jdbc") .option("Url", urlWithUserAndPass) .option("dbTable", "test.people") val df1 = df .option("pushDownPredicate", false) .load() .filter("theid = 1") .select("name", "theid") val df2 = df .option("pushDownPredicate", true) .load() .filter("theid = 1") .select("name", "theid") val df3 = df .load() .select("name", "theid") def getRowCount(df: DataFrame): Long = { val queryExecution = df.queryExecution val rawPlan = queryExecution.executedPlan.collect { case p: DataSourceScanExec => p } match { case Seq(p) => p case _ => fail(s"More than one PhysicalRDD found\\n$queryExecution") } rawPlan.execute().count() } assert(getRowCount(df1) == df3.count) assert(getRowCount(df2) < df3.count) } test("SPARK-26383 throw IllegalArgumentException if wrong kind of driver to the given url") { val e = 
intercept[IllegalArgumentException] { val opts = Map( "url" -> "jdbc:mysql://localhost/db", "dbtable" -> "table", "driver" -> "org.postgresql.Driver" ) spark.read.format("jdbc").options(opts).load }.getMessage assert(e.contains("The driver could not open a JDBC connection. " + "Check the URL: jdbc:mysql://localhost/db")) } test("support casting patterns for lower/upper bounds of TimestampType") { DateTimeTestUtils.outstandingTimezonesIds.foreach { timeZone => withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> timeZone) { Seq( ("1972-07-04 03:30:00", "1972-07-15 20:50:32.5", "1972-07-27 14:11:05"), ("2019-01-20 12:00:00.502", "2019-01-20 12:00:00.751", "2019-01-20 12:00:01.000"), ("2019-01-20T00:00:00.123456", "2019-01-20 00:05:00.123456", "2019-01-20T00:10:00.123456"), ("1500-01-20T00:00:00.123456", "1500-01-20 00:05:00.123456", "1500-01-20T00:10:00.123456") ).foreach { case (lower, middle, upper) => val df = spark.read.format("jdbc") .option("url", urlWithUserAndPass) .option("dbtable", "TEST.DATETIME") .option("partitionColumn", "t") .option("lowerBound", lower) .option("upperBound", upper) .option("numPartitions", 2) .load() df.logicalPlan match { case lr: LogicalRelation if lr.relation.isInstanceOf[JDBCRelation] => val jdbcRelation = lr.relation.asInstanceOf[JDBCRelation] val whereClauses = jdbcRelation.parts.map(_.asInstanceOf[JDBCPartition].whereClause) assert(whereClauses.toSet === Set( s""""T" < '$middle' or "T" is null""", s""""T" >= '$middle'""")) } } } } } test("Add exception when isolationLevel is Illegal") { val e = intercept[IllegalArgumentException] { spark.read.format("jdbc") .option("Url", urlWithUserAndPass) .option("dbTable", "test.people") .option("isolationLevel", "test") .load() }.getMessage assert(e.contains( "Invalid value `test` for parameter `isolationLevel`. 
This can be " + "`NONE`, `READ_UNCOMMITTED`, `READ_COMMITTED`, `REPEATABLE_READ` or `SERIALIZABLE`.")) } test("SPARK-28552: Case-insensitive database URLs in JdbcDialect") { assert(JdbcDialects.get("jdbc:mysql://localhost/db") === MySQLDialect) assert(JdbcDialects.get("jdbc:MySQL://localhost/db") === MySQLDialect) assert(JdbcDialects.get("jdbc:postgresql://localhost/db") === PostgresDialect) assert(JdbcDialects.get("jdbc:postGresql://localhost/db") === PostgresDialect) assert(JdbcDialects.get("jdbc:db2://localhost/db") === DB2Dialect) assert(JdbcDialects.get("jdbc:DB2://localhost/db") === DB2Dialect) assert(JdbcDialects.get("jdbc:sqlserver://localhost/db") === MsSqlServerDialect) assert(JdbcDialects.get("jdbc:sqlServer://localhost/db") === MsSqlServerDialect) assert(JdbcDialects.get("jdbc:derby://localhost/db") === DerbyDialect) assert(JdbcDialects.get("jdbc:derBy://localhost/db") === DerbyDialect) assert(JdbcDialects.get("jdbc:oracle://localhost/db") === OracleDialect) assert(JdbcDialects.get("jdbc:Oracle://localhost/db") === OracleDialect) assert(JdbcDialects.get("jdbc:teradata://localhost/db") === TeradataDialect) assert(JdbcDialects.get("jdbc:Teradata://localhost/db") === TeradataDialect) } test("SQLContext.jdbc (deprecated)") { val sqlContext = spark.sqlContext var jdbcDF = sqlContext.jdbc(urlWithUserAndPass, "TEST.PEOPLE") checkAnswer(jdbcDF, Row("fred", 1) :: Row("mary", 2) :: Row ("joe 'foo' \\"bar\\"", 3) :: Nil) jdbcDF = sqlContext.jdbc(urlWithUserAndPass, "TEST.PEOPLE", "THEID", 0, 4, 3) checkNumPartitions(jdbcDF, 3) checkAnswer(jdbcDF, Row("fred", 1) :: Row("mary", 2) :: Row ("joe 'foo' \\"bar\\"", 3) :: Nil) val parts = Array[String]("THEID = 2") jdbcDF = sqlContext.jdbc(urlWithUserAndPass, "TEST.PEOPLE", parts) checkAnswer(jdbcDF, Row("mary", 2) :: Nil) } test("SPARK-32364: JDBCOption constructor") { val extraOptions = CaseInsensitiveMap[String](Map("UrL" -> "url1", "dBTable" -> "table1")) val connectionProperties = new Properties() 
connectionProperties.put("url", "url2") connectionProperties.put("dbtable", "table2") // connection property should override the options in extraOptions val params = extraOptions ++ connectionProperties.asScala assert(params.size == 2) assert(params.get("uRl").contains("url2")) assert(params.get("DbtaBle").contains("table2")) // JDBCOptions constructor parameter should overwrite the existing conf val options = new JDBCOptions(url, "table3", params) assert(options.asProperties.size == 2) assert(options.asProperties.get("url") == url) assert(options.asProperties.get("dbtable") == "table3") } test("SPARK-34379: Map JDBC RowID to StringType rather than LongType") { val mockRsmd = mock(classOf[java.sql.ResultSetMetaData]) when(mockRsmd.getColumnCount).thenReturn(1) when(mockRsmd.getColumnLabel(anyInt())).thenReturn("rowid") when(mockRsmd.getColumnType(anyInt())).thenReturn(java.sql.Types.ROWID) when(mockRsmd.getColumnTypeName(anyInt())).thenReturn("rowid") when(mockRsmd.getPrecision(anyInt())).thenReturn(0) when(mockRsmd.getScale(anyInt())).thenReturn(0) when(mockRsmd.isSigned(anyInt())).thenReturn(false) when(mockRsmd.isNullable(anyInt())).thenReturn(java.sql.ResultSetMetaData.columnNoNulls) val mockRs = mock(classOf[java.sql.ResultSet]) when(mockRs.getMetaData).thenReturn(mockRsmd) val mockDialect = mock(classOf[JdbcDialect]) when(mockDialect.getCatalystType(anyInt(), anyString(), anyInt(), any[MetadataBuilder])) .thenReturn(None) val schema = JdbcUtils.getSchema(mockRs, mockDialect) val fields = schema.fields assert(fields.length === 1) assert(fields(0).dataType === StringType) } }
ueshin/apache-spark
sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
Scala
apache-2.0
80,817
package org.opencompare.api.scala

/**
 * Base type for nodes of a feature hierarchy.
 *
 * Concrete subtypes (plain features and [[FeatureGroup]]s — the latter is
 * declared elsewhere in this project) share a mutable display name and an
 * optional link to the enclosing group.
 */
abstract class AbstractFeature {
  // Display name of the feature; empty string until assigned.
  var name : String = ""
  // Enclosing feature group, or None when this feature is not nested in a group.
  var parent : Option[FeatureGroup] = None
}
OpenCompare/OpenCompare
org.opencompare/api-scala/src/main/scala/org/opencompare/api/scala/AbstractFeature.scala
Scala
apache-2.0
140
package cn.edu.sjtu.omnilab.kalin.utils

import cn.edu.sjtu.omnilab.kalin.stlab._
import org.apache.spark.{SparkConf, SparkContext}

/**
 * Spark driver that reads `uid,time,location` CSV records, aggregates them
 * into a flow map via [[Flowmap.draw]], and writes the result as
 * `interval,from,to,numTotal,numUnique` CSV lines.
 *
 * Usage: GenFlowmap <in> <out> [interval=86400] [minnum=1]
 *   - interval: aggregation window in seconds (default: one day)
 *   - minnum:   minimum NumUnique required to keep a flow edge (default: 1)
 */
object GenFlowmap {

  def main(args: Array[String]): Unit = {
    if (args.length < 2) {
      println("usage: GenFlowmap <in> <out> [interval=86400] [minnum=1]")
      sys.exit(-1)
    }

    val input = args(0)
    val output = args(1)
    // Optional arguments: prefer immutable vals over reassigned vars, and use
    // `args.length` consistently (the original mixed `size` and `length`).
    val interval = if (args.length >= 3) args(2).toInt else 86400
    val minnum = if (args.length >= 4) args(3).toInt else 1

    val conf = new SparkConf().setAppName("Generate flowmap")
    val spark = new SparkContext(conf)

    try {
      // Each input line is "uid,time,location"; time is parsed as a Double.
      val formatedRDD = spark.textFile(input).map(_.split(",")).map { parts =>
        STPoint(parts(0), parts(1).toDouble, parts(2))
      }

      Flowmap.draw(formatedRDD, interval)
        .filter(_.NumUnique >= minnum)
        .sortBy(m => (m.interval, m.FROM, m.TO))
        .map(m => "%d,%s,%s,%d,%d"
          .format(m.interval, m.FROM, m.TO, m.NumTotal, m.NumUnique))
        .saveAsTextFile(output)
    } finally {
      // Release cluster resources even when the job fails
      // (the original leaked the SparkContext on any exception).
      spark.stop()
    }
  }
}
caesar0301/MDMS
kalin-etl/src/main/scala/cn/edu/sjtu/omnilab/kalin/utils/GenFlowmap.scala
Scala
apache-2.0
1,165
package mesosphere.marathon.upgrade

import java.net.URL

import scala.concurrent.Promise

import akka.actor.Actor
import akka.actor.Status.Failure
import akka.pattern.pipe
import mesosphere.marathon.ResolveArtifactsCanceledException
import mesosphere.marathon.io.storage.StorageProvider
import mesosphere.marathon.io.{ CancelableDownload, IO, PathFun }
import mesosphere.marathon.state.AppDefinition
import mesosphere.util.Logging

/**
 * Actor that downloads a set of artifacts (url -> storage path) for an app and
 * completes `promise` with `true` once every download has finished.
 *
 * Failure semantics visible in this file: individual download failures are only
 * logged (the promise is deliberately left pending), while stopping the actor
 * before completion fails the promise with ResolveArtifactsCanceledException.
 */
class ResolveArtifactsActor(
    app: AppDefinition,
    url2Path: Map[URL, String],
    promise: Promise[Boolean],
    storage: StorageProvider) extends Actor with IO with PathFun with Logging {

  import mesosphere.marathon.upgrade.ResolveArtifactsActor.DownloadFinished
  import mesosphere.util.ThreadPoolContext.{ context => executionContext }

  // All downloads that have to be performed by this actor; entries are removed
  // as their DownloadFinished messages arrive.
  var downloads = url2Path.map { case (url, path) => new CancelableDownload(url, storage, path) }

  override def preStart(): Unit = {
    // Pipe each download's completion back to this actor as DownloadFinished.
    downloads.map(_.get.map(DownloadFinished) pipeTo self)
    if (url2Path.isEmpty) promise.success(true) // handle empty list
  }

  override def postStop(): Unit = {
    downloads.foreach(_.cancel()) // clean up not finished artifacts
    // tryFailure (not failure) because the promise may already have been
    // completed successfully by receive/preStart.
    if (!promise.isCompleted)
      promise.tryFailure(new ResolveArtifactsCanceledException("Artifact Resolving has been cancelled"))
  }

  override def receive: Receive = {
    case DownloadFinished(download) =>
      downloads = downloads.filter(_ != download)
      // NOTE(review): plain success() assumes the promise cannot already be
      // completed here; holds as long as preStart only completes it when
      // url2Path is empty (in which case no DownloadFinished ever arrives).
      if (downloads.isEmpty) promise.success(true)
    case Failure(ex) =>
      log.warn("Can not resolve artifact", ex) // do not fail the promise!
  }
}

object ResolveArtifactsActor {
  case class DownloadFinished(download: CancelableDownload)
}
14Zen/marathon
src/main/scala/mesosphere/marathon/upgrade/ResolveArtifactsActor.scala
Scala
apache-2.0
1,737
/*
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package com.flaminem.flamy.conf

import com.flaminem.flamy.conf.ConfLevel.Global
import com.flaminem.flamy.exec.utils.io.FlamyOutput
import com.flaminem.flamy.utils.macros.SealedValues
import com.typesafe.config.Config
import org.apache.commons.configuration.Configuration
import org.apache.hadoop.fs.Path

import scala.reflect.runtime.universe._

/**
 * These configuration parameters are global to the whole JVM.
 * They can be changed without restarting the JVM, but having two threads with different values
 * of these properties is not possible.
 */
private[conf] class FlamyGlobalConfVars { self =>

  // Backing typesafe-config instance; injected after construction (starts null).
  var conf: Config = _

  // Each object below declares one global configuration variable. The `hidden`
  // flag (where present) keeps the variable out of user-facing listings.

  object RUN_DIR extends GlobalConfVar[String](
    varName = "run.dir.path",
    defaultValue = Some(new Path(s"/tmp/${Flamy.name}-${SystemContext.userName}").toString),
    validator = Validator.Required(),
    description = "Set the directory in which all the temporary outputs will be written. " +
      s"By default this is a temporary directory created in /tmp/${Flamy.name}-$$USER."
  )

  object RUN_DIR_CLEANING_DELAY extends GlobalConfVar[Int](
    varName = "run.dir.cleaning.delay",
    defaultValue = Some(24),
    validator = Validator.Required(),
    description = "Set the number of hours for which all the run directories older than this time laps " +
      s"will be automatically removed. Automatic removal occurs during each ${Flamy.name} command startup."
  )

  object REGEN_STATIC_SYMBOL extends GlobalConfVar[String](
    varName = "regen.static.symbol",
    defaultValue = Some("\\u2713"),
    validator = Validator.Required(),
    description = "Set the symbol used to represent partitions that the regen can predict.",
    hidden = true
  )

  object REGEN_DYNAMIC_SYMBOL extends GlobalConfVar[String](
    varName = "regen.dynamic.symbol",
    defaultValue = Some("\\u2715"),
    validator = Validator.Required(),
    description = "Set the symbol used to represent partitions that the regen cannot predict and will handle dynamically.",
    hidden = true
  )

  object REGEN_SHOW_INPUTS extends GlobalConfVar[Boolean](
    varName = "regen.show.inputs",
    defaultValue = Some(true),
    validator = Validator.Required(),
    description = "(experimental feature) This this to true display the number of input partition when running a regen.",
    hidden = true
  )

  object USE_OLD_REGEN extends GlobalConfVar[Boolean](
    varName = "regen.use.legacy",
    defaultValue = Some(false),
    validator = Validator.Required(),
    description = "Use the old version of the regen."
  )

  object DYNAMIC_OUTPUT extends GlobalConfVar[Boolean](
    varName = "io.dynamic.output",
    defaultValue = Some(true),
    validator = Validator.Required(),
    description = "The run and regen commands will use a dynamic output, instead of a static output. " +
      "Only work with terminals supporting ANSI escape codes."
  )

  object USE_HYPERLINKS extends GlobalConfVar[Boolean](
    varName = "io.use.hyperlinks",
    defaultValue = Some(true),
    validator = Validator.Required(),
    description = s"Every file path that ${Flamy.name} prints will be formatted as a url. " +
      s"In some shells, this allows CTRL+clicking the link to open the file."
  )

  object BROWSER_GUI extends GlobalConfVar[Boolean](
    varName = "io.browser",
    defaultValue = Some(false),
    validator = Validator.Required(),
    description = "When this is set to true, flamy's output will be altered to be made exploitable by a javascript front-end."
  )

  object AUTO_OPEN_COMMAND extends GlobalConfVar[String](
    varName = "auto.open.command",
    defaultValue = Some(SystemContext.osFamily.openCommand),
    validator = Validator.Required(),
    description = "Some commands like 'show graph' generate a file and automatically open it. " +
      "Use this option to specify the command to use when opening the file," +
      "or set it to an empty string to disable the automatic opening of the files."
  )

  object AUTO_OPEN_MULTI extends GlobalConfVar[Boolean](
    varName = "auto.open.multi",
    defaultValue = Some(SystemContext.osFamily.isMultiOpenSuported),
    validator = Validator.Required(),
    description = "In addition with auto.open.command, this boolean flag indicates if multiple files should be open simultaneously."
  )

  object VERBOSITY_LEVEL extends GlobalConfVar[String](
    varName = "verbosity.level",
    defaultValue = Some("INFO"),
    validator = Validator.In(FlamyOutput.LogLevel.logLevelNames),
    description = "Controls the verbosity level of flamy."
  )

  /**
   * Template for the variables above: binds each var to this instance's `conf`
   * and prefixes its property key with the application name.
   * Sealed so that SealedValues can enumerate all instances at compile time.
   */
  sealed class GlobalConfVar[T] (
    override val varName: String,
    override val defaultValue: Option[T],
    override val validator: Validator[T],
    override val description: String,
    override val hidden: Boolean = false
  )(implicit override val typeTag: TypeTag[T]) extends ConfVarTemplate[T] {
    override def confLevel = ConfLevel.Global
    override def conf: Config = self.conf
    override def propertyKey: String = s"${Flamy.name}.$varName"
  }

  /* This line must stay after the value declaration or it will be empty */
  val confVars: Seq[GlobalConfVar[_]] = SealedValues.values[GlobalConfVar[_]]
}
flaminem/flamy
src/main/scala/com/flaminem/flamy/conf/FlamyGlobalConfVars.scala
Scala
apache-2.0
5,926
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution

import org.scalatest.FunSuite

import org.apache.spark.sql.{SQLConf, execution}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.TestData._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.execution.joins.{BroadcastHashJoin, ShuffledHashJoin}
import org.apache.spark.sql.test.TestSQLContext._
import org.apache.spark.sql.test.TestSQLContext.implicits._
import org.apache.spark.sql.test.TestSQLContext.planner._
import org.apache.spark.sql.types._

/** Tests for the query planner's physical-plan selection strategies. */
class PlannerSuite extends FunSuite {

  // Two logical Unions should be planned as a single n-ary physical Union.
  test("unions are collapsed") {
    val query = testData.unionAll(testData).unionAll(testData).logicalPlan
    val planned = BasicOperators(query).head
    val logicalUnions = query collect { case u: logical.Union => u }
    val physicalUnions = planned collect { case u: execution.Union => u }

    assert(logicalUnions.size === 2)
    assert(physicalUnions.size === 1)
  }

  // A plain count should produce a map-side partial aggregate plus a final
  // aggregate (hence two Aggregate nodes in the physical plan).
  test("count is partially aggregated") {
    val query = testData.groupBy('value).agg(count('key)).queryExecution.analyzed
    val planned = HashAggregation(query).head
    val aggregations = planned.collect { case n if n.nodeName contains "Aggregate" => n }

    assert(aggregations.size === 2)
  }

  test("count distinct is partially aggregated") {
    val query = testData.groupBy('value).agg(countDistinct('key)).queryExecution.analyzed
    val planned = HashAggregation(query)
    assert(planned.nonEmpty)
  }

  test("mixed aggregates are partially aggregated") {
    val query =
      testData.groupBy('value).agg(count('value), countDistinct('key)).queryExecution.analyzed
    val planned = HashAggregation(query)
    assert(planned.nonEmpty)
  }

  // Verifies that the sizeInBytes estimate of a LIMIT subquery is small enough
  // (given a threshold tuned per field-type mix) to trigger a broadcast join.
  test("sizeInBytes estimation of limit operator for broadcast hash join optimization") {
    def checkPlan(fieldTypes: Seq[DataType], newThreshold: Int): Unit = {
      setConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD, newThreshold.toString)
      // Build a single-row table whose schema has one column per field type
      // plus an integer join key; all values are null (only the size matters).
      val fields = fieldTypes.zipWithIndex.map {
        case (dataType, index) => StructField(s"c${index}", dataType, true)
      } :+ StructField("key", IntegerType, true)
      val schema = StructType(fields)
      val row = Row.fromSeq(Seq.fill(fields.size)(null))
      val rowRDD = org.apache.spark.sql.test.TestSQLContext.sparkContext.parallelize(row :: Nil)
      createDataFrame(rowRDD, schema).registerTempTable("testLimit")

      val planned = sql(
        """
          |SELECT l.a, l.b
          |FROM testData2 l JOIN (SELECT * FROM testLimit LIMIT 1) r ON (l.a = r.key)
        """.stripMargin).queryExecution.executedPlan

      val broadcastHashJoins = planned.collect { case join: BroadcastHashJoin => join }
      val shuffledHashJoins = planned.collect { case join: ShuffledHashJoin => join }

      assert(broadcastHashJoins.size === 1, "Should use broadcast hash join")
      assert(shuffledHashJoins.isEmpty, "Should not use shuffled hash join")

      dropTempTable("testLimit")
    }

    val origThreshold = conf.autoBroadcastJoinThreshold

    val simpleTypes =
      NullType ::
      BooleanType ::
      ByteType ::
      ShortType ::
      IntegerType ::
      LongType ::
      FloatType ::
      DoubleType ::
      DecimalType(10, 5) ::
      DecimalType.Unlimited ::
      DateType ::
      TimestampType ::
      StringType ::
      BinaryType :: Nil

    checkPlan(simpleTypes, newThreshold = 16434)

    val complexTypes =
      ArrayType(DoubleType, true) ::
      ArrayType(StringType, false) ::
      MapType(IntegerType, StringType, true) ::
      MapType(IntegerType, ArrayType(DoubleType), false) ::
      StructType(Seq(
        StructField("a", IntegerType, nullable = true),
        StructField("b", ArrayType(DoubleType), nullable = false),
        StructField("c", DoubleType, nullable = false))) :: Nil

    checkPlan(complexTypes, newThreshold = 901617)

    // Restore the shared conf so later tests see the original threshold.
    setConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD, origThreshold.toString)
  }

  // A cached (in-memory) small table should still be eligible for broadcast
  // join, i.e. its statistics must propagate through InMemoryRelation.
  test("InMemoryRelation statistics propagation") {
    val origThreshold = conf.autoBroadcastJoinThreshold
    setConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD, 81920.toString)

    testData.limit(3).registerTempTable("tiny")
    sql("CACHE TABLE tiny")

    val a = testData.as("a")
    val b = table("tiny").as("b")
    val planned = a.join(b, $"a.key" === $"b.key").queryExecution.executedPlan

    val broadcastHashJoins = planned.collect { case join: BroadcastHashJoin => join }
    val shuffledHashJoins = planned.collect { case join: ShuffledHashJoin => join }

    assert(broadcastHashJoins.size === 1, "Should use broadcast hash join")
    assert(shuffledHashJoins.isEmpty, "Should not use shuffled hash join")

    setConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD, origThreshold.toString)
  }
}
hengyicai/OnlineAggregationUCAS
sql/core/src/test/scala/org/apache/spark/sql/execution/PlannerSuite.scala
Scala
apache-2.0
5,581
package com.eharmony.aloha.score.proto.conversions

import com.eharmony.aloha.score.Scores.Score
import com.eharmony.aloha.score.Scores.Score.{ModelId => MId}
import com.eharmony.aloha.id.ModelId

/**
 * Enrichment layer for the protocol-buffer score types.  Importing this
 * package brings the implicit wrappers below into scope.
 */
package object rich {

  /**
   * Provides extension methods to the protocol buffer based Score class.
   * @param s the wrapped protobuf score
   */
  implicit class RichScore(protected val s: Score) extends RichScoreLike

  /**
   * Provides an extension method to the protocol buffer based ModelID class to
   * allow conversion to a ModelIdentity.
   * @param id the wrapped protobuf model id
   */
  implicit class RichModelId(protected val id: MId) {

    /**
     * Convert this protobuf ModelId into a [[com.eharmony.aloha.id.ModelId]].
     * A protobuf message without a name field yields an empty-string name.
     * @return the domain-level model id
     */
    def toModelId: ModelId = {
      val modelName = if (id.hasName) id.getName else ""
      ModelId(id.getId, modelName)
    }
  }
}
eHarmony/aloha
aloha-io-proto/src/main/scala/com/eharmony/aloha/score/proto/conversions/rich/package.scala
Scala
mit
845
import sbt._
import Keys._
import play.Project._

// Legacy sbt build definition (pre-build.sbt Play 2.x style, project/Build.scala).
object ApplicationBuild extends Build {

  val appName = "cricket_stats"
  val appVersion = "1.0-SNAPSHOT"

  // Managed library dependencies; jdbc and anorm are provided by the Play plugin.
  val appDependencies = Seq(
    // Add your project dependencies here,
    jdbc,
    anorm
  )

  // The root Play project; extra settings can be appended below.
  val main = play.Project(appName, appVersion, appDependencies).settings(
    // Add your own settings in here
  )
}
ishakir/cric-stat
project/Build.scala
Scala
epl-1.0
384
package com.spr.scala

/**
 * Scala version of a Bag. Note that by inheriting from `Iterable`, many of our equivalent methods from the Java
 * version of `Bag` are defined already.
 *
 * @tparam A the element type (covariant: a `Bag[B]` is a `Bag[A]` when `B <: A`)
 */
// a trait is similar to an interface (especially with Java 8's default methods) that can also contain fields
// in fact, a trait is almost the same as an abstract class except for two things:
// 1. a trait cannot have any constructors
// 2. a class or trait can extend multiple traits, but only a single class
// also note the square brackets syntax for generics, and the use of A instead of T as the usual generic type
// we can ignore the + in the type for now, but essentially it allows Bag[B] to be a subclass of Bag[A] if B is a
// subclass of A
trait Bag[+A] extends Iterable[A] {
  // generics are sometimes hard to get right. in order to support subtypes of Bag[A], we need to accept a supertype of A
  def add[A1 >: A](item: A1): Bag[A1]

  // lambda function syntax
  def remove(p: A => Boolean): Bag[A]

  def map[B](f: A => B): Bag[B]

  def flatMap[B](f: A => Bag[B]): Bag[B]

  def filter(p: A => Boolean): Bag[A]

  // Scala's optional type is Option instead of Optional
  def reduce[A1 >: A](acc: (A1, A1) => A1): Option[A1]

  def toArray[A1 >: A]: Array[A1]

  // Scala has its own Stream API which serves a similar purpose to Java 8's Stream API
  def toStream: Stream[A]

  def contains(p: A => Boolean): Boolean

  // 0-arg methods can leave off parenthesis, though this is normally only done for side-effect-free methods
  // also, when declaring a method with no parenthesis, callers must also not use parenthesis.
  // if empty parenthesis were declared here, then callers would have the option of not using them (confusing!)
  def size: Int

  def isEmpty: Boolean
}

// since Scala does not provide static methods, in order to provide a convenience factory method for constructing
// bags, we include one in its "companion object".
object Bag {
  /**
   * Constructs a `Bag` using a varargs array of items.
   */
  // by naming this method "apply", we can call it as one of two ways:
  // 1. Bag.apply(1, 2, 3)
  // 2. Bag(1, 2, 3)
  // the second syntax is preferred as all "apply" methods are used as such
  def apply[A](items: A*): Bag[A] =
    // using varargs can be confusing sometimes. in order to expand out a collection of some sort into another varargs
    // method, we must attach the "_*" type to it to indicate to the compiler to expand it
    // also note that we can qualify classes based on subpackages in our current scope rather than having to fully
    // qualify or import those classes
    new impl.ImplicitArrayBag(Array(items: _*))
}
jvz/scala-for-java
src/main/scala/com/spr/scala/Bag.scala
Scala
apache-2.0
2,675
// Compiler regression test (neg/t6375): exercises annotation meta-targets
// (@field/@getter/@setter/@param/@companion*) on vals, method params, and
// constructor params.  The trailing "// warn" / "// no warn" comments are the
// expected-diagnostic markers for this test — do not edit or reflow them.
import scala.annotation.meta.*

class Bippy extends scala.annotation.StaticAnnotation

abstract class Foo {
  @Bippy val x1: Int // warn
  @(Bippy @field) val x2: Int // warn
  @(Bippy @getter) val x3: Int // no warn
  @(Bippy @setter) val x4: Int // warn
  @(Bippy @param) val x5: Int // warn
}

object Bar extends Foo {
  val x1 = 1
  val x2 = 2
  val x3 = 3
  val x4 = 4
  val x5 = 5

  @(Bippy @getter) private[this] val q1: Int = 1 // warn
  @(Bippy @getter) private val q2: Int = 1 // no warn

  def f1(@(Bippy @param) x: Int): Int = 0 // no warn
  def f2(@(Bippy @getter) x: Int): Int = 0 // warn - todo
  def f3(@(Bippy @setter) x: Int): Int = 0 // warn - todo
  def f4(@(Bippy @field) x: Int): Int = 0 // warn - todo
  def f5(@Bippy x: Int): Int = 0 // no warn

  @(Bippy @companionClass) def g1(x: Int): Int = 0 // warn - todo
  @(Bippy @companionObject) def g2(x: Int): Int = 0 // warn - todo
  @(Bippy @companionMethod) def g3(x: Int): Int = 0 // no warn
  @Bippy def g4(x: Int): Int = 0 // no warn

  @(Bippy @companionObject @companionMethod) def g5(x: Int): Int = 0 // no warn
}

class Dingo(
  @Bippy p0: Int, // no warn
  @(Bippy @param) p1: Int, // no warn
  @(Bippy @getter) p2: Int, // warn
  @(Bippy @setter) p3: Int, // warn
  @(Bippy @field) p4: Int // warn
)

class ValDingo(
  @Bippy val p0: Int, // no warn
  @(Bippy @param) val p1: Int, // no warn
  @(Bippy @getter) val p2: Int, // no warn
  @(Bippy @setter) val p3: Int, // warn - todo
  @(Bippy @field) val p4: Int // no warn
)

class VarDingo(
  @Bippy var p0: Int, // no warn
  @(Bippy @param) var p1: Int, // no warn
  @(Bippy @getter) var p2: Int, // no warn
  @(Bippy @setter) var p3: Int, // no warn
  @(Bippy @field) var p4: Int // no warn
)

case class CaseDingo(
  @Bippy p0: Int, // no warn
  @(Bippy @param) p1: Int, // no warn
  @(Bippy @getter) p2: Int, // no warn
  @(Bippy @setter) p3: Int, // warn - todo
  @(Bippy @field) p4: Int // no warn
)
dotty-staging/dotty
tests/untried/neg/t6375.scala
Scala
apache-2.0
2,112
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table

import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala.createTypeInformation
import org.apache.flink.table.api.scala._
import org.apache.flink.table.CompositeFlatteningTest.{TestCaseClass, giveMeCaseClass}
import org.apache.flink.table.api.ValidationException
import org.apache.flink.table.functions.ScalarFunction
import org.apache.flink.table.utils.TableTestBase
import org.apache.flink.table.utils.TableTestUtil._
import org.junit.Test

/**
 * Plan-level tests for flattening composite (tuple / case-class) fields via
 * `.flatten()` in the Table API and `.*` in SQL.  Each test compares the
 * optimized plan against an expected DataSetCalc node string.
 */
class CompositeFlatteningTest extends TableTestBase {

  // Flattening the same field twice must be rejected at validation time.
  @Test(expected = classOf[ValidationException])
  def testDuplicateFlattening(): Unit = {
    val util = batchTestUtil()
    val table = util.addTable[((Int, Long), (String, Boolean), String)]("MyTable", 'a, 'b, 'c)

    table.select('a.flatten(), 'a.flatten())
  }

  @Test
  def testMultipleFlatteningsTable(): Unit = {
    val util = batchTestUtil()
    val table = util.addTable[((Int, Long), (String, Boolean), String)]("MyTable", 'a, 'b, 'c)

    val result = table.select('a.flatten(), 'c, 'b.flatten())

    // Table API names flattened columns as <field>$_<index>.
    val expected = unaryNode(
      "DataSetCalc",
      batchTableNode(0),
      term("select",
        "a._1 AS a$_1",
        "a._2 AS a$_2",
        "c",
        "b._1 AS b$_1",
        "b._2 AS b$_2"
      )
    )

    util.verifyTable(result, expected)
  }

  @Test
  def testMultipleFlatteningsSql(): Unit = {
    val util = batchTestUtil()
    val table = util.addTable[((Int, Long), (String, Boolean), String)]("MyTable", 'a, 'b, 'c)

    // SQL `.*` expansion uses plain tuple accessor names (with dedup suffixes).
    val expected = unaryNode(
      "DataSetCalc",
      batchTableNode(0),
      term("select",
        "a._1 AS _1",
        "a._2 AS _2",
        "c",
        "b._1 AS _10",
        "b._2 AS _20"
      )
    )

    util.verifySql(
      "SELECT MyTable.a.*, c, MyTable.b.* FROM MyTable",
      expected)
  }

  // flatten() is only one level deep: nested composites stay composite.
  @Test
  def testNestedFlattenings(): Unit = {
    val util = batchTestUtil()
    val table = util
      .addTable[((((String, TestCaseClass), Boolean), String), String)]("MyTable", 'a, 'b)

    val result = table.select('a.flatten(), 'b.flatten())

    val expected = unaryNode(
      "DataSetCalc",
      batchTableNode(0),
      term("select",
        "a._1 AS a$_1",
        "a._2 AS a$_2",
        "b"
      )
    )

    util.verifyTable(result, expected)
  }

  // Accessing fields of a scalar function's composite result, plus flattening it.
  @Test
  def testScalarFunctionAccess(): Unit = {
    val util = batchTestUtil()
    val table = util
      .addTable[(String, Int)]("MyTable", 'a, 'b)

    val result = table.select(
      giveMeCaseClass().get("my"),
      giveMeCaseClass().get("clazz"),
      giveMeCaseClass().flatten())

    val expected = unaryNode(
      "DataSetCalc",
      batchTableNode(0),
      term("select",
        s"${giveMeCaseClass.functionIdentifier}().my AS _c0",
        s"${giveMeCaseClass.functionIdentifier}().clazz AS _c1",
        s"${giveMeCaseClass.functionIdentifier}().my AS _c2",
        s"${giveMeCaseClass.functionIdentifier}().clazz AS _c3"
      )
    )

    util.verifyTable(result, expected)
  }

}

object CompositeFlatteningTest {

  case class TestCaseClass(my: String, clazz: Int)

  // Deterministic scalar UDF returning a composite value for the tests above.
  object giveMeCaseClass extends ScalarFunction {
    def eval(): TestCaseClass = {
      TestCaseClass("hello", 42)
    }

    override def getResultType(signature: Array[Class[_]]): TypeInformation[_] = {
      createTypeInformation[TestCaseClass]
    }
  }
}
WangTaoTheTonic/flink
flink-libraries/flink-table/src/test/scala/org/apache/flink/table/CompositeFlatteningTest.scala
Scala
apache-2.0
4,164
package shield.proxying

import akka.actor.{ActorContext, ActorRef, ActorRefFactory, PoisonPill}
import akka.pattern.ask
import akka.routing.RoundRobinGroup
import akka.util.Timeout
import shield.actors.Middleware
import shield.actors.config.{ProxyState, WeightedProxyState}
import shield.config.ServiceLocation
import shield.routing.{EndpointTemplate, Param}
import spray.http.{HttpRequest, HttpResponse}

import scala.annotation.tailrec
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Random

// Message sent to a balancer actor asking it to proxy `request` for `template`.
case class ProxyRequest(template: EndpointTemplate, request: HttpRequest)
// Reply carrying the upstream's response plus routing/caching metadata.
case class ProxiedResponse(upstreamService: ServiceLocation, serviceName: String, template: EndpointTemplate, cacheParams: Set[Param], response: HttpResponse)

/** Abstraction over strategies for forwarding a request to some upstream host. */
trait ProxyBalancer {
  def proxy(template: EndpointTemplate, request: HttpRequest) : Future[ProxiedResponse]
}

/** Null-object balancer: every proxy attempt fails immediately. */
object FailBalancer extends ProxyBalancer {
  def proxy(template: EndpointTemplate, request: HttpRequest): Future[ProxiedResponse] =
    Future.failed(new NotImplementedError())
}

/** Balancer backed by an akka router actor; proxying is an ask to that router. */
class AkkaBalancer(val balancer: ActorRef) extends ProxyBalancer {
  // todo: use global timeout config
  implicit val timeout = Timeout(60.seconds)
  def proxy(template: EndpointTemplate, request: HttpRequest) = (balancer ? ProxyRequest(template, request)).mapTo[ProxiedResponse]
}

/** Factory for balancers over a set of host-proxy actors, with lifecycle cleanup. */
trait ProxyBalancerBuilder[T <: ProxyBalancer] {
  val allMiddleware : List[Middleware]
  def build(actors: Set[ActorRef]) : ProxyBalancer
  def teardown() : Unit
}

/** Builder used before any hosts are known; always yields FailBalancer. */
object EmptyBalancerBuilder extends ProxyBalancerBuilder[ProxyBalancer] {
  val allMiddleware : List[Middleware] = Nil
  def build(actors: Set[ActorRef]) : ProxyBalancer = {
    FailBalancer
  }
  def teardown() : Unit = {}
}

// todo: weighted service instances (and dynamic weighting)
// todo: retry safe gets (config option to enable) via something like http://doc.akka.io/docs/akka/snapshot/scala/routing.html#TailChoppingPool_and_TailChoppingGroup
/**
 * Builds weighted round-robin balancers: each host appears in the router's
 * routee list (weight / gcd-of-weights) times, so relative weights are honored
 * with the smallest possible routee list.  Keeps every router it created so
 * teardown() can stop them without touching the host proxies themselves.
 */
class RoundRobinBalancerBuilder(val allMiddleware: List[Middleware], factory: ActorRefFactory, hostProxies: Map[ActorRef, WeightedProxyState])(implicit execContext: ExecutionContext) extends ProxyBalancerBuilder[AkkaBalancer] {
  var balancers : List[ActorRef] = Nil

  // https://en.wikipedia.org/wiki/Euclidean_algorithm#Implementations
  // nb: gcd is associative, so we're safe to `reduce` the results
  @tailrec
  private def gcd(a: Int, b: Int) : Int =
    if (b == 0) {
      a
    } else {
      gcd(b, a % b)
    }

  def build(actors: Set[ActorRef]) : AkkaBalancer = {
    // todo: refactor this out to somewhere common when we have other balancer types
    val actorWeight = actors.map(hostProxies(_).weight)
    val totalWeight = actorWeight.sum
    val group = if (totalWeight == 0) {
      // All weights zero: fall back to plain (unweighted) round robin.
      actors.toList
    } else {
      // Divide by the gcd of the non-zero weights to minimize routee count,
      // then shuffle so equal-weight hosts aren't always hit in the same order.
      val actorGCD = actorWeight.filter(_ != 0).reduceLeftOption(gcd).getOrElse(1)
      Random.shuffle(actors.toList.flatMap(a => List.fill(hostProxies(a).weight / actorGCD)(a)))
    }
    val balancer = factory.actorOf(RoundRobinGroup(group.map(_.path.toString)).props())
    balancers = balancer :: balancers

    new AkkaBalancer(balancer)
  }

  def teardown() = {
    for (balancer <- balancers) {
      // just stop the router, not the host proxy behind them
      balancer ! PoisonPill
    }
    balancers = Nil
  }
}
RetailMeNot/shield
src/main/scala/shield/proxying/ProxyBalancer.scala
Scala
mit
3,341
package demo
package components
package materialui

import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._

import scalacss.Defaults._

/**
 * Demo-site info panel describing the material-ui wrapper: supported version
 * and installation/usage pointers, rendered inside the shared InfoTemplate.
 */
object MuiInfo {

  // ScalaCSS styles local to this component.
  object Style extends StyleSheet.Inline {
    import dsl._
    val content = style(textAlign.center,
      fontSize(30.px),
      paddingTop(40.px))
  }

  // Stateless component (Unit props); the VDOM below is purely static content.
  val component = ReactComponentB[Unit]("MuiInfo")
    .render(P => {
      InfoTemplate(componentFilePath = "materialui/")(
        <.div(
          <.h3("Material-ui "),
          <.p("scalajs-react wrapper for ",
            RedLink("material-ui","http://material-ui.com/#/")
          ),
          <.div(
            <.h4("Supported Version :"),
            <.span("0.9.1")
          ),
          <.div(
            <.h4("How To Use :"),
            <.p("Follow the installation guide from :",RedLink("here","https://github.com/callemall/material-ui#installation"),
              <.br(),
              <.br(),
              "Configure material-ui context in u r top level component :" ,RedLink("example","https://github.com/chandu0101/scalajs-react-components/blob/master/demo/src/main/scala/chandu0101/scalajs/react/components/demo/pages/MuiPage.scala#L16")
            )
          )
        )
      )
    }).buildU

  def apply() = component()
}
tpdi/scalajs-react-components
demo/src/main/scala/demo/components/materialui/MuiInfo.scala
Scala
apache-2.0
1,237
package sangria.execution

import org.scalatest.{Matchers, WordSpec}
import sangria.parser.QueryParser
import sangria.schema._
import sangria.util.AwaitSupport

import scala.concurrent.Future
import scala.util.Success

import scala.concurrent.ExecutionContext.Implicits.global

/**
 * Verifies that `.map` on every resolve-action type (Value, FutureValue,
 * DeferredValue, DeferredFutureValue, UpdateCtx) transforms the resolved value
 * correctly, including when several maps are chained.
 */
class ActionMapSpec extends WordSpec with Matchers with AwaitSupport {
  case class Color(name: String)

  // Deferred value resolved later in a batch by ColorResolver.
  case class ColorDefer(num: Int) extends Deferred[String]

  // Resolves ColorDefer(n) to the string "[n + 45]".
  class ColorResolver extends DeferredResolver {
    override def resolve(deferred: List[Deferred[Any]]) = deferred map {
      case ColorDefer(num) =>
        Future.successful("[" + (num + 45) + "]")
    }
  }

  val ColorType = ObjectType("Color", fields[Unit, Color](
    Field("name", StringType, resolve = _.value.name)))

  // One field per action type / map-chain combination under test.
  val QueryType = ObjectType("Query", fields[Unit, Unit](
    Field("value", StringType,
      resolve = _ => Value("red").map("light-" + _)),
    Field("doubleMap", StringType,
      resolve = _ => Value("red").map("light-" + _).map(_ + "-color")),
    Field("future", StringType,
      resolve = _ => FutureValue(Future.successful("green")).map("light-" + _)),
    Field("futureDouble", ColorType,
      resolve = _ => FutureValue(Future.successful("green")).map("light-" + _).map(Color(_))),
    Field("futureTriple", StringType,
      resolve = _ => FutureValue(Future.successful("green")).map("light-" + _).map(Color(_)).map("super-" + _.name)),
    Field("deferred", StringType,
      resolve = _ => DeferredValue(ColorDefer(123)).map(x => x + 345)),
    Field("futureDeferred", StringType,
      resolve = _ => DeferredFutureValue(Future.successful(ColorDefer(34))).map(x => x + 56)),
    Field("futureDeferredDouble", StringType,
      resolve = _ => DeferredFutureValue(Future.successful(ColorDefer(34))).map(x => x + 576).map("Yay! " + _ + " +++")),
    Field("futureDeferredTriple", StringType,
      resolve = _ => DeferredFutureValue(Future.successful(ColorDefer(34))).map(x => x + 576).map(Color(_)).map(c => "Yay! " + c.name + " +++")),
    // UpdateCtx: the require() checks the pre-map deferred result, then the
    // map chain transforms the value that ends up in the response.
    Field("ctxUpdate", ColorType,
      resolve = ctx => UpdateCtx(DeferredFutureValue(Future.successful(ColorDefer(11)))){v => require(v == "[56]"); ctx.ctx}.map("!" + _ + "?").map(x => x + 576).map(Color(_)).map(c => "(" + c.name + ")").map(Color(_)))
  ))

  val schema = Schema(QueryType)

  "Actions when mapped" should {
    "transform values correctly" in {
      val Success(doc) = QueryParser.parse("""
        {
          value
          doubleMap
          future
          futureDouble {name}
          futureTriple
          deferred
          futureDeferred
          futureDeferredDouble
          futureDeferredTriple
          ctxUpdate {name}
        }
      """)

      Executor.execute(schema, doc, deferredResolver = new ColorResolver).await should be (Map(
        "data" -> Map(
          "value" -> "light-red",
          "doubleMap" -> "light-red-color",
          "future" -> "light-green",
          "futureDouble" -> Map("name" -> "light-green"),
          "futureTriple" -> "super-light-green",
          "deferred" -> "[168]345",
          "futureDeferred" -> "[79]56",
          "futureDeferredDouble" -> "Yay! [79]576 +++",
          "futureDeferredTriple" -> "Yay! [79]576 +++",
          "ctxUpdate" -> Map("name" -> "(![56]?576)")
        )))
    }
  }
}
narahari92/sangria
src/test/scala/sangria/execution/ActionMapSpec.scala
Scala
apache-2.0
3,307
package it.codingjam.lagioconda.ga

/**
 * Strategy for choosing where a mutation occurs within a chromosome.
 *
 * @note the parameter is presumably an upper bound (e.g. chromosome length)
 *       and the result an index within it — TODO confirm against implementations.
 */
trait MutationPointLike {

  def mutationPoint(int: Int): Int
}
coding-jam/lagioconda
common/src/main/scala/it/codingjam/lagioconda/MutationPointLike.scala
Scala
apache-2.0
100
package com.karasiq.shadowcloud.providers import com.karasiq.shadowcloud.crypto._ import com.karasiq.shadowcloud.model.crypto.{EncryptionMethod, HashingMethod, SignMethod} abstract class CryptoProvider extends ModuleProvider { type HashingPF = PartialFunction[HashingMethod, HashingModule] type EncryptionPF = PartialFunction[EncryptionMethod, EncryptionModule] type SignPF = PartialFunction[SignMethod, SignModule] def hashingAlgorithms: Set[String] = Set.empty def hashing: HashingPF = PartialFunction.empty def encryptionAlgorithms: Set[String] = Set.empty def encryption: EncryptionPF = PartialFunction.empty def signingAlgorithms: Set[String] = Set.empty def signing: SignPF = PartialFunction.empty }
Karasiq/shadowcloud
crypto/parent/src/main/scala/com/karasiq/shadowcloud/providers/CryptoProvider.scala
Scala
apache-2.0
731
/*
 * Scala (https://www.scala-lang.org)
 *
 * Copyright EPFL and Lightbend, Inc.
 *
 * Licensed under Apache License 2.0
 * (http://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package scala.collection
package immutable

import scala.Predef.{wrapString => _, assert}
import scala.collection.Stepper.EfficientSplit
import scala.collection.convert.impl.CharStringStepper
import scala.collection.mutable.{Builder, StringBuilder}

/**
 * This class serves as a wrapper augmenting `String`s with all the operations
 * found in indexed sequences.
 *
 * The difference between this class and `StringOps` is that calling transformer
 * methods such as `filter` and `map` will yield an object of type `WrappedString`
 * rather than a `String`.
 *
 * @param self a string contained within this wrapped string
 *
 * @define Coll `WrappedString`
 * @define coll wrapped string
 */
@SerialVersionUID(3L)
final class WrappedString(private val self: String) extends AbstractSeq[Char] with IndexedSeq[Char]
  with IndexedSeqOps[Char, IndexedSeq, WrappedString]
  with Serializable {

  def apply(i: Int): Char = self.charAt(i)

  override protected def fromSpecific(coll: scala.collection.IterableOnce[Char]): WrappedString = WrappedString.fromSpecific(coll)
  override protected def newSpecificBuilder: Builder[Char, WrappedString] = WrappedString.newBuilder
  override def empty: WrappedString = WrappedString.empty

  // Delegates to String.substring after clamping the bounds to [0, length].
  override def slice(from: Int, until: Int): WrappedString = {
    val start = if (from < 0) 0 else from
    if (until <= start || start >= self.length)
      return WrappedString.empty
    val end = if (until > length) length else until
    new WrappedString(self.substring(start, end))
  }
  override def length = self.length
  override def toString = self
  override def view: StringView = new StringView(self)

  // Char-shaped steppers iterate the string directly; any other requested
  // shape must be the reference shape, satisfied by boxing via AnyStepper.
  override def stepper[S <: Stepper[_]](implicit shape: StepperShape[Char, S]): S with EfficientSplit = {
    val st = new CharStringStepper(self, 0, self.length)
    val r =
      if (shape.shape == StepperShape.CharShape) st
      else {
        assert(shape.shape == StepperShape.ReferenceShape, s"unexpected StepperShape: $shape")
        AnyStepper.ofParIntStepper(st)
      }
    r.asInstanceOf[S with EfficientSplit]
  }

  // The overrides below fast-path the case where the argument is itself a
  // WrappedString, delegating to the corresponding java.lang.String method.
  override def startsWith[B >: Char](that: IterableOnce[B], offset: Int = 0): Boolean =
    that match {
      case s: WrappedString => self.startsWith(s.self, offset)
      case _ => super.startsWith(that, offset)
    }

  override def endsWith[B >: Char](that: collection.Iterable[B]): Boolean =
    that match {
      case s: WrappedString => self.endsWith(s.self)
      case _ => super.endsWith(that)
    }

  override def indexOf[B >: Char](elem: B, from: Int = 0): Int = elem match {
    case c: Char => self.indexOf(c, from)
    case _ => super.indexOf(elem, from)
  }

  override def lastIndexOf[B >: Char](elem: B, end: Int = length - 1): Int =
    elem match {
      case c: Char => self.lastIndexOf(c, end)
      case _ => super.lastIndexOf(elem, end)
    }

  override def copyToArray[B >: Char](xs: Array[B], start: Int): Int =
    copyToArray(xs, start, length)

  // When the destination is an Array[Char], use String.getChars for a bulk
  // copy; the (xs: Any) upcast enables the runtime array-type match.
  override def copyToArray[B >: Char](xs: Array[B], start: Int, len: Int): Int =
    (xs: Any) match {
      case chs: Array[Char] =>
        val copied = IterableOnce.elemsToCopyToArray(length, chs.length, start, len)
        self.getChars(0, copied, chs, start)
        copied
      case _ => super.copyToArray(xs, start, len)
    }

  override def appendedAll[B >: Char](suffix: IterableOnce[B]): IndexedSeq[B] =
    suffix match {
      case s: WrappedString => new WrappedString(self concat s.self)
      case _ => super.appendedAll(suffix)
    }

  override def sameElements[B >: Char](o: IterableOnce[B]) = o match {
    case s: WrappedString => self == s.self
    case _ => super.sameElements(o)
  }

  override protected[this] def className = "WrappedString"

  // Never truncate when rendering: show the whole string.
  override protected final def applyPreferredMaxLength: Int = Int.MaxValue

  // Two WrappedStrings are equal iff their underlying strings are equal;
  // other sequence types fall back to element-wise Seq equality.
  override def equals(other: Any): Boolean = other match {
    case that: WrappedString =>
      this.self == that.self
    case _ =>
      super.equals(other)
  }
}

/** A companion object for wrapped strings.
 */
@SerialVersionUID(3L)
object WrappedString extends SpecificIterableFactory[Char, WrappedString] {
  def fromSpecific(it: IterableOnce[Char]): WrappedString = {
    val b = newBuilder
    val s = it.knownSize
    // Pre-size the builder when the source reports a known size.
    if(s >= 0) b.sizeHint(s)
    b ++= it
    b.result()
  }
  val empty: WrappedString = new WrappedString("")
  def newBuilder: Builder[Char, WrappedString] =
    new StringBuilder().mapResult(x => new WrappedString(x))

  /** Zero-allocation accessor for the underlying String. */
  implicit class UnwrapOp(private val value: WrappedString) extends AnyVal {
    def unwrap: String = value.self
  }
}
martijnhoekstra/scala
src/library/scala/collection/immutable/WrappedString.scala
Scala
apache-2.0
4,958
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.execution.datasources.parquet

import org.apache.spark.sql.catalyst.expressions.{And, Attribute, AttributeReference, Expression, NamedExpression}
import org.apache.spark.sql.catalyst.planning.PhysicalOperation
import org.apache.spark.sql.catalyst.plans.logical.{Filter, LogicalPlan, Project}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.{ProjectionOverSchema, SelectedField}
import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{ArrayType, DataType, MapType, StructField, StructType}

/**
 * Prunes unnecessary Parquet columns given a [[PhysicalOperation]] over a
 * [[ParquetRelation]]. By "Parquet column", we mean a column as defined in the
 * Parquet format. In Spark SQL, a root-level Parquet column corresponds to a
 * SQL column, and a nested Parquet column corresponds to a [[StructField]].
 */
private[sql] object ParquetSchemaPruning extends Rule[LogicalPlan] {
  // Entry point: the rule is a no-op unless nested schema pruning is enabled.
  override def apply(plan: LogicalPlan): LogicalPlan =
    if (SQLConf.get.nestedSchemaPruningEnabled) {
      apply0(plan)
    } else {
      plan
    }

  private def apply0(plan: LogicalPlan): LogicalPlan =
    plan transformDown {
      case op @ PhysicalOperation(projects, filters,
          l @ LogicalRelation(hadoopFsRelation: HadoopFsRelation, _, _, _))
          if canPruneRelation(hadoopFsRelation) =>
        val (normalizedProjects, normalizedFilters) =
          normalizeAttributeRefNames(l, projects, filters)
        val requestedRootFields = identifyRootFields(normalizedProjects, normalizedFilters)

        // If requestedRootFields includes a nested field, continue. Otherwise,
        // return op
        if (requestedRootFields.exists { root: RootField => !root.derivedFromAtt }) {
          val dataSchema = hadoopFsRelation.dataSchema
          val prunedDataSchema = pruneDataSchema(dataSchema, requestedRootFields)

          // If the data schema is different from the pruned data schema, continue. Otherwise,
          // return op. We effect this comparison by counting the number of "leaf" fields in
          // each schemata, assuming the fields in prunedDataSchema are a subset of the fields
          // in dataSchema.
          if (countLeaves(dataSchema) > countLeaves(prunedDataSchema)) {
            val prunedParquetRelation =
              hadoopFsRelation.copy(dataSchema = prunedDataSchema)(hadoopFsRelation.sparkSession)

            val prunedRelation = buildPrunedRelation(l, prunedParquetRelation)
            val projectionOverSchema = ProjectionOverSchema(prunedDataSchema)

            buildNewProjection(normalizedProjects, normalizedFilters, prunedRelation,
              projectionOverSchema)
          } else {
            op
          }
        } else {
          op
        }
    }

  /**
   * Checks to see if the given relation is Parquet and can be pruned.
   */
  private def canPruneRelation(fsRelation: HadoopFsRelation) =
    fsRelation.fileFormat.isInstanceOf[ParquetFileFormat]

  /**
   * Normalizes the names of the attribute references in the given projects and filters to reflect
   * the names in the given logical relation. This makes it possible to compare attributes and
   * fields by name. Returns a tuple with the normalized projects and filters, respectively.
   */
  private def normalizeAttributeRefNames(
      logicalRelation: LogicalRelation,
      projects: Seq[NamedExpression],
      filters: Seq[Expression]): (Seq[NamedExpression], Seq[Expression]) = {
    // Map each output attribute's expression id to its canonical name in the relation.
    val normalizedAttNameMap = logicalRelation.output.map(att => (att.exprId, att.name)).toMap
    val normalizedProjects = projects.map(_.transform {
      case att: AttributeReference if normalizedAttNameMap.contains(att.exprId) =>
        att.withName(normalizedAttNameMap(att.exprId))
    }).map { case expr: NamedExpression => expr }
    val normalizedFilters = filters.map(_.transform {
      case att: AttributeReference if normalizedAttNameMap.contains(att.exprId) =>
        att.withName(normalizedAttNameMap(att.exprId))
    })
    (normalizedProjects, normalizedFilters)
  }

  /**
   * Returns the set of fields from the Parquet file that the query plan needs.
   */
  private def identifyRootFields(projects: Seq[NamedExpression], filters: Seq[Expression]) = {
    val projectionRootFields = projects.flatMap(getRootFields)
    val filterRootFields = filters.flatMap(getRootFields)
    (projectionRootFields ++ filterRootFields).distinct
  }

  /**
   * Builds the new output [[Project]] Spark SQL operator that has the pruned output relation.
   */
  private def buildNewProjection(
      projects: Seq[NamedExpression],
      filters: Seq[Expression],
      prunedRelation: LogicalRelation,
      projectionOverSchema: ProjectionOverSchema) = {
    // Construct a new target for our projection by rewriting and
    // including the original filters where available
    val projectionChild =
      if (filters.nonEmpty) {
        val projectedFilters = filters.map(_.transformDown {
          case projectionOverSchema(expr) => expr
        })
        val newFilterCondition = projectedFilters.reduce(And)
        Filter(newFilterCondition, prunedRelation)
      } else {
        prunedRelation
      }

    // Construct the new projections of our Project by
    // rewriting the original projections
    val newProjects = projects.map(_.transformDown {
      case projectionOverSchema(expr) => expr
    }).map { case expr: NamedExpression => expr }

    if (log.isDebugEnabled) {
      logDebug(s"New projects:\n${newProjects.map(_.treeString).mkString("\n")}")
    }

    Project(newProjects, projectionChild)
  }

  /**
   * Filters the schema from the given file by the requested fields.
   * Schema field ordering from the file is preserved.
   */
  private def pruneDataSchema(
      fileDataSchema: StructType,
      requestedRootFields: Seq[RootField]) = {
    // Merge the requested root fields into a single schema. Note the ordering of the fields
    // in the resulting schema may differ from their ordering in the logical relation's
    // original schema
    val mergedSchema = requestedRootFields
      .map { case RootField(field, _) => StructType(Array(field)) }
      .reduceLeft(_ merge _)
    val dataSchemaFieldNames = fileDataSchema.fieldNames.toSet
    val mergedDataSchema =
      StructType(mergedSchema.filter(f => dataSchemaFieldNames.contains(f.name)))
    // Sort the fields of mergedDataSchema according to their order in dataSchema,
    // recursively. This makes mergedDataSchema a pruned schema of dataSchema
    sortLeftFieldsByRight(mergedDataSchema, fileDataSchema).asInstanceOf[StructType]
  }

  /**
   * Builds a pruned logical relation from the output of the output relation and the schema of the
   * pruned base relation.
   */
  private def buildPrunedRelation(
      outputRelation: LogicalRelation,
      prunedBaseRelation: HadoopFsRelation) = {
    // We need to replace the expression ids of the pruned relation output attributes
    // with the expression ids of the original relation output attributes so that
    // references to the original relation's output are not broken
    val outputIdMap = outputRelation.output.map(att => (att.name, att.exprId)).toMap
    val prunedRelationOutput =
      prunedBaseRelation
        .schema
        .toAttributes
        .map {
          case att if outputIdMap.contains(att.name) =>
            att.withExprId(outputIdMap(att.name))
          case att => att
        }
    outputRelation.copy(relation = prunedBaseRelation, output = prunedRelationOutput)
  }

  /**
   * Gets the root (aka top-level, no-parent) [[StructField]]s for the given [[Expression]].
   * When expr is an [[Attribute]], construct a field around it and indicate that that
   * field was derived from an attribute.
   */
  private def getRootFields(expr: Expression): Seq[RootField] = {
    expr match {
      case att: Attribute =>
        RootField(StructField(att.name, att.dataType, att.nullable), derivedFromAtt = true) :: Nil
      case SelectedField(field) => RootField(field, derivedFromAtt = false) :: Nil
      case _ =>
        expr.children.flatMap(getRootFields)
    }
  }

  /**
   * Counts the "leaf" fields of the given dataType. Informally, this is the
   * number of fields of non-complex data type in the tree representation of
   * [[DataType]].
   */
  private def countLeaves(dataType: DataType): Int = {
    dataType match {
      case array: ArrayType => countLeaves(array.elementType)
      case map: MapType => countLeaves(map.keyType) + countLeaves(map.valueType)
      case struct: StructType =>
        struct.map(field => countLeaves(field.dataType)).sum
      case _ => 1
    }
  }

  /**
   * Sorts the fields and descendant fields of structs in left according to their order in
   * right. This function assumes that the fields of left are a subset of the fields of
   * right, recursively. That is, left is a "subschema" of right, ignoring order of
   * fields.
   */
  private def sortLeftFieldsByRight(left: DataType, right: DataType): DataType =
    (left, right) match {
      case (ArrayType(leftElementType, containsNull), ArrayType(rightElementType, _)) =>
        ArrayType(
          sortLeftFieldsByRight(leftElementType, rightElementType),
          containsNull)
      case (MapType(leftKeyType, leftValueType, containsNull),
          MapType(rightKeyType, rightValueType, _)) =>
        MapType(
          sortLeftFieldsByRight(leftKeyType, rightKeyType),
          sortLeftFieldsByRight(leftValueType, rightValueType),
          containsNull)
      case (leftStruct: StructType, rightStruct: StructType) =>
        // Keep only right's fields that appear in left, preserving right's order.
        val filteredRightFieldNames = rightStruct.fieldNames.filter(leftStruct.fieldNames.contains)
        val sortedLeftFields = filteredRightFieldNames.map { fieldName =>
          val leftFieldType = leftStruct(fieldName).dataType
          val rightFieldType = rightStruct(fieldName).dataType
          val sortedLeftFieldType = sortLeftFieldsByRight(leftFieldType, rightFieldType)
          StructField(fieldName, sortedLeftFieldType)
        }
        StructType(sortedLeftFields)
      case _ => left
    }

  /**
   * A "root" schema field (aka top-level, no-parent) and whether it was derived from
   * an attribute or had a proper child.
   */
  private case class RootField(field: StructField, derivedFromAtt: Boolean)
}
sahilTakiar/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetSchemaPruning.scala
Scala
apache-2.0
11,251
package org.miszkiewicz import org.miszkiewicz.model._ import org.miszkiewicz.model.smtpapi.{SmtpApi, Templates} import org.specs2.mutable.Specification import scala.concurrent.Await import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ class SendGridSpec extends Specification { val sendGrid: Option[SendGrid] = sys.props.get("sendgrid.test") map { s => val (login, password) = splitToTuple(s) new SendGrid(UserCredentials(login, password)) } "SendGrid" should { "send email" in { lazy val sendMail = { val smtpApi = SmtpApi( substitutions = Map("-link-" -> Seq("https://github.com/dmiszkiewicz")), filters = Seq(Templates(true, "e317f565-ee21-4f2d-85a1-b3eaf775896e")) ) val sendGridEmail = Email( html = Some("Look at my GitHub: "), subject = Some("Look at my GitHub!"), from = Some("dominik223@gmail.com"), fromName = Some("Dominik Miszkiewicz"), to = Seq("dominik223@gmail.com"), smtpAPI = Some(smtpApi) ) val response = Await.result(sendGrid.get.send(sendGridEmail), 20 seconds) response.code shouldEqual 200 } sendGrid match { case Some(_) => sendMail.toResult case None => sendMail.pendingUntilFixed("No credentials found for SendGrid") } } } private def splitToTuple(s: String): (String, String) = { s.split(":") match { case Array(login: String, password: String) => (login, password) case _ => throw new Exception() } } }
dmiszkiewicz/sendgrid-scala
src/test/scala/org/miszkiewicz/SendGridSpec.scala
Scala
mit
1,620
package com.karasiq.nanoboard.api object NanoboardMessageData { val NO_POW = Array.fill[Byte](128)(0) val NO_SIGNATURE = Array.fill[Byte](64)(0) } case class NanoboardMessageData(containerId: Option[Long], parent: Option[String], hash: String, text: String, answers: Int, pow: Array[Byte] = NanoboardMessageData.NO_POW, signature: Array[Byte] = NanoboardMessageData.NO_SIGNATURE) { def isSigned: Boolean = { !signature.sameElements(NanoboardMessageData.NO_SIGNATURE) } def isCategory: Boolean = { parent.isEmpty } }
Karasiq/nanoboard
shared/shared/src/main/scala/com/karasiq/nanoboard/api/NanoboardMessageData.scala
Scala
apache-2.0
539
/* * Copyright (c) 2011-14 Miles Sabin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package shapeless import org.junit.Test import org.junit.Assert._ import test._ import testutil._ class HListTests { import nat._ import poly._ import syntax.std.traversable._ import syntax.singleton._ import syntax.typeable._ import ops.hlist._ import ops.record._ type SI = Set[Int] :: HNil type OI = Option[Int] :: HNil type SISS = Set[Int] :: Set[String] :: HNil type OIOS = Option[Int] :: Option[String] :: HNil type ISII = Int :: String :: Int :: Int :: HNil type IIII = Int :: Int :: Int :: Int :: HNil type IYII = Int :: Any :: Int :: Int :: HNil type OIOSOIOI = Option[Int] :: Option[String] :: Option[Int] :: Option[Int] :: HNil type SISSSISI = Set[Int] :: Set[String] :: Set[Int] :: Set[Int] :: HNil type BBBB = Boolean :: Boolean :: Boolean :: Boolean :: HNil trait Fruit case class Apple() extends Fruit case class Pear() extends Fruit case class Banana() extends Fruit type PWS = Product with Serializable with Fruit type YYYY = Any :: Any :: Any :: Any :: HNil type FF = Fruit :: Fruit :: HNil type AP = Apple :: Pear :: HNil type BP = Banana :: Pear :: HNil type AF = Apple :: Fruit :: HNil type FFFF = Fruit :: Fruit :: Fruit :: Fruit :: HNil type APAP = Apple :: Pear :: Apple :: Pear :: HNil type APBP = Apple :: Pear :: Banana :: Pear :: HNil type APB = Apple :: Pear :: Banana :: HNil type PBPA = Pear :: Banana :: Pear :: Apple :: HNil type PABP = Pear :: Apple :: Banana :: Pear :: HNil val a 
: Apple = Apple() val p : Pear = Pear() val b : Banana = Banana() val f : Fruit = new Fruit {} val ap : AP = a :: p :: HNil val bp : BP = b :: p :: HNil val apap : APAP = a :: p :: a :: p :: HNil val apbp : APBP = a :: p :: b :: p :: HNil val apapList = a :: p :: a :: p :: Nil val apbpList = a :: p :: b :: p :: Nil val apapArray = Array(a, p, a, p) val apbpArray = Array(a, p, b, p) trait Ctv[-T] type CICSCICICD = Ctv[Int] :: Ctv[String] :: Ctv[Int] :: Ctv[Int] :: Ctv[Double] :: HNil val ci: Ctv[Int] = new Ctv[Int] {} val cs: Ctv[String] = new Ctv[String] {} val cd: Ctv[Double] = new Ctv[Double] {} val cicscicicdList = ci :: cs :: ci :: ci :: cd :: Nil val cicscicicdArray = Array(ci, cs, ci, ci, cd) val cicscicicd: CICSCICICD = ci :: cs :: ci :: ci :: cd :: HNil trait M[T] type MIMSMIMIMD = M[Int] :: M[String] :: M[Int] :: M[Int] :: M[Double] :: HNil val mi: M[Int] = new M[Int] {} val ms: M[String] = new M[String] {} val md: M[Double] = new M[Double] {} val mimsmimimdList = mi :: ms :: mi :: mi :: md :: Nil val mimsmimimdArray = Array(mi, ms, mi, mi, md) val mimsmimimd: MIMSMIMIMD = mi :: ms :: mi :: mi :: md :: HNil import language.existentials val mExist: M[_] = new M[Double] {} type MIMSMIMEMD = M[Int] :: M[String] :: M[Int] :: M[_] :: M[Double] :: HNil val mimsmimemdList = mi :: ms :: mi :: mExist :: md :: Nil val mimsmimemdArray = Array[M[_]](mi, ms, mi, mExist, md) val mimsmimemd: MIMSMIMEMD = mi :: ms :: mi :: mExist :: md :: HNil trait M2[A,B] type M2IM2SM2IM2IM2D = M2[Int, Unit] :: M2[String, Unit] :: M2[Int, Unit] :: M2[Int, Unit] :: M2[Double, Unit] :: HNil val m2i: M2[Int, Unit] = new M2[Int, Unit] {} val m2s: M2[String, Unit] = new M2[String, Unit] {} val m2d: M2[Double, Unit] = new M2[Double, Unit] {} val m2im2sm2im2im2dList = m2i :: m2s :: m2i :: m2i :: m2d :: Nil val m2im2sm2im2im2dArray = Array(m2i, m2s, m2i, m2i, m2d) val m2im2sm2im2im2d: M2IM2SM2IM2IM2D = m2i :: m2s :: m2i :: m2i :: m2d :: HNil val m2iExist: M2[Int, _] = new M2[Int, Unit] {} val 
m2sExist: M2[String, _] = new M2[String, Unit] {} val m2dExist: M2[Double, _] = new M2[Double, Unit] {} type M2EIM2ESM2EIM2EEM2ED = M2[Int, _] :: M2[String, _] :: M2[Int, _] :: M2[Int, _] :: M2[Double, _] :: HNil val m2eim2esm2eim2eem2edList = m2iExist :: m2sExist :: m2iExist :: m2iExist :: m2dExist :: Nil val m2eim2esm2eim2eem2edArray = Array(m2iExist, m2sExist, m2iExist, m2iExist, m2dExist) val m2eim2esm2eim2eem2ed: M2EIM2ESM2EIM2EEM2ED = m2iExist :: m2sExist :: m2iExist :: m2iExist :: m2dExist :: HNil object mkString extends (Any -> String)(_.toString) object fruit extends (Fruit -> Fruit)(f => f) object incInt extends (Int >-> Int)(_ + 1) object extendedChoose extends LiftU(choose) @Test def testBasics { val l = 1 :: "foo" :: 2.0 :: HNil val r1 = l.head assertTypedEquals[Int](1, r1) val r2 = l.tail.head assertTypedEquals[String]("foo", r2) assertEquals(2.0, l.tail.tail.head, Double.MinPositiveValue) illTyped(""" HNil.head """) illTyped(""" HNil.tail """) illTyped(""" l.tail.tail.tail.head """) } @Test def testMap { implicitly[Mapper.Aux[choose.type, HNil, HNil]] implicitly[choose.Case[Set[Int]]] implicitly[Mapper.Aux[choose.type, Set[Int] :: HNil, Option[Int] :: HNil]] val s1 = Set(1) :: HNil val o1 = s1 map choose assertTypedEquals[OI](Option(1) :: HNil, o1) val s2 = Set(1) :: Set("foo") :: HNil val o2 = s2 map choose assertTypedEquals[OIOS](Option(1) :: Option("foo") :: HNil, o2) val l1 = 1 :: "foo" :: 2 :: 3 :: HNil val l2 = l1 map singleton assertTypedEquals[SISSSISI](Set(1) :: Set("foo") :: Set(2) :: Set(3) :: HNil, l2) val l3 = l1 map option assertTypedEquals[OIOSOIOI](Option(1) :: Option("foo") :: Option(2) :: Option(3) :: HNil, l3) val l4 = Option(1) :: Option("foo") :: Option(2) :: Option(3) :: HNil val l5 = l4 map get assertTypedEquals[ISII](1 :: "foo" :: 2 :: 3 :: HNil, l5) typed[Int](l5.head) typed[String](l5.tail.head) typed[Int](l5.tail.tail.head) typed[Int](l5.tail.tail.tail.head) val l6 = l1 map identity assertTypedEquals[ISII](1 :: "foo" :: 2 
:: 3 :: HNil, l6) val l7 = l4 map isDefined assertTypedEquals[BBBB](true :: true :: true :: true :: HNil, l7) val l8 = 23 :: "foo" :: true :: HNil val l9 = l8 map mkString assertTypedEquals[String :: String :: String :: HNil]("23" :: "foo" :: "true" :: HNil, l9) val l10 = apbp map fruit assertTypedEquals[Fruit :: Fruit :: Fruit :: Fruit :: HNil](apbp, l10) val l11 = apbp map mkString assertTypedEquals[String :: String :: String :: String :: HNil]("Apple()" :: "Pear()" :: "Banana()" :: "Pear()" :: HNil, l11) } object dup extends Poly1 { implicit def default[T] = at[T](t => t :: t :: HNil) } @Test def testFlatMap { val l1 = 1 :: "foo" :: true :: HNil val l2 = l1 flatMap dup assertTypedEquals[Int :: Int :: String :: String :: Boolean :: Boolean :: HNil]( 1 :: 1 :: "foo" :: "foo" :: true :: true :: HNil, l2) val l3 = (1 :: "foo" :: HNil) :: (HNil : HNil) :: (2.0 :: true :: HNil) :: ("bar" :: HNil) :: HNil val l4 = l3 flatMap identity assertTypedEquals[Int :: String :: Double :: Boolean :: String :: HNil]( 1 :: "foo" :: 2.0 :: true :: "bar" :: HNil, l4) val l5 = 23 :: "foo" :: 7 :: true :: 0 :: HNil val l6 = l5 flatMap incInt assertTypedEquals[Int :: Int :: Int :: HNil](24 :: 8 :: 1 :: HNil, l6) val l7 = Set(23) :: "foo" :: Set(true) :: 23 :: HNil val l8 = l7 flatMap extendedChoose assertTypedEquals[Option[Int] :: Option[Boolean] :: HNil](Option(23) :: Option(true) :: HNil, l8) } @Test def testConformance { val l1 = 1 :: "foo" :: 2 :: 3 :: HNil assertTypedEquals[Any :: AnyRef :: Any :: Any :: HNil](1 :: "foo" :: 2 :: 3 :: HNil, l1) val ap = a :: p :: HNil typed[AP](ap) val bp = b :: p :: HNil typed[BP](bp) val apap = a :: p :: a :: p :: HNil typed[APAP](apap) val apbp = a :: p :: b :: p :: HNil typed[APBP](apbp) val ffff : FFFF = apap typed[FFFF](ffff) } @Test def testLength { val l0 = HNil typed[Nat._0](l0.length) assertEquals(0, Nat toInt l0.length) val l1 = 1 :: "foo" :: 2 :: 3 :: HNil typed[Nat._4](l1.length) assertEquals(4, Nat toInt l1.length) val ap = a :: p :: 
HNil typed[Nat._2](ap.length) assertEquals(2, Nat toInt ap.length) val bp = b :: p :: HNil typed[Nat._2](bp.length) assertEquals(2, Nat toInt bp.length) val apap = a :: p :: a :: p :: HNil typed[Nat._4](apap.length) assertEquals(4, Nat toInt apap.length) val apbp = a :: p :: b :: p :: HNil typed[Nat._4](apbp.length) assertEquals(4, Nat toInt apbp.length) val ffff : FFFF = apap typed[Nat._4](ffff.length) assertEquals(4, Nat toInt ffff.length) } @Test def testInitLast { val lp = apbp.last assertTypedEquals[Pear](p, lp) val iapb = apbp.init assertTypedEquals[APB](a :: p :: b :: HNil, iapb) } @Test def testAlign { type M0 = Int :: String :: Boolean :: HNil type M1 = Int :: Boolean :: String :: HNil type M2 = String :: Int :: Boolean :: HNil type M3 = String :: Boolean :: Int :: HNil type M4 = Boolean :: Int :: String :: HNil type M5 = Boolean :: String :: Int :: HNil val m0 = 13 :: "bar" :: false :: HNil val m1 = 13 :: false :: "bar" :: HNil val m2 = "bar" :: 13 :: false :: HNil val m3 = "bar" :: false :: 13 :: HNil val m4 = false :: 13 :: "bar" :: HNil val m5 = false :: "bar" :: 13 :: HNil val l = 23 :: "foo" :: true :: HNil val a0 = l.align(m0) assertTypedEquals[M0](23 :: "foo" :: true :: HNil, a0) val a1 = l.align(m1) assertTypedEquals[M1](23 :: true :: "foo" :: HNil, a1) val a2 = l.align(m2) assertTypedEquals[M2]("foo" :: 23 :: true :: HNil, a2) val a3 = l.align(m3) assertTypedEquals[M3]("foo" :: true :: 23 :: HNil, a3) val a4 = l.align(m4) assertTypedEquals[M4](true :: 23 :: "foo" :: HNil, a4) val a5 = l.align(m5) assertTypedEquals[M5](true :: "foo" :: 23 :: HNil, a5) val b0 = l.align[M0] assertTypedEquals[M0](23 :: "foo" :: true :: HNil, b0) val b1 = l.align[M1] assertTypedEquals[M1](23 :: true :: "foo" :: HNil, b1) val b2 = l.align[M2] assertTypedEquals[M2]("foo" :: 23 :: true :: HNil, b2) val b3 = l.align[M3] assertTypedEquals[M3]("foo" :: true :: 23 :: HNil, b3) val b4 = l.align[M4] assertTypedEquals[M4](true :: 23 :: "foo" :: HNil, b4) val b5 = l.align[M5] 
assertTypedEquals[M5](true :: "foo" :: 23 :: HNil, b5) val c0 = (HNil: HNil).align[HNil] typed[HNil](c0) val c1 = (23 :: HNil).align[Int :: HNil] typed[Int :: HNil](c1) val c2 = (23 :: "foo" :: HNil).align[String :: Int :: HNil] typed[String :: Int :: HNil](c2) illTyped(""" (HNil: HNil).align[Int :: HNil] """) illTyped(""" (23 :: HNil).align[String :: HNil] """) illTyped(""" (23 :: "foo" :: HNil).align[String :: String :: HNil] """) } @Test def testReverse { val pbpa = apbp.reverse assertTypedEquals[PBPA](p :: b :: p :: a :: HNil, pbpa) val al = a :: HNil val ral = al.reverse assertTypedEquals[Apple :: HNil](a :: HNil, ral) } @Test def testPrepend { val apbp2 = ap ::: bp assertTypedEquals[APBP](a :: p :: b :: p :: HNil, apbp2) typed[Apple](apbp2.head) typed[Pear](apbp2.tail.head) typed[Banana](apbp2.tail.tail.head) typed[Pear](apbp2.tail.tail.tail.head) val pabp = ap reverse_::: bp assertTypedEquals[PABP](p :: a :: b :: p :: HNil, pabp) // must compile without requiring an implicit Prepend def prependWithHNil[L <: HList](list: L) = HNil ::: list def prependToHNil[L <: HList](list: L) = list ::: HNil val r1 = prependWithHNil(ap) assertTypedEquals[AP](ap, r1) val r2 = prependToHNil(ap) assertTypedEquals[AP](ap, r2) val r3 = HNil ::: HNil assertTypedEquals[HNil](HNil, r3) // must compile without requiring an implicit ReversePrepend def reversePrependWithHNil[L <: HList](list: L) = HNil reverse_::: list def reversePrependToHNil[L <: HList: Reverse](list: L) = list reverse_::: HNil val r4 = reversePrependWithHNil(ap) assertTypedEquals[AP](ap, r4) val r5 = reversePrependToHNil(ap) assertTypedEquals[Pear :: Apple :: HNil](ap.reverse, r5) val r6 = HNil reverse_::: HNil assertTypedEquals[HNil](HNil, r6) } @Test def testToSizedList { def equalInferredTypes[A,B](a: A, b: B)(implicit eq: A =:= B) {} val hnil = HNil val snil = hnil.toSized[List] assertEquals(Nat toInt hnil.length, snil.length) val expectedUnsized = List.empty[Nothing] equalInferredTypes(expectedUnsized, 
snil.unsized) assertEquals(expectedUnsized, snil.unsized) implicitly[ToSized.Aux[HNil, List, Nothing, _0]] implicitly[ToSized.Aux[HNil, List, Int, _0]] { implicitly[ToSized.Aux[M[Int] :: HNil, List, M[Int], _1]] implicitly[ToSized.Aux[M[Int] :: HNil, List, M[_], _1]] } val sizedApap = apap.toSized[List] assertEquals(Nat toInt apap.length, sizedApap.length) equalInferredTypes(apapList, sizedApap.unsized) assertEquals(apapList, sizedApap.unsized) val sizedApbp = apbp.toSized[List] assertEquals(Nat toInt apbp.length, sizedApbp.length) equalInferredTypes(apbpList, sizedApbp.unsized) assertEquals(apbpList, sizedApbp.unsized) val sizedCicscicicd = cicscicicd.toSized[List] assertEquals(Nat toInt cicscicicd.length, sizedCicscicicd.length) equalInferredTypes(cicscicicdList, sizedCicscicicd.unsized) assertEquals(cicscicicdList, sizedCicscicicd.unsized) val sizedMimsmimimd = mimsmimimd.toSized[List] assertEquals(Nat toInt mimsmimimd.length, sizedMimsmimimd.length) equalInferredTypes(mimsmimimdList, sizedMimsmimimd.unsized) assertEquals(mimsmimimdList, sizedMimsmimimd.unsized) val sizedMimsmimemd = mimsmimemd.toSized[List] assertEquals(Nat toInt mimsmimemd.length, sizedMimsmimemd.length) // equalInferredTypes(mimsmimemdList, sizedMimsmimemd.unsized) typed[List[M[_]]](sizedMimsmimemd.unsized) assertEquals(mimsmimemdList, sizedMimsmimemd.unsized) val sizedM2im2sm2im2im2d = m2im2sm2im2im2d.toSized[List] assertEquals(Nat toInt m2im2sm2im2im2d.length, sizedM2im2sm2im2im2d.length) equalInferredTypes(m2im2sm2im2im2dList, sizedM2im2sm2im2im2d.unsized) assertEquals(m2im2sm2im2im2dList, sizedM2im2sm2im2im2d.unsized) val sizedM2eim2esm2eim2eem2ed = m2eim2esm2eim2eem2ed.toSized[List] assertEquals(Nat toInt m2eim2esm2eim2eem2ed.length, sizedM2eim2esm2eim2eem2ed.length) // equalInferredTypes(m2eim2esm2eim2eem2edList, sizedM2eim2esm2eim2eem2ed.unsized) assertTypedEquals[List[M2[_ >: Double with Int with String, _]]]( m2eim2esm2eim2eem2edList, sizedM2eim2esm2eim2eem2ed.unsized) } @Test def 
testToSizedArray {
  // Verifies HList#toSized[Array]: the resulting Sized must report the HList's
  // static (Nat) length, and .unsized must yield the expected Array.
  // Fixtures (apap, apbp, cicscicicd, mimsmimimd, ... and their *Array
  // counterparts) are defined elsewhere in this file — presumably HLists of
  // Apple/Pear/Ctv/M/M2 values; confirm against the fixture declarations.

  // JUnit's assertArrayEquals needs Array[Object]; erase the element type.
  def assertArrayEquals2[T](arr1 : Array[T], arr2 : Array[T]) =
    assertArrayEquals(arr1.asInstanceOf[Array[Object]], arr2.asInstanceOf[Array[Object]])

  // Compiles only if the inferred types of both arguments are identical —
  // a compile-time assertion on type inference.
  def equalInferredTypes[A,B](a: A, b: B)(implicit eq: A =:= B) {}

  // Empty HList: zero-length Sized, empty Array[Nothing].
  val hnil = HNil
  val snil = hnil.toSized[Array]
  assertEquals(Nat toInt hnil.length, snil.length)
  val expectedUnsized = Array.empty[Nothing]
  equalInferredTypes(expectedUnsized, snil.unsized)
  assertArrayEquals2(expectedUnsized, snil.unsized)
  // ToSized for HNil resolves for any requested element type.
  implicitly[ToSized.Aux[HNil, Array, Nothing, _0]]
  implicitly[ToSized.Aux[HNil, Array, Int, _0]]

  val sizedApap = apap.toSized[Array]
  assertEquals(Nat toInt apap.length, sizedApap.length)
  equalInferredTypes(apapArray, sizedApap.unsized)
  assertArrayEquals2(apapArray, sizedApap.unsized)

  val sizedApbp = apbp.toSized[Array]
  assertEquals(Nat toInt apbp.length, sizedApbp.length)
  equalInferredTypes(apbpArray, sizedApbp.unsized)
  assertArrayEquals2(apbpArray, sizedApbp.unsized)

  val sizedCicscicicd = cicscicicd.toSized[Array]
  assertEquals(Nat toInt cicscicicd.length, sizedCicscicicd.length)
  equalInferredTypes(cicscicicdArray, sizedCicscicicd.unsized)
  assertArrayEquals2(cicscicicdArray, sizedCicscicicd.unsized)

  val sizedMimsmimimd = mimsmimimd.toSized[Array]
  assertEquals(Nat toInt mimsmimimd.length, sizedMimsmimimd.length)
  equalInferredTypes(mimsmimimdArray, sizedMimsmimimd.unsized)
  assertArrayEquals2(mimsmimimdArray, sizedMimsmimimd.unsized)

  // Existentially-typed elements: exact type equality cannot be proven
  // (original equalInferredTypes call left commented out), so only a
  // weaker typed[Array[M[_]]] check is made.
  val sizedMimsmimemd = mimsmimemd.toSized[Array]
  assertEquals(Nat toInt mimsmimemd.length, sizedMimsmimemd.length)
  // equalInferredTypes(mimsmimemdArray, sizedMimsmimemd.unsized)
  typed[Array[M[_]]](sizedMimsmimemd.unsized)
  assertArrayEquals2(mimsmimemdArray, sizedMimsmimemd.unsized)

  val sizedM2im2sm2im2im2d = m2im2sm2im2im2d.toSized[Array]
  assertEquals(Nat toInt m2im2sm2im2im2d.length, sizedM2im2sm2im2im2d.length)
  equalInferredTypes(m2im2sm2im2im2dArray, sizedM2im2sm2im2im2d.unsized)
  assertArrayEquals2(m2im2sm2im2im2dArray, sizedM2im2sm2im2im2d.unsized)

  // Two-parameter type constructor with existentials: again only a typed
  // check; elements are upcast to Any so assertArrayEquals2 can compare.
  val sizedM2eim2esm2eim2eem2ed = m2eim2esm2eim2eem2ed.toSized[Array]
  assertEquals(Nat toInt m2eim2esm2eim2eem2ed.length, sizedM2eim2esm2eim2eem2ed.length)
  // equalInferredTypes(m2eim2esm2eim2eem2edArray, sizedM2eim2esm2eim2eem2ed.unsized)
  typed[Array[M2[_ >: Double with Int with String, _]]](sizedM2eim2esm2eim2eem2ed.unsized)
  assertArrayEquals2(m2eim2esm2eim2eem2edArray.map(x => x: Any),
    sizedM2eim2esm2eim2eem2ed.unsized.map(x => x: Any))
}

@Test
def testUnifier {
  // Exercises the Lub and Unifier type classes: unifying element types of
  // an HList to their least upper bound (e.g. Apple/Pear -> Fruit).

  // Pairs both arguments up at their computed LUB type L.
  def lub[X, Y, L](x : X, y : Y)(implicit lb : Lub[X, Y, L]) : (L, L) = (lb.left(x), lb.right(y))

  // Pairwise LUBs of the a (Apple), p (Pear), f (Fruit) fixtures.
  val u21 = lub(a, a)
  typed[(Apple, Apple)](u21)
  val u22 = lub(a, p)
  typed[(Fruit, Fruit)](u22)
  val u23 = lub(a, f)
  typed[(Fruit, Fruit)](u23)
  val u24 = lub(p, a)
  typed[(Fruit, Fruit)](u24)
  val u25 = lub(p, p)
  typed[(Pear, Pear)](u25)
  val u26 = lub(f, f)
  typed[(Fruit, Fruit)](u26)
  val u27 = lub(f, a)
  typed[(Fruit, Fruit)](u27)
  val u28 = lub(f, p)
  typed[(Fruit, Fruit)](u28)
  val u29 = lub(f, f)
  typed[(Fruit, Fruit)](u29)

  // Lub instances must resolve for whole HList types, element-wise.
  implicitly[Lub[HNil, HNil, HNil]]
  implicitly[Lub[Apple :: HNil, Apple :: HNil, Apple :: HNil]]
  implicitly[Lub[Fruit :: Pear :: HNil, Fruit :: Fruit :: HNil, Fruit :: Fruit :: HNil]]
  implicitly[Lub[Apple :: Pear :: HNil, Pear :: Apple :: HNil, Fruit :: Fruit :: HNil]]
  implicitly[Lub[ISII, IIII, IYII]]

  // lub over HList values.
  val u31 = lub(HNil, HNil)
  typed[(HNil, HNil)](u31)
  val u32 = lub(a :: HNil, a :: HNil)
  typed[(Apple :: HNil, Apple :: HNil)](u32)
  val u33 = lub(f :: p :: HNil, f :: f :: HNil)
  typed[(Fruit :: Fruit :: HNil, Fruit :: Fruit :: HNil)](u33)
  val u34 = lub(a :: p :: HNil, p :: a :: HNil)
  typed[(Fruit :: Fruit :: HNil, Fruit :: Fruit :: HNil)](u34)
  // Int vs String unifies to Any.
  val u35 = lub(1 :: "two" :: 3 :: 4 :: HNil, 1 :: 2 :: 3 :: 4 :: HNil)
  typed[(Int :: Any :: Int :: Int :: HNil, Int :: Any :: Int :: Int :: HNil)](u35)

  // Unifier instances resolve with the expected unified result types.
  implicitly[Unifier.Aux[Apple :: HNil, Apple :: HNil]]
  implicitly[Unifier.Aux[Fruit :: Pear :: HNil, Fruit :: Fruit :: HNil]]
  implicitly[Unifier.Aux[Apple :: Pear :: HNil, Fruit :: Fruit :: HNil]]
  implicitly[Unifier.Aux[Int :: String :: Int :: Int :: HNil, YYYY]]

  // Applying a Unifier value directly agrees with the .unify extension.
  val uapap = implicitly[Unifier.Aux[Apple :: Pear :: Apple :: Pear :: HNil, FFFF]]
  val unified1 = uapap(apap)
  typed[FFFF](unified1)
  val unified2 = apap.unify
  typed[FFFF](unified2)

  // cast recovers the precise element types only when they actually match.
  val ununified1 = unified2.cast[APAP]
  assertTrue(ununified1.isDefined)
  typed[APAP](ununified1.get)
  val ununified2 = unified2.cast[APBP]
  assertFalse(ununified2.isDefined)
  typed[Option[APBP]](ununified2)

  def getUnifier[L <: HList, Out <: HList](l : L)(implicit u : Unifier.Aux[L, Out]) = u

  val u2 = getUnifier(a :: HNil)
  typed[Unifier.Aux[Apple :: HNil, Apple :: HNil]](u2)
  val u3 = getUnifier(a :: a :: HNil)
  typed[Unifier.Aux[Apple :: Apple :: HNil, Apple :: Apple :: HNil]](u3)
  val u4 = getUnifier(a :: a :: a :: HNil)
  typed[Unifier.Aux[Apple :: Apple :: Apple :: HNil, Apple :: Apple :: Apple :: HNil]](u4)
  val u5 = getUnifier(a :: a :: a :: a :: HNil)
  typed[Unifier.Aux[Apple :: Apple :: Apple :: Apple :: HNil, Apple :: Apple :: Apple :: Apple :: HNil]](u5)
  val u6 = getUnifier(a :: p :: HNil)
  //typed[Unifier.Aux[Apple :: Pear :: HNil, Fruit :: Fruit :: HNil]](u6)
  val u7 = getUnifier(a :: f :: HNil)
  typed[Unifier.Aux[Apple :: Fruit :: HNil, Fruit :: Fruit :: HNil]](u7)
  val u8 = getUnifier(f :: a :: HNil)
  typed[Unifier.Aux[Fruit :: Apple :: HNil, Fruit :: Fruit :: HNil]](u8)
  val u9a = getUnifier(a :: f :: HNil)
  typed[Unifier.Aux[Apple :: Fruit :: HNil, FF]](u9a)
  val u9b = getUnifier(a :: p :: HNil)
  typed[Unifier.Aux[Apple :: Pear :: HNil, PWS :: PWS :: HNil]](u9b)
  val u10 = getUnifier(apap)
  typed[Unifier.Aux[APAP, PWS :: PWS :: PWS :: PWS :: HNil]](u10)
  val u11 = getUnifier(apbp)
  typed[Unifier.Aux[APBP, PWS :: PWS :: PWS :: PWS :: HNil]](u11)

  // Invariant type constructors: two elements unify via an existential bound.
  val invar1 = Set(23) :: Set("foo") :: HNil
  val uinvar1 = invar1.unify
  typed[Set[_ >: Int with String] :: Set[_ >: Int with String] :: HNil](uinvar1)

  // Unifying three or more elements which have an invariant outer type
  // constructor and differing type arguments fails, presumably due to a
  // failure to compute a sensible LUB.
  //val invar2 = Set(23) :: Set("foo") :: Set(true) :: HNil
  //val uinvar2 = invar.unify
}

@Test
def testSubtypeUnifier {
  // unifySubtypes[B] widens only those elements that are subtypes of B,
  // leaving other elements (and B itself) untouched.
  val fruits : Apple :: Pear :: Fruit :: HNil = a :: p :: f :: HNil
  typed[Fruit :: Fruit :: Fruit :: HNil](fruits.unifySubtypes[Fruit])
  typed[Apple :: Pear :: Fruit :: HNil](fruits.unifySubtypes[Apple])
  assertEquals(a :: p :: f :: HNil, fruits.unifySubtypes[Fruit].filter[Fruit])

  // Non-Fruit elements (String) are not affected; filter[Fruit] only finds
  // elements statically typed as Fruit, hence the unifySubtypes first.
  val stuff : Apple :: String :: Pear :: HNil = a :: "foo" :: p :: HNil
  typed[Fruit :: String :: Fruit :: HNil](stuff.unifySubtypes[Fruit])
  assertEquals(HNil, stuff.filter[Fruit])
  assertEquals(a :: p :: HNil, stuff.unifySubtypes[Fruit].filter[Fruit])
}

@Test
def testToTraversableList {
  // HList#to[List] via the ToTraversable type class: element type is the
  // computed LUB of the HList's element types.
  val r1 = HNil.to[List]
  assertTypedEquals[List[Nothing]](Nil, r1)

  ToList[HNil, Nothing]
  ToList[HNil, Int]

  {
    // Resolves both at the precise element type and at an existential.
    implicitly[ToTraversable.Aux[M[Int] :: HNil, List, M[Int]]]
    implicitly[ToTraversable.Aux[M[Int] :: HNil, List, M[_]]]
  }

  val r2 = apap.to[List]
  assertTypedEquals[List[Fruit]](List(a, p, a, p), r2)

  val fruits2 = apbp.to[List]
  assertTypedEquals[List[Fruit]](List(a, p, b, p), fruits2)

  // toHList is the inverse direction: Some only when length and types line up.
  val fruits3 = fruits2.toHList[APBP]
  assertTrue(fruits3.isDefined)
  assertTypedEquals[APBP](apbp, fruits3.get)

  val stuff = (1 :: "foo" :: 2 :: 3 :: HNil).to[List]
  assertTypedEquals[List[Any]](List(1, "foo", 2, 3), stuff)

  val stuff2 = stuff.toHList[ISII]
  assertTrue(stuff2.isDefined)
  assertTypedEquals[ISII](1 :: "foo" :: 2 :: 3 :: HNil, stuff2.get)

  val l4 = Option(1) :: Option("foo") :: Option(2) :: Option(3) :: HNil
  val l7 = l4 map isDefined
  assertTypedEquals[BBBB](true :: true :: true :: true :: HNil, l7)

  val ll2 = l7.to[List]
  typed[Boolean](ll2.head)

  val moreStuff = (a :: "foo" :: p :: HNil).to[List]
  typed[List[Any]](moreStuff)

  // Compiles only if the inferred types of both arguments are identical.
  def equalInferredTypes[A,B](a: A, b: B)(implicit eq: A =:= B) {}

  val ctv = cicscicicd.to[List]
  equalInferredTypes(cicscicicdList, ctv)
  assertTypedEquals[List[Ctv[Int with String with Double]]](cicscicicdList, ctv)

  val m = mimsmimimd.to[List]
  equalInferredTypes(mimsmimimdList, m)
  assertTypedEquals[List[M[_ >: Int with String with Double]]](mimsmimimdList, m)

  // Existential element types: exact type equality unprovable, weaker check.
  val mWithEx = mimsmimemd.to[List]
  // equalType(mimsmimemdList, mWithEx)
  assertTypedEquals[List[M[_]]](mimsmimemdList, mWithEx)

  val m2 = m2im2sm2im2im2d.to[List]
  equalInferredTypes(m2im2sm2im2im2dList, m2)
  assertTypedEquals[List[M2[_ >: Int with String with Double, Unit]]](m2im2sm2im2im2dList, m2)

  val m2e = m2eim2esm2eim2eem2ed.to[List]
  // equalType(m2eim2esm2eim2eem2edList, m2e)
  assertTypedEquals[List[M2[_ >: Int with String with Double, _]]](m2eim2esm2eim2eem2edList, m2e)
}

@Test
def testToList {
  // Same coverage as testToTraversableList, but through the dedicated
  // HList#toList operation rather than the generic to[List].
  val r1 = HNil.toList
  assertTypedEquals[List[Nothing]](Nil, r1)

  implicitly[ToTraversable.Aux[HNil, List, Nothing]]
  implicitly[ToTraversable.Aux[HNil, List, Int]]

  {
    // The target element type can be given explicitly, precise or existential.
    val l1 = (mi :: HNil).toList[M[Int]]
    val l2 = (mi :: HNil).toList[M[_]]

    assertTypedEquals[List[M[Int]]](List(mi), l1)
    assertTypedEquals[List[M[_]]](List(mi), l2)
  }

  val fruits1 = apap.toList
  assertTypedEquals[List[Fruit]](List(a, p, a, p), fruits1)

  val fruits2 = apbp.toList
  assertTypedEquals[List[Fruit]](List(a, p, b, p), fruits2)

  val fruits3 = fruits2.toHList[APBP]
  assertTrue(fruits3.isDefined)
  assertTypedEquals[APBP](apbp, fruits3.get)

  val l1 = 1 :: "foo" :: 2 :: 3 :: HNil
  val stuff = l1.toList
  assertTypedEquals[List[Any]](List(1, "foo", 2, 3), stuff)

  val stuff2 = stuff.toHList[ISII]
  assertTrue(stuff2.isDefined)
  assertTypedEquals[ISII](1 :: "foo" :: 2 :: 3 :: HNil, stuff2.get)

  val l4 = Option(1) :: Option("foo") :: Option(2) :: Option(3) :: HNil
  val l7 = l4 map isDefined
  assertTypedEquals[BBBB](true :: true :: true :: true :: HNil, l7)

  val ll2 = l7.toList
  typed[Boolean](ll2.head)

  val moreStuff = (a :: "foo" :: p :: HNil).toList
  typed[List[Any]](moreStuff)

  // Compiles only if the inferred types of both arguments are identical.
  def equalInferredTypes[A,B](a: A, b: B)(implicit eq: A =:= B) {}

  val ctv = cicscicicd.toList
  equalInferredTypes(cicscicicdList, ctv)
  assertTypedEquals[List[Ctv[Int with String with Double]]](cicscicicdList, ctv)

  val m = mimsmimimd.toList
  equalInferredTypes(mimsmimimdList, m)
  assertTypedEquals[List[M[_ >: Int with String with Double]]](mimsmimimdList, m)

  // With existentials, it gets more tricky
  val mWithEx = mimsmimemd.toList
  // Compiler fails complaining that it
  //   Cannot prove that List[HListTests.this.M[_ >: Double with _$1 with Int with String]] =:= List[HListTests.this.M[_]]
  // equalType(mimsmimemdList, mWithEx)
  assertTypedEquals[List[M[_]]](mimsmimemdList, mWithEx)

  // Second order higher kinded types are ok...
  val m2 = m2im2sm2im2im2d.toList
  equalInferredTypes(m2im2sm2im2im2dList, m2)
  assertTypedEquals[List[M2[_ >: Int with String with Double, Unit]]](m2im2sm2im2im2dList, m2)

  // ...as long as existentials are not involved.
  val m2e = m2eim2esm2eim2eem2ed.toList
  // Compiler complains that it
  //   Cannot prove that List[HListTests.this.M2[_ >: Double with Int with Int with String with Int, _ >: _$5 with _$3 with _$3 with _$4 with _$3]] =:= List[HListTests.this.M2[_35,_36] forSome { type _$10; type _$9; type _34 >: _$10 with _$9; type _$8; type _$7; type _32 >: _$8 with _$7; type _35 >: Double with Int with Int with String; type _36 >: _34 with _32 }]
  // equalType(m2eim2esm2eim2eem2edList, m2e)
  assertTypedEquals[List[M2[_ >: Int with String with Double, _]]](m2eim2esm2eim2eem2edList, m2e)
}

@Test
def testToTraversableArray {
  // HList#to[Array] via ToTraversable; mirrors testToTraversableList.
  def assertArrayEquals2[T](arr1 : Array[T], arr2 : Array[T]) =
    assertArrayEquals(arr1.asInstanceOf[Array[Object]],
      arr2.asInstanceOf[Array[Object]])

  val empty = HNil.to[Array]
  typed[Array[Nothing]](empty)
  assertArrayEquals2(Array[Nothing](), empty)

  implicitly[ToTraversable.Aux[HNil, Array, Nothing]]
  implicitly[ToTraversable.Aux[HNil, Array, Int]]

  {
    implicitly[ToTraversable.Aux[M[Int] :: HNil, Array, M[Int]]]
    implicitly[ToTraversable.Aux[M[Int] :: HNil, Array, M[_]]]
  }

  val fruits1 = apap.to[Array].map(x => x : Fruit) // Default inferred type is too precise
                                                   // (Product with Serializable with Fruit)
  typed[Array[Fruit]](fruits1)
  assertArrayEquals2(Array[Fruit](a, p, a, p), fruits1)

  val fruits2 = apbp.to[Array].map(x => x : Fruit)
  typed[Array[Fruit]](fruits2)
  assertArrayEquals2(Array[Fruit](a, p, b, p), fruits2)

  val fruits3 = fruits2.toHList[APBP]
  assertTrue(fruits3.isDefined)
  assertTypedEquals[APBP](apbp, fruits3.get)

  val l1 = 1 :: "foo" :: 2 :: 3 :: HNil
  val stuff = l1.to[Array]
  typed[Array[Any]](stuff)
  assertArrayEquals2(Array(1, "foo", 2, 3), stuff)

  val stuff2 = stuff.toHList[ISII]
  assertTrue(stuff2.isDefined)
  assertTypedEquals[ISII](1 :: "foo" :: 2 :: 3 :: HNil, stuff2.get)

  val l4 = Option(1) :: Option("foo") :: Option(2) :: Option(3) :: HNil
  val l7 = l4 map isDefined
  assertTypedEquals[BBBB](true :: true :: true :: true :: HNil, l7)

  val ll2 = l7.to[Array]
  typed[Boolean](ll2(0))

  val moreStuff = (a :: "foo" :: p :: HNil).to[Array].map(x => x : AnyRef)
  typed[Array[AnyRef]](moreStuff)
  assertArrayEquals2(Array[AnyRef](a, "foo", p), moreStuff)

  // Compiles only if the inferred types of both arguments are identical.
  def equalInferredTypes[A,B](a: A, b: B)(implicit eq: A =:= B) {}

  val ctv = cicscicicd.to[Array]
  equalInferredTypes(cicscicicdArray, ctv)
  typed[Array[Ctv[Int with String with Double]]](ctv)
  assertArrayEquals2(cicscicicdArray, ctv)

  val m = mimsmimimd.to[Array]
  equalInferredTypes(mimsmimimdArray, m)
  typed[Array[M[_ >: Int with String with Double]]](m)
  assertArrayEquals2(mimsmimimdArray, m)

  // Existential element types: only the weaker typed check is possible.
  val mWithEx = mimsmimemd.to[Array]
  // equalType(mimsmimemdArray, mWithEx)
  typed[Array[M[_]]](mWithEx)
  assertArrayEquals2(mimsmimemdArray, mWithEx)

  val m2 = m2im2sm2im2im2d.to[Array]
  equalInferredTypes(m2im2sm2im2im2dArray, m2)
  typed[Array[M2[_ >: Int with String with Double, Unit]]](m2)
  assertArrayEquals2(m2im2sm2im2im2dArray, m2)

  val m2e = m2eim2esm2eim2eem2ed.to[Array]
  // equalInferredTypes(m2eim2esm2eim2eem2edArray, m2e)
  typed[Array[M2[_ >: Int with String with Double, _]]](m2e)
  assertArrayEquals2(m2eim2esm2eim2eem2edArray.map(x => x : Any), m2e.map(x => x : Any))
}

@Test
def testToArray {
  // Same coverage as testToTraversableArray, but through the dedicated
  // HList#toArray operation (explicit target element type supported).
  def assertArrayEquals2[T](arr1 : Array[T], arr2 : Array[T]) =
    assertArrayEquals(arr1.asInstanceOf[Array[Object]],
      arr2.asInstanceOf[Array[Object]])

  val empty = HNil.toArray
  typed[Array[Nothing]](empty)
  assertArrayEquals2(Array[Nothing](), empty)

  ToArray[HNil, Nothing]
  ToArray[HNil, Int]

  {
    val a1 = (mi :: HNil).toArray[M[Int]]
    val a2 = (mi :: HNil).toArray[M[_]]

    typed[Array[M[Int]]](a1)
    typed[Array[M[_]]](a2)
    assertArrayEquals2(Array[M[Int]](mi), a1)
    assertArrayEquals2(Array[M[_]](mi), a2)
  }

  val fruits1 = apap.toArray[Fruit]
  typed[Array[Fruit]](fruits1)
  assertArrayEquals2(Array[Fruit](a, p, a, p), fruits1)

  val fruits2 = apbp.toArray[Fruit]
  typed[Array[Fruit]](fruits2)
  assertArrayEquals2(Array[Fruit](a, p, b, p), fruits2)

  val fruits3 = fruits2.toHList[APBP]
  assertTrue(fruits3.isDefined)
  assertTypedEquals[APBP](apbp, fruits3.get)

  val l1 = 1 :: "foo" :: 2 :: 3 :: HNil
  val stuff = l1.toArray
  typed[Array[Any]](stuff)
  assertArrayEquals2(Array(1, "foo", 2, 3), stuff)

  // Mixed AnyRef/AnyVal elements also land in Array[Any].
  val ssl = "foo" :: "bar" :: 1L :: HNil
  val ssla = ssl.toArray
  typed[Array[Any]](ssla)
  assertArrayEquals2(Array("foo", "bar", 1L), ssla)

  val stuff2 = stuff.toHList[ISII]
  assertTrue(stuff2.isDefined)
  assertTypedEquals[ISII](1 :: "foo" :: 2 :: 3 :: HNil, stuff2.get)

  val l4 = Option(1) :: Option("foo") :: Option(2) :: Option(3) :: HNil
  val l7 = l4 map isDefined
  assertTypedEquals[BBBB](true :: true :: true :: true :: HNil, l7)

  val ll2 = l7.toArray
  typed[Boolean](ll2(0))

  val moreStuff = (a :: "foo" :: p :: HNil).toArray[AnyRef]
  typed[Array[AnyRef]](moreStuff)
  assertArrayEquals2(Array[AnyRef](a, "foo", p), moreStuff)

  // Compiles only if the inferred types of both arguments are identical.
  def equalInferredTypes[A,B](a: A, b: B)(implicit eq: A =:= B) {}

  val ctv = cicscicicd.toArray
  equalInferredTypes(cicscicicdArray, ctv)
  typed[Array[Ctv[Int with String with Double]]](ctv)
  assertArrayEquals2(cicscicicdArray, ctv)

  val m = mimsmimimd.toArray
  equalInferredTypes(mimsmimimdArray, m)
  typed[Array[M[_ >: Int with String with Double]]](m)
  assertArrayEquals2(mimsmimimdArray, m)

  // Existential element type must be requested explicitly here.
  val mWithEx = mimsmimemd.toArray[M[_]]
  // equalType(mimsmimemdArray, mWithEx)
  typed[Array[M[_]]](mWithEx)
  assertArrayEquals2(mimsmimemdArray, mWithEx)

  val m2 = m2im2sm2im2im2d.toArray
  equalInferredTypes(m2im2sm2im2im2dArray, m2)
  typed[Array[M2[_ >: Int with String with Double, Unit]]](m2)
  assertArrayEquals2(m2im2sm2im2im2dArray, m2)

  val m2e = m2eim2esm2eim2eem2ed.toArray
  // equalInferredTypes(m2eim2esm2eim2eem2edArray, m2e)
  typed[Array[M2[_ >: Int with String with Double, _]]](m2e)
  assertArrayEquals2(m2eim2esm2eim2eem2edArray.map(x => x : Any), m2e.map(x => x : Any))
}

@Test
def testFoldMap {
  // foldMap(z)(f)(op) must agree with the equivalent map-then-foldLeft.
  implicitly[Mapper.Aux[isDefined.type, HNil, HNil]]
  implicitly[Mapper.Aux[isDefined.type, Option[Int] :: HNil, Boolean :: HNil]]

  // tl1 is all-defined; tl2 has one None.
  val tl1 = Option(1) :: Option("foo") :: Option(2) :: Option(3) :: HNil
  val tl2 = Option(1) :: Option("foo") :: (None : Option[Int]) :: Option(3) :: HNil

  // Reference results via map + toList + foldLeft.
  val mlfl1 = (tl1 map isDefined).toList.foldLeft(true)(_ && _)
  assertTrue(mlfl1)
  val mlfl2 = (tl2 map isDefined).toList.foldLeft(true)(_ && _)
  assertFalse(mlfl2)

  // foldMap fuses the two steps and must produce the same answers.
  val fl1 = tl1.foldMap(true)(isDefined)(_ && _)
  assertTrue(fl1)
  val fl2 = tl2.foldMap(true)(isDefined)(_ && _)
  assertFalse(fl2)
}

@Test
def testAt {
  // Indexed access with Nat indices (_0 .. _22): each position yields the
  // statically-typed element at that index.
  val sn1 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  val at0 = sn1(_0)
  assertTypedEquals[Int](23, at0)

  val at1 = sn1(_1)
  typed[Double](at1)
  assertEquals(3.0, at1, Double.MinPositiveValue)

  val at2 = sn1(_2)
  assertTypedEquals[String]("foo", at2)

  val at3 = sn1(_3)
  assertTypedEquals[Unit]((), at3)

  val at4 = sn1(_4)
  assertTypedEquals[String]("bar", at4)

  val at5 = sn1(_5)
  assertTypedEquals[Boolean](true, at5)

  val at6 = sn1(_6)
  assertTypedEquals[Long](5L, at6)

  // Exercise a deep index (22) on a 23-element HList.
  val sn2 =
    0 :: 1 :: 2 :: 3 :: 4 :: 5 :: 6 :: 7 :: 8 :: 9 :: 10 :: 11 :: 12 :: 13 ::
    14 :: 15 :: 16 :: 17 :: 18 :: 19 :: 20 :: 21 :: 22 :: HNil
  val at22 = sn2(_22)
  assertTypedEquals[Int](22, at22)
}

@Test
def testAtLiteral {
  // Same as testAt but indexing with Int literals instead of Nat values.
  val sn1 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  val at0 = sn1(0)
  assertTypedEquals[Int](23, at0)

  val at1 = sn1(1)
  typed[Double](at1)
  assertEquals(3.0, at1, Double.MinPositiveValue)

  val at2 = sn1(2)
  assertTypedEquals[String]("foo", at2)

  val at3 = sn1(3)
  assertTypedEquals[Unit]((), at3)

  val at4 = sn1(4)
  assertTypedEquals[String]("bar", at4)

  val at5 = sn1(5)
  assertTypedEquals[Boolean](true, at5)

  val at6 = sn1(6)
  assertTypedEquals[Long](5L, at6)

  val sn2 =
    0 :: 1 :: 2 :: 3 :: 4 :: 5 :: 6 :: 7 :: 8 :: 9 :: 10 :: 11 :: 12 :: 13 ::
    14 :: 15 :: 16 :: 17 :: 18 :: 19 :: 20 :: 21 :: 22 :: HNil
  val at22 = sn2(22)
  assertTypedEquals[Int](22, at22)
}

@Test
def testTakeDrop {
  // take/drop with Nat indices at the boundaries (0, middle, full length).
  val sn1 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  val r1 = sn1.take(_0)
  assertTypedEquals[HNil](HNil, r1)
  val r2 = sn1.drop(_0)
  assertTypedEquals[Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil](
    23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil, r2)

  val r3 = sn1.take(_2)
  assertTypedEquals[Int :: Double :: HNil](23 :: 3.0 :: HNil, r3)
  val r4 = sn1.drop(_2)
  assertTypedEquals[String :: Unit :: String :: Boolean :: Long :: HNil](
    "foo" :: () :: "bar" :: true :: 5L :: HNil, r4)

  val r5 = sn1.take(_7)
  assertTypedEquals[Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil](
    23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil, r5)
  val r6 = sn1.drop(_7)
  assertTypedEquals[HNil](HNil, r6)
}

@Test
def testTakeDropLiteral {
  // Same as testTakeDrop but with Int literal indices.
  val sn1 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  val r1 = sn1.take(0)
  assertTypedEquals[HNil](HNil, r1)
  val r2 = sn1.drop(0)
  assertTypedEquals[Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil](
    23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil, r2)

  val r3 = sn1.take(2)
  assertTypedEquals[Int :: Double :: HNil](23 :: 3.0 :: HNil, r3)
  val r4 = sn1.drop(2)
  assertTypedEquals[String :: Unit :: String :: Boolean :: Long :: HNil](
    "foo" :: () :: "bar" :: true :: 5L :: HNil, r4)

  val r5 = sn1.take(7)
  assertTypedEquals[Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil](
    23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil, r5)
  val r6 = sn1.drop(7)
  assertTypedEquals[HNil](HNil, r6)
}
// split/reverse_split at every Nat index of a 7-element HList.
// split keeps the prefix in original order; reverse_split yields the prefix reversed.
// The typed[...] calls are compile-time assertions on the pair of result types.
@Test def testSplit {
  val sn1 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  // split: prefix in source order.
  val sni0 = sn1.split(_0)
  typed[(HNil, (Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil))](sni0)
  val sni1 = sn1.split(_1)
  typed[((Int :: HNil), (Double :: String :: Unit :: String :: Boolean :: Long :: HNil))](sni1)
  val sni2 = sn1.split(_2)
  typed[((Int :: Double :: HNil), (String :: Unit :: String :: Boolean :: Long :: HNil))](sni2)
  val sni3 = sn1.split(_3)
  typed[((Int :: Double :: String :: HNil), (Unit :: String :: Boolean :: Long :: HNil))](sni3)
  val sni4 = sn1.split(_4)
  typed[((Int :: Double :: String :: Unit :: HNil), (String :: Boolean :: Long :: HNil))](sni4)
  val sni5 = sn1.split(_5)
  typed[((Int :: Double :: String :: Unit :: String :: HNil), (Boolean :: Long :: HNil))](sni5)
  val sni6 = sn1.split(_6)
  typed[((Int :: Double :: String :: Unit :: String :: Boolean :: HNil), (Long :: HNil))](sni6)
  val sni7 = sn1.split(_7)
  typed[((Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil), HNil)](sni7)

  // reverse_split: prefix reversed.
  val snri0 = sn1.reverse_split(_0)
  typed[(HNil, (Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil))](snri0)
  val snri1 = sn1.reverse_split(_1)
  typed[((Int :: HNil), (Double :: String :: Unit :: String :: Boolean :: Long :: HNil))](snri1)
  val snri2 = sn1.reverse_split(_2)
  typed[((Double :: Int :: HNil), (String :: Unit :: String :: Boolean :: Long :: HNil))](snri2)
  val snri3 = sn1.reverse_split(_3)
  typed[((String :: Double :: Int :: HNil), (Unit :: String :: Boolean :: Long :: HNil))](snri3)
  val snri4 = sn1.reverse_split(_4)
  typed[((Unit :: String :: Double :: Int :: HNil), (String :: Boolean :: Long :: HNil))](snri4)
  val snri5 = sn1.reverse_split(_5)
  typed[((String :: Unit :: String :: Double :: Int :: HNil), (Boolean :: Long :: HNil))](snri5)
  val snri6 = sn1.reverse_split(_6)
  typed[((Boolean :: String :: Unit :: String :: Double :: Int :: HNil), (Long :: HNil))](snri6)
  val snri7 = sn1.reverse_split(_7)
  typed[((Long :: Boolean ::
    String :: Unit :: String :: Double :: Int :: HNil), HNil)](snri7)
}

// Same split/reverse_split contract driven by Int literal indices
// (singleton-typed literals) instead of explicit Nat values.
// This test's final two statements continue past this chunk boundary.
@Test def testSplitLiteral {
  val sn1 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  val sni0 = sn1.split(0)
  typed[(HNil, (Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil))](sni0)
  val sni1 = sn1.split(1)
  typed[((Int :: HNil), (Double :: String :: Unit :: String :: Boolean :: Long :: HNil))](sni1)
  val sni2 = sn1.split(2)
  typed[((Int :: Double :: HNil), (String :: Unit :: String :: Boolean :: Long :: HNil))](sni2)
  val sni3 = sn1.split(3)
  typed[((Int :: Double :: String :: HNil), (Unit :: String :: Boolean :: Long :: HNil))](sni3)
  val sni4 = sn1.split(4)
  typed[((Int :: Double :: String :: Unit :: HNil), (String :: Boolean :: Long :: HNil))](sni4)
  val sni5 = sn1.split(5)
  typed[((Int :: Double :: String :: Unit :: String :: HNil), (Boolean :: Long :: HNil))](sni5)
  val sni6 = sn1.split(6)
  typed[((Int :: Double :: String :: Unit :: String :: Boolean :: HNil), (Long :: HNil))](sni6)
  val sni7 = sn1.split(7)
  typed[((Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil), HNil)](sni7)

  val snri0 = sn1.reverse_split(0)
  typed[(HNil, (Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil))](snri0)
  val snri1 = sn1.reverse_split(1)
  typed[((Int :: HNil), (Double :: String :: Unit :: String :: Boolean :: Long :: HNil))](snri1)
  val snri2 = sn1.reverse_split(2)
  typed[((Double :: Int :: HNil), (String :: Unit :: String :: Boolean :: Long :: HNil))](snri2)
  val snri3 = sn1.reverse_split(3)
  typed[((String :: Double :: Int :: HNil), (Unit :: String :: Boolean :: Long :: HNil))](snri3)
  val snri4 = sn1.reverse_split(4)
  typed[((Unit :: String :: Double :: Int :: HNil), (String :: Boolean :: Long :: HNil))](snri4)
  val snri5 = sn1.reverse_split(5)
  typed[((String :: Unit :: String :: Double :: Int :: HNil), (Boolean :: Long :: HNil))](snri5)
  val snri6 = sn1.reverse_split(6)
  typed[((Boolean :: String :: Unit :: String :: Double :: Int :: HNil), (Long :: HNil))](snri6)
  // Final index of testSplitLiteral (started in the previous chunk):
  // splitting at the full length reverses the entire list into the prefix.
  val snri7 = sn1.reverse_split(7)
  typed[((Long :: Boolean :: String :: Unit :: String :: Double :: Int :: HNil), HNil)](snri7)
}

// splitP/reverse_splitP: like split/reverse_split, but the two parts are
// returned as a two-element HList rather than a Tuple2.
@Test def testSplitP {
  val sn1 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  val sni0 = sn1.splitP(_0)
  typed[(HNil) :: (Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil) :: HNil](sni0)
  val sni1 = sn1.splitP(_1)
  typed[(Int :: HNil) :: (Double :: String :: Unit :: String :: Boolean :: Long :: HNil) :: HNil](sni1)
  val sni2 = sn1.splitP(_2)
  typed[(Int :: Double :: HNil) :: (String :: Unit :: String :: Boolean :: Long :: HNil) :: HNil](sni2)
  val sni3 = sn1.splitP(_3)
  typed[(Int :: Double :: String :: HNil) :: (Unit :: String :: Boolean :: Long :: HNil) :: HNil](sni3)
  val sni4 = sn1.splitP(_4)
  typed[(Int :: Double :: String :: Unit :: HNil) :: (String :: Boolean :: Long :: HNil) :: HNil](sni4)
  val sni5 = sn1.splitP(_5)
  typed[(Int :: Double :: String :: Unit :: String :: HNil) :: (Boolean :: Long :: HNil) :: HNil](sni5)
  val sni6 = sn1.splitP(_6)
  typed[(Int :: Double :: String :: Unit :: String :: Boolean :: HNil) :: (Long :: HNil) :: HNil](sni6)
  val sni7 = sn1.splitP(_7)
  typed[(Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil) :: (HNil) :: HNil](sni7)

  // reverse_splitP: prefix reversed, parts packaged as an HList.
  val snri0 = sn1.reverse_splitP(_0)
  typed[(HNil) :: (Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil) :: HNil](snri0)
  val snri1 = sn1.reverse_splitP(_1)
  typed[(Int :: HNil) :: (Double :: String :: Unit :: String :: Boolean :: Long :: HNil) :: HNil](snri1)
  val snri2 = sn1.reverse_splitP(_2)
  typed[(Double :: Int :: HNil) :: (String :: Unit :: String :: Boolean :: Long :: HNil) :: HNil](snri2)
  val snri3 = sn1.reverse_splitP(_3)
  typed[(String :: Double :: Int :: HNil) :: (Unit :: String :: Boolean :: Long :: HNil) :: HNil](snri3)
  val snri4 = sn1.reverse_splitP(_4)
  typed[(Unit :: String :: Double :: Int :: HNil) :: (String :: Boolean :: Long :: HNil) :: HNil](snri4)
  val snri5 = sn1.reverse_splitP(_5)
  typed[(String :: Unit :: String
    :: Double :: Int :: HNil) :: (Boolean :: Long :: HNil) :: HNil](snri5)
  val snri6 = sn1.reverse_splitP(_6)
  typed[(Boolean :: String :: Unit :: String :: Double :: Int :: HNil) :: (Long :: HNil) :: HNil](snri6)
  val snri7 = sn1.reverse_splitP(_7)
  typed[(Long :: Boolean :: String :: Unit :: String :: Double :: Int :: HNil) :: (HNil) :: HNil](snri7)
}

// Same splitP/reverse_splitP contract with Int literal indices.
// The reverse_splitP cases from index 3 onward continue past this chunk boundary.
@Test def testSplitPLiteral {
  val sn1 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  val sni0 = sn1.splitP(0)
  typed[(HNil) :: (Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil) :: HNil](sni0)
  val sni1 = sn1.splitP(1)
  typed[(Int :: HNil) :: (Double :: String :: Unit :: String :: Boolean :: Long :: HNil) :: HNil](sni1)
  val sni2 = sn1.splitP(2)
  typed[(Int :: Double :: HNil) :: (String :: Unit :: String :: Boolean :: Long :: HNil) :: HNil](sni2)
  val sni3 = sn1.splitP(3)
  typed[(Int :: Double :: String :: HNil) :: (Unit :: String :: Boolean :: Long :: HNil) :: HNil](sni3)
  val sni4 = sn1.splitP(4)
  typed[(Int :: Double :: String :: Unit :: HNil) :: (String :: Boolean :: Long :: HNil) :: HNil](sni4)
  val sni5 = sn1.splitP(5)
  typed[(Int :: Double :: String :: Unit :: String :: HNil) :: (Boolean :: Long :: HNil) :: HNil](sni5)
  val sni6 = sn1.splitP(6)
  typed[(Int :: Double :: String :: Unit :: String :: Boolean :: HNil) :: (Long :: HNil) :: HNil](sni6)
  val sni7 = sn1.splitP(7)
  typed[(Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil) :: (HNil) :: HNil](sni7)

  val snri0 = sn1.reverse_splitP(0)
  typed[(HNil) :: (Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil) :: HNil](snri0)
  val snri1 = sn1.reverse_splitP(1)
  typed[(Int :: HNil) :: (Double :: String :: Unit :: String :: Boolean :: Long :: HNil) :: HNil](snri1)
  val snri2 = sn1.reverse_splitP(2)
  typed[(Double :: Int :: HNil) :: (String :: Unit :: String :: Boolean :: Long :: HNil) :: HNil](snri2)
  val snri3 = sn1.reverse_splitP(3)
  typed[(String :: Double :: Int :: HNil) :: (Unit :: String :: Boolean :: Long :: HNil) ::
    HNil](snri3)
  // Remaining reverse_splitP literal indices (test opened in the previous chunk).
  val snri4 = sn1.reverse_splitP(4)
  typed[(Unit :: String :: Double :: Int :: HNil) :: (String :: Boolean :: Long :: HNil) :: HNil](snri4)
  val snri5 = sn1.reverse_splitP(5)
  typed[(String :: Unit :: String :: Double :: Int :: HNil) :: (Boolean :: Long :: HNil) :: HNil](snri5)
  val snri6 = sn1.reverse_splitP(6)
  typed[(Boolean :: String :: Unit :: String :: Double :: Int :: HNil) :: (Long :: HNil) :: HNil](snri6)
  val snri7 = sn1.reverse_splitP(7)
  typed[(Long :: Boolean :: String :: Unit :: String :: Double :: Int :: HNil) :: (HNil) :: HNil](snri7)
}

// select[T]: pick the (first) element of static type T from the HList.
@Test def testSelect {
  val sl = 1 :: true :: "foo" :: 2.0 :: HNil
  val si = sl.select[Int]
  assertTypedEquals[Int](1, si)

  val sb = sl.select[Boolean]
  assertTypedEquals[Boolean](true, sb)

  val ss = sl.select[String]
  assertTypedEquals[String]("foo", ss)

  val sd = sl.select[Double]
  // Double compared with an explicit tolerance rather than assertTypedEquals.
  assertEquals(2.0, sd, Double.MinPositiveValue)
}

// filter[T]: keep only the elements whose static type is T.
@Test def testFilter {
  val l1 = 1 :: 2 :: HNil
  val f1 = l1.filter[Int]
  assertTypedEquals[Int :: Int :: HNil](1 :: 2 :: HNil, f1)

  val l2 = 1 :: true :: "foo" :: 2 :: HNil
  val f2 = l2.filter[Int]
  assertTypedEquals[Int :: Int :: HNil](1 :: 2 :: HNil, f2)

  // Filtering by a type not present yields HNil.
  typed[HNil](l2.filter[Double])
}

// filterNot[T]: drop the elements whose static type is T.
@Test def testFilterNot {
  val l1 = 1 :: 2 :: HNil
  val f1 = l1.filterNot[String]
  assertTypedEquals[Int :: Int :: HNil](1 :: 2 :: HNil, f1)

  val l2 = 1 :: true :: "foo" :: 2 :: HNil
  val f2 = l2.filterNot[String]
  assertTypedEquals[Int :: Boolean :: Int :: HNil](1 :: true :: 2 :: HNil, f2)

  typed[HNil](l2.filter[Double])
}

// partition[T] returns (matching, rest) as a Tuple2; partitionP packages the
// same two parts as a two-element HList.
@Test def testPartition {
  val l1 = 1 :: 2 :: HNil
  val l2 = 1 :: true :: "foo" :: 2 :: HNil

  val r1 = l1.partition[Int]
  assertTypedEquals[(Int :: Int :: HNil, HNil)]((1 :: 2 :: HNil, HNil), r1)

  val r2 = l1.partitionP[Int]
  assertTypedEquals[(Int :: Int :: HNil) :: HNil :: HNil]((1 :: 2 :: HNil) :: HNil :: HNil, r2)

  val r3 = l2.partition[Int]
  assertTypedEquals[(Int :: Int :: HNil, Boolean :: String :: HNil)]((1 :: 2 :: HNil, true :: "foo" :: HNil), r3)

  val r4 = l2.partitionP[Int]
  assertTypedEquals[(Int :: Int :: HNil) :: (Boolean :: String :: HNil) :: HNil](
    (1 :: 2 :: HNil) :: (true :: "foo" :: HNil) :: HNil, r4
  )
}

// replace(v) swaps out the first element of v's type, returning (old, updated list);
// replaceType[T](v) replaces the T element with a value of a possibly different type.
// This test continues past this chunk boundary.
@Test def testReplace {
  val sl = 1 :: true :: "foo" :: 2.0 :: HNil

  val (i, r1) = sl.replace(23)
  assertTypedEquals[Int](1, i)
  assertTypedEquals[Int :: Boolean :: String :: Double :: HNil](23 :: true :: "foo" :: 2.0 :: HNil, r1)

  val (b, r2) = sl.replace(false)
  assertTypedEquals[Boolean](true, b)
  assertTypedEquals[Int :: Boolean :: String :: Double :: HNil](1 :: false :: "foo" :: 2.0 :: HNil, r2)

  val (s, r3) = sl.replace("bar")
  assertTypedEquals[String]("foo", s)
  assertTypedEquals[Int :: Boolean :: String :: Double :: HNil](1 :: true :: "bar" :: 2.0 :: HNil, r3)

  val (d, r4) = sl.replace(3.0)
  typed[Double](d)
  assertEquals(2.0, d, Double.MinPositiveValue)
  assertTypedEquals[Int :: Boolean :: String :: Double :: HNil](1 :: true :: "foo" :: 3.0 :: HNil, r4)

  // replaceType: the slot's static type changes (here to Char).
  val (i2, r5) = sl.replaceType[Int]('*')
  typed[Char](r5(0))
  assertTypedEquals[Int](1, i2)
  assertTypedEquals[Char :: Boolean :: String :: Double :: HNil]('*' :: true :: "foo" :: 2.0 :: HNil, r5)

  val (b2, r6) = sl.replaceType[Boolean]('*')
  typed[Char](r6(1))
  assertTypedEquals[Boolean](true, b2)
  assertTypedEquals[Int :: Char :: String :: Double :: HNil](1 :: '*' :: "foo" :: 2.0 :: HNil, r6)

  val (s2, r7) = sl.replaceType[String]('*')
  typed[Char](r7(2))
  assertTypedEquals[String]("foo", s2)
  assertTypedEquals[Int :: Boolean :: Char :: Double :: HNil](1 :: true :: '*' :: 2.0 :: HNil, r7)

  val (d2, r8) = sl.replaceType[Double]('*')
  typed[Double](d2)
  typed[Char](r8(3))
  assertEquals(2.0, d2, Double.MinPositiveValue)
  assertTypedEquals[Int :: Boolean :: String :: Char :: HNil](1 :: true :: "foo" :: '*' :: HNil, r8)

  // Subtype-aware replacement over a Fruit/Apple/Pear hierarchy
  // (a, p, f are fixtures declared elsewhere in this file).
  val fruits = a :: p :: a :: f :: HNil

  val (x1, rr1) = fruits.replaceType[Pear](a)
  typed[Pear](x1)
  typed[Apple :: Apple :: Apple :: Fruit :: HNil](rr1)

  val (x2, rr2) = fruits.replaceType[Pear](f)
  typed[Pear](x2)
  typed[Apple :: Fruit :: Apple :: Fruit :: HNil](rr2)

  val (x3, rr3) = fruits.replaceType[Fruit](p)
typed[Fruit](x3)
  // Remaining fruit replacements of testReplace (test opened in the previous chunk).
  typed[Apple :: Pear :: Apple :: Pear :: HNil](rr3)

  val (x4, rr4) = fruits.replace(p)
  typed[Pear](x4)
  typed[Apple :: Pear :: Apple :: Fruit :: HNil](rr4)

  val (x5, rr5) = fruits.replace(f)
  typed[Fruit](x5)
  typed[Apple :: Pear :: Apple :: Fruit :: HNil](rr5)
}

// updatedElem/updatedType/updateWith: in-place update variants that return only
// the updated list (no old value, unlike replace).
@Test def testUpdate {
  type SL = Int :: Boolean :: String :: Double :: HNil
  val sl: SL = 1 :: true :: "foo" :: 2.0 :: HNil

  // updatedElem: same-type update selected by the argument's type.
  val r1 = sl.updatedElem(23)
  assertTypedEquals[SL](23 :: true :: "foo" :: 2.0 :: HNil, r1)

  val r2 = sl.updatedElem(false)
  assertTypedEquals[SL](1 :: false :: "foo" :: 2.0 :: HNil, r2)

  val r3 = sl.updatedElem("bar")
  assertTypedEquals[SL](1 :: true :: "bar" :: 2.0 :: HNil, r3)

  val r4 = sl.updatedElem(3.0)
  assertTypedEquals[SL](1 :: true :: "foo" :: 3.0 :: HNil, r4)

  // updatedType[T]: replace the T slot with a value of another type.
  val r5 = sl.updatedType[Int]('*')
  assertTypedEquals[Char :: Boolean :: String :: Double :: HNil]('*' :: true :: "foo" :: 2.0 :: HNil, r5)

  val r6 = sl.updatedType[Boolean]('*')
  assertTypedEquals[Int :: Char :: String :: Double :: HNil](1 :: '*' :: "foo" :: 2.0 :: HNil, r6)

  val r7 = sl.updatedType[String]('*')
  assertTypedEquals[Int :: Boolean :: Char :: Double :: HNil](1 :: true :: '*' :: 2.0 :: HNil, r7)

  val r8 = sl.updatedType[Double]('*')
  assertTypedEquals(1 :: true :: "foo" :: '*' :: HNil, r8)

  // updateWith: the slot is selected by the function's argument type;
  // the result type follows the function's return type.
  val r9 = sl.updateWith((i : Int) => i * 2)
  assertTypedEquals[Int :: Boolean :: String :: Double :: HNil](2 :: true :: "foo" :: 2.0 :: HNil, r9)

  val r10 = sl.updateWith((b : Boolean) => !b)
  assertTypedEquals[Int :: Boolean :: String :: Double :: HNil](1 :: false :: "foo" :: 2.0 :: HNil, r10)

  val r11 = sl.updateWith((s : String) => s.toUpperCase)
  assertTypedEquals[Int :: Boolean :: String :: Double :: HNil](1 :: true :: "FOO" :: 2.0 :: HNil, r11)

  val r12 = sl.updateWith((d : Double) => d / 2.0)
  assertTypedEquals[Int :: Boolean :: String :: Double :: HNil](1 :: true :: "foo" :: 1.0 :: HNil, r12)

  // Type-changing updateWith.
  val r13 = sl.updateWith((i : Int) => i.toString)
  assertTypedEquals[String :: Boolean :: String :: Double :: HNil]("1" :: true :: "foo"
    :: 2.0 :: HNil, r13)

  val r14 = sl.updateWith((b : Boolean) => b.toString)
  assertTypedEquals[Int :: String :: String :: Double :: HNil](1 :: "true" :: "foo" :: 2.0 :: HNil, r14)

  val r15 = sl.updateWith((_ : String) => 0xF00)
  assertTypedEquals[Int :: Boolean :: Int :: Double :: HNil](1 :: true :: 0xF00 :: 2.0 :: HNil, r15)

  val r16 = sl.updateWith((d : Double) => d.toString)
  assertTypedEquals[Int :: Boolean :: String :: String :: HNil](1 :: true :: "foo" :: "2.0" :: HNil, r16)

  // Subtype-aware updates on the Fruit hierarchy fixtures.
  val fruits = a :: p :: a :: f :: HNil

  val rr1 = fruits.updatedType[Pear](a)
  typed[Apple :: Apple :: Apple :: Fruit :: HNil](rr1)

  val rr2 = fruits.updatedType[Pear](f)
  typed[Apple :: Fruit :: Apple :: Fruit :: HNil](rr2)

  val rr3 = fruits.updatedType[Fruit](p)
  typed[Apple :: Pear :: Apple :: Pear :: HNil](rr3)

  val rr4 = fruits.updatedElem(p)
  typed[Apple :: Pear :: Apple :: Fruit :: HNil](rr4)

  val rr5 = fruits.updatedElem(f)
  typed[Apple :: Pear :: Apple :: Fruit :: HNil](rr5)
}

// splitLeft[T] splits before the FIRST occurrence of T; reverse_splitLeft
// reverses the prefix. Recombining with ::: / reverse_::: recovers the original.
@Test def testSplitLeft {
  type SL  = Int :: Boolean :: String :: Double :: HNil
  type SL2 = Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil
  val sl: SL = 1 :: true :: "foo" :: 2.0 :: HNil
  val sl2: SL2 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  val (sp1, sp2) = sl.splitLeft[String]
  typed[String :: Double :: HNil](sp2)
  typed[Int :: Boolean :: HNil](sp1)
  assertTypedEquals[SL]((sp1 ::: sp2), sl)

  // With two Strings present, the split happens at the first one.
  val (sli1, sli2) = sl2.splitLeft[String]
  typed[Int :: Double :: HNil](sli1)
  typed[String :: Unit :: String :: Boolean :: Long :: HNil](sli2)
  assertTypedEquals[SL2]((sli1 ::: sli2), sl2)

  val (rsp1, rsp2) = sl.reverse_splitLeft[String]
  typed[Boolean :: Int :: HNil](rsp1)
  typed[String :: Double :: HNil](rsp2)
  assertTypedEquals[SL]((rsp1 reverse_::: rsp2), sl)

  val (rsli1, rsli2) = sl2.reverse_splitLeft[String]
  typed[Double :: Int :: HNil](rsli1)
  typed[String :: Unit :: String :: Boolean :: Long :: HNil](rsli2)
  assertTypedEquals[SL2]((rsli1 reverse_::: rsli2), sl2)
}

// splitLeftP: HList-packaged variant of splitLeft; this test's body continues
// past this chunk boundary.
@Test def testSplitLeftP {
  type SL = Int ::
Boolean :: String :: Double :: HNil
  // Body of testSplitLeftP (header opened in the previous chunk); results are
  // destructured with an HList pattern rather than a tuple.
  type SL2 = Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil
  val sl: SL = 1 :: true :: "foo" :: 2.0 :: HNil
  val sl2: SL2 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  val sp1 :: sp2 :: HNil = sl.splitLeftP[String]
  typed[String :: Double :: HNil](sp2)
  typed[Int :: Boolean :: HNil](sp1)
  assertTypedEquals[SL]((sp1 ::: sp2), sl)

  val sli1 :: sli2 :: HNil = sl2.splitLeftP[String]
  typed[Int :: Double :: HNil](sli1)
  typed[String :: Unit :: String :: Boolean :: Long :: HNil](sli2)
  assertTypedEquals[SL2]((sli1 ::: sli2), sl2)

  val rsp1 :: rsp2 :: HNil = sl.reverse_splitLeftP[String]
  typed[Boolean :: Int :: HNil](rsp1)
  typed[String :: Double :: HNil](rsp2)
  assertTypedEquals[SL]((rsp1 reverse_::: rsp2), sl)

  val rsli1 :: rsli2 :: HNil = sl2.reverse_splitLeftP[String]
  typed[Double :: Int :: HNil](rsli1)
  typed[String :: Unit :: String :: Boolean :: Long :: HNil](rsli2)
  assertTypedEquals[SL2]((rsli1 reverse_::: rsli2), sl2)
}

// splitRight[T] splits after the LAST occurrence of T; reverse_splitRight
// reverses the prefix. Recombining recovers the original.
@Test def testSplitRight {
  type SL  = Int :: Boolean :: String :: Double :: HNil
  type SL2 = Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil
  val sl: SL = 1 :: true :: "foo" :: 2.0 :: HNil
  val sl2: SL2 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  val (srp1, srp2) = sl.splitRight[String]
  typed[Int :: Boolean :: String :: HNil](srp1)
  typed[Double :: HNil](srp2)
  assertTypedEquals[SL]((srp1 ::: srp2), sl)

  // With two Strings present, the split happens after the second one.
  val (srli1, srli2) = sl2.splitRight[String]
  typed[Int :: Double :: String :: Unit :: String :: HNil](srli1)
  typed[Boolean :: Long :: HNil](srli2)
  assertTypedEquals[SL2](sl2, srli1 ::: srli2)

  val (rsrp1, rsrp2) = sl.reverse_splitRight[String]
  typed[String :: Boolean :: Int :: HNil](rsrp1)
  typed[Double :: HNil](rsrp2)
  assertTypedEquals[SL]((rsrp1 reverse_::: rsrp2), sl)

  val (rsrli1, rsrli2) = sl2.reverse_splitRight[String]
  typed[String :: Unit :: String :: Double :: Int :: HNil](rsrli1)
  typed[Boolean :: Long :: HNil](rsrli2)
  assertTypedEquals[SL2]((rsrli1 reverse_:::
    rsrli2), sl2)
}

// splitRightP: HList-packaged variant of splitRight.
@Test def testSplitRightP {
  type SL  = Int :: Boolean :: String :: Double :: HNil
  type SL2 = Int :: Double :: String :: Unit :: String :: Boolean :: Long :: HNil
  val sl: SL = 1 :: true :: "foo" :: 2.0 :: HNil
  val sl2: SL2 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  val srp1 :: srp2 :: HNil = sl.splitRightP[String]
  typed[Int :: Boolean :: String :: HNil](srp1)
  typed[Double :: HNil](srp2)
  assertTypedEquals[SL]((srp1 ::: srp2), sl)

  val srli1 :: srli2 :: HNil = sl2.splitRightP[String]
  typed[Int :: Double :: String :: Unit :: String :: HNil](srli1)
  typed[Boolean :: Long :: HNil](srli2)
  assertTypedEquals[SL2](sl2, srli1 ::: srli2)

  val rsrp1 :: rsrp2 :: HNil = sl.reverse_splitRightP[String]
  typed[String :: Boolean :: Int :: HNil](rsrp1)
  typed[Double :: HNil](rsrp2)
  assertTypedEquals[SL]((rsrp1 reverse_::: rsrp2), sl)

  val rsrli1 :: rsrli2 :: HNil = sl2.reverse_splitRightP[String]
  typed[String :: Unit :: String :: Double :: Int :: HNil](rsrli1)
  typed[Boolean :: Long :: HNil](rsrli2)
  assertTypedEquals[SL2]((rsrli1 reverse_::: rsrli2), sl2)
}

// zipOne/mapConst/transpose on HLists of HLists; transpose of a transpose
// recovers the original. This test continues past this chunk boundary.
@Test def testTranspose {
  val l1 = 1 :: HNil
  val l2 = ("a" :: HNil) :: HNil

  // zipOne prepends each element of l1 onto the corresponding row of l2.
  val r1 = l1.zipOne(l2)
  assertTypedEquals[(Int :: String :: HNil) :: HNil]((1 :: "a" :: HNil) :: HNil, r1)

  val r2 = l1.mapConst(HNil)
  assertTypedEquals[HNil :: HNil](HNil :: HNil, r2)

  val r3 = (l1 :: HNil).transpose
  assertTypedEquals[(Int :: HNil) :: HNil]((1 :: HNil) :: HNil, r3)

  val l3 = 1 :: 2 :: 3 :: HNil
  val l4 = ("a" :: 1.0 :: HNil) :: ("b" :: 2.0 :: HNil) :: ("c" :: 3.0 :: HNil) :: HNil
  type ISD = Int :: String :: Double :: HNil

  val z2 = l3.zipOne(l4)
  assertTypedEquals[ISD :: ISD :: ISD :: HNil](
    (1 :: "a" :: 1.0 :: HNil) :: (2 :: "b" :: 2.0 :: HNil) :: (3 :: "c" :: 3.0 :: HNil) :: HNil, z2
  )

  val r5 = l3.mapConst(HNil)
  assertTypedEquals[HNil :: HNil :: HNil :: HNil](HNil :: HNil :: HNil :: HNil, r5)

  val t2 = l4.transpose
  assertTypedEquals[
    (String :: String :: String :: HNil) ::
    (Double :: Double :: Double :: HNil) :: HNil
](("a" :: "b" :: "c" :: HNil) :: (1.0 :: 2.0 :: 3.0 :: HNil) :: HNil, t2)
  // Remainder of testTranspose (opened in the previous chunk).
  val t3 = z2.transpose
  assertTypedEquals[
    (Int :: Int :: Int :: HNil) ::
    (String :: String :: String :: HNil) ::
    (Double :: Double :: Double :: HNil) :: HNil
  ](
    (1 :: 2 :: 3 :: HNil) :: ("a" :: "b" :: "c" :: HNil) :: (1.0 :: 2.0 :: 3.0 :: HNil) :: HNil,
    t3
  )

  // transpose is an involution.
  val r8 = t3.transpose
  assertTypedEquals[ISD :: ISD :: ISD :: HNil](z2, r8)
}

// zip/unzip expressed both via transpose + map(tupled)/map(productElements)
// and via the built-in zip/unzip/zipApply operations.
@Test def testZipUnzip {
  val l1 = 1 :: "a" :: 1.0 :: HNil
  val l2 = 2 :: "b" :: 2.0 :: HNil

  // zip by hand: transpose then tuple each row.
  val t1 = (l1 :: l2 :: HNil).transpose
  val z1 = t1.map(tupled)
  assertTypedEquals[(Int, Int) :: (String, String) :: (Double, Double) :: HNil](
    (1, 2) :: ("a", "b") :: (1.0, 2.0) :: HNil, z1)

  // Generic zip implemented from the Transposer/Mapper type classes.
  def zip[L <: HList, OutT <: HList](l : L)
    (implicit
      transposer : Transposer.Aux[L, OutT],
      mapper : Mapper[tupled.type, OutT]) = l.transpose.map(tupled)

  val z2 = zip(l1 :: l2 :: HNil)
  assertTypedEquals[(Int, Int) :: (String, String) :: (Double, Double) :: HNil](
    (1, 2) :: ("a", "b") :: (1.0, 2.0) :: HNil, z2)

  // Built-in zip on an HList of HLists.
  val z3 = (l1 :: l2 :: HNil).zip
  assertTypedEquals[(Int, Int) :: (String, String) :: (Double, Double) :: HNil](
    (1, 2) :: ("a", "b") :: (1.0, 2.0) :: HNil, z3)

  // unzip by hand: untuple each row, transpose back, tuple the halves.
  val t2 = z1.map(productElements).transpose
  val u1 = t2.tupled
  assertTypedEquals[(Int :: String :: Double :: HNil, Int :: String :: Double :: HNil)](
    (1 :: "a" :: 1.0 :: HNil, 2 :: "b" :: 2.0 :: HNil), u1)

  def unzip[L <: HList, OutM <: HList, OutT <: HList](l : L)
    (implicit
      mapper : Mapper.Aux[productElements.type, L, OutM],
      transposer : Transposer.Aux[OutM, OutT],
      tupler : Tupler[OutT]) = l.map(productElements).transpose.tupled

  val u2 = unzip(z1)
  assertTypedEquals[(Int :: String :: Double :: HNil, Int :: String :: Double :: HNil)](
    (1 :: "a" :: 1.0 :: HNil, 2 :: "b" :: 2.0 :: HNil), u2)

  // Built-in unzip and pairwise zip of two HLists.
  val r1 = z1.unzip
  assertTypedEquals[(Int :: String :: Double :: HNil, Int :: String :: Double :: HNil)](
    (1 :: "a" :: 1.0 :: HNil, 2 :: "b" :: 2.0 :: HNil), r1)

  val r2 = l1 zip l2
  assertTypedEquals[(Int, Int) :: (String, String) :: (Double,
    Double) :: HNil](
    (1, 2) :: ("a", "b") :: (1.0, 2.0) :: HNil, r2)

  // zipApply: apply an HList of functions elementwise to an HList of arguments.
  val intInc : Int => Int = _+1
  val stringInc : String => String = _+"*"
  val doubleInc : Double => Int = _.toInt+1

  val l3 = intInc :: stringInc :: doubleInc :: HNil

  val z5 = l3 zipApply l1
  assertTypedEquals[Int :: String :: Int :: HNil](2 :: "a*" :: 2 :: HNil, z5)
}

// Pattern matching on HLists: ::-patterns, the #: sugar from ListCompat,
// matching through Any with type tests, and disambiguation from List's ::.
@Test def testUnapply {
  val l = 1 :: true :: "foo" :: 2.0 :: HNil
  val l2 = 23 :: 3.0 :: "foo" :: () :: "bar" :: true :: 5L :: HNil

  val is = l match {
    case i :: true :: s :: 2.0 :: HNil => (i, s)
  }

  assertTypedEquals[Int](1, is._1)
  assertTypedEquals[String]("foo", is._2)

  // Statically Any: binders need explicit type tests, and a default case.
  val is2 = (l : Any) match {
    case (i : Int) :: true :: (s : String) :: 2.0 :: HNil => (i, s)
    case _ => sys.error("Not matched")
  }

  assertTypedEquals[Int](1, is2._1)
  assertTypedEquals[String]("foo", is2._2)

  import HList.ListCompat._

  val tl = l2 match {
    case 23 #: 3.0 #: s #: xs => (s, xs)
  }

  assertTypedEquals[String]("foo", tl._1)
  assertTypedEquals[Unit :: String :: Boolean :: Long :: HNil](() :: "bar" :: true :: 5L :: HNil, tl._2)

  val tl2 = (l2 : Any) match {
    case 23 #: 3.0 #: (s : String) #: xs => (s, xs)
    case _ => sys.error("Not matched")
  }

  assertTypedEquals[String]("foo", tl2._1)
  assertTypedEquals[HList](() :: "bar" :: true :: 5L :: HNil, tl2._2)

  // Plain scala.List :: patterns still work alongside the HList ones.
  val ll = List(1, 2, 3, 4)
  val tll = ll match {
    case 1 :: 2 :: x :: y :: Nil => (x, y)
    case _ => sys.error("Not matched")
  }
  assertTypedEquals[Int](3, tll._1)
  assertTypedEquals[Int](4, tll._2)

  val tll2 = ll match {
    case 1 :: xs => xs
    case _ => sys.error("Not matched")
  }
  assertTypedEquals[List[Int]](List(2, 3, 4), tll2)

  // A List nested inside an HList: List ::-patterns inside #:-patterns.
  val mixed = 23 :: "foo" :: (1 :: 2 :: 3 :: 4 :: 5 :: Nil) :: false :: () :: HNil
  val tmixed = mixed match {
    case _ #: _ #: (_ :: 2 :: x :: tl1) #: tl2 => (x, tl1, tl2)
    case _ => sys.error("Not matched")
  }
  assertTypedEquals[Int](3, tmixed._1)
  assertTypedEquals[List[Int]](4 :: 5 :: Nil, tmixed._2)
  assertTypedEquals[Boolean :: Unit :: HNil](false :: () :: HNil, tmixed._3)
}

// removeElem[T]: remove the T element, returning (removed, remainder).
// This test's body continues past this chunk boundary.
@Test def testRemove {
  val l = 1 :: true :: "foo"
:: HNil
  // Body of testRemove (header opened in the previous chunk).
  val li = l.removeElem[Int]
  assertTypedEquals[(Int, Boolean :: String :: HNil)]((1, true :: "foo" :: HNil), li)

  val lb = l.removeElem[Boolean]
  assertTypedEquals[(Boolean, Int :: String :: HNil)]((true, 1 :: "foo" :: HNil), lb)

  val ls = l.removeElem[String]
  assertTypedEquals[(String, Int :: Boolean :: HNil)](("foo", 1 :: true :: HNil), ls)
}

// removeAll[S]: remove a whole sublist of types S, returning
// (removed-in-S-order, remainder); order of S need not match the source order.
@Test def testRemoveAll {
  val l = 1 :: true :: "foo" :: HNil

  val lnil = l.removeAll[HNil]
  assertTypedEquals[(HNil, Int :: Boolean :: String :: HNil)]((HNil, 1 :: true :: "foo" :: HNil), lnil)

  val li = l.removeAll[Int :: HNil]
  assertTypedEquals[(Int :: HNil, Boolean :: String :: HNil)]((1 :: HNil, true :: "foo" :: HNil), li)

  val lb = l.removeAll[Boolean :: HNil]
  assertTypedEquals[(Boolean :: HNil, Int :: String :: HNil)]((true :: HNil, 1 :: "foo" :: HNil), lb)

  val lbi = l.removeAll[Boolean :: Int :: HNil]
  assertTypedEquals[(Boolean :: Int :: HNil, String :: HNil)]((true :: 1 :: HNil, "foo" :: HNil), lbi)
}

// Poly2 used by testFoldLeft: (Char, String) => index; (Int, Boolean) => "pass"/"fail".
object combine extends Poly {
  implicit def caseCharString = use((c : Char, s : String) => s.indexOf(c))
  implicit def caseIntBoolean = use((i : Int, b : Boolean) => if ((i >= 0) == b) "pass" else "fail")
}

// foldLeft with a Poly: checks the per-step applications directly, that the
// LeftFolder instances resolve, and the folded result.
@Test def testFoldLeft {
  val c1a = combine('o', "foo")
  val c1b = combine(c1a, true)
  assertTypedEquals[String]("pass", c1b)

  implicitly[LeftFolder.Aux[HNil, String, combine.type, String]]
  implicitly[LeftFolder.Aux[Boolean :: HNil, Int, combine.type, String]]
  implicitly[LeftFolder.Aux[String :: Boolean :: HNil, Char, combine.type, String]]

  val tf1 = implicitly[LeftFolder[HNil, String, combine.type]]
  val tf2 = implicitly[LeftFolder[Boolean :: HNil, Int, combine.type]]
  val tf3 = implicitly[LeftFolder[String :: Boolean :: HNil, Char, combine.type]]

  val l1 = "foo" :: true :: HNil
  val f1 = l1.foldLeft('o')(combine)
  assertTypedEquals[String]("pass", f1)

  // 'o' is at index 1 in "bar"?  No — indexOf returns -1, so (−1 >= 0) == false → "pass".
  val c2a = combine('o', "bar")
  val c2b = combine(c2a, false)
  assertTypedEquals[String]("pass", c2b)

  val l2 = "bar" :: false :: HNil
  val f2 = l2.foldLeft('o')(combine)
  assertTypedEquals[String]("pass", f2)
}

// updatedAt with type-level Nat indices: positional same-type update.
@Test def testUpdatedAt {
  type IBS = Int :: Boolean :: String :: HNil

  val l = 1 :: true :: "foo" :: HNil

  val r1 = l.updatedAt[_0](2)
  assertTypedEquals[IBS](2 :: true :: "foo" :: HNil, r1)

  val r2 = l.updatedAt[_1](false)
  assertTypedEquals[IBS](1 :: false :: "foo" :: HNil, r2)

  val r3 = l.updatedAt[_2]("bar")
  assertTypedEquals[IBS](1 :: true :: "bar" :: HNil, r3)
}

// updatedAt with Int literal indices.
@Test def testUpdatedAtLiteral {
  type IBS = Int :: Boolean :: String :: HNil

  val l = 1 :: true :: "foo" :: HNil

  val r1 = l.updatedAt(0, 2)
  assertTypedEquals[IBS](2 :: true :: "foo" :: HNil, r1)

  val r2 = l.updatedAt(1, false)
  assertTypedEquals[IBS](1 :: false :: "foo" :: HNil, r2)

  val r3 = l.updatedAt(2, "bar")
  assertTypedEquals[IBS](1 :: true :: "bar" :: HNil, r3)
}

// NatTRel: witnesses that two HLists are related elementwise by type
// constructors (Id, List, Option, Const). Resolution alone is the assertion.
@Test def testNatTRel {
  type L1 = Int :: String :: Boolean :: HNil
  type L2 = List[Int] :: List[String] :: List[Boolean] :: HNil
  type L3 = Option[Int] :: Option[String] :: Option[Boolean] :: HNil
  type L4 = Int :: Int :: Int :: HNil
  type L5 = String :: String :: String :: HNil

  implicitly[NatTRel[L1, Id, L2, List]]
  implicitly[NatTRel[L2, List, L1, Id]]
  implicitly[NatTRel[L2, List, L3, Option]]
  implicitly[NatTRel[L1, Id, L4, Const[Int]#λ]]
  implicitly[NatTRel[L2, List, L4, Const[Int]#λ]]
}

// Natural transformation Option ~> List used by testNatTRelMap:
// triplicates the content (None maps to Nil).
object optionToList extends (Option ~> List) {
  def apply[A](fa: Option[A]): List[A] = List.fill(3)(fa.toList).flatten
}

// NatTRel.map applies a natural transformation across a related pair of HLists.
@Test def testNatTRelMap {
  type L1 = Option[Int] :: Option[Boolean] :: Option[String] :: Option[Nothing] :: HNil
  type L2 = List[Int] :: List[Boolean] :: List[String] :: List[Nothing] :: HNil
  val nattrel = implicitly[NatTRel[L1, Option, L2, List]]

  val l1: L1 = Option(1) :: Option(true) :: Option("three") :: None :: HNil
  val l2 = nattrel.map(optionToList, l1)

  assertTypedEquals[L2](l2,
    List(1, 1, 1) :: List(true, true, true) :: List("three", "three", "three") :: List() :: HNil)
}

// zipConst: pair every element with a constant. This test's body continues
// past this chunk boundary.
@Test def testZipConst {
  type IBS = Int :: Boolean :: String :: HNil
  val c = 5
  type WithConst = (Int, Int) :: (Boolean, Int) ::
(String, Int) :: HNil
  // Body of testZipConst (header opened in the previous chunk): the type class
  // is exercised via its apply, via implicitly on the Aux form, and via the
  // zipConst extension method.
  val l = 1 :: true :: "a" :: HNil
  typed[IBS](l)

  val expected = (1, c) :: (true, c) :: ("a", c) :: HNil

  val zcIntIbs = ZipConst[Int, IBS]
  val zipped1 = zcIntIbs(c, l)
  assertTypedEquals[WithConst](expected, zipped1)

  val zcaIntIbs = implicitly[ZipConst.Aux[Int, IBS, WithConst]]
  assertTypedEquals[WithConst](expected, zcaIntIbs(c, l))

  val x = l.zipConst(c)
  assertTypedEquals[WithConst](expected, x)
}

// zipWith: elementwise combination of two HLists under a Poly2; also checks
// that mismatched polys are rejected at compile time (illTyped).
@Test def testZipWith {
  import poly._

  object empty extends Poly2

  object add extends Poly2 {
    implicit val caseIntInt = at[Int, Int](_ + _)
  }

  // HNil zipWith HNil (emptyFn)
  val r1 = (HNil: HNil).zipWith(HNil: HNil)(empty)
  assertTypedEquals[HNil](HNil, r1)

  // HNil zipWith nonEmpty (emptyFn)
  val r2 = (HNil: HNil).zipWith(1 :: HNil)(empty)
  assertTypedEquals[HNil](HNil, r2)

  // nonEmpty zipWith HNil (emptyFn)
  val r3 = (1 :: HNil).zipWith(HNil: HNil)(empty)
  assertTypedEquals[HNil](HNil, r3)

  // singleton zipWith singleton
  val r4 = (1 :: HNil).zipWith(2 :: HNil)(add)
  assertTypedEquals[Int :: HNil](3 :: HNil, r4)

  {
    // longList zipWith longerList — extra right-hand elements are dropped.
    type Left  = Int :: String :: Double :: HNil
    type Right = Int :: Double :: String :: Boolean :: HNil

    val left: Left   = 1 :: "foo" :: 1.2 :: HNil
    val right: Right = 2 :: 2.3 :: "3.4" :: true :: HNil

    object zipFn extends Poly2 {
      implicit val caseIntInt      = at[Int, Int](_ + _)
      implicit val caseStringDouble = at[String, Double](_ + " -> " + _.toString)
      implicit val caseDoubleString = at[Double, String](_ + _.toDouble)
    }

    val r5 = left.zipWith(right)(zipFn)
    assertTypedEquals[Int :: String :: Double :: HNil](3 :: "foo -> 2.3" :: 4.6 :: HNil, r5)
  }

  { // invalid polys
    illTyped(""" (1 :: HNil).zipWith(2 :: HNil)(empty) """)

    object noIntFn extends Poly2 {
      implicit val caseDoubleDouble = at[Double, Double](_ + _)
    }

    illTyped(""" (1 :: HNil).zipWith(2 :: HNil)(noIntFn) """)
    illTyped(""" (1.0 :: 2 :: HNil).zipWith(2.0 :: 3 :: HNil)(noIntFn) """)
  }
}

// zipWithKeys: attach record keys back onto a values-HList; both by passing
// the keys explicitly and by a type argument. Length mismatches are ill-typed.
@Test def testWithKeys {
  import record._
  import syntax.singleton._

  val orig = ("intField" ->> 1) :: ("boolField" ->> true) :: HNil

  val result = orig.values.zipWithKeys(orig.keys)
  sameTyped(orig)(result)
  assertEquals(orig, result)
  val int = result.get("intField")
  assertTypedEquals[Int](1, int)
  val bool = result.get("boolField")
  assertTypedEquals[Boolean](true, bool)
  illTyped("""result.get("otherField")""")

  // key/value lengths must match up
  illTyped("orig.tail.values.zipWithKeys(orig.keys)")
  illTyped("orig.values.zipWithKeys(orig.keys.tail)")

  // Explicit type argument
  {
    val result = orig.values.zipWithKeys[HList.`"intField", "boolField"`.T]
    sameTyped(orig)(result)
    assertEquals(orig, result)
    val int = result.get("intField")
    assertTypedEquals[Int](1, int)
    val bool = result.get("boolField")
    assertTypedEquals[Boolean](true, bool)
    illTyped("""result.get("otherField")""")

    // key/value lengths must match up
    illTyped(""" orig.tail.values.zipWithKeys[HList.`"intField", "boolField"`.T] """)
    illTyped(""" orig.values.zipWithKeys[HList.`"boolField"`.T] """)
  }
}

// collect: keep and transform only the elements the Poly1 is defined at.
@Test def testCollect {
  import poly._

  object empty extends Poly1

  object complex extends Poly1 {
    implicit val caseInt    = at[Int](_.toDouble)
    implicit val caseString = at[String](_ => 1)
  }

  val in: Int :: String :: Double :: HNil = 1 :: "foo" :: 2.2 :: HNil

  // HNil collect p
  val r1 = (HNil: HNil).collect(empty)
  assertTypedEquals[HNil](HNil, r1)

  val r2 = (HNil: HNil).collect(poly.identity)
  assertTypedEquals[HNil](HNil, r2)

  val r3 = (HNil: HNil).collect(complex)
  assertTypedEquals[HNil](HNil, r3)

  // non-HNil collect empty
  val r4 = in.collect(empty)
  assertTypedEquals[HNil](HNil, r4)

  // non-HNil collect identity
  val r5 = in.collect(identity)
  assertTypedEquals[Int :: String :: Double :: HNil](in, r5)

  // non-HNil collect complex — Double is not covered and is dropped.
  val r6 = in.collect(complex)
  assertTypedEquals[Double :: Int :: HNil](1.0 :: 1 :: HNil, r6)
}

// Ordering instances for HLists: lexicographic sort. The final assertEquals
// continues past this chunk boundary.
@Test def testOrdering {
  assertEquals(List(HNil: HNil, HNil), List(HNil: HNil, HNil).sorted)

  assertEquals(List(1 :: HNil, 2 :: HNil, 3 :: HNil), List(2 :: HNil, 1 :: HNil, 3 :: HNil).sorted)

  assertEquals(
List(1 :: "abc" :: HNil, 1 :: "def" :: HNil, 2 :: "abc" :: HNil, 2 :: "def" :: HNil),
    // Lexicographic: first element dominates, second breaks ties.
    List(2 :: "abc" :: HNil, 1 :: "def" :: HNil, 2 :: "def" :: HNil, 1 :: "abc" :: HNil).sorted
  )
}

// mapCons: prepend a value onto every HList element of an HList of HLists.
@Test def testMapCons {
  type C = Char; type S = String; type I = Int; type D = Double

  val r1 = (HNil: HNil).mapCons('a')
  assertTypedEquals[HNil](HNil, r1)

  val r2 = (HNil :: HNil).mapCons('a')
  assertTypedEquals[(Char :: HNil) :: HNil]((('a' :: HNil) :: HNil), r2)

  val r3 = ((1 :: HNil) :: ("foo" :: HNil) :: (2.0 :: HNil) :: HNil).mapCons('a')
  assertTypedEquals[(C::I::HNil) :: (C::S::HNil) :: (C::D::HNil) :: HNil](
    ('a' :: 1 :: HNil) :: ('a' :: "foo" :: HNil) :: ('a' :: 2.0 :: HNil) :: HNil, r3
  )
}

// Interleave: all ways of inserting one element into an HList, as an HList of HLists.
@Test def testInterleave {
  type C = Char; type S = String; type I = Int; type D = Double

  def interleave[I, L <: HList](i: I, l: L)(implicit interleave: Interleave[I, L]): interleave.Out = interleave(i, l)

  val r1 = interleave('i', HNil)
  assertTypedEquals[(Char :: HNil) :: HNil](('i' :: HNil) :: HNil, r1)

  val r2 = interleave('i', 1 :: HNil)
  assertTypedEquals[(C::I::HNil) :: (I::C::HNil) :: HNil](('i' :: 1 :: HNil) :: (1 :: 'i' :: HNil) :: HNil, r2
  )

  val r3 = interleave('i', 1 :: "foo" :: HNil)
  assertTypedEquals[(C::I::S::HNil) :: (I::C::S::HNil) :: (I::S::C::HNil) :: HNil](
    ('i' :: 1 :: "foo" :: HNil) :: (1 :: 'i' :: "foo" :: HNil) :: (1 :: "foo" :: 'i' :: HNil) :: HNil, r3
  )

  val r4 = interleave('i', 1 :: "foo" :: 2.0 :: HNil)
  assertTypedEquals[(C::I::S::D::HNil) :: (I::C::S::D::HNil) :: (I::S::C::D::HNil) :: (I::S::D::C::HNil) :: HNil](
    ('i' :: 1 :: "foo" :: 2.0 :: HNil) :: (1 :: 'i' :: "foo" :: 2.0 :: HNil) ::
      (1 :: "foo" :: 'i' :: 2.0 :: HNil) :: (1 :: "foo" :: 2.0 :: 'i' :: HNil) :: HNil, r4
  )
}

// FlatMapInterleave: interleave an element into every HList of an HList of
// HLists, concatenating all results.
@Test def testFlatMapInterleave {
  type C = Char; type I = Int

  def flatMapInterleave[I, L <: HList](i: I, l: L)(implicit flatMapInterleave: FlatMapInterleave[I, L]) =
    flatMapInterleave(i, l)

  val r1 = flatMapInterleave('i', HNil)
  assertTypedEquals[HNil](HNil, r1)

  val r2 = flatMapInterleave('i', HNil ::
    HNil)
  assertTypedEquals[(Char :: HNil) :: HNil](('i' :: HNil) :: HNil, r2)

  val r3 = flatMapInterleave('i', (1 :: HNil) :: (2 :: HNil) :: HNil)
  assertTypedEquals[(C::I::HNil) :: (I::C::HNil) :: (C::I::HNil) :: (I::C::HNil) :: HNil](
    ('i' :: 1 :: HNil) :: (1 :: 'i' :: HNil) :: ('i' :: 2 :: HNil) :: (2 :: 'i' :: HNil) :: HNil, r3
  )
}

// permutations: all orderings of an HList, as an HList of HLists.
@Test def testPermutations {
  type S = String; type I = Int; type D = Double

  val r1 = HNil.permutations
  assertTypedEquals[HNil :: HNil](HNil :: HNil, r1)

  val r2 = (1 :: HNil).permutations
  assertTypedEquals[(Int :: HNil) :: HNil]((1 :: HNil) :: HNil, r2)

  val r3 = (1 :: "foo" :: HNil).permutations
  assertTypedEquals[(I::S::HNil) :: (S::I::HNil) :: HNil](
    (1 :: "foo" :: HNil) :: ("foo" :: 1 :: HNil) :: HNil, r3
  )

  val r4 = (1 :: "foo" :: 2.0 :: HNil).permutations
  assertTypedEquals[
    (I::S::D::HNil) :: (S::I::D::HNil) :: (S::D::I::HNil) ::
    (I::D::S::HNil) :: (D::I::S::HNil) :: (D::S::I::HNil) :: HNil
  ](
    (1 :: "foo" :: 2.0 :: HNil) :: ("foo" :: 1 :: 2.0 :: HNil) :: ("foo" :: 2.0 :: 1 :: HNil) ::
      (1 :: 2.0 :: "foo" :: HNil) :: (2.0 :: 1 :: "foo" :: HNil) :: (2.0 :: "foo" :: 1 :: HNil) :: HNil,
    r4
  )
}

// mkString: delimited rendering, mirroring scala.collection mkString.
@Test def testMkString {
  assertEquals("⸨1, foo, 2.0⸩", (1 :: "foo" :: 2.0 :: HNil).mkString("⸨", ", ", "⸩"))
}

// rotateLeft with both Int literal and Nat arguments; rotations by 0 or by a
// multiple of the length are identities (assertTypedSame), others permute.
// This test continues past the end of this chunk.
@Test def testRotateLeft {
  val in0 = HNil
  val in1 = 1 :: HNil
  val in2 = 1 :: "foo" :: HNil
  val in3 = 1 :: "foo" :: 2.0 :: HNil
  val in4 = 1 :: "foo" :: 2.0 :: 'a' :: HNil
  type S = String; type I = Int; type D = Double; type C = Char

  { // rotateLeft(0)
    val r1 = in0.rotateLeft(0)
    assertTypedSame[HNil](HNil, r1)
    val r2 = in1.rotateLeft(0)
    assertTypedSame[I :: HNil](in1, r2)
    val r3 = in2.rotateLeft(0)
    assertTypedSame[I :: S :: HNil](in2, r3)
    val r4 = in3.rotateLeft(0)
    assertTypedSame[I :: S :: D :: HNil](in3, r4)
    val r5 = in4.rotateLeft(0)
    assertTypedSame[I :: S :: D :: C :: HNil](in4, r5)
  }

  { // rotateLeft[_0]
    val r1 = in0.rotateLeft[_0]
    assertTypedSame[HNil](HNil, r1)
    val r2 = in1.rotateLeft[_0]
    assertTypedSame[I :: HNil](in1, r2)
    val r3 =
      in2.rotateLeft[_0]
    assertTypedSame[I :: S :: HNil](in2, r3)
    val r4 = in3.rotateLeft[_0]
    assertTypedSame[I :: S :: D :: HNil](in3, r4)
    val r5 = in4.rotateLeft[_0]
    assertTypedSame[I :: S :: D :: C :: HNil](in4, r5)
  }

  { // rotateLeft(n % size == 0)
    val r1 = in1.rotateLeft(1)
    assertTypedSame[I :: HNil](in1, r1)
    val r2 = in1.rotateLeft(2)
    assertTypedSame[I :: HNil](in1, r2)
    val r3 = in2.rotateLeft(2)
    assertTypedSame[I :: S :: HNil](in2, r3)
    val r4 = in2.rotateLeft(4)
    assertTypedSame[I :: S :: HNil](in2, r4)
    val r5 = in3.rotateLeft(3)
    assertTypedSame[I :: S :: D :: HNil](in3, r5)
    val r6 = in3.rotateLeft(6)
    assertTypedSame[I :: S :: D :: HNil](in3, r6)
    val r7 = in4.rotateLeft(4)
    assertTypedSame[I :: S :: D :: C :: HNil](in4, r7)
    val r8 = in4.rotateLeft(8)
    assertTypedSame[I :: S :: D :: C :: HNil](in4, r8)
  }

  { // rotateLeft[N % Size == 0]
    val r1 = in1.rotateLeft[_1]
    assertTypedSame[I :: HNil](in1, r1)
    val r2 = in1.rotateLeft[_2]
    assertTypedSame[I :: HNil](in1, r2)
    val r3 = in2.rotateLeft[_2]
    assertTypedSame[I :: S :: HNil](in2, r3)
    val r4 = in2.rotateLeft[_4]
    assertTypedSame[I :: S :: HNil](in2, r4)
    val r5 = in3.rotateLeft[_3]
    assertTypedSame[I :: S :: D :: HNil](in3, r5)
    val r6 = in3.rotateLeft[_6]
    assertTypedSame[I :: S :: D :: HNil](in3, r6)
    val r7 = in4.rotateLeft[_4]
    assertTypedSame[I :: S :: D :: C :: HNil](in4, r7)
    val r8 = in4.rotateLeft[_8]
    assertTypedSame[I :: S :: D :: C :: HNil](in4, r8)
  }

  { // other(n)
    val r1 = in2.rotateLeft(1)
    assertTypedEquals[S :: I :: HNil]("foo" :: 1 :: HNil, r1)
    val r2 = in3.rotateLeft(1)
    assertTypedEquals[S :: D :: I :: HNil]("foo" :: 2.0 :: 1 :: HNil, r2)
    val r3 = in4.rotateLeft(1)
    assertTypedEquals[S :: D :: C :: I :: HNil]("foo" :: 2.0 :: 'a' :: 1 :: HNil, r3)
    val r4 = in4.rotateLeft(2)
    assertTypedEquals[D :: C :: I :: S :: HNil](2.0 :: 'a' :: 1 :: "foo" :: HNil, r4)
    val r5 = in4.rotateLeft(3)
    assertTypedEquals[C :: I :: S :: D :: HNil]('a' :: 1 :: "foo" :: 2.0 :: HNil, r5)
    val r6 = in4.rotateLeft(5)
    assertTypedEquals[S :: D :: C ::
I :: HNil]("foo" :: 2.0 :: 'a' :: 1 :: HNil, r6) val r7 = in4.rotateLeft(6) assertTypedEquals[D :: C :: I :: S :: HNil](2.0 :: 'a' :: 1 :: "foo" :: HNil, r7) } { // other[N] val r1 = in2.rotateLeft[_1] assertTypedEquals[S :: I :: HNil]("foo" :: 1 :: HNil, r1) val r2 = in3.rotateLeft[_1] assertTypedEquals[S :: D :: I :: HNil]("foo" :: 2.0 :: 1 :: HNil, r2) val r3 = in4.rotateLeft[_1] assertTypedEquals[S :: D :: C :: I :: HNil]("foo" :: 2.0 :: 'a' :: 1 :: HNil, r3) val r4 = in4.rotateLeft[_2] assertTypedEquals[D :: C :: I :: S :: HNil](2.0 :: 'a' :: 1 :: "foo" :: HNil, r4) val r5 = in4.rotateLeft[_3] assertTypedEquals[C :: I :: S :: D :: HNil]('a' :: 1 :: "foo" :: 2.0 :: HNil, r5) val r6 = in4.rotateLeft[_5] assertTypedEquals[S :: D :: C :: I :: HNil]("foo" :: 2.0 :: 'a' :: 1 :: HNil, r6) val r7 = in4.rotateLeft[_6] assertTypedEquals[D :: C :: I :: S :: HNil](2.0 :: 'a' :: 1 :: "foo" :: HNil, r7) } } @Test def testRotateRight { val in0 = HNil val in1 = 1 :: HNil val in2 = 1 :: "foo" :: HNil val in3 = 1 :: "foo" :: 2.0 :: HNil val in4 = 1 :: "foo" :: 2.0 :: 'a' :: HNil type S = String; type I = Int; type D = Double; type C = Char { // rotateRight(0) val r1 = in0.rotateRight(0) assertTypedSame[HNil](HNil, r1) val r2 = in1.rotateRight(0) assertTypedSame[I :: HNil](in1, r2) val r3 = in2.rotateRight(0) assertTypedSame[I :: S :: HNil](in2, r3) val r4 = in3.rotateRight(0) assertTypedSame[I :: S :: D :: HNil](in3, r4) val r5 = in4.rotateRight(0) assertTypedSame[I :: S :: D :: C :: HNil](in4, r5) } { // rotateRight[_0] val r1 = in0.rotateRight[_0] assertTypedSame[HNil](HNil, r1) val r2 = in1.rotateRight[_0] assertTypedSame[I :: HNil](in1, r2) val r3 = in2.rotateRight[_0] assertTypedSame[I :: S :: HNil](in2, r3) val r4 = in3.rotateRight[_0] assertTypedSame[I :: S :: D :: HNil](in3, r4) val r5 = in4.rotateRight[_0] assertTypedSame[I :: S :: D :: C :: HNil](in4, r5) } { // rotateRight(n % size == 0) val r1 = in1.rotateRight(1) assertTypedSame[I :: HNil](in1, r1) val r2 = 
in1.rotateRight(2) assertTypedSame[I :: HNil](in1, r2) val r3 = in2.rotateRight(2) assertTypedSame[I :: S :: HNil](in2, r3) val r4 = in2.rotateRight(4) assertTypedSame[I :: S :: HNil](in2, r4) val r5 = in3.rotateRight(3) assertTypedSame[I :: S :: D :: HNil](in3, r5) val r6 = in3.rotateRight(6) assertTypedSame[I :: S :: D :: HNil](in3, r6) val r7 = in4.rotateRight(4) assertTypedSame[I :: S :: D :: C :: HNil](in4, r7) val r8 = in4.rotateRight(8) assertTypedSame[I :: S :: D :: C :: HNil](in4, r8) } { // rotateRight[N % Size == 0] val r1 = in1.rotateRight[_1] assertTypedSame[I :: HNil](in1, r1) val r2 = in1.rotateRight[_2] assertTypedSame[I :: HNil](in1, r2) val r3 = in2.rotateRight[_2] assertTypedSame[I :: S :: HNil](in2, r3) val r4 = in2.rotateRight[_4] assertTypedSame[I :: S :: HNil](in2, r4) val r5 = in3.rotateRight[_3] assertTypedSame[I :: S :: D :: HNil](in3, r5) val r6 = in3.rotateRight[_6] assertTypedSame[I :: S :: D :: HNil](in3, r6) val r7 = in4.rotateRight[_4] assertTypedSame[I :: S :: D :: C :: HNil](in4, r7) val r8 = in4.rotateRight[_8] assertTypedSame[I :: S :: D :: C :: HNil](in4, r8) } { // others(n) val r1 = in2.rotateRight(1) assertTypedEquals[S :: I :: HNil]("foo" :: 1 :: HNil, r1) val r2 = in3.rotateRight(1) assertTypedEquals[D :: I :: S :: HNil](2.0 :: 1 :: "foo" :: HNil, r2) val r3 = in4.rotateRight(1) assertTypedEquals[C :: I :: S :: D :: HNil]('a' :: 1 :: "foo" :: 2.0 :: HNil, r3) val r4 = in4.rotateRight(2) assertTypedEquals[D :: C :: I :: S :: HNil](2.0 :: 'a' :: 1 :: "foo" :: HNil, r4) val r5 = in4.rotateRight(3) assertTypedEquals[S :: D :: C :: I :: HNil]("foo" :: 2.0 :: 'a' :: 1 :: HNil, r5) val r6 = in4.rotateRight(5) assertTypedEquals[C :: I :: S :: D :: HNil]('a' :: 1 :: "foo" :: 2.0 :: HNil, r6) val r7 = in4.rotateRight(6) assertTypedEquals[D :: C :: I :: S :: HNil](2.0 :: 'a' :: 1 :: "foo" :: HNil, r7) } { // others[N] val r1 = in2.rotateRight[_1] assertTypedEquals[S :: I :: HNil]("foo" :: 1 :: HNil, r1) val r2 = in3.rotateRight[_1] 
assertTypedEquals[D :: I :: S :: HNil](2.0 :: 1 :: "foo" :: HNil, r2) val r3 = in4.rotateRight[_1] assertTypedEquals[C :: I :: S :: D :: HNil]('a' :: 1 :: "foo" :: 2.0 :: HNil, r3) val r4 = in4.rotateRight[_2] assertTypedEquals[D :: C :: I :: S :: HNil](2.0 :: 'a' :: 1 :: "foo" :: HNil, r4) val r5 = in4.rotateRight[_3] assertTypedEquals[S :: D :: C :: I :: HNil]("foo" :: 2.0 :: 'a' :: 1 :: HNil, r5) val r6 = in4.rotateRight[_5] assertTypedEquals[C :: I :: S :: D :: HNil]('a' :: 1 :: "foo" :: 2.0 :: HNil, r6) val r7 = in4.rotateRight[_6] assertTypedEquals[D :: C :: I :: S :: HNil](2.0 :: 'a' :: 1 :: "foo" :: HNil, r7) } } object smear extends Poly { implicit val caseIntInt = use((x: Int, y: Int) => x + y) implicit val caseStringInt = use((x: String, y: Int) => x.toInt + y) implicit val caseIntString = use((x: Int, y: String) => x + y.toInt) } @Test def testScanLeft { val in = 1 :: "2" :: HNil val out = in.scanLeft(1)(smear) typed[Int :: Int :: Int :: HNil](out) assertEquals(1 :: 2 :: 4 :: HNil, out) } @Test def testScanRight{ val in = 1 :: "2" :: HNil val out = in.scanRight(1)(smear) typed[Int :: Int :: Int :: HNil](out) assertEquals(4 :: 3 :: 1 :: HNil, out) } @Test def testFill { { val empty = HList.fill(0)(true) typed[_0](empty.length) } { val empty = HList.fill[Boolean](0)(true) typed[_0](empty.length) } { val single = HList.fill(1)(None) typed[_1](single.length) typed[None.type](single.head) assertEquals(None, single.head) } { val single = HList.fill[None.type](1)(None) typed[_1](single.length) typed[None.type](single.head) assertEquals(None, single.head) } { val three = HList.fill(3)(m2i) typed[_3](three.length) typed[M2[Int, Unit]](three(_0)) typed[M2[Int, Unit]](three(_1)) typed[M2[Int, Unit]](three(_2)) assertEquals(m2i, three(_0)) assertEquals(m2i, three(_1)) assertEquals(m2i, three(_2)) } { val three = HList.fill[M2[Int, Unit]](3)(m2i) typed[_3](three.length) typed[M2[Int, Unit]](three(_0)) typed[M2[Int, Unit]](three(_1)) typed[M2[Int, Unit]](three(_2)) 
assertEquals(m2i, three(_0)) assertEquals(m2i, three(_1)) assertEquals(m2i, three(_2)) } { val empty = HList.fill(0, 0)(true) typed[_0](empty.length) } { val empty = HList.fill[Boolean](0, 0)(true) typed[_0](empty.length) } { val empty = HList.fill(2, 0)(true) typed[_2](empty.length) typed[_0](empty(_0).length) typed[_0](empty(_1).length) } { val empty = HList.fill[Boolean](2, 0)(true) typed[_2](empty.length) typed[_0](empty(_0).length) typed[_0](empty(_1).length) } { val empty = HList.fill(0, 2)(true) typed[_0](empty.length) } { val empty = HList.fill[Boolean](0, 2)(true) typed[_0](empty.length) } { val oneByTwo = HList.fill(1, 2)(None) typed[_1](oneByTwo.length) typed[_2](oneByTwo.head.length) typed[None.type](oneByTwo.head(_0)) typed[None.type](oneByTwo.head(_1)) assertEquals(None, oneByTwo.head(_0)) assertEquals(None, oneByTwo.head(_1)) } { val oneByTwo = HList.fill[None.type](1, 2)(None) typed[_1](oneByTwo.length) typed[_2](oneByTwo.head.length) typed[None.type](oneByTwo.head(_0)) typed[None.type](oneByTwo.head(_1)) assertEquals(None, oneByTwo.head(_0)) assertEquals(None, oneByTwo.head(_1)) } { val twoByThree = HList.fill(2, 3)(None) typed[_2](twoByThree.length) typed[_3](twoByThree(_0).length) typed[_3](twoByThree(_1).length) typed[None.type](twoByThree.at[_0].at[_0]) typed[None.type](twoByThree.at[_0].at[_1]) typed[None.type](twoByThree.at[_0].at[_2]) typed[None.type](twoByThree.at[_1].at[_0]) typed[None.type](twoByThree.at[_1].at[_1]) typed[None.type](twoByThree.at[_1].at[_2]) assertEquals(None, twoByThree.at[_0].at[_0]) assertEquals(None, twoByThree.at[_0].at[_1]) assertEquals(None, twoByThree.at[_0].at[_2]) assertEquals(None, twoByThree.at[_1].at[_0]) assertEquals(None, twoByThree.at[_1].at[_1]) assertEquals(None, twoByThree.at[_1].at[_2]) } { val twoByThree = HList.fill[None.type](2, 3)(None) typed[_2](twoByThree.length) typed[_3](twoByThree(_0).length) typed[_3](twoByThree(_1).length) typed[None.type](twoByThree.at[_0].at[_0]) 
typed[None.type](twoByThree.at[_0].at[_1]) typed[None.type](twoByThree.at[_0].at[_2]) typed[None.type](twoByThree.at[_1].at[_0]) typed[None.type](twoByThree.at[_1].at[_1]) typed[None.type](twoByThree.at[_1].at[_2]) assertEquals(None, twoByThree.at[_0].at[_0]) assertEquals(None, twoByThree.at[_0].at[_1]) assertEquals(None, twoByThree.at[_0].at[_2]) assertEquals(None, twoByThree.at[_1].at[_0]) assertEquals(None, twoByThree.at[_1].at[_1]) assertEquals(None, twoByThree.at[_1].at[_2]) } } @Test def testPatch { val basehl = 1 :: 2 :: "three" :: HNil { //patch an empty hlist val out = HNil.patch(0, basehl, 0) val out2 = HNil.patch[_0,_0](basehl) typed[Int :: Int :: String :: HNil](out) assertEquals(out, basehl) assertTypedEquals[Int :: Int :: String :: HNil](out, out2) } { //single patch w/ nothing removed val out = basehl.patch(1, 4 :: HNil, 0) val out2 = basehl.patch[_1,_0](4 :: HNil) typed[Int :: Int :: Int :: String :: HNil](out) assertEquals(1 :: 4 :: 2 :: "three" :: HNil, out) assertTypedEquals[Int :: Int :: Int :: String :: HNil](out, out2) } { //single patch w/ 2 elements removed val out = basehl.patch(1, 3 :: HNil, 2) val out2 = basehl.patch[_1,_2](3 :: HNil) typed[Int :: Int :: HNil](out) assertEquals(1 :: 3 :: HNil, out) assertTypedEquals[Int :: Int :: HNil](out, out2) } { //essentially append val p = 4 :: 5 :: "six" :: HNil val out = basehl.patch(3, p, 0) val out2 = basehl.patch[_3,_0](p) typed[Int :: Int :: String :: Int :: Int :: String :: HNil](out) assertEquals(1 :: 2 :: "three" :: 4 :: 5 :: "six" :: HNil, out) assertTypedEquals[Int :: Int :: String :: Int :: Int :: String :: HNil](out, out2) } { //several patched w/ everything from original removed val sub = 4 :: "five" :: "six" :: HNil val out = basehl.patch(0, sub, 3) val out2 = basehl.patch[_0,_3](sub) typed[Int :: String :: String :: HNil](out) assertEquals(sub, out) assertTypedEquals[Int :: String :: String :: HNil](out, out2) } } @Test def testToCoproduct { type PISB = Int :: String :: Boolean :: 
HNil type CISBa = Int :+: String :+: Boolean :+: CNil type CISBb = the.`ToCoproduct[PISB]`.Out implicitly[CISBa =:= CISBb] } @Test def testHListTypeSelector { import syntax.singleton._ typed[HList.` `.T](HNil) typed[HList.`Int`.T](23 :: HNil) typed[HList.`Int, String`.T](23 :: "foo" :: HNil) typed[HList.`Int, String, Boolean`.T](23 :: "foo" :: true :: HNil) // Literal types typed[HList.`2`.T](2.narrow :: HNil) typed[HList.`2, "a", true`.T](2.narrow :: "a".narrow :: true.narrow :: HNil) illTyped(""" typed[HList.`2`.T](3.narrow :: HNil) """) // Mix of standard and literal types typed[HList.`2, String, true`.T](2.narrow :: "a" :: true.narrow :: HNil) } object Foo extends ProductArgs { def applyProduct[L <: HList](args: L): L = args } @Test def testProductArgs { val l = Foo(23, "foo", true) typed[Int :: String :: Boolean :: HNil](l) val v1 = l.head typed[Int](v1) assertEquals(23, v1) val v2 = l.tail.head typed[String](v2) assertEquals("foo", v2) val v3 = l.tail.tail.head typed[Boolean](v3) assertEquals(true, v3) val v4 = l.tail.tail.tail typed[HNil](v4) illTyped(""" r.tail.tail.tail.head """) } object SFoo extends SingletonProductArgs { def applyProduct[L <: HList](args: L): L = args } case class Quux(i: Int, s: String, b: Boolean) object selectAll extends SingletonProductArgs { class Apply[K <: HList] { def from[T, R <: HList, S <: HList, Out](t: T) (implicit gen: LabelledGeneric.Aux[T, R], sel: SelectAll.Aux[R, K, S], tp: Tupler.Aux[S, Out] ): Out = tp(sel(gen.to(t))) } def applyProduct[K <: HList](keys: K) = new Apply[K] } trait NonSingletonHNilTC[T] object NonSingletonHNilTC { def apply[T](t: T)(implicit i: NonSingletonHNilTC[T]): NonSingletonHNilTC[T] = i implicit val nsHNilTC: NonSingletonHNilTC[HNil] = new NonSingletonHNilTC[HNil] {} } @Test def testSingletonProductArgs { object Obj val l = SFoo(23, "foo", 'bar, Obj, true) typed[Witness.`23`.T :: Witness.`"foo"`.T :: Witness.`'bar`.T :: Obj.type :: Witness.`true`.T :: HNil](l) // Annotations on the LHS here and 
subsequently, otherwise scalac will // widen the RHS to a non-singleton type. val v1: Witness.`23`.T = l.head assertEquals(23, v1) val v2: Witness.`"foo"`.T = l.tail.head assertEquals("foo", v2) val v3: Witness.`'bar`.T = l.tail.tail.head assertEquals('bar, v3) val v4: Obj.type = l.tail.tail.tail.head assertEquals(Obj, v4) val v5: Witness.`true`.T = l.tail.tail.tail.tail.head assertEquals(true, v5) val v6 = l.tail.tail.tail.tail.tail typed[HNil](v6) illTyped(""" r.tail.tail.tail.tail.tail.tail.head """) // Verify that we infer HNil rather than HNil.type at the end NonSingletonHNilTC(SFoo(23).tail) NonSingletonHNilTC(SFoo()) val quux = Quux(23, "foo", true) val ib = selectAll('i, 'b).from(quux) typed[(Int, Boolean)](ib) assertEquals((23, true), ib) } implicit class Interpolator(val sc: StringContext) { class Args extends ProductArgs { def applyProduct[L <: HList](l: L): L = l } val hlist: Args = new Args } @Test def testStringInterpolator { val (i, s, b) = (23, "foo", true) val l = hlist"Int: $i, String: $s, Boolean: $b" typed[Int :: String :: Boolean :: HNil](l) val v1 = l.head typed[Int](v1) assertEquals(23, v1) val v2 = l.tail.head typed[String](v2) assertEquals("foo", v2) val v3 = l.tail.tail.head typed[Boolean](v3) assertEquals(true, v3) val v4 = l.tail.tail.tail typed[HNil](v4) illTyped(""" r.tail.tail.tail.head """) } }
travisbrown/shapeless
core/src/test/scala/shapeless/hlist.scala
Scala
apache-2.0
94,647
/*
  Author: uberset
  Date: 2015-12-26
  Licence: GPL v2
*/

package uberset.l1_compiler

import java.io.{StringWriter, BufferedReader, PrintWriter, StringReader}

/**
 * Regression tests for the L1 parser: each sample program under `input/` is
 * compiled with the RPN generator and the generated output is compared against
 * the RPN rendering of a hand-written expected syntax tree.
 */
object TestParseTree {

    def main(args: Array[String]): Unit = {
        println(this.getClass.getSimpleName)
        val results = Seq(
            test("int1", Program(Seq(PrintInt(IntLit(1))))),
            test("int32767", Program(Seq(PrintInt(IntLit(32767))))),
            test("int32768", Program(Seq(PrintInt(IntLit(32768))))),
            test("int65535", Program(Seq(PrintInt(IntLit(65535))))),
            test("int65536", Program(Seq(PrintInt(IntLit(65536))))),
            test("hello", Program(Seq(PrintStr(StrLit("Hello World!"))))),
            test("char", Program(Seq(
                PrintChr(ChrLit('H')),
                PrintChr(ChrLit('\\'')),
                PrintChr(ChrLit('\\\\')),
                PrintChr(ChrLit(10.toChar)),
                PrintChr(ChrLit('\\r')),
                PrintChr(ChrLit('/'))
            ))),
            test("empty", Program(Seq())),
            test("boolean", Program(Seq(
                PrintBoo(BooLit(true)),
                PrintChr(ChrLit(' ')),
                PrintBoo(BooLit(false))
            ))),
            test("negInt", Program(Seq(
                PrintStr(StrLit("-1=")), PrintInt(NegI(IntLit(1))), PrintLn(),
                PrintStr(StrLit("--2=")), PrintInt(NegI(NegI(IntLit(2)))), PrintLn(),
                PrintStr(StrLit("---3=")), PrintInt(NegI(NegI(NegI(IntLit(3))))), PrintLn()
            ))),
            test("mulOpsInt", Program(Seq(
                PrintStr(StrLit("2*3=")), PrintInt(MulI(IntLit(2), IntLit(3))), PrintLn(),
                PrintStr(StrLit("2*-3=")), PrintInt(MulI(IntLit(2), NegI(IntLit(3)))), PrintLn(),
                PrintStr(StrLit("-2*-3=")), PrintInt(MulI(NegI(IntLit(2)), NegI(IntLit(3)))), PrintLn(),
                PrintStr(StrLit("-2*3=")), PrintInt(MulI(NegI(IntLit(2)), IntLit(3))), PrintLn(),
                PrintLn(),
                PrintStr(StrLit("5/2=")), PrintInt(DivI(IntLit(5), IntLit(2))), PrintLn(),
                PrintStr(StrLit("5/-2=")), PrintInt(DivI(IntLit(5), NegI(IntLit(2)))), PrintLn(),
                PrintStr(StrLit("-5/-2=")), PrintInt(DivI(NegI(IntLit(5)), NegI(IntLit(2)))), PrintLn(),
                PrintStr(StrLit("-5/2=")), PrintInt(DivI(NegI(IntLit(5)), IntLit(2))), PrintLn(),
                PrintLn(),
                PrintStr(StrLit("5%2=")), PrintInt(ModI(IntLit(5), IntLit(2))), PrintLn(),
                PrintStr(StrLit("5%-2=")), PrintInt(ModI(IntLit(5), NegI(IntLit(2)))), PrintLn(),
                PrintStr(StrLit("-5%-2=")), PrintInt(ModI(NegI(IntLit(5)), NegI(IntLit(2)))), PrintLn(),
                PrintStr(StrLit("-5%2=")), PrintInt(ModI(NegI(IntLit(5)), IntLit(2))), PrintLn()
            ))),
            test("addOpsInt", Program(Seq(
                PrintStr(StrLit("2+3=")), PrintInt(AddI(IntLit(2), IntLit(3))), PrintLn(),
                PrintStr(StrLit("2+-3=")), PrintInt(AddI(IntLit(2), NegI(IntLit(3)))), PrintLn(),
                PrintStr(StrLit("2+3*4=")), PrintInt(AddI(IntLit(2), MulI(IntLit(3), IntLit(4)))), PrintLn(),
                PrintStr(StrLit("2*3+4=")), PrintInt(AddI(MulI(IntLit(2), IntLit(3)), IntLit(4))), PrintLn(),
                PrintLn(),
                PrintStr(StrLit("2-3=")), PrintInt(SubI(IntLit(2), IntLit(3))), PrintLn(),
                PrintStr(StrLit("2--3=")), PrintInt(SubI(IntLit(2), NegI(IntLit(3)))), PrintLn(),
                PrintStr(StrLit("2-3*4=")), PrintInt(SubI(IntLit(2), MulI(IntLit(3), IntLit(4)))), PrintLn(),
                PrintStr(StrLit("2*3-4=")), PrintInt(SubI(MulI(IntLit(2), IntLit(3)), IntLit(4))), PrintLn()
            )))
        )
        val tests = results.size
        // count(identity) replaces filter(identity).size: no intermediate collection
        val passed = results.count(identity)
        val failed = tests - passed
        if(failed>0) println(s"$failed of $tests tests failed.")
        else println(s"All $tests tests passed.")
    }

    /**
     * Compiles the source file `input/<mainName>.l1` with the RPN generator and
     * compares the emitted output against the RPN rendering of `tree`.
     *
     * @param mainName base name of the sample program under `input/`
     * @param tree     expected syntax tree for the sample program
     * @return true when the generated RPN matches the expected RPN
     */
    def test(mainName: String, tree: Program): Boolean = {
        try {
            // Close the Source after reading; the original leaked the file handle.
            val source = scala.io.Source.fromFile("input/"+mainName+".l1")
            val text: String =
                try source.getLines.mkString("\\n")
                finally source.close()
            println(mainName)
            println(text)
            println("--------------------")
            val lexer = new Lexer(new BufferedReader(new StringReader(text)))
            val sw = new StringWriter()
            // Plain statements instead of the original side-effecting
            // anonymous `new PrintWriter(sw) { ... }` constructor body.
            val writer = new PrintWriter(sw)
            val generator = new GeneratorRpn(writer)
            new Compiler(lexer, generator).parse()
            writer.close()
            val expected = toRpn(tree)
            val actual = sw.toString()
            if(expected!=actual) {
                println(s"expecting: $expected")
                println(s"got : $actual")
                false
            } else {
                true
            }
        } catch {
            case e: Exception =>
                println(e.toString)
                e.printStackTrace()
                false
        }
    }

    /**
     * Renders a syntax tree as the reverse-polish-notation string that the
     * RPN generator is expected to emit for the equivalent source program.
     */
    def toRpn(node: AnyRef): String = {
        node match {
            // Fold all statements into one string instead of mutating a local var.
            case Program(stms) => stms.map(toRpn).mkString("Program(", "", ")")
            case PrintInt(arg) => toRpn(arg) + "printInt "
            case PrintStr(arg) => toRpn(arg) + "printStr "
            case PrintChr(arg) => toRpn(arg) + "printChr "
            case PrintBoo(arg) => toRpn(arg) + "printBoo "
            case PrintLn() => "printLn "
            case IntLit(v) => s"Int($v) "
            case StrLit(v) => s"Str($v) "
            case ChrLit(v) => s"Chr(${v.toInt}) "
            case BooLit(v) => s"Boo($v) "
            case NegI(x) => toRpn(x) + "negI "
            case MulI(x, y) => toRpn(x) + toRpn(y) + "mulI "
            case DivI(x, y) => toRpn(x) + toRpn(y) + "divI "
            case ModI(x, y) => toRpn(x) + toRpn(y) + "modI "
            case AddI(x, y) => toRpn(x) + toRpn(y) + "addI "
            case SubI(x, y) => toRpn(x) + toRpn(y) + "subI "
        }
    }

}

// Syntax-tree model for the L1 language used by the tests above.
case class Program(stms: Seq[Stm])

abstract class Stm
case class PrintInt(arg: IntExpression) extends Stm
case class PrintStr(arg: StrLit) extends Stm
case class PrintChr(arg: ChrLit) extends Stm
case class PrintBoo(arg: BooLit) extends Stm
case class PrintLn() extends Stm

abstract class IntExpression
case class IntLit(v: Int) extends IntExpression
case class NegI(v: IntExpression) extends IntExpression
case class MulI(v: IntExpression, w: IntExpression) extends IntExpression
case class DivI(v: IntExpression, w: IntExpression) extends IntExpression
case class ModI(v: IntExpression, w: IntExpression) extends IntExpression
case class AddI(v: IntExpression, w: IntExpression) extends IntExpression
case class SubI(v: IntExpression, w: IntExpression) extends IntExpression

case class StrLit(v: String)
case class ChrLit(v: Char)
case class BooLit(v: Boolean)
uberset/L1-compiler
src/test/scala/uberset/l1_compiler/TestParseTree.scala
Scala
gpl-2.0
6,968
package com.twitter.finagle.loadbalancer

import com.twitter.conversions.time._
import com.twitter.finagle.{param, Stack}
import com.twitter.finagle.stats.SummarizingStatsReceiver
import com.twitter.finagle.NoBrokersAvailableException
import com.twitter.util.{Activity, Future, Stopwatch, Var}

/**
 * A load-balancer simulation: drives a configurable request rate from a set of
 * clients through a chosen balancer implementation to a set of simulated
 * backends, then optionally reports summary stats and the per-server load
 * distribution at the end of the run.
 */
private object Simulation extends com.twitter.app.App {
  val qps = flag("qps", 1250, "Number of queries to send per second.")
  val dur = flag("dur", 45.seconds, "Benchmark duration")
  val nBackends = flag("backends", 10, "Number of stable uniform backends")
  val nClients = flag("clients", 1, "Number of clients which uniformly send load")
  val bal = flag("bal", "p2c", "The load balancer used by the clients")
  val coldStartBackend = flag("coldstart", false, "Add a cold starting backend")
  val slowMiddleBackend = flag("slowmiddle", false, "Adds a fast-then-slow-then-fast again backend")
  val showProgress = flag("showprogress", false, "Print stats each second")
  val showSummary = flag("showsummary", true, "Print a stats summary at the end of the test")
  val showLoadDist =
    flag("showloaddist", true, "Print a summary of server load distribution at the end of the test")

  // Exception returned by balancers when they have no members in the set.
  private val noBrokers = new NoBrokersAvailableException

  def main(): Unit = {
    val stats = new SummarizingStatsReceiver

    // Monotonically increasing ids for the simulated servers.
    var serverCount: Int = 0
    val genServerId: () => String = () => {
      serverCount += 1
      serverCount.toString
    }

    // create a latency distribution from a set of recorded ping latencies.
    val url = getClass.getClassLoader.getResource("real_latencies.data")
    val stableLatency = LatencyProfile.fromFile(url)

    // The backend set is a Var so extra (cold-start / slow-middle) servers can
    // be added to it while the simulation is running.
    val servers = Var(
      Seq
        .tabulate(nBackends()) { _ =>
          val id = genServerId()
          ServerFactory(id, stableLatency, stats.scope(s"srv_${id}"))
        }
        .toSet
    )
    val activityServers = Activity(servers.map { srvs => Activity.Ok(srvs.toVector) })

    // Monotonically increasing ids for the simulated clients.
    var clientCount: Int = 0
    val genClientId: () => String = () => {
      clientCount += 1
      clientCount.toString
    }

    val clients: Seq[Client] = Seq.tabulate(nClients()) { _ =>
      val id = genClientId()
      val sr = stats.scope(s"client_${id}")
      // This gives us a nice entry point to have per-client
      // id configurations. For example, we can test two groups
      // of clients (even and odds) running a separate aperture
      // config.
      val balancer = bal() match {
        case "p2c" =>
          Balancers
            .p2c()
            .newBalancer(
              activityServers,
              noBrokers,
              Stack.Params.empty + param.Stats(sr.scope("p2c"))
            )
        case "ewma" =>
          Balancers
            .p2cPeakEwma()
            .newBalancer(
              activityServers,
              noBrokers,
              Stack.Params.empty + param.Stats(sr.scope("p2c_ewma"))
            )
        case "aperture" =>
          Balancers
            .aperture()
            .newBalancer(
              activityServers,
              noBrokers,
              Stack.Params.empty + param.Stats(sr.scope("aperture"))
            )
        case "rr" =>
          Balancers
            .roundRobin()
            .newBalancer(
              activityServers,
              noBrokers,
              Stack.Params.empty + param.Stats(sr.scope("round_robin"))
            )
      }
      ClientFactory(id, balancer, sr)
    }

    // One "query" fans a request out to every client in parallel.
    val query: () => Future[Unit] = () => {
      Future
        .collect(clients.map { clnt => clnt(()) })
        .unit
    }

    val elapsed = Stopwatch.start()
    // Split the requested qps into a whole per-millisecond rate plus a
    // remainder that is spread evenly across each second.
    val qpms = qps() / 1000
    val rem = qps() % 1000
    var ms = 0

    val p = new LatencyProfile(elapsed)
    // TODO: These latency events are dependent on the running time of
    // the simulation. They should probably be defined in terms of a ratio
    // of the running time to be more flexible.
    if (coldStartBackend()) {
      val coldStart = p.warmup(10.seconds) _ andThen p.slowWithin(19.seconds, 23.seconds, 10)
      servers() += ServerFactory(
        genServerId(),
        coldStart(stableLatency),
        stats.scope("srv_cold_start")
      )
    }
    if (slowMiddleBackend()) {
      val slowMiddle = p.slowWithin(15.seconds, 45.seconds, 10) _
      servers() += ServerFactory(
        genServerId(),
        slowMiddle(stableLatency),
        stats.scope("srv_slow_middle")
      )
    }

    // Note, it's important to actually elapse time here instead of
    // synthesizing it since we want to see the effects of latency
    // on the load balancer.
    while (elapsed() < dur()) {
      Thread.sleep(1)
      var n = 0
      while (n < qpms) {
        query()
        n += 1
      }
      if (rem > 0 && ms % (1000 / rem) == 0) { query() }
      ms += 1
      // Fixed: the original used bitwise `&` instead of short-circuit `&&`.
      if (showProgress() && ms % 1000 == 0) {
        println("-" * 100)
        println(s"Requests at ${elapsed()}")
        val lines = for ((name, fn) <- stats.gauges.toSeq) yield (name.mkString("/"), fn())
        for ((name, value) <- lines.sortBy(_._1)) println(s"$name $value")
      }
    }

    if (showSummary()) { println(stats.summary(includeTails = true)) }

    if (showLoadDist()) {
      val srvs = servers.sample().toSeq
      val totalLoad = srvs.map(_.count).sum
      // A perfectly balanced run would put `optimal` requests on each server.
      val optimal = totalLoad / serverCount.toDouble
      println("# load distribution")
      println(f"optimal (total / servers): ${optimal}%1.2f")
      srvs.sortBy(_.count).foreach { srv =>
        val variance = math.abs(srv.count - optimal)
        val variancePct = (variance / optimal.toDouble) * 100
        println(
          s"srv=${srv.toString} load=${srv.count} " +
            f"variance=$variance%1.2f (${variancePct}%1.2f%%)"
        )
      }
      // TODO: export standard deviation.
    }
  }
}
mkhq/finagle
finagle-benchmark/src/main/scala/com/twitter/finagle/loadbalancer/Simulation.scala
Scala
apache-2.0
5,909
/*
 * Copyright 2015 Martijn van de Rijdt
 *
 * This file is part of BOSI.
 *
 * BOSI is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
 *
 * BOSI is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along with BOSI. If not, see <http://www.gnu.org/licenses/>.
 */
package nl.mvdr.breakout.state

import org.scalatest._

/**
 * Behavioural specification of {@link Brick}: damage handling and
 * hit-point validation.
 *
 * @author Martijn van de Rijdt
 */
class BrickSpec extends FlatSpec with Matchers {
  "A brick" should "be destroyed when its hit points are reduced to zero" in {
    // A brick constructed this way has a single hit point left.
    val singleHitBrick = new Brick(Point(0, 0))
    singleHitBrick.takeDamage should be (Option.empty)
  }

  it should "lose a hit point when it takes damage" in {
    val damaged = Brick(Point(0, 0), 5).takeDamage
    damaged should be (Option(Brick(Point(0, 0), 4)))
  }

  it should "not be allowed to have negative hit points" in {
    intercept[IllegalArgumentException] {
      Brick(Point(0, 0), -10)
    }
  }

  it should "not be allowed to have zero hit points" in {
    intercept[IllegalArgumentException] {
      Brick(Point(0, 0), 0)
    }
  }
}
TinusTinus/bosi
src/test/scala/nl/mvdr/breakout/state/BrickSpec.scala
Scala
gpl-3.0
1,514
package hercules.test.utils

import akka.actor.Props
import akka.actor.Actor
import akka.actor.ActorRef

/**
 * Surrogate parent actor for tests: it spawns the supplied child and relays
 * every message it receives (messages normally destined for the master) to
 * the probe, preserving the original sender.
 */
class StepParent(childToCreate: Props, probe: ActorRef, role: String = "child") extends Actor {

  // The actor under test, created as a child of this surrogate parent.
  val child = context.actorOf(childToCreate, name = role)

  def receive: Receive = {
    case message => probe.tell(message, sender)
  }
}
johandahlberg/hercules
src/test/scala/hercules/test/utils/StepParent.scala
Scala
mit
518
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.bwsw.sj.engine.core.output.types.jdbc

import com.bwsw.sj.common.engine.core.entities.{OutputEnvelope, TStreamEnvelope}

/**
 * Contract for writing processed output envelopes to a SQL database.
 *
 * @tparam T incoming data type
 * @author Pavel Tomskikh
 */
trait JdbcSender[T <: AnyRef] {

  /**
   * Writes one processed envelope to the database.
   *
   * @param envelope      processed envelope
   * @param inputEnvelope received envelope
   */
  def send(envelope: OutputEnvelope, inputEnvelope: TStreamEnvelope[T]): Unit

  /**
   * Triggered before every checkpoint
   */
  def checkpoint(): Unit = {}
}

object JdbcSender {

  /**
   * Picks a batched sender when the driver supports batch updates and a
   * simple one-statement-at-a-time sender otherwise.
   */
  def apply[T <: AnyRef](commandBuilder: JdbcCommandBuilder, supportsBatchUpdates: Boolean): JdbcSender[T] =
    if (supportsBatchUpdates) new BatchedJdbcSender(commandBuilder)
    else new SimpleJdbcSender(commandBuilder)
}
bwsw/sj-platform
core/sj-engine-core/src/main/scala/com/bwsw/sj/engine/core/output/types/jdbc/JdbcSender.scala
Scala
apache-2.0
1,667
package lila.gameSearch

import akka.actor._
import akka.pattern.pipe
import com.sksamuel.elastic4s.ElasticClient
import com.sksamuel.elastic4s.ElasticDsl._
import com.sksamuel.elastic4s.mappings.FieldType._

import lila.game.actorApi.{ InsertGame, FinishGame }
import lila.game.GameRepo
import lila.search.actorApi._
import lila.search.ElasticSearch

/**
 * Actor that maintains the elasticsearch game index: serves search/count
 * queries, indexes games as they finish, and can rebuild the whole index
 * from the game database on `Reset`.
 */
private[gameSearch] final class Indexer(
    client: ElasticClient,
    indexName: String,
    typeName: String) extends Actor {

  // Listen for FinishGame events published on the application event bus.
  context.system.lilaBus.subscribe(self, 'finishGame)

  private val indexType = s"$indexName/$typeName"

  def receive = {

    // Query handling: pipe the async elasticsearch response back to the asker.
    case Search(definition) => client execute definition pipeTo sender
    case Count(definition) => client execute definition pipeTo sender

    // A game just finished: reuse the single-game insert path.
    case FinishGame(game, _, _) => self ! InsertGame(game)

    // Index a single game, enriched with whether it has an analysis.
    case InsertGame(game) => if (storable(game)) {
      GameRepo isAnalysed game.id foreach { analysed =>
        client execute store(game, analysed)
      }
    }

    // Full reindex: recreate the type mapping, then bulk-index every
    // storable game in batches, reporting progress as it goes.
    // NOTE(review): this blocks the actor for up to 10 hours and mutates
    // local counters; presumably only triggered manually — confirm.
    case Reset =>
      ElasticSearch.createType(client, indexName, typeName)
      try {
        import Fields._
        // Declare the field mapping for the game document type.
        client execute {
          put mapping indexName / typeName as Seq(
            status typed ShortType,
            turns typed ShortType,
            rated typed BooleanType,
            variant typed ShortType,
            uids typed StringType,
            winner typed StringType,
            averageRating typed ShortType,
            ai typed ShortType,
            opening typed StringType,
            date typed DateType format ElasticSearch.Date.format,
            duration typed ShortType,
            analysed typed BooleanType
          )
        }
        import scala.concurrent.Await
        import scala.concurrent.duration._
        import play.api.libs.json.Json
        import lila.db.api._
        import lila.game.tube.gameTube
        loginfo("[game search] counting games...")
        val size = SprayPimpedFuture($count($select.all)).await
        val batchSize = 1000
        var nb = 0          // games indexed so far
        var nbSkipped = 0   // games filtered out as non-storable
        var started = nowMillis
        Await.result(
          $enumerate.bulk[Option[lila.game.Game]]($query.all, batchSize) { gameOptions =>
            val games = gameOptions.flatten filter storable
            val nbGames = games.size
            // Look up which games of this batch have an analysis, then
            // send the whole batch as one bulk elasticsearch request.
            (GameRepo filterAnalysed games.map(_.id).toSeq flatMap { analysedIds =>
              client execute {
                bulk {
                  games.map { g => store(g, analysedIds(g.id)) }: _*
                }
              }
            }).void >>- {
              // Progress bookkeeping + throughput log line per batch.
              nb = nb + nbGames
              nbSkipped = nbSkipped + gameOptions.size - nbGames
              val perS = (batchSize * 1000) / math.max(1, (nowMillis - started))
              started = nowMillis
              loginfo("[game search] Indexed %d of %d, skipped %d, at %d/s".format(nb, size, nbSkipped, perS))
            }
          }, 10 hours)
        sender ! (())
      }
      catch {
        case e: Exception =>
          println(e)
          sender ! Status.Failure(e)
      }
  }

  // Only finished or imported games with more than 4 plies are worth indexing.
  private def storable(game: lila.game.Game) = (game.finished || game.imported) && game.playedTurns > 4

  // Builds the elasticsearch index request for one game; fields whose value
  // is None are dropped from the document.
  private def store(game: lila.game.Game, hasAnalyse: Boolean) = {
    import Fields._
    index into indexType fields {
      List(
        // Timeouts are recorded as resignations.
        status -> game.status.is(_.Timeout).fold(chess.Status.Resign, game.status).id.some,
        // Full moves, not plies.
        turns -> math.ceil(game.turns.toFloat / 2).some,
        rated -> game.rated.some,
        variant -> game.variant.id.some,
        uids -> game.userIds.toArray.some.filterNot(_.isEmpty),
        winner -> (game.winner flatMap (_.userId)),
        averageRating -> game.averageUsersRating,
        ai -> game.aiLevel,
        date -> (ElasticSearch.Date.formatter print game.createdAt).some,
        duration -> game.estimateTotalTime.some,
        opening -> (game.opening map (_.code.toLowerCase)),
        analysed -> hasAnalyse.some
      ).collect {
        case (key, Some(value)) => key -> value
      }: _*
    } id game.id
  }
}
Happy0/lila
modules/gameSearch/src/main/Indexer.scala
Scala
mit
4,053
/**
 * Licensed to Big Data Genomics (BDG) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The BDG licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.bdgenomics.avocado.preprocessing

import org.apache.commons.configuration.SubnodeConfiguration
import org.apache.spark.rdd.RDD
import org.bdgenomics.formats.avro.AlignmentRecord
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.adam.rdd.read.AlignmentRecordContext._

/**
 * Preprocessing stage that delegates to ADAM's indel realigner.
 */
object RealignIndels extends PreprocessingStage {

  // Identifier used to select this stage from the pipeline configuration.
  val stageName: String = "realignIndels"

  /**
   * Realigns reads around indels. This stage has no tunable options, so the
   * `config` argument is accepted but never consulted.
   */
  def apply(rdd: RDD[AlignmentRecord], config: SubnodeConfiguration): RDD[AlignmentRecord] =
    rdd.adamRealignIndels()
}
tdanford/avocado
avocado-core/src/main/scala/org/bdgenomics/avocado/preprocessing/RealignIndels.scala
Scala
apache-2.0
1,367
package justin.db.vectorclocks

import java.util.UUID

import org.scalatest.{FlatSpec, Matchers}

/** Behavioral spec for [[VectorClock]]: creation, increment, and merge. */
class VectorClockTest extends FlatSpec with Matchers {

  behavior of "Vector Clock"

  it should "initialize an empty Vector Clock with passed id" in {
    val nodeId = UUID.randomUUID()

    val clock = VectorClock.empty(nodeId)

    clock shouldBe VectorClock(Map(nodeId -> Counter(0)))
  }

  it should "increase corresponding counter by one for particular id" in {
    val nodeId = UUID.randomUUID()
    val clock = VectorClock.empty(nodeId)

    val bumped = clock.increase(nodeId)

    bumped shouldBe VectorClock(Map(nodeId -> Counter(1)))
  }

  it should "merge two vector clocks" in {
    val idA = UUID.randomUUID()
    val clockA = VectorClock(Map(idA -> Counter(109)))

    val idB = UUID.randomUUID()
    val clockB = VectorClock(Map(
      idA -> Counter(1),
      idB -> Counter(99)
    ))

    // The receiver's own counter is bumped on top of the element-wise max.
    val receiverId = idA
    val combined = VectorClock.merge(receiverId, clockA, clockB)

    combined.get(idA).get shouldBe Counter(110)
    combined.get(idB).get shouldBe Counter(99)
  }

  it should "merge two vector clocks without having passed \"receiverId\" key" in {
    val idA = UUID.randomUUID()
    val clockA = VectorClock.empty(idA)

    val idB = UUID.randomUUID()
    val clockB = VectorClock.empty(idB)

    // Receiver not present in either clock: it is added with Counter(1).
    val receiverId = UUID.randomUUID()
    val combined = VectorClock.merge(receiverId, clockA, clockB)

    combined.get(idA).get shouldBe Counter(0)
    combined.get(idB).get shouldBe Counter(0)
    combined.get(receiverId).get shouldBe Counter(1)
  }

  it should "init an empty Vector Clock" in {
    type Id = Int

    val clock = VectorClock.apply[Id]()

    clock shouldBe VectorClock(Map.empty[Id, Counter])
  }

  it should "list Vector Clock" in {
    val clock = VectorClock(Map(1 -> Counter(109)))

    val entries = clock.toList

    entries shouldBe List((1, Counter(109)))
  }

  it should "get keys" in {
    val clock = VectorClock(Map(1 -> Counter(109), 2 -> Counter(1)))

    val observedKeys = clock.keys

    observedKeys shouldBe Set(1, 2)
  }
}
speedcom/JustinDB
justin-vector-clocks/src/test/scala/justin/db/vectorclocks/VectorClockTest.scala
Scala
apache-2.0
1,997
package slogger.model

/**
 * Aggregates every BSON handler defined across the model sub-packages
 * (common, specification, processing) into a single mixin, so callers can
 * bring all handlers into scope with one import or extension.
 */
trait BsonHandlers
  extends slogger.model.common.BsonHandlers
  with slogger.model.specification.BsonHandlers
  with slogger.model.processing.BsonHandlers {

}

/** Ready-made instance for `import BsonHandlers._`-style usage. */
object BsonHandlers extends BsonHandlers
IvanMykhailov/stats-logger
core/src/main/scala/slogger/model/BsonHandlers.scala
Scala
mit
228
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package kafka.metrics

import com.yammer.metrics.Metrics
import java.io.File
import com.yammer.metrics.reporting.CsvReporter
import java.util.concurrent.TimeUnit
import kafka.utils.{Logging, VerifiableProperties}
import org.apache.kafka.common.utils.Utils

private trait KafkaCSVMetricsReporterMBean extends KafkaMetricsReporterMBean

/**
 * Metrics reporter that periodically dumps the default Yammer metrics
 * registry as CSV files into a configurable directory.
 *
 * Lifecycle: `init` wires everything up (once), `startReporter` /
 * `stopReporter` toggle polling. All lifecycle methods synchronize on
 * `this` and are guarded by the `initialized` / `running` flags.
 */
private class KafkaCSVMetricsReporter extends KafkaMetricsReporter
  with KafkaCSVMetricsReporterMBean
  with Logging {

  // Output directory for the CSV files; set once in init().
  private var csvDir: File = null
  // The underlying Yammer reporter; replaced on stop so we can restart.
  private var underlying: CsvReporter = null
  private var running = false
  private var initialized = false

  override def getMBeanName = "kafka:type=kafka.metrics.KafkaCSVMetricsReporter"

  override def init(props: VerifiableProperties) {
    synchronized {
      if (!initialized) {
        val metricsConfig = new KafkaMetricsConfig(props)
        csvDir = new File(props.getString("kafka.csv.metrics.dir", "kafka_metrics"))
        // Wipe any output from a previous run before recreating the directory.
        Utils.delete(csvDir)
        csvDir.mkdirs()
        underlying = new CsvReporter(Metrics.defaultRegistry(), csvDir)
        // Only mark initialized (and start polling) when explicitly enabled;
        // otherwise a later init() call may still enable it.
        if (props.getBoolean("kafka.csv.metrics.reporter.enabled", default = false)) {
          initialized = true
          startReporter(metricsConfig.pollingIntervalSecs)
        }
      }
    }
  }

  override def startReporter(pollingPeriodSecs: Long) {
    synchronized {
      if (initialized && !running) {
        underlying.start(pollingPeriodSecs, TimeUnit.SECONDS)
        running = true
        info("Started Kafka CSV metrics reporter with polling period %d seconds".format(pollingPeriodSecs))
      }
    }
  }

  override def stopReporter() {
    synchronized {
      if (initialized && running) {
        underlying.shutdown()
        running = false
        info("Stopped Kafka CSV metrics reporter")
        // A shut-down CsvReporter cannot be restarted, so create a fresh one
        // to keep startReporter() usable after a stop.
        underlying = new CsvReporter(Metrics.defaultRegistry(), csvDir)
      }
    }
  }
}
flange/drift-dev
kafka/00-kafka_2.11-0.10.1.0/libs/tmp/kafka/metrics/KafkaCSVMetricsReporter.scala
Scala
apache-2.0
2,734
/*
 * Copyright (c) 2002-2018 "Neo Technology,"
 * Network Engine for Objects in Lund AB [http://neotechnology.com]
 *
 * This file is part of Neo4j.
 *
 * Neo4j is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
package org.neo4j.cypher.internal.spi.v2_3

import org.neo4j.graphdb.Relationship
import org.neo4j.kernel.impl.api.RelationshipVisitor
import org.neo4j.kernel.impl.api.store.RelationshipIterator
import org.neo4j.kernel.impl.core.NodeManager

/**
 * Adapts a Kernel API RelationshipIterator to an Iterator[Relationship],
 * while keeping the guarantee that each relationship record is loaded
 * exactly once (the visitor callback captures the proxy as a side effect
 * of the single `relationshipVisit` call per element).
 */
class BeansAPIRelationshipIterator(relationships: RelationshipIterator,
                                   nodeManager: NodeManager) extends Iterator[Relationship] {

  // Holds the proxy produced by the most recent visit() callback.
  private var lastMaterialized: Relationship = null

  private val visitor = new RelationshipVisitor[RuntimeException] {
    override def visit(relationshipId: Long, typeId: Int, startNodeId: Long, endNodeId: Long) {
      // Note the argument order expected by newRelationshipProxy:
      // (id, startNode, type, endNode).
      lastMaterialized = nodeManager.newRelationshipProxy(relationshipId, startNodeId, typeId, endNodeId)
    }
  }

  override def hasNext: Boolean = relationships.hasNext

  override def next(): Relationship = {
    if (!hasNext) throw new NoSuchElementException
    val relationshipId = relationships.next()
    // Triggers visitor.visit(), which populates lastMaterialized.
    relationships.relationshipVisit(relationshipId, visitor)
    lastMaterialized
  }
}
HuangLS/neo4j
community/cypher/cypher/src/main/scala/org/neo4j/cypher/internal/spi/v2_3/BeansAPIRelationshipIterator.scala
Scala
apache-2.0
2,043
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.api.scala

import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala._
import org.apache.flink.streaming.api.scala.{DataStream, StreamExecutionEnvironment, asScalaStream}
import org.apache.flink.table.api.{Table, TableConfig, TableEnvironment}
import org.apache.flink.table.catalog.{CatalogManager, GenericInMemoryCatalog}
import org.apache.flink.table.functions.{AggregateFunction, TableFunction}

/**
  * The [[TableEnvironment]] for a Scala [[StreamExecutionEnvironment]] that works with
  * [[DataStream]]s.
  *
  * A TableEnvironment can be used to:
  * - convert a [[DataStream]] to a [[Table]]
  * - register a [[DataStream]] in the [[TableEnvironment]]'s catalog
  * - register a [[Table]] in the [[TableEnvironment]]'s catalog
  * - scan a registered table to obtain a [[Table]]
  * - specify a SQL query on registered tables to obtain a [[Table]]
  * - convert a [[Table]] into a [[DataStream]]
  * - explain the AST and execution plan of a [[Table]]
  *
  * @param execEnv The Scala [[StreamExecutionEnvironment]] of the TableEnvironment.
  * @param config The configuration of the TableEnvironment.
  */
class StreamTableEnvironment @deprecated(
    "This constructor will be removed. Use StreamTableEnvironment.create() instead.",
    "1.8.0") (
    execEnv: StreamExecutionEnvironment,
    config: TableConfig,
    catalogManager: CatalogManager)
  extends org.apache.flink.table.api.StreamTableEnvironment(
    execEnv.getWrappedStreamExecutionEnvironment,
    config,
    catalogManager) {

  /**
    * Converts the given [[DataStream]] into a [[Table]].
    *
    * The field names of the [[Table]] are automatically derived from the type of the
    * [[DataStream]].
    *
    * @param dataStream The [[DataStream]] to be converted.
    * @tparam T The type of the [[DataStream]].
    * @return The converted [[Table]].
    */
  def fromDataStream[T](dataStream: DataStream[T]): Table = {
    createTable(asQueryOperation(dataStream.javaStream, None))
  }

  /**
    * Converts the given [[DataStream]] into a [[Table]] with specified field names.
    *
    * Example:
    *
    * {{{
    *   val stream: DataStream[(String, Long)] = ...
    *   val tab: Table = tableEnv.fromDataStream(stream, 'a, 'b)
    * }}}
    *
    * @param dataStream The [[DataStream]] to be converted.
    * @param fields The field names of the resulting [[Table]].
    * @tparam T The type of the [[DataStream]].
    * @return The converted [[Table]].
    */
  // TODO: Change fields type to `Expression*` after introducing [Expression]
  def fromDataStream[T](dataStream: DataStream[T], fields: Symbol*): Table = {
    val exprs = fields.map(_.name).toArray
    createTable(asQueryOperation(dataStream.javaStream, Some(exprs)))
  }

  /**
    * Registers the given [[DataStream]] as table in the
    * [[TableEnvironment]]'s catalog.
    * Registered tables can be referenced in SQL queries.
    *
    * The field names of the [[Table]] are automatically derived
    * from the type of the [[DataStream]].
    *
    * @param name The name under which the [[DataStream]] is registered in the catalog.
    * @param dataStream The [[DataStream]] to register.
    * @tparam T The type of the [[DataStream]] to register.
    */
  def registerDataStream[T](name: String, dataStream: DataStream[T]): Unit = {
    registerTable(name, fromDataStream(dataStream))
  }

  /**
    * Registers the given [[DataStream]] as table with specified field names in the
    * [[TableEnvironment]]'s catalog.
    * Registered tables can be referenced in SQL queries.
    *
    * Example:
    *
    * {{{
    *   val set: DataStream[(String, Long)] = ...
    *   tableEnv.registerDataStream("myTable", set, 'a, 'b)
    * }}}
    *
    * @param name The name under which the [[DataStream]] is registered in the catalog.
    * @param dataStream The [[DataStream]] to register.
    * @param fields The field names of the registered table.
    * @tparam T The type of the [[DataStream]] to register.
    */
  // TODO: Change fields type to `Expression*` after introducing [Expression]
  def registerDataStream[T](name: String, dataStream: DataStream[T], fields: Symbol*): Unit = {
    // Field-name extraction is handled by the fromDataStream overload below;
    // a previously computed-but-unused `exprs` local was removed here.
    registerTable(name, fromDataStream(dataStream, fields: _*))
  }

  /**
    * Converts the given [[Table]] into an append [[DataStream]] of a specified type.
    *
    * The [[Table]] must only have insert (append) changes. If the [[Table]] is also modified
    * by update or delete changes, the conversion will fail.
    *
    * The fields of the [[Table]] are mapped to [[DataStream]] fields as follows:
    * - [[org.apache.flink.types.Row]] and Scala Tuple types: Fields are mapped by position, field
    * types must match.
    * - POJO [[DataStream]] types: Fields are mapped by field name, field types must match.
    *
    * @param table The [[Table]] to convert.
    * @tparam T The type of the resulting [[DataStream]].
    * @return The converted [[DataStream]].
    */
  def toAppendStream[T: TypeInformation](table: Table): DataStream[T] = {
    val returnType = createTypeInformation[T]
    asScalaStream(translateToDataStream[T](
      table,
      updatesAsRetraction = false,
      withChangeFlag = false,
      returnType))
  }

  /**
    * Converts the given [[Table]] into a [[DataStream]] of add and retract messages.
    * The message will be encoded as [[Tuple2]]. The first field is a [[Boolean]] flag,
    * the second field holds the record of the specified type [[T]].
    *
    * A true [[Boolean]] flag indicates an add message, a false flag indicates a retract message.
    *
    * @param table The [[Table]] to convert.
    * @tparam T The type of the requested data type.
    * @return The converted [[DataStream]].
    */
  def toRetractStream[T: TypeInformation](table: Table): DataStream[(Boolean, T)] = {
    val returnType = createTypeInformation[(Boolean, T)]
    asScalaStream(translateToDataStream[(Boolean, T)](
      table,
      updatesAsRetraction = true,
      withChangeFlag = true,
      returnType))
  }

  /**
    * Registers an [[AggregateFunction]] under a unique name in the TableEnvironment's catalog.
    * Registered functions can be referenced in Table API and SQL queries.
    *
    * @param name The name under which the function is registered.
    * @param f The AggregateFunction to register.
    * @tparam T The type of the output value.
    * @tparam ACC The type of aggregate accumulator.
    */
  def registerFunction[T: TypeInformation, ACC: TypeInformation](
      name: String,
      f: AggregateFunction[T, ACC]): Unit = {
    registerAggregateFunctionInternal[T, ACC](name, f)
  }

  /**
    * Registers a [[TableFunction]] under a unique name in the TableEnvironment's catalog.
    * Registered functions can be referenced in Table API and SQL queries.
    *
    * @param name The name under which the function is registered.
    * @param tf The TableFunction to register.
    * @tparam T The type of the output row.
    */
  def registerFunction[T: TypeInformation](name: String, tf: TableFunction[T]): Unit = {
    registerTableFunctionInternal[T](name, tf)
  }
}

object StreamTableEnvironment {

  /**
    * The [[TableEnvironment]] for a Scala [[StreamExecutionEnvironment]] that works with
    * [[DataStream]]s.
    *
    * A TableEnvironment can be used to:
    * - convert a [[DataStream]] to a [[Table]]
    * - register a [[DataStream]] in the [[TableEnvironment]]'s catalog
    * - register a [[Table]] in the [[TableEnvironment]]'s catalog
    * - scan a registered table to obtain a [[Table]]
    * - specify a SQL query on registered tables to obtain a [[Table]]
    * - convert a [[Table]] into a [[DataStream]]
    * - explain the AST and execution plan of a [[Table]]
    *
    * @param executionEnvironment The Scala [[StreamExecutionEnvironment]] of the TableEnvironment.
    */
  def create(executionEnvironment: StreamExecutionEnvironment): StreamTableEnvironment = {
    create(executionEnvironment, new TableConfig())
  }

  /**
    * The [[TableEnvironment]] for a Scala [[StreamExecutionEnvironment]] that works with
    * [[DataStream]]s.
    *
    * A TableEnvironment can be used to:
    * - convert a [[DataStream]] to a [[Table]]
    * - register a [[DataStream]] in the [[TableEnvironment]]'s catalog
    * - register a [[Table]] in the [[TableEnvironment]]'s catalog
    * - scan a registered table to obtain a [[Table]]
    * - specify a SQL query on registered tables to obtain a [[Table]]
    * - convert a [[Table]] into a [[DataStream]]
    * - explain the AST and execution plan of a [[Table]]
    *
    * @param executionEnvironment The Scala [[StreamExecutionEnvironment]] of the TableEnvironment.
    * @param tableConfig The configuration of the TableEnvironment.
    */
  def create(
      executionEnvironment: StreamExecutionEnvironment,
      tableConfig: TableConfig): StreamTableEnvironment = {
    // Default catalog setup: a single in-memory catalog named per the config.
    val catalogManager = new CatalogManager(
      tableConfig.getBuiltInCatalogName,
      new GenericInMemoryCatalog(
        tableConfig.getBuiltInCatalogName,
        tableConfig.getBuiltInDatabaseName)
    )
    create(executionEnvironment, tableConfig, catalogManager)
  }

  /**
    * The [[TableEnvironment]] for a Scala [[StreamExecutionEnvironment]] that works with
    * [[DataStream]]s.
    *
    * A TableEnvironment can be used to:
    * - convert a [[DataStream]] to a [[Table]]
    * - register a [[DataStream]] in the [[TableEnvironment]]'s catalog
    * - register a [[Table]] in the [[TableEnvironment]]'s catalog
    * - scan a registered table to obtain a [[Table]]
    * - specify a SQL query on registered tables to obtain a [[Table]]
    * - convert a [[Table]] into a [[DataStream]]
    * - explain the AST and execution plan of a [[Table]]
    *
    * @param executionEnvironment The Scala [[StreamExecutionEnvironment]] of the TableEnvironment.
    * @param tableConfig The configuration of the TableEnvironment.
    * @param catalogManager a catalog manager that encapsulates all available catalogs.
    */
  def create(
      executionEnvironment: StreamExecutionEnvironment,
      tableConfig: TableConfig,
      catalogManager: CatalogManager): StreamTableEnvironment = {
    new StreamTableEnvironment(executionEnvironment, tableConfig, catalogManager)
  }
}
shaoxuan-wang/flink
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/api/scala/StreamTableEnvironment.scala
Scala
apache-2.0
11,170