code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package io.eels.component.kudu
import io.eels.schema._
import org.apache.kudu.{ColumnSchema, Schema, Type}
import scala.collection.JavaConverters._
/**
 * Conversions between Kudu schemas and eel [[StructType]]s.
 * Kudu does not support nested structs, so only flat schemas are handled.
 */
object KuduSchemaFns {

  /**
   * Converts a Kudu [[Schema]] into an eel [[StructType]], preserving each
   * column's nullability and key flag.
   *
   * Fails fast with a descriptive error for Kudu types that have no eel
   * equivalent (previously this surfaced as an opaque MatchError).
   */
  def fromKuduSchema(schema: Schema): StructType = {
    val fields = schema.getColumns.asScala.map { col =>
      val datatype = col.getType match {
        case Type.BINARY => BinaryType
        case Type.BOOL => BooleanType
        case Type.DOUBLE => DoubleType
        case Type.FLOAT => FloatType
        case Type.INT8 => ByteType.Signed
        case Type.INT16 => ShortType.Signed
        case Type.INT32 => IntType.Signed
        case Type.INT64 => LongType.Signed
        case Type.STRING => StringType
        case Type.UNIXTIME_MICROS => TimestampMicrosType
        case other => sys.error(s"Unsupported Kudu type $other for column ${col.getName}")
      }
      Field(col.getName, datatype, nullable = col.isNullable, key = col.isKey)
    }
    StructType(fields)
  }

  /**
   * Converts a single eel [[Field]] into a Kudu [[ColumnSchema]].
   *
   * Fails fast with a descriptive error for eel datatypes that Kudu cannot
   * represent (previously an opaque MatchError).
   */
  def toKuduColumn(field: Field): ColumnSchema = {
    val tpe = field.dataType match {
      case BinaryType => Type.BINARY
      case BooleanType => Type.BOOL
      case DoubleType => Type.DOUBLE
      case FloatType => Type.FLOAT
      case _: ByteType => Type.INT8
      case _: ShortType => Type.INT16
      case _: IntType => Type.INT32
      case _: LongType => Type.INT64
      case StringType => Type.STRING
      case TimestampMicrosType => Type.UNIXTIME_MICROS
      case other => sys.error(s"Unsupported eel datatype $other for field ${field.name}")
    }
    new ColumnSchema.ColumnSchemaBuilder(field.name, tpe).nullable(field.nullable).key(field.key).build()
  }

  // kudu does not support nested structs
  /**
   * Converts an eel [[StructType]] into a Kudu [[Schema]].
   *
   * Kudu requires at least one key column, and key columns must not be
   * nullable; both invariants are asserted up front.
   */
  def toKuduSchema(structType: StructType): Schema = {
    val columns = structType.fields.map(toKuduColumn)
    assert(columns.exists(_.isKey), "Kudu schema requires at least one column to be marked as a key")
    assert(!columns.filter(_.isKey).exists(_.isNullable), "Kudu does not allow key columns to be nullable")
    new Schema(columns.asJava)
  }
}
| eel-lib/eel | eel-kudu/src/main/scala/io/eels/component/kudu/KuduSchemaFns.scala | Scala | mit | 1,861 |
package com.sksamuel.elastic4s.explain
import com.sksamuel.elastic4s.DocumentRef
trait ExplainApi {
// Builds an explain request for the document identified by the given reference.
def explain(ref: DocumentRef): ExplainRequest = ExplainRequest(ref.indexAndType, ref.id)
// Convenience overload: wraps the raw coordinates in a DocumentRef and delegates
// to the primary factory above.
def explain(index: String, `type`: String, id: String): ExplainRequest = explain(DocumentRef(index, `type`, id))
}
| Tecsisa/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/explain/ExplainApi.scala | Scala | apache-2.0 | 335 |
package audit.collector
import akka.actor.ActorRef
import akka.http.scaladsl.model.{HttpResponse, StatusCodes}
import akka.http.scaladsl.server.Directives._
import akka.pattern.ask
import audit.collector.Collector.Event
import audit.collector.CollectorActor.{NotStored, Store, Stored}
import audit.{Api, ApiProtocol}
trait CollectorProtocol extends CollectorProtocolDoc with ApiProtocol {
// JSON (un)marshalling for Event; jsonFormat7 implies Event has exactly
// seven fields — it must be kept in sync with the Event case class.
implicit val eventFormat = jsonFormat7(Event)
}
trait CollectorApi extends CollectorProtocol { self: Api =>
// Actor that persists incoming audit events; supplied by the concrete Api mixin.
val collector: ActorRef
// Route for POST /collect: unmarshals an Event from the JSON body, asks the
// collector actor to store it, and maps the actor's reply to an HTTP status.
// NOTE(review): the ask (?) needs an implicit Timeout and ExecutionContext,
// presumably provided by the Api self-type — confirm.
val collectorRoute = path("collect") {
post {
entity(as[Event]) { event =>
complete {
(collector ? Store(event)).map {
case Stored =>
HttpResponse(StatusCodes.OK)
case NotStored =>
HttpResponse(StatusCodes.InternalServerError)
}
}
}
}
}
}
| grzesiekw/audit | src/main/scala/audit/collector/CollectorApi.scala | Scala | mit | 856 |
package japgolly.scalajs.react.vdom
private[vdom] object VdomCreation {
// Zero-allocation (AnyVal) extension on String for building vdom tags.
@inline final implicit class VdomExtString(private val s: String) extends AnyVal {
// May eventually make use of this
// Interprets this string as the name of an HTML terminal tag for node type N.
@inline def reactTerminalTag[N <: HtmlTopNode]: HtmlTagOf[N] =
new HtmlTagOf[N](s)
}
} | japgolly/scalajs-react | coreGeneric/src/main/scala-2/japgolly/scalajs/react/vdom/VdomCreation.scala | Scala | apache-2.0 | 298 |
package vulkan.wrapper.registry.command
/** Enumeration of the Vulkan command-queue capability kinds. */
object VulkanCommandQueues extends Enumeration {
  type VulkanCommandQueue = Value

  // Declared one per line, in the original order, so the underlying ids
  // (0..3) and names are unchanged.
  val compute: Value = Value
  val transfer: Value = Value
  val graphics: Value = Value
  val sparse_binding: Value = Value
}
| MrInformatic/VulkanWrapper | src/vulkan/wrapper/registry/command/VulkanCommandQueues.scala | Scala | mit | 184 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.scala
import java.io._
import org.apache.flink.client.deployment.executors.RemoteExecutor
import org.apache.flink.configuration.{Configuration, DeploymentOptions, JobManagerOptions, RestOptions}
import org.apache.flink.runtime.clusterframework.BootstrapTools
import org.apache.flink.runtime.minicluster.MiniCluster
import org.apache.flink.runtime.testutils.MiniClusterResource
import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration
import org.apache.flink.util.TestLogger
import org.junit._
import org.junit.rules.TemporaryFolder
import scala.tools.nsc.Settings
/**
 * Integration tests for the Flink Scala shell. Each test feeds a script to an
 * embedded shell via [[ScalaShellITCase.processInShell]] (which captures the
 * shell's stdout) and asserts on the captured output. Jobs run against the
 * MiniCluster provided by the companion object's class rule.
 */
class ScalaShellITCase extends TestLogger {
import ScalaShellITCase._
// JUnit rule: fresh temporary folder per test (used by testRemoteCluster to
// write a flink-conf.yaml).
val _temporaryFolder = new TemporaryFolder
@Rule
def temporaryFolder = _temporaryFolder
@After
def resetClassLoder(): Unit = {
// The Scala interpreter changes current class loader to ScalaClassLoader in every execution
// refer to [[ILoop.process()]]. So, we need reset it to original class loader after every Test.
Thread.currentThread().setContextClassLoader(classOf[ScalaShellITCase].getClassLoader)
}
/** Prevent re-creation of environment */
@Test
def testPreventRecreationBatch(): Unit = {
val input: String =
"""
val benv = ExecutionEnvironment.getExecutionEnvironment
""".stripMargin
val output: String = processInShell(input)
Assert.assertTrue(output.contains(
"UnsupportedOperationException: Execution Environment is already " +
"defined for this shell"))
}
/** Prevent re-creation of environment */
@Test
def testPreventRecreationStreaming(): Unit = {
val input: String =
"""
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
val senv = StreamExecutionEnvironment.getExecutionEnvironment
""".stripMargin
val output: String = processInShell(input)
Assert.assertTrue(output.contains(
"UnsupportedOperationException: Execution Environment is already " +
"defined for this shell"))
}
/** Iteration test with iterative Pi example */
@Test
def testIterativePIBatch(): Unit = {
val input: String =
"""
val initial = benv.fromElements(0)
val count = initial.iterate(10000) { iterationInput: DataSet[Int] =>
val result = iterationInput.map { i =>
val x = Math.random()
val y = Math.random()
i + (if (x * x + y * y < 1) 1 else 0)
}
result
}
val result = count map { c => c / 10000.0 * 4 }
result.collect()
""".stripMargin
val output: String = processInShell(input)
// Only asserts the absence of failures; the Pi estimate itself is random.
Assert.assertFalse(output.contains("failed"))
Assert.assertFalse(output.contains("error"))
Assert.assertFalse(output.contains("Exception"))
}
/** WordCount in Shell */
@Test
def testWordCountBatch(): Unit = {
val input =
"""
val text = benv.fromElements("To be, or not to be,--that is the question:--",
"Whether 'tis nobler in the mind to suffer",
"The slings and arrows of outrageous fortune",
"Or to take arms against a sea of troubles,")
val counts = text.flatMap { _.toLowerCase.split("\\\\W+") }.map { (_, 1) }.groupBy(0).sum(1)
val result = counts.print()
""".stripMargin
val output = processInShell(input)
Assert.assertFalse(output.contains("failed"))
Assert.assertFalse(output.contains("error"))
Assert.assertFalse(output.contains("Exception"))
// some of the words that should be included
Assert.assertTrue(output.contains("(a,1)"))
Assert.assertTrue(output.contains("(whether,1)"))
Assert.assertTrue(output.contains("(to,4)"))
Assert.assertTrue(output.contains("(arrows,1)"))
}
/** Sum 1..10, should be 55 */
@Test
def testSumBatch: Unit = {
val input =
"""
val input: DataSet[Int] = benv.fromElements(0,1,2,3,4,5,6,7,8,9,10)
val reduced = input.reduce(_+_)
reduced.print
""".stripMargin
val output = processInShell(input)
Assert.assertFalse(output.contains("failed"))
Assert.assertFalse(output.contains("error"))
Assert.assertFalse(output.contains("Exception"))
Assert.assertTrue(output.contains("55"))
}
/** WordCount in Shell with custom case class */
@Test
def testWordCountWithCustomCaseClassBatch: Unit = {
val input =
"""
case class WC(word: String, count: Int)
val wordCounts = benv.fromElements(
new WC("hello", 1),
new WC("world", 2),
new WC("world", 8))
val reduced = wordCounts.groupBy(0).sum(1)
reduced.print()
""".stripMargin
val output = processInShell(input)
Assert.assertFalse(output.contains("failed"))
Assert.assertFalse(output.contains("error"))
Assert.assertFalse(output.contains("Exception"))
Assert.assertTrue(output.contains("WC(hello,1)"))
Assert.assertTrue(output.contains("WC(world,10)"))
}
// Batch Table API: select + filter on odd values of column 'a.
@Test
def testSimpleSelectWithFilterBatchTableAPIQuery: Unit = {
val input =
"""
|val data = Seq(
| (1, 1L, "Hi"),
| (2, 2L, "Hello"),
| (3, 2L, "Hello world"))
|val t = benv.fromCollection(data).toTable(btenv, 'a, 'b, 'c).select('a,'c).where(
|'a% 2 === 1 )
|val results = t.toDataSet[Row].collect()
|results.foreach(println)
|:q
""".stripMargin
val output = processInShell(input)
Assert.assertFalse(output.toLowerCase.contains("failed"))
Assert.assertFalse(output.toLowerCase.contains("error"))
Assert.assertFalse(output.toLowerCase.contains("exception"))
Assert.assertTrue(output.contains("1,Hi"))
Assert.assertTrue(output.contains("3,Hello world"))
}
// Streaming Table API: two-level grouped aggregation over a retract stream.
@Test
def testGroupedAggregationStreamTableAPIQuery: Unit = {
val input =
"""
| val data = List(
| ("Hello", 1),
| ("word", 1),
| ("Hello", 1),
| ("bark", 1),
| ("bark", 1),
| ("bark", 1),
| ("bark", 1),
| ("bark", 1),
| ("bark", 1),
| ("flink", 1)
| )
| val stream = senv.fromCollection(data)
| val table = stream.toTable(stenv, 'word, 'num)
| val resultTable = table.groupBy('word).select('num.sum as 'count).groupBy('count).select(
| 'count,'count.count as 'frequency)
| val results = resultTable.toRetractStream[Row]
| results.print
| senv.execute
""".stripMargin
val output = processInShell(input)
Assert.assertTrue(output.contains("6,1"))
Assert.assertTrue(output.contains("1,2"))
Assert.assertTrue(output.contains("2,1"))
Assert.assertFalse(output.toLowerCase.contains("failed"))
Assert.assertFalse(output.toLowerCase.contains("error"))
Assert.assertFalse(output.toLowerCase.contains("exception"))
}
/**
* Submit external library.
* Disabled due to FLINK-7111.
*/
@Ignore
@Test
def testSubmissionOfExternalLibraryBatch: Unit = {
val input =
"""
import org.apache.flink.api.scala.jar.TestingData
val source = benv.fromCollection(TestingData.elements)
source.print()
""".stripMargin
val output: String = processInShell(input, Option("customjar-test-jar.jar"))
Assert.assertFalse(output.contains("failed"))
Assert.assertFalse(output.contains("error"))
Assert.assertFalse(output.contains("Exception"))
Assert.assertTrue(output.contains("\\nHELLO 42"))
}
/**
* Submit external library.
* Disabled due to FLINK-7111.
*/
@Ignore
@Test
def testSubmissionOfExternalLibraryStream: Unit = {
val input =
"""
import org.apache.flink.api.scala.jar.TestingData
val source = senv.fromCollection(TestingData.elements)
source.print()
senv.execute
""".stripMargin
val output: String = processInShell(input, Option("customjar-test-jar.jar"))
Assert.assertFalse(output.contains("failed"))
Assert.assertFalse(output.contains("error"))
Assert.assertFalse(output.contains("Exception"))
Assert.assertTrue(output.contains("\\nHELLO 42"))
}
/**
* tests flink shell startup with remote cluster (starts cluster internally)
* for both streaming and batch api
*/
@Test
def testRemoteCluster: Unit = {
val input: String =
"""
|import org.apache.flink.api.common.functions.RichMapFunction
|import org.apache.flink.api.java.io.PrintingOutputFormat
|import org.apache.flink.api.common.accumulators.IntCounter
|import org.apache.flink.configuration.Configuration
|
|val els = benv.fromElements("foobar","barfoo")
|val mapped = els.map{
| new RichMapFunction[String, String]() {
| var intCounter: IntCounter = _
| override def open(conf: Configuration): Unit = {
| intCounter = getRuntimeContext.getIntCounter("intCounter")
| }
|
| def map(element: String): String = {
| intCounter.add(1)
| element
| }
| }
|}
|mapped.output(new PrintingOutputFormat())
|val executionResult = benv.execute("Test Job")
|System.out.println("IntCounter: " + executionResult.getIntCounterResult("intCounter"))
|
|val elss = senv.fromElements("foobar","barfoo")
|val mapped = elss.map{
| new RichMapFunction[String,String]() {
| def map(element:String): String = {
| element + "Streaming"
| }
| }
|}
|
|mapped.print
|senv.execute("awesome streaming process")
|
|:q
""".stripMargin
// Wire the script into the shell via a buffered reader and capture stdout.
val in: BufferedReader = new BufferedReader(
new StringReader(
input + "\\n"))
val out: StringWriter = new StringWriter
val baos: ByteArrayOutputStream = new ByteArrayOutputStream
val oldOut: PrintStream = System.out
System.setOut(new PrintStream(baos))
// Write a flink-conf.yaml pointing at the MiniCluster's REST endpoint and
// start the shell in "remote" mode against it.
val dir = temporaryFolder.newFolder()
BootstrapTools.writeConfiguration(configuration, new File(dir, "flink-conf.yaml"))
val port: Int = clusterResource.getRestAddres.getPort
val hostname : String = clusterResource.getRestAddres.getHost
val args = Array(
"remote",
hostname,
Integer.toString(port),
"--configDir",
dir.getAbsolutePath)
//start scala shell with initialized
// buffered reader for testing
FlinkShell.bufferedReader = Some(in)
FlinkShell.main(args)
baos.flush()
val output: String = baos.toString
// Restore stdout before asserting so test-framework output is not swallowed.
System.setOut(oldOut)
Assert.assertTrue(output.contains("IntCounter: 2"))
Assert.assertTrue(output.contains("foobar"))
Assert.assertTrue(output.contains("barfoo"))
Assert.assertTrue(output.contains("foobarStreaming"))
Assert.assertTrue(output.contains("barfooStreaming"))
Assert.assertFalse(output.contains("failed"))
Assert.assertFalse(output.contains("Error"))
Assert.assertFalse(output.contains("ERROR"))
Assert.assertFalse(output.contains("Exception"))
}
// Plain Java collections must be usable from the shell.
@Test
def testImportJavaCollection(): Unit = {
val input = """
import java.util.List
val jul: List[Int] = new java.util.ArrayList[Int]()
jul.add(2)
jul.add(4)
jul.add(6)
jul.add(8)
jul.add(10)
val str = "the java list size is: " + jul.size
""".stripMargin
val output = processInShell(input)
Assert.assertTrue(output.contains("the java list size is: 5"))
Assert.assertFalse(output.toLowerCase.contains("failed"))
Assert.assertFalse(output.toLowerCase.contains("error"))
Assert.assertFalse(output.toLowerCase.contains("exception"))
}
// JavaConversions must work in the shell; the final add on an immutable-backed
// java view is expected to raise UnsupportedOperationException.
@Test
def testImplicitConversionBetweenJavaAndScala(): Unit = {
val input =
"""
import collection.JavaConversions._
import scala.collection.mutable.ArrayBuffer
val jul:java.util.List[Int] = ArrayBuffer(1,2,3,4,5)
val buf: Seq[Int] = jul
var sum = 0
buf.foreach(num => sum += num)
val str = "sum is: " + sum
val scala2jul = List(1,2,3)
scala2jul.add(7)
""".stripMargin
val output = processInShell(input)
Assert.assertTrue(output.contains("sum is: 15"))
Assert.assertFalse(output.toLowerCase.contains("failed"))
Assert.assertFalse(output.toLowerCase.contains("error"))
Assert.assertTrue(output.contains("java.lang.UnsupportedOperationException"))
}
// java.util.List must still resolve after a wildcard Table API import that
// also exposes a List-like name.
@Test
def testImportPackageConflict(): Unit = {
val input =
"""
import org.apache.flink.table.api._
import java.util.List
val jul: List[Int] = new java.util.ArrayList[Int]()
jul.add(2)
jul.add(4)
jul.add(6)
jul.add(8)
jul.add(10)
val str = "the java list size is: " + jul.size
""".stripMargin
val output = processInShell(input)
Assert.assertTrue(output.contains("the java list size is: 5"))
Assert.assertFalse(output.toLowerCase.contains("failed"))
Assert.assertFalse(output.toLowerCase.contains("error"))
Assert.assertFalse(output.toLowerCase.contains("exception"))
}
// Acquiring a second ExecutionEnvironment in the same shell must be rejected.
@Test
def testGetMultiExecutionEnvironment(): Unit = {
val input =
"""
|val newEnv = ExecutionEnvironment.getExecutionEnvironment
""".stripMargin
val output = processInShell(input)
Assert.assertTrue(output.contains("java.lang.UnsupportedOperationException: Execution " +
"Environment is already defined for this shell."))
}
}
/**
 * Shared test fixtures: the MiniCluster class rule and the helper that runs a
 * script through an embedded Scala shell.
 */
object ScalaShellITCase {
// Mutable configuration reused across tests; processInShell points it at the
// MiniCluster's REST endpoint before every run.
val configuration = new Configuration()
// NOTE(review): `cluster` appears unused in the visible code — confirm before removing.
var cluster: Option[MiniCluster] = None
val parallelism: Int = 4
val _clusterResource = new MiniClusterResource(new MiniClusterResourceConfiguration.Builder()
.setNumberSlotsPerTaskManager(parallelism)
.build())
@ClassRule
def clusterResource = _clusterResource
/**
* Run the input using a Scala Shell and return the output of the shell.
*
* @param input commands to be processed in the shell
* @return output of shell
*/
def processInShell(input: String, externalJars: Option[String] = None): String = {
val in = new BufferedReader(new StringReader(input + "\\n"))
val out = new StringWriter()
// Capture everything the shell prints to stdout for the duration of the run.
val baos = new ByteArrayOutputStream()
val oldOut = System.out
System.setOut(new PrintStream(baos))
// Point job submission at the MiniCluster via the remote executor.
val port: Int = clusterResource.getRestAddres.getPort
val hostname : String = clusterResource.getRestAddres.getHost
configuration.setString(DeploymentOptions.TARGET, RemoteExecutor.NAME)
configuration.setBoolean(DeploymentOptions.ATTACHED, true)
configuration.setString(JobManagerOptions.ADDRESS, hostname)
configuration.setInteger(JobManagerOptions.PORT, port)
configuration.setString(RestOptions.ADDRESS, hostname)
configuration.setInteger(RestOptions.PORT, port)
// Optionally put external jars on the shell's classpath.
val repl = externalJars match {
case Some(ej) => new FlinkILoop(
configuration,
Option(Array(ej)),
in, new PrintWriter(out))
case None => new FlinkILoop(
configuration,
in, new PrintWriter(out))
}
repl.settings = new Settings()
// enable this line to use scala in intellij
repl.settings.usejavacp.value = true
externalJars match {
case Some(ej) => repl.settings.classpath.value = ej
case None =>
}
repl.process(repl.settings)
repl.closeInterpreter()
// Restore stdout and merge the interpreter's writer output with captured stdout.
System.setOut(oldOut)
baos.flush()
val stdout = baos.toString
out.toString + stdout
}
}
| tzulitai/flink | flink-scala-shell/src/test/scala/org/apache/flink/api/scala/ScalaShellITCase.scala | Scala | apache-2.0 | 16,438 |
package pl.jozwik.smtp.server
import java.time.LocalDateTime
import java.time.temporal.ChronoUnit
import akka.actor.Cancellable
import pl.jozwik.smtp.actor.AbstractActor
import pl.jozwik.smtp.server.ActorWithTimeout.TimeoutTick
import scala.concurrent.duration.FiniteDuration
object ActorWithTimeout {
// Public marker constant; not referenced in the visible code of this file.
val TIMEOUT: String = "TIMEOUT"
// Self-message scheduled periodically to check for inactivity.
case object TimeoutTick
}
/**
 * Mixin that tracks actor inactivity: if no activity (state change via
 * `become`, or explicit `resetTimeout`) happens within `timeout`,
 * `sendTimeoutMessage` is invoked with the time of the last activity.
 */
trait ActorWithTimeout extends AbstractActor {
import context.dispatcher
// Handle of the currently scheduled TimeoutTick, kept so it can be cancelled on stop.
private var cancellable: Option[Cancellable] = None
// Timestamp of the most recent activity.
private var lastAccess: LocalDateTime = LocalDateTime.now()
// Inactivity window; must be supplied by the concrete actor.
val timeout: FiniteDuration
// Check twice per window so expiry is detected with at most timeout/2 delay.
protected val tick: FiniteDuration = timeout / 2
override def preStart(): Unit = {
super.preStart()
cancellable = Option(context.system.scheduler.scheduleOnce(tick, self, TimeoutTick))
()
}
override def postStop(): Unit = {
super.postStop()
// Cancel any pending tick so no message is delivered after stop.
cancellable.foreach(_.cancel())
}
// TimeoutTick lands here because the actor's receive does not handle it.
override def unhandled(message: Any): Unit = message match {
case TimeoutTick =>
// Expired: notify. Not expired: re-arm the one-shot timer for another tick.
// NOTE(review): after sendTimeoutMessage fires, no new tick is scheduled here —
// presumably the implementor stops the actor; confirm.
if (LocalDateTime.now.minus(timeout.toMillis, ChronoUnit.MILLIS).isAfter(lastAccess)) {
sendTimeoutMessage(lastAccess)
} else {
cancellable = Some(context.system.scheduler.scheduleOnce(tick, self, TimeoutTick))
}
case _ =>
super.unhandled(message)
}
// Invoked when the inactivity window has elapsed; implementors decide how to react.
protected def sendTimeoutMessage(lastAccess: LocalDateTime): Unit
// Marks the actor as active "now".
protected def resetTimeout(): Unit = {
lastAccess = LocalDateTime.now()
}
// Every state transition counts as activity.
protected override def become(state: Receive): Unit = {
super.become(state)
resetTimeout()
}
}
| ajozwik/akka-smtp-server | akka-smtp/src/main/scala/pl/jozwik/smtp/server/ActorWithTimeout.scala | Scala | mit | 1,546 |
package edu.nus.systemtesting.testsuite
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.Paths
import java.util.Timer
import java.util.TimerTask
import org.scalatest.BeforeAndAfter
import org.scalatest.FlatSpec
import com.typesafe.config.ConfigException
import com.typesafe.config.ConfigFactory
import edu.nus.systemtesting.ExpectsOutput
import edu.nus.systemtesting.Testable
import edu.nus.systemtesting.TestCaseResult
/**
* @author richardg
*/
/**
 * @author richardg
 */
// Verifies that an exception thrown by the per-test `resultFor` function
// surfaces when the suite's results are awaited, rather than hanging.
class TestSuiteSpec extends FlatSpec {
"TestSuite" should "throw an exception if any resultFor throws an exception" in {
// Values unused by TestSuite
val ArbitraryRev = "rev1"
// Need non-empty list of tests.
// But, can each be null, since TestSuite doesn't consider them.
val tests: List[Testable with ExpectsOutput] = List(null, null, null)
val testSuite = new TestSuite(tests, ArbitraryRev)
// Deliberately throw some exception,
// which TestSuite.runAllTests is unlikely to throw.
def resultFor(tc: Testable with ExpectsOutput): TestCaseResult =
throw new IllegalStateException("")
// Need to fail after some time,
// since that indicates that TestSuite got 'stuck' somewhere.
val timer = new Timer("TestSuiteSpecTimer")
val Timeout = 1000L
timer.schedule(new TimerTask() {
override def run(): Unit = {
fail()
}
}, Timeout)
// runAllTests asynchronously returns TestSuiteResult, so won't throw exception.
val tsr = testSuite.runAllTests(resultFor)
// Exception *should* be thrown when awaiting the TCRs of the TSR.
intercept[IllegalStateException] {
tsr.results
}
// cancel timer.
timer.cancel()
}
}
} | rgoulter/system-testing | src/test/scala/edu/nus/systemtesting/testsuite/TestSuiteSpec.scala | Scala | mit | 1,712 |
package org.openurp.edu.eams.teach.grade.transcript.service.impl
import org.beangle.commons.collection.Collections
import org.springframework.beans.factory.InitializingBean
import org.openurp.edu.base.Student
import org.openurp.edu.eams.teach.grade.service.CourseGradeProvider
import org.openurp.edu.eams.teach.grade.service.impl.DefaultGpaStatService
import org.openurp.edu.eams.teach.grade.service.impl.GpaPolicy
import org.openurp.edu.eams.teach.grade.service.impl.GradeFilter
import org.openurp.edu.eams.teach.grade.service.impl.GradeFilterRegistry
import org.openurp.edu.eams.teach.grade.transcript.service.TranscriptDataProvider
import org.openurp.edu.teach.grade.CourseGrade
// Supplies GPA statistics ("gpas") for student transcripts: fetches published
// course grades, runs them through configurable grade filters, and delegates
// the GPA computation to DefaultGpaStatService.
// NOTE(review): this file looks machine-converted from Java (Collections.newMap,
// datas.put, Map.get used as if nullable) — the Map/List types in the signatures
// may not be plain Scala collections; confirm before refactoring.
class TranscriptGpaProvider extends TranscriptDataProvider with InitializingBean {
private var courseGradeProvider: CourseGradeProvider = _
private var gpaPolicy: GpaPolicy = _
private var gradeFilterRegistry: GradeFilterRegistry = _
private var gpaStatService: DefaultGpaStatService = new DefaultGpaStatService()
// Spring InitializingBean hook: wires the injected policy and provider into
// the internal stat service after property injection.
def afterPropertiesSet() {
gpaStatService.setGpaPolicy(gpaPolicy)
gpaStatService.setCourseGradeProvider(courseGradeProvider)
}
def getDataName(): String = "gpas"
// GPA stats for a single student: published grades -> filters -> stat service.
def getData[T](std: Student, options: Map[String, String]): T = {
var grades = courseGradeProvider.getPublished(std)
val matched = getFilters(options)
for (filter <- matched) grades = filter.filter(grades)
gpaStatService.statGpa(std, grades).asInstanceOf[T]
}
// Batch variant: grades for all students are fetched in one call, then each
// student's grades are filtered and aggregated individually.
def getDatas[T](stds: List[Student], options: Map[String, String]): Map[Student, T] = {
val matched = getFilters(options)
val datas = Collections.newMap[Any]
val gradeMap = courseGradeProvider.getPublished(stds)
for (std <- stds) {
var grades = gradeMap.get(std)
for (filter <- matched) grades = filter.filter(grades)
datas.put(std, gpaStatService.statGpa(std, grades).asInstanceOf[T])
}
datas
}
def setCourseGradeProvider(courseGradeProvider: CourseGradeProvider) {
this.courseGradeProvider = courseGradeProvider
}
def setGradeFilterRegistry(gradeFilterRegistry: GradeFilterRegistry) {
this.gradeFilterRegistry = gradeFilterRegistry
}
// Resolves the grade filters named by the "gpa.filters" option; empty when
// no options are supplied.
protected def getFilters(options: Map[String, String]): List[GradeFilter] = {
if (null == options || options.isEmpty) return Collections.emptyList()
gradeFilterRegistry.getFilters(options.get("gpa.filters"))
}
def setGpaPolicy(gpaPolicy: GpaPolicy) {
this.gpaPolicy = gpaPolicy
}
}
| openurp/edu-eams-webapp | grade/src/main/scala/org/openurp/edu/eams/teach/grade/transcript/service/impl/TranscriptGpaProvider.scala | Scala | gpl-3.0 | 2,477 |
package tastytest
// NOTE(review): per its path this file lives under test/tasty/neg
// (TestCtxFns_fail.scala) — it is a fixture that is *expected to fail*
// compilation against the TASTy-defined CtxFns API; do not "fix" it.
object TestCtxFns {
import CtxFns._
// Context implementation that records everything written via puts.
class TestContext extends Context {
val logs = collection.mutable.ListBuffer.empty[String]
def puts[T](t: T): Unit = logs += t.toString
}
implicit val ctx = new TestContext
// Exercises puts with several value types and checks the recorded log.
def test1: Unit = {
puts(23)(ctx)
puts(true)(ctx)
puts("Hello")(ctx)
assert(ctx.logs.toList == List("23", "true", "Hello"))
}
// Exercises a context-bound box from the TASTy-defined API.
def test2: Unit = {
val box = new CtxBox[Contextual]
box.foo[Unit]
}
}
| lrytz/scala | test/tasty/neg/src-2/TestCtxFns_fail.scala | Scala | apache-2.0 | 479 |
/***
* Copyright 2014 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker.util
import javax.xml.transform.{Templates, Transformer}
import com.codahale.metrics.{Gauge, MetricRegistry}
import net.sf.saxon.jaxp.TransformerImpl
import net.sf.saxon.serialize.MessageWarner
import org.apache.commons.pool.PoolableObjectFactory
import org.apache.commons.pool.impl.SoftReferenceObjectPool
import scala.collection.mutable.{HashMap, MutableList, Map}
/**
 * Pools of reusable XSL [[Transformer]]s, one pool per compiled stylesheet
 * ([[Templates]]). Transformers are borrowed and returned rather than created
 * per use; pool occupancy is exposed through metric gauges.
 */
object TransformPool extends Instrumented {

  private val transformPools : Map[Templates, SoftReferenceObjectPool[Transformer]] = new HashMap[Templates, SoftReferenceObjectPool[Transformer]]

  // Strong references to the registered gauges so they remain reachable.
  private val activeGauges = new MutableList[Gauge[Int]]
  private val idleGauges = new MutableList[Gauge[Int]]

  // Returns the pool for the given stylesheet, creating and registering it on first use.
  private def pool (templates : Templates) : SoftReferenceObjectPool[Transformer] = transformPools.getOrElseUpdate (templates, addPool(templates))

  private def addPool(templates : Templates) : SoftReferenceObjectPool[Transformer] = {
    val pool = new SoftReferenceObjectPool[Transformer](new XSLTransformerFactory(templates))
    val registryClassName = getRegistryClassName(getClass)
    val hash = Integer.toHexString(templates.hashCode())
    // Bug fix: the previous code used ':+', which builds a *new* collection and
    // discards it, so the gauge lists were never populated. '+=' appends to the
    // MutableList in place.
    activeGauges += gaugeOrAdd(MetricRegistry.name(registryClassName, "Active", hash))(pool.getNumActive)
    idleGauges += gaugeOrAdd(MetricRegistry.name(registryClassName, "Idle", hash))(pool.getNumIdle)
    pool
  }

  /** Borrows a transformer for the given stylesheet; pair with returnTransformer. */
  def borrowTransformer (templates : Templates) = pool(templates).borrowObject

  /** Returns a previously borrowed transformer to its stylesheet's pool. */
  def returnTransformer (templates : Templates, transformer : Transformer) = pool(templates).returnObject(transformer)

  /** Number of transformers currently borrowed for the given stylesheet. */
  def numActive (templates : Templates) : Int = pool(templates).getNumActive

  /** Number of idle transformers currently pooled for the given stylesheet. */
  def numIdle (templates : Templates) : Int = pool(templates).getNumIdle
}
// Pool factory that creates Transformers from a compiled stylesheet and
// resets them between uses.
private class XSLTransformerFactory(private val templates : Templates) extends PoolableObjectFactory[Transformer] {
// Creates a fresh Transformer from the compiled stylesheet.
def makeObject = templates.newTransformer()
def activateObject (trans : Transformer) : Unit = {
//
// No need to activate a transformer
//
}
// NOTE(review): besides null-checking, this hook mutates Saxon transformers
// (sets a message emitter) as a side effect of validation — it runs whenever
// the pool validates an instance.
def validateObject (trans : Transformer) : Boolean = {
val valid = trans != null
//
// Ask Saxon to behave like xalan when emitting messages.
//
if (valid && trans.isInstanceOf[TransformerImpl]) {
trans.asInstanceOf[TransformerImpl].getUnderlyingController().setMessageEmitter(new MessageWarner)
}
valid
}
// Clears parameters/output properties so the instance can be safely reused.
def passivateObject (trans : Transformer) : Unit = {
trans.reset()
}
def destroyObject (trans : Transformer) : Unit = {
//
// Not needed
//
}
}
| wdschei/api-checker | util/src/main/scala/com/rackspace/com/papi/compenents/checker/util/TransformPool.scala | Scala | apache-2.0 | 3,161 |
/**
* Copyright 2017 Interel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package core3.security
// Token used when authentication is disabled: carries no permissions and a
// fixed placeholder user id.
class NoAuthUserToken extends UserTokenBase {
override def permissions: Vector[String] = Vector.empty
override def userID: String = "none"
}
| Interel-Group/core3 | src/main/scala/core3/security/NoAuthUserToken.scala | Scala | apache-2.0 | 771 |
package io.digitalmagic.akka.dsl.javaserialization
import io.digitalmagic.akka.dsl.EventSourcedActorWithInterpreter.EventSourcedActorState
import io.digitalmagic.akka.dsl.{ClientIndexesStateMap, Event, PersistentState, UniqueIndexApi}
import org.scalatest.{Matchers, WordSpecLike}
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
object JavaSerializationSpec {
// NOTE(review): this lowercase `test` case object is not referenced in the
// visible code — confirm whether it is still needed.
case object test
// Two index APIs with different key/value types; their per-api client event
// classes are what the spec serializes round-trip.
@SerialVersionUID(1)
implicit case object api1 extends UniqueIndexApi.Base[String, String]
@SerialVersionUID(1)
implicit case object api2 extends UniqueIndexApi.Base[Int, Int]
trait MyEvent extends Event
// Minimal persistent-state fixture whose event type is MyEvent.
case class MyState(n: Int) extends PersistentState {
override type EventType = MyEvent
}
}
class JavaSerializationSpec extends WordSpecLike with Matchers {
import JavaSerializationSpec._
// Serializes `obj` with standard Java serialization and returns the raw bytes.
def write(obj: Any): Array[Byte] = {
  val buffer = new ByteArrayOutputStream()
  val objectOut = new ObjectOutputStream(buffer)
  objectOut.writeObject(obj)
  // Closing the object stream flushes it; closing the byte buffer is a no-op
  // but kept for symmetry.
  objectOut.close()
  buffer.close()
  buffer.toByteArray
}
def read[T](arr: Array[Byte]): T = {
val bais = new ByteArrayInputStream(arr)
val ois = new ObjectInputStream(bais)
val deserialized = ois.readObject()
ois.close()
bais.close()
deserialized.asInstanceOf[T]
}
"java serialization for ClientEvent" must {
"support serializing AcquisitionStartedClientEvent" in {
{
val event = api1.AcquisitionStartedClientEvent("test")
val serialized = write(event)
val deserialized = read[api1.AcquisitionStartedClientEvent](serialized)
deserialized shouldBe event
}
{
val event = api2.AcquisitionStartedClientEvent(42)
val serialized = write(event)
val deserialized = read[api2.AcquisitionStartedClientEvent](serialized)
deserialized shouldBe event
}
}
"support serializing AcquisitionCompletedClientEvent" in {
{
val event = api1.AcquisitionCompletedClientEvent("test")
val serialized = write(event)
val deserialized = read[api1.AcquisitionCompletedClientEvent](serialized)
deserialized shouldBe event
}
{
val event = api2.AcquisitionCompletedClientEvent(42)
val serialized = write(event)
val deserialized = read[api2.AcquisitionCompletedClientEvent](serialized)
deserialized shouldBe event
}
}
"support serializing AcquisitionAbortedClientEvent" in {
{
val event = api1.AcquisitionAbortedClientEvent("test")
val serialized = write(event)
val deserialized = read[api1.AcquisitionAbortedClientEvent](serialized)
deserialized shouldBe event
}
{
val event = api2.AcquisitionAbortedClientEvent(42)
val serialized = write(event)
val deserialized = read[api2.AcquisitionAbortedClientEvent](serialized)
deserialized shouldBe event
}
}
"support serializing ReleaseStartedClientEvent" in {
{
val event = api1.ReleaseStartedClientEvent("test")
val serialized = write(event)
val deserialized = read[api1.ReleaseStartedClientEvent](serialized)
deserialized shouldBe event
}
{
val event = api2.ReleaseStartedClientEvent(42)
val serialized = write(event)
val deserialized = read[api2.ReleaseStartedClientEvent](serialized)
deserialized shouldBe event
}
}
"support serializing ReleaseCompletedClientEvent" in {
{
val event = api1.ReleaseCompletedClientEvent("test")
val serialized = write(event)
val deserialized = read[api1.ReleaseCompletedClientEvent](serialized)
deserialized shouldBe event
}
{
val event = api2.ReleaseCompletedClientEvent(42)
val serialized = write(event)
val deserialized = read[api2.ReleaseCompletedClientEvent](serialized)
deserialized shouldBe event
}
}
"support serializing ReleaseAbortedClientEvent" in {
{
val event = api1.ReleaseAbortedClientEvent("test")
val serialized = write(event)
val deserialized = read[api1.ReleaseAbortedClientEvent](serialized)
deserialized shouldBe event
}
{
val event = api2.ReleaseAbortedClientEvent(42)
val serialized = write(event)
val deserialized = read[api2.ReleaseAbortedClientEvent](serialized)
deserialized shouldBe event
}
}
}
"java serialization for EventSourcedActorState" must {
"support serializing EventSourcedActorState" in {
val state = EventSourcedActorState(MyState(1), ClientIndexesStateMap(Map(
api1 -> api1.ClientIndexesState(Map("abc" -> api1.AcquisitionPendingClientState(), "def" -> api1.ReleasePendingClientState(), "ghi" -> api1.AcquiredClientState())),
api2 -> api2.ClientIndexesState(Map(1 -> api2.AcquisitionPendingClientState(), 2 -> api2.ReleasePendingClientState(), 3 -> api2.AcquiredClientState()))
)))
val serialized = write(state)
val deserialized = read[EventSourcedActorState[MyState]](serialized)
deserialized shouldBe state
}
}
"java serialization for Error" must {
"support serializing DuplicateIndex" in {
{
val error = api1.DuplicateIndex("entityId", "key")
val serialized = write(error)
val deserialized = read[api1.DuplicateIndex](serialized)
deserialized shouldBe error
}
{
val error = api2.DuplicateIndex(41, 42)
val serialized = write(error)
val deserialized = read[api2.DuplicateIndex](serialized)
deserialized shouldBe error
}
}
"support serializing IndexIsFree" in {
{
val error = api1.IndexIsFree("entityId", "key")
val serialized = write(error)
val deserialized = read[api1.IndexIsFree](serialized)
deserialized shouldBe error
}
{
val error = api2.IndexIsFree(41, 42)
val serialized = write(error)
val deserialized = read[api2.IndexIsFree](serialized)
deserialized shouldBe error
}
}
"support serializing IndexIsAcquired" in {
{
val error = api1.IndexIsAcquired("entityId", "key")
val serialized = write(error)
val deserialized = read[api1.IndexIsAcquired](serialized)
deserialized shouldBe error
}
{
val error = api2.IndexIsAcquired(41, 42)
val serialized = write(error)
val deserialized = read[api2.IndexIsAcquired](serialized)
deserialized shouldBe error
}
}
"support serializing EntityIdMismatch" in {
{
val error = api1.EntityIdMismatch("occupyingEntityId", "requestedEntityId", "key")
val serialized = write(error)
val deserialized = read[api1.EntityIdMismatch](serialized)
deserialized shouldBe error
}
{
val error = api2.EntityIdMismatch(41, 42, 43)
val serialized = write(error)
val deserialized = read[api2.EntityIdMismatch](serialized)
deserialized shouldBe error
}
}
}
"java serialization for ServerEvent" must {
"support serializing AcquisitionStartedServerEvent" in {
{
val event = api1.AcquisitionStartedServerEvent("entityId")
val serialized = write(event)
val deserialized = read[api1.AcquisitionStartedServerEvent](serialized)
deserialized shouldBe event
}
{
val event = api2.AcquisitionStartedServerEvent(42)
val serialized = write(event)
val deserialized = read[api2.AcquisitionStartedServerEvent](serialized)
deserialized shouldBe event
}
}
"support serializing AcquisitionCompletedServerEvent" in {
{
val event = api1.AcquisitionCompletedServerEvent()
val serialized = write(event)
val deserialized = read[api1.AcquisitionCompletedServerEvent](serialized)
deserialized shouldBe event
}
{
val event = api2.AcquisitionCompletedServerEvent()
val serialized = write(event)
val deserialized = read[api2.AcquisitionCompletedServerEvent](serialized)
deserialized shouldBe event
}
}
"support serializing ReleaseStartedServerEvent" in {
{
val event = api1.ReleaseStartedServerEvent()
val serialized = write(event)
val deserialized = read[api1.ReleaseStartedServerEvent](serialized)
deserialized shouldBe event
}
{
val event = api2.ReleaseStartedServerEvent()
val serialized = write(event)
val deserialized = read[api2.ReleaseStartedServerEvent](serialized)
deserialized shouldBe event
}
}
"support serializing ReleaseCompletedServerEvent" in {
{
val event = api1.ReleaseCompletedServerEvent()
val serialized = write(event)
val deserialized = read[api1.ReleaseCompletedServerEvent](serialized)
deserialized shouldBe event
}
{
val event = api2.ReleaseCompletedServerEvent()
val serialized = write(event)
val deserialized = read[api2.ReleaseCompletedServerEvent](serialized)
deserialized shouldBe event
}
}
}
"java serialization for ServerState" must {
"support serializing FreeServerState" in {
{
val state = api1.FreeServerState()
val serialized = write(state)
val deserialized = read[api1.FreeServerState](serialized)
deserialized shouldBe state
}
{
val state = api2.FreeServerState()
val serialized = write(state)
val deserialized = read[api2.FreeServerState](serialized)
deserialized shouldBe state
}
}
"support serializing UnconfirmedServerState" in {
{
val state = api1.UnconfirmedServerState("entityId")
val serialized = write(state)
val deserialized = read[api1.UnconfirmedServerState](serialized)
deserialized shouldBe state
}
{
val state = api2.UnconfirmedServerState(42)
val serialized = write(state)
val deserialized = read[api2.UnconfirmedServerState](serialized)
deserialized shouldBe state
}
}
"support serializing AcquiredServerState" in {
{
val state = api1.AcquiredServerState("entityId")
val serialized = write(state)
val deserialized = read[api1.AcquiredServerState](serialized)
deserialized shouldBe state
}
{
val state = api2.AcquiredServerState(42)
val serialized = write(state)
val deserialized = read[api2.AcquiredServerState](serialized)
deserialized shouldBe state
}
}
}
"java serialization for UniqueIndexRequest" must {
"support serializing GetEntityId" in {
{
val request = api1.GetEntityId("key")
val serialized = write(request)
val deserialized = read[api1.GetEntityId](serialized)
deserialized shouldBe request
}
{
val request = api2.GetEntityId(42)
val serialized = write(request)
val deserialized = read[api2.GetEntityId](serialized)
deserialized shouldBe request
}
}
"support serializing StartAcquisition" in {
{
val request = api1.StartAcquisition("entityId", "key")
val serialized = write(request)
val deserialized = read[api1.StartAcquisition](serialized)
deserialized shouldBe request
}
{
val request = api2.StartAcquisition(41, 42)
val serialized = write(request)
val deserialized = read[api2.StartAcquisition](serialized)
deserialized shouldBe request
}
}
"support serializing CommitAcquisition" in {
{
val request = api1.CommitAcquisition("entityId", "key")
val serialized = write(request)
val deserialized = read[api1.CommitAcquisition](serialized)
deserialized shouldBe request
}
{
val request = api2.CommitAcquisition(41, 42)
val serialized = write(request)
val deserialized = read[api2.CommitAcquisition](serialized)
deserialized shouldBe request
}
}
"support serializing RollbackAcquisition" in {
{
val request = api1.RollbackAcquisition("entityId", "key")
val serialized = write(request)
val deserialized = read[api1.RollbackAcquisition](serialized)
deserialized shouldBe request
}
{
val request = api2.RollbackAcquisition(41, 42)
val serialized = write(request)
val deserialized = read[api2.RollbackAcquisition](serialized)
deserialized shouldBe request
}
}
"support serializing StartRelease" in {
{
val request = api1.StartRelease("entityId", "key")
val serialized = write(request)
val deserialized = read[api1.StartRelease](serialized)
deserialized shouldBe request
}
{
val request = api2.StartRelease(41, 42)
val serialized = write(request)
val deserialized = read[api2.StartRelease](serialized)
deserialized shouldBe request
}
}
"support serializing CommitRelease" in {
{
val request = api1.CommitRelease("entityId", "key")
val serialized = write(request)
val deserialized = read[api1.CommitRelease](serialized)
deserialized shouldBe request
}
{
val request = api2.CommitRelease(41, 42)
val serialized = write(request)
val deserialized = read[api2.CommitRelease](serialized)
deserialized shouldBe request
}
}
"support serializing RollbackRelease" in {
{
val request = api1.RollbackRelease("entityId", "key")
val serialized = write(request)
val deserialized = read[api1.RollbackRelease](serialized)
deserialized shouldBe request
}
{
val request = api2.RollbackRelease(41, 42)
val serialized = write(request)
val deserialized = read[api2.RollbackRelease](serialized)
deserialized shouldBe request
}
}
}
}
| digital-magic-io/akka-cqrs-dsl | akka-cqrs-dsl-core/src/test/scala/io/digitalmagic/akka/dsl/javaserialization/JavaSerializationSpec.scala | Scala | apache-2.0 | 14,212 |
package functional
import java.time.temporal.ChronoUnit
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import play.api.test._
import play.api.test.Helpers._
import play.api.i18n.MessagesApi
import play.api.i18n.{Lang, Messages, MessagesImpl, MessagesProvider}
import java.time.Instant
import play.api.{Application => PlayApp}
import play.api.inject.guice.GuiceApplicationBuilder
import helpers.InjectorSupport
import play.api.db.Database
import play.api.test._
import play.api.test.Helpers._
import helpers.Helper._
import org.specs2.mutable.Specification
import play.api.test.{Helpers, TestServer}
import play.api.test.TestServer
import play.api.i18n.{Messages, Lang}
import models._
import org.openqa.selenium.By
import java.util.concurrent.TimeUnit
import play.api.test.TestServer
import com.ruimo.scoins.Scoping._
class ShippingMaintenanceSpec extends Specification with InjectorSupport {
"Shipping fee maintenance" should {
    // Submitting the create-shipping-box form with invalid input must surface a
    // global input error plus per-field validation errors.
    "Should occur validation error in creating shipping box" in new WithBrowser(
      WebDriverFactory(CHROME), appl()
    ) {
      inject[Database].withConnection { implicit conn =>
        implicit val currencyInfo = inject[CurrencyRegistry]
        implicit val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)

        val user = loginWithTestUser(browser)
        val site1 = inject[SiteRepo].createNew(Ja, "商店1")
        val site2 = inject[SiteRepo].createNew(Ja, "商店2")

        browser.goTo(
          controllers.routes.ShippingBoxMaintenance.startCreateShippingBox().url + "?lang=" + lang.code
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("createNewShippingBoxTitle"))

        // Submit with every field left blank: all three fields must report errors.
        browser.find("#createNewShippingBoxForm").find("input[type='submit']").click
        browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el(".globalErrorMessage")).displayed()

        browser.find(".globalErrorMessage").text === Messages("inputError")
        browser.find("#itemClass_field").find("dd.error").text === Messages("error.number")
        browser.find("#boxSize_field").find("dd.error").text === Messages("error.number")
        browser.find("#boxName_field").find("dd.error").text === Messages("error.required")

        // Non-numeric values must also be rejected for the numeric fields.
        browser.find("#itemClass").fill().`with`("a")
        browser.find("#boxSize").fill().`with`("a")

        browser.find("#itemClass_field").find("dd.error").text === Messages("error.number")
        browser.find("#boxSize_field").find("dd.error").text === Messages("error.number")
      }
    }
    // Happy-path creation of a shipping box for site2, then verifies that a
    // second box with the same site and item class is rejected as a duplicate.
    "Can create shipping box" in new WithBrowser(
      WebDriverFactory(CHROME), appl()
    ) {
      inject[Database].withConnection { implicit conn =>
        implicit val currencyInfo = inject[CurrencyRegistry]
        implicit val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)

        val user = loginWithTestUser(browser)
        val site1 = inject[SiteRepo].createNew(Ja, "商店1")
        val site2 = inject[SiteRepo].createNew(Ja, "商店2")

        browser.goTo(
          controllers.routes.ShippingBoxMaintenance.startCreateShippingBox().url + "?lang=" + lang.code
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("createNewShippingBoxTitle"))

        // Both sites should be offered in the site drop-down.
        browser.webDriver
          .findElement(By.id("siteId"))
          .findElement(By.cssSelector("option[value=\\"" + site1.id.get + "\\"]")).getText() === "商店1"
        browser.webDriver
          .findElement(By.id("siteId"))
          .findElement(By.cssSelector("option[value=\\"" + site2.id.get + "\\"]")).getText() === "商店2"

        // Fill in a valid box for site2 and submit.
        browser.find("#siteId").find("option[value=\\"" + site2.id.get + "\\"]").click()
        browser.find("#itemClass").fill().`with`("1")
        browser.find("#boxSize").fill().`with`("2")
        browser.find("#boxName").fill().`with`("BoxName")

        browser.find("#createNewShippingBoxForm").find("input[type='submit']").click
        browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el(".message")).displayed()

        browser.find(".message").text === Messages("shippingBoxIsCreated")
        browser.webDriver.getTitle === Messages("commonTitle", Messages("createNewShippingBoxTitle"))

        // The new box must have been persisted with the submitted values.
        val list = inject[ShippingBoxRepo].list(site2.id.get)
        list.size === 1
        doWith(list(0)) { rec =>
          rec.siteId === site2.id.get
          rec.itemClass === 1
          rec.boxSize === 2
          rec.boxName === "BoxName"
        }

        // Creating with the same site and item class will cause duplicated error.
        browser.goTo(
          controllers.routes.ShippingBoxMaintenance.startCreateShippingBox().url + "?lang=" + lang.code
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("createNewShippingBoxTitle"))

        browser.find("#siteId").find("option[value=\\"" + site2.id.get + "\\"]").click()
        browser.find("#itemClass").fill().`with`("1")
        browser.find("#boxSize").fill().`with`("3")
        browser.find("#boxName").fill().`with`("BoxName2")

        browser.find("#createNewShippingBoxForm").find("input[type='submit']").click

        browser.find(".globalErrorMessage").text === Messages("inputError")
        browser.find("#itemClass_field").find("dd.error").text === Messages("duplicatedItemClass")
      }
    }
    // With no shipping boxes in the database, the edit page should show the
    // "no records found" message.
    "Can edit without records" in new WithBrowser(
      WebDriverFactory(CHROME), appl()
    ) {
      inject[Database].withConnection { implicit conn =>
        implicit val currencyInfo = inject[CurrencyRegistry]
        implicit val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)

        val user = loginWithTestUser(browser)

        browser.goTo(
          controllers.routes.ShippingBoxMaintenance.editShippingBox().url + "?lang=" + lang.code
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("editShippingBoxTitle"))
        browser.find(".norecord").text === Messages("no.records.found")
      }
    }
    // The edit page should list one row per shipping box showing its id, site
    // name, item class, box size and box name, in creation order.
    "Can edit with some records" in new WithBrowser(
      WebDriverFactory(CHROME), appl()
    ) {
      inject[Database].withConnection { implicit conn =>
        implicit val currencyInfo = inject[CurrencyRegistry]
        implicit val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)

        val user = loginWithTestUser(browser)
        val site1 = inject[SiteRepo].createNew(Ja, "商店1")
        val site2 = inject[SiteRepo].createNew(Ja, "商店2")
        val box1 = inject[ShippingBoxRepo].createNew(site1.id.get, 1, 2, "box1")
        val box2 = inject[ShippingBoxRepo].createNew(site1.id.get, 3, 4, "box2")
        val box3 = inject[ShippingBoxRepo].createNew(site2.id.get, 5, 6, "box3")

        browser.goTo(
          controllers.routes.ShippingBoxMaintenance.editShippingBox().url + "?lang=" + lang.code
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("editShippingBoxTitle"))

        // Row 0: box1 of site1.
        browser.find(".shippingBoxTableBodyId").index(0).text === box1.id.get.toString
        browser.find(".shippingBoxTableBodySite").index(0).text === "商店1"
        browser.find(".shippingBoxTableBodyItemClass").index(0).text === "1"
        browser.find(".shippingBoxTableBodyBoxSize").index(0).text === "2"
        browser.find(".shippingBoxTableBodyBoxName").index(0).text === "box1"

        // Row 1: box2 of site1.
        browser.find(".shippingBoxTableBodyId").index(1).text === box2.id.get.toString
        browser.find(".shippingBoxTableBodySite").index(1).text === "商店1"
        browser.find(".shippingBoxTableBodyItemClass").index(1).text === "3"
        browser.find(".shippingBoxTableBodyBoxSize").index(1).text === "4"
        browser.find(".shippingBoxTableBodyBoxName").index(1).text === "box2"

        // Row 2: box3 of site2.
        browser.find(".shippingBoxTableBodyId").index(2).text === box3.id.get.toString
        browser.find(".shippingBoxTableBodySite").index(2).text === "商店2"
        browser.find(".shippingBoxTableBodyItemClass").index(2).text === "5"
        browser.find(".shippingBoxTableBodyBoxSize").index(2).text === "6"
        browser.find(".shippingBoxTableBodyBoxName").index(2).text === "box3"
      }
    }
    // Clearing all fields on the change form must produce per-field validation
    // errors; valid input afterwards must be accepted and persisted.
    "Can edit one box record with validation error" in new WithBrowser(
      WebDriverFactory(CHROME), appl()
    ) {
      inject[Database].withConnection { implicit conn =>
        implicit val currencyInfo = inject[CurrencyRegistry]
        implicit val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)

        val user = loginWithTestUser(browser)
        val site1 = inject[SiteRepo].createNew(Ja, "商店1")
        val site2 = inject[SiteRepo].createNew(Ja, "商店2")
        val box1 = inject[ShippingBoxRepo].createNew(site1.id.get, 1, 2, "box1")

        browser.goTo(
          controllers.routes.ShippingBoxMaintenance.startChangeShippingBox(box1.id.get).url + "&lang=" + lang.code
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("changeShippingBoxTitle"))

        // Both sites should be offered in the site drop-down.
        browser.webDriver
          .findElement(By.id("siteId"))
          .findElement(By.cssSelector("option[value=\\"" + site1.id.get + "\\"]")).getText() === "商店1"
        browser.webDriver
          .findElement(By.id("siteId"))
          .findElement(By.cssSelector("option[value=\\"" + site2.id.get + "\\"]")).getText() === "商店2"

        // Clear all fields and submit: each field must report an error.
        browser.find("#itemClass").fill().`with`("")
        browser.find("#boxSize").fill().`with`("")
        browser.find("#boxName").fill().`with`("")

        browser.find("#changeShippingBoxForm").find("input[type='submit']").click
        browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el("dd.error")).displayed()

        browser.find("#itemClass_field").find("dd.error").text === Messages("error.number")
        browser.find("#boxSize_field").find("dd.error").text === Messages("error.number")
        browser.find("#boxName_field").find("dd.error").text === Messages("error.required")

        // Valid input must be accepted and the change persisted.
        browser.find("#itemClass").fill().`with`("100")
        browser.find("#boxSize").fill().`with`("200")
        browser.find("#boxName").fill().`with`("boxName2")

        browser.find("#changeShippingBoxForm").find("input[type='submit']").click
        browser.await().atMost(10, TimeUnit.SECONDS).until(browser.el(".title")).displayed()

        val box = inject[ShippingBoxRepo].apply(box1.id.get)
        box.itemClass === 100
        box.boxSize === 200
        box.boxName === "boxName2"
      }
    }
    // The fee maintenance page for a box with no fee records should still show
    // the box header (site and box name) and an empty fee list.
    "Can maintenance fee without records" in new WithBrowser(
      WebDriverFactory(CHROME), appl()
    ) {
      inject[Database].withConnection { implicit conn =>
        implicit val currencyInfo = inject[CurrencyRegistry]
        implicit val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)

        val user = loginWithTestUser(browser)
        val site1 = inject[SiteRepo].createNew(Ja, "商店1")
        val site2 = inject[SiteRepo].createNew(Ja, "商店2")
        val box1 = inject[ShippingBoxRepo].createNew(site1.id.get, 1, 2, "box1")

        browser.goTo(
          controllers.routes.ShippingFeeMaintenance.startFeeMaintenanceNow(box1.id.get).url + "&lang=" + lang.code
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeMaintenanceTitle"))

        browser.find("table.shippingFeeHeader").find(".body").find(".site").text === "商店1"
        browser.find("table.shippingFeeHeader").find(".body").find(".boxName").text === "box1"
        browser.find(".shippingFeeList").find(".body").size === 0
      }
    }
    // The fee maintenance page should list one row per fee record; the fee
    // column shows "-" for records without a fee history.
    "Can maintenance fee with records" in new WithBrowser(
      WebDriverFactory(CHROME), appl()
    ) {
      inject[Database].withConnection { implicit conn =>
        implicit val currencyInfo = inject[CurrencyRegistry]
        implicit val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)

        val user = loginWithTestUser(browser)
        val site1 = inject[SiteRepo].createNew(Ja, "商店1")
        val site2 = inject[SiteRepo].createNew(Ja, "商店2")
        val box1 = inject[ShippingBoxRepo].createNew(site1.id.get, 1, 2, "box1")
        val fee1 = inject[ShippingFeeRepo].createNew(box1.id.get, CountryCode.JPN, JapanPrefecture.北海道.code)
        val fee2 = inject[ShippingFeeRepo].createNew(box1.id.get, CountryCode.JPN, JapanPrefecture.東京都.code)

        browser.goTo(
          controllers.routes.ShippingFeeMaintenance.startFeeMaintenanceNow(box1.id.get).url + "&lang=" + lang.code
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeMaintenanceTitle"))

        doWith(browser.find("table.shippingFeeHeader").find(".body")) { e =>
          e.find(".site").text === "商店1"
          e.find(".boxName").text === "box1"
        }
        doWith(browser.find(".shippingFeeList").find(".body").index(0)) { e =>
          e.find(".country").text === Messages("country.JPN")
          e.find(".prefecture").text === JapanPrefecture.北海道.toString
          e.find(".shippingFee").text === "-"
        }
        doWith(browser.find(".shippingFeeList").find(".body").index(1)) { e =>
          e.find(".country").text === Messages("country.JPN")
          e.find(".prefecture").text === JapanPrefecture.東京都.toString
          e.find(".shippingFee").text === "-"
        }
      }
    }
    // Deleting a fee record requires confirming a dialog; cancelling the
    // dialog keeps the record, confirming removes it from the list.
    "Can remove fee record" in new WithBrowser(
      WebDriverFactory(CHROME), appl()
    ) {
      inject[Database].withConnection { implicit conn =>
        implicit val currencyInfo = inject[CurrencyRegistry]
        implicit val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)

        val user = loginWithTestUser(browser)
        val site1 = inject[SiteRepo].createNew(Ja, "商店1")
        val site2 = inject[SiteRepo].createNew(Ja, "商店2")
        val box1 = inject[ShippingBoxRepo].createNew(site1.id.get, 1, 2, "box1")
        val fee1 = inject[ShippingFeeRepo].createNew(box1.id.get, CountryCode.JPN, JapanPrefecture.北海道.code)
        val fee2 = inject[ShippingFeeRepo].createNew(box1.id.get, CountryCode.JPN, JapanPrefecture.東京都.code)

        browser.goTo(
          controllers.routes.ShippingFeeMaintenance.startFeeMaintenanceNow(box1.id.get).url + "&lang=" + lang.code
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeMaintenanceTitle"))

        browser.find(".shippingFeeList").find(".body").index(0).find(".delete").find("button").click
        // Dialog should be shown.
        browser.await().atMost(10, TimeUnit.SECONDS).until(browser.el(".ui-dialog-buttonset")).displayed()

        // Cancel
        browser.find(".ui-dialog-buttonset").find("button").index(1).click()
        // Cancelling must leave both records in place.
        browser.find(".shippingFeeList").find(".body").size === 2

        browser.find(".shippingFeeList").find(".body").index(0).find(".delete").find("button").click
        browser.await().atMost(10, TimeUnit.SECONDS).until(browser.el(".ui-dialog-buttonset")).displayed()

        // do removal
        browser.find(".ui-dialog-buttonset").find("button").index(0).click()
        // One record remains after confirming deletion.
        browser.find(".shippingFeeList").find(".body").size === 1
      }
    }
    // Checking prefectures on the fee-entry form and submitting should create
    // one fee record per checked prefecture, each without a fee history yet.
    "Can create fee record" in new WithBrowser(
      WebDriverFactory(CHROME), appl()
    ) {
      inject[Database].withConnection { implicit conn =>
        implicit val currencyInfo = inject[CurrencyRegistry]
        implicit val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)

        val user = loginWithTestUser(browser)
        val site1 = inject[SiteRepo].createNew(Ja, "商店1")
        val site2 = inject[SiteRepo].createNew(Ja, "商店2")
        val box1 = inject[ShippingBoxRepo].createNew(site1.id.get, 1, 2, "box1")

        browser.goTo(
          controllers.routes.ShippingFeeMaintenance.startFeeMaintenanceNow(box1.id.get).url + "&lang=" + lang.code
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeMaintenanceTitle"))
        browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el("#createShippingFeeEntryButton")).displayed()
        browser.find("#createShippingFeeEntryButton").click()

        // No prefectures are checked.
        browser.find("input:not(:checked)[type='checkbox']").size === JapanPrefecture.all.length

        // Check Tokyo and Kanagawa.
        browser.find("input[type='checkbox'][value='" + JapanPrefecture.東京都.code + "']").click()
        browser.find("input[type='checkbox'][value='" + JapanPrefecture.神奈川県.code + "']").click()

        browser.find("#createShippingFeeForm").find("input[type='submit']").click
        browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeMaintenanceTitle"))

        // Both checked prefectures appear in the list with no fee yet ("-").
        doWith(browser.find(".shippingFeeList").find(".body").index(0)) { e =>
          e.find(".country").text === Messages("country.JPN")
          e.find(".prefecture").text === JapanPrefecture.東京都.toString
          e.find(".shippingFee").text === "-"
        }
        doWith(browser.find(".shippingFeeList").find(".body").index(1)) { e =>
          e.find(".country").text === Messages("country.JPN")
          e.find(".prefecture").text === JapanPrefecture.神奈川県.toString
          e.find(".shippingFee").text === "-"
        }
      }
    }
    // The fee-history form must require a numeric fee and a valid "valid
    // until" date/time, and must reject a negative cost fee.
    "Show validation error when adding fee" in new WithBrowser(
      WebDriverFactory(CHROME), appl()
    ) {
      inject[Database].withConnection { implicit conn =>
        implicit val currencyInfo = inject[CurrencyRegistry]
        implicit val localeInfo = inject[LocaleInfoRepo]
        import localeInfo.{En, Ja}
        implicit val lang = Lang("ja")
        implicit val storeUserRepo = inject[StoreUserRepo]
        val Messages = inject[MessagesApi]
        implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)

        val user = loginWithTestUser(browser)
        val site1 = inject[SiteRepo].createNew(Ja, "商店1")
        val site2 = inject[SiteRepo].createNew(Ja, "商店2")
        val box1 = inject[ShippingBoxRepo].createNew(site1.id.get, 1, 2, "box1")
        val fee1 = inject[ShippingFeeRepo].createNew(box1.id.get, CountryCode.JPN, JapanPrefecture.北海道.code)
        val fee2 = inject[ShippingFeeRepo].createNew(box1.id.get, CountryCode.JPN, JapanPrefecture.東京都.code)
        val tax1 = inject[TaxRepo].createNew
        val taxName1 = inject[TaxNameRepo].createNew(tax1, Ja, "tax01")
        val tax2 = inject[TaxRepo].createNew
        val taxName2 = inject[TaxNameRepo].createNew(tax2, Ja, "tax02")

        browser.goTo(
          controllers.routes.ShippingFeeMaintenance.startFeeMaintenanceNow(box1.id.get).url + "&lang=" + lang.code
        )
        browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeMaintenanceTitle"))

        // Edit fee for tokyo.
        browser.find(".shippingFeeList").find(".body").index(0).find(".edit").find("a").click
        browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeHistoryMaintenanceTitle"))

        doWith(browser.find(".shippingFeeHistory").find(".body")) { rec =>
          rec.find(".boxName").text === "box1"
          rec.find(".country").text === "日本"
          rec.find(".prefecture").text === "北海道"
        }

        // Tax names should be offered in creation order.
        browser.find("#taxId").find("option").index(0).text === "tax01"
        browser.find("#taxId").find("option").index(1).text === "tax02"

        // Submit with empty fee/validUntil: both must report errors; costFee
        // is optional, so it has none.
        browser.find("#addShippingFeeHistoryButton").click()
        browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el("#fee_field")).displayed()

        browser.find("#fee_field").find(".error").text === Messages("error.number")
        browser.find("#validUntil_field").find(".error").text === Messages("error.localDateTime")
        browser.find("#costFee_field").find(".error").size === 0

        // A negative cost fee must be rejected with a minimum-value error.
        browser.find("#costFee").fill().`with`("-1")
        browser.find("#addShippingFeeHistoryButton").click()
        browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el("#fee_field")).displayed()
        browser.find("#costFee_field").find(".error").text === Messages("error.min", 0)
      }
    }
"Can add, edit, delete fee" in new WithBrowser(
WebDriverFactory(CHROME), appl()
) {
inject[Database].withConnection { implicit conn =>
implicit val currencyInfo = inject[CurrencyRegistry]
implicit val localeInfo = inject[LocaleInfoRepo]
import localeInfo.{En, Ja}
implicit val lang = Lang("ja")
implicit val storeUserRepo = inject[StoreUserRepo]
val Messages = inject[MessagesApi]
implicit val mp: MessagesProvider = new MessagesImpl(lang, Messages)
val user = loginWithTestUser(browser)
val site1 = inject[SiteRepo].createNew(Ja, "商店1")
val site2 = inject[SiteRepo].createNew(Ja, "商店2")
val box1 = inject[ShippingBoxRepo].createNew(site1.id.get, 1, 2, "box1")
val fee1 = inject[ShippingFeeRepo].createNew(box1.id.get, CountryCode.JPN, JapanPrefecture.北海道.code)
val fee2 = inject[ShippingFeeRepo].createNew(box1.id.get, CountryCode.JPN, JapanPrefecture.東京都.code)
val tax1 = inject[TaxRepo].createNew
val taxName1 = inject[TaxNameRepo].createNew(tax1, Ja, "tax01")
val tax2 = inject[TaxRepo].createNew
val taxName2 = inject[TaxNameRepo].createNew(tax2, Ja, "tax02")
browser.goTo(
controllers.routes.ShippingFeeMaintenance.startFeeMaintenanceNow(box1.id.get).url + "&lang=" + lang.code
)
browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeMaintenanceTitle"))
// Edit fee for hokkaido.
browser.find(".shippingFeeList").find(".body").index(0).find(".edit").find("a").click
browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeHistoryMaintenanceTitle"))
doWith(browser.find(".shippingFeeHistory").find(".body")) { rec =>
rec.find(".boxName").text === "box1"
rec.find(".country").text === "日本"
rec.find(".prefecture").text === "北海道"
}
val validDate = LocalDateTime.now().plus(10, ChronoUnit.DAYS)
val formattedValidDate =
DateTimeFormatter.ofPattern(Messages("yyyy-MM-dd hh:mm:ss")).format(validDate)
// without cost fee.
browser.find("#taxId").find("option").index(1).click()
browser.find("#fee").fill().`with`("123")
browser.find("#validUntil").fill().`with`(formattedValidDate)
browser.find("#addShippingFeeHistoryButton").click()
browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el(".title")).displayed()
browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeHistoryMaintenanceTitle"))
browser.webDriver.findElement(By.id("histories_0_taxId")).findElement(
By.cssSelector("option[value='" + tax2.id.get + "']")
).isSelected === true
browser.find("#histories_0_fee").attribute("value") === "123.00"
browser.find("#histories_0_costFee").attribute("value") === ""
browser.find("#histories_0_validUntil").attribute("value") === formattedValidDate
// Remove history.
browser.find(".removeHistoryButton").click()
browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el(".title")).displayed()
browser.find("#histories_0_fee").size === 0
// with cost fee.
browser.find("#taxId").find("option").index(1).click()
browser.find("#fee").fill().`with`("123")
browser.find("#costFee").fill().`with`("100")
browser.find("#validUntil").fill().`with`(formattedValidDate)
browser.find("#addShippingFeeHistoryButton").click()
browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el(".title")).displayed()
browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeHistoryMaintenanceTitle"))
browser.webDriver.findElement(By.id("histories_0_taxId")).findElement(
By.cssSelector("option[value='" + tax2.id.get + "']")
).isSelected === true
browser.find("#histories_0_fee").attribute("value") === "123.00"
browser.find("#histories_0_costFee").attribute("value") === "100.00"
browser.find("#histories_0_validUntil").attribute("value") === formattedValidDate
// Can change history.
browser.find("#histories_0_taxId").find("option[value='" + tax1.id.get + "']").click()
browser.find("#histories_0_fee").fill().`with`("234")
browser.find("#histories_0_costFee").fill().`with`("")
browser.find("#histories_0_validUntil").fill().`with`(formattedValidDate)
browser.find("#updateShippingFeeHistoryButton").click()
browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el(".title")).displayed()
browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeHistoryMaintenanceTitle"))
browser.webDriver.findElement(By.id("histories_0_taxId")).findElement(
By.cssSelector("option[value='" + tax1.id.get + "']")
).isSelected === true
browser.find("#histories_0_fee").attribute("value") === "234.00"
browser.find("#histories_0_costFee").attribute("value") === ""
browser.find("#histories_0_validUntil").attribute("value") === formattedValidDate
browser.find("#histories_0_costFee").fill().`with`("100")
browser.find("#updateShippingFeeHistoryButton").click()
browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el(".title")).displayed()
browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeHistoryMaintenanceTitle"))
browser.find("#histories_0_costFee").attribute("value") === "100.00"
// Check fee history.
browser.goTo(
controllers.routes.ShippingFeeMaintenance.startFeeMaintenanceNow(box1.id.get).url + "&lang=" + lang.code
)
browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeMaintenanceTitle"))
doWith(browser.find(".shippingFeeList").find(".body").index(0)) { e =>
e.find(".country").text === Messages("country.JPN")
e.find(".prefecture").text === JapanPrefecture.北海道.toString
e.find(".shippingFee").text === "234円"
}
// Delete fee history.
browser.goTo(
controllers.routes.ShippingFeeMaintenance.editHistory(fee1.id.get).url + "&lang=" + lang.code
)
browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el(".title")).displayed()
browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeHistoryMaintenanceTitle"))
browser.find("button.removeHistoryButton").size === 1
browser.find("button.removeHistoryButton").click()
browser.await().atMost(5, TimeUnit.SECONDS).until(browser.el(".title")).displayed()
browser.webDriver.getTitle === Messages("commonTitle", Messages("shippingFeeHistoryMaintenanceTitle"))
browser.find("button.removeHistoryButton").size === 0
}
}
}
}
| ruimo/store2 | test/functional/ShippingMaintenanceSpec.scala | Scala | apache-2.0 | 29,212 |
/*
* Copyright (c) 2012-2016 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.schemaguru
package sparkjob
import scalaz._
import Scalaz._
import org.json4s._
import Common.{ SchemaDescription, SchemaVer }
import generators.PredefinedEnums.predefined
/**
* Class containing all inputs necessary data for schema derivation command
* Basically, a copy of [[com.snowplowanalytics.schemaguru.cli.SchemaCommand]]
* with [[input]] and [[output]] as String instead of File and required [[output]]
*/
case class SparkJobCommand private[sparkjob](
  input: String,
  output: String,
  enumCardinality: Int = 0,
  enumSets: Boolean = false,
  vendor: Option[String] = None,
  name: Option[String] = None,
  schemaver: Option[SchemaVer] = None,
  schemaBy: Option[String] = None,
  noLength: Boolean = false,
  ndjson: Boolean = false,
  errorsPath: Option[String] = None) {

  /**
   * Schema-segmentation preference: the JSON Path by which Schemas are split,
   * paired with the output path they are written to (segmented Schemas cannot
   * be printed to stdout). `None` means no segmentation is requested.
   */
  val segmentSchema = schemaBy map (path => path -> output)

  /**
   * Self-description is only available when both vendor and name were given;
   * the SchemaVer falls back to 1-0-0 when none was supplied.
   */
  val selfDescribing: Option[SchemaDescription] =
    for {
      v <- vendor
      n <- name
    } yield SchemaDescription(v, n, schemaver getOrElse SchemaVer(1, 0, 0))

  /** Predefined enum sets, used only when enum-set matching was requested. */
  lazy val successfulEnumSets: List[JArray] =
    if (!enumSets) Nil else predefined.valuesIterator.toList
}
| snowplow/schema-guru | sparkjob/src/main/scala/com/snowplowanalytics/schemaguru/sparkjob/SparkJobCommand.scala | Scala | apache-2.0 | 2,130 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.tools.export.formats
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
/**
 * A [[FeatureExporter]] that discards every feature. It still walks the
 * input iterator so that a count of exported features can be reported.
 */
object NullExporter extends FeatureExporter {

  /** Nothing to set up for the null sink. */
  override def start(sft: SimpleFeatureType): Unit = {}

  /** Drains the iterator and reports how many features were seen. */
  override def export(features: Iterator[SimpleFeature]): Option[Long] =
    Some(features.foldLeft(0L)((n, _) => n + 1L))

  /** No bytes are ever written. */
  override def bytes: Long = 0L

  /** Nothing to release. */
  override def close(): Unit = {}
}
| elahrvivaz/geomesa | geomesa-tools/src/main/scala/org/locationtech/geomesa/tools/export/formats/NullExporter.scala | Scala | apache-2.0 | 917 |
import scala.tools.partest.DirectTest
// Regression test: identifiers and patterns built from surrogate-pair
// characters must compile. The generated source embeds the escapes below,
// so their exact spelling is significant — do not reformat them.
object Test extends DirectTest {
  // for reference, UTF-8 of U0
  //val data = Array(0xed, 0xa0, 0x81).map(_.asInstanceOf[Byte])
  // U0/U1: a lone high surrogate and a lone low surrogate.
  def U0 = "\\ud801"
  def U1 = "\\udc00"
  // \\u10428 isLetter and isLowerCase
  // U2+U3 together form the surrogate pair for that letter.
  def U2 = "\\ud801"
  def U3 = "\\udc28"

  // Source under test: surrogate escapes used in string literals, backquoted
  // identifiers, pattern bindings, and match alternatives.
  def code =
    s"""class C {
       |  def x = "$U0"
       |  def y = "$U1"
       |  def `$U0` = x
       |  def `$U1` = y
       |
       |  def f(x: Any): Boolean = x match {
       |    case ${U2}${U3}XYZ: String => true
       |    case $U2$U3 => true
       |  }
       |  def g(x: Any) = x match {
       |    case $U2$U3 @ _ => $U2$U3
       |  }
       |}""".stripMargin

  def show(): Unit = {
    // Sanity check: the escape really is a single UTF-16 code unit,
    // then the generated source must compile cleanly.
    assert(U0.length == 1)
    assert(compile())
  }
}
| lrytz/scala | test/files/run/t1406.scala | Scala | apache-2.0 | 765 |
package com.ignition.frame
import scala.util.Random
import scala.xml.{ Elem, Node }
import org.apache.spark.rdd.RDD.rddToPairRDDFunctions
import org.apache.spark.sql.{ DataFrame, Row }
import org.apache.spark.sql.types.{ Decimal, StructType }
import com.ignition.types.RichBoolean
import org.json4s._
import org.json4s.JsonDSL._
import com.ignition.util.XmlUtils.RichNodeSeq
import com.ignition.util.JsonUtils.RichJValue
/**
* Reduce operations.
*/
object ReduceOp extends Enumeration {

  /**
   * An enumeration member that can combine two column values of the same
   * runtime type. The `reduce` implementations pattern match on the operand
   * types and are deliberately partial: an unsupported type pairing throws
   * a MatchError.
   */
  abstract class ReduceOp extends super.Val {
    /** Combines two values of the same (supported) runtime type. */
    def reduce(a: Any, b: Any): Any
    /** Convenience builder: `SUM("price")` yields the pair ("price" -> SUM). */
    def apply(field: String) = field -> this
  }
  // Lets a raw Enumeration Value (e.g. from `withName`) be used as a ReduceOp;
  // safe because every member below is constructed as a ReduceOp.
  implicit def valueToOp(v: Value) = v.asInstanceOf[ReduceOp]

  // Picks one of the two operands at random — intentionally non-deterministic.
  val ANY = new ReduceOp { def reduce(a: Any, b: Any): Any = Random.nextBoolean ? (a, b) }

  // Numeric addition. NOTE(review): Byte/Short operands widen to Int under
  // Scala numeric promotion, so the reduced value's runtime type changes.
  val SUM = new ReduceOp {
    def reduce(a: Any, b: Any): Any = (a, b) match {
      case (x: Int, y: Int) => (x + y)
      case (x: Long, y: Long) => (x + y)
      case (x: Short, y: Short) => (x + y)
      case (x: Byte, y: Byte) => (x + y)
      case (x: Float, y: Float) => (x + y)
      case (x: Double, y: Double) => (x + y)
      case (x: Decimal, y: Decimal) => (x + y)
    }
  }
  // Minimum of two numeric, date, or timestamp values
  // (uses RichBoolean's `?` to pick the smaller operand).
  val MIN = new ReduceOp {
    def reduce(a: Any, b: Any): Any = (a, b) match {
      case (x: Int, y: Int) => (x < y) ? (x, y)
      case (x: Long, y: Long) => (x < y) ? (x, y)
      case (x: Short, y: Short) => (x < y) ? (x, y)
      case (x: Byte, y: Byte) => (x < y) ? (x, y)
      case (x: Float, y: Float) => (x < y) ? (x, y)
      case (x: Double, y: Double) => (x < y) ? (x, y)
      case (x: Decimal, y: Decimal) => (x < y) ? (x, y)
      case (x: java.sql.Date, y: java.sql.Date) => (x before y) ? (x, y)
      case (x: java.sql.Timestamp, y: java.sql.Timestamp) => (x before y) ? (x, y)
    }
  }
  // Maximum of two numeric, date, or timestamp values.
  val MAX = new ReduceOp {
    def reduce(a: Any, b: Any): Any = (a, b) match {
      case (x: Int, y: Int) => (x > y) ? (x, y)
      case (x: Long, y: Long) => (x > y) ? (x, y)
      case (x: Short, y: Short) => (x > y) ? (x, y)
      case (x: Byte, y: Byte) => (x > y) ? (x, y)
      case (x: Float, y: Float) => (x > y) ? (x, y)
      case (x: Double, y: Double) => (x > y) ? (x, y)
      case (x: Decimal, y: Decimal) => (x > y) ? (x, y)
      case (x: java.sql.Date, y: java.sql.Date) => (x after y) ? (x, y)
      case (x: java.sql.Timestamp, y: java.sql.Timestamp) => (x after y) ? (x, y)
    }
  }
  // Logical conjunction of two booleans.
  val AND = new ReduceOp {
    def reduce(a: Any, b: Any): Any = (a, b) match {
      case (x: Boolean, y: Boolean) => x && y
    }
  }
  // Logical disjunction of two booleans.
  val OR = new ReduceOp {
    def reduce(a: Any, b: Any): Any = (a, b) match {
      case (x: Boolean, y: Boolean) => x || y
    }
  }
  // Exclusive-or of two booleans, spelled out as (x && !y) || (y && !x).
  val XOR = new ReduceOp {
    def reduce(a: Any, b: Any): Any = (a, b) match {
      case (x: Boolean, y: Boolean) => (x && !y) || (y && !x)
    }
  }
  // String concatenation (left operand first).
  val CONCAT = new ReduceOp {
    def reduce(a: Any, b: Any): Any = (a, b) match {
      case (x: String, y: String) => x + y
    }
  }
}
import ReduceOp._
/**
* Performs reduceByKey() function by grouping the rows by the selected key first, and then
* applying a list of reduce functions to the specified data columns.
*
* @author Vlad Orzhekhovskiy
*/
case class Reduce(reducers: Iterable[(String, ReduceOp)], groupFields: Iterable[String] = Nil)
  extends FrameTransformer with PairFunctions {
  import Reduce._

  /** Returns a copy with one more (field -> op) reducer appended. */
  def add(tuple: (String, ReduceOp)) = copy(reducers = reducers.toSeq :+ tuple)
  /** Operator alias for [[add]]. */
  def %(tuple: (String, ReduceOp)) = add(tuple)

  // NOTE(review): unlike the tuple overload above, this REPLACES the current
  // reducers with `functions` applied to `field` (copy's first positional
  // argument is `reducers`) — confirm that replacement is the intended contract.
  def add(field: String, functions: ReduceOp*) = copy(functions.map(f => (field, f)))
  def %(field: String, functions: ReduceOp*) = add(field, functions: _*)

  /** Returns a copy grouped by the given fields. */
  def groupBy(fields: String*) = copy(groupFields = fields)

  /**
   * Groups the rows by the grouping columns, reduces the data columns
   * element-wise with the configured ops, and rebuilds a DataFrame whose
   * schema is the grouping columns followed by one column per reducer,
   * renamed "<field>_<OP>".
   */
  protected def compute(arg: DataFrame)(implicit runtime: SparkRuntime): DataFrame = {
    val groupFields = this.groupFields
    val dataFields = reducers map (_._1) toSeq
    val ops = reducers map (_._2) toSeq

    // In preview mode only a limited number of rows is processed.
    val df = optLimit(arg, runtime.previewMode)

    // Pair RDD keyed by the grouping columns, valued by the data columns.
    val rdd = toPair(df, dataFields, groupFields)
    rdd.persist

    // Combine value rows position-by-position with the matching reduce op.
    val reduced = rdd reduceByKey { (row1, row2) =>
      val values = ops zip (row1.toSeq zip row2.toSeq) map {
        case (op, (a, b)) => op.reduce(a, b)
      }
      Row.fromSeq(values)
    }

    // Flatten (key, value) back into a single row per group.
    val targetRDD = reduced map {
      case (key, value) => Row.fromSeq(key.toSeq ++ value.toSeq)
    }

    // Output schema: grouping fields unchanged, reduced fields renamed.
    val targetFields = (groupFields map df.schema.apply toSeq) ++ (reducers map {
      case (name, op) => df.schema(name).copy(name = name + "_" + op.toString)
    })
    val targetSchema = StructType(targetFields)

    ctx.createDataFrame(targetRDD, targetSchema)
  }

  /** XML representation: an <aggregate> element plus an optional <group-by>. */
  def toXml: Elem =
    <node>
      <aggregate>
        {
          reducers map { case (name, op) => <field name={ name } type={ op.toString }/> }
        }
      </aggregate>
      {
        if (!groupFields.isEmpty)
          <group-by>
            { groupFields map (f => <field name={ f }/>) }
          </group-by>
      }
    </node>.copy(label = tag)

  /** JSON representation; `groupBy` is omitted entirely when empty. */
  def toJson: JValue = {
    val groupBy = if (groupFields.isEmpty) None else Some(groupFields)
    val aggregate = reducers map (df => df._1 -> df._2.toString)
    ("tag" -> tag) ~ ("groupBy" -> groupBy) ~ ("aggregate" -> aggregate)
  }
}
/**
* Reduce companion object.
*/
object Reduce {
  val tag = "reduce"

  /** Varargs convenience constructor. */
  def apply(reducers: (String, ReduceOp)*): Reduce = apply(reducers.toList)

  /** Rebuilds a [[Reduce]] from the XML produced by `toXml`. */
  def fromXml(xml: Node) = {
    val dataFields = (xml \ "aggregate" \ "field") map { node =>
      val name = node \ "@name" asString
      // `@type` carries the ReduceOp's enumeration name.
      val func = ReduceOp.withName(node \ "@type" asString): ReduceOp
      name -> func
    }
    val groupFields = (xml \ "group-by" \ "field") map (_ \ "@name" asString)
    apply(dataFields, groupFields)
  }

  /** Rebuilds a [[Reduce]] from the JSON produced by `toJson`. */
  def fromJson(json: JValue) = {
    // Each entry of "aggregate" is a single-field object: fieldName -> opName.
    val dataFields = for {
      JObject(df) <- json \ "aggregate"
      JField(name, JString(funcName)) <- df
      func = ReduceOp.withName(funcName): ReduceOp
    } yield name -> func
    // Missing "groupBy" yields an empty array via the RichJValue helpers.
    val groupFields = (json \ "groupBy" asArray) map (_ asString)
    apply(dataFields, groupFields)
  }
} | uralian/ignition | src/main/scala/com/ignition/frame/Reduce.scala | Scala | apache-2.0 | 6,019 |
// Setup-validation script: an anonymous Test (DSL entry point) issuing a GET
// against the "extractorapp/" service and expecting an HTTP 200 response.
new Test (args) {
  // `has` asserts on the symbol-keyed attribute of the DSL's result object.
  "extractor be available" inService "extractorapp/" withGet "" should (result => {
    result has ('responseCode -> 200)
  })
} | pgiraud/georchestra | security-proxy/src/integration-test/validate-setup/validateGet.scala | Scala | gpl-3.0 | 147 |
package foo
object Bar {
  val vvvv/*ref*/ = 2 // NOTE(review): /*ref*/ presumably anchors the find-references test on `vvvv` — fixture looks offset-sensitive, so keep everything before the marker byte-stable.
} | romanowski/scala-ide | org.scala-ide.sdt.core.tests/test-workspace/find-references/super/src/foo/Bar.scala | Scala | bsd-3-clause | 49 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.sql.{Date, Timestamp}
import org.apache.spark.sql.expressions.{MutableAggregationBuffer, UserDefinedAggregateFunction, Window}
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.CalendarInterval
/**
* Window function testing for DataFrame API.
*/
class DataFrameWindowFunctionsSuite extends QueryTest with SharedSQLContext {
  import testImplicits._

  // A window spec built partitionBy-first can back several expressions.
  test("reuse window partitionBy") {
    val df = Seq((1, "1"), (2, "2"), (1, "1"), (2, "2")).toDF("key", "value")
    val w = Window.partitionBy("key").orderBy("value")
    checkAnswer(
      df.select(
        lead("key", 1).over(w),
        lead("value", 1).over(w)),
      Row(1, "1") :: Row(2, "2") :: Row(null, null) :: Row(null, null) :: Nil)
  }
  // Same as above but the spec is built orderBy-first — must behave identically.
  test("reuse window orderBy") {
    val df = Seq((1, "1"), (2, "2"), (1, "1"), (2, "2")).toDF("key", "value")
    val w = Window.orderBy("value").partitionBy("key")
    checkAnswer(
      df.select(
        lead("key", 1).over(w),
        lead("value", 1).over(w)),
      Row(1, "1") :: Row(2, "2") :: Row(null, null) :: Row(null, null) :: Nil)
  }
  // Row frame from the partition start to the current row.
  test("Window.rowsBetween") {
    val df = Seq(("one", 1), ("two", 2)).toDF("key", "value")
    // Running (cumulative) sum
    checkAnswer(
      df.select('key, sum("value").over(
        Window.rowsBetween(Window.unboundedPreceding, Window.currentRow))),
      Row("one", 1) :: Row("two", 3) :: Nil
    )
  }
  // lead looks one row ahead within each partition; null past the end.
  test("lead") {
    val df = Seq((1, "1"), (2, "2"), (1, "1"), (2, "2")).toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        lead("value", 1).over(Window.partitionBy($"key").orderBy($"value"))),
      Row("1") :: Row(null) :: Row("2") :: Row(null) :: Nil)
  }
  // lag looks one row behind within each partition; null before the start.
  test("lag") {
    val df = Seq((1, "1"), (2, "2"), (1, "1"), (2, "2")).toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        lag("value", 1).over(Window.partitionBy($"key").orderBy($"value"))),
      Row(null) :: Row("1") :: Row(null) :: Row("2") :: Nil)
  }
  // The default value substitutes for rows beyond the partition edge.
  test("lead with default value") {
    val df = Seq((1, "1"), (1, "1"), (2, "2"), (1, "1"),
                 (2, "2"), (1, "1"), (2, "2")).toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        lead("value", 2, "n/a").over(Window.partitionBy("key").orderBy("value"))),
      Seq(Row("1"), Row("1"), Row("n/a"), Row("n/a"), Row("2"), Row("n/a"), Row("n/a")))
  }
  test("lag with default value") {
    val df = Seq((1, "1"), (1, "1"), (2, "2"), (1, "1"),
                 (2, "2"), (1, "1"), (2, "2")).toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        lag("value", 2, "n/a").over(Window.partitionBy($"key").orderBy($"value"))),
      Seq(Row("n/a"), Row("n/a"), Row("1"), Row("1"), Row("n/a"), Row("n/a"), Row("2")))
  }
  // All the ranking/aggregate window functions over one partitioned, ordered spec.
  test("rank functions in unspecific window") {
    val df = Seq((1, "1"), (2, "2"), (1, "2"), (2, "2")).toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        $"key",
        max("key").over(Window.partitionBy("value").orderBy("key")),
        min("key").over(Window.partitionBy("value").orderBy("key")),
        mean("key").over(Window.partitionBy("value").orderBy("key")),
        count("key").over(Window.partitionBy("value").orderBy("key")),
        sum("key").over(Window.partitionBy("value").orderBy("key")),
        ntile(2).over(Window.partitionBy("value").orderBy("key")),
        row_number().over(Window.partitionBy("value").orderBy("key")),
        dense_rank().over(Window.partitionBy("value").orderBy("key")),
        rank().over(Window.partitionBy("value").orderBy("key")),
        cume_dist().over(Window.partitionBy("value").orderBy("key")),
        percent_rank().over(Window.partitionBy("value").orderBy("key"))),
      Row(1, 1, 1, 1.0d, 1, 1, 1, 1, 1, 1, 1.0d, 0.0d) ::
        Row(1, 1, 1, 1.0d, 1, 1, 1, 1, 1, 1, 1.0d / 3.0d, 0.0d) ::
        Row(2, 2, 1, 5.0d / 3.0d, 3, 5, 1, 2, 2, 2, 1.0d, 0.5d) ::
        Row(2, 2, 1, 5.0d / 3.0d, 3, 5, 2, 3, 2, 2, 1.0d, 0.5d) :: Nil)
  }
  // row_number without an ORDER BY must be rejected at analysis time.
  test("window function should fail if order by clause is not specified") {
    val df = Seq((1, "1"), (2, "2"), (1, "2"), (2, "2")).toDF("key", "value")
    val e = intercept[AnalysisException](
      // Here we missed .orderBy("key")!
      df.select(row_number().over(Window.partitionBy("value"))).collect())
    assert(e.message.contains("requires window to be ordered"))
  }
  // Sliding row frame: one row before through two rows after the current row.
  test("aggregation and rows between") {
    val df = Seq((1, "1"), (2, "1"), (2, "2"), (1, "1"), (2, "2")).toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        avg("key").over(Window.partitionBy($"value").orderBy($"key").rowsBetween(-1, 2))),
      Seq(Row(4.0d / 3.0d), Row(4.0d / 3.0d), Row(3.0d / 2.0d), Row(2.0d), Row(2.0d)))
  }
  // Range frame: peers are determined by the ORDER BY value, not row position.
  test("aggregation and range between") {
    val df = Seq((1, "1"), (1, "1"), (3, "1"), (2, "2"), (2, "1"), (2, "2")).toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        avg("key").over(Window.partitionBy($"value").orderBy($"key").rangeBetween(-1, 1))),
      Seq(Row(4.0d / 3.0d), Row(4.0d / 3.0d), Row(7.0d / 4.0d), Row(5.0d / 2.0d),
        Row(2.0d), Row(2.0d)))
  }
  // Row-frame boundaries must fit in Int; a Long past Int.MaxValue is rejected.
  test("row between should accept integer values as boundary") {
    val df = Seq((1L, "1"), (1L, "1"), (2147483650L, "1"),
      (3L, "2"), (2L, "1"), (2147483650L, "2"))
      .toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        $"key",
        count("key").over(
          Window.partitionBy($"value").orderBy($"key").rowsBetween(0, 2147483647))),
      Seq(Row(1, 3), Row(1, 4), Row(2, 2), Row(3, 2), Row(2147483650L, 1), Row(2147483650L, 1))
    )
    val e = intercept[AnalysisException](
      df.select(
        $"key",
        count("key").over(
          Window.partitionBy($"value").orderBy($"key").rowsBetween(0, 2147483648L))))
    assert(e.message.contains("Boundary end is not a valid integer: 2147483648"))
  }
  // Range-frame boundaries, unlike row frames, may be Long-valued.
  test("range between should accept int/long values as boundary") {
    val df = Seq((1L, "1"), (1L, "1"), (2147483650L, "1"),
      (3L, "2"), (2L, "1"), (2147483650L, "2"))
      .toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        $"key",
        count("key").over(
          Window.partitionBy($"value").orderBy($"key").rangeBetween(0, 2147483648L))),
      Seq(Row(1, 3), Row(1, 3), Row(2, 2), Row(3, 2), Row(2147483650L, 1), Row(2147483650L, 1))
    )
    checkAnswer(
      df.select(
        $"key",
        count("key").over(
          Window.partitionBy($"value").orderBy($"key").rangeBetween(-2147483649L, 0))),
      Seq(Row(1, 2), Row(1, 2), Row(2, 3), Row(2147483650L, 2), Row(2147483650L, 4), Row(3, 1))
    )
    // Date-ordered range frame with literal day offsets.
    def dt(date: String): Date = Date.valueOf(date)
    val df2 = Seq((dt("2017-08-01"), "1"), (dt("2017-08-01"), "1"), (dt("2020-12-31"), "1"),
      (dt("2017-08-03"), "2"), (dt("2017-08-02"), "1"), (dt("2020-12-31"), "2"))
      .toDF("key", "value")
    checkAnswer(
      df2.select(
        $"key",
        count("key").over(
          Window.partitionBy($"value").orderBy($"key").rangeBetween(lit(0), lit(2)))),
      Seq(Row(dt("2017-08-01"), 3), Row(dt("2017-08-01"), 3), Row(dt("2020-12-31"), 1),
        Row(dt("2017-08-03"), 1), Row(dt("2017-08-02"), 1), Row(dt("2020-12-31"), 1))
    )
  }
  // Double-valued range boundaries via lit(...).
  test("range between should accept double values as boundary") {
    val df = Seq((1.0D, "1"), (1.0D, "1"), (100.001D, "1"),
      (3.3D, "2"), (2.02D, "1"), (100.001D, "2"))
      .toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        $"key",
        count("key").over(
          Window.partitionBy($"value").orderBy($"key")
            .rangeBetween(currentRow, lit(2.5D)))),
      Seq(Row(1.0, 3), Row(1.0, 3), Row(100.001, 1), Row(3.3, 1), Row(2.02, 1), Row(100.001, 1))
    )
  }
  // CalendarInterval boundaries over a timestamp-ordered range frame.
  test("range between should accept interval values as boundary") {
    def ts(timestamp: Long): Timestamp = new Timestamp(timestamp * 1000)
    val df = Seq((ts(1501545600), "1"), (ts(1501545600), "1"), (ts(1609372800), "1"),
      (ts(1503000000), "2"), (ts(1502000000), "1"), (ts(1609372800), "2"))
      .toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        $"key",
        count("key").over(
          Window.partitionBy($"value").orderBy($"key")
            .rangeBetween(currentRow,
              lit(CalendarInterval.fromString("interval 23 days 4 hours"))))),
      Seq(Row(ts(1501545600), 3), Row(ts(1501545600), 3), Row(ts(1609372800), 1),
        Row(ts(1503000000), 1), Row(ts(1502000000), 1), Row(ts(1609372800), 1))
    )
  }
  // last() over frames unbounded in either direction, plus a sliding frame.
  test("aggregation and rows between with unbounded") {
    val df = Seq((1, "1"), (2, "2"), (2, "3"), (1, "3"), (3, "2"), (4, "3")).toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        $"key",
        last("key").over(
          Window.partitionBy($"value").orderBy($"key")
            .rowsBetween(Window.currentRow, Window.unboundedFollowing)),
        last("key").over(
          Window.partitionBy($"value").orderBy($"key")
            .rowsBetween(Window.unboundedPreceding, Window.currentRow)),
        last("key").over(Window.partitionBy($"value").orderBy($"key").rowsBetween(-1, 1))),
      Seq(Row(1, 1, 1, 1), Row(2, 3, 2, 3), Row(3, 3, 3, 3), Row(1, 4, 1, 2), Row(2, 4, 2, 4),
        Row(4, 4, 4, 4)))
  }
  // Range frames with Long.MinValue / Long.MaxValue as "unbounded" markers.
  test("aggregation and range between with unbounded") {
    val df = Seq((5, "1"), (5, "2"), (4, "2"), (6, "2"), (3, "1"), (2, "2")).toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select(
        $"key",
        last("value").over(
          Window.partitionBy($"value").orderBy($"key").rangeBetween(-2, -1))
          .equalTo("2")
          .as("last_v"),
        avg("key").over(Window.partitionBy("value").orderBy("key").rangeBetween(Long.MinValue, 1))
          .as("avg_key1"),
        avg("key").over(Window.partitionBy("value").orderBy("key").rangeBetween(0, Long.MaxValue))
          .as("avg_key2"),
        avg("key").over(Window.partitionBy("value").orderBy("key").rangeBetween(-1, 0))
          .as("avg_key3")
      ),
      Seq(Row(3, null, 3.0d, 4.0d, 3.0d),
        Row(5, false, 4.0d, 5.0d, 5.0d),
        Row(2, null, 2.0d, 17.0d / 4.0d, 2.0d),
        Row(4, true, 11.0d / 3.0d, 5.0d, 4.0d),
        Row(5, true, 17.0d / 4.0d, 11.0d / 2.0d, 4.5d),
        Row(6, true, 17.0d / 4.0d, 6.0d, 11.0d / 2.0d)))
  }
  // Descending ORDER BY with a sliding range frame (offsets follow sort order).
  test("reverse sliding range frame") {
    val df = Seq(
      (1, "Thin", "Cell Phone", 6000),
      (2, "Normal", "Tablet", 1500),
      (3, "Mini", "Tablet", 5500),
      (4, "Ultra thin", "Cell Phone", 5500),
      (5, "Very thin", "Cell Phone", 6000),
      (6, "Big", "Tablet", 2500),
      (7, "Bendable", "Cell Phone", 3000),
      (8, "Foldable", "Cell Phone", 3000),
      (9, "Pro", "Tablet", 4500),
      (10, "Pro2", "Tablet", 6500)).
      toDF("id", "product", "category", "revenue")
    val window = Window.
      partitionBy($"category").
      orderBy($"revenue".desc).
      rangeBetween(-2000L, 1000L)
    checkAnswer(
      df.select(
        $"id",
        avg($"revenue").over(window).cast("int")),
      Row(1, 5833) :: Row(2, 2000) :: Row(3, 5500) ::
        Row(4, 5833) :: Row(5, 5833) :: Row(6, 2833) ::
        Row(7, 3000) :: Row(8, 3000) :: Row(9, 5500) ::
        Row(10, 6000) :: Nil)
  }
  // This is here to illustrate the fact that reverse order also reverses offsets.
  test("reverse unbounded range frame") {
    val df = Seq(1, 2, 4, 3, 2, 1).
      map(Tuple1.apply).
      toDF("value")
    val window = Window.orderBy($"value".desc)
    checkAnswer(
      df.select(
        $"value",
        sum($"value").over(window.rangeBetween(Long.MinValue, 1)),
        sum($"value").over(window.rangeBetween(1, Long.MaxValue))),
      Row(1, 13, null) :: Row(2, 13, 2) :: Row(4, 7, 9) ::
        Row(3, 11, 6) :: Row(2, 13, 2) :: Row(1, 13, null) :: Nil)
  }
  // Statistical aggregates evaluated as window functions (no ORDER BY: whole partition).
  test("statistical functions") {
    val df = Seq(("a", 1), ("a", 1), ("a", 2), ("a", 2), ("b", 4), ("b", 3), ("b", 2)).
      toDF("key", "value")
    val window = Window.partitionBy($"key")
    checkAnswer(
      df.select(
        $"key",
        var_pop($"value").over(window),
        var_samp($"value").over(window),
        approx_count_distinct($"value").over(window)),
      Seq.fill(4)(Row("a", 1.0d / 4.0d, 1.0d / 3.0d, 2))
        ++ Seq.fill(3)(Row("b", 2.0d / 3.0d, 1.0d, 3)))
  }
  // A window function applied on top of a grouped aggregation result.
  test("window function with aggregates") {
    val df = Seq(("a", 1), ("a", 1), ("a", 2), ("a", 2), ("b", 4), ("b", 3), ("b", 2)).
      toDF("key", "value")
    val window = Window.orderBy()
    checkAnswer(
      df.groupBy($"key")
        .agg(
          sum($"value"),
          sum(sum($"value")).over(window) - sum($"value")),
      Seq(Row("a", 6, 9), Row("b", 9, 6)))
  }
  // over() with a completely empty spec aggregates over the whole dataset.
  test("SPARK-16195 empty over spec") {
    val df = Seq(("a", 1), ("a", 1), ("a", 2), ("b", 2)).
      toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    checkAnswer(
      df.select($"key", $"value", sum($"value").over(), avg($"value").over()),
      Seq(Row("a", 1, 6, 1.5), Row("a", 1, 6, 1.5), Row("a", 2, 6, 1.5), Row("b", 2, 6, 1.5)))
    checkAnswer(
      sql("select key, value, sum(value) over(), avg(value) over() from window_table"),
      Seq(Row("a", 1, 6, 1.5), Row("a", 1, 6, 1.5), Row("a", 2, 6, 1.5), Row("b", 2, 6, 1.5)))
  }
  // A user-defined aggregate (sum of a*b) used as a window function over a
  // cumulative range frame.
  test("window function with udaf") {
    val udaf = new UserDefinedAggregateFunction {
      def inputSchema: StructType = new StructType()
        .add("a", LongType)
        .add("b", LongType)
      def bufferSchema: StructType = new StructType()
        .add("product", LongType)
      def dataType: DataType = LongType
      def deterministic: Boolean = true
      def initialize(buffer: MutableAggregationBuffer): Unit = {
        buffer(0) = 0L
      }
      def update(buffer: MutableAggregationBuffer, input: Row): Unit = {
        // Null-safe: rows with a null operand do not contribute.
        if (!(input.isNullAt(0) || input.isNullAt(1))) {
          buffer(0) = buffer.getLong(0) + input.getLong(0) * input.getLong(1)
        }
      }
      def merge(buffer1: MutableAggregationBuffer, buffer2: Row): Unit = {
        buffer1(0) = buffer1.getLong(0) + buffer2.getLong(0)
      }
      def evaluate(buffer: Row): Any =
        buffer.getLong(0)
    }
    val df = Seq(
      ("a", 1, 1),
      ("a", 1, 5),
      ("a", 2, 10),
      ("a", 2, -1),
      ("b", 4, 7),
      ("b", 3, 8),
      ("b", 2, 4))
      .toDF("key", "a", "b")
    val window = Window.partitionBy($"key").orderBy($"a").rangeBetween(Long.MinValue, 0L)
    checkAnswer(
      df.select(
        $"key",
        $"a",
        $"b",
        udaf($"a", $"b").over(window)),
      Seq(
        Row("a", 1, 1, 6),
        Row("a", 1, 5, 6),
        Row("a", 2, 10, 24),
        Row("a", 2, -1, 24),
        Row("b", 4, 7, 60),
        Row("b", 3, 8, 32),
        Row("b", 2, 4, 8)))
  }
  // Aggregating a null literal yields null for every row.
  test("null inputs") {
    val df = Seq(("a", 1), ("a", 1), ("a", 2), ("a", 2), ("b", 4), ("b", 3), ("b", 2))
      .toDF("key", "value")
    val window = Window.orderBy()
    checkAnswer(
      df.select(
        $"key",
        $"value",
        avg(lit(null)).over(window),
        sum(lit(null)).over(window)),
      Seq(
        Row("a", 1, null, null),
        Row("a", 1, null, null),
        Row("a", 2, null, null),
        Row("a", 2, null, null),
        Row("b", 4, null, null),
        Row("b", 3, null, null),
        Row("b", 2, null, null)))
  }
  // first/last with the ignoreNulls flag skip null values within the frame.
  test("last/first with ignoreNulls") {
    val nullStr: String = null
    val df = Seq(
      ("a", 0, nullStr),
      ("a", 1, "x"),
      ("a", 2, "y"),
      ("a", 3, "z"),
      ("a", 4, nullStr),
      ("b", 1, nullStr),
      ("b", 2, nullStr)).
      toDF("key", "order", "value")
    val window = Window.partitionBy($"key").orderBy($"order")
    checkAnswer(
      df.select(
        $"key",
        $"order",
        first($"value").over(window),
        first($"value", ignoreNulls = false).over(window),
        first($"value", ignoreNulls = true).over(window),
        last($"value").over(window),
        last($"value", ignoreNulls = false).over(window),
        last($"value", ignoreNulls = true).over(window)),
      Seq(
        Row("a", 0, null, null, null, null, null, null),
        Row("a", 1, null, null, "x", "x", "x", "x"),
        Row("a", 2, null, null, "x", "y", "y", "y"),
        Row("a", 3, null, null, "x", "z", "z", "z"),
        Row("a", 4, null, null, "x", null, null, "z"),
        Row("b", 1, null, null, null, null, null, null),
        Row("b", 2, null, null, null, null, null, null)))
  }
  // Partitioning on struct-field aliases must resolve like regular attributes.
  test("SPARK-12989 ExtractWindowExpressions treats alias as regular attribute") {
    val src = Seq((0, 3, 5)).toDF("a", "b", "c")
      .withColumn("Data", struct("a", "b"))
      .drop("a")
      .drop("b")
    val winSpec = Window.partitionBy("Data.a", "Data.b").orderBy($"c".desc)
    val df = src.select($"*", max("c").over(winSpec) as "max")
    checkAnswer(df, Row(5, Row(0, 3), 5))
  }
  // Predicate pushdown below the window must not change unbounded-frame results.
  test("aggregation and rows between with unbounded + predicate pushdown") {
    val df = Seq((1, "1"), (2, "2"), (2, "3"), (1, "3"), (3, "2"), (4, "3")).toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    val selectList = Seq($"key", $"value",
      last("key").over(
        Window.partitionBy($"value").orderBy($"key").rowsBetween(0, Long.MaxValue)),
      last("key").over(
        Window.partitionBy($"value").orderBy($"key").rowsBetween(Long.MinValue, 0)),
      last("key").over(Window.partitionBy($"value").orderBy($"key").rowsBetween(-1, 1)))
    checkAnswer(
      df.select(selectList: _*).where($"value" < "3"),
      Seq(Row(1, "1", 1, 1, 1), Row(2, "2", 3, 2, 3), Row(3, "2", 3, 3, 3)))
  }
  test("aggregation and range between with unbounded + predicate pushdown") {
    val df = Seq((5, "1"), (5, "2"), (4, "2"), (6, "2"), (3, "1"), (2, "2")).toDF("key", "value")
    df.createOrReplaceTempView("window_table")
    val selectList = Seq($"key", $"value",
      last("value").over(
        Window.partitionBy($"value").orderBy($"key").rangeBetween(-2, -1)).equalTo("2")
        .as("last_v"),
      avg("key").over(Window.partitionBy("value").orderBy("key").rangeBetween(Long.MinValue, 1))
        .as("avg_key1"),
      avg("key").over(Window.partitionBy("value").orderBy("key").rangeBetween(0, Long.MaxValue))
        .as("avg_key2"),
      avg("key").over(Window.partitionBy("value").orderBy("key").rangeBetween(-1, 1))
        .as("avg_key3"))
    checkAnswer(
      df.select(selectList: _*).where($"value" < 2),
      Seq(Row(3, "1", null, 3.0, 4.0, 3.0), Row(5, "1", false, 4.0, 5.0, 5.0)))
  }
  // Regression: complex (struct/array) buffers must survive the spill path.
  test("SPARK-21258: complex object in combination with spilling") {
    // Make sure we trigger the spilling path.
    withSQLConf(SQLConf.WINDOW_EXEC_BUFFER_SPILL_THRESHOLD.key -> "17") {
      val sampleSchema = new StructType().
        add("f0", StringType).
        add("f1", LongType).
        add("f2", ArrayType(new StructType().
          add("f20", StringType))).
        add("f3", ArrayType(new StructType().
          add("f30", StringType)))
      val w0 = Window.partitionBy("f0").orderBy("f1")
      val w1 = w0.rowsBetween(Long.MinValue, Long.MaxValue)
      val c0 = first(struct($"f2", $"f3")).over(w0) as "c0"
      val c1 = last(struct($"f2", $"f3")).over(w1) as "c1"
      // 18 rows > threshold 17, so the buffer is forced to spill.
      val input =
        """{"f1":1497820153720,"f2":[{"f20":"x","f21":0}],"f3":[{"f30":"x","f31":0}]}
          |{"f1":1497802179638}
          |{"f1":1497802189347}
          |{"f1":1497802189593}
          |{"f1":1497802189597}
          |{"f1":1497802189599}
          |{"f1":1497802192103}
          |{"f1":1497802193414}
          |{"f1":1497802193577}
          |{"f1":1497802193709}
          |{"f1":1497802202883}
          |{"f1":1497802203006}
          |{"f1":1497802203743}
          |{"f1":1497802203834}
          |{"f1":1497802203887}
          |{"f1":1497802203893}
          |{"f1":1497802203976}
          |{"f1":1497820168098}
          |""".stripMargin.split("\\n").toSeq
      import testImplicits._
      // Only execution matters here: the query must run without throwing.
      spark.read.schema(sampleSchema).json(input.toDS()).select(c0, c1).foreach { _ => () }
    }
  }
}
| minixalpha/spark | sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala | Scala | apache-2.0 | 21,388 |
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.api
import slamdata.Predef._
import quasar.{Data, DataCodec, Planner, SemanticError}
import quasar.RenderTree.ops._
import quasar.connector.EnvironmentError
import quasar.fp._
import quasar.fp.ski._
import quasar.fs._
import quasar.fs.mount.{Mounting, MountingError}
import quasar.fs.mount.module.Module
import quasar.sql._
import argonaut._, Argonaut._
import argonaut.ArgonautScalaz._
import org.http4s._, Status._
import pathy.Path._
import pathy.argonaut.PosixCodecJson._
import scalaz.{NonEmptyList, Scalaz}, Scalaz._
/** Type class of values that can be rendered as an [[ApiError]] response. */
abstract class ToApiError[A] {
  def toApiError(a: A): ApiError
}
object ToApiError extends ToApiErrorInstances {
  /** Summons the implicit instance for `A`. */
  def apply[A](implicit A: ToApiError[A]): ToApiError[A] = A
  /** Builds an instance from a plain conversion function. */
  def error[A](f: A => ApiError): ToApiError[A] =
    new ToApiError[A] { def toApiError(a: A) = f(a) }
  object ops {
    // Enrichment providing `a.toApiError` syntax for any `A: ToApiError`.
    final implicit class ToApiErrorOps[A](val a: A) extends scala.AnyVal {
      def toApiError(implicit A: ToApiError[A]): ApiError =
        A.toApiError(a)
    }
  }
}
sealed abstract class ToApiErrorInstances extends ToApiErrorInstances0 {
import ToApiError._, ops._
import ApiError._
import ReadFile.ReadHandle
import WriteFile.WriteHandle
import QueryFile.ResultHandle
  // Backend environment failures (connectivity, credentials, version
  // mismatch) are server-side problems, so all map to 500 responses.
  implicit def environmentErrorQResponse: ToApiError[EnvironmentError] = {
    import EnvironmentError._
    error {
      case ConnectionFailed(msg) =>
        fromMsg_(
          InternalServerError withReason "Connection to backend failed.",
          s"Connection failed: $msg.")
      case InvalidCredentials(msg) =>
        fromMsg_(
          InternalServerError withReason "Invalid backend credentials.",
          s"Invalid credentials: $msg")
      case UnsupportedVersion(name, ver) =>
        apiError(
          InternalServerError withReason s"Unsupported $name version.",
          "backendName" := name,
          "version" := ver)
    }
  }
  // Filesystem-level failures. `:+` appends a JSON field to the error
  // payload; `:?+` appends it only when the optional value is present.
  implicit def fileSystemErrorResponse: ToApiError[FileSystemError] = {
    import FileSystemError._
    error {
      case ExecutionFailed(lp, reason, det, cause) =>
        fromMsg(
          InternalServerError withReason "Failed to execute SQL^2 query.",
          reason,
          det.toList : _*) :+
          ("logicalplan" := lp.render) :?+
          ("cause" :?= cause.map(_.shows))
      // Path and planner errors delegate to their own instances.
      case PathErr(e) =>
        e.toApiError
      case PlanningFailed(lp, e) =>
        e.toApiError :+ ("logicalplan" := lp.render)
      case QScriptPlanningFailed(e) =>
        e.toApiError
      case UnsupportedOperation(reason) =>
        fromMsg(
          BadRequest withReason "Unsupported Operation",
          reason)
      // Stale/unknown handles indicate server-side bookkeeping problems.
      case UnknownReadHandle(ReadHandle(path, id)) =>
        apiError(
          InternalServerError withReason "Unknown read handle.",
          "path" := path,
          "handleId" := id)
      case UnknownWriteHandle(WriteHandle(path, id)) =>
        apiError(
          InternalServerError withReason "Unknown write handle.",
          "path" := path,
          "handleId" := id)
      case UnknownResultHandle(ResultHandle(id)) =>
        apiError(
          InternalServerError withReason "Unknown result handle.",
          "handleId" := id)
      case ReadFailed(data, reason) =>
        fromMsg_(
          InternalServerError withReason "Failed to read data.",
          s"Failed to read data: $reason."
        ) :+ ("data" := data)
      case PartialWrite(numFailed) =>
        apiError(
          InternalServerError withReason "Failed to write some values.",
          "failedCount" := numFailed)
      case WriteFailed(data, reason) =>
        fromMsg_(
          InternalServerError withReason "Failed to write data.",
          s"Failed to write data: $reason."
        ) :?+ ("data" :?= encodeData(data))
    }
  }
  // Module errors either wrap filesystem/semantic errors (delegated) or
  // report missing arguments for a function invocation as a 400.
  implicit def moduleErrorToApiError: ToApiError[Module.Error] =
    error {
      case Module.Error.FSError(fsErr) => fsErr.toApiError
      case Module.Error.SemErrors(semErrs) => semErrs.toApiError
      case Module.Error.ArgumentsMissing(missing) =>
        apiError(
          BadRequest withReason "Arguments missing to function call",
          "missing arguments" := missing)
    }
implicit def physicalErrorToApiError: ToApiError[PhysicalError] =
error(err => fromMsg_(
InternalServerError withReason "Physical filesystem error.",
err.shows))
  // Mount failures: an invalid target path is a 409 (conflict with the
  // existing tree); bad configuration is the client's fault (400); an
  // invalid mount is a server-side 500. Remaining path/environment errors
  // delegate to their own instances.
  implicit def mountingErrorToApiError: ToApiError[MountingError] = {
    import MountingError._, PathError.InvalidPath
    error {
      case PError(InvalidPath(p, rsn)) =>
        fromMsg(
          Conflict withReason "Unable to mount at path.",
          s"Unable to mount at ${posixCodec.printPath(p)} because $rsn",
          "path" := p)
      // NOTE(review): `cfg` is bound but never used in the payload — the
      // offending config is not echoed back; confirm this is intentional.
      case InvalidConfig(cfg, rsns) =>
        apiError(
          BadRequest withReason "Invalid mount configuration.",
          "reasons" := rsns)
      case InvalidMount(_, e) =>
        fromMsg_(InternalServerError withReason "Invalid mount.", e)
      case PError(e) => e.toApiError
      case EError(e) => e.toApiError
    }
  }
  // A path of the wrong kind (file where a directory was expected, or vice
  // versa); `refineType` distinguishes the two cases.
  implicit def mountingPathTypeErrorToApiError: ToApiError[Mounting.PathTypeMismatch] =
    error { err =>
      val expectedType = refineType(err.path).fold(κ("file"), κ("directory"))
      fromMsg(
        BadRequest withReason "Incorrect path type.",
        s"Incorrect path type, expected a $expectedType.",
        "path" := err.path)
    }
  // Path errors map directly onto the conventional HTTP statuses:
  // exists → 409, not found → 404, invalid → 400.
  implicit def pathErrorToApiError: ToApiError[PathError] = {
    import PathError._
    error {
      case PathExists(path) =>
        apiError(Conflict withReason "Path exists.", "path" := path)
      case PathNotFound(path) =>
        apiError(NotFound withReason "Path not found.", "path" := path)
      case InvalidPath(path, reason) =>
        fromMsg(
          BadRequest withReason "Invalid path.",
          s"Invalid path: $reason.",
          "path" := path)
    }
  }
  // SQL^2 parse failures are always the client's fault (400); path-related
  // parse errors delegate to the PathError instance.
  implicit def parsingErrorToApiError: ToApiError[ParsingError] =
    error {
      case GenericParsingError(msg) =>
        fromMsg_(BadRequest withReason "Malformed SQL^2 query.", msg)
      case ParsingPathError(e) =>
        e.toApiError
    }
  // Planner failures: limitations of the backend planner are reported as
  // 500s, while errors caused by the submitted query (bad arguments,
  // malformed ObjectIds, compilation errors, missing files) are 400s.
  // `err.message` supplies the human-readable detail in each case.
  implicit def plannerErrorToApiError: ToApiError[Planner.PlannerError] = {
    import Planner._
    error(err => err match {
      case NonRepresentableData(data) =>
        fromMsg_(
          InternalServerError withReason "Unsupported constant.",
          err.message
        ) :?+ ("data" :?= encodeData(data))
      case NonRepresentableEJson(_) =>
        fromMsg_(
          InternalServerError withReason "Unsupported constant.",
          err.message)
      case UnsupportedFunction(fn, _) =>
        fromMsg(
          InternalServerError withReason "Unsupported function.",
          err.message,
          "functionName" := fn.shows)
      case PlanPathError(e) =>
        e.toApiError
      case UnsupportedJoinCondition(cond) =>
        fromMsg(
          InternalServerError withReason "Unsupported join condition.",
          err.message,
          "joinCondition" := cond.render)
      case UnsupportedPlan(lp, hint) =>
        fromMsg(
          InternalServerError withReason "Unsupported query plan.",
          err.message,
          "term" := lp.void.render.shows
        ) :?+ ("reason" :?= hint)
      case FuncApply(fn, exp, act) =>
        fromMsg(
          BadRequest withReason "Illegal function argument.",
          err.message,
          "functionName" := fn.shows,
          "expectedArg" := exp,
          "actualArg" := act)
      case ObjectIdFormatError(oid) =>
        fromMsg(
          BadRequest withReason "Invalid ObjectId.",
          err.message,
          "objectId" := oid)
      case CompilationFailed(semErrs) =>
        fromMsg(
          BadRequest withReason "Compilation failed",
          err.message,
          "compilation errors" := semErrs.map(_.toApiError)
        )
      case NonRepresentableInJS(value) =>
        fromMsg(
          InternalServerError withReason "Unable to compile to JavaScript.",
          err.message,
          "value" := value)
      case UnsupportedJS(value) =>
        fromMsg(
          InternalServerError withReason "Unsupported JavaScript in query plan.",
          err.message,
          "value" := value)
      case InternalError(msg, cause) =>
        fromMsg(
          InternalServerError withReason "Failed to plan query.",
          msg
        ) :?+ ("cause" :?= cause.map(_.toString))
      case UnboundVariable(v) =>
        fromMsg_(
          InternalServerError withReason "Unbound variable.", v.toString)
      case NoFilesFound(dirs) =>
        fromMsg(
          BadRequest withReason "No files to read from.",
          err.message,
          "dirs" := dirs.map(posixCodec.printPath))
    })
  }
  // Semantic analysis failures are almost always client errors (400), each
  // annotated with the relevant fragment of context (names, types, SQL);
  // anything unmatched falls back to a generic 500 "Compilation error.".
  implicit def semanticErrorToApiError: ToApiError[SemanticError] = {
    import SemanticError._
    error(err => err match {
      case GenericError(msg) =>
        fromMsg_(BadRequest withReason "Error in query.", msg)
      case DomainError(data, _) =>
        fromMsg_(
          BadRequest withReason "Illegal argument.",
          err.message
        ) :?+ ("data" :?= encodeData(data))
      case FunctionNotFound(name) =>
        fromMsg(
          BadRequest withReason "Unknown function.",
          err.message,
          "functionName" := name)
      case TypeError(exp, act, _) =>
        fromMsg(
          BadRequest withReason "Type error.",
          err.message,
          "expectedType" := exp,
          "actualType" := act)
      case VariableParseError(vname, vval, cause) =>
        fromMsg(
          BadRequest withReason "Malformed query variable.",
          err.message,
          "varName" := vname.value,
          "varValue" := vval.value,
          "cause" := cause.toApiError)
      case UnboundVariable(vname) =>
        fromMsg(
          BadRequest withReason "Unbound variable.",
          err.message,
          "varName" := vname.value)
      case DuplicateRelationName(name) =>
        fromMsg(
          BadRequest withReason "Duplicate relation name.",
          err.message,
          "relName" := name)
      case NoTableDefined(node) =>
        fromMsg(
          BadRequest withReason "No table defined.",
          err.message,
          "sql" := node.render)
      case MissingField(name) =>
        fromMsg(
          BadRequest withReason "Missing field.",
          err.message,
          "fieldName" := name)
      case DuplicateAlias(name) =>
        fromMsg(
          BadRequest withReason "Duplicate alias name.",
          err.message,
          "name" := name)
      case MissingIndex(i) =>
        fromMsg(
          BadRequest withReason "No element at index.",
          err.message,
          "index" := i)
      case WrongArgumentCount(fn, exp, act) =>
        fromMsg(
          BadRequest withReason "Wrong number of arguments to function.",
          err.message,
          "functionName" := fn,
          "expectedArgs" := exp,
          "actualArgs" := act)
      case AmbiguousReference(expr, _) =>
        fromMsg(
          BadRequest withReason "Ambiguous table reference.",
          err.message,
          "sql" := expr.render)
      case DateFormatError(fn, str, _) =>
        fromMsg(
          BadRequest withReason "Malformed date/time string.",
          err.message,
          "functionName" := fn.shows,
          "input" := str)
      case e@AmbiguousFunctionInvoke(name, funcs) =>
        fromMsg(
          BadRequest withReason "Ambiguous function call",
          err.message,
          "invoke" := name.value,
          "ambiguous functions" := e.fullyQualifiedFuncs)
      case other =>
        fromMsg_(
          InternalServerError withReason "Compilation error.",
          other.message)
    })
  }
////
  // Precise-codec JSON encoding of a Data value; None when the value has
  // no representation under that codec.
  private def encodeData(data: Data): Option[Json] =
    DataCodec.Precise.encode(data)
}
/** Lower-priority instances: http4s message-decoding failures and the
  * aggregation of multiple errors into a single response.
  */
sealed abstract class ToApiErrorInstances0 {
  import ToApiError._, ops._
  import ApiError._
  // Request decoding problems are always the client's fault; missing or
  // mismatched media types additionally list the supported types.
  implicit def messageFailureToApiError[A <: MessageFailure]: ToApiError[A] =
    error {
      case err @ InvalidMessageBodyFailure(_, _) =>
        fromMsg_(
          BadRequest withReason "Invalid request body.",
          err.message)
      case ParseFailure(sanitized, _) =>
        fromMsg_(
          BadRequest withReason "Malformed request.",
          sanitized)
      case err @ MalformedMessageBodyFailure(_, _) =>
        fromMsg_(
          BadRequest withReason "Malformed request body.",
          err.message)
      case MediaTypeMissing(expectedMediaTypes) =>
        apiError(
          BadRequest withReason "Media type missing.",
          "supportedMediaTypes" := expectedMediaTypes.map(_.renderString))
      case MediaTypeMismatch(messageType, expectedMediaTypes) =>
        apiError(
          UnsupportedMediaType,
          "requestedMediaType" := messageType.renderString,
          "supportedMediaTypes" := expectedMediaTypes.map(_.renderString))
    }
  // A singleton list collapses to its sole error; otherwise the head's
  // status code is reused (reason "Multiple errors") and every error is
  // listed in the payload.
  implicit def nonEmptyListToApiError[A: ToApiError]: ToApiError[NonEmptyList[A]] =
    error { nel =>
      val herr = nel.head.toApiError
      if (nel.size ≟ 1) herr
      else {
        val stat = (Status.fromInt(herr.status.code) getOrElse herr.status) withReason "Multiple errors"
        apiError(stat, "errors" := nel.map(_.toApiError))
      }
    }
}
| drostron/quasar | web/src/main/scala/quasar/api/ToApiError.scala | Scala | apache-2.0 | 13,912 |
package sbn.core.distributions
import sbn.core.data.attributes.FiniteStateSpace
import sbn.core.utils.Utils
import sbn.core.variables.{Assignment, Assignments, Variable}
/**
* This class abstracts the distributions generated from a set of multinomial parents (i.e., [[Multinomial_MultinomialParents]]
* or [[Gaussian_MultinomialParents]]). All of them have a similar form, and to reduce the repeated code this class
* implements some of their methods.
*/
abstract class BaseDistribution_MultinomialParents(variable: Variable,
                                                   multinomialParents: Set[Variable],
                                                   parameterizedConditionalDistributions: Map[Assignments, UnivariateDistribution]) extends ConditionalDistribution {
  /** @inheritdoc */
  override def numberOfParameters: Int = this.parameterizedConditionalDistributions.values.map(_.numberOfParameters).sum
  /** @inheritdoc */
  override def conditioningVariables: Set[Variable] = this.multinomialParents
  /** @inheritdoc
    *
    * Looks up the univariate distribution keyed by the given parent
    * assignments.
    *
    * @throws IllegalArgumentException if no distribution exists for the
    *                                  given assignments.
    */
  override def getUnivariateDistribution(assignments: Assignments): UnivariateDistribution =
    // Idiomatic Map lookup instead of catching NoSuchElementException from
    // Map.apply: the same IllegalArgumentException is thrown on a miss,
    // without exception-based control flow.
    parameterizedConditionalDistributions.getOrElse(assignments,
      throw new IllegalArgumentException("Invalid assignments for the distribution"))
  /** @inheritdoc */
  override def conditionalProbability(assignments: Assignments, x: Double): Double =
    Math.exp(logConditionalProbability(assignments, x))
  /** @inheritdoc */
  override def logConditionalProbability(assignments: Assignments, x: Double): Double =
    getUnivariateDistribution(assignments).logProbability(x)
  /** @inheritdoc
    *
    * Probability mass in the interval (x0, x1].
    *
    * @throws IllegalArgumentException if x0 > x1.
    */
  override def conditionalProbability(assignments: Assignments, x0: Double, x1: Double): Double = {
    if(x0 > x1) throw new IllegalArgumentException("Lower endpoint above upper endpoint (x0 > x1)")
    cumulativeConditionalProbability(assignments, x1) - cumulativeConditionalProbability(assignments, x0)
  }
  /** @inheritdoc */
  override def cumulativeConditionalProbability(assignments: Assignments, x: Double): Double =
    getUnivariateDistribution(assignments).cumulativeProbability(x)
  /** @inheritdoc */
  override def conditionalDensity(assignments: Assignments, x: Double): Double =
    getUnivariateDistribution(assignments).density(x)
  /** @inheritdoc */
  override def logConditionalDensity(assignments: Assignments, x: Double): Double =
    getUnivariateDistribution(assignments).logDensity(x)
}
object BaseDistribution_MultinomialParents {
  /**
    * Auxiliary method that makes use of [[Utils.cartesianProduct]] to generate
    * all possible combinations of the parents' state values, one
    * [[Assignments]] per combination.
    *
    * @param parents the multinomial parents of the variable.
    * @throws IllegalArgumentException if the parents state space is not finite.
    * @return the sequence of possible parent assignments that will be used to create the internal distributions.
    */
  @throws[IllegalArgumentException]
  def generateAssignmentCombinations(parents: Set[Variable]): Seq[Assignments] = {
    // One vector of state indexes per parent; only finite state spaces are
    // meaningful here, anything else is rejected.
    val stateSequences: Seq[Vector[Int]] = parents.toSeq.map(v => v.attribute.stateSpaceType match {
      case finite: FiniteStateSpace => finite.stateIndexes
      case _ => throw new IllegalArgumentException("Parents state space must be finite")
    })
    // First we obtain the cartesian product (all the combinations) of the parents state space values
    Utils.cartesianProduct(stateSequences)
    // and then we zip each state value with its parent variable reference
      .map(stateCombination => parents.zip(stateCombination))
    // After that we create a Seq[Set[Assignment]] objects
      .map(combination => combination. map(variableAndValue => Assignment(variableAndValue._1, variableAndValue._2)))
    // Finally we generate the Seq[Assignments] object that we return
      .map(x => Assignments(x))
  }
}
| fernandoj92/sbn | core/src/main/scala/sbn/core/distributions/BaseDistribution_MultinomialParents.scala | Scala | apache-2.0 | 3,839 |
/*
* Copyright (c) 2014-2019 Israel Herraiz <isra@herraiz.org>
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use, copy,
* modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
// ---------------------
// Test for example 4.01
// ---------------------
package chap04
// For this exercise, the implementation of all the functions can be
// found in src/main/scala/errorhandling/Option.scala
import org.specs2.mutable._
import errorhandling._
object Ex01Spec extends Specification {
  // Exercises map / getOrElse / flatMap / orElse / filter on the custom
  // errorhandling.Option (implementations live in
  // src/main/scala/errorhandling/Option.scala).
  "The map function" should {
    "return None with any None" in {
      val x: Option[Double] = None
      x.map(_*2) mustEqual None
    }
    "work with Some" in {
      Some(6).map(_.toDouble) mustEqual Some(6.0)
    }
  }
  "The getOrElse function" should {
    "return a default val with None" in {
      val x: Option[Int] = None
      // Fix: assert on the typed fixture `x` rather than the bare `None`
      // literal — the original left `x` unused and tested the literal.
      x.getOrElse(-5) mustEqual -5
    }
    "work with Some" in {
      Some(8).getOrElse(14) mustEqual 8
    }
  }
  "The flatMap function" should {
    "return None with None" in {
      val x: Option[Double] = None
      x.flatMap(x => Some(x*9)) mustEqual None
    }
    "return the content with Some" in {
      Some(7).flatMap(x => Some(x*2)) mustEqual Some(14)
    }
  }
  "The orElse function" should {
    "work with None" in {
      val x: Option[Double] = None
      x.orElse(Some(7.56)) mustEqual Some(7.56)
    }
    "return the same Some with Some" in {
      Some(7).orElse(Some(12)) mustEqual Some(7)
    }
  }
  "The filter function" should {
    "return None with None" in {
      val x: Option[Double] = None
      x.filter(_ == 14) mustEqual None
    }
    "return None if filter does not match" in {
      Some(9.8).filter(_ == 10.0) mustEqual None
    }
    "return Some when filter matches" in {
      Some(9).filter(_%3 == 0) mustEqual Some(9)
    }
  }
}
| iht/fpinscala | src/test/scala/chap04/ex01Spec.scala | Scala | mit | 2,816 |
/* Scala.js compiler
* Copyright 2013 LAMP/EPFL
* @author Tobias Schlatter
*/
package org.scalajs.core.compiler
import scala.collection.mutable
/**
* Prepare export generation
*
* Helpers for transformation of @JSExport annotations
*/
trait PrepJSExports { this: PrepJSInterop =>
import global._
import jsAddons._
import definitions._
import jsDefinitions._
import scala.reflect.internal.Flags
  /** Export information collected for a symbol: the JavaScript-facing name,
   *  the position used for error reporting, whether this is a named export,
   *  and whether invalid exports should be silently ignored.
   */
  case class ExportInfo(
      jsName: String,
      pos: Position,
      isNamed: Boolean,
      ignoreInvalid: Boolean
  ) extends jsInterop.ExportInfo
  /** Generate the exporter for the given DefDef
   *
   *  If this DefDef is a constructor, it is registered to be exported by
   *  GenJSCode instead and no trees are returned.
   *
   *  Performs the sanity checks on the member (visibility, no macros or
   *  primitives, repeated/default parameter placement) and, for
   *  constructors, also on the enclosing class; errors are suppressed when
   *  every export info has `ignoreInvalid` set.
   */
  def genExportMember(ddef: DefDef): List[Tree] = {
    val baseSym = ddef.symbol
    val clsSym = baseSym.owner
    val exports = exportsOf(baseSym)
    val ignoreInvalid = exports.forall(_.ignoreInvalid)
    // Helper function for errors
    def err(msg: String) = {
      if (!ignoreInvalid)
        reporter.error(exports.head.pos, msg)
      Nil
    }
    def memType = if (baseSym.isConstructor) "constructor" else "method"
    if (exports.isEmpty)
      Nil
    else if (!hasLegalExportVisibility(baseSym))
      err(s"You may only export public and protected ${memType}s")
    else if (baseSym.isMacro)
      err("You may not export a macro")
    else if (scalaPrimitives.isPrimitive(baseSym))
      err("You may not export a primitive")
    else if (hasIllegalRepeatedParam(baseSym))
      err(s"In an exported $memType, a *-parameter must come last " +
        "(through all parameter lists)")
    else if (hasIllegalDefaultParam(baseSym))
      err(s"In an exported $memType, all parameters with defaults " +
        "must be at the end")
    else if (baseSym.isConstructor) {
      // we can generate constructors entirely in the backend, since they
      // do not need inheritance and such. But we want to check their sanity
      // here by previous tests and the following ones.
      if (!hasLegalExportVisibility(clsSym))
        err("You may only export public and protected classes")
      else if (clsSym.isAbstractClass)
        err("You may not export an abstract class")
      else if (clsSym.isLocalToBlock)
        err("You may not export a local class")
      else if (clsSym.isNestedClass)
        err("You may not export a nested class. Create an exported factory " +
          "method in the outer class to work around this limitation.")
      else {
        jsInterop.registerForExport(baseSym, exports)
        Nil
      }
    } else {
      assert(!baseSym.isBridge)
      // Reset interface flag: Any trait will contain non-empty methods
      clsSym.resetFlag(Flags.INTERFACE)
      // Actually generate exporter methods
      exports.flatMap { exp =>
        if (exp.isNamed)
          genNamedExport(baseSym, exp.jsName, exp.pos) :: Nil
        else
          genExportDefs(baseSym, exp.jsName, exp.pos)
      }
    }
  }
  /** Checks and registers module exports on the symbol
   *
   *  Validates visibility and nesting constraints, rejects named exports on
   *  objects, and registers the remaining (normal) exports with jsInterop.
   */
  def registerModuleExports(sym: Symbol): Unit = {
    assert(sym.isModuleClass, "Expected module class")
    val exports = exportsOf(sym)
    val ignoreInvalid = exports.forall(_.ignoreInvalid)
    if (exports.nonEmpty) {
      def err(msg: String) = {
        if (!ignoreInvalid)
          reporter.error(exports.head.pos, msg)
      }
      if (!hasLegalExportVisibility(sym))
        err("You may only export public and protected objects")
      else if (sym.isLocalToBlock)
        err("You may not export a local object")
      else if (!sym.owner.hasPackageFlag)
        err("You may not export a nested object")
      else {
        // Named exports are invalid on objects; report each one that is
        // not marked ignoreInvalid and register only the normal exports.
        val (named, normal) = exports.partition(_.isNamed)
        for {
          exp <- named
          if !exp.ignoreInvalid
        } reporter.error(exp.pos, "You may not use @JSNamedExport on an object")
        jsInterop.registerForExport(sym, normal)
      }
    }
  }
  /** retrieves the names a sym should be exported to from its annotations
   *
   *  Note that for accessor symbols, the annotations of the accessed symbol
   *  are used, rather than the annotations of the accessor itself.
   *
   *  Combines direct and inherited exports and de-duplicates by
   *  (name, isNamed), preferring a strict (non-ignoreInvalid) entry when
   *  both kinds exist for the same pair.
   */
  def exportsOf(sym: Symbol): List[ExportInfo] = {
    val exports = directExportsOf(sym) ++ inheritedExportsOf(sym)
    // Calculate the distinct exports for this symbol (eliminate double
    // occurrences of (name, isNamed) pairs).
    val grouped = exports.groupBy(exp => (exp.jsName, exp.isNamed))
    for (((jsName, isNamed), exps) <- grouped.toList)
      // Make sure that we are strict if necessary
      yield exps.find(!_.ignoreInvalid).getOrElse(exps.head)
  }
  /** Exports coming from annotations written directly on the symbol
   *  (@JSExport / @JSExportNamed) or on its owner (@JSExportAll), after
   *  validating the chosen JavaScript name.
   */
  private def directExportsOf(sym: Symbol): List[ExportInfo] = {
    val trgSym = {
      // For accessors, look on the val/var def
      if (sym.isAccessor) sym.accessed
      // For primary class constructors, look on the class itself
      else if (sym.isPrimaryConstructor && !sym.owner.isModuleClass) sym.owner
      else sym
    }
    // Annotations that are directly on the member
    val directAnnots = for {
      annot <- trgSym.annotations
      if annot.symbol == JSExportAnnotation ||
        annot.symbol == JSExportNamedAnnotation
    } yield annot
    // Is this a member export (i.e. not a class or module export)?
    val isMember = sym.isMethod && !sym.isConstructor
    // Annotations for this member on the whole unit
    val unitAnnots = {
      if (isMember && sym.isPublic && !sym.isSynthetic)
        sym.owner.annotations.filter(_.symbol == JSExportAllAnnotation)
      else
        Nil
    }
    for {
      annot <- directAnnots ++ unitAnnots
    } yield {
      val isNamedExport = annot.symbol == JSExportNamedAnnotation
      val isExportAll = annot.symbol == JSExportAllAnnotation
      val hasExplicitName = annot.args.nonEmpty
      def explicitName = annot.stringArg(0).getOrElse {
        reporter.error(annot.pos,
          s"The argument to ${annot.symbol.name} must be a literal string")
        "dummy"
      }
      // Default name: the class name for constructors/modules, the decoded
      // member name (minus the setter suffix) otherwise.
      val name = {
        if (hasExplicitName) explicitName
        else if (sym.isConstructor) decodedFullName(sym.owner)
        else if (sym.isModuleClass) decodedFullName(sym)
        else sym.unexpandedName.decoded.stripSuffix("_=")
      }
      // Enforce proper setter signature
      if (jsInterop.isJSSetter(sym))
        checkSetterSignature(sym, annot.pos, exported = true)
      // Enforce no __ in name
      if (name.contains("__")) {
        // Get position for error message
        val pos = if (hasExplicitName) annot.args.head.pos else trgSym.pos
        reporter.error(pos,
          "An exported name may not contain a double underscore (`__`)")
      }
      // Make sure we do not override the default export of toString
      def isIllegalToString = {
        isMember && !isNamedExport &&
        name == "toString" && sym.name != nme.toString_ &&
        sym.tpe.params.isEmpty && !jsInterop.isJSGetter(sym)
      }
      if (isIllegalToString) {
        reporter.error(annot.pos, "You may not export a zero-argument " +
            "method named other than 'toString' under the name 'toString'")
      }
      def isIllegalApplyExport = {
        isMember && !hasExplicitName &&
        sym.name == nme.apply &&
        !(isExportAll && directAnnots.exists(annot =>
          annot.symbol == JSExportAnnotation &&
          annot.args.nonEmpty &&
          annot.stringArg(0) == Some("apply")))
      }
      // Don't allow apply without explicit name
      if (isIllegalApplyExport) {
        // Get position for error message
        val pos = if (isExportAll) trgSym.pos else annot.pos
        reporter.warning(pos, "Member cannot be exported to function " +
            "application. It is available under the name apply instead. " +
            "Add @JSExport(\\"apply\\") to silence this warning. " +
            "This will be enforced in 1.0.")
      }
      if (isNamedExport && jsInterop.isJSProperty(sym)) {
        reporter.error(annot.pos,
            "You may not export a getter or a setter as a named export")
      }
      ExportInfo(name, annot.pos, isNamedExport, ignoreInvalid = false)
    }
  }
  /** Exports forced by an ancestor's @JSExportDescendentObjects /
   *  @JSExportDescendentClasses annotation; the exported name is derived
   *  from the symbol's decoded full name.
   */
  private def inheritedExportsOf(sym: Symbol): List[ExportInfo] = {
    // The symbol from which we (potentially) inherit exports. It also
    // gives the exports their name
    val trgSym = {
      if (sym.isModuleClass)
        sym
      else if (sym.isConstructor && sym.isPublic &&
          sym.owner.isConcreteClass && !sym.owner.isModuleClass)
        sym.owner
      else NoSymbol
    }
    if (trgSym == NoSymbol) {
      Nil
    } else {
      val trgAnnot =
        if (sym.isModuleClass) JSExportDescendentObjectsAnnotation
        else JSExportDescendentClassesAnnotation
      // All ancestors carrying the forcing annotation, each paired with its
      // ignoreInvalid flag (first constant annotation argument).
      val forcingSymInfos = for {
        forcingSym <- trgSym.ancestors
        annot <- forcingSym.annotations
        if annot.symbol == trgAnnot
      } yield {
        val ignoreInvalid = annot.constantAtIndex(0).fold(false)(_.booleanValue)
        (forcingSym, ignoreInvalid)
      }
      // The dominating forcing symbol, is the first that does not ignore
      // or the first otherwise
      val forcingSymInfo =
        forcingSymInfos.find(!_._2).orElse(forcingSymInfos.headOption)
      val name = decodedFullName(trgSym)
      val nameValid = !name.contains("__")
      val optExport = for {
        (forcingSym, ignoreInvalid) <- forcingSymInfo
        if nameValid || !ignoreInvalid
      } yield {
        // Enfore no __ in name
        if (!nameValid) {
          // Get all annotation positions for error message
          reporter.error(sym.pos,
              s"${trgSym.name} may not have a double underscore (`__`) in " +
              "its fully qualified name, since it is forced to be exported by " +
              s"a @${trgAnnot.name} on $forcingSym")
        }
        ExportInfo(name, sym.pos, false, ignoreInvalid)
      }
      optExport.toList
    }
  }
/** Just like sym.fullName, but does not encode components */
private def decodedFullName(sym: Symbol): String = {
if (sym.isRoot || sym.isRootPackage || sym == NoSymbol) sym.name.decoded
else if (sym.owner.isEffectiveRoot) sym.name.decoded
else decodedFullName(sym.effectiveOwner.enclClass) + '.' + sym.name.decoded
}
  /** generate an exporter for a DefDef including default parameter methods
   *
   *  Clones the original symbol under the mangled export name, lifts its
   *  return type to Any, and produces a forwarding DefDef plus one exporter
   *  per default-parameter getter.
   */
  private def genExportDefs(defSym: Symbol, jsName: String, pos: Position) = {
    val clsSym = defSym.owner
    val scalaName =
      jsInterop.scalaExportName(jsName, jsInterop.isJSProperty(defSym))
    // Create symbol for new method
    val expSym = defSym.cloneSymbol
    // Set position of symbol
    expSym.pos = pos
    // Alter type for new method (lift return type to Any)
    // The return type is lifted, in order to avoid bridge
    // construction and to detect methods whose signature only differs
    // in the return type.
    // Attention: This will cause boxes for primitive value types and value
    // classes. However, since we have restricted the return types, we can
    // always safely remove these boxes again in the back-end.
    if (!defSym.isConstructor)
      expSym.setInfo(retToAny(expSym.tpe))
    // Change name for new method
    expSym.name = scalaName
    // Update flags
    expSym.setFlag(Flags.SYNTHETIC)
    expSym.resetFlag(
        Flags.DEFERRED | // We always have a body
        Flags.ACCESSOR | // We are never a "direct" accessor
        Flags.CASEACCESSOR | // And a fortiori not a case accessor
        Flags.LAZY | // We are not a lazy val (even if we export one)
        Flags.OVERRIDE // Synthetic methods need not bother with this
    )
    // Remove export annotations
    expSym.removeAnnotation(JSExportAnnotation)
    expSym.removeAnnotation(JSExportNamedAnnotation)
    // Add symbol to class
    clsSym.info.decls.enter(expSym)
    // Construct exporter DefDef tree
    val exporter = genProxyDefDef(clsSym, defSym, expSym, pos)
    // Construct exporters for default getters
    val defaultGetters = for {
      (param, i) <- expSym.paramss.flatten.zipWithIndex
      if param.hasFlag(Flags.DEFAULTPARAM)
    } yield genExportDefaultGetter(clsSym, defSym, expSym, i + 1, pos)
    exporter :: defaultGetters
  }
  /** Generate a dummy DefDef tree for a named export. This tree is captured
   *  by GenJSCode again to generate the required JavaScript logic.
   *
   *  The synthetic method takes a single Any parameter (the named-argument
   *  object) and its body is only a placeholder call shape; the backend
   *  replaces it entirely.
   */
  private def genNamedExport(defSym: Symbol, jsName: String, pos: Position) = {
    val clsSym = defSym.owner
    val scalaName = jsInterop.scalaExportName(jsName, false)
    // Create symbol for the new exporter method
    val expSym = clsSym.newMethodSymbol(scalaName, pos,
        Flags.SYNTHETIC | Flags.FINAL)
    // Mark the symbol to be a named export
    expSym.addAnnotation(JSExportNamedAnnotation)
    // Create a single parameter of type Any
    val param = expSym.newValueParameter(newTermName("namedArgs"), pos)
    param.setInfo(AnyTpe)
    // Set method type
    expSym.setInfo(MethodType(param :: Nil, AnyClass.tpe))
    // Register method to parent
    clsSym.info.decls.enter(expSym)
    // Placeholder tree
    def ph = Ident(Predef_???)
    // Create a call to the forwarded method with ??? as args
    val sel: Tree = Select(This(clsSym), defSym)
    val call = (sel /: defSym.paramss) {
      (fun, params) => Apply(fun, List.fill(params.size)(ph))
    }
    // rhs is a block to prevent boxing of result
    typer.typedDefDef(DefDef(expSym, Block(call, ph)))
  }
  /** Clones the default-parameter getter of `trgMethod` at position
   *  `paramPos` under the exporter's name, so default arguments also work
   *  through the exported entry point.
   */
  private def genExportDefaultGetter(clsSym: Symbol, trgMethod: Symbol,
      exporter: Symbol, paramPos: Int, pos: Position) = {
    // Get default getter method we'll copy
    val trgGetter =
      clsSym.tpe.member(nme.defaultGetterName(trgMethod.name, paramPos))
    assert(trgGetter.exists)
    // Although the following must be true in a correct program, we cannot
    // assert, since a graceful failure message is only generated later
    if (!trgGetter.isOverloaded) {
      val expGetter = trgGetter.cloneSymbol
      expGetter.name = nme.defaultGetterName(exporter.name, paramPos)
      expGetter.pos  = pos
      clsSym.info.decls.enter(expGetter)
      genProxyDefDef(clsSym, trgGetter, expGetter, pos)
    } else EmptyTree
  }
  /** generate a DefDef tree (from `proxySym`) that calls `trgSym`,
   *  forwarding every parameter list and re-splicing repeated parameters
   *  with `: _*`.
   */
  private def genProxyDefDef(clsSym: Symbol, trgSym: Symbol,
      proxySym: Symbol, pos: Position) = atPos(pos) {
    // Helper to ascribe repeated argument lists when calling
    def spliceParam(sym: Symbol) = {
      if (isRepeated(sym))
        Typed(Ident(sym), Ident(tpnme.WILDCARD_STAR))
      else
        Ident(sym)
    }
    // Construct proxied function call
    val sel: Tree = Select(This(clsSym), trgSym)
    val rhs = (sel /: proxySym.paramss) {
      (fun,params) => Apply(fun, params map spliceParam)
    }
    typer.typedDefDef(DefDef(proxySym, rhs))
  }
/** changes the return type of the method type tpe to Any. returns new type */
private def retToAny(tpe: Type): Type = tpe match {
case MethodType(params, result) => MethodType(params, retToAny(result))
case NullaryMethodType(result) => NullaryMethodType(AnyClass.tpe)
case PolyType(tparams, result) => PolyType(tparams, retToAny(result))
case _ => AnyClass.tpe
}
/** Whether the given symbol has a visibility that allows exporting */
private def hasLegalExportVisibility(sym: Symbol): Boolean =
sym.isPublic || sym.isProtected && !sym.isProtectedLocal
/** checks whether this type has a repeated parameter elsewhere than at the end
* of all the params
*/
private def hasIllegalRepeatedParam(sym: Symbol): Boolean = {
val params = sym.paramss.flatten
params.nonEmpty && params.init.exists(isRepeated _)
}
/** checks whether there are default parameters not at the end of
* the flattened parameter list
*/
private def hasIllegalDefaultParam(sym: Symbol): Boolean = {
val isDefParam = (_: Symbol).hasFlag(Flags.DEFAULTPARAM)
sym.paramss.flatten.reverse.dropWhile(isDefParam).exists(isDefParam)
}
}
| jmnarloch/scala-js | compiler/src/main/scala/org/scalajs/core/compiler/PrepJSExports.scala | Scala | bsd-3-clause | 16,188 |
package at.fabricate.liftdev.common
package model
import net.liftweb.proto.{ProtoUser => GenProtoUser}
import scala.xml.NodeSeq
import net.liftweb.mapper.MetaMegaProtoUser
import net.liftweb.mapper.MegaProtoUser
import net.liftweb.common.Full
import net.liftweb.http.S
import net.liftweb.util.Mailer
import net.liftweb.util.Mailer.From
import net.liftweb.util.Mailer.Subject
import net.liftweb.util.Mailer.To
import net.liftweb.util.Mailer.BCC
import net.liftweb.common.Empty
import net.liftweb.util.Helpers._
import net.liftweb.util.Mailer.MailBodyType
/** Customizable re-implementations of the Lift [[MetaMegaProtoUser]] user
 *  flows: account validation, signup, login/logout, lost password, password
 *  reset and password change.
 *
 *  Unlike the stock ProtoUser snippets, each `customXxx` flow takes the
 *  markup-rendering `selector` function and, where relevant, the URL path
 *  embedded in generated e-mail links plus the location to redirect to
 *  afterwards, so applications fully control look and navigation.
 */
trait CustomizeUserHandling[T <: MegaProtoUser[T] with BaseEntityWithTitleAndDescription[T]] extends MetaMegaProtoUser[T] {
  self: T =>

  /** Validates the account behind unique id `id`: marks it validated,
   *  invalidates the id, logs the user in and redirects home. Unknown or
   *  already-used ids produce an error notice and a redirect instead.
   */
  def customValidateUser(id: String): NodeSeq = findUserByUniqueId(id) match {
    case Full(user) if !user.validated_? =>
      user.setValidated(true).resetUniqueId().save
      logUserIn(user, () => {
        S.notice(S.?("account.validated"))
        S.redirectTo(homePage)
      })

    case _ => S.error(S.?("invalid.validation.link")); S.redirectTo(homePage)
  }

  /**
   * Send validation email to the user. The XHTML version of the mail
   * body is generated by calling signupMailBody. You can customize the
   * mail sent to users by override generateValidationEmailBodies to
   * send non-HTML mail or alternative mail bodies.
   *
   * The validation link is built from the caller-supplied
   * `customValidationPath` plus the user's unique id.
   */
  def customSendValidationEmail(user: TheUserType, customValidationPath: List[String]): Unit = {
    val resetLink = S.hostAndPath + "/" + customValidationPath.mkString("/") +
      "/" + urlEncode(user.getUniqueId())

    Mailer.sendMail(From(emailFrom), Subject(signupMailSubject),
      (To(user.getEmail) ::
        generateValidationEmailBodies(user, resetLink) :::
        (bccEmail.toList.map(BCC(_)))): _*)
  }

  /** Renders the "lost password" form; submitting it triggers
   *  [[customSendPasswordReset]] with the configured redirect location.
   */
  def customLostPassword(selector: ((String, List[String], List[String]) => Unit) => NodeSeq, defaultRedirectLocation: String = homePage) = {
    selector(customSendPasswordReset(_, _, _, defaultRedirectLocation))
  }

  /** Mails a password-reset link to the account registered under `email`.
   *  For a not-yet-validated account the validation mail is re-sent instead;
   *  an unknown address yields an error notice.
   */
  def customSendPasswordReset(email: String, customPasswordResetPath: List[String], customValidationPath: List[String], defaultRedirectLocation: String = homePage): Unit = {
    findUserByUserName(email) match {
      case Full(user) if user.validated_? =>
        // invalidate any previously issued link before mailing a new one
        user.resetUniqueId().save

        val resetLink = S.hostAndPath +
          customPasswordResetPath.mkString("/", "/", "/") + urlEncode(user.getUniqueId())

        Mailer.sendMail(From(emailFrom), Subject(passwordResetEmailSubject),
          (To(user.getEmail) ::
            generateResetEmailBodies(user, resetLink) :::
            (bccEmail.toList.map(BCC(_)))): _*)

        S.notice(S.?("password.reset.email.sent"))
        S.redirectTo(defaultRedirectLocation)

      case Full(user) =>
        customSendValidationEmail(user, customValidationPath)
        S.notice(S.?("account.validation.resent"))
        S.redirectTo(defaultRedirectLocation)

      case _ => S.error(userNameNotFoundString)
    }
  }

  /** Renders the password-reset form for the unique id `userid`; on success
   *  the new password is saved, the link invalidated and the user logged in.
   */
  def customPasswordReset(selector: (T, () => Unit) => NodeSeq, userid: String, defaultRedirectLocation: String = homePage) =
    findUserByUniqueId(userid) match {
      case Full(user) =>
        def finishSet() {
          user.validate match {
            case Nil => S.notice(S.?("password.changed"))
              user.resetUniqueId().save
              logUserIn(user, () => S.redirectTo(defaultRedirectLocation))

            case xs => S.error(xs)
          }
        }

        selector(user, finishSet)

      case _ => S.error(S.?("password.link.invalid")); S.redirectTo(homePage)
    }

  /** Renders the signup form. `createUserInstance` allows supplying a custom
   *  factory for the user entity being created; on validation failure the
   *  form is re-registered and shown again with the errors.
   */
  def customSignup(selector: (T, () => Unit) => NodeSeq, customValidationPath: List[String], createUserInstance: () => TheUserType = createNewUserInstance, defaultRedirectLocation: String = homePage) = {
    val theUser: TheUserType = mutateUserOnSignup(createUserInstance())

    def testSignup() {
      validateSignup(theUser) match {
        case Nil =>
          customActionsAfterSignup(theUser, customValidationPath, () => S.redirectTo(defaultRedirectLocation))

        case xs => S.error(xs); signupFunc(Full(innerSignup _))
      }
    }

    def innerSignup = {
      selector(theUser, testSignup)
    }

    innerSignup
  }

  /**
   * Override this method to do something else after the user signs up.
   * Default behavior: persist the user, then either send the validation
   * e-mail or (when validation is skipped) log the user straight in.
   */
  def customActionsAfterSignup(theUser: TheUserType, customValidationPath: List[String], func: () => Nothing): Nothing = {
    theUser.setValidated(skipEmailValidation).resetUniqueId()
    theUser.save
    if (!skipEmailValidation) {
      customSendValidationEmail(theUser, customValidationPath)
      S.notice(S.?("sign.up.message"))
      func()
    } else {
      logUserIn(theUser, () => {
        S.notice(S.?("welcome"))
        func()
      })
    }
  }

  /** Renders the change-password form for the currently logged-in user;
   *  the old password must match before the new one is validated and saved.
   */
  def customChangePassword(selector: (T, (String, List[String]) => Unit) => NodeSeq, defaultRedirectLocation: String = homePage) = {
    val user = currentUser.openOrThrowException("we can do this because the logged in test has happened")

    def testAndSet(oldPassword: String, newPassword: List[String])() = {
      if (!user.testPassword(Full(oldPassword))) S.error(S.?("wrong.old.password"))
      else {
        user.setPasswordFromListString(newPassword)
        user.validate match {
          case Nil => user.save; S.notice(S.?("password.changed")); S.redirectTo(defaultRedirectLocation)
          case xs => S.error(xs)
        }
      }
    }

    selector(user, testAndSet)
  }

  /** Renders the login form `selector`; on POST checks credentials and logs
   *  the user in, honouring a previously recorded `loginRedirect` target.
   */
  def customLogin(selector: NodeSeq, defaultRedirectLocation: String = homePage) = {
    if (S.post_?) {
      S.param("username").
        flatMap(username => findUserByUserName(username)) match {
          case Full(user) if user.validated_? &&
            user.testPassword(S.param("password")) => {
            val preLoginState = capturePreLoginState()
            val redir = loginRedirect.get match {
              case Full(url) =>
                loginRedirect(Empty)
                url
              case _ =>
                defaultRedirectLocation
            }

            logUserIn(user, () => {
              S.notice(S.?("logged.in"))
              preLoginState()
              S.redirectTo(redir)
            })
          }

          case Full(user) if !user.validated_? =>
            S.error(S.?("account.validation.error"))

          case _ => S.error(S.?("invalid.credentials"))
        }
    }

    selector
  }

  /** Logs the current user out and redirects to `redirectLocation`. */
  def customLogout(redirectLocation: String = homePage) = {
    logoutCurrentUser
    S.redirectTo(redirectLocation)
  }
}
| Fabricate/OpenthingsImplementation | src/main/scala/at/fabricate/liftdev/common/model/CustomizeUserHandling.scala | Scala | lgpl-3.0 | 6,713 |
/*
* Copyright (c) 2013-2016 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow
package collectors
package scalastream
package sinks
// Java
import java.nio.ByteBuffer
import java.util.concurrent.ScheduledThreadPoolExecutor
import java.util.concurrent.ScheduledExecutorService
// Akka
import akka.actor.ActorSystem
import akka.kafka.ProducerSettings
import akka.kafka.scaladsl.Producer
import akka.NotUsed
import akka.stream.Materializer
import akka.stream.scaladsl._
// Kafka
import org.apache.kafka.common.serialization.{ByteArraySerializer, StringSerializer}
import org.apache.kafka.clients.producer.ProducerRecord
// Config
import com.typesafe.config.Config
// Concurrent libraries
import scala.concurrent.{Await, Future, TimeoutException}
import scala.concurrent.duration._
// Logging
import org.slf4j.LoggerFactory
// Scala
import scala.util.{Failure, Success}
import scala.collection.JavaConverters._
// Snowplow
import CollectorPayload.thrift.model1.CollectorPayload
/**
* Kafka Sink for the Scala collector.
*/
/**
 * Kafka Sink for the Scala collector: every call to [[storeRawEvents]]
 * attaches a short-lived source to a shared MergeHub that feeds one
 * long-lived Kafka producer stream.
 */
final class KafkaSink(config: CollectorConfig,
                      inputType: InputType.InputType)(implicit sys: ActorSystem, mat: Materializer)
    extends AbstractSink {
  import log.{debug, error, info, trace}

  val MaxBytes = 1000000L

  type Record = ProducerRecord[String, Array[Byte]]

  // Good and bad events are routed to different topics.
  private val topic = inputType match {
    case InputType.Good => config.kafkaTopicGoodName
    case InputType.Bad  => config.kafkaTopicBadName
  }

  // Single materialized producer stream shared by all callers.
  private val kafkaProducerSink = {
    val settings =
      ProducerSettings(sys, new StringSerializer, new ByteArraySerializer)
        .withBootstrapServers(config.kafkaHost)
    Producer.plainSink[String, Array[Byte]](settings)
  }

  private val mergeHubSink: Sink[Record, NotUsed] =
    MergeHub.source[Record].to(kafkaProducerSink).run()

  /** Publishes each raw event under `key` to the configured topic. */
  def storeRawEvents(events: List[Array[Byte]], key: String) = {
    val records = Source(events).map { payload =>
      new ProducerRecord[String, Array[Byte]](topic, key, payload)
    }
    records.runWith(mergeHubSink)
    Nil
  }
}
| TimothyKlim/snowplow | 2-collectors/scala-stream-collector/src/main/scala/com.snowplowanalytics.snowplow.collectors/scalastream/sinks/KafkaSink.scala | Scala | apache-2.0 | 2,771 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package whisk.core.dispatcher.test
import scala.Vector
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.scalatest.junit.JUnitRunner
import spray.json.pimpAny
import spray.json.pimpString
import whisk.common.PrintStreamLogging
import whisk.core.entity.ActivationResponse._
import whisk.core.entity.size.SizeInt
import whisk.http.Messages._
// Unit tests for how init/run container responses are turned into
// ActivationResponse values: truncation handling, connection failures,
// and the interpretation of empty / string / JSON payloads.
@RunWith(classOf[JUnitRunner])
class ActivationResponseTests extends FlatSpec with Matchers {
  behavior of "ActivationResponse"
  // Logger handed to the response-processing helpers under test.
  val logger = new PrintStreamLogging()

  // A response flagged as truncated must surface a ContainerError with the
  // truncation message, for both init and run, regardless of HTTP status.
  it should "interpret truncated response" in {
    val max = 5.B
    Seq("abcdef", """{"msg":"abcedf"}""", """["a","b","c","d","e"]""").foreach { m =>
      {
        val response = ContainerResponse(okStatus = false, m.take(max.toBytes.toInt - 1), Some(m.length.B, max))
        val init = processInitResponseContent(Right(response), logger)
        init.statusCode shouldBe ContainerError
        init.result.get.asJsObject.fields(ERROR_FIELD) shouldBe truncatedResponse(response.entity, m.length.B, max).toJson
      }
      {
        val response = ContainerResponse(okStatus = true, m.take(max.toBytes.toInt - 1), Some(m.length.B, max))
        val run = processRunResponseContent(Right(response), logger)
        run.statusCode shouldBe ContainerError
        run.result.get.asJsObject.fields(ERROR_FIELD) shouldBe truncatedResponse(response.entity, m.length.B, max).toJson
      }
    }
  }

  // --- init-phase responses ---

  // All connection-level failures collapse to "abnormal initialization".
  it should "interpret failed init that does not response" in {
    Seq(NoHost(), ConnectionError(new Throwable()), NoResponseReceived(), Timeout())
      .map(Left(_)).foreach { e =>
        val ar = processInitResponseContent(e, logger)
        ar.statusCode shouldBe ContainerError
        ar.result.get.asJsObject.fields(ERROR_FIELD) shouldBe abnormalInitialization.toJson
      }
  }

  it should "interpret failed init that responds with null string" in {
    val response = ContainerResponse(okStatus = false, null)
    val ar = processInitResponseContent(Right(response), logger)
    ar.statusCode shouldBe ContainerError
    ar.result.get.asJsObject.fields(ERROR_FIELD) shouldBe invalidInitResponse(response.entity).toJson
    // the literal text "null" must not leak into the user-visible result
    ar.result.get.toString should not include regex("null")
  }

  it should "interpret failed init that responds with empty string" in {
    val response = ContainerResponse(okStatus = false, "")
    val ar = processInitResponseContent(Right(response), logger)
    ar.statusCode shouldBe ContainerError
    ar.result.get.asJsObject.fields(ERROR_FIELD) shouldBe invalidInitResponse(response.entity).toJson
    ar.result.get.asJsObject.fields(ERROR_FIELD).toString.endsWith(".\\"") shouldBe true
  }

  it should "interpret failed init that responds with non-empty string" in {
    val response = ContainerResponse(okStatus = false, "string")
    val ar = processInitResponseContent(Right(response), logger)
    ar.statusCode shouldBe ContainerError
    ar.result.get.asJsObject.fields(ERROR_FIELD) shouldBe invalidInitResponse(response.entity).toJson
    // the original payload is echoed back inside the error message
    ar.result.get.toString should include(response.entity)
  }

  it should "interpret failed init that responds with JSON string not object" in {
    val response = ContainerResponse(okStatus = false, Vector(1).toJson.compactPrint)
    val ar = processInitResponseContent(Right(response), logger)
    ar.statusCode shouldBe ContainerError
    ar.result.get.asJsObject.fields(ERROR_FIELD) shouldBe invalidInitResponse(response.entity).toJson
    ar.result.get.toString should include(response.entity)
  }

  // A JSON object that already carries an "error" field is passed through as-is.
  it should "interpret failed init that responds with JSON object containing error" in {
    val response = ContainerResponse(okStatus = false, Map(ERROR_FIELD -> "foobar").toJson.compactPrint)
    val ar = processInitResponseContent(Right(response), logger)
    ar.statusCode shouldBe ContainerError
    ar.result.get shouldBe response.entity.parseJson
  }

  it should "interpret failed init that responds with JSON object" in {
    val response = ContainerResponse(okStatus = false, Map("foobar" -> "baz").toJson.compactPrint)
    val ar = processInitResponseContent(Right(response), logger)
    ar.statusCode shouldBe ContainerError
    ar.result.get.asJsObject.fields(ERROR_FIELD) shouldBe invalidInitResponse(response.entity).toJson
    ar.result.get.toString should include("baz")
  }

  // Successful init never reaches processInitResponseContent.
  it should "not interpret successful init" in {
    val response = ContainerResponse(okStatus = true, "")
    an[IllegalArgumentException] should be thrownBy {
      processInitResponseContent(Right(response), logger)
    }
  }

  // --- run-phase responses (mirror the init cases) ---

  it should "interpret failed run that does not response" in {
    Seq(NoHost(), ConnectionError(new Throwable()), NoResponseReceived(), Timeout())
      .map(Left(_)).foreach { e =>
        val ar = processRunResponseContent(e, logger)
        ar.statusCode shouldBe ContainerError
        ar.result.get.asJsObject.fields(ERROR_FIELD) shouldBe abnormalRun.toJson
      }
  }

  it should "interpret failed run that responds with null string" in {
    val response = ContainerResponse(okStatus = false, null)
    val ar = processRunResponseContent(Right(response), logger)
    ar.statusCode shouldBe ContainerError
    ar.result.get.asJsObject.fields(ERROR_FIELD) shouldBe invalidRunResponse(response.entity).toJson
    ar.result.get.toString should not include regex("null")
  }

  it should "interpret failed run that responds with empty string" in {
    val response = ContainerResponse(okStatus = false, "")
    val ar = processRunResponseContent(Right(response), logger)
    ar.statusCode shouldBe ContainerError
    ar.result.get.asJsObject.fields(ERROR_FIELD) shouldBe invalidRunResponse(response.entity).toJson
    ar.result.get.asJsObject.fields(ERROR_FIELD).toString.endsWith(".\\"") shouldBe true
  }

  it should "interpret failed run that responds with non-empty string" in {
    val response = ContainerResponse(okStatus = false, "string")
    val ar = processRunResponseContent(Right(response), logger)
    ar.statusCode shouldBe ContainerError
    ar.result.get.asJsObject.fields(ERROR_FIELD) shouldBe invalidRunResponse(response.entity).toJson
    ar.result.get.toString should include(response.entity)
  }

  it should "interpret failed run that responds with JSON string not object" in {
    val response = ContainerResponse(okStatus = false, Vector(1).toJson.compactPrint)
    val ar = processRunResponseContent(Right(response), logger)
    ar.statusCode shouldBe ContainerError
    ar.result.get.asJsObject.fields(ERROR_FIELD) shouldBe invalidRunResponse(response.entity).toJson
    ar.result.get.toString should include(response.entity)
  }

  it should "interpret failed run that responds with JSON object containing error" in {
    val response = ContainerResponse(okStatus = false, Map(ERROR_FIELD -> "foobar").toJson.compactPrint)
    val ar = processRunResponseContent(Right(response), logger)
    ar.statusCode shouldBe ContainerError
    ar.result.get shouldBe response.entity.parseJson
  }

  it should "interpret failed run that responds with JSON object" in {
    val response = ContainerResponse(okStatus = false, Map("foobar" -> "baz").toJson.compactPrint)
    val ar = processRunResponseContent(Right(response), logger)
    ar.statusCode shouldBe ContainerError
    ar.result.get.asJsObject.fields(ERROR_FIELD) shouldBe invalidRunResponse(response.entity).toJson
    ar.result.get.toString should include("baz")
  }

  // An OK run whose payload carries an "error" field is an application error,
  // not a container error.
  it should "interpret successful run that responds with JSON object containing error" in {
    val response = ContainerResponse(okStatus = true, Map(ERROR_FIELD -> "foobar").toJson.compactPrint)
    val ar = processRunResponseContent(Right(response), logger)
    ar.statusCode shouldBe ApplicationError
    ar.result.get shouldBe response.entity.parseJson
  }

  it should "interpret successful run that responds with JSON object" in {
    val response = ContainerResponse(okStatus = true, Map("foobar" -> "baz").toJson.compactPrint)
    val ar = processRunResponseContent(Right(response), logger)
    ar.statusCode shouldBe Success
    ar.result.get shouldBe response.entity.parseJson
  }
}
| prccaraujo/openwhisk | tests/src/test/scala/whisk/core/dispatcher/test/ActivationResponseTests.scala | Scala | apache-2.0 | 9,466 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.database.azblob
import java.time.OffsetDateTime
import akka.actor.ActorSystem
import akka.event.Logging
import akka.event.Logging.InfoLevel
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.StatusCodes.NotFound
import akka.http.scaladsl.model.{ContentType, HttpRequest, HttpResponse, Uri}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}
import akka.util.{ByteString, ByteStringBuilder}
import com.azure.storage.blob.sas.{BlobContainerSasPermission, BlobServiceSasSignatureValues}
import com.azure.storage.blob.{BlobContainerAsyncClient, BlobContainerClientBuilder, BlobUrlParts}
import com.azure.storage.common.StorageSharedKeyCredential
import com.azure.storage.common.policy.{RequestRetryOptions, RetryPolicyType}
import com.typesafe.config.Config
import org.apache.openwhisk.common.LoggingMarkers.{
DATABASE_ATTS_DELETE,
DATABASE_ATT_DELETE,
DATABASE_ATT_GET,
DATABASE_ATT_SAVE
}
import org.apache.openwhisk.common.{Logging, TransactionId}
import org.apache.openwhisk.core.ConfigKeys
import org.apache.openwhisk.core.database.StoreUtils.{combinedSink, reportFailure}
import org.apache.openwhisk.core.database._
import org.apache.openwhisk.core.entity.DocId
import pureconfig._
import pureconfig.generic.auto._
import reactor.core.publisher.Flux
import scala.compat.java8.FutureConverters._
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}
import scala.reflect.ClassTag
// Azure CDN endpoint fronting the blob container; only its domain name is
// needed (it replaces the blob host when building download URLs).
case class AzureCDNConfig(domainName: String)
// Connection settings for the Azure Blob attachment store. When
// `connectionString` is set it carries all endpoint/credential information
// (mostly used for testing with Azurite) and overrides the explicit fields.
// `prefix` namespaces every blob key produced by the store.
case class AzBlobConfig(endpoint: String,
                        accountKey: String,
                        containerName: String,
                        accountName: String,
                        connectionString: Option[String],
                        prefix: Option[String],
                        retryConfig: AzBlobRetryConfig,
                        azureCdnConfig: Option[AzureCDNConfig] = None) {
  // Key prefix for entities of type D: "<prefix>/<classname>" when a prefix
  // is configured, otherwise just the lower-cased simple class name.
  def prefixFor[D](implicit tag: ClassTag[D]): String = {
    val className = tag.runtimeClass.getSimpleName.toLowerCase
    prefix.map(p => s"$p/$className").getOrElse(className)
  }
}
// Retry behaviour, passed through to the Azure SDK's RequestRetryOptions
// when the client is built (see AzureBlobAttachmentStoreProvider.createClient).
case class AzBlobRetryConfig(retryPolicyType: RetryPolicyType,
                             maxTries: Int,
                             tryTimeout: FiniteDuration,
                             retryDelay: FiniteDuration,
                             secondaryHost: Option[String])
/** Factory for [[AzureBlobAttachmentStore]]: loads [[AzBlobConfig]] from the
 *  Typesafe config (key ConfigKeys.azBlob) and builds the async container
 *  client with the configured credentials and retry policy.
 */
object AzureBlobAttachmentStoreProvider extends AttachmentStoreProvider {
  override def makeStore[D <: DocumentSerializer: ClassTag]()(implicit actorSystem: ActorSystem,
                                                              logging: Logging,
                                                              materializer: ActorMaterializer): AttachmentStore = {
    makeStore[D](actorSystem.settings.config)
  }

  // Variant taking an explicit Config (used above and from tests).
  def makeStore[D <: DocumentSerializer: ClassTag](config: Config)(implicit actorSystem: ActorSystem,
                                                                   logging: Logging,
                                                                   materializer: ActorMaterializer): AttachmentStore = {
    val azConfig = loadConfigOrThrow[AzBlobConfig](config, ConfigKeys.azBlob)
    new AzureBlobAttachmentStore(createClient(azConfig), azConfig.prefixFor[D], azConfig)
  }

  /** Builds the async container client, preferring a connection string when
   *  one is configured and applying the configured retry options.
   */
  def createClient(config: AzBlobConfig): BlobContainerAsyncClient = {
    val builder = new BlobContainerClientBuilder()
    //If connection string is specified then it would have all needed info
    //Mostly used for testing using Azurite
    config.connectionString match {
      case Some(s) => builder.connectionString(s)
      case _ =>
        builder
          .endpoint(config.endpoint)
          .credential(new StorageSharedKeyCredential(config.accountName, config.accountKey))
    }
    builder
      .containerName(config.containerName)
      .retryOptions(new RequestRetryOptions(
        config.retryConfig.retryPolicyType,
        config.retryConfig.maxTries,
        config.retryConfig.tryTimeout.toSeconds.toInt,
        // NOTE(review): the same retryDelay value is passed for both the base
        // and the max retry delay parameters — confirm this is intentional.
        config.retryConfig.retryDelay.toMillis,
        config.retryConfig.retryDelay.toMillis,
        config.retryConfig.secondaryHost.orNull))
      .buildAsyncClient()
  }
}
/** [[AttachmentStore]] keeping action attachments in an Azure Blob Storage
 *  container. Blobs are keyed as "<prefix>/<docId>/<name>". When an Azure CDN
 *  is configured, reads go through the CDN using a short-lived SAS token;
 *  otherwise they are served from blob storage directly.
 */
class AzureBlobAttachmentStore(client: BlobContainerAsyncClient, prefix: String, config: AzBlobConfig)(
  implicit system: ActorSystem,
  logging: Logging,
  materializer: ActorMaterializer)
    extends AttachmentStore {
  // Scheme recorded in attachment references so reads route back to this store.
  override protected[core] def scheme: String = "az"

  override protected[core] implicit val executionContext: ExecutionContext = system.dispatcher

  /** Uploads the attachment stream as a block blob.
   *  NOTE(review): the whole payload is buffered in memory via
   *  ByteStringBuilder before the upload starts — see the TODO below about
   *  switching to the SDK's streaming upload.
   */
  override protected[core] def attach(
    docId: DocId,
    name: String,
    contentType: ContentType,
    docStream: Source[ByteString, _])(implicit transid: TransactionId): Future[AttachResult] = {
    require(name != null, "name undefined")
    val start =
      transid.started(this, DATABASE_ATT_SAVE, s"[ATT_PUT] uploading attachment '$name' of document 'id: $docId'")
    val blobClient = getBlobClient(docId, name)
    //TODO Use BlobAsyncClient#upload(Flux<ByteBuffer>, com.azure.storage.blob.models.ParallelTransferOptions, boolean)
    // Accumulate the full stream in memory; combinedSink also computes the
    // digest/length used in the AttachResult.
    val uploadSink = Sink.fold[ByteStringBuilder, ByteString](new ByteStringBuilder)((builder, b) => builder ++= b)
    val f = docStream.runWith(combinedSink(uploadSink))
    val g = f.flatMap { r =>
      val buff = r.uploadResult.result().compact
      val uf = blobClient.upload(Flux.fromArray(Array(buff.asByteBuffer)), buff.size).toFuture.toScala
      uf.map(_ => AttachResult(r.digest, r.length))
    }
    g.foreach(_ =>
      transid
        .finished(this, start, s"[ATT_PUT] '$prefix' completed uploading attachment '$name' of document 'id: $docId'"))
    reportFailure(
      g,
      start,
      failure => s"[ATT_PUT] '$prefix' internal error, name: '$name', doc: '$docId', failure: '${failure.getMessage}'")
  }

  /** Streams an attachment into `sink`. Fails with NoDocumentException when
   *  the blob does not exist; other failures are logged via reportFailure.
   */
  override protected[core] def readAttachment[T](docId: DocId, name: String, sink: Sink[ByteString, Future[T]])(
    implicit transid: TransactionId): Future[T] = {
    require(name != null, "name undefined")
    val start =
      transid.started(
        this,
        DATABASE_ATT_GET,
        s"[ATT_GET] '$prefix' finding attachment '$name' of document 'id: $docId'")
    val source = getAttachmentSource(objectKey(docId, name), config)
    // NOTE(review): getAttachmentSource currently fails the future instead of
    // returning None on a missing blob, so the None branch looks defensive.
    val f = source.flatMap {
      case Some(x) => x.runWith(sink)
      case None => Future.failed(NoDocumentException("Not found on 'readAttachment'."))
    }
    val g = f.transform(
      { s =>
        transid
          .finished(this, start, s"[ATT_GET] '$prefix' completed: found attachment '$name' of document 'id: $docId'")
        s
      }, {
        case e: NoDocumentException =>
          transid
            .finished(
              this,
              start,
              s"[ATT_GET] '$prefix', retrieving attachment '$name' of document 'id: $docId'; not found.",
              logLevel = Logging.ErrorLevel)
          e
        case e => e
      })
    reportFailure(
      g,
      start,
      failure =>
        s"[ATT_GET] '$prefix' internal error, name: '$name', doc: 'id: $docId', failure: '${failure.getMessage}'")
  }

  /** Deletes every attachment of `docId` by listing blobs under the document's
   *  key prefix and deleting them one at a time (mapAsync parallelism 1).
   *  Individual delete failures are logged and swallowed; the overall future
   *  succeeds with true once the listing has been processed.
   */
  override protected[core] def deleteAttachments(docId: DocId)(implicit transid: TransactionId): Future[Boolean] = {
    val start =
      transid.started(
        this,
        DATABASE_ATTS_DELETE,
        s"[ATTS_DELETE] deleting attachments of document 'id: $docId' with prefix ${objectKeyPrefix(docId)}")
    // counts listed blobs for the final log line; mutated inside the stream
    var count = 0
    val f = Source
      .fromPublisher(client.listBlobsByHierarchy(objectKeyPrefix(docId)))
      .mapAsync(1) { b =>
        count += 1
        val startDelete =
          transid.started(
            this,
            DATABASE_ATT_DELETE,
            s"[ATT_DELETE] deleting attachment '${b.getName}' of document 'id: $docId'")
        client
          .getBlobAsyncClient(b.getName)
          .delete()
          .toFuture
          .toScala
          .map(
            _ =>
              transid.finished(
                this,
                startDelete,
                s"[ATT_DELETE] completed: deleting attachment '${b.getName}' of document 'id: $docId'"))
          .recover {
            case t =>
              transid.failed(
                this,
                startDelete,
                s"[ATT_DELETE] failed: deleting attachment '${b.getName}' of document 'id: $docId' error: $t")
          }
      }
      .recover {
        case t =>
          logging.error(this, s"[ATT_DELETE] :error in delete ${t}")
          throw t
      }
      .runWith(Sink.seq)
      .map(_ => true)
    f.foreach(
      _ =>
        transid.finished(
          this,
          start,
          s"[ATTS_DELETE] completed: deleting ${count} attachments of document 'id: $docId'",
          InfoLevel))
    reportFailure(
      f,
      start,
      failure => s"[ATTS_DELETE] '$prefix' internal error, doc: '$docId', failure: '${failure.getMessage}'")
  }

  /** Deletes a single attachment blob; yields true on success. */
  override protected[core] def deleteAttachment(docId: DocId, name: String)(
    implicit transid: TransactionId): Future[Boolean] = {
    val start =
      transid.started(this, DATABASE_ATT_DELETE, s"[ATT_DELETE] deleting attachment '$name' of document 'id: $docId'")
    val f = getBlobClient(docId, name).delete().toFuture.toScala.map(_ => true)
    f.foreach(_ =>
      transid.finished(this, start, s"[ATT_DELETE] completed: deleting attachment '$name' of document 'id: $docId'"))
    reportFailure(
      f,
      start,
      failure => s"[ATT_DELETE] '$prefix' internal error, doc: '$docId', failure: '${failure.getMessage}'")
  }

  // no-op
  override def shutdown(): Unit = {}

  // Blob key for a single attachment.
  private def objectKey(id: DocId, name: String): String = s"$prefix/${id.id}/$name"

  private def objectKeyPrefix(id: DocId): String =
    s"$prefix/${id.id}/" //must end with a slash so that ".../<package>/<action>other" does not match for "<package>/<action>"

  private def getBlobClient(docId: DocId, name: String) =
    client.getBlobAsyncClient(objectKey(docId, name)).getBlockBlobAsyncClient

  /** Resolves the download source for a blob key: via the CDN (with a 1-day
   *  read-only SAS token appended) when configured, otherwise directly from
   *  blob storage. Fails with NoDocumentException when the blob is missing.
   */
  private def getAttachmentSource(objectKey: String, config: AzBlobConfig)(
    implicit tid: TransactionId): Future[Option[Source[ByteString, Any]]] = {
    val blobClient = client.getBlobAsyncClient(objectKey).getBlockBlobAsyncClient
    config.azureCdnConfig match {
      case Some(cdnConfig) =>
        //setup sas token
        def expiryTime = OffsetDateTime.now().plusDays(1)
        def permissions =
          new BlobContainerSasPermission()
            .setReadPermission(true)
        val sigValues = new BlobServiceSasSignatureValues(expiryTime, permissions)
        val sas = blobClient.generateSas(sigValues)
        //parse the url, and reset the host
        val parts = BlobUrlParts.parse(blobClient.getBlobUrl)
        val url = parts.setHost(cdnConfig.domainName)
        logging.info(
          this,
          s"[ATT_GET] '$prefix' downloading attachment from azure cdn '$objectKey' with url (sas params not displayed) ${url}")
        //append the sas params to the url before downloading
        val cdnUrlWithSas = s"${url.toUrl.toString}?$sas"
        getUrlContent(cdnUrlWithSas)
      case None =>
        blobClient.exists().toFuture.toScala.map { exists =>
          if (exists) {
            val bbFlux = blobClient.download()
            Some(Source.fromPublisher(bbFlux).map(ByteString.fromByteBuffer))
          } else {
            throw NoDocumentException("Not found on 'readAttachment'.")
          }
        }
    }
  }

  /** Fetches `uri` over HTTP, mapping 404 to NoDocumentException and any
   *  other non-success status to a generic failure carrying the body text.
   */
  private def getUrlContent(uri: Uri): Future[Option[Source[ByteString, Any]]] = {
    val future = Http().singleRequest(HttpRequest(uri = uri))
    future.flatMap {
      case HttpResponse(status, _, entity, _) if status.isSuccess() && !status.isRedirection() =>
        Future.successful(Some(entity.dataBytes))
      case HttpResponse(status, _, entity, _) =>
        if (status == NotFound) {
          entity.discardBytes()
          throw NoDocumentException("Not found on 'readAttachment'.")
        } else {
          Unmarshal(entity).to[String].map { err =>
            throw new Exception(s"failed to download ${uri} status was ${status} response was ${err}")
          }
        }
    }
  }
}
| akrabat/openwhisk | common/scala/src/main/scala/org/apache/openwhisk/core/database/azblob/AzureBlobAttachmentStore.scala | Scala | apache-2.0 | 13,082 |
/*
* Copyright 2016 rdbc contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rdbc.util.scheduler
import java.util.concurrent.ScheduledExecutorService
import io.rdbc.util.Logging
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ExecutionContext, Future}
/** [[TaskScheduler]] backed by a JDK [[ScheduledExecutorService]]. */
class JdkScheduler(executorService: ScheduledExecutorService)
                  (implicit ec: ExecutionContext)
  extends TaskScheduler
    with Logging {

  /** Schedules `action` to run once after `delay`. The action itself runs on
   *  the implicit ExecutionContext, not on the scheduler's own thread.
   */
  def schedule(delay: FiniteDuration)
              (action: () => Unit): ScheduledTask = traced {
    logger.debug(s"Scheduling a task to run in $delay using $executorService")
    val javaFuture =
      executorService.schedule(asRunnable(() => Future(action())), delay.length, delay.unit)
    new JdkScheduledTask(javaFuture)
  }

  /** Shuts the underlying executor down without waiting for queued tasks. */
  def shutdown(): Future[Unit] = Future {
    executorService.shutdownNow()
  }

  /* Scala 2.11 compat: no SAM conversion for Runnable. */
  private def asRunnable(action: () => Unit): Runnable = {
    new Runnable() {
      def run(): Unit = action()
    }
  }
}
| rdbc-io/rdbc | rdbc-util/src/main/scala/io/rdbc/util/scheduler/JdkScheduler.scala | Scala | apache-2.0 | 1,529 |
package com.googlecode.kanbanik.commands
import com.googlecode.kanbanik.builders.WorkflowitemBuilder
import com.googlecode.kanbanik.model.Workflowitem
import org.bson.types.ObjectId
import com.googlecode.kanbanik.db.HasMongoConnection
import com.googlecode.kanbanik.model.Board
import com.googlecode.kanbanik.builders.BoardBuilder
import com.googlecode.kanbanik.messages.ServerMessages
import com.googlecode.kanbanik.builders.WorkflowBuilder
import com.googlecode.kanbanik.model.Workflow
import com.googlecode.kanbanik.db.HasEntityLoader
import com.googlecode.kanbanik.dtos.{ErrorDto, WorkflowitemDto, EditWorkflowParams, WorkflowDto}
class EditWorkflowCommand extends Command[EditWorkflowParams, WorkflowitemDto] with HasMongoConnection with HasEntityLoader {
  // Builders translating between DTOs and persistent entities; lazy so that
  // constructing the command itself stays cheap.
  lazy val workflowitemBuilder = new WorkflowitemBuilder
  lazy val workflowBuilder = new WorkflowBuilder
  lazy val boardBuilder = new BoardBuilder
def execute(params: EditWorkflowParams): Either[WorkflowitemDto, ErrorDto] = {
val currenDto = params.current
val nextDto = params.next
val destContextDto = params.destinationWorkflow
// hack just to test if the board still exists
val board = loadBoard(new ObjectId(params.board.id.getOrElse(
return Right(ErrorDto("The board has to have the ID set"))
)), includeTasks = false).getOrElse(
return Right(ErrorDto(ServerMessages.entityDeletedMessage("board " + params.board.name)))
)
val currentBoard = board.copy(version = params.board.version)
doExecute(currenDto, nextDto, destContextDto, currentBoard)
}
private def doExecute(currentDto: WorkflowitemDto, nextDto: Option[WorkflowitemDto], destContextDto: WorkflowDto, currentBoard: Board): Either[WorkflowitemDto, ErrorDto] = {
if (hasTasks(destContextDto)) {
return Right(ErrorDto("The workflowitem into which you are about to drop this item already has some tasks in it which would effectively hide them. Please move this tasks first out."))
}
val currentWorkflow = workflowBuilder.buildEntity(currentDto.parentWorkflow.get, Some(currentBoard))
val currentEntityId = if (!currentDto.id.isDefined) new ObjectId else new ObjectId(currentDto.id.get)
val currentEntityIfExists = currentWorkflow.findItem(Workflowitem().copy(id = Some(currentEntityId)))
val currentEntity = currentEntityIfExists.getOrElse(workflowitemBuilder.buildEntity(currentDto, Some(currentWorkflow), Some(currentBoard)))
val nextEntity = {
if (!nextDto.isDefined) {
None
} else {
Some(workflowitemBuilder.buildEntity(nextDto.get, None, None))
}
}
val contextEntity = workflowBuilder.buildEntity(destContextDto, Some(currentBoard))
val res = contextEntity.board.move(currentEntity, nextEntity, contextEntity).store
val realCurrentEntity = res.workflow.findItem(currentEntity).getOrElse(throw new IllegalStateException("Was not able to find the just stored workflowitem with id: '" + currentEntity.id + "'"))
Left(workflowitemBuilder.buildDto(realCurrentEntity, None))
}
private def hasTasks(destContextDto: WorkflowDto): Boolean = {
val board = Board.byId(new ObjectId(destContextDto.board.id.get), includeTasks = true)
if (!destContextDto.id.isDefined) {
return false
}
val destWorkflow = Workflow().copy(id = Some(new ObjectId(destContextDto.id.get)))
if (board.workflow == destWorkflow) {
return false
}
val destParentItem = board.workflow.findParentItem(destWorkflow).getOrElse(throw new IllegalStateException("The workflow: " + destContextDto.id + " is defined on no item."))
val tasksOnWorkflowitem = board.tasks.filter(_.workflowitemId == destParentItem.id.get)
tasksOnWorkflowitem.size != 0
}
}
| nagyistoce/kanbanik | kanbanik-server/src/main/scala/com/googlecode/kanbanik/commands/EditWorkflowCommand.scala | Scala | apache-2.0 | 3,758 |
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.storehaus.cache
import org.scalacheck.{Prop, Properties}
import org.scalacheck.Prop._
object ClockProperties extends Properties("Clock") {

  /**
   * Ticks `clock` `number` times, returning the ticks in production order
   * (appended after `initialTicks`) together with the final clock state.
   */
  def vectorOfTicks[T: Ordering, K <: Clock[T, K]](
    clock: K, initialTicks: Vector[T] = Vector.empty[T], number: Int = 100): (Seq[T], K) = {
    @annotation.tailrec
    def loop(remaining: Int, acc: Vector[T], current: K): (Seq[T], K) =
      if (remaining <= 0) (acc, current)
      else {
        val (tick, next) = current.tick
        loop(remaining - 1, acc :+ tick, next)
      }
    loop(number, initialTicks, clock)
  }

  /** A clock must never hand out the same tick twice. */
  def alwaysDistinct[T: Ordering, K <: Clock[T, K]](clock: K): Boolean = {
    val (ticks, _) = vectorOfTicks[T, K](clock)
    ticks.toSet.size == ticks.size
  }

  /** Ticks must come out in ascending order. */
  def alwaysIncreasing[T: Ordering, K <: Clock[T, K]](clock: K): Boolean = {
    val (ticks, _) = vectorOfTicks[T, K](clock)
    ticks.sorted == ticks
  }

  /** A lawful clock produces distinct, monotonically increasing ticks. */
  def clockLaws[T: Ordering, K <: Clock[T, K]](clock: K): Boolean =
    alwaysDistinct[T, K](clock) && alwaysIncreasing[T, K](clock)

  property("LongClock obeys the Clock laws") = clockLaws[Long, LongClock](LongClock())
}
object IdProviderProperties extends Properties("IdProvider") {

  /**
   * Checks that ids handed out by `idProvider` remain strictly increasing and
   * unique, even while previously issued ids are being culled (returned) at random.
   *
   * Bug fixes versus the previous version:
   *  - the fold combined per-iteration results with `isWorking || ...` starting
   *    from `true`, so the property could never fail; it now uses `&&` and
   *    checks the freshly produced vector instead of the stale previous one.
   *  - `iter` shadowed its `vector` parameter with the destructuring
   *    `val (vector, ...)`, silently discarding the ids that survived earlier
   *    iterations; they are now passed to `vectorOfTicks` as the initial ticks
   *    so ordering is verified across iterations as the fold intended.
   */
  def alwaysIncreasing[T: Ordering](idProvider: IdProvider[T]): Boolean = {
    val rng = new java.util.Random

    // Draws 100 fresh ids (appended after the surviving ones), then randomly
    // culls some of them, returning the ids still in use plus the new provider.
    def iter(previousTicks: Vector[T], provider: IdProvider[T]): (Vector[T], IdProvider[T]) = {
      val (ticks, tickedProvider) =
        ClockProperties.vectorOfTicks[T, IdProvider[T]](provider, previousTicks)
      val cullFlags = ticks map { _ => rng.nextBoolean }
      ticks.zip(cullFlags).foldLeft((Vector.empty[T], tickedProvider)) {
        case ((kept, p), (id, cull)) =>
          if (cull) (kept, p.cull(id)) else (kept :+ id, p)
      }
    }

    (1 to 100).foldLeft((Vector.empty[T], idProvider, true)) {
      case ((kept, provider, ok), _) =>
        val (newKept, newProvider) = iter(kept, provider)
        (newKept, newProvider, ok && newKept == newKept.sorted && newKept.size == newKept.toSet.size)
    }._3
  }

  /** An IdProvider must obey the Clock laws and stay increasing under culling. */
  def idProviderLaws[T: Ordering](idProvider: IdProvider[T]): Prop =
    ClockProperties.clockLaws[T, IdProvider[T]](idProvider) && alwaysIncreasing[T](idProvider)

  property("CyclicIncrementProvider obeys the IdProvider laws") =
    idProviderLaws(CyclicIncrementProvider.intIncrementer)
}
| twitter/storehaus | storehaus-cache/src/test/scala/com/twitter/storehaus/cache/ClockProperties.scala | Scala | apache-2.0 | 2,732 |
package artisanal.pickle.maker
import models._
import parser._
import org.specs2._
import mutable._
import specification._
import scala.reflect.internal.pickling.ByteCodecs
import scala.tools.scalap.scalax.rules.scalasig._
import com.novus.salat.annotations.util._
import scala.reflect.ScalaSignature
class LongSpecLong extends mutable.Specification {

  "a ScalaSig for case class MyRecord_LongLong(d1: Long, d2: Long)" should {
    "have the correct string" in {
      // Signature built by artisanal-pickle-maker for the two-Long record.
      val generatedSig =
        new ScalaSig(List("case class"), List("models", "MyRecord_LongLong"), List(("d1", "Long"), ("d2", "Long")))
      // Reference signature parsed from the @ScalaSignature annotation emitted by scalac.
      val referenceParsed =
        SigParserHelper.parseByteCodeFromAnnotation(classOf[MyRecord_LongLong]).map(ScalaSigAttributeParsers.parse(_)).get
      val generatedParsed =
        SigParserHelper.parseByteCodeFromMySig(generatedSig).map(ScalaSigAttributeParsers.parse(_)).get
      referenceParsed.toString === generatedParsed.toString
    }
  }
}
| julianpeeters/artisanal-pickle-maker | src/test/scala/doubleValueMember/LongLongSpec.scala | Scala | apache-2.0 | 919 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.crossdata.connector.postgresql
import org.apache.spark.sql.crossdata.ExecutionType
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class PostgresqlAggregationIT extends PostgresqlWithSharedContext {

  /** Runs the query against the shared context and collects it with native execution. */
  private def nativeRows(query: String) = sql(query).collect(ExecutionType.Native)

  "The Postgresql connector" should s"support a (SELECT MAX() ) natively" in {
    assumeEnvironmentIsUpAndRunning
    val rows = nativeRows(s"SELECT MAX(id) as maxim FROM $postgresqlSchema.$Table")
    rows(0).getInt(0) shouldBe 10
  }

  it should s"support a (SELECT MIN()) natively" in {
    assumeEnvironmentIsUpAndRunning
    val rows = nativeRows(s"SELECT MIN(id) FROM $postgresqlSchema.$Table")
    rows(0).getInt(0) shouldBe 1
  }

  it should s"support a (SELECT SUM()) natively" in {
    assumeEnvironmentIsUpAndRunning
    val rows = nativeRows(s"SELECT SUM(id) FROM $postgresqlSchema.$Table")
    rows(0).getLong(0) shouldBe 55
  }

  it should s"support a (SELECT AVG()) natively" in {
    assumeEnvironmentIsUpAndRunning
    val rows = nativeRows(s"SELECT AVG(id) FROM $postgresqlSchema.$Table")
    rows(0).getDouble(0) shouldBe 5.5
  }

  it should s"support a (SELECT Count()) natively" in {
    assumeEnvironmentIsUpAndRunning
    val rows = nativeRows(s"SELECT COUNT(id) FROM $postgresqlSchema.$Table")
    rows(0).getLong(0) shouldBe 10
  }

  it should s"support a (SELECT Count()) ... GROUP BY ... natively" in {
    assumeEnvironmentIsUpAndRunning
    val rows = nativeRows(s"SELECT comment, COUNT(id) as count FROM $postgresqlSchema.$Table GROUP BY id, comment")
    rows should have length 10
    rows(0).getLong(1) shouldBe 1
  }

  it should s"support a (SELECT Count()) ...WHERE ... GROUP BY ... natively" in {
    assumeEnvironmentIsUpAndRunning
    val rows = nativeRows(s"SELECT comment, COUNT(id) as count FROM $postgresqlSchema.$Table WHERE id > 5 GROUP BY id, comment")
    rows should have length 5
    rows(0).getLong(1) shouldBe 1
  }

  it should s"support a (SELECT Count()) ... GROUP BY ... ORDER BY COUNT(id) natively" in {
    assumeEnvironmentIsUpAndRunning
    val rows = nativeRows(s"SELECT comment, COUNT(id) as countalias FROM $postgresqlSchema.$Table GROUP BY id, comment ORDER BY COUNT(id)")
    rows should have length 10
    rows(0).getLong(1) shouldBe 1
  }

  it should s"support a (SELECT Count()) ... GROUP BY ... ORDER BY alias natively" in {
    assumeEnvironmentIsUpAndRunning
    val rows = nativeRows(s"SELECT comment, COUNT(id) as countalias FROM $postgresqlSchema.$Table GROUP BY id, comment ORDER BY countalias")
    rows should have length 10
    rows(0).getLong(1) shouldBe 1
  }

  it should s"support a (SELECT Count()) ... GROUP BY ... ORDER BY alias ... HAVING alias natively" in {
    assumeEnvironmentIsUpAndRunning
    val rows = nativeRows(s"SELECT comment, COUNT(id) as countalias FROM $postgresqlSchema.$Table GROUP BY id, comment HAVING countalias < 5 ORDER BY countalias ")
    rows should have length 10
    rows(0).getLong(1) shouldBe 1
  }

  it should s"support a (SELECT Count()) ... GROUP BY ... ORDER BY alias ... HAVING natively" in {
    assumeEnvironmentIsUpAndRunning
    val rows = nativeRows(s"SELECT comment, COUNT(id) as countalias FROM $postgresqlSchema.$Table GROUP BY id, comment HAVING COUNT(id) < 5 ORDER BY countalias ")
    rows should have length 10
    rows(0).getLong(1) shouldBe 1
  }
}
} | Stratio/crossdata | postgresql/src/test/scala/com/stratio/crossdata/connector/postgresql/PostgresqlAggregationIT.scala | Scala | apache-2.0 | 4,658 |
package xmppz
import java.util.concurrent.Executors
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.channel.ChannelPipeline
import org.jboss.netty.channel.ChannelPipelineFactory
import org.jboss.netty.handler.codec.string.StringDecoder
import org.jboss.netty.handler.codec.string.StringEncoder
import java.net.InetSocketAddress
import java.util.concurrent.atomic.AtomicLong
import org.jboss.netty.buffer.ChannelBuffer
import org.jboss.netty.channel.{
ChannelEvent,
ChannelHandlerContext,
ChannelStateEvent,
ExceptionEvent,
MessageEvent,
Channel,
Channels,
SimpleChannelUpstreamHandler
}
import util._
import netty._
/**
 * Minimal Netty 3.x server for tests: it echoes every received string back to
 * the client, reversed, and lets the test push arbitrary strings to the most
 * recently connected client via `writeToClient`.
 *
 * NOTE(review): the pipeline factory below creates a fresh SimpleServerHandler
 * per pipeline, so the `handler` field is never actually installed; its
 * `writeToClient` still works because all handler instances share the outer
 * `clientChannel` var.
 */
class TestNettyServer(port: Int) {

  val bootstrap = new ServerBootstrap(
    new NioServerSocketChannelFactory(Executors.newCachedThreadPool(), Executors.newCachedThreadPool()))

  // Never installed in a pipeline (see class note); used only as an entry point
  // for writeToClient.
  val handler = new SimpleServerHandler

  // The server's listen channel, set once bind() succeeds.
  var channel: Option[Channel] = None

  bootstrap.setPipelineFactory(new ChannelPipelineFactory {
    override def getPipeline = Channels.pipeline(
      new StringDecoder,
      new StringEncoder,
      new SimpleServerHandler)
  })

  channel = Some(bootstrap.bind(new InetSocketAddress(port)))
  println("listneing on port " + port)

  /** Sends `str` to the connected client, if any. */
  def writeToClient(str: String) =
    handler.writeToClient(str)

  // Last connected client's channel.
  // NOTE(review): written from the Netty IO thread and read from the test
  // thread without synchronization — consider marking it @volatile.
  var clientChannel: Option[Channel] = None

  class SimpleServerHandler extends SimpleChannelUpstreamHandler {

    /** Writes `str` to the client, silently doing nothing when none is connected. */
    def writeToClient(str: String) {
      for (client <- clientChannel)
        client.write(str)
    }

    override def channelConnected(ctx: ChannelHandlerContext, e: ChannelStateEvent) {
      // Remember the client so the test can push messages to it later.
      clientChannel = Some(e.getChannel())
    }

    override def messageReceived(ctx: ChannelHandlerContext, e: MessageEvent) {
      // Echo the received message back to the client, reversed.
      e.getChannel.write(e.getMessage.toString.reverse)
    }

    override def exceptionCaught(context: ChannelHandlerContext, e: ExceptionEvent) {
      // Close the connection when an exception is raised.
      e.getChannel.close()
    }
  }

  /** Closes the server's listen channel (client channels are left as-is). */
  def close() {
    channel.foreach(_.close())
  }
}
| vmarquez/xmppz | src/test/scala/xmppz/TestNettyServer.scala | Scala | lgpl-3.0 | 2,155 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package unit.kafka.admin
import kafka.admin.ConsumerGroupCommandTest
import kafka.utils.TestUtils
import org.apache.kafka.common.protocol.Errors
import org.junit.Assert._
import org.junit.Test
/**
 * Tests for consumer group deletion via the consumer-groups command.
 *
 * Each scenario is covered twice: the `...Cmd...` variants assert on the
 * console output produced by the tool, while their counterparts assert on the
 * error map returned by `deleteGroups()`.
 *
 * Fix: the `waitUntilTrue` failure messages used to read "The group did become
 * empty as expected." — the opposite of what a timeout means; they now read
 * "did not become empty". Unused `executor` bindings were also dropped.
 */
class DeleteConsumerGroupTest extends ConsumerGroupCommandTest {

  @Test(expected = classOf[joptsimple.OptionException])
  def testDeleteWithTopicOption() {
    TestUtils.createOffsetsTopic(zkClient, servers)
    // --delete and --topic are mutually exclusive, so parsing must fail.
    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", group, "--topic")
    getConsumerGroupService(cgcArgs)
    fail("Expected an error due to presence of mutually exclusive options")
  }

  @Test
  def testDeleteCmdNonExistingGroup() {
    TestUtils.createOffsetsTopic(zkClient, servers)
    val missingGroup = "missing.group"

    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", missingGroup)
    val service = getConsumerGroupService(cgcArgs)

    val output = TestUtils.grabConsoleOutput(service.deleteGroups())
    assertTrue(s"The expected error (${Errors.GROUP_ID_NOT_FOUND}) was not detected while deleting consumer group",
      output.contains(s"Group '$missingGroup' could not be deleted due to: ${Errors.GROUP_ID_NOT_FOUND.toString}"))
  }

  @Test
  def testDeleteNonExistingGroup() {
    TestUtils.createOffsetsTopic(zkClient, servers)
    val missingGroup = "missing.group"

    // note the group to be deleted is a different (non-existing) group
    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", missingGroup)
    val service = getConsumerGroupService(cgcArgs)

    val result = service.deleteGroups()
    assertTrue(s"The expected error (${Errors.GROUP_ID_NOT_FOUND}) was not detected while deleting consumer group",
      result.size == 1 && result.keySet.contains(missingGroup) && result.get(missingGroup).contains(Errors.GROUP_ID_NOT_FOUND))
  }

  @Test
  def testDeleteCmdInvalidGroupId() {
    TestUtils.createOffsetsTopic(zkClient, servers)
    // an empty group id is rejected by the broker with INVALID_GROUP_ID
    val invalidGroupId = ""

    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", invalidGroupId)
    val service = getConsumerGroupService(cgcArgs)

    val output = TestUtils.grabConsoleOutput(service.deleteGroups())
    assertTrue(s"The expected error (${Errors.INVALID_GROUP_ID}) was not detected while deleting consumer group",
      output.contains(s"Group '$invalidGroupId' could not be deleted due to: ${Errors.INVALID_GROUP_ID.toString}"))
  }

  @Test
  def testDeleteInvalidGroupId() {
    TestUtils.createOffsetsTopic(zkClient, servers)
    val invalidGroupId = ""

    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", invalidGroupId)
    val service = getConsumerGroupService(cgcArgs)

    val result = service.deleteGroups()
    assertTrue(s"The expected error (${Errors.INVALID_GROUP_ID}) was not detected while deleting consumer group",
      result.size == 1 && result.keySet.contains(invalidGroupId) && result.get(invalidGroupId).contains(Errors.INVALID_GROUP_ID))
  }

  @Test
  def testDeleteCmdNonEmptyGroup() {
    TestUtils.createOffsetsTopic(zkClient, servers)

    // run one consumer in the group
    addConsumerGroupExecutor(numConsumers = 1)

    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", group)
    val service = getConsumerGroupService(cgcArgs)

    TestUtils.waitUntilTrue(() => {
      service.listGroups().contains(group)
    }, "The group did not initialize as expected.")

    // deleting a group with live members must fail with NON_EMPTY_GROUP
    val output = TestUtils.grabConsoleOutput(service.deleteGroups())
    assertTrue(s"The expected error (${Errors.NON_EMPTY_GROUP}) was not detected while deleting consumer group",
      output.contains(s"Group '$group' could not be deleted due to: ${Errors.NON_EMPTY_GROUP}"))
  }

  @Test
  def testDeleteNonEmptyGroup() {
    TestUtils.createOffsetsTopic(zkClient, servers)

    // run one consumer in the group
    addConsumerGroupExecutor(numConsumers = 1)

    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", group)
    val service = getConsumerGroupService(cgcArgs)

    TestUtils.waitUntilTrue(() => {
      service.listGroups().contains(group)
    }, "The group did not initialize as expected.")

    val result = service.deleteGroups()
    assertTrue(s"The expected error (${Errors.NON_EMPTY_GROUP}) was not detected while deleting consumer group",
      result.size == 1 && result.keySet.contains(group) && result.get(group).contains(Errors.NON_EMPTY_GROUP))
  }

  @Test
  def testDeleteCmdEmptyGroup() {
    TestUtils.createOffsetsTopic(zkClient, servers)

    // run one consumer in the group
    val executor = addConsumerGroupExecutor(numConsumers = 1)

    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", group)
    val service = getConsumerGroupService(cgcArgs)

    TestUtils.waitUntilTrue(() => {
      service.listGroups().contains(group)
    }, "The group did not initialize as expected.")

    // stop the consumer so the group transitions to Empty and becomes deletable
    executor.shutdown()

    TestUtils.waitUntilTrue(() => {
      service.collectGroupState().state == "Empty"
    }, "The group did not become empty as expected.")

    val output = TestUtils.grabConsoleOutput(service.deleteGroups())
    assertTrue(s"The consumer group could not be deleted as expected",
      output.contains(s"Deletion of requested consumer groups ('$group') was successful."))
  }

  @Test
  def testDeleteEmptyGroup() {
    TestUtils.createOffsetsTopic(zkClient, servers)

    // run one consumer in the group
    val executor = addConsumerGroupExecutor(numConsumers = 1)

    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", group)
    val service = getConsumerGroupService(cgcArgs)

    TestUtils.waitUntilTrue(() => {
      service.listGroups().contains(group)
    }, "The group did not initialize as expected.")

    executor.shutdown()

    TestUtils.waitUntilTrue(() => {
      service.collectGroupState().state == "Empty"
    }, "The group did not become empty as expected.")

    val result = service.deleteGroups()
    assertTrue(s"The consumer group could not be deleted as expected",
      result.size == 1 && result.keySet.contains(group) && result.get(group).contains(Errors.NONE))
  }

  @Test
  def testDeleteCmdWithMixOfSuccessAndError() {
    TestUtils.createOffsetsTopic(zkClient, servers)
    val missingGroup = "missing.group"

    // run one consumer in the group
    val executor = addConsumerGroupExecutor(numConsumers = 1)

    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", group)
    val service = getConsumerGroupService(cgcArgs)

    TestUtils.waitUntilTrue(() => {
      service.listGroups().contains(group)
    }, "The group did not initialize as expected.")

    executor.shutdown()

    TestUtils.waitUntilTrue(() => {
      service.collectGroupState().state == "Empty"
    }, "The group did not become empty as expected.")

    // delete the (now empty) existing group together with a missing one:
    // one deletion succeeds, the other reports GROUP_ID_NOT_FOUND
    val service2 = getConsumerGroupService(cgcArgs ++ Array("--group", missingGroup))
    val output = TestUtils.grabConsoleOutput(service2.deleteGroups())
    assertTrue(s"The consumer group deletion did not work as expected",
      output.contains(s"Group '$missingGroup' could not be deleted due to: ${Errors.GROUP_ID_NOT_FOUND}") &&
        output.contains(s"These consumer groups were deleted successfully: '$group'"))
  }

  @Test
  def testDeleteWithMixOfSuccessAndError() {
    TestUtils.createOffsetsTopic(zkClient, servers)
    val missingGroup = "missing.group"

    // run one consumer in the group
    val executor = addConsumerGroupExecutor(numConsumers = 1)

    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", group)
    val service = getConsumerGroupService(cgcArgs)

    TestUtils.waitUntilTrue(() => {
      service.listGroups().contains(group)
    }, "The group did not initialize as expected.")

    executor.shutdown()

    TestUtils.waitUntilTrue(() => {
      service.collectGroupState().state == "Empty"
    }, "The group did not become empty as expected.")

    val service2 = getConsumerGroupService(cgcArgs ++ Array("--group", missingGroup))
    val result = service2.deleteGroups()
    assertTrue(s"The consumer group deletion did not work as expected",
      result.size == 2 &&
        result.keySet.contains(group) && result.get(group).contains(Errors.NONE) &&
        result.keySet.contains(missingGroup) && result.get(missingGroup).contains(Errors.GROUP_ID_NOT_FOUND))
  }

  @Test
  def testDeleteCmdWithShortInitialization() {
    // run one consumer in the group; without the offsets topic the coordinator
    // is not yet available, so deletion must fail accordingly
    addConsumerGroupExecutor(numConsumers = 1)

    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", group)
    val service = getConsumerGroupService(cgcArgs)

    val output = TestUtils.grabConsoleOutput(service.deleteGroups())
    assertTrue(s"The consumer group deletion did not work as expected",
      output.contains(s"Group '$group' could not be deleted due to: ${Errors.COORDINATOR_NOT_AVAILABLE}"))
  }

  @Test
  def testDeleteWithShortInitialization() {
    // run one consumer in the group
    addConsumerGroupExecutor(numConsumers = 1)

    val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", group)
    val service = getConsumerGroupService(cgcArgs)

    val result = service.deleteGroups()
    assertTrue(s"The consumer group deletion did not work as expected",
      result.size == 1 &&
        result.keySet.contains(group) && result.get(group).contains(Errors.COORDINATOR_NOT_AVAILABLE))
  }
}
| MyPureCloud/kafka | core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupsTest.scala | Scala | apache-2.0 | 10,266 |
package org.broadinstitute.dsde.firecloud.integrationtest
import akka.stream.Materializer
import com.typesafe.scalalogging.LazyLogging
import org.broadinstitute.dsde.firecloud.integrationtest.ESIntegrationSupport.{searchDAO, shareLogDAO}
import org.broadinstitute.dsde.firecloud.model.ShareLog.{Share, ShareType}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.freespec.AnyFreeSpec
import org.scalatest.matchers.should.Matchers
import scala.util.Try
class ElasticSearchShareLogDAOSpec extends AnyFreeSpec with Matchers with BeforeAndAfterAll with LazyLogging {

  override def beforeAll(): Unit = {
    // The share log DAO exposes no index-recreate operation, so reuse the search DAO's.
    searchDAO.recreateIndex()
    ElasticSearchShareLogDAOSpecFixtures.fixtureShares foreach { share =>
      shareLogDAO.logShare(share.userId, share.sharee, share.shareType)
    }
  }

  override def afterAll(): Unit = {
    // Likewise, index deletion lives on the search DAO.
    searchDAO.deleteIndex()
  }

  // Compare shares ignoring timestamps (presumably assigned at log time).
  private def scrubShares(in: Seq[Share]) = in.map(_.copy(timestamp = None))

  "ElasticSearchShareLogDAO" - {
    "getShares" - {
      "should get shares of all types that were logged in init" in {
        val expected = ElasticSearchShareLogDAOSpecFixtures.fixtureShares
          .sortBy(s => (s.userId, s.sharee, s.shareType))
        val actual = shareLogDAO.getShares("fake1") ++ shareLogDAO.getShares("fake2")
        actual.size shouldBe expected.size
        scrubShares(actual) should contain theSameElementsAs scrubShares(expected)
      }
      "should get shares of a specific type and none others" in {
        val expected = ElasticSearchShareLogDAOSpecFixtures.fixtureShares
          .filter(s => s.userId == "fake1" && s.shareType == ShareType.GROUP)
          .sortBy(_.sharee)
        val actual = shareLogDAO.getShares("fake1", Some(ShareType.GROUP)).sortBy(_.sharee)
        actual.size shouldBe expected.size
        scrubShares(actual) should contain theSameElementsAs scrubShares(expected)
      }
    }
    "logShare" - {
      "should log a share and get it back successfully using the generated MD5 hash" in {
        val share = Share("roger", "syd@gmail.com", ShareType.WORKSPACE)
        val logged = shareLogDAO.logShare(share.userId, share.sharee, share.shareType)
        shareLogDAO.getShare(share) shouldBe logged
      }
      "should successfully log a record of a user sharing a workspace with the same user twice" in {
        val first = shareLogDAO.logShare("fake4", "fake3@gmail.com", ShareType.WORKSPACE)
        Try(shareLogDAO.logShare(first.userId, first.sharee, first.shareType)).isSuccess shouldBe true
      }
    }
  }
}
object ElasticSearchShareLogDAOSpecFixtures {

  // Same 30 shares as before, generated instead of enumerated literal-by-literal.
  private val fake1WorkspaceSharees: Seq[String] =
    (2 to 10).map(i => s"fake$i@gmail.com") ++ Seq("fakea1@gmail.com", "fakea2@gmail.com", "fakea3@gmail.com")
  private val fake1GroupSharees: Seq[String] =
    (2 to 10).map(i => s"fake$i@gmail.com") ++ Seq("fakea11@gmail.com", "fakea12@gmail.com", "fakea13@gmail.com")
  private val fake2Sharees: Seq[String] =
    Seq("fake1@gmail.com", "fake3@gmail.com", "fake4@gmail.com")

  val fixtureShares: Seq[Share] =
    fake1WorkspaceSharees.map(Share("fake1", _, ShareType.WORKSPACE)) ++
      fake2Sharees.map(Share("fake2", _, ShareType.WORKSPACE)) ++
      fake1GroupSharees.map(Share("fake1", _, ShareType.GROUP)) ++
      fake2Sharees.map(Share("fake2", _, ShareType.GROUP))
}
| broadinstitute/firecloud-orchestration | src/test/scala/org/broadinstitute/dsde/firecloud/integrationtest/ElasticSearchShareLogDAOSpec.scala | Scala | bsd-3-clause | 4,735 |
package com.avsystem.commons
package mongo.typed
import com.avsystem.commons.annotation.macroPrivate
import com.avsystem.commons.meta.OptionLike
import com.avsystem.commons.misc.TypedMap
import com.avsystem.commons.mongo.typed.MongoPropertyRef.Separator
import com.avsystem.commons.mongo.{BsonValueInput, KeyEscaper}
import com.avsystem.commons.serialization.GenCodec.ReadFailure
import org.bson.{BsonDocument, BsonValue}
/**
* Represents a reference to a particular "place" in a MongoDB document. The "place" may be an actual path
* inside the document ([[MongoPropertyRef]]) or the whole document _itself_ (you can think of it as an empty path).
*
* When the [[MongoRef]] points to the whole document, it may also narrow the type only to some subtype(s).
* See the [[DataRefDsl.as]] macro for more details on narrowing.
*
* @tparam E the data type
*/
sealed trait MongoRef[E, T] extends MongoProjection[E, T] with DataRefDsl[E, T] { self =>

  /** Serialization format of the value type `T` this reference points at. */
  def format: MongoFormat[T]

  /** Concrete paths this reference contributes to a projection; a plain ref contributes itself. */
  def projectionRefs: Set[MongoRef[E, _]] = Set(this)

  /** Whether the query should request the record id; defaults to `false`. */
  def showRecordId: Boolean = false

  @macroPrivate def subtypeRefFor[C <: T : ClassTag]: MongoRef[E, C]

  // Resolves a field by its Scala name, delegating to the ADT metadata in `format`.
  @macroPrivate def fieldRefFor[T0](scalaFieldName: String): MongoPropertyRef[E, T0] =
    format.assumeAdt.fieldRefFor(this, scalaFieldName)

  // Builds a filter matching (or, when `negated`, excluding) documents of subtype C.
  @macroPrivate def subtypeFilterFor[C <: T : ClassTag](negated: Boolean): MongoDocumentFilter[E] =
    format.assumeUnion.subtypeFilterFor(this, classTag[C].runtimeClass.asInstanceOf[Class[C]], negated)

  /**
   * Composes this reference with another one, effectively prepending a "prefix" to this reference.
   * This is conceptually similar to composing functions using `scala.Function1.compose`.
   */
  def compose[P](prefix: MongoRef[P, E]): ThisRef[P, T]

  /**
   * Composes this reference with another one, effectively appending a "suffix" to this reference.
   * This is conceptually similar to composing functions using `scala.Function1.andThen`.
   */
  def andThen[S](suffix: MongoRef[T, S]): suffix.ThisRef[E, S] = suffix compose this

  // Adapts this reference into a projection rooted at a different data type.
  def on[E0](ref: MongoRef[E0, E]): MongoProjection[E0, T] = compose(ref)
}
/**
* A "reference" to a document type, possibly narrowed to some subtype.
* A `MongoToplevelRef` can be used as a [[MongoProjection]] to indicate that we want a query to return
* full documents. If the projection is narrowed to a subtype of the document, this implies an additional
* filter so that only a subset of documents matching the subtype is returned.
*
* @tparam E the document type
* @tparam T subtype of the document type, often equal to the document type
*/
sealed trait MongoToplevelRef[E, T <: E] extends MongoRef[E, T] {
  // no need to expose this as MongoToplevelRef, MongoRef is enough
  type ThisRef[E0, T0] = MongoRef[E0, T0]

  def SelfRef: MongoRef[E, T] = this

  /** Reference to the root document type `E`, i.e. this ref without subtype narrowing. */
  def fullRef: MongoRef.RootRef[E]

  /** Format of the (possibly narrowed) document type; toplevel refs always carry an ADT format. */
  def format: MongoAdtFormat[T]

  @macroPrivate def subtypeRefFor[C <: T : ClassTag]: MongoToplevelRef[E, C] =
    format.assumeUnion.subtypeRefFor(this, classTag[C].runtimeClass.asInstanceOf[Class[C]])

  // Decodes a full BSON document into the data type using the format's codec.
  def decodeFrom(doc: BsonDocument): T = BsonValueInput.read(doc)(format.codec)
}
/**
* Represents a path inside a MongoDB document.
*
* A [[MongoPropertyRef]] is usually obtained using the [[DataRefDsl.ref]] macro -
* see its documentation for more details.
*
* [[MongoPropertyRef]] has a rich API so that it can be used for creating [[MongoDocumentFilter]]s, [[MongoDocumentUpdate]]s,
* [[MongoDocumentOrder]]s and [[MongoIndex]]es.
*
* {{{
* case class MyEntity(id: String, number: Int) extends MongoEntity[MyEntity]
* object MyEntity extends MongoEntityCompanion[MyEntity]
*
* val filter: MongoDocumentFilter[MyEntity] =
* MyEntity.ref(_.id).is("ID") && MyEntity.ref(_.number) > 8
*
* val update: MongoUpdate[MyEntity] =
* MyEntity.ref(_.number).inc(5)
*
* val order: MongoDocumentOrder[MyEntity] =
* MyEntity.ref(_.number).descending
* }}}
*
* [[MongoPropertyRef]] may also be used as a [[MongoProjection]]
* or as a part of a more complex, multi-field projection.
*
* @tparam E data type representing the whole document
* @tparam T type of the value under the referenced field or path
*/
sealed trait MongoPropertyRef[E, T] extends MongoRef[E, T]
  with QueryOperatorsDsl[T, MongoDocumentFilter[E]]
  with UpdateOperatorsDsl[T, MongoDocumentUpdate[E]] {
  type ThisRef[E0, T0] = MongoPropertyRef[E0, T0]
  def SelfRef: MongoPropertyRef[E, T] = this
  import MongoRef._
  // Narrows the referenced value to a union subtype C; requires `format` to be a union format.
  @macroPrivate def subtypeRefFor[C <: T : ClassTag]: MongoPropertyRef[E, C] =
    format.assumeUnion.subtypeRefFor(this, classTag[C].runtimeClass.asInstanceOf[Class[C]])
  // Adapters required by the Query/Update operator DSLs mixed in above:
  // they lift value-level operators into document-level filters/updates rooted at this path.
  protected def wrapQueryOperators(ops: MongoQueryOperator[T]*): MongoDocumentFilter[E] =
    satisfiesFilter(MongoOperatorsFilter(ops))
  protected def wrapUpdate(update: MongoUpdate[T]): MongoDocumentUpdate[E] =
    MongoUpdate.PropertyUpdate(this, update)
  private def satisfiesFilter(filter: MongoFilter[T]): MongoDocumentFilter[E] =
    MongoFilter.PropertyValueFilter(this, filter)
  /**
   * Creates a [[MongoDocumentFilter]] which applies some other filter on the value pointed by this
   * reference. This method accepts a lambda simply for syntactic convenience - the "creator" gives you all the
   * API for creating filters on the value type which is usually shorter than creating them manually.
   *
   * {{{
   *   case class MyEntity(id: String, data: InnerData) extends MongoEntity[MyEntity]
   *   object MyEntity extends MongoEntityCompanion[MyEntity]
   *
   *   case class InnerData(number: Int, text: String)
   *   object InnerData extends MongoDataCompanion[InnerData]
   *
   *   val filter: MongoDocumentFilter[MyEntity] =
   *     MyEntity.ref(_.number).satisfies(c => c.ref(_.number) > 0 && c.ref(_.text).startsWith("prefix"))
   * }}}
   */
  def satisfies(filter: MongoFilter.Creator[T] => MongoFilter[T]): MongoDocumentFilter[E] =
    satisfiesFilter(filter(new MongoFilter.Creator[T](format)))
  /**
   * Creates a filter that applies multiple query operators on this reference (which means that all the operators
   * must be satisfied). Note that every operator may be used only once and this is not validated statically
   * (a runtime error is thrown when some operator is duplicated).
   *
   * {{{
   *   case class MyEntity(id: String, number: Int) extends MongoEntity[MyEntity]
   *   object MyEntity extends MongoEntityCompanion[MyEntity]
   *
   *   val filter: MongoDocumentFilter[MyEntity] =
   *     MyEntity.ref(_.number).satisfiesOperators(c => c.gte(0) ++ c.lt(10))
   * }}}
   *
   * The above produces a filter document that looks like this:
   *
   * {{{
   *   {"number": {"$$gte": 0, "$$lt": 10}}
   * }}}
   *
   * Note that the same can be usually achieved using logical operators, i.e.
   *
   * {{{
   *   val filter: MongoDocumentFilter[MyEntity] =
   *     MyEntity.ref(_.number) >= 0 && MyEntity.ref(_.number) < 10
   * }}}
   *
   * However, there are some places where this is not possible, e.g. when specifying a filter in
   * [[VanillaQueryOperatorsDsl.ForCollection.elemMatch elemMatch]].
   */
  def satisfiesOperators(operators: MongoQueryOperator.Creator[T] => Seq[MongoQueryOperator[T]]): MongoDocumentFilter[E] =
    satisfies(_.satisfiesOperators(operators))
  /** Creates a document update that applies `update` to the value under this path. */
  def updateWith(update: MongoUpdate.Creator[T] => MongoUpdate[T]): MongoDocumentUpdate[E] =
    MongoUpdate.PropertyUpdate(this, update(new MongoUpdate.Creator(format)))
  /** Renames this field to the path of `newRef` (delegates to the string-based `rename`). */
  def rename(newRef: MongoPropertyRef[E, T]): MongoDocumentUpdate[E] = rename(newRef.rawPath)
  // Sort-order and index builders keyed on this path.
  def order(ascending: Boolean): MongoDocumentOrder[E] = MongoDocumentOrder(this -> ascending)
  def ascending: MongoDocumentOrder[E] = order(true)
  def descending: MongoDocumentOrder[E] = order(false)
  def index(indexType: MongoIndexType): MongoIndex[E] = MongoIndex(this -> indexType)
  def ascendingIndex: MongoIndex[E] = index(MongoIndexType.Ascending)
  def descendingIndex: MongoIndex[E] = index(MongoIndexType.Descending)
  def hashedIndex: MongoIndex[E] = index(MongoIndexType.Hashed)
  def textIndex: MongoIndex[E] = index(MongoIndexType.Text)
  def twoDimIndex: MongoIndex[E] = index(MongoIndexType.TwoDim)
  def twoDimSphereIndex: MongoIndex[E] = index(MongoIndexType.TwoDimSphere)
  // Walks the reference chain from this ref up to the toplevel ref, accumulating
  // escaped path segments front-to-back. With `onlyUpToArray = true`, everything
  // accumulated below the innermost array index is discarded (used for projection paths).
  // GetFromOptional and PropertySubtypeRef contribute no segment: they change the
  // static type, not the document path.
  //noinspection NoTailRecursionAnnotation
  //no @tailrec because Scala 2.11 has problems with it
  private def computePath[T0](
    onlyUpToArray: Boolean,
    ref: MongoPropertyRef[E, T0],
    acc: List[String]
  ): List[String] = ref match {
    case FieldRef(_: MongoToplevelRef[_, _], fieldName, _, _) =>
      KeyEscaper.escape(fieldName) :: acc
    case FieldRef(prefix: MongoPropertyRef[E, _], fieldName, _, _) =>
      computePath(onlyUpToArray, prefix, KeyEscaper.escape(fieldName) :: acc)
    case ArrayIndexRef(prefix, index, _) =>
      val newAcc = if (onlyUpToArray) Nil else index.toString :: acc
      computePath(onlyUpToArray, prefix, newAcc)
    case GetFromOptional(prefix, _, _) =>
      computePath(onlyUpToArray, prefix, acc)
    case PropertySubtypeRef(prefix, _, _, _) =>
      computePath(onlyUpToArray, prefix, acc)
  }
  /** Full dotted MongoDB path of this reference, including array indices. */
  lazy val rawPath: String =
    computePath(onlyUpToArray = false, this, Nil).mkString(Separator)
  /** Dotted path truncated at the innermost array index (suitable for projections). */
  lazy val projectionPath: String =
    computePath(onlyUpToArray = true, this, Nil).mkString(Separator)
  private def notFound =
    throw new ReadFailure(s"path $rawPath absent in incoming document")
  // Walks an incoming BSON document along this reference chain, applying the
  // per-field fallback value when a field is absent and failing with ReadFailure otherwise.
  private def extractBson(doc: BsonDocument): BsonValue = this match {
    case FieldRef(_: MongoToplevelRef[_, _], fieldName, _, fallback) =>
      doc.get(KeyEscaper.escape(fieldName)).opt.orElse(fallback).getOrElse(notFound)
    case FieldRef(prefix: MongoPropertyRef[E, _], fieldName, _, fallback) =>
      prefix.extractBson(doc).asDocument.get(KeyEscaper.escape(fieldName)).opt.orElse(fallback).getOrElse(notFound)
    case ArrayIndexRef(prefix, index, _) =>
      val array = prefix.extractBson(doc).asArray
      if (index < array.size) array.get(index) else notFound
    case GetFromOptional(prefix, _, _) =>
      prefix.extractBson(doc)
    case PropertySubtypeRef(prefix, _, _, _) =>
      prefix.extractBson(doc)
  }
  /** Extracts and decodes the value under this path from `doc`; throws ReadFailure when absent. */
  def decodeFrom(doc: BsonDocument): T =
    format.readBson(extractBson(doc))
}
object MongoPropertyRef {
  /** Separator between segments of a dotted MongoDB key path. */
  final val Separator = "."
  /** Indexing into collection-valued properties: `ref.head` / `ref(i)`. */
  implicit class CollectionRefOps[E, C[X] <: Iterable[X], T](private val ref: MongoPropertyRef[E, C[T]]) extends AnyVal {
    def head: MongoPropertyRef[E, T] = apply(0)
    def apply(index: Int): MongoPropertyRef[E, T] =
      MongoRef.ArrayIndexRef(ref, index, ref.format.assumeCollection.elementFormat)
  }
  /** Keyed access into map-valued properties; the key is serialized into a field name. */
  implicit class DictionaryRefOps[E, M[X, Y] <: BMap[X, Y], K, V](private val ref: MongoPropertyRef[E, M[K, V]]) extends AnyVal {
    def apply(key: K): MongoPropertyRef[E, V] = {
      val dictFormat = ref.format.assumeDictionary
      MongoRef.FieldRef(ref, dictFormat.keyCodec.write(key), dictFormat.valueFormat, Opt.Empty)
    }
  }
  /** Keyed access into TypedMap-valued properties; the value format depends on the key. */
  implicit class TypedMapRefOps[E, K[_]](private val ref: MongoPropertyRef[E, TypedMap[K]]) extends AnyVal {
    def apply[T](key: K[T]): MongoPropertyRef[E, T] = {
      val tmFormat = ref.format.assumeTypedMap
      MongoRef.FieldRef(ref, tmFormat.keyCodec.write(key), tmFormat.valueFormats.valueFormat(key), Opt.Empty)
    }
  }
  // Plain implicit def (not a value class) because OptionalRefOps needs the
  // OptionLike evidence resolved at conversion time.
  implicit def optionalRefOps[E, O, T](ref: MongoPropertyRef[E, O])(implicit optionLike: OptionLike.Aux[O, T]): OptionalRefOps[E, O, T] =
    new OptionalRefOps[E, O, T](ref)
  /** Unwrapping of Option-like properties: `ref.get` points at the wrapped value. */
  class OptionalRefOps[E, O, T](private val ref: MongoPropertyRef[E, O]) extends AnyVal {
    def get: MongoPropertyRef[E, T] = {
      val format = ref.format.assumeOptional[T]
      MongoRef.GetFromOptional(ref, format.wrappedFormat, format.optionLike)
    }
  }
}
object MongoRef {
  // Deliberately not calling this IdentityRef so that it doesn't get confused with IdRef (for database ID field)
  /** Root of a reference chain: points at the entire document of type T. */
  final case class RootRef[T](
    format: MongoAdtFormat[T]
  ) extends MongoToplevelRef[T, T] {
    def fullRef: RootRef[T] = this
    def compose[P](prefix: MongoRef[P, T]): MongoRef[P, T] = prefix
  }
  /** Narrows the whole document to a union subtype T of E (no path segment added). */
  final case class RootSubtypeRef[E, T <: E](
    fullRef: RootRef[E],
    caseFieldName: String,
    caseNames: List[String],
    format: MongoAdtFormat[T]
  ) extends MongoToplevelRef[E, T] {
    def compose[P](prefix: MongoRef[P, E]): MongoRef[P, T] = prefix match {
      case _: MongoToplevelRef[P, E] =>
        // fullRef is guaranteed to be the same as prefix.fullRef
        // must cast because the compiler cannot infer the fact that E <: P in this case
        RootSubtypeRef(fullRef, caseFieldName, caseNames, format).asInstanceOf[MongoRef[P, T]]
      case ref: MongoPropertyRef[P, E] => PropertySubtypeRef(ref, caseFieldName, caseNames, format)
    }
  }
  /** A named field under `prefix`; `fallbackBson` is used when the field is absent in a document. */
  final case class FieldRef[E, E0, T](
    prefix: MongoRef[E, E0],
    fieldName: String,
    format: MongoFormat[T],
    fallbackBson: Opt[BsonValue]
  ) extends MongoPropertyRef[E, T] {
    def compose[P](newPrefix: MongoRef[P, E]): MongoPropertyRef[P, T] =
      copy(prefix = this.prefix compose newPrefix)
  }
  /** Element at `index` (non-negative) of a collection-valued property. */
  final case class ArrayIndexRef[E, C[X] <: Iterable[X], T](
    prefix: MongoPropertyRef[E, C[T]],
    index: Int,
    format: MongoFormat[T]
  ) extends MongoPropertyRef[E, T] {
    require(index >= 0, "array index must be non-negative")
    def compose[P](newPrefix: MongoRef[P, E]): MongoPropertyRef[P, T] =
      copy(prefix = prefix compose newPrefix)
  }
  /** Unwraps an Option-like property; contributes no path segment (see computePath). */
  final case class GetFromOptional[E, O, T](
    prefix: MongoPropertyRef[E, O],
    format: MongoFormat[T],
    optionLike: OptionLike.Aux[O, T]
  ) extends MongoPropertyRef[E, T] {
    def compose[P](newPrefix: MongoRef[P, E]): MongoPropertyRef[P, T] =
      copy(prefix = prefix compose newPrefix)
  }
  /** Narrows a union-typed property to subtype T; contributes no path segment. */
  final case class PropertySubtypeRef[E, T0, T <: T0](
    prefix: MongoPropertyRef[E, T0],
    caseFieldName: String,
    caseNames: List[String],
    format: MongoAdtFormat[T]
  ) extends MongoPropertyRef[E, T] {
    def compose[P](newPrefix: MongoRef[P, E]): MongoPropertyRef[P, T] =
      copy(prefix = prefix compose newPrefix)
  }
  /** Reference to the discriminator field holding a union case's name. */
  def caseNameRef[E, T](prefix: MongoRef[E, T], caseFieldName: String): MongoPropertyRef[E, String] =
    FieldRef(prefix, caseFieldName, MongoFormat[String], Opt.Empty)
}
| AVSystem/scala-commons | commons-mongo/jvm/src/main/scala/com/avsystem/commons/mongo/typed/MongoRef.scala | Scala | mit | 14,302 |
package com.ovoenergy.comms.helpers
import com.ovoenergy.comms.model._
import com.ovoenergy.comms.model.email._
import com.ovoenergy.comms.model.sms._
import com.ovoenergy.comms.model.print._
import com.typesafe.config.Config
import shapeless.HNil
/**
 * Binds a named Kafka cluster to its configuration block (`kafka.<clusterName>`).
 * Loading is eager, so a misconfigured cluster fails at construction time.
 */
case class CommsKafkaCluster(clusterName: String)(implicit config: Config) {
  implicit val kafkaConfig: KafkaClusterConfig =
    pureconfig
      .loadConfig[KafkaClusterConfig](config.getConfig(s"kafka.$clusterName"))
      .fold(
        err => throw new Exception(s"Failed to read config with errors: $err"),
        identity
      )
}
/**
 * Registry of every comms Kafka topic, grouped by event family and version.
 * NOTE: `aiven` is a `def` returning a fresh anonymous instance, so each call
 * re-reads configuration and rebuilds all Topic values. The grouping objects
 * (`new { ... }`) have structural types, so member access uses reflection.
 */
object Kafka {
  def aiven(implicit config: Config) = new CommsKafkaCluster("aiven") {
    val triggered = new {
      val v3 = Topic[TriggeredV3]("triggeredV3")
      val v4 = Topic[TriggeredV4]("triggeredV4")
      val p0V4 = Topic[TriggeredV4]("p0TriggeredV4")
    }
    val composedEmail = new {
      val v2 = Topic[ComposedEmailV2]("composedEmailV2")
      val v3 = Topic[ComposedEmailV3]("composedEmailV3")
      val v4 = Topic[ComposedEmailV4]("composedEmailV4")
    }
    val composedSms = new {
      val v2 = Topic[ComposedSMSV2]("composedSmsV2")
      val v3 = Topic[ComposedSMSV3]("composedSmsV3")
      val v4 = Topic[ComposedSMSV4]("composedSmsV4")
    }
    val composedPrint = new {
      val v1 = Topic[ComposedPrint]("composedPrint")
      val v2 = Topic[ComposedPrintV2]("composedPrintV2")
      val v3 = Topic[ComposedPrintV3]("composedPrintV3")
    }
    val failed = new {
      val v2 = Topic[FailedV2]("failedV2")
      val v3 = Topic[FailedV3]("failedV3")
    }
    val issuedForDelivery = new {
      val v2 = Topic[IssuedForDeliveryV2]("issuedForDeliveryV2")
      val v3 = Topic[IssuedForDeliveryV3]("issuedForDeliveryV3")
    }
    val orchestratedEmail = new {
      val v3 = Topic[OrchestratedEmailV3]("orchestratedEmailV3")
      val v4 = Topic[OrchestratedEmailV4]("orchestratedEmailV4")
    }
    val orchestratedSMS = new {
      val v2 = Topic[OrchestratedSMSV2]("orchestratedSmsV2")
      val v3 = Topic[OrchestratedSMSV3]("orchestratedSmsV3")
    }
    val orchestratedPrint = new {
      val v1 = Topic[OrchestratedPrint]("orchestratedPrint")
      val v2 = Topic[OrchestratedPrintV2]("orchestratedPrintV2")
    }
    val progressedEmail = new {
      val v2 = Topic[EmailProgressedV2]("progressedEmailV2")
      val v3 = Topic[EmailProgressedV3]("progressedEmailV3")
    }
    val progressedSMS = new {
      val v2 = Topic[SMSProgressedV2]("progressedSmsV2")
      val v3 = Topic[SMSProgressedV3]("progressedSmsV3")
    }
    val linkClicked = new {
      val v2 = Topic[LinkClickedV2]("linkClickedV2")
      val v3 = Topic[LinkClickedV3]("linkClickedV3")
    }
    val cancellationRequested = new {
      val v2 = Topic[CancellationRequestedV2]("cancellationRequestedV2")
      val v3 = Topic[CancellationRequestedV3]("cancellationRequestedV3")
    }
    val failedCancellation = new {
      val v2 = Topic[FailedCancellationV2]("failedCancellationV2")
      val v3 = Topic[FailedCancellationV3]("failedCancellationV3")
    }
    val cancelled = new {
      val v2 = Topic[CancelledV2]("cancelledV2")
      val v3 = Topic[CancelledV3]("cancelledV3")
    }
    // Both versions carry the same Feedback payload; only the topic names differ.
    val feedback = new {
      val v1 = Topic[Feedback]("feedback")
      val v2 = Topic[Feedback]("feedbackV2")
    }
    // Shapeless HList of every topic above -- keep in sync when adding topics/versions.
    val allTopics = triggered.v3 :: triggered.v4 :: triggered.p0V4 :: composedEmail.v2 :: composedEmail.v3 :: composedEmail.v4 ::
      composedSms.v2 :: composedSms.v3 :: composedSms.v4 :: composedPrint.v1 :: composedPrint.v2 :: failed.v2 ::
      failed.v3 :: issuedForDelivery.v2 :: issuedForDelivery.v3 :: orchestratedEmail.v3 :: orchestratedEmail.v4 ::
      orchestratedSMS.v2 :: orchestratedSMS.v3 :: progressedEmail.v2 :: progressedEmail.v3 :: progressedSMS.v2 ::
      progressedSMS.v3 :: linkClicked.v2 :: linkClicked.v3 :: cancellationRequested.v2 :: cancellationRequested.v3 ::
      failedCancellation.v2 :: failedCancellation.v3 :: cancelled.v2 :: cancelled.v3 :: feedback.v1 :: feedback.v2 :: HNil
  }
}
| ovotech/comms-kafka-serialisation | modules/helpers/src/main/scala/com/ovoenergy/comms/helpers/Kafka.scala | Scala | mit | 4,077 |
package org.sandbox.chat.http
import scala.concurrent.duration.DurationInt
import org.sandbox.chat.ChatServer.Broadcast
import akka.actor.Actor
import akka.actor.ActorRef
import akka.actor.Props
import akka.actor.actorRef2Scala
import akka.event.LoggingReceive
import akka.pattern.ask
import akka.pattern.pipe
import akka.util.Timeout
/**
 * Actor mediating between an HTTP layer and the chat server: it buffers
 * Broadcast messages pushed by the server (drained via GetBroadcasts) and
 * forwards every other message to the server, piping the reply to the caller.
 */
class HttpChatClient private(chatServer: ActorRef) extends Actor {
  // Broadcasts received from the chat server, buffered until the next GetBroadcasts.
  var broadcasts: Seq[Broadcast] = Seq.empty
  import HttpChatClient._
  import context.dispatcher
  implicit val timeout = Timeout(1 second)
  def receive: Actor.Receive = LoggingReceive {
    // Only buffer broadcasts that originate from our chat server.
    case broadcast: Broadcast if sender == chatServer =>
      broadcasts = broadcasts :+ broadcast
    case broadcast: Broadcast => // ignore
    // Drain the buffer and reset it; safe because the actor handles one message at a time.
    case GetBroadcasts =>
      sender ! Broadcasts(broadcasts)
      broadcasts = Seq.empty
    // Anything else is treated as a chat-server command: ask, then pipe the answer back.
    case msg =>
      val future = chatServer ? msg
      future pipeTo sender
  }
}
object HttpChatClient {
  /** Factory for [[HttpChatClient]]; the class constructor is private, so use this. */
  def props(chatServer: ActorRef): Props =
    Props(new HttpChatClient(chatServer))
  sealed trait HttpChatClientMsg
  case object GetBroadcasts extends HttpChatClientMsg
  // NOTE(review): Broadcasts does not extend HttpChatClientMsg -- confirm whether intentional.
  case class Broadcasts(broadcasts: Seq[Broadcast])
}
| hustbill/ScalaDemo | src/main/scala/org/sandbox/chat/http/HttpChatClient.scala | Scala | gpl-2.0 | 1,172 |
// Test fixture: IntelliJ Scala plugin type-inference test data for SCL-5215.
// The /*start*/.../*end*/ markers and the trailing comment encode the expected
// inferred type -- do not change the code or marker positions.
object SCL5215 extends App {
  val subscribersByType = Map("One" -> new OneWorker,
    "Two" -> new TwoWorker)
  val u: Map[String, Worker[_ >: TwoAuth with OneAuth <: Auth, _ >: TwoAuthService with OneAuthService <: AuthService[_ >: TwoAuth with OneAuth <: Auth]]] =
    /*start*/subscribersByType/*end*/
}
abstract class Worker[A <: Auth, S <: AuthService[A]]
class OneWorker extends Worker[OneAuth, OneAuthService]
class TwoWorker extends Worker[TwoAuth, TwoAuthService]
class AuthService[T <: Auth]
class OneAuthService extends AuthService[OneAuth]
class TwoAuthService extends AuthService[TwoAuth]
class OneAuth extends Auth
class TwoAuth extends Auth
class Auth
//Map[String, Worker[_ >: TwoAuth with OneAuth <: Auth, _ >: TwoAuthService with OneAuthService <: AuthService[_ >: TwoAuth with OneAuth <: Auth]]]
// Test fixture: annotator test data for SIP-23 literal types -- each literal
// must conform to Singleton. Do not change the code; the test relies on it.
object Test {
  val a: Singleton = 1
  val b: Singleton = 1L
  val c: Singleton = 1.0
  val d: Singleton = 1.0F
  val e: Singleton = true
  val f: Singleton = 'c'
  val g: Singleton = "foo"
  val h: Singleton = 'foo
  implicitly[1 <:< Singleton]
  implicitly[1L <:< Singleton]
  implicitly[1.0 <:< Singleton]
  implicitly[1.0F <:< Singleton]
  implicitly[true <:< Singleton]
  implicitly['c' <:< Singleton]
  implicitly["foo" <:< Singleton]
  implicitly['foo <:< Singleton]
}
package wav.devtools.sbt.karaf.packaging
import sbt.Keys._
import sbt._
import wav.devtools.karaf.packaging.{FeaturesXml, KarafDistribution}
/** sbt setting/task keys exposed by the Karaf packaging plugin. */
object KarafPackagingKeys {
  import FeaturesXml._
  lazy val featuresXml = taskKey[FeaturesXml]("The project's features repository")
  lazy val featuresFile = taskKey[Option[File]]("Generate features.xml")
  lazy val featuresRequired = settingKey[Set[Dependency]]("Features that will be put in the project feature")
  lazy val featuresRepositories = taskKey[Set[Repository]]("Repositories where `featuresRequired` are specified")
  lazy val featuresSelected = taskKey[Either[Set[Dependency], Set[Feature]]]("Resolved features or unsatisfied feature constraints")
  lazy val featuresProjectBundle = taskKey[Bundle]("The project bundle to add to the project feature")
  lazy val featuresProjectFeature = taskKey[Feature]("The project feature to add to features.xml")
  lazy val featuresAddDependencies = settingKey[Boolean]("EXPERIMENTAL: Add the dependencies of the resolved `featuresRequired` setting to `libraryDependencies`")
  /**
   * Usage hint: makes the use of `.versionAsInProject()` available in pax-exam tests
   */
  lazy val shouldGenerateDependsFile = settingKey[Boolean]("Generate a dependencies.properties file like the `maven-depends-plugin`")
  lazy val karafDistribution = settingKey[KarafDistribution]("The archive and the archive's subdirectory for a karaf distribution")
  lazy val karafSourceDistribution = taskKey[Option[File]]("The source karaf archive")
  lazy val unpackKarafDistribution = taskKey[File]("Unpack the source karaf archive")
}
/** sbt AutoPlugin that wires the default Karaf packaging settings into a project. */
object SbtKarafPackaging extends AutoPlugin {
  object autoImport extends PluginSyntax {
    // Re-exported so build definitions can reference the keys without an explicit import.
    val KarafPackagingKeys = wav.devtools.sbt.karaf.packaging.KarafPackagingKeys
    def defaultKarafPackagingSettings: Seq[Setting[_]] =
      KarafPackagingDefaults.featuresSettings ++
        KarafPackagingDefaults.karafDistributionSettings
  }
  // This plugin activates only for projects that have MavenResolverPlugin enabled.
  override def requires =
    sbt.plugins.MavenResolverPlugin
  override def projectSettings =
    autoImport.defaultKarafPackagingSettings
}
import sbt._
/**
 * sbt 0.7-style project definition for a Lift web application.
 * The Lift dependency version is read from the `liftVersion` project property.
 */
class LiftProject(info: ProjectInfo) extends DefaultWebProject(info) {
  val liftVersion = property[Version]
  // uncomment the following if you want to use the snapshot repo
  //  val scalatoolsSnapshot = ScalaToolsSnapshots
  // If you're using JRebel for Lift development, uncomment
  // this line
  // override def scanDirectories = Nil
  lazy val JavaNet = "Java.net Maven2 Repository" at "http://download.java.net/maven/2/"
  // Replaces (and extends) the inherited dependency set.
  override def libraryDependencies = Set(
    "net.liftweb" %% "lift-webkit" % liftVersion.value.toString % "compile",
    "net.liftweb" %% "lift-mapper" % liftVersion.value.toString % "compile",
    "org.mortbay.jetty" % "jetty" % "6.1.26" % "test",
    "junit" % "junit" % "4.7" % "test",
    "ch.qos.logback" % "logback-classic" % "0.9.26",
    "org.scala-tools.testing" %% "specs" % "1.6.8" % "test",
    "com.h2database" % "h2" % "1.2.147"
  ) ++ super.libraryDependencies
}
| joestein/cronus | project/build/LiftProject.scala | Scala | mit | 928 |
package org.knora.webapi.responders
import java.util.UUID
import org.knora.webapi.{ApplicationLockException, IRI}
import org.scalatest.{Matchers, WordSpec}
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
/**
 * Tests [[IriLocker]]: mutual exclusion, reentrancy, and lock release on
 * failed or throwing tasks. Timing-sensitive -- the sleeps and Await timeouts
 * are part of the test semantics; do not shorten them casually.
 */
class IriLockerSpec extends WordSpec with Matchers {
  import scala.concurrent.ExecutionContext.Implicits.global
  // Sentinel results returned/thrown by the test tasks.
  val SUCCESS = "success"
  val FAILURE = "failure"
  "IriLocker" should {
    "not allow a request to acquire a lock when another request already has it" in {
      // Holds the lock longer than the second request's Await timeout.
      def runLongTask(): Future[String] = Future {
        Thread.sleep(3500)
        SUCCESS
      }
      def runShortTask(): Future[String] = Future(SUCCESS)
      val testIri: IRI = "http://example.org/test1"
      val firstApiRequestID = UUID.randomUUID
      IriLocker.runWithIriLock(
        apiRequestID = firstApiRequestID,
        iri = testIri,
        task = () => runLongTask()
      )
      // Wait a bit to allow the first request to get the lock.
      Thread.sleep(200)
      val secondApiRequestID = UUID.randomUUID
      val secondTaskResultFuture = IriLocker.runWithIriLock(
        apiRequestID = secondApiRequestID,
        iri = testIri,
        task = () => runShortTask()
      )
      val secondTaskFailedWithLockTimeout = try {
        Await.result(secondTaskResultFuture, 3.seconds)
        false
      } catch {
        case ale: ApplicationLockException => true
      }
      assert(secondTaskFailedWithLockTimeout, "Second task did not get a lock timeout")
    }
    "provide reentrant locks" in {
      // Re-acquires the same IRI lock `count` times under a single API request ID.
      def runRecursiveTask(iri: IRI, apiRequestID: UUID, count: Int): Future[String] = {
        if (count > 0) {
          IriLocker.runWithIriLock(
            apiRequestID = apiRequestID,
            iri = iri,
            task = () => runRecursiveTask(iri, apiRequestID, count - 1)
          )
        } else {
          Future(SUCCESS)
        }
      }
      val testIri: IRI = "http://example.org/test2"
      val firstApiRequestID = UUID.randomUUID
      val firstTestResult = Await.result(runRecursiveTask(testIri, firstApiRequestID, 3), 1.second)
      assert(firstTestResult == SUCCESS)
      val secondApiRequestID = UUID.randomUUID
      val secondTestResult = Await.result(runRecursiveTask(testIri, secondApiRequestID, 3), 1.second)
      assert(secondTestResult == SUCCESS)
    }
    "release a lock when a task returns a failed future" in {
      // If succeed is true, returns a successful future, otherwise returns a failed future.
      def runTask(succeed: Boolean): Future[String] = Future {
        if (succeed) {
          SUCCESS
        } else {
          throw new Exception(FAILURE)
        }
      }
      val testIri: IRI = "http://example.org/test3"
      val firstApiRequestID = UUID.randomUUID
      val firstTaskResultFuture = IriLocker.runWithIriLock(
        apiRequestID = firstApiRequestID,
        iri = testIri,
        task = () => runTask(false)
      )
      val firstTaskFailed = try {
        Await.result(firstTaskResultFuture, 1.second)
        false
      } catch {
        case e: Exception => true
      }
      assert(firstTaskFailed, "First task did not fail")
      // The second request must be able to acquire the lock released by the failed task.
      val secondApiRequestID = UUID.randomUUID
      val secondTaskResultFuture = IriLocker.runWithIriLock(
        apiRequestID = secondApiRequestID,
        iri = testIri,
        task = () => runTask(true)
      )
      val secondTaskResult = Await.result(secondTaskResultFuture, 1.second)
      assert(secondTaskResult == SUCCESS, "Second task did not succeed")
    }
    "release a lock when a task throws an exception instead of returning a future" in {
      // If succeed is true, returns a successful future, otherwise throws an exception.
      def runTask(succeed: Boolean): Future[String] = {
        if (succeed) {
          Future(SUCCESS)
        } else {
          throw new Exception(FAILURE)
        }
      }
      val testIri: IRI = "http://example.org/test4"
      val firstApiRequestID = UUID.randomUUID
      val firstTaskResultFuture = IriLocker.runWithIriLock(
        apiRequestID = firstApiRequestID,
        iri = testIri,
        task = () => runTask(false)
      )
      val firstTaskFailed = try {
        Await.result(firstTaskResultFuture, 1.second)
        false
      } catch {
        case e: Exception => true
      }
      assert(firstTaskFailed, "First task did not fail")
      // As above: the lock must have been released despite the synchronous throw.
      val secondApiRequestID = UUID.randomUUID
      val secondTaskResultFuture = IriLocker.runWithIriLock(
        apiRequestID = secondApiRequestID,
        iri = testIri,
        task = () => runTask(true)
      )
      val secondTaskResult = Await.result(secondTaskResultFuture, 1.second)
      assert(secondTaskResult == SUCCESS, "Second task did not succeed")
    }
  }
}
| nie-ine/Knora | webapi/src/test/scala/org/knora/webapi/responders/IriLockerSpec.scala | Scala | agpl-3.0 | 5,602 |
/**
* Author: Fredrik Sommar
*/
package io.github.fsommar.chameneos
import akka.actor.{ActorSystem, Props}
import akka.event.Logging
import lacasa.akka.actor.{Actor, ActorRef}
import lacasa.Safe
/**
 * Chameneos benchmark: `numChameneos` chameneo actors meet pairwise at a
 * "mall" actor, each meeting recolouring both participants, until the mall's
 * meeting budget (`numMeetings`) is exhausted and all chameneos fade.
 */
object Chameneos {
  def main(args: Array[String]): Unit = {
    val system = ActorSystem("Chameneos")
    val mallActor: ActorRef = system.actorOf(Props(
      new ChameneosMallActor(
        /* ChameneosConfig.numMeetings */ 2000,
        /* ChameneosConfig.numChameneos */ 10)))
    // Fixed grace period; completion is not awaited explicitly.
    Thread.sleep(2000)
    system.terminate()
  }
  // LaCasa `Safe` evidence so these messages may be sent between actors.
  object Message {
    implicit val MessageIsSafe = new Safe[Message] {}
    implicit val MeetMsgIsSafe = new Safe[MeetMsg] {}
    implicit val ChangeMsgIsSafe = new Safe[ChangeMsg] {}
    implicit val MeetingCountMsgIsSafe = new Safe[MeetingCountMsg] {}
    implicit val ExitMsgIsSafe = new Safe[ExitMsg] {}
  }
  sealed trait Message
  // A chameneo requesting a meeting (sent to the mall, forwarded to a peer).
  case class MeetMsg(color: Color, sender: ActorRef) extends Message
  // Tells the receiving chameneo its new colour after a meeting.
  case class ChangeMsg(color: Color, sender: ActorRef) extends Message
  // A faded chameneo reporting how many meetings it attended.
  case class MeetingCountMsg(count: Int, sender: ActorRef) extends Message
  // Tells a chameneo to fade and stop.
  case class ExitMsg(sender: ActorRef) extends Message
  private class ChameneosMallActor(numMeetings: Int, numChameneos: Int) extends Actor {
    val log = Logging(context.system, this)
    var numMeetingsLeft: Int = numMeetings
    var sumMeetings: Int = 0
    var numFaded: Int = 0
    // The chameneo currently waiting for a partner, if any.
    var waitingChameneo: Option[ActorRef] = None
    val colors = List(YELLOW, BLUE, RED)
    // Spawn the chameneos up front, cycling through the three colours.
    1 to numChameneos foreach { i =>
      val color = colors(i % 3)
      val chameneoActor: ActorRef = context.system.actorOf(Props(
        new ChameneoActor(self, i, color)))
    }
    override def receive: Receive = {
      case message: MeetingCountMsg =>
        numFaded += 1
        sumMeetings += message.count
        // Stop once every chameneo has reported in.
        if (numFaded == numChameneos) {
          log.info("stopping")
          context.stop(self)
        }
      case message: MeetMsg =>
        if (numMeetingsLeft > 0) {
          if (waitingChameneo == None) {
            waitingChameneo = Some(message.sender)
          } else {
            // Pair the waiting chameneo with the new arrival.
            numMeetingsLeft -= 1
            waitingChameneo.get ! message
            waitingChameneo = None
          }
        } else {
          // Budget exhausted: tell the requester to fade.
          message.sender ! new ExitMsg(self)
        }
      case _ => ???
    }
  }
  private class ChameneoActor(mall: ActorRef, id: Int, var color: Color) extends Actor {
    val log = Logging(context.system, this)
    private var meetings: Int = 0
    // Announce availability to the mall immediately on startup.
    mall ! new MeetMsg(color, self)
    override def receive: Receive = {
      // Met a peer via the mall: both switch to the complement colour.
      case message: MeetMsg =>
        color = color.complement(message.color)
        meetings += 1
        message.sender ! new ChangeMsg(color, self)
        mall ! new MeetMsg(color, self)
      // The peer tells us the resulting colour of the meeting.
      case message: ChangeMsg =>
        color = message.color
        meetings += 1
        mall ! new MeetMsg(color, self)
      case message: ExitMsg =>
        color = FADED
        log.info(s"Chameneo #${id} is now a faded color.")
        message.sender ! new MeetingCountMsg(meetings, self)
        context.stop(self)
      case _ => ???
    }
  }
}
/** Colours used in the chameneos benchmark, plus the terminal FADED state. */
sealed trait Color {
  /**
   * The colour resulting from a meeting between this colour and `otherColor`:
   * FADED is absorbing, identical colours are preserved, and two distinct
   * non-faded colours yield the remaining third colour.
   */
  def complement(otherColor: Color): Color = (this, otherColor) match {
    case (FADED, _) | (_, FADED)                      => FADED
    case (RED, RED) | (YELLOW, YELLOW) | (BLUE, BLUE) => this
    case (RED, YELLOW) | (YELLOW, RED)                => BLUE
    case (RED, BLUE) | (BLUE, RED)                    => YELLOW
    case (YELLOW, BLUE) | (BLUE, YELLOW)              => RED
  }
}
case object RED extends Color
case object YELLOW extends Color
case object BLUE extends Color
case object FADED extends Color
| fsommar/lakka | src/main/scala/io/github/fsommar/chameneos/Chameneos.scala | Scala | gpl-2.0 | 3,864 |
package jp.hotbrain.makecsv
import org.joda.time.DateTimeZone
/**
 * Resolves time-zone strings to joda-time [[DateTimeZone]]s.
 *
 * Accepts either a zone ID / known abbreviation (e.g. "JST") or a numeric
 * "H:MM" offset (e.g. "9:00", "-5:30"). Absent input resolves to UTC.
 */
object DateTimeZoneDic {
  final val UTC: DateTimeZone = DateTimeZone.forID("UTC")
  // Abbreviations not understood by DateTimeZone.forID, mapped to canonical IDs.
  final val tz_id_dic: Map[String, String] = Map[String, String](
    "JST" -> "Asia/Tokyo")
  /**
   * Resolves `str` to a zone; `None` yields UTC.
   *
   * @throws NumberFormatException if an "H:MM" offset has non-numeric parts
   * @throws IllegalArgumentException if a zone ID is unknown or the offset is out of range
   */
  def getDateTimeZone(str: Option[String]): DateTimeZone =
    str.fold(UTC) { s =>
      val hm = s.split(':')
      if (hm.length == 1) {
        DateTimeZone.forID(tz_id_dic.getOrElse(s, s))
      } else {
        // Take the sign from the raw hour field: Integer.parseInt("-0") is 0,
        // so the previous `hour < 0` test lost the sign for offsets like "-0:30".
        val negative = hm(0).startsWith("-")
        val hour = hm(0).toInt
        val min = hm(1).toInt * (if (negative) -1 else 1)
        // joda-time requires the minutes' sign to agree with the hours'.
        DateTimeZone.forOffsetHoursMinutes(hour, min)
      }
    }
}
| HidekiTak/make_csv | src/main/scala/jp/hotbrain/makecsv/DateTimeZoneDic.scala | Scala | apache-2.0 | 762 |
package upickle
import utest._
import acyclic.file
/**
 * Created by haoyi on 4/22/14.
 */
// Concrete round-trip helpers for the two built-in pickler APIs.
object TestUtil extends TestUtil[upickle.default.type](upickle.default)
object LegacyTestUtil extends TestUtil[upickle.legacy.type](upickle.legacy)
/**
 * Round-trip helpers shared by the upickle test suites: serialise a value with
 * the given `api`, read it back, and check JSON/binary agreement.
 */
class TestUtil[Api <: upickle.Api](val api: Api){
  /** Round-trips `t` and checks it against each expected JSON string in `s`. */
  def rw[T: api.Reader: api.Writer](t: T, s: String*) = {
    rwk[T, T](t, s:_*)(x => x)
  }
  /** Like [[rw]] but skips the binary/JSON equivalence check. */
  def rwNoBinaryJson[T: api.Reader: api.Writer](t: T, s: String*) = {
    rwk[T, T](t, s:_*)(x => x, checkBinaryJson = false)
  }
  /** Like [[rw]] with unicode escaping requested. */
  def rwEscape[T: api.Reader: api.Writer](t: T, s: String*) = {
    rwk[T, T](t, s:_*)(x => x, escapeUnicode = true)
  }
  /**
   * Core round-trip check; `normalize` maps values to a comparable form.
   * NOTE(review): `escapeUnicode` is accepted but never used in this body --
   * confirm whether it should be forwarded to `api.write`.
   */
  def rwk[T: api.Reader: api.Writer, V](t: T, sIn: String*)
         (normalize: T => V,
          escapeUnicode: Boolean = false,
          checkBinaryJson: Boolean = true) = {
    val writtenT = api.write(t)
    // Test JSON round tripping
    val strings = sIn.map(_.trim)
    for (s <- strings) {
      val readS = api.read[T](s)
      val normalizedReadString = normalize(readS)
      val normalizedValue = normalize(t)
      assert(normalizedReadString == normalizedValue)
    }
    val normalizedReadWrittenT = normalize(api.read[T](writtenT))
    val normalizedT = normalize(t)
    assert(normalizedReadWrittenT == normalizedT)
    // Test binary round tripping
    val writtenBinary = api.writeBinary(t)
    // println(upickle.core.Util.bytesToString(writtenBinary))
    val roundTrippedBinary = api.readBinary[T](writtenBinary)
    // Arrays compare by reference, so compare their contents as Seqs instead.
    (roundTrippedBinary, t) match{
      case (lhs: Array[_], rhs: Array[_]) => assert(lhs.toSeq == rhs.toSeq)
      case _ => assert(roundTrippedBinary == t)
    }
    // Test binary-JSON equivalence
    if (checkBinaryJson){
      val rewrittenBinary = api.writeBinary(roundTrippedBinary)
      val writtenBinaryStr = upickle.core.Util.bytesToString(writtenBinary)
      val rewrittenBinaryStr = upickle.core.Util.bytesToString(rewrittenBinary)
      assert(writtenBinaryStr == rewrittenBinaryStr)
    }
  }
}
/*
* Distributed as part of Scalala, a linear algebra library.
*
* Copyright (C) 2008- Daniel Ramage
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110 USA
*/
package scalala;
package tensor;
package dense;
import domain.TableDomain;
import scalala.scalar.Scalar;
/**
 * A matrix backed by an array of arrays of values.
 * Assumes row-major storage.
 *
 * @param data row-major backing storage; must be non-empty with equal-length rows.
 * @author dramage
 */
class ArrayArrayMatrix[B](val data : Array[Array[B]])
(implicit override val scalar : Scalar[B])
extends mutable.Matrix[B] with mutable.MatrixLike[B,ArrayArrayMatrix[B]] {
  // Reject an empty outer array up front: numCols (which reads data(0)) would
  // otherwise fail later, and the previous ragged-row check (distinct.size != 1)
  // mis-reported this case as "rows of different length".
  if (data.isEmpty) {
    throw new IllegalArgumentException("Matrix must contain at least one row");
  }
  // Allocation-free ragged-row check (vs. map(_.length).distinct).
  if (data.exists(_.length != data(0).length)) {
    throw new IllegalArgumentException("All rows must be same length");
  }
  /** Number of rows, i.e. the outer array's length. */
  override def numRows =
    data.length;
  /** Number of columns, i.e. the common row length. */
  override def numCols =
    data(0).length;
  /** Value at row i, column j (row-major). */
  override def apply(i : Int, j : Int) =
    data(i)(j);
  /** Replaces the value at row i, column j. */
  override def update(i : Int, j : Int, v : B) =
    data(i)(j) = v;
}
| scalala/Scalala | src/main/scala/scalala/tensor/dense/ArrayArrayMatrix.scala | Scala | lgpl-2.1 | 1,584 |
package org.programmiersportgruppe.redis.client
import java.net.InetSocketAddress
import scala.concurrent.{Await, TimeoutException}
import scala.concurrent.duration._
import scala.util.Failure
import akka.util.ByteString
import org.programmiersportgruppe.redis._
import org.programmiersportgruppe.redis.commands._
import org.programmiersportgruppe.redis.test.ActorSystemAcceptanceTest
/**
 * End-to-end acceptance tests for [[RedisClient]]: each scenario boots a real
 * Redis server plus an actor system and drives the client through connecting,
 * executing commands, recovering from server outages, and shutting down.
 */
class RedisClientAcceptanceTest extends ActorSystemAcceptanceTest {
  import ActorSystemAcceptanceTest._
  behavior of "A Redis client"
  // Basic SET/GET round trip, repeated for several server addressing modes.
  for ((description, address) <- Seq(
    "over IPv4" -> IPv4LoopbackAddress.map(new InetSocketAddress(_, redisServerPort)),
    "over IPv6" -> IPv6LoopbackAddress.map(new InetSocketAddress(_, redisServerPort)),
    "with unresolved InetSocketAddress" -> Some(InetSocketAddress.createUnresolved("localhost", redisServerPort))
  ))
    it should s"return stored keys when connecting $description" in {
      require(address.isDefined)
      withRedisServer(address.get) { serverAddress =>
        withActorSystem { implicit actorSystem =>
          implicit val client = new RedisClient(ConnectionPoolSettings(serverAddress, 1), 3.seconds, actorSystem)
          await(client.completeWhenConnected())
          val retrieved = for {
            s <- SET(Key("A key"), ByteString("A value")).execute
            g <- GET(Key("A key")).executeString
          } yield g
          assertResult(Some("A value")) { await(retrieved) }
          await(client.shutdown())
        }
      }
    }
  it should "delete stored keys" in {
    withRedisServer { serverAddress =>
      withActorSystem { implicit actorSystem =>
        implicit val client = new RedisClient(ConnectionPoolSettings(serverAddress, 1), 3.seconds, actorSystem)
        await(client.completeWhenConnected())
        // DEL yields the number of keys removed; exactly one key was set.
        val deleted = for {
          s <- SET(Key("A key"), ByteString("A value")).execute
          d <- DEL(Key("A key")).executeLong
        } yield d
        assertResult(1) { await(deleted) }
        await(client.shutdown())
      }
    }
  }
  it should "not hang forever on construction when unable to reach the server" in {
    withActorSystem { implicit actorSystem =>
      // Port 1 is expected to be unreachable; construction must return promptly.
      implicit val client = within(100.milliseconds) {
        new RedisClient(ConnectionPoolSettings(new InetSocketAddress("localhost", 1), 1), 3.seconds, actorSystem)
      }
      intercept[TimeoutException] {
        await(client.completeWhenConnected(timeout = 1.second))
      }
      val setCommand = SET(Key("A key"), ByteString("A value"))
      val future = setCommand.execute
      Await.ready(future, 2.seconds)
      // With no live connection the pool fails the request with EmptyPoolException.
      val Some(Failure(e: RequestExecutionException)) = future.value
      e.cause shouldBe EmptyPoolException(setCommand)
      await(client.shutdown())
    }
  }
  it should "recover from the server going down abruptly" in {
    withActorSystem { implicit actorSystem =>
      // Start a server, store a key, then let withRedisServer tear the server down.
      val (serverAddress, originalClient) = withRedisServer { serverAddress =>
        implicit val client = new RedisClient(ConnectionPoolSettings(serverAddress, 1), 3.seconds, actorSystem)
        await(client.completeWhenConnected())
        assertResult(RSimpleString.OK) {
          await(SET(Key("A key"), ByteString(1)).execute)
        }
        (serverAddress, client)
      }
      implicit val client = originalClient
      // Server gone: commands must fail rather than hang.
      intercept[RequestExecutionException] {
        await(SET(Key("A key"), ByteString(2)).execute)
      }
      // Restart on the same address: the client must reconnect and work again.
      withRedisServer(serverAddress) { _ =>
        await(client.completeWhenConnected())
        assertResult(RSimpleString.OK) {
          await(SET(Key("A key"), ByteString(4)).execute)
        }
        await(client.shutdown())
      }
    }
  }
  it should "recover from the server going down nicely" in {
    withActorSystem { implicit actorSystem =>
      implicit var client: RedisClient = null
      withRedisServer { serverAddress =>
        client = new RedisClient(ConnectionPoolSettings(serverAddress, 1), 3.seconds, actorSystem)
        await(client.completeWhenConnected(timeout = 1.second))
        // Ask the server to stop gracefully via the Redis SHUTDOWN command.
        await(SHUTDOWN().executeConnectionClose)
      }
      withRedisServer { serverAddress =>
        await(client.completeWhenConnected(timeout = 1.second))
        assertResult(RSimpleString.OK) {
          await(SET(Key("A key"), ByteString(4)).execute)
        }
        await(client.shutdown())
      }
    }
  }
  it should "send connection setup commands once per client" in {
    withRedisServer { serverAddress =>
      withActorSystem { implicit actorSystem =>
        // Pool of three connections, each running the APPEND setup command once;
        // hence exactly three "La" fragments are expected.
        implicit val client = new RedisClient(ConnectionPoolSettings(serverAddress, 3), 3.seconds, actorSystem, Seq(APPEND(Key("song"), ByteString("La"))))
        await(client.completeWhenConnected())
        eventually {
          assertResult(Some("LaLaLa")) {
            await(GET(Key("song")).executeString)
          }
        }
        await(client.shutdown())
      }
    }
  }
  it should "return the substring of the value stored at key" in {
    withRedisServer { serverAddress =>
      withActorSystem { implicit actorSystem =>
        implicit val client = new RedisClient(ConnectionPoolSettings(serverAddress, 1), 3.seconds, actorSystem)
        await(client.completeWhenConnected())
        // GETRANGE with inclusive start/end offsets.
        val getRange = for {
          s <- SET(Key("A key"), ByteString("This is a string")).execute
          d <- GETRANGE(Key("A key"), 0, 3).executeString
        } yield d
        assertResult(Some("This")) { await(getRange) }
        await(client.shutdown())
      }
    }
  }
  it should "return the substring of the value stored at key for negative range" in {
    withRedisServer { serverAddress =>
      withActorSystem { implicit actorSystem =>
        implicit val client = new RedisClient(ConnectionPoolSettings(serverAddress, 1), 3.seconds, actorSystem)
        await(client.completeWhenConnected())
        // Negative offsets count from the end of the value, per Redis GETRANGE.
        val getRange = for {
          s <- SET(Key("A key"), ByteString("This is a string")).execute
          d <- GETRANGE(Key("A key"), -3, -1).executeString
        } yield d
        assertResult(Some("ing")) { await(getRange) }
        await(client.shutdown())
      }
    }
  }
  it should "return the substring of the value stored at key for large ranges" in {
    withRedisServer { serverAddress =>
      withActorSystem { implicit actorSystem =>
        implicit val client = new RedisClient(ConnectionPoolSettings(serverAddress, 1), 3.seconds, actorSystem)
        await(client.completeWhenConnected())
        // An end index past the end of the value is clamped to the value length.
        val getRange = for {
          s <- SET(Key("A key"), ByteString("This is a string")).execute
          d <- GETRANGE(Key("A key"), 10, 100).executeString
        } yield d
        assertResult(Some("string")) { await(getRange) }
        await(client.shutdown())
      }
    }
  }
}
| programmiersportgruppe/akre | client/src/test/scala/org/programmiersportgruppe/redis/client/RedisClientAcceptanceTest.scala | Scala | mit | 6,730 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer (t3l@threelights.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe
import scala.collection._
/**
 * Identifies a compute platform on which tensors can reside. Concrete
 * platforms register their tensor conversion functions with the
 * [[Platform]] companion object (see `Platform.register`).
 */
abstract class Platform
  extends Serializable
    with JsonSerializable {

  // ---------------------------------------------------------------------------
  //    Conversion related
  // ---------------------------------------------------------------------------
  /** Label used when rendering this platform on a graph edge. */
  def toEdgeLabel
  : String

}
/**
 * Registry of per-platform tensor conversion functions.
 *
 * Platform implementations call [[register]] at initialization time,
 * providing two conversions:
 *  - `toFn`:     always produces a tensor on the target platform;
 *  - `asOrToFn`: reuses the input tensor when it is already on the target
 *                platform, otherwise converts it.
 */
object Platform {

  // The maps themselves are mutable, so `val` suffices (previously `var`).
  final private val toFunctions
  : mutable.Map[Platform, (Tensor, Any) => Tensor] = mutable.Map.empty

  final private val asOrToFunctions
  : mutable.Map[Platform, (Tensor, Any) => Tensor] = mutable.Map.empty

  /** Registers both conversion variants for the given platform. */
  final private[latrobe] def register(platform: Platform,
                                      toFn: (Tensor, Any) => Tensor,
                                      asOrToFn: (Tensor, Any) => Tensor)
  : Unit = {
    toFunctions += Tuple2(platform, toFn)
    asOrToFunctions += Tuple2(platform, asOrToFn)
  }

  /** Removes both conversion variants for a previously registered platform. */
  final private[latrobe] def unregister(platform: Platform)
  : Unit = {
    toFunctions.remove(platform)
    asOrToFunctions.remove(platform)
  }

  /**
   * Converts `tensor` to `platform`, always producing a new tensor.
   * Throws NoSuchElementException if the platform was never registered.
   */
  final def to(platform: Platform, tensor: Tensor, context: Any)
  : Tensor = {
    val fn = toFunctions(platform)
    fn(tensor, context)
  }

  /**
   * Converts `tensor` to `platform`, reusing it when already resident there.
   *
   * Bug fix: this previously looked up `toFunctions`, which made `asOrTo`
   * behave exactly like `to` and silently ignored the registered `asOrToFn`.
   */
  final def asOrTo(platform: Platform, tensor: Tensor, context: Any)
  : Tensor = {
    val fn = asOrToFunctions(platform)
    fn(tensor, context)
  }

  // Built-in registration for the JVM platform.
  register(
    JVM,
    (tensor, context) => tensor.toRealArrayTensor,
    (tensor, context) => tensor.asOrToRealArrayTensor
  )

}
/** Base class for platform companion objects; inherits JSON (de)serialization support. */
abstract class PlatformCompanion
  extends JsonSerializableCompanion {
}
| bashimao/ltudl | base/src/main/scala/edu/latrobe/Platform.scala | Scala | apache-2.0 | 2,226 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
import sbt._
import Keys._
import buildinfo.BuildInfo
/**
 * Central dependency definitions for the Play Framework build: pinned library
 * versions plus the dependency lists consumed by the individual sbt
 * sub-projects. Pure configuration data - keep edits minimal and deliberate.
 */
object Dependencies {
  // Akka versions may be overridden from the command line (-Dakka.version=...),
  // which CI uses to build against alternative Akka releases.
  val akkaVersion: String = sys.props.getOrElse("akka.version", "2.6.18+21-4fb7bd9b-SNAPSHOT")
  val akkaHttpVersion = sys.props.getOrElse("akka.http.version", "10.2.7")
  val sslConfig = "com.typesafe" %% "ssl-config-core" % "0.6.0"
  val playJsonVersion = "2.10.0-RC5"
  val logback = "ch.qos.logback" % "logback-classic" % "1.2.10"
  // specs2 test stack, shared by most sub-projects.
  val specs2Version = "4.13.1"
  val specs2Deps = Seq(
    "specs2-core",
    "specs2-junit",
    "specs2-mock"
  ).map("org.specs2" %% _ % specs2Version)
  val specsMatcherExtra = "org.specs2" %% "specs2-matcher-extra" % specs2Version
  val scalacheckDependencies = Seq(
    "org.specs2" %% "specs2-scalacheck" % specs2Version % Test,
    "org.scalacheck" %% "scalacheck" % "1.15.4" % Test
  )
  // Jackson JSON stack; all modules must share a single version.
  val jacksonVersion = "2.11.4"
  val jacksonDatabindVersion = jacksonVersion
  val jacksonDatabind = Seq("com.fasterxml.jackson.core" % "jackson-databind" % jacksonDatabindVersion)
  val jacksons = Seq(
    "com.fasterxml.jackson.core" % "jackson-core",
    "com.fasterxml.jackson.core" % "jackson-annotations",
    "com.fasterxml.jackson.datatype" % "jackson-datatype-jdk8",
    "com.fasterxml.jackson.datatype" % "jackson-datatype-jsr310"
  ).map(_ % jacksonVersion) ++ jacksonDatabind
  val playJson = "com.typesafe.play" %% "play-json" % playJsonVersion
  val slf4jVersion = "1.7.32"
  val slf4j = Seq("slf4j-api", "jul-to-slf4j", "jcl-over-slf4j").map("org.slf4j" % _ % slf4jVersion)
  val slf4jSimple = "org.slf4j" % "slf4j-simple" % slf4jVersion
  val guava = "com.google.guava" % "guava" % "31.0.1-jre"
  val findBugs = "com.google.code.findbugs" % "jsr305" % "3.0.2" // Needed by guava
  val mockitoAll = "org.mockito" % "mockito-core" % "4.2.0"
  val h2database = "com.h2database" % "h2" % "2.0.206"
  val derbyDatabase = "org.apache.derby" % "derby" % "10.14.2.0"
  val acolyteVersion = "1.0.57"
  val acolyte = "org.eu.acolyte" % "jdbc-driver" % acolyteVersion
  // JJWT (JSON Web Token) modules; jjwt-jackson excludes Jackson so the
  // pinned Jackson version above wins.
  val jjwtVersion = "0.11.2"
  val jjwts = Seq(
    "io.jsonwebtoken" % "jjwt-api",
    "io.jsonwebtoken" % "jjwt-impl"
  ).map(_ % jjwtVersion) ++ Seq(
    ("io.jsonwebtoken" % "jjwt-jackson" % jjwtVersion).excludeAll(ExclusionRule("com.fasterxml.jackson.core"))
  )
  val jdbcDeps = Seq(
    ("com.zaxxer" % "HikariCP" % "4.0.3")
      .exclude("org.slf4j", "slf4j-api"), // fetches slf4j 2.0.0-alpha1, but Play (still) uses 1.7, see https://github.com/brettwooldridge/HikariCP/pull/1669
    "com.googlecode.usc" % "jdbcdslog" % "1.0.6.2",
    h2database % Test,
    acolyte % Test,
    logback % Test,
    "tyrex" % "tyrex" % "1.0.1"
  ) ++ specs2Deps.map(_ % Test)
  val jpaDeps = Seq(
    "org.hibernate.javax.persistence" % "hibernate-jpa-2.1-api" % "1.0.2.Final",
    "org.hibernate" % "hibernate-core" % "5.4.32.Final" % "test"
  )
  // scala-reflect must match the build's Scala version, hence a def.
  def scalaReflect(scalaVersion: String) = "org.scala-lang" % "scala-reflect" % scalaVersion % "provided"
  val scalaJava8Compat = "org.scala-lang.modules" %% "scala-java8-compat" % "1.0.2"
  val scalaParserCombinators = Seq("org.scala-lang.modules" %% "scala-parser-combinators" % "1.1.2")
  val springFrameworkVersion = "5.3.14"
  val javaDeps = Seq(
    scalaJava8Compat,
    // Used by the Java routing DSL
    "net.jodah" % "typetools" % "0.6.3"
  ) ++ specs2Deps.map(_ % Test)
  val joda = Seq(
    "joda-time" % "joda-time" % "2.10.13",
    "org.joda" % "joda-convert" % "2.2.2"
  )
  // Spring modules are trimmed to the minimum needed for Java forms binding;
  // the excludes prevent pulling in the rest of the Spring framework.
  val javaFormsDeps = Seq(
    "org.hibernate.validator" % "hibernate-validator" % "6.2.1.Final",
    ("org.springframework" % "spring-context" % springFrameworkVersion)
      .exclude("org.springframework", "spring-aop")
      .exclude("org.springframework", "spring-beans")
      .exclude("org.springframework", "spring-core")
      .exclude("org.springframework", "spring-expression")
      .exclude("org.springframework", "spring-asm"),
    ("org.springframework" % "spring-core" % springFrameworkVersion)
      .exclude("org.springframework", "spring-asm")
      .exclude("org.springframework", "spring-jcl")
      .exclude("commons-logging", "commons-logging"),
    ("org.springframework" % "spring-beans" % springFrameworkVersion)
      .exclude("org.springframework", "spring-core")
  ) ++ specs2Deps.map(_ % Test)
  val junitInterface = "com.github.sbt" % "junit-interface" % "0.13.3"
  val junit = "junit" % "junit" % "4.13.2"
  val javaTestDeps = Seq(
    junit,
    junitInterface,
    "org.easytesting" % "fest-assert" % "1.4",
    mockitoAll,
    logback
  ).map(_ % Test)
  val guiceVersion = "5.0.1"
  val guiceDeps = Seq(
    "com.google.inject" % "guice" % guiceVersion,
    "com.google.inject.extensions" % "guice-assistedinject" % guiceVersion
  )
  // Core runtime dependencies of the play project itself.
  def runtime(scalaVersion: String) =
    slf4j ++
      Seq("akka-actor", "akka-actor-typed", "akka-slf4j", "akka-serialization-jackson")
        .map("com.typesafe.akka" %% _ % akkaVersion) ++
      Seq("akka-testkit", "akka-actor-testkit-typed")
        .map("com.typesafe.akka" %% _ % akkaVersion % Test) ++
      jacksons ++
      jjwts ++
      Seq(
        playJson,
        guava,
        "jakarta.transaction" % "jakarta.transaction-api" % "2.0.0",
        "javax.inject" % "javax.inject" % "1",
        scalaReflect(scalaVersion),
        scalaJava8Compat,
        sslConfig
      ) ++ scalaParserCombinators ++ specs2Deps.map(_ % Test) ++ javaTestDeps
  val nettyVersion = "4.1.72.Final"
  val netty = Seq(
    "com.typesafe.netty" % "netty-reactive-streams-http" % "2.0.5",
    ("io.netty" % "netty-transport-native-epoll" % nettyVersion).classifier("linux-x86_64")
  ) ++ specs2Deps.map(_ % Test)
  val akkaHttp = "com.typesafe.akka" %% "akka-http-core" % akkaHttpVersion
  val akkaHttp2Support = "com.typesafe.akka" %% "akka-http2-support" % akkaHttpVersion
  val cookieEncodingDependencies = slf4j
  val jimfs = "com.google.jimfs" % "jimfs" % "1.2"
  val okHttp = "com.squareup.okhttp3" % "okhttp" % "4.9.3"
  def routesCompilerDependencies(scalaVersion: String) = {
    specs2Deps.map(_ % Test) ++ Seq(specsMatcherExtra % Test) ++ scalaParserCombinators ++ (logback % Test :: Nil)
  }
  // Helper for declaring sbt plugins cross-built against the right sbt/Scala pair.
  private def sbtPluginDep(moduleId: ModuleID, sbtVersion: String, scalaVersion: String) = {
    Defaults.sbtPluginExtra(
      moduleId,
      CrossVersion.binarySbtVersion(sbtVersion),
      CrossVersion.binaryScalaVersion(scalaVersion)
    )
  }
  val playFileWatch = "com.lightbend.play" %% "play-file-watch" % "1.1.16"
  def runSupportDependencies(sbtVersion: String): Seq[ModuleID] = {
    Seq(playFileWatch, logback % Test) ++ specs2Deps.map(_ % Test)
  }
  val typesafeConfig = "com.typesafe" % "config" % "1.4.1"
  def sbtDependencies(sbtVersion: String, scalaVersion: String) = {
    def sbtDep(moduleId: ModuleID) = sbtPluginDep(moduleId, sbtVersion, scalaVersion)
    Seq(
      scalaReflect(scalaVersion),
      typesafeConfig,
      slf4jSimple,
      playFileWatch,
      sbtDep("com.typesafe.play" % "sbt-twirl" % BuildInfo.sbtTwirlVersion),
      sbtDep("com.github.sbt" % "sbt-native-packager" % BuildInfo.sbtNativePackagerVersion),
      sbtDep("com.typesafe.sbt" % "sbt-web" % "1.4.4"),
      sbtDep("com.typesafe.sbt" % "sbt-js-engine" % "1.2.3"),
      logback % Test
    ) ++ specs2Deps.map(_ % Test)
  }
  val playdocWebjarDependencies = Seq(
    "org.webjars" % "jquery" % "3.6.0" % "webjars",
    "org.webjars" % "prettify" % "4-Mar-2013-1" % "webjars"
  )
  val playDocVersion = "2.1.0"
  val playDocsDependencies = Seq(
    "com.typesafe.play" %% "play-doc" % playDocVersion
  ) ++ playdocWebjarDependencies
  val streamsDependencies = Seq(
    "org.reactivestreams" % "reactive-streams" % "1.0.3",
    "com.typesafe.akka" %% "akka-stream" % akkaVersion,
    scalaJava8Compat
  ) ++ specs2Deps.map(_ % Test) ++ javaTestDeps
  val playServerDependencies = specs2Deps.map(_ % Test) ++ Seq(
    guava % Test,
    logback % Test
  )
  val clusterDependencies = Seq(
    "com.typesafe.akka" %% "akka-cluster-sharding-typed" % akkaVersion
  )
  val fluentleniumVersion = "3.10.1"
  // This is the selenium version compatible with the FluentLenium version declared above.
  // See http://mvnrepository.com/artifact/org.fluentlenium/fluentlenium-core/3.10.1
  val seleniumVersion = "3.141.59"
  val testDependencies = Seq(junit, junitInterface, guava, findBugs, logback) ++ Seq(
    ("org.fluentlenium" % "fluentlenium-core" % fluentleniumVersion).exclude("org.jboss.netty", "netty"),
    // htmlunit-driver uses an open range to selenium dependencies. This is slightly
    // slowing down the build. So the open range deps were removed and we can re-add
    // them using a specific version. Using an open range is also not good for the
    // local cache.
    ("org.seleniumhq.selenium" % "htmlunit-driver" % "2.56.0").excludeAll(
      ExclusionRule("org.seleniumhq.selenium", "selenium-api"),
      ExclusionRule("org.seleniumhq.selenium", "selenium-support")
    ),
    "org.seleniumhq.selenium" % "selenium-api" % seleniumVersion,
    "org.seleniumhq.selenium" % "selenium-support" % seleniumVersion,
    "org.seleniumhq.selenium" % "selenium-firefox-driver" % seleniumVersion
  ) ++ guiceDeps ++ specs2Deps.map(_ % Test)
  val playCacheDeps = specs2Deps.map(_ % Test) :+ logback % Test
  // Cache SPI plus the two supported cache implementations (ehcache, caffeine).
  val jcacheApi = Seq(
    "javax.cache" % "cache-api" % "1.1.1"
  )
  val ehcacheVersion = "2.10.9.2"
  val playEhcacheDeps = Seq(
    "net.sf.ehcache" % "ehcache" % ehcacheVersion,
    "org.ehcache" % "jcache" % "1.0.1"
  ) ++ jcacheApi
  val caffeineVersion = "2.9.3"
  val playCaffeineDeps = Seq(
    "com.github.ben-manes.caffeine" % "caffeine" % caffeineVersion,
    "com.github.ben-manes.caffeine" % "jcache" % caffeineVersion
  ) ++ jcacheApi
  val playWsStandaloneVersion = "2.2.0-M1"
  val playWsDeps = Seq(
    "com.typesafe.play" %% "play-ws-standalone" % playWsStandaloneVersion,
    "com.typesafe.play" %% "play-ws-standalone-xml" % playWsStandaloneVersion,
    "com.typesafe.play" %% "play-ws-standalone-json" % playWsStandaloneVersion,
    // Update transitive Akka version as needed:
    "com.typesafe.akka" %% "akka-stream" % akkaVersion
  ) ++ (specs2Deps :+ specsMatcherExtra).map(_ % Test) :+ mockitoAll % Test
  // Must use a version of ehcache that supports jcache 1.0.0
  val playAhcWsDeps = Seq(
    "com.typesafe.play" %% "play-ahc-ws-standalone" % playWsStandaloneVersion,
    "com.typesafe.play" % "shaded-asynchttpclient" % playWsStandaloneVersion,
    "com.typesafe.play" % "shaded-oauth" % playWsStandaloneVersion,
    "com.github.ben-manes.caffeine" % "jcache" % caffeineVersion % Test,
    "net.sf.ehcache" % "ehcache" % ehcacheVersion % Test,
    "org.ehcache" % "jcache" % "1.0.1" % Test
  ) ++ jcacheApi
  val playDocsSbtPluginDependencies = Seq(
    "com.typesafe.play" %% "play-doc" % playDocVersion
  )
  val salvationVersion = "2.7.2"
  val playFilterDeps = Seq(
    "com.shapesecurity" % "salvation" % salvationVersion % Test
  )
}
| mkurz/playframework | project/Dependencies.scala | Scala | apache-2.0 | 11,532 |
/*
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.camel.tooling.util
import javax.xml.bind.annotation.{XmlRootElement, XmlValue, XmlAttribute, XmlElement}
object Archetype {
  /**
   * Convenience factory: constructs an [[Archetype]] and populates its
   * mutable JAXB fields in one call.
   *
   * @param description optional human-readable text; defaults to empty.
   */
  def apply(groupId: String, artifactId: String, version: String, description: String = ""): Archetype = {
    val archetype = new Archetype()
    archetype.groupId = groupId
    archetype.artifactId = artifactId
    archetype.version = version
    archetype.description = description
    archetype
  }
}
/**
 * A simple DTO describing a Maven archetype. JAXB-serializable: groupId,
 * artifactId and version map to XML attributes, and description to the
 * element's text body.
 */
@XmlRootElement(name = "archetype")
class Archetype {

  @XmlAttribute
  var groupId: String = _

  @XmlAttribute
  var artifactId: String = _

  @XmlAttribute
  var version: String = _

  @XmlValue
  var description: String = _

  // Fixed typo in the rendered name: was "Archtype(...)".
  override def toString = s"Archetype($groupId:$artifactId:$version)"
}
| janstey/fuse | tooling/camel-tooling-util/src/main/scala/org/fusesource/camel/tooling/util/Archetype.scala | Scala | apache-2.0 | 1,402 |
package scalding
import sbt._
import Keys._
import com.typesafe.sbt.git.GitRunner
import com.typesafe.sbt.SbtGit.GitKeys
import com.typesafe.sbt.SbtSite.{ site, SiteKeys }
import com.typesafe.sbt.SbtGhPages.{ ghpages, GhPagesKeys => ghkeys }
import com.typesafe.sbt.SbtGit.GitKeys.gitRemoteRepo
/**
 * sbt settings for building aggregated scaladoc (via Unidoc) and publishing
 * it to the project's GitHub Pages branch (via sbt-ghpages). Uses the legacy
 * sbt 0.13 setting operators (<<=, <++=).
 */
object DocGen {
  val docDirectory = "target/site"
  val aggregateName = "scalding"
  // Synchronizes the locally generated site into the ghpages working checkout.
  def syncLocal = (ghkeys.updatedRepository, GitKeys.gitRunner, streams) map { (repo, git, s) =>
    cleanSite(repo, git, s) // First, remove 'stale' files.
    val rootPath = file(docDirectory) // Now copy files.
    IO.copyDirectory(rootPath, repo)
    // .nojekyll tells GitHub Pages to serve the site as-is (skip Jekyll).
    IO.touch(repo / ".nojekyll")
    repo
  }
  // git-removes everything except .git so files deleted locally also
  // disappear from the published site.
  private def cleanSite(dir: File, git: GitRunner, s: TaskStreams): Unit = {
    val toClean = IO.listFiles(dir).filterNot(_.getName == ".git").map(_.getAbsolutePath).toList
    if(!toClean.isEmpty)
      git(("rm" :: "-r" :: "-f" :: "--ignore-unmatch" :: toClean) :_*)(dir, s.log)
    ()
  }
  lazy val unidocSettings: Seq[sbt.Setting[_]] =
    site.includeScaladoc(docDirectory) ++ Seq(
      // Point scaladoc source links at the release tag, or the develop
      // branch for SNAPSHOT builds. "€{FILE_PATH}" is sbt's placeholder
      // escape for ${FILE_PATH} - keep it verbatim.
      scalacOptions in doc <++= (version, baseDirectory in LocalProject(aggregateName)).map { (v, rootBase) =>
        val tagOrBranch = if (v.endsWith("-SNAPSHOT")) "develop" else v
        val docSourceUrl = "https://github.com/twitter/" + aggregateName + "/tree/" + tagOrBranch + "€{FILE_PATH}.scala"
        Seq("-sourcepath", rootBase.getAbsolutePath, "-doc-source-url", docSourceUrl)
      },
      Unidoc.unidocDirectory := file(docDirectory),
      gitRemoteRepo := "https://github.com/twitter/" + aggregateName + ".git",
      ghkeys.synchLocal <<= syncLocal
    )
  lazy val publishSettings = site.settings ++ Unidoc.settings ++ ghpages.settings ++ unidocSettings
}
| piyushnarang/scalding | project/DocGen.scala | Scala | apache-2.0 | 1,752 |
package org.bluescale.telco.jainsip.unittest
import org.bluescale.telco.jainsip._
import org.bluescale.telco.api._
import org.bluescale.telco._
import java.util.concurrent.CountDownLatch
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicInteger
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class OutgoingCancel extends FunTestHelper {
    // Released when the CANCEL completes; each test awaits it with a timeout.
    var latch:CountDownLatch = null
    def getLatch = latch
    val destNumber = "9495557777"
    // The B2B test server ignores the INVITE, so the outgoing call never
    // connects and the subsequent cancel() must succeed.
    test("Simple outgoing cancel") {
        latch = new CountDownLatch(1)
        //runConn()
        b2bServer.addIgnore(destNumber)
        connectAndCancel()
        println("finished simple outgoing cancel")
    }
    // Same flow, but the far end actively rejects the INVITE.
    test("Simple outoging Cancel with rejected response") {
        latch = new CountDownLatch(1)
        b2bServer.addReject(destNumber)
        connectAndCancel()
        println("finished cancel rejected")
    }
    // Shared scenario: start an outgoing call, wait briefly, cancel it, and
    // require the cancel callback to fire within 5 seconds.
    def connectAndCancel() {
        val alice = telcoServer.createConnection(destNumber, "4445556666")
        alice.connect().foreach( alice => {
            println("HOW DID THIS HAPPEN")
            assert(false) //shouldn't happen, we're ignoring this!
        })
        // Give the INVITE time to go out before cancelling.
        Thread.sleep(500)
        alice.cancel().foreach( alice => {
            println("cancelled!")
            latch.countDown()
        })
        val result = getLatch.await(5,TimeUnit.SECONDS)
        assert(result)
    }
package org.duffqiu.rest.test.dsl
import scala.collection.convert.WrapAsScala.iterableAsScalaIterable
import scala.collection.convert.WrapAsScala.mapAsScalaMap
import scala.language.implicitConversions
import scala.language.postfixOps
import org.duffqiu.rest.common.RestClient
import org.duffqiu.rest.common.RestClientConfig
import org.duffqiu.rest.common.RestOperation
import org.duffqiu.rest.common.RestRequest
import org.duffqiu.rest.common.RestResource
import org.duffqiu.rest.common.RestResponse
import org.duffqiu.rest.common.RestResult
import org.duffqiu.rest.test.dsl.RestCommonImplicits.string2RestResponse
import org.scalatest.Assertions
import org.scalatest.concurrent
import dispatch.Defaults.executor
import dispatch.Http
import dispatch.implyRequestHandlerTuple
/**
 * A fluent "client ask_for resource to operation by request should result"
 * DSL for exercising REST endpoints in tests, backed by Dispatch futures.
 * Each DSL word appends to a tuple; the terminal `and_with` executes the
 * request and hands the response to a verification callback.
 */
object RestClientTestDsl extends concurrent.ScalaFutures with RestClientConfig with Assertions {
    // Aliases for the tuples accumulated as the DSL chain grows step by step.
    type Client = RestClient
    type WithClientResource = (Client, RestResource)
    type WithClientResourceOperation = (Client, RestResource, RestOperation)
    type WithClientResourceOperationRequest[A] = (Client, RestResource, RestOperation, A)
    type WithClientResourceOperationRequestResult[A] = (Client, RestResource, RestOperation, A, RestResult)
    // Callback the test supplies to assert on the final response.
    type Response4Test = RestResponse => Unit
    class ClientHelper(client: Client) {
        // Rebinds the client to another port, keeping name/host/http backend.
        def on(port: Int) = {
            new RestClient(client.name, client.hostName, port, client())
        }
        def ask_for(resource: RestResource) = (client, resource)
    }
    class ClientResourceHelper(wcr: WithClientResource) {
        def to(operation: RestOperation) = (wcr._1, wcr._2, operation)
    }
    class ClientOperationHelper(wcro: WithClientResourceOperation) {
        def by[A](request: A) = (wcro._1, wcro._2, wcro._3, request)
    }
    class ClientRequestHelper[A](wcror: WithClientResourceOperationRequest[A]) {
        def should(result: RestResult) = (wcror._1, wcror._2, wcror._3, wcror._4, result)
    }
    class ClientResultHelper[A <: RestRequest](wcrorr: WithClientResourceOperationRequestResult[A]) {
        // Terminal DSL word: triggers the actual HTTP call and verification.
        def and_with(fun: Response4Test) = tuple2Client(wcrorr._1, wcrorr._2, wcrorr._3, wcrorr._4, wcrorr._5, fun)
    }
    // Implicit conversions that let each DSL word chain onto the previous tuple.
    implicit def string2RestClientHelper(name: String) = new RestClient(name)
    implicit def client2ClientHelper(client: Client) = new ClientHelper(client)
    implicit def withClientResource(wcr: WithClientResource) = new ClientResourceHelper(wcr)
    implicit def withClientOperation(wcro: WithClientResourceOperation) = new ClientOperationHelper(wcro)
    implicit def withClientRequest[A](wcror: WithClientResourceOperationRequest[A]) = new ClientRequestHelper[A](wcror)
    implicit def withClientResult[A <: RestRequest, B <: RestResponse](wcrorr: WithClientResourceOperationRequestResult[A]) = new ClientResultHelper[A](wcrorr)
    // Builds the HTTP request, awaits the response (ScalaFutures.whenReady),
    // checks the status code against the expected RestResult, and on a match
    // wraps status/headers/body into a RestResponse for the test callback.
    def tuple2Client[A <: RestRequest](client: Client, resource: RestResource, operation: RestOperation, request: A,
        result: RestResult, resp4test: Response4Test): Unit = {
        val req = client.buildHttpRequest(resource, operation, request)
        val httpClient = client()
        whenReady(httpClient(req > { resp => resp })) {
            response =>
                {
                    val statusCode = response.getStatusCode()
                    val body = response.getResponseBody()
                    val httpHeaders = mapAsScalaMap(response.getHeaders()) toMap
                    val headerPara = httpHeaders map {
                        case (key, value) => {
                            val valueList = iterableAsScalaIterable(value)
                            //limitation, moco can't support a key with a list value in http header, but dispatch support.
                            (key, valueList.head)
                        }
                    }
                    result.shouldMatch(statusCode) match {
                        case true => {
                            val restResponse = ("RestResponse", statusCode) <:< headerPara <<< body
                            resp4test(restResponse)
                        }
                        case _ => fail("Expect result is not matched. Expect: " + result() + ", but get: " + statusCode +
                            ", body is " + Option(body).getOrElse("Empty"))
                    }
                }
        }
    }
}
| duffqiu/rest-test-dsl | src/main/scala/org/duffqiu/rest/test/dsl/RestClientTestDsl.scala | Scala | apache-2.0 | 4,485 |
package scala.math
import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import java.{lang => jl}
import scala.collection.SortedSet
import scala.math.Ordering.Float.TotalOrdering
import scala.math.Ordering.Double.TotalOrdering
@RunWith(classOf[JUnit4])
class OrderingTest {
  // Boundary Float values (plus NaN and both signed zeros) used to exercise
  // orderings across the whole representable range.
  val floats = Seq(
    Float.NegativeInfinity,
    Float.MinValue,
    -1f,
    -0f,
    0f,
    Float.MinPositiveValue,
    1f,
    Float.MaxValue,
    Float.PositiveInfinity,
    Float.NaN
  )

  // Double counterparts of `floats`.
  val doubles = Seq(
    Double.NegativeInfinity,
    Double.MinValue,
    -1d,
    -0d,
    0d,
    Double.MinPositiveValue,
    1d,
    Double.MaxValue,
    Double.PositiveInfinity,
    Double.NaN
  )
/* Test for scala/bug#9077 */
@Test
def reverseOrdering(): Unit = {
def check[T: Ordering](t1: T, t2: T): Unit = {
val O = Ordering[T]
val R = O.reverse
assertEquals(O.min(t1, t2), R.max(t1, t2))
assertEquals(O.max(t1, t2), R.min(t1, t2))
assertEquals(O.lteq(t1, t2), R.lteq(t2, t1))
assertEquals(O.lt(t1, t2), R.lt(t2, t1))
assertEquals(O.gteq(t1, t2), R.gteq(t2, t1))
assertEquals(O.gt(t1, t2), R.gt(t2, t1))
assertEquals(O.compare(t1, t2), R.compare(t2, t1))
assertEquals(O.equiv(t1, t2), R.equiv(t1, t2))
assertEquals(O.on((x: T) => x).min(t1, t2), R.on((x: T) => x).max(t1, t2))
assertEquals(O.tryCompare(t1, t2), R.tryCompare(t2, t1))
assertEquals(O.mkOrderingOps(t1).<(t2), R.mkOrderingOps(t2).<(t1))
assertEquals(O.mkOrderingOps(t1).<=(t2), R.mkOrderingOps(t2).<=(t1))
assertEquals(O.mkOrderingOps(t1).>(t2), R.mkOrderingOps(t2).>(t1))
assertEquals(O.mkOrderingOps(t1).>=(t2), R.mkOrderingOps(t2).>=(t1))
assertEquals(O.mkOrderingOps(t1).min(t2), R.mkOrderingOps(t1).max(t2))
assertEquals(O.mkOrderingOps(t1).max(t2), R.mkOrderingOps(t1).min(t2))
}
def checkAll[T: Ordering](ts: T*): Unit = {
for (t1 <- ts; t2 <- ts) check(t1, t2)
}
checkAll[Unit](())
checkAll[Boolean](true, false)
checkAll[Byte](Byte.MinValue, -1.toByte, 0.toByte, 1.toByte, Byte.MaxValue)
checkAll[Char](Char.MinValue, -1.toChar, 0.toChar, 1.toChar, Char.MaxValue)
checkAll[Short](Short.MinValue, -1, 0, 1, Short.MaxValue)
checkAll[Int](Int.MinValue, -1, 0, 1, Int.MaxValue)
checkAll[Double](doubles: _*)
checkAll[Float](floats: _*)
checkAll[BigInt](Int.MinValue, -1, 0, 1, Int.MaxValue)
checkAll[BigDecimal](Int.MinValue, -1, -0, 1, Int.MaxValue)
checkAll[String]("", "a", "b", "bb")
checkAll[String]("", "a", "b", "bb")
checkAll[Option[Int]](None, Some(1), Some(2))
checkAll[Iterable[Int]](Nil, List(1), List(1, 2))
checkAll[(Int, Int)]((1, 2), (1, 3), (4, 5))
}
  @Test
  def reverseOf(): Unit = {
    // `isReverseOf` must hold exactly between an ordering and its reverse,
    // in both directions, and must NOT hold between an ordering and itself,
    // nor against an unrelated ordering (here: a constant comparator).
    def check[T](ord: Ordering[T]): Unit = {
      assert(ord isReverseOf ord.reverse)
      assert(ord.reverse isReverseOf ord)
      assert(!(ord isReverseOf ord))
      assert(!(ord.reverse isReverseOf ord.reverse))
      assert(!ord.isReverseOf({ (_, _) => 0 }: Ordering[T]))
      assert(!ord.reverse.isReverseOf({ (_, _) => 0 }: Ordering[T]))
    }
    check(Ordering[Int])
    // Derived tuple orderings of arities 2 through 9.
    check(Ordering[(Int, Long)])
    check(Ordering[(Int, Long, Float)])
    check(Ordering[(Int, Long, Float, Double)])
    check(Ordering[(Int, Long, Float, Double, Byte)])
    check(Ordering[(Int, Long, Float, Double, Byte, Char)])
    check(Ordering[(Int, Long, Float, Double, Byte, Char, Short)])
    check(Ordering[(Int, Long, Float, Double, Byte, Char, Short, BigInt)])
    check(Ordering[(Int, Long, Float, Double, Byte, Char, Short, BigInt, BigDecimal)])
    check(Ordering[Option[Int]])
    // Seq/SortedSet orderings are only implicit via Ordering.Implicits.
    import Ordering.Implicits._
    check(Ordering[Seq[Int]])
    check(Ordering[SortedSet[Int]])
  }
@Test
def cachedReverse(): Unit = {
def check[T](ord: Ordering[T]): Unit = {
assert(ord.reverse eq ord.reverse)
}
check(Ordering[Int])
}
  @Test
  def composedOrdering(): Unit = {
    case class Pair(a: Int, b: Int)
    // The two orderings must agree on the sign of compare for every pair.
    def check(ord1: Ordering[Pair], ord2: Ordering[Pair]): Unit = {
      val pairs = List(Pair(0, 0), Pair(0, 1), Pair(1, 1))
      for (p1 <- pairs; p2 <- pairs) {
        assertEquals(signum(ord1.compare(p1, p2)), signum(ord2.compare(p1, p2)))
      }
    }
    // Composing with orElseBy / orElse must match ordering by the tuple (a, b).
    val o1 = Ordering.by[Pair, (Int, Int)]((p: Pair) => (p.a, p.b))
    val o2 = Ordering.by[Pair, Int](_.a).orElseBy[Int](_.b)
    val o3 = Ordering.by[Pair, Int](_.a).orElse(Ordering.by[Pair, Int](_.b))
    check(o1, o2)
    check(o1, o3)
  }
  /* Test for scala/bug#10511 */
  @Test
  def floatDoubleTotalOrdering(): Unit = {
    // Raw bit patterns for the two zeros, so -0.0 and +0.0 can be told
    // apart even though they compare equal under ==.
    val fNegZeroBits = jl.Float.floatToRawIntBits(-0.0f)
    val fPosZeroBits = jl.Float.floatToRawIntBits(0.0f)
    val dNegZeroBits = jl.Double.doubleToRawLongBits(-0.0d)
    val dPosZeroBits = jl.Double.doubleToRawLongBits(0.0d)

    def checkFloats(floats: Float*): Unit = {
      // Identity up to the total order: distinguishes -0.0f from 0.0f and
      // treats any two NaNs as the same value.
      def same(f1: Float, f2: Float): Boolean = {
        val thisBits = jl.Float.floatToRawIntBits(f1)
        if (thisBits == fNegZeroBits) jl.Float.floatToRawIntBits(f2) == fNegZeroBits
        else if (thisBits == fPosZeroBits) jl.Float.floatToRawIntBits(f2) == fPosZeroBits
        else f1 == f2 || (jl.Float.isNaN(f1) && jl.Float.isNaN(f2))
      }

      val O = Ordering[Float]
      for (i <- floats; j <- floats) {
        val msg = s"for i=$i, j=$j"
        // consistency with `compare`
        assertEquals(msg, O.compare(i, j) < 0, O.lt(i, j))
        assertEquals(msg, O.compare(i, j) <= 0, O.lteq(i, j))
        assertEquals(msg, O.compare(i, j) == 0, O.equiv(i, j))
        assertEquals(msg, O.compare(i, j) >= 0, O.gteq(i, j))
        assertEquals(msg, O.compare(i, j) > 0, O.gt(i, j))
        // consistency with other ops
        assertTrue(msg, O.lteq(i, j) || O.gteq(i, j))
        assertTrue(msg, O.lteq(i, j) || O.gt(i, j))
        assertTrue(msg, O.lteq(i, j) != O.gt(i, j))
        assertTrue(msg, O.lt(i, j) || O.gteq(i, j))
        assertTrue(msg, O.lt(i, j) != O.gteq(i, j))
        // exactly one of `lt`, `equiv`, `gt` is true
        assertTrue(msg,
          (O.lt(i, j) ^ O.equiv(i, j) ^ O.gt(i, j))
            && !(O.lt(i, j) && O.equiv(i, j) && O.gt(i, j)))
        // consistency with `max` and `min`
        assertEquals(msg, O.compare(i, j) >= 0, same(O.max(i, j), i))
        assertEquals(msg, O.compare(i, j) <= 0, same(O.min(i, j), i))
        if (!same(i, j)) {
          assertEquals(msg, O.compare(i, j) < 0, same(O.max(i, j), j))
          assertEquals(msg, O.compare(i, j) > 0, same(O.min(i, j), j))
        }
      }
    }

    // Mirror of checkFloats for Double (kept separate to stay on the
    // primitive Double ordering rather than a shared generic path).
    def checkDoubles(doubles: Double*): Unit = {
      def same(d1: Double, d2: Double): Boolean = {
        val thisBits = jl.Double.doubleToRawLongBits(d1)
        if (thisBits == dNegZeroBits) jl.Double.doubleToRawLongBits(d2) == dNegZeroBits
        else if (thisBits == dPosZeroBits) jl.Double.doubleToRawLongBits(d2) == dPosZeroBits
        else d1 == d2 || (jl.Double.isNaN(d1) && jl.Double.isNaN(d2))
      }

      val O = Ordering[Double]
      for (i <- doubles; j <- doubles) {
        val msg = s"for i=$i, j=$j"
        // consistency with `compare`
        assertEquals(msg, O.compare(i, j) < 0, O.lt(i, j))
        assertEquals(msg, O.compare(i, j) <= 0, O.lteq(i, j))
        assertEquals(msg, O.compare(i, j) == 0, O.equiv(i, j))
        assertEquals(msg, O.compare(i, j) >= 0, O.gteq(i, j))
        assertEquals(msg, O.compare(i, j) > 0, O.gt(i, j))
        // consistency with other ops
        assertTrue(msg, O.lteq(i, j) || O.gteq(i, j))
        assertTrue(msg, O.lteq(i, j) || O.gt(i, j))
        assertTrue(msg, O.lteq(i, j) != O.gt(i, j))
        assertTrue(msg, O.lt(i, j) || O.gteq(i, j))
        assertTrue(msg, O.lt(i, j) != O.gteq(i, j))
        // exactly one of `lt`, `equiv`, `gt` is true
        assertTrue(msg,
          (O.lt(i, j) ^ O.equiv(i, j) ^ O.gt(i, j))
            && !(O.lt(i, j) && O.equiv(i, j) && O.gt(i, j)))
        // consistency with `max` and `min`
        assertEquals(msg, O.compare(i, j) >= 0, same(O.max(i, j), i))
        assertEquals(msg, O.compare(i, j) <= 0, same(O.min(i, j), i))
        if (!same(i, j)) {
          assertEquals(msg, O.compare(i, j) < 0, same(O.max(i, j), j))
          assertEquals(msg, O.compare(i, j) > 0, same(O.min(i, j), j))
        }
      }
    }

    checkFloats(floats: _*)
    checkDoubles(doubles: _*)
  }
/* Test for scala/bug#8664 */
@Test
def symbolOrdering(): Unit = {
assertEquals(Seq('b, 'c, 'a).sorted, Seq('a, 'b, 'c))
}
@Test
def orderingEquality(): Unit = {
def check[T](ord: => Ordering[T]): Unit = {
assertEquals(ord, ord)
assertEquals(ord.hashCode(), ord.hashCode())
assertEquals(ord.reverse, ord.reverse)
assertEquals(ord.reverse.hashCode(), ord.reverse.hashCode())
}
check(Ordering[Int])
check(Ordering[(Int, Long)])
check(Ordering[(Int, Long, Float)])
check(Ordering[(Int, Long, Float, Double)])
check(Ordering[(Int, Long, Float, Double, Byte)])
check(Ordering[(Int, Long, Float, Double, Byte, Char)])
check(Ordering[(Int, Long, Float, Double, Byte, Char, Short)])
check(Ordering[(Int, Long, Float, Double, Byte, Char, Short, BigInt)])
check(Ordering[(Int, Long, Float, Double, Byte, Char, Short, BigInt, BigDecimal)])
check(Ordering[Option[Int]])
import Ordering.Implicits._
check(Ordering[Seq[Int]])
check(Ordering[SortedSet[Int]])
}
/* Test for scala/bug#11284 */
@Test
def supertypeOrdering(): Unit = {
val before = java.time.LocalDate.of(2004, 1, 20)
val now = java.time.LocalDate.now()
val later = java.time.LocalDate.now().plusWeeks(1)
assertEquals(Seq(before, now, later), Seq(now, later, before).sorted)
}
}
| martijnhoekstra/scala | test/junit/scala/math/OrderingTest.scala | Scala | apache-2.0 | 9,759 |
package org.lanyard.dist
import scala.language.higherKinds
/** Type class for constructing a distribution of type `D`.
  *
  * The type projection `D#Parameter` selects the `Parameter` type member
  * declared by the distribution type itself, so each factory builds its
  * distribution from the matching parameter value.
  */
trait DistFactory[D <: Distribution[_]] {
  /** Builds a `D` from its parameter value. */
  def create( param: D#Parameter): D
}
object DistFactory {
  /** Summons the implicitly available factory instance for `D`. */
  @inline def apply[D <: Distribution[_]]( implicit factory: DistFactory[D] ): DistFactory[D] = factory
}
| perian/Lanyard | src/main/scala/org/lanyard/dist/DistFactory.scala | Scala | gpl-2.0 | 325 |
package mimir;
import java.io.{FileReader, BufferedReader}
import java.sql.SQLException
import mimir.algebra._
import mimir.ctables.{VGTerm, Model}
import mimir.exec.{Compiler, ResultIterator, ResultSetIterator}
import mimir.lenses.{Lens, LensManager}
import mimir.parser.OperatorParser
import mimir.sql.{Backend, CreateLens, RAToSql, SqlToRA}
import scala.collection.mutable.ListBuffer
;
/**
* The central dispatcher for Mimir. Most Mimir functionality makes use of the Relational
* Algebra and Expression ASTs in mimir.algebra.{Operator,Expression}, but individual components
* may make use of SQL or other query representations. The Database class acts as a bridge
* between these components, and provides a single, central way to access all of Mimir's resources.
* As a side effect, this allows us to decouple logic for different parts of Mimir into separate
* classes, linked only by this central Database class.
*
* You should never need to access any of the classes below directly. If you do, add another
* accessor method to Database instead.
*
* === Parsing ===
* - mimir.sql.SqlToRA (sql)
* Responsible for translating JSqlParser AST elements into corresponding AST elements from
* mimir.algebra._
* - mimir.sql.RAToSql (ra)
* Responsible for translating mimir.algebra._ AST elements back to JSqlParser's AST. This is
* typically only required for compatibility with JDBC.
* - mimir.parser.OperatorParser (operator)
* Responsible for directly constructing mimir.algebra.{Operator,Expression} ASTs from string
* representations. Allows these ASTs to be serialized through toString()
*
* === Logic ===
* - mimir.sql.Backend (backend)
* Pluggable wrapper for database backends over which Mimir will actually run. Basically,
* a simplified form of JDBC. See mimir.sql._ for examples.
* - mimir.lenses.LensManager (lenses)
* Responsible for creating, serializing, and deserializing lenses and virtual views.
* - mimir.exec.Compiler
* Responsible for query execution. Acts as a wrapper around the logic in mimir.ctables._,
* mimir.lenses._, and mimir.exec._ that prepares non-deterministic queries to be evaluated
* on the backend database.
*/
case class Database(backend: Backend)
{
  // Translators between JSqlParser's SQL ASTs and Mimir's relational algebra.
  val sql = new SqlToRA(this)
  val ra = new RAToSql(this)
  // Lens lifecycle management and query compilation services.
  val lenses = new LensManager(this)
  val compiler = new Compiler(this)
  // Parser for string-serialized Operator/Expression ASTs.  Lens models are
  // resolved through getLensModel and table schemas through getTableSchema;
  // referencing an unknown table raises a SQLException.
  val operator = new OperatorParser(this.getLensModel,
    (x) =>
      this.getTableSchema(x) match {
        case Some(x) => x
        case None => throw new SQLException("Table "+x+" does not exist in db!")
      })
  /**
   * Evaluate the specified query on the backend directly and wrap the result in a
   * ResultSetIterator. No Mimir-specific optimizations or rewrites are applied.
   */
  def query(sql: String): ResultIterator =
    new ResultSetIterator(backend.execute(sql))
  /**
   * Evaluate the specified query on the backend directly and wrap the result in a
   * ResultSetIterator. JDBC parameters (`?`) are replaced according to the provided
   * argument list. No Mimir-specific optimizations or rewrites are applied.
   */
  def query(sql: String, args: List[String]): ResultIterator =
    new ResultSetIterator(backend.execute(sql, args))
  /**
   * Evaluate the specified query on the backend directly. No Mimir-specific
   * optimizations or rewrites are applied.
   */
  def query(sql: net.sf.jsqlparser.statement.select.Select): ResultIterator =
    new ResultSetIterator(backend.execute(sql))
  /**
   * Evaluate the specified query on the backend directly. No Mimir-specific
   * optimizations or rewrites are applied.
   */
  def query(sql: net.sf.jsqlparser.statement.select.SelectBody): ResultIterator =
    new ResultSetIterator(backend.execute(sql))
  /**
   * Evaluate the specified SQL DDL expression on the backend directly. No Mimir-
   * specific optimizations or updates are applied.
   */
  def update(sql: String): Unit = {
    // println(sql);
    backend.update(sql);
  }
  /**
   * Evaluate a list of SQL statements in batch mode. This is useful for speeding up
   * data insertion during CSV uploads
   */
  def update(sql: List[String]): Unit = {
    backend.update(sql)
  }
  /**
   * Evaluate the specified SQL DDL expression on the backend directly. JDBC
   * parameters (`?`) are replaced according to the provided argument list.
   * No Mimir-specific optimizations or updates are applied.
   */
  def update(sql: String, args: List[String]): Unit = {
    // println(sql);
    backend.update(sql, args);
  }
  /**
   * Apply the standard set of Mimir compiler optimizations -- Used mostly for EXPLAIN.
   */
  def optimize(oper: Operator): Operator =
  {
    // Each optimization is a rewrite Operator => Operator, applied in order.
    compiler.standardOptimizations.foldLeft(oper)( (o, fn) => fn(o) )
  }
  /**
   * Optimize and evaluate the specified query. Applies all Mimir-specific optimizations
   * and rewrites the query to properly account for Virtual Tables.
   */
  def query(oper: Operator): ResultIterator =
  {
    compiler.compile(oper)
  }
  /**
   * Flush the provided ResultIterator to the console.
   *
   * Renders a fixed-width ASCII table (12-character columns).  Cells from
   * non-deterministic columns are suffixed with '*', non-deterministic rows
   * are annotated, and a trailing note is printed when rows may be missing.
   */
  def dump(result: ResultIterator): Unit =
  {
    val colWidth = 12
    val fmtSpecifier = "%"+colWidth+"s"
    val verticalSep = "+"+("-"*(colWidth+1)+"+")*(result.numCols-1)+("-"*(colWidth+1))+"+"
    println(verticalSep)
    println("|"+result.schema.map( _._1 ).map(x => fmtSpecifier.format(x)).mkString(" |")+" |")
    println(verticalSep)
    while(result.getNext()){
      println("|"+
        (0 until result.numCols).map( (i) => {
          ("%"+colWidth+"s").format(
            result(i)+(
              if(!result.deterministicCol(i)){ "*" } else { "" }
            )
          )
        }).mkString(" |")+(
          if(!result.deterministicRow){
            " (This row may be invalid)"
          } else { "" }
        )+" |"
      )
    }
    println(verticalSep)
    if(result.missingRows){
      println("( There may be missing result rows )")
    }
  }
  /**
   * Materialize the provided ResultIterator into a WebIterator: column headers,
   * rows paired with their row-level determinism flag, and the missing-rows flag.
   * Non-deterministic cells are tagged with a '*' suffix.
   */
  def webDump(result: ResultIterator): WebIterator =
  {
    val headers: List[String] = result.schema.map(_._1)
    val data: ListBuffer[(List[String], Boolean)] = new ListBuffer()
    while(result.getNext()){
      val list =
        (0 until result.numCols).map( (i) => {
          result(i) + (if (!result.deterministicCol(i)) {"*"} else {""})
        }).toList
      // NOTE(review): debug output in the result-materialization loop;
      // consider removing or routing through a logger.
      println("RESULTS: "+list)
      data.append((list, result.deterministicRow()))
    }
    new WebIterator(headers, data.toList, result.missingRows())
  }
  /**
   * Translate the specified JSqlParser SELECT statement to Mimir's RA AST.
   */
  def convert(sel: net.sf.jsqlparser.statement.select.Select): Operator =
    sql.convert(sel)
  /**
   * Translate the specified JSqlParser SELECT body to Mimir's RA AST.
   */
  def convert(sel: net.sf.jsqlparser.statement.select.SelectBody): (Operator,Map[String, String]) =
    sql.convert(sel, null)
  /**
   * Translate the specified JSqlParser expression to Mimir's Expression AST.
   */
  def convert(expr: net.sf.jsqlparser.expression.Expression): Expression =
    sql.convert(expr)
  /**
   * Translate the specified Mimir RA AST back to a JSqlParser statement.
   */
  def convert(oper: Operator): net.sf.jsqlparser.statement.select.SelectBody =
    ra.convert(oper)
  /**
   * Parse the provided string as a Mimir Expression AST
   */
  def parseExpression(exprString: String): Expression =
    operator.expr(exprString)
  /**
   * Parse the provided string as a list of comma-delimited Mimir Expression ASTs
   */
  def parseExpressionList(exprListString: String): List[Expression] =
    operator.exprList(exprListString)
  /**
   * Parse the provided string as a Mimir RA AST
   */
  def parseOperator(operString: String): Operator =
    operator.operator(operString)
  /**
   * Look up the schema for the table with the provided name.
   */
  def getTableSchema(name: String): Option[List[(String,Type.T)]] =
    backend.getTableSchema(name);
  /**
   * Build a Table operator for the table with the provided name.
   */
  def getTableOperator(table: String): Operator =
    backend.getTableOperator(table)
  /**
   * Build a Table operator for the table with the provided name, requesting the
   * specified metadata.
   */
  def getTableOperator(table: String, metadata: List[(String, Type.T)]): Operator =
    backend.getTableOperator(table, metadata)
  /**
   * Evaluate a CREATE LENS statement.
   */
  def createLens(lensDefn: CreateLens): Unit =
    lenses.create(lensDefn)
  /**
   * Prepare a database for use with Mimir.
   */
  def initializeDBForMimir(): Unit = {
    lenses.init();
  }
  /**
   * Retrieve the Lens with the specified name.
   *
   * NOTE(review): `.get` throws if the lens does not exist; callers must
   * guarantee existence or use getView for an Option-returning lookup.
   */
  def getLens(lensName: String): Lens =
    lenses.load(lensName).get
  /**
   * Retrieve the Model for the Lens with the specified name.
   */
  def getLensModel(lensName: String): Model =
    lenses.modelForLens(lensName)
  /**
   * Retrieve the query corresponding to the Lens or Virtual View with the specified
   * name (or None if no such lens exists)
   *
   * Lens names are looked up case-insensitively (upper-cased before the load).
   */
  def getView(name: String): Option[(Operator)] =
  {
    // System.out.println("Selecting from ..."+name);
    if(lenses == null){ None }
    else {
      //println(iviews.views.toString())
      lenses.load(name.toUpperCase()) match {
        case None => None
        case Some(lens) =>
          // println("Found: "+name);
          Some(lens.view)
      }
    }
  }
  /**
   * Load CSV file into database
   *
   * If the target table already exists, rows are appended (skipping the first
   * line when a header is detected).  Otherwise an all-varchar table is
   * created from the header line and the load is retried recursively.
   */
  def handleLoadTable(targetTable: String, sourceFile: String){
    val input = new BufferedReader(new FileReader(sourceFile))
    val firstLine = input.readLine()
    getTableSchema(targetTable) match {
      case Some(sch) => {
        if(headerDetected(firstLine)) {
          populateTable(input, targetTable, sch) // Ignore header since table already exists
        }
        else {
          populateTable(new BufferedReader(new FileReader(sourceFile)), // Reset to top
                        targetTable, sch)
        }
      }
      case None => {
        if(headerDetected(firstLine)) {
          // NOTE(review): "\\'" emits a literal backslash before each quote in
          // the generated DDL -- verify the backend expects this escaping.
          update("CREATE TABLE "+targetTable+"("+
            firstLine.split(",").map((x) => "\\'"+x.trim.replace(" ", "")+"\\'" ).mkString(" varchar, ")+
            " varchar)")
          handleLoadTable(targetTable, sourceFile)
        }
        else {
          throw new SQLException("No header supplied for creating new table")
        }
      }
    }
  }
  // Returns whether `line` should be treated as a CSV header row.
  private def headerDetected(line: String): Boolean = {
    if(line == null) return false;
    // TODO Detection logic
    return true; // Placeholder, assume every CSV file has a header
  }
  // Streams the CSV into INSERT statements, flushed as one batch at EOF.
  // Empty fields become SQL NULL; date and string values are quoted.
  // NOTE(review): values are spliced into SQL by naive string concatenation
  // and split(",") -- commas or quotes inside fields will break or inject.
  private def populateTable(src: BufferedReader,
                            targetTable: String,
                            sch: List[(String, Type.T)]): Unit = {
    val keys = sch.map(_._1).map((x) => "\\'"+x+"\\'").mkString(", ")
    val stmts = new ListBuffer[String]()
    while(true){
      val line = src.readLine()
      if(line == null) { if(stmts.size > 0) update(stmts.toList); return }
      val dataLine = line.trim.split(",").padTo(sch.size, "")
      val data = (0 until dataLine.length).map( (i) =>
        dataLine(i) match {
          case "" => null
          case x => sch(i)._2 match {
            case Type.TDate | Type.TString => "\\'"+x+"\\'"
            case _ => x
          }
        }
      ).mkString(", ")
      stmts.append("INSERT INTO "+targetTable+"("+keys+") VALUES ("+data+")")
    }
  }
  /**
   * Find all VGTerms in an expression
   */
  def getVGTerms(expression: Expression): List[VGTerm] = {
    Eval.getVGTerms(expression)
  }
  /**
   * Variant that threads variable bindings and an accumulator list through
   * to Eval.getVGTerms.
   */
  def getVGTerms(expression: Expression,
                 bindings: Map[String, PrimitiveValue],
                 list: List[VGTerm]): List[VGTerm] = {
    Eval.getVGTerms(expression, bindings, list)
  }
} | Legacy25/mimir | mimircore/src/main/scala/mimir/Database.scala | Scala | apache-2.0 | 11,847 |
package WorkingWithLists.P05
object P05 {
  /** Reverses a list in O(n) with a tail-recursive accumulator.
    *
    * The helper reuses the enclosing type parameter `T` instead of shadowing
    * it with a second, identically named parameter (the original `_reverse[T]`
    * declaration hid the outer `T` and triggered a shadowing warning), and
    * `@annotation.tailrec` makes the compiler enforce constant stack usage.
    */
  def reverse[T](l: List[T]): List[T] = {
    @annotation.tailrec
    def loop(acc: List[T], rest: List[T]): List[T] = rest match {
      case e :: ls => loop(e :: acc, ls)  // move head onto the accumulator
      case Nil     => acc
    }
    loop(Nil, l)
  }

  /** Naive O(n^2) reversal: appends each head after the reversed tail. */
  def reverse_ListAppend[T](l: List[T]): List[T] = l match {
    case e :: ls => reverse_ListAppend(ls) ::: List(e)
    case Nil     => Nil
  }

  /** Reversal via foldLeft: prepend each element to the accumulator. */
  def reverse_FoldLeft[T](l: List[T]): List[T] =
    l.foldLeft(List[T]()) { (lr, e) => e :: lr }
}
| ihac/Ninety-Nine-Scala-Problems | src/main/scala/WorkingWithLists/P05/P05.scala | Scala | gpl-3.0 | 509 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package reflect
/**
 * <span class="badge badge-red" style="float: right;">EXPERIMENTAL</span>
 *
 * The base package for Scala macros.
 *
 * Macros are functions that are called by the compiler during compilation.
 * Within these functions the programmer has access to compiler APIs.
 * For example, it is possible to generate, analyze and typecheck code.
 *
 * See the [[https://docs.scala-lang.org/overviews/macros/overview.html Macros Guide]] on how to get started with Scala macros.
 */
package object macros {
  /** The Scala macros context.
   *
   *  In Scala 2.11, macros that were once unified are split into blackbox and whitebox macros,
   *  with the former being better supported and the latter being more powerful. You can read about
   *  the details of the split and the associated trade-offs in the [[https://docs.scala-lang.org/overviews/macros/overview.html Macros Guide]].
   *
   *  `scala.reflect.macros.Context` follows this tendency and turns into `scala.reflect.macros.blackbox.Context`
   *  and `scala.reflect.macros.whitebox.Context`. The original `Context` is left in place for compatibility reasons,
   *  but it is now deprecated, nudging the users to choose between blackbox and whitebox macros.
   */
  @deprecated("use blackbox.Context or whitebox.Context instead", "2.11.0")
  // Retained solely for source compatibility with pre-2.11 macro code; the
  // alias resolves to the more powerful whitebox flavor.
  type Context = whitebox.Context
}
| scala/scala | src/reflect/scala/reflect/macros/package.scala | Scala | apache-2.0 | 1,657 |
package at.logic.gapt.proofs.lk
import at.logic.gapt.expr._
import at.logic.gapt.expr.schema.SchemaFormula
import at.logic.gapt.proofs.lk._
import at.logic.gapt.proofs.lk.base._
import at.logic.gapt.proofs.shlk._
import scala.util.control.TailCalls._
/**
* Removes the redundant weakenings and contractions.
* Traverses the proof top down, keeping track of the weakened formulas in ws.
* When processing each rule, checks whether the auxiliary formulas are in ws.
* If all were previously weakened, remove them from ws and adds the main formula.
*
* I am aware the code is really really hard to read. I am sorry for that.
* In order to get it properly optimized, I had to use continuation-passing-style
* and the TailCall library from scala. This makes it particularly exotic.
* If you are going through the trouble of reading it (so brave of you ;),
* it's good to keep the following in mind:
* 1. Ignore tailcalls and everything associated with it
* 2. Treat the continuation call not as a function call but as if its arguments
* were the return value of the function.
*
* The answer to the optimization was found here:
* http://stackoverflow.com/questions/20164061/how-to-acheive-tail-call-optimization-while-traversing-tree-like-structure-using
*/
object CleanStructuralRules {
def apply( p: LKProof ): LKProof = {
cleanStructuralRules( p, {
( proof, ws ) =>
done( WeakeningMacroRule( proof, p.root.toHOLSequent ) )
} ).result
}
// Note: using a pair instead of a sequent because sequents are composed of
// formula occurrences and not formulas.
private def cleanStructuralRules( pr: LKProof, fun: ( ( LKProof, ( List[HOLFormula], List[HOLFormula] ) ) => TailRec[LKProof] ) ): TailRec[LKProof] =
pr match {
// Base case: axiom
case Axiom( s ) => fun( pr, ( Nil, Nil ) )
// Structural rules:
case WeakeningLeftRule( p, _, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
tailcall( fun( proof, ( ws._1 :+ m.formula, ws._2 ) ) )
} ) )
case WeakeningRightRule( p, _, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
tailcall( fun( proof, ( ws._1, ws._2 :+ m.formula ) ) )
} ) )
case ContractionLeftRule( p, _, a1, a2, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._1.count( f => f == a1.formula ) match {
case n if n >= 2 => tailcall( fun( proof, ( ws._1.diff( List( a1.formula, a2.formula ) ) :+ m.formula, ws._2 ) ) )
case n if n == 1 => tailcall( fun( proof, ( ws._1.diff( List( a1.formula ) ), ws._2 ) ) )
case n if n == 0 => tailcall( fun( ContractionLeftRule( proof, a1.formula ), ws ) )
}
} ) )
case ContractionRightRule( p, _, a1, a2, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._2.count( f => f == a1.formula ) match {
case n if n >= 2 => tailcall( fun( proof, ( ws._1, ws._2.diff( List( a1.formula, a2.formula ) ) :+ m.formula ) ) )
case n if n == 1 => tailcall( fun( proof, ( ws._1, ws._2.diff( List( a1.formula ) ) ) ) )
case n if n == 0 => tailcall( fun( ContractionRightRule( proof, a1.formula ), ws ) )
}
} ) )
case CutRule( p1, p2, _, a1, a2 ) =>
tailcall( cleanStructuralRules( p1, { ( proof1, wsl ) =>
cleanStructuralRules( p2, { ( proof2, wsr ) =>
( wsl._2.contains( a1.formula ), wsr._1.contains( a2.formula ) ) match {
case ( true, true ) =>
val ant2 = proof2.root.antecedent.map( _.formula )
val suc2 = proof2.root.succedent.map( _.formula )
val ws_1 = wsl._1 ++ wsr._1.diff( List( a2.formula ) ) ++ ( ant2 diff List( a2.formula ) )
val ws_2 = wsl._2.diff( List( a1.formula ) ) ++ wsr._2 ++ suc2
tailcall( fun( proof1, ( ws_1, ws_2 ) ) ) // The choice for proof1 is arbitrary
case ( true, false ) =>
val ant2 = proof2.root.antecedent.map( _.formula )
val suc2 = proof2.root.succedent.map( _.formula )
val ws_1 = wsl._1 ++ wsr._1.diff( List( a2.formula ) ) ++ ( ant2 diff List( a2.formula ) )
val ws_2 = wsl._2.diff( List( a1.formula ) ) ++ wsr._2 ++ suc2
tailcall( fun( proof1, ( ws_1, ws_2 ) ) )
case ( false, true ) =>
val ant1 = proof1.root.antecedent.map( _.formula )
val suc1 = proof1.root.succedent.map( _.formula )
val ws_1 = wsl._1 ++ wsr._1.diff( List( a2.formula ) ) ++ ant1
val ws_2 = wsl._2.diff( List( a1.formula ) ) ++ wsr._2 ++ ( suc1 diff List( a1.formula ) )
tailcall( fun( proof2, ( ws_1, ws_2 ) ) )
case ( false, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2
tailcall( fun( CutRule( proof1, proof2, a1.formula ), ( ws_1, ws_2 ) ) )
}
} )
} ) )
case InductionRule( p1, p2, _, base, step1, step2, m, t ) =>
tailcall( cleanStructuralRules( p1, { ( proof1, wsl ) =>
cleanStructuralRules( p2, { ( proof2, wsr ) =>
( wsl._2.contains( base.formula ), wsr._1.contains( step1.formula ), wsr._2.contains( step2.formula ) ) match {
case ( true, _, _ ) => // In this case we delete the second subproof, i.e. the induction step.
val ant2 = proof2.root.antecedent.map( _.formula )
val suc2 = proof2.root.succedent.map( _.formula )
val ws_1 = wsl._1 ++ ( wsr._1 diff List( step1.formula ) ) ++ ant2
val ws_2 = ( wsl._2 diff List( base.formula ) ) ++ ( wsr._2 diff List( step2.formula ) ) ++ suc2
tailcall( fun( proof1, ( ws_1, ws_2 ) ) )
case ( false, true, true ) => // In this case we delete the first subproof, i.e. the induction base.
val ant1 = proof1.root.antecedent map ( _.formula )
val suc1 = proof1.root.succedent map ( _.formula )
val ws_1 = wsl._1 ++ ( wsr._1 diff List( step1.formula ) ) ++ ant1
val ws_2 = ( wsl._1 diff List( base.formula ) ) ++ ( wsr._2 diff List( step2.formula ) ) ++ suc1
tailcall( fun( proof2, ( ws_1, ws_2 ) ) )
// In the following three cases, we have to actually construct the induction rule.
case ( false, true, false ) =>
val ws_1 = wsl._1 ++ ( wsr._1 diff List( step1.formula ) )
val ws_2 = wsl._2 ++ wsr._2
val p = WeakeningLeftRule( proof2, step1.formula )
tailcall( fun( InductionRule( proof1, proof2, base.formula.asInstanceOf[FOLFormula], step1.formula.asInstanceOf[FOLFormula], step2.formula.asInstanceOf[FOLFormula], t ), ( ws_1, ws_2 ) ) )
case ( false, false, true ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ ( wsr._2 diff List( step2.formula ) )
val p = WeakeningRightRule( proof2, step2.formula )
tailcall( fun( InductionRule( proof1, proof2, base.formula.asInstanceOf[FOLFormula], step1.formula.asInstanceOf[FOLFormula], step2.formula.asInstanceOf[FOLFormula], t ), ( ws_1, ws_2 ) ) )
case ( false, false, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2
tailcall( fun( InductionRule( proof1, proof2, base.formula.asInstanceOf[FOLFormula], step1.formula.asInstanceOf[FOLFormula], step2.formula.asInstanceOf[FOLFormula], t ), ( ws_1, ws_2 ) ) )
}
} )
} ) )
// Unary rules, one aux formula:
case NegLeftRule( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._2.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1 :+ m.formula, ws._2.diff( List( a.formula ) ) ) ) )
case false => tailcall( fun( NegLeftRule( proof, a.formula ), ws ) )
}
} ) )
case NegRightRule( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._1.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1.diff( List( a.formula ) ), ws._2 :+ m.formula ) ) )
case false => tailcall( fun( NegRightRule( proof, a.formula ), ws ) )
}
} ) )
case AndLeft1Rule( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._1.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1.diff( List( a.formula ) ) :+ m.formula, ws._2 ) ) )
case false =>
val And( _, a2 ) = m.formula
tailcall( fun( AndLeft1Rule( proof, a.formula, a2 ), ws ) )
}
} ) )
case AndLeft2Rule( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._1.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1.diff( List( a.formula ) ) :+ m.formula, ws._2 ) ) )
case false =>
val And( a1, _ ) = m.formula
tailcall( fun( AndLeft2Rule( proof, a1, a.formula ), ws ) )
}
} ) )
case OrRight1Rule( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._2.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1, ws._2.diff( List( a.formula ) ) :+ m.formula ) ) )
case false =>
val Or( _, a2 ) = m.formula
tailcall( fun( OrRight1Rule( proof, a.formula, a2 ), ws ) )
}
} ) )
case OrRight2Rule( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._2.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1, ws._2.diff( List( a.formula ) ) :+ m.formula ) ) )
case false =>
val Or( a1, _ ) = m.formula
tailcall( fun( OrRight2Rule( proof, a1, a.formula ), ws ) )
}
} ) )
case ForallLeftRule( p, _, a, m, t ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._1.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1.diff( List( a.formula ) ) :+ m.formula, ws._2 ) ) )
case false => tailcall( fun( ForallLeftRule( proof, a.formula, m.formula, t ), ws ) )
}
} ) )
case ForallRightRule( p, _, a, m, t ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._2.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1, ws._2.diff( List( a.formula ) ) :+ m.formula ) ) )
case false => tailcall( fun( ForallRightRule( proof, a.formula, m.formula, t ), ws ) )
}
} ) )
case ExistsLeftRule( p, _, a, m, t ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._1.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1.diff( List( a.formula ) ) :+ m.formula, ws._2 ) ) )
case false => tailcall( fun( ExistsLeftRule( proof, a.formula, m.formula, t ), ws ) )
}
} ) )
case ExistsRightRule( p, _, a, m, t ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._2.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1, ws._2.diff( List( a.formula ) ) :+ m.formula ) ) )
case false => tailcall( fun( ExistsRightRule( proof, a.formula, m.formula, t ), ws ) )
}
} ) )
// Schema rules (all unary with one aux formula):
case AndLeftEquivalenceRule1( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._1.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1.diff( List( a.formula ) ) :+ m.formula, ws._2 ) ) )
case false => tailcall( fun( AndLeftEquivalenceRule1( proof, a.formula.asInstanceOf[SchemaFormula], m.formula.asInstanceOf[SchemaFormula] ), ws ) )
}
} ) )
case AndRightEquivalenceRule1( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._2.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1, ws._2.diff( List( a.formula ) ) :+ m.formula ) ) )
case false => tailcall( fun( AndRightEquivalenceRule1( proof, a.formula.asInstanceOf[SchemaFormula], m.formula.asInstanceOf[SchemaFormula] ), ws ) )
}
} ) )
case OrLeftEquivalenceRule1( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._1.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1.diff( List( a.formula ) ) :+ m.formula, ws._2 ) ) )
case false => tailcall( fun( OrLeftEquivalenceRule1( proof, a.formula.asInstanceOf[SchemaFormula], m.formula.asInstanceOf[SchemaFormula] ), ws ) )
}
} ) )
case OrRightEquivalenceRule1( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._2.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1, ws._2.diff( List( a.formula ) ) :+ m.formula ) ) )
case false => tailcall( fun( OrRightEquivalenceRule1( proof, a.formula.asInstanceOf[SchemaFormula], m.formula.asInstanceOf[SchemaFormula] ), ws ) )
}
} ) )
case AndLeftEquivalenceRule3( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._1.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1.diff( List( a.formula ) ) :+ m.formula, ws._2 ) ) )
case false => tailcall( fun( AndLeftEquivalenceRule3( proof, a.formula.asInstanceOf[SchemaFormula], m.formula.asInstanceOf[SchemaFormula] ), ws ) )
}
} ) )
case AndRightEquivalenceRule3( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._2.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1, ws._2.diff( List( a.formula ) ) :+ m.formula ) ) )
case false => tailcall( fun( AndRightEquivalenceRule3( proof, a.formula.asInstanceOf[SchemaFormula], m.formula.asInstanceOf[SchemaFormula] ), ws ) )
}
} ) )
case OrLeftEquivalenceRule3( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._1.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1.diff( List( a.formula ) ) :+ m.formula, ws._2 ) ) )
case false => tailcall( fun( OrLeftEquivalenceRule3( proof, a.formula.asInstanceOf[SchemaFormula], m.formula.asInstanceOf[SchemaFormula] ), ws ) )
}
} ) )
case OrRightEquivalenceRule3( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._2.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1, ws._2.diff( List( a.formula ) ) :+ m.formula ) ) )
case false => tailcall( fun( OrRightEquivalenceRule3( proof, a.formula.asInstanceOf[SchemaFormula], m.formula.asInstanceOf[SchemaFormula] ), ws ) )
}
} ) )
// Definition rules (all unary with one aux formula):
case DefinitionLeftRule( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._1.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1.diff( List( a.formula ) ) :+ m.formula, ws._2 ) ) )
case false => tailcall( fun( DefinitionLeftRule( proof, a.formula, m.formula ), ws ) )
}
} ) )
case DefinitionRightRule( p, _, a, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
ws._2.contains( a.formula ) match {
case true => tailcall( fun( proof, ( ws._1, ws._2.diff( List( a.formula ) ) :+ m.formula ) ) )
case false => tailcall( fun( DefinitionRightRule( proof, a.formula, m.formula ), ws ) )
}
} ) )
// Unary rules, two aux formulas:
case ImpRightRule( p, _, a1, a2, m ) =>
tailcall( cleanStructuralRules( p, { ( proof, ws ) =>
( ws._1.contains( a1.formula ), ws._2.contains( a2.formula ) ) match {
case ( true, true ) =>
val ws_1 = ws._1.diff( List( a1.formula ) )
val ws_2 = ws._2.diff( List( a2.formula ) ) :+ m.formula
tailcall( fun( proof, ( ws_1, ws_2 ) ) )
case ( true, false ) =>
val p1 = WeakeningLeftRule( proof, a1.formula )
val p2 = ImpRightRule( p1, a1.formula, a2.formula )
tailcall( fun( p2, ( ws._1.diff( List( a1.formula ) ), ws._2 ) ) )
case ( false, true ) =>
val p1 = WeakeningRightRule( proof, a2.formula )
val p2 = ImpRightRule( p1, a1.formula, a2.formula )
tailcall( fun( p2, ( ws._1, ws._2.diff( List( a2.formula ) ) ) ) )
case ( false, false ) => tailcall( fun( ImpRightRule( proof, a1.formula, a2.formula ), ws ) )
}
} ) )
// Binary rules:
case OrLeftRule( p1, p2, _, a1, a2, m ) =>
tailcall( cleanStructuralRules( p1, { ( proof1, wsl ) =>
cleanStructuralRules( p2, { ( proof2, wsr ) =>
( wsl._1.contains( a1.formula ), wsr._1.contains( a2.formula ) ) match {
case ( true, true ) =>
val ant2 = proof2.root.antecedent.map( _.formula )
val suc2 = proof2.root.succedent.map( _.formula )
val ws_1 = ( ( wsl._1.diff( List( a1.formula ) ) ++ wsr._1.diff( List( a2.formula ) ) ) :+ m.formula ) ++ ant2
val ws_2 = wsl._2 ++ wsr._2 ++ suc2
tailcall( fun( proof1, ( ws_1, ws_2 ) ) ) // The choice for proof1 is arbitrary
case ( true, false ) =>
val ws_1 = wsl._1.diff( List( a1.formula ) ) ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2
val p = WeakeningLeftRule( proof1, a1.formula )
tailcall( fun( OrLeftRule( p, proof2, a1.formula, a2.formula ), ( ws_1, ws_2 ) ) )
case ( false, true ) =>
val ws_1 = wsl._1 ++ wsr._1.diff( List( a2.formula ) )
val ws_2 = wsl._2 ++ wsr._2
val p = WeakeningLeftRule( proof2, a2.formula )
tailcall( fun( OrLeftRule( proof1, p, a1.formula, a2.formula ), ( ws_1, ws_2 ) ) )
case ( false, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2
tailcall( fun( OrLeftRule( proof1, proof2, a1.formula, a2.formula ), ( ws_1, ws_2 ) ) )
}
} )
} ) )
case AndRightRule( p1, p2, _, a1, a2, m ) =>
tailcall( cleanStructuralRules( p1, { ( proof1, wsl ) =>
cleanStructuralRules( p2, { ( proof2, wsr ) =>
( wsl._2.contains( a1.formula ), wsr._2.contains( a2.formula ) ) match {
case ( true, true ) =>
val ant2 = proof2.root.antecedent.map( _.formula )
val suc2 = proof2.root.succedent.map( _.formula )
val ws_1 = wsl._1 ++ wsr._1 ++ ant2
val ws_2 = ( ( wsl._2.diff( List( a1.formula ) ) ++ wsr._2.diff( List( a2.formula ) ) ) :+ m.formula ) ++ suc2
tailcall( fun( proof1, ( ws_1, ws_2 ) ) ) // The choice for proof1 is arbitrary
case ( true, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2.diff( List( a1.formula ) ) ++ wsr._2
val p = WeakeningRightRule( proof1, a1.formula )
tailcall( fun( AndRightRule( p, proof2, a1.formula, a2.formula ), ( ws_1, ws_2 ) ) )
case ( false, true ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2.diff( List( a2.formula ) )
val p = WeakeningRightRule( proof2, a2.formula )
tailcall( fun( AndRightRule( proof1, p, a1.formula, a2.formula ), ( ws_1, ws_2 ) ) )
case ( false, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2
tailcall( fun( AndRightRule( proof1, proof2, a1.formula, a2.formula ), ( ws_1, ws_2 ) ) )
}
} )
} ) )
case ImpLeftRule( p1, p2, _, a1, a2, m ) =>
tailcall( cleanStructuralRules( p1, { ( proof1, wsl ) =>
cleanStructuralRules( p2, { ( proof2, wsr ) =>
( wsl._2.contains( a1.formula ), wsr._1.contains( a2.formula ) ) match {
case ( true, true ) =>
val ant2 = proof2.root.antecedent.map( _.formula )
val suc2 = proof2.root.succedent.map( _.formula )
val ws_1 = ( ( wsl._1 ++ wsr._1.diff( List( a2.formula ) ) ) :+ m.formula ) ++ ant2
val ws_2 = wsl._2.diff( List( a1.formula ) ) ++ wsr._2 ++ suc2
tailcall( fun( proof1, ( ws_1, ws_2 ) ) ) // The choice for proof1 is arbitrary
case ( true, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2.diff( List( a1.formula ) ) ++ wsr._2
val p = WeakeningRightRule( proof1, a1.formula )
tailcall( fun( ImpLeftRule( p, proof2, a1.formula, a2.formula ), ( ws_1, ws_2 ) ) )
case ( false, true ) =>
val ws_1 = wsl._1 ++ wsr._1.diff( List( a2.formula ) )
val ws_2 = wsl._2 ++ wsr._2
val p = WeakeningLeftRule( proof2, a2.formula )
tailcall( fun( ImpLeftRule( proof1, p, a1.formula, a2.formula ), ( ws_1, ws_2 ) ) )
case ( false, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2
tailcall( fun( ImpLeftRule( proof1, proof2, a1.formula, a2.formula ), ( ws_1, ws_2 ) ) )
}
} )
} ) )
// Equation rules (all binary):
case EquationLeft1Rule( p1, p2, _, a1, a2, _, m ) =>
tailcall( cleanStructuralRules( p1, { ( proof1, wsl ) =>
cleanStructuralRules( p2, { ( proof2, wsr ) =>
( wsl._2.contains( a1.formula ), wsr._1.contains( a2.formula ) ) match {
case ( true, true ) =>
val ant2 = proof2.root.antecedent.map( _.formula )
val suc2 = proof2.root.succedent.map( _.formula )
val ws_1 = ( ( wsl._1 ++ wsr._1.diff( List( a2.formula ) ) ) :+ m.formula ) ++ ant2
val ws_2 = wsl._2.diff( List( a1.formula ) ) ++ wsr._2 ++ suc2
tailcall( fun( proof1, ( ws_1, ws_2 ) ) ) // The choice for proof1 is arbitrary
case ( true, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2.diff( List( a1.formula ) ) ++ wsr._2
val p = WeakeningRightRule( proof1, a1.formula )
tailcall( fun( EquationLeftRule( p, proof2, a1.formula, a2.formula, m.formula ), ( ws_1, ws_2 ) ) )
case ( false, true ) =>
val ws_1 = wsl._1 ++ wsr._1.diff( List( a2.formula ) )
val ws_2 = wsl._2 ++ wsr._2
val p = WeakeningLeftRule( proof2, a2.formula )
tailcall( fun( EquationLeftRule( proof1, p, a1.formula, a2.formula, m.formula ), ( ws_1, ws_2 ) ) )
case ( false, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2
tailcall( fun( EquationLeftRule( proof1, proof2, a1.formula, a2.formula, m.formula ), ( ws_1, ws_2 ) ) )
}
} )
} ) )
case EquationLeft2Rule( p1, p2, _, a1, a2, _, m ) =>
tailcall( cleanStructuralRules( p1, { ( proof1, wsl ) =>
cleanStructuralRules( p2, { ( proof2, wsr ) =>
( wsl._2.contains( a1.formula ), wsr._1.contains( a2.formula ) ) match {
case ( true, true ) =>
val ant2 = proof2.root.antecedent.map( _.formula )
val suc2 = proof2.root.succedent.map( _.formula )
val ws_1 = ( ( wsl._1 ++ wsr._1.diff( List( a2.formula ) ) ) :+ m.formula ) ++ ant2
val ws_2 = wsl._2.diff( List( a1.formula ) ) ++ wsr._2 ++ suc2
tailcall( fun( proof1, ( ws_1, ws_2 ) ) ) // The choice for proof1 is arbitrary
case ( true, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2.diff( List( a1.formula ) ) ++ wsr._2
val p = WeakeningRightRule( proof1, a1.formula )
tailcall( fun( EquationLeftRule( p, proof2, a1.formula, a2.formula, m.formula ), ( ws_1, ws_2 ) ) )
case ( false, true ) =>
val ws_1 = wsl._1 ++ wsr._1.diff( List( a2.formula ) )
val ws_2 = wsl._2 ++ wsr._2
val p = WeakeningLeftRule( proof2, a2.formula )
tailcall( fun( EquationLeftRule( proof1, p, a1.formula, a2.formula, m.formula ), ( ws_1, ws_2 ) ) )
case ( false, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2
tailcall( fun( EquationLeftRule( proof1, proof2, a1.formula, a2.formula, m.formula ), ( ws_1, ws_2 ) ) )
}
} )
} ) )
case EquationRight1Rule( p1, p2, _, a1, a2, _, m ) =>
tailcall( cleanStructuralRules( p1, { ( proof1, wsl ) =>
cleanStructuralRules( p2, { ( proof2, wsr ) =>
( wsl._2.contains( a1.formula ), wsr._2.contains( a2.formula ) ) match {
case ( true, true ) =>
val ant2 = proof2.root.antecedent.map( _.formula )
val suc2 = proof2.root.succedent.map( _.formula )
val ws_1 = wsl._1 ++ wsr._1 ++ ant2
val ws_2 = ( ( wsl._2.diff( List( a1.formula ) ) ++ wsr._2.diff( List( a2.formula ) ) ) :+ m.formula ) ++ suc2
tailcall( fun( proof1, ( ws_1, ws_2 ) ) ) // The choice for proof1 is arbitrary
case ( true, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2.diff( List( a1.formula ) ) ++ wsr._2
val p = WeakeningRightRule( proof1, a1.formula )
tailcall( fun( EquationRightRule( p, proof2, a1.formula, a2.formula, m.formula ), ( ws_1, ws_2 ) ) )
case ( false, true ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2.diff( List( a2.formula ) )
val p = WeakeningRightRule( proof2, a2.formula )
tailcall( fun( EquationRightRule( proof1, p, a1.formula, a2.formula, m.formula ), ( ws_1, ws_2 ) ) )
case ( false, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2
tailcall( fun( EquationRightRule( proof1, proof2, a1.formula, a2.formula, m.formula ), ( ws_1, ws_2 ) ) )
}
} )
} ) )
case EquationRight2Rule( p1, p2, _, a1, a2, _, m ) =>
tailcall( cleanStructuralRules( p1, { ( proof1, wsl ) =>
cleanStructuralRules( p2, { ( proof2, wsr ) =>
( wsl._2.contains( a1.formula ), wsr._2.contains( a2.formula ) ) match {
case ( true, true ) =>
val ant2 = proof2.root.antecedent.map( _.formula )
val suc2 = proof2.root.succedent.map( _.formula )
val ws_1 = wsl._1 ++ wsr._1 ++ ant2
val ws_2 = ( ( wsl._2.diff( List( a1.formula ) ) ++ wsr._2.diff( List( a2.formula ) ) ) :+ m.formula ) ++ suc2
tailcall( fun( proof1, ( ws_1, ws_2 ) ) ) // The choice for proof1 is arbitrary
case ( true, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2.diff( List( a1.formula ) ) ++ wsr._2
val p = WeakeningRightRule( proof1, a1.formula )
tailcall( fun( EquationRightRule( p, proof2, a1.formula, a2.formula, m.formula ), ( ws_1, ws_2 ) ) )
case ( false, true ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2.diff( List( a2.formula ) )
val p = WeakeningRightRule( proof2, a2.formula )
tailcall( fun( EquationRightRule( proof1, p, a1.formula, a2.formula, m.formula ), ( ws_1, ws_2 ) ) )
case ( false, false ) =>
val ws_1 = wsl._1 ++ wsr._1
val ws_2 = wsl._2 ++ wsr._2
tailcall( fun( EquationRightRule( proof1, proof2, a1.formula, a2.formula, m.formula ), ( ws_1, ws_2 ) ) )
}
} )
} ) )
case _ => throw new Exception( "ERROR: Unexpected case while cleaning redundant structural rules." )
}
}
| loewenheim/gapt | src/main/scala/at/logic/gapt/proofs/lk/CleanStructuralRules.scala | Scala | gpl-3.0 | 29,083 |
/*
* Copyright (C) 2014-2015 by Nokia.
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package wookie.yql.analytics
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.{Minutes, Seconds, StreamingContext}
import org.rogach.scallop.ScallopConf
import wookie.Sparkle
import wookie.app.{CheckpointConf, DurationConf, NameConf}
import wookie.spark.SparkStreamingRuntime
import wookie.spark.cli.SparkStreamingApp
import wookie.spark.streaming.kafka.cli.Kafka
import wookie.yql.geo.Location
// Command-line configuration for TwitterPopularTags: aggregates the app-name,
// batch-duration, checkpoint, Twitter-credential and Kafka option traits into
// a single Scallop configuration mix-in.
trait TwitterPopularTagsConf extends NameConf with DurationConf with CheckpointConf with TwitterConf with Kafka
object PopularTags {

  /**
   * Builds a [[Sparkle]] step that counts occurrences of each tag over a
   * sliding window of `windowLenInSeconds` seconds and emits (count, tag)
   * pairs sorted by count descending.
   */
  def stream(tags: DStream[String], windowLenInSeconds: Long): Sparkle[DStream[(Int, String)]] =
    Sparkle { _ =>
      val windowed = tags.map(tag => (tag, 1)).window(Seconds(windowLenInSeconds))
      val counts = windowed.reduceByKey((a, b) => a + b)
      val countKeyed = counts.map { case (topic, count) => (count, topic) }
      // Sort descending so the most popular tags come first.
      countKeyed.transform(rdd => rdd.sortByKey(false))
    }
}
object TwitterPopularTags extends SparkStreamingApp[TwitterPopularTagsConf](new ScallopConf(_) with TwitterPopularTagsConf) {
  import Twitter._
  import TwitterConverter._
  import wookie.spark.DStreams._
  import wookie.spark.streaming.kafka.Kafka._
  //scalastyle:off
  /**
   * Demo pipeline: consumes US/English tweets keyed by location, extracts
   * hash tags, computes windowed tag popularity, and joins tweets with a
   * Kafka-sourced weather stream on location. Results are printed to stdout.
   */
  override def runStreaming(opt: TwitterPopularTagsConf, spark: SparkSession, ssc: StreamingContext): Unit = {
    // The for-comprehension composes Sparkle steps; nothing runs until
    // pipeline.run(...) below is invoked with the streaming runtime.
    val pipeline = for {
      // Tweets paired with an optional Location key (withId = a => a.location).
      tweets <- cleanedTwitterStreamWithLocations(opt, "US", "en", withId=a => a.location)
      hashTags <- flatMap(tweets, (status: (Option[Location], Tweet)) => status._2.tags)
      // (tag, 1) pairs sorted by tag name — NOTE(review): sorted by key (the
      // tag string), not by count; presumably intentional for this demo.
      t1 <- Sparkle(_ => hashTags.map(a => (a, 1)).transform(_.sortByKey(false)))
      topCounts60 <- PopularTags.stream(hashTags, 60)
      // NOTE(review): topCount60ByTag is built but never consumed below.
      topCount60ByTag <- map(topCounts60, (x: (Int, String)) => (x._2, x._1) )
      // Weather events from Kafka, keyed by Location to match the tweet stream.
      weatherStream <- typedStream(opt.brokers(), Weather.queueName, Weather.parse, withId= (w: Weather) => Option(Location(w.area, w.region)))
      joined <- Sparkle { _ =>
        tweets.join(weatherStream.window(Minutes(10)))
      }
    } yield {
      // Forces evaluation of t1 each batch (take(1)); the original printing
      // code is kept commented out below.
      t1.foreachRDD(rdd => {
        rdd.take(1)
        ()
        //println("\\nPopular topics in last 60 seconds (%s total):".format(rdd.count()))
        //topList.foreach{ a => println("%s".format(a))}
      })
      //hashTags.print(10)
      // Collects each joined micro-batch to the driver and prints it —
      // acceptable for a demo, but unbounded for large batches.
      joined.foreachRDD(rdd => {
        val topList = rdd.collect()
        println("\\n JOINED Tweets with weather (%s total):".format(rdd.count()))
        topList.foreach{case (loc, (tweet, weather)) => println("%s (%s tweets) %s weather".format(loc, tweet.text, weather.conditions))}
      })
    }
    pipeline.run(SparkStreamingRuntime(ssc))
  }
  //scalastyle:on
}
| elyast/wookie | examples/src/main/scala/wookie/yql/analytics/TwitterPopularTags.scala | Scala | apache-2.0 | 3,313 |
package pl.newicom.dddd.messaging.correlation
import pl.newicom.dddd.aggregate.Command
import pl.newicom.dddd.cluster.DistributionStrategy.EntityIdResolver
class AggregateIdResolution extends EntityIdResolution {

  /**
   * Extends the default resolution so that any [[Command]] is routed by the
   * value of its aggregate id.
   */
  override def entityIdResolver: EntityIdResolver = {
    val commandResolver: EntityIdResolver = {
      case c: Command => c.aggregateId.value
    }
    super.entityIdResolver orElse commandResolver
  }
}
| pawelkaczor/akka-ddd | akka-ddd-core/src/main/scala/pl/newicom/dddd/messaging/correlation/AggregateIdResolution.scala | Scala | mit | 364 |
/*
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.compiler.directio
import java.util.{ List => JList }
import scala.collection.JavaConversions._
import scala.language.existentials
import scala.reflect.{ classTag, ClassTag }
import com.asakusafw.runtime.directio.DataFormat
import com.asakusafw.vocabulary.directio.DirectFileOutputDescription
/**
 * Describes a Direct I/O file output, exposing the constructor parameters
 * through the Java-facing [[DirectFileOutputDescription]] API. The model type
 * reported to the framework is recovered from the implicit [[ClassTag]].
 */
case class DirectOutputDescription[T: ClassTag](
  basePath: String,
  resourcePattern: String,
  order: Seq[String],
  deletePatterns: Seq[String],
  formatType: Class[_ <: DataFormat[T]]) extends DirectFileOutputDescription {

  override def getModelType(): Class[_] = classTag[T].runtimeClass

  override def getBasePath(): String = basePath

  override def getResourcePattern(): String = resourcePattern

  // Seq -> java.util.List via the file's JavaConversions import.
  override def getOrder(): JList[String] = order

  override def getDeletePatterns: JList[String] = deletePatterns

  override def getFormat = formatType
}
object DirectOutputDescription {

  /**
   * Convenience factory with no ordering and no delete patterns. The data
   * model type is discovered by instantiating the format once and asking it
   * for its supported type.
   */
  def apply[T](
    basePath: String,
    resourcePattern: String,
    formatType: Class[_ <: DataFormat[T]]): DirectOutputDescription[T] = {
    // getDeclaredConstructor().newInstance() replaces the deprecated
    // Class.newInstance (deprecated since Java 9), which unsafely rethrows
    // checked constructor exceptions without wrapping them.
    val format = formatType.getDeclaredConstructor().newInstance()
    DirectOutputDescription(
      basePath, resourcePattern, Seq.empty, Seq.empty, formatType)(
      ClassTag(format.getSupportedType))
  }
}
| asakusafw/asakusafw-spark | compiler/src/test/scala/com/asakusafw/spark/compiler/directio/DirectOutputDescription.scala | Scala | apache-2.0 | 1,842 |
package net.sansa_stack.query.spark.graph.jena.patternOp
import net.sansa_stack.query.spark.graph.jena.ExprParser
import net.sansa_stack.query.spark.graph.jena.expression.Filter
import net.sansa_stack.query.spark.graph.jena.model.{IntermediateResult, SparkExecutionModel}
import org.apache.jena.graph.Node
import org.apache.jena.sparql.algebra.Op
import org.apache.jena.sparql.algebra.op.OpLeftJoin
import org.apache.spark.graphx.Graph
import org.apache.spark.sql.SparkSession
import scala.collection.JavaConverters._
/**
 * Executes the SPARQL OPTIONAL operation (a left join in SPARQL algebra),
 * reading and writing operand results through the shared
 * [[IntermediateResult]] model.
 */
class PatternOptional(op: OpLeftJoin) extends PatternOp {
  private val tag = "OPTIONAL"
  // The algebra node's hashCode doubles as the slot id under which this
  // operator's result is published in IntermediateResult.
  private val id = op.hashCode()
  @deprecated("this method will be removed", "")
  override def execute(
    input: Array[Map[Node, Node]],
    graph: Graph[Node, Node],
    session: SparkSession): Array[Map[Node, Node]] = {
    // Deprecated no-op: returns the input unchanged.
    // compiler here
    input
  }
  /**
   * Fetches both operand results, applies any filter expressions attached to
   * the left join to the optional (right) side, left-joins the two, then
   * publishes the result under this operator's id and removes the consumed
   * operand results from the model.
   */
  override def execute(): Unit = {
    val leftId = op.getLeft.hashCode()
    val rightId = op.getRight.hashCode()
    val leftResult = IntermediateResult.getResult(leftId).cache()
    var rightResult = IntermediateResult.getResult(rightId).cache()
    if (op.getExprs != null) {
      // Only Filter expressions are supported here; anything else aborts.
      val filters = op.getExprs.getList.asScala.toList.map { expr =>
        new ExprParser(expr).getExpression match {
          case e: Filter => e
          case _ => throw new UnsupportedOperationException
        }
      }
      rightResult = SparkExecutionModel.filter(rightResult, filters)
    }
    // NOTE(review): leftJoin presumably keeps all left bindings and extends
    // them where the right side matches (OPTIONAL semantics) — confirm in
    // SparkExecutionModel.
    val newResult = SparkExecutionModel.leftJoin(leftResult, rightResult)
    IntermediateResult.putResult(id, newResult)
    IntermediateResult.removeResult(leftId)
    IntermediateResult.removeResult(rightId)
  }
  def getOp: Op = { op }
  override def getId: Int = { id }
  override def getTag: String = { tag }
}
| SANSA-Stack/SANSA-RDF | sansa-query/sansa-query-spark/src/main/scala/net/sansa_stack/query/spark/graph/jena/patternOp/PatternOptional.scala | Scala | apache-2.0 | 1,836 |
package levar
import org.joda.time.DateTime
import play.api.libs.json.JsValue
/**
 * Arbitrary comment datatype.
 *
 * The optional fields are absent until the comment has been persisted and are
 * then populated from the database record.
 *
 * @param username the identifier of the user who made the comment
 * @param comment the content of the comment
 * @param id unique identifier for display and lookup, created by DB
 * @param path the URL path to view the comment
 * @param subject the thing being commented on (display name and JSON payload)
 * @param createdAt the date the comment was put in the DB
 */
case class Comment(
  username: String,
  comment: String,
  id: Option[String] = None,
  path: Option[String] = None,
  subject: Option[(String, JsValue)] = None,
  createdAt: Option[DateTime] = None)
| peoplepattern/LeVar | levar-core/src/main/scala/levar/Comment.scala | Scala | apache-2.0 | 723 |
package mesosphere.marathon.core.leadership
import akka.actor.{ ActorRef, ActorRefFactory, Props }
import com.twitter.common.zookeeper.ZooKeeperClient
import mesosphere.marathon.LeadershipAbdication
import mesosphere.marathon.core.leadership.impl._
trait LeadershipModule {
  /**
   * Create a wrapper around an actor which should only be active if this instance of Marathon is the
   * current leader instance. The wrapper starts the actor with the given props when appropriate and stops
   * it when this instance loses Leadership. If the wrapper receives messages while not being the leader,
   * it stashes all messages with Status.Failure messages.
   *
   * @param props the props to create the actor
   * @param name the name of the actor (the wrapping actor will be named like this)
   */
  def startWhenLeader(props: Props, name: String): ActorRef
  /**
   * Returns the coordinator driving the registered wrappers through
   * leadership transitions. See the implementation for call-order
   * constraints relative to startWhenLeader.
   */
  def coordinator(): LeadershipCoordinator
}
object LeadershipModule {

  /** Creates the default [[LeadershipModule]] implementation. */
  def apply(actorRefFactory: ActorRefFactory, zk: ZooKeeperClient, leader: LeadershipAbdication): LeadershipModule =
    new LeadershipModuleImpl(actorRefFactory, zk, leader)
}
/**
 * This module provides a utility function for starting actors only when our instance is the current leader.
 * This should be used for all normal top-level actors.
 *
 * In addition, it exports the coordinator which coordinates the activity performed when elected or stopped.
 * The leadership election logic needs to call the appropriate methods for this module to work.
 *
 * Call-order contract: all startWhenLeader registrations must happen before
 * the first call to coordinator(), which snapshots the registered actors.
 */
private[leadership] class LeadershipModuleImpl(
  actorRefFactory: ActorRefFactory, zk: ZooKeeperClient, leader: LeadershipAbdication) extends LeadershipModule {
  // All wrapper actors registered so far; handed to the coordinator once.
  private[this] var whenLeaderRefs = Set.empty[ActorRef]
  // Becomes true the first time coordinator() is evaluated; after that, a
  // late registration would be invisible to the coordinator, hence the
  // require guards below.
  private[this] var started: Boolean = false
  override def startWhenLeader(props: Props, name: String): ActorRef = {
    require(!started, s"already started: $name")
    val proxyProps = WhenLeaderActor.props(props)
    val actorRef = actorRefFactory.actorOf(proxyProps, name)
    whenLeaderRefs += actorRef
    actorRef
  }
  override def coordinator(): LeadershipCoordinator = coordinator_
  // Lazily created exactly once; flips `started` so no further registration
  // can slip in after the snapshot of whenLeaderRefs is taken.
  private[this] lazy val coordinator_ = {
    require(!started, "already started")
    started = true
    val props = LeadershipCoordinatorActor.props(whenLeaderRefs)
    val actorRef = actorRefFactory.actorOf(props, "leaderShipCoordinator")
    new LeadershipCoordinatorDelegate(actorRef)
  }
  /**
   * Register this actor by default. Runs at construction time, i.e. before
   * coordinator() can possibly have been called.
   */
  startWhenLeader(AbdicateOnConnectionLossActor.props(zk, leader), "AbdicateOnConnectionLoss")
}
| ss75710541/marathon | src/main/scala/mesosphere/marathon/core/leadership/LeadershipModule.scala | Scala | apache-2.0 | 2,573 |
package fpinscala.gettingstarted
// A comment!
/* Another comment */
/** A documentation comment */
object MyModule {

  /** Absolute value of `n`. */
  def abs(n: Int): Int =
    if (n < 0) -n
    else n

  private def formatAbs(x: Int) = {
    val msg = "The absolute value of %d is %d"
    msg.format(x, abs(x))
  }

  def main(args: Array[String]): Unit =
    println(formatAbs(-42))

  /** Factorial via a local, tail-recursive loop. Returns 1 for n <= 0. */
  def factorial(n: Int): Int = {
    @annotation.tailrec
    def go(n: Int, acc: Int): Int =
      if (n <= 0) acc
      else go(n - 1, n * acc)
    go(n, 1)
  }

  /** Factorial implemented with a `while` loop, for comparison. */
  def factorial2(n: Int): Int = {
    var acc = 1
    var i = n
    while (i > 0) { acc *= i; i -= 1 }
    acc
  }

  /**
   * Exercise 1: the nth Fibonacci number (fib(0) = 0, fib(1) = 1).
   *
   * Naive exponential-time recursion, kept as the exercise's reference
   * answer; `case n if n <= 0` also guards against negative input, which
   * previously recursed without bound.
   */
  def fib(n: Int): Int = n match {
    case n if n <= 0 => 0
    case 1 => 1
    case n => fib(n - 2) + fib(n - 1)
  }

  /**
   * Tail-recursive Fibonacci.
   *
   * Fix: the previous version started its counter at 1 and only ever
   * incremented it, so `fibTail(0)` (and any negative n) never terminated.
   * The `n <= 0` guard restores fib(0) = 0 and makes negative input safe.
   * The non-idiomatic `return` inside the helper is gone: the conditional
   * is now a plain expression.
   */
  def fibTail(n: Int): Int =
    if (n <= 0) 0
    else {
      @annotation.tailrec
      def go(target: Int, current: Int, fibCurrent: Int, fibPrevious: Int): Int =
        if (target == current) fibCurrent
        else go(target, current + 1, fibCurrent + fibPrevious, fibCurrent)
      go(n, 1, 1, 0)
    }

  // This definition and `formatAbs` are very similar..
  private def formatFactorial(n: Int) = {
    val msg = "The factorial of %d is %d."
    msg.format(n, factorial(n))
  }

  /**
   * Generalization of `formatAbs`/`formatFactorial`: formats the result of
   * applying `f` to `n`, labelled with `name`.
   */
  def formatResult(name: String, n: Int, f: Int => Int): String = {
    val msg = "The %s of %d is %d."
    msg.format(name, n, f(n))
  }
}
object FormatAbsAndFactorial {
  import MyModule._

  /** Demonstrates `formatResult` applied to two different `Int => Int` functions. */
  def main(args: Array[String]): Unit = {
    val reports = List(
      formatResult("absolute value", -42, abs),
      formatResult("factorial", 7, factorial)
    )
    reports.foreach(println)
  }
}
object TestFib {
  import MyModule._

  /** Prints the first seven Fibonacci numbers alongside the expected sequence. */
  def main(args: Array[String]): Unit = {
    println("Expected: 0, 1, 1, 2, 3, 5, 8")
    val actual = (0 to 6).map(fib)
    println("Actual: %d, %d, %d, %d, %d, %d, %d".format(
      actual(0), actual(1), actual(2), actual(3), actual(4), actual(5), actual(6)))
  }
}
// Functions get passed around so often in FP that it's
// convenient to have syntax for constructing a function
// *without* having to give it a name
object AnonymousFunctions {
  import MyModule._
  // Each call below supplies an increment function in a progressively more
  // abbreviated — but equivalent — lambda syntax. Spelling out every variant
  // is the point of this demo, so none of them should be "simplified".
  def main(args: Array[String]): Unit = {
    println(formatResult("absolute value", -42, abs))
    println(formatResult("factorial", 7, factorial))
    println(formatResult("increment", 7, (x: Int) => x + 1))   // full annotation
    println(formatResult("increment2", 7, (x) => x + 1))       // inferred type
    println(formatResult("increment3", 7, x => x + 1))         // no parens
    println(formatResult("increment4", 7, _ + 1))              // placeholder
    println(formatResult("increment5", 7, x => { val r = x + 1; r })) // block body
  }
}
object MonomorphicBinarySearch {

  /**
   * Binary search over a sorted Array[Double].
   *
   * Returns the index of `key` if present; otherwise a negative value
   * (`-mid - 1`, where `mid` is the last probed midpoint), preserving the
   * original not-found convention.
   *
   * Fix: the midpoint is now computed as `low + (high - low) / 2` instead of
   * `(low + high) / 2`, which can overflow Int for very large arrays.
   */
  def binarySearch(ds: Array[Double], key: Double): Int = {
    @annotation.tailrec
    def go(low: Int, mid: Int, high: Int): Int = {
      if (low > high) -mid - 1
      else {
        // Overflow-safe midpoint.
        val mid2 = low + (high - low) / 2
        val d = ds(mid2) // We index into an array using the same
                         // syntax as function application
        if (d == key) mid2
        else if (d > key) go(low, mid2, mid2 - 1)
        else go(mid2 + 1, mid2, high)
      }
    }
    go(0, 0, ds.length - 1)
  }
}
object PolymorphicFunctions {

  /**
   * Polymorphic binary search, parameterized on a strict "greater than"
   * comparison. Returns the index of `key` if present, otherwise a negative
   * value (`-mid - 1` for the last probed midpoint).
   *
   * Fix: overflow-safe midpoint, consistent with MonomorphicBinarySearch.
   */
  def binarySearch[A](as: Array[A], key: A, gt: (A, A) => Boolean): Int = {
    @annotation.tailrec
    def go(low: Int, mid: Int, high: Int): Int = {
      if (low > high) -mid - 1
      else {
        val mid2 = low + (high - low) / 2
        val a = as(mid2)
        val greater = gt(a, key)
        if (!greater && !gt(key, a)) mid2 // neither greater: equal
        else if (greater) go(low, mid2, mid2 - 1)
        else go(mid2 + 1, mid2, high)
      }
    }
    go(0, 0, as.length - 1)
  }

  /**
   * Exercise 2 (implemented; was `???`): true iff no adjacent pair is out of
   * order according to `gt`. Empty and single-element arrays are sorted.
   */
  def isSorted[A](as: Array[A], gt: (A, A) => Boolean): Boolean = {
    @annotation.tailrec
    def go(i: Int): Boolean =
      if (i >= as.length - 1) true
      else if (gt(as(i), as(i + 1))) false
      else go(i + 1)
    go(0)
  }

  // Polymorphic functions are often so constrained by their type
  // that they only have one implementation! Here's an example:
  def partial1[A, B, C](a: A, f: (A, B) => C): B => C =
    (b: B) => f(a, b)

  /** Exercise 3 (implemented; was `???`): two-argument function to curried form. */
  def curry[A, B, C](f: (A, B) => C): A => (B => C) =
    a => b => f(a, b)

  /** Exercise 4 (implemented; was `???`): inverse of `curry`. */
  def uncurry[A, B, C](f: A => B => C): (A, B) => C =
    (a, b) => f(a)(b)

  /** Exercise 5 (implemented; was `???`): function composition, `f` after `g`. */
  def compose[A, B, C](f: B => C, g: A => B): A => C =
    a => f(g(a))
}
| svenski/fpinscala | exercises/src/main/scala/fpinscala/gettingstarted/GettingStarted.scala | Scala | mit | 5,634 |
package com.twitter.finatra.http.tests.integration.doeverything.main.exceptions
import scala.util.control.NoStackTrace
// Base test exception carrying an identifier. Mixing in NoStackTrace
// suppresses stack-trace capture, keeping construction cheap for
// exceptions used as test fixtures.
class FooException(val id: String)
  extends Exception
  with NoStackTrace

// Fixture: FooException with the fixed id "123".
class BarException extends FooException("123")

// Fixture: FooException with the fixed id "321".
class BazException extends FooException("321")
| syamantm/finatra | http/src/test/scala/com/twitter/finatra/http/tests/integration/doeverything/main/exceptions/exceptions.scala | Scala | apache-2.0 | 292 |
/*
* Copyright (c) 2011, Daniel Spiewak
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
* - Neither the name of "Anti-XML" nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.codecommit.antixml
import org.xml.sax._
import java.io.{InputStream, Reader, StringReader}
import javax.xml.parsers.SAXParserFactory
import scala.io.Source
/**
* A trait for objects which construct antixml from XML sources.
*/
// TODO named arguments for configuration
trait XMLParser {
  def fromString(str: String): Elem

  def fromInputStream(is: InputStream): Elem

  def fromReader(reader: Reader): Elem

  /** Parses from a [[scala.io.Source]] by adapting it to a [[java.io.Reader]]. */
  def fromSource(source: Source): Elem = fromReader(new SourceReader(source))

  /** Adapter exposing a scala.io.Source through the java.io.Reader interface. */
  private class SourceReader(source: Source) extends Reader {

    def read(ch: Array[Char], offset: Int, length: Int) = {
      if (!source.hasNext) {
        -1 // end of stream
      } else {
        val limit = offset + length
        var i = offset
        // Copy characters until the requested slice is full or the source
        // runs dry; at least one character is available at this point.
        while (i < limit && source.hasNext) {
          ch(i) = source.next()
          i += 1
        }
        i - offset // number of characters actually written
      }
    }

    override def reset() {
      source.reset()
    }

    override def close() {
      source.close()
    }
  }
}
| djspiewak/anti-xml | src/main/scala/com/codecommit/antixml/XMLParser.scala | Scala | bsd-3-clause | 2,697 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.executionplan.builders
import org.neo4j.cypher.internal.compiler.v2_3.commands._
import org.neo4j.cypher.internal.compiler.v2_3.executionplan.PartiallySolvedQuery
import org.neo4j.cypher.internal.frontend.v2_3.SemanticDirection
class ShortestPathBuilderTest extends BuilderTest {
  val builder = new ShortestPathBuilder
  // The builder must not fire on a query whose only unsolved pattern is a
  // plain RelatedTo — shortest-path building applies to ShortestPath only.
  test("should_not_accept_if_no_shortest_paths_exist") {
    val q = PartiallySolvedQuery().
      copy(start = Seq(Solved(NodeById("l", 0))),
        patterns = Seq(Unsolved(RelatedTo(SingleNode("l"), SingleNode("r"), "rel", Seq(), SemanticDirection.OUTGOING, Map.empty))))
    val p = createPipe(nodes = Seq("l"))
    assertRejects(p, q)
  }
  // Once both endpoints are solved, the builder should accept and mark the
  // shortest-path pattern itself as Solved in the resulting query.
  test("should_accept_if_both_start_and_end_have_been_solved") {
    val q = PartiallySolvedQuery().
      copy(start = Seq(Solved(NodeById("a", 0)), Solved(NodeById("b", 0))),
        patterns = Seq(Unsolved(ShortestPath("p", SingleNode("a"), SingleNode("b"), Seq(), SemanticDirection.OUTGOING, false, None, single = true, None))))
    val p = createPipe(nodes = Seq("a", "b"))
    val resultQ = assertAccepts(p, q).query
    resultQ.patterns should equal(Seq(Solved(ShortestPath("p", SingleNode("a"), SingleNode("b"), Seq(), SemanticDirection.OUTGOING, false, None, single = true, None))))
  }
}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/test/scala/org/neo4j/cypher/internal/compiler/v2_3/executionplan/builders/ShortestPathBuilderTest.scala | Scala | apache-2.0 | 2,112 |
import sbt._
import Keys._
import play.Play.autoImport._
import PlayKeys._
object ApplicationBuild extends Build {
  // Application identity used both as the sbt project id and for packaging.
  val appName = "etoxvault_ws"
  val appVersion = "1.0"
  // Runtime dependencies: Play's JDBC support plus the MySQL driver
  // (pinned to 5.1.26).
  val appDependencies = Seq(
    // Add your project dependencies here,
    jdbc,
    "mysql" % "mysql-connector-java" % "5.1.26")
  // Root project, built as a Play Scala application.
  val main = Project(appName, file(".")).enablePlugins(play.PlayScala).settings(
    version := appVersion,
    scalaVersion := "2.11.8",
    libraryDependencies ++= appDependencies)
}
| phi-grib/eTOX-vault-ws | project/Build.scala | Scala | gpl-3.0 | 494 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import java.util.concurrent.{ConcurrentHashMap, ConcurrentLinkedQueue}
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
/**
 * A Schedulable entity that represents a collection of Pools or TaskSetManagers.
 *
 * Pools form a tree: a pool's children are held in `schedulableQueue` and each
 * child's `parent` points back here. Concurrent collections are used because
 * schedulables may be added/removed while the scheduler iterates.
 */
private[spark] class Pool(
    val poolName: String,
    val schedulingMode: SchedulingMode,
    initMinShare: Int,
    initWeight: Int)
  extends Schedulable with Logging {

  val schedulableQueue = new ConcurrentLinkedQueue[Schedulable]
  val schedulableNameToSchedulable = new ConcurrentHashMap[String, Schedulable]
  val weight = initWeight
  val minShare = initMinShare
  // Running-task count aggregated over this pool and all descendants.
  var runningTasks = 0
  val priority = 0
  // A pool's stage id is used to break the tie in scheduling.
  var stageId = -1
  val name = poolName
  var parent: Pool = null
  // Comparator used to order children in getSortedTaskSetQueue; chosen once
  // from the configured scheduling mode. Unsupported modes fail fast.
  private val taskSetSchedulingAlgorithm: SchedulingAlgorithm = {
    schedulingMode match {
      case SchedulingMode.FAIR =>
        new FairSchedulingAlgorithm()
      case SchedulingMode.FIFO =>
        new FIFOSchedulingAlgorithm()
      case _ =>
        val msg = s"Unsupported scheduling mode: $schedulingMode. Use FAIR or FIFO instead."
        throw new IllegalArgumentException(msg)
    }
  }
  // Registers a child schedulable and re-parents it to this pool.
  override def addSchedulable(schedulable: Schedulable) {
    require(schedulable != null)
    schedulableQueue.add(schedulable)
    schedulableNameToSchedulable.put(schedulable.name, schedulable)
    schedulable.parent = this
  }
  // Removes a child; its `parent` pointer is left unchanged.
  override def removeSchedulable(schedulable: Schedulable) {
    schedulableQueue.remove(schedulable)
    schedulableNameToSchedulable.remove(schedulable.name)
  }
  // Looks up a schedulable by name: first among direct children, then
  // recursively in each child pool. Returns null when not found.
  override def getSchedulableByName(schedulableName: String): Schedulable = {
    if (schedulableNameToSchedulable.containsKey(schedulableName)) {
      return schedulableNameToSchedulable.get(schedulableName)
    }
    for (schedulable <- schedulableQueue.asScala) {
      val sched = schedulable.getSchedulableByName(schedulableName)
      if (sched != null) {
        return sched
      }
    }
    null
  }
  // Propagates an executor-loss notification to every child.
  override def executorLost(executorId: String, host: String, reason: ExecutorLossReason) {
    schedulableQueue.asScala.foreach(_.executorLost(executorId, host, reason))
  }
  // True if any child has speculatable tasks. Note: every child is always
  // visited (no short-circuit), so all children get the notification.
  override def checkSpeculatableTasks(minTimeToSpeculation: Int): Boolean = {
    var shouldRevive = false
    for (schedulable <- schedulableQueue.asScala) {
      shouldRevive |= schedulable.checkSpeculatableTasks(minTimeToSpeculation)
    }
    shouldRevive
  }
  // Flattens the tree into scheduling order: children are sorted with the
  // configured algorithm, then each contributes its own sorted queue.
  override def getSortedTaskSetQueue: ArrayBuffer[TaskSetManager] = {
    val sortedTaskSetQueue = new ArrayBuffer[TaskSetManager]
    val sortedSchedulableQueue =
      schedulableQueue.asScala.toSeq.sortWith(taskSetSchedulingAlgorithm.comparator)
    for (schedulable <- sortedSchedulableQueue) {
      sortedTaskSetQueue ++= schedulable.getSortedTaskSetQueue
    }
    sortedTaskSetQueue
  }
  // Adds to this pool's running-task count and bubbles the delta up the tree.
  def increaseRunningTasks(taskNum: Int) {
    runningTasks += taskNum
    if (parent != null) {
      parent.increaseRunningTasks(taskNum)
    }
  }
  // Subtracts from this pool's running-task count and bubbles the delta up.
  def decreaseRunningTasks(taskNum: Int) {
    runningTasks -= taskNum
    if (parent != null) {
      parent.decreaseRunningTasks(taskNum)
    }
  }
}
| akopich/spark | core/src/main/scala/org/apache/spark/scheduler/Pool.scala | Scala | apache-2.0 | 4,093 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package reforest.rf
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.RDD
import reforest.TypeInfo
import reforest.data.{RawDataLabeled, StaticData}
import reforest.data.tree.ForestManager
import reforest.rf.split.{RFSplitter, RFSplitterManager}
import reforest.util.{GCInstrumented, MemoryUtil}
class RFDataPrepare[T, U](typeInfo: Broadcast[TypeInfo[T]],
                          instrumented: Broadcast[GCInstrumented],
                          strategy: Broadcast[RFStrategy[T, U]],
                          permitSparseWorkingData: Boolean,
                          poissonMean: Double) extends Serializable {

  /**
    * Converts the raw labeled dataset into the working representation used for
    * forest construction, delegating the per-partition conversion to the
    * broadcast strategy.
    *
    * @param dataIndex      the raw labeled input data
    * @param splitter       broadcast splitter used by the strategy
    * @param featureNumber  number of features (currently not used here directly)
    * @param memoryUtil     memory bookkeeping helper forwarded to the strategy
    * @param numTrees       number of trees to prepare data for
    * @param macroIteration current macro-iteration index
    * @return the prepared per-element working data
    */
  def prepareData(dataIndex: RDD[RawDataLabeled[T, U]],
                  splitter : Broadcast[RFSplitter[T, U]],
                  featureNumber: Int,
                  memoryUtil: MemoryUtil,
                  numTrees: Int,
                  macroIteration : Int):
  RDD[StaticData[U]] = {
    // One conversion per partition; the partition index is forwarded so the
    // strategy can vary its behavior (e.g. sampling) per partition.
    val convertPartition = (partitionIndex: Int, instances: Iterator[RawDataLabeled[T, U]]) =>
      strategy.value.prepareData(numTrees, macroIteration, splitter, partitionIndex, instances, instrumented.value, memoryUtil)
    dataIndex.mapPartitionsWithIndex(convertPartition)
  }
}
| alessandrolulli/reforest | src/main/scala/reforest/rf/RFDataPrepare.scala | Scala | apache-2.0 | 1,997 |
package org.jwvictor.flinktrl
/**
* Copyright 2017 Jason Victor
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.flink.streaming.api.scala._
import org.jwvictor.flinktrl.operators.{BasicStringSplitter, TextInputOperators}
import breeze.linalg._
import breeze.numerics._
import breeze.util._
import org.apache.flink.streaming.api.functions.source.SourceFunction
import org.jwvictor.flinktrl.math.FtrlParameters
import org.jwvictor.flinktrl.math.MachineLearningUtilities._
/**
 * Entry point for example driver.
 *
 * Wires a toy FTRL learning pipeline: a random-text observation source and a
 * random-Gaussian "weight" source are combined through a feedback loop and the
 * learned state is serialized to a text file.
 */
object Job {
  def main(args: Array[String]) {
    // Set up the execution environment
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setMaxParallelism(1) // TODO: get more parallelism
    import org.apache.flink.streaming.api.scala._
    // Basic model parameters
    val nDimensions = 100
    implicit val ftrlParameters = FtrlParameters(1, 1, 1, 1, nDimensions) // implicit input to `withFtrlLearning`
    // Dummy outcome vector used as the "observed outcome" for every observation.
    val res = SparseVector.zeros[Double](1)
    // Example serialization to file
    breeze.linalg.mmwrite(new java.io.File("resoutfile.dat"), new DenseMatrix[Double](res.length, 1, res.toArray))
    // Input stream
    // NOTE(review): fileStream is created but never consumed downstream.
    val fileStream = env.readTextFile("testdata.dat")
    // Source of 1000 random 300-char strings, one every 300 ms.
    val txtStream = env.addSource[String](new SourceFunction[String] {
      @volatile
      private var isRunning = true
      override def cancel(): Unit = {
        isRunning = false
      }
      override def run(ctx: SourceFunction.SourceContext[String]): Unit = {
        var ctr:Int = 0
        while(isRunning && ctr < 1000){
          ctx.collect(scala.util.Random.nextString(300))
          Thread.sleep(300)
          ctr += 1
        }
      }
    })
    // Source of 1000 random Gaussian weight vectors of size nDimensions.
    val weightInputStream = env.addSource[DenseVector[Double]](new SourceFunction[DenseVector[Double]] {
      @volatile
      private var isRunning = true
      override def cancel(): Unit = {
        isRunning = false
      }
      override def run(ctx: SourceFunction.SourceContext[DenseVector[Double]]): Unit = {
        var ctr:Int = 0
        while(isRunning && ctr < 1000){
          val data = 0.until(nDimensions).map(_ => scala.util.Random.nextGaussian).toArray
          ctx.collect(DenseVector(data))
          Thread.sleep(300)
          ctr += 1
        }
      }
    })
    import org.jwvictor.flinktrl.operators.FtrlLearning._
    // Hash each text into an nDimensions-sized feature vector and pair it with
    // the dummy outcome above.
    val observationStream = txtStream.
      map(TextInputOperators.textToHashVector(_, nDimensions, BasicStringSplitter)).
      map(x => ObservationWithOutcome(ObservedValues(x), ObservedValues(Sparse(res))))
    // Fake learned-weights stream used to close the feedback loop.
    val fakeWeightStream = weightInputStream.map(x => LearnedWeights(Dense(x), Dense(x), Dense(x)))
    // FTRL learning over the feedback loop; results are stringified for output.
    val learnedWeightsAndStateStream = observationStream.
      createFeedbackLoop(fakeWeightStream).
      withFtrlLearning.
      map(_.serialize.array().map(_.toString).toList.toString)
    learnedWeightsAndStateStream.writeAsText("./state-out-ftrl-test.dat")
    env.execute("FlinkTRL test driver")
  }
}
| jwvictor/flink-trl | src/main/scala/org/jwvictor/flinktrl/Job.scala | Scala | apache-2.0 | 3,519 |
package colang.tokens
import colang.LexerUnitSpec
// Unit tests for the lexer strategy that recognizes boolean literals.
class BoolLiteralStrategySpec extends LexerUnitSpec {
  describe("Bool literal lexer strategy") {
    it("should match 'true'") {
      // The produced token must carry the parsed boolean value.
      BoolLiteral.strategy shouldSucceedOn "true" withoutIssues() andInProduced { token =>
        token.value should be (true)
      }
    }
    it("should match 'false'") {
      BoolLiteral.strategy shouldSucceedOn "false" withoutIssues() andInProduced { token =>
        token.value should be (false)
      }
    }
    it("should not match identifiers that 'true' or 'false' is a prefix of") {
      // A longer identifier must not be tokenized as a boolean literal.
      BoolLiteral.strategy shouldNotMatch "trueabc"
      BoolLiteral.strategy shouldNotMatch "falsexyz"
    }
  }
}
| merkispavel/colang | src/test/scala/colang/tokens/BoolLiteralStrategySpec.scala | Scala | mit | 700 |
/**
* Copyright 2009 Jorge Ortiz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**/
package org.scala_tools.javautils.j2s
import java.util.Dictionary
import scala.collection.{Map => SMap}
import scala.collection.mutable.{Map => SMutableMap}
import scala.Function.untupled
import org.scala_tools.javautils.s2j.{SMapDictionaryWrapper, SMutableMapDictionaryWrapper}
class RichJDictionary[K, V](dictionary: Dictionary[K, V]) {

  /**
   * Views this dictionary as a read-only Scala Map. When the dictionary is
   * itself a wrapper around a Scala Map, the original map is unwrapped instead
   * of being wrapped a second time.
   */
  def asScala: SMap[K, V] = dictionary match {
    case alreadyWrapped: SMapDictionaryWrapper[_, _] =>
      alreadyWrapped.asScala.asInstanceOf[SMap[K, V]]
    case plain =>
      new JDictionaryWrapper[K, V] {
        type Wrapped = Dictionary[K, V]
        val underlying = plain
      }
  }

  /**
   * Views this dictionary as a mutable Scala Map, unwrapping an existing
   * mutable-map wrapper when possible.
   */
  def asScalaMutable: SMutableMap[K, V] = dictionary match {
    case alreadyWrapped: SMutableMapDictionaryWrapper[_, _] =>
      alreadyWrapped.asScala.asInstanceOf[SMutableMap[K, V]]
    case plain =>
      new JMutableDictionaryWrapper[K, V] {
        type Wrapped = Dictionary[K, V]
        val underlying = plain
      }
  }

  /** Applies `fn` to every (key, value) pair. */
  def foreach(fn: Tuple2[K, V] => Unit): Unit =
    foreach((key: K, value: V) => fn(key -> value))

  /** Applies `fn` to every key together with its associated value. */
  def foreach(fn: (K, V) => Unit): Unit =
    Implicits.RichJEnumeration(dictionary.keys).foreach { key =>
      fn(key, dictionary.get(key))
    }
}
| jorgeortiz85/scala-javautils | src/main/scala/org/scala_tools/javautils/j2s/RichJDictionary.scala | Scala | apache-2.0 | 1,748 |
package es.weso.shex
import scala.util.parsing.input.Positional
import es.weso.rdf.nodes._
import XSFacet._
import PREFIXES._
import util._
import es.weso.validating._
import Checked._
import ConstraintReason._
import Constraint._
import ValueClass._
/**
* ValueClass ::= ValueConstr | ShapeConstr | ValueClassRef
*
*/
sealed trait ValueClass extends Positional
object ValueClass {
  // Value class that matches any node: a value set containing the
  // unrestricted "any" value with no exclusions.
  lazy val any : ValueClass = ValueSet(Seq(ValueAny(exclusions = List())))
  // Maps one of the sh:* node-kind IRIs to the corresponding NodeKind
  // (with no shape constraint and no facets). Any other IRI is a Failure.
  def nodeKindfromIRI(iri: IRI): Try[NodeKind] = {
    iri match {
      case `sh_IRI` => Success(IRIKind(None, List()))
      case `sh_BNode` => Success(BNodeKind(None, List()))
      case `sh_Literal` => Success(LiteralKind(List()))
      case `sh_NonLiteral` => Success(NonLiteralKind(None, List()))
      // case `sh_Any` => Success(AnyKind)
      case _ => Failure(new Exception("nodeKindFromIRI: unsupported IRI: " + iri))
    }
  }
  // Convenience instances without shape constraints or facets.
  lazy val iriKind = IRIKind(None, List())
  lazy val bnodeKind = BNodeKind(None, List())
  lazy val typeXsdString = Datatype(xsd_string, List())
}
case class ValueClassRef(label: Label) extends ValueClass
/**
 * ValueConstr ::= LiteralDatatype | ValueSet | NodeKind
 *
 * A constraint that can be checked directly against a single RDF node.
 */
sealed trait ValueConstr extends ValueClass
  with Positional {
  // Checks whether `node` satisfies this constraint, returning a checked
  // result that is either ok (with a reason) or an error.
  def check(node: RDFNode): CheckedRDFNode
}
case class Datatype(
    v: IRI,
    facets: List[XSFacet]) extends ValueConstr
    with Positional {

  /**
   * Checks that `node` is a literal of datatype `v` satisfying the facets.
   * When `v` is sh:text, both xsd:string and rdf:langString literals match.
   * Non-literal nodes always fail.
   */
  override def check(node: RDFNode): CheckedRDFNode = node match {
    case l: Literal if v == sh_text =>
      // sh:text accepts plain strings and language-tagged strings alike.
      val datatypeOk = l.dataType == xsd_string || l.dataType == rdf_langString
      if (datatypeOk && checkFacets(node, facets).isOK)
        okSingle(node, s"$node matches literal $l")
      else
        errString(s"literal $l doesn't match datatype $this")
    case l: Literal =>
      if (l.dataType == v && checkFacets(node, facets).isOK)
        okSingle(node, s"$node matches literal datatype $l")
      else
        errString(s"literal $l with datatype ${l.dataType} doesn't match datatype $v")
    case _ =>
      // Both original branches produced this same message for non-literals.
      errString(s"node $node doesn't match datatype $this")
  }
}
case class ValueSet(s: Seq[ValueObject]) extends ValueConstr
    with Positional {
  /** Succeeds when at least one value object in the set accepts `node`. */
  override def check(node: RDFNode): CheckedRDFNode = {
    // NOTE(review): each generated checker ignores its own argument and closes
    // over `node` (as in the original) — confirm this is intentional.
    val checkers: Seq[RDFNode => CheckedRDFNode] =
      for (vo <- s) yield (_: RDFNode) => vo.check(node)
    checkSome(node, checkers)
  }
}
// A single member of a value set: an IRI, a literal, a language tag, a stem
// range, or the "any" wildcard. Each knows how to check a node against itself.
sealed trait ValueObject extends Positional {
  def check(node: RDFNode): CheckedRDFNode
}
case class ValueIRI(iri: IRI) extends ValueObject {
  /** Succeeds when `node` is an IRI equal to `iri`; fails otherwise. */
  override def check(node: RDFNode): CheckedRDFNode = {
    val isMatch = node.isIRI && node.toIRI == iri
    if (isMatch) okSingle(node, s"$node is an IRI")
    else errString(s"node $node doesn't match IRI $iri")
  }
}
case class ValueLiteral(literal: Literal) extends ValueObject {
  /** Succeeds when `node` is a literal equal to `literal`; fails otherwise. */
  override def check(node: RDFNode): CheckedRDFNode = node match {
    case matched: Literal if matched == literal =>
      okSingle(node, s"$node is a Literal")
    case _ =>
      errString(s"node $node doesn't match Literal $literal")
  }
}
case class ValueLang(lang: Lang) extends ValueObject {
  /** Succeeds when `node` is a language-tagged literal whose tag equals `lang`. */
  override def check(node: RDFNode): CheckedRDFNode = node match {
    case tagged: LangLiteral if tagged.lang == lang =>
      okSingle(node, s"$node is a language tagged literal")
    case _ =>
      errString(s"node $node doesn't match Language literal $lang")
  }
}
// A stem range (IRI prefix with exclusions). Checking is not implemented yet:
// every node currently fails with an "Unimplemented" error.
case class StemRange(
    stem: IRI,
    exclusions: List[Exclusion]) extends ValueObject {
  override def check(node: RDFNode): CheckedRDFNode = {
    errString(s"Unimplemented value Stem")
  }
}
case class ValueAny(exclusions: List[Exclusion]) extends ValueObject {
  /**
   * Matches every node when there are no exclusions.
   * Checking with exclusions is not implemented yet and always errors.
   */
  override def check(node: RDFNode): CheckedRDFNode =
    if (exclusions.nonEmpty) errString("Not implemented ValueAny with exclusions")
    else okSingle(node, s"$node matches any")
}
// An exclusion inside a stem range: an IRI that is excluded, either exactly
// (isStem = false) or as a whole sub-stem (isStem = true).
case class Exclusion(
    iri: IRI,
    isStem: Boolean) extends Positional
// A node-kind constraint (IRI / BNode / Literal / NonLiteral). `token` is the
// kind's textual name; the default `check` errors until a subclass overrides it.
sealed trait NodeKind extends ValueConstr
  with Positional {
  def token: String
  override def check(node: RDFNode): CheckedRDFNode =
    errString(s"Not implemented check on $this for node $node")
}
// Node kind constraint requiring an IRI, optionally with string facets.
// Checking against an attached shape constraint is not implemented yet.
case class IRIKind(
    shapeConstr: Option[ShapeConstr],
    facets: List[StringFacet]) extends NodeKind {
  override def token = "IRI"
  override def check(node: RDFNode): CheckedRDFNode =
    if (shapeConstr.isDefined)
      errString(s"IRIKind: $this, unimplemented check for shapeConstr. Node: $node")
    else {
      if (node.isIRI)
        // Facet violations (if any) are reported by checkFacets itself.
        checkFacets(node, facets)
      else {
        errString(s"IRIKind failed: node: $node is not an IRI")
      }
    }
}
// Node kind constraint requiring a blank node, optionally with string facets.
case class BNodeKind(
    shapeConstr: Option[ShapeConstr],
    facets: List[StringFacet]) extends NodeKind {
  override def token = "BNode"

  /**
   * Checks that `node` is a blank node satisfying the string facets.
   * Checking against an attached shape constraint is not implemented yet.
   */
  override def check(node: RDFNode): CheckedRDFNode =
    if (shapeConstr.isDefined)
      errString(s"BNodeKind: $this, unimplemented check for shapeConstr. Node: $node")
    else {
      if (node.isBNode)
        // Facet violations are reported by checkFacets itself.
        checkFacets(node, facets)
      else {
        // Reached only for non-blank nodes; previous message wrongly blamed
        // facets ("or doesn't pass facets") and had a grammar slip ("an BNode").
        errString(s"BNodeKind failed: node: $node is not a BNode")
      }
    }
}
// Node kind constraint requiring a literal, optionally with XS facets.
case class LiteralKind(
    facets: List[XSFacet]) extends NodeKind {
  override def token = "Literal"

  /** Checks that `node` is a literal satisfying the XS facets. */
  override def check(node: RDFNode): CheckedRDFNode =
    if (node.isLiteral) {
      // Facet violations are reported by checkFacets itself.
      checkFacets(node, facets)
    } else {
      // Reached only for non-literals; previous message wrongly blamed facets.
      errString(s"LiteralKind failed: node: $node is not a Literal")
    }
}
// Node kind constraint requiring a non-literal node (IRI or blank node),
// optionally with XS facets.
case class NonLiteralKind(
    shapeConstr: Option[ShapeConstr],
    facets: List[XSFacet]) extends NodeKind {
  override def token = "NonLiteral"

  /**
   * Checks that `node` is not a literal and satisfies the XS facets.
   * Checking against an attached shape constraint is not implemented yet.
   */
  override def check(node: RDFNode): CheckedRDFNode =
    if (shapeConstr.isDefined)
      // Fixed copy-paste: the message previously reported "IRIKind".
      errString(s"NonLiteralKind: $this, unimplemented check for shapeConstr. Node: $node")
    else {
      if (!node.isLiteral)
        // Facet violations are reported by checkFacets itself.
        checkFacets(node, facets)
      else {
        // Reached only for literals; previous message wrongly blamed facets.
        errString(s"NonLiteralKind failed: node: $node is a Literal")
      }
    }
}
/**
 * ShapeConstr ::= SingleShape | NotShape | DisjShapeConstr | ConjShapeConstr
 *
 * A constraint expressed in terms of (references to) other shapes.
 */
sealed trait ShapeConstr extends ValueClass
  with Positional
// The node must conform to the referenced shape.
case class SingleShape(shape: Label) extends ShapeConstr
// The node must NOT conform to the referenced shape.
case class NotShape(shape: Label) extends ShapeConstr
// The node must conform to all referenced shapes.
case class ConjShapeConstr(shapes: Seq[Label]) extends ShapeConstr
// The node must conform to at least one referenced shape.
case class DisjShapeConstr(shapes: Seq[Label]) extends ShapeConstr
// Disjunction over arbitrary value classes (not just shape labels).
case class OrValueClass(shapes: Seq[ValueClass]) extends ShapeConstr
| labra/ShExcala | src/main/scala/es/weso/shex/ValueClass.scala | Scala | mit | 6,710 |
package io.eels.util
import org.apache.hadoop.fs.Path
// Walks a Hadoop Path upwards: yields the path itself, then its parent,
// grandparent, and so on, stopping once getParent returns null.
object PathIterator {
  def apply(path: Path): Iterator[Path] = new Iterator[Path] {
    private var current: Path = path
    override def hasNext: Boolean = current != null
    override def next(): Path =
      if (current == null) Iterator.empty.next() // NoSuchElementException, like takeWhile
      else {
        val here = current
        current = current.getParent
        here
      }
  }
}
| sksamuel/eel-sdk | eel-core/src/main/scala/io/eels/util/PathIterator.scala | Scala | apache-2.0 | 234 |
package ch.uzh.ifi.pdeboer.pplib.hcomp.ballot.snippet
import ch.uzh.ifi.pdeboer.pplib.hcomp._
import ch.uzh.ifi.pdeboer.pplib.hcomp.ballot.dao.BallotDAO
import ch.uzh.ifi.pdeboer.pplib.hcomp.ballot.persistence.DBSettings
import ch.uzh.ifi.pdeboer.pplib.hcomp.ballot.report.AnswerParser
import ch.uzh.ifi.pdeboer.pplib.process.entities.DefaultParameters._
import ch.uzh.ifi.pdeboer.pplib.process.entities.IndexedPatch
import ch.uzh.ifi.pdeboer.pplib.process.stdlib.ContestWithBeatByKVotingProcess
import ch.uzh.ifi.pdeboer.pplib.process.stdlib.ContestWithBeatByKVotingProcess._
// Test portal that replays a fixed sequence of stored ballot answers instead
// of querying a real crowd-sourcing backend.
private[snippet] class TestPortal extends HCompPortalAdapter {
  // Answer ids replayed in order, one per processQuery call.
  // NOTE(review): `it` grows without bound — the 13th query would throw
  // IndexOutOfBoundsException; presumably the test never asks for more. Confirm.
  val answerIDsToGive = List(281, 278, 277, 279, 280, 282, 283, 284, 285, 286, 288, 287)
  var it: Int = 0
  DBSettings.initialize()
  val dao = new BallotDAO
  // Returns the next canned answer; synchronized on the list so concurrent
  // queries each consume a distinct id.
  override def processQuery(query: HCompQuery, properties: HCompQueryProperties): Option[HCompAnswer] = {
    answerIDsToGive.synchronized {
      val targetId = answerIDsToGive(it)
      // NOTE(review): answer.get throws if the id is missing from the DB.
      val answer = dao.getAnswerById(targetId)
      it += 1
      logger.info(s"returning $targetId ${answer.get.answerJson})")
      Some(HTMLQueryAnswer(AnswerParser.buildAnswerMap(answer.get.answerJson), query, Nil))
    }
  }
  override def getDefaultPortalKey: String = "mytest"
  // Nothing to cancel for canned answers.
  override def cancelQuery(query: HCompQuery): Unit = {}
}
// Manual test driver: runs a beat-by-k voting contest (k = 4) against the
// canned TestPortal above, voting between the POSITIVE and NEGATIVE options.
private[snippet] object SnippetHTMLTest extends App {
  val process = new ContestWithBeatByKVotingProcess(Map(
    K.key -> 4,
    PORTAL_PARAMETER.key -> new TestPortal(),
    MAX_ITERATIONS.key -> 30,
    QUESTION_PRICE.key -> new HCompQueryProperties(),
    QUERY_BUILDER_KEY -> new SnippetHTMLQueryBuilder(<div>This is a test</div>, "testquestion")
  ))
  process.process(IndexedPatch.from(List(SnippetHTMLQueryBuilder.POSITIVE, SnippetHTMLQueryBuilder.NEGATIVE)))
} | manuelroesch/PaperValidator | app/helper/questiongenerator/snippet/ReworkCrowdAnswersBBK.scala | Scala | mit | 1,764 |
/*
* Copyright (c) 2016 BBC Design and Engineering
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package bbc.schedulerplus.system
import bbc.schedulerplus.persistence.Cache
import bbc.schedulerplus.{Job, Request}
import org.joda.time.DateTime
import org.joda.time.format.{DateTimeFormat, ISODateTimeFormat}
import scala.concurrent.Future
/**
* Encloses the business logic to for job and job request data
*/
trait JobManagerEngine {
  // NOTE(review): uses the global execution context; consider letting callers
  // supply one.
  import scala.concurrent.ExecutionContext.Implicits.global
  // Backing store for jobs and job requests; supplied by the implementor.
  val cache: Cache
  /**
   * Returns all jobs whose id matches the supplied request's id.
   */
  private def matchingJobs(jobs: Seq[Job], jobRequest: Request): Seq[Job] = jobs.filter(j => j.id == jobRequest.id)
  /**
   * Returns the jobs that are still "active": createdAt (ISO-8601, no millis)
   * plus lifetimeInMillis is after the current system time.
   */
  private def activeJobs(jobs: Seq[Job]): Seq[Job] = {
    val now = System.currentTimeMillis()
    val parser = ISODateTimeFormat.dateTimeNoMillis()
    jobs.filter(j => parser.parseDateTime(j.createdAt).plus(j.lifetimeInMillis).isAfter(now))
  }
  /**
   * Returns the set of ids of the given requests.
   */
  private def requestIds(requests: Seq[Request]): Set[String] = { for(r <- requests) yield r.id }.toSet
  /**
   * Returns the set of ids of the given jobs.
   */
  private def jobIds(jobs: Seq[Job]): Set[String] = { for(a <- jobs) yield a.id}.toSet
  /**
   * Returns the job requests that should be run: a request runs when it has no
   * matching job, or its matching job has stalled (createdAt + lifetimeInMillis
   * is in the past).
   */
  private def requestsToRun(requests: Seq[Request], jobs: Seq[Job]): Seq[Request] = {
    if (jobs.size <= 0) requests else {
      val matched: Seq[Job] = { for (jobRequest <- requests) yield matchingJobs(jobs, jobRequest) }.flatten
      val active = activeJobs(matched)
      if (active.size <= 0) requests else {
        // Ids requested but with no active job backing them.
        val toRunIds = requestIds(requests).filterNot(jobIds(active))
        for (jobRequest <- requests if toRunIds.contains(jobRequest.id)) yield jobRequest
      }
    }
  }
  /**
   * Returns the current system time as an ISO-8601 string without millis,
   * e.g. 2016-01-01T12:00:00+0000.
   */
  private def now: String = {
    val timestamp = new DateTime()
    val format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ssZ")
    timestamp.toString(format)
  }
  /**
   * Finds all requests of the given job type that should be run, by joining
   * the cached requests with the cached jobs (both fetched concurrently).
   */
  def findRequestsToRun(jobType: String): Future[Seq[Request]] = {
    val requestsFuture = cache.requests(jobType)
    val jobsFuture = cache.jobs(jobType)
    for {
      requests <- requestsFuture
      jobs <- jobsFuture
    } yield requestsToRun(requests, jobs)
  }
  /**
   * Creates a Job for the given request (timestamped with the current time)
   * and stores it in the cache under "bbc.schedulerplus.job:<type>_<id>".
   */
  def createJob(jobRequest: Request, lifetimeInMillis: Long): Job = {
    val job = Job(
      createdAt = now,
      lifetimeInMillis = lifetimeInMillis,
      `type` = jobRequest.`type`,
      id = jobRequest.id
    )
    cache.putJob(key = "bbc.schedulerplus.job:" + job.toKey, item = job)
    job
  }
  /**
   * Replaces a job in the cache with a copy carrying a fresh createdAt
   * timestamp and the given lifetime; returns the new job.
   */
  def updateJob(job: Job, lifetimeInMillis: Long): Job = {
    val newJob = Job(
      createdAt = now,
      lifetimeInMillis = lifetimeInMillis,
      `type` = job.`type`,
      id = job.id
    )
    cache.putJob(key = "bbc.schedulerplus.job:" + job.toKey, item = newJob)
    newJob
  }
  /**
   * Removes a job from the cache by its key.
   * NOTE(review): always returns true regardless of the cache outcome.
   */
  def deleteJob(job: Job): Boolean = {
    cache.deleteJob(key = "bbc.schedulerplus.job:" + job.toKey)
    true
  }
}
| bbc/scheduler-plus | src/main/scala/bbc/schedulerplus/system/JobManagerEngine.scala | Scala | mit | 5,250 |
package com.github.j5ik2o.forseti.adaptor.handler.flow.client
import java.net.URI
import com.github.j5ik2o.forseti.adaptor.handler.AuthorizationGrantHandler
import com.github.j5ik2o.forseti.adaptor.handler.model.AuthorizationGranted
import com.github.j5ik2o.forseti.domain.exception.{ErrorResponse, OAuthException}
import com.github.j5ik2o.forseti.domain.{GrantType, ResponseStyle}
import scala.concurrent.{ExecutionContext, Future}
import scalaz._
import scalaz.std.scalaFuture._
import scalaz.syntax.either._
// OAuth2 "client credentials" grant handler: validates the client's own
// credentials and issues an access token (no refresh token) plus an optional
// id token, returned as a JSON-style AuthorizationGranted.
trait ClientCredentialsGrantHandler
    extends AuthorizationGrantHandler[ClientCredentialsGrant, AuthorizationGranted] {
  // This handler only serves the "client" grant type.
  protected val definedGrantType: GrantType.Value = GrantType.Client
  // Validates the request parameters, resolves and authenticates the client,
  // then builds the token response; any failure is mapped to an ErrorResponse.
  override def handleGrant(request: ClientCredentialsGrant)(
      implicit ec: ExecutionContext
  ): EitherT[Future, ErrorResponse, AuthorizationGranted] = {
    getParams(request).fold(
      createErrorResponseEitherT(request), {
        case (clientCredential, grantType, scope, nonce) =>
          (for {
            client <- resolveByClientKey(clientReader, clientCredential.clientKey)
            _ <- clientCredentialValidator.validate(clientCredential, client)
            _ <- grantTypeValidator.validate(grantType, client, Maybe.just(definedGrantType))
            _ <- scopeValidator.validate(scope, client)
            user <- getUser(request)
            // No refresh token for the client-credentials flow.
            accessToken <- generateAndStoreAccessToken(
              client.id,
              user.map(_.id),
              client.scope,
              generateRefreshToken = false
            )
            idToken <- createIdToken(client, nonce, user, scope, accessToken)
          } yield
            AuthorizationGranted(
              responseStyle = ResponseStyle.JSON,
              redirectUri = Maybe.empty,
              accessToken = accessToken.value,
              tokenType = accessToken.tokenType,
              expiresIn = accessToken.expiresIn,
              refreshToken = accessToken.refreshToken,
              scope = scope,
              state = Maybe.empty,
              idToken = idToken
            )).leftMap(createErrorResponse(Maybe.empty))
      }
    )
  }
  // Logs the exception and converts it to a JSON-style error response.
  override protected def createErrorResponse(redirectUri: Maybe[URI])(
      ex: OAuthException
  ): ErrorResponse = {
    logger.error("Occurred error", ex)
    ErrorResponse.fromException(ex, ResponseStyle.JSON, redirectUri)
  }
  // As above, but already lifted into the EitherT result type used by
  // handleGrant for parameter-validation failures.
  override protected def createErrorResponseEitherT(request: ClientCredentialsGrant)(
      error: OAuthException
  ): EitherT[Future, ErrorResponse, AuthorizationGranted] = {
    logger.error("Occurred error", error)
    val errorResponse =
      ErrorResponse.fromException(error, ResponseStyle.JSON, Maybe.empty, Maybe.empty)
    EitherT(Future.successful(errorResponse.left[AuthorizationGranted]))
  }
  // Extracts (clientCredential, grantType, scope, nonce) from the request;
  // fails if any required parameter is missing/invalid.
  private def getParams(request: ClientCredentialsGrant) = {
    val validateParams = for {
      clientCredential <- request.clientCredential
      grantType <- request.grantType
      scope <- request.scope
      nonce <- request.nonce
    } yield (clientCredential, grantType, scope, nonce)
    validateParams
  }
}
| j5ik2o/forseti | server/server-use-case-port/src/main/scala/com/github/j5ik2o/forseti/adaptor/handler/flow/client/ClientCredentialsGrantHandler.scala | Scala | mit | 3,160 |
package mosaico.common
import java.io.File
import sbt.{ForkOptions, Fork}
/**
* Created by msciab on 18/09/16.
*/
trait MiscUtils {
  /**
   * Receive an array (e.g "a" "@b" "c").
   * Replace each value starting with "@" with the corresponding key in the map
   * (e.g. with Map("b" -> "d") returns "a" "d" "c").
   * NOTE(review): a missing key throws NoSuchElementException.
   *
   * @param args values to substitute into
   * @param map  substitutions for "@"-prefixed values
   */
  def replaceAtWithMap(args: Seq[String], map: Map[String, String]) = {
    args.map(x =>
      if (x.startsWith("@"))
        map(x.substring(1))
      else x
    )
  }
  // Matches `input` against a list of signed regexes: each entry's first char
  // is stripped — '+' means the remainder must match, anything else means it
  // must NOT match — and all conditions are AND-ed. Empty list accepts all.
  // NOTE(review): an unprefixed regex loses its first character and is treated
  // as an exclusion; presumably entries are always "+re"/"-re" — confirm.
  def includeExcludeRegex(input: String,
                          regex: String*): Boolean = {
    if (regex.isEmpty)
      true
    else regex.toSeq
      .map(x => (x(0) == '+') -> x.tail)
      .map {
        case (result, re) =>
          input.matches(re) -> result
      }
      .map(x => x._1 == x._2)
      .reduce(_ && _)
  }
  /**
   * Execute a java class with its args, adding a classpath and setting the
   * working directory; forks a new JVM via sbt's Fork helper.
   *
   * @param args main class name followed by its arguments
   * @param home working directory for the forked JVM
   * @param cp   classpath entries, joined with the platform path separator
   * @return the forked process exit code
   */
  def exec(args: Seq[String], home: File, cp: Seq[File]) = {
    Fork.java(ForkOptions(
      runJVMOptions = "-cp" :: cp.map(_.getAbsolutePath).mkString(java.io.File.pathSeparator) :: Nil,
      workingDirectory = Some(home)), args)
  }
  // get a wrapped property
  /*
  def prp(property: String) = {
    val r = System.getProperty(property)
    if (r == null)
      None
    else
      Some(r)
  }*/
  // Debugging is enabled by defining the "debug" system property.
  val debugging = System.getProperty("debug") != null
  // Tracing is enabled by the "trace=a,b,c" system property: the set of
  // trace tags that are active, or None when tracing is off.
  val tracing = Option(System.getProperty("trace")).map(x => x.split(",").toSet)
  /**
   * Print a debug message when debugging is enabled.
   *
   * @param msg the message to print (prefixed with "%%")
   */
  def debug(msg: String) = {
    if (debugging)
      println(s"%% ${msg}")
  }
  /**
   * Print a trace message when the given trace tag is enabled.
   * NOTE(review): uses `map`, so the inferred result is Option[Unit]; kept
   * as-is to preserve the signature.
   *
   * @param what the trace tag to test against the enabled set
   * @param msg  the message to print (prefixed with "%%%")
   */
  def trace(what: String, msg: String) = {
    tracing.map { set =>
      if (set(what))
        println(s"%%% ${msg}")
    }
  }
}
| sciabarra/Mosaico | plugin/src/main/scala/mosaico/common/MiscUtils.scala | Scala | apache-2.0 | 2,213 |
/**
* Copyright (C) 2010 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.function.xxforms
import org.orbeon.oxf.util.StringUtils._
import org.orbeon.oxf.xforms.function.XFormsFunction
import org.orbeon.oxf.xforms.model.InstanceData
import org.orbeon.oxf.xml.DependsOnContextItemIfSingleArgumentMissing
import org.orbeon.saxon.expr._
import org.orbeon.saxon.om._
import org.orbeon.scaxon.Implicits._
/**
 * xxf:invalid-binds()
 *
 * Returns the ids of the invalid binds attached to the node given as the
 * optional first argument, defaulting to the XPath context item. Returns the
 * empty sequence when the node has no invalid binds or is not a node.
 */
class XXFormsInvalidBinds
  extends XFormsFunction
  with DependsOnContextItemIfSingleArgumentMissing { // don't extend XFormsMIPFunction as addToPathMap returns something different
  override def iterate(xpathContext: XPathContext): SequenceIterator = {
    // First item or context node if any
    val item = argument.headOption map (e => Option(e.iterate(xpathContext).next())) getOrElse Option(xpathContext.getContextItem)
    item match {
      case Some(nodeInfo: NodeInfo) =>
        // InstanceData may return null when there are no invalid bind ids.
        Option(InstanceData.getInvalidBindIds(nodeInfo)) match {
          case Some(invalidBindIdsString) =>
            // Ids are stored as one separated string; split into a sequence.
            stringArrayToSequenceIterator(invalidBindIdsString.splitTo[Array]())
          case None =>
            // No invalid bind ids
            EmptyIterator.getInstance
        }
      case _ =>
        // Return () if we can't access the node
        EmptyIterator.getInstance
    }
  }
  // TODO: something smart
  override def addToPathMap(pathMap: PathMap, pathMapNodeSet: PathMap.PathMapNodeSet): PathMap.PathMapNodeSet =
    super.addToPathMap(pathMap, pathMapNodeSet)
} | orbeon/orbeon-forms | xforms-runtime/jvm/src/main/scala/org/orbeon/oxf/xforms/function/xxforms/XXFormsInvalidBinds.scala | Scala | lgpl-2.1 | 2,121 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package kafka.api
import java.{lang, util}
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.clients.consumer._
import org.apache.kafka.common.serialization.ByteArrayDeserializer
import org.apache.kafka.common.TopicPartition
import kafka.utils.{TestUtils, Logging}
import kafka.server.KafkaConfig
import java.util.ArrayList
import org.junit.Assert._
import scala.collection.JavaConversions._
import kafka.coordinator.ConsumerCoordinator
/**
* Integration tests for the new consumer that cover basic usage as well as server failures
*/
/**
 * Integration tests for the new consumer that cover basic usage as well as server failures.
 *
 * Uses a 3-broker cluster with a 2-partition test topic; one producer and two consumers
 * are provided by IntegrationTestHarness.
 */
class ConsumerTest extends IntegrationTestHarness with Logging {
  val producerCount = 1
  val consumerCount = 2
  val serverCount = 3

  val topic = "topic"
  val part = 0
  val tp = new TopicPartition(topic, part)
  val part2 = 1
  val tp2 = new TopicPartition(topic, part2)

  // configure the servers and clients
  this.serverConfig.setProperty(KafkaConfig.ControlledShutdownEnableProp, "false") // speed up shutdown
  this.serverConfig.setProperty(KafkaConfig.OffsetsTopicReplicationFactorProp, "3") // don't want to lose offset
  this.serverConfig.setProperty(KafkaConfig.OffsetsTopicPartitionsProp, "1")
  this.serverConfig.setProperty(KafkaConfig.ConsumerMinSessionTimeoutMsProp, "100") // set small enough session timeout
  this.producerConfig.setProperty(ProducerConfig.ACKS_CONFIG, "all")
  this.consumerConfig.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "my-test")
  this.consumerConfig.setProperty(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, 4096.toString)
  this.consumerConfig.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
  this.consumerConfig.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false")

  override def setUp() {
    super.setUp()

    // create the test topic with all the brokers as replicas
    TestUtils.createTopic(this.zkClient, topic, 2, serverCount, this.servers)
  }

  /** Basic subscribe/seek/consume round trip plus async commit callback behavior. */
  def testSimpleConsumption() {
    val numRecords = 10000
    sendRecords(numRecords)

    assertEquals(0, this.consumers(0).subscriptions.size)
    this.consumers(0).subscribe(tp)
    assertEquals(1, this.consumers(0).subscriptions.size)

    this.consumers(0).seek(tp, 0)
    consumeRecords(this.consumers(0), numRecords = numRecords, startingOffset = 0)

    // check async commit callbacks
    val commitCallback = new CountConsumerCommitCallback()
    this.consumers(0).commit(CommitType.ASYNC, commitCallback)

    // shouldn't make progress until poll is invoked
    Thread.sleep(10)
    assertEquals(0, commitCallback.count)
    awaitCommitCallback(this.consumers(0), commitCallback)
  }

  /** Committing explicit offsets per partition, sync and async, must not move positions. */
  def testCommitSpecifiedOffsets() {
    sendRecords(5, tp)
    sendRecords(7, tp2)

    this.consumers(0).subscribe(tp)
    this.consumers(0).subscribe(tp2)

    // Need to poll to join the group
    this.consumers(0).poll(50)
    val pos1 = this.consumers(0).position(tp)
    val pos2 = this.consumers(0).position(tp2)
    this.consumers(0).commit(Map[TopicPartition,java.lang.Long]((tp, 3L)), CommitType.SYNC)
    assertEquals(3, this.consumers(0).committed(tp))
    intercept[NoOffsetForPartitionException] {
      this.consumers(0).committed(tp2)
    }
    // positions should not change
    assertEquals(pos1, this.consumers(0).position(tp))
    assertEquals(pos2, this.consumers(0).position(tp2))
    this.consumers(0).commit(Map[TopicPartition,java.lang.Long]((tp2, 5L)), CommitType.SYNC)
    assertEquals(3, this.consumers(0).committed(tp))
    assertEquals(5, this.consumers(0).committed(tp2))

    // Using async should pick up the committed changes after commit completes
    val commitCallback = new CountConsumerCommitCallback()
    this.consumers(0).commit(Map[TopicPartition,java.lang.Long]((tp2, 7L)), CommitType.ASYNC, commitCallback)
    awaitCommitCallback(this.consumers(0), commitCallback)
    assertEquals(7, this.consumers(0).committed(tp2))
  }

  /** With auto.offset.reset=earliest (set above), consumption starts at offset 0. */
  def testAutoOffsetReset() {
    sendRecords(1)
    this.consumers(0).subscribe(tp)
    consumeRecords(this.consumers(0), numRecords = 1, startingOffset = 0)
  }

  /** seekToEnd / seekToBeginning / explicit seek all reposition the consumer correctly. */
  def testSeek() {
    val consumer = this.consumers(0)
    val totalRecords = 50L
    sendRecords(totalRecords.toInt)
    consumer.subscribe(tp)

    consumer.seekToEnd(tp)
    assertEquals(totalRecords, consumer.position(tp))
    assertFalse(consumer.poll(totalRecords).iterator().hasNext)

    consumer.seekToBeginning(tp)
    assertEquals(0, consumer.position(tp), 0)
    consumeRecords(consumer, numRecords = 1, startingOffset = 0)

    val mid = totalRecords / 2
    consumer.seek(tp, mid)
    assertEquals(mid, consumer.position(tp))
    consumeRecords(consumer, numRecords = 1, startingOffset = mid.toInt)
  }

  /** Topic-level (group-managed) subscription also delivers records. */
  def testGroupConsumption() {
    sendRecords(10)
    this.consumers(0).subscribe(topic)
    consumeRecords(this.consumers(0), numRecords = 1, startingOffset = 0)
  }

  /** position()/committed() semantics, including error cases for unknown partitions. */
  def testPositionAndCommit() {
    sendRecords(5)

    // committed() on a partition with no committed offset throws an exception
    intercept[NoOffsetForPartitionException] {
      this.consumers(0).committed(new TopicPartition(topic, 15))
    }

    // position() on a partition that we aren't subscribed to throws an exception
    intercept[IllegalArgumentException] {
      this.consumers(0).position(new TopicPartition(topic, 15))
    }

    this.consumers(0).subscribe(tp)

    assertEquals("position() on a partition that we are subscribed to should reset the offset", 0L, this.consumers(0).position(tp))
    this.consumers(0).commit(CommitType.SYNC)
    assertEquals(0L, this.consumers(0).committed(tp))

    consumeRecords(this.consumers(0), 5, 0)
    assertEquals("After consuming 5 records, position should be 5", 5L, this.consumers(0).position(tp))
    this.consumers(0).commit(CommitType.SYNC)
    assertEquals("Committed offset should be returned", 5L, this.consumers(0).committed(tp))

    sendRecords(1)

    // another consumer in the same group should get the same position
    this.consumers(1).subscribe(tp)
    consumeRecords(this.consumers(1), 1, 5)
  }

  /** partitionsFor returns metadata for existing topics and null for unknown ones. */
  def testPartitionsFor() {
    val numParts = 2
    TestUtils.createTopic(this.zkClient, "part-test", numParts, 1, this.servers)
    val parts = this.consumers(0).partitionsFor("part-test")
    assertNotNull(parts)
    assertEquals(2, parts.length)
    assertNull(this.consumers(0).partitionsFor("non-exist-topic"))
  }

  /** listTopics includes the three topics created here plus the pre-existing ones (5 total). */
  def testListTopics() {
    val numParts = 2
    val topic1: String = "part-test-topic-1"
    val topic2: String = "part-test-topic-2"
    val topic3: String = "part-test-topic-3"
    TestUtils.createTopic(this.zkClient, topic1, numParts, 1, this.servers)
    TestUtils.createTopic(this.zkClient, topic2, numParts, 1, this.servers)
    TestUtils.createTopic(this.zkClient, topic3, numParts, 1, this.servers)

    val topics = this.consumers.head.listTopics()
    assertNotNull(topics)
    assertEquals(5, topics.size())
    assertEquals(5, topics.keySet().size())
    assertEquals(2, topics.get(topic1).length)
    assertEquals(2, topics.get(topic2).length)
    assertEquals(2, topics.get(topic3).length)
  }

  /** Killing the coordinator broker must trigger a rebalance callback on the consumer. */
  def testPartitionReassignmentCallback() {
    val callback = new TestConsumerReassignmentCallback()
    this.consumerConfig.setProperty(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "100") // timeout quickly to avoid slow test
    val consumer0 = new KafkaConsumer(this.consumerConfig, callback, new ByteArrayDeserializer(), new ByteArrayDeserializer())
    consumer0.subscribe(topic)

    // the initial subscription should cause a callback execution
    while(callback.callsToAssigned == 0)
      consumer0.poll(50)

    // get metadata for the topic
    var parts = consumer0.partitionsFor(ConsumerCoordinator.OffsetsTopicName)
    while(parts == null)
      parts = consumer0.partitionsFor(ConsumerCoordinator.OffsetsTopicName)
    assertEquals(1, parts.size)
    assertNotNull(parts(0).leader())

    // shutdown the coordinator
    val coordinator = parts(0).leader().id()
    this.servers(coordinator).shutdown()

    // this should cause another callback execution
    while(callback.callsToAssigned < 2)
      consumer0.poll(50)
    assertEquals(2, callback.callsToAssigned)
    assertEquals(2, callback.callsToRevoked)

    consumer0.close()
  }

  /** Unsubscribing removes all subscriptions from the consumer. */
  def testUnsubscribeTopic() {
    val callback = new TestConsumerReassignmentCallback()
    this.consumerConfig.setProperty(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "100") // timeout quickly to avoid slow test
    val consumer0 = new KafkaConsumer(this.consumerConfig, callback, new ByteArrayDeserializer(), new ByteArrayDeserializer())

    try {
      consumer0.subscribe(topic)

      // the initial subscription should cause a callback execution
      while (consumer0 != null && callback.callsToAssigned == 0)
        consumer0.poll(50)

      consumer0.unsubscribe(topic)
      assertEquals(0, consumer0.subscriptions.size())
    } finally {
      consumer0.close()
    }
  }

  /** Counts rebalance callbacks so tests can wait on (re)assignments. */
  private class TestConsumerReassignmentCallback extends ConsumerRebalanceCallback {
    var callsToAssigned = 0
    var callsToRevoked = 0
    def onPartitionsAssigned(consumer: Consumer[_,_], partitions: java.util.Collection[TopicPartition]) {
      info("onPartitionsAssigned called.")
      callsToAssigned += 1
    }
    def onPartitionsRevoked(consumer: Consumer[_,_], partitions: java.util.Collection[TopicPartition]) {
      info("onPartitionsRevoked called.")
      callsToRevoked += 1
    }
  }

  private def sendRecords(numRecords: Int): Unit = {
    sendRecords(numRecords, tp)
  }

  /** Produces numRecords records to the given partition and waits for all acks. */
  private def sendRecords(numRecords: Int, tp: TopicPartition) {
    val futures = (0 until numRecords).map { i =>
      this.producers(0).send(new ProducerRecord(tp.topic(), tp.partition(), i.toString.getBytes, i.toString.getBytes))
    }
    futures.map(_.get)
  }

  /**
   * Polls the consumer until numRecords records are received (bounded by maxIters polls),
   * then verifies topic/partition/offset of each record starting at startingOffset.
   */
  private def consumeRecords(consumer: Consumer[Array[Byte], Array[Byte]], numRecords: Int, startingOffset: Int) {
    val records = new ArrayList[ConsumerRecord[Array[Byte], Array[Byte]]]()
    val maxIters = numRecords * 300
    var iters = 0
    while (records.size < numRecords) {
      for (record <- consumer.poll(50))
        records.add(record)
      if(iters > maxIters)
        throw new IllegalStateException("Failed to consume the expected records after " + iters + " iterations.")
      iters += 1
    }
    for (i <- 0 until numRecords) {
      val record = records.get(i)
      val offset = startingOffset + i
      assertEquals(topic, record.topic())
      assertEquals(part, record.partition())
      assertEquals(offset.toLong, record.offset())
    }
  }

  /**
   * Polls the given consumer until its async commit callback fires (10s timeout).
   *
   * Bug fix: the `consumer` parameter was previously ignored and this.consumers(0)
   * was polled instead, so a callback registered on any other consumer never fired.
   */
  private def awaitCommitCallback(consumer: Consumer[Array[Byte], Array[Byte]], commitCallback: CountConsumerCommitCallback): Unit = {
    val startCount = commitCallback.count
    val started = System.currentTimeMillis()
    while (commitCallback.count == startCount && System.currentTimeMillis() - started < 10000)
      consumer.poll(10000)
    assertEquals(startCount + 1, commitCallback.count)
  }

  /** Counts invocations of the async commit callback. */
  private class CountConsumerCommitCallback extends ConsumerCommitCallback {
    var count = 0

    override def onComplete(offsets: util.Map[TopicPartition, lang.Long], exception: Exception): Unit = count += 1
  }
} | jw0201/kafka | core/src/test/scala/integration/kafka/api/ConsumerTest.scala | Scala | apache-2.0 | 12,001 |
package net.bhardy.braintree.scala
import _root_.org.scalatest.{FunSpec, Inside}
import _root_.org.scalatest.matchers.MustMatchers
import xml.XML
/**
*/
/**
 * Verifies XML serialization of SubscriptionOptionsRequest: set flags must be
 * rendered as child elements of <options>, unset flags must be omitted entirely.
 */
class SubscriptionOptionsRequestSpec extends FunSpec with MustMatchers with Inside {

  /** Text content of the first descendant element carrying the given label. */
  private def textOf(elem: scala.xml.Node, label: String): String =
    (elem \\ label).text

  describe("toXmlString") {
    it("populates options when present") {
      val req = new SubscriptionOptionsRequest(new SubscriptionRequest).
        doNotInheritAddOnsOrDiscounts(true).
        prorateCharges(false).
        replaceAllAddOnsAndDiscounts(true).
        revertSubscriptionOnProrationFailure(false).
        startImmediately(true)

      // Round-trip through the string form to exercise the serialized output.
      val optionsElem = XML.loadString(req.toXml.get.toString)

      optionsElem.label must be === "options"
      textOf(optionsElem, "doNotInheritAddOnsOrDiscounts") must be === "true"
      textOf(optionsElem, "prorateCharges") must be === "false"
      textOf(optionsElem, "replaceAllAddOnsAndDiscounts") must be === "true"
      textOf(optionsElem, "revertSubscriptionOnProrationFailure") must be === "false"
      textOf(optionsElem, "startImmediately") must be === "true"
    }

    it("avoids options when absent") {
      val req = new SubscriptionOptionsRequest(new SubscriptionRequest).
        startImmediately(true)

      val optionsElem = req.toXml.get

      optionsElem.label must be === "options"
      (optionsElem \\ "doNotInheritAddOnsOrDiscounts") must be ('empty)
      (optionsElem \\ "prorateCharges") must be ('empty)
      (optionsElem \\ "replaceAllAddOnsAndDiscounts") must be ('empty)
      (optionsElem \\ "revertSubscriptionOnProrationFailure") must be ('empty)
      textOf(optionsElem, "startImmediately") must be === "true"
    }
  }
}
| benhardy/braintree-scala | src/test/scala/SubscriptionOptionsRequestSpec.scala | Scala | mit | 1,661 |
package elegans
import StatefulSemantics._
import Cells._
import Experiments._
import scala.collection.mutable.{Map => MutableMap, Set => MutableSet}
import z3.scala._
/**
 * Summarizes synthesized stateful logics as finite-state machines: collapses the
 * per-input-value transition relation into a minimal set of axis-aligned regions
 * per (src, dst) state pair (via Z3), prunes edges that a flow/fixpoint analysis
 * proves unreachable from the initial state, and renders both views as PDFs
 * through Graphviz dot.
 */
object Summaries {
  // One concrete transition per entry: (input valuation, src state) -> dst state.
  type EdgeSet = MutableMap[(InputValue, InternalState), InternalState]

  // Axis-aligned hyper-rectangle over signal values; a None bound on an axis means
  // that axis is unconstrained (it covers the bundle's whole value range).
  case class Region(lowerCorner: Seq[Option[Int]], upperCorner: Seq[Option[Int]])

  /**
   * Entry point: rebuilds the cell system for the first experiment, concretizes it
   * with the given solution if any, and summarizes every StatefulLogic found in the
   * VPC cells' nodes. modelIndex is only used to disambiguate output file names.
   */
  def summarize(solution: Option[Constraints.Solution],
      experiments: Seq[Experiment], modelIndex: Int): Unit = {
    val (nonvpcs, aPrioriChannels, vpcs) = Model.createSystem(experiments.head)
    val allCells = nonvpcs ::: vpcs
    solution match {
      case Some(sol) =>
        concretize(allCells, sol)
      case None =>
    }
    solution match {
      case None => logWarning("No solution defined.")
      case Some(sol) => sol.get(statefulSolutionKey) match {
        case Some(statefulSol: StatefulLogicSolution) =>
          for (asyncCell <- vpcs) {
            for (node <- asyncCell.N) {
              for (sl @ StatefulLogic() <- node.logics) {
                summarize(sl, statefulSol, modelIndex)
              }
            }
          }
        case _ => logWarning("No stateful solution defined.")
      }
    }
  }

  /**
   * Summarizes one logic: for each (src, dst) pair, finds a minimal set of regions
   * covering exactly the input values taking src to dst, then prunes regions that
   * the flow analysis shows can never fire, and emits "collapsed" and "pruned"
   * FSM diagrams as PDFs.
   */
  def summarize(logic: StatefulLogic, solution: StatefulLogicSolution,
      modelIndex: Int): Unit = {
    // Input dimensions, in the fixed order used everywhere below.
    val allSignals = logic.activatingSignals ++ logic.inhibitingSignals

    // Currently the identity; hook for future edge-level simplification.
    def simplifyTransitions(transitions: EdgeSet) = {
      transitions
    }

    // All states that appear as the source of some transition.
    def usedSrcStates(transitions: EdgeSet) = {
      val srcStates = transitions.map{
        case ((v, src), dst) => src
      }.toSet
      srcStates
    }

    // Human-readable guard label for an edge; "\\n" is a literal backslash-n so
    // Graphviz renders it as a line break inside the DOT label.
    def edgeSummaryString(edges: Set[Region]): String = {
      val strs = edges.map{
        case Region(l, u) =>
          val edgeStr = ((l zip u) zip allSignals) collect {
            case ((Some(lv), Some(uv)), bundle) =>
              if (lv == uv) {
                bundle.id + " = " + lv
              } else {
                bundle.id + " in [" + lv + ":" + uv + "]"
              }
            case ((Some(_), None), _) => terminate("Should not have happened.")
            case ((None, Some(_)), _) => terminate("Should not have happened.")
          }
          edgeStr.mkString(", ")
      }
      strs.mkString("\\n")
    }

    // Replaces full-range bounds with (None, None) so they are omitted from labels.
    def simplifyCovering(covering: Set[Region]): Set[Region] = {
      covering map {
        case Region(lb, ub) =>
          val simplifiedBounds = ((lb zip ub) zip allSignals) map {
            case ((Some(lp), Some(up)), bundle) =>
              // if they cover the range, drop them
              if (lp == 0 && up == bundleSize(bundle) - 1)
                (None, None)
              else
                (Some(lp), Some(up))
            case _ => terminate("Should not have happened.")
          }
          val (simplifiedLb, simplifiedUb) = simplifiedBounds.unzip
          Region(simplifiedLb, simplifiedUb)
      }
    }

    // Finds the smallest set of hyper-rectangles that contains every input value in
    // includedEdges and none in complementEdges, by asking Z3 for 1, 2, ... regions.
    def findMinimalCovering(includedEdges: EdgeSet, complementEdges: EdgeSet) = {
      // Number of input dimensions: activating + inhibiting signal components.
      val dimension: Int = {
        val e = includedEdges.head
        e._1._1._1.size + e._1._1._2.size
      }

      // One Z3 query: is there a covering with exactly nbRegions rectangles?
      def findCovering(nbRegions: Int): Option[Set[Region]] = {
        val ctx = new Z3Context("MODEL" -> true)

        // Integer corner variables for each candidate rectangle.
        val regionVars = for (i <- 0 until nbRegions) yield {
          val lower = for (j <- 0 until dimension) yield {
            ctx.mkFreshIntConst("region_" + i + "_lower_" + j)
          }
          val upper = for (j <- 0 until dimension) yield {
            ctx.mkFreshIntConst("region_" + i + "_upper_" + j)
          }
          (lower.toList, upper.toList)
        }

        // assert bounds on "corners"
        for ((lowerCorner, upperCorner) <- regionVars) {
          (lowerCorner zip allSignals) foreach {
            case (point, bundle) =>
              ctx.assertCnstr(ctx.mkGE(point, ctx.mkInt(0, ctx.mkIntSort)))
              ctx.assertCnstr(ctx.mkLT(point, ctx.mkInt(bundleSize(bundle), ctx.mkIntSort)))
          }
          (upperCorner zip allSignals) foreach {
            case (point, bundle) =>
              ctx.assertCnstr(ctx.mkGE(point, ctx.mkInt(0, ctx.mkIntSort)))
              ctx.assertCnstr(ctx.mkLT(point, ctx.mkInt(bundleSize(bundle), ctx.mkIntSort)))
          }
          (lowerCorner zip upperCorner) foreach {
            case (p1, p2) =>
              ctx.assertCnstr(ctx.mkLE(p1, p2))
          }
        }

        // assert all edges are in a rectangle
        for (edge <- includedEdges) {
          val (actInputValue, inhInputValue) = edge._1._1
          val inputValues = actInputValue ++ inhInputValue
          // some rectangle includes this input value
          val includedInRectangles = for ((lowerCorner, upperCorner) <- regionVars) yield {
            val lowerBounds = (lowerCorner zip inputValues) map {
              case (point, value) =>
                ctx.mkLE(point, ctx.mkInt(value, ctx.mkIntSort))
            }
            val upperBounds = (upperCorner zip inputValues) map {
              case (point, value) =>
                ctx.mkGE(point, ctx.mkInt(value, ctx.mkIntSort))
            }
            ctx.mkAnd(ctx.mkAnd(lowerBounds: _*), ctx.mkAnd(upperBounds: _*))
          }
          ctx.assertCnstr(ctx.mkOr(includedInRectangles: _*))
        }

        // assert all excluded edges are not in any rectangle
        for (edge <- complementEdges) {
          val (actInputValue, inhInputValue) = edge._1._1
          val inputValues = actInputValue ++ inhInputValue
          // no rectangle includes this input value
          val notIncludedInRectangles = for ((lowerCorner, upperCorner) <- regionVars) yield {
            val lowerBounds = (lowerCorner zip inputValues) map {
              case (point, value) =>
                ctx.mkLE(point, ctx.mkInt(value, ctx.mkIntSort))
            }
            val upperBounds = (upperCorner zip inputValues) map {
              case (point, value) =>
                ctx.mkGE(point, ctx.mkInt(value, ctx.mkIntSort))
            }
            ctx.mkNot(
              ctx.mkAnd(ctx.mkAnd(lowerBounds: _*), ctx.mkAnd(upperBounds: _*))
            )
          }
          ctx.assertCnstr(ctx.mkAnd(notIncludedInRectangles: _*))
        }

        // Read the rectangle corners out of the model, if SAT.
        ctx.checkAndGetModel match {
          case (Some(true), m) =>
            val regions = for ((lowerCorner, upperCorner) <- regionVars) yield {
              val lowerCornerValues = lowerCorner.map(point => m.evalAs[Int](point))
              val upperCornerValues = upperCorner.map(point => m.evalAs[Int](point))
              Region(lowerCornerValues, upperCornerValues)
            }
            m.delete
            ctx.delete
            Some(regions.toSet)
          case _ => None
        }
      }

      // Increase the region budget until a covering is found; includedEdges.size
      // regions (one per edge) always suffice, so the loop must terminate with one.
      var coveringNotFound = true
      var coveringSolution: Option[Set[Region]] = None
      for (nbRegions <- 1 to includedEdges.size; if coveringNotFound) {
        findCovering(nbRegions) match {
          case Some(cs) =>
            coveringNotFound = false
            coveringSolution = Some(cs)
          case None =>
        }
      }
      coveringSolution match {
        case None => terminate("No covering found!")
        case Some(cs) => cs
      }
    }

    // Collapsed view of the FSM: (src, dst) -> guard regions.
    type TransitionRegionMap = MutableMap[(InternalState, InternalState), Set[Region]]

    def usedStates(ts: TransitionRegionMap): Set[InternalState] = {
      val (srcStates, dstStates) = ts map {
        case ((src, dst), rs) => (src, dst)
      } unzip

      srcStates.toSet ++ dstStates.toSet
    }

    def incomingEdges(ts: TransitionRegionMap, dst: InternalState) = {
      ts.filter {
        case ((_, d), _) => d == dst
      }
    }

    // trees for flow analysis
    sealed trait Formula
    sealed trait Term
    case class Variable(id: String) extends Term
    case class IntegerValue(value: Int) extends Term
    case object False extends Formula
    case object True extends Formula
    case class Equals(t1: Term, t2: Term) extends Formula
    case class LessThanEquals(t1: Term, t2: Term) extends Formula
    case class GreaterThanEquals(t1: Term, t2: Term) extends Formula
    case class And(f1: Formula, f2: Formula) extends Formula
    case class Or(f1: Formula, f2: Formula) extends Formula

    /**
     * Forward flow analysis: iteratively computes, per state and time step, a Z3
     * formula over signal variables describing the values reachable at that state,
     * starting from initState (step 0). Runs to a fixpoint, then drops every guard
     * region that never intersects the reachable value space of its source state.
     */
    def simplifyWithFlowAnalysis(
        transitionsAsRegions: MutableMap[(InternalState, InternalState), Set[Region]],
        initState: InternalState): MutableMap[(InternalState, InternalState), Set[Region]] = {
      val allStates = usedStates(transitionsAsRegions)

      val ctx = new Z3Context()

      // create variables for each region and for each analysis step.
      val z3Vars = MutableMap[(PortBundle, InternalState, Int), Z3AST]()

      // declare the formulas that describe the new allowed values for each state at each time step
      val stateFormulas = MutableMap[(InternalState, Int), Z3AST]()

      // construct formulas that encode initial permissible values for each internal state
      // these are namely true for the init state, false for others.
      for (state <- allStates) {
        if (state == initState)
          stateFormulas((state, 0)) = ctx.mkTrue
        else
          stateFormulas((state, 0)) = ctx.mkFalse
      }

      // Fresh bounded integer variable per (bundle, state) for the given step.
      def createVariablesForStep(step: Int) {
        for (state <- allStates) {
          for (bundle @ PortBundle(id, ports) <- allSignals) {
            val fresh = ctx.mkFreshIntConst("state_" + state + "_" + id + "_" + step)
            val size = bundleSize(bundle)
            val lb = ctx.mkGE(fresh, ctx.mkInt(0, ctx.mkIntSort))
            val ub = ctx.mkLT(fresh, ctx.mkInt(size, ctx.mkIntSort))
            val bounds = ctx.mkAnd(lb, ub)
            ctx.assertCnstr(bounds)
            z3Vars((bundle, state, step)) = fresh
          }
        }
      }

      // construct formula that constrains vars with given regions
      def constrainWithRegions(vars: Seq[Z3AST], regions: Set[Region]): Z3AST = {
        val disjuncts = for (Region(lb, ub) <- regions) yield {
          val rowConstraints = ((lb zip ub) zip vars) collect {
            case ((Some(lv), Some(uv)), v) =>
              val lower = ctx.mkGE(v, ctx.mkInt(lv, ctx.mkIntSort))
              val upper = ctx.mkLE(v, ctx.mkInt(uv, ctx.mkIntSort))
              ctx.mkAnd(lower, upper)
          }
          // A region with no explicit bounds constrains nothing.
          if (rowConstraints.isEmpty)
            ctx.mkTrue
          else
            ctx.mkAnd(rowConstraints: _*)
        }
        if (disjuncts.isEmpty)
          ctx.mkFalse
        else
          ctx.mkOr(disjuncts.toList : _*)
      }

      // Encodes per-dimension bundle assumptions (Constant / Monotonic) relating the
      // source-step variables to the destination-step variables across an edge.
      def assumptionConstraints(srcVars: Seq[Z3AST], dstVars: Seq[Z3AST], assumptions: Seq[Set[Assumption]]): Z3AST = {
        val cnstrs = ((srcVars zip dstVars) zip assumptions) collect {
          case ((src, dst), as) if !as.isEmpty =>
            val cnstrsForDimension = as map {
              case Constant =>
                ctx.mkEq(src, dst)
              case Monotonic =>
                ctx.mkLE(src, dst)
            }
            ctx.mkAnd(cnstrsForDimension.toList : _*)
        }
        if (cnstrs.isEmpty)
          ctx.mkTrue
        else
          ctx.mkAnd(cnstrs: _*)
      }

      // SAT check: is `point` reachable at `state` during any of the given steps?
      // Uses push/pop so the temporary assertion does not pollute the context.
      def containsPoint(state: InternalState, steps: Seq[Int], point: Seq[Int]): Boolean = {
        ctx.push
        val disjuncts = for (step <- steps) yield {
          val componentsEqualValues = (allSignals zip point) map {
            case (bundle, value) =>
              ctx.mkEq(
                z3Vars((bundle, state, step)),
                ctx.mkInt(value, ctx.mkIntSort)
              )
          }
          val constraintForStep =
            componentsEqualValues.foldLeft(stateFormulas((state, step)))(ctx.mkAnd(_, _))
          constraintForStep
        }
        ctx.assertCnstr(disjuncts.foldLeft(ctx.mkFalse)(ctx.mkOr(_, _)))
        val outcome = ctx.check match {
          case Some(true) => true
          case _ => false
        }
        ctx.pop(1)
        outcome
      }

      // True iff the value space of `state` at `step` adds no point not already
      // reachable in some earlier step (checked point by point).
      def fixpointReachedForState(state: InternalState, step: Int): Boolean = {
        val allPossiblePoints = signalValueCombinations(allSignals)
        var differenceNonEmpty = false
        for (point <- allPossiblePoints; if !differenceNonEmpty) {
          // does the new space contain the point?
          if (containsPoint(state, Seq(step), point)) {
            // if yes, can we make sure the old spaces don't contain it?
            if (! containsPoint(state, (0 until step).toList, point)) {
              // if yes, then fixpoint not reached, return false
              differenceNonEmpty = true
            } else {
              // if no, then move on to the next point
            }
          }
          // if no, skip this point
        }
        !differenceNonEmpty
      }

      // SAT check: does `region` overlap the reachable value space described by any
      // of the given (state, step) formulas?
      def regionIntersects(region: Region, formulas: MutableMap[(InternalState, Int), Z3AST]): Boolean = {
        ctx.push
        val disjuncts = formulas map {
          case ((srcState, step), formula) =>
            val varsAtThisStep = allSignals map {
              bundle => z3Vars((bundle, srcState, step))
            }
            val isWithinRegionCnstr = constrainWithRegions(varsAtThisStep, Set(region))
            ctx.mkAnd(formula, isWithinRegionCnstr)
        }
        val intersectsAtAnyTime = disjuncts.foldLeft(ctx.mkFalse)(ctx.mkOr(_, _))
        ctx.assertCnstr(intersectsAtAnyTime)
        val outcome = ctx.check match {
          case Some(true) => true
          case _ => false
        }
        ctx.pop(1)
        outcome
      }

      // Keeps only regions that intersect the reachable space of their source state;
      // (src, dst) entries with no surviving region are dropped entirely.
      def pruneUnusableEdges(transitionsAsRegions: TransitionRegionMap): TransitionRegionMap = {
        val prunedMap: TransitionRegionMap = MutableMap[(InternalState, InternalState), Set[Region]]()
        for (((src, dst), regions) <- transitionsAsRegions) {
          val prunedRegions = regions.filter {
            region =>
              val formulasForSrc = stateFormulas.filter {
                case ((s, _), f) => s == src
              }
              regionIntersects(region, formulasForSrc)
          }
          if (!prunedRegions.isEmpty)
            prunedMap += (((src, dst), prunedRegions))
        }
        prunedMap
      }

      // then, loop:
      // at each step, compute new formulas that encode one step of incoming flow for each state
      // check whether the space for any node increases in the new step
      var currentStep = 1
      var fixpointReached = false

      createVariablesForStep(0)

      while(!fixpointReached) {
        // create variables for each state
        createVariablesForStep(currentStep)

        // compute next input for each state
        for (state <- allStates) {
          // assert that current values at dst are the same + incoming region constraints
          val constraintsForEdges = for (((src, _), regions) <- incomingEdges(transitionsAsRegions, state)) yield {
            val srcVarsAtPrevStep = allSignals map {
              bundle => z3Vars((bundle, src, currentStep - 1))
            }
            val dstVarsAtCurrentStep = allSignals map {
              bundle => z3Vars((bundle, state, currentStep))
            }
            val assumptions = allSignals map {
              bundle => bundle.assumptions
            }
            val srcCnstrAtPrevStep = stateFormulas((src, currentStep - 1))
            // we want constrain the src constraint further with the edge constraint
            val constrainedByEdge = constrainWithRegions(srcVarsAtPrevStep, regions)
            // assert assumptions about this incoming edge
            val assumptionCnstr = assumptionConstraints(srcVarsAtPrevStep, dstVarsAtCurrentStep, assumptions)
            // we conjunct all the three above to obtain what's added to the space
            val constraintForEdge = ctx.mkAnd(srcCnstrAtPrevStep, constrainedByEdge, assumptionCnstr)
            constraintForEdge
          }
          val disjunct = constraintsForEdges.foldLeft(ctx.mkFalse)(ctx.mkOr(_, _))
          stateFormulas((state, currentStep)) = ctx.mkOr(disjunct)
        }

        // is the fixpoint reached for all states?
        fixpointReached = allStates.forall(fixpointReachedForState(_, currentStep))
        currentStep += 1
      }

      // when fixpoint is reached, check whether any outgoing row can be removed from transitions
      val toRet = pruneUnusableEdges(transitionsAsRegions)
      ctx.delete
      toRet
    }

    log("Summarizing logic " + logic.id)
    solution match {
      case StatefulLogicSolution(allTransitions, allMappings, initStates) => {
        val transitions = allTransitions(logic.id)
        val mapping = allMappings(logic.id)
        val initState = initStates(logic.id)

        // Renders the FSM in Graphviz DOT; each edge goes through a box node that
        // carries the guard label, initial state is drawn in blue.
        def fsmAsDotString(
            edgeStrings: MutableMap[(InternalState, InternalState), String]): String = {
          // Internal state id decorated with its mapped external output-state name.
          def stateName(s: InternalState) = {
            val extIndex = mapping(s)
            val extName = logic.outputStates(extIndex).name
            "S" + s + "_" + extName
          }
          def edgeBoxName(s1: InternalState, s2: InternalState) = "E_" + s1 + "_" + s2

          val res = new StringBuffer()
          res append "digraph finite_state_machine {\n"
          res append """ size="15"
 node [color = blue];"""
          res append (stateName(initState) + ";\n")
          res append " node [color = black];\n"
          for (((src, dst), label) <- edgeStrings) {
            res append (" " + edgeBoxName(src, dst) + " [ shape = box label = " +
              "\"" + label + "\"" + " ];\n")
            val toEdgeBox = " " + stateName(src) + " -> " + edgeBoxName(src, dst) + ";\n"
            val fromEdgeBox = " " + edgeBoxName(src, dst) + " -> " + stateName(dst) + ";\n"
            res append toEdgeBox
            res append fromEdgeBox
          }
          res append "}"
          res.toString
        }

        (allTransitions.get(logic.id), allMappings.get(logic.id), initStates.get(logic.id)) match {
          case ((Some(transitions), Some(mapping), Some(initState))) => {
            val simplifiedTransitions = simplifyTransitions(transitions)
            val validSrcStates = usedSrcStates(simplifiedTransitions)
            val simplifiedTransitionTable = MutableMap[(InternalState, InternalState), Set[Region]]()
            // For every (src, dst) pair, collapse its concrete edges into a minimal
            // region covering that excludes edges leaving src for other states.
            for (srcState <- validSrcStates) {
              val outgoingEdges = simplifiedTransitions.filter{
                case ((v, s1), s2) => s1 == srcState
              }
              val dstStatesForSrc = outgoingEdges.map{
                case ((v, s1), s2) => s2
              }.toSet
              for (dstState <- dstStatesForSrc) {
                val edgesBetweenSrcAndDest = outgoingEdges.filter{
                  case ((v, s1), s2) => s2 == dstState
                }
                val complementEdges = transitions.filter{
                  case ((v, s1), s2) => s1 == srcState
                } -- (edgesBetweenSrcAndDest.keys)
                val minimalCovering = findMinimalCovering(edgesBetweenSrcAndDest, complementEdges)
                val simplifiedCovering = simplifyCovering(minimalCovering)
                simplifiedTransitionTable((srcState, dstState)) = simplifiedCovering
              }
            }

            val simplifiedWithFlowAnalysis = simplifyWithFlowAnalysis(simplifiedTransitionTable, initState)

            // Writes a DOT file, converts it to PDF via `dot`, then removes the DOT.
            def generateImages(ts: TransitionRegionMap, suffix: String) {
              val edgeStringMap = ts map {
                case ((src, dst), regions) => ((src, dst), edgeSummaryString(regions))
              }
              val fsmDotStr = fsmAsDotString(edgeStringMap)
              val dotPath = logic.id + "_" + suffix + "_" + modelIndex + ".dot"
              val imgPath = logic.id + "_" + suffix + "_" + modelIndex + ".pdf"
              writeToFile(dotPath, fsmDotStr)
              import scala.sys.process._
              ("dot -Tpdf -o " + imgPath + " " + dotPath).!!
              ("rm " + dotPath).!!
            }

            generateImages(simplifiedTransitionTable, "collapsed")
            generateImages(simplifiedWithFlowAnalysis, "pruned")
          }
          case _ => // do nothing
        }
      }
    }
  }
}
| koksal/elegans-popl2013-code | src/main/scala/elegans/Summaries.scala | Scala | bsd-2-clause | 20,171 |
package example.akkawschat.cli
import akka.stream.FlowShape
import akka.stream.scaladsl.{ Concat, Flow, GraphDSL, Source }
object Util {
  /**
   * Builds a pass-through flow that first emits every element of `source`
   * and only afterwards forwards the elements streamed into the flow itself.
   */
  def inject[U](source: Source[U, Any]): Flow[U, U, Any] =
    Flow.fromGraph(GraphDSL.create() { implicit builder =>
      import GraphDSL.Implicits._

      // Two-input concat: port 0 (the injected source) is drained first,
      // then port 1 (the flow's regular input) is passed through.
      val concatStage = builder.add(new Concat[U](2))
      source ~> concatStage.in(0)

      FlowShape(concatStage.in(1), concatStage.out)
    })
}
| yukihirai0505/tutorial-program | programming/scala/websocket/cli/src/main/scala/example/akkawschat/cli/Util.scala | Scala | mit | 412 |
package im.actor.server.api.http.webhooks
import akka.actor.ActorSystem
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import akka.stream.Materializer
import akka.util.Timeout
import im.actor.server.KeyValueMappings
import im.actor.server.api.http.RoutesHandler
import im.actor.server.group.GroupViewRegion
import shardakka.{ IntCodec, ShardakkaExtension }
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._
/**
 * HTTP handler mounting all webhook routes under the "webhooks" path prefix.
 * The concrete outgoing/ingoing/status routes come from the mixed-in
 * OutgoingHooks, IngoingHooks and TokenStatus traits.
 */
class WebhooksHandler()(
  implicit
  val system: ActorSystem,
  val ec: ExecutionContext,
  val materializer: Materializer
) extends RoutesHandler with OutgoingHooks with IngoingHooks with TokenStatus {
  // Upper bound for ask-pattern calls made by the mixed-in route traits.
  implicit val timeout: Timeout = Timeout(5.seconds)
  // Key-value store keyed by integration token with Int values
  // (see KeyValueMappings.IntegrationTokens); shared with the mixed-in traits.
  protected val integrationTokensKv = ShardakkaExtension(system).simpleKeyValue[Int](KeyValueMappings.IntegrationTokens, IntCodec)
  // Combined route: outgoing hooks, then ingoing hooks, then token status.
  override def routes: Route = pathPrefix("webhooks") {
    outgoing ~ ingoing ~ status
  }
}
| yangchenghu/actor-platform | actor-server/actor-http-api/src/main/scala/im/actor/server/api/http/webhooks/WebhooksHandler.scala | Scala | mit | 976 |
package cilib
import scalaz._
import scalaz.std.anyVal._
import org.scalacheck._
import org.scalacheck.Prop._
import scalaz.scalacheck.ScalazProperties._
// Should we look at using Discipline or scalaz's way of testing? I'm not sure...
object RVarTests extends Spec("RVar") {

  // Single RNG seed shared by every equality check in this suite.
  val rng = RNG.fromTime

  // Two RVar[Int]s are considered equal when running them against the same
  // generator yields the same value (the resulting generator state is ignored).
  implicit def rngEqual = scalaz.Equal[Int].contramap { (rv: RVar[Int]) => rv.run(rng)._2 }

  // Arbitrary RVar[Int]: lift an arbitrary Int into a constant RVar.
  implicit def arbRVar: Arbitrary[RVar[Int]] = Arbitrary {
    for (n <- Arbitrary.arbitrary[Int]) yield RVar.point(n)
  }

  // Arbitrary RVar[Int => Int]: lift an arbitrary function into a constant RVar.
  implicit def arbRVarFunc: Arbitrary[RVar[Int => Int]] = Arbitrary {
    for (f <- Arbitrary.arbitrary[Int => Int]) yield RVar.point(f)
  }

  // RVar must satisfy the scalaz equality and monad laws.
  checkAll(equal.laws[RVar[Int]])
  checkAll(monad.laws[RVar])
}
| robgarden/cilib | tests/src/test/scala/cilib/RVarTests.scala | Scala | gpl-3.0 | 697 |
package com.tierline.scala.activemodel.singletenant
import com.tierline.scala.activemodel.{TestSchema, TestSuite}
import com.tierline.scala.activemodel.singletenant.domain._
import org.squeryl.PrimitiveTypeMode._
class RepositoryTest extends TestSuite {
override val schema = TestSchema
def insert(size: Int) {
for (i <- 0 until size) new Cart("Note" + i, 1000 * i).save()
}
test("Reset auto inclument") {
val c1 = new Cart().create()
val firstId = c1.id;
val c2 = new Cart().create()
assert(c2.id == firstId + 1)
Cart.deleteAll()
Cart.resetAutoIncrement()
val c3 = new Cart().create()
assert(c3.id == 1)
}
test("Delete All") {
Cart.deleteAll()
assert(Cart.countAll == 0)
}
test("Delete All By Condition") {
insert(10)
Cart.deleteAll(c => (c.name like "Note1%") or (c.name === "Note0"))
val result = Cart.countAll
assert(result == 8, "result = " + result)
}
test("Find by id") {
val id = new Cart("Note", 1000).create.id
Cart.findById(id) match {
case Some(g) => assert(true)
case None => assert(false)
}
}
test("Count entities") {
Cart.deleteAll()
val size: Int = 20
insert(size)
assert(Cart.countAll == size)
}
test("Count entities by condition") {
Cart.deleteAll()
val size: Int = 20
insert(size)
val name = Cart.first.get.name
val result = Cart.countBy(c => c.name === name).toInt
assert(result == 1)
}
test("Exsits entity") {
Cart.deleteAll()
val size: Int = 20
insert(size)
val id = Cart.first.get.id
assert(true == Cart.exists(id))
val notExsitsId = 0
assert(false == Cart.exists(notExsitsId))
}
test("Count by conditions") {
Cart.deleteAll()
val size: Int = 20
insert(size)
val count = Cart.countByName(Cart.first.get.name)
assert(count == 1)
}
test("Find First Objects") {
Goods.deleteAll()
Cart.deleteAll()
val size: Int = 20
insert(size)
Cart.first match {
case Some(v) => assert(true)
case _ => assert(false)
}
}
test("Find") {
val size: Int = 20
Cart.deleteAll()
insert(size)
assert(Cart.countAll == size)
val id: Long = Cart.first match {
case Some(value) => value.id
case None => {
assert(false)
0
}
}
var sizeResult = 0;
{
val result1 = Cart.find(e => (e.id === id) and (e.name like "%Note%"))
sizeResult = result1.size
}
assert(sizeResult == 1, "result = " + sizeResult)
}
test("Getting All Objects") {
Cart.deleteAll()
val size: Int = 20
insert(size)
val all = Cart.all
assert(all.size == size)
}
test("Save Goods of child object by associate") {
Goods.deleteAll()
var cart = new Cart(0L, "", 100).create
cart.goods.associate(new Goods(0L, "", 100))
assert(Goods.countAll == 1)
val seq = cart.goods.toSeq
assert(seq.size == 1)
}
// Verifies persisting the Cart–Goods association from both directions:
// parent-side `assign` (cart.goods.assign(g)) and child-side `assign`
// (g2.cart.assign(cart)).
test("Save Goods of child object by assign") {
  Goods.deleteAll()
  // Parent-to-child: assign the goods to a new cart, then save the goods.
  val g = new Goods(0L, "", 100)
  new Cart().create.goods.assign(g)
  g.save
  assert(Goods.countAll == 1)
  Goods.deleteAll()
  // Child-to-parent: assign the cart on the goods side, then save the goods.
  val g2 = new Goods(0L, "", 100)
  g2.cart.assign(new Cart().create)
  // BUG FIX: this previously called `g.save` again, so `g2` (the entity whose
  // assignment is under test) was never persisted; the count assertion passed
  // vacuously because re-saving `g` restored a single row.
  g2.save
  assert(Goods.countAll == 1)
}
// Pagination: 17 rows with a page size of 10 yields pages of 10, 7 and 0.
// NOTE(review): fetch(0, 10) and fetch(1, 10) both return the first 10 rows,
// so page numbering appears to clamp 0 to the first page — confirm against
// the fetch implementation.
test("Fetch object") {
  Goods.deleteAll()
  Cart.deleteAll()
  insert(17)
  assert(Cart.countAll == 17)
  assert(Cart.fetch(1, 10).size == 10)
  assert(Cart.fetch(2, 10).size == 7)
  assert(Cart.fetch(3, 10).size == 0)
  assert(Cart.fetch(0, 10).size == 10)
}
// Pagination combined with an equality condition matches a single row.
test("Fetch object by condition") {
  Goods.deleteAll()
  Cart.deleteAll()
  insert(17)
  assert(Cart.countAll == 17)
  assert(Cart.fetch(c => c.name === "Note1")(1, 10).size == 1)
}
// Descending order on `size`: the head row carries the largest size value.
// NOTE(review): the expected 16000 implies `insert(17)` generates sizes up to
// 16 * 1000 — confirm against the insert helper.
test("Fetch object by order by") {
  Goods.deleteAll()
  Cart.deleteAll()
  insert(17)
  import org.squeryl.PrimitiveTypeMode._
  assert(Cart.countAll == 17)
  assert(Cart.find(c => 1 === 1,
    c => c.size desc).head.size == 16000)
}
} | tierline/scala-activemodel | src/test/scala/com/tierline/scala/activemodel/singletenant/RepositoryTest.scala | Scala | mit | 4,045 |
package spire
package math
object NumberTag {
  /** Summons the implicit `NumberTag` instance for `A`. */
  def apply[A](implicit ev: NumberTag[A]): NumberTag[A] = ev

  /** How faithfully a number type models the real numbers. */
  sealed trait Resolution
  /** The type represents whole numbers only. */
  case object Integral extends Resolution
  /** The type stores inexact approximations (used below for float-like types). */
  case object Approximate extends Resolution
  /** The type stores exact (non-approximate) values. */
  case object Exact extends Resolution

  /**
   * Tag for bounded, signed, overflowing integral types.
   * No NaN or infinities; every value is finite.
   */
  class BuiltinIntTag[A](zero: A, min: A, max: A) extends NumberTag[A] {
    def resolution: Resolution = Integral
    val hasZero: Option[A] = Some(zero)
    val hasMinValue: Option[A] = Some(min)
    val hasMaxValue: Option[A] = Some(max)
    def hasNaN: Option[A] = None
    def hasPositiveInfinity: Option[A] = None
    def hasNegativeInfinity: Option[A] = None
    def overflows: Boolean = true
    def isSigned: Boolean = true
    def isInfinite(a: A): Boolean = false
    def isNaN(a: A): Boolean = false
  }

  /**
   * Tag for bounded, unsigned integral types: the minimum value is `zero`.
   */
  class UnsignedIntTag[A](zero: A, max: A) extends NumberTag[A] {
    def resolution: Resolution = Integral
    val hasZero: Option[A] = Some(zero)
    val hasMinValue: Option[A] = Some(zero)
    val hasMaxValue: Option[A] = Some(max)
    def hasNaN: Option[A] = None
    def hasPositiveInfinity: Option[A] = None
    def hasNegativeInfinity: Option[A] = None
    def overflows: Boolean = true
    def isSigned: Boolean = false
    def isInfinite(a: A): Boolean = false
    def isNaN(a: A): Boolean = false
  }

  /**
   * Tag for approximate (floating-point-style) types that carry NaN and
   * signed infinities. `isInfinite`/`isNaN` are left for the concrete type.
   */
  abstract class BuiltinFloatTag[A](zero: A, min: A, max: A, nan: A, posInf: A, negInf: A) extends NumberTag[A] {
    def resolution: Resolution = Approximate
    val hasZero: Option[A] = Some(zero)
    val hasMinValue: Option[A] = Some(min)
    val hasMaxValue: Option[A] = Some(max)
    val hasNaN: Option[A] = Some(nan)
    val hasPositiveInfinity: Option[A] = Some(posInf)
    val hasNegativeInfinity: Option[A] = Some(negInf)
    def overflows: Boolean = false
    def isSigned: Boolean = true
  }

  /**
   * Tag for unbounded, signed, non-overflowing types: no min/max bound, no
   * NaN and no infinities; the caller chooses the resolution.
   */
  class LargeTag[A](val resolution: Resolution, zero: A) extends NumberTag[A] {
    val hasZero: Option[A] = Some(zero)
    def hasMinValue: Option[A] = None
    def hasMaxValue: Option[A] = None
    def hasNaN: Option[A] = None
    def hasPositiveInfinity: Option[A] = None
    def hasNegativeInfinity: Option[A] = None
    def overflows: Boolean = false
    def isSigned: Boolean = true
    def isInfinite(a: A): Boolean = false
    def isNaN(a: A): Boolean = false
  }

  /**
   * Fully caller-specified tag; fixed to have no NaN and no infinities.
   */
  class CustomTag[A](val resolution: Resolution,
      val hasZero: Option[A], val hasMinValue: Option[A],
      val hasMaxValue: Option[A], val overflows: Boolean,
      val isSigned: Boolean) extends NumberTag[A] {
    def hasNaN: Option[A] = None
    def hasPositiveInfinity: Option[A] = None
    def hasNegativeInfinity: Option[A] = None
    def isInfinite(a: A): Boolean = false
    def isNaN(a: A): Boolean = false
  }
}
/**
 * Describes implementation characteristics of a number type `A`: its
 * resolution (integral, approximate, or exact), its finite bounds if any,
 * whether arithmetic on it can overflow, whether it is signed, and how (if
 * at all) it represents NaN and the infinities.
 */
trait NumberTag[A] {

  /** The resolution (integral, approximate, or exact) of this number type. */
  def resolution: NumberTag.Resolution

  /**
   * The smallest finite value representable by `A`, if one exists. For
   * instance, `Double`'s smallest finite value is `-1.7976931348623157E308`,
   * while `BigInt` has no smallest value.
   */
  def hasMinValue: Option[A]

  /** The representation of the real number 0 in `A`, if it has one. */
  def hasZero: Option[A]

  /**
   * The largest finite value representable by `A`, if one exists. For
   * instance, `Double`'s largest finite value is `1.7976931348623157E308`,
   * while `BigInt` has no largest value.
   */
  def hasMaxValue: Option[A]

  /**
   * A representative undefined/invalid value (NaN) of `A`, if it has one;
   * `None` means every value of `A` is a valid number on the extended real
   * number line.
   */
  def hasNaN: Option[A]

  /** The positive-infinity value of `A`, if it can represent one. */
  def hasPositiveInfinity: Option[A]

  /** The negative-infinity value of `A`, if it can represent one. */
  def hasNegativeInfinity: Option[A]

  /**
   * True if arithmetic on `A` can overflow (as with `Int` and `Long`).
   */
  def overflows: Boolean

  /** True if `A` can represent negative as well as positive values. */
  def isSigned: Boolean

  /**
   * True if all values representable by `A` are finite, i.e. both a smallest
   * and a largest finite value exist.
   */
  def finite: Boolean = hasMinValue.nonEmpty && hasMaxValue.nonEmpty

  /**
   * True if `A` can represent arbitrarily large or small values — the
   * negation of [[finite]].
   */
  def infinite: Boolean = !finite

  /** True if `a` is an infinity (positive or negative), false otherwise. */
  def isInfinite(a: A): Boolean

  /**
   * True if `a` is an invalid number. The infinities count as valid.
   */
  def isNaN(a: A): Boolean

  /** True if `a` is finite: neither an infinity nor NaN. */
  def isFinite(a: A): Boolean = !isInfinite(a) && !isNaN(a)
}
| non/spire | core/src/main/scala/spire/math/NumberTag.scala | Scala | mit | 5,731 |
/* =========================================================================================
* Copyright © 2013-2014 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
* =========================================================================================
*/
import sbt._
import sbt.Keys._
// sbt 0.13-era multi-project build definition (pre-build.sbt `Build` trait).
// Settings order matters to sbt, so the chains below must stay in sequence.
object Projects extends Build {
  import Settings._
  import Dependencies._
  import com.typesafe.sbt.osgi.OsgiKeys

  /** Build aggregator. */
  lazy val root = Project("sigar-a", file("."))
    .settings(basicSettings: _*)
    .settings(formatSettings: _*)
    .settings(noPublishing: _*)
    .aggregate(sigarLoader, verifyAgent, verifyOsgi)

  /** Core Sigar artifact. */
  lazy val sigarLoader = Project("sigar-loader", file("core"))
    .settings(basicSettings: _*)
    .settings(formatSettings: _*)
    .settings(Osgi.sigarLoader: _*)
    .settings(SigarRepack.settings: _*)
    .settings(
      libraryDependencies ++=
        external(sigarJar, sigarZip) ++
        provided(osgiCore, osgiCompendium) ++
        test(junit, junitInterface, slf4Api, slf4Jul, slf4Log4j, logback)
    )

  /** Sigar java agent integration test. */
  lazy val verifyAgent = Project("sigar-verify-agent", file("verify-agent"))
    .settings(basicSettings: _*)
    .settings(noPublishing: _*)
    .settings(SigarAgent.settings: _*)
    .settings(
      libraryDependencies ++=
        test(junit, junitInterface, slf4Api, slf4Jul, slf4Log4j, logback)
    ).dependsOn(sigarLoader)

  /** Sigar OSGI bundle activator integration test. */
  lazy val verifyOsgi = Project("sigar-verify-osgi", file("verify-osgi"))
    .settings(basicSettings: _*)
    .settings(noPublishing: _*)
    .settings(SigarOsgi.settings: _*)
    .settings(
      libraryDependencies ++=
        test(junit, junitInterface, slf4Api, slf4Jul, slf4Log4j, logback)
    ).dependsOn(sigarLoader)

  /** Disables publishing (used by the aggregator and the test-only projects). */
  lazy val noPublishing = Seq(publish := (), publishLocal := (), publishArtifact := false)

  /** Reruns the sigar-loader OSGi bundle packaging as part of `compile`. */
  // `<<=` is the deprecated sbt 0.13 task-dependency operator; the tuple map
  // discards the bundle file and returns the compile analysis unchanged.
  lazy val generateSigarBundle =
    (Keys.compile in Compile) <<= (OsgiKeys.bundle in sigarLoader, Keys.compile in Compile) map ((_, c) => c)

  /** Global settings: shell prompt shows the current project id. */
  override lazy val settings =
    super.settings ++
      Seq(shellPrompt := { s => Project.extract(s).currentProject.id + " > " })
}
| kamon-io/sigar-loader | project/Projects.scala | Scala | apache-2.0 | 2,756 |
/**
* Copyright (C) 2017 DANS - Data Archiving and Networked Services (info@dans.knaw.nl)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cake.greeter.component
import cake.greeter.ScalaTestFixture
import nl.knaw.dans.lib.logging.DebugEnhancedLogging
import scala.util.{ Failure, Success }
/** Unit tests for [[GreeterComponent]] using a fixed greeting pattern. */
class GreeterComponentSpec extends ScalaTestFixture with GreeterComponent with DebugEnhancedLogging {

  // Concrete greeter under test: formats the given name with "Hello %s!".
  override val greeter = new Greeter {
    override val greetingPattern: String = "Hello %s!"
  }

  "greet" should "return a greet based on the formatting pattern" in {
    inside(greeter.greet("Bob")) {
      case Success(greet) => greet shouldBe "Hello Bob!"
    }
  }

  // "AliceBobCharlesDaveEve" is 22 characters, over the limit.
  // NOTE(review): the 20-character limit is stated by the test name only;
  // confirm against the Greeter implementation (outside this view).
  it should "fail when the name is longer than 20 characters" in {
    inside(greeter.greet("AliceBobCharlesDaveEve")) {
      case Failure(e) => e.getMessage shouldBe "your name is too long!"
    }
  }
}
| rvanheest/easy-greeter | src/test/scala/cake/greeter/component/GreeterComponentSpec.scala | Scala | apache-2.0 | 1,401 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.spark.sql
import java.util.ArrayList
import java.util.{LinkedHashSet => JHashSet}
import java.util.{List => JList}
import java.util.{Map => JMap}
import java.util.Properties
import scala.collection.JavaConverters.asScalaBufferConverter
import scala.collection.JavaConverters.propertiesAsScalaMapConverter
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.sql.types.ArrayType
import org.apache.spark.sql.types.BinaryType
import org.apache.spark.sql.types.BooleanType
import org.apache.spark.sql.types.ByteType
import org.apache.spark.sql.types.DataType
import org.apache.spark.sql.types.DataTypes
import org.apache.spark.sql.types.DoubleType
import org.apache.spark.sql.types.FloatType
import org.apache.spark.sql.types.IntegerType
import org.apache.spark.sql.types.LongType
import org.apache.spark.sql.types.NullType
import org.apache.spark.sql.types.ShortType
import org.apache.spark.sql.types.StringType
import org.apache.spark.sql.types.StructField
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.types.TimestampType
import org.elasticsearch.hadoop.EsHadoopIllegalArgumentException
import org.elasticsearch.hadoop.cfg.InternalConfigurationOptions
import org.elasticsearch.hadoop.cfg.Settings
import org.elasticsearch.hadoop.rest.InitializationUtils
import org.elasticsearch.hadoop.rest.RestRepository
import org.elasticsearch.hadoop.serialization.FieldType.BINARY
import org.elasticsearch.hadoop.serialization.FieldType.BOOLEAN
import org.elasticsearch.hadoop.serialization.FieldType.BYTE
import org.elasticsearch.hadoop.serialization.FieldType.DATE
import org.elasticsearch.hadoop.serialization.FieldType.DATE_NANOS
import org.elasticsearch.hadoop.serialization.FieldType.DOUBLE
import org.elasticsearch.hadoop.serialization.FieldType.HALF_FLOAT
import org.elasticsearch.hadoop.serialization.FieldType.SCALED_FLOAT
import org.elasticsearch.hadoop.serialization.FieldType.FLOAT
import org.elasticsearch.hadoop.serialization.FieldType.GEO_POINT
import org.elasticsearch.hadoop.serialization.FieldType.GEO_SHAPE
import org.elasticsearch.hadoop.serialization.FieldType.INTEGER
import org.elasticsearch.hadoop.serialization.FieldType.JOIN
import org.elasticsearch.hadoop.serialization.FieldType.KEYWORD
import org.elasticsearch.hadoop.serialization.FieldType.LONG
import org.elasticsearch.hadoop.serialization.FieldType.NESTED
import org.elasticsearch.hadoop.serialization.FieldType.NULL
import org.elasticsearch.hadoop.serialization.FieldType.OBJECT
import org.elasticsearch.hadoop.serialization.FieldType.SHORT
import org.elasticsearch.hadoop.serialization.FieldType.STRING
import org.elasticsearch.hadoop.serialization.FieldType.TEXT
import org.elasticsearch.hadoop.serialization.FieldType.WILDCARD
import org.elasticsearch.hadoop.serialization.dto.mapping.Field
import org.elasticsearch.hadoop.serialization.dto.mapping.GeoField
import org.elasticsearch.hadoop.serialization.dto.mapping.GeoPointType
import org.elasticsearch.hadoop.serialization.dto.mapping.GeoShapeType
import org.elasticsearch.hadoop.serialization.dto.mapping.Mapping
import org.elasticsearch.hadoop.serialization.dto.mapping.MappingUtils
import org.elasticsearch.hadoop.serialization.field.FieldFilter
import org.elasticsearch.hadoop.serialization.field.FieldFilter.NumberedInclude
import org.elasticsearch.hadoop.util.Assert
import org.elasticsearch.hadoop.util.IOUtils
import org.elasticsearch.hadoop.util.SettingsUtils
import org.elasticsearch.hadoop.util.StringUtils
import org.elasticsearch.spark.sql.Utils.ROOT_LEVEL_NAME
import org.elasticsearch.spark.sql.Utils.ROW_INFO_ARRAY_PROPERTY
import org.elasticsearch.spark.sql.Utils.ROW_INFO_ORDER_PROPERTY
private[sql] object SchemaUtils {
  /** Pairs the resolved Elasticsearch mapping with the Spark SQL schema derived from it. */
  case class Schema(mapping: Mapping, struct: StructType)

  /**
   * Resolves the mapping of the configured read resource and converts it into
   * a Spark SQL `StructType`.
   */
  def discoverMapping(cfg: Settings): Schema = {
    val (mapping, geoInfo) = discoverMappingAndGeoFields(cfg)
    val struct = convertToStruct(mapping, geoInfo, cfg)
    Schema(mapping, struct)
  }

  /**
   * Fetches and filters the mapping of the configured read resource and
   * samples its geo fields. Throws `EsHadoopIllegalArgumentException` if the
   * resource does not exist or has no mapping.
   */
  def discoverMappingAndGeoFields(cfg: Settings): (Mapping, JMap[String, GeoField]) = {
    InitializationUtils.validateSettings(cfg)
    InitializationUtils.discoverClusterInfo(cfg, Utils.LOGGER)

    val repo = new RestRepository(cfg)
    try {
      if (repo.resourceExists(true)) {
        var mappingSet = repo.getMappings
        if (mappingSet == null || mappingSet.isEmpty) {
          throw new EsHadoopIllegalArgumentException(s"Cannot find mapping for ${cfg.getResourceRead} - one is required before using Spark SQL")
        }
        var mapping = mappingSet.getResolvedView
        mapping = MappingUtils.filterMapping(mapping, cfg)
        val geoInfo = repo.sampleGeoFields(mapping)

        // apply mapping filtering only when present to minimize configuration settings (big when dealing with large mappings)
        if (StringUtils.hasText(cfg.getReadFieldInclude) || StringUtils.hasText(cfg.getReadFieldExclude)) {
          // NB: metadata field is synthetic so it doesn't have to be filtered
          // its presence is controlled through the dedicated config setting
          cfg.setProperty(InternalConfigurationOptions.INTERNAL_ES_TARGET_FIELDS, StringUtils.concatenate(mapping.flatten().keySet(), StringUtils.DEFAULT_DELIMITER))
        }
        (mapping, geoInfo)
      }
      else {
        throw new EsHadoopIllegalArgumentException(s"Cannot find mapping for ${cfg.getResourceRead} - one is required before using Spark SQL")
      }
    } finally {
      // always release the REST connection
      repo.close()
    }
  }

  /**
   * Converts a top-level mapping into a `StructType`, appending the synthetic
   * metadata column when read-metadata is enabled in the settings.
   */
  def convertToStruct(mapping: Mapping, geoInfo: JMap[String, GeoField], cfg: Settings): StructType = {
    val arrayIncludes = SettingsUtils.getFieldArrayFilterInclude(cfg)
    val arrayExcludes = StringUtils.tokenize(cfg.getReadFieldAsArrayExclude)

    var fields = for (fl <- mapping.getFields) yield convertField(fl, geoInfo, null, arrayIncludes, arrayExcludes, cfg)
    if (cfg.getReadMetadata) {
      // enrich structure
      val metadataMap = DataTypes.createStructField(cfg.getReadMetadataField, DataTypes.createMapType(StringType, StringType, true), true)
      fields :+= metadataMap
    }

    DataTypes.createStructType(fields)
  }

  // Converts a nested (object-like) field's properties into a StructType.
  private def convertToStruct(field: Field, geoInfo: JMap[String, GeoField], parentName: String,
      arrayIncludes: JList[NumberedInclude], arrayExcludes: JList[String], cfg:Settings): StructType = {
    DataTypes.createStructType(for (fl <- field.properties()) yield convertField(fl, geoInfo, parentName, arrayIncludes, arrayExcludes, cfg))
  }

  // Maps one ES field to a Spark StructField; fields matched by the
  // "read field as array" include list are wrapped in ArrayType levels
  // according to the matched depth.
  private def convertField(field: Field, geoInfo: JMap[String, GeoField], parentName: String,
      arrayIncludes: JList[NumberedInclude], arrayExcludes: JList[String], cfg:Settings): StructField = {
    // absolute (dotted) field name, used for filtering and geo lookups
    val absoluteName = if (parentName != null) parentName + "." + field.name() else field.name()
    val matched = FieldFilter.filter(absoluteName, arrayIncludes, arrayExcludes, false)
    val createArray = !arrayIncludes.isEmpty() && matched.matched

    var dataType = Utils.extractType(field) match {
      case NULL => NullType
      case BINARY => BinaryType
      case BOOLEAN => BooleanType
      case BYTE => ByteType
      case SHORT => ShortType
      case INTEGER => IntegerType
      case LONG => LongType
      case FLOAT => FloatType
      case DOUBLE => DoubleType
      // ES types with no exact Spark equivalent are widened
      case HALF_FLOAT => FloatType
      case SCALED_FLOAT => DoubleType
      // String type
      case STRING => StringType
      case TEXT => StringType
      case KEYWORD => StringType
      case WILDCARD => StringType
      // dates become Timestamps only when rich date mapping is enabled
      case DATE => if (cfg.getMappingDateRich) TimestampType else StringType
      case DATE_NANOS => if (cfg.getMappingDateRich) TimestampType else StringType
      case OBJECT => convertToStruct(field, geoInfo, absoluteName, arrayIncludes, arrayExcludes, cfg)
      case NESTED => DataTypes.createArrayType(convertToStruct(field, geoInfo, absoluteName, arrayIncludes, arrayExcludes, cfg))
      case JOIN => convertToStruct(field, geoInfo, absoluteName, arrayIncludes, arrayExcludes, cfg)
      // GEO: the Spark type depends on the sampled document format
      case GEO_POINT => {
        val geoPoint = geoInfo.get(absoluteName) match {
          case GeoPointType.LON_LAT_ARRAY => DataTypes.createArrayType(DoubleType)
          case GeoPointType.GEOHASH => StringType
          case GeoPointType.LAT_LON_STRING => StringType
          case GeoPointType.LAT_LON_OBJECT => {
            val lat = DataTypes.createStructField("lat", DoubleType, true)
            val lon = DataTypes.createStructField("lon", DoubleType, true)
            DataTypes.createStructType(Array(lat,lon))
          }
        }
        if (Utils.LOGGER.isDebugEnabled()) {
          Utils.LOGGER.debug(s"Detected field [${absoluteName}] as a GeoPoint with format ${geoPoint.simpleString}")
        }
        geoPoint
      }
      case GEO_SHAPE => {
        val fields = new ArrayList[StructField]()
        fields.add(DataTypes.createStructField("type", StringType, true))
        val COORD = "coordinates"
        // coordinate nesting depth depends on the sampled shape type
        geoInfo.get(absoluteName) match {
          case GeoShapeType.POINT => fields.add(DataTypes.createStructField(COORD, DataTypes.createArrayType(DoubleType), true))
          case GeoShapeType.LINE_STRING => fields.add(DataTypes.createStructField(COORD, createNestedArray(DoubleType, 2), true))
          case GeoShapeType.POLYGON => {
            fields.add(DataTypes.createStructField(COORD, createNestedArray(DoubleType, 3), true))
            fields.add(DataTypes.createStructField("orientation", StringType, true))
          }
          case GeoShapeType.MULTI_POINT => fields.add(DataTypes.createStructField(COORD, createNestedArray(DoubleType, 2), true))
          case GeoShapeType.MULTI_LINE_STRING => fields.add(DataTypes.createStructField(COORD, createNestedArray(DoubleType, 3), true))
          case GeoShapeType.MULTI_POLYGON => fields.add(DataTypes.createStructField(COORD, createNestedArray(DoubleType, 4), true))
          case GeoShapeType.GEOMETRY_COLLECTION => throw new EsHadoopIllegalArgumentException(s"Geoshape $geoInfo not supported")
          case GeoShapeType.ENVELOPE => fields.add(DataTypes.createStructField(COORD, createNestedArray(DoubleType, 2), true))
          case GeoShapeType.CIRCLE => {
            fields.add(DataTypes.createStructField(COORD, DataTypes.createArrayType(DoubleType), true))
            fields.add(DataTypes.createStructField("radius", StringType, true))
          }
        }
        val geoShape = DataTypes.createStructType(fields)
        if (Utils.LOGGER.isDebugEnabled()) {
          Utils.LOGGER.debug(s"Detected field [${absoluteName}] as a GeoShape with format ${geoShape.simpleString}")
        }
        geoShape
      }
      // fall back to String
      case _ => StringType //throw new EsHadoopIllegalStateException("Unknown field type " + field);
    }

    if (createArray) {
      // can't call createNestedArray for some reason...
      for (_ <- 0 until matched.depth) {
        dataType = DataTypes.createArrayType(dataType)
      }
    }
    DataTypes.createStructField(field.name(), dataType, true)
  }

  // Wraps `elementType` in `depth` levels of ArrayType.
  private def createNestedArray(elementType: DataType, depth: Int): DataType = {
    var array = elementType
    for (_ <- 0 until depth) {
      array = DataTypes.createArrayType(array)
    }
    array
  }

  /**
   * Serializes the row layout derived from `struct` into `settings` so the
   * value reader (running remotely) can rebuild rows in the right order.
   */
  def setRowInfo(settings: Settings, struct: StructType) = {
    val rowInfo = detectRowInfo(settings, struct)
    // save the field in the settings to pass it to the value reader
    settings.setProperty(ROW_INFO_ORDER_PROPERTY, IOUtils.propsToString(rowInfo._1))
    // also include any array info
    settings.setProperty(ROW_INFO_ARRAY_PROPERTY, IOUtils.propsToString(rowInfo._2))
  }

  /**
   * Deserializes what `setRowInfo` stored: (ordered column names per struct
   * level, set of field names that must be read as arrays).
   */
  def getRowInfo(settings: Settings) = {
    val rowOrderString = settings.getProperty(ROW_INFO_ORDER_PROPERTY)
    Assert.hasText(rowOrderString, "no schema/row order detected...")

    val rowOrderProps = IOUtils.propsFromString(rowOrderString)

    val rowArrayString = settings.getProperty(ROW_INFO_ARRAY_PROPERTY)
    val rowArrayProps = if (StringUtils.hasText(rowArrayString)) IOUtils.propsFromString(rowArrayString) else new Properties()

    // LinkedHashMap preserves the column order of each level
    val order = new scala.collection.mutable.LinkedHashMap[String, Seq[String]]
    for (prop <- rowOrderProps.asScala) {
      val value = StringUtils.tokenize(prop._2).asScala
      if (!value.isEmpty) {
        order.put(prop._1, value.toSeq)
      }
    }

    val needToBeArray = new JHashSet[String]()

    for (prop <- rowArrayProps.asScala) {
      needToBeArray.add(prop._1)
    }

    (order,needToBeArray)
  }

  /**
   * Walks `struct` and records, per nesting level, the column order and the
   * array depth of each field; the root level order can be overridden by the
   * required-columns pushdown property.
   */
  def detectRowInfo(settings: Settings, struct: StructType): (Properties, Properties) = {
    // tuple - 1 = columns (in simple names) for each row, 2 - what fields (in absolute names) are arrays
    val rowInfo = (new Properties, new Properties)

    doDetectInfo(rowInfo, ROOT_LEVEL_NAME, struct)
    val requiredFields = settings.getProperty(Utils.DATA_SOURCE_REQUIRED_COLUMNS)
    if (StringUtils.hasText(requiredFields)) {
      rowInfo._1.setProperty(ROOT_LEVEL_NAME, requiredFields)
    }

    rowInfo
  }

  // Recursive helper for detectRowInfo: structs record their field order,
  // arrays increment the depth counter for their level, primitives are skipped.
  private def doDetectInfo(info: (Properties, Properties), level: String, dataType: DataType): Unit = {
    dataType match {
      case s: StructType => {
        val fields = new java.util.ArrayList[String]
        for (field <- s) {
          fields.add(field.name)
          doDetectInfo(info, if (level != ROOT_LEVEL_NAME) level + "." + field.name else field.name, field.dataType)
        }
        info._1.setProperty(level, StringUtils.concatenate(fields, StringUtils.DEFAULT_DELIMITER))
      }
      case a: ArrayType => {
        val prop = info._2.getProperty(level)
        val depth = (if(StringUtils.hasText(prop)) Integer.parseInt(prop) else 0) + 1
        info._2.setProperty(level, String.valueOf(depth))
        doDetectInfo(info, level, a.elementType)
      }
      // ignore primitives
      case _ => // ignore
    }
  }
}
| elastic/elasticsearch-hadoop | spark/sql-30/src/main/scala/org/elasticsearch/spark/sql/SchemaUtils.scala | Scala | apache-2.0 | 14,940 |
package fixbugs.mc
/** Command-line entry point that runs [[EvalSpec]]. */
object EvalS {
  def main(args: Array[String]): Unit = {
    val spec = new EvalSpec()
    spec.execute()
  }
}
| FauxFaux/fixbugs | tests/fixbugs/mc/EvalS.scala | Scala | lgpl-3.0 | 104 |
package com.github.kikuomax.spray.jwt
import com.nimbusds.jose.{
JWSAlgorithm,
JWSHeader,
JWSObject,
Payload
}
import com.nimbusds.jose.crypto.{
MACSigner,
MACVerifier
}
import com.nimbusds.jwt.JWTClaimsSet
import java.text.ParseException
import java.util.{
Calendar,
Date
}
import net.minidev.json.JSONObject
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.Duration
import scala.language.implicitConversions
import spray.routing.{
AuthorizationFailedRejection,
Directive1
}
import spray.routing.authentication.{
UserPass,
UserPassAuthenticator
}
import spray.routing.directives.BasicDirectives.{
extract,
provide
}
import spray.routing.directives.CookieDirectives.optionalCookie
import spray.routing.directives.HeaderDirectives.optionalHeaderValueByName
import spray.routing.directives.RouteDirectives.reject
import scala.util.Try
/**
 * Provides utilities for building, signing and verification of a JSON Web
 * Token (JWT).
 */
trait JwtDirectives {

  /**
   * Wraps a `UserPassAuthenticator` so that a successful authentication
   * yields a signed JSON Web Signature (JWS) object.
   *
   * Useful together with `BasicAuth` and an `authenticate` directive: the
   * inner route receives the `JWSObject` built from the claims produced by
   * `claimsBuilder` and signed by `signer`.
   *
   * @tparam T
   *     Outcome type of `authenticator`.
   * @param authenticator
   *     Authenticates a given pair of a user and a password.
   * @param claimsBuilder
   *     Builds a claims set from the authentication result; `None` yields no
   *     token.
   * @param signer
   *     Signs the claims set into a JSON Web Signature.
   * @param executionContext
   *     Execution context on which the `Future` returned from
   *     `authenticator` is transformed.
   */
  def jwtAuthenticator[T](authenticator: UserPassAuthenticator[T])
    (implicit claimsBuilder: T => Option[JWTClaimsSet],
              signer: JWTClaimsSet => JWSObject,
              executionContext: ExecutionContext):
      UserPassAuthenticator[JWSObject] =
    userPass => authenticator(userPass) map { outcome =>
      outcome flatMap claimsBuilder map signer
    }

  /**
   * Verifies a token sent with an HTTP request.
   *
   * Driven by the parameters collected in a [[JwtAuthorizationMagnet]]:
   *  1. `extractor` pulls an optional JWS out of the request.
   *  1. `confirmer` checks its signature and extracts the claims set.
   *  1. `verifier` validates the claims and maps them to a `T`, which is
   *     supplied to the inner route.
   *
   * The request is rejected with `AuthorizationFailedRejection` when any of
   * these steps produces `None`: no JWS in the request, a bad signature, an
   * unparsable claims set, or claims that fail verification.
   */
  def authorizeToken[T](magnet: JwtAuthorizationMagnet[T]): Directive1[T] =
    magnet.extractor flatMap { jwsOpt =>
      val verified = for {
        jws    <- jwsOpt
        claims <- magnet.confirmer(jws)
        result <- magnet.verifier(claims)
      } yield result
      verified match {
        case Some(result) => provide(result)
        case None         => reject(AuthorizationFailedRejection)
      }
    }
}
/** Companion object of [[JwtDirectives]]. */
object JwtDirectives extends JwtDirectives

/**
 * Magnet that attracts parameters necessary for the `authorizeToken`
 * directive.
 *
 * @constructor
 * @tparam T
 *     Outcome type of `verifier`.
 * @param extractor
 *     Extracts a JSON Web Signature (JWS) from an HTTP request.
 * @param confirmer
 *     Confirms the signature of the JWS and extracts the claims set.
 * @param verifier
 *     Verifiers the claims set and converts it to an application-specific
 *     object.
 */
case class JwtAuthorizationMagnet[T](
  extractor: Directive1[Option[JWSObject]],
  confirmer: JWSObject => Option[JWTClaimsSet],
  verifier: JWTClaimsSet => Option[T])

/** Companion object of [[JwtAuthorizationMagnet]]. */
object JwtAuthorizationMagnet {
  /**
   * Implicitly converts a given verifier function into
   * a [[JwtAuthorizationMagnet]].
   *
   * The JWS is extracted from the "Authorization" header via
   * [[JwsExtractor.extractJwsFromAuthorizationHeader]].
   *
   * @param verifier
   *     Returns an application-specific object if a given claims set is
   *     verified, otherwise `None`.
   */
  implicit def fromVerifier[T](verifier: JWTClaimsSet => Option[T])
    (implicit confirmer: JWSObject => Option[JWTClaimsSet]):
      JwtAuthorizationMagnet[T] = JwtAuthorizationMagnet(
        JwsExtractor.extractJwsFromAuthorizationHeader,
        confirmer,
        verifier)

  /**
   * Implicitly converts a given pair of an extractor directive and a verifier
   * function into a [[JwtAuthorizationMagnet]].
   *
   * @param ev
   *     `ev._1` extracts a JWS from an HTTP request.
   *     `ev._2` verifies a given claims set and returns an application-specific
   *     object.
   */
  implicit def fromExtractor[T](ev: (Directive1[Option[JWSObject]],
                                     JWTClaimsSet => Option[T]))
    (implicit confirmer: JWSObject => Option[JWTClaimsSet]):
      JwtAuthorizationMagnet[T] =
    JwtAuthorizationMagnet(ev._1, confirmer, ev._2)
}
/**
 * Provides signature signer and verifier for JWS.
 *
 * @constructor
 * @param algorithm
 *     Name of the signature algorithm.
 * @param secret
 *     Secret key for the signature algorithm.
 */
case class JwtSignature(algorithm: JWSAlgorithm, secret: String) {
  /** Common header of JWS objects. */
  private val header = new JWSHeader(algorithm)

  /** Common signer for JWS objects. */
  // NOTE(review): `getBytes()` uses the platform default charset, so the key
  // bytes (and thus signatures) can differ across JVMs with different default
  // encodings — consider pinning UTF-8 (a behavior change for existing tokens).
  private val signer = new MACSigner(secret.getBytes())

  /** Common verifier for JWS objects. */
  private val verifier = new MACVerifier(secret.getBytes())

  /**
   * Implicit signer for JWS objects.
   *
   * Signs a given claims set and returns a signed JWS object.
   */
  implicit def jwtSigner(claim: JWTClaimsSet): JWSObject = {
    val jwsObject = new JWSObject(header, new Payload(claim.toJSONObject()))
    jwsObject.sign(signer)
    jwsObject
  }

  /**
   * Implicit confirmer for JWS objects.
   *
   * Confirms the signature of a given JWS object and returns its claims set.
   * Returns `None` when the signature does not verify or when the payload
   * cannot be parsed as a claims set.
   */
  implicit def jwtConfirmer(token: JWSObject): Option[JWTClaimsSet] = {
    if (token.verify(verifier)) {
      try {
        Option(JWTClaimsSet.parse(token.getPayload().toJSONObject()))
      } catch {
        case _: ParseException => None
      }
    } else {
      None
    }
  }
}
/**
 * Claim builder: a function from an input (usually an authentication result)
 * to an optional claims set.
 *
 * Multiple claim builders can be chained with the `&&` operator.
 */
trait JwtClaimBuilder[T] extends (T => Option[JWTClaimsSet]) { self =>

  /**
   * Builds a claims set from `input`, or `None` if no claims can be built.
   */
  def apply(input: T): Option[JWTClaimsSet]

  /**
   * Chains `after` behind this builder.
   *
   * Claims appended by `after` take precedence over claims produced by this
   * builder when both set the same field.
   *
   * @param after
   *     Claim builder applied after this one.
   * @return
   *     New builder combining this builder's claims with `after`'s.
   */
  def &&(after: T => Option[JWTClaimsSet]): T => Option[JWTClaimsSet] = { input =>
    mergeClaims(self(input), after(input))
  }

  /**
   * Merges two claim sets into one.
   *
   * Claims in `second` take precedence over claims in `first`. The result is
   * `None` if either input is `None`.
   */
  protected def mergeClaims(first: Option[JWTClaimsSet],
                            second: Option[JWTClaimsSet]): Option[JWTClaimsSet] =
    first flatMap { claims1 =>
      second map { claims2 =>
        // copy the first set, then let the second overwrite shared keys
        val merged = new JSONObject(claims1.toJSONObject())
        merged.merge(claims2.toJSONObject())
        JWTClaimsSet.parse(merged)
      }
    }
}
/** Companion object of [[JwtClaimBuilder]]. */
object JwtClaimBuilder {
  /**
   * Returns a claim builder which sets the "exp" field to an expiration time.
   *
   * The expiration time is `duration` after the moment the builder runs.
   * If `duration` is less than one second, it will be treated as 0.
   *
   * @param duration
   *     Valid duration of a JWT.
   *     Minimum resolution is one second.
   */
  def claimExpiration[T](duration: Duration): T => Option[JWTClaimsSet] =
    input => {
      // BUG FIX: the previous Calendar-based code passed
      // `duration.toSeconds.toInt` to Calendar.add, which silently wrapped
      // for durations longer than Int.MaxValue seconds (~68 years).
      // Long millisecond arithmetic avoids the overflow; `toSeconds * 1000L`
      // keeps the documented one-second resolution.
      val validUntil = new Date(System.currentTimeMillis() + duration.toSeconds * 1000L)
      Try(new JWTClaimsSet.Builder()
        .expirationTime(validUntil)
        .build
      ).toOption
    }

  /**
   * Returns a claim builder which sets the "iss" field to a specified string.
   *
   * @param issuer
   *     Issuer of a JWT.
   */
  def claimIssuer[T](issuer: String): T => Option[JWTClaimsSet] =
    input => {
      Try(new JWTClaimsSet.Builder()
        .issuer(issuer)
        .build
      ).toOption
    }

  /**
   * Returns a claim builder which sets the "sub" field.
   *
   * @param subject
   *     Extracts the subject from an input.
   */
  def claimSubject[T](subject: T => String): T => Option[JWTClaimsSet] =
    input => {
      Try(new JWTClaimsSet.Builder()
        .subject(subject(input))
        .build
      ).toOption
    }

  /**
   * Implicitly converts a claim builder function into a [[JwtClaimBuilder]].
   */
  implicit def toJwtClaimBuilder[T](f: T => Option[JWTClaimsSet]):
      JwtClaimBuilder[T] =
    new JwtClaimBuilder[T] {
      override def apply(input: T) = f(input)
    }
}
/** Provides common JWS extractors. */
object JwsExtractor {
  /**
   * Parses a serialized JWS, yielding `None` on malformed input.
   *
   * Shared by every extractor so that parse-failure handling stays
   * consistent (previously each extractor duplicated this try/catch).
   */
  private def parseJws(serialized: String): Option[JWSObject] =
    try
      Some(JWSObject.parse(serialized))
    catch {
      case _: ParseException => None
    }

  /**
   * Extracts a JWS from "Authorization" header of an HTTP request.
   *
   * A JWS should be sent through "Authorization" header like,
   * {{{
   * Authorization: Bearer JWS
   * }}}
   *
   * @return
   *     Directive that provides the parsed JWS, or `None` if the request
   *     has no "Authorization" header, the header lacks the "Bearer "
   *     prefix, or the token cannot be parsed.
   */
  val extractJwsFromAuthorizationHeader: Directive1[Option[JWSObject]] =
    optionalHeaderValueByName("Authorization") flatMap { tokenOpt =>
      provide {
        tokenOpt flatMap { token =>
          val prefix = "Bearer "
          if (token.startsWith(prefix))
            parseJws(token.substring(prefix.length))
          else
            None
        }
      }
    }

  /**
   * Extracts a JWS from a cookie that has a given name.
   *
   * @param name
   *     Name of a cookie from which a JWS is to be extracted.
   * @return
   *     Directive that provides the parsed JWS, or `None` if no cookie
   *     corresponding to `name` exists or its content cannot be parsed.
   */
  def extractJwsFromCookie(name: String): Directive1[Option[JWSObject]] =
    optionalCookie(name) flatMap { ckOpt =>
      provide(ckOpt.flatMap(ck => parseJws(ck.content)))
    }
}
/**
 * Verifies a claims set.
 *
 * An instance of this trait can be supplied as the `verifier` argument of
 * the `authorizeToken` directive.
 */
trait JwtClaimVerifier extends (JWTClaimsSet => Option[JWTClaimsSet]) { self =>
  /**
   * Verifies the given claims set.
   *
   * @param claims
   *     Claims set under verification.
   * @return
   *     Verified claims set; `None` if verification failed.
   */
  def apply(claims: JWTClaimsSet): Option[JWTClaimsSet]

  /**
   * Chains `after` behind this verifier.
   *
   * `after` only runs when this verifier succeeds; its result becomes the
   * result of the chain.
   *
   * @param after
   *     Verifier applied after this one.
   * @return
   *     New verifier applying this verifier and then `after`.
   */
  def &&[T](after: JWTClaimsSet => Option[T]): JWTClaimsSet => Option[T] =
    claims => self(claims).flatMap(after)
}
/** Companion object of [[JwtClaimVerifier]]. */
object JwtClaimVerifier {
  /**
   * Returns a claim verifier that tests the expiration time.
   *
   * Verification fails (yields `None`) when the claims set has no "exp"
   * field or when the expiration time lies in the past.
   */
  def verifyNotExpired: JWTClaimsSet => Option[JWTClaimsSet] =
    claims => {
      val now = Calendar.getInstance().getTime()
      // compareTo <= 0 accepts claims expiring exactly now or later.
      Option(claims.getExpirationTime())
        .filter(validUntil => now.compareTo(validUntil) <= 0)
        .map(_ => claims)
    }

  /** Implicitly wraps a claim verifier function as a [[JwtClaimVerifier]]. */
  implicit def toJwtClaimVerifier(f: JWTClaimsSet => Option[JWTClaimsSet]) =
    new JwtClaimVerifier {
      override def apply(claims: JWTClaimsSet): Option[JWTClaimsSet] = f(claims)
    }
}
| kikuomax/spray-jwt | src/main/scala/com/github/kikuomax/spray/jwt/JwtDirectives.scala | Scala | mit | 13,743 |
package org.rogach.scallop

// Exercises the Option-like combinators exposed by ScallopOption:
// toString rendering, map, collect, filter/filterNot, orElse, and
// for-comprehension support.
class ScallopOptionOpsTest extends ScallopTestBase {

  // A defined option renders as ScallopSome(value).
  test ("printing ScallopOption") {
    object Conf extends ScallopConf(List("-a","3")) {
      val apples = opt[Int]("apples")
      verify()
    }
    Conf.apples.toString should equal ("ScallopSome(3)")
  }

  // map over a flag inverts its value whether or not it was supplied
  // on the command line (here -a is supplied, -b is not).
  test ("toggle flag option") {
    object Conf extends ScallopConf(List("-a")) {
      val apples = opt[Boolean]("apples").map(!_)
      val bananas = opt[Boolean]("bananas").map(!_)
      verify()
    }
    Conf.apples() should equal (false)
    Conf.bananas() should equal (true)
  }

  // One case per combinator; suffix 1 = predicate passes, 2 = predicate
  // fails. The bananas case checks collect with a non-matching partial
  // function (String value, Int-matching case) yielding None.
  test ("option operations - all operations") {
    object Conf extends ScallopConf(List("-a","3","-b","5")) {
      val apples = opt[Int]("apples")
      val applesCollect = apples.collect({case a:Int => a + 1})
      val applesFilter1 = apples.filter(_>2)
      val applesFilter2 = apples.filter(_>5)
      val applesFilterNot = apples.filterNot(_>5)
      val applesMap1 = apples.map(_+2)
      val applesMap2 = apples.filter(_>5).map(_+2)
      val applesOrElse1 = apples.orElse(Some(1))
      val applesOrElse2 = apples.filter(_>5).orElse(Some(1))
      val bananas = opt[String]("bananas").collect({case b:Int => b + 1}:PartialFunction[Any,Int])
      verify()
    }
    Conf.applesCollect.toOption should equal (Some(4))
    Conf.applesFilter1.toOption should equal (Some(3))
    Conf.applesFilter2.toOption should equal (None)
    Conf.applesFilterNot.toOption should equal (Some(3))
    Conf.applesMap1.toOption should equal (Some(5))
    Conf.applesMap2.toOption should equal (None)
    Conf.applesOrElse1.toOption should equal (Some(3))
    Conf.applesOrElse2.toOption should equal (Some(1))
    Conf.bananas.toOption should equal (None)
  }

  // ScallopOption supports map/flatMap/withFilter, so it works in for
  // comprehensions; a failing guard short-circuits the result to None.
  test ("for comprehensions with ScallopOptions") {
    object Conf extends ScallopConf(Seq("-a","3","-b","2")) {
      val apples = opt[Int]("apples")
      val bananas = opt[Int]("bananas")
      val weight = for {
        a <- apples
        if a > 2
        b <- bananas
      } yield a * 2 + b * 3
      val weight2 = for { a <- apples; if a < 2; b <- bananas } yield a * 2 + b * 3
      verify()
    }
    Conf.weight.toOption should equal (Some(12))
    Conf.weight2.toOption should equal (None)
  }
}
| scallop/scallop | jvm/src/test/scala/ScallopOptionOpsTest.scala | Scala | mit | 2,246 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.errors.TreeNodeException
import org.apache.spark.sql.catalyst.trees
import org.apache.spark.sql.types.DataType
/** Direction in which a [[SortOrder]] orders its child expression. */
sealed abstract class SortDirection
/** Sort from smallest to largest. */
case object Ascending extends SortDirection
/** Sort from largest to smallest. */
case object Descending extends SortDirection
/**
 * An expression that can be used to sort a tuple. This class extends
 * expression primarily so that transformations over expression will descend
 * into its child.
 */
case class SortOrder(child: Expression, direction: SortDirection) extends Expression
  with trees.UnaryNode[Expression] {

  // Ordering does not alter the type or nullability of the sort key.
  override def dataType: DataType = child.dataType
  override def nullable: Boolean = child.nullable

  // SortOrder itself is never evaluated; it only carries ordering metadata.
  override def eval(input: Row = null): EvaluatedType =
    throw new TreeNodeException(this, s"No function to evaluate expression. type: ${this.nodeName}")

  override def toString: String = {
    val dir = direction match {
      case Ascending  => "ASC"
      case Descending => "DESC"
    }
    s"$child $dir"
  }
}
| andrewor14/iolap | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SortOrder.scala | Scala | apache-2.0 | 1,822 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and retrieves a sample of code snippets that meet specific criteria, giving a basic overview of the dataset's contents without deeper analysis.