code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* @author Daniel Strebel
* @author Philip Stutz
*
* Copyright 2012 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.signalcollect
import org.scalatest.Matchers
import org.scalatest.FlatSpec
import com.signalcollect.util.TestAnnouncements
class GraphModificationSpec extends FlatSpec with Matchers with TestAnnouncements {

  // NOTE: this spec previously used bare `x === y` expressions. With ScalaTest's
  // Matchers/TripleEquals, `===` merely *returns* a Boolean; the result was
  // silently discarded, so none of the statistics were actually verified.
  // They are now enforced via `shouldBe`.

  "GraphEditor" should "support modification functions" in {
    val graph = GraphBuilder.build
    try {
      graph.modifyGraph({ _.addVertex(new GraphModificationVertex(0, 1)) }, Some(0))
      graph.modifyGraph({ _.addVertex(new GraphModificationVertex(1, 1)) }, Some(1))
      graph.modifyGraph({ _.addVertex(new GraphModificationVertex(2, 1)) }, Some(2))
      graph.modifyGraph({ _.addVertex(new GraphModificationVertex(3, 1)) }, Some(3))
      graph.modifyGraph({ _.addEdge(0, new StateForwarderEdge(1)) }, Some(0))
      graph.modifyGraph({ _.addEdge(1, new StateForwarderEdge(3)) }, Some(1))
      var statistics = graph.execute
      graph.aggregate(new CountVertices[GraphModificationVertex]) shouldBe 4
      statistics.aggregatedWorkerStatistics.numberOfVertices shouldBe 4
      statistics.aggregatedWorkerStatistics.verticesAdded shouldBe 4
      statistics.aggregatedWorkerStatistics.verticesRemoved shouldBe 0
      statistics.aggregatedWorkerStatistics.numberOfOutgoingEdges shouldBe 2
      statistics.aggregatedWorkerStatistics.outgoingEdgesAdded shouldBe 2
      statistics.aggregatedWorkerStatistics.outgoingEdgesRemoved shouldBe 0
      // Removing vertex 0 also removes the edge 0 -> 1, hence one edge left.
      graph.modifyGraph({ _.removeVertex(0, true) }, Some(0))
      graph.modifyGraph({ _.removeVertex(2, true) }, Some(2))
      statistics = graph.execute
      graph.aggregate(new CountVertices[GraphModificationVertex]) shouldBe 2
      statistics.aggregatedWorkerStatistics.numberOfVertices shouldBe 2
      statistics.aggregatedWorkerStatistics.verticesAdded shouldBe 4
      statistics.aggregatedWorkerStatistics.verticesRemoved shouldBe 2
      statistics.aggregatedWorkerStatistics.numberOfOutgoingEdges shouldBe 1
      statistics.aggregatedWorkerStatistics.outgoingEdgesAdded shouldBe 2
      statistics.aggregatedWorkerStatistics.outgoingEdgesRemoved shouldBe 1
    } finally {
      graph.shutdown
    }
  }

  it should "keep accurate statistics when using individual vertex removals" in {
    val graph = GraphBuilder.build
    try {
      graph.addVertex(new GraphModificationVertex(0, 1))
      graph.addVertex(new GraphModificationVertex(1, 1))
      graph.addVertex(new GraphModificationVertex(2, 1))
      graph.addVertex(new GraphModificationVertex(3, 1))
      graph.addEdge(0, new StateForwarderEdge(1))
      graph.addEdge(1, new StateForwarderEdge(3))
      var statistics = graph.execute
      graph.aggregate(new CountVertices[GraphModificationVertex]) shouldBe 4
      statistics.aggregatedWorkerStatistics.numberOfVertices shouldBe 4
      statistics.aggregatedWorkerStatistics.verticesAdded shouldBe 4
      statistics.aggregatedWorkerStatistics.verticesRemoved shouldBe 0
      statistics.aggregatedWorkerStatistics.numberOfOutgoingEdges shouldBe 2
      statistics.aggregatedWorkerStatistics.outgoingEdgesAdded shouldBe 2
      statistics.aggregatedWorkerStatistics.outgoingEdgesRemoved shouldBe 0
      graph.removeVertex(0, true)
      graph.removeVertex(2, true)
      statistics = graph.execute
      graph.aggregate(new CountVertices[GraphModificationVertex]) shouldBe 2
      statistics.aggregatedWorkerStatistics.numberOfVertices shouldBe 2
      statistics.aggregatedWorkerStatistics.verticesAdded shouldBe 4
      statistics.aggregatedWorkerStatistics.verticesRemoved shouldBe 2
      statistics.aggregatedWorkerStatistics.numberOfOutgoingEdges shouldBe 1
      statistics.aggregatedWorkerStatistics.outgoingEdgesAdded shouldBe 2
      statistics.aggregatedWorkerStatistics.outgoingEdgesRemoved shouldBe 1
    } finally {
      graph.shutdown
    }
  }
}
// Minimal vertex used by the spec above; its collect function always yields 1,
// so the graph converges immediately regardless of signals received.
class GraphModificationVertex(id: Int, state: Int) extends DataGraphVertex(id, state) {
  def collect = 1
}
| mageru/signal-collect | src/test/scala/com/signalcollect/GraphModificationSpec.scala | Scala | apache-2.0 | 4,414 |
package com.twitter.finagle.mux.exp.pushsession
import com.twitter.finagle.mux.exp.pushsession.MessageWriter.DiscardResult
import com.twitter.finagle.mux.transport.Message
import com.twitter.util.Future
import scala.collection.mutable
/** Helper class for testing what gets written into a real `MessageWriter` */
private class MockMessageWriter extends MessageWriter {

  /** Every message handed to `write`, in arrival order, for later inspection. */
  val messages = mutable.Queue.empty[Message]

  /** Records the message; nothing is ever actually transmitted. */
  def write(message: Message): Unit = {
    messages.enqueue(message)
  }

  /** Tag-based discards are not simulated; always reports a miss. */
  def removeForTag(id: Int): DiscardResult = DiscardResult.NotFound

  /** Draining is a no-op: the mock is always "done" immediately. */
  def drain: Future[Unit] = Future.Unit
}
| mkhq/finagle | finagle-mux/src/test/scala/com/twitter/finagle/mux/exp/pushsession/MockMessageWriter.scala | Scala | apache-2.0 | 597 |
package models.jbehave
import org.specs2.matcher.{PathMatchers, FileMatchers}
import org.specs2.mutable.Specification
import java.io.File
import org.jbehave.core.reporters.Format
import scala.collection.JavaConversions._
import com.technologyconversations.bdd.steps.WebSteps
import models.RunnerClass
class JBehaveRunnerSpec extends Specification with PathMatchers with FileMatchers {

  // Shared, read-only fixture: a runner configured with the bundled test
  // stories and a single WebSteps class, reused by all examples below.
  val storiesDirPath = "test/stories"
  val storyPaths = List(s"$storiesDirPath/**/*.story")
  val reportsPath = "/test/jbehave/"
  val params = Map("webDriver" -> "firefox", "webUrl" -> "http://www.technologyconversations.com")
  val steps = List(RunnerClass("com.technologyconversations.bdd.steps.WebSteps", params))
  val runner = new JBehaveRunner(storyPaths, steps, List(), reportsPath)

  "JBehaveRunner#newInstance" should {
    "store storyPaths" in {
      runner.getStoryPaths must be equalTo storyPaths
    }
    "store stepsInstanceNames" in {
      runner.getStepsInstances must have size 1
    }
    "store reportsPath" in {
      runner.getReportsPath must be equalTo reportsPath
    }
  }

  "JBehaveRunner#storyPaths" should {
    "return all stories" in {
      // Compare against the number of *.story files actually on disk.
      val expectedSize = new File(storiesDirPath).list.count(_.endsWith(".story"))
      runner.storyPaths must have size expectedSize
    }
  }

  "JBehaveRunner#setStepsInstances" should {
    "return array of instances" in {
      runner.getStepsInstances must have size 1
    }
    "throw exception if class does NOT exist" in {
      // Instantiating the runner reflectively loads the steps classes, so an
      // unknown class name must fail at construction time.
      val testSteps = List(
        RunnerClass("com.technologyconversations.bdd.steps.NonExistentSteps", Map())
      )
      new JBehaveRunner(storyPaths, testSteps, List(), reportsPath) should throwA[Exception]
    }
    "have all params set" in {
      val webSteps = runner.getStepsInstances.toList(0).asInstanceOf[WebSteps]
      webSteps.getParams.toMap must havePair("webDriver" -> "firefox")
    }
  }

  "JBehaveRunner#configuration" should {
    val reporter = runner.configuration().storyReporterBuilder()
    "use reportsPath" in {
      reporter.relativeDirectory() must be equalTo reportsPath
    }
    "use console, html and xml formats" in {
      reporter.formats().toList must contain(Format.CONSOLE, Format.HTML, Format.XML)
    }
  }

  "JBehaveRunner#getSourceDir" should {
    "return File with target directory prefixed to the path" in {
      val path = "some/path"
      val sourceDir = runner.getSourceDir(path)
      val expected = new File(s"target/$path").getAbsolutePath
      sourceDir.getPath must beEqualToIgnoringSep(expected)
    }
  }

  "JBehaveRunner#getDestinationDir" should {
    "return File created from the path" in {
      val path = "some/path"
      val destinationDir = runner.getDestinationDir(path)
      val expected = new File(path).getAbsolutePath
      destinationDir.getPath must beEqualToIgnoringSep(expected)
    }
    "return File with target/universal/stage removed" in {
      // Paths produced by the sbt-native-packager staging layout are unwrapped.
      val destinationDir = runner.getDestinationDir("some/target/universal/stage/path")
      val expected = new File("some/path").getAbsolutePath
      destinationDir.getPath must beEqualToIgnoringSep(expected)
    }
  }
}
| TechnologyConversations/TechnologyConversationsBdd | test/models/jbehave/JBehaveRunnerSpec.scala | Scala | apache-2.0 | 3,172 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.compiler
import org.junit.Test
import org.junit.Assert._
import org.junit.Assume._
import org.scalajs.testsuite.utils.Platform._
class ModuleInitTest {
  import ModuleInitTest._

  /**
   * Exercises the module-initialization cycle between `A` and its nested
   * object `Y`: accessing `A.Y` starts initializing `Y`, whose constructor
   * needs `A`, whose own initializer evaluates `cs = Vector(Y)` while `Y` is
   * still mid-construction. NOTE(review): the expected observations below
   * imply that the in-cycle read of `Y` yields `null` while the initializer
   * still runs exactly once — confirm against the Scala.js compliant-module
   * initialization semantics.
   */
  @Test def executeModuleInitializersOnce(): Unit = {
    assumeTrue("Assumed compliant Module", hasCompliantModuleInit)
    val x = A.Y
    val y = A.cs.head
    // The directly-accessed module is fully constructed ...
    assertTrue(x ne null)
    // ... but the reference captured in `cs` during the init cycle was null.
    assertTrue(y eq null)
    // Repeated access returns the same singleton.
    assertTrue(x eq A.Y)
    // Y's side-effecting initializer ran exactly once despite the cycle.
    assertEquals(1, Counter.c)
  }
}
object ModuleInitTest {
  // Counts how many times Y's initializer body has executed.
  object Counter {
    var c: Int = 0
  }
  object A {
    private def blankSym = ""
    sealed abstract class C(symbol: String)
    // Y participates in an initialization cycle: its constructor needs A
    // (for blankSym), while A's initializer references Y (via cs).
    object Y extends C(blankSym) {
      Counter.c += 1
    }
    // Evaluated during A's initialization; captures whatever Y reference is
    // observable at that point in the cycle.
    val cs = Vector[C](Y)
  }
}
| scala-js/scala-js | test-suite/js/src/test/scala/org/scalajs/testsuite/compiler/ModuleInitTest.scala | Scala | apache-2.0 | 1,011 |
package controllers
import lila.app._
import views._
final class ForumCateg(env: Env) extends LilaController(env) with ForumController {

  /** Forum landing page: lists the categories visible to the current user. */
  def index =
    Open { implicit ctx =>
      pageHit
      NotForKids {
        for {
          // NOTE(review): `??` appears to be lila's "empty result when None"
          // fold on the optional user id — confirm against lila's helpers.
          teamIds <- ctx.userId ?? teamCache.teamIdsList
          categs <- categApi.list(teamIds, ctx.me)
          // Warm the light-user cache for last-poster display names.
          _ <- env.user.lightUserApi preloadMany categs.flatMap(_.lastPostUserId)
        } yield html.forum.categ.index(categs)
      }
    }

  /** Paginated topic listing of a single category (404 when unknown). */
  def show(slug: String, page: Int) =
    Open { implicit ctx =>
      NotForKids {
        // Cap paging at 50 pages to bound the query.
        Reasonable(page, 50, errorPage = notFound) {
          OptionFuOk(categApi.show(slug, page, ctx.me)) { case (categ, topics) =>
            for {
              canWrite <- isGrantedWrite(categ.slug)
              // Sticky topics are only shown on the first page.
              stickyPosts <- (page == 1) ?? env.forum.topicApi.getSticky(categ, ctx.me)
              _ <- env.user.lightUserApi preloadMany topics.currentPageResults.flatMap(_.lastPostUserId)
            } yield html.forum.categ.show(categ, topics, canWrite, stickyPosts)
          }
        }
      }
    }
}
| luanlv/lila | app/controllers/ForumCateg.scala | Scala | mit | 1,101 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.scheduler
import scala.collection.mutable
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._
import org.apache.spark.streaming._
import org.apache.spark.streaming.scheduler.rate.RateEstimator
// Test suite for the rate controller.
class RateControllerSuite extends TestSuiteBase {

  override def useManualClock: Boolean = false

  override def batchDuration: Duration = Milliseconds(50)

  // The rate controller must publish rate updates once batches complete.
  test("RateController - rate controller publishes updates after batches complete") {
    // The interval that separates batches is the batch duration.
    val ssc = new StreamingContext(conf, batchDuration)
    withStreamingContext(ssc) { ssc =>
      val dstream = new RateTestInputDStream(ssc)
      // Register the DStream as an output stream of the DStreamGraph.
      dstream.register()
      ssc.start()

      eventually(timeout(10.seconds)) {
        assert(dstream.publishedRates > 0)
      }
    }
  }

  // Published rates must reach the receivers.
  test("ReceiverRateController - published rates reach receivers") {
    // The interval that separates batches is the batch duration.
    val ssc = new StreamingContext(conf, batchDuration)
    withStreamingContext(ssc) { ssc =>
      val estimator = new ConstantEstimator(100)
      val dstream = new RateTestInputDStream(ssc) {
        override val rateController =
          Some(new ReceiverRateController(id, estimator))
      }
      // Register the DStream as an output stream of the DStreamGraph.
      dstream.register()
      ssc.start()

      // Wait for receiver to start
      eventually(timeout(5.seconds)) {
        RateTestReceiver.getActive().nonEmpty
      }

      // Update rate in the estimator and verify whether the rate was published to the receiver
      def updateRateAndVerify(rate: Long): Unit = {
        estimator.updateRate(rate)
        eventually(timeout(5.seconds)) {
          assert(RateTestReceiver.getActive().get.getDefaultBlockGeneratorRateLimit() === rate)
        }
      }

      // Verify multiple rate updates
      Seq(100, 200, 300).foreach { rate =>
        updateRateAndVerify(rate)
      }
    }
  }
}
// A rate estimator that always reports a constant, externally-set rate.
private[streaming] class ConstantEstimator(@volatile private var rate: Long)
    extends RateEstimator {

  /** Overwrites the constant rate reported by all subsequent `compute` calls. */
  def updateRate(newRate: Long): Unit =
    rate = newRate

  /**
   * Ignores every batch statistic and always proposes the currently
   * configured constant rate.
   */
  def compute(
      time: Long,
      elements: Long,
      processingDelay: Long,
      schedulingDelay: Long): Option[Double] = Some(rate)
}
| tophua/spark1.52 | streaming/src/test/scala/org/apache/spark/streaming/scheduler/RateControllerSuite.scala | Scala | apache-2.0 | 3,425 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogdebugger.ui.fieldvisualizations.matrix
import cogx.runtime.debugger.ProbedField
import libcog._
import cogdebugger.ui.fieldvisualizations.{Viewer, ViewerSuperPanel, EventDrivenViewer}
import cogdebugger.Memoize
import cogdebugger.ui.components.MouseDragZoom
/**
* Host panel for Matrix field visualizations. Provides a toolbar with common
* controls and a combo box for selecting between the different matrix field
* visualizations.
*
* If you've implemented a new visualization for matrix fields and want to plug
* it into the GUI, provide a name for it to the `viewerNames` list and a case
* for it in the `viewerNameToInstance` memoized factory.
*
* Created by gonztobi on 2/24/14.
*/
class MatrixFieldSuperPanel protected (target: ProbedField,
        options: List[String],
        factory: (String) => Viewer)
    extends ViewerSuperPanel(target, options, factory) {
  // Matrix fields are exactly the fields carrying a 2-D tensor at each point.
  require(target.fieldType.tensorShape.dimensions == 2,
    "MatrixFieldSuperPanel only works with Matrix Fields!")
}
object MatrixFieldSuperPanel {

  // Display names for the selectable matrix-field visualizations.
  val MatrixPanelName = "Matrices"
  val MatrixComponentsName = "Matrix Components"

  /** Builds a super panel for `target`, offering only the viewers that are
    * applicable to the field's dimensionality. */
  def apply(target: ProbedField) = {
    val fieldShape = target.fieldType.fieldShape
    val tensorShape = target.fieldType.tensorShape
    // The components view only supports fields with fewer than 3 dimensions.
    val viewerNames =
      if (fieldShape.dimensions < 3)
        List(MatrixComponentsName, MatrixPanelName)
      else
        List(MatrixPanelName)
    // Memoized factory so each viewer is instantiated at most once per panel.
    val memoizer = Memoize[String, EventDrivenViewer] {
      case MatrixPanelName => new MatrixMemoryView(target, fieldShape, tensorShape) with MouseDragZoom
      case MatrixComponentsName => new MatrixComponentsView(target.fieldType) with MouseDragZoom
    }
    new MatrixFieldSuperPanel(target, viewerNames, memoizer)
  }
}
#!/bin/sh
exec scala "$0" "$@"
!#
/*
* This script converts java source files that define tag classes into
* scala files that can be used to generate scaladocs.
*
* The resulting scala files won't work for actually running code, but
* they're close enough to get into the scaladocs.
*
* The script rewrites six files in src/main/java/org/scalatest
*
* DoNotDiscover.java
* Ignore.java
* Finders.java
* TagAnnotation.java
* WrapWith.java
* tags/Slow.java
*
* It copies them into target/docsrc/org/scalatest, removing java annotations
* and converting them into similar scala code and preserving their header
* comments so those make it into the scaladocs.
*/
import java.io.File
import java.io.PrintWriter
import java.util.regex.Pattern
import scala.io.Source
// Location of the java tag-annotation sources and the scaladoc staging area.
val srcDir = "src/main/java/org/scalatest"
val docsrcDir = "target/docsrc/org/scalatest"
//
// Splits java file's contents into two pieces: a top and body.
// The top contains the first scaladoc encountered plus
// everything else up through the declared class's name. The
// body contains the following curly braces and their contents.
//
// Returns (top, body): `top` is everything through the annotation's declared
// name (including its first scaladoc), `body` is the brace-delimited interior.
// NOTE(review): the doubled backslashes in these pattern literals look like an
// escaping artifact of how this file was captured; in the original source they
// presumably match literal "/**" and "*/" — confirm against the repository.
def parseContents(className: String, text: String): (String, String) = {
  val topDocPat = Pattern.compile("""(?s)^(.*?/\\*\\*.*?\\*/)(.*)$""")
  val topDocMat = topDocPat.matcher(text)
  topDocMat.find()
  val bodyPat = Pattern.compile("""(?sm)(.*? @interface """ + className +
    """) *(\\{.*\\})""")
  val bodyMat = bodyPat.matcher(topDocMat.group(2))
  bodyMat.find()
  (topDocMat.group(1) + bodyMat.group(1), bodyMat.group(2))
}
//
// Constructs a modified class body where the java declaration of the value()
// method, where present, is replaced by a scala version.
//
// Rewrites the java declaration of `value()` (when present) into its scala
// equivalent. Returns "" when no value() method is found — presumably because
// marker annotations have no meaningful body; TODO confirm that is intended
// rather than silently dropping unexpected content.
def genNewBody(body: String): String = {
  val matcher =
    Pattern.compile("""(?m)^\\s*(.*?) *value\\(\\);""").matcher(body)
  if (matcher.find()) {
    val valueType = matcher.group(1)
    // Map the java return type onto the corresponding scala type.
    val newValueType =
      valueType match {
        case "Class<? extends Suite>" => "Class[_ <: Suite]"
        case "String" => "String"
        case "String[]" => "Array[String]"
        case _ =>
          throw new RuntimeException("unexpected valueType [" +
            valueType + "]")
      }
    val buf = new StringBuffer
    matcher.appendReplacement(buf, " def value(): "+ newValueType)
    matcher.appendTail(buf)
    buf.toString
  }
  else ""
}
//
// Processes source code above the body. If code contains scaladoc it
// splits that out and processes the code above and below it separately.
//
// Recursively walks the pre-body source: scaladoc comments are preserved
// verbatim, while the code between them is scrubbed of java-only constructs
// via processCode. Recursion depth is bounded by the number of comments.
def genNewTop(top: String): String = {
  val matcher = Pattern.compile("""(?s)^(.*?)(/\\*\\*.*?\\*/)(.*)$""").matcher(top)
  if (matcher.find()) {
    val code = matcher.group(1)
    val comment = matcher.group(2)
    val remainder = matcher.group(3)
    processCode(code) + comment + genNewTop(remainder)
  }
  else {
    // No (further) comment: the whole remainder is code.
    processCode(top)
  }
}
//
// Removes java code in order to make it palatable to scaladoc processor.
//
// Strips java-only annotations, modifiers and imports so the remaining text
// parses (well enough) as scala for the scaladoc generator. The order matters:
// the specific @TagAnnotation(...) form must be removed before the bare one.
def processCode(text: String): String = {
  text.replaceAll("""@Retention\\(.*?\\)""", "")
    .replaceAll("""@Target\\(.*?\\)""", "")
    .replaceAll("""@TagAnnotation.*\\)""", "")
    .replaceAll("""@TagAnnotation""", "")
    .replaceAll("""@Inherited""", "")
    .replaceAll("""public *@interface""", "")
    .replaceAll("""(?m)^import.*$""", "")
}
// Entry point: converts each listed java tag file into a scala stub under
// target/docsrc, preserving header comments so they appear in the scaladocs.
def main() {
  println("docjavatags.scala: porting java tag files to scala")
  val filenames = Set("DoNotDiscover.java",
    "Ignore.java",
    "Finders.java",
    "TagAnnotation.java",
    "WrapWith.java",
    "tags/ChromeBrowser.java",
    "tags/FirefoxBrowser.java",
    "tags/HtmlUnitBrowser.java",
    "tags/InternetExplorerBrowser.java",
    "tags/SafariBrowser.java",
    "tags/Slow.java",
    "tags/CPU.java",
    "tags/Disk.java",
    "tags/Network.java",
    "tags/Retryable.java"
  )
  for (filename <- filenames) {
    val contents = Source.fromFile(srcDir +"/"+ filename).mkString
    // Class name = file name without directory prefix or .java suffix.
    val className =
      filename.replaceFirst("""^.*/""", "").replaceFirst("""\\.java$""", "")
    val (top, body) = parseContents(className, contents)
    val newTop = genNewTop(top)
    val newBody = genNewBody(body)
    // Turn the annotation declaration into a scala trait of the same name.
    val newContents =
      newTop
        .replaceFirst(className + "$",
          "trait "+ className +
            " extends java.lang.annotation.Annotation "+ newBody +
            "\\n")
    // Tag files may live in a subdirectory (tags/); mirror it in the output.
    if (filename.contains("/")) {
      val newDir =
        new File(docsrcDir +"/"+ filename.replaceFirst("""/.*$""", ""))
      val result = newDir.mkdirs()
    }
    val newFile =
      new PrintWriter(docsrcDir +"/"+
        filename.replaceFirst("""\\.java$""", ".scala"))
    newFile.print(newContents)
    // TagAnnotation's value() is declared explicitly for the docs.
    if (filename == "TagAnnotation.java")
      newFile.print("{ def value: String}")
    newFile.close()
  }
}
main()
| scalatest/scalatest | support/docjavatags.scala | Scala | apache-2.0 | 5,145 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer (t3l@threelights.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe.blaze
import edu.latrobe._
import edu.latrobe.io.graph._
import scala.collection._
/**
* Whatever you do, do not make this a traversable, because the IntelliJ
* debugger would execute that when it hits a breakpoint.
*/
abstract class BatchPool
  extends InstanceEx[BatchPoolBuilder] {

  /**
   * Input hints passed through at creation time. Might be interesting to augmenters.
   * Should be implemented as a val.
   */
  def inputHints
  : BuildHints

  /**
   * Output hints from this pool. Can be implemented any way you want.
   */
  def outputHints
  : BuildHints

  /**
   * Draws the next batch from the pool.
   *
   * Returns None if the pool has been depleted.
   */
  def draw()
  : BatchPoolDrawContext

  /**
   * Grab works similar to take, but it will instigate updates on the record
   * series and does not ensure the number of records returned.
   */
  /*
  final def take(noBatchesMax: Int)
  : Array[Batch] = {
    val buffer = mutable.ArrayBuffer.empty[Batch]
    buffer.sizeHint(noBatchesMax)
    while (buffer.length < noBatchesMax) {
      draw() match {
        case Some(batch) =>
          val context =
          buffer +=
        case None =>
          return buffer.toArray
      }
    }
    buffer.toArray
  }
  */

  // ---------------------------------------------------------------------------
  //    Handy functions for batch jobs.
  // ---------------------------------------------------------------------------
  // Folds `fn` over every remaining batch in the pool, draining it.
  final def foldLeft[T](z: T)
                       (fn: (T, Batch) => T)
  : T = {
    var result = z
    foreach(
      batch => result = fn(result, batch)
    )
    result
  }

  // Applies `fn` to every remaining batch; each draw context is closed via
  // `using` before the next draw.
  final def foreach(fn: Batch => Unit)
  : Unit = {
    while (true) {
      using(draw())(ctx => {
        // Empty context signals pool depletion; `return` here is a nonlocal
        // return that exits foreach from inside the closure.
        if (ctx.isEmpty) {
          return
        }
        fn(ctx.batch)
      })
    }
    // Unreachable: the loop only terminates via the nonlocal return above.
    throw new UnknownError
  }

  /**
   * Use with caution. Some pools are infinite!
   */
  /*
  final def toArray: Array[Batch] = {
    val builder = Array.newBuilder[Batch]
    foreach(builder += _)
    builder.result()
  }
  */

  // ---------------------------------------------------------------------------
  //    State management.
  // ---------------------------------------------------------------------------
  override def state
  : BatchPoolState = BatchPoolStateEx(super.state)

  override def restoreState(state: InstanceState)
  : Unit = {
    super.restoreState(state.parent)
    state match {
      case state: BatchPoolStateEx =>
      case _ =>
        throw new MatchError(state)
    }
  }
}
abstract class BatchPoolBuilder
  extends InstanceExBuilder2[BatchPoolBuilder, BatchPool, TensorLayout, Iterable[Batch]] {

  // Convenience overload: build a pool over a single sample batch.
  final def build(layoutHint: TensorLayout,
                  sample: Batch)
  : BatchPool = build(layoutHint, sample, InstanceSeed.default)

  final def build(layoutHint: TensorLayout,
                  sample: Batch,
                  seed: InstanceSeed)
  : BatchPool = build(layoutHint, Array(sample), seed)

  // ---------------------------------------------------------------------------
  //    Conversion related
  // ---------------------------------------------------------------------------
  // Renders this builder (and its upstream chain) as a standalone graph,
  // rooted at a synthetic "Data Source" vertex.
  final def toGraph(hints: Option[BuildHints] = None)
  : Graph = {
    val result = Graph()
    val input = Vertex.derive("Data Source")
    toGraphEx(hints, Seq(input), LineStyle.Solid, result.nodes, result.edges)
    result
  }

  /**
   * @param nodeSink Vertices and vertex groups will end up here.
   * @param edgeSink Edge information ends up here.
   * @return The vertex for the current object.
   */
  def toGraphEx(hints: Option[BuildHints],
                inputs: Seq[Vertex],
                edgeStyle: LineStyle,
                nodeSink: mutable.Buffer[Node],
                edgeSink: mutable.Buffer[Edge])
  : (Option[BuildHints], Seq[Vertex])
}
// Refinement of BatchPool that narrows the reported builder type.
abstract class BatchPoolEx[TBuilder <: BatchPoolExBuilder[_]]
  extends BatchPool {

  override def builder
  : TBuilder
}
// F-bounded builder base so repr/doCopy return the concrete builder type.
abstract class BatchPoolExBuilder[TThis <: BatchPoolExBuilder[_]]
  extends BatchPoolBuilder {

  override def repr
  : TThis

  override protected def doCopy()
  : TThis
}
// Snapshot of a pool's restorable state (see BatchPool.state/restoreState).
abstract class BatchPoolState
  extends InstanceState

// Wraps only the parent instance state; BatchPool adds no fields of its own.
final case class BatchPoolStateEx(override val parent: InstanceState)
  extends BatchPoolState
/**
 * Handle returned by a pool's `draw()`; closing it releases whatever
 * resources back the drawn batch.
 */
abstract class BatchPoolDrawContext extends AutoCloseable {

  /**
   * The drawn batch, or `null` when the pool was depleted.
   * Should implement this as constructor argument.
   */
  def batch: Batch

  /** True when the pool had nothing left to draw. */
  final def isEmpty: Boolean = batch == null

  /** True when a batch was actually drawn. */
  final def nonEmpty: Boolean = !isEmpty
}
| bashimao/ltudl | blaze/src/main/scala/edu/latrobe/blaze/BatchPool.scala | Scala | apache-2.0 | 5,280 |
package argonaut
import scalaz._, syntax.equal._
// A paired encoder/decoder for A; delegates each side to its component.
sealed abstract class CodecJson[A] extends EncodeJson[A] with DecodeJson[A] { outer =>
  val Encoder: EncodeJson[A]
  val Decoder: DecodeJson[A]

  override def encode(a: A) = Encoder.encode(a)

  override def decode(c: HCursor) = Decoder.decode(c)

  // Renaming affects only decode-side error reporting; encoding is untouched.
  override def setName(n: String): CodecJson[A] =
    new CodecJson[A] {
      val Encoder = outer.Encoder
      val Decoder = outer.Decoder.setName(n)
    }

  override def tryDecode(c: ACursor) = Decoder.tryDecode(c)

  // Round-trip law: decoding an encoded value must yield the original.
  trait CodecLaw {
    def encodedecode(a: A)(implicit A: Equal[A]) =
      decodeJson(encode(a)).value.exists (_ === a)
  }

  def codecLaw = new CodecLaw {}

  // Invariant-functor map: adapt this codec for A into a codec for B.
  def xmap[B](f: A => B)(g: B => A): CodecJson[B] =
    CodecJson.derived(Encoder contramap g, Decoder map f)
}
object CodecJson extends CodecJsons {
  // Builds a codec from explicit encode/decode functions.
  def apply[A](encoder: A => Json, decoder: HCursor => DecodeResult[A]): CodecJson[A] =
    derived(EncodeJson(encoder), DecodeJson(decoder))

  // Like apply, but the decoder may re-attempt from a failed cursor position.
  def withReattempt[A](encoder: A => Json, decoder: ACursor => DecodeResult[A]): CodecJson[A] =
    derived(EncodeJson(encoder), DecodeJson.withReattempt(decoder))

  // Compile-time macro materialization of a codec for case classes.
  def derive[A]: CodecJson[A] = macro internal.Macros.materializeCodecImpl[A]

  // Packages the implicit encoder/decoder pair already in scope into a codec.
  def derived[A](implicit E: EncodeJson[A], D: DecodeJson[A]): CodecJson[A] =
    new CodecJson[A] {
      val Encoder = E
      val Decoder = D
    }
}
trait CodecJsons extends GeneratedCodecJsons
| etorreborre/argonaut | src/main/scala/argonaut/CodecJson.scala | Scala | bsd-3-clause | 1,415 |
package org.apache.mesos.chronos.scheduler.graph
import java.io.StringWriter
import java.util.concurrent.ConcurrentHashMap
import java.util.logging.Logger
import javax.annotation.concurrent.ThreadSafe
import org.apache.mesos.chronos.scheduler.jobs.{StoredJob, DependencyBasedJob}
import org.jgrapht.experimental.dag.DirectedAcyclicGraph
import org.jgrapht.ext.{DOTExporter, IntegerNameProvider, StringNameProvider}
import org.jgrapht.graph.DefaultEdge
import scala.collection.convert.decorateAsScala._
import scala.collection.{mutable, _}
import scala.collection.mutable.ListBuffer
/**
* This class provides methods to access dependency structures of jobs.
* @author Florian Leibert (flo@leibert.de)
*/
@ThreadSafe
class JobGraph {
  // Job names form the vertices; edges point from parent to dependent child.
  val dag = new DirectedAcyclicGraph[String, DefaultEdge](classOf[DefaultEdge])
  // Number of times each dependency edge has fired; consulted by
  // getExecutableChildren to decide when a multi-parent child may run.
  // Guarded by `lock`, like all dag mutations below.
  val edgeInvocationCount = mutable.Map[DefaultEdge, Long]()
  private[this] val log = Logger.getLogger(getClass.getName)
  // Name -> job lookup; concurrent map, safe to read without `lock`.
  private[this] val jobNameMapping: concurrent.Map[String, StoredJob] = new ConcurrentHashMap[String, StoredJob]().asScala
  private[this] val lock = new Object

  // Parents of `job`, failing fast if any parent is missing from the graph.
  def parentJobs(job: DependencyBasedJob) = parentJobsOption(job) match {
    case None =>
      throw new IllegalArgumentException(s"requirement failed: Job ${job.name} does not have all parents defined!")
    case Some(jobs) =>
      jobs
  }

  // Resolves all declared parents; None when at least one is unknown.
  def parentJobsOption(job: DependencyBasedJob): Option[List[StoredJob]] = {
    val vertexNamePairs = job.parents.map(x => (x, lookupVertex(x))).toList
    var failure = false
    val parents = vertexNamePairs.flatMap {
      case (x: String, y: Option[StoredJob]) =>
        y match {
          case None =>
            log.warning(s"Parent $x of job ${job.name} not found in job graph!")
            failure = true
            None
          case Some(storedJob: StoredJob) =>
            Some(storedJob)
        }
    }
    if (failure)
      None
    else
      Some(parents)
  }

  def getJobForName(name: String): Option[StoredJob] = {
    jobNameMapping.get(name)
  }

  // Swaps the stored job object; the dag vertex (the name) stays the same.
  def replaceVertex(oldVertex: StoredJob, newVertex: StoredJob) {
    require(oldVertex.name == newVertex.name, "Vertices need to have the same name!")
    jobNameMapping.put(oldVertex.name, newVertex)
  }

  //TODO(FL): Documentation here and elsewhere in this file.
  def addVertex(vertex: StoredJob) {
    log.warning("Adding vertex:" + vertex.name)
    require(lookupVertex(vertex.name).isEmpty, "Vertex already exists in graph %s".format(vertex.name))
    require(!vertex.name.isEmpty, "In order to be added to the graph, the vertex must have a name")
    jobNameMapping.put(vertex.name, vertex)
    lock.synchronized {
      dag.addVertex(vertex.name)
    }
    log.warning("Current number of vertices:" + dag.vertexSet.size)
  }

  /* TODO(FL): Replace usage of this method with the hashmap */
  def lookupVertex(vertexName: String): Option[StoredJob] = {
    jobNameMapping.get(vertexName)
  }

  def removeVertex(vertex: StoredJob) {
    log.info("Removing vertex:" + vertex.name)
    require(lookupVertex(vertex.name).isDefined, "Vertex doesn't exist")
    jobNameMapping.remove(vertex.name)
    lock.synchronized {
      dag.removeVertex(vertex.name)
    }
    log.info("Current number of vertices:" + dag.vertexSet.size)
  }

  // Adds edge from -> to; addDagEdge rejects edges that would create a cycle.
  def addDependency(from: String, to: String) {
    lock.synchronized {
      if (!dag.vertexSet.contains(from) || !dag.vertexSet.contains(to))
        throw new NoSuchElementException("Vertex: %s not found in graph. Job rejected!".format(from))
      val edge = dag.addDagEdge(from, to)
      edgeInvocationCount.put(edge, 0L)
    }
  }

  def removeDependency(from: String, to: String) {
    lock.synchronized {
      if (!dag.vertexSet.contains(from) || !dag.vertexSet.contains(to))
        throw new NoSuchElementException("Vertex: %s not found in graph. Job rejected!".format(from))
      val edge = dag.removeEdge(from, to)
      edgeInvocationCount.remove(edge)
    }
  }

  // Clears all jobs, edges and counters.
  def reset() {
    jobNameMapping.clear()
    lock.synchronized {
      edgeInvocationCount.clear()
      val names = ListBuffer[String]()
      import scala.collection.JavaConversions._
      dag.vertexSet.map({
        job =>
          names += job
      })
      dag.removeAllVertices(names)
    }
  }

  /**
   * Retrieves all the jobs that need to be triggered that depend on the finishedJob.
   * @param vertex
   * @return a list.
   */
  //TODO(FL): Avoid locking on every lookup.
  //TODO(FL): This method has some pretty serious side-effects. Refactor.
  def getExecutableChildren(vertex: String): List[String] = {
    val results = new scala.collection.mutable.ListBuffer[String]
    //TODO(FL): Make functional, making locking more efficient
    lock.synchronized {
      /*
        The algorithm:
        R = [ ]
        for each child in children
          E = edges(*, child)
          if |E| == 1
            R = R + child
          else
            edge_counts(vertex, child)++
            if edge_counts(vertex, child) == min(edge_counts(*, child))
              R = R + child
      */
      val children = getChildren(vertex)
      for (child <- children) {
        val edgesToChild = getEdgesToParents(child)
        // Single-parent children are always runnable once that parent is done.
        if (edgesToChild.size == 1) {
          results += child
        }
        else {
          // Multi-parent child: bump this edge's counter, and run the child
          // only if this edge is not "ahead" of the child's slowest parent.
          val currentEdge = dag.getEdge(vertex, child)
          if (!edgeInvocationCount.contains(currentEdge)) {
            edgeInvocationCount.put(currentEdge, 1L)
          } else {
            edgeInvocationCount.put(currentEdge, edgeInvocationCount.get(currentEdge).get + 1)
          }
          val count = edgeInvocationCount.get(currentEdge).get
          val min = edgesToChild.map(edgeInvocationCount.getOrElse(_, 0L)).min
          if (count == min)
            results += child
        }
      }
    }
    log.info("Dependents: [%s]".format(results.mkString(",")))
    results.toList
  }

  // All direct dependents of `job` (targets of its outgoing edges).
  def getChildren(job: String): Iterable[String] = {
    import scala.collection.JavaConversions._
    lock.synchronized {
      dag.edgesOf(job)
        .filter(x => dag.getEdgeSource(x) == job)
        .map(x => dag.getEdgeTarget(x))
    }
  }

  // Zeroes the counters on all edges leading into `vertex`.
  // NOTE(review): getEdgesToParents takes `lock` itself, so this relies on
  // the monitor being reentrant between the two synchronized sections.
  def resetDependencyInvocations(vertex: String) {
    val edges = getEdgesToParents(vertex)
    lock.synchronized {
      edges.foreach({
        edge =>
          edgeInvocationCount.put(edge, 0)
      })
    }
  }

  // All edges whose target is `child` (i.e. edges from its parents).
  def getEdgesToParents(child: String): Iterable[DefaultEdge] = {
    lock.synchronized {
      import scala.collection.JavaConversions._
      dag.edgesOf(child).filter(n => dag.getEdgeTarget(n).eq(child))
    }
  }

  // Renders the dag in graphviz DOT format (not synchronized; callers must
  // tolerate a concurrently-mutating graph or hold `lock` themselves).
  def makeDotFile(): String = {
    val stw = new StringWriter
    val exporter = new DOTExporter[String, DefaultEdge](new IntegerNameProvider, new StringNameProvider, null)
    exporter.export(stw, dag)
    stw.flush()
    val result = stw.getBuffer.toString
    stw.close()
    result
  }
}
| BoopBoopBeepBoop/chronos | src/main/scala/org/apache/mesos/chronos/scheduler/graph/JobGraph.scala | Scala | apache-2.0 | 6,845 |
package akkaviz.serialization.serializers
import akka.actor.ActorSystem
// Verifies that ActorRefSerializer accepts ActorRef values (and rejects
// non-ActorRef values) and renders a ref as the expected JSON shape.
class ActorRefSerializerTest extends SerializerTest {
  test("Is able to serialize ActorRef") {
    // deadLetters is used because it exists on every ActorSystem without
    // having to spawn an actor.
    val system = ActorSystem()
    val ref = system.deadLetters
    // canSerialize should discriminate by runtime type.
    ActorRefSerializer.canSerialize("") shouldBe false
    ActorRefSerializer.canSerialize(ref) shouldBe true
    val json: String = ActorRefSerializer.serialize(ref, context)
    // Expected wire format: type tag plus the actor path.
    json shouldBe """{"$type":"akka.actor.ActorRef","path":"akka://default/deadLetters"}"""
  }
}
| blstream/akka-viz | monitoring/src/test/scala/akkaviz/serialization/serializers/ActorRefSerializerTest.scala | Scala | mit | 514 |
package com.twitter.server.logging
import com.twitter.finagle.tracing.Trace
import com.twitter.logging.{Formatter, Level => TwLevel}
import com.twitter.util.Time
import java.io.{PrintWriter, StringWriter}
import java.util.logging.{Level, LogRecord}
import scala.collection.mutable
import scala.reflect.NameTransformer
/**
* Implements "glog" style log formatting for util/util-logging handlers
*/
private[server] class LogFormatter extends Formatter {
  // Maps both java.util.logging and Twitter log levels onto glog-style
  // single-character severities: D(ebug), I(nfo), W(arning), E(rror).
  private val levels = Map[Level, Char](
    Level.FINEST -> 'D',
    Level.FINER -> 'D',
    Level.FINE -> 'D',
    TwLevel.TRACE -> 'D',
    TwLevel.DEBUG -> 'D',
    Level.CONFIG -> 'I',
    Level.INFO -> 'I',
    TwLevel.INFO -> 'I',
    Level.WARNING -> 'W',
    TwLevel.WARNING -> 'W',
    Level.SEVERE -> 'E',
    TwLevel.ERROR -> 'E',
    TwLevel.CRITICAL -> 'E',
    TwLevel.FATAL -> 'E'
  )
  // Make some effort to demangle scala names.
  // Decodes scalac's encoded identifiers and truncates at the first "$$"
  // (compiler-synthesized suffix), appending "~" to mark the truncation.
  private def prettyClass(name: String): String = {
    var s = NameTransformer.decode(name)
    val dolladolla = s.indexOf("$$")
    if (dolladolla > 0) {
      s = s.substring(0, dolladolla)
      s += "~"
    }
    s
  }
  // Formats one record as:
  //   <level-char> MMdd HH:mm:ss.SSS THREAD<id> [TraceId:<id>] <class>.<method>: <msg>
  // followed by the stack trace when a Throwable is attached.
  override def format(r: LogRecord): String = {
    val msg = formatMessage(r)
    // Pre-size the builder to roughly the message plus metadata overhead.
    val str = new mutable.StringBuilder(msg.length + 30 + 150)
      .append(levels.getOrElse(r.getLevel, 'U')) // 'U' = unknown level
      .append(Time.fromMilliseconds(r.getMillis).format(" MMdd HH:mm:ss.SSS"))
      .append(" THREAD")
      .append(r.getThreadID)
    // Include the Finagle trace id when one is active for this request.
    for (id <- Trace.idOption) {
      str.append(" TraceId:")
      str.append(id.traceId)
    }
    if (r.getSourceClassName != null) {
      str.append(' ').append(prettyClass(r.getSourceClassName))
      if (r.getSourceMethodName != null)
        str.append('.').append(r.getSourceMethodName)
    }
    str.append(": ")
    str.append(msg)
    if (r.getThrown != null) {
      val w = new StringWriter
      r.getThrown.printStackTrace(new PrintWriter(w))
      str.append('\\n').append(w.toString)
    }
    str.append('\\n')
    str.toString
  }
}
| twitter/twitter-server | slf4j-jdk14/src/main/scala/com/twitter/server/logging/LogFormatter.scala | Scala | apache-2.0 | 2,014 |
import java.io.{ByteArrayInputStream, ByteArrayOutputStream}

import com.sksamuel.avro4s.{AvroInputStream, AvroOutputStream}
import com.sun.xml.internal.messaging.saaj.util.ByteInputStream
import entity.Employee
/**
* Created by arthur on 17-4-10.
*/
object test {
  /**
   * Round-trips two Employee records through avro4s JSON serialization:
   * writes them to a byte buffer, prints the JSON, then reads them back and
   * prints each decoded record.
   */
  def avroUsageTest(): Unit = {
//    getLocalData().foreach(println)
    val employee = Employee(1, "zhangsan", 45, "M", 4500)
    val employee1 = Employee(2, "lisi", 44, "M", 3500)
    val baos = new ByteArrayOutputStream()
    val output = AvroOutputStream.json[Employee](baos)
    output.write(employee)
    output.write(employee1)
    output.close()
    println(baos.toString("utf-8"))
    val json = baos.toString("utf-8")
    // Fix: use the public JDK ByteArrayInputStream instead of the internal
    // com.sun.xml.internal.* ByteInputStream, which is not part of the
    // supported JDK API and is inaccessible on JDK 9+.
    val in = new ByteArrayInputStream(json.getBytes("utf-8"))
    val input = AvroInputStream.json[Employee](in)
    val results = input.iterator()
    while (results.hasNext) {
      println(results.next())
    }
  }
}
| tyhtao1990/BaymaxHome | jcBigData/KafkaProducer/src/test/scala/test.scala | Scala | apache-2.0 | 895 |
/*-------------------------------------------------------------------------*\\
** ScalaCheck **
** Copyright (c) 2007-2019 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
\\*------------------------------------------------------------------------ */
package org.scalacheck
import Gen._
import Prop._
import Test._
import Arbitrary._
// Self-tests for ScalaCheck's Test runner: exercises pass/fail/exhaust/
// exception/shrink outcomes and the parameter-handling machinery.
object TestSpecification extends Properties("Test") {
  // A proposition proved by a plain Boolean (via implicit conversion).
  val proved: Prop = 1 + 1 == 2
  val passing = forAll( (n: Int) => n == n )
  val failing = forAll( (n: Int) => false )
  // The precondition is unsatisfiable, so every generated case is discarded.
  val exhausted = forAll( (n: Int) =>
    (n > 0 && n < 0) ==> (n == n)
  )
  // Always fails, so the triple should shrink to the minimal (0, 0, 0).
  val shrunk = forAll( (t: (Int,Int,Int)) => false )
  // Throws from the property body itself.
  val propException = forAll { n:Int => throw new java.lang.Exception }
  // A generator that throws (division by zero) during generation.
  val undefinedInt = for{
    n <- arbitrary[Int]
  } yield n/0
  val genException = forAll(undefinedInt)((n: Int) => true)
  // Every evaluation must report a worker index in [0, workers).
  property("workers") = forAll { prms: Test.Parameters =>
    var res = true
    val cb = new Test.TestCallback {
      override def onPropEval(n: String, threadIdx: Int, s: Int, d: Int) = {
        res = res && threadIdx >= 0 && threadIdx <= (prms.workers-1)
      }
    }
    Test.check(prms.withTestCallback(cb), passing).status match {
      case Passed => res
      case _ => false
    }
  }
  // Runs an arbitrary property under arbitrary parameters and checks the
  // invariant `f` on the result, labelling failures with the run's stats.
  private def resultInvariant(f: (Test.Parameters, Test.Result) => Boolean): Prop =
    forAll { (prms: Test.Parameters, p: Prop) =>
      val r = Test.check(prms, p)
      s"${r.status}, s=${r.succeeded}, d=${r.discarded}, " +
      s"minSuccessful=${prms.minSuccessfulTests}, " +
      s"maxDiscardRatio=${prms.maxDiscardRatio}, " +
      s"actualDiscardRatio=${r.discarded.toFloat / r.succeeded}, " +
      s"workers=${prms.workers}" |: f(prms,r)
    }
  // The runner must stop for the documented reasons only.
  property("stopCondition") = resultInvariant { (prms, r) =>
    r.status match {
      case Passed =>
        (r.succeeded >= prms.minSuccessfulTests) &&
        (r.discarded <= prms.maxDiscardRatio*r.succeeded)
      case Exhausted =>
        (r.discarded > r.succeeded * prms.maxDiscardRatio) &&
        (r.discarded >= prms.minSuccessfulTests * prms.maxDiscardRatio)
      case _ =>
        (r.succeeded < prms.minSuccessfulTests) &&
        (r.discarded <= prms.maxDiscardRatio*r.succeeded)
    }
  }
  // Generated sizes must respect the configured [minSize, maxSize] range.
  property("size") = forAll { prms: Test.Parameters =>
    val p = sizedProp { sz => sz >= prms.minSize && sz <= prms.maxSize }
    Test.check(prms, p).status == Passed
  }
  property("propFailing") = forAll { prms: Test.Parameters =>
    Test.check(prms, failing).status match {
      case _:Failed => true
      case _ => false
    }
  }
  property("propPassing") = forAll { prms: Test.Parameters =>
    Test.check(prms, passing).status == Passed
  }
  property("propProved") = forAll { prms: Test.Parameters =>
    Test.check(prms, proved).status match {
      case _:Test.Proved => true
      case _ => false
    }
  }
  property("propExhausted") = forAll { prms: Test.Parameters =>
    Test.check(prms, exhausted).status == Exhausted
  }
  property("propPropException") = forAll { prms: Test.Parameters =>
    Test.check(prms, propException).status match {
      case _:PropException => true
      case _ => false
    }
  }
  // A throwing generator must also surface as PropException.
  property("propGenException") = forAll { prms: Test.Parameters =>
    Test.check(prms, genException).status match {
      case x:PropException => true :| x.toString
      case x => false :| x.toString
    }
  }
  // The always-failing triple property must shrink to all zeros.
  property("propShrunk") = forAll { prms: Test.Parameters =>
    Test.check(prms, shrunk).status match {
      case Failed(Arg(_,(x:Int,y:Int,z:Int),_,_,_,_)::Nil,_) =>
        x == 0 && y == 0 && z == 0
      case x => false
    }
  }
  // With legacy shrinking disabled, the Shrink instance must never be
  // consulted (tracked via the `shrunk` flag) and the property still fails.
  property("disabling shrinking works") = {
    object Bogus {
      val gen: Gen[Bogus] =
        Gen.choose(Int.MinValue, Int.MaxValue).map(Bogus(_))
      var shrunk: Boolean = false
      implicit def shrinkBogus: Shrink[Bogus] = {
        Shrink { (b: Bogus) => shrunk = true; Stream.empty }
      }
    }
    case class Bogus(x: Int)
    val prop = Prop.forAll[Bogus, Prop](Bogus.gen) { b => Prop(false) }
    val prms = Test.Parameters.default.disableLegacyShrinking
    val res = Test.check(prms, prop)
    Prop(!res.passed && !Bogus.shrunk)
  }
  // overrideParameters must win over the parameters passed to checkProperties:
  // the run is started with seed0 but the property must observe seed1.
  property("Properties.overrideParameters overrides Test.Parameters") = {
    val seed0 = rng.Seed.fromBase64("aaaaa_mr05Z_DCbd2PyUolC0h93iH1MQwIdnH2UuI4L=").get
    val seed1 = rng.Seed.fromBase64("zzzzz_mr05Z_DCbd2PyUolC0h93iH1MQwIdnH2UuI4L=").get
    val myProps = new Properties("MyProps") {
      override def overrideParameters(prms: Test.Parameters): Test.Parameters =
        prms.withInitialSeed(Some(seed1))
      property("initial seed matches") =
        Prop { prms =>
          val ok = prms.initialSeed == Some(seed1)
          Prop.Result(status = if (ok) Prop.Proof else Prop.False)
        }
    }
    Prop {
      val prms = Test.Parameters.default.withInitialSeed(Some(seed0))
      val results = Test.checkProperties(prms, myProps)
      val ok = results.forall { case (_, res) => res.passed }
      Prop.Result(status = if (ok) Prop.Proof else Prop.False)
    }
  }
}
| xuwei-k/scalacheck | jvm/src/test/scala/org/scalacheck/TestSpecification.scala | Scala | bsd-3-clause | 5,421 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.data.storage
import grizzled.slf4j.Logging
import scala.collection.JavaConversions._
import scala.language.existentials
import scala.reflect.runtime.universe._
import scala.concurrent.ExecutionContext.Implicits.global
private[prediction] case class StorageError(val message: String)
/**
* Backend-agnostic data storage layer with lazy initialization and connection
* pooling. Use this object when you need to interface with Event Store in your
* engine.
*/
object Storage extends Logging {
  // Count of non-fatal configuration errors accumulated during object
  // initialization; if non-zero by the end of construction the process
  // exits (see the check at the bottom of this object).
  private var errors = 0
  private def prefixPath(prefix: String, body: String) = s"${prefix}_${body}"
  private val sourcesPrefix = "PIO_STORAGE_SOURCES"
  private def sourcesPrefixPath(body: String) =
    prefixPath(sourcesPrefix, body)
  private val sourceTypesRegex = """PIO_STORAGE_SOURCES_([^_]+)_TYPE""".r
  // Source names extracted from PIO_STORAGE_SOURCES_<NAME>_TYPE env vars.
  private val sourceKeys: Seq[String] = sys.env.keys.toSeq.flatMap { k =>
    sourceTypesRegex findFirstIn k match {
      case Some(sourceTypesRegex(sourceType)) => Seq(sourceType)
      case None => Nil
    }
  }
  if (sourceKeys.size == 0) warn("There is no properly configured data source.")
  private case class ClientMeta(sourceType: String, client: BaseStorageClient)
  private case class DataObjectMeta(sourceName: String, databaseName: String)
  // Lazily-populated cache of storage clients, keyed by source name
  // (prefixed with "parallel-" for RDD-backed clients).
  private val s2cm = scala.collection.mutable.Map[String, Option[ClientMeta]]()
  // Builds (and caches via sourcesToClientMeta) the storage client for
  // source `k` from its PIO_STORAGE_SOURCES_* environment variables.
  // Returns None and bumps `errors` on any failure.
  private def updateS2CM(k: String, parallel: Boolean, test: Boolean):
  Option[ClientMeta] = {
    try {
      val keyedPath = sourcesPrefixPath(k)
      val sourceType = sys.env(prefixPath(keyedPath, "TYPE"))
      // hosts and ports are to be deprecated
      val hosts = sys.env(prefixPath(keyedPath, "HOSTS")).split(',')
      val ports = sys.env(prefixPath(keyedPath, "PORTS")).split(',').
        map(_.toInt)
      val props = sys.env.filter(t => t._1.startsWith(keyedPath))
      val clientConfig = StorageClientConfig(
        hosts = hosts,
        ports = ports,
        properties = props,
        parallel = parallel,
        test = test)
      val client = getClient(clientConfig, sourceType)
      Some(ClientMeta(sourceType, client))
    } catch {
      case e: Throwable =>
        error(s"Error initializing storage client for source ${k}")
        error(e.getMessage)
        errors += 1
        None
    }
  }
  private def sourcesToClientMeta(
      source: String,
      parallel: Boolean,
      test: Boolean): Option[ClientMeta] = {
    // Parallel and non-parallel clients for the same source are cached
    // under distinct keys.
    val sourceName = if (parallel) s"parallel-${source}" else source
    s2cm.getOrElseUpdate(sourceName, updateS2CM(source, parallel, test))
  }
  /** Reference to the app data repository. */
  private val EventDataRepository = "EVENTDATA"
  private val ModelDataRepository = "MODELDATA"
  private val MetaDataRepository = "METADATA"
  private val repositoriesPrefix = "PIO_STORAGE_REPOSITORIES"
  private def repositoriesPrefixPath(body: String) =
    prefixPath(repositoriesPrefix, body)
  private val repositoryNamesRegex =
    """PIO_STORAGE_REPOSITORIES_([^_]+)_NAME""".r
  // Repository names extracted from PIO_STORAGE_REPOSITORIES_<NAME>_NAME.
  private val repositoryKeys: Seq[String] = sys.env.keys.toSeq.flatMap { k =>
    repositoryNamesRegex findFirstIn k match {
      case Some(repositoryNamesRegex(repositoryName)) => Seq(repositoryName)
      case None => Nil
    }
  }
  if (repositoryKeys.size == 0) {
    warn("There is no properly configured repository.")
  }
  // METADATA is mandatory; its absence is a fatal configuration error.
  private val requiredRepositories = Seq(MetaDataRepository)
  requiredRepositories foreach { r =>
    if (!repositoryKeys.contains(r)) {
      error(s"Required repository (${r}) configuration is missing.")
      errors += 1
    }
  }
  // Resolves each repository to its backing source and database name.
  // Misconfigured entries map to DataObjectMeta("", "") and bump `errors`.
  private val repositoriesToDataObjectMeta: Map[String, DataObjectMeta] =
    repositoryKeys.map(r =>
      try {
        val keyedPath = repositoriesPrefixPath(r)
        val name = sys.env(prefixPath(keyedPath, "NAME"))
        val sourceName = sys.env(prefixPath(keyedPath, "SOURCE"))
        if (sourceKeys.contains(sourceName)) {
          r -> DataObjectMeta(
            sourceName = sourceName,
            databaseName = name)
        } else {
          error(s"$sourceName is not a configured storage source.")
          r -> DataObjectMeta("", "")
        }
      } catch {
        case e: Throwable =>
          error(e.getMessage)
          errors += 1
          r -> DataObjectMeta("", "")
      }
    ).toMap
  // Reflectively instantiates the StorageClient class for backend `pkg`,
  // first under the io.prediction.data.storage namespace, falling back to
  // the bare package name for externally-provided backends.
  private def getClient(
    clientConfig: StorageClientConfig,
    pkg: String): BaseStorageClient = {
    val className = "io.prediction.data.storage." + pkg + ".StorageClient"
    try {
      Class.forName(className).getConstructors()(0).newInstance(clientConfig).
        asInstanceOf[BaseStorageClient]
    } catch {
      case e: ClassNotFoundException =>
        val originalClassName = pkg + ".StorageClient"
        Class.forName(originalClassName).getConstructors()(0).
          newInstance(clientConfig).asInstanceOf[BaseStorageClient]
      case e: java.lang.reflect.InvocationTargetException =>
        // Unwrap reflection's wrapper so callers see the real cause.
        throw e.getCause
    }
  }
  private[prediction]
  def getDataObject[T](repo: String, test: Boolean = false)
    (implicit tag: TypeTag[T]): T = {
    val repoDOMeta = repositoriesToDataObjectMeta(repo)
    val repoDOSourceName = repoDOMeta.sourceName
    getDataObject[T](repoDOSourceName, repoDOMeta.databaseName, test = test)
  }
  private[prediction]
  def getPDataObject[T](repo: String)(implicit tag: TypeTag[T]): T = {
    val repoDOMeta = repositoriesToDataObjectMeta(repo)
    val repoDOSourceName = repoDOMeta.sourceName
    getPDataObject[T](repoDOSourceName, repoDOMeta.databaseName)
  }
  // Reflectively constructs the backend-specific implementation of data
  // access type T: the class name is derived from the requested type's
  // simple name, prefixed with the backend's source type and client prefix.
  private[prediction] def getDataObject[T](
      sourceName: String,
      databaseName: String,
      parallel: Boolean = false,
      test: Boolean = false)(implicit tag: TypeTag[T]): T = {
    val clientMeta = sourcesToClientMeta(sourceName, parallel, test) getOrElse {
      throw new StorageClientException(
        s"Data source $sourceName was not properly initialized.")
    }
    val sourceType = clientMeta.sourceType
    val ctorArgs = dataObjectCtorArgs(clientMeta.client, databaseName)
    val classPrefix = clientMeta.client.prefix
    val originalClassName = tag.tpe.toString.split('.')
    val rawClassName = sourceType + "." + classPrefix + originalClassName.last
    val className = "io.prediction.data.storage." + rawClassName
    val clazz = try {
      Class.forName(className)
    } catch {
      case e: ClassNotFoundException => Class.forName(rawClassName)
    }
    val constructor = clazz.getConstructors()(0)
    try {
      constructor.newInstance(ctorArgs: _*).
        asInstanceOf[T]
    } catch {
      case e: IllegalArgumentException =>
        error(
          "Unable to instantiate data object with class '" +
          constructor.getDeclaringClass.getName + " because its constructor" +
          " does not have the right number of arguments." +
          " Number of required constructor arguments: " +
          ctorArgs.size + "." +
          " Number of existing constructor arguments: " +
          constructor.getParameterTypes.size + "." +
          s" Storage source name: ${sourceName}." +
          s" Exception message: ${e.getMessage}).")
        errors += 1
        throw e
      case e: java.lang.reflect.InvocationTargetException =>
        throw e.getCause
    }
  }
  private def getPDataObject[T](
    sourceName: String,
    databaseName: String)(implicit tag: TypeTag[T]): T =
    getDataObject[T](sourceName, databaseName, true)
  // Constructor arguments passed to every reflectively-created data object:
  // the underlying raw client followed by the database name.
  private def dataObjectCtorArgs(
      client: BaseStorageClient,
      dbName: String): Seq[AnyRef] = {
    Seq(client.client, dbName)
  }
  // Smoke-tests all configured backends by instantiating every data object
  // and performing a write/remove round-trip against the event store.
  private[prediction] def verifyAllDataObjects(): Unit = {
    println("  Verifying Meta Data Backend")
    getMetaDataEngineManifests()
    getMetaDataEngineInstances()
    getMetaDataEvaluationInstances()
    getMetaDataApps()
    getMetaDataAccessKeys()
    println("  Verifying Model Data Backend")
    getModelDataModels()
    println("  Verifying Event Data Backend")
    val eventsDb = getLEvents(test = true)
    println("  Test write Event Store (App Id 0)")
    // use appId=0 for testing purpose
    eventsDb.init(0)
    eventsDb.insert(Event(
      event="test",
      entityType="test",
      entityId="test"), 0)
    eventsDb.remove(0)
    eventsDb.close()
  }
  private[prediction] def getMetaDataEngineManifests(): EngineManifests =
    getDataObject[EngineManifests](MetaDataRepository)
  private[prediction] def getMetaDataEngineInstances(): EngineInstances =
    getDataObject[EngineInstances](MetaDataRepository)
  private[prediction] def getMetaDataEvaluationInstances(): EvaluationInstances =
    getDataObject[EvaluationInstances](MetaDataRepository)
  private[prediction] def getMetaDataApps(): Apps =
    getDataObject[Apps](MetaDataRepository)
  private[prediction] def getMetaDataAccessKeys(): AccessKeys =
    getDataObject[AccessKeys](MetaDataRepository)
  private[prediction] def getModelDataModels(): Models =
    getDataObject[Models](ModelDataRepository)
  /** Obtains a data access object that returns [[Event]] related local data
    * structure.
    */
  def getLEvents(test: Boolean = false): LEvents =
    getDataObject[LEvents](EventDataRepository, test = test)
  /** Obtains a data access object that returns [[Event]] related RDD data
    * structure.
    */
  def getPEvents(): PEvents =
    getPDataObject[PEvents](EventDataRepository)
  // Fail fast: any configuration error detected above aborts the process.
  if (errors > 0) {
    error(s"There were $errors configuration errors. Exiting.")
    sys.exit(errors)
  }
}
// Contract implemented by every backend's StorageClient: the parsed
// configuration, the raw backend connection object, and an optional class
// name prefix used when resolving data object implementations reflectively.
private[prediction] trait BaseStorageClient {
  val config: StorageClientConfig
  val client: AnyRef
  val prefix: String = ""
}
// Configuration handed to a backend StorageClient constructor. `properties`
// carries the raw PIO_STORAGE_SOURCES_* environment entries for the source.
private[prediction] case class StorageClientConfig(
  hosts: Seq[String] = Seq(), // deprecated
  ports: Seq[Int] = Seq(), // deprecated
  parallel: Boolean = false, // parallelized access (RDD)?
  test: Boolean = false, // test mode config
  properties: Map[String, String] = Map())
// Thrown when a storage client cannot be obtained for a configured source.
private[prediction] class StorageClientException(msg: String)
    extends RuntimeException(msg)
| nvoron23/PredictionIO | data/src/main/scala/io/prediction/data/storage/Storage.scala | Scala | apache-2.0 | 10,596 |
// Compiler regression test (i2396): `collect` with an extractor pattern on a
// case class nested in a separate object. Code intentionally left unchanged —
// the pattern shape is the point of the test.
class B {
  val buzz = Some(Bees.Bee("buzz")).collect {
    case Bees.Bee(value) => value
  }
}
// Entry point: instantiating B forces evaluation of the `collect` above;
// the test passes if no exception is thrown at runtime.
object Test {
  def main(args: Array[String]): Unit = {
    new B
  }
}
// Companion-less container object; the nesting of Bee inside it is part of
// the regression scenario exercised by class B.
object Bees {
  case class Bee(value: String)
}
| som-snytt/dotty | tests/run/i2396.scala | Scala | apache-2.0 | 218 |
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.appjet.common.cli;
import org.apache.commons.lang.WordUtils;
class CliOption(val name: String, val description: String, val argName: Option[String]);
class ParseException(message: String) extends RuntimeException(message);
/**
 * Parses command-line arguments against a predefined set of [[CliOption]]s
 * and renders a usage string for them.
 */
class CliParser(predef: Array[CliOption]) {
  /** Column width used when word-wrapping descriptions in [[usage]]. */
  val displayWidth = 80;
  /** Options indexed by name for lookup during parsing. */
  val options = Map((for (opt <- predef) yield ((opt.name, opt))): _*);

  /**
   * Splits `args0` into (flags, positional arguments). Flags are tokens
   * starting with "-"; leading dashes are stripped and "name=value" pairs
   * are split on the first "=". A flag given without "=value" maps to
   * "true", unless its CliOption declares an argName, in which case a
   * ParseException is thrown.
   */
  def parseOptions(args0: Array[String]): (Map[String, String], Array[String]) = {
    val (opts, args) = args0.partition(_.startsWith("-"));
    (Map((for (arg <- opts) yield {
      val parts = arg.split("=", 2);
      val name = "-+".r.replaceFirstIn(parts(0), "");
      if (parts.length == 1 && options.get(name).map(_.argName.isDefined).exists(x => x))
        throw new ParseException("Missing argument for flag: "+name);
      // Flags without an explicit value behave as boolean switches.
      // (Replaces the previous Array-as-PartialFunction `orElse` trick.)
      (name, if (parts.length > 1) parts(1) else "true");
    }): _*),
     args.toArray);
  }

  // Debug passthrough: returns `value` unchanged (print left disabled).
  def dprint(prefix: String, value: String) = {
    // println(prefix+": "+value+"\n");
    value;
  }

  /**
   * Renders a help string listing every option with its wrapped description.
   * Fix: the previous version computed an unused maximum-name-length value
   * via reduceRight, which threw UnsupportedOperationException when `predef`
   * was empty; that dead computation has been removed.
   */
  def usage = {
    val sb = new StringBuilder();
    for ((n, opt) <- options) {
      sb.append(" --"+n+opt.argName.map("=<"+_+">").getOrElse("")+"\n");
      sb.append(" "+WordUtils.wrap(opt.description, displayWidth-5).split("\n").mkString("\n "));
      sb.append("\n\n");
    }
    sb.toString();
  }
}
| railscook/etherpad | infrastructure/net.appjet.common.cli/cli.scala | Scala | apache-2.0 | 2,042 |
package com.cloudwick.generator.utils
import java.io.File
import java.text.SimpleDateFormat
import scala.collection.mutable.ArrayBuffer
import org.apache.avro.file.DataFileWriter
import org.apache.avro.generic.{GenericDatumWriter, GenericRecord}
import org.apache.avro.Schema
import org.slf4j.LoggerFactory
/**
* File handler with inbuilt capability to roll file's and is thread safe
* @author ashrith
*/
class AvroFileHandler(val fileName: String, val schemaDesc: String, val maxFileSizeBytes: Int, val append: Boolean = false) {
  lazy val logger = LoggerFactory.getLogger(getClass)
  // Underlying Avro writer; replaced on every roll.
  private var stream: DataFileWriter[GenericRecord] = null
  private val schema = new Schema.Parser().parse(schemaDesc)
  // Timestamp of the last open; written in openFile (not read elsewhere in
  // this class — presumably kept for diagnostics).
  private var openTime: Long = 0
  // Approximate bytes written since the last roll; see note in publish().
  private var bytesWrittenToFile: Long = 0
  // Open the file eagerly at construction time.
  openFile()
  def flush() = {
    stream.flush()
  }
  def close() = {
    flush()
    try {
      logger.debug("Attempting to close the file {}", fileName)
      // NOTE(review): swallows every Throwable, including fatal errors —
      // deliberate best-effort close, but worth confirming.
      stream.close()
    } catch { case _: Throwable => () }
  }
  // Timestamp suffix appended to rolled file names (yyyyMMdd_HHmmss).
  def timeSuffix = {
    val dateFormat = new SimpleDateFormat("yyyyMMdd_HHmmss")
    dateFormat.format(new java.util.Date)
  }
  // (Re)opens `fileName`, creating parent directories as needed, either
  // appending to an existing Avro container or creating a fresh one, and
  // resets the byte counter.
  def openFile() = {
    logger.debug("Attempting to open the file {}", fileName)
    val dir = new File(fileName).getParentFile
    if ((dir ne null) && !dir.exists) dir.mkdirs
    stream = new org.apache.avro.file.DataFileWriter[GenericRecord](new GenericDatumWriter[GenericRecord](schema))
    if (append) {
      stream.appendTo(new File(fileName))
    } else {
      stream.create(schema, new File(fileName))
    }
    openTime = System.currentTimeMillis()
    bytesWrittenToFile = 0
  }
  // Closes the current file, renames it with a timestamp suffix (inserted
  // before the extension when one exists), and opens a fresh file.
  def roll() = synchronized {
    logger.debug("Attempting to roll file")
    stream.close()
    val n = fileName.lastIndexOf('.')
    val newFileName = if (n > 0) {
      fileName.substring(0, n) + "-" + timeSuffix + fileName.substring(n)
    } else {
      fileName + "-" + timeSuffix
    }
    new File(fileName).renameTo(new File(newFileName))
    openFile()
  }
  // Writes a single record, rolling first if the size budget would be
  // exceeded. Size accounting uses the record's UTF-8 toString length, which
  // only approximates the on-disk Avro size (acknowledged below).
  def publish(datum: GenericRecord) = {
    try {
      val lineSizeBytes = datum.toString.getBytes("UTF-8").length // this is not dependable
      synchronized {
        if (bytesWrittenToFile + lineSizeBytes > maxFileSizeBytes) {
          roll()
        }
        stream.append(datum)
        stream.flush()
        bytesWrittenToFile += lineSizeBytes
      }
    } catch {
      case e: Throwable => logger.error("Error:: {}", e)
    }
  }
  // Writes a batch of records under one lock acquisition, rolling mid-batch
  // when the budget is exceeded, and flushing once at the end.
  def publishBuffered(datums: ArrayBuffer[GenericRecord]) = {
    var lineSizeBytes: Int = 0
    try {
      synchronized {
        datums.foreach { datum =>
          lineSizeBytes = datum.toString.getBytes("UTF-8").length
          if (bytesWrittenToFile + lineSizeBytes > maxFileSizeBytes) {
            roll()
          }
          stream.append(datum)
          bytesWrittenToFile += lineSizeBytes
        }
        stream.flush()
      }
    } catch {
      case e: Throwable => logger.error("Error:: {}", e)
    }
  }
}
| davinashreddy/generator | src/main/scala/com/cloudwick/generator/utils/AvroFileHandler.scala | Scala | apache-2.0 | 2,978 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import java.util.Locale
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.language.implicitConversions
import org.mockito.Mockito.{mock, when}
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.concurrent.Eventually._
import org.apache.spark._
import org.apache.spark.broadcast.BroadcastManager
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.internal.config.Tests._
import org.apache.spark.memory.UnifiedMemoryManager
import org.apache.spark.network.BlockTransferService
import org.apache.spark.network.netty.NettyBlockTransferService
import org.apache.spark.rpc.RpcEnv
import org.apache.spark.scheduler.LiveListenerBus
import org.apache.spark.serializer.{KryoSerializer, SerializerManager}
import org.apache.spark.shuffle.sort.SortShuffleManager
import org.apache.spark.storage.StorageLevel._
import org.apache.spark.util.Utils
trait BlockManagerReplicationBehavior extends SparkFunSuite
with Matchers
with BeforeAndAfter
with LocalSparkContext {
val conf: SparkConf
protected var rpcEnv: RpcEnv = null
protected var master: BlockManagerMaster = null
protected lazy val securityMgr = new SecurityManager(conf)
protected lazy val bcastManager = new BroadcastManager(true, conf, securityMgr)
protected lazy val mapOutputTracker = new MapOutputTrackerMaster(conf, bcastManager, true)
protected lazy val shuffleManager = new SortShuffleManager(conf)
// List of block manager created during an unit test, so that all of the them can be stopped
// after the unit test.
protected val allStores = new ArrayBuffer[BlockManager]
// Reuse a serializer across tests to avoid creating a new thread-local buffer on each test
protected lazy val serializer = new KryoSerializer(conf)
// Implicitly convert strings to BlockIds for test clarity.
protected implicit def StringToBlockId(value: String): BlockId = new TestBlockId(value)
protected def makeBlockManager(
maxMem: Long,
name: String = SparkContext.DRIVER_IDENTIFIER): BlockManager = {
conf.set(TEST_MEMORY, maxMem)
conf.set(MEMORY_OFFHEAP_SIZE, maxMem)
val transfer = new NettyBlockTransferService(conf, securityMgr, "localhost", "localhost", 0, 1)
val memManager = UnifiedMemoryManager(conf, numCores = 1)
val serializerManager = new SerializerManager(serializer, conf)
val store = new BlockManager(name, rpcEnv, master, serializerManager, conf,
memManager, mapOutputTracker, shuffleManager, transfer, securityMgr, None)
memManager.setMemoryStore(store.memoryStore)
store.initialize("app-id")
allStores += store
store
}
before {
rpcEnv = RpcEnv.create("test", "localhost", 0, conf, securityMgr)
conf.set(NETWORK_AUTH_ENABLED, false)
conf.set(DRIVER_PORT, rpcEnv.address.port)
conf.set(IS_TESTING, true)
conf.set(MEMORY_FRACTION, 1.0)
conf.set(MEMORY_STORAGE_FRACTION, 0.999)
conf.set(STORAGE_UNROLL_MEMORY_THRESHOLD, 512L)
// to make a replication attempt to inactive store fail fast
conf.set("spark.core.connection.ack.wait.timeout", "1s")
// to make cached peers refresh frequently
conf.set(STORAGE_CACHED_PEERS_TTL, 10)
sc = new SparkContext("local", "test", conf)
master = new BlockManagerMaster(rpcEnv.setupEndpoint("blockmanager",
new BlockManagerMasterEndpoint(rpcEnv, true, conf,
new LiveListenerBus(conf), None)), conf, true)
allStores.clear()
}
after {
allStores.foreach { _.stop() }
allStores.clear()
rpcEnv.shutdown()
rpcEnv.awaitTermination()
rpcEnv = null
master = null
}
test("get peers with addition and removal of block managers") {
val numStores = 4
val stores = (1 to numStores - 1).map { i => makeBlockManager(1000, s"store$i") }
val storeIds = stores.map { _.blockManagerId }.toSet
assert(master.getPeers(stores(0).blockManagerId).toSet ===
storeIds.filterNot { _ == stores(0).blockManagerId })
assert(master.getPeers(stores(1).blockManagerId).toSet ===
storeIds.filterNot { _ == stores(1).blockManagerId })
assert(master.getPeers(stores(2).blockManagerId).toSet ===
storeIds.filterNot { _ == stores(2).blockManagerId })
// Add driver store and test whether it is filtered out
val driverStore = makeBlockManager(1000, SparkContext.DRIVER_IDENTIFIER)
assert(master.getPeers(stores(0).blockManagerId).forall(!_.isDriver))
assert(master.getPeers(stores(1).blockManagerId).forall(!_.isDriver))
assert(master.getPeers(stores(2).blockManagerId).forall(!_.isDriver))
// Add a new store and test whether get peers returns it
val newStore = makeBlockManager(1000, s"store$numStores")
assert(master.getPeers(stores(0).blockManagerId).toSet ===
storeIds.filterNot { _ == stores(0).blockManagerId } + newStore.blockManagerId)
assert(master.getPeers(stores(1).blockManagerId).toSet ===
storeIds.filterNot { _ == stores(1).blockManagerId } + newStore.blockManagerId)
assert(master.getPeers(stores(2).blockManagerId).toSet ===
storeIds.filterNot { _ == stores(2).blockManagerId } + newStore.blockManagerId)
assert(master.getPeers(newStore.blockManagerId).toSet === storeIds)
// Remove a store and test whether get peers returns it
val storeIdToRemove = stores(0).blockManagerId
master.removeExecutor(storeIdToRemove.executorId)
assert(!master.getPeers(stores(1).blockManagerId).contains(storeIdToRemove))
assert(!master.getPeers(stores(2).blockManagerId).contains(storeIdToRemove))
assert(!master.getPeers(newStore.blockManagerId).contains(storeIdToRemove))
// Test whether asking for peers of a unregistered block manager id returns empty list
assert(master.getPeers(stores(0).blockManagerId).isEmpty)
assert(master.getPeers(BlockManagerId("", "", 1)).isEmpty)
}
test("block replication - 2x replication") {
testReplication(2,
Seq(MEMORY_ONLY, MEMORY_ONLY_SER, DISK_ONLY, MEMORY_AND_DISK_2, MEMORY_AND_DISK_SER_2)
)
}
test("block replication - 3x replication") {
// Generate storage levels with 3x replication
val storageLevels = {
Seq(MEMORY_ONLY, MEMORY_ONLY_SER, DISK_ONLY, MEMORY_AND_DISK, MEMORY_AND_DISK_SER).map {
level => StorageLevel(
level.useDisk, level.useMemory, level.useOffHeap, level.deserialized, 3)
}
}
testReplication(3, storageLevels)
}
test("block replication - mixed between 1x to 5x") {
// Generate storage levels with varying replication
val storageLevels = Seq(
MEMORY_ONLY,
MEMORY_ONLY_SER_2,
StorageLevel(true, false, false, false, 3),
StorageLevel(true, true, false, true, 4),
StorageLevel(true, true, false, false, 5),
StorageLevel(true, true, false, true, 4),
StorageLevel(true, false, false, false, 3),
MEMORY_ONLY_SER_2,
MEMORY_ONLY
)
testReplication(5, storageLevels)
}
test("block replication - off-heap") {
testReplication(2, Seq(OFF_HEAP, StorageLevel(true, true, true, false, 2)))
}
test("block replication - 2x replication without peers") {
intercept[org.scalatest.exceptions.TestFailedException] {
testReplication(1,
Seq(StorageLevel.MEMORY_AND_DISK_2, StorageLevel(true, false, false, false, 3)))
}
}
// Replication should silently skip peers that cannot receive blocks, and
// succeed again once a healthy peer joins the cluster.
test("block replication - replication failures") {
  /*
    Create a system of three block managers / stores. One of them (say, failableStore)
    cannot receive blocks. So attempts to use that as replication target fails.

        +-----------/fails/-----------> failableStore
        |
    normalStore
        |
        +-----------/works/-----------> anotherNormalStore

    We are first going to add a normal block manager (i.e. normalStore) and the failable block
    manager (i.e. failableStore), and test whether 2x replication fails to create two
    copies of a block. Then we are going to add another normal block manager
    (i.e., anotherNormalStore), and test that now 2x replication works as the
    new store will be used for replication.
  */

  // Add a normal block manager
  val store = makeBlockManager(10000, "store")

  // Insert a block with 2x replication and return the number of copies of the block
  def replicateAndGetNumCopies(blockId: String): Int = {
    store.putSingle(blockId, new Array[Byte](1000), StorageLevel.MEMORY_AND_DISK_2)
    val numLocations = master.getLocations(blockId).size
    // Remove the block from every store so each call starts from a blank slate.
    allStores.foreach { _.removeBlock(blockId) }
    numLocations
  }

  // Add a failable block manager with a mock transfer service that does not
  // allow receiving of blocks. So attempts to use it as a replication target will fail.
  val failableTransfer = mock(classOf[BlockTransferService]) // this wont actually work
  when(failableTransfer.hostName).thenReturn("some-hostname")
  when(failableTransfer.port).thenReturn(1000)
  conf.set(TEST_MEMORY, 10000L)
  val memManager = UnifiedMemoryManager(conf, numCores = 1)
  val serializerManager = new SerializerManager(serializer, conf)
  val failableStore = new BlockManager("failable-store", rpcEnv, master, serializerManager, conf,
    memManager, mapOutputTracker, shuffleManager, failableTransfer, securityMgr, None)
  memManager.setMemoryStore(failableStore.memoryStore)
  failableStore.initialize("app-id")
  allStores += failableStore // so that this gets stopped after test
  // Sanity check: the failable store is the only peer the normal store can see.
  assert(master.getPeers(store.blockManagerId).toSet === Set(failableStore.blockManagerId))

  // Test that 2x replication fails by creating only one copy of the block
  assert(replicateAndGetNumCopies("a1") === 1)

  // Add another normal block manager and test that 2x replication works
  makeBlockManager(10000, "anotherStore")
  eventually(timeout(1.second), interval(10.milliseconds)) {
    assert(replicateAndGetNumCopies("a2") === 2)
  }
}
// Replication should use as many peers as exist (capped by the requested
// replication factor) and adapt as block managers are added and removed.
test("block replication - addition and deletion of block managers") {
  val blockSize = 1000
  val storeSize = 10000
  val initialStores = (1 to 2).map { i => makeBlockManager(storeSize, s"store$i") }

  // Insert a block with given replication factor and return the number of copies of the block
  def replicateAndGetNumCopies(blockId: String, replicationFactor: Int): Int = {
    val storageLevel = StorageLevel(true, true, false, true, replicationFactor)
    initialStores.head.putSingle(blockId, new Array[Byte](blockSize), storageLevel)
    val numLocations = master.getLocations(blockId).size
    // Remove the block everywhere so subsequent calls are independent.
    allStores.foreach { _.removeBlock(blockId) }
    numLocations
  }

  // 2x replication should work, 3x replication should only replicate 2x
  assert(replicateAndGetNumCopies("a1", 2) === 2)
  assert(replicateAndGetNumCopies("a2", 3) === 2)

  // Add another store, 3x replication should work now, 4x replication should only replicate 3x
  val newStore1 = makeBlockManager(storeSize, s"newstore1")
  eventually(timeout(1.second), interval(10.milliseconds)) {
    assert(replicateAndGetNumCopies("a3", 3) === 3)
  }
  assert(replicateAndGetNumCopies("a4", 4) === 3)

  // Add another store, 4x replication should work now
  val newStore2 = makeBlockManager(storeSize, s"newstore2")
  eventually(timeout(1.second), interval(10.milliseconds)) {
    assert(replicateAndGetNumCopies("a5", 4) === 4)
  }

  // Remove all but the 1st store, 2x replication should fail
  (initialStores.tail ++ Seq(newStore1, newStore2)).foreach {
    store =>
      master.removeExecutor(store.blockManagerId.executorId)
      store.stop()
  }
  assert(replicateAndGetNumCopies("a6", 2) === 1)

  // Add new stores, 3x replication should work
  val newStores = (3 to 5).map {
    i => makeBlockManager(storeSize, s"newstore$i")
  }
  eventually(timeout(1.second), interval(10.milliseconds)) {
    assert(replicateAndGetNumCopies("a7", 3) === 3)
  }
}
/**
 * Test replication of blocks with different storage levels (various combinations of
 * memory, disk & serialization). For each storage level, this function tests every store
 * whether the block is present and also tests the master whether its knowledge of blocks
 * is correct. Then it also drops the block from memory of each store (using LRU) and
 * again checks whether the master's knowledge gets updated.
 *
 * @param maxReplication highest replication factor exercised; one store is
 *                       created per replica, so this must be greater than 1
 * @param storageLevels  storage levels to test, each carrying its own replication factor
 */
protected def testReplication(maxReplication: Int, storageLevels: Seq[StorageLevel]): Unit = {
  import org.apache.spark.storage.StorageLevel._

  assert(maxReplication > 1,
    s"Cannot test replication factor $maxReplication")

  // storage levels to test with the given replication factor
  val storeSize = 10000
  val blockSize = 1000

  // As many stores as the replication factor
  val stores = (1 to maxReplication).map {
    i => makeBlockManager(storeSize, s"store$i")
  }

  storageLevels.foreach { storageLevel =>
    // Put the block into one of the stores
    val blockId = TestBlockId(
      "block-with-" + storageLevel.description.replace(" ", "-").toLowerCase(Locale.ROOT))
    val testValue = Array.fill[Byte](blockSize)(1)
    stores(0).putSingle(blockId, testValue, storageLevel)

    // Assert that the master knows the expected number of locations for the block
    val blockLocations = master.getLocations(blockId).map(_.executorId).toSet
    assert(blockLocations.size === storageLevel.replication,
      s"master did not have ${storageLevel.replication} locations for $blockId")

    // Test state of the stores that contain the block
    stores.filter {
      testStore => blockLocations.contains(testStore.blockManagerId.executorId)
    }.foreach { testStore =>
      val testStoreName = testStore.blockManagerId.executorId

      // The block must be readable locally and still hold the original value.
      val blockResultOpt = testStore.getLocalValues(blockId)
      assert(blockResultOpt.isDefined, s"$blockId was not found in $testStoreName")
      val localValues = blockResultOpt.get.data.toSeq
      assert(localValues.size == 1)
      assert(localValues.head === testValue)
      assert(master.getLocations(blockId).map(_.executorId).toSet.contains(testStoreName),
        s"master does not have status for ${blockId.name} in $testStoreName")

      // For serialized in-memory blocks, the backing buffers must match the
      // requested memory mode (direct buffers for off-heap, heap otherwise).
      val memoryStore = testStore.memoryStore
      if (memoryStore.contains(blockId) && !storageLevel.deserialized) {
        memoryStore.getBytes(blockId).get.chunks.foreach { byteBuffer =>
          assert(storageLevel.useOffHeap == byteBuffer.isDirect,
            s"memory mode ${storageLevel.memoryMode} is not compatible with " +
              byteBuffer.getClass.getSimpleName)
        }
      }

      val blockStatus = master.getBlockStatus(blockId)(testStore.blockManagerId)

      // Assert that block status in the master for this store has expected storage level
      assert(
        blockStatus.storageLevel.useDisk === storageLevel.useDisk &&
          blockStatus.storageLevel.useMemory === storageLevel.useMemory &&
          blockStatus.storageLevel.useOffHeap === storageLevel.useOffHeap &&
          blockStatus.storageLevel.deserialized === storageLevel.deserialized,
        s"master does not know correct storage level for ${blockId.name} in $testStoreName")

      // Assert that the block status in the master for this store has correct memory usage info
      assert(!blockStatus.storageLevel.useMemory || blockStatus.memSize >= blockSize,
        s"master does not know size of ${blockId.name} stored in memory of $testStoreName")

      // If the block is supposed to be in memory, then drop the copy of the block in
      // this store and test whether the master is updated with zero memory usage for this store
      if (storageLevel.useMemory) {
        // Use the same memory mode as the block so the dummy blocks compete
        // for the same memory pool and force an LRU eviction.
        val sl = if (storageLevel.useOffHeap) {
          StorageLevel(false, true, true, false, 1)
        } else {
          MEMORY_ONLY_SER
        }
        // Force the block to be dropped by adding a number of dummy blocks
        (1 to 10).foreach {
          i => testStore.putSingle(s"dummy-block-$i", new Array[Byte](1000), sl)
        }
        (1 to 10).foreach {
          i => testStore.removeBlock(s"dummy-block-$i")
        }

        val newBlockStatusOption = master.getBlockStatus(blockId).get(testStore.blockManagerId)

        // Assert that the block status in the master either does not exist (block removed
        // from every store) or has zero memory usage for this store
        assert(
          newBlockStatusOption.isEmpty || newBlockStatusOption.get.memSize === 0,
          s"after dropping, master does not know size of ${blockId.name} " +
            s"stored in memory of $testStoreName"
        )
      }

      // If the block is supposed to be on disk (after dropping or otherwise), then
      // test whether master has correct disk usage for this store
      if (storageLevel.useDisk) {
        assert(master.getBlockStatus(blockId)(testStore.blockManagerId).diskSize >= blockSize,
          s"after dropping, master does not know size of ${blockId.name} " +
            s"stored in disk of $testStoreName"
        )
      }
    }
    master.removeBlock(blockId)
  }
}
}
class BlockManagerReplicationSuite extends BlockManagerReplicationBehavior {
  // Plain configuration with a small Kryo buffer; exercises the default
  // (random) replication policy. SparkConf.set returns `this`, so the two
  // settings are applied in a single chained expression.
  val conf: SparkConf = new SparkConf(false)
    .set("spark.app.id", "test")
    .set(Kryo.KRYO_SERIALIZER_BUFFER_SIZE.key, "1m")
}
class BlockManagerProactiveReplicationSuite extends BlockManagerReplicationBehavior {
  val conf = new SparkConf(false).set("spark.app.id", "test")
  conf.set(Kryo.KRYO_SERIALIZER_BUFFER_SIZE.key, "1m")
  // Enable proactive re-replication of lost replicas and pin-leak detection.
  conf.set(STORAGE_REPLICATION_PROACTIVE, true)
  conf.set(STORAGE_EXCEPTION_PIN_LEAK, true)

  // Generate one test case per replication factor from 2 to 5.
  (2 to 5).foreach { i =>
    test(s"proactive block replication - $i replicas - ${i - 1} block manager deletions") {
      testProactiveReplication(i)
    }
  }

  /**
   * Inserts a block with the given replication factor into a pool of ten stores,
   * then removes all but one of the stores holding a replica and verifies that
   * proactive replication restores the replica count on surviving stores.
   *
   * @param replicationFactor desired number of copies of the block
   */
  def testProactiveReplication(replicationFactor: Int): Unit = {
    val blockSize = 1000
    val storeSize = 10000
    val initialStores = (1 to 10).map { i => makeBlockManager(storeSize, s"store$i") }

    val blockId = "a1"

    val storageLevel = StorageLevel(true, true, false, true, replicationFactor)
    initialStores.head.putSingle(blockId, new Array[Byte](blockSize), storageLevel)

    val blockLocations = master.getLocations(blockId)
    logInfo(s"Initial locations : $blockLocations")

    assert(blockLocations.size === replicationFactor)

    // remove a random blockManager
    val executorsToRemove = blockLocations.take(replicationFactor - 1).toSet
    logInfo(s"Removing $executorsToRemove")
    initialStores.filter(bm => executorsToRemove.contains(bm.blockManagerId)).foreach { bm =>
      master.removeExecutor(bm.blockManagerId.executorId)
      bm.stop()
      // giving enough time for replication to happen and new block be reported to master
      eventually(timeout(5.seconds), interval(100.milliseconds)) {
        val newLocations = master.getLocations(blockId).toSet
        assert(newLocations.size === replicationFactor)
      }
    }

    // Re-read the locations once the replica count has stabilized.
    val newLocations = eventually(timeout(5.seconds), interval(100.milliseconds)) {
      val _newLocations = master.getLocations(blockId).toSet
      assert(_newLocations.size === replicationFactor)
      _newLocations
    }
    logInfo(s"New locations : $newLocations")

    // new locations should not contain stopped block managers
    assert(newLocations.forall(bmId => !executorsToRemove.contains(bmId)),
      "New locations contain stopped block managers.")

    // Make sure all locks have been released.
    eventually(timeout(1.second), interval(10.milliseconds)) {
      initialStores.filter(bm => newLocations.contains(bm.blockManagerId)).foreach { bm =>
        assert(bm.blockInfoManager.getTaskLockCount(BlockInfo.NON_TASK_WRITER) === 0)
      }
    }
  }
}
class DummyTopologyMapper(conf: SparkConf) extends TopologyMapper(conf) with Logging {
  // number of racks to test with
  val numRacks = 3

  /**
   * Gets the topology information given the host name
   *
   * @param hostname Hostname
   * @return random topology
   */
  override def getTopologyForHost(hostname: String): Option[String] = {
    val rack = Utils.random.nextInt(numRacks)
    Some(s"/Rack-$rack")
  }
}
class BlockManagerBasicStrategyReplicationSuite extends BlockManagerReplicationBehavior {
  // Same behavior suite as the base, but swaps in the basic (topology-aware)
  // replication policy together with a random topology mapper. SparkConf.set
  // returns `this`, so all settings are applied in one chained expression.
  val conf: SparkConf = new SparkConf(false)
    .set("spark.app.id", "test")
    .set(Kryo.KRYO_SERIALIZER_BUFFER_SIZE.key, "1m")
    .set(STORAGE_REPLICATION_POLICY, classOf[BasicBlockReplicationPolicy].getName)
    .set(STORAGE_REPLICATION_TOPOLOGY_MAPPER, classOf[DummyTopologyMapper].getName)
}
| aosagie/spark | core/src/test/scala/org/apache/spark/storage/BlockManagerReplicationSuite.scala | Scala | apache-2.0 | 21,500 |
// Factory for the Store comonad: curries `run` so that applying a state `s`
// yields a Store focused at `s`.
object Store {
  def apply[S, A](run: S => A): S => Store[S, A] =
    s => new Store(run, s)
} | hmemcpy/milewski-ctfp-pdf | src/content/3.7/code/scala/snippet35.scala | Scala | gpl-3.0 | 94 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.lambda.tools.data
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.lambda.data.LambdaDataStore
import org.locationtech.geomesa.lambda.tools.{LambdaDataStoreCommand, LambdaDataStoreParams}
import org.locationtech.geomesa.tools.data.{CreateSchemaCommand, CreateSchemaParams}
// CLI command that binds the generic create-schema logic to a Lambda data store.
class LambdaCreateSchemaCommand extends CreateSchemaCommand[LambdaDataStore] with LambdaDataStoreCommand {
  // Combines the generic create-schema parameters with Lambda connection parameters.
  override val params = new LambdaCreateSchemaParams()
}
// JCommander parameter holder; mixes Lambda data-store connection options
// into the generic create-schema parameter set.
@Parameters(commandDescription = "Create a GeoMesa feature type")
class LambdaCreateSchemaParams extends CreateSchemaParams with LambdaDataStoreParams
| ronq/geomesa | geomesa-lambda/geomesa-lambda-tools/src/main/scala/org/locationtech/geomesa/lambda/tools/data/LambdaCreateSchemaCommand.scala | Scala | apache-2.0 | 1,111 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnet
import org.apache.mxnet.io.NDArrayIter
import org.scalatest.FunSuite
import org.slf4j.LoggerFactory
/**
 * Tests for the CSR and row-sparse NDArray formats: construction, retain,
 * element-wise addition, and iteration through NDArrayIter.
 */
class SparseNDArraySuite extends FunSuite {

  private val logger = LoggerFactory.getLogger(classOf[SparseNDArraySuite])

  test("create CSR NDArray") {
    val data = Array(7f, 8f, 9f)
    val indices = Array(0f, 2f, 1f)
    val indptr = Array(0f, 2f, 2f, 3f)
    val shape = Shape(3, 4)
    val sparseND = SparseNDArray.csrMatrix(data, indices, indptr, shape, Context.cpu())
    assert(sparseND.shape == Shape(3, 4))
    // Dense expansion of the 3x4 CSR matrix: row 0 holds 7 and 8, row 2 holds 9.
    assert(sparseND.toArray
      sameElements Array(7.0f, 0.0f, 8.0f, 0.0f,
      0.0f, 0.0f, 0.0f, 0.0f,
      0.0f, 9.0f, 0.0f, 0.0f))
    assert(sparseND.sparseFormat == SparseFormat.CSR)
    // The CSR components should round-trip unchanged.
    assert(sparseND.getIndptr.toArray sameElements indptr)
    assert(sparseND.getIndices.toArray sameElements indices)
  }

  test("create Row Sparse NDArray") {
    val data = Array(
      Array(1f, 2f),
      Array(3f, 4f)
    )
    // Rows 1 and 4 of the 6x2 matrix are non-zero; all others are implicit zeros.
    val indices = Array(1f, 4f)
    val shape = Shape(6, 2)
    val sparseND = SparseNDArray.rowSparseArray(data, indices, shape, Context.cpu())
    assert(sparseND.sparseFormat == SparseFormat.ROW_SPARSE)
    assert(sparseND.shape == Shape(6, 2))
    assert(sparseND.at(1).toArray sameElements Array(1f, 2f))
    assert(sparseND.getIndices.toArray sameElements indices)
  }

  test("Test retain") {
    val arr = Array(
      Array(1f, 2f),
      Array(3f, 4f),
      Array(5f, 6f)
    )
    val indices = Array(0f, 1f, 3f)
    val rspIn = SparseNDArray.rowSparseArray(arr, indices, Shape(4, 2), Context.cpu())
    // Keep only rows 0 and 3; row 1 should be dropped from the result.
    val toRetain = Array(0f, 3f)
    val rspOut = SparseNDArray.retain(rspIn, toRetain)
    assert(rspOut.getData.toArray sameElements Array(1f, 2f, 5f, 6f))
    assert(rspOut.getIndices.toArray sameElements Array(0f, 3f))
  }

  test("Test add") {
    val nd = NDArray.array(Array(1f, 2f, 3f), Shape(3)).toSparse(Some(SparseFormat.ROW_SPARSE))
    val nd2 = nd + nd
    // Adding two sparse arrays should stay sparse and double each element.
    assert(nd2.isInstanceOf[SparseNDArray])
    assert(nd2.toArray sameElements Array(2f, 4f, 6f))
  }

  test("Test DataIter") {
    val nd = NDArray.array(Array(1f, 2f, 3f), Shape(1, 3)).toSparse(Some(SparseFormat.CSR))
    val arr = IndexedSeq(nd, nd, nd, nd)
    val iter = new NDArrayIter(arr)
    // Every batch produced by the iterator should preserve format and shape.
    while (iter.hasNext) {
      val tempArr = iter.next().data
      tempArr.foreach(ele => {
        assert(ele.sparseFormat == SparseFormat.CSR)
        assert(ele.shape == Shape(1, 3))
      })
    }
  }
}
| zhreshold/mxnet | scala-package/core/src/test/scala/org/apache/mxnet/SparseNDArraySuite.scala | Scala | apache-2.0 | 3,301 |
package fs2.internal
import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.TimeoutException
import scala.concurrent.SyncVar
import scala.concurrent.duration._
import fs2.{ Scheduler, Strategy }
// for internal use only!
/**
 * A lazy, trampolined, listener-based future used internally by fs2.
 * Unlike `scala.concurrent.Future` nothing runs until `listen`/`run` is
 * invoked. The five case classes in the companion form the instruction set;
 * `step` normalizes a value to either `Now`, `Async` or `BindAsync` before a
 * callback is attached, keeping evaluation stack-safe.
 */
private[fs2] sealed abstract
class Future[+A] {
  import Future._

  // Monadic bind. Suspends immediately (rather than evaluating) so that long
  // chains of flatMaps are re-associated by `step` without growing the stack.
  def flatMap[B](f: A => Future[B]): Future[B] = this match {
    case Now(a) => Suspend(() => f(a))
    case Suspend(thunk) => BindSuspend(thunk, f)
    case Async(listen) => BindAsync(listen, f)
    case BindSuspend(thunk, g) =>
      Suspend(() => BindSuspend(thunk, g andThen (_ flatMap f)))
    case BindAsync(listen, g) =>
      Suspend(() => BindAsync(listen, g andThen (_ flatMap f)))
  }

  def map[B](f: A => B): Future[B] =
    flatMap(f andThen (b => Future.now(b)))

  // Runs this future, invoking `cb` with the result. `step` guarantees the
  // normalized form is one of the three matched cases.
  def listen(cb: A => Trampoline[Unit]): Unit =
    (this.step: @unchecked) match {
      case Now(a) => cb(a).run
      case Async(onFinish) => onFinish(cb)
      case BindAsync(onFinish, g) =>
        onFinish(x => Trampoline.delay(g(x)) map (_ listen cb))
    }

  // Like `listen`, but checks `cancel` before each continuation; once `cancel`
  // is set the callback is never invoked.
  def listenInterruptibly(cb: A => Trampoline[Unit], cancel: AtomicBoolean): Unit =
    this.stepInterruptibly(cancel) match {
      case Now(a) if !cancel.get => cb(a).run
      case Async(onFinish) if !cancel.get =>
        onFinish(a =>
          if (!cancel.get) cb(a)
          else Trampoline.done(()))
      case BindAsync(onFinish, g) if !cancel.get =>
        onFinish(x =>
          if (!cancel.get) Trampoline.delay(g(x)) map (_ listenInterruptibly (cb, cancel))
          else Trampoline.done(()))
      case _ if cancel.get => ()
    }

  // Evaluate all synchronous suspensions, leaving `Now`, `Async` or `BindAsync`.
  @annotation.tailrec
  final def step: Future[A] = this match {
    case Suspend(thunk) => thunk().step
    case BindSuspend(thunk, f) => (thunk() flatMap f).step
    case _ => this
  }

  /** Like `step`, but may be interrupted by setting `cancel` to true. */
  @annotation.tailrec
  final def stepInterruptibly(cancel: AtomicBoolean): Future[A] =
    if (!cancel.get) this match {
      case Suspend(thunk) => thunk().stepInterruptibly(cancel)
      case BindSuspend(thunk, f) => (thunk() flatMap f).stepInterruptibly(cancel)
      case _ => this
    }
    else this

  def runAsync(cb: A => Unit): Unit =
    listen(a => Trampoline.done(cb(a)))

  def runAsyncInterruptibly(cb: A => Unit, cancel: AtomicBoolean): Unit =
    listenInterruptibly(a => Trampoline.done(cb(a)), cancel)

  // Run synchronously, blocking the calling thread until a result is produced.
  def run: A = this match {
    case Now(a) => a
    case _ => {
      val latch = new java.util.concurrent.CountDownLatch(1)
      @volatile var result: Option[A] = None
      runAsync { a => result = Some(a); latch.countDown }
      latch.await
      result.get
    }
  }

  // Blocking run with a deadline; throws (e.g. TimeoutException) on failure.
  def runFor(timeout: FiniteDuration): A = attemptRunFor(timeout) match {
    case Left(e) => throw e
    case Right(a) => a
  }

  // Blocking run with a deadline; on timeout, sets the interrupt flag so the
  // future stops stepping, and returns Left(TimeoutException).
  def attemptRunFor(timeout: FiniteDuration): Either[Throwable,A] = {
    val sync = new SyncVar[Either[Throwable,A]]
    val interrupt = new AtomicBoolean(false)
    runAsyncInterruptibly(a => sync.put(Right(a)), interrupt)
    sync.get(timeout.toMillis).getOrElse {
      interrupt.set(true)
      Left(new TimeoutException())
    }
  }

  // Non-blocking timeout: races this future against a scheduled timer. The
  // `done` flag ensures exactly one of the two callbacks fires.
  def timed(timeout: FiniteDuration)(implicit S: Strategy, scheduler: Scheduler): Future[Either[Throwable,A]] =
    //instead of run this though chooseAny, it is run through simple primitive,
    //as we are never interested in results of timeout callback, and this is more resource savvy
    async[Either[Throwable,A]] { cb =>
      val cancel = new AtomicBoolean(false)
      val done = new AtomicBoolean(false)
      try {
        scheduler.scheduleOnce(timeout) {
          if (done.compareAndSet(false,true)) {
            cancel.set(true)
            cb(Left(new TimeoutException()))
          }
        }
      } catch { case e: Throwable => cb(Left(e)) }

      runAsyncInterruptibly(a => if(done.compareAndSet(false,true)) cb(Right(a)), cancel)
    } (Strategy.sequential)

  // Delay the start of this future by at least `t`.
  def after(t: FiniteDuration)(implicit S: Strategy, scheduler: Scheduler): Future[A] =
    Future.schedule((), t) flatMap { _ => this }
}
private[fs2] object Future {
  // Instruction set interpreted by Future#step / #listen:
  // Now          - an already-computed value
  // Async        - a value delivered later through a callback registration
  // Suspend      - a deferred synchronous computation
  // BindSuspend  - flatMap over a deferred computation
  // BindAsync    - flatMap over an asynchronous registration
  case class Now[+A](a: A) extends Future[A]
  case class Async[+A](onFinish: (A => Trampoline[Unit]) => Unit) extends Future[A]
  case class Suspend[+A](thunk: () => Future[A]) extends Future[A]
  case class BindSuspend[A,B](thunk: () => Future[A], f: A => Future[B]) extends Future[B]
  case class BindAsync[A,B](onFinish: (A => Trampoline[Unit]) => Unit,
                            f: A => Future[B]) extends Future[B]

  /** Convert a strict value to a `Future`. */
  def now[A](a: A): Future[A] = Now(a)

  // Lazily evaluate `a` when the future is run.
  def delay[A](a: => A): Future[A] = Suspend(() => Now(a))

  // Defer construction of the future itself.
  def suspend[A](f: => Future[A]): Future[A] = Suspend(() => f)

  // Wrap a callback-registration function; the callback is run on strategy `S`.
  def async[A](listen: (A => Unit) => Unit)(implicit S: Strategy): Future[A] =
    Async((cb: A => Trampoline[Unit]) => listen { a => S { cb(a).run } })

  /** Create a `Future` that will evaluate `a` using the given `ExecutorService`. */
  def apply[A](a: => A)(implicit S: Strategy): Future[A] = Async { cb =>
    S { cb(a).run }
  }

  /** Create a `Future` that will evaluate `a` after at least the given delay. */
  def schedule[A](a: => A, delay: FiniteDuration)(implicit S: Strategy, scheduler: Scheduler): Future[A] =
    apply(a)(scheduler.delayedStrategy(delay))
}
| japgolly/scalaz-stream | core/src/main/scala/fs2/internal/Future.scala | Scala | mit | 5,303 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.yarn.security
import java.util.ServiceLoader
import scala.collection.JavaConverters._
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.security.Credentials
import org.apache.spark.SparkConf
import org.apache.spark.deploy.security.HadoopDelegationTokenManager
import org.apache.spark.deploy.yarn.YarnSparkHadoopUtil
import org.apache.spark.rpc.RpcEndpointRef
import org.apache.spark.util.Utils
/**
 * This class loads delegation token providers registered under the YARN-specific
 * [[ServiceCredentialProvider]] interface, as well as the builtin providers defined
 * in [[HadoopDelegationTokenManager]].
 */
private[spark] class YARNHadoopDelegationTokenManager(
    _sparkConf: SparkConf,
    _hadoopConf: Configuration,
    _schedulerRef: RpcEndpointRef)
  extends HadoopDelegationTokenManager(_sparkConf, _hadoopConf, _schedulerRef) {

  // YARN-specific providers discovered through the ServiceLoader mechanism,
  // keyed by service name; disabled services are filtered out up front.
  private val credentialProviders = {
    ServiceLoader.load(classOf[ServiceCredentialProvider], Utils.getContextOrSparkClassLoader)
      .asScala
      .toList
      .filter { p => isServiceEnabled(p.serviceName) }
      .map { p => (p.serviceName, p) }
      .toMap
  }

  if (credentialProviders.nonEmpty) {
    logDebug("Using the following YARN-specific credential providers: " +
      s"${credentialProviders.keys.mkString(", ")}.")
  }

  /**
   * Obtains tokens from the builtin providers (via super) and from every
   * enabled YARN-specific provider that reports credentials are required.
   *
   * @return the earliest renewal time across all providers (min-folded over
   *         the interval returned by the superclass)
   */
  override def obtainDelegationTokens(creds: Credentials): Long = {
    val superInterval = super.obtainDelegationTokens(creds)
    credentialProviders.values.flatMap { provider =>
      if (provider.credentialsRequired(hadoopConf)) {
        provider.obtainCredentials(hadoopConf, sparkConf, creds)
      } else {
        logDebug(s"Service ${provider.serviceName} does not require a token." +
          s" Check your configuration to see if security is disabled or not.")
        None
      }
    }.foldLeft(superInterval)(math.min)
  }

  // For testing.
  override def isProviderLoaded(serviceName: String): Boolean = {
    credentialProviders.contains(serviceName) || super.isProviderLoaded(serviceName)
  }

  // Filesystems whose delegation tokens must be fetched, per YARN config.
  override protected def fileSystemsToAccess(): Set[FileSystem] = {
    YarnSparkHadoopUtil.hadoopFSsToAccess(sparkConf, hadoopConf)
  }
}
| hhbyyh/spark | resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/security/YARNHadoopDelegationTokenManager.scala | Scala | apache-2.0 | 3,036 |
package org.jetbrains.plugins.scala
package lang
package completion3
import com.intellij.application.options.CodeStyle
import org.jetbrains.plugins.scala.util.runners.{RunWithScalaVersions, TestScalaVersion}
//todo: fix for Scala 3
@RunWithScalaVersions(Array(
TestScalaVersion.Scala_2_12
))
class ScalaClausesCompletionTest extends ScalaClausesCompletionTestBase {
override protected def supportedIn(version: ScalaVersion): Boolean = version >= LatestScalaVersions.Scala_2_12
import ScalaCodeInsightTestBase._
import completion.ScalaKeyword.{CASE, MATCH}
import completion.clauses.DirectInheritors.FqnBlockList
def testSyntheticUnapply(): Unit = doPatternCompletionTest(
fileText =
s"""case class Foo(foo: Int = 42)(bar: Int = 42)
|
|Foo()() match {
| case $CARET
|}
""".stripMargin,
resultText =
s"""case class Foo(foo: Int = 42)(bar: Int = 42)
|
|Foo()() match {
| case Foo(foo)$CARET
|}
""".stripMargin,
itemText = "Foo(foo)"
)
def testInnerSyntheticUnapply(): Unit = doPatternCompletionTest(
fileText =
s"""sealed trait Foo
|
|object Foo {
| case class Bar(foo: Int = 42) extends Foo
|}
|
|(_: Foo) match {
| case Foo.B$CARET
|}
""".stripMargin,
resultText =
s"""sealed trait Foo
|
|object Foo {
| case class Bar(foo: Int = 42) extends Foo
|}
|
|(_: Foo) match {
| case Foo.Bar(foo)$CARET
|}
""".stripMargin,
itemText = "Bar(foo)"
)
def testSyntheticUnapplyVararg(): Unit = doPatternCompletionTest(
fileText =
s"""case class Foo(foos: Int*)
|
|Foo() match {
| case $CARET
|}
""".stripMargin,
resultText =
s"""case class Foo(foos: Int*)
|
|Foo() match {
| case Foo(foos@_*)$CARET
|}
""".stripMargin,
itemText = "Foo(foos@_*)"
)
def testUnapply(): Unit = doPatternCompletionTest(
fileText =
s"""trait Foo
|
|object Foo {
| def unapply(foo: Foo): Option[Foo] = None
|}
|
|(_: Foo) match {
| case $CARET
|}
""".stripMargin,
resultText =
s"""trait Foo
|
|object Foo {
| def unapply(foo: Foo): Option[Foo] = None
|}
|
|(_: Foo) match {
| case Foo(foo)$CARET
|}
""".stripMargin,
itemText = "Foo(foo)"
)
def testBeforeCase(): Unit = checkNoBasicCompletion(
fileText =
s"""case class Foo()
|
|Foo() match {
| $CARET
|}
""".stripMargin,
item = "Foo()"
)
def testAfterArrow(): Unit = checkNoBasicCompletion(
fileText =
s"""case class Foo()
|
|Foo() match {
| case _ => $CARET
|}
""".stripMargin,
item = "Foo()"
)
def testNestedPattern(): Unit = doPatternCompletionTest(
fileText =
s"""sealed trait Foo
|
|object Foo {
| case object Bar extends Foo
|
| case class Baz(foo: Foo = Bar) extends Foo
|}
|
|import Foo.Baz
|Baz() match {
| case Baz(null | $CARET)
|}
""".stripMargin,
resultText =
s"""sealed trait Foo
|
|object Foo {
| case object Bar extends Foo
|
| case class Baz(foo: Foo = Bar) extends Foo
|}
|
|import Foo.Baz
|Baz() match {
| case Baz(null | Baz(foo)$CARET)
|}
""".stripMargin,
itemText = "Baz(foo)"
)
def testCollectPattern(): Unit = doPatternCompletionTest(
fileText =
s"""case class Foo(foo: Int = 42)(bar: Int = 42)
|
|Some(Foo()()).collect {
| case $CARET
|}
""".stripMargin,
resultText =
s"""case class Foo(foo: Int = 42)(bar: Int = 42)
|
|Some(Foo()()).collect {
| case Foo(foo)$CARET
|}
""".stripMargin,
itemText = "Foo(foo)"
)
def testNamedPattern(): Unit = doPatternCompletionTest(
fileText =
s"""case class Foo(foo: Int = 42)
|
|Foo() match {
| case foo@$CARET
|}
""".stripMargin,
resultText =
s"""case class Foo(foo: Int = 42)
|
|Foo() match {
| case foo@Foo(foo)$CARET
|}
""".stripMargin,
itemText = "Foo(foo)"
)
def testTuplePattern(): Unit = doPatternCompletionTest(
fileText =
s"""List
|.empty[String]
|.zipWithIndex
|.foreach {
| case tuple@$CARET
|}
""".stripMargin,
resultText =
s"""List
|.empty[String]
|.zipWithIndex
|.foreach {
| case tuple@(str, i)$CARET
|}
""".stripMargin,
itemText = "(str, i)"
)
def testCompleteClause(): Unit = doClauseCompletionTest(
fileText =
s"""sealed trait Foo
|
|final case class Baz() extends Foo
|final case class Bar() extends Foo
|
|Option.empty[Foo].map {
| c$CARET
|}""".stripMargin,
resultText =
s"""sealed trait Foo
|
|final case class Baz() extends Foo
|final case class Bar() extends Foo
|
|Option.empty[Foo].map {
| case Bar() => $CARET
|}""".stripMargin,
itemText = "Bar()"
)
def testCompleteClauseFormatting(): Unit = withCaseAlignment {
doClauseCompletionTest(
fileText =
s"""sealed trait Foo
|
|final case class Bar() extends Foo
|final case class BarBaz() extends Foo
|
|Option.empty[Foo].map {
| c$CARET
| case Bar() => println( 42 ) // rhs should not to be formatted
|}""".stripMargin,
resultText =
s"""sealed trait Foo
|
|final case class Bar() extends Foo
|final case class BarBaz() extends Foo
|
|Option.empty[Foo].map {
| case BarBaz() => $CARET
| case Bar() => println( 42 ) // rhs should not to be formatted
|}""".stripMargin,
itemText = "BarBaz()"
)
}
def testCompleteObjectClause(): Unit = doClauseCompletionTest(
fileText =
s"""sealed trait Foo
|
|final case object Baz extends Foo
|
|Option.empty[Foo].map {
| c$CARET
|}""".stripMargin,
resultText =
s"""sealed trait Foo
|
|final case object Baz extends Foo
|
|Option.empty[Foo].map {
| case Baz => $CARET
|}""".stripMargin,
itemText = "Baz"
)
def testCompleteNamedClause(): Unit = doClauseCompletionTest(
fileText =
s"""sealed trait Foo
|
|final class FooImpl extends Foo
|
|Option.empty[Foo].map {
| c$CARET
|}""".stripMargin,
resultText =
s"""sealed trait Foo
|
|final class FooImpl extends Foo
|
|Option.empty[Foo].map {
| case impl: FooImpl => $CARET
|}""".stripMargin,
itemText = "_: FooImpl"
)
def testCompleteClauseAdjustment(): Unit = doClauseCompletionTest(
fileText =
s"""sealed trait Foo
|
|object Foo {
| final case class Bar() extends Foo
| final case class Baz() extends Foo
|}
|
|import Foo.Baz
|Option.empty[Foo].map {
| c$CARET
|}""".stripMargin,
resultText =
s"""sealed trait Foo
|
|object Foo {
| final case class Bar() extends Foo
| final case class Baz() extends Foo
|}
|
|import Foo.Baz
|Option.empty[Foo].map {
| case Foo.Bar() => $CARET
|}""".stripMargin,
itemText = "Bar()"
)
def testCompleteClauseAdjustmentWithImport(): Unit = doClauseCompletionTest(
fileText =
s"""sealed trait Foo
|
|object Foo {
| final case class Bar() extends Foo
| final case class Baz() extends Foo
|}
|
|import Foo.Baz
|Option.empty[Foo].map {
| c$CARET
|}""".stripMargin,
resultText =
s"""sealed trait Foo
|
|object Foo {
| final case class Bar() extends Foo
| final case class Baz() extends Foo
|}
|
|import Foo.Baz
|Option.empty[Foo].map {
| case Baz() => $CARET
|}""".stripMargin,
itemText = "Baz()"
)
def testCompleteParameterizedClause(): Unit = doClauseCompletionTest(
fileText =
s"""sealed trait Foo
|
|trait FooExt[T] extends Foo
|
|Option.empty[Foo].map {
| c$CARET
|}""".stripMargin,
resultText =
s"""sealed trait Foo
|
|trait FooExt[T] extends Foo
|
|Option.empty[Foo].map {
| case ext: FooExt[_] => $CARET
|}""".stripMargin,
itemText = "_: FooExt[_]"
)
def testCompleteTupleClause(): Unit = doClauseCompletionTest(
fileText =
s"""List
|.empty[String]
|.zipWithIndex
|.foreach {
| c$CARET
|}""".stripMargin,
resultText =
s"""List
|.empty[String]
|.zipWithIndex
|.foreach {
| case (str, i) => $CARET
|}""".stripMargin,
itemText = "(str, i)"
)
def testCompleteClauseBeforeAnother(): Unit = doClauseCompletionTest(
fileText =
s"""List
|.empty[String]
|.zipWithIndex
|.foreach { c$CARET
| case _ =>
|}""".stripMargin,
resultText =
s"""List
|.empty[String]
|.zipWithIndex
|.foreach { case (str, i) => $CARET
|case _ =>
|}""".stripMargin,
itemText = "(str, i)"
)
def testCompleteClauseAfterAnother(): Unit = doClauseCompletionTest(
fileText =
s"""List
|.empty[String]
|.zipWithIndex
|.foreach {
| case _ => c$CARET
|}""".stripMargin,
resultText =
s"""List
|.empty[String]
|.zipWithIndex
|.foreach {
| case _ =>
| case (str, i) => $CARET
|}""".stripMargin,
itemText = "(str, i)"
)
def testCompleteFirstClauseInInfix(): Unit = doClauseCompletionTest(
fileText =
s"""Option.empty[(String, String)] foreach {
| c$CARET
|}""".stripMargin,
resultText =
s"""Option.empty[(String, String)] foreach {
| case (str, str1) => $CARET
|}""".stripMargin,
itemText = "(str, str1)"
)
def testCompleteFirstClauseInPartialFunction(): Unit = doClauseCompletionTest(
fileText =
s"""sealed trait Foo
|case class Bar() extends Foo
|
|val collector: PartialFunction[Foo, Unit] = {
| ca$CARET
|}""".stripMargin,
resultText =
s"""sealed trait Foo
|case class Bar() extends Foo
|
|val collector: PartialFunction[Foo, Unit] = {
| case Bar() => $CARET
|}""".stripMargin,
itemText = "Bar()"
)
def testCompleteSecondClauseInInfix(): Unit = doClauseCompletionTest(
fileText =
s"""Option.empty[(String, String)] foreach {
| case _ =>
| c$CARET
|}""".stripMargin,
resultText =
s"""Option.empty[(String, String)] foreach {
| case _ =>
| case (str, str1) => $CARET
|}""".stripMargin,
itemText = "(str, str1)"
)
def testCompleteFirstClauseInMatch(): Unit = doClauseCompletionTest(
fileText =
s"""("", "") match {
| c$CARET
|}""".stripMargin,
resultText =
s"""("", "") match {
| case (str, str1) => $CARET
|}""".stripMargin,
itemText = "(str, str1)"
)
def testCompleteSecondClauseInMatch(): Unit = doClauseCompletionTest(
fileText =
s"""("", "") match {
| case _ =>
| c$CARET
|}""".stripMargin,
resultText =
s"""("", "") match {
| case _ =>
| case (str, str1) => $CARET
|}""".stripMargin,
itemText = "(str, str1)"
)
def testCompleteSingleLineClause(): Unit = doClauseCompletionTest(
fileText =
s"""Option.empty[(String, String)].map{c$CARET}""".stripMargin,
resultText =
s"""Option.empty[(String, String)].map{ case (str, str1) => $CARET}""".stripMargin,
itemText = "(str, str1)"
)
def testCompleteJavaTypeClause(): Unit = {
this.configureJavaFile(
"public interface Foo",
"Foo"
)
doClauseCompletionTest(
fileText =
s"""class Bar extends Foo
|
|(_: Foo) match {
| ca$CARET
|}""".stripMargin,
resultText =
s"""class Bar extends Foo
|
|(_: Foo) match {
| case bar: Bar => $CARET
|}""".stripMargin,
itemText = "_: Bar"
)
}
def testCompleteWithImportsClause(): Unit = doClauseCompletionTest(
fileText =
s"""import javax.swing.JComponent
|
|(_: JComponent) match {
| c$CARET
|}""".stripMargin,
resultText =
s"""import javax.swing.{JComponent, JTree}
|
|(_: JComponent) match {
| case tree: JTree => $CARET
|}""".stripMargin,
itemText = "_: JTree"
)
def testCompleteInaccessibleClause(): Unit = doClauseCompletionTest(
fileText =
s"""sealed trait Foo
|
|object Foo {
| private case object Bar extends Foo
|}
|
|(_: Foo) match {
| ca$CARET
|}""".stripMargin,
resultText =
s"""sealed trait Foo
|
|object Foo {
| private case object Bar extends Foo
|}
|
|(_: Foo) match {
| case Foo.Bar => $CARET
|}""".stripMargin,
itemText = "Bar",
invocationCount = 2
)
def testNoCompleteInaccessibleClause(): Unit = checkNoCompletion(
fileText =
s"""sealed trait Foo
|
|object Foo {
| private case object Bar extends Foo
|}
|
|(_: Foo) match {
| ca$CARET
|}""".stripMargin
)(isCaseClause(_, "Bar"))
def testNoCompleteClause(): Unit = checkNoCompletion(
fileText =
s"""List.empty[String]
|.zipWithIndex
|.foreach {
| case _ | $CARET
|}""".stripMargin
) {
case LookupString(string) => string.startsWith(CASE)
}
def testSealedTrait(): Unit = doMatchCompletionTest(
fileText =
s"""sealed trait Foo
|
|object FooImpl extends Foo
|
|case class Bar() extends Foo
|
|class Baz extends Foo
|
|object Baz {
| def unapply(baz: Baz) = Option(baz)
|}
|
|(_: Foo) $CARET
""".stripMargin,
resultText =
s"""sealed trait Foo
|
|object FooImpl extends Foo
|
|case class Bar() extends Foo
|
|class Baz extends Foo
|
|object Baz {
| def unapply(baz: Baz) = Option(baz)
|}
|
|(_: Foo) match {
| case FooImpl => $START$CARET???$END
| case Bar() => ???
| case baz: Baz => ???
|}
""".stripMargin
)
def testJavaEnum(): Unit = doMatchCompletionTest(
fileText =
s"""import java.nio.file.FileVisitResult
|
|(_: FileVisitResult) m$CARET
""".stripMargin,
resultText =
s"""import java.nio.file.FileVisitResult
|
|(_: FileVisitResult) match {
| case FileVisitResult.CONTINUE => $START$CARET???$END
| case FileVisitResult.TERMINATE => ???
| case FileVisitResult.SKIP_SUBTREE => ???
| case FileVisitResult.SKIP_SIBLINGS => ???
|}
""".stripMargin
)
def testEmptyJavaEnum(): Unit = {
this.configureJavaFile(
"public enum EmptyEnum {}",
"EmptyEnum"
)
checkNoCompletion(
fileText = s"(_: EmptyEnum) m$CARET"
)(isExhaustiveMatch)
}
def testScalaEnum(): Unit = doMatchCompletionTest(
fileText =
s"""object Margin extends Enumeration {
| type Margin = Value
|
| val TOP, BOTTOM = Value
| val LEFT, RIGHT = Value
|
| private val NULL = Value
|}
|
|(_: Margin.Margin) m$CARET
""".stripMargin,
resultText =
s"""object Margin extends Enumeration {
| type Margin = Value
|
| val TOP, BOTTOM = Value
| val LEFT, RIGHT = Value
|
| private val NULL = Value
|}
|
|(_: Margin.Margin) match {
| case Margin.TOP => $START$CARET???$END
| case Margin.BOTTOM => ???
| case Margin.LEFT => ???
| case Margin.RIGHT => ???
|}
""".stripMargin
)
def testScalaEnum2(): Unit = doMatchCompletionTest(
fileText =
s"""object Margin extends Enumeration {
|
| protected case class Val() extends super.Val
|
| val Top, Bottom = Val()
| val Left, Right = Val()
|}
|
|(_: Margin.Value) m$CARET
""".stripMargin,
resultText =
s"""object Margin extends Enumeration {
|
| protected case class Val() extends super.Val
|
| val Top, Bottom = Val()
| val Left, Right = Val()
|}
|
|(_: Margin.Value) match {
| case Margin.Top => $START$CARET???$END
| case Margin.Bottom => ???
| case Margin.Left => ???
| case Margin.Right => ???
|}
""".stripMargin
)
def testEmptyScalaEnum(): Unit = checkNoCompletion(
fileText =
s"""object Margin extends Enumeration {
| type Margin = Value
|
| private val NULL = Value
|}
|
|(_: Margin.Margin) m$CARET
""".stripMargin
)(isExhaustiveMatch)
def testEmptyScalaEnum2(): Unit = doMatchCompletionTest(
fileText =
s"""object Margin extends Enumeration {
| type Margin = Value
|
| private val NULL = Value
|}
|
|(_: Margin.Margin) m$CARET
""".stripMargin,
resultText =
s"""object Margin extends Enumeration {
| type Margin = Value
|
| private val NULL = Value
|}
|
|(_: Margin.Margin) match {
| case Margin.NULL => $START$CARET???$END
|}
""".stripMargin,
invocationCount = 2
)
def testVarargs(): Unit = doMatchCompletionTest(
fileText =
s"""sealed trait Foo
|
|case class Bar(foos: Foo*) extends Foo
|
|(_: Foo) m$CARET
""".stripMargin,
resultText =
s"""sealed trait Foo
|
|case class Bar(foos: Foo*) extends Foo
|
|(_: Foo) match {
| case Bar(foos@_*) => $START$CARET???$END
|}
""".stripMargin
)
def testNonSealedClass(): Unit = doMatchCompletionTest(
fileText =
s"""trait Foo
|
|class FooImpl extends Foo
|
|object FooImpl {
| def unapply(impl: FooImpl) = Some(impl)
|}
|
|(_: Foo) m$CARET""".stripMargin,
resultText =
s"""trait Foo
|
|class FooImpl extends Foo
|
|object FooImpl {
| def unapply(impl: FooImpl) = Some(impl)
|}
|
|(_: Foo) match {
| case impl: FooImpl => $START$CARET???$END
| case _ => ???
|}""".stripMargin
)
def testMaybe(): Unit = withCaseAlignment {
doMatchCompletionTest(
fileText =
s"""val maybeFoo = Option("foo")
|
|maybeFoo m$CARET
""".stripMargin,
resultText =
s"""val maybeFoo = Option("foo")
|
|maybeFoo match {
| case Some(value) => $START$CARET???$END
| case None => ???
|}
""".stripMargin
)
}
def testList(): Unit = doMatchCompletionTest(
fileText =
s"""(_: List[String]) m$CARET
""".stripMargin,
resultText =
s"""(_: List[String]) match {
| case Nil => $START$CARET???$END
| case ::(head, tl) => ???
|}
""".stripMargin
)
def testTry(): Unit = doMatchCompletionTest(
fileText =
s"""import scala.util.Try
|
|(_: Try[Any]) ma$CARET
""".stripMargin,
resultText =
s"""import scala.util.{Failure, Success, Try}
|
|(_: Try[Any]) match {
| case Failure(exception) => $START$CARET???$END
| case Success(value) => ???
|}
""".stripMargin
)
def testAnonymousInheritor(): Unit = doMatchCompletionTest(
fileText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|val impl = new Foo() {}
|
|(_: Foo) m$CARET
""".stripMargin,
resultText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|val impl = new Foo() {}
|
|(_: Foo) match {
| case Bar() => $START$CARET???$END
| case _ => ???
|}
""".stripMargin
)
def testLowerCaseExtractor(): Unit = doCompletionTest(
fileText =
s"""trait Foo {
|
| object foo {
| def unapply(i: Int) = Some(i)
| }
|}
|
|object Bar extends Foo {
|
| (_: Int) match {
| case f$CARET
| }
|}
""".stripMargin,
resultText =
s"""trait Foo {
|
| object foo {
| def unapply(i: Int) = Some(i)
| }
|}
|
|object Bar extends Foo {
|
| (_: Int) match {
| case foo$CARET
| }
|}
""".stripMargin,
item = "foo"
)
def testExplicitCompanion(): Unit = doMatchCompletionTest(
fileText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|object Bar
|
|(_: Foo) ma$CARET
""".stripMargin,
resultText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|object Bar
|
|(_: Foo) match {
| case Bar() => $START$CARET???$END
|}
""".stripMargin
)
def testInfixExpression(): Unit = doMatchCompletionTest(
fileText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|(_: Foo) ma$CARET
|???
""".stripMargin,
resultText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|(_: Foo) match {
| case Bar() => $START$CARET???$END
|}
|???
""".stripMargin
)
// def testPathDependent(): Unit = doMatchCompletionTest(
// fileText =
// s"""class Foo {
// | object Bar {
// | sealed trait Baz
// |
// | val Impl = new Baz {}
// |
// | object BazImpl extends Baz
// |
// | case class CaseBaz() extends Baz
// |
// | class BazBaz[+T, -U] extends Baz
// |
// | object BazBaz {
// | def unapply[T, U](baz: BazBaz[T, U]) = Option(baz)
// | }
// |
// | (_: Baz) m$CARET
// | }
// |}
// """.stripMargin,
// resultText =
// s"""class Foo {
// | object Bar {
// | sealed trait Baz
// |
// | val Impl = new Baz {}
// |
// | object BazImpl extends Baz
// |
// | case class CaseBaz() extends Baz
// |
// | class BazBaz[+T, -U] extends Baz
// |
// | object BazBaz {
// | def unapply[T, U](baz: BazBaz[T, U]) = Option(baz)
// | }
// |
// | (_: Baz) match {
// | case Bar.BazImpl => $CARET
// | case Bar.CaseBaz() =>
// | case baz: Bar.BazBaz[_, _] =>
// | case _ =>
// | }
// | }
// |}
// """.stripMargin
// )
def testConcreteClass(): Unit = doMatchCompletionTest(
fileText =
s"""sealed class Foo
|
|case class Bar() extends Foo
|
|(_: Foo) ma$CARET
""".stripMargin,
resultText =
s"""sealed class Foo
|
|case class Bar() extends Foo
|
|(_: Foo) match {
| case Bar() => $START$CARET???$END
| case _ => ???
|}
""".stripMargin
)
def testAbstractClass(): Unit = doMatchCompletionTest(
fileText =
s"""sealed abstract class Foo
|
|case class Bar() extends Foo
|
|(_: Foo) ma$CARET
""".stripMargin,
resultText =
s"""sealed abstract class Foo
|
|case class Bar() extends Foo
|
|(_: Foo) match {
| case Bar() => $START$CARET???$END
|}
""".stripMargin
)
// Verifies exhaustive-match completion with the case-branch column-alignment
// code-style option switched on (see withCaseAlignment).
// NOTE(review): the fixture's parameter list is missing a closing ')' --
// `Option[String]: Unit` instead of `Option[String]): Unit`. fileText and
// resultText share the defect consistently, so the test still passes; confirm
// whether completing inside syntactically broken code is intentional here.
def testMatchFormatting(): Unit = withCaseAlignment {
  doMatchCompletionTest(
    fileText =
      s"""def foo(maybeString: Option[String]: Unit = {
         |  maybeString ma$CARET
         |}""".stripMargin,
    resultText =
      s"""def foo(maybeString: Option[String]: Unit = {
         |  maybeString match {
         |    case Some(value) => $START$CARET???$END
         |    case None => ???
         |  }
         |}""".stripMargin
  )
}
def testJavaType(): Unit = {
this.configureJavaFile(
fileText =
"""public interface Foo {
| public static Foo createFoo() {
| return new Foo() {};
| }
|}""".stripMargin,
className = "Foo"
)
doMatchCompletionTest(
fileText =
s"""class Bar() extends Foo
|
|(_: Foo) ma$CARET""".stripMargin,
resultText =
s"""class Bar() extends Foo
|
|(_: Foo) match {
| case bar: Bar => $START$CARET???$END
| case _ => ???
|}""".stripMargin
)
}
def testCompoundType(): Unit = doMatchCompletionTest(
fileText =
s"""sealed trait Foo
|
|object Foo {
| final case class Bar() extends Foo
| final case class Baz() extends Foo
| trait FooExt extends Foo
|}
|
|import Foo._
|(if (true) Left(""): Either[String, Bar] else Left(""): Either[String, Baz]) match {
| case Left(value) =>
| case Right(value) =>
| value m$CARET
|}""".stripMargin,
resultText =
s"""sealed trait Foo
|
|object Foo {
| final case class Bar() extends Foo
| final case class Baz() extends Foo
| trait FooExt extends Foo
|}
|
|import Foo._
|(if (true) Left(""): Either[String, Bar] else Left(""): Either[String, Baz]) match {
| case Left(value) =>
| case Right(value) =>
| value match {
| case Bar() => $START$CARET???$END
| case Baz() => ???
| }
|}""".stripMargin
)
def testInaccessibleInheritors(): Unit = doMatchCompletionTest(
fileText =
s"""sealed trait Foo
|
|object Foo {
| final case class Bar() extends Foo
| private case object Baz extends Foo
|}
|
|(_: Foo) m$CARET""".stripMargin,
resultText =
s"""sealed trait Foo
|
|object Foo {
| final case class Bar() extends Foo
| private case object Baz extends Foo
|}
|
|(_: Foo) match {
| case Foo.Bar() => $START$CARET???$END
| case _ => ???
|}""".stripMargin
)
def testInaccessibleInheritors2(): Unit = doMatchCompletionTest(
fileText =
s"""sealed trait Foo
|
|object Foo {
| final case class Bar() extends Foo
| private case object Baz extends Foo
|}
|
|(_: Foo) m$CARET""".stripMargin,
resultText =
s"""sealed trait Foo
|
|object Foo {
| final case class Bar() extends Foo
| private case object Baz extends Foo
|}
|
|(_: Foo) match {
| case Foo.Bar() => $START$CARET???$END
| case Foo.Baz => ???
|}""".stripMargin,
invocationCount = 2
)
def testNonSealedInheritorsThreshold(): Unit = checkNoCompletion(
fileText =
s"""trait Foo
|class Bar1 extends Foo
|class Bar2 extends Foo
|class Bar3 extends Foo
|class Bar4 extends Foo
|class Bar5 extends Foo
|
|(_: Foo) ma$CARET""".stripMargin,
)(isExhaustiveMatch)
def testCaseInFunction(): Unit = doCaseCompletionTest(
fileText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|(_: Option[Foo]).map {
| ca$CARET
|}
""".stripMargin,
resultText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|(_: Option[Foo]).map {
| case Bar() => $START$CARET???$END
|}
""".stripMargin
)
def testCaseInPartialFunction(): Unit = doCaseCompletionTest(
fileText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|(_: Option[Foo]).collect {
| ca$CARET
|}
""".stripMargin,
resultText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|(_: Option[Foo]).collect {
| case Bar() => $START$CARET???$END
|}
""".stripMargin
)
def testCaseInMatch(): Unit = doCaseCompletionTest(
fileText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|(_: Foo) match {
| ca$CARET
|}
""".stripMargin,
resultText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|(_: Foo) match {
| case Bar() => $START$CARET???$END
|}
""".stripMargin
)
def testNoCaseInFunction(): Unit = checkNoCompletion(
fileText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|(_: Option[Foo]).map {
| case _ =>
| ca$CARET
|}
""".stripMargin,
)(isExhaustiveCase)
def testNoCaseInPartialFunction(): Unit = checkNoCompletion(
fileText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|(_: Option[Foo]).collect {
| case _ =>
| ca$CARET
|}
""".stripMargin,
)(isExhaustiveCase)
def testNoCaseInMatch(): Unit = checkNoCompletion(
fileText =
s"""sealed trait Foo
|
|case class Bar() extends Foo
|
|(_: Foo) match {
| case _ =>
| ca$CARET
|}
""".stripMargin,
)(isExhaustiveCase)
def testFqnBlockList(): Unit = for {
fqn <- FqnBlockList
} checkNoCompletion(s"(_: $fqn) m$CARET")(isExhaustiveMatch)
def testQualifiedReference(): Unit = checkNoCompletion(
fileText =
s"""sealed trait Foo
|final case class Bar() extends Foo
|
|(_: Foo) match {
| case bar: Bar =>
| bar.$CARET
|}""".stripMargin
) {
case LookupString(string) =>
string.startsWith(MATCH) || string.startsWith(CASE)
}
/** Runs `doTest` with the Scala code-style option ALIGN_IN_COLUMNS_CASE_BRANCH
 * enabled, restoring whatever value the setting had before — even when the
 * test body throws. */
private def withCaseAlignment(doTest: => Unit): Unit = {
  val scalaSettings = CodeStyle.getSettings(getProject)
    .getCustomSettings(classOf[formatting.settings.ScalaCodeStyleSettings])
  val previous = scalaSettings.ALIGN_IN_COLUMNS_CASE_BRANCH
  try {
    scalaSettings.ALIGN_IN_COLUMNS_CASE_BRANCH = true
    doTest
  } finally scalaSettings.ALIGN_IN_COLUMNS_CASE_BRANCH = previous
}
// private def doMultipleCompletionTest(fileText: String,
// items: String*): Unit =
// super.doMultipleCompletionTest(fileText, BASIC, DEFAULT_TIME, items.size) { lookup =>
// items.contains(lookup.getLookupString)
// }
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/completion3/ScalaClausesCompletionTest.scala | Scala | apache-2.0 | 33,407 |
/*
* Copyright 2015 ligaDATA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ligadata.pmml.transforms.rawtocooked.ruleset
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.Queue
import scala.util.control.NonFatal
import com.ligadata.kamanja.metadata.MdMgr._
import com.ligadata.kamanja.metadata._
import com.ligadata.pmml.compiler._
import com.ligadata.pmml.support._
import com.ligadata.pmml.traits._
import com.ligadata.pmml.syntaxtree.raw.common._
import com.ligadata.pmml.syntaxtree.cooked.common._
/** Generator that cooks a raw PmmlRuleSetModel element into an xRuleSetModel exec node. */
class RuleSetModelPmmlExecNodeGenerator(val ctx : PmmlContext) extends PmmlExecNodeGenerator with com.ligadata.pmml.compiler.LogTrait {
  /**
      Construct a PmmlExecNode appropriate for the PmmlNode supplied  In some cases no node is returned
      (i.e., None).  This can happen when the PmmlNode content is subsumed by the parent node.  See DataField
      handling for an example where the DataNode content is added to the parent DataDictionary.

      @param dispatcher: PmmlExecNodeGeneratorDispatch
      @param qName: String (the original element name from the PMML)
      @param pmmlnode:PmmlNode
      @return optionally an appropriate PmmlExecNode or None
   */
  def make(dispatcher : PmmlExecNodeGeneratorDispatch, qName : String, pmmlnode : PmmlNode) : Option[PmmlExecNode] = {
    // Defensive down-cast: a null result marks "wrong or absent input".
    // A wrongly-typed node is logged as an error; a null input is ignored.
    val node : PmmlRuleSetModel = if (pmmlnode != null && pmmlnode.isInstanceOf[PmmlRuleSetModel]) {
      pmmlnode.asInstanceOf[PmmlRuleSetModel]
    } else {
      if (pmmlnode != null) {
        PmmlError.logError(ctx, s"For $qName, expecting a PmmlRuleSetModel... got a ${pmmlnode.getClass.getName}... check PmmlExecNode generator initialization")
      }
      null
    }
    val xnode : Option[PmmlExecNode] = if (node != null) {
      /** Gather content from RuleSetModel attributes for later use by the Scala code generator */
      ctx.pmmlTerms("ModelName") = Some(node.modelName)
      ctx.pmmlTerms("FunctionName") = Some(node.functionName)
      Some(new xRuleSetModel(node.lineNumber, node.columnNumber, node.modelName, node.functionName, node.algorithmName, node.isScorable))
    } else {
      None
    }
    xnode
  }
}
/** Generator that cooks a raw PmmlRuleSet element into an xRuleSet exec node and
 * records its default score on the enclosing RuleSetModel. */
class RuleSetPmmlExecNodeGenerator(val ctx : PmmlContext) extends PmmlExecNodeGenerator with com.ligadata.pmml.compiler.LogTrait {
  /**
      Construct a PmmlExecNode appropriate for the PmmlNode supplied  In some cases no node is returned
      (i.e., None).  This can happen when the PmmlNode content is subsumed by the parent node.  See DataField
      handling for an example where the DataNode content is added to the parent DataDictionary.

      @param dispatcher: PmmlExecNodeGeneratorDispatch
      @param qName: String (the original element name from the PMML)
      @param pmmlnode:PmmlNode
      @return optionally an appropriate PmmlExecNode or None
   */
  def make(dispatcher : PmmlExecNodeGeneratorDispatch, qName : String, pmmlnode : PmmlNode) : Option[PmmlExecNode] = {
    // Defensive down-cast: a null result marks "wrong or absent input".
    val node : PmmlRuleSet = if (pmmlnode != null && pmmlnode.isInstanceOf[PmmlRuleSet]) {
      pmmlnode.asInstanceOf[PmmlRuleSet]
    } else {
      if (pmmlnode != null) {
        PmmlError.logError(ctx, s"For $qName, expecting a PmmlRuleSet... got a ${pmmlnode.getClass.getName}... check PmmlExecNode generator initialization")
      }
      null
    }
    if (node != null) {
      // Propagate the default score to the enclosing RuleSetModel (parent on the
      // exec-node stack). A typed pattern replaces the previous unchecked cast:
      // an unexpectedly-typed parent is now skipped instead of throwing a CCE.
      ctx.pmmlExecNodeStack.top match {
        case Some(rsm : xRuleSetModel) => rsm.DefaultScore(node.defaultScore)
        case _ => // no RuleSetModel parent available; nothing to record
      }
      Some(new xRuleSet(node.lineNumber, node.columnNumber, node.recordCount, node.nbCorrect, node.defaultScore, node.defaultConfidence))
    } else {
      None
    }
  }
}
/** Generator that cooks a raw PmmlSimpleRule element into an xSimpleRule exec node
 * and registers the rule with the enclosing RuleSetModel. */
class SimpleRulePmmlExecNodeGenerator(val ctx : PmmlContext) extends PmmlExecNodeGenerator with com.ligadata.pmml.compiler.LogTrait {
  /**
      Construct a PmmlExecNode appropriate for the PmmlNode supplied  In some cases no node is returned
      (i.e., None).  This can happen when the PmmlNode content is subsumed by the parent node.  See DataField
      handling for an example where the DataNode content is added to the parent DataDictionary.

      @param dispatcher: PmmlExecNodeGeneratorDispatch
      @param qName: String (the original element name from the PMML)
      @param pmmlnode:PmmlNode
      @return optionally an appropriate PmmlExecNode or None
   */
  def make(dispatcher : PmmlExecNodeGeneratorDispatch, qName : String, pmmlnode : PmmlNode) : Option[PmmlExecNode] = {
    // Defensive down-cast: a null result marks "wrong or absent input".
    val node : PmmlSimpleRule = if (pmmlnode != null && pmmlnode.isInstanceOf[PmmlSimpleRule]) {
      pmmlnode.asInstanceOf[PmmlSimpleRule]
    } else {
      if (pmmlnode != null) {
        PmmlError.logError(ctx, s"For $qName, expecting a PmmlSimpleRule... got a ${pmmlnode.getClass.getName}... check PmmlExecNode generator initialization")
      }
      null
    }
    if (node != null) {
      val id : Option[String] = Some(node.id)
      // recordCount / nbCorrect / confidence / weight start at 0.0 and are
      // overwritten below when the raw string attributes parse as doubles.
      val rule : xSimpleRule = new xSimpleRule(node.lineNumber, node.columnNumber, id
                          , node.score
                          , 0.0 /** recordCount */
                          , 0.0 /** nbCorrect */
                          , 0.0 /** confidence */
                          , 0.0) /** weight */
      // The collecting RuleSetModel is the grandparent on the exec-node stack
      // (index 1). A typed pattern replaces the previous type-erased
      // asInstanceOf[Option[xRuleSetModel]] cast, which was unchecked.
      ctx.pmmlExecNodeStack.apply(1) match {
        case Some(rsm : xRuleSetModel) =>
          try {
            rule.RecordCount(node.recordCount.toDouble)
            rule.CorrectCount(node.nbCorrect.toDouble)
            rule.Confidence(node.confidence.toDouble)
            rule.Weight(node.weight.toDouble)
          } catch {
            // NonFatal (was: Throwable) so OutOfMemoryError and friends still
            // propagate; unparseable attributes simply keep their 0.0 defaults.
            case NonFatal(_) => ctx.logger.debug (s"Unable to coerce one or more mining 'double' fields... name = $id")
          }
          rsm.addRule (rule)
        case _ => // no (or unexpectedly typed) RuleSetModel; rule is still returned
      }
      Some(rule)
    } else {
      None
    }
  }
}
/** Generator for the RuleSelectionMethod element. Its content is folded into the
 * enclosing RuleSetModel; no exec node of its own is ever produced. */
class RuleSelectionMethodPmmlExecNodeGenerator(val ctx : PmmlContext) extends PmmlExecNodeGenerator with com.ligadata.pmml.compiler.LogTrait {
  /**
      Construct a PmmlExecNode appropriate for the PmmlNode supplied  In some cases no node is returned
      (i.e., None).  This can happen when the PmmlNode content is subsumed by the parent node.  See DataField
      handling for an example where the DataNode content is added to the parent DataDictionary.

      @param dispatcher: PmmlExecNodeGeneratorDispatch
      @param qName: String (the original element name from the PMML)
      @param pmmlnode:PmmlNode
      @return optionally an appropriate PmmlExecNode or None
   */
  def make(dispatcher : PmmlExecNodeGeneratorDispatch, qName : String, pmmlnode : PmmlNode) : Option[PmmlExecNode] = {
    // Defensive down-cast: a null result marks "wrong or absent input".
    val node : PmmlRuleSelectionMethod = if (pmmlnode != null && pmmlnode.isInstanceOf[PmmlRuleSelectionMethod]) {
      pmmlnode.asInstanceOf[PmmlRuleSelectionMethod]
    } else {
      if (pmmlnode != null) {
        PmmlError.logError(ctx, s"For $qName, expecting a PmmlRuleSelectionMethod... got a ${pmmlnode.getClass.getName}... check PmmlExecNode generator initialization")
      }
      null
    }
    if (node != null) {
      // The RuleSetModel is the grandparent on the exec-node stack (index 1).
      // A typed pattern replaces the previous unchecked asInstanceOf cast.
      ctx.pmmlExecNodeStack.apply(1) match {
        case Some(mf : xRuleSetModel) =>
          mf.addRuleSetSelectionMethod(new xRuleSelectionMethod(node.lineNumber, node.columnNumber, node.criterion))
        case _ => // no RuleSetModel available; nothing to record
      }
    }
    // Content is subsumed by the parent model: always None, as before.
    None
  }
}
| traytonwhite/Kamanja | trunk/Pmml/PmmlCompiler/src/main/scala/com/ligadata/pmml/transforms/rawtocooked/ruleset/RuleSetXNodes.scala | Scala | apache-2.0 | 8,041 |
package immortan.utils
import rx.lang.scala.Observable
import rx.lang.scala.schedulers.IOScheduler
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
/** Small composition helpers over RxScala Observables. */
object Rx {
  /** Wraps an (already running) Future into an Observable observed on the IO scheduler. */
  def fromFutureOnIo[T](future: Future[T] = null)(implicit ec: ExecutionContext): Observable[T] =
    Observable.from(future).observeOn(IOScheduler.apply)

  /** Emits the first item of each burst right away (throttleFirst) plus the last
   * item once the stream has been quiet for `window` (debounce), dropping
   * consecutive duplicates; delivered on the IO scheduler. */
  def uniqueFirstAndLastWithinWindow[T](obs: Observable[T], window: Duration): Observable[T] =
    obs.throttleFirst(window).merge(obs debounce window).distinctUntilChanged.observeOn(IOScheduler.apply)

  /** Subscribes to `next` once `timeoutMillis` have elapsed since `startMillis`,
   * always waiting at least `preStartMsec`. A `startMillis` in the future is
   * treated as 0 so clock skew cannot produce an enormous delay. */
  def initDelay[T](next: Observable[T], startMillis: Long, timeoutMillis: Long, preStartMsec: Long = 10): Observable[T] = {
    val futureProtectedStartMillis = if (startMillis > System.currentTimeMillis) 0L else startMillis
    val adjustedTimeout = futureProtectedStartMillis + timeoutMillis - System.currentTimeMillis
    val delayLeft = if (adjustedTimeout < preStartMsec) preStartMsec else adjustedTimeout
    Observable.just(null).delay(delayLeft.millis).flatMap(_ => next)
  }

  /** Resubscribes on error, waiting `pick(error, attempt)` before each attempt;
   * the number of retries is bounded by the length of `times`. */
  def retry[T](obs: Observable[T], pick: (Throwable, Int) => Duration, times: Range): Observable[T] =
    obs.retryWhen(_.zipWith(Observable from times)(pick) flatMap Observable.timer)

  /** Resubscribes on completion, waiting `pick((), iteration)` between runs;
   * the number of repeats is bounded by the length of `times`. */
  def repeat[T](obs: Observable[T], pick: (Unit, Int) => Duration, times: Range): Observable[T] =
    obs.repeatWhen(_.zipWith(Observable from times)(pick) flatMap Observable.timer)

  /** Single-item observable used to shunt subsequent work onto the IO scheduler. */
  def ioQueue: Observable[Null] = Observable.just(null).subscribeOn(IOScheduler.apply)

  // Linear back-off step functions, shaped to be used as `pick` above.
  def incMinute(errorOrUnit: Any, next: Int): Duration = next.minutes
  def incSec(errorOrUnit: Any, next: Int): Duration = next.seconds
  def incHour(errorOrUnit: Any, next: Int): Duration = next.hours
}
| btcontract/wallet | app/src/main/java/immortan/utils/Rx.scala | Scala | apache-2.0 | 1,709 |
package dit4c.scheduler.domain
case class ClusterInfo(
displayName: String,
active: Boolean,
supportsSave: Boolean) | dit4c/dit4c | dit4c-scheduler/src/main/scala/dit4c/scheduler/domain/ClusterInfo.scala | Scala | mit | 128 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.tools
import java.io.PrintStream
import java.nio.charset.StandardCharsets
import java.util.concurrent.CountDownLatch
import java.util.{Locale, Properties, Random}
import com.typesafe.scalalogging.LazyLogging
import joptsimple._
import kafka.api.OffsetRequest
import kafka.common.{MessageFormatter, StreamEndException}
import kafka.consumer._
import kafka.message._
import kafka.metrics.KafkaMetricsReporter
import kafka.utils._
import kafka.utils.Implicits._
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.common.errors.{AuthenticationException, WakeupException}
import org.apache.kafka.common.record.TimestampType
import org.apache.kafka.common.serialization.Deserializer
import org.apache.kafka.common.utils.Utils
import scala.collection.JavaConverters._
/**
* Consumer that dumps messages to standard out.
*/
object ConsoleConsumer extends Logging {
var messageCount = 0
private val shutdownLatch = new CountDownLatch(1)
/** Entry point: parses the command line into a ConsumerConfig and runs the
 * consume loop. Exits with status 1 on authentication failure or on any other
 * unexpected error. */
def main(args: Array[String]) {
  val conf = new ConsumerConfig(args)
  try {
    run(conf)
  } catch {
    // Authentication problems get a distinct message, but both paths terminate
    // the process with a non-zero exit code.
    case e: AuthenticationException =>
      error("Authentication failed: terminating consumer process", e)
      Exit.exit(1)
    case e: Throwable =>
      error("Unknown error when running consumer: ", e)
      Exit.exit(1)
  }
}
/** Builds either the old (ZooKeeper-based) or the new consumer from `conf`,
 * registers a shutdown hook, pumps records to stdout via `process`, and
 * finally cleans up the consumer, the formatter and transient ZK group data. */
def run(conf: ConsumerConfig) {
  val consumer =
    if (conf.useOldConsumer) {
      checkZk(conf)
      val props = getOldConsumerProps(conf)
      checkAndMaybeDeleteOldPath(conf, props)
      new OldConsumer(conf.filterSpec, props)
    } else {
      // A negative configured timeout means "wait forever".
      val timeoutMs = if (conf.timeoutMs >= 0) conf.timeoutMs else Long.MaxValue
      if (conf.partitionArg.isDefined)
        new NewShinyConsumer(Option(conf.topicArg), conf.partitionArg, Option(conf.offsetArg), None, getNewConsumerProps(conf), timeoutMs)
      else
        new NewShinyConsumer(Option(conf.topicArg), None, None, Option(conf.whitelistArg), getNewConsumerProps(conf), timeoutMs)
    }
  addShutdownHook(consumer, conf)
  try {
    process(conf.maxMessages, conf.formatter, consumer, System.out, conf.skipMessageOnError)
  } finally {
    consumer.cleanup()
    conf.formatter.close()
    reportRecordCount()
    // if we generated a random group id (as none specified explicitly) then avoid polluting zookeeper with persistent group data, this is a hack
    if (conf.useOldConsumer && !conf.groupIdPassed)
      ZkUtils.maybeDeletePath(conf.options.valueOf(conf.zkConnectOpt), "/consumers/" + conf.consumerProps.get("group.id"))
    // Unblocks the shutdown hook, which waits for this cleanup to finish.
    shutdownLatch.countDown()
  }
}
/** Pre-flight checks for the old consumer: exits with status 1 when no brokers
 * are registered in ZooKeeper, or when --from-beginning is combined with
 * previously committed offsets without --delete-consumer-offsets. */
def checkZk(config: ConsumerConfig) {
  if (!checkZkPathExists(config.options.valueOf(config.zkConnectOpt), "/brokers/ids")) {
    System.err.println("No brokers found in ZK.")
    Exit.exit(1)
  }

  // Refuse to silently ignore existing offsets when a reset was requested.
  if (!config.options.has(config.deleteConsumerOffsetsOpt) && config.options.has(config.resetBeginningOpt) &&
    checkZkPathExists(config.options.valueOf(config.zkConnectOpt), "/consumers/" + config.consumerProps.getProperty("group.id") + "/offsets")) {
    System.err.println("Found previous offset information for this group " + config.consumerProps.getProperty("group.id")
      + ". Please use --delete-consumer-offsets to delete previous offsets metadata")
    Exit.exit(1)
  }
}
/** Registers a JVM shutdown hook that stops the consumer and then waits (via
 * `shutdownLatch`) for `run`'s finally-block to complete its cleanup. */
def addShutdownHook(consumer: BaseConsumer, conf: ConsumerConfig) {
  Runtime.getRuntime.addShutdownHook(new Thread() {
    override def run() {
      consumer.stop()

      shutdownLatch.await()

      if (conf.enableSystestEventsLogging) {
        // Marker line consumed by system tests to detect a clean shutdown.
        System.out.println("shutdown_complete")
      }
    }
  })
}
/** Main consume loop: receives up to `maxMessages` records (-1 = unbounded),
 * renders each through `formatter` onto `output`, and returns on stream end,
 * consumer wakeup, a receive error, or a broken output stream.
 *
 * @param skipMessageOnError when true, a record that fails to format is logged
 *                           and skipped instead of aborting the loop */
def process(maxMessages: Integer, formatter: MessageFormatter, consumer: BaseConsumer, output: PrintStream, skipMessageOnError: Boolean) {
  while (messageCount < maxMessages || maxMessages == -1) {
    val msg: BaseConsumerRecord = try {
      consumer.receive()
    } catch {
      case _: StreamEndException =>
        trace("Caught StreamEndException because consumer is shutdown, ignore and terminate.")
        // Consumer is already closed
        return
      case _: WakeupException =>
        trace("Caught WakeupException because consumer is shutdown, ignore and terminate.")
        // Consumer will be closed
        return
      case e: Throwable =>
        error("Error processing message, terminating consumer process: ", e)
        // Consumer will be closed
        return
    }
    messageCount += 1
    try {
      // The three literal zeros fill the checksum / serialized key & value size
      // slots of ConsumerRecord -- presumably unused by formatters; confirm.
      formatter.writeTo(new ConsumerRecord(msg.topic, msg.partition, msg.offset, msg.timestamp,
                                           msg.timestampType, 0, 0, 0, msg.key, msg.value, msg.headers), output)
    } catch {
      case e: Throwable =>
        if (skipMessageOnError) {
          error("Error processing message, skipping this message: ", e)
        } else {
          // Consumer will be closed
          throw e
        }
    }
    if (checkErr(output, formatter)) {
      // Consumer will be closed
      return
    }
  }
}
/** Prints the total number of processed records to stderr. */
def reportRecordCount() {
  System.err.println(s"Processed a total of $messageCount messages")
}
/** Returns true when `output` has entered an error state, i.e. nobody is
 * reading our stdout any more and the caller should shut the consumer down.
 * `formatter` is unused but kept for interface compatibility. */
def checkErr(output: PrintStream, formatter: MessageFormatter): Boolean =
  if (output.checkError()) {
    // Our reader went away; tell the caller to stop the consumer.
    System.err.println("Unable to write to standard out, closing consumer.")
    true
  } else false
/** Assembles the properties for the old (ZooKeeper-based) consumer: base config
 * plus extra overrides, the resolved auto.offset.reset value, the ZK connect
 * string, and an optional consumer timeout (only when non-negative). */
def getOldConsumerProps(config: ConsumerConfig): Properties = {
  val props = new Properties

  props ++= config.consumerProps
  props ++= config.extraConsumerProps
  setAutoOffsetResetValue(config, props)
  props.put("zookeeper.connect", config.zkConnectionStr)

  if (config.timeoutMs >= 0)
    props.put("consumer.timeout.ms", config.timeoutMs.toString)

  props
}
/** When --delete-consumer-offsets was given, removes the group's ZK subtree.
 * Otherwise, refuses (exit status 1) to combine a reset-to-beginning with
 * previously committed offsets, so stale offsets are never silently reused. */
def checkAndMaybeDeleteOldPath(config: ConsumerConfig, props: Properties) = {
  val consumerGroupBasePath = "/consumers/" + props.getProperty("group.id")
  if (config.options.has(config.deleteConsumerOffsetsOpt)) {
    ZkUtils.maybeDeletePath(config.options.valueOf(config.zkConnectOpt), consumerGroupBasePath)
  } else {
    // "smallest" is the old consumer's spelling of reset-to-beginning.
    val resetToBeginning = OffsetRequest.SmallestTimeString == props.getProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)
    if (resetToBeginning && checkZkPathExists(config.options.valueOf(config.zkConnectOpt), consumerGroupBasePath + "/offsets")) {
      System.err.println("Found previous offset information for this group " + props.getProperty("group.id")
        + ". Please use --delete-consumer-offsets to delete previous offsets metadata")
      Exit.exit(1)
    }
  }
}
/** Assembles the properties for the new consumer: base config plus extra
 * overrides, resolved auto.offset.reset, bootstrap servers, byte-array
 * deserializers for key and value, and the requested isolation level. */
def getNewConsumerProps(config: ConsumerConfig): Properties = {
  val props = new Properties

  props ++= config.consumerProps
  props ++= config.extraConsumerProps
  setAutoOffsetResetValue(config, props)
  props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, config.bootstrapServer)
  props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer")
  props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.ByteArrayDeserializer")
  props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, config.isolationLevel)

  props
}
/**
 * Used by both getNewConsumerProps and getOldConsumerProps to retrieve the correct value for the
 * consumer parameter 'auto.offset.reset'.
 *
 * Order of priority is:
 *   1. Explicitly set parameter via --consumer.property command line parameter
 *   2. Explicit --from-beginning given -> 'earliest'
 *   3. Default value of 'latest'
 *
 * In case both --from-beginning and an explicit value are specified an error is thrown if these
 * are conflicting.
 */
def setAutoOffsetResetValue(config: ConsumerConfig, props: Properties) {
  // The old consumer spells the reset values differently ("smallest"/"largest").
  val (earliestConfigValue, latestConfigValue) = if (config.useOldConsumer)
    (OffsetRequest.SmallestTimeString, OffsetRequest.LargestTimeString)
  else
    ("earliest", "latest")

  if (props.containsKey(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)) {
    // auto.offset.reset parameter was specified on the command line
    val autoResetOption = props.getProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG)
    if (config.options.has(config.resetBeginningOpt) && earliestConfigValue != autoResetOption) {
      // conflicting options - latest and earliest, throw an error
      System.err.println(s"Can't simultaneously specify --from-beginning and 'auto.offset.reset=$autoResetOption', " +
        "please remove one option")
      Exit.exit(1)
    }
    // nothing to do, checking for valid parameter values happens later and the specified
    // value was already copied during the putAll operation
  } else {
    // no explicit value for auto.offset.reset was specified
    // if --from-beginning was specified use earliest, otherwise default to latest
    val autoResetOption = if (config.options.has(config.resetBeginningOpt)) earliestConfigValue else latestConfigValue
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoResetOption)
  }
}
/**
 * Command line configuration for the console consumer.
 *
 * NOTE: constructing an instance is side-effecting: it parses and validates `args`
 * (printing usage and exiting the process on error), instantiates and initializes the
 * message formatter, and may start the CSV metrics reporters.
 */
class ConsumerConfig(args: Array[String]) {
  val parser = new OptionParser(false)
  val topicIdOpt = parser.accepts("topic", "The topic id to consume on.")
    .withRequiredArg
    .describedAs("topic")
    .ofType(classOf[String])
  val whitelistOpt = parser.accepts("whitelist", "Whitelist of topics to include for consumption.")
    .withRequiredArg
    .describedAs("whitelist")
    .ofType(classOf[String])
  val blacklistOpt = parser.accepts("blacklist", "Blacklist of topics to exclude from consumption.")
    .withRequiredArg
    .describedAs("blacklist")
    .ofType(classOf[String])
  val partitionIdOpt = parser.accepts("partition", "The partition to consume from. Consumption " +
    "starts from the end of the partition unless '--offset' is specified.")
    .withRequiredArg
    .describedAs("partition")
    .ofType(classOf[java.lang.Integer])
  val offsetOpt = parser.accepts("offset", "The offset id to consume from (a non-negative number), or 'earliest' which means from beginning, or 'latest' which means from end")
    .withRequiredArg
    .describedAs("consume offset")
    .ofType(classOf[String])
    .defaultsTo("latest")
  val zkConnectOpt = parser.accepts("zookeeper", "REQUIRED (only when using old consumer): The connection string for the zookeeper connection in the form host:port. " +
    "Multiple URLS can be given to allow fail-over.")
    .withRequiredArg
    .describedAs("urls")
    .ofType(classOf[String])
  val consumerPropertyOpt = parser.accepts("consumer-property", "A mechanism to pass user-defined properties in the form key=value to the consumer.")
    .withRequiredArg
    .describedAs("consumer_prop")
    .ofType(classOf[String])
  val consumerConfigOpt = parser.accepts("consumer.config", s"Consumer config properties file. Note that ${consumerPropertyOpt} takes precedence over this config.")
    .withRequiredArg
    .describedAs("config file")
    .ofType(classOf[String])
  val messageFormatterOpt = parser.accepts("formatter", "The name of a class to use for formatting kafka messages for display.")
    .withRequiredArg
    .describedAs("class")
    .ofType(classOf[String])
    .defaultsTo(classOf[DefaultMessageFormatter].getName)
  val messageFormatterArgOpt = parser.accepts("property", "The properties to initialize the message formatter.")
    .withRequiredArg
    .describedAs("prop")
    .ofType(classOf[String])
  val deleteConsumerOffsetsOpt = parser.accepts("delete-consumer-offsets", "If specified, the consumer path in zookeeper is deleted when starting up")
  val resetBeginningOpt = parser.accepts("from-beginning", "If the consumer does not already have an established offset to consume from, " +
    "start with the earliest message present in the log rather than the latest message.")
  val maxMessagesOpt = parser.accepts("max-messages", "The maximum number of messages to consume before exiting. If not set, consumption is continual.")
    .withRequiredArg
    .describedAs("num_messages")
    .ofType(classOf[java.lang.Integer])
  val timeoutMsOpt = parser.accepts("timeout-ms", "If specified, exit if no message is available for consumption for the specified interval.")
    .withRequiredArg
    .describedAs("timeout_ms")
    .ofType(classOf[java.lang.Integer])
  val skipMessageOnErrorOpt = parser.accepts("skip-message-on-error", "If there is an error when processing a message, " +
    "skip it instead of halt.")
  val csvMetricsReporterEnabledOpt = parser.accepts("csv-reporter-enabled", "If set, the CSV metrics reporter will be enabled")
  val metricsDirectoryOpt = parser.accepts("metrics-dir", "If csv-reporter-enable is set, and this parameter is" +
    "set, the csv metrics will be output here")
    .withRequiredArg
    .describedAs("metrics directory")
    .ofType(classOf[java.lang.String])
  val newConsumerOpt = parser.accepts("new-consumer", "Use the new consumer implementation. This is the default, so " +
    "this option is deprecated and will be removed in a future release.")
  val bootstrapServerOpt = parser.accepts("bootstrap-server", "REQUIRED (unless old consumer is used): The server to connect to.")
    .withRequiredArg
    .describedAs("server to connect to")
    .ofType(classOf[String])
  val keyDeserializerOpt = parser.accepts("key-deserializer")
    .withRequiredArg
    .describedAs("deserializer for key")
    .ofType(classOf[String])
  val valueDeserializerOpt = parser.accepts("value-deserializer")
    .withRequiredArg
    .describedAs("deserializer for values")
    .ofType(classOf[String])
  val enableSystestEventsLoggingOpt = parser.accepts("enable-systest-events",
    "Log lifecycle events of the consumer in addition to logging consumed " +
    "messages. (This is specific for system tests.)")
  val isolationLevelOpt = parser.accepts("isolation-level",
    "Set to read_committed in order to filter out transactional messages which are not committed. Set to read_uncommitted" +
    "to read all messages.")
    .withRequiredArg()
    .ofType(classOf[String])
    .defaultsTo("read_uncommitted")
  val groupIdOpt = parser.accepts("group", "The consumer group id of the consumer.")
    .withRequiredArg
    .describedAs("consumer group id")
    .ofType(classOf[String])

  // ---- Everything below runs at construction time. ----

  if (args.length == 0)
    CommandLineUtils.printUsageAndDie(parser, "The console consumer is a tool that reads data from Kafka and outputs it to standard output.")
  var groupIdPassed = true
  val options: OptionSet = tryParse(parser, args)
  // Presence of --zookeeper selects the deprecated old (zookeeper-based) consumer.
  val useOldConsumer = options.has(zkConnectOpt)
  val enableSystestEventsLogging = options.has(enableSystestEventsLoggingOpt)
  // If using old consumer, exactly one of whitelist/blacklist/topic is required.
  // If using new consumer, topic must be specified.
  var topicArg: String = null
  var whitelistArg: String = null
  var filterSpec: TopicFilter = null
  // extraConsumerProps: key=value pairs from --consumer-property (take precedence).
  val extraConsumerProps = CommandLineUtils.parseKeyValueArgs(options.valuesOf(consumerPropertyOpt).asScala)
  // consumerProps: contents of the --consumer.config properties file, if given.
  val consumerProps = if (options.has(consumerConfigOpt))
    Utils.loadProps(options.valueOf(consumerConfigOpt))
  else
    new Properties()
  val zkConnectionStr = options.valueOf(zkConnectOpt)
  val fromBeginning = options.has(resetBeginningOpt)
  val partitionArg = if (options.has(partitionIdOpt)) Some(options.valueOf(partitionIdOpt).intValue) else None
  val skipMessageOnError = options.has(skipMessageOnErrorOpt)
  val messageFormatterClass = Class.forName(options.valueOf(messageFormatterOpt))
  val formatterArgs = CommandLineUtils.parseKeyValueArgs(options.valuesOf(messageFormatterArgOpt).asScala)
  // -1 sentinels mean "unbounded" / "no timeout".
  val maxMessages = if (options.has(maxMessagesOpt)) options.valueOf(maxMessagesOpt).intValue else -1
  val timeoutMs = if (options.has(timeoutMsOpt)) options.valueOf(timeoutMsOpt).intValue else -1
  val bootstrapServer = options.valueOf(bootstrapServerOpt)
  val keyDeserializer = options.valueOf(keyDeserializerOpt)
  val valueDeserializer = options.valueOf(valueDeserializerOpt)
  val isolationLevel = options.valueOf(isolationLevelOpt).toString
  val formatter: MessageFormatter = messageFormatterClass.newInstance().asInstanceOf[MessageFormatter]
  // Explicit --key-deserializer / --value-deserializer override formatter --property values.
  if (keyDeserializer != null && !keyDeserializer.isEmpty) {
    formatterArgs.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer)
  }
  if (valueDeserializer != null && !valueDeserializer.isEmpty) {
    formatterArgs.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valueDeserializer)
  }
  formatter.init(formatterArgs)
  if (useOldConsumer) {
    if (options.has(bootstrapServerOpt))
      CommandLineUtils.printUsageAndDie(parser, s"Option $bootstrapServerOpt is not valid with $zkConnectOpt.")
    else if (options.has(newConsumerOpt))
      CommandLineUtils.printUsageAndDie(parser, s"Option $newConsumerOpt is not valid with $zkConnectOpt.")
    val topicOrFilterOpt = List(topicIdOpt, whitelistOpt, blacklistOpt).filter(options.has)
    if (topicOrFilterOpt.size != 1)
      CommandLineUtils.printUsageAndDie(parser, "Exactly one of whitelist/blacklist/topic is required.")
    topicArg = options.valueOf(topicOrFilterOpt.head)
    filterSpec = if (options.has(blacklistOpt)) new Blacklist(topicArg) else new Whitelist(topicArg)
    Console.err.println("Using the ConsoleConsumer with old consumer is deprecated and will be removed " +
      s"in a future major release. Consider using the new consumer by passing $bootstrapServerOpt instead of ${zkConnectOpt}.")
  } else {
    val topicOrFilterOpt = List(topicIdOpt, whitelistOpt).filter(options.has)
    if (topicOrFilterOpt.size != 1)
      CommandLineUtils.printUsageAndDie(parser, "Exactly one of whitelist/topic is required.")
    topicArg = options.valueOf(topicIdOpt)
    whitelistArg = options.valueOf(whitelistOpt)
  }
  if (useOldConsumer && (partitionArg.isDefined || options.has(offsetOpt)))
    CommandLineUtils.printUsageAndDie(parser, "Partition-offset based consumption is supported in the new consumer only.")
  if (partitionArg.isDefined) {
    if (!options.has(topicIdOpt))
      CommandLineUtils.printUsageAndDie(parser, "The topic is required when partition is specified.")
    if (fromBeginning && options.has(offsetOpt))
      CommandLineUtils.printUsageAndDie(parser, "Options from-beginning and offset cannot be specified together.")
  } else if (options.has(offsetOpt))
    CommandLineUtils.printUsageAndDie(parser, "The partition is required when offset is specified.")
  def invalidOffset(offset: String): Nothing =
    CommandLineUtils.printUsageAndDie(parser, s"The provided offset value '$offset' is incorrect. Valid values are " +
      "'earliest', 'latest', or a non-negative long.")
  // Resolved start offset: a non-negative long, or the earliest/latest sentinel times.
  val offsetArg =
    if (options.has(offsetOpt)) {
      options.valueOf(offsetOpt).toLowerCase(Locale.ROOT) match {
        case "earliest" => OffsetRequest.EarliestTime
        case "latest" => OffsetRequest.LatestTime
        case offsetString =>
          val offset =
            try offsetString.toLong
            catch {
              case _: NumberFormatException => invalidOffset(offsetString)
            }
          if (offset < 0) invalidOffset(offsetString)
          offset
      }
    }
    else if (fromBeginning) OffsetRequest.EarliestTime
    else OffsetRequest.LatestTime
  if (!useOldConsumer) {
    CommandLineUtils.checkRequiredArgs(parser, options, bootstrapServerOpt)
    if (options.has(newConsumerOpt)) {
      Console.err.println("The --new-consumer option is deprecated and will be removed in a future major release." +
        "The new consumer is used by default if the --bootstrap-server option is provided.")
    }
  }
  if (options.has(csvMetricsReporterEnabledOpt)) {
    val csvReporterProps = new Properties()
    csvReporterProps.put("kafka.metrics.polling.interval.secs", "5")
    csvReporterProps.put("kafka.metrics.reporters", "kafka.metrics.KafkaCSVMetricsReporter")
    if (options.has(metricsDirectoryOpt))
      csvReporterProps.put("kafka.csv.metrics.dir", options.valueOf(metricsDirectoryOpt))
    else
      csvReporterProps.put("kafka.csv.metrics.dir", "kafka_metrics")
    csvReporterProps.put("kafka.csv.metrics.reporter.enabled", "true")
    val verifiableProps = new VerifiableProperties(csvReporterProps)
    KafkaMetricsReporter.startReporters(verifiableProps)
  }
  // if the group id is provided in more than place (through different means) all values must be the same
  val groupIdsProvided = Set(
    Option(options.valueOf(groupIdOpt)), // via --group
    Option(consumerProps.get(ConsumerConfig.GROUP_ID_CONFIG)), // via --consumer.config (consumerProps is loaded from the config file)
    Option(extraConsumerProps.get(ConsumerConfig.GROUP_ID_CONFIG)) // via --consumer-property (extraConsumerProps holds the key=value args)
  ).flatten
  if (groupIdsProvided.size > 1) {
    CommandLineUtils.printUsageAndDie(parser, "The group ids provided in different places (directly using '--group', "
      + "via '--consumer-property', or via '--consumer.config') do not match. "
      + s"Detected group ids: ${groupIdsProvided.mkString("'", "', '", "'")}")
  }
  groupIdsProvided.headOption match {
    case Some(group) =>
      consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, group)
    case None =>
      // No group id supplied anywhere: generate one and record that it was not user-given.
      consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, s"console-consumer-${new Random().nextInt(100000)}")
      groupIdPassed = false
  }
  // Parses args, converting joptsimple failures into a usage message plus process exit.
  def tryParse(parser: OptionParser, args: Array[String]): OptionSet = {
    try
      parser.parse(args: _*)
    catch {
      case e: OptionException =>
        CommandLineUtils.printUsageAndDie(parser, e.getMessage)
    }
  }
}
/**
 * Returns true if `path` exists in the ZooKeeper ensemble at `zkUrl`, false if it does
 * not exist or the check fails for any non-fatal reason (e.g. the ensemble is unreachable).
 */
def checkZkPathExists(zkUrl: String, path: String): Boolean = {
  import scala.util.control.NonFatal
  try {
    val zk = ZkUtils.createZkClient(zkUrl, 30 * 1000, 30 * 1000)
    // Close the client even if exists() throws; the previous version leaked the connection.
    try zk.exists(path)
    finally zk.close()
  } catch {
    // NonFatal instead of Throwable so fatal VM errors (OOM etc.) are not swallowed.
    case NonFatal(_) => false
  }
}
}
/**
 * Default record formatter: optionally prints the timestamp, key and value of each
 * record, separated by configurable byte sequences, optionally running the bytes
 * through user-supplied deserializers first.
 */
class DefaultMessageFormatter extends MessageFormatter {
  var printKey = false
  var printValue = true
  var printTimestamp = false
  // NOTE(review): as written these literals are the two-character sequences
  // backslash+'t' / backslash+'n', not a TAB / newline. Upstream Kafka uses "\t"/"\n";
  // this looks like an escaping artifact of the source dump — confirm before relying on it.
  var keySeparator = "\\t".getBytes(StandardCharsets.UTF_8)
  var lineSeparator = "\\n".getBytes(StandardCharsets.UTF_8)
  var keyDeserializer: Option[Deserializer[_]] = None
  var valueDeserializer: Option[Deserializer[_]] = None
  // Reads formatter options supplied via --property key=value pairs; unset keys keep
  // the defaults above.
  override def init(props: Properties) {
    if (props.containsKey("print.timestamp"))
      printTimestamp = props.getProperty("print.timestamp").trim.equalsIgnoreCase("true")
    if (props.containsKey("print.key"))
      printKey = props.getProperty("print.key").trim.equalsIgnoreCase("true")
    if (props.containsKey("print.value"))
      printValue = props.getProperty("print.value").trim.equalsIgnoreCase("true")
    if (props.containsKey("key.separator"))
      keySeparator = props.getProperty("key.separator").getBytes(StandardCharsets.UTF_8)
    if (props.containsKey("line.separator"))
      lineSeparator = props.getProperty("line.separator").getBytes(StandardCharsets.UTF_8)
    // Note that `toString` will be called on the instance returned by `Deserializer.deserialize`
    if (props.containsKey("key.deserializer"))
      keyDeserializer = Some(Class.forName(props.getProperty("key.deserializer")).newInstance().asInstanceOf[Deserializer[_]])
    // Note that `toString` will be called on the instance returned by `Deserializer.deserialize`
    if (props.containsKey("value.deserializer"))
      valueDeserializer = Some(Class.forName(props.getProperty("value.deserializer")).newInstance().asInstanceOf[Deserializer[_]])
  }
  def writeTo(consumerRecord: ConsumerRecord[Array[Byte], Array[Byte]], output: PrintStream) {
    // Emits keySeparator when another column follows on the same line, lineSeparator otherwise.
    def writeSeparator(columnSeparator: Boolean): Unit = {
      if (columnSeparator)
        output.write(keySeparator)
      else
        output.write(lineSeparator)
    }
    // Deserializes (if configured) and writes the bytes; null payloads print as "null".
    def write(deserializer: Option[Deserializer[_]], sourceBytes: Array[Byte]) {
      val nonNullBytes = Option(sourceBytes).getOrElse("null".getBytes(StandardCharsets.UTF_8))
      val convertedBytes = deserializer.map(_.deserialize(null, nonNullBytes).toString.
        getBytes(StandardCharsets.UTF_8)).getOrElse(nonNullBytes)
      output.write(convertedBytes)
    }
    import consumerRecord._
    if (printTimestamp) {
      if (timestampType != TimestampType.NO_TIMESTAMP_TYPE)
        output.write(s"$timestampType:$timestamp".getBytes(StandardCharsets.UTF_8))
      else
        output.write(s"NO_TIMESTAMP".getBytes(StandardCharsets.UTF_8))
      writeSeparator(printKey || printValue)
    }
    if (printKey) {
      write(keyDeserializer, key)
      writeSeparator(printValue)
    }
    if (printValue) {
      write(valueDeserializer, value)
      output.write(lineSeparator)
    }
  }
}
/**
 * Decorates DefaultMessageFormatter: writes each record to the output stream exactly
 * like the default formatter, and additionally logs the record at INFO level.
 */
class LoggingMessageFormatter extends MessageFormatter with LazyLogging {
  private val defaultWriter: DefaultMessageFormatter = new DefaultMessageFormatter

  override def init(props: Properties): Unit = defaultWriter.init(props)

  def writeTo(consumerRecord: ConsumerRecord[Array[Byte], Array[Byte]], output: PrintStream): Unit = {
    defaultWriter.writeTo(consumerRecord, output)
    val timestampPart =
      if (consumerRecord.timestampType != TimestampType.NO_TIMESTAMP_TYPE)
        s"${consumerRecord.timestampType}:${consumerRecord.timestamp}, "
      else
        ""
    val keyText =
      if (consumerRecord.key == null) "null" else new String(consumerRecord.key, StandardCharsets.UTF_8)
    val valueText =
      if (consumerRecord.value == null) "null" else new String(consumerRecord.value, StandardCharsets.UTF_8)
    logger.info(timestampPart + s"key:$keyText, " + s"value:$valueText")
  }
}
/** Formatter that discards every record — useful when only consumption matters. */
class NoOpMessageFormatter extends MessageFormatter {
  // Nothing to configure.
  override def init(props: Properties): Unit = ()

  // Intentionally produces no output.
  def writeTo(consumerRecord: ConsumerRecord[Array[Byte], Array[Byte]], output: PrintStream): Unit = ()
}
/**
 * Prints "[topic:]checksum:<value>" per record instead of its payload, computing the
 * checksum of the legacy Message representation of the record.
 */
class ChecksumMessageFormatter extends MessageFormatter {
  // Topic prefix ("topic:") if a "topic" property was supplied, empty string otherwise.
  private var topicStr: String = _
  override def init(props: Properties) {
    topicStr = props.getProperty("topic")
    if (topicStr != null)
      topicStr = topicStr + ":"
    else
      topicStr = ""
  }
  def writeTo(consumerRecord: ConsumerRecord[Array[Byte], Array[Byte]], output: PrintStream) {
    import consumerRecord._
    // Records with a timestamp use message format v1 (which carries the timestamp);
    // records without one fall back to the v0 constructor.
    val chksum =
      if (timestampType != TimestampType.NO_TIMESTAMP_TYPE)
        new Message(value, key, timestamp, timestampType, NoCompressionCodec, 0, -1, Message.MagicValue_V1).checksum
      else
        new Message(value, key, Message.NoTimestamp, Message.MagicValue_V0).checksum
    output.println(topicStr + "checksum:" + chksum)
  }
}
| MyPureCloud/kafka | core/src/main/scala/kafka/tools/ConsoleConsumer.scala | Scala | apache-2.0 | 27,884 |
package tastytest
/**
 * TASTy test fixture exercising Scala 3 polymorphic function types.
 * The specific syntax below ([T] => (t: T) => T) is itself the subject under test,
 * so it must not be rewritten.
 */
object PolymorphicFuncs {
  // Polymorphic identity as a first-class function value.
  val id: [T] => (t: T) => T = [T] => (t: T) => t
  // Accepts any polymorphic identity-shaped function and applies it at Int.
  def takesId(f: [T] => (t: T) => T): Int = f(1)
  // A polymorphic function type used as a type-parameter upper bound.
  class PolyBox[F <: [T] => (t: T) => T] {
    def takesId(f: F) = f(1)
  }
}
| lrytz/scala | test/tasty/neg/src-3/PolymorphicFuncs.scala | Scala | apache-2.0 | 224 |
package models.pages
import play.api.libs.json._
import play.api.data.Forms._
import play.api.libs.functional.syntax._
// A column inside a page layout: its ordering index, optional raw HTML content, and
// optional nested rows.
case class Column(order: Long, columnHtml: Option[String], rowsOption: Option[List[Row]])
// NOTE(review): extending Function3 explicitly looks like a workaround so the companion
// still works as a tupled constructor alongside the Json.format macro — confirm before
// simplifying.
object Column extends Function3[Long, Option[String], Option[List[models.pages.Row]], Column] {
  // Play JSON (de)serializer derived from the case class shape.
  implicit val columnFormat: Format[Column] = Json.format[Column]
} | jbuffin/WebsiteBuilder | app/models/pages/Column.scala | Scala | mit | 374
/**
* *****************************************************************************
* Copyright (c) 2014 Guillaume DUBUISSON DUPLESSIS <guillaume.dubuisson_duplessis@insa-rouen.fr>.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Public License v3.0
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/gpl.html
*
* Contributors:
* Guillaume DUBUISSON DUPLESSIS <guillaume.dubuisson_duplessis@insa-rouen.fr> - initial API and implementation
* ****************************************************************************
*/
package binaryTree.P65
import util.ExerciseTemplate
import binaryTree.Tree
import binaryTree.End
import binaryTree.Node
import binaryTree.PositionedNode
/**
 * Exercise template for problem P65: implementations mix in this trait and provide
 * layoutBinaryTree2; the tests below then verify the layout rules.
 */
trait P65 extends ExerciseTemplate {
  /*
  P65 (**) Layout a binary tree (2).
  An alternative layout method is depicted in the illustration opposite.
  Find out the rules and write the corresponding method.
  Hint: On a given level, the horizontal distance between neighboring nodes is constant.
  Use the same conventions as in problem P64.
  scala> Node('a', Node('b', End, Node('c')), Node('d')).layoutBinaryTree2
  res0: PositionedNode[Char] = T[3,1]('a T[1,2]('b . T[2,3]('c . .)) T[5,2]('d . .))
  The tree at right may be constructed with Tree.fromList(List('n','k','m','c','a','e','d','g','u','p','q')). Use it to check your code.
  */
  /*
   * For a node u:
   * - x(u) = x(p) - 2^(maxDepth - d) for a left node or x(p) + 2^(maxDepth - d) for a right node,
   * where maxDepth represents the depth of the tree
   * and p represents the parent node.
   * - y(u) = d, where d represents its depth (starting from 1)
   *
   * If r is the root of the tree:
   * x(r) = 1 + SUM(2^(maxDepth - i)) for i from 2 to maxLeftDepth
   */
  val name = "Layout a binary tree 2 (P65)"
  // To be supplied by the concrete solution.
  def layoutBinaryTree2[T](t: Tree[T]): Tree[T]
  test("Invoking layoutBinaryTree2 on an End should return an End") {
    assert(layoutBinaryTree2(End) == End)
  }
  test("Invoking layoutBinaryTree2 on a Node should return a PositionedNode") {
    assert(layoutBinaryTree2(Node('a)) == PositionedNode('a, 1, 1))
  }
  test("Invoking layoutBinaryTree2 on a non-empty Tree should return a tree of PositionedNodes") {
    // Input: the reference tree from the problem statement, i.e.
    // Tree.fromList(List('n','k','m','c','a','e','d','g','u','p','q')).
    val d = Node('d')
    val g = Node('g')
    val e = Node('e', d, g)
    val a = Node('a')
    val c = Node('c', a, e)
    val m = Node('m')
    val k = Node('k', c, m)
    val q = Node('q')
    val p = Node('p', End, q)
    val u = Node('u', p, End)
    val tree = Node('n', k, u)
    val result = layoutBinaryTree2(tree)
    // Expected positions follow the x/y rules documented above (maxDepth = 5 here).
    val d_ = PositionedNode('d', 4, 5)
    val g_ = PositionedNode('g', 6, 5)
    val e_ = PositionedNode('e', d_, g_, 5, 4)
    val a_ = PositionedNode('a', 1, 4)
    val c_ = PositionedNode('c', a_, e_, 3, 3)
    val m_ = PositionedNode('m', 11, 3)
    val k_ = PositionedNode('k', c_, m_, 7, 2)
    val q_ = PositionedNode('q', 21, 4)
    val p_ = PositionedNode('p', End, q_, 19, 3)
    val u_ = PositionedNode('u', p_, End, 23, 2)
    val solution = PositionedNode('n', k_, u_, 15, 1)
    assert(result == solution)
  }
}
| GuillaumeDD/scala99problems | src/main/scala/binaryTree/P65/P65.scala | Scala | gpl-3.0 | 3,193 |
package io.github.interestinglab.waterdrop.output.utils
import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
/**
 * Serializable wrapper around a KafkaProducer.
 * It holds a factory function rather than a producer instance — presumably so the
 * wrapper can be serialized (e.g. shipped inside Spark tasks, KafkaProducer itself not
 * being serializable); the real producer is then created lazily on first use.
 */
class KafkaProducerUtil(createProducer: () => KafkaProducer[String, String]) extends Serializable {
  // Created once per (deserialized) instance, on the first send().
  lazy val producer = createProducer()
  // Fire-and-forget: the Future returned by KafkaProducer.send is intentionally discarded.
  def send(topic: String, value: String): Unit =
    producer.send(new ProducerRecord(topic, value))
}
object KafkaProducerUtil {
  /**
   * Builds a KafkaProducerUtil whose producer is constructed lazily from `config` and
   * closed by a JVM shutdown hook registered at creation time.
   */
  def apply(config: Properties): KafkaProducerUtil = {
    def newProducer(): KafkaProducer[String, String] = {
      val producer = new KafkaProducer[String, String](config)
      // Flush and release the client when the JVM exits.
      sys.addShutdownHook {
        producer.close()
      }
      producer
    }
    new KafkaProducerUtil(() => newProducer())
  }
}
| InterestingLab/waterdrop | waterdrop-core/src/main/scala/io/github/interestinglab/waterdrop/output/utils/KafkaProducerUtil.scala | Scala | apache-2.0 | 689 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.connector.expressions
import org.apache.spark.SparkException
import org.apache.spark.sql.catalyst
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.types.{DataType, IntegerType, StringType}
/**
* Helper methods for working with the logical expressions API.
*
* Factory methods can be used when referencing the logical expression nodes is ambiguous because
* logical and internal expressions are used.
*/
private[sql] object LogicalExpressions {
  // Builds a typed literal, inferring the data type via an internal Catalyst literal.
  def literal[T](value: T): LiteralValue[T] = {
    val internalLit = catalyst.expressions.Literal(value)
    literal(value, internalLit.dataType)
  }
  def literal[T](value: T, dataType: DataType): LiteralValue[T] = LiteralValue(value, dataType)
  // Parses a (possibly quoted, multi-part) column name into a field reference.
  def parseReference(name: String): NamedReference =
    FieldReference(CatalystSqlParser.parseMultipartIdentifier(name))
  def reference(nameParts: Seq[String]): NamedReference = FieldReference(nameParts)
  // Generic named transform over arbitrary argument expressions.
  def apply(name: String, arguments: Expression*): Transform = ApplyTransform(name, arguments)
  def bucket(numBuckets: Int, references: Array[NamedReference]): BucketTransform =
    BucketTransform(literal(numBuckets, IntegerType), references)
  // Bucketing with additional sort columns inside each bucket.
  def bucket(
      numBuckets: Int,
      references: Array[NamedReference],
      sortedCols: Array[NamedReference]): SortedBucketTransform =
    SortedBucketTransform(literal(numBuckets, IntegerType), references, sortedCols)
  def identity(reference: NamedReference): IdentityTransform = IdentityTransform(reference)
  def years(reference: NamedReference): YearsTransform = YearsTransform(reference)
  def months(reference: NamedReference): MonthsTransform = MonthsTransform(reference)
  def days(reference: NamedReference): DaysTransform = DaysTransform(reference)
  def hours(reference: NamedReference): HoursTransform = HoursTransform(reference)
  def sort(
      reference: Expression,
      direction: SortDirection,
      nullOrdering: NullOrdering): SortOrder = {
    SortValue(reference, direction, nullOrdering)
  }
}
/**
* Allows Spark to rewrite the given references of the transform during analysis.
*/
// Sealed: the full set of rewritable transforms is defined in this file.
private[sql] sealed trait RewritableTransform extends Transform {
  /** Creates a copy of this transform with the new analyzed references. */
  def withReferences(newReferences: Seq[NamedReference]): Transform
}
/**
* Base class for simple transforms of a single column.
*/
/**
 * Base class for simple transforms of a single column.
 * Subclasses only supply a name and a copy-with-new-reference operation; rendering is
 * always "name(column)".
 */
private[sql] abstract class SingleColumnTransform(ref: NamedReference) extends RewritableTransform {
  def reference: NamedReference = ref
  override def references: Array[NamedReference] = Array(ref)
  override def arguments: Array[Expression] = Array(ref)
  override def toString: String = name + "(" + reference.describe + ")"
  // Subclass hook used by withReferences below.
  protected def withNewRef(ref: NamedReference): Transform
  override def withReferences(newReferences: Seq[NamedReference]): Transform = {
    // A single-column transform can only be rewritten with exactly one reference.
    assert(newReferences.length == 1,
      s"Tried rewriting a single column transform (${this}) with multiple references.")
    withNewRef(newReferences.head)
  }
}
/**
 * Bucketing transform: hashes the given columns into `numBuckets` buckets.
 * Arguments are laid out as the bucket-count literal followed by the bucket columns.
 */
private[sql] final case class BucketTransform(
    numBuckets: Literal[Int],
    columns: Seq[NamedReference]) extends RewritableTransform {

  override val name: String = "bucket"

  override def arguments: Array[Expression] = numBuckets +: columns.toArray

  override def references: Array[NamedReference] =
    arguments.collect { case named: NamedReference => named }

  override def describe: String = {
    val renderedArgs = arguments.map(_.describe).mkString(", ")
    s"bucket($renderedArgs)"
  }

  override def toString: String = describe

  override def withReferences(newReferences: Seq[NamedReference]): Transform =
    copy(columns = newReferences)
}
private[sql] object BucketTransform {
  // Extracts (numBuckets, bucketColumns, sortColumns) from either the plain "bucket"
  // form or the "sorted_bucket" form produced by SortedBucketTransform.
  def unapply(transform: Transform): Option[(Int, Seq[NamedReference], Seq[NamedReference])] =
    transform match {
      case NamedTransform("sorted_bucket", arguments) =>
        // sorted_bucket arguments are laid out as: bucket columns, then the numBuckets
        // literal, then sort columns — so the literal's index splits the two lists.
        var posOfLit: Int = -1
        var numOfBucket: Int = -1
        arguments.zipWithIndex.foreach {
          case (Lit(value: Int, IntegerType), i) =>
            numOfBucket = value
            posOfLit = i
          case _ =>
        }
        // NOTE(review): if no Int literal is present, posOfLit stays -1 and the split
        // below yields numOfBucket = -1 with a wrong partition of the arguments —
        // confirm that callers guarantee the literal exists.
        Some(numOfBucket, arguments.take(posOfLit).map(_.asInstanceOf[NamedReference]),
          arguments.drop(posOfLit + 1).map(_.asInstanceOf[NamedReference]))
      case NamedTransform("bucket", arguments) =>
        var numOfBucket: Int = -1
        // Plain bucket form: the literal is required to be first, columns follow.
        arguments(0) match {
          case Lit(value: Int, IntegerType) =>
            numOfBucket = value
          case _ => throw new SparkException("The first element in BucketTransform arguments " +
            "should be an Integer Literal.")
        }
        Some(numOfBucket, arguments.drop(1).map(_.asInstanceOf[NamedReference]),
          Seq.empty[FieldReference])
      case _ =>
        None
    }
}
/**
 * Bucketing transform with per-bucket sort columns.
 */
private[sql] final case class SortedBucketTransform(
    numBuckets: Literal[Int],
    columns: Seq[NamedReference],
    sortedColumns: Seq[NamedReference] = Seq.empty[NamedReference]) extends RewritableTransform {
  override val name: String = "sorted_bucket"
  override def references: Array[NamedReference] = {
    arguments.collect { case named: NamedReference => named }
  }
  // Layout: bucket columns, then the numBuckets literal, then sort columns.
  // BucketTransform.unapply relies on this ordering to split the two column lists.
  override def arguments: Array[Expression] = (columns.toArray :+ numBuckets) ++ sortedColumns
  override def toString: String = s"$name(${arguments.map(_.describe).mkString(", ")})"
  // New references replace bucket columns first; the remainder become sort columns.
  override def withReferences(newReferences: Seq[NamedReference]): Transform = {
    this.copy(columns = newReferences.take(columns.length),
      sortedColumns = newReferences.drop(columns.length))
  }
}
/**
 * Generic named transform applying `name` to arbitrary argument expressions.
 */
private[sql] final case class ApplyTransform(
    name: String,
    args: Seq[Expression]) extends Transform {

  override def arguments: Array[Expression] = args.toArray

  override def references: Array[NamedReference] =
    arguments.collect { case named: NamedReference => named }

  override def toString: String = {
    val renderedArgs = arguments.map(_.describe).mkString(", ")
    s"$name($renderedArgs)"
  }
}
/**
* Convenience extractor for any Literal.
*/
private object Lit {
  // Destructures any Literal into (value, dataType); always matches.
  def unapply[T](literal: Literal[T]): Some[(T, DataType)] = {
    Some((literal.value, literal.dataType))
  }
}
/**
* Convenience extractor for any NamedReference.
*/
private object Ref {
  // Exposes a NamedReference's field-name parts; always matches.
  def unapply(named: NamedReference): Some[Seq[String]] = {
    Some(named.fieldNames)
  }
}
/**
* Convenience extractor for any Transform.
*/
private[sql] object NamedTransform {
  // Destructures any Transform into (name, arguments); always matches.
  def unapply(transform: Transform): Some[(String, Seq[Expression])] = {
    Some((transform.name, transform.arguments))
  }
}
// Identity transform: partitions directly on the referenced column.
private[sql] final case class IdentityTransform(
    ref: NamedReference) extends SingleColumnTransform(ref) {
  override val name: String = "identity"
  // Renders as the bare column name instead of "identity(col)".
  override def describe: String = ref.describe
  override protected def withNewRef(ref: NamedReference): Transform = this.copy(ref)
}
private[sql] object IdentityTransform {
  // Matches any expression that is an identity transform, by delegating to the
  // Transform-typed extractor below.
  def unapply(expr: Expression): Option[FieldReference] = expr match {
    case transform: Transform => unapply(transform)
    case _ => None
  }

  // Matches a transform named "identity" over a single field reference.
  def unapply(transform: Transform): Option[FieldReference] = transform match {
    case NamedTransform("identity", Seq(Ref(parts))) => Some(FieldReference(parts))
    case _ => None
  }
}
// Single-column transform named "years"; renders as "years(col)".
private[sql] final case class YearsTransform(
    ref: NamedReference) extends SingleColumnTransform(ref) {
  override val name: String = "years"
  override protected def withNewRef(ref: NamedReference): Transform = this.copy(ref)
}
private[sql] object YearsTransform {
  // Matches any expression that is a years transform, via the extractor below.
  def unapply(expr: Expression): Option[FieldReference] = expr match {
    case transform: Transform => unapply(transform)
    case _ => None
  }

  // Matches a transform named "years" over a single field reference.
  def unapply(transform: Transform): Option[FieldReference] = transform match {
    case NamedTransform("years", Seq(Ref(parts))) => Some(FieldReference(parts))
    case _ => None
  }
}
// Single-column transform named "months"; renders as "months(col)".
private[sql] final case class MonthsTransform(
    ref: NamedReference) extends SingleColumnTransform(ref) {
  override val name: String = "months"
  override protected def withNewRef(ref: NamedReference): Transform = this.copy(ref)
}
private[sql] object MonthsTransform {
  // Matches any expression that is a months transform, via the extractor below.
  def unapply(expr: Expression): Option[FieldReference] = expr match {
    case transform: Transform => unapply(transform)
    case _ => None
  }

  // Matches a transform named "months" over a single field reference.
  def unapply(transform: Transform): Option[FieldReference] = transform match {
    case NamedTransform("months", Seq(Ref(parts))) => Some(FieldReference(parts))
    case _ => None
  }
}
// Single-column transform named "days"; renders as "days(col)".
private[sql] final case class DaysTransform(
    ref: NamedReference) extends SingleColumnTransform(ref) {
  override val name: String = "days"
  override protected def withNewRef(ref: NamedReference): Transform = this.copy(ref)
}
private[sql] object DaysTransform {
  // Matches any expression that is a days transform, via the extractor below.
  def unapply(expr: Expression): Option[FieldReference] = expr match {
    case transform: Transform => unapply(transform)
    case _ => None
  }

  // Matches a transform named "days" over a single field reference.
  def unapply(transform: Transform): Option[FieldReference] = transform match {
    case NamedTransform("days", Seq(Ref(parts))) => Some(FieldReference(parts))
    case _ => None
  }
}
// Single-column transform named "hours"; renders as "hours(col)".
private[sql] final case class HoursTransform(
    ref: NamedReference) extends SingleColumnTransform(ref) {
  override val name: String = "hours"
  override protected def withNewRef(ref: NamedReference): Transform = this.copy(ref)
}
private[sql] object HoursTransform {
  // Matches any expression that is an hours transform, via the extractor below.
  def unapply(expr: Expression): Option[FieldReference] = expr match {
    case transform: Transform => unapply(transform)
    case _ => None
  }

  // Matches a transform named "hours" over a single field reference.
  def unapply(transform: Transform): Option[FieldReference] = transform match {
    case NamedTransform("hours", Seq(Ref(parts))) => Some(FieldReference(parts))
    case _ => None
  }
}
private[sql] final case class LiteralValue[T](value: T, dataType: DataType) extends Literal[T] {
  /**
   * Human-readable rendering: string-typed values are wrapped in single quotes,
   * everything else uses its plain string form.
   */
  override def toString: String = dataType match {
    // Pattern match instead of isInstanceOf — same type test, idiomatic form.
    case _: StringType => s"'$value'"
    case _ => s"$value"
  }
}
// Reference to a (possibly nested) column, stored as its name parts.
private[sql] final case class FieldReference(parts: Seq[String]) extends NamedReference {
  import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.MultipartIdentifierHelper
  override def fieldNames: Array[String] = parts.toArray
  // Renders as a dotted identifier via the imported MultipartIdentifierHelper.quoted.
  override def toString: String = parts.quoted
}
private[sql] object FieldReference {
  /** Builds a reference by parsing the given column string
   *  (see LogicalExpressions.parseReference). */
  def apply(column: String): NamedReference = {
    LogicalExpressions.parseReference(column)
  }
  /** Wraps a single column name verbatim, without any parsing. */
  def column(name: String) : NamedReference = {
    FieldReference(Seq(name))
  }
}
/** Concrete SortOrder: an expression plus sort direction and null ordering. */
private[sql] final case class SortValue(
    expression: Expression,
    direction: SortDirection,
    nullOrdering: NullOrdering) extends SortOrder {
  override def toString(): String = s"$expression $direction $nullOrdering"
}
/** Extractor that decomposes any SortOrder expression into its three parts. */
private[sql] object SortValue {
  def unapply(expr: Expression): Option[(Expression, SortDirection, NullOrdering)] =
    Some(expr).collect {
      case sort: SortOrder => (sort.expression, sort.direction, sort.nullOrdering)
    }
}
| ueshin/apache-spark | sql/catalyst/src/main/scala/org/apache/spark/sql/connector/expressions/expressions.scala | Scala | apache-2.0 | 12,242 |
package no.netcompany.testdatagen.generators.misc
// Copyright (C) 2014 Lars Reed -- GNU GPL 2.0 -- see LICENSE.txt
import no.netcompany.testdatagen.Generator
import no.netcompany.testdatagen.aggreg.{FieldConcatenator, WeightedGenerator}
import no.netcompany.testdatagen.generators.{Fixed, FromList, Strings}
/** Generate mail-like strings... */
object MailAddresses {
  def apply(): Generator[String] = {
    // Vowel-heavy alphabet (vowels repeated) so generated words look pronounceable.
    val letters = "aeiouaeiouabcdefghijklmnoprstuvyabcdefghijklmnopqrstuvwxyz"
    // Fresh random-word generator with a length in [min, max].
    def word(min: Int, max: Int) = Strings().lengthBetween(min, max).chars(letters)
    // Local part: a single word (weight 10) or a "first.last" pair (weight 6).
    val localPart = WeightedGenerator().
      add(10, word(3, 8)).
      add(6, FieldConcatenator().
        add(word(3, 8)).
        add(Fixed(".")).
        add(word(4, 9)))
    // Assemble: <local>@<domain><tld>
    FieldConcatenator().
      add(localPart).
      add(Fixed("@")).
      add(word(4, 9)).
      add(FromList(".com", ".no", ".org", ".net", ".co.uk", ".gov"))
  }
}
| lre-mesan/testdata | src/main/scala/no/netcompany/testdatagen/generators/misc/MailAddresses.scala | Scala | gpl-2.0 | 1,034 |
package dotty.runtime.vc
import scala.reflect.ClassTag
/** Root of the runtime (boxed) representations of user-defined value classes. */
abstract class VCPrototype {
}
/** Common interface of the specialized value-class array wrappers in this file.
 *  `apply` returns a boxed element; `update` stores an element in unboxed form. */
abstract class VCArrayPrototype[T <: VCPrototype] extends Object with Cloneable {
  def apply(idx: Int): Object
  def update(idx: Int, el: T): Unit
  def length: Int
  override def clone: Object = super.clone()
}
/** Boxed representation of a value class whose underlying field is a Float. */
abstract class VCFloatPrototype(val underlying: Float) extends VCPrototype {}
/** Adds Product1 support for *case* value classes over Float; subclasses are
 *  expected to implement equals, productPrefix, and canEqual. */
abstract class VCFloatCasePrototype(underlying: Float) extends VCFloatPrototype(underlying) with Product1[Float] {
  final def _1: Float = underlying
  // Hashing and printing delegate to the underlying primitive so the boxed and
  // unboxed forms agree.
  override final def hashCode(): Int = {
    underlying.hashCode()
  }
  override final def toString: String = {
    s"$productPrefix($underlying)"
  }
}
/** Companion of a Float-backed value class: boxes/unboxes values, and doubles as
 *  a ClassTag so that Array[T] creation goes through newArray below and yields
 *  an unboxed VCFloatArray. The `$extension` methods mirror the instance members
 *  for the erased (unboxed) form. */
abstract class VCFloatCompanion[T <: VCFloatPrototype] extends ClassTag[T] {
  def box(underlying: Float): T
  final def unbox(boxed: T) = boxed.underlying
  implicit def classTag: this.type = this
  override def newArray(len: Int): Array[T] =
    new VCFloatArray(this, len).asInstanceOf[Array[T]]
  final def _1$extension(underlying: Float) = underlying
  final def hashCode$extension(underlying: Float) = underlying.hashCode()
  final def toString$extension(underlying: Float) = s"${productPrefix$extension(underlying)}($underlying)"
  def productPrefix$extension(underlying: Float): String
}
/** Array of Float-backed value-class instances stored unboxed in an
 *  Array[Float]; elements are boxed on read and unboxed on write. */
final class VCFloatArray[T <: VCFloatPrototype] private (val arr: Array[Float], val ct: VCFloatCompanion[T])
  extends VCArrayPrototype[T] {
  def this(ct: VCFloatCompanion[T], sz: Int) =
    this(new Array[Float](sz), ct)
  def apply(idx: Int) =
    ct.box(arr(idx))
  def update(idx: Int, elem: T) =
    arr(idx) = ct.unbox(elem)
  def length: Int = arr.length
  override def clone(): VCFloatArray[T] = {
    new VCFloatArray[T](arr.clone(), ct)
  }
  override def toString: String = {
    // NOTE(review): mimics the JVM array toString prefix ("[" + element class);
    // looks possibly truncated -- confirm intent.
    "[" + ct.runtimeClass
  }
}
// Object specialization of the value-class runtime pattern (see the Float
// variants earlier in this file for a fully commented version).
abstract class VCObjectPrototype(val underlying: Object) extends VCPrototype {}
// Product1 support for case value classes; subclasses implement
// equals/productPrefix/canEqual.
abstract class VCObjectCasePrototype(underlying: Object) extends VCObjectPrototype(underlying) with Product1[Object] {
  final def _1: Object = underlying
  override final def hashCode(): Int = {
    underlying.hashCode()
  }
  override final def toString: String = {
    s"$productPrefix($underlying)"
  }
}
// Boxes/unboxes Object-backed value classes; acts as ClassTag so Array[T]
// creation yields a VCObjectArray.
abstract class VCObjectCompanion[T <: VCObjectPrototype] extends ClassTag[T] {
  def box(underlying: Object): T
  final def unbox(boxed: T) = boxed.underlying
  implicit def classTag: this.type = this
  override def newArray(len: Int): Array[T] =
    new VCObjectArray(this, len).asInstanceOf[Array[T]]
  final def _1$extension(underlying: Object) = underlying
  final def hashCode$extension(underlying: Object) = underlying.hashCode()
  final def toString$extension(underlying: Object) = s"${productPrefix$extension(underlying)}($underlying)"
  def productPrefix$extension(underlying: Object): String
}
// Array[Object]-backed storage; boxes on read, unboxes on write.
final class VCObjectArray[T <: VCObjectPrototype] private (val arr: Array[Object], val ct: VCObjectCompanion[T])
  extends VCArrayPrototype[T] {
  def this(ct: VCObjectCompanion[T], sz: Int) =
    this(new Array[Object](sz), ct)
  def apply(idx: Int) =
    ct.box(arr(idx))
  def update(idx: Int, elem: T) =
    arr(idx) = ct.unbox(elem)
  def length: Int = arr.length
  override def clone(): VCObjectArray[T] = {
    new VCObjectArray[T](arr.clone(), ct)
  }
  override def toString: String = {
    "[" + ct.runtimeClass
  }
}
// Short specialization of the value-class runtime pattern (see the Float
// variants earlier in this file for a fully commented version).
abstract class VCShortPrototype(val underlying: Short) extends VCPrototype {}
// Product1 support for case value classes; subclasses implement
// equals/productPrefix/canEqual.
abstract class VCShortCasePrototype(underlying: Short) extends VCShortPrototype(underlying) with Product1[Short] {
  final def _1: Short = underlying
  override final def hashCode(): Int = {
    underlying.hashCode()
  }
  override final def toString: String = {
    s"$productPrefix($underlying)"
  }
}
// Boxes/unboxes Short-backed value classes; acts as ClassTag so Array[T]
// creation yields a VCShortArray.
abstract class VCShortCompanion[T <: VCShortPrototype] extends ClassTag[T] {
  def box(underlying: Short): T
  final def unbox(boxed: T) = boxed.underlying
  implicit def classTag: this.type = this
  override def newArray(len: Int): Array[T] =
    new VCShortArray(this, len).asInstanceOf[Array[T]]
  final def _1$extension(underlying: Short) = underlying
  final def hashCode$extension(underlying: Short) = underlying.hashCode()
  final def toString$extension(underlying: Short) = s"${productPrefix$extension(underlying)}($underlying)"
  def productPrefix$extension(underlying: Short): String
}
// Array[Short]-backed storage; boxes on read, unboxes on write.
final class VCShortArray[T <: VCShortPrototype] private (val arr: Array[Short], val ct: VCShortCompanion[T])
  extends VCArrayPrototype[T] {
  def this(ct: VCShortCompanion[T], sz: Int) =
    this(new Array[Short](sz), ct)
  def apply(idx: Int) =
    ct.box(arr(idx))
  def update(idx: Int, elem: T) =
    arr(idx) = ct.unbox(elem)
  def length: Int = arr.length
  override def clone(): VCShortArray[T] = {
    new VCShortArray[T](arr.clone(), ct)
  }
  override def toString: String = {
    "[" + ct.runtimeClass
  }
}
// Long specialization of the value-class runtime pattern (see the Float
// variants earlier in this file for a fully commented version).
abstract class VCLongPrototype(val underlying: Long) extends VCPrototype {}
// Product1 support for case value classes; subclasses implement
// equals/productPrefix/canEqual.
abstract class VCLongCasePrototype(underlying: Long) extends VCLongPrototype(underlying) with Product1[Long] {
  final def _1: Long = underlying
  override final def hashCode(): Int = {
    underlying.hashCode()
  }
  override final def toString: String = {
    s"$productPrefix($underlying)"
  }
}
// Boxes/unboxes Long-backed value classes; acts as ClassTag so Array[T]
// creation yields a VCLongArray.
abstract class VCLongCompanion[T <: VCLongPrototype] extends ClassTag[T] {
  def box(underlying: Long): T
  final def unbox(boxed: T) = boxed.underlying
  implicit def classTag: this.type = this
  override def newArray(len: Int): Array[T] =
    new VCLongArray(this, len).asInstanceOf[Array[T]]
  final def _1$extension(underlying: Long) = underlying
  final def hashCode$extension(underlying: Long) = underlying.hashCode()
  final def toString$extension(underlying: Long) = s"${productPrefix$extension(underlying)}($underlying)"
  def productPrefix$extension(underlying: Long): String
}
// Array[Long]-backed storage; boxes on read, unboxes on write.
final class VCLongArray[T <: VCLongPrototype] private (val arr: Array[Long], val ct: VCLongCompanion[T])
  extends VCArrayPrototype[T] {
  def this(ct: VCLongCompanion[T], sz: Int) =
    this(new Array[Long](sz), ct)
  def apply(idx: Int) =
    ct.box(arr(idx))
  def update(idx: Int, elem: T) =
    arr(idx) = ct.unbox(elem)
  def length: Int = arr.length
  override def clone(): VCLongArray[T] = {
    new VCLongArray[T](arr.clone(), ct)
  }
  override def toString: String = {
    "[" + ct.runtimeClass
  }
}
// Int specialization of the value-class runtime pattern (see the Float
// variants earlier in this file for a fully commented version).
abstract class VCIntPrototype(val underlying: Int) extends VCPrototype {}
// Product1 support for case value classes; subclasses implement
// equals/productPrefix/canEqual.
abstract class VCIntCasePrototype(underlying: Int) extends VCIntPrototype(underlying) with Product1[Int] {
  final def _1: Int = underlying
  override final def hashCode(): Int = {
    underlying.hashCode()
  }
  override final def toString: String = {
    s"$productPrefix($underlying)"
  }
}
// Boxes/unboxes Int-backed value classes; acts as ClassTag so Array[T]
// creation yields a VCIntArray.
abstract class VCIntCompanion[T <: VCIntPrototype] extends ClassTag[T] {
  def box(underlying: Int): T
  final def unbox(boxed: T) = boxed.underlying
  implicit def classTag: this.type = this
  override def newArray(len: Int): Array[T] =
    new VCIntArray(this, len).asInstanceOf[Array[T]]
  final def _1$extension(underlying: Int) = underlying
  final def hashCode$extension(underlying: Int) = underlying.hashCode()
  final def toString$extension(underlying: Int) = s"${productPrefix$extension(underlying)}($underlying)"
  def productPrefix$extension(underlying: Int): String
}
// Array[Int]-backed storage; boxes on read, unboxes on write.
final class VCIntArray[T <: VCIntPrototype] private (val arr: Array[Int], val ct: VCIntCompanion[T])
  extends VCArrayPrototype[T] {
  def this(ct: VCIntCompanion[T], sz: Int) =
    this(new Array[Int](sz), ct)
  def apply(idx: Int) =
    ct.box(arr(idx))
  def update(idx: Int, elem: T) =
    arr(idx) = ct.unbox(elem)
  def length: Int = arr.length
  override def clone(): VCIntArray[T] = {
    new VCIntArray[T](arr.clone(), ct)
  }
  override def toString: String = {
    "[" + ct.runtimeClass
  }
}
// Double specialization of the value-class runtime pattern (see the Float
// variants earlier in this file for a fully commented version).
abstract class VCDoublePrototype(val underlying: Double) extends VCPrototype {}
// Product1 support for case value classes; subclasses implement
// equals/productPrefix/canEqual.
abstract class VCDoubleCasePrototype(underlying: Double) extends VCDoublePrototype(underlying) with Product1[Double] {
  final def _1: Double = underlying
  override final def hashCode(): Int = {
    underlying.hashCode()
  }
  override final def toString: String = {
    s"$productPrefix($underlying)"
  }
}
// Boxes/unboxes Double-backed value classes; acts as ClassTag so Array[T]
// creation yields a VCDoubleArray.
abstract class VCDoubleCompanion[T <: VCDoublePrototype] extends ClassTag[T] {
  def box(underlying: Double): T
  final def unbox(boxed: T) = boxed.underlying
  implicit def classTag: this.type = this
  override def newArray(len: Int): Array[T] =
    new VCDoubleArray(this, len).asInstanceOf[Array[T]]
  final def _1$extension(underlying: Double) = underlying
  final def hashCode$extension(underlying: Double) = underlying.hashCode()
  final def toString$extension(underlying: Double) = s"${productPrefix$extension(underlying)}($underlying)"
  def productPrefix$extension(underlying: Double): String
}
// Array[Double]-backed storage; boxes on read, unboxes on write.
final class VCDoubleArray[T <: VCDoublePrototype] private (val arr: Array[Double], val ct: VCDoubleCompanion[T])
  extends VCArrayPrototype[T] {
  def this(ct: VCDoubleCompanion[T], sz: Int) =
    this(new Array[Double](sz), ct)
  def apply(idx: Int) =
    ct.box(arr(idx))
  def update(idx: Int, elem: T) =
    arr(idx) = ct.unbox(elem)
  def length: Int = arr.length
  override def clone(): VCDoubleArray[T] = {
    new VCDoubleArray[T](arr.clone(), ct)
  }
  override def toString: String = {
    "[" + ct.runtimeClass
  }
}
// Boolean specialization of the value-class runtime pattern (see the Float
// variants earlier in this file for a fully commented version).
abstract class VCBooleanPrototype(val underlying: Boolean) extends VCPrototype {}
// Product1 support for case value classes; subclasses implement
// equals/productPrefix/canEqual.
abstract class VCBooleanCasePrototype(underlying: Boolean) extends VCBooleanPrototype(underlying) with Product1[Boolean] {
  final def _1: Boolean = underlying
  override final def hashCode(): Int = {
    underlying.hashCode()
  }
  override final def toString: String = {
    s"$productPrefix($underlying)"
  }
}
// Boxes/unboxes Boolean-backed value classes; acts as ClassTag so Array[T]
// creation yields a VCBooleanArray.
abstract class VCBooleanCompanion[T <: VCBooleanPrototype] extends ClassTag[T] {
  def box(underlying: Boolean): T
  final def unbox(boxed: T) = boxed.underlying
  implicit def classTag: this.type = this
  override def newArray(len: Int): Array[T] =
    new VCBooleanArray(this, len).asInstanceOf[Array[T]]
  final def _1$extension(underlying: Boolean) = underlying
  final def hashCode$extension(underlying: Boolean) = underlying.hashCode()
  final def toString$extension(underlying: Boolean) = s"${productPrefix$extension(underlying)}($underlying)"
  def productPrefix$extension(underlying: Boolean): String
}
// Array[Boolean]-backed storage; boxes on read, unboxes on write.
final class VCBooleanArray[T <: VCBooleanPrototype] private (val arr: Array[Boolean], val ct: VCBooleanCompanion[T])
  extends VCArrayPrototype[T] {
  def this(ct: VCBooleanCompanion[T], sz: Int) =
    this(new Array[Boolean](sz), ct)
  def apply(idx: Int) =
    ct.box(arr(idx))
  def update(idx: Int, elem: T) =
    arr(idx) = ct.unbox(elem)
  def length: Int = arr.length
  override def clone(): VCBooleanArray[T] = {
    new VCBooleanArray[T](arr.clone(), ct)
  }
  override def toString: String = {
    "[" + ct.runtimeClass
  }
}
// Char specialization of the value-class runtime pattern (see the Float
// variants earlier in this file for a fully commented version).
abstract class VCCharPrototype(val underlying: Char) extends VCPrototype {}
// Product1 support for case value classes.
abstract class VCCharCasePrototype(underlying: Char) extends VCCharPrototype(underlying) with Product1[Char] {
  final def _1: Char = underlying
  override final def hashCode(): Int = {
    underlying.hashCode()
  }
  override final def toString: String = {
    s"$productPrefix($underlying)"
  }
  // subclasses are expected to implement equals, productPrefix, and canEqual
}
// Boxes/unboxes Char-backed value classes; acts as ClassTag so Array[T]
// creation yields a VCCharArray.
abstract class VCCharCompanion[T <: VCCharPrototype] extends ClassTag[T] {
  def box(underlying: Char): T
  final def unbox(boxed: T) = boxed.underlying
  implicit def classTag: this.type = this
  override def newArray(len: Int): Array[T] =
    new VCCharArray(this, len).asInstanceOf[Array[T]]
  final def _1$extension(underlying: Char) = underlying
  final def hashCode$extension(underlying: Char) = underlying.hashCode()
  final def toString$extension(underlying: Char) = s"${productPrefix$extension(underlying)}($underlying)"
  def productPrefix$extension(underlying: Char): String
}
// Array[Char]-backed storage; boxes on read, unboxes on write.
final class VCCharArray[T <: VCCharPrototype] private (val arr: Array[Char], val ct: VCCharCompanion[T])
  extends VCArrayPrototype[T] {
  def this(ct: VCCharCompanion[T], sz: Int) =
    this(new Array[Char](sz), ct)
  def apply(idx: Int) =
    ct.box(arr(idx))
  def update(idx: Int, elem: T) =
    arr(idx) = ct.unbox(elem)
  def length: Int = arr.length
  override def clone(): VCCharArray[T] = {
    new VCCharArray[T](arr.clone(), ct)
  }
  override def toString: String = {
    "[" + ct.runtimeClass
  }
}
// Byte specialization of the value-class runtime pattern (see the Float
// variants earlier in this file for a fully commented version).
abstract class VCBytePrototype(val underlying: Byte) extends VCPrototype {}
// Product1 support for case value classes; subclasses implement
// equals/productPrefix/canEqual.
abstract class VCByteCasePrototype(underlying: Byte) extends VCBytePrototype(underlying) with Product1[Byte] {
  final def _1: Byte = underlying
  override final def hashCode(): Int = {
    underlying.hashCode()
  }
  override final def toString: String = {
    s"$productPrefix($underlying)"
  }
}
// Boxes/unboxes Byte-backed value classes; acts as ClassTag so Array[T]
// creation yields a VCByteArray.
abstract class VCByteCompanion[T <: VCBytePrototype] extends ClassTag[T] {
  def box(underlying: Byte): T
  final def unbox(boxed: T) = boxed.underlying
  implicit def classTag: this.type = this
  override def newArray(len: Int): Array[T] =
    new VCByteArray(this, len).asInstanceOf[Array[T]]
  final def _1$extension(underlying: Byte) = underlying
  final def hashCode$extension(underlying: Byte) = underlying.hashCode()
  final def toString$extension(underlying: Byte) = s"${productPrefix$extension(underlying)}($underlying)"
  def productPrefix$extension(underlying: Byte): String
}
// Array[Byte]-backed storage; boxes on read, unboxes on write.
final class VCByteArray[T <: VCBytePrototype] private (val arr: Array[Byte], val ct: VCByteCompanion[T])
  extends VCArrayPrototype[T] {
  def this(ct: VCByteCompanion[T], sz: Int) =
    this(new Array[Byte](sz), ct)
  def apply(idx: Int) =
    ct.box(arr(idx))
  def update(idx: Int, elem: T) =
    arr(idx) = ct.unbox(elem)
  def length: Int = arr.length
  override def clone(): VCByteArray[T] = {
    new VCByteArray[T](arr.clone(), ct)
  }
  override def toString: String = {
    "[" + ct.runtimeClass
  }
}
| yusuke2255/dotty | src/dotty/runtime/vc/VCPrototype.scala | Scala | bsd-3-clause | 14,032 |
package connectorFamily.featureModel
/** Implicit conversions that make feature-model construction less verbose. */
object Utils {
  implicit def str2FID(str:String): FID = FID(str)
  implicit def str2AID(str:String): AID = AIDU(str)
  // Enables the "a" -> b arrow syntax for AID pairs.
  implicit def str2ArrowAID(str: String): ArrowAssoc[AID] =
    new ArrowAssoc(AIDU(str))
  implicit def str2AIDTerm(str:String): AIDTerm = AIDTerm(AIDU(str))
//  implicit def str2AIDTerm(str:String): AIDTerm = AIDTerm(AIDU(str))
  implicit def str2FeatTerm(str:String): FIDExp = FIDExp(FID(str))
  implicit def int2IntVal(n:Int): IntVal = IntVal(n)
  // Step-1 ranges become a bounded interval; other step sizes enumerate members.
  // NOTE(review): r.end is exclusive for `until`-ranges -- confirm that
  // IntAttrBounded expects an exclusive upper bound, else this is off by one.
  implicit def range2attrRange(r:Range): AttrRange =
    if (r.step == 1) IntAttrBounded(r.start,r.end)
    else IntAttrSet(r.toIterable)
}
} | joseproenca/connector-family | src/main/scala/connectorFamily/featureModel/Utils.scala | Scala | mit | 663 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.action
import io.gatling.commons.util.Clock
import io.gatling.core.session.{ Expression, Session }
import io.gatling.core.stats.StatsEngine
/**
 * Action that applies a user-supplied function to the virtual user's Session
 * and, if the function succeeds, forwards the resulting session to `next`.
 * Failures are routed through `recover`, inherited from ChainableAction.
 */
class SessionHook(
    sessionFunction: Expression[Session],
    val name: String,
    val statsEngine: StatsEngine,
    val clock: Clock,
    val next: Action
) extends ChainableAction {

  /** @param session the session of the virtual user */
  override def execute(session: Session): Unit = recover(session) {
    sessionFunction(session).map(next ! _)
  }
}
| gatling/gatling | gatling-core/src/main/scala/io/gatling/core/action/SessionHook.scala | Scala | apache-2.0 | 1,224 |
package ddd.support.domain.command
/** Marker trait for commands addressed to a single aggregate. */
trait Command {
  /** Identifier of the aggregate this command targets. */
  def aggregateId: String
}
| pawelkaczor/ddd-leaven-akka | src/main/scala/ddd/support/domain/command/Command.scala | Scala | mit | 80 |
package at.logic.gapt.provers.z3
import java.io.IOException
import at.logic.gapt.formats.veriT.SmtLibExporter
import at.logic.gapt.proofs.HOLSequent
import at.logic.gapt.proofs.lkNew.LKProof
import at.logic.gapt.provers.{ renameConstantsToFi, Prover }
import at.logic.gapt.utils.traits.ExternalProgram
import at.logic.gapt.utils.{ runProcess, withTempFile }
object Z3 extends Z3

/**
 * Prover backed by the external Z3 SMT solver, invoked as the "z3" binary
 * reading SMT-LIB 2 input from stdin.
 */
class Z3 extends Prover with ExternalProgram {
  val nLine = sys.props( "line.separator" )

  // Z3 terminates its answer with the platform line separator.
  val unsat = "unsat" + nLine
  val sat = "sat" + nLine

  /**
   * Decides validity of the sequent by handing its SMT-LIB encoding to Z3:
   * "unsat" is mapped to valid and "sat" to invalid (the encoding produced by
   * SmtLibExporter presumably asserts the sequent's negation -- see that class).
   *
   * @throws IllegalArgumentException if Z3 answers anything else (e.g. "unknown"
   *         or an error message); previously this surfaced as a cryptic MatchError.
   */
  override def isValid( seq: HOLSequent ): Boolean = {
    runProcess( Seq( "z3", "-smt2", "-in" ), SmtLibExporter( renameConstantsToFi( seq )._1 ) ) match {
      case `unsat` => true
      case `sat`   => false
      case other   => throw new IllegalArgumentException( s"Z3 returned an unexpected answer: $other" )
    }
  }

  /** Proof import from Z3 is not implemented. */
  override def getLKProof( seq: HOLSequent ): Option[LKProof] =
    throw new UnsupportedOperationException

  /** True iff the "z3" binary can be executed on this system. */
  override val isInstalled: Boolean =
    try {
      runProcess( Seq( "z3", "-version" ) )
      true
    } catch {
      case _: IOException => false
    }
}
| loewenheim/gapt | src/main/scala/at/logic/gapt/provers/z3/Z3.scala | Scala | gpl-3.0 | 1,023 |
// scalac: -Xsource:3
// Neg test: under -Xsource:3, `_` type wildcards must be written `?` instead.
object Test {
  val underscores: Map[_ <: AnyRef, _ >: Null] = Map()
  val qmarks: Map[? <: AnyRef, ? >: Null] = Map()
  underscores : Map[String, String] // error wildcard variables starting with `_`
  qmarks : Map[String, String] // error – wildcard variables should start with `?` to differentiate from the old syntax
  // (and have a mildly more readable error...)
}
| scala/scala | test/files/neg/wildcards-future.scala | Scala | apache-2.0 | 429 |
/*
* Copyright 2011-2014 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.ebpi.yaidom
/**
 * Wrapper around class `org.w3c.dom.Element`, adapting it to the [[nl.ebpi.yaidom.queryapi.ElemLike]] API.
 *
 * This wrapper is not thread-safe, and should only be used if the immutable element classes such as [[nl.ebpi.yaidom.simple.Elem]]
 * are not the best fit.
 *
 * Such scenarios could be as follows:
 * <ul>
 * <li>Conversions from DOM to [[nl.ebpi.yaidom.simple.Elem]] (and back) have more runtime costs than needed or wanted.</li>
 * <li>Round-tripping from XML string to "tree", and back to XML string should keep the resulting XML string as much as possible the same.</li>
 * <li>In-place updates (instead of "functional updates") of DOM trees are desired.</li>
 * <li>The DOM elements are desired for their PSVI information.</li>
 * </ul>
 *
 * Yet be aware that the advantages of immutability and thread-safety (offered by immutable `Elem` classes) are lost when using
 * this wrapper API. Mutable DOM trees are also very easy to break, even via the `ElemLike` API, if element predicates with
 * side-effects are used.
 *
 * To explain the "round-tripping" item above, note that class [[nl.ebpi.yaidom.simple.Elem]] considers attributes in an element unordered,
 * let alone namespace declarations. That is consistent with the XML Infoset specification, but can sometimes be impractical.
 * Using `org.w3c.dom.Element` instances, parsed from XML input sources, chances are that this order is retained.
 *
 * There are of course limitations to what formatting data is retained in a DOM tree. A good example is the short versus long form
 * of an empty element. Typically parsers do not pass any information about this distinction, so it is unknown whether the XML input source
 * used the long or short form for an empty element.
 *
 * It should also be noted that the configuration of XML parsers and serializers can be of substantial influence on the extent that
 * "round-tripping" keeps the XML string the same. Whitespace handling is one such area in which different configurations can lead
 * to quite different "round-tripping" results.
 *
 * Note that in one way these wrappers are somewhat unnatural: the `ElemLike` API uses immutable Scala collections everywhere,
 * whereas the elements of those collections are mutable (!) DOM node wrappers. The wrappers are idiomatic Scala in their use of
 * the Scala Collections API, whereas the wrapped DOM nodes come from a distant past, when imperative programming and "mutability
 * everywhere" ruled.
 *
 * In comparison to XPath against DOM trees, the `ElemLike` API may be more verbose, but it requires no setup and
 * "result set handling" boilerplate.
 *
 * The package object itself declares no members; it exists to carry this package documentation.
 *
 * @author Chris de Vreeze
 */
package object dom
| EBPI/yaidom | src/main/scala/nl/ebpi/yaidom/dom/package.scala | Scala | apache-2.0 | 3,307 |
package ratelimiter
/**
* Created by esfandiaramirrahimi on 15-11-03.
*/
/** Tracks per-client request counts for rate limiting. */
trait RateLimiter {
  /** Records one request for the given client IP.
   *  @return presumably whether the client is still within its limit -- TODO
   *          confirm against implementations. */
  def increment(clientIP: String): Boolean
  /** Records `count` requests for the given client IP.
   *  @return see the single-argument variant. */
  def increment(clientIP: String, count: Int): Boolean
}
| esfand-r/ratelimiter | src/main/scala/ratelimiter/RateLimiter.scala | Scala | mit | 197 |
/*************************************************************************
* *
* This file is part of the 20n/act project. *
* 20n/act enables DNA prediction for synthetic biology/bioengineering. *
* Copyright (C) 2017 20n Labs, Inc. *
* *
* Please direct all queries to act@20n.com. *
* *
* This program is free software: you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation, either version 3 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program. If not, see <http://www.gnu.org/licenses/>. *
* *
*************************************************************************/
package com.act.biointerpretation.rsmiles.single_sar_construction
import org.scalatest.{FlatSpec, Matchers}
/**
* Currently this class is not a real test.
* It just exemplifies a number of cases in which processing a db smiles into its processed substrate & product form fails
* because some Carbon in the processed substrate ends up with too many hydrogens.
* TODO: fix this bug!
* Not sure what's causing it.
*/
class SingleSarChemicalsTest extends FlatSpec with Matchers {

  /**
   * Shared driver for all cases below (the four original tests were verbatim
   * copies differing only in the abstract SMILES string): processes one
   * abstract chemical into its concrete substrate/product forms, asserts that
   * processing succeeded, and prints the results for manual inspection.
   */
  private def checkSubstrateAndProduct(abstractChem: String): Unit = {
    val singleSarChemicals = new SingleSarChemicals(null)
    val dummyId = 1
    val maybeInfo = singleSarChemicals.calculateConcreteSubstrateAndProduct(dummyId, abstractChem)
    maybeInfo.isDefined should be(true)
    val chemicalInfo = maybeInfo.get
    println(s"Abstract chemical: ${chemicalInfo.dbSmiles}")
    println(s"Substrate : ${chemicalInfo.getAsSubstrate}")
    println(s"Product : ${chemicalInfo.getAsProduct}")
  }

  // Test descriptions kept identical to the originals so external filters keep working.
  "SingleSarChemicals" should "have the correct valence" in {
    checkSubstrateAndProduct("[CH](=O)[R]")
  }

  "SingleSarChemicals" should "have the correct valence again" in {
    checkSubstrateAndProduct("CC([R])")
  }

  "SingleSarChemicals" should "have the correct valence again again" in {
    checkSubstrateAndProduct("[CH2]([R])O")
  }

  "SingleSarChemicals" should "have the correct valence on chlorine" in {
    checkSubstrateAndProduct("[CH2]([Cl])O")
  }
}
| 20n/act | reachables/src/test/scala/com/act/biointerpretation/rsmiles/single_sar_construction/SingleSarChemicalsTest.scala | Scala | gpl-3.0 | 4,311 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.variable
import scala.collection.mutable
import java.io._
import java.util.zip.{GZIPOutputStream, GZIPInputStream}
import cc.factorie.util.{JavaHashMap, Cubbie}
import cc.factorie.{variable, util}
// For single categorical values
/** A value in a CategoricalDomain.
    Each value is assigned an intValue in the range 0...size-1.
    Each value has a category of type C.
    These are the values used to map from words to integer parameter indices, etc.
    @author Andrew McCallum */
trait CategoricalValue[C] extends DiscreteValue {
  def domain: CategoricalDomain[C]
  /** The user-level category (e.g. a String token) this discrete value stands for. */
  def category: C
  // Print the category itself rather than the integer index.
  override def toString: String = category.toString
}
/** A domain for categorical variables. It stores not only a size,
but also the mapping from category values (of type T = this.CategoryType)
to densely packed integers suitable for indices into parameter
vectors. For example, a common use case is mapping Strings (NLP or
document classification words) into indices, and back.
Furthermore if domain.gatherCounts = true, this domain will count
the number of calls to 'index'. Then you can reduce the size of
the Domain by calling 'trimBelowCount' or 'trimBelowSize', which
will recreate the new mapping from categories to densely-packed
non-negative integers (making the old mapping no longer valid).
Thus, in typical usage you would (1) read in the data,
(2) trim the domain, (3) re-read the data with the new
mapping, creating variables.
@author Andrew McCallum
*/
class CategoricalDomain[C] extends DiscreteDomain(0) with IndexedSeq[CategoricalValue[C]] with CategoricalVectorDomain[C] with Domain with cc.factorie.util.ProtectedIntArrayBuffer {
protected class CategoricalValue(val singleIndex:Int, val category:C) extends variable.CategoricalValue[C] {
override def copy = this
def domain = CategoricalDomain.this
def dim1 = CategoricalDomain.this.size
}
type Value <: variable.CategoricalValue[C]
def this(values:Iterable[C]) = { this(); values.foreach(value(_)); freeze() }
private val __indices: java.util.HashMap[C,Value] = new java.util.HashMap[C,Value]
def _indices = __indices
private val lock = new util.RWLock
/** If positive, throw error if size tries to grow larger than it. Use for growable multi-dim Factor weightsSet;
override this method with the largest you think your growable domain will get. */
var maxSize = -1
override def dimensionDomain: CategoricalDomain[C] = this
@inline final override def length = lock.withReadLock { _elements.length }
var growPastMaxSize: Boolean = true
/** Return the CategoricalValue associated with the given category.
If the category is not already in this CategoricalDomain and 'frozen' is false,
and 'mazSize' will not be exceeded,
then add the category to this CategoricalDomain.
This method is thread-safe so that multiple threads may read and index data simultaneously. */
def value(category: C): Value = {
if (category == null) throw new Error("Null is not a valid category.")
if (_frozen) {
__indices.get(category)
} else {
lock.withReadLock {
var thisIndex = __indices.get(category)
if (thisIndex eq null) { // double-tap locking necessary to ensure only one thread adds to _indices
lock.readUnlock()
lock.writeLock()
try {
thisIndex = __indices.get(category)
if (thisIndex eq null) {
val m = _elements.length
if (maxSize > 0 && m >= maxSize) {
if (growPastMaxSize)
throw new Error("Index size exceeded maxSize")
else {
println("Warning - max domain size %d exceeded! Freezing." format maxSize)
freeze()
return null.asInstanceOf[Value]
}
}
// TODO Consider calling "new String(category)" here to avoid substring memory leak: http://stackoverflow.com/questions/15612157/substring-method-in-string-class-causes-memory-leak
val e: Value = newCategoricalValue(m, category).asInstanceOf[Value]
_elements += e
__indices.put(category, e)
thisIndex = e
}
} finally {
lock.writeUnlock()
lock.readLock()
}
}
thisIndex
}
}
}
/** Return the CategoricalValue at index i. */
override def apply(i:Int): Value = _elements(i)
def category(i:Int): C = lock.withReadLock {_elements(i).category.asInstanceOf[C]}
def categories: Seq[C] = lock.withReadLock { _elements.map(_.category.asInstanceOf[C]) }
/** Return the integer associated with the category, do not increment the count of category, even if gatherCounts is true. */
def indexOnly(category:C): Int = {
val v = value(category)
if (v eq null) -1 else v.intValue
}
/** Return the integer associated with the category, and also, if gatherCounts is true, also increment the count of category.
If the category is not already in this CategoricalDomain and 'frozen' is false,
and 'mazSize' will not be exceeded,
then add the category to this CategoricalDomain.
This method is thread-safe so that multiple threads may read and index data simultaneously. */
def index(category:C): Int = {
val i = indexOnly(category)
if (gatherCounts && i != -1) incrementCount(i)
i
}
  /** Return the integer associated with the category,
      and also (whether or not 'gatherCounts' is true)
      increment by 'count' the number of times this Domain says the category has been seen.
      If the category is not already in this CategoricalDomain and 'frozen' is false,
      and 'maxSize' will not be exceeded,
      then add the category to this CategoricalDomain.
      This method is thread-safe so that multiple threads may read and index data simultaneously. */
  def indexWithCount(category:C, count:Int): Int = {
    val i = indexOnly(category)
    this synchronized { _increment(i, count) } // serialize count mutation across threads
    i
  }
  /** Like indexOnly, but throw an exception if the category is not already there. */
  def getIndex(category:C): Int = lock.withReadLock({
    val v = __indices.get(category)
    if (v ne null) v.intValue else throw new Error("Category not present; use index() to cause the creation of a new value.")
  })
  /** Like indexOnly, but return -1 if the category is not already there; never adds the category. */
  def indexOrNegativeOne(category:C): Int = lock.withReadLock({
    val v = __indices.get(category)
    if (v eq null) -1 else { v.intValue }
  })
  /** Permanently prevent this domain from growing; lookups of unknown categories then yield -1. */
  override def freeze(): Unit = {
    _frozen = true
  }
  /** Index a single category (alias for index). */
  def +=(x:C) : Unit = this.index(x)
  /** Index every category in the collection. */
  def ++=(xs:Traversable[C]) : Unit = xs.foreach(this += _)
  /** Wipe the domain, its elements, indices and counts clean */
  def clear(): Unit = { _frozen = false; _elements.clear(); lock.withWriteLock { _indices.clear(); _clear() } }
  // Separate argument types preserves return collection type
  def indexAll(c: Iterator[C]) = c map index
  def indexAll(c: List[C]) = c map index
  def indexAll(c: Array[C]) = c map index
  def indexAll(c: Set[C]) = c map index
  /** Use the category's string form as the human-readable name of dimension i. */
  override def dimensionName(i:Int): String = category(i).toString
  override def toString() = "CategoricalDomain[]("+size+")"
  /** Factory for the value objects stored in this domain; override to use a CategoricalValue subclass. */
  protected def newCategoricalValue(i:Int, e:C) = new CategoricalValue(i, e)
  /** If type T is not string, this should be overridden to provide de-serialization */
  override def stringToCategory(s:String): C = s.asInstanceOf[C]
  // Code for managing occurrence counts
  /** If true, then each call to CategoricalDomain.index will increment a count associated with value in the domain.
      This count can then later be used to trim the set of domain values by various thresholds. */
  var gatherCounts = false
  /** Occurrence count recorded for index i. */
  def count(i:Int): Int = _apply(i)
  /** Occurrence count for a category; note indexOnly may return -1 for an unknown category. */
  def count(category:C): Int = _apply(indexOnly(category))
  /** All occurrence counts, one per indexed category. */
  def counts: cc.factorie.util.IntSeq = _takeAsIntSeq(length) // _toSeq.take(length)
  // Cached sum of all counts; only trusted once the domain is frozen (counts can no longer change).
  private var cachedCountsTotal: Long = -1
  /** Sum of all occurrence counts; the cached value is reused only on a frozen domain. */
  def countsTotal: Long =
    if (frozen && cachedCountsTotal >= 0)
      cachedCountsTotal
    else {
      var total: Long = 0
      var i = 0
      val len = _length
      while (i < len) {
        total += _apply(i)
        i += 1
      }
      cachedCountsTotal = total
      total
    }
  /** Increment the occurrence count of index i by one (synchronized). */
  def incrementCount(i:Int): Unit = this synchronized { _increment(i, 1) }
  // NOTE(review): if the category is unknown, indexOnly returns -1 and this increments index -1 — verify intended.
  def incrementCount(category:C): Unit = incrementCount(indexOnly(category))
  // True if at least one category has a non-zero count.
  private def someCountsGathered: Boolean = { var i = 0; while (i < _length) { if (_apply(i) > 0) return true; i += 1 }; false }
  /** Discard all entries whose count is below 'threshold', then freeze the domain.
      Returns the number of unique entries trimmed. */
  def trimBelowCount(threshold:Int, preserveCounts:Boolean = false): Int = {
    assert(!frozen)
    if (!someCountsGathered) throw new Error("Can't trim without first gathering any counts.")
    val origEntries = _elements.clone()
    val origCounts = _toArray // snapshot counts before clear() resets them
    clear() // This will also clear the counts
    gatherCounts = false
    if (preserveCounts) { for (i <- 0 until origEntries.size) if (origCounts(i) >= threshold) indexWithCount(origEntries(i).category.asInstanceOf[C], origCounts(i)) }
    else { for (i <- 0 until origEntries.size) if (origCounts(i) >= threshold) indexOnly(origEntries(i).category.asInstanceOf[C]) }
    freeze()
    origEntries.size - size
  }
/** Returns the number of unique entries trimmed */
def trimAboveCount(threshold:Int): Int = {
assert(!frozen)
if (!someCountsGathered) throw new Error("Can't trim without first gathering any counts.")
val origEntries = _elements.clone()
clear()
gatherCounts = false
for (i <- 0 until origEntries.size)
if (_apply(i) <= threshold) indexOnly(origEntries(i).category.asInstanceOf[C])
_clear()
freeze()
origEntries.size - size
}
  /** Trim low-count entries until the domain size drops below 'target'.
      Returns the count threshold below which entries were discarded. */
  def trimBelowSize(target:Int): Int = {
    assert(!frozen)
    var threshold = 2 // start by discarding singletons; raise until the surviving set is small enough
    while (sizeAtOrAboveCount(threshold) >= target) threshold += 1
    trimBelowCount(threshold)
    threshold
  }
  /** Return the number of unique entries with count equal to 'c'. */
  def sizeAtCount(c:Int): Int = {
    if (!someCountsGathered) throw new Error("No counts gathered.")
    var ret = 0
    val min = math.min(size, _length) // the counts array and element list may differ in length
    for (i <- 0 until min) if (_apply(i) == c) ret += 1
    ret
  }
  /** Return the number of unique entries with count greater than or equal to 'threshold'.
      This returned value will be the size of the Domain after a call to trimBelowCount(threshold). */
  def sizeAtOrAboveCount(threshold:Int): Int = {
    if (!someCountsGathered) throw new Error("No counts gathered.")
    var ret = 0
    val min = math.min(size, _length)
    for (i <- 0 until min) if (_apply(i) >= threshold) ret += 1
    ret
  }
  /** Return the number of unique entries with count below 'threshold'. */
  def sizeBelowCount(threshold:Int): Int = size - sizeAtOrAboveCount(threshold)
}
object CategoricalDomain {
  /** Sentinel index returned for categories not present in a domain. */
  val NULL_INDEX = -1
}
/** Cubbie (de)serialization wrapper for a CategoricalDomain.
    Serialized slots: "size", "frozen" and "categories" (string forms). */
class CategoricalDomainCubbie[T](val cd: CategoricalDomain[T]) extends Cubbie {
  // This cubbie automatically writes into the underlying CategoricalDomain instead of
  // using an intermediate HashMap representation
  setMap(new mutable.Map[String, Any] {
    override def update(key: String, value: Any): Unit = {
      val isFrozen = cd.frozen // remember so frozen-ness can be restored after loading categories
      if (key == "size") { /* cd.size = value.asInstanceOf[Int] */ }
      else if (key == "frozen") { if (value.asInstanceOf[Boolean]) cd.freeze() }
      else if (key == "categories") {
        cd.unfreeze() // temporarily allow growth while the categories are loaded
        val categories = value.asInstanceOf[Iterable[String]]
        //categories.map(c => if (cd.string2T != null) cd.string2T(c) else c.asInstanceOf[T]).foreach(cd.value(_))
        categories.map(c => cd.stringToCategory(c)).foreach(cd.value(_))
        if (isFrozen) cd.freeze()
      } else sys.error("Unknown cubbie slot key: \\"%s\\"" format key)
    }
    def += (kv: (String, Any)): this.type = { update(kv._1, kv._2); this }
    def -= (key: String): this.type = sys.error("Can't remove slots from cubbie map!")
    def get(key: String): Option[Any] =
      if (key == "size") Some(cd.size)
      else if (key == "frozen") Some(cd.frozen)
      else if (key == "categories") Some(cd.categories.map(_.toString)) // toString because not all categories are already Strings
      else None //{ println("CategoricalDomainCubbie.get key="+key); None }
    def iterator: Iterator[(String, Any)] = List("size", "frozen", "categories").map(s => (s, get(s).get)).iterator
  })
}
/* CategoricalDomain also facilitates counting occurrences of entries, and trimming the Domain size.
WARNING: Any indices that you use and store before trimming will not be valid after trimming!
Typical usage:
<pre>
class Token(s:String) extends CategoricalVariable(s)
data.readAndIndex
Domain[Token].trimBelowSize(100000) // this also automatically turns off counting
data.readIndexAndCreateVariables // again
</pre>
*/
// TODO Consider categorical remapping interface in the future.
///** To be used to avoid re-reading the data after CategoricalDomain trimming,
// but not yet implemented. */
//trait CategoricalRemapping { def remapCategories(fn:(Int)=>Int) }
| asubbaswamy/factorie | src/main/scala/cc/factorie/variable/CategoricalDomain.scala | Scala | apache-2.0 | 14,062 |
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.recorder.scenario.template
import com.dongxiguo.fastring.Fastring.Implicits._
import io.gatling.recorder.scenario.{ ProtocolDefinition, ScenarioElement, TagElement }
import io.gatling.recorder.scenario.{ PauseElement, RequestElement }
import io.gatling.recorder.config.RecorderConfiguration
/** Renders the complete Gatling Simulation .scala source for a recorded session. */
private[scenario] object SimulationTemplate {
  /** Build the simulation source text.
      `scenarioElements` is either a flat element list (Left) or pre-split chains (Right). */
  def render(
    packageName: String,
    simulationClassName: String,
    protocol: ProtocolDefinition,
    headers: Map[Int, Seq[(String, String)]],
    scenarioName: String,
    scenarioElements: Either[Seq[ScenarioElement], Seq[Seq[ScenarioElement]]]
  )(implicit config: RecorderConfiguration): String = {
    // Package clause; empty for the default package.
    def renderPackage = if (!packageName.isEmpty) fast"package $packageName\\n" else ""
    // One `headers_N` val per distinct recorded headers block.
    def renderHeaders = {
      def printHeaders(headers: Seq[(String, String)]) = {
        if (headers.size > 1) {
          val mapContent = headers.map { case (name, value) => fast" ${protectWithTripleQuotes(name)} -> ${protectWithTripleQuotes(value)}" }.mkFastring(",\\n")
          fast"""Map(
$mapContent)"""
        } else {
          // NOTE(review): assumes a headers block is never empty; headers(0) would throw otherwise — confirm upstream invariant.
          val (name, value) = headers(0)
          fast"Map(${protectWithTripleQuotes(name)} -> ${protectWithTripleQuotes(value)})"
        }
      }
      headers
        .map { case (headersBlockIndex, headersBlock) => fast""" val ${RequestTemplate.headersBlockName(headersBlockIndex)} = ${printHeaders(headersBlock)}""" }
        .mkFastring("\\n\\n")
    }
    // Dispatch to the per-element-type template.
    def renderScenarioElement(se: ScenarioElement, extractedUris: ExtractedUris) = se match {
      case TagElement(text) => fast"// $text"
      case PauseElement(duration) => PauseTemplate.render(duration)
      case request: RequestElement => RequestTemplate.render(simulationClassName, request, extractedUris)
    }
    def renderProtocol(p: ProtocolDefinition) = ProtocolTemplate.render(p)
    // Either one `scn` chain (Left) or numbered chain_i vals exec'ed in order (Right).
    def renderScenario(extractedUris: ExtractedUris) = {
      scenarioElements match {
        case Left(elements) =>
          val scenarioElements = elements.map { element =>
            val prefix = element match {
              case TagElement(_) => "" // comments are emitted bare, without a leading dot
              case _ => "."
            }
            fast"$prefix${renderScenarioElement(element, extractedUris)}"
          }.mkFastring("\\n\\t\\t")
          fast"""val scn = scenario("$scenarioName")
		$scenarioElements"""
        case Right(chains) =>
          val chainElements = chains.zipWithIndex.map {
            case (chain, i) =>
              // Only the first real element of a chain starts the call chain; the rest are dotted.
              var firstNonTagElement = true
              val chainContent = chain.map { element =>
                val prefix = element match {
                  case TagElement(_) => ""
                  case _ => if (firstNonTagElement) { firstNonTagElement = false; "" } else "."
                }
                fast"$prefix${renderScenarioElement(element, extractedUris)}"
              }.mkFastring("\\n\\t\\t")
              fast"val chain_$i = $chainContent"
          }.mkFastring("\\n\\n")
          val chainsList = (for (i <- 0 until chains.size) yield fast"chain_$i").mkFastring(", ")
          fast"""$chainElements
	val scn = scenario("$scenarioName").exec(
		$chainsList)"""
      }
    }
    // Flatten Left/Right into one element sequence for URI extraction.
    def flatScenarioElements(scenarioElements: Either[Seq[ScenarioElement], Seq[Seq[ScenarioElement]]]): Seq[ScenarioElement] =
      scenarioElements match {
        case Left(scenarioElements) => scenarioElements
        case Right(scenarioElements) => scenarioElements.flatten
      }
    val extractedUris = new ExtractedUris(flatScenarioElements(scenarioElements))
    // The base URL is emitted via the protocol; only other URIs get dedicated vals.
    val nonBaseUrls = extractedUris.vals.filter(_.value != protocol.baseUrl)
    fast"""$renderPackage
import scala.concurrent.duration._
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import io.gatling.jdbc.Predef._
class $simulationClassName extends Simulation {
	val httpProtocol = http${renderProtocol(protocol)}
$renderHeaders
	${ValuesTemplate.render(nonBaseUrls)}
	${renderScenario(extractedUris)}
	setUp(scn.inject(atOnceUsers(1))).protocols(httpProtocol)
}""".toString()
  }
}
| GabrielPlassard/gatling | gatling-recorder/src/main/scala/io/gatling/recorder/scenario/template/SimulationTemplate.scala | Scala | apache-2.0 | 4,866 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.ui
import java.util.concurrent.atomic.AtomicLong
import scala.collection.mutable
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.metric.{SQLMetricParam, SQLMetricValue}
/**
* A graph used for storing information of an executionPlan of DataFrame.
*
* Each graph is defined with a set of nodes and a set of edges. Each node represents a node in the
* SparkPlan tree, and each edge represents a parent-child relationship between two nodes.
*/
private[ui] case class SparkPlanGraph(
    nodes: Seq[SparkPlanGraphNode], edges: Seq[SparkPlanGraphEdge]) {

  /** Render this plan graph in Graphviz dot syntax; metric values are resolved per node. */
  def makeDotFile(metrics: Map[Long, String]): String = {
    val nodeLines = nodes.map(node => node.makeDotNode(metrics) + "\n")
    val edgeLines = edges.map(edge => edge.makeDotEdge + "\n")
    "digraph G {\n" + nodeLines.mkString + edgeLines.mkString + "}"
  }
}
private[sql] object SparkPlanGraph {

  /**
   * Build a SparkPlanGraph from the root of a SparkPlan tree.
   */
  def apply(plan: SparkPlan): SparkPlanGraph = {
    val nodeIdGenerator = new AtomicLong(0)
    val nodes = mutable.ArrayBuffer[SparkPlanGraphNode]()
    val edges = mutable.ArrayBuffer[SparkPlanGraphEdge]()
    buildSparkPlanGraphNode(plan, nodeIdGenerator, nodes, edges)
    new SparkPlanGraph(nodes, edges)
  }

  // Depth-first construction: appends a node for `plan`, recurses into its children,
  // and records a child->parent edge per child. Returns the node created for `plan`.
  private def buildSparkPlanGraphNode(
      plan: SparkPlan,
      nodeIdGenerator: AtomicLong,
      nodes: mutable.ArrayBuffer[SparkPlanGraphNode],
      edges: mutable.ArrayBuffer[SparkPlanGraphEdge]): SparkPlanGraphNode = {
    // One SQLPlanMetric per accumulator-backed metric declared by the plan node.
    val metrics = plan.metrics.toSeq.map { case (key, metric) =>
      SQLPlanMetric(metric.name.getOrElse(key), metric.id,
        metric.param.asInstanceOf[SQLMetricParam[SQLMetricValue[Any], Any]])
    }
    val node = SparkPlanGraphNode(
      nodeIdGenerator.getAndIncrement(), plan.nodeName, plan.simpleString, plan.metadata, metrics)
    nodes += node
    val childrenNodes = plan.children.map(
      child => buildSparkPlanGraphNode(child, nodeIdGenerator, nodes, edges))
    for (child <- childrenNodes) {
      edges += SparkPlanGraphEdge(child.id, node.id)
    }
    node
  }
}
/**
 * Represent a node in the SparkPlan tree, along with its metrics.
 *
 * @param id generated by "SparkPlanGraph". There is no duplicate id in a graph
 * @param name the name of this SparkPlan node
 * @param desc the detailed (simpleString) description of this SparkPlan node
 * @param metadata extra key/value information attached to this SparkPlan node
 * @param metrics metrics that this SparkPlan node will track
 */
private[ui] case class SparkPlanGraphNode(
    id: Long,
    name: String,
    desc: String,
    metadata: Map[String, String],
    metrics: Seq[SQLPlanMetric]) {

  /** Render this node as a dot statement; metric values are looked up by accumulator id. */
  def makeDotNode(metricsValue: Map[Long, String]): String = {
    val builder = new mutable.StringBuilder(name)
    // Only metrics that currently have a value are displayed.
    val values = for {
      metric <- metrics
      value <- metricsValue.get(metric.accumulatorId)
    } yield {
      metric.name + ": " + value
    }
    if (values.nonEmpty) {
      // If there are metrics, display each entry in a separate line. We should use an escaped
      // "\n" here to follow the dot syntax.
      //
      // Note: whitespace between two "\n"s is to create an empty line between the name of
      // SparkPlan and metrics. If removing it, it won't display the empty line in UI.
      builder ++= "\\n \\n"
      builder ++= values.mkString("\\n")
    }
    s"""  $id [label="${builder.toString()}"];"""
  }
}
/**
* Represent an edge in the SparkPlan tree. `fromId` is the parent node id, and `toId` is the child
* node id.
*/
private[ui] case class SparkPlanGraphEdge(fromId: Long, toId: Long) {
  /** Render this edge as one indented dot statement, e.g. "  1->2;" followed by a newline. */
  def makeDotEdge: String = "  " + fromId + "->" + toId + ";\n"
}
| chenc10/Spark-PAF | sql/core/src/main/scala/org/apache/spark/sql/execution/ui/SparkPlanGraph.scala | Scala | apache-2.0 | 4,448 |
/*
* Copyright 2009-2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.json4s
package scalaz
import _root_.scalaz._
import std.option._
/** Core json4s-scalaz glue: the accumulating Result type, the error ADT,
    the JSONR/JSONW type classes and field-reading helpers. */
trait Types {
  // Accumulating validation: collects all errors rather than failing fast.
  type Result[+A] = ValidationNel[Error, A]

  sealed abstract class Error extends Product with Serializable
  // The JSON node had a different shape than the reader expected.
  case class UnexpectedJSONError(was: JValue, expected: Class[_ <: JValue]) extends Error
  // A required object field was absent.
  case class NoSuchFieldError(name: String, json: JValue) extends Error
  // Free-form, reader-defined failure.
  case class UncategorizedError(key: String, desc: String, args: List[Any]) extends Error

  // Convenience constructors for failed Results carrying an UncategorizedError.
  case object Fail {
    def apply[A](key: String, desc: String, args: List[Any]): Result[A] =
      Validation.failureNel(UncategorizedError(key, desc, args))

    def apply[A](key: String, desc: String): Result[A] =
      Validation.failureNel(UncategorizedError(key, desc, Nil))
  }

  // JValue forms a monoid under merge (++) with JNothing as identity.
  implicit def JValueMonoid: Monoid[JValue] = Monoid.instance(_ ++ _, JNothing)
  implicit def JValueEqual: Equal[JValue] = Equal.equalA

  /** Type class for reading an A out of a JValue. */
  trait JSONR[A] {
    def read(json: JValue): Result[A]
  }

  /** Type class for writing an A as a JValue. */
  trait JSONW[A] {
    def write(value: A): JValue
  }

  trait JSON[A] extends JSONR[A] with JSONW[A]

  // Lift a plain function into a JSONR instance.
  implicit def Result2JSONR[A](f: JValue => Result[A]): JSONR[A] = new JSONR[A] {
    def read(json: JValue) = f(json)
  }

  def fromJSON[A: JSONR](json: JValue): Result[A] = implicitly[JSONR[A]].read(json)
  def toJSON[A: JSONW](value: A): JValue = implicitly[JSONW[A]].write(value)

  /** Read field `name` from a JObject. A missing field can still succeed when the
      reader accepts JNothing (e.g. optional readers); otherwise NoSuchFieldError. */
  def field[A: JSONR](name: String)(json: JValue): Result[A] = json match {
    case JObject(fs) =>
      fs.find(_._1 == name)
        .map(f => implicitly[JSONR[A]].read(f._2))
        .orElse(implicitly[JSONR[A]].read(JNothing).fold(_ => none, x => some(Success(x))))
        .getOrElse(Validation.failureNel(NoSuchFieldError(name, json)))
    case x =>
      Validation.failureNel(UnexpectedJSONError(x, classOf[JObject]))
  }

  type EitherNel[+a] = NonEmptyList[Error] \/ a

  // Kleisli form of field(), convenient for composing validations.
  def validate[A: JSONR](name: String) = Kleisli(field[A](name)).mapK[EitherNel, A](_.disjunction)
  implicit def function2EitherNel[A](f: A => Result[A]): (A => EitherNel[A]) = (a: A) => f(a).disjunction
  implicit def kleisli2Result[A](v: Kleisli[EitherNel, JValue, A]): JValue => Result[A] = v.run.andThen(_.validation)

  /** Assemble a JObject from name/value pairs. */
  def makeObj(fields: Traversable[(String, JValue)]): JObject =
    JObject(fields.toList.map { case (n, v) => JField(n, v) })
}

object JsonScalaz extends Types with Lifting with Base with org.json4s.scalaz.Tuples
| karolx/json4s | scalaz/src/main/scala/org/json4s/scalaz/JsonScalaz.scala | Scala | apache-2.0 | 2,973 |
package examples.shapelessmonad
import org.scalatest.{Matchers, FlatSpec}
import org.scalatest.concurrent.ScalaFutures
import scala.concurrent.Future._
/** Verifies that `zip` combines several applicative values (Future, List, Option)
    into a single value of tuples. */
class ZipSpec extends FlatSpec with ScalaFutures with Matchers {

  import scala.concurrent.ExecutionContext.Implicits.global
  import scalaz.Scalaz._

  "zip" should "provide one future from args of futures" in {
    val result = zip(successful(1), successful(true), successful("string"), successful(1.0))
    val (a, b, c, d) = result.futureValue
    (a, b, c, d) should equal((1, true, "string", 1.0))
  }

  it should "work with Lists too" in {
    // For Lists the applicative product yields every combination (cartesian product).
    zip(List(1, 2), List(3, 4), List(5, 6)) should equal(List((1,3,5), (1,3,6), (1,4,5), (1,4,6), (2,3,5), (2,3,6), (2,4,5), (2,4,6)))
  }

  it should "work with Options too" in {
    zip(Option(1), Option(3), Option(5)) should equal(Some(1,3,5))
  }
}
| haghard/shapeless-playbook | src/test/scala/examples/shapelessmonad/ZipSpec.scala | Scala | apache-2.0 | 858 |
/**
* Copyright 2013 Robert Welin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mooo.nilewapps.bokbytarappen.server.service
/**
* Contains routes for user Friend management.
*/
trait FriendService {
  // NOTE(review): intentionally empty — friend-management routes are yet to be implemented.
}
| nilewapp/BokBytarAppenServer | src/main/scala/com/mooo/nilewapps/bokbytarappen/server/service/FriendService.scala | Scala | apache-2.0 | 731 |
/*
* Copyright (c) 2015 Goldman Sachs.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Eclipse Distribution License v. 1.0 which accompany this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*/
package org.eclipse.collections.impl
import org.eclipse.collections.api.map.sorted.MutableSortedMap
import org.eclipse.collections.impl.Prelude._
import org.eclipse.collections.impl.block.factory.Functions
import org.eclipse.collections.impl.list.mutable.FastList
import org.eclipse.collections.impl.map.sorted.mutable.TreeSortedMap
import org.eclipse.collections.impl.tuple.Tuples
import org.junit.Test
/** Verifies that every delegating operation of a synchronized MutableSortedMap
    acquires the map's monitor. Each test calls one operation inside
    assertSynchronized (provided by SynchronizedMapIterableTestTrait). */
class SynchronizedSortedMapScalaTest extends SynchronizedMapIterableTestTrait
{
    // Shared fixture: a synchronized view over a three-entry sorted map.
    val classUnderTest: MutableSortedMap[String, String] = TreeSortedMap.newMapWith("A", "1", "B", "2", "C", "3").asSynchronized()

    @Test
    def newEmpty_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.newEmpty
        }
    }

    @Test
    def removeKey_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.remove("1")
        }
    }

    @Test
    def getIfAbsentPut_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.getIfAbsentPut("Nine", () => "foo")
        }
    }

    @Test
    def getIfAbsentPutWith_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.getIfAbsentPutWith("Nine", Functions.getPassThru[String], "foo")
        }
    }

    @Test
    def asUnmodifiable_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.asUnmodifiable
        }
    }

    @Test
    def toImmutable_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.toImmutable
        }
    }

    @Test
    def collectKeysAndValues_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.collectKeysAndValues(FastList.newListWith[java.lang.Integer](4, 5, 6),
            {
                _: java.lang.Integer => ""
            },
            {
                _: java.lang.Integer => ""
            })
        }
    }

    @Test
    def comparator_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.comparator
        }
    }

    @Test
    def values_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.values
        }
    }

    @Test
    def keySet_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.keySet
        }
    }

    @Test
    def entrySet_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.entrySet
        }
    }

    @Test
    def headMap_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.headMap("B")
        }
    }

    @Test
    def tailMap_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.tailMap("B")
        }
    }

    @Test
    def subMap_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.subMap("A", "C")
        }
    }

    @Test
    def firstKey_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.firstKey
        }
    }

    @Test
    def lastKey_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.lastKey
        }
    }

    @Test
    def with_synchronized
    {
        this.assertSynchronized
        {
            this.classUnderTest.`with`(Tuples.pair("D", "4"))
        }
    }
}
| bhav0904/eclipse-collections | scala-unit-tests/src/test/scala/org/eclipse/collections/impl/SynchronizedSortedMapScalaTest.scala | Scala | bsd-3-clause | 3,924 |
package com.raquo.domtypes.generic
/** Codecs translating between Scala values and their DOM attribute/property encodings. */
package object codecs {

  // @TODO[Performance] Which of those methods could benefit from @inline annotation? We typically use those values typed as just `Codec`

  // String Codecs

  object StringAsIsCodec extends AsIsCodec[String]

  // Int Codecs

  object IntAsIsCodec extends AsIsCodec[Int]

  object IntAsStringCodec extends Codec[Int, String] {
    override def decode(domValue: String): Int = domValue.toInt // @TODO this can throw exception. How do we handle this?
    override def encode(scalaValue: Int): String = scalaValue.toString
  }

  // Double Codecs

  object DoubleAsIsCodec extends AsIsCodec[Double]

  object DoubleAsStringCodec extends Codec[Double, String] {
    override def decode(domValue: String): Double = domValue.toDouble// @TODO this can throw exception. How do we handle this?
    override def encode(scalaValue: Double): String = scalaValue.toString
  }

  // Boolean Codecs

  object BooleanAsIsCodec extends AsIsCodec[Boolean]

  // Presence-style attribute: any non-null DOM value (including "") means true;
  // encoding false yields null, which removes the attribute.
  object BooleanAsAttrPresenceCodec extends Codec[Boolean, String] {
    override def decode(domValue: String): Boolean = domValue != null
    override def encode(scalaValue: Boolean): String = if (scalaValue) "" else null
  }

  object BooleanAsTrueFalseStringCodec extends Codec[Boolean, String] {
    override def decode(domValue: String): Boolean = domValue == "true"
    override def encode(scalaValue: Boolean): String = if (scalaValue) "true" else "false"
  }

  object BooleanAsYesNoStringCodec extends Codec[Boolean, String] {
    override def decode(domValue: String): Boolean = domValue == "yes"
    override def encode(scalaValue: Boolean): String = if (scalaValue) "yes" else "no"
  }

  object BooleanAsOnOffStringCodec extends Codec[Boolean, String] {
    override def decode(domValue: String): Boolean = domValue == "on"
    override def encode(scalaValue: Boolean): String = if (scalaValue) "on" else "off"
  }

  // Iterable Codecs

  object IterableAsSpaceSeparatedStringCodec extends Codec[Iterable[String], String] { // use for e.g. className
    override def decode(domValue: String): Iterable[String] = if (domValue == "") Nil else domValue.split(' ')
    override def encode(scalaValue: Iterable[String]): String = scalaValue.mkString(" ")
  }

  object IterableAsCommaSeparatedStringCodec extends Codec[Iterable[String], String] { // use for lists of IDs
    override def decode(domValue: String): Iterable[String] = if (domValue == "") Nil else domValue.split(',')
    override def encode(scalaValue: Iterable[String]): String = scalaValue.mkString(",")
  }
}
| raquo/scala-dom-types | shared/src/main/scala/com/raquo/domtypes/generic/codecs/package.scala | Scala | mit | 2,588 |
/*
* Copyright (c) 2013 Habla Computing
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hablapps.dofollow.portal.project.meeting
import org.hablapps.updatable._
import org.hablapps.speech
import java.text.ParseException
import java.text.SimpleDateFormat
import java.util.Date
import org.hablapps.dofollow._
import org.hablapps.dofollow.portal._
import org.hablapps.dofollow.portal.project.task._
/** Speech rules reacting to meeting life-cycle events: attendance confirmation,
    closing, minutes creation, rescheduling and cancellation. Each `when` block
    plays/abandons the affected roles and cleans up dependent resources. */
trait Rules{ this: speech.Program
  with State
  with department.State
  with portal.State
  with project.State
  with administration.State
  with discussion.State =>

  /** When an attendee is played, the invitee is abandoned
    *
    * @abandon Invitee When the operator confirms attendance
    *
    */
  when {
    case New(attendee: $[Attendee], _: Attendee) if attendee.isA[Attendee] => implicit state =>
      // The confirmed operator's still-playing Invitee role becomes redundant.
      Abandon(attendee.meeting.invitees.filter(_.operator == attendee.operator).filter(_.status.head == PLAYING).head)
  }

  /** When a meeting is closed it's necessary to abandon roles and destroy resources
    *
    * @abandon Moderator When the meeting is closed
    * @abandon Attendee When the meeting is closed
    * @abandon Invitee When the meeting is closed
    * @delete Minutes When the meeting is closed
    *
    */
  when {
    case _Set(meeting: $[Meeting], substatus, Completed, true) =>
      Sequence(
        For(meeting.member){
          case m: $[Moderator] if m.isA[Moderator] && m.status.head == PLAYING =>
            Sequence(
              Let(m, "substatus", ZombieMeeting, true),
              Abandon(m)
            )
          case a: $[Attendee] if a.isA[Attendee] && a.status.head == PLAYING =>
            Sequence(
              Let(a, "substatus", ZombieMeeting, true),
              Abandon(a)
            )
          case i: $[Invitee] if i.isA[Invitee] && i.status.head == PLAYING =>
            Sequence(
              Let(i, "substatus", ZombieMeeting, true),
              Abandon(i)
            )
          case _ => ActionId()
        },
        For(meeting.environment){
          case r: $[Minutes] if r.status.head == CREATED => Sequence(
            Let(r, "substatus", ZombieMeeting, true),
            DeleteR(r)
          )
          case _ => ActionId()
        }
      )
  }

  /** When a minutes is created it's necessary to update the meeting date and the creator */
  when {
    case Performed(create: CreateMinutes) if create.isA[CreateMinutes] => implicit state =>
      Sequence(
        Let(create._new_entity.head, "creator", create.moderator, true),
        LetWholeExtension(create._new_entity.head, "uploadDate", now)
      )
  }

  /** When an attendee is abandoned it's necessary to abandon his roles
    *
    * @abandon Issuer When his top role is abandoned
    * @abandon Receiver When his top role is abandoned
    *
    */
  when {
    case _Set(attendee: $[Attendee], Attendee._status, ABANDONED, true) if attendee.isA[Attendee] =>
      For(attendee.role){
        case rol: $[Issuer] if rol.status.get == PLAYING && rol.isA[Issuer] => Abandon(rol)
        case rol: $[Receiver] if rol.status.get == PLAYING && rol.isA[Receiver] => Abandon(rol)
        case _ => ActionId()
      }
  }

  /** When an invitee is abandoned it's necessary to abandon his roles
    *
    * @abandon Issuer When his top role is abandoned
    * @abandon Receiver When his top role is abandoned
    *
    */
  when {
    case _Set(invitee: $[Invitee], Invitee._status, ABANDONED, true) if invitee.isA[Invitee] =>
      For(invitee.role){
        case rol: $[Issuer] if rol.status.get == PLAYING && rol.isA[Issuer] => Abandon(rol)
        case rol: $[Receiver] if rol.status.get == PLAYING && rol.isA[Receiver] => Abandon(rol)
        case _ => ActionId()
      }
  }

  /** When the meeting date is changed it's necessary to change attributes and roles
    *
    * @play Invitee When a meeting has a new planning
    * @abandon Attendee When a meeting has a new planning
    *
    */
  when {
    case _Set(meeting1: $[Meeting], Meeting._date, _, _) if meeting1.isA[Meeting] => implicit state =>
      Sequence(
        Let(meeting1, "substatus" , Rescheduled, true),
        // Every confirmed attendee must re-confirm: demote each playing Attendee back to Invitee.
        For(meeting1.attendees) {
          case attendee if attendee.status.head == PLAYING =>
            Sequence(
              Play3(Invitee(), attendee.operator, meeting1),
              Abandon(attendee)
            )
          case _ => ActionId()
        }
      )
  }

  /** When a meeting is cancelled it's necessary to abandon roles
    *
    * @abandon Moderator When the meeting is cancelled
    * @abandon Attendee When the meeting is cancelled
    * @abandon Invitee When the meeting is cancelled
    *
    */
  when {
    case _Set(meeting: $[Meeting], SocialEntity._substatus, Undone, true) if meeting.isA[Meeting] => implicit state =>
      Sequence(
        For(meeting.attendees) {
          case attendee if attendee.status.get == PLAYING =>
            Abandon(attendee)
          case _ => ActionId()
        },
        For(meeting.invitees) {
          case invitee if invitee.status.get == PLAYING =>
            Abandon(invitee)
          case _ => ActionId()
        },
        Abandon(meeting.moderator)
      )
} | hablapps/app-dofollow | src/main/scala/org/hablapps/dofollow/portal/project/meeting/Rules.scala | Scala | apache-2.0 | 6,643 |
package com.sksamuel.scapegoat.inspections.collections
import com.sksamuel.scapegoat._
/** @author Stephen Samuel */
/** Flags `.filter(_.isDefined).map(_.get)` chains on collections of Options,
    which can be replaced by a single `.flatten`. */
class FilterOptionAndGet extends Inspection {

  def inspector(context: InspectionContext): Inspector = new Inspector(context) {
    override def postTyperTraverser = Some apply new context.Traverser {

      import context.global._

      override def inspect(tree: Tree): Unit = {
        tree match {
          // Matches the typed AST of: recv.filter(x => x.isDefined).map(x => x.get)
          case Apply(TypeApply(
            Select(Apply(Select(_, TermName("filter")), List(Function(_, Select(_, TermName("isDefined"))))),
            TermName("map")), args), List(Function(_, Select(_, TermName("get"))))) =>
            context.warn("filter(_.isDefined).map(_.get)", tree.pos, Levels.Info,
              ".filter(_.isDefined).map(_.get) can be replaced with flatten: " + tree.toString().take(500), FilterOptionAndGet.this)
          case _ => continue(tree)
        }
      }
    }
  }
} | pwwpche/scalac-scapegoat-plugin | src/main/scala/com/sksamuel/scapegoat/inspections/collections/FilterOptionAndGet.scala | Scala | apache-2.0 | 933 |
package controllers
import scalaz._
import Scalaz._
import scalaz.Validation._
import net.liftweb.json._
import io.megam.auth.funnel._
import io.megam.auth.funnel.FunnelErrors._
import play.api.mvc._
import controllers.stack.Results
import models.Constants._
/**
 * REST endpoints for disks attached to an assembly (virtual machine).
 *
 * Every action follows the same pattern: funnel/authenticate the request,
 * extract the caller's email (and the request body where needed), delegate to
 * `models.disks.Disks`, and render either the success payload or the funnel
 * error with the status code it carries. The previously copy-pasted error
 * plumbing is factored into the private helpers `render` and `unwrap`.
 */
object Disks extends Controller with controllers.stack.APIAuthElement {

  /** Renders a funnel response using the HTTP status code it carries.
    * (An `HttpReturningError` converts implicitly to `FunnelResponse`.) */
  private def render(rn: FunnelResponse): Result = Status(rn.code)(rn.toJson(true))

  /** Unwraps the outer try/catch validation: successful results pass through,
    * any unexpected throwable becomes a 400 carrying the exception message. */
  private def unwrap(attempt: Validation[Throwable, Result]): Result =
    attempt.fold(succ = { a: Result => a }, fail = { t: Throwable => Status(BAD_REQUEST)(t.getMessage) })

  /** Attaches a disk described by the request body to an assembly. */
  def post = StackAction(parse.tolerantText) { implicit request =>
    unwrap(Validation.fromTryCatchThrowable[Result, Throwable] {
      reqFunneled match {
        case Success(succ) =>
          val freq = succ.getOrElse(throw new Error("Invalid header."))
          val email = freq.maybeEmail.getOrElse(throw new Error("Email not found (or) invalid."))
          val clientAPIBody = freq.clientAPIBody.getOrElse(throw new Error("Body not found (or) invalid."))
          models.disks.Disks.create(email, clientAPIBody) match {
            case Success(succ) =>
              Status(CREATED)(
                FunnelResponse(CREATED, """Disk attached successfully.""", DISKSCLAZ).toJson(true))
            case Failure(err) => render(new HttpReturningError(err))
          }
        case Failure(err) => render(new HttpReturningError(err))
      }
    })
  }

  /** Lists the disks attached to assembly `id`, scoped to the caller's email. */
  def show(id: String) = StackAction(parse.tolerantText) { implicit request =>
    unwrap(Validation.fromTryCatchThrowable[Result, Throwable] {
      reqFunneled match {
        case Success(succ) =>
          val freq = succ.getOrElse(throw new Error("Invalid header."))
          val email = freq.maybeEmail.getOrElse(throw new Error("Email not found (or) invalid."))
          models.disks.Disks.findByAssemblyId(id, email) match {
            case Success(succ) => Ok(Results.resultset(models.Constants.DISKSCOLLECTIONCLAZ, compactRender(Extraction.decompose(succ))))
            case Failure(err) => render(new HttpReturningError(err))
          }
        case Failure(err) => render(new HttpReturningError(err))
      }
    })
  }

  /** Fetches a single disk by its own `id`, scoped to the caller's email. */
  def get(id: String) = StackAction(parse.tolerantText) { implicit request =>
    unwrap(Validation.fromTryCatchThrowable[Result, Throwable] {
      reqFunneled match {
        case Success(succ) =>
          val freq = succ.getOrElse(throw new Error("Invalid header."))
          val email = freq.maybeEmail.getOrElse(throw new Error("Email not found (or) invalid."))
          models.disks.Disks.findById(id, email) match {
            case Success(succ) => Ok(Results.resultset(models.Constants.DISKSCOLLECTIONCLAZ, compactRender(Extraction.decompose(succ))))
            case Failure(err) => render(new HttpReturningError(err))
          }
        case Failure(err) => render(new HttpReturningError(err))
      }
    })
  }

  /** Lists every disk owned by the caller. */
  def list = StackAction(parse.tolerantText) { implicit request =>
    unwrap(Validation.fromTryCatchThrowable[Result, Throwable] {
      reqFunneled match {
        case Success(succ) =>
          val freq = succ.getOrElse(throw new Error("Invalid header."))
          val email = freq.maybeEmail.getOrElse(throw new Error("Email not found (or) invalid."))
          models.disks.Disks.findByEmail(email) match {
            case Success(succ) => Ok(Results.resultset(models.Constants.DISKSCOLLECTIONCLAZ, compactRender(Extraction.decompose(succ))))
            case Failure(err) => render(new HttpReturningError(err))
          }
        case Failure(err) => render(new HttpReturningError(err))
      }
    })
  }

  /** Updates a disk from the request body. Note: this action (unlike the
    * others) signals header/email problems with `CannotAuthenticateError`. */
  def update = StackAction(parse.tolerantText) { implicit request =>
    unwrap(Validation.fromTryCatchThrowable[Result, Throwable] {
      reqFunneled match {
        case Success(succ) =>
          val freq = succ.getOrElse(throw new CannotAuthenticateError("Invalid header.", "Read docs.megam.io/api."))
          val email = freq.maybeEmail.getOrElse(throw new CannotAuthenticateError("Email not found (or) invalid.", "Read docs.megam.io/api."))
          val clientAPIBody = freq.clientAPIBody.getOrElse(throw new Error("Body not found (or) invalid."))
          models.disks.Disks.update(email, clientAPIBody) match {
            case Success(succ) =>
              Status(CREATED)(
                FunnelResponse(CREATED, "Your Disks updated successfully.", DISKSCLAZ).toJson(true))
            case Failure(err) => render(new HttpReturningError(err))
          }
        case Failure(err) => render(new HttpReturningError(err))
      }
    })
  }

  /** Detaches (removes) disk `id` from assembly `asm_id`. */
  def delete(asm_id: String, id: String) = StackAction(parse.tolerantText) { implicit request =>
    unwrap(Validation.fromTryCatchThrowable[Result, Throwable] {
      reqFunneled match {
        case Success(succ) =>
          val freq = succ.getOrElse(throw new Error("Invalid header."))
          val email = freq.maybeEmail.getOrElse(throw new Error("Email not found (or) invalid."))
          models.disks.Disks.delete(email, asm_id, id) match {
            case Success(succ) =>
              Status(CREATED)(
                FunnelResponse(CREATED, "Disk removed successfully.", DISKSCLAZ).toJson(true))
            case Failure(err) => render(new HttpReturningError(err))
          }
        case Failure(err) => render(new HttpReturningError(err))
      }
    })
  }

  /** Starts an asynchronous removal of disk `id` on assembly `asm_id`. */
  def initdel(asm_id: String, id: String) = StackAction(parse.tolerantText) { implicit request =>
    unwrap(Validation.fromTryCatchThrowable[Result, Throwable] {
      reqFunneled match {
        case Success(succ) =>
          val freq = succ.getOrElse(throw new Error("Invalid header."))
          val email = freq.maybeEmail.getOrElse(throw new Error("Email not found (or) invalid."))
          models.disks.Disks.initdel(email, asm_id, id) match {
            case Success(succ) =>
              Status(CREATED)(
                FunnelResponse(CREATED, "Disk removal started.", DISKSCLAZ).toJson(true))
            case Failure(err) => render(new HttpReturningError(err))
          }
        case Failure(err) => render(new HttpReturningError(err))
      }
    })
  }
}
| indykish/vertice_gateway | app/controllers/disks/Disks.scala | Scala | mit | 7,668 |
/*
* Wire
* Copyright (C) 2016 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.waz.utils.events
import com.waz.log.BasicLogging.LogTag.DerivedLogTag
import org.scalatest._
/** Tests for `Signal.scan`: fold-like accumulation over signal updates,
  * lazy (autowired) evaluation, and subscriber cleanup. */
class ScanSignalSpec extends FeatureSpec with Matchers with BeforeAndAfter with DerivedLogTag {
  implicit val ec: EventContext = EventContext.Global
  // Values observed by the `capture` subscriber, reset before each test.
  var received = Seq[Int]()
  val capture = (value: Int) => received = received :+ value
  before {
    received = Seq[Int]()
  }
  feature("Basic scanning") {
    scenario("Normal scanning") {
      val s = Signal(1)
      val scanned = s.scan(0)(_ + _)
      // Before any subscriber wires it up, only the initial accumulator is visible.
      scanned.value shouldEqual Some(0)
      scanned(capture)
      // Subscribing folds in the source's current value (0 + 1).
      scanned.value shouldEqual Some(1)
      Seq(2, 3, 1) foreach (s ! _)
      // Running sums: 1, 1+2, 3+3, 6+1.
      received shouldEqual Seq(1, 3, 6, 7)
      scanned.value shouldEqual Some(7)
    }
    scenario("disable autowiring when fetching current value") {
      val s = Signal(1)
      val scanned = s.scan(0)(_ + _)
      // currentValue evaluates without keeping a subscription alive.
      scanned.currentValue shouldEqual Some(1)
      Seq(2, 3, 1) foreach (s ! _)
      scanned.value shouldEqual Some(7)
    }
    scenario("Chained scanning") {
      val s = Signal(1)
      val scanned = s .scan(0)(_ + _) .scan(1)(_ * _)
      scanned.currentValue shouldEqual Some(1)
      scanned(capture)
      Seq(2, 3, 1) foreach (s ! _)
      // Sums 3, 6, 7 from the first scan are multiplied together by the second.
      scanned.currentValue shouldEqual Some(3 * 6 * 7)
      received shouldEqual Seq(1, 3, 3 * 6, 3 * 6 * 7)
    }
  }
  feature("Subscriber lifecycle") {
    scenario("No subscribers will be left behind") {
      val s = Signal(1)
      val scanned = s.scan(0)(_ + _)
      val sub = scanned(capture)
      Seq(2, 3) foreach (s ! _)
      s.hasSubscribers shouldEqual true
      scanned.hasSubscribers shouldEqual true
      // Destroying the subscription must unwire the whole chain...
      sub.destroy()
      s.hasSubscribers shouldEqual false
      scanned.hasSubscribers shouldEqual false
      // ...so later updates are no longer observed.
      s ! 4
      received shouldEqual Seq(1, 3, 6)
    }
  }
}
| wireapp/wire-android-sync-engine | zmessaging/src/test/scala/com/waz/utils/events/ScanSignalSpec.scala | Scala | gpl-3.0 | 2,516 |
package hotswap
/**
* Source inspired from http://doc.akka.io/docs/akka/snapshot/scala/actors.html#become-unbecome
*/
import akka.actor.{ ActorSystem, Actor, Props, ActorRef }
object HotSwap {
  /** Props factory for the HotSwap actor. */
  def props(): Props = Props(new HotSwap())

  /** Alias so `HotSwap()` can be used wherever Props are expected. */
  def apply(): Props = props()
}
// Messages understood by HotSwap: per `receive`, Foo drives the "angry" state
// and Bar the "happy" one.
case object Foo
case object Bar
/**
 * Demonstrates `context.become` state swapping (cf. the Akka become/unbecome
 * docs example this file cites).
 *
 * Fix: the angry/happy handlers were inverted with respect to the initial
 * `receive` mapping (Foo => angry, Bar => happy): while angry, receiving Foo
 * again must answer "I am already angry?" and Bar must switch to happy (and
 * symmetrically for happy). Previously Foo toggled the state and Bar produced
 * the nonsensical "already" reply.
 */
class HotSwap extends Actor {
  import context._
  def angry: Receive = {
    case Foo => sender ! "I am already angry?"
    case Bar => become(happy)
  }
  def happy: Receive = {
    case Bar => sender ! "I am already happy :-)"
    case Foo => become(angry)
  }
  // Initial behavior: Foo makes the actor angry, Bar makes it happy.
  def receive = {
    case Foo => become(angry)
    case Bar => become(happy)
  }
} | Tjoene/thesis | benchmark/src/main/scala/hotswap/HotSwap.scala | Scala | gpl-2.0 | 714 |
package org.scalaide.core.completion
import org.scalaide.core.testsetup.TestProjectSetup
import org.junit.Ignore
import org.junit.Test
import org.junit.Assert._
import org.scalaide.core.testsetup.SDTTestUtils
import org.eclipse.core.runtime.NullProgressMonitor
import org.scalaide.core.internal.completion.ScalaJavaCompletionProposalComputer
// Shared fixture: loads the "completion" test project used by all tests below.
object ScalaJavaCompletionTests extends TestProjectSetup("completion")
// FIXME: Test in this class fails, but only comment why was "Uncomment as soon as this regression is fixed."
// I prefer ignored tests to commented out. Jerzy Müller, 27.05.2015
/** Checks that mixed-in Scala members are proposed when completing inside Java
  * sources (ticket #1000412). Completion locations are marked with `/*!*/` in
  * the Java fixtures; each location's expected proposals come from an oracle. */
class ScalaJavaCompletionTests {
  import ScalaJavaCompletionTests._

  /** Runs the completion computer at every `/*!*/` marker of `pathToClass` and
    * compares the (sorted) proposals with the corresponding oracle entry. */
  private def runTest(pathToClass: String, expectedCompletions: List[List[String]]): Unit = {
    // get the unit
    val unit = compilationUnit(pathToClass)
    // make it a working copy
    unit.becomeWorkingCopy(new NullProgressMonitor)
    val completionProposalComputer = new ScalaJavaCompletionProposalComputer
    // get the marker positions
    val positions = SDTTestUtils.positionsOf(unit.getBuffer.getContents.toCharArray(), "/*!*/")
    // check the test setup
    assertEquals("Different number of expected completions and completion locations", expectedCompletions.length, positions.length)
    for (i <- 0 until positions.size) {
      // get the proposal (offset + 1: complete just after the marker start)
      val proposals = completionProposalComputer.mixedInCompletions(unit, positions(i) + 1, new NullProgressMonitor)
      // NOTE(review): JavaConversions implicit conversions are deprecated in
      // newer Scala versions; consider JavaConverters/.asScala when upgrading.
      import scala.collection.JavaConversions._
      // extract the data and sort
      val resultCompletion = proposals.map(p => p.getDisplayString).sorted
      // check the completions
      assertEquals("Wrong set of completions for " + i, expectedCompletions(i), resultCompletion)
    }
  }
  // Oracle building blocks: no proposals, getter only, or the full member set.
  val noCompletion = List[String]()
  val oneCompletion = List("getX(): String")
  val allCompletions = List("getX(): String", "setX(String): Unit", "x: String", "x_=(String): Unit")
  /**
   * Test the completion when trying to call the method on a reference.
   */
  @Ignore
  @Test
  def ticket1000412_reference(): Unit = {
    val oracle = List(
      noCompletion, // outsideTypeDeclaration
      //allCompletions, // var1
      oneCompletion, // var2
      oneCompletion, // var3
      oneCompletion, // foo1
      allCompletions, // foo2
      oneCompletion, // foo3
      oneCompletion, // foo4
      oneCompletion, // foo5
      noCompletion, // foo6
      oneCompletion, // foo7
      allCompletions, // foo8
      allCompletions, // foo9
      allCompletions, // foo10
      allCompletions, // foo11
      allCompletions, // foo12
      oneCompletion // foo13
    )
    reload(scalaCompilationUnit("ticket_1000412/model/ClassA.scala"))
    runTest("ticket_1000412/test/TestJavaReference.java", oracle)
  }
  /**
   * Test the completion when trying to call the method when the class extends the class containing the method.
   */
  @Ignore
  @Test
  def ticket1000412_extends(): Unit = {
    val oracle = List(
      allCompletions, // var11
      oneCompletion, // var12
      oneCompletion, // var13
      allCompletions, // bar1
      oneCompletion, // bar2
      allCompletions, // bar3
      oneCompletion, // bar4
      oneCompletion, // bar5
      oneCompletion, // bar6
      allCompletions, // bar7
      oneCompletion, // bar8
      allCompletions, // bar9
      oneCompletion, // bar10
      oneCompletion // bar11
    )
    reload(scalaCompilationUnit("ticket_1000412/model/ClassA.scala"))
    runTest("ticket_1000412/test/TestJavaExtends.java", oracle)
  }
}
| sschaef/scala-ide | org.scala-ide.sdt.core.tests/src/org/scalaide/core/completion/ScalaJavaCompletionTests.scala | Scala | bsd-3-clause | 3,554 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.index.legacy
import org.locationtech.geomesa.accumulo.index.AccumuloJoinIndex
import org.locationtech.geomesa.utils.index.IndexMode.IndexMode
// noinspection ScalaDeprecation
import org.locationtech.geomesa.accumulo.index.IndexValueEncoder.IndexValueEncoderImpl
import org.locationtech.geomesa.index.api.{RowKeyValue, WritableFeature}
import org.locationtech.geomesa.index.geotools.GeoMesaDataStore
import org.locationtech.geomesa.index.index.attribute.legacy.AttributeIndexV2
import org.locationtech.geomesa.index.index.attribute.legacy.AttributeIndexV2.AttributeIndexKeySpaceV2
import org.locationtech.geomesa.index.index.attribute.{AttributeIndexKey, AttributeIndexKeySpace}
import org.opengis.feature.simple.SimpleFeatureType
/** Legacy (v2) Accumulo attribute "join" index. Instead of storing the whole
  * feature, each key-value stores the reduced "index value" encoding of the
  * feature; column family/qualifier are cleared in the process. */
class JoinIndexV2(ds: GeoMesaDataStore[_],
                  sft: SimpleFeatureType,
                  attribute: String,
                  dtg: Option[String],
                  mode: IndexMode)
    extends AttributeIndexV2(ds, sft, attribute, dtg, mode) with AccumuloJoinIndex {

  override val keySpace: AttributeIndexKeySpace = new AttributeIndexKeySpaceV2(sft, attribute) {

    // noinspection ScalaDeprecation
    private val serializer = new IndexValueEncoderImpl(sft)

    // Delegates key generation to the parent key space, then rewrites each
    // value: empty cf/cq and the serialized index value as the payload.
    override def toIndexKey(writable: WritableFeature,
                            tier: Array[Byte],
                            id: Array[Byte],
                            lenient: Boolean): RowKeyValue[AttributeIndexKey] = {
      val kv = super.toIndexKey(writable, tier, id, lenient)
      // lazy: only serialized if at least one value is produced
      lazy val serialized = serializer.serialize(writable.feature)
      kv.copy(values = kv.values.map(_.copy(cf = Array.empty, cq = Array.empty, toValue = serialized)))
    }
  }
}
| locationtech/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/index/legacy/JoinIndexV2.scala | Scala | apache-2.0 | 2,190 |
/*
* MUSIT is a museum database to archive natural and cultural history data.
* Copyright (C) 2016 MUSIT Norway, part of www.uio.no (University of Oslo)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License,
* or any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package controllers.storage
import com.google.inject.Inject
import no.uio.musit.MusitResults.{MusitError, MusitSuccess}
import no.uio.musit.security.Authenticator
import no.uio.musit.security.Permissions.Read
import no.uio.musit.service.MusitController
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.json.Json
import services.old.KdReportService
/** HTTP endpoint exposing the KD report. */
class KdReportController @Inject()(
    val authService: Authenticator,
    val kdReportService: KdReportService
) extends MusitController {

  /** Returns the KD report for museum `mid`; requires Read permission. */
  def getReport(mid: Int) = MusitSecureAction(mid, Read).async { implicit request =>
    kdReportService.getReport(mid).map { result =>
      result match {
        case MusitSuccess(report) =>
          Ok(Json.toJson(report))
        case failure: MusitError =>
          val body = Json.obj("message" -> failure.message)
          InternalServerError(body)
      }
    }
  }
}
| kpmeen/musit | service_storagefacility/app/controllers/storage/KdReportController.scala | Scala | gpl-2.0 | 1,697 |
package org.psliwa.idea.composerJson.composer.model.repository
// Composer repository metadata: its URLs, whether packagist is enabled, and an
// optional resolved repository of package names.
case class RepositoryInfo(urls: List[String], packagist: Boolean, repository: Option[Repository[String]] = None)
| psliwa/idea-composer-plugin | src/main/scala/org/psliwa/idea/composerJson/composer/model/repository/RepositoryInfo.scala | Scala | mit | 177 |
package omnibus.domain.topic
import akka.actor.ActorRef
/** Pairs a topic path with the (optional) ActorRef currently serving it. */
case class TopicPathRef(topicPath: TopicPath, topicRef: Option[ActorRef])
object TopicPathRef {
  /** Builds a ref whose topic path is derived from the actor's own address. */
  def apply(ref: ActorRef): TopicPathRef = {
    val path = TopicPath(ref)
    TopicPathRef(path, Some(ref))
  }
}
/**
 * A topic path represented as its validated segments.
 *
 * Validation rejects: an empty segment list, 50+ segments, any segment of 20+
 * characters, empty segments, and segments containing forbidden characters.
 */
case class TopicPath(segments: List[String]) {
  require(segments.nonEmpty, s"Topic path is empty \n")
  require(segments.size < 50, s"Topic path is too long \n")
  // Fix: was `exists(_.length < 20)`, which accepted overlong segments as soon
  // as one short segment was present; every segment must be short.
  require(segments.forall(_.length < 20), s"Topic path contains overlong segment \n")
  require(!segments.exists(_.isEmpty), s"Topic path contains an empty segment \n")
  // Fix: was `!segments.forall(containsBadChars)`, which only rejected paths in
  // which *every* segment was bad; one bad segment must be enough to reject.
  require(!segments.exists(TopicPath.containsBadChars), s"Topic path contains forbidden chars \n")

  /** Human-readable "a/b/c" rendering. */
  def prettyStr = segments.mkString("/")
}
object TopicPath {

  /** Parses a raw "a/b/c" string into a (validated) TopicPath. */
  def apply(rawPath: String): TopicPath = TopicPath(rawPath.split('/').toList)

  /** Derives the topic path from an actor's address below the repository root. */
  def apply(ref: ActorRef): TopicPath = {
    val refPath = TopicPath.prettyStr(ref)
    TopicPath(refPath)
  }

  /** Splits a "+"-separated multi-topic expression into individual paths. */
  def multi(rawPaths: String): List[TopicPath] = rawPaths.split("[/]\\+[/]")
    .toList
    .map(TopicPath(_))

  def splitMultiTopic(topics: String): List[String] = topics.split("[/]\\+[/]").toList

  // TODO : find a way to remove the hardcoded string
  def prettyStr(ref: ActorRef) = ref.path.toString.split("/topic-repository/").toList(1)

  /** Renders a subscription set as "path1 + path2 + ...". */
  def prettySubscription(topics: Set[ActorRef]): String = {
    val setOfTopic = topics.map(TopicPath.prettyStr)
    setOfTopic.mkString(" + ")
  }

  // TODO : dumb list, replace it with a cryptic regex
  val forbiddenChars: List[String] = List("*", " ", "?", "%", "(", ")")

  /** True when the segment contains at least one forbidden character. */
  def containsBadChars(segment: String): Boolean = forbiddenChars.exists(segment.contains(_))
} | agourlay/omnibus | src/main/scala/omnibus/domain/topic/TopicPath.scala | Scala | apache-2.0 | 1,646 |
/**
* Copyright 2014 Getty Imges, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gettyimages.spray.swagger
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.WordSpec
import com.wordnik.swagger.core.{SwaggerSpec, SwaggerContext}
import com.wordnik.swagger.config.SwaggerConfig
import scala.reflect.runtime.universe._
/** Verifies that SprayApiScanner keeps only properly annotated API types. */
class SprayApiScannerSpec
  extends WordSpec
  with ShouldMatchers {

  "The SprayApiScanner object" when {
    "listing resources" should {
      "identify correct API classes based on type and API annotations" in {
        // Exactly the two annotated test APIs should be surfaced, resolved
        // through SwaggerContext's class loader.
        val classes = new SprayApiScanner(Seq(typeOf[TestApiWithPathOperation], typeOf[TestApiWithOnlyDataType])).classes
        classes.length shouldEqual (2)
        classes.find(clazz => clazz == SwaggerContext.loadClass("com.gettyimages.spray.swagger.TestApiWithPathOperation")).nonEmpty shouldBe (true)
        classes.find(clazz => clazz == SwaggerContext.loadClass("com.gettyimages.spray.swagger.TestApiWithOnlyDataType")).nonEmpty shouldBe (true)
      }
    }
  }
}
| devsprint/akka-http-swagger | src/test/scala/com/gettyimages/spray/swagger/SprayApiScannerSpec.scala | Scala | apache-2.0 | 1,561 |
package moveRefactoring.container
import moveRefactoring.bar.B
import moveRefactoring.foo.A
// NOTE(review): this file lives under testdata/move/... — it appears to be a
// fixture for the IDE's move-refactoring tests, so its exact content (imports
// and unused locals included) may be compared verbatim; confirm before editing.
object ObjUser {
  def main(args: Array[String]) {
    val a: A = new A
    val b: B = new B
  }
}
| ilinum/intellij-scala | testdata/move/scl4613/after/moveRefactoring/container/ObjUser.scala | Scala | apache-2.0 | 193 |
package co.ledger.wallet.web.ripple.core.filesystem
import org.scalajs.dom.raw.Blob
import scala.concurrent.{Future, Promise}
import scala.scalajs.js
/**
*
* ChromeFileSystem
* ledger-wallet-ripple-chrome
*
* Created by Pierre Pollastri on 08/07/2016.
*
* The MIT License (MIT)
*
* Copyright (c) 2016 Ledger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
/** Scala.js wrapper over the Chrome Apps `chrome.fileSystem` API. */
class ChromeFileSystem {

  /**
   * Opens Chrome's native "save file" dialog and resolves with the chosen entry.
   *
   * @param suggestedName  default file name shown in the dialog
   * @param acceptMultiple currently unused; kept for source compatibility
   * @return future failed with chrome.runtime.lastError's message when the
   *         user cancels (the callback then receives `undefined`)
   */
  def chooseFileEntry(suggestedName: String, acceptMultiple: Boolean = false): Future[FileEntry] = {
    val promise = Promise[FileEntry]()
    js.Dynamic.global.chrome.fileSystem.chooseEntry(js.Dictionary(
      "type" -> "saveFile",
      "suggestedName" -> suggestedName
    ), { (entry: js.Dynamic) =>
      if (js.isUndefined(entry)) {
        promise.failure(new Exception(js.Dynamic.global.chrome.runtime.lastError.message.toString()))
      } else {
        promise.success(new FileEntry(entry))
      }
    })
    promise.future
  }

  /** Thin wrapper around a chrome FileEntry (`js.Dynamic`). */
  class FileEntry(entry: js.Dynamic) {

    /**
     * Writes `blob` to the entry, then truncates the file to the blob's size so
     * stale bytes from a previously larger file are dropped.
     *
     * The FileWriter fires `onwriteend` twice: after the blob write (we then
     * issue the truncate) and after the truncate (we then complete the future).
     *
     * Fix: the previous code set `truncated = true` and immediately tested
     * `if (!truncated)`, so the truncate was dead code and the returned future
     * never completed.
     */
    def write(blob: Blob): Future[Unit] = {
      val promise = Promise[Unit]()
      var truncated = false
      entry.createWriter({ (writer: js.Dynamic) =>
        writer.onwriteend = { () =>
          if (!truncated) {
            // First pass: blob written. Explicitly set the file size to drop
            // any content that might have been there before.
            truncated = true
            writer.truncate(blob.size)
          } else {
            // Second pass: truncate finished, the file now matches `blob`.
            promise.success(())
          }
        }
        writer.onerror = { (e: js.Dynamic) =>
          promise.failure(new Exception(e.toString()))
        }
        writer.write(blob)
      })
      promise.future
    }
  }
}
object ChromeFileSystem extends ChromeFileSystem | LedgerHQ/ledger-wallet-ripple | src/main/scala/co/ledger/wallet/web/ripple/core/filesystem/ChromeFileSystem.scala | Scala | mit | 3,464 |
package com.asto.dmp.ycd.dao.impl
import com.asto.dmp.ycd.base.Contexts
import com.asto.dmp.ycd.dao.SQL
import com.asto.dmp.ycd.util.{BizUtils, DateUtils, Utils}
object BizDao {
  // Maximum number of months of order history any store is evaluated over.
  private val maxCalcMonths = 12
  // Default store operating age, in months (used when history is too short).
  private val defaultStoreAge = 18
  // (storeId -> smallest "months to calculate" value for that store), cached.
  val storeIdCalcMonthsRDD = BaseDao.getStoreIdCalcMonthsProps().map(a => (a(0).toString, a(1).toString().toInt)).groupByKey().map(t => (t._1, t._2.min)).persist()
  // All distinct store ids.
  val storeIdArray = BaseDao.getStoreIdCalcMonthsProps(SQL().select("store_id")).map(a => a(0).toString).distinct().collect()
  val storeIdCalcMonthsArray = storeIdCalcMonthsRDD.collect()
  val storeIdCalcMonthsMap = storeIdCalcMonthsArray.toMap

  /**
   * Drops order rows outside the store's calculation window: depending on the
   * store's region, only the last 1, 3 or 12 months of orders are considered.
   */
  private def filterData(storeId: String, orderDate: String): Boolean = {
    orderDate >= DateUtils.monthsAgo(storeIdCalcMonthsMap.get(storeId).getOrElse(1), "yyyy-MM-01")
  }
  /**
   * Operating period (months) = month of the loan application (system run time)
   * minus the month of the earliest online order.
   * Stores with only 1 or 3 months of data use the default `defaultStoreAge`.
   */
  // Updated.
  def monthsNumFromEarliestOrder = {
    BaseDao.getOrderProps(SQL().select("store_id,order_date").where("order_date != 'null'"))
      .map(a => (a(0).toString, a(1).toString))
      .groupByKey()
      .map(t => (t._1, BizUtils.monthsNumFrom(t._2.min, "yyyy-MM-dd")))
      .leftOuterJoin(storeIdCalcMonthsRDD) //(e160d0221914444f9d8639c8234207cc,(26,Some(12)))
      .map(t => (t._1, if (t._2._2.getOrElse(1) == 12) t._2._1 else defaultStoreAge)).persist() // stores with only 1/3 months of data fall back to defaultStoreAge

  }

  /** Selects `store_id,<property>,order_date` rows of the last `maxMonthsNum`
    * months (current month excluded), filtered per store by `filterDataFun`. */
  def filterOnePropertyDataInOrder(property: String, maxMonthsNum: Int = maxCalcMonths, filterDataFun: (String, String) => Boolean = filterData) = {
    BaseDao.getOrderProps(
      SQL().select(s"store_id,$property,order_date").
        where(s" order_date >= '${DateUtils.monthsAgo(maxMonthsNum, "yyyy-MM-01")}' and order_date < '${DateUtils.monthsAgo(0, "yyyy-MM-01")}'")
    ).filter(a => filterDataFun(a(0).toString, a(2).toString))
  }

  // Updated.
  /** Per-store monthly average of the given numeric order property. */
  val avgFor = (property: String) => {
    filterOnePropertyDataInOrder(property)
      .map(a => (a(0).toString, a(1).toString.toDouble))
      .groupByKey()
      .map(t => (t._1, t._2.sum.toInt))
      .leftOuterJoin(storeIdCalcMonthsRDD) //(af46ef365bac42f88c5f5ecb46e555a5,(519,Some(1)))
      .map(t => (t._1, t._2._1 / t._2._2.getOrElse(1)))
  }
  /**
   * Annual average purchase amount = sum of the "money_amount" field over the
   * last n months (current month excluded) / n.
   * Elements look like: (33010120120716288A,68260)
   */
  // Updated.
  val moneyAmountAnnAvg = avgFor("money_amount")

  /**
   * Annual average carton count = sum of the "order_amount" field over the
   * last n months (current month excluded) / n.
   * Elements look like: (33010120120716288A,427)
   */
  // Updated.
  val orderAmountAnnAvg = avgFor("order_amount")
  /**
   * Monthly sales growth ratio (12-month data) = avg sales of last 3 months / avg sales of last 6 months.
   * Monthly sales growth ratio (3-month data)  = last month's sales / avg sales of last 3 months.
   * Monthly sales growth ratio (1-month data)  is not computed.
   * Results are rounded to two decimal places.
   */
  // Updated.
  def monthlySalesGrowthRatio = {
    // FZ = numerator window size, FM = denominator window size, both depending
    // on how many months of data the store has (12 or 3).
    val monthsToCalcForFZ = (monthsNum: Int) => if (monthsNum == 12) 3 else if (monthsNum == 3) 1 else 1
    val monthsToCalcForFM = (monthsNum: Int) => if (monthsNum == 12) 6 else if (monthsNum == 3) 3 else 1
    val filterData = (storeId: String, orderDate: String, monthsNum: (Int, Int)) =>
      if (storeIdCalcMonthsMap.get(storeId).getOrElse(1) == 12)
        orderDate >= DateUtils.monthsAgo(monthsNum._1, "yyyy-MM-01")
      else if (storeIdCalcMonthsMap.get(storeId).getOrElse(1) == 3)
        orderDate >= DateUtils.monthsAgo(monthsNum._2, "yyyy-MM-01")
      else
        false
    val filterDataForMonthlySalesGrowthRatioFZ = (storeId: String, orderDate: String) => filterData(storeId, orderDate, (3, 1))
    val filterDataForMonthlySalesGrowthRatioFM = (storeId: String, orderDate: String) => filterData(storeId, orderDate, (6, 3))
    // Average monthly sales over the given window.
    val computeRDD = (maxMonthsNum: Int, filterDataFun: (String, String) => Boolean, monthsToCalcFun: Int => Int) => {
      filterOnePropertyDataInOrder("money_amount", maxMonthsNum, filterDataFun)
        .map(a => (a(0).toString, a(1).toString.toDouble))
        .groupByKey()
        .map(t => (t._1, t._2.sum.toInt))
        .leftOuterJoin(storeIdCalcMonthsRDD) //(af46ef365bac42f88c5f5ecb46e555a5,(519,Some(1)))
        .map(t => (t._1, t._2._1 / monthsToCalcFun(t._2._2.getOrElse(1))))
    }
    val fzRDD = computeRDD(3, filterDataForMonthlySalesGrowthRatioFZ, monthsToCalcForFZ)
    val fmRDD = computeRDD(6, filterDataForMonthlySalesGrowthRatioFM, monthsToCalcForFM)
    fzRDD.leftOuterJoin(fmRDD).map(t => (t._1, Utils.retainDecimal(t._2._1.toDouble / t._2._2.get)))
  }
  /**
   * Annual average price per carton:
   * 12-month data: total amount over 12 months / total cartons over 12 months
   *   (= (total/12) / (cartons/12));
   * 3-month data: totals over 3 months; 1-month data: totals over 1 month.
   */
  // Updated.
  def perCigarAvgPriceOfAnnAvg = {
    // Stores with a zero/missing carton count are excluded to avoid div-by-zero.
    moneyAmountAnnAvg.leftOuterJoin(orderAmountAnnAvg).filter(t => t._2._2.isDefined && t._2._2.get.toDouble > 0)
      .map(t => (t._1, t._2._1 / t._2._2.get))
  }

  /**
   * Monthly purchase amount for each of the last n months (n = 1, 3, 12).
   */
  // Updated.
  def moneyAmountPerMonth = {
    import Helper.storeIdAndOrderDateOrdering
    filterOnePropertyDataInOrder("money_amount")
      .map(a => ((a(0).toString, DateUtils.strToStr(a(2).toString, "yyyy-MM-dd", "yyyyMM")), a(1).toString.toDouble))
      .groupByKey()
      .map(t => (t._1, Utils.retainDecimal(t._2.sum, 2))).sortBy(_._1).cache()
  }
  /**
   * Monthly carton count for each of the last n months (n = 1, 3, 12).
   */
  // Updated.
  def orderAmountPerMonth = {
    import Helper.storeIdAndOrderDateOrdering
    filterOnePropertyDataInOrder("order_amount")
      .map(a => ((a(0).toString, DateUtils.strToStr(a(2).toString, "yyyy-MM-dd", "yyyyMM")), a(1).toString.toInt))
      .groupByKey()
      .map(t => (t._1, t._2.sum)).sortBy(_._1).cache()
  }

  /**
   * Number of distinct orders per month for each of the last n months (n = 1, 3, 12).
   */
  // Updated.
  def orderNumberPerMonth = {
    import Helper.storeIdAndOrderDateOrdering
    filterOnePropertyDataInOrder("order_id")
      .map(a => (a(0).toString, DateUtils.strToStr(a(2).toString, "yyyy-MM-dd", "yyyyMM"), a(1).toString))
      .distinct()
      .map(t => ((t._1, t._2), 1))
      .groupByKey()
      .map(t => (t._1, t._2.sum))
      .sortBy(_._1)
      .map(t => (t._1._1, (t._1._2, t._2)))
  }
  /**
   * Per month, the top-5 products by purchase amount over the last n months (n = 1, 3, 12).
   */
  // Updated.
  def payMoneyTop5PerMonth = {
    BaseDao.getOrderProps(
      SQL().select("store_id,order_date,cigar_name,money_amount").
        where(s" order_date >= '${DateUtils.monthsAgo(maxCalcMonths, "yyyy-MM-01")}' and order_date < '${DateUtils.monthsAgo(0, "yyyy-MM-01")}'")
    ).filter(a => filterData(a(0).toString, a(1).toString))
      .map(a => ((a(0).toString, DateUtils.strToStr(a(1).toString, "yyyy-MM-dd", "yyyyMM"), a(2).toString), a(3).toString.toDouble))
      .groupByKey()
      .map(t => ((t._1._1, t._1._2), (Utils.retainDecimal(t._2.sum, 2), t._1._3)))
      .groupByKey()
      .map(t => (t._1, t._2.toList.sorted.reverse.take(5)))
      .map(t => (t._1._1, (t._1._2, t._2)))
      .groupByKey().collect()
  }

  /**
   * Number of product categories ordered in each of the last 12 months.
   */
  // Updated.
  def categoryPerMonth = {
    import Helper.storeIdAndOrderDateOrdering
    BaseDao.getOrderProps(SQL().select("store_id,cigar_name,order_date")
      .where(s"order_amount > '0' and order_date >= '${DateUtils.monthsAgo(maxCalcMonths, "yyyy-MM-01")}' and order_date < '${DateUtils.monthsAgo(0, "yyyy-MM-01")}' "))
      .filter(a => filterData(a(0).toString, a(2).toString))
      .map(a => (a(0).toString, DateUtils.strToStr(a(2).toString, "yyyy-MM-dd", "yyyyMM"), a(1).toString))
      .distinct()
      .map(t => ((t._1, t._2), 1))
      .groupByKey()
      .map(t => (t._1, t._2.sum))
      .sortBy(_._1)
      .map(t => (t._1._1, (t._1._2, t._2))).cache()
  }
  /**
   * Average price per carton, per month, over the last n months.
   */
  // Updated.
  def perCigarPricePerMonth = {
    import Helper.storeIdAndOrderDateOrdering
    moneyAmountPerMonth.leftOuterJoin(orderAmountPerMonth)
      .map(t => (t._1, Utils.retainDecimal(t._2._1 / t._2._2.get, 2)))
      .sortBy(_._1)
      .map(t => (t._1._1, (t._1._2, t._2)))
  }

  /**
   * Gross margin over the last n months:
   * 1 - (total purchase cost / total retail value).
   */
  def grossMarginLastYear = {
    BaseDao.getOrderProps(
      SQL()
        .select("store_id,money_amount,order_amount,retail_price,order_date")
        .where(s" retail_price <> 'null' and order_date >= '${DateUtils.monthsAgo(maxCalcMonths, "yyyy-MM-01")}' and order_date < '${DateUtils.monthsAgo(0, "yyyy-MM-01")}'")
    ).filter(a => filterData(a(0).toString, a(4).toString))
      // Rows with a zero amount or zero retail guide price do not take part in
      // the calculation, hence the extra filter.
      .filter(a => a(1).toString != "0" && a(3).toString != "0")
      .map(a => (a(0), (a(1).toString.toDouble, a(2).toString.toInt * a(3).toString.toDouble)))
      .groupByKey()
      .map(t => (t._1, t._2.reduce((a, b) => (a._1 + b._1, a._2 + b._2))))
      .map(t => (t._1.toString, Utils.retainDecimal(1 - t._2._1 / t._2._2, 3))).persist()
  }
  /**
   * Category concentration: share of total sales contributed by the top-10
   * best-selling products over the last n months
   * (sum of TOP-10 product sales / total sales of the last 12 months).
   */
  // Updated, not yet tested.
  def categoryConcentration = {
    Contexts.sparkContext
      .parallelize(getTop10Category)
      .leftOuterJoin(getLastMonthsSales)
      .map(t => (t._1, t._2._1._1 / t._2._2.get))
      .groupByKey()
      .map(t => (t._1, Utils.retainDecimal(t._2.sum))).persist()
  }
/**
 * For every store in `storeIdCalcMonthsArray`, the first (at most) 10 entries
 * of the sales-sorted concentration array belonging to that store.
 *
 * @return buffer of (storeId, (sales, cigarName))
 */
def getTop10Category = {
  val allConcentrations = getAllCategoryConcentration
  val top10PerStore = scala.collection.mutable.ListBuffer[(String, (Double, String))]()
  storeIdCalcMonthsArray.foreach { case (storeId, _) =>
    // `allConcentrations` is already ordered, so filter + take(10) keeps the
    // store's top entries in order.
    top10PerStore ++= allConcentrations.filter(_._1 == storeId).take(10)
  }
  top10PerStore
}
// Sales value per (store, category) over the last `maxCalcMonths` months,
// collected to the driver and ordered descending by the (storeId, sales) key.
// Element shape: (storeId, (sales, cigarName)).
private def getAllCategoryConcentration = {
  BaseDao.getOrderProps(
    SQL().select("store_id,cigar_name,order_amount,retail_price,order_date")
      .where(s" retail_price <> 'null' and order_date >= '${DateUtils.monthsAgo(maxCalcMonths, "yyyy-MM-01")}' and order_date < '${DateUtils.monthsAgo(0, "yyyy-MM-01")}'")
  ).filter(a => filterData(a(0).toString, a(4).toString))
    .map(a => (a(0).toString, a(1).toString, a(2).toString.toInt * a(3).toString.toDouble))
    .map(t => ((t._1, t._2), t._3))
    .groupByKey()
    .map(t => (t._1, t._2.sum))
    // Re-key by (storeId, sales) so sortByKey orders by sales within a store.
    .map(t => ((t._1._1, t._2), t._1._2))
    .sortByKey(ascending = false).map(t => (t._1._1, (t._1._2, t._2))).collect() // e.g. ((33010220120807247A, 300.0), brand name)
}
/**
 * Total sales value (order_amount * retail_price) per store over the last
 * `maxCalcMonths` months.
 *
 * Fix: removed the no-op `.map(t => (t._1, t._2))` stage and a stale example
 * comment copied from another method.
 *
 * @return RDD of (storeId, totalSales)
 */
private def getLastMonthsSales = {
  BaseDao.getOrderProps(
    SQL().select("store_id,order_amount,retail_price,order_date").
      where(s"retail_price <> 'null' and order_date >= '${DateUtils.monthsAgo(maxCalcMonths, "yyyy-MM-01")}' and order_date < '${DateUtils.monthsAgo(0, "yyyy-MM-01")}'")
  ).filter(a => filterData(a(0).toString, a(3).toString))
    .map(a => (a(0).toString, a(1).toString.toInt * a(2).toString.toDouble))
    .groupByKey()
    .map(t => (t._1, t._2.sum))
}
/**
 * Sales-to-rent ratio:
 *  - 12-month version: last 12 months' sales / yearly rent
 *  - 1-month version:  last month's purchase amount / average monthly rent
 *  - 3-month version:  average monthly purchase of last 3 months / monthly rent
 * When no rent data exists at all, every store defaults to 0.6.
 *
 * Fix: the rent-value dataset was fetched twice (once for the emptiness
 * check, once for the join); it is now fetched once and reused.
 */
def salesRentRatio = {
  val storeIdRentValueRDD = BaseDao.getShopYearRentProps(SQL().select("store_id,rent_value"))
    .map(a => (a(0).toString, a(1).toString))
  if (storeIdRentValueRDD.count == 0) {
    Contexts.sparkContext.parallelize(storeIdArray).map((_, 0.6)).persist()
  } else {
    // Average monthly sales per store over the last 12 months.
    val avgMoneyAmount = BaseDao.getOrderProps(
      SQL().select("store_id,money_amount,order_date").
        where(s" order_date >= '${DateUtils.monthsAgo(12, "yyyy-MM-01")}' and order_date < '${DateUtils.monthsAgo(0, "yyyy-MM-01")}'")
    ).filter(a => filterData(a(0).toString, a(2).toString))
      .map(a => (a(0).toString, a(1).toString.toDouble))
      .groupByKey()
      .map(t => (t._1, t._2.sum.toInt))
      .leftOuterJoin(storeIdCalcMonthsRDD) // e.g. (storeId, (519, Some(1)))
      .map(t => (t._1, t._2._1 / t._2._2.getOrElse(1)))
    storeIdCalcMonthsRDD.leftOuterJoin(storeIdRentValueRDD)
      // Divide by 12 for the monthly rent; a negative value marks "no rent data".
      .map(t => (t._1, t._2._2.getOrElse(-12).toString.toDouble / 12))
      .leftOuterJoin(avgMoneyAmount)
      // Missing rent or missing sales falls back to the 0.6 default.
      .map(t => (t._1, if (t._2._1 < 0 || t._2._2.isEmpty) 0.6 else t._2._2.get / t._2._1))
  }
}
/**
 * Offline shopping-district index.
 * Data is not available yet, so every store defaults to 0.8.
 */
def offlineShoppingDistrictIndex = {
  Contexts.sparkContext.parallelize(storeIdArray).map((_, 0.8D)).persist()
}
/**
 * Triggers a warning when a loan store's order amount for the last week is 0.
 *
 * @return RDD of (storeId, (weeklyOrderAmount, shouldWarn))
 */
def weekOrderAmountWarn = {
  val (weekStart, weekEnd) = DateUtils.weeksAgo(1)
  // Stores that ordered a non-zero amount in the last week.
  val nonZeroWeeklyAmounts = BaseDao.getOrderProps(SQL().select("store_id,order_amount")
    .where(s" order_date >= '$weekStart' and order_date <= '$weekEnd'"))
    .map(row => (row(0).toString, row(1).toString.toInt))
    .groupByKey()
    .map { case (storeId, amounts) => (storeId, amounts.sum) }
    .filter(_._2 != 0)
  // Any loan store absent from that set gets amount 0 and the warn flag set.
  loanStore.leftOuterJoin(nonZeroWeeklyAmounts).map { case (storeId, (_, amountOpt)) =>
    (storeId, (amountOpt.getOrElse(0), amountOpt.isEmpty))
  }
}
/** Store ids from the loan-store table, paired with an unused empty value. */
def loanStore = {
  BaseDao.getLoanStoreProps(SQL().select("store_id")).map(row => (row(0).toString, ""))
}
/**
 * Warning: purchase amount fell 30% or more versus the average of the
 * previous four weeks. Formula: w5 / ((w1+w2+w3+w4)/4) <= 0.7 triggers it.
 * @return RDD of (storeId, (ratio, shouldWarn))
 */
def moneyAmountWarn = {
  val loanMoneyAmountRateWarnValue = 0.7
  val moneyAmountLastWeek = moneyAmountFor(DateUtils.weeksAgo(1))
  // moneyAmountFor returns the four-week total (w1+w2+w3+w4), so divide by 4.
  val moneyAmountFourWeekAvg = {
    moneyAmountFor((DateUtils.weeksAgo(5)._1, DateUtils.weeksAgo(2)._2)).map(t => (t._1, t._2 / 4))
  }
  // Ratio of last week's amount to the four-week average; stores lacking a
  // four-week history are dropped before dividing.
  val rate = moneyAmountLastWeek.leftOuterJoin(moneyAmountFourWeekAvg)
    .filter(_._2._2.isDefined)
    .map(t => (t._1, t._2._1 / t._2._2.get))
  // Loan stores with no computed ratio default to 0 and therefore warn.
  loanStore.leftOuterJoin(rate)
    .map(t => (t._1, Utils.retainDecimal(t._2._2.getOrElse(0D), 3)))
    .map(t => if (t._2 > loanMoneyAmountRateWarnValue) (t._1, (t._2, false)) else (t._1, (t._2, true)))
}
/**
 * Total purchase amount (money_amount) per store within the inclusive date
 * range, rounded to 2 decimals.
 */
def moneyAmountFor(dateRange: (String, String)) = {
  val (from, to) = dateRange
  BaseDao.getOrderProps(SQL().select("store_id,money_amount,order_date")
    .where(s" order_date >= '$from' and order_date <= '$to'"))
    .map(row => (row(0).toString, row(1).toString.toDouble))
    .groupByKey()
    .map { case (storeId, amounts) => (storeId, Utils.retainDecimal(amounts.sum, 2)) }
}
/**
 * Active category count.
 *
 * 12-month data version: the count shown for a month is the number of
 * categories whose order amount over the three preceding months totals >= 9
 * (i.e. a monthly average of >= 3). E.g. 2015.11 shows the categories whose
 * 2015.08-2015.10 order amount reached 9. If 14 months of raw data are not
 * available, the first two months stay empty and the average is effectively
 * computed over ~10 months.
 *
 * 1-month / 3-month data versions: the count is the number of categories
 * whose order amount in the single preceding month is >= 3.
 *
 * Returns tuples of (storeId, yyyyMM, activeCategoryCount).
 * NOTE: this method is relatively slow; optimize it if it affects the total
 * system execution time.
 */
def getActiveCategory = {
  // Month window aggregated for offset m: three months back for the 12-month
  // version, a single month otherwise. First component is the newer bound.
  val monthRange = (m: Int, calcMonths: Int) =>
    if (calcMonths == 12)
      (DateUtils.monthsAgo(m + 1, "yyyyMM"), DateUtils.monthsAgo(m + 3, "yyyyMM"))
    else
      (DateUtils.monthsAgo(m + 1, "yyyyMM"), DateUtils.monthsAgo(m + 1, "yyyyMM"))
  val allArray = allData.collect()
  val result = scala.collection.mutable.ListBuffer[(String, String, Int)]()
  storeIdCalcMonthsArray.foreach {
    storeIdCalcMonths =>
      (0 until storeIdCalcMonths._2).foreach {
        months =>
          val thisMonths = DateUtils.monthsAgo(months, "yyyyMM")
          val range = monthRange(months, storeIdCalcMonths._2)
          // Count categories whose summed order amount inside the window
          // reaches the version threshold (9 for 12-month data, 3 otherwise).
          // "yyyyMM" strings compare correctly lexicographically.
          val num = allArray.filter(t => t._1._1 == storeIdCalcMonths._1 && range._1 >= t._1._2 && t._1._2 >= range._2)
            .map(t => (t._1._3, t._2))
            .groupBy(_._1)
            .map(t => (t._1, (for (e <- t._2.toList) yield e._2).sum))
            .filter(_._2 >= getAmountLimitForActiveCategory(storeIdCalcMonths._2)).toList.size
          result += Tuple3(storeIdCalcMonths._1, thisMonths, num)
      }
  }
  result
}
/**
 * Order-amount threshold for a category to count as "active":
 * 12-month version: three-month total >= 9;
 * 1- or 3-month version: single-month amount >= 3.
 */
val getAmountLimitForActiveCategory = (calcMonths: Int) => if (calcMonths == 12) 9 else 3
// Monthly order amount keyed by (storeId, yyyyMM, cigarName), restricted to
// each store's calculation window. 12-month stores keep 15 months of raw
// data so that three-month look-backs remain possible; others keep only
// their own calc-month span.
def allData = {
  // Per-store date cutoff (shadows the class-level filterData on purpose).
  val filterData = (storeId: String, orderDate: String) => {
    if (storeIdCalcMonthsMap.get(storeId).getOrElse(1) == 12)
      orderDate >= DateUtils.monthsAgo(15, "yyyy-MM-01")
    else
      orderDate >= DateUtils.monthsAgo(storeIdCalcMonthsMap.get(storeId).getOrElse(1), "yyyy-MM-01")
  }
  BaseDao.getOrderProps(SQL().select("store_id,order_date,cigar_name,order_amount").where(s" order_date >= '${DateUtils.monthsAgo(15, "yyyy-MM-01")}' and order_date < '${DateUtils.monthsAgo(0, "yyyy-MM-01")}' "))
    .filter(a => filterData(a(0).toString, a(1).toString))
    .map(a => ((a(0).toString, DateUtils.strToStr(a(1).toString, "yyyy-MM-dd", "yyyyMM"), a(2).toString), a(3).toString.toInt))
    .groupByKey()
    .map(t => (t._1, t._2.sum)).cache()
}
/**
 * Active category count for the current month only.
 * Window and threshold follow the store's data version: three months with a
 * >= 9 total for 12-month stores; one month with >= 3 otherwise.
 * @return RDD of (storeId, activeCategoryCount)
 */
def getActiveCategoryInLastMonth = {
  // Per-store date cutoff matching the store's data version.
  val filterData = (storeId: String, orderDate: String) => {
    if (storeIdCalcMonthsMap.get(storeId).getOrElse(1) == 12)
      orderDate >= DateUtils.monthsAgo(3, "yyyy-MM-01")
    else
      orderDate >= DateUtils.monthsAgo(1, "yyyy-MM-01")
  }
  BaseDao.getOrderProps(SQL().select("store_id,cigar_name,order_date,order_amount").where(s" order_date >= '${DateUtils.monthsAgo(3, "yyyy-MM-01")}' and order_date < '${DateUtils.monthsAgo(0, "yyyy-MM-01")}'"))
    .filter(a => filterData(a(0).toString, a(2).toString))
    .map(a => ((a(0).toString, a(1).toString), a(3).toString.toInt))
    .groupByKey()
    .map(t => (t._1._1, (t._1._2, t._2.sum)))
    .leftOuterJoin(storeIdCalcMonthsRDD)
    // Attach the store's threshold, keep categories at/above it, then count
    // the surviving categories per store.
    .map(t => ((t._1, t._2._1._1), t._2._1._2, getAmountLimitForActiveCategory(t._2._2.getOrElse(1))))
    .filter(t => t._2 >= t._3)
    .map(t => (t._1._1, 1))
    .groupByKey()
    .map(t => (t._1, t._2.sum)).persist()
}
}
object Helper {
  /**
   * Orders (storeId, orderDate) pairs ascending by store id and, within a
   * store, descending by date.
   *
   * Fix: the original returned 1 when both components were equal, violating
   * the Ordering contract (compare(x, x) must be 0); a non-reflexive
   * comparator can corrupt sorts or make TimSort throw.
   */
  implicit val storeIdAndOrderDateOrdering: Ordering[(String, String)] = new Ordering[(String, String)] {
    override def compare(a: (String, String), b: (String, String)): Int = {
      val byStoreId = a._1.compareTo(b._1)
      // Negate the date comparison to keep the original descending order.
      if (byStoreId != 0) byStoreId else -a._2.compareTo(b._2)
    }
  }
}
} | zj-lingxin/Dmp_ycd | src/main/scala/com/asto/dmp/ycd/dao/impl/BizDao.scala | Scala | mit | 20,209 |
/*
@Title: A Very Big Sum
@Problem Statement:
You are given an array of integers of size N.
You need to print the sum of the elements in the array, keeping in mind that some of those integers may be quite large.
[....]
@URL: https://www.hackerrank.com/challenges/a-very-big-sum
@Courtesy: hackerrank
*/
// Calculates the sum of a line of (possibly very large) integers in several
// equivalent functional styles; input format follows the HackerRank task
// (first line N, second line the numbers).
import collection.breakOut
object Main extends App {
  // Prepending each line means `lines.head` is the LAST line read — the one
  // holding the space-separated numbers.
  val lines = io.Source.stdin.getLines.foldLeft(List.empty[String])((acc, line) => line :: acc)
  val tokens = lines.head.split(" ")
  println(tokens.foldLeft(0L)(_ + _.toLong))
  println(tokens.map(_.toLong).sum)
  println(tokens.par.map(_.toLong).sum) // parallel collection
  val convertedNums: List[Long] = (for (x <- tokens) yield x.toLong)(breakOut)
  println(convertedNums.sum)
}
/*
* Copyright 2013-2014 IQ TECH <http://www.iqtech.pl>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.abyss.graph.model
/**
* Created by cane, 22.06.13 15:39
* $Id: Graph.scala,v 1.2 2013-12-31 21:09:28 cane Exp $
*/
// Placeholder for the Abyss graph model; no members are defined yet.
class Graph {
}
| iqtech/abyss | abyss-graph/src/main/scala/io/abyss/graph/model/Graph.scala | Scala | apache-2.0 | 767 |
/**
*
* HttpRequestExecutor
* Ledger wallet
*
* Created by Pierre Pollastri on 29/06/15.
*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ledger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package co.ledger.wallet.core.net
import java.util.concurrent.Executors
import scala.concurrent.{ExecutionContext, Future}
/**
 * Strategy for running an HTTP request: implementations take a prepared
 * response builder and execute it (how and on which threads is up to them).
 */
trait HttpRequestExecutor {
  /** Executes the request described by `responseBuilder`. */
  def execute(responseBuilder: HttpClient#ResponseBuilder): Unit
}
object HttpRequestExecutor {
  // Size of the fixed thread pool backing the default execution context.
  val NumberOfThreads = 10
  // Shared execution context used when callers do not supply their own.
  implicit lazy val defaultExecutionContext = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(NumberOfThreads))
  // Process-wide executor instance, created lazily on first use.
  private[this] lazy val _defaultExecutor = new BasicHttpRequestExecutor()
  /** Returns the shared default executor. */
  def getDefault(): HttpRequestExecutor = _defaultExecutor
}
package org.photon.protocol.dofus.game
import org.photon.protocol.dofus.Int
import com.twitter.util.Try
/** Game context identifiers; ids start at 1 (Solo = 1, Fight = 2). */
object Context extends Enumeration(initial = 1) {
  type Context = Value
  val Solo, Fight = Value
  // Extractor: parses a numeric string into a Context value; yields None for
  // non-numeric strings or ids outside the enumeration (Try absorbs the
  // NoSuchElementException thrown by Context(id)).
  def unapply(s: String): Option[Context] = s match {
    case Int(id) => Try(Context(id)).toOption
    case _ => None
  }
}
| Emudofus/Photon | protocol/main/src/org/photon/protocol/dofus/game/Types.scala | Scala | mit | 324 |
package com.seadowg.milo.test.runtime
import org.specs2.mutable._
import org.specs2.mock._
import com.seadowg.milo.test.helpers.AsyncHelpers._
import com.seadowg.milo.runtime._
// Specification for ThreadWorker: verifies that work sent to a spawned
// worker is eventually executed.
class ThreadWorkerSpec extends Specification with Mockito {
  "ThreadWorker".title
  "when sent work" should {
    "execute work it is sent" in {
      val worker = new ThreadWorker()
      var executed = false
      worker.spawn()
      // First task must flip the flag once the worker runs it.
      worker.send(() => executed = true)
      waitUntil(() => executed) mustEqual true
      // Reset and send a second task to prove the worker keeps processing.
      executed = false
      worker.send(() => executed = true)
      waitUntil(() => executed) mustEqual true
    }
  }
}
| seadowg/milo | src/test/scala/com/seadowg/milo/test/runtime/ThreadWorkerSpec.scala | Scala | mit | 642 |
package org.jetbrains.plugins.scala.lang.scaladoc.psi.api
import org.jetbrains.plugins.scala.lang.psi.api.ScalaPsiElement
/** Marker PSI element for a parameter reference inside a ScalaDoc comment. */
trait ScDocParamRef extends ScalaPsiElement
package org.jetbrains.plugins.scala.worksheet.cell
import com.intellij.openapi.actionSystem.{AnAction, AnActionEvent}
import com.intellij.openapi.editor.ex.EditorEx
import com.intellij.openapi.fileEditor.{FileEditorManager, TextEditor}
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.worksheet.processor.WorksheetCompiler
import org.jetbrains.plugins.scala.worksheet.processor.WorksheetCompilerUtil.WorksheetCompileRunRequest
/**
* User: Dmitry.Naydanov
* Date: 16.07.18.
*/
/** Base action that compiles and runs a single worksheet cell. */
abstract class RunCellActionBase(protected val cellDescriptor: CellDescriptor) extends AnAction("Run Cell") {
  override def actionPerformed(anActionEvent: AnActionEvent): Unit = {
    // Resolve the PSI file owning the cell; bail out if the element is gone.
    val file = cellDescriptor.getElement match {
      case Some(element) => element.getContainingFile
      case _ => return
    }
    // Run only when the file is open in a text editor backed by an EditorEx.
    FileEditorManager.getInstance(file.getProject).getSelectedEditor(file.getVirtualFile) match {
      case txt: TextEditor => txt.getEditor match {
        case ext: EditorEx =>
          new WorksheetCompiler(
            ext,
            file.asInstanceOf[ScalaFile],
            (_, _) => {}, // no-op completion callback
            false
          ).compileAndRunCode(convertToRunRequest())
        case _ =>
      }
      case _ =>
    }
  }
  /** Translates this cell into a compiler run request. */
  def convertToRunRequest(): WorksheetCompileRunRequest
}
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/worksheet/cell/RunCellActionBase.scala | Scala | apache-2.0 | 1,324 |
/**
* Copyright (C) 2013 Carnegie Mellon University
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tdb.examples
import scala.collection.GenIterable
import scala.collection.mutable.Map
import tdb._
import tdb.list._
import tdb.TDB._
import tdb.util._
/**
 * Incremental PageRank over an adjustable link list.
 *
 * Each iteration builds a fresh aggregator list of ranks: every page keeps a
 * base 0.15 for itself and spreads 0.85 * rank / outDegree to each outgoing
 * edge (the usual damping split). `epsilon` bounds the error tolerated by
 * the double-aggregating column.
 */
class PageRankAdjust
  (links: AdjustableList[Int, Array[Int]], epsilon: Double, iters: Int)
  extends Adjustable[AdjustableList[Int, Double]] {
  def run(implicit c: Context) = {
    val aggregatorConf = AggregatorListConf(
      valueType = AggregatedDoubleColumn(epsilon))
    // Recursively computes the rank list produced after i iterations.
    def innerPageRank(i: Int): ListInput[Int, Double] = {
      if (i == 1) {
        // Base case: every page starts with rank 1.0.
        val newRanks = createList[Int, Double](aggregatorConf)
        def mapper(pair: (Int, Array[Int]), c: Context) {
          val rank = 1.0
          put(newRanks, pair._1, 0.15)(c)
          val v = (rank / pair._2.size) * .85
          putAll(newRanks, for (edge <- pair._2) yield (edge, v))(c)
        }
        links.foreach(mapper)
        newRanks
      } else {
        // Inductive case: read each page's rank from the previous iteration.
        val ranks = innerPageRank(i - 1)
        val newRanks = createList[Int, Double](aggregatorConf)
        def mapper(pair: (Int, Array[Int]), c: Context) {
          put(newRanks, pair._1, 0.15)(c)
          get(ranks, pair._1) {
            case rank =>
              val v = (rank / pair._2.size) * .85
              putAll(newRanks, for (edge <- pair._2) yield (edge, v))(c)
          }(c)
        }
        links.foreach(mapper)
        newRanks
      }
    }
    innerPageRank(iters).getAdjustableList()
  }
}
/**
 * Experiment harness that runs incremental PageRank (PageRankAdjust) and
 * validates it against a plain, non-incremental computation.
 */
class PageRankAlgorithm(_conf: AlgorithmConf)
  extends Algorithm[AdjustableList[Int, Double]](_conf) {
  val input = mutator.createList[Int, Array[Int]](conf.listConf.clone(file = ""))
  //val data = new GraphData(input, conf.count, conf.mutations, conf.runs)
  val data = new GraphFileData(input, conf.file, conf.runs, conf.updateRepeat)
  //val data = new LiveJournalData(input)
  val adjust = new PageRankAdjust(
    input.getAdjustableList(), conf.epsilon, conf.iters)
  // Snapshot of the link table used by the naive (reference) run.
  var naiveTable: Map[Int, Array[Int]] = _
  def generateNaive() {
    data.generate()
    naiveTable = data.table
  }
  def runNaive() {
    naiveHelper(naiveTable)
  }
  // Reference, non-incremental PageRank used to validate incremental output.
  private def naiveHelper(links: Map[Int, Array[Int]]) = {
    var ranks = links.map(pair => (pair._1, 1.0))
    for (i <- 1 to conf.iters) {
      // Pair each page's outgoing links with its current rank.
      val joined = Map[Int, (Array[Int], Double)]()
      for ((url, rank) <- ranks) {
        joined(url) = (links(url), rank)
      }
      // Damping split: 0.85 spread across links, 0.15 kept by the page.
      val contribs = joined.toSeq.flatMap { case (page, (links, rank)) =>
        val contrib = rank / links.size * .85
        links.map(url => (url, contrib)) ++ Iterable((page, .15))
      }
      // Accumulate contributions into the next rank map.
      ranks = Map[Int, Double]()
      for ((url, contrib) <- contribs) {
        ranks(url) = contrib + ranks.getOrElse(url, 0.0)
      }
    }
    ranks
  }
  def loadInitial() {
    data.load()
  }
  def hasUpdates() = data.hasUpdates()
  def loadUpdate() = data.update()
  // Maximum tolerated average relative error (percent) vs the naive run.
  val epsilon = 0.1
  // Passes when every output node exists in the naive answer and the average
  // relative error stays below epsilon.
  def checkOutput(output: AdjustableList[Int, Double]) = {
    val out = output.toBuffer(mutator)
    val answer = naiveHelper(data.table)
    var check = out.size == answer.size
    var totalError = 0.0
    var maxError = 0.0
    for ((node, rank) <- out) {
      if (!answer.contains(node)) {
        check = false
      } else {
        val thisError = (answer(node) - rank) / answer(node)
        if (thisError > maxError) {
          maxError = thisError
        }
        totalError += thisError
      }
    }
    val averageError = (totalError / answer.size).abs * 100
    if (Experiment.verbosity > 0) {
      println("average error = " + averageError + "%")
      println("max error = " + (maxError * 100) + "%")
    }
    //println("output = " + out.sortWith(_._1 < _._1))
    //println("answer = " + answer.toBuffer.sortWith(_._1 < _._1))
    check && averageError < epsilon
  }
}
| twmarshall/tdb | core/src/main/scala/tdb/examples/PageRankAlgorithm.scala | Scala | apache-2.0 | 4,366 |
package org.wquery.query
import org.wquery.lang.Variable
/** A set-valued query variable, rendered with a leading '%'. */
case class SetVariable(override val name: String) extends Variable(name) {
  override def toString = "%" + name
}
object SetVariable {
  // Well-known variable name under which function arguments are bound.
  val FunctionArgumentsVariable = "A"
}
package com.twitter.server.handler
import com.twitter.conversions.time._
import com.twitter.finagle.Service
import com.twitter.finagle.http.{Request, Response}
import com.twitter.io.Buf
import com.twitter.server.util.HttpUtils.newResponse
import com.twitter.util.{Future, Time}
import com.twitter.util.lint.{GlobalRules, Rule}
/**
* Renders failed lint rule alert in an html fragment onto /admin/failedlint.
*/
class FailedLintRuleHandler extends Service[Request, Response] {
  // How long a computed rule-failure snapshot remains valid.
  private[this] val Ttl = 5.minutes
  @volatile private[this] var reloadAfter = Time.Bottom
  @volatile private[this] var failedRules: Seq[Rule] = Nil
  // Returns the cached failed rules, re-evaluating at most once per Ttl.
  // Double-checked: the outer test skips locking on the hot path; the inner
  // test under `synchronized` prevents concurrent re-evaluation. Both fields
  // are volatile so readers see the updated snapshot.
  private[this] def failedLintRules: Seq[Rule] = {
    val time = Time.now
    if (time > reloadAfter) synchronized {
      if (time > reloadAfter) {
        failedRules = buildFailedRules
        reloadAfter = time + Ttl
      }
    }
    failedRules
  }
  // Evaluates every registered rule and keeps those reporting issues.
  private[this] def buildFailedRules: Seq[Rule] = {
    val rules = GlobalRules.get.iterable
    rules.filter(_().nonEmpty).toSeq
  }
  def apply(req: Request): Future[Response] = {
    val failedRules = failedLintRules
    // Empty body when nothing failed; otherwise a dismissable warning banner.
    val res = if (failedRules.isEmpty) ""
    else {
      s"""<div class="alert alert-warning alert-dismissable fade in" role="alert">
            <button type="button" class="close" data-dismiss="alert" aria-label="Close">
              <span aria-hidden="true">&times;</span>
            </button>
            <strong>WARNING: ${ failedRules.length } Lint Error(s) Found</strong>
            <br/>
            <p>
              ${
                (for (rule <- failedRules) yield {
                  s"<li>${ rule.name }</li>"
                }).mkString("\n")
              }
              For more information, please see the <a href="/admin/lint">lint</a> page.
            </p>
          </div>"""
    }
    newResponse(
      contentType = "text/plain;charset=UTF-8",
      content = Buf.Utf8(res)
    )
  }
}
| BuoyantIO/twitter-server | src/main/scala/com/twitter/server/handler/FailedLintRuleHandler.scala | Scala | apache-2.0 | 1,875 |
/*
* Copyright 2014 Frugal Mechanic (http://frugalmechanic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.flatfile.excel
import java.io.InputStream
import org.apache.poi.xssf.eventusermodel.XSSFReader
import org.apache.poi.openxml4j.opc.OPCPackage
import org.apache.poi.xssf.model.StylesTable
import org.apache.poi.xssf.eventusermodel.ReadOnlySharedStringsTable
import scala.util.Try
import fm.common.{Logging, SingleUseResource}
import fm.lazyseq.LazySeq
import fm.flatfile.{FlatFileParsedRow, FlatFileReaderOptions}
// TODO: Support reading a File which according to OPCPackage.open() is more efficient than reading from an InputStream
// Streams rows of an XLSX workbook lazily via POI's event model, so the
// workbook is never fully materialized in memory.
final class XLSXStreamReaderImpl(is: InputStream, options: FlatFileReaderOptions) extends LazySeq[Try[FlatFileParsedRow]] with Logging {
  /**
   * Feeds every parsed row of the selected sheet to `f`. When
   * `options.sheetName` is null the first sheet is used; otherwise the first
   * sheet with a case-insensitive name match wins.
   */
  def foreach[U](f: Try[FlatFileParsedRow] => U): Unit = {
    val xlsxPackage: OPCPackage = OPCPackage.open(is/*, PackageAccess.READ*/)
    try {
      val stringsTable: ReadOnlySharedStringsTable = new ReadOnlySharedStringsTable(xlsxPackage)
      val xssfReader: XSSFReader = new XSSFReader(xlsxPackage)
      val stylesTable: StylesTable = xssfReader.getStylesTable()
      val sheetsData: XSSFReader.SheetIterator = xssfReader.getSheetsData.asInstanceOf[XSSFReader.SheetIterator]
      require(sheetsData.hasNext, "XLSX File Must have at least one sheet")
      var done: Boolean = false
      // Walk the sheets until the requested one has been processed.
      while (sheetsData.hasNext && !done) {
        SingleUseResource(sheetsData.next).use { sheetInputStream: InputStream =>
          val sheetName: String = sheetsData.getSheetName
          if (null == options.sheetName || sheetName.equalsIgnoreCase(options.sheetName)) {
            val processor: XLSXStreamProcessor = new XLSXStreamProcessor(options, stylesTable, stringsTable)
            processor.processSheet(sheetInputStream, f)
            done = true
          }
        }
      }
    } finally {
      // Since we are using an InputStream the OPCPackage is opened in READ_WRITE mode.
      xlsxPackage.close()
      // Have to call revert() instead of close() since this should be read only
      //xlsxPackage.revert()
    }
  }
}
import sun.net.spi.nameservice.dns.DNSNameService
// Instantiates a JDK-internal DNS name service class at object initialization.
// NOTE(review): this appears to exist purely so the build compiles against an
// extension-classpath class — confirm against the surrounding sbt test.
object A {
  val x = new DNSNameService
}
package aia.persistence.sharded
import scala.concurrent.duration._
import akka.actor._
import akka.io.IO
import akka.pattern.ask
import akka.util.Timeout
import aia.persistence.rest.ShoppersServiceSupport
/**
 * Entry point: boots the "shoppers" actor system, creates the cluster-sharded
 * shoppers actor and exposes it through the REST service.
 */
object ShardedMain extends App with ShoppersServiceSupport {
  implicit val system = ActorSystem("shoppers")
  // Top-level sharded shoppers supervisor.
  val shoppers = system.actorOf(ShardedShoppers.props,
    ShardedShoppers.name)
  startService(shoppers)
}
| RayRoestenburg/akka-in-action | chapter-persistence/src/main/scala/aia/persistence/sharded/ShardedMain.scala | Scala | mit | 428 |
package au.com.dius.pact.provider.scalatest
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/**
* Provider will be tested against all the defined consumers in the configured default directory.
* Before each and every interactions the tested provider will be restarted.
* A freshly started provider will be initialised with the state before verification take place.
*/
@RunWith(classOf[JUnitRunner])
class ExampleRestartProviderSpec extends PactProviderRestartDslSpec("test_provider") {
  // Lazy so the server starter is built only when the spec first needs it.
  lazy val serverStarter: ServerStarter = new ProviderServerStarter
}
| Fitzoh/pact-jvm | pact-jvm-provider-scalatest/src/test/scala/au/com/dius/pact/provider/scalatest/ExampleRestartProviderSpec.scala | Scala | apache-2.0 | 590 |
package org.infinispan.spark.sql
import java.beans.Introspector
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.CatalystTypeConverters
import org.apache.spark.sql.catalyst.expressions.{AttributeReference, GenericRowWithSchema}
import org.apache.spark.sql.types.StructType
object ObjectMapper {
  /**
   * Builds a (bean, requestedColumns) => Row converter for the given Spark SQL
   * schema and JavaBean class.
   */
  def forBean(schema: StructType, beanClass: Class[_]): (AnyRef, Array[String]) => Row = {
    val beanInfo = Introspector.getBeanInfo(beanClass)
    val attrs = schema.fields.map(f => AttributeReference(f.name, f.dataType, f.nullable)())
    // One getter per bean property (excluding `getClass`), paired positionally
    // with the schema attributes and their Catalyst converters.
    val extractors = beanInfo.getPropertyDescriptors.filterNot(_.getName == "class").map(_.getReadMethod)
    val methodsToConverts = extractors.zip(attrs).map { case (e, attr) =>
      (e, CatalystTypeConverters.createToCatalystConverter(attr.dataType))
    }
    (from: Any, columns: Array[String]) => {
      if (columns.nonEmpty) {
        // Projection requested: either the value already is a column array, or
        // a single value matched against the first requested column.
        from match {
          case _: Array[_] => new GenericRowWithSchema(from.asInstanceOf[Array[Any]], schema)
          case f: Any =>
            val rowSchema = StructType(Array(schema(columns.head)))
            new GenericRowWithSchema(Array(f), rowSchema)
        }
      } else {
        // Full-bean path: invoke each getter and convert to Catalyst form.
        new GenericRowWithSchema(methodsToConverts.map { case (e, convert) =>
          val invoke: AnyRef = e.invoke(from)
          convert(invoke)
        }, schema)
      }
    }
  }
}
| infinispan/infinispan-spark | src/main/scala/org/infinispan/spark/sql/ObjectMapper.scala | Scala | apache-2.0 | 1,453 |
/*
* Copyright © 2014 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
* This example is based on the Apache Spark Example SparkKMeans. The original file may be found at
* https://github.com/apache/spark/blob/master/examples/src/main/scala/org/apache/spark/examples/SparkKMeans.scala
*
* Copyright 2014 The Apache Software Foundation. Licensed under the Apache License, Version 2.0.
*
*/
package co.cask.cdap.examples.sparkkmeans
import breeze.linalg.{DenseVector, Vector, squaredDistance}
import co.cask.cdap.api.spark.{ScalaSparkProgram, SparkContext}
import org.apache.spark.rdd.NewHadoopRDD
import org.slf4j.{Logger, LoggerFactory}
/**
 * Implementation of KMeans Clustering Spark Program.
 * Reads points from the "points" dataset, runs Lloyd's algorithm until the
 * centers move less than a convergence threshold, and writes the resulting
 * centers to the "centers" dataset.
 */
class SparkKMeansProgram extends ScalaSparkProgram {
  import SparkKMeansProgram._
  override def run(sc: SparkContext) {
    // Runtime arguments arrive as one space-separated string under "args".
    val arguments: String = sc.getRuntimeArguments.get("args")
    val args: Array[String] = if (arguments == null) Array() else arguments.split("\\\\s")
    LOG.info("Running with arguments {}", args)
    // Amount of centers to calculate
    val K = if (args.length > 0) args(0).toInt else "2".toInt
    // Stop once the total squared movement of centers drops below this.
    val convergeDist = if (args.length > 1) args(1).toDouble else "0.5".toDouble
    LOG.info("Processing points data")
    val linesDataset: NewHadoopRDD[Array[Byte], Point] =
      sc.readFromDataset("points", classOf[Array[Byte]], classOf[Point])
    val lines = linesDataset.values
    val data = lines.map(pointVector).cache()
    LOG.info("Calculating centers")
    // Initial centers: K sampled points (fixed seed 42 for reproducibility).
    val kPoints = data.takeSample(withReplacement = false, K, 42)
    var tempDist = 1.0
    // Lloyd iteration: assign points to the nearest center, then move each
    // center to the mean of its assigned points.
    while (tempDist > convergeDist) {
      val closest = data.map(p => (closestPoint(p, kPoints), (p, 1)))
      val pointStats = closest.reduceByKey { case ((x1, y1), (x2, y2)) => (x1 + x2, y1 + y2)}
      val newPoints = pointStats.map { pair =>
        (pair._1, pair._2._1 * (1.0 / pair._2._2))
      }.collectAsMap()
      tempDist = 0.0
      for (i <- 0 until K) {
        tempDist += squaredDistance(kPoints(i), newPoints(i))
      }
      for (newP <- newPoints) {
        kPoints(newP._1) = newP._2
      }
      LOG.debug("Finished iteration (delta = {})", tempDist)
    }
    LOG.info("Center count {}", kPoints.length)
    // Serialize centers as "index -> comma-separated coordinates".
    val centers = new Array[(Array[Byte], String)](kPoints.length)
    for (i <- kPoints.indices) {
      LOG.debug("Center {}, {}", i, kPoints(i).toString)
      centers(i) = new Tuple2(i.toString.getBytes, kPoints(i).toArray.mkString(","))
    }
    LOG.info("Writing centers data")
    val originalContext: org.apache.spark.SparkContext = sc.getOriginalSparkContext()
    sc.writeToDataset(originalContext.parallelize(centers), "centers", classOf[Array[Byte]], classOf[String])
    LOG.info("Done!")
  }
}
object SparkKMeansProgram {
  private final val LOG: Logger = LoggerFactory.getLogger(classOf[SparkKMeansProgram])

  /**
   * Converts a Point into a dense 3-D vector.
   *
   * Fix: the original read `point.getX` twice and never used the y
   * coordinate, collapsing every point onto the x = y plane and skewing the
   * clustering. (Assumes Point exposes getY alongside getX/getZ — consistent
   * with the 3-D point type used here; confirm against the Point class.)
   */
  private[sparkkmeans] def pointVector(point: Point): Vector[Double] = {
    DenseVector(Array(point.getX, point.getY, point.getZ).map(_.doubleValue()))
  }

  /**
   * Index of the center closest to `p` by squared Euclidean distance.
   * Ties keep the lowest index.
   */
  private[sparkkmeans] def closestPoint(p: Vector[Double], centers: Array[Vector[Double]]): Int = {
    var bestIndex = 0
    var closest = Double.PositiveInfinity
    for (i <- centers.indices) {
      val tempDist = squaredDistance(p, centers(i))
      if (tempDist < closest) {
        closest = tempDist
        bestIndex = i
      }
    }
    bestIndex
  }
}
| chtyim/cdap | cdap-examples/SparkKMeans/src/main/scala/co/cask/cdap/examples/sparkkmeans/SparkKMeansProgram.scala | Scala | apache-2.0 | 3,907 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.tracing.rewriters
import org.neo4j.cypher.internal.frontend.v2_3.{InternalException, Rewriter}
/**
 * Turns a RewriterTask into a runnable Rewriter: condition tasks become
 * condition-checking rewriters, plain rewrite tasks run unchanged.
 */
trait RewriterTaskProcessor extends (RewriterTask => Rewriter) {
  /** Name of the rewriter sequence, used in error reporting. */
  def sequenceName: String
  def apply(task: RewriterTask): Rewriter = task match {
    case RunConditions(name, conditions) => RunConditionRewriter(sequenceName, name, conditions)
    case RunRewriter(_, rewriter) => rewriter
  }
}
/**
 * Identity rewriter that checks a set of conditions against the AST and
 * throws when any of them reports a violation.
 */
case class RunConditionRewriter(sequenceName: String, name: Option[String], conditions: Set[RewriterCondition]) extends Rewriter {
  def apply(input: AnyRef): AnyRef = {
    val failures = conditions.toSeq.flatMap(condition => condition(input))
    if (failures.isEmpty) {
      input
    } else {
      throw new RewritingConditionViolationException(name, failures)
    }
  }
  // Raised when one or more conditions fail after the named rewriting step.
  case class RewritingConditionViolationException(optName: Option[String], failures: Seq[RewriterConditionFailure])
    extends InternalException(buildMessage(sequenceName, optName, failures))
  // Formats every failed condition and each of its problems into one message.
  private def buildMessage(sequenceName: String, optName: Option[String], failures: Seq[RewriterConditionFailure]) = {
    val name = optName.map(name => s"step '$name'").getOrElse("start of rewriting")
    val builder = new StringBuilder
    builder ++= s"Error during '$sequenceName' rewriting after $name. The following conditions where violated: \\n"
    for (failure <- failures ;
         problem <- failure.problems) {
      builder ++= s"Condition '${failure.name}' violated. $problem\\n"
    }
    builder.toString()
  }
}
/** Task processor without tracing output. */
case class DefaultRewriterTaskProcessor(sequenceName: String) extends RewriterTaskProcessor
/**
 * Task processor that prints each rewriter's result to the console. With
 * `onlyWhenChanged` set, the result body is printed only when the rewriter
 * actually changed the AST; otherwise a short marker line is emitted.
 */
case class TracingRewriterTaskProcessor(sequenceName: String, onlyWhenChanged: Boolean) extends RewriterTaskProcessor {
  override def apply(task: RewriterTask) = task match {
    case RunRewriter(name, _) =>
      // Wrap the underlying rewriter so every invocation is traced.
      val innerRewriter = super.apply(task)
      (in) =>
        val result = innerRewriter(in)
        if (!onlyWhenChanged || in != result) {
//          val resultDoc = pprintToDoc[AnyRef, Any](Result(result))(ResultHandler.docGen)
//          val resultString = printCommandsToString(DocFormatters.defaultFormatter(resultDoc))
          val resultString = result.toString
          Console.print(s"*** $name ($sequenceName):$resultString\\n")
        } else {
          Console.print(s"*** $name ($sequenceName):\\n--\\n")
        }
        result
    case _ =>
      // Condition tasks are traced nowhere; delegate unchanged.
      super.apply(task)
  }
}
//
//object TracingRewriterTaskProcessor {
// import Pretty._
//
// object ResultHandler extends CustomDocHandler[Any] {
// def docGen = resultDocGen orElse InternalDocHandler.docGen
// }
//
// object resultDocGen extends CustomDocGen[Any] {
// def apply[X <: Any : TypeTag](x: X): Option[DocRecipe[Any]] = x match {
// case Result(result) => Pretty(page(nestWith(indent = 2, group(break :: group(pretty(result))))))
// case _ => None
// }
// }
//
// case class Result(v: Any)
//}
| HuangLS/neo4j | community/cypher/cypher-compiler-2.3/src/main/scala/org/neo4j/cypher/internal/compiler/v2_3/tracing/rewriters/RewriterTaskProcessor.scala | Scala | apache-2.0 | 3,761 |
package com.blogspot.yetanothercoders.hfile
import org.apache.hadoop.hbase.KeyValue
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
/**
* @author dawid
* @since 31.05.15.
*/
/**
 * Sorts the `KeyValue` pairs of one partition into HBase comparator order
 * (HFile writers require their KeyValues sorted).
 */
object PartitionSorter {

  // One shared comparator instance. The previous implementation allocated a new
  // KVComparator inside compare(), i.e. one allocation per comparison.
  private val kvComparator = new KeyValue.KVComparator()

  /**
   * Eagerly materializes the partition, sorts it by the KeyValue component and
   * returns the sorted elements as an iterator.
   */
  def sortPartition[A <: ImmutableBytesWritable, T <: KeyValue](p: Iterator[(A, T)]): Iterator[(A, T)] = {
    implicit val keyValueOrdering: Ordering[(A, T)] = new Ordering[(A, T)] {
      def compare(x: (A, T), y: (A, T)): Int = kvComparator.compare(x._2, y._2)
    }
    // toVector instead of toStream: sorting forces the whole sequence anyway, so the
    // memoizing Stream only added overhead.
    p.toVector.sorted.iterator
  }
}
| dawidwys/phoenix-on-spark | src/main/scala/com/blogspot/yetanothercoders/hfile/PartitionSorter.scala | Scala | apache-2.0 | 551 |
/*
* Copyright (c) 2015-16 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import java.io._
import org.junit.Test
import org.junit.Assert._
import scala.collection.generic.CanBuildFrom
import labelled._
import nat._
import ops.function._
import ops.nat._
import ops.traversable._
import poly.{ ~>> }
import record._
import syntax.{ CoproductOps, GenericZipperOps, HListOps, HListZipperOps, NatOps, RecordOps, TypeableOps, UnionOps }
import syntax.std.TupleOps
import syntax.singleton._
import syntax.zipper._
import test._
import union._
object SerializationTestDefns {
/**
 * Round-trips `m` through Java serialization.
 *
 * @return true if `m` can be written and read back successfully, false otherwise
 *         (the offending stack trace is printed as a diagnostic).
 */
def serializable[M](m: M): Boolean = {
  var oos: ObjectOutputStream = null
  var ois: ObjectInputStream = null
  try {
    val baos = new ByteArrayOutputStream()
    oos = new ObjectOutputStream(baos)
    oos.writeObject(m)
    oos.flush() // flush instead of close so the stream is closed exactly once, in finally
    val bais = new ByteArrayInputStream(baos.toByteArray())
    ois = new ObjectInputStream(bais)
    ois.readObject() // value discarded — we only care that it can be read back
    true
  } catch {
    case thr: Throwable =>
      // Deliberately broad: any failure during the round trip means "not serializable".
      thr.printStackTrace()
      false
  } finally {
    // Byte-array streams need no closing; only the object streams hold state.
    if (oos != null) oos.close()
    if (ois != null) ois.close()
  }
}
/** Fails the enclosing test unless `t` survives a Java serialization round trip. */
def assertSerializable[T](t: T): Unit = assertTrue(serializable(t))

/** Checks serializability both before and after `op` has been applied to `t`
  * (e.g. to verify that exercising an instance does not capture non-serializable state). */
def assertSerializableBeforeAfter[T, U](t: T)(op: T => U): Unit = {
  assertSerializable(t)
  op(t)
  assertSerializable(t)
}
// Natural transformation Option ~>> Boolean used as a MapFolder argument in the tests.
object isDefined extends (Option ~>> Boolean) {
  def apply[T](o : Option[T]) = o.isDefined
}

// Left-fold combiner: accumulator on the left, element on the right.
object combineL extends Poly2 {
  implicit def ci = at[Int, Int]((acc, i) => acc+i)
  implicit def cs = at[Int, String]((acc, s) => acc+s.length)
  implicit def cb = at[Int, Boolean]((acc, b) => acc+(if(b) 1 else 0))
}

// Right-fold combiner: element on the left, accumulator on the right.
object combineR extends Poly2 {
  implicit def ci = at[Int, Int]((i, acc) => acc+i)
  implicit def cs = at[String, Int]((s, acc) => acc+s.length)
  implicit def cb = at[Boolean, Int]((b, acc) => acc+(if(b) 1 else 0))
}

// Partial Poly1 defined for Int only — used with Collect.
object selInt extends Poly1 {
  implicit def ci = at[Int] { x => x }
}

// Scanner function combining Int/String operands into an Int.
object smear extends Poly {
  implicit val caseIntInt = use((x: Int, y: Int) => x + y)
  implicit val caseStringInt = use((x: String, y: Int) => x.toInt + y)
  implicit val caseIntString = use((x: Int, y: String) => x + y.toInt)
}

// Lifts any value into a single-element coproduct — used with coproduct FlatMap.
object coIdentity extends Poly1 {
  implicit def default[A] = at[A](a => Coproduct[A :+: CNil](a))
}

// Generic "size" function used by the SYB (scrap-your-boilerplate) tests.
object gsize extends Poly1 {
  implicit def caseInt = at[Int](_ => 1)
  implicit def caseString = at[String](_.length)
  implicit def default[T] = at[T](_ => 1)
}

// Monoid-ish combination used as the SYB combining function.
object plus extends Poly2 {
  implicit val caseInt = at[Int, Int](_ + _)
  implicit val caseDouble = at[Double, Double](_ + _)
  implicit val caseString = at[String, String](_ + _)
  implicit def caseList[T] = at[List[T], List[T]](_ ::: _)
}
// Small subtype hierarchy for the (Subtype)Unifier tests.
trait Quux
class Foo extends Quux
class Bar extends Quux
class Baz extends Quux

// Higher-kinded type class used to exercise Generic1 derivation.
trait TC1[F[_]]
object TC1 extends TC10 {
  implicit def tc1Id: TC1[Id] = new TC1[Id] {}
}
trait TC10 {
  implicit def tc1[F[_]]: TC1[F] = new TC1[F] {}
}

// Serializable singleton for the Typeable singleton-type test.
object Sing extends Serializable

// Simple product types for Generic / SYB / lens tests.
case class Wibble(i: Int, s: String)
case class Box[T](t: T)

// Shared record/union shapes: K = the key HList, R/U = record/union with those keys.
type K = HList.`'a, 'b, 'c`.T
type R = Record.`'a -> Int, 'b -> String, 'c -> Boolean`.T
type U = Union.`'a -> Int, 'b -> String, 'c -> Boolean`.T
// RM overlaps R on key 'c — used by the Merger tests.
type RM = Record.`'c -> Boolean, 'd -> Double`.T
// Singleton types of the individual record keys.
type KA = Witness.`'a`.T
type KB = Witness.`'b`.T
type KC = Witness.`'c`.T

// Recursive ADT for derivation, Show and lens tests.
sealed trait Tree[T]
case class Leaf[T](t: T) extends Tree[T]
case class Node[T](l: Tree[T], r: Tree[T]) extends Tree[T]
// Minimal Functor type class whose instances are derived generically via
// Generic1 / IsHCons1 / IsCCons1 — used to check that derived instances serialize.
trait Functor[F[_]] extends Serializable {
  def map[A, B](fa: F[A])(f: A => B): F[B]
}

object Functor extends Functor0 {
  def apply[F[_]](implicit f: Lazy[Functor[F]]): Functor[F] = f.value

  // Base case: the identity "container".
  implicit val idFunctor: Functor[Id] =
    new Functor[Id] {
      def map[A, B](a: A)(f: A => B): B = f(a)
    }

  // Induction step for products
  implicit def hcons[F[_]](implicit ihc: IsHCons1[F, Functor, Functor]): Functor[F] =
    new Functor[F] {
      def map[A, B](fa: F[A])(f: A => B): F[B] = {
        // Map head and tail separately, then repack.
        val (hd, tl) = ihc.unpack(fa)
        ihc.pack((ihc.fh.map(hd)(f), ihc.ft.map(tl)(f)))
      }
    }

  // Induction step for coproducts
  implicit def ccons[F[_]](implicit icc: IsCCons1[F, Functor, Functor]): Functor[F] =
    new Functor[F] {
      def map[A, B](fa: F[A])(f: A => B): F[B] =
        icc.pack(icc.unpack(fa).fold(hd => Left(icc.fh.map(hd)(f)), tl => Right(icc.ft.map(tl)(f))))
    }

  // Entry point: derive a Functor for any F with a Generic1 representation.
  implicit def generic[F[_]](implicit gen: Generic1[F, Functor]): Functor[F] =
    new Functor[F] {
      def map[A, B](fa: F[A])(f: A => B): F[B] =
        gen.from(gen.fr.map(gen.to(fa))(f))
    }
}

trait Functor0 {
  // Low-priority base case: constant "containers" ignore the function.
  implicit def constFunctor[T]: Functor[Const[T]#λ] =
    new Functor[Const[T]#λ] {
      def map[A, B](t: T)(f: A => B): T = t
    }
}
// Labelled-type-class example (rendering values with their field/constructor names),
// derived via LabelledTypeClassCompanion — used to check serializability of the
// derivation machinery.
trait Show[T] extends Serializable {
  def show(t: T): String
}

object Show extends LabelledTypeClassCompanion[Show] {
  // Primitive instances.
  implicit def stringShow: Show[String] = new Show[String] {
    def show(t: String) = t
  }

  implicit def intShow: Show[Int] = new Show[Int] {
    def show(n: Int) = n.toString
  }

  object typeClass extends LabelledTypeClass[Show] {
    def emptyProduct = new Show[HNil] {
      def show(t: HNil) = ""
    }

    // Renders "name = value" pairs, comma-separated across the product.
    def product[F, T <: HList](name: String, sh: Show[F], st: Show[T]) = new Show[F :: T] {
      def show(ft: F :: T) = {
        val head = sh.show(ft.head)
        val tail = st.show(ft.tail)
        if (tail.isEmpty)
          s"$name = $head"
        else
          s"$name = $head, $tail"
      }
    }

    // Unreachable at runtime: CNil has no values.
    def emptyCoproduct = new Show[CNil] {
      def show(t: CNil) = ""
    }

    // Renders the active branch as "ConstructorName(...)".
    def coproduct[L, R <: Coproduct](name: String, sl: => Show[L], sr: => Show[R]) = new Show[L :+: R] {
      def show(lr: L :+: R) = lr match {
        case Inl(l) => s"$name(${sl.show(l)})"
        case Inr(r) => s"${sr.show(r)}"
      }
    }

    // Adapts an instance along an isomorphism F <-> G.
    def project[F, G](instance: => Show[G], to: F => G, from: G => F) = new Show[F] {
      def show(f: F) = instance.show(to(f))
    }
  }
}
/**
 * A `CanBuildFrom` for `List` implementing `Serializable`, unlike the one provided by the standard library.
 * Needed so that operations which capture a CanBuildFrom (e.g. ToTraversable/ToSized)
 * can themselves be serialized in these tests.
 */
implicit def listSerializableCanBuildFrom[T]: CanBuildFrom[List[T], T, List[T]] =
  new CanBuildFrom[List[T], T, List[T]] with Serializable {
    def apply(from: List[T]) = from.genericBuilder[T]
    def apply() = List.newBuilder[T]
  }
}
class SerializationTests {
import SerializationTestDefns._
@Test
def testStructures {
  // The core data structures themselves — HList, Coproduct and a record — must serialize.
  val l = 23 :: "foo" :: true :: HNil

  type ISB = Int :+: String :+: Boolean :+: CNil
  val ci = Coproduct[ISB](23)
  val cs = Coproduct[ISB]("foo")
  val cb = Coproduct[ISB](true)

  val r = 'foo ->> 23 :: 'bar ->> "foo" :: 'baz ->> true :: HNil

  assertSerializable(HNil)
  assertSerializable(l)
  assertSerializable(ci)
  assertSerializable(cs)
  assertSerializable(cb)
  assertSerializable(r)
}
@Test
def testSyntax {
val l = 23 :: "foo" :: true :: HNil
type ISB = Int :+: String :+: Boolean :+: CNil
val cs = Coproduct[ISB]("foo")
val r = 'foo ->> 23 :: 'bar ->> "foo" :: 'baz ->> true :: HNil
type U = Union.`'foo -> Int, 'bar -> String, 'baz -> Boolean`.T
val u = Union[U](bar = "quux")
val t = (23, "foo", true)
assertSerializable(new GenericZipperOps(t))
assertSerializable(new HListOps(l))
assertSerializable(new HListZipperOps(l))
assertSerializable(new CoproductOps(cs))
assertSerializable(new NatOps(_0))
assertSerializable(new RecordOps(r))
assertSerializable(new UnionOps(u))
assertSerializable(new TupleOps(()))
assertSerializable(new TupleOps(t))
assertSerializable(new TypeableOps(23))
}
@Test
def testHListOps {
import ops.hlist._
type L = Int :: String :: Boolean :: HNil
type LP = String :: Boolean :: Int :: HNil
type R = Boolean :: String :: Int :: HNil
type LL = List[Int] :: List[String] :: List[Boolean] :: HNil
type SL = Set[Int] :: Set[String] :: Set[Boolean] :: HNil
type OL = Option[Int] :: Option[String] :: Option[Boolean] :: HNil
type FL = (Int :: HNil) :: (String :: HNil) :: (Boolean :: HNil) :: HNil
type Q = Foo :: Bar :: Baz :: HNil
type IS = Int :: String :: HNil
type LT = (Int, String) :: (Boolean, Double) :: (Char, Float) :: HNil
type AL = (Int => Double) :: (String => Char) :: (Boolean => Float) :: HNil
type I3 = Int :: Int :: Int :: HNil
assertSerializable(IsHCons[L])
assertSerializable(Mapped[L, List])
assertSerializable(Comapped[LL, List])
assertSerializable(NatTRel[LL, List, SL, Set])
assertSerializable(HKernel[HNil])
assertSerializable(HKernel[L])
assertSerializable(ToCoproduct[HNil])
assertSerializable(ToCoproduct[L])
assertSerializable(Length[HNil])
assertSerializable(Length[L])
assertSerializable(Mapper[poly.identity.type, HNil])
assertSerializable(Mapper[poly.identity.type, L])
assertSerializable(FlatMapper[poly.identity.type, HNil])
assertSerializable(FlatMapper[poly.identity.type, FL])
assertSerializable(ConstMapper[Int, HNil])
assertSerializable(ConstMapper[Int, L])
assertSerializable(MapFolder[HNil, Boolean, isDefined.type])
assertSerializable(MapFolder[OL, Boolean, isDefined.type])
assertSerializable(LeftFolder[HNil, Int, combineL.type])
assertSerializable(LeftFolder[L, Int, combineL.type])
assertSerializable(RightFolder[HNil, Int, combineR.type])
assertSerializable(RightFolder[L, Int, combineR.type])
assertSerializable(LeftReducer[L, combineL.type])
assertSerializable(RightReducer[R, combineR.type])
assertSerializable(Unifier[HNil])
assertSerializable(Unifier[Q])
assertSerializable(SubtypeUnifier[HNil, Quux])
assertSerializable(SubtypeUnifier[Q, Quux])
assertSerializable(ToTraversable[HNil, List])
assertSerializable(ToTraversable[L, List])
assertSerializable(ToList[HNil, Nothing])
assertSerializable(ToList[HNil, Int])
assertSerializable(ToList[L, Any])
assertSerializable(ToSized[HNil, List])
assertSerializable(ToSized[L, List])
assertSerializable(Tupler[HNil])
assertSerializable(Tupler[L])
assertSerializable(Init[L])
assertSerializable(Last[L])
assertSerializable(Selector[L, Int])
assertSerializable(Selector[L, String])
assertSerializable(Partition[HNil, Int])
assertSerializable(Partition[L, Int])
assertSerializable(Filter[HNil, Int])
assertSerializable(Filter[L, Int])
assertSerializable(FilterNot[HNil, Int])
assertSerializable(FilterNot[L, Int])
assertSerializable(Remove[L, Int])
assertSerializable(RemoveAll[L, IS])
assertSerializable(Union[L, IS])
assertSerializable(Intersection[L, IS])
assertSerializable(Diff[L, IS])
assertSerializable(Replacer[L, Int, String])
assertSerializable(Modifier[L, Int, String])
assertSerializable(ReplaceAt[L, _1, Double])
assertSerializable(At[L, _0])
assertSerializable(At[L, _1])
assertSerializable(Drop[L, _0])
assertSerializable(Drop[L, _1])
assertSerializable(Take[L, _0])
assertSerializable(Take[L, _1])
assertSerializable(Split[L, _0])
assertSerializable(Split[L, _1])
assertSerializable(ReverseSplit[L, _0])
assertSerializable(ReverseSplit[L, _1])
assertSerializable(SplitLeft[L, Int])
assertSerializable(SplitLeft[L, String])
assertSerializable(ReverseSplitLeft[L, Int])
assertSerializable(ReverseSplitLeft[L, String])
assertSerializable(SplitRight[L, Int])
assertSerializable(SplitRight[L, String])
assertSerializable(ReverseSplitRight[L, Int])
assertSerializable(ReverseSplitRight[L, String])
assertSerializable(Reverse[HNil])
assertSerializable(Reverse[L])
assertSerializable(Align[L, LP])
assertSerializable(Prepend[HNil, L])
assertSerializable(Prepend[L, HNil])
assertSerializable(Prepend[L, LP])
assertSerializable(ReversePrepend[HNil, L])
assertSerializable(ReversePrepend[L, HNil])
assertSerializable(ReversePrepend[L, LP])
assertSerializable(ZipOne[L, FL])
assertSerializable(Transposer[HNil])
assertSerializable(Transposer[FL])
assertSerializable(Zip[HNil])
assertSerializable(Zip[FL])
assertSerializable(Unzip[HNil])
assertSerializable(Unzip[LT])
assertSerializable(ZipApply[HNil, HNil])
assertSerializable(ZipApply[AL, L])
assertSerializable(ZipConst[Int, L])
assertSerializable(ZipWith[HNil, HNil, combineR.type])
assertSerializable(ZipWith[L, I3, combineR.type])
assertSerializable(ZipWithKeys[HNil, HNil])
assertSerializable(ZipWithKeys[K, L])
assertSerializable(Collect[HNil, selInt.type])
assertSerializable(Collect[L, selInt.type])
assertSerializable(Ordering[HNil])
assertSerializable(Ordering[L])
assertSerializable(MapCons[HNil, HNil])
assertSerializable(MapCons[L, FL])
assertSerializable(Interleave[Int, HNil])
assertSerializable(Interleave[Int, L])
assertSerializable(FlatMapInterleave[Int, HNil])
assertSerializable(FlatMapInterleave[Int, FL])
assertSerializable(Permutations[HNil])
assertSerializable(Permutations[L])
assertSerializable(RotateLeft[L, _0])
assertSerializable(RotateLeft[L, _2])
assertSerializable(RotateRight[L, _0])
assertSerializable(RotateRight[L, _2])
assertSerializable(LeftScanner[HNil, Int, smear.type])
assertSerializable(LeftScanner[IS, Int, smear.type])
assertSerializable(RightScanner[HNil, Int, smear.type])
assertSerializable(RightScanner[IS, Int, smear.type])
assertSerializable(Fill[_0, Int])
assertSerializable(Fill[_3, Int])
assertSerializable(Patcher[_0, _1, L, IS])
}
@Test
def testRecords {
import ops.record._
type FA = FieldType[KA, Int]
type FB = FieldType[KB, String]
type FC = FieldType[KC, Boolean]
assertSerializable(Selector[R, KA])
assertSerializable(Selector[R, KB])
assertSerializable(SelectAll[R, HNil])
assertSerializable(SelectAll[R, KA :: HNil])
assertSerializable(SelectAll[R, KA :: KB :: HNil])
assertSerializable(Updater[HNil, FA])
assertSerializable(Updater[R, FA])
assertSerializable(Updater[R, FB])
assertSerializable(Merger[HNil, R])
assertSerializable(Merger[R, HNil])
assertSerializable(Merger[R, RM])
assertSerializable(Merger[RM, R])
assertSerializable(Modifier[R, KA, Int, Char])
assertSerializable(Modifier[R, KB, String, Char])
assertSerializable(Remover[R, KA])
assertSerializable(Remover[R, KB])
assertSerializable(Renamer[R, KA, KB])
assertSerializable(Renamer[R, KC, KA])
assertSerializable(Renamer[R, KB, KC])
assertSerializable(Keys[HNil])
assertSerializable(Keys[R])
assertSerializable(Values[HNil])
assertSerializable(Values[R])
assertSerializable(ToMap[HNil])
assertSerializable(ToMap[R])
assertSerializable(MapValues[poly.identity.type, HNil])
assertSerializable(MapValues[poly.identity.type, R])
}
@Test
def testCoproducts {
import ops.coproduct._
type L = Int :+: String :+: Boolean :+: CNil
type LP = String :+: Boolean :+: Int :+: CNil
type BS = Boolean :+: String :+: CNil
assertSerializable(Inject[L, Int])
assertSerializable(Inject[L, String])
assertSerializable(Inject[L, Boolean])
assertSerializable(Selector[L, Int])
assertSerializable(Selector[L, String])
assertSerializable(Selector[L, Boolean])
assertSerializable(At[L, _0])
assertSerializable(At[L, _1])
assertSerializable(At[L, _2])
assertSerializable(Partition[L, Int])
assertSerializable(Partition[L, String])
assertSerializable(Filter[L, Int])
assertSerializable(Filter[L, String])
assertSerializable(FilterNot[L, Int])
assertSerializable(FilterNot[L, String])
assertSerializable(Remove[L, Int])
assertSerializable(Remove[L, String])
assertSerializable(RemoveLast[L, Int])
assertSerializable(RemoveLast[L, String])
assertSerializable(FlatMap[CNil, coIdentity.type])
assertSerializable(FlatMap[L, coIdentity.type])
assertSerializable(Mapper[poly.identity.type, CNil])
assertSerializable(Mapper[poly.identity.type, L])
assertSerializable(Unifier[L])
assertSerializable(Folder[poly.identity.type, L])
assertSerializable(ZipWithKeys[HNil, CNil])
assertSerializable(ZipWithKeys[K, L])
assertSerializable(Length[CNil])
assertSerializable(Length[L])
assertSerializable(ExtendRight[L, Int])
assertSerializable(ExtendBy[CNil, CNil])
assertSerializable(ExtendBy[L, CNil])
assertSerializable(ExtendBy[L, L])
assertSerializable(ExtendLeftBy[CNil, CNil])
assertSerializable(ExtendLeftBy[L, CNil])
assertSerializable(ExtendLeftBy[L, L])
assertSerializable(ExtendLeftBy[CNil, L])
assertSerializable(ExtendRightBy[CNil, CNil])
assertSerializable(ExtendRightBy[L, CNil])
assertSerializable(ExtendRightBy[L, L])
assertSerializable(RotateLeft[CNil, _0])
assertSerializable(RotateLeft[L, _0])
assertSerializable(RotateLeft[L, _1])
assertSerializable(RotateLeft[L, _2])
assertSerializable(RotateRight[CNil, _0])
assertSerializable(RotateRight[L, _0])
assertSerializable(RotateRight[L, _1])
assertSerializable(RotateRight[L, _2])
assertSerializable(IsCCons[L])
assertSerializable(Split[L, _0])
assertSerializable(Split[L, _1])
assertSerializable(Split[L, _2])
assertSerializable(Take[L, _0])
assertSerializable(Take[L, _1])
assertSerializable(Take[L, _2])
assertSerializable(Drop[L, _0])
assertSerializable(Drop[L, _1])
assertSerializable(Drop[L, _2])
assertSerializable(Reverse[CNil])
assertSerializable(Reverse[L])
assertSerializable(Align[CNil, CNil])
assertSerializable(Align[L, L])
assertSerializable(Align[L, LP])
assertSerializable(InitLast[L])
assertSerializable(Ordering[CNil])
assertSerializable(implicitly[PartialOrdering[CNil]])
assertSerializable(implicitly[PartialOrdering[L]])
assertSerializable(ToHList[CNil])
assertSerializable(ToHList[L])
assertSerializable(Basis[L, CNil])
assertSerializable(Basis[L, BS])
}
@Test
def testUnions {
import ops.union._
assertSerializable(Selector[U, KA])
assertSerializable(Selector[U, KB])
assertSerializable(Keys[CNil])
assertSerializable(Keys[U])
assertSerializable(Values[CNil])
assertSerializable(Values[U])
assertSerializable(ToMap[CNil])
assertSerializable(ToMap[U])
assertSerializable(MapValues[poly.identity.type, CNil])
assertSerializable(MapValues[poly.identity.type, U])
}
@Test
def testTuples {
import ops.tuple._
type L = (Int, String, Boolean)
type LP = (String, Boolean, Int)
type R = (Boolean, String, Int)
type LL = (List[Int], List[String], List[Boolean])
type SL = (Set[Int], Set[String], Set[Boolean])
type OL = (Option[Int], Option[String], Option[Boolean])
type FL = (Tuple1[Int], Tuple1[String], Tuple1[Boolean])
type Q = (Foo, Bar, Baz)
type IS = (Int, String)
type LT = ((Int, String), (Boolean, Double), (Char, Float))
type AL = ((Int => Double), (String => Char), (Boolean => Float))
type I3 = (Int, Int, Int)
assertSerializable(IsComposite[L])
assertSerializable(Prepend[Unit, L])
assertSerializable(Prepend[L, Unit])
assertSerializable(Prepend[L, LP])
assertSerializable(ReversePrepend[Unit, L])
assertSerializable(ReversePrepend[L, Unit])
assertSerializable(ReversePrepend[L, LP])
assertSerializable(At[L, _0])
assertSerializable(At[L, _1])
assertSerializable(Init[L])
assertSerializable(Last[L])
assertSerializable(Selector[L, Int])
assertSerializable(Selector[L, String])
assertSerializable(Filter[Unit, Int])
assertSerializable(Filter[L, Int])
assertSerializable(FilterNot[Unit, Int])
assertSerializable(FilterNot[L, Int])
assertSerializable(Remove[L, Int])
assertSerializable(RemoveAll[L, IS])
assertSerializable(Replacer[L, Int, String])
assertSerializable(ReplaceAt[L, _1, Double])
assertSerializable(Modifier[L, Int, String])
assertSerializable(Take[L, _0])
assertSerializable(Take[L, _1])
assertSerializable(Drop[L, _0])
assertSerializable(Drop[L, _1])
assertSerializable(Split[L, _0])
assertSerializable(Split[L, _1])
assertSerializable(ReverseSplit[L, _0])
assertSerializable(ReverseSplit[L, _1])
assertSerializable(SplitLeft[L, Int])
assertSerializable(SplitLeft[L, String])
assertSerializable(ReverseSplitLeft[L, Int])
assertSerializable(ReverseSplitLeft[L, String])
assertSerializable(SplitRight[L, Int])
assertSerializable(SplitRight[L, String])
assertSerializable(ReverseSplitRight[L, Int])
assertSerializable(ReverseSplitRight[L, String])
assertSerializable(Reverse[Unit])
assertSerializable(Reverse[L])
assertSerializable(Mapper[poly.identity.type, Unit])
assertSerializable(Mapper[poly.identity.type, L])
assertSerializable(FlatMapper[poly.identity.type, Unit])
assertSerializable(FlatMapper[poly.identity.type, FL])
assertSerializable(ConstMapper[Unit, Int])
assertSerializable(ConstMapper[L, Int])
assertSerializable(MapFolder[Unit, Boolean, isDefined.type])
assertSerializable(MapFolder[OL, Boolean, isDefined.type])
assertSerializable(LeftFolder[Unit, Int, combineL.type])
assertSerializable(LeftFolder[L, Int, combineL.type])
assertSerializable(RightFolder[Unit, Int, combineR.type])
assertSerializable(RightFolder[L, Int, combineR.type])
assertSerializable(LeftReducer[L, combineL.type])
assertSerializable(RightReducer[R, combineR.type])
assertSerializable(Transposer[Unit])
assertSerializable(Transposer[FL])
assertSerializable(ZipApply[Unit, Unit])
assertSerializable(ZipApply[AL, L])
assertSerializable(ZipOne[L, FL])
assertSerializable(ZipConst[L, Int])
assertSerializable(Unifier[Unit])
assertSerializable(Unifier[Q])
assertSerializable(SubtypeUnifier[Unit, Quux])
assertSerializable(SubtypeUnifier[Q, Quux])
assertSerializable(Length[Unit])
assertSerializable(Length[L])
assertSerializable(ToTraversable[Unit, List])
assertSerializable(ToTraversable[L, List])
assertSerializable(ToList[Unit, Nothing])
assertSerializable(ToList[Unit, Int])
assertSerializable(ToList[L, Any])
assertSerializable(ToSized[Unit, List])
assertSerializable(ToSized[L, List])
assertSerializable(Collect[Unit, selInt.type])
assertSerializable(Collect[L, selInt.type])
assertSerializable(Permutations[Unit])
assertSerializable(Permutations[L])
assertSerializable(RotateLeft[L, _0])
assertSerializable(RotateLeft[L, _2])
assertSerializable(RotateRight[L, _0])
assertSerializable(RotateRight[L, _2])
assertSerializable(LeftScanner[Unit, Int, smear.type])
assertSerializable(LeftScanner[IS, Int, smear.type])
assertSerializable(RightScanner[Unit, Int, smear.type])
assertSerializable(RightScanner[IS, Int, smear.type])
assertSerializable(Fill[_0, Int])
assertSerializable(Fill[_3, Int])
assertSerializable(Patcher[_0, _1, L, IS])
}
@Test
def testPoly {
  // Polymorphic function values (both library-provided and the local test polys).
  assertSerializable(poly.identity)
  assertSerializable(isDefined)
  assertSerializable(productElements)
  assertSerializable(smear)
  assertSerializable(coIdentity)
  assertSerializable(tupled)
  assertSerializable(gsize)
  assertSerializable(plus)
}
@Test
def testNats {
  // Nat singleton values and the evidence for type-level arithmetic on them.
  assertSerializable(_0)
  assertSerializable(_1)
  assertSerializable(_2)
  assertSerializable(_3)
  assertSerializable(_4)
  assertSerializable(Pred[_1])
  assertSerializable(Sum[_2, _3])
  assertSerializable(Diff[_3, _2])
  assertSerializable(Prod[_3, _2])
  assertSerializable(Div[_2, _6])
  assertSerializable(Div[_6, _2])
  assertSerializable(Mod[_5, _2])
  assertSerializable(LT[_2, _3])
  assertSerializable(LTEq[_2, _2])
  assertSerializable(Min[_2, _3])
  assertSerializable(Pow[_3, _2])
  assertSerializable(ToInt[_3])
}
@Test
def testFunctions {
  // Conversions between FunctionN and HList-taking functions, in both directions.
  assertSerializable(FnToProduct[() => String])
  assertSerializable(FnToProduct[(Int) => String])
  assertSerializable(FnToProduct[(Int, Boolean) => String])
  assertSerializable(FnFromProduct[(HNil) => String])
  assertSerializable(FnFromProduct[(Int :: HNil) => String])
  assertSerializable(FnFromProduct[(Int :: Boolean :: HNil) => String])
}
@Test
def testGeneric {
  // Macro-materialized Generic / LabelledGeneric / Generic1 instances.
  assertSerializable(Generic[(Int, String, Boolean)])
  assertSerializable(Generic[Option[Int]])
  assertSerializable(DefaultSymbolicLabelling[(Int, String, Boolean)])
  assertSerializable(DefaultSymbolicLabelling[Option[Int]])
  assertSerializable(LabelledGeneric[(Int, String, Boolean)])
  assertSerializable(LabelledGeneric[Option[Int]])
  assertSerializable(Generic1[Some, TC1])
  assertSerializable(Generic1[List, TC1])
}
@Test
def testTraversable {
  type L = Int :: String :: Boolean :: HNil
  assertSerializable(FromTraversable[L])
  // To satisfy serialization of `ToSizedHList` we must provide a serializable `IsTraversableLike`
  import scala.collection.generic.IsTraversableLike
  // null is trivially serializable, which is all this implicit needs to provide here.
  implicit val hack: IsTraversableLike[List[Int]] { type A = Int } = null
  assertSerializable(ToSizedHList[List, Int, _4])
}
@Test
def testTypeable {
assertSerializable(Typeable[Any])
assertSerializable(Typeable[AnyRef])
assertSerializable(Typeable[AnyVal])
assertSerializable(Typeable[Unit])
assertSerializable(Typeable[Int])
assertSerializable(Typeable[Double])
assertSerializable(Typeable[String])
assertSerializable(Typeable[Foo])
assertSerializable(Typeable[Witness.`3`.T])
assertSerializable(Typeable[Witness.`"foo"`.T])
assertSerializable(Typeable[Witness.`'foo`.T])
assertSerializable(Typeable[Sing.type])
assertSerializable(Typeable[Foo with Bar])
assertSerializable(Typeable[Option[Int]])
assertSerializable(Typeable[Either[Int, String]])
assertSerializable(Typeable[Left[Int, String]])
assertSerializable(Typeable[Right[Int, String]])
assertSerializable(Typeable[List[Int]])
assertSerializable(Typeable[Map[Int, String]])
assertSerializable(Typeable[Wibble])
assertSerializable(Typeable[Box[Int]])
assertSerializable(Typeable[HNil])
assertSerializable(Typeable[Int :: String :: Boolean :: HNil])
assertSerializable(Typeable[CNil])
assertSerializable(Typeable[Int :+: String :+: Boolean :+: CNil])
assertSerializable(Typeable[Inl[Int, CNil]])
assertSerializable(Typeable[Inr[Int, CNil]])
assertSerializable(TypeCase[List[Int]])
}
@Test
def testHMap {
  // Heterogenous maps and the ~?> key/value relation witnessing them.
  assertSerializable(HMap[(Set ~?> Option)#λ](Set("foo") -> Option("bar"), Set(23) -> Option(13)))
  assertSerializable(new (Set ~?> Option))
  assertSerializable(implicitly[(Set ~?> Option)#λ[Set[Int], Option[Int]]])
}
@Test
def testLazy {
  // Lazy wrappers must serialize both before and after being forced (.value),
  // i.e. forcing must not capture non-serializable state.
  assertSerializable(Lazy(23))
  assertSerializableBeforeAfter(implicitly[Lazy[Generic[Wibble]]])(_.value)
  assertSerializableBeforeAfter(implicitly[Lazy[Generic1[Box, TC1]]])(_.value)
  assertSerializableBeforeAfter(implicitly[Lazy[Lazy.Values[Generic[Wibble] :: HNil]]])(_.value)
  assertSerializableBeforeAfter(implicitly[Lazy[Lazy.Values[Generic[Wibble] :: Generic1[Box, TC1] :: HNil]]])(_.value)
}
@Test
def testZipper {
import ops.zipper._
val l = 23 :: "foo" :: true :: HNil
val t = (23, "foo", (2.0, true))
val tz0 = t.toZipper
type Z0 = tz0.Self
val tz1 = tz0.right
type Z1 = tz1.Self
val tz2 = tz0.right.right.down
type Z2 = tz2.Self
val tz3 = tz0.right.right
type Z3 = tz3.Self
val lz0 = l.toZipper
type Z4 = lz0.Self
assertSerializable(l.toZipper)
assertSerializable(t.toZipper)
assertSerializable(Right[Z0])
assertSerializable(Right[Z1])
assertSerializable(Right[Z2])
assertSerializable(Left[Z1])
assertSerializable(First[Z0])
assertSerializable(First[Z1])
assertSerializable(First[Z2])
assertSerializable(Last[Z0])
assertSerializable(Last[Z1])
assertSerializable(Last[Z2])
assertSerializable(RightBy[Z0, _1])
assertSerializable(RightBy[Z1, _1])
assertSerializable(RightBy[Z2, _1])
assertSerializable(LeftBy[Z0, _0])
assertSerializable(LeftBy[Z1, _1])
assertSerializable(LeftBy[Z2, _0])
assertSerializable(RightTo[Z0, String])
assertSerializable(RightTo[Z1, String])
assertSerializable(RightTo[Z2, Boolean])
assertSerializable(LeftTo[Z1, Int])
assertSerializable(Up[Z2])
assertSerializable(Down[Z3])
assertSerializable(Root[Z0])
assertSerializable(Root[Z3])
assertSerializable(Get[Z2])
assertSerializable(Put[Z2, Double])
assertSerializable(Put[Z4, Short])
assertSerializable(Insert[Z4, Short])
assertSerializable(Delete[Z4])
assertSerializable(Reify[Z2])
assertSerializable(Reify[Z4])
}
@Test
def testConstraints {
type L = Int :: String :: Boolean :: HNil
type OL = Option[Int] :: Option[String] :: Option[Boolean] :: HNil
type I3 = Int :: Int :: Int :: HNil
type IS = Int :: String :: HNil
type R = Record.`'a -> Int, 'b -> String, 'c -> Boolean`.T
type K = HList.`'a, 'b, 'c`.T
assertSerializable(UnaryTCConstraint[HNil, Option])
assertSerializable(UnaryTCConstraint[OL, Option])
assertSerializable(UnaryTCConstraint[L, Id])
assertSerializable(UnaryTCConstraint[I3, Const[Int]#λ])
assertSerializable(BasisConstraint[HNil, L])
assertSerializable(BasisConstraint[IS, L])
assertSerializable(LUBConstraint[HNil, Int])
assertSerializable(LUBConstraint[I3, Int])
assertSerializable(KeyConstraint[HNil, K])
assertSerializable(KeyConstraint[R, K])
assertSerializable(ValueConstraint[HNil, L])
assertSerializable(ValueConstraint[R, L])
}
@Test
def testSybclass {
type L = Int :: String :: Boolean :: HNil
type C = Int :+: String :+: Boolean :+: CNil
assertSerializable(Data[gsize.type, Wibble, Int])
assertSerializable(Data[gsize.type, List[Int], Int])
assertSerializable(Data[gsize.type, List[HNil], Int])
assertSerializable(Data[gsize.type, List[L], Int])
assertSerializable(Data[gsize.type, List[CNil], Int])
assertSerializable(Data[gsize.type, List[C], Int])
assertSerializable(DataT[poly.identity.type, Wibble])
assertSerializable(DataT[poly.identity.type, List[Int]])
assertSerializable(DataT[poly.identity.type, List[HNil]])
assertSerializable(DataT[poly.identity.type, List[L]])
assertSerializable(DataT[poly.identity.type, List[CNil]])
assertSerializable(DataT[poly.identity.type, List[C]])
assertSerializableBeforeAfter(implicitly[Everything[gsize.type, plus.type, Wibble]])(_(Wibble(2, "a")))
assertSerializableBeforeAfter(implicitly[Everywhere[poly.identity.type, Wibble]])(_(Wibble(2, "a")))
}
@Test
def testFunctor {
  // Derived Functor instances must remain serializable even after being exercised.
  assertSerializableBeforeAfter(Functor[Some])(_.map(Some(2))(_.toString))
  assertSerializableBeforeAfter(Functor[Option])(_.map(Option(2))(_.toString))
  assertSerializableBeforeAfter(Functor[Tree])(_.map(Leaf(2))(_.toString))
  assertSerializableBeforeAfter(Functor[List])(_.map(List(2))(_.toString))
}
@Test
def testShow {
  // Derived Show instances, checked after use where outer-class capture was once an issue.
  // I had to disable the first two during https://github.com/milessabin/shapeless/pull/435, with scala 2.12.0-M2.
  // Don't know why they keep capturing their outer class, and the next two don't.
  assertSerializableBeforeAfter(Show[Some[Int]])(_.show(Some(2)))
  assertSerializableBeforeAfter(Show[Option[Int]]) { show =>
    show.show(Some(2))
    show.show(None)
  }
  assertSerializable(Show[Tree[Int]])
  assertSerializable(Show[List[Int]])
}
@Test
def testLenses {
val l1 = optic[Tree[Int]]
val l2 = optic[Tree[Int]][Node[Int]]
val l3 = optic[Tree[Int]][Node[Int]].l
val l4 = optic[Tree[Int]][Node[Int]].r
val l5 = optic[Tree[Int]][Node[Int]].l[Node[Int]].r
val l6 = optic[Tree[Int]][Node[Int]].l[Node[Int]].r[Leaf[Int]].t
val l7 = l3 ~ l4
val l8 = optic.hlistSelectLens[Int :: String :: Boolean :: HNil, String]
val l9 = optic.coproductSelectPrism[Int :+: String :+: Boolean :+: CNil, String]
val l10 = optic.hlistNthLens[Int :: String :: Boolean :: HNil, _1]
val l11 = optic.recordLens[Record.`'foo -> Int, 'bar -> String, 'baz -> Boolean`.T]('bar)
val l12 = optic[Tree[Int]].l.r.l.t
val l13 = optic[Node[Int]] >> 'r
val l14 = optic[Node[Int]] >> _1
assertSerializable(l1)
assertSerializable(l2)
assertSerializable(l3)
assertSerializable(l4)
assertSerializable(l5)
assertSerializable(l6)
assertSerializable(l7)
assertSerializable(l8)
assertSerializable(l9)
assertSerializable(l10)
assertSerializable(l11)
assertSerializable(l12)
assertSerializable(l13)
assertSerializable(l14)
}
  @Test
  def testDefault {
    // Default-parameter extraction in all three flavours: plain, as a record,
    // and as options; each derived instance must be serializable.
    val d1 = Default[DefaultTestDefinitions.CC]
    val d2 = Default.AsRecord[DefaultTestDefinitions.CC]
    val d3 = Default.AsOptions[DefaultTestDefinitions.CC]
    assertSerializable(d1)
    assertSerializable(d2)
    assertSerializable(d3)
  }
}
| clhodapp/shapeless | core/jvm/src/test/scala/shapeless/serialization.scala | Scala | apache-2.0 | 34,199 |
/*
rule = Http4sUseLiteralsSyntax
*/
package fix
import org.http4s.Uri
import org.http4s.syntax.all._
object LiteralsSyntaxWithExistingSyntaxImportTests {
  // Scalafix rewrite target: the rule named in the header comment should turn
  // this call into a uri"..." literal, reusing the already-present syntax import.
  // NOTE(review): this is a scalafix *input* fixture — any edit here must be
  // mirrored in the corresponding expected-output file.
  Uri.unsafeFromString("foo.com")
}
| http4s/http4s | scalafix-internal/input/src/main/scala/fix/LiteralsSyntaxWithExistingSyntaxImportTests.scala | Scala | apache-2.0 | 194 |
/*
* @author Philip Stutz
*
* Copyright 2013 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.signalcollect.util
import scala.collection.mutable.Buffer
import scala.collection.mutable.ArrayBuffer
import scala.annotation.tailrec
import scala.util.Random
object SplayIntSet {
  // A null reference typed as SplayNode, used as the "no node" sentinel.
  @inline final def nullNode: SplayNode = null.asInstanceOf[SplayNode]
  // Terse non-null predicate: ?(x) reads as "is x present?".
  @inline final def ?(n: AnyRef): Boolean = n ne null
}
/**
 * One node of the splay tree. A node is responsible for the inclusive integer
 * interval [intervalFrom, intervalTo] and stores the members of that interval
 * in `intSet`, which has three possible representations:
 *  - Array[Byte]: a compact FastInsertIntSet,
 *  - Array[Long]: a BitSet,
 *  - null: every integer of the interval is contained.
 */
final class SplayNode(
  var intSet: Any,
  var intervalFrom: Int = Int.MinValue,
  var intervalTo: Int = Int.MaxValue) {
  import SplayIntSet._
  var left: SplayNode = _
  var right: SplayNode = _
  // Approximate size in bytes of the underlying representation
  // (0 when the whole interval is contained and no set is stored).
  def bytes: Int = {
    if (intSet != null) {
      intSet match {
        case bs: Array[Byte] =>
          bs.length
        case ls: Array[Long] =>
          ls.length * 8
      }
    } else 0
  }
  override def toString = {
    val min = minElement
    val max = maxElement
    // Density as a percentage rounded to one decimal place.
    val density = ((size / range.toDouble) * 1000).round / 10.0
    s"SplayNode([$intervalFrom to $intervalTo], min = $min, max = $max, range = $range, #entries = $size, density = $density%, bytes = $bytes)"
  }
  // Number of integers covered by this node's interval (inclusive bounds);
  // Long arithmetic because the default interval spans the whole Int range.
  @inline final def range: Long = {
    (intervalTo.toLong - intervalFrom) + 1
  }
  // Fraction of the interval that is actually contained in the set.
  @inline final def density: Double = {
    size / range.toDouble
  }
  // True iff every integer of [intervalFrom, intervalTo] is contained.
  // NOTE(review): `r` is computed but unused; the comparisons below
  // re-evaluate `range` instead.
  final def isEntireRangeContained: Boolean = {
    val r = range
    if (range > size.toLong) {
      false
    } else {
      size == range
    }
  }
  /**
   * Inserts i into this node's set representation.
   * Returns true iff i was not contained before the call.
   */
  final def insert(i: Int, overheadFraction: Float): Boolean = {
    if (intSet != null) {
      val wasInserted = intSet match {
        // It's a FastInsertIntSet
        case bs: Array[Byte] =>
          val sizeBefore = new FastInsertIntSet(bs).size
          val after = new FastInsertIntSet(bs).insert(i, overheadFraction)
          intSet = after
          val sizeAfter = new FastInsertIntSet(after).size
          // Above this density a bit set is the more compact representation:
          // migrate all entries into a freshly created BitSet.
          if (density > 0.12) {
            val bitSet = BitSet.create(intervalFrom, range.toInt)
            new FastInsertIntSet(after).foreach(new BitSet(bitSet).insert(_))
            intSet = bitSet
          }
          sizeAfter > sizeBefore
        case ls: Array[Long] =>
          new BitSet(ls).insert(i)
      }
      if (isEntireRangeContained) {
        // Int set being null means that all elements in the interval are contained.
        intSet = null
      }
      wasInserted
    } else {
      // Whole interval is contained already.
      false
    }
  }
  // True iff i falls inside this node's interval (regardless of membership).
  @inline final def isInRange(i: Int): Boolean = {
    i >= intervalFrom && i <= intervalTo
  }
  // Membership test against whichever representation is active.
  @inline final def contains(i: Int): Boolean = {
    if (intSet != null) {
      intSet match {
        case bs: Array[Byte] =>
          new FastInsertIntSet(bs).contains(i)
        case ls: Array[Long] =>
          new BitSet(ls).contains(i)
      }
    } else {
      // null representation: the entire interval is contained.
      isInRange(i)
    }
  }
  /**
   * Assumes that the int set is not null.
   */
  // Depth-first traversal over the whole subtree; `pending` carries subtrees
  // still to visit so the method can remain tail-recursive.
  @tailrec final def foreach(f: Int => Unit, pending: List[SplayNode] = Nil) {
    if (intSet != null) {
      intSet match {
        case bs: Array[Byte] =>
          new FastInsertIntSet(bs).foreach(f)
        case ls: Array[Long] =>
          new BitSet(ls).foreach(f)
      }
    } else {
      // Int set being null means that all numbers in the interval are contained.
      var i = intervalFrom
      while (i <= intervalTo) {
        f(i)
        i += 1
      }
    }
    if (?(left) && ?(right)) {
      left.foreach(f, right :: pending)
    } else if (?(left)) {
      left.foreach(f, pending)
    } else if (?(right)) {
      right.foreach(f, pending)
    } else {
      pending match {
        case Nil =>
        case head :: tail =>
          head.foreach(f, tail)
      }
    }
  }
  // Checks the BST ordering invariant on intervals for every node in the subtree.
  final def validate {
    foreachNode {
      node =>
        if (?(node.left)) {
          assert(node.intervalFrom > node.left.intervalTo)
        }
        if (?(node.right)) {
          assert(node.intervalTo < node.right.intervalFrom)
        }
    }
  }
  // Applies f to every node of the subtree; same pending-stack traversal
  // scheme as `foreach` to stay tail-recursive.
  @tailrec @inline final def foreachNode(f: SplayNode => Unit, pending: List[SplayNode] = Nil) {
    f(this)
    if (?(left) && ?(right)) {
      left.foreachNode(f, right :: pending)
    } else if (?(left)) {
      left.foreachNode(f, pending)
    } else if (?(right)) {
      right.foreachNode(f, pending)
    } else {
      pending match {
        case Nil =>
        case head :: tail =>
          head.foreachNode(f, tail)
      }
    }
  }
  // Number of integers contained in *this node only* (children excluded).
  @inline final def size: Int = {
    if (intSet != null) {
      intSet match {
        case bs: Array[Byte] =>
          new FastInsertIntSet(bs).size
        case ls: Array[Long] =>
          new BitSet(ls).size
      }
    } else {
      // Full interval contained.
      intervalTo - intervalFrom + 1
    }
  }
  // Smallest contained integer of this node (not of the whole tree).
  @inline final def minElement: Int = {
    if (intSet != null) {
      intSet match {
        case bs: Array[Byte] =>
          new FastInsertIntSet(bs).min
        case ls: Array[Long] =>
          new BitSet(ls).min
      }
    } else {
      intervalFrom
    }
  }
  // Largest contained integer of this node (not of the whole tree).
  def maxElement: Int = {
    if (intSet != null) {
      intSet match {
        case bs: Array[Byte] =>
          new FastInsertIntSet(bs).max
        case ls: Array[Long] =>
          new BitSet(ls).max
      }
    } else {
      intervalTo
    }
  }
}
/**
 * Uses Splay trees to efficiently store integer sets.
 * Whilst an ordinary Splay tree contains one number per node, here each node
 * is responsible for a whole interval. The initial interval of the root node
 * spans all integers. Whenever a node reaches 'maxNodeIntSetSize', that node is split into
 * two nodes, and the interval for which the nodes are responsible is also split.
 *
 * Invariant: the node intervals always partition the full Int range (the
 * first root covers all integers and splits preserve coverage), so every
 * lookup lands in exactly one node's interval.
 */
abstract class SplayIntSet {
  import SplayIntSet._
  // Dumps one line of diagnostics per node; the random id groups the lines
  // of a single dump when several sets print concurrently.
  def printDiagnosticInfo {
    val id = Random.nextInt(10)
    println(s"$id: SplayIntSet diagnostic info:")
    if (root != null) {
      root.foreachNode(node => println(s"$id\\t" + node.toString))
    }
  }
  // Approximate size in bytes of the internal representations. Does not consider object overhead.
  def bytes: Int = {
    if (root != null) {
      var bytes = 0
      root.foreachNode(node => bytes += node.bytes)
      bytes
    } else {
      0
    }
  }
  // Tuning knob forwarded to FastInsertIntSet on every insert.
  def overheadFraction: Float
  // Node split threshold: a node whose FastInsertIntSet grows beyond this is split in two.
  def maxNodeIntSetSize: Int
  // Total number of integers in the whole tree.
  var size: Int = 0
  var root: SplayNode = _
  // Avoid constructor to ensure that nothing unnecessary is stored.
  // The passed root cannot have any child nodes.
  def initializeWithRoot(r: SplayNode) {
    assert(r.left == null && r.right == null)
    root = r
    size = root.size
  }
  // Collects all contained integers into a buffer (traversal order of SplayNode.foreach).
  def toBuffer: Buffer[Int] = {
    val buffer = new ArrayBuffer[Int]
    if (size > 0) {
      root.foreach(buffer.append(_))
    }
    buffer
  }
  def toList: List[Int] = toBuffer.toList
  def toSet: Set[Int] = toBuffer.toSet
  /**
   * Asserts that the root has been set.
   */
  @inline final def foreach(f: Int => Unit) {
    if (size > 0) {
      root.foreach(f)
    }
  }
  /**
   * Returns true iff i is contained in the set.
   */
  def contains(i: Int): Boolean = {
    if (?(root)) {
      //root = splay(root, i)
      //root.contains(i)
      // Read-only lookup: no splaying, the tree structure is left untouched.
      val node = find(root, i)
      node.contains(i)
    } else {
      false
    }
  }
  /**
   * Inserts i into the set, returns false if i was already contained.
   */
  def insert(i: Int): Boolean = {
    if (?(root)) {
      // Splay the responsible node to the top, then insert into it.
      root = splay(root, i)
      val inserted = root.insert(i, overheadFraction)
      if (inserted) size += 1
      val nodeIntSet = root.intSet
      // Null would mean that the set is efficiently represented already.
      if (nodeIntSet != null &&
        nodeIntSet.isInstanceOf[Array[Byte]] &&
        new FastInsertIntSet(nodeIntSet.asInstanceOf[Array[Byte]]).size > maxNodeIntSetSize) {
        //println(s"Has now more than $maxNodeIntSetSize entires, splitting")
        // Split the oversized node: the lower half moves into a new node that
        // takes over [intervalFrom, set2Min - 1]; the root keeps the rest,
        // preserving the interval-partition invariant.
        val (set1, set2) = new FastInsertIntSet(nodeIntSet.asInstanceOf[Array[Byte]]).split(overheadFraction)
        val set2Min = new FastInsertIntSet(set2).min
        val newNode = new SplayNode(set1, root.intervalFrom, set2Min - 1)
        root.intSet = set2
        root.intervalFrom = set2Min
        insertNode(root, newNode)
      }
      return inserted
    } else {
      // Tree is empty.
      val repr = Ints.createEmptyFastInsertIntSet
      // NOTE(review): the result of this insert is discarded; the
      // root.insert below performs the actual insertion, so this line
      // appears to be redundant work.
      new FastInsertIntSet(repr).insert(i, overheadFraction)
      root = new SplayNode(repr)
      root.insert(i, overheadFraction)
      size += 1
      return true
    }
  }
  /**
   * Searches from root for an insertion point for 'newNode'.
   * There can be no other node in the tree that intersects with the interval of 'newNode'.
   */
  @tailrec private def insertNode(root: SplayNode, newNode: SplayNode) {
    if (newNode.intervalTo < root.intervalFrom) {
      val rootLeft = root.left
      if (?(rootLeft)) {
        insertNode(rootLeft, newNode)
      } else {
        root.left = newNode
      }
    } else if (newNode.intervalFrom > root.intervalTo) {
      val rootRight = root.right
      if (?(rootRight)) {
        insertNode(rootRight, newNode)
      } else {
        root.right = newNode
      }
    } else {
      throw new Exception(
        s"The new node interval from ${newNode.intervalFrom} to ${newNode.intervalTo} " +
          s"intersects with the interval ${root.intervalFrom} to ${root.intervalTo} of an existing node.")
    }
  }
  /**
   * Finds and returns the node that is responsible for the interval into
   * which i falls.
   */
  @tailrec private def find(node: SplayNode, i: Int): SplayNode = {
    if (node.intervalFrom > i) {
      find(node.left, i)
    } else if (node.intervalTo < i) {
      find(node.right, i)
    } else {
      node
    }
  }
  /**
   * Searches for the node that is responsible for key i, starting from node 'root'.
   * Splays the responsible node to the where 'root' is initially and returns it.
   *
   * Relies on the interval-partition invariant: because some node's interval
   * always contains i, every child dereferenced on the search path is non-null.
   */
  @tailrec private def splay(root: SplayNode, i: Int): SplayNode = {
    if (i < root.intervalFrom) {
      val rootLeft = root.left
      // We're going down to the left of the root.
      if (i < rootLeft.intervalFrom) {
        // We're going down left twice, rotate root.left.left up to be the new root, continue search from there.
        splay(rightRight(root, rootLeft, rootLeft.left), i)
      } else if (i > rootLeft.intervalTo) {
        // We're going down left and then right, rotate root.left.right up to be the new root, continue search from there.
        splay(leftRight(root, rootLeft, rootLeft.right), i)
      } else {
        // root.left is the new root, rotate it up.
        right(root, rootLeft)
      }
    } else if (i > root.intervalTo) {
      val rootRight = root.right
      // We're going down to the right of the root.
      if (i < rootRight.intervalFrom) {
        // We're going down right and then left, rotate root.right.left up to be the new root, continue search from there.
        splay(rightLeft(root, rootRight, rootRight.left), i)
      } else if (i > rootRight.intervalTo) {
        // We're going down right and then left, rotate root.right.right up to be the new root, continue search from there.
        splay(leftLeft(root, rootRight, rootRight.right), i)
      } else {
        // root.right is the new root, rotate it up.
        left(root, rootRight)
      }
    } else {
      // i falls into the interval of the root already. We're done.
      root
    }
  }
  // Double rotations, named by the pair of single rotations they compose.
  def leftRight(root: SplayNode, rootLeft: SplayNode, rootLeftRight: SplayNode): SplayNode = {
    right(root, left(rootLeft, rootLeftRight))
  }
  def rightLeft(root: SplayNode, rootRight: SplayNode, rootRightLeft: SplayNode): SplayNode = {
    left(root, right(rootRight, rootRightLeft))
  }
  def rightRight(root: SplayNode, rootLeft: SplayNode, rootLeftLeft: SplayNode): SplayNode = {
    right(root, right(rootLeft, rootLeftLeft))
  }
  def leftLeft(root: SplayNode, rootRight: SplayNode, rootRightRight: SplayNode): SplayNode = {
    left(root, left(rootRight, rootRightRight))
  }
  /**
   * Rotates 'rootRight' left in order to make it the new root, returns that new root.
   */
  def left(root: SplayNode, rootRight: SplayNode): SplayNode = {
    root.right = rootRight.left
    rootRight.left = root
    rootRight
  }
  /**
   * Rotates 'rootLeft' right in order to make it the new root, returns that new root.
   */
  def right(root: SplayNode, rootLeft: SplayNode): SplayNode = {
    root.left = rootLeft.right
    rootLeft.right = root
    rootLeft
  }
}
| danihegglin/DynDCO | src/main/scala/com/signalcollect/util/SplayIntSet.scala | Scala | apache-2.0 | 12,840 |
package org.openurp.edu.eams.web.helper
import java.util.Arrays
import org.apache.commons.collections.CollectionUtils
import org.beangle.commons.bean.transformers.PropertyTransformer
import org.beangle.commons.collection.Collections
import org.beangle.commons.collection.Order
import org.beangle.data.jpa.dao.OqlBuilder
import org.beangle.commons.entity.util.ValidEntityKeyPredicate
import org.beangle.commons.lang.Strings
import org.beangle.security.blueprint.Resource
import org.beangle.struts2.helper.Params
import org.beangle.struts2.helper.QueryHelper
import org.openurp.edu.eams.base.Building
import org.openurp.base.Room
import org.openurp.edu.base.Adminclass
import org.openurp.edu.base.Direction
import org.openurp.edu.base.Major
import org.openurp.edu.base.Teacher
/**
 * Builds and runs OQL queries for basic entities (admin classes, teachers,
 * rooms, buildings, majors, directions), applying request parameters,
 * paging, ordering, and — where a function-permission resource is present —
 * department/student-type restrictions.
 */
class BaseInfoSearchHelper extends SearchHelper {
  def searchAdminclass(): Iterable[Adminclass] = {
    entityDao.search(buildAdminclassQuery())
  }
  def searchTeacher(): Iterable[Teacher] = entityDao.search(buildTeacherQuery())
  def searchRoom(): Iterable[Room] = entityDao.search(buildRoomQuery())
  // Admin-class query: restricted by student types/departments when the
  // current function-permission resource exists.
  def buildAdminclassQuery(): OqlBuilder[Adminclass] = {
    val builder = OqlBuilder.from(classOf[Adminclass], "adminclass")
    QueryHelper.populateConditions(builder)
    val stdTypeId = Params.getLong("adminclass.stdType.id")
    val resourceName = getResourceName
    val resource = funcPermissionService.getResource(resourceName)
    if (null != resource) {
      builder.where("adminclass.stdType in (:stdTyps)", restrictionHelper.stdTypes)
      builder.where("adminclass.department in (:departments)", restrictionHelper.getDeparts)
    } else {
      // NOTE(review): empty branch — apparently a placeholder; stdTypeId is
      // validated but never used.
      if (ValidEntityKeyPredicate.Instance.apply(stdTypeId)) {
      }
    }
    // Tri-state filter: `enabled` presumably comes back boxed and may be null,
    // in which case neither branch matches and no validity filter is applied
    // — TODO confirm Params.getBoolean's null behaviour.
    val enabled = Params.getBoolean("enabled")
    if (true == enabled) {
      builder.where("adminclass.effectiveAt <= :now and (adminclass.invalidAt is null or adminclass.invalidAt >= :now)",
        new java.util.Date())
    } else if (false == enabled) {
      builder.where("adminclass.effectiveAt > :now or adminclass.invalidAt < :now", new java.util.Date())
    }
    builder.limit(QueryHelper.getPageLimit)
    var orderByPras = Params.get(Order.ORDER_STR)
    if (Strings.isEmpty(orderByPras)) {
      // Default ordering: newest grade first, then code.
      orderByPras = "adminclass.code"
      builder.orderBy(new Order("adminclass.grade", false))
      builder.orderBy(new Order("adminclass.code"))
    } else {
      builder.orderBy(orderByPras)
    }
    builder
  }
  def buildTeacherQuery(): OqlBuilder[Teacher] = {
    val builder = OqlBuilder.from(classOf[Teacher], "teacher")
    QueryHelper.populateConditions(builder)
    val resourceName = getResourceName
    val resource = funcPermissionService.getResource(resourceName)
    // NOTE(review): empty branch — restriction logic for teachers was never
    // implemented (compare buildAdminclassQuery).
    if (null != resource) {
    }
    builder.limit(QueryHelper.getPageLimit)
    var orderByPras = Params.get(Order.ORDER_STR)
    if (Strings.isEmpty(orderByPras)) {
      orderByPras = "teacher.code"
    }
    builder.orderBy(orderByPras)
    builder
  }
  // Room query: filtered either by explicitly requested department ids or,
  // absent those, by the caller's permitted departments. An empty permitted
  // set degenerates to the always-false condition "1=2".
  def buildRoomQuery(): OqlBuilder[Room] = {
    val builder = OqlBuilder.from(classOf[Room], "classroom")
    QueryHelper.populateConditions(builder)
    val departIdSeq = Params.get("roomDepartId")
    val resourceName = getResourceName
    val resource = funcPermissionService.getResource(resourceName)
    if (Strings.isEmpty(departIdSeq) && null != resource) {
      val departs = restrictionHelper.getDeparts
      if (!departs.isEmpty) {
        builder.where("exists(from classroom.departments department where department in (:departs))",
          departs)
      } else {
        builder.where("1=2")
      }
    } else {
      val departIds = Strings.splitToInt(departIdSeq)
      if (!org.beangle.commons.lang.Arrays.isEmpty(departIds)) {
        builder.where("exists(from classroom.departments department where department.id in (:departIds))",
          departIds)
      } else {
        builder.where("1=2")
      }
    }
    builder.limit(QueryHelper.getPageLimit)
    var orderByPras = Params.get(Order.ORDER_STR)
    if (Strings.isEmpty(orderByPras)) {
      orderByPras = "classroom.name"
    }
    builder.orderBy(orderByPras)
    builder
  }
  // Building query, always restricted to the caller's permitted departments.
  def buildBuildingQuery(): OqlBuilder[Building] = {
    val builder = OqlBuilder.from(classOf[Building], "building")
    builder.where("building.department in (:departs)", restrictionHelper.getDeparts)
    QueryHelper.populateConditions(builder)
    builder.limit(QueryHelper.getPageLimit)
    var orderByPras = Params.get(Order.ORDER_STR)
    if (Strings.isEmpty(orderByPras)) {
      orderByPras = "building.code"
    }
    builder.orderBy(orderByPras)
    builder
  }
  def buildMajorQuery(): OqlBuilder[Major] = {
    val builder = OqlBuilder.from(classOf[Major], "major")
    QueryHelper.populateConditions(builder)
    builder.limit(QueryHelper.getPageLimit)
    var orderByPras = Params.get(Order.ORDER_STR)
    if (Strings.isEmpty(orderByPras)) {
      orderByPras = "major.code"
    }
    builder.orderBy(orderByPras)
    builder
  }
  // Overload: additionally restricts majors to those linked to the education.
  def buildMajorQuery(educationId: java.lang.Long): OqlBuilder[Major] = {
    val builder = OqlBuilder.from(classOf[Major], "major")
    QueryHelper.populateConditions(builder)
    builder.limit(QueryHelper.getPageLimit)
    var orderByPras = Params.get(Order.ORDER_STR)
    if (Strings.isEmpty(orderByPras)) {
      orderByPras = "major.code"
    }
    if (null != educationId) {
      builder.where("exists (from major.educations edu where edu.id=:eduId)", educationId)
    }
    builder.orderBy(orderByPras)
    builder
  }
  def buildDirectionQuery(): OqlBuilder[Direction] = {
    val builder = OqlBuilder.from(classOf[Direction], "direction")
    QueryHelper.populateConditions(builder)
    builder.limit(QueryHelper.getPageLimit)
    var orderByPras = Params.get(Order.ORDER_STR)
    if (Strings.isEmpty(orderByPras)) {
      orderByPras = "direction.code"
    }
    builder.orderBy(orderByPras)
    builder
  }
  // Overload: additionally restricts directions to the given education.
  def buildDirectionQuery(educationId: java.lang.Long): OqlBuilder[Direction] = {
    val builder = OqlBuilder.from(classOf[Direction], "direction")
    QueryHelper.populateConditions(builder)
    builder.limit(QueryHelper.getPageLimit)
    var orderByPras = Params.get(Order.ORDER_STR)
    if (Strings.isEmpty(orderByPras)) {
      orderByPras = "direction.code"
    }
    if (null != educationId) {
      val hql = "exists (from direction.departs dd where dd.education.id = :educationId)"
      builder.where(hql, educationId)
    }
    builder.orderBy(orderByPras)
    builder
  }
}
| openurp/edu-eams-webapp | web/src/main/scala/org/openurp/edu/eams/web/helper/BaseInfoSearchHelper.scala | Scala | gpl-3.0 | 6,488 |
package utils
import java.security.MessageDigest
object Hashing {
  /**
   * Hashes `str` concatenated with the configured salt (application property
   * "hash.salt") using the JDK "SHA" algorithm (i.e. SHA-1) and returns the
   * digest as a lower-case hexadecimal string.
   *
   * Note: the previous implementation named its result `md5val`, but the
   * algorithm has always been SHA-1, not MD5; output is unchanged.
   */
  def hashSaltString(str: String): String = {
    val salted = str + AppConfig.getProp("hash.salt")
    // getBytes deliberately keeps the platform default charset so that
    // previously stored hashes continue to match.
    val digest = MessageDigest.getInstance("SHA").digest(salted.getBytes)
    // %02x pads every byte to two lower-case hex digits, matching the old
    // manual Integer.toHexString + '0'-padding loop exactly.
    digest.map(b => f"${b & 0xff}%02x").mkString
  }
}
| lstoll/twitter-chat | app/utils/Hashing.scala | Scala | mit | 686 |
package scala.meta.internal.bench
import java.nio.file._
import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
import org.openjdk.jmh.annotations.Mode._
import scala.meta.cli._
import scala.meta.io._
import scala.meta.internal.bench.Metacp._
import scala.meta.metacp._
import scala.meta.tests.metacp._
object Metacp {
  // Shared JMH fixtures: the classpaths are resolved once per benchmark fork,
  // keeping classpath resolution out of the measured conversion time.
  @State(Scope.Benchmark)
  class BenchmarkState extends FileFixtures {
    val jdk = Library.jdk.classpath()
    val scalaLibrary = Library.scalaLibrary.classpath()
  }
}
trait Metacp {
  /**
   * Runs a metacp conversion of `classpath` (resolving against
   * `dependencyClasspath`) into a fresh temporary cache directory,
   * reporting to stdout/stderr and failing loudly on error.
   */
  def runImpl(classpath: Classpath, dependencyClasspath: Classpath): Unit = {
    val cacheDir = AbsolutePath(Files.createTempDirectory("metacp_"))
    val settings = Settings()
      .withCacheDir(cacheDir)
      .withDependencyClasspath(dependencyClasspath)
      .withClasspath(classpath)
      .withScalaLibrarySynthetics(false)
    val reporter = Reporter().withOut(System.out).withErr(System.err)
    if (scala.meta.cli.Metacp.process(settings, reporter).isEmpty) {
      sys.error("conversion failed")
    }
  }
}
@BenchmarkMode(Array(SampleTime))
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 3, time = 10, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 3, time = 10, timeUnit = TimeUnit.SECONDS)
@Fork(value = 1, jvmArgs = Array("-Xms2G", "-Xmx2G"))
class MetacpJDK extends Metacp {
  // Measures conversion of the JDK classpath alone (no dependency classpath).
  @Benchmark
  def run(bs: BenchmarkState): Unit = {
    runImpl(bs.jdk, Classpath(Nil))
  }
}
@BenchmarkMode(Array(SampleTime))
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 3, time = 10, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 3, time = 10, timeUnit = TimeUnit.SECONDS)
@Fork(value = 1, jvmArgs = Array("-Xms2G", "-Xmx2G"))
class MetacpScalaLibrary extends Metacp {
  // Measures conversion of scala-library, resolved against the JDK classpath.
  @Benchmark
  def run(bs: BenchmarkState): Unit = {
    runImpl(bs.scalaLibrary, bs.jdk)
  }
}
| MasseGuillaume/scalameta | bench/suite/src/main/scala/scala/meta/internal/bench/Metacp.scala | Scala | bsd-3-clause | 1,858 |
package spinoco.fs2.cassandra.internal
import shapeless.labelled.FieldType
import shapeless.ops.record.Selector
import shapeless.{::, DepFn1, HList, HNil}
/**
* Type class supporting multiple record field selection.
*
* @author Miles Sabin
*/
/* moved from shapeless as the current shapeless version does not seem to work properly **/
// Out is the HList of the fields selected from record L by the keys in K.
@annotation.implicitNotFound(msg = "No fields ${K} in record ${L}")
trait SelectAll[L <: HList, K <: HList] extends DepFn1[L] with Serializable { type Out <: HList }
object SelectAll {
  // Summoner: materialises an instance with its Out type refined.
  def apply[L <: HList, K <: HList](implicit sa: SelectAll[L, K]): Aux[L, K, sa.Out] = sa
  type Aux[L <: HList, K <: HList, Out0 <: HList] = SelectAll[L, K] { type Out = Out0 }
  // Base case: selecting no keys yields HNil.
  implicit def hnilSelectAll[L <: HList]: Aux[L, HNil, HNil] =
    new SelectAll[L, HNil] {
      type Out = HNil
      def apply(l: L): Out = HNil
    }
  // Inductive case: select the head field via Selector, recurse on the tail keys.
  implicit def hconsSelectAll[L <: HList, K,V, KT <: HList]
    (implicit
      sh: Selector.Aux[L, K,V],
      st: SelectAll[L, KT]
    ): Aux[L, FieldType[K,V] :: KT, FieldType[K,V] :: st.Out] =
      new SelectAll[L, FieldType[K,V] :: KT] {
        type Out = FieldType[K,V] :: st.Out
        // Selector returns the value typed as V; re-tag it as FieldType[K,V]
        // (safe: FieldType is V with a phantom key tag).
        def apply(l: L): Out = sh(l).asInstanceOf[FieldType[K,V]] :: st(l)
      }
}
| Spinoco/fs2-cassandra | core/src/main/scala/spinoco/fs2/cassandra/internal/SelectAll.scala | Scala | mit | 1,221 |
package extruder.data
import cats.implicits._
import cats.laws.discipline.MonadErrorTests
import extruder.data.ValidationCatsInstances._
import org.scalatest.FunSuite
import org.typelevel.discipline.scalatest.Discipline
// Verifies that Validation's MonadError[_, Throwable] instance satisfies the
// cats MonadError laws via discipline-generated property tests.
class ValidationSuite extends FunSuite with Discipline {
  checkAll("Validation", MonadErrorTests[Validation, Throwable].monadError[Int, Int, Int])
}
| janstenpickle/extruder | core/src/test/scala/extruder/data/ValidationSuite.scala | Scala | mit | 372 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2015, Gary Keorkunian **
** **
\\* */
package squants.energy
import squants.{ Time, _ }
import squants.time._
/**
 * Represents the rate of change of [[squants.energy.Power]] over time
 *
 * @author garyKeorkunian
 * @since 0.1
 *
 * @param value value in [[squants.energy.WattsPerHour]]
 * @param unit the unit of measure the value is expressed in
 */
final class PowerRamp private (val value: Double, val unit: PowerRampUnit)
    extends Quantity[PowerRamp]
    with TimeDerivative[Power]
    with SecondTimeDerivative[Energy] {
  def dimension = PowerRamp
  // Integrating over one hour of a 1 W/h ramp yields 1 W.
  protected[squants] def timeIntegrated = Watts(toWattsPerHour)
  protected[squants] def time = Hours(1)
  // Integrating twice over time yields an Energy.
  def *(that: TimeSquared): Energy = this * that.time1 * that.time2
  def toWattsPerHour = to(WattsPerHour)
  // NOTE(review): the pluralised "Minutes" is inconsistent with the other
  // converter names; kept as-is for source compatibility.
  def toWattsPerMinutes = to(WattsPerMinute)
  def toKilowattsPerHour = to(KilowattsPerHour)
  def toKilowattsPerMinute = to(KilowattsPerMinute)
  def toMegawattsPerHour = to(MegawattsPerHour)
  def toGigawattsPerHour = to(GigawattsPerHour)
}
object PowerRamp extends Dimension[PowerRamp] {
  private[energy] def apply[A](n: A, unit: PowerRampUnit)(implicit num: Numeric[A]) = new PowerRamp(num.toDouble(n), unit)
  // Ramp implied by a power change over a time span, normalised to W/h.
  def apply(change: Power, time: Time): PowerRamp = apply(change.toWatts / time.toHours, WattsPerHour)
  // Parses a value (e.g. a "<number> <symbol>" string) into a PowerRamp.
  def apply(value: Any) = parse(value)
  def name = "PowerRamp"
  def primaryUnit = WattsPerHour
  def siUnit = WattsPerHour
  def units = Set(WattsPerHour, WattsPerMinute, KilowattsPerHour, KilowattsPerMinute, MegawattsPerHour, GigawattsPerHour)
}
trait PowerRampUnit extends UnitOfMeasure[PowerRamp] with UnitConverter {
  def apply[A](n: A)(implicit num: Numeric[A]) = PowerRamp(n, this)
}
// Units of measure. Conversion factors are relative to the primary unit W/h.
object WattsPerHour extends PowerRampUnit with PrimaryUnit with SiUnit {
  val symbol = "W/h"
}
object WattsPerMinute extends PowerRampUnit with SiUnit {
  val conversionFactor = WattsPerHour.conversionFactor / 60D
  val symbol = "W/m"
}
object KilowattsPerHour extends PowerRampUnit with SiUnit {
  val conversionFactor = MetricSystem.Kilo
  val symbol = "kW/h"
}
object KilowattsPerMinute extends PowerRampUnit with SiUnit {
  val conversionFactor = KilowattsPerHour.conversionFactor / 60D
  val symbol = "kW/m"
}
object MegawattsPerHour extends PowerRampUnit with SiUnit {
  val conversionFactor = MetricSystem.Mega
  val symbol = "MW/h"
}
object GigawattsPerHour extends PowerRampUnit with SiUnit {
  val conversionFactor = MetricSystem.Giga
  val symbol = "GW/h"
}
// DSL helpers: unit constants, numeric extension methods (e.g. `2.kWph`),
// string parsing, and a Numeric instance for aggregating PowerRamps.
object PowerRampConversions {
  lazy val wattPerHour = WattsPerHour(1)
  lazy val Wph = wattPerHour
  lazy val wattPerMinute = WattsPerMinute(1)
  lazy val Wpm = wattPerMinute
  lazy val kilowattPerHour = KilowattsPerHour(1)
  lazy val kWph = kilowattPerHour
  lazy val kilowattPerMinute = KilowattsPerMinute(1)
  lazy val kWpm = kilowattPerMinute
  lazy val megawattPerHour = MegawattsPerHour(1)
  lazy val MWph = megawattPerHour
  lazy val gigawattPerHour = GigawattsPerHour(1)
  lazy val GWph = gigawattPerHour
  implicit class PowerRampConversions[A](n: A)(implicit num: Numeric[A]) {
    def Wph = WattsPerHour(n)
    def Wpm = WattsPerMinute(n)
    def kWph = KilowattsPerHour(n)
    def kWpm = KilowattsPerMinute(n)
    def MWph = MegawattsPerHour(n)
    def GWph = GigawattsPerHour(n)
  }
  implicit class PowerRampStringConversion(s: String) {
    def toPowerRamp = PowerRamp(s)
  }
  implicit object PowerRampNumeric extends AbstractQuantityNumeric[PowerRamp](PowerRamp.primaryUnit)
}
| typelevel/squants | shared/src/main/scala/squants/energy/PowerRamp.scala | Scala | apache-2.0 | 3,895 |
package com.github.mdr.mash.ns.collections
import com.github.mdr.mash.functions._
import com.github.mdr.mash.inference._
import com.github.mdr.mash.ns.collections.FlatMapFunction.zipWithMashIndex
import com.github.mdr.mash.runtime.MashUnit
/**
 * `collections.each`: runs an action for every element of a sequence,
 * discarding results (returns MashUnit).
 */
object EachFunction extends MashFunction("collections.each") {
  object Params {
    val Action = Parameter(
      nameOpt = Some("action"),
      summaryOpt = Some("Function used to act on elements of the sequence"))
    val Sequence = Parameter(
      nameOpt = Some("sequence"),
      summaryOpt = Some("Sequence to run an action over"))
  }
  import Params._
  val params = ParameterModel(Action, Sequence)
  def call(boundParams: BoundParams): MashUnit = {
    val sequence = boundParams.validateSequence(Sequence)
    // Left: arity-1 action receives each element; Right: arity-2 action
    // additionally receives the element's index (via zipWithMashIndex).
    boundParams.validateFunction1Or2(Action) match {
      case Left(action) ⇒ sequence.foreach(action)
      case Right(action) ⇒ zipWithMashIndex(sequence).foreach(action.tupled)
    }
    MashUnit
  }
  override def typeInferenceStrategy = EachTypeInferenceStrategy
  override def summaryOpt = Some("Perform an action for each element in a sequence")
}
object EachTypeInferenceStrategy extends TypeInferenceStrategy {
  /**
   * The result type of `each` is always Unit. inferMappedType is invoked
   * purely for its side effect of inferring types inside the action lambda;
   * its result is deliberately discarded.
   */
  def inferTypes(inferencer: Inferencer, arguments: TypedArguments): Option[Type] = {
    import EachFunction.Params._
    val argBindings = EachFunction.params.bindTypes(arguments)
    val sequenceTypeOpt = argBindings.getType(Sequence)
    val actionExprOpt = argBindings.getArgument(Action)
    MapTypeInferenceStrategy.inferMappedType(inferencer, actionExprOpt, sequenceTypeOpt)
    Some(Unit)
  }
} | mdr/mash | src/main/scala/com/github/mdr/mash/ns/collections/EachFunction.scala | Scala | mit | 1,600 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.kafka010
import org.apache.kafka.clients.CommonClientConfigs
import org.apache.kafka.common.config.{SaslConfigs, SslConfigs}
import org.apache.kafka.common.security.auth.SecurityProtocol.SASL_SSL
import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.util.Utils.REDACTION_REPLACEMENT_TEXT
/**
 * Resolved per-cluster Kafka delegation-token configuration, assembled from
 * `spark.kafka.clusters.<identifier>.*` entries (see KafkaTokenSparkConf).
 * `specifiedKafkaParams` carries raw `...<identifier>.kafka.*` pass-through
 * properties.
 */
private[spark] case class KafkaTokenClusterConf(
    identifier: String,
    authBootstrapServers: String,
    targetServersRegex: String,
    securityProtocol: String,
    kerberosServiceName: String,
    trustStoreType: Option[String],
    trustStoreLocation: Option[String],
    trustStorePassword: Option[String],
    keyStoreType: Option[String],
    keyStoreLocation: Option[String],
    keyStorePassword: Option[String],
    keyPassword: Option[String],
    tokenMechanism: String,
    specifiedKafkaParams: Map[String, String]) {
  // Passwords and raw Kafka params are redacted so this is safe to log.
  override def toString: String = s"KafkaTokenClusterConf{" +
    s"identifier=$identifier, " +
    s"authBootstrapServers=$authBootstrapServers, " +
    s"targetServersRegex=$targetServersRegex, " +
    s"securityProtocol=$securityProtocol, " +
    s"kerberosServiceName=$kerberosServiceName, " +
    s"trustStoreType=$trustStoreType, " +
    s"trustStoreLocation=$trustStoreLocation, " +
    s"trustStorePassword=${trustStorePassword.map(_ => REDACTION_REPLACEMENT_TEXT)}, " +
    s"keyStoreType=$keyStoreType, " +
    s"keyStoreLocation=$keyStoreLocation, " +
    s"keyStorePassword=${keyStorePassword.map(_ => REDACTION_REPLACEMENT_TEXT)}, " +
    s"keyPassword=${keyPassword.map(_ => REDACTION_REPLACEMENT_TEXT)}, " +
    s"tokenMechanism=$tokenMechanism, " +
    s"specifiedKafkaParams=${KafkaRedactionUtil.redactParams(specifiedKafkaParams.toSeq)}}"
}
/**
 * Reads `spark.kafka.clusters.<identifier>.*` entries from SparkConf and
 * materialises them as KafkaTokenClusterConf instances, filling in defaults
 * where a key is absent.
 */
private [kafka010] object KafkaTokenSparkConf extends Logging {
  val CLUSTERS_CONFIG_PREFIX = "spark.kafka.clusters."
  val DEFAULT_TARGET_SERVERS_REGEX = ".*"
  val DEFAULT_SASL_KERBEROS_SERVICE_NAME = "kafka"
  val DEFAULT_SECURITY_PROTOCOL_CONFIG = SASL_SSL.name
  val DEFAULT_SASL_TOKEN_MECHANISM = "SCRAM-SHA-512"
  // Builds the config for one cluster identifier. The auth bootstrap servers
  // key is the only mandatory entry; everything else falls back to a default
  // or None. `...<identifier>.kafka.*` entries are collected verbatim.
  def getClusterConfig(sparkConf: SparkConf, identifier: String): KafkaTokenClusterConf = {
    val configPrefix = s"$CLUSTERS_CONFIG_PREFIX$identifier."
    val sparkClusterConf = sparkConf.getAllWithPrefix(configPrefix).toMap
    val configKafkaPrefix = s"${configPrefix}kafka."
    val sparkClusterKafkaConf = sparkConf.getAllWithPrefix(configKafkaPrefix).toMap
    val result = KafkaTokenClusterConf(
      identifier,
      sparkClusterConf
        .getOrElse(s"auth.${CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG}",
          throw new NoSuchElementException(
            s"${configPrefix}auth.${CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG}")),
      sparkClusterConf.getOrElse(s"target.${CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG}.regex",
        KafkaTokenSparkConf.DEFAULT_TARGET_SERVERS_REGEX),
      sparkClusterConf.getOrElse(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG,
        DEFAULT_SECURITY_PROTOCOL_CONFIG),
      sparkClusterConf.getOrElse(SaslConfigs.SASL_KERBEROS_SERVICE_NAME,
        KafkaTokenSparkConf.DEFAULT_SASL_KERBEROS_SERVICE_NAME),
      sparkClusterConf.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG),
      sparkClusterConf.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG),
      sparkClusterConf.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG),
      sparkClusterConf.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG),
      sparkClusterConf.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG),
      sparkClusterConf.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG),
      sparkClusterConf.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG),
      sparkClusterConf.getOrElse("sasl.token.mechanism",
        KafkaTokenSparkConf.DEFAULT_SASL_TOKEN_MECHANISM),
      sparkClusterKafkaConf
    )
    // Safe to log: KafkaTokenClusterConf.toString redacts secrets.
    logDebug(s"getClusterConfig($identifier): $result")
    result
  }
  // Collects the distinct cluster identifiers (the first dot-separated
  // segment of each key under the clusters prefix) and resolves each one.
  def getAllClusterConfigs(sparkConf: SparkConf): Set[KafkaTokenClusterConf] = {
    sparkConf.getAllWithPrefix(KafkaTokenSparkConf.CLUSTERS_CONFIG_PREFIX).toMap.keySet
      .flatMap { k =>
        val split = k.split('.')
        if (split.length > 0 && split(0).nonEmpty) {
          Some(split(0))
        } else {
          None
        }
      }.map(getClusterConfig(sparkConf, _))
  }
}
| ueshin/apache-spark | external/kafka-0-10-token-provider/src/main/scala/org/apache/spark/kafka010/KafkaTokenSparkConf.scala | Scala | apache-2.0 | 5,043 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.util.regex.{MatchResult, Pattern}
import org.apache.commons.lang3.StringEscapeUtils
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.util.StringUtils
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/**
 * Base trait shared by LIKE / RLIKE style expressions: both operands are strings
 * and the result is a boolean telling whether the left operand matches the
 * pattern given by the right operand.
 */
trait StringRegexExpression extends ImplicitCastInputTypes {
  self: BinaryExpression =>
  /** Translates the raw pattern string into a Java regex string (e.g. LIKE escaping). */
  def escape(v: String): String
  /** Returns true if `str` matches the already-compiled `regex`. */
  def matches(regex: Pattern, str: String): Boolean
  override def dataType: DataType = BooleanType
  override def inputTypes: Seq[DataType] = Seq(StringType, StringType)
  // Try to cache the compiled pattern when the right operand is a string literal;
  // for a non-literal pattern the regex is recompiled per row in nullSafeEval.
  private lazy val cache: Pattern = right match {
    case x @ Literal(value: String, StringType) => compile(value)
    case _ => null
  }
  protected def compile(str: String): Pattern = if (str == null) {
    null
  } else {
    // Let it raise exception if couldn't compile the regex string
    // (a malformed pattern should fail fast rather than be silently ignored)
    Pattern.compile(escape(str))
  }
  // Use the cached literal pattern if available, otherwise compile on the fly.
  protected def pattern(str: String) = if (cache == null) compile(str) else cache
  protected override def nullSafeEval(input1: Any, input2: Any): Any = {
    val regex = pattern(input2.asInstanceOf[UTF8String].toString)
    if(regex == null) {
      null
    } else {
      matches(regex, input1.asInstanceOf[UTF8String].toString)
    }
  }
}
/**
 * Simple RegEx pattern matching function.
 *
 * Implements SQL LIKE: the pattern is translated to a Java regex via
 * [[StringUtils.escapeLikeRegex]] and must match the *whole* input string
 * (Matcher.matches), unlike RLIKE which only requires a substring match.
 */
case class Like(left: Expression, right: Expression)
  extends BinaryExpression with StringRegexExpression with CodegenFallback {
  override def escape(v: String): String = StringUtils.escapeLikeRegex(v)
  override def matches(regex: Pattern, str: String): Boolean = regex.matcher(str).matches()
  override def toString: String = s"$left LIKE $right"
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val patternClass = classOf[Pattern].getName
    // stripSuffix drops the trailing "$" of the Scala module class name so the
    // generated Java code can call the static forwarder on the companion object.
    val escapeFunc = StringUtils.getClass.getName.stripSuffix("$") + ".escapeLikeRegex"
    val pattern = ctx.freshName("pattern")
    if (right.foldable) {
      // Pattern is a literal: compile it once into a mutable state field.
      val rVal = right.eval()
      if (rVal != null) {
        val regexStr =
          StringEscapeUtils.escapeJava(escape(rVal.asInstanceOf[UTF8String].toString()))
        ctx.addMutableState(patternClass, pattern,
          s"""$pattern = ${patternClass}.compile("$regexStr");""")
        // We don't use nullSafeCodeGen here because we don't want to re-evaluate right again.
        val eval = left.gen(ctx)
        s"""
          ${eval.code}
          boolean ${ev.isNull} = ${eval.isNull};
          ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
          if (!${ev.isNull}) {
            ${ev.primitive} = $pattern.matcher(${eval.primitive}.toString()).matches();
          }
        """
      } else {
        // Literal pattern is null: LIKE result is always null.
        s"""
          boolean ${ev.isNull} = true;
          ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
        """
      }
    } else {
      // Non-literal pattern: escape and compile per row.
      nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
        s"""
          String rightStr = ${eval2}.toString();
          ${patternClass} $pattern = ${patternClass}.compile($escapeFunc(rightStr));
          ${ev.primitive} = $pattern.matcher(${eval1}.toString()).matches();
        """
      })
    }
  }
}
/**
 * SQL RLIKE: the right operand is used as a raw Java regex (no escaping) and it
 * only needs to match *some substring* of the input (Matcher.find), in contrast
 * to LIKE, which requires a full match.
 */
case class RLike(left: Expression, right: Expression)
  extends BinaryExpression with StringRegexExpression with CodegenFallback {
  override def escape(v: String): String = v
  override def matches(regex: Pattern, str: String): Boolean = regex.matcher(str).find(0)
  override def toString: String = s"$left RLIKE $right"
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val patternClass = classOf[Pattern].getName
    val pattern = ctx.freshName("pattern")
    if (right.foldable) {
      // Pattern is a literal: compile it once into a mutable state field.
      val rVal = right.eval()
      if (rVal != null) {
        val regexStr =
          StringEscapeUtils.escapeJava(rVal.asInstanceOf[UTF8String].toString())
        ctx.addMutableState(patternClass, pattern,
          s"""$pattern = ${patternClass}.compile("$regexStr");""")
        // We don't use nullSafeCodeGen here because we don't want to re-evaluate right again.
        val eval = left.gen(ctx)
        s"""
          ${eval.code}
          boolean ${ev.isNull} = ${eval.isNull};
          ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
          if (!${ev.isNull}) {
            ${ev.primitive} = $pattern.matcher(${eval.primitive}.toString()).find(0);
          }
        """
      } else {
        // Literal pattern is null: RLIKE result is always null.
        s"""
          boolean ${ev.isNull} = true;
          ${ctx.javaType(dataType)} ${ev.primitive} = ${ctx.defaultValue(dataType)};
        """
      }
    } else {
      // Non-literal pattern: compile per row.
      nullSafeCodeGen(ctx, ev, (eval1, eval2) => {
        s"""
          String rightStr = ${eval2}.toString();
          ${patternClass} $pattern = ${patternClass}.compile(rightStr);
          ${ev.primitive} = $pattern.matcher(${eval1}.toString()).find(0);
        """
      })
    }
  }
}
/**
 * Splits str around pat (pattern is a regular expression).
 *
 * The split limit is -1, so trailing empty strings are preserved
 * (matching java.util.regex.Pattern.split semantics with a negative limit).
 */
case class StringSplit(str: Expression, pattern: Expression)
  extends BinaryExpression with ImplicitCastInputTypes {
  override def left: Expression = str
  override def right: Expression = pattern
  override def dataType: DataType = ArrayType(StringType)
  override def inputTypes: Seq[DataType] = Seq(StringType, StringType)
  override def nullSafeEval(string: Any, regex: Any): Any = {
    val strings = string.asInstanceOf[UTF8String].split(regex.asInstanceOf[UTF8String], -1)
    new GenericArrayData(strings.asInstanceOf[Array[Any]])
  }
  override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val arrayClass = classOf[GenericArrayData].getName
    nullSafeCodeGen(ctx, ev, (str, pattern) =>
      // Array in java is covariant, so we don't need to cast UTF8String[] to Object[].
      s"""${ev.primitive} = new $arrayClass($str.split($pattern, -1));""")
  }
  override def prettyName: String = "split"
}
/**
 * Replace all substrings of str that match regexp with rep.
 *
 * The compiled pattern and the replacement string are cached across rows and
 * only refreshed when the corresponding input value changes, which makes
 * repeated evaluation with constant patterns cheap.
 *
 * NOTE: this expression is not THREAD-SAFE, as it has some internal mutable status.
 */
case class RegExpReplace(subject: Expression, regexp: Expression, rep: Expression)
  extends TernaryExpression with ImplicitCastInputTypes {
  // last regex in string, we will update the pattern iff regexp value changed.
  @transient private var lastRegex: UTF8String = _
  // last compiled regex pattern, cached for performance
  @transient private var pattern: Pattern = _
  // last replacement string, cached to avoid a UTF8String => java.lang.String
  // conversion on every row
  @transient private var lastReplacement: String = _
  @transient private var lastReplacementInUTF8: UTF8String = _
  // result buffer write by Matcher (reused across rows; another reason this is
  // not thread-safe)
  @transient private val result: StringBuffer = new StringBuffer
  override def nullSafeEval(s: Any, p: Any, r: Any): Any = {
    if (!p.equals(lastRegex)) {
      // regex value changed
      lastRegex = p.asInstanceOf[UTF8String].clone()
      pattern = Pattern.compile(lastRegex.toString)
    }
    if (!r.equals(lastReplacementInUTF8)) {
      // replacement string changed
      lastReplacementInUTF8 = r.asInstanceOf[UTF8String].clone()
      lastReplacement = lastReplacementInUTF8.toString
    }
    val m = pattern.matcher(s.toString())
    result.delete(0, result.length())
    // Standard appendReplacement/appendTail loop: copies non-matching segments
    // and substitutes each match with the (possibly group-referencing) replacement.
    while (m.find) {
      m.appendReplacement(result, lastReplacement)
    }
    m.appendTail(result)
    UTF8String.fromString(result.toString)
  }
  override def dataType: DataType = StringType
  override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType, StringType)
  override def children: Seq[Expression] = subject :: regexp :: rep :: Nil
  override def prettyName: String = "regexp_replace"
  // Code generation mirrors nullSafeEval: the same caching fields are emitted
  // as mutable state on the generated class.
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val termLastRegex = ctx.freshName("lastRegex")
    val termPattern = ctx.freshName("pattern")
    val termLastReplacement = ctx.freshName("lastReplacement")
    val termLastReplacementInUTF8 = ctx.freshName("lastReplacementInUTF8")
    val termResult = ctx.freshName("result")
    val classNamePattern = classOf[Pattern].getCanonicalName
    val classNameStringBuffer = classOf[java.lang.StringBuffer].getCanonicalName
    ctx.addMutableState("UTF8String", termLastRegex, s"${termLastRegex} = null;")
    ctx.addMutableState(classNamePattern, termPattern, s"${termPattern} = null;")
    ctx.addMutableState("String", termLastReplacement, s"${termLastReplacement} = null;")
    ctx.addMutableState("UTF8String",
      termLastReplacementInUTF8, s"${termLastReplacementInUTF8} = null;")
    ctx.addMutableState(classNameStringBuffer,
      termResult, s"${termResult} = new $classNameStringBuffer();")
    nullSafeCodeGen(ctx, ev, (subject, regexp, rep) => {
    s"""
      if (!$regexp.equals(${termLastRegex})) {
        // regex value changed
        ${termLastRegex} = $regexp.clone();
        ${termPattern} = ${classNamePattern}.compile(${termLastRegex}.toString());
      }
      if (!$rep.equals(${termLastReplacementInUTF8})) {
        // replacement string changed
        ${termLastReplacementInUTF8} = $rep.clone();
        ${termLastReplacement} = ${termLastReplacementInUTF8}.toString();
      }
      ${termResult}.delete(0, ${termResult}.length());
      java.util.regex.Matcher m = ${termPattern}.matcher($subject.toString());
      while (m.find()) {
        m.appendReplacement(${termResult}, ${termLastReplacement});
      }
      m.appendTail(${termResult});
      ${ev.primitive} = UTF8String.fromString(${termResult}.toString());
      ${ev.isNull} = false;
    """
    })
  }
}
/**
 * Extract a specific(idx) group identified by a Java regex.
 *
 * Returns the idx-th capture group of the first match, or the empty string if
 * the regex does not match at all. The compiled pattern is cached across rows
 * and refreshed only when the regexp value changes.
 *
 * NOTE: this expression is not THREAD-SAFE, as it has some internal mutable status.
 */
case class RegExpExtract(subject: Expression, regexp: Expression, idx: Expression)
  extends TernaryExpression with ImplicitCastInputTypes {
  // Convenience constructor defaulting to capture group 1.
  def this(s: Expression, r: Expression) = this(s, r, Literal(1))
  // last regex in string, we will update the pattern iff regexp value changed.
  @transient private var lastRegex: UTF8String = _
  // last compiled regex pattern, cached for performance
  @transient private var pattern: Pattern = _
  override def nullSafeEval(s: Any, p: Any, r: Any): Any = {
    if (!p.equals(lastRegex)) {
      // regex value changed
      lastRegex = p.asInstanceOf[UTF8String].clone()
      pattern = Pattern.compile(lastRegex.toString)
    }
    val m = pattern.matcher(s.toString)
    if (m.find) {
      val mr: MatchResult = m.toMatchResult
      // NOTE(review): an out-of-range group index throws IndexOutOfBoundsException
      // here rather than returning null/empty — presumably intentional; verify.
      UTF8String.fromString(mr.group(r.asInstanceOf[Int]))
    } else {
      UTF8String.EMPTY_UTF8
    }
  }
  override def dataType: DataType = StringType
  override def inputTypes: Seq[AbstractDataType] = Seq(StringType, StringType, IntegerType)
  override def children: Seq[Expression] = subject :: regexp :: idx :: Nil
  override def prettyName: String = "regexp_extract"
  // Code generation mirrors nullSafeEval, emitting the caching fields as
  // mutable state on the generated class.
  override protected def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
    val termLastRegex = ctx.freshName("lastRegex")
    val termPattern = ctx.freshName("pattern")
    val classNamePattern = classOf[Pattern].getCanonicalName
    ctx.addMutableState("UTF8String", termLastRegex, s"${termLastRegex} = null;")
    ctx.addMutableState(classNamePattern, termPattern, s"${termPattern} = null;")
    nullSafeCodeGen(ctx, ev, (subject, regexp, idx) => {
      s"""
      if (!$regexp.equals(${termLastRegex})) {
        // regex value changed
        ${termLastRegex} = $regexp.clone();
        ${termPattern} = ${classNamePattern}.compile(${termLastRegex}.toString());
      }
      java.util.regex.Matcher m =
        ${termPattern}.matcher($subject.toString());
      if (m.find()) {
        java.util.regex.MatchResult mr = m.toMatchResult();
        ${ev.primitive} = UTF8String.fromString(mr.group($idx));
        ${ev.isNull} = false;
      } else {
        ${ev.primitive} = UTF8String.EMPTY_UTF8;
        ${ev.isNull} = false;
      }"""
    })
  }
}
| tophua/spark1.52 | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala | Scala | apache-2.0 | 13,743 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala
package tools.nsc
package ast
import scala.language.implicitConversions
import java.awt.{List => _, _}
import java.awt.event._
import java.io.StringWriter
import javax.swing._
import javax.swing.event.TreeModelListener
import javax.swing.tree._
import scala.concurrent.Lock
import scala.text._
/**
* Tree browsers can show the AST in a graphical and interactive
* way, useful for debugging and understanding.
*
* @author Iulian Dragos
* @version 1.0
*/
abstract class TreeBrowsers {
  val global: Global
  import global._
  import nme.EMPTY
  // Uniform padding (pixels) applied around the Swing components.
  val borderSize = 10
  // Factory for the interactive AST browser window.
  def create(): SwingBrowser = new SwingBrowser()
  /** Pseudo tree class, so that all JTree nodes are treated uniformly */
  case class ProgramTree(units: List[UnitTree]) extends Tree {
    override def toString: String = "Program"
  }
  /** Pseudo tree class, so that all JTree nodes are treated uniformly */
  case class UnitTree(unit: CompilationUnit) extends Tree {
    override def toString: String = unit.toString
  }
  /**
   * Java Swing pretty printer for Scala abstract syntax trees.
   */
  class SwingBrowser {
    def browse(pName: String, units: Iterator[CompilationUnit]): Unit =
      browse(pName, units.toList)
    /** print the whole program */
    def browse(pName: String, units: List[CompilationUnit]): Unit = {
      // Wrap every compilation unit in a UnitTree node under a single ProgramTree root.
      var unitList: List[UnitTree] = Nil
      for (i <- units)
        unitList = UnitTree(i) :: unitList
      val tm = new ASTTreeModel(ProgramTree(unitList))
      val frame = new BrowserFrame(pName)
      frame.setTreeModel(tm)
      // The lock blocks the compiler thread until the user closes the window.
      val lock = new Lock()
      frame.createFrame(lock)
      // wait for the frame to be closed
      lock.acquire()
    }
  }
  /** Tree model for abstract syntax trees */
  class ASTTreeModel(val program: Tree) extends TreeModel {
    var listeners: List[TreeModelListener] = Nil
    /** Add a listener to this tree */
    def addTreeModelListener(l: TreeModelListener): Unit =
      listeners = l :: listeners
    /** Return the index'th child of parent */
    def getChild(parent: AnyRef, index: Int): AnyRef =
      packChildren(parent)(index)
    /** Return the number of children this 'parent' has */
    def getChildCount(parent: AnyRef): Int =
      packChildren(parent).length
    /** Return the index of the given child */
    def getIndexOfChild(parent: AnyRef, child: AnyRef): Int =
      packChildren(parent) indexOf child
    /** Return the root node */
    def getRoot(): AnyRef = program
    /** Test whether the given node is a leaf */
    def isLeaf(node: AnyRef): Boolean = packChildren(node).isEmpty
    def removeTreeModelListener(l: TreeModelListener): Unit =
      listeners = listeners filterNot (_ == l)
    /** we ignore this message for now */
    def valueForPathChanged(path: TreePath, newValue: AnyRef) = ()
    /**
     * Return a list of children for the given node.
     */
    def packChildren(t: AnyRef): List[AnyRef] = TreeInfo.children(t.asInstanceOf[Tree])
  }
  /**
   * A window that can host the Tree widget and provide methods for
   * displaying information
   *
   * @author Iulian Dragos
   * @version 1.0
   */
  class BrowserFrame(phaseName: String = "unknown") {
    // Prefer Nimbus; fall back to the cross-platform look and feel if unavailable.
    try {
      UIManager.setLookAndFeel("com.sun.java.swing.plaf.nimbus.NimbusLookAndFeel")
    }
    catch {
      case _: Throwable => UIManager.setLookAndFeel(UIManager.getCrossPlatformLookAndFeelClassName())
    }
    val frame = new JFrame("Scala AST after " + phaseName + " phase")
    frame.setJMenuBar(new ASTMenuBar())
    // Layout: top-left holds the JTree, top-right the info panel,
    // bottom holds the pretty-printed text of the selected tree.
    val topLeftPane = new JPanel(new BorderLayout())
    val topRightPane = new JPanel(new BorderLayout())
    val bottomPane = new JPanel(new BorderLayout())
    var splitPane: JSplitPane = _
    var treeModel: ASTTreeModel = _
    var jTree: JTree = _
    val textArea: JTextArea = new JTextArea(30, 120)
    textArea.setBorder(BorderFactory.createEmptyBorder(borderSize, borderSize, borderSize, borderSize))
    val infoPanel = new TextInfoPanel()
    // Depth-first walk expanding or collapsing every path under the root.
    private def setExpansionState(root: JTree, expand: Boolean): Unit = {
      def _setExpansionState(root: JTree, path: TreePath): Unit = {
        val last = path.getLastPathComponent
        for (i <- 0 until root.getModel.getChildCount(last)) {
          val child = root.getModel.getChild(last, i)
          val childPath = path pathByAddingChild child
          _setExpansionState(root, childPath)
        }
        if (expand) {jTree expandPath path}
        else {jTree collapsePath path}
      }
      _setExpansionState(root, new TreePath(root.getModel.getRoot))
    }
    def expandAll(subtree: JTree) = setExpansionState(subtree, expand = true)
    def collapseAll(subtree: JTree) = setExpansionState(subtree, expand = false)
    /** Create a frame that displays the AST.
     *
     * @param lock The lock is used in order to stop the compilation thread
     * until the user is done with the tree inspection. Swing creates its
     * own threads when the frame is packed, and therefore execution
     * would continue. However, this is not what we want, as the tree and
     * especially symbols/types would change while the window is visible.
     */
    def createFrame(lock: Lock): Unit = {
      lock.acquire() // keep the lock until the user closes the window
      frame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE)
      frame.addWindowListener(new WindowAdapter() {
        /** Release the lock, so compilation may resume after the window is closed. */
        override def windowClosed(e: WindowEvent): Unit = lock.release()
      })
      jTree = new JTree(treeModel) {
        /** Return the string for a tree node. */
        override def convertValueToText(value: Any, sel: Boolean,
                                        exp: Boolean, leaf: Boolean,
                                        row: Int, hasFocus: Boolean) = {
          val (cls, name) = TreeInfo.treeName(value.asInstanceOf[Tree])
          if (name != EMPTY)
            cls + "[" + name + "]"
          else
            cls
        }
      }
      // Selecting a node shows its pretty-printed form below and its
      // symbol/type details in the info panel.
      jTree.addTreeSelectionListener(new javax.swing.event.TreeSelectionListener() {
        def valueChanged(e: javax.swing.event.TreeSelectionEvent): Unit = {
          textArea.setText(e.getPath().getLastPathComponent().toString)
          infoPanel.update(e.getPath().getLastPathComponent())
        }
      })
      val topSplitPane = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT, topLeftPane, topRightPane)
      topSplitPane.setResizeWeight(0.5)
      jTree.setBorder(
        BorderFactory.createEmptyBorder(borderSize, borderSize, borderSize, borderSize))
      topLeftPane.add(new JScrollPane(jTree), BorderLayout.CENTER)
      topRightPane.add(new JScrollPane(infoPanel), BorderLayout.CENTER)
      bottomPane.add(new JScrollPane(textArea), BorderLayout.CENTER)
      textArea.setFont(new Font("monospaced", Font.PLAIN, 14))
      textArea.setEditable(false)
      splitPane = new JSplitPane(JSplitPane.VERTICAL_SPLIT, topSplitPane, bottomPane)
      frame.getContentPane().add(splitPane)
      frame.pack()
      frame.setVisible(true)
    }
    // Menu bar with File (cancel compilation / exit) and View (expand/collapse) menus.
    class ASTMenuBar extends JMenuBar {
      val menuKey = Toolkit.getDefaultToolkit().getMenuShortcutKeyMask()
      val shiftKey = InputEvent.SHIFT_MASK
      val jmFile = new JMenu("File")
      // val jmiSaveImage = new JMenuItem(
      //  new AbstractAction("Save Tree Image") {
      //    putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_S, menuKey, false))
      //    override def actionPerformed(e: ActionEvent) {
      //      //TODO
      //    }
      //  }
      // )
      // jmFile add jmiSaveImage
      // Closing is done by posting a WINDOW_CLOSING event so the window
      // listener above releases the compiler lock.
      def closeWindow() = frame.getToolkit().getSystemEventQueue().postEvent(
        new WindowEvent(frame, WindowEvent.WINDOW_CLOSING))
      val jmiCancel = new JMenuItem (
        new AbstractAction("Cancel Compilation") {
          putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_Q, menuKey + shiftKey, false))
          override def actionPerformed(e: ActionEvent) {
            closeWindow()
            global.currentRun.cancel()
          }
        }
      )
      jmFile add jmiCancel
      val jmiExit = new JMenuItem (
        new AbstractAction("Exit") {
          putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_Q, menuKey, false))
          override def actionPerformed(e: ActionEvent) = closeWindow()
        }
      )
      jmFile add jmiExit
      add(jmFile)
      val jmView = new JMenu("View")
      val jmiExpand = new JMenuItem(
        new AbstractAction("Expand All Nodes") {
          putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_E, menuKey, false))
          override def actionPerformed(e: ActionEvent) {
            expandAll(jTree)
          }
        }
      )
      jmView add jmiExpand
      val jmiCollapse = new JMenuItem(
        new AbstractAction("Collapse All Nodes") {
          putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(KeyEvent.VK_L, menuKey, false))
          override def actionPerformed(e: ActionEvent) {
            collapseAll(jTree)
          }
        }
      )
      jmView add jmiCollapse
      add(jmView)
    }
    def setTreeModel(tm: ASTTreeModel): Unit = treeModel = tm
  }
  /**
   * Present detailed information about the selected tree node.
   */
  class TextInfoPanel extends JTextArea(20, 50) {
    setBorder(BorderFactory.createEmptyBorder(borderSize, borderSize, borderSize, borderSize))
    setEditable(false)
    setFont(new Font("monospaced", Font.PLAIN, 12))
    // Renders id/position/symbol/type details of the selected tree node.
    // ProgramTree/UnitTree pseudo nodes intentionally show nothing.
    def update(v: AnyRef): Unit = {
      val t: Tree = v.asInstanceOf[Tree]
      val str = new StringBuilder()
      var buf = new StringWriter()
      t match {
        case ProgramTree(_) => ()
        case UnitTree(_) => ()
        case _ =>
          str.append("tree.id: ").append(t.id)
          str.append("\\ntree.pos: ").append(t.pos)
          str.append("\\nSymbol: ").append(TreeInfo.symbolText(t))
          str.append("\\nSymbol owner: ").append(
            if ((t.symbol ne null) && t.symbol != NoSymbol)
              t.symbol.owner.toString
            else
              "NoSymbol has no owner")
          if ((t.symbol ne null) && t.symbol.isType) {
            str.append("\\ntermSymbol: " + t.symbol.tpe.termSymbol
              + "\\ntypeSymbol: " + t.symbol.tpe.typeSymbol)
            if (t.symbol.isTypeSkolem)
              str.append("\\nSkolem of: " + t.symbol.deSkolemize)
          }
          str.append("\\nSymbol tpe: ")
          if (t.symbol ne null) {
            str.append(t.symbol.tpe).append("\\n")
            buf = new StringWriter()
            // Pretty-print the type, wrapped to the panel's current width.
            TypePrinter.toDocument(t.symbol.tpe).format(getWidth() / getColumnWidth(), buf)
            str.append(buf.toString)
          }
          str.append("\\n\\nSymbol info: \\n")
          TreeInfo.symbolTypeDoc(t).format(getWidth() / getColumnWidth(), buf)
          str.append(buf.toString)
          str.append("\\n\\nSymbol Attributes: \\n").append(TreeInfo.symbolAttributes(t))
          str.append("\\ntree.tpe: ")
          if (t.tpe ne null) {
            str.append(t.tpe.toString).append("\\n")
            buf = new StringWriter()
            TypePrinter.toDocument(t.tpe).format(getWidth() / getColumnWidth(), buf)
            str.append(buf.toString)
          }
      }
      setText(str.toString)
    }
  }
  /** Computes different information about a tree node. It
   * is used as central place to do all pattern matching against
   * Tree.
   */
  object TreeInfo {
    /** Return the case class name and the Name, if the node defines one */
    def treeName(t: Tree): (String, Name) = ((t.productPrefix, t match {
      case UnitTree(unit) => newTermName("" + unit)
      case Super(_, mix) => newTermName("mix: " + mix)
      case This(qual) => qual
      case Select(_, selector) => selector
      case Ident(name) => name
      case SelectFromTypeTree(_, selector) => selector
      case x: DefTree => x.name
      case _ => EMPTY
    }))
    /** Return a list of children for the given tree node */
    def children(t: Tree): List[Tree] = t match {
      case ProgramTree(units) =>
        units
      case UnitTree(unit) =>
        List(unit.body)
      case DocDef(comment, definition) =>
        List(definition)
      case ClassDef(mods, name, tparams, impl) => {
        var children: List[Tree] = List()
        children = tparams ::: children
        mods.annotations ::: impl :: children
      }
      case PackageDef(pid, stats) =>
        stats
      case ModuleDef(mods, name, impl) =>
        mods.annotations ::: List(impl)
      case ValDef(mods, name, tpe, rhs) =>
        mods.annotations ::: List(tpe, rhs)
      case DefDef(mods, name, tparams, vparams, tpe, rhs) =>
        mods.annotations ::: tpe :: rhs :: vparams.flatten ::: tparams
      case TypeDef(mods, name, tparams, rhs) =>
        mods.annotations ::: rhs :: tparams // @M: was List(rhs, lobound)
      case Import(expr, selectors) =>
        List(expr)
      case CaseDef(pat, guard, body) =>
        List(pat, guard, body)
      case Template(parents, self, body) =>
        parents ::: List(self) ::: body
      case LabelDef(name, params, rhs) =>
        params ::: List(rhs)
      case Block(stats, expr) =>
        stats ::: List(expr)
      case Alternative(trees) =>
        trees
      case Bind(name, rhs) =>
        List(rhs)
      case UnApply(fun, args) =>
        fun :: args
      case Match(selector, cases) =>
        selector :: cases
      case Function(vparams, body) =>
        vparams ::: List(body)
      case Assign(lhs, rhs) =>
        List(lhs, rhs)
      case If(cond, thenp, elsep) =>
        List(cond, thenp, elsep)
      case Return(expr) =>
        List(expr)
      case Throw(expr) =>
        List(expr)
      case New(init) =>
        List(init)
      case Typed(expr, tpe) =>
        List(expr, tpe)
      case TypeApply(fun, args) =>
        List(fun) ::: args
      case Apply(fun, args) =>
        List(fun) ::: args
      case ApplyDynamic(qual, args) =>
        List(qual) ::: args
      case Super(qualif, mix) =>
        List(qualif)
      case This(qualif) =>
        Nil
      case Select(qualif, selector) =>
        List(qualif)
      case Ident(name) =>
        Nil
      case Literal(value) =>
        Nil
      case TypeTree() =>
        Nil
      case Annotated(annot, arg) =>
        annot :: List(arg)
      case SingletonTypeTree(ref) =>
        List(ref)
      case SelectFromTypeTree(qualif, selector) =>
        List(qualif)
      case CompoundTypeTree(templ) =>
        List(templ)
      case AppliedTypeTree(tpe, args) =>
        tpe :: args
      case TypeBoundsTree(lo, hi) =>
        List(lo, hi)
      case ExistentialTypeTree(tpt, whereClauses) =>
        tpt :: whereClauses
      case Try(block, catches, finalizer) =>
        block :: catches ::: List(finalizer)
      case ArrayValue(elemtpt, elems) =>
        elemtpt :: elems
      case EmptyTree =>
        Nil
      case Star(t) =>
        List(t)
    }
    /** Return a textual representation of this t's symbol */
    def symbolText(t: Tree): String = {
      val prefix =
        if (t.hasSymbolField) "[has] "
        else if (t.isDef) "[defines] "
        else ""
      prefix + t.symbol
    }
    /** Return t's symbol type */
    def symbolTypeDoc(t: Tree): Document = {
      val s = t.symbol
      if (s ne null)
        TypePrinter.toDocument(s.info)
      else
        DocNil
    }
    /** Return a textual representation of (some of) the symbol's
     * attributes */
    def symbolAttributes(t: Tree): String = {
      val s = t.symbol
      if ((s ne null) && (s != NoSymbol)) {
        var str = s.flagString
        if (s.isStaticMember) str = str + " isStatic "
        (str + " annotations: " + s.annotations.mkString("", " ", "")
          + (if (s.isTypeSkolem) "\\ndeSkolemized annotations: " + s.deSkolemize.annotations.mkString("", " ", "") else ""))
      }
      else ""
    }
  }
  // Pretty printer turning compiler Types into wrappable Documents.
  object TypePrinter {
    ///////////////// Document pretty printer ////////////////
    implicit def view(n: String): Document = DocText(n)
    def toDocument(sym: Symbol): Document =
      toDocument(sym.info)
    // Comma-separated rendering of a symbol list.
    def symsToDocument(syms: List[Symbol]): Document = syms match {
      case Nil => DocNil
      case s :: Nil => Document.group(toDocument(s))
      case _ =>
        Document.group(
          syms.tail.foldLeft (toDocument(syms.head) :: ", ") (
            (d: Document, s2: Symbol) => toDocument(s2) :: ", " :/: d) )
    }
    // Comma-separated rendering of a type list.
    def toDocument(ts: List[Type]): Document = ts match {
      case Nil => DocNil
      case t :: Nil => Document.group(toDocument(t))
      case _ =>
        Document.group(
          ts.tail.foldLeft (toDocument(ts.head) :: ", ") (
            (d: Document, t2: Type) => toDocument(t2) :: ", " :/: d) )
    }
    // One case per compiler Type constructor; unknown types are an error.
    def toDocument(t: Type): Document = t match {
      case ErrorType => "ErrorType()"
      case WildcardType => "WildcardType()"
      case NoType => "NoType()"
      case NoPrefix => "NoPrefix()"
      case ThisType(s) => "ThisType(" + s.name + ")"
      case SingleType(pre, sym) =>
        Document.group(
          Document.nest(4, "SingleType(" :/:
            toDocument(pre) :: ", " :/: sym.name.toString :: ")")
        )
      case ConstantType(value) =>
        "ConstantType(" + value + ")"
      case TypeRef(pre, sym, args) =>
        Document.group(
          Document.nest(4, "TypeRef(" :/:
            toDocument(pre) :: ", " :/:
            sym.name.toString + sym.idString :: ", " :/:
            "[ " :: toDocument(args) ::"]" :: ")")
        )
      case TypeBounds(lo, hi) =>
        Document.group(
          Document.nest(4, "TypeBounds(" :/:
            toDocument(lo) :: ", " :/:
            toDocument(hi) :: ")")
        )
      case RefinedType(parents, defs) =>
        Document.group(
          Document.nest(4, "RefinedType(" :/:
            toDocument(parents) :: ")")
        )
      case ClassInfoType(parents, defs, clazz) =>
        Document.group(
          Document.nest(4,"ClassInfoType(" :/:
            toDocument(parents) :: ", " :/:
            clazz.name.toString + clazz.idString :: ")")
        )
      case MethodType(params, result) =>
        Document.group(
          Document.nest(4, "MethodType(" :/:
            Document.group("(" :/:
              symsToDocument(params) :/:
              "), ") :/:
            toDocument(result) :: ")")
        )
      case NullaryMethodType(result) =>
        Document.group(
          Document.nest(4,"NullaryMethodType(" :/:
            toDocument(result) :: ")")
        )
      case PolyType(tparams, result) =>
        Document.group(
          Document.nest(4,"PolyType(" :/:
            Document.group("(" :/:
              symsToDocument(tparams) :/:
              "), ") :/:
            toDocument(result) :: ")")
        )
      case AnnotatedType(annots, tp) =>
        Document.group(
          Document.nest(4, "AnnotatedType(" :/:
            annots.mkString("[", ",", "]") :/:
            "," :/: toDocument(tp) :: ")")
        )
      case ExistentialType(tparams, result) =>
        Document.group(
            Document.nest(4, "ExistentialType(" :/:
                Document.group("(" :/: symsToDocument(tparams) :/: "), ") :/:
                toDocument(result) :: ")"))
      case ImportType(expr) =>
        "ImportType(" + expr.toString + ")"
      case SuperType(thistpe, supertpe) =>
        Document.group(
          Document.nest(4, "SuperType(" :/:
            toDocument(thistpe) :/: ", " :/:
            toDocument(supertpe) ::")"))
      case _ =>
        sys.error("Unknown case: " + t.toString +", "+ t.getClass)
    }
  }
}
| felixmulder/scala | src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala | Scala | bsd-3-clause | 20,296 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.index
import org.geotools.data.Query
import org.junit.runner.RunWith
import org.locationtech.geomesa.index.conf.QueryHints.RichHints
import org.locationtech.geomesa.index.planning.QueryPlanner
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.filter.Filter
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
@RunWith(classOf[JUnitRunner])
class IndexPackageObjectTest extends Specification {
  "index" should {
    "compute target schemas from transformation expressions" in {
      // Build a feature type with a dtg field, then apply a query that both
      // selects existing attributes and derives a new one via strConcat.
      val typeName = "targetSchemaTest"
      val schemaSpec = "name:String,geom:Point:srid=4326,dtg:Date"
      val sft = SimpleFeatureTypes.createType(typeName, schemaSpec)
      sft.setDtgField("dtg")
      val transforms = Array("name", "helloName=strConcat('hello', name)", "geom")
      val query = new Query(typeName, Filter.INCLUDE, transforms)
      QueryPlanner.setQueryTransforms(sft, query)
      // The transform schema should contain only the projected/derived attributes.
      val transformSchema = query.getHints.getTransformSchema
      transformSchema must beSome
      SimpleFeatureTypes.encodeType(transformSchema.get) mustEqual "name:String,helloName:String,*geom:Point:srid=4326"
    }
  }
}
| aheyne/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/index/IndexPackageObjectTest.scala | Scala | apache-2.0 | 1,735 |
package io.udash.rpc.internals
import com.typesafe.scalalogging.LazyLogging
import org.atmosphere.cpr._
/**
 * Holds the Atmosphere broadcaster infrastructure and exposes guarded
 * accessors for per-client broadcasters and the meta broadcaster.
 */
private[rpc] trait BroadcasterInit extends LazyLogging {
  private var factory: BroadcasterFactory = _
  private var metaBroadcaster: MetaBroadcaster = _
  /** Stores both dependencies; warns if called more than once. */
  def init(factory: BroadcasterFactory, metaBroadcaster: MetaBroadcaster) = {
    val alreadyInitialized = this.factory != null || this.metaBroadcaster != null
    if (alreadyInitialized) {
      logger.warn("BroadcastManager is reinitialized! It should not happen!")
    }
    this.factory = factory
    this.metaBroadcaster = metaBroadcaster
  }
  /** Runs `op` with the (lazily created) broadcaster for the given client. */
  protected final def withBroadcaster(clientId: String)(op: Broadcaster => Unit): Unit = {
    require(factory != null, "Init manager with BroadcasterFactory first!")
    val broadcaster = factory.lookup[Broadcaster](clientPath(clientId), true)
    op(broadcaster)
  }
  /** Runs `op` with the meta broadcaster, failing fast if init was skipped. */
  protected final def withMetaBroadcaster(op: MetaBroadcaster => Unit): Unit = {
    require(metaBroadcaster != null, "Init manager with MetaBroadcaster first!")
    op(metaBroadcaster)
  }
  // Path under which a single client's broadcaster is registered.
  protected final def clientPath(clientId: String) = "/client/" + clientId
  // Wildcard segment matching every client path.
  protected final def pathWildcard = "*"
}
/** Entry point for pushing messages to connected clients over Atmosphere. */
private[rpc] object BroadcastManager extends BroadcasterInit {
  /** Attaches a client's Atmosphere resource to its dedicated broadcaster. */
  def registerResource(resource: AtmosphereResource, clientId: String): Unit = {
    withBroadcaster(clientId) { broadcaster =>
      broadcaster.addAtmosphereResource(resource)
    }
  }
  /** Sends a message to a single client. */
  def sendToClient(clientId: String, msg: String): Unit = {
    withBroadcaster(clientId) { broadcaster =>
      broadcaster.broadcast(msg)
    }
  }
  /** Sends a message to every registered client. */
  def broadcastToAllClients(msg: String): Unit = {
    withMetaBroadcaster { meta =>
      meta.broadcastTo(clientPath(pathWildcard), msg)
    }
  }
  /** Alias of [[broadcastToAllClients]]: fans a message out to all clients. */
  def broadcast(msg: String): Unit = {
    withMetaBroadcaster { meta =>
      meta.broadcastTo(clientPath(pathWildcard), msg)
    }
  }
}
| UdashFramework/udash-core | rpc/.jvm/src/main/scala/io/udash/rpc/internals/BroadcastManager.scala | Scala | apache-2.0 | 1,656 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.