| code stringlengths 5..1M | repo_name stringlengths 5..109 | path stringlengths 6..208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5..1M |
|---|---|---|---|---|---|
package me.archdev.api.external
import cats.data.Xor
import me.archdev.TweetRoomActor
import io.circe.generic.auto._
import io.circe.parser._
import io.circe.syntax._
/**
* External messaging protocol between client and server.
* This layer of abstraction is created to keep the internal messaging protocol independent from the service API.
*/
sealed trait CommandProtocol
case class PublishTweet(tweet: String) extends CommandProtocol
sealed trait EventProtocol
case class TweetPublished(tweet: String) extends EventProtocol
case class FailureMessage(message: String)
object InternalServerError {
def apply() = FailureMessage("Internal server error")
}
object ExternalProtocol {
import me.archdev.api.internal._
def serializeEvent(eventProtocol: EventProtocol): Xor[FailureMessage, String] =
Xor.right(eventProtocol.asJson.noSpaces)
def serializeFailureMessage(failureMessage: FailureMessage): String =
s"""{"FailureMessage": ${failureMessage.asJson.noSpaces}}"""
def deserializeCommand(text: String): Xor[FailureMessage, CommandProtocol] =
decode[CommandProtocol](text).bimap(
error => FailureMessage(error.getMessage),
result => result
)
def convertToInternalCommand(userId: String, command: CommandProtocol): Xor[FailureMessage, Command] =
command match {
case PublishTweet(tweet) =>
Xor.Right(TweetRoomActor.PublishTweet(userId, tweet))
case unhandledCommand =>
// TODO: log this stuff
Xor.Left(InternalServerError())
}
def convertToExternalEvent(event: Event): Xor[FailureMessage, EventProtocol] =
event match {
case TweetRoomActor.TweetPublished(tweet) =>
Xor.Right(TweetPublished(tweet))
case unhandledEvent =>
// TODO: log this stuff
Xor.Left(InternalServerError())
}
}
| ArchDev/unidirectional-akka-redux | backend/src/main/scala/me/archdev/api/external/Protocol.scala | Scala | mit | 1,817 |
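A minimal usage sketch for the ExternalProtocol object above, assuming the same cats `Xor` and circe auto-derivation setup as the file; the user id and the JSON payload are hypothetical values.
import cats.data.Xor
import me.archdev.api.external._

object ProtocolUsageSketch extends App {
  // circe's auto-derived decoder for the sealed trait expects the constructor name as the wrapper key.
  val incoming = """{"PublishTweet":{"tweet":"hello"}}"""

  // Deserialize the external command, then convert it to the internal one for a given user.
  val internalCommand =
    ExternalProtocol.deserializeCommand(incoming)
      .flatMap(cmd => ExternalProtocol.convertToInternalCommand("user-42", cmd))

  internalCommand match {
    case Xor.Right(cmd)    => println(s"internal command: $cmd")
    case Xor.Left(failure) => println(ExternalProtocol.serializeFailureMessage(failure))
  }
}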
object Main {
def main(args: Array[String]) {
println("Hello, Scala!")
}
}
| pieces029/test-project | scala-console/src/main/scala/com.andrewreitz.test/Main.scala | Scala | apache-2.0 | 83 |
package $package$
package comet
import net.liftweb.actor._
import net.liftweb.http._
import js._
import JsCmds._
import JE._
import scala.xml.NodeSeq
object Presentation extends LiftActor with ListenerManager {
object Ask
object Init
val initialStep = "title"
var currentStep:String = initialStep
def createUpdate = currentStep
override def lowPriority = {
case Ask => reply(currentStep)
case Init => this ! initialStep
case id:String => currentStep = id; updateListeners()
}
}
class PresentationComet extends CometActor with CometListener {
override def render = NodeSeq.Empty
override def registerWith = Presentation
override def lowPriority = {
case id:String => partialUpdate(
Call("window.Presentation.goto", id)
)
}
}
| sjfloat/presentera.g8 | src/main/g8/src/main/scala/$package$/comet/Presentation.scala | Scala | apache-2.0 | 779 |
package kafka.producer
import java.util.Properties
import org.apache.kafka.clients.producer._
object KafkaProducer extends App {
// input
val topic = "test-topic-name"
val msg = "test 1 test 2 test 1 2"
// config
val properties = new Properties()
properties.put("bootstrap.servers", "localhost:9092")
properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
val producer = new KafkaProducer[String, String](properties)
val record = new ProducerRecord(topic, "key", msg)
producer.send(record)
producer.close()
}
| abilashgt/study_bigdata | spark/basic_spark/src/main/scala/kafka/producer/KafkaProducer.scala | Scala | apache-2.0 | 669 |
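A companion sketch to the producer above: the same configuration expressed through ProducerConfig constants, plus an asynchronous send callback to confirm delivery. The broker address and topic name are assumptions.
import java.util.Properties
import org.apache.kafka.clients.producer._

object KafkaProducerWithCallback extends App {
  val props = new Properties()
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
  props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")
  props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer")

  val producer = new KafkaProducer[String, String](props)
  val record = new ProducerRecord[String, String]("test-topic-name", "key", "test 1 test 2 test 1 2")

  // The callback reports the assigned partition/offset, or the exception if the send failed.
  producer.send(record, new Callback {
    override def onCompletion(metadata: RecordMetadata, exception: Exception): Unit =
      if (exception != null) exception.printStackTrace()
      else println(s"written to ${metadata.topic}-${metadata.partition} @ offset ${metadata.offset}")
  })

  producer.close()
}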
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.bwsw.sj.engine.core.engine.input
import java.io.Closeable
import com.bwsw.sj.common.dal.model.stream.StreamDomain
import com.bwsw.sj.common.engine.core.entities.Envelope
import scala.collection.mutable
/**
* A class able to checkpoint the messages being processed
*
* @param inputs set of streams and their set of partitions that this task input is responsible for
* @author Kseniya Mikhaleva
*/
abstract class CheckpointTaskInput[E <: Envelope](inputs: scala.collection.mutable.Map[StreamDomain, Array[Int]]) extends Closeable {
private val lastEnvelopesByStreams: mutable.Map[(String, Int), Envelope] = createStorageOfLastEnvelopes()
private def createStorageOfLastEnvelopes(): mutable.Map[(String, Int), Envelope] = {
inputs.flatMap(x => x._2.map(y => ((x._1.name, y), new Envelope())))
}
def registerEnvelope(envelope: E): Unit = {
lastEnvelopesByStreams((envelope.stream, envelope.partition)) = envelope
}
def setConsumerOffsetToLastEnvelope(): Unit = {
lastEnvelopesByStreams.values.filterNot(_.isEmpty()).foreach(envelope => {
setConsumerOffset(envelope.asInstanceOf[E])
})
lastEnvelopesByStreams.clear()
}
protected def setConsumerOffset(envelope: E): Unit
def prepareToCheckpoint(): Unit =
setConsumerOffsetToLastEnvelope()
}
| bwsw/sj-platform | core/sj-engine-core/src/main/scala/com/bwsw/sj/engine/core/engine/input/CheckpointTaskInput.scala | Scala | apache-2.0 | 2,119 |
package dotty.tools.dotc.util
import scala.language.unsafeNulls
import org.junit.Assert._
import org.junit.Test
class DiffUtilTests {
def testExpected(found: String, expected: String, foundColoring: String, expectedColoring: String): Unit = {
def humanAscii(str: String): String =
str
.replace(Console.RESET, ">")
.replace(Console.BOLD, "")
.replace(Console.RED, "<R|")
.replace(Console.GREEN, "<G|")
// merging two aligning colors
.replace("><R|", "")
.replace("><G|", "")
val diff = DiffUtil.mkColoredTypeDiff(found, expected)
val fnd = humanAscii(diff._1)
val exp = humanAscii(diff._2)
if (fnd != foundColoring) fail(s"expected(found):\n$foundColoring but was: \n$fnd")
if (exp != expectedColoring) fail(s"expected(expected): \n$expectedColoring but was: \n$exp")
}
@Test
def simpleString(): Unit = {
testExpected("Foo", "Bar", "<R|Foo>", "<G|Bar>")
testExpected("Bar", "Foo", "<R|Bar>", "<G|Foo>")
}
@Test
def tuple(): Unit = {
testExpected("(Foo, Bar)", "(Bar, Foo)", "(<R|Foo>, <R|Bar>)", "(<G|Bar>, <G|Foo>)")
testExpected("(Int, Bar, Float)", "Bar", "<R|(Int, >Bar<R|, Float)>", "Bar")
}
@Test
def tupleSeq(): Unit = {
testExpected("(Foo, Seq[Bar])", "Seq[Bar]", "<R|(Foo, >Seq[Bar]<R|)>", "Seq[Bar]")
testExpected("Seq[Bar]", "(Foo, Seq[Bar])", "Seq[Bar]", "<G|(Foo, >Seq[Bar]<G|)>")
}
@Test
def seqTuple(): Unit = {
testExpected("Seq[(Foo, Bar)]", "Seq[Bar]", "Seq[<R|(Foo, >Bar<R|)>]", "Seq[Bar]")
testExpected("Seq[Bar]", "Seq[(Foo, Bar)]", "Seq[Bar]", "Seq[<G|(Foo, >Bar<G|)>]")
}
@Test
def seqSeq(): Unit = {
testExpected("Seq[Seq[Seq[Foo]]]", "Seq[List[Seq[(Bar, Foo)]]]", "Seq[<R|Seq>[Seq[Foo]]]", "Seq[<G|List>[Seq[<G|(Bar, >Foo<G|)>]]]")
testExpected("Seq[List[Seq[(Bar, Foo)]]]", "Seq[Seq[Seq[Foo]]]", "Seq[<R|List>[Seq[<R|(Bar, >Foo<R|)>]]]", "Seq[<G|Seq>[Seq[Foo]]]")
}
}
| dotty-staging/dotty | compiler/test/dotty/tools/dotc/util/DiffUtilTests.scala | Scala | apache-2.0 | 1,971 |
// Copyright (C) 2010-2011 Monash University
//
// This file is part of Factotum.
//
// Factotum is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Factotum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with Factotum. If not, see <http://www.gnu.org/licenses/>.
//
// Designed and implemented by Dmitri Nikulin.
//
// Repository: https://github.com/dnikulin/factotum
// Email: dnikulin+factotum@gmail.com
package com.dnikulin.factotum.render
import scala.xml.{Node, NodeSeq}
import net.liftweb.common._
import net.liftweb.http._
import com.dnikulin.vijil.text._
import com.dnikulin.factotum.engine.TextStub
import com.dnikulin.factotum.report.SessionReport
import com.dnikulin.factotum.web.FactotumWeb
object SectionMarker {
val noHeader: List[(String, String)] = Nil
val emptyResponse =
Full(InMemoryResponse(new Array[Byte](0), noHeader, Nil, 200))
def hideIf(klass: String, hide: Boolean): String =
if (hide) (klass + " hidden").trim else klass
def makeMarker(text: TextStub): Node = {
val owner =
text.token.owner.
map(_.openId).
map("owned by '%s'".format(_)).
openOr("Factotum Commons")
val title = "'%s', %s".format(text.meta.name, owner)
val report = SessionReport.is
// Check initial state
val marked = report.read(report.texts.exists(_.hash == text.hash))
// Initial CSS classes
val class1 = hideIf("sectionMarker", marked)
val class0 = hideIf("sectionMarked", !marked)
// Link DOM IDs
val id1 = "fact_mark1_" + text.hash
val id0 = "fact_mark0_" + text.hash
// Link JavaScript
val js1 = ("javascript:fact_mark1(\\"" + text.hash + "\\");")
val js0 = ("javascript:fact_mark0(\\"" + text.hash + "\\");")
<span>
<a id={id1} href={js1} class={class1} title={title}>{text.meta.name}</a>
<a id={id0} href={js0} class={class0} title={title}>{text.meta.name}</a>
</span>
}
def markSection(hash: String, state: Boolean): Box[LiftResponse] = {
val texts = FactotumWeb.store.text(hash)
val report = SessionReport.is
texts.foreach(report.markSection(_, state))
emptyResponse
}
def installDispatch() {
LiftRules.dispatch.append {
case Req(List("select", "mark", hash), _, _) => () =>
markSection(hash, true)
case Req(List("select", "unmark", hash), _, _) => () =>
markSection(hash, false)
}
}
}
| dnikulin/factotum | src/main/scala/com/dnikulin/factotum/render/SectionMarker.scala | Scala | agpl-3.0 | 2,878 |
def =>=[E, A, B, C]: (Product[E, A] => B) => (Product[E, B] => C) =>
(Product[E, A] => C) = f => g => {
case Product((e, a)) =>
val b = f(Product((e, a)))
val c = g(Product((e, b)))
c
}
| hmemcpy/milewski-ctfp-pdf | src/content/3.7/code/scala/snippet09.scala | Scala | gpl-3.0 | 206 |
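The snippet above is not self-contained; the following sketch shows one way it could be exercised. The Product wrapper definition and the example functions are assumptions (the book introduces Product in an earlier snippet).
object Snippet09Sketch extends App {
  // Assumed definition: a value A paired with an environment E.
  case class Product[E, A](run: (E, A))

  // The composition operator from the snippet, reproduced verbatim.
  def =>=[E, A, B, C]: (Product[E, A] => B) => (Product[E, B] => C) => (Product[E, A] => C) =
    f => g => {
      case Product((e, a)) =>
        val b = f(Product((e, a)))
        val c = g(Product((e, b)))
        c
    }

  // Two environment-carrying arrows and their composition.
  val length: Product[Int, String] => Int  = { case Product((_, s)) => s.length }
  val isEven: Product[Int, Int] => Boolean = { case Product((_, n)) => n % 2 == 0 }
  val composed: Product[Int, String] => Boolean = =>=[Int, String, Int, Boolean](length)(isEven)

  println(composed(Product((7, "hello")))) // false, since "hello".length == 5
}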
import java.util.List
class JavaGenerics {
def r(l : List[String]) = l.get(0).<ref>toLowerCase
}
| LPTK/intellij-scala | testdata/resolve/nonlocal/javaGenerics.scala | Scala | apache-2.0 | 99 |
package org.fayalite.util.img
import java.awt.image.{DataBufferByte, DataBufferInt, RenderedImage}
import javax.imageio.ImageIO
import akka.util.ByteString
import rx._
import scala.collection.Iterable
import scala.io.Source
import scala.util.{Failure, Success, Try}
import java.awt.image.BufferedImage
import java.awt.Color
/**
* Image / manipulation / conversion utils
*/
object ImageUtils {
/**
* A set of demonstrative functions that show how to convert
* canvas data from HTML5 ByteStrings received through a
* websocket into java compatible images and back!
*/
def byteStringToUInt8ToRGBInt(bs : ByteString) = {
val uint = bs.toIterable.map {
b => b & 0xFF
}.grouped(4).map {
_.toList
}.map {
case List(r, g, b, a) => new Color(r, g, b, a).getRGB
}
uint.toArray
}
case class UIParams(width: Int, height: Int)
object RGBI {
def apply(bs: ByteString)(implicit uip: UIParams) = {
new RGBI(byteStringToUInt8ToRGBInt(bs), uip.width, uip.height)
}
}
class RGBI(rgba: Array[Int], width: Int, height: Int) {
import java.awt.image.BufferedImage
val image = new BufferedImage(width, height, BufferedImage.TYPE_4BYTE_ABGR);
val g = image.createGraphics()
image.setRGB(0, 0, width, height, rgba, 0, width)
def save(path: String) = {
val ri = image.asInstanceOf[RenderedImage]
val fi = new java.io.File("adfsf.png")
ImageIO.write(ri, "PNG", fi)
}
def byteString = bufferedImageToByteString(image)
}
implicit class BufferedImageExtensions(bi: BufferedImage) {
def save(path: String) = {
val ri = bi.asInstanceOf[RenderedImage]
val fi = new java.io.File(path)
ImageIO.write(ri, "PNG", fi)
}
}
def bufferedImageToByteString(bi: BufferedImage) = {
val w = bi.getWidth
val h = bi.getHeight
val rgbaInt = bi.getRGB(0, 0, w, h, null, 0, w)
val rgba = rgbaInt.flatMap{ b =>
val c = new Color(b, true)
Seq(
c.getRed,
c.getGreen,
c.getBlue,
c.getAlpha
).map{_.toByte}
}
ByteString(rgba)
}
implicit def byteStringToUInt8(bs : ByteString): IndexedSeq[Int] = {
bs.map {
b => b & 0xFF
}
}
implicit def uInt8ToByteString(uint8: IndexedSeq[Int]) : ByteString = {
ByteString(uint8.map{_.toByte}.toArray)
}
/**
* Just an example to show how to use AWT
* / Java2d Image api.
*
* @return : BufferedImage with some random string
*/
def createTestImage() = {
val width = 500
val height = 500
import java.awt.image.BufferedImage
val image = new BufferedImage(width, height, BufferedImage.TYPE_4BYTE_ABGR);
val g = image.createGraphics()
g.setColor(Color.BLACK)
g.fillRect(0, 0, width, height)
g.setColor(Color.white)
g.drawString("yo " + scala.util.Random.nextString(10), 100, 100)
image
}
}
| ryleg/fayalite | core/src/main/scala/org/fayalite/util/img/ImageUtils.scala | Scala | mit | 2,912 |
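A round-trip sketch using only the utilities defined in the file above: render the test image, flatten it to an RGBA ByteString, rebuild it via RGBI, and save it. The output path is a hypothetical value.
import org.fayalite.util.img.ImageUtils._

object ImageRoundTripSketch extends App {
  // createTestImage() produces a 500x500 image, so the UIParams must match.
  implicit val params: UIParams = UIParams(width = 500, height = 500)

  val original = createTestImage()
  val asBytes  = bufferedImageToByteString(original) // 4 bytes (RGBA) per pixel
  val rebuilt  = RGBI(asBytes)                       // back to an image wrapper
  rebuilt.save("/tmp/round-trip.png")
}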
package de.qualitune
/**
* CPSTextInterpreter - parses and interprets the CPSText DSL.
* Copyright (C) 2011 Max Leuthaeuser
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import de.qualitune.ast.CPSProgram
import de.qualitune.parser.CPSTextParser
import java.io.{InputStreamReader, BufferedReader, File}
import de.qualitune.util.IOUtils
import tools.nsc.Global
import de.qualitune.config.{ConfigReporter, Configuration}
import de.qualitune.checks.CPSChecks
import transformator.{CPSProgramTransformator, ExecutableString}
/**
* Interpreter for CPSText containing static methods for interpreting CPSText code and programs.
*
* @author Max Leuthaeuser
* @since 22.11.2011
*/
object CPSTextRunner {
/**
* Runs a CPSProgram AST representing a piece of CPSText code.
*
* @param cst: the CPSProgram representing the concrete syntax tree
* @param config: the current configuration
*/
def runCST(cst: CPSProgram, config: Configuration) {
// Some static checks before starting the actual interpretation.
if (config.debugging.enabled) {
config.debugging.write("Running static checks...")
config.debugging.write("\\t1) Checking names")
}
CPSChecks.checkNames(cst)
if (config.debugging.enabled) config.debugging.write("\\t2) Checking imports")
CPSChecks.checkImports(cst)
if (config.debugging.enabled) config.debugging.write("\\t2) Checking role bindings")
CPSChecks.checkBindings(cst)
if (config.debugging.enabled) config.debugging.write("\\t4) Checking CPS objects")
CPSChecks.checkCPSObjects(cst)
if (config.debugging.enabled) config.debugging.write("\\t5) Checking role constrains")
CPSChecks.checkConstrains(cst)
var jre = "java"
var removeFile = "rm cpsprogram_Main.scala"
var removeClasses = "rm -rf temp"
val sep = System.getProperties.getProperty("path.separator");
if (IOUtils.isWindows) {
jre = "cmd.exe /C " + jre
removeFile = "cmd.exe /C del /S cpsprogram_Main.scala"
removeClasses = "cmd.exe /C RD /S /q temp"
}
config.debugging.write("# Starting")
if (config.interpretation.enabled) {
IOUtils.Time("Interpretation") {
val s = new ExecutableString()
s + ("object cpsprogram_Main {\\n")
new CPSProgramTransformator().apply(s, cst, null)
s + ("def main(args: Array[String]) { " + s.getInPlace + "} \\n}")
IOUtils.writeToFile("cpsprogram_Main.scala", s.toString)
IOUtils.createDirectory(new File("temp"))
val settings = new scala.tools.nsc.Settings(error)
settings.outdir.value = "temp"
settings.classpath.value = System.getProperty("java.class.path", ".");
settings.deprecation.value = true
settings.unchecked.value = true
val reporter = new ConfigReporter(settings, config)
val compiler = new Global(settings, reporter)
(new compiler.Run).compile(List("cpsprogram_Main.scala"))
config.debugging.write("# Output of compilation process: \\n")
reporter.printSummary()
config.debugging.write("# Finished. Exit code: " + (if (reporter.hasErrors) 1 else 0))
}
}
if (config.execution.enabled) {
IOUtils.Time("Execution") {
val proc = Runtime.getRuntime.exec(jre + " -cp temp" + sep + "CPSTextInterpreter.jar" + sep + ". cpsprogram_Main", null, new File("."))
config.debugging.write("# Output of CPSText program: \\n")
val reader = new BufferedReader(new InputStreamReader(proc.getInputStream))
Stream.continually(reader.readLine()).takeWhile(_ != null).foreach(x => config.debugging.write(" > " + IOUtils.now + ": " + x))
val exitCode = proc.waitFor()
config.debugging.write("\\n# Finished. Exit code: " + exitCode)
}
}
if (config.clean) {
IOUtils.Time("Cleaning up") {
Runtime.getRuntime.exec(removeFile).waitFor()
Runtime.getRuntime.exec(removeClasses).waitFor()
}
}
config.debugging.write("# Shutting down")
}
/**
* Parses and runs a String containing CPSText code.
*
* @param code: the piece of CPSText code you want to interpret.
* @param config: the current configuration
*/
def runCode(code: String, config: Configuration) {
runCST(CPSTextParser.parse(code), config)
}
}
| max-leuthaeuser/CPSTextInterpreter | src/main/scala/de/qualitune/CPSTextRunner.scala | Scala | gpl-3.0 | 5,030 |
/* date: Aug 31, 2012
The Group 'g' command conditionally controls one or more
other commands, such as, Display, Assign commands. There
are four types of 'g' commands
Script Examples:
g (1)=(1) // if then
ge // else
ge (2)=(2) // else if
g // ends group scope
Example:
g ($ans)=(2)
d Good, your answer is correct
g
d Sorry, the answer is 2
The scope of the 'g' command extends to the end of the
Card set unless a 'g' command without a condition is
detected. Example:
...
g (1)=(1)
d within scope
g //end of scope
d outside of scope
c // new card set
...
*/
package com.script
object GroupCommand {
// extract 'e' else tag and condition
val groupRegex="""(e)?\\s*(.*)?""" .r
def groupCommand(script:collection.mutable.ArrayBuffer[String],
line: String)={
val (elseTag,condition)=extractElseAndLogic(line)
if( !(elseTag ==null || elseTag =="e" ) )
throw new SyntaxException("letter following 'g' is not 'e'")
if(condition !=null) {
// '(1) <> nc ns (2)' becomes '(1)<>ncns(2)'
val reduce=LogicSupport.removeSpacesInOperand(condition)
// check syntax of 'condition' expression--throw exceptions.
ValidLogic.validLogic(reduce)
GroupScript.groupScript(script, elseTag, reduce)
}
else
GroupScript.groupScript(script, elseTag, condition)
}
def extractElseAndLogic(line:String):(String,String)={
line match {
case groupRegex(elseTag, condition)=>
(elseTag, condition)
case _=>
(null, null)
}
}
}
| hangle/Script | src/GroupCommand.scala | Scala | apache-2.0 | 1,648 |
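A small self-contained check of the regex extraction above, feeding extractElseAndLogic the part of the example script lines that follows the 'g'. Only the extractor is exercised here, since GroupScript, ValidLogic and LogicSupport live elsewhere in the project.
import com.script.GroupCommand.extractElseAndLogic

object GroupCommandSketch extends App {
  println(extractElseAndLogic("($ans)=(2)")) // (null,($ans)=(2))  -> 'g' with a condition
  println(extractElseAndLogic("e (2)=(2)"))  // (e,(2)=(2))        -> 'ge' (else-if) with a condition
  println(extractElseAndLogic(""))           // (null,)            -> bare 'g', closing the group scope
}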
object Test {
def main(args: Array[String]) {
println(Seq(List('1', '2', '3'), List('a', 'b', 'c')).view.addString(new StringBuilder, "_"))
}
}
| felixmulder/scala | test/files/run/t5656.scala | Scala | bsd-3-clause | 158 |
/*
* Copyright (C) 2015 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.crossdata.server
import java.io.File
import java.util.UUID
import java.util.concurrent.TimeUnit
import akka.actor.{ActorRef, ActorSystem}
import akka.pattern.ask
import akka.cluster.pubsub.DistributedPubSub
import akka.cluster.pubsub.DistributedPubSubMediator.{Publish, SendToAll}
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.Multipart.BodyPart
import akka.http.scaladsl.server.Directive
import akka.http.scaladsl.server.Directives._
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.FileIO
import akka.util.Timeout
import com.stratio.crossdata.common.security.Session
import com.stratio.crossdata.common.util.akka.keepalive.LiveMan.HeartBeat
import com.stratio.crossdata.common._
import com.stratio.crossdata.server.actors.ResourceManagerActor
import com.stratio.crossdata.server.config.ServerConfig
import com.stratio.crossdata.util.HdfsUtils
import com.typesafe.config.{Config, ConfigException}
import org.apache.log4j.Logger
import org.apache.spark.sql.crossdata.XDContext
import org.apache.spark.sql.crossdata.serializers.CrossdataSerializer
import org.json4s.jackson
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.{Success, Try}
class CrossdataHttpServer(config: Config, serverActor: ActorRef, implicit val system: ActorSystem) extends CrossdataSerializer {
import de.heikoseeberger.akkahttpjson4s.Json4sSupport._
implicit val serialization = jackson.Serialization
import ResourceManagerActor._
implicit val executionContext = system.dispatcher
implicit val materializer = ActorMaterializer()
lazy val logger = Logger.getLogger(classOf[CrossdataHttpServer])
lazy val mediator = DistributedPubSub(system).mediator
private val requestExecutionTimeout: FiniteDuration = Try(
FiniteDuration(config.getDuration(ServerConfig.Http.RequestExecutionTimeout).toMillis, TimeUnit.MILLISECONDS)
).recover{
case configExc: ConfigException =>
logger.warn("Http request execution timeout not found. Using the default value $HttpRequestEx", configExc)
ServerConfig.DefaultHTTPRequestExecutionTimeout
} get
type SessionDirective[Session] = Directive[Tuple1[Session]]
lazy val route =
path("upload" / JavaUUID) { sessionUUID =>
entity(as[Multipart.FormData]) { formData =>
// collect all parts of the multipart as it arrives into a map
var path = ""
val allPartsF: Future[Map[String, Any]] = formData.parts.mapAsync[(String, Any)](1) {
case part: BodyPart if part.name == "fileChunk" =>
// stream into a file as the chunks of it arrives and return a future file to where it got stored
val file = new java.io.File(s"/tmp/${part.filename.getOrElse("uploadFile")}")
path = file.getAbsolutePath
logger.info("Uploading file...")
// TODO map is not used
part.entity.dataBytes.runWith(FileIO.toFile(file)).map(_ => part.name -> file)
}.runFold(Map.empty[String, Any])((map, tuple) => map + tuple)
// when processing have finished create a response for the user
onSuccess(allPartsF) { allParts =>
logger.info("Recieved file")
complete {
val hdfsConfig = XDContext.xdConfig.getConfig("hdfs")
val hdfsPath = writeJarToHdfs(hdfsConfig, path)
val session = Session(sessionUUID, null)
val user = "fileupload"
allParts.values.toSeq.foreach{
case file: File =>
file.delete
logger.info("Tmp file deleted")
case _ => logger.error("Problem deleting the temporary file.")
}
//Send a broadcast message to all servers
mediator ! Publish(AddJarTopic, CommandEnvelope(AddJARCommand(hdfsPath, hdfsConfig = Option(hdfsConfig)), session, user))
hdfsPath
}
}
}
} ~ path("query" / JavaUUID) { requestId =>
post {
entity(as[CommandEnvelope]) { rq: CommandEnvelope =>
rq.cmd match {
case _: CloseSessionCommand => // Commands with no confirmation
serverActor ! rq
complete(StatusCodes.OK)
case _ => // Commands requiring confirmation
implicit val _ = Timeout(requestExecutionTimeout)
onComplete(serverActor ? rq) {
case Success(SQLReply(requestId, _)) if requestId != rq.cmd.requestId =>
complete(StatusCodes.ServerError, s"Request ids do not match: (${rq.cmd.requestId}, $requestId)")
case Success(reply: ServerReply) =>
reply match {
case qcr: QueryCancelledReply => complete(qcr)
case _ => complete(reply)
}
case other => complete(StatusCodes.ServerError, s"Internal XD server error: $other")
}
}
} /*~ getRqEnt { rq: HttpRequest => //TODO: Remove this debugging tool when a minimal stable API has been reached
onComplete(rq.entity.toStrict(5 seconds)) {
case Success(s: HttpEntity.Strict) =>
import org.json4s.jackson.JsonMethods._
val bs = s.data.toIterator.toArray
val parsed = parse(new String(bs), false)
println("\\n\\n\\n" + parsed.toString)
val extracted = parsed.extract[CommandEnvelope]
complete(parsed.toString)
}
}*/
}
} ~ path("sessions") {
post { //Session life proof is not a PUT to /session/idSession for security reasons.
entity(as[HeartBeat[UUID]]) { heartBeat =>
mediator ! SendToAll("/user/client-monitor", heartBeat) //TODO: Hardcoded path
complete(StatusCodes.OK) //Doesn't give clues on active sessions...
}
}
} ~ complete("Welcome to Crossdata HTTP Server")
//TODO: Remove this debugging tool when a minimal stable API has been reached
/*val getRqEnt = extract[HttpRequest] { rqCtx =>
rqCtx.request
}*/
private def writeJarToHdfs(hdfsConfig: Config, jar: String): String = {
val user = hdfsConfig.getString("user")
val hdfsMaster = hdfsConfig.getString("namenode")
val destPath = s"/user/$user/externalJars/"
val hdfsUtil = HdfsUtils(hdfsConfig)
//send to HDFS if not exists
val jarName = new File(jar).getName
if (!hdfsUtil.fileExist(s"$destPath/$jarName")) {
hdfsUtil.write(jar, destPath)
}
s"$hdfsMaster/$destPath/$jarName"
}
}
| pmadrigal/Crossdata | server/src/main/scala/com/stratio/crossdata/server/CrossdataHttpServer.scala | Scala | apache-2.0 | 7,205 |
package me.lachlanap.dropunit
import me.lachlanap.dropunit.resources.DiskBlueprintLoader
import me.lachlanap.dropunit.ui.UIConfig
import me.lachlanap.dropunit.world._
object Launcher {
def main(args: Array[String]): Unit = {
import com.badlogic.gdx.backends.lwjgl._
val worldConfig = WorldConfig(
columns = 5,
columnWidth = 2,
separation = 10,
maxHeight = 10
)
val uiConfig = UIConfig(
width = 1000,
height = 600,
pixelsPerMetre = 30
)
val appcfg = new LwjglApplicationConfiguration
appcfg.title = "Drop Unit"
appcfg.width = uiConfig.width
appcfg.height = uiConfig.height
appcfg.forceExit = false
appcfg.backgroundFPS = -1
appcfg.resizable = false
appcfg.vSyncEnabled = true
new LwjglApplication(new DropUnitCore(worldConfig, uiConfig, new DiskBlueprintLoader), appcfg)
}
}
| thorinii/dropunit | src/main/scala/me/lachlanap/dropunit/Launcher.scala | Scala | mit | 880 |
/*
* Copyright 2017 helloscala.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package helloscala.jdbc
import java.lang.reflect.{Field, Modifier}
import java.sql._
import java.time._
import java.util.{Objects, Properties, HashMap => JHashMap, Map => JMap}
import com.typesafe.config.Config
import com.typesafe.scalalogging.StrictLogging
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import helloscala.common.{Configuration, HSCommons}
import helloscala.common.util._
import scala.collection.{immutable, mutable}
import scala.compat.java8.FunctionConverters._
import scala.reflect.ClassTag
/**
* Created by yangbajing(yangbajing@gmail.com) on 2017-07-27.
*/
object JdbcUtils extends StrictLogging {
val DATASOURCE_PATH = HSCommons.PERSISTENCE_PATH + ".datasource"
val AND = "AND"
val OR = "OR"
/**
* Record-count limit value indicating that no limit should be applied.
*/
val INF_LIMIT = -1
/**
* Constant that indicates an unknown (or unspecified) SQL type.
*
* @see java.sql.Types
*/
val TYPE_UNKNOWN = Integer.MIN_VALUE
val BeanIgnoreClass = classOf[BeanIgnore]
object Keys {
val USE_TRANSACTION = "useTransaction"
val IGNORE_WARNINGS = "ignoreWarnings"
val ALLOW_PRINT_LOG = "allowPrintLog"
val NUM_THREADS = "numThreads"
val MAX_CONNECTIONS = "maxConnections"
val REGISTER_MBEANS = "registerMbeans"
val QUEUE_SIZE = "queueSize"
val keys =
List(USE_TRANSACTION, IGNORE_WARNINGS, ALLOW_PRINT_LOG, NUM_THREADS, MAX_CONNECTIONS, REGISTER_MBEANS, QUEUE_SIZE)
}
// Check for JDBC 4.1 getObject(int, Class) method - available on JDK 7 and higher
private val getObjectWithTypeAvailable =
ClassUtils.hasMethod(classOf[ResultSet], "getObject", classOf[Int], classOf[Class[_]])
def columnLabels(metadata: ResultSetMetaData): immutable.IndexedSeq[String] =
(1 to metadata.getColumnCount).map(i => Option(metadata.getColumnLabel(i)).getOrElse(metadata.getColumnName(i)))
/**
* Replace every :name named parameter with a '?' placeholder.
* @param sql the SQL statement written with named parameters
* @return (the converted SQL statement, a map from extracted parameter names to their indexes), with indexes numbered from 1
*/
def namedParameterToQuestionMarked(sql: String): (String, Map[String, Int]) = {
val sqlBuf = mutable.Buffer.empty[Char]
val paramBuf = mutable.Buffer.empty[Char]
val params = mutable.Map.empty[String, Int]
var idx = 0
var isName = false
sql.foreach {
case '?' =>
sqlBuf.append('?')
isName = true
case c @ (',' | ')') if isName =>
sqlBuf.append(c)
idx += 1
params += (paramBuf.mkString.trim -> idx)
paramBuf.clear()
isName = false
case c if isName =>
paramBuf.append(c)
case c =>
sqlBuf.append(c)
}
(sqlBuf.mkString, params.toMap)
}
def generateWhere(wheres: Seq[AnyRef], step: String = AND): String = {
val list = wheres.flatMap {
case Some(partial) => Some(partial)
case None => None
case partial => Some(partial)
}
if (list.isEmpty) "" else list.mkString(" where ", s" $step ", " ")
}
def where(wheres: Seq[String], step: String = AND): String =
if (wheres.isEmpty) "" else wheres.mkString(" where ", s" $step ", " ")
def whereOption(wheres: Seq[Option[String]], step: String = AND): String =
where(wheres.flatten, step)
def toCountSql(sql: String): String = {
def indexOf(text: String, str: String): Int = {
val idx = text.indexOf(str.toUpperCase())
if (idx < 0) text.indexOf(str.toLowerCase()) else idx
}
def takeString(text: String, str: String): String = {
val idx = indexOf(text, str)
if (idx > 0) text.substring(0, idx) else text
}
var countSql = sql.trim
countSql = takeString(countSql, "order")
countSql = takeString(countSql, "limit")
countSql = takeString(countSql, "offset")
countSql = takeString(countSql, "group")
val idxFrom = indexOf(countSql, "from")
countSql.substring(0, 6) + " count(*) " + countSql.drop(idxFrom).trim
}
/**
* Whether the requested record-count limit means "no limit".
* @param limit the record-count limit
* @return true when limit < 1, meaning the number of returned records is not limited; otherwise the limit applies
*/
def isInfLimit(limit: Int): Boolean = limit < 1
def nonInfLimit(limit: Int): Boolean = !isInfLimit(limit)
def nonInfLimit(limit: Int, additionalSql: => String): String =
if (isInfLimit(limit)) "" else additionalSql
def resultSetToMap(rs: ResultSet): Map[String, Object] = {
val metaData = rs.getMetaData
(1 to metaData.getColumnCount).map { column =>
val label = metaData.getColumnLabel(column)
label -> getResultSetValue(rs, column) //rs.getObject(label)
}.toMap
}
def resultSetToJMap(rs: ResultSet): JMap[String, Object] = {
val result = new JHashMap[String, Object]()
val metaData = rs.getMetaData
(1 to metaData.getColumnCount)
.foreach { column =>
val label = metaData.getColumnLabel(column)
result.put(label, getResultSetValue(rs, column) /*rs.getObject(label)*/ )
}
result
}
private def filterFields(fields: scala.Array[Field]): Map[String, Field] = {
val result = mutable.Map.empty[String, Field]
val len = fields.length
var i = 0
while (i < len) {
val field = fields(i)
val anns = field.getDeclaredAnnotations
val isInvalid = Modifier.isStatic(field.getModifiers) ||
anns.exists(ann => ann.annotationType() == BeanIgnoreClass)
if (!isInvalid) {
field.setAccessible(true)
result.put(field.getName, field)
}
i += 1
}
result.toMap
}
def resultSetToBean[T](rs: ResultSet)(implicit ev1: ClassTag[T]): T =
resultSetToBean(rs, toPropertiesName = true)
def resultSetToBean[T](rs: ResultSet, toPropertiesName: Boolean)(implicit ev1: ClassTag[T]): T = {
val dest = ev1.runtimeClass.newInstance().asInstanceOf[T]
val cls = dest.getClass
val fields = filterFields(cls.getDeclaredFields)
val metaData = rs.getMetaData
var col = 1
val columnCount = metaData.getColumnCount
while (col <= columnCount) {
var label = metaData.getColumnLabel(col)
if (toPropertiesName) {
label = convertUnderscoreNameToPropertyName(label)
}
for (field <- fields.get(label)) {
val requiredType = field.getType
val value = getResultSetValue(rs, col, requiredType)
field.set(dest, value)
}
col += 1
}
dest
}
def preparedStatementCreator(sql: String, namedSql: String = ""): ConnectionPreparedStatementCreator =
new ConnectionPreparedStatementCreatorImpl(sql, namedSql)
def preparedStatementAction[R](args: Iterable[Any], func: PreparedStatement => R): PreparedStatementAction[R] =
new PreparedStatementActionImpl(args, func)
def preparedStatementActionUseUpdate(args: Iterable[Any]): PreparedStatementAction[Int] =
new PreparedStatementActionImpl(args, pstmt => {
setStatementParameters(pstmt, args)
pstmt.executeUpdate()
})
def preparedStatementActionUseUpdate(
args: Map[String, Any],
paramIndex: Map[String, Int]): PreparedStatementAction[Int] =
new PreparedStatementActionImpl(args, pstmt => {
for ((param, index) <- paramIndex) {
setParameter(pstmt, index, args(param))
}
pstmt.executeUpdate()
})
def preparedStatementActionUseBatchUpdate(
argsList: Iterable[Iterable[Any]]): PreparedStatementAction[scala.Array[Int]] =
new PreparedStatementActionImpl(argsList, pstmt => {
for (args <- argsList) {
setStatementParameters(pstmt, args)
pstmt.addBatch()
}
pstmt.executeBatch()
})
def preparedStatementActionUseBatchUpdate(
argsList: Iterable[Map[String, Any]],
paramIndex: Map[String, Int]): PreparedStatementAction[scala.Array[Int]] =
new PreparedStatementActionImpl(argsList, pstmt => {
for (args <- argsList) {
for ((param, index) <- paramIndex) {
setParameter(pstmt, index, args(param))
}
pstmt.addBatch()
}
pstmt.executeBatch()
})
def setStatementParameters(
pstmt: PreparedStatement,
args: Map[String, Any],
paramIndex: Map[String, Int]): PreparedStatement = {
for ((param, index) <- paramIndex) {
setParameter(pstmt, index, args(param))
}
pstmt
}
def setStatementParameters(pstmt: PreparedStatement, args: Iterable[Any]): PreparedStatement = {
var i = 0
for (arg <- args) {
i += 1
setParameter(pstmt, i, arg)
}
pstmt
}
private val JAVA_UTIL_DATE_NAME = "java.util.Date"
def setParameter(pstmt: PreparedStatement, i: Int, arg: Any): Unit = {
val obj = arg match {
case zdt: ZonedDateTime => Timestamp.from(zdt.toInstant)
case ldt: LocalDateTime => Timestamp.valueOf(ldt)
case ld: LocalDate => Date.valueOf(ld)
case odt: OffsetDateTime => Timestamp.from(odt.toInstant)
case t: LocalTime => java.sql.Time.valueOf(t)
case e: Enumeration#Value => e.id
case d: java.util.Date if d.getClass.getName == JAVA_UTIL_DATE_NAME =>
new Date(d.getTime)
case _ => arg
}
pstmt.setObject(i, obj)
}
def closeStatement(stmt: Statement): Unit =
if (stmt ne null) {
try stmt.close()
catch {
case ex: SQLException =>
logger.trace("Could not close JDBC Statement", ex)
case ex: Throwable =>
// We don't trust the JDBC driver: It might throw RuntimeException or Error.
logger.trace("Unexpected exception on closing JDBC Statement", ex)
}
}
def closeResultSet(rs: ResultSet): Unit =
if (rs != null) {
try rs.close()
catch {
case ex: SQLException =>
logger.trace("Could not close JDBC ResultSet", ex)
case ex: Throwable =>
// We don't trust the JDBC driver: It might throw RuntimeException or Error.
logger.trace("Unexpected exception on closing JDBC ResultSet", ex)
}
}
def closeConnection(con: Connection): Unit =
if (con != null) {
try con.close()
catch {
case ex: SQLException =>
logger.error("Could not close JDBC Connection", ex)
case ex: Throwable =>
// We don't trust the JDBC driver: It might throw RuntimeException or Error.
logger.error("Unexpected exception on closing JDBC Connection", ex)
}
}
def getResultSetValue(
rs: ResultSet,
index: Int,
requiredType: Class[_],
defaultTimeZone: ZoneOffset = TimeUtils.ZONE_CHINA_OFFSET): Any =
if (requiredType == null) {
getResultSetValue(rs, index)
} else if (classOf[String] == requiredType) {
rs.getString(index)
} else if (classOf[BigDecimal] == requiredType) {
rs.getBigDecimal(index)
} else if (classOf[java.sql.Timestamp] == requiredType) {
rs.getTimestamp(index)
} else if (classOf[java.sql.Date] == requiredType) {
rs.getDate(index)
} else if (classOf[LocalDate] == requiredType) {
rs.getDate(index).toLocalDate
} else if (classOf[LocalTime] == requiredType) {
rs.getTime(index).toLocalTime
} else if (classOf[ZonedDateTime] == requiredType) {
rs.getTimestamp(index).toInstant.atZone(defaultTimeZone)
} else if (classOf[LocalDateTime] == requiredType) {
rs.getTimestamp(index).toLocalDateTime
} else if (classOf[java.sql.Time] == requiredType) {
rs.getTime(index)
} else if (classOf[scala.Array[Byte]] == requiredType) {
rs.getBytes(index)
} else if (classOf[Blob] == requiredType) {
rs.getBlob(index)
} else if (classOf[Clob] == requiredType) {
rs.getClob(index)
} else if (requiredType.isEnum) {
rs.getObject(index) match {
case s: String => s
case n: Number =>
NumberUtils.convertNumberToTargetClass(n, classOf[Integer])
case _ => rs.getString(index)
}
} else {
var value: Any = null
if (classOf[Boolean] == requiredType || classOf[java.lang.Boolean] == requiredType) {
value = rs.getBoolean(index)
} else if (classOf[Byte] == requiredType || classOf[java.lang.Byte] == requiredType) {
value = rs.getByte(index)
} else if (classOf[Short] == requiredType || classOf[java.lang.Short] == requiredType) {
value = rs.getShort(index)
} else if (classOf[Int] == requiredType || classOf[Integer] == requiredType) {
value = rs.getInt(index)
} else if (classOf[Long] == requiredType || classOf[java.lang.Long] == requiredType) {
value = rs.getLong(index)
} else if (classOf[Float] == requiredType || classOf[java.lang.Float] == requiredType) {
value = rs.getFloat(index)
} else if (classOf[Double] == requiredType || classOf[java.lang.Double] == requiredType || classOf[Number] == requiredType) {
value = rs.getDouble(index)
} else {
// Some unknown type desired -> rely on getObject.
if (getObjectWithTypeAvailable) {
try value = rs.getObject(index, requiredType)
catch {
case err: AbstractMethodError =>
logger.debug("JDBC driver does not implement JDBC 4.1 'getObject(int, Class)' method", err)
case ex: SQLFeatureNotSupportedException =>
logger.debug("JDBC driver does not support JDBC 4.1 'getObject(int, Class)' method", ex)
case ex: SQLException =>
logger.debug("JDBC driver has limited support for JDBC 4.1 'getObject(int, Class)' method", ex)
}
// } else {
// // Corresponding SQL types for JSR-310, left up
// // to the caller to convert them (e.g. through a ConversionService).
// val typeName = requiredType.getSimpleName
// value = typeName match {
// case "ZonedDateTime" => rs.getTimestamp(index).toInstant.atZone(TimeUtils.ZONE_CHINA_OFFSET)
// case "LocalDateTime" => rs.getTimestamp(index).toLocalDateTime
// case "LocalDate" => rs.getDate(index).toLocalDate
// case "LocalTime" => rs.getTime(index).toLocalTime
// case _ =>
// // Fall back to getObject without type specification, again
// // left up to the caller to convert the value if necessary.
// getResultSetValue(rs, index)
// }
}
}
if (rs.wasNull()) null else value
}
/**
* Retrieve a JDBC column value from a ResultSet, using the most appropriate
* value type. The returned value should be a detached value object, not having
* any ties to the active ResultSet: in particular, it should not be a Blob or
* Clob object but rather a byte array or String representation, respectively.
* <p>Uses the {@code getObject(index)} method, but includes additional "hacks"
* to get around Oracle 10g returning a non-standard object for its TIMESTAMP
* datatype and a {@code java.sql.Date} for DATE columns leaving out the
* time portion: These columns will explicitly be extracted as standard
* {@code java.sql.Timestamp} object.
*
* @param rs is the ResultSet holding the data
* @param index is the column index
* @return the value object
* @throws SQLException if thrown by the JDBC API
* @see java.sql.Blob
* @see java.sql.Clob
* @see java.sql.Timestamp
*/
@throws[SQLException]
def getResultSetValue(rs: ResultSet, index: Int): AnyRef = {
val obj = rs.getObject(index)
val className: String = if (obj == null) null else obj.getClass.getName
obj match {
case null =>
null
case blob: Blob =>
blob.getBytes(1, blob.length().toInt)
case clob: Clob =>
clob.getSubString(1, clob.length().toInt)
case _ if "oracle.sql.TIMESTAMP" == className || "oracle.sql.TIMESTAMPTZ" == className =>
rs.getTimestamp(index)
case _ if className.startsWith("oracle.sql.DATE") =>
val metaDataClassName = rs.getMetaData.getColumnClassName(index)
if ("java.sql.Timestamp" == metaDataClassName || "oracle.sql.TIMESTAMP" == metaDataClassName)
rs.getTimestamp(index)
else
rs.getDate(index)
case _: Date if "java.sql.Timestamp" == rs.getMetaData.getColumnClassName(index) =>
rs.getTimestamp(index)
case other =>
other
}
}
def supportsBatchUpdates(con: Connection): Boolean = {
var b = false
try {
val dbmd = con.getMetaData
if (dbmd != null) {
if (dbmd.supportsBatchUpdates) {
logger.debug("JDBC driver supports batch updates")
b = true
} else {
logger.debug("JDBC driver does not support batch updates")
}
}
} catch {
case ex: SQLException =>
logger.debug("JDBC driver 'supportsBatchUpdates' method threw exception", ex)
}
b
}
/**
* Extract a common name for the database in use even if various drivers/platforms provide varying names.
*
* @param source the name as provided in database metadata
* @return the common name to be used
*/
def commonDatabaseName(source: String): String = {
var name = source
if (source != null && source.startsWith("DB2")) {
name = "DB2"
} else if ("Sybase SQL Server" == source || "Adaptive Server Enterprise" == source || "ASE" == source ||
"sql server".equalsIgnoreCase(source)) {
name = "Sybase"
}
name
}
/**
* Check whether the given SQL type is numeric.
*
* @param sqlType the SQL type to be checked
* @return whether the type is numeric
*/
def isNumeric(sqlType: Int): Boolean =
Types.BIT == sqlType || Types.BIGINT == sqlType || Types.DECIMAL == sqlType || Types.DOUBLE == sqlType ||
Types.FLOAT == sqlType || Types.INTEGER == sqlType || Types.NUMERIC == sqlType || Types.REAL == sqlType ||
Types.SMALLINT == sqlType || Types.TINYINT == sqlType
/**
* Determine the column name to use. The column name is determined based on a
* lookup using ResultSetMetaData.
* <p>This method implementation takes into account recent clarifications
* expressed in the JDBC 4.0 specification:
* <p><i>columnLabel - the label for the column specified with the SQL AS clause.
* If the SQL AS clause was not specified, then the label is the name of the column</i>.
*
* @return the column name to use
* @param resultSetMetaData the current meta data to use
* @param columnIndex the index of the column for the look up
* @throws SQLException in case of lookup failure
*/
@throws[SQLException]
def lookupColumnName(resultSetMetaData: ResultSetMetaData, columnIndex: Int): String = {
var name = resultSetMetaData.getColumnLabel(columnIndex)
if (name == null || name.length < 1) {
name = resultSetMetaData.getColumnName(columnIndex)
}
name
}
def convertPropertyNameToUnderscore(obj: JMap[String, Object]): JMap[String, Object] =
convertPropertyNameToUnderscore(obj, true)
def convertPropertyNameToUnderscore(obj: JMap[String, Object], isLower: Boolean): JMap[String, Object] = {
val result = new JHashMap[String, Object]()
val func: (String, Object) => Unit = (key, value) =>
result.put(convertPropertyNameToUnderscore(key, isLower), value)
obj.forEach(asJavaBiConsumer(func))
result
}
def convertPropertyNameToUnderscore(obj: Map[String, Any]): Map[String, Any] =
convertPropertyNameToUnderscore(obj, true)
def convertPropertyNameToUnderscore(obj: Map[String, Any], isLower: Boolean): Map[String, Any] =
obj.map {
case (key, value) =>
convertPropertyNameToUnderscore(key, isLower) -> value
}
/**
* Convert a string from property (camel-case) form to all-lowercase underscore form.
*
* @param name the string to convert
* @see convertPropertyNameToUnderscore(name: String, isLower: Boolean)
* @return
*/
def convertPropertyNameToUnderscore(name: String): String =
convertPropertyNameToUnderscore(name, isLower = true)
/**
* Convert a string from property (camel-case) form to underscore form.
*
* @param name the string to convert
* @param isLower whether the underscore form should be lowercase; false produces all uppercase
* @return the converted string
*/
def convertPropertyNameToUnderscore(name: String, isLower: Boolean): String =
if (StringUtils.isBlank(name)) {
name
} else {
val sb = new StringBuilder
for (c <- name) {
if (Character.isUpperCase(c)) {
sb.append('_')
}
sb.append(
if (isLower) Character.toLowerCase(c)
else Character.toUpperCase(c.toUpper))
}
sb.toString()
}
def convertUnderscoreNameToPropertyName(obj: Map[String, Any]): Map[String, Any] =
obj.map {
case (key, value) => convertUnderscoreNameToPropertyName(key) -> value
}
def convertUnderscoreNameToPropertyName(obj: JMap[String, Object]): JMap[String, Object] = {
val result = new JHashMap[String, Object]()
val func: (String, Object) => Unit = (key, value) => result.put(convertUnderscoreNameToPropertyName(key), value)
obj.forEach(asJavaBiConsumer(func))
result
}
/**
* Convert a column name with underscores to the corresponding property name using "camel case". A name
* like "customer_number" would match a "customerNumber" property name.
*
* @param name the column name to be converted
* @return the name using "camel case"
*/
def convertUnderscoreNameToPropertyName(name: String): String = {
val result = new StringBuilder
var nextIsUpper = false
if (name != null && name.length > 0) {
if (name.length > 1 && name.substring(1, 2) == "_") {
result.append(name.substring(0, 1).toUpperCase)
} else {
result.append(name.substring(0, 1).toLowerCase)
}
var i = 1
val len = name.length
while (i < len) {
val s = name.substring(i, i + 1)
if (s == "_") {
nextIsUpper = true
} else if (nextIsUpper) {
result.append(s.toUpperCase)
nextIsUpper = false
} else {
result.append(s.toLowerCase)
}
i += 1
}
}
result.toString
}
def execute[R](
pscFunc: ConnectionPreparedStatementCreator,
actionFunc: PreparedStatementAction[R],
ignoreWarnings: Boolean = true,
allowPrintLog: Boolean = true,
useTransaction: Boolean = false,
autoClose: Boolean = false
)(implicit con: Connection): R = {
assert(Objects.nonNull(con), "con: Connection must not be null")
assert(Objects.nonNull(pscFunc), "Connection => PreparedStatement must not be null")
assert(Objects.nonNull(actionFunc), "PreparedStatement => R must not be null")
var pstmt: PreparedStatement = null
val isAutoCommit = con.getAutoCommit
var commitSuccess = false
var beginTime: Instant = null
try {
if (autoClose && useTransaction) {
con.setAutoCommit(false)
}
if (allowPrintLog) {
beginTime = Instant.now()
}
val connection = con
pstmt = pscFunc.apply(connection)
val result = actionFunc.apply(pstmt)
JdbcUtils.handleWarnings(ignoreWarnings, allowPrintLog, pstmt)
commitSuccess = true
result
} catch {
case sqlEx: SQLException =>
// if (logger.underlying.isDebugEnabled) {
// val metaData = pstmt.getParameterMetaData
// val parameterTypes = (1 to metaData.getParameterCount).map(idx => metaData.getParameterTypeName(idx))
// handleSqlLogs(beginTime, parameterTypes, pscFunc, actionFunc)
// }
throw sqlEx
} finally {
val parameterTypes =
try {
if (allowPrintLog) {
val metaData = pstmt.getParameterMetaData
(1 to metaData.getParameterCount).map(idx => metaData.getParameterTypeName(idx))
} else
Nil
} catch {
case e: Exception =>
handleSqlLogs(beginTime, Nil, pscFunc, actionFunc)
logger.warn("获取parameterTypes异常", e)
Nil
}
closeStatement(pstmt)
if (autoClose) {
if (useTransaction) {
try {
if (commitSuccess) {
con.commit()
} else {
con.rollback()
}
} catch {
case ex: Exception =>
logger.error("提交或回滚事物失败", ex)
}
con.setAutoCommit(isAutoCommit)
}
JdbcUtils.closeConnection(con)
}
if (allowPrintLog) {
handleSqlLogs(beginTime, parameterTypes, pscFunc, actionFunc)
}
}
}
def handleSqlLogs(
beginTime: Instant,
parameterTypes: Seq[String],
pscFunc: ConnectionPreparedStatementCreator,
actionFunc: PreparedStatementAction[_]): Unit = {
val endTime = Instant.now()
val dua = java.time.Duration.between(beginTime, endTime)
val sql = pscFunc match {
case pscFuncImpl: ConnectionPreparedStatementCreatorImpl =>
pscFuncImpl.getSql
case _ => ""
}
var dumpParameters = ""
if (parameterTypes.nonEmpty) {
val parameters = actionFunc match {
case actionFuncImpl: PreparedStatementActionImpl[_] =>
parameterTypes.zip(actionFuncImpl.args).map {
case (paramType, value) => s"\t\t$paramType: $value"
}
case _ =>
parameterTypes.map(paramType => s"\t\t$paramType:")
}
dumpParameters = "\n" + parameters.mkString("\n")
}
logger.info(s"[$dua] $sql $dumpParameters")
}
def handleWarnings(ignoreWarnings: Boolean, allowPrintLog: Boolean, stmt: Statement): Unit =
if (ignoreWarnings) {
if (allowPrintLog) {
var warningToLog = stmt.getWarnings
while (warningToLog != null) {
logger.warn(
"SQLWarning ignored: SQL state '" + warningToLog.getSQLState + "', error code '" + warningToLog.getErrorCode + "', message [" + warningToLog.getMessage + "]")
warningToLog = warningToLog.getNextWarning
}
}
} else {
handleWarnings(stmt.getWarnings)
}
@inline
@throws[SQLWarning]
protected def handleWarnings(warning: SQLWarning): Unit = if (warning != null) throw warning
@inline def createHikariDataSource(config: Configuration): HikariDataSource =
createHikariDataSource(config.getProperties(null))
@inline def createHikariDataSource(config: Config): HikariDataSource =
createHikariDataSource(Configuration(config))
def createHikariDataSource(props: Properties): HikariDataSource = {
val config = new HikariConfig(Keys.keys.foldLeft(props) { (props, key) =>
props.remove(key); props
})
createHikariDataSource(config)
}
def createHikariDataSource(config: HikariConfig): HikariDataSource =
new HikariDataSource(config)
}
| helloscala/helloscala | hs-jdbc/src/main/scala/helloscala/jdbc/JdbcUtils.scala | Scala | apache-2.0 | 27,737 |
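A brief usage sketch for two pure helpers from the file above (no DataSource needed). The SQL text and names are hypothetical; note that this version of namedParameterToQuestionMarked keys on a leading '?', so the sketch uses ?name-style placeholders.
import helloscala.jdbc.JdbcUtils

object JdbcUtilsSketch extends App {
  // Named-parameter rewriting: each parameter name is mapped to its 1-based '?' index.
  val (sql, paramIndex) = JdbcUtils.namedParameterToQuestionMarked(
    "insert into users(name, age) values(?name, ?age)")
  println(sql)        // insert into users(name, age) values(?, ?)
  println(paramIndex) // Map(name -> 1, age -> 2)

  // Column-name <-> property-name conversion.
  println(JdbcUtils.convertUnderscoreNameToPropertyName("customer_number")) // customerNumber
  println(JdbcUtils.convertPropertyNameToUnderscore("customerNumber"))      // customer_number
}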
package wom.callable
import wom.expression.WomExpression
import wom.graph.GraphNode._
import wom.graph.{Graph, TaskCallNode}
final case class WorkflowDefinition(name: String,
innerGraph: Graph,
meta: Map[String, String],
parameterMeta: Map[String, String],
declarations: List[(String, WomExpression)]) extends ExecutableCallable {
override lazy val toString = s"[Workflow $name]"
override val graph: Graph = innerGraph
// FIXME: how to get a meaningful order from the node set ?
override lazy val inputs: List[_ <: Callable.InputDefinition] = innerGraph.nodes.inputDefinitions.toList
override lazy val outputs: List[_ <: Callable.OutputDefinition] = innerGraph.nodes.outputDefinitions.toList
override lazy val taskCallNodes: Set[TaskCallNode] = innerGraph.allNodes collect {
case taskNode: TaskCallNode => taskNode
}
}
| ohsu-comp-bio/cromwell | wom/src/main/scala/wom/callable/WorkflowDefinition.scala | Scala | bsd-3-clause | 989 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.keras.layers
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.utils.Shape
import com.intel.analytics.bigdl.dllib.keras.Sequential
import com.intel.analytics.bigdl.dllib.keras.serializer.ModuleSerializationTest
class SpatialDropout1DSpec extends KerasBaseSpec {
"SpatialDropout1D forward and backward" should "work properly" in {
val seq = Sequential[Float]()
val layer = SpatialDropout1D[Float](0.5, inputShape = Shape(3, 4))
seq.add(layer)
seq.getOutputShape().toSingle().toArray should be (Array(-1, 3, 4))
val input = Tensor[Float](2, 3, 4).rand()
val output = seq.forward(input)
val gradInput = seq.backward(input, output)
}
}
class SpatialDropout1DSerialTest extends ModuleSerializationTest {
override def test(): Unit = {
val layer = SpatialDropout1D[Float](0.5, inputShape = Shape(3, 4))
layer.build(Shape(2, 3, 4))
val input = Tensor[Float](2, 3, 4).rand()
runSerializationTest(layer, input)
}
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/keras/layers/SpatialDropout1DSpec.scala | Scala | apache-2.0 | 1,648 |
/*
* Artificial Intelligence for Humans
* Volume 1: Fundamental Algorithms
* Scala Version
* http://www.aifh.org
* http://www.jeffheaton.com
*
* Code repository:
* https://github.com/jeffheaton/aifh
* Copyright 2013 by Jeff Heaton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package com.heatonresearch.aifh.kmeans
import com.heatonresearch.aifh.AIFHError
import com.heatonresearch.aifh.distance.{CalculateDistance, EuclideanDistance}
import com.heatonresearch.aifh.general.data.BasicData
import com.heatonresearch.aifh.randomize.BasicGenerateRandom
import com.heatonresearch.aifh.randomize.GenerateRandom
import scala.collection.mutable.ListBuffer
/**
* KMeans Clustering. First, observations are each placed into random clusters. There are two methods to do this:
* random and Forgy. Then we iterate through assignment and update steps. Assignment places observations in new clusters
* that they might be closer to. Update updates the center of each cluster, called the centroid. The center of each
* cluster is the mean of all observations in that cluster.
* <p/>
* This class uses a number of supporting objects:
* <p/>
* randomGeneration: The random number generator used for clustering.
* distanceMetric: The distance metric used to determine distance to centroids.
* <p/>
* http://en.wikipedia.org/wiki/Kmeans
* @param k The number of clusters (K).
*/
class KMeans(val k : Int) {
/**
* The clusters.
*/
private val clusters = ListBuffer[Cluster]()
/**
* The random number generator to use.
*/
var randomGeneration: GenerateRandom = new BasicGenerateRandom()
/**
* The distance metric used to determine distance to centroids.
*/
var distanceMetric: CalculateDistance = new EuclideanDistance
/**
* Validate and find the number of dimensions from the first observation.
*
* @param theObservations The observations.
* @return The number of dimensions.
*/
private def findDimensions(theObservations: Vector[BasicData]): Int = {
if (theObservations.isEmpty) {
throw new AIFHError("No observations provided to cluster, array zero length.")
}
if (theObservations.size < k) {
throw new AIFHError("There are fewer observations (" + theObservations.size + ") than k (" + k + ").")
}
val dimensions: Int = theObservations(0).input.length
if (dimensions == 0) {
throw new AIFHError("Observations have no dimensions.")
}
dimensions
}
/**
* Init the observations to random clusters. Use the "Init Random" algorithm. The Random Partition method first
* randomly assigns a cluster to each observation and then proceeds to the update step, thus computing the initial mean to be the centroid of the cluster's randomly assigned points.
*
* @param theObservations The observations to cluster.
*/
def initRandom(theObservations: Vector[BasicData]) {
val dimensions: Int = findDimensions(theObservations)
for(i <- 0 until k) {
clusters += new Cluster(dimensions)
}
for (observation <- theObservations) {
val clusterIndex: Int = randomGeneration.nextInt(k)
val cluster: Cluster = clusters(clusterIndex)
cluster.addObservation(observation)
}
for (cluster <- clusters) {
if (cluster.noObservations == 0) {
var done: Boolean = false
while (!done) {
val sourceIndex: Int = randomGeneration.nextInt(k)
val source: Cluster = clusters(sourceIndex)
if ((source ne cluster) && source.noObservations > 1) {
val sourceObservationIndex: Int = randomGeneration.nextInt(source.noObservations)
val sourceObservation: BasicData = source.getObservation(sourceObservationIndex)
source.removeObservation(sourceObservationIndex)
cluster.addObservation(sourceObservation)
done = true
}
}
}
}
updateStep()
}
/**
* Init the observations to random clusters. The Forgy method randomly chooses k observations from the
* data set and uses these as the initial means.
*
* @param theObservations The observations to cluster.
*/
def initForgy(theObservations: Vector[BasicData]) {
val dimensions: Int = findDimensions(theObservations)
clusters.clear()
var usedObservations = Set.empty[Integer]
for(i <- 0 until k) {
val cluster = new Cluster(dimensions)
clusters += cluster
var observationIndex: Int = -1
while (observationIndex == -1) {
observationIndex = randomGeneration.nextInt(theObservations.size)
if (usedObservations.contains(observationIndex)) {
observationIndex = -1
}
}
val observation = theObservations(observationIndex).input
cluster.setCenter(observation)
usedObservations += observationIndex
}
for (observation <- theObservations) {
val cluster: Cluster = findNearestCluster(observation.input)
cluster.addObservation(observation)
}
updateStep()
}
/**
* The update step updates the centroids.
*/
private def updateStep() {
for (cluster <- clusters) {
cluster.calculateCenter()
}
}
/**
* The assignment step assigns observations to the nearest clusters.
*
* @return True, if we are done. We are done if no observations moved clusters.
*/
private def assignmentStep: Boolean = {
var done: Boolean = true
for (cluster <- clusters) {
var observationIndex: Int = 0
var observationCount = cluster.noObservations
if (observationCount > 1) {
while (observationIndex < observationCount) {
val observation: BasicData = cluster.getObservation(observationIndex)
observationIndex += 1
val targetCluster: Cluster = findNearestCluster(observation.input)
if (targetCluster ne cluster) {
cluster.removeObservation(observation)
targetCluster.addObservation(observation)
observationCount -= 1
done = false
}
}
}
}
done
}
/**
* Find the nearest cluster for an observation.
*
* @param observation The observation.
* @return The nearest cluster.
*/
def findNearestCluster(observation: Vector[Double]): Cluster = {
var result: Cluster = null
var resultDist: Double = Double.PositiveInfinity
for (cluster <- clusters) {
val dist: Double = distanceMetric.calculate(observation, cluster.getCenter)
if (dist < resultDist) {
resultDist = dist
result = cluster
}
}
result
}
/**
* Perform one iteration of assignment and update steps.
*
* @return True, if we are done, no new assignments.
*/
def iteration: Boolean = {
if (clusters.isEmpty) {
throw new AIFHError("Must call one of the init methods first.")
}
val done: Boolean = assignmentStep
if (!done) {
updateStep()
}
done
}
/**
* Perform the specified number of iterations. Stop early if we are done.
*
* @param maxIterations The max number of iterations.
* @return True, if we are done.
*/
def iteration(maxIterations: Int): Int = {
var iterationCount: Int = 1
while (iterationCount <= maxIterations && !iteration) {
iterationCount += 1
}
iterationCount
}
/**
* @return The clusters.
*/
def getClusters: List[Cluster] = clusters.toList
}
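/*
 * Hedged usage sketch (not part of the original file): shows the intended call sequence of the
 * KMeans class above: construct it with k, seed the clusters with initForgy or initRandom, then
 * iterate until no observation changes cluster. The way BasicData observations are constructed
 * below is an assumption for illustration only; the real BasicData constructors or factories may differ.
 */
object KMeansUsageSketch {
  def main(args: Array[String]) {
    // two obvious groups of 2-D points (BasicData construction is assumed, see the note above)
    val observations = Vector(
      new BasicData(Vector(1.0, 1.0)),
      new BasicData(Vector(1.2, 0.9)),
      new BasicData(Vector(8.0, 8.1)),
      new BasicData(Vector(7.9, 8.3)))
    val kmeans = new KMeans(2)
    kmeans.initForgy(observations)      // or kmeans.initRandom(observations)
    val used = kmeans.iteration(100)    // run assignment/update steps, at most 100 times
    println("stopped after " + used + " iteration(s)")
    kmeans.getClusters.foreach(c => println(c.getCenter))
  }
}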
|
HairyFotr/aifh
|
vol1/scala-examples/src/main/scala/com/heatonresearch/aifh/kmeans/KMeans.scala
|
Scala
|
apache-2.0
| 8,012
|
package scalan.arrays
import scala.reflect.ClassTag
import scalan.common.OverloadHack.Overloaded1
import scalan.staged.BaseExp
import scalan.{Scalan, ScalanExp, ScalanStd}
import scala.reflect.runtime.universe._
trait ArrayOps { self: Scalan =>
type Arr[T] = Rep[Array[T]]
implicit class RepArrayOps[T: Elem](xs: Arr[T]) {
def apply(n: Rep[Int]): Rep[T] = array_apply(xs, n)
def apply(ns: Arr[Int])(implicit o: Overloaded1): Arr[T] = array_applyMany(xs, ns)
def length = array_length(xs)
def mapBy[R: Elem](f: Rep[T => R]) = array_map(xs, f)
def map[R: Elem](f: Rep[T] => Rep[R]) = array_map(xs, fun(f))
def flatMapBy[R: Elem](f: Rep[T => Array[R]]) = array_flat_map(xs, f)
def flatMap[R: Elem](f: Rep[T] => Arr[R]) = array_flat_map(xs, fun(f))
def reduce(implicit m: RepMonoid[T]) = array_reduce(xs)
def fold[S: Elem](init: Rep[S], f: Rep[((S, T)) => S]): Rep[S] = array_fold[T, S](xs, init, f)
def foldLeft[S: Elem](init: Rep[S])(f: Rep[(S, T)] => Rep[S]): Rep[S] = array_fold[T, S](xs, init, fun(f))
def mapReduceBy[K: Elem, V: Elem](map: Rep[T => (K, V)], reduce: Rep[((V, V)) => V]) = array_map_reduce(xs, map, reduce)
def mapReduce[K: Elem, V: Elem](map: Rep[T] => Rep[(K, V)], reduce: (Rep[V], Rep[V]) => Rep[V]) = array_map_reduce(xs, fun(map), fun2(reduce))
def scan(implicit m: RepMonoid[T]) = array_scan(xs)
def zip[U](ys: Arr[U]): Arr[(T, U)] = array_zip(xs, ys)
def sort(implicit o: Ordering[T]): Arr[T] = array_sort(xs)(o)
def slice(start: Rep[Int], length: Rep[Int]): Arr[T] = array_slice(xs, start, length)
def filterBy(f: Rep[T => Boolean]) = array_filter(xs, f)
def filter(f: Rep[T] => Rep[Boolean]) = array_filter(xs, fun(f))
def find(f: Rep[T] => Rep[Boolean]) = array_find(xs, fun(f))
def grouped(size: Rep[Int]) = array_grouped(xs, size)
def stride(start: Rep[Int], length: Rep[Int], stride: Rep[Int]) =
array_stride(xs, start, length, stride)
def update(index: Rep[Int], value: Rep[T]) = array_update(xs, index, value)
def :+(value: Rep[T]) = array_append(xs, value)
def append(value: Rep[T]) = array_append(xs, value)
def +:(value: Rep[T]) = array_cons(value, xs)
def prepend(value: Rep[T]) = array_cons(value, xs)
def updateMany(indexes: Arr[Int], values: Arr[T]) = array_updateMany(xs, indexes, values)
// new functions to support SQL-like queries
def sum(implicit n: Numeric[T]): Rep[T] = array_sum(xs)
def max(implicit o: Ordering[T]): Rep[T] = array_max(xs)
def min(implicit o: Ordering[T]): Rep[T] = array_min(xs)
def avg(implicit n: Numeric[T]): Rep[Double] = array_avg(xs)
def sortBy[O: Elem](by: Rep[T => O])(implicit o: Ordering[O]): Arr[T] = array_sort_by(xs, by)
def groupBy[G: Elem](by: Rep[T => G]) = array_group_by(xs, by)
def count(f: Rep[T => Boolean]) = array_count(xs, f)
def sumBy[S: Elem](f: Rep[T => S])(implicit n: Numeric[S]): Rep[S] = array_sum_by(xs, f)
def toList = array_toList(xs)
def reverse = array_reverse(xs)
}
implicit class RepNArrayOps[T: Elem](xss: Arr[Array[T]]) {
def flatten = array_flatten(xss)
}
// name to avoid conflict with scala.Array
object SArray {
def rangeFrom0(n: Rep[Int]) = array_rangeFrom0(n)
def tabulate[T: Elem](n: Rep[Int])(f: Rep[Int] => Rep[T]): Arr[T] =
rangeFrom0(n).map(f)
def repeat[T: Elem](n: Rep[Int])(f: Rep[Int => T]): Arr[T] = rangeFrom0(n).mapBy(f)
def replicate[T: Elem](len: Rep[Int], v: Rep[T]) = array_replicate(len, v)
def singleton[T: Elem](v: Rep[T]) = array_replicate(1, v)
def empty[T: Elem] = array_empty[T]
def fromSyms[T:Elem](syms: Seq[Rep[T]]): Arr[T] = array_fromSyms(syms)
}
trait ArrayFunctor extends Functor[Array] {
def tag[T](implicit tT: WeakTypeTag[T]) = weakTypeTag[Array[T]]
def lift[T](implicit eT: Elem[T]) = element[Array[T]]
def unlift[T](implicit eFT: Elem[Array[T]]) = eFT.eItem
def getElem[T](fa: Rep[Array[T]]) = rep_getElem(fa)
def unapply[A](e: Elem[_]) = e match {
case ae: ArrayElem[_] => Some(ae.asElem[Array[A]])
case _ => None
}
def map[A:Elem,B:Elem](xs: Rep[Array[A]])(f: Rep[A] => Rep[B]) = xs.mapBy(fun(f))
}
implicit val arrayContainer: Functor[Array] = new ArrayFunctor {}
abstract class ArrayElem[A](implicit eItem: Elem[A])
extends EntityElem1[A, Array[A], Array](eItem, container[Array]) {
}
case class ScalaArrayElem[A](override val eItem: Elem[A]) extends ArrayElem[A]()(eItem) {
def parent: Option[Elem[_]] = Some(arrayElement(eItem))
override def isEntityType = eItem.isEntityType
override lazy val tyArgSubst: Map[String, TypeDesc] = {
Map("A" -> Left(eItem))
}
lazy val tag = {
implicit val tag1 = eItem.tag
weakTypeTag[Array[A]]
}
protected def getDefaultRep =
SArray.empty(eItem)
override def canEqual(other: Any) = other.isInstanceOf[ScalaArrayElem[_]]
}
// require: n in xs.indices
def array_apply[T](xs: Arr[T], n: Rep[Int]): Rep[T]
// require: forall i -> is(i) in xs.indices
// provide: res.length == is.length
def array_applyMany[T](xs: Arr[T], is: Arr[Int]): Arr[T]
def array_length[T](xs: Arr[T]): Rep[Int]
// provide: xs.length == res.length
def array_map[T, R: Elem](xs: Arr[T], f: Rep[T => R]): Arr[R]
def array_flat_map[T, R: Elem](xs: Arr[T], f: Rep[T => Array[R]]): Arr[R]
def array_reduce[T](xs: Arr[T])(implicit m: RepMonoid[T]): Rep[T]
def array_fold[T,S:Elem](xs: Arr[T], init:Rep[S], f:Rep[((S,T))=>S]): Rep[S]
def array_map_reduce[T,K:Elem,V:Elem](xs: Arr[T], map:Rep[T=>(K,V)], reduce:Rep[((V,V))=>V]): MM[K,V]
  // provide: res._1.length == xs.length && res._2 == array_reduce(xs)
def array_scan[T](xs: Arr[T])(implicit m: RepMonoid[T], elem : Elem[T]): Rep[(Array[T], T)]
// require: xs.length == ys.length
// provide: res.length == xs.length
def array_zip[T, U](xs: Arr[T], ys: Arr[U]): Arr[(T, U)]
def array_sort[T](xs: Arr[T])(implicit o:Ordering[T]): Arr[T]
// provide: res.length == len
def array_replicate[T: Elem](len: Rep[Int], v: Rep[T]): Arr[T]
// require: start in xs.indices && start + length in xs.indices
// provide: res.length == length
def array_slice[T](xs: Arr[T], start: Rep[Int], length: Rep[Int]): Arr[T]
// provide: res.length = n
def array_rangeFrom0(n: Rep[Int]): Arr[Int]
def array_filter[T](xs: Arr[T], f: Rep[T => Boolean]): Arr[T]
def array_find[T](xs: Arr[T], f: Rep[T => Boolean]): Arr[Int]
def array_grouped[T](xs: Arr[T], size: Rep[Int]): Arr[Array[T]]
// require: start in xs.indices && start + length * stride in xs.indices
// provide: res.length == length
def array_stride[T](xs: Arr[T], start: Rep[Int], length: Rep[Int], stride: Rep[Int]): Arr[T]
// require: index in xs.indices
// provide: res.length == xs.length
def array_update[T](xs: Arr[T], index: Rep[Int], value: Rep[T]): Arr[T]
def array_append[T](xs: Arr[T], value: Rep[T]): Arr[T]
// require: forall i -> indexes(i) in xs.indices && indexes.length == values.length
// provide: res.length == xs.length
def array_updateMany[T](xs: Arr[T], indexes: Arr[Int], values: Arr[T]): Arr[T]
def array_sum[T:Elem](xs: Arr[T])(implicit n: Numeric[T]): Rep[T]
def array_max[T:Elem](xs: Arr[T])(implicit o: Ordering[T]): Rep[T]
def array_min[T:Elem](xs: Arr[T])(implicit o: Ordering[T]): Rep[T]
def array_avg[T:Elem](xs: Arr[T])(implicit n: Numeric[T]): Rep[Double]
def array_sort_by[T:Elem, O:Elem](xs: Arr[T], by: Rep[T => O])(implicit o:Ordering[O]): Arr[T]
def array_group_by[T:Elem, G:Elem](xs: Arr[T], by: Rep[T => G]): MM[G, ArrayBuffer[T]]
def array_count[T:Elem](xs: Arr[T], f: Rep[T => Boolean]): Rep[Int]
def array_sum_by[T:Elem, S:Elem](xs: Arr[T], f: Rep[T => S])(implicit n: Numeric[S]): Rep[S]
def array_empty[T: Elem]: Arr[T]
def array_toList[T:Elem](xs: Arr[T]): Lst[T]
def array_reverse[T:Elem](xs: Arr[T]): Arr[T]
def array_flatten[T:Elem](xs: Arr[Array[T]]): Arr[T]
def array_cons[T:Elem](value: Rep[T], xs: Arr[T]): Arr[T]
def array_binary_search[T:Elem](i: Rep[T], is: Arr[T])(implicit o: Ordering[T]): Rep[Int]
def array_randomGaussian(m: Rep[Double], e: Rep[Double], arr: Arr[Double]): Arr[Double]
def array_fromSyms[T:Elem](syms: Seq[Rep[T]]): Arr[T]
def array_concat[A: Elem](arrs: Arr[A]*): Arr[A] = {
val arr: Arr[Array[A]] = SArray.fromSyms(arrs)
arr.flatten
}
}
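// Hedged illustration (not part of the original file): the staged operations declared above are
// intended to mirror plain Scala collection semantics; for example mapReduceBy/array_map_reduce
// maps every element to a (key, value) pair and reduces the values per key. The object below is
// ordinary, unstaged Scala showing only that intended behaviour; it does not use Rep/Elem types.
object ArrayMapReduceSemanticsSketch {
  def main(args: Array[String]): Unit = {
    val xs = Array("a", "b", "a", "c", "b", "a")
    // map: x => (x, 1); reduce: (v1, v2) => v1 + v2, i.e. counting occurrences per key
    val counts = xs.foldLeft(Map.empty[String, Int]) { (acc, x) =>
      acc.updated(x, acc.getOrElse(x, 0) + 1)
    }
    println(counts) // expected: Map(a -> 3, b -> 2, c -> 1)
  }
}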
trait ArrayOpsStd extends ArrayOps {
self: ScalanStd =>
import TagImplicits.elemToClassTag
def array_apply[T](x: Arr[T], n: Rep[Int]): Rep[T] = x(n)
def array_applyMany[T](x: Arr[T], is: Arr[Int]): Arr[T] = {
implicit val ct = arrayToClassTag(x)
scala.Array.tabulate(is.length)(i => x(is(i)))
}
def array_length[T](a: Arr[T]): Rep[Int] = a.length
def array_map[T, R: Elem](xs: Array[T], f: T => R) = genericArrayOps(xs).map(f)
def array_flat_map[T, R: Elem](xs: Array[T], f: T => Array[R]) = genericArrayOps(xs).flatMap(x => f(x).toSeq)
def array_reduce[T](xs: Arr[T])(implicit m: RepMonoid[T]) = xs.fold(m.zero)(m.append)
def array_fold[T, S: Elem](xs: Arr[T], init: Rep[S], f: Rep[((S, T)) => S]): Rep[S] = {
var state = init
for (x <- xs) {
state = f((state, x))
}
state
}
def array_update[T](xs: Arr[T], index: Rep[Int], value: Rep[T]): Arr[T] = {
implicit val ct = arrayToClassTag(xs)
// xs.update(index, value)
//    xs // an in-place update here would conflict with LMS staging
val xs1 = xs.clone()
xs1.update(index, value)
xs1
}
def array_updateMany[T](xs: Arr[T], indexes: Arr[Int], values: Arr[T]): Arr[T] = {
implicit val ct = arrayToClassTag(xs)
// (0 until indexes.length).foreach(i => xs.update(indexes(i), values(i)))
//    xs // an in-place update here would conflict with LMS staging
val xs1 = xs.clone()
(0 until indexes.length).foreach(i => xs1.update(indexes(i), values(i)))
xs1
}
def array_append[T](xs: Arr[T], value: Rep[T]): Arr[T] = {
implicit val ct = arrayToClassTag(xs)
xs :+ value
}
def array_sum_by[T: Elem, S: Elem](xs: Arr[T], f: Rep[T => S])(implicit n: Numeric[S]): Rep[S] = {
var sum = n.zero
for (x <- xs) {
sum += f(x)
}
sum
}
def array_map_reduce[T, K: Elem, V: Elem](xs: Arr[T], map: (T) => (K, V), reduce: ((V, V)) => V) = {
val result = scala.collection.mutable.Map.empty[K, V]
xs.foldLeft(result)((r, x) => {
val pair = map(x)
val key = pair._1
val value = pair._2
result.update(key, if (result.contains(key)) reduce((result(key), value)) else value)
result
})
}
def array_zip[T, U](xs: Array[T], ys: Array[U]): Array[(T, U)] = (xs, ys).zipped.toArray
def array_sort[T](xs: Arr[T])(implicit o: Ordering[T]): Arr[T] = {
scala.util.Sorting.quickSort(xs)
xs
}
def array_scan[T](xs: Array[T])(implicit m: RepMonoid[T], elem: Elem[T]): Rep[(Array[T], T)] = {
val scan = xs.scan(m.zero)(m.append)
val sum = scan.last
(scan.dropRight(1).toArray, sum)
}
def array_replicate[T: Elem](len: Rep[Int], v: Rep[T]): Arr[T] = scala.Array.fill(len)(v)
def array_slice[T](xs: Arr[T], start: Rep[Int], length: Rep[Int]): Arr[T] =
genericArrayOps(xs).slice(start, start + length)
def array_rangeFrom0(n: Rep[Int]): Arr[Int] = 0.until(n).toArray
def array_filter[T](xs: Array[T], f: T => Boolean): Array[T] =
genericArrayOps(xs).filter(f)
def array_find[T](xs: Array[T], f: T => Boolean): Array[Int] = {
val buf = scala.collection.mutable.ArrayBuffer.empty[Int]
for (i <- 0 until xs.length) {
if (f(xs(i))) buf += i
}
buf.toArray
}
def array_count[T:Elem](xs: Arr[T], f: Rep[T] => Rep[Boolean]): Rep[Int] = {
genericArrayOps(xs).count(f)
}
def array_grouped[T](xs: Arr[T], size: Rep[Int]): Arr[Array[T]] = {
implicit val ct = arrayToClassTag(xs)
xs.iterator.grouped(size).map(_.toArray).toArray
}
def array_stride[T](xs: Arr[T], start: Rep[Int], length: Rep[Int], stride: Rep[Int]): Arr[T] = {
implicit val ct = arrayToClassTag(xs)
scala.Array.tabulate(length) { i =>
xs(start + i * stride)
}
}
def array_sum[T:Elem](xs: Arr[T])(implicit n: Numeric[T]): Rep[T] = genericArrayOps(xs).sum
def array_max[T:Elem](xs: Arr[T])(implicit o: Ordering[T]): Rep[T] = genericArrayOps(xs).max
def array_min[T:Elem](xs: Arr[T])(implicit o: Ordering[T]): Rep[T] = genericArrayOps(xs).min
def array_avg[T:Elem](xs: Arr[T])(implicit n: Numeric[T]): Rep[Double] = genericArrayOps(xs).sum.toDouble / xs.length
def array_sort_by[T:Elem, O:Elem](xs: Arr[T], by: Rep[T => O])(implicit o:Ordering[O]): Arr[T] = genericArrayOps(xs).sortBy[O](by)
def array_group_by[T:Elem, G:Elem](xs: Arr[T], by: Rep[T => G]): MM[G, ArrayBuffer[T]] = {
val result = scala.collection.mutable.Map.empty[G, ArrayBuffer[T]]
for (x <- xs) {
val key = by(x)
if (result.contains(key)) {
result(key) += x
} else {
result.update(key, ArrayBuffer(x))
}
}
result
}
def array_empty[T: Elem]: Arr[T] = scala.Array.empty[T]
def array_reverse[T:Elem](xs: Arr[T]): Arr[T] = genericArrayOps(xs).reverse
def array_flatten[T:Elem](xs: Arr[Array[T]]): Arr[T] = genericArrayOps(xs).flatten
def array_cons[T:Elem](value: Rep[T], xs: Arr[T]): Arr[T] = {
val buf = new Array[T](xs.length + 1)
buf(0) = value
xs.copyToArray(buf, 1)
buf
}
def array_toList[T:Elem](xs: Array[T]): Lst[T] = xs.to[List]
def arrayToClassTag[T](xs: Rep[Array[T]]): ClassTag[T] = ClassTag(xs.getClass.getComponentType)
def array_binary_search[T:Elem](i: T, is: Array[T])(implicit o: Ordering[T]): Rep[Int] = {
element[T] match {
case BooleanElement =>
!!!(s"binarySearch isn't defined for array of ${element[T]}")
case ByteElement =>
java.util.Arrays.binarySearch(is.asInstanceOf[Array[Byte]], i.asInstanceOf[Byte])
case CharElement =>
java.util.Arrays.binarySearch(is.asInstanceOf[Array[Char]], i.asInstanceOf[Char])
case ShortElement =>
java.util.Arrays.binarySearch(is.asInstanceOf[Array[Short]], i.asInstanceOf[Short])
case IntElement =>
java.util.Arrays.binarySearch(is.asInstanceOf[Array[Int]], i.asInstanceOf[Int])
case LongElement =>
java.util.Arrays.binarySearch(is.asInstanceOf[Array[Long]], i.asInstanceOf[Long])
case FloatElement =>
java.util.Arrays.binarySearch(is.asInstanceOf[Array[Float]], i.asInstanceOf[Float])
case DoubleElement =>
java.util.Arrays.binarySearch(is.asInstanceOf[Array[Double]], i.asInstanceOf[Double])
case _ => // all others are AnyRef ancestors
java.util.Arrays.binarySearch(is.asInstanceOf[Array[AnyRef]], i.asInstanceOf[AnyRef])
}
}
def array_randomGaussian(m: Rep[Double], e: Rep[Double], arr: Arr[Double]): Arr[Double] =
arr.map(_ => scala.util.Random.nextGaussian() * e + m)
def array_fromSyms[T:Elem](syms: Seq[Rep[T]]): Arr[T] = syms.toArray
}
trait ArrayOpsExp extends ArrayOps with BaseExp { self: ScalanExp =>
def withElemOfArray[T, R](xs: Arr[T])(block: Elem[T] => R): R =
withElemOf(xs) { eTArr =>
block(eTArr.eItem)
}
abstract class ArrayDef[T](implicit val eItem: Elem[T]) extends Def[Array[T]] {
lazy val selfType = element[Array[T]]
}
case class ArrayLength[T](xs: Exp[Array[T]]) extends BaseDef[Int]
case class ArrayApply[T](xs: Exp[Array[T]], index: Exp[Int]) extends Def[T] {
lazy val selfType = xs.elem.eItem
}
case class ArrayApplyMany[T](xs: Exp[Array[T]], indices: Exp[Array[Int]]) extends ArrayDef[T]()(xs.elem.eItem)
case class ArrayMap[T, R](xs: Exp[Array[T]], f: Exp[T => R]) extends ArrayDef[R]()(f.elem.eRange)
case class ArrayMapFilter[T, R](xs: Exp[Array[T]], f: Exp[T => (Boolean,R)]) extends ArrayDef[R]()(f.elem.eRange.eSnd)
case class ArrayFlatMap[T, R](xs: Exp[Array[T]], f: Exp[T => Array[R]]) extends ArrayDef[R]()(f.elem.eRange.eItem)
case class ArrayReduce[T](xs: Exp[Array[T]], implicit val m: RepMonoid[T]) extends Def[T] {
def selfType = xs.elem.eItem
}
case class ArrayFold[T,S](xs: Exp[Array[T]], init:Exp[S], f:Exp[((S,T))=>S])(implicit val eS: Elem[S]) extends BaseDef[S]
case class ArrayScan[T](xs: Exp[Array[T]], implicit val m: RepMonoid[T])(implicit val eT: Elem[T]) extends BaseDef[(Array[T], T)]
case class ArraySort[T](xs: Exp[Array[T]], implicit val o:Ordering[T])(implicit eItem: Elem[T]) extends ArrayDef[T]
case class ArrayZip[T, U](xs: Exp[Array[T]], ys: Exp[Array[U]])(implicit val eT: Elem[T], val eU: Elem[U]) extends ArrayDef[(T, U)]
case class ArrayMapReduce[T,K,V](in: Exp[Array[T]], map:Exp[T=>(K,V)], reduce:Exp[((V,V))=>V])(implicit val eK: Elem[K], val eV: Elem[V]) extends MMapDef[K,V]
case class ArrayReplicate[T](len: Exp[Int], v: Exp[T])(implicit eItem: Elem[T]) extends ArrayDef[T]
case class ArrayStride[T](xs: Exp[Array[T]], start: Exp[Int], length: Exp[Int], stride: Exp[Int])(implicit eItem: Elem[T]) extends ArrayDef[T]
case class ArrayUpdate[T](xs: Exp[Array[T]], index: Exp[Int], value: Exp[T])(implicit eItem: Elem[T]) extends ArrayDef[T]
case class ArrayUpdateMany[T](xs: Exp[Array[T]], indexes: Exp[Array[Int]], values: Exp[Array[T]])(implicit eItem: Elem[T]) extends ArrayDef[T]
case class ArrayAppend[T](xs: Exp[Array[T]], value: Exp[T])(implicit eItem: Elem[T]) extends ArrayDef[T]
case class ArrayCons[T](value: Exp[T], xs: Exp[Array[T]])(implicit eItem: Elem[T]) extends ArrayDef[T]
case class ArrayReverse[T](xs: Exp[Array[T]])(implicit eItem: Elem[T]) extends ArrayDef[T]
case class ArrayFlatten[T](xss: Arr[Array[T]])(implicit eItem: Elem[T]) extends ArrayDef[T]
case class ArrayRangeFrom0(n: Exp[Int]) extends ArrayDef[Int]
case class ArrayFilter[T](xs: Exp[Array[T]], f: Exp[T => Boolean])(implicit eItem: Elem[T]) extends ArrayDef[T]
case class ArrayFind[T](xs: Exp[Array[T]], f: Exp[T => Boolean]) extends ArrayDef[Int]
case class ArraySortBy[T, O](xs: Exp[Array[T]], f: Exp[T => O], o: Ordering[O])(implicit eItem: Elem[T], val eO: Elem[O]) extends ArrayDef[T]
case class ArrayGroupBy[T, G](xs: Exp[Array[T]], by: Exp[T => G])(implicit val eT: Elem[T], val eG: Elem[G]) extends MMapDef[G, ArrayBuffer[T]]
case class ArraySum[T](xs: Exp[Array[T]], n: Numeric[T])(implicit val eT: Elem[T]) extends BaseDef[T]
case class ArraySumBy[T, S](xs: Exp[Array[T]], f: Exp[T => S], n: Numeric[S])(implicit val eT: Elem[T], val eS: Elem[S]) extends BaseDef[S]
case class ArrayMax[T](xs: Exp[Array[T]], o: Ordering[T])(implicit val eT: Elem[T]) extends BaseDef[T]
case class ArrayMin[T](xs: Exp[Array[T]], o: Ordering[T])(implicit val eT: Elem[T]) extends BaseDef[T]
case class ArrayAvg[T](xs: Exp[Array[T]], n: Numeric[T]) extends BaseDef[Double]
case class ArrayCount[T](xs: Exp[Array[T]], f: Exp[T => Boolean])(implicit val eT: Elem[T]) extends BaseDef[Int]
case class ArrayEmpty[T]()(implicit eItem: Elem[T]) extends ArrayDef[T]
case class ArrayToList[T](xs: Exp[Array[T]])(implicit val eT: Elem[T]) extends ListDef[T]
case class ArrayBinarySearch[T](i: Exp[T], xs: Exp[Array[T]], o: Ordering[T])(implicit val eT: Elem[T]) extends BaseDef[Int]
case class ArrayRandomGaussian[T](m: Exp[T], e: Exp[T], xs: Exp[Array[T]])(implicit eItem: Elem[T]) extends ArrayDef[T]
case class SymsArray[A](symbols: Seq[Exp[A]])(implicit override val eItem: Elem[A]) extends ArrayDef[A] {
val length: Rep[Int] = symbols.length
}
def array_update[T](xs: Arr[T], index: Rep[Int], value: Rep[T]): Arr[T] = {
implicit val eT = xs.elem.eItem
ArrayUpdate(xs, index, value)
}
def array_updateMany[T](xs: Arr[T], indexes: Arr[Int], values: Arr[T]): Arr[T] = {
implicit val eT = xs.elem.eItem
ArrayUpdateMany(xs, indexes, values)
}
def array_append[T](xs: Arr[T], value: Rep[T]): Arr[T] = {
implicit val eT = xs.elem.eItem
ArrayAppend(xs, value)
}
def array_sum[T:Elem](xs: Arr[T])(implicit n: Numeric[T]): Rep[T] = ArraySum(xs, n)
def array_max[T:Elem](xs: Arr[T])(implicit o: Ordering[T]): Rep[T] = ArrayMax(xs, o)
def array_min[T:Elem](xs: Arr[T])(implicit o: Ordering[T]): Rep[T] = ArrayMin(xs, o)
def array_avg[T:Elem](xs: Arr[T])(implicit n: Numeric[T]): Rep[Double] = ArrayAvg(xs, n)
def array_sort_by[T:Elem, O:Elem](xs: Arr[T], by: Rep[T => O])(implicit o:Ordering[O]): Arr[T] = ArraySortBy(xs, by, o)
def array_group_by[T:Elem, G:Elem](xs: Arr[T], by: Rep[T => G]) = ArrayGroupBy(xs, by)
def array_count[T:Elem](xs: Arr[T], f: Rep[T => Boolean]): Rep[Int] = ArrayCount(xs, f)
def array_sum_by[T:Elem,S:Elem](xs: Arr[T], f: Rep[T=>S])(implicit n: Numeric[S]): Rep[S] = ArraySumBy(xs, f, n)
def array_apply[T](xs: Exp[Array[T]], n: Exp[Int]): Rep[T] =
withElemOfArray(xs) { implicit eT => ArrayApply(xs, n) }
def array_applyMany[T](xs: Exp[Array[T]], is: Exp[Array[Int]]): Arr[T] =
withElemOfArray(xs) { implicit eT => ArrayApplyMany(xs, is) }
def array_length[T](a: Exp[Array[T]]): Rep[Int] = ArrayLength(a)
def array_map[T, R: Elem](xs: Exp[Array[T]], f: Exp[T => R]) = ArrayMap(xs, f)
def array_flat_map[T, R: Elem](xs: Exp[Array[T]], f: Exp[T => Array[R]]) = ArrayFlatMap(xs, f)
def array_reduce[T](xs: Arr[T])(implicit m: RepMonoid[T]) =
withElemOfArray(xs) { implicit eT => ArrayReduce(xs, m) }
def array_fold[T,S:Elem](xs: Arr[T], init:Rep[S], f:Rep[((S,T))=>S]): Rep[S] =
withElemOfArray(xs) { implicit eT => ArrayFold(xs, init, f) }
def array_map_reduce[T,K:Elem,V:Elem](xs: Exp[Array[T]], map:Exp[T=>(K,V)], reduce:Exp[((V,V))=>V]) = ArrayMapReduce(xs, map, reduce)
def array_scan[T](xs: Arr[T])(implicit m: RepMonoid[T], elem : Elem[T]): Rep[(Array[T], T)] =
ArrayScan(xs, m)
def array_zip[T, U](xs: Arr[T], ys: Arr[U]): Arr[(T, U)] = {
implicit val eT = xs.elem.eItem
implicit val eU = ys.elem.eItem
ArrayZip(xs, ys)
}
def array_sort[T](xs: Arr[T])(implicit o:Ordering[T]): Arr[T] = {
implicit val eT = xs.elem.eItem
ArraySort(xs, o)
}
def array_replicate[T: Elem](len: Rep[Int], v: Rep[T]): Arr[T] =
ArrayReplicate(len, v)
def array_slice[T](xs: Arr[T], start: Rep[Int], length: Rep[Int]): Arr[T] =
array_stride(xs, start, length, 1)
def array_rangeFrom0(n: Rep[Int]): Arr[Int] =
ArrayRangeFrom0(n)
def array_filter[T](xs: Arr[T], f: Rep[T => Boolean]): Arr[T] =
withElemOfArray(xs) { implicit eT => ArrayFilter(xs, f) }
def array_find[T](xs: Arr[T], f: Rep[T => Boolean]): Arr[Int] =
ArrayFind(xs, f)
def array_grouped[T](xs: Arr[T], size: Rep[Int]): Arr[Array[T]] = {
implicit val eT = xs.elem.eItem
SArray.tabulate(xs.length div size) { i => xs.slice(i * size, size) }
}
def array_stride[T](xs: Arr[T], start: Rep[Int], length: Rep[Int], stride: Rep[Int]): Arr[T] = {
implicit val eT = xs.elem.eItem
ArrayStride(xs, start, length, stride)
}
def array_toList[T:Elem](xs: Arr[T]): Lst[T] =
ArrayToList(xs)
def array_empty[T: Elem]: Arr[T] = ArrayEmpty[T]()
def array_reverse[T:Elem](xs: Arr[T]): Arr[T] = ArrayReverse(xs)
def array_flatten[T:Elem](xs: Arr[Array[T]]): Arr[T] = ArrayFlatten(xs)
def array_cons[T:Elem](value: Rep[T], xs: Arr[T]): Arr[T] = ArrayCons(value, xs)
// def accessOnlyFirst(x: Exp[_], exp: Exp[_]): Boolean = {
// exp match {
// case Def(Tup(l, r)) => accessOnlyFirst(x, l) && accessOnlyFirst(x, r)
// case Def(Second(t)) => accessOnlyFirst(x, t)
// case Def(First(t)) => (t == x || accessOnlyFirst(x, t))
// case Def(ApplyBinOp(op, l, r)) => accessOnlyFirst(x, l) && accessOnlyFirst(x, r)
// case Def(ApplyUnOp(opr, opd)) => accessOnlyFirst(x, opd)
// case Def(Const(_)) => true
// case _ => false
// }
// }
//
// def accessOnlySecond(x: Exp[_], exp: Exp[_]): Boolean = {
// exp match {
// case Def(Tup(l, r)) => accessOnlySecond(x, l) && accessOnlySecond(x, r)
// case Def(First(t)) => accessOnlySecond(x, t)
// case Def(Second(t)) => (t == x || accessOnlySecond(x, t))
// case Def(ApplyBinOp(op, l, r)) => accessOnlySecond(x, l) && accessOnlySecond(x, r)
// case Def(ApplyUnOp(opr, opd)) => accessOnlySecond(x, opd)
// case Def(Const(_)) => true
// case _ => false
// }
// }
//
// def firstOnlyExp(oldX: Exp[_], newX: Exp[_], exp: Exp[_]): Exp[_] = {
// exp match {
// case Def(t: Tup[a, b]) => {
// implicit val eA = t.a.elem
// implicit val eB = t.b.elem
// Tup[a,b](firstOnlyExp(oldX, newX, t.a).asRep[a], firstOnlyExp(oldX, newX, t.b).asRep[b])
// }
// case Def(s: Second[a,b]) => {
// val pair = s.pair
// implicit val eA = pair.elem.eFst
// implicit val eB = pair.elem.eSnd
// Second[a,b](firstOnlyExp(oldX, newX, pair).asRep[(a,b)])
// }
// case Def(f: First[a, b]) => {
// val pair = f.pair
// if (pair == oldX) {
// newX
// } else {
// implicit val eA = pair.elem.eFst
// implicit val eB = pair.elem.eSnd
// First[a, b](firstOnlyExp(oldX, newX, pair).asRep[(a, b)])
// }
// }
// case Def(bin: ApplyBinOp[a,r]) =>
// ApplyBinOp[a,r](bin.op, firstOnlyExp(oldX, newX, bin.lhs).asRep[a], firstOnlyExp(oldX, newX, bin.rhs).asRep[a])
// case Def(un: ApplyUnOp[a, r]) =>
// ApplyUnOp[a,r](un.op, firstOnlyExp(oldX, newX, un.arg).asRep[a])
// case _ => exp
// }
// }
// def firstOnly[A:Elem,B:Elem](l: Lambda[_, _]): Exp[A => B] = {
// val newSym = fresh[A => B]
// val newX = fresh[A]
// val first = firstOnlyExp(l.x, newX, l.y)
// val newLam = new Lambda[A, B](None, newX, first.asRep[B], newSym, l.mayInline)
// toExp(newLam, newSym)
// }
//
// def secondOnlyExp(oldX: Exp[_], newX: Exp[_], exp: Exp[_]): Exp[_] = {
// exp match {
// case Def(t: Tup[a, b]) => {
// implicit val eA = t.a.elem
// implicit val eB = t.b.elem
// Tup[a, b](secondOnlyExp(oldX, newX, t.a).asRep[a], secondOnlyExp(oldX, newX, t.b).asRep[b])
// }
// case Def(f: First[a, b]) => {
// val pair = f.pair
// implicit val eA = pair.elem.eFst
// implicit val eB = pair.elem.eSnd
// First[a, b](secondOnlyExp(oldX, newX, pair).asRep[(a, b)])
// }
// case Def(s: Second[a, b]) => {
// val pair = s.pair
// if (pair == oldX) {
// newX
// } else {
// implicit val eA = pair.elem.eFst
// implicit val eB = pair.elem.eSnd
// Second[a, b](secondOnlyExp(oldX, newX, pair).asRep[(a, b)])
// }
// }
// case Def(bin: ApplyBinOp[a,r]) =>
// ApplyBinOp[a,r](bin.op, secondOnlyExp(oldX, newX, bin.lhs).asRep[a], secondOnlyExp(oldX, newX, bin.rhs).asRep[a])
// case Def(un: ApplyUnOp[a, r]) =>
// ApplyUnOp[a,r](un.op, secondOnlyExp(oldX, newX, un.arg).asRep[a])
// case _ => exp
// }
// }
// def secondOnly[A:Elem,B:Elem](l: Lambda[_, _]): Exp[A => B] = {
// val newSym = fresh[A => B]
// val newX = fresh[A]
// val second = secondOnlyExp(l.x, newX, l.y)
// val newLam = new Lambda[A, B](None, newX, second.asRep[B], newSym, l.mayInline)
// toExp(newLam, newSym)
// }
def array_binary_search[T:Elem](i: Rep[T], is: Arr[T])(implicit o: Ordering[T]): Rep[Int] = {
ArrayBinarySearch(i, is, o)
}
def array_randomGaussian(m: Rep[Double], e: Rep[Double], arr: Arr[Double]): Arr[Double] = {
ArrayRandomGaussian(m, e, arr)
}
def array_fromSyms[T:Elem](syms: Seq[Rep[T]]): Arr[T] = {
SymsArray(syms)
}
object ArgAccess extends Enumeration {
type ArgAccess = Value
val Non, First, Second, Both = Value
implicit class ArgAccessOps(acc: ArgAccess) {
def &&(other: ArgAccess): ArgAccess = acc match {
case Non => other match {
case Non => Non
case o => o
}
case First => other match {
case _ if other <= First => First
case _ => Both
}
case Second => other match {
case Non => Second
case First => Both
case Second => Second
case Both => Both
}
case _ => Both
}
}
}
import ArgAccess._
def getArgAccess[A,B](l: Lambda[(A,B),_]): ArgAccess = {
val sch = l.scheduleAll
val first = l.x._1
val second = l.x._2
val accFirst = if (sch.exists(te => te.sym == first)) ArgAccess.First else Non
val accSecond = if (sch.exists(te => te.sym == second)) ArgAccess.Second else Non
accFirst && accSecond
}
object LambdaWithAccess {
def unapply[T](s: Exp[T]): Option[(Def[T], ArgAccess)] = s match {
case Def(l: Lambda[(a,b),_]@unchecked) => Some((l, getArgAccess(l)))
case _ => None
}
}
def sliceFirst[A,B,C](f: Exp[((A,B)) => C]): Exp[A => C] = {
val elem = f.elem.eDom
implicit val eA = elem.eFst
implicit val eB = elem.eSnd
implicit val eC = f.elem.eRange
fun { x: Exp[A] => f(Pair(x, fresh[B])) }
}
def sliceSecond[A,B,C](f: Exp[((A,B)) => C]): Exp[B => C] = {
val elem = f.elem.eDom
implicit val eA = elem.eFst
implicit val eB = elem.eSnd
implicit val eC = f.elem.eRange
fun { x: Exp[B] => f(Pair(fresh[A], x)) }
}
case class ArrayStruct[Val, ValSchema]
(val arrays: Rep[ValSchema])
(implicit val eVal: Elem[Val], val eValSchema: Elem[ValSchema])
extends ArrayDef[Val]
def arrayStruct[Val <: Struct, ValSchema <: Struct]
(arrays: Rep[ValSchema])
(implicit eVal: Elem[Val], eValSchema: Elem[ValSchema]): Arr[Val] =
reifyObject(ArrayStruct(arrays)(eVal, eValSchema))
override def rewriteDef[T](d: Def[T]) = d match {
case ArrayApply(Def(d2), i) => d2 match {
case ArrayApplyMany(xs, is) =>
implicit val eT = xs.elem.eItem
xs(is(i))
case ArrayMap(xs, f) =>
implicit val eT = xs.elem.eItem
f(xs(i))
case ArrayZip(xs: Arr[a] @unchecked, ys: Arr[b] @unchecked) =>
val xs1 = xs.asRep[Array[a]]
val ys1 = ys.asRep[Array[b]]
implicit val e1 = xs1.elem.eItem
implicit val e2 = ys1.elem.eItem
(RepArrayOps(xs1)(e1)(i), RepArrayOps(ys1)(e2)(i))
case ArrayReplicate(_, x) => x
case ArrayStride(xs, start, _, stride) =>
implicit val eT = xs.elem.eItem
xs(start + i * stride)
case ArrayRangeFrom0(_) => i
case _ =>
super.rewriteDef(d)
}
case ArrayApplyMany(Def(d2: Def[Array[a]] @unchecked), is) =>
d2.asDef[Array[a]] match {
case ArrayApplyMany(xs, is1) =>
implicit val eT = xs.elem.eItem
xs(is1(is))
case ArrayMap(xs, f) =>
implicit val eT = xs.elem.eItem
xs(is).mapBy(f)
case ArrayZip(xs: Arr[a] @unchecked, ys: Arr[b] @unchecked) =>
val xs1 = xs.asRep[Array[a]]
val ys1 = ys.asRep[Array[b]]
implicit val e1 = xs1.elem.eItem
implicit val e2 = ys1.elem.eItem
ArrayZip(RepArrayOps(xs1)(e1)(is), RepArrayOps(ys1)(e2)(is))(e1, e2)
case ArrayReplicate(_, x) =>
implicit val eT = x.elem
SArray.replicate(is.length, x)
case ArrayStride(xs, start, _, stride) =>
implicit val eT = xs.elem.eItem
xs(is.map { i => start + i * stride})
case ArrayRangeFrom0(_) => is
case _ =>
super.rewriteDef(d)
}
case ArrayLength(Def(d2: Def[Array[a]]@unchecked)) =>
d2.asDef[Array[a]] match {
case Const(scalaArray) => toRep(scalaArray.length)
case ArrayApplyMany(_, is) => is.length
case ArrayMap(xs, _) =>
implicit val eT = xs.elem.eItem
xs.length
case ArrayZip(xs, _) =>
implicit val eT = xs.elem.eItem
xs.length
case ArrayFilter(xs, f) =>
implicit val eT = xs.elem.eItem
ArrayCount(xs, f)
case ArraySort(xs, o) =>
implicit val eT = xs.elem.eItem
xs.length
case ArrayReplicate(length, _) => length
case ArrayStride(_, _, length, _) => length
case ArrayRangeFrom0(n) => n
case ArrayEmpty() => toRep(0)
case _ =>
super.rewriteDef(d)
}
case ArrayFilter(Def(zip: ArrayZip[x, y]), p_ @ LambdaWithAccess(_, acc)) if acc == ArgAccess.First =>
val pred = p_.asRep[((x,y)) => Boolean]
val xs1 = zip.xs
val ys1 = zip.ys
implicit val eX = xs1.elem.eItem
implicit val eY = ys1.elem.eItem
val positions = ArrayFind(xs1, sliceFirst(pred))
val res = ArrayZip(ArrayApplyMany(xs1, positions), ArrayApplyMany(ys1, positions))
res
case ArrayFilter(Def(zip: ArrayZip[x, y]), p_ @ LambdaWithAccess(_, acc)) if acc == ArgAccess.Second =>
val pred = p_.asRep[((x,y)) => Boolean]
val xs1 = zip.xs
val ys1 = zip.ys
implicit val eX = xs1.elem.eItem
implicit val eY = ys1.elem.eItem
val positions = ArrayFind(ys1, sliceSecond(pred))
val res = ArrayZip(ArrayApplyMany(xs1, positions), ArrayApplyMany(ys1, positions))
res
// case ArrayFind(Def(zip: ArrayZip[x, y]), p_ @ LambdaWithAccess(_, acc)) if acc == ArgAccess.First =>
// val pred = p_.asRep[((x,y)) => Boolean]
// val xs1 = zip.xs
// ArrayFind(xs1, sliceFirst(pred))
//
// case ArrayFind(Def(zip: ArrayZip[x, y]), p_ @ LambdaWithAccess(_, acc)) if acc == ArgAccess.Second =>
// val pred = p_.asRep[((x,y)) => Boolean]
// val ys1 = zip.ys
// ArrayFind(ys1, sliceSecond(pred))
case ArrayCount(Def(zip: ArrayZip[x, y]), p_ @ LambdaWithAccess(_, acc)) if acc == ArgAccess.First =>
val pred = p_.asRep[((x,y)) => Boolean]
val xs1 = zip.xs
implicit val eX = xs1.elem.eItem
ArrayCount(xs1, sliceFirst(pred))
case ArrayCount(Def(zip: ArrayZip[x, y]), p_ @ LambdaWithAccess(_, acc)) if acc == ArgAccess.Second =>
val pred = p_.asRep[((x,y)) => Boolean]
val ys1 = zip.ys
implicit val eY = ys1.elem.eItem
ArrayCount(ys1, sliceSecond(pred))
case ArrayMap(Def(zip: ArrayZip[x, y]), f_ @ LambdaWithAccess(l: Lambda[_,b], acc)) if acc == ArgAccess.First =>
val f = f_.asRep[((x,y)) => b]
val xs1 = zip.xs
implicit val eX = xs1.elem.eItem
implicit val eB = l.eB
val res = ArrayMap(xs1, sliceFirst(f))
res
case ArrayMap(Def(zip: ArrayZip[x, y]), f_ @ LambdaWithAccess(l: Lambda[_,b], acc)) if acc == ArgAccess.Second =>
val f = f_.asRep[((x,y)) => b]
val ys1 = zip.ys
implicit val eY = ys1.elem.eItem
implicit val eB = l.eB
val res = ArrayMap(ys1, sliceSecond(f))
res
case ArrayMap(xs, Def(IdentityLambda())) => xs
// case ArrayMap(xs: Rep[Array[a]], Def(ConstantLambda(v))) =>
// val xs1 = xs.asRep[Array[a]]
// implicit val eA = xs1.elem.eItem
// SArray.replicate(xs1.length, v)(v.elem)
// should be the last rule in this rewriteDef (with ArrayMap on top)
case ArrayMap(ys @ Def(d2), f: Rep[Function1[a, b]]@unchecked) =>
d2.asDef[Array[a]] match {
// case ArrayMap(xs: Rep[Array[c]]@unchecked, g) => //TODO if hasSingleUsage(ys)
// val xs1 = xs.asRep[Array[c]]
// val g1 = g.asRep[c => a]
// implicit val eB = f.elem.eRange
// implicit val eC = xs.elem.eItem
// val res = xs1.map { x => f(g1(x))}
// res
case ArrayReplicate(length, x) =>
implicit val eB = f.elem.eRange
SArray.replicate(length, f(x))
case _ =>
super.rewriteDef(d)
}
    case ArraySumBy(Def(zip: ArrayZip[x, y]), f_ @ LambdaWithAccess(l: Lambda[a,b], acc), n) if acc == ArgAccess.First =>
val f = f_.asRep[((x,y)) => b]
val xs1 = zip.xs
implicit val eX = xs1.elem.eItem
implicit val eB = l.eB
ArraySumBy(xs1, sliceFirst(f), n.asInstanceOf[Numeric[b]])
case ArraySumBy(Def(zip: ArrayZip[x, y]), f_ @ LambdaWithAccess(l: Lambda[a,b], acc), n) if acc == ArgAccess.Second =>
val f = f_.asRep[((x,y)) => b]
val ys1 = zip.ys
implicit val eY = ys1.elem.eItem
implicit val eB = l.eB
ArraySumBy(ys1, sliceSecond(f), n.asInstanceOf[Numeric[b]])
case ArraySumBy(Def(ArrayApplyMany(xs, Def(find: ArrayFind[c]))), by: Rep[Function1[a, b]] @unchecked, n) =>
val a1 = xs.asRep[Array[a]]
val a2 = find.xs
implicit val eA = a1.elem.eItem
implicit val eC = a2.elem.eItem
implicit val eB = by.elem.eRange
implicit val num = n.asInstanceOf[Numeric[b]]
val len = a2.length
loopUntil2(0, num.zero)({ (i, sum) => i === len }, { (i, sum) => (i + 1, IF (find.f(a2(i))) THEN sum + by(a1(i)) ELSE sum) })._2
case ArraySumBy(Def(many: ArrayApplyMany[_]), by:Rep[Function1[a, b]] @unchecked, n) =>
val xs = many.xs.asRep[Array[a]]
val indices = many.indices
implicit val eA = xs.elem.eItem
implicit val eB = by.elem.eRange
implicit val num = n.asInstanceOf[Numeric[b]]
indices.fold[b](num.zero, fun { p => p._1 + by(xs(p._2))})
case ArraySumBy(Def(filter: ArrayFilter[_]), by:Rep[Function1[a, b]] @unchecked, n) =>
val xs = filter.xs.asRep[Array[a]]
implicit val eA = xs.elem.eItem
implicit val eB = by.elem.eRange
implicit val num = n.asInstanceOf[Numeric[b]]
xs.fold[b](num.zero,
fun { p => {
val sum = p._1
val x = p._2
IF (filter.f(x)) THEN sum + by(x) ELSE sum
}})
case ArrayFilter(Def(d2: Def[Array[a]]@unchecked), f) =>
d2.asDef[Array[a]] match {
case ArrayFilter(xs, g) =>
implicit val eT = xs.elem.eItem
xs.filter { x => f(x) && g(x)}
case _ =>
super.rewriteDef(d)
}
case ArrayZip(l, Def(r: ArrayZip[_, _])) =>
implicit val e1 = l.elem.eItem
implicit val e2 = r.selfType.eItem.eFst
implicit val e3 = r.selfType.eItem.eSnd
val res = SArray.tabulate(l.length)(i => Pair(l(i), Pair(r.xs(i), r.ys(i))))
res
case ArrayZip(Def(ArrayReplicate(len, v1: Rep[a])), Def(ArrayReplicate(_, v2: Rep[b]))) =>
implicit val eA = v1.elem
implicit val eB = v2.elem
SArray.replicate(len, (v1, v2))
// Rule: fuse ArrayZip into SArray.tabulate
// should be the last with ArrayZip on top
case ArrayZip(l, Def(map: ArrayMap[x, y]@unchecked)) =>
map.xs match {
case Def(range: ArrayRangeFrom0) =>
implicit val eL = l.elem.eItem
val f = map.f.asInstanceOf[Rep[Int =>y]]
val res = SArray.tabulate(range.n)(i => Pair(l(i), f(i)))
res
case _ =>
super.rewriteDef(d)
}
// // Rule: fuse ArrayMap into ArrayMapReduce
// case mr@ArrayMapReduce(Def(map1: ArrayMap[x, y]@unchecked), map2: Rep[Function1[_, (k, v)]], reduce) =>
// val xs = map1.xs.asRep[Array[x]]
// implicit val eX = xs.elem.eItem
// implicit val eY = map1.elem.eItem
// implicit val eK = mr.elemKey.asElem[k]
// implicit val eV = mr.elemValue.asElem[v]
// val res = xs.mapReduceBy[k, v](fun { e => map2.asRep[y => (k, v)](map1.f(e)) }, reduce.asRep[((v, v)) => v])
// res
case _ => super.rewriteDef(d)
}
}
|
PCMNN/scalan-ce
|
core/src/main/scala/scalan/arrays/ArrayOps.scala
|
Scala
|
apache-2.0
| 39,261
|
package mesosphere.marathon
package api.v2
import mesosphere.UnitTest
import mesosphere.marathon.api.TestAuthFixture
import mesosphere.marathon.core.deployment.{DeploymentPlan, DeploymentStep, DeploymentStepInfo}
import mesosphere.marathon.core.group.GroupManager
import mesosphere.marathon.state.{AppDefinition, PathId}
import mesosphere.marathon.test.{GroupCreation, JerseyTest}
import scala.collection.immutable.Seq
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
class DeploymentsResourceTest extends UnitTest with GroupCreation with JerseyTest {
case class Fixture(
service: MarathonSchedulerService = mock[MarathonSchedulerService],
groupManager: GroupManager = mock[GroupManager],
config: MarathonConf = mock[MarathonConf],
auth: TestAuthFixture = new TestAuthFixture) {
val deploymentsResource = new DeploymentsResource(service, groupManager, auth.auth, auth.auth, config)
}
"Deployments Resource" should {
"access without authentication is denied" in new Fixture {
Given("An unauthenticated request")
auth.authenticated = false
val req = auth.request
val app = AppDefinition(PathId("/test"), cmd = Some("sleep"))
val targetGroup = createRootGroup(apps = Map(app.id -> app))
val deployment = DeploymentStepInfo(DeploymentPlan(createRootGroup(), targetGroup), DeploymentStep(Seq.empty), 1)
service.listRunningDeployments() returns Future.successful(Seq(deployment))
When("the i r =>ndex is fetched")
val running = asyncRequest { r => deploymentsResource.running(req, r) }
Then("we receive a NotAuthenticated response")
running.getStatus should be(auth.NotAuthenticatedStatus)
When("one app version is fetched")
val cancel = asyncRequest { r => deploymentsResource.cancel(deployment.plan.id, false, req, r) }
Then("we receive a NotAuthenticated response")
cancel.getStatus should be(auth.NotAuthenticatedStatus)
}
"access without authorization is denied" in new Fixture {
Given("An unauthorized request")
auth.authenticated = true
auth.authorized = false
val req = auth.request
val app = AppDefinition(PathId("/test"), cmd = Some("sleep"))
val targetGroup = createRootGroup(apps = Map(app.id -> app))
val deployment = DeploymentStepInfo(DeploymentPlan(createRootGroup(), targetGroup), DeploymentStep(Seq.empty), 1)
service.listRunningDeployments() returns Future.successful(Seq(deployment))
When("one app version is fetched")
val cancel = asyncRequest { r => deploymentsResource.cancel(deployment.plan.id, false, req, r) }
Then("we receive a not authorized response")
cancel.getStatus should be(auth.UnauthorizedStatus)
}
}
}
|
gsantovena/marathon
|
src/test/scala/mesosphere/marathon/api/v2/DeploymentsResourceTest.scala
|
Scala
|
apache-2.0
| 2,797
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.source.libsvm
import java.io.File
import com.google.common.base.Charsets
import com.google.common.io.Files
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.{DenseVector, SparseVector, Vectors}
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.util.Utils
class LibSVMRelationSuite extends SparkFunSuite with MLlibTestSparkContext {
var tempDir: File = _
var path: String = _
override def beforeAll(): Unit = {
super.beforeAll()
val lines =
"""
|1 1:1.0 3:2.0 5:3.0
|0
|0 2:4.0 4:5.0 6:6.0
""".stripMargin
tempDir = Utils.createTempDir()
val file = new File(tempDir, "part-00000")
Files.write(lines, file, Charsets.US_ASCII)
path = tempDir.toURI.toString
}
override def afterAll(): Unit = {
Utils.deleteRecursively(tempDir)
super.afterAll()
}
test("select as sparse vector") {
val df = sqlContext.read.format("libsvm").load(path)
assert(df.columns(0) == "label")
assert(df.columns(1) == "features")
val row1 = df.first()
assert(row1.getDouble(0) == 1.0)
val v = row1.getAs[SparseVector](1)
assert(v == Vectors.sparse(6, Seq((0, 1.0), (2, 2.0), (4, 3.0))))
}
test("select as dense vector") {
val df = sqlContext.read.format("libsvm").options(Map("vectorType" -> "dense"))
.load(path)
assert(df.columns(0) == "label")
assert(df.columns(1) == "features")
assert(df.count() == 3)
val row1 = df.first()
assert(row1.getDouble(0) == 1.0)
val v = row1.getAs[DenseVector](1)
assert(v == Vectors.dense(1.0, 0.0, 2.0, 0.0, 3.0, 0.0))
}
test("select a vector with specifying the longer dimension") {
val df = sqlContext.read.option("numFeatures", "100").format("libsvm")
.load(path)
val row1 = df.first()
val v = row1.getAs[SparseVector](1)
assert(v == Vectors.sparse(100, Seq((0, 1.0), (2, 2.0), (4, 3.0))))
}
}
|
chenc10/Spark-PAF
|
mllib/src/test/scala/org/apache/spark/ml/source/libsvm/LibSVMRelationSuite.scala
|
Scala
|
apache-2.0
| 2,775
|
package net.revenj.database.postgres.converters
import java.io.ByteArrayInputStream
import java.nio.charset.Charset
import net.revenj.Utils
import net.revenj.database.postgres.PostgresReader
import org.xml.sax.InputSource
import scala.xml.Elem
object XmlConverter extends Converter[Elem] {
private val utf8 = Charset.forName("UTF-8")
override val dbName = "xml"
override def default(): Elem = null
private def toElem(xml: String) = Utils.parse[Elem](new InputSource(new ByteArrayInputStream(xml.getBytes(utf8))))
override def parseRaw(reader: PostgresReader, start: Int, context: Int): Elem = {
toElem(StringConverter.parseRaw(reader, start, context))
}
override def parseCollectionItem(reader: PostgresReader, context: Int): Elem = {
toElem(StringConverter.parseCollectionItem(reader, context))
}
override def parseNullableCollectionItem(reader: PostgresReader, context: Int): Option[Elem] = {
StringConverter.parseNullableCollectionItem(reader, context).map(toElem)
}
override def toTuple(value: Elem): PostgresTuple = ValueTuple.from(value.toString)
}
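/*
 * Hedged usage sketch (not part of the original file): XmlConverter stores a scala.xml.Elem as the
 * Postgres "xml" type by serializing the element to its string form (see toTuple above). Reading a
 * value back requires a PostgresReader and is omitted here; only the write side is illustrated.
 */
object XmlConverterUsageSketch {
  def main(args: Array[String]): Unit = {
    val elem: Elem = <note to="db"><body>hello</body></note>
    println(XmlConverter.dbName)           // "xml"
    val tuple = XmlConverter.toTuple(elem) // PostgresTuple built from elem.toString
    println(tuple)
  }
}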
|
ngs-doo/revenj
|
scala/revenj-core/src/main/scala/net/revenj/database/postgres/converters/XmlConverter.scala
|
Scala
|
bsd-3-clause
| 1,101
|
package ch06
/**
* Created by fqc on 2016/7/13.
 * Demonstrates the use of getter and setter.
*/
class CannotYoungAgain {
private var privateAge: Int = 20
def age = privateAge //getter
  /*def age_(newVale: Int): Unit = { // an ordinary method; this is not the special setter syntax
    //setter
    if (newVale > privateAge) privateAge = newVale // the age can only increase
  }*/
  def age_= (newValue: Int) { // setter: note that "age_=" is a single name, with parameter (newValue: Int)
    if (newValue > privateAge) privateAge = newValue // the age can only increase
}
}
object CannotYoungAgain {
def main(args: Array[String]) {
var again: CannotYoungAgain = new CannotYoungAgain
println(again.age)
again.age = 40
again.age = 10
println(again.age)
}
}
|
fqc/Scala_sidepro
|
src/ch06/CannotYoungAgain.scala
|
Scala
|
mit
| 735
|
/*
* Copyright (c) 2018. Fengguo Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0
* which accompanies this distribution, and is available at
* https://www.apache.org/licenses/LICENSE-2.0
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.jawa.flow.interprocedural
import org.argus.jawa.flow.Context
import org.argus.jawa.flow.pta.rfa.RFAFact
import org.argus.jawa.flow.pta.{FieldSlot, Instance, PTAResult, VarSlot}
import org.argus.jawa.core._
import org.argus.jawa.core.elements.{JawaType, Signature}
import org.argus.jawa.core.util.{ISet, _}
trait IndirectCall {
def isIndirectCall(global: Global, typ: JawaType, subSig: String): Boolean
def guessCallTarget(global: Global, signature: Signature): ISet[JawaMethod]
def getCallTarget(global: Global, inss: ISet[Instance], callerContext: Context, args: IList[String], pTAResult: PTAResult): (ISet[(JawaMethod, Instance)], (ISet[RFAFact], IList[String], IList[String]) => ISet[RFAFact])
}
class RunnableStartRun extends IndirectCall {
private val start: Signature = new Signature("Ljava/lang/Runnable;.start:()V")
private val run: Signature = new Signature("Ljava/lang/Runnable;.run:()V")
override def isIndirectCall(global: Global, typ: JawaType, subSig: String): Boolean = {
val clazz = global.getClassOrResolve(typ)
val runnable = global.getClassOrResolve(start.getClassType)
runnable.isAssignableFrom(clazz) && subSig == start.getSubSignature
}
def guessCallTarget(global: Global, signature: Signature): ISet[JawaMethod] = {
val newsig = new Signature(signature.getClassType, run.methodName, run.proto)
CallHandler.resolveSignatureBasedCall(global, newsig, "virtual")
}
override def getCallTarget(global: Global, inss: ISet[Instance], callerContext: Context, args: IList[String], pTAResult: PTAResult): (ISet[(JawaMethod, Instance)], (ISet[RFAFact], IList[String], IList[String]) => ISet[RFAFact]) = {
val callees: MSet[(JawaMethod, Instance)] = msetEmpty
inss.foreach { ins =>
val fieldSlot = FieldSlot(ins, Constants.THREAD_RUNNABLE)
val runnableInss = pTAResult.pointsToSet(callerContext, fieldSlot)
runnableInss foreach { runnableIns =>
val clazz = global.getClassOrResolve(runnableIns.typ)
val runnable = global.getClassOrResolve(run.getClassType)
if (runnable.isAssignableFrom(clazz)) {
clazz.getMethod(run.getSubSignature) match {
case Some(m) => callees += ((m, runnableIns))
case None =>
}
}
}
}
(callees.toSet, mapFactsToCallee)
}
def mapFactsToCallee: (ISet[RFAFact], IList[String], IList[String]) => ISet[RFAFact] = (factsToCallee, args, params) => {
val varFacts = factsToCallee.filter(f=>f.s.isInstanceOf[VarSlot])
val result = msetEmpty[RFAFact]
val killFacts = msetEmpty[RFAFact]
val argSlot = VarSlot(args.head)
val paramSlot = VarSlot(params.head)
varFacts.foreach { varFact =>
if(varFact.s.getId == argSlot.getId) {
val runnableSlot = FieldSlot(varFact.v, Constants.THREAD_RUNNABLE)
factsToCallee.foreach { fact =>
if(fact.s == runnableSlot) {
result += RFAFact(paramSlot, fact.v)
killFacts += fact
}
}
}
}
factsToCallee -- varFacts -- killFacts ++ result
}
}
class ExecutorExecuteRun extends IndirectCall {
private val start: Signature = new Signature("Ljava/util/concurrent/ExecutorService;.execute:(Ljava/lang/Runnable;)V")
private val run: Signature = new Signature("Ljava/lang/Runnable;.run:()V")
override def isIndirectCall(global: Global, typ: JawaType, subSig: String): Boolean = {
val clazz = global.getClassOrResolve(typ)
val executor = global.getClassOrResolve(start.getClassType)
executor.isAssignableFrom(clazz) && subSig == start.getSubSignature
}
def guessCallTarget(global: Global, signature: Signature): ISet[JawaMethod] = isetEmpty
override def getCallTarget(global: Global, inss: ISet[Instance], callerContext: Context, args: IList[String], pTAResult: PTAResult): (ISet[(JawaMethod, Instance)], (ISet[RFAFact], IList[String], IList[String]) => ISet[RFAFact]) = {
val varSlot = VarSlot(args(1))
val runnableInss = pTAResult.pointsToSet(callerContext, varSlot)
val callees: MSet[(JawaMethod, Instance)] = msetEmpty
runnableInss.foreach { runnableIns =>
val clazz = global.getClassOrResolve(runnableIns.typ)
val runnable = global.getClassOrResolve(run.getClassType)
if (runnable.isAssignableFrom(clazz)) {
clazz.getMethod(run.getSubSignature) match {
case Some(m) => callees += ((m, runnableIns))
case None =>
}
}
}
(callees.toSet, mapFactsToCallee)
}
def mapFactsToCallee: (ISet[RFAFact], IList[String], IList[String]) => ISet[RFAFact] = (factsToCallee, args, params) => {
val varFacts = factsToCallee.filter(f=>f.s.isInstanceOf[VarSlot])
val result = msetEmpty[RFAFact]
val argSlot = VarSlot(args(1))
val paramSlot = VarSlot(params.head)
varFacts.foreach { varFact =>
if(varFact.s.getId == argSlot.getId) {
result += RFAFact(paramSlot, varFact.v)
}
}
factsToCallee -- varFacts ++ result
}
}
class HandlerMessage extends IndirectCall {
private val dispatchMessage: Signature = new Signature("Landroid/os/Handler;.dispatchMessage:(Landroid/os/Message;)V")
private val handleMessage: Signature = new Signature("Landroid/os/Handler;.handleMessage:(Landroid/os/Message;)V")
override def isIndirectCall(global: Global, typ: JawaType, subSig: String): Boolean = {
val clazz = global.getClassOrResolve(typ)
val handler = global.getClassOrResolve(dispatchMessage.getClassType)
handler.isAssignableFrom(clazz) && subSig == dispatchMessage.getSubSignature
}
def guessCallTarget(global: Global, signature: Signature): ISet[JawaMethod] = {
val newsig = new Signature(signature.getClassType, handleMessage.methodName, handleMessage.proto)
CallHandler.resolveSignatureBasedCall(global, newsig, "virtual")
}
override def getCallTarget(global: Global, inss: ISet[Instance], callerContext: Context, args: IList[String], pTAResult: PTAResult): (ISet[(JawaMethod, Instance)], (ISet[RFAFact], IList[String], IList[String]) => ISet[RFAFact]) = {
val callees: MSet[(JawaMethod, Instance)] = msetEmpty
inss.foreach { ins =>
val clazz = global.getClassOrResolve(ins.typ)
val handler = global.getClassOrResolve(handleMessage.getClassType)
if (handler.isAssignableFrom(clazz)) {
clazz.getMethod(handleMessage.getSubSignature) match {
case Some(m) => callees += ((m, ins))
case None =>
}
}
}
(callees.toSet, mapFactsToCallee)
}
def mapFactsToCallee: (ISet[RFAFact], IList[String], IList[String]) => ISet[RFAFact] = (factsToCallee, args, params) => {
val varFacts = factsToCallee.filter(f=>f.s.isInstanceOf[VarSlot])
val argSlots = args.map(VarSlot)
val paramSlots = params.map(VarSlot)
val result = msetEmpty[RFAFact]
for(i <- argSlots.indices){
val argSlot = argSlots(i)
val paramSlot = paramSlots(i)
varFacts.foreach{ fact =>
if(fact.s.getId == argSlot.getId) result += RFAFact(paramSlot, fact.v)
}
}
factsToCallee -- varFacts ++ result
}
}
class AsyncTask extends IndirectCall {
private val execute: Signature = new Signature("Landroid/os/AsyncTask;.execute:([Ljava/lang/Object;)Landroid/os/AsyncTask;")
private val run: Signature = new Signature("Landroid/os/AsyncTask;.run:([Ljava/lang/Object;)V")
override def isIndirectCall(global: Global, typ: JawaType, subSig: String): Boolean = {
val clazz = global.getClassOrResolve(typ)
val asyncTask = global.getClassOrResolve(execute.getClassType)
asyncTask.isAssignableFrom(clazz) && subSig == execute.getSubSignature
}
def guessCallTarget(global: Global, signature: Signature): ISet[JawaMethod] = {
val newsig = new Signature(signature.getClassType, run.methodName, run.proto)
CallHandler.resolveSignatureBasedCall(global, newsig, "virtual")
}
override def getCallTarget(global: Global, inss: ISet[Instance], callerContext: Context, args: IList[String], pTAResult: PTAResult): (ISet[(JawaMethod, Instance)], (ISet[RFAFact], IList[String], IList[String]) => ISet[RFAFact]) = {
val callees: MSet[(JawaMethod, Instance)] = msetEmpty
inss.foreach { ins =>
val clazz = global.getClassOrResolve(ins.typ)
val asyncTask = global.getClassOrResolve(run.getClassType)
if (asyncTask.isAssignableFrom(clazz)) {
clazz.getMethod(run.getSubSignature) match {
case Some(m) => callees += ((m, ins))
case None =>
}
}
}
(callees.toSet, mapFactsToCallee)
}
def mapFactsToCallee: (ISet[RFAFact], IList[String], IList[String]) => ISet[RFAFact] = (factsToCallee, args, params) => {
val varFacts = factsToCallee.filter(f=>f.s.isInstanceOf[VarSlot])
val argSlots = args.map(VarSlot)
val paramSlots = params.map(VarSlot)
val result = msetEmpty[RFAFact]
for(i <- argSlots.indices){
val argSlot = argSlots(i)
val paramSlot = paramSlots(i)
varFacts.foreach{ fact =>
if(fact.s.getId == argSlot.getId) result += RFAFact(paramSlot, fact.v)
}
}
factsToCallee -- varFacts ++ result
}
}
/**
* Created by fgwei on 4/21/17.
*/
object IndirectCallResolver {
/**
 * Registered resolvers for indirect calls, e.g., Ljava/lang/Thread;.start:()V -> Ljava/lang/Runnable;.run:()V.
 * Only handles non-static calls for now.
*/
private var indirectCallResolvers: ISet[IndirectCall] = isetEmpty
def addCall(call: IndirectCall): Unit = this.indirectCallResolvers += call
addCall(new RunnableStartRun)
addCall(new ExecutorExecuteRun)
addCall(new HandlerMessage)
addCall(new AsyncTask)
def getCallResolver(global: Global, typ: JawaType, subSig: String): Option[IndirectCall] = {
indirectCallResolvers.find { c =>
c.isIndirectCall(global, typ, subSig)
}
}
}
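/*
 * Hedged extension sketch (not part of the original file): additional indirect-call mappings are
 * registered through IndirectCallResolver.addCall. The class below is a hypothetical, deliberately
 * minimal resolver for java.util.Timer.schedule -> java.util.TimerTask.run, modelled on
 * ExecutorExecuteRun above; it resolves run() targets from the TimerTask argument but simply passes
 * facts through unchanged, so it illustrates the registration pattern rather than a complete resolver.
 */
class TimerScheduleRunSketch extends IndirectCall {
  private val schedule: Signature = new Signature("Ljava/util/Timer;.schedule:(Ljava/util/TimerTask;J)V")
  private val run: Signature = new Signature("Ljava/util/TimerTask;.run:()V")
  override def isIndirectCall(global: Global, typ: JawaType, subSig: String): Boolean = {
    val clazz = global.getClassOrResolve(typ)
    val timer = global.getClassOrResolve(schedule.getClassType)
    timer.isAssignableFrom(clazz) && subSig == schedule.getSubSignature
  }
  override def guessCallTarget(global: Global, signature: Signature): ISet[JawaMethod] = isetEmpty
  override def getCallTarget(global: Global, inss: ISet[Instance], callerContext: Context, args: IList[String], pTAResult: PTAResult): (ISet[(JawaMethod, Instance)], (ISet[RFAFact], IList[String], IList[String]) => ISet[RFAFact]) = {
    // resolve the run() method of whatever TimerTask instances the argument may point to
    val taskInss = pTAResult.pointsToSet(callerContext, VarSlot(args(1)))
    val callees: MSet[(JawaMethod, Instance)] = msetEmpty
    taskInss.foreach { ins =>
      val clazz = global.getClassOrResolve(ins.typ)
      val task = global.getClassOrResolve(run.getClassType)
      if (task.isAssignableFrom(clazz)) {
        clazz.getMethod(run.getSubSignature).foreach(m => callees += ((m, ins)))
      }
    }
    val passFactsThrough: (ISet[RFAFact], IList[String], IList[String]) => ISet[RFAFact] =
      (factsToCallee, _, _) => factsToCallee
    (callees.toSet, passFactsThrough)
  }
}
// Registration (hypothetical): IndirectCallResolver.addCall(new TimerScheduleRunSketch)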
|
arguslab/Argus-SAF
|
jawa/src/main/scala/org/argus/jawa/flow/interprocedural/IndirectCallResolver.scala
|
Scala
|
apache-2.0
| 10,197
|
/*
* Copyright (c) 2014-2015 Snowplow Analytics Ltd.
* All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache
* License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied.
*
* See the Apache License Version 2.0 for the specific language
* governing permissions and limitations there under.
*/
package com.snowplowanalytics.snowplow.storage.kinesis.s3
// json4s
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.JsonDSL._
// Config
import com.typesafe.config.Config
// Tracker
import com.snowplowanalytics.snowplow.scalatracker.Tracker
import com.snowplowanalytics.snowplow.scalatracker.SelfDescribingJson
import com.snowplowanalytics.snowplow.scalatracker.emitters.AsyncEmitter
/**
* Functionality for sending Snowplow events for monitoring purposes
*/
object SnowplowTracking {
private val HeartbeatInterval = 300000L
private val StorageType = "AMAZON_S3"
/**
* Configure a Tracker based on the configuration HOCON
*
* @param config The "monitoring.snowplow" section of the HOCON
* @return a new tracker instance
*/
def initializeTracker(config: Config): Tracker = {
val endpoint = config.getString("collector-uri")
val port = config.getInt("collector-port")
val appName = config.getString("app-id")
// Not yet used
val method = config.getString("method")
val emitter = AsyncEmitter.createAndStart(endpoint, port)
new Tracker(List(emitter), generated.Settings.name, appName)
}
/**
* Send an initialization event and schedule heartbeat and shutdown events
*
* @param tracker a Tracker instance
*/
def initializeSnowplowTracking(tracker: Tracker) {
trackApplicationInitialization(tracker)
Runtime.getRuntime.addShutdownHook(new Thread() {
override def run() {
trackApplicationShutdown(tracker)
}
})
val heartbeatThread = new Thread {
override def run() {
while (true) {
trackApplicationHeartbeat(tracker, HeartbeatInterval)
Thread.sleep(HeartbeatInterval)
}
}
}
heartbeatThread.start()
}
/**
   * If a tracker has been configured, send a storage_write_failed event
*
* @param tracker a Tracker instance
* @param lastRetryPeriod The backoff period after a failure
* @param message What went wrong
*/
def sendFailureEvent(
tracker: Tracker,
lastRetryPeriod: Long,
message: String) {
tracker.trackUnstructEvent(SelfDescribingJson(
"iglu:com.snowplowanalytics.monitoring.kinesis/storage_write_failed/jsonschema/1-0-0",
("lastRetryPeriod" -> lastRetryPeriod) ~
("storage" -> StorageType) ~
("message" -> message)
))
}
/**
* Send an application_initialized unstructured event
*
* @param tracker a Tracker instance
*/
private def trackApplicationInitialization(tracker: Tracker) {
tracker.trackUnstructEvent(SelfDescribingJson(
"iglu:com.snowplowanalytics.monitoring.kinesis/app_initialized/jsonschema/1-0-0",
JObject(Nil)
))
}
/**
* Send an application_shutdown unstructured event
*
* @param tracker a Tracker instance
*/
def trackApplicationShutdown(tracker: Tracker) {
tracker.trackUnstructEvent(SelfDescribingJson(
"iglu:com.snowplowanalytics.monitoring.kinesis/app_shutdown/jsonschema/1-0-0",
JObject(Nil)
))
}
/**
* Send a heartbeat unstructured event
*
* @param tracker a Tracker instance
* @param heartbeatInterval Time between heartbeats in milliseconds
*/
private def trackApplicationHeartbeat(tracker: Tracker, heartbeatInterval: Long) {
tracker.trackUnstructEvent(SelfDescribingJson(
"iglu:com.snowplowanalytics.monitoring.kinesis/app_heartbeat/jsonschema/1-0-0",
"interval" -> heartbeatInterval
))
}
}
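// Hedged usage sketch (not in the original file): wiring the tracker from the
// "monitoring.snowplow" section of the application HOCON. The `config` value and the
// surrounding application object are hypothetical; the keys mirror those read by
// initializeTracker above.
//
//   val snowplowConfig = config.getConfig("monitoring.snowplow")
//   val tracker = SnowplowTracking.initializeTracker(snowplowConfig)
//   SnowplowTracking.initializeSnowplowTracking(tracker) // init event, heartbeat, shutdown hook
//   // later, after a failed write:
//   SnowplowTracking.sendFailureEvent(tracker, lastRetryPeriod = 10000L, message = "S3 write failed")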
|
ClaraVista-IT/kinesis-s3
|
src/main/scala/com.snowplowanalytics.snowplow.storage.kinesis/s3/SnowplowTracking.scala
|
Scala
|
apache-2.0
| 4,225
|
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Sun Jan 11 19:05:20 EST 2015
* @see LICENSE (MIT style license file).
*/
// FIX: needs improved optimization
package scalation.analytics
import math.{exp, log, round}
import scalation.linalgebra.{MatriD, MatrixD, VectorD, VectorI}
import scalation.math.Combinatorics.fac
import scalation.minima.QuasiNewton
import scalation.plot.Plot
import scalation.util.Error
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PoissonRegression` class supports Poisson regression. In this case,
 *  'x' may be multi-dimensional '[1, x_1, ... x_k]'. Fit the parameter
* vector 'b' in the Poisson regression equation
* <p>
* log (mu(x)) = b dot x = b_0 + b_1 * x_1 + ... b_k * x_k
* <p>
 *  where 'mu(x)' is the expected value of the integer-valued response 'y'.
* @see see.stanford.edu/materials/lsoeldsee263/05-ls.pdf
* @param x the input/design matrix augmented with a first column of ones
* @param y the integer response vector, y_i in {0, 1, ... }
 *  @param fn the names of the features/variables
*/
class PoissonRegression (x: MatrixD, y: VectorI, fn: Array [String] = null)
extends Predictor with Error
{
if (y != null && x.dim1 != y.dim) flaw ("constructor", "dimensions of x and y are incompatible")
private val DEBUG = false // debug flag
private val k = x.dim2 - 1 // number of variables
private val n = x.dim1.toDouble // number of data points (rows)
private val r_df = (n-1.0) / (n-k-1.0) // ratio of degrees of freedom
private var n_dev = -1.0 // null dev: -LL, for null model (intercept only)
private var r_dev = -1.0 // residual dev: -LL, for full model
private var aic = -1.0 // Akaike’s Information Criterion
    private var pseudo_rSq = -1.0                   // McFadden's pseudo R-squared
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** For a given parameter vector b, compute - Log-Likelihood (-LL).
* -LL is the standard measure.
* @see dept.stat.lsa.umich.edu/~kshedden/Courses/Stat600/Notes/glm.pdf
* @param b the parameters to fit
*/
def ll (b: VectorD): Double =
{
var sum = 0.0
for (i <- 0 until x.dim1) {
val bx = b dot x(i)
sum += y(i) * bx - exp (bx) // last term not needed [ - log (fac (y(i))) ]
} // for
-sum // set up for minimization
} // ll
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** For a given parameter vector b = [b(0)], compute - Log-Likelihood (-LL).
     *  -LL is the standard measure.
* @see dept.stat.lsa.umich.edu/~kshedden/Courses/Stat600/Notes/glm.pdf
* @param b the parameters to fit
*/
def ll_null (b: VectorD): Double =
{
var sum = 0.0
for (i <- 0 until x.dim1) {
val bx = b(0) // only use the intercept
sum += y(i) * bx - exp (bx) // last term not needed [ - log (fac (y(i))) ]
} // for
- sum // set up for minimization
} // ll_null
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** For the full model, train the predictor by fitting the parameter vector
     *  (b-vector) in the Poisson regression equation using maximum likelihood.
     *  Do this by minimizing -LL.
     */
override def train ()
{
val b0 = new VectorD (x.dim2) // use b_0 = 0 for starting guess for parameters
        val bfgs = new QuasiNewton (ll)                 // minimizer for -LL
b = bfgs.solve (b0) // find optimal solution for parameters
r_dev = ll (b) // measure of fitness for full model
aic = r_dev + 2.0 * x.dim2
} // train
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** For the null model, train the predictor by fitting the parameter vector
     *  (b-vector) in the Poisson regression equation using maximum likelihood.
     *  Do this by minimizing -LL.
     */
def train_null ()
{
val b0 = new VectorD (x.dim2) // use b0 = 0 for starting guess for parameters
        val bfgs = new QuasiNewton (ll_null)            // minimizer for -LL
val b_n = bfgs.solve (b0) // find optimal solution for parameters
        n_dev = ll_null (b_n)                           // measure of fitness for null model
} // train_null
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the quality of fit including rSquared. Assumes both train_null and
* train have already been called.
*/
def fit: VectorD =
{
pseudo_rSq = 1.0 - r_dev / n_dev
VectorD (n_dev, r_dev, aic, pseudo_rSq)
} // fit
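    // Worked example (using the values quoted for `PoissonRegressionTest` below):
    // with n_dev = 43.860 and r_dev = 25.533,
    //   pseudo_rSq = 1.0 - 25.533 / 43.860 ~ 0.4178
    //   aic        = 25.533 + 2.0 * 2      = 29.533   (x has 2 columns)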
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Predict the value of 'y = f(z)' by evaluating the formula 'y = exp (b dot z)'.
* @param z the new vector to predict
*/
def predict (z: VectorD): Double = (round (exp (b dot z))).toDouble
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Predict the value of 'y = f(z)' by evaluating the formula 'y = exp (b dot z)',
* for an integer vector.
* @param z the new integer vector to predict
*
def predict (z: VectorI): Tuple2 [Int, String] = predict (z.toDouble)
*/
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Predict the value of 'y = f(z)' by evaluating the formula 'y = exp (b dot z)',
* for each row 'z(i)' of matrix z.
* @param z the new matrix to predict
*/
def predict (z: MatriD): VectorD =
{
VectorD (for (i <- 0 until z.dim1) yield predict (z(i)))
} // predict
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Perform backward elimination to remove the least predictive variable
* from the model, returning the variable to eliminate, the new parameter
* vector, the new R-squared value and the new F statistic.
* FIX or remove
*
def backElim (): Tuple4 [Int, VectorD, Double, Double] =
{
var j_max = -1 // index of variable to eliminate
var b_max: VectorD = null // parameter values for best solution
var rSq_max = -1.0 // currently maximizing R squared
var fS_max = -1.0 // could optimize on F statistic
for (j <- 1 to k) {
val keep = n.toInt // i-value large enough to not exclude any rows in slice
val rg_j = new PoissonRegression (x.sliceExclude (keep, j), y) // regress with x_j removed
rg_j.train ()
val (b, rSq, fS, rBar) = rg_j.fit
if (rSq > rSq_max) { j_max = j; b_max = b; rSq_max = rSq; fS_max = fS}
} // for
(j_max, b_max, rSq_max, fS_max)
} // backElim
*/
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the Variance Inflation Factor (VIF) for each variable to test
* for multi-colinearity by regressing xj against the rest of the variables.
     *  A VIF over 10 indicates that over 90% of the variance of xj can be predicted
* from the other variables, so xj is a candidate for removal from the model.
* FIX or remove
*
def vif: VectorD =
{
val vifV = new VectorD (k) // VIF vector
for (j <- 1 to k) {
val keep = n.toInt // i-value large enough to not exclude any rows in slice
val x_j = x.col(j) // x_j is jth column in x
val rg_j = new PoissonRegression (x.sliceExclude (keep, j), x_j) // regress with x_j removed
rg_j.train ()
vifV(j-1) = 1.0 / (1.0 - rg_j.fit._2) // store vif for x_1 in vifV(0)
} // for
vifV
} // vif
*/
} // PoissonRegression class
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PoissonRegression` object tests the `PoissonRegression` class.
* @see http://www.cookbook-r.com/Statistical_analysis/Logistic_regression/
* Answer: b = (-8.8331, 0.4304),
 *           n_dev = 43.860, r_dev = 25.533, aic = 29.533, pseudo_rSq = 0.4178
*/
object PoissonRegressionTest extends App
{
// 32 data points: One Mpg
val x = new MatrixD ((32, 2), 1.0, 21.0, // 1 - Mazda RX4
                                  1.0, 21.0,        // 2 - Mazda RX4 Wag
1.0, 22.8, // 3 - Datsun 710
1.0, 21.4, // 4 - Hornet 4 Drive
1.0, 18.7, // 5 - Hornet Sportabout
1.0, 18.1, // 6 - Valiant
1.0, 14.3, // 7 - Duster 360
1.0, 24.4, // 8 - Merc 240D
1.0, 22.8, // 9 - Merc 230
1.0, 19.2, // 10 - Merc 280
1.0, 17.8, // 11 - Merc 280C
1.0, 16.4, // 12 - Merc 450S
1.0, 17.3, // 13 - Merc 450SL
1.0, 15.2, // 14 - Merc 450SLC
1.0, 10.4, // 15 - Cadillac Fleetwood
1.0, 10.4, // 16 - Lincoln Continental
1.0, 14.7, // 17 - Chrysler Imperial
1.0, 32.4, // 18 - Fiat 128
1.0, 30.4, // 19 - Honda Civic
1.0, 33.9, // 20 - Toyota Corolla
1.0, 21.5, // 21 - Toyota Corona
1.0, 15.5, // 22 - Dodge Challenger
1.0, 15.2, // 23 - AMC Javelin
1.0, 13.3, // 24 - Camaro Z28
1.0, 19.2, // 25 - Pontiac Firebird
1.0, 27.3, // 26 - Fiat X1-9
1.0, 26.0, // 27 - Porsche 914-2
1.0, 30.4, // 28 - Lotus Europa
1.0, 15.8, // 29 - Ford Pantera L
1.0, 19.7, // 30 - Ferrari Dino
1.0, 15.0, // 31 - Maserati Bora
1.0, 21.4) // 32 - Volvo 142E
val y = VectorI (0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1)
var z: VectorD = null
println ("x = " + x)
println ("y = " + y)
val rg = new PoissonRegression (x, y)
rg.train_null () // train based on null model
rg.train () // train based on full model
val b = rg.coefficient // obtain coefficients
val ft = rg.fit // obtain quality of fit
println ("---------------------------------------------------------------")
println ("Poisson Regression Regression Results")
println ("b = " + b)
println ("n_dev = " + ft(0))
println ("r_dev = " + ft(1))
println ("aic = " + ft(2))
println ("pseudo_rSq = " + ft(3))
z = VectorD (1.0, 15.0) // predict point z
println ("predict (" + z + ") = " + rg.predict (z))
z = VectorD (1.0, 30.0) // predict point z
println ("predict (" + z + ") = " + rg.predict (z))
} // PoissonRegressionTest object
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PoissonRegressionTest2` object tests the `PoissonRegression` class.
* @see statmaster.sdu.dk/courses/st111/module03/index.html
* @see www.stat.wisc.edu/~mchung/teaching/.../GLM.logistic.Rpackage.pdf
*/
object PoissonRegressionTest2 extends App
{
// 40 data points: One Low Medium High
val x = new MatrixD ((40, 4), 1.0, 102.0, 89.0, 0.0,
1.0, 7.0, 233.0, 1.0,
1.0, 0.0, 4.0, 41.0,
1.0, 8.0, 37.0, 13.0,
1.0, 40.0, 79.0, 26.0,
1.0, 0.0, 625.0, 156.0,
1.0, 0.0, 12.0, 79.0,
1.0, 0.0, 3.0, 119.0,
1.0, 115.0, 136.0, 65.0,
1.0, 428.0, 416.0, 435.0,
1.0, 34.0, 174.0, 56.0,
1.0, 0.0, 0.0, 37.0,
1.0, 97.0, 162.0, 89.0,
1.0, 56.0, 47.0, 132.0,
1.0, 1214.0, 1515.0, 324.0,
1.0, 30.0, 103.0, 161.0,
1.0, 8.0, 11.0, 158.0,
1.0, 52.0, 155.0, 144.0,
1.0, 142.0, 119.0, 24.0,
1.0, 1370.0, 2968.0, 1083.0,
1.0, 790.0, 161.0, 231.0,
1.0, 1142.0, 157.0, 131.0,
1.0, 0.0, 2.0, 49.0,
1.0, 0.0, 0.0, 50.0,
1.0, 5.0, 68.0, 49.0,
1.0, 0.0, 0.0, 48.0,
1.0, 0.0, 6.0, 40.0,
1.0, 1.0, 8.0, 64.0,
1.0, 0.0, 998.0, 551.0,
1.0, 253.0, 99.0, 60.0,
1.0, 1395.0, 799.0, 244.0,
1.0, 0.0, 0.0, 50.0,
1.0, 1.0, 68.0, 145.0,
1.0, 1318.0, 1724.0, 331.0,
1.0, 0.0, 0.0, 79.0,
1.0, 3.0, 31.0, 37.0,
1.0, 195.0, 108.0, 206.0,
1.0, 0.0, 15.0, 121.0,
1.0, 0.0, 278.0, 513.0,
1.0, 0.0, 0.0, 253.0)
val y = VectorI (0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1,
1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1)
val fn = Array ("One", "Low", "Medium", "High")
println ("x = " + x)
println ("y = " + y)
// val rg = new PoissonRegression (x(0 until x.dim1, 0 until 2), y, fn)
val rg = new PoissonRegression (x, y, fn)
rg.train_null () // train based on null model
rg.train () // train based on full model
println ("---------------------------------------------------------------")
println ("Poisson Regression Regression Results")
println ("fit = " + rg.fit)
val z = VectorD (1.0, 100.0, 100.0, 100.0) // predict point z
println ("predict (" + z + ") = " + rg.predict (z))
// new Plot (x.col(1), y, yyp)
// new Plot (x.col(2), y, yyp)
} // PoissonRegressionTest2 object
|
mvnural/scalation
|
src/main/scala/scalation/analytics/PoissonRegression.scala
|
Scala
|
mit
| 16,488
|
package monocle.std
@deprecated("instances have been move to typeclass companion object", since = "1.4.0")
object vector
@deprecated("instances have been move to typeclass companion object", since = "1.4.0")
trait VectorOptics
|
aoiroaoino/Monocle
|
core/shared/src/main/scala/monocle/std/Vector.scala
|
Scala
|
mit
| 229
|
package main.java.piratebot.pirates
import main.java.piratebot._
class Brute(game: Game, player: Player) extends Pirate(game, player) {
val rank = 14
val name = "Brute"
override def dayAction(round : Round): RetriableMethodResponse.Value = {
val pirateToKill = round.dayStack.last
round.killPirate(pirateToKill)
game.printer.print(Channel.Debug, tag + ": killed " + pirateToKill.tag)
RetriableMethodResponse.Complete
}
def getSubRank(player : Player) : Int = {
Array(1, 6, 3, 2, 4, 5)(player.playerId)
}
}
|
ItCouldHaveBeenGreat/Eyepatch
|
src/main/java/piratebot/pirates/brute.scala
|
Scala
|
gpl-3.0
| 572
|
package at.logic.gapt.expr
object NonLogicalConstant {
def unapply( e: Expr ) = e match {
case c: LogicalConstant => None
case Const( n, t ) => Some( n, t )
case _ => None
}
}
object HOLFunction {
def apply( head: Expr, args: List[Expr] ): Expr = {
val res = Apps( head, args )
require( res.ty != To )
res
}
def unapply( e: Expr ): Option[( Expr, List[Expr] )] = e match {
case Apps( head @ ( NonLogicalConstant( _, _ ) | Var( _, _ ) ), args ) if e.ty != To => Some( head, args )
case _ => None
}
}
object FOLHeadType {
def apply( ret: Ty, arity: Int ): Ty = arity match {
case 0 => ret
case n => Ti -> FOLHeadType( ret, n - 1 )
}
def unapply( t: Ty ): Option[( Ty, Int )] = t match {
case Ti -> FOLHeadType( t, n ) => Some( ( t, n + 1 ) )
case _ => Some( ( t, 0 ) )
}
}
object FOLFunction {
def apply( sym: String, args: FOLTerm* )( implicit dummyImplicit: DummyImplicit ): FOLTerm = FOLFunction( sym, args )
def apply( sym: String, args: Seq[FOLTerm] ): FOLTerm =
Apps( FOLFunctionConst( sym, args.size ), args ).asInstanceOf[FOLTerm]
def unapply( e: FOLTerm ): Option[( String, List[FOLTerm] )] = e match {
case Apps( FOLFunctionConst( sym, _ ), args ) =>
Some( ( sym, args.asInstanceOf[List[FOLTerm]] ) )
case _ => None
}
}
class QuantifierHelper( val q: QuantifierC ) {
def apply( v: Var, formula: Expr ): Formula =
App( q( v.ty ), Abs( v, formula ) ).asInstanceOf[Formula]
def apply( v: FOLVar, formula: FOLFormula ): FOLFormula =
apply( v, formula.asInstanceOf[Expr] ).asInstanceOf[FOLFormula]
def unapply( e: Expr ): Option[( Var, Formula )] = e match {
// TODO: eta-expansion?
case App( q( _ ), Abs( v, formula: Formula ) ) => Some( ( v, formula ) )
case _ => None
}
def unapply( f: FOLFormula ): Option[( FOLVar, FOLFormula )] =
unapply( f.asInstanceOf[FOLExpression] )
def unapply( f: FOLExpression ): Option[( FOLVar, FOLFormula )] = unapply( f.asInstanceOf[Expr] ) match {
case Some( ( v: FOLVar, formula: FOLFormula ) ) => Some( ( v, formula ) )
case _ => None
}
object Block {
def apply( vars: Seq[Var], formula: Expr ): Expr = vars match {
case v +: vs => QuantifierHelper.this( v, apply( vs, formula ) )
case Seq() => formula
}
def apply( vars: Seq[Var], formula: Formula ): Formula =
apply( vars, formula.asInstanceOf[Expr] ).asInstanceOf[Formula]
def apply( vars: Seq[FOLVar], formula: FOLFormula ): FOLFormula =
apply( vars, formula.asInstanceOf[Expr] ).asInstanceOf[FOLFormula]
private object SingleQ {
def unapply( e: Expr ) = QuantifierHelper.this.unapply( e )
}
def unapply( e: Expr ): Some[( List[Var], Expr )] = e match {
case SingleQ( v, Block( vs, f ) ) => Some( ( v :: vs, f ) )
case _ => Some( ( List(), e ) )
}
def unapply( f: Formula ): Some[( List[Var], Formula )] =
unapply( f.asInstanceOf[Expr] ).asInstanceOf[Some[( List[Var], Formula )]]
def unapply( f: FOLFormula ): Some[( List[FOLVar], FOLFormula )] =
unapply( f.asInstanceOf[Expr] ).asInstanceOf[Some[( List[FOLVar], FOLFormula )]]
}
}
object All extends QuantifierHelper( ForallC )
object Ex extends QuantifierHelper( ExistsC )
object Quant {
def apply( x: Var, sub: Formula, pol: Boolean ): Formula =
if ( pol ) All( x, sub ) else Ex( x, sub )
def unapply( f: Formula ): Option[( Var, Formula, Boolean )] =
f match {
case All( v, g ) => Some( ( v, g, true ) )
case Ex( v, g ) => Some( ( v, g, false ) )
case _ => None
}
}
class BinaryPropConnectiveHelper( val c: MonomorphicLogicalC ) {
def apply( a: Expr, b: Expr ): Formula =
Apps( c(), a, b ).asInstanceOf[Formula]
def apply( a: FOLFormula, b: FOLFormula ): FOLFormula =
apply( a, b.asInstanceOf[Expr] ).asInstanceOf[FOLFormula]
def apply( a: PropFormula, b: PropFormula ): PropFormula =
apply( a, b.asInstanceOf[Expr] ).asInstanceOf[PropFormula]
def unapply( formula: Expr ): Option[( Formula, Formula )] = formula match {
case App( App( c(), a: Formula ), b: Formula ) => Some( ( a, b ) )
case _ => None
}
def unapply( formula: FOLFormula ): Option[( FOLFormula, FOLFormula )] =
unapply( formula.asInstanceOf[FOLExpression] )
def unapply( formula: FOLExpression ): Option[( FOLFormula, FOLFormula )] =
unapply( formula.asInstanceOf[Expr] ) match {
case Some( ( a: FOLFormula, b: FOLFormula ) ) => Some( ( a, b ) )
case _ => None
}
def unapply( formula: PropFormula ): Option[( PropFormula, PropFormula )] =
unapply( formula.asInstanceOf[Expr] ) match {
case Some( ( a: PropFormula, b: PropFormula ) ) => Some( ( a, b ) )
case _ => None
}
}
class MonoidalBinaryPropConnectiveHelper( c: MonomorphicLogicalC, val neutral: MonomorphicLogicalC ) extends BinaryPropConnectiveHelper( c ) {
def apply( fs: TraversableOnce[Expr] ): Formula = nAry( fs.toSeq: _* )
def apply( fs: TraversableOnce[FOLFormula] )( implicit d: DummyImplicit ): FOLFormula = nAry( fs.toSeq: _* )
def leftAssociative( fs: Expr* ): Formula =
fs.reduceLeftOption( super.apply ).getOrElse( neutral() ).asInstanceOf[Formula]
def leftAssociative( fs: FOLFormula* ): FOLFormula =
leftAssociative( fs.asInstanceOf[Seq[Expr]]: _* ).asInstanceOf[FOLFormula]
def rightAssociative( fs: Expr* ): Formula =
fs.reduceRightOption( super.apply ).getOrElse( neutral() ).asInstanceOf[Formula]
def rightAssociative( fs: FOLFormula* ): FOLFormula =
rightAssociative( fs.asInstanceOf[Seq[Expr]]: _* ).asInstanceOf[FOLFormula]
object nAry {
def apply( fs: Expr* )( implicit d: DummyImplicit ): Formula = leftAssociative( fs: _* )
def apply( fs: FOLFormula* )( implicit d: DummyImplicit ): FOLFormula = leftAssociative( fs: _* )
private object Binary {
def unapply( formula: Expr ) = MonoidalBinaryPropConnectiveHelper.this.unapply( formula )
}
def unapply( formula: Formula ): Some[List[Formula]] = formula match {
case Binary( nAry( as ), nAry( bs ) ) => Some( as ::: bs )
case neutral() => Some( List() )
case _ => Some( List( formula ) )
}
def unapply( formula: FOLFormula ): Some[List[FOLFormula]] =
unapply( formula.asInstanceOf[Formula] ).asInstanceOf[Some[List[FOLFormula]]]
}
}
object And extends MonoidalBinaryPropConnectiveHelper( AndC, TopC )
object Or extends MonoidalBinaryPropConnectiveHelper( OrC, BottomC )
object Imp extends BinaryPropConnectiveHelper( ImpC ) {
object Block {
def apply( as: Seq[Formula], b: Formula ): Formula = as.foldRight( b )( Imp( _, _ ) )
def unapply( f: Formula ): Some[( List[Formula], Formula )] = f match {
case Imp( a, Block( as, b ) ) => Some( ( a :: as, b ) )
case b => Some( ( Nil, b ) )
}
}
}
class UnaryPropConnectiveHelper( val c: MonomorphicLogicalC ) {
def apply( a: Expr ): Formula = Apps( c(), a ).asInstanceOf[Formula]
def apply( a: FOLFormula ): FOLFormula = apply( a.asInstanceOf[Expr] ).asInstanceOf[FOLFormula]
def apply( a: PropFormula ): PropFormula = apply( a.asInstanceOf[Expr] ).asInstanceOf[PropFormula]
def unapply( formula: Expr ): Option[Formula] = formula match {
case App( c(), a: Formula ) => Some( a )
case _ => None
}
def unapply( formula: FOLFormula ): Option[FOLFormula] =
unapply( formula.asInstanceOf[FOLExpression] )
def unapply( formula: FOLExpression ): Option[FOLFormula] =
unapply( formula.asInstanceOf[Expr] ) match {
case Some( a: FOLFormula ) => Some( a )
case _ => None
}
def unapply( formula: PropFormula ): Option[PropFormula] =
unapply( formula.asInstanceOf[Expr] ) match {
case Some( a: PropFormula ) => Some( a )
case _ => None
}
}
object Neg extends UnaryPropConnectiveHelper( NegC )
class NullaryPropConnectiveHelper( val c: MonomorphicLogicalC ) {
def apply(): PropFormula with Const = c().asInstanceOf[PropFormula with Const]
def unapply( formula: PropFormula ) = formula match {
case c() => true
case _ => false
}
}
object Top extends NullaryPropConnectiveHelper( TopC )
object Bottom extends NullaryPropConnectiveHelper( BottomC )
object Eq {
def apply( a: Expr, b: Expr ): Atom = Apps( EqC( a.ty ), a, b ).asInstanceOf[Atom]
def apply( a: FOLTerm, b: FOLTerm ): FOLAtom =
apply( a, b.asInstanceOf[Expr] ).asInstanceOf[FOLAtom]
def unapply( e: Expr ): Option[( Expr, Expr )] = e match {
case App( App( EqC( _ ), a ), b ) => Some( a, b )
case _ => None
}
def unapply( f: FOLFormula ): Option[( FOLTerm, FOLTerm )] = unapply( f.asInstanceOf[FOLExpression] )
def unapply( f: FOLExpression ): Option[( FOLTerm, FOLTerm )] = f.asInstanceOf[Expr] match {
case Eq( a: FOLTerm, b: FOLTerm ) => Some( a, b )
case _ => None
}
}
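// Hedged examples (not in the original file) of how the helpers above build and match
// formulas; `p`, `q`, `r`: Formula and `x`: Var are assumed to be constructed elsewhere.
//
//   val conj = And.nAry( p, q, r )               // p && q && r (Top when the list is empty)
//   val impl = Imp.Block( Seq( p, q ), r )       // p -> (q -> r)
//   impl match { case Imp.Block( as, b ) => () } // as == List( p, q ), b == r
//   val univ = All( x, p )                       // forall x. p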
|
gebner/gapt
|
core/src/main/scala/at/logic/gapt/expr/constructors.scala
|
Scala
|
gpl-3.0
| 9,225
|
package com.tu.bdap.gc
import org.apache.spark.{ SparkConf, SparkContext }
import org.apache.spark.graphx.{ Edge, Graph }
import org.apache.spark.graphx._
import Math._
import scala.util.Random
import com.tu.bdap.utils.DataSetLoader
/**
* Graph Colouring assigns each vertex a colour, <br>
* so all neighbouring vertices have different colours. <br>
* Vertices randomly decide to pick a colour. <br>
* If neighbouring vertices simultaneously pick a colour, <br>
* the one with the smallest ID gets priority
*/
object GC_BDAP {
/**
* Loads a graph from local disk or hdfs and executes Graph Colouring
* @param args args[0] should contain path, args[1] is an integer identifying the dataset
*/
def main(args: Array[String]): Unit = {
//Start the Spark context
val conf = new SparkConf()
.setAppName("GC")
//.setMaster("local")
val sc = new SparkContext(conf)
//Set NumIterations
val numIterations = 10
//Check input arguments
if (args.length < 2) {
System.err.println("Invalid input arguments")
return
}
//Set input Arguments
val dataSetPath = args(0)
val dataSet = args(1).toInt
//Load DataSet
var edges = dataSet match {
case 1 => DataSetLoader.loadUSA(sc, dataSetPath)
case 2 => DataSetLoader.loadTwitter(sc, dataSetPath)
case 3 => DataSetLoader.loadFriendster(sc, dataSetPath)
case _ => null
}
//Check if DataSet could be loaded
if (edges == null) {
System.err.println("Could not load DataSet")
return
}
//Make the Graph undirected
edges = edges.flatMap(edge => Seq(Edge(edge.srcId, edge.dstId, 0), Edge(edge.dstId, edge.srcId, 0)))
//Create Graph from edges
var graph = Graph.fromEdges(edges, 0)
//calculate number of neighbour for all vertices
val degrees = graph.outDegrees
var init = graph.outerJoinVertices(degrees) { (_, _, optDegree) =>
optDegree.getOrElse(1)
}
//default values
val gc = init.mapVertices((id, value) => {
(-1, -1, value)
})
//execute pregel API
val result = gc.pregel[(VertexId, Int)](initialMsg, numIterations, EdgeDirection.Out)(compute, sendMsg, mergeMsg)
result.vertices.collect
}
val initialMsg = (Long.MaxValue, 0)
/** Randomly choose tentative node and resolve conflicts, <br>
* if nodes simultaneously pick colour
* @param id vertex id
* @param value current vertex value
* @param message incoming message
* @return updated vertex value
*/
def compute(id: VertexId, value: (Int, Int, Int), message: (Long, Int)): (Int, Int, Int) = {
val r = Random
val neighbours = value._3 - message._2
if (value._2 == 1) {
return (value._1, value._2, neighbours)
}
// resolve conflict of marked nodes, select the one with smaller id
if (value._2 == 0) {
if (message._1 > id) {
return (value._1 + 1, 1, neighbours)
}
}
// pick colour if no neighbours are left
if (neighbours == 0) {
return (value._1 + 1, 1, neighbours)
}
// randomly decide, whether to pick a colour
    if (r.nextFloat() <= 1.0 / (2 * neighbours)) {
return (value._1 + 1, 0, neighbours)
}
return (value._1 + 1, -1, neighbours)
}
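  // Illustrative walk-through (not in the original source): suppose neighbouring vertices
  // 5 and 9 both tentatively pick a colour (state 0) in the same superstep. Vertex 5
  // receives (9, 0): since 9 > 5 it wins the conflict and becomes coloured (state 1).
  // Vertex 9 receives (5, 0): the `message._1 > id` check fails, so it falls through to
  // the random re-pick and tries again in a later round.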
/** Send current vertex value to neighbouring vertices
* @param triplet triplets describing edge and adjacent values
* @return an Iterator containing send messages
*/
def sendMsg(triplet: EdgeTriplet[(Int, Int, Int), Long]): Iterator[(Long, (Long, Int))] = {
// don't send message to an inactive vertex
if (triplet.dstAttr._2 == 1) {
return Iterator.empty
}
// source vertex is tentative
if (triplet.srcAttr._2 == 0) {
return Iterator((triplet.dstId, (triplet.srcId, 0)))
}
// source vertex is coloured
if (triplet.srcAttr._2 == 1) {
return Iterator((triplet.dstId, (Long.MaxValue, 1)))
}
// source vertex is unassigned
if (triplet.srcAttr._2 == -1) {
return Iterator((triplet.dstId, (Long.MaxValue, 0)))
}
Iterator.empty
}
/** An associative and commutative function, that combines messages. <br>
* @param msg1 1st message
* @param msg2 2nd message
   * @return a tuple containing the smallest sender vertex ID and the number of neighbours that became coloured
*/
def mergeMsg(msg1: (Long, Int), msg2: (Long, Int)): (Long, Int) = {
(min(msg1._1, msg2._1), msg1._2 + msg2._2)
}
}
|
janukowitsch/bdapro-graph-bench
|
Spark/src/main/scala/com/tu/bdap/gc/GC_BDAP.scala
|
Scala
|
mit
| 4,442
|
package slick.jdbc.meta
import slick.jdbc.PositionedResult
/** A qualified name with an optional catalog and schema. */
case class MQName(catalog: Option[String], schema: Option[String], name: String) {
override def toString = "MQName(" + catalog.map(_ + ".").getOrElse("") + schema.map(_ + ".").getOrElse("") + name + ")"
def catalog_? = catalog.orNull
def schema_? = schema.orNull
}
object MQName {
private[meta] def from(r: PositionedResult) = MQName(r.<<, r.<<, r.<<)
private[meta] def optionalFrom(r: PositionedResult) = {
val cat = r.nextStringOption
val schema = r.nextStringOption
r.nextStringOption map (MQName(cat, schema, _))
}
def local(name: String) = MQName(Some(""), Some(""), name)
}
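// Small illustration (not in the original file): MQName.local("users") yields
// MQName(Some(""), Some(""), "users"), whose toString prints as "MQName(..users)".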
|
jkutner/slick
|
slick/src/main/scala/slick/jdbc/meta/MQName.scala
|
Scala
|
bsd-2-clause
| 732
|
package kata.scala
import scala.collection.mutable
class GraphSearch(private val marked: Set[Int]) {
def hasPathTo(vertex: Int): Boolean = marked.contains(vertex)
}
object DepthFirstSearch {
def create(graph: Graph, startVertex: Int): Option[GraphSearch] = {
if (!graph.contains(startVertex)) None
else {
def depthFirstSearch(vertex: Int, marked: Set[Int]): Set[Int] = {
graph.adjacentTo(vertex)
.filterNot(v => marked.contains(v))
.foldLeft(marked + vertex)((marked, v) => depthFirstSearch(v, marked))
}
Some(new GraphSearch(depthFirstSearch(startVertex, Set())))
}
}
}
object BreadthFirstSearch {
def create(graph: Graph, startVertex: Int): Option[GraphSearch] = {
if (!graph.contains(startVertex)) None
else {
def breadthFirstSearch(vertex: Int): Set[Int] = {
val queue = new mutable.Queue[Int]() += vertex
val marked = mutable.Set[Int]() += vertex
while (queue.nonEmpty) {
val v = queue.dequeue()
graph.adjacentTo(v).filterNot(v => marked.contains(v)).foreach(
v => {
marked += v
queue += v
}
)
}
Set() ++ marked
}
Some(new GraphSearch(breadthFirstSearch(startVertex)))
}
}
}
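// Hedged usage sketch (not in the original file); the searches above only rely on the
// kata's Graph exposing `contains` and `adjacentTo`, so `graph` is assumed to be built
// elsewhere in the kata.
//
//   val dfs: Option[GraphSearch] = DepthFirstSearch.create(graph, startVertex = 0)
//   val bfs: Option[GraphSearch] = BreadthFirstSearch.create(graph, startVertex = 0)
//   dfs.exists(_.hasPathTo(3)) // true iff vertex 3 is reachable from vertex 0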
|
Alex-Diez/Scala-TDD-Katas
|
old-katas/graph-search-kata/day-8/src/main/scala/kata/scala/GraphSearch.scala
|
Scala
|
mit
| 1,534
|
/*
* This file is part of eCobertura.
*
* Copyright (c) 2009, 2010 Joachim Hofer
* All rights reserved.
*
* This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package ecobertura.ui.util
import org.eclipse.jdt.core.IJavaElement
import org.eclipse.jdt.ui.JavaUI
object JavaEditorOpener {
def openAndReveal(javaElement: IJavaElement) = {
val editorPart = JavaUI.openInEditor(javaElement)
JavaUI.revealInEditor(editorPart, javaElement)
}
}
|
jmhofer/eCobertura
|
ecobertura.ui/src/main/scala/ecobertura/ui/util/JavaEditorOpener.scala
|
Scala
|
epl-1.0
| 626
|
package com.geishatokyo.sqlgen
import com.geishatokyo.sqlgen.core.{Cell, DataType, Row, Sheet}
import com.geishatokyo.sqlgen.logger.Logger
import scala.language.experimental.macros
import scala.util.DynamicVariable
/**
* Created by takezoux2 on 15/05/04.
*/
class SheetScope{
private val currentRow = new DynamicVariable[Row](null)
def row = currentRow.value
def withRow(row: Row)(func: => Any) = {
currentRow.withValue(row)(func)
}
}
class ColumnRef(sheet: Sheet, var columnName : String, sheetScope: SheetScope) {
def row = {
val row = sheetScope.row
if(row == null) throw new Exception("Not in row scope")
row
}
def foreach(func: Cell => Unit) : Unit = {
if(sheet.hasColumn(columnName)) {
sheet.column(columnName).cells.foreach(c => sheetScope.withRow(c.row){
func(c)
})
}
}
def name : String = columnName
def name_=(newName: String) = {
sheet.column(columnName).header.name = newName
columnName = newName
}
def ensureExists() : ColumnRef = {
if(!sheet.hasColumn(columnName)){
sheet.addHeader(columnName)
}
this
}
/**
   * Set the value of every cell in this column.
   * @param e expression producing the value to assign (evaluated once per row)
*/
def :=(e : => Any) : Unit = {
ensureExists()
sheet.rows.foreach(r => {
sheetScope.withRow(r){
val c = r(columnName)
val v = e
e match{
case cr: ColumnRef => {
c.value = r(cr.columnName).value
}
case _ => c.value = e
}
}
})
}
/**
* Set value only when cell is empty.
   * @param e expression producing the value to assign to empty cells
*/
def ?=(e : => Any) : Unit = {
ensureExists()
setIfEmpty(e)
}
def map(mapV : Cell => Any) : Unit = {
foreach(cell => {
val v = mapV(cell)
cell.value = v
})
}
def mapIfEmpty(func : Cell => Any) : Unit = {
foreach(cell => {
if(cell.isEmpty){
cell.value = func(cell)
}
})
}
def setIfEmpty(func : => Any) : Unit = {
foreach(cell => {
if(cell.isEmpty){
val v = func
v match{
case cr : ColumnRef => {
cell.value = cr.row(cr.columnName).value
}
case _ => {
cell.value = v
}
}
}
})
}
def mapTo(convs: (String,Any)*) = {
val mapping = convs.toMap
foreach(cell => {
val v = cell.asString
val mapped = mapping.getOrElse(v,cell.value)
cell.value = mapped
})
}
def mapTo(pf: PartialFunction[Any,Any]) = {
foreach(cell => {
cell.value = pf(cell.value)
})
}
def mapEnum[E <: Enumeration](e: E) = {
foreach(cell => {
if(cell.dataType == DataType.Integer) {
} else {
val s = cell.asString
try{
cell.value = e.withName(s).id
} catch{
          case t: Throwable => // ignore values that cannot be converted to the enum
}
}
})
}
def asString = {
row(columnName).asString
}
def asInt = {
row(columnName).asLong.toInt
}
def asLong = {
row(columnName).asLong
}
def asDouble = {
row(columnName).asDouble
}
def asDate = {
row(columnName).asJavaTime
}
override def toString: String = {
asString
}
def +(any: Any) = {
row(columnName) + any
}
def -(any: Any) = {
row(columnName) - any
}
def *(any: Any) = {
row(columnName) * any
}
def /(any: Any) = {
row(columnName) / any
}
def %(any: Any) = {
row(columnName) % any
}
def isIgnore: Boolean = sheet.header(columnName).isIgnore
def isIgnore_=(v: Boolean) = sheet.header(columnName).isIgnore = v
def isId: Boolean = sheet.header(columnName).isId
def isId_=(v: Boolean) = sheet.header(columnName).isId = v
/**
   * Abort generation when a cell does not satisfy the condition.
   * @param validationFunc predicate each cell must satisfy
   * @param message error message attached to the thrown exception
*/
def validate(validationFunc: Cell => Boolean, message: String = "Invalid data") = {
foreach(cell => try{
if(!validationFunc(cell)) {
throw SQLGenException.atRow(cell.row, message)
}
} catch {
case t: SQLGenException => throw t
case t: Throwable => throw SQLGenException.atRow(cell.row,message,t)
})
}
/**
   * Log a warning when a cell does not satisfy the condition.
   * @param warningFunc predicate each cell must satisfy
   * @param message warning message to log
*/
def warning(warningFunc: Cell => Boolean, message: String = "Warning. FIXME!") = {
foreach(cell => try{
if(!warningFunc(cell)) {
Logger.logWarning(message)
}
} catch {
case t: SQLGenException => throw t
case t: Throwable => throw SQLGenException.atRow(cell.row,"Other error is thrown",t)
})
}
}
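// Hedged usage sketch (not in the original file): typical column manipulations inside the
// sheet-processing DSL. `sheet` and `scope` are assumed to be supplied by the surrounding
// generator code.
//
//   val userId = new ColumnRef(sheet, "userId", scope).ensureExists()
//   userId ?= 0                                              // fill only empty cells
//   new ColumnRef(sheet, "status", scope).mapTo("active" -> 1, "inactive" -> 0)
//   new ColumnRef(sheet, "age", scope).validate(c => c.asString.nonEmpty, "age must not be blank")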
|
geishatokyo/sql-generator
|
src/main/scala/com/geishatokyo/sqlgen/ColumnRef.scala
|
Scala
|
mit
| 4,617
|
package pdi.jwt
case class JsonDataEntry(
algo: JwtAlgorithm,
header: String,
headerClass: JwtHeader,
header64: String,
signature: String,
token: String,
tokenUnsigned: String,
tokenEmpty: String,
headerJson: ujson.Value
) extends JsonDataEntryTrait[ujson.Value]
trait JwtUpickleFixture extends JsonCommonFixture[ujson.Value] {
val claimJson: ujson.Value = ujson.read(claim)
val headerEmptyJson: ujson.Value = ujson.read(headerEmpty)
def mapData(data: DataEntryBase): JsonDataEntry = JsonDataEntry(
algo = data.algo,
header = data.header,
headerClass = data.headerClass,
header64 = data.header64,
signature = data.signature,
token = data.token,
tokenUnsigned = data.tokenUnsigned,
tokenEmpty = data.tokenEmpty,
headerJson = ujson.read(data.header)
)
}
|
pauldijou/jwt-scala
|
json/upickle/src/test/scala/JwtUpickleFixture.scala
|
Scala
|
apache-2.0
| 838
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.classification
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.{Vector, Vectors}
final class TestProbabilisticClassificationModel(
override val uid: String,
override val numClasses: Int)
extends ProbabilisticClassificationModel[Vector, TestProbabilisticClassificationModel] {
override def copy(extra: org.apache.spark.ml.param.ParamMap): this.type = defaultCopy(extra)
override protected def predictRaw(input: Vector): Vector = {
input
}
override protected def raw2probabilityInPlace(rawPrediction: Vector): Vector = {
rawPrediction
}
def friendlyPredict(input: Vector): Double = {
predict(input)
}
}
class ProbabilisticClassifierSuite extends SparkFunSuite {
test("test thresholding") {
val thresholds = Array(0.5, 0.2)
val testModel = new TestProbabilisticClassificationModel("myuid", 2).setThresholds(thresholds)
assert(testModel.friendlyPredict(Vectors.dense(Array(1.0, 1.0))) === 1.0)
assert(testModel.friendlyPredict(Vectors.dense(Array(1.0, 0.2))) === 0.0)
}
test("test thresholding not required") {
val testModel = new TestProbabilisticClassificationModel("myuid", 2)
assert(testModel.friendlyPredict(Vectors.dense(Array(1.0, 2.0))) === 1.0)
}
}
|
practice-vishnoi/dev-spark-1
|
mllib/src/test/scala/org/apache/spark/ml/classification/ProbabilisticClassifierSuite.scala
|
Scala
|
apache-2.0
| 2,089
|
/*
* Copyright 2012 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.money.core.context
import com.comcast.money.api.SpanInfo
import com.comcast.money.core.internal.{ SpanContext, SpanLocal }
import com.typesafe.config.{ Config, ConfigFactory }
import org.slf4j.MDC
import org.slf4j.spi.MDCAdapter
object FormattedMdcContextStorageFilter {
private val DefaultConfig = ConfigFactory.parseString(
"""
|format-ids-as-hex = false
|key = moneyTrace
|format = "[ span-id=%2$s ][ trace-id=%1$s ][ parent-id=%3$s ][ span-name=%4$s ]"
|""".stripMargin)
private val MdcKeyKey = "key"
private val FormatKey = "format"
private val FormatIdsAsHexKey = "format-ids-as-hex"
def apply(conf: Config): FormattedMdcContextStorageFilter = apply(conf, SpanLocal, MDC.getMDCAdapter)
def apply(conf: Config, spanContext: SpanContext, mdc: MDCAdapter): FormattedMdcContextStorageFilter = {
val effectiveConfig = conf.withFallback(DefaultConfig)
val mdcKey = effectiveConfig.getString(MdcKeyKey)
val format = effectiveConfig.getString(FormatKey)
val formatIdsAsHex = effectiveConfig.getBoolean(FormatIdsAsHexKey)
new FormattedMdcContextStorageFilter(spanContext, mdc, mdcKey, format, formatIdsAsHex)
}
}
/**
* Context storage filter that updates MDC properties when the span changes for the current thread.
*/
class FormattedMdcContextStorageFilter(
override val spanContext: SpanContext,
mdc: MDCAdapter,
mdcKey: String,
format: String,
formatIdsAsHex: Boolean = false) extends AbstractMdcContextStorageFilter {
override def updateMdc(currentSpanInfo: Option[SpanInfo]): Unit = currentSpanInfo match {
case Some(info) =>
val spanId = info.id
val moneyTrace = if (formatIdsAsHex) {
format.format(spanId.traceIdAsHex(), spanId.selfIdAsHex(), spanId.parentIdAsHex(), info.name)
} else {
format.format(spanId.traceId(), spanId.selfId(), spanId.parentId(), info.name)
}
mdc.put(mdcKey, moneyTrace)
case None =>
mdc.remove(mdcKey)
}
}
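// Illustrative note (not in the original file): with the default configuration above, the MDC
// key "moneyTrace" receives a value shaped like
//   "[ span-id=<selfId> ][ trace-id=<traceId> ][ parent-id=<parentId> ][ span-name=<name> ]"
// because format.format is invoked with (traceId, selfId, parentId, name) and the default
// pattern addresses them positionally via %1$s..%4$s.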
|
Comcast/money
|
money-core/src/main/scala/com/comcast/money/core/context/FormattedMdcContextStorageFilter.scala
|
Scala
|
apache-2.0
| 2,631
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import scala.collection.immutable.HashSet
import scala.collection.mutable.{ArrayBuffer, Stack}
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.expressions.{BinaryExpression, MultiLikeBase, _}
import org.apache.spark.sql.catalyst.expressions.Literal.{FalseLiteral, TrueLiteral}
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.expressions.objects.AssertNotNull
import org.apache.spark.sql.catalyst.plans._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules._
import org.apache.spark.sql.catalyst.trees.AlwaysProcess
import org.apache.spark.sql.catalyst.trees.TreePattern._
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
/*
* Optimization rules defined in this file should not affect the structure of the logical plan.
*/
/**
* Replaces [[Expression Expressions]] that can be statically evaluated with
* equivalent [[Literal]] values.
*/
object ConstantFolding extends Rule[LogicalPlan] {
private def hasNoSideEffect(e: Expression): Boolean = e match {
case _: Attribute => true
case _: Literal => true
case c: Cast if !conf.ansiEnabled => hasNoSideEffect(c.child)
case _: NoThrow if e.deterministic => e.children.forall(hasNoSideEffect)
case _ => false
}
def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(AlwaysProcess.fn, ruleId) {
case q: LogicalPlan => q.transformExpressionsDownWithPruning(
AlwaysProcess.fn, ruleId) {
// Skip redundant folding of literals. This rule is technically not necessary. Placing this
// here avoids running the next rule for Literal values, which would create a new Literal
// object and running eval unnecessarily.
case l: Literal => l
case Size(c: CreateArray, _) if c.children.forall(hasNoSideEffect) =>
Literal(c.children.length)
case Size(c: CreateMap, _) if c.children.forall(hasNoSideEffect) =>
Literal(c.children.length / 2)
// Fold expressions that are foldable.
case e if e.foldable => Literal.create(e.eval(EmptyRow), e.dataType)
}
}
}
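// Hedged illustration (not in the Spark source): under this rule a foldable subtree such as
// Add(Literal(1), Literal(2)) inside a plan's expressions is replaced by Literal(3), and
// Size over a CreateArray/CreateMap of side-effect-free children is replaced by the literal
// element count without materialising the collection.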
/**
* Substitutes [[Attribute Attributes]] which can be statically evaluated with their corresponding
* value in conjunctive [[Expression Expressions]]
* e.g.
* {{{
* SELECT * FROM table WHERE i = 5 AND j = i + 3
* ==> SELECT * FROM table WHERE i = 5 AND j = 8
* }}}
*
* Approach used:
* - Populate a mapping of attribute => constant value by looking at all the equals predicates
 * - Using this mapping, replace occurrences of the attributes with the corresponding constant values
* in the AND node.
*/
object ConstantPropagation extends Rule[LogicalPlan] with PredicateHelper {
def apply(plan: LogicalPlan): LogicalPlan = plan.transformUpWithPruning(
_.containsAllPatterns(LITERAL, FILTER), ruleId) {
case f: Filter =>
val (newCondition, _) = traverse(f.condition, replaceChildren = true, nullIsFalse = true)
if (newCondition.isDefined) {
f.copy(condition = newCondition.get)
} else {
f
}
}
type EqualityPredicates = Seq[((AttributeReference, Literal), BinaryComparison)]
/**
* Traverse a condition as a tree and replace attributes with constant values.
   * - On matching [[And]], recursively traverse each child and get the propagated mappings.
* If the current node is not child of another [[And]], replace all occurrences of the
* attributes with the corresponding constant values.
* - If a child of [[And]] is [[EqualTo]] or [[EqualNullSafe]], propagate the mapping
* of attribute => constant.
   * - On matching [[Or]] or [[Not]], recursively traverse each child and propagate an empty mapping.
* - Otherwise, stop traversal and propagate empty mapping.
* @param condition condition to be traversed
* @param replaceChildren whether to replace attributes with constant values in children
* @param nullIsFalse whether a boolean expression result can be considered to false e.g. in the
* case of `WHERE e`, null result of expression `e` means the same as if it
* resulted false
* @return A tuple including:
* 1. Option[Expression]: optional changed condition after traversal
* 2. EqualityPredicates: propagated mapping of attribute => constant
*/
private def traverse(condition: Expression, replaceChildren: Boolean, nullIsFalse: Boolean)
: (Option[Expression], EqualityPredicates) =
condition match {
case e @ EqualTo(left: AttributeReference, right: Literal)
if safeToReplace(left, nullIsFalse) =>
(None, Seq(((left, right), e)))
case e @ EqualTo(left: Literal, right: AttributeReference)
if safeToReplace(right, nullIsFalse) =>
(None, Seq(((right, left), e)))
case e @ EqualNullSafe(left: AttributeReference, right: Literal)
if safeToReplace(left, nullIsFalse) =>
(None, Seq(((left, right), e)))
case e @ EqualNullSafe(left: Literal, right: AttributeReference)
if safeToReplace(right, nullIsFalse) =>
(None, Seq(((right, left), e)))
case a: And =>
val (newLeft, equalityPredicatesLeft) =
traverse(a.left, replaceChildren = false, nullIsFalse)
val (newRight, equalityPredicatesRight) =
traverse(a.right, replaceChildren = false, nullIsFalse)
val equalityPredicates = equalityPredicatesLeft ++ equalityPredicatesRight
val newSelf = if (equalityPredicates.nonEmpty && replaceChildren) {
Some(And(replaceConstants(newLeft.getOrElse(a.left), equalityPredicates),
replaceConstants(newRight.getOrElse(a.right), equalityPredicates)))
} else {
if (newLeft.isDefined || newRight.isDefined) {
Some(And(newLeft.getOrElse(a.left), newRight.getOrElse(a.right)))
} else {
None
}
}
(newSelf, equalityPredicates)
case o: Or =>
// Ignore the EqualityPredicates from children since they are only propagated through And.
val (newLeft, _) = traverse(o.left, replaceChildren = true, nullIsFalse)
val (newRight, _) = traverse(o.right, replaceChildren = true, nullIsFalse)
val newSelf = if (newLeft.isDefined || newRight.isDefined) {
          Some(Or(left = newLeft.getOrElse(o.left), right = newRight.getOrElse(o.right)))
} else {
None
}
(newSelf, Seq.empty)
case n: Not =>
// Ignore the EqualityPredicates from children since they are only propagated through And.
val (newChild, _) = traverse(n.child, replaceChildren = true, nullIsFalse = false)
(newChild.map(Not), Seq.empty)
case _ => (None, Seq.empty)
}
  // We need to take into account whether an attribute is nullable and the context of the
  // conjunctive expression. E.g. `SELECT * FROM t WHERE NOT(c = 1 AND c + 1 = 1)`: substituting
  // `c = 1` turns `c + 1 = 1` into `1 + 1 = 1`, which is only safe if 'c' isn't nullable. If 'c'
  // is nullable then the enclosing NOT prevents us from doing the substitution, as NOT flips the
  // context (`nullIsFalse`) of what a null result of the enclosed expression means.
private def safeToReplace(ar: AttributeReference, nullIsFalse: Boolean) =
!ar.nullable || nullIsFalse
private def replaceConstants(condition: Expression, equalityPredicates: EqualityPredicates)
: Expression = {
val constantsMap = AttributeMap(equalityPredicates.map(_._1))
val predicates = equalityPredicates.map(_._2).toSet
def replaceConstants0(expression: Expression) = expression transform {
case a: AttributeReference => constantsMap.getOrElse(a, a)
}
condition transform {
case e @ EqualTo(_, _) if !predicates.contains(e) => replaceConstants0(e)
case e @ EqualNullSafe(_, _) if !predicates.contains(e) => replaceConstants0(e)
}
}
}
/**
* Reorder associative integral-type operators and fold all constants into one.
*/
object ReorderAssociativeOperator extends Rule[LogicalPlan] {
private def flattenAdd(
expression: Expression,
groupSet: ExpressionSet): Seq[Expression] = expression match {
case expr @ Add(l, r, _) if !groupSet.contains(expr) =>
flattenAdd(l, groupSet) ++ flattenAdd(r, groupSet)
case other => other :: Nil
}
private def flattenMultiply(
expression: Expression,
groupSet: ExpressionSet): Seq[Expression] = expression match {
case expr @ Multiply(l, r, _) if !groupSet.contains(expr) =>
flattenMultiply(l, groupSet) ++ flattenMultiply(r, groupSet)
case other => other :: Nil
}
private def collectGroupingExpressions(plan: LogicalPlan): ExpressionSet = plan match {
case Aggregate(groupingExpressions, aggregateExpressions, child) =>
ExpressionSet.apply(groupingExpressions)
case _ => ExpressionSet(Seq.empty)
}
def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(
_.containsPattern(BINARY_ARITHMETIC), ruleId) {
case q: LogicalPlan =>
      // We have to respect aggregate expressions which exist in grouping expressions when plan
// is an Aggregate operator, otherwise the optimized expression could not be derived from
// grouping expressions.
// TODO: do not reorder consecutive `Add`s or `Multiply`s with different `failOnError` flags
val groupingExpressionSet = collectGroupingExpressions(q)
q.transformExpressionsDownWithPruning(_.containsPattern(BINARY_ARITHMETIC)) {
case a @ Add(_, _, f) if a.deterministic && a.dataType.isInstanceOf[IntegralType] =>
val (foldables, others) = flattenAdd(a, groupingExpressionSet).partition(_.foldable)
if (foldables.size > 1) {
val foldableExpr = foldables.reduce((x, y) => Add(x, y, f))
val c = Literal.create(foldableExpr.eval(EmptyRow), a.dataType)
if (others.isEmpty) c else Add(others.reduce((x, y) => Add(x, y, f)), c, f)
} else {
a
}
case m @ Multiply(_, _, f) if m.deterministic && m.dataType.isInstanceOf[IntegralType] =>
val (foldables, others) = flattenMultiply(m, groupingExpressionSet).partition(_.foldable)
if (foldables.size > 1) {
val foldableExpr = foldables.reduce((x, y) => Multiply(x, y, f))
val c = Literal.create(foldableExpr.eval(EmptyRow), m.dataType)
if (others.isEmpty) c else Multiply(others.reduce((x, y) => Multiply(x, y, f)), c, f)
} else {
m
}
}
}
}
/**
* Optimize IN predicates:
* 1. Converts the predicate to false when the list is empty and
* the value is not nullable.
* 2. Removes literal repetitions.
* 3. Replaces [[In (value, seq[Literal])]] with optimized version
* [[InSet (value, HashSet[Literal])]] which is much faster.
*/
object OptimizeIn extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(
_.containsPattern(IN), ruleId) {
case q: LogicalPlan => q.transformExpressionsDownWithPruning(_.containsPattern(IN), ruleId) {
case In(v, list) if list.isEmpty =>
// When v is not nullable, the following expression will be optimized
// to FalseLiteral which is tested in OptimizeInSuite.scala
If(IsNotNull(v), FalseLiteral, Literal(null, BooleanType))
case expr @ In(v, list) if expr.inSetConvertible =>
val newList = ExpressionSet(list).toSeq
if (newList.length == 1
// TODO: `EqualTo` for structural types are not working. Until SPARK-24443 is addressed,
// TODO: we exclude them in this rule.
&& !v.isInstanceOf[CreateNamedStruct]
&& !newList.head.isInstanceOf[CreateNamedStruct]) {
EqualTo(v, newList.head)
} else if (newList.length > conf.optimizerInSetConversionThreshold) {
val hSet = newList.map(e => e.eval(EmptyRow))
InSet(v, HashSet() ++ hSet)
} else if (newList.length < list.length) {
expr.copy(list = newList)
} else { // newList.length == list.length && newList.length > 1
expr
}
}
}
}
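// Hedged illustration (not in the Spark source): In(a, Nil) becomes
// If(IsNotNull(a), FalseLiteral, Literal(null, BooleanType)) (a plain FALSE once `a` is known
// to be non-nullable); In(a, Seq(1, 1, 2)) is first deduplicated to In(a, Seq(1, 2)); and once
// the deduplicated list exceeds conf.optimizerInSetConversionThreshold it is rewritten to
// InSet(a, hashSet) for faster lookup.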
/**
* Simplifies boolean expressions:
* 1. Simplifies expressions whose answer can be determined without evaluating both sides.
* 2. Eliminates / extracts common factors.
* 3. Merge same expressions
* 4. Removes `Not` operator.
*/
object BooleanSimplification extends Rule[LogicalPlan] with PredicateHelper {
def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(
_.containsAnyPattern(AND_OR, NOT), ruleId) {
case q: LogicalPlan => q.transformExpressionsUpWithPruning(
_.containsAnyPattern(AND_OR, NOT), ruleId) {
case TrueLiteral And e => e
case e And TrueLiteral => e
case FalseLiteral Or e => e
case e Or FalseLiteral => e
case FalseLiteral And _ => FalseLiteral
case _ And FalseLiteral => FalseLiteral
case TrueLiteral Or _ => TrueLiteral
case _ Or TrueLiteral => TrueLiteral
case a And b if Not(a).semanticEquals(b) =>
If(IsNull(a), Literal.create(null, a.dataType), FalseLiteral)
case a And b if a.semanticEquals(Not(b)) =>
If(IsNull(b), Literal.create(null, b.dataType), FalseLiteral)
case a Or b if Not(a).semanticEquals(b) =>
If(IsNull(a), Literal.create(null, a.dataType), TrueLiteral)
case a Or b if a.semanticEquals(Not(b)) =>
If(IsNull(b), Literal.create(null, b.dataType), TrueLiteral)
case a And b if a.semanticEquals(b) => a
case a Or b if a.semanticEquals(b) => a
// The following optimizations are applicable only when the operands are not nullable,
// since the three-value logic of AND and OR are different in NULL handling.
// See the chart:
// +---------+---------+---------+---------+
// | operand | operand | OR | AND |
// +---------+---------+---------+---------+
// | TRUE | TRUE | TRUE | TRUE |
// | TRUE | FALSE | TRUE | FALSE |
// | FALSE | FALSE | FALSE | FALSE |
// | UNKNOWN | TRUE | TRUE | UNKNOWN |
// | UNKNOWN | FALSE | UNKNOWN | FALSE |
// | UNKNOWN | UNKNOWN | UNKNOWN | UNKNOWN |
// +---------+---------+---------+---------+
// (NULL And (NULL Or FALSE)) = NULL, but (NULL And FALSE) = FALSE. Thus, a can't be nullable.
case a And (b Or c) if !a.nullable && Not(a).semanticEquals(b) => And(a, c)
// (NULL And (FALSE Or NULL)) = NULL, but (NULL And FALSE) = FALSE. Thus, a can't be nullable.
case a And (b Or c) if !a.nullable && Not(a).semanticEquals(c) => And(a, b)
// ((NULL Or FALSE) And NULL) = NULL, but (FALSE And NULL) = FALSE. Thus, c can't be nullable.
case (a Or b) And c if !c.nullable && a.semanticEquals(Not(c)) => And(b, c)
// ((FALSE Or NULL) And NULL) = NULL, but (FALSE And NULL) = FALSE. Thus, c can't be nullable.
case (a Or b) And c if !c.nullable && b.semanticEquals(Not(c)) => And(a, c)
// (NULL Or (NULL And TRUE)) = NULL, but (NULL Or TRUE) = TRUE. Thus, a can't be nullable.
case a Or (b And c) if !a.nullable && Not(a).semanticEquals(b) => Or(a, c)
// (NULL Or (TRUE And NULL)) = NULL, but (NULL Or TRUE) = TRUE. Thus, a can't be nullable.
case a Or (b And c) if !a.nullable && Not(a).semanticEquals(c) => Or(a, b)
// ((NULL And TRUE) Or NULL) = NULL, but (TRUE Or NULL) = TRUE. Thus, c can't be nullable.
case (a And b) Or c if !c.nullable && a.semanticEquals(Not(c)) => Or(b, c)
// ((TRUE And NULL) Or NULL) = NULL, but (TRUE Or NULL) = TRUE. Thus, c can't be nullable.
case (a And b) Or c if !c.nullable && b.semanticEquals(Not(c)) => Or(a, c)
// Common factor elimination for conjunction
case and @ (left And right) =>
// 1. Split left and right to get the disjunctive predicates,
// i.e. lhs = (a || b), rhs = (a || c)
        // 2. Find the common predicates between lhsSet and rhsSet, i.e. common = (a)
        // 3. Remove the common predicates from lhsSet and rhsSet, i.e. ldiff = (b), rdiff = (c)
// 4. If common is non-empty, apply the formula to get the optimized predicate:
// common || (ldiff && rdiff)
// 5. Else if common is empty, split left and right to get the conjunctive predicates.
// for example lhs = (a && b), rhs = (a && c) => all = (a, b, a, c), distinct = (a, b, c)
// optimized predicate: (a && b && c)
val lhs = splitDisjunctivePredicates(left)
val rhs = splitDisjunctivePredicates(right)
val common = lhs.filter(e => rhs.exists(e.semanticEquals))
if (common.nonEmpty) {
val ldiff = lhs.filterNot(e => common.exists(e.semanticEquals))
val rdiff = rhs.filterNot(e => common.exists(e.semanticEquals))
if (ldiff.isEmpty || rdiff.isEmpty) {
// (a || b || c || ...) && (a || b) => (a || b)
common.reduce(Or)
} else {
// (a || b || c || ...) && (a || b || d || ...) =>
// a || b || ((c || ...) && (d || ...))
(common :+ And(ldiff.reduce(Or), rdiff.reduce(Or))).reduce(Or)
}
} else {
// No common factors from disjunctive predicates, reduce common factor from conjunction
val all = splitConjunctivePredicates(left) ++ splitConjunctivePredicates(right)
val distinct = ExpressionSet(all)
if (all.size == distinct.size) {
// No common factors, return the original predicate
and
} else {
// (a && b) && a && (a && c) => a && b && c
buildBalancedPredicate(distinct.toSeq, And)
}
}
// Common factor elimination for disjunction
case or @ (left Or right) =>
// 1. Split left and right to get the conjunctive predicates,
// i.e. lhs = (a && b), rhs = (a && c)
        // 2. Find the common predicates between lhsSet and rhsSet, i.e. common = (a)
        // 3. Remove the common predicates from lhsSet and rhsSet, i.e. ldiff = (b), rdiff = (c)
// 4. If common is non-empty, apply the formula to get the optimized predicate:
// common && (ldiff || rdiff)
// 5. Else if common is empty, split left and right to get the conjunctive predicates.
// for example lhs = (a || b), rhs = (a || c) => all = (a, b, a, c), distinct = (a, b, c)
// optimized predicate: (a || b || c)
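        // e.g. (a && b) || (a && c) => a && (b || c)  (illustrative application of step 4)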
val lhs = splitConjunctivePredicates(left)
val rhs = splitConjunctivePredicates(right)
val common = lhs.filter(e => rhs.exists(e.semanticEquals))
if (common.nonEmpty) {
val ldiff = lhs.filterNot(e => common.exists(e.semanticEquals))
val rdiff = rhs.filterNot(e => common.exists(e.semanticEquals))
if (ldiff.isEmpty || rdiff.isEmpty) {
// (a && b) || (a && b && c && ...) => a && b
common.reduce(And)
} else {
// (a && b && c && ...) || (a && b && d && ...) =>
// a && b && ((c && ...) || (d && ...))
(common :+ Or(ldiff.reduce(And), rdiff.reduce(And))).reduce(And)
}
} else {
// No common factors in conjunctive predicates, reduce common factor from disjunction
val all = splitDisjunctivePredicates(left) ++ splitDisjunctivePredicates(right)
val distinct = ExpressionSet(all)
if (all.size == distinct.size) {
// No common factors, return the original predicate
or
} else {
// (a || b) || a || (a || c) => a || b || c
buildBalancedPredicate(distinct.toSeq, Or)
}
}
case Not(TrueLiteral) => FalseLiteral
case Not(FalseLiteral) => TrueLiteral
case Not(a GreaterThan b) => LessThanOrEqual(a, b)
case Not(a GreaterThanOrEqual b) => LessThan(a, b)
case Not(a LessThan b) => GreaterThanOrEqual(a, b)
case Not(a LessThanOrEqual b) => GreaterThan(a, b)
case Not(a Or b) => And(Not(a), Not(b))
case Not(a And b) => Or(Not(a), Not(b))
case Not(Not(e)) => e
case Not(IsNull(e)) => IsNotNull(e)
case Not(IsNotNull(e)) => IsNull(e)
}
}
}
/**
* Move/Push `Not` operator if it's beneficial.
*/
object NotPropagation extends Rule[LogicalPlan] {
// Given argument x, return true if expression Not(x) can be simplified
// E.g. let x == Not(y), then canSimplifyNot(x) == true because Not(x) == Not(Not(y)) == y
// For the case of x = EqualTo(a, b), recursively check each child expression
// Extra nullable check is required for EqualNullSafe because
// Not(EqualNullSafe(e, null)) is different from EqualNullSafe(e, Not(null))
private def canSimplifyNot(x: Expression): Boolean = x match {
case Literal(_, BooleanType) | Literal(_, NullType) => true
case _: Not | _: IsNull | _: IsNotNull | _: And | _: Or => true
case _: GreaterThan | _: GreaterThanOrEqual | _: LessThan | _: LessThanOrEqual => true
case EqualTo(a, b) if canSimplifyNot(a) || canSimplifyNot(b) => true
case EqualNullSafe(a, b)
if !a.nullable && !b.nullable && (canSimplifyNot(a) || canSimplifyNot(b)) => true
case _ => false
}
def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(
_.containsPattern(NOT), ruleId) {
case q: LogicalPlan => q.transformExpressionsDownWithPruning(_.containsPattern(NOT), ruleId) {
// Move `Not` from one side of `EqualTo`/`EqualNullSafe` to the other side if it's beneficial.
// E.g. `EqualTo(Not(a), b)` where `b = Not(c)`, it will become
// `EqualTo(a, Not(b))` => `EqualTo(a, Not(Not(c)))` => `EqualTo(a, c)`
// In addition, `if canSimplifyNot(b)` checks if the optimization can converge
      // that avoids the situation where the two rules keep rewriting the expression back and forth.
case EqualTo(Not(a), b) if !canSimplifyNot(a) && canSimplifyNot(b) => EqualTo(a, Not(b))
case EqualTo(a, Not(b)) if canSimplifyNot(a) && !canSimplifyNot(b) => EqualTo(Not(a), b)
case EqualNullSafe(Not(a), b) if !canSimplifyNot(a) && canSimplifyNot(b) =>
EqualNullSafe(a, Not(b))
case EqualNullSafe(a, Not(b)) if canSimplifyNot(a) && !canSimplifyNot(b) =>
EqualNullSafe(Not(a), b)
// Push `Not` to one side of `EqualTo`/`EqualNullSafe` if it's beneficial.
// E.g. Not(EqualTo(x, false)) => EqualTo(x, true)
case Not(EqualTo(a, b)) if canSimplifyNot(b) => EqualTo(a, Not(b))
case Not(EqualTo(a, b)) if canSimplifyNot(a) => EqualTo(Not(a), b)
case Not(EqualNullSafe(a, b)) if !a.nullable && !b.nullable && canSimplifyNot(b) =>
EqualNullSafe(a, Not(b))
case Not(EqualNullSafe(a, b)) if !a.nullable && !b.nullable && canSimplifyNot(a) =>
EqualNullSafe(Not(a), b)
}
}
}
/**
* Simplifies binary comparisons with semantically-equal expressions:
* 1) Replace '<=>' with 'true' literal.
* 2) Replace '=', '<=', and '>=' with 'true' literal if both operands are non-nullable.
* 3) Replace '<' and '>' with 'false' literal if both operands are non-nullable.
* 4) Unwrap '=', '<=>' if one side is a boolean literal
*/
object SimplifyBinaryComparison
extends Rule[LogicalPlan] with PredicateHelper with ConstraintHelper {
private def canSimplifyComparison(
left: Expression,
right: Expression,
notNullExpressions: => ExpressionSet): Boolean = {
if (left.semanticEquals(right)) {
(!left.nullable && !right.nullable) || notNullExpressions.contains(left)
} else {
false
}
}
def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(
_.containsPattern(BINARY_COMPARISON), ruleId) {
case l: LogicalPlan =>
lazy val notNullExpressions = ExpressionSet(l match {
case Filter(fc, _) =>
splitConjunctivePredicates(fc).collect {
case i: IsNotNull => i.child
}
case _ => Seq.empty
})
l.transformExpressionsUpWithPruning(_.containsPattern(BINARY_COMPARISON)) {
// True with equality
case a EqualNullSafe b if a.semanticEquals(b) => TrueLiteral
case a EqualTo b if canSimplifyComparison(a, b, notNullExpressions) => TrueLiteral
case a GreaterThanOrEqual b if canSimplifyComparison(a, b, notNullExpressions) =>
TrueLiteral
case a LessThanOrEqual b if canSimplifyComparison(a, b, notNullExpressions) => TrueLiteral
// False with inequality
case a GreaterThan b if canSimplifyComparison(a, b, notNullExpressions) => FalseLiteral
case a LessThan b if canSimplifyComparison(a, b, notNullExpressions) => FalseLiteral
// Optimize equalities when one side is Literal in order to help pushing down the filters
case a EqualTo TrueLiteral => a
case TrueLiteral EqualTo b => b
case a EqualTo FalseLiteral => Not(a)
case FalseLiteral EqualTo b => Not(b)
case a EqualNullSafe TrueLiteral if !a.nullable => a
case TrueLiteral EqualNullSafe b if !b.nullable => b
case a EqualNullSafe FalseLiteral if !a.nullable => Not(a)
case FalseLiteral EqualNullSafe b if !b.nullable => Not(b)
}
}
}
/**
* Simplifies conditional expressions (if / case).
*/
object SimplifyConditionals extends Rule[LogicalPlan] with PredicateHelper {
private def falseOrNullLiteral(e: Expression): Boolean = e match {
case FalseLiteral => true
case Literal(null, _) => true
case _ => false
}
def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(
_.containsAnyPattern(IF, CASE_WHEN), ruleId) {
case q: LogicalPlan => q transformExpressionsUp {
case If(TrueLiteral, trueValue, _) => trueValue
case If(FalseLiteral, _, falseValue) => falseValue
case If(Literal(null, _), _, falseValue) => falseValue
case If(cond, TrueLiteral, FalseLiteral) =>
if (cond.nullable) EqualNullSafe(cond, TrueLiteral) else cond
case If(cond, FalseLiteral, TrueLiteral) =>
if (cond.nullable) Not(EqualNullSafe(cond, TrueLiteral)) else Not(cond)
case If(cond, trueValue, falseValue)
if cond.deterministic && trueValue.semanticEquals(falseValue) => trueValue
case If(cond, l @ Literal(null, _), FalseLiteral) if !cond.nullable => And(cond, l)
case If(cond, l @ Literal(null, _), TrueLiteral) if !cond.nullable => Or(Not(cond), l)
case If(cond, FalseLiteral, l @ Literal(null, _)) if !cond.nullable => And(Not(cond), l)
case If(cond, TrueLiteral, l @ Literal(null, _)) if !cond.nullable => Or(cond, l)
case CaseWhen(Seq((cond, TrueLiteral)), Some(FalseLiteral)) =>
if (cond.nullable) EqualNullSafe(cond, TrueLiteral) else cond
case CaseWhen(Seq((cond, FalseLiteral)), Some(TrueLiteral)) =>
if (cond.nullable) Not(EqualNullSafe(cond, TrueLiteral)) else Not(cond)
case e @ CaseWhen(branches, elseValue) if branches.exists(x => falseOrNullLiteral(x._1)) =>
// If there are branches that are always false, remove them.
// If there are no more branches left, just use the else value.
// Note that these two are handled together here in a single case statement because
// otherwise we cannot determine the data type for the elseValue if it is None (i.e. null).
val newBranches = branches.filter(x => !falseOrNullLiteral(x._1))
if (newBranches.isEmpty) {
elseValue.getOrElse(Literal.create(null, e.dataType))
} else {
e.copy(branches = newBranches)
}
case CaseWhen(branches, _) if branches.headOption.map(_._1).contains(TrueLiteral) =>
// If the first branch is a true literal, remove the entire CaseWhen and use the value
// from that. Note that CaseWhen.branches should never be empty, and as a result the
// headOption (rather than head) added above is just an extra (and unnecessary) safeguard.
branches.head._2
case CaseWhen(branches, _) if branches.exists(_._1 == TrueLiteral) =>
        // A branch with a true condition eliminates all following branches;
        // these branches can be pruned away.
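        // e.g. CASE WHEN a THEN x WHEN TRUE THEN y WHEN b THEN z END
        //      => CASE WHEN a THEN x WHEN TRUE THEN y END  (illustrative)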
val (h, t) = branches.span(_._1 != TrueLiteral)
        CaseWhen(h :+ t.head, None)
case e @ CaseWhen(branches, elseOpt)
if branches.forall(_._2.semanticEquals(elseOpt.getOrElse(Literal(null, e.dataType)))) =>
val elseValue = elseOpt.getOrElse(Literal(null, e.dataType))
        // For non-deterministic conditions with side effects, we cannot remove them or change
        // their ordering. As a result, we try to remove the deterministic conditions from the tail.
var hitNonDeterministicCond = false
var i = branches.length
while (i > 0 && !hitNonDeterministicCond) {
hitNonDeterministicCond = !branches(i - 1)._1.deterministic
if (!hitNonDeterministicCond) {
i -= 1
}
}
if (i == 0) {
elseValue
} else {
e.copy(
branches = branches.take(i).map(branch => (branch._1, elseValue)),
elseValue = elseOpt.filterNot(_.semanticEquals(Literal(null, e.dataType))))
}
case e @ CaseWhen(_, elseOpt)
if elseOpt.exists(_.semanticEquals(Literal(null, e.dataType))) =>
e.copy(elseValue = None)
}
}
}
/**
* Push the foldable expression into (if / case) branches.
*/
object PushFoldableIntoBranches extends Rule[LogicalPlan] with PredicateHelper {
  // To be conservative here: it's only a guaranteed win if at most one of the resulting branches
  // ends up being non-foldable.
private def atMostOneUnfoldable(exprs: Seq[Expression]): Boolean = {
exprs.filterNot(_.foldable).size < 2
}
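  // e.g. this permits rewriting Abs(If(cond, -1, col)) into If(cond, Abs(-1), Abs(col)), where
  // Abs(-1) later folds into a literal (illustrative; `col` stands for a non-foldable column).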
// Not all UnaryExpression can be pushed into (if / case) branches, e.g. Alias.
private def supportedUnaryExpression(e: UnaryExpression): Boolean = e match {
case _: IsNull | _: IsNotNull => true
case _: UnaryMathExpression | _: Abs | _: Bin | _: Factorial | _: Hex => true
case _: String2StringExpression | _: Ascii | _: Base64 | _: BitLength | _: Chr | _: Length =>
true
case _: CastBase => true
case _: GetDateField | _: LastDay => true
case _: ExtractIntervalPart[_] => true
case _: ArraySetLike => true
case _: ExtractValue => true
case _ => false
}
// Not all BinaryExpression can be pushed into (if / case) branches.
private def supportedBinaryExpression(e: BinaryExpression): Boolean = e match {
case _: BinaryComparison | _: StringPredicate | _: StringRegexExpression => true
case _: BinaryArithmetic => true
case _: BinaryMathExpression => true
case _: AddMonths | _: DateAdd | _: DateAddInterval | _: DateDiff | _: DateSub |
_: DateAddYMInterval | _: TimestampAddYMInterval | _: TimeAdd => true
case _: FindInSet | _: RoundBase => true
case _ => false
}
def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(
_.containsAnyPattern(CASE_WHEN, IF), ruleId) {
case q: LogicalPlan => q.transformExpressionsUpWithPruning(
_.containsAnyPattern(CASE_WHEN, IF), ruleId) {
case u @ UnaryExpression(i @ If(_, trueValue, falseValue))
if supportedUnaryExpression(u) && atMostOneUnfoldable(Seq(trueValue, falseValue)) =>
i.copy(
trueValue = u.withNewChildren(Array(trueValue)),
falseValue = u.withNewChildren(Array(falseValue)))
case u @ UnaryExpression(c @ CaseWhen(branches, elseValue))
if supportedUnaryExpression(u) && atMostOneUnfoldable(branches.map(_._2) ++ elseValue) =>
c.copy(
branches.map(e => e.copy(_2 = u.withNewChildren(Array(e._2)))),
Some(u.withNewChildren(Array(elseValue.getOrElse(Literal(null, c.dataType))))))
case b @ BinaryExpression(i @ If(_, trueValue, falseValue), right)
if supportedBinaryExpression(b) && right.foldable &&
atMostOneUnfoldable(Seq(trueValue, falseValue)) =>
i.copy(
trueValue = b.withNewChildren(Array(trueValue, right)),
falseValue = b.withNewChildren(Array(falseValue, right)))
case b @ BinaryExpression(left, i @ If(_, trueValue, falseValue))
if supportedBinaryExpression(b) && left.foldable &&
atMostOneUnfoldable(Seq(trueValue, falseValue)) =>
i.copy(
trueValue = b.withNewChildren(Array(left, trueValue)),
falseValue = b.withNewChildren(Array(left, falseValue)))
case b @ BinaryExpression(c @ CaseWhen(branches, elseValue), right)
if supportedBinaryExpression(b) && right.foldable &&
atMostOneUnfoldable(branches.map(_._2) ++ elseValue) =>
c.copy(
branches.map(e => e.copy(_2 = b.withNewChildren(Array(e._2, right)))),
Some(b.withNewChildren(Array(elseValue.getOrElse(Literal(null, c.dataType)), right))))
case b @ BinaryExpression(left, c @ CaseWhen(branches, elseValue))
if supportedBinaryExpression(b) && left.foldable &&
atMostOneUnfoldable(branches.map(_._2) ++ elseValue) =>
c.copy(
branches.map(e => e.copy(_2 = b.withNewChildren(Array(left, e._2)))),
Some(b.withNewChildren(Array(left, elseValue.getOrElse(Literal(null, c.dataType))))))
}
}
}
/**
* Simplifies LIKE expressions that do not need full regular expressions to evaluate the condition.
* For example, when the expression is just checking to see if a string starts with a given
* pattern.
*/
object LikeSimplification extends Rule[LogicalPlan] {
// if guards below protect from escapes on trailing %.
// Cases like "something\\%" are not optimized, but this does not affect correctness.
private val startsWith = "([^_%]+)%".r
private val endsWith = "%([^_%]+)".r
private val startsAndEndsWith = "([^_%]+)%([^_%]+)".r
private val contains = "%([^_%]+)%".r
private val equalTo = "([^_%]*)".r
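  // Illustrative rewrites produced by simplifyLike below (sketch):
  //   col LIKE 'abc%'  => StartsWith(col, 'abc')
  //   col LIKE '%abc'  => EndsWith(col, 'abc')
  //   col LIKE 'a%c'   => Length(col) >= 2 AND StartsWith(col, 'a') AND EndsWith(col, 'c')
  //   col LIKE '%abc%' => Contains(col, 'abc')
  //   col LIKE 'abc'   => col = 'abc'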
private def simplifyLike(
input: Expression, pattern: String, escapeChar: Char = '\\\\'): Option[Expression] = {
if (pattern.contains(escapeChar)) {
// There are three different situations when pattern containing escapeChar:
// 1. pattern contains invalid escape sequence, e.g. 'm\\aca'
// 2. pattern contains escaped wildcard character, e.g. 'ma\\%ca'
// 3. pattern contains escaped escape character, e.g. 'ma\\\\ca'
      // Although some patterns could be optimized if we handled the escape first, we just
      // skip this rule if the pattern contains any escapeChar, for simplicity.
None
} else {
pattern match {
case startsWith(prefix) =>
Some(StartsWith(input, Literal(prefix)))
case endsWith(postfix) =>
Some(EndsWith(input, Literal(postfix)))
        // The 'a%a' pattern is basically the same as 'a%' && '%a'.
        // However, the additional `Length` condition is required to prevent 'a' from matching 'a%a'.
case startsAndEndsWith(prefix, postfix) =>
Some(And(GreaterThanOrEqual(Length(input), Literal(prefix.length + postfix.length)),
And(StartsWith(input, Literal(prefix)), EndsWith(input, Literal(postfix)))))
case contains(infix) =>
Some(Contains(input, Literal(infix)))
case equalTo(str) =>
Some(EqualTo(input, Literal(str)))
case _ => None
}
}
}
private def simplifyMultiLike(
child: Expression, patterns: Seq[UTF8String], multi: MultiLikeBase): Expression = {
val (remainPatternMap, replacementMap) =
patterns.map { p =>
p -> Option(p).flatMap(p => simplifyLike(child, p.toString))
}.partition(_._2.isEmpty)
val remainPatterns = remainPatternMap.map(_._1)
val replacements = replacementMap.map(_._2.get)
if (replacements.isEmpty) {
multi
} else {
multi match {
case l: LikeAll => And(replacements.reduceLeft(And), l.copy(patterns = remainPatterns))
case l: NotLikeAll =>
And(replacements.map(Not(_)).reduceLeft(And), l.copy(patterns = remainPatterns))
case l: LikeAny => Or(replacements.reduceLeft(Or), l.copy(patterns = remainPatterns))
case l: NotLikeAny =>
Or(replacements.map(Not(_)).reduceLeft(Or), l.copy(patterns = remainPatterns))
}
}
}
def apply(plan: LogicalPlan): LogicalPlan = plan.transformAllExpressionsWithPruning(
_.containsPattern(LIKE_FAMLIY), ruleId) {
case l @ Like(input, Literal(pattern, StringType), escapeChar) =>
if (pattern == null) {
// If pattern is null, return null value directly, since "col like null" == null.
Literal(null, BooleanType)
} else {
simplifyLike(input, pattern.toString, escapeChar).getOrElse(l)
}
case l @ LikeAll(child, patterns) => simplifyMultiLike(child, patterns, l)
case l @ NotLikeAll(child, patterns) => simplifyMultiLike(child, patterns, l)
case l @ LikeAny(child, patterns) => simplifyMultiLike(child, patterns, l)
case l @ NotLikeAny(child, patterns) => simplifyMultiLike(child, patterns, l)
}
}
/**
* Replaces [[Expression Expressions]] that can be statically evaluated with
 * equivalent [[Literal]] values. This rule specifically handles null value propagation
 * from the bottom to the top of the expression tree.
*/
object NullPropagation extends Rule[LogicalPlan] {
private def isNullLiteral(e: Expression): Boolean = e match {
case Literal(null, _) => true
case _ => false
}
def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(
t => t.containsAnyPattern(NULL_CHECK, NULL_LITERAL, COUNT, COALESCE)
|| t.containsAllPatterns(WINDOW_EXPRESSION, CAST, LITERAL), ruleId) {
case q: LogicalPlan => q.transformExpressionsUpWithPruning(
t => t.containsAnyPattern(NULL_CHECK, NULL_LITERAL, COUNT, COALESCE)
|| t.containsAllPatterns(WINDOW_EXPRESSION, CAST, LITERAL), ruleId) {
case e @ WindowExpression(Cast(Literal(0L, _), _, _, _), _) =>
Cast(Literal(0L), e.dataType, Option(conf.sessionLocalTimeZone))
case e @ AggregateExpression(Count(exprs), _, _, _, _) if exprs.forall(isNullLiteral) =>
Cast(Literal(0L), e.dataType, Option(conf.sessionLocalTimeZone))
case ae @ AggregateExpression(Count(exprs), _, false, _, _) if !exprs.exists(_.nullable) =>
        // This rule should only be triggered when the isDistinct field is false.
ae.copy(aggregateFunction = Count(Literal(1)))
case IsNull(c) if !c.nullable => Literal.create(false, BooleanType)
case IsNotNull(c) if !c.nullable => Literal.create(true, BooleanType)
case EqualNullSafe(Literal(null, _), r) => IsNull(r)
case EqualNullSafe(l, Literal(null, _)) => IsNull(l)
case AssertNotNull(c, _) if !c.nullable => c
// For Coalesce, remove null literals.
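      // e.g. Coalesce(null, a, null, b, 1, c) => Coalesce(a, b, 1) -- children after the first
      // non-nullable child (the literal 1) are unreachable (illustrative).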
case e @ Coalesce(children) =>
val newChildren = children.filterNot(isNullLiteral)
if (newChildren.isEmpty) {
Literal.create(null, e.dataType)
} else if (newChildren.length == 1) {
newChildren.head
} else {
val nonNullableIndex = newChildren.indexWhere(e => !e.nullable)
if (nonNullableIndex > -1) {
Coalesce(newChildren.take(nonNullableIndex + 1))
} else {
Coalesce(newChildren)
}
}
// If the value expression is NULL then transform the In expression to null literal.
case In(Literal(null, _), _) => Literal.create(null, BooleanType)
case InSubquery(Seq(Literal(null, _)), _) => Literal.create(null, BooleanType)
      // Non-leaf NullIntolerant expressions will return null if at least one of their children is
      // a null literal.
case e: NullIntolerant if e.children.exists(isNullLiteral) =>
Literal.create(null, e.dataType)
}
}
}
/**
* Unwrap the input of IsNull/IsNotNull if the input is NullIntolerant
* E.g. IsNull(Not(null)) == IsNull(null)
*/
object NullDownPropagation extends Rule[LogicalPlan] {
  // Return true iff the expression returns a non-null result for all non-null inputs.
  // Not all `NullIntolerant` can be propagated. E.g. `Cast` is `NullIntolerant`; however,
  // cast('Infinity' as integer) is null. Hence, `Cast` is not a supported `NullIntolerant`.
// `ExtractValue` is also not supported. E.g. the planner may resolve column `a` to `a#123`,
// then IsNull(a#123) cannot be optimized.
// Applying to `EqualTo` is too disruptive for [SPARK-32290] optimization, not supported for now.
// If e has multiple children, the deterministic check is required because optimizing
  // IsNull(a > b) to Or(IsNull(a), IsNull(b)), for example, may cause the evaluation of b to be skipped
private def supportedNullIntolerant(e: NullIntolerant): Boolean = (e match {
case _: Not => true
case _: GreaterThan | _: GreaterThanOrEqual | _: LessThan | _: LessThanOrEqual
if e.deterministic => true
case _ => false
}) && e.children.nonEmpty
def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(
_.containsPattern(NULL_CHECK), ruleId) {
case q: LogicalPlan => q.transformExpressionsDownWithPruning(
_.containsPattern(NULL_CHECK), ruleId) {
case IsNull(e: NullIntolerant) if supportedNullIntolerant(e) =>
e.children.map(IsNull(_): Expression).reduceLeft(Or)
case IsNotNull(e: NullIntolerant) if supportedNullIntolerant(e) =>
e.children.map(IsNotNull(_): Expression).reduceLeft(And)
}
}
}
/**
* Replace attributes with aliases of the original foldable expressions if possible.
* Other optimizations will take advantage of the propagated foldable expressions. For example,
* this rule can optimize
* {{{
* SELECT 1.0 x, 'abc' y, Now() z ORDER BY x, y, 3
* }}}
* to
* {{{
* SELECT 1.0 x, 'abc' y, Now() z ORDER BY 1.0, 'abc', Now()
* }}}
* and other rules can further optimize it and remove the ORDER BY operator.
*/
object FoldablePropagation extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = {
CleanupAliases(propagateFoldables(plan)._1)
}
private def propagateFoldables(plan: LogicalPlan): (LogicalPlan, AttributeMap[Alias]) = {
plan match {
case p: Project =>
val (newChild, foldableMap) = propagateFoldables(p.child)
val newProject =
replaceFoldable(p.withNewChildren(Seq(newChild)).asInstanceOf[Project], foldableMap)
val newFoldableMap = collectFoldables(newProject.projectList)
(newProject, newFoldableMap)
case a: Aggregate =>
val (newChild, foldableMap) = propagateFoldables(a.child)
val newAggregate =
replaceFoldable(a.withNewChildren(Seq(newChild)).asInstanceOf[Aggregate], foldableMap)
val newFoldableMap = collectFoldables(newAggregate.aggregateExpressions)
(newAggregate, newFoldableMap)
// We can not replace the attributes in `Expand.output`. If there are other non-leaf
// operators that have the `output` field, we should put them here too.
case e: Expand =>
val (newChild, foldableMap) = propagateFoldables(e.child)
val expandWithNewChildren = e.withNewChildren(Seq(newChild)).asInstanceOf[Expand]
val newExpand = if (foldableMap.isEmpty) {
expandWithNewChildren
} else {
val newProjections = expandWithNewChildren.projections.map(_.map(_.transform {
case a: AttributeReference if foldableMap.contains(a) => foldableMap(a)
}))
if (newProjections == expandWithNewChildren.projections) {
expandWithNewChildren
} else {
expandWithNewChildren.copy(projections = newProjections)
}
}
(newExpand, foldableMap)
case u: UnaryNode if canPropagateFoldables(u) =>
val (newChild, foldableMap) = propagateFoldables(u.child)
val newU = replaceFoldable(u.withNewChildren(Seq(newChild)), foldableMap)
(newU, foldableMap)
// Join derives the output attributes from its child while they are actually not the
// same attributes. For example, the output of outer join is not always picked from its
      // children, but can also be null. We should exclude these mis-derived attributes when
// propagating the foldable expressions.
// TODO(cloud-fan): It seems more reasonable to use new attributes as the output attributes
// of outer join.
case j: Join =>
val (newChildren, foldableMaps) = j.children.map(propagateFoldables).unzip
val foldableMap = AttributeMap(
foldableMaps.foldLeft(Iterable.empty[(Attribute, Alias)])(_ ++ _.baseMap.values).toSeq)
val newJoin =
replaceFoldable(j.withNewChildren(newChildren).asInstanceOf[Join], foldableMap)
val missDerivedAttrsSet: AttributeSet = AttributeSet(newJoin.joinType match {
case _: InnerLike | LeftExistence(_) => Nil
case LeftOuter => newJoin.right.output
case RightOuter => newJoin.left.output
case FullOuter => newJoin.left.output ++ newJoin.right.output
case _ => Nil
})
val newFoldableMap = AttributeMap(foldableMap.baseMap.values.filterNot {
case (attr, _) => missDerivedAttrsSet.contains(attr)
}.toSeq)
(newJoin, newFoldableMap)
      // Other plans are not safe for foldable propagation, and they should not
      // propagate foldable expressions from their children.
case o =>
val newOther = o.mapChildren(propagateFoldables(_)._1)
(newOther, AttributeMap.empty)
}
}
private def replaceFoldable(plan: LogicalPlan, foldableMap: AttributeMap[Alias]): plan.type = {
if (foldableMap.isEmpty) {
plan
} else {
plan transformExpressions {
case a: AttributeReference if foldableMap.contains(a) => foldableMap(a)
}
}
}
private def collectFoldables(expressions: Seq[NamedExpression]) = {
AttributeMap(expressions.collect {
case a: Alias if a.child.foldable => (a.toAttribute, a)
})
}
/**
* List of all [[UnaryNode]]s which allow foldable propagation.
*/
private def canPropagateFoldables(u: UnaryNode): Boolean = u match {
// Handling `Project` is moved to `propagateFoldables`.
case _: Filter => true
case _: SubqueryAlias => true
// Handling `Aggregate` is moved to `propagateFoldables`.
case _: Window => true
case _: Sample => true
case _: GlobalLimit => true
case _: LocalLimit => true
case _: Generate => true
case _: Distinct => true
case _: AppendColumns => true
case _: AppendColumnsWithObject => true
case _: RepartitionByExpression => true
case _: Repartition => true
case _: RebalancePartitions => true
case _: Sort => true
case _: TypedFilter => true
case _ => false
}
}
/**
* Removes [[Cast Casts]] that are unnecessary because the input is already the correct type.
*/
object SimplifyCasts extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan.transformAllExpressionsWithPruning(
_.containsPattern(CAST), ruleId) {
case Cast(e, dataType, _, _) if e.dataType == dataType => e
case c @ Cast(Cast(e, dt1: NumericType, _, _), dt2: NumericType, _, _)
if isWiderCast(e.dataType, dt1) && isWiderCast(dt1, dt2) =>
c.copy(child = e)
case c @ Cast(e, dataType, _, _) => (e.dataType, dataType) match {
case (ArrayType(from, false), ArrayType(to, true)) if from == to => e
case (MapType(fromKey, fromValue, false), MapType(toKey, toValue, true))
if fromKey == toKey && fromValue == toValue => e
case _ => c
}
}
  // Returns whether the `from` DataType can be safely cast to the `to` DataType without losing
  // any precision or range.
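  // e.g. Cast(Cast(intCol, LongType), DecimalType(20, 0)) can drop the inner cast, since both
  // steps widen the type (illustrative sketch; `intCol` stands for an integer column).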
private def isWiderCast(from: DataType, to: NumericType): Boolean = (from, to) match {
case (from: NumericType, to: DecimalType) if to.isWiderThan(from) => true
case (from: DecimalType, to: NumericType) if from.isTighterThan(to) => true
case (from: IntegralType, to: IntegralType) => Cast.canUpCast(from, to)
case _ => from == to
}
}
/**
* Removes nodes that are not necessary.
*/
object RemoveDispensableExpressions extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan.transformAllExpressionsWithPruning(
_.containsPattern(UNARY_POSITIVE), ruleId) {
case UnaryPositive(child) => child
}
}
/**
* Removes the inner case conversion expressions that are unnecessary because
* the inner conversion is overwritten by the outer one.
*/
object SimplifyCaseConversionExpressions extends Rule[LogicalPlan] {
def apply(plan: LogicalPlan): LogicalPlan = plan.transformWithPruning(
_.containsPattern(UPPER_OR_LOWER), ruleId) {
case q: LogicalPlan => q.transformExpressionsUpWithPruning(
_.containsPattern(UPPER_OR_LOWER), ruleId) {
case Upper(Upper(child)) => Upper(child)
case Upper(Lower(child)) => Upper(child)
case Lower(Upper(child)) => Lower(child)
case Lower(Lower(child)) => Lower(child)
}
}
}
/**
* Combine nested [[Concat]] expressions.
*/
object CombineConcats extends Rule[LogicalPlan] {
private def flattenConcats(concat: Concat): Concat = {
val stack = Stack[Expression](concat)
val flattened = ArrayBuffer.empty[Expression]
while (stack.nonEmpty) {
stack.pop() match {
case Concat(children) =>
stack.pushAll(children.reverse)
// If `spark.sql.function.concatBinaryAsString` is false, nested `Concat` exprs possibly
// have `Concat`s with binary output. Since `TypeCoercion` casts them into strings,
// we need to handle the case to combine all nested `Concat`s.
case c @ Cast(Concat(children), StringType, _, _) =>
val newChildren = children.map { e => c.copy(child = e) }
stack.pushAll(newChildren.reverse)
case child =>
flattened += child
}
}
Concat(flattened.toSeq)
}
private def hasNestedConcats(concat: Concat): Boolean = concat.children.exists {
case c: Concat => true
case c @ Cast(Concat(children), StringType, _, _) => true
case _ => false
}
def apply(plan: LogicalPlan): LogicalPlan = plan.transformAllExpressionsWithPruning(
_.containsPattern(CONCAT), ruleId) {
case concat: Concat if hasNestedConcats(concat) =>
flattenConcats(concat)
}
}
|
WeichenXu123/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/expressions.scala
|
Scala
|
apache-2.0
| 51,265
|
package io.scalac.frees.login.handlers.task.github
import fs2.Task
import fs2.util.NonFatal
import io.scalac.frees.login.algebras._
import io.scalac.frees.login.types.GitHubId
import org.http4s._
import org.http4s.client.blaze.PooledHttp1Client
import org.http4s.dsl.POST
/**
 * This is a simple implementation of GitHubClient,
 * which is also a `GitHubClient.Handler[Task]`.
 * It was created as a learning exercise while writing the blog post, but it is not really
 * related to Free Monads.
 * I wanted to use `github4s`, but I could not obtain the email list with it.
*/
class InHouseGHClient(
val clientId: String,
val clientSecret: String
) extends GitHubClient.Handler[Task] {
private val httpClient = PooledHttp1Client()
def login(code: String): Task[GitHubDataResponse] = {
def requestAccessToken: Task[UrlForm] = {
val form = UrlForm(
"client_id" -> clientId,
"client_secret" -> clientSecret,
"code" -> code
)
for {
entity <- UrlForm.entityEncoder.toEntity(form)
request = Request(
method = POST,
uri = Uri.uri("https://github.com/login/oauth/access_token"),
body = entity.body
)
resp <- httpClient.expect[UrlForm](request)
} yield resp
}
def requestUserData(accessToken: String): Task[GitHubDataResponse] = {
import io.circe.generic.auto._
import org.http4s.circe._
val requestUserId: Task[GitHubId] = {
val uri = Uri.uri("https://api.github.com/user")
.withQueryParam("access_token", accessToken)
httpClient
.expect(uri)(jsonOf[GitHubUser])
.map(_.id)
}
val requestPrimaryVerifiedEmail: Task[Option[String]] = {
val uri = Uri.uri("https://api.github.com/user/emails")
.withQueryParam("access_token", accessToken)
httpClient.expect(uri)(jsonOf[Vector[GitHubEmail]])
.map(_.filter(e => e.primary && e.verified).map(_.email).headOption)
}
for {
id <- requestUserId
emailOpt <- requestPrimaryVerifiedEmail
} yield {
emailOpt.map(GitHubData(id, _)).getOrElse(GitHubNoEmail)
}
}
requestAccessToken.flatMap { f =>
val hasScope = f.get("scope").exists(s => s == "user" || s == "user:email")
val accessTokenOpt: Option[String] = f.getFirst("access_token")
if (hasScope && accessTokenOpt.isDefined)
requestUserData(accessTokenOpt.get)
else
Task.now(GitHubInsufficientPermissions)
}.handle {
case NonFatal(t) =>
GitHubFailure(t)
}
}
  // The following classes are for circe decoding and basic logic over them
case class GitHubEmail(email: String, primary: Boolean, verified: Boolean)
case class GitHubUser(id: Long)
}
|
LGLO/freestyle-login
|
src/main/scala/io/scalac/frees/login/handlers/task/github/InHouseGHClient.scala
|
Scala
|
apache-2.0
| 2,784
|
/*
* Copyright (C) 2017 Michael Dippery <michael@monkey-robot.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mipadi.jupiter.time
import java.time.{LocalDateTime, ZonedDateTime, ZoneId}
import java.util.Date
import org.scalatest._
import com.mipadi.jupiter.time.DateConversions._
class DateConversionsSpec extends FlatSpec with Matchers {
"A date" should "be convertible to a legacy date" in {
val date = new Date(1495587575000L)
val expected = new Date(1495587575000L)
date.toDate should be (expected)
}
it should "be convertible to a local datetime" in {
val date = new Date(1495587575000L)
val expected = LocalDateTime.of(2017, 5, 24, 0, 59, 35)
date.toLocal should be (expected)
}
it should "be convertible to a zoned datetime" in {
val date = new Date(1495587575000L)
val expected = ZonedDateTime.of(2017, 5, 24, 0, 59, 35, 0, ZoneId.of("UTC"))
date.toZoned should be (expected)
}
"A local datetime" should "be convertible to a legacy date" in {
val date = LocalDateTime.of(2017, 5, 24, 0, 59, 35)
val expected = new Date(1495587575000L)
date.toDate should be (expected)
}
it should "be convertible to a local datetime" in {
val date = LocalDateTime.of(2017, 5, 24, 0, 59, 35)
val expected = LocalDateTime.of(2017, 5, 24, 0, 59, 35)
date.toLocal should be (expected)
}
it should "be convertible to a zoned datetime" in {
val date = LocalDateTime.of(2017, 5, 24, 0, 59, 35)
val expected = ZonedDateTime.of(2017, 5, 24, 0, 59, 35, 0, ZoneId.systemDefault)
date.toZoned should be (expected)
}
"A zoned datetime" should "be convertible to a legacy date" in {
val date = ZonedDateTime.of(2017, 5, 24, 0, 59, 35, 0, ZoneId.of("UTC"))
val expected = new Date(1495587575000L)
date.toDate should be (expected)
}
it should "be convertible to a local datetime" in {
val date = ZonedDateTime.of(2017, 5, 24, 0, 59, 35, 0, ZoneId.systemDefault)
val expected = LocalDateTime.of(2017, 5, 24, 0, 59, 35)
date.toLocal should be (expected)
}
it should "be convertible to a zoned datetime" in {
val date = ZonedDateTime.of(2017, 5, 24, 0, 59, 35, 0, ZoneId.of("UTC"))
val expected = ZonedDateTime.of(2017, 5, 24, 0, 59, 35, 0, ZoneId.of("UTC"))
date.toZoned should be (expected)
}
}
|
mdippery/jupiter
|
src/test/scala/com/mipadi/jupiter/time/DateConversionsSpec.scala
|
Scala
|
apache-2.0
| 2,857
|
package control
import scala.util.control.NonFatal
import scalaz._, Scalaz._, \\&/._
/**
* A data type for holding statuses. This is effectively just an
* Either with a specialized left. This particular specialization
* handles string/exception based failures and should be used
* to wrap up unsafe apis (i.e. java code).
*
* This specialization exists for a number of reasons:
* - scala.
* - having a single type param helps inference in a non-trivial way
* (this is essential to it later being used in a monad transformer).
* - useful methods for manipulating error messages.
* - better pattern matching support.
* - and again, scala.
*/
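// A minimal usage sketch with illustrative values:
//   Status.safe("42".toInt)                                    // Ok(42)
//   Status.safe("oops".toInt)                                  // Error(That(NumberFormatException))
//   Status.safe("oops".toInt).prependErrorMessage("parsing")   // prefixes the rendered error message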
sealed trait Status[+A] {
@inline final def fold[X](
ok: A => X,
error: These[String, Throwable] => X
): X = this match {
case Ok(a) => ok(a)
case Error(e) => error(e)
}
@inline final def foldAll[X](
ok: A => X,
fail: String => X,
exception: Throwable => X,
both: (String, Throwable) => X
): X = fold(ok, _ match {
case This(m) => fail(m)
case That(e) => exception(e)
case Both(m, e) => both(m, e)
})
def map[B](f: A => B): Status[B] =
flatMap(f andThen Status.ok[B])
def flatMap[B](f: A => Status[B]): Status[B] =
fold(f, Status.these[B])
def mapError(f: These[String, Throwable] => These[String, Throwable]): Status[A] =
fold(Status.ok, f andThen Status.these)
def mapErrorMessage(f: Option[String] => String): Status[A] =
foldAll(
Status.ok,
m => Status.fail(f(Some(m))),
t => Status.error(f(None), t),
(m, t) => Status.error(f(Some(m)), t)
)
def prependErrorMessage(annotation: String): Status[A] =
mapErrorMessage({
case None => annotation
case Some(current) => s"${annotation} - ${current}"
})
def isOk: Boolean =
fold(_ => true, _ => false)
def isError: Boolean =
!isOk
def toDisjunction: These[String, Throwable] \\/ A =
fold(_.right, _.left)
def toOption: Option[A] =
fold(_.some, _ => none[A])
def toEither: Either[These[String, Throwable], A] =
toDisjunction.toEither
def toOptionError: Option[These[String, Throwable]] =
fold(_ => none, _.some)
def toOptionErrorMessage: Option[String] =
fold(_ => none, e => Status.asString(e).some)
def getOrElse[AA >: A](otherwise: => AA): AA =
toOption.getOrElse(otherwise)
def |||[AA >: A](otherwise: => Status[AA]): Status[AA] =
if (isOk) this else otherwise
}
case class Ok[A](value: A) extends Status[A]
case class Error[A](error: These[String, Throwable]) extends Status[A]
object Status {
def safe[A](thunk: => A): Status[A] =
try ok(thunk) catch { case NonFatal(t) => exception(t) }
def option[A](thunk: => A): Status[Option[A]] =
try ok(Option(thunk)) catch { case NonFatal(t) => exception(t) }
def ok[A](a: A): Status[A] =
Ok(a)
def exception[A](t: Throwable): Status[A] =
these(That(t))
def fail[A](message: String): Status[A] =
these(This(message))
def error[A](message: String, t: Throwable): Status[A] =
these(Both(message, t))
def these[A](error: These[String, Throwable]): Status[A] =
Error(error)
def fromDisjunction[A](v: These[String, Throwable] \\/ A): Status[A] =
v.fold(these, ok)
def asString(these: These[String, Throwable]) = these match {
case (This(m)) => m
case (That(t)) => Throwables.renderWithStack(t)
case (Both(m, t)) => s"${m}, caused by:\\n${Throwables.renderWithStack(t)}}"
}
def prependThis(these: These[String, Throwable], prepend: String): These[String, Throwable] =
these.fold(m => This(prepend + " - " + m),
t => Both(prepend, t),
(m, t) => Both(prepend + " - " + m, t))
implicit def StatusMonad: Monad[Status] = new Monad[Status] {
def point[A](v: => A) = ok(v)
def bind[A, B](m: Status[A])(f: A => Status[B]) = m.flatMap(f)
}
implicit def StatusEqual[A: Equal]: Equal[Status[A]] = {
implicit def ThrowableEqual = Equal.equalA[Throwable]
implicitly[Equal[These[String, Throwable] \\/ A]].contramap(_.toDisjunction)
}
}
object Throwables {
def render(t: Throwable): String =
s"Error[${t.getClass.getName}]" + (Option(t.getMessage) match {
case None => ""
case Some(message) => s" ${message}"
})
def renderWithStack(t: Throwable): String =
s"""============================================================
|${render(t)}
|------------------------------------------------------------
|${traceWithIndent(t, " ")}
|============================================================
|""".stripMargin
def trace(t: Throwable): String = {
val out = new java.io.StringWriter
t.printStackTrace(new java.io.PrintWriter(out))
out.toString
}
def traceWithIndent(t: Throwable, indent: String): String =
trace(t).lines.map(line => indent + line).mkString("\\n")
}
|
etorreborre/lambdajam-2014
|
src/main/scala/control/Status.scala
|
Scala
|
mit
| 5,149
|
package controllers
import play.api.mvc._
import play.api.i18n.Lang
trait BaseController extends ControllerActions
case class ViewContext(request: Request[AnyContent], lang: Lang, maybeUser: Option[models.User]) extends WrappedRequest(request)
trait ControllerActions extends Controller with misc.Logging {
type ResultOrCredentials = Option[Either[Result, (String,String)]]
def ViewContextAction(f: ViewContext => Result): Action[AnyContent] = {
Action { implicit request =>
f(ViewContext(request, lang, getUser(request)))
}
}
def AuthenticatedAction(f: ViewContext => Result): Action[AnyContent] = {
ViewContextAction { implicit context =>
maybeCredentials(context) map { resultOrCredentials =>
resultOrCredentials match {
case Left(errorResult) => errorResult
case Right(credentials) =>
val (username, password) = credentials
if (authenticate(username, password)) {
f(context)
} else {
Unauthorized
}
}
} getOrElse {
Unauthorized("No user name and password provided")
}
}
}
def getUser(request: Request[AnyContent]): Option[models.User] = {
None
}
def authenticate[A](username: String, password: String)(implicit request: Request[AnyContent]): Boolean = {
logger.info(s"Authenticating (${username},${password})")
true
}
protected def maybeCredentials(request: Request[AnyContent]): ResultOrCredentials = {
readQueryString(request) orElse readBasicAuthentication(request.headers)
}
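  // Expects an Authorization header of the form "Basic <base64(username:password)>",
  // e.g. "Basic dXNlcjpwYXNz" decodes to "user:pass" (illustrative credentials).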
protected def readBasicAuthentication(headers: Headers): ResultOrCredentials = {
headers.get(play.mvc.Http.HeaderNames.AUTHORIZATION) map { header =>
val BasicHeader = "Basic (.*)".r
header match {
case BasicHeader(base64) => {
try {
val decodedBytes = org.apache.commons.codec.binary.Base64.decodeBase64(base64.getBytes)
val credentials = new String(decodedBytes).split(":",2)
credentials match {
case Array(username, password) =>
Right(username, password)
case _ =>
Left(BadRequest("Invalid authorization header"))
}
} catch {
case th: Throwable => Left(BadRequest("Invalid authorization header"))
}
}
case _ => Left(BadRequest("Bad authorization header"))
}
}
}
protected def readQueryString(request: Request[AnyContent]): ResultOrCredentials = {
request.queryString.get("user").map{ username =>
request.queryString.get("password").map { password =>
Right( (username.head, password.head) )
}.getOrElse{
Left(BadRequest("Password not specified"))
}
}
}
}
|
alanktwong/play-eventsourced
|
app/controllers/BaseController.scala
|
Scala
|
mit
| 2,601
|
package core
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll, FunSpec}
import org.scalatest.Matchers
class ServicePathParametersSpec extends FunSpec with Matchers {
describe("with a service") {
val baseJson = """
{
"name": "API Builder",
"apidoc": { "version": "0.9.6" },
"enums": {
"age_group": {
"values": [
{ "name": "Youth" },
{ "name": "Adult" }
]
}
},
"models": {
"tag": {
"fields": [
{ "name": "id", "type": "long" }
]
},
"user": {
"fields": [
{ "name": "id", "type": "long" },
{ "name": "name", "type": "string" },
{ "name": "created_at_date", "type": "date-iso8601" },
{ "name": "created_at_date_time", "type": "date-time-iso8601" },
{ "name": "tag", "type": "tag" },
{ "name": "tags", "type": "map" },
{ "name": "age_group", "type": "age_group" }
]
}
},
"resources": {
"user": {
"operations": [
{
"method": "%s",
"path": "%s"
}
]
}
}
}
"""
it("numbers can be path parameters") {
val json = baseJson.format("GET", "/:id")
TestHelper.serviceValidatorFromApiJson(json).errors should be(Nil)
}
it("strings can be path parameters") {
val json = baseJson.format("GET", "/:name")
TestHelper.serviceValidatorFromApiJson(json).errors should be(Nil)
}
it("supports file extensions") {
val json = baseJson.format("GET", "/:id.html")
TestHelper.serviceValidatorFromApiJson(json).errors should be(Nil)
}
it("parameters not defined on the model are accepted (assumed strings)") {
val json = baseJson.format("GET", "/:some_string")
TestHelper.serviceValidatorFromApiJson(json).errors should be(Nil)
}
it("enums can be path parameters - assumed type is string") {
val json = baseJson.format("GET", "/:age_group")
TestHelper.serviceValidatorFromApiJson(json).errors should be(Nil)
}
it("dates can be path parameters") {
val json = baseJson.format("GET", "/:created_at_date")
TestHelper.serviceValidatorFromApiJson(json).errors should be(Nil)
}
it("date-time can be path parameters") {
val json = baseJson.format("GET", "/:created_at_date_time")
TestHelper.serviceValidatorFromApiJson(json).errors should be(Nil)
}
it("other models cannot be path parameters") {
val json = baseJson.format("GET", "/:tag")
TestHelper.serviceValidatorFromApiJson(json).errors.mkString("") should be(
"Resource[user] GET /users/:tag path parameter[tag] has an invalid type[tag]. Valid types for path parameters are: enum, boolean, decimal, integer, double, long, string, date-iso8601, date-time-iso8601, uuid."
)
}
it("unsupported types declared as parameters are validated") {
val json = baseJson.format("POST", "/:tags")
TestHelper.serviceValidatorFromApiJson(json).errors.mkString("") should be(
"Resource[user] POST /users/:tags path parameter[tags] has an invalid type[map[string]]. Valid types for path parameters are: enum, boolean, decimal, integer, double, long, string, date-iso8601, date-time-iso8601, uuid."
)
}
}
describe("w/ a union resource") {
val baseJson = s"""
{
"name": "API Builder",
"apidoc": { "version": "0.9.6" },
"unions": {
"user": {
"types": [
{ "type": "registered" },
{ "type": "guest" }
]
}
},
"models": {
"registered": {
"fields": [
{ "name": "guid", "type": "uuid" },
{ "name": "age", "type": "integer" }
]
},
"guest": {
"fields": [
{ "name": "guid", "type": "uuid" },
{ "name": "age", "type": "long" }
]
}
},
"resources": {
"user": {
"operations": [
{ "method": "GET", "path": "/users/:%s" }
]
}
}
}
"""
it("can identify common type for path parameter if all union types have the same type") {
val json = baseJson.format("guid")
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors should be(Nil)
val userResource = validator.service().resources.head
val op = userResource.operations.head
val param = op.parameters.head
param.name should be("guid")
param.`type` should be("uuid")
}
it("uses default 'string' if path parameter type varies across union type") {
val json = baseJson.format("age")
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors should be(Nil)
val userResource = validator.service().resources.head
val op = userResource.operations.head
val param = op.parameters.head
param.name should be("age")
param.`type` should be("string")
}
}
it("passes correctly specified path parameters") {
val baseJson = """
{
"name": "API Builder",
"models": {
"user": { "fields": [ { "name": "id", "type": "long" } ] }
},
"resources": {
"user": {
"path": "/foo/:id/bar",
"operations": [
{
"method": "GET",
"parameters": [ { "name": "id", "type": "long", "location": "path" } ]
}
]
}
}
}
"""
val json = baseJson.format("age")
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors shouldBe List.empty
}
it("fails missing path parameters") {
val baseJson = """
{
"name": "API Builder",
"models": {
"user": { "fields": [ { "name": "id", "type": "long" } ] }
},
"resources": {
"user": {
"operations": [
{
"method": "GET",
"parameters": [ { "name": "id", "type": "long", "location": "path" } ]
}
]
}
}
}
"""
val json = baseJson.format("age")
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors().mkString("") shouldBe "Resource[user] GET /users path parameter[id] is missing from the path[/users]"
}
it("fails incorrectly named path parameters in the middle of the path") {
val baseJson = """
{
"name": "API Builder",
"models": {
"user": { "fields": [ { "name": "id", "type": "long" } ] }
},
"resources": {
"user": {
"path": "/foo/:ids/bar",
"operations": [
{
"method": "GET",
"parameters": [ { "name": "id", "type": "long", "location": "path" } ]
}
]
}
}
}
"""
val json = baseJson.format("age")
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors().mkString("") shouldBe "Resource[user] GET /foo/:ids/bar path parameter[id] is missing from the path[/foo/:ids/bar]"
}
it("fails incorrectly named path parameters at the end of the path") {
val baseJson = """
{
"name": "API Builder",
"models": {
"user": { "fields": [ { "name": "id", "type": "long" } ] }
},
"resources": {
"user": {
"path": "/foo/:ids",
"operations": [
{
"method": "GET",
"parameters": [ { "name": "id", "type": "long", "location": "path" } ]
}
]
}
}
}
"""
val json = baseJson.format("age")
val validator = TestHelper.serviceValidatorFromApiJson(json)
validator.errors().mkString("") shouldBe "Resource[user] GET /foo/:ids path parameter[id] is missing from the path[/foo/:ids]"
}
}
|
gheine/apidoc
|
core/src/test/scala/core/ServicePathParametersSpec.scala
|
Scala
|
mit
| 7,983
|
package com.jejking.rprng.api
import akka.actor._
import akka.stream.SystemMaterializer
import akka.stream.scaladsl.{Keep, Sink, Source}
import akka.stream.testkit.scaladsl.TestSink
import com.jejking.rprng.rng.CommonsMathRandomGeneratorFactory
import org.apache.commons.math3.random.MersenneTwister
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import org.scalatest.time.SpanSugar._
import org.scalatest.{BeforeAndAfterAll, Inspectors}
import scala.concurrent.Future
/**
 * Tests the [[ToSizedSet]] custom stream-processing stage.
*/
class ToSizedSetSpec extends AnyFlatSpec with Matchers with Inspectors with ScalaFutures with BeforeAndAfterAll {
implicit override val patienceConfig = PatienceConfig(timeout = 1 second, interval = 100 milliseconds)
implicit val system = ActorSystem("test")
implicit val materializer = SystemMaterializer.get(system)
"the stage" should "produce a single set of right size from a sequence of integers when requested" in {
val toSizedSet = ToSizedSet(5)
val set: Future[Set[Int]] = Source(1 to 10)
.via(toSizedSet)
.take(1)
.toMat(Sink.head)(Keep.right)
.run()
whenReady(set) {
s => s shouldBe Set(1, 2, 3, 4, 5)
}
}
it should "produce multiple sets of right size from a sequence of integers when requested" in {
val toSizedSet = ToSizedSet(5)
Source(1 to 20)
.via(toSizedSet)
.runWith(TestSink.probe[Set[Int]])
.request(2)
.expectNext(Set(1,2,3,4,5), Set(6,7,8,9,10))
}
it should "produce multiple sets of right size from a random source of integers" in {
val randomIterator = new Iterator[Int] {
val rng = CommonsMathRandomGeneratorFactory.createNewGeneratorInstance[MersenneTwister]()
override def hasNext: Boolean = true
override def next(): Int = rng.nextInt(100)
}
val toSizedSet = ToSizedSet(5)
val futureSeq: Future[Seq[Set[Int]]] = Source.fromIterator(() => randomIterator)
.via(toSizedSet)
.take(100)
.grouped(100)
.toMat(Sink.head)(Keep.right)
.run()
whenReady(futureSeq) {
seq => {
seq should have size 100
forAll(seq) {
set => set should have size 5
forAll(set) {
i => i should (be >= 0 and be < 100)
}
}
}
}
}
override def afterAll(): Unit = {
this.system.terminate()
}
}
|
jejking/rprng
|
src/test/scala/com/jejking/rprng/api/ToSizedSetSpec.scala
|
Scala
|
apache-2.0
| 2,765
|
package com.twitter.finatra.streams.queryable.thrift.partitioning
import com.twitter.finatra.streams.queryable.thrift.client.partitioning.utils.KafkaUtils.{
murmur2,
toPositive
}
import com.twitter.finatra.streams.queryable.thrift.domain.{KafkaPartitionId, ServiceShardId}
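// Mirrors Kafka's default partitioning for keyed records:
// partition = toPositive(murmur2(keyBytes)) % numPartitions.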
object KafkaPartitioner {
def partitionId(numPartitions: Int, keyBytes: Array[Byte]): KafkaPartitionId = {
val partitionId = toPositive(murmur2(keyBytes)) % numPartitions
KafkaPartitionId(partitionId)
}
}
case class KafkaPartitioner(serviceShardPartitioner: ServiceShardPartitioner, numPartitions: Int) {
def shardIds(keyBytes: Array[Byte]): IndexedSeq[ServiceShardId] = {
val kafkaPartitionId = KafkaPartitioner.partitionId(numPartitions, keyBytes)
IndexedSeq(
serviceShardPartitioner.activeShardId(kafkaPartitionId),
serviceShardPartitioner.standbyShardIds(kafkaPartitionId).head
)
}
}
|
twitter/finatra
|
kafka-streams/kafka-streams-queryable-thrift-client/src/main/scala/com/twitter/finatra/streams/queryable/thrift/partitioning/KafkaPartitioner.scala
|
Scala
|
apache-2.0
| 909
|
import ds.{Orderable, BST}
import org.junit.runner.RunWith
import org.scalacheck.Arbitrary._
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import org.scalatest.prop.Checkers
import org.scalacheck.Arbitrary
@RunWith(classOf[JUnitRunner])
class SetTest extends FunSuite with Checkers {
implicit def arbBST[T : Orderable](implicit a: Arbitrary[T]) = Arbitrary {
for {
v <- Arbitrary.arbitrary[List[T]]
} yield BST(v:_*)
}
def isSorted(l: List[Int]) = l.isEmpty || l.zip(l.tail).forall(x => x._1 <= x._2)
test("Set Insert") {
check((s: BST[Int], x: Int) => s.insert(x).member(x))
}
test("Set delete") {
check((s: BST[Int], x: Int) => !s.delete(x).member(x))
}
test("Set Inorder") {
check((s: BST[Int]) => isSorted(s.toList))
}
}
|
amitdev/functional-ds
|
src/test/scala/SetTest.scala
|
Scala
|
mit
| 827
|
//
// Vtime.scala -- Scala object Vtime
// Project OrcScala
//
// $Id: Vtime.scala 2933 2011-12-15 16:26:02Z jthywissen $
//
// Created by dkitchin on Aug 8, 2011.
//
// Copyright (c) 2011 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.lib.time
import orc.Handle
import orc.error.runtime.RuntimeSupportException
import orc.run.core.VirtualClockOperation
import orc.values.sites.Site0
/** @author dkitchin
*/
object Vtime extends Site0 with VirtualClockOperation {
// Do not invoke directly.
def call(h: Handle) { h !! (new RuntimeSupportException("Vtime")) }
override val quiescentWhileInvoked: Boolean = false
}
|
laurenyew/cOrcS
|
src/orc/lib/time/Vtime.scala
|
Scala
|
bsd-3-clause
| 858
|
package org.denigma.kappa
import fastparse.core.Parsed
import org.denigma.kappa.parsers.{CommentLinksParser, KappaParser}
import org.scalatest.{Inside, Matchers, WordSpec}
class CommentsParserSuite extends WordSpec with Matchers with Inside {
"parse comments" in {
val parser = new CommentLinksParser
inside(parser.linkAfterComment.parse("'a.b' A(x),B(x) <-> A(x!1),B(x!1) @ 'on_rate','off_rate' #A binds B")) {
case failure: Parsed.Failure[_,_] =>
}
val comment = "#^ hello world"
inside(parser.linkAfterComment.parse(comment)) {
case failure: Parsed.Failure[_,_] =>
}
val linkAfterComment = "#^ http://hello.world"
inside(parser.linkAfterComment.parse(linkAfterComment)) {
case Parsed.Success(value: String, index: Int) if value=="http://hello.world" =>
//println("parsed comment = "+value)
}
}
/*
"parse PDF comments" in {
val paper = "#^ :in_paper /resources/pdf/eptcs.pdf"
val page = "#^ :on_page 1"
}
*/
"parse numbers" in {
val parser = new KappaParser
inside(parser.number.parse("10")) { case Parsed.Success(10, index: Int)=> }
inside(parser.number.parse("-10")) { case Parsed.Success(-10, index: Int)=> }
inside(parser.number.parse("10.1234")) { case Parsed.Success(10.1234, index: Int)=> }
inside(parser.number.parse("10E2")) { case Parsed.Success(10E2, index: Int)=> }
inside(parser.number.parse("10.9E3")) { case Parsed.Success(10.9E3, index: Int)=> }
}
}
|
antonkulaga/kappa-notebook
|
app/js/src/test/scala/org/denigma/kappa/CommentsParserSuite.scala
|
Scala
|
mpl-2.0
| 1,550
|
package com.arcusys.valamis.util.mustache
case class RootToken(children: List[Token]) extends Token with CompositeToken {
private val childrenSource = children.map(_.templateSource).mkString
def render(context: Any, partials: Map[String, Mustache], callstack: List[Any]): TokenProduct =
composite(children, context, partials, callstack)
def templateSource: String = childrenSource
}
|
ViLPy/Valamis
|
valamis-util/src/main/scala/com/arcusys/valamis/util/mustache/RootToken.scala
|
Scala
|
lgpl-3.0
| 401
|
package model.battle
/**
* Created by salim on 12/09/2016.
*/
trait Battleable {
var maxHitPoints: Int
var damagePoints: Int = 0
def doDamage(points: Int) = {
damagePoints += points
}
def canBattle = !(battleStatus == Status.Unconcious)
def battleStatus: Status.Value = {
hitPoints match {
case 0 => Status.Unconcious
case _ => Status.OK
}
}
def hitPoints = {
math.max(maxHitPoints - damagePoints, 0)
}
}
|
salimfadhley/scalamoo
|
src/main/scala/model/battle/Battleable.scala
|
Scala
|
mit
| 466
|
/******************************************************************************
* Copyright (c) 2014, Equal Experts Ltd
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the Midas Project.
******************************************************************************/
package com.ee.midas
import org.specs2.mutable.Specification
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import org.specs2.mutable.BeforeAfter
import com.ee.midas.transform.TransformType
import java.net.{URL, URI}
import java.io.File
@RunWith(classOf[JUnitRunner])
class CLIParserSpecs extends Specification {
trait SetupTeardown extends BeforeAfter {
val loader = this.getClass.getClassLoader
val expansionFolder = TransformType.EXPANSION.toString.toLowerCase
val contractionFolder = TransformType.CONTRACTION.toString.toLowerCase
val defaultBaseDeltasDirURI : URI = loader.getResource("deltas").toURI
val defaultMidasConfigURL : URL = new URL(defaultBaseDeltasDirURI.toString + "/midas.config")
val defaultExpansionDeltasDirURL: URL = new File(defaultBaseDeltasDirURI.getPath + "/" + expansionFolder).toURI.toURL
val defaultContractionDeltasDirURL: URL = new File(defaultBaseDeltasDirURI.getPath + "/" + contractionFolder).toURI.toURL
val newBaseDeltasDir = "test-data/cliParserSpecs/deltas"
val newExpansionDeltaURI = newBaseDeltasDir + "/" + expansionFolder
val newContractionDeltaURI = newBaseDeltasDir + "/" + contractionFolder
val newBaseDeltasdirFile = new File(newBaseDeltasDir)
val newExpansionDeltasdirFile = new File(newExpansionDeltaURI)
val newContractionDeltasdirFile = new File(newContractionDeltaURI)
val userSuppliedURI = new File(newBaseDeltasDir).toURI
def before: Any = {
newBaseDeltasdirFile.mkdirs()
newExpansionDeltasdirFile.mkdirs()
newContractionDeltasdirFile.mkdirs()
}
def after: Any = {
newExpansionDeltasdirFile.delete
newContractionDeltasdirFile.delete
newBaseDeltasdirFile.delete
}
}
sequential
"Midas" should {
"run with default values" in new SetupTeardown {
CLIParser.parse(Array()) match {
case Some(config) =>
config.midasHost mustEqual "localhost"
config.midasPort mustEqual 27020
config.mongoHost mustEqual "localhost"
config.mongoPort mustEqual 27017
config.baseDeltasDir mustEqual defaultBaseDeltasDirURI
success
case None =>
failure("Should have run with default Values")
}
}
"run on a given HOST and connect to default source and mongoPort" in new SetupTeardown {
CLIParser.parse(Array("--host","www.midasservice.in")) match {
case Some(config) =>
config.midasHost mustEqual "www.midasservice.in"
config.midasPort mustEqual 27020
config.mongoHost mustEqual "localhost"
config.mongoPort mustEqual 27017
config.baseDeltasDir mustEqual defaultBaseDeltasDirURI
success
case None =>
failure("Should have run with given PORT while using defaults for source and mongoPort")
}
}
"run on a given PORT and connect to default source and mongoPort" in new SetupTeardown {
CLIParser.parse(Array("--port","27040")) match {
case Some(config) =>
config.midasHost mustEqual "localhost"
config.midasPort mustEqual 27040
config.mongoHost mustEqual "localhost"
config.mongoPort mustEqual 27017
config.baseDeltasDir mustEqual defaultBaseDeltasDirURI
success
case None =>
failure("Should have run with given PORT while using defaults for source and mongoPort")
}
}
"run on default port and connect to given MONGOHOST on default mongoPort" in new SetupTeardown {
CLIParser.parse(Array("--source","192.168.1.44")) match {
case Some(config) =>
config.midasHost mustEqual "localhost"
config.midasPort mustEqual 27020
config.mongoHost mustEqual "192.168.1.44"
config.mongoPort mustEqual 27017
config.baseDeltasDir mustEqual defaultBaseDeltasDirURI
success
case None =>
failure("Should have run with given MONGOHOST while using defaults for port and mongoPort")
}
}
" run on default port and connect to default mongoHost on given MONGOPORT" in new SetupTeardown {
CLIParser.parse(Array("--mongoPort","27019")) match {
case Some(config) =>
config.midasHost mustEqual "localhost"
config.midasPort mustEqual 27020
config.mongoHost mustEqual "localhost"
config.mongoPort mustEqual 27019
config.baseDeltasDir mustEqual defaultBaseDeltasDirURI
success
case None =>
failure("Should have run with given MONGOPORT while using defaults for port and mongoHost")
}
}
"run on given PORT and connect to given MONGOHOST on default mongoPort" in new SetupTeardown {
CLIParser.parse(Array("--port","27040","--source","192.168.1.44")) match {
case Some(config) =>
config.midasHost mustEqual "localhost"
config.midasPort mustEqual 27040
config.mongoHost mustEqual "192.168.1.44"
config.mongoPort mustEqual 27017
config.baseDeltasDir mustEqual defaultBaseDeltasDirURI
success
case None =>
failure("Should have run with given PORT and MONGOHOST while using defaults for mongoPort")
}
}
"run on given PORT and connect to default mongoHost on given MONGOPORT" in new SetupTeardown {
CLIParser.parse(Array("--port","27040","--mongoPort","27019")) match {
case Some(config) =>
config.midasHost mustEqual "localhost"
config.midasPort mustEqual 27040
config.mongoHost mustEqual "localhost"
config.mongoPort mustEqual 27019
config.baseDeltasDir mustEqual defaultBaseDeltasDirURI
success
case None =>
failure("Should have run with given PORT and MONGOPORT while using defaults for mongoHost")
}
}
" run on default port and connect to given MONGOHOST on MONGOPORT" in new SetupTeardown {
CLIParser.parse(Array("--source","192.168.1.44","--mongoPort","27019")) match {
case Some(config) =>
config.midasHost mustEqual "localhost"
config.midasPort mustEqual 27020
config.mongoHost mustEqual "192.168.1.44"
config.mongoPort mustEqual 27019
config.baseDeltasDir mustEqual defaultBaseDeltasDirURI
success
case None =>
failure("Should have run with given MONGOHOST and MONGOPORT while using defaults for port")
}
}
"use the specified directory for picking up delta files " in new SetupTeardown {
CLIParser.parse(Array("--port", "27040", "--source", "192.168.1.44",
"--mongoPort", "27019", "--deltasDir", newBaseDeltasDir)) match {
case Some(config) =>
config.midasHost mustEqual "localhost"
config.midasPort mustEqual 27040
config.mongoHost mustEqual "192.168.1.44"
config.mongoPort mustEqual 27019
config.baseDeltasDir mustEqual newBaseDeltasdirFile.toURI
success
case None =>
failure("Should have used specified deltas directory")
}
}
"fails when given a deltasDir that doesn't exist" in {
CLIParser.parse(Array("--deltasDir", "someDir/someFile")) match {
case None =>
success
case Some(config) =>
failure("Should have failed for a directory that doesn't exist")
}
}
"fails when an invalid option is given" in {
CLIParser.parse(Array("--invalidOption", "invalidValue")) match {
case None =>
success
case Some(config) =>
failure("Should have failed for an invalid option")
}
}
}
}
|
EqualExperts/Midas
|
src/test/scala/com/ee/midas/CLIParserSpecs.scala
|
Scala
|
bsd-2-clause
| 9,729
|
package com.github.mdr.mash.repl.browser.handler
import com.github.mdr.mash.input.InputAction
import com.github.mdr.mash.repl._
import com.github.mdr.mash.repl.browser.HelpBrowserState
import com.github.mdr.mash.repl.browser.ObjectBrowserActions._
trait HelpBrowserActionHandler {
self: ObjectBrowserActionHandler with Repl ⇒
protected def handleHelpBrowserAction(action: InputAction, browserState: HelpBrowserState): Unit =
commonBrowserActionHandler(browserState)
.orElse(helpBrowserActionHandler(browserState))
.lift(action)
private def helpBrowserActionHandler(browserState: HelpBrowserState): PartialFunction[InputAction, Unit] = {
case ViewAsTree ⇒ viewAsTree(browserState)
case NextItem ⇒ updateState(browserState.nextItem(terminalRows))
case PreviousItem ⇒ updateState(browserState.previousItem(terminalRows))
case NextPage ⇒ updateState(browserState.nextPage(terminalRows))
case PreviousPage ⇒ updateState(browserState.previousPage(terminalRows))
case FirstItem ⇒ updateState(browserState.firstItem(terminalRows))
case LastItem ⇒ updateState(browserState.lastItem(terminalRows))
}
}
|
mdr/mash
|
src/main/scala/com/github/mdr/mash/repl/browser/handler/HelpBrowserActionHandler.scala
|
Scala
|
mit
| 1,181
|
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import org.joda.time.LocalDate
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
case class Car(regNumber: String,
isNew: Boolean = false,
price: Int,
emissions: Int,
dateOfPurchase: LocalDate) {
}
case class LEC01(cars: List[Car] = List.empty) extends CtBoxIdentifier(name = "Low emission car.")
with CtValue[List[Car]]
with Input
with ValidatableBox[ComputationsBoxRetriever] {
override def value = cars
override def validate(boxRetriever: ComputationsBoxRetriever): Set[CtValidation] = {
(boxRetriever.cpQ1000(), value) match {
case (CPQ1000(Some(false)) | CPQ1000(None), list) if list.nonEmpty => Set(CtValidation(Some("LEC01"), "error.LEC01.cannot.exist"))
case (CPQ1000(Some(true)), list) if list.isEmpty => Set(CtValidation(Some("LEC01"), "error.LEC01.required"))
case _ => Set.empty
}
}
}
|
liquidarmour/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/computations/LEC01.scala
|
Scala
|
apache-2.0
| 1,590
|
package model.domain
case class Category(value: String)
/**
 * A collection of (name -> value) string attributes for every product.
 * @param map A map of attributes, which guarantees we don't have duplicate entries for a given attribute name.
*/
case class Attributes(map: Map[String, String]) {
/**
* Adds a new attribute to the map of attributes.
   * @param other The new (name -> value) attribute to add to the map.
   * @return A new instance of [[Attributes]] with the new attribute added.
   *         If the attribute name already exists, the map will "upsert", i.e. it will update
   *         the value of the given attribute key.
*/
def +(other: (String, String)): Attributes = copy(map + other)
}
object Attributes {
def empty: Attributes = Attributes(Map.empty)
}
/**
 * Here we store the set of categories for a product.
 * We could simply use a plain Set, but we wrap it in a separate class
 * in case we choose to add some functionality at a later stage.
*
* We use a Set because we want to make sure we have no duplicates in the data structure.
* @param values The set of values to use for categories.
*/
case class Categories(values: Set[Category]) {
def categories: Set[String] = values.map(_.value)
/**
* Adds a new category to the set of categories.
* @param other The new category to add to the set.
   * @return A new instance of [[Categories]] with the new category added. If the category is already
   *         present, the set remains unchanged.
*/
def +(other: Category): Categories = copy(values = values + other)
}
object Categories {
def empty: Categories = Categories(Set.empty[Category])
/**
* Helper to allow us to parse a structure from the database or storage easily.
* @param set A set of strings that get persisted to the DB where each string represents a category.
* @return A [[Categories]] instance corresponding to the input set.
*/
def set(set: Set[String]): Categories = Categories(set.map(Category.apply))
def apply(cat: Category): Categories = Categories(Set(cat))
}
case class CategoryDistribution(map: Map[String, Long])
/**
* A simple product definition, containing the basic set of information about a product.
* @param title The title of the product.
* @param attributes The attributes map of the underlying product.
*/
case class StoreProduct(
title: String,
attributes: Attributes = Attributes.empty,
categories: Categories = Categories.empty
)
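
/**
 * Illustrative sketch only, not part of the original source: building up a product.
 * The product title, attribute names and categories below are hypothetical.
 */
object StoreProductExample {
  val shirt: StoreProduct = StoreProduct(
    title = "Plain T-Shirt",
    // The second "colour" entry upserts over the first one.
    attributes = Attributes.empty + ("colour" -> "blue") + ("colour" -> "red"),
    categories = Categories(Category("clothing")) + Category("summer")
  )
}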
|
alexflav23/exercises
|
rps/app/model/domain/Product.scala
|
Scala
|
apache-2.0
| 2,483
|
package chapter8
import scala.io.Source
object methodDemo {
def main(args: Array[String]): Unit = {
processFile("/home/nikhil/nik", 10)
}
def processFile(filename: String, width: Int): Unit = {
val source = Source.fromFile(filename)
for (line <- source.getLines())
processLine(filename, width, line)
}
private def processLine(filename: String, width: Int, line: String): Unit = {
if (line.length > width)
println(filename + ":" + line.trim)
}
}
|
NikhilJose/ScalaPractice
|
src/main/scala/chapter8/methodDemo.scala
|
Scala
|
apache-2.0
| 493
|
package com.datastax.spark.connector.rdd
import java.io.IOException
import com.datastax.driver.core._
import com.datastax.spark.connector._
import com.datastax.spark.connector.cql._
import com.datastax.spark.connector.rdd.CassandraLimit._
import com.datastax.spark.connector.rdd.partitioner.dht.{Token => ConnectorToken}
import com.datastax.spark.connector.rdd.partitioner.{CassandraPartition, CassandraPartitionGenerator, CqlTokenRange, NodeAddresses, _}
import com.datastax.spark.connector.rdd.reader._
import com.datastax.spark.connector.types.ColumnType
import com.datastax.spark.connector.util.CqlWhereParser.{EqPredicate, InListPredicate, InPredicate, Predicate, RangePredicate}
import com.datastax.spark.connector.util.Quote._
import com.datastax.spark.connector.util.{CountingIterator, CqlWhereParser, ReflectionUtil}
import com.datastax.spark.connector.writer.RowWriterFactory
import org.apache.spark.metrics.InputMetricsUpdater
import org.apache.spark.rdd.{PartitionCoalescer, RDD}
import org.apache.spark.{Partition, Partitioner, SparkContext, TaskContext}
import scala.collection.JavaConversions._
import scala.language.existentials
import scala.reflect.ClassTag
/** RDD representing a Table Scan of A Cassandra table.
*
* This class is the main entry point for analyzing data in Cassandra database with Spark.
* Obtain objects of this class by calling
* [[com.datastax.spark.connector.SparkContextFunctions.cassandraTable]].
*
* Configuration properties should be passed in the [[org.apache.spark.SparkConf SparkConf]]
* configuration of [[org.apache.spark.SparkContext SparkContext]].
* `CassandraRDD` needs to open connection to Cassandra, therefore it requires appropriate
* connection property values to be present in [[org.apache.spark.SparkConf SparkConf]].
* For the list of required and available properties, see
* [[com.datastax.spark.connector.cql.CassandraConnector CassandraConnector]].
*
* `CassandraRDD` divides the data set into smaller partitions, processed locally on every
* cluster node. A data partition consists of one or more contiguous token ranges.
* To reduce the number of roundtrips to Cassandra, every partition is fetched in batches.
*
* The following properties control the number of partitions and the fetch size:
* - spark.cassandra.input.split.size_in_mb: approx amount of data to be fetched into a single Spark
* partition, default 64 MB
* - spark.cassandra.input.fetch.size_in_rows: number of CQL rows fetched per roundtrip,
* default 1000
*
* A `CassandraRDD` object gets serialized and sent to every Spark Executor, which then
* calls the `compute` method to fetch the data on every node. The `getPreferredLocations`
* method tells Spark the preferred nodes to fetch a partition from, so that the data for
* the partition are at the same node the task was sent to. If Cassandra nodes are collocated
* with Spark nodes, the queries are always sent to the Cassandra process running on the same
* node as the Spark Executor process, hence data are not transferred between nodes.
* If a Cassandra node fails or gets overloaded during read, the queries are retried
* to a different node.
*
* By default, reads are performed at ConsistencyLevel.LOCAL_ONE in order to leverage data-locality
* and minimize network traffic. This read consistency level is controlled by the
* spark.cassandra.input.consistency.level property.
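 *
 * Example usage (a sketch only; the keyspace, table and column names below are
 * hypothetical and not taken from this source):
 * {{{
 *   import com.datastax.spark.connector._
 *
 *   val words = sc.cassandraTable("test", "words")   // CassandraTableScanRDD[CassandraRow]
 *     .select("word", "count")                       // project only the needed columns
 *     .where("count > ?", 10)                        // predicate pushed down to Cassandra
 *   words.collect().foreach(println)
 * }}}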
*/
class CassandraTableScanRDD[R] private[connector](
@transient val sc: SparkContext,
val connector: CassandraConnector,
val keyspaceName: String,
val tableName: String,
val columnNames: ColumnSelector = AllColumns,
val where: CqlWhereClause = CqlWhereClause.empty,
val limit: Option[CassandraLimit] = None,
val clusteringOrder: Option[ClusteringOrder] = None,
val readConf: ReadConf = ReadConf(),
overridePartitioner: Option[Partitioner] = None)(
implicit
val classTag: ClassTag[R],
@transient val rowReaderFactory: RowReaderFactory[R])
extends CassandraRDD[R](sc, Seq.empty)
with CassandraTableRowReaderProvider[R]
with SplitSizeEstimator[R] {
override type Self = CassandraTableScanRDD[R]
override protected def copy(
columnNames: ColumnSelector = columnNames,
where: CqlWhereClause = where,
limit: Option[CassandraLimit] = limit,
clusteringOrder: Option[ClusteringOrder] = None,
readConf: ReadConf = readConf,
connector: CassandraConnector = connector): Self = {
require(sc != null,
"RDD transformation requires a non-null SparkContext. " +
"Unfortunately SparkContext in this CassandraRDD is null. " +
"This can happen after CassandraRDD has been deserialized. " +
"SparkContext is not Serializable, therefore it deserializes to null." +
"RDD transformations are not allowed inside lambdas used in other RDD transformations.")
new CassandraTableScanRDD[R](
sc = sc,
connector = connector,
keyspaceName = keyspaceName,
tableName = tableName,
columnNames = columnNames,
where = where,
limit = limit,
clusteringOrder = clusteringOrder,
readConf = readConf,
overridePartitioner = overridePartitioner)
}
override protected def convertTo[B : ClassTag : RowReaderFactory]: CassandraTableScanRDD[B] = {
new CassandraTableScanRDD[B](
sc = sc,
connector = connector,
keyspaceName = keyspaceName,
tableName = tableName,
columnNames = columnNames,
where = where,
limit = limit,
clusteringOrder = clusteringOrder,
readConf = readConf,
overridePartitioner = overridePartitioner)
}
/**
   * Internal method for assigning a partitioner to this RDD; it lacks type-safety checks for
   * the Partitioner of type [K]. End users should use the implicit provided in
* [[CassandraTableScanPairRDDFunctions]]
*/
private[connector] def withPartitioner[K, V, T <: ConnectorToken[V]](
partitioner: Option[Partitioner]): CassandraTableScanRDD[R] = {
val cassPart = partitioner match {
case Some(newPartitioner: CassandraPartitioner[K, V, T]) => {
this.partitioner match {
case Some(currentPartitioner: CassandraPartitioner[K, V, T]) =>
/** Preserve the mapping set by the current partitioner **/
logDebug(
s"""Preserving Partitioner: $currentPartitioner with mapping
|${currentPartitioner.keyMapping}""".stripMargin)
Some(
newPartitioner
.withTableDef(tableDef)
.withKeyMapping(currentPartitioner.keyMapping))
case _ =>
logDebug(s"Assigning new Partitioner $newPartitioner")
Some(newPartitioner.withTableDef(tableDef))
}
}
case Some(other: Partitioner) => throw new IllegalArgumentException(
s"""Unable to assign
|non-CassandraPartitioner $other to CassandraTableScanRDD """.stripMargin)
case None => None
}
new CassandraTableScanRDD[R](
sc = sc,
connector = connector,
keyspaceName = keyspaceName,
tableName = tableName,
columnNames = columnNames,
where = where,
limit = limit,
clusteringOrder = clusteringOrder,
readConf = readConf,
overridePartitioner = cassPart)
}
/** Selects a subset of columns mapped to the key and returns an RDD of pairs.
    * Similar to the built-in Spark keyBy method, but this one uses the implicit
    * RowReaderFactory to construct the key objects.
* The selected columns must be available in the CassandraRDD.
*
* If the selected columns contain the complete partition key a
* `CassandraPartitioner` will also be created.
*
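    * For example (a sketch; the column name "word" below is hypothetical):
    * {{{
    *   val byWord = rdd.keyBy[Tuple1[String]](SomeColumns("word"))
    * }}}
    *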
* @param columns column selector passed to the rrf to create the row reader,
* useful when the key is mapped to a tuple or a single value
*/
def keyBy[K](columns: ColumnSelector)(implicit
classtag: ClassTag[K],
rrf: RowReaderFactory[K],
rwf: RowWriterFactory[K]): CassandraTableScanRDD[(K, R)] = {
val kRRF = implicitly[RowReaderFactory[K]]
val vRRF = rowReaderFactory
implicit val kvRRF = new KeyValueRowReaderFactory[K, R](columns, kRRF, vRRF)
val selectedColumnNames = columns.selectFrom(tableDef).map(_.columnName).toSet
val partitionKeyColumnNames = PartitionKeyColumns.selectFrom(tableDef).map(_.columnName).toSet
if (selectedColumnNames.containsAll(partitionKeyColumnNames)) {
val partitioner = partitionGenerator.partitioner[K](columns)
logDebug(
s"""Made partitioner ${partitioner} for $this""".stripMargin)
convertTo[(K, R)].withPartitioner(partitioner)
} else {
convertTo[(K, R)]
}
}
/** Extracts a key of the given class from the given columns.
*
* @see `keyBy(ColumnSelector)` */
def keyBy[K](columns: ColumnRef*)(implicit
classtag: ClassTag[K],
rrf: RowReaderFactory[K],
rwf: RowWriterFactory[K]): CassandraTableScanRDD[(K, R)] =
keyBy(SomeColumns(columns: _*))
/** Extracts a key of the given class from all the available columns.
*
* @see `keyBy(ColumnSelector)` */
def keyBy[K]()(implicit
classtag: ClassTag[K],
rrf: RowReaderFactory[K],
rwf: RowWriterFactory[K]): CassandraTableScanRDD[(K, R)] =
keyBy(AllColumns)
@transient lazy val partitionGenerator = {
if (containsPartitionKey(where)) {
CassandraPartitionGenerator(connector, tableDef, 1)
} else {
val reevaluatedSplitCount = splitCount.getOrElse(estimateSplitCount(splitSize))
CassandraPartitionGenerator(connector, tableDef, reevaluatedSplitCount)
}
}
/**
* This method overrides the default spark behavior and will not create a CoalesceRDD. Instead it will reduce
* the number of partitions by adjusting the partitioning of C* data on read. Using this method will override
* spark.cassandra.input.split.size.
   * This method is useful in combination with a where() call, when the actual size of the data is smaller than the table size.
   * It has no effect if a partition key is used in the where clause.
*
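   * For example (a sketch; the keyspace, table and column names are hypothetical):
   * {{{
   *   sc.cassandraTable("ks", "events").where("bucket = ?", 42).coalesce(10)
   * }}}
   *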
* @param numPartitions number of partitions
* @param shuffle whether to call shuffle after
* @param partitionCoalescer is ignored if no shuffle, or just passed to shuffled CoalesceRDD
* @param ord
* @return new CassandraTableScanRDD with predefined number of partitions
*/
override def coalesce(numPartitions: Int, shuffle: Boolean = false, partitionCoalescer: Option[PartitionCoalescer])(implicit ord: Ordering[R] = null): RDD[R]
= {
val rdd = copy(readConf = readConf.copy(splitCount = Some(numPartitions)))
if (shuffle) {
rdd.superCoalesce(numPartitions, shuffle, partitionCoalescer)
} else {
rdd
}
}
private def superCoalesce(numPartitions: Int, shuffle: Boolean = false, partitionCoalescer: Option[PartitionCoalescer])(implicit ord: Ordering[R] = null) =
super.coalesce(numPartitions, shuffle, partitionCoalescer);
@transient override val partitioner = overridePartitioner
override def getPartitions: Array[Partition] = {
verify() // let's fail fast
val partitions: Array[Partition] = partitioner match {
case Some(cassPartitioner: CassandraPartitioner[_, _, _]) => {
cassPartitioner.verify()
cassPartitioner.partitions.toArray[Partition]
}
case Some(other: Partitioner) =>
throw new IllegalArgumentException(s"Invalid partitioner $other")
case None => partitionGenerator.partitions.toArray[Partition]
}
logDebug(s"Created total ${partitions.length} partitions for $keyspaceName.$tableName.")
logTrace("Partitions: \\n" + partitions.mkString("\\n"))
partitions
}
private lazy val nodeAddresses = new NodeAddresses(connector)
private lazy val partitionKeyStr =
tableDef.partitionKey.map(_.columnName).map(quote).mkString(", ")
override def getPreferredLocations(split: Partition): Seq[String] =
split.asInstanceOf[CassandraPartition[_, _]].endpoints.flatMap(nodeAddresses.hostNames).toSeq
private def tokenRangeToCqlQuery(range: CqlTokenRange[_, _]): (String, Seq[Any]) = {
val columns = selectedColumnRefs.map(_.cql).mkString(", ")
val (cql, values) = if (containsPartitionKey(where)) {
("", Seq.empty)
} else {
range.cql(partitionKeyStr)
}
val filter = (cql +: where.predicates).filter(_.nonEmpty).mkString(" AND ")
val limitClause = limitToClause(limit)
val orderBy = clusteringOrder.map(_.toCql(tableDef)).getOrElse("")
val quotedKeyspaceName = quote(keyspaceName)
val quotedTableName = quote(tableName)
val queryTemplate =
s"SELECT $columns " +
s"FROM $quotedKeyspaceName.$quotedTableName " +
s"WHERE $filter $orderBy $limitClause ALLOW FILTERING"
val queryParamValues = values ++ where.values
(queryTemplate, queryParamValues)
}
private def createStatement(session: Session, cql: String, values: Any*): Statement = {
try {
val stmt = session.prepare(cql)
stmt.setConsistencyLevel(consistencyLevel)
val converters = stmt.getVariables
.map(v => ColumnType.converterToCassandra(v.getType))
.toArray
val convertedValues =
for ((value, converter) <- values zip converters)
yield converter.convert(value)
val bstm = stmt.bind(convertedValues: _*)
bstm.setFetchSize(fetchSize)
bstm
}
catch {
case t: Throwable =>
throw new IOException(s"Exception during preparation of $cql: ${t.getMessage}", t)
}
}
private def fetchTokenRange(
scanner: Scanner,
range: CqlTokenRange[_, _],
inputMetricsUpdater: InputMetricsUpdater): Iterator[R] = {
val session = scanner.getSession()
val (cql, values) = tokenRangeToCqlQuery(range)
logDebug(
s"Fetching data for range ${range.cql(partitionKeyStr)} " +
s"with $cql " +
s"with params ${values.mkString("[", ",", "]")}")
val stmt = createStatement(session, cql, values: _*)
try {
val scanResult = scanner.scan(stmt)
val iteratorWithMetrics = scanResult.rows.map(inputMetricsUpdater.updateMetrics)
val result = iteratorWithMetrics.map(rowReader.read(_, scanResult.metadata))
logDebug(s"Row iterator for range ${range.cql(partitionKeyStr)} obtained successfully.")
result
} catch {
case t: Throwable =>
throw new IOException(s"Exception during execution of $cql: ${t.getMessage}", t)
}
}
override def compute(split: Partition, context: TaskContext): Iterator[R] = {
val partition = split.asInstanceOf[CassandraPartition[_, _]]
val tokenRanges = partition.tokenRanges
val metricsUpdater = InputMetricsUpdater(context, readConf)
val columnNames = selectedColumnRefs.map(_.selectedAs).toIndexedSeq
val scanner = connector.connectionFactory.getScanner(readConf, connector.conf, columnNames)
// Iterator flatMap trick flattens the iterator-of-iterator structure into a single iterator.
    // flatMap on an iterator is lazy, so the query for the next token range is not executed until
    // all of the rows returned by the previous query have been consumed.
val rowIterator = tokenRanges.iterator.flatMap(
fetchTokenRange(scanner, _: CqlTokenRange[_, _], metricsUpdater))
val countingIterator = new CountingIterator(rowIterator, limitForIterator(limit))
context.addTaskCompletionListener { (context) =>
val duration = metricsUpdater.finish() / 1000000000d
logDebug(f"Fetched ${countingIterator.count} rows from $keyspaceName.$tableName " +
f"for partition ${partition.index} in $duration%.3f s.")
scanner.close()
}
countingIterator
}
override def toEmptyCassandraRDD: EmptyCassandraRDD[R] = {
new EmptyCassandraRDD[R](
sc = sc,
keyspaceName = keyspaceName,
tableName = tableName,
columnNames = columnNames,
where = where,
limit = limit,
clusteringOrder = clusteringOrder,
readConf = readConf)
}
override def cassandraCount(): Long = {
columnNames match {
case SomeColumns(_) =>
logWarning("You are about to count rows but an explicit projection has been specified.")
case _ =>
}
val counts = CassandraTableScanRDD.countRDD(this)
counts.reduce(_ + _)
}
private def containsPartitionKey(clause: CqlWhereClause): Boolean = {
val pk = tableDef.partitionKey.map(_.columnName).toSet
val wherePredicates: Seq[Predicate] = clause.predicates.flatMap(CqlWhereParser.parse)
val whereColumns: Set[String] = wherePredicates.collect {
case EqPredicate(c, _) if pk.contains(c) => c
case InPredicate(c) if pk.contains(c) => c
case InListPredicate(c, _) if pk.contains(c) => c
case RangePredicate(c, _, _) if pk.contains(c) =>
throw new UnsupportedOperationException(
s"Range predicates on partition key columns (here: $c) are " +
s"not supported in where. Use filter instead.")
}.toSet
val primaryKeyComplete = whereColumns.nonEmpty && whereColumns.size == pk.size
val whereColumnsAllIndexed = whereColumns.forall(tableDef.isIndexed)
if (!primaryKeyComplete && !whereColumnsAllIndexed) {
val missing = pk -- whereColumns
throw new UnsupportedOperationException(
s"Partition key predicate must include all partition key columns or partition key columns need" +
s" to be indexed. Missing columns: ${missing.mkString(",")}"
)
}
primaryKeyComplete
}
}
object CassandraTableScanRDD {
def apply[T : ClassTag : RowReaderFactory](
sc: SparkContext,
keyspaceName: String,
tableName: String): CassandraTableScanRDD[T] = {
new CassandraTableScanRDD[T](
sc = sc,
connector = CassandraConnector(sc),
keyspaceName = keyspaceName,
tableName = tableName,
readConf = ReadConf.fromSparkConf(sc.getConf),
columnNames = AllColumns,
where = CqlWhereClause.empty)
}
def apply[K, V](
sc: SparkContext,
keyspaceName: String,
tableName: String)(
implicit
keyCT: ClassTag[K],
valueCT: ClassTag[V],
rrf: RowReaderFactory[(K, V)],
rwf: RowWriterFactory[K]): CassandraTableScanRDD[(K, V)] = {
val rdd = new CassandraTableScanRDD[(K, V)](
sc = sc,
connector = CassandraConnector(sc),
keyspaceName = keyspaceName,
tableName = tableName,
readConf = ReadConf.fromSparkConf(sc.getConf),
columnNames = AllColumns,
where = CqlWhereClause.empty)
rdd.withPartitioner(rdd.partitionGenerator.partitioner[K](PartitionKeyColumns))
}
/**
   * Used by cassandraCount() and the Spark SQL Cassandra source to push count queries down to Cassandra.
   * @param rdd
   * @tparam R
   * @return an RDD in which each partition holds a single Long value: the number of rows in that partition
*/
def countRDD[R] (rdd: CassandraTableScanRDD[R]): CassandraTableScanRDD[Long] = {
new CassandraTableScanRDD[Long](
sc = rdd.sc,
connector = rdd.connector,
keyspaceName = rdd.keyspaceName,
tableName = rdd.tableName,
columnNames = SomeColumns(RowCountRef),
where = rdd.where,
limit = rdd.limit,
clusteringOrder = rdd.clusteringOrder,
readConf = rdd.readConf)
}
}
|
shashwat7/spark-cassandra-connector
|
spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/rdd/CassandraTableScanRDD.scala
|
Scala
|
apache-2.0
| 19,424
|
package com.hyenawarrior.oldnorsedictionary.modelview
import android.view.View
import android.widget.AdapterView
import android.widget.AdapterView.OnItemSelectedListener
/**
* Created by HyenaWarrior on 2017.06.18..
*/
class ItemListener(callback: Int => Unit) extends OnItemSelectedListener
{
override def onNothingSelected(adapterView: AdapterView[_]) = ()
override def onItemSelected(adapterView: AdapterView[_], view: View, index: Int, l: Long): Unit =
{
callback(index)
}
}
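
/**
 * Illustrative sketch only, not part of the original source: wiring the listener to a
 * Spinner. The spinner instance and the selection handler below are hypothetical.
 */
object ItemListenerExample {
  def onIndexSelected(index: Int): Unit = println(s"selected index: $index")

  def bind(spinner: android.widget.Spinner): Unit =
    spinner.setOnItemSelectedListener(new ItemListener(onIndexSelected))
}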
|
HyenaSoftware/IG-Dictionary
|
app/src/main/scala/com/hyenawarrior/oldnorsedictionary/modelview/ItemListener.scala
|
Scala
|
lgpl-3.0
| 489
|
package services.actors
import helpers._
/**
* @author zepeng.li@gmail.com
*/
trait CanonicalNamedActor extends CanonicalNamed {
def actorPath = s"/user/$basicName"
}
trait CanonicalNameAsShardName {
self: CanonicalNamed =>
def shardName: String = basicName
}
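
// Illustrative sketch only, not part of the original source. Assuming `basicName` is the
// abstract member inherited from CanonicalNamed (helpers), a named, sharded actor
// companion might look like:
//
//   object ChatRoom extends CanonicalNamedActor with CanonicalNameAsShardName {
//     val basicName = "chat_room"  // actorPath == "/user/chat_room", shardName == "chat_room"
//   }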
|
lizepeng/app.io
|
modules/services/app/services/actors/CanonicalNamedActor.scala
|
Scala
|
apache-2.0
| 272
|
package typeformation.cf
import java.time.{Duration, ZonedDateTime}
import Template.Mapping
import io.circe.Json
trait CfExp[+T]
object CfExp {
type E[+T] = CfExp[T]
trait IsLit[T]
trait Ref[T] {
def value: T
}
private[cf] object IsLit {
implicit val stringLit: IsLit[String] = new IsLit[String] {}
implicit val IntLit: IsLit[Int] = new IsLit[Int] {}
implicit val longLit: IsLit[Long] = new IsLit[Long] {}
implicit val doubleLit: IsLit[Double] = new IsLit[Double] {}
implicit val boolLit: IsLit[Boolean] = new IsLit[Boolean] {}
implicit val dateTimeLit: IsLit[ZonedDateTime] = new IsLit[ZonedDateTime] {}
implicit val jsonLit: IsLit[Json] = new IsLit[Json] {}
implicit val durationLit: IsLit[Duration] = new IsLit[Duration] {}
implicit def propertyLit[T <: ResourceProperty]: IsLit[T] = new IsLit[T] {}
implicit def listLit[T: IsLit]: IsLit[List[T]] = new IsLit[List[T]] {}
}
private[cf] case class Lit[T: IsLit](value: T) extends E[T]
private[cf] case class ResourceRef(value: Resource)
extends Ref[Resource]
with E[String]
private[cf] case class ParameterRef(value: Parameter)
extends Ref[Parameter]
with E[String]
private[cf] case class PseudoParameterRef(value: PseudoParameter)
extends Ref[PseudoParameter]
with E[String]
private[cf] case class FnBase64(exp: E[String]) extends E[String]
private[cf] case class FnAnd(cond1: E[Boolean], cond2: E[Boolean])
extends E[Boolean]
private[cf] case class FnEquals[T](left: E[T], right: E[T]) extends E[Boolean]
private[cf] case class FnIf[T](cond: E[Boolean], ifTrue: E[T], ifFalse: E[T])
extends E[T]
private[cf] case class FnNot(cond: E[Boolean]) extends E[Boolean]
private[cf] case class FnOr(conds: Seq[E[Boolean]]) extends E[Boolean]
private[cf] case class FnSelect[T](index: Int, values: E[List[T]])
extends E[T]
private[cf] case class FnGetAZs(region: Option[String])
extends E[List[String]]
private[cf] case class FnSub(string: String,
mappings: Option[Map[String, CfExp[String]]])
extends E[String]
private[cf] case class FnGetAtt(logicalId: String, attributeName: String)
extends E[String]
private[cf] case class FnFindInMap(mapName: Mapping,
topLevelKey: E[String],
secondLevelKey: E[String])
extends E[String]
private[cf] case class FnSplit(delimiter: String, string: E[String])
extends E[List[String]]
private[cf] case class FnJoin(delimiter: String, values: List[Json])
extends E[String]
private[cf] case class FnImportValue(sharedValueToImport: E[String])
extends E[String]
}
|
typeformation/typeformation
|
cf/src/main/scala/typeformation/cf/CfExp.scala
|
Scala
|
mit
| 2,753
|
import sbt._
import Keys._
import sbtassembly.Plugin._
import AssemblyKeys._
import sbtjarjar._
import JarJarPlugin._
object ScaldingProject extends Build {
val ScalaVersion = "2.10.6"
val ScaldingVersion = "0.15.0"
val ParquetVersion = "1.6.0"
val AvroVersion = "1.7.6"
val AlgebirdVersion = "0.10.2"
val HadoopVersion = "2.3.0-cdh5.0.1"
val commonSettings = Seq(
organization := "com.tapad",
scalaVersion := ScalaVersion,
scalacOptions := Seq("-deprecation", "-language:_"),
credentials += Credentials(Path.userHome / ".ivy2" / ".credentials"),
resolvers += "Local Maven Repository" at "file://" + Path.userHome.absolutePath + "/.m2/repository",
libraryDependencies ++= Seq(
"ch.qos.logback" % "logback-classic" % "1.1.3",
"org.scalatest" %% "scalatest" % "2.2.6" % "test",
"org.mockito" % "mockito-core" % "1.9.5" % "test"
)
)
val commonExcludedJars = Set(
"servlet-api-2.5.jar"
)
val commonMergeStrategy: PartialFunction[String, MergeStrategy] = {
case fp if fp.endsWith("pom.properties") => MergeStrategy.discard
case fp if fp.endsWith("pom.xml") => MergeStrategy.discard
case fp if fp.endsWith(".class") => MergeStrategy.last
case fp if fp.endsWith(".html") => MergeStrategy.discard
}
val commonAssemblySettings = assemblySettings ++ Seq(
excludedJars in assembly <<= (fullClasspath in assembly) map { cp =>
cp.filter(jar => commonExcludedJars.apply(jar.data.getName))
},
mergeStrategy in assembly <<= (mergeStrategy in assembly) { dedup =>
commonMergeStrategy orElse {
case fp => dedup(fp)
}
}
)
lazy val root = Project(
"root",
file(".")
).aggregate(jobs)
lazy val jobs = Project(
"jobs",
file("jobs"),
settings = commonSettings ++ commonAssemblySettings ++ jarjarSettings ++ Seq(
libraryDependencies ++= Seq(
"com.twitter" %% "scalding-core" % ScaldingVersion,
"com.twitter" %% "scalding-avro" % ScaldingVersion,
"com.twitter" %% "algebird-core" % AlgebirdVersion,
"com.twitter" % "parquet-hadoop" % ParquetVersion,
"com.twitter" % "parquet-avro" % ParquetVersion,
"com.twitter" %% "parquet-scala" % ParquetVersion,
"org.apache.avro" % "avro" % AvroVersion,
"org.apache.avro" % "avro-mapred" % AvroVersion,
"org.apache.hadoop" % "hadoop-client" % HadoopVersion
),
jarName in assembly := "herp-derp.jar",
mainClass in (Compile, run) := Some("com.twitter.scalding.Tool"),
mainClass in assembly := Some("com.twitter.scalding.Tool"),
jarjar <<= jarjar dependsOn assembly,
JarJarKeys.jarName in jarjar <<= jarName in assembly,
JarJarKeys.rules in jarjar := Seq(
s"rule parquet.** parquet.jarjar.@1"
)
)
)
}
|
Tapad/sbt-jarjar
|
src/sbt-test/sbtjarjar/sample-scalding-project/project/Build.scala
|
Scala
|
bsd-3-clause
| 2,963
|
/**
* This file is part of mycollab-services.
*
* mycollab-services is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-services is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-services. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.schedule.email.crm
import com.esofthead.mycollab.schedule.email.SendingRelayEmailNotificationAction
/**
* @author MyCollab Ltd
* @since 5.1.0
*/
trait CaseRelayEmailNotificationAction extends SendingRelayEmailNotificationAction {
}
|
maduhu/mycollab
|
mycollab-services/src/main/scala/com/esofthead/mycollab/schedule/email/crm/CaseRelayEmailNotificationAction.scala
|
Scala
|
agpl-3.0
| 988
|
package Chapter09
import scala.annotation.tailrec
object FilesAndRegularExpressions {
// topics:
// reading lines
// reading characters
// reading tokens and numbers
// reading from URLs and other sources
// reading binary files
// writing text files
// visiting directories
// serialization
// process control
// regular expressions
// regular expression groups
// read: Source.fromFile
// write: java PrintWriter
// "regex".r is a Regex object
// use """ ... """
// extract regex groups using pattern matching: regex(v1, v2, ...) <- string
// reading lines
def readingLines = {
// reading text file
import scala.io.Source
val src = Source.fromFile("/tmp/test.txt", "UTF-8")
try {
val lineIterator = src.getLines
for (line <- lineIterator) println(line)
} finally {
src.close()
}
}
// reading characters
def readingCharacters = {
// Source extends Iterator[Char]
import scala.io.Source
val src = Source.fromFile("/tmp/test.txt", "UTF-8")
try {
for (char <- src) print(char)
} finally {
src.close()
}
def buffered = {
// peek without consuming
val src = Source.fromFile("/tmp/test.txt", "UTF-8")
try {
val iter = src.buffered // cache head
while (iter.hasNext) {
if (iter.head != 'Q') {
print(iter.head)
}
val nextchar = iter.next()
}
//or, if file is small:
val contents: String = src.mkString
} finally {
src.close()
}
}
}
// reading tokens and numbers
def readingTokensAndNumbers = {
// quick-and-dirty way of reading all whitespace-separated tokens
import scala.io.Source
val src = Source.fromFile("/tmp/test.txt", "UTF-8")
try {
val tokens = src.mkString.split(raw"\s+")
val numbers = tokens.map(_.toDouble)
} finally {
src.close()
}
// you can always use the java.util.Scanner to process a mixture of text and numbers
// or, from stdin
import scala.io.StdIn
val age = StdIn.readInt()
}
// reading from URLs and other sources
def readingFromURLsAndOtherSources = {
import scala.io.Source
val urlSrc = Source.fromURL("http://ya.ru", "UTF-8")
val stringSrc = Source.fromString("Hello, World!") // useful for debugging
val sinSrc = Source.stdin
List(urlSrc, stringSrc, sinSrc).foreach(_.close)
}
// reading binary files
def readingBinaryFiles = {
// scala has no provision for reading binary files
import java.io.{File, FileInputStream}
val file = new File("/tmp/test.bin")
val ins = new FileInputStream(file)
try {
val buff = new Array[Byte](file.length.toInt)
ins.read(buff)
} finally {
ins.close()
}
}
// writing text files
def writingTextFiles = {
// scala has no built-in support for writing files, use java.io.PrintWriter
import java.io.PrintWriter
val out = new PrintWriter("/tmp/out.txt")
try {
for (n <- 1 to 100) out.println(n)
// you may have problems with printf
val price = 42.0
out.printf("%10.2f", price.asInstanceOf[AnyRef]) // ugh
// instead, use string interpolation
out.print(f"${price}%10.2f")
} finally {
out.close()
}
}
// visiting directories
def visitingDirectories = {
// there are no built-in classes for traversing a file system
// use java.nio.file Files.list, Files.walk
import java.nio.file.{Files, Paths, Path}
import scala.util.Try
def processPath(path: Path) = {
val res = Try { println(path.toAbsolutePath.toString) }
if (res.isFailure) println(res.failed.get.getMessage)
}
// not good: throws java.io.UncheckedIOException
val allEntries = Files.walk(Paths.get("/tmp")) // recursive, DFS
val onelevelEntries = Files.list(Paths.get("/tmp")) // not recursive
try {
allEntries.forEach(p => processPath(p))
} finally {
println("closing streams ...")
allEntries.close(); onelevelEntries.close()
}
}
// serialization
def serialization = {
// short-term storage or transmission to another jvm
@SerialVersionUID(42L) class Person extends Serializable {
// scala collections are serializable
import scala.collection.mutable.ArrayBuffer
private val friends = ArrayBuffer.empty[Person]
}
import java.io.{ObjectOutputStream, FileOutputStream, ObjectInputStream, FileInputStream}
val fred = new Person
// save
val out = new ObjectOutputStream(new FileOutputStream("/tmp/test.obj"))
out.writeObject(fred)
out.close()
// load
val in = new ObjectInputStream(new FileInputStream("/tmp/test.obj"))
val savedFred = in.readObject().asInstanceOf[Person]
in.close()
}
// process control
def processControl = {
// shell commands, scripts
// postfix syntax is being deprecated
// implicit conversion from strings to ProcessBuilder
import java.io.File
import java.net.URL
import scala.sys.process._
var exitcode = "ls -al /tmp".! // print listing to stdout
val listing = "ls -al /tmp".!! // if exitcode != 0 => exception
// pipe
exitcode = ("ls -al /tmp" #| "grep u").!
// redirect
exitcode = ("ls -al /tmp" #> new File("/tmp/list.txt")).!
// append
exitcode = ("ls -al /opt" #>> new File("/tmp/list.txt")).!
// from a file
exitcode = ("grep u" #< new File("/tmp/list.txt")).!
exitcode = ("grep Scala" #< new URL("http://ya.ru")).!
// you can combine processes with #&& and #||
    // the # prefix ensures equal precedence for all ops
// custom environment: Process object
val p = Process("ls -la", new File("/tmp"), ("LANG", "en_US.utf8"))
exitcode = (p #| "grep u").!
// for perverts: scala for shell script
/*
#!/bin/sh
exec scala "$0" "$@"
!#
Scala commands
*/
// scala script from java program, javax.script
// ScriptEngine engine = new ScriptEngineManager().getScriptEngineByName("scala")
}
// regular expressions
def regularExpressions = {
// scala.util.matching.Regex
val numPattern = "[0-9]+".r
val wsnumwsPattern = """\s+[0-9]+\s+""".r // raw string syntax
// find all
for (s <- numPattern.findAllIn("99 bottles, 98 bottles")) println(s) // 99, 98
// find first
val firstmatch = wsnumwsPattern.findFirstIn("99 bottles, 98 bottles") // Some("98")
// check whole string against regex pattern
"^[0-9]+$".r findFirstIn "some string" match {
case None => println("not numeric")
case Some(x) => println("all numbers")
}
// or
if ("some string".matches("[0-9]+")) println("all numbers")
// replace
println(numPattern.replaceFirstIn("99 bottles, 98 bottles", "XX"))
println(numPattern.replaceAllIn("99 bottles, 98 bottles", "XX"))
println(numPattern.replaceSomeIn("99 bottles, 98 bottles", {
m => if (m.matched.toInt % 2 == 0) Some("XX") else None
}))
// more complex example of replace
val varPattern = """\$[0-9]+""".r
def format(message: String, vars: String*) = varPattern.replaceSomeIn(message, {
m => vars.lift(m.matched.tail.toInt)
})
println(format("At $1, there was $2 on $0",
"planet 7", "12:30 pm", "a disturbance of the force"))
}
// regular expression groups
def regularExpressionGroups = {
// get subexpressions of regex: parentheses and Match object
val numitemPattern = "([0-9]+) ([a-z]+)".r
// m.matched: string
// m.group(n): n-th group
// m.start, m.end, m.start(n), m.end(n): substring indices
for (m <- numitemPattern.findAllMatchIn("99 bottles, 98 bottles")) println(m.group(0))
// groups by name
val namedNumitemPattern = "([0-9]+) ([a-z]+)".r("num", "item")
// using extractor: it MUST match the string, there MUST be a group for each variable
val numitemPattern(num, item) = "99 bottles"
// groups with multiple matches
for (numitemPattern(num, item) <- numitemPattern.findAllIn("99 bottles, 98 bottles"))
println(s"${num}: $item")
}
}
object FilesAndRegularExpressions_Exercises {
// 1. Write a Scala code snippet that reverses the lines in a file
// (making the last line the first one, and so on).
def ex1 = {
val fname = "/tmp/lines.txt"
def getLines(fn: String): Seq[String] = {
import scala.io.Source
val src = Source.fromFile(fn)
val lines = src.getLines().toArray
src.close()
lines
}
def putLines(fn: String, ls: Seq[String]): Unit = {
import java.io.PrintWriter
val out = new PrintWriter(fn)
ls.foreach(out.println)
out.close()
}
val revLines = getLines(fname).reverse
putLines(fname, revLines)
// with try .. catch
def inTry = {
import scala.util.{Try, Success, Failure}
val fname = "/tmp/lines.txt.nosuchfile"
val res = Try{ getLines(fname).reverse }.flatMap(revlines =>
Try { putLines(fname, revlines) })
res match {
case Success(x) => println("writed")
case Failure(x) => println(s"failed: ${x.getMessage}")
}
}
}
// 2. Write a Scala program that reads a file with tabs,
// replaces each tab with spaces so
// that tab stops are at n-column boundaries,
// and writes the result to the same file.
def ex2 = {
// n-column boundaries: if tabsize = 4: 0, 4, 8, 12, ...
// n % 4 = 0,1,2,3,0,1,2,3,...
// replace tabs for spaces in one line
def tab2spaces(line: String, tabsize: Int = 4): String = {
def nspaces(col: Int): Int = tabsize - (col % tabsize) // 0 => 4, 1 => 3, 2 => 2, 3 => 1
val res = StringBuilder.newBuilder
for (ch <- line) {
if (ch == '\t') res.append(" " * nspaces(res.length))
else res.append(ch)
}
res.toString
}
// mini test
def check(s1: String, s2: String) = {
assert(tab2spaces(s1).equals(s2), s"wrong: '${tab2spaces(s1)}' != '$s2'")
}
check("\t1", " 1")
check("1\t2", "1 2")
check("12\t3", "12 3")
check("123\t4", "123 4")
check("1234\t5", "1234 5")
check("12345\t6", "12345 6")
// process file
import scala.io.Source
import java.io.PrintWriter
import scala.util.Try
val fname = "/tmp/tabs.txt"
val res = Try {
val text = {
for (line <- Source.fromFile(fname).getLines) yield tab2spaces(line)
      }.toArray // materialize all lines before opening the PrintWriter, since we write back to the same file
// TODO: write to a temp file then rename; stream processing
val out = new PrintWriter(fname)
text.foreach(out.println)
out.close()
}
if (res.isSuccess) println("file updated")
else println(s"error:$res")
}
// 3. Write a Scala code snippet that
// reads a file and prints all words with more than 12 characters
// to the console. Extra credit if you can do this in a single line.
def ex3 = {
import scala.io.Source
val fname = "/tmp/tabs.txt"
val sep = """\s+"""
Source.fromFile(fname).getLines.flatMap(_.split(sep)).filter(_.length > 12).foreach(println)
}
// 4. Write a Scala program that
// reads a text file containing only floating-point numbers.
// Print the sum, average, maximum, and minimum of the numbers in the file.
def ex4 = {
// one number on one line
def writeFloats(count: Int, fname: String): Unit = {
def nextFloat: Float = util.Random.nextFloat()
val pw = new java.io.PrintWriter(fname)
for (n <- 1 to count) pw.println(nextFloat)
pw.close()
}
// TODO: process a stream of lines
def readFloats(fname: String): Unit = {
val src = scala.io.Source.fromFile(fname)
val floats = src.getLines.flatMap(_.split("""\s+""")).map(_.toFloat).toVector
src.close()
println(s"sum: ${floats.sum}; average: ${floats.sum / floats.length}")
println(s"max: ${floats.max}; min: ${floats.min}")
}
val fname = "/tmp/floats.txt"
val numbersCount = 777
// create test file
writeFloats(numbersCount, fname)
// read and calculate
readFloats(fname)
}
// 5. Write a Scala program that writes the powers of 2 and their reciprocals to a file,
// with the exponent ranging from 0 to 20.
// Line up the columns:
//
// 1 1
// 2 0.5
// 4 0.25
// ... ...
def ex5 = {
// Definitions of reciprocal
// a mathematical expression or function so related to another that their product is one
val fname = "/tmp/powers.txt"
// https://docs.oracle.com/javase/7/docs/api/java/util/Formatter.html
// val leftPad = "%8d %1.9f".format(pow, recip)
// val rightPad = "%s%.9f".format(pow.toString.padTo(12, ' ').mkString, recip)
// val rightPad = "%-8d %.9f".format(pow, recip)
def simple = {
val out = new java.io.PrintWriter(fname)
for(i <- 0 to 20) {
val power = math.pow(2, i)
val recip = 1.0 / power
val line = f"${power.toInt.toString.padTo(12, ' ')}${recip}%.7f"
println(line); out.println(line)
}
out.close
}
def onStreams = {
val powersOfTwo: Stream[Int] = Stream.iterate(1)(_ * 2)
def reciprocals(inp: Stream[Int]): Stream[Double] =
(1.0 / inp.head) #:: reciprocals(inp.tail)
def lines(ints: Stream[Int], floats: Stream[Double]): Stream[String] =
"%-11d %.7f".format(ints.head, floats.head) #:: lines(ints.tail, floats.tail)
// output
val text = lines(powersOfTwo, reciprocals(powersOfTwo))
.take(21).mkString("\n")
println(text)
val pw = new java.io.PrintWriter(fname)
pw.write(text)
pw.close()
}
}
// 6. Make a regular expression searching for quoted strings
// "like this, maybe with \" or \\"
// in a Java or C++ program.
// Write a Scala program that prints out all such strings in a source file.
def ex6 = {
// https://docs.oracle.com/javase/7/docs/api/java/util/regex/Pattern.html
// http://regexr.com/3ecvf
val testData =
"""
|"simple one"
| a little "harder" string
|escaped "quote\"here", no?
|double "escaped quote\\"
|invalid \"sequence\"
|"like this, maybe with \" or \\"
""".stripMargin.trim.split("\n").toVector
val regex =
"""
|"(?:[^"\\]|\\.)*"
""".stripMargin.trim.r
// test
for (s <- testData) println(s"$s -> ${regex.findAllIn(s).toVector}")
// quoted strings in a file
val src = scala.io.Source.fromString(testData.mkString("\n"))
for {
line <- src.getLines
quote <- regex.findAllIn(line)
} println(quote)
src.close()
}
// 7. Write a Scala program that reads a text file
// and prints all tokens in the file that are not floating-point numbers.
// Use a regular expression.
def ex7 = {
def isFloat(str: String): Boolean = {
// http://regexr.com/3ed1t
val regex = """ (?:\d*\.\d+)|(?:\d+\.\d*) """.trim
str.matches(regex)
}
// input
val src = scala.io.Source.fromString("one 2 three 4.0 5,6 7.8 .9 1. . ")
// val src = scala.io.Source.fromFile("/tmp/myfile.txt")
val tokens = for {
line <- src.getLines
token <- line.split("""\s+""")
} yield token
val notfloats = tokens.filterNot(isFloat)
for (s <- notfloats) println(s)
src.close()
}
// 8. Write a Scala program that prints the
// src attributes of all img tags of a web page.
// Use regular expressions and groups.
def ex8 = {
// http://regexr.com/3ed5l
// http://docs.oracle.com/javase/6/docs/api/java/util/regex/Pattern.html#special
val url = "https://www.foxnews.com/us"
// regex
val imgr = """ <\s*img\s*[^>]*> """.trim.r
val srcr = """ \s+src\s*=\s*"(.*?)" """.trim.r
def simple = {
// get text
val src = scala.io.Source.fromURL(url)
val text = src.getLines.mkString(" ").toLowerCase
src.close()
// print src attr
for {
img <- imgr.findAllIn(text)
srcr(src) <- srcr.findAllIn(img)
} println(src)
}
def async = {
import scala.concurrent._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
def fetchUrl(url: String): Future[String] = Future {
val src = scala.io.Source.fromURL(url)
val text = src.getLines.mkString(" ").toLowerCase
src.close()
text
}
def extractImgSrc(text: String): Iterator[String] = {
for {
img <- imgr.findAllIn(text.toLowerCase)
srcr(src) <- srcr.findAllIn(img)
} yield src
}
// src iterator
val srcs = fetchUrl(url) map extractImgSrc
val res = Await.result(srcs, 3.seconds)
res foreach println
}
}
// 9. Write a Scala program that
// counts how many files with .class extension are
// in a given directory and its subdirectories.
def ex9 = {
// find / -iname '*.class' -type f 2>/dev/null | grep class | wc -l
// find / -iname '*.class' -type f -print0 2>/dev/null | xargs -0i echo | wc -l
def fileListFilesStream(startFrom: String = "/tmp", list: Boolean = false) = {
// scala> fileListFilesStream("/")
// java.lang.OutOfMemoryError: Java heap space
import java.io.File
def listFiles(dir: File): Option[Seq[File]] = Option(dir.listFiles)
def listAll(dir: File): Stream[File] = {
val children: Seq[File] = listFiles(dir).getOrElse(Array.empty[File])
if (children.isEmpty) Stream.empty[File]
else children.head #:: children.tail.toStream
.append(children.filter(_.isDirectory).flatMap(listAll))
}
val res = for {
file <- listAll(new File(startFrom))
if file.isFile && file.getName.endsWith(".class")
} yield file.getAbsolutePath
if (list) res foreach println
println(s"number of '*.class' files: ${res.length}")
}
def fileListFilesStream2(startFrom: String = "/tmp", list: Boolean = false) = {
// preceding version slightly refactored
// scala> fileListFilesStream2("/")
// number of '*.class' files: 14704
import scala.util.{Failure, Success, Try}
import java.io.File
def listFiles(dir: File): Seq[File] = {
// with links and exceptions handling
def isLink = !dir.getAbsolutePath.equals(dir.getCanonicalPath)
def files: Option[Seq[File]] = {
val lst = if (dir.isDirectory && !isLink) dir.listFiles else null
Option(lst)
}
Try { files } match {
case Failure(err) => println(s"listFiles error: $err"); Array.empty[File]
case Success(opt) => opt.getOrElse(Array.empty[File])
}
}
def listAll(dir: File): Stream[File] = {
val children = listFiles(dir)
dir #:: {
if (children.isEmpty) Stream.empty[File]
else children.toStream flatMap listAll
}
}
val res = listAll(new File(startFrom))
.filter(f => f.isFile && f.getName.endsWith(".class"))
// debug
if (list) res foreach println
// result
println(s"number of '*.class' files: ${res.length}")
}
def fileListFilesStreamReactive(startFrom: String = "/tmp", list: Boolean = false) = {
// scala> fileListFilesStreamReactive("/")
// java.lang.OutOfMemoryError: GC overhead limit exceeded
// http://reactivex.io/rxscala/
import rx.lang.scala.Observable
import scala.util.{Failure, Success, Try}
import java.io.File
def listFiles(dir: File): Option[Seq[File]] = Option(dir.listFiles)
def listAll(dir: File): Stream[File] = {
// possible exceptions processing
val lst: Try[Seq[File]] = Try { listFiles(dir).getOrElse(Array.empty[File]) }
val children: Seq[File] = lst match {
case Failure(err) => println(err); Array.empty[File]
case Success(seq) => seq
}
if (children.isEmpty) Stream.empty[File]
else children.head #:: children.tail.toStream
.append(children.withFilter(_.isDirectory).flatMap(listAll))
}
def files(dir: File): Observable[File] = Observable[File](subscriber => {
val list = listAll(dir)
for (file <- list if !subscriber.isUnsubscribed) {
subscriber.onNext(file) // callbacks
}
subscriber.onCompleted()
})
val allfiles = files(new File(startFrom))
val classes = allfiles.filter(f => f.isFile && f.getName.endsWith(".class"))
// debug callback
if (list) classes.subscribe(f => println(f))
// result
val count = classes.length // .count(file => true) // classes.foldLeft(0) { case (cnt, file) => cnt+1 }
// result callback
count.subscribe(c => println(s"observer, # of .class files: $c"))
// for extra processing?
classes
}
def jnfDirectoryStreamReactive(startFrom: String = "/tmp", list: Boolean = false) = {
// java.nio.file DirectoryStream + RX
// scala> jnfDirectoryStreamReactive("/")
// # of class files: 14704
import java.nio.{file => jnf}
import rx.lang.scala.Observable
def listFiles(dir: jnf.Path): Observable[jnf.Path] = Observable[jnf.Path](subscriber => {
val pstream = jnf.Files.newDirectoryStream(dir)
val iter = pstream.iterator
while (iter.hasNext && !subscriber.isUnsubscribed) {
subscriber.onNext(iter.next)
}
subscriber.onCompleted()
pstream.close()
}).onErrorResumeNext(_ => Observable.empty)
def listAll(dir: jnf.Path): Observable[jnf.Path] = {
val lst = listFiles(dir)
val files = lst.filter(jnf.Files.isRegularFile(_))
val dirs = lst.filter(jnf.Files.isDirectory(_, jnf.LinkOption.NOFOLLOW_LINKS))
val children = dirs.flatMap(listAll)
files ++ dirs ++ children
}
val dir = jnf.FileSystems.getDefault.getPath(startFrom)
val classFiles = listAll(dir)
.filter(p => jnf.Files.isRegularFile(p) && p.toString.endsWith(".class"))
if (list) classFiles subscribe { p => println(p.toString) }
val count = classFiles.length.subscribe(cnt => println(s"# of class files: $cnt"))
classFiles
}
def jnfWalkFileTreeReactive(startFrom: String = "/tmp", list: Boolean = false) = {
// scala> jnfWalkFileTreeReactive("/")
//visitFileFailed: java.nio.file.AccessDeniedException: /.pulse
//visitFileFailed: java.nio.file.AccessDeniedException: /proc/tty/driver
//visitFileFailed: java.nio.file.AccessDeniedException: /proc/1/task/1/fd
// ...
// # of class files: 14704
import java.nio.{file => jnf}
import java.io.{File, IOException}
import rx.lang.scala.Observable
import scala.language.implicitConversions
def listFiles(dir: jnf.Path) = Observable[jnf.Path](subscriber => {
val visitor = new jnf.SimpleFileVisitor[jnf.Path] {
override def visitFile(file: jnf.Path, attrs: jnf.attribute.BasicFileAttributes) = {
if (subscriber.isUnsubscribed) jnf.FileVisitResult.TERMINATE
else {
subscriber.onNext(file)
jnf.FileVisitResult.CONTINUE
}
}
override def visitFileFailed(file: jnf.Path, exc: IOException) = {
println(s"visitFileFailed: $exc")
//subscriber.onError(exc) // exactly once
jnf.FileVisitResult.CONTINUE
}
override def postVisitDirectory(dir: jnf.Path, exc: IOException) = {
if (exc != null) println(s"postVisitDirectory: $exc")
jnf.FileVisitResult.CONTINUE
}
}
jnf.Files.walkFileTree(dir, visitor)
subscriber.onCompleted()
}) // .onErrorResumeNext(_ => Observable.empty)
val startDir = new File(startFrom).toPath
val files = listFiles(startDir)
val classfiles = files.filter(p => p.toString.endsWith(".class"))
val count = classfiles.length
// debug
if (list) classfiles.subscribe(p => println(p))
// result
count.subscribe(c => println(s"# of class files: $c"))
classfiles
}
}
// 10. Expand the example in Section 9.8, “Serialization,” on page 113.
// Construct a few Person objects,
// make some of them friends of others,
// and save an Array[Person] to a file.
// Read the array back in and verify that the friend relations are intact.
def ex10 = {
import java.io.{FileInputStream, FileOutputStream, ObjectInputStream, ObjectOutputStream}
import scala.collection.mutable
@SerialVersionUID(42L) class Person(val name: String) extends Serializable {
private val friends = mutable.ArrayBuffer.empty[String]
def addFriend(fname: String): Unit = {
friends += fname
}
override def toString: String = s"Person($name), friends = ${friends.mkString(",")}"
}
val alice = new Person("Alice")
val bob = new Person("Bob")
val charlie = new Person("Charlie")
alice.addFriend(bob.name)
alice.addFriend(charlie.name)
bob.addFriend(alice.name)
val lst = Array(alice, bob, charlie)
val fname = "/tmp/friends.oos"
val oos = new ObjectOutputStream(new FileOutputStream(fname))
oos.writeObject(lst)
oos.close()
val ois = new ObjectInputStream(new FileInputStream(fname))
val saved = ois.readObject().asInstanceOf[Array[Person]]
ois.close()
println(saved.mkString("\n"))
require(saved.mkString == lst.mkString)
}
}
|
vasnake/scala-for-the-impatient
|
src/main/scala/Chapter09/FilesAndRegularExpressions.scala
|
Scala
|
gpl-3.0
| 29,129
|
/*
* Copyright (c) 2016 dawid.melewski
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package actyxpoweruseralert.actors
import actyxpoweruseralert.model.MachineEndpoint
import actyxpoweruseralert.services.{
ActorAlertService,
InMemoryMachinesLogStorageService,
MachineParkApi
}
import akka.actor.{ Actor, ActorLogging, Props }
import akka.contrib.throttle.TimerBasedThrottler
class MainActor(api: MachineParkApi) extends Actor with ActorLogging {
import MainActor._
import akka.contrib.throttle.Throttler._
import context.dispatcher
import scala.concurrent.duration._
var machines: List[MachineEndpoint] = List.empty
private val storage = new InMemoryMachinesLogStorageService()
private val alarms = List(new ActorAlertService(context.system, storage))
private val machinesThrottler =
context.actorOf(Props(classOf[TimerBasedThrottler], 2 msgsPer 1.minute))
private val machineThrottler =
context.actorOf(Props(classOf[TimerBasedThrottler], 70 msgsPer 1.seconds))
machinesThrottler ! SetTarget(
Some(context.actorOf(MachinesActor.props(api, self))))
machineThrottler ! SetTarget(
Some(context.actorOf(MachineActor.props(api, self, storage, alarms))))
override def receive: Receive = {
case Start =>
machinesThrottler ! MachinesActor.GetMachinesEndpoints
case MachinesActor.MachineEndpoints(list) =>
log.debug(s"Got list of machines endpoints [${list.size}]")
      val newMachinesEndpoints = list.diff(machines)
      newMachinesEndpoints.foreach(self ! _)
      machines = machines ++ newMachinesEndpoints
context.system.scheduler.scheduleOnce(2.minutes, self, Start)
case msg: MachineEndpoint =>
// log.debug(s"Got machine endpoint [$msg]")
machineThrottler ! msg
}
}
object MainActor {
def props(api: MachineParkApi): Props = Props(new MainActor(api))
case object Start
}
|
meloniasty/ActyxPowerUserAlert
|
src/main/scala/actyxpoweruseralert/actors/MainActor.scala
|
Scala
|
mit
| 2,913
|
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.businessdetails
import jto.validation.forms._
import jto.validation.{From, Rule, Write}
import play.api.libs.json.Json
case class LettersAddress(lettersAddress: Boolean)
object LettersAddress {
implicit val formats = Json.format[LettersAddress]
import utils.MappingUtils.Implicits._
implicit val formRule: Rule[UrlFormEncoded, LettersAddress] =
From[UrlFormEncoded] { __ =>
import jto.validation.forms.Rules._
(__ \\ "lettersAddress").read[Boolean].withMessage("error.required.atb.lettersaddress") map LettersAddress.apply
}
implicit val formWrites: Write[LettersAddress, UrlFormEncoded] =
Write {
case LettersAddress(b) =>
Map("lettersAddress" -> Seq(b.toString))
}
}
|
hmrc/amls-frontend
|
app/models/businessdetails/LettersAddress.scala
|
Scala
|
apache-2.0
| 1,343
|
package examples.demo
import java.awt.Dimension
import examples.demo.ui.{Circle, Shape, ShapesPanel}
import rescala.default._
import scala.swing.{MainFrame, SimpleSwingApplication, UIElement}
/** So far, we showed Vars and Signals defined as a transformation
* of one other Var or Signal. Signals can, however, also be derived
* from a combination of multiple other Vars and Signals through
* Signal expressions. To demonstrate Signal expressions, we extend
* our animation to scale the moving circle's trajectory with the
* drawing panel's size.
*
* The ShapePanel class offers Signals width and height, that
* reflect the panel's current size at any point in time, similar
* to how nsTime always reflects the current system clock. To be
* able to use these members of ShapesPanel in the definition of
* posX and posY, we must instantiate the ShapesPanel beforehand,
* but the instantiation of ShapesPanel, uses posX and posY. But,
* because ShapesPanel takes a Signal of a List of Shapes in its
* constructor, we can resolve this circular dependency by
* instantiating it with a Var containing an initially empty List,
* and then update the Var to add the Circle at posX and posY at
* the end. For this update, we use the Var's .transform method.
*
* With the ShapesPanel now instantiated at the very beginning,
* its width and height signal are available when defining posX
* and posY. We thus now change their definitions into Signal
* expressions that use a third of the panels width and height
* as the respective horizontal or vertical radius of the new
* oval trajectory, with angle still defining the position of
* the displayed circle on that trajectory.
*/
object DScaledClockCircle extends SimpleSwingApplication {
val NanoSecond = 1000000000L
val nsTime = Var(System.nanoTime())
def tick() = nsTime.set(System.nanoTime())
val shapes = Var[List[Shape]](List.empty)
val panel = new ShapesPanel(shapes)
val angle = nsTime.map(_.toDouble / NanoSecond * math.Pi)
val pos = Signal {
Pos(
(panel.width() / 2 - 50).toDouble * math.sin(angle()),
(panel.height() / 2 - 50).toDouble * math.cos(angle())
)
}
shapes.transform(new Circle(pos, Var(50)) :: _)
override lazy val top = {
panel.preferredSize = new Dimension(400, 300)
new MainFrame {
title = "REScala Demo"
contents = panel
setLocationRelativeTo(new UIElement { override def peer = null })
}
}
override def main(args: Array[String]): Unit = {
super.main(args)
while (!top.visible) Thread.sleep(5)
while (top.visible) {
Thread.sleep(1)
tick()
}
}
}
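/** Editor's note: a hedged, standalone sketch (not part of the original demo) of the
  * Signal-expression idea explained in the comment above, stripped of the Swing parts.
  * It assumes only the `Var`, `Signal {...}`, `set`, `transform` and `observe` operations
  * already used by these examples; all value names below are illustrative.
  */
object SignalExpressionSketch extends App {
  import rescala.default._
  val width  = Var(400)                       // one independent input signal
  val height = Var(300)                       // a second, unrelated input signal
  val area   = Signal { width() * height() }  // a Signal expression combining both inputs
  area.observe(a => println(s"area = $a"))    // report the derived value as it changes
  width.set(800)                              // changing either input re-evaluates `area`
  height.transform(_ / 2)                     // transform derives the new value from the old one
}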
|
guidosalva/REScala
|
Code/Examples/examples/src/main/scala/examples/demo/DScaledClockCircle.scala
|
Scala
|
apache-2.0
| 2,678
|
package chrome
import scala.scalajs.js
import scala.scalajs.js.annotation._
package object storage {
val sync = values.sync
val local = values.local
@js.native
@JSGlobal("chrome.storage")
private object values extends js.Object {
val sync: StorageArea = js.native
val local: StorageArea = js.native
}
}
|
erdavila/auto-steamgifts
|
src/main/scala/chrome/storage/package.scala
|
Scala
|
mit
| 326
|
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.observables
import minitest.TestSuite
import monix.execution.Ack
import monix.execution.Ack.Continue
import monix.execution.schedulers.TestScheduler
import monix.reactive.OverflowStrategy.Unbounded
import monix.execution.exceptions.DummyException
import monix.reactive.subjects.ConcurrentSubject
import monix.reactive.{Observable, Observer}
import scala.concurrent.Future
import scala.concurrent.duration._
object RefCountObservableSuite extends TestSuite[TestScheduler] {
def setup(): TestScheduler = TestScheduler()
def tearDown(s: TestScheduler): Unit = {
assert(s.state.tasks.isEmpty,
"TestScheduler should have no pending tasks")
}
test("should work") { implicit s =>
var received = 0L
var completed = 0
def createObserver = new Observer[Long] {
def onNext(elem: Long): Future[Ack] = {
received += 1
Continue
}
def onError(ex: Throwable): Unit = ()
def onComplete(): Unit = completed += 1
}
val ref = Observable.interval(2.seconds).publish.refCount
val s1 = ref.subscribe(createObserver)
assertEquals(received, 1)
s.tick(2.seconds); assertEquals(received, 2)
val s2 = ref.subscribe(createObserver)
s.tick(); assertEquals(received, 2)
s.tick(2.seconds); assertEquals(received, 4)
s.tick(2.seconds); assertEquals(received, 6)
s1.cancel()
s.tick(); assertEquals(received, 6)
s.tick(2.seconds); assertEquals(received, 7)
assertEquals(completed, 0)
s2.cancel()
s.tick(2.seconds); assertEquals(received, 7)
assertEquals(completed, 0)
s.tick(2.seconds)
ref.subscribe(createObserver)
s.tick(2.seconds); assertEquals(received, 7)
assertEquals(completed, 0)
ref.subscribe(createObserver)
s.tick(2.seconds); assertEquals(received, 7)
assertEquals(completed, 0)
}
test("onError should stop everything") { implicit s =>
var received = 0L
var completed = 0
def createObserver = new Observer[Long] {
def onNext(elem: Long): Future[Ack] = {
received += 1
Continue
}
def onError(ex: Throwable): Unit = completed += 1
def onComplete(): Unit = ()
}
val ch = ConcurrentSubject.publish[Long](Unbounded)
val ref = ch.publish.refCount
ref.subscribe(createObserver)
ref.subscribe(createObserver)
assertEquals(received, 0)
ch.onNext(1)
s.tick(); assertEquals(received, 2)
ch.onError(DummyException("dummy"))
s.tick(); assertEquals(completed, 2)
ref.subscribe(createObserver)
assertEquals(completed, 3)
ref.subscribe(createObserver)
assertEquals(completed, 4)
assertEquals(received, 2)
}
test("onComplete") { implicit s =>
var received = 0L
var completed = 0
def createObserver = new Observer[Long] {
def onNext(elem: Long): Future[Ack] = {
received += 1
Continue
}
def onError(ex: Throwable): Unit = ()
def onComplete(): Unit = completed += 1
}
val ch = ConcurrentSubject.publish[Long](Unbounded)
val ref = ch.publish.refCount
ref.subscribe(createObserver)
ref.subscribe(createObserver)
ch.onNext(1)
ch.onComplete()
s.tick()
assertEquals(received, 2)
assertEquals(completed, 2)
}
test("cancel and stop should be idempotent") { implicit s =>
val ch = ConcurrentSubject.publish[Long](Unbounded)
var received = 0L
var completed = 0
def createObserver = new Observer[Long] {
def onNext(elem: Long): Future[Ack] = {
received += 1
Continue
}
def onError(ex: Throwable): Unit = ()
def onComplete(): Unit = completed += 1
}
val ref = ch.publish.refCount
val s1 = ref.subscribe(createObserver)
// Subscriber that cancels immediately after the first elem
val s2 = ref.take(0).subscribe(createObserver)
ch.onNext(10); s.tick()
assertEquals(received, 1)
assertEquals(completed, 1)
s2.cancel(); s.tick()
assertEquals(completed, 1)
ch.onNext(20); s.tick()
assertEquals(received, 2)
assertEquals(completed, 1)
s1.cancel(); s.tick()
ch.onNext(30); s.tick()
assertEquals(received, 2)
assertEquals(completed, 1)
}
}
|
Wogan/monix
|
monix-reactive/shared/src/test/scala/monix/reactive/observables/RefCountObservableSuite.scala
|
Scala
|
apache-2.0
| 4,914
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.logical
import org.apache.flink.table.planner.plan.nodes.FlinkConventions
import org.apache.calcite.plan._
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.calcite.rel.core.Match
import org.apache.calcite.rel.logical.LogicalMatch
import org.apache.calcite.rel.{RelCollation, RelNode}
import org.apache.calcite.rex.RexNode
import org.apache.calcite.util.ImmutableBitSet
import java.util
class FlinkLogicalMatch(
cluster: RelOptCluster,
traitSet: RelTraitSet,
input: RelNode,
rowType: RelDataType,
pattern: RexNode,
strictStart: Boolean,
strictEnd: Boolean,
patternDefinitions: util.Map[String, RexNode],
measures: util.Map[String, RexNode],
after: RexNode,
subsets: util.Map[String, _ <: util.SortedSet[String]],
allRows: Boolean,
partitionKeys: ImmutableBitSet,
orderKeys: RelCollation,
interval: RexNode)
extends Match(
cluster,
traitSet,
input,
rowType,
pattern,
strictStart,
strictEnd,
patternDefinitions,
measures,
after,
subsets,
allRows,
partitionKeys,
orderKeys,
interval)
with FlinkLogicalRel {
override def copy(traitSet: RelTraitSet, inputs: util.List[RelNode]): RelNode = {
new FlinkLogicalMatch(
cluster,
traitSet,
inputs.get(0),
rowType,
pattern,
strictStart,
strictEnd,
getPatternDefinitions,
getMeasures,
getAfter,
getSubsets,
allRows,
getPartitionKeys,
getOrderKeys,
getInterval)
}
}
private class FlinkLogicalMatchConverter
extends ConverterRule(
classOf[LogicalMatch],
Convention.NONE,
FlinkConventions.LOGICAL,
"FlinkLogicalMatchConverter") {
override def convert(rel: RelNode): RelNode = {
val logicalMatch = rel.asInstanceOf[LogicalMatch]
val traitSet = rel.getTraitSet.replace(FlinkConventions.LOGICAL)
val newInput = RelOptRule.convert(logicalMatch.getInput, FlinkConventions.LOGICAL)
new FlinkLogicalMatch(
rel.getCluster,
traitSet,
newInput,
logicalMatch.getRowType,
logicalMatch.getPattern,
logicalMatch.isStrictStart,
logicalMatch.isStrictEnd,
logicalMatch.getPatternDefinitions,
logicalMatch.getMeasures,
logicalMatch.getAfter,
logicalMatch.getSubsets,
logicalMatch.isAllRows,
logicalMatch.getPartitionKeys,
logicalMatch.getOrderKeys,
logicalMatch.getInterval)
}
}
object FlinkLogicalMatch {
val CONVERTER: ConverterRule = new FlinkLogicalMatchConverter()
}
|
tillrohrmann/flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/nodes/logical/FlinkLogicalMatch.scala
|
Scala
|
apache-2.0
| 3,474
|
package com.krrrr38.mackerel4s
package object api {
val mockSetting = new ClientSetting {
override val API_VERSION: String = "v0"
override val AUTH_HEADER_KEY: String = ""
override val BASE_URL: String = s"${MockApiServer.url}/$API_VERSION"
}
}
|
krrrr38/mackerel-client-scala
|
src/test/scala/com/krrrr38/mackerel4s/api/package.scala
|
Scala
|
mit
| 262
|
package com.codacy.client.bitbucket.v1
import play.api.libs.functional.syntax._
import play.api.libs.json._
case class SshKey(pk: Long, key: String, label: String)
object SshKey {
implicit val reader: Reads[SshKey] = (
(__ \\ "pk").read[Long] and
(__ \\ "key").read[String] and
(__ \\ "label").read[String]
)(SshKey.apply _)
}
|
rtfpessoa/bitbucket-scala-client
|
src/main/scala/com/codacy/client/bitbucket/v1/SshKey.scala
|
Scala
|
apache-2.0
| 349
|
/*
* Copyright 2014–2017 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.api.services
import scala.Predef.$conforms
import slamdata.Predef._
import quasar.Data
import quasar.DataArbitrary._
import quasar.api._
import quasar.api.ApiError._
import quasar.api.ApiErrorEntityDecoder._
import quasar.api.matchers._
import quasar.api.PathUtils._
import quasar.api.services.Fixture._
import quasar.contrib.pathy._, PathArbitrary._
import quasar.fp._
import quasar.fp.numeric._
import quasar.fs._
import quasar.fs.InMemory._
import quasar.fs.mount._
import quasar.fs.mount.module.Module
import quasar.main.CoreEffIO
import quasar.sql._
import eu.timepit.refined.numeric.{NonNegative, Positive => RPositive}
import eu.timepit.refined.auto._
import eu.timepit.refined.scalacheck.numeric._
import shapeless.tag.@@
import org.http4s.{Query, _}
import org.http4s.dsl._
import org.http4s.headers._
import pathy.scalacheck.PathyArbitrary._
import pathy.scalacheck.{AlphaCharacters, PathOf}
import pathy.scalacheck.PathOf.{absFileOfArbitrary, relFileOfArbitrary}
import pathy.Path._
import matryoshka.data.Fix
import scalaz.{Failure => _, Zip =>_, _}, Scalaz._
import scalaz.concurrent.Task
import scalaz.stream.Process
class InvokeServiceSpec extends quasar.Qspec with FileSystemFixture with Http4s {
import FileSystemError.pathErr
import PathError.pathNotFound
def service(mem: InMemState, mounts: Map[APath, MountConfig] = Map.empty): Service[Request, Response] =
HttpService.lift(req => Fixture.inMemFSWeb(mem, MountingsConfig(mounts)).flatMap { fs =>
invoke.service[CoreEffIO].toHttpService(fs).apply(req)
}).orNotFound
def sampleStatement(name: String): Statement[Fix[Sql]] = {
val selectAll = sqlE"select * from :Bar"
FunctionDecl(CIName(name), List(CIName("Bar")), selectAll)
}
def isExpectedResponse(data: Vector[Data], response: Response, format: MessageFormat) = {
val expectedBody: Process[Task, String] = format.encode(Process.emitAll(data))
response.as[String].unsafePerformSync must_=== expectedBody.runLog.unsafePerformSync.mkString("")
response.status must_=== Status.Ok
response.contentType must_=== Some(`Content-Type`(format.mediaType, Charset.`UTF-8`))
}
"Invoke Service" should {
"GET" >> {
"produce a 404 if file does not exist" >> prop { file: AFile =>
val response = service(InMemState.empty)(Request(uri = pathUri(file))).unsafePerformSync
response.status must_= Status.NotFound
response.as[ApiError].unsafePerformSync must beApiErrorLike[Module.Error](
Module.Error.FSError(pathErr(pathNotFound(file))))
}
"produce a 400 bad request if path is a directory instead of a file" >>
prop { (file: AFile, sampleData: Vector[Data]) =>
val state = InMemState.fromFiles(Map(file -> sampleData))
val response = service(state)(Request(uri = pathUri(fileParent(file)))).unsafePerformSync
response.status must_= Status.BadRequest
response.as[ApiError].unsafePerformSync must_=
apiError(BadRequest withReason "Path must be a file")
}
"produce a 400 bad request if not all params are supplied with explanation of " +
"which function parameters are missing from the query string" >>
prop { (functionFile: AFile, dataFile: AFile, sampleData: Vector[Data]) =>
val statements = List(sampleStatement(fileName(functionFile).value))
val mounts = Map((fileParent(functionFile): APath) -> MountConfig.moduleConfig(statements))
val state = InMemState.fromFiles(Map(dataFile -> sampleData))
val response = service(state, mounts)(Request(uri = pathUri(functionFile))).unsafePerformSync
response.status must_= Status.BadRequest
response.as[ApiError].unsafePerformSync must beApiErrorLike[Module.Error](
Module.Error.ArgumentsMissing(List(CIName("Bar"))))
}
"return evaluation of sql statement contained within function body if all params are supplied" >> {
"in straightforward case" >>
prop { (functionFile: PathOf[Abs, File, Sandboxed, AlphaCharacters],
dataFile: PathOf[Abs, File, Sandboxed, AlphaCharacters],
sampleData: Vector[Data]) =>
val statements = List(sampleStatement(fileName(functionFile.path).value))
val mounts = Map((fileParent(functionFile.path):APath) -> MountConfig.moduleConfig(statements))
val state = InMemState.fromFiles(Map(dataFile.path -> sampleData))
val arg = "`" + posixCodec.printPath(dataFile.path) + "`"
val request = Request(uri = pathUri(functionFile.path).copy(query = Query.fromPairs("bar" -> arg)))
val response = service(state, mounts)(request).unsafePerformSync
isExpectedResponse(sampleData, response, MessageFormat.Default)
}
"if file in module function is relative" >>
prop { (functionFile: PathOf[Abs, File, Sandboxed, AlphaCharacters],
rDataFile: PathOf[Rel, File, Sandboxed, AlphaCharacters],
sampleData: Vector[Data]) =>
val statements = List(sampleStatement(fileName(functionFile.path).value))
val mounts = Map((fileParent(functionFile.path):APath) -> MountConfig.moduleConfig(statements))
val dataFile = fileParent(functionFile.path) </> rDataFile.path
val state = InMemState.fromFiles(Map(dataFile -> sampleData))
val arg = "`" + posixCodec.printPath(rDataFile.path) + "`"
val request = Request(uri = pathUri(functionFile.path).copy(query = Query.fromPairs("bar" -> arg)))
val response = service(state, mounts)(request).unsafePerformSync
isExpectedResponse(sampleData, response, MessageFormat.Default)
}
"if function references other functions in the same module" >>
prop { (moduleDir: ADir,
dataFile: PathOf[Abs, File, Sandboxed, AlphaCharacters],
sampleData: Vector[Data]) => moduleDir ≠ rootDir ==> {
val statements =
sqlM"""
CREATE FUNCTION FOO(:a)
BEGIN
tmp := BAR(:a);
SELECT * FROM tmp
END;
CREATE FUNCTION BAR(:a)
BEGIN
select * from :a
END
"""
val mounts = Map((moduleDir: APath) -> MountConfig.moduleConfig(statements))
val state = InMemState.fromFiles(Map(dataFile.path -> sampleData))
val arg = "`" + posixCodec.printPath(dataFile.path) + "`"
val request = Request(uri = pathUri(moduleDir </> file("FOO")).copy(query = Query.fromPairs("a" -> arg)))
val response = service(state, mounts)(request).unsafePerformSync
isExpectedResponse(sampleData, response, MessageFormat.Default)
}}
"if function references a view" >>
prop { (moduleDir: ADir,
sampleData: Vector[Data]) => moduleDir ≠ rootDir ==> {
val dataFile = rootDir </> file("dataFile")
val viewFile = rootDir </> file("viewFile")
val statements =
sqlM"""
CREATE FUNCTION FOO(:a)
BEGIN
select * from `/viewFile`
END
"""
val mounts = Map(
(moduleDir: APath) -> MountConfig.moduleConfig(statements),
(viewFile: APath) -> MountConfig.viewConfig0(sqlB"select * from `/dataFile`"))
val state = InMemState.fromFiles(Map(dataFile -> sampleData))
val request = Request(uri = pathUri(moduleDir </> file("FOO")).copy(query = Query.fromPairs("a" -> "true")))
val response = service(state, mounts)(request).unsafePerformSync
isExpectedResponse(sampleData, response, MessageFormat.Default)
}}
"if function references a function in another module" >>
prop { (moduleDir: ADir,
dataFile: PathOf[Abs, File, Sandboxed, AlphaCharacters],
sampleData: Vector[Data]) => moduleDir ≠ rootDir ==> {
val otherModuleDirPath = rootDir </> dir("otherModule")
val otherModuleStatements =
sqlM"""
CREATE FUNCTION BAR(:a)
BEGIN
select * from :a
END
"""
val statements =
sqlM"""
IMPORT `/otherModule/`;
CREATE FUNCTION FOO(:a)
BEGIN
tmp := BAR(:a);
SELECT * FROM tmp
END
"""
val mounts = Map(
(moduleDir: APath) -> MountConfig.moduleConfig(statements),
(otherModuleDirPath: APath) -> MountConfig.moduleConfig(otherModuleStatements))
val state = InMemState.fromFiles(Map(dataFile.path -> sampleData))
val arg = "`" + posixCodec.printPath(dataFile.path) + "`"
val request = Request(uri = pathUri(moduleDir </> file("FOO")).copy(query = Query.fromPairs("a" -> arg)))
val response = service(state, mounts)(request).unsafePerformSync
isExpectedResponse(sampleData, response, MessageFormat.Default)
}}
}
"if query in function is constant even if not supported by connector" >>
prop { (functionFile: PathOf[Abs, File, Sandboxed, AlphaCharacters]) =>
val constant = sqlE"select (1,2)"
val statements = List(FunctionDecl(CIName(fileName(functionFile.path).value), Nil, constant))
val mounts = Map((fileParent(functionFile.path):APath) -> MountConfig.moduleConfig(statements))
val state = InMemState.empty
val request = Request(uri = pathUri(functionFile.path))
val response = service(state, mounts)(request).unsafePerformSync
isExpectedResponse(Vector(Data.Int(1), Data.Int(2)), response, MessageFormat.Default)
}
"support offset and limit" >> {
prop { (functionFile: PathOf[Abs, File, Sandboxed, AlphaCharacters],
dataFile: PathOf[Abs, File, Sandboxed, AlphaCharacters],
sampleData: Vector[Data],
offset: Int @@ NonNegative,
limit: Int @@ RPositive) =>
val statements = List(sampleStatement(fileName(functionFile.path).value))
val mounts = Map((fileParent(functionFile.path):APath) -> MountConfig.moduleConfig(statements))
val state = InMemState.fromFiles(Map(dataFile.path -> sampleData))
val arg = "`" + posixCodec.printPath(dataFile.path) + "`"
val request = Request(uri = pathUri(functionFile.path).copy(
query = Query.fromPairs("bar" -> arg, "offset" -> offset.toString, "limit" -> limit.toString)))
val response = service(state, mounts)(request).unsafePerformSync
isExpectedResponse(sampleData.drop(offset).take(limit), response, MessageFormat.Default)
}
}
"support disposition" >>
prop { (functionFile: PathOf[Abs, File, Sandboxed, AlphaCharacters],
dataFile: PathOf[Abs, File, Sandboxed, AlphaCharacters],
sampleData: Vector[Data]) =>
        val disposition = `Content-Disposition`("attachment", Map("filename" -> "data.json"))
val statements = List(sampleStatement(fileName(functionFile.path).value))
val mounts = Map((fileParent(functionFile.path):APath) -> MountConfig.moduleConfig(statements))
val state = InMemState.fromFiles(Map(dataFile.path -> sampleData))
val arg = "`" + posixCodec.printPath(dataFile.path) + "`"
val request = Request(
uri = pathUri(functionFile.path).copy(query = Query.fromPairs("bar" -> arg)),
headers = Headers(Accept(jsonReadableLine.mediaType.withExtensions(Map("disposition" -> disposition.value)))))
val response = service(state, mounts)(request).unsafePerformSync
isExpectedResponse(sampleData, response, MessageFormat.Default)
response.headers.get(`Content-Disposition`) must_=== Some(disposition)
}
}
}
}
|
drostron/quasar
|
web/src/test/scala/quasar/api/services/InvokeServiceSpec.scala
|
Scala
|
apache-2.0
| 13,068
|
// akka
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.ActorMaterializer
import akka.event.Logging
// scala
import scala.io.StdIn
// typesafe
import com.typesafe.config.ConfigFactory
object AkkaHttpApp extends Service {
def main(args: Array[String]): Unit = {
implicit val system = ActorSystem()
implicit val mat = ActorMaterializer()
import system.dispatcher
val config = ConfigFactory.load()
val logger = Logging(system, getClass)
Http().bindAndHandle(routes, config.getString("http.interface"), config.getInt("http.port"))
StdIn.readLine("Hit ENTER to exit")
system.shutdown()
system.awaitTermination()
}
}
|
bigsnarfdude/addifier
|
src/main/scala/AkkaHttp.scala
|
Scala
|
mit
| 696
|
/*
*
* * Copyright 2014 websudos ltd.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package com.websudos.phantom.zookeeper
import java.net.InetSocketAddress
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import com.websudos.util.testing._
class ZooKeeperInstanceTest extends FlatSpec with Matchers with BeforeAndAfterAll {
val instance = new ZookeeperInstance()
override def beforeAll(): Unit = {
super.beforeAll()
instance.start()
}
override def afterAll(): Unit = {
super.afterAll()
instance.stop()
}
it should "correctly set the status flag to true after starting the ZooKeeper Instance" in {
instance.isStarted shouldEqual true
}
it should "correctly initialise a ZooKeeper ServerSet after starting a ZooKeeper instance" in {
instance.zookeeperServer.isRunning shouldEqual true
}
it should "retrieve the correct data from the Cassandra path by default" in {
instance.richClient.getData("/cassandra", watch = false).successful {
res => {
res shouldNot equal(null)
res.data shouldNot equal(null)
new String(res.data) shouldEqual s"localhost:${DefaultCassandraManager.cassandraPort}"
}
}
}
it should "correctly parse the retrieved data into a Sequence of InetSocketAddresses" in {
instance.hostnamePortPairs.successful {
res => {
res shouldEqual Seq(new InetSocketAddress("localhost", DefaultCassandraManager.cassandraPort))
}
}
}
}
|
nosheenzaza/phantom-data-centric
|
phantom-zookeeper/src/test/scala/com/websudos/phantom/zookeeper/ZooKeeperInstanceTest.scala
|
Scala
|
gpl-2.0
| 2,038
|
/*
* Copyright (c) 2016, Groupon, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of GROUPON nor the names of its contributors may be
* used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.apache.spark.groupon.metrics.example
import java.lang.management.ManagementFactory
import java.util.concurrent.{TimeUnit, ScheduledThreadPoolExecutor}
import org.apache.spark.SparkConf
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.groupon.metrics.UserMetricsSystem
import org.apache.spark.scheduler.TaskInfo
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.receiver.Receiver
import org.apache.spark.streaming.scheduler.{StreamingListenerBatchSubmitted, StreamingListener}
import org.apache.spark.streaming.{Seconds, StreamingContext}
/**
* Spark Streaming application to benchmark performance, mainly on the driver.
* Run with something like this:
* $SPARK_HOME/bin/spark-submit
* --class org.apache.spark.groupon.metrics.example.MetricsBenchmarkApp
* --master yarn-cluster
* --queue public
* --conf spark.dynamicAllocation.enabled=false
* --conf spark.streaming.ui.retainedBatches=120
* --conf spark.locality.wait=0s
* --files log4j.properties
* --num-executors 6
* --executor-cores 1
* metrics.jar 2400 12500 4
*/
object MetricsBenchmarkApp {
def main(args: Array[String]): Unit = {
if (args.length < 3) {
System.err.println(
"Usage: MetricsBenchmarkApp <timeout seconds> <metrics per second> <number of tasks that produce metrics>")
System.exit(1)
}
val Seq(timeoutSeconds, metricsPerSecond, numMetricsProducers) = args.toSeq.map(_.toInt)
val sparkConf = new SparkConf().setAppName(s"MetricsBenchmarkApp: ${metricsPerSecond * numMetricsProducers} metrics per second")
val streamingContext = new StreamingContext(sparkConf, Seconds(1))
UserMetricsSystem.initialize(streamingContext.sparkContext)
streamingContext.addStreamingListener(new StreamingMetricsListener)
val metricsStreams = for(i <- 0 until numMetricsProducers) yield {
streamingContext.receiverStream[Long](new MetricsProducingReceiver(metricsPerSecond))
}
val numberStream = streamingContext.receiverStream[Long](new NumberProducingReceiver)
streamingContext.union(metricsStreams :+ numberStream).foreachRDD(rdd => {
lazy val actionTimer = UserMetricsSystem.timer("DriverActionTimer")
lazy val collectCount = UserMetricsSystem.counter("BenchmarkCounterDriver")
val isEmpty = actionTimer.time({
rdd.isEmpty()
})
if (!isEmpty) collectCount.inc(numMetricsProducers)
})
streamingContext.start()
streamingContext.awaitTerminationOrTimeout(timeoutSeconds * 1000)
}
}
class StreamingMetricsListener extends StreamingListener {
val runtime = Runtime.getRuntime
val osStats = ManagementFactory.getOperatingSystemMXBean.asInstanceOf[com.sun.management.OperatingSystemMXBean]
lazy val totalMemoryGauge = UserMetricsSystem.gauge("MemoryTotal")
lazy val maxMemoryGauge = UserMetricsSystem.gauge("MemoryMax")
lazy val freeMemoryGauge = UserMetricsSystem.gauge("MemoryFree")
lazy val usedMemoryGauge = UserMetricsSystem.gauge("MemoryUsedPercent")
lazy val cpuLoadGauge = UserMetricsSystem.gauge("CPULoadPercent")
lazy val usedMemoryHistogram = UserMetricsSystem.histogram("MemoryUsedHistogram")
lazy val cpuLoadHistogram = UserMetricsSystem.histogram("CPULoadHistogram")
override def onBatchSubmitted(batchSubmitted: StreamingListenerBatchSubmitted): Unit = {
val free = runtime.freeMemory()
val total = runtime.totalMemory()
val max = runtime.maxMemory()
val usedMemPercent = (total - free).toFloat / total.toFloat
val cpuLoadPercent = osStats.getProcessCpuLoad
totalMemoryGauge.set(total / 1024)
maxMemoryGauge.set(max / 1024)
freeMemoryGauge.set(free / 1024)
usedMemoryGauge.set(usedMemPercent)
usedMemoryHistogram.update((usedMemPercent * 100).toInt)
cpuLoadGauge.set(cpuLoadPercent)
cpuLoadHistogram.update((cpuLoadPercent * 100).toInt)
}
def getSchedulerDelay(info: TaskInfo, metrics: TaskMetrics, currentTime: Long): Long = {
val totalExecutionTime = info.finishTime - info.launchTime
val executorOverhead = metrics.executorDeserializeTime + metrics.resultSerializationTime
val gettingResultTime = info.finishTime - info.gettingResultTime
math.max(0, totalExecutionTime - metrics.executorRunTime - executorOverhead - gettingResultTime)
}
}
class MetricsProducingReceiver(val metricsPerSecond: Int) extends Receiver[Long](StorageLevel.MEMORY_ONLY) {
lazy val counter = UserMetricsSystem.counter("BenchmarkCounter")
lazy val loadCreatingMeter = UserMetricsSystem.meter("LoadCreatingMeter")
@transient lazy val scheduler = new ScheduledThreadPoolExecutor(1)
override def onStart(): Unit = {
scheduler.scheduleAtFixedRate(
new Runnable {
override def run(): Unit = {
(0 until metricsPerSecond).foreach(i => {
loadCreatingMeter.mark()
})
counter.inc()
}
}, 0, 1, TimeUnit.SECONDS
)
}
override def onStop(): Unit = {
scheduler.shutdownNow()
}
}
class NumberProducingReceiver extends Receiver[Long](StorageLevel.MEMORY_ONLY) {
@transient lazy val scheduler = new ScheduledThreadPoolExecutor(1)
override def onStart(): Unit = {
scheduler.scheduleAtFixedRate(
new Runnable {
override def run(): Unit = {
store(1)
}
}, 0, 1, TimeUnit.SECONDS
)
}
override def onStop(): Unit = {
scheduler.shutdownNow()
}
}
|
groupon/spark-metrics
|
src/main/scala/org/apache/spark/groupon/metrics/example/MetricsBenchmarkApp.scala
|
Scala
|
bsd-3-clause
| 6,983
|
class Regress {
var v: Int = _
def f = 42
var w: Int = (_) // error: not default value syntax
}
|
som-snytt/dotty
|
tests/neg/t11437.scala
|
Scala
|
apache-2.0
| 106
|
package type_env
import exceptions.ICE
import tir._
import tpass.TPass
/* Given some type env as argument, this pass walks through the
* expression and inserts that environment as the new parent
* of the FIRST environment it encounters. This is designed
* to insert a type environment into a chain of environments. */
object TypeEnvUpdateParentWalk extends TPass[TTypeEnv, Unit] {
override def combine(u: Unit, v: Unit) = u
override def combineList(u: List[Unit]) = ()
override def default = ()
override def apply(env: TTypeEnv, exp: TExp) = exp match {
case let: TExpLetIn => {
assert(let.env.parent == env.parent)
let.env.parent = Some(env)
}
case matchRow: TExpMatchRow => {
assert(matchRow.env.parent == env.parent)
matchRow.env.parent = Some(env)
}
case other => super.apply(env, other)
}
override def apply(env: TTypeEnv, program: TJavaProgram) =
// If this is needed, there is no particular reason not to.
// However, it probably means you are using type environments wrong.
throw new ICE("Cannot add a parent to the top level environment")
override def apply(env: TTypeEnv, dec: TDec) = dec match {
case javaFun: TJavaFun => {
assert(javaFun.env.parent == env.parent)
javaFun.env.parent = Some(env)
}
case other => super.apply(env, other)
}
}
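/* Editor's note: a hedged, self-contained illustration (not part of the compiler source)
 * of the "insert an environment as the new parent of the first environment encountered"
 * idea described in the comment above. The tiny `Env` class is hypothetical and unrelated
 * to the real TTypeEnv type. */
object ParentInsertionSketch extends App {
  final class Env(val name: String, var parent: Option[Env]) {
    def chain: List[String] = name :: parent.toList.flatMap(_.chain)
  }
  val root  = new Env("root", None)
  val inner = new Env("inner", Some(root))   // chain: inner -> root
  // Splice a new environment between `inner` and its current parent, mirroring
  // what the walk does for the first TExpLetIn / TExpMatchRow / TJavaFun it visits.
  val spliced = new Env("spliced", inner.parent)
  inner.parent = Some(spliced)               // chain: inner -> spliced -> root
  println(inner.chain.mkString(" -> "))      // prints: inner -> spliced -> root
}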
|
j-c-w/mlc
|
src/main/scala/type_env/TypeEnvUpdateParentWalk.scala
|
Scala
|
gpl-3.0
| 1,358
|
package com.szadowsz.starform.model
import com.szadowsz.starform.model.accrete.AccreteProfile
import com.szadowsz.starform.model.accrete.constants.AccreteConstants
import com.szadowsz.starform.model.eco.calc.EcoCalc
import com.szadowsz.starform.model.star.calc.StarCalc
import com.szadowsz.starform.model.star.constants.StarConstants
import com.szadowsz.starform.system.bodies.base.Star
/**
* Created on 13/04/2017.
*/
trait StarformProfile[S <: Star, C <: StarConstants, E <: EcoCalc] extends AccreteProfile {
val starConstants : C
def buildEcoCalc() : E
def buildStarCalc(sConst : C) : StarCalc[S]
}
|
zakski/accrete-starform-stargen
|
recreations/composite/src/main/scala/com/szadowsz/starform/model/StarformProfile.scala
|
Scala
|
apache-2.0
| 617
|
/*
* Copyright 2009-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package json
import org.specs2.mutable.Specification
import java.util.UUID
object SerializationBugs extends Specification {
import Serialization.{read, write => swrite}
implicit val formats = Serialization.formats(NoTypeHints)
"plan1.Plan can be serialized (issue 341)" in {
import plan1._
val game = Game(Map("a" -> Plan(Some(Action(1, None)))))
val ser = swrite(game)
read[Game](ser) mustEqual game
}
"plan2.Plan can be serialized (issue 341)" in {
import plan2._
val g1 = Game(Map("a" -> Plan(Some(Action("f1", "s", Array(), None)),
Some("A"),
Some(Action("f2", "s2", Array(0, 1, 2), None)))))
val ser = swrite(g1)
val g2 = read[Game](ser)
val plan = g2.buy("a")
val leftOp = plan.leftOperand.get
val rightOp = plan.rightOperand.get
(g2.buy.size mustEqual 1) and
(leftOp.functionName mustEqual "f1") and
(leftOp.symbol mustEqual "s") and
(leftOp.inParams.toList mustEqual Nil) and
(leftOp.subOperand mustEqual None) and
(plan.operator mustEqual Some("A")) and
(rightOp.functionName mustEqual "f2") and
(rightOp.symbol mustEqual "s2") and
(rightOp.inParams.toList mustEqual List(0, 1, 2)) and
(rightOp.subOperand mustEqual None)
}
"null serialization bug" in {
val x = new X(null)
val ser = swrite(x)
read[X](ser) mustEqual x
}
"StackOverflowError with large Lists" in {
val xs = LongList(List.fill(5000)(0).map(Num))
val ser = swrite(xs)
read[LongList](ser).xs.length mustEqual 5000
}
"Custom serializer should work with Option" in {
class UUIDFormat extends Serializer[UUID] {
val UUIDClass = classOf[UUID]
def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), UUID] = {
case (TypeInfo(UUIDClass, _), JString(x)) => UUID.fromString(x)
}
def serialize(implicit format: Formats): PartialFunction[Any, JValue] = {
case x: UUID => JString(x.toString)
}
}
implicit val formats = Serialization.formats(NoTypeHints) + new UUIDFormat
val o1 = OptionalUUID(None)
val o2 = OptionalUUID(Some(UUID.randomUUID))
(read[OptionalUUID](swrite(o1)) mustEqual o1) and
(read[OptionalUUID](swrite(o2)) mustEqual o2)
}
"TypeInfo is not correctly constructed for customer serializer -- 970" in {
class SeqFormat extends Serializer[Seq[_]] {
val SeqClass = classOf[Seq[_]]
def serialize(implicit format: Formats) = {
case seq: Seq[_] => JArray(seq.toList.map(Extraction.decompose))
}
def deserialize(implicit format: Formats) = {
case (TypeInfo(SeqClass, parameterizedType), JArray(xs)) =>
val typeInfo = TypeInfo(parameterizedType
.map(_.getActualTypeArguments()(0))
.getOrElse(failure("No type parameter info for type Seq")).asInstanceOf[Class[_]], None)
xs.map(x => Extraction.extract(x, typeInfo))
}
}
implicit val formats = DefaultFormats + new SeqFormat
val seq = Seq(1, 2, 3)
val ser = Extraction.decompose(seq)
Extraction.extract[Seq[Int]](ser) mustEqual seq
}
"Serialization of an opaque value should not fail" in {
val o = Opaque(JObject(JField("some", JString("data")) :: Nil))
val ser = Serialization.write(o)
ser mustEqual """{"x":{"some":"data"}}"""
}
"Map with Map value" in {
val a = Map("a" -> Map("a" -> 5))
val b = Map("b" -> 1)
val str = Serialization.write(MapWithMap(a, b))
read[MapWithMap](str) mustEqual MapWithMap(a, b)
}
"Either can't be deserialized with type hints" in {
implicit val formats = DefaultFormats + FullTypeHints(classOf[Either[_, _]] :: Nil)
val x = Eith(Left("hello"))
val s = Serialization.write(x)
read[Eith](s) mustEqual x
}
"Custom serializer should work as Map key (scala 2.9) (issue #1077)" in {
class SingleOrVectorSerializer extends Serializer[SingleOrVector[Double]] {
private val singleOrVectorClass = classOf[SingleOrVector[Double]]
def deserialize(implicit format: Formats) = {
case (TypeInfo(`singleOrVectorClass`, _), json) => json match {
case JObject(List(JField("val", JDouble(x)))) => SingleValue(x)
case JObject(List(JField("val", JArray(xs: List[_])))) =>
VectorValue(xs.asInstanceOf[List[JDouble]].map(_.num).toIndexedSeq)
case x => throw new MappingException("Can't convert " + x + " to SingleOrVector")
}
}
def serialize(implicit format: Formats) = {
case SingleValue(x: Double) => JObject(List(JField("val", JDouble(x))))
case VectorValue(x: Vector[_]) =>
JObject(List(JField("val", JArray(x.asInstanceOf[Vector[Double]].toList.map(JDouble(_))))))
}
}
implicit val formats = DefaultFormats + new SingleOrVectorSerializer
val ser = swrite(MapHolder(Map("hello" -> SingleValue(2.0))))
read[MapHolder](ser) mustEqual MapHolder(Map("hello" -> SingleValue(2.0)))
}
"Constructor memoization should not ignore type parameters" in {
val jsonA = """ { "data": { "foo": "string" }, "success": true } """
val jsonB = """ { "data": { "bar": "string" }, "success": true } """
(read[SomeContainer[TypeA]](jsonA) mustEqual SomeContainer(TypeA("string"))) and
(read[SomeContainer[TypeB]](jsonB) mustEqual SomeContainer(TypeB("string")))
}
}
case class TypeA(foo: String)
case class TypeB(bar: String)
case class SomeContainer[D](data: D)
case class Eith(x: Either[String, Int])
case class MapWithMap(a: Map[String, Map[String, Int]], b: Map[String, Int])
case class LongList(xs: List[Num])
case class Num(x: Int)
case class X(yy: Y)
case class Y(ss: String)
case class OptionalUUID(uuid: Option[UUID])
package plan1 {
case class Plan(plan: Option[Action])
case class Game(game: Map[String, Plan])
case class Action(id: Int, subAction: Option[Action])
}
package plan2 {
case class Plan(leftOperand: Option[Action], operator: Option[String],
rightOperand: Option[Action])
case class Game(buy: Map[String, Plan])
case class Action(functionName: String, symbol: String,
inParams: Array[Number], subOperand: Option[Action])
}
case class Opaque(x: JValue)
sealed trait SingleOrVector[A]
case class SingleValue[A](value: A) extends SingleOrVector[A]
case class VectorValue[A](value: IndexedSeq[A]) extends SingleOrVector[A]
case class MapHolder(a: Map[String, SingleOrVector[Double]])
|
lzpfmh/framework-2
|
core/json/src/test/scala/net/liftweb/json/SerializationBugs.scala
|
Scala
|
apache-2.0
| 7,196
|
/**
Open Bank Project - API
Copyright (C) 2011, 2014, TESOBE / Music Pictures Ltd
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Email: contact@tesobe.com
TESOBE / Music Pictures Ltd
Osloerstrasse 16/17
Berlin 13359, Germany
This product includes software developed at
TESOBE (http://www.tesobe.com/)
by
Ayoub Benali: ayoub AT tesobe DOT com
*/
package com.tesobe.status.messageQueue
import net.liftweb.actor.{LiftActor, LAFuture}
import net.liftweb.common.Loggable
import com.tesobe.status.model.DetailedBankStatues
class BankStatuesHandler(banksStatues: LAFuture[DetailedBankStatues]) extends LiftActor with Loggable{
import net.liftweb.common.{Full, Empty, Box}
import net.liftmodules.amqp.AMQPMessage
import com.tesobe.status.messageQueue.BankStatuesListener
import com.tesobe.status.model.{
BanksStatuesReply,
SupportedBanksReply,
DetailedBankStatus
}
private var bankStatues: Box[BanksStatuesReply] = Empty
private var supportedBanks: Box[SupportedBanksReply] = Empty
BankStatuesListener.subscribeForBanksStatues(this)
BankStatuesListener.subscribeForBanksList(this)
def completeFutureIfPossible(): Unit = {
if(bankStatues.isDefined && supportedBanks.isDefined){
val statues: Set[DetailedBankStatus] =
supportedBanks.get.banks.map{b => {
val isFound = bankStatues.get.find(b.country, b.nationalIdentifier)
val (status, lastUpdate) =
if(isFound.isDefined){
(isFound.get.status, Some(isFound.get.lastUpdate))
}else{
(false, None)
}
DetailedBankStatus(
b.country,
b.nationalIdentifier,
b.name,
status,
lastUpdate
)
}}
banksStatues.complete(Full(DetailedBankStatues(statues)))
}
}
MessageSender.getStatues
MessageSender.getBankList
protected def messageHandler = {
case msg@AMQPMessage(statues: BanksStatuesReply) => {
logger.info("received bank statues message")
if(bankStatues.isEmpty){
bankStatues = Full(statues)
completeFutureIfPossible
}
}
case msg@AMQPMessage(banks: SupportedBanksReply) => {
logger.info("received supported banks message")
if(supportedBanks.isEmpty){
supportedBanks = Full(banks)
completeFutureIfPossible
}
}
}
}
|
OpenBankProject/status
|
src/main/scala/com/tesobe/status/messageQueue/BankStatuesHandler.scala
|
Scala
|
agpl-3.0
| 2,973
|
/*
* Copyright 2014 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s
package server
package middleware
import cats.Applicative
import cats.Monad
import cats.data.Kleisli
import cats.data.NonEmptyList
import org.http4s.Status.MovedPermanently
import org.http4s.Uri.Authority
import org.http4s.Uri.RegName
import org.http4s.Uri.Scheme
import org.http4s.headers.Host
import org.http4s.headers.Location
import org.http4s.headers.`Content-Type`
import org.http4s.syntax.header._
import org.log4s.getLogger
import org.typelevel.ci._
/** [[Middleware]] to redirect http traffic to https.
* Inspects `X-Forwarded-Proto` header and if it is set to `http`,
* redirects to `Host` with same URL with https schema; otherwise does nothing.
* This middleware is useful when a service is deployed behind a load balancer
* which does not support such redirect feature, e.g. Heroku.
*/
object HttpsRedirect {
private[HttpsRedirect] val logger = getLogger
def apply[F[_], G[_]](http: Http[F, G])(implicit F: Applicative[F]): Http[F, G] =
Kleisli { req =>
(req.headers.get(ci"X-Forwarded-Proto"), req.headers.get[Host]) match {
case (Some(NonEmptyList(proto, _)), Some(host))
if Scheme.fromString(proto.value).contains(Scheme.http) =>
logger.debug(s"Redirecting ${req.method} ${req.uri} to https on $host")
val authority = Authority(host = RegName(host.value))
val location = req.uri.copy(scheme = Some(Scheme.https), authority = Some(authority))
val headers = Headers(Location(location), `Content-Type`(MediaType.text.xml))
val response = Response[G](status = MovedPermanently, headers = headers)
F.pure(response)
case _ =>
http(req)
}
}
def httpRoutes[F[_]: Monad](httpRoutes: HttpRoutes[F]): HttpRoutes[F] =
apply(httpRoutes)
def httpApp[F[_]: Applicative](httpApp: HttpApp[F]): HttpApp[F] =
apply(httpApp)
}
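/** Editor's note: a hedged usage sketch, added for illustration and not part of the
  * original http4s source. It assumes a cats-effect `IO` stack and that `http4s-dsl`
  * is on the classpath; the route and value names are illustrative only.
  */
private[middleware] object HttpsRedirectUsageSketch {
  import cats.effect.IO
  import org.http4s.dsl.io._
  // An ordinary service. Requests that arrive with `X-Forwarded-Proto: http` and a
  // `Host` header are answered with a 301 pointing at the https URL; everything else
  // falls through to these routes unchanged.
  val routes: HttpRoutes[IO] = HttpRoutes.of[IO] { case GET -> Root / "hello" =>
    Ok("hello")
  }
  val redirecting: HttpRoutes[IO] = HttpsRedirect.httpRoutes(routes)
}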
|
rossabaker/http4s
|
server/shared/src/main/scala/org/http4s/server/middleware/HttpsRedirect.scala
|
Scala
|
apache-2.0
| 2,487
|
package mesosphere.marathon
import java.util.{ TimerTask, Timer }
import java.util.concurrent.atomic.AtomicBoolean
import akka.actor.{ ActorRef, ActorSystem }
import akka.testkit.{ TestKit, TestProbe }
import com.twitter.common.base.ExceptionalCommand
import com.twitter.common.zookeeper.{ Group, Candidate }
import com.twitter.common.zookeeper.Group.JoinException
import mesosphere.chaos.http.HttpConf
import mesosphere.marathon.Protos.StorageVersion
import mesosphere.marathon.health.HealthCheckManager
import mesosphere.marathon.state.{ AppRepository, Migration }
import mesosphere.marathon.tasks.TaskTracker
import mesosphere.mesos.util.FrameworkIdUtil
import mesosphere.util.BackToTheFuture.Timeout
import org.apache.mesos.SchedulerDriver
import org.apache.mesos.{ Protos => mesos }
import org.apache.mesos.state.InMemoryState
import org.mockito.Matchers.{ any, eq => mockEq }
import org.mockito.Mockito
import org.mockito.Mockito.{ times, verify, when }
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.rogach.scallop.ScallopOption
import org.scalatest.{ Matchers, BeforeAndAfterAll }
import scala.concurrent.duration._
object MarathonSchedulerServiceTest {
import Mockito.mock
val ReconciliationDelay = 5000L
val ReconciliationInterval = 5000L
def mockConfig = {
val config = mock(classOf[MarathonConf])
when(config.reconciliationInitialDelay).thenReturn(scallopOption(Some(ReconciliationDelay)))
when(config.reconciliationInterval).thenReturn(scallopOption(Some(ReconciliationInterval)))
when(config.zkFutureTimeout).thenReturn(Timeout(1.second))
config
}
def scallopOption[A](a: Option[A]): ScallopOption[A] = {
new ScallopOption[A]("") {
override def get = a
override def apply() = a.get
}
}
}
class MarathonSchedulerServiceTest
extends TestKit(ActorSystem("System"))
with MarathonSpec
with BeforeAndAfterAll
with Matchers {
import MarathonSchedulerServiceTest._
import system.dispatcher
var probe: TestProbe = _
var healthCheckManager: HealthCheckManager = _
var candidate: Option[Candidate] = _
var config: MarathonConf = _
var httpConfig: HttpConf = _
var frameworkIdUtil: FrameworkIdUtil = _
var leader: AtomicBoolean = _
var appRepository: AppRepository = _
var taskTracker: TaskTracker = _
var scheduler: MarathonScheduler = _
var migration: Migration = _
var schedulerActor: ActorRef = _
before {
probe = TestProbe()
healthCheckManager = mock[HealthCheckManager]
candidate = mock[Option[Candidate]]
config = mockConfig
httpConfig = mock[HttpConf]
frameworkIdUtil = mock[FrameworkIdUtil]
leader = mock[AtomicBoolean]
appRepository = mock[AppRepository]
taskTracker = mock[TaskTracker]
scheduler = mock[MarathonScheduler]
migration = mock[Migration]
schedulerActor = probe.ref
}
test("Start timer when elected") {
val timer = mock[Timer]
when(frameworkIdUtil.fetch(any(), any())).thenReturn(None)
val schedulerService = new MarathonSchedulerService(
healthCheckManager,
candidate,
config,
httpConfig,
frameworkIdUtil,
leader,
appRepository,
taskTracker,
scheduler,
system,
migration,
schedulerActor
) {
override def runDriver(abdicateCmdOption: Option[ExceptionalCommand[JoinException]]): Unit = ()
override def newDriver() = mock[SchedulerDriver]
}
schedulerService.reconciliationTimer = timer
schedulerService.onElected(mock[ExceptionalCommand[Group.JoinException]])
verify(timer).schedule(any[TimerTask](), mockEq(ReconciliationDelay), mockEq(ReconciliationInterval))
verify(timer).schedule(any(), mockEq(ReconciliationDelay + ReconciliationInterval))
}
test("Cancel timer when defeated") {
val timer = mock[Timer]
when(frameworkIdUtil.fetch(any(), any())).thenReturn(None)
val schedulerService = new MarathonSchedulerService(
healthCheckManager,
candidate,
config,
httpConfig,
frameworkIdUtil,
leader,
appRepository,
taskTracker,
scheduler,
system,
migration,
schedulerActor
) {
override def runDriver(abdicateCmdOption: Option[ExceptionalCommand[JoinException]]): Unit = ()
override def newDriver() = mock[SchedulerDriver]
}
schedulerService.reconciliationTimer = timer
schedulerService.onDefeated()
verify(timer).cancel()
assert(schedulerService.reconciliationTimer != timer, "timer should be replaced after leadership defeat")
}
test("Re-enable timer when re-elected") {
val timer = mock[Timer]
when(frameworkIdUtil.fetch(any(), any())).thenReturn(None)
val schedulerService = new MarathonSchedulerService(
healthCheckManager,
candidate,
config,
httpConfig,
frameworkIdUtil,
leader,
appRepository,
taskTracker,
scheduler,
system,
migration,
schedulerActor
) {
override def runDriver(abdicateCmdOption: Option[ExceptionalCommand[JoinException]]): Unit = ()
override def newDriver() = mock[SchedulerDriver]
override def newTimer() = timer
}
schedulerService.onElected(mock[ExceptionalCommand[Group.JoinException]])
schedulerService.onDefeated()
schedulerService.onElected(mock[ExceptionalCommand[Group.JoinException]])
verify(timer, times(2)).schedule(any[TimerTask](), mockEq(ReconciliationDelay), mockEq(ReconciliationInterval))
verify(timer, times(2)).schedule(any(), mockEq(ReconciliationDelay + ReconciliationInterval))
verify(timer).cancel()
}
test("Always fetch current framework ID") {
val frameworkId = mesos.FrameworkID.newBuilder.setValue("myId").build()
val timer = mock[Timer]
frameworkIdUtil = new FrameworkIdUtil(new InMemoryState)
val schedulerService = new MarathonSchedulerService(
healthCheckManager,
candidate,
config,
httpConfig,
frameworkIdUtil,
leader,
appRepository,
taskTracker,
scheduler,
system,
migration,
schedulerActor
) {
override def runDriver(abdicateCmdOption: Option[ExceptionalCommand[JoinException]]): Unit = ()
override def newDriver() = mock[SchedulerDriver]
override def newTimer() = timer
}
schedulerService.frameworkId should be(None)
implicit lazy val timeout = Timeout(1.second)
frameworkIdUtil.store(frameworkId)
awaitAssert(schedulerService.frameworkId should be(Some(frameworkId)))
}
test("Abdicate leadership when migration fails and reoffer leadership") {
val timer = mock[Timer]
when(frameworkIdUtil.fetch(any(), any())).thenReturn(None)
candidate = Some(mock[Candidate])
val schedulerService = new MarathonSchedulerService(
healthCheckManager,
candidate,
config,
httpConfig,
frameworkIdUtil,
leader,
appRepository,
taskTracker,
scheduler,
system,
migration,
schedulerActor
) {
override def runDriver(abdicateCmdOption: Option[ExceptionalCommand[JoinException]]): Unit = ()
override def newDriver() = mock[SchedulerDriver]
}
    // use an Answer object here because Mockito's thenThrow only allows
    // throwing RuntimeExceptions
when(migration.migrate()).thenAnswer(new Answer[StorageVersion] {
override def answer(invocation: InvocationOnMock): StorageVersion = {
import java.util.concurrent.TimeoutException
throw new TimeoutException("Failed to wait for future within timeout")
}
})
schedulerService.onElected(mock[ExceptionalCommand[Group.JoinException]])
awaitAssert { verify(candidate.get).offerLeadership(schedulerService) }
assert(schedulerService.isLeader == false)
}
}
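// Hedged sketch, not part of the original test file: shows how the scallopOption
// helper defined in MarathonSchedulerServiceTest turns a plain Option into the
// ScallopOption shape that the mocked MarathonConf returns. The object name below
// is illustrative only.
object ScallopOptionHelperExample {
  import MarathonSchedulerServiceTest.scallopOption
  val delay: ScallopOption[Long] = scallopOption(Some(5000L))
  assert(delay.get == Some(5000L)) // get exposes the wrapped Option
  assert(delay() == 5000L)         // apply() unwraps it, which is what mockConfig relies on
}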
|
sielaq/marathon
|
src/test/scala/mesosphere/marathon/MarathonSchedulerServiceTest.scala
|
Scala
|
apache-2.0
| 7,873
|
package org.jetbrains.plugins.scala.lang.typeInference
package generated
class TypeInferenceHigherKindsTest extends TypeInferenceTestBase {
  //This class was generated by a build script; please don't change it
override def folderPath: String = super.folderPath + "higherKinds/"
def testSCL1655() {doTest()}
def testhk01() {doTest()}
def testhk02() {doTest()}
def testhk03() {doTest()}
def testhk04() {doTest()}
def testhk05() {doTest()}
}
|
triggerNZ/intellij-scala
|
test/org/jetbrains/plugins/scala/lang/typeInference/generated/TypeInferenceHigherKindsTest.scala
|
Scala
|
apache-2.0
| 460
|
package spinoco.protocol.mail.header
import scodec.Codec
import spinoco.protocol.mail.EmailAddress
import spinoco.protocol.mail.header.codec.EmailAddressCodec
/**
* RFC 5322 3.6.6:
*
* Resent fields SHOULD be added to any message that is reintroduced by
* a user into the transport system. A separate set of resent fields
* SHOULD be added each time this is done.
*
* For instance, the "Resent-Sender:" field corresponds to
* the "Sender:" field
*
*/
case class `Resent-Sender`(sender: EmailAddress) extends DefaultEmailHeaderField
object `Resent-Sender` extends DefaultHeaderDescription[`Resent-Sender`] {
val codec: Codec[`Resent-Sender`] = {
EmailAddressCodec.codec.xmap(
`Resent-Sender`.apply, _.sender
)
}
}
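// Hedged sketch, not part of the original source: decodes a raw header value with
// the codec defined above. The exact address syntax accepted by EmailAddressCodec
// is an assumption based on RFC 5322; the object name is illustrative only.
object ResentSenderDecodeExample extends App {
  import scodec.bits.ByteVector
  val raw = ByteVector.view("John Doe <[email protected]>".getBytes("ASCII")).bits
  println(`Resent-Sender`.codec.decode(raw)) // Attempt[DecodeResult[`Resent-Sender`]]
}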
|
Spinoco/protocol
|
mail/src/main/scala/spinoco/protocol/mail/header/`Resent-Sender`.scala
|
Scala
|
mit
| 759
|
/**
* Copyright (c) 2015 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trustedanalytics.sparktk.frame.internal.ops.groupby.aggregators
import org.trustedanalytics.sparktk.frame.DataTypes.DataType
import org.trustedanalytics.sparktk.frame.DataTypes
import org.trustedanalytics.sparktk.frame.internal.ops.binning.DiscretizationFunctions
/**
* Aggregator for computing histograms using a list of cutoffs.
*
* The histogram is a vector containing the percentage of observations found in each bin
*/
case class HistogramAggregator(cutoffs: List[Double], includeLowest: Option[Boolean] = None, strictBinning: Option[Boolean] = None) extends GroupByAggregator {
require(cutoffs.size >= 2, "At least one bin is required in cutoff array")
require(cutoffs == cutoffs.sorted, "the cutoff points of the bins must be monotonically increasing")
/** An array that aggregates the number of elements in each bin */
override type AggregateType = Array[Double]
/** The bin number for a column value */
override type ValueType = Int
/** The 'empty' or 'zero' or default value for the aggregator */
override def zero = Array.ofDim[Double](cutoffs.size - 1)
/**
* Get the bin index for the column value based on the cutoffs
*
* Strict binning is disabled so values smaller than the first bin are assigned to the first bin,
* and values larger than the last bin are assigned to the last bin.
*/
override def mapFunction(columnValue: Any, columnDataType: DataType): ValueType = {
if (columnValue != null) {
DiscretizationFunctions.binElement(DataTypes.toDouble(columnValue),
cutoffs,
lowerInclusive = includeLowest.getOrElse(true),
strictBinning = strictBinning.getOrElse(false))
}
else -1
}
/**
* Increment the count for the bin corresponding to the bin index
*/
override def add(binArray: AggregateType, binIndex: ValueType): AggregateType = {
if (binIndex >= 0) binArray(binIndex) += 1
binArray
}
/**
* Sum two binned lists.
*/
override def merge(binArray1: AggregateType, binArray2: AggregateType) = {
(binArray1, binArray2).zipped.map(_ + _)
}
/**
* Return the vector containing the percentage of observations found in each bin
*/
override def getResult(binArray: AggregateType): Any = {
val total = binArray.sum
if (total > 0) DataTypes.toVector()(binArray.map(_ / total)) else binArray
}
}
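// Hedged sketch, not part of the original source: walks one tiny column of values
// through the zero/mapFunction/add/getResult hooks documented above. DataTypes.int32
// is assumed to be a valid DataType constant in this project; the object name is
// illustrative only.
object HistogramAggregatorExample extends App {
  val agg = HistogramAggregator(cutoffs = List(0.0, 5.0, 10.0)) // two bins: [0,5) and [5,10]
  val column: Seq[Any] = Seq(1, 4, 7, 9)
  // Map each value to a bin index, then accumulate counts into the zero array.
  val bins = column
    .map(v => agg.mapFunction(v, DataTypes.int32))
    .foldLeft(agg.zero)(agg.add)
  println(agg.getResult(bins)) // half of the values fall in each bin: Vector(0.5, 0.5)
}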
|
shibanis1/spark-tk
|
core/src/main/scala/org/trustedanalytics/sparktk/frame/internal/ops/groupby/aggregators/HistogramAggregator.scala
|
Scala
|
apache-2.0
| 3,059
|
/**
*
* Copyright (c) 2015-2017 Rodney S.K. Lai
* https://github.com/rodney-lai
*
* Permission to use, copy, modify, and/or distribute this software for
* any purpose with or without fee is hereby granted, provided that the
* above copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*/
import org.scalatestplus.play._
import org.scalatestplus.play.guice._
import play.api.test._
import play.api.test.Helpers._
/**
 * Add your integration spec here.
 * An integration test will fire up a whole Play application in a real (or headless) browser.
*/
class IntegrationSpec extends PlaySpec with GuiceOneServerPerTest with OneBrowserPerTest with HtmlUnitFactory {
"Application" should {
"work from within a browser" in {
go to ("http://localhost:" + port)
pageSource must include ("Rodney's Test Server")
}
}
}
|
rodney-lai/test-site
|
home/test/IntegrationSpec.scala
|
Scala
|
isc
| 1,375
|
package com.technophobia.substeps.domain
import com.technophobia.substeps.domain.execution.RunResult
import com.technophobia.substeps.domain.repositories.SubstepRepository
import com.technophobia.substeps.domain.events.{ExecutionCompleted, ExecutionStarted, DomainEventPublisher}
import java.util.Date
case class BasicScenario(override val title: String, steps: Seq[SubstepInvocation], override val tags: Set[Tag]) extends Scenario(title, tags) {
assert(steps != null, "Steps must not be null for a BasicScenario")
def run(): RunResult = {
DomainEventPublisher.instance().publish(ExecutionStarted(this))
val result = steps.foldLeft[RunResult](RunResult.NoneRun)((b,a) => b.combine(a.run()))
DomainEventPublisher.instance().publish(ExecutionCompleted(this, result))
result
}
}
object BasicScenario {
def apply(substepRepository: SubstepRepository, scenarioTitle: String, stepInvocations: Seq[String], tags: Set[Tag]) : BasicScenario = {
new BasicScenario(scenarioTitle, stepInvocations.map(SubstepInvocation(substepRepository, _)), tags)
}
}
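// Hedged sketch, not part of the original source: the same accumulate pattern
// BasicScenario.run uses, pulled out for a generic sequence of already-built
// invocations (without the event publishing). Only RunResult.NoneRun and combine,
// both used above, are assumed; the object name is illustrative only.
object RunResultFoldExample {
  def combineAll(invocations: Seq[SubstepInvocation]): RunResult =
    invocations.foldLeft[RunResult](RunResult.NoneRun)((acc, step) => acc.combine(step.run()))
}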
|
G2G3Digital/substeps-scala-core
|
src/main/scala/com/technophobia/substeps/domain/BasicScenario.scala
|
Scala
|
lgpl-3.0
| 1,086
|
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.docgen
import org.junit.Test
import org.neo4j.cypher.{ConstraintValidationException, CypherExecutionException}
import org.neo4j.kernel.api.constraints.{NodePropertyConstraint, RelationshipPropertyConstraint}
import org.neo4j.test.TestEnterpriseGraphDatabaseFactory
import scala.collection.JavaConverters._
class ConstraintsTest extends DocumentingTestBase with SoftReset {
override protected def newTestGraphDatabaseFactory() = new TestEnterpriseGraphDatabaseFactory()
def section: String = "Constraints"
@Test def create_unique_constraint() {
testQuery(
title = "Create uniqueness constraint",
text = "To create a constraint that makes sure that your database will never contain more than one node with a specific " +
"label and one property value, use the `IS UNIQUE` syntax.",
queryText = "CREATE CONSTRAINT ON (book:Book) ASSERT book.isbn IS UNIQUE",
optionalResultExplanation = "",
assertions = (p) => assertNodeConstraintExist("Book", "isbn")
)
}
@Test def drop_unique_constraint() {
generateConsole = false
prepareAndTestQuery(
title = "Drop uniqueness constraint",
text = "By using `DROP CONSTRAINT`, you remove a constraint from the database.",
queryText = "DROP CONSTRAINT ON (book:Book) ASSERT book.isbn IS UNIQUE",
optionalResultExplanation = "",
prepare = _ => executePreparationQueries(List("CREATE CONSTRAINT ON (book:Book) ASSERT book.isbn IS UNIQUE")),
assertions = (p) => assertNodeConstraintDoesNotExist("Book", "isbn")
)
}
@Test def play_nice_with_unique_property_constraint() {
generateConsole = false
prepareAndTestQuery(
title = "Create a node that complies with unique property constraints",
text = "Create a `Book` node with an `isbn` that isn't already in the database.",
queryText = "CREATE (book:Book {isbn: '1449356265', title: 'Graph Databases'})",
optionalResultExplanation = "",
prepare = _ => executePreparationQueries(List("CREATE CONSTRAINT ON (book:Book) ASSERT book.isbn IS UNIQUE")),
assertions = (p) => assertNodeConstraintExist("Book", "isbn")
)
}
@Test def break_unique_property_constraint() {
generateConsole = false
engine.execute("CREATE CONSTRAINT ON (book:Book) ASSERT book.isbn IS UNIQUE")
engine.execute("CREATE (book:Book {isbn: '1449356265', title: 'Graph Databases'})")
testFailingQuery[CypherExecutionException](
title = "Create a node that breaks a unique property constraint",
text = "Create a `Book` node with an `isbn` that is already used in the database.",
queryText = "CREATE (book:Book {isbn: '1449356265', title: 'Graph Databases'})",
optionalResultExplanation = "In this case the node isn't created in the graph."
)
}
@Test def fail_to_create_constraint() {
generateConsole = false
engine.execute("CREATE (book:Book {isbn: '1449356265', title: 'Graph Databases'})")
engine.execute("CREATE (book:Book {isbn: '1449356265', title: 'Graph Databases 2'})")
testFailingQuery[CypherExecutionException](
title = "Failure to create a unique property constraint due to conflicting nodes",
text = "Create a unique property constraint on the property `isbn` on nodes with the `Book` label when there are two nodes with" +
" the same `isbn`.",
queryText = "CREATE CONSTRAINT ON (book:Book) ASSERT book.isbn IS UNIQUE",
optionalResultExplanation = "In this case the constraint can't be created because it is violated by existing " +
"data. We may choose to use <<query-schema-index>> instead or remove the offending nodes and then re-apply the " +
"constraint."
)
}
@Test def create_node_property_existence_constraint() {
testQuery(
title = "Create node property existence constraint",
text = "To create a constraint that makes sure that all nodes with a certain label have a certain property, use the `ASSERT exists(identifier.propertyName)` syntax.",
queryText = "CREATE CONSTRAINT ON (book:Book) ASSERT exists(book.isbn)",
optionalResultExplanation = "",
assertions = (p) => assertNodeConstraintExist("Book", "isbn")
)
}
@Test def drop_node_property_existence_constraint() {
generateConsole = false
prepareAndTestQuery(
title = "Drop node property existence constraint",
text = "By using +DROP+ +CONSTRAINT+, you remove a constraint from the database.",
queryText = "DROP CONSTRAINT ON (book:Book) ASSERT exists(book.isbn)",
optionalResultExplanation = "",
prepare = _ => executePreparationQueries(List("CREATE CONSTRAINT ON (book:Book) ASSERT exists(book.isbn)")),
assertions = (p) => assertNodeConstraintDoesNotExist("Book", "isbn")
)
}
@Test def play_nice_with_node_property_existence_constraint() {
generateConsole = false
prepareAndTestQuery(
title = "Create a node that complies with property existence constraints",
text = "Create a `Book` node with an existing `isbn` property.",
queryText = "CREATE (book:Book {isbn: '1449356265', title: 'Graph Databases'})",
optionalResultExplanation = "",
prepare = _ => executePreparationQueries(List("CREATE CONSTRAINT ON (book:Book) ASSERT exists(book.isbn)")),
assertions = (p) => assertNodeConstraintExist("Book", "isbn")
)
}
@Test def break_node_property_existence_constraint() {
generateConsole = false
engine.execute("CREATE CONSTRAINT ON (book:Book) ASSERT exists(book.isbn)")
testFailingQuery[ConstraintValidationException](
title = "Create a node that breaks a property existence constraint",
text = "Trying to create a `Book` node without an `isbn` property, given a property existence constraint on `:Book(isbn)`.",
queryText = "CREATE (book:Book {title: 'Graph Databases'})",
optionalResultExplanation = "In this case the node isn't created in the graph."
)
}
@Test def break_node_property_existence_constraint_by_removing_property() {
generateConsole = false
engine.execute("CREATE CONSTRAINT ON (book:Book) ASSERT exists(book.isbn)")
engine.execute("CREATE (book:Book {isbn: '1449356265', title: 'Graph Databases'})")
testFailingQuery[ConstraintValidationException](
title = "Removing an existence constrained node property",
text = "Trying to remove the `isbn` property from an existing node `book`, given a property existence constraint on `:Book(isbn)`.",
queryText = "MATCH (book:Book {title: 'Graph Databases'}) REMOVE book.isbn",
optionalResultExplanation = "In this case the property is not removed."
)
}
@Test def fail_to_create_node_property_existence_constraint() {
generateConsole = false
engine.execute("CREATE (book:Book {title: 'Graph Databases'})")
testFailingQuery[CypherExecutionException](
title = "Failure to create a node property existence constraint due to existing node",
text = "Create a constraint on the property `isbn` on nodes with the `Book` label when there already exists " +
" a node without an `isbn`.",
queryText = "CREATE CONSTRAINT ON (book:Book) ASSERT exists(book.isbn)",
optionalResultExplanation = "In this case the constraint can't be created because it is violated by existing " +
"data. We may choose to remove the offending nodes and then re-apply the constraint."
)
}
@Test def create_relationship_property_existence_constraint() {
testQuery(
title = "Create relationship property existence constraint",
text = "To create a constraint that makes sure that all relationships with a certain type have a certain property, use the `ASSERT exists(identifier.propertyName)` syntax.",
queryText = "CREATE CONSTRAINT ON ()-[like:LIKED]-() ASSERT exists(like.day)",
optionalResultExplanation = "",
assertions = (p) => assertRelationshipConstraintExist("LIKED", "day")
)
}
@Test def drop_relationship_property_existence_constraint() {
generateConsole = false
prepareAndTestQuery(
title = "Drop relationship property existence constraint",
text = "To remove a constraint from the database, use `DROP CONSTRAINT`.",
queryText = "DROP CONSTRAINT ON ()-[like:LIKED]-() ASSERT exists(like.day)",
optionalResultExplanation = "",
prepare = _ => executePreparationQueries(List("CREATE CONSTRAINT ON ()-[like:LIKED]-() ASSERT exists(like.day)")),
assertions = (p) => assertRelationshipConstraintDoesNotExist("LIKED", "day")
)
}
@Test def play_nice_with_relationship_property_existence_constraint() {
generateConsole = false
prepareAndTestQuery(
title = "Create a relationship that complies with property existence constraints",
text = "Create a `LIKED` relationship with an existing `day` property.",
queryText = "CREATE (user:User)-[like:LIKED {day: 'yesterday'}]->(book:Book)",
optionalResultExplanation = "",
prepare = _ => executePreparationQueries(List("CREATE CONSTRAINT ON ()-[like:LIKED]-() ASSERT exists(like.day)")),
assertions = (p) => assertRelationshipConstraintExist("LIKED", "day")
)
}
@Test def break_relationship_property_existence_constraint() {
generateConsole = false
engine.execute("CREATE CONSTRAINT ON ()-[like:LIKED]-() ASSERT exists(like.day)")
testFailingQuery[ConstraintValidationException](
title = "Create a relationship that breaks a property existence constraint",
text = "Trying to create a `LIKED` relationship without a `day` property, given a property existence constraint `:LIKED(day)`.",
queryText = "CREATE (user:User)-[like:LIKED]->(book:Book)",
optionalResultExplanation = "In this case the relationship isn't created in the graph."
)
}
@Test def break_relationship_property_existence_constraint_by_removing_property() {
generateConsole = false
engine.execute("CREATE CONSTRAINT ON ()-[like:LIKED]-() ASSERT exists(like.day)")
engine.execute("CREATE (user:User)-[like:LIKED {day: 'today'}]->(book:Book)")
testFailingQuery[ConstraintValidationException](
title = "Removing an existence constrained relationship property",
text = "Trying to remove the `day` property from an existing relationship `like` of type `LIKED`, given a property existence constraint `:LIKED(day)`.",
queryText = "MATCH (user:User)-[like:LIKED]->(book:Book) REMOVE like.day",
optionalResultExplanation = "In this case the property is not removed."
)
}
@Test def fail_to_create_relationship_property_existence_constraint() {
generateConsole = false
engine.execute("CREATE (user:User)-[like:LIKED]->(book:Book)")
testFailingQuery[CypherExecutionException](
title = "Failure to create a relationship property existence constraint due to existing relationship",
text = "Create a constraint on the property `day` on relationships with the `LIKED` type when there already " +
"exists a relationship without a property named `day`.",
queryText = "CREATE CONSTRAINT ON ()-[like:LIKED]-() ASSERT exists(like.day)",
optionalResultExplanation = "In this case the constraint can't be created because it is violated by existing " +
"data. We may choose to remove the offending relationships and then re-apply the constraint."
)
}
private def assertNodeConstraintDoesNotExist(labelName: String, propName: String) {
assert(getNodeConstraintIterator(labelName, propName).isEmpty, "Expected constraint iterator to be empty")
}
private def assertNodeConstraintExist(labelName: String, propName: String) {
assert(getNodeConstraintIterator(labelName, propName).size === 1)
}
private def assertRelationshipConstraintDoesNotExist(typeName: String, propName: String) {
assert(getRelationshipConstraintIterator(typeName, propName).isEmpty, "Expected constraint iterator to be empty")
}
private def assertRelationshipConstraintExist(typeName: String, propName: String) {
assert(getRelationshipConstraintIterator(typeName, propName).size === 1)
}
private def getNodeConstraintIterator(labelName: String, propName: String): Iterator[NodePropertyConstraint] = {
val statement = db.statement
val prop = statement.readOperations().propertyKeyGetForName(propName)
val label = statement.readOperations().labelGetForName(labelName)
statement.readOperations().constraintsGetForLabelAndPropertyKey(label, prop).asScala
}
private def getRelationshipConstraintIterator(typeName: String, propName: String): Iterator[RelationshipPropertyConstraint] = {
val statement = db.statement
val prop = statement.readOperations().propertyKeyGetForName(propName)
val relType = statement.readOperations().relationshipTypeGetForName(typeName)
statement.readOperations().constraintsGetForRelationshipTypeAndPropertyKey(relType, prop).asScala
}
}
|
HuangLS/neo4j
|
manual/cypher/cypher-docs/src/test/scala/org/neo4j/cypher/docgen/ConstraintsTest.scala
|
Scala
|
apache-2.0
| 13,754
|
class Parameterless {
def bar(i: Int = 1) = 1
bar()
bar()
this bar()
this.bar()
}
class Child1 extends Parameterless {
override def bar(i: Int) = 2
bar()
}
class Child2 extends Parameterless {
override def bar(i: Int) = 3
bar()
}
class Child3 extends Parameterless {
override def bar(i: Int) = 4
bar()
}
|
katejim/intellij-scala
|
testdata/changeSignature/fromScala/Parameterless_after.scala
|
Scala
|
apache-2.0
| 332
|
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.web.components
import net.liftweb.common.Failure
import com.normation.utils.HashcodeCaching
case class ComponentInitializationException(val failure:Failure) extends Exception(failure.messageChain) with HashcodeCaching
|
armeniaca/rudder
|
rudder-web/src/main/scala/com/normation/rudder/web/components/ComponentInitializationException.scala
|
Scala
|
gpl-3.0
| 1,942
|
package io.buoyant.telemetry.statsd
import com.timgroup.statsd.NoOpStatsDClient
import org.scalatest._
class StatsDStatsReceiverTest extends FunSuite {
test("creates a stats receiver") {
val stats = new StatsDStatsReceiver(new NoOpStatsDClient, 1.0d)
assert(stats.isInstanceOf[StatsDStatsReceiver])
}
test("stops StatsDClient on close") {
val statsDClient = new MockStatsDClient
val stats = new StatsDStatsReceiver(statsDClient, 1.0d)
assert(!statsDClient.stopped)
stats.close()
assert(statsDClient.stopped)
}
test("metric names are correctly rewritten") {
val names = Seq(
Seq("foo", "$", "", "bar/baz.stuff#word?who//what^when*where\\\\why", "#/^//\\\\huh?@$%^&"),
Seq("clnt", "zipkin-tracer", "service_creation", "service_acquisition_latency_ms"),
Seq("rt", "http", "client", "#/io.l5d.fs/default/path/http/1.1/GET/default", "request_latency_ms")
)
val newNames = names.map { StatsDStatsReceiver.mkName(_) }
val expected = Seq(
"foo._.bar.baz_stuff_word_who.what_when_where_why._._._huh______",
"clnt.zipkin_tracer.service_creation.service_acquisition_latency_ms",
"rt.http.client._.io_l5d_fs.default.path.http.1_1.GET.default.request_latency_ms"
)
assert(newNames == expected)
}
}
|
denverwilliams/linkerd
|
telemetry/statsd/src/test/scala/io/buoyant/telemetry/statsd/StatsDStatsReceiverTest.scala
|
Scala
|
apache-2.0
| 1,288
|
package matrix
import componentwork._,
componentswing.{IViewDrawS,IViewDrawF,ObjTexture},
javax.swing.Timer,
java.awt.event.{ActionListener, ActionEvent},
java.awt.{Point,Rectangle},
scala.collection.mutable.{Map => MMap},
scala.math.sqrt
class Camera(val name:MMap[String, Component]) extends Component {
//Multiinterface "ICamera"
val ICameraEx:ICameraFR = new ICameraFR; var ICameraIm = MMap[Component, ICameraS]()
interfaces += ("ICamera" -> (ICameraEx,"ICameraF","ICameraS", (c:Component, i:Interface) => {ICameraIm += (c -> i.asInstanceOf[ICameraS])}, (c:Component) => {ICameraIm -= c; false}, true))
//Interface "IViewDraw"
val IViewDrawEx:IViewDrawSR = new IViewDrawSR; var IViewDrawIm:IViewDrawF = null
interfaces += ("IViewDraw" -> (IViewDrawEx,"IViewDrawS","IViewDrawF", (c:Component, i:Interface) => {IViewDrawIm = i.asInstanceOf[IViewDrawF]}, (c:Component) => {IViewDrawIm = null; false}, false))
//Interface "ILayout"
val ILayoutEx:ILayoutSR = new ILayoutSR; var ILayoutIm:ILayoutF = null
interfaces += ("ILayout" -> (ILayoutEx,"ILayoutS","ILayoutF", (c:Component, i:Interface) => {ILayoutIm = i.asInstanceOf[ILayoutF]}, (c:Component) => {ILayoutIm = null; false}, false))
//Interface "IControl"
val IControlEx:IControlSR = new IControlSR; var IControlIm:IControlF = null
interfaces += ("IControl" -> (IControlEx,"IControlS","IControlF", (c:Component, i:Interface) => {IControlIm = i.asInstanceOf[IControlF]}, (c:Component) => {IControlIm = null; false}, false))
//Interfaces export realization
class ICameraFR extends ICameraF {
override def connection(c:Component) = {
IViewDrawEx.objects += (c -> new ObjTexture(null, new Point(0,0), new Point(0,0))) //New virtual object
}
override def disconnection(c:Component) = {
IViewDrawEx.objects -= c
if(IViewDrawIm != null){IViewDrawIm.refresh()}
}
}
class IViewDrawSR extends IViewDrawS {
override def connection(c:Component) = {
}
}
class ILayoutSR extends ILayoutS
class IControlSR extends IControlS {
override def setMovement(f:Boolean) = {movement = f; if(f){shotLoop.synchronized{shotLoop.notify()}}}
}
//Constructor/deconstructor
override def construction() = {
super.construction()
shotLoop.start()
shotTimer.start()
}
override def deconstruction() = {
super.deconstruction()
work = false; shotLoop.synchronized{shotLoop.notify()}
}
//Shot loop
private val shotTimer:Thread = new Thread(){
override def run() = {
while(work){
Thread.sleep(50)
shotLoop.synchronized{shotLoop.notify()}}
}
}
private val shotLoop:Thread = new Thread(){
override def run() = {
while(work){
this.synchronized{wait()}
if(work && movement){
var refresh = false
if(ILayoutIm != null){
//Update
ILayoutIm.centers.foreach((center:(Component, Dot)) => {
try{
val vo = center._1; val cp = center._2; val ocn = IViewDrawEx.objects(vo).center
val cnc = if((ocn.x != cp.x.asInstanceOf[Int])||(ocn.y != cp.y.asInstanceOf[Int])){
ocn.x = cp.x.asInstanceOf[Int]; ocn.y = cp.y.asInstanceOf[Int]
true}
else{
false}
val(tcf,tx,ts) = ICameraIm(vo).getTexture
if(tx == null){
IViewDrawEx.objects(vo).texture = if(IViewDrawEx.objects(vo).texture != null){refresh = true; null}else{null}}
else{
if(tcf || cnc){
IViewDrawEx.objects(vo).texture = tx
val s = ts / 2
IViewDrawEx.objects(vo).coordinates.x = (cp.x - s).asInstanceOf[Int]
IViewDrawEx.objects(vo).coordinates.y = (cp.y - s).asInstanceOf[Int]
refresh = true}}}
catch{
case ex:NoSuchElementException => {/*do nothing*/}}
})}
          //Refresh if needed
if((IViewDrawIm != null) && refresh){IViewDrawIm.refresh()}}}
}
}
//Fields
private var work = true
private var movement = false
//Service code
construction()
}
|
AlexCAB/whfzf
|
matrix/Camera.scala
|
Scala
|
mit
| 4,290
|
/*
* Copyright 2014 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.security
import org.apache.accumulo.core.security.Authorizations
/**
* Default implementation of the AuthorizationsProvider that doesn't provide any authorizations
*/
class DefaultAuthorizationsProvider extends AuthorizationsProvider {
var authorizations: Authorizations = new Authorizations
override def getAuthorizations: Authorizations = authorizations
override def configure(params: java.util.Map[String, java.io.Serializable]) {
val authString = authsParam.lookUp(params).asInstanceOf[String]
if (authString == null || authString.isEmpty)
authorizations = new Authorizations()
else
authorizations = new Authorizations(authString.split(","):_*)
}
}
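// Hedged sketch, not part of the original source: exercises configure with a raw
// parameter map. The "auths" key is an assumption about what authsParam looks up;
// the object name is illustrative only.
object DefaultAuthorizationsProviderExample extends App {
  val provider = new DefaultAuthorizationsProvider
  val params = new java.util.HashMap[String, java.io.Serializable]()
  params.put("auths", "admin,user")
  provider.configure(params)
  println(provider.getAuthorizations) // expected to contain the admin and user tokens
}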
|
mmatz-ccri/geomesa
|
geomesa-security/src/main/scala/org/locationtech/geomesa/security/DefaultAuthorizationsProvider.scala
|
Scala
|
apache-2.0
| 1,339
|
/*
* Copyright 2016 Frugal Mechanic (http://frugalmechanic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.common
import org.scalajs.dom.raw._
sealed abstract class ElementType[+T <: Element](val name: String)
object ElementType {
implicit case object Anchor extends ElementType[HTMLAnchorElement]("a")
implicit case object Div extends ElementType[HTMLDivElement]("div")
implicit case object IFrame extends ElementType[HTMLIFrameElement]("iframe")
implicit case object Input extends ElementType[HTMLInputElement]("input")
implicit case object Option extends ElementType[HTMLOptionElement]("option")
implicit case object Span extends ElementType[HTMLSpanElement]("span")
// implicit case object TH extends ElementType[HTMLTableHeaderCellElement]("th")
implicit case object TD extends ElementType[HTMLTableCellElement]("td")
implicit case object TR extends ElementType[HTMLTableRowElement]("tr")
}
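// Hedged sketch, not part of the original source: resolves the implicit evidence
// to recover the tag name for an element type. How fm-common actually consumes
// these instances (e.g. in an element-creation helper) is an assumption.
object ElementTypeExample {
  def tagNameOf[T <: Element](implicit et: ElementType[T]): String = et.name
  val divTag: String = tagNameOf[HTMLDivElement] // "div", via the implicit Div case object
}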
|
frugalmechanic/fm-common
|
js/src/main/scala/fm/common/ElementType.scala
|
Scala
|
apache-2.0
| 1,449
|
package cromwell.core.io
import java.util.concurrent.TimeoutException
case class IoTimeout(command: IoCommand[_]) extends TimeoutException(s"The I/O operation $command timed out")
|
ohsu-comp-bio/cromwell
|
core/src/main/scala/cromwell/core/io/IoTimeout.scala
|
Scala
|
bsd-3-clause
| 182
|
/**
* Copyright © 2012 Gustav van der Merwe
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
object Nat { def apply(natural: Objct) = Judgement("nat", List(natural), PostFix) }
object Sum { def apply(a: Objct, b: Objct, sum: Objct) = Judgement("sum", List(a, b, sum)) }
case object Zero extends Objct
case class Succ(pre: Objct) extends Objct {
override def matchVarObj(e: EnvMap, o: Objct): EnvMap = {
o match {
      case Succ(n) ⇒ pre.matchVarObj(e, n) // ++ e // TODO: determine whether merging with e is needed here
case _ ⇒ throw ObjctMismatch
}
}
override def vars = pre.vars
override def replaceVars(e: EnvMap): Objct = Succ(pre.replaceVars(e))
}
object Naturals extends ObjctDef {
def definition = Set(
    // Church encoding of naturals
Axiom(Nat(Zero)),
InferenceRule(Set(Nat(Var("a"))),
Nat(Succ(Var("a"))))
)
def rules = naturalEquality ++ naturalParity ++ naturalAddition ++ naturalMax
  // breaks on "if succ(a) nat then a nat" because it builds the theorem up rather than reducing it
// ++ naturalsTheorems
def naturalEquality = Set(
// equality definition
Axiom(Eq(Zero, Zero)),
InferenceRule(Set(Eq(Var("a"), Var("b"))),
Eq(Succ(Var("a")), Succ(Var("b"))))
)
def naturalParity = Set(
// even and odd numbers
Axiom(Judgement("even", List(Zero))),
InferenceRule(Set(Judgement("odd", List(Var("a")))),
Judgement("even", List(Succ(Var("a"))))),
InferenceRule(Set(Judgement("even", List(Var("a")))),
Judgement("odd", List(Succ(Var("a")))))
)
def naturalAddition = Set(
// sum definition (uniqueness not proved/shown here)
InferenceRule(Set(Nat(Var("a"))),
Sum(Zero, Var("a"), Var("a"))),
InferenceRule(Set(Nat(Var("a")), Nat(Var("b")), Nat(Var("c")), Sum(Var("a"), Var("b"), Var("c"))),
Sum(Succ(Var("a")), Var("b"), Succ(Var("c"))))
)
def naturalMax = Set(
// max of two numbers is third
InferenceRule(Set(Nat(Var("a"))),
Judgement("max", List(Var("a"), Zero, Var("a")))),
InferenceRule(Set(Nat(Var("a"))),
Judgement("max", List(Zero, Var("a"), Var("a")))),
InferenceRule(Set(Nat(Var("a")), Nat(Var("b")), Nat(Var("c")), Judgement("max", List(Var("a"), Var("b"), Var("c")))),
Judgement("max", List(Succ(Var("a")), Succ(Var("b")), Succ(Var("c")))))
)
def naturalsTheorems = Set(
// equality is reflexive
InferenceRule(Set(Nat(Var("a"))),
Eq(Var("a"), Var("a"))),
// if succ(a) nat then a nat
InferenceRule(Set(Nat(Succ(Var("a")))),
Nat(Var("a")))
)
}
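// Hedged sketch, not part of the original source: the derivation target for
// 1 + 1 = 2 written with the constructors defined above. Whether and how a prover
// discharges it against Naturals.definition and rules is outside this file.
object NaturalsExample {
  val one  = Succ(Zero)
  val two  = Succ(one)
  val goal = Sum(one, one, two) // sum(succ(zero), succ(zero), succ(succ(zero)))
}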
|
gvdm/proof-system
|
src/Naturals.scala
|
Scala
|
gpl-3.0
| 3,159
|
package com.wix.fax.interfax.sl
case class Credentials(username: String, password: String)
|
wix/libfax
|
libfax-interfaxsl/src/main/scala/com/wix/fax/interfax/sl/Credentials.scala
|
Scala
|
apache-2.0
| 93
|
import scala.annotation.static
class C {
val a: Int = 3
class D
object D {
@static def foo: Int = a * a // error
}
}
@main
def Test =
val c = new C
println(c.D.foo)
|
dotty-staging/dotty
|
tests/neg/i11100.scala
|
Scala
|
apache-2.0
| 183
|
package cromwell.backend.validation.exception
import common.exception.MessageAggregation
case class ValidationAggregatedException(override val exceptionContext: String,
override val errorMessages: Traversable[String]) extends MessageAggregation
|
ohsu-comp-bio/cromwell
|
backend/src/main/scala/cromwell/backend/validation/exception/ValidationAggregatedException.scala
|
Scala
|
bsd-3-clause
| 288
|
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.index
import java.util.AbstractMap.SimpleEntry
import org.apache.accumulo.core.data.{Key, Value}
import org.apache.hadoop.io.Text
import org.geotools.data.Query
import org.geotools.factory.CommonFactoryFinder
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo.TestWithDataStore
import org.locationtech.geomesa.features.SerializationOption.SerializationOptions
import org.locationtech.geomesa.features.{ScalaSimpleFeature, SerializationType, SimpleFeatureSerializers}
import org.locationtech.geomesa.utils.iterators.SortingSimpleFeatureIterator
import org.opengis.filter.sort.SortBy
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class QueryPlannerTest extends Specification with Mockito with TestWithDataStore {
override val spec = "*geom:Point,dtg:Date,s:String"
val sf = new ScalaSimpleFeature("id", sft)
sf.setAttributes(Array[AnyRef]("POINT(45 45)", "2014/10/10T00:00:00Z", "string"))
val sf2 = new ScalaSimpleFeature("id2", sft)
sf2.setAttributes(Array[AnyRef]("POINT(45 45)", "2014/10/10T00:00:00Z", "astring"))
addFeatures(Seq(sf, sf2))
val planner = ds.queryPlanner
"adaptStandardIterator" should {
"return a LazySortedIterator when the query has an order by clause" >> {
val query = new Query(sft.getTypeName)
query.setSortBy(Array(SortBy.NATURAL_ORDER))
val result = planner.runQuery(sft, query)
result must beAnInstanceOf[SortingSimpleFeatureIterator]
}
"not return a LazySortedIterator when the query does not have an order by clause" >> {
val query = new Query(sft.getTypeName)
query.setSortBy(null)
val result = planner.runQuery(sft, query)
result must not (beAnInstanceOf[SortingSimpleFeatureIterator])
}
"decode and set visibility properly" >> {
import org.locationtech.geomesa.security._
val query = new Query(sft.getTypeName)
planner.configureQuery(query, sft) // have to do manually
val visibilities = Array("", "USER", "ADMIN")
val expectedVis = visibilities.map(vis => if (vis.isEmpty) None else Some(vis))
val serializer = SimpleFeatureSerializers(sft, SerializationType.KRYO, SerializationOptions.withoutId)
val value = new Value(serializer.serialize(sf))
val kvs = visibilities.zipWithIndex.map { case (vis, ndx) =>
val key = new Key(new Text(ndx.toString), new Text("cf"), new Text("cq"), new Text(vis))
new SimpleEntry[Key, Value](key, value)
}
val expectedResult = kvs.map(RecordIndex.entriesToFeatures(sft, sft)).map(_.visibility)
expectedResult must haveSize(kvs.length)
expectedResult mustEqual expectedVis
}
"sort with a projected SFT" >> {
val ff = CommonFactoryFinder.getFilterFactory2
val query = new Query(sft.getTypeName)
query.setSortBy(Array(SortBy.NATURAL_ORDER))
query.setProperties(List(ff.property("s")))
val result = planner.runQuery(sft, query).toList
result.map(_.getID) mustEqual Seq("id", "id2")
forall(result)(r => r.getAttributeCount mustEqual 2) // geom always gets included
result.map(_.getAttribute("s")) must containTheSameElementsAs(Seq("string", "astring"))
}
}
}
|
nagavallia/geomesa
|
geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/index/QueryPlannerTest.scala
|
Scala
|
apache-2.0
| 3,835
|