code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
package com.twitter.sbt
import java.io.{BufferedReader, FileReader, FileWriter, File}
import sbt._
object FileFilter {
  /**
   * Perform configure-style `@key@` substitution on a file as it's being copied.
   *
   * Every occurrence of `@token@` in the source text is replaced with the value
   * mapped to `token` in `filters`. A line separator is written after every
   * line, so the destination always ends with a trailing newline.
   *
   * Fix: the reader and writer are now closed in `finally` blocks so they are
   * released even when a read/write fails part-way through the copy.
   *
   * @param source      the file to read from
   * @param destination the file to (over)write with the substituted content
   * @param filters     token -> replacement pairs (tokens are written without the surrounding `@`)
   */
  def filter(source: File, destination: File, filters: Map[String, String]) {
    val in = new BufferedReader(new FileReader(source))
    try {
      val out = new FileWriter(destination)
      try {
        var line = in.readLine()
        while (line ne null) {
          // Apply every substitution to the current line before emitting it.
          filters.foreach { case (token, value) =>
            line = line.replace("@" + token + "@", value)
          }
          out.write(line)
          out.write("\n")
          line = in.readLine()
        }
      } finally {
        out.close() // always release the writer, even on failure
      }
    } finally {
      in.close() // always release the reader, even if the writer could not be opened
    }
  }
}
| travisbrown/zipkin | project/FileFilter.scala | Scala | apache-2.0 | 670 |
package test
package sample
package leader
/**
* @author Samira Tasharofi (tasharo1@illinois.edu)
*/
import akka.actor.{ ActorSystem, Actor, Props, ActorRef }
import bita.{ ScheduleEnvelope, LogicalMessage, EventID }
import java.util.concurrent.CountDownLatch
import scala.collection.mutable.HashSet
import akka.util.Timeout
import akka.bita.pattern.Patterns._
import scala.collection.mutable.ArrayBuffer
/**
* The implementation of buggy leader election algorithm described in
* http://www.springerlink.com/content/bpdeff986nkfwgjp/fulltext.pdf
*/
// Protocol messages exchanged between the nodes of the leader election algorithm.
case class Init(candidates: Array[ActorRef])  // supplies a node with the full candidate set
case class Capture(candidate: ActorRef, candidatePriority: Int, candidateCaptured: Array[ActorRef]) // the sender tries to capture the receiver
case class Elect(candidate: ActorRef)         // the sender announces `candidate` as (believed) leader
case class Accept(candidate: ActorRef, candidateCaptured: Array[ActorRef]) // the sender surrenders, handing over its captured set
case class Down(candidate: ActorRef)          // notification that `candidate` went down
case object Kill                              // instructs a node to go down and broadcast Down
/** The lifecycle states a node can be in during the election. */
object State extends Enumeration {
  type State = Value
  val CANDIDATE, CAPTURED, SURRENDERED, ELECTED, KILLED = Value
}
/**
 * A node taking part in the (deliberately buggy) leader election protocol.
 * The bug manifests when a node that already considers itself ELECTED receives
 * an `Elect` from another node — two leaders coexist and `bugLatch` is counted down.
 *
 * @param priority   this node's election priority; on equal capture counts the node
 *                   with the LARGER priority value surrenders (see Capture handling)
 * @param bugLatch   counted down once the two-leaders bug is observed
 * @param simulator  actor notified via `Elected` whenever this node becomes leader
 * @param electLatch counted down when an election outcome is seen
 */
class NodeActor(priority: Int, bugLatch: CountDownLatch, simulator: ActorRef, electLatch: CountDownLatch) extends Actor {
  import State._

  var leader: ActorRef = _                         // currently accepted leader (null when unknown or down)
  var state: State = KILLED                        // nodes start out KILLED until they receive Init
  var candidates = ArrayBuffer[ActorRef]()         // every node participating in the election
  var iterationCaptured = ArrayBuffer[ActorRef]()  // nodes captured by this candidate in the current round
  var downNodes = HashSet[ActorRef]()              // nodes currently believed to be down
  var countedDownElectLatch = false                // guards electLatch against repeated countDown in the Elect handler

  /** Switches to `newState`, reporting to the simulator when this node got elected. */
  private def changeState(newState: State) {
    state = newState
    if (state == ELECTED) {
      simulator ! Elected(self, candidates)
    }
    log("update state " + priority + ": " + state)
  }

  override def receive() = {
    // Initialization: learn the candidate set and become a CANDIDATE.
    case Init(candidates: Array[ActorRef]) => {
      candidates.foreach(this.candidates.+=(_))
      changeState(CANDIDATE)
      sender ! 'OK
    }
    // A killed node ignores everything (only logs) until re-initialized.
    case msg @ _ if (state == KILLED) => log("killed received message " + msg)
    // it is called when the actor starts its execution
    case 'Start => {
      broadcast("capture")
    }
    // Another candidate tries to capture us.
    case Capture(candidate, candidatePriority, candidateCaptured) => {
      if (downNodes.contains(candidate))
        downNodes.remove(candidate)
      state match {
        case CANDIDATE => {
          // Surrender when the other candidate captured more nodes, or — on a
          // tie — when our own priority value is the larger one.
          if (candidateCaptured.size > iterationCaptured.size
            || (candidateCaptured.size == iterationCaptured.size && priority > candidatePriority)) {
            // we lose
            candidate ! Accept(self, iterationCaptured.toArray[ActorRef])
            changeState(CAPTURED)
            leader = candidate
          }
        }
        case ELECTED => candidate ! Elect(self) // we already won: tell the sender who the leader is
        case CAPTURED => ()
        case SURRENDERED => ()
      }
    }
    // Another node surrendered to us, handing over the set it had captured.
    case Accept(candidate, candidateCaptured) => {
      if (downNodes.contains(candidate))
        downNodes.remove(candidate)
      state match {
        case CANDIDATE => {
          iterationCaptured.+=(candidate)
          candidateCaptured.foreach(iterationCaptured.+=(_))
          checkMajority()
        }
        case CAPTURED => leader ! Accept(candidate, candidateCaptured) // forward to our captor
        case SURRENDERED => ()
        case ELECTED => candidate ! Elect(self)
      }
    }
    // Somebody claims to be the leader.
    case Elect(candidate) => {
      if (state == ELECTED) {
        // Two nodes believe they are the leader at the same time: the bug under test.
        bugLatch.countDown()
        logError("Bug found!!!!!!!! ")
      } else {
        if (downNodes.contains(candidate))
          downNodes.remove(candidate)
        leader = candidate
        changeState(SURRENDERED)
        iterationCaptured.clear()
      }
      if (!countedDownElectLatch) {
        electLatch.countDown()
        countedDownElectLatch = true
      }
      println("receive a leader")
    }
    // A node announced that it went down.
    case Down(candidate) => {
      if (state == KILLED) log(" I am killed")
      if (!downNodes.contains(candidate))
        downNodes.add(candidate);
      if (leader == candidate)
        leader = null;
      if (state == CANDIDATE) {
        checkMajority()
      } else if (leader == null
        && (state == SURRENDERED || state == CAPTURED)) {
        // Our leader is gone: restart capturing.
        broadcast("capture")
        checkMajority()
      }
    }
    // Take this node out of the election and notify the others.
    case Kill => {
      changeState(KILLED)
      broadcast("down")
      println("killed")
    }
  }

  /** Sends `Capture`, `Elect` or `Down` (selected by `msg`) to every other candidate. */
  def broadcast(msg: String) {
    try {
      msg match {
        case "capture" => candidates.filterNot(_ == self).foreach(_ ! Capture(self, priority, iterationCaptured.toArray[ActorRef]))
        case "elect" => candidates.filterNot(_ == self).foreach(_ ! Elect(self))
        case "down" => candidates.filterNot(_ == self).foreach(_ ! Down(self))
      }
    } catch {
      case ex: Throwable => {
        println(ex.printStackTrace())
        println(iterationCaptured)
        if (candidates == null)
          println("candidate is null")
      }
    }
  }

  /**
   * Declares this node the leader when it captured a majority of all candidates
   * (or everyone not captured is down); otherwise it stays a CANDIDATE.
   */
  def checkMajority() {
    val numCaptured = iterationCaptured.size + 1 // + 1 counts this node itself
    val numCandidates = candidates.size
    val numdownNodes = downNodes.size
    if ((numCaptured > numCandidates / 2)
      || (numCaptured + numdownNodes == numCandidates)) {
      log("I am the leader")
      changeState(ELECTED)
      /**
       * Code to automatically detect deadlocks in Erlang's LE
       */
      leader = self
      broadcast("elect")
      // NOTE(review): unlike the Elect handler, this countDown is NOT guarded by
      // countedDownElectLatch — confirm whether the double count-down is intended.
      electLatch.countDown()
      println("elected as the leader")
    } else {
      changeState(CANDIDATE)
    }
  }

  /** Logging is disabled; kept as a hook for debugging. */
  private def log(msg: String) {
  }

  /** Prints an error message prefixed with this node's priority. */
  private def logError(msg: String) {
    print("Error! " + this.priority + ": " + msg)
  }
}
case class Elected(node: ActorRef, nodes: ArrayBuffer[ActorRef]) // sent to the simulator when `node` becomes leader
/**
 * Drives the election: in scenario 1, the first elected leader is killed and
 * re-initialized once; every election outcome decrements `leaderLatch`.
 */
class Simulator(scenario: Int, leaderLatch: CountDownLatch) extends Actor {
  implicit val timeout = Timeout(10000)
  var round = 0

  override def receive() = {
    // First leader of scenario 1: kill it, bring it back, and start a new round.
    case Elected(node, nodes) if scenario == 1 && round == 0 =>
      node ! Kill
      node ! Init(nodes.toArray[ActorRef])
      node ! 'Start
      round += 1
      leaderLatch.countDown
    // Any other election result just releases the latch.
    case Elected(_, _) =>
      leaderLatch.countDown
  }
}
| samira-t/Bita | src/test/scala/sample/LeaderElection.scala | Scala | apache-2.0 | 5,839 |
package org.openurp.edu.eams.teach.lesson.service
/** Enumerates the possible violation outcomes of a lesson operation check. */
object LessonOperateViolation extends Enumeration {
  val NO_VIOLATION = new LessonOperateViolation()
  val LESSON_VIOLATION = new LessonOperateViolation()
  val PERMIT_VIOLATION = new LessonOperateViolation()

  /** Value subclass so enumeration values carry their own concrete type. */
  class LessonOperateViolation extends Val

  import scala.language.implicitConversions
  /** Narrows a generic enumeration `Value` back to `LessonOperateViolation`. */
  implicit def convertValue(v: Value): LessonOperateViolation = v.asInstanceOf[LessonOperateViolation]
}
| openurp/edu-eams-webapp | core/src/main/scala/org/openurp/edu/eams/teach/lesson/service/LessonOperateViolation.scala | Scala | gpl-3.0 | 458 |
package com.mtraina.bookservice
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.marshallers.xml.ScalaXmlSupport._
import akka.http.scaladsl.server.Directives._
import akka.stream.ActorMaterializer
import spray.json.DefaultJsonProtocol
/** Provides spray-json marshalling for the routes. */
trait JsonSupport extends SprayJsonSupport with DefaultJsonProtocol {
  // jsonFormat5 implies Book is a case class with exactly five fields.
  implicit val bookFormat = jsonFormat5(Book)
}
/** Routing layer of the book service; mixes in JSON marshalling support. */
trait Service extends JsonSupport {
  // Backing store used by the /books route.
  val persistence = new Persistence

  // Complete route tree of the service (order of the ~ alternatives matters).
  val route =
    // GET / — plain HTML greeting
    path(""){
      get {
        complete {
          <h1>hello book service</h1>
        }
      }
    } ~
    // /query_params?id=..&isbn=.. — echoes the two required query parameters
    path("query_params"){
      parameters('id, 'isbn) { (id, isbn) =>
        complete(s"id: $id and isbn: $isbn")
      }
    } ~
    // /path_params/<int> — echoes the integer path segment
    pathPrefix("path_params"){
      path(IntNumber) { id =>
        complete(s"id: $id")
      }
    } ~
    // GET /books — returns the persisted book (marshalled via JsonSupport)
    path("books"){
      get {
        complete {
          persistence.book()
        }
      }
    } ~
    // /balls (all) and /balls/<int> (even/odd)
    pathPrefix("balls") {
      pathEnd {
        complete("all balls!")
      } ~
      path(IntNumber) { int =>
        complete(if (int % 2 == 0) "even ball" else "odd ball")
      }
    }
}
/** Entry point: binds the service routes on localhost:8080. */
object Boot extends App with Service {
  implicit val system = ActorSystem("book-service-system")
  implicit val materializer = ActorMaterializer()
  // NOTE(review): the binding future's failure case is not handled — if port 8080
  // is already taken the error is silently dropped; consider an onComplete log.
  val bindingFuture = Http().bindAndHandle(route, "localhost", 8080)
}
package de.ust.skill.parser
import java.io.File
import java.io.FileNotFoundException
import java.lang.Long
import java.nio.file.FileSystems
import scala.annotation.migration
import scala.annotation.tailrec
import scala.collection.JavaConversions.bufferAsJavaList
import scala.collection.JavaConversions.seqAsJavaList
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashSet
import scala.collection.mutable.ListBuffer
import scala.util.parsing.combinator.RegexParsers
import de.ust.skill.ir
import de.ust.skill.ir.Comment
import de.ust.skill.ir.Hint
import de.ust.skill.ir.Restriction
import de.ust.skill.ir.TypeContext
import de.ust.skill.ir.restriction.AbstractRestriction
import de.ust.skill.ir.restriction.ConstantLengthPointerRestriction
import de.ust.skill.ir.restriction.FloatDefaultRestriction
import de.ust.skill.ir.restriction.FloatRangeRestriction
import de.ust.skill.ir.restriction.IntDefaultRestriction
import de.ust.skill.ir.restriction.IntRangeRestriction
import de.ust.skill.ir.restriction.MonotoneRestriction
import de.ust.skill.ir.restriction.NameDefaultRestriction
import de.ust.skill.ir.restriction.NonNullRestriction
import de.ust.skill.ir.restriction.SingletonRestriction
import de.ust.skill.ir.restriction.StringDefaultRestriction
import de.ust.skill.ir.restriction.UniqueRestriction
import de.ust.skill.ir.restriction.DefaultRestriction
import de.ust.skill.ir.restriction.CodingRestriction
/**
* Converts a character stream into an AST using parser combinators.
*
* Grammar as explained in the paper.
*/
abstract class AbstractFileParser[Decl](
    protected val delimitWithUnderscore : Boolean,
    protected val delimitWithCamelCase : Boolean) extends RegexParsers {

  /** Wraps a raw string into a Name, honouring the configured delimiter style. */
  def stringToName(name : String) : Name = new Name(name, delimitWithUnderscore, delimitWithCamelCase)

  /** The file currently being parsed; used to resolve relative include paths. */
  var currentFile : File = _

  /**
   * Usual identifiers including arbitrary unicode characters.
   */
  protected def id = positioned[Name]("""[a-zA-Z_\\u007f-\\uffff][\\w\\u007f-\\uffff]*""".r ^^ stringToName)

  /**
   * Skill integer literals (hexadecimal form must be tried first).
   */
  protected def int : Parser[Long] = hexInt | generalInt
  protected def hexInt : Parser[Long] = "0x" ~> ("""[0-9a-fA-F]*""".r ^^ { i β Long.parseLong(i, 16) })
  protected def generalInt : Parser[Long] = """-?[0-9]*\\.*""".r >> { i β
    try {
      success(Long.parseLong(i))
    } catch {
      case e : Exception β failure("not an int")
    }
  }

  /**
   * Floating point literal, as taken from the JavaTokenParsers definition.
   *
   * @note if the target can be an integer as well, the integer check has to come first
   */
  def floatingPointNumber : Parser[Double] = """-?(\\d+(\\.\\d*)?|\\d*\\.\\d+)([eE][+-]?\\d+)?[fFdD]?""".r ^^ { _.toDouble }

  /**
   * We use string literals to encode paths. If someone really calls a file ", someone should beat him hard.
   */
  protected def string = "\\"" ~> """[^"]*""".r <~ "\\""

  /**
   * Files may start with an arbitrary number of lines starting with '#'.
   * These lines serve as true comments and do not affect the specification.
   */
  protected def headComment = rep("""^#[^\\r\\n]*[\\r\\n]*""".r)

  /**
   * Includes are just strings containing relative paths to *our* path.
   */
  protected def includes = ("include" | "with") ~> rep(
    string ^^ { s β new File(currentFile.getParentFile, s).getAbsolutePath });

  /**
   * Creates a shorthand (typedef) for a more complex type.
   */
  protected def typedef = opt(comment) ~ ("typedef" ~> id) ~ rep(fieldRestriction | hint) ~ fieldType <~ ";" ^^ {
    case c ~ name ~ specs ~ target β Typedef(
      currentFile,
      name,
      new Description(
        c.getOrElse(Comment.NoComment.get),
        specs.collect { case r : Restriction β r },
        specs.collect { case h : Hint β h }),
      target)
  };

  /**
   * A field with language custom properties. This field will almost behave like an auto field.
   */
  protected def customField(c : Comment) = ("custom" ~> id) ~ customFiledOptions ~ string ~! id ^^ {
    case lang ~ opts ~ t ~ n β new Customization(c, lang, opts, t, n)
  }

  // NOTE(review): "customFiledOptions" looks like a typo for "customFieldOptions";
  // kept as-is because renaming a protected member could break subclasses outside this file.
  protected def customFiledOptions : Parser[Map[Name, List[String]]] = (
    rep("!" ~> id ~!
      (("(" ~> rep(string) <~ ")") | opt(string) ^^ { s β s.toList })) ^^ {
      s β s.map { case n ~ args β n -> args }.toMap
    })

  /**
   * Unfortunately, the straight forward definition of this would lead to recursive types, thus we disallowed ADTs as
   * arguments to maps. Please note that this does not prohibit formulation of any structure, although it might
   * require the introduction of declarations, which essentially rename another more complex type. This has also an
   * impact on the way, data is and can be stored.
   */
  protected def fieldType = ((("map" | "set" | "list") ~! ("<" ~> repsep(baseType, ",") <~ ">")) ^^ {
    case "map" ~ l β {
      if (1 >= l.size)
        throw ParseException(s"Did you mean set<${l.mkString}> instead of map?")
      else
        new de.ust.skill.parser.MapType(l)
    }
    case "set" ~ l β {
      if (1 != l.size)
        throw ParseException(s"Did you mean map<${l.mkString}> instead of set?")
      else
        new de.ust.skill.parser.SetType(l.head)
    }
    case "list" ~ l β {
      if (1 != l.size)
        throw ParseException(s"Did you mean map<${l.mkString}> instead of list?")
      else
        new de.ust.skill.parser.ListType(l.head)
    }
  }
    // we use a backtracking approach here, because it simplifies the AST generation
    | arrayType
    | baseType)

  // Fixed-size array T[n] must be tried before variable-size array T[].
  protected def arrayType = ((baseType ~ ("[" ~> int <~ "]")) ^^ { case n ~ arr β new ConstantLengthArrayType(n, arr) }
    | (baseType <~ ("[" ~ "]")) ^^ { n β new ArrayType(n) })

  protected def baseType = id ^^ { new BaseType(_) }

  /**
   * Comments are first class citizens of our language, because we want to emit them in the output binding.
   *
   * The intermediate representation is without the leading "/°" and trailing "°/" (where ° = *)
   */
  protected def comment : Parser[Comment] = """/\\*+""".r ~> ("""[\\S\\s]*?\\*/""".r) ^^ { s β
    // scan s to split it into pieces
    @inline def scan(last : Int) : ListBuffer[String] = {
      var begin = 0;
      var next = 0;
      // we have to insert a line break, because the whitespace handling may have removed one
      var r = ListBuffer[String]("\\n")
      while (next < last) {
        s.charAt(next) match {
          case ' ' | '\\t' | 0x0B | '\\f' | '\\r' β
            if (begin != next)
              r.append(s.substring(begin, next))
            begin = next + 1;
          case '\\n' β
            if (begin != next)
              r.append(s.substring(begin, next))
            r.append("\\n")
            begin = next + 1;
          case _ β
        }
        next += 1
      }
      if (begin != last) r.append(s.substring(begin, last))
      r
    }
    val ws = scan(s.size - 2)
    val r = new Comment
    // Consume the word list, splitting the text into an untagged prefix and tagged (@tag) sections.
    @tailrec def parse(ws : ListBuffer[String], text : ListBuffer[String]) : Unit =
      if (ws.isEmpty) r.init(text)
      else (ws.head, ws.tail) match {
        case ("\\n", ws) if (ws.isEmpty) β r.init(text)
        case ("\\n", ws) if (ws.head == "*") β parse(ws.tail, text)
        case ("\\n", ws) β parse(ws, text)
        case (w, ws) if w.matches("""\\*?@.+""") β
          val end = if (w.contains(":")) w.lastIndexOf(':') else w.size
          val tag = w.substring(w.indexOf('@') + 1, end).toLowerCase
          r.init(text, tag); parse(ws, ListBuffer[String]())
        case (w, ws) β text.append(w); parse(ws, text)
      }
    parse(ws, ListBuffer[String]())
    r
  }

  /**
   * restrictions as defined in the paper.
   *
   * @note the implementation is more liberal than the specification of the specification language, because some illegal arguments are dropped
   */
  protected def typeRestriction : Parser[Restriction] = "@" ~> id >> {
    _.lowercase match {
      case "unique" β opt("(" ~ ")") ^^ { _ β new UniqueRestriction }
      case "singleton" β opt("(" ~ ")") ^^ { _ β new SingletonRestriction }
      case "monotone" β opt("(" ~ ")") ^^ { _ β new MonotoneRestriction }
      case "abstract" β opt("(" ~ ")") ^^ { _ β new AbstractRestriction }
      case "default" β "(" ~> defaultRestrictionParameter <~ ")" ^^ { _ β null }
      case unknown β opt("(" ~> repsep((int | string | floatingPointNumber), ",") <~ ")") ^^ { arg β
        ParseException(s"$unknown${
          arg.mkString("(", ", ", ")")
        } is either not supported or an invalid restriction name")
      }
    }
  }

  protected def fieldRestriction : Parser[Restriction] = "@" ~> (id ^^ { _.lowercase }) >> fieldRestrictionInner;

  // NOTE(review): "deflaut…" is a typo for "default…"; kept as-is because renaming
  // a protected member could break subclasses outside this file.
  protected def deflautRestrictionInnerArgument : Parser[DefaultRestriction] = (
    int ^^ { new IntDefaultRestriction(_) }
    | string ^^ { new StringDefaultRestriction(_) }
    | floatingPointNumber ^^ { new FloatDefaultRestriction(_) }
    | repsep(id, "." | "::") ^^ { names β new NameDefaultRestriction(names.map(_.ir)) })

  // range(low, high[, "inclusive"|"exclusive", "inclusive"|"exclusive"]) for int and float ranges.
  protected def rangeRestrictionInnerArgument = (
    int ~ ("," ~> int) ~ opt("," ~> string ~ ("," ~> string)) ^^ {
      case low ~ high ~ None β new IntRangeRestriction(low, high, true, true)
      case low ~ high ~ Some(l ~ h) β new IntRangeRestriction(low, high, "inclusive" == l, "inclusive" == h)
    }
    |
    floatingPointNumber ~ ("," ~> floatingPointNumber) ~ opt("," ~> string ~ ("," ~> string)) ^^ {
      case low ~ high ~ None β new FloatRangeRestriction(low, high, true, true)
      case low ~ high ~ Some(l ~ h) β new FloatRangeRestriction(low, high, "inclusive" == l, "inclusive" == h)
    })

  /** Dispatches on the (lower-cased) restriction name to the parser for its argument list. */
  protected def fieldRestrictionInner(name : String) : Parser[Restriction] = {
    name match {
      case "nonnull" β opt("(" ~ ")") ^^ { _ β new NonNullRestriction }
      case "default" β "(" ~> deflautRestrictionInnerArgument <~ ")"
      case "min" β "(" ~> minRestrictionInner <~ ")"
      case "max" β "(" ~> maxRestrictionInner <~ ")"
      case "range" β "(" ~> rangeRestrictionInnerArgument <~ ")"
      case "coding" β ("(" ~> string <~ ")") ^^ { s β new CodingRestriction(s) }
      case "constantlengthpointer" β opt("(" ~ ")") ^^ { _ β new ConstantLengthPointerRestriction }
      case "oneof" β ("(" ~> repsep(id, ",") <~ ")") ^^ { _ β null }
      case unknown β opt("(" ~> repsep((int | string | floatingPointNumber), ",") <~ ")") ^^ { arg β
        ParseException(s"$unknown${
          arg.mkString("(", ", ", ")")
        } is either not supported or an invalid restriction name")
      }
    }
  }

  // min(low[, mode]) — an open-ended range with only a lower bound.
  protected def minRestrictionInner : Parser[Restriction] = (
    int ~ opt("," ~> string) ^^ {
      case low ~ None β new IntRangeRestriction(low, Long.MAX_VALUE, true, true)
      case low ~ Some("inclusive") β new IntRangeRestriction(low, Long.MAX_VALUE, true, true)
      case low ~ Some("exclusive") β new IntRangeRestriction(low, Long.MAX_VALUE, false, true)
    }
    |
    floatingPointNumber ~ opt("," ~> string) ^^ {
      case low ~ None β new FloatRangeRestriction(low, Double.MaxValue, true, true)
      case low ~ Some("inclusive") β new FloatRangeRestriction(low, Double.MaxValue, true, true)
      case low ~ Some("exclusive") β new FloatRangeRestriction(low, Double.MaxValue, false, true)
    })

  // max(high[, mode]) — an open-ended range with only an upper bound.
  protected def maxRestrictionInner : Parser[Restriction] = (
    int ~ opt("," ~> string) ^^ {
      case high ~ None β new IntRangeRestriction(Long.MIN_VALUE, high, true, true)
      case high ~ Some("inclusive") β new IntRangeRestriction(Long.MIN_VALUE, high, true, true)
      case high ~ Some("exclusive") β new IntRangeRestriction(Long.MIN_VALUE, high, true, false)
    }
    |
    floatingPointNumber ~ opt("," ~> string) ^^ {
      case high ~ None β new FloatRangeRestriction(Double.MinValue, high, true, true)
      case high ~ Some("inclusive") β new FloatRangeRestriction(Double.MinValue, high, true, true)
      case high ~ Some("exclusive") β new FloatRangeRestriction(Double.MinValue, high, true, false)
    })

  protected def defaultRestrictionParameter = int | string | floatingPointNumber | repsep(id, "." | "::")

  /**
   * hints as defined in the paper. Because hints can be ignored by the generator, it is safe to allow arbitrary
   * identifiers and to warn if the identifier is not a known hint.
   */
  protected def hint : Parser[Hint] = "!" ~> id >> { n β
    hintArgs(n.lowercase) ^^ {
      case args β
        try {
          Hint.get(Hint.Type.valueOf(n.lowercase), args.map(_.ir))
        } catch { case e : IllegalArgumentException β throw ParseException(s"$n is not the name of a hint.") }
    }
  }

  /** Argument list grammar per hint name; unknown hints take no arguments. */
  protected def hintArgs(name : String) : Parser[List[Name]] = name match {
    case "constantmutator" β (("(" ~> int ~ ("," ~> int <~ ")")) ^^ {
      case min ~ max β List(stringToName(min.toString), stringToName(max.toString))
    })
    case "provider" | "owner" β ("(" ~> repsep(id, ",") <~ ")")
    case "removerestrictions" β (opt("(" ~> repsep(string, ",") <~ ")") ^^ { _.getOrElse(Nil).map(stringToName) })
    case "pragma" β ((id ~ opt("(" ~> repsep(id, ",") <~ ")")) ^^ {
      case f ~ fs β List(f) ++ fs.toList.flatten
    })
    case _ β success(List[Name]())
  }

  /**
   * Description of a field: optional comment followed by restrictions and hints.
   */
  protected def fieldDescription = opt(comment) ~ rep(fieldRestriction | hint) ^^ {
    case c ~ specs β new Description(c.getOrElse(Comment.NoComment.get), specs.collect { case r : Restriction β r }, specs.collect { case h : Hint β h })
  }

  /**
   * Description of a declaration: optional comment followed by restrictions and hints.
   */
  protected def typeDescription = opt(comment) ~ rep(typeRestriction | hint) ^^ {
    case c ~ specs β new Description(c.getOrElse(Comment.NoComment.get), specs.collect { case r : Restriction β r }, specs.collect { case h : Hint β h })
  }

  /** Processes a file; presumably returns (included paths, declarations) — confirm in subclasses. */
  def process(in : File) : (List[String], List[Decl]);
}
| skill-lang/skill | src/main/scala/de/ust/skill/parser/AbstractFileParser.scala | Scala | bsd-3-clause | 14,135 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 MineFormers
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package de.mineformers.core.client.ui.skin.drawable
import de.mineformers.core.util.math.shape2d.{Point, Size}
import de.mineformers.core.util.renderer.GuiUtils
/**
* Drawable
*
* @author PaleoCrafter
*/
trait Drawable {
  /** Shared rendering helper utilities. */
  val utils = GuiUtils

  /** Current size of this drawable; defaults to 0x0 until set by the owner. */
  var size: Size = Size(0, 0)

  /** One-time setup hook; the default implementation does nothing. */
  def init(): Unit = {
  }

  /**
   * Render this drawable.
   * @param mousePos the current mouse position
   * @param pos      the position to draw at
   * @param z        the z coordinate (presumably the draw depth — confirm with implementations)
   */
  def draw(mousePos: Point, pos: Point, z: Int)
}
| MineFormers/MFCore | src/main/scala/de/mineformers/core/client/ui/skin/drawable/Drawable.scala | Scala | mit | 1,503 |
package distributed
import akka.actor.ActorSystem
import akka.pattern
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Flow, Keep}
import akka.stream.testkit.scaladsl.{TestSink, TestSource}
import akka.stream.testkit
import org.scalatest.{FlatSpec, Matchers}
import scala.concurrent.Future
import scala.concurrent.duration._
/**
* This class acts as a Source that gets messages from across a network boundary and maintains back-pressure.
*/
class RemoteSourceSpec extends FlatSpec with Matchers {
  implicit val system = ActorSystem()
  implicit val materializer = ActorMaterializer()

  "RemoteSource" should "execute a simple flow" in {
    // takeWhile(_ < 5) passes elements through until one >= 5 arrives
    val flowUnderTest = Flow[Int].takeWhile(_ < 5)
    val (source, sink) = TestSource.probe[Int]
      .via(flowUnderTest)
      .toMat(TestSink.probe[Int])(Keep.both)
      .run()
    sink.request(n = 3)
    source.sendNext(1)
    source.sendNext(2)
    source.sendNext(3)
    sink.expectNext(1, 2, 3)
    // Failing the source must propagate the error downstream.
    source.sendError(new Exception("Power surge in the linear subroutine C-47!"))
    val ex = sink.expectError()
    assert(ex.getMessage.contains("C-47"))
  }

  // NOTE(review): this test body is byte-for-byte identical to the one above and
  // never actually exercises back-pressure (demand is requested before any element
  // is sent). It should be rewritten, e.g. send without demand and assert that
  // nothing is emitted until the sink requests.
  it should "handle back-pressure correctly" in {
    val flowUnderTest = Flow[Int].takeWhile(_ < 5)
    val (source, sink) = TestSource.probe[Int]
      .via(flowUnderTest)
      .toMat(TestSink.probe[Int])(Keep.both)
      .run()
    sink.request(n = 3)
    source.sendNext(1)
    source.sendNext(2)
    source.sendNext(3)
    sink.expectNext(1, 2, 3)
    source.sendError(new Exception("Power surge in the linear subroutine C-47!"))
    val ex = sink.expectError()
    assert(ex.getMessage.contains("C-47"))
  }
}
| johnhainline/spark-conditional-linear-regression | src/test/scala/distributed/RemoteSourceSpec.scala | Scala | mit | 1,653 |
/*
* Copyright 2015 cookie.ai
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ai.cookie.spark.sql.sources
import org.apache.spark.sql.{DataFrame, DataFrameReader}
import ai.cookie.spark.sql.sources.libsvm.DefaultSource._
package object libsvm {
  /**
   * Extends [[org.apache.spark.sql.DataFrameReader]] with LIBSVM support.
   * @param read the associated reader.
   */
  implicit class LibSVMDataFrameReader(read: DataFrameReader) {
    /**
     * Read a LIBSVM dataset.
     * @param path the Hadoop-style path to a file in LIBSVM format.
     * @param numFeatures the number of features to expect per row. If not specified, it is computed automatically.
     * @return a DataFrame produced by the LIBSVM relation provider.
     */
    def libsvm(path: String, numFeatures: Option[Int] = None): DataFrame = {
      // Only forward the NumFeatures option when the caller supplied it.
      val parameters = (
        Seq() ++ numFeatures.map(NumFeatures -> _.toString)
      ).toMap
      read.format(classOf[DefaultSource].getName).options(parameters).load(path)
    }
  }
}
| cookieai/cookie-datasets | src/main/scala/ai/cookie/spark/sql/sources/libsvm/package.scala | Scala | apache-2.0 | 1,481 |
package com.dslplatform.api.client
import com.dslplatform.api.patterns.AggregateRoot
import com.dslplatform.api.patterns.Identifiable
import scala.reflect.ClassTag
import scala.concurrent.Future
/**
 * HTTP-backed implementation of [[CrudProxy]]: translates create/read/update/delete
 * calls into requests against the remote "Crud.svc" service.
 * @param httpClient the client used to send the requests
 */
class HttpCrudProxy(httpClient: HttpClient)
    extends CrudProxy {

  import HttpClientUtil._

  // Base service path all CRUD requests are routed through.
  private val CrudUri = "Crud.svc"

  /** Look up a single object by its URI; expects HTTP 200. */
  def read[TIdentifiable <: Identifiable: ClassTag](
    uri: String): Future[TIdentifiable] = {
    val domainName: String = httpClient.getDslName[TIdentifiable]
    httpClient.sendRequest[TIdentifiable](
      GET, CrudUri / domainName + "?uri=" + encode(uri), Set(200))
  }

  /** Persist a new aggregate; expects HTTP 201 (created). */
  def create[TAggregateRoot <: AggregateRoot: ClassTag](
    aggregate: TAggregateRoot): Future[TAggregateRoot] = {
    val service: String = CrudUri / httpClient.getDslName[TAggregateRoot]
    httpClient.sendRequest[TAggregateRoot](
      POST(aggregate), service, Set(201))
  }

  /** Replace an existing aggregate, addressed by its own URI; expects HTTP 200. */
  def update[TAggregate <: AggregateRoot: ClassTag](
    aggregate: TAggregate): Future[TAggregate] = {
    val uri: String = aggregate.URI
    val domainName: String = httpClient.getDslName[TAggregate]
    httpClient.sendRequest[TAggregate](
      PUT(aggregate),
      CrudUri / domainName + "?uri=" + encode(uri),
      Set(200))
  }

  /** Delete the aggregate identified by `uri`; expects HTTP 200 and returns the deleted value. */
  def delete[TAggregateRoot <: AggregateRoot: ClassTag](
    uri: String): Future[TAggregateRoot] = {
    val domainName: String = httpClient.getDslName[TAggregateRoot]
    httpClient.sendRequest[TAggregateRoot](
      DELETE,
      CrudUri / domainName + "?uri=" + encode(uri),
      Set(200))
  }
}
| ngs-doo/dsl-client-scala | http/src/main/scala/com/dslplatform/api/client/HttpCrudProxy.scala | Scala | bsd-3-clause | 1,536 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Fri Oct 16 15:05:06 EDT 2009
* @see LICENSE (MIT style license file).
*/
package scalation.activity
import java.util.concurrent.ConcurrentLinkedQueue
import collection.mutable.PriorityQueue
import scalation.animation.{AnimateCommand, DgAnimator}
import scalation.animation.CommandType._
import scalation.dynamics.Derivatives.Derivative
import scalation.linalgebra.{VectorD, VectorI}
import scalation.random.{Uniform, Variate}
import scalation.scala2d.{Ellipse, QCurve, Rectangle}
import scalation.scala2d.Colors._
import scalation.util.{Error, Identifiable, PQItem}
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PlaceI` class represents a discrete place (can hold tokens).
* @param x the place's x-coordinate
* @param y the place's y-coordinate
* @param tokens the number of tokens per color
* @param stays whether the tokens stay (test arc)
*/
class PlaceI (val x: Double, val y: Double, var tokens: VectorI, stays: Boolean = false)
      extends Identifiable
{
    // NOTE(review): constructor argument `stays` (test-arc flag) is not referenced
    // in this class body — presumably consumed elsewhere; confirm.

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add tokens to 'this' discrete place.
     *  @param _tokens  the token vector to add
     */
    def add (_tokens: VectorI) { tokens += _tokens }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Take tokens from 'this' discrete place.
     *  @param _tokens  the token vector to take away
     */
    def take (_tokens: VectorI) { tokens -= _tokens }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Determine whether 'this' place holds at least the token vector (i.e.,
     *  the requisite number of tokens of each color).  Alternative: use
     *  threshold predicate in `PetriNetRules`.
     *  @param _tokens  the token vector to compare against
     */
    def holds (_tokens: VectorI): Boolean = tokens >= _tokens

} // PlaceI class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PlaceD` class represents a continuous place (can hold fluids).
* @param x the place's x-coordinate
* @param y the place's y-coordinate
* @param fluids the amount of fluid per color
* @param stays whether the fluids stay (test arc)
*/
class PlaceD (val x: Double, val y: Double, var fluids: VectorD, stays: Boolean = false)
      extends Identifiable
{
    // NOTE(review): constructor argument `stays` (test-arc flag) is not referenced
    // in this class body — presumably consumed elsewhere; confirm.

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Add fluids to 'this' continuous place.
     *  @param _fluids  the fluid vector to add
     */
    def add (_fluids: VectorD) { fluids += _fluids }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Take fluids from 'this' continuous place.
     *  @param _fluids  the fluid vector to take away
     */
    def take (_fluids: VectorD) { fluids -= _fluids }

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Determine whether 'this' place holds at least the fluid vector (i.e.,
     *  the requisite amount of fluid of each color).  Alternative: use
     *  threshold predicate in `PetriNetRules`.
     *  @param _fluids  the fluid vector to compare against
     */
    def holds (_fluids: VectorD): Boolean = fluids >= _fluids

} // PlaceD class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Transition` class represents a timed transition.
* @param x the x-coordinate for 'this' transition
* @param y the y-coordinate for 'this' transition
* @param firingDist the random variate for the firing distribution
* @param colors the colors of the tokens (needed for firing rules)
*/
class Transition (val x: Double, val y: Double, firingDist: Variate, colors: Array [Color])
extends PQItem with Ordered [Transition] with PetriNetRules with Identifiable
{
/** The containing Petri net
*/
var pnet: PetriNet = null
/** The animation command queue
*/
var cqueue: ConcurrentLinkedQueue [AnimateCommand] = null
/** Arcs incoming from discrete places
*/
var inI: Array [ArcI] = null
/** Arcs incoming from continuous places
*/
var inD: Array [ArcD] = null
/** Arcs outgoing to discrete places
*/
var outI: Array [ArcI] = null
/** Arcs outgoing to continuous places
*/
var outD: Array [ArcD] = null
/** The firing delay for 'this' transition
*/
var firingDelay: Double = 0.0
/** Token vector for transition
*/
var tokens: VectorI = new VectorI (colors.length)
/** Fluid vector for transition
*/
var fluids: VectorD = new VectorD (colors.length)
/** A transition is locked from the time it is enabled until it fires
*/
var locked: Boolean = false
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Connect 'this' transition to all the incoming and outgoing discrete arcs
* as well as the containing Petri net.
* @param _pnet the containing Petri net
* @param _in the incoming arcs from discrete/`Int` places
* @param _out the outgoing arcs to discrete/`Int` places
*/
    def connect (_pnet: PetriNet, _in: Array [ArcI], _out: Array [ArcI])
    {
        pnet = _pnet
        cqueue = pnet.getCommandQueue           // share the net's animation command queue
        inI = _in
        inD = Array [ArcD] ()                   // no continuous arcs in this overload
        outI = _out
        outD = Array [ArcD] ()
    } // connect
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Connect 'this' transition to all the incoming and outgoing continuous arcs
* as well as the containing Petri net.
* @param _pnet the containing Petri net
* @param _in the incoming arcs from continuous/`Double` places
* @param _out the outgoing arcs to continuous/`Double` places
*/
def connect (_pnet: PetriNet, _in: Array [ArcD], _out: Array [ArcD])
{
pnet = _pnet
cqueue = pnet.getCommandQueue
inI = Array [ArcI] ()
inD = _in
outI = Array [ArcI] ()
outD = _out
} // connect
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Connect 'this' transition to all the incoming and outgoing arcs as well as
* the containing Petri net.
* @param _pnet the containing Petri net
* @param _inI the incoming arcs from discrete/`Int` places
* @param _inD the incoming arcs from continuous/`Double` places
* @param _outI the outgoing arcs to discrete/`Int` places
* @param _outD the outgoing arcs to continuous/`Double` places
*/
def connect (_pnet: PetriNet, _inI: Array [ArcI], _inD: Array [ArcD], _outI: Array [ArcI], _outD: Array [ArcD])
{
pnet = _pnet
cqueue = pnet.getCommandQueue
inI = _inI
inD = _inD
outI = _outI
outD = _outD
} // connect
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add tokens to 'this' transition.
* @param _token the token vector to add
*/
def addTokens (_tokens: VectorI) { tokens += _tokens }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Take tokens from 'this' transition.
* @param _token the token vector to take away
*/
def takeTokens (_tokens: VectorI) { tokens -= _tokens }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add fluids to 'this' transition.
* @param _fluids the fluid vector to add
*/
def addFluids (_fluids: VectorD) { fluids += _fluids }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Take fluids from 'this' transition.
* @param _fluids the fluid vector to take away
*/
def takeFluids (_fluids: VectorD) { fluids -= _fluids }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check the incoming arcs from discrete place for enough tokens of the
* right colors and the incoming arcs from continuous places for enough
* fluid of the right colors.
*/
def checkGuard: Boolean =
{
if (locked) return false
for (iI <- inI if ! iI.place.holds (iI.minTokens)) return false
for (iD <- inD if ! iD.place.holds (iD.minFluids)) return false
true
} // checkGuard
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Enable 'this' transition by computing the firing delay. Should immediately
* place it on the time ordered firing list. Also, move tokens/fluids from
* input places to 'this' transition.
*/
def enable (): Double =
{
locked = true // this transition is now in progress, so it's locked
//:: Calculate the firing delay = firing-time - enablement-time
firingDelay = calcFiringDelay (firingDist, null, null, null, null)
println ("Transition.enable: firingDelay = " + firingDelay)
//:: Pull tokens from incoming discrete places (move to transition).
for (iI <- inI) { // for each incoming discrete arc
val place = iI.place // discrete place token source
val _tokens = iI._tokenFlow (place.tokens, pnet.clock, firingDelay) // how many tokens
place.take (_tokens) // take tokens from place
addTokens (_tokens) // add these tokens to transition
println ("Transition.enable: move " + _tokens + " tokens to transition " + id + " at " + pnet.clock)
//:: Move these tokens from their discrete place (place.id) to the transition (this.id).
//:: For each color, move number = 'tokens(i)' tokens
for (i <- 0 until _tokens.dim) {
val number = _tokens(i)
if (number > 0) {
cqueue.add (AnimateCommand (MoveTokens2Node, -1, null, null, false,
colors(i), Array (number), pnet.clock, place.id, id))
} // if
} // for
} // for
//:: Pull fluids from incoming continuous places (move to transition).
for (iD <- inD) { // for each incoming continuous arc
val place = iD.place // continuous place fluid source
val _fluids = iD._fluidFlow (place.fluids, pnet.clock, firingDelay) // how much fluid
place.take (_fluids) // take fluids from place
addFluids (_fluids) // add these fluids to transition
println ("Transition.enable: move " + _fluids + " fluids to transition " + id + " at " + pnet.clock)
//:: Move these fluids from their continuous place (place.id) to the transition (this.id).
//:: For each color, move amount = 'fluids(i)' fluids
for (i <- 0 until _fluids.dim) {
val amount = _fluids(i)
if (amount > 0) {
// Adjust the sizes of tokens at both nodes by the amount
cqueue.add (AnimateCommand (ScaleTokensAt, -1, null, null, false,
colors(i), Array (amount), pnet.clock, place.id, id))
} // if
} // for
} // for
firingDelay
} // enable
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Fire 'this' transition by moving the requisite number and color of tokens
* from 'this' transition to each outgoing discrete place and the requisite
* amount and color of fluid to each outgoing continuous place.
*/
def fire ()
{
//:: Push tokens to outgoing discrete places (move to outgoing place).
for (oI <- outI) { // for each outgoing discrete arc
val place = oI.place // discrete place token target
val _tokens = oI._tokenFlow (tokens, pnet.clock, firingDelay) // how many tokens to move
takeTokens (_tokens) // take tokens from transition
place.add (_tokens) // add these tokens to place
println ("Transition.fire: move " + _tokens + " tokens to place " + place.id + " at " + pnet.clock)
//:: Move these tokens from the transition (this.id) to their discrete place (place.id)
//:: For each color, move number = 'tokens(i)' tokens
for (i <- 0 until _tokens.dim) {
val number = _tokens(i)
if (number > 0) {
cqueue.add (AnimateCommand (MoveTokens2Node, -1, null, null, false,
colors(i), Array (number), pnet.clock, id, place.id))
} // if
} // for
} // for
//:: Push fluids to outgoing continuous places (move to outgoing place).
for (oD <- outD) { // for each outgoing continuous arc
val place = oD.place // continuous place token target
val _fluids = oD._fluidFlow (fluids, pnet.clock, firingDelay) // how much fluid to move
takeFluids (_fluids) // take fluids from transition
place.add (_fluids) // add these fluids to place
println ("Transition.fire: move " + _fluids + " fluids to place " + place.id + " at " + pnet.clock)
//:: Move these fluids from the transition (this.id) to their continuous place (place.id)
//:: For each color, move amount = 'fluids(i)' tokens
for (i <- 0 until _fluids.dim) {
val amount = _fluids(i)
if (amount > 0) {
// Adjust the sizes of tokens at both nodes by the amount
cqueue.add (AnimateCommand (ScaleTokensAt, -1, null, null, false,
colors(i), Array (amount), pnet.clock, id, place.id))
} // if
} // for
} // for
locked = false // this transition is now in complete, so it's unlocked
} // fire
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compare 'this' transition to 'tr2' based on firing time.
* @param tr2 the other transition
*/
def compare (tr2: Transition) = actTime compare tr2.actTime
} // Transition class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ArcI` class represents an arc connecting discrete place with a
* transition. If incoming is true the arc is from the place to transition,
* otherwise it is from the transition to the place (outgoing).
* @param place the discrete place at one end of the arc
* @param transition the transition at the other end of the arc
* @param incoming whether the arc goes into a transition
* @param minTokens minimum number of tokens to transport over the arc
* @param rates the rate vector for the linear flow model
* @param testArc whether the arc is a test arc meaning the tokens/fluids stay
* @param scaleFactor the scale factor for the firing delay
*/
class ArcI (val place: PlaceI, val transition: Transition, incoming: Boolean, val minTokens: VectorI,
            rates: VectorI = null, testArc: Boolean = false, scaleFactor: Double = 1.0)
      extends PetriNetRules with Identifiable
{
    // Validate the arc's endpoints and direction once, at construction time.
    if (place == null) flaw ("constructor", "discrete place must not be null")
    if (transition == null) flaw ("constructor", "transition must not be null")
    if (! incoming && testArc) flaw ("constructor", "test arcs must be incoming")

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the number of tokens of each color to flow over 'this' arc,
     *  using the linear flow model with the arc's scaled firing delay.
     *  @param tokens       the number of tokens available
     *  @param time         the current time (unused for discrete arcs)
     *  @param firingDelay  the time it takes for the transition to fire
     */
    def _tokenFlow (tokens: VectorI, time: Double, firingDelay: Double): VectorI =
    {
        val effectiveDelay = firingDelay / scaleFactor     // per-arc scaling of the firing delay
        tokenFlow (tokens, minTokens, rates, effectiveDelay)
    } // _tokenFlow

} // ArcI class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `ArcD` class represents an arc connecting continuous place with a
* transition. If incoming is true the arc is from the place to transition,
* otherwise it is from the transition to the place (outgoing).
* @param place the continuous place at one end of the arc
* @param transition the transition the other end of the arc
* @param incoming whether the arc goes into a transition
* @param minFluids minimum amount of fluid to transport over the arc
* @param rates the rate vector for the linear flow model
* @param derv the array of derivative functions for ODE's
* @param testArc whether the arc is a test arc meaning the tokens/fluids stay
* @param scaleFactor the scale factor for the firing delay
*/
class ArcD (val place: PlaceD, val transition: Transition, incoming: Boolean, val minFluids: VectorD,
            rates: VectorD = null, derv: Array [Derivative] = null, testArc: Boolean = false,
            scaleFactor: Double = 1.0)
      extends PetriNetRules with Identifiable
{
    // Validate the arc's endpoints, direction and flow model once, at construction time.
    if (place == null) flaw ("constructor", "continuous place must not be null")
    if (transition == null) flaw ("constructor", "transition must not be null")
    if (! incoming && testArc) flaw ("constructor", "test arcs must be incoming")
    if (! incoming && derv != null) flaw ("constructor", "only incoming arcs may have ODE's")

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Compute the amount of fluid of each color to flow over 'this' arc.
     *  Uses the ODE-based flow model when derivatives were supplied, otherwise
     *  the linear or constant flow model.
     *  @param fluids       the amount of fluid available
     *  @param time         the current time
     *  @param firingDelay  the time it takes for the transition to fire
     */
    def _fluidFlow(fluids: VectorD, time: Double, firingDelay: Double): VectorD =
    {
        val effectiveDelay = firingDelay / scaleFactor               // per-arc scaling of the firing delay
        if (derv != null) fluidFlow (fluids, derv, time, effectiveDelay)        // ODE-based flow model
        else              fluidFlow (fluids, minFluids, rates, effectiveDelay)  // linear or constant flow model
    } // _fluidFlow

} // ArcD class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PetriNet` class provides a simulation engine for Hybrid Colored Petri Nets.
* Reference: "Discrete-event simulation of fluid stochastic Petri Nets"
* @param colors array of colors for tokens/fluids
* @param placeI array of discrete places
* @param placeD array of continuous places
* @param transition array of timed transitions
*/
class PetriNet (colors: Array [Color], placeI: Array [PlaceI], placeD: Array [PlaceD],
                transition: Array [Transition])
      extends PetriNetRules with Error
{
    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Construct a discrete Petri net (tokens, but no fluids).
     *  @param colors      array of colors for tokens
     *  @param placeI      array of discrete places
     *  @param transition  array of timed transitions
     */
    def this (colors: Array [Color], placeI: Array [PlaceI], transition: Array [Transition])
    {
        this (colors, placeI, Array [PlaceD] (), transition)
    } // constructor

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Construct a continuous Petri net (fluids, but no tokens).
     *  @param colors      array of colors for fluids
     *  @param placeD      array of continuous places
     *  @param transition  array of timed transitions
     */
    def this (colors: Array [Color], placeD: Array [PlaceD], transition: Array [Transition])
    {
        this (colors, Array [PlaceI] (), placeD, transition)
    } // constructor

    /** The current time
     */
    private var _clock = 0.0

    /** The Petri net directed graph animator
     */
    private val pna = new DgAnimator ("PetriNetAnimator", white, black)

    /** The animation command queue
     */
    private val cqueue = pna.getCommandQueue

    /** Number of colors (need at least 1)
     */
    private val ncolors = colors.length

    /** Number of discrete places (which hold entities)
     */
    private val ndplaces = if (placeI == null) 0 else placeI.length

    /** Number of continuous places (which hold fluids)
     */
    private val ncplaces = if (placeD == null) 0 else placeD.length

    /** Number of timed transitions (need at least 1)
     */
    private val ntransitions = transition.length

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Get the current time.
     */
    def clock = _clock

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Get the animation command queue.
     */
    def getCommandQueue = cqueue

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Convert the Petri net to the string representation.
     */
    override def toString =
    {
        var s = "PetriNet (\\n"
        println ("placeI = " + placeI)
        for (pI <- placeI) s += "\\tPlaceI [ " + pI.id + " ]\\n"
        println ("placeD = " + placeD)
        for (pD <- placeD) s += "\\tPlaceD [ " + pD.id + " ]\\n"
        for (tr <- transition) {
            s += " \\tTransition [ " + tr.id + " ]\\n"
            for (aI <- tr.inI) s += " \\t\\tArcI [ " + aI.place.id + " , " + aI.transition.id + " ]\\n"
            for (aD <- tr.inD) s += " \\t\\tArcD [ " + aD.place.id + " , " + aD.transition.id + " ]\\n"
            for (aI <- tr.outI) s += " \\t\\tArcI [ " + aI.transition.id + " , " + aI.place.id + " ]\\n"
            for (aD <- tr.outD) s += " \\t\\tArcD [ " + aD.transition.id + " , " + aD.place.id + " ]\\n"
        } // for
        s += ")"
        s
    } // toString

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Initialize the animation by drawing the Petri net components onto the
     *  animation drawing panel using animation commands.
     *  @param timeDilationFactor  time dilation is used to speed up/slow down animation
     *  @param gColors             the colors for nodes and edges in the graph
     *                             i.e., discrete-places, continuous-places, transitions and arcs
     */
    def initAnimation (gColors: Array [Color] = Array (yellow, gold, silver, lightyellow),
                       timeDilationFactor: Double = 1000.0)
    {
        println ("PetriNet.initAnimation: begin drawing the Petri net graph")

        //:: Draw the discrete places along with their initial tokens.

        cqueue.add (AnimateCommand (TimeDilation, -1, null, null, true, null,
                                    Array [Double] (timeDilationFactor), 0.0))
        for (pI <- placeI) {
            cqueue.add (AnimateCommand (CreateNode, pI.id, Ellipse (), "pI" + pI.id, false, gColors(0),
                                        Array [Double] (pI.x, pI.y, 30, 30), 0))
            val tokens = pI.tokens
            for (i <- 0 until tokens.dim) {       // number of tokens by color at this place
                for (j <- 0 until tokens(i)) {
                    val tk_id = Counter.next ()
                    println ("PetriNet.initAnimation: token " + tk_id + " for place " + pI.id)
                    cqueue.add (AnimateCommand (CreateToken, tk_id, Ellipse (), "tk" + tk_id, false,
                                                colors(i), null, 0, pI.id))
                } // for
            } // for
        } // for

        //:: Draw the continuous places along with their initial fluid levels.

        for (pD <- placeD) {
            cqueue.add (AnimateCommand (CreateNode, pD.id, Ellipse (), "pD" + pD.id, false, gColors(1),
                                        Array [Double] (pD.x, pD.y, 30, 40), 0))
            val fluids = pD.fluids
            for (i <- 0 until fluids.dim) {       // amount of fluids by color at this place
                val fl_id = Counter.next ()
                val amount = fluids(i)
                if (amount > 0) {
                    println ("PetriNet.initAnimation: fluid " + fl_id + " with amount " + amount + " for place " + pD.id)
                    cqueue.add (AnimateCommand (CreateToken, fl_id, Ellipse (), "fl" + fl_id, false, colors(i),
                                                Array [Double] (amount, amount), 0, pD.id))
                } // if
            } // for
        } // for

        //:: Draw the transitions along with their incoming and outgoing arcs.

        for (tr <- transition) {
            cqueue.add (AnimateCommand (CreateNode, tr.id, Rectangle (), "tr" + tr.id, true, gColors(2),
                                        Array [Double] (tr.x, tr.y, 30, 60), 0))
            for (aI <- tr.inI) {
                cqueue.add (AnimateCommand (CreateEdge, aI.id, QCurve (), "aI" + aI.id, true, gColors(3),
                                            null, 0, aI.place.id, aI.transition.id))
            } // for
            for (aD <- tr.inD) {
                cqueue.add (AnimateCommand (CreateEdge, aD.id, QCurve (), "aD" + aD.id, true, gColors(3),
                                            null, 0, aD.place.id, aD.transition.id))
            } // for
            for (aI <- tr.outI) {
                cqueue.add (AnimateCommand (CreateEdge, aI.id, QCurve (), "aI" + aI.id, true, gColors(3),
                                            null, 0, aI.transition.id, aI.place.id))
            } // for
            for (aD <- tr.outD) {
                cqueue.add (AnimateCommand (CreateEdge, aD.id, QCurve (), "aD" + aD.id, true, gColors(3),
                                            null, 0, aD.transition.id, aD.place.id))
            } // for
        } // for

        println ("PetriNet.initAnimation: end drawing the Petri net graph")
    } // initAnimation

    //::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
    /** Simulate the execution of the Petri Net.
     *  @param tStart  the starting time for the simulation
     *  @param tStop   the stopping time for the simulation
     */
    def simulate (tStart: Double, tStop: Double)
    {
        // The list of transitions to be fired (time-ordered list of transitions).
        // FIX: 'mutable.PriorityQueue.dequeue' returns the MAXIMUM element under the
        // given ordering; Transition's natural ordering is ascending by 'actTime',
        // which would fire the latest event first.  Reverse the ordering so that
        // the transition with the earliest firing time is dequeued first.
        val firingList = PriorityQueue.empty [Transition] (scala.math.Ordering [Transition].reverse)
//      val firingList = new PQueue [Transition] ()
        _clock = tStart
        var continue = true

        println ("PetriNet.simulate: initialize animation of the Petri net at " + 0.0)
        initAnimation ()
        println ("PetriNet.simulate: start simulation at " + _clock)

        while (_clock < tStop && continue) {

            //:: Enable other transitions whose guards are true.

            for (tran <- transition) {
                println ("PetriNet.simulate: check guard for transition " + tran.id + " at " + _clock)
                if (tran.checkGuard) {
                    println ("PetriNet.simulate: enable transition " + tran.id + " at " + _clock)
                    tran.actTime = _clock + tran.enable ()    // enable returns firing delay
                    firingList += tran
                } // if
            } // for

            continue = ! firingList.isEmpty

            //:: Fire the next (in time order) enabled transition.

            if (continue) {
                val nextTran = firingList.dequeue ()          // remove from firing list
                _clock = nextTran.actTime                     // advance time
                println ("PetriNet.simulate: fire transition " + nextTran.id + " at " + _clock)
                nextTran.fire ()                              // fire the next transition
            } // if
        } // while

        println ("PetriNet.simulate: stop simulation at " + _clock +
                 " with firing list = " + firingList)
        println ("PetriNet.simulate: start animation")
        pna.animate (0.0, tStop)
    } // simulate

} // PetriNet class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `Counter` object is used to provide unique identifiers for tokens/fluids.
*/
object Counter
{
    // Identifiers up to 1000 are reserved for nodes (places, transitions) and
    // edges; tokens/fluids receive identifiers strictly greater than 1000.
    private var lastId = 1000

    /** Advance and return the next unique identifier (1001, 1002, ...). */
    def next (): Int = { lastId = lastId + 1; lastId }

} // Counter object
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PetriNetTest` object is used to test the `PetriNet` class.
*/
object PetriNetTest extends App
{
    //:: Set up the colors for tokens and fluids (note: colors for tokens and fluids must be disjoint).
    val colors = Array [Color] (green, blue, purple)
    //:: Define the places along with their initial markings by color.
    //:: Discrete places hold integer token counts; continuous places hold fluid levels.
    val placeI = Array [PlaceI] (new PlaceI (100, 100, VectorI (2, 2, 0)),
                                 new PlaceI (500, 100, VectorI (0, 0, 0)))
    val placeD = Array [PlaceD] (new PlaceD (100, 400, VectorD (0.0, 0.0, 10.5)),
                                 new PlaceD (500, 400, VectorD (0.0, 0.0, 0.0)))
    //:: Define the transitions (one transition with a Uniform(4, 6) firing distribution).
    val transt = Array [Transition] (new Transition (300, 250, new Uniform (4, 6), colors))
    //:: Define the overall Petri net.
    val pnet = new PetriNet (colors, placeI, placeD, transt)
    //:: For each transition, link to all of the incoming/outgoing places via true/false arcs.
    //:: Also, establish a back link to the containing Petri net.
    transt(0).connect (pnet,
        Array [ArcI] (new ArcI (placeI(0), transt(0), true, VectorI (1, 1, 0))),
        Array [ArcD] (new ArcD (placeD(0), transt(0), true, VectorD (0.0, 0.0, 5.5))),
        Array [ArcI] (new ArcI (placeI(1), transt(0), false, VectorI (1, 1, 0))),
        Array [ArcD] (new ArcD (placeD(1), transt(0), false, VectorD (0.0, 0.0, 5.5))))
    println (pnet)
    //:: Run the simulation from time 2 to time 10.
    pnet.simulate (2, 10)

} // PetriNetTest object
| NBKlepp/fda | scalation_1.2/src/main/scala/scalation/activity/PetriNet.scala | Scala | mit | 31,062 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package java.nio.charset
/** The ISO-8859-1 (Latin-1) charset, registered under its standard alias names.
 *  A single-byte charset whose 256 byte values map directly to the first 256
 *  Unicode code points, hence 'maxValue = 0xff' (vs. 0x7f for US-ASCII).
 */
private[charset] object ISO_8859_1 extends ISO_8859_1_And_US_ASCII_Common(
    "ISO-8859-1", Array(
        "csISOLatin1", "IBM-819", "iso-ir-100", "8859_1", "ISO_8859-1", "l1",
        "ISO8859-1", "ISO_8859_1", "cp819", "ISO8859_1", "latin1",
        "ISO_8859-1:1987", "819", "IBM819"),
    maxValue = 0xff)
| scala-js/scala-js | javalib/src/main/scala/java/nio/charset/ISO_8859_1.scala | Scala | apache-2.0 | 599 |
/*
* Copyright (c) 2014 FranΓ§ois Cabrol.
*
* This file is part of MelVi.
*
* MelVi is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* MelVi is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with MelVi. If not, see <http://www.gnu.org/licenses/>.
*/
package com.cabrol.francois.melvi.view
import javax.swing.JFrame
import javafx.embed.swing.JFXPanel
import javafx.application.Platform
import javafx.scene.Scene
import javafx.scene.layout.BorderPane
import javafx.scene.layout.HBox
import javafx.scene.control.Button
import javafx.geometry.Insets
import com.cabrol.francois.melvi.factory.{GraphicsType, ChartFactory}
import com.cabrol.francois.libjamu.musictheory.entity.note.Note
import java.awt.{Point, Component}
/**
* Created with IntelliJ IDEA.
* User: francois * Date: 2014-02-02
*/
/** A Swing window hosting a JavaFX scene that visualises a melody.
 *  Builds a JFrame containing a JFXPanel, then initialises the JavaFX content
 *  on the JavaFX application thread via Platform.runLater.
 *  @param notes         the notes of the melody to display
 *  @param graphicsType  the kind of chart to build for the notes
 */
class VisualiserView(notes:List[Note], graphicsType:GraphicsType.GraphicsType) {
  val frame = new JFrame("Melody Chart");
  initAndShowGUI
  def initAndShowGUI {
    // This method is invoked on the EDT thread
    // NOTE(review): it is actually invoked from the constructor above -- confirm
    // that callers construct VisualiserView on the Swing EDT.
    val fxPanel = new JFXPanel();
    frame.add(fxPanel);
    frame.setSize(300, 200);
    frame.setVisible(true);
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    // Hand off scene construction to the JavaFX application thread.
    Platform.runLater(new Runnable() {
      def run() {
        initFX(fxPanel);
      }
    })
  }
  // Build the JavaFX scene and attach it to the embedded panel.
  def initFX(fxPanel:JFXPanel) {
    // This method is invoked on the JavaFX thread
    val scene = createScene
    fxPanel.setScene(scene)
  }
  // Assemble the scene: menu on top, chart panel in the center.
  def createScene:Scene = {
    // Create the option menu
    val hbox = new MenuTop(notes)
    // Create the charts panels
    val panel = ChartFactory.createChartPanel(graphicsType, notes)
    // Create the container
    val border = new BorderPane()
    border.setTop(hbox)
    border.setCenter(panel)
    new Scene(border,400,400)
  }
  // Delegated accessors/mutators for the underlying JFrame's geometry.
  def getWidth:Int = frame.getWidth
  def getHeight:Int = frame.getHeight
  def getX:Int = frame.getX
  def getY:Int = frame.getY
  def setLocation(p:Point) = frame.setLocation(p)
}
| francoiscabrol/MelVi | src/main/scala/com/cabrol/francois/melvi/view/VisualiserView.scala | Scala | gpl-3.0 | 2,461 |
package com.github.mkorman9
import java.time.LocalDateTime
import awscala.dynamodbv2.{DynamoDB, _}
import com.amazonaws.services.dynamodbv2.model.ScalarAttributeType
import org.scalatest._
import com.github.mkorman9.DynamoDSL._
/** Integration tests for the DynamoDSL mappers against a local DynamoDB instance.
 *  'beforeAll' creates one table per data model (Cat, Dog, Duck, Parrot, Snake),
 *  exercising plain hash keys, hash+range keys, a local secondary index and a
 *  global secondary index.
 *  NOTE(review): there is no 'afterAll' shutting down the local DynamoDB
 *  connection or dropping the tables -- confirm whether cleanup is needed.
 */
class DynamoIntegrationTest extends FunSuite with Matchers with BeforeAndAfterAll {
  implicit var connection: DynamoDB = _
  override def beforeAll = {
    connection = DynamoDB.local()
    // Cat: hash key "roleName" + range key "name", no secondary indexes.
    connection.createTable("Cat",
      ("roleName", ScalarAttributeType.S),
      ("name", ScalarAttributeType.S),
      Seq(),
      Seq()
    )
    // Dog: hash key only.
    connection.createTable("Dog",
      ("name", ScalarAttributeType.S)
    )
    // Duck: hash+range keys plus a local secondary index on height.
    connection.createTable("Duck",
      ("color", ScalarAttributeType.S),
      ("name", ScalarAttributeType.S),
      Seq(
        ("height", ScalarAttributeType.N)
      ),
      Seq(
        LocalSecondaryIndex(
          name = "ByHeight",
          keySchema = Seq(KeySchema("color", KeyType.Hash), KeySchema("height", KeyType.Range)),
          projection = Projection(ProjectionType.All)
        )
      )
    )
    // Parrot: hash key only (used for the JSON-serialization test).
    connection.createTable("Parrot",
      ("id", ScalarAttributeType.N)
    )
    // Snake: hash+range keys plus a global secondary index on color/name.
    val snakesTable = Table(
      name = "Snake",
      hashPK = "id",
      rangePK = Some("name"),
      attributes = Seq(
        AttributeDefinition("id", ScalarAttributeType.N),
        AttributeDefinition("name", ScalarAttributeType.S),
        AttributeDefinition("color", ScalarAttributeType.S)
      ),
      localSecondaryIndexes = Seq(),
      globalSecondaryIndexes = Seq(
        GlobalSecondaryIndex(
          name = "ByColor",
          keySchema = Seq(KeySchema("color", KeyType.Hash), KeySchema("name", KeyType.Range)),
          projection = Projection(ProjectionType.All),
          provisionedThroughput = ProvisionedThroughput(5, 5)
        )
      ),
      provisionedThroughput = None
    )
    connection.createTable(snakesTable)
  }
  // Verifies put/get/query/scan/delete round-trips on a hash+range-keyed table.
  test("Mapper should persist correct set of data with hash and sort key") {
    val catToDelete = CatDataModel("Leila", "Hunter", None, LocalDateTime.now().minusYears(12), 10f)
    val catsWithMousesOver100 = List(
      CatDataModel("Johnny", "Hunter", Some(112), LocalDateTime.now().minusYears(1), 12.5f),
      CatDataModel("Pablo", "Hunter", Some(117), LocalDateTime.now().minusYears(1), 14.1f)
    )
    val cats = List(
      CatDataModel("Mike", "Worker", Some(41), LocalDateTime.now().minusYears(3), 14.2f),
      CatDataModel("Ricky", "Unemployed", None,LocalDateTime.now().minusYears(2), 17f),
      catToDelete
    ) ::: catsWithMousesOver100
    CatsMapping.putAll(cats)
    val huntersBeforeRemoving = CatsMapping query {
      CatsMapping.roleName === "Hunter"
    }
    val catToDeleteFromDb = CatsMapping.get("Hunter", "Leila")
    CatsMapping.delete("Hunter", "Leila")
    val huntersAfterRemoving = CatsMapping query {
      CatsMapping.roleName === "Hunter"
    }
    val deletedCat = CatsMapping.get("Hunter", "Leila")
    // scan filters on a non-key attribute (mousesConsumed)
    val catsWithMousesOver100FromDb = CatsMapping scan { CatsMapping.mousesConsumed > 100 }
    catToDeleteFromDb should be (Some(catToDelete))
    deletedCat should be (None)
    huntersBeforeRemoving.size should be(3)
    huntersAfterRemoving.size should be(2)
    huntersBeforeRemoving forall (cats.contains(_)) should be(true)
    huntersAfterRemoving forall (cats.contains(_)) should be(true)
    catsWithMousesOver100FromDb.toSet should be (catsWithMousesOver100.toSet)
  }
  // Verifies put/get/query/delete round-trips on a hash-key-only table.
  test("Mapper should persist correct set of data with just hash key") {
    val dogToDelete = DogDataModel("Max", List("black", "white"), BigDecimal("1212123.3453453"))
    val dogs = List(
      DogDataModel("Rex", List("brown", "white"), BigDecimal("1212123.3453453")),
      dogToDelete
    )
    DogsMapping.putAll(dogs)
    val maxBeforeRemoving = DogsMapping query {
      DogsMapping.name === "Max"
    }
    val dogToDeleteFromDb = DogsMapping.get("Max")
    DogsMapping.delete("Max")
    val maxAfterRemoving = DogsMapping query {
      DogsMapping.name === "Max"
    }
    val deletedDog = DogsMapping.get("Max")
    dogToDeleteFromDb should be (Some(dogToDelete))
    deletedDog should be (None)
    maxBeforeRemoving.size should be(1)
    maxAfterRemoving.size should be(0)
    dogs.contains(maxBeforeRemoving.head) should be(true)
  }
  // Verifies querying through a local secondary index (range condition on height).
  test("Mapper should retrieve set of data using local secondary index") {
    val whiteDucksOver4 = List(
      DuckDataModel("White", "John", 5),
      DuckDataModel("White", "Paul", 6)
    )
    val ducks = List(
      DuckDataModel("Black", "Raul", 5),
      DuckDataModel("White", "Mike", 3)
    ) ::: whiteDucksOver4
    DucksMapping.putAll(ducks)
    val whiteDucksOver4FromDb = DucksMapping query(DucksMapping.ByHeight,
      DucksMapping.color === "White" and DucksMapping.height > 4
    )
    whiteDucksOver4FromDb.toSet should be (whiteDucksOver4.toSet)
  }
  // Verifies querying through a global secondary index (equality on color and name).
  test("Mapper should retrieve set of data using global secondary index") {
    val whiteAkensSnakes = List(
      SnakeDataModel(0, "Akens", "White", 5),
      SnakeDataModel(1, "Akens", "White", 6)
    )
    val snakes = List(
      SnakeDataModel(2, "Black", "Paul", 3),
      SnakeDataModel(3, "Green", "Ryan", 6)
    ) ::: whiteAkensSnakes
    SnakesMapping.putAll(snakes)
    val whiteAkensSnakesFromDb = SnakesMapping.query(SnakesMapping.ByColor,
      SnakesMapping.color === "White" and SnakesMapping.name === "Akens"
    )
    whiteAkensSnakesFromDb.toSet should be (whiteAkensSnakes.toSet)
  }
  // Verifies a nested JSON attribute survives a put/get round-trip.
  test("Mapper should be able to serializer and deserialize JSON objects") {
    val parrotInfo = ParrotInfo("Cacado", 20)
    val parrotToSave = ParrotDataModel(1L, parrotInfo)
    ParrotsMapping.put(parrotToSave)
    val retrievedParrot = ParrotsMapping.get(1L).get
    parrotToSave should be (retrievedParrot)
  }
}
| mkorman9/scala-dynamodb-mapper | src/test/scala/com/github/mkorman9/DynamoIntegrationTest.scala | Scala | apache-2.0 | 5,793 |
package controllers.sample
import javax.inject._
import play.api._
import play.api.mvc._
@Singleton
class JavaScriptSampleController @Inject() extends Controller {

  /** Render the JavaScript sample index page with a welcome flash message. */
  def index = Action {
    val title = "Welcome to Apl Site Dispatcher"
    val flash = Flash(Map("success" -> "Welcome!"))
    Ok(views.html.sample.javascript_sample_index(title)(flash))
  }
}
}
| tnddn/iv-web | portal/rest-portal/app/controllers/sample/JavaScriptSampleController.scala | Scala | apache-2.0 | 326 |
package com.twitter.scrooge.backend
import com.twitter.finagle
import com.twitter.finagle.SourcedException
import com.twitter.finagle.stats.NullStatsReceiver
import com.twitter.finagle.thrift.ThriftClientRequest
import com.twitter.scrooge.{Option, Utilities}
import com.twitter.scrooge.testutil.{EvalHelper, JMockSpec}
import com.twitter.util.{Await, Future}
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
import java.nio.ByteBuffer
import java.util.EnumSet
import org.apache.thrift.protocol._
import org.apache.thrift.transport.TMemoryBuffer
import org.jmock.Expectations.returnValue
import org.jmock.lib.legacy.ClassImposteriser
import org.jmock.{Expectations, Mockery}
import thrift.java_test._
/**
 * Round-trip tests for the Java code that Scrooge generates from the
 * `java_test` thrift IDL: enums, constants, structs, unions, exceptions
 * and the Finagle service stubs.
 *
 * Most cases drive a jMock `TProtocol` through the exact sequence of
 * read/write calls (via the EvalHelper start/next/end helpers) and assert
 * that decode/encode produce the expected generated objects.
 */
class JavaGeneratorSpec extends JMockSpec with EvalHelper {
  // Wraps a string's platform-default-encoded bytes; used wherever a
  // TType.STRING field is carried as binary.
  def stringToBytes(string: String) = ByteBuffer.wrap(string.getBytes)

  "JavaGenerator" should {
    // --- generated enums: constants, lookup, Java serialization, EnumSet ---
    "generate an enum" should {
      "correct constants" in { _ =>
        NumberID.ONE.getValue() must be(1)
        NumberID.TWO.getValue() must be(2)
        NumberID.THREE.getValue() must be(3)
        NumberID.FIVE.getValue() must be(5)
        NumberID.SIX.getValue() must be(6)
        NumberID.EIGHT.getValue() must be(8)
      }
      "findByValue" in { _ =>
        NumberID.findByValue(1) must be(NumberID.ONE)
        NumberID.findByValue(2) must be(NumberID.TWO)
        NumberID.findByValue(3) must be(NumberID.THREE)
        NumberID.findByValue(5) must be(NumberID.FIVE)
        NumberID.findByValue(6) must be(NumberID.SIX)
        NumberID.findByValue(8) must be(NumberID.EIGHT)
      }
      "java-serializable" in { _ =>
        // Enum values must survive a java.io serialization round trip.
        val bos = new ByteArrayOutputStream()
        val out = new ObjectOutputStream(bos)
        out.writeObject(NumberID.ONE)
        out.writeObject(NumberID.TWO)
        bos.close()
        val bytes = bos.toByteArray
        val in = new ObjectInputStream(new ByteArrayInputStream(bytes))
        var obj = in.readObject()
        obj.isInstanceOf[NumberID] must be(true)
        obj.asInstanceOf[NumberID].getValue must be(NumberID.ONE.getValue)
        obj.asInstanceOf[NumberID].name must be(NumberID.ONE.name)
        obj = in.readObject()
        obj.isInstanceOf[NumberID] must be(true)
        obj.asInstanceOf[NumberID].getValue must be(NumberID.TWO.getValue)
        obj.asInstanceOf[NumberID].name must be(NumberID.TWO.name)
      }
      "use an EnumSet for a set of enums" in { _ =>
        val obj = new StructWithEnumSet.Builder().build()
        obj.getCodes.isInstanceOf[EnumSet[ReturnCode]] must be(true)
        obj.getCodes.size must be(0)
        obj.getCodesWithDefault.isInstanceOf[EnumSet[ReturnCode]] must be(true)
        obj.getCodesWithDefault.size must be(1)
        val prot = new TBinaryProtocol(new TMemoryBuffer(64))
        StructWithEnumSet.encode(obj, prot)
        val decoded = StructWithEnumSet.decode(prot)
        decoded.getCodes.isInstanceOf[EnumSet[ReturnCode]] must be(true)
      }
    }
    // --- generated constants (including 64-bit keys/values) ---
    "generate constants" in { _ =>
      Constants.myWfhDay must be(WeekDay.THU)
      Constants.myDaysOut must be(Utilities.makeList(WeekDay.THU, WeekDay.SAT, WeekDay.SUN))
      Constants.name must be("Columbo")
      Constants.someInt must be(1)
      Constants.someDouble must be(3.0)
      Constants.someList must be(Utilities.makeList("piggy"))
      Constants.emptyList must be(Utilities.makeList())
      Constants.someMap must be(Utilities.makeMap(Utilities.makeTuple("foo", "bar")))
      Constants.someSimpleSet must be(Utilities.makeSet("foo", "bar"))
      Constants.someSet must be(Utilities.makeSet(
        Utilities.makeList("piggy"),
        Utilities.makeList("kitty")
      ))
      // Values beyond Int.MaxValue must stay longs end to end.
      Constants.long_key_long_value_map.get(2147483648L) must be(2147483648L)
      Constants.long_set.contains(2147483648L) must be(true)
      Constants.long_list.contains(2147483648L) must be(true)
    }
    // --- simple structs: each wire type read and written through a mock protocol ---
    "basic structs" should {
      "ints" should {
        "read" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startRead(e, protocol, new TField("baby", TType.I16, 1))
            one(protocol).readI16(); will(returnValue(16: Short))
            nextRead(e, protocol, new TField("mama", TType.I32, 2))
            one(protocol).readI32(); will(returnValue(32))
            nextRead(e, protocol, new TField("papa", TType.I64, 3))
            one(protocol).readI64(); will(returnValue(64L))
            endRead(e, protocol)
          }
          whenExecuting {
            Ints.decode(protocol) must be(new Ints(16, 32, 64L))
          }
        }
        "write" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startWrite(e, protocol, new TField("baby", TType.I16, 1))
            one(protocol).writeI16(`with`(Expectations.equal(16: Short)))
            nextWrite(e, protocol, new TField("mama", TType.I32, 2))
            one(protocol).writeI32(`with`(Expectations.equal(32)))
            nextWrite(e, protocol, new TField("papa", TType.I64, 3))
            one(protocol).writeI64(`with`(Expectations.equal(64L)))
            endWrite(e, protocol)
          }
          whenExecuting {
            new Ints(16, 32, 64L).write(protocol) must be(())
          }
        }
      }
      "bytes" should {
        "read" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startRead(e, protocol, new TField("x", TType.BYTE, 1))
            one(protocol).readByte(); will(returnValue(3.toByte))
            nextRead(e, protocol, new TField("y", TType.STRING, 2))
            one(protocol).readBinary(); will(returnValue(stringToBytes("hello")))
            endRead(e, protocol)
          }
          whenExecuting {
            val bytes = Bytes.decode(protocol)
            bytes.getX must be(3.toByte)
            new String(bytes.getY.array) must be("hello")
          }
        }
        "write" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startWrite(e, protocol, new TField("x", TType.BYTE, 1))
            one(protocol).writeByte(`with`(Expectations.equal(16.toByte)))
            nextWrite(e, protocol, new TField("y", TType.STRING, 2))
            one(protocol).writeBinary(`with`(Expectations.equal(stringToBytes("goodbye"))))
            endWrite(e, protocol)
          }
          whenExecuting {
            new Bytes(16.toByte, stringToBytes("goodbye")).write(protocol) must be(())
          }
        }
      }
      "bool, double, string" should {
        "read" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startRead(e, protocol, new TField("alive", TType.BOOL, 1))
            one(protocol).readBool(); will(returnValue(true))
            nextRead(e, protocol, new TField("pi", TType.DOUBLE, 2))
            one(protocol).readDouble(); will(returnValue(3.14))
            nextRead(e, protocol, new TField("name", TType.STRING, 3))
            one(protocol).readString(); will(returnValue("bender"))
            endRead(e, protocol)
          }
          whenExecuting {
            Misc.decode(protocol) must be(new Misc(true, 3.14, "bender"))
          }
        }
        "write" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startWrite(e, protocol, new TField("alive", TType.BOOL, 1))
            one(protocol).writeBool(`with`(Expectations.equal(false)))
            nextWrite(e, protocol, new TField("pi", TType.DOUBLE, 2))
            one(protocol).writeDouble(`with`(Expectations.equal(6.28)))
            nextWrite(e, protocol, new TField("name", TType.STRING, 3))
            one(protocol).writeString(`with`(Expectations.equal("fry")))
            endWrite(e, protocol)
          }
          whenExecuting {
            new Misc(false, 6.28, "fry").write(protocol) must be(())
          }
        }
      }
      "lists, sets, and maps" should {
        // A struct exercising every collection kind, including a nested one.
        val exemplar = new Compound.Builder()
          .intlist(Utilities.makeList(10, 20))
          .intset(Utilities.makeSet(44, 55))
          .namemap(Utilities.makeMap(Utilities.makeTuple("wendy", 500)))
          .nested(Utilities.makeList(Utilities.makeSet(9)))
          .build()
        "read" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startRead(e, protocol, new TField("intlist", TType.LIST, 1))
            one(protocol).readListBegin(); will(returnValue(new TList(TType.I32, 2)))
            one(protocol).readI32(); will(returnValue(10))
            one(protocol).readI32(); will(returnValue(20))
            one(protocol).readListEnd()
            nextRead(e, protocol, new TField("intset", TType.SET, 2))
            one(protocol).readSetBegin(); will(returnValue(new TSet(TType.I32, 2)))
            one(protocol).readI32(); will(returnValue(44))
            one(protocol).readI32(); will(returnValue(55))
            one(protocol).readSetEnd()
            nextRead(e, protocol, new TField("namemap", TType.MAP, 3))
            one(protocol).readMapBegin(); will(returnValue(new TMap(TType.STRING, TType.I32, 1)))
            one(protocol).readString(); will(returnValue("wendy"))
            one(protocol).readI32(); will(returnValue(500))
            one(protocol).readMapEnd()
            nextRead(e, protocol, new TField("nested", TType.LIST, 4))
            one(protocol).readListBegin(); will(returnValue(new TList(TType.SET, 1)))
            one(protocol).readSetBegin(); will(returnValue(new TSet(TType.I32, 1)))
            one(protocol).readI32(); will(returnValue(9))
            one(protocol).readSetEnd()
            one(protocol).readListEnd()
            endRead(e, protocol)
          }
          whenExecuting {
            Compound.decode(protocol) must be(exemplar)
          }
        }
        "write" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startWrite(e, protocol, new TField("intlist", TType.LIST, 1))
            one(protocol).writeListBegin(`with`(listEqual(new TList(TType.I32, 2))))
            one(protocol).writeI32(`with`(Expectations.equal(10)))
            one(protocol).writeI32(`with`(Expectations.equal(20)))
            one(protocol).writeListEnd()
            nextWrite(e, protocol, new TField("intset", TType.SET, 2))
            one(protocol).writeSetBegin(`with`(setEqual(new TSet(TType.I32, 2))))
            one(protocol).writeI32(`with`(Expectations.equal(44)))
            one(protocol).writeI32(`with`(Expectations.equal(55)))
            one(protocol).writeSetEnd()
            nextWrite(e, protocol, new TField("namemap", TType.MAP, 3))
            one(protocol).writeMapBegin(`with`(mapEqual(new TMap(TType.STRING, TType.I32, 1))))
            one(protocol).writeString(`with`(Expectations.equal("wendy")))
            one(protocol).writeI32(`with`(Expectations.equal(500)))
            one(protocol).writeMapEnd()
            nextWrite(e, protocol, new TField("nested", TType.LIST, 4))
            one(protocol).writeListBegin(`with`(listEqual(new TList(TType.SET, 1))))
            one(protocol).writeSetBegin(`with`(setEqual(new TSet(TType.I32, 1))))
            one(protocol).writeI32(9)
            one(protocol).writeSetEnd()
            one(protocol).writeListEnd()
            endWrite(e, protocol)
          }
          whenExecuting {
            exemplar.write(protocol) must be(())
          }
        }
      }
    }
    // --- required/optional/default fields, nesting, exceptions, >22 fields ---
    "complicated structs" should {
      "with required fields" should {
        "read" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startRead(e, protocol, new TField("string", TType.STRING, 1))
            one(protocol).readString(); will(returnValue("yo"))
            endRead(e, protocol)
          }
          whenExecuting {
            RequiredString.decode(protocol) must be(new RequiredString("yo"))
          }
        }
        "missing required value throws exception during deserialization" should {
          "with no default value" in { cycle => import cycle._
            val protocol = mock[TProtocol]
            expecting { e =>
              emptyRead(e, protocol)
            }
            whenExecuting {
              intercept[TProtocolException] {
                RequiredString.decode(protocol)
              }
            }
          }
          "with default value" in { cycle => import cycle._
            val protocol = mock[TProtocol]
            expecting { e =>
              emptyRead(e, protocol)
            }
            whenExecuting {
              intercept[TProtocolException] {
                RequiredStringWithDefault.decode(protocol)
              }
            }
          }
        }
        "null required value throws exception during serialization" should {
          "with no default value" in { e => import e._
            val protocol = mock[TProtocol]
            intercept[TProtocolException] {
              new RequiredString(null).write(protocol)
            }
          }
          "with default value" in { e => import e._
            val protocol = mock[TProtocol]
            intercept[TProtocolException] {
              new RequiredStringWithDefault(null).write(protocol)
            }
          }
        }
      }
      "with optional fields" should {
        "read" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startRead(e, protocol, new TField("name", TType.STRING, 1))
            one(protocol).readString(); will(returnValue("Commie"))
            nextRead(e, protocol, new TField("age", TType.I32, 2))
            one(protocol).readI32(); will(returnValue(14))
            endRead(e, protocol)
          }
          whenExecuting {
            OptionalInt.decode(protocol) must be(new OptionalInt("Commie", new Option.Some(14)))
          }
        }
        "read with missing field" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startRead(e, protocol, new TField("name", TType.STRING, 1))
            one(protocol).readString(); will(returnValue( "Commie"))
            endRead(e, protocol)
          }
          whenExecuting {
            OptionalInt.decode(protocol) must be(new OptionalInt("Commie", Option.none()))
          }
        }
        "write" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startWrite(e, protocol, new TField("name", TType.STRING, 1))
            one(protocol).writeString(`with`(Expectations.equal("Commie")))
            nextWrite(e, protocol, new TField("age", TType.I32, 2))
            one(protocol).writeI32(`with`(Expectations.equal(14)))
            endWrite(e, protocol)
          }
          whenExecuting {
            new OptionalInt("Commie", new Option.Some(14)).write(protocol) must be(())
          }
        }
        "write with missing field" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startWrite(e, protocol, new TField("name", TType.STRING, 1))
            one(protocol).writeString(`with`(Expectations.equal("Commie")))
            endWrite(e, protocol)
          }
          whenExecuting {
            new OptionalInt("Commie", Option.none()).write(protocol) must be(())
          }
        }
      }
      "with default values" should {
        "read with value missing, using default" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            one(protocol).readStructBegin()
            one(protocol).readFieldBegin(); will(returnValue(new TField("stop", TType.STOP, 10)))
            one(protocol).readStructEnd()
          }
          whenExecuting {
            DefaultValues.decode(protocol) must be(new DefaultValues("leela"))
          }
        }
        "read with value present" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            one(protocol).readStructBegin()
            nextRead(e, protocol, new TField("name", TType.STRING, 1))
            one(protocol).readString(); will(returnValue( "delilah"))
            one(protocol).readFieldBegin(); will(returnValue(new TField("stop", TType.STOP, 10)))
            one(protocol).readStructEnd()
          }
          whenExecuting {
            DefaultValues.decode(protocol) must be(new DefaultValues("delilah"))
          }
        }
      }
      "nested" should {
        "read" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startRead(e, protocol, new TField("name", TType.STRING, 1))
            one(protocol).readString(); will(returnValue("United States of America"))
            nextRead(e, protocol, new TField("provinces", TType.LIST, 2))
            one(protocol).readListBegin(); will(returnValue(new TList(TType.STRING, 2)))
            one(protocol).readString(); will(returnValue("connecticut"))
            one(protocol).readString(); will(returnValue("california"))
            one(protocol).readListEnd()
            nextRead(e, protocol, new TField("emperor", TType.STRUCT, 5))
            /** Start of Emperor struct **/
            startRead(e, protocol, new TField("name", TType.STRING, 1))
            one(protocol).readString(); will(returnValue( "Bush"))
            nextRead(e, protocol, new TField("age", TType.I32, 2))
            one(protocol).readI32(); will(returnValue(42))
            endRead(e, protocol)
            /** End of Emperor struct **/
            endRead(e, protocol)
          }
          whenExecuting {
            Empire.decode(protocol) must be(new Empire(
              "United States of America",
              Utilities.makeList("connecticut", "california"),
              new Emperor("Bush", 42)))
          }
        }
        "write" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startWrite(e, protocol, new TField("name", TType.STRING, 1))
            one(protocol).writeString(`with`(Expectations.equal("Canada")))
            nextWrite(e, protocol, new TField("provinces", TType.LIST, 2))
            one(protocol).writeListBegin(`with`(listEqual(new TList(TType.STRING, 2))))
            one(protocol).writeString(`with`(Expectations.equal("Manitoba")))
            one(protocol).writeString(`with`(Expectations.equal("Alberta")))
            one(protocol).writeListEnd()
            nextWrite(e, protocol, new TField("emperor", TType.STRUCT, 5))
            // emperor
            startWrite(e, protocol, new TField("name", TType.STRING, 1))
            one(protocol).writeString(`with`(Expectations.equal("Larry")))
            nextWrite(e, protocol, new TField("age", TType.I32, 2))
            one(protocol).writeI32(13)
            endWrite(e, protocol)
            endWrite(e, protocol)
          }
          whenExecuting {
            new Empire(
              "Canada",
              Utilities.makeList("Manitoba", "Alberta"),
              new Emperor("Larry", 13)
            ).write(protocol) must be(())
          }
        }
      }
      "exception" in { _ =>
        (new Xception(1, "boom")).isInstanceOf[Exception] must be(true)
        new Xception(2, "kathunk").getMessage must be("kathunk")
      }
      "exception getMessage" in { _ =>
        new StringMsgException(1, "jeah").getMessage must be("jeah")
        new NonStringMessageException(5).getMessage must be("5")
      }
      "with more than 22 fields" should {
        // Beyond the 22-arity limit of Scala tuples/case classes, so the
        // generator's builder, equals, hashCode and toString need coverage.
        "apply" in { _ =>
          new Biggie.Builder().build().getNum25() must be(25)
        }
        "two default object must be equal" in { _ =>
          new Biggie.Builder().build() must be(new Biggie.Builder().build())
        }
        "copy and equals" in { _ =>
          new Biggie.Builder().build().copy().num10(-5).build() must be(new Biggie.Builder().num10(-5).build())
        }
        "hashCode is the same for two similar objects" in { _ =>
          new Biggie.Builder().build().hashCode must be(new Biggie.Builder().build().hashCode)
          new Biggie.Builder().num10(-5).build().hashCode must be(new Biggie.Builder().num10(-5).build().hashCode)
        }
        "hashCode is different for two different objects" in { _ =>
          new Biggie.Builder().num10(-5).build().hashCode must not be(new Biggie.Builder().build().hashCode)
        }
        "toString" in { _ =>
          new Biggie.Builder().build().toString must be(("Biggie(" + 1.to(25).map(_.toString).mkString(",") + ")"))
        }
      }
    }
    // --- thrift unions: arity rules, unknown-field evolution, nesting ---
    "unions" should {
      "zero fields" should {
        "read" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e =>
            emptyRead(e, protocol)
          }
          whenExecuting {
            intercept[TProtocolException] {
              Bird.decode(protocol)
            }
          }
        }
        "instantiate" in { _ =>
          intercept[NullPointerException] {
            Bird.newRaptor(null)
          }
        }
      }
      "one field" should {
        "read" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startRead(e, protocol, new TField("hummingbird", TType.STRING, 2))
            one(protocol).readString(); will(returnValue("Ruby-Throated"))
            endRead(e, protocol)
          }
          whenExecuting {
            Bird.decode(protocol) must be(Bird.newHummingbird("Ruby-Throated"))
          }
        }
        "write" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startWrite(e, protocol, new TField("owlet_nightjar", TType.STRING, 3))
            one(protocol).writeString(`with`(Expectations.equal("foo")))
            endWrite(e, protocol)
          }
          whenExecuting {
            Bird.newOwletNightjar("foo").write(protocol)
          }
        }
      }
      "more than one field" should {
        "read" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startRead(e, protocol, new TField("hummingbird", TType.STRING, 2))
            one(protocol).readString(); will(returnValue("Anna's Hummingbird"))
            nextRead(e, protocol, new TField("owlet_nightjar", TType.STRING, 3))
            one(protocol).readBinary(); will(returnValue(ByteBuffer.allocate(1)))
            endRead(e, protocol)
          }
          whenExecuting {
            intercept[TProtocolException] {
              Bird.decode(protocol)
            }
          }
        }
        // no write test because it's not possible
      }
      "unknown field" should {
        "read as unknown" in { _ =>
          // A newer union variant decoded by an older schema must surface as
          // the sentinel unknown value rather than failing.
          val prot = new TBinaryProtocol(new TMemoryBuffer(64))
          val unionField = new NewUnionField(
            14653230,
            new SomeInnerUnionStruct(26, "a_a")
          )
          val newUnion = UnionPostEvolution.newNewField(unionField)
          UnionPostEvolution.encode(newUnion, prot)
          val decoded = UnionPreEvolution.decode(prot)
          // work around weird error when trying to reference java enums from scala.
          // java.lang.AssertionError: thrift/java_test/UnionPreEvolution$AnotherName already declared as ch.epfl.lamp.fjbg.JInnerClassesAttribute$Entry@3ac8b10
          decoded.setField.toString must be("UNKNOWN_UNION_VALUE")
        }
      }
      "nested struct" should {
        "read" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startRead(e, protocol, new TField("raptor", TType.STRUCT, 1))
            startRead(e, protocol, new TField("isOwl", TType.BOOL, 1))
            one(protocol).readBool(); will(returnValue(false))
            nextRead(e, protocol, new TField("species", TType.STRING, 2))
            one(protocol).readString(); will(returnValue("peregrine"))
            endRead(e, protocol)
            endRead(e, protocol)
          }
          whenExecuting {
            Bird.decode(protocol) must be(Bird.newRaptor(new Raptor(false, "peregrine")))
          }
        }
        "write" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startWrite(e, protocol, new TField("raptor", TType.STRUCT, 1))
            startWrite(e, protocol, new TField("isOwl", TType.BOOL, 1))
            one(protocol).writeBool(`with`(Expectations.equal(true)))
            nextWrite(e, protocol, new TField("species", TType.STRING, 2))
            one(protocol).writeString(`with`(Expectations.equal("Tyto alba")))
            endWrite(e, protocol)
            endWrite(e, protocol)
          }
          whenExecuting {
            Bird.newRaptor(new Raptor(true, "Tyto alba")).write(protocol)
          }
        }
      }
      "collection" should {
        "read" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startRead(e, protocol, new TField("flock", TType.LIST, 4))
            one(protocol).readListBegin(); will(returnValue(new TList(TType.STRING, 3)))
            one(protocol).readString(); will(returnValue("starling"))
            one(protocol).readString(); will(returnValue("kestrel"))
            one(protocol).readString(); will(returnValue("warbler"))
            one(protocol).readListEnd()
            endRead(e, protocol)
          }
          whenExecuting {
            Bird.decode(protocol) must be(Bird.newFlock(Utilities.makeList("starling", "kestrel", "warbler")))
          }
        }
        "write" in { cycle => import cycle._
          val protocol = mock[TProtocol]
          expecting { e => import e._
            startWrite(e, protocol, new TField("flock", TType.LIST, 4))
            one(protocol).writeListBegin(`with`(listEqual(new TList(TType.STRING, 3))))
            one(protocol).writeString(`with`(Expectations.equal("starling")))
            one(protocol).writeString(`with`(Expectations.equal("kestrel")))
            one(protocol).writeString(`with`(Expectations.equal("warbler")))
            one(protocol).writeListEnd()
            endWrite(e, protocol)
          }
          whenExecuting {
            Bird.newFlock(Utilities.makeList("starling", "kestrel", "warbler")).write(protocol)
          }
        }
      }
      "primitive field type" in { _ =>
        import thrift.java_def._default_._
        // Real (non-mock) binary round trip for a union over primitive types.
        val protocol = new TBinaryProtocol(new TMemoryBuffer(10000))
        var original: NaughtyUnion = NaughtyUnion.newValue(1)
        NaughtyUnion.encode(original, protocol)
        NaughtyUnion.decode(protocol) must be(original)
        original = NaughtyUnion.newFlag(true)
        NaughtyUnion.encode(original, protocol)
        NaughtyUnion.decode(protocol) must be(original)
        original = NaughtyUnion.newText("false")
        NaughtyUnion.encode(original, protocol)
        NaughtyUnion.decode(protocol) must be(original)
      }
    }
  }
  // --- generated service interfaces and Finagle client/server plumbing ---
  "JavaGenerator service" should {
    "generate a service interface" in { _ =>
      val service: SimpleService.Iface = new SimpleService.Iface {
        def deliver(where: String) = 3
      }
      service.deliver("Boston") must be(3)
    }
    "generate a future-based service interface" in { _ =>
      val service: SimpleService.FutureIface = new SimpleService.FutureIface {
        def deliver(where: String) = Future(3)
      }
      Await.result(service.deliver("Boston")) must be(3)
    }
    "generate FinagledClient" should {
      // Wire a generated FinagleClient directly to a generated FinagleService
      // backed by a jMock implementation, bypassing the network.
      val context = new Mockery
      context.setImposteriser(ClassImposteriser.INSTANCE)
      val impl = context.mock(classOf[ExceptionalService.FutureIface])
      val service = new ExceptionalService$FinagleService(impl, new TBinaryProtocol.Factory)
      val clientService = new finagle.Service[ThriftClientRequest, Array[Byte]] {
        def apply(req: ThriftClientRequest) = service(req.message)
      }
      val client = new ExceptionalService$FinagleClient(clientService,
        new TBinaryProtocol.Factory, "ExceptionalService", NullStatsReceiver)
      "success" in { _ =>
        context.checking(new Expectations {
          one(impl).deliver("Boston");
          will(returnValue(Future.value(42)))
        })
        Await.result(client.deliver("Boston")) must be(42)
        context.assertIsSatisfied()
      }
      "exception" in { _ =>
        val ex = new Xception(1, "boom")
        context.checking(new Expectations {
          one(impl).deliver("Boston");
          will(returnValue(Future.exception(ex)))
        })
        assert(new Xception(1, "boom") == ex)
        val e = intercept[Xception] {
          Await.result(client.deliver("Boston"))
        }
        e must be(new Xception(1, "boom"))
        context.assertIsSatisfied()
      }
      "source exception" in { _ =>
        // Declared exceptions not in the IDL are wrapped; the client must
        // stamp them with its service name.
        val ex = new SourcedException {}
        context.checking(new Expectations {
          one(impl).deliver("Boston");
          will(returnValue(Future.exception(ex)))
        })
        val e = intercept[SourcedException] {
          Await.result(client.deliver("Boston"))
        }
        e.serviceName must be("ExceptionalService")
        context.assertIsSatisfied()
      }
    }
  }
}
| nkhuyu/scrooge | scrooge-generator/src/test/scala/com/twitter/scrooge/backend/JavaGeneratorSpec.scala | Scala | apache-2.0 | 29,760 |
package interretis
import interretis.utils.SparkContextBuilder.buildContext
import org.apache.spark.rdd.RDD
import language.postfixOps
class CharacterCount {

  /**
   * Counts how many verses mention the given character.
   *
   * @param verses    one verse of the book per RDD element
   * @param character name searched for as a raw substring of each verse
   * @return number of verses containing the character's name
   */
  def countCharacter(verses: RDD[String], character: String): Long =
    verses.filter(verse => verse.contains(character)).count()
}
object CharacterCount {

  /**
   * Entry point. Expects exactly two arguments: the path of a book's text
   * file and a character name. Prints how many verses (lines) of the book
   * mention the character.
   */
  def main(args: Array[String]): Unit = {
    val (book, character) = processArguments(args)
    val sc = buildContext(appName = "Character Count")
    val verses = sc textFile book
    val app = new CharacterCount
    val count = app countCharacter (verses, character)
    println(s"Character $character is mentioned $count times in $book")
  }

  /**
   * Validates the command line and extracts (bookPath, characterName).
   *
   * Fix: the original called `sys.error` (which throws a RuntimeException)
   * immediately before `sys.exit(1)`, so the exit was unreachable and the
   * user saw a stack trace instead of the message. The message now goes to
   * stderr and the process exits with status 1 as intended.
   */
  private def processArguments(args: Array[String]): (String, String) = {
    val expected = 2
    val actual = args.length
    if (actual != expected) {
      Console.err.println(s"$expected arguments required and $actual given")
      sys.exit(1)
    }
    (args(0), args(1))
  }
}
| MarekDudek/spark-certification | src/main/scala/interretis/CharacterCount.scala | Scala | mit | 1,001 |
package mimir.ml.spark
import java.sql.Timestamp
import java.sql.Date
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.ml.PipelineModel
import org.apache.spark.ml.feature.Imputer
import org.apache.spark.sql.{SparkSession, SQLContext, DataFrame, Row, Dataset}
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.types.{
DataType,
DoubleType,
LongType,
FloatType,
BooleanType,
IntegerType,
StringType,
StructField,
StructType,
ShortType,
DateType,
TimestampType
}
import mimir.Database
import mimir.algebra._
import mimir.exec.spark.RAToSpark
import mimir.exec.spark.MimirSpark
import mimir.provenance.Provenance
import mimir.util.SparkUtils
import mimir.util.ExperimentalOptions
object SparkML {
  // The model type used throughout mimir is a fitted Spark ML pipeline.
  type SparkModel = PipelineModel
  // Parameters handed to a model generator; handleInvalid is forwarded to
  // Spark feature transformers and must be one of "keep", "skip" or "error".
  case class SparkModelGeneratorParams(db:Database, predictionCol:ID, handleInvalid:String /*keep, skip, error*/)
  // A factory producing a fitted pipeline from the generator parameters.
  type SparkModelGenerator = SparkModelGeneratorParams => PipelineModel
}
/**
 * Base class for mimir's Spark-ML-backed learners. Concrete subclasses
 * supply the prediction-extraction logic; this class provides the shared
 * plumbing for null handling and for running a fitted pipeline over either
 * a mimir query or a prepared DataFrame.
 */
abstract class SparkML {
  /** SparkContext of mimir's shared Spark session. */
  def getSparkSession() : SparkContext = {
    MimirSpark.get.sparkSession.sparkContext
  }
  /** SQL context (MimirSpark.get is used as an SQLContext — TODO confirm). */
  def getSparkSqlContext() : SQLContext = {
    MimirSpark.get
  }
  // Hook type letting callers preprocess a DataFrame before the model runs.
  type DataFrameTransformer = (DataFrame) => DataFrame
  /**
   * Replaces nulls in numeric columns with the column mean via Spark's
   * Imputer. NOTE(review): scanning for nulls issues one count per column,
   * which is a full pass over the data each time; string and other
   * non-numeric columns are intentionally left untouched.
   */
  protected def nullValueReplacement(df:DataFrame): DataFrame = {
    import org.apache.spark.sql.functions.mean
    val imputerCols = df.schema.fields.flatMap(col => {
      if(df.filter(df(col.name).isNull).count > 0)
        col.dataType match {
          case IntegerType | LongType | DoubleType | FloatType => Some(col.name)
          case StringType => None
          case _ => None
        }
      else None
    }).toArray
    new Imputer().setInputCols(imputerCols) .setOutputCols(imputerCols).fit(df).transform(df)
  }
  /**
   * Replaces nulls with type-appropriate zero/empty defaults, column by
   * column (0 for numerics/dates/booleans, "" for everything else).
   */
  def fillNullValues(df:DataFrame) : DataFrame = {
    df.schema.fields.foldLeft(df)((init, curr) => {
      curr.dataType match {
        case LongType => init.na.fill(0L, Seq(curr.name))
        case IntegerType => init.na.fill(0L, Seq(curr.name))
        case FloatType => init.na.fill(0.0, Seq(curr.name))
        case DoubleType => init.na.fill(0.0, Seq(curr.name))
        case ShortType => init.na.fill(0.0, Seq(curr.name))
        case DateType => init.na.fill(0, Seq(curr.name))
        case BooleanType => init.na.fill(0, Seq(curr.name))
        case TimestampType => init.na.fill(0L, Seq(curr.name))
        case x => init.na.fill("", Seq(curr.name))
      }
    })
  }
  /**
   * Runs the model over the result of a mimir query. The query is fully
   * materialized in the driver (results.toList) before being parallelized;
   * each row is prefixed with its provenance token as a "rowid" column.
   */
  def applyModelDB(model : PipelineModel, query : Operator, db:Database, dfTransformer:Option[DataFrameTransformer] = None) : DataFrame = {
    val data = db.query(query)(results => {
      results.toList.map(row => row.provenance +: row.tupleSchema.zip(row.tuple).filterNot(_._1._1.equalsIgnoreCase("rowid")).unzip._2)
    })
    applyModel(
      model,
      (ID("rowid"), TString()) +:
      db.typechecker
        .schemaOf(query)
        .filterNot { _._1.equals(ID("rowid")) },
      data,
      db,
      dfTransformer
    )
  }
  /**
   * Runs the model over in-memory rows. Mimir primitive values are converted
   * to Spark expressions, the rows are parallelized into a DataFrame with
   * the given schema, optionally transformed, then fed to the pipeline.
   */
  def applyModel( model : PipelineModel, cols:Seq[(ID, Type)], testData : List[Seq[PrimitiveValue]], db: Database, dfTransformer:Option[DataFrameTransformer] = None): DataFrame = {
    val sqlContext = getSparkSqlContext()
    import sqlContext.implicits._
    val modDF = dfTransformer.getOrElse((df:DataFrame) => df)
    model.transform(modDF(sqlContext.createDataFrame(
      getSparkSession().parallelize(testData.map( row => {
        Row(row.zip(cols).map(value => new RAToSpark(db).mimirExprToSparkExpr(null, value._1)):_*)
      })), StructType(cols.toList.map(col => StructField(col._1.id, RAToSpark.getSparkType(col._2), true))))))
  }
  /** Runs the model over an already-prepared DataFrame. */
  def applyModel( model : PipelineModel, inputDF:DataFrame): DataFrame = {//inputPlan:LogicalPlan): DataFrame = {
    model.transform(inputDF)
  }
  // Abstract: extract (rowid -> (predicted value, confidence)) pairs from a
  // prediction DataFrame, capped at maxPredictions per row.
  def extractPredictions(model : PipelineModel, predictions:DataFrame, maxPredictions:Int = 5) : Seq[(String, (String, Double))]
  // Abstract: same, but restricted to a single row id.
  def extractPredictionsForRow(model : PipelineModel, predictions:DataFrame, rowid:String, maxPredictions:Int = 5) : Seq[(String, Double)]
  /**
   * Converts a mimir PrimitiveValue to the native JVM value Spark expects
   * for the given type. Nulls are mapped to type-appropriate defaults
   * (0, empty string, sentinel date/timestamp) rather than null.
   */
  def getNative(value:PrimitiveValue, t:Type): Any = {
    value match {
      case NullPrimitive() => t match {
        case TInt() => 0L
        case TFloat() => new java.lang.Double(0.0)
        case TDate() => RAToSpark.defaultDate
        case TString() => ""
        case TBool() => new java.lang.Boolean(false)
        case TRowId() => ""
        case TType() => ""
        case TAny() => ""
        case TTimestamp() => RAToSpark.defaultTimestamp
        case TInterval() => ""
        // User-defined types recurse on their registered base type.
        case TUser(name) => getNative(value, mimir.algebra.TypeRegistry.registeredTypes(name)._2)
        case x => ""
      }
      case RowIdPrimitive(s) => s
      case StringPrimitive(s) => s
      case IntPrimitive(i) => i
      case FloatPrimitive(f) => new java.lang.Double(f)
      case BoolPrimitive(b) => new java.lang.Boolean(b)
      case ts@TimestampPrimitive(y,m,d,h,mm,s,ms) => SparkUtils.convertTimestamp(ts)
      case dt@DatePrimitive(y,m,d) => SparkUtils.convertDate(dt)
      case x => x.asString
    }
  }
}
| UBOdin/mimir | src/main/scala/mimir/ml/spark/SparkML.scala | Scala | apache-2.0 | 5,240 |
/**
* Copyright: Copyright (C) 2016, Jaguar Land Rover
* License: MPL-2.0
*/
package org.genivi.webserver.controllers
import jp.t2v.lab.play2.auth._
import org.genivi.webserver.Authentication.{Account, User, Role}
import play.api.mvc.{RequestHeader, Result}
import play.api.mvc.Results.{Redirect, Forbidden, Unauthorized}
import scala.concurrent.{ExecutionContext, Future}
import scala.reflect.{ClassTag, classTag}
/**
* Trait for authentication implicits.
*
*/
/**
 * play2-auth configuration shared by the web server's controllers: user and
 * authority types, session handling, and the redirect/response policy for
 * the login, logout, and failure cases.
 */
trait AuthConfigImpl extends AuthConfig {

  /**
   * A type that is used to identify a user.
   * `String`, `Int`, `Long` and so on.
   */
  type Id = String

  /**
   * A type that represents a user in your application.
   * `User`, `Account` and so on.
   */
  type User = Account

  /**
   * A type that is defined by every action for authorization.
   */
  type Authority = Role

  /**
   * A `ClassTag` is used to retrieve an id from the Cache API.
   */
  val idTag: ClassTag[Id] = classTag[Id]

  /**
   * The session timeout in seconds
   */
  val sessionTimeoutInSeconds: Int = 3600

  /**
   * Where to redirect the user after a successful login: back to the URI
   * they originally tried to access (stashed in the session by
   * `authenticationFailed`), or the index page otherwise.
   */
  def loginSucceeded(request: RequestHeader)(implicit ctx: ExecutionContext): Future[Result] = {
    // Call.url is already a String; the original's redundant .toString was dropped.
    val uri = request.session.get("access_uri").getOrElse(routes.Application.index().url)
    Future.successful(Redirect(uri).withSession(request.session - "access_uri"))
  }

  /**
   * Where to redirect the user after logging out
   */
  def logoutSucceeded(request: RequestHeader)(implicit ctx: ExecutionContext): Future[Result] =
    Future.successful(Redirect(routes.Application.login()))

  /**
   * Called when an unauthenticated user hits a protected resource:
   * JSON API clients get a bare 401, browsers are redirected to the login
   * page with the requested URI remembered in the session.
   */
  def authenticationFailed(request: RequestHeader)(implicit ctx: ExecutionContext): Future[Result] = {
    val redirectResponse = Redirect(routes.Application.login()).withSession("access_uri" -> request.uri)
    request.headers.get("Accept") match {
      case Some(accept) if accept.contains("application/json") =>
        Future.successful(Unauthorized)
      case _ =>
        Future.successful(redirectResponse)
    }
  }

  /**
   * If authorization failed (usually incorrect password) respond as follows:
   */
  override def authorizationFailed(request: RequestHeader, user: User, authority: Option[Authority])
                                  (implicit context: ExecutionContext): Future[Result] = {
    Future.successful(Forbidden("no permission"))
  }

  /**
   * Determines whether `user` holds the required `authority`.
   * Fix: removed a leftover debug `println("Authorizing")`.
   */
  def authorize(user: User, authority: Authority)(implicit ctx: ExecutionContext): Future[Boolean] = Future.successful {
    (user.role, authority) match {
      case (User, User) => true
      case _ => false
    }
  }

  /**
   * Session token is carried in a cookie.
   * NOTE: `cookieSecureOption` should be true in production behind HTTPS.
   */
  override lazy val tokenAccessor = new CookieTokenAccessor(
    cookieSecureOption = false,
    cookieMaxAge = Some(sessionTimeoutInSeconds)
  )
}
| PDXostc/rvi_sota_server | web-server/app/org/genivi/webserver/controllers/AuthConfigImpl.scala | Scala | mpl-2.0 | 3,624 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness.component
import akka.actor.{ActorRef, Actor}
import akka.pattern.ask
import akka.util.Timeout
import com.webtrends.harness.logging.Logger
import scala.concurrent.duration._
import com.webtrends.harness.HarnessConstants
import scala.concurrent.{Await, Promise, Future}
import scala.util.{Failure, Success}
/**
* This is a helper class that enables developers who use to trait to interact with the ComponentManager
* easily on the harness
*/
trait ComponentHelper {
  this: Actor =>

  import context.dispatcher

  // Lazily-resolved lookup state for the ComponentManager actor.
  // NOTE(review): these vars are mutated from Future callbacks that run on the
  // dispatcher, not from the actor's message-processing thread — confirm that
  // concurrent calls into initComponentHelper cannot race on them.
  var componentManagerInitialized = false
  var componentManager:Option[ActorRef] = None

  /**
   * This should only be called once to initialize the component manager actor. Will retry when called
   * as many methods require this to work.
   *
   * @return a Future completed with the ComponentManager's ActorRef, or failed
   *         with a ComponentException if it cannot be resolved within ~5 seconds
   */
  def initComponentHelper : Future[ActorRef] = {
    val p = Promise[ActorRef]()

    // Re-resolves the ComponentManager until it appears or the deadline passes.
    // NOTE(review): on a failed resolve this recurses immediately with no delay,
    // so it can spin on the dispatcher for the whole window — consider a
    // scheduler-based retry instead.
    def awaitComponentManager(timeOut: Deadline) {
      if (timeOut.isOverdue() && !componentManagerInitialized) {
        componentManagerInitialized = true
        p failure ComponentException("Component Manager", "Failed to get component manager")
      } else {
        context.actorSelection(HarnessConstants.ComponentFullName).resolveOne()(1 second) onComplete {
          case Success(s) =>
            componentManager = Some(s)
            componentManagerInitialized = true
            p success s
          case Failure(f) => awaitComponentManager(timeOut)
        }
      }
    }
    componentManager match {
      case Some(cm) => p success cm
      case None =>
        if (!componentManagerInitialized) {
          val deadline = 5 seconds fromNow
          awaitComponentManager(deadline)
        } else {
          // Initialization already completed once but yielded no manager.
          p failure ComponentException("Component Manager", "Component manager did not initialize")
        }
    }
    p.future
  }

  /**
   * Wrapper function around request that allows the developer to not have to deal with the
   * ComponentResponse return object, and just deal with the message that they care about
   *
   * @param name name of the component
   * @param msg msg to send to the component
   * @return a Future completed with the unwrapped response payload
   */
  def unwrapRequest[T, U](name:String, msg:ComponentRequest[T]) : Future[U] = {
    val p = Promise[U]()
    componentRequest(name, msg).mapTo[ComponentResponse[U]] onComplete {
      case Success(s) => p success s.resp
      case Failure(f) => p failure f
    }
    p.future
  }

  /** Ask component `name` with `msg`, optionally addressing one of its children. */
  def request[T](name:String, msg:Any, childName:Option[String]=None) : Future[ComponentResponse[T]] =
    componentRequest(name, ComponentRequest(msg, childName))

  /**
   * Simplest way to make a request directly to a component, will return a Future holding whatever the component returns
   * @param name Name of the component
   * @param msg Message to send it
   */
  def unwrapSelfRequest[T](name:String, msg:AnyRef) : Future[T] = {
    unwrapRequest[msg.type, T](name, ComponentRequest[msg.type](msg, Some(ComponentManager.ComponentRef)))
  }

  /**
   * Wrapper function that allows developer to make requests to components individually without having to know about the
   * ComponentManager as the parent that routes the messages to the various components
   *
   * @param name name of the component
   * @param msg message you want to send to the component
   * @return a Future completed with the component's wrapped response
   */
  def componentRequest[T, U](name:String, msg:ComponentRequest[T]) : Future[ComponentResponse[U]] = {
    val p = Promise[ComponentResponse[U]]()
    initComponentHelper onComplete {
      case Success(cm) =>
        // Ask through the manager using the request's own timeout.
        (cm ? Request(name, msg))(msg.timeout).mapTo[ComponentResponse[U]] onComplete {
          case Success(s) => p success s
          case Failure(f) => p failure f
        }
      case Failure(f) => p failure f
    }
    p.future
  }

  /** Fire-and-forget a message addressed to the component itself (not a child). */
  def selfMessage(name:String, msg:Any) =
    componentMessage(name, ComponentMessage(msg, Some(ComponentManager.ComponentRef)))

  /**
   * Wrapper function that will allow you to send any message in and it will
   * wrap the msg within a ComponentMessage case class
   *
   * @param name name of component
   * @param msg message to send
   * @param childName name of component's child, or 'self' if one wants to hit the component itself
   */
  def message(name:String, msg:Any, childName:Option[String]=None) =
    componentMessage(name, ComponentMessage(msg, childName))

  /**
   * Wrapper function that allows the developer to message components individually without having to know about the
   * ComponentManager as the parent that routes the messages to the various components
   *
   * @param name name of the component
   * @param msg message you want to send to the component
   */
  def componentMessage[T](name:String, msg:ComponentMessage[T]) = {
    initComponentHelper onComplete {
      case Success(cm) =>
        cm ! Message(name, msg)
      // NOTE(review): throwing from an onComplete callback surfaces on the
      // dispatcher, not to the caller — this failure is likely swallowed;
      // confirm whether logging it instead is intended.
      case Failure(f) => throw f
    }
  }

  /**
   * Wrapper function that allows developers to get the actor reference for a particular component
   *
   * @param name the name of the component
   * @param timeout implicit timeout value
   * @return a Future completed with the component's ActorRef, or failed with
   *         ComponentNotFoundException when the manager knows no such component
   */
  def getComponent(name:String)(implicit timeout:Timeout) : Future[ActorRef] = {
    val p = Promise[ActorRef]()
    initComponentHelper onComplete {
      case Success(cm) =>
        (cm ? GetComponent(name))(timeout).mapTo[Option[ActorRef]] onComplete {
          case Success(s) =>
            s match {
              case Some(ref) => p success ref
              case None => p failure ComponentNotFoundException("Component Manager", s"component $name not found")
            }
          case Failure(f) => p failure f
        }
      case Failure(f) => p failure f
    }
    p.future
  }
}
| davis20/wookiee | wookiee-core/src/main/scala/com/webtrends/harness/component/ComponentHelper.scala | Scala | apache-2.0 | 6,414 |
/*
active-learning-scala: Active Learning library for Scala
Copyright (c) 2014 Davi Pereira dos Santos
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package al.strategies
import java.io.{File, FileWriter}
import clean.lib.Global
import ml.Pattern
import ml.classifiers.NoLearner
import ml.clusterers.HC.HClusterer
/**
 * Hierarchical Sampling (HS) active-learning strategy: clusters the candidate
 * pool with Weka's WARD hierarchical clusterer, persists the tree/labels/ids
 * to disk, and delegates the actual per-round selections to Daniel Hsu's
 * external GPL `sample` binary, whose output indexes into the pool.
 */
case class HS(pool: Seq[Pattern], debug: Boolean = false)
  extends StrategyAgnostic {
  override val toString = "Hierarchical Sampling"
  val abr = "HS"
  val id = 1
  //  println(
  //    """
  //      |The executable file used for Cluster-based strategy is part of HS.
  //      |
  //      |Hierarchical Sampling (HS) version 1.0
  //      |Daniel Hsu <djhsu@cs.ucsd.edu>
  //      |(C) Copyright 2008, Daniel Hsu
  //      |
  //      |HS is free software: you can redistribute it and/or modify it under the terms
  //      |of the GNU General Public License as published by the Free Software Foundation,
  //      |either version 3 of the License, or (at your option) any later version.
  //      |
  //      |HS is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
  //      |without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
  //      |PARTICULAR PURPOSE.  See the GNU General Public License for more details.
  //      |
  //      |You should have received a copy of the GNU General Public License with this
  //      |program.  If not, see <http://www.gnu.org/licenses/>.
  //      |
  //      |
  //      |Based on work described in
  //      |
  //      |  Sanjoy Dasgupta and Daniel Hsu, Hierarchical sampling for active learning.
  //      |  Twenty-Fifth International Conference on Machine Learning, 2008.
  //      |
  //      |
  //    """.stripMargin)
  val learner = NoLearner()
  // Number of candidate patterns (`rest` is supplied by StrategyAgnostic).
  lazy val size = rest.length
  // WARD hierarchical clustering of the candidates, computed once on demand.
  lazy val clusters = {
    //    println("Calling Weka WARD clusterer...")
    val r = HClusterer(rest)
    //    println(" Weka WARD clusterer called.")
    r
  }
  // Quasi-unique key for cache filenames, derived from the first 20 pattern ids.
  lazy val uuid = pool.take(20).map(_.id).mkString(".") + s"-${pool.size}-"
  // NOTE(review): hard-coded, machine-specific cache directory — should be configurable.
  lazy val outputPath = "/home/davi/clusterings"
  lazy val dataset = pool.head.dataset().relationName().split("/").last.take(30)
  // On-disk cache files exchanged with the external `sample` binary.
  lazy val tree_file = s"$outputPath/ClusterBased-$dataset-" + uuid + ".tree"
  lazy val labels_file = s"$outputPath/ClusterBased-$dataset-" + uuid + ".labels"
  lazy val ids_file = s"$outputPath/ClusterBased-$dataset-" + uuid + ".ids"
  // Pre-computed selection order for every round, as pool indexes returned by
  // the external program (one run covers the whole query sequence).
  lazy val results = {
    //    println("The executable file used for Cluster-based strategy is part of HS. Hierarchical Sampling (HS) version 1.0 see LICENSE GPL file.")
    println(" HS GPL ")
    val f = new File(tree_file)
    if (f.exists() && f.length() > 0) println("Tree already done, recovering ...")
    else {
      // Materialize the cluster tree, labels and ids so the external binary can read them.
      // NOTE(review): the FileWriters are not closed in try/finally, so a write
      // failure leaks file handles and may leave a truncated cache — confirm acceptable.
      val parents = clusters.parent_vector.mkString("\\n")
      val labels = rest.map(_.label).mkString("\\n")
      val ids = rest.map(_.id).mkString("\\n")
      val fw2 = new FileWriter(labels_file)
      fw2.write(labels)
      fw2.close()
      val fw3 = new FileWriter(ids_file)
      fw3.write(ids)
      fw3.close()
      val fw = new FileWriter(tree_file)
      fw.write(parents)
      fw.close()
    }
    println("Calling external program...")
    import scala.sys.process._
    // NOTE(review): ProcessBuilder.lines blocks and throws on a non-zero exit
    // status (and is deprecated in newer Scala versions in favour of lineStream).
    val s = Seq(Global.appPath + "/external-software/hierarchical-al/sample", nclasses.toString, tree_file, labels_file, "foo").lines.map(_.toInt).toArray
    println(" external program called...")
    s
  }

  // Selects the pre-computed pattern for the current round; rounds are indexed
  // by how many queries have been answered so far (size - unlabeled.size).
  protected def next(unlabeled: Seq[Pattern], labeled: Seq[Pattern]) = {
    val selected = rest(results(size - unlabeled.size))
    selected
  }

  // Debug-only 2D visualization: plots the pool, the labeled points, and
  // highlights the current selection, then pauses briefly.
  protected def visual_test(selected: Pattern, unlabeled: Seq[Pattern], labeled: Seq[Pattern]) {
    plot.zera()
    for (p <- distinct_pool) plot.bola(p.x, p.y, p.label.toInt, 9)
    for (p <- labeled) plot.bola(p.x, p.y, p.label.toInt + 5, 9)
    if (selected != null) plot.bola(selected.x, selected.y, -1, 15)
    plot.mostra()
    Thread.sleep((delay * 10).round.toInt)
  }
}
| active-learning/active-learning-scala | src/main/scala/al/strategies/HS.scala | Scala | gpl-2.0 | 4,673 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.libs.ws
import java.io.Closeable
import java.io.IOException
/**
* A Play specific WS client that can use Play specific classes in the request and response building.
*
* Typically, access this class through dependency injection, i.e.
*
* {{{
* class MyService @Inject()(ws: WSClient) {
* val response: Future[WSResponse] = ws.url("http://example.com").get()
* }
* }}}
*
* Please see the documentation at https://www.playframework.com/documentation/latest/ScalaWS for more details.
*/
trait WSClient extends Closeable {

  /**
   * The underlying implementation of the client, if any. You must cast explicitly to the type you want.
   * @tparam T the type you are expecting (i.e. isInstanceOf)
   * @return the backing class.
   */
  def underlying[T]: T

  /**
   * Generates a request.
   *
   * @param url The base URL to make HTTP requests to.
   * @return a request
   */
  def url(url: String): WSRequest

  /**
   * Closes this client, and releases underlying resources.
   *
   * Once closed, neither this client nor requests created from it should be reused.
   */
  @throws[IOException]
  def close(): Unit
}
| benmccann/playframework | transport/client/play-ws/src/main/scala/play/api/libs/ws/WSClient.scala | Scala | apache-2.0 | 1,127 |
package be.rubenpieters.gre.rules
import java.util.UUID
/**
* Created by rpieters on 7/08/2016.
*/
// this is code im playing with, to be integrated with or replace the corresponding classes
abstract class Rule {
  this: OverrideCreator with Costed with Labeled =>

  /**
   * Applies this rule on behalf of entity `fromEntityId` to the given world
   * state, returning a new manager with the produced property overrides merged
   * in and the turn advanced to the next entity.
   */
  def apply(fromEntityId: String)(immutableEntityManager: ImmutableEntityManager) = {
    // Ask the mixed-in OverrideCreator which property changes this rule produces.
    val propertyOverrides = createOverrides(fromEntityId, immutableEntityManager)
    // Rebuild each affected entity with its overrides layered over existing properties.
    val propertyOverridesPerEntity = propertyOverrides.groupBy(_.entityName)
    val updatedEntityMap = propertyOverridesPerEntity.map { case (entityName, propertyOverrideSeq) =>
      val entity = immutableEntityManager.entityMap.get(entityName).get
      (entityName,
        ImmutableEntity(
          entity.groupId,
          entity.uniqueId,
          entity.properties ++ PropertyOverride.seqPropertyOverrideToMap(propertyOverrideSeq),
          entity.ruleSet
        ))
    }
    // NOTE(review): the rebuilt manager advances the turn (nextEntityId), but each
    // entity's ruleSet.currentRuleId is carried over unchanged — confirm rules are
    // meant to repeat rather than advance through the sequence.
    ImmutableEntityManager(immutableEntityManager.entityMap ++ updatedEntityMap, immutableEntityManager.entityIdSequence, immutableEntityManager.nextEntityId)
  }
}
/** Strategy mixin: computes the property changes a rule produces. */
trait OverrideCreator {
  def createOverrides(fromEntityId: String, immutableEntityManager: ImmutableEntityManager): Seq[PropertyOverride]
}

/** Mixin giving a rule an execution cost (defaults to 1). */
trait Costed {
  def cost: Long = 1
}

/** Mixin giving a rule a human-readable label, used by toString. */
trait Labeled {
  def label: String

  override def toString = {
    s"Rule[$label]"
  }
}

/** Labeled variant whose label is a random UUID. */
trait UuidLabeled extends Labeled {
  // NOTE(review): label is a def, so each call yields a DIFFERENT UUID —
  // use a val if a stable identifier per instance is intended.
  def label: String = UUID.randomUUID().toString
}
/**
 * A single property change for one entity: `propertyName` is (re)set to
 * `newValue`; `oldValue` records the previous value when known.
 */
case class PropertyOverride(entityName: String, propertyName: String, oldValue: Option[Long], newValue: Long)

object PropertyOverride {
  /**
   * Collapses overrides into a propertyName -> newValue map.
   * When the same property appears more than once, the last entry wins.
   */
  def seqPropertyOverrideToMap(propertyOverrides: Seq[PropertyOverride]): Map[String, Long] =
    propertyOverrides.iterator.map(po => po.propertyName -> po.newValue).toMap
}
/**
 * Immutable snapshot of one entity: its group, unique id, numeric properties,
 * and the rule set that drives its behaviour.
 */
case class ImmutableEntity(
  groupId: String,
  uniqueId: String,
  properties: Map[String, Long],
  ruleSet: RuleSet
) {
  /** The entity's current rule, partially applied to this entity's own id. */
  def nextRuleFunc: (ImmutableEntityManager) => ImmutableEntityManager =
    ruleSet.ruleSeq(ruleSet.currentRuleId)(uniqueId)
}
/** An ordered rule sequence plus the index of the rule to run next. */
case class RuleSet(ruleSeq: Seq[Rule], currentRuleId: Int)

object RuleSet {
  /**
   * Builds a RuleSet positioned at the first rule.
   *
   * @param ruleSeq the rules to cycle through; must be non-empty
   * @throws IllegalArgumentException if `ruleSeq` is empty
   */
  def init(ruleSeq: Seq[Rule]): RuleSet = {
    // Guard clause instead of the match-with-empty-case idiom; same exception and message.
    if (ruleSeq.isEmpty)
      throw new IllegalArgumentException("RuleSequence must have at least one rule to initialize")
    RuleSet(ruleSeq, 0)
  }
}
/**
 * Immutable world state: all entities keyed by unique id, the fixed turn
 * order, and the index of the entity whose rule fires next.
 */
case class ImmutableEntityManager(entityMap: Map[String, ImmutableEntity], entityIdSequence: Seq[String], currentEntityId: Int) {

  /**
   * The rule-application function of the entity whose turn it is.
   * Throws NoSuchElementException if the current id is missing from entityMap
   * (same failure mode as the previous `.get(...).get` chain).
   */
  def nextRuleFunc: (ImmutableEntityManager) => ImmutableEntityManager =
    entityMap(entityIdSequence(currentEntityId)).nextRuleFunc

  /** Runs the current entity's rule against this manager and returns the new state. */
  def applyNextRule: ImmutableEntityManager = nextRuleFunc.apply(this)

  /** Index of the next entity in round-robin order, wrapping back to 0. */
  def nextEntityId: Int = {
    // Debug printlns removed; logic unchanged (wraps when past the end).
    val candidate = currentEntityId + 1
    if (candidate >= entityIdSequence.size) 0 else candidate
  }
}
object ImmutableEntityManager {
  /**
   * Builds a manager from entities in turn order, starting at the first entity.
   *
   * @param entities the entities to manage; must be non-empty
   * @throws IllegalArgumentException if `entities` is empty
   */
  def init(entities: Seq[ImmutableEntity]): ImmutableEntityManager = {
    // Guard clause instead of the match-with-empty-case idiom; same exception and message.
    if (entities.isEmpty)
      throw new IllegalArgumentException("EntitySequence must have at least one rule to initialize")
    val entityIdSequence = entities.map(_.uniqueId)
    ImmutableEntityManager(entityIdSequence.zip(entities).toMap, entityIdSequence, 0)
  }
}
/**
 * Ad-hoc manual smoke test for the rule/entity machinery above: two entities
 * share one InitializeRule; each applyNextRule runs one entity's rule and
 * advances the turn, and the resulting states are printed for inspection.
 * NOTE(review): extends App, so everything runs in the delayed-init body —
 * acceptable for a scratch script, but prefer a main method for real entry points.
 */
object Adhoc extends App {

  /** Seed rule: gives the acting entity property A = 1 and B = 2. */
  object InitializeRule extends Rule with OverrideCreator with Costed with Labeled {
    override def label = "InitRule"

    override def createOverrides(fromEntityId: String, immutableEntityManager: ImmutableEntityManager): Seq[PropertyOverride] = {
      Seq(
        PropertyOverride(fromEntityId, "A", None, 1)
        ,PropertyOverride(fromEntityId, "B", None, 2)
      )
    }
  }

  val ruleSeq: Seq[Rule] = Seq(InitializeRule)
  val entity1 = ImmutableEntity("GroupId", "Entity1", Map(), RuleSet.init(ruleSeq))
  val entity2 = ImmutableEntity("GroupId", "Entity2", Map(), RuleSet.init(ruleSeq))
  val entityManager = ImmutableEntityManager.init(Seq(entity1, entity2))

  // First turn: Entity1's rule fires.
  val entityManager1 = entityManager.applyNextRule
  println(entityManager1.entityMap.get("Entity1").get)
  println(entityManager1.entityMap.get("Entity2").get)
  println(entityManager1.nextEntityId)

  // Second turn: Entity2's rule fires.
  val entityManager2 = entityManager1.applyNextRule
  println(entityManager2.entityMap.get("Entity1").get)
  println(entityManager2.entityMap.get("Entity2").get)
  println(entityManager2.nextEntityId)
}
package org.webant.commons.link
import java.sql.{Connection, DriverManager, SQLException}
import org.apache.commons.collections.MapUtils
import org.apache.commons.dbutils.QueryRunner
import org.apache.commons.dbutils.handlers.{ArrayListHandler, ScalarHandler}
import org.apache.commons.lang3.StringUtils
import org.apache.commons.lang3.reflect.FieldUtils
import org.apache.log4j.LogManager
import org.webant.commons.entity.Link
import org.webant.commons.utils.BeanUtils
import scala.collection.JavaConverters._
/**
 * JDBC-backed link store for a crawl task: persists Link rows in a per-task,
 * per-site table and exposes counting/reset/progress operations.
 * NOTE(review): the SQL mixes dialects — update() uses SQLite's
 * datetime('now','localtime') while upsert() uses MySQL's now() and
 * ON DUPLICATE KEY UPDATE — confirm which database each concrete subclass targets.
 */
trait JdbcLinkProvider extends ILinkProvider {
  private val logger = LogManager.getLogger(classOf[JdbcLinkProvider])
  private val threadLocal = new ThreadLocal[Connection]()
  protected val runner: QueryRunner = new QueryRunner()

  // Populated by the concrete subclass / init(); DRIVER is the JDBC driver class name.
  protected var DRIVER: String = _
  protected var taskId: String = _
  protected var siteId: String = _
  protected var table: String = _
  protected var conn: Connection = _

  /**
   * Initializes the provider from a parameter map; requires url, username,
   * password and siteId. Returns true when a JDBC connection was obtained.
   */
  override def init(params: java.util.Map[String, Object]): Boolean = {
    if (!params.containsKey("url") || !params.containsKey("username")
      || !params.containsKey("password") || !params.containsKey("siteId"))
      return false

    val url = MapUtils.getString(params, "url")
    val username = MapUtils.getString(params, "username")
    val password = MapUtils.getString(params, "password")
    batch = MapUtils.getInteger(params, "batch", 20)
    taskId = MapUtils.getString(params, "taskId")
    siteId = MapUtils.getString(params, "siteId")
    // One table per (task, site) pair.
    table = s"link_${taskId}_${siteId}"

    conn = getConnection(url, username, password)
    conn != null
  }

  /** Creates the link table (MySQL DDL) if it does not exist; false on SQL error. */
  protected def createTable(): Boolean = {
    val sql = s"CREATE TABLE IF NOT EXISTS `$table` (" +
      " `id` varchar(64) NOT NULL," +
      " `taskId` varchar(64) DEFAULT NULL," +
      " `siteId` varchar(64) DEFAULT NULL," +
      " `url` varchar(1024) DEFAULT NULL," +
      " `referer` varchar(1024) DEFAULT NULL," +
      " `priority` smallint(255) DEFAULT NULL," +
      " `lastCrawlTime` datetime DEFAULT NULL," +
      " `status` varchar(32) DEFAULT NULL," +
      " `dataVersion` int(11) DEFAULT NULL," +
      " `dataCreateTime` datetime DEFAULT NULL," +
      " `dataUpdateTime` datetime DEFAULT NULL," +
      " `dataDeleteTime` datetime DEFAULT NULL," +
      " PRIMARY KEY (`id`)," +
      s" KEY `idx_${table}_taskId` (`taskId`)," +
      s" KEY `idx_${table}_siteId` (`siteId`)," +
      s" KEY `idx_${table}_priority` (`priority`)," +
      s" KEY `idx_${table}_status` (`status`)," +
      s" KEY `idx_${table}_dataCreateTime` (`dataCreateTime`)," +
      s" KEY `idx_${table}_dataUpdateTime` (`dataUpdateTime`)" +
      ")"
    try {
      runner.update(conn, sql)
    } catch {
      case e: SQLException =>
        e.printStackTrace()
        return false
    }
    true
  }

  /**
   * Updates all fields of an existing row by id (SQLite timestamp syntax).
   * NOTE(review): link.getId is interpolated directly into the SQL string —
   * safe only if ids are always generated internally; parameterize otherwise.
   */
  protected def update(link: Link): Int = {
    require(conn != null)
    if (link == null) return 0

    val fieldNames = BeanUtils.getDeclaredFields(link).map(_.getName)
    val excludes = Set("id")
    val filterFieldNames = fieldNames.filter(!excludes.contains(_))
    val filterPlaceholders = filterFieldNames.map(fieldName => s"$fieldName = ?").mkString(", ")
    val filterValues = filterFieldNames.map(fieldName => FieldUtils.readField(link, fieldName, true))

    val sql = s"update $table set $filterPlaceholders, dataVersion = dataVersion + 1, dataUpdateTime = datetime('now', 'localtime') where id = '${link.getId}'"
    runner.update(conn, sql, filterValues: _*)
  }

  /** Inserts one link, or on duplicate key rewrites its fields and bumps dataVersion (MySQL upsert). */
  protected def upsert(link: Link): Int = {
    require(conn != null)
    if (link == null) return 0

    val fieldNames = BeanUtils.getDeclaredFields(link).map(_.getName)
    val columns = fieldNames.mkString("(", ", ", ")")
    val placeholders = fieldNames.map(_ => "?").mkString("(", ",", ")")
    val values = fieldNames.map(fieldName => FieldUtils.readField(link, fieldName, true))

    // Audit columns are maintained by the database side of the upsert, not rewritten from the bean.
    val excludes = Set("id", "dataVersion", "dataCreateTime", "dataUpdateTime", "dataDeleteTime")
    val filterFieldNames = fieldNames.filter(!excludes.contains(_))
    val filterPlaceholders = filterFieldNames.map(fieldName => s"$fieldName = ?").mkString(", ")
    val filterValues = filterFieldNames.map(fieldName => FieldUtils.readField(link, fieldName, true))
    val allValues = (values ++ filterValues).toSeq

    val sql = s"insert into $table $columns values $placeholders ON DUPLICATE KEY UPDATE $filterPlaceholders, dataVersion = dataVersion + 1, dataUpdateTime = now()"
    runner.update(conn, sql, allValues: _*)
  }

  /**
   * Batch variant: inserts many links in one statement; rows that already
   * exist only get their dataVersion/dataUpdateTime bumped (fields untouched).
   */
  protected def upsert(links: Iterable[Link]): Int = {
    require(conn != null)
    if (links == null || links.isEmpty) return 0

    val fieldNames = BeanUtils.getDeclaredFields(links.head).map(_.getName)
    val columns = fieldNames.mkString("(", ",", ")")
    val placeholders = links.map(_ => fieldNames.map(_ => "?").mkString("(", ",", ")")).mkString(", ")
    val values = links.flatMap(link => fieldNames.map(fieldName => FieldUtils.readField(link, fieldName, true))).toArray
    //    val pairs = fieldNames.map(fieldName => s"$fieldName = values($fieldName)").mkString(", ")

    // if already exists, do nothing
    val sql = s"insert into $table $columns values $placeholders " +
      s"ON DUPLICATE KEY UPDATE dataVersion = dataVersion + 1, dataUpdateTime = now()"
    //      s"ON DUPLICATE KEY UPDATE $pairs, dataVersion = dataVersion + 1, dataUpdateTime = now()"
    runner.update(conn, sql, values: _*)
  }

  // Resets links stuck in the given status back to 'init' (e.g. after a crash mid-crawl).
  override def reset(status: String): Int = {
    val sql = s"UPDATE $table SET status = 'init' WHERE status = '$status'"
    runner.update(conn, sql)
  }

  /** Aggregates per-status row counts into a Progress snapshot. */
  override def progress(): Progress = {
    val t = total()
    val sql = s"SELECT status, count(1) FROM $table group by status"
    val result = runner.query(conn, sql, new ArrayListHandler())
    val counts = result.asScala.map(item => (item(0).asInstanceOf[String], item(1).asInstanceOf[Long])).toMap
    val init = if (counts.contains(Link.LINK_STATUS_INIT)) counts(Link.LINK_STATUS_INIT) else 0
    val pending = if (counts.contains(Link.LINK_STATUS_PENDING)) counts(Link.LINK_STATUS_PENDING) else 0
    val success = if (counts.contains(Link.LINK_STATUS_SUCCESS)) counts(Link.LINK_STATUS_SUCCESS) else 0
    val fail = if (counts.contains(Link.LINK_STATUS_FAIL)) counts(Link.LINK_STATUS_FAIL) else 0
    Progress(t, init, pending, success, fail)
  }

  /** Total number of link rows in the table. */
  override def total(): Long = {
    val sql = s"SELECT count(1) FROM $table"
    runner.query(conn, sql, new ScalarHandler())
  }

  /** Number of link rows in the given status. */
  override def count(status: String): Long = {
    val sql = s"SELECT count(1) FROM $table WHERE status = ?"
    runner.query(conn, sql, new ScalarHandler(), status)
  }

  /** Closes the shared connection (idempotent); always reports success. */
  override def close(): Boolean = {
    if (null != conn) {
      try
        conn.close()
      catch {
        case e: SQLException =>
          e.printStackTrace()
      }
      conn = null
      logger.info(s"close ${getClass.getSimpleName} connection success.")
    }
    true
  }

  /** Loads DRIVER and opens a connection; returns null on any failure. */
  protected def getConnection(url: String, user: String, password: String): Connection = {
    require(StringUtils.isNotBlank(url))
    require(StringUtils.isNotBlank(DRIVER))

    var conn: Connection = null
    try {
      Class.forName(DRIVER)
      conn = DriverManager.getConnection(url, user, password)
    }
    catch {
      case e: Exception =>
        e.printStackTrace()
    }
    conn
  }

  /**
   * Per-thread connection cache.
   * NOTE(review): this branch calls DriverManager.getConnection(url) only —
   * the user/password parameters are silently ignored, and the shared `conn`
   * field is overwritten from whichever thread initializes first; confirm intended.
   */
  protected def getConnectionThreadLocal(url: String, user: String, password: String): Connection = {
    require(StringUtils.isNotBlank(url))
    require(StringUtils.isNotBlank(DRIVER))

    if (threadLocal.get() == null) {
      conn = DriverManager.getConnection(url)
      threadLocal.set(conn)
      conn
    } else {
      threadLocal.get()
    }
  }
}
| sutine/webant | webant-commons/src/main/scala/org/webant/commons/link/JdbcLinkProvider.scala | Scala | apache-2.0 | 7,773 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.streaming
import java.io.File
import java.util.{Locale, TimeZone}
import org.apache.commons.io.FileUtils
import org.scalatest.Assertions
import org.apache.spark.{SparkEnv, SparkException}
import org.apache.spark.rdd.BlockRDD
import org.apache.spark.sql.{AnalysisException, DataFrame, Dataset, SparkSession}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.plans.logical.Aggregate
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.execution.{SparkPlan, UnaryExecNode}
import org.apache.spark.sql.execution.exchange.Exchange
import org.apache.spark.sql.execution.streaming._
import org.apache.spark.sql.execution.streaming.sources.MemorySink
import org.apache.spark.sql.execution.streaming.state.StreamingAggregationStateManager
import org.apache.spark.sql.expressions.scalalang.typed
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.streaming.OutputMode._
import org.apache.spark.sql.streaming.util.{MockSourceProvider, StreamManualClock}
import org.apache.spark.sql.types.StructType
import org.apache.spark.storage.{BlockId, StorageLevel, TestBlockId}
import org.apache.spark.util.Utils
/**
 * Shared mutable flag used to inject a one-time failure into a streaming
 * query: a map task errors the first time it sees the trigger value, flips
 * the flag, and succeeds on retry. Tests reset `firstTime` to true before use.
 */
object FailureSingleton {
  var firstTime = true
}
class StreamingAggregationSuite extends StateStoreMetricsTest with Assertions {
import testImplicits._
def executeFuncWithStateVersionSQLConf(
stateVersion: Int,
confPairs: Seq[(String, String)],
func: => Any): Unit = {
withSQLConf(confPairs ++
Seq(SQLConf.STREAMING_AGGREGATION_STATE_FORMAT_VERSION.key -> stateVersion.toString): _*) {
func
}
}
def testWithAllStateVersions(name: String, confPairs: (String, String)*)
(func: => Any): Unit = {
for (version <- StreamingAggregationStateManager.supportedVersions) {
test(s"$name - state format version $version") {
executeFuncWithStateVersionSQLConf(version, confPairs, func)
}
}
}
def testQuietlyWithAllStateVersions(name: String, confPairs: (String, String)*)
(func: => Any): Unit = {
for (version <- StreamingAggregationStateManager.supportedVersions) {
testQuietly(s"$name - state format version $version") {
executeFuncWithStateVersionSQLConf(version, confPairs, func)
}
}
}
testWithAllStateVersions("simple count, update mode") {
val inputData = MemoryStream[Int]
val aggregated =
inputData.toDF()
.groupBy($"value")
.agg(count("*"))
.as[(Int, Long)]
testStream(aggregated, Update)(
AddData(inputData, 3),
CheckLastBatch((3, 1)),
AddData(inputData, 3, 2),
CheckLastBatch((3, 2), (2, 1)),
StopStream,
StartStream(),
AddData(inputData, 3, 2, 1),
CheckLastBatch((3, 3), (2, 2), (1, 1)),
// By default we run in new tuple mode.
AddData(inputData, 4, 4, 4, 4),
CheckLastBatch((4, 4))
)
}
testWithAllStateVersions("count distinct") {
val inputData = MemoryStream[(Int, Seq[Int])]
val aggregated =
inputData.toDF()
.select($"*", explode($"_2") as 'value)
.groupBy($"_1")
.agg(size(collect_set($"value")))
.as[(Int, Int)]
testStream(aggregated, Update)(
AddData(inputData, (1, Seq(1, 2))),
CheckLastBatch((1, 2))
)
}
testWithAllStateVersions("simple count, complete mode") {
val inputData = MemoryStream[Int]
val aggregated =
inputData.toDF()
.groupBy($"value")
.agg(count("*"))
.as[(Int, Long)]
testStream(aggregated, Complete)(
AddData(inputData, 3),
CheckLastBatch((3, 1)),
AddData(inputData, 2),
CheckLastBatch((3, 1), (2, 1)),
StopStream,
StartStream(),
AddData(inputData, 3, 2, 1),
CheckLastBatch((3, 2), (2, 2), (1, 1)),
AddData(inputData, 4, 4, 4, 4),
CheckLastBatch((4, 4), (3, 2), (2, 2), (1, 1))
)
}
testWithAllStateVersions("simple count, append mode") {
val inputData = MemoryStream[Int]
val aggregated =
inputData.toDF()
.groupBy($"value")
.agg(count("*"))
.as[(Int, Long)]
val e = intercept[AnalysisException] {
testStream(aggregated, Append)()
}
Seq("append", "not supported").foreach { m =>
assert(e.getMessage.toLowerCase(Locale.ROOT).contains(m.toLowerCase(Locale.ROOT)))
}
}
testWithAllStateVersions("sort after aggregate in complete mode") {
val inputData = MemoryStream[Int]
val aggregated =
inputData.toDF()
.groupBy($"value")
.agg(count("*"))
.toDF("value", "count")
.orderBy($"count".desc)
.as[(Int, Long)]
testStream(aggregated, Complete)(
AddData(inputData, 3),
CheckLastBatch(isSorted = true, (3, 1)),
AddData(inputData, 2, 3),
CheckLastBatch(isSorted = true, (3, 2), (2, 1)),
StopStream,
StartStream(),
AddData(inputData, 3, 2, 1),
CheckLastBatch(isSorted = true, (3, 3), (2, 2), (1, 1)),
AddData(inputData, 4, 4, 4, 4),
CheckLastBatch(isSorted = true, (4, 4), (3, 3), (2, 2), (1, 1))
)
}
testWithAllStateVersions("state metrics") {
val inputData = MemoryStream[Int]
val aggregated =
inputData.toDS()
.flatMap(x => Seq(x, x + 1))
.toDF("value")
.groupBy($"value")
.agg(count("*"))
.as[(Int, Long)]
implicit class RichStreamExecution(query: StreamExecution) {
def stateNodes: Seq[SparkPlan] = {
query.lastExecution.executedPlan.collect {
case p if p.isInstanceOf[StateStoreSaveExec] => p
}
}
}
// Test with Update mode
testStream(aggregated, Update)(
AddData(inputData, 1),
CheckLastBatch((1, 1), (2, 1)),
AssertOnQuery { _.stateNodes.size === 1 },
AssertOnQuery { _.stateNodes.head.metrics("numOutputRows").value === 2 },
AssertOnQuery { _.stateNodes.head.metrics("numUpdatedStateRows").value === 2 },
AssertOnQuery { _.stateNodes.head.metrics("numTotalStateRows").value === 2 },
AddData(inputData, 2, 3),
CheckLastBatch((2, 2), (3, 2), (4, 1)),
AssertOnQuery { _.stateNodes.size === 1 },
AssertOnQuery { _.stateNodes.head.metrics("numOutputRows").value === 3 },
AssertOnQuery { _.stateNodes.head.metrics("numUpdatedStateRows").value === 3 },
AssertOnQuery { _.stateNodes.head.metrics("numTotalStateRows").value === 4 }
)
// Test with Complete mode
inputData.reset()
testStream(aggregated, Complete)(
AddData(inputData, 1),
CheckLastBatch((1, 1), (2, 1)),
AssertOnQuery { _.stateNodes.size === 1 },
AssertOnQuery { _.stateNodes.head.metrics("numOutputRows").value === 2 },
AssertOnQuery { _.stateNodes.head.metrics("numUpdatedStateRows").value === 2 },
AssertOnQuery { _.stateNodes.head.metrics("numTotalStateRows").value === 2 },
AddData(inputData, 2, 3),
CheckLastBatch((1, 1), (2, 2), (3, 2), (4, 1)),
AssertOnQuery { _.stateNodes.size === 1 },
AssertOnQuery { _.stateNodes.head.metrics("numOutputRows").value === 4 },
AssertOnQuery { _.stateNodes.head.metrics("numUpdatedStateRows").value === 3 },
AssertOnQuery { _.stateNodes.head.metrics("numTotalStateRows").value === 4 }
)
}
testWithAllStateVersions("multiple keys") {
val inputData = MemoryStream[Int]
val aggregated =
inputData.toDF()
.groupBy($"value", $"value" + 1)
.agg(count("*"))
.as[(Int, Int, Long)]
testStream(aggregated, Update)(
AddData(inputData, 1, 2),
CheckLastBatch((1, 2, 1), (2, 3, 1)),
AddData(inputData, 1, 2),
CheckLastBatch((1, 2, 2), (2, 3, 2))
)
}
testQuietlyWithAllStateVersions("midbatch failure") {
val inputData = MemoryStream[Int]
FailureSingleton.firstTime = true
val aggregated =
inputData.toDS()
.map { i =>
if (i == 4 && FailureSingleton.firstTime) {
FailureSingleton.firstTime = false
sys.error("injected failure")
}
i
}
.groupBy($"value")
.agg(count("*"))
.as[(Int, Long)]
testStream(aggregated, Update)(
StartStream(),
AddData(inputData, 1, 2, 3, 4),
ExpectFailure[SparkException](),
StartStream(),
CheckLastBatch((1, 1), (2, 1), (3, 1), (4, 1))
)
}
testWithAllStateVersions("typed aggregators") {
val inputData = MemoryStream[(String, Int)]
val aggregated = inputData.toDS().groupByKey(_._1).agg(typed.sumLong(_._2))
testStream(aggregated, Update)(
AddData(inputData, ("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)),
CheckLastBatch(("a", 30), ("b", 3), ("c", 1))
)
}
testWithAllStateVersions("prune results by current_time, complete mode") {
import testImplicits._
val clock = new StreamManualClock
val inputData = MemoryStream[Long]
val aggregated =
inputData.toDF()
.groupBy($"value")
.agg(count("*"))
.where('value >= current_timestamp().cast("long") - 10L)
testStream(aggregated, Complete)(
StartStream(Trigger.ProcessingTime("10 seconds"), triggerClock = clock),
// advance clock to 10 seconds, all keys retained
AddData(inputData, 0L, 5L, 5L, 10L),
AdvanceManualClock(10 * 1000),
CheckLastBatch((0L, 1), (5L, 2), (10L, 1)),
// advance clock to 20 seconds, should retain keys >= 10
AddData(inputData, 15L, 15L, 20L),
AdvanceManualClock(10 * 1000),
CheckLastBatch((10L, 1), (15L, 2), (20L, 1)),
// advance clock to 30 seconds, should retain keys >= 20
AddData(inputData, 0L, 85L),
AdvanceManualClock(10 * 1000),
CheckLastBatch((20L, 1), (85L, 1)),
// bounce stream and ensure correct batch timestamp is used
// i.e., we don't take it from the clock, which is at 90 seconds.
StopStream,
AssertOnQuery { q => // clear the sink
q.sink.asInstanceOf[MemorySink].clear()
q.commitLog.purge(3)
// advance by a minute i.e., 90 seconds total
clock.advance(60 * 1000L)
true
},
StartStream(Trigger.ProcessingTime("10 seconds"), triggerClock = clock),
// The commit log blown, causing the last batch to re-run
CheckLastBatch((20L, 1), (85L, 1)),
AssertOnQuery { q =>
clock.getTimeMillis() == 90000L
},
// advance clock to 100 seconds, should retain keys >= 90
AddData(inputData, 85L, 90L, 100L, 105L),
AdvanceManualClock(10 * 1000),
CheckLastBatch((90L, 1), (100L, 1), (105L, 1))
)
}
  testWithAllStateVersions("prune results by current_date, complete mode") {
    import testImplicits._
    // Values are interpreted as days (converted to seconds-since-epoch timestamps);
    // groups older than 10 days relative to the batch timestamp are pruned.
    val clock = new StreamManualClock
    val inputData = MemoryStream[Long]
    val aggregated =
      inputData.toDF()
        .select(($"value" * DateTimeUtils.SECONDS_PER_DAY).cast("timestamp").as("value"))
        .groupBy($"value")
        .agg(count("*"))
        .where($"value".cast("date") >= date_sub(current_timestamp().cast("date"), 10))
        .select(
          ($"value".cast("long") / DateTimeUtils.SECONDS_PER_DAY).cast("long"), $"count(1)")
    testStream(aggregated, Complete)(
      StartStream(Trigger.ProcessingTime("10 day"), triggerClock = clock),
      // advance clock to 10 days, should retain all keys
      AddData(inputData, 0L, 5L, 5L, 10L),
      AdvanceManualClock(DateTimeUtils.MILLIS_PER_DAY * 10),
      CheckLastBatch((0L, 1), (5L, 2), (10L, 1)),
      // advance clock to 20 days, should retain keys >= 10
      AddData(inputData, 15L, 15L, 20L),
      AdvanceManualClock(DateTimeUtils.MILLIS_PER_DAY * 10),
      CheckLastBatch((10L, 1), (15L, 2), (20L, 1)),
      // advance clock to 30 days, should retain keys >= 20
      AddData(inputData, 85L),
      AdvanceManualClock(DateTimeUtils.MILLIS_PER_DAY * 10),
      CheckLastBatch((20L, 1), (85L, 1)),
      // bounce stream and ensure correct batch timestamp is used
      // i.e., we don't take it from the clock, which is at 90 days.
      StopStream,
      AssertOnQuery { q => // clear the sink
        q.sink.asInstanceOf[MemorySink].clear()
        q.commitLog.purge(3)
        // advance by 60 days i.e., 90 days total
        clock.advance(DateTimeUtils.MILLIS_PER_DAY * 60)
        true
      },
      StartStream(Trigger.ProcessingTime("10 day"), triggerClock = clock),
      // The commit log was purged above, so the last batch re-runs here.
      CheckLastBatch((20L, 1), (85L, 1)),
      // advance clock to 100 days, should retain keys >= 90
      AddData(inputData, 85L, 90L, 100L, 105L),
      AdvanceManualClock(DateTimeUtils.MILLIS_PER_DAY * 10),
      CheckLastBatch((90L, 1), (100L, 1), (105L, 1))
    )
  }
testWithAllStateVersions("SPARK-19690: do not convert batch aggregation in streaming query " +
"to streaming") {
val streamInput = MemoryStream[Int]
val batchDF = Seq(1, 2, 3, 4, 5)
.toDF("value")
.withColumn("parity", 'value % 2)
.groupBy('parity)
.agg(count("*") as 'joinValue)
val joinDF = streamInput
.toDF()
.join(batchDF, 'value === 'parity)
// make sure we're planning an aggregate in the first place
assert(batchDF.queryExecution.optimizedPlan match { case _: Aggregate => true })
testStream(joinDF, Append)(
AddData(streamInput, 0, 1, 2, 3),
CheckLastBatch((0, 0, 2), (1, 1, 3)),
AddData(streamInput, 0, 1, 2, 3),
CheckLastBatch((0, 0, 2), (1, 1, 3)))
}
/**
* This method verifies certain properties in the SparkPlan of a streaming aggregation.
* First of all, it checks that the child of a `StateStoreRestoreExec` creates the desired
* data distribution, where the child could be an Exchange, or a `HashAggregateExec` which already
* provides the expected data distribution.
*
* The second thing it checks that the child provides the expected number of partitions.
*
* The third thing it checks that we don't add an unnecessary shuffle in-between
* `StateStoreRestoreExec` and `StateStoreSaveExec`.
*/
private def checkAggregationChain(
se: StreamExecution,
expectShuffling: Boolean,
expectedPartition: Int): Boolean = {
val executedPlan = se.lastExecution.executedPlan
val restore = executedPlan
.collect { case ss: StateStoreRestoreExec => ss }
.head
restore.child match {
case node: UnaryExecNode =>
assert(node.outputPartitioning.numPartitions === expectedPartition,
"Didn't get the expected number of partitions.")
if (expectShuffling) {
assert(node.isInstanceOf[Exchange], s"Expected a shuffle, got: ${node.child}")
} else {
assert(!node.isInstanceOf[Exchange], "Didn't expect a shuffle")
}
case _ => fail("Expected no shuffling")
}
var reachedRestore = false
// Check that there should be no exchanges after `StateStoreRestoreExec`
executedPlan.foreachUp { p =>
if (reachedRestore) {
assert(!p.isInstanceOf[Exchange], "There should be no further exchanges")
} else {
reachedRestore = p.isInstanceOf[StateStoreRestoreExec]
}
}
true
}
  testWithAllStateVersions("SPARK-21977: coalesce(1) with 0 partition RDD should be " +
    "repartitioned to 1") {
    val inputSource = new BlockRDDBackedSource(spark)
    MockSourceProvider.withMockSources(inputSource) {
      // `coalesce(1)` changes the partitioning of data to `SinglePartition` which by default
      // satisfies the required distributions of all aggregations. Therefore in our SparkPlan, we
      // don't have any shuffling. However, `coalesce(1)` only guarantees that the RDD has at most
      // 1 partition, which means that if we have an input RDD with 0 partitions, nothing gets
      // executed. Therefore the StateStores don't save any delta files for a given trigger. This
      // then leads to `FileNotFoundException`s in the subsequent batch.
      // This isn't the only problem though. Once we introduce a shuffle before
      // `StateStoreRestoreExec`, the input to the operator is an empty iterator. When performing
      // `groupBy().agg(...)`, `HashAggregateExec` returns a `0` value for all aggregations. If
      // we fail to restore the previous state in `StateStoreRestoreExec`, we save the 0 value in
      // `StateStoreSaveExec` losing all previous state.
      val aggregated: Dataset[Long] =
        spark.readStream.format((new MockSourceProvider).getClass.getCanonicalName)
          .load().coalesce(1).groupBy().count().as[Long]
      testStream(aggregated, Complete())(
        AddBlockData(inputSource, Seq(1)),
        CheckLastBatch(1),
        AssertOnQuery("Verify no shuffling") { se =>
          checkAggregationChain(se, expectShuffling = false, 1)
        },
        AddBlockData(inputSource), // create an empty trigger
        CheckLastBatch(1),
        AssertOnQuery("Verify that no exchange is required") { se =>
          checkAggregationChain(se, expectShuffling = false, 1)
        },
        AddBlockData(inputSource, Seq(2, 3)),
        CheckLastBatch(3),
        AddBlockData(inputSource),
        CheckLastBatch(3),
        StopStream
      )
    }
  }
  testWithAllStateVersions("SPARK-21977: coalesce(1) with aggregation should still be " +
    "repartitioned when it has non-empty grouping keys") {
    val inputSource = new BlockRDDBackedSource(spark)
    MockSourceProvider.withMockSources(inputSource) {
      withTempDir { tempDir =>
        // `coalesce(1)` changes the partitioning of data to `SinglePartition` which by default
        // satisfies the required distributions of all aggregations. However, when we have
        // non-empty grouping keys, in streaming, we must repartition to
        // `spark.sql.shuffle.partitions`, otherwise only a single StateStore is used to process
        // all keys. This may be fine, however, if the user removes the coalesce(1) or changes to
        // a `coalesce(2)` for example, then the default behavior is to shuffle to
        // `spark.sql.shuffle.partitions` many StateStores. When this happens, all StateStore's
        // except 1 will be missing their previous delta files, which causes the stream to fail
        // with FileNotFoundException.
        def createDf(partitions: Int): Dataset[(Long, Long)] = {
          spark.readStream
            .format((new MockSourceProvider).getClass.getCanonicalName)
            .load().coalesce(partitions).groupBy('a % 1).count().as[(Long, Long)]
        }
        testStream(createDf(1), Complete())(
          StartStream(checkpointLocation = tempDir.getAbsolutePath),
          AddBlockData(inputSource, Seq(1)),
          CheckLastBatch((0L, 1L)),
          AssertOnQuery("Verify addition of exchange operator") { se =>
            checkAggregationChain(
              se,
              expectShuffling = true,
              spark.sessionState.conf.numShufflePartitions)
          },
          StopStream
        )
        // Restart from the same checkpoint with coalesce(2); the assertion below checks that
        // no additional exchange is added on restart.
        testStream(createDf(2), Complete())(
          StartStream(checkpointLocation = tempDir.getAbsolutePath),
          Execute(se => se.processAllAvailable()),
          AddBlockData(inputSource, Seq(2), Seq(3), Seq(4)),
          CheckLastBatch((0L, 4L)),
          AssertOnQuery("Verify no exchange added") { se =>
            checkAggregationChain(
              se,
              expectShuffling = false,
              spark.sessionState.conf.numShufflePartitions)
          },
          AddBlockData(inputSource),
          CheckLastBatch((0L, 4L)),
          StopStream
        )
      }
    }
  }
  testWithAllStateVersions("SPARK-22230: last should change with new batches") {
    val input = MemoryStream[Int]
    // The aggregate must track the latest non-empty batch: unchanged on an empty
    // batch, updated otherwise.
    val aggregated = input.toDF().agg(last('value))
    testStream(aggregated, OutputMode.Complete())(
      AddData(input, 1, 2, 3),
      CheckLastBatch(3),
      AddData(input, 4, 5, 6),
      CheckLastBatch(6),
      AddData(input),
      CheckLastBatch(6),
      AddData(input, 0),
      CheckLastBatch(0)
    )
  }
  testWithAllStateVersions("SPARK-23004: Ensure that TypedImperativeAggregate functions " +
    "do not throw errors", SQLConf.SHUFFLE_PARTITIONS.key -> "1") {
    // See the JIRA SPARK-23004 for more details. In short, this test reproduces the error
    // by ensuring the following.
    // - A streaming query with a streaming aggregation.
    // - Aggregation function 'collect_list' that is a subclass of TypedImperativeAggregate.
    // - Post shuffle partition has exactly 128 records (i.e. the threshold at which
    //   ObjectHashAggregateExec falls back to sort-based aggregation). This is done by having a
    //   micro-batch with 128 records that shuffle to a single partition.
    // This test throws the exact error reported in SPARK-23004 without the corresponding fix.
    val input = MemoryStream[Int]
    val df = input.toDF().toDF("value")
      .selectExpr("value as group", "value")
      .groupBy("group")
      .agg(collect_list("value"))
    testStream(df, outputMode = OutputMode.Update)(
      AddData(input, (1 to spark.sqlContext.conf.objectAggSortBasedFallbackThreshold): _*),
      // Success criterion is simply that the batch completes without throwing;
      // no result values are checked.
      AssertOnQuery { q =>
        q.processAllAvailable()
        true
      }
    )
  }
  test("simple count, update mode - recovery from checkpoint uses state format version 1") {
    val inputData = MemoryStream[Int]
    val aggregated =
      inputData.toDF()
        .groupBy($"value")
        .agg(count("*"))
        .as[(Int, Long)]
    // Bundled checkpoint written by Spark 2.3.1 with state format version 1.
    val resourceUri = this.getClass.getResource(
      "/structured-streaming/checkpoint-version-2.3.1-streaming-aggregate-state-format-1/").toURI
    val checkpointDir = Utils.createTempDir().getCanonicalFile
    // Copy the checkpoint to a temp dir to prevent changes to the original.
    // Not doing this will lead to the test passing on the first run, but fail subsequent runs.
    FileUtils.copyDirectory(new File(resourceUri), checkpointDir)
    inputData.addData(3)
    inputData.addData(3, 2)
    testStream(aggregated, Update)(
      // Version 2 is requested here, but recovery must keep using the version the
      // checkpoint was written with (verified in the Execute step below).
      StartStream(checkpointLocation = checkpointDir.getAbsolutePath,
        additionalConfs = Map(SQLConf.STREAMING_AGGREGATION_STATE_FORMAT_VERSION.key -> "2")),
      /*
        Note: The checkpoint was generated using the following input in Spark version 2.3.1
        AddData(inputData, 3),
        CheckLastBatch((3, 1)),
        AddData(inputData, 3, 2),
        CheckLastBatch((3, 2), (2, 1))
       */
      AddData(inputData, 3, 2, 1),
      CheckLastBatch((3, 3), (2, 2), (1, 1)),
      Execute { query =>
        // Verify state format = 1
        val stateVersions = query.lastExecution.executedPlan.collect {
          case f: StateStoreSaveExec => f.stateFormatVersion
          case f: StateStoreRestoreExec => f.stateFormatVersion
        }
        assert(stateVersions.size == 2)
        assert(stateVersions.forall(_ == 1))
      },
      // By default we run in new tuple mode.
      AddData(inputData, 4, 4, 4, 4),
      CheckLastBatch((4, 4))
    )
  }
  /** Add blocks of data to the `BlockRDDBackedSource`. */
  case class AddBlockData(source: BlockRDDBackedSource, data: Seq[Int]*) extends AddData {
    override def addData(query: Option[StreamExecution]): (Source, Offset) = {
      // Calling this with no `data` still advances the source's counter (see addBlocks),
      // which is how the tests above produce an empty trigger.
      source.addBlocks(data: _*)
      (source, LongOffset(source.counter))
    }
  }
  /**
   * A Streaming Source that is backed by a BlockRDD and that can create RDDs with 0 blocks at will.
   */
  class BlockRDDBackedSource(spark: SparkSession) extends Source {
    // Monotonically increasing value used as the source offset. It grows by one per added
    // block plus one per addBlocks call, so it is an ordering, not a record count.
    var counter = 0L
    private val blockMgr = SparkEnv.get.blockManager
    // Blocks added since the last getBatch; cleared once a batch consumes them.
    private var blocks: Seq[BlockId] = Seq.empty
    def addBlocks(dataBlocks: Seq[Int]*): Unit = synchronized {
      dataBlocks.foreach { data =>
        val id = TestBlockId(counter.toString)
        blockMgr.putIterator(id, data.iterator, StorageLevel.MEMORY_ONLY)
        blocks ++= id :: Nil
        counter += 1
      }
      // Bump the counter even when called with no blocks, so getOffset reports a new offset
      // and an empty trigger is produced (see AddBlockData usage in the tests above).
      counter += 1
    }
    override def getOffset: Option[Offset] = synchronized {
      if (counter == 0) None else Some(LongOffset(counter))
    }
    override def getBatch(start: Option[Offset], end: Offset): DataFrame = synchronized {
      val rdd = new BlockRDD[Int](spark.sparkContext, blocks.toArray)
        .map(i => InternalRow(i)) // we don't really care about the values in this test
      // Each block is consumed by exactly one batch.
      blocks = Seq.empty
      spark.internalCreateDataFrame(rdd, schema, isStreaming = true).toDF()
    }
    override def schema: StructType = MockSourceProvider.fakeSchema
    override def stop(): Unit = {
      // Remove all test blocks from the block manager so state doesn't leak between tests.
      blockMgr.getMatchingBlockIds(_.isInstanceOf[TestBlockId]).foreach(blockMgr.removeBlock(_))
    }
  }
}
| pgandhi999/spark | sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala | Scala | apache-2.0 | 25,628 |
package io.questions.util.collection
import cats.data.NonEmptyList
import io.circe._
import io.circe.syntax._
object JsonCodecs {

  /** Encodes a [[NonEmptyList]] by delegating to the underlying `List` encoder. */
  implicit def nonEmptyListEncoder[T: Encoder]: Encoder[NonEmptyList[T]] =
    (a: NonEmptyList[T]) => a.toList.asJson

  /**
   * Decodes a [[NonEmptyList]] from a JSON array. An empty array is reported as a
   * [[DecodingFailure]] instead of throwing: the previous `fromListUnsafe` raised an
   * exception on empty input, escaping circe's `Either`-based error channel.
   */
  implicit def nonEmptyListDecoder[T: Decoder](implicit ld: Decoder[List[T]]): Decoder[NonEmptyList[T]] =
    (c: HCursor) =>
      ld(c).flatMap { list =>
        NonEmptyList.fromList(list).toRight(DecodingFailure("NonEmptyList cannot be empty", c.history))
      }
}
| channingwalton/qanda | questionnaire/src/main/scala/io/questions/util/collection/JsonCodecs.scala | Scala | mit | 419 |
package com.jantvrdik.scala.app
import javafx.scene.canvas.Canvas
class ResizableCanvas extends Canvas {

  /** Marks this canvas as resizable so layout containers may grow or shrink it. */
  override def isResizable: Boolean = true

  /** The preferred width simply tracks the canvas's current width. */
  override def prefWidth(height: Double): Double = getWidth

  /** The preferred height simply tracks the canvas's current height. */
  override def prefHeight(width: Double): Double = getHeight
}
| JanTvrdik/pisqworks | src/main/scala/com/jantvrdik/scala/app/ResizableCanvas.scala | Scala | mit | 306 |
/**
* Copyright 2015 Mohiva Organisation (license at mohiva dot com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mohiva.play.silhouette.impl.providers.oauth2
import com.mohiva.play.silhouette.api.LoginInfo
import com.mohiva.play.silhouette.impl.exceptions.{ ProfileRetrievalException, UnexpectedResponseException }
import com.mohiva.play.silhouette.impl.providers.OAuth2Provider._
import com.mohiva.play.silhouette.impl.providers.SocialProfileBuilder._
import com.mohiva.play.silhouette.impl.providers._
import com.mohiva.play.silhouette.impl.providers.oauth2.GitHubProvider._
import play.api.http.HeaderNames
import play.api.libs.json.Json
import play.api.libs.ws.{ WSRequest, WSResponse }
import play.api.test.{ FakeRequest, WithApplication }
import test.Helper
import scala.concurrent.Future
/**
* Test case for the [[GitHubProvider]] class.
*/
class GitHubProviderSpec extends OAuth2ProviderSpec {
  "The `withSettings` method" should {
    "create a new instance with customized settings" in new WithApplication with Context {
      // Must return a modified copy rather than mutating the original provider.
      val s = provider.withSettings { s =>
        s.copy(accessTokenURL = "new-access-token-url")
      }
      s.settings.accessTokenURL must be equalTo "new-access-token-url"
    }
  }
"The `authenticate` method" should {
"fail with UnexpectedResponseException if OAuth2Info can be build because of an unexpected response" in new WithApplication with Context {
val requestHolder = mock[WSRequest]
val response = mock[WSResponse]
implicit val req = FakeRequest(GET, "?" + Code + "=my.code")
response.json returns Json.obj()
requestHolder.withHeaders(HeaderNames.ACCEPT -> "application/json") returns requestHolder
requestHolder.post[Map[String, Seq[String]]](any)(any) returns Future.successful(response)
httpLayer.url(oAuthSettings.accessTokenURL) returns requestHolder
stateProvider.validate(any) returns Future.successful(state)
failed[UnexpectedResponseException](provider.authenticate()) {
case e => e.getMessage must startWith(InvalidInfoFormat.format(provider.id, ""))
}
}
"return the auth info" in new WithApplication with Context {
val requestHolder = mock[WSRequest]
val response = mock[WSResponse]
implicit val req = FakeRequest(GET, "?" + Code + "=my.code")
response.json returns oAuthInfo
requestHolder.withHeaders(any) returns requestHolder
requestHolder.post[Map[String, Seq[String]]](any)(any) returns Future.successful(response)
httpLayer.url(oAuthSettings.accessTokenURL) returns requestHolder
stateProvider.validate(any) returns Future.successful(state)
authInfo(provider.authenticate()) {
case authInfo => authInfo must be equalTo oAuthInfo.as[OAuth2Info]
}
}
}
  "The `retrieveProfile` method" should {
    "fail with ProfileRetrievalException if API returns error" in new WithApplication with Context {
      val requestHolder = mock[WSRequest]
      val response = mock[WSResponse]
      // Canned GitHub error payload containing "Bad credentials".
      response.json returns Helper.loadJson("providers/oauth2/github.error.json")
      requestHolder.get() returns Future.successful(response)
      httpLayer.url(API.format("my.access.token")) returns requestHolder
      failed[ProfileRetrievalException](provider.retrieveProfile(oAuthInfo.as[OAuth2Info])) {
        case e => e.getMessage must equalTo(SpecifiedProfileError.format(
          provider.id,
          "Bad credentials",
          Some("http://developer.github.com/v3")))
      }
    }
    "fail with ProfileRetrievalException if an unexpected error occurred" in new WithApplication with Context {
      val requestHolder = mock[WSRequest]
      val response = mock[WSResponse]
      // Simulate a malformed body: accessing the JSON throws instead of returning a value.
      response.json throws new RuntimeException("")
      requestHolder.get() returns Future.successful(response)
      httpLayer.url(API.format("my.access.token")) returns requestHolder
      failed[ProfileRetrievalException](provider.retrieveProfile(oAuthInfo.as[OAuth2Info])) {
        case e => e.getMessage must equalTo(UnspecifiedProfileError.format(provider.id))
      }
    }
    "return the social profile" in new WithApplication with Context {
      val requestHolder = mock[WSRequest]
      val response = mock[WSResponse]
      response.json returns Helper.loadJson("providers/oauth2/github.success.json")
      requestHolder.get() returns Future.successful(response)
      httpLayer.url(API.format("my.access.token")) returns requestHolder
      profile(provider.retrieveProfile(oAuthInfo.as[OAuth2Info])) {
        case p =>
          p must be equalTo new CommonSocialProfile(
            loginInfo = LoginInfo(provider.id, "1"),
            fullName = Some("Apollonia Vanova"),
            email = Some("apollonia.vanova@watchmen.com"),
            avatarURL = Some("https://github.com/images/error/apollonia_vanova.gif")
          )
      }
    }
  }
  /**
   * Defines the context for the abstract OAuth2 provider spec.
   *
   * @return The Context to use for the abstract OAuth2 provider spec.
   */
  override protected def context: OAuth2ProviderSpecContext = new Context {}
  /**
   * The context.
   */
  trait Context extends OAuth2ProviderSpecContext {
    /**
     * The OAuth2 settings.
     */
    lazy val oAuthSettings = spy(OAuth2Settings(
      authorizationURL = Some("https://github.com/login/oauth/authorize"),
      accessTokenURL = "https://github.com/login/oauth/access_token",
      redirectURL = "https://www.mohiva.com",
      clientID = "my.client.id",
      clientSecret = "my.client.secret",
      scope = Some("repo,gist")))
    /**
     * The OAuth2 info returned by GitHub.
     *
     * @see https://developer.github.com/v3/oauth/
     */
    override lazy val oAuthInfo = Helper.loadJson("providers/oauth2/github.access.token.json")
    /**
     * The provider to test.
     */
    lazy val provider = new GitHubProvider(httpLayer, stateProvider, oAuthSettings)
  }
}
| rfranco/play-silhouette | silhouette/test/com/mohiva/play/silhouette/impl/providers/oauth2/GitHubProviderSpec.scala | Scala | apache-2.0 | 6,450 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.reflect.macros
package contexts
trait Reifiers {
  self: Context =>

  val global: universe.type = universe
  import universe._
  import definitions._

  /** Reifies `tree` into a tree-producing expression, logging any captured free variables. */
  def reifyTree(universe: Tree, mirror: Tree, tree: Tree): Tree = {
    assert(ExprClass != NoSymbol, "Missing ExprClass")
    val result = scala.reflect.reify.`package`.reifyTree(self.universe)(callsiteTyper, universe, mirror, tree)
    logFreeVars(enclosingPosition, result)
    result
  }

  /** Reifies `tpe` into a type-tag-producing expression, logging any captured free variables. */
  def reifyType(universe: Tree, mirror: Tree, tpe: Type, concrete: Boolean = false): Tree = {
    assert(TypeTagsClass != NoSymbol, "Missing TypeTagsClass")
    val result = scala.reflect.reify.`package`.reifyType(self.universe)(callsiteTyper, universe, mirror, tpe, concrete)
    logFreeVars(enclosingPosition, result)
    result
  }

  def reifyRuntimeClass(tpe: Type, concrete: Boolean = true): Tree =
    scala.reflect.reify.`package`.reifyRuntimeClass(universe)(callsiteTyper, tpe, concrete = concrete)

  def reifyEnclosingRuntimeClass: Tree =
    scala.reflect.reify.`package`.reifyEnclosingRuntimeClass(universe)(callsiteTyper)

  /** Undoes a reification by selecting the splice member on the reified expression. */
  def unreifyTree(tree: Tree): Tree = {
    // Fixed assertion message: the missing symbol is ExprSplice, not "ExprSlice".
    assert(ExprSplice != NoSymbol, "Missing ExprSplice")
    Select(tree, ExprSplice)
  }

  // fixme: if I put utils here, then "global" from utils' early initialization syntax
  // and "global" that comes from here conflict with each other when incrementally compiling
  // the problem is that both are pickled with the same owner - trait Reifiers
  // and this upsets the compiler, so that oftentimes it throws assertion failures
  // Martin knows the details
  //
  // object utils extends {
  //   val global: self.global.type = self.global
  //   val typer: global.analyzer.Typer = self.callsiteTyper
  // } with scala.reflect.reify.utils.Utils
  // import utils._

  /** Echoes any untyped free terms/types recorded in `reification`'s symbol table, if enabled. */
  private def logFreeVars(position: Position, reification: Tree): Unit = {
    object utils extends {
      val global: self.global.type = self.global
      val typer: global.analyzer.Typer = self.callsiteTyper
    } with scala.reflect.reify.utils.Utils
    import utils._

    def logFreeVars(symtab: SymbolTable): Unit =
      // logging free vars only when they are untyped prevents avalanches of duplicate messages
      symtab.syms map (sym => symtab.symDef(sym)) foreach {
        case FreeTermDef(_, _, binding, _, origin) if universe.settings.logFreeTerms && binding.tpe == null =>
          reporter.echo(position, s"free term: ${showRaw(binding)} $origin")
        case FreeTypeDef(_, _, binding, _, origin) if universe.settings.logFreeTypes && binding.tpe == null =>
          reporter.echo(position, s"free type: ${showRaw(binding)} $origin")
        case _ =>
          // do nothing
      }

    if (universe.settings.logFreeTerms || universe.settings.logFreeTypes)
      reification match {
        case ReifiedTree(_, _, symtab, _, _, _, _) => logFreeVars(symtab)
        case ReifiedType(_, _, symtab, _, _, _) => logFreeVars(symtab)
      }
  }
}
| martijnhoekstra/scala | src/compiler/scala/reflect/macros/contexts/Reifiers.scala | Scala | apache-2.0 | 3,263 |
package io.skysail.server.app.bundlerepo.resources.html
import play.twirl.api.Html
import html.main
import io.skysail.server.RepresentationModel
// NOTE: Twirl-generated source (see the "-- GENERATED --" footer below). Do not edit by
// hand; change the corresponding ReposResource_Get.scala.html template instead.
object ReposResource_Get extends _root_.play.twirl.api.BaseScalaTemplate[play.twirl.api.HtmlFormat.Appendable,_root_.play.twirl.api.Format[play.twirl.api.HtmlFormat.Appendable]](play.twirl.api.HtmlFormat) with _root_.play.twirl.api.Template1[RepresentationModel,play.twirl.api.HtmlFormat.Appendable] {
  /*************************************
  * Home page. *
  * *
  * @param msg The message to display *
  *************************************/
  def apply/*6.2*/(rep: RepresentationModel):play.twirl.api.HtmlFormat.Appendable = {
    _display_ {
      {
Seq[Any](format.raw/*6.28*/("""
"""),_display_(/*8.2*/main/*8.6*/ {_display_(Seq[Any](format.raw/*8.8*/("""
"""),format.raw/*10.1*/("""<br><br><br>
<div class="container">
<div class="starter-template">
<h1>Repos</h1>
<p class="lead">available OBR Repositories:</p>
<table class="table table-sm">
<thead>
<tr>
<th>ID</th>
<th>Title</th>
<th>Url</th>
</tr>
</thead>
<tbody>
"""),_display_(/*25.14*/for(p <- rep.rawData) yield /*25.35*/ {_display_(Seq[Any](format.raw/*25.37*/("""
"""),format.raw/*26.13*/("""<tr>
<th scope="row">"""),_display_(/*27.34*/p/*27.35*/.get("name")),format.raw/*27.47*/("""</th>
<th scope="row">"""),_display_(/*28.34*/p/*28.35*/.get("url")),format.raw/*28.46*/("""</th>
<td><a href='"""),_display_(/*29.31*/p/*29.32*/.get("url")),format.raw/*29.43*/("""'>"""),_display_(/*29.46*/p/*29.47*/.get("url")),format.raw/*29.58*/("""</a></td>
</tr>
""")))}),format.raw/*31.14*/("""
"""),format.raw/*33.13*/("""</tbody>
</table>
<hr>
<a href="/doc/v1/index.html" target="_docs">Doc</a>
</div>
</div>
""")))}))
      }
    }
  }
  // Standard Twirl template entry points: render/f/ref are generated alongside apply.
  def render(rep:RepresentationModel): play.twirl.api.HtmlFormat.Appendable = apply(rep)
  def f:((RepresentationModel) => play.twirl.api.HtmlFormat.Appendable) = (rep) => apply(rep)
  def ref: this.type = this
}
/*
-- GENERATED --
DATE: Tue Dec 19 09:01:43 CET 2017
SOURCE: /Users/carsten/git/skysail-server/skysail.server.ext.bundlerepo/./resources/templates/io/skysail/server/ext/bundlerepo/resources/ReposResource_Get.scala.html
HASH: d91f76b540505338af7cb2a0d77b732534bb8520
MATRIX: 663->193|784->219|812->222|823->226|861->228|890->230|1305->618|1342->639|1382->641|1423->654|1488->692|1498->693|1531->705|1597->744|1607->745|1639->756|1702->792|1712->793|1744->804|1774->807|1784->808|1816->819|1888->860|1930->874
LINES: 15->6|20->6|22->8|22->8|22->8|24->10|39->25|39->25|39->25|40->26|41->27|41->27|41->27|42->28|42->28|42->28|43->29|43->29|43->29|43->29|43->29|43->29|45->31|47->33
-- GENERATED --
*/
| evandor/skysail-server | skysail.server.app.bundlerepo/src/io/skysail/server/app/bundlerepo/resources/html/ReposResource_Get.template.scala | Scala | apache-2.0 | 3,163 |
object Tutorials {
  // NOTE(review): the imports below are unused placeholders, presumably intended for the
  // "CODE" examples sketched in the comments — confirm before removing.
  // About finagle
  //   what is it?
  //   futures
  //   services and filters
  //   request and response
  //   path
  //   CODE example of starting a server
  //
  // What is fintrospect
  //   feature list
  //
  // Modules
  //   what it is
  //   a module is simply a collection of endpoints which are grouped together under a common context (or Path).
  //
  //   CODE Your first module
  import com.twitter.finagle.Http
  import com.twitter.finagle.http.path.Root
  import com.twitter.util.Await.ready
  import io.fintrospect.ResourceLoader.Directory
  import io.fintrospect.StaticModule
  /* Architectural cookbook
  TODO:
    finagle future
    finagle service
    finagle filter
    finagle proxy server
    contracts
    testing
    validation
    go-to architecture
   */
}
| daviddenton/fintrospect | src/test/scala/Tutorials.scala | Scala | apache-2.0 | 827 |
package edu.gemini.sp.vcs2
import java.security.Principal
import edu.gemini.spModel.obs.ObsQaState
import edu.gemini.util.security.principal.{StaffPrincipal, VisitorPrincipal}
import org.specs2.matcher.MatchResult
import scalaz._
/**
 * Verifies that staff-protected science program fields (rollover status, observation QA state)
 * may only be changed through a VCS sync by staff-level principals (Gemini super-staff, a
 * visitor for the program, or normal staff), and that a non-staff sync reverts edits to those
 * fields while keeping edits to unprotected fields such as the program title.
 */
class StaffOnlyFieldCorrectionSpec extends VcsSpecification {
  import TestEnv._
  "staff only field correction" should {
    "allow super-staff to change rollover status" in withVcs { env =>
      env.local.rollover = true
      (env.remote.rollover should beFalse) and afterSync(env, StaffPrincipal.Gemini) {
        env.remote.rollover should beTrue
      }
    }
    "allow visitor to change rollover status" in withVcs { env =>
      env.local.rollover = true
      (env.remote.rollover should beFalse) and afterSync(env, new VisitorPrincipal(Q1)) {
        env.remote.rollover should beTrue
      }
    }
    "allow normal staff to change rollover status" in withVcs { env =>
      env.local.rollover = true
      (env.remote.rollover should beFalse) and afterSync(env, StaffUserPrincipal) {
        env.remote.rollover should beTrue
      }
    }
    "not allow non-staff to change rollover status" in withVcs { env =>
      env.local.rollover = true
      afterSync(env, PiUserPrincipal) {
        (env.local.rollover should beFalse) and (env.remote.rollover should beFalse)
      }
    }
    "not allow non-staff to sneak a change to staff contact" in withVcs { env =>
      // Changing the staff contact must not make the PI count as staff for the sync.
      env.local.rollover = true
      env.local.contact = PiEmail
      afterSync(env, PiUserPrincipal) {
        (env.local.rollover should beFalse) and
          (env.remote.rollover should beFalse) and
          (env.local.contact must_== StaffEmail) and
          (env.remote.contact must_== StaffEmail)
      }
    }
    "keep non-staff changes to non-protected fields" in withVcs { env =>
      env.local.rollover = true
      env.local.progTitle = "The Plague"
      afterSync(env, PiUserPrincipal) {
        (env.remote.progTitle must_== "The Plague") and
          (env.remote.rollover should beFalse) and
          (env.local.rollover should beFalse)
      }
    }
    "allow staff to change QA state" in withVcs { env =>
      env.local.setQaState(ObsKey, ObsQaState.PASS)
      afterSync(env, StaffUserPrincipal) {
        env.remote.getQaState(ObsKey) must_== ObsQaState.PASS
      }
    }
    "not allow non-staff to change QA state" in withVcs { env =>
      env.local.setQaState(ObsKey, ObsQaState.PASS)
      afterSync(env, PiUserPrincipal) {
        (env.local.getQaState(ObsKey) must_== ObsQaState.UNDEFINED) and
          (env.remote.getQaState(ObsKey) must_== ObsQaState.UNDEFINED)
      }
    }
    "allow staff to create new observations with non-default QA state" in withVcs { env =>
      val obsKey = env.local.addObservation()
      env.local.setQaState(obsKey, ObsQaState.PASS)
      afterSync(env, StaffUserPrincipal) {
        env.remote.getQaState(obsKey) must_== ObsQaState.PASS
      }
    }
    "not allow non-staff to create new observations with non-default QA state" in withVcs { env =>
      val obsKey = env.local.addObservation()
      env.local.setQaState(obsKey, ObsQaState.PASS)
      afterSync(env, PiUserPrincipal) {
        (env.local.getQaState(obsKey) must_== ObsQaState.UNDEFINED) and
          (env.remote.getQaState(obsKey) must_== ObsQaState.UNDEFINED)
      }
    }
  }
}
| arturog8m/ocs | bundle/edu.gemini.sp.vcs/src/test/scala/edu/gemini/sp/vcs2/StaffOnlyFieldCorrectionSpec.scala | Scala | bsd-3-clause | 3,358 |
package crm
import scala.language._
import scala.util.control.Exception._
import scopt._
import org.w3c.dom._
import dispatch._
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.async.Async._
import scala.util._
import org.log4s._
import com.lucidchart.open.xtract.{ XmlReader, _ }
import XmlReader._
import play.api.libs.functional.syntax._
import cats._
import cats.data._
import cats.implicits._
import better.files._
import java.io.{ File => JFile }
import fs2._
import scala.concurrent.ExecutionContext
import sdk.CrmAuth._
import sdk.httphelpers._
import sdk.streamhelpers._
import scala.util.matching._
import sdk.soapnamespaces.implicits._
import sdk.messages.soaprequestwriters._
import sdk.soapreaders._
import sdk.metadata.xmlreaders._
import sdk._
/**
 * Runtime configuration for the CRM command-line tool. Some defaults are seeded from the
 * REGION/USERNAME/PASSWORD environment variables.
 */
case class Config(
  /** Duration of an authorization in minutes. */
  leaseTime: Int = 120,
  /**
   * % of leaseTime at which an auth is renewed. This gives time for the auth
   * to be generated because it takes time to generate and receive an auth.
   */
  leaseTimeRenewalFraction: Double = 0.9,
  help: Boolean = false,
  /**
   * Web app url for CRM online.
   */
  url: String = "",
  servicesUrl: String = "",
  /**
   * Region for the org you want to connect to. You may have many
   * orgs within a region that you can connect to.
   */
  region: String = Option(System.getenv.get("REGION")).getOrElse("NA"),
  username: String = Option(System.getenv.get("USERNAME")).getOrElse(""),
  password: String = Option(System.getenv.get("PASSWORD")).getOrElse(""),
  /**
   * Timeout to wait for an individual remote request.
   */
  timeout: Int = 30 * 60, // seconds, wow that's long!
  mode: String = "auth",
  filterFilename: String = "filters.txt",
  objects: String = "Entity Relationships Attributes",
  output: String = "metadata.xml",
  discoveryAction: String = "listRegions",
  metadataAction: String = "dumpRawXml",
  wsdlFilename: String = "wsdl.xml",
  sdkVersion: Option[String] = None,
  queryType: String = "countEntities",
  queryEntity: String = "contact",
  queryCountFilterFilename: String = "count-filters.txt",
  queryFilters: Seq[String] = Seq(),
  keyfileChunkSize: Int = 2000000,
  keyfilePrefix: String = "",
  keyfileEntity: String = "",
  dump: Dump = Dump(),
  /** Command to run for entity scripts. */
  entityCommand: String = "runCommands",
  /** Synchronous callback. */
  commandCallback: (Map[String, Any], Map[String, Any]) => Either[String, Map[String, Any]] = (ctx, cctx) => Right(ctx),
  /** Hook prior to running a command. */
  commandPre: (EntityScript.CrmCommand, Map[String, Any]) => (EntityScript.CrmCommand, Map[String, Any]) = (x, y) => (x, y),
  /** Hook post to running a command. */
  commandPost: (EntityScript.CrmCommand, Map[String, Any]) => Map[String, Any] = (x, y) => y,
  /** JSON command file. */
  commandFile: String = "commands.json",
  //connectionPoolIdelTimeoutInSec: Int = 5*60,
  concurrency: Int = 2,
  pconcurrency: Int = 5,
  parallelism: Int = 16,
  httpRetrys: Int = 10,
  pauseBetweenRetriesInSeconds: Int = 30,
  ignoreKeyFiles: Boolean = false,
  keyfileChunkFetchSize: Int = 5000,
  take: Option[Long] = None,
  drop: Option[Long] = None,
  outputFormattedValues: Boolean = true,
  formattedValuesSuffix: String = "_fV",
  outputExplodedValues: Boolean = true,
  copyDatabaseType: String = "oracle",
  copyMetadataFile: Option[String] = None,
  copyAction: String = "",
  copyFilterFilename: String = "")
/**
 * Create a key file for a single entity. A key file contains the primary key
 * and no other attributes. The name of the file, the prefix, should indicate
 * the entity somehow although that is not a requirement.
 */
case class CreatePartitions(entity: String = "",
  /** List of attributes to dump, if None, dump just the primary key. */
  attributes: Option[Seq[String]] = Option(Nil),
  /** Prefix for generated partition files. */
  outputFilePrefix: String = "",
  /** Number of keys written per output chunk. */
  chunkSize: Int = 2000000,
  /** Separator between attributes within a record. */
  attributeSeparator: String = ",",
  /** Separator between records. */
  recordSeparator: String = "\\n",
  /** Concurrency used while creating the partitions. */
  pconcurrency: Int = 4)
/**
 * Dump an entity. This structure is not really used.
 */
case class Dump(entity: String = "",
  /** Output file name; empty means derive from the entity name. */
  outputFilename: String = "",
  /** When true, write a header row. */
  header: Boolean = true,
  batchSize: Int = 0, // usually cannot be greater than 5,000
  /** Explicit attribute list to dump, if any. */
  attributeList: Option[Seq[String]] = None,
  /** Optional CSV file naming the attributes to dump. */
  attributeListFilename: Option[String] = None,
  /** Report progress every this many records. */
  statusFrequency: Int = 100000,
  /** Separator between attributes within a record. */
  attributeSeparator: String = ",",
  /** Separator between records. */
  recordSeparator: String = "\\n",
  /**
   * Run an Entity (a record) through a processor that could
   * transform it in some way.
   */
  recordProcessor: fs2.Pipe[Task, (sdk.Entity, Int), (sdk.Entity, Int)] = fs2.pipe.id,
  /**
   * Convert a record from CRM (an Entity) into an output string suitable
   * for whatever output format you want.
   */
  toOutputProcessor: fs2.Pipe[Task, sdk.Entity, String] = defaultMakeOutputRow // Traversable[String] => fs2.Pipe[Task, sdk.Entity, String] = defaultMakeOutputRow _)
  )
object program {
  import sdk.metadata._
  import sdk._

  private[this] implicit val logger = getLogger

  val defaultConfig = Config()

  /**
   * Command line parser. Each `cmd` records the chosen subcommand in
   * `Config.mode`, which `main` dispatches on; `checkConfig` performs
   * cross-option validation once parsing succeeds.
   */
  val parser = new scopt.OptionParser[Config]("crmauth") {
    override def showUsageOnError = true

    // Shared option definitions reused by several subcommands.
    val urlOpt = opt[String]('r', "url").optional().valueName("<url>").text("Organization service url.").
      action((x, c) => c.copy(url = x))
    val regionOpt = opt[String]("region").optional().valueName("<region abbrev>").text("Organization region abbreviation e.g. NA.").
      action((x, c) => c.copy(region = x))
    val servicesUrlOpt = opt[String]("services-url").optional().valueName("<services url>").text("Org services url (not the web app url)").
      action((x, c) => c.copy(servicesUrl = x))

    head("crmauth", "0.1.0")
    opt[String]('u', "userid").optional().valueName("<userid>").text("Userid")
      .action((x, c) => c.copy(username = x))
    opt[String]('p', "password").optional().valueName("<password>").text("Password")
      .action((x, c) => c.copy(password = x))
    opt[Int]('t', "timeout").valueName("<number>").text(s"Timeout in seconds for each request. Default is ${defaultConfig.timeout}")
      .action((x, c) => c.copy(timeout = x))
    opt[Int]("auth-lease-time").text(s"Auth lease time in minutes. Default is ${defaultConfig.leaseTime}")
      .action((x, c) => c.copy(leaseTime = x))
    opt[Double]("renewal-quantum").text(s"Renewal fraction of leaseTime. Default is ${defaultConfig.leaseTimeRenewalFraction}")
      .action((x, c) => c.copy(leaseTimeRenewalFraction = x))
    //    opt[Int]("connectionPoolIdelTimeoutInSec").text(s"If you get idle timeouts, make this larger. Default is ${defaultConfig.connectionPoolIdelTimeoutInSec}").
    //      action((x, c) => c.copy(connectionPoolIdelTimeoutInSec = x))
    opt[Int]("parallelism").text("Parallelism.").
      validate(con =>
        if (con < 1 || con > 32) failure("Parallelism must be between 1 and 32")
        else success).
      action((x, c) => c.copy(parallelism = x))
    opt[Int]("concurrency").text("Concurrency factor.").
      validate(con =>
        // BUG FIX: the message previously said "between 1 and 32" while the
        // condition caps the value at 16.
        if (con < 1 || con > 16) failure("Concurrency must be between 1 and 16")
        else success).
      action((x, c) => c.copy(concurrency = x))
    opt[Int]("http-retries").text(s"# of http retries. Default is ${defaultConfig.httpRetrys}").
      action((x, c) => c.copy(httpRetrys = x))
    opt[Int]("retry-pause").text(s"Pause between retries in seconds. Default is ${defaultConfig.pauseBetweenRetriesInSeconds}").
      action((x, c) => c.copy(pauseBetweenRetriesInSeconds = x))
    help("help").text("Show help")
    note("Environment variables USERNAME, PASSWORD and REGION are used if they are not specified on the command line.")
    note("")

    cmd("create-test").action((_, c) => c.copy(mode = "create")).
      text("Create entity test.").
      children(
        urlOpt)
    note("You need a username/password and -r URL to run this command.")
    note("")

    cmd("metadata").action((_, c) => c.copy(mode = "metadata")).
      text("Obtain entity metadata from an organization").
      children(
        opt[String]("filter-file").text("File with one line per regex filter.").
          action((x, c) => c.copy(filterFilename = x)),
        opt[String]('r', "url").optional().valueName("<url>").text("Organization service url.").
          action((x, c) => c.copy(url = x)),
        opt[String]('o', "objects").valueName("<Entity Relationship Attributes>").text("What metadata to return. Space separated list. Use quotes in shell. All metadata is returned by default.").
          action((x, c) => c.copy(objects = x)),
        // BUG FIX: the help text embedded a literal "${config.objects}" in a
        // non-interpolated string and described the wrong field; show the
        // actual default output file instead.
        opt[String]("output").valueName("<filename>").text(s"Output file for metadata retrieved using -m. Default is '${defaultConfig.output}'").
          action((x, c) => c.copy(output = x)),
        opt[Unit]("generate-ddl").text("Generate DDL. Uses filter file to target specific entities.").
          action((x, c) => c.copy(metadataAction = "generateDdl")))
    note("You need a username/password and url to run this command.")
    note("The entire SOAP response envelope is output when dumping entity metadata. Individual entries are under EntityMetadata")
    note("")

    cmd("auth").action((_, c) => c.copy(mode = "auth")).
      text("Check that authentication works. This is the default command.").
      children(
        opt[String]('r', "url").optional().valueName("<url>").text("Organization service url.").
          action((x, c) => c.copy(url = x)))
    note("You need a username/password and -r URL to run the auth command.")
    note("")

    cmd("discovery").action((_, c) => c.copy(mode = "discovery")).
      text("Discovery and work with endpoints").
      children(
        opt[String]('r', "url").optional().valueName("<url>").text("Organization service url.").
          action((x, c) => c.copy(url = x)),
        opt[String]("services-url").valueName("<web app url>").optional().text("Find the org services url from a web app url. Can also specify a region abbrev")
          .action((x, c) => c.copy(discoveryAction = "findServicesUrl", url = x)),
        opt[String]("region").valueName("<region abbrev>").optional().text("The region abbrevation to use.")
          .action((x, c) => c.copy(region = x)),
        opt[String]("sdk-version").valueName("<sdk version e.g. 8.0>").optional().text("SDK version to use when obtaining WSDL.")
          .action((x, c) => c.copy(sdkVersion = Some(x))),
        opt[String]("save-org-svc-wsdl-to").valueName("<wsdl output file>").optional().text("Retrieve and save the organization service WSDL")
          .action((x, c) => c.copy(discoveryAction = "saveOrgSvcWsdl", wsdlFilename = x)),
        opt[String]("save-disc-wsdl-to").valueName("<wsdl output file>").optional().text("Retrieve and save the discovery WSDL")
          .action((x, c) => c.copy(discoveryAction = "saveDiscoveryWsdl", wsdlFilename = x)),
        opt[Unit]("list-regions").optional().text("List known (to this program) regions (abbrev and URLs) to use when finding endpoints. This is the default discovery action."),
        opt[String]("list-endpoints").valueName("<region abbrev>").optional().text("List the specific orgs given a specific region and username/password. Default region is NA.")
          .action((x, c) => c.copy(region = x, discoveryAction = "listEndpoints")))
    note("You only need a username/password and discovery URL for the region to run a discovery command.")
    note("")

    cmd("entity").action((_, c) => c.copy(mode = "entity")).
      text("Create or modify entities using a json command file.").
      children(
        opt[String]('r', "url").optional().valueName("<url>").text("Organization service url.").
          action((x, c) => c.copy(url = x)),
        opt[String]("region").optional().valueName("<region abbrev>").text("Organization region abbreviation e.g. NA.").
          action((x, c) => c.copy(region = x)),
        opt[String]("command-file").valueName("<command file name>").optional().text("Run commands in the form of a json file with specialized syntax.")
          .action((x, c) => c.copy(entityCommand = "runCommands", commandFile = x)))
    note("The results of the commands are output to the terminal and can be inspected.")
    note("")

    cmd("query").action((_, c) => c.copy(mode = "query")).
      text("Run a query. Entity names should be logical names e.g. contact").
      children(
        opt[String]('r', "url").optional().valueName("<url>").text("Organization service url.").
          action((x, c) => c.copy(url = x)),
        opt[String]("region").optional().valueName("<region abbrev>").text("Organization region abbreviation e.g. NA.").
          action((x, c) => c.copy(region = x)),
        opt[String]("query-filterfile").valueName("<filename>").text("Input regexp filter, one filter per line. No filters means accept everything.")
          // BUG FIX: the failure message used "$(unknown)", which is not valid
          // string interpolation syntax; report the offending file name.
          // NOTE(review): `File(filename)` relies on a File apply() brought in
          // by the sdk imports — confirm, java.io.File would need `new`.
          .action((x, c) => c.copy(queryCountFilterFilename = x)).validate { filename =>
            if (File(filename).exists) success
            else failure(s"No filter file $filename found.")
          },
        opt[String]("qf").unbounded().valueName("<regex>").text("Regex used to identify which entities are included. Repeat option as needed. May need to be escaped or quoted on command line.")
          .action((x, c) => c.copy(queryFilters = c.queryFilters :+ x)),
        opt[Unit]("count").text("Count records for the given entity name expressed in the filters.")
          .action((x, c) => c.copy(queryType = "countEntities")),
        opt[Boolean]("output-formatted-values").text("Output formatted values in addition to the attribute values.")
          .action((x, c) => c.copy(outputFormattedValues = x)),
        opt[Boolean]("output-exploded-values").text("Output exploded values in addition to the attribute values.")
          .action((x, c) => c.copy(outputExplodedValues = x)),
        opt[String]("formatted-values-suffix").text("Suffix to put on the attribute names that represent formatted values.").
          action((x, c) => c.copy(formattedValuesSuffix = x)),
        opt[String]("create-partition").text("Create a set of persistent primary key partitions for entity.").
          action((x, c) => c.copy(keyfileEntity = x, queryType = "partition")),
        opt[Unit]("create-attribute-file").text("Create an attribute file that can be used for downloading. Download, edit then use --attribute-file. Output is to attributes.csv.").
          action((x, c) => c.copy(queryType = "createAttributeFile")),
        opt[Int]("keyfile-chunk-fetch-size").text("Fetch size when using a keyfile to dump records.").
          action((x, c) => c.copy(keyfileChunkFetchSize = x)),
        opt[Int]("take").text("Take n rows.").
          action((x, c) => c.copy(take = Option(x))),
        opt[Int]("drop").text("Drop leading n rows.").
          action((x, c) => c.copy(drop = Option(x))),
        opt[Unit]("ignore-keyfiles").text("Ignore keyfiles during a dump, if present for an entity.").
          action((x, c) => c.copy(ignoreKeyFiles = true)),
        opt[String]("dump").valueName("<entity name interpreted as a regex>").text("Dump entity data into file. Default output file is 'entityname'.csv.").
          action((x, c) => c.copy(queryType = "dump", dump = c.dump.copy(entity = x))),
        opt[String]("output-filename").valueName("<dump filename>").text("Dump file name. Default is entityname.csv").
          action((x, c) => c.copy(dump = c.dump.copy(outputFilename = x))),
        opt[Unit]("header").text("Add a header to the output. Default is no header.").
          action((x, c) => c.copy(dump = c.dump.copy(header = true))),
        opt[Int]("statusfrequency").text("Report a status every n records.").
          action((x, c) => c.copy(dump = c.dump.copy(statusFrequency = x))),
        opt[Int]("batchsize").valueName("<batch size as int>").text("Number of records to retrieve for each server call.").
          action((x, c) => c.copy(dump = c.dump.copy(batchSize = x))),
        opt[String]("attribute-file").text("CSV file of attributes, (entity logical name, attribute logical name, download y/n).").
          action((x, c) => c.copy(dump = c.dump.copy(attributeListFilename = Some(x)))),
        opt[String]("attributes").text("Comma separate list of attributes to dump.").
          action((x, c) => c.copy(dump = c.dump.copy(attributeList = Some(x.split(","))))))
    note("Attribute file content and attributes specified in --attributes are merged.")
    note("All double quotes and backslashes are removed from dumped values. Formatted values are not dumped.")
    note("")

    cmd("copy").action((_, c) => c.copy(mode = "copy")).text("Copy or update a copy of a local RDBMS from CRM online.")
      .children(
        opt[String]('r', "url").optional().valueName("<url>").text("Organization service url.").
          action((x, c) => c.copy(url = x)),
        opt[String]("region").optional().valueName("<region abbrev>").text("Organization region abbreviation e.g. NA.").
          action((x, c) => c.copy(region = x)),
        opt[String]("filterfile").valueName("<filename>").text("Input regexp filter, one filter per line. No filters means accept everything.")
          // BUG FIX: same invalid "$(unknown)" interpolation as the query
          // filter file validation; report the actual file name.
          .action((x, c) => c.copy(copyFilterFilename = x)).validate { filename =>
            if (File(filename).exists) success
            else failure(s"No filter file $filename found.")
          },
        cmd("ddl").action((_, c) => c.copy(copyAction = "ddl")).text("DDL generation")
          .children(
            opt[String]("crm-metadata-file").optional().text("Filename holding the XML metadata. You can use this program to dump the metadata so it is locally accessible").
              action((x, c) => c.copy(copyMetadataFile = Option(x))),
            // Consistency fix: use an explicit dotted call like every other
            // option instead of relying on infix application across lines.
            opt[String]("dbtype").optional().text("Generate ddl for a specific database type. Use list-targets to see possible target names.")
              .action((x, c) => c.copy(copyDatabaseType = x)),
            cmd("list-targets").action((_, c) => c.copy(copyAction = "listTargets")).text("List database target types.")))
    note("There are a few steps involved in using this command. See the documentation.")
    note("Metadata to create the local RDBMS schema can be generated from a local copy of the metadata information or dynamically retrieved from CRM online.")
    note("")

    cmd("test").action((_, c) => c.copy(mode = "test")).text("Run some tests.").
      children(
        urlOpt,
        regionOpt,
        servicesUrlOpt)
    note("")

    /** True when either the username or password is blank. */
    def emptyUOrP(c: Config): Boolean = c.username.trim.isEmpty || c.password.trim.isEmpty

    // Cross-option validation, dispatched on the selected mode.
    checkConfig { c =>
      c.mode match {
        case "discovery" =>
          if (c.discoveryAction == "listEndpoints" && emptyUOrP(c))
            failure("Listing endpoints requires an username and password.")
          else if (c.discoveryAction == "saveOrgSvcWsdl" && (emptyUOrP(c) || c.url.trim.isEmpty))
            failure("Saving organization service WSDL requires username password and url.")
          else if (c.discoveryAction == "saveDiscoveryWsdl" && (emptyUOrP(c) || c.url.trim.isEmpty))
            failure("Saving discovery WSDL requires username password and url.")
          else if (c.discoveryAction == "findServicesUrl" && (emptyUOrP(c) || c.region.trim.isEmpty))
            failure("Obtaining a services URL requires an username, password, web app url and region.")
          else
            success
        case "auth" =>
          if (emptyUOrP(c))
            failure("Auth check requires an username, password and url.")
          else success
        // BUG FIX: the "create-test" command stores mode = "create", so the
        // previous `case "create-test"` could never match and running
        // create-test hit a MatchError here instead of being validated.
        case "create" =>
          if (emptyUOrP(c))
            failure("Create test requires an username, password and url.")
          else success
        case "query" =>
          if (emptyUOrP(c) || c.url.trim.isEmpty) failure("Queries require a username, password and url.")
          else success
        case "metadata" =>
          if (emptyUOrP(c) || c.url.trim.isEmpty)
            failure("Metadata requires an username, password and url.")
          else success
        case "entity" =>
          if (emptyUOrP(c) || c.url.trim.isEmpty) failure("Entity commands require a username, password and url.")
          else success
        case "test" => success
        case "copy" => success
        // Defensive: a newly added command without its own case should not
        // abort parsing with a MatchError.
        case _ => success
      }
    }
    note("")
    note("The organization service url can be obtained from the developer resources web page within your CRM org or using the discovery command.")
    note("This program only works with MS CRM Online.")
  }

  /**
   * Entry point: configure logging, parse arguments, dispatch on the selected
   * mode, and always shut down the HTTP layer before reporting the runtime.
   */
  def main(args: Array[String]): Unit = {
    utils.mscrmConfigureLogback(Option("mscrm.log"))

    val config = parser.parse(args, defaultConfig) match {
      case Some(c) => c
      case None => Http.shutdown; return // parse failure already printed usage
    }

    import java.time._
    import java.time.temporal._
    val start = Instant.now
    // NOTE(review): `instantString` is not defined in this file; presumably
    // provided by the sdk._ import — confirm.
    println(s"Program start: ${instantString}")
    try {
      config.mode match {
        case "discovery" => Discovery(config)
        case "auth" => Auth(config)
        case "metadata" => Metadata(config)
        case "create" => Create(config)
        case "query" => Query(config)
        case "entity" => EntityScript(config)
        case "test" => Test(config)
        case "copy" => Copy(config)
      }
    } catch {
      case scala.util.control.NonFatal(e) =>
        logger.error(e)("Error occurred at the top level.")
        println("An unexpected and non-recoverable error during processing. Processing has stopped.")
        println("Error: " + e.getMessage)
    } finally {
      Http.shutdown
    }
    val stop = Instant.now
    println(s"Program stop: ${instantString}")
    val elapsed = Duration.between(start, stop)
    println(s"Program runtime in minutes: ${elapsed.toMinutes}")
    println(s"Program runtime in seconds: ${elapsed.toMillis / 1000}")
  }

  /**
   * Create a post request from a config object.
   */
  def createPost(config: Config): Req = crm.sdk.httphelpers.createPost(config.url)
}
object utils {
  import java.nio.file._
  import org.slf4j._
  import ch.qos.logback.classic._
  import ch.qos.logback.classic.joran._
  import ch.qos.logback.core.joran.spi._

  /** slf4j's well-known root logger name. */
  val rootLoggerName = org.slf4j.Logger.ROOT_LOGGER_NAME

  /**
   * Configure logback using the MSCRM conventions: default config file name,
   * and log/conf directories derived from MSCRM_* environment variables.
   */
  def mscrmConfigureLogback(outputlogfile: Option[String] = None): Unit =
    configureLogback(None, outputlogfile, Option(getDefaultLogDir()), Option(getDefaultConfDir()))

  /**
   * Configure logback.
   *
   * @param configfile Config file name, not the full path prefix. Default is `logback.xml`.
   * @param outputlogfile Output log file name. Default is `app.log`.
   * @param logDir Directory for the log output. Default is `.`. LOG_DIRECTORY Can be used in the config file.
   * @param confDir Directory to look for the logging configuration file `configfile`. Default is `.`.
   * @param props Properties for the logging context that become available for variable substitution.
   */
  def configureLogback(
    configfile: Option[String] = None,
    outputlogfile: Option[String] = None,
    logDir: Option[String] = None,
    confDir: Option[String] = None,
    props: Map[String, String] = Map()): Unit = {
    val context = LoggerFactory.getILoggerFactory().asInstanceOf[LoggerContext]
    // Reset first so any prior (auto) configuration is discarded.
    context.reset()
    val configurator = new JoranConfigurator()
    // Properties must be set before doConfigure so the config file can
    // substitute them (e.g. ${LOG_DIRECTORY}).
    context.putProperty("LOG_DIRECTORY", logDir getOrElse ".")
    props.foreach { case (k, v) => context.putProperty(k, v) }
    configurator.setContext(context)
    val loggingconfigfile = Paths.get(confDir getOrElse ".", configfile getOrElse "logback.xml")
    // Fallback: a minimal file-based root logger when no config file is
    // found or parsing fails.
    def default() = createDefaultRootLogger(outputlogfile.getOrElse("app.log"), context)
    if (Files.exists(loggingconfigfile))
      try {
        configurator.doConfigure(loggingconfigfile.toString)
      } catch {
        case j: JoranException =>
          // Install the fallback so later logging still works, then surface
          // the configuration error to the caller.
          default()
          throw new RuntimeException(j)
      }
    else
      default()
  }

  /**
   * Create a default root logger that logs errors to `file`.
   */
  def createDefaultRootLogger(file: String, context: LoggerContext) = {
    import org.slf4j.LoggerFactory
    import ch.qos.logback.classic._
    import ch.qos.logback.classic.encoder._
    import ch.qos.logback.classic.spi._
    import ch.qos.logback.core._
    val logger = context.getLogger(rootLoggerName)
    logger.setLevel(Level.ERROR)
    val encoder = new PatternLayoutEncoder()
    encoder.setContext(context)
    encoder.setPattern("%d{HH:mm:ss.SSS} [%thread] %-5level %logger{35} - %msg%n")
    encoder.start()
    val appender = new FileAppender[ILoggingEvent]()
    appender.setContext(context)
    appender.setName("FILE")
    // Truncate rather than append: each run starts a fresh log file.
    appender.setAppend(false)
    appender.setEncoder(encoder)
    appender.setFile(file)
    appender.start()
    logger.addAppender(appender)
    appender
  }

  /** MSCRM default log dir: MSCRM_LOG_DIR, then MSCRM_HOME/logs, then ".". */
  def getDefaultLogDir(env: Map[String, String] = sys.env): String = {
    env.get("MSCRM_LOG_DIR").map(t => Paths.get(t))
      .orElse(env.get("MSCRM_HOME").map(t => Paths.get(t, "logs")))
      .map(_.toAbsolutePath().toString)
      .getOrElse(Paths.get(".").toAbsolutePath().toString)
  }

  /**
   * MSCRM default conf dir: MSCRM_CONF_DIR, then MSCRM_HOME/conf, then ".".
   * NOTE(review): unlike getDefaultLogDir, this requires the directory to
   * exist — presumably intentional (a log dir may be created later); confirm.
   */
  def getDefaultConfDir(env: Map[String, String] = sys.env): String = {
    env.get("MSCRM_CONF_DIR").map(t => Paths.get(t))
      .orElse(env.get("MSCRM_HOME").map(t => Paths.get(t, "conf")))
      .filter(p => Files.isDirectory(p))
      .map(_.toAbsolutePath().toString)
      .getOrElse(Paths.get(".").toAbsolutePath().toString)
  }
}
| aappddeevv/mscrm-soap-auth | src/main/scala/crm/program.scala | Scala | apache-2.0 | 25,417 |
package org.powlab.jeye.decode.expression
/**
 * Catalog of expression attributes (EA_*) and classifiers (EC_*). A classifier
 * is a primary attribute plus secondary attributes describing how the decoder
 * may traverse and treat an expression.
 */
object ExpressionClassifiers {
  private val EA = ExpressionAttribute
  private val EC = ExpressionClassifier

  // -- Traversal attributes -- \\
  val EA_WIDE = EA("wide") // breadth-first traversal possible
  val EA_DEPTH = EA("depth") // depth-first traversal possible
  val EA_FIXED = EA("fixed") // requires no traversal

  // -- Characteristic attributes -- \\
  val EA_TYPED = EA("typed") // carries a type
  val EA_CONST = EA("const") // constant
  val EA_IFABLE = EA("ifable") // conditional
  val EA_BRACKETABLE = EA("bracketable") // may be wrapped in parentheses
  val EA_CHANGEABLE = EA("changeable") // mutable
  val EA_INVOKEABLE = EA("invokable") // invokable
  val EA_ASSIGNABLE = EA("assignable") // assignable

  // -- Named attributes -- \\
  val EA_TYPE = EA("type") // type
  val EA_WORD = EA("word") // word/keyword
  val EA_SIGN = EA("sign") // sign/operator token
  val EA_LINE = EA("line") // line
  val EA_CLASS_DECLARE = EA("class_declare") // class declaration
  val EA_MODIFIERS = EA("modifiers")
  val EA_TYPE_PARAMETERS = EA("type_parameters")
  val EA_RETURN_TYPE = EA("return_type")
  val EA_THROWS_EXPRESSION = EA("throws_expression")
  val EA_METHOD_PARAMETERS = EA("method_parameters")
  val EA_CLASS = EA("class") // class
  val EA_METHOD_DECLARE = EA("method_declare") // method declaration
  val EA_METHOD = EA("method") // method
  val EA_FIELD_DECLARE = EA("field_declare") // field declaration
  val EA_FIELD = EA("field") // field
  val EA_BLOCK = EA("block") // block
  val EA_STATEMENT = EA("statement") // named block / statement
  val EA_EMPTY = EA("empty") // empty
  val EA_NULL = EA("null") // null
  val EA_BOOLEAN = EA("boolean") // boolean
  val EA_BYTE = EA("byte") // byte
  val EA_SHORT = EA("short") // two-byte integer
  val EA_CHAR = EA("char") // character
  val EA_INT = EA("int") // integer
  val EA_LONG = EA("long") // wide integer
  val EA_FLOAT = EA("float") // floating point
  val EA_DOUBLE = EA("double") // wide floating point
  val EA_STRING = EA("string") // string
  val EA_CLASS_CONST = EA("class_const") // class constant
  val EA_ENUM_LITERAL = EA("enum_literal") // enum literal
  val EA_ARRAY_LITERAL = EA("array_literal") // array literal
  // NOTE(review): the attribute name string is misspelled ("annotaion");
  // left unchanged because renaming could break name-based lookups — confirm
  // no code matches on this string before fixing.
  val EA_ANNOTATION = EA("annotaion") // annotation
  val EA_DEFAULT_VALUE = EA("default_value") // default value
  val EA_IMPORT = EA("import") // import
  val EA_METHOD_SIGNATURE = EA("method_signature") // method signature
  val EA_LOCAL_VARIABLE = EA("local_variable") // local variable
  val EA_LABEL = EA("label") // labeled
  val EA_BREAK_LABEL = EA("break_label") // break to label
  val EA_CONTINUE_LABEL = EA("continue_label") // continue to label
  val EA_TERNARY_REF = EA("ternary_ref") // reference to a ternary
  val EA_TERNARY = EA("ternary") // ternary
  val EA_TERNARY_BOOLEAN = EA("ternary_boolean") // boolean-valued ternary
  val EA_WHILE_CYCLE = EA("while_cycle") // while loop
  val EA_FOR_CYCLE = EA("for_cycle") // for loop
  val EA_FOREACH_CYCLE = EA("foreach_cycle") // for-each loop
  val EA_IF_WORD = EA("if_word") // full condition with the `if` keyword
  val EA_IF_SIMPLE = EA("if_simple") // simple condition
  val EA_IF_BOOLEAN = EA("if_boolean") // boolean condition
  val EA_IF_GROUP = EA("if_group") // grouped (and/or) condition
  val EA_IF_XOR_GROUP = EA("if_xor_group") // grouped (xor) condition
  val EA_IF_CMP = EA("if_cmp") // value selection by condition
  val EA_SWITCH = EA("switch") // switch
  val EA_RETURN = EA("return") // return
  val EA_PRIMITIVE_CAST = EA("primitive_cast") // cast of one primitive to another
  val EA_CASE = EA("case") // case branch
  val EA_CATCH = EA("catch") // catch clause
  val EA_GET_ARRAY_ITEM = EA("get_array_item") // read an array element
  val EA_MATH_PAIR = EA("math_pair") // sign + operand pair
  val EA_MATH = EA("math") // arithmetic
  val EA_MATH_NEGATE = EA("math_negate") // arithmetic negation
  val EA_MATH_TILDE = EA("math_tilde") // bitwise inversion
  val EA_PRE_INC = EA("pre_inc") // pre-increment
  val EA_POST_INC = EA("post_inc") // post-increment
  val EA_STORE_DECLARE_VAR = EA("store_declare") // local variable declared without initialization
  val EA_STORE_VAR = EA("store_var") // store a value into a local variable
  val EA_STORE_NEW_VAR = EA("store_new_var") // first store into a local variable
  val EA_STORE_ARRAY_VAR = EA("store_array_var") // store a value into an array element
  val EA_GET_FIELD = EA("get_field") // read an instance field
  val EA_PUT_FIELD = EA("put_field") // write an instance field
  val EA_GET_STATIC_FIELD = EA("get_static_field") // read a static field
  val EA_PUT_STATIC_FIELD = EA("put_static_field") // write a static field
  val EA_INSTANCE_OF = EA("instance_of") // instanceof check
  val EA_ARGUMENTS = EA("arguments") // arguments
  val EA_INVOKE_INTERFACE = EA("invoke_interface") // interface method invocation
  val EA_INVOKE_VIRTUAL = EA("invoke_virtual") // virtual method invocation
  val EA_CONSTRUCTOR = EA("constructor") // constructor
  val EA_NEW_OBJECT = EA("new_object") // object creation
  val EA_NEW_ARRAY = EA("new_array") // array creation
  val EA_INIT_ARRAY = EA("init_array") // array initialization
  val EA_ARRAY_LENGTH = EA("array_length") // array length
  val EA_INVOKE_SPECIAL = EA("invoke_special") // special method invocation
  val EA_INVOKE_STATIC = EA("invoke_static") // static method invocation
  val EA_METHOD_TYPE = EA("method_type") // method-type retrieval
  val EA_INVOKE_DYNAMIC = EA("invoke_dynamic") // dynamic method invocation
  val EA_SYNCHRONIZE = EA("synchronize") // synchronized
  val EA_CHECK_CAST = EA("check_cast") // reference cast
  val EA_THROW = EA("throw") // throw
  val EA_WRAPPED = EA("wrapped") // wrapped
  val EA_COMMENT = EA("comment") // comment

  // -- Classifiers -- \\
  val EC_TYPE = EC(EA_TYPE, EA_TYPED, EA_WIDE)
  val EC_WORD = EC(EA_WORD, EA_FIXED)
  val EC_SIGN = EC(EA_SIGN, EA_FIXED)
  val EC_LINE = EC(EA_LINE, EA_CHANGEABLE, EA_WIDE)
  val EC_CLASS_DECLARE = EC(EA_CLASS_DECLARE, EA_FIXED)
  val EC_CLASS = EC(EA_CLASS, EA_WIDE)
  val EC_METHOD_DECLARE = EC(EA_METHOD_DECLARE, EA_WIDE)
  val EC_TYPE_PARAMETERS = EC(EA_TYPE_PARAMETERS, EA_FIXED)
  val EC_RETURN_TYPE = EC(EA_RETURN_TYPE, EA_FIXED)
  val EC_MODIFIERS = EC(EA_MODIFIERS, EA_FIXED)
  val EC_THROWS_EXPRESSION = EC(EA_THROWS_EXPRESSION, EA_FIXED)
  val EC_METHOD_PARAMETERS = EC(EA_METHOD_PARAMETERS, EA_FIXED)
  val EC_METHOD = EC(EA_METHOD, EA_WIDE)
  val EC_FIELD_DECLARE = EC(EA_FIELD_DECLARE, EA_WIDE)
  val EC_FIELD = EC(EA_FIELD, EA_WIDE)
  val EC_BLOCK = EC(EA_BLOCK, EA_CHANGEABLE, EA_DEPTH)
  val EC_STATEMENT = EC(EA_STATEMENT, EA_CHANGEABLE, EA_DEPTH)
  val EC_EMPTY = EC(EA_EMPTY, EA_TYPED, EA_FIXED)
  val EC_NULL = EC(EA_NULL, EA_TYPED, EA_CONST, EA_FIXED)
  val EC_BOOLEAN = EC(EA_BOOLEAN, EA_TYPED, EA_CONST, EA_FIXED)
  val EC_BYTE = EC(EA_BYTE, EA_TYPED, EA_CONST, EA_FIXED)
  val EC_SHORT = EC(EA_SHORT, EA_TYPED, EA_CONST, EA_FIXED)
  val EC_CHAR = EC(EA_CHAR, EA_TYPED, EA_CONST, EA_FIXED)
  val EC_INT = EC(EA_INT, EA_TYPED, EA_CONST, EA_FIXED)
  val EC_LONG = EC(EA_LONG, EA_TYPED, EA_CONST, EA_FIXED)
  val EC_FLOAT = EC(EA_FLOAT, EA_TYPED, EA_CONST, EA_FIXED)
  val EC_DOUBLE = EC(EA_DOUBLE, EA_TYPED, EA_CONST, EA_FIXED)
  val EC_STRING = EC(EA_STRING, EA_TYPED, EA_CONST, EA_FIXED)
  val EC_CLASS_CONST = EC(EA_CLASS_CONST, EA_TYPED, EA_CONST, EA_FIXED)
  val EC_ENUM_LITERAL = EC(EA_ENUM_LITERAL, EA_FIXED)
  val EC_ARRAY_LITERAL = EC(EA_ARRAY_LITERAL, EA_WIDE)
  val EC_ANNOTATION = EC(EA_ANNOTATION, EA_FIXED) // switch to EA_WIDE once the expression is reworked
  val EC_DEFAULT_VALUE = EC(EA_DEFAULT_VALUE, EA_FIXED) // switch to EA_WIDE once the expression is reworked
  val EC_IMPORT = EC(EA_IMPORT, EA_FIXED)
  val EC_METHOD_SIGNATURE = EC(EA_METHOD_SIGNATURE, EA_FIXED)
  val EC_LOCAL_VARIABLE = EC(EA_LOCAL_VARIABLE, EA_TYPED, EA_WIDE)
  val EC_LABEL = EC(EA_LABEL, EA_FIXED)
  val EC_BREAK_LABEL = EC(EA_BREAK_LABEL, EA_FIXED)
  val EC_CONTINUE_LABEL = EC(EA_CONTINUE_LABEL, EA_FIXED)
  val EC_TERNARY_REF = EC(EA_TERNARY_REF, EA_TYPED, EA_WIDE)
  val EC_TERNARY = EC(EA_TERNARY, EA_TYPED, EA_WIDE)
  val EC_TERNARY_BOOLEAN = EC(EA_TERNARY_BOOLEAN, EA_TYPED, EA_WIDE)
  val EC_WHILE_CYCLE = EC(EA_WHILE_CYCLE, EA_WIDE)
  val EC_FOR_CYCLE = EC(EA_FOR_CYCLE, EA_WIDE)
  val EC_FOREACH_CYCLE = EC(EA_FOREACH_CYCLE, EA_WIDE)
  val EC_IF_WORD = EC(EA_IF_WORD, EA_TYPED, EA_WIDE)
  val EC_IF_SIMPLE = EC(EA_IF_SIMPLE, EA_IFABLE, EA_TYPED, EA_WIDE)
  val EC_IF_BOOLEAN = EC(EA_IF_BOOLEAN, EA_IFABLE, EA_TYPED, EA_WIDE)
  val EC_IF_GROUP = EC(EA_IF_GROUP, EA_IFABLE, EA_TYPED, EA_WIDE)
  val EC_IF_XOR_GROUP = EC(EA_IF_XOR_GROUP, EA_IFABLE, EA_TYPED, EA_WIDE)
  val EC_IF_CMP = EC(EA_IF_CMP, EA_TYPED, EA_WIDE)
  val EC_SWITCH = EC(EA_SWITCH, EA_WIDE)
  val EC_RETURN = EC(EA_RETURN, EA_TYPED, EA_WIDE)
  val EC_PRIMITIVE_CAST = EC(EA_PRIMITIVE_CAST, EA_TYPED, EA_WIDE)
  val EC_CASE = EC(EA_CASE, EA_WIDE)
  val EC_CATCH = EC(EA_CATCH, EA_FIXED)
  val EC_GET_ARRAY_ITEM = EC(EA_GET_ARRAY_ITEM, EA_TYPED, EA_WIDE)
  val EC_MATH_PAIR = EC(EA_MATH_PAIR, EA_WIDE)
  val EC_MATH = EC(EA_MATH, EA_TYPED, EA_WIDE)
  val EC_MATH_NEGATE = EC(EA_MATH_NEGATE, EA_TYPED, EA_WIDE)
  val EC_MATH_TILDE = EC(EA_MATH_TILDE, EA_TYPED, EA_WIDE)
  val EC_PRE_INC = EC(EA_PRE_INC, EA_TYPED, EA_WIDE)
  val EC_POST_INC = EC(EA_POST_INC, EA_TYPED, EA_WIDE)
  val EC_STORE_DECLARE_VAR = EC(EA_STORE_DECLARE_VAR, EA_TYPED, EA_WIDE)
  val EC_STORE_VAR = EC(EA_STORE_VAR, EA_ASSIGNABLE, EA_TYPED, EA_WIDE)
  val EC_STORE_NEW_VAR = EC(EA_STORE_NEW_VAR, EA_ASSIGNABLE, EA_TYPED, EA_WIDE)
  val EC_STORE_ARRAY_VAR = EC(EA_STORE_ARRAY_VAR, EA_ASSIGNABLE, EA_TYPED, EA_WIDE)
  val EC_GET_FIELD = EC(EA_GET_FIELD, EA_TYPED, EA_WIDE)
  val EC_PUT_FIELD = EC(EA_PUT_FIELD, EA_ASSIGNABLE, EA_TYPED, EA_WIDE)
  val EC_GET_STATIC_FIELD = EC(EA_GET_STATIC_FIELD, EA_TYPED, EA_WIDE)
  val EC_PUT_STATIC_FIELD = EC(EA_PUT_STATIC_FIELD, EA_ASSIGNABLE, EA_TYPED, EA_WIDE)
  val EC_INSTANCE_OF = EC(EA_INSTANCE_OF, EA_TYPED, EA_WIDE)
  val EC_ARGUMENTS = EC(EA_ARGUMENTS, EA_CHANGEABLE, EA_WIDE)
  val EC_INVOKE_INTERFACE = EC(EA_INVOKE_INTERFACE, EA_TYPED, EA_INVOKEABLE, EA_WIDE)
  val EC_INVOKE_VIRTUAL = EC(EA_INVOKE_VIRTUAL, EA_TYPED, EA_INVOKEABLE, EA_WIDE)
  val EC_CONSTRUCTOR = EC(EA_CONSTRUCTOR, EA_TYPED, EA_INVOKEABLE, EA_WIDE)
  val EC_NEW_OBJECT = EC(EA_NEW_OBJECT, EA_TYPED, EA_WIDE)
  val EC_NEW_ARRAY = EC(EA_NEW_ARRAY, EA_TYPED, EA_WIDE)
  val EC_INIT_ARRAY = EC(EA_INIT_ARRAY, EA_TYPED, EA_WIDE)
  val EC_ARRAY_LENGTH = EC(EA_ARRAY_LENGTH, EA_TYPED, EA_WIDE)
  val EC_INVOKE_SPECIAL = EC(EA_INVOKE_SPECIAL, EA_TYPED, EA_INVOKEABLE, EA_WIDE)
  val EC_INVOKE_STATIC = EC(EA_INVOKE_STATIC, EA_TYPED, EA_INVOKEABLE, EA_WIDE)
  // NOTE(review): the identifier below starts with a non-Latin "C" (a
  // Cyrillic letter). It is kept for source compatibility; the Latin-only
  // alias that follows is the name new code should use.
  val EΠ‘_METHOD_TYPE = EC(EA_METHOD_TYPE, EA_TYPED, EA_FIXED)
  /** Latin-letter alias for the Cyrillic-spelled val above. */
  val EC_METHOD_TYPE = EΠ‘_METHOD_TYPE
  val EC_INVOKE_DYNAMIC = EC(EA_INVOKE_DYNAMIC, EA_TYPED, EA_WIDE)
  val EC_SYNCHRONIZE = EC(EA_SYNCHRONIZE, EA_WIDE)
  val EC_CHECK_CAST = EC(EA_CHECK_CAST, EA_TYPED, EA_BRACKETABLE, EA_WIDE)
  val EC_THROW = EC(EA_THROW, EA_WIDE)
  val EC_WRAPPED = EC(EA_WRAPPED, EA_WIDE)
  // NOTE(review): Cyrillic-lettered identifier kept for compatibility; Latin
  // alias added below.
  val EΠ‘_COMMENT = EC(EA_COMMENT, EA_FIXED)
  /** Latin-letter alias for the Cyrillic-spelled val above. */
  val EC_COMMENT = EΠ‘_COMMENT
}
/** Factory for [[ExpressionAttribute]] values. */
object ExpressionAttribute {
  /** Wraps `name` in a new attribute instance. */
  def apply(name: String): ExpressionAttribute =
    new ExpressionAttribute(name)
}
/**
 * A single attribute that can be attached to an expression classifier
 * (e.g. typed, assignable, wide).
 *
 * @param name the attribute's identifying name
 */
class ExpressionAttribute(val name: String) {
  /** An attribute renders simply as its name. */
  override def toString(): String = name
}
/** Companion of [[ExpressionClassifier]]: builds a classifier from one or more attributes. */
object ExpressionClassifier {
  /**
   * Creates a classifier whose head attribute is `attr`, followed by
   * `attrs` in the order given. The head attribute names the classifier.
   */
  def apply(attr: ExpressionAttribute, attrs: ExpressionAttribute*): ExpressionClassifier = {
    val ordered = attr +: attrs
    new ExpressionClassifier(ordered.toArray)
  }
}
/**
 * Classifier of an expression: an ordered collection of attributes whose
 * first element ("head") names the expression kind and whose remaining
 * elements describe its capabilities.
 *
 * Assumes `attrs` is non-empty — presumably guaranteed by the companion
 * factory, TODO confirm no direct empty-array construction exists.
 */
class ExpressionClassifier(attrs: Array[ExpressionAttribute]) {
  /** The primary (naming) attribute. */
  def head = attrs.head
  /** True when `attr` is this classifier's primary attribute. */
  def is(attr: ExpressionAttribute): Boolean = attr == head
  /** True when `attr` appears anywhere among this classifier's attributes. */
  def has(attr: ExpressionAttribute): Boolean = attrs contains attr
  override def toString(): String = "clf-name: " + head + ", attrs: " + attrs.tail.mkString(", ")
}
| powlab/jeye | src/main/scala/org/powlab/jeye/decode/expression/ExpressionClassifiers.scala | Scala | apache-2.0 | 13,670 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.consumer
import java.util.Properties
import org.junit.Assert._
import kafka.common.MessageStreamsExistException
import kafka.integration.KafkaServerTestHarness
import kafka.javaapi.consumer.ConsumerRebalanceListener
import kafka.serializer._
import kafka.server._
import kafka.utils.TestUtils._
import kafka.utils._
import org.apache.kafka.common.record.CompressionType
import org.apache.kafka.common.security.JaasUtils
import org.apache.log4j.{Level, Logger}
import org.junit.{After, Before, Test}
import scala.collection._
/**
 * Integration tests for the legacy ZooKeeper-based high-level consumer
 * (ZookeeperConsumerConnector). Exercises: timeout behavior on an empty
 * topic, basic consumption, partition rebalancing when consumers join,
 * compressed payloads, custom string decoders, leader selection for a
 * single-partition topic, and the ConsumerRebalanceListener callbacks.
 * Partition-ownership assertions read the consumer-owner ZNodes directly.
 */
@deprecated("This test has been deprecated and it will be removed in a future release", "0.10.0.0")
class ZookeeperConsumerConnectorTest extends KafkaServerTestHarness with Logging {
  val RebalanceBackoffMs = 5000
  var dirs : ZKGroupTopicDirs = null
  val numNodes = 2
  val numParts = 2
  val topic = "topic1"
  val overridingProps = new Properties()
  overridingProps.put(KafkaConfig.NumPartitionsProp, numParts.toString)
  var zkUtils: ZkUtils = null
  override def generateConfigs =
    TestUtils.createBrokerConfigs(numNodes, zkConnect).map(KafkaConfig.fromProps(_, overridingProps))
  val group = "group1"
  val consumer0 = "consumer0"
  val consumer1 = "consumer1"
  val consumer2 = "consumer2"
  val consumer3 = "consumer3"
  val nMessages = 2
  // Opens the ZkUtils connection and resolves the group/topic ZK directories
  // used by the partition-ownership assertions below.
  @Before
  override def setUp() {
    super.setUp()
    zkUtils = ZkUtils(zkConnect, zkSessionTimeout, zkConnectionTimeout, zkAclsEnabled.getOrElse(JaasUtils.isZkSecurityEnabled))
    dirs = new ZKGroupTopicDirs(group, topic)
  }
  @After
  override def tearDown() {
    // swallow close errors so teardown always reaches the superclass cleanup
    if (zkUtils != null)
      CoreUtils.swallow(zkUtils.close(), this)
    super.tearDown()
  }
  // End-to-end flow: consumer timeout on empty topic, consumption, rebalance
  // on second consumer, empty-map streams, and duplicate createMessageStreams.
  @Test
  def testBasic() {
    // silence request-handler logging for the duration of the test
    val requestHandlerLogger = Logger.getLogger(classOf[KafkaRequestHandler])
    requestHandlerLogger.setLevel(Level.FATAL)
    // test consumer timeout logic
    val consumerConfig0 = new ConsumerConfig(
      TestUtils.createConsumerProperties(zkConnect, group, consumer0)) {
      override val consumerTimeoutMs = 200
    }
    val zkConsumerConnector0 = new ZookeeperConsumerConnector(consumerConfig0, true)
    val topicMessageStreams0 = zkConsumerConnector0.createMessageStreams(Map(topic -> 1), new StringDecoder(), new StringDecoder())
    // no messages to consume, we should hit timeout;
    // also the iterator should support re-entrant, so loop it twice
    for (_ <- 0 until 2) {
      try {
        getMessages(topicMessageStreams0, nMessages * 2)
        fail("should get an exception")
      } catch {
        case _: ConsumerTimeoutException => // this is ok
      }
    }
    zkConsumerConnector0.shutdown
    // send some messages to each broker
    val sentMessages1 = produceMessages(nMessages, acks = 0) ++ produceMessages(nMessages, acks = 1)
    // wait to make sure the topic and partition have a leader for the successful case
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0)
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 1)
    TestUtils.waitUntilMetadataIsPropagated(servers, topic, 0)
    TestUtils.waitUntilMetadataIsPropagated(servers, topic, 1)
    // create a consumer
    val consumerConfig1 = new ConsumerConfig(TestUtils.createConsumerProperties(zkConnect, group, consumer1))
    val zkConsumerConnector1 = new ZookeeperConsumerConnector(consumerConfig1, true)
    val topicMessageStreams1 = zkConsumerConnector1.createMessageStreams(Map(topic -> 1), new StringDecoder(), new StringDecoder())
    val receivedMessages1 = getMessages(topicMessageStreams1, nMessages * 2)
    assertEquals(sentMessages1.sorted, receivedMessages1.sorted)
    // also check partition ownership: single consumer owns both partitions
    val actual_1 = getZKChildrenValues(dirs.consumerOwnerDir)
    val expected_1 = List( ("0", "group1_consumer1-0"),
                           ("1", "group1_consumer1-0"))
    assertEquals(expected_1, actual_1)
    // commit consumed offsets
    zkConsumerConnector1.commitOffsets(true)
    // create a consumer
    val consumerConfig2 = new ConsumerConfig(TestUtils.createConsumerProperties(zkConnect, group, consumer2)) {
      override val rebalanceBackoffMs = RebalanceBackoffMs
    }
    val zkConsumerConnector2 = new ZookeeperConsumerConnector(consumerConfig2, true)
    val topicMessageStreams2 = zkConsumerConnector2.createMessageStreams(Map(topic -> 1), new StringDecoder(), new StringDecoder())
    // send some messages to each broker
    val sentMessages2 = produceMessages(nMessages, acks = 0) ++ produceMessages(nMessages, acks = 1)
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0)
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 1)
    val receivedMessages2 = getMessages(topicMessageStreams1, nMessages) ++ getMessages(topicMessageStreams2, nMessages)
    assertEquals(sentMessages2.sorted, receivedMessages2.sorted)
    // also check partition ownership: after rebalance each consumer owns one partition
    val actual_2 = getZKChildrenValues(dirs.consumerOwnerDir)
    val expected_2 = List( ("0", "group1_consumer1-0"),
                           ("1", "group1_consumer2-0"))
    assertEquals(expected_2, actual_2)
    // create a consumer with empty map
    val consumerConfig3 = new ConsumerConfig(
      TestUtils.createConsumerProperties(zkConnect, group, consumer3))
    val zkConsumerConnector3 = new ZookeeperConsumerConnector(consumerConfig3, true)
    zkConsumerConnector3.createMessageStreams(new mutable.HashMap[String, Int]())
    // send some messages to each broker
    val sentMessages3 = produceMessages(nMessages, acks = 0) ++ produceMessages(nMessages, acks = 1)
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0)
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 1)
    val receivedMessages3 = getMessages(topicMessageStreams1, nMessages) ++ getMessages(topicMessageStreams2, nMessages)
    assertEquals(sentMessages3.sorted, receivedMessages3.sorted)
    // also check partition ownership
    val actual_3 = getZKChildrenValues(dirs.consumerOwnerDir)
    assertEquals(expected_2, actual_3)
    // call createMessageStreams twice should throw MessageStreamsExistException
    try {
      zkConsumerConnector3.createMessageStreams(new mutable.HashMap[String, Int]())
      fail("Should fail with MessageStreamsExistException")
    } catch {
      case _: MessageStreamsExistException => // expected
    }
    zkConsumerConnector1.shutdown
    zkConsumerConnector2.shutdown
    zkConsumerConnector3.shutdown
    info("all consumer connectors stopped")
    requestHandlerLogger.setLevel(Level.ERROR)
  }
  // Helper: produces numMessages string messages to `topic` across the brokers.
  private def produceMessages(numMessages: Int, acks: Int = -1,
                              compressionType: CompressionType = CompressionType.NONE): Seq[String] = {
    TestUtils.generateAndProduceMessages(servers, topic, numMessages, acks, compressionType)
  }
  // Same scenario as testBasic but all produced batches are GZIP-compressed.
  @Test
  def testCompression() {
    val requestHandlerLogger = Logger.getLogger(classOf[kafka.server.KafkaRequestHandler])
    requestHandlerLogger.setLevel(Level.FATAL)
    // send some messages to each broker
    val sentMessages1 = produceMessages(nMessages, acks = 0, CompressionType.GZIP) ++
                        produceMessages(nMessages, acks = 1, CompressionType.GZIP)
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0)
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 1)
    TestUtils.waitUntilMetadataIsPropagated(servers, topic, 0)
    TestUtils.waitUntilMetadataIsPropagated(servers, topic, 1)
    // create a consumer
    val consumerConfig1 = new ConsumerConfig(
      TestUtils.createConsumerProperties(zkConnect, group, consumer1))
    val zkConsumerConnector1 = new ZookeeperConsumerConnector(consumerConfig1, true)
    val topicMessageStreams1 = zkConsumerConnector1.createMessageStreams(Map(topic -> 1), new StringDecoder(), new StringDecoder())
    val receivedMessages1 = getMessages(topicMessageStreams1, nMessages * 2)
    assertEquals(sentMessages1.sorted, receivedMessages1.sorted)
    // also check partition ownership
    val actual_1 = getZKChildrenValues(dirs.consumerOwnerDir)
    val expected_1 = List( ("0", "group1_consumer1-0"),
                           ("1", "group1_consumer1-0"))
    assertEquals(expected_1, actual_1)
    // commit consumed offsets
    zkConsumerConnector1.commitOffsets(true)
    // create a consumer
    val consumerConfig2 = new ConsumerConfig(TestUtils.createConsumerProperties(zkConnect, group, consumer2)) {
      override val rebalanceBackoffMs = RebalanceBackoffMs
    }
    val zkConsumerConnector2 = new ZookeeperConsumerConnector(consumerConfig2, true)
    val topicMessageStreams2 = zkConsumerConnector2.createMessageStreams(Map(topic -> 1), new StringDecoder(), new StringDecoder())
    // send some messages to each broker
    val sentMessages2 = produceMessages(nMessages, acks = 0, CompressionType.GZIP) ++
                        produceMessages(nMessages, acks = 1, CompressionType.GZIP)
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0)
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 1)
    val receivedMessages2 = getMessages(topicMessageStreams1, nMessages) ++ getMessages(topicMessageStreams2, nMessages)
    assertEquals(sentMessages2.sorted, receivedMessages2.sorted)
    // also check partition ownership
    val actual_2 = getZKChildrenValues(dirs.consumerOwnerDir)
    val expected_2 = List( ("0", "group1_consumer1-0"),
                           ("1", "group1_consumer2-0"))
    assertEquals(expected_2, actual_2)
    // create a consumer with empty map
    val consumerConfig3 = new ConsumerConfig(
      TestUtils.createConsumerProperties(zkConnect, group, consumer3))
    val zkConsumerConnector3 = new ZookeeperConsumerConnector(consumerConfig3, true)
    zkConsumerConnector3.createMessageStreams(new mutable.HashMap[String, Int](), new StringDecoder(), new StringDecoder())
    // send some messages to each broker
    val sentMessages3 = produceMessages(nMessages, acks = 0, CompressionType.GZIP) ++
                        produceMessages(nMessages, acks = 1, CompressionType.GZIP)
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0)
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 1)
    val receivedMessages3 = getMessages(topicMessageStreams1, nMessages) ++ getMessages(topicMessageStreams2, nMessages)
    assertEquals(sentMessages3.sorted, receivedMessages3.sorted)
    // also check partition ownership
    val actual_3 = getZKChildrenValues(dirs.consumerOwnerDir)
    assertEquals(expected_2, actual_3)
    zkConsumerConnector1.shutdown
    zkConsumerConnector2.shutdown
    zkConsumerConnector3.shutdown
    info("all consumer connectors stopped")
    requestHandlerLogger.setLevel(Level.ERROR)
  }
  // A single consumer draining a larger (400-message) compressed set.
  @Test
  def testCompressionSetConsumption() {
    // send some messages to each broker
    val sentMessages = produceMessages(200, acks = 0, CompressionType.GZIP) ++
                       produceMessages(200, acks = 1, CompressionType.GZIP)
    TestUtils.waitUntilMetadataIsPropagated(servers, topic, 0)
    TestUtils.waitUntilMetadataIsPropagated(servers, topic, 1)
    val consumerConfig1 = new ConsumerConfig(TestUtils.createConsumerProperties(zkConnect, group, consumer0))
    val zkConsumerConnector1 = new ZookeeperConsumerConnector(consumerConfig1, true)
    val topicMessageStreams1 = zkConsumerConnector1.createMessageStreams(Map(topic -> 1), new StringDecoder(), new StringDecoder())
    val receivedMessages = getMessages(topicMessageStreams1, 400)
    assertEquals(sentMessages.sorted, receivedMessages.sorted)
    // also check partition ownership
    val actual_2 = getZKChildrenValues(dirs.consumerOwnerDir)
    val expected_2 = List( ("0", "group1_consumer0-0"),
                           ("1", "group1_consumer0-0"))
    assertEquals(expected_2, actual_2)
    zkConsumerConnector1.shutdown
  }
  // Consumes via the stream iterator directly, verifying the StringDecoder path.
  @Test
  def testConsumerDecoder() {
    val requestHandlerLogger = Logger.getLogger(classOf[kafka.server.KafkaRequestHandler])
    requestHandlerLogger.setLevel(Level.FATAL)
    // send some messages to each broker
    val sentMessages = produceMessages(nMessages, acks = 0) ++ produceMessages(nMessages, acks = 1)
    TestUtils.waitUntilMetadataIsPropagated(servers, topic, 0)
    TestUtils.waitUntilMetadataIsPropagated(servers, topic, 1)
    val consumerConfig = new ConsumerConfig(TestUtils.createConsumerProperties(zkConnect, group, consumer1))
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 0)
    waitUntilLeaderIsElectedOrChanged(zkClient, topic, 1)
    val zkConsumerConnector =
      new ZookeeperConsumerConnector(consumerConfig, true)
    val topicMessageStreams =
      zkConsumerConnector.createMessageStreams(Map(topic -> 1), new StringDecoder(), new StringDecoder())
    var receivedMessages: List[String] = Nil
    for (messageStreams <- topicMessageStreams.values) {
      for (messageStream <- messageStreams) {
        val iterator = messageStream.iterator
        for (_ <- 0 until nMessages * 2) {
          assertTrue(iterator.hasNext())
          val message = iterator.next().message
          receivedMessages ::= message
          debug("received message: " + message)
        }
      }
    }
    assertEquals(sentMessages.sorted, receivedMessages.sorted)
    zkConsumerConnector.shutdown()
    requestHandlerLogger.setLevel(Level.ERROR)
  }
  // Verifies the consumer's topic registry for a 1-partition, 1-replica topic.
  @Test
  def testLeaderSelectionForPartition() {
    val zkUtils = ZkUtils(zkConnect, 6000, 30000, false)
    // create topic topic1 with 1 partition on broker 0
    createTopic(topic, numPartitions = 1, replicationFactor = 1)
    // send some messages to each broker
    val sentMessages1 = produceMessages(nMessages)
    // create a consumer
    val consumerConfig1 = new ConsumerConfig(TestUtils.createConsumerProperties(zkConnect, group, consumer1))
    val zkConsumerConnector1 = new ZookeeperConsumerConnector(consumerConfig1, true)
    val topicMessageStreams1 = zkConsumerConnector1.createMessageStreams(Map(topic -> 1), new StringDecoder(), new StringDecoder())
    val topicRegistry = zkConsumerConnector1.getTopicRegistry
    assertEquals(1, topicRegistry.map(r => r._1).size)
    assertEquals(topic, topicRegistry.map(r => r._1).head)
    val topicsAndPartitionsInRegistry = topicRegistry.map(r => (r._1, r._2.map(p => p._2)))
    val brokerPartition = topicsAndPartitionsInRegistry.head._2.head
    assertEquals(0, brokerPartition.partitionId)
    // also check partition ownership
    val actual_1 = getZKChildrenValues(dirs.consumerOwnerDir)
    val expected_1 = List( ("0", "group1_consumer1-0"))
    assertEquals(expected_1, actual_1)
    val receivedMessages1 = getMessages(topicMessageStreams1, nMessages)
    assertEquals(sentMessages1, receivedMessages1)
    zkConsumerConnector1.shutdown()
    zkUtils.close()
  }
  // Verifies both rebalance-listener callbacks fire with the expected
  // ownership maps as a second consumer joins the group.
  @Test
  def testConsumerRebalanceListener() {
    // Send messages to create topic
    produceMessages(nMessages, acks = 0)
    produceMessages(nMessages, acks = 1)
    val consumerConfig1 = new ConsumerConfig(TestUtils.createConsumerProperties(zkConnect, group, consumer1))
    val zkConsumerConnector1 = new ZookeeperConsumerConnector(consumerConfig1, true)
    // Register consumer rebalance listener
    val rebalanceListener1 = new TestConsumerRebalanceListener()
    zkConsumerConnector1.setConsumerRebalanceListener(rebalanceListener1)
    val topicMessageStreams1 = zkConsumerConnector1.createMessageStreams(Map(topic -> 1), new StringDecoder(), new StringDecoder())
    // Check if rebalance listener is fired
    assertEquals(true, rebalanceListener1.beforeReleasingPartitionsCalled)
    assertEquals(true, rebalanceListener1.beforeStartingFetchersCalled)
    assertEquals(null, rebalanceListener1.partitionOwnership.get(topic))
    // Check if partition assignment in rebalance listener is correct
    assertEquals("group1_consumer1", rebalanceListener1.globalPartitionOwnership.get(topic).get(0).consumer)
    assertEquals("group1_consumer1", rebalanceListener1.globalPartitionOwnership.get(topic).get(1).consumer)
    assertEquals(0, rebalanceListener1.globalPartitionOwnership.get(topic).get(0).threadId)
    assertEquals(0, rebalanceListener1.globalPartitionOwnership.get(topic).get(1).threadId)
    assertEquals("group1_consumer1", rebalanceListener1.consumerId)
    // reset the flag
    rebalanceListener1.beforeReleasingPartitionsCalled = false
    rebalanceListener1.beforeStartingFetchersCalled = false
    val actual_1 = getZKChildrenValues(dirs.consumerOwnerDir)
    val expected_1 = List(("0", "group1_consumer1-0"),
                          ("1", "group1_consumer1-0"))
    assertEquals(expected_1, actual_1)
    val consumerConfig2 = new ConsumerConfig(TestUtils.createConsumerProperties(zkConnect, group, consumer2))
    val zkConsumerConnector2 = new ZookeeperConsumerConnector(consumerConfig2, true)
    // Register consumer rebalance listener
    val rebalanceListener2 = new TestConsumerRebalanceListener()
    zkConsumerConnector2.setConsumerRebalanceListener(rebalanceListener2)
    zkConsumerConnector2.createMessageStreams(Map(topic -> 1), new StringDecoder(), new StringDecoder())
    // Consume messages from consumer 1 to make sure it has finished rebalance
    getMessages(topicMessageStreams1, nMessages)
    val actual_2 = getZKChildrenValues(dirs.consumerOwnerDir)
    val expected_2 = List(("0", "group1_consumer1-0"),
                          ("1", "group1_consumer2-0"))
    assertEquals(expected_2, actual_2)
    // Check if rebalance listener is fired
    assertEquals(true, rebalanceListener1.beforeReleasingPartitionsCalled)
    assertEquals(true, rebalanceListener1.beforeStartingFetchersCalled)
    assertEquals(Set[Int](0, 1), rebalanceListener1.partitionOwnership.get(topic))
    // Check if global partition ownership in rebalance listener is correct
    assertEquals("group1_consumer1", rebalanceListener1.globalPartitionOwnership.get(topic).get(0).consumer)
    assertEquals("group1_consumer2", rebalanceListener1.globalPartitionOwnership.get(topic).get(1).consumer)
    assertEquals(0, rebalanceListener1.globalPartitionOwnership.get(topic).get(0).threadId)
    assertEquals(0, rebalanceListener1.globalPartitionOwnership.get(topic).get(1).threadId)
    assertEquals("group1_consumer1", rebalanceListener1.consumerId)
    assertEquals("group1_consumer2", rebalanceListener2.consumerId)
    assertEquals(rebalanceListener1.globalPartitionOwnership, rebalanceListener2.globalPartitionOwnership)
    zkConsumerConnector1.shutdown()
    zkConsumerConnector2.shutdown()
  }
  // Reads every child of `path` (sorted) and pairs it with its ZNode data.
  def getZKChildrenValues(path : String) : Seq[Tuple2[String,String]] = {
    val children = zkUtils.getChildren(path).sorted
    children.map(partition =>
      (partition, zkUtils.zkClient.readData(path + "/" + partition).asInstanceOf[String]))
  }
  // Records the arguments of both rebalance callbacks for later assertions.
  private class TestConsumerRebalanceListener extends ConsumerRebalanceListener {
    var beforeReleasingPartitionsCalled: Boolean = false
    var beforeStartingFetchersCalled: Boolean = false
    var consumerId: String = ""
    var partitionOwnership: java.util.Map[String, java.util.Set[java.lang.Integer]] = null
    var globalPartitionOwnership: java.util.Map[String, java.util.Map[java.lang.Integer, ConsumerThreadId]] = null
    override def beforeReleasingPartitions(partitionOwnership: java.util.Map[String, java.util.Set[java.lang.Integer]]) {
      beforeReleasingPartitionsCalled = true
      this.partitionOwnership = partitionOwnership
    }
    override def beforeStartingFetchers(consumerId: String, globalPartitionOwnership: java.util.Map[String, java.util.Map[java.lang.Integer, ConsumerThreadId]]) {
      beforeStartingFetchersCalled = true
      this.consumerId = consumerId
      this.globalPartitionOwnership = globalPartitionOwnership
    }
  }
}
| Ishiihara/kafka | core/src/test/scala/unit/kafka/consumer/ZookeeperConsumerConnectorTest.scala | Scala | apache-2.0 | 20,412 |
// Positive compiler test: a class and its companion object, both named Ord,
// nested inside another object. Presumably a regression test verifying this
// compiles cleanly under -Xfatal-warnings (dotty issue i4185, per the test
// file name) — TODO confirm against the issue tracker.
object ord {
  class Ord
  object Ord
}
| som-snytt/dotty | tests/pos-special/fatal-warnings/i4185.scala | Scala | apache-2.0 | 40 |
/**
* Copyright (c) 2013-2015 Patrick Nicolas - Scala for Machine Learning - All rights reserved
*
* The source code in this file is provided by the author for the sole purpose of illustrating the
* concepts and algorithms presented in "Scala for Machine Learning". It should not be used to
* build commercial applications.
* ISBN: 978-1-783355-874-2 Packt Publishing.
* Unless required by applicable law or agreed to in writing, software is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* Version 0.98
*/
package org.scalaml.supervised.hmm
import org.scalaml.supervised.Supervised
import org.scalaml.core.Design.Config
/**
 * <p>Utility class that defines the dimensions of the matrices
 * used in the Hidden Markov Model. The terminology used in the code follows
 * the naming convention used in the mathematical expressions presented in
 * most of papers and technical books on HMM.</p>
 * @constructor Create a configuration (dimensions) for this HMM
 * @param _T Number of observations, in the range [1, 4096)
 * @param _N Number of hidden states in the HMM, in the range [1, 512)
 * @param _M Number of symbols (or model dimension) for the HMM, in the range [1, 4096)
 * @throws IllegalArgumentException if any of the arguments is out of its range
 *  (bounds are HMMConfig.MAX_NUM_OBS and HMMConfig.MAX_NUM_STATES)
 * @see org.scalaml.core.Design.Config
 *
 * @author Patrick Nicolas
 * @since March 27, 2014
 * @note Scala for Machine Learning Chapter 7 Sequential data models / Hidden Markov Model
 */
final class HMMConfig(val _T: Int, val _N: Int, val _M: Int) extends Config {
  import HMMConfig._
  // Validates all three dimensions eagerly at construction time.
  check(_T, _N, _M)
}
/**
 * <p>Companion object for HMMConfig providing higher-order helpers
 * (foreach, foldLeft, maxBy) over integer index ranges, plus the
 * dimension bounds and their validation.</p>
 * @author Patrick Nicolas
 * @since March 27, 2014
 * @note Scala for Machine Learning Chapter 7 Sequential data models / Hidden Markov Model
 */
object HMMConfig {
  /**
   * Applies f to every index in the half-open interval [i, j).
   * @param i starting index (inclusive)
   * @param j ending index (exclusive)
   * @param f function executed for each index
   */
  def foreach(i: Int, j: Int, f: (Int) => Unit): Unit = (i until j).foreach(f)
  /**
   * Applies f to every index in [0, j).
   * @param j ending index (exclusive)
   * @param f function executed for each index
   */
  def foreach(j: Int, f: (Int) => Unit): Unit = foreach(0, j, f)
  /**
   * Folds over the indices [0, j), starting from the given seed.
   * @param j ending index (exclusive)
   * @param f aggregator invoked with (accumulator, index)
   * @param zero initial accumulator value
   */
  def foldLeft(j: Int, f: (Double, Int) => Double, zero: Double) = (0 until j).foldLeft(zero)(f)
  /**
   * Folds over the indices [0, j), starting from 0.0; delegates to the
   * three-argument overload to avoid duplicating the fold.
   * @param j ending index (exclusive)
   * @param f aggregator invoked with (accumulator, index)
   */
  def foldLeft(j: Int, f: (Double, Int) => Double) = foldLeft(j, f, 0.0)
  /**
   * Index in [0, j) maximizing the scoring function f.
   * Assumes j > 0 — an empty range makes maxBy throw; TODO confirm all call
   * sites pass validated dimensions.
   * @param j ending index (exclusive)
   * @param f scoring function evaluated at each index
   */
  def maxBy(j: Int, f: Int => Double): Int = (0 until j).maxBy(f)
  // Exclusive upper bounds on the HMM dimensions.
  val MAX_NUM_STATES = 512
  val MAX_NUM_OBS = 4096
  // Validates the three HMM dimensions; throws IllegalArgumentException
  // (via require) when any dimension is outside its allowed range.
  private def check(_T: Int, _N: Int, _M: Int): Unit = {
    require( _T > 0 && _T < MAX_NUM_OBS,
      s"Number of observations ${_T} in HMM lambda model is out of bounds")
    require( _N > 0 && _N < MAX_NUM_STATES,
      s"Number of States ${_N} in HMM lambda model is out of bounds")
    require( _M > 0 && _M < MAX_NUM_OBS,
      s"Number of symbols ${_M} in HMM lambda model is out of bounds")
  }
}
// ---------------------------------------- EOF ------------------------------------------------------------ | batermj/algorithm-challenger | books/cs/machine-learning/scala-for-machine-learning/1rst-edition/original-src-from-the-book/src/main/scala/org/scalaml/supervised/hmm/HMMConfig.scala | Scala | apache-2.0 | 3,943 |
/*
* Copyright Β© 2015 - 2019 Lightbend, Inc. <http://www.lightbend.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lightbend.paradox.tree
import scala.annotation.tailrec
/**
 * Generic rose tree: a labelled node with an ordered forest of subtrees.
 */
abstract class Tree[A] {
  import Tree.Forest
  /** The value stored at this node. */
  def label: A
  /** The ordered list of subtrees (empty for a leaf). */
  def children: Forest[A]
  /**
   * Map this tree into a new tree using f.
   */
  def map[B](f: A => B): Tree[B] = {
    Tree(f(label), children map (_ map f))
  }
  /**
   * Create a new location focused on this tree.
   */
  def location: Tree.Location[A] = Tree.Location(this, Nil, Nil, Nil)
  /**
   * Print the tree (in markdown format).
   */
  def show: String = {
    // Depth-first walk via the zipper: appends one bullet item per node;
    // multi-line labels are continued with the "other" prefix so they line
    // up under the bullet.
    @tailrec def showNodes(loc: Tree.Location[A], result: List[String]): List[String] = {
      val lines = result ::: indent(loc.depth, "- ", " ", loc.tree.label.toString.split('\\n').toList)
      loc.next match {
        case Some(next) => showNodes(next, lines)
        case _ => lines
      }
    }
    // Prefixes the first line with `first` and every following line with
    // `other`, all indented by one space per tree depth level.
    def indent(depth: Int, first: String, other: String, lines: List[String]): List[String] = {
      val spaces = " " * depth
      ((spaces + first) :: List.fill(lines.size - 1)(spaces + other)).zip(lines) map { case (i, l) => i + l }
    }
    showNodes(location, Nil).mkString("\\n")
  }
  // All labels in this tree (this node plus every descendant).
  // NOTE(review): the type parameter T is unused — plain `t.toSet` calls
  // would be unaffected by removing it, but it is left as-is to avoid
  // breaking any caller that supplies it explicitly.
  def toSet[T]: Set[A] = children.flatMap(_.toSet).toSet + label
}
object Tree {
  /** A forest is simply an ordered list of trees. */
  type Forest[A] = List[Tree[A]]
  /**
   * Default Tree implementation.
   */
  case class Node[A](label: A, children: Forest[A]) extends Tree[A] {
    override def toString = s"Node($label)"
  }
  /**
   * Create a new tree node (default Node).
   */
  def apply[A](label: A, children: Forest[A]): Tree[A] = Node(label, children)
  /**
   * Create a new tree node (with varargs for children).
   */
  def node[A](label: A, children: Tree[A]*): Tree[A] = Tree(label, children.toList)
  /**
   * Create a new tree node without children.
   */
  def leaf[A](label: A): Tree[A] = Tree(label, Nil)
  /**
   * Parent node for zipper locations: the parent's label plus the focused
   * child's left siblings (reversed) and right siblings.
   */
  final case class Parent[A](label: A, lefts: Forest[A], rights: Forest[A])
  /**
   * Zipper navigation and modification for Tree.
   *
   * `tree` is the focused subtree; `lefts` are its left siblings in reverse
   * order; `rights` its right siblings in order; `parents` the chain of
   * ancestors from immediate parent up to the root.
   */
  final case class Location[A](tree: Tree[A], lefts: Forest[A], rights: Forest[A], parents: List[Parent[A]]) {
    /**
     * Move to the root.
     */
    @tailrec
    def root: Location[A] = parent match {
      case Some(parent) => parent.root
      case None => this
    }
    /**
     * Move to the parent.
     */
    def parent: Option[Location[A]] = parents match {
      case p :: ps => Some(Location(Tree(p.label, forest), p.lefts, p.rights, ps))
      case Nil => None
    }
    /**
     * This node and its siblings.
     */
    def forest: Forest[A] = {
      // lefts are stored reversed, so restore original order first
      lefts.reverse ::: tree :: rights
    }
    /**
     * Move to the left sibling.
     */
    def left: Option[Location[A]] = lefts match {
      case l :: ls => Some(Location(l, ls, tree :: rights, parents))
      case Nil => None
    }
    /**
     * Move to the right sibling.
     */
    def right: Option[Location[A]] = rights match {
      case r :: rs => Some(Location(r, tree :: lefts, rs, parents))
      case Nil => None
    }
    /**
     * Move to the leftmost child.
     */
    def leftmostChild: Option[Location[A]] = tree.children match {
      case t :: ts => Some(Location(t, Nil, ts, descend))
      case Nil => None
    }
    /**
     * Move to the rightmost child.
     */
    def rightmostChild: Option[Location[A]] = tree.children.reverse match {
      case t :: ts => Some(Location(t, ts, Nil, descend))
      case Nil => None
    }
    /**
     * Move to the nth child.
     */
    def child(n: Int): Option[Location[A]] = tree.children.splitAt(n) match {
      case (ls, t :: rs) => Some(Location(t, ls.reverse, rs, descend))
      case _ => None
    }
    /**
     * This node as a parent, before its parents.
     */
    private def descend: List[Parent[A]] = {
      Parent(tree.label, lefts, rights) :: parents
    }
    /**
     * Move to the next location in the hierarchy in depth-first order.
     */
    def next: Option[Location[A]] = {
      leftmostChild orElse nextRight
    }
    /**
     * Move to the right, otherwise the next possible right in a parent.
     */
    @tailrec
    def nextRight: Option[Location[A]] = {
      right match {
        case None => parent match {
          case Some(parent) => parent.nextRight
          case None => None
        }
        case right => right
      }
    }
    /**
     * Move to the previous location in the hierarchy in depth-first order.
     */
    def prev: Option[Location[A]] = {
      left match {
        case Some(left) => left.deepRight
        case None => parent
      }
    }
    /**
     * Move to the deepest rightmost node.
     */
    @tailrec
    def deepRight: Option[Location[A]] = {
      rightmostChild match {
        case Some(child) => child.deepRight
        case None => Some(this)
      }
    }
    /**
     * Path from here up to the root.
     */
    def path: List[Location[A]] = this :: parent.toList.flatMap(_.path)
    /**
     * Depth of location from the root.
     */
    def depth: Int = parents.size
    /**
     * Is this the root node?
     */
    def isRoot: Boolean = parent.isEmpty
    /**
     * Is this a child node?
     */
    def isChild: Boolean = !isRoot
    /**
     * Is this a leaf node (no children)?
     */
    def isLeaf: Boolean = tree.children.isEmpty
    /**
     * Is this a branch node (has children)?
     */
    def isBranch: Boolean = !isLeaf
    /**
     * Is this the leftmost sibling node?
     */
    def isLeftmost: Boolean = lefts.isEmpty
    /**
     * Is this the rightmost sibling node?
     */
    def isRightmost: Boolean = rights.isEmpty
    /**
     * Replace the focused node.
     */
    def set(tree: Tree[A]): Location[A] = {
      Location(tree, lefts, rights, parents)
    }
    /**
     * Modify the focused node.
     */
    def modify(f: Tree[A] => Tree[A]): Location[A] = {
      set(f(tree))
    }
    /**
     * Insert to the left and focus on the new node.
     */
    def insertLeft(newTree: Tree[A]): Location[A] = {
      Location(newTree, lefts, tree :: rights, parents)
    }
    /**
     * Insert to the right and focus on the new node.
     */
    def insertRight(newTree: Tree[A]): Location[A] = {
      Location(newTree, tree :: lefts, rights, parents)
    }
    /**
     * Insert as the leftmost child and focus on the new node.
     */
    def insertLeftmostChild(newTree: Tree[A]): Location[A] = {
      Location(newTree, Nil, tree.children, descend)
    }
    /**
     * Insert as the rightmost child and focus on the new node.
     */
    def insertRightmostChild(newTree: Tree[A]): Location[A] = {
      Location(newTree, tree.children.reverse, Nil, descend)
    }
    /**
     * Insert as the nth child and focus on the new node.
     */
    def insertChild(n: Int, newTree: Tree[A]): Location[A] = {
      val (ls, rs) = tree.children.splitAt(n)
      Location(newTree, ls.reverse, rs, descend)
    }
    /**
     * Delete the focused node and then move right, otherwise left, otherwise up.
     */
    def delete: Option[Location[A]] = rights match {
      case r :: rs => Some(Location(r, lefts, rs, parents))
      case _ => lefts match {
        case l :: ls => Some(Location(l, ls, rights, parents))
        case _ => parents match {
          case p :: ps => Some(Location(Tree(p.label, Nil), p.lefts, p.rights, ps))
          case Nil => None
        }
      }
    }
    // simplified toString
    override def toString: String = s"Location(${tree.label.toString})"
  }
  object Location {
    /** Focus on the first tree of a forest, if any. */
    def forest[A](ts: Forest[A]): Option[Location[A]] = ts match {
      case t :: ts => Some(Location(t, Nil, ts, Nil))
      case Nil => None
    }
  }
  /**
   * Form a linked forest, given a function for determining child links.
   */
  def link[A](nodes: List[A], links: A => List[A]): Forest[A] = {
    import scala.collection.mutable
    // `seen` marks nodes whose visit has started; `completed` marks nodes whose
    // visit has finished — a node that is seen but not completed is on the
    // current DFS stack, i.e. a cycle.
    val seen = mutable.HashSet.empty[A]
    val completed = mutable.HashSet.empty[A]
    // trees not (yet) claimed as a child by any other node; what remains at
    // the end are the roots of the forest
    val roots = mutable.Map.empty[A, Tree[A]]
    def visit(node: A): Unit = {
      if (!seen(node)) {
        seen(node) = true;
        val linked = links(node)
        linked foreach visit
        // claim each linked child, removing it from the candidate roots
        val children = linked flatMap roots.remove
        roots += node -> Tree(node, children)
        completed(node) = true
      } else if (!completed(node)) {
        throw new RuntimeException("Cycle found at: " + node)
      }
    }
    nodes foreach visit
    roots.values.toList
  }
  /**
   * Form a linked forest from a listed hierarchy, given an ordering for levels.
   *
   * Shift-reduce style: nodes are pushed onto `stack`; whenever the top of
   * the stack is "deeper" than the entry below it (per `ord`) and the next
   * incoming node does not belong under it, the top is folded into its
   * parent's children.
   */
  @tailrec
  def hierarchy[A](nodes: List[A], stack: Forest[A] = Nil)(implicit ord: Ordering[A]): Forest[A] = {
    stack match {
      case first :: second :: rest if ord.gt(first.label, second.label) &&
        (nodes.isEmpty || ord.lteq(nodes.head, first.label)) => // squash top of stack
        hierarchy(nodes, Tree(second.label, second.children ::: List(first)) :: rest)
      case result if nodes.isEmpty => // finished, return result
        result.reverse
      case deeper => // push node on to stack
        hierarchy(nodes.tail, Tree.leaf(nodes.head) :: deeper)
    }
  }
}
| lightbend/paradox | core/src/main/scala/com/lightbend/paradox/tree/Tree.scala | Scala | apache-2.0 | 9,811 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package mongo.query.test
import mongo._
import dsl._
import qb._
import java.util.Date
import mongo.query.create
import scala.collection.mutable.ArrayBuffer
import scalaz.\\/
import scalaz.concurrent.Task
import org.apache.log4j.Logger
import scalaz.stream.Process._
import java.util.concurrent.atomic.AtomicBoolean
import MongoIntegrationEnv.{ executor, ids, sinkWithBuffer, mock, TEST_DB, PRODUCT, CATEGORY }
import org.specs2.mutable._
/**
 * Per-example test scope (specs2 `After`): wires a recording sink, failure
 * flags, and a lazily-started mock mongo that is torn down after the example.
 * NOTE(review): name carries a historical typo ("Enviroment"); renaming would
 * break every spec extending it.
 */
trait TestEnviroment[T] extends org.specs2.mutable.After {
  protected val logger = Logger.getLogger(classOf[IntegrationMongoClientSpec])

  // Sink that records every emitted element into `buffer` for assertions.
  val (sink, buffer) = sinkWithBuffer[T]
  // Flipped by the streams' onFailure/onComplete callbacks.
  val isFailureInvoked = new AtomicBoolean()
  val isFailureComplete = new AtomicBoolean()

  // Lazily started embedded mongo. Presumably a (client, server) pair —
  // confirm against MongoIntegrationEnv.mock().
  lazy val server = mock()

  def EnvLogger() = MongoIntegrationEnv.LoggerSink(logger)
  def EnvLoggerEither() = MongoIntegrationEnv.LoggerSinkEither(logger)

  /**
   * Start mock mongo and return Process
   * @return
   */
  def Resource = {
    // Referencing the lazy val forces the mock to boot before the stream runs.
    server
    eval(Task.delay(server._1))
  }

  // Tear down: close the client, then shut down the mock server.
  override def after = {
    server._1.close
    server._2.shutdown
  }
}
/**
 * Integration tests running real queries against the embedded mock mongo.
 *
 * NOTE(review): the non-ASCII lambda (`⇒`) and generator (`←`) arrows in this
 * file had been corrupted into `β` by a bad encoding round-trip, which does
 * not compile; they are restored here as the equivalent ASCII `=>` and `<-`.
 */
class IntegrationMongoClientSpec extends Specification {

  "Hit server with invalid query" in new TestEnviroment[Int] {
    // Deliberately malformed JSON: the stream must fail, then complete.
    val q = create { b =>
      b.q(""" { "num : } """)
      b.db(TEST_DB)
      b.collection(PRODUCT)
    }.column[Int]("article")

    (for {
      dbObject <- Resource through q.out
      _ <- dbObject to sink
    } yield ())
      .onFailure { th => isFailureInvoked.set(true); halt }
      .onComplete { eval(Task.delay(isFailureComplete.set(true))) }
      .runLog.run

    isFailureInvoked.get && isFailureComplete.get must be equalTo true
  }

  "Hit server with invalid query - missing collection" in new TestEnviroment[Int] {
    val q = create { b =>
      b.q(""" { "num" : 1 } """)
      b.db(TEST_DB)
    }.column[Int]("article")

    (for {
      dbObject <- Resource through q.out
      _ <- dbObject to sink
    } yield ())
      .onFailure { th => isFailureInvoked.set(true); logger.debug(th.getMessage); halt }
      .onComplete(eval(Task.delay(isFailureComplete.set(true))))
      .runLog.run

    isFailureInvoked.get && isFailureComplete.get must be equalTo true
  }

  "Hit server with invalid query - invalid sorting" in new TestEnviroment[Int] {
    val q = create { b =>
      b.q(""" { "num" : 1 } """)
      b.sort(""" { "num } """) //invalid
      b.collection(PRODUCT)
      b.db(TEST_DB)
    }.column[Int]("article")

    (for {
      dbObject <- Resource through q.out
      _ <- dbObject to sink
    } yield ())
      .onFailure { th => isFailureInvoked.set(true); logger.debug(th.getMessage); halt }
      .onComplete(eval(Task.delay(isFailureComplete.set(true))))
      .runLog.run

    isFailureInvoked.get && isFailureComplete.get must be equalTo true
  }

  "Hit server with invalid query - missing db" in new TestEnviroment[Int] {
    val q = create { b =>
      b.q(""" { "num" : 1 } """)
      b.collection(PRODUCT)
    }.column[Int]("article")

    (for {
      dbObject <- Resource through q.out
      _ <- dbObject to sink
    } yield ())
      .onFailure { th => isFailureInvoked.set(true); logger.debug(th.getMessage); halt }
      .onComplete(eval(Task.delay(isFailureComplete.set(true))))
      .runLog.run

    isFailureInvoked.get && isFailureComplete.get must be equalTo true
  }

  "Hit server several times with the same query by date" in new TestEnviroment[Int] {
    val products = create { b =>
      b.q("dt" $gt new Date())
      b.collection(PRODUCT)
      b.db(TEST_DB)
    }.column[Int]("article")

    // Run the same query three times; the buffer accumulates each pass.
    for (i <- 1 to 3) yield {
      (for {
        dbObject <- Resource through products.out
        _ <- dbObject to sink
      } yield ())
        .onFailure { th => logger.debug(s"Failure: ${th.getMessage}"); halt }
        .onComplete { eval(Task.delay(logger.debug(s"Interaction $i has been completed"))) }
        .runLog.run
    }

    buffer must be equalTo (ids ++ ids ++ ids)
  }

  "Hit server with monadic query to instructions" in new TestEnviroment[String] {
    // Compose the query predicates monadically, then render with toQuery.
    val program = for {
      _ <- "article" $gt 0 $lt 4
      x <- "producer_num" $gt 0
    } yield x

    val products = create { b =>
      b.q(program.toQuery)
      b.collection(PRODUCT)
      b.db(TEST_DB)
    }.column[Int]("article").map(_.toString)

    (for {
      dbObject <- Resource through products.out
      _ <- dbObject observe EnvLogger to sink
    } yield ())
      .onFailure { th => logger.debug(s"Failure: ${th.getMessage}"); halt }
      .onComplete { eval(Task.delay(logger.debug(s"Interaction has been completed"))) }
      .runLog.run

    buffer must be equalTo ArrayBuffer("1", "2")
  }

  "Hit server with monadic query2" in new TestEnviroment[String] {
    // Same program as above, rendered with toDBObject instead of toQuery.
    val producers = for {
      _ <- "article" $gt 0 $lt 4
      x <- "producer_num" $gt 0
    } yield x

    val products = create { b =>
      b.q(producers.toDBObject)
      b.collection(PRODUCT)
      b.db(TEST_DB)
    }.column[Int]("article").map(_.toString)

    (for {
      dbObject <- Resource through products.out
      _ <- dbObject observe EnvLogger to sink
    } yield ())
      .onFailure { th => logger.debug(s"Failure: ${th.getMessage}"); halt }
      .onComplete { eval(Task.delay(logger.debug(s"Interaction has been completed"))) }
      .runLog.run

    buffer must be equalTo ArrayBuffer("1", "2")
  }

  "Interleave query streams nondeterminstically" in new TestEnviroment[String \\/ Int] {
    val products = create { b =>
      b.q("article" $in Seq(1, 2, 3))
      b.collection(PRODUCT)
      b.db(TEST_DB)
    }.column[Int]("article").map(_.toString)

    val categories = create { b =>
      b.q("category" $in Seq(12, 13))
      b.collection(CATEGORY)
      b.db(TEST_DB)
    }.column[Int]("category")

    (for {
      cats <- Resource through categories.out
      prodOrCat <- Resource through ((products either cats).out)
      _ <- prodOrCat observe EnvLoggerEither to sink
    } yield ())
      .onFailure { th => logger.debug(s"Failure: ${th.getMessage}"); halt }
      .onComplete { eval(Task.delay(logger.debug(s"Interaction has been completed"))) }
      .runLog.run

    // 3 products + 2 categories interleaved in some order.
    buffer.size === 5
  }
} | haghard/mongo-query-streams | src/test/scala/mongo/query/test/IntegrationMongoClientSpec.scala | Scala | apache-2.0 | 6,751 |
package net.ceedubs.ficus.readers
import com.typesafe.config.{Config, ConfigUtil}
import scala.collection.Factory
import scala.jdk.CollectionConverters._
import scala.language.postfixOps
import scala.language.higherKinds
trait CollectionReaders {
  // Synthetic path under which each list element is re-rooted, so the plain
  // element reader can be reused to read a single entry.
  private[this] val DummyPathValue: String = "collection-entry-path"

  /**
   * Reads any collection `C[A]` from a config list, given a reader for the
   * element type and a factory for the target collection.
   */
  implicit def traversableReader[C[_], A](implicit
      entryReader: ValueReader[A],
      cbf: Factory[A, C[A]]
  ): ValueReader[C[A]] = new ValueReader[C[A]] {
    def read(config: Config, path: String): C[A] = {
      val entries = config.getList(path).asScala
      val out = cbf.newBuilder
      out.sizeHint(entries.size)
      for (value <- entries) {
        // Re-root the element so it can be read like a standalone value.
        val rerooted = value.atPath(DummyPathValue)
        out += entryReader.read(rerooted, DummyPathValue)
      }
      out.result()
    }
  }

  /** Reads a `Map[String, A]` from a config object, one entry per key. */
  implicit def mapValueReader[A](implicit entryReader: ValueReader[A]): ValueReader[Map[String, A]] =
    new ValueReader[Map[String, A]] {
      def read(config: Config, path: String): Map[String, A] = {
        val section = config.getConfig(path)
        section.root().entrySet().asScala.map { entry =>
          // Quote the key so dotted/special keys resolve as a single path element.
          entry.getKey -> entryReader.read(section, ConfigUtil.quoteString(entry.getKey))
        }.toMap
      }
    }
}
object CollectionReaders extends CollectionReaders
| mdedetrich/ficus | src/main/scala-2.13+/net/ceedubs/ficus/readers/CollectionReaders.scala | Scala | mit | 1,365 |
/*
* Copyright 2014 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.kernel.protocol.v5.content
import com.ibm.spark.kernel.protocol.v5.{KernelMessageContent, Metadata}
import play.api.libs.json.{Json, Reads, Writes}
/**
 * Content of a `complete_reply` kernel message (the response to a code
 * completion request). Field names mirror the wire format and must not be
 * renamed.
 */
case class CompleteReply (
  matches: List[String],          // candidate completion strings
  cursor_start: Int,              // start offset of the text to be replaced
  cursor_end: Int,                // end offset of the text to be replaced
  metadata: Metadata,
  status: String,                 // presumably "ok"/"error" — confirm against the protocol spec
  ename: Option[String],          // error name, present on failure
  evalue: Option[String],         // error value, present on failure
  traceback: Option[List[String]] // error traceback, present on failure
) extends KernelMessageContent {
  // JSON rendering of this content, using the companion's Writes instance.
  override def content : String =
    Json.toJson(this)(CompleteReply.completeReplyWrites).toString
}
object CompleteReply {
  // Explicit type ascriptions on implicits: avoids fragile/cyclic type
  // inference and is mandatory for implicit definitions in Scala 3.
  implicit val completeReplyReads: Reads[CompleteReply] = Json.reads[CompleteReply]
  implicit val completeReplyWrites: Writes[CompleteReply] = Json.writes[CompleteReply]
}
| bpburns/spark-kernel | protocol/src/main/scala/com/ibm/spark/kernel/protocol/v5/content/CompleteReply.scala | Scala | apache-2.0 | 1,253 |
/*
* Copyright 2007-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.json4s
package ext
import java.net.URL
import org.scalatest.wordspec.AnyWordSpec
// Concrete spec wired to the json4s native serialization backend.
class NativeURLSerializerSpec extends URLSerializerSpec("Native") {
  val s: Serialization = native.Serialization
}
/**
 * Shared specification: a java.net.URL must round-trip through the supplied
 * Serialization implementation. `mod` labels the backend in the spec title.
 */
abstract class URLSerializerSpec(mod: String) extends AnyWordSpec {
  // Backend under test, provided by the concrete subclass.
  def s: Serialization

  // JavaTypesSerializers contributes the URL serializer being exercised.
  implicit lazy val formats: Formats = s.formats(NoTypeHints) ++ JavaTypesSerializers.all

  (mod + " URLSerializer Specification") should {
    "Serialize URL's" in {
      val x = SubjectWithURL(url = new URL("http://www.example.com/"))
      val ser = s.write(x)
      // Round-trip: deserializing the serialized form yields the original.
      assert(s.read[SubjectWithURL](ser) == x)
    }
  }
}
case class SubjectWithURL(url: URL)
| xuwei-k/json4s | tests/src/test/scala/org/json4s/ext/URLSerializerSpec.scala | Scala | apache-2.0 | 1,283 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js IR **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2014, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.core.ir
import scala.annotation.switch
import java.io._
import java.net.URI
import scala.collection.mutable
import Position._
import Trees._
import Types._
import Tags._
import Utils.JumpBackByteArrayOutputStream
object Serializers {
/** Writes `tree` to `stream` using a fresh, single-use Serializer. */
def serialize(stream: OutputStream, tree: Tree): Unit = {
  new Serializer().serialize(stream, tree)
}
/**
 * Reads one tree from `stream`. `version` is stored on the Deserializer but
 * not consulted by the visible reading code — presumably reserved for format
 * evolution; confirm elsewhere.
 */
def deserialize(stream: InputStream, version: String): Tree = {
  new Deserializer(stream, version).deserialize()
}
// true for easier debugging (not for "production", it adds 8 bytes per node)
private final val UseDebugMagic = false
// Sentinel words written after each tree / each position when UseDebugMagic
// is on; the reader asserts on them to pinpoint framing errors.
private final val DebugMagic = 0x3fa8ef84
private final val PosDebugMagic = 0x65f0ec32
// Bit-level constants shared (by convention) between Serializer.writePosition
// and Deserializer.readPosition; the two must stay in sync.
private object PositionFormat {
  /* Positions are serialized incrementally as diffs wrt the last position.
   *
   * Formats are (the first byte is decomposed in bits):
   *
   *   1st byte | next bytes |  description
   *  -----------------------------------------
   *   ccccccc0 |            | Column diff (7-bit signed)
   *   llllll01 | CC         | Line diff (6-bit signed), column (8-bit unsigned)
   *   ____0011 | LL LL CC   | Line diff (16-bit signed), column (8-bit unsigned)
   *   ____0111 | 12 bytes   | File index, line, column (all 32-bit signed)
   *   11111111 |            | NoPosition (is not compared/stored in last position)
   *
   * Underscores are irrelevant and must be set to 0.
   */

  final val Format1Mask = 0x01
  final val Format1MaskValue = 0x00
  final val Format1Shift = 1

  final val Format2Mask = 0x03
  final val Format2MaskValue = 0x01
  final val Format2Shift = 2

  final val Format3Mask = 0x0f
  final val Format3MaskValue = 0x03

  final val FormatFullMask = 0x0f
  final val FormatFullMaskValue = 0x7

  final val FormatNoPositionValue = -1
}
private final class Serializer {
// Tree bytes are buffered here so the file/string tables (only complete once
// the whole tree has been written) can be emitted to the stream first.
private[this] val bufferUnderlying = new JumpBackByteArrayOutputStream
private[this] val buffer = new DataOutputStream(bufferUnderlying)

// Interning table for source-file URIs: each file is stored once and
// referenced by index thereafter.
private[this] val files = mutable.ListBuffer.empty[URI]
private[this] val fileIndexMap = mutable.Map.empty[URI, Int]
private def fileToIndex(file: URI): Int =
  // Appending first makes (size - 1) the index of the new entry.
  fileIndexMap.getOrElseUpdate(file, (files += file).size - 1)

// Interning table for strings, same scheme as for files.
private[this] val strings = mutable.ListBuffer.empty[String]
private[this] val stringIndexMap = mutable.Map.empty[String, Int]
private def stringToIndex(str: String): Int =
  stringIndexMap.getOrElseUpdate(str, (strings += str).size - 1)

// Last non-NoPosition written; base for the position delta-encoding.
private[this] var lastPosition: Position = Position.NoPosition
/**
 * Output layout: [file table][string table][tree bytes]. The tree is written
 * to the internal buffer first so both tables are complete before anything
 * reaches `stream`.
 */
def serialize(stream: OutputStream, tree: Tree): Unit = {
  // Write tree to buffer and record files and strings
  writeTree(tree)

  val s = new DataOutputStream(stream)

  // Emit the files
  s.writeInt(files.size)
  files.foreach(f => s.writeUTF(f.toString))

  // Emit the strings
  s.writeInt(strings.size)
  strings.foreach(s.writeUTF)

  // Paste the buffer
  bufferUnderlying.writeTo(s)

  s.flush()
}
/**
 * Serializes one IR tree: its position, a tag byte identifying the node
 * class, then the node's fields/subtrees in a fixed order. For nodes whose
 * type is not implied by their fields, the type is written last.
 * `Deserializer.readTree` must consume fields in exactly the same order.
 */
def writeTree(tree: Tree): Unit = {
  import buffer._
  writePosition(tree.pos)
  tree match {
    case EmptyTree =>
      writeByte(TagEmptyTree)

    case VarDef(ident, vtpe, mutable, rhs) =>
      writeByte(TagVarDef)
      writeIdent(ident); writeType(vtpe); writeBoolean(mutable); writeTree(rhs)

    case ParamDef(ident, ptpe, mutable) =>
      writeByte(TagParamDef)
      writeIdent(ident); writeType(ptpe); writeBoolean(mutable)

    case Skip() =>
      writeByte(TagSkip)

    case Block(stats) =>
      writeByte(TagBlock)
      writeTrees(stats)

    case Labeled(label, tpe, body) =>
      writeByte(TagLabeled)
      writeIdent(label); writeType(tpe); writeTree(body)

    case Assign(lhs, rhs) =>
      writeByte(TagAssign)
      writeTree(lhs); writeTree(rhs)

    case Return(expr, label) =>
      writeByte(TagReturn)
      writeTree(expr); writeOptIdent(label)

    case If(cond, thenp, elsep) =>
      writeByte(TagIf)
      writeTree(cond); writeTree(thenp); writeTree(elsep)
      writeType(tree.tpe)

    case While(cond, body, label) =>
      writeByte(TagWhile)
      writeTree(cond); writeTree(body); writeOptIdent(label)

    case DoWhile(body, cond, label) =>
      writeByte(TagDoWhile)
      writeTree(body); writeTree(cond); writeOptIdent(label)

    case Try(block, errVar, handler, finalizer) =>
      writeByte(TagTry)
      writeTree(block); writeIdent(errVar); writeTree(handler); writeTree(finalizer)
      writeType(tree.tpe)

    case Throw(expr) =>
      writeByte(TagThrow)
      writeTree(expr)

    case Continue(label) =>
      writeByte(TagContinue)
      writeOptIdent(label)

    case Match(selector, cases, default) =>
      writeByte(TagMatch)
      writeTree(selector)
      // Each case is (alternatives, body); the list is length-prefixed.
      writeInt(cases.size)
      cases foreach { caze =>
        writeTrees(caze._1); writeTree(caze._2)
      }
      writeTree(default)
      writeType(tree.tpe)

    case Debugger() =>
      writeByte(TagDebugger)

    case New(cls, ctor, args) =>
      writeByte(TagNew)
      writeClassType(cls); writeIdent(ctor); writeTrees(args)

    case LoadModule(cls) =>
      writeByte(TagLoadModule)
      writeClassType(cls)

    case StoreModule(cls, value) =>
      writeByte(TagStoreModule)
      writeClassType(cls); writeTree(value)

    case Select(qualifier, item) =>
      writeByte(TagSelect)
      writeTree(qualifier); writeIdent(item)
      writeType(tree.tpe)

    case Apply(receiver, method, args) =>
      writeByte(TagApply)
      writeTree(receiver); writeIdent(method); writeTrees(args)
      writeType(tree.tpe)

    case ApplyStatically(receiver, cls, method, args) =>
      writeByte(TagApplyStatically)
      writeTree(receiver); writeClassType(cls); writeIdent(method); writeTrees(args)
      writeType(tree.tpe)

    case ApplyStatic(cls, method, args) =>
      writeByte(TagApplyStatic)
      writeClassType(cls); writeIdent(method); writeTrees(args)
      writeType(tree.tpe)

    case UnaryOp(op, lhs) =>
      writeByte(TagUnaryOp)
      writeByte(op); writeTree(lhs)

    case BinaryOp(op, lhs, rhs) =>
      writeByte(TagBinaryOp)
      writeByte(op); writeTree(lhs); writeTree(rhs)

    case NewArray(tpe, lengths) =>
      writeByte(TagNewArray)
      writeArrayType(tpe); writeTrees(lengths)

    case ArrayValue(tpe, elems) =>
      writeByte(TagArrayValue)
      writeArrayType(tpe); writeTrees(elems)

    case ArrayLength(array) =>
      writeByte(TagArrayLength)
      writeTree(array)

    case ArraySelect(array, index) =>
      writeByte(TagArraySelect)
      writeTree(array); writeTree(index)
      writeType(tree.tpe)

    case RecordValue(tpe, elems) =>
      writeByte(TagRecordValue)
      writeType(tpe); writeTrees(elems)

    case IsInstanceOf(expr, cls) =>
      writeByte(TagIsInstanceOf)
      writeTree(expr); writeReferenceType(cls)

    case AsInstanceOf(expr, cls) =>
      writeByte(TagAsInstanceOf)
      writeTree(expr); writeReferenceType(cls)

    case Unbox(expr, charCode) =>
      writeByte(TagUnbox)
      writeTree(expr); writeByte(charCode.toByte)

    case GetClass(expr) =>
      writeByte(TagGetClass)
      writeTree(expr)

    case CallHelper(helper, args) =>
      writeByte(TagCallHelper)
      writeString(helper); writeTrees(args)
      writeType(tree.tpe)

    // JavaScript interop nodes
    case JSNew(ctor, args) =>
      writeByte(TagJSNew)
      writeTree(ctor); writeTrees(args)

    case JSDotSelect(qualifier, item) =>
      writeByte(TagJSDotSelect)
      writeTree(qualifier); writeIdent(item)

    case JSBracketSelect(qualifier, item) =>
      writeByte(TagJSBracketSelect)
      writeTree(qualifier); writeTree(item)

    case JSFunctionApply(fun, args) =>
      writeByte(TagJSFunctionApply)
      writeTree(fun); writeTrees(args)

    case JSDotMethodApply(receiver, method, args) =>
      writeByte(TagJSDotMethodApply)
      writeTree(receiver); writeIdent(method); writeTrees(args)

    case JSBracketMethodApply(receiver, method, args) =>
      writeByte(TagJSBracketMethodApply)
      writeTree(receiver); writeTree(method); writeTrees(args)

    case JSSpread(items) =>
      writeByte(TagJSSpread)
      writeTree(items)

    case JSDelete(prop) =>
      writeByte(TagJSDelete)
      writeTree(prop)

    case JSUnaryOp(op, lhs) =>
      writeByte(TagJSUnaryOp)
      writeInt(op); writeTree(lhs)

    case JSBinaryOp(op, lhs, rhs) =>
      writeByte(TagJSBinaryOp)
      writeInt(op); writeTree(lhs); writeTree(rhs)

    case JSArrayConstr(items) =>
      writeByte(TagJSArrayConstr)
      writeTrees(items)

    case JSObjectConstr(fields) =>
      writeByte(TagJSObjectConstr)
      writeInt(fields.size)
      fields foreach { field =>
        writePropertyName(field._1); writeTree(field._2)
      }

    case JSEnvInfo() =>
      writeByte(TagJSEnvInfo)

    // Literals
    case Undefined() =>
      writeByte(TagUndefined)

    case UndefinedParam() =>
      writeByte(TagUndefinedParam)
      writeType(tree.tpe)

    case Null() =>
      writeByte(TagNull)

    case BooleanLiteral(value) =>
      writeByte(TagBooleanLiteral)
      writeBoolean(value)

    case IntLiteral(value) =>
      writeByte(TagIntLiteral)
      writeInt(value)

    case LongLiteral(value) =>
      writeByte(TagLongLiteral)
      writeLong(value)

    case FloatLiteral(value) =>
      writeByte(TagFloatLiteral)
      writeFloat(value)

    case DoubleLiteral(value) =>
      writeByte(TagDoubleLiteral)
      writeDouble(value)

    case StringLiteral(value) =>
      writeByte(TagStringLiteral)
      writeString(value)

    case ClassOf(cls) =>
      writeByte(TagClassOf)
      writeReferenceType(cls)

    case VarRef(ident) =>
      writeByte(TagVarRef)
      writeIdent(ident)
      writeType(tree.tpe)

    case This() =>
      writeByte(TagThis)
      writeType(tree.tpe)

    case Closure(captureParams, params, body, captureValues) =>
      writeByte(TagClosure)
      writeTrees(captureParams)
      writeTrees(params)
      writeTree(body)
      writeTrees(captureValues)

    // Definitions
    case tree: ClassDef =>
      val ClassDef(name, kind, superClass, parents, jsName, defs) = tree
      writeByte(TagClassDef)
      writeIdent(name)
      writeByte(ClassKind.toByte(kind))
      writeOptIdent(superClass)
      writeIdents(parents)
      // "" encodes the absence of a JS name (see the reader's filter).
      writeString(jsName.getOrElse(""))
      writeTrees(defs)
      writeInt(tree.optimizerHints.bits)

    case FieldDef(ident, ftpe, mutable) =>
      writeByte(TagFieldDef)
      writeIdent(ident); writeType(ftpe); writeBoolean(mutable)

    case methodDef: MethodDef =>
      val MethodDef(static, name, args, resultType, body) = methodDef

      writeByte(TagMethodDef)
      writeOptHash(methodDef.hash)

      // Prepare for back-jump and write dummy length
      bufferUnderlying.markJump()
      writeInt(-1)

      // Write out method def
      writeBoolean(static); writePropertyName(name)
      writeTrees(args); writeType(resultType); writeTree(body)
      writeInt(methodDef.optimizerHints.bits)

      // Jump back and write true length
      val length = bufferUnderlying.jumpBack()
      writeInt(length)
      bufferUnderlying.continue()

    case PropertyDef(name, getter, arg, setter) =>
      writeByte(TagPropertyDef)
      writePropertyName(name); writeTree(getter); writeTree(arg); writeTree(setter)

    case ConstructorExportDef(fullName, args, body) =>
      writeByte(TagConstructorExportDef)
      writeString(fullName); writeTrees(args); writeTree(body)

    case ModuleExportDef(fullName) =>
      writeByte(TagModuleExportDef)
      writeString(fullName)
  }
  if (UseDebugMagic)
    writeInt(DebugMagic)
}
/** Writes a length-prefixed list of trees. */
def writeTrees(trees: List[Tree]): Unit = {
  buffer.writeInt(trees.size)
  trees.foreach(writeTree)
}

/** Writes an identifier: position, name, and original name ("" if absent). */
def writeIdent(ident: Ident): Unit = {
  writePosition(ident.pos)
  writeString(ident.name); writeString(ident.originalName.getOrElse(""))
}

/** Writes a length-prefixed list of identifiers. */
def writeIdents(idents: List[Ident]): Unit = {
  buffer.writeInt(idents.size)
  idents.foreach(writeIdent)
}

/** Writes a presence flag, then the identifier if present. */
def writeOptIdent(optIdent: Option[Ident]): Unit = {
  buffer.writeBoolean(optIdent.isDefined)
  optIdent.foreach(writeIdent)
}
/**
 * Writes a type as a tag byte; class, array, and record types carry an
 * additional payload after the tag.
 */
def writeType(tpe: Type): Unit = {
  tpe match {
    case AnyType     => buffer.write(TagAnyType)
    case NothingType => buffer.write(TagNothingType)
    case UndefType   => buffer.write(TagUndefType)
    case BooleanType => buffer.write(TagBooleanType)
    case IntType     => buffer.write(TagIntType)
    case LongType    => buffer.write(TagLongType)
    case FloatType   => buffer.write(TagFloatType)
    case DoubleType  => buffer.write(TagDoubleType)
    case StringType  => buffer.write(TagStringType)
    case NullType    => buffer.write(TagNullType)
    case NoType      => buffer.write(TagNoType)

    case tpe: ClassType =>
      buffer.write(TagClassType)
      writeClassType(tpe)

    case tpe: ArrayType =>
      buffer.write(TagArrayType)
      writeArrayType(tpe)

    case RecordType(fields) =>
      buffer.write(TagRecordType)
      buffer.writeInt(fields.size)
      for (RecordType.Field(name, originalName, tpe, mutable) <- fields) {
        writeString(name)
        // "" encodes a missing original name, mirrored by the reader.
        writeString(originalName.getOrElse(""))
        writeType(tpe)
        buffer.writeBoolean(mutable)
      }
  }
}
/** A class type is just its interned class name. */
def writeClassType(tpe: ClassType): Unit =
  writeString(tpe.className)

/** An array type is its base class name plus its dimension count. */
def writeArrayType(tpe: ArrayType): Unit = {
  writeString(tpe.baseClassName)
  buffer.writeInt(tpe.dimensions)
}

/** Reference types reuse the generic type encoding. */
def writeReferenceType(tpe: ReferenceType): Unit =
  writeType(tpe)

/** Property names: boolean discriminator (true = Ident, false = string literal). */
def writePropertyName(name: PropertyName): Unit = {
  name match {
    case name: Ident         => buffer.writeBoolean(true); writeIdent(name)
    case name: StringLiteral => buffer.writeBoolean(false); writeTree(name)
  }
}
/**
 * Delta-encodes `pos` against `lastPosition` using the formats documented in
 * `PositionFormat`, falling back to the full 13-byte form on file change or
 * out-of-range deltas. NoPosition is written as a single sentinel byte and
 * does NOT update `lastPosition`.
 */
def writePosition(pos: Position): Unit = {
  import buffer._
  import PositionFormat._

  def writeFull(): Unit = {
    writeByte(FormatFullMaskValue)
    writeInt(fileToIndex(pos.source))
    writeInt(pos.line)
    writeInt(pos.column)
  }

  if (pos == Position.NoPosition) {
    writeByte(FormatNoPositionValue)
  } else if (lastPosition == Position.NoPosition ||
      pos.source != lastPosition.source) {
    // First position, or a file change: must write the full form.
    writeFull()
    lastPosition = pos
  } else {
    val line = pos.line
    val column = pos.column
    val lineDiff = line - lastPosition.line
    val columnDiff = column - lastPosition.column
    val columnIsByte = column >= 0 && column < 256

    // Pick the smallest format whose ranges fit the deltas.
    if (lineDiff == 0 && columnDiff >= -64 && columnDiff < 64) {
      writeByte((columnDiff << Format1Shift) | Format1MaskValue)
    } else if (lineDiff >= -32 && lineDiff < 32 && columnIsByte) {
      writeByte((lineDiff << Format2Shift) | Format2MaskValue)
      writeByte(column)
    } else if (lineDiff >= Short.MinValue && lineDiff <= Short.MaxValue && columnIsByte) {
      writeByte(Format3MaskValue)
      writeShort(lineDiff)
      writeByte(column)
    } else {
      writeFull()
    }

    lastPosition = pos
  }

  if (UseDebugMagic)
    writeInt(PosDebugMagic)
}
/**
 * Writes a presence flag, then the tree and position hashes. The reader
 * expects each hash to be exactly 20 bytes (see readOptHash).
 */
def writeOptHash(optHash: Option[TreeHash]): Unit = {
  buffer.writeBoolean(optHash.isDefined)
  for (hash <- optHash) {
    buffer.write(hash.treeHash)
    buffer.write(hash.posHash)
  }
}

/** Strings are written as indices into the interned string table. */
def writeString(s: String): Unit =
  buffer.writeInt(stringToIndex(s))
}
private final class Deserializer(stream: InputStream, sourceVersion: String) {
private[this] val input = new DataInputStream(stream)

// The file and string tables are read eagerly from the stream header, in the
// same order Serializer.serialize emitted them.
private[this] val files =
  Array.fill(input.readInt())(new URI(input.readUTF()))

private[this] val strings =
  Array.fill(input.readInt())(input.readUTF())

// Last decoded position; base for the position delta-decoding.
private[this] var lastPosition: Position = Position.NoPosition

/** Reads the single top-level tree that follows the tables. */
def deserialize(): Tree = {
  readTree()
}
/**
 * Reads one tree: position, tag byte, then fields in exactly the order
 * `Serializer.writeTree` wrote them for that tag. Argument evaluation order
 * is significant here — each `readX()` consumes bytes from the stream.
 */
def readTree(): Tree = {
  import input._
  implicit val pos = readPosition()
  val tag = readByte()
  val result = (tag: @switch) match {
    case TagEmptyTree => EmptyTree

    case TagVarDef   => VarDef(readIdent(), readType(), readBoolean(), readTree())
    case TagParamDef => ParamDef(readIdent(), readType(), readBoolean())

    case TagSkip     => Skip()
    case TagBlock    => Block(readTrees())
    case TagLabeled  => Labeled(readIdent(), readType(), readTree())
    case TagAssign   => Assign(readTree(), readTree())
    case TagReturn   => Return(readTree(), readOptIdent())
    case TagIf       => If(readTree(), readTree(), readTree())(readType())
    case TagWhile    => While(readTree(), readTree(), readOptIdent())
    case TagDoWhile  => DoWhile(readTree(), readTree(), readOptIdent())
    case TagTry      => Try(readTree(), readIdent(), readTree(), readTree())(readType())
    case TagThrow    => Throw(readTree())
    case TagContinue => Continue(readOptIdent())
    case TagMatch =>
      Match(readTree(), List.fill(readInt()) {
        (readTrees().map(_.asInstanceOf[Literal]), readTree())
      }, readTree())(readType())
    case TagDebugger => Debugger()

    case TagNew             => New(readClassType(), readIdent(), readTrees())
    case TagLoadModule      => LoadModule(readClassType())
    case TagStoreModule     => StoreModule(readClassType(), readTree())
    case TagSelect          => Select(readTree(), readIdent())(readType())
    case TagApply           => Apply(readTree(), readIdent(), readTrees())(readType())
    case TagApplyStatically => ApplyStatically(readTree(), readClassType(), readIdent(), readTrees())(readType())
    case TagApplyStatic     => ApplyStatic(readClassType(), readIdent(), readTrees())(readType())
    case TagUnaryOp         => UnaryOp(readByte(), readTree())
    case TagBinaryOp        => BinaryOp(readByte(), readTree(), readTree())
    case TagNewArray        => NewArray(readArrayType(), readTrees())
    case TagArrayValue      => ArrayValue(readArrayType(), readTrees())
    case TagArrayLength     => ArrayLength(readTree())
    case TagArraySelect     => ArraySelect(readTree(), readTree())(readType())
    case TagRecordValue     => RecordValue(readType().asInstanceOf[RecordType], readTrees())
    case TagIsInstanceOf    => IsInstanceOf(readTree(), readReferenceType())
    case TagAsInstanceOf    => AsInstanceOf(readTree(), readReferenceType())
    case TagUnbox           => Unbox(readTree(), readByte().toChar)
    case TagGetClass        => GetClass(readTree())
    case TagCallHelper      => CallHelper(readString(), readTrees())(readType())

    case TagJSNew                => JSNew(readTree(), readTrees())
    case TagJSDotSelect          => JSDotSelect(readTree(), readIdent())
    case TagJSBracketSelect      => JSBracketSelect(readTree(), readTree())
    case TagJSFunctionApply      => JSFunctionApply(readTree(), readTrees())
    case TagJSDotMethodApply     => JSDotMethodApply(readTree(), readIdent(), readTrees())
    case TagJSBracketMethodApply => JSBracketMethodApply(readTree(), readTree(), readTrees())
    case TagJSSpread             => JSSpread(readTree())
    case TagJSDelete             => JSDelete(readTree())
    case TagJSUnaryOp            => JSUnaryOp(readInt(), readTree())
    case TagJSBinaryOp           => JSBinaryOp(readInt(), readTree(), readTree())
    case TagJSArrayConstr        => JSArrayConstr(readTrees())
    case TagJSObjectConstr =>
      JSObjectConstr(List.fill(readInt())((readPropertyName(), readTree())))
    case TagJSEnvInfo            => JSEnvInfo()

    case TagUndefined      => Undefined()
    case TagUndefinedParam => UndefinedParam()(readType())
    case TagNull           => Null()
    case TagBooleanLiteral => BooleanLiteral(readBoolean())
    case TagIntLiteral     => IntLiteral(readInt())
    case TagLongLiteral    => LongLiteral(readLong())
    case TagFloatLiteral   => FloatLiteral(readFloat())
    case TagDoubleLiteral  => DoubleLiteral(readDouble())
    case TagStringLiteral  => StringLiteral(readString())
    case TagClassOf        => ClassOf(readReferenceType())

    case TagVarRef => VarRef(readIdent())(readType())
    case TagThis   => This()(readType())
    case TagClosure =>
      Closure(readParamDefs(), readParamDefs(), readTree(), readTrees())

    case TagClassDef =>
      val name = readIdent()
      val kind = ClassKind.fromByte(readByte())
      val superClass = readOptIdent()
      val parents = readIdents()
      // "" was written for "no JS name".
      val jsName = Some(readString()).filter(_ != "")
      val defs = readTrees()
      val optimizerHints = new OptimizerHints(readInt())
      ClassDef(name, kind, superClass, parents, jsName, defs)(optimizerHints)

    case TagFieldDef =>
      FieldDef(readIdent(), readType(), readBoolean())

    case TagMethodDef =>
      val optHash = readOptHash()
      // read and discard the length
      val len = readInt()
      assert(len >= 0)
      MethodDef(readBoolean(), readPropertyName(),
        readParamDefs(), readType(), readTree())(
        new OptimizerHints(readInt()), optHash)

    case TagPropertyDef =>
      PropertyDef(readPropertyName(), readTree(),
        readTree().asInstanceOf[ParamDef], readTree())

    case TagConstructorExportDef =>
      ConstructorExportDef(readString(), readParamDefs(), readTree())

    case TagModuleExportDef =>
      ModuleExportDef(readString())
  }
  if (UseDebugMagic) {
    val magic = readInt()
    assert(magic == DebugMagic,
      s"Bad magic after reading a ${result.getClass}!")
  }
  result
}
/** Reads a length-prefixed list of trees. */
def readTrees(): List[Tree] =
  List.fill(input.readInt())(readTree())

/** Reads a tree list and downcasts each element to ParamDef. */
def readParamDefs(): List[ParamDef] =
  readTrees().map(_.asInstanceOf[ParamDef])

/** Reads an identifier: position, name, and original name ("" = absent). */
def readIdent(): Ident = {
  implicit val pos = readPosition()
  val name = readString()
  val originalName = readString()
  Ident(name, if (originalName.isEmpty) None else Some(originalName))
}

/** Reads a length-prefixed list of identifiers. */
def readIdents(): List[Ident] =
  List.fill(input.readInt())(readIdent())

/** Reads a presence flag, then the identifier if present. */
def readOptIdent(): Option[Ident] = {
  if (input.readBoolean()) Some(readIdent())
  else None
}
/**
 * Reads a type: a tag byte, plus payload for class, array, and record types.
 * Mirrors `Serializer.writeType`.
 */
def readType(): Type = {
  val tag = input.readByte()
  (tag: @switch) match {
    case TagAnyType     => AnyType
    case TagNothingType => NothingType
    case TagUndefType   => UndefType
    case TagBooleanType => BooleanType
    case TagIntType     => IntType
    case TagLongType    => LongType
    case TagFloatType   => FloatType
    case TagDoubleType  => DoubleType
    case TagStringType  => StringType
    case TagNullType    => NullType
    case TagNoType      => NoType

    case TagClassType => readClassType()
    case TagArrayType => readArrayType()

    case TagRecordType =>
      RecordType(List.fill(input.readInt()) {
        val name = readString()
        // "" was written for a missing original name.
        val originalName = readString()
        val tpe = readType()
        val mutable = input.readBoolean()
        RecordType.Field(name,
          if (originalName.isEmpty) None else Some(originalName),
          tpe, mutable)
      })
  }
}
/** A class type is just its interned class name. */
def readClassType(): ClassType =
  ClassType(readString())

/** An array type is its base class name plus its dimension count. */
def readArrayType(): ArrayType =
  ArrayType(readString(), input.readInt())

/** Reads a type and downcasts to ReferenceType. */
def readReferenceType(): ReferenceType =
  readType().asInstanceOf[ReferenceType]

// Discriminator written by writePropertyName: true = Ident, false = string literal.
def readPropertyName(): PropertyName = {
  if (input.readBoolean()) readIdent()
  else readTree().asInstanceOf[StringLiteral]
}
/**
 * Decodes one position written by `Serializer.writePosition`, resolving
 * delta formats against `lastPosition`. NoPosition does not update
 * `lastPosition`, matching the writer.
 */
def readPosition(): Position = {
  import input._
  import PositionFormat._

  val first = readByte()
  val result = if (first == FormatNoPositionValue) {
    Position.NoPosition
  } else {
    val result = if ((first & FormatFullMask) == FormatFullMaskValue) {
      // Full form: file index, absolute line and column.
      val file = files(readInt())
      val line = readInt()
      val column = readInt()
      Position(file, line, column)
    } else {
      assert(lastPosition != NoPosition,
        "Position format error: first position must be full")
      if ((first & Format1Mask) == Format1MaskValue) {
        // Signed 7-bit column diff (sign-extended by the arithmetic shift).
        val columnDiff = first >> Format1Shift
        Position(lastPosition.source, lastPosition.line,
          lastPosition.column + columnDiff)
      } else if ((first & Format2Mask) == Format2MaskValue) {
        // 6-bit line diff plus absolute column byte.
        val lineDiff = first >> Format2Shift
        val column = readByte() & 0xff // unsigned
        Position(lastPosition.source,
          lastPosition.line + lineDiff, column)
      } else {
        assert((first & Format3Mask) == Format3MaskValue,
          s"Position format error: first byte $first does not match any format")
        // 16-bit line diff plus absolute column byte.
        val lineDiff = readShort()
        val column = readByte() & 0xff // unsigned
        Position(lastPosition.source,
          lastPosition.line + lineDiff, column)
      }
    }
    lastPosition = result
    result
  }

  if (UseDebugMagic) {
    val magic = readInt()
    assert(magic == PosDebugMagic,
      s"Bad magic after reading position with first byte $first")
  }

  result
}
/** Reads an optional pair of 20-byte digests (tree hash and position hash).
 *  A leading boolean tag marks presence; absence yields None.
 */
def readOptHash(): Option[TreeHash] = {
  if (!input.readBoolean()) None
  else {
    val hashOfTree = new Array[Byte](20)
    val hashOfPos = new Array[Byte](20)
    input.readFully(hashOfTree)
    input.readFully(hashOfPos)
    Some(new TreeHash(hashOfTree, hashOfPos))
  }
}
// Resolves a string by index into the previously deserialized string table.
def readString(): String = {
strings(input.readInt())
}
}
}
| colinrgodsey/scala-js | ir/src/main/scala/org/scalajs/core/ir/Serializers.scala | Scala | bsd-3-clause | 27,628 |
import org.apache.spark._
import org.apache.spark.rdd._
import org.apache.spark.SparkContext._
import scala.collection.mutable
import org.apache.spark.mllib.clustering.LDA
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.json4s._
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import java.io._
/*
* Args
* 1) Input Path
* 2) K - Number of topics
* */
object LDA2 {
/** Runs LDA topic modelling over a directory of text documents.
 *
 *  args(0): input path, read via wholeTextFiles
 *  args(1): number of topics (k)
 *
 *  Writes a small JSON timing report under /var/www/html.
 */
def main(args: Array[String]): Unit = {
  val sparkConf = new SparkConf().setAppName("LDA_CSLab")
  val sc = new SparkContext(sparkConf)

  // Keep document bodies only; file names are discarded.
  val corpus: RDD[String] = sc.wholeTextFiles(args(0)).map(_._2)

  // Lower-case, split on whitespace, keep purely alphabetic tokens longer than 3 chars.
  val tokenized: RDD[Seq[String]] = corpus
    .map(_.toLowerCase.split("\\\\s"))
    .map(_.filter(_.length > 3).filter(_.forall(java.lang.Character.isLetter)))

  // Global term frequencies, most frequent first.
  val termCounts: Array[(String, Long)] =
    tokenized.flatMap(_.map(_ -> 1L)).reduceByKey(_ + _).collect().sortBy(-_._2)

  // Drop the 20 most frequent terms as stop words; the rest is the vocabulary.
  val numStopWords = 20
  val vocabArray: Array[String] =
    termCounts.takeRight(termCounts.size - numStopWords).map(_._1)
  val vocab: Map[String, Int] = vocabArray.zipWithIndex.toMap

  // Bag-of-words vectors keyed by a per-document id.
  val documents: RDD[(Long, Vector)] =
    tokenized.zipWithIndex.map { case (tokens, id) =>
      val termFreq = new mutable.HashMap[Int, Double]()
      tokens.foreach { term =>
        vocab.get(term).foreach { idx =>
          termFreq(idx) = termFreq.getOrElse(idx, 0.0) + 1.0
        }
      }
      (id, Vectors.sparse(vocab.size, termFreq.toSeq))
    }

  // Fit the LDA model, timing the run (the model itself is not persisted).
  val numberOfTopics = args(1).toInt
  val start = java.lang.System.currentTimeMillis()
  val ldaModel = new LDA()
    .setK(numberOfTopics)
    .setMaxIterations(10)
    .run(documents)
  val execTime = java.lang.System.currentTimeMillis - start

  // Emit the JSON report of this run.
  val operator = "LDA_Spark"
  val numDocs = corpus.count()
  val output = ("operator" -> operator) ~
    ("exec_time" -> execTime) ~
    ("k" -> numberOfTopics) ~
    ("input_size" -> numDocs)
  val reportWriter = new PrintWriter(new File(s"/var/www/html/${operator}T${start}.json"))
  reportWriter.write(compact(output))
  reportWriter.close()
}
} | project-asap/IReS-Platform | asap-tools/spark/scala/src/main/scala/LDA2.scala | Scala | apache-2.0 | 2,083 |
package org.vitrivr.adampro.query.planner
import org.vitrivr.adampro.communication.api.QueryOp
import org.vitrivr.adampro.data.entity.Entity
import org.vitrivr.adampro.data.index.Index
import org.vitrivr.adampro.process.SharedComponentContext
import org.vitrivr.adampro.query.query.RankingQuery
import org.vitrivr.adampro.query.tracker.QueryTracker
import org.vitrivr.adampro.utils.Logging
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, TimeoutException}
import scala.concurrent.ExecutionContext.Implicits.global
/**
* ADAMpro
*
* Ivan Giangreco
* November 2016
*/
/** Base class for query-planner heuristics that are trained by benchmarking
  * query executions against entities and their indexes.
  *
  * @param name         display name of this heuristic
  * @param defaultNRuns number of benchmark runs performed when the caller
  *                     does not specify one
  */
private[planner] abstract class PlannerHeuristics(protected val name: String, private val defaultNRuns: Int = 100) extends Serializable with Logging {

  /** Result of one benchmark run.
    *
    * @param tp        number of true positives (-1 denotes a timed-out run)
    * @param precision fraction of retrieved results that are relevant
    * @param recall    fraction of relevant results that were retrieved
    * @param time      wall-clock execution time in milliseconds
    */
  case class Measurement(tp: Int, precision: Double, recall: Double, time: Double) {
    /** F1 score (harmonic mean of precision and recall).
      *
      * Fix: when precision and recall are both zero — e.g. the timed-out
      * Measurement(-1, 0.0, 0.0, ...) produced below — the old expression
      * evaluated 0.0 / 0.0 and returned NaN; a confidence of 0.0 is
      * returned for that case now.
      */
    def toConfidence(): Confidence = {
      val denom = precision + recall
      if (denom == 0.0) Confidence(0.0)
      else Confidence(2 * (precision * recall) / denom)
    }
  }

  /** Wrapper for a confidence score. */
  case class Confidence(confidence: Double)

  /** Trains this heuristic on the given entity.
    *
    * @param entity  entity to benchmark
    * @param queries training queries
    * @param options implementation-specific options
    */
  def trainEntity(entity: Entity, queries: Seq[RankingQuery], options: Map[String, String] = Map())(implicit ac: SharedComponentContext): Unit

  /** Trains this heuristic on the given indexes.
    *
    * @param indexes indexes to benchmark
    * @param queries training queries
    * @param options implementation-specific options
    */
  def trainIndexes(indexes: Seq[Index], queries: Seq[RankingQuery], options: Map[String, String] = Map())(implicit ac: SharedComponentContext): Unit

  /** Returns a score for executing the given query against the entity. */
  def test(entity: Entity, nnq: RankingQuery)(implicit ac: SharedComponentContext): Double

  /** Returns a score for executing the given query against the index. */
  def test(index: Index, nnq: RankingQuery)(implicit ac: SharedComponentContext): Double

  /** Runs `nnq` as a sequential scan against the entity, `nruns` times
    * (default: `defaultNRuns`), measuring the wall-clock time of each run.
    * Precision and recall are hard-coded to 1 for the sequential case.
    * A run that exceeds the configured maximum training wait time is
    * recorded as Measurement(-1, 0.0, 0.0, maxTime + 1s).
    */
  protected def performMeasurement(entity: Entity, nnq: RankingQuery, nruns: Option[Int])(implicit ac: SharedComponentContext): Seq[Measurement] = {
    val tracker = new QueryTracker()
    val res = (0 until nruns.getOrElse(defaultNRuns)).map {
      i =>
        try {
          val t1 = System.currentTimeMillis
          val fut = QueryOp.sequential(entity.entityname, nnq, None)(tracker).get.get.select(entity.pk.name).rdd.takeAsync(nnq.k)
          val res = Await.result(fut, Duration(ac.config.maximumTimeToWaitInTraining, "seconds"))
          val t2 = System.currentTimeMillis

          // Stop any straggling jobs spawned by the async take.
          ac.sc.cancelAllJobs()

          val recall = 1.toFloat
          val precision = 1.toFloat
          val time = t2 - t1

          Measurement(nnq.k, precision, recall, time)
        } catch {
          case e: TimeoutException => Measurement(-1, 0.0, 0.0, (ac.config.maximumTimeToWaitInTraining + 1) * 1000)
        }
    }

    tracker.cleanAll()
    res
  }

  /** Runs `nnq` against the index, `nruns` times (default: `defaultNRuns`),
    * measuring wall-clock time plus precision/recall of the retrieved ids
    * against the relevant set `rel`. A timed-out run is recorded as
    * Measurement(-1, 0.0, 0.0, maxTime + 1s).
    *
    * @param rel ids of the relevant (ground-truth) results
    */
  protected def performMeasurement(index: Index, nnq: RankingQuery, nruns: Option[Int], rel: Set[Any])(implicit ac: SharedComponentContext): Seq[Measurement] = {
    val tracker = new QueryTracker()
    val res = (0 until nruns.getOrElse(defaultNRuns)).map {
      i =>
        try {
          val t1 = System.currentTimeMillis
          val fut = QueryOp.index(index.indexname, nnq, None)(tracker).get.get.select(index.entity.get.pk.name).rdd.takeAsync(nnq.k)
          val res = Await.result(fut, Duration(ac.config.maximumTimeToWaitInTraining, "seconds"))
          val t2 = System.currentTimeMillis

          // Stop any straggling jobs spawned by the async take.
          ac.sc.cancelAllJobs()

          val ret = res.map(_.getAs[Any](0)).toSet
          val tp = rel.intersect(ret).size
          val nrelevant = rel.size
          val nretrieved = ret.size

          val recall = tp.toDouble / nrelevant.toDouble
          val precision = tp.toDouble / nretrieved.toDouble
          val time = t2 - t1

          Measurement(tp, precision, recall, time)
        } catch {
          case e: TimeoutException => Measurement(-1, 0.0, 0.0, (ac.config.maximumTimeToWaitInTraining + 1) * 1000)
        }
    }

    tracker.cleanAll()
    res
  }
}
| dbisUnibas/ADAMpro | src/main/scala/org/vitrivr/adampro/query/planner/PlannerHeuristics.scala | Scala | mit | 4,041 |
/*
* This file is part of Kiama.
*
* Copyright (C) 2011-2015 Anthony M Sloane, Macquarie University.
*
* Kiama is free software: you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or (at your
* option) any later version.
*
* Kiama is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
* more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Kiama. (See files COPYING and COPYING.LESSER.) If not, see
* <http://www.gnu.org/licenses/>.
*/
package org.kiama
package example.oberon0
package base
/**
* Interface for all transformers. Also provides operations that are
* useful in transformer implementations.
*/
trait Transformer {
import source.SourceTree.SourceTree
/**
* Transform a module tree in some way, returning a new module tree.
* The default implementation is the identity transformation: the input
* tree is returned unchanged. Concrete transformers override this to
* rewrite the tree.
*/
def transform (tree : SourceTree) : SourceTree =
tree
}
| solomono/kiama | library/src/org/kiama/example/oberon0/base/Transformer.scala | Scala | gpl-3.0 | 1,246 |
package io.youi.component.support
import io.youi.component.Component
import io.youi.component.feature.ContainerFeature
// Mixin that equips a Component with a container restricted to children of type Child.
trait TypedContainerSupport[Child <: Component] extends Component {
// Container feature created lazily on first access, backed by this component.
lazy val children: ContainerFeature[Child] = new ContainerFeature[Child](this)
} | outr/youi | gui/src/main/scala/io/youi/component/support/TypedContainerSupport.scala | Scala | mit | 271
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package testsupport
import com.github.tomakehurst.wiremock.WireMockServer
import com.github.tomakehurst.wiremock.client.WireMock
import com.github.tomakehurst.wiremock.core.WireMockConfiguration
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, Suite}
import pagespecs.pages.BaseUrl
import play.api.Logger
/** Runs an embedded WireMock server for the lifetime of the suite and resets
  * its stubbings before each test.
  *
  * Mix into a Suite; the server listens on WireMockSupport.port and the
  * default WireMock client is configured against it when the trait is
  * initialised.
  *
  * Fixed: the lifecycle overrides now use explicit `: Unit =` result types
  * (the old procedure syntax is deprecated) and call through to super, as
  * required for ScalaTest's stackable-trait pattern. The inherited super
  * implementations are no-ops, so runtime behaviour is unchanged.
  */
trait WireMockSupport extends BeforeAndAfterAll with BeforeAndAfterEach {
  self: Suite =>

  private val logger = Logger(getClass)

  implicit val wireMockServer: WireMockServer = new WireMockServer(WireMockConfiguration.wireMockConfig().port(WireMockSupport.port))

  WireMock.configureFor(WireMockSupport.port)

  override protected def beforeAll(): Unit = {
    wireMockServer.start()
    super.beforeAll()
  }

  override protected def afterAll(): Unit = {
    try super.afterAll()
    finally {
      logger.info("Stopping wire mock server ...")
      wireMockServer.stop()
      logger.info("Stopping wire mock server - done")
    }
  }

  override def beforeEach(): Unit = {
    logger.info("Resetting wire mock server ...")
    WireMock.reset()
    logger.info("Resetting wire mock server - done")
    super.beforeEach()
  }
}
/** Fixed configuration shared by the WireMockSupport trait and the tests. */
object WireMockSupport {
// Port the embedded WireMock server listens on; must be free on the build machine.
val port = 11111
// Base URL that pages under test are pointed at.
val baseUrl = BaseUrl(s"http://localhost:${WireMockSupport.port}")
}
| hmrc/self-service-time-to-pay-frontend | test/testsupport/WireMockSupport.scala | Scala | apache-2.0 | 1,763 |
package test
import models.{Workbook, Worksheet, Cell, OAuth2}
// Common fixture values shared across the specs: simple literals,
// environment-driven identifiers/tokens, and empty model instances.
trait SpecHelper {
val testInt : Int = 1
val testValue : String = "A String"
val nothing : Option[Nothing] = None
// NOTE(review): System.getenv returns null when a variable is unset — these
// specs appear to assume TEST_WORKBOOK / TEST_WORKSHEET / TEST_REFRESH are
// exported in the environment; confirm before running.
val testWorkbookId : String = System.getenv("TEST_WORKBOOK")
val testWorksheetId : String = System.getenv("TEST_WORKSHEET")
val refreshToken : String = System.getenv("TEST_REFRESH")
val testWorksheet : Worksheet = Worksheet()
val testWorkbook : Workbook = Workbook()
val someTestWorkbook : Option[Workbook] = Some(Workbook())
val someTestWorksheet : Option[Worksheet] = Some(Worksheet())
val someTestWorkbookList : Option[List[Workbook]] = Some(List.empty[Workbook])
val someTestWorksheetList : Option[List[Worksheet]] = Some(List.empty[Worksheet])
val someTestCells : Option[List[Cell]] = Some(List.empty[Cell])
// Session key/value pair used to simulate an authenticated session.
val authenticatedSession = (
"refreshToken" -> refreshToken
)
} | marinatedpork/google-oauth-ember-play-scala | test/SpecHelper.scala | Scala | mit | 971
/**
* Copyright (c) 2013 Saddle Development Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.saddle.time
import org.joda.time.{Period, Duration}
/**
* Enumeration of frequency base for utilizing with an RRule
*/
/**
 * The recurrence frequencies an RRule may be based on.
 */
sealed trait Frequency {

  /** The equivalent frequency constant of the google-ical library. */
  protected[time] def toICal: com.google.ical.values.Frequency = this match {
    case YEARLY   => com.google.ical.values.Frequency.YEARLY
    case MONTHLY  => com.google.ical.values.Frequency.MONTHLY
    case WEEKLY   => com.google.ical.values.Frequency.WEEKLY
    case DAILY    => com.google.ical.values.Frequency.DAILY
    case HOURLY   => com.google.ical.values.Frequency.HOURLY
    case MINUTELY => com.google.ical.values.Frequency.MINUTELY
    case SECONDLY => com.google.ical.values.Frequency.SECONDLY
  }

  /** One unit of this frequency, expressed as a joda-time Period. */
  protected[time] def toDur: Period = this match {
    case YEARLY   => Period.years(1)
    case MONTHLY  => Period.months(1)
    case WEEKLY   => Period.weeks(1)
    case DAILY    => Period.days(1)
    case HOURLY   => Period.hours(1)
    case MINUTELY => Period.minutes(1)
    case SECONDLY => Period.seconds(1)
  }
}

case object SECONDLY extends Frequency
case object MINUTELY extends Frequency
case object HOURLY extends Frequency
case object DAILY extends Frequency
case object WEEKLY extends Frequency
case object MONTHLY extends Frequency
case object YEARLY extends Frequency
| jyt109/saddle | saddle-core/src/main/scala/org/saddle/time/Frequency.scala | Scala | apache-2.0 | 1,898 |
package handlers.server
import database.WorldObject
import handlers.packets.{PacketWriter, ServerCodes}
import scala.concurrent.Future
/**
 * Builds the server "object create" packet that announces a static world
 * object to the client. (Created by franblas on 08/05/17; field layout
 * mirrors the DOL PacketLib reference implementation, with several of its
 * optional branches — emblems, underwater flag, keep banners, timed items,
 * localisation, doors — left unimplemented.)
 */
class ObjectCreate(obj: WorldObject) {

  /** Serialises the packet and returns the finished byte payload. */
  def process(): Future[Array[Byte]] = {
    val writer = new PacketWriter(ServerCodes.objectCreate)

    // Object id, then emblem (always 0: static-item emblems unsupported).
    writer.writeShort(obj.objectId.toShort)
    writer.writeShort(0)

    // Orientation and position.
    writer.writeShort(obj.heading.toShort)
    writer.writeShort(obj.z.toShort)
    writer.writeInt(obj.x)
    writer.writeInt(obj.y)

    // Low two realm bits shifted into the flag nibble; the underwater,
    // keep-banner and timed-item flag bits are not implemented.
    val flag = (obj.realm & 3) << 4
    writer.writeShort(obj.model.toShort)
    writer.writeShort(flag.toShort)

    // Extended emblem bit mask (unsupported, always 0).
    writer.writeInt(0)

    // Object name, truncated to 48 characters; no localisation support.
    writer.writePascalString(obj.name.take(48))

    // Door id marker: 0x00 means "not a door".
    writer.writeByte(0x00)

    writer.toFinalFuture()
  }
}
| franblas/NAOC | src/main/scala/handlers/server/ObjectCreate.scala | Scala | mit | 2,805 |
package controllers.s_about_you
import app.MaritalStatus
import controllers.{ClaimScenarioFactory, PreviewTestUtils}
import org.specs2.mutable._
import utils.WithBrowser
import utils.pageobjects._
import utils.pageobjects.preview.PreviewPage
import utils.pageobjects.s_claim_date.GClaimDatePage
import utils.pageobjects.s_about_you.{GContactDetailsPage, GMaritalStatusPage, GNationalityAndResidencyPage}
import utils.helpers.PreviewField._
/**
 * Browser-level integration tests for the "About you - marital status" page:
 * rendering, validation, navigation and editing the answer via the preview page.
 */
class GMaritalStatusIntegrationSpec extends Specification {
sequential
section("integration", models.domain.AboutYou.id)
"Status" should {
// The page renders at all.
"be presented" in new WithBrowser with PageObjects{
val page = GMaritalStatusPage(context)
page goToThePage()
}
// Submitting an empty form keeps the user on the marital-status page.
"contain errors on invalid submission" in new WithBrowser with PageObjects{
val page = GMaritalStatusPage(context)
page goToThePage()
val nextPage = page submitPage()
nextPage must beAnInstanceOf[GMaritalStatusPage]
}
// A valid submission moves on to the contact-details page.
"navigate to next page on valid Status submission" in new WithBrowser with PageObjects{
val page = GMaritalStatusPage(context)
val claim = ClaimScenarioFactory.maritalStatus()
page goToThePage()
page fillPageWith claim
val nextPage = page submitPage()
nextPage must beAnInstanceOf[GContactDetailsPage]
}
// Editing the answer from the preview page persists: Single -> Married
// round-trips back to the preview with the new value shown.
"Modify Status from preview page" in new WithBrowser with PageObjects{
val previewPage = goToPreviewPage(context)
val id = "about_you_marital_status"
val answerText = PreviewTestUtils.answerText(id, _:Page)
answerText(previewPage) mustEqual MaritalStatus.Single
val maritalStatusPage = previewPage.clickLinkOrButton(getLinkId(id))
maritalStatusPage must beAnInstanceOf[GMaritalStatusPage]
val modifiedData = new TestData
modifiedData.AboutYouWhatIsYourMaritalOrCivilPartnershipStatus = MaritalStatus.Married
maritalStatusPage fillPageWith modifiedData
val previewPageModified = maritalStatusPage submitPage()
previewPageModified must beAnInstanceOf[PreviewPage]
answerText(previewPageModified) mustEqual MaritalStatus.Married
}
}
section("integration", models.domain.AboutYou.id)
// Drives the claim flow (claim date -> marital status -> nationality) far
// enough that the preview page can be opened, then returns the preview page.
def goToPreviewPage(context:PageObjectsContext):Page = {
val claimDatePage = GClaimDatePage(context)
claimDatePage goToThePage()
val claimDate = ClaimScenarioFactory.s12ClaimDate()
claimDatePage fillPageWith claimDate
claimDatePage submitPage()
val maritalStatusPage = GMaritalStatusPage(context)
maritalStatusPage goToThePage()
maritalStatusPage fillPageWith ClaimScenarioFactory.maritalStatus()
maritalStatusPage.submitPage()
val nationalityPage = GNationalityAndResidencyPage(context)
val claim = ClaimScenarioFactory.yourNationalityAndResidencyNonResident
nationalityPage goToThePage()
nationalityPage fillPageWith claim
nationalityPage submitPage()
val previewPage = PreviewPage(context)
previewPage goToThePage()
}
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/test/controllers/s_about_you/GMaritalStatusIntegrationSpec.scala | Scala | mit | 2,981 |
package testfeature
import feature.{GenericFeature, MessengerRNA, Transcript}
import org.scalatest.FunSuite
/**
* Created by prussell on 11/12/16.
*/
class FeatureCompareSuite extends FunSuite {
// Pins the ordering of two GenericFeatures: block comparison dominates; the
// name only breaks ties, and (per these asserts) an unnamed feature (None)
// sorts after a named one (Some).
test("GenericFeature compare to GenericFeature") {
// Same blocks, same name
assert(new GenericFeature(chr1_1000_2000_minus, None).compare(new GenericFeature(chr1_1000_2000_minus, None)) === 0)
// Same blocks, different name
assert(new GenericFeature(chr1_1000_2000_minus, None).compare(new GenericFeature(chr1_1000_2000_minus, Some("xxxx"))) > 0)
assert(new GenericFeature(chr1_1000_2000_minus, Some("xxxx")).compare(new GenericFeature(chr1_1000_2000_minus, None)) < 0)
// Different blocks, same name
assert(new GenericFeature(chr1_1000_2000_minus, None).compare(new GenericFeature(chr1_1000_2000_both, None)) < 0)
assert(new GenericFeature(chr1_1000_2000_both, None).compare(new GenericFeature(chr1_1000_2000_minus, None)) > 0)
// Different blocks, different name
assert(new GenericFeature(chr1_1000_2000_minus, None).compare(new GenericFeature(chr1_1000_2000_both, Some("name"))) < 0)
assert(new GenericFeature(chr1_1000_2000_both, None).compare(new GenericFeature(chr1_1000_2000_minus, Some("name"))) > 0)
}
// Pins the cross-type ordering: with identical blocks and name, a
// GenericFeature sorts before a Transcript; otherwise blocks/name decide
// first, as for same-type comparisons.
test("GenericFeature compare to Transcript") {
// Same blocks, same name
assert(new GenericFeature(chr1_900_1100_1200_1300_plus, Some("name")).compare(new Transcript(chr1_900_1100_1200_1300_plus, Some("name"), Some("gene"))) < 0)
assert(new Transcript(chr1_900_1100_1200_1300_plus, Some("name"), Some("gene")).compare(new GenericFeature(chr1_900_1100_1200_1300_plus, Some("name"))) > 0)
// Same blocks, different name
assert(new GenericFeature(chr1_900_1100_1200_1300_plus, None).compare(new Transcript(chr1_900_1100_1200_1300_plus, Some("name"), Some("gene"))) > 0)
assert(new Transcript(chr1_900_1100_1200_1300_plus, Some("name"), Some("gene")).compare(new GenericFeature(chr1_900_1100_1200_1300_plus, None)) < 0)
// Different blocks, same name
assert(new GenericFeature(chr1_1100_1200_1300_1400_plus, Some("name")).compare(new Transcript(chr1_900_1100_1200_1300_plus, Some("name"), Some("gene"))) > 0)
assert(new Transcript(chr1_900_1100_1200_1300_plus, Some("name"), Some("gene")).compare(new GenericFeature(chr1_1100_1200_1300_1400_plus, Some("name"))) < 0)
// Different blocks, different name
assert(new GenericFeature(chr1_1100_1200_1300_1400_plus, Some("name")).compare(new Transcript(chr1_900_1100_1200_1300_plus, Some("xxxx"), Some("gene"))) > 0)
assert(new Transcript(chr1_900_1100_1200_1300_plus, Some("xxxx"), Some("gene")).compare(new GenericFeature(chr1_1100_1200_1300_1400_plus, Some("name"))) < 0)
}
// Pins the cross-type ordering: with identical blocks and name, a
// GenericFeature sorts before a MessengerRNA; blocks/name still decide first.
test("GenericFeature compare to mRNA") {
// Same blocks, same name
assert(new GenericFeature(chr1_1000_2000_3000_4000_5000_6000_7000_8000_minus, Some("name"))
.compare(MessengerRNA(chr1_1000_2000_3000_4000_5000_6000_7000_8000_minus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_1000_2000_3000_4000_5000_6000_7000_8000_minus, 1000, 1099, Some("name"), Some("gene"))
.compare(new GenericFeature(chr1_1000_2000_3000_4000_5000_6000_7000_8000_minus, Some("name"))) > 0)
// Same blocks, different name
assert(new GenericFeature(chr1_1000_2000_3000_4000_5000_6000_7000_8000_minus, None)
.compare(MessengerRNA(chr1_1000_2000_3000_4000_5000_6000_7000_8000_minus, 1000, 1099, Some("name"), Some("gene"))) > 0)
assert(MessengerRNA(chr1_1000_2000_3000_4000_5000_6000_7000_8000_minus, 1000, 1099, Some("name"), Some("gene"))
.compare(new GenericFeature(chr1_1000_2000_3000_4000_5000_6000_7000_8000_minus, None)) < 0)
// Different blocks, same name
assert(new GenericFeature(chr1_7000_8000_9000_10000_minus, Some("name"))
.compare(MessengerRNA(chr1_1000_2000_3000_4000_5000_6000_7000_8000_minus, 1000, 1099, Some("name"), Some("gene"))) > 0)
assert(MessengerRNA(chr1_1000_2000_3000_4000_5000_6000_7000_8000_minus, 1000, 1099, Some("name"), Some("gene"))
.compare(new GenericFeature(chr1_7000_8000_9000_10000_minus, Some("name"))) < 0)
// Different blocks, different name
assert(new GenericFeature(chr1_7000_8000_9000_10000_minus, Some("name"))
.compare(MessengerRNA(chr1_1000_2000_3000_4000_5000_6000_7000_8000_minus, 1000, 1099, Some("xxxx"), Some("gene"))) > 0)
assert(MessengerRNA(chr1_1000_2000_3000_4000_5000_6000_7000_8000_minus, 1000, 1099, Some("xxxx"), Some("gene"))
.compare(new GenericFeature(chr1_7000_8000_9000_10000_minus, Some("name"))) < 0)
}
// Pins the ordering of two Transcripts: blocks dominate, then the transcript
// name, and the gene name only breaks remaining ties (None sorts after Some).
test("Transcript compare to Transcript") {
// Same blocks, same name, same gene
assert(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("gene")).compare(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("gene"))) === 0)
// Same blocks, different name, same gene
assert(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("gene")).compare(new Transcript(chr1_6000_8000_9000_10000_plus, None, Some("gene"))) < 0)
assert(new Transcript(chr1_6000_8000_9000_10000_plus, None, Some("gene")).compare(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("gene"))) > 0)
// Different blocks, same name, same gene
assert(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene")).compare(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("gene"))) > 0)
assert(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("gene")).compare(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene"))) < 0)
// Different blocks, different name, same gene
assert(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene")).compare(new Transcript(chr1_6000_8000_9000_10000_plus, Some("xxxx"), Some("gene"))) > 0)
assert(new Transcript(chr1_6000_8000_9000_10000_plus, Some("xxxx"), Some("gene")).compare(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene"))) < 0)
// Same blocks, same name, different gene
assert(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("xxxx")).compare(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("gene"))) > 0)
assert(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("gene")).compare(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("xxxx"))) < 0)
// Same blocks, different name, different gene
assert(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("xxxx")).compare(new Transcript(chr1_6000_8000_9000_10000_plus, Some("xxxx"), Some("gene"))) < 0)
assert(new Transcript(chr1_6000_8000_9000_10000_plus, Some("xxxx"), Some("gene")).compare(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("xxxx"))) > 0)
// Different blocks, same name, different gene
assert(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene")).compare(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("xxxx"))) > 0)
assert(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("xxxx")).compare(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene"))) < 0)
// Different blocks, different name, different gene
assert(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene")).compare(new Transcript(chr1_6000_8000_9000_10000_plus, Some("xxxx"), Some("xxxx"))) > 0)
assert(new Transcript(chr1_6000_8000_9000_10000_plus, Some("xxxx"), Some("xxxx")).compare(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene"))) < 0)
}
// Pins the cross-type ordering: with identical blocks, name and gene, a
// Transcript sorts before a MessengerRNA; blocks and name still decide first.
test("Transcript compare to mRNA") {
// Same blocks, same name, same gene
assert(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("name"), Some("gene"))
.compare(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("gene"))) > 0)
assert(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("name"), Some("gene"))) < 0)
// Same blocks, different name, same gene
assert(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("name"), Some("gene"))
.compare(new Transcript(chr1_6000_8000_9000_10000_plus, None, Some("gene"))) < 0)
assert(new Transcript(chr1_6000_8000_9000_10000_plus, None, Some("gene"))
.compare(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("name"), Some("gene"))) > 0)
// Different blocks, same name, same gene
assert(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("name"), Some("gene"))
.compare(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene"))) < 0)
assert(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("name"), Some("gene"))) > 0)
// Different blocks, different name, same gene
assert(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("xxxx"), Some("gene"))
.compare(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene"))) < 0)
assert(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("xxxx"), Some("gene"))) > 0)
// Same blocks, same name, different gene
assert(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("name"), Some("gene"))
.compare(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("xxxx"))) < 0)
assert(new Transcript(chr1_6000_8000_9000_10000_plus, Some("name"), Some("xxxx"))
.compare(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("name"), Some("gene"))) > 0)
// Same blocks, different name, different gene
assert(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("name"), Some("xxxx"))
.compare(new Transcript(chr1_6000_8000_9000_10000_plus, None, Some("gene"))) < 0)
assert(new Transcript(chr1_6000_8000_9000_10000_plus, None, Some("gene"))
.compare(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("name"), Some("xxxx"))) > 0)
// Different blocks, same name, different gene
assert(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("name"), Some("xxxx"))
.compare(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene"))) < 0)
assert(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("name"), Some("xxxx"))) > 0)
// Different blocks, different name, different gene
assert(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("xxxx"), Some("xxxx"))
.compare(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene"))) < 0)
assert(new Transcript(chr1_7000_8000_9000_10000_plus, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_6000_8000_9000_10000_plus, 7000, 7099, Some("xxxx"), Some("xxxx"))) > 0)
}
test("mRNA compare to mRNA") {
// Same blocks, same name, same gene, same CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) === 0)
// Same blocks, different name, same gene, same CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, None, Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) > 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("xxxx"), Some("gene"))) < 0)
// Different blocks, same name, same gene, same CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("name"), Some("gene"))) > 0)
// Different blocks, different name, same gene, same CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("xxxx"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("xxxx"), Some("gene"))) > 0)
// Same blocks, same name, different gene, same CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("xxxx"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) > 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), None)) < 0)
// Same blocks, different name, different gene, same CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("xxxx"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("xxxx"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("xxxx"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("xxxx"))) > 0)
// Different blocks, same name, different gene, same CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("name"), Some("xxxx"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("name"), Some("xxxx"))) > 0)
// Different blocks, different name, different gene, same CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("xxxx"), Some("xxxx"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("xxxx"), Some("xxxx"))) > 0)
// Same blocks, same name, same gene, different CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("gene"))) > 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
// Same blocks, different name, same gene, different CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("xxxx"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("xxxx"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) > 0)
// Different blocks, same name, same gene, different CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("name"), Some("gene"))) > 0)
// Different blocks, different name, same gene, different CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("xxxx"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("xxxx"), Some("gene"))) > 0)
// Same blocks, same name, different gene, different CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("xxxx"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("xxxx"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) > 0)
// Same blocks, different name, different gene, different CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, None, Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("xxxx"))) > 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("xxxx"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, None, Some("gene"))) < 0)
// Different blocks, same name, different gene, different CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("name"), Some("xxxx"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, Some("name"), Some("xxxx"))) > 0)
// Different blocks, different name, different gene, different CDS start, same CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, None, Some("xxxx"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 900, 999, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1099, None, Some("xxxx"))) > 0)
// Same blocks, same name, same gene, same CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1048, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1048, Some("name"), Some("gene"))) > 0)
// Same blocks, different name, same gene, same CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1048, None, Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) > 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1048, None, Some("gene"))) < 0)
// Different blocks, same name, same gene, same CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1220, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1220, Some("name"), Some("gene"))) > 0)
// Different blocks, different name, same gene, same CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1220, Some("xxxx"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1220, Some("xxxx"), Some("gene"))) > 0)
// Same blocks, same name, different gene, same CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1048, Some("name"), Some("xxxx"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) > 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1048, Some("name"), Some("xxxx"))) < 0)
// Same blocks, different name, different gene, same CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1048, Some("name"), Some("xxxx"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, None, Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, None, Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1048, Some("name"), Some("xxxx"))) > 0)
// Different blocks, same name, different gene, same CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1048, Some("name"), Some("xxxx"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1048, Some("name"), Some("xxxx"))) > 0)
// Different blocks, different name, different gene, same CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1048, Some("xxxx"), Some("xxxx"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1000, 1048, Some("xxxx"), Some("xxxx"))) > 0)
// Same blocks, same name, same gene, different CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1020, 1050, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) > 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1020, 1050, Some("name"), Some("gene"))) < 0)
// Same blocks, different name, same gene, different CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1020, 1050, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("xxxx"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("xxxx"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1020, 1050, Some("name"), Some("gene"))) > 0)
// Different blocks, same name, same gene, different CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1050, 1240, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1050, 1240, Some("name"), Some("gene"))) > 0)
// Different blocks, different name, same gene, different CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1050, 1240, None, Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("xxxx"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("xxxx"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1050, 1240, None, Some("gene"))) > 0)
// Same blocks, same name, different gene, different CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1020, 1050, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), None)) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), None)
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1020, 1050, Some("name"), Some("gene"))) > 0)
// Same blocks, different name, different gene, different CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1020, 1050, Some("xxxx"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), None)) > 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), None)
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1020, 1050, Some("xxxx"), Some("gene"))) < 0)
// Different blocks, same name, different gene, different CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1050, 1240, Some("name"), None)
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1050, 1240, Some("name"), None)) > 0)
// Different blocks, different name, different gene, different CDS start, different CDS end
assert(MessengerRNA(chr1_900_1100_1200_1300_plus, 1050, 1240, None, None)
.compare(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))) < 0)
assert(MessengerRNA(chr1_900_1100_1900_2100_2900_4100_plus, 1000, 1099, Some("name"), Some("gene"))
.compare(MessengerRNA(chr1_900_1100_1200_1300_plus, 1050, 1240, None, None)) > 0)
}
}
| pamelarussell/sgxlib | src/test/scala/testfeature/FeatureCompareSuite.scala | Scala | mit | 27,047 |
package org.jetbrains.plugins.scala
package codeInspection
package fileNameInspection
import collection.mutable.ArrayBuffer
import lang.psi.api.ScalaFile
import com.intellij.codeInspection._
import com.intellij.psi.PsiFile
import java.lang.String
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScObject, ScTypeDefinition}
import com.intellij.lang.injection.InjectedLanguageManager
import extensions.toPsiNamedElementExt
import console.ScalaLanguageConsoleView
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.util.IntentionAvailabilityChecker
/**
 * Inspects a Scala file and reports a top-level type definition whose name
 * does not match the file name, offering quick fixes that rename either the
 * class or the file.
 *
 * User: Alexander Podkhalyuzin
 * Date: 02.07.2009
 */
class ScalaFileNameInspection extends LocalInspectionTool {
  override def isEnabledByDefault: Boolean = true

  override def getID: String = "ScalaFileName"

  override def checkFile(file: PsiFile, manager: InspectionManager, isOnTheFly: Boolean): Array[ProblemDescriptor] = {
    // Only ordinary, non-injected Scala files with inspections enabled qualify.
    if (!file.isInstanceOf[ScalaFile] ||
            InjectedLanguageManager.getInstance(file.getProject).isInjectedFragment(file) ||
            !IntentionAvailabilityChecker.checkInspection(this, file)) return Array.empty
    if (file.getName == ScalaLanguageConsoleView.SCALA_CONSOLE) return Array.empty
    val virtualFile = file.getVirtualFile
    if (virtualFile == null) return Array.empty

    val expectedName = virtualFile.getNameWithoutExtension
    val scalaFile = file.asInstanceOf[ScalaFile]
    if (scalaFile.isScriptFile() || scalaFile.isWorksheetFile) return Array.empty

    val definitions = scalaFile.typeDefinitions
    // Files declaring several top-level types are deliberately left alone.
    if (definitions.length > 1) return Array.empty

    // The name is acceptable when a definition matches the file name, or when
    // the file is "package.scala" holding a package object.
    val nameMatches = definitions.exists {
      case o: ScObject if file.name == "package.scala" && o.isPackageObject => true
      case clazz => ScalaPsiUtil.memberNamesEquals(clazz.name, expectedName)
    }
    if (nameMatches) return Array.empty

    val problems = new ArrayBuffer[ProblemDescriptor]
    for (scalaClass: ScTypeDefinition <- definitions) {
      problems += manager.createProblemDescriptor(scalaClass.nameId, "Class doesn't correspond to file name",
        Array[LocalQuickFix](new ScalaRenameClassQuickFix(scalaClass, expectedName),
          new ScalaRenameFileQuickFix(scalaFile, scalaClass.name + ".scala")), ProblemHighlightType.GENERIC_ERROR_OR_WARNING)
    }
    problems.toArray
  }
} | consulo/consulo-scala | src/org/jetbrains/plugins/scala/codeInspection/fileNameInspection/ScalaFileNameInspection.scala | Scala | apache-2.0 | 2,429 |
package org.scalamu.core.process
import org.scalamu.core.configuration.Derivable
/** Configuration handed to a mutation-analysis child process.
  *
  * @param timeoutFactor multiplier applied to measured test time when
  *                      computing a per-test timeout
  * @param timeoutConst  constant (ms, presumably -- TODO confirm units)
  *                      added on top of the scaled timeout
  * @param verbose       enables verbose process output
  */
final case class MutationAnalysisProcessConfig(
  timeoutFactor: Double,
  timeoutConst: Long,
  verbose: Boolean = false
)
object MutationAnalysisProcessConfig {
  // Derives a process config from the global configuration by copying the
  // timeout and verbosity settings.
  implicit val runnerConfigDerivable: Derivable[MutationAnalysisProcessConfig] = config =>
    MutationAnalysisProcessConfig(
      config.timeoutFactor,
      config.timeoutConst,
      config.verbose
    )
}
| sugakandrey/scalamu | core/src/main/scala/org/scalamu/core/process/MutationAnalysisProcessConfig.scala | Scala | gpl-3.0 | 455 |
import sbt._
import sbt.Keys._
/** Centralized dependency declarations for the sbt build. */
object Dependencies {
  // Pinned library versions.
  val jacksonV = "2.9.0"
  val serviceTestV = "0.16-56c9393"

  // Transitive workbench artifacts that must not be pulled in.
  val workbenchExclusions = Seq(
    ExclusionRule(organization = "org.broadinstitute.dsde.workbench", name = s"workbench-metrics_2.12")
  )

  // Test-scoped workbench-service-test, using its "tests" classifier jar.
  val workbenchServiceTest: ModuleID = "org.broadinstitute.dsde.workbench" %% "workbench-service-test" % serviceTestV % "test" classifier "tests" excludeAll(workbenchExclusions:_*)

  val rootDependencies = Seq(
    // proactively pull in latest versions of Jackson libs, instead of relying on the versions
    // specified as transitive dependencies, due to OWASP DependencyCheck warnings for earlier versions.
    "com.fasterxml.jackson.core" % "jackson-annotations" % jacksonV,
    "com.fasterxml.jackson.core" % "jackson-databind" % jacksonV,
    "com.fasterxml.jackson.core" % "jackson-core" % jacksonV,
    "com.fasterxml.jackson.module" % "jackson-module-scala_2.12" % jacksonV,
    "org.scalatest" %% "scalatest" % "3.0.5" % "test",
    "org.seleniumhq.selenium" % "selenium-java" % "3.141.59" % "test",
    "org.slf4j" % "slf4j-api" % "1.7.25" % "test",
    "ch.qos.logback" % "logback-classic" % "1.2.3",
    "com.typesafe.scala-logging" %% "scala-logging" % "3.9.0",
    workbenchServiceTest,
    // required by workbenchGoogle
    "com.typesafe.akka" %% "akka-http-spray-json" % "10.0.6" % "provided"
  )
}
| broadinstitute/firecloud-ui | automation/project/Dependencies.scala | Scala | bsd-3-clause | 1,397 |
package hr.element.ocd_transform
import hr.element.geom._
import scala.math._
// Manual smoke test: runs the intrusion calculation on a near-unit square
// (corners (-0.05,-0.05), (1.05,-0.05), (1.05,1.05)).
object A extends App {
  Intrusion.calc(Parallelogram(Point(-0.05, -0.05), Point(1.05, -0.05), Point(1.05,1.05)))
}
/** Computes where a parallelogram's outline intersects the integer grid. */
object Intrusion {

  private object LineData {
    /** Groups crossings by their integer grid index, de-duplicating and
      * sorting the crossing coordinates for each index. */
    def order(crosses: IndexedSeq[(Int, Double)]) =
      crosses.groupBy(_._1).mapValues(
        _.map(_._2)
          .distinct
          .sortBy(identity)
      )
  }

  /** Precomputed grid crossings for a single outline segment. */
  class LineData(line: Line) {
    val box = line.box
    val Point(x1, y1) = box.pMin
    val Point(x2, y2) = box.pMax

    // Integer grid lines spanned by the segment's bounding box, per axis.
    val sX = ceil(x1).toInt
    val sY = ceil(y1).toInt
    val eX = floor(x2).toInt
    val eY = floor(y2).toInt

    // Aspect ratio of the bounding box (w/h); None when the box is flat
    // (zero height), in which case no vertical-grid crossing is computed.
    val fi =
      if (box.dim.h == 0) {
        None
      }
      else {
        Some(box.dim.w / box.dim.h)
      }

    // Crossings with the vertical grid lines x = sX..eX.
    // BUG FIX: the original iterated `sX to eY`, mixing the x-range start
    // with the y-range end; the vertical grid lines run from sX to eX.
    // NOTE(review): the formulas below use `fi` (w/h, i.e. dx/dy) as if it
    // were dy/dx -- confirm the intended slope orientation.
    val cutX = LineData.order(for {
      x <- sX to eX
      nzFi <- fi
    } yield {
      x -> (y1 + nzFi * (x - x1))
    })

    // Crossings with the horizontal grid lines y = sY..eY.
    val cutY = LineData.order(for {
      y <- sY to eY
    } yield {
      y -> (fi match {
        case Some(nzFi) =>
          x1 + (y - y1) / nzFi
        case None =>
          x1
      })
    })

    // Integer lattice points lying between the first and last crossing on
    // each cut grid line.
    val middles = (
      (for {
        (x, cY) <- cutX if cY.size > 1
        sy = ceil(cY.head).toInt
        ey = floor(cY.last).toInt
        y <- sy to ey
      } yield x -> y) ++
      (for {
        (y, cX) <- cutY if cX.size > 1
        sx = ceil(cX.head).toInt
        ex = floor(cX.last).toInt
        x <- sx to ex
      } yield x -> y)
    ).toIndexedSeq[(Int, Int)].distinct.sortBy(identity)
  }

  /** Collects every point of interest along the parallelogram's outline:
    * the lattice points between crossings plus the crossings with both grid
    * orientations. Returns the collected points (the original computed and
    * discarded them; the sole caller ignores the result, so returning it is
    * backward compatible).
    */
  def calc(par: Parallelogram) = {
    val lineData = par.lines map(new LineData(_))
    // BUG FIX: the original was missing a `++` between the cutX and cutY
    // comprehensions, so the cutX crossings were dropped (the parenthesized
    // cutY comprehension parsed as an application of the previous line).
    val dots = lineData.flatMap { lD =>
      (for ( (x, y) <- lD.middles) yield x.toDouble -> y.toDouble) ++
      (for ( (x, cY) <- lD.cutX; y <- cY ) yield x.toDouble -> y) ++
      (for ( (y, cX) <- lD.cutY; x <- cX ) yield x -> y.toDouble)
    }
    dots
  }
}
| melezov/ocd-transform | src/main/scala/hr/element/ocd_transform/IntrusionCalc.scala | Scala | bsd-3-clause | 1,930 |
package algebra
package ring
/**
 * A Boolean rng is a rng whose multiplication is idempotent, that is
 * `a⋅a = a` for all elements ''a''. This property also implies `a + a = 0`
 * for all ''a'', and `a⋅b = b⋅a` (commutativity of multiplication).
 *
 * Every `BoolRng` is equivalent to `algebra.lattice.GenBool`.
 * See `algebra.lattice.GenBoolFromBoolRng` for details.
 */
trait BoolRng[A] extends Any with CommutativeRng[A] { self =>
  // In a Boolean rng each element is its own additive inverse (a + a = 0),
  // so negation is the identity.
  override final def negate(x: A): A = x
}
object BoolRng extends AdditiveGroupFunctions[BoolRng] with MultiplicativeSemigroupFunctions[BoolRng] {
  /** Summons the implicit `BoolRng[A]` instance in scope. */
  @inline final def apply[A](implicit r: BoolRng[A]): BoolRng[A] = r
}
| tixxit/algebra | core/src/main/scala/algebra/ring/BoolRng.scala | Scala | mit | 661 |
import net.liftweb.json._
import net.liftweb.json.Serialization.{read, write}
import net.liftweb.json.JsonDSL._
import java.io._
import java.util.Date
/** Benchmarks Java serialization against lift-json (de)serialization of the
  * same object graph under each type-hint strategy, plus raw JValue
  * rendering. Run as a standalone program.
  */
object Serbench extends Benchmark {
  // Classes registered with the short/full type-hint strategies below.
  val classes = List(classOf[Project], classOf[Team], classOf[Employee], classOf[Language])

  // Fixture object graph used by every benchmark.
  val project = Project("test",
    new Date,
    Some(Language("Scala", 2.75)),
    List(
      Team("QA", List(Employee("John Doe", 5), Employee("Mike", 3))),
      Team("Impl", List(Employee("Mark", 4), Employee("Mary", 5), Employee("Nick Noob", 1)))))

  // The same fixture expressed directly as a lift-json AST (for JValueBench).
  val jvalueProject = {
    ("name" -> "test") ~
    ("startDate" -> new Date().getTime) ~
    ("lang" ->
      (("name" -> "Scala") ~
       ("version" -> 2.75))) ~
    ("teams" -> List(
      ("role" -> "QA") ~
      ("members" -> List(("name" -> "John Doe") ~ ("experience" -> 5),
                         ("name" -> "Mike") ~ ("experience" -> 3))),
      ("role" -> "Impl") ~
      ("members" -> List(("name" -> "Mark") ~ ("experience" -> 4),
                         ("name" -> "Mary") ~ ("experience" -> 5),
                         ("name" -> "Nick Noob") ~ ("experience" -> 1)))
    ))
  }

  // A large AST: the fixture concatenated with itself 100 times (lazy so it
  // is only built when the JValue benchmark runs).
  lazy val bigJValue = {
    def appendN(json: JObject, count: Int): JObject = {
      if (count == 0) json else json ~ appendN(json, count - 1)
    }
    appendN(jvalueProject, 100)
  }

  // Runs each benchmark suite once per type-hint configuration.
  def main(args: Array[String]) = {
    println("** No type hints")
    new Bench()(Serialization.formats(NoTypeHints))
    println("** Short type hints")
    new Bench()(Serialization.formats(ShortTypeHints(classes)))
    println("** Full type hints")
    new Bench()(Serialization.formats(FullTypeHints(classes)))
    println("** JValue Serialization")
    new JValueBench()
  }

  // Benchmarks run from the constructor body; `benchmark` delegates timing
  // to the inherited Benchmark.run.
  class Bench(implicit formats: Formats) {
    benchmark("Java serialization (full)") { deserialize(serialize(project)) }
    benchmark("lift-json (full)") { read[Project](write(project)) }
    benchmark("Java serialization (ser)") { serialize(project) }
    benchmark("lift-json (ser)") { write(project) }
    // Pre-serialized payloads so the deser benchmarks measure only decoding.
    val ser1 = serialize(project)
    val ser2 = write(project)
    benchmark("Java serialization (deser)") { deserialize(ser1) }
    benchmark("lift-json (deser)") { read[Project](ser2) }
  }

  // Compares the two JValue rendering code paths.
  class JValueBench {
    benchmark("lift-json (ser compact(render(jvalue))") { compact(render(bigJValue)) }
    benchmark("lift-json (ser compactRender(jvalue)") { compactRender(bigJValue) }
  }

  // 20000 warm-up iterations, 20000 measured iterations.
  def benchmark(name: String)(f: => Any) = run(name, 20000, 20000)(f)

  /** Java-deserializes a Project from the given bytes. */
  def deserialize(array: Array[Byte]) =
    new ObjectInputStream(new ByteArrayInputStream(array)).readObject.asInstanceOf[Project]

  /** Java-serializes the given Project to a byte array. */
  def serialize(project: Project) = {
    val baos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(baos)
    oos.writeObject(project)
    baos.toByteArray
  }

  case class Project(name: String, startDate: Date, lang: Option[Language], teams: List[Team]) extends Serializable
  case class Language(name: String, version: Double) extends Serializable
  case class Team(role: String, members: List[Employee]) extends Serializable
  case class Employee(name: String, experience: Int) extends Serializable
}
| lzpfmh/framework-2 | core/json/benchmark/Serbench.scala | Scala | apache-2.0 | 3,121 |
package play.boilerplate.api.client.dsl
import scala.concurrent.{ExecutionContext, Future}
/** Minimal circuit-breaker abstraction: implementations may guard the given
  * asynchronous computation (for instance, failing fast while a backend is
  * unhealthy) before running it.
  */
trait CircuitBreaker {
  def withCircuitBreaker[T](block: => Future[T])(implicit ec: ExecutionContext): Future[T]
}

object CircuitBreaker {
  /** A no-op breaker: applies no protection and simply runs the computation. */
  object None extends CircuitBreaker {
    override def withCircuitBreaker[T](block: => Future[T])(implicit ec: ExecutionContext): Future[T] = {
      // Evaluate the by-name computation exactly once, unguarded.
      block
    }
  }
}
| Romastyi/sbt-play-boilerplate | api-client/core/src/main/scala/play/boilerplate/api/client/dsl/CircuitBreaker.scala | Scala | apache-2.0 | 389 |
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.bottlesvc
import akka.actor.{ActorLogging, Actor, Props}
import spray.can.Http
import spray.routing._
import Directives._
import spray.http._
import MediaTypes._
import spray.httpx.encoding.Gzip
import org.squbs.unicomplex.RouteDefinition
import org.squbs.unicomplex.MediaTypeExt._
import org.squbs.bottlemsgs._
// this class defines our service behavior independently from the service actor
class BottleSvc extends RouteDefinition {
  def route =
    // GET /hello: static greeting page.
    path("hello") {
      get {
        respondWithMediaType(`text/html`) { // XML is marshalled to `text/xml` by default, so we simply override here
          complete {
            <html>
              <body>
                <h1>Say hello to <i>spray-routing</i> on <i>spray-can</i>!</h1>
              </body>
            </html>
          }
        }
      }
    } ~
    // Any *.html path: serve the matching classpath resource, gzip-encoded.
    path(""".*\\.html""".r) { name =>
      encodeResponse(Gzip) {
        getFromResource("html/" + name)
      }
    } ~
    // GET /events: spawn a Mediator actor that streams server-sent events
    // directly to this request's responder.
    path("events") {
      get { ctx =>
        context.actorOf(Props(classOf[Mediator] ,ctx))
      }
    }
}
/** Streams lyrics from the bottlecube actor to one HTTP client as
  * server-sent events, then stops itself when the stream ends or the
  * connection closes.
  */
class Mediator(ctx: RequestContext) extends Actor with ActorLogging {
  // Ask the lyrics actor to start publishing events to us.
  context.actorSelection("/user/bottlecube/lyrics") ! StartEvents

  // Open the chunked SSE response immediately with an initial event.
  val responseStart = HttpResponse(entity = HttpEntity(`text/event-stream`, toSSE("Starting")))
  ctx.responder ! ChunkedResponseStart(responseStart)

  // Formats a message as an SSE "lyric" event, continuing multi-line
  // payloads with additional "data:" lines as the SSE format requires.
  def toSSE(msg: String) = "event: lyric\\ndata: " + msg.replace("\\n", "\\ndata: ") + "\\n\\n"

  // Terminal SSE event signalling the end of the stream to the client.
  val streamEnd = "event: streamEnd\\ndata: End of stream\\n\\n"

  def receive = {
    case Event(msg) =>
      // Forward each lyric as one SSE chunk.
      val eventMessage = toSSE(msg)
      log.info('\\n' + eventMessage)
      ctx.responder ! MessageChunk(eventMessage)
    case EndEvents =>
      // Emit the terminal event, close the chunked response, and stop.
      log.info('\\n' + streamEnd)
      ctx.responder ! MessageChunk(streamEnd)
      ctx.responder ! ChunkedMessageEnd()
      context.stop(self)
    // Connection closed sent from ctx.responder
    case ev: Http.ConnectionClosed =>
      log.warning("Connection closed, {}", ev)
      context.stop(self)
  }
}
| Harikiranvuyyuru/squbs | samples/bottles/bottlesvc/src/main/scala/org/squbs/bottlesvc/BottleSvc.scala | Scala | apache-2.0 | 2,663 |
package nest.sparkle.loader
import scala.collection.JavaConverters.asScalaBufferConverter
import scala.language.existentials
import scala.reflect.ClassTag
import scala.reflect.runtime.universe._
import scala.util.{Failure, Success, Try}
import com.typesafe.config.Config
import org.joda.time.DateTimeZone
import rx.lang.scala.Observable
import spire.math.Numeric
import spire.implicits._
import nest.sparkle.datastream._
import nest.sparkle.loader.Loader._
import nest.sparkle.measure.{Span, DummySpan}
import nest.sparkle.util.KindCast._
import nest.sparkle.util._
/**
 * A transformer that can reduce a column slice by taking a sum, mean, min, or max.
 *
 * Configuration: a "period" (required) and optional "timezone" define the
 * reduction buckets; a "fields" list maps field names to reduction types
 * ("reduceSum", "reduceMean"/"reduceAverage", "reduceMin", "reduceMax").
 */
class TransformReduceByPeriod(rootConfig: Config, transformerConfig: Config)
  extends LoadingTransformer with Log {

  // Bucketing period and time zone, parsed eagerly so bad configuration
  // fails at construction time.
  val periodWithZone = {
    val timezone = if (transformerConfig.hasPath("timezone")) transformerConfig.getString("timezone") else "UTC"
    val period = Period.parse(transformerConfig.getString("period")) match {
      case Some(x) => x
      case None => throw ConfigurationError(s"invalid period ${transformerConfig.getString("period")}")
    }
    PeriodWithZone(period, DateTimeZone.forID(timezone))
  }

  // Maps field name -> reduction type name; each entry is validated up front
  // (an invalid reduction type throws during construction).
  val fieldReductionTypes : Map[String, String] = transformerConfig.getConfigList("fields").asScala.toSeq.map { fieldConfig =>
    val reductionType = fieldConfig.getString("reduction-type")
    // test if reduction type is valid by using an Int
    numericReduction(reductionType, typeTag[Int]) match {
      case Success(_) => fieldConfig.getString("field") -> reductionType
      case Failure(err) => throw err
    }
  }.toMap

  /** Reduces every configured slice in the block by the configured period.
    * Slices whose field has no configured reduction are passed through
    * unchanged (with a warning). Returns the first failure, if any.
    */
  override def transform(sourceBlock: TaggedBlock): Try[TaggedBlock] = {
    val transformedBlock =
      sourceBlock.iterator.map { eventSlice: TaggedSlice[_, _] =>
        /** name the existential _ key and value type parameters in the eventSlice as T and U so that we can keep them constant */
        def withFixedType[T, U](): Try[TaggedSlice[T, U]] = {
          val keyType: TypeTag[T] = castKind(eventSlice.keyType)
          val valueType: TypeTag[U] = castKind(eventSlice.valueType)
          val dataArray = eventSlice.dataArray.asInstanceOf[DataArray[T, U]]
          // Field name is the last path component of the column path.
          val field = eventSlice.columnPath.substring(eventSlice.columnPath.lastIndexOf('/') + 1)

          val reducedDataArray =
            if (fieldReductionTypes.contains(field)) {
              for {
                reduction <- numericReduction[U](fieldReductionTypes.get(field).get, valueType)
                reducedDataArray <- reduce(keyType, valueType, dataArray, reduction)(keyType, valueType)
              } yield {
                reducedDataArray
              }
            } else {
              log.warn(s"unknown field $field, skipping reduce")
              Success(dataArray)
            }

          reducedDataArray.map(TaggedSlice[T, U](eventSlice.columnPath, _)(keyType, valueType))
        }
        withFixedType()
      }

    TryUtil.firstFailureOrElseSuccessVector(transformedBlock)
  }

  /**
   * Returns a Reduction per the specified type ("reduceSum", "reduceMean", etc) and numeric TypeTag.
   */
  private def numericReduction[U](reductionType: String, valueType: TypeTag[_]): Try[IncrementalReduction[U]] = {
    RecoverNumeric.tryNumeric[U](valueType) match {
      case Success(valueNumeric) => {
        reductionType match {
          case "reduceSum" => Success(ReduceSum()(valueNumeric))
          case "reduceMean" => Success(ReduceMean()(valueNumeric))
          case "reduceAverage" => Success(ReduceMean()(valueNumeric))
          case "reduceMin" => Success(ReduceMin()(valueNumeric))
          case "reduceMax" => Success(ReduceMax()(valueNumeric))
          case _ => Failure(ConfigurationError(s"invalid reduction type $reductionType"))
        }
      }
      // Non-numeric value types cannot be reduced; wrap the cause.
      case Failure(err) => Failure(ReduceByPeriodTransformException(err))
    }
  }

  /**
   * Reduces the given DataArray, removing resulting None values and the corresponding keys.
   * Fails when the key type is not numeric.
   */
  private def reduce[T: TypeTag, U: TypeTag]
      ( keyType: TypeTag[T],
        valueType: TypeTag[U],
        dataArray: DataArray[T, U],
        reduction: IncrementalReduction[U] )
      : Try[DataArray[T, U]] = {

    RecoverNumeric.tryNumeric[T](keyType).flatMap { keyNumeric =>
      implicit val keyClassTag: ClassTag[T] = ReflectionUtil.classTag[T](keyType)
      implicit val valueClassTag: ClassTag[U] = ReflectionUtil.classTag[U](valueType)
      val reducedDataArray = reduceByPeriod(dataArray, periodWithZone, reduction)(keyType,
        keyNumeric, valueType, DummySpan)

      // we shouldn't have any None values since DataStreamPeriodReduction.reduceByPeriod
      // is called with emitEmpties = false
      val flattenedPairs = reducedDataArray.mapToArray { (key, value) =>
        (key, value.get)
      }
      Success(DataArray.fromPairs(flattenedPairs))
    }
  }

  // TODO: figure out the right API for the generic DataArray/DataStream, so we don't need need this method
  /**
   * Convenience method to reduce a DataArray.
   * Sorts the data by key first (the DataStream API requires sorted input),
   * then blocks on the resulting stream, which is safe for a fixed DataArray.
   */
  private def reduceByPeriod[K:TypeTag:Numeric, V:TypeTag]
      ( dataArray: DataArray[K, V],
        periodWithZone: PeriodWithZone,
        reduction: IncrementalReduction[V] )
      ( implicit parentSpan: Span )
      : DataArray[K, Option[V]] = {

    implicit val keyClassTag = ReflectionUtil.classTag[K](typeTag[K])
    implicit val valueClassTag = ReflectionUtil.classTag[V](typeTag[V])

    // DataStream's reduceByPeriod API needs the data to be sorted
    val sortedDataArray = dataArray.sortWith { case ((key1,_), (key2,_)) => key1 < key2 }

    val dataStream = DataStream(Observable.from(Seq(sortedDataArray)))
    val periodResult = dataStream.reduceByPeriod(periodWithZone, SoftInterval.empty, reduction, false)
    // it's okay to block here because we're working with a fixed DataArray
    val reducedDataArrays = periodResult.reducedStream.data.toBlocking.toList
    reducedDataArrays.reduce(_ ++ _)
  }
}
/** Indicates there's an issue transforming a block by reducing by period;
  * wraps the underlying cause (e.g. a non-numeric value type). */
case class ReduceByPeriodTransformException(cause: Throwable) extends RuntimeException(cause)
| mighdoll/sparkle | loader/src/main/scala/nest/sparkle/loader/TransformReduceByPeriod.scala | Scala | apache-2.0 | 6,156 |
package fr.laas.fape.acting
import com.sun.org.apache.xpath.internal.operations.VariableSafeAbsRef
import fr.laas.fape.anml.model.AnmlProblem
import fr.laas.fape.anml.model.concrete._
import fr.laas.fape.anml.model.concrete.statements.Persistence
import fr.laas.fape.planning.core.planning.states.PartialPlan
import fr.laas.fape.planning.core.planning.states.modification.ChronicleInsertion
/**
 * Helpers for building planner goals and tasks against a single, shared
 * ANML problem instance.
 *
 * Created by abitmonn on 11/23/16.
 */
object Utils {

  // Shared problem instance; must be set via setProblem before use.
  private var problem : AnmlProblem = null

  /** Loads the ANML problem definition from `file`. Call before any other method. */
  def setProblem(file: String): Unit = {
    problem = new AnmlProblem()
    problem.extendWithAnmlFile(file)
  }

  /** The shared problem; fails if setProblem has not been called. */
  def getProblem = {
    require(problem != null)
    problem
  }

  /**
   * Builds a chronicle insertion asserting that the state variable
   * `svName(args)` persists at `value`, optionally starting no later than
   * `deadline` (encoded as a negative min-delay back to problem start).
   */
  def buildGoal(svName: String, args: List[String], value: String, deadline: Int = -1) = {
    assert(RefCounter.useGlobalCounter)
    val goal = new Chronicle
    val statement = new Persistence(
      problem.stateVariable(svName, args),
      // BUG FIX: `value` was previously ignored and the persistence was
      // always built on the hard-coded instance "true".
      problem.instance(value),
      goal,
      RefCounter.getGlobalCounter)
    goal.addStatement(statement)
    if(deadline > -1) {
      goal.addConstraint(new MinDelayConstraint(statement.start, problem.start, -deadline))
    }
    new ChronicleInsertion(goal)
  }

  /**
   * Builds a chronicle insertion requesting the task "t-name(args)",
   * optionally constrained to end no later than `deadline`.
   */
  def buildTask(name: String, args: List[String], deadline: Int = -1) = {
    assert(RefCounter.useGlobalCounter)
    val goal = new Chronicle
    // NOTE(review): uses problem.refCounter here while buildGoal uses the
    // global counter -- confirm both refer to the same counter.
    val task = new Task("t-"+name, args.map(problem.instance(_)), None, problem.refCounter)
    goal.addTask(task)
    if(deadline > -1) {
      goal.addConstraint(new MinDelayConstraint(task.end, problem.start, -deadline))
    }
    new ChronicleInsertion(goal)
  }

  /** First value of `variable`'s current domain in `plan`. */
  def asString(variable: VarRef, plan: PartialPlan) = {
    plan.domainOf(variable).get(0)
  }
}
| athy/fape | acting/src/main/scala/fr/laas/fape/acting/Utils.scala | Scala | bsd-2-clause | 1,701 |
package Tutorial
import Chisel._
import Node._
import Literal._
import scala.collection.mutable.HashMap
import scala.collection.mutable.ArrayBuffer
/** Global configuration flags for the Gorilla++ generator. */
object GorillaPPConfig {
  // Whether the program-counter feature is enabled.
  val pcOn: Boolean = true
}
| seyedmaysamlavasani/GorillaPP | chisel/KmeansAndMesh/src/Gorilla++Config.scala | Scala | bsd-3-clause | 196 |
/*
* Copyright 2008, Mark Harrah
*
* This file is part of Frostbridge.
*
* Frostbridge is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 2.1 of the License, or
* (at your option) any later version.
*
* Frostbridge is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Frostbridge. If not, see <http://www.gnu.org/licenses/>.
*/
package net.frostbridge.data
/**
 * Parser for single-character tokens. Any string whose length is not exactly
 * one is rejected with None.
 */
abstract class CharacterValue extends BasicParser[Char]
{
	def parse(value: String) =
		if(value.length != 1) None else Some(value.charAt(0))
	def stringify(c: Char) = Some(c.toString)
	def typeDescription = "character"
}
/** Character parser that accepts every character. */
object AnyCharacter extends CharacterValue
{
	def isAllowed(c: Char) = true
	def dataDescription: String = "character"
}
/** Character parser restricted to letters and digits (Unicode-aware, via Character.isLetterOrDigit). */
object AlphanumericCharacter extends CharacterValue
{
	def isAllowed(c: Char) = Character.isLetterOrDigit(c)
	def dataDescription: String = "alphanumeric character"
}
/**
 * Parser for doubles. `parse` returns None for malformed input instead of
 * letting NumberFormatException escape, matching the Option contract already
 * used by CharacterValue.parse in this file.
 */
abstract class DoubleValue extends BasicParser[Double]
{
	def parse(value: String) =
		try { Some(value.toDouble) } catch { case _: NumberFormatException => None }
	def stringify(value: Double) = Some(value.toString)
}
/** Double parser that accepts every value. */
object AnyDouble extends DoubleValue
{
	def isAllowed(value: Double) = true
	def dataDescription = "double"
}
/** Double parser restricted to values inside the supplied range. */
class RangedDouble(range: Ranged[Double]) extends DoubleValue
{
	def isAllowed(value: Double) = range.contains(value)
	def dataDescription = range.description("double")
}
/**
 * Parser for booleans: the case-insensitive literal "true" maps to true,
 * anything else to false.
 */
class BooleanValue extends BasicParser[Boolean]
{
	// equalsIgnoreCase avoids the locale-sensitive behaviour of String.toLowerCase
	// (e.g. "TRUE".toLowerCase is not "true" under the Turkish default locale).
	def parse(value: String) = Some("true".equalsIgnoreCase(value))
	def stringify(b: Boolean) = Some(b.toString)
	def isAllowed(value: Boolean) = true
	def dataDescription = "boolean"
}
/** Shared, stateless default instance. */
object BooleanValue extends BooleanValue
/**
 * Parser for integers. `parse` returns None for malformed input instead of
 * letting NumberFormatException escape, matching the Option contract already
 * used by CharacterValue.parse in this file.
 */
abstract class IntegerValue extends BasicParser[Int]
{
	def parse(value: String) =
		try { Some(value.toInt) } catch { case _: NumberFormatException => None }
	def stringify(value: Int) = Some(value.toString)
	def typeDescription = "integer"
}
/** Integer parser that accepts every value. */
object AnyInteger extends IntegerValue
{
	def isAllowed(value: Int) = true
	def dataDescription = "integer"
}
/** Integer parser restricted to values inside the supplied range. */
class RangedInteger(range: Ranged[Int]) extends IntegerValue
{
	def isAllowed(value: Int) = range.contains(value)
	def dataDescription = range.description("integer")
}
/** Integer parser accepting only values strictly greater than zero. */
object PositiveInteger extends IntegerValue
{
	def isAllowed(value: Int) = value > 0
	def dataDescription = "positive integer"
}
/** Integer parser accepting only values less than or equal to zero. */
object NonPositiveInteger extends IntegerValue
{
	def isAllowed(value: Int) = value <= 0
	def dataDescription = "non-positive integer"
}
/** Integer parser accepting only values strictly less than zero. */
object NegativeInteger extends IntegerValue
{
	def isAllowed(value: Int) = value < 0
	def dataDescription = "negative integer"
}
/** Integer parser accepting only values greater than or equal to zero. */
object NonNegativeInteger extends IntegerValue
{
	def isAllowed(value: Int) = value >= 0
	def dataDescription = "non-negative integer"
}
| yzhhui/frostbridge | src/main/scala/net/frostbridge/data/PrimitiveValues.scala | Scala | lgpl-3.0 | 3,033 |
/*
* Copyright 2009-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package util
import java.util.concurrent.{ConcurrentHashMap => CHash, Callable}
import java.lang.ThreadLocal
import scala.reflect.Manifest
import common._
import xml.NodeSeq
/**
 * A trait that does basic dependency injection: looks up an instance provider
 * by the Manifest (erased-type string) of the requested type.
 */
trait Injector {
  // Full(instance) when a provider for T is registered and succeeds; Empty/Failure otherwise.
  implicit def inject[T](implicit man: Manifest[T]): Box[T]
}
/**
 * An implementation of Injector backed by a concurrent registry keyed on the
 * Manifest's string representation.
 */
trait SimpleInjector extends Injector {
  // Registry of provider functions, keyed by Manifest.toString (so List[Int] and
  // List[String] register under distinct keys despite erasure).
  private val diHash: CHash[String, Function0[_]] = new CHash
  /**
   * Perform the injection for the given type. You can call:
   * inject[Date] or inject[List[Map[String, PaymentThing]]]. The
   * appropriate Manifest will be used to look up the registered provider;
   * provider exceptions are captured via tryo and surface as a Failure.
   */
  implicit def inject[T](implicit man: Manifest[T]): Box[T] =
    (Box !! diHash.get(man.toString)).flatMap(f => Helpers.tryo(f.apply())).asInstanceOf[Box[T]]
  /**
   * Register a function that will inject for the given Manifest.
   * A later registration for the same type replaces the earlier one.
   */
  def registerInjection[T](f: () => T)(implicit man: Manifest[T]) {
    diHash.put(man.toString, f)
  }
  /**
   * Create an object or val that is a subclass of the FactoryMaker to
   * generate factory for a particular class as well as define session and
   * request specific vendors and use doWith to define the vendor just for
   * the scope of the call.
   */
  abstract class Inject[T](_default: Vendor[T])
  (implicit man: Manifest[T]) extends StackableMaker[T] with Vendor[T] {
    // Self-register on construction so inject[T] resolves to this instance.
    registerInjection(this)(man)
    /**
     * The default function for vending an instance
     */
    object default extends PSettableValueHolder[Vendor[T]] {
      private var value = _default
      def get = value
      def is = get
      def set(v: Vendor[T]): Vendor[T] = {
        value = v
        v
      }
    }
    /**
     * Vend an instance
     */
    implicit def vend: T = make openOr default.is.apply()
    /**
     * Make a Box of the instance.
     */
    override implicit def make: Box[T] = super.make or Full(default.is.apply())
  }
}
/**
 * In addition to an Injector, you can have a Maker which will make a given
 * type. The important thing about a Maker is that it's intended to be used
 * as part of a factory that can vend an instance without the vagaries of
 * whether the given class has registered with the injector.
 */
trait Maker[T] {
  // Attempt to produce an instance; Empty/Failure when none can be made.
  implicit def make: Box[T]
}
object Maker {
  // Factory methods lifting plain values, thunks, and Box-wrapped variants into Maker[T].
  def apply[T](value: T): Maker[T] = new Maker[T]{implicit def make: Box[T] = Full(value)}
  def apply[T](func:() => T): Maker[T] = new Maker[T]{implicit def make: Box[T] = Full(func())}
  def apply[T](func: Box[() => T]): Maker[T] = new Maker[T]{implicit def make: Box[T] = func.map(_.apply())}
  // Numbered names (apply1/2/3) avoid overload ambiguity after type erasure.
  def apply1[T](box: Box[T]): Maker[T] = new Maker[T]{implicit def make: Box[T] = box}
  def apply2[T](func: Box[() => Box[T]]): Maker[T] = new Maker[T]{implicit def make: Box[T] = func.flatMap(_.apply())}
  def apply3[T](func: () => Box[T]): Maker[T] = new Maker[T]{implicit def make: Box[T] = func.apply()}
  // Implicit conversions so any of the above shapes can be passed where a Maker[T] is expected.
  implicit def vToMake[T](v: T): Maker[T] = this.apply(v)
  implicit def vToMake[T](v: () => T): Maker[T] = this.apply(v)
  implicit def vToMakeB1[T](v: Box[T]): Maker[T] = this.apply1(v)
  implicit def vToMakeB2[T](v: Box[() => T]): Maker[T] = this.apply(v)
  implicit def vToMakeB3[T](v: Box[() => Box[T]]): Maker[T] = this.apply2(v)
  implicit def vToMakeB4[T](v: () => Box[T]): Maker[T] = this.apply3(v)
}
/**
 * A StackableMaker allows DynamicVar functionality by supplying a Maker or function
 * that will vend an instance during any sub-call on the stack and then
 * restore the implementation. This is valuable for testing.
 */
trait StackableMaker[T] extends Maker[T] {
  // Per-thread stack of overrides; the most recently pushed maker wins.
  private val _stack: ThreadLocal[List[PValueHolder[Maker[T]]]] = new ThreadLocal
  private def stack: List[PValueHolder[Maker[T]]] = _stack.get() match {
    case null => Nil
    case x => x
  }
  def doWith[F](value: T)(f: => F): F =
    doWith(PValueHolder(Maker(value)))(f)
  def doWith[F](vFunc: () => T)(f: => F): F =
    doWith(PValueHolder(Maker(vFunc)))(f)
  // Push an override for the dynamic extent of f; always restored afterwards,
  // even if f throws.
  def doWith[F](addl: PValueHolder[Maker[T]])(f: => F): F = {
    val old = _stack.get()
    _stack.set(addl :: stack)
    try {
      f
    } finally {
      _stack.set(old)
    }
  }
  // First maker in the list that yields Full wins; Empty if none do.
  protected final def find(in: List[PValueHolder[Maker[T]]]): Box[T] = in match {
    case Nil => Empty
    case x :: rest =>
      x.get.make match {
        case Full(v) => Full(v)
        case _ => find(rest)
      }
  }
  implicit def make: Box[T] = find(stack)
}
/**
 * An implementation where you can define the stack of makers.
 */
class MakerStack[T](subMakers: PValueHolder[Maker[T]]*) extends StackableMaker[T] {
  private val _sub: List[PValueHolder[Maker[T]]] = subMakers.toList
  // Thread-local overrides (pushed via doWith) take precedence; otherwise fall
  // back to the fixed sub-makers in declaration order.
  override implicit def make: Box[T] = super.make or find(_sub)
}
/**
 * A Vendor is a Maker that also guarantees that it will return a value
 */
trait Vendor[T] extends Maker[T] with Function0[T] {
  // Always produces an instance (unlike Maker.make, which may be Empty).
  implicit def vend: T
  def apply() = vend
}
/**
 * A bridge from Java to Scala: lets Java code build Vendor instances without
 * dealing with Scala function types.
 */
class VendorJBridge {
  /**
   * Create a Vendor from a Func0
   */
  def vendor[T](f: Func0[T]): Vendor[T] = Vendor(Func.lift(f))
  /**
   * Create a Vendor from a Callable
   */
  def vendor[T](f: Callable[T]): Vendor[T] = Vendor(Func.lift(f))
  /**
   * Create a Vendor from a value
   */
  def vendor[T](v: T): Vendor[T] = Vendor(v)
}
/**
 * A companion to the Vendor trait
 */
object Vendor {
  // NOTE: the thunk form re-evaluates f on every vend/make call.
  def apply[T](f: () => T): Vendor[T] = new Vendor[T] {
    implicit def vend: T = f()
    implicit def make: Box[T] = Full(f())
  }
  // The value form captures f once, by-value, at construction time.
  def apply[T](f: T): Vendor[T] = new Vendor[T] {
    implicit def vend: T = f
    implicit def make: Box[T] = Full(f)
  }
  implicit def valToVender[T](value: T): Vendor[T] = apply(value)
  implicit def funcToVender[T](f: () => T): Vendor[T] = apply(f)
}
case class FormBuilderLocator[T](func: (T, T => Unit) => NodeSeq)(implicit val manifest: Manifest[T])
| pbrant/framework | core/util/src/main/scala/net/liftweb/util/Maker.scala | Scala | apache-2.0 | 6,487 |
package org.storrent
import akka.actor.{ Actor, ActorRef, ActorLogging, Props }
import org.saunter.bencode._
import scala.io.Source.{ fromInputStream }
import java.net.{ URLEncoder, URL }
import akka.util.Timeout
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
object Tracker {
  case class PingTracker

  /** URL-encodes a hex digest by prefixing every byte (two hex digits) with '%'. */
  def hexStringURLEncode(x: String) = { x.grouped(2).toList.map("%" + _).mkString("") }

  /**
   * Reads a .torrent file and bencode-decodes it into its top-level dictionary.
   * The source is now always closed, even if decoding fails.
   */
  def torrentFromBencode(torrentName: String) = {
    val source = scala.io.Source.fromFile(torrentName, "macintosh")
    try {
      val metainfo = source.mkString
      val decodedMeta = BencodeDecoder.decode(metainfo)
      decodedMeta.get.asInstanceOf[Map[String, Any]]
    } finally {
      source.close()
    }
  }

  /**
   * Extracts (fileLength, pieceLength, numPieces) from the torrent "info" dictionary.
   * NOTE(review): the numPieces formula is suspicious — `(remainder) % 2` and the
   * trailing -1 do not match the usual ceil(fileLength / pieceLength). Kept as-is
   * (per the original FIXME) until dependent piece bookkeeping is verified.
   */
  def getTorrentFileVariables(infoMap: Map[String, Any]) = {
    val fileLength = infoMap.get("length").get.asInstanceOf[Long]
    val pieceLength = infoMap.get("piece length").get.asInstanceOf[Long]
    //FIXME: not sure why this -1 is necessary. Think about this.
    val numPieces = fileLength / pieceLength + (fileLength % pieceLength) % 2 - 1
    (fileLength, pieceLength, numPieces)
  }

  /**
   * Builds the tracker announce URL, including the SHA-1 of the bencoded info
   * dictionary. Returns the raw digest bytes together with the complete URL.
   */
  def assembleTrackerUrl(infoMap: Map[String, Any]) = {
    val encodedInfoMap = BencodeEncoder.encode(infoMap)
    val md = java.security.MessageDigest.getInstance("SHA-1")
    val infoSHABytes = md.digest(encodedInfoMap.getBytes).map(0xFF & _)
    val infoSHA = infoSHABytes.map { "%02x".format(_) }.foldLeft("") { _ + _ } //taken from Play
    val infoSHAEncoded = hexStringURLEncode(infoSHA)
    val params = Map("port" -> "63211", "uploaded" -> "0", "downloaded" -> "0", "left" -> "1277987")
    // Explicit UTF-8: the single-argument URLEncoder.encode overload is deprecated
    // and depends on the platform default charset.
    val encodedParams = (for ((k, v) <- params) yield URLEncoder.encode(k, "UTF-8") + "=" + URLEncoder.encode(v, "UTF-8")).mkString("&")
    val infoSHAParam = s"info_hash=${infoSHAEncoded}"
    val peerIdParam = s"peer_id=${infoSHAEncoded}" //FIXME: peer id should obviously not be the same as our hash
    val allParams = s"?${infoSHAParam}&${peerIdParam}&${encodedParams}"
    val completeUrl = "http://thomasballinger.com:6969/announce" + allParams
    (infoSHABytes, completeUrl)
  }

  /** Decodes the torrent metadata and derives all values needed to contact the tracker. */
  def decodeTorrentFile(metaMap: Map[String, Any]) = {
    // this is a hack to get around type erasure warnings. It seems that the correct fix is to use the Manifest in the bencode library
    // or deconstruct these
    val infoMap = metaMap.get("info").get.asInstanceOf[Map[String, Any]]
    val (infoSHABytes, completeUrl) = assembleTrackerUrl(infoMap)
    val (fileLength, pieceLength, numPieces) = getTorrentFileVariables(infoMap)
    (infoSHABytes, fileLength, pieceLength, numPieces, completeUrl)
  }

  /**
   * Queries the tracker and returns (peers, interval). The connection stream is
   * now closed after reading (it previously leaked).
   */
  def getTrackerResponse(completeUrl: String) = {
    val url = new URL(completeUrl)
    val stream = url.openStream
    // NOTE(review): joins lines with the literal two-character sequence \n, as in
    // the original source — confirm a real newline is not required by the decoder.
    val trackerResponse =
      try { fromInputStream(stream, "macintosh").getLines.mkString("\\n") } finally { stream.close() }
    val someTrackerResponse = BencodeDecoder.decode(trackerResponse).get.asInstanceOf[Map[String, Any]]
    val peers = someTrackerResponse.get("peers").get.asInstanceOf[String]
    val interval = someTrackerResponse.get("interval").get.asInstanceOf[Long]
    (peers, interval)
  }
}
// Actor that periodically announces to the tracker and forwards the decoded
// torrent/peer information to the torrent manager.
class Tracker(torrentName: String, torrentManager: ActorRef) extends Actor with ActorLogging {
  import Tracker._
  // Fire the first ping immediately; each response re-schedules the next ping
  // using the interval the tracker returns.
  var tick = context.system.scheduler.scheduleOnce(0.seconds, self, PingTracker)
  def receive = {
    case PingTracker =>
      val (infoSHABytes, fileLength, pieceLength, numPieces, completeUrl) = decodeTorrentFile(torrentFromBencode(torrentName))
      val (peers, interval) = getTrackerResponse(completeUrl)
      torrentManager ! Torrent.TorrentInfo(peers, infoSHABytes, fileLength, pieceLength, numPieces)
      tick = context.system.scheduler.scheduleOnce(interval.seconds, self, PingTracker)
  }
  // Cancel any pending ping so the scheduler does not deliver to a dead actor.
  override def postStop(): Unit = tick.cancel
}
| bowlofstew/storrent | src/main/scala/Tracker.scala | Scala | apache-2.0 | 3,730 |
package pl.touk.nussknacker.engine.spel.internal
import java.lang.reflect.{Method, Modifier}
import java.util.Optional
import org.apache.commons.lang3.ClassUtils
import org.springframework.expression.spel.support.ReflectivePropertyAccessor
import org.springframework.expression.{EvaluationContext, PropertyAccessor, TypedValue}
import pl.touk.nussknacker.engine.api.dict.DictInstance
import pl.touk.nussknacker.engine.api.exception.NonTransientException
import pl.touk.nussknacker.engine.api.typed.TypedMap
import scala.collection.concurrent.TrieMap
import scala.concurrent.duration._
// SpEL PropertyAccessor implementations used by Nussknacker's expression
// evaluation. Order in configured() matters: earlier accessors win.
object propertyAccessors {
  def configured(): Seq[PropertyAccessor] = {
    Seq(
      new ReflectivePropertyAccessor(),
      NullPropertyAccessor, //must come before other non-standard ones
      ScalaOptionOrNullPropertyAccessor, // must be before scalaPropertyAccessor
      JavaOptionalOrNullPropertyAccessor,
      NoParamMethodPropertyAccessor,
      PrimitiveOrWrappersPropertyAccessor,
      StaticPropertyAccessor,
      MapPropertyAccessor,
      TypedDictInstancePropertyAccessor,
      // it can add performance overhead so it will be better to keep it on the bottom
      MapLikePropertyAccessor
    )
  }
  // Turns any property access on a null target into a descriptive NonTransientException
  // instead of a NullPointerException deep inside SpEL.
  object NullPropertyAccessor extends PropertyAccessor with ReadOnly {
    override def getSpecificTargetClasses: Array[Class[_]] = null
    override def canRead(context: EvaluationContext, target: Any, name: String): Boolean = target == null
    override def read(context: EvaluationContext, target: Any, name: String): TypedValue =
      //can we extract anything else here?
      throw NonTransientException(name, s"Cannot invoke method/property $name on null object")
  }
  /* PropertyAccessor for methods without parameters - e.g. parameters in case classes
     TODO: is it ok to treat all methods without parameters as properties?
     We have to handle Primitives/Wrappers differently, as they have problems with bytecode generation (@see PrimitiveOrWrappersPropertyAccessor)
     This one is a bit tricky. We extend ReflectivePropertyAccessor, as it's the only sensible way to make it compilable,
     however it's not so easy to extend and in interpreted mode we skip original implementation
   */
  object NoParamMethodPropertyAccessor extends ReflectivePropertyAccessor with ReadOnly with Caching {
    override def findGetterForProperty(propertyName: String, clazz: Class[_], mustBeStatic: Boolean): Method = {
      findMethodFromClass(propertyName, clazz).orNull
    }
    override protected def reallyFindMethod(name: String, target: Class[_]) : Option[Method] = {
      target.getMethods.find(m => !ClassUtils.isPrimitiveOrWrapper(target) && m.getParameterCount == 0 && m.getName == name)
    }
    override protected def invokeMethod(propertyName: String, method: Method, target: Any, context: EvaluationContext): AnyRef = {
      method.invoke(target)
    }
    override def getSpecificTargetClasses: Array[Class[_]] = null
  }
  //Spring bytecode generation fails when we try to invoke methods on primitives, so we
  //*do not* extend ReflectivePropertyAccessor and we force interpreted mode
  //TODO: figure out how to make bytecode generation work also in this case
  object PrimitiveOrWrappersPropertyAccessor extends PropertyAccessor with ReadOnly with Caching {
    override def getSpecificTargetClasses: Array[Class[_]] = null
    override protected def invokeMethod(propertyName: String, method: Method, target: Any, context: EvaluationContext): Any
      = method.invoke(target)
    override protected def reallyFindMethod(name: String, target: Class[_]): Option[Method] = {
      target.getMethods.find(m => ClassUtils.isPrimitiveOrWrapper(target) && m.getParameterCount == 0 && m.getName == name)
    }
  }
  // Resolves zero-argument static methods when the expression target is a Class.
  object StaticPropertyAccessor extends PropertyAccessor with ReadOnly with StaticMethodCaching {
    override protected def reallyFindMethod(name: String, target: Class[_]): Option[Method] = {
      target.asInstanceOf[Class[_]].getMethods.find(m =>
        m.getParameterCount == 0 && m.getName == name && Modifier.isStatic(m.getModifiers)
      )
    }
    override protected def invokeMethod(propertyName: String, method: Method, target: Any, context: EvaluationContext): Any = {
      method.invoke(target)
    }
    override def getSpecificTargetClasses: Array[Class[_]] = null
  }
  // TODO: handle methods with multiple args or at least validate that they can't be called
  // - see test for similar case for Futures: "usage of methods with some argument returning future"
  object ScalaOptionOrNullPropertyAccessor extends PropertyAccessor with ReadOnly with Caching {
    override protected def reallyFindMethod(name: String, target: Class[_]) : Option[Method] = {
      target.getMethods.find(m => m.getParameterCount == 0 && m.getName == name && classOf[Option[_]].isAssignableFrom(m.getReturnType))
    }
    // Unwraps scala.Option: Some(v) -> v, None -> null (SpEL has no Option notion).
    override protected def invokeMethod(propertyName: String, method: Method, target: Any, context: EvaluationContext): Any = {
      method.invoke(target).asInstanceOf[Option[Any]].orNull
    }
    override def getSpecificTargetClasses: Array[Class[_]] = null
  }
  // TODO: handle methods with multiple args or at least validate that they can't be called
  // - see test for similar case for Futures: "usage of methods with some argument returning future"
  object JavaOptionalOrNullPropertyAccessor extends PropertyAccessor with ReadOnly with Caching {
    override protected def reallyFindMethod(name: String, target: Class[_]) : Option[Method] = {
      target.getMethods.find(m => m.getParameterCount == 0 && m.getName == name && classOf[Optional[_]].isAssignableFrom(m.getReturnType))
    }
    // Unwraps java.util.Optional: present -> value, empty -> null.
    override protected def invokeMethod(propertyName: String, method: Method, target: Any, context: EvaluationContext): Any = {
      method.invoke(target).asInstanceOf[Optional[Any]].orElse(null)
    }
    override def getSpecificTargetClasses: Array[Class[_]] = null
  }
  object MapPropertyAccessor extends PropertyAccessor with ReadOnly {
    // For normal Maps, we always return true to have the same behaviour for missing key nad null value
    // For TypedMaps, we want to distinguish both cases and in first one, throw an exception
    override def canRead(context: EvaluationContext, target: scala.Any, name: String): Boolean =
      !target.isInstanceOf[TypedMap] || target.asInstanceOf[TypedMap].containsKey(name)
    override def read(context: EvaluationContext, target: scala.Any, name: String) =
      new TypedValue(target.asInstanceOf[java.util.Map[_, _]].get(name))
    override def getSpecificTargetClasses: Array[Class[_]] = Array(classOf[java.util.Map[_, _]])
  }
  object TypedDictInstancePropertyAccessor extends PropertyAccessor with ReadOnly {
    //in theory this always happends, because we typed it properly ;)
    override def canRead(context: EvaluationContext, target: scala.Any, key: String) =
      true
    // we already replaced dict's label with keys so we can just return value based on key
    override def read(context: EvaluationContext, target: scala.Any, key: String) =
      new TypedValue(target.asInstanceOf[DictInstance].value(key))
    override def getSpecificTargetClasses: Array[Class[_]] = Array(classOf[DictInstance])
  }
  // mainly for avro's GenericRecord purpose
  object MapLikePropertyAccessor extends PropertyAccessor with Caching with ReadOnly {
    override protected def invokeMethod(propertyName: String, method: Method, target: Any, context: EvaluationContext): Any = {
      method.invoke(target, propertyName)
    }
    // Matches any type exposing get(String) — e.g. Avro GenericRecord.
    override protected def reallyFindMethod(name: String, target: Class[_]): Option[Method] = {
      target.getMethods.find(m => m.getName == "get" && (m.getParameterTypes sameElements Array(classOf[String])))
    }
    override def getSpecificTargetClasses: Array[Class[_]] = null
  }
  trait Caching extends CachingBase { self: PropertyAccessor =>
    override def canRead(context: EvaluationContext, target: scala.Any, name: String): Boolean =
      !target.isInstanceOf[Class[_]] && findMethod(name, target).isDefined
    override protected def extractClassFromTarget(target: Any): Option[Class[_]] =
      Option(target).map(_.getClass)
  }
  trait StaticMethodCaching extends CachingBase { self: PropertyAccessor =>
    override def canRead(context: EvaluationContext, target: scala.Any, name: String): Boolean =
      target.isInstanceOf[Class[_]] && findMethod(name, target).isDefined
    override protected def extractClassFromTarget(target: Any): Option[Class[_]] = Option(target).map(_.asInstanceOf[Class[_]])
  }
  // Shared reflection-lookup machinery with a (name, class) -> Method memo cache.
  trait CachingBase { self: PropertyAccessor =>
    private val methodsCache = new TrieMap[(String, Class[_]), Option[Method]]()
    override def read(context: EvaluationContext, target: scala.Any, name: String): TypedValue =
      findMethod(name, target)
        .map { method =>
          new TypedValue(invokeMethod(name, method, target, context))
        }
        .getOrElse(throw new IllegalAccessException("Property is not readable"))
    protected def findMethod(name: String, target: Any): Option[Method] = {
      //this should *not* happen as we have NullPropertyAccessor
      val targetClass = extractClassFromTarget(target).getOrElse(throw new IllegalArgumentException(s"Null target for $name"))
      findMethodFromClass(name, targetClass)
    }
    protected def findMethodFromClass(name: String, targetClass: Class[_]): Option[Method] = {
      methodsCache.getOrElseUpdate((name, targetClass), reallyFindMethod(name, targetClass))
    }
    protected def extractClassFromTarget(target: Any): Option[Class[_]]
    protected def invokeMethod(propertyName: String, method: Method, target: Any, context: EvaluationContext): Any
    protected def reallyFindMethod(name: String, target: Class[_]) : Option[Method]
  }
  trait ReadOnly { self: PropertyAccessor =>
    override def write(context: EvaluationContext, target: scala.Any, name: String, newValue: scala.Any) =
      throw new IllegalAccessException("Property is not writeable")
    override def canWrite(context: EvaluationContext, target: scala.Any, name: String) = false
  }
}
| TouK/nussknacker | interpreter/src/main/scala/pl/touk/nussknacker/engine/spel/internal/propertyAccessors.scala | Scala | apache-2.0 | 10,183 |
package ru.primetalk.typed.ontology
import org.scalatest.FunSuite
import ru.primetalk.typed.ontology.meta.metameta.RecordRepresentation
import meta.metameta.toRecordWrapper
/**
* Test of various ways of handling data with typed ontology.
*/
// Exercises record construction/reading through several ontology representations
// (TypedMap-backed and JSON-backed). Imports are scoped per-test because each
// representation supplies its own conflicting implicits.
class OntologyTest extends FunSuite {
  test("Creating records and reading values"){
    import tmap.TypedMap.TypedMapRecordTypeClassInstance
    import ru.primetalk.typed.ontology.ontology.person
    import tmap.TypedMap.TypedMapRecordTypeClassInstance.schemaBuilderOps
    import tmap.RecordTypeMappings.anyTypeMapping
    val alice = person.empty.updated(person.name, "Alice")
    assertResult(Some("Alice"))(alice.get(person.name))
  }
  test("Creating complex records and reading values"){
    import tmap.TypedMap.TypedMapRecordTypeClassInstance.PropertyIdOps
    import ru.primetalk.typed.ontology.ontology.{address, person}
    import tmap.RecordTypeMappings._
    import tmap.TypedMap.TypedMapRecordTypeClassInstance.schemaBuilderOps
    import tmap.TypedMap.TypedMapRecordTypeClassInstance
    import meta.SimplePropertiesMeta.PropertyId.PropertyIdTypeClassInstance
    val alice = person.record(
      person.name := "Alice",
      person.address := address.record(
        address.postalIndex := "12345",
        address.street := "Blueberry street, 8"
      )
    )
    assertResult(Some("Alice"))(alice.get(person.name))
    assertResult("12345")(alice(person.address).apply(address.postalIndex))
  }
  // Helper traits for tests parameterised by record representation; currently unused
  // apart from the commented-out check below.
  trait AliceData {
    val meta: RecordRepresentation
    import meta.RecordImpl
  }
  trait AliceDataCheck {
    val meta: RecordRepresentation
    import meta.RecordImpl
    import ru.primetalk.typed.ontology.ontology.{address, person}
    import meta.recordSupport
//    def checkAlice(alice: RecordImpl[ontology.Person]) = {
//      assertResult(Some("Alice"))(alice.get(person.name))
//      assertResult("12345")(alice(person.address).apply(address.postalIndex))
//    }
  }
  // NOTE(review): this test only constructs the record lazily (def) and asserts
  // nothing — it effectively verifies compilation of the JSON representation only.
  test("Test data with another data representation"){
    import json._
    import json.RecordTypeMappings.anyTypeMapping
    import json.RecordTypeMappings.mapRecordToJObjectRecord
    import json.JsonConverters._
//    import json.toRecordWrapper
//    import meta.metameta.toRecordWrapper
//    import meta.PropertyIdOps
    import ru.primetalk.typed.ontology.ontology.{address, person}
    import json.JObjectRecord.JObjectRecordTypeClassInstance.schemaBuilderOps
    import json.JObjectRecord.JObjectRecordTypeClassInstance.PropertyIdOps
    import json.JObjectRecord.mapRecordToJObjectRecord
    import json.JObjectRecord.JObjectRecordTypeClassInstance
    def alice = person.record(
      person.name := "Alice",
      person.address := address.record(
        address.postalIndex := "12345",
        address.street := "Blueberry street, 8"
      )
    )
//    import ontology.m
  }
}
| Primetalk/typed-map | typed-ontology/src/test/scala/ru/primetalk/typed/ontology/OntologyTest.scala | Scala | bsd-2-clause | 2,846 |
package play.api.libs.context.mvc
import org.scalactic.{Bad, Every, Good, Or}
import play.api.libs.context.ContextExtractor
import play.api.libs.context.functional.Show
import play.api.libs.context.json._
import play.api.libs.context.show._
import play.api.libs.json.Writes
import play.api.mvc._
import scala.reflect.ClassTag
/**
 * A context extractor that reads Play Server [[Request]]s.
 *
 * @tparam R the type of request required to use this extractor
 * @tparam Ctx the type of context to extract
 * @tparam Errors the type of error(s) that can be returned instead. Note, this is plural
 *                because it is rare to go from unstructured data to structured data without
 *                needing the ability to accumulate all the errors.
 *                I strongly encourage the [[org.scalactic.Every]] type here to wrap your errors.
 */
trait ReadsRequestContext[-R <: RequestHeader, +Ctx, Errors]
  extends ContextExtractor[R, Ctx, Errors] {
  outer =>
  /**
   * Creates a reader that maps the context of the underlying reader using the same source and show for errors.
   */
  def map[NextCtx](f: Ctx => NextCtx): ReadsRequestContext[R, NextCtx, Errors] = {
    new ReadsRequestContext[R, NextCtx, Errors] {
      override def contextSource: RequestContextSource = outer.contextSource
      override def extractOrResponse(request: R): NextCtx Or Result = outer.extractOrResponse(request).map(f)
      override def extractOrErrors(source: R): NextCtx Or Errors = outer.extractOrErrors(source).map(f)
      override def extractOrThrow(source: R): NextCtx = f(outer.extractOrThrow(source))
      override implicit def showErrors: Show[Errors] = outer.showErrors
    }
  }
  /**
   * Creates a reader that reads the request in order to pick the next request reader.
   * Short-circuits: if the outer extraction fails, f is never consulted.
   */
  def flatMap[NextCtx, NewR <: R](f: Ctx => ReadsRequestContext[NewR, NextCtx, Errors]): ReadsRequestContext[NewR, NextCtx, Errors] = {
    new ReadsRequestContext[NewR, NextCtx, Errors] {
      override def contextSource: RequestContextSource = outer.contextSource
      override def extractOrResponse(request: NewR): NextCtx Or Result = {
        outer.extractOrResponse(request) match {
          case Good(ctx) => f(ctx).extractOrResponse(request)
          case Bad(response) => Bad(response)
        }
      }
      override def extractOrErrors(source: NewR): NextCtx Or Errors = {
        outer.extractOrErrors(source) match {
          case Good(ctx) => f(ctx).extractOrErrors(source)
          case Bad(errors) => Bad(errors)
        }
      }
      override def extractOrThrow(source: NewR): NextCtx = {
        f(outer.extractOrThrow(source)).extractOrThrow(source)
      }
      override implicit def showErrors: Show[Errors] = outer.showErrors
    }
  }
  /**
   * The part or parts of the request from which the context is being extracted.
   */
  def contextSource: RequestContextSource
  /**
   * Reads the context from the request or produces an immediate response.
   *
   * Since context should be extracted from the request immediately, we can safely
   * assume that there is always a way to generate a response for a bad request or
   * at least provide some dummy value that satisfies the required context.
   *
   * @note the behavior of this method is not required to be the same as [[extractOrErrors]].
   *       It is conceivable that one would want to recover from the errors in a different manner
   *       than just producing a Result or generating a dummy context.
   */
  def extractOrResponse(request: R): Ctx Or Result
}
object ReadsRequestContext {
  /**
   * A [[ReadsRequestContext]] that extracts the context from the headers or produces a collection of [[HeaderError]].
   */
  type FromHeaders[+Ctx] = ReadsRequestContext[RequestHeader, Ctx, Every[HeaderError]]
  /**
   * Create a common exception for when a [[Request]] extractor fails.
   *
   * @param reader the context reader (used for type information and the context source)
   * @param request the request from which the context could not be extracted
   * @param errors the errors that were encountered when extracting the context
   * @param tag the class tag of the context for exception information
   * @return a [[RequestContextParseException]] carrying a well documented exception message
   */
  def badContextException[Ctx, Errors: Writes](
    reader: ReadsRequestContext[_, Ctx, Errors],
    request: RequestHeader,
    errors: Errors)
    (implicit tag: ClassTag[Ctx]): RequestContextParseException[Errors] = {
    val className = tag.runtimeClass.getName
    val exceptionMessage = s"Could not read an instance of $className from $request"
    RequestContextParseException[Errors](exceptionMessage, errors, reader.contextSource)
  }
  /**
   * Summon an implicit [[FromHeaders]] reader.
   */
  def fromHeaders[Ctx](implicit reader: FromHeaders[Ctx]): FromHeaders[Ctx] = reader
  /**
   * Creates an intermediary [[FromHeadersBuilder]] for building the final [[ReadsRequestContext.FromHeaders]].
   */
  def usingHeaders[Ctx: ClassTag](fromHeaders: HeaderReader => Ctx Or Every[HeaderError]): FromHeadersBuilder[Ctx] = {
    new FromHeadersBuilder[Ctx](req => fromHeaders(new HeaderReader(req)))
  }
  /**
   * Creates an intermediary [[FromHeadersBuilder]] for building the final [[ReadsRequestContext.FromHeaders]].
   *
   * @note this is used to pick the right [[FromHeaders]] reader based on the value of a determiner header.
   *       It is really just a convenience method for building an abstract reader based on some key header.
   */
  def chooseFromHeader[Ctx: ClassTag](headerName: String)(fromHeaders: Option[String] => FromHeaders[Ctx]): FromHeadersBuilder[Ctx] = {
    new FromHeadersBuilder[Ctx]({ req =>
      fromHeaders(new HeaderReader(req).get(headerName).toOption).extractOrErrors(req)
    })
  }
  /**
   * An immutable builder for better syntax without encountering type erasure issues with overloaded methods.
   */
  class FromHeadersBuilder[Ctx: ClassTag] private[ReadsRequestContext] (
    reads: RequestHeader => Ctx Or Every[HeaderError]
  ) {
    outer =>
    /**
     * Maps over the resulting [[FromHeadersBuilder]] and combines the recover block.
     */
    def map[NewCtx: ClassTag](f: Ctx => NewCtx): FromHeadersBuilder[NewCtx] = {
      new FromHeadersBuilder[NewCtx]({ headers =>
        outer.reads(headers) match {
          case Good(ctx) => Good(f(ctx))
          case Bad(errors) => Bad(errors)
        }
      })
    }
    /**
     * Maps a function that determines the final [[FromHeadersBuilder]] and combines the errors with the recover block.
     */
    def flatMap[NewCtx: ClassTag](f: Ctx => FromHeaders[NewCtx]): FromHeadersBuilder[NewCtx] = {
      new FromHeadersBuilder[NewCtx]({ request =>
        outer.reads(request) match {
          case Good(ctx) => f(ctx).extractOrErrors(request)
          case Bad(errors) => Bad(errors)
        }
      })
    }
    /**
     * Builds a [[ReadsRequestContext.FromHeaders]] provided a function to recover either a context
     * or a [[Result]] (if no context can be recovered).
     *
     * @note this can be used recover from any errors with a dummy value for the context.
     */
    def recoverWith(recoverWith: Every[HeaderError] => Ctx Or Result): ReadsRequestContext.FromHeaders[Ctx] = {
      new ReadsRequestContextFromHeaders[Ctx](reads, recoverWith)
    }
    /**
     * Builds a [[ReadsRequestContext.FromHeaders]] provided a function to recover a [[Result]]
     * from any group of header errors.
     *
     * @note this is similar to [[recoverWith]], except that there is no way to recover a context
     *       from the errors (or a dummy context).
     */
    def recoverResult(recover: Every[HeaderError] => Result): ReadsRequestContext.FromHeaders[Ctx] = {
      new ReadsRequestContextFromHeaders[Ctx](reads, { errors: Every[HeaderError] =>
        Bad(recover(errors))
      })
    }
    /**
     * Builds a [[ReadsRequestContext.FromHeaders]] provided a backup [[Result]] value for
     * if the extraction fails.
     */
    def orResult(result: => Result): ReadsRequestContext.FromHeaders[Ctx] = {
      new ReadsRequestContextFromHeaders[Ctx](reads, _ => Bad(result))
    }
  }
}
/**
 * An implementation that provides the default implementations for methods using constructor arguments.
 *
 * @param contextSource where the context is read from
 * @param recoverWith how to turn extraction errors into either a context or a terminal [[Result]]
 */
abstract class DefaultWSRequestContextReads[-R <: RequestHeader, +Ctx: ClassTag, Errors: Writes](
  override val contextSource: RequestContextSource,
  recoverWith: Errors => Ctx Or Result
)(
  implicit override val showErrors: Show[Errors]
) extends ReadsRequestContext[R, Ctx, Errors] {

  /** Extracts the context or throws the standard bad-context exception. */
  override def extractOrThrow(request: R): Ctx = {
    val attempt = extractOrErrors(request)
    attempt match {
      case Good(context) => context
      case Bad(errors) => throw ReadsRequestContext.badContextException(this, request, errors)
    }
  }

  /** Extracts the context, funnelling any errors through the recovery function. */
  override def extractOrResponse(request: R): Or[Ctx, Result] = {
    val attempt = extractOrErrors(request)
    attempt.recoverWith(recoverWith)
  }
}
/**
 * A [[ContextExtractor]] that extracts the context from the [[RequestHeader]]'s [[Headers]].
 *
 * @param requestReader the function that attempts to build the context from request headers
 * @param recoverWith how to recover when header extraction fails
 */
class ReadsRequestContextFromHeaders[Ctx: ClassTag](
  requestReader: RequestHeader => Ctx Or Every[HeaderError],
  recoverWith: Every[HeaderError] => Ctx Or Result
) extends DefaultWSRequestContextReads[RequestHeader, Ctx, Every[HeaderError]](
  ContextFromHeaders,
  recoverWith
) {

  /** Delegates extraction directly to the wrapped reader function. */
  override def extractOrErrors(request: RequestHeader): Ctx Or Every[HeaderError] = {
    requestReader.apply(request)
  }
}
| jeffmay/scala-context-passing | server/src/main/scala/play/api/libs/context/mvc/ReadsRequestContext.scala | Scala | apache-2.0 | 9,546 |
package edu.vanderbilt.accre.stackex
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext
/**
 * Created by joshuaarnold on 1/3/17.
 */
// Holds the application-wide Spark entry points as eagerly-initialized
// singletons. Initialization order matters: conf -> sc -> sqlContext.
object SparkContextKeeper {
  // Application-level Spark configuration (master/deploy settings come from spark-submit).
  val conf = new SparkConf().setAppName("Stack-Ex Application")
  // The single SparkContext for this JVM; created on first reference to this object.
  val sc = new SparkContext(conf)
  // SQLContext layered over the shared SparkContext.
  val sqlContext = new SQLContext(sc)
  // Stops the underlying SparkContext; the vals above are unusable afterwards
  // and there is no way to restart within the same JVM via this object.
  def stop(): Unit = sc.stop()
}
| bigdata-vandy/stack-ex | src/main/scala-2.10/edu/vanderbilt/accre/stackex/SparkContextKeeper.scala | Scala | mit | 375 |
package org.genericConfig.admin.client.old.step
import org.genericConfig.admin.client.views.html.HtmlElementIds
import org.scalajs.dom.raw.WebSocket
import org.scalajs.jquery.jQuery
import util.CommonFunction
/**
 * Copyright (C) 2016 Gennadi Heimann genaheimann@gmail.com
 *
 * Created by Gennadi Heimann 18.05.2018
 */
// Scala.js view for creating a new configuration step: renders the form,
// wires the buttons, and (eventually) sends the step over the websocket.
class AddStep(websocket: WebSocket) extends CommonFunction {

  // Replaces the current main area with the "create step" form and binds
  // the save / back-to-configs click handlers.
  def addStep(idToAppend: String, userId: String) = {
    cleanPage
    val htmlMain =
      "<dev id='main' class='main'>" +
        "<p>Neuen Schritt erstellen</p>" +
        drawInputField(HtmlElementIds.inputStepNameToShowHtml, "nameToShow") +
        drawInputField(HtmlElementIds.inputSelectionCriteriumMinHtml, "Selection Criterium MIN", "number") +
        drawInputField(HtmlElementIds.inputSelectionCriteriumMaxHtml, "Selection Criterium MAX", "number") +
        drawButton(HtmlElementIds.addStepHtml, "Speichern") +
        drawButton(HtmlElementIds.getConfigsHtml, "Konfiguration") +
        "</dev>"
    drawNewMain(htmlMain)
    jQuery(HtmlElementIds.addStepJQuery).on("click", () => saveStep(idToAppend))
    jQuery(HtmlElementIds.getConfigsJQuery).on("click", () => getConfigs(userId))
  }

  // Reads the form values; the actual JSON build/send is still commented out,
  // so as written this method collects the inputs and does nothing with them.
  // NOTE(review): `Dynamic` is presumably scala.scalajs.js.Dynamic — no import
  // is visible in this chunk; confirm it resolves.
  private def saveStep(idToAppend: String) = {
    val nameToShow: Dynamic = jQuery(HtmlElementIds.inputStepNameToShowJQuery).value()
    val scMin: Dynamic = jQuery(HtmlElementIds.inputSelectionCriteriumMinJQuery).value()
    val scMax: Dynamic = jQuery(HtmlElementIds.inputSelectionCriteriumMaxJQuery).value()
    //    val jsonStepIn = Json.toJson(JsonStepIn(
    //      JsonNames.ADD_STEP,
    //      JsonStepParams(
    //        appendToId = idToAppend,
    //        stepId = "",
    //        nameToShow = nameToShow.toString,
    //        kind = "first",
    //        Some(JsonSelectionCriterium(
    //          min = scMin.toString.toInt,
    //          max = scMax.toString.toInt
    //        ))
    //      )
    //    )).toString
    //    println("OUT -> " + jsonStepIn)
    //    websocket.send(jsonStepIn)
  }

  // NOTE(review): sends an empty string over the websocket because the JSON
  // construction is commented out — presumably work in progress; confirm.
  private def getConfigs(userId: String) = {
    val jsonGetConfigs: String = ""
    //      Json.toJson(JsonGetConfigsIn(
    //      params = JsonGetConfigsParams(
    //        userId
    //      )
    //    )).toString
    //
    println("OUT -> " + jsonGetConfigs)
    websocket.send(jsonGetConfigs)
  }

  //  def updateStatus(addStep: JsonStepOut) = {
  //    println(addStep)
  //    val htmlHeader =
  //      s"<dev id='status' class='status'>" +
  //        addStep.result.status.addStep.get.status +
  //        " , " +
  //        addStep.result.status.appendStep.get.status +
  //        " ," +
  //        addStep.result.status.common.get.status +
  //        "</dev>"
  //    jQuery("#status").remove()
  //    jQuery(htmlHeader).appendTo(jQuery("header"))
  //  }
}
package acceptance
import base.FeatureSpecBase
// Acceptance-test skeleton for labwork supervision. The Given/When/Then calls
// come from FeatureSpecBase; as written they only state the scenario text —
// NOTE(review): no assertions are wired up yet, so this scenario documents
// intent rather than verifying behaviour.
class SupervisionUAT extends FeatureSpecBase {

  feature("Supervision of students") {
    scenario("Student is not attending the labwork and is not excused") {
      Given("The supervision page for this labwork assignment is open")
      When("the supervisor calls the name of a student and the student is not attending")
      Then("the student is marked as absent")
    }
  }
}
| FHK-ADV/lwm | test/acceptance/SupervisionUAT.scala | Scala | mit | 429 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.views.application.assets.pensions
import iht.controllers.application.assets.pensions.routes
import iht.forms.ApplicationForms.pensionsChangedQuestionForm
import iht.models.application.assets.PrivatePension
import iht.testhelpers.CommonBuilder
import iht.testhelpers.TestHelper._
import iht.views.application.{CancelComponent, YesNoQuestionViewBehaviour}
import iht.views.html.application.asset.pensions.pensions_changed_question
import play.api.data.Form
import play.twirl.api.HtmlFormat.Appendable
// View test for the "pensions changed" yes/no question page; the shared
// behaviour lives in YesNoQuestionViewBehaviour and is driven by the
// overrides below.
class PensionsChangedQuestionViewTest extends YesNoQuestionViewBehaviour[PrivatePension] {

  // Registration fixture shared by the title and view overrides.
  lazy val regDetails = CommonBuilder.buildRegistrationDetails1

  // Deceased display name, or "" when no deceased details are present.
  lazy val deceasedName = regDetails.deceasedDetails.map(_.name).getOrElse("")

  lazy val pensionsChangedQuestionView: pensions_changed_question = app.injector.instanceOf[pensions_changed_question]

  override def guidance = noGuidance

  override def pageTitle = messagesApi("page.iht.application.assets.pensions.changed.title", deceasedName)

  override def browserTitle = messagesApi("page.iht.application.assets.pensions.changed.browserTitle")

  override def formTarget = Some(routes.PensionsChangedQuestionController.onSubmit())

  override def form: Form[PrivatePension] = pensionsChangedQuestionForm

  override def formToView: Form[PrivatePension] => Appendable = { theForm =>
    pensionsChangedQuestionView(theForm, regDetails)
  }

  override def cancelComponent = {
    val returnCall = routes.PensionsOverviewController.onPageLoad()
    Some(
      CancelComponent(
        returnCall,
        messagesApi("iht.estateReport.assets.pensions.returnToPrivatePensions"),
        AssetsPensionChangesID
      )
    )
  }

  "Pensions Changed Question View" must {
    behave like yesNoQuestion
  }
}
| hmrc/iht-frontend | test/iht/views/application/assets/pensions/PensionsChangedQuestionViewTest.scala | Scala | apache-2.0 | 2,292 |
package markatta
import scala.collection.immutable.NumericRange
object Util {

  /** Inclusive byte range from `start` to `end`, stepping by one. */
  def range(start: Byte, end: Byte): NumericRange[Byte] =
    NumericRange.inclusive[Byte](start, end, 1)

  /**
   * Runs `block` once and returns its result together with the elapsed
   * wall-clock time in seconds (measured via `System.nanoTime`).
   */
  def timeS[A](block: => A): (A, Float) = {
    val startedAt = System.nanoTime()
    val value = block
    val elapsedSeconds = (System.nanoTime() - startedAt) / 1e9f
    (value, elapsedSeconds)
  }
}
| johanandren/sodoku | src/main/scala/markatta/Util.scala | Scala | gpl-2.0 | 390 |
package com.msilb.scalanda.restapi.model
/** Candle aggregation format accepted by the candles endpoint. */
sealed trait CandleFormat

object CandleFormat {

  /** Candles built from the midpoint between bid and ask; renders as "midpoint". */
  case object MidPoint extends CandleFormat {
    override val toString: String = "midpoint"
  }

  /** Candles carrying separate bid and ask values; renders as "bidask". */
  case object BidAsk extends CandleFormat {
    override val toString: String = "bidask"
  }
}
| msilb/scalanda | src/main/scala/com/msilb/scalanda/restapi/model/CandleFormat.scala | Scala | mit | 270 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.security.auth
object Resource {
  val Separator = ":"
  val ClusterResourceName = "kafka-cluster"
  val ClusterResource = new Resource(Cluster, Resource.ClusterResourceName)
  val WildCardResource = "*"

  /**
   * Parses the "ResourceType:ResourceName" rendering produced by
   * [[Resource.toString]] back into a [[Resource]].
   *
   * The split is limited to two parts, so resource names may themselves
   * contain the separator character.
   */
  def fromString(str: String): Resource = {
    str.split(Separator, 2) match {
      case Array(kind, resourceName) =>
        new Resource(ResourceType.fromString(kind), resourceName)
      case _ =>
        throw new IllegalArgumentException(s"expected a string in format ResourceType:ResourceName but got $str")
    }
  }
}
/**
 * A named entity that ACLs are bound to.
 *
 * @param resourceType type of resource.
 * @param name name of the resource; for a topic this is the topic name, for a group the
 *             group name, and for the cluster type it is the constant string kafka-cluster.
 */
case class Resource(resourceType: ResourceType, name: String) {

  /** Renders as "ResourceType:ResourceName", the inverse of [[Resource.fromString]]. */
  override def toString: String = s"${resourceType.name}${Resource.Separator}$name"
}
| rhauch/kafka | core/src/main/scala/kafka/security/auth/Resource.scala | Scala | apache-2.0 | 1,736 |
package models
// Top-level API response envelope; every section is optional.
// `emails` and `errors` use Either to model the two JSON shapes the API
// produces: a single object or an array of them.
// NOTE(review): `Error` is presumably a models.Error type, not scala.Error —
// confirm against the rest of the models package.
case class TopLevel(links: Option[Map[String,String]] = None,
                    emails: Option[Either[Email,Seq[Email]]] = None,
                    users: Option[User] = None,
                    tokens: Option[Token] = None,
                    errors: Option[Either[Error,Seq[Error]]] = None)
| jdauphant/play_api_example | app/models/TopLevel.scala | Scala | isc | 314 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.yarn
import scala.collection.mutable.{ArrayBuffer, HashMap}
import org.apache.spark.SparkConf
import org.apache.spark.scheduler.{InputFormatInfo, SplitInfo}
import org.apache.spark.util.IntParam
import org.apache.spark.util.MemoryParam
// TODO: Add code and support for ensuring that yarn resource 'tasks' are location aware !
/**
 * Command-line arguments for the YARN Client. Parsing happens eagerly in the
 * constructor; an unknown flag or a missing required option prints usage and
 * exits the JVM.
 *
 * @param args raw command-line arguments
 * @param sparkConf Spark configuration used for defaults (e.g. the queue name)
 */
class ClientArguments(val args: Array[String], val sparkConf: SparkConf) {
  var addJars: String = null
  var files: String = null
  var archives: String = null
  var userJar: String = null
  var userClass: String = null
  var userArgs: Seq[String] = Seq[String]()
  var workerMemory = 1024 // MB
  var workerCores = 1
  var numWorkers = 2
  var amQueue = sparkConf.get("QUEUE", "default")
  var amMemory: Int = 512 // MB
  var amClass: String = "org.apache.spark.deploy.yarn.ApplicationMaster"
  var appName: String = "Spark"
  // TODO
  var inputFormatInfo: List[InputFormatInfo] = null
  // TODO(harvey)
  var priority = 0

  parseArgs(args.toList)

  /**
   * Consumes the argument list pairwise (flag, value), mutating the fields
   * above. Exits via [[printUsageAndExit]] on any unknown flag or when the
   * required --class option is missing.
   */
  private def parseArgs(inputArgs: List[String]): Unit = {
    val userArgsBuffer: ArrayBuffer[String] = new ArrayBuffer[String]()
    val inputFormatMap: HashMap[String, InputFormatInfo] = new HashMap[String, InputFormatInfo]()

    var args = inputArgs

    while (!args.isEmpty) {
      args match {
        case ("--jar") :: value :: tail =>
          userJar = value
          args = tail

        case ("--class") :: value :: tail =>
          userClass = value
          args = tail

        case ("--args") :: value :: tail =>
          userArgsBuffer += value
          args = tail

        case ("--master-class") :: value :: tail =>
          amClass = value
          args = tail

        case ("--master-memory") :: MemoryParam(value) :: tail =>
          amMemory = value
          args = tail

        case ("--num-workers") :: IntParam(value) :: tail =>
          numWorkers = value
          args = tail

        case ("--worker-memory") :: MemoryParam(value) :: tail =>
          workerMemory = value
          args = tail

        case ("--worker-cores") :: IntParam(value) :: tail =>
          workerCores = value
          args = tail

        case ("--queue") :: value :: tail =>
          amQueue = value
          args = tail

        case ("--name") :: value :: tail =>
          appName = value
          args = tail

        case ("--addJars") :: value :: tail =>
          addJars = value
          args = tail

        case ("--files") :: value :: tail =>
          files = value
          args = tail

        case ("--archives") :: value :: tail =>
          archives = value
          args = tail

        case _ =>
          printUsageAndExit(1, args)
      }
    }

    // Validate required options once all arguments are consumed. Previously
    // this check lived in a `case Nil` branch inside the match, which was
    // unreachable: the loop guard guarantees the matched list is non-empty,
    // so a missing --class was never reported here.
    if (userClass == null) {
      printUsageAndExit(1)
    }

    userArgs = userArgsBuffer.readOnly
    inputFormatInfo = inputFormatMap.values.toList
  }

  /**
   * Prints usage (plus the offending parameter, when given) to stderr and
   * terminates the JVM with `exitCode`.
   */
  def printUsageAndExit(exitCode: Int, unknownParam: Any = null) {
    if (unknownParam != null) {
      System.err.println("Unknown/unsupported param " + unknownParam)
    }
    System.err.println(
      "Usage: org.apache.spark.deploy.yarn.Client [options] \\n" +
      "Options:\\n" +
      "  --jar JAR_PATH             Path to your application's JAR file (required in yarn-standalone mode)\\n" +
      "  --class CLASS_NAME         Name of your application's main class (required)\\n" +
      "  --args ARGS                Arguments to be passed to your application's main class.\\n" +
      "                             Multiple invocations are possible, each will be passed in order.\\n" +
      "  --num-workers NUM          Number of workers to start (Default: 2)\\n" +
      "  --worker-cores NUM         Number of cores for the workers (Default: 1). This is unused right now.\\n" +
      "  --master-class CLASS_NAME  Class Name for Master (Default: spark.deploy.yarn.ApplicationMaster)\\n" +
      "  --master-memory MEM        Memory for Master (e.g. 1000M, 2G) (Default: 512 Mb)\\n" +
      "  --worker-memory MEM        Memory per Worker (e.g. 1000M, 2G) (Default: 1G)\\n" +
      "  --name NAME                The name of your application (Default: Spark)\\n" +
      "  --queue QUEUE              The hadoop queue to use for allocation requests (Default: 'default')\\n" +
      "  --addJars jars             Comma separated list of local jars that want SparkContext.addJar to work with.\\n" +
      "  --files files              Comma separated list of files to be distributed with the job.\\n" +
      "  --archives archives        Comma separated list of archives to be distributed with the job."
    )
    System.exit(exitCode)
  }
}
| sryza/spark | yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientArguments.scala | Scala | apache-2.0 | 5,459 |
/*
* Copyright 2015 Nicolas Rinaudo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kantan.csv.ops
import kantan.csv.{HeaderEncoder, RowEncoder}
/** Provides syntax for encoding values as CSV rows.
  *
  * Importing `kantan.csv.ops._` will add [[asCsvRow]] to any type `A` such that there exists an implicit
  * `RowEncoder[A]` in scope.
  */
final class CsvRowEncoderOps[A: RowEncoder](val a: A) {

  /** Encodes a value as a CSV row.
    *
    * @example
    * {{{
    * scala> List(1, 2, 3).asCsvRow
    * res0: Seq[String] = List(1, 2, 3)
    * }}}
    */
  def asCsvRow: Seq[String] = {
    val encoder = implicitly[RowEncoder[A]]
    encoder.encode(a)
  }
}
// Mix-in that enriches any `A` with a `HeaderEncoder` instance with the
// `asCsvRow` syntax; the row-level encoder is pulled out of the header
// encoder explicitly rather than summoned separately.
trait ToCsvRowEncoderOps {
  implicit def toCsvRowEncoderOps[A: HeaderEncoder](a: A): CsvRowEncoderOps[A] =
    new CsvRowEncoderOps(a)(HeaderEncoder[A].rowEncoder)
}

// Import `rowEncoder._` to get just this enrichment without the rest of the ops.
object rowEncoder extends ToCsvRowEncoderOps
| nrinaudo/tabulate | core/shared/src/main/scala/kantan/csv/ops/CsvRowEncoderOps.scala | Scala | mit | 1,360 |
package io.pivotal.gemfire.spark.connector
import io.pivotal.gemfire.spark.connector.internal.rdd.GemFireRegionRDD
import org.apache.spark.SparkContext
import scala.reflect.ClassTag
/** Provides GemFire specific methods on `SparkContext` */
// The SparkContext is @transient because instances of this class may be
// captured in closures shipped to executors; only the driver needs `sc`.
class GemFireSparkContextFunctions(@transient sc: SparkContext) extends Serializable {

  /**
   * Expose a GemFire region as a GemFireRDD
   * @param regionPath the full path of the region
   * @param connConf the GemFireConnectionConf that can be used to access the region;
   *                 defaults to one derived from the SparkContext's configuration
   * @param opConf use this to specify preferred partitioner
   *               and its parameters. The implementation will use it if it's applicable
   */
  def gemfireRegion[K: ClassTag, V: ClassTag] (
    regionPath: String, connConf: GemFireConnectionConf = GemFireConnectionConf(sc.getConf),
    opConf: Map[String, String] = Map.empty): GemFireRegionRDD[K, V] =
    GemFireRegionRDD[K, V](sc, regionPath, connConf, opConf)
}
| ysung-pivotal/incubator-geode | gemfire-spark-connector/gemfire-spark-connector/src/main/scala/io/pivotal/gemfire/spark/connector/GemFireSparkContextFunctions.scala | Scala | apache-2.0 | 944 |
package com.alexknvl.btce.site
import org.jsoup.Jsoup
import org.jsoup.Connection
import org.jsoup.nodes.Document
case class ParseException(message: String = null, cause: Throwable = null) extends RuntimeException(message, cause)
// Handles the site's JavaScript cookie challenge: when the page titled
// "loading" is served, its inline script sets an "a=<32 hex chars>" cookie
// that must be echoed back on subsequent requests.
private class Auth {
  // Matches the inline challenge script and captures the cookie value.
  private final val BodyCookieScriptPattern =
    "document.cookie=\\"a=([a-f0-9]{32});path=/;\\";location.href=document.URL;".r
  // Last cookie value captured from the challenge page, if any.
  private var bodyCookie: Option[String] = None

  // Returns true when `doc` was the challenge page (and the cookie was
  // captured), false when it is real content.
  // NOTE(review): `.first()` returns null when the selector matches nothing,
  // so a page without a <title> or <script> would NPE here — confirm intended.
  def updateCookies(response: Connection.Response, doc: Document): Boolean = {
    val title = doc.select("head title").first()
    if (title.text() == "loading") {
      doc.select("body script").first().data() match {
        // The pattern variable shadows the `bodyCookie` field; the field is
        // assigned explicitly via `this.bodyCookie`.
        case BodyCookieScriptPattern(bodyCookie) => this.bodyCookie = Some(bodyCookie)
        case _ => throw ParseException("Could not parse body cookie script.")
      }
      true
    } else false
  }

  // Attaches the captured challenge cookie (if any) to an outgoing request.
  def setCookies(connection: Connection) = bodyCookie.foreach { connection.cookie("a", _) }
}
// One chat-box entry scraped from the front page.
// `id` comes from the element id (prefix stripped); `time` is the raw
// title attribute text as rendered by the site.
case class ChatMessage(
  id: Long,
  time: String,
  user: String,
  message: String)
// Aggregate of everything a single front-page scrape yields: the chat
// messages, the user/bot counts, and whether each staff account
// (dev/support/admin) appears in the online list.
case class ScrapingResult(
  messages: List[ChatMessage],
  userCount: Long,
  botCount: Long,
  isDevOnline: Boolean,
  isSupportOnline: Boolean,
  isAdminOnline: Boolean)
/**
 * Minimal scraper for the btc-e.com front page: pulls chat messages,
 * user/bot counts and staff presence flags out of the rendered HTML.
 */
class SiteApi {
  private final val SiteUrl = "https://btc-e.com/"
  // The counts line appears localized; both English and Russian variants are matched.
  private final val UsersEnPattern = "Users: (\\\\d+) Bots: (\\\\d+)".r
  private final val UsersRuPattern = "Пользователи: (\\\\d+) Боты: (\\\\d+)".r

  private val auth = new Auth

  private def mainPage(locale: String = "en"): Document = mainPage(locale, 0)

  /**
   * Fetches the front page, transparently solving the site's cookie
   * challenge; retries up to three times before giving up.
   */
  private def mainPage(locale: String, tries: Int): Document = {
    val connection = Jsoup.connect(SiteUrl)
    auth.setCookies(connection)
    connection.cookie("locale", locale)
    val response = connection.execute()
    val doc = response.parse()
    if (auth.updateCookies(response, doc)) {
      // The challenge page was served instead of content: retry with the new cookie.
      if (tries < 3) mainPage(locale, tries + 1)
      else throw ParseException("Could not update cookies.")
    } else doc
  }

  /** Extracts counts, staff presence and chat messages from a parsed page. */
  private def scrape(doc: Document): ScrapingResult = {
    // Explicit .asScala decorators instead of the deprecated implicit
    // scala.collection.JavaConversions.
    import scala.collection.JavaConverters._

    val (userCount, botCount) = doc.select("div#users-online").first().ownText() match {
      case UsersEnPattern(users, bots) => (users.toLong, bots.toLong)
      case UsersRuPattern(users, bots) => (users.toLong, bots.toLong)
      case _ => throw ParseException("Could not parse user/bot count.")
    }

    // Staff accounts have fixed profile ids 1-3; flag whichever appear online.
    var isDevOnline = false
    var isSupportOnline = false
    var isAdminOnline = false
    for (elem <- doc.select("div#users-online p a[href]").iterator().asScala) {
      elem.attr("href") match {
        case "https://btc-e.com/profile/1" => isDevOnline = true
        case "https://btc-e.com/profile/2" => isSupportOnline = true
        case "https://btc-e.com/profile/3" => isAdminOnline = true
        case _ => ()
      }
    }

    // Each chat <p> carries an id like "msgNNN"; the first <a> holds the
    // timestamp (title attribute) and user name, the <span> the message body.
    val messages = for (
      elem <- doc.select("div#nChat p.chatmessage").listIterator().asScala;
      id = elem.id().substring(3).toLong;
      a = elem.select("a").get(0);
      time = a.attr("title");
      user = a.text();
      message = elem.select("span").get(0).text()
    ) yield ChatMessage(id, time, user, message)

    ScrapingResult(messages.toList, userCount, botCount, isDevOnline, isSupportOnline, isAdminOnline)
  }

  /** Fetches and scrapes the front page in the given locale. */
  def scrape(locale: String = "en"): ScrapingResult = scrape(mainPage(locale))

  def scrape: ScrapingResult = scrape()
}
/*
Copyright (c) 2016, Rice University
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
3. Neither the name of Rice University
nor the names of its contributors may be used to endorse or
promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.io._
object GenerateInput {

  /**
   * Writes `n-output-files` files named input.0 .. input.(n-1) into
   * `output-dir`, each containing `n-points-per-file` lines of two random
   * integers in [0, 100) separated by a single space.
   *
   * Prints usage and does nothing when the argument count is wrong.
   */
  def main(args : Array[String]) {
    if (args.length != 3) {
      println("usage: GenerateInput output-dir n-output-files n-points-per-file")
    } else {
      val outputDir = args(0)
      val nOutputFiles = args(1).toInt
      val pointsPerFile = args(2).toInt

      val r = new scala.util.Random

      for (f <- 0 until nOutputFiles) {
        val writer = new PrintWriter(new File(outputDir + "/input." + f))
        // Always release the file handle, even if a write throws.
        try {
          for (p <- 0 until pointsPerFile) {
            writer.write(r.nextInt(100) + " " + r.nextInt(100) + "\n")
          }
        } finally {
          writer.close()
        }
      }
    }
  }
}
| agrippa/spark-swat | functional-tests/tuple-prim-input-output/src/main/scala/sparksimple/GenerateInput.scala | Scala | bsd-3-clause | 2,185 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.utils.audit
import java.io.Serializable
import java.util.Collections
// An AuditProvider that reports no user information and ignores configuration,
// for deployments where auditing is disabled.
object NoOpAuditProvider extends AuditProvider {
  // Fixed placeholder identity.
  override val getCurrentUserId: String = "unknown"
  // Always the shared immutable empty map — callers must not mutate it.
  override val getCurrentUserDetails: java.util.Map[AnyRef, AnyRef] = Collections.emptyMap()
  // Deliberately a no-op: there is nothing to configure.
  override def configure(params: java.util.Map[String, Serializable]): Unit = {}
}
| MutahirKazmi/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/audit/NoOpAuditProvider.scala | Scala | apache-2.0 | 845 |
package liang.don.dzviewer.tile
/**
* Stores information regarding an image tile.
*
* @constructor Create a new tile setting with its image content, URL,
* image format, position, overlap size, column position,
* row position, and tile size.
* @param image The image data. The instance will differ depending on if this is
* executed using the Java or .NET runtime.
* @param uriSource The URL of the image this tile is from.
* @param thumbnailUri The thumbnail URL (which is of maximum 1 tile size) of the image.
* @param fileFormat The image format.
* @param position The position of the tile as part of the whole image.
* @param overlapSize The overlap size (if any) in this tile.
* @param column The column position of the tile as part of the whole image.
* @param row The row position of the tile as part of the whole image.
* @param tileSize The length of this tile (width and height).
*
* @author Don Liang
* @Version 0.1, 14/09/2011
*/
class ImageTile(val image: AnyRef, val uriSource: String, val thumbnailUri: String, val fileFormat: String, val position: Point, val overlapSize: Int, val column: Int, val row: Int, val tileSize: Int)
extends Serializable { }
| dl2k84/DeepZoomViewer | src/liang/don/dzviewer/tile/ImageTile.scala | Scala | mit | 1,234 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.controllers.application.exemptions.qualifyingBody
import iht.config.AppConfig
import iht.connector.{CachingConnector, IhtConnector}
import iht.controllers.application.EstateController
import iht.forms.ApplicationForms.qualifyingBodyNameForm
import iht.models._
import iht.models.application.ApplicationDetails
import iht.models.application.exemptions._
import iht.utils.CommonHelper
import iht.views.html.application.exemption.qualifyingBody.qualifying_body_name
import javax.inject.Inject
import play.api.mvc.{Call, MessagesControllerComponents, Request}
import uk.gov.hmrc.auth.core.AuthConnector
import uk.gov.hmrc.auth.core.retrieve.v2.Retrievals.{nino => ninoRetrieval}
import uk.gov.hmrc.play.bootstrap.frontend.controller.FrontendController
import scala.concurrent.Future
// Concrete, dependency-injected controller; all behaviour lives in the
// QualifyingBodyNameController trait below.
class QualifyingBodyNameControllerImpl @Inject()(val ihtConnector: IhtConnector,
                                                 val cachingConnector: CachingConnector,
                                                 val authConnector: AuthConnector,
                                                 val qualifyingBodyNameView: qualifying_body_name,
                                                 implicit val appConfig: AppConfig,
                                                 val cc: MessagesControllerComponents) extends FrontendController(cc) with QualifyingBodyNameController
// Add/edit flow for a qualifying body's name within the exemptions section.
trait QualifyingBodyNameController extends EstateController {

  // Submit target for the "add" flow, anchored back to the name input on return.
  lazy val submitUrl = CommonHelper.addFragmentIdentifier(routes.QualifyingBodyNameController.onSubmit(), Some(appConfig.ExemptionsOtherNameID))

  def cancelUrl = routes.QualifyingBodyDetailsOverviewController.onPageLoad()

  private def editCancelUrl(id: String) = routes.QualifyingBodyDetailsOverviewController.onEditPageLoad(id)

  private def editSubmitUrl(id: String) = {
    CommonHelper.addFragmentIdentifier(routes.QualifyingBodyNameController.onEditSubmit(id),
      Some(appConfig.ExemptionsOtherNameID))
  }

  // After a successful save, navigate to the edit overview of the saved body;
  // throws (via getOrException) if no id was produced by the save.
  def locationAfterSuccessfulSave(optionID: Option[String]) = CommonHelper.getOrException(
    optionID.map(id => routes.QualifyingBodyDetailsOverviewController.onEditPageLoad(id)))

  // Inserts a new qualifying body (allocating the next id) when no id is
  // supplied, or updates the name of the existing body with that id.
  // Returns the updated application details plus the affected id.
  val updateApplicationDetails: (ApplicationDetails, Option[String], QualifyingBody) => (ApplicationDetails, Option[String]) =
    (appDetails, id, qualifyingBody) => {
      val seekID = id.getOrElse("")

      val qbList = appDetails.qualifyingBodies

      val updatedQBTuple: (Seq[QualifyingBody], String) = qbList.find(_.id.getOrElse("") equals seekID) match {
        case None =>
          // No match: for a fresh add (id == None) append with the next id;
          // an explicit id that cannot be found is a programming error.
          id.fold {
            val nextID = nextId(qbList)
            (qbList :+ qualifyingBody.copy(id = Some(nextID)), nextID)
          } { reqId => throw new RuntimeException("Id " + reqId + " can not be found") }
        case Some(matchedQualifyingBody) =>
          // Only the name is editable on this page; other fields are preserved.
          val updatedQualifyingBody: QualifyingBody = matchedQualifyingBody.copy(name = qualifyingBody.name)
          (qbList.updated(qbList.indexOf(matchedQualifyingBody), updatedQualifyingBody), seekID)
      }

      (appDetails.copy(qualifyingBodies = updatedQBTuple._1), Some(updatedQBTuple._2))
    }

  val qualifyingBodyNameView: qualifying_body_name

  // Renders the blank "add" form.
  def onPageLoad = authorisedForIht {
    implicit request => {
      withRegistrationDetails { regDetails =>
        Future.successful(Ok(
          qualifyingBodyNameView(qualifyingBodyNameForm,
            regDetails,
            submitUrl,
            cancelUrl)))
      }
    }
  }

  // Renders the form pre-populated with the body identified by `id`.
  def onEditPageLoad(id: String) = authorisedForIhtWithRetrievals(ninoRetrieval) { userNino =>
    implicit request => {
      estateElementOnEditPageLoadWithNavigation[QualifyingBody](qualifyingBodyNameForm,
        qualifyingBodyNameView.apply,
        retrieveQualifyingBodyDetailsOrExceptionIfInvalidID(id),
        editSubmitUrl(id),
        editCancelUrl(id),
        userNino)
    }
  }

  def onSubmit = authorisedForIhtWithRetrievals(ninoRetrieval) { userNino =>
    implicit request => {
      doSubmit(
        submitUrl = submitUrl,
        cancelUrl = cancelUrl,
        None,
        userNino)
    }
  }

  // Shared submit path for both add (charityId = None) and edit flows.
  // NOTE(review): the `charityId` parameter actually carries a qualifying-body
  // id — the name looks copied from the charity controller; consider renaming.
  private def doSubmit(submitUrl: Call,
                       cancelUrl: Call,
                       charityId: Option[String],
                       userNino: Option[String])
                      (implicit request: Request[_]) = {
    estateElementOnSubmitWithIdAndNavigation[QualifyingBody](
      qualifyingBodyNameForm,
      qualifyingBodyNameView.apply,
      updateApplicationDetails,
      (_, updatedQualifyingBodyID) => locationAfterSuccessfulSave(updatedQualifyingBodyID),
      None,
      charityId,
      submitUrl,
      cancelUrl,
      userNino
    )
  }

  def onEditSubmit(id: String) = authorisedForIhtWithRetrievals(ninoRetrieval) { userNino =>
    implicit request => {
      doSubmit(
        submitUrl = editSubmitUrl(id),
        cancelUrl = editCancelUrl(id),
        charityId = Some(id),
        userNino)
    }
  }
}
| hmrc/iht-frontend | app/iht/controllers/application/exemptions/qualifyingBody/QualifyingBodyNameController.scala | Scala | apache-2.0 | 5,521 |
package org.scalatra
package validation
import java.net.URI
import java.util.Locale._
import mojolly.inflector.InflectorImports._
import org.apache.commons.validator.routines.{ EmailValidator, UrlValidator }
import org.scalatra.commands.FieldValidation
import org.scalatra.util.RicherString._
import scala.util.control.Exception._
import scala.util.matching.Regex
import scalaz.Scalaz._
import scalaz._
object Validators {
// A named validation rule for values of type TValue. The bounds on TResult
// (>: TValue <: TValue) pin it to exactly TValue while keeping the method
// signature generic; implementations return the value on success or a
// ValidationError on failure.
trait Validator[TValue] {
  def validate[TResult >: TValue <: TValue](subject: TResult): FieldValidation[TResult]
}
/**
 * A [[Validator]] backed by a plain predicate: when `isValid` holds, the value
 * is returned as a success; otherwise a [[ValidationError]] is produced, with
 * the humanized field name substituted into `messageFormat`.
 *
 * Note: the predicate parameter's function arrow was a garbled character
 * (mojibake of a Unicode arrow) in the previous revision; restored to `=>`.
 */
class PredicateValidator[TValue](fieldName: String, isValid: TValue => Boolean, messageFormat: String)
  extends Validator[TValue] {

  def validate[TResult >: TValue <: TValue](value: TResult): FieldValidation[TResult] = {
    if (isValid(value)) value.success
    else ValidationError(messageFormat.format(fieldName.underscore.humanize), FieldName(fieldName), ValidationFail).failure[TResult]
  }
}
// Builds a PredicateValidator from an arbitrary predicate; the %s slot in
// messageFormat receives the humanized field name.
def validate[TValue](fieldName: String, messageFormat: String = "%s is invalid.", validate: TValue => Boolean) =
  new PredicateValidator[TValue](fieldName, validate, messageFormat)
/**
 * Must be a non-empty [String]. null, " ", and "" are not allowed.
 *
 * @param fieldName name reported in the resulting error
 * @param messageFormat format with one %s slot for the humanized field name
 */
// NOTE(review): relies on RicherString.nonBlank; behaviour for a literal null
// depends on that extension — confirm it treats null as blank.
def nonEmptyString(fieldName: String, messageFormat: String = "%s must be present."): Validator[String] =
  new PredicateValidator[String](fieldName, _.nonBlank, messageFormat)
/**
 * Must be non-null.
 *
 * @param fieldName name reported in the resulting error
 * @param messageFormat format with one %s slot for the humanized field name
 */
def notNull(fieldName: String, messageFormat: String = "%s must be present."): Validator[AnyRef] =
  new PredicateValidator[AnyRef](fieldName, _ != null, messageFormat)
/**
 * Must be a collection which isn't empty.
 *
 * @param fieldName name reported in the resulting error
 * @param messageFormat format with one %s slot for the humanized field name
 */
def nonEmptyCollection[TResult <: Traversable[_]](fieldName: String, messageFormat: String = "%s must not be empty."): Validator[TResult] =
  new PredicateValidator[TResult](fieldName, _.nonEmpty, messageFormat)
/**
 * Must be a valid email as determined by org.apache.commons.validator.routines.EmailValidator
 * (the shared singleton instance, with local addresses disallowed by default).
 */
def validEmail(fieldName: String, messageFormat: String = "%s must be a valid email."): Validator[String] =
  new PredicateValidator[String](fieldName, EmailValidator.getInstance.isValid(_), messageFormat)
/**
 * Must be a valid absolute URL, parseable by the Apache Commons URI class.
 *
 * Delegates to buildUrlValidator (defined later in this object) with
 * absolute = true; `schemes` restricts the accepted URL schemes.
 */
def validAbsoluteUrl(fieldName: String, allowLocalHost: Boolean, messageFormat: String = "%s must be a valid absolute url.", schemes: Seq[String] = Seq("http", "https")) =
  buildUrlValidator(fieldName, absolute = true, allowLocalHost = allowLocalHost, messageFormat = messageFormat, schemes)
/**
* Must be a valid URL, parseable by the Apache Commons URI class.
*/
def validUrl(fieldName: String, allowLocalHost: Boolean, messageFormat: String = "%s must be a valid url.", schemes: Seq[String] = Seq("http", "https")) =
buildUrlValidator(fieldName, absolute = false, allowLocalHost = allowLocalHost, messageFormat = messageFormat, schemes)
/**
* Must match the regex.
*/
def validFormat(fieldName: String, regex: Regex, messageFormat: String = "%s is invalid."): Validator[String] =
new PredicateValidator[String](fieldName, regex.findFirstIn(_).isDefined, messageFormat)
/**
* The confirmation fieldName must have a true value.
*/
def validConfirmation(fieldName: String, confirmationFieldName: String, confirmationValue: => String, messageFormat: String = "%%s must match %s."): Validator[String] =
new PredicateValidator[String](
fieldName,
_ == confirmationValue,
messageFormat.format(confirmationFieldName.underscore.humanize.toLowerCase(ENGLISH)))
/**
* Must be greater than the min param.
*/
def greaterThan[T <% Ordered[T]](fieldName: String, min: T, messageFormat: String = "%%s must be greater than %s."): Validator[T] =
new PredicateValidator[T](fieldName, _ > min, messageFormat format min.toString)
/**
* Must be less than the max param.
*/
def lessThan[T <% Ordered[T]](fieldName: String, max: T, messageFormat: String = "%%s must be less than %s."): Validator[T] =
new PredicateValidator[T](fieldName, _ < max, messageFormat format max.toString)
/**
* Must be greater than or equal to the min param.
*/
def greaterThanOrEqualTo[T <% Ordered[T]](fieldName: String, min: T, messageFormat: String = "%%s must be greater than or equal to %s."): Validator[T] =
new PredicateValidator[T](fieldName, _ >= min, messageFormat format min)
/**
* Must be less than or equal to the max param.
*/
def lessThanOrEqualTo[T <% Ordered[T]](fieldName: String, max: T, messageFormat: String = "%%s must be less than or equal to %s."): Validator[T] =
new PredicateValidator[T](fieldName, _ <= max, messageFormat.format(max))
/**
* Must have a minimum length of min.
*/
def minLength(fieldName: String, min: Int, messageFormat: String = "%%s must be at least %s characters long."): Validator[String] =
new PredicateValidator[String](
fieldName, _.size >= min, messageFormat.format(min))
/**
* Must be included in the expected collection.
*/
def oneOf[TResult](fieldName: String, messageFormat: String = "%%s must be one of %s.", expected: Seq[TResult]): Validator[TResult] =
new PredicateValidator[TResult](
fieldName, expected.contains, messageFormat format expected.mkString("[", ", ", "]"))
/**
* Checks if the value of the data is a value of the specified enum.
*/
def enumValue(fieldName: String, enum: Enumeration, messageFormat: String = "%%s must be one of %s."): Validator[String] =
oneOf(fieldName, messageFormat, enum.values.map(_.toString).toSeq)
private def buildUrlValidator(fieldName: String, absolute: Boolean, allowLocalHost: Boolean, messageFormat: String = "%s must be a valid url.", schemes: Seq[String]): Validator[String] = {
val validator = (url: String) β {
(allCatch opt {
val u = URI.create(url).normalize()
!absolute || u.isAbsolute
}).isDefined && (allowLocalHost || UrlValidator.getInstance().isValid(url))
}
new PredicateValidator[String](fieldName, validator, messageFormat)
}
}
| lightvector/scalatra | commands/src/main/scala/org/scalatra/validation/Validators.scala | Scala | bsd-2-clause | 6,148 |
package com.twitter.zipkin.collector.processor
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
import com.twitter.ostrich.stats.{Distribution, Histogram, Stats}
import com.twitter.zipkin.collector.OstrichService
import com.twitter.zipkin.common.{Annotation, Endpoint, Span}
import com.twitter.zipkin.thriftscala
import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
class OstrichServiceSpec extends FunSuite with Matchers with BeforeAndAfter {

  // A single recorded duration of 10 — what one server span should contribute.
  val histogram = Histogram()
  histogram.add(10)
  val distribution = new Distribution(histogram)

  val prefix = "agg."

  // Builds a three-annotation span for the "service" endpoint whose first two
  // annotations carry the given core annotation values.
  private def spanWith(firstValue: String, secondValue: String): Span = {
    val first = Annotation(10, firstValue, Some(Endpoint(1, 2, "service")))
    val second = Annotation(20, secondValue, Some(Endpoint(3, 4, "service")))
    val custom = Annotation(30, "value3", Some(Endpoint(5, 6, "service")))
    Span(12345, "methodcall", 666, None, List(first, second, custom))
  }

  before {
    Stats.clearAll()
  }

  test("add two metrics if server span") {
    val service = new OstrichService(prefix)
    service.apply(spanWith(thriftscala.Constants.SERVER_RECV, thriftscala.Constants.SERVER_SEND))
    // Server spans record both a per-service and a per-service-method metric.
    Stats.getMetrics()(prefix + "service") should be(distribution)
    Stats.getMetrics()(prefix + "service.methodcall") should be(distribution)
  }

  test("add no metrics since not server span") {
    val service = new OstrichService(prefix)
    service.apply(spanWith(thriftscala.Constants.CLIENT_SEND, thriftscala.Constants.CLIENT_RECV))
    // Client-only spans must leave the stats registry untouched.
    Stats.getMetrics() should not contain key(prefix + "service")
    Stats.getMetrics() should not contain key(prefix + "service.methodcall")
  }
}
| jfeltesse-mdsol/zipkin | zipkin-collector-service/src/test/scala/com/twitter/zipkin/collector/processor/OstrichServiceSpec.scala | Scala | apache-2.0 | 2,464 |
/*
* Copyright (c) 2018. Fengguo Wei and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License v2.0
* which accompanies this distribution, and is available at
* https://www.apache.org/licenses/LICENSE-2.0
*
* Detailed contributors are listed in the CONTRIBUTOR.md
*/
package org.argus.amandroid.core.parser
import org.apache.commons.lang3.StringEscapeUtils
import org.argus.jawa.core.elements.JawaType
import org.argus.jawa.core.JawaClass
import org.argus.jawa.core.util._
/**
* @author <a href="mailto:fgwei521@gmail.com">Fengguo Wei</a>
* @author <a href="mailto:sroy@k-state.edu">Sankardas Roy</a>
*/
class IntentFilterDataBase {

  /**
   * Map from record name to it's intent filter information
   */
  private val intentFmap: MMap[JawaType, MSet[IntentFilter]] = mmapEmpty

  /** Registers a single intent filter under its holder component. */
  def updateIntentFmap(intentFilter: IntentFilter): Unit = {
    val bucket = this.intentFmap.getOrElseUpdate(intentFilter.getHolder, msetEmpty)
    bucket += intentFilter
  }

  /** Bulk-registers filters, merging them into any existing entries. */
  def addIntentFmap(intentFmap: IMap[JawaType, ISet[IntentFilter]]): Unit = {
    for ((componentType, filters) <- intentFmap) {
      this.intentFmap.getOrElseUpdate(componentType, msetEmpty) ++= filters
    }
  }

  /** Absorbs the content of another database into this one. */
  def merge(intentFilterDB: IntentFilterDataBase): Unit = {
    addIntentFmap(intentFilterDB.getIntentFmap)
  }

  def containsClass(r: JawaClass): Boolean = containsClass(r.getType)

  def containsClass(compTyp: JawaType): Boolean = this.intentFmap.contains(compTyp)

  /** Immutable snapshot of the component-to-filters map. */
  def getIntentFmap: IMap[JawaType, ISet[IntentFilter]] = {
    intentFmap.map { case (componentType, filters) => componentType -> filters.toSet }.toMap
  }

  def getIntentFilters(r: JawaClass): ISet[IntentFilter] = getIntentFilters(r.getType)

  def getIntentFilters(compTyp: JawaType): ISet[IntentFilter] =
    this.intentFmap.getOrElse(compTyp, msetEmpty).toSet

  /** All action strings declared by the filters of the given class. */
  def getIntentFiltersActions(r: JawaClass): ISet[String] = {
    val filters: ISet[IntentFilter] = getIntentFilters(r)
    if (filters != null) filters.flatMap(_.getActions)
    else Set.empty[String]
  }

  /** Empties the database; returns `this` for chaining. */
  def reset: IntentFilterDataBase = {
    this.intentFmap.clear()
    this
  }

  override def toString: String = intentFmap.toString
}
/**
 * One intent filter declared by the component `holder`, holding its actions,
 * categories and its aggregate <data> specification.
 */
class IntentFilter(holder: JawaType) {
  private val actions: MSet[String] = msetEmpty
  private val categories: MSet[String] = msetEmpty
  private val data = new Data
  /**
   * checks if this filter can accept an intent with (action, categories, uriData, mType).
   * All three sub-tests (action, category, data/type) must pass; an intent with
   * no action, no categories, no data and no type never matches.
   */
  def isMatchWith(action:String, categories: Set[String], uriData:UriData, mType:String):Boolean = {
    var actionTest = false
    var categoryTest = false
    var dataTest = false
    if(action == null && categories.isEmpty && uriData == null && mType == null) return false
    // A null action (unknown in static analysis) conservatively matches.
    if(action == null || hasAction(action)){
      actionTest = true
    }
// if(hasCategories(categories)){
// categoryTest = true
// }
    //note that in path-insensitive static analysis we had to change the category match subset rule,
    //we ensure no false-negative (which means no match is ignored)
    // Relaxed rule: any overlap (or no categories at all) counts as a match.
    if(categories.isEmpty){
      categoryTest = true
    } else if(categories.intersect(this.categories).nonEmpty){
      categoryTest = true
    }
    // note that in android there is some discrepancy regarding data and mType on the Intent side and the Intent Filter side
    if(this.data.matchWith(uriData, mType))
      dataTest = true
// println("holder:" + holder + "actionTest:" + actionTest + " categoryTest:" + categoryTest + " dataTest:" + dataTest)
    actionTest && categoryTest && dataTest
  }
  /** True if this filter declares `action` or the wildcard "ANY". */
  def hasAction(action:String):Boolean = {
    this.actions.contains(action) || this.actions.contains("ANY")
  }
  // Strict subset rule; superseded by the relaxed intersect rule in isMatchWith.
  def hasCategories(categories: Set[String]):Boolean = {
    categories.subsetOf(this.categories) || this.categories.contains("ANY")
  }
  def addAction(action: String): Unit = this.actions += action
  def addActions(actions: ISet[String]): Unit = this.actions ++= actions
  def addCategory(category: String): Unit = this.categories += category
  def addCategories(categories: ISet[String]): Unit = this.categories ++= categories
  /** Records one set of <data> attributes; null attributes are ignored by Data.add. */
  def modData(
      scheme: String,
      host: String,
      port: String,
      path: String,
      pathPrefix: String,
      pathPattern: String,
      mimeType: String): Unit = {
    data.add(scheme, host, port, path, pathPrefix, pathPattern, mimeType)
  }
  /** Merges every piece of another Data specification into this filter's data. */
  def addData(d: Data): Unit = {
    this.data.addSchemes(d.getSchemes)
    this.data.addAuthorities(d.getAuthorities)
    this.data.addPaths(d.getPaths)
    this.data.addPathPrefixs(d.getPathPrefixs)
    this.data.addPathPatterns(d.getPathPatterns)
    this.data.addTypes(d.getMimeTypes)
  }
  def getActions: ISet[String] = IntentFilter.this.actions.toSet
  def getCategorys: ISet[String] = IntentFilter.this.categories.toSet
  def getData: Data = IntentFilter.this.data
  def getHolder: JawaType = IntentFilter.this.holder
  override def toString: String = "component: " + holder + " (actions: " + actions + " categories: " + categories + " datas: " + data + ")"
}
// Host/port pair recorded from a filter's <data> attributes; either part may be null.
case class Authority(host: String, port: String)
// A Data class represents all pieces of info associated with all <data> tags of a particular filter as declared in a manifest file
class Data{
  private val schemes: MSet[String] = msetEmpty
  private val authorities: MSet[Authority] = msetEmpty
  private val paths: MSet[String] = msetEmpty
  private val pathPrefixs: MSet[String] = msetEmpty
  private val pathPatterns: MSet[String] = msetEmpty
  private val mimeTypes: MSet[String] = msetEmpty
  def getSchemes: ISet[String] = schemes.toSet
  def getAuthorities: ISet[Authority] = authorities.toSet
  def getPaths: ISet[String] = paths.toSet
  def getPathPrefixs: ISet[String] = pathPrefixs.toSet
  def getPathPatterns: ISet[String] = pathPatterns.toSet
  def getMimeTypes: ISet[String] = mimeTypes.toSet
  /** True when no <data> attribute of any kind has been recorded. */
  def isEmpty: Boolean = schemes.isEmpty && authorities.isEmpty && paths.isEmpty && pathPrefixs.isEmpty && pathPatterns.isEmpty && mimeTypes.isEmpty
  // note that in android there is some discrepancy regarding data and mType on the Intent side compared to that on the Intent Filter side
  /**
   * Tests the intent's uri data and MIME type against this specification.
   * Both the data test and the type test must pass.
   */
  def matchWith(uriData:UriData, mType:String):Boolean = {
    var dataTest = false
    var typeTest = false
    // No declared schemes and no intent data: vacuously matched.
    if(this.schemes.isEmpty && uriData == null) // **** re-check this logic
      dataTest = true
    if(uriData != null && matchWith(uriData)) // **** re-check this logic
      dataTest = true
    // content:// and file:// intents match a filter that declares no scheme.
    if(uriData != null && (uriData.getScheme == "content" || uriData.getScheme == "file")){
      if(this.schemes.isEmpty) dataTest = true
    }
    if(this.mimeTypes.isEmpty && mType == null)
      typeTest = true
    else {
      this.mimeTypes.foreach{
        ifType =>
          if(mType != null && ifType.matches("([^\\\\*]*|\\\\*)/([^\\\\*]*|\\\\*)") && mType.matches("([^\\\\*]*|\\\\*)/([^\\\\*]*|\\\\*)")){ // four cases can match: test/type, test/*, */type, */*
            val ifTypeFront = ifType.split("\\\\/")(0)
            val ifTypeTail = ifType.split("\\\\/")(1)
            val mTypeFront = mType.split("\\\\/")(0)
            val mTypeTail = mType.split("\\\\/")(1)
            var frontTest = false
            var tailTest = false
            if(ifTypeFront == mTypeFront || (ifTypeFront == "*" && mTypeFront == "*")){
              frontTest = true
            }
            if(ifTypeTail == mTypeTail || ifTypeTail == "*" || mTypeTail == "*"){
              tailTest = true
            }
            typeTest = frontTest && tailTest
          }
      }
    }
// println(dataTest, typeTest)
    dataTest && typeTest
  }
  /**
   * Uri-only match: scheme must be declared, then authority (host/port), then
   * one of path / pathPrefix / pathPattern must match. Empty levels of the
   * declaration accept anything at that level.
   */
  def matchWith(uriData:UriData):Boolean = {
    val scheme = uriData.getScheme
    val host = uriData.getHost
    val port = uriData.getPort
    val path = uriData.getPath
    var schemeTest = false
    var authorityTest = false
    var pathTest = false
    var pathPrefixTest = false
    var pathPatternTest = false
    if(this.schemes.isEmpty){ // we need to extend the matching logic to include many cases
      // No schemes declared: only a uri without a scheme matches.
      if(scheme == null){
        schemeTest = true
        authorityTest = true
        pathTest = true
      }
    } else if(scheme != null && this.schemes.contains(scheme)){
      schemeTest = true
      if(this.authorities.isEmpty || !this.authorities.exists(a => a.host != null)){
        // No usable host constraints: authority and path are accepted.
        authorityTest = true
        pathTest = true
      } else {
        this.authorities.foreach{
          case Authority(if_host, if_port) =>
            if(if_host == host){
              // A null declared port acts as a wildcard.
              if(if_port == null || if_port == port){
                authorityTest = true
                if(this.paths.isEmpty && this.pathPrefixs.isEmpty && this.pathPatterns.isEmpty){
                  pathTest = true
                  pathPrefixTest = true
                  pathPatternTest = true
                } else if(path != null){
                  pathTest = this.paths.contains(path)
                  this.pathPrefixs.foreach{
                    pre =>
                      if(path.startsWith(pre)) pathPrefixTest = true
                  }
                  this.pathPatterns.foreach{
                    pattern =>
                      if(path.matches(StringEscapeUtils.unescapeJava(pattern))) pathPatternTest = true
                  }
                }
              }
            }
        }
      }
    }
// println("schemeTest-->" + schemeTest + " authorityTest-->" + authorityTest + "(pathTest || pathPrefixTest || pathPatternTest)-->" + (pathTest || pathPrefixTest || pathPatternTest))
    schemeTest && authorityTest && (pathTest || pathPrefixTest || pathPatternTest)
  }
  /** Records one set of <data> attributes; null arguments are skipped. */
  def add(
      scheme: String,
      host: String,
      port: String,
      path: String,
      pathPrefix: String,
      pathPattern: String,
      mimeType: String): Unit = {
    if(scheme!= null) {
      this.schemes +=scheme
    }
    if(host != null || port != null){
      // Strip escaped-space artifacts from the port before storing it.
      val portAfterSanit =
        if(port != null) port.replaceAll("\\\\\\\\ ", "")
        else port
      this.authorities += Authority(host, portAfterSanit)
    }
    if(path!= null){
      this.paths +=path
    }
    if(pathPrefix != null){
      this.pathPrefixs += pathPrefix
    }
    if(pathPattern != null){
      this.pathPatterns += pathPattern
    }
    if(mimeType != null){
      this.mimeTypes += mimeType
    }
  }
  def addScheme(scheme: String): Unit ={
    if(scheme!= null){
      this.schemes +=scheme
    }
  }
  def addSchemes(schemes: ISet[String]): Unit = this.schemes ++= schemes
  def addAuthority(host: String, port: String): Unit = {
    this.authorities += Authority(host, port)
  }
  def addAuthorityHostOnly(host: String): Unit = {
    this.authorities += Authority(host, null)
  }
  def addAuthorityPortOnly(port: String): Unit = {
    this.authorities += Authority(null, port)
  }
  def addAuthorities(authorities: ISet[Authority]): Unit = this.authorities ++= authorities
  def addPath(path: String): Unit ={
    if(path!= null){
      this.paths +=path
    }
  }
  def addPaths(paths: ISet[String]): Unit = this.paths ++= paths
  def addPathPrefixs(pathPrefixs: ISet[String]): Unit = this.pathPrefixs ++= pathPrefixs
  def addPathPatterns(pathPatterns: ISet[String]): Unit = this.pathPatterns ++= pathPatterns
  def addType(mimeType: String): Unit ={
    if(mimeType!= null){
      this.mimeTypes +=mimeType
    }
  }
  def addTypes(mimeTypes: ISet[String]): Unit = this.mimeTypes ++= mimeTypes
  override def toString: String = {"schemes= " + schemes + " authorities= " + authorities + " path= " + paths + " pathPrefix= " + pathPrefixs + " pathPattern= " + pathPatterns + " mimeType= " + mimeTypes}
}
// A UriData class represents all pieces of info associated with the mData field of a particular Intent instance
// Holds the pieces of info carried by the mData field of a particular Intent
// instance. All fields are nullable Strings; setters silently ignore null.
class UriData {

  private var scheme: String = _
  private var host: String = _
  private var port: String = _
  private var path: String = _
  private var pathPrefix: String = _
  private var pathPattern: String = _

  /** Sets every non-null component in one call; null arguments leave the
    * corresponding field untouched. */
  def set(
      scheme: String,
      host: String,
      port: String,
      path: String,
      pathPrefix: String,
      pathPattern: String): Unit = {
    setScheme(scheme)
    setHost(host)
    setPort(port)
    setPath(path)
    setPathPrefix(pathPrefix)
    setPathPattern(pathPattern)
  }

  def setScheme(scheme: String): Unit = if (scheme != null) this.scheme = scheme

  def getScheme: String = this.scheme

  def setHost(host: String): Unit = if (host != null) this.host = host

  def getHost: String = this.host

  def setPort(port: String): Unit = if (port != null) this.port = port

  def getPort: String = this.port

  def setPath(path: String): Unit = if (path != null) this.path = path

  def getPath: String = this.path

  def setPathPrefix(pathPrefix: String): Unit = if (pathPrefix != null) this.pathPrefix = pathPrefix

  def getPathPrefix: String = this.pathPrefix

  def setPathPattern(pathPattern: String): Unit = if (pathPattern != null) this.pathPattern = pathPattern

  def getPathPattern: String = this.pathPattern

  override def toString: String =
    s"schemes= $scheme host= $host port= $port path= $path pathPrefix= $pathPrefix pathPattern= $pathPattern"
}
| arguslab/Argus-SAF | amandroid/src/main/scala/org/argus/amandroid/core/parser/IntentFilterInfo.scala | Scala | apache-2.0 | 13,553 |
/*
* Accio is a platform to launch computer science experiments.
* Copyright (C) 2016-2018 Vincent Primault <v.primault@ucl.ac.uk>
*
* Accio is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Accio is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Accio. If not, see <http://www.gnu.org/licenses/>.
*/
package fr.cnrs.liris.sparkle
import com.google.common.base.MoreObjects
/**
 * A [[DataFrame]] view that applies `fn` to each partition of `inner` on load.
 *
 * `fn` receives the partition key together with the partition's rows and
 * returns the transformed rows; nothing is computed until `load` is called.
 */
private[sparkle] class MapPartitionsDataFrame[T, U](
  inner: DataFrame[T],
  fn: (String, Iterable[T]) => Iterable[U],
  private[sparkle] val encoder: Encoder[U])
  extends DataFrame[U] {

  override def toString: String = MoreObjects.toStringHelper(this).addValue(inner).toString

  // Partition keys are unchanged — only the rows are transformed.
  override private[sparkle] def keys: Seq[String] = inner.keys

  // Loads the underlying partition and applies the transformation.
  override private[sparkle] def load(key: String): Iterable[U] = fn(key, inner.load(key))

  override private[sparkle] def env = inner.env
}
| privamov/accio | accio/java/fr/cnrs/liris/sparkle/MapPartitionsDataFrame.scala | Scala | gpl-3.0 | 1,328 |
package impulsestorm.stargame.model
// A client rendering hint: either only map info changed, or additionally the
// entity identified by `selectedUuid` is selected.
case class Hint(mapInfoChangeOnly: Boolean,
                selectedUuid: String = "") {
  // Returns a copy selecting the given fleet; "fv-" is presumably the
  // fleet-view id namespace expected by the client — confirm against client code.
  def selected(f: Fleet) = copy(selectedUuid = s"fv-${f.uuid}")
}
| tommycli/stargame | src/main/scala/impulsestorm/stargame/model/Hint.scala | Scala | agpl-3.0 | 191 |
import scala.io.Source
object BufferReader {
  /**
   * Reads the file at `path`, skips the single header line, and parses every
   * remaining line as a Long.
   *
   * The file content is materialized before returning so the underlying
   * Source can be closed; the previous lazy-iterator version leaked the file
   * handle because the Source was never closed (and could not be, once the
   * method returned).
   *
   * @param path path of the input file
   * @return iterator over the parsed values, in file order
   * @throws NumberFormatException if a data line is not a valid Long
   */
  def parseFile(path: String): Iterator[Long] = {
    val source = Source.fromFile(path)
    try {
      source.getLines()
        .drop(1)
        .map(_.toLong)
        .toVector
        .iterator
    } finally {
      source.close()
    }
  }
}
| GMadorell/programming-challenges | tuenti-2015/01-TheBuffer/src/main/scala/BufferReader.scala | Scala | mit | 182 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
* */
package io.github.mandar2812.dynaml.tensorflow.utils
import io.github.mandar2812.dynaml.pipes.DataPipe
import org.apache.spark.annotation.Experimental
import org.platanios.tensorflow.api._
import org.platanios.tensorflow.api.core.types.{IsNotQuantized, TF}
/**
* A basis function expansion yielding a TF tensor.
* */
@Experimental
case class TensorBasis[-I, D: TF: IsNotQuantized](f: I => Tensor[D]) extends DataPipe[I, Tensor[D]] {
  self =>
  // Applies the basis expansion to a single input.
  override def run(data: I): Tensor[D] = f(data)
  // Post-composes another pipe: (this > other).run(x) == other.run(this.run(x)).
  def >[D1: TF: IsNotQuantized](other: DataPipe[Tensor[D], Tensor[D1]]): TensorBasis[I, D1] =
    TensorBasis((x: I) => other.run(self.f(x)))
}
| transcendent-ai-labs/DynaML | dynaml-tensorflow/src/main/scala/io/github/mandar2812/dynaml/tensorflow/utils/TensorBasis.scala | Scala | apache-2.0 | 1,409 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.CATO01
import uk.gov.hmrc.ct.box.CtValidation
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
// Validation rules for computations box CP286: the box must be present iff
// CPQ18 is true, and when present its value must lie between 0 and the
// calculated maximum. Anonymous subclasses stub calculateMaximumCP286 so each
// test controls the limit directly.
class CP286Spec extends WordSpec with Matchers with MockitoSugar {

  "CP286" should {

    val boxRetriever: ComputationsBoxRetriever = mock[ComputationsBoxRetriever]

    // CP286 with no value: only allowed when CPQ18 is not true.
    "when empty" when {
      val box = new CP286(None) {
        override val boxId = "CP286"
        override def calculateMaximumCP286(cp117: CP117, cato01: CATO01, cp998: CP998, cp281: CP281) = 90
      }

      "pass validation when CPQ18 is empty" in {
        when(boxRetriever.cpQ18()).thenReturn(CPQ18(None))
        box.validate(boxRetriever) shouldBe empty
      }

      "pass validation when CPQ18 is false" in {
        when(boxRetriever.cpQ18()).thenReturn(CPQ18(Some(false)))
        box.validate(boxRetriever) shouldBe empty
      }

      "fail validation when CPQ18 is true" in {
        when(boxRetriever.cpQ18()).thenReturn(CPQ18(Some(true)))
        box.validate(boxRetriever) shouldBe Set(CtValidation(Some("CP286"), "error.CP286.required"))
      }
    }

    // CP286 with a value: must only exist when CPQ18 is true, and must be
    // within [0, calculated maximum].
    "when has value" when {
      "pass validation when CPQ18 is true and value < limit" in {
        when(boxRetriever.cpQ18()).thenReturn(CPQ18(Some(true)))
        val box = new CP286(Some(90)) {
          override val boxId = "CP286"
          override def calculateMaximumCP286(cp117: CP117, cato01: CATO01, cp998: CP998, cp281: CP281) = 91
        }
        box.validate(boxRetriever) shouldBe empty
      }

      "pass validation when CPQ18 is true and value == limit" in {
        when(boxRetriever.cpQ18()).thenReturn(CPQ18(Some(true)))
        val box = new CP286(Some(90)) {
          override def calculateMaximumCP286(cp117: CP117, cato01: CATO01, cp998: CP998, cp281: CP281) = 90
        }
        box.validate(boxRetriever) shouldBe empty
      }

      "pass validation when CPQ18 is true and value == 0" in {
        when(boxRetriever.cpQ18()).thenReturn(CPQ18(Some(true)))
        val box = new CP286(Some(0)) {
          override def calculateMaximumCP286(cp117: CP117, cato01: CATO01, cp998: CP998, cp281: CP281) = 90
        }
        box.validate(boxRetriever) shouldBe empty
      }

      "fail validation when CPQ18 is true and value > limit" in {
        when(boxRetriever.cpQ18()).thenReturn(CPQ18(Some(true)))
        val box = new CP286(Some(90)) {
          override val boxId = "CP286"
          override def calculateMaximumCP286(cp117: CP117, cato01: CATO01, cp998: CP998, cp281: CP281) = 89
        }
        // The maximum is echoed back as the error message argument.
        box.validate(boxRetriever) shouldBe Set(CtValidation(Some("CP286"), "error.CP286.exceeds.max", Some(Seq("89"))))
      }

      "fail validation when CPQ18 is true and value < 0" in {
        when(boxRetriever.cpQ18()).thenReturn(CPQ18(Some(true)))
        val box = new CP286(Some(-1)) {
          override val boxId = "CP286"
          override def calculateMaximumCP286(cp117: CP117, cato01: CATO01, cp998: CP998, cp281: CP281) = 89
        }
        box.validate(boxRetriever) shouldBe Set(CtValidation(Some("CP286"), "error.CP286.below.min", Some(Seq("0"))))
      }

      "fail validation when CPQ18 is false and has value" in {
        when(boxRetriever.cpQ18()).thenReturn(CPQ18(Some(false)))
        val box = new CP286(Some(90)) {
          override val boxId = "CP286"
          override def calculateMaximumCP286(cp117: CP117, cato01: CATO01, cp998: CP998, cp281: CP281) = 90
        }
        box.validate(boxRetriever) shouldBe Set(CtValidation(Some("CP286"), "error.CP286.cannot.exist"))
      }

      "fail validation when CPQ18 is empty and has value" in {
        when(boxRetriever.cpQ18()).thenReturn(CPQ18(None))
        val box = new CP286(Some(90)) {
          override val boxId = "CP286"
          override def calculateMaximumCP286(cp117: CP117, cato01: CATO01, cp998: CP998, cp281: CP281) = 90
        }
        box.validate(boxRetriever) shouldBe Set(CtValidation(Some("CP286"), "error.CP286.cannot.exist"))
      }
    }
  }
}
| pncampbell/ct-calculations | src/test/scala/uk/gov/hmrc/ct/computations/CP286Spec.scala | Scala | apache-2.0 | 4,731 |
package dbis.pig.cep.flink
import scala.reflect.ClassTag
import dbis.pig.cep.ops.SelectionStrategy._
import dbis.pig.cep.ops.OutputStrategy._
import dbis.pig.cep.nfa.NFAController
import dbis.pig.backends.{SchemaClass => Event}
import org.apache.flink.api.common.typeinfo.TypeInformation
//import org.apache.flink.api.java.ExecutionEnvironment
//import org.apache.flink.api.java.DataSet
import scala.collection.JavaConversions._
import org.apache.flink.streaming.api.scala._
/**
 * Wrapper adding an NFA-matching operation to a Flink DataStream. Instances
 * are created implicitly via [[CustomDataStreamMatcher.addDataSetMatcher]].
 */
class CustomDataStreamMatcher[T <: Event: ClassTag: TypeInformation](@transient val dataStream: DataStream[T]) {
  /**
   * Runs the given NFA against this stream and computes the matches.
   *
   * @param nfa      the NFA controller describing the pattern
   * @param flinkEnv the Flink streaming environment to execute in
   * @param sstr     match-selection strategy (defaults to FirstMatch)
   * @param out      output strategy (defaults to Combined)
   */
  def matchNFA(nfa: NFAController[T], flinkEnv: StreamExecutionEnvironment, sstr: SelectionStrategy = FirstMatch, out: OutputStrategy = Combined) = {
    // NOTE(review): debug println in library code — consider a logger.
    println("create a new DataStream matcher")
    new DataStreamMatcher(dataStream, nfa, flinkEnv, sstr, out).compute()
  }
}
object CustomDataStreamMatcher {
  /**
   * Implicit conversion pimping a Flink DataStream with `matchNFA`.
   * NOTE(review): the name says "DataSet" but it wraps a DataStream —
   * presumably a leftover from the batch API; confirm before renaming.
   */
  implicit def addDataSetMatcher[T <: Event: ClassTag: TypeInformation](@transient dataStream: DataStream[T]) = {
    // NOTE(review): debug println in library code — consider a logger.
    println("add a custom DataStream function")
    new CustomDataStreamMatcher(dataStream)
  }
} | ksattler/piglet | ceplib/src/main/scala/dbis/pig/cep/flink/CustomDataStreamMatcher.scala | Scala | apache-2.0 | 1,117 |
/*
* Copyright (c) 2012, 2013 Roberto Tyley
*
* This file is part of 'BFG Repo-Cleaner' - a tool for removing large
* or troublesome blobs from Git repositories.
*
* BFG Repo-Cleaner is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* BFG Repo-Cleaner is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/ .
*/
package com.madgag.git.bfg.cleaner
import com.madgag.git._
import com.madgag.git.bfg.cleaner.protection.ProtectedObjectCensus
import com.madgag.textmatching.Literal
import org.eclipse.jgit.lib.ObjectId
import org.eclipse.jgit.revwalk.RevCommit
import org.scalatest.Inspectors
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.Matcher
import org.scalatest.matchers.should.Matchers
import scala.jdk.CollectionConverters._
class ObjectIdCleanerSpec extends AnyFlatSpec with Matchers {

  // Regression test for a StackOverflowError when cleaning a repo with very
  // deep history; currently disabled via `ignore` — flip to `in` to re-enable.
  "cleaning" should "not have a StackOverflowError cleaning a repo with deep history" ignore new unpackedRepo("/sample-repos/deep-history.zip") {
    val dirtyCommitWithDeepHistory = "d88ac4f99511667fc0617ea026f3a0ce8a25fd07".asObjectId
    // Minimal config: no protected objects, just delete files named "foo".
    val config = ObjectIdCleaner.Config(
      ProtectedObjectCensus.None,
      treeBlobsCleaners = Seq(new FileDeleter(Literal("foo")))
    )
    ensureCleanerWith(config).removesDirtOfCommitsThat(haveFile("foo")).whenCleaning(dirtyCommitWithDeepHistory)
  }
}
/**
 * Test fixture exposing a small fluent DSL over an unpacked sample repo:
 *
 *   ensureCleanerWith(config).removesDirtOfCommitsThat(matcher).whenCleaning(commit)
 *
 * asserts that at least one commit in `commit`'s history matches `matcher`
 * before cleaning, and that no commit in the cleaned history does.
 */
class unpackedRepo(filePath: String) extends bfg.test.unpackedRepo(filePath) {

  class EnsureCleanerWith(config: ObjectIdCleaner.Config) {

    class RemoveDirtOfCommitsThat(commitM: Matcher[RevCommit]) extends Inspectors with Matchers {
      // History of `c`, oldest commit first.
      def histOf(c: ObjectId) = repo.git.log.add(c).call.asScala.toSeq.reverse

      def whenCleaning(oldCommit: ObjectId): Unit = {
        val cleaner = new ObjectIdCleaner(config, repo.getObjectDatabase, revWalk)
        // Sanity check: the dirt must actually be present before cleaning.
        forAtLeast(1, histOf(oldCommit)) { commit =>
          commit should commitM
        }
        val cleanCommit = cleaner.cleanCommit(oldCommit)
        // After cleaning, no commit in the rewritten history may match.
        forAll(histOf(cleanCommit)) { commit =>
          commit shouldNot commitM
        }
      }
    }

    def removesDirtOfCommitsThat[T](commitM: Matcher[RevCommit]) = new RemoveDirtOfCommitsThat(commitM)
  }

  def ensureCleanerWith(config: ObjectIdCleaner.Config) = new EnsureCleanerWith(config)
}
| rtyley/bfg-repo-cleaner | bfg-library/src/test/scala/com/madgag/git/bfg/cleaner/ObjectIdCleanerSpec.scala | Scala | gpl-3.0 | 2,759 |
/**
* Magmanics Licensing. This web application allows for centralized control
* of client application activation, with optional configuration parameters
* to control licensable features, and storage of supplementary information
* about the client machine. Client applications may interface with this
* central server (for activation) using libraries licenced under an
* alternative licence.
*
* Copyright (C) 2010 James Baxter <j.w.baxter(at)gmail.com>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.magmanics.licensing.datalayer.dao
import javax.persistence.{EntityManager, PersistenceContext}
import com.magmanics.licensing.datalayer.dao.exception.DataLayerException
import com.magmanics.licensing.datalayer.model._
import com.magmanics.licensing.service.exception.DuplicateNameException
import com.magmanics.licensing.model.{BoolOption, ListOption, Product, TextOption}
import org.slf4j.LoggerFactory
import scala.collection.JavaConverters._
/**
* DAO for [[com.magmanics.licensing.model.Product Product]]s
*
* @author James Baxter <j.w.baxter@gmail.com>
* @since 31-May-2010
*/
trait ProductDao {

  /**
   * Persists a new Product.
   *
   * @return the newly created Product with its id populated to facilitate further operations
   */
  @throws[DuplicateNameException]("If a Product with the same name already exists")
  def create(product: Product): Product

  /**
   * Gets all Products within the system, enabled or not.
   */
  def get(): Set[Product]

  /**
   * Gets all enabled Products within the system.
   */
  def getEnabled(): Set[Product]

  /**
   * Gets the Product with the given id, or None if no such Product exists.
   */
  def get(id: Long): Option[Product]

  /**
   * Updates the given Product.
   */
  @throws[DuplicateNameException]("If a Product with the same name already exists")
  def update(product: Product)

  /**
   * Deletes the Product with the given id. Missing entities are ignored.
   */
  def delete(id: Long)
}
class ProductDaoJPA extends ProductDao {
import com.magmanics.licensing.datalayer.dao.ImplicitDataModelConversion._
val log = LoggerFactory.getLogger(classOf[ProductDaoJPA])
@PersistenceContext
var em: EntityManager = _
def create(p: Product): Product = {
log.debug("Creating {}", p)
if (get(p.name).nonEmpty)
throw new DuplicateNameException("Cannot create Product as its name is already in use: " + p)
try {
val product = new ProductEntity
product.name = p.name
product.description = p.description
product.enabled = p.enabled
p.options.foreach {
case b: BoolOption =>
val radio = new RadioProductOptionEntity
radio.name = b.name
radio.default = b.default
radio.product = product
product.addOption(radio)
case t: TextOption =>
val text = new TextProductOptionEntity
text.name = t.name
text.default = t.default
text.product = product
product.addOption(text)
case l: ListOption =>
val list = new ListProductOptionEntity
list.name = l.name
list.default = l.default
list.product = product
product.addOption(list)
l.values.foreach(o => {
val option = new ListProductOptionValueEntity()
option.value = o
option.listProductOption = list
list.addOptionValue(option)
})
}
em.persist(product)
em.refresh(product)
product
} catch {
// case ve: ValidationException => throw new ConstraintException(ve)
case e: Exception => throw new DataLayerException(e)
}
}
def get(): Set[Product] = {
log.debug("Getting all Products")
em.createNamedQuery[ProductEntity]("Product.GetAll", classOf[ProductEntity]).getResultList.asScala
}
def getEnabled(): Set[Product] = {
log.debug("Getting all enabled Products")
em.createNamedQuery[ProductEntity]("Product.GetEnabled", classOf[ProductEntity]).getResultList.asScala
}
def get(id: Long): Option[Product] = {
getEntity(id)
}
private def getEntity(id: Long): Option[ProductEntity] = {
log.debug("Getting Product with id: {}", id)
Option(em.find(classOf[ProductEntity], id))
}
private def get(name: String): Option[Product] = {
log.debug("Getting Product with name: {}", name)
val query = em.createNamedQuery[ProductEntity]("Product.GetByName", classOf[ProductEntity])
query.setParameter("name", name)
query.getResultList.asScala.headOption
}
def update(p: Product) {
log.debug("Updating {}", p)
val id = p.id.getOrElse(
throw new IllegalStateException("Cannot update Product as it does not have an id: " + p))
val product = getEntity(id).getOrElse(
throw new IllegalStateException("Cannot update Product as could not find existing record with same id: " + p))
val existingProduct = get(p.name)
if (existingProduct.nonEmpty && existingProduct.get.id != p.id)
throw new DuplicateNameException("Cannot create Product as its name is already in use: " + p)
product.name = p.name
product.description = p.description
product.enabled = p.enabled
p.options.foreach {
case b: BoolOption =>
val radio = new RadioProductOptionEntity
if (b.id.isDefined) radio.id = b.id.get
radio.name = b.name
radio.default = b.default
product.radioOptions.add(radio)
case t: TextOption =>
val text = new TextProductOptionEntity
if (t.id.isDefined) text.id = t.id.get
text.name = t.name
text.default = t.default
product.textOptions.add(text)
case l: ListOption =>
val list = new ListProductOptionEntity
if (l.id.isDefined) list.id = l.id.get
list.name = l.name
list.default = l.default
l.values.foreach(o => {
val option = new ListProductOptionValueEntity()
if (l.id.isDefined) option.id = l.id.get
option.value = o
list.optionValues.add(option)
})
product.listOptions.add(list)
}
em.merge(product)
}
def delete(id: Long) {
log.debug("Deleting Product with id: {}", id)
getEntity(id).foreach(p => {
log.debug("Deleting Product: {}", p)
em.remove(p)
})
}
} | manicmonkey/licensing | Licensing-Server/src/main/scala/com/magmanics/licensing/datalayer/dao/ProductDao.scala | Scala | gpl-3.0 | 6,897 |
package japgolly.microlibs.compiletime
import java.util.regex.Pattern
import scala.quoted.*
import MacroEnv.*
/**
 * Macro helpers that fold common String operations at compile time.
 *
 * Each method inspects its `Expr` arguments: when all are compile-time
 * constants (`.value` is `Some`), the operation is performed during macro
 * expansion and the result is inlined as a constant; otherwise the
 * corresponding runtime call is spliced in unchanged.
 */
object QuotingUtils:

  /** Emits a compiler warning with the (constant) message and expands to `()`. */
  def warn(warning: Expr[String])(using Quotes): Expr[Unit] =
    import quotes.reflect.*
    report.warning(warning.valueOrError)
    Expr.inlineConstUnit

  /** `String.replaceFirst`, folded at compile time when all three args are constants. */
  def replaceFirst(str: Expr[String], regex: Expr[String], repl: Expr[String])(using Quotes): Expr[String] =
    (str.value, regex.value, repl.value) match
      case (Some(n), Some(r), Some(p)) => Expr.inlineConst(n.replaceFirst(r, p))
      case _ => '{ $str.replaceFirst($regex, $repl) }

  /** `String.replaceAll`, folded at compile time when all three args are constants. */
  def replaceAll(str: Expr[String], regex: Expr[String], repl: Expr[String])(using Quotes): Expr[String] =
    (str.value, regex.value, repl.value) match
      case (Some(n), Some(r), Some(p)) => Expr.inlineConst(n.replaceAll(r, p))
      case _ => '{ $str.replaceAll($regex, $repl) }

  /** `String.trim`, folded at compile time for constant input. */
  def trim(str: Expr[String])(using Quotes): Expr[String] =
    str.value match
      case Some(s) => Expr.inlineConst(s.trim)
      case None    => '{ $str.trim }

  /** `String.toLowerCase`, folded at compile time for constant input. */
  def toLowerCase(str: Expr[String])(using Quotes): Expr[String] =
    str.value match
      case Some(s) => Expr.inlineConst(s.toLowerCase)
      case None    => '{ $str.toLowerCase }

  /** `String.toUpperCase`, folded at compile time for constant input. */
  def toUpperCase(str: Expr[String])(using Quotes): Expr[String] =
    str.value match
      case Some(s) => Expr.inlineConst(s.toUpperCase)
      case None    => '{ $str.toUpperCase }

  /**
   * `String.toInt`, folded at compile time for constant input.
   * A constant that does not parse aborts compilation via `fail`.
   * NOTE(review): the catch is `Throwable`-wide; `NumberFormatException`
   * would be the precise failure here — confirm nothing else is expected.
   */
  def toInt(str: Expr[String])(using Quotes): Expr[Int] =
    str.value match
      case Some(s) =>
        try
          Expr.inlineConst(s.toInt)
        catch
          case _: Throwable => fail(s"Can't convert \\"$s\\" to an Int")
      case None =>
        '{ $str.toInt }

  /** `String.toLong`, folded at compile time; a bad constant aborts compilation. */
  def toLong(str: Expr[String])(using Quotes): Expr[Long] =
    str.value match
      case Some(s) =>
        try
          Expr.inlineConst(s.toLong)
        catch
          case _: Throwable => fail(s"Can't convert \\"$s\\" to a Long")
      case None =>
        '{ $str.toLong }

  /**
   * Lenient boolean parsing (see [[parseBooleanOrThrow]]), folded at compile
   * time for constant input; a bad constant aborts compilation with the
   * parser's message.
   */
  def toBoolean(str: Expr[String])(using Quotes): Expr[Boolean] =
    str.value match
      case Some(s) =>
        try
          Expr.inlineConst(parseBooleanOrThrow(s))
        catch
          case t: Throwable => fail(t.getMessage)
      case None =>
        '{ parseBooleanOrThrow($str) }

  // Accepted truthy spellings: t, true, y, yes, 1, on, enable, enabled (case-insensitive).
  private val RegexTrue = Pattern.compile("^(?:t(?:rue)?|y(?:es)?|1|on|enabled?)$", Pattern.CASE_INSENSITIVE)
  // Accepted falsy spellings: f, false, n, no, 0, off, disable, disabled (case-insensitive).
  private val RegexFalse = Pattern.compile("^(?:f(?:alse)?|n(?:o)?|0|off|disabled?)$", Pattern.CASE_INSENSITIVE)

  /**
   * Parses a boolean from any of the accepted spellings above; throws a
   * RuntimeException for anything else. Shared between compile-time folding
   * and the runtime fallback splice in [[toBoolean]].
   */
  def parseBooleanOrThrow(s: String): Boolean =
    if (RegexTrue.matcher(s).matches)
      true
    else if (RegexFalse.matcher(s).matches)
      false
    else
      throw new RuntimeException(s"Can't parse \\"$s\\" as a Boolean")

  /** Inlines the pretty-printed source of the given expression as a String constant. */
  def showCode(e: Expr[Any])(using Quotes): Expr[String] =
    import quotes.reflect.*
    Expr.inlineConst(e.show)

  /** Inlines the raw tree (TASTy) representation of the given expression as a String constant. */
  def showTasty(e: Expr[Any])(using Quotes): Expr[String] =
    import quotes.reflect.*
    Expr.inlineConst("" + e.asTerm)
| japgolly/microlibs-scala | compile-time/shared/src/main/scala-3/japgolly/microlibs/compiletime/QuotingUtils.scala | Scala | apache-2.0 | 3,007 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.util
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment
import org.apache.flink.table.api.{TableSchema, Types}
import org.apache.flink.table.expressions.utils.ApiExpressionUtils.unresolvedCall
import org.apache.flink.table.expressions.{Expression, FieldReferenceExpression, UnresolvedCallExpression, ValueLiteralExpression}
import org.apache.flink.table.functions.BuiltInFunctionDefinitions
import org.apache.flink.table.functions.BuiltInFunctionDefinitions.AND
import org.apache.flink.table.runtime.utils.TimeTestUtil.EventTimeSourceFunction
import org.apache.flink.table.sources._
import org.apache.flink.table.sources.tsextractors.ExistingField
import org.apache.flink.table.sources.wmstrategies.{AscendingTimestamps, PreserveWatermarks}
import org.apache.flink.types.Row
import java.io.{File, FileOutputStream, OutputStreamWriter}
import java.util
import java.util.{Collections, List => JList}
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.mutable
/** Factory methods for CSV-backed test table sources. */
object TestTableSources {

  /** A person table exercising header skipping, comment lines and custom delimiters. */
  def getPersonCsvTableSource: CsvTableSource = {
    val records = Seq(
      "First#Id#Score#Last",
      "Mike#1#12.3#Smith",
      "Bob#2#45.6#Taylor",
      "Sam#3#7.89#Miller",
      "Peter#4#0.12#Smith",
      "% Just a comment",
      "Liz#5#34.5#Williams",
      "Sally#6#6.78#Miller",
      "Alice#7#90.1#Smith",
      "Kelly#8#2.34#Williams")
    val path = writeToTempFile(records.mkString("$"), "csv-test", "tmp")
    CsvTableSource.builder()
      .path(path)
      .field("first", Types.STRING)
      .field("id", Types.INT)
      .field("score", Types.DOUBLE)
      .field("last", Types.STRING)
      .fieldDelimiter("#")
      .lineDelimiter("$")
      .ignoreFirstLine()
      .commentPrefix("%")
      .build()
  }

  /** An orders table: (amount, currency, ts). */
  def getOrdersCsvTableSource: CsvTableSource = {
    val records = Seq(
      "2,Euro,2",
      "1,US Dollar,3",
      "50,Yen,4",
      "3,Euro,5",
      "5,US Dollar,6")
    val path = writeToTempFile(records.mkString("$"), "csv-order-test", "tmp")
    CsvTableSource.builder()
      .path(path)
      .field("amount", Types.LONG)
      .field("currency", Types.STRING)
      .field("ts", Types.LONG)
      .fieldDelimiter(",")
      .lineDelimiter("$")
      .build()
  }

  /** A currency-rates table: (currency, rate). */
  def getRatesCsvTableSource: CsvTableSource = {
    val records = Seq(
      "US Dollar,102",
      "Yen,1",
      "Euro,119",
      "RMB,702")
    val path = writeToTempFile(records.mkString("$"), "csv-rate-test", "tmp")
    CsvTableSource.builder()
      .path(path)
      .field("currency", Types.STRING)
      .field("rate", Types.LONG)
      .fieldDelimiter(",")
      .lineDelimiter("$")
      .build()
  }

  /**
   * Writes the contents to a fresh temp file (deleted on JVM exit) and
   * returns its absolute path.
   */
  private def writeToTempFile(
      contents: String,
      filePrefix: String,
      fileSuffix: String,
      charset: String = "UTF-8"): String = {
    val file = File.createTempFile(filePrefix, fileSuffix)
    file.deleteOnExit()
    val writer = new OutputStreamWriter(new FileOutputStream(file), charset)
    writer.write(contents)
    writer.close()
    file.getAbsolutePath
  }
}
/**
 * A collection-backed table source with optional rowtime/proctime attributes
 * and an optional logical-to-physical field mapping.
 */
class TestTableSourceWithTime[T](
    isBatch: Boolean,
    tableSchema: TableSchema,
    returnType: TypeInformation[T],
    values: Seq[T],
    rowtime: String = null,
    proctime: String = null,
    mapping: Map[String, String] = null)
  extends StreamTableSource[T]
  with DefinedRowtimeAttributes
  with DefinedProctimeAttribute
  with DefinedFieldMapping {

  override def isBounded: Boolean = isBatch

  override def getDataStream(execEnv: StreamExecutionEnvironment): DataStream[T] = {
    val stream = execEnv.fromCollection(values, returnType)
    // Pin max parallelism so test results are stable.
    stream.getTransformation.setMaxParallelism(1)
    stream
  }

  override def getRowtimeAttributeDescriptors: util.List[RowtimeAttributeDescriptor] = {
    if (rowtime == null) {
      // No rowtime attribute configured.
      Collections.EMPTY_LIST.asInstanceOf[util.List[RowtimeAttributeDescriptor]]
    } else {
      val descriptor = new RowtimeAttributeDescriptor(
        rowtime,
        new ExistingField(rowtime),
        new AscendingTimestamps)
      Collections.singletonList(descriptor)
    }
  }

  override def getProctimeAttribute: String = proctime

  override def getReturnType: TypeInformation[T] = returnType

  override def getTableSchema: TableSchema = tableSchema

  override def explainSource(): String = ""

  override def getFieldMapping: util.Map[String, String] = {
    if (mapping == null) null else mapping
  }
}
/**
 * A table source that emits records and watermarks from a scripted sequence
 * ([[EventTimeSourceFunction]]) and preserves the source's own watermarks.
 */
class TestPreserveWMTableSource[T](
    tableSchema: TableSchema,
    returnType: TypeInformation[T],
    values: Seq[Either[(Long, T), Long]],
    rowtime: String)
  extends StreamTableSource[T]
  with DefinedRowtimeAttributes {

  override def getRowtimeAttributeDescriptors: util.List[RowtimeAttributeDescriptor] = {
    val descriptor = new RowtimeAttributeDescriptor(
      rowtime,
      new ExistingField(rowtime),
      PreserveWatermarks.INSTANCE)
    Collections.singletonList(descriptor)
  }

  override def getDataStream(execEnv: StreamExecutionEnvironment): DataStream[T] = {
    val source = execEnv.addSource(new EventTimeSourceFunction[T](values))
    source.setParallelism(1).setMaxParallelism(1).returns(returnType)
  }

  override def getReturnType: TypeInformation[T] = returnType

  override def getTableSchema: TableSchema = tableSchema

  override def explainSource(): String = ""
}
/**
 * A collection-backed table source supporting (flat) projection push-down.
 * When a field mapping is present, projected physical fields are renamed to
 * "remapped-<name>" and the mapping is rewritten accordingly.
 */
class TestProjectableTableSource(
    isBatch: Boolean,
    tableSchema: TableSchema,
    returnType: TypeInformation[Row],
    values: Seq[Row],
    rowtime: String = null,
    proctime: String = null,
    fieldMapping: Map[String, String] = null)
  extends TestTableSourceWithTime[Row](
    isBatch,
    tableSchema,
    returnType,
    values,
    rowtime,
    proctime,
    fieldMapping)
  with ProjectableTableSource[Row] {

  override def projectFields(fields: Array[Int]): TableSource[Row] = {
    val rowType = returnType.asInstanceOf[RowTypeInfo]

    // Without a mapping: keep the projected physical names, no mapping.
    // With a mapping: rename each projected physical field to "remapped-<f>"
    // and rebuild the logical->physical mapping to point at the new names.
    // NOTE(review): inverting fieldMapping with .swap assumes physical names
    // are unique mapping values — duplicates would be silently dropped.
    val (projectedNames: Array[String], projectedMapping) = if (fieldMapping == null) {
      val projectedNames = fields.map(rowType.getFieldNames.apply(_))
      (projectedNames, null)
    } else {
      val invertedMapping = fieldMapping.map(_.swap)
      val projectedNames = fields.map(rowType.getFieldNames.apply(_))

      val projectedMapping: Map[String, String] = projectedNames.map{ f =>
        val logField = invertedMapping(f)
        logField -> s"remapped-$f"
      }.toMap
      val renamedNames = projectedNames.map(f => s"remapped-$f")
      (renamedNames, projectedMapping)
    }

    // Build the projected physical type from the (possibly renamed) names.
    val projectedTypes = fields.map(rowType.getFieldTypes.apply(_))
    val projectedReturnType = new RowTypeInfo(
      projectedTypes.asInstanceOf[Array[TypeInformation[_]]],
      projectedNames)

    // NOTE(review): the new table schema is built from projectedNames, which in
    // the mapping branch are the renamed physical names — confirm that is the
    // intended schema for downstream assertions.
    val projectedDataTypes = fields.map(tableSchema.getFieldDataTypes.apply(_))
    val newTableSchema = TableSchema.builder().fields(projectedNames, projectedDataTypes).build()

    // Narrow each row down to the projected field positions, preserving order.
    val projectedValues = values.map { fromRow =>
      val pRow = new Row(fields.length)
      fields.zipWithIndex.foreach{ case (from, to) => pRow.setField(to, fromRow.getField(from)) }
      pRow
    }

    new TestProjectableTableSource(
      isBatch,
      newTableSchema,
      projectedReturnType,
      projectedValues,
      rowtime,
      proctime,
      projectedMapping)
  }

  override def explainSource(): String = {
    s"TestSource(" +
      s"physical fields: ${getReturnType.asInstanceOf[RowTypeInfo].getFieldNames.mkString(", ")})"
  }
}
/**
 * A collection-backed table source supporting nested-field projection
 * push-down. The set of nested fields actually read is tracked in
 * [[readNestedFields]] so tests can assert on it via [[explainSource]].
 */
class TestNestedProjectableTableSource(
    isBatch: Boolean,
    tableSchema: TableSchema,
    returnType: TypeInformation[Row],
    values: Seq[Row],
    rowtime: String = null,
    proctime: String = null)
  extends TestTableSourceWithTime[Row](
    isBatch,
    tableSchema,
    returnType,
    values,
    rowtime,
    proctime,
    null)
  with NestedFieldsProjectableTableSource[Row] {

  // Initially every top-level field is considered fully read ("<field>.*").
  // Overwritten after projection (see projectNestedFields).
  var readNestedFields: Seq[String] = tableSchema.getFieldNames.map(f => s"$f.*")

  override def projectNestedFields(
      fields: Array[Int],
      nestedFields: Array[Array[String]]): TableSource[Row] = {
    val rowType = returnType.asInstanceOf[RowTypeInfo]

    val projectedNames = fields.map(rowType.getFieldNames.apply(_))
    val projectedTypes = fields.map(rowType.getFieldTypes.apply(_))

    val projectedReturnType = new RowTypeInfo(
      projectedTypes.asInstanceOf[Array[TypeInformation[_]]],
      projectedNames)

    // update read nested fields: one "<top>.<nested>" entry per requested
    // nested field, paired positionally with the projected top-level fields.
    val newReadNestedFields = projectedNames.zip(nestedFields)
      .flatMap(f => f._2.map(n => s"${f._1}.$n"))

    val projectedDataTypes = fields.map(tableSchema.getFieldDataTypes.apply(_))
    val newTableSchema = TableSchema.builder().fields(projectedNames, projectedDataTypes).build()

    // Narrow each row to the projected top-level fields (nested values are
    // carried along whole; only top-level projection is applied to the data).
    val projectedValues = values.map { fromRow =>
      val pRow = new Row(fields.length)
      fields.zipWithIndex.foreach{ case (from, to) => pRow.setField(to, fromRow.getField(from)) }
      pRow
    }

    // Construct the projected copy, then patch its read-tracking field
    // (the constructor has no parameter for it).
    val copy = new TestNestedProjectableTableSource(
      isBatch,
      newTableSchema,
      projectedReturnType,
      projectedValues,
      rowtime,
      proctime)
    copy.readNestedFields = newReadNestedFields
    copy
  }

  override def explainSource(): String = {
    s"TestSource(read nested fields: ${readNestedFields.mkString(", ")})"
  }
}
/**
* A data source that implements some very basic filtering in-memory in order to test
* expression push-down logic.
*
* @param isBatch whether this is a bounded source
* @param rowTypeInfo The type info for the rows.
* @param data The data that filtering is applied to in order to get the final dataset.
* @param filterableFields The fields that are allowed to be filtered.
* @param filterPredicates The predicates that should be used to filter.
* @param filterPushedDown Whether predicates have been pushed down yet.
*/
class TestFilterableTableSource(
    isBatch: Boolean,
    rowTypeInfo: RowTypeInfo,
    data: Seq[Row],
    filterableFields: Set[String] = Set(),
    filterPredicates: Seq[Expression] = Seq(),
    val filterPushedDown: Boolean = false)
  extends StreamTableSource[Row]
  with FilterableTableSource[Row] {

  val fieldNames: Array[String] = rowTypeInfo.getFieldNames

  val fieldTypes: Array[TypeInformation[_]] = rowTypeInfo.getFieldTypes

  override def isBounded: Boolean = isBatch

  override def getDataStream(execEnv: StreamExecutionEnvironment): DataStream[Row] = {
    // Pushed-down predicates are evaluated eagerly here, before the data is
    // handed to the runtime, emulating source-side filtering.
    execEnv.fromCollection[Row](applyPredicatesToRows(data).asJava, getReturnType)
      .setParallelism(1).setMaxParallelism(1)
  }

  override def explainSource(): String = {
    // Render the accepted predicates as a single AND-conjunction, or an empty
    // string when nothing was pushed down.
    if (filterPredicates.nonEmpty) {
      s"filter=[${filterPredicates.reduce((l, r) => unresolvedCall(AND, l, r)).toString}]"
    } else {
      ""
    }
  }

  override def getReturnType: TypeInformation[Row] = rowTypeInfo

  override def applyPredicate(predicates: JList[Expression]): TableSource[Row] = {
    // Accept (and remove from the planner's list) every predicate this source
    // can evaluate; leave the rest for the planner to apply downstream.
    val predicatesToUse = new mutable.ListBuffer[Expression]()
    val iterator = predicates.iterator()
    while (iterator.hasNext) {
      val expr = iterator.next()
      if (shouldPushDown(expr)) {
        predicatesToUse += expr
        iterator.remove()
      }
    }

    new TestFilterableTableSource(
      isBatch,
      rowTypeInfo,
      data,
      filterableFields,
      predicatesToUse,
      filterPushedDown = true)
  }

  override def isFilterPushedDown: Boolean = filterPushedDown

  private def applyPredicatesToRows(rows: Seq[Row]): Seq[Row] = rows.filter(shouldKeep)

  private def shouldPushDown(expr: Expression): Boolean = {
    // Only binary (two-child) calls are candidates; delegates to the
    // overload below for the operand check.
    expr match {
      case expr: UnresolvedCallExpression if expr.getChildren.size() == 2 => shouldPushDown(expr)
      case _ => false
    }
  }

  private def shouldPushDown(binExpr: UnresolvedCallExpression): Boolean = {
    // A binary call is pushable when every field it references is filterable.
    // NOTE(review): the function itself is not checked here, so a non-comparison
    // call (e.g. arithmetic) over filterable fields would be accepted and later
    // hit a MatchError in binaryFilterApplies — confirm this is intended for
    // the tests using this source.
    val children = binExpr.getChildren
    require(children.size() == 2)
    (children.head, children.last) match {
      case (f: FieldReferenceExpression, _: ValueLiteralExpression) =>
        filterableFields.contains(f.getName)
      case (_: ValueLiteralExpression, f: FieldReferenceExpression) =>
        filterableFields.contains(f.getName)
      case (f1: FieldReferenceExpression, f2: FieldReferenceExpression) =>
        filterableFields.contains(f1.getName) && filterableFields.contains(f2.getName)
      case (_, _) => false
    }
  }

  private def shouldKeep(row: Row): Boolean = {
    // A row survives when every accepted predicate evaluates to true
    // (vacuously true when nothing was pushed down).
    filterPredicates.isEmpty || filterPredicates.forall {
      case expr: UnresolvedCallExpression if expr.getChildren.size() == 2 =>
        binaryFilterApplies(expr, row)
      case expr => throw new RuntimeException(expr + " not supported!")
    }
  }

  private def binaryFilterApplies(binExpr: UnresolvedCallExpression, row: Row): Boolean = {
    val children = binExpr.getChildren
    require(children.size() == 2)

    // Evaluate both operands against the row, then dispatch on the comparison.
    // Non-comparison functions fall through to a MatchError (see note above).
    val (lhsValue, rhsValue) = extractValues(binExpr, row)

    binExpr.getFunctionDefinition match {
      case BuiltInFunctionDefinitions.GREATER_THAN =>
        lhsValue.compareTo(rhsValue) > 0
      case BuiltInFunctionDefinitions.LESS_THAN =>
        lhsValue.compareTo(rhsValue) < 0
      case BuiltInFunctionDefinitions.GREATER_THAN_OR_EQUAL =>
        lhsValue.compareTo(rhsValue) >= 0
      case BuiltInFunctionDefinitions.LESS_THAN_OR_EQUAL =>
        lhsValue.compareTo(rhsValue) <= 0
      case BuiltInFunctionDefinitions.EQUALS =>
        lhsValue.compareTo(rhsValue) == 0
      case BuiltInFunctionDefinitions.NOT_EQUALS =>
        lhsValue.compareTo(rhsValue) != 0
    }
  }

  private def extractValues(
      binExpr: UnresolvedCallExpression,
      row: Row): (Comparable[Any], Comparable[Any]) = {
    // Resolve each operand: field references read from the row by index,
    // literals via getValue. Covers all four field/literal combinations.
    val children = binExpr.getChildren
    require(children.size() == 2)

    (children.head, children.last) match {
      case (l: FieldReferenceExpression, r: ValueLiteralExpression) =>
        val idx = rowTypeInfo.getFieldIndex(l.getName)
        val lv = row.getField(idx).asInstanceOf[Comparable[Any]]
        val rv = getValue(r)
        (lv, rv)
      case (l: ValueLiteralExpression, r: FieldReferenceExpression) =>
        val idx = rowTypeInfo.getFieldIndex(r.getName)
        val lv = getValue(l)
        val rv = row.getField(idx).asInstanceOf[Comparable[Any]]
        (lv, rv)
      case (l: ValueLiteralExpression, r: ValueLiteralExpression) =>
        val lv = getValue(l)
        val rv = getValue(r)
        (lv, rv)
      case (l: FieldReferenceExpression, r: FieldReferenceExpression) =>
        val lidx = rowTypeInfo.getFieldIndex(l.getName)
        val ridx = rowTypeInfo.getFieldIndex(r.getName)
        val lv = row.getField(lidx).asInstanceOf[Comparable[Any]]
        val rv = row.getField(ridx).asInstanceOf[Comparable[Any]]
        (lv, rv)
      case _ => throw new RuntimeException(binExpr + " not supported!")
    }
  }

  private def getValue(v: ValueLiteralExpression): Comparable[Any] = {
    // NOTE(review): returns null when the literal cannot be converted to its
    // conversion class; a subsequent compareTo on a null operand would NPE.
    val value = v.getValueAs(v.getOutputDataType.getConversionClass)
    if (value.isPresent) {
      value.get().asInstanceOf[Comparable[Any]]
    } else {
      null
    }
  }

  override def getTableSchema: TableSchema = new TableSchema(fieldNames, fieldTypes)
}
/** Factory for [[TestFilterableTableSource]] instances. */
object TestFilterableTableSource {

  /**
   * @return The default filterable table source.
   */
  def apply(isBatch: Boolean): TestFilterableTableSource =
    apply(isBatch, defaultTypeInfo, defaultRows, defaultFilterableFields)

  /**
   * A filterable data source with custom data.
   *
   * @param isBatch whether this is a bounded source
   * @param rowTypeInfo The type of the data. Its expected that both types and field
   *                    names are provided.
   * @param rows The data as a sequence of rows.
   * @param filterableFields The fields that are allowed to be filtered on.
   * @return The table source.
   */
  def apply(
      isBatch: Boolean,
      rowTypeInfo: RowTypeInfo,
      rows: Seq[Row],
      filterableFields: Set[String]): TestFilterableTableSource =
    new TestFilterableTableSource(isBatch, rowTypeInfo, rows, filterableFields)

  // By default only the "amount" column may be filtered on.
  private lazy val defaultFilterableFields = Set("amount")

  // Schema of the default data set: (name, id, amount, price).
  private lazy val defaultTypeInfo: RowTypeInfo = {
    val fieldNames: Array[String] = Array("name", "id", "amount", "price")
    val fieldTypes: Array[TypeInformation[_]] =
      Array(Types.STRING, Types.LONG, Types.INT, Types.DOUBLE)
    new RowTypeInfo(fieldTypes, fieldNames)
  }

  // 33 synthetic rows, all numeric columns derived from the row index.
  private lazy val defaultRows: Seq[Row] =
    (0 until 33).map { cnt =>
      Row.of(
        s"Record_$cnt",
        cnt.toLong.asInstanceOf[AnyRef],
        cnt.toInt.asInstanceOf[AnyRef],
        cnt.toDouble.asInstanceOf[AnyRef])
    }
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/util/testTableSources.scala | Scala | apache-2.0 | 17,767 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.tree
import scala.collection.mutable
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.tree.model.TreeEnsembleModel
import org.apache.spark.util.StatCounter
/** Shared helpers for tree-ensemble test suites. */
object EnsembleTestHelper {

  /**
   * Aggregates all values in data, and tests whether the empirical mean and stddev are within
   * epsilon of the expected values.
   * @param data Every element of the data should be an i.i.d. sample from some distribution.
   */
  def testRandomArrays(
      data: Array[Array[Double]],
      numCols: Int,
      expectedMean: Double,
      expectedStddev: Double,
      epsilon: Double) {
    val values = new mutable.ArrayBuffer[Double]()
    for (row <- data) {
      assert(row.size == numCols)
      values ++= row
    }
    val stats = new StatCounter(values)
    assert(math.abs(stats.mean - expectedMean) < epsilon)
    assert(math.abs(stats.stdev - expectedStddev) < epsilon)
  }

  /**
   * Asserts that the model classifies the given input with at least the
   * required accuracy (fraction of correctly predicted labels).
   */
  def validateClassifier(
      model: TreeEnsembleModel,
      input: Seq[LabeledPoint],
      requiredAccuracy: Double) {
    val numOffPredictions = input.count { point =>
      model.predict(point.features) != point.label
    }
    val accuracy = (input.length - numOffPredictions).toDouble / input.length
    assert(accuracy >= requiredAccuracy,
      s"validateClassifier calculated accuracy $accuracy but required $requiredAccuracy.")
  }

  /**
   * Validates a tree ensemble model for regression.
   * Supported metrics: "mse" (mean squared error) and "mae" (mean absolute error).
   */
  def validateRegressor(
      model: TreeEnsembleModel,
      input: Seq[LabeledPoint],
      required: Double,
      metricName: String = "mse") {
    val errors = input.map(point => point.label - model.predict(point.features))
    val metric = metricName match {
      case "mse" =>
        errors.map(e => e * e).sum / errors.size
      case "mae" =>
        errors.map(math.abs).sum / errors.size
    }
    assert(metric <= required,
      s"validateRegressor calculated $metricName $metric but required $required.")
  }

  /**
   * Generates labeled points whose label follows a fixed pattern over the
   * index range (0 for the first 10%, 1 up to 50%, 0 up to 90%, then 1),
   * with every feature of point i equal to i.
   */
  def generateOrderedLabeledPoints(numFeatures: Int, numInstances: Int): Array[LabeledPoint] = {
    Array.tabulate(numInstances) { i =>
      val label =
        if (i < numInstances / 10) 0.0
        else if (i < numInstances / 2) 1.0
        else if (i < numInstances * 0.9) 0.0
        else 1.0
      new LabeledPoint(label, Vectors.dense(Array.fill[Double](numFeatures)(i.toDouble)))
    }
  }
}
| saturday-shi/spark | mllib/src/test/scala/org/apache/spark/mllib/tree/EnsembleTestHelper.scala | Scala | apache-2.0 | 3,569 |
// Solution-5.scala
// Solution to Exercise 5 in "Class Arguments"
// This produces an error:
// NOTE: this definition is deliberately broken — a vararg parameter
// (kids:String*) must be the *last* parameter in the list, so the compiler
// rejects it with "*-parameter must come last" (expected output below).
// Do not "fix" it: producing that error is the point of the exercise.
class Family4(kids:String*, mom:String, dad:String) {
  // Family size = the two parents plus one per kid.
  def familySize():Int = {
    var count = 2
    for(i<-kids) {
      count = count + 1
    }
    count
  }
}
/* OUTPUT_SHOULD_CONTAIN
error: *-parameter must come last
class Family4(kids:String*, mom:String, dad:String) {
^
one error found
*/
| P7h/ScalaPlayground | Atomic Scala/atomic-scala-solutions/22_ClassArguments/Solution-5.scala | Scala | apache-2.0 | 409 |
Subsets and Splits
Filtered Scala Code Snippets
This query filters the dataset and returns a sample of code snippets that match specific criteria, giving a basic overview of the dataset's contents without deeper analysis.