code
stringlengths
5
1M
repo_name
stringlengths
5
109
path
stringlengths
6
208
language
stringclasses
1 value
license
stringclasses
15 values
size
int64
5
1M
package glaux.interfaces.api package object domain { type ProfileId = String type AgentName = String type Reward = glaux.reinforcementlearning.Reward type Action = glaux.reinforcementlearning.Action type Time = glaux.reinforcementlearning.Time type Reading = glaux.reinforcementlearning.Reading }
A-Noctua/glaux
interface-api/src/main/scala/glaux/interfaces/api/domain/package.scala
Scala
mit
312
package edu.rice.habanero.benchmarks.concsll import java.util.Random import edu.rice.habanero.actors.{ScalazActor, ScalazActorState, ScalazPool} import edu.rice.habanero.benchmarks.concsll.SortedListConfig.{DoWorkMessage, EndWorkMessage} import edu.rice.habanero.benchmarks.{Benchmark, BenchmarkRunner} /** * * @author <a href="http://shams.web.rice.edu/">Shams Imam</a> (shams@rice.edu) */ object SortedListScalazActorBenchmark { def main(args: Array[String]) { BenchmarkRunner.runBenchmark(args, new SortedListScalazActorBenchmark) } private final class SortedListScalazActorBenchmark extends Benchmark { def initialize(args: Array[String]) { SortedListConfig.parseArgs(args) } def printArgInfo() { SortedListConfig.printArgs() } def runIteration() { val numWorkers: Int = SortedListConfig.NUM_ENTITIES val numMessagesPerWorker: Int = SortedListConfig.NUM_MSGS_PER_WORKER val master = new Master(numWorkers, numMessagesPerWorker) master.start() ScalazActorState.awaitTermination() } def cleanupIteration(lastIteration: Boolean, execTimeMillis: Double) { if (lastIteration) { ScalazPool.shutdown() } } } private class Master(numWorkers: Int, numMessagesPerWorker: Int) extends ScalazActor[AnyRef] { private final val workers = new Array[ScalazActor[AnyRef]](numWorkers) private final val sortedList = new SortedList() private var numWorkersTerminated: Int = 0 override def onPostStart() { sortedList.start() var i: Int = 0 while (i < numWorkers) { workers(i) = new Worker(this, sortedList, i, numMessagesPerWorker) workers(i).start() workers(i).send(DoWorkMessage.ONLY) i += 1 } } override def process(msg: AnyRef) { if (msg.isInstanceOf[SortedListConfig.EndWorkMessage]) { numWorkersTerminated += 1 if (numWorkersTerminated == numWorkers) { sortedList.send(EndWorkMessage.ONLY) exit() } } } } private class Worker(master: Master, sortedList: SortedList, id: Int, numMessagesPerWorker: Int) extends ScalazActor[AnyRef] { private final val writePercent = SortedListConfig.WRITE_PERCENTAGE private 
final val sizePercent = SortedListConfig.SIZE_PERCENTAGE private var messageCount: Int = 0 private final val random = new Random(id + numMessagesPerWorker + writePercent + sizePercent) override def process(msg: AnyRef) { messageCount += 1 if (messageCount <= numMessagesPerWorker) { val anInt: Int = random.nextInt(100) if (anInt < sizePercent) { sortedList.send(new SortedListConfig.SizeMessage(this)) } else if (anInt < (sizePercent + writePercent)) { sortedList.send(new SortedListConfig.WriteMessage(this, random.nextInt)) } else { sortedList.send(new SortedListConfig.ContainsMessage(this, random.nextInt)) } } else { master.send(EndWorkMessage.ONLY) exit() } } } private class SortedList extends ScalazActor[AnyRef] { private[concsll] final val dataList = new SortedLinkedList[Integer] override def process(msg: AnyRef) { msg match { case writeMessage: SortedListConfig.WriteMessage => val value: Int = writeMessage.value dataList.add(value) val sender = writeMessage.sender.asInstanceOf[ScalazActor[AnyRef]] sender.send(new SortedListConfig.ResultMessage(this, value)) case containsMessage: SortedListConfig.ContainsMessage => val value: Int = containsMessage.value val result: Int = if (dataList.contains(value)) 1 else 0 val sender = containsMessage.sender.asInstanceOf[ScalazActor[AnyRef]] sender.send(new SortedListConfig.ResultMessage(this, result)) case readMessage: SortedListConfig.SizeMessage => val value: Int = dataList.size val sender = readMessage.sender.asInstanceOf[ScalazActor[AnyRef]] sender.send(new SortedListConfig.ResultMessage(this, value)) case _: SortedListConfig.EndWorkMessage => printf(BenchmarkRunner.argOutputFormat, "List Size", dataList.size) exit() case _ => System.err.println("Unsupported message: " + msg) } } } }
smarr/savina
src/main/scala/edu/rice/habanero/benchmarks/concsll/SortedListScalazActorBenchmark.scala
Scala
gpl-2.0
4,374
package at.logic.gapt.proofs.lk.base import at.logic.gapt.expr.hol.{ HOLPosition, HOLOrdering } import at.logic.gapt.proofs.{ HOLSequent, Suc, Ant, SequentIndex } import at.logic.gapt.proofs.occurrences._ import at.logic.gapt.proofs.proofs._ import at.logic.gapt.expr._ import at.logic.gapt.utils.ds.trees._ object HOLSequentOrdering extends HOLSequentOrdering /** * Ordering for sequents. */ class HOLSequentOrdering extends Ordering[HOLSequent] { def compare( x: HOLSequent, y: HOLSequent ): Int = { if ( x.antecedent.size < y.antecedent.size ) -1 else if ( y.antecedent.size < x.antecedent.size ) 1 else if ( x.antecedent.size == y.antecedent.size && x.succedent.size < y.succedent.size ) -1 else if ( x.antecedent.size == y.antecedent.size && y.succedent.size < x.succedent.size ) 1 else { assert( x.antecedent.size == y.antecedent.size && x.succedent.size == y.succedent.size, "Implementation error comparing HOLSequents!" ) val xs = x.sorted( HOLOrdering ).elements val ys = y.sorted( HOLOrdering ).elements val xys = xs zip ys xys.foldLeft( 0 )( ( rv, pair ) => { //as long as it is undecided, we compare pairs if ( rv == 0 ) HOLOrdering.compare( pair._1, pair._2 ) //otherwise we pass the result on else rv } ) } } } // exceptions class LKRuleException( msg: String ) extends RuleException( msg ) class LKRuleCreationException( msg: String ) extends LKRuleException( msg ) //these two classes allow detailed error diagnosis case class LKUnaryRuleCreationException( name: String, parent: LKProof, aux: List[HOLFormula] ) extends LKRuleCreationException( "" ) { override def getMessage = "Could not create lk rule " + name + " from parent " + parent.root + " with auxiliary formulas " + aux.mkString( ", " ) } case class LKBinaryRuleCreationException( name: String, parent1: LKProof, aux1: HOLFormula, parent2: LKProof, aux2: HOLFormula ) extends LKRuleCreationException( "" ) { override def getMessage = "Could not create lk rule " + name + " from left parent " + parent1.root + " with 
auxiliary formula " + aux1 + " and right parent " + parent2.root + " with auxiliary formula " + aux2 } class FormulaNotExistsException( msg: String ) extends LKRuleException( msg ) trait LKProof extends TreeProof[OccSequent] with Tree[OccSequent] { def getDescendantInLowerSequent( fo: FormulaOccurrence ): Option[FormulaOccurrence] = { ( root.antecedent ++ root.succedent ).filter( ( occ: FormulaOccurrence ) => occ.isDescendantOf( fo, reflexive = true ) ) match { case x :: Nil => Some( x ) case Nil => None case _ => throw new LKRuleException( "Illegal lower sequent in rule in application of getDescendantInLowerSequent: More than one such formula exists" ) } } def containsDescendantOf( fo: FormulaOccurrence ): Boolean = getDescendantInLowerSequent( fo ) match { case Some( _ ) => true case None => false } override def toString = rule + "(" + root.toHOLSequent.toString + ")" } trait NullaryLKProof extends LeafTree[OccSequent] with LKProof with NullaryTreeProof[OccSequent] { override def toString = rule + "(" + root.toHOLSequent.toString + ")" } trait UnaryLKProof extends UnaryTree[OccSequent] with LKProof with UnaryTreeProof[OccSequent] { override def uProof = t.asInstanceOf[LKProof] override def toString = rule + "(" + root.toHOLSequent.toString + ")" } trait BinaryLKProof extends BinaryTree[OccSequent] with LKProof with BinaryTreeProof[OccSequent] { override def uProof1 = t1.asInstanceOf[LKProof] override def uProof2 = t2.asInstanceOf[LKProof] override def toString = rule + "(" + root.toHOLSequent.toString + ")" } // traits denoting having auxiliary and main formulas trait AuxiliaryFormulas { // for each upper sequent we have a list of occurrences def aux: List[List[FormulaOccurrence]] } trait PrincipalFormulas { def prin: List[FormulaOccurrence] } trait SubstitutionTerm { def subst: LambdaExpression } trait Eigenvariable { def eigenvar: Var } trait TermPositions { def termPos: List[HOLPosition] } // method for creating the context of the lower sequent. 
Essentially creating nre occurrences // create new formula occurrences in the new context object createContext { def apply( set: Seq[FormulaOccurrence] ): Seq[FormulaOccurrence] = set.map( x => x.factory.createFormulaOccurrence( x.formula.asInstanceOf[HOLFormula], x :: Nil ) ) }
loewenheim/gapt
src/main/scala/at/logic/gapt/proofs/lk/base.scala
Scala
gpl-3.0
4,473
package main.scala import spark._ import spark.SparkContext._ import io.Source // TODO - DataWrangler goes here object SummerCruncher { def main(args: Array[String]) { if (args.length < 2) System.err.println("\\nPlease input a file to polarize, and output file [--format] [sparkcontext]\\n") if (args.length < 2) System.exit(1) val spark_string = if (args.length > 3) args(3) else "local[16]" val sc = new SparkContext(spark_string, "Crunchin-dem-tweets") val cp = new CalculatePolarities(sc) //Sun Feb 03 21:34:53 EST 2013 val df = new java.text.SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy") // ID DATE-TIME NAME LOCATION BLANK BLANK TWEET LANGUAGE val tweets = sc.textFile(args(0)).map(line => line.split("\\t")).persist() println("\\nStarting with : " + tweets.count() +"\\n") val cleaned = tweets.filter(line => line.length == 8) .filter(line => line(0) != "" && line(1) != "" && line(7) == "en") //.map() datestring => epoch val processed = if (args(2) == "--libsvm") cleaned.map(line => { val epoch = df.parse(line(1)).getTime() "0 " + cp.compute(line(6)).zip(1 to line(6).length).map{case(e,i) => i+":"+e}.mkString(" ") +","+ epoch.toString +","+ line(6) }) else cleaned.map(line => { val epoch = df.parse(line(1)).getTime() cp.compute(line(6)).mkString(" ") +","+ epoch.toString +","+ line(6) }) .persist() println("\\nAnalyizing : " + processed.count() + " tweets\\n") processed.saveAsTextFile(args(1) + "/Polarity_Time_Tweet/") processed.map(_.split(",")).map(_.head).saveAsTextFile(args(1) + "/Polarity/") } }
AustinBGibbons/emoticat
twitter_cruncher/src/main/scala/SummerCruncher.scala
Scala
bsd-3-clause
1,710
package cobalt.parser.statement import cobalt.ast.AST._ import cobalt.parser.StatementParser import cobalt.utils.TestUtil import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import org.scalatest.{FunSpec, Matchers} import scala.collection.mutable.ArrayBuffer @RunWith(classOf[JUnitRunner]) class ReassignParserTest extends FunSpec with Matchers { describe("Reassign parser") { it("Should parse reassignment an inline statementParser") { TestUtil.parse("x <- 2", StatementParser.statementParser) shouldBe Reassign(Name("x"),Inline(IntConst(2))) } it("Should parse reassignment with a do block") { val code = """x <- do | 1 | 2 | 3 """.stripMargin.replace("\r", "") TestUtil.parse(code, StatementParser.statementParser) shouldBe Reassign(Name("x"),DoBlock(ArrayBuffer(ExprAsStmt(IntConst(1)), ExprAsStmt(IntConst(2)), ExprAsStmt(IntConst(3))))) } } }
Michael2109/cobalt
src/test/scala/cobalt/parser/statement/ReassignParserTest.scala
Scala
lgpl-3.0
969
/* Copyright 2017-19, Emmanouil Antonios Platanios. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.platanios.tensorflow.api.implicits.helpers import org.platanios.tensorflow.api.core.types.DataType import org.platanios.tensorflow.api.ops.Output import shapeless._ import shapeless.ops.hlist.Tupler /** Type trait used to map structures of tensors to structures of symbolic tensors. * * @author Emmanouil Antonios Platanios */ trait DataTypeToOutput[D] { type O def dataTypeStructure: DataTypeStructure[D] } object DataTypeToOutput extends DataTypeToOutputLowPriorityImplicits { def apply[D](implicit ev: DataTypeToOutput[D]): Aux[D, ev.O] = { ev.asInstanceOf[Aux[D, ev.O]] } type Aux[D, OO] = DataTypeToOutput[D] { type O = OO } implicit val fromUnit: Aux[Unit, Unit] = { new DataTypeToOutput[Unit] { override type O = Unit override def dataTypeStructure: DataTypeStructure[Unit] = { DataTypeStructure.fromUnit } } } implicit def fromDataType[T]: Aux[DataType[T], Output[T]] = { new DataTypeToOutput[DataType[T]] { override type O = Output[T] override def dataTypeStructure: DataTypeStructure[DataType[T]] = { DataTypeStructure.fromOutput[T] } } } implicit def fromOption[D](implicit ev: DataTypeToOutput[D] ): DataTypeToOutput.Aux[Option[D], Option[ev.O]] = { new DataTypeToOutput[Option[D]] { override type O = Option[ev.O] override def dataTypeStructure: DataTypeStructure[Option[D]] = { DataTypeStructure.fromOption[D](ev.dataTypeStructure) } } } implicit def 
fromSeq[D](implicit ev: DataTypeToOutput[D] ): DataTypeToOutput.Aux[Seq[D], Seq[ev.O]] = { new DataTypeToOutput[Seq[D]] { override type O = Seq[ev.O] override def dataTypeStructure: DataTypeStructure[Seq[D]] = { DataTypeStructure.fromSeq[D](ev.dataTypeStructure) } } } implicit def fromMap[K, D](implicit ev: DataTypeToOutput[D] ): DataTypeToOutput.Aux[Map[K, D], Map[K, ev.O]] = { new DataTypeToOutput[Map[K, D]] { override type O = Map[K, ev.O] override def dataTypeStructure: DataTypeStructure[Map[K, D]] = { DataTypeStructure.fromMap[K, D](ev.dataTypeStructure) } } } implicit val fromHNil: DataTypeToOutput.Aux[HNil, HNil] = { new DataTypeToOutput[HNil] { override type O = HNil override def dataTypeStructure: DataTypeStructure[HNil] = { DataTypeStructure.fromHNil } } } implicit def fromHList[HD, HO, TD <: HList, TO <: HList](implicit evH: Strict[DataTypeToOutput.Aux[HD, HO]], evT: Strict[DataTypeToOutput.Aux[TD, TO]] ): DataTypeToOutput.Aux[HD :: TD, HO :: TO] = { new DataTypeToOutput[HD :: TD] { override type O = HO :: TO override def dataTypeStructure: DataTypeStructure[HD :: TD] = { DataTypeStructure.fromHList[HD, TD](evH.value.dataTypeStructure, evT.value.dataTypeStructure) } } } implicit def fromKnownProduct[PD <: Product, PO, HD <: HList, HO <: HList](implicit genD: Generic.Aux[PD, HD], evD: Strict[DataTypeToOutput.Aux[HD, HO]], ): DataTypeToOutput.Aux[PD, PO] = { new DataTypeToOutput[PD] { override type O = PO override def dataTypeStructure: DataTypeStructure[PD] = { DataTypeStructure.fromProduct[PD, HD](genD, evD.value.dataTypeStructure) } } } } trait DataTypeToOutputLowPriorityImplicits { implicit def fromProduct[PD <: Product, PO, HD <: HList, HO <: HList](implicit genD: Generic.Aux[PD, HD], evD: Strict[DataTypeToOutput.Aux[HD, HO]], tuplerO: Tupler.Aux[HO, PO] ): DataTypeToOutput.Aux[PD, PO] = { DataTypeToOutput.fromKnownProduct } }
eaplatanios/tensorflow_scala
modules/api/src/main/scala/org/platanios/tensorflow/api/implicits/helpers/DataTypeToOutput.scala
Scala
apache-2.0
4,319
/** * Copyright 2015 Thomson Reuters * * Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package cmwell.plugins.impl import javax.script.{ScriptEngine, ScriptException} import cmwell.blueprints.jena.{JenaGraph, QueryException} import cmwell.plugins.spi.SgEngineClient import org.apache.jena.query.Dataset import com.tinkerpop.gremlin.groovy.jsr223.GremlinGroovyScriptEngine import com.tinkerpop.pipes.Pipe import com.tinkerpop.pipes.util.iterators.SingleIterator import scala.util.{Failure, Success, Try} class GremlinParser extends SgEngineClient { import scala.collection.JavaConversions._ override def eval(ds: Dataset, query: String): String = { val graph: com.tinkerpop.blueprints.Graph = new JenaGraph(ds.getDefaultModel) // todo figure out if Quads cannot be supported on Gremlin!!! val engine: ScriptEngine = new GremlinGroovyScriptEngine() val bindings = engine.createBindings bindings.put("g", graph) def eval = engine.eval(query, bindings) // evil hack: def extractStartElementFromQuery = { // should match the first v("URI") OR e("URI") and extract the URI out of it: """(?:[v|e]\\(")(.+?)(?:"\\))""".r.findFirstMatchIn(query).map(_.group(1)) // how does it work? glad you asked: // (?:[v|e]\\(") --> non-capturing group of v or e with a (" afterwards // (.+?) 
--> capturing group of non-greedy whatever with one or more chars // (?:"\\)) --> non-capturing group of ") // get the first occurrence of that if exists, or return None //todo one possible improvement is to have [v|e] captured and returned along with the URI, //todo so the code invoking will know whether to getVertex(URI) or getEdge(URI) //todo 2 - does g.e("... even legal?! } def makeTypedPipe[T](starts: T) = { val pipe = eval // must re-eval per type, otherwise setStarts overrides itself and the universe collapses. val typedPipe = pipe.asInstanceOf[Pipe[T, String]] typedPipe.setStarts(new SingleIterator[T](starts)) typedPipe } def read(p: Pipe[_, _]) = p.iterator().mkString("\\n") val firstNode = extractStartElementFromQuery.map(e => Try(graph.getVertex(e)).getOrElse(graph.getEdge(e))) Try(eval) match { case Failure(e) => e match { case e: QueryException => s"[ Error: ${e.getMessage} ]" case e: ScriptException => "[ Gremlin Syntax Error ]" } case Success(r) => r match { case p: Pipe[_, _] => { Seq(Some(graph), firstNode) .collect { case Some(x) => x } .map(makeTypedPipe) .map( p => Try(read(p)) match { case Success(r) => Some(r) case Failure(e) => e match { case e: ClassCastException => None case _ => Some("[ Unknown Error ]") } } ) .collect { case Some(s) => s } .mkString } case null => "[ Requested element not present in Graph! ]" case v => v.toString } } } }
TRnonodename/CM-Well
server/cmwell-plugin-gremlin/src/main/scala/cmwell/plugins/impl/GremlinParser.scala
Scala
apache-2.0
3,749
/* * tuProlog - Copyright (C) 2001-2002 aliCE team at deis.unibo.it * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package com.szadowsz.gospel.core.engine.state import com.szadowsz.gospel.core.engine.{Engine, EngineRunner} import com.szadowsz.gospel.util.LoggerCategory import org.slf4j.LoggerFactory /** * Template for states of Core Engine. Not for consumption outside of the core engine package. * */ trait State { /** * Use one logger category for all states. */ protected lazy val _logger = LoggerFactory.getLogger(LoggerCategory.ENGINE_STATE) /** * the runner the state occurred in. */ protected val runner : EngineRunner /** * the name of the engine state. */ protected val stateName: String private[engine] def doJob(e: Engine) : Unit override def toString: String = stateName }
zakski/project-soisceal
scala-core/src/main/scala/com/szadowsz/gospel/core/engine/state/State.scala
Scala
lgpl-3.0
1,538
package com.rasterfoundry.common.utils import geotrellis.util._ final case class CacheRangeReader(rr: RangeReader, cachedBytes: Array[Byte]) extends RangeReader { def totalLength: Long = rr.totalLength override def readRange(start: Long, length: Int): Array[Byte] = { val end = length + start if (end <= cachedBytes.length) java.util.Arrays.copyOfRange(cachedBytes, start.toInt, end.toInt) else rr.readRange(start, length) } protected def readClippedRange(start: Long, length: Int): Array[Byte] = ??? override def readAll(): Array[Byte] = { rr.readAll() } }
azavea/raster-foundry
app-backend/common/src/main/scala/utils/CacheRangeReader.scala
Scala
apache-2.0
607
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package gov.nasa.jpl.analytics.tools.dump import java.io._ import gov.nasa.jpl.analytics.base.{Loggable, CliTool} import gov.nasa.jpl.analytics.model.CdrDumpParam import gov.nasa.jpl.analytics.nutch.SegmentReader import gov.nasa.jpl.analytics.util.{Constants} import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.{FileSystem, Path} import org.apache.nutch.crawl._ import org.apache.nutch.metadata.Metadata import org.apache.nutch.protocol.Content import org.apache.spark.rdd.RDD import org.apache.spark.{SparkConf, SparkContext} import org.json.simple.JSONObject import org.kohsuke.args4j.Option import scala.collection.JavaConversions._ /** * Created by karanjeetsingh on 8/31/16. 
*/ class CrawlDumper extends CliTool { import CrawlDumper._ @Option(name = "-m", aliases = Array("--master")) var sparkMaster: String = "local[*]" @Option(name = "-sd", aliases = Array("--segmentDir")) var segmentDir: String = "" @Option(name = "-sf", aliases = Array("--segmentFile")) var segmentFile: String = "" @Option(name = "-ldb", aliases = Array("--linkDb")) var linkDb: String = "" @Option(name = "-cdb", aliases = Array("--crawlDb")) var crawlDb: String = "" @Option(name = "-id", aliases = Array("--crawlId")) var crawlId: String = "" @Option(name = "-o", aliases = Array("--outputDir")) var outputDir: String = "" var sc: SparkContext = _ def init(): Unit = { val conf = new SparkConf() conf.setAppName("CrawlDumper") .setMaster(sparkMaster) .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .set("spark.kryo.classesToRegister", "java.util.HashSet,java.util.HashMap") .set("spark.kryoserializer.buffer.max", "2040m") .set("spark.driver.maxResultSize", "2048m") conf.registerKryoClasses(Array(classOf[Content], classOf[Inlinks], classOf[Inlink], classOf[Metadata], classOf[CrawlDb])) sc = new SparkContext(conf) } def getFileTree(f: File): Stream[File] = f #:: (if (f.isDirectory) f.listFiles().toStream.flatMap(getFileTree) else Stream.empty) override def run(): Unit = { // Initialize Spark Context init() val config: Configuration = sc.hadoopConfiguration // Reading LinkDb var linkDbParts: List[Path] = List() linkDbParts = SegmentReader.listFromDir(linkDb, config, LinkDb.CURRENT_NAME) var linkDbRdds: Seq[RDD[Tuple2[String, Inlinks]]] = Seq() for (part <- linkDbParts) { linkDbRdds :+= sc.sequenceFile[String, Inlinks](part.toString) } println("Number of LinkDb Segments to process: " + linkDbRdds.length) val linkDbRdd = sc.union(linkDbRdds) // If Read CrawlDb Locally //var crawlDbParts: Stream[File] = Stream() //val partPattern: String = ".*" + File.separator + "current" + // File.separator + "part-[0-9]{5}" + File.separator + "data" //crawlDbParts = 
getFileTree(new File(crawlDb)).filter(_.getAbsolutePath.matches(partPattern)) // Reading CrawlDb var crawlDbParts: List[Path] = List() crawlDbParts = SegmentReader.listFromDir(crawlDb, config, "current") var crawlDbRdds: Seq[RDD[Tuple2[String, CrawlDatum]]] = Seq() for (part <- crawlDbParts) { crawlDbRdds :+= sc.sequenceFile[String, CrawlDatum](part.toString) } println("Number of CrawlDb Segments to process: " + crawlDbRdds.length) val crawlDbRdd = sc.union(crawlDbRdds) // Generate a list of segment parts var segParts: List[Path] = List() if (!segmentDir.isEmpty) { segParts = SegmentReader.listFromDir(segmentDir, config) } else if (!segmentFile.isEmpty) { segParts = SegmentReader.listFromFile(segmentFile) } else { println("Please provide Segment Path") System.exit(1) } // Converting all Segment parts to RDDs var segRdds: Seq[RDD[Tuple2[String, Content]]] = Seq() for (part <- segParts) { segRdds :+= sc.sequenceFile[String, Content](part.toString) } println("Number of Segments to process: " + segRdds.length) val segRdd = sc.union(segRdds) val rdd:RDD[Tuple4[String, Content, CrawlDatum, Inlinks]] = segRdd .join(crawlDbRdd) .leftOuterJoin(linkDbRdd) .map{case (url, ((content, crawlDatum), inLinks)) => (url, content, crawlDatum, inLinks match { case Some(inLinks) => inLinks case None => null })} // Broadcast Variables val crawlIdVar = sc.broadcast(crawlId) val dumpRDD = rdd.filter({case(url, content, crawlDatum, inLinks) => SegmentReader.filterUrl(content)}) .mapPartitions({row => row.map(x => (x._1, x._2, x._3, x._4, crawlIdVar.value))}, preservesPartitioning = true) .map({case(url, content, crawlDatum, inLinks, crawlId) => SegmentReader.toSparkler(url, content, crawlDatum, inLinks, crawlId.toString())}) // Deduplication & Dumping Segments dumpRDD.map(doc => new JSONObject(doc).toJSONString).saveAsTextFile(outputDir) /*val rdd:RDD[Tuple4[String, Content, CrawlDatum, Inlinks]] = segRdd.leftOuterJoin(crawlDbRdd).leftOuterJoin(linkDbRdd) .map{case (url, ((content, 
crawlDatum), inLinks)) => (url, content, crawlDatum match { case Some(crawlDatum) => crawlDatum case None => null }, inLinks match { case Some(inLinks) => inLinks case None => null })}*/ //println("Total number of URLs: " + rdd.collect().length) // Stop Spark Context sc.stop() } } object CrawlDumper extends Loggable with Serializable { def main(args: Array[String]) { new CrawlDumper().run(args) } }
USCDataScience/nutch-analytics
src/main/scala/gov/nasa/jpl/analytics/tools/dump/CrawlDumper.scala
Scala
apache-2.0
6,393
package com.keorkunian.socketsample import akka.actor.{Props, OneForOneStrategy, Actor, ActorSystem} import org.mashupbots.socko.routes.{PathSegments, WebSocketHandshake, Routes} import akka.actor.SupervisorStrategy.Restart import scala.concurrent.duration._ /** * This sample code demonstrates how to monitor and establish web socket requests * for clients that wish to subscribe to a server-side event stream. * * The TimeServiceRoute accepts incoming socket connection requests and notifies the * TimeServiceSocketSupervisor of all new registrations (connections). * * When client's close socket connections, the corresponding actor is sent a Stop message. */ trait TimeServiceRoute { def system: ActorSystem /** * Used to notify the supervisor of a new web socket registration (connection) * @param webSocketId String */ def onWebSocketHandshakeComplete(webSocketId: String) { System.out.println(s"Web Socket $webSocketId connected") system.actorSelection(s"user/${TimeServiceSocketSupervisor.actorName}") ! TimeServiceSocketSupervisor.RegisterWebSocket(webSocketId) } /** * Used to stop the socket actor in response to the socket being closed * @param webSocketId String */ def onWebSocketClose(webSocketId: String) { System.out.println(s"Web Socket $webSocketId closed") system.actorSelection(s"user/${TimeServiceSocketSupervisor.actorName}/$webSocketId") ! TimeServiceSocketActor.Stop } /** * Define a Socko Route that accepts incoming WebSocket requests. */ val timeServiceRoutes = Routes { case WebSocketHandshake(wsHandshake) => wsHandshake match { case PathSegments("web-socket-sample" :: "time-service" :: Nil) => { wsHandshake.authorize( onComplete = Some(onWebSocketHandshakeComplete), onClose = Some(onWebSocketClose)) } } } } /** * The TimeServiceSocketSupervisor is responsible for creating TimeServiceSocketActor for * each Registered Web Socket. 
*/ object TimeServiceSocketSupervisor { val actorName = "time-service-socket-supervisor" case class RegisterWebSocket(webSocketId: String) } class TimeServiceSocketSupervisor extends Actor { import TimeServiceSocketSupervisor._ override val supervisorStrategy = OneForOneStrategy(maxNrOfRetries = 3) { case _: Exception ⇒ Restart } def receive = { case RegisterWebSocket(wsId) => // Create a actor for the new web socket registration context.actorOf(Props(new TimeServiceSocketActor(wsId)), wsId) } } /** * A TimeServiceSocketActor is created for each client connection. * It subscribes to TimeData events and writes the time out to the * socket each time it receives that event. */ object TimeServiceSocketActor { case class Stop() } class TimeServiceSocketActor(webSocketId: String) extends Actor { import TimeServiceSocketActor._ import TimeServiceActor._ override def preStart() { // Subscribe to relevant events context.system.eventStream.subscribe(self, classOf[TimeData]) } override def postStop() { // If this actor stops, close the corresponding socket Main.webServer.webSocketConnections.close(webSocketId) } def receive = { case TimeData(data) => // Write the updated time to the socket Main.webServer.webSocketConnections.writeText(data.toString, webSocketId) case Stop => // Shutdown this actor context.stop(self) } } /** * The TimeServiceActor is a simple service that publishes TimeData ~once per second to the eventStream. */ object TimeServiceActor { case class Tick() case class TimeData(timeInMillis: Long) } class TimeServiceActor extends Actor { import TimeServiceActor._ import context.dispatcher def receive = { case Tick => // Publish the millis and schedule the next Tick val millis = System.currentTimeMillis() context.system.eventStream.publish(TimeData(millis)) context.system.scheduler.scheduleOnce(1 second, self, Tick) } }
garyKeorkunian/web-socket-sample
src/main/scala/com.keorkunian.socketsample/TimeService.scala
Scala
gpl-2.0
4,035
package org.bitcoins.crypto

import org.bitcoins.config.TestNet3
import org.bitcoins.protocol.script._
import org.bitcoins.protocol.transaction.{Transaction, TransactionInput}
import org.bitcoins.script.{ScriptProgram}
import org.bitcoins.script.crypto._
import org.bitcoins.script.flag.{ScriptFlagUtil, ScriptFlag, ScriptVerifyDerSig}
import org.bitcoins.util.{BitcoinScriptUtil, BitcoinSLogger, BitcoinSUtil}
import org.slf4j.LoggerFactory

import scala.annotation.tailrec

/**
 * Created by chris on 2/16/16.
 * Responsible for checking digital signatures on inputs against their respective
 * public keys
 */
trait TransactionSignatureChecker extends BitcoinSLogger {

  /**
   * Checks the signature of a scriptSig in the spending transaction against the
   * given scriptPubKey & explicitly given public key
   * This is useful for instances of non standard scriptSigs
   *
   * The flag-driven encoding checks below run in a fixed order (strict DER,
   * low-S, hash type, pubkey encoding) before any cryptographic verification
   * is attempted; each failure short-circuits with a distinct result value.
   *
   * @param txSignatureComponent the tx signature component that contains all relevant tx information
   * @param pubKey the public key to verify the signature against
   * @param signature the digital signature being checked
   * @param flags script verification flags controlling which encoding rules apply
   * @return a TransactionSignatureCheckerResult describing success or the specific failure
   */
  def checkSignature(txSignatureComponent : TransactionSignatureComponent, pubKey: ECPublicKey,
                     signature : ECDigitalSignature, flags : Seq[ScriptFlag]) : TransactionSignatureCheckerResult = {
    logger.info("Signature: " + signature)
    val pubKeyEncodedCorrectly = BitcoinScriptUtil.checkPubKeyEncoding(pubKey,flags)
    if (ScriptFlagUtil.requiresStrictDerEncoding(flags) && !DERSignatureUtil.isStrictDEREncoding(signature)) {
      // NOTE(review): "stricly" is a typo, but the message is runtime output — left as-is here.
      logger.error("Signature was not stricly encoded der: " + signature.hex)
      SignatureValidationFailureNotStrictDerEncoding
    } else if (ScriptFlagUtil.requireLowSValue(flags) && !DERSignatureUtil.isLowDerSignature(signature)) {
      logger.error("Signature did not have a low s value")
      ScriptValidationFailureHighSValue
    } else if (ScriptFlagUtil.requireStrictEncoding(flags) && signature.bytes.size > 0 &&
      // The last signature byte encodes the hash type; it must be a recognized value.
      !HashTypeFactory.hashTypes.find(_.byte == signature.bytes.last).isDefined) {
      logger.error("Hash type was not defined on the signature")
      ScriptValidationFailureHashType
    } else if (!pubKeyEncodedCorrectly) {
      logger.error("The public key given for signature checking was not encoded correctly")
      SignatureValidationFailurePubKeyEncoding
    } else {
      //we need to check if the scriptSignature has a redeemScript
      //in that case, we need to pass the redeemScript to the TransactionSignatureChecker
      //we do this by setting the scriptPubKey inside of txSignatureComponent to the redeemScript
      //instead of the p2sh scriptPubKey it was previously
      //as the scriptPubKey instead of the one inside of ScriptProgram
      val txSignatureComponentWithScriptPubKeyAdjusted = txSignatureComponent.scriptSignature match {
        case s : P2SHScriptSignature => TransactionSignatureComponentFactory.factory(txSignatureComponent,s.redeemScript)
        case _ : P2PKHScriptSignature | _ : P2PKScriptSignature | _ : NonStandardScriptSignature
             | _ : MultiSignatureScriptSignature | EmptyScriptSignature => txSignatureComponent
      }
      // Empty signatures get hash type 0x00; otherwise the last byte is the hash type.
      val hashTypeByte = if (signature.bytes.size > 0) signature.bytes.last else 0x00.toByte
      val hashType = HashTypeFactory.fromByte(hashTypeByte)
      val hashForSignature = TransactionSignatureSerializer.hashForSignature(txSignatureComponentWithScriptPubKeyAdjusted.transaction,
        txSignatureComponentWithScriptPubKeyAdjusted.inputIndex,txSignatureComponentWithScriptPubKeyAdjusted.scriptPubKey,hashType)
      logger.info("Hash for signature: " + BitcoinSUtil.encodeHex(hashForSignature))
      val isValid = pubKey.verify(hashForSignature,signature)
      if (isValid) SignatureValidationSuccess else SignatureValidationFailureIncorrectSignatures
    }
  }

  /**
   * This is a helper function to check digital signatures against public keys
   * if the signature does not match this public key, check it against the next
   * public key in the sequence
   *
   * Sigs and pubkeys are consumed in order: a match advances both lists, a
   * mismatch advances only the pubkey list (sigs must appear in pubkey order).
   *
   * @param txSignatureComponent the tx signature component that contains all relevant transaction information
   * @param sigs the signatures that are being checked for validity
   * @param pubKeys the public keys which are needed to verify that the signatures are correct
   * @param flags the script verify flags which are rules to verify the signatures
   * @param requiredSigs how many signatures must still validate for overall success
   * @return a boolean indicating if all of the signatures are valid against the given public keys
   */
  @tailrec
  final def multiSignatureEvaluator(txSignatureComponent : TransactionSignatureComponent, sigs : List[ECDigitalSignature],
                                    pubKeys : List[ECPublicKey], flags : Seq[ScriptFlag],
                                    requiredSigs : Long) : TransactionSignatureCheckerResult = {
    logger.info("Signatures inside of helper: " + sigs)
    logger.info("Public keys inside of helper: " + pubKeys)
    if (sigs.size > pubKeys.size) {
      //this is how bitcoin core treats this. If there are ever any more
      //signatures than public keys remaining we immediately return
      //false https://github.com/bitcoin/bitcoin/blob/master/src/script/interpreter.cpp#L955-L959
      logger.info("We have more sigs than we have public keys remaining")
      SignatureValidationFailureIncorrectSignatures
    } else if (requiredSigs > sigs.size) {
      //for the case when we do not have enough sigs left to check to meet the required signature threshold
      //https://github.com/bitcoin/bitcoin/blob/master/src/script/interpreter.cpp#L914-915
      logger.info("We do not have enough sigs to meet the threshold of requireSigs in the multiSignatureScriptPubKey")
      SignatureValidationFailureSignatureCount
    } else if (!sigs.isEmpty && !pubKeys.isEmpty) {
      val sig = sigs.head
      val pubKey = pubKeys.head
      val result = checkSignature(txSignatureComponent,pubKey,sig,flags)
      result match {
        // Signature matched: consume both the sig and the pubkey, one fewer sig required.
        case SignatureValidationSuccess => multiSignatureEvaluator(txSignatureComponent, sigs.tail,pubKeys.tail,flags, requiredSigs - 1)
        // No match: try the same sig against the next pubkey.
        case SignatureValidationFailureIncorrectSignatures => multiSignatureEvaluator(txSignatureComponent, sigs,pubKeys.tail,flags, requiredSigs)
        // Encoding/count failures are terminal — propagate immediately.
        case SignatureValidationFailureNotStrictDerEncoding => SignatureValidationFailureNotStrictDerEncoding
        case SignatureValidationFailureSignatureCount => SignatureValidationFailureSignatureCount
        case SignatureValidationFailurePubKeyEncoding => SignatureValidationFailurePubKeyEncoding
        case ScriptValidationFailureHighSValue => ScriptValidationFailureHighSValue
        case ScriptValidationFailureHashType => ScriptValidationFailureHashType
      }
    } else if (sigs.isEmpty) {
      //means that we have checked all of the sigs against the public keys
      //validation succeeds
      SignatureValidationSuccess
    } else SignatureValidationFailureIncorrectSignatures
  }
}

object TransactionSignatureChecker extends TransactionSignatureChecker
Christewart/scalacoin
src/main/scala/org/bitcoins/crypto/TransactionSignatureChecker.scala
Scala
mit
6,964
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.sql.catalyst.plans.logical

import org.apache.spark.sql.catalyst.analysis.ViewType
import org.apache.spark.sql.catalyst.catalog.{BucketSpec, FunctionResource}
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.trees.{LeafLike, UnaryLike}
import org.apache.spark.sql.connector.catalog.TableChange.ColumnPosition
import org.apache.spark.sql.connector.expressions.Transform
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.types.{DataType, StructType}

/**
 * A logical plan node that contains exactly what was parsed from SQL.
 *
 * This is used to hold information parsed from SQL when there are multiple implementations of a
 * query or command. For example, CREATE TABLE may be implemented by different nodes for v1 and v2.
 * Instead of parsing directly to a v1 CreateTable that keeps metadata in CatalogTable, and then
 * converting that v1 metadata to the v2 equivalent, the sql [[CreateTableStatement]] plan is
 * produced by the parser and converted once into both implementations.
 *
 * Parsed logical plans are not resolved because they must be converted to concrete logical plans.
 *
 * Parsed logical plans are located in Catalyst so that as much SQL parsing logic as possible is
 * kept in a [[org.apache.spark.sql.catalyst.parser.AbstractSqlParser]].
 */
abstract class ParsedStatement extends LogicalPlan {
  // Redact properties and options when parsed nodes are used by generic methods like toString
  override def productIterator: Iterator[Any] = super.productIterator.map {
    case mapArg: Map[_, _] => conf.redactOptions(mapArg)
    case other => other
  }

  override def output: Seq[Attribute] = Seq.empty

  // Parsed statements are never resolved; they must first be converted to concrete plans.
  final override lazy val resolved = false
}

trait LeafParsedStatement extends ParsedStatement with LeafLike[LogicalPlan]
trait UnaryParsedStatement extends ParsedStatement with UnaryLike[LogicalPlan]

/**
 * Type to keep track of Hive serde info
 */
case class SerdeInfo(
    storedAs: Option[String] = None,
    formatClasses: Option[FormatClasses] = None,
    serde: Option[String] = None,
    serdeProperties: Map[String, String] = Map.empty) {
  // this uses assertions because validation is done in validateRowFormatFileFormat etc.
  assert(storedAs.isEmpty || formatClasses.isEmpty,
    "Cannot specify both STORED AS and INPUTFORMAT/OUTPUTFORMAT")

  // Renders this serde info back into SQL-like text for error messages / display.
  def describe: String = {
    val serdeString = if (serde.isDefined || serdeProperties.nonEmpty) {
      "ROW FORMAT " + serde.map(sd => s"SERDE $sd").getOrElse("DELIMITED")
    } else {
      ""
    }

    this match {
      case SerdeInfo(Some(storedAs), _, _, _) =>
        s"STORED AS $storedAs $serdeString"
      case SerdeInfo(_, Some(formatClasses), _, _) =>
        s"STORED AS $formatClasses $serdeString"
      case _ =>
        serdeString
    }
  }

  // Combines two SerdeInfo values; conflicting scalar fields trigger an assertion,
  // serde properties are merged (checked for conflicts first).
  def merge(other: SerdeInfo): SerdeInfo = {
    def getOnly[T](desc: String, left: Option[T], right: Option[T]): Option[T] = {
      (left, right) match {
        case (Some(l), Some(r)) =>
          assert(l == r, s"Conflicting $desc values: $l != $r")
          left
        case (Some(_), _) =>
          left
        case (_, Some(_)) =>
          right
        case _ =>
          None
      }
    }

    SerdeInfo.checkSerdePropMerging(serdeProperties, other.serdeProperties)
    SerdeInfo(
      getOnly("STORED AS", storedAs, other.storedAs),
      getOnly("INPUTFORMAT/OUTPUTFORMAT", formatClasses, other.formatClasses),
      getOnly("SERDE", serde, other.serde),
      serdeProperties ++ other.serdeProperties)
  }
}

/** Pair of Hive input/output format class names. */
case class FormatClasses(input: String, output: String) {
  override def toString: String = s"INPUTFORMAT $input OUTPUTFORMAT $output"
}

object SerdeInfo {
  val empty: SerdeInfo = SerdeInfo(None, None, None, Map.empty)

  // Fails fast when both property maps define the same key (cannot merge safely).
  def checkSerdePropMerging(
      props1: Map[String, String], props2: Map[String, String]): Unit = {
    val conflictKeys = props1.keySet.intersect(props2.keySet)
    if (conflictKeys.nonEmpty) {
      throw QueryExecutionErrors.cannotSafelyMergeSerdePropertiesError(props1, props2, conflictKeys)
    }
  }
}

/**
 * A CREATE TABLE command, as parsed from SQL.
 *
 * This is a metadata-only command and is not used to write data to the created table.
 */
case class CreateTableStatement(
    tableName: Seq[String],
    tableSchema: StructType,
    partitioning: Seq[Transform],
    bucketSpec: Option[BucketSpec],
    properties: Map[String, String],
    provider: Option[String],
    options: Map[String, String],
    location: Option[String],
    comment: Option[String],
    serde: Option[SerdeInfo],
    external: Boolean,
    ifNotExists: Boolean) extends LeafParsedStatement

/**
 * A CREATE TABLE AS SELECT command, as parsed from SQL.
 */
case class CreateTableAsSelectStatement(
    tableName: Seq[String],
    asSelect: LogicalPlan,
    partitioning: Seq[Transform],
    bucketSpec: Option[BucketSpec],
    properties: Map[String, String],
    provider: Option[String],
    options: Map[String, String],
    location: Option[String],
    comment: Option[String],
    writeOptions: Map[String, String],
    serde: Option[SerdeInfo],
    external: Boolean,
    ifNotExists: Boolean) extends UnaryParsedStatement {

  override def child: LogicalPlan = asSelect
  override protected def withNewChildInternal(newChild: LogicalPlan): CreateTableAsSelectStatement =
    copy(asSelect = newChild)
}

/**
 * A CREATE VIEW statement, as parsed from SQL.
 */
case class CreateViewStatement(
    viewName: Seq[String],
    userSpecifiedColumns: Seq[(String, Option[String])],
    comment: Option[String],
    properties: Map[String, String],
    originalText: Option[String],
    child: LogicalPlan,
    allowExisting: Boolean,
    replace: Boolean,
    viewType: ViewType) extends UnaryParsedStatement {

  override protected def withNewChildInternal(newChild: LogicalPlan): CreateViewStatement =
    copy(child = newChild)
}

/**
 * A REPLACE TABLE command, as parsed from SQL.
 *
 * If the table exists prior to running this command, executing this statement
 * will replace the table's metadata and clear the underlying rows from the table.
 */
case class ReplaceTableStatement(
    tableName: Seq[String],
    tableSchema: StructType,
    partitioning: Seq[Transform],
    bucketSpec: Option[BucketSpec],
    properties: Map[String, String],
    provider: Option[String],
    options: Map[String, String],
    location: Option[String],
    comment: Option[String],
    serde: Option[SerdeInfo],
    orCreate: Boolean) extends LeafParsedStatement

/**
 * A REPLACE TABLE AS SELECT command, as parsed from SQL.
 */
case class ReplaceTableAsSelectStatement(
    tableName: Seq[String],
    asSelect: LogicalPlan,
    partitioning: Seq[Transform],
    bucketSpec: Option[BucketSpec],
    properties: Map[String, String],
    provider: Option[String],
    options: Map[String, String],
    location: Option[String],
    comment: Option[String],
    writeOptions: Map[String, String],
    serde: Option[SerdeInfo],
    orCreate: Boolean) extends UnaryParsedStatement {

  override def child: LogicalPlan = asSelect
  override protected def withNewChildInternal(
      newChild: LogicalPlan): ReplaceTableAsSelectStatement = copy(asSelect = newChild)
}

/**
 * Column data as parsed by ALTER TABLE ... ADD COLUMNS.
 */
case class QualifiedColType(
    name: Seq[String],
    dataType: DataType,
    nullable: Boolean,
    comment: Option[String],
    position: Option[ColumnPosition])

/**
 * ALTER TABLE ... ADD COLUMNS command, as parsed from SQL.
 */
case class AlterTableAddColumnsStatement(
    tableName: Seq[String],
    columnsToAdd: Seq[QualifiedColType]) extends LeafParsedStatement

// ALTER TABLE ... REPLACE COLUMNS command, as parsed from SQL.
case class AlterTableReplaceColumnsStatement(
    tableName: Seq[String],
    columnsToAdd: Seq[QualifiedColType]) extends LeafParsedStatement

/**
 * An INSERT INTO statement, as parsed from SQL.
 *
 * @param table                the logical plan representing the table.
 * @param userSpecifiedCols    the user specified list of columns that belong to the table.
 * @param query                the logical plan representing data to write to.
 * @param overwrite            overwrite existing table or partitions.
 * @param partitionSpec        a map from the partition key to the partition value (optional).
 *                             If the value is missing, dynamic partition insert will be performed.
 *                             As an example, `INSERT INTO tbl PARTITION (a=1, b=2) AS` would have
 *                             Map('a' -> Some('1'), 'b' -> Some('2')),
 *                             and `INSERT INTO tbl PARTITION (a=1, b) AS ...`
 *                             would have Map('a' -> Some('1'), 'b' -> None).
 * @param ifPartitionNotExists If true, only write if the partition does not exist.
 *                             Only valid for static partitions.
 */
case class InsertIntoStatement(
    table: LogicalPlan,
    partitionSpec: Map[String, Option[String]],
    userSpecifiedCols: Seq[String],
    query: LogicalPlan,
    overwrite: Boolean,
    ifPartitionNotExists: Boolean) extends UnaryParsedStatement {

  // Invariants enforced at construction: IF NOT EXISTS only makes sense with
  // INSERT OVERWRITE and fully static partition specs.
  require(overwrite || !ifPartitionNotExists,
    "IF NOT EXISTS is only valid in INSERT OVERWRITE")
  require(partitionSpec.values.forall(_.nonEmpty) || !ifPartitionNotExists,
    "IF NOT EXISTS is only valid with static partitions")

  override def child: LogicalPlan = query
  override protected def withNewChildInternal(newChild: LogicalPlan): InsertIntoStatement =
    copy(query = newChild)
}

/**
 * A CREATE NAMESPACE statement, as parsed from SQL.
 */
case class CreateNamespaceStatement(
    namespace: Seq[String],
    ifNotExists: Boolean,
    properties: Map[String, String]) extends LeafParsedStatement

/**
 * A USE statement, as parsed from SQL.
 */
case class UseStatement(isNamespaceSet: Boolean, nameParts: Seq[String])
  extends LeafParsedStatement

/**
 * A SHOW CURRENT NAMESPACE statement, as parsed from SQL
 */
case class ShowCurrentNamespaceStatement() extends LeafParsedStatement

/**
 * CREATE FUNCTION statement, as parsed from SQL
 */
case class CreateFunctionStatement(
    functionName: Seq[String],
    className: String,
    resources: Seq[FunctionResource],
    isTemp: Boolean,
    ignoreIfExists: Boolean,
    replace: Boolean) extends LeafParsedStatement
wangmiao1981/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
Scala
apache-2.0
11,140
package com.wincom.dcim.message.common

/**
 * Base contract for command messages in the DCIM cluster messaging layer.
 */
trait Command {
  // Identifier of the entity this command targets.
  def entityId: String
  // Optional user associated with the command — presumably the issuing user;
  // verify against senders elsewhere in the project.
  def user: Option[String]
}
xtwxy/mysc
dcim-cluster/message/src/main/scala/com/wincom/dcim/message/common/Command.scala
Scala
apache-2.0
107
package org.skycastle.util.parameters.expressions

import org.skycastle.util.parameters.Parameters

/**
 * An expression that always evaluates to the same fixed value.
 *
 * The evaluation context is accepted for interface compatibility but never
 * consulted — the wrapped constant is returned unchanged.
 */
case class Const(constant: Any) extends Expr {

  /** Evaluates this expression: simply yields the stored constant. */
  def calculate(context: Parameters): Any = constant
}
zzorn/skycastle
src/main/scala/org/skycastle/util/parameters/expressions/Const.scala
Scala
gpl-2.0
227
package be.objectify.deadbolt.scala.views.di.dynamicTest

import be.objectify.deadbolt.scala.{AuthenticatedRequest, DeadboltHandler, DynamicResourceHandler}
import be.objectify.deadbolt.scala.views.di.{AbstractViewTest, drh}
import be.objectify.deadbolt.scala.views.html.di.dynamicOr
import be.objectify.deadbolt.scala.views.html.di.dynamicTest.dynamicOrContent
import play.api.test.{FakeRequest, Helpers, WithApplication}

/**
 * Specs for the DynamicOr view constraint: constrained content must be shown
 * when the dynamic resource handler allows access, and the fallback content
 * must be shown when it denies access.
 *
 * @author Steve Chaloner (steve@objectify.be)
 */
class DynamicOrTest extends AbstractViewTest {

  // Dynamic resource handlers that unconditionally allow / deny the constraint.
  val drhAllow: Option[DynamicResourceHandler] = Some(drh(allowed = true, check = false))
  val drhDeny: Option[DynamicResourceHandler] = Some(drh(allowed = false, check = false))

  "When using the DynamicOr constraint" should {
    "when allowed by the dynamic handler, the view" should {
      "show constrained content and hide fallback content" in new WithApplication(testApp(handler(drh = drhAllow))) {
        // Render the template with an allowing handler and no authenticated subject.
        val html = constraint(handler(drh = drhAllow)).apply(name = "the name of this constraint",
                                                            meta = Some("some additional info"))(new AuthenticatedRequest(FakeRequest(), None))

        private val content: String = Helpers.contentAsString(html)
        content must contain("This is before the constraint.")
        content must contain("This is protected by the constraint.")
        content must not contain("This is default content in case the constraint denies access to the protected content.")
        content must contain("This is after the constraint.")
      }
    }

    "when denied by the dynamic handler, the view" should {
      "hide constrained content and show fallback content" in new WithApplication(testApp(handler(drh = drhDeny))) {
        // Render the template with a denying handler — fallback content expected.
        val html = constraint(handler(drh = drhDeny)).apply(name = "the name of this constraint",
                                                            meta = Some("some additional info"))(new AuthenticatedRequest(FakeRequest(), None))

        private val content: String = Helpers.contentAsString(html)
        content must contain("This is before the constraint.")
        content must not contain("This is protected by the constraint.")
        content must contain("This is default content in case the constraint denies access to the protected content.")
        content must contain("This is after the constraint.")
      }
    }
  }

  // Builds the template under test, wired with the given Deadbolt handler.
  def constraint(handler: DeadboltHandler) = new dynamicOrContent(new dynamicOr(viewSupport(), handlerCache(handler)))
}
schaloner/deadbolt-2-scala
code/test/be/objectify/deadbolt/scala/views/di/dynamicTest/DynamicOrTest.scala
Scala
apache-2.0
2,407
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.apollo.util.path

object Path {

  /** Builds a Path whose segments are all literal parts, one per input string. */
  def apply(value: String*): Path = {
    val literalParts = value.map(LiteralPart(_)).toList
    Path(literalParts)
  }
}

/**
 * <p>
 * An immutable sequence of path parts.
 * </p>
 *
 * @author <a href="http://hiramchirino.com">Hiram Chirino</a>
 */
case class Path(parts: List[Part]) {

  /** Returns a new Path consisting of this path's parts followed by `other`'s. */
  def +(other: Path): Path = Path(parts ++ other.parts)
}
chirino/activemq-apollo
apollo-util/src/main/scala/org/apache/activemq/apollo/util/path/Path.scala
Scala
apache-2.0
1,139
package org.jetbrains.plugins.scala
package project.gradle

import java.io.File
import java.util

import com.intellij.openapi.externalSystem.model.project.ProjectData
import com.intellij.openapi.externalSystem.model.{DataNode, ExternalSystemException, ProjectKeys}
import com.intellij.openapi.externalSystem.service.notification.{ExternalSystemNotificationManager, NotificationSource, NotificationCategory, NotificationData}
import com.intellij.openapi.externalSystem.service.project.{IdeModifiableModelsProvider, IdeModelsProvider}
import com.intellij.openapi.module.Module
import com.intellij.openapi.project.Project
import com.intellij.openapi.roots.libraries.Library
import org.jetbrains.plugins.gradle.model.data.ScalaModelData
import org.jetbrains.plugins.gradle.util.GradleConstants
import org.jetbrains.plugins.scala.project._
import org.jetbrains.sbt.project.data.service.{Importer, AbstractImporter, AbstractDataService}

import scala.collection.JavaConverters._

/**
 * Imports Gradle-provided Scala model data into the IDE project model:
 * applies compiler options to each module and configures a matching Scala SDK.
 *
 * @author Pavel Fatin
 */
class ScalaGradleDataService extends AbstractDataService[ScalaModelData, Library](ScalaModelData.KEY) {
  override def createImporter(toImport: Seq[DataNode[ScalaModelData]],
                              projectData: ProjectData,
                              project: Project,
                              modelsProvider: IdeModifiableModelsProvider): Importer[ScalaModelData] =
    new ScalaGradleDataService.Importer(toImport, projectData, project, modelsProvider)
}

private object ScalaGradleDataService {

  private class Importer(dataToImport: Seq[DataNode[ScalaModelData]],
                         projectData: ProjectData,
                         project: Project,
                         modelsProvider: IdeModifiableModelsProvider)
    extends AbstractImporter[ScalaModelData](dataToImport, projectData, project, modelsProvider) {

    override def importData(): Unit =
      dataToImport.foreach(doImport)

    // Applies compiler settings and SDK configuration for a single Scala node.
    private def doImport(scalaNode: DataNode[ScalaModelData]): Unit = for {
      module <- getIdeModuleByNode(scalaNode)
      compilerOptions = compilerOptionsFrom(scalaNode.getData)
      compilerClasspath = scalaNode.getData.getScalaClasspath.asScala.toSeq
    } {
      module.configureScalaCompilerSettingsFrom("Gradle", compilerOptions)
      configureScalaSdk(module, compilerClasspath)
    }

    /**
     * Finds a project Scala library matching the compiler version detected from
     * the compiler classpath and converts it to a Scala SDK if it is not one yet.
     * Shows a sync warning when the version cannot be detected or no matching
     * library exists; does nothing when the project has no Scala libraries at all.
     */
    private def configureScalaSdk(module: Module, compilerClasspath: Seq[File]): Unit =
      findScalaLibraryIn(compilerClasspath).flatMap(getVersionFromJar) match {
        case None =>
          showWarning(ScalaBundle.message("gradle.dataService.scalaVersionCantBeDetected", module.getName))
        case Some(compilerVersion) =>
          val scalaLibraries = getScalaLibraries
          // No Scala libraries in the project: silently skip (preserves prior behavior).
          if (scalaLibraries.nonEmpty) {
            scalaLibraries.find(_.scalaVersion.contains(compilerVersion)) match {
              case None =>
                showWarning(ScalaBundle.message("gradle.dataService.scalaLibraryIsNotFound", compilerVersion.number, module.getName))
              case Some(scalaLibrary) if !scalaLibrary.isScalaSdk =>
                val languageLevel = scalaLibrary.scalaLanguageLevel.getOrElse(ScalaLanguageLevel.Default)
                convertToScalaSdk(scalaLibrary, languageLevel, compilerClasspath)
              case _ =>
                // Already a Scala SDK — nothing to do.
            }
          }
      }

    // Locates the scala-library jar on the compiler classpath by filename prefix.
    private def findScalaLibraryIn(classpath: Seq[File]): Option[File] =
      classpath.find(_.getName.startsWith(ScalaLibraryName))

    // Extracts the Scala version from a scala-library jar filename, if present.
    private def getVersionFromJar(scalaLibrary: File): Option[Version] =
      JarVersion.findFirstIn(scalaLibrary.getName).map(Version(_))

    /**
     * Translates Gradle's ScalaCompileOptions into scalac command-line options.
     * Boolean flags map to their switches; non-empty string options are rendered
     * with their values; additional raw parameters are appended unchanged.
     */
    private def compilerOptionsFrom(data: ScalaModelData): Seq[String] =
      Option(data.getScalaCompileOptions).toSeq.flatMap { options =>
        val presentations = Seq(
          options.isDeprecation -> "-deprecation",
          options.isUnchecked -> "-unchecked",
          options.isOptimize -> "-optimise",
          !isEmpty(options.getDebugLevel) -> s"-g:${options.getDebugLevel}",
          !isEmpty(options.getEncoding) -> s"-encoding ${options.getEncoding}",
          !isEmpty(data.getTargetCompatibility) -> s"-target:jvm-${data.getTargetCompatibility}")

        val additionalOptions =
          if (options.getAdditionalParameters != null) options.getAdditionalParameters.asScala else Seq.empty

        presentations.flatMap((include _).tupled) ++ additionalOptions
      }

    // Null-safe emptiness check for option values coming from the Gradle model.
    private def isEmpty(s: String) = s == null || s.isEmpty

    // Wraps a conditional option into a (possibly empty) sequence.
    private def include(b: Boolean, s: String): Seq[String] = if (b) Seq(s) else Seq.empty

    // Surfaces a warning in the external-system "Gradle Sync" notification group.
    private def showWarning(message: String): Unit = {
      val notification = new NotificationData("Gradle Sync", message, NotificationCategory.WARNING, NotificationSource.PROJECT_SYNC)
      ExternalSystemNotificationManager.getInstance(project).showNotification(GradleConstants.SYSTEM_ID, notification)
    }
  }
}
advancedxy/intellij-scala
src/org/jetbrains/plugins/scala/project/gradle/ScalaGradleDataService.scala
Scala
apache-2.0
4,965
package org.mitre.mandolin.util
/*
 * Copyright (c) 2014-2015 The MITRE Corporation
 */

/**
 * Facilitates symbol tables and mappings from surface forms to indexed items.
 *
 * An `Alphabet` assigns a stable integer id to each feature/symbol string and
 * optionally rescales raw feature values via `getValue`.
 */
abstract class Alphabet {

  /** Returns the integer id for symbol `s` (adding it if the alphabet is not fixed). */
  def ofString(s: String): Int

  /** Returns the integer id for symbol `s`, observing raw value `v` (used by scaling alphabets). */
  def ofString(s: String, v: Double): Int

  /** `None` maps to index 0; otherwise delegates to `ofString(String)`. */
  def ofString(s: Option[String]): Int = s match {
    case Some(str) => ofString(str)
    case None      => 0
  }

  /** `None` maps to index 0; otherwise delegates to `ofString(String, Double)`. */
  def ofString(s: Option[String], v: Double): Int = s match {
    case Some(str) => ofString(str, v)
    case None      => 0
  }

  /**
   * Hook allowing an alphabet to keep scaling factors or other information
   * that can be used to modify a raw feature value `v` for feature id `f`.
   */
  def getValue(f: Int, v: Double): Double

  /** Current number of distinct symbols. */
  def getSize: Int

  /** Freezes the alphabet: unseen symbols map to -1 after this call. */
  def ensureFixed: Unit

  /** Un-freezes the alphabet so new symbols may be added again. */
  def ensureUnFixed: Unit

  /** Mapping from feature id back to surface string (not supported by all implementations). */
  def getInverseMapping: Map[Int, String]

  /** Underlying string-to-id table (not supported by all implementations). */
  def getMapping: collection.mutable.HashMap[String, Int]
}

/**
 * Alphabet that assumes each feature name is itself the index of that feature.
 * Indices are ONE based by default (i.e. "1" maps to internal index 0).
 *
 * @param size     current size of the alphabet (grows as larger indices are seen, unless fixed)
 * @param oneBased whether incoming string indices are 1-based
 * @param fix      whether the alphabet starts out frozen
 * @author wellner
 */
class IdentityAlphabet(var size: Int, val oneBased: Boolean = true, val fix: Boolean = false)
  extends Alphabet with Serializable {

  var fixed = fix

  def this() = this(0)

  def getValue(f: Int, v: Double) = v

  def ofString(s: String, vl: Double): Int = ofString(s)

  def ofString(s: String): Int =
    try {
      val i = if (oneBased) s.toInt - 1 else s.toInt
      // NOTE(review): a negative parsed index (e.g. "0" when oneBased) is returned as-is,
      // matching the original behavior — callers presumably never pass such values; verify.
      if (i >= size) {
        if (!fixed) { size = i + 1; i } else -1
      } else i
    } catch {
      // Return "unrecognized feature" if not an integer within range.
      // Narrowed from `Throwable`: catching Throwable would also swallow fatal
      // errors such as OutOfMemoryError or InterruptedException.
      case scala.util.control.NonFatal(_) => -1
    }

  def getSize = size
  def ensureFixed = { fixed = true }
  def ensureUnFixed = { fixed = false }
  def getInverseMapping = throw new RuntimeException("Inverse not available from Identity Alphabet")
  def getMapping = throw new RuntimeException("Mapping not available from Identity Alphabet")
}

/**
 * Identity alphabet that additionally records per-feature min/max values during a
 * first pass so feature values can be unit-scaled into [0,1] on a second pass.
 *
 * @param zeroMaxMin if true, the recorded minimum for each feature is clamped to at most 0.0
 * @param s          initial alphabet size
 */
class IdentityAlphabetWithUnitScaling(zeroMaxMin: Boolean, s: Int)
  extends IdentityAlphabet(s) with Serializable {

  def this(s: Int) = this(true, s)

  val minVals = new collection.mutable.HashMap[Int, Double]
  val maxVals = new collection.mutable.HashMap[Int, Double]

  // Global extrema across all features, used as a fallback scale for features
  // that were never observed while building the alphabet.
  var totalMax = 0.0
  var totalMin = 0.0

  // Materialized lazily so they reflect the final alphabet size; features with
  // no recorded extrema default to 0.0. Do not force before the first pass is done.
  lazy val fmin = Array.tabulate(this.getSize) { i => minVals.get(i).getOrElse(0.0) }
  lazy val fmax = Array.tabulate(this.getSize) { i => maxVals.get(i).getOrElse(0.0) }

  override def getValue(fid: Int, v: Double) = {
    val mm = fmin(fid - 1) // fid is one-based; arrays are zero-based
    val mx = fmax(fid - 1)
    if (mm == mx) {
      // Feature unseen when building the alphabet: normalize using the global
      // min/max observed across all features in the training set.
      if (v > totalMax) 1.0
      else if (v > totalMin) (v - totalMin) / (totalMax - totalMin)
      else 0.0
    } else {
      if (v > mx) 1.0
      else if (v > mm) (v - mm) / (mx - mm)
      else 0.0
    }
  }

  override def ofString(s: String, ivl: Double): Int = {
    val i = super.ofString(s, ivl)
    val vlMin = if (zeroMaxMin) math.min(0.0, ivl) else ivl
    val vlMax = ivl
    if (ivl > totalMax) totalMax = ivl
    if (ivl < totalMin) totalMin = ivl
    if (minVals.contains(i)) {
      if (vlMin < minVals(i)) minVals.update(i, vlMin)
    } else minVals.update(i, vlMin)
    if (maxVals.contains(i)) {
      if (vlMax > maxVals(i)) maxVals.update(i, vlMax)
    } else maxVals.update(i, vlMax)
    i
  }
}

/**
 * Standard alphabet implemented using a `scala.collection.mutable.HashMap[String,Int]`.
 * Ids are assigned densely in first-seen order starting at 0.
 */
class StdAlphabet extends Alphabet with Serializable {

  val mapping = new collection.mutable.HashMap[String, Int]
  var curSize = 0
  var fixed = false

  def ofString(s: String) = ofString(s, 1.0)

  def getValue(f: Int, v: Double) = v

  /** Returns the id for `s`, assigning the next id if unseen; -1 if unseen and fixed. */
  def ofString(s: String, vl: Double): Int = {
    mapping.get(s) match {
      case Some(v) => v
      case None =>
        if (fixed) -1
        else {
          val i = curSize
          mapping.update(s, i)
          curSize += 1
          i
        }
    }
  }

  def ensureFixed = { fixed = true }
  def ensureUnFixed = { fixed = false }
  def getSize = curSize

  def getInverseMapping = mapping.map { case (s, i) => (i, s) }.toMap

  def getMapping = mapping
}

/**
 * A standard alphabet that facilitates unit scaling of the input features.
 * As the alphabet is constructed via an initial pass over the input data, the maximum
 * and minimum values are recorded so that feature values can be unit scaled
 * on a second pass of instantiating the feature vectors.
 *
 * @param zeroMaxMin if true, the recorded minimum for each feature is clamped to at most 0.0
 * @author wellner
 */
class AlphabetWithUnitScaling(zeroMaxMin: Boolean = true) extends StdAlphabet {

  val minVals = new collection.mutable.HashMap[Int, Double]
  val maxVals = new collection.mutable.HashMap[Int, Double]

  // NOTE(review): unlike IdentityAlphabetWithUnitScaling, these use direct map lookup
  // and will throw NoSuchElementException for an id with no recorded extrema; every id
  // produced by ofString below does get extrema recorded, so this holds in practice.
  lazy val fmin = Array.tabulate(this.getSize) { i => minVals(i) }
  lazy val fmax = Array.tabulate(this.getSize) { i => maxVals(i) }

  override def getValue(fid: Int, v: Double) = {
    val mm = fmin(fid)
    val mx = fmax(fid)
    if (v > mx) 1.0
    else if (v > mm) (v - mm) / (mx - mm)
    else 0.0
  }

  override def ofString(s: String, ivl: Double): Int = {
    val i = super.ofString(s, ivl)
    val vlMin = if (zeroMaxMin) math.min(0.0, ivl) else ivl
    val vlMax = ivl
    if (minVals.contains(i)) {
      if (vlMin < minVals(i)) minVals.update(i, vlMin)
    } else minVals.update(i, vlMin)
    if (maxVals.contains(i)) {
      if (vlMax > maxVals(i)) maxVals.update(i, vlMax)
    } else maxVals.update(i, vlMax)
    i
  }
}

/**
 * Alphabet whose per-feature min/max scaling bounds are supplied up front
 * rather than learned from a pass over the data.
 */
class PrescaledAlphabet(minVals: Array[Double], maxVals: Array[Double]) extends StdAlphabet {
  override def getValue(fid: Int, v: Double) = {
    val mm = minVals(fid)
    val mx = maxVals(fid)
    if (v > mx) 1.0
    else if (v > mm) (v - mm) / (mx - mm)
    else 0.0
  }
}

/**
 * A Random or Hashing alphabet that uses a Murmur hash algorithm to map
 * input strings to integers efficiently but with chances for collision.
 *
 * @param size the size of the hash/mod space
 */
class RandomAlphabet(size: Int) extends Alphabet with Serializable {

  def ensureFixed = {}
  def ensureUnFixed = {}

  def getValue(f: Int, v: Double) = v

  @inline final def ofString(s: String): Int =
    update(scala.util.hashing.MurmurHash3.stringHash(s), size)

  @inline final def ofString(s: String, v: Double): Int = ofString(s)

  def getSize = size

  def getInverseMapping = throw new RuntimeException("Inverse not available from Random Alphabet")
  def getMapping = throw new RuntimeException("Mapping not available from Random Alphabet")

  /** Maps an arbitrary hash value `k` into the range [0, size). */
  @inline final def update(k: Int, size: Int): Int = {
    // math.abs(Int.MinValue) is still negative (two's complement), which previously
    // produced a negative index; map that single pathological value to 0. All other
    // hash values keep their original abs-based bucket assignment.
    val nonNeg = if (k == Int.MinValue) 0 else math.abs(k)
    nonNeg % size
  }
}
project-mandolin/mandolin
mandolin-core/src/main/scala/org/mitre/mandolin/util/Alphabet.scala
Scala
apache-2.0
6,855
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst import javax.lang.model.SourceVersion import org.apache.commons.lang3.reflect.ConstructorUtils import org.apache.spark.internal.Logging import org.apache.spark.sql.catalyst.DeserializerBuildHelper._ import org.apache.spark.sql.catalyst.SerializerBuildHelper._ import org.apache.spark.sql.catalyst.analysis.GetColumnByOrdinal import org.apache.spark.sql.catalyst.expressions.{Expression, _} import org.apache.spark.sql.catalyst.expressions.objects._ import org.apache.spark.sql.catalyst.util.{ArrayData, MapData} import org.apache.spark.sql.types._ import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String} /** * A helper trait to create [[org.apache.spark.sql.catalyst.encoders.ExpressionEncoder]]s * for classes whose fields are entirely defined by constructor params but should not be * case classes. */ trait DefinedByConstructorParams private[catalyst] object ScalaSubtypeLock /** * A default version of ScalaReflection that uses the runtime universe. 
*/ object ScalaReflection extends ScalaReflection { val universe: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe // Since we are creating a runtime mirror using the class loader of current thread, // we need to use def at here. So, every time we call mirror, it is using the // class loader of the current thread. override def mirror: universe.Mirror = { universe.runtimeMirror(Thread.currentThread().getContextClassLoader) } import universe._ // The Predef.Map is scala.collection.immutable.Map. // Since the map values can be mutable, we explicitly import scala.collection.Map at here. import scala.collection.Map /** * Returns the Spark SQL DataType for a given scala type. Where this is not an exact mapping * to a native type, an ObjectType is returned. Special handling is also used for Arrays including * those that hold primitive types. * * Unlike `schemaFor`, this function doesn't do any massaging of types into the Spark SQL type * system. As a result, ObjectType will be returned for things like boxed Integers */ def dataTypeFor[T : TypeTag]: DataType = dataTypeFor(localTypeOf[T]) /** * Synchronize to prevent concurrent usage of `<:<` operator. * This operator is not thread safe in any current version of scala; i.e. * (2.11.12, 2.12.10, 2.13.0-M5). 
* * See https://github.com/scala/bug/issues/10766 */ private[catalyst] def isSubtype(tpe1: `Type`, tpe2: `Type`): Boolean = { ScalaSubtypeLock.synchronized { tpe1 <:< tpe2 } } private def dataTypeFor(tpe: `Type`): DataType = cleanUpReflectionObjects { tpe.dealias match { case t if isSubtype(t, definitions.NullTpe) => NullType case t if isSubtype(t, definitions.IntTpe) => IntegerType case t if isSubtype(t, definitions.LongTpe) => LongType case t if isSubtype(t, definitions.DoubleTpe) => DoubleType case t if isSubtype(t, definitions.FloatTpe) => FloatType case t if isSubtype(t, definitions.ShortTpe) => ShortType case t if isSubtype(t, definitions.ByteTpe) => ByteType case t if isSubtype(t, definitions.BooleanTpe) => BooleanType case t if isSubtype(t, localTypeOf[Array[Byte]]) => BinaryType case t if isSubtype(t, localTypeOf[CalendarInterval]) => CalendarIntervalType case t if isSubtype(t, localTypeOf[Decimal]) => DecimalType.SYSTEM_DEFAULT case _ => val className = getClassNameFromType(tpe) className match { case "scala.Array" => val TypeRef(_, _, Seq(elementType)) = tpe arrayClassFor(elementType) case other => val clazz = getClassFromType(tpe) ObjectType(clazz) } } } /** * Given a type `T` this function constructs `ObjectType` that holds a class of type * `Array[T]`. * * Special handling is performed for primitive types to map them back to their raw * JVM form instead of the Scala Array that handles auto boxing. 
*/ private def arrayClassFor(tpe: `Type`): ObjectType = cleanUpReflectionObjects { val cls = tpe.dealias match { case t if isSubtype(t, definitions.IntTpe) => classOf[Array[Int]] case t if isSubtype(t, definitions.LongTpe) => classOf[Array[Long]] case t if isSubtype(t, definitions.DoubleTpe) => classOf[Array[Double]] case t if isSubtype(t, definitions.FloatTpe) => classOf[Array[Float]] case t if isSubtype(t, definitions.ShortTpe) => classOf[Array[Short]] case t if isSubtype(t, definitions.ByteTpe) => classOf[Array[Byte]] case t if isSubtype(t, definitions.BooleanTpe) => classOf[Array[Boolean]] case t if isSubtype(t, localTypeOf[Array[Byte]]) => classOf[Array[Array[Byte]]] case t if isSubtype(t, localTypeOf[CalendarInterval]) => classOf[Array[CalendarInterval]] case t if isSubtype(t, localTypeOf[Decimal]) => classOf[Array[Decimal]] case other => // There is probably a better way to do this, but I couldn't find it... val elementType = dataTypeFor(other).asInstanceOf[ObjectType].cls java.lang.reflect.Array.newInstance(elementType, 0).getClass } ObjectType(cls) } /** * Returns true if the value of this data type is same between internal and external. */ def isNativeType(dt: DataType): Boolean = dt match { case NullType | BooleanType | ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType | BinaryType | CalendarIntervalType => true case _ => false } private def baseType(tpe: `Type`): `Type` = { tpe.dealias match { case annotatedType: AnnotatedType => annotatedType.underlying case other => other } } /** * Returns an expression that can be used to deserialize a Spark SQL representation to an object * of type `T` with a compatible schema. The Spark SQL representation is located at ordinal 0 of * a row, i.e., `GetColumnByOrdinal(0, _)`. Nested classes will have their fields accessed using * `UnresolvedExtractValue`. * * The returned expression is used by `ExpressionEncoder`. The encoder will resolve and bind this * deserializer expression when using it. 
*/ def deserializerForType(tpe: `Type`): Expression = { val clsName = getClassNameFromType(tpe) val walkedTypePath = new WalkedTypePath().recordRoot(clsName) val Schema(dataType, nullable) = schemaFor(tpe) // Assumes we are deserializing the first column of a row. deserializerForWithNullSafetyAndUpcast(GetColumnByOrdinal(0, dataType), dataType, nullable = nullable, walkedTypePath, (casted, typePath) => deserializerFor(tpe, casted, typePath)) } /** * Returns an expression that can be used to deserialize an input expression to an object of type * `T` with a compatible schema. * * @param tpe The `Type` of deserialized object. * @param path The expression which can be used to extract serialized value. * @param walkedTypePath The paths from top to bottom to access current field when deserializing. */ private def deserializerFor( tpe: `Type`, path: Expression, walkedTypePath: WalkedTypePath): Expression = cleanUpReflectionObjects { baseType(tpe) match { case t if !dataTypeFor(t).isInstanceOf[ObjectType] => path case t if isSubtype(t, localTypeOf[Option[_]]) => val TypeRef(_, _, Seq(optType)) = t val className = getClassNameFromType(optType) val newTypePath = walkedTypePath.recordOption(className) WrapOption(deserializerFor(optType, path, newTypePath), dataTypeFor(optType)) case t if isSubtype(t, localTypeOf[java.lang.Integer]) => createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Integer]) case t if isSubtype(t, localTypeOf[java.lang.Long]) => createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Long]) case t if isSubtype(t, localTypeOf[java.lang.Double]) => createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Double]) case t if isSubtype(t, localTypeOf[java.lang.Float]) => createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Float]) case t if isSubtype(t, localTypeOf[java.lang.Short]) => createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Short]) case t if isSubtype(t, localTypeOf[java.lang.Byte]) => 
createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Byte]) case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => createDeserializerForTypesSupportValueOf(path, classOf[java.lang.Boolean]) case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => createDeserializerForLocalDate(path) case t if isSubtype(t, localTypeOf[java.sql.Date]) => createDeserializerForSqlDate(path) case t if isSubtype(t, localTypeOf[java.time.Instant]) => createDeserializerForInstant(path) case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => createDeserializerForTypesSupportValueOf( Invoke(path, "toString", ObjectType(classOf[String]), returnNullable = false), getClassFromType(t)) case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => createDeserializerForSqlTimestamp(path) case t if isSubtype(t, localTypeOf[java.lang.String]) => createDeserializerForString(path, returnNullable = false) case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => createDeserializerForJavaBigDecimal(path, returnNullable = false) case t if isSubtype(t, localTypeOf[BigDecimal]) => createDeserializerForScalaBigDecimal(path, returnNullable = false) case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => createDeserializerForJavaBigInteger(path, returnNullable = false) case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => createDeserializerForScalaBigInt(path) case t if isSubtype(t, localTypeOf[Array[_]]) => val TypeRef(_, _, Seq(elementType)) = t val Schema(dataType, elementNullable) = schemaFor(elementType) val className = getClassNameFromType(elementType) val newTypePath = walkedTypePath.recordArray(className) val mapFunction: Expression => Expression = element => { // upcast the array element to the data type the encoder expected. 
deserializerForWithNullSafetyAndUpcast( element, dataType, nullable = elementNullable, newTypePath, (casted, typePath) => deserializerFor(elementType, casted, typePath)) } val arrayData = UnresolvedMapObjects(mapFunction, path) val arrayCls = arrayClassFor(elementType) val methodName = elementType match { case t if isSubtype(t, definitions.IntTpe) => "toIntArray" case t if isSubtype(t, definitions.LongTpe) => "toLongArray" case t if isSubtype(t, definitions.DoubleTpe) => "toDoubleArray" case t if isSubtype(t, definitions.FloatTpe) => "toFloatArray" case t if isSubtype(t, definitions.ShortTpe) => "toShortArray" case t if isSubtype(t, definitions.ByteTpe) => "toByteArray" case t if isSubtype(t, definitions.BooleanTpe) => "toBooleanArray" // non-primitive case _ => "array" } Invoke(arrayData, methodName, arrayCls, returnNullable = false) // We serialize a `Set` to Catalyst array. When we deserialize a Catalyst array // to a `Set`, if there are duplicated elements, the elements will be de-duplicated. 
case t if isSubtype(t, localTypeOf[scala.collection.Seq[_]]) || isSubtype(t, localTypeOf[scala.collection.Set[_]]) => val TypeRef(_, _, Seq(elementType)) = t val Schema(dataType, elementNullable) = schemaFor(elementType) val className = getClassNameFromType(elementType) val newTypePath = walkedTypePath.recordArray(className) val mapFunction: Expression => Expression = element => { deserializerForWithNullSafetyAndUpcast( element, dataType, nullable = elementNullable, newTypePath, (casted, typePath) => deserializerFor(elementType, casted, typePath)) } val companion = t.dealias.typeSymbol.companion.typeSignature val cls = companion.member(TermName("newBuilder")) match { case NoSymbol if isSubtype(t, localTypeOf[Seq[_]]) => classOf[Seq[_]] case NoSymbol if isSubtype(t, localTypeOf[scala.collection.Set[_]]) => classOf[scala.collection.Set[_]] case _ => mirror.runtimeClass(t.typeSymbol.asClass) } UnresolvedMapObjects(mapFunction, path, Some(cls)) case t if isSubtype(t, localTypeOf[Map[_, _]]) => val TypeRef(_, _, Seq(keyType, valueType)) = t val classNameForKey = getClassNameFromType(keyType) val classNameForValue = getClassNameFromType(valueType) val newTypePath = walkedTypePath.recordMap(classNameForKey, classNameForValue) UnresolvedCatalystToExternalMap( path, p => deserializerFor(keyType, p, newTypePath), p => deserializerFor(valueType, p, newTypePath), mirror.runtimeClass(t.typeSymbol.asClass) ) case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => val udt = getClassFromType(t).getAnnotation(classOf[SQLUserDefinedType]).udt(). 
getConstructor().newInstance() val obj = NewInstance( udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt(), Nil, dataType = ObjectType(udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt())) Invoke(obj, "deserialize", ObjectType(udt.userClass), path :: Nil) case t if UDTRegistration.exists(getClassNameFromType(t)) => val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). newInstance().asInstanceOf[UserDefinedType[_]] val obj = NewInstance( udt.getClass, Nil, dataType = ObjectType(udt.getClass)) Invoke(obj, "deserialize", ObjectType(udt.userClass), path :: Nil) case t if definedByConstructorParams(t) => val params = getConstructorParameters(t) val cls = getClassFromType(tpe) val arguments = params.zipWithIndex.map { case ((fieldName, fieldType), i) => val Schema(dataType, nullable) = schemaFor(fieldType) val clsName = getClassNameFromType(fieldType) val newTypePath = walkedTypePath.recordField(clsName, fieldName) // For tuples, we based grab the inner fields by ordinal instead of name. val newPath = if (cls.getName startsWith "scala.Tuple") { deserializerFor( fieldType, addToPathOrdinal(path, i, dataType, newTypePath), newTypePath) } else { deserializerFor( fieldType, addToPath(path, fieldName, dataType, newTypePath), newTypePath) } expressionWithNullSafety( newPath, nullable = nullable, newTypePath) } val newInstance = NewInstance(cls, arguments, ObjectType(cls), propagateNull = false) expressions.If( IsNull(path), expressions.Literal.create(null, ObjectType(cls)), newInstance ) case t if isSubtype(t, localTypeOf[Enumeration#Value]) => // package example // object Foo extends Enumeration { // type Foo = Value // val E1, E2 = Value // } // the fullName of tpe is example.Foo.Foo, but we need example.Foo so that // we can call example.Foo.withName to deserialize string to enumeration. 
val parent = t.asInstanceOf[TypeRef].pre.typeSymbol.asClass val cls = mirror.runtimeClass(parent) StaticInvoke( cls, ObjectType(getClassFromType(t)), "withName", createDeserializerForString(path, false) :: Nil, returnNullable = false) } } /** * Returns an expression for serializing an object of type T to Spark SQL representation. The * input object is located at ordinal 0 of a row, i.e., `BoundReference(0, _)`. * * If the given type is not supported, i.e. there is no encoder can be built for this type, * an [[UnsupportedOperationException]] will be thrown with detailed error message to explain * the type path walked so far and which class we are not supporting. * There are 4 kinds of type path: * * the root type: `root class: "abc.xyz.MyClass"` * * the value type of [[Option]]: `option value class: "abc.xyz.MyClass"` * * the element type of [[Array]] or [[Seq]]: `array element class: "abc.xyz.MyClass"` * * the field of [[Product]]: `field (class: "abc.xyz.MyClass", name: "myField")` */ def serializerForType(tpe: `Type`): Expression = ScalaReflection.cleanUpReflectionObjects { val clsName = getClassNameFromType(tpe) val walkedTypePath = new WalkedTypePath().recordRoot(clsName) // The input object to `ExpressionEncoder` is located at first column of an row. val isPrimitive = tpe.typeSymbol.asClass.isPrimitive val inputObject = BoundReference(0, dataTypeFor(tpe), nullable = !isPrimitive) serializerFor(inputObject, tpe, walkedTypePath) } /** * Returns an expression for serializing the value of an input expression into Spark SQL * internal representation. 
*/ private def serializerFor( inputObject: Expression, tpe: `Type`, walkedTypePath: WalkedTypePath, seenTypeSet: Set[`Type`] = Set.empty): Expression = cleanUpReflectionObjects { def toCatalystArray(input: Expression, elementType: `Type`): Expression = { dataTypeFor(elementType) match { case dt: ObjectType => val clsName = getClassNameFromType(elementType) val newPath = walkedTypePath.recordArray(clsName) createSerializerForMapObjects(input, dt, serializerFor(_, elementType, newPath, seenTypeSet)) case dt @ (BooleanType | ByteType | ShortType | IntegerType | LongType | FloatType | DoubleType) => val cls = input.dataType.asInstanceOf[ObjectType].cls if (cls.isArray && cls.getComponentType.isPrimitive) { createSerializerForPrimitiveArray(input, dt) } else { createSerializerForGenericArray(input, dt, nullable = schemaFor(elementType).nullable) } case dt => createSerializerForGenericArray(input, dt, nullable = schemaFor(elementType).nullable) } } baseType(tpe) match { case _ if !inputObject.dataType.isInstanceOf[ObjectType] => inputObject case t if isSubtype(t, localTypeOf[Option[_]]) => val TypeRef(_, _, Seq(optType)) = t val className = getClassNameFromType(optType) val newPath = walkedTypePath.recordOption(className) val unwrapped = UnwrapOption(dataTypeFor(optType), inputObject) serializerFor(unwrapped, optType, newPath, seenTypeSet) // Since List[_] also belongs to localTypeOf[Product], we put this case before // "case t if definedByConstructorParams(t)" to make sure it will match to the // case "localTypeOf[Seq[_]]" case t if isSubtype(t, localTypeOf[scala.collection.Seq[_]]) => val TypeRef(_, _, Seq(elementType)) = t toCatalystArray(inputObject, elementType) case t if isSubtype(t, localTypeOf[Array[_]]) => val TypeRef(_, _, Seq(elementType)) = t toCatalystArray(inputObject, elementType) case t if isSubtype(t, localTypeOf[Map[_, _]]) => val TypeRef(_, _, Seq(keyType, valueType)) = t val keyClsName = getClassNameFromType(keyType) val valueClsName = 
getClassNameFromType(valueType) val keyPath = walkedTypePath.recordKeyForMap(keyClsName) val valuePath = walkedTypePath.recordValueForMap(valueClsName) createSerializerForMap( inputObject, MapElementInformation( dataTypeFor(keyType), nullable = !keyType.typeSymbol.asClass.isPrimitive, serializerFor(_, keyType, keyPath, seenTypeSet)), MapElementInformation( dataTypeFor(valueType), nullable = !valueType.typeSymbol.asClass.isPrimitive, serializerFor(_, valueType, valuePath, seenTypeSet)) ) case t if isSubtype(t, localTypeOf[scala.collection.Set[_]]) => val TypeRef(_, _, Seq(elementType)) = t // There's no corresponding Catalyst type for `Set`, we serialize a `Set` to Catalyst array. // Note that the property of `Set` is only kept when manipulating the data as domain object. val newInput = Invoke( inputObject, "toSeq", ObjectType(classOf[Seq[_]])) toCatalystArray(newInput, elementType) case t if isSubtype(t, localTypeOf[String]) => createSerializerForString(inputObject) case t if isSubtype(t, localTypeOf[java.time.Instant]) => createSerializerForJavaInstant(inputObject) case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => createSerializerForSqlTimestamp(inputObject) case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => createSerializerForJavaLocalDate(inputObject) case t if isSubtype(t, localTypeOf[java.sql.Date]) => createSerializerForSqlDate(inputObject) case t if isSubtype(t, localTypeOf[BigDecimal]) => createSerializerForScalaBigDecimal(inputObject) case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => createSerializerForJavaBigDecimal(inputObject) case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => createSerializerForJavaBigInteger(inputObject) case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => createSerializerForJavaEnum(inputObject) case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => createSerializerForScalaBigInt(inputObject) case t if isSubtype(t, localTypeOf[java.lang.Integer]) => 
createSerializerForInteger(inputObject) case t if isSubtype(t, localTypeOf[java.lang.Long]) => createSerializerForLong(inputObject) case t if isSubtype(t, localTypeOf[java.lang.Double]) => createSerializerForDouble(inputObject) case t if isSubtype(t, localTypeOf[java.lang.Float]) => createSerializerForFloat(inputObject) case t if isSubtype(t, localTypeOf[java.lang.Short]) => createSerializerForShort(inputObject) case t if isSubtype(t, localTypeOf[java.lang.Byte]) => createSerializerForByte(inputObject) case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => createSerializerForBoolean(inputObject) case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => val udt = getClassFromType(t) .getAnnotation(classOf[SQLUserDefinedType]).udt().getConstructor().newInstance() val udtClass = udt.userClass.getAnnotation(classOf[SQLUserDefinedType]).udt() createSerializerForUserDefinedType(inputObject, udt, udtClass) case t if UDTRegistration.exists(getClassNameFromType(t)) => val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). newInstance().asInstanceOf[UserDefinedType[_]] val udtClass = udt.getClass createSerializerForUserDefinedType(inputObject, udt, udtClass) case t if definedByConstructorParams(t) => if (seenTypeSet.contains(t)) { throw new UnsupportedOperationException( s"cannot have circular references in class, but got the circular reference of class $t") } val params = getConstructorParameters(t) val fields = params.map { case (fieldName, fieldType) => if (SourceVersion.isKeyword(fieldName) || !SourceVersion.isIdentifier(encodeFieldNameToIdentifier(fieldName))) { throw new UnsupportedOperationException(s"`$fieldName` is not a valid identifier of " + "Java and cannot be used as field name\\n" + walkedTypePath) } // SPARK-26730 inputObject won't be null with If's guard below. And KnownNotNul // is necessary here. Because for a nullable nested inputObject with struct data // type, e.g. 
StructType(IntegerType, StringType), it will return nullable=true // for IntegerType without KnownNotNull. And that's what we do not expect to. val fieldValue = Invoke(KnownNotNull(inputObject), fieldName, dataTypeFor(fieldType), returnNullable = !fieldType.typeSymbol.asClass.isPrimitive) val clsName = getClassNameFromType(fieldType) val newPath = walkedTypePath.recordField(clsName, fieldName) (fieldName, serializerFor(fieldValue, fieldType, newPath, seenTypeSet + t)) } createSerializerForObject(inputObject, fields) case t if isSubtype(t, localTypeOf[Enumeration#Value]) => createSerializerForString( Invoke( inputObject, "toString", ObjectType(classOf[java.lang.String]), returnNullable = false)) case _ => throw new UnsupportedOperationException( s"No Encoder found for $tpe\\n" + walkedTypePath) } } /** * Returns true if the given type is option of product type, e.g. `Option[Tuple2]`. Note that, * we also treat [[DefinedByConstructorParams]] as product type. */ def optionOfProductType(tpe: `Type`): Boolean = cleanUpReflectionObjects { tpe.dealias match { case t if isSubtype(t, localTypeOf[Option[_]]) => val TypeRef(_, _, Seq(optType)) = t definedByConstructorParams(optType) case _ => false } } /** * Returns the parameter names and types for the primary constructor of this class. * * Note that it only works for scala classes with primary constructor, and currently doesn't * support inner class. */ def getConstructorParameters(cls: Class[_]): Seq[(String, Type)] = { val m = runtimeMirror(cls.getClassLoader) val classSymbol = m.staticClass(cls.getName) val t = classSymbol.selfType getConstructorParameters(t) } /** * Returns the parameter names for the primary constructor of this class. * * Logically we should call `getConstructorParameters` and throw away the parameter types to get * parameter names, however there are some weird scala reflection problems and this method is a * workaround to avoid getting parameter types. 
*/ def getConstructorParameterNames(cls: Class[_]): Seq[String] = { val m = runtimeMirror(cls.getClassLoader) val classSymbol = m.staticClass(cls.getName) val t = classSymbol.selfType constructParams(t).map(_.name.decodedName.toString) } /** * Returns the parameter values for the primary constructor of this class. */ def getConstructorParameterValues(obj: DefinedByConstructorParams): Seq[AnyRef] = { getConstructorParameterNames(obj.getClass).map { name => obj.getClass.getMethod(name).invoke(obj) } } private def erasure(tpe: Type): Type = { // For user-defined AnyVal classes, we should not erasure it. Otherwise, it will // resolve to underlying type which wrapped by this class, e.g erasure // `case class Foo(i: Int) extends AnyVal` will return type `Int` instead of `Foo`. // But, for other types, we do need to erasure it. For example, we need to erasure // `scala.Any` to `java.lang.Object` in order to load it from Java ClassLoader. // Please see SPARK-17368 & SPARK-31190 for more details. if (isSubtype(tpe, localTypeOf[AnyVal]) && !tpe.toString.startsWith("scala")) { tpe } else { tpe.erasure } } /** * Returns the full class name for a type. The returned name is the canonical * Scala name, where each component is separated by a period. It is NOT the * Java-equivalent runtime name (no dollar signs). * * In simple cases, both the Scala and Java names are the same, however when Scala * generates constructs that do not map to a Java equivalent, such as singleton objects * or nested classes in package objects, it uses the dollar sign ($) to create * synthetic classes, emulating behaviour in Java bytecode. */ def getClassNameFromType(tpe: `Type`): String = { erasure(tpe).dealias.typeSymbol.asClass.fullName } /* * Retrieves the runtime class corresponding to the provided type. 
*/ def getClassFromType(tpe: Type): Class[_] = mirror.runtimeClass(erasure(tpe).dealias.typeSymbol.asClass) case class Schema(dataType: DataType, nullable: Boolean) /** Returns a Sequence of attributes for the given case class type. */ def attributesFor[T: TypeTag]: Seq[Attribute] = schemaFor[T] match { case Schema(s: StructType, _) => s.toAttributes case others => throw new UnsupportedOperationException(s"Attributes for type $others is not supported") } /** Returns a catalyst DataType and its nullability for the given Scala Type using reflection. */ def schemaFor[T: TypeTag]: Schema = schemaFor(localTypeOf[T]) /** Returns a catalyst DataType and its nullability for the given Scala Type using reflection. */ def schemaFor(tpe: `Type`): Schema = cleanUpReflectionObjects { baseType(tpe) match { // this must be the first case, since all objects in scala are instances of Null, therefore // Null type would wrongly match the first of them, which is Option as of now case t if isSubtype(t, definitions.NullTpe) => Schema(NullType, nullable = true) case t if t.typeSymbol.annotations.exists(_.tree.tpe =:= typeOf[SQLUserDefinedType]) => val udt = getClassFromType(t).getAnnotation(classOf[SQLUserDefinedType]).udt(). getConstructor().newInstance() Schema(udt, nullable = true) case t if UDTRegistration.exists(getClassNameFromType(t)) => val udt = UDTRegistration.getUDTFor(getClassNameFromType(t)).get.getConstructor(). 
newInstance().asInstanceOf[UserDefinedType[_]] Schema(udt, nullable = true) case t if isSubtype(t, localTypeOf[Option[_]]) => val TypeRef(_, _, Seq(optType)) = t Schema(schemaFor(optType).dataType, nullable = true) case t if isSubtype(t, localTypeOf[Array[Byte]]) => Schema(BinaryType, nullable = true) case t if isSubtype(t, localTypeOf[Array[_]]) => val TypeRef(_, _, Seq(elementType)) = t val Schema(dataType, nullable) = schemaFor(elementType) Schema(ArrayType(dataType, containsNull = nullable), nullable = true) case t if isSubtype(t, localTypeOf[scala.collection.Seq[_]]) => val TypeRef(_, _, Seq(elementType)) = t val Schema(dataType, nullable) = schemaFor(elementType) Schema(ArrayType(dataType, containsNull = nullable), nullable = true) case t if isSubtype(t, localTypeOf[Map[_, _]]) => val TypeRef(_, _, Seq(keyType, valueType)) = t val Schema(valueDataType, valueNullable) = schemaFor(valueType) Schema(MapType(schemaFor(keyType).dataType, valueDataType, valueContainsNull = valueNullable), nullable = true) case t if isSubtype(t, localTypeOf[Set[_]]) => val TypeRef(_, _, Seq(elementType)) = t val Schema(dataType, nullable) = schemaFor(elementType) Schema(ArrayType(dataType, containsNull = nullable), nullable = true) case t if isSubtype(t, localTypeOf[String]) => Schema(StringType, nullable = true) case t if isSubtype(t, localTypeOf[java.time.Instant]) => Schema(TimestampType, nullable = true) case t if isSubtype(t, localTypeOf[java.sql.Timestamp]) => Schema(TimestampType, nullable = true) case t if isSubtype(t, localTypeOf[java.time.LocalDate]) => Schema(DateType, nullable = true) case t if isSubtype(t, localTypeOf[java.sql.Date]) => Schema(DateType, nullable = true) case t if isSubtype(t, localTypeOf[CalendarInterval]) => Schema(CalendarIntervalType, nullable = true) case t if isSubtype(t, localTypeOf[BigDecimal]) => Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) case t if isSubtype(t, localTypeOf[java.math.BigDecimal]) => Schema(DecimalType.SYSTEM_DEFAULT, 
nullable = true) case t if isSubtype(t, localTypeOf[java.math.BigInteger]) => Schema(DecimalType.BigIntDecimal, nullable = true) case t if isSubtype(t, localTypeOf[scala.math.BigInt]) => Schema(DecimalType.BigIntDecimal, nullable = true) case t if isSubtype(t, localTypeOf[Decimal]) => Schema(DecimalType.SYSTEM_DEFAULT, nullable = true) case t if isSubtype(t, localTypeOf[java.lang.Integer]) => Schema(IntegerType, nullable = true) case t if isSubtype(t, localTypeOf[java.lang.Long]) => Schema(LongType, nullable = true) case t if isSubtype(t, localTypeOf[java.lang.Double]) => Schema(DoubleType, nullable = true) case t if isSubtype(t, localTypeOf[java.lang.Float]) => Schema(FloatType, nullable = true) case t if isSubtype(t, localTypeOf[java.lang.Short]) => Schema(ShortType, nullable = true) case t if isSubtype(t, localTypeOf[java.lang.Byte]) => Schema(ByteType, nullable = true) case t if isSubtype(t, localTypeOf[java.lang.Boolean]) => Schema(BooleanType, nullable = true) case t if isSubtype(t, localTypeOf[java.lang.Enum[_]]) => Schema(StringType, nullable = true) case t if isSubtype(t, definitions.IntTpe) => Schema(IntegerType, nullable = false) case t if isSubtype(t, definitions.LongTpe) => Schema(LongType, nullable = false) case t if isSubtype(t, definitions.DoubleTpe) => Schema(DoubleType, nullable = false) case t if isSubtype(t, definitions.FloatTpe) => Schema(FloatType, nullable = false) case t if isSubtype(t, definitions.ShortTpe) => Schema(ShortType, nullable = false) case t if isSubtype(t, definitions.ByteTpe) => Schema(ByteType, nullable = false) case t if isSubtype(t, definitions.BooleanTpe) => Schema(BooleanType, nullable = false) case t if definedByConstructorParams(t) => val params = getConstructorParameters(t) Schema(StructType( params.map { case (fieldName, fieldType) => val Schema(dataType, nullable) = schemaFor(fieldType) StructField(fieldName, dataType, nullable) }), nullable = true) case t if isSubtype(t, localTypeOf[Enumeration#Value]) => 
Schema(StringType, nullable = true) case other => throw new UnsupportedOperationException(s"Schema for type $other is not supported") } } /** * Finds an accessible constructor with compatible parameters. This is a more flexible search than * the exact matching algorithm in `Class.getConstructor`. The first assignment-compatible * matching constructor is returned if it exists. Otherwise, we check for additional compatible * constructors defined in the companion object as `apply` methods. Otherwise, it returns `None`. */ def findConstructor[T](cls: Class[T], paramTypes: Seq[Class[_]]): Option[Seq[AnyRef] => T] = { Option(ConstructorUtils.getMatchingAccessibleConstructor(cls, paramTypes: _*)) match { case Some(c) => Some(x => c.newInstance(x: _*).asInstanceOf[T]) case None => val companion = mirror.staticClass(cls.getName).companion val moduleMirror = mirror.reflectModule(companion.asModule) val applyMethods = companion.asTerm.typeSignature .member(universe.TermName("apply")).asTerm.alternatives applyMethods.find { method => val params = method.typeSignature.paramLists.head // Check that the needed params are the same length and of matching types params.size == paramTypes.tail.size && params.zip(paramTypes.tail).forall { case(ps, pc) => ps.typeSignature.typeSymbol == mirror.classSymbol(pc) } }.map { applyMethodSymbol => val expectedArgsCount = applyMethodSymbol.typeSignature.paramLists.head.size val instanceMirror = mirror.reflect(moduleMirror.instance) val method = instanceMirror.reflectMethod(applyMethodSymbol.asMethod) (_args: Seq[AnyRef]) => { // Drop the "outer" argument if it is provided val args = if (_args.size == expectedArgsCount) _args else _args.tail method.apply(args: _*).asInstanceOf[T] } } } } /** * Whether the fields of the given type is defined entirely by its constructor parameters. 
*/ def definedByConstructorParams(tpe: Type): Boolean = cleanUpReflectionObjects { tpe.dealias match { // `Option` is a `Product`, but we don't wanna treat `Option[Int]` as a struct type. case t if isSubtype(t, localTypeOf[Option[_]]) => definedByConstructorParams(t.typeArgs.head) case _ => isSubtype(tpe.dealias, localTypeOf[Product]) || isSubtype(tpe.dealias, localTypeOf[DefinedByConstructorParams]) } } val typeJavaMapping = Map[DataType, Class[_]]( BooleanType -> classOf[Boolean], ByteType -> classOf[Byte], ShortType -> classOf[Short], IntegerType -> classOf[Int], LongType -> classOf[Long], FloatType -> classOf[Float], DoubleType -> classOf[Double], StringType -> classOf[UTF8String], DateType -> classOf[DateType.InternalType], TimestampType -> classOf[TimestampType.InternalType], BinaryType -> classOf[BinaryType.InternalType], CalendarIntervalType -> classOf[CalendarInterval] ) val typeBoxedJavaMapping = Map[DataType, Class[_]]( BooleanType -> classOf[java.lang.Boolean], ByteType -> classOf[java.lang.Byte], ShortType -> classOf[java.lang.Short], IntegerType -> classOf[java.lang.Integer], LongType -> classOf[java.lang.Long], FloatType -> classOf[java.lang.Float], DoubleType -> classOf[java.lang.Double], DateType -> classOf[java.lang.Integer], TimestampType -> classOf[java.lang.Long] ) def dataTypeJavaClass(dt: DataType): Class[_] = { dt match { case _: DecimalType => classOf[Decimal] case _: StructType => classOf[InternalRow] case _: ArrayType => classOf[ArrayData] case _: MapType => classOf[MapData] case ObjectType(cls) => cls case _ => typeJavaMapping.getOrElse(dt, classOf[java.lang.Object]) } } def javaBoxedType(dt: DataType): Class[_] = dt match { case _: DecimalType => classOf[Decimal] case BinaryType => classOf[Array[Byte]] case StringType => classOf[UTF8String] case CalendarIntervalType => classOf[CalendarInterval] case _: StructType => classOf[InternalRow] case _: ArrayType => classOf[ArrayType] case _: MapType => classOf[MapType] case udt: 
UserDefinedType[_] => javaBoxedType(udt.sqlType) case ObjectType(cls) => cls case _ => ScalaReflection.typeBoxedJavaMapping.getOrElse(dt, classOf[java.lang.Object]) } def expressionJavaClasses(arguments: Seq[Expression]): Seq[Class[_]] = { if (arguments != Nil) { arguments.map(e => dataTypeJavaClass(e.dataType)) } else { Seq.empty } } def encodeFieldNameToIdentifier(fieldName: String): String = { TermName(fieldName).encodedName.toString } } /** * Support for generating catalyst schemas for scala objects. Note that unlike its companion * object, this trait able to work in both the runtime and the compile time (macro) universe. */ trait ScalaReflection extends Logging { /** The universe we work in (runtime or macro) */ val universe: scala.reflect.api.Universe /** The mirror used to access types in the universe */ def mirror: universe.Mirror import universe._ /** * Any codes calling `scala.reflect.api.Types.TypeApi.<:<` should be wrapped by this method to * clean up the Scala reflection garbage automatically. Otherwise, it will leak some objects to * `scala.reflect.runtime.JavaUniverse.undoLog`. * * @see https://github.com/scala/bug/issues/8302 */ def cleanUpReflectionObjects[T](func: => T): T = { universe.asInstanceOf[scala.reflect.runtime.JavaUniverse].undoLog.undo(func) } /** * Return the Scala Type for `T` in the current classloader mirror. * * Use this method instead of the convenience method `universe.typeOf`, which * assumes that all types can be found in the classloader that loaded scala-reflect classes. * That's not necessarily the case when running using Eclipse launchers or even * Sbt console or test (without `fork := true`). * * @see SPARK-5281 */ def localTypeOf[T: TypeTag]: `Type` = { val tag = implicitly[TypeTag[T]] tag.in(mirror).tpe.dealias } /** * Returns the parameter names and types for the primary constructor of this type. * * Note that it only works for scala classes with primary constructor, and currently doesn't * support inner class. 
*/
def getConstructorParameters(tpe: Type): Seq[(String, Type)] = {
  val dealiasedTpe = tpe.dealias
  val formalTypeArgs = dealiasedTpe.typeSymbol.asClass.typeParams
  val TypeRef(_, _, actualTypeArgs) = dealiasedTpe
  val params = constructParams(dealiasedTpe)
  // if there are type variables to fill in, do the substitution (SomeClass[T] -> SomeClass[Int])
  if (actualTypeArgs.nonEmpty) {
    // Pair each parameter name with its type, with formal type params replaced
    // by the actual type arguments of this (dealiased) type.
    params.map { p =>
      p.name.decodedName.toString ->
        p.typeSignature.substituteTypes(formalTypeArgs, actualTypeArgs)
    }
  } else {
    // No type variables to substitute: use the parameter's declared signature as-is.
    params.map { p =>
      p.name.decodedName.toString -> p.typeSignature
    }
  }
}

/**
 * If our type is a Scala trait it may have a companion object that
 * only defines a constructor via `apply` method.
 */
private def getCompanionConstructor(tpe: Type): Symbol = {
  // Shared failure path: no usable constructor and no companion `apply` were found.
  def throwUnsupportedOperation = {
    throw new UnsupportedOperationException(s"Unable to find constructor for $tpe. " +
      s"This could happen if $tpe is an interface, or a trait without companion object " +
      "constructor.")
  }
  // First look for a companion symbol at all, then for an `apply` member on it.
  tpe.typeSymbol.asClass.companion match {
    case NoSymbol => throwUnsupportedOperation
    case sym => sym.asTerm.typeSignature.member(universe.TermName("apply")) match {
      case NoSymbol => throwUnsupportedOperation
      case constructorSym => constructorSym
    }
  }
}

// Resolves the constructor symbol for `tpe` — falling back to the companion
// object's `apply` when the type itself declares no constructor — and returns
// its parameters, flattened across all parameter lists.
protected def constructParams(tpe: Type): Seq[Symbol] = {
  val constructorSymbol = tpe.member(termNames.CONSTRUCTOR) match {
    case NoSymbol => getCompanionConstructor(tpe)
    case sym => sym
  }
  val params = if (constructorSymbol.isMethod) {
    // Single (unambiguous) constructor/apply method: take its parameter lists directly.
    constructorSymbol.asMethod.paramLists
  } else {
    // Find the primary constructor, and use its parameter ordering.
    val primaryConstructorSymbol: Option[Symbol] = constructorSymbol.asTerm.alternatives.find(
      s => s.isMethod && s.asMethod.isPrimaryConstructor)
    if (primaryConstructorSymbol.isEmpty) {
      sys.error("Internal SQL error: Product object did not have a primary constructor.")
    } else {
      primaryConstructorSymbol.get.asMethod.paramLists
    }
  }
  params.flatten
}
}
witgo/spark
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
Scala
apache-2.0
44,330
package reactivemongo.api.commands.bson

import reactivemongo.api.BSONSerializationPack
import reactivemongo.api.commands._

/** The `count` command bound to the default BSON serialization pack. */
object BSONCountCommand extends CountCommand[BSONSerializationPack.type] {
  val pack = BSONSerializationPack
}

/** BSON writers/readers needed to serialize a `count` command and read its reply. */
object BSONCountCommandImplicits {
  import reactivemongo.bson.{
    BSONDocument,
    BSONDocumentWriter,
    BSONNumberLike,
    BSONString,
    BSONValue,
    BSONWriter
  }
  import BSONCountCommand._

  /** Serializes a query hint: either an index name (string) or an index spec document. */
  implicit object HintWriter extends BSONWriter[Hint, BSONValue] {
    def write(hint: Hint): BSONValue = hint match {
      case HintString(indexName)   => BSONString(indexName)
      case HintDocument(indexSpec) => indexSpec
    }
  }

  /** Serializes a resolved `count` command into its BSON wire form. */
  implicit object CountWriter
      extends BSONDocumentWriter[ResolvedCollectionCommand[Count]] {

    def write(count: ResolvedCollectionCommand[Count]): BSONDocument = {
      val cmd = count.command
      BSONDocument(
        "count" -> count.collection,
        "query" -> cmd.query,
        "limit" -> cmd.limit,
        "skip" -> cmd.skip,
        "hint" -> cmd.hint)
    }
  }

  /** Reads the `n` field from a successful `count` reply (0 when the field is absent). */
  implicit object CountResultReader
      extends DealingWithGenericCommandErrorsReader[CountResult] {

    def readResult(doc: BSONDocument): CountResult =
      CountResult(doc.getAs[BSONNumberLike]("n").fold(0)(_.toInt))
  }
}
maxime-gautre/ReactiveMongo
driver/src/main/scala/api/commands/bson/count.scala
Scala
apache-2.0
1,292
package models.daos

import java.util.UUID
import javax.inject.Inject

import models._
import models.daos.tables.{DBPerson, GroupTable, OrganisationTable, PersonTable}
import play.api.db.slick.{DatabaseConfigProvider, HasDatabaseConfigProvider}
import slick.jdbc.JdbcProfile
import slick.jdbc.PostgresProfile.api._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.language.implicitConversions

/**
 * Slick-backed DAO for persons. Every read joins the person row with its group
 * and that group's organisation so a fully populated [[Person]] is returned.
 */
class PersonDAO @Inject()(protected val dbConfigProvider: DatabaseConfigProvider)
  extends HasDatabaseConfigProvider[JdbcProfile] {

  private val persons = TableQuery[PersonTable]
  private val groups = TableQuery[GroupTable]
  private val organisations = TableQuery[OrganisationTable]

  /** Fetches every person, joined with its group and organisation. */
  def all: Future[Seq[Person]] = {
    val joined = for {
      p <- persons
      g <- groups if g.id === p.group_id
      o <- organisations if o.id === g.organisation_id
    } yield (p, g, o)

    db.run(joined.result).map(_.map { case (p, g, o) =>
      Person(p.id, p.name, p.email, p.age, Group(g.id, g.name, o))
    })
  }

  /** Fetches one person by id, or `None` when no row matches. */
  def get(id: UUID): Future[Option[Person]] = {
    val joined = for {
      p <- persons if p.id === id
      g <- groups if g.id === p.group_id
      o <- organisations if o.id === g.organisation_id
    } yield (p, g, o)

    db.run(joined.result.headOption).map(_.map { case (p, g, o) =>
      Person(p.id, p.name, p.email, p.age, Group(g.id, g.name, o))
    })
  }

  /** Inserts the person, or updates the row when the key already exists. */
  def save(person: Person): Future[Person] =
    db.run(persons.insertOrUpdate(person)).map(_ => person)

  /** Deletes the person with the given id; yields the number of affected rows. */
  def delete(id: UUID): Future[Int] =
    db.run(persons.filter(_.id === id).delete)

  // Implicit bridge from the domain model to the database row type, so `save`
  // can be called with a plain Person.
  implicit private def toDBPerson(person: Person): DBPerson =
    DBPerson(person.id, person.name, person.email, person.age, person.group.id)
}
wjglerum/bamboesmanager
app/models/daos/PersonDAO.scala
Scala
mit
1,879
package io.skysail.ext.oauth2

import io.skysail.ext.oauth2.config.OAuth2ConfigDescriptor

/**
 * Thin wrapper exposing the OAuth2 server endpoint values declared by an
 * [[OAuth2ConfigDescriptor]]. All methods simply delegate to the wrapped
 * `config`; no values are cached or transformed here.
 *
 * @param config the descriptor holding the configured endpoint values
 */
class OAuth2ServerParameters(val config: OAuth2ConfigDescriptor) {

  // Base URL of the API, as configured.
  def apiBaseUrl() = config.apiBaseUrl()

  // Authorization URI, as configured — presumably the OAuth2 authorization
  // endpoint, judging by the name; confirm against OAuth2ConfigDescriptor.
  def authUri() = config.authUri()

  // Token URI, as configured — presumably the OAuth2 token endpoint.
  def tokenUri() = config.tokenUri()
}
evandor/skysail
skysail.ext.oauth2/src/io/skysail/ext/oauth2/OAuth2ServerParameters.scala
Scala
apache-2.0
273
package com.thetestpeople.trt.model.impl

import java.net.URI
import org.joda.time.DateTime
import org.joda.time.Interval
import com.thetestpeople.trt.model._
import com.thetestpeople.trt.model.jenkins._
import com.github.nscala_time.time.Imports._
import java.util.concurrent.locks.ReentrantLock
import java.util.concurrent.locks.Lock
import org.apache.oro.text.GlobCompiler
import java.util.regex.Pattern

/**
 * In-memory implementation of [[Dao]] backed by plain immutable Seqs held in vars.
 *
 * All state lives on the heap; nothing is persisted. Located under `test/`, so
 * presumably used as a test double for the real database-backed DAO — TODO confirm.
 * Updates are performed by swapping whole Seqs; callers are expected to serialise
 * access through `transaction`.
 */
class MockDao extends Dao {

  private val lock: Lock = new ReentrantLock

  /** Runs `p` while holding the single reentrant lock, so transactions are mutually exclusive. */
  def transaction[T](p: ⇒ T): T = {
    lock.lock()
    try p finally lock.unlock()
  }

  // All "tables" are immutable Seqs replaced wholesale on every mutation.
  private var executions: Seq[Execution] = Seq()
  private var tests: Seq[Test] = Seq()
  private var testComments: Seq[TestComment] = Seq()
  private var testCategories: Seq[TestCategory] = Seq()
  private var ignoredTestConfigurations: Seq[IgnoredTestConfiguration] = Seq()
  private var batches: Seq[Batch] = Seq()
  private var analyses: Seq[Analysis] = Seq()
  private var executionLogs: Seq[ExecutionLogRow] = Seq()
  private var executionComments: Seq[ExecutionComment] = Seq()
  private var batchLogs: Seq[BatchLogRow] = Seq()
  private var batchComments: Seq[BatchComment] = Seq()
  private var ciJobs: Seq[CiJob] = Seq()
  private var ciBuilds: Seq[CiBuild] = Seq()
  private var importSpecs: Seq[CiImportSpec] = Seq()
  private var systemConfiguration = SystemConfiguration()
  private var jenkinsConfiguration = JenkinsConfiguration()
  private var jenkinsConfigParams: Seq[JenkinsJobParam] = Seq()
  private var teamCityConfiguration = TeamCityConfiguration()

  /** Joins an execution with its test and batch, plus optional log and comment. None if any part is missing. */
  def getEnrichedExecution(id: Id[Execution]): Option[EnrichedExecution] =
    for {
      execution ← executions.find(_.id == id)
      test ← tests.find(_.id == execution.testId)
      batch ← batches.find(_.id == execution.batchId)
      logOpt = executionLogs.find(_.executionId == id).map(_.log)
      commentOpt = executionComments.find(_.executionId == id).map(_.text)
    } yield EnrichedExecution(execution, test.qualifiedName, batch.nameOpt, logOpt, commentOpt)

  /** None unless the test exists AND has an analysis for the given configuration. */
  def getEnrichedTest(id: Id[Test], configuration: Configuration): Option[EnrichedTest] =
    for {
      test ← tests.find(_.id == id)
      analysis ← analyses.find(a ⇒ a.testId == test.id && a.configuration == configuration)
      commentOpt = testComments.find(_.testId == id).map(_.text)
    } yield EnrichedTest(test, Some(analysis), commentOpt)

  def getTestIds(): Seq[Id[Test]] = tests.map(_.id)

  /**
   * @return all tests marked as deleted
   */
  def getDeletedTests(): Seq[Test] =
    for {
      test ← tests
      if test.deleted
    } yield test

  /**
   * Filters analysed tests by configuration and optional glob patterns / status /
   * category / black- and white-lists, then sorts and pages the result.
   * Tests without an analysis in the given configuration are excluded.
   */
  def getAnalysedTests(
    configuration: Configuration,
    testStatusOpt: Option[TestStatus] = None,
    nameOpt: Option[String] = None,
    groupOpt: Option[String] = None,
    categoryOpt: Option[String] = None,
    blackListOpt: Option[Seq[Id[Test]]] = None,
    whiteListOpt: Option[Seq[Id[Test]]] = None,
    startingFrom: Int = 0,
    limitOpt: Option[Int],
    sortBy: SortBy.Test = SortBy.Test.Group()): Seq[EnrichedTest] = {
    val allResults = for {
      test ← tests
      if groupOpt.forall(pattern ⇒ test.groupOpt.exists(group ⇒ matchesPattern(pattern, group)))
      if nameOpt.forall(pattern ⇒ matchesPattern(pattern, test.name))
      analysis ← analyses.find(a ⇒ a.testId == test.id && a.configuration == configuration)
      if testStatusOpt.forall(status ⇒ analysis.status == status)
      if blackListOpt.forall(blackList ⇒ !(blackList contains test.id))
      // NOTE(review): the binding below is named "blackList" but actually carries the whitelist.
      if whiteListOpt.forall(blackList ⇒ blackList contains test.id)
      if categoryOpt.forall(category ⇒ testCategories.exists(tc ⇒ tc.testId == test.id && tc.category == category))
      commentOpt = testComments.find(_.testId == test.id).map(_.text)
    } yield EnrichedTest(test, Some(analysis), commentOpt)
    // Descending order is implemented by reversing the ascending sort.
    def order(x: Seq[EnrichedTest], descending: Boolean) = if (descending) x.reverse else x
    val sortedResults = sortBy match {
      case SortBy.Test.Weather(descending) ⇒
        order(allResults.sortBy(_.analysisOpt.map(_.weather)), descending)
      case SortBy.Test.Group(descending) ⇒
        // Secondary sort by name, primary by group (stable sort preserves the name order within groups).
        order(allResults.sortBy(_.test.name).sortBy(_.test.groupOpt), descending)
      case SortBy.Test.Name(descending) ⇒
        order(allResults.sortBy(_.name), descending)
      case SortBy.Test.Duration(descending) ⇒
        order(allResults.sortBy(_.analysisOpt.flatMap(_.medianDurationOpt)), descending)
      case SortBy.Test.ConsecutiveFailures(descending) ⇒
        order(allResults.sortBy(_.analysisOpt.map(_.consecutiveFailures)), descending)
      case SortBy.Test.StartedFailing(descending) ⇒
        order(allResults.sortBy(_.analysisOpt.flatMap(_.failingSinceOpt)), descending)
      case SortBy.Test.LastPassed(descending) ⇒
        order(allResults.sortBy(_.analysisOpt.flatMap(_.lastPassedTimeOpt)), descending)
      case SortBy.Test.LastFailed(descending) ⇒
        order(allResults.sortBy(_.analysisOpt.flatMap(_.lastFailedTimeOpt)), descending)
    }
    // Paging: drop up to `startingFrom`, then apply the limit when one is given.
    limitOpt match {
      case Some(limit) ⇒ sortedResults.drop(startingFrom).take(limit)
      case None ⇒ sortedResults.drop(startingFrom)
    }
  }

  def getTestsById(testIds: Seq[Id[Test]]): Seq[Test] =
    tests.filter(test ⇒ testIds.contains(test.id))

  /** Counts passed/warning/failed among the filtered analysed tests, excluding `ignoredTests`. */
  def getTestCounts(
    configuration: Configuration,
    nameOpt: Option[String] = None,
    groupOpt: Option[String] = None,
    categoryOpt: Option[String] = None,
    ignoredTests: Seq[Id[Test]] = Seq()): TestCounts = {
    val tests = getAnalysedTests(configuration, nameOpt = nameOpt, groupOpt = groupOpt, categoryOpt = categoryOpt)
      .filterNot(ignoredTests contains _.id)
    val passed = tests.count(_.analysisOpt.exists(_.status == TestStatus.Healthy))
    val warning = tests.count(_.analysisOpt.exists(_.status == TestStatus.Warning))
    val failed = tests.count(_.analysisOpt.exists(_.status == TestStatus.Broken))
    TestCounts(passed, warning, failed, ignored = ignoredTests.size)
  }

  // NOTE(review): replaces ALL analyses for this testId regardless of configuration,
  // although lookups elsewhere key on (testId, configuration) — confirm this is intended.
  def upsertAnalysis(analysis: Analysis) {
    analyses = analysis +: analyses.filterNot(_.testId == analysis.testId)
  }

  def getBatch(id: Id[Batch]): Option[EnrichedBatch] =
    batches.find(_.id == id).map { batch ⇒
      val logOpt = batchLogs.find(_.batchId == id).map(_.log)
      val importSpecIdOpt = ciBuilds.find(_.batchId == id).flatMap(_.importSpecIdOpt)
      val commentOpt = batchComments.find(_.batchId == id).map(_.text)
      EnrichedBatch(batch, logOpt = logOpt, importSpecIdOpt = importSpecIdOpt, commentOpt = commentOpt)
    }

  /** Batches newest-first, optionally filtered by CI job association, configuration, and pass/fail result. */
  def getBatches(jobIdOpt: Option[Id[CiJob]] = None, configurationOpt: Option[Configuration] = None, resultOpt: Option[Boolean]): Seq[Batch] = {
    batches
      .filter(batch ⇒ jobIdOpt.forall(jobId ⇒ areAssociated(batch, jobId)))
      .filter(batch ⇒ configurationOpt.forall(configuration ⇒ batch.configurationOpt == Some(configuration)))
      .filter(batch ⇒ resultOpt.forall(result ⇒ batch.passed == result))
      .sortBy(_.executionTime)
      .reverse
  }

  // True if some CI build links this batch to the given job.
  // Uses a non-local `return` out of the for loop on first match.
  private def areAssociated(batch: Batch, jobId: Id[CiJob]): Boolean = {
    for {
      build ← ciBuilds
      job ← ciJobs
      if job.id == jobId
      if build.jobId == job.id
      if build.batchId == batch.id
    } return true
    false
  }

  def getEnrichedExecutionsInBatch(batchId: Id[Batch], passedFilterOpt: Option[Boolean]): Seq[EnrichedExecution] =
    for {
      batch ← batches
      if batch.id == batchId
      execution ← executions
      if execution.batchId == batch.id
      test ← tests.find(_.id == execution.testId)
      if passedFilterOpt.forall(expected ⇒ execution.passed == expected)
    } yield EnrichedExecution(execution, test.qualifiedName, batch.nameOpt, logOpt = None, commentOpt = None)

  def getEnrichedExecutions(ids: Seq[Id[Execution]]): Seq[EnrichedExecution] =
    for {
      batch ← batches
      execution ← executions.filter(_.batchId == batch.id)
      // NOTE(review): this guard is redundant — the filter above already ensures it.
      if execution.batchId == batch.id
      if ids.contains(execution.id)
      test ← tests.find(_.id == execution.testId)
    } yield EnrichedExecution(execution, test.qualifiedName, batch.nameOpt, logOpt = None, commentOpt = None)

  /** All executions of a test, newest-first. */
  def getExecutionsForTest(id: Id[Test]): Seq[Execution] =
    executions.filter(_.testId == id).sortBy(_.executionTime).reverse

  def getEnrichedExecutionsForTest(testId: Id[Test], configurationOpt: Option[Configuration],
    resultOpt: Option[Boolean] = None): Seq[EnrichedExecution] = {
    val executionsForTest = for {
      test ← tests.filter(_.id == testId)
      execution ← executions.filter(_.testId == testId)
      batch ← batches.find(_.id == execution.batchId)
      if configurationOpt.forall(_ == execution.configuration)
      if resultOpt.forall(_ == execution.passed)
    } yield EnrichedExecution(execution, test.qualifiedName, batch.nameOpt, logOpt = None, commentOpt = None)
    executionsForTest.sortBy(_.execution.executionTime).reverse
  }

  private def isDeleted(testId: Id[Test]) = tests.find(_.id == testId).exists(_.deleted)

  /** Streams all executions of non-deleted tests, ordered by (configuration, testId, time), through `f`. */
  def iterateAllExecutions[T](f: Iterator[ExecutionLite] ⇒ T): T =
    f(executions.filterNot(e ⇒ isDeleted(e.testId)).sortBy(e ⇒ (e.configuration, e.testId, e.executionTime)).map(executionLite).iterator)

  private def executionLite(execution: Execution) =
    ExecutionLite(
      testId = execution.testId,
      executionTime = execution.executionTime,
      passed = execution.passed,
      configuration = execution.configuration)

  // NOTE(review): `min`/`max` will throw on an empty group — cannot occur here since
  // groupBy only yields non-empty groups.
  def getExecutionIntervalsByConfig(): Map[Configuration, Interval] =
    executions.groupBy(_.configuration).map {
      case (configuration, configExecutions) ⇒
        val executionTimes = configExecutions.map(_.executionTime)
        configuration -> new Interval(executionTimes.min, executionTimes.max)
    }

  /** Pages executions ordered newest-first (ties broken by qualified name, descending). */
  def getEnrichedExecutions(configurationOpt: Option[Configuration], resultOpt: Option[Boolean] = None,
    startingFrom: Int, limit: Int): Seq[EnrichedExecution] = {
    val all = for {
      batch ← batches
      execution ← executions.filter(_.batchId == batch.id)
      if configurationOpt.forall(c ⇒ c == execution.configuration)
      if resultOpt.forall(c ⇒ c == execution.passed)
      test ← tests.find(_.id == execution.testId)
    } yield EnrichedExecution(execution, test.qualifiedName, batch.nameOpt, logOpt = None, commentOpt = None)
    all.sortBy(_.qualifiedName.name).sortBy(_.qualifiedName.groupOpt).reverse.sortBy(_.executionTime).reverse.drop(startingFrom).take(limit)
  }

  def countExecutions(configurationOpt: Option[Configuration], resultOpt: Option[Boolean] = None): Int =
    executions.count(e ⇒ configurationOpt.forall(_ == e.configuration) && resultOpt.forall(_ == e.passed))

  // Simulates an auto-increment column: 1 for an empty table, else max + 1.
  private def nextId[T <: EntityType](ids: Seq[Id[T]]): Id[T] = {
    val allIds = ids.map(_.value)
    Id(if (allIds.isEmpty) 1 else allIds.max + 1)
  }

  def newBatch(batch: Batch, logOpt: Option[String]): Id[Batch] = {
    val newId = nextId(batches.map(_.id))
    batches +:= batch.copy(id = newId)
    for (log ← logOpt) batchLogs +:= BatchLogRow(newId, log)
    newId
  }

  /**
   * Cascading delete: removes the batches plus their builds, executions, logs and
   * comments; tests left with no executions are deleted outright, the remainder
   * are reported as "affected" so their analyses can be recomputed.
   */
  def deleteBatches(batchIds: Seq[Id[Batch]]) = {
    val (executionIds, testIds) = executions.filter(batchIds contains _.batchId).map(e ⇒ (e.id, e.testId)).toList.unzip
    ciBuilds = ciBuilds.filterNot(batchIds contains _.batchId)
    analyses = analyses.filterNot(testIds contains _.testId)
    executionLogs = executionLogs.filterNot(executionIds contains _.executionId)
    executions = executions.filterNot(executionIds contains _.id)
    executionComments = executionComments.filterNot(executionIds contains _.executionId)
    batchLogs = batchLogs.filterNot(batchIds contains _.batchId)
    batches = batches.filterNot(batchIds contains _.id)
    batchComments = batchComments.filterNot(batchIds contains _.batchId)
    // Partition AFTER removing executions, so tests whose last execution was in a
    // deleted batch land in deleteTestIds.
    val (deleteTestIds, affectedTestIds) = testIds.partition(getExecutionsForTest(_).isEmpty)
    tests = tests.filterNot(deleteTestIds contains _.id)
    testComments = testComments.filterNot(testIds contains _.testId)
    testCategories = testCategories.filterNot(testIds contains _.testId)
    ignoredTestConfigurations = ignoredTestConfigurations.filterNot(testIds contains _.testId)
    DeleteBatchResult(affectedTestIds, executionIds)
  }

  private def newTest(test: Test): Id[Test] = {
    val newId = nextId(tests.map(_.id))
    tests +:= test.copy(id = newId)
    newId
  }

  /** Returns the id of the existing test with the same qualified name, or records it as new. */
  def ensureTestIsRecorded(test: Test): Id[Test] = {
    tests.find(_.qualifiedName == test.qualifiedName) match {
      case Some(test) ⇒ test.id
      case None ⇒ newTest(test)
    }
  }

  /** Soft delete/undelete: flips the `deleted` flag on the given tests. */
  def markTestsAsDeleted(ids: Seq[Id[Test]], deleted: Boolean = true) {
    tests = tests.filterNot(ids contains _.id) ++ tests.filter(ids contains _.id).map(_.copy(deleted = deleted))
  }

  def newExecution(execution: Execution, logOpt: Option[String]): Id[Execution] = {
    val newId = nextId(executions.map(_.id))
    executions +:= execution.copy(id = newId)
    for (log ← logOpt) executionLogs +:= ExecutionLogRow(newId, log)
    newId
  }

  def getExecutionLog(id: Id[Execution]) =
    executionLogs.find(_.executionId == id).map(_.log)

  def newCiBuild(ciBuild: CiBuild) {
    ciBuilds +:= ciBuild
  }

  def getCiBuild(buildUrl: URI): Option[CiBuild] = ciBuilds.find(_.buildUrl == buildUrl)

  def getCiBuildUrls(): Seq[URI] = ciBuilds.map(_.buildUrl)

  def getCiJobs(): Seq[CiJob] = ciJobs

  def getCiBuilds(specId: Id[CiImportSpec]): Seq[CiBuild] =
    for {
      build ← ciBuilds
      if build.importSpecIdOpt == Some(specId)
    } yield build

  def newCiImportSpec(spec: CiImportSpec): Id[CiImportSpec] = {
    val newId = nextId(importSpecs.map(_.id))
    importSpecs +:= spec.copy(id = newId)
    newId
  }

  def getCiImportSpecs: Seq[CiImportSpec] = importSpecs

  /** @return true when a spec with the given id existed (and was removed). */
  def deleteCiImportSpec(id: Id[CiImportSpec]): Boolean = {
    val found = importSpecs.exists(_.id == id)
    importSpecs = importSpecs.filterNot(_.id == id)
    found
  }

  def getCiImportSpec(id: Id[CiImportSpec]): Option[CiImportSpec] = importSpecs.find(_.id == id)

  /** Full replace of an existing spec; returns false if no spec with that id exists. */
  def updateCiImportSpec(updatedSpec: CiImportSpec): Boolean =
    importSpecs.find(_.id == updatedSpec.id) match {
      case Some(spec) ⇒
        importSpecs = updatedSpec +: importSpecs.filterNot(_.id == updatedSpec.id)
        true
      case None ⇒
        false
    }

  /** Updates only the lastChecked timestamp of the spec; returns false if it does not exist. */
  def updateCiImportSpec(id: Id[CiImportSpec], lastCheckedOpt: Option[DateTime]): Boolean =
    importSpecs.find(_.id == id) match {
      case Some(spec) ⇒
        val updatedSpec = spec.copy(lastCheckedOpt = lastCheckedOpt)
        importSpecs = updatedSpec +: importSpecs.filterNot(_.id == id)
        true
      case None ⇒
        false
    }

  def getSystemConfiguration(): SystemConfiguration = systemConfiguration

  def updateSystemConfiguration(newConfig: SystemConfiguration) {
    systemConfiguration = newConfig
  }

  def getJenkinsConfiguration(): FullJenkinsConfiguration =
    FullJenkinsConfiguration(jenkinsConfiguration, jenkinsConfigParams.toList)

  def updateJenkinsConfiguration(config: FullJenkinsConfiguration) {
    jenkinsConfiguration = config.config
    jenkinsConfigParams = config.params
  }

  /** Returns the id of the job with the same URL if already known, else records it. */
  def ensureCiJob(job: CiJob): Id[CiJob] = {
    ciJobs.find(_.url == job.url) match {
      case Some(jobAgain) ⇒ jobAgain.id
      case None ⇒
        val newId = nextId(ciJobs.map(_.id))
        ciJobs +:= job.copy(id = newId)
        newId
    }
  }

  def getConfigurations(): Seq[Configuration] = executions.map(_.configuration).distinct.sorted

  def getConfigurations(testId: Id[Test]): Seq[Configuration] =
    executions.filter(_.testId == testId).map(_.configuration).distinct.sorted

  // Case-insensitive glob match (e.g. "Foo*") via the Apache ORO glob-to-Perl5 translator.
  private def matchesPattern(pattern: String, text: String) =
    globToRegex(pattern).matcher(text).matches()

  private def globToRegex(pattern: String): Pattern =
    Pattern.compile(GlobCompiler.globToPerl5(pattern.toCharArray, GlobCompiler.CASE_INSENSITIVE_MASK), Pattern.CASE_INSENSITIVE)

  def getTestNames(pattern: String): Seq[String] = {
    val matches = globMatcher(pattern)
    for (test ← tests if matches(test.name)) yield test.name
  }

  def getGroups(pattern: String): Seq[String] = {
    val matches = globMatcher(pattern)
    for (test ← tests; group ← test.groupOpt if matches(group)) yield group
  }

  def getCategoryNames(pattern: String): Seq[String] = {
    val matches = globMatcher(pattern)
    for (category ← testCategories if matches(category.category)) yield category.category
  }

  // Compiles the glob once and returns a reusable predicate.
  private def globMatcher(pattern: String): String ⇒ Boolean = {
    val regexPattern = globToRegex(pattern)
    def matches(s: String) = regexPattern.matcher(s).matches()
    matches
  }

  // Comment setters replace any existing comment for the same entity (at most one each).
  def setExecutionComment(id: Id[Execution], text: String) =
    executionComments = ExecutionComment(id, text) +: executionComments.filterNot(_.executionId == id)

  def deleteExecutionComment(id: Id[Execution]) =
    executionComments = executionComments.filterNot(_.executionId == id)

  def setBatchComment(id: Id[Batch], text: String) =
    batchComments = BatchComment(id, text) +: batchComments.filterNot(_.batchId == id)

  def updateBatch(batch: Batch) {
    batches = batches.filterNot(_.id == batch.id) :+ batch
  }

  def deleteBatchComment(id: Id[Batch]) =
    batchComments = batchComments.filterNot(_.batchId == id)

  def setTestComment(id: Id[Test], text: String) =
    testComments = TestComment(id, text) +: testComments.filterNot(_.testId == id)

  def deleteTestComment(id: Id[Test]) =
    testComments = testComments.filterNot(_.testId == id)

  def getTeamCityConfiguration(): TeamCityConfiguration = teamCityConfiguration

  def updateTeamCityConfiguration(config: TeamCityConfiguration) =
    teamCityConfiguration = config

  /** @return true if a batch with the given id existed and was updated. */
  def setBatchDuration(id: Id[Batch], durationOpt: Option[Duration]): Boolean = {
    val updatedBatches = batches.filter(_.id == id).map(_.copy(durationOpt = durationOpt))
    batches = batches.filterNot(_.id == id) ++ updatedBatches
    updatedBatches.nonEmpty
  }

  def getCategories(testIds: Seq[Id[Test]]): Map[Id[Test], Seq[TestCategory]] =
    testCategories.filter(t ⇒ testIds contains t.testId).groupBy(_.testId)

  def addCategories(categories: Seq[TestCategory]) {
    testCategories ++:= categories
  }

  def removeCategories(testId: Id[Test], categories: Seq[String]) {
    testCategories = testCategories.filterNot(tc ⇒ tc.testId == testId && categories.contains(tc.category))
  }

  def addIgnoredTestConfigurations(ignoredConfigs: Seq[IgnoredTestConfiguration]) {
    ignoredTestConfigurations ++:= ignoredConfigs
  }

  def removeIgnoredTestConfigurations(testIds: Seq[Id[Test]], configuration: Configuration) {
    ignoredTestConfigurations = ignoredTestConfigurations.filterNot(c ⇒ testIds.contains(c.testId) && c.configuration == configuration)
  }

  /** Maps each requested (existing) test id to the configurations in which it is ignored. */
  def getIgnoredConfigurations(testIds: Seq[Id[Test]]): Map[Id[Test], Seq[Configuration]] = {
    for {
      test ← tests
      if testIds contains test.id
      ignoredConfigs = ignoredTestConfigurations.filter(_.testId == test.id)
    } yield test.id -> ignoredConfigs.map(_.configuration)
  }.toMap

  /** Ids of non-deleted tests ignored in the given configuration, optionally filtered by glob/category. */
  def getIgnoredTests(configuration: Configuration, nameOpt: Option[String] = None, groupOpt: Option[String] = None,
    categoryOpt: Option[String] = None): Seq[Id[Test]] =
    for {
      ignoredTestConfig ← ignoredTestConfigurations
      if ignoredTestConfig.configuration == configuration
      test ← tests
      if test.id == ignoredTestConfig.testId
      if !test.deleted
      if groupOpt.forall(pattern ⇒ test.groupOpt.exists(group ⇒ matchesPattern(pattern, group)))
      if nameOpt.forall(pattern ⇒ matchesPattern(pattern, test.name))
      if categoryOpt.forall(category ⇒ testCategories.exists(tc ⇒ tc.testId == test.id && tc.category == category))
    } yield test.id

  def isTestIgnoredInConfiguration(testId: Id[Test], configuration: Configuration): Boolean =
    getIgnoredTests(configuration) contains testId

  /** Resets every table and configuration object back to its initial empty state. */
  def deleteAll() = {
    executions = Seq()
    tests = Seq()
    testComments = Seq()
    testCategories = Seq()
    batches = Seq()
    analyses = Seq()
    executionLogs = Seq()
    executionComments = Seq()
    batchLogs = Seq()
    batchComments = Seq()
    ciJobs = Seq()
    ciBuilds = Seq()
    importSpecs = Seq()
    systemConfiguration = SystemConfiguration()
    jenkinsConfiguration = JenkinsConfiguration()
    jenkinsConfigParams = Seq()
    teamCityConfiguration = TeamCityConfiguration()
    ignoredTestConfigurations = Seq()
  }
}
thetestpeople/trt
test/com/thetestpeople/trt/model/impl/MockDao.scala
Scala
mit
20,629
/**
 * Copyright 2009 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS-IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.etherpad.openofficeservice;

import net.appjet.common.sars.{SarsServer,SarsMessageHandler};
import java.io.{DataInputStream,DataOutputStream};
import java.io.{File,FileOutputStream,ByteArrayInputStream,ByteArrayOutputStream};

/* Libraries needed for OO.org Conversion */
import com.sun.star.bridge.{XBridge,XBridgeFactory};
import com.sun.star.beans.{PropertyValue,XPropertySet};
import com.sun.star.connection.{NoConnectException,XConnection,XConnector};
import com.sun.star.container.XNamed;
import com.sun.star.document.{XExporter,XFilter};
import com.sun.star.frame.{XComponentLoader,XStorable};
import com.sun.star.lang.{XComponent,XMultiComponentFactory};
import com.sun.star.uno.{UnoRuntime,XComponentContext};

/** Base exception for all document-conversion failures in this service. */
class OOSException(m: String) extends RuntimeException(m);
/** Raised when the requested target format has no registered OO.org filter. */
class UnsupportedFormatException(format: String) extends OOSException("Unsupported format: "+format);
/** Singleton marker for retryable failures (e.g. server not reachable). */
object TemporaryFailure extends OOSException("Temporary failure");

object OpenOfficeServerUtility {
  def checkServerAvailability(host: String, port: Int): Boolean = {
    return true; // Rather than checking, lets assume that server is running till I finish coding.
  }
  def runOpenOfficeServer(path: String, host: String, port: Int, timeout: Int, wait: Boolean) {
    // Rather than running it from here, lets assume that the admin will run it.
  }
}

/**
 * Converts a document file to another format by driving a running
 * OpenOffice.org server over a UNO socket bridge.
 */
class OpenOfficeFileConverter {
  var host: String = "localhost";
  var port: Int = 8100;

  def setOpenOfficeServerDetails(host: String, port: Int) {
    this.host = host;
    this.port = port;
  }

  /**
   * Converts src into dst using the named OO.org export filter.
   *
   * @param converter name of the OO.org filter (e.g. "writer_pdf_Export")
   * @param extension target extension (currently unused by the conversion itself)
   * @return true on success; any failure is rethrown as OOSException
   */
  def convertFile(src: File, dst: File, converter: String, extension: String): Boolean = {
    try {
      val fromFile: String = "file:///" + src.getAbsolutePath();
      val toFile: String = "file:///" + dst.getAbsolutePath();
      val cnx: String = "socket,host="+this.host+",port="+this.port+"";
      // Bootstrap a local UNO context and open a URP bridge to the remote soffice process.
      val xRemoteContext: XComponentContext = com.sun.star.comp.helper.Bootstrap.createInitialComponentContext(null);
      val x: Object = xRemoteContext.getServiceManager().createInstanceWithContext("com.sun.star.connection.Connector", xRemoteContext);
      val xConnector: XConnector = UnoRuntime.queryInterface(classOf[XConnector], x).asInstanceOf[XConnector];
      val connection: XConnection = xConnector.connect(cnx);
      if (connection == null) {
        throw new OOSException("Connection failure");
      }
      val x2: Object = xRemoteContext.getServiceManager().createInstanceWithContext("com.sun.star.bridge.BridgeFactory", xRemoteContext);
      val xBridgeFactory: XBridgeFactory = UnoRuntime.queryInterface(classOf[XBridgeFactory], x2).asInstanceOf[XBridgeFactory];
      val xBridge: XBridge = xBridgeFactory.createBridge("", "urp", connection, null);
      val x3: Object = xBridge.getInstance("StarOffice.ServiceManager");
      if (x3 == null) {
        throw new OOSException("Failed to get bridge");
      }
      val xMultiComponentFactory: XMultiComponentFactory = UnoRuntime.queryInterface(classOf[XMultiComponentFactory], x3).asInstanceOf[XMultiComponentFactory];
      val xProperySet: XPropertySet = UnoRuntime.queryInterface(classOf[XPropertySet], xMultiComponentFactory).asInstanceOf[XPropertySet];
      val oDefaultContext: Object = xProperySet.getPropertyValue("DefaultContext");
      val xComponentContext: XComponentContext = UnoRuntime.queryInterface(classOf[XComponentContext], oDefaultContext).asInstanceOf[XComponentContext];
      val desktopObj: Object = xMultiComponentFactory.createInstanceWithContext("com.sun.star.frame.Desktop", xComponentContext);
      val xcomponentloader: XComponentLoader = UnoRuntime.queryInterface(classOf[XComponentLoader], desktopObj).asInstanceOf[XComponentLoader];
      if (xcomponentloader == null) {
        throw new OOSException("XComponent Loader could not be loaded");
      }
      val loadProps: Array[PropertyValue] = new Array[PropertyValue](2);
      loadProps(0) = new PropertyValue();
      loadProps(0).Name = "Hidden";
      loadProps(0).Value = boolean2Boolean(false);
      loadProps(1) = new PropertyValue();
      loadProps(1).Name = "UpdateDocMode";
      loadProps(1).Value = "1";
      val component: XComponent = xcomponentloader.loadComponentFromURL(fromFile,"_blank", 0, loadProps);
      if (component == null) {
        throw new OOSException("Failed to load document");
      }
      // Fix: dispose the document even when export fails, otherwise it stays
      // open in the soffice process and leaks with every failed conversion.
      try {
        val convProps: Array[PropertyValue] = new Array[PropertyValue](2);
        convProps(0) = new PropertyValue();
        convProps(0).Name = "FilterName";
        convProps(0).Value = converter;
        val xstorable: XStorable = UnoRuntime.queryInterface(classOf[XStorable],component).asInstanceOf[XStorable];
        if (xstorable == null) {
          throw new OOSException("Storable could not be loaded");
        }
        xstorable.storeToURL(toFile, convProps);
      } finally {
        component.dispose();
      }
      return true;
      // NOTE: the old unreachable "return false" after the try/catch was removed;
      // every path either returns true or throws.
    } catch {
      case e => {
        e.printStackTrace();
        throw new OOSException("Unknown exception occurred: "+e.getMessage());
      }
    }
  }
}

object OpenOfficeService {
  // Target extension -> OO.org export filter name.
  val formats = Map(
    "pdf" -> "writer_pdf_Export",
    "doc" -> "MS Word 97",
    "html" -> "HTML (StarWriter)",
    "odt" -> "writer8",
    "txt" -> "Text"
  );

  /** Creates a temp file (optionally pre-filled with bytes); suffix may be null/empty. */
  def createTempFile(bytes: Array[byte], suffix: String) = {
    var f = File.createTempFile("ooconvert-", if (suffix == null) { null } else if (suffix == "") { "" } else { "."+suffix });
    if (bytes != null) {
      val fos = new FileOutputStream(f);
      try {
        fos.write(bytes);
      } finally {
        fos.close(); // fix: stream was previously never closed, leaking the descriptor
      }
    }
    f;
  }

  var soffice = "soffice";
  def setExecutable(exec: String) {
    soffice = exec;
  }

  var openOfficeServerHost: String = "localhost";
  var openOfficeServerPort: Int = 8100;
  def setOpenOfficeServer(host: String, port: Int) {
    openOfficeServerHost = host;
    openOfficeServerPort = port;
  }

  /**
   * Converts document bytes from one format to another via the OO.org server.
   * Identity conversions are returned unchanged.
   *
   * @throws TemporaryFailure when the server cannot be started
   * @throws UnsupportedFormatException when the conversion silently failed
   * @throws OOSException for any other conversion error
   */
  def convertFile(from: String, to: String, bytes: Array[byte]): Array[byte] = {
    if (from == to) {
      return bytes;
    }
    val tempFile = createTempFile(bytes, from);
    val outFile = createTempFile(null, to);
    try {
      if (! OpenOfficeServerUtility.checkServerAvailability(openOfficeServerHost, openOfficeServerPort)) {
        try {
          OpenOfficeServerUtility.runOpenOfficeServer(soffice, openOfficeServerHost, openOfficeServerPort, 20000, true);
        } catch {
          case e: java.io.IOException => {
            e.printStackTrace();
            throw TemporaryFailure;
          }
        }
      }
      var converter = new OpenOfficeFileConverter();
      converter.setOpenOfficeServerDetails(openOfficeServerHost, openOfficeServerPort);
      var status = false;
      try {
        status = converter.convertFile(tempFile, outFile, formats(to), to);
      } catch {
        case e => {
          e.printStackTrace();
          throw new OOSException("Unknown exception occurred: "+e.getMessage());
        }
      }
      if (status == false) {
        throw new UnsupportedFormatException(from);
      }
      net.appjet.common.util.BetterFile.getFileBytes(outFile);
    } finally {
      // Fix: temp files were previously never removed, filling up the temp dir
      // over the lifetime of the service.
      tempFile.delete();
      outFile.delete();
    }
  }

  def main(args: Array[String]) {
    if (args.length > 0) {
      soffice = args(0);
      if (soffice.length == 0) {
        exit(1);
      }
    }

    // Query format:
    //   from: String, to: String, count: Int, bytes: Array[byte]
    // Response format:
    //   status: Int, <data>
    //   status 0 (success) - <data>: count: Int, bytes: Array[byte]
    //   status 1 (temporary failure) - <data>: <none>
    //   status 2 (permanent failure) - <data>: type: Int
    //     type - 0: unknown failure.
    //          - 1: unsupported format
    val handler = new SarsMessageHandler {
      override def handle(b: Array[byte]): Option[Array[byte]] = {
        val is = new DataInputStream(new ByteArrayInputStream(b));
        val from = is.readUTF;
        val to = is.readUTF;
        val len = is.readInt;
        val bytes = new Array[byte](len);
        is.readFully(bytes);
        var status = 0;
        var permfailuretype = 0;
        println("Converting "+from+" -> "+to+" ("+len+" bytes)");
        val output = try {
          convertFile(from, to, bytes);
        } catch {
          case TemporaryFailure => {
            status = 1;
            null;
          }
          case e: UnsupportedFormatException => {
            status = 2;
            permfailuretype = 1;
            null;
          }
          case e => {
            status = 2;
            permfailuretype = 0;
            e.printStackTrace();
            null;
          }
        }
        val retBytes = new ByteArrayOutputStream();
        val ret = new DataOutputStream(retBytes);
        if (status != 0) {
          ret.writeInt(status); // error
          status match {
            case 2 => { ret.writeInt(permfailuretype); }
            case _ => { }
          }
        } else {
          ret.writeInt(0); // success
          ret.writeInt(output.length);
          ret.write(output, 0, output.length);
        }
        Some(retBytes.toByteArray());
      }
    }
    val server = new SarsServer("ooffice-password", handler, None, 8101);
    server.start();
    println("Server running...");
    server.join();
    println("Server quitting...");
  }
}
titanpad/titanpad
infrastructure/com.etherpad.openofficeservice/importexport.scala
Scala
apache-2.0
9,831
package loader.core
import callbacks._

object Core {

  /** A minimalist root class for Status. */
  class Status(val name:String) extends definition.Status

  /** Defines a base structure which simple processors can use, and which can serve as a base for more
   *  complex processors.
   *  @param Ret the intermediate returned type used internally to convey information from child to parent.
   */
  trait Def extends definition.Def {
    // Concrete processors must use Core.Status (or a subclass) as their status type.
    type Status >: Null <: Core.Status
  }

  trait Impl extends definition.Impl with Def {
    type Element = Elt
    type Status = Core.Status

    /** Motor simplifies the development of processors by focusing on what must be done.
     *  It removes the burden of defining basic classes and builders.
     */
    trait Motor extends super.Motor {
      //implementations : top builders
      // Both top-level entry points start with an empty-named Status; the first also attaches callbacks.
      def apply(cbks:Cbks*):Top[Kind] = builder(new Status(""), cbks:_*)
      def apply():Top[Kind] = builder(new Status(""))

      //implementation of a full Element class using the motor defined methods
      // Three variants: plain element, element with a callback set, element with a
      // single callback plus a callback set (the latter overrides onChild to route
      // through the WithCallback mixin).
      class ElementBase(protected var parser0:Parser, val name: String, val parent: Element, val childBuilder: Bld) extends Element with Processor
      class ElementCbks(parser:Parser, name: String, parent: Element, childBuilder: Bld, val cbks: Cbks*) extends ElementBase(parser,name,parent,childBuilder) with WithCallbacks
      class ElementCbk (parser:Parser, name: String, parent: Element, childBuilder: Bld, val cb: Cbk, cbks: Cbks*) extends ElementCbks(parser,name,parent,childBuilder,cbks:_*) with WithCallback {
        override def onChild(child:Element,r:Ret):Unit = super[WithCallback].onChild(child,r)
      }

      // The builder dispatches on how many callback arguments are supplied,
      // constructing the matching Element variant above.
      val builder:Bld = new Bld {
        def apply(parser:Parser, parent: Element, c: Status, childBuilder: Bld) = new ElementBase(parser,c.name,parent,childBuilder)
        def apply(parser:Parser, parent: Element, c: Status, childBuilder: Bld, cbks: Cbks*) = new ElementCbks(parser,c.name,parent,childBuilder,cbks:_*)
        def apply(parser:Parser, parent: Element, c: Status, childBuilder: Bld, cb:Cbk, cbks: Cbks*) = new ElementCbk(parser,c.name,parent,childBuilder,cb,cbks:_*)
      }
    }
  }
}
Y-P-/data-processing-binding
XX3/obsolete/core/Core.scala
Scala
gpl-3.0
2,264
package org.jetbrains.jps.incremental.scala import _root_.java.io.File import _root_.java.util import com.intellij.openapi.util.io.FileUtil import com.intellij.util.Processor import org.jetbrains.jps.ModuleChunk import org.jetbrains.jps.builders.java.{JavaBuilderUtil, JavaSourceRootDescriptor} import org.jetbrains.jps.builders.{DirtyFilesHolder, FileProcessor} import org.jetbrains.jps.incremental.ModuleLevelBuilder.ExitCode import org.jetbrains.jps.incremental.fs.CompilationRound import org.jetbrains.jps.incremental.messages.{BuildMessage, CompilerMessage, ProgressMessage} import org.jetbrains.jps.incremental.scala.ScalaBuilder._ import org.jetbrains.jps.incremental.scala.data.CompilerData import org.jetbrains.jps.incremental.scala.local.{IdeClientIdea, PackageObjectsData, ScalaReflectMacroExpansionParser} import org.jetbrains.jps.incremental.scala.model.{CompileOrder, IncrementalityType} import _root_.scala.collection.JavaConverters._ import scala.collection.mutable import scala.collection.mutable.ListBuffer import org.jetbrains.jps.incremental._ /** * Nikolay.Tropin * 11/19/13 */ class IdeaIncrementalBuilder(category: BuilderCategory) extends ModuleLevelBuilder(category) { override def getPresentableName: String = "Scala IDEA builder" override def build(context: CompileContext, chunk: ModuleChunk, dirtyFilesHolder: DirtyFilesHolder[JavaSourceRootDescriptor, ModuleBuildTarget], outputConsumer: ModuleLevelBuilder.OutputConsumer): ModuleLevelBuilder.ExitCode = { if (isDisabled(context, chunk) || ChunkExclusionService.isExcluded(chunk)) return ExitCode.NOTHING_DONE checkIncrementalTypeChange(context) context.processMessage(new ProgressMessage("Searching for compilable files...")) val sourceDependencies = SourceDependenciesProviderService.getSourceDependenciesFor(chunk) if (sourceDependencies.nonEmpty) { val message = "IDEA incremental compiler cannot handle shared source modules: " + sourceDependencies.map(_.getName).mkString(", ") + ".\\nPlease enable SBT 
incremental compiler for the project." context.processMessage(new CompilerMessage("scala", BuildMessage.Kind.ERROR, message)) return ExitCode.ABORT } val sources = collectSources(context, chunk, dirtyFilesHolder) if (sources.isEmpty) return ExitCode.NOTHING_DONE if (hasBuildModules(chunk)) return ExitCode.NOTHING_DONE // *.scala files in SBT "build" modules are rightly excluded from compilation if (!hasScalaModules(chunk)) { val message = "skipping Scala files without a Scala SDK in module(s) " + chunk.getPresentableShortName context.processMessage(new CompilerMessage("scala", BuildMessage.Kind.WARNING, message)) return ExitCode.NOTHING_DONE } val packageObjectsData = PackageObjectsData.getFor(context) if (JavaBuilderUtil.isForcedRecompilationAllJavaModules(context)) { //rebuild packageObjectsData.clear() } else { val additionalFiles = packageObjectsData.invalidatedPackageObjects(sources).filter(_.exists) if (additionalFiles.nonEmpty) { (sources ++ additionalFiles).foreach(f => FSOperations.markDirty(context, CompilationRound.NEXT, f)) return ExitCode.ADDITIONAL_PASS_REQUIRED } } val delta = context.getProjectDescriptor.dataManager.getMappings.createDelta() val callback = delta.getCallback val modules = chunk.getModules.asScala.toSet val successfullyCompiled = mutable.Set[File]() val compilerName = if (modules.exists(CompilerData.isDottyModule)) "dotc" else "scalac" val client = new IdeClientIdea(compilerName, context, modules.map(_.getName).toSeq, outputConsumer, callback, successfullyCompiled, packageObjectsData) val scalaSources = sources.filter(_.getName.endsWith(".scala")).asJava compile(context, chunk, sources, modules, client) match { case Left(error) => client.error(error) ExitCode.ABORT case _ if client.hasReportedErrors || client.isCanceled => ExitCode.ABORT case Right(code) => if (delta != null && JavaBuilderUtil.updateMappings(context, delta, dirtyFilesHolder, chunk, scalaSources, successfullyCompiled.asJava)) ExitCode.ADDITIONAL_PASS_REQUIRED else { if 
(ScalaReflectMacroExpansionParser.expansions.nonEmpty) ScalaReflectMacroExpansionParser.serializeExpansions(context) client.progress("Compilation completed", Some(1.0F)) code } } } override def getCompilableFileExtensions: util.List[String] = util.Arrays.asList("scala", "java") private def isDisabled(context: CompileContext, chunk: ModuleChunk): Boolean = { val settings = projectSettings(context) def wrongIncrType = settings.getIncrementalityType != IncrementalityType.IDEA def wrongCompileOrder = settings.getCompilerSettings(chunk).getCompileOrder match { case CompileOrder.JavaThenScala => getCategory == BuilderCategory.SOURCE_PROCESSOR case (CompileOrder.ScalaThenJava | CompileOrder.Mixed) => getCategory == BuilderCategory.OVERWRITING_TRANSLATOR case _ => false } wrongIncrType || wrongCompileOrder } private def collectSources(context: CompileContext, chunk: ModuleChunk, dirtyFilesHolder: DirtyFilesHolder[JavaSourceRootDescriptor, ModuleBuildTarget]): Seq[File] = { val result = ListBuffer[File]() val project = context.getProjectDescriptor val compileOrder = projectSettings(context).getCompilerSettings(chunk).getCompileOrder val extensionsToCollect = compileOrder match { case CompileOrder.Mixed => List(".scala", ".java") case _ => List(".scala") } def checkAndCollectFile(file: File): Boolean = { val fileName = file.getName if (extensionsToCollect.exists(fileName.endsWith)) result += file true } dirtyFilesHolder.processDirtyFiles(new FileProcessor[JavaSourceRootDescriptor, ModuleBuildTarget] { def apply(target: ModuleBuildTarget, file: File, root: JavaSourceRootDescriptor) = checkAndCollectFile(file) }) for { target <- chunk.getTargets.asScala tempRoot <- project.getBuildRootIndex.getTempTargetRoots(target, context).asScala } { FileUtil.processFilesRecursively(tempRoot.getRootFile, new Processor[File] { def process(file: File) = checkAndCollectFile(file) }) } //if no scala files to compile, return empty seq if (!result.exists(_.getName.endsWith(".scala"))) Seq.empty 
else result.toSeq } }
whorbowicz/intellij-scala
jps-plugin/src/org/jetbrains/jps/incremental/scala/IdeaIncrementalBuilder.scala
Scala
apache-2.0
6,663
package no.nr.edvard.osiris.testutil import scala.collection.JavaConverters._ import org.objectweb.asm.ClassReader import org.objectweb.asm.tree.{MethodNode, ClassNode} import java.io._ import no.nr.edvard.osiris.util.{BinaryFinder, ByteReader} import no.nr.edvard.osiris.model.{JavaMethod, Application, JavaType} class TestCompiler { private var TEMP_PATH: String = null def compileAndExtractMethods(classBody: String) = { val byteCode = sourceToByteCode(classBody) val classNode = new ClassNode new ClassReader(byteCode).accept(classNode, ClassReader.SKIP_DEBUG) classNode.methods.asScala.toList.map(_.asInstanceOf[MethodNode]) } def createTempDir() { TEMP_PATH = "%s/osirisTemp%d/".format(System.getProperty("java.io.tmpdir"), System.nanoTime()) if (!new File(TEMP_PATH).mkdir()) throw new IOException("Failed to buildFrom temp working dir!") } def deleteTempDir() { def recursiveDelete(path: File) { if (path.isDirectory) path.listFiles().foreach(recursiveDelete) if (!path.delete()) throw new IOException("Failed to delete file %s!".format(path)) } recursiveDelete(new File(TEMP_PATH)) TEMP_PATH = null } def sourceToByteCode(classBody: String): Array[Byte] = { require(TEMP_PATH != null) val fullSource = """ import java.io.*; import java.util.*; public class Klass { %s } """.format(classBody) val sourcePath = TEMP_PATH + "/Klass.java" writeSource(sourcePath, fullSource) compileSources(sourcePath) val classFile = new File(sourcePath.replace(".java", ".class")) if (!classFile.exists) throw new IOException("Could not locate test .class file!") ByteReader.read(new FileInputStream(classFile), classFile.length.toInt) } def writeSource(sourcePath: String, source: String) { { val dir = new File(sourcePath).getParentFile if (!dir.exists) if (!dir.mkdirs()) throw new RuntimeException() } val sourceWriter = new FileWriter(sourcePath) sourceWriter.write(source) sourceWriter.close() } def compileSources(sourcePaths: String *) { val compilatorProcess = 
Runtime.getRuntime().exec(Array("javac") ++ sourcePaths) val compilationSucceeded = compilatorProcess.waitFor() == 0 if (!compilationSucceeded) throw new RuntimeException("Compilation failed. (" + sourcePaths + ")") } def compileToModels(sources: (String, String, String) *): Set[JavaType] = { val sourcePaths = sources.map { case (packageName, typeName, source) => { val fullSource = """ %s import java.util.*; import java.io.*; %s """.format( if (packageName.isEmpty) "" else ("package " + packageName + ";"), source ) val sourcePath = TEMP_PATH + packageName.replace('.', '/') + "/" + typeName + ".java" writeSource(sourcePath, fullSource) sourcePath } } compileSources(sourcePaths : _*) require(TEMP_PATH != null) new BinaryFinder(new File(TEMP_PATH)).iterator(byteCode => new JavaType(MOCK_APPLICATION, byteCode, MOCK_SOURCE_FINDER) ).toSet } private val MOCK_APPLICATION = new Application("MOCK_APP", () => List().toIterator) private def MOCK_SOURCE_FINDER(m: JavaMethod) = None }
edwkar/edwbsc
projects/Osiris/src/test/scala/no/nr/edvard/osiris/testutil/TestCompiler.scala
Scala
gpl-2.0
3,398
package scala.meta.internal.metacp import java.io.BufferedOutputStream import java.net.URLClassLoader import java.nio.charset.StandardCharsets import java.nio.file._ import java.nio.file.attribute.BasicFileAttributes import java.util.concurrent.ConcurrentHashMap import java.util.jar._ import scala.collection.GenSeq import scala.collection.immutable import scala.collection.mutable import scala.meta.cli._ import scala.meta.internal.classpath._ import scala.meta.internal.cli._ import scala.meta.internal.io._ import scala.meta.internal.scalacp._ import scala.meta.io._ import scala.meta.metacp._ class Main(settings: Settings, reporter: Reporter) { val classpathIndex = ClasspathIndex( settings.classpath ++ settings.dependencyClasspath ++ detectJavacp, includeJdk = settings.includeJdk) private val missingSymbols = mutable.Set.empty[String] def process(): Result = { if (settings.out.isFile) { throw new FileAlreadyExistsException(settings.out.toString, null, "--out must not be a file") } else if (!settings.out.isDirectory) { Files.createDirectories(settings.out.toNIO) } val classpath: GenSeq[AbsolutePath] = if (settings.par) settings.classpath.entries.par else settings.classpath.entries val status = new ConcurrentHashMap[AbsolutePath, Option[AbsolutePath]]() def processEntry(entry: AbsolutePath): OutputEntry = { withOutputEntry(entry) { out => val isSuccess = convertClasspathEntry(entry, out.root) if (isSuccess) status.put(entry, Some(out.output)) else status.put(entry, None) out } } val job = Job(classpath, if (settings.verbose) reporter.err else devnull) job.foreach { entry => val out = processEntry(entry) if (entry.isFile) { val jar = new JarFile(entry.toFile) try { val manifest = jar.getManifest if (manifest != null) { val isSuccess = processManifest(entry, manifest, out.output) if (!isSuccess) status.put(entry, None) } } finally { jar.close() } } } val scalaLibrarySynthetics = { if (settings.scalaLibrarySynthetics) { 
withOutputEntry(settings.out.resolve("scala-library-synthetics.jar")) { out => Scalalib.synthetics.foreach { infos => infos.save(out.root) } Some(out.output) } } else { None } } if (missingSymbols.nonEmpty) { reporter.err.println( "NOTE. To fix 'missing symbol' errors please provide a complete --classpath or --dependency-classpath. " + "The provided classpath or classpaths should include the Scala library as well as JDK jars such as rt.jar." ) } reporter.out.println("{") reporter.out.println(" \\"status\\": {") val ins = settings.classpath.entries ins.zipWithIndex.foreach { case (in, i) => val s_out = status.get(in).map(_.toString).getOrElse("") reporter.out.print(s""" "${in.toNIO}": "${s_out}"""") if (i != ins.length - 1) reporter.out.print(",") reporter.out.println() } reporter.out.println(" },") val s_out = scalaLibrarySynthetics.map(_.toString).getOrElse("") reporter.out.println(s""" "scalaLibrarySynthetics": "${s_out}"""") reporter.out.println("}") val orderedStatus = immutable.ListMap(ins.map(in => in -> status.get(in)): _*) Result(orderedStatus, scalaLibrarySynthetics) } private def processManifest(entry: AbsolutePath, manifest: Manifest, out: AbsolutePath): Boolean = { var success = true val classpathAttr = manifest.getMainAttributes.getValue("Class-Path") if (classpathAttr != null) { val buf = List.newBuilder[Path] classpathAttr.split(" ").foreach { classpathEntry => val linkedPath = entry.toNIO.getParent.resolve(classpathEntry) val linkedEntry = AbsolutePath(linkedPath) if (linkedEntry.isFile || linkedEntry.isDirectory) { withOutputEntry(linkedEntry) { out => buf += out.output.toNIO.getFileName success &= convertClasspathEntry(linkedEntry, out.root) } } } val convertedLinkedJars = buf.result() if (convertedLinkedJars.nonEmpty) { withJar(out.toNIO) { jos => jos.putNextEntry(new JarEntry("META-INF/MANIFEST.MF")) val classPath = convertedLinkedJars.mkString(" ") val manifest = s"""|Manifest-Version: 1.0 |Class-Path: $classPath |""".stripMargin.trim + 
"\\n\\n" jos.write(manifest.getBytes(StandardCharsets.UTF_8)) jos.closeEntry() } } } success } private def withJar(path: Path)(fn: JarOutputStream => Unit): Unit = { val os = Files.newOutputStream(path) val bos = new BufferedOutputStream(os) val jos = new JarOutputStream(bos) try fn(jos) finally { jos.close() bos.close() os.close() } } /** An output entry that is either a directory or a jar. * * @param output the output directory or jar file on disk that is returned to the user. * @param root the output directory or the NIO FileSystem jar root path if output is a jar file. */ private case class OutputEntry(output: AbsolutePath, root: AbsolutePath) private def withOutputEntry[T](entry: AbsolutePath)(f: OutputEntry => T): T = { val name = entry.toNIO.normalize().getFileName.toString if (PathIO.extension(entry.toNIO) == "jar") { val freeJar = jarNameAlternatives(name, 0).filter(!_.isFile).head PlatformFileIO.withJarFileSystem(freeJar, create = true) { jarRoot => f(OutputEntry(freeJar, jarRoot)) } } else { val freeDir = directoryNameAlternatives(name, 0).filter(!_.isDirectory).head Files.createDirectories(freeDir.toNIO) f(OutputEntry(freeDir, freeDir)) } } private def directoryNameAlternatives(filename: String, i: Int): Stream[AbsolutePath] = { val name = if (i == 0) filename else filename + "-" + i settings.out.resolve(name) #:: directoryNameAlternatives(filename, i + 1) } private def jarNameAlternatives(filename: String, i: Int): Stream[AbsolutePath] = { val name = if (i == 0) filename else (filename.stripSuffix(".jar") + "-" + i) + ".jar" settings.out.resolve(name) #:: jarNameAlternatives(filename, i + 1) } private def convertClasspathEntry(in: AbsolutePath, out: AbsolutePath): Boolean = { var success = true val classpath = Classpath(in) classpath.visit { _ => new SimpleFileVisitor[Path] { override def visitFile(path: Path, attrs: BasicFileAttributes): FileVisitResult = { if (PathIO.extension(path) == "class" && Files.size(path) > 0) { try { val abspath = 
AbsolutePath(path) val node = abspath.toClassNode val result = ClassfileInfos.fromClassNode(node, classpathIndex, settings, reporter) result.foreach { infos => infos.save(out) } } catch { case e @ MissingSymbolException(symbol) => if (!missingSymbols(symbol)) { missingSymbols += symbol reporter.err.println(s"${e.getMessage} in $in") success = false } case ex: Throwable => reporter.err.println(s"error: can't convert $path in $in") ex.printStackTrace(reporter.err) success = false } } FileVisitResult.CONTINUE } } } // NOTE: In the case when an input contains no class files, // we need to create an empty META-INF/semanticdb directory to distinguish // metacp-processed outputs from regular class directories and/or jar. val semanticdbRoot = out.resolve("META-INF").resolve("semanticdb") Files.createDirectories(semanticdbRoot.toNIO) success } private def detectJavacp: Classpath = { if (settings.usejavacp) { val scalaLibrary = this.getClass.getClassLoader match { case loader: URLClassLoader => loader.getURLs .collectFirst { case url if url.toString.contains("scala-library") => AbsolutePath(Paths.get(url.toURI)) } .getOrElse { throw new IllegalStateException("Unable to detect scala-library via --usejavacp") } case unexpected => throw new IllegalStateException( s"Expected this.getClass.getClassLoader to be URLClassLoader. " + s"Obtained $unexpected") } Classpath(scalaLibrary) } else { Classpath(Nil) } } }
xeno-by/scalameta
semanticdb/metacp/src/main/scala/scala/meta/internal/metacp/Main.scala
Scala
bsd-3-clause
8,805
/* * Copyright (C) 2005, The Beangle Software. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.beangle.commons.io import java.io.Writer /** * {@link Writer} implementation that outputs to a {@link StringBuilder}. * <p> * <strong>NOTE:</strong> This implementation, as an alternative to * <code>java.io.StringWriter</code>, provides an <i>un-synchronized</i> (i.e. for use in a single * thread) implementation for better performance. For safe usage with multiple {@link Thread}s then * <code>java.io.StringWriter</code> should be used. * @author chaostone * @since 3.1 */ @SerialVersionUID(1L) class StringBuilderWriter(val builder: StringBuilder) extends Writer with Serializable { /** * Construct a new {@link StringBuilder} instance with the specified capacity. * @param capacity The initial capacity of the underlying { @link StringBuilder} */ def this(capacity: Int = 16) = { this(new StringBuilder(capacity)) } /** * Append a single character to this Writer. */ override def append(value: Char): Writer = { builder.append(value) this } /** * Append a character sequence to this Writer. */ override def append(value: CharSequence): Writer = { builder.append(value) this } /** * Append a portion of a character sequence to the {@link StringBuilder}. 
*/ override def append(value: CharSequence, start: Int, end: Int): Writer = { builder.append(value, start, end) this } /** * Closing this writer has no effect. */ override def close(): Unit = { } /** * Flushing this writer has no effect. */ override def flush(): Unit = { } /** * Write a String to the {@link StringBuilder}. * @param value The value to write */ override def write(value: String): Unit = if (value != null) builder.append(value) /** * Write a portion of a character array to the {@link StringBuilder}. */ override def write(value: Array[Char], offset: Int, length: Int): Unit = if (value != null) builder.appendAll(value, offset, length) /** * Returns {@link StringBuilder#toString()}. */ override def toString: String = builder.toString }
beangle/commons
core/src/main/scala/org/beangle/commons/io/StringBuilderWriter.scala
Scala
lgpl-3.0
2,811
package usbinstall import javafx.application.Platform import javafx.fxml.FXMLLoader import javafx.geometry.Pos import javafx.scene.{Parent, Scene} import javafx.scene.control.ButtonType import javafx.scene.layout.{ColumnConstraints, GridPane, Priority, RowConstraints} import suiryc.scala.javafx.stage.Stages.StageLocation import suiryc.scala.javafx.stage.{Stages => sfxStages} import usbinstall.controllers.{StepChangeController, ToolBarController} object Stages { object DialogButtons { val Ok_Cancel: List[ButtonType] = List(ButtonType.OK, ButtonType.CANCEL) } protected def changeScene(title: String, scene: Scene): Unit = { val stage = USBInstall.stage // Try to keep the stage center at the same spot val center = if (!USBInstall.firstScene) { val x = stage.getX + stage.getWidth / 2 val y = stage.getY + stage.getHeight / 2 stage.setMinWidth(0) stage.setMinHeight(0) stage.hide() Some((x, y)) } else { stage.setOnCloseRequest { _ => stage.close() Platform.exit() } None } stage.setTitle(title) stage.setScene(scene) sfxStages.onStageReady(stage, USBInstall.firstScene) { val width = stage.getWidth val height = stage.getHeight val x = center.map(_._1 - width / 2).getOrElse(stage.getX) val y = center.map(_._2 - height / 2).getOrElse(stage.getY) val loc = StageLocation(x, y, width, height, maximized = false) sfxStages.setMinimumDimensions(stage) // Note: setting stage size and keeping it while changing scene // does not play well (at least under Gnome). Default dimension // being good enough, don't change it. 
sfxStages.setLocation(stage, loc, setSize = false) } stage.show() USBInstall.firstScene = false } protected def toolBar(pane: StepPane, paneController: Option[Any]): Parent = { val loader = new FXMLLoader(getClass.getResource("/fxml/toolBar.fxml")) val root = loader.load[Parent]() val controller = loader.getController[ToolBarController]() pane.subscriptionHolders ::= controller controller.setPaneController(paneController) root } protected def stepChange(pane: StepPane): Parent = { val loader = new FXMLLoader(getClass.getResource("/fxml/stepChange.fxml")) val root = loader.load[Parent]() val controller = loader.getController[StepChangeController]() controller.setStepPane(pane) root } def step(tuple: (StepPane, Option[Any])): Scene = { val (pane, controller) = tuple val grid = new GridPane grid.setAlignment(Pos.TOP_CENTER) grid.getColumnConstraints.add(new ColumnConstraints() { setHgrow(Priority.ALWAYS) }) grid.getRowConstraints.add(new RowConstraints() { setVgrow(Priority.NEVER) }) grid.getRowConstraints.add(new RowConstraints() { setVgrow(Priority.ALWAYS) }) grid.addColumn(0, toolBar(pane, controller), pane, stepChange(pane)) new Scene(grid) } def chooseProfile(): Unit = { changeScene("Choose profile", step(Panes.chooseProfile())) } def chooseDevice(): Unit = { changeScene("Choose device", step(Panes.chooseDevice())) } def choosePartitions(): Unit = { changeScene("Choose partitions", step(Panes.choosePartitions())) } def install(): Unit = { changeScene("Install", step(Panes.install())) } }
suiryc/usbinstall
src/main/scala/usbinstall/Stages.scala
Scala
gpl-3.0
3,399
package nl.malienkolders.htm.viewer.jmonkey.lib.state
package spectator

import java.awt.Color
import nl.malienkolders.htm.viewer.jmonkey.lib._
import java.awt.Font
import java.awt.image.BufferedImage
import javax.imageio.ImageIO
import java.awt.Graphics2D
import nl.malienkolders.htm.viewer.jmonkey.lib.util._
import com.jme3._
import texture._
import scene._
import scene.shape._
import material._
import renderer.queue.RenderQueue
import math._
import font._
import nl.malienkolders.htm.lib.model._
import nl.malienkolders.htm.viewer.jmonkey.SpectatorScreen
import java.io.File
import com.jme3.cinematic.events.RotationTrack
import com.jme3.cinematic.Cinematic
import com.jme3.animation.LoopMode
import com.jme3.cinematic.events.CinematicEventListener
import com.jme3.cinematic.events.CinematicEvent
import com.jme3.animation.SpatialTrack
import com.jme3.app.state.AppStateManager

/**
 * jMonkeyEngine app state that renders the spectator "fight" screen:
 * tournament banner, score/timer labels, both fighters' avatars and names,
 * and a rotating 4-sided sponsor "roller".
 *
 * Positions and scales below are hand-tuned magic constants expressed in
 * scene units; `pixels` converts texture pixels to scene units via
 * `SpectatorScreen.upp` (units per pixel).
 */
object FightAppState extends nl.malienkolders.htm.viewer.jmonkey.lib.state.FightAppState with MessageAppState {

  val textLabelFont = "Arial Bold"
  val numberLabelFont = "Raavi Bold"

  // Pixel-to-scene-unit conversion, enabling e.g. `36 pixels`.
  case class PixelsToUnit(value: Int) {
    def pixels = value * SpectatorScreen.upp
  }
  implicit def intToPixelsToUnit(i: Int): PixelsToUnit = PixelsToUnit(i)

  // Scrolling message label (from MessageAppState) plus its shown/hidden spots.
  lazy val message = new TextLabel("", AlignLeft, Arial, Color.black, (SpectatorScreen.ratio * 2, 36 pixels), SpectatorScreen.ppu.toInt, app.getAssetManager())
  val messagePositionShown = (-SpectatorScreen.ratio + 0.05f, -1f + (10 pixels), 10f)
  val messagePositionHidden = (-SpectatorScreen.ratio + 0.05f, -1.1f, 10f)

  // One pre-built banner panel per known tournament identifier.
  lazy val tournamentBanners = Map(List("longsword_open", "longsword_ladies", "rapier_dagger", "sword_buckler", "wrestling", "sabre").map(n => (n -> createTexturedPanel("banner_" + n, "Fight/Header/banner_" + n + ".jpg", false))): _*)

  /** Full-width quad at the bottom showing both fighters' country colors. */
  def createFighterBar = {
    val quad = new Quad(SpectatorScreen.ratio * 2, -120 pixels, true);
    val bar = new Geometry("FighterBar", quad);
    bar.setQueueBucket(RenderQueue.Bucket.Transparent)
    bar.setMaterial(createFighterBarMaterial("", ""))
    bar
  }

  /** Unshaded alpha-blended material rendered from the two country codes. */
  def createFighterBarMaterial(countryA: String, countryB: String) = {
    val texture = FighterPanelTextureUtil.getTexture(countryA, countryB, app.getAssetManager())
    val material = new Material(app.getAssetManager(), "Common/MatDefs/Misc/Unshaded.j3md")
    material.setTexture("ColorMap", texture)
    material.setTransparent(true)
    material.getAdditionalRenderState().setFaceCullMode(RenderState.FaceCullMode.Back)
    material.getAdditionalRenderState().setBlendMode(RenderState.BlendMode.Alpha)
    material
  }

  /** Swaps the fighter bar texture for the given participants' countries. */
  def setFighterBarTexture(fighterA: MarshalledParticipant, fighterB: MarshalledParticipant) {
    fighterBar.setMaterial(createFighterBarMaterial(fighterA.country, fighterB.country))
  }

  /** Avatar quad for one side, starting with the placeholder portrait. */
  def createAvatar(side: AvatarTextureUtil.Generated) = {
    val quad = new Quad(AvatarTextureUtil.WIDTH pixels, AvatarTextureUtil.HEIGHT pixels)
    val avatar = new Geometry("Avatar" + side.toString(), quad)
    avatar.setQueueBucket(RenderQueue.Bucket.Transparent)
    avatar.setMaterial(createAvatarTextures("default", AvatarTextureUtil.PLACEHOLDER, side))
    avatar
  }

  /** Material showing the fighter portrait identified by `fId`. */
  def createAvatarTextures(tournamentName: String, fId: String, side: AvatarTextureUtil.Generated) = {
    val texture = AvatarTextureUtil.getTexture(fId, side, tournamentName, app.getAssetManager())
    val material = new Material(app.getAssetManager(), "Common/MatDefs/Misc/Unshaded.j3md")
    material.setTexture("ColorMap", texture)
    material.setTransparent(true)
    material.getAdditionalRenderState().setFaceCullMode(RenderState.FaceCullMode.Back)
    material.getAdditionalRenderState().setBlendMode(RenderState.BlendMode.Alpha)
    material
  }

  /** Updates both avatars (red = left, blue = right) for the given fighters. */
  def setAvatarTextures(t: String, a: MarshalledParticipant, b: MarshalledParticipant) {
    avatarRed.setMaterial(createAvatarTextures(t, a.externalId, AvatarTextureUtil.Left))
    avatarBlue.setMaterial(createAvatarTextures(t, b.externalId, AvatarTextureUtil.Right))
  }

  /**
   * Builds the 4-faced rotating banner ("tumbler"): four textured quads
   * arranged as the sides of a box around a pivot node, each rotated 90
   * degrees from the previous. `rollerCinematics` animates the pivot.
   */
  def createRoller = {
    // Banner texture size in pixels.
    val w = 203
    val h = 166
    def uw = w pixels
    def uh = h pixels
    def huw = uw / 2
    def huh = uh / 2
    def createBanner(idx: Int) = {
      val texture = app.getAssetManager().loadTexture("Fight/Header/tumbler" + idx + ".jpg")
      val quad = new Quad(uw, -uh, true);
      val grid = new Geometry("Banner", quad);
      val material = new Material(app.getAssetManager(), "Common/MatDefs/Misc/Unshaded.j3md");
      material.setTexture("ColorMap", texture);
      material.setTransparent(false);
      material.getAdditionalRenderState().setFaceCullMode(RenderState.FaceCullMode.Back);
      grid.setMaterial(material);
      grid
    }
    val pivot = new Node("Roller")
    val box = new Box(Vector3f.ZERO, 1, 1, 1)
    val face1 = createBanner(1)
    face1.setLocalTranslation(0, huh, huh)
    pivot.attachChild(face1)
    val face2 = createBanner(2)
    face2.rotate(FastMath.PI / 2f, 0f, 0f)
    face2.setLocalTranslation(0, -huh, huh)
    pivot.attachChild(face2)
    val face3 = createBanner(3)
    face3.rotate(FastMath.PI, 0f, 0f)
    face3.setLocalTranslation(0, -huh, -huh)
    pivot.attachChild(face3)
    val face4 = createBanner(4)
    face4.rotate(FastMath.PI * 1.5f, 0f, 0f)
    face4.setLocalTranslation(0, huh, -huh)
    pivot.attachChild(face4)
    pivot
  }

  // Scene elements, built lazily on first use (asset manager must be ready).
  lazy val roller = createRoller
  lazy val fighterBar = createFighterBar
  lazy val avatarRed = createAvatar(AvatarTextureUtil.Left)
  lazy val avatarBlue = createAvatar(AvatarTextureUtil.Right)
  lazy val fighterRedName = createFighterNameLabel(AlignLeft)
  lazy val fighterRedClub = createFighterClubLabel(AlignLeft)
  lazy val fighterBlueName = createFighterNameLabel(AlignRight)
  lazy val fighterBlueClub = createFighterClubLabel(AlignRight)

  def createScoreLabel = createNumericLabel("0")

  // Placeholder texts are deliberately long to reserve layout width.
  def createFighterNameLabel(align: Align) = new TextLabel("Een Hele Lange Naam", align, Copperplate, Color.white, (SpectatorScreen.ratio - 0.15f, 0.1f), 300, app.getAssetManager())

  def createFighterClubLabel(align: Align) = new TextLabel("Een Hele Lange Vereniging", align, Copperplate, Color.white, (SpectatorScreen.ratio - 0.15f, 0.06f), 300, app.getAssetManager())

  /** Bitmap-font label for numbers (scores, timer, exchange limit). */
  def createNumericLabel(initialValue: String, alignment: BitmapFont.Align = BitmapFont.Align.Center) = {
    val label = new BitmapText(app.getAssetManager().loadFont("Interface/Fonts/Raavi.fnt"), false)
    label.setColor(ColorRGBA.Black)
    label.setText(initialValue)
    label.setQueueBucket(RenderQueue.Bucket.Transparent)
    label.setBox(new font.Rectangle(0f, 0f, 250f, 100f))
    label.setAlignment(alignment)
    label
  }

  lazy val timerLabel = createNumericLabel("00:00")

  /** Quad sized to the texture's pixel dimensions, optionally alpha-blended. */
  def createTexturedPanel(geometryName: String, textureName: String, transparent: Boolean) = {
    val texture = app.getAssetManager().loadTexture(textureName)
    val quad = new Quad(texture.getImage().getWidth() pixels, texture.getImage().getHeight() pixels)
    val panel = new Geometry(geometryName, quad)
    val material = new Material(app.getAssetManager(), "Common/MatDefs/Misc/Unshaded.j3md")
    material.setTexture("ColorMap", texture)
    material.setTransparent(transparent)
    if (transparent) {
      material.getAdditionalRenderState().setBlendMode(RenderState.BlendMode.Alpha)
      panel.setQueueBucket(RenderQueue.Bucket.Transparent)
    }
    panel.setMaterial(material)
    panel
  }

  // "Hides" banners by parking them far outside the visible scene.
  def hideTournamentBanners {
    tournamentBanners.values.foreach(_.setLocalTranslation(-1000f, -1000f, -1000f))
  }

  /**
   * Attaches and positions every scene element. All translations/scales are
   * hand-tuned to the background artwork; do not reorder the attach calls, as
   * z-coordinates and bucket assignment determine draw order.
   * NOTE(review): `scoreLabels` is inherited from the parent FightAppState
   * trait (not visible in this file).
   */
  override def initializeScene() {
    super.initializeScene()
    hideTournamentBanners
    tournamentBanners.values.foreach(rootNode.attachChild _)
    // 0/1: red fighter main+sub score, 2/3: blue, 4: center, 5: top.
    scoreLabels(0).setLocalScale(0.00272f)
    scoreLabels(0).setLocalTranslation(-1.4255f, 0.369f, 0.1f)
    scoreLabels(1).setLocalScale(0.0012f)
    scoreLabels(1).setLocalTranslation(-1.14f, 0.167f, 0.1f)
    scoreLabels(2).setLocalScale(0.00272f)
    scoreLabels(2).setLocalTranslation(0.743f, 0.369f, 0.1f)
    scoreLabels(3).setLocalScale(0.0012f)
    scoreLabels(3).setLocalTranslation(1.019f, 0.167f, 0.1f)
    scoreLabels(4).setLocalScale(0.00175f)
    scoreLabels(4).setLocalTranslation(-0.225f, 0.288f, 0.1f)
    scoreLabels(5).setLocalScale(0.001346f)
    scoreLabels(5).setLocalTranslation(-0.401f, 0.882f, 0.1f)
    timerLabel.setLocalScale(0.001518f)
    timerLabel.setLocalTranslation(-0.2487f, 0.666f, 0.1f)
    for { l <- scoreLabels } rootNode.attachChild(l)
    rootNode.attachChild(timerLabel)
    phaseName.setLocalScale(0.23f)
    phaseName.setLocalTranslation(-1.308f, 0.9f, 0.1f)
    rootNode.attachChild(phaseName)
    roundName.setLocalScale(0.4f, 0.35f, 1f)
    roundName.setLocalTranslation(-1.308f, 0.68f, 0.1f)
    rootNode.attachChild(roundName)
    fightName.setLocalScale(0.4f, 0.35f, 1f)
    fightName.setLocalTranslation(-1.308f, 0.57f, 0.1f)
    rootNode.attachChild(fightName)
    exchangeLimit.setLocalScale(0.001346f)
    exchangeLimit.setLocalTranslation(-0.08f, 0.882f, 0.1f)
    rootNode.attachChild(exchangeLimit)
    fighterBar.setLocalTranslation(-SpectatorScreen.ratio, -0.482f, 0.05f)
    rootNode.attachChild(fighterBar)
    fighterRedName.setLocalTranslation(-1.136f, -0.646f, 0.1f)
    rootNode.attachChild(fighterRedName)
    fighterRedClub.setLocalTranslation(-1.128f, -0.717f, 0.1f)
    rootNode.attachChild(fighterRedClub)
    fighterBlueName.setLocalTranslation(-0.159f + 0.15f, -0.646f, 0.1f)
    rootNode.attachChild(fighterBlueName)
    fighterBlueClub.setLocalTranslation(-0.155f + 0.15f, -0.717f, 0.1f)
    rootNode.attachChild(fighterBlueClub)
    //    tournamentBanner.setLocalTranslation(0.383f, 0.934f, 2f)
    //    rootNode.attachChild(tournamentBanner)
    // Background layers, attached back-to-front with decreasing z.
    val background = List(
      createTexturedPanel("Points", "Fight/points.png", true),
      createTexturedPanel("HeaderFooter", "Fight/header_footer.png", true),
      createTexturedPanel("Backdrops", "Fight/backdrops.png", true),
      createTexturedPanel("Background", "Fight/background.png", false))
    for ((bg, i) <- background.zipWithIndex.reverse) {
      bg.removeFromParent()
      bg.setLocalTranslation(-SpectatorScreen.ratio, -1f, -i.toFloat)
      rootNode.attachChild(bg)
    }
    // Avatars sit behind the backdrops (z = -1.5) so they show through cutouts.
    avatarRed.setLocalTranslation((67 - 512) pixels, (384 - 684) pixels, -1.5f)
    avatarBlue.setLocalTranslation((643 - 512) pixels, (384 - 684) pixels, -1.5f)
    rootNode.attachChild(avatarRed)
    rootNode.attachChild(avatarBlue)
    roller.setLocalTranslation((645 - 512) * SpectatorScreen.upp, (384 - 83) * SpectatorScreen.upp, 1f)
    rootNode.attachChild(roller)
  }

  /**
   * Four chained cinematics, each rotating the roller one quarter turn
   * (quarter turns at PI/2, PI, 1.5*PI, back to 0). Each cinematic's onStop
   * listener starts the next, forming an endless loop; stateAttached kicks
   * off the first one.
   */
  lazy val rollerCinematics = {
    val rot = List(
      new RotationTrack(roller, new Quaternion(Array(FastMath.PI / 2f, 0f, 0f)), 1, LoopMode.DontLoop),
      new RotationTrack(roller, new Quaternion(Array(FastMath.PI, 0f, 0f)), 1, LoopMode.DontLoop),
      new RotationTrack(roller, new Quaternion(Array(FastMath.PI * 1.5f, 0f, 0f)), 1, LoopMode.DontLoop),
      new RotationTrack(roller, new Quaternion(Array(0f, 0f, 0f)), 1, LoopMode.DontLoop))
    val rollerRoll1 = new Cinematic(rootNode, 6f, LoopMode.DontLoop)
    rollerRoll1.addCinematicEvent(1, rot(0))
    val rollerRoll2 = new Cinematic(rootNode, 6f, LoopMode.DontLoop)
    rollerRoll2.addCinematicEvent(1, rot(1))
    val rollerRoll3 = new Cinematic(rootNode, 6f, LoopMode.DontLoop)
    rollerRoll3.addCinematicEvent(1, rot(2))
    val rollerRoll4 = new Cinematic(rootNode, 6f, LoopMode.DontLoop)
    rollerRoll4.addCinematicEvent(1, rot(3))
    // Chain: 1 -> 2 -> 3 -> 4 -> 1 ...
    rollerRoll1.addListener(new CinematicEventListener {
      def onPlay(e: CinematicEvent) {}
      def onPause(e: CinematicEvent) {}
      def onStop(e: CinematicEvent) {
        rollerRoll2.setTime(0)
        rollerRoll2.play()
      }
    })
    rollerRoll2.addListener(new CinematicEventListener {
      def onPlay(e: CinematicEvent) {}
      def onPause(e: CinematicEvent) {}
      def onStop(e: CinematicEvent) {
        rollerRoll3.setTime(0)
        rollerRoll3.play()
      }
    })
    rollerRoll3.addListener(new CinematicEventListener {
      def onPlay(e: CinematicEvent) {}
      def onPause(e: CinematicEvent) {}
      def onStop(e: CinematicEvent) {
        rollerRoll4.setTime(0)
        rollerRoll4.play()
      }
    })
    rollerRoll4.addListener(new CinematicEventListener {
      def onPlay(e: CinematicEvent) {}
      def onPause(e: CinematicEvent) {}
      def onStop(e: CinematicEvent) {
        rollerRoll1.setTime(0)
        rollerRoll1.play()
      }
    })
    List(rollerRoll1, rollerRoll2, rollerRoll3, rollerRoll4)
  }

  // Header labels; initial texts are placeholders replaced by updateTextLabels.
  lazy val phaseName = new TextLabel("poule phase", AlignLeft, Copperplate, Color.black, (4f, 0.5f), 200, app.getAssetManager())
  lazy val roundName = new TextLabel("ROUND 6", AlignLeft, Raavi, Color.black, (4f, 0.4f), 200, app.getAssetManager())
  lazy val fightName = new TextLabel("FIGHT 29", AlignLeft, Raavi, Color.black, (4f, 0.4f), 200, app.getAssetManager())
  lazy val exchangeLimit = createNumericLabel("10")

  /** Path of the banner image for a tournament (generic banner when None). */
  def tournamentBannerResourceName(tournamentIdentifier: Option[String]) = "Fight/Header/" + tournamentIdentifier.map("banner_" + _).getOrElse("banner") + ".jpg"

  def createTournamentBannerMaterial(tournamentIdentifier: Option[String]) = {
    val texture = app.getAssetManager().loadTexture(tournamentBannerResourceName(tournamentIdentifier))
    val material = new Material(app.getAssetManager(), "Common/MatDefs/Misc/Unshaded.j3md")
    material.setTexture("ColorMap", texture)
    material.setTransparent(false)
    material.getAdditionalRenderState().setFaceCullMode(RenderState.FaceCullMode.Back)
    material
  }

  /**
   * Refreshes every textual element for a new fight: banner, phase/round/fight
   * names (roundName is split on "/" into phase and round when present),
   * exchange limit, country bar, avatars, and fighter name/club labels.
   * Club names longer than 32 characters fall back to the short club code.
   */
  def updateTextLabels(f: MarshalledViewerFight) {
    hideTournamentBanners
    if (tournamentBanners.contains(f.tournament.identifier)) {
      val banner = tournamentBanners(f.tournament.identifier)
      banner.setLocalTranslation((857 - 512) pixels, (384 - 140) pixels, 10f)
    }
    f.roundName.split("/") match {
      case Array(phase, round) =>
        phaseName.text = phase.toLowerCase().trim()
        roundName.text = round.toUpperCase().trim()
      case Array(round) =>
        phaseName.text = ""
        roundName.text = round.toUpperCase()
      case _ =>
        phaseName.text = ""
        roundName.text = ""
    }
    fightName.text = "FIGHT %d" format f.order
    exchangeLimit.setText(f.exchangeLimit.toString())
    setFighterBarTexture(f.fighterA, f.fighterB)
    setAvatarTextures(f.tournament.identifier, f.fighterA, f.fighterB)
    fighterRedName.text = f.fighterA.shortName
    fighterRedClub.text = if (f.fighterA.club.length() > 32) f.fighterA.clubCode else f.fighterA.club
    fighterBlueName.text = f.fighterB.shortName
    fighterBlueClub.text = if (f.fighterB.club.length() > 32) f.fighterB.clubCode else f.fighterB.club
  }

  /** Registers the roller cinematics (reset to t=0) and starts the first. */
  override def stateAttached(stateManager: AppStateManager) {
    super.stateAttached(stateManager)
    for (c <- rollerCinematics) {
      c.stop
      c.setTime(0)
      stateManager.attach(c)
    }
    rollerCinematics(0).play
  }

  /** Stops and detaches all roller cinematics. */
  override def stateDetached(stateManager: AppStateManager) {
    for (c <- rollerCinematics) {
      c.stop
      stateManager.detach(c)
    }
    super.stateDetached(stateManager)
  }

}
hema-tournament-manager/htm
htm-viewer-jme/src/main/scala/nl/malienkolders/htm/viewer/jmonkey/lib/state/spectator/FightAppState.scala
Scala
apache-2.0
15,554
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.deploy.master

import org.apache.spark.SparkConf
import org.apache.spark.util.{IntParam, Utils}

/**
 * Command-line parser for the master.
 *
 * Settings are applied in increasing priority order: built-in defaults,
 * then the SPARK_MASTER_* environment variables, then `master.ui.port`
 * from the [[SparkConf]], and finally explicit command-line options.
 */
private[spark] class MasterArguments(args: Array[String], conf: SparkConf) {
  var host = Utils.localHostName()
  var port = 7077
  var webUiPort = 8080

  // Check for settings in environment variables
  if (System.getenv("SPARK_MASTER_HOST") != null) {
    host = System.getenv("SPARK_MASTER_HOST")
  }
  if (System.getenv("SPARK_MASTER_PORT") != null) {
    port = System.getenv("SPARK_MASTER_PORT").toInt
  }
  if (System.getenv("SPARK_MASTER_WEBUI_PORT") != null) {
    webUiPort = System.getenv("SPARK_MASTER_WEBUI_PORT").toInt
  }
  // Configuration overrides the environment for the web UI port.
  if (conf.contains("master.ui.port")) {
    webUiPort = conf.get("master.ui.port").toInt
  }

  parse(args.toList)

  /**
   * Recursively consume the option list, mutating host/port/webUiPort.
   * Exits the JVM (via printUsageAndExit) on `--help` or an unrecognized
   * option.
   *
   * Note: `-h` is ambiguous by construction — when followed by a value it is
   * matched by the `--host` case above; only a trailing bare `-h` falls
   * through to the `--help` case.
   */
  def parse(args: List[String]): Unit = args match {
    case ("--ip" | "-i") :: value :: tail =>
      Utils.checkHost(value, "ip no longer supported, please use hostname " + value)
      host = value
      parse(tail)

    case ("--host" | "-h") :: value :: tail =>
      Utils.checkHost(value, "Please use hostname " + value)
      host = value
      parse(tail)

    case ("--port" | "-p") :: IntParam(value) :: tail =>
      port = value
      parse(tail)

    case "--webui-port" :: IntParam(value) :: tail =>
      webUiPort = value
      parse(tail)

    case ("--help" | "-h") :: tail =>
      printUsageAndExit(0)

    case Nil => // all options consumed

    case _ =>
      printUsageAndExit(1)
  }

  /**
   * Print usage and exit JVM with the given exit code.
   */
  // Fixed: escaped "\\n" sequences printed a literal backslash-n instead of
  // line breaks; also replaced deprecated procedure syntax with ": Unit =".
  def printUsageAndExit(exitCode: Int): Unit = {
    System.err.println(
      "Usage: Master [options]\n" +
      "\n" +
      "Options:\n" +
      "  -i HOST, --ip HOST     Hostname to listen on (deprecated, please use --host or -h) \n" +
      "  -h HOST, --host HOST   Hostname to listen on\n" +
      "  -p PORT, --port PORT   Port to listen on (default: 7077)\n" +
      "  --webui-port PORT      Port for web UI (default: 8080)")
    System.exit(exitCode)
  }
}
zhangjunfang/eclipse-dir
spark/core/src/main/scala/org/apache/spark/deploy/master/MasterArguments.scala
Scala
bsd-2-clause
2,846
/**
 * Created by Romain Reuillon on 28/11/16.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 *
 */
package org.openmole.gui.plugin.authentication.egi

import org.openmole.core.preference.PreferenceLocation
import org.openmole.gui.ext.data._
import org.openmole.plugin.environment.egi._
import org.openmole.core.services._
import org.openmole.gui.ext.server.utils

import scala.util.{ Failure, Success, Try }

object EGIAuthenticationAPIImpl {
  // Preference slot holding the list of VO names to run authentication tests
  // against. Note: "AuthenicationPanel" (sic) is the persisted key — do not
  // "fix" the spelling, it would orphan existing user preferences.
  val voTest = PreferenceLocation[Seq[String]]("AuthenicationPanel", "voTest", Some(Seq[String]()))
}

/**
 * Server-side implementation of [[EGIAuthenticationAPI]]: manages the single
 * P12-certificate EGI authentication (stored encrypted) and runs
 * password/proxy/DIRAC access tests against configured VOs.
 */
class EGIAuthenticationAPIImpl(s: Services) extends EGIAuthenticationAPI {

  implicit val services = s
  import services._

  // Maps a client-side path (rooted at the authentication directory
  // placeholder) onto the server's actual authentication-keys directory.
  private def authenticationFile(p: String) = {
    def path = p.replace(EGIAuthenticationData.authenticationDirectory, utils.authenticationKeysFile.getAbsolutePath)
    new java.io.File(path)
  }

  // Builds the core P12Certificate from GUI data; None when no private key
  // path is set. The password is re-encrypted with the server cypher.
  private def coreObject(data: EGIAuthenticationData) = data.privateKey.map { pk ⇒
    P12Certificate(
      cypher.encrypt(data.cypheredPassword),
      authenticationFile(pk)
    )
  }

  /**
   * Returns the stored EGI authentication as GUI data (password decrypted),
   * or an empty sequence when none / not a P12 certificate is configured.
   */
  def egiAuthentications(): Seq[EGIAuthenticationData] =
    EGIAuthentication() match {
      case Some(p12: P12Certificate) ⇒
        Seq(EGIAuthenticationData(
          cypher.decrypt(p12.cypheredPassword),
          Some(p12.certificate.getPath)
        ))
      case x: Any ⇒ Seq()
    }

  /** Stores (replaces) the EGI authentication; no-op without a private key. */
  def addAuthentication(data: EGIAuthenticationData): Unit =
    coreObject(data).foreach { a ⇒
      EGIAuthentication.update(a, test = false)
    }

  /** Removes the stored EGI authentication. */
  def removeAuthentication = EGIAuthentication.clear

  // To be used for ssh private key
  def deleteAuthenticationKey(keyName: String): Unit = authenticationFile(keyName).delete

  /**
   * Runs the authentication test suite once per configured VO: password
   * check, proxy creation, and DIRAC access. Any unexpected exception for a
   * VO yields a bare "Error" test result instead of propagating.
   */
  def testAuthentication(data: EGIAuthenticationData): Seq[Test] = {

    // VO-independent password check.
    def testPassword(data: EGIAuthenticationData, test: EGIAuthentication ⇒ Try[Boolean]): Test =
      coreObject(data).map { d ⇒
        test(d) match {
          case Success(_) ⇒ Test.passed()
          case Failure(f) ⇒ Test.error("Invalid Password", ErrorData(f))
        }
      }.getOrElse(Test.error("Unknown error", MessageErrorData("Unknown " + data.name, None)))

    // VO-specific check (proxy or DIRAC access).
    def test(data: EGIAuthenticationData, voName: String, test: (EGIAuthentication, String) ⇒ Try[Boolean]): Test =
      coreObject(data).map { d ⇒
        test(d, voName) match {
          case Success(_) ⇒ Test.passed(voName)
          case Failure(f) ⇒ Test.error("Invalid Password", ErrorData(f))
        }
      }.getOrElse(Test.error("Unknown error", MessageErrorData("Unknown " + data.name, None)))

    val vos = services.preference(EGIAuthenticationAPIImpl.voTest)

    vos.map { voName ⇒
      Try {
        EGIAuthenticationTest(
          voName,
          testPassword(data, EGIAuthentication.testPassword(_)),
          test(data, voName, EGIAuthentication.testProxy(_, _)),
          test(data, voName, EGIAuthentication.testDIRACAccess(_, _))
        )
      } match {
        case Success(a) ⇒ a
        case Failure(f) ⇒ EGIAuthenticationTest("Error")
      }
    }
  }

  /** Persists the list of VO names used by testAuthentication. */
  override def setVOTest(vos: Seq[String]): Unit =
    services.preference.setPreference(EGIAuthenticationAPIImpl.voTest, vos)

  /** Returns the persisted list of VO names (method name sic in the API). */
  override def geVOTest(): Seq[String] =
    services.preference(EGIAuthenticationAPIImpl.voTest)
}
openmole/openmole
openmole/gui/plugins/org.openmole.gui.plugin.authentication.egi/src/main/scala/org/openmole/gui/plugin/authentication/egi/EGIAuthenticationAPIImpl.scala
Scala
agpl-3.0
3,858
import sbt._

/**
 * Centralized sbt dependency declarations for the build.
 *
 * The `compile`/`provided`/`test`/`runtime`/`container` helpers tag a set of
 * modules with the corresponding Ivy configuration so build files can write
 * e.g. `compile(liftWebkit, liftMongodb)`.
 */
object Dependencies {
  // Extra resolvers needed for SNAPSHOT lift modules.
  val resolutionRepos = Seq(
    Resolver.sonatypeRepo("releases"),
    Resolver.sonatypeRepo("snapshots")
  )

  // Scope helpers: apply an Ivy configuration to each module.
  def compile(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "compile")
  def provided(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "provided")
  def test(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "test")
  def runtime(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "runtime")
  def container(deps: ModuleID*): Seq[ModuleID] = deps map (_ % "container")

  // Version constants shared across related modules.
  object Ver {
    val lift = "2.6"
    val lift_edition = "2.6"
    val jetty = "9.2.9.v20150224"
  }

  // Lift
  val liftWebkit = "net.liftweb" %% "lift-webkit" % Ver.lift
  val liftMongodb = "net.liftweb" %% "lift-mongodb-record" % Ver.lift

  // Lift modules (artifact names embed the lift edition, e.g. "extras_2.6")
  val liftExtras = "net.liftmodules" %% ("extras_"+Ver.lift_edition) % "0.4-SNAPSHOT"
  val liftMongoauth = "net.liftmodules" %% ("mongoauth_"+Ver.lift_edition) % "0.6-SNAPSHOT"

  // Rogue — intransitive() keeps Rogue's own dependency graph out; the
  // required transitive deps are supplied by the Lift artifacts above.
  val rogueField = "com.foursquare" %% "rogue-field" % "2.5.0" intransitive()
  val rogueCore = "com.foursquare" %% "rogue-core" % "2.5.1" intransitive()
  val rogueLift = "com.foursquare" %% "rogue-lift" % "2.5.1" intransitive()
  val rogueIndex = "com.foursquare" %% "rogue-index" % "2.5.1" intransitive()

  // Jetty - http://download.eclipse.org/jetty/
  val jettyWebapp = "org.eclipse.jetty" % "jetty-webapp" % Ver.jetty
  val jettyPlus = "org.eclipse.jetty" % "jetty-plus" % Ver.jetty
  val servlet = "javax.servlet" % "javax.servlet-api" % "3.0.1"

  // Misc
  val logback = "ch.qos.logback" % "logback-classic" % "1.1.2"
  val scalatest = "org.scalatest" %% "scalatest" % "2.2.4"
}
eltimn/lift-poly-example
project/Dependencies.scala
Scala
apache-2.0
1,715
package org.openworm.trackercommons import kse.jsonal._ import kse.jsonal.JsonConverters._ import WconImplicits._ object Create { sealed trait HasUnits {} final class NoUnits private () extends HasUnits {} final class YesUnits private () extends HasUnits {} sealed trait HasData {} final class NoData private () extends HasData {} final class YesData private () extends HasData {} sealed trait HasFile {} final class NoFile private () extends HasFile{} final class YesFile private () extends HasFile{} sealed trait HasID {} final class YesID private() extends HasID {} final class NoID private() extends HasID {} final class MakeWcon[U <: HasUnits, D <: HasData, F <: HasFile] private[trackercommons] (building: DataSet, nData: Int) { private[this] var stale = false def result()(implicit evU: U =:= YesUnits, evD: D =:= YesData) = building.copy(data = java.util.Arrays.copyOf(building.data, nData)) def write()(implicit evU: U =:= YesUnits, evD: D =:= YesData, evF: F =:= YesFile): scala.util.Try[Unit] = scala.util.Try{ ReadWrite.write(result, building.files.current) } def setUnits(u: UnitMap)(implicit evD: D =:= YesData): MakeWcon[YesUnits, D, F] = { stale = true new MakeWcon[YesUnits, D, F](building.copy(unitmap = u), nData) } def setUnits(extra: Map[String, String])(implicit evD: D =:= YesData): Either[String, MakeWcon[YesUnits, D, F]] = { val all = new collection.mutable.AnyRefMap[String, String] def sym(a: String, b: String) { val hasA = all contains a val hasB = all contains b if (hasA != hasB) { if (hasA) all += (b, all(a)) else all += (a, all(b)) } } def adopt(a: String, b: String, k: String = null) { if (!all.contains(a)) { all += (a, if (k eq null) all(b) else all.getOrElse(b, k)) } } def default(a: String, k: String) { if (!all.contains(a)) all += (a, k) } extra.foreach{ case (k, v) => all += (k, v) } sym("x", "y") sym("ox", "oy") sym("cx", "cy") sym("px", "py") var hasoxy = false var hascxy = false var haspxy = false var i = 0 while (i < nData) { val di = 
building.data(i) if (di.cxs.length > 0) hascxy = true if (di.oxs.length > 0) hasoxy = true if (di.perims.nonEmpty || di.walks.nonEmpty) haspxy = true i += 1 } if (!all.contains("t")) all += ("t", "s") adopt("x", "cx", "mm") adopt("y", "cy", "mm") if (hasoxy) { adopt("ox", "x"); adopt("oy", "y") } if (hascxy) { adopt("cx", "x"); adopt("cy", "y") } if (haspxy) { adopt("px", "x"); adopt("py", "y") } if (building.meta.age.isDefined) default("age", "h") if (building.meta.temperature.exists(x => !x.isNaN)) default("temperature", "C") if (building.meta.humidity.exists(x => !x.isNaN)) default("humidity", "%") if (building.meta.arena.exists(_.size.isDefined)) adopt("size", "x") Right(setUnits(UnitMap( all.toMap.map{ case (k, v) => k -> ({ units.parseUnit(v) match { case None => return Left(f"Not a unit: $v (for $k)") case Some(u) => u } })}, Json.Obj.empty ))) } def setUnits()(implicit evD: D =:= YesData): MakeWcon[YesUnits, D, F] = setUnits(Map.empty[String, String]).right.get // Always succeeds when no extras def setFiles(files: FileSet): MakeWcon[U, D, YesFile] = { stale = true new MakeWcon[U, D, YesFile](building.copy(files = files), nData) } def setOnlyFile(file: String): MakeWcon[U, D, YesFile] = setFiles(FileSet(file)) def setOnlyFile(file: java.io.File): MakeWcon[U, D, YesFile] = setFiles(FileSet(file.getPath)) def setMeta(meta: Metadata): MakeWcon[U, D, F] = { stale = true; new MakeWcon[U, D, F](building.copy(meta = meta), nData) } def setMeta(meta: MakeMeta[YesID]): MakeWcon[U, D, F] = setMeta(meta.result) def setMetaWithID(meta: Metadata, id: String): MakeWcon[U, D, F] = setMeta(meta.copy(id = id)) def setMetaWithID[I <: HasID](meta: MakeMeta[I], id: String): MakeWcon[U, D, F] = setMeta(meta.resultWithID(id)) def addData(data: Data): MakeWcon[U, YesData, F] = { if (nData + 1 < building.data.length && !stale){ stale = true building.data(nData) = data new MakeWcon[U, YesData, F](building, nData + 1) } else { stale = false val m = math.max(2*nData, nData + 1) val d2 
= if (building.data.length > 0) java.util.Arrays.copyOf(building.data, m) else new Array[Data](m) d2(nData) = data new MakeWcon[U, YesData, F](building.copy(data = d2), nData + 1) } } def addData(data1: Data, data2: Data, more: Data*): MakeWcon[U, YesData, F] = { var w = addData(data1).addData(data2) for (d <- more) w = w.addData(d) w } def addData(builder: DataBuilder[YesData]): MakeWcon[U, YesData, F] = addData(builder.result) def addData(builder1: DataBuilder[YesData], builder2: DataBuilder[YesData], more: DataBuilder[YesData]*): MakeWcon[U, YesData, F] = addData(builder1.result, builder2.result, more.map(_.result): _*) def dropData: MakeWcon[U, NoData, F] = { stale = false; new MakeWcon[U, NoData, F](building.copy(data = Array.empty), 0) } def putCustom(key: String, value: Json) = { stale = true new MakeWcon(building.copy(custom = Json.Obj(building.custom.asMap + ((key, value)))), nData) } def setCustom(custom: Json.Obj) = { stale = true new MakeWcon(building.copy(custom = custom), nData) } def dropCustom = { stale = true if (building.custom.size == 0) this else new MakeWcon(building.copy(custom = Json.Obj.empty), nData) } } def wcon(): MakeWcon[NoUnits, NoData, NoFile] = new MakeWcon(DataSet.empty, 0) final class MakeMeta[I <: HasID] private[trackercommons] (underlying: Metadata) { def setID(id: String): MakeMeta[YesID] = new MakeMeta[YesID](underlying.copy(id = id)) def dropID: MakeMeta[NoID] = if (underlying.id.isEmpty) this.asInstanceOf[MakeMeta[NoID]] else new MakeMeta[NoID](underlying.copy(id = "")) def addLab(lab: Laboratory): MakeMeta[I] = if (lab.isEmpty) this else new MakeMeta[I](underlying.copy(lab = underlying.lab :+ lab)) def addLab(lab: MakeLab): MakeMeta[I] = addLab(lab.result) def setLab(labs: Seq[Laboratory]) = new MakeMeta[I](underlying.copy(lab = labs.toVector)) def dropLabs = if (underlying.lab.isEmpty) this else new MakeMeta[I](underlying.copy(lab = Vector.empty)) def addWho(who: String) = if (who.isEmpty) this else new 
MakeMeta[I](underlying.copy(who = underlying.who :+ who)) def setWho(whos: Seq[String]) = new MakeMeta[I](underlying.copy(who = whos.toVector)) def dropWhos = if (underlying.who.isEmpty) this else new MakeMeta[I](underlying.copy(who = Vector.empty)) def setTime(time: java.time.OffsetDateTime) = new MakeMeta[I](underlying.copy(timestamp = Some(Left(time)))) def setTime(time: java.time.LocalDateTime) = new MakeMeta[I](underlying.copy(timestamp = Some(Right(time)))) def dropTime = if (underlying.timestamp.isEmpty) this else new MakeMeta[I](underlying.copy(timestamp = None)) def setTemp(temperature: Double) = new MakeMeta[I](underlying.copy(temperature = Some(temperature).filter(x => !x.isNaN && !x.isInfinite))) def dropTemp = if (underlying.temperature.isEmpty) this else new MakeMeta[I](underlying.copy(temperature = None)) def setHumidity(humidity: Double) = new MakeMeta[I](underlying.copy(humidity = Some(humidity).filter(x => !x.isNaN && !x.isInfinite))) def dropHumidity = if (underlying.humidity.isEmpty) this else new MakeMeta[I](underlying.copy(humidity = None)) def setArena(arena: Arena): MakeMeta[I] = if (arena.isEmpty) dropArena else new MakeMeta[I](underlying.copy(arena = Some(arena))) def setArena(arena: MakeArena): MakeMeta[I] = setArena(arena.result) def dropArena = if (underlying.arena.isEmpty) this else new MakeMeta[I](underlying.copy(arena = None)) def setFood(food: String): MakeMeta[I] = if (food.isEmpty) dropFood else new MakeMeta[I](underlying.copy(food = Some(food))) def setFood(): MakeMeta[I] = setFood("OP50") def dropFood: MakeMeta[I] = if (underlying.food.isEmpty) this else new MakeMeta[I](underlying.copy(food = None)) def setMedia(media: String): MakeMeta[I] = if (media.isEmpty) dropMedia else new MakeMeta[I](underlying.copy(media = Some(media))) def setMedia(): MakeMeta[I] = setMedia("NGM") def dropMedia: MakeMeta[I] = if (underlying.media.isEmpty) this else new MakeMeta[I](underlying.copy(media = None)) def setSex(sex: String): MakeMeta[I] = if 
(sex.isEmpty) dropSex else new MakeMeta[I](underlying.copy(sex = Some(sex))) def setSex(): MakeMeta[I] = setSex("hermaphrodite") def dropSex: MakeMeta[I] = if (underlying.sex.isEmpty) this else new MakeMeta[I](underlying.copy(sex = None)) def setStage(stage: String) = if (stage.isEmpty) dropStage else new MakeMeta[I](underlying.copy(stage = Some(stage))) def dropStage = if (underlying.stage.isEmpty) this else new MakeMeta[I](underlying.copy(stage = None)) def setAge(age: Double) = new MakeMeta[I](underlying.copy(age = Some(age).filter(x => !x.isNaN && !x.isInfinite))) def dropAge = if (underlying.age.isEmpty) this else new MakeMeta[I](underlying.copy(age = None)) def setStrain(strain: String) = if (strain.isEmpty) dropStrain else new MakeMeta[I](underlying.copy(strain = Some(strain))) def dropStrain = if (underlying.strain.isEmpty) this else new MakeMeta[I](underlying.copy(strain = None)) def addProtocol(protocol: String): MakeMeta[I] = new MakeMeta[I](underlying.copy(protocol = underlying.protocol :+ protocol)) def setProtocol(protocols: Seq[String]): MakeMeta[I] = new MakeMeta[I](underlying.copy(protocol = protocols.toVector)) def dropProtocols = if (underlying.protocol.isEmpty) this else new MakeMeta[I](underlying.copy(protocol = Vector.empty)) def addInterpolate(interpolate: Interpolate): MakeMeta[I] = if (interpolate.isEmpty) this else new MakeMeta[I](underlying.copy(interpolate = underlying.interpolate :+ interpolate)) def setInterpolate(interpolations: Seq[Interpolate]): MakeMeta[I] = new MakeMeta[I](underlying.copy(interpolate = interpolations.toVector)) def dropInterpolations = if (underlying.interpolate.isEmpty) this else new MakeMeta[I](underlying.copy(interpolate = Vector.empty)) def addSoftware(software: Software): MakeMeta[I] = if (software.isEmpty) this else new MakeMeta[I](underlying.copy(software = underlying.software :+ software)) def addSoftware(software: MakeSoft): MakeMeta[I] = addSoftware(software.result) def setSoftware(softwares: 
Seq[Software]): MakeMeta[I] = new MakeMeta[I](underlying.copy(software = softwares.toVector)) def dropSoftware = if (underlying.software.isEmpty) this else new MakeMeta[I](underlying.copy(software = Vector.empty)) def putCustom(key: String, value: Json) = new MakeMeta[I](underlying.copy(custom = Json.Obj(underlying.custom.asMap + ((key, value))))) def setCustom(custom: Json.Obj) = new MakeMeta[I](underlying.copy(custom = custom)) def dropCustom = if (underlying.custom.size == 0) this else new MakeMeta[I](underlying.copy(custom = Json.Obj.empty)) def resultWithID(id: String) = underlying.copy(id = id) def resultWithUUID() = underlying.copy(id = java.util.UUID.randomUUID.toString) def result(implicit ev: I =:= YesID) = underlying } def meta(): MakeMeta[NoID] = new MakeMeta[NoID](Metadata.empty) def meta(id: String): MakeMeta[YesID] = meta().setID(id) final class MakeLab private[trackercommons] (val result: Laboratory) { def isEmpty = result.isEmpty def pi(s: String) = result.copy(pi = s) def title(s: String) = result.copy(name = s) def location(s: String) = result.copy(location = s) def putCustom(key: String, value: Json) = new MakeLab(result.copy(custom = Json.Obj(result.custom.asMap + ((key, value))))) def setCustom(custom: Json.Obj) = new MakeLab(result.copy(custom = custom)) def dropCustom = if (result.custom.size == 0) this else new MakeLab(result.copy(custom = Json.Obj.empty)) } def lab() = new MakeLab(Laboratory.empty) final class MakeArena private[trackercommons] (val result: Arena) { def isEmpty = result.isEmpty def style(s: String) = new MakeArena(result.copy(style = s)) def orientation(s: String) = new MakeArena(result.copy(orientation = s)) def size(d: Double) = new MakeArena(result.copy(size = Some(Right(d)))) def size(d1: Double, d2: Double) = new MakeArena(result.copy(size = Some(Left((d1, d2))))) def dropSize = new MakeArena(result.copy(size = None)) def putCustom(key: String, value: Json) = new MakeArena(result.copy(custom = 
Json.Obj(result.custom.asMap + ((key, value))))) def setCustom(custom: Json.Obj) = new MakeArena(result.copy(custom = custom)) def dropCustom = if (result.custom.size == 0) this else new MakeArena(result.copy(custom = Json.Obj.empty)) } def arena() = new MakeArena(Arena.empty) final class MakeInterp private[trackercommons] (val result: Interpolate) { def isEmpty = result.isEmpty def method(s: String) = new MakeInterp(result.copy(method = s)) def addValue(s: String): MakeInterp = new MakeInterp(result.copy(values = result.values :+ s)) def setValues(ss: Iterable[String]): MakeInterp = new MakeInterp(result.copy(values = ss.toVector)) def dropValues = if (result.values.isEmpty) this else new MakeInterp(result.copy(values = Vector.empty)) def putCustom(key: String, value: Json) = new MakeInterp(result.copy(custom = Json.Obj(result.custom.asMap + ((key, value))))) def setCustom(custom: Json.Obj) = new MakeInterp(result.copy(custom = custom)) def dropCustom = if (result.custom.size == 0) this else new MakeInterp(result.copy(custom = Json.Obj.empty)) } def interpolate() = new MakeInterp(Interpolate.empty) final class MakeSoft private[trackercommons] (val result: Software) { def isEmpty = result.isEmpty def name(s: String) = new MakeSoft(result.copy(name = s)) def version(s: String) = new MakeSoft(result.copy(version = s)) def addFeature(s: String) = new MakeSoft(result.copy(featureID = result.featureID + (if (s.startsWith("@")) s else "@" + s))) def setFeatures(ss: Iterable[String]) = new MakeSoft(result.copy(featureID = ss.map(s => if (s.startsWith("@")) s else "@" + s).toSet)) def dropFeatures = new MakeSoft(result.copy(featureID = Set.empty)) def setSettings(j: Json) = new MakeSoft(result.copy(settings = Some(j))) def dropSettings = new MakeSoft(result.copy(settings = None)) def putCustom(key: String, value: Json) = new MakeSoft(result.copy(custom = Json.Obj(result.custom.asMap + ((key, value))))) def setCustom(custom: Json.Obj) = new MakeSoft(result.copy(custom = 
custom)) def dropCustom = if (result.custom.size == 0) this else new MakeSoft(result.copy(custom = Json.Obj.empty)) } def software() = new MakeSoft(Software.empty) final class DataBuilder[D <: HasData](id: String) { private[this] var i = 0 private[this] trait Acc { def size: Int def size_=(ii: Int): Unit def capacity: Int def copyTo(m: Int): Unit def zeroAt(j: Int): Unit final def free(n: Int) { val N = n.toLong + size if (N > capacity) { val m = math.min(Int.MaxValue - 1, math.max(2L*capacity, N)).toInt copyTo(m) } } final def fill(k: Int) { if (size < k) { free(k - size) while (size < k) { zeroAt(size) size = size + 1 } } } def keepup() { fill(i-1) free(1) } def complete() { fill(i); if (i != capacity) copyTo(i) } } // Helper class to accumulate the core times/positions private[this] final class AccTXY extends Acc { var ts = new Array[Double](1) var xs, ys = new Array[Array[Float]](1) var rxs, rys = new Array[Double](1) def size = i def size_=(ii: Int) { i = ii } def capacity = ts.length def copyTo(m: Int) { ts = java.util.Arrays.copyOf(ts, m) xs = java.util.Arrays.copyOf(xs, m) ys = java.util.Arrays.copyOf(ys, m) rxs = java.util.Arrays.copyOf(rxs, m) rys = java.util.Arrays.copyOf(rys, m) } def zeroAt(j: Int) { ts(j) = Double.NaN xs(j) = DataBuilder.emptyF ys(j) = DataBuilder.emptyF rxs(j) = 0.0 rys(j) = 0.0 } def add(t: Double, x: Array[Double], y: Array[Double]) { free(1) ts(i) = t val m = math.min(x.length, y.length) if (m > 0) { var x0 = x(0) var y0 = y(0) var j = 1; while (j < m) { if (x(j) < x0) x0 = x(j); if (y(j) < y0) y0 = y(j); j += 1 } val xi = new Array[Float](m) val yi = new Array[Float](m) j = 0; while (j < m) { xi(j) = (x(j) - x0).toFloat; yi(j) = (y(j) - y0).toFloat; j += 1 } xs(i) = xi ys(i) = yi rxs(i) = x0 rys(i) = y0 } else { xs(i) = DataBuilder.emptyF ys(i) = DataBuilder.emptyF rxs(i) = 0 rys(i) = 0 } i += 1 } } private[this] val txy = new AccTXY; // Helper class to accumulate the additional XY data: origins and centroids private[this] final 
class AccXYQ extends Acc { var qi = 0 var qxs, qys = DataBuilder.emptyD def size = qi def size_=(ii: Int) { qi = ii } def capacity = qxs.length def copyTo(m: Int) { qxs = java.util.Arrays.copyOf(qxs, m) qys = java.util.Arrays.copyOf(qys, m) } def zeroAt(j: Int) { qxs(j) = Double.NaN qys(j) = Double.NaN } def add(qx: Double, qy: Double) { if (qi > 0 || qx.finite || qy.finite) { keepup() qxs(qi) = qx qys(qi) = qy qi += 1 } } override def complete() { if (qi > 0) super.complete() } } private[this] val oxy = new AccXYQ private[this] val cxy = new AccXYQ // Helper class to accumulate other single things (perimeters, ventral/head stuff) private[this] class AccA[A: reflect.ClassTag](zero: A, dedup: Boolean = false)(nonzero: A => Boolean) extends Acc { var ai = 0 var dupi = 0 var aa = new Array[A](1) def size = ai def size_=(ii: Int) { ai = ii } def capacity = aa.length def copyTo(m: Int) { val aaa = new Array[A](m); System.arraycopy(aa, 0, aaa, 0, math.min(aa.length, m)); aa = aaa } def zeroAt(j: Int) { aa(j) = zero } def add(a: A) { if (dedup && aa.length < 2 && ai == 1) { if (a == aa(0)) { dupi += 1; return } if (dupi > 0) { aa = Array.fill(ai + dupi)(aa(0)) ai += dupi dupi = 0 } } if (nonzero(a) || ai > 0) { keepup() aa(ai) = a ai += 1 } } override def complete() {} def get() = { if (size > 0) { if (dedup && aa.length == 1 && ai == 1) Some(Array(aa(0))) else { fill(i) val temp = aa copyTo(i) val ans = Some(aa) aa = temp ans } } else None } def only(a: A) { ai = 1 aa = Array(a) } } private[this] val pp = new AccA(PerimeterPoints.empty)(_.size > 0) private[this] val wk = new AccA(PixelWalk.empty)(_.size > 0) private[this] val hd = new AccA("?", true)(_ ne null) private[this] val vn = new AccA("?", true)(_ ne null) private[this] var jm: collection.mutable.AnyRefMap[String, JArB] = null private[this] def jAdd(key: String, value: Json) { if (jm eq null) jm = new collection.mutable.AnyRefMap[String, JArB] jm.getOrElseUpdate(key, new JArB).add(value, i) } private[this] def 
jGet = Json.Obj.empty/* if (jm eq null) Json.Obj.empty else Json.Obj(jm.mapValues(b => Json(b.result)).toMap)*/ def validate: Either[DataBuilder[NoData], DataBuilder[YesData]] = if (i == 0) Left(this.asInstanceOf[DataBuilder[NoData]]) else Right(this.asInstanceOf[DataBuilder[YesData]]) def result(implicit ev: D =:= YesData): Data = { txy.complete oxy.complete cxy.complete if (jm ne null) jm.foreach{ case (_, j) => j.publish(i) } val myHd = hd.get.getOrElse(DataBuilder.emptyS) val myVn = vn.get.getOrElse(DataBuilder.emptyS) Data(id, txy.ts, txy.xs, txy.ys, cxy.qxs, cxy.qys, oxy.qxs, oxy.qys, pp.get, wk.get, myHd, myVn, jGet)(txy.rxs, txy.rys) } private[trackercommons] def personalCustom(j: Json.Obj): this.type = { j.iterator.foreach{ case (k, jv) => jAdd(k, jv) } this } private def myAdd( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double], pxs: Array[Double], pys: Array[Double], ptail: Int, w: PixelWalk, ox: Double, oy: Double, j: Json.Obj ): DataBuilder[YesData] = { txy.add(t, xs, ys) var rxi = txy.rxs(i-1) var ryi = txy.rys(i-1) cxy.add(cx, cy) if ((pxs ne null) && (pys ne null) && pxs.length > 0 && pys.length > 0) { val pxi, pyi = new Array[Float](math.min(pxs.length, pys.length)) var j = 0; while (j < pxi.length) { pxi(j) = (pxs(j) - rxi).toFloat; pyi(j) = (pys(j) - ryi).toFloat; j += 1 } pp.add(PerimeterPoints(pxi, pyi, if (ptail >= 0) Some(ptail) else None)(rxi, ryi)) } if (w ne null) wk.add(w) oxy.add(ox, oy) if (j.size > 0) j.iterator.foreach{ case (k, jv) => jAdd(k, jv) } this.asInstanceOf[DataBuilder[YesData]] } def add( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double], pxs: Array[Double], pys: Array[Double], ptail: Int, ox: Double, oy: Double, j: Json.Obj ): DataBuilder[YesData] = myAdd(t, cx, cy, xs, ys, pxs, pys, ptail, null, ox, oy, j) def add( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double], pxs: Array[Double], pys: Array[Double], ptail: Int, ox: Double, oy: Double ): 
DataBuilder[YesData] = myAdd(t, cx, cy, xs, ys, pxs, pys, ptail, null, ox, oy, Json.Obj.empty) def add( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double], pxs: Array[Double], pys: Array[Double], ptail: Int, j: Json.Obj ): DataBuilder[YesData] = myAdd(t, cx, cy, xs, ys, pxs, pys, ptail, null, Double.NaN, Double.NaN, j) def add( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double], pxs: Array[Double], pys: Array[Double], ptail: Int ): DataBuilder[YesData] = myAdd(t, cx, cy, xs, ys, pxs, pys, ptail, null, Double.NaN, Double.NaN, Json.Obj.empty) def add( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double], w: PixelWalk, ox: Double, oy: Double, j: Json.Obj ): DataBuilder[YesData] = myAdd(t, cx, cy, xs, ys, null, null, -1, w, ox, oy, j) def add( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double], w: PixelWalk, ox: Double, oy: Double ): DataBuilder[YesData] = myAdd(t, cx, cy, xs, ys, null, null, -1, w, ox, oy, Json.Obj.empty) def add( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double], w: PixelWalk, j: Json.Obj ): DataBuilder[YesData] = myAdd(t, cx, cy, xs, ys, null, null, -1, w, Double.NaN, Double.NaN, j) def add( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double], w: PixelWalk ): DataBuilder[YesData] = myAdd(t, cx, cy, xs, ys, null, null, -1, w, Double.NaN, Double.NaN, Json.Obj.empty) def add( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double], ox: Double, oy: Double, j: Json.Obj ): DataBuilder[YesData] = myAdd(t, cx, cy, xs, ys, null, null, -1, null, ox, oy, j) def add( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double], ox: Double, oy: Double ): DataBuilder[YesData] = myAdd(t, cx, cy, xs, ys, null, null, -1, null, ox, oy, Json.Obj.empty) def add( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double], j: Json.Obj ): DataBuilder[YesData] = myAdd(t, cx, cy, xs, ys, null, null, -1, null, 
Double.NaN, Double.NaN, j) def add( t: Double, cx: Double, cy: Double, xs: Array[Double], ys: Array[Double] ): DataBuilder[YesData] = myAdd(t, cx, cy, xs, ys, null, null, -1, null, Double.NaN, Double.NaN, Json.Obj.empty) def add( t: Double, xs: Array[Double], ys: Array[Double], pxs: Array[Double], pys: Array[Double], ptail: Int, ox: Double, oy: Double, j: Json.Obj ): DataBuilder[YesData] = myAdd(t, Double.NaN, Double.NaN, xs, ys, pxs, pys, ptail, null, ox, oy, j) def add( t: Double, xs: Array[Double], ys: Array[Double], pxs: Array[Double], pys: Array[Double], ptail: Int, ox: Double, oy: Double ): DataBuilder[YesData] = myAdd(t, Double.NaN, Double.NaN, xs, ys, pxs, pys, ptail, null, ox, oy, Json.Obj.empty) def add( t: Double, xs: Array[Double], ys: Array[Double], pxs: Array[Double], pys: Array[Double], ptail: Int, j: Json.Obj ): DataBuilder[YesData] = myAdd(t, Double.NaN, Double.NaN, xs, ys, pxs, pys, ptail, null, Double.NaN, Double.NaN, j) def add( t: Double, xs: Array[Double], ys: Array[Double], pxs: Array[Double], pys: Array[Double], ptail: Int ): DataBuilder[YesData] = myAdd(t, Double.NaN, Double.NaN, xs, ys, pxs, pys, ptail, null, Double.NaN, Double.NaN, Json.Obj.empty) def add( t: Double, xs: Array[Double], ys: Array[Double], w: PixelWalk, ox: Double, oy: Double, j: Json.Obj ): DataBuilder[YesData] = myAdd(t, Double.NaN, Double.NaN, xs, ys, null, null, -1, w, ox, oy, j) def add( t: Double, xs: Array[Double], ys: Array[Double], w: PixelWalk, ox: Double, oy: Double ): DataBuilder[YesData] = myAdd(t, Double.NaN, Double.NaN, xs, ys, null, null, -1, w, ox, oy, Json.Obj.empty) def add( t: Double, xs: Array[Double], ys: Array[Double], w: PixelWalk, j: Json.Obj ): DataBuilder[YesData] = myAdd(t, Double.NaN, Double.NaN, xs, ys, null, null, -1, w, Double.NaN, Double.NaN, j) def add( t: Double, xs: Array[Double], ys: Array[Double], w: PixelWalk ): DataBuilder[YesData] = myAdd(t, Double.NaN, Double.NaN, xs, ys, null, null, -1, w, Double.NaN, Double.NaN, Json.Obj.empty) 
def add( t: Double, xs: Array[Double], ys: Array[Double], ox: Double, oy: Double, j: Json.Obj ): DataBuilder[YesData] = myAdd(t, Double.NaN, Double.NaN, xs, ys, null, null, -1, null, ox, oy, j) def add( t: Double, xs: Array[Double], ys: Array[Double], ox: Double, oy: Double ): DataBuilder[YesData] = myAdd(t, Double.NaN, Double.NaN, xs, ys, null, null, -1, null, ox, oy, Json.Obj.empty) def add( t: Double, xs: Array[Double], ys: Array[Double], j: Json.Obj ): DataBuilder[YesData] = myAdd(t, Double.NaN, Double.NaN, xs, ys, null, null, -1, null, Double.NaN, Double.NaN, j) def add( t: Double, xs: Array[Double], ys: Array[Double] ): DataBuilder[YesData] = myAdd(t, Double.NaN, Double.NaN, xs, ys, null, null, -1, null, Double.NaN, Double.NaN, Json.Obj.empty) def allHeads(s: String)(implicit ev: D =:= YesData): this.type = { hd.only(s); this } def withHead(s: String)(implicit ev: D =:= YesData): this.type = { hd.add(s); this } def allVentrals(s: String)(implicit ev: D =:= YesData): this.type = { vn.only(s); this } def withVentral(s: String)(implicit ev: D =:= YesData): this.type = { vn.add(s); this } } object DataBuilder { private[trackercommons] val emptyD = new Array[Double](0) private[trackercommons] val emptyF = new Array[Float](0) private[trackercommons] val emptyP = new Array[PerimeterPoints](0) private[trackercommons] val emptyW = new Array[PixelWalk](0) private[trackercommons] val emptyS = new Array[String](0) } def worm(id: String) = new DataBuilder[NoData](id) def worm(id: String, j: Json.Obj) = (new DataBuilder[NoData](id)).personalCustom(j) private[trackercommons] final class JArB() { private[this] var i = 0 private[this] var unarr: Json = null private[this] var arr = new Array[Json](1) private[this] def free(n: Int) { if (n+i > arr.length) { val m = math.min(Int.MaxValue - 1, math.max(2*arr.length, n+i)) arr = java.util.Arrays.copyOf(arr, m) } } private[this] def fill(k: Int) { if (i < k) { free(k - i) if (unarr ne null) while (i < k) { arr(i) = unarr; i += 1 } 
else while (i < k) { arr(i) = Json.Null; i += 1} } } def publish(index: Int) { if (i == 0 && (unarr ne null)) { arr(0) = unarr; i += 1 } else fill(index - i) } def add(j: Json, index: Int) { if (index == 0 && i == 0) unarr = j else { if (i == index) arr(i-1) = j else { if (i+1 < index) fill(index-1) free(1) arr(i) = j i = index } } } def result: Array[Json] = java.util.Arrays.copyOf(arr, i) } }
Ichoran/tracker-commons
src/scala/src/main/scala/Create.scala
Scala
mit
29,746
/*
 * Copyright (C) 2014 - 2017  Contributors as noted in the AUTHORS.md file
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

package com.wegtam.tensei.agent.transformers

import akka.actor.Props
import akka.util.ByteString
import com.wegtam.tensei.agent.transformers.BaseTransformer.{
  StartTransformation,
  TransformerResponse
}

import scala.math.BigDecimal.RoundingMode
import scala.util.Try

/**
 * A transformer that emulates simple if-else-branches on numeric values.
 *
 * The transformer accepts the following parameters:
 *  - `if`     - A condition as string which decides whether to execute the `then` or the `else`
 *               branch. Supported operators are ==, !=, <, <=, >= and >. The condition has to be
 *               in a form like `x>42` or `3.141 != x`.
 *  - `then`   - A function as string that describes the transformation. Supported operators are
 *               +, -, * and /. The function has to be in a form like `x=x+1` or `x=3-x` for
 *               assignments or `42` for constants.
 *  - `else`   - Same form as `then`, applied when the `if` condition is false.
 *  - `format` - A string that specifies if the return values are Long ("num") or
 *               BigDecimal ("dec").
 */
class IfThenElseNumeric extends BaseTransformer {
  override def transform: Receive = {
    case msg: StartTransformation =>
      log.debug("Starting If-Then-Else-Transformer on {}", msg.src)
      val params = msg.options.params
      // Evaluate the condition for every source value and apply the matching branch.
      val result: List[Any] = msg.src.map { e =>
        val x = e match {
          case bs: ByteString => bs.utf8String
          case otherData      => otherData.toString
        }
        if (ifcondition(x, params)) thenbranch(x, params) else elsebranch(x, params)
      }
      log.debug("If-Then-Else-Transformer finished.")
      context become receive
      sender() ! TransformerResponse(result, classOf[String])
  }

  /**
   * Converts a string to an option on a BigDecimal.
   *
   * @param s the value as string
   * @return the value as an option on a BigDecimal, `None` if it cannot be parsed
   */
  def parseBigDecimal(s: String): Option[BigDecimal] = Try(BigDecimal(s)).toOption

  /**
   * Evaluates if the condition is true or false.
   *
   * Defaults to `true` when no `if` parameter is given or the condition cannot be parsed.
   * Note: throws (as before) if `a` matches an operator but is not numeric.
   *
   * @param a      the value from the user
   * @param params a list of parameters with condition, if branch and else branch
   * @return the evaluated condition
   */
  def ifcondition(a: Any, params: TransformerParameters): Boolean =
    paramValueO("if")(params) match {
      case None => true
      case Some(fn) =>
        val funcAsStr = fn.trim
        // Strip an optional lambda prefix, e.g. "x=>x>42" becomes "x>42".
        val function =
          if (funcAsStr.contains("=>")) funcAsStr.split("=>").apply(1).trim
          else funcAsStr
        // FIX: the patterns previously contained doubled escapes ("\\\\w"), i.e. a literal
        // backslash in the compiled regex, so conditions like "x>42" never matched.
        val regex        = "\\w+\\s*(>|<|>=|<=|==|!=)\\s*(\\d+[(,|.)\\d+]*)".r
        // FIX: "!=" was missing from the reversed form ("42 != x"); it is symmetric, so the
        // `reversed` flag is irrelevant for it below.
        val regexreverse = "(\\d+[(,|.)\\d+]*)\\s*(>|<|>=|<=|==|!=)\\s*\\w+".r
        // (operator, constant operand, operands-reversed?) — reversed means "const OP x".
        val listp: Iterator[(String, Option[BigDecimal], Boolean)] =
          if (regex.findFirstIn(function).isDefined)
            regex.findAllIn(function).matchData.map { m =>
              (m.group(1), parseBigDecimal(m.group(2)), false)
            }
          else if (regexreverse.findFirstIn(function).isDefined)
            regexreverse.findAllIn(function).matchData.map { m =>
              (m.group(2), parseBigDecimal(m.group(1)), true)
            }
          else
            Iterator(("", Option(BigDecimal(0)), false))
        val (op, constant, reversed) = listp.next()
        if (op.isEmpty) true // unparseable condition — keep original permissive default
        else {
          val x = parseBigDecimal(a.toString).get
          op match {
            case ">=" => if (reversed) x <= constant.get else x >= constant.get
            case "<=" => if (reversed) x >= constant.get else x <= constant.get
            case "==" => x == constant.get
            case "!=" => x != constant.get
            case "<"  => if (reversed) x > constant.get else x < constant.get
            case ">"  => if (reversed) x < constant.get else x > constant.get
            case _    => true
          }
        }
    }

  /**
   * Executes the then branch.
   *
   * @param a      the value from the user
   * @param params a list of parameters with condition, if branch and else branch
   * @return the result of the then branch
   */
  def thenbranch(a: Any, params: TransformerParameters): Any = branch("then", a, params)

  /**
   * Executes the else branch.
   *
   * @param a      the value from the user
   * @param params a list of parameters with condition, if branch and else branch
   * @return the result of the else branch
   */
  def elsebranch(a: Any, params: TransformerParameters): Any = branch("else", a, params)

  /**
   * Shared implementation of the then/else branches (they only differ in the parameter key).
   *
   * @param key    the parameter key holding the branch function ("then" or "else")
   * @param a      the value from the user; must be numeric (throws otherwise, as before)
   * @param params the transformer parameters
   * @return the branch result, formatted according to the "format" parameter
   */
  private def branch(key: String, a: Any, params: TransformerParameters): Any = {
    val number = parseBigDecimal(a.toString).get
    paramValueO(key)(params) match {
      case None => new java.math.BigDecimal(number.toString())
      case Some(fn) =>
        val funcAsStr = fn.replaceAll(">", "").trim // makes "x=>x+3" into "x=x+3"
        // "num" yields a Long, anything else (default "dec") a java BigDecimal.
        val format = params.find(_._1 == "format").map(_._2.trim).getOrElse("dec")
        execute(funcAsStr, number, format)
    }
  }

  /**
   * Converts a string to the result of the function.
   *
   * @param func_as_str the function as string
   * @param number      the value from the user
   * @param format      "num" for Long results, anything else for BigDecimal
   * @return the result of the function
   */
  def execute(func_as_str: String, number: BigDecimal, format: String): Any = {
    // FIX: patterns below also had doubled escapes; see ifcondition.
    val regex_const = "^(\\d+[(,|.)\\d+]*)$".r                                 // e.g. 42
    val regex_func1 = "\\w+\\s*=\\s*\\w+\\s*(\\+|-|\\*|\\/)\\s*(\\d+[(,|.)\\d+]*)".r // e.g. x=x+42
    val regex_func2 = "\\w+\\s*=\\s*(\\d+[(,|.)\\d+]*)\\s*(\\+|-|\\*|\\/)\\s*\\w+".r // e.g. x=42+x
    val regex_func3 = "\\w+\\s*=\\s*(\\d+[(,|.)\\d+]*)".r                      // e.g. x=42
    // (operator, constant operand, operands-reversed?) — empty operator means "constant only".
    val listp: Iterator[(String, Option[BigDecimal], Boolean)] =
      if (regex_const.findFirstIn(func_as_str).isDefined)
        regex_const.findAllIn(func_as_str).matchData.map { m =>
          ("", parseBigDecimal(m.group(1)), false)
        }
      else if (regex_func1.findFirstIn(func_as_str).isDefined)
        regex_func1.findAllIn(func_as_str).matchData.map { m =>
          (m.group(1), parseBigDecimal(m.group(2)), false)
        }
      else if (regex_func2.findFirstIn(func_as_str).isDefined)
        regex_func2.findAllIn(func_as_str).matchData.map { m =>
          (m.group(2), parseBigDecimal(m.group(1)), true)
        }
      else if (regex_func3.findFirstIn(func_as_str).isDefined)
        regex_func3.findAllIn(func_as_str).matchData.map { m =>
          ("", parseBigDecimal(m.group(1)), false)
        }
      else
        Iterator(("", Option(number), false)) // unparseable function — return input unchanged
    val (op, constant, reversed) = listp.next()
    val res =
      if (op != "") op match {
        case "+" => number + constant.get
        case "-" => if (reversed) constant.get - number else number - constant.get
        case "*" => number * constant.get
        case "/" => if (reversed) constant.get / number else number / constant.get
      }
      else constant.get
    if (format.equalsIgnoreCase("num"))
      res.setScale(0, RoundingMode.HALF_UP).toLongExact
    else
      new java.math.BigDecimal(res.toString())
  }
}

object IfThenElseNumeric {
  def props: Props = Props(classOf[IfThenElseNumeric])
}
Tensei-Data/tensei-agent
src/main/scala/com/wegtam/tensei/agent/transformers/IfThenElseNumeric.scala
Scala
agpl-3.0
8,819
package io.scalac.seed.route

import akka.actor._
import io.scalac.seed.domain.UserAggregate
import io.scalac.seed.service._
import spray.httpx.Json4sSupport
import spray.routing._
import spray.routing.authentication.BasicAuth

/** Companion holding the request payload types accepted by [[UserRoute]]. */
object UserRoute {
  // JSON body of POST /user/password; `pass` is the new password value.
  case class ChangePasswordRequest(pass: String)
}

/**
 * Spray routing trait exposing the user HTTP API:
 *  - POST /user            registers a new user (RegisterUser command).
 *  - POST /user/password   changes the password of the basic-auth-authenticated user.
 *
 * Request handling is delegated to the user aggregate manager actor via the
 * handleRegister/handleUpdate helpers from RequestHandlerCreator.
 */
trait UserRoute extends HttpService with Json4sSupport with RequestHandlerCreator with UserAuthenticator {

  import UserAggregateManager._

  // The aggregate manager actor that processes user commands;
  // provided by the concrete class mixing in this trait.
  val userAggregateManager: ActorRef

  val userRoute =
    pathPrefix("user") {
      pathEndOrSingleSlash {
        post {
          // POST /user — body is deserialized into a RegisterUser command.
          entity(as[RegisterUser]) { cmd =>
            serveRegister(cmd)
          }
        }
      } ~
      path("password") {
        post {
          // POST /user/password — requires HTTP Basic authentication; the
          // authenticated user's id is combined with the submitted password.
          authenticate(BasicAuth(userAuthenticator _, realm = "secure site")) { user =>
            entity(as[UserRoute.ChangePasswordRequest]) { cmd =>
              serveUpdate(ChangeUserPassword(user.id, cmd.pass))
            }
          }
        }
      }
    }

  // Completes the request by asking the aggregate manager to create a new User aggregate.
  private def serveRegister(message : AggregateManager.Command): Route =
    ctx => handleRegister[UserAggregate.User](ctx, userAggregateManager, message)

  // Completes the request by asking the aggregate manager to update an existing User aggregate.
  private def serveUpdate(message : AggregateManager.Command): Route =
    ctx => handleUpdate[UserAggregate.User](ctx, userAggregateManager, message)
}
nestor-by/akka-persistence-eventsourcing
src/main/scala/io/scalac/seed/route/UserRoute.scala
Scala
apache-2.0
1,292
package org.jetbrains.plugins.scala.codeInspection.catchAll

import com.intellij.codeInspection.{ProblemHighlightType, ProblemsHolder, LocalInspectionTool}
import com.intellij.psi.PsiElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.codeInspection.InspectionBundle
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScBlockExpr, ScCatchBlock}
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScCaseClause, ScReferencePattern, ScWildcardPattern}

/**
 * Inspection that warns about `catch` blocks whose single case clause matches every
 * throwable (a wildcard `_` or a bare reference pattern, with no guard), since such
 * clauses also swallow fatal errors like OutOfMemoryError.
 *
 * Rewritten to use Option combinators instead of the previous `getOrElse(null)` +
 * null-check + `(Boolean, ScCaseClause)` tuple style, which was NPE-prone.
 *
 * @author Ksenia.Sautina
 * @since 6/25/12
 */
class DangerousCatchAllInspection extends LocalInspectionTool {

  override def isEnabledByDefault: Boolean = true

  override def buildVisitor(holder: ProblemsHolder, isOnTheFly: Boolean): PsiElementVisitor =
    new ScalaElementVisitor {
      override def visitCatchBlock(catchBlock: ScCatchBlock) {
        // A clause is a dangerous catch-all when it is the ONLY clause of the block,
        // has no guard, and its pattern is `_` or a plain reference binding.
        val catchAllClause: Option[ScCaseClause] = catchBlock.expression match {
          case Some(block: ScBlockExpr) =>
            block.caseClauses match {
              case Some(clauses) if clauses.caseClauses.size == 1 =>
                // `caseClause` may be null on broken PSI; Option(...) guards that.
                Option(clauses.caseClause).filter { clause =>
                  clause.guard.isEmpty && (clause.pattern match {
                    case Some(_: ScWildcardPattern) | Some(_: ScReferencePattern) => true
                    case _                                                       => false
                  })
                }
              case _ => None
            }
          case _ => None
        }
        // Highlight from the clause's first child up to (and including) the pattern.
        for {
          clause       <- catchAllClause
          startElement <- clause.firstChild
          endElement   <- clause.pattern
        } holder.registerProblem(
          holder.getManager.createProblemDescriptor(
            startElement,
            endElement,
            InspectionBundle.message("catch.all"),
            ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
            isOnTheFly,
            new ReplaceDangerousCatchAllQuickFix(clause)
          )
        )
      }
    }
}
consulo/consulo-scala
src/org/jetbrains/plugins/scala/codeInspection/catchAll/DangerousCatchAllInspection.scala
Scala
apache-2.0
2,352
package org.jetbrains.plugins.scala.findUsages.factory

import javax.swing.SwingUtilities

import com.intellij.find.findUsages.{FindUsagesHandler, FindUsagesHandlerFactory}
import com.intellij.openapi.extensions.Extensions
import com.intellij.openapi.project.Project
import com.intellij.openapi.ui.Messages
import com.intellij.psi.{PsiElement, PsiNamedElement}
import com.intellij.util.containers.ContainerUtil
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.{ScNamedElement, ScTypedDefinition}
import org.jetbrains.plugins.scala.lang.psi.fake.FakePsiMethod
import org.jetbrains.plugins.scala.lang.psi.light._
import org.jetbrains.plugins.scala.lang.refactoring.rename.RenameSuperMembersUtil
import org.jetbrains.plugins.scala.{ScalaBundle, extensions}

/**
 * Factory producing Find Usages handlers for Scala PSI elements, including the
 * synthetic Java-facing wrapper elements generated for Scala members.
 *
 * User: Alexander Podkhalyuzin
 * Date: 17.08.2009
 */
class ScalaFindUsagesHandlerFactory(project: Project) extends FindUsagesHandlerFactory {

  // Find Usages dialog options, one instance per element category, shared per project.
  val typeDefinitionOptions = new ScalaTypeDefinitionFindUsagesOptions(project)

  val memberOptions = new ScalaMemberFindUsagesOptions(project)

  val paramOptions = new ScalaParameterFindUsagesOptions(project)

  // Accepts Scala definitions and the light/fake wrapper elements that represent
  // Scala members to Java code.
  override def canFindUsages(element: PsiElement): Boolean = {
    element match {
      case _: FakePsiMethod => true
      case _: ScTypedDefinition => true
      case _: ScTypeDefinition => true
      case _: PsiClassWrapper => true
      case _: ScFunctionWrapper => true
      case _: StaticPsiMethodWrapper => true
      case _: PsiTypedDefinitionWrapper => true
      case _: StaticPsiTypedDefinitionWrapper => true
      case _ => false
    }
  }

  override def createFindUsagesHandler(element: PsiElement, forHighlightUsages: Boolean): FindUsagesHandler = {
    // Unwrap synthetic wrapper elements back to the underlying Scala definition.
    // Mutable because the user dialog below may redirect the search to a super
    // member, or cancel (null => NULL_HANDLER).
    var replacedElement = element match {
      case wrapper: PsiClassWrapper => wrapper.definition
      case p: PsiTypedDefinitionWrapper => p.typedDefinition
      case p: StaticPsiTypedDefinitionWrapper => p.typedDefinition
      case f: ScFunctionWrapper => f.function
      case f: FakePsiMethod => f.navElement
      case s: StaticPsiMethodWrapper => s.method
      case _ => element
    }
    // Asks the user whether to search usages of the base member instead.
    // Yes (0) => search the top-most super member; No (1) => keep the element;
    // Cancel => abort by setting replacedElement to null.
    def chooseSuper(name: String, supers: Seq[PsiNamedElement]) {
      def showDialog() {
        val message = ScalaBundle.message("find.usages.member.has.supers", name)
        val result = Messages.showYesNoCancelDialog(element.getProject, message, "Warning", Messages.getQuestionIcon)
        result match {
          case 0 =>
            val elem = supers.last
            replacedElement = elem
          case 1 => //do nothing, it's ok
          case _ => replacedElement = null
        }
      }
      // The dialog must run on the EDT; block until the user answered because
      // replacedElement is read right after this call.
      if (SwingUtilities.isEventDispatchThread) showDialog()
      else extensions.invokeAndWait(showDialog())
    }
    replacedElement match {
      // NOTE(review): this branch builds Array(function) and discards it — looks
      // like dead code or a leftover; verify the intended behavior for local functions.
      case function: ScFunction if function.isLocal => Array(function)
      case named: ScNamedElement if !forHighlightUsages =>
        val supers = RenameSuperMembersUtil.allSuperMembers(named, withSelfType = true).filter(needToAsk)
        if (supers.length != 0) chooseSuper(named.name, supers)
      case _ =>
    }
    if (replacedElement == null) return FindUsagesHandler.NULL_HANDLER
    new ScalaFindUsagesHandler(replacedElement, this)
  }

  // Do not bother the user for scala.FunctionN.apply supers — searching those
  // would match every function application.
  private def needToAsk(named: PsiNamedElement): Boolean = {
    named match {
      case fun: ScFunction if fun.containingClass.qualifiedName.startsWith("scala.Function") && fun.name == "apply" => false
      case _ => true
    }
  }
}

object ScalaFindUsagesHandlerFactory {
  // Looks up the Scala factory among all registered FindUsagesHandlerFactory extensions.
  def getInstance(project: Project): ScalaFindUsagesHandlerFactory = {
    ContainerUtil.findInstance(Extensions.getExtensions(FindUsagesHandlerFactory.EP_NAME, project),
      classOf[ScalaFindUsagesHandlerFactory])
  }
}
triggerNZ/intellij-scala
src/org/jetbrains/plugins/scala/findUsages/factory/ScalaFindUsagesHandlerFactory.scala
Scala
apache-2.0
3,849
package org.scalatra package auth import servlet.{ServletBase, ServletRequest, ServletResponse} import javax.servlet.{FilterConfig, ServletConfig} import scala.util.DynamicVariable trait ScentryConfig { val login = "/login" val returnTo = "/" val returnToKey = "returnTo" val failureUrl = "/unauthenticated" } trait ScentrySupport[TypeForUser <: AnyRef] extends Handler with Initializable with CookieSupport { self: ServletBase => type UserType = TypeForUser type ScentryConfiguration <: ScentryConfig protected def fromSession: PartialFunction[String, UserType] protected def toSession: PartialFunction[UserType, String] protected def scentryConfig: ScentryConfiguration private var _strategiesFromConfig = List[String]() abstract override def initialize(config: ConfigT) { super.initialize(config) readStrategiesFromConfig(config) } abstract override def handle(servletRequest: ServletRequest, servletResponse: ServletResponse) = { withRequest(servletRequest) { request(Scentry.ScentryRequestKey) = new Scentry[UserType](self, toSession, fromSession) configureScentry registerStrategiesFromConfig registerAuthStrategies super.handle(servletRequest, servletResponse) } } private def readStrategiesFromConfig(config: Config) = _strategiesFromConfig = { val strats = (config match { case servletConfig: ServletConfig => { servletConfig.getInitParameter("scentry.strategies") } case filterConfig: FilterConfig => filterConfig.getInitParameter("scentry.strategies") case _ => "" }) if(strats != null && strats.trim.nonEmpty) (strats split ";").toList else Nil } private def registerStrategiesFromConfig = _strategiesFromConfig foreach { strategyClassName => val strategy = Class.forName(strategyClassName).newInstance.asInstanceOf[ScentryStrategy[UserType]] strategy registerWith scentry } protected def configureScentry = { } /** * Override this method to register authentication strategies specific to this servlet. 
* registerAuthStrategy('UserPassword, app => new UserPasswordStrategy(app)) */ protected def registerAuthStrategies = { } protected def scentry: Scentry[UserType] = request(Scentry.ScentryRequestKey).asInstanceOf[Scentry[UserType]] protected def scentryOption: Option[Scentry[UserType]] = Option(request(Scentry.ScentryRequestKey)).map(_.asInstanceOf[Scentry[UserType]]) protected def userOption: Option[UserType] = scentry.userOption implicit protected def user: UserType = scentry.user protected def user_=(user: UserType) = scentry.user = user protected def isAuthenticated : Boolean = scentry.isAuthenticated protected def isAnonymous : Boolean = !isAuthenticated @deprecated("use isAuthenticated", "2.0.0") protected def authenticated_? : Boolean = isAuthenticated @deprecated("use !isAuthenticated", "2.0.0") protected def unAuthenticated_? : Boolean = !isAuthenticated protected def authenticate() = { scentry.authenticate() } protected def logOut() = scentry.logout() @deprecated("use logOut()", "2.0.0") protected def logOut_! = logOut() }
louk/scalatra
auth/src/main/scala/org/scalatra/auth/ScentrySupport.scala
Scala
bsd-2-clause
3,186
import leon.lang._ object Overflow7 { def foo7(x: Int): Int = { x*2 } }
epfl-lara/leon
src/test/resources/regression/verification/overflow/invalid/Overflow7.scala
Scala
gpl-3.0
83
package org.nisshiee.chatwork_slack_relay import scala.concurrent.Await import scala.concurrent.duration.Duration import scala.concurrent.ExecutionContext.Implicits.global import com.amazonaws.services.lambda.runtime.Context import com.typesafe.config.ConfigFactory import net.ceedubs.ficus.Ficus._ import org.nisshiee.chatwork_slack_relay.domain._ import org.nisshiee.chatwork_slack_relay.domain.chatwork._ class Main extends MixinRelayService with MixinAsakaiNotifyService { lazy val config = ConfigFactory.load lazy val targetRoomIds: List[Id[Room]] = config.as[List[Long]]("chatwork.targetRoomIds"). map(Id.apply[Room]) lazy val asakaiRoomId: Id[Room] = Id[Room](config.as[Long]("chatwork.asakai.roomId")) lazy val asakaiTargetUserName: String = config.as[String]("chatwork.asakai.targetUser") def main(input: String, context: Context): String = { val relayF = relayService.run(targetRoomIds) val asakaiF = asakaiNotifyService.run(asakaiRoomId, asakaiTargetUserName) val future = (relayF zip asakaiF). map { _ => "done" }. recover { case t => t.toString } Await.result(future, Duration.Inf) } }
nisshiee/chatwork-slack-relay
src/main/scala/Main.scala
Scala
mit
1,168
import org.apache.spark.mllib.linalg.distributed.{CoordinateMatrix, MatrixEntry} import org.scalatest.{FlatSpec} import breeze.linalg.{DenseMatrix => BDM, DenseVector, min, Matrix =>BM} import AllPairsShortestPath._ /** * Created by arzav on 5/26/15. */ class LocalMinPlus extends FlatSpec { def localMinPlus(A: BDM[Double], B: BDM[Double]): BDM[Double] = { require(A.cols == B.rows, " Num cols of A does not match the num rows of B") val k = A.cols val onesA = DenseVector.ones[Double](B.cols) val onesB = DenseVector.ones[Double](A.rows) var AMinPlusB = A(::, 0) * onesA.t + onesB * B(0, ::) if (k > 1) { for (i <- 1 until k) { val a = A(::, i) val b = B(i, ::) val aPlusb = a * onesA.t + onesB * b AMinPlusB = min(aPlusb, AMinPlusB) } } AMinPlusB } def fourByFourBlockMatrx = { BDM( (0.0, 20.0, 4.0, 2.0), (2.0, 0.0, 1.0, 3.0), (1.0, 6.0, 0.0, 5.0), (4.0, 2.0, 2.0, 0.0) ) } def fourByFourMinPlusProduct = { BDM( (0.0, 2.0, 1.0, 2.0), (2.0, 0.0, 1.0, 2.0), (1.0, 1.0, 0.0, 2.0), (2.0, 2.0, 2.0, 0.0) ) } "The minPlus product of the sample 4x4 matrix with itself" should "be correct" in { assert(localMinPlus(fourByFourBlockMatrx, fourByFourBlockMatrx.t) === fourByFourMinPlusProduct) } }
arzavj/spark-all-pairs-shortest-path
src/test/scala/LocalMinPlus.scala
Scala
apache-2.0
1,371
/* * Copyright 2015 University of Basel, Graphics and Vision Research Group * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package scalismo.statisticalmodel /** * Contains classes for handling datasets of registered meshes and building PCA model out of them. * * The most important class of this package is the [[scalismo.statisticalmodel.dataset.DataCollection]]. * */ package object dataset {}
unibas-gravis/scalismo
src/main/scala/scalismo/statisticalmodel/dataset/package.scala
Scala
apache-2.0
918
package io.youi.component.feature import io.youi.component.Component import io.youi.dom._ import org.scalajs.dom.{document, html} import reactify.{Priority, Var} class ContainerFeature[Child <: Component](val component: Component) extends Var[Vector[Child]](Vector.empty) with Feature { override protected def parent: FeatureParent = component private var verifyChanges = true changes { case (old, current) => if (verifyChanges) { val removed = old.diff(current) removed.foreach(_.element.remove()) component.element.verifyChildrenInOrder(get.map(_.element): _*) component.measure.trigger() } } private def unverified[Return](f: => Return): Return = { val previousState = verifyChanges verifyChanges = false try { f } finally { verifyChanges = previousState } } def clear(): Unit = unverified { static(Vector.empty) component.element.innerHTML = "" component.measure.trigger() } def isEmpty: Boolean = get.isEmpty def nonEmpty: Boolean = get.nonEmpty def length: Int = get.length def prependMultiple(seq: Seq[Child]): Unit = unverified { static(seq.toVector ++: get) // Optimization to improve insertion of many elements val f = document.createDocumentFragment() seq.foreach { c => f.appendChild(c.element) } component.element.insertBefore(f, component.element.firstChild) component.measure.trigger() } def prepend(child: Child): Unit = static(child +: get) def prepend(child: html.Element): Unit = { child.insertFirst(component.element) component.measure.trigger() } def replace(current: Child, replacement: Child): Unit = { static(get.map { case c if c eq current => replacement case c => c }) } def replace(current: html.Element, replacement: html.Element): Unit = { component.element.replaceChild(replacement, current) component.measure.trigger() } def +=(component: Child): Unit = static(get :+ component) def +=(child: html.Element): Unit = { component.element.appendChild(child) component.measure.trigger() } def -=(component: Child): Unit = static(get.filterNot(_ eq component)) def 
-=(child: html.Element): Unit = { component.element.removeChild(child) component.measure.trigger() } def ++=(seq: Seq[Child]): Unit = unverified { static(get ++: seq.toVector) // Optimization to improve insertion of many elements val f = document.createDocumentFragment() seq.foreach { c => f.appendChild(c.element) } component.element.appendChild(f) component.measure.trigger() } def --=(seq: Seq[Child]): Unit = static(get.filterNot(seq.contains)) component.measure.on({ get.foreach(_.measure.trigger()) }, Priority.High) }
outr/youi
gui/src/main/scala/io/youi/component/feature/ContainerFeature.scala
Scala
mit
2,791
import leon.lang._ object HOInvocations { def switch(x: Int, f: (Int) => Int, g: (Int) => Int) = if(x > 0) f else g def passing_1(f: (Int) => Int) = { switch(10, (x: Int) => x + 1, f)(2) } ensuring { res => res > 0 } def passing_2(x: Int, f: (Int) => Int, g: (Int) => Int) = { require(x > 0) switch(1, switch(x, f, g), g)(1) } ensuring { res => res == f(1) } } // vim: set ts=4 sw=4 et:
epfl-lara/leon
testcases/verification/higher-order/valid/HOInvocations.scala
Scala
gpl-3.0
414
package akkord.api import akka.actor.ActorRef import akka.pattern.ask import akka.util.Timeout import akkord.api.actors.DiscordApiActor.{RateLimited, RateLimitedException} import akkord.api.circesupport.CirceBodyReadable import play.api.libs.ws.StandaloneWSResponse import scala.concurrent.{ExecutionContext, Future} abstract class DiscordApi(implicit ec: ExecutionContext, timeout: Timeout) extends CirceBodyReadable { protected val api: ActorRef protected def getApiResponse(apiRequest: Any): Future[StandaloneWSResponse] = { val response = (api ? apiRequest).mapTo[Either[RateLimited, StandaloneWSResponse]] unwrapResponse(response) } private def unwrapResponse(response: Future[Either[RateLimited, StandaloneWSResponse]]): Future[StandaloneWSResponse] = { response map { case Right(resp) => resp case Left(rateLimited) => throw RateLimitedException(s"major endpoint: ${rateLimited.majorEndpoint} is being rate limited") } } } object DiscordApi { case class EmptyResponse() }
ryanmiville/akkord
src/main/scala/akkord/api/DiscordApi.scala
Scala
mit
1,040
package uconfig.test import uconfig.PathSeq import PathSeq._ import utest._ object PathSeqTest extends TestSuite { val tests = Tests { val complexPath = "\\"a complex {} [.+*#$]:path=\\".with-multiple.\\" quoted \\".and-unquoted.\\"seg!ments\\"" val complexPathSeq = PathSeq("\\"a complex {} [.+*#$]:path=\\"","with-multiple","\\" quoted \\"","and-unquoted","\\"seg!ments\\"") 'toPath-{ assert( complexPathSeq.toPath == complexPath ) } 'fromString-{ assert( PathSeq.fromString(complexPath) == complexPathSeq ) } } }
jokade/sconfig
shared/src/test/scala/uconfig/test/PathSeqTest.scala
Scala
mit
547
/* * @author Philip Stutz * @author Mihaela Verman * * Copyright 2015 University of Zurich * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.signalcollect.dcop.modules import scala.util.Random trait AdjustmentSchedule extends Algorithm { def shouldConsiderMove(c: State): Boolean } /** * All vertices/agents update every time. */ trait FloodAdjustmentSchedule extends AdjustmentSchedule { def shouldConsiderMove(c: State) = true } /** * Vertices update with a certain probability */ trait ParallelRandomAdjustmentSchedule extends AdjustmentSchedule { def changeProbability: Double def shouldConsiderMove(c: State) = { Random.nextDouble <= changeProbability } } /** * Agents/vertices update with inertia depending on their rank relative to their maximum-ranked neighbour. */ trait RankedBasedAdjustmentSchedule extends AdjustmentSchedule with RankedState { //override type State = RankedState[AgentId, Action, SignalType, UtilityType] def relativeChangeProbability: Double def shouldConsiderMove(c: State) = { val maxNeighbourRank = c.ranks.values.max val rankForCurrentConfig = c.ranks(c.agentId) val relativeRankRatio = rankForCurrentConfig / maxNeighbourRank val changeProbability = 1 - relativeRankRatio * relativeChangeProbability // The higher the rank ratio, the lower the probability to change. Random.nextDouble <= changeProbability } }
elaverman/cuilt
src/main/scala/com/signalcollect/dcop/modules/AdjustmentScheduleTypes.scala
Scala
apache-2.0
1,971
/** * Copyright (C) 2015 Orbeon, Inc. * * This program is free software; you can redistribute it and/or modify it under the terms of the * GNU Lesser General Public License as published by the Free Software Foundation; either version * 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html */ package org.orbeon.oxf.fb import org.dom4j.QName object Names { val FB = "http://orbeon.org/oxf/xml/form-builder" val InitialIterations: QName = QName.get("initial-iterations", "fb", FB) }
joansmith/orbeon-forms
src/main/scala/org/orbeon/oxf/fb/Names.scala
Scala
lgpl-2.1
844
package org.scalajs.jsenv.selenium import org.scalajs.core.tools.io.VirtualTextFile import org.scalajs.jsenv.VirtualFileMaterializer import java.net.URL /** Materializes virtual files in a temporary directory and links to them * via file:// */ object DefaultFileMaterializer extends FileMaterializer { private val materializer = new VirtualFileMaterializer(true) override def materialize(vf: VirtualTextFile): URL = { materializer.materialize(vf).toURI.toURL } }
nicolasstucki/scala-js-env-selenium
seleniumJSEnv/src/main/scala/org/scalajs/jsenv/selenium/DefaultFileMaterializer.scala
Scala
bsd-3-clause
481
package com.twitter.finagle.pool import com.twitter.finagle._ import com.twitter.finagle.client.StackClient import com.twitter.finagle.stats.StatsReceiver import com.twitter.util.{Future, Return, Throw, Time, Promise} import java.util.concurrent.atomic.{AtomicReference, AtomicInteger} import scala.annotation.tailrec private[finagle] object SingletonPool { val role = StackClient.Role.pool /** * Creates a [[com.twitter.finagle.Stackable]] [[com.twitter.finagle.pool.SingletonPool]]. */ def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] = new Stack.Module1[param.Stats, ServiceFactory[Req, Rep]] { val role = SingletonPool.role val description = "Maintain at most one connection" def make(_stats: param.Stats, next: ServiceFactory[Req, Rep]) = { val param.Stats(sr) = _stats new SingletonPool(next, sr.scope("singletonpool")) } } /** * A wrapper service to maintain a reference count. The count is * set to 1 on construction; the count is decreased for each * 'close'. Additional references are attained by calling 'open'. * When the count reaches 0, the underlying service is closed. * * @note This implementation doesn't prevent the reference count * from crossing the 0 boundary multiple times -- it may thus call * 'close' on the underlying service multiple times. */ class RefcountedService[Req, Rep](underlying: Service[Req, Rep]) extends ServiceProxy[Req, Rep](underlying) { private[this] val count = new AtomicInteger(1) private[this] val future = Future.value(this) def open(): Future[Service[Req, Rep]] = { count.incrementAndGet() future } override def close(deadline: Time): Future[Unit] = count.decrementAndGet() match { case 0 => underlying.close(deadline) case n if n < 0 => // This is technically an API usage error. 
count.incrementAndGet() Future.exception(Failure(new ServiceClosedException)) case _ => Future.Done } } sealed trait State[-Req, +Rep] case object Idle extends State[Any, Nothing] case object Closed extends State[Any, Nothing] case class Awaiting(done: Future[Unit]) extends State[Any, Nothing] case class Open[Req, Rep](service: RefcountedService[Req, Rep]) extends State[Req, Rep] } /** * A pool that maintains at most one service from the underlying * ServiceFactory -- concurrent leases share the same, cached * service. A new Service is established whenever the service factory * fails or the current service has become unavailable. */ class SingletonPool[Req, Rep]( underlying: ServiceFactory[Req, Rep], statsReceiver: StatsReceiver) extends ServiceFactory[Req, Rep] { import SingletonPool._ private[this] val scoped = statsReceiver.scope("connects") private[this] val failStat = scoped.counter("fail") private[this] val deadStat = scoped.counter("dead") private[this] val state = new AtomicReference(Idle: State[Req, Rep]) /** * Attempt to connect with the underlying factory and CAS the * state Awaiting(done) => Open()|Idle depending on the outcome. * Connect satisfies passed-in promise when the process is * complete. */ private[this] def connect(done: Promise[Unit], conn: ClientConnection) { def complete(newState: State[Req, Rep]) = state.get match { case s@Awaiting(d) if d == done => state.compareAndSet(s, newState) case Idle | Closed | Awaiting(_) | Open(_) => false } done.become(underlying(conn) transform { case Throw(exc) => failStat.incr() complete(Idle) Future.exception(exc) case Return(svc) if svc.status == Status.Closed => // If we are returned a closed service, we treat the connect // as a failure. This is both correct -- the connect did fail -- and // also prevents us from entering potentially infinite loops where // every returned service is unavailable, which then causes the // follow-on apply() to attempt to reconnect. 
deadStat.incr() complete(Idle) svc.close() Future.exception( Failure("Returned unavailable service", Failure.Restartable) .withSource(Failure.Source.Role, SingletonPool.role)) case Return(svc) => if (!complete(Open(new RefcountedService(svc)))) svc.close() Future.Done }) } // These two await* methods are required to trick the compiler into accepting // the definitions of 'apply' and 'close' as tail-recursive. private[this] def awaitApply(done: Future[Unit], conn: ClientConnection) = done before apply(conn) @tailrec final def apply(conn: ClientConnection): Future[Service[Req, Rep]] = state.get match { case Open(svc) if svc.status != Status.Closed => // It is possible that the pool's state has changed by the time // we can return the service, so svc is possibly stale. We don't // attempt to resolve this race; rather, we let the lower layers deal // with it. svc.open() case s@Open(svc) => // service died; try to reconnect. if (state.compareAndSet(s, Idle)) svc.close() apply(conn) case Idle => val done = new Promise[Unit] if (state.compareAndSet(Idle, Awaiting(done))) { connect(done, conn) awaitApply(done, conn) } else { apply(conn) } case Awaiting(done) => awaitApply(done, conn) case Closed => Future.exception(Failure(new ServiceClosedException)) } /** * @inheritdoc * * The status of a [[SingletonPool]] is the worse of the * the underlying status and the status of the currently * cached service, if any. */ override def status: Status = state.get match { case Closed => Status.Closed case Open(svc) => // We don't account for closed services as these will // be reestablished on the next request. svc.status match { case Status.Closed => underlying.status case status => Status.worst(status, underlying.status) } case Idle | Awaiting(_) => // This could also be Status.worst(underlying.status, Status.Busy(p)); // in practice this probably won't make much of a difference, though, // since pending requests are anyway queued. 
underlying.status } /** * @inheritdoc * * SingletonPool closes asynchronously; the underlying connection is * closed once all references are returned. */ final def close(deadline: Time): Future[Unit] = closeService(deadline) before underlying.close(deadline) @tailrec private[this] def closeService(deadline: Time): Future[Unit] = state.get match { case Idle => if (!state.compareAndSet(Idle, Closed)) closeService(deadline) else Future.Done case s@Open(svc) => if (!state.compareAndSet(s, Closed)) closeService(deadline) else svc.close(deadline) case s@Awaiting(done) => if (!state.compareAndSet(s, Closed)) closeService(deadline) else { done.raise(new ServiceClosedException) Future.Done } case Closed => Future.Done } }
sveinnfannar/finagle
finagle-core/src/main/scala/com/twitter/finagle/pool/SingletonPool.scala
Scala
apache-2.0
7,263
/* * Copyright 2001-2013 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest.words import org.scalatest.matchers._ import org.scalactic._ import scala.util.matching.Regex import org.scalatest.FailureMessages import org.scalatest.Resources import org.scalatest.UnquotedString import org.scalatest.MatchersHelper.endWithRegexWithGroups /** * This class is part of the ScalaTest matchers DSL. Please see the documentation for <a href="../Matchers.html"><code>Matchers</code></a> for an overview of * the matchers DSL. 
* * @author Bill Venners */ final class EndWithWord { /** * This method enables the following syntax: * * <pre class="stHighlight"> * "1.7b" should (endWith ("1.7b") and endWith ("7b")) * ^ * </pre> */ def apply(right: String): Matcher[String] = new Matcher[String] { def apply(left: String): MatchResult = MatchResult( left endsWith right, Resources("didNotEndWith"), Resources("endedWith"), Vector(left, right) ) override def toString: String = "endWith (" + Prettifier.default(right) + ")" } /** * This method enables the following syntax: * * <pre class="stHighlight"> * val decimal = """(-)?(\\d+)(\\.\\d*)?""" * "b1.7" should (endWith regex (decimal) and endWith regex (decimal)) * ^ * </pre> */ def regex[T <: String](right: T): Matcher[T] = regex(right.r) /** * This method enables the following syntax: * * <pre class="stHighlight"> * string should not { endWith regex ("a(b*)c" withGroup "bb") } * ^ * </pre> */ def regex(regexWithGroups: RegexWithGroups) = new Matcher[String] { def apply(left: String): MatchResult = endWithRegexWithGroups(left, regexWithGroups.regex, regexWithGroups.groups) override def toString: String = "endWith regex " + Prettifier.default(regexWithGroups) } /** * This method enables the following syntax: * * <pre class="stHighlight"> * val decimalRegex = """(-)?(\\d+)(\\.\\d*)?""".r * "b1.7" should (endWith regex (decimalRegex) and endWith regex (decimalRegex)) * ^ * </pre> */ def regex(rightRegex: Regex): Matcher[String] = new Matcher[String] { def apply(left: String): MatchResult = { val allMatches = rightRegex.findAllIn(left) MatchResult( allMatches.hasNext && (allMatches.end == left.length), Resources("didNotEndWithRegex"), Resources("endedWithRegex"), Vector(left, UnquotedString(rightRegex.toString)) ) } override def toString: String = "endWith regex \\"" + Prettifier.default(rightRegex) + "\\"" } /** * Overrides toString to return "endWith" */ override def toString: String = "endWith" }
travisbrown/scalatest
src/main/scala/org/scalatest/words/EndWithWord.scala
Scala
apache-2.0
3,426
/* * Copyright 2016 The BigDL Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.analytics.bigdl.dllib.integration.torch import com.intel.analytics.bigdl.dllib.nn.{GradientChecker, SpatialMaxPooling} import com.intel.analytics.bigdl.dllib.tensor.Tensor import com.intel.analytics.bigdl.dllib.utils.RandomGenerator._ import scala.math._ import scala.util.Random import com.intel.analytics.bigdl._ @com.intel.analytics.bigdl.tags.Serial class SpatialMaxPoolingSpec extends TorchSpec { "A SpatialMaxPooling" should "generate correct output and gradInput" in { torchCheck() val module = new SpatialMaxPooling[Double](2, 2) val input = Tensor[Double](1, 3, 3) input(Array(1, 1, 1)) = 0.53367262030952 input(Array(1, 1, 2)) = 0.79637692729011 input(Array(1, 1, 3)) = 0.56747663160786 input(Array(1, 2, 1)) = 0.18039962812327 input(Array(1, 2, 2)) = 0.24608615692705 input(Array(1, 2, 3)) = 0.22956256521866 input(Array(1, 3, 1)) = 0.30736334621906 input(Array(1, 3, 2)) = 0.59734606579877 input(Array(1, 3, 3)) = 0.42989541869611 val gradOutput = Tensor[Double](1, 1, 1) gradOutput(Array(1, 1, 1)) = 0.023921491578221 val start = System.nanoTime() val output = module.forward(input) val gradInput = module.backward(input, gradOutput) val end = System.nanoTime() val scalaTime = end - start val code = "output = module:forward(input)\\n" + "gradInput = module:backward(input,gradOutput)" val (luaTime, torchResult) = TH.run(code, Map("module" -> module, "input" -> input, "gradOutput" 
-> gradOutput), Array("output", "gradInput")) val luaOutput1 = torchResult("output").asInstanceOf[Tensor[Double]] val luaOutput2 = torchResult("gradInput").asInstanceOf[Tensor[Double]] luaOutput1.map(output, (v1, v2) => { assert(abs(v1 - v2) == 0); v1 }) luaOutput2.map(gradInput, (v1, v2) => { assert(abs(v1 - v2) == 0); v1 }) println("Test case : SpatialMaxPooling, Torch : " + luaTime + " s, Scala : " + scalaTime / 1e9 + " s") } "A SpatialMaxPooling" should "be good in gradient check for input" in { torchCheck() val seed = 100 RNG.setSeed(seed) val layer = new SpatialMaxPooling[Double](2, 2) val input = Tensor[Double](1, 3, 3).apply1(e => Random.nextDouble()) val checker = new GradientChecker(1e-4) checker.checkLayer[Double](layer, input, 1e-3) should be(true) } }
intel-analytics/BigDL
scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/integration/torch/SpatialMaxPoolingSpec.scala
Scala
apache-2.0
2,995
/*** * Copyright 2016 Rackspace US, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.rackspace.com.papi.components.checker import com.rackspace.com.papi.components.checker.RunAssertionsHandler._ import com.rackspace.com.papi.components.checker.servlet._ import com.rackspace.com.papi.components.checker.step.results.Result import com.rackspace.cloud.api.wadl.Converters._ import org.junit.runner.RunWith import org.scalatest.junit.JUnitRunner import scala.collection.JavaConversions._ import scala.xml.Elem import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.databind.JsonNode @RunWith(classOf[JUnitRunner]) class ValidatorWADLJsonPlainParamSuite extends BaseValidatorSuite { /// /// Configs /// val baseConfig = { val c = TestConfig() c.removeDups = false c.checkWellFormed = false c.checkPlainParams = false c.enableCaptureHeaderExtension = false c } val baseWithPlainParams = { val c = TestConfig() c.removeDups = false c.checkWellFormed = true c.checkPlainParams = true c.enableCaptureHeaderExtension = false c } val baseWithCaptureHeaders = { val c = TestConfig() c.removeDups = false c.checkWellFormed = true c.checkPlainParams = true c.enableCaptureHeaderExtension = true c } val baseWithRemoveDups = { val c = TestConfig() c.removeDups = true c.checkWellFormed = true c.checkPlainParams = true c.enableCaptureHeaderExtension = false c } val baseWithJoinXPaths = { val c = TestConfig() c.removeDups = false c.joinXPathChecks = true c.checkWellFormed = 
true c.checkPlainParams = true c.enableCaptureHeaderExtension = false c } val baseWithJoinXPathsAndRemoveDups = { val c = TestConfig() c.removeDups = true c.joinXPathChecks = true c.checkWellFormed = true c.checkPlainParams = true c.enableCaptureHeaderExtension = false c } // RaxRoles Configs val baseWithPlainParamsRaxRoles = { val c = TestConfig() c.enableRaxRolesExtension = true c.removeDups = false c.checkWellFormed = true c.checkPlainParams = true c.enableCaptureHeaderExtension = false c } val baseWithRemoveDupsRaxRoles = { val c = TestConfig() c.enableRaxRolesExtension = true c.removeDups = true c.checkWellFormed = true c.checkPlainParams = true c.enableCaptureHeaderExtension = false c } val baseWithJoinXPathsRaxRoles = { val c = TestConfig() c.enableRaxRolesExtension = true c.removeDups = false c.joinXPathChecks = true c.checkWellFormed = true c.checkPlainParams = true c.enableCaptureHeaderExtension = false c } val baseWithJoinXPathsAndRemoveDupsRaxRoles = { val c = TestConfig() c.enableRaxRolesExtension = true c.removeDups = true c.joinXPathChecks = true c.checkWellFormed = true c.checkPlainParams = true c.enableCaptureHeaderExtension = false c } // RaxRoles Configs Masked val baseWithPlainParamsRaxRolesMask = { val c = TestConfig() c.enableRaxRolesExtension = true c.maskRaxRoles403 = true c.removeDups = false c.checkWellFormed = true c.checkPlainParams = true c.enableCaptureHeaderExtension = false c } val baseWithRemoveDupsRaxRolesMask = { val c = TestConfig() c.enableRaxRolesExtension = true c.maskRaxRoles403 = true c.removeDups = true c.checkWellFormed = true c.checkPlainParams = true c.enableCaptureHeaderExtension = false c } val baseWithJoinXPathsRaxRolesMask = { val c = TestConfig() c.enableRaxRolesExtension = true c.maskRaxRoles403 = true c.removeDups = false c.joinXPathChecks = true c.checkWellFormed = true c.checkPlainParams = true c.enableCaptureHeaderExtension = false c } val baseWithJoinXPathsAndRemoveDupsRaxRolesMask = { val c = TestConfig() 
// Tail of a checker-config builder started above: rax:roles enforcement with
// 403 masking, duplicate removal, XPath joining, well-formedness and plain-param
// checks on; the capture-header extension off.
c.enableRaxRolesExtension = true
c.maskRaxRoles403 = true
c.removeDups = true
c.joinXPathChecks = true
c.checkWellFormed = true
c.checkPlainParams = true
c.enableCaptureHeaderExtension = false
c }
// WADL whose JSON plain params address the body via the $body variable.
// PUT /a/b asserts one param per JSON type mapping (map, string, boolean,
// array, double, null); POST /a/b requires firstName/lastName/age keys;
// POST /c accepts any JSON representation.
val WADL_withJSONParams = <application xmlns="http://wadl.dev.java.net/2009/02" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test"> <resources base="https://test.api.openstack.com"> <resource path="/a/b"> <method name="PUT"> <request> <representation mediaType="application/json"> <!-- Ensure that all types map --> <param id="map" style="plain" path="$body instance of map(*)" required="true"/> <param id="string" style="plain" path="$body?firstName instance of xsd:string" required="true"/> <param id="map2" style="plain" path="$body?stuff instance of map(*)" required="true"/> <param id="bool" style="plain" path="$body?stuff?thing instance of xsd:boolean" required="true"/> <param id="array" style="plain" path="$body?stuff?array instance of array(*)" required="true"/> <param id="map3" style="plain" path="$body?stuff?obj instance of map(*)" required="true"/> <param id="int" style="plain" path="$body?stuff?array?1 instance of xsd:double" required="true"/> <param id="decimal" style="plain" path="$body?stuff?array2?1 instance of xsd:double" required="true"/> <param id="string2" style="plain" path="$body?stuff?string instance of xsd:string" required="true"/> <param id="string3" style="plain" path="$body?stuff?obj?a instance of xsd:string" required="true"/> <param id="null" style="plain" path="empty($body?stuff?null)" required="true"/> </representation> </request> </method> <method name="POST"> <request> <representation mediaType="application/json"> <param id="firstName" style="plain" path="$body?firstName" required="true"/> <param id="lastName" style="plain" path="$body?lastName" required="true"/> <param id="age" style="plain" path="$body?age" required="true"/> </representation> </request> </method> </resource> <resource path="/c"> <method name="POST"> <request> <representation mediaType="application/json"/> </request> </method> </resource> </resources> </application>
// Same contract as WADL_withJSONParams but params address the body via the
// $_ variable; POST /c additionally serializes the whole body into the
// X-JSON capture header (rax:captureHeader extension).
val WADL_withJSONParams2 = <application xmlns="http://wadl.dev.java.net/2009/02" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:rax="http://docs.rackspace.com/api" xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test"> <resources base="https://test.api.openstack.com"> <resource path="/a/b"> <method name="PUT"> <request> <representation mediaType="application/json"> <!-- Ensure that all types map --> <param id="map" style="plain" path="$_ instance of map(*)" required="true"/> <param id="string" style="plain" path="$_?firstName instance of xsd:string" required="true"/> <param id="map2" style="plain" path="$_?stuff instance of map(*)" required="true"/> <param id="bool" style="plain" path="$_?stuff?thing instance of xsd:boolean" required="true"/> <param id="array" style="plain" path="$_?stuff?array instance of array(*)" required="true"/> <param id="map3" style="plain" path="$_?stuff?obj instance of map(*)" required="true"/> <param id="int" style="plain" path="$_?stuff?array?1 instance of xsd:double" required="true"/> <param id="decimal" style="plain" path="$_?stuff?array2?1 instance of xsd:double" required="true"/> <param id="string2" style="plain" path="$_?stuff?string instance of xsd:string" required="true"/> <param id="string3" style="plain" path="$_?stuff?obj?a instance of xsd:string" required="true"/> <param id="null" style="plain" path="empty($_?stuff?null)" required="true"/> </representation> </request> </method> <method name="POST"> <request> <representation mediaType="application/json"> <param id="firstName" style="plain" path="$_?firstName" required="true"/> <param id="lastName" style="plain" path="$_?lastName" required="true"/> <param id="age" style="plain" path="$_?age" required="true"/> </representation> </request> </method> </resource> <resource path="/c"> <method name="POST"> <request> <representation mediaType="application/json"> <param id="captureJSON" style="plain" path="serialize($_, map {'method' : 'json', 'indent' : false()})" required="true" rax:captureHeader="X-JSON"/> </representation> </request> </method> </resource> </resources> </application>
// Like WADL_withJSONParams2, but every plain param carries rax:code="401"
// and a custom rax:message, so failures report those instead of the defaults.
val WADL_withJSONParamsCodeMessage = <application xmlns="http://wadl.dev.java.net/2009/02" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:rax="http://docs.rackspace.com/api" xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test"> <resources base="https://test.api.openstack.com"> <resource path="/a/b"> <method name="PUT"> <request> <representation mediaType="application/json"> <!-- Ensure that all types map --> <param id="map" style="plain" path="$_ instance of map(*)" required="true" rax:code="401" rax:message="Expecting an object"/> <param id="string" style="plain" path="$_?firstName instance of xsd:string" required="true" rax:code="401" rax:message="Expecting a string for firstName"/> <param id="map2" style="plain" path="$_?stuff instance of map(*)" required="true" rax:code="401" rax:message="Expecting an object for stuff"/> <param id="bool" style="plain" path="$_?stuff?thing instance of xsd:boolean" required="true" rax:code="401" rax:message="Expecting a boolean for stuff?thing"/> <param id="array" style="plain" path="$_?stuff?array instance of array(*)" required="true" rax:code="401" rax:message="Expecting an array for stuff?array "/> <param id="map3" style="plain" path="$_?stuff?obj instance of map(*)" required="true" rax:code="401" rax:message="Expecting an object for stuff?obj "/> <param id="int" style="plain" path="$_?stuff?array?1 instance of xsd:double" required="true" rax:code="401" rax:message="Expecting a double for stuff?array?1 "/> <param id="decimal" style="plain" path="$_?stuff?array2?1 instance of xsd:double" required="true" rax:code="401" rax:message="Expecting a double for stuff?array2?1"/> <param id="string2" style="plain" path="$_?stuff?string instance of xsd:string" required="true" rax:code="401" rax:message="Expecting a string for stuff?string"/> <param id="string3" style="plain" path="$_?stuff?obj?a instance of xsd:string" required="true" rax:code="401" rax:message="Expecting a string for stuff?obj?a"/> <param id="null" style="plain" path="empty($_?stuff?null)" required="true" rax:code="401" rax:message="Expecting null for stuff?null "/> </representation> </request> </method> <method name="POST"> <request> <representation mediaType="application/json"> <param id="firstName" style="plain" path="$_?firstName" required="true" rax:code="401" rax:message="Expecting a firstName"/> <param id="lastName" style="plain" path="$_?lastName" required="true" rax:code="401" rax:message="Expecting a lastName"/> <param id="age" style="plain" path="$_?age" required="true" rax:code="401" rax:message="Expecting an age "/> </representation> </request> </method> </resource> <resource path="/c"> <method name="POST"> <request> <representation mediaType="application/json"/> </request> </method> </resource> </resources> </application>
// $body variant guarded by rax:roles: PUT /a/b needs admin or ab:admin,
// POST /c needs admin or c:admin; POST /a/b has no role restriction.
val WADL_withRAXRolesJSONParams = <application xmlns="http://wadl.dev.java.net/2009/02" xmlns:rax="http://docs.rackspace.com/api" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:tst="http://www.rackspace.com/repose/wadl/checker/step/test"> <resources base="https://test.api.openstack.com"> <resource path="/a/b"> <method name="PUT" rax:roles="admin ab:admin"> <request> <representation mediaType="application/json"> <!-- Ensure that all types map --> <param id="map" style="plain" path="$body instance of map(*)" required="true"/> <param id="string" style="plain" path="$body?firstName instance of xsd:string" required="true"/> <param id="map2" style="plain" path="$body?stuff instance of map(*)" required="true"/> <param id="bool" style="plain" path="$body?stuff?thing instance of xsd:boolean" required="true"/> <param id="array" style="plain" path="$body?stuff?array instance of array(*)" required="true"/> <param id="map3" style="plain" path="$body?stuff?obj instance of map(*)" required="true"/> <param id="int" style="plain" path="$body?stuff?array?1 instance of xsd:double" required="true"/> <param id="decimal" style="plain" path="$body?stuff?array2?1 instance of xsd:double" required="true"/> <param id="string2" style="plain" path="$body?stuff?string instance of xsd:string" required="true"/> <param id="string3" style="plain" path="$body?stuff?obj?a instance of xsd:string" required="true"/> <param id="null" style="plain" path="empty($body?stuff?null)" required="true"/> </representation> </request> </method> <method name="POST"> <request> <representation mediaType="application/json"> <param id="firstName" style="plain" path="$body?firstName" required="true"/> <param id="lastName" style="plain" path="$body?lastName" required="true"/> <param id="age" style="plain" path="$body?age" required="true"/> </representation> </request> </method> </resource> <resource path="/c"> <method name="POST" rax:roles="admin c:admin"> <request> <representation mediaType="application/json"/> </request> </method> </resource> </resources> </application>
// WADL/config maps that drive the data-driven test runs: each (wadl, config)
// pair is crossed with the assertion sets defined below.
val jsonPlainWADLs = Map[String, Elem]("WADL with $body json params"->WADL_withJSONParams, "WADL with $_ json params"->WADL_withJSONParams2, "WADL with rax:roles -- but disabled"->WADL_withRAXRolesJSONParams)
val jsonMessageWADLs = Map[String, Elem]("WADL with code and message json params"->WADL_withJSONParamsCodeMessage)
val jsonPlainConfigs = Map[String, Config]("base config with plain params"->baseWithPlainParams, "base with capture headers" -> baseWithCaptureHeaders, "base with remove dups"->baseWithRemoveDups, "base with join xpath"->baseWithJoinXPaths, "base with remove dups and join xpaths"->baseWithJoinXPathsAndRemoveDups)
val jsonPlainDisabledConfigs = Map[String,Config]("base config plain params disabled"->baseConfig)
val jsonCaptureWADL = Map[String, Elem]("WADL with capture json"->WADL_withJSONParams2)
val jsonCaptureConfig = Map[String,
Config]("base with capture headers" -> baseWithCaptureHeaders)
// rax:roles-enabled WADL/config combinations; the *Mask* configs additionally
// mask 403s (failures surface as 404/405 instead).
val jsonPlainRaxRolesWADL = Map[String, Elem]("WADL with json plain params rax:roles enabled"->WADL_withRAXRolesJSONParams)
val jsonPlainRaxRolesConfigs = Map[String, Config]("base config with plain params rax:roles"->baseWithPlainParamsRaxRoles, "base with remove dups rax:roles enabled"->baseWithRemoveDupsRaxRoles, "base with join xpath rax:roles enabled"->baseWithJoinXPathsRaxRoles, "base with remove dups and join xpaths rax:roles enabled"->baseWithJoinXPathsAndRemoveDupsRaxRoles)
val jsonPlainRaxRolesMaskConfigs = Map[String, Config]("base config with plain params rax:roles (mask)"->baseWithPlainParamsRaxRolesMask, "base with remove dups rax:roles enabled (mask)"->baseWithRemoveDupsRaxRolesMask, "base with join xpath rax:roles enabled (mask)"->baseWithJoinXPathsRaxRolesMask, "base with remove dups and join xpaths rax:roles enabled (mask)"->baseWithJoinXPathsAndRemoveDupsRaxRolesMask)
//
// Assertions!
//
// Registers tests asserting that well-formed requests validate cleanly:
// goodJSON / goodJSON_Schema* bodies on PUT+POST /a/b, and every JSON value
// kind (object, array, string, boolean, number, null) on the open POST /c.
// No X-ROLES header is sent — used where rax:roles is disabled.
def happyPathAssertions(validator : Validator, wadlDesc : String, configDesc : String) {
  test (s"A PUT on /a/b should validate with goodJSON on $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json",goodJSON, false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /a/b should validate with goodJSON_Schema1 on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/a/b", "application/json",goodJSON_Schema1, false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /a/b should validate with goodJSON_Schema2 on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/a/b", "application/json",goodJSON_Schema2, false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /a/b should validate with goodJSON_Schema3 on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/a/b", "application/json",goodJSON_Schema3, false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /c should validate with goodJSON on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json",goodJSON, false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /c should validate with goodJSON_Schema1 on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json",goodJSON_Schema1, false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /c should validate with goodJSON_Schema2 on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json",goodJSON_Schema2, false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /c should validate with goodJSON_Schema3 on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json",goodJSON_Schema3, false, Map[String,List[String]]()), response, chain)
  }
  // The following cover each bare JSON value kind on the unconstrained /c.
  test (s"A POST on /c should validate with string on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json","\"A String\"", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /c should validate with boolean on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json","false", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /c should validate with number on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json","123.4", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /c should validate with null on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json","null", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /c should validate with object on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json","""{"true" : true }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /c should validate with array on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json","""[true]""", false, Map[String,List[String]]()), response, chain)
  }
}
// Same happy-path checks but with rax:roles enforced: role-guarded requests
// send an X-ROLES header with an authorized role (admin / ab:admin / c:admin);
// the unguarded POST /a/b sends none.
def happyPathRaxRolesAssertions(validator : Validator, wadlDesc : String, configDesc : String) {
  test (s"A PUT on /a/b should validate with goodJSON on $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json",goodJSON, false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain)
  }
  test (s"A PUT on /a/b should validate with goodJSON on $wadlDesc with $configDesc (ab:admin)") {
    validator.validate(request("PUT", "/a/b", "application/json",goodJSON, false, Map[String,List[String]]("X-ROLES"->List("ab:admin"))), response, chain)
  }
  test (s"A POST on /a/b should validate with goodJSON_Schema1 on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/a/b", "application/json",goodJSON_Schema1, false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /a/b should validate with goodJSON_Schema2 on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/a/b", "application/json",goodJSON_Schema2, false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /a/b should validate with goodJSON_Schema3 on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/a/b", "application/json",goodJSON_Schema3, false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /c should validate with goodJSON on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json",goodJSON, false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain)
  }
  test (s"A POST on /c should validate with goodJSON_Schema1 on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json",goodJSON_Schema1, false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain)
  }
  test (s"A POST on /c should validate with goodJSON_Schema2 on $wadlDesc with $configDesc") {
    // (continues the goodJSON_Schema2 test opened above)
    validator.validate(request("POST", "/c", "application/json",goodJSON_Schema2, false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain)
  }
  test (s"A POST on /c should validate with goodJSON_Schema3 on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json",goodJSON_Schema3, false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain)
  }
  // Each bare JSON value kind on /c, authorized via admin (array via c:admin).
  test (s"A POST on /c should validate with string on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json","\"A String\"", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain)
  }
  test (s"A POST on /c should validate with boolean on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json","false", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain)
  }
  test (s"A POST on /c should validate with number on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json","123.4", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain)
  }
  test (s"A POST on /c should validate with null on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json","null", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain)
  }
  test (s"A POST on /c should validate with object on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json","""{"true" : true }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain)
  }
  test (s"A POST on /c should validate with array on $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/c", "application/json","""[true]""", false, Map[String,List[String]]("X-ROLES"->List("c:admin"))), response, chain)
  }
}
// rax:roles enforced WITHOUT 403 masking: an unauthorized role ("bdmin",
// "user"/"nobody") or a missing X-ROLES header must yield 403 "forbidden".
def BadAccessRaxRolesAssertions(validator : Validator, wadlDesc : String, configDesc : String) {
  test (s"A PUT on /a/b should fail with bad access on bad role goodJSON on $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json",goodJSON, false, Map[String,List[String]]("X-ROLES"->List("bdmin"))), response, chain), 403, List("forbidden"))
  }
  test (s"A POST on /c should fail with bad access on bad role goodJSON_Schema2 on $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("POST", "/c", "application/json",goodJSON_Schema2, false, Map[String,List[String]]("X-ROLES"->List("user", "nobody"))), response, chain), 403, List("forbidden"))
  }
  test (s"A PUT on /a/b should fail with bad access on no role goodJSON on $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json",goodJSON, false, Map[String,List[String]]()), response, chain), 403, List("forbidden"))
  }
  test (s"A POST on /c should fail with bad access on no role goodJSON_Schema2 on $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("POST", "/c", "application/json",goodJSON_Schema2, false, Map[String,List[String]]()), response, chain), 403, List("forbidden"))
  }
}
// rax:roles enforced WITH 403 masking: the same unauthorized requests are
// masked as 405 "Bad method" (PUT /a/b) or 404 "not found" (POST /c) so the
// guarded resource's existence is not revealed.
def BadAccessRaxRolesMaskedAssertions(validator : Validator, wadlDesc : String, configDesc : String) {
  test (s"A PUT on /a/b should fail with bad access on bad role goodJSON on $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json",goodJSON, false, Map[String,List[String]]("X-ROLES"->List("bdmin"))), response, chain), 405, List("Bad method"))
  }
  test (s"A POST on /c should fail with bad access on bad role goodJSON_Schema2 on $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("POST", "/c", "application/json",goodJSON_Schema2, false, Map[String,List[String]]("X-ROLES"->List("user", "nobody"))), response, chain), 404, List("not found"))
  }
  test (s"A PUT on /a/b should fail with bad access on no role goodJSON on $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT",
      // (continues the masked no-role PUT assertion started above)
      "/a/b", "application/json",goodJSON, false, Map[String,List[String]]()), response, chain), 405, List("Bad method"))
  }
  test (s"A POST on /c should fail with bad access on no role goodJSON_Schema2 on $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("POST", "/c", "application/json",goodJSON_Schema2, false, Map[String,List[String]]()), response, chain), 404, List("not found"))
  }
}
// Requests that violate the plain params must still PASS here — used with
// configs where plain-param checking is disabled, proving the params are
// actually switched off rather than silently enforced.
// NOTE(review): several test names say "?fistName" (sic) — runtime string,
// left as-is; presumably meant "?firstName".
def sadPathPassingAssertions (validator : Validator, wadlDesc : String, configDesc : String) {
  test (s"A PUT on /a/b should pass with array body $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json",""" [1, 2, 3] """, false, Map[String,List[String]]()), response, chain)
  }
  test (s"A PUT on /a/b should pass with a bad firstName $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json",""" { "firstName" : [1, 2, 3] }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A PUT on /a/b should pass with a bad stuff $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : true }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A PUT on /a/b should pass with a bad stuff?thing $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : [3, 4] } }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A PUT on /a/b should pass with a bad stuff?array $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : {"false" : null} } }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A PUT on /a/b should pass with a bad stuff?obj $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "obj" : "not!" } }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A PUT on /a/b should pass with a bad stuff?array?1 $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [false,2,3], "obj" : {} } }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A PUT on /a/b should pass with a bad stuff?array2?1 $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : ["hello",2,3], "obj" : {} } }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A PUT on /a/b should pass with a bad stuff?string $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : [1,2,3], "obj" : {}, "string" : null } }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A PUT on /a/b should pass with a bad stuff?obj?a $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : [1,2,3], "obj" : { "b" : "B", "c" : "C" }, "string" : "foo" } }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A PUT on /a/b should pass with a bad stuff?null $wadlDesc with $configDesc") {
    validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : [1,2,3], "obj" : { "a" : "A", "b" : "B", "c" : "C" }, "string" : "foo", "null" : "bar" } }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /a/b should pass with a missing ?fistName $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/a/b", "application/json","""{ "name" : "foo" }""", false, Map[String,List[String]]()),
      // (continues the missing-firstName POST validation started above)
      response, chain)
  }
  test (s"A POST on /a/b should pass with a missing ?lastName $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/a/b", "application/json","""{ "firstName" : "foo" }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /a/b should pass with a missing ?age $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/a/b", "application/json","""{ "firstName" : "foo", "lastName" : "bar" }""", false, Map[String,List[String]]()), response, chain)
  }
  test (s"A POST on /a/b should pass with a bad ? $wadlDesc with $configDesc") {
    validator.validate(request("POST", "/a/b", "application/json","""true""", false, Map[String,List[String]]()), response, chain)
  }
}
// Requests violating each plain param must FAIL with 400 and an error
// message containing the offending lookup path and expected type — used
// with configs where plain-param checking is enabled (no rax:roles).
def sadPathAssertions(validator : Validator, wadlDesc : String, configDesc : String) {
  test (s"A PUT on /a/b should fail with array body $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json",""" [1, 2, 3] """, false, Map[String,List[String]]()), response, chain), 400, List("Expecting","instance of map(*)"))
  }
  test (s"A PUT on /a/b should fail with a bad firstName $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json",""" { "firstName" : [1, 2, 3] }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","?firstName"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : true }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","?stuff", "instance of map(*)"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?thing $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : [3, 4] } }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","stuff?thing", "instance of xsd:boolean"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?array $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : {"false" : null} } }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","stuff?array", "instance of array(*)"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?obj $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "obj" : "not!" } }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","stuff?obj", "instance of map(*)"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?array?1 $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [false,2,3], "obj" : {} } }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","stuff?array?1", "instance of xsd:double"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?array2?1 $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : ["hello",2,3], "obj" : {} } }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","stuff?array2?1", "instance of xsd:double"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?string $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : [1,2,3], "obj" : {}, "string" : null } }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","stuff?string", "instance of xsd:string"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?obj?a $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : [1,2,3], "obj" : { "b" : "B", "c" : "C" }, "string" : "foo" } }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","stuff?obj?a", "instance of xsd:string"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?null $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : [1,2,3], "obj" : { "a" : "A", "b" : "B", "c" : "C" }, "string" : "foo", "null" : "bar" } }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","stuff?null", "empty"))
  }
  test (s"A POST on /a/b should fail with a missing ?fistName $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("POST", "/a/b", "application/json","""{ "name" : "foo" }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","?firstName"))
  }
  test (s"A POST on /a/b should fail with a missing ?lastName $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("POST", "/a/b", "application/json","""{ "firstName" : "foo" }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","?lastName"))
  }
  test (s"A POST on /a/b should fail with a missing ?age $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("POST", "/a/b", "application/json","""{ "firstName" : "foo", "lastName" : "bar" }""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","?age"))
  }
  test (s"A POST on /a/b should fail with a bad ? $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("POST", "/a/b", "application/json","""true""", false, Map[String,List[String]]()), response, chain), 400, List("Expecting","?firstName", "supplied value has item type xs:boolean"))
  }
}
// Same 400 plain-param failures as sadPathAssertions, but every request is
// authorized with X-ROLES: admin — proving rax:roles lets the request through
// to param checking rather than masking the validation error.
def sadPathRaxRolesAssertions(validator : Validator, wadlDesc : String, configDesc : String) {
  test (s"A PUT on /a/b should fail with array body $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json",""" [1, 2, 3] """, false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","instance of map(*)"))
  }
  test (s"A PUT on /a/b should fail with a bad firstName $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json",""" { "firstName" : [1, 2, 3] }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","?firstName"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : true }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","?stuff", "instance of map(*)"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?thing $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : [3, 4] } }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","stuff?thing", "instance of xsd:boolean"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?array $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : {"false" : null} } }""", false,
      // (continues the bad stuff?array assertion started above)
      Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","stuff?array", "instance of array(*)"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?obj $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "obj" : "not!" } }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","stuff?obj", "instance of map(*)"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?array?1 $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [false,2,3], "obj" : {} } }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","stuff?array?1", "instance of xsd:double"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?array2?1 $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : ["hello",2,3], "obj" : {} } }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","stuff?array2?1", "instance of xsd:double"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?string $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : [1,2,3], "obj" : {}, "string" : null } }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","stuff?string", "instance of xsd:string"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?obj?a $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : [1,2,3], "obj" : { "b" : "B", "c" : "C" }, "string" : "foo" } }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","stuff?obj?a", "instance of xsd:string"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?null $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : [1,2,3], "obj" : { "a" : "A", "b" : "B", "c" : "C" }, "string" : "foo", "null" : "bar" } }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","stuff?null", "empty"))
  }
  test (s"A POST on /a/b should fail with a missing ?fistName $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("POST", "/a/b", "application/json","""{ "name" : "foo" }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","?firstName"))
  }
  test (s"A POST on /a/b should fail with a missing ?lastName $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("POST", "/a/b", "application/json","""{ "firstName" : "foo" }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","?lastName"))
  }
  test (s"A POST on /a/b should fail with a missing ?age $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("POST", "/a/b", "application/json","""{ "firstName" : "foo", "lastName" : "bar" }""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","?age"))
  }
  test (s"A POST on /a/b should fail with a bad ? $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("POST", "/a/b", "application/json","""true""", false, Map[String,List[String]]("X-ROLES"->List("admin"))), response, chain), 400, List("Expecting","?firstName", "supplied value has item type xs:boolean"))
  }
}
// For WADL_withJSONParamsCodeMessage: plain-param failures must carry the
// custom rax:code (401) and fragments of the custom rax:message.
def sadPathCodeMessageAssertions(validator : Validator, wadlDesc : String, configDesc : String) {
  test (s"A PUT on /a/b should fail with array body $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json",""" [1, 2, 3] """, false, Map[String,List[String]]()), response, chain), 401, List("Expecting a","object"))
  }
  test (s"A PUT on /a/b should fail with a bad firstName $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json",""" { "firstName" : [1, 2, 3] }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting a","firstName"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : true }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting an","stuff", "object"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?thing $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : [3, 4] } }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting a","stuff?thing", "boolean"))
  }
  test (s"A PUT on /a/b should fail with a bad stuff?array $wadlDesc with $configDesc") {
    assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : {"false" : null} } }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting a","stuff?array", "an array"))
  }
  test (s"A PUT on /a/b should
fail with a bad stuff?obj $wadlDesc with $configDesc") { assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "obj" : "not!" } }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting a","stuff?obj", "object")) } test (s"A PUT on /a/b should fail with a bad stuff?array?1 $wadlDesc with $configDesc") { assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [false,2,3], "obj" : {} } }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting a","stuff?array?1", "a double")) } test (s"A PUT on /a/b should fail with a bad stuff?array2?1 $wadlDesc with $configDesc") { assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : ["hello",2,3], "obj" : {} } }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting a","stuff?array2?1", "a double")) } test (s"A PUT on /a/b should fail with a bad stuff?string $wadlDesc with $configDesc") { assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : [1,2,3], "obj" : {}, "string" : null } }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting a","stuff?string", "a string")) } test (s"A PUT on /a/b should fail with a bad stuff?obj?a $wadlDesc with $configDesc") { assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : [1,2,3], "obj" : { "b" : "B", "c" : "C" }, "string" : "foo" } }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting a","stuff?obj?a", "a string")) } test (s"A PUT on /a/b should fail with a 
bad stuff?null $wadlDesc with $configDesc") { assertResultFailed(validator.validate(request("PUT", "/a/b", "application/json","""{ "firstName" : "Jorge", "stuff" : { "thing" : true, "array" : [1,2,3], "array2" : [1,2,3], "obj" : { "a" : "A", "b" : "B", "c" : "C" }, "string" : "foo", "null" : "bar" } }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting","stuff?null")) } test (s"A POST on /a/b should fail with a missing ?fistName $wadlDesc with $configDesc") { assertResultFailed(validator.validate(request("POST", "/a/b", "application/json","""{ "name" : "foo" }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting a","firstName")) } test (s"A POST on /a/b should fail with a missing ?lastName $wadlDesc with $configDesc") { assertResultFailed(validator.validate(request("POST", "/a/b", "application/json","""{ "firstName" : "foo" }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting a","lastName")) } test (s"A POST on /a/b should fail with a missing ?age $wadlDesc with $configDesc") { assertResultFailed(validator.validate(request("POST", "/a/b", "application/json","""{ "firstName" : "foo", "lastName" : "bar" }""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting an","age")) } test (s"A POST on /a/b should fail with a bad ? 
$wadlDesc with $configDesc") { assertResultFailed(validator.validate(request("POST", "/a/b", "application/json","""true""", false, Map[String,List[String]]()), response, chain), 401, List("Expecting a","firstName", "supplied value has item type xs:boolean")) } } def jsonCaptureAssertions (validator : Validator, wadlDesc : String, configDesc : String) { test (s"A POST on /c with an atomic type should cature that type $wadlDesc with $configDesc") { val req = request("POST", "/c", "application/json","""52""", false, Map[String,List[String]]()) req.setAttribute(ASSERT_FUNCTION, (csReq: CheckerServletRequest, csResp: CheckerServletResponse, res: Result) => { // Correct header should be set... assert(csReq.getHeaders("X-JSON").toList == List("52")) }) validator.validate(req, response, chain) } test (s"A POST on /c with a null should cature that type $wadlDesc with $configDesc") { val req = request("POST", "/c", "application/json","""null""", false, Map[String,List[String]]()) req.setAttribute(ASSERT_FUNCTION, (csReq: CheckerServletRequest, csResp: CheckerServletResponse, res: Result) => { // Correct header should be set... assert(csReq.getHeaders("X-JSON").toList == List("null")) }) validator.validate(req, response, chain) } test (s"A POST on /c with an array type should cature that array $wadlDesc with $configDesc") { val req = request("POST", "/c", "application/json","""[52, 43, 78, 90]""", false, Map[String,List[String]]()) req.setAttribute(ASSERT_FUNCTION, (csReq: CheckerServletRequest, csResp: CheckerServletResponse, res: Result) => { // Correct header should be set... 
assert(csReq.getHeaders("X-JSON").toList == List("[52,43,78,90]")) }) validator.validate(req, response, chain) } test (s"A POST on /c with an object type should cature that array $wadlDesc with $configDesc") { val req = request("POST", "/c", "application/json","""{ "52" : 43, "78" : 90}]""", false, Map[String,List[String]]()) req.setAttribute(ASSERT_FUNCTION, (csReq: CheckerServletRequest, csResp: CheckerServletResponse, res: Result) => { // Correct header should be set... val objectMapper = new ObjectMapper() val jsonNode = objectMapper.readTree (csReq.getHeaders("X-JSON").toList.head) assert (jsonNode.findValue("52").asInt == 43) assert (jsonNode.findValue("78").asInt == 90) }) validator.validate(req, response, chain) } } // With plain params disabled // for ((wadlDesc, wadl) <- jsonPlainWADLs) { for ((configDesc, config) <- jsonPlainDisabledConfigs) { val validator = Validator(wadl, config) happyPathAssertions(validator, wadlDesc, configDesc) sadPathPassingAssertions(validator, wadlDesc, configDesc) } } // // With actual plain params // for ((wadlDesc, wadl) <- jsonPlainWADLs) { for ((configDesc, config) <- jsonPlainConfigs) { val validator = Validator(wadl, config) happyPathAssertions(validator, wadlDesc, configDesc) sadPathAssertions(validator, wadlDesc, configDesc) } } // // With code and message plain params // for ((wadlDesc, wadl) <- jsonMessageWADLs) { for ((configDesc, config) <- jsonPlainConfigs) { val validator = Validator(wadl, config) happyPathAssertions(validator, wadlDesc, configDesc) sadPathCodeMessageAssertions(validator, wadlDesc, configDesc) } } // // With capture JSON // for ((wadlDesc, wadl) <- jsonCaptureWADL) { for ((configDesc, config) <- jsonCaptureConfig) { val validator = Validator(wadl, config) happyPathAssertions(validator, wadlDesc, configDesc) jsonCaptureAssertions(validator, wadlDesc, configDesc) } } // // With rax:roles // for ((wadlDesc, wadl) <- jsonPlainRaxRolesWADL) { for ((configDesc, config) <- jsonPlainRaxRolesConfigs) { 
val validator = Validator(wadl, config) happyPathRaxRolesAssertions(validator, wadlDesc, configDesc) sadPathRaxRolesAssertions(validator, wadlDesc, configDesc) BadAccessRaxRolesAssertions (validator, wadlDesc, configDesc) } } // // With rax:roles masked // for ((wadlDesc, wadl) <- jsonPlainRaxRolesWADL) { for ((configDesc, config) <- jsonPlainRaxRolesMaskConfigs) { val validator = Validator(wadl, config) happyPathRaxRolesAssertions(validator, wadlDesc, configDesc) sadPathRaxRolesAssertions(validator, wadlDesc, configDesc) BadAccessRaxRolesMaskedAssertions (validator, wadlDesc, configDesc) } } }
wdschei/api-checker
core/src/test/scala/com/rackspace/com/papi/components/checker/ValidatorWADLJsonPlainParamSuite.scala
Scala
apache-2.0
83,137
package launcher

import com.github.nscala_time.time.Imports._
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpEntity, ContentTypes, ContentType, StatusCodes, MediaTypes}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.model.headers.HttpOriginRange
import akka.stream.ActorMaterializer
import akka.util.Timeout
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import spray.json.DefaultJsonProtocol._
import com.typesafe.config.ConfigFactory
import scala.io.StdIn
import ch.megard.akka.http.cors.CorsDirectives._
import ch.megard.akka.http.cors._
import java.io.File
import org.apache.commons.io.FileUtils
import caching.CacheHandler
import utils.DateUtils._

/**
 * HTTP entry point for the ADA aggregator.
 *
 * Boot sequence: validate the configured time interval, warm the occupancy
 * cache, start an ActorSystem, build a CORS-enabled route, bind on
 * localhost:8080, then block on stdin until <Enter> shuts the server down.
 */
object Main extends App {
  // Aggregation bucket size in minutes; must evenly divide a day so buckets align.
  val interval = 15
  if (!isValidInterval(interval)) {
    throw new IllegalArgumentException(
      "Your interval value must be a divisor of 1440 (minutes in a day)")
  }
  println(s"Time interval used by the server: $interval minutes")

  // Warm the cache up-front so the first request is fast; the cache also
  // defines which date/time span the server will accept.
  val cacheHandler = CacheHandler(interval)
  cacheHandler.loadCache()
  implicit val validTS: ValidDateTimeSpan = cacheHandler.engine.validTimeSpan

  // we need an ActorSystem to host our application in
  implicit val system = ActorSystem("ada-aggregator")
  implicit val materializer = ActorMaterializer()
  implicit val executionContext = system.dispatcher

  val host = "localhost"
  val port = 8080

  // Matches timestamps of the form yyyy-MM-ddTHH:mm in the URL path.
  // NOTE(review): "\\d" inside triple quotes is a literal backslash + 'd' in the
  // regex — this looks like serialization escaping of an original "\d"; verify
  // against the upstream file.
  val DateRegex = """\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}""".r

  // Static files that need to be served to avoid CORS error
  val edgesJsonFile = new File("../app/edges.json")
  val edgesJson = FileUtils.readFileToString(edgesJsonFile,
    java.nio.charset.StandardCharsets.UTF_8)
  val switzerlandJsonFile = new File("../app/switzerland.json")
  val switzerlandJson = FileUtils.readFileToString(switzerlandJsonFile,
    java.nio.charset.StandardCharsets.UTF_8)
  val indexFile = new File("../app/index.html")
  val indexHtml = FileUtils.readFileToString(indexFile,
    java.nio.charset.StandardCharsets.UTF_8)

  // Allow any origin (credentials disabled) so the front-end can be served elsewhere.
  val corsSettings = CorsSettings.defaultSettings.copy(
    allowCredentials = false,
    allowedOrigins = HttpOriginRange.*
  )

  // Route table:
  //   GET /                                -> index.html
  //   GET /occupancy/<interval>/<dateTime> -> aggregated occupancy JSON
  //   GET /edges                           -> static edges.json
  //   GET /switzerland                     -> static switzerland.json
  val api = cors(corsSettings) {
    get {
      path("") {
        complete(HttpEntity(ContentTypes.`text/html(UTF-8)`, indexHtml))
      }
    } ~
    get {
      pathPrefix("occupancy" / IntNumber / DateRegex) { (reqInterval, stringDate) =>
        try {
          // The cache is built for one interval only; reject mismatched requests.
          if (interval != reqInterval) {
            println("ERROR: Got a request for a different time interval than " + interval)
            complete(StatusCodes.BadRequest)
          } else {
            val time = DateTime.parse(stringDate, formatter)
            if (time.isAcceptableDate) {
              // Time the cache lookup for basic request logging.
              val startTime = System.currentTimeMillis
              val res = getRequestedResult(time)
              val elapsedTime = System.currentTimeMillis - startTime
              val sec = elapsedTime / 1000D
              println(s"REQ `${time.prettyString}` handled in $sec s")
              complete(HttpEntity(ContentTypes.`application/json`, res))
            } else {
              complete(StatusCodes.BadRequest)
            }
          }
          // In case of something going wrong with the request, the server stays up!
        } catch {
          // NOTE(review): catching Throwable is over-broad (swallows fatal errors);
          // scala.util.control.NonFatal would be safer — kept as-is here.
          case e: Throwable =>
            println("Something went wrong! Probably because the date requested is not valid")
            e.printStackTrace()
            complete(StatusCodes.BadRequest)
        }
      }
    } ~
    get {
      pathPrefix("edges") {
        complete(HttpEntity(ContentTypes.`application/json`, edgesJson))
      }
    } ~
    get {
      pathPrefix("switzerland") {
        complete(HttpEntity(ContentTypes.`application/json`, switzerlandJson))
      }
    }
  }

  /** Looks up the aggregation for `time` in the cache and renders it as a JSON string. */
  def getRequestedResult(time: DateTime): String = {
    val fetchResult = cacheHandler.fetch(time)
    fetchResult.result.prettyPrint
  }

  // start a new HTTP server on port 8080 with our service actor as the handler
  val serverFuture = Http().bindAndHandle(api, host, port)

  // // testing
  // val time = DateTime.parse("2017-01-30T07:00", formatter)
  // if (time.isAcceptableDate) {
  //   getRequestedResult(time)
  // } else {
  //   println("ERROR: Date is not in range")
  // }

  // Shutting down the server on <Enter> pressed
  StdIn.readLine()
  serverFuture.flatMap(_.unbind()).onComplete(_ => system.terminate())
}
tOverney/ADA-Project
aggregator/src/main/scala/launcher/Main.scala
Scala
apache-2.0
4,482
package com.webtrends.harness.component.spray.websocket

import java.util.concurrent.LinkedBlockingDeque

import akka.actor._
import akka.dispatch._
import com.typesafe.config.Config

/**
 * Copied from https://github.com/smootoo/simple-spray-websockets/blob/master/src/main/scala/org/suecarter/websocket/PriorityUnboundedDequeMailbox.scala
 *
 * Specialist priority (user provides the rules), unbounded, deque
 * (can be used for Stashing) mailbox.
 *
 * Very useful for messages of high priority, such as `Ack`s in I/O
 * situations.
 *
 * Based on UnboundedDequeBasedMailbox from Akka.
 */
abstract class PriorityUnboundedDequeMailbox extends MailboxType
    with ProducesMessageQueue[UnboundedDequeBasedMailbox.MessageQueue] {

  // Akka instantiates mailboxes reflectively with (settings, config); this
  // implementation needs neither, so the auxiliary constructor just delegates.
  def this(settings: ActorSystem.Settings, config: Config) = this()

  // One queue per actor; each queue closes over this mailbox's priority rule.
  final override def create(owner: Option[ActorRef], system: Option[ActorSystem]): MessageQueue =
    new PriorityUnboundedDequeMailbox.MessageQueue(priority)

  /**
   * When true, the queue will place this envelope at the front of the
   * queue (as if it was just stashed).
   */
  def priority(e: Envelope): Boolean
}

/**
 * Copied from https://github.com/smootoo/simple-spray-websockets/blob/master/src/main/scala/org/suecarter/websocket/PriorityUnboundedDequeMailbox.scala
 */
object PriorityUnboundedDequeMailbox {

  /**
   * Unbounded deque-backed message queue. Envelopes matching `priority` are
   * enqueued at the head (jump the queue); all others append at the tail.
   */
  class MessageQueue(priority: Envelope => Boolean) extends LinkedBlockingDeque[Envelope]
      with UnboundedDequeBasedMessageQueue {
    // UnboundedDequeBasedMessageQueue requires the backing deque; this class IS the deque.
    final val queue = this

    override def enqueue(receiver: ActorRef, handle: Envelope): Unit =
      if (priority(handle)) super.enqueueFirst(receiver, handle)
      else super.enqueue(receiver, handle)
  }
}
mjwallin1/wookiee-spray
src/main/scala/com/webtrends/harness/component/spray/websocket/PriotiryUnboundedDequeMailBox.scala
Scala
apache-2.0
1,661
package edu.osu.cse.groenkeb.logic.model

import edu.osu.cse.groenkeb.logic._

/**
 * An atomic diagram: a domain of terms together with the relations that hold
 * over it. Construction fails with a ModelException if two relations define
 * the same predicate with different ranks.
 */
case class AtomicDiagram(domain: Domain, relations: Relation*) {
  // Enforce the rank-consistency invariant as soon as the diagram is built.
  validate()

  /** Operator alias for [[merge]]. */
  def ++(diagram: AtomicDiagram) = merge(diagram)

  /** Combines both domains and de-duplicates the concatenated relations. */
  def merge(diagram: AtomicDiagram) = {
    val combinedDomain = this.domain ++ diagram.domain
    val combinedRelations = (this.relations ++ diagram.relations).distinct
    AtomicDiagram(combinedDomain, combinedRelations: _*)
  }

  /** True when this diagram includes the given relation. */
  def has(relation: Relation): Boolean = relations.contains(relation)

  /** True when the domain includes the given term. */
  def has(term: Term): Boolean = domain.has(term)

  /**
   * An atom is valid in this diagram when some relation's predicate matches
   * the atom's predicate and every term of the atom belongs to the domain.
   */
  def validate(atom: Atom): Boolean =
    relations.exists(rel => rel.predicate.matches(atom.predicate)) &&
      atom.terms.forall(term => domain.has(term))

  override def toString =
    s"${getClass.getSimpleName}($domain Relations{${relations.mkString(", ")}})"

  // Invariant check: no two relations may agree on predicate but disagree on rank.
  private def validate(): Unit = {
    val conflicting = relations.find { first =>
      relations.exists(second => first.predicate.matches(second.predicate) && first.rank != second.rank)
    }
    conflicting.foreach { bad =>
      throw ModelException("incompatible definitions of predicate " + bad.predicate)
    }
  }
}
bgroenks96/AutoMoL
core/src/main/scala/edu/osu/cse/groenkeb/logic/model/AtomicDiagram.scala
Scala
mit
1,035
import com.cognism.common.utils.ApplicationException
import play.api.http.HttpErrorHandler
import play.api.mvc._
import play.api.mvc.Results._

import scala.concurrent._
import javax.inject.Singleton

/**
 * Global Play error handler.
 *
 * Client errors echo the status code and message in a plain-text body.
 * Server errors caused by an ApplicationException reuse the response carried
 * by the exception; anything else is rendered as a 500 with the message.
 */
@Singleton
class ErrorHandler extends HttpErrorHandler {

  def onClientError(request: RequestHeader, statusCode: Int, message: String) =
    Future.successful(Status(statusCode)(s"HTTP [$statusCode] Client Error {$message}"))

  def onServerError(request: RequestHeader, exception: Throwable) =
    exception.getCause match {
      // Domain errors travel wrapped in ApplicationException with a prepared response.
      case ApplicationException(_, response) =>
        Future.successful(response)
      case _ =>
        Future.successful(InternalServerError("A server error occurred: " + exception.getMessage))
    }
}
Cognism/cognism-template-play
app/ErrorHandler.scala
Scala
mit
749
package com.karasiq.shadowcloud.test.utils

import akka.stream.Materializer
import akka.util.Timeout

import scala.concurrent.ExecutionContextExecutor
import scala.concurrent.duration._

/**
 * Mixin providing the implicit values actor-based test suites need:
 * a default ask timeout, a stream materializer, and an execution context,
 * all derived from the ActorSpec's actor system.
 */
trait ActorSpecImplicits { self: ActorSpec ⇒
  // Explicit result types on implicit members: avoids surprising inference and
  // is mandatory under Scala 3 / -Xsource:3. Dot syntax (15.seconds) replaces
  // the deprecated postfix operator form (15 seconds).
  implicit val defaultTimeout: Timeout = Timeout(15.seconds)
  // matFromSystem resolves its system from the implicit scope of ActorSpec.
  implicit val materializer: Materializer = Materializer.matFromSystem
  // system.dispatcher is an ExecutionContextExecutor; annotating with that exact
  // type keeps the exposed interface unchanged for callers.
  implicit val executionContext: ExecutionContextExecutor = system.dispatcher
}
Karasiq/shadowcloud
utils/test/.jvm/src/main/scala/com/karasiq/shadowcloud/test/utils/ActorSpecImplicits.scala
Scala
apache-2.0
354
/* * Copyright 2022 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package iht.controllers.registration import iht.config.AppConfig import iht.metrics.IhtMetrics import iht.models._ import iht.models.application.ApplicationDetails import iht.testhelpers.{CommonBuilder, ContentChecker} import iht.views.html.registration.{registration_error, registration_error_serviceUnavailable, registration_summary} import org.joda.time.LocalDate import org.mockito.ArgumentMatchers._ import org.mockito.Mockito._ import org.mockito.invocation.InvocationOnMock import org.mockito.stubbing.Answer import play.api.mvc.MessagesControllerComponents import play.api.test.Helpers._ import uk.gov.hmrc.http.{GatewayTimeoutException, UpstreamErrorResponse} import uk.gov.hmrc.play.bootstrap.frontend.controller.FrontendController import scala.concurrent.duration.Duration import scala.concurrent.{Await, Future} class RegistrationSummaryControllerTest extends RegistrationControllerTest{ protected abstract class TestController extends FrontendController(mockControllerComponents) with RegistrationSummaryController { override val cc: MessagesControllerComponents = mockControllerComponents override implicit val appConfig: AppConfig = mockAppConfig override val registrationSummaryView: registration_summary = app.injector.instanceOf[registration_summary] override val registrationErrorView: registration_error = app.injector.instanceOf[registration_error] override val 
registrationErrorServiceUnavailableView: registration_error_serviceUnavailable = app.injector.instanceOf[registration_error_serviceUnavailable] } def controller = new TestController { override val cachingConnector = mockCachingConnector override val ihtConnector = mockIhtConnector override val authConnector = mockAuthConnector override val metrics: IhtMetrics = mock[IhtMetrics] } def controllerNotAuthorised = new TestController { override val cachingConnector = mockCachingConnector override val ihtConnector = mockIhtConnector override val authConnector = mockAuthConnector override val metrics: IhtMetrics = mock[IhtMetrics] } def anchorLink(route: String, postfix: String) = s"$route#$postfix" val testDod = new DeceasedDateOfDeath(new LocalDate(2001,11, 11)) val testAd = CommonBuilder.buildApplicantDetails val testDd = CommonBuilder.buildDeceasedDetails def fullyCompletedRegistrationDetails = { RegistrationDetails(Some(testDod), Some(testAd), Some(testDd), areOthersApplyingForProbate = Some(false)) } "Summary controller" must { "redirect to GG login page on PageLoad if the user is not logged in" in { val result = controllerNotAuthorised.onPageLoad(createFakeRequest(isAuthorised = false)) status(result) must be(SEE_OTHER) redirectLocation(result) must be (Some(loginUrl)) } "redirect to GG login page on Submit if the user is not logged in" in { val result = controllerNotAuthorised.onSubmit(createFakeRequest(isAuthorised = false)) status(result) must be(SEE_OTHER) redirectLocation(result) must be (Some(loginUrl)) } "Load the RegistrationSummary page with title" in { val deceasedDateOfDeath = new DeceasedDateOfDeath(new LocalDate(2001,11, 11)) val applicantDetails = CommonBuilder.buildApplicantDetails val deceasedDetails = CommonBuilder.buildDeceasedDetails val registrationDetails = RegistrationDetails(Some(deceasedDateOfDeath), Some(applicantDetails), Some(deceasedDetails), areOthersApplyingForProbate = Some(false)) 
createMockToGetRegDetailsFromCache(mockCachingConnector, Some(registrationDetails)) val result = controller.onPageLoad()(createFakeRequest(authRetrieveNino = false)) status(result) must be(OK) val content = ContentChecker.stripLineBreaks(contentAsString(result)) content must include(messagesApi("iht.registration.checkYourAnswers")) } "onSubmit for valid input should redirect to completed registration" in { val deceasedDateOfDeath = new DeceasedDateOfDeath(new LocalDate(2001,11, 11)) val applicantDetails = CommonBuilder.buildApplicantDetails val deceasedDetails = CommonBuilder.buildDeceasedDetails val coExec1 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val coExec2 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val registrationDetails = RegistrationDetails(Some(deceasedDateOfDeath), Some(applicantDetails), Some(deceasedDetails), Seq(coExec1, coExec2)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(registrationDetails)) createMockToStoreRegDetailsInCache(mockCachingConnector, Some(registrationDetails)) createMockToSaveApplicationDetails(mockIhtConnector) createMockToSubmitRegistration(mockIhtConnector) val result = controller.onSubmit(createFakeRequest(authRetrieveNino = false)) redirectLocation(result) mustBe Some(iht.controllers.registration.routes.CompletedRegistrationController.onPageLoad().url) status(result) must be(SEE_OTHER) } "onSubmit for valid input where no registration details should throw exception" in { createMockToGetRegDetailsFromCache(mockCachingConnector, None) a[RuntimeException] mustBe thrownBy { Await.result(controller.onSubmit(createFakeRequest(authRetrieveNino = false)), Duration.Inf) } } "onSubmit duplicate registration redirect to Duplicate Registration page" in { val deceasedDateOfDeath = new DeceasedDateOfDeath(new LocalDate(2001,11, 11)) val applicantDetails = 
CommonBuilder.buildApplicantDetails val deceasedDetails = CommonBuilder.buildDeceasedDetails val coExec1 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val coExec2 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val registrationDetails = RegistrationDetails(Some(deceasedDateOfDeath), Some(applicantDetails), Some(deceasedDetails), Seq(coExec1, coExec2)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(registrationDetails)) createMockToStoreRegDetailsInCache(mockCachingConnector, Some(registrationDetails)) createMockToSubmitRegistration(mockIhtConnector, "") val result = controller.onSubmit(createFakeRequest(authRetrieveNino = false)) status(result) must be(SEE_OTHER) redirectLocation(result) mustBe Some(routes.DuplicateRegistrationController.onPageLoad("IHT Reference").url) } "onSubmit GatewayTimeoutException" in { val deceasedDateOfDeath = new DeceasedDateOfDeath(new LocalDate(2001,11, 11)) val applicantDetails = CommonBuilder.buildApplicantDetails val deceasedDetails = CommonBuilder.buildDeceasedDetails val coExec1 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val coExec2 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val registrationDetails = RegistrationDetails(Some(deceasedDateOfDeath), Some(applicantDetails), Some(deceasedDetails), Seq(coExec1, coExec2)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(registrationDetails)) createMockToStoreRegDetailsInCache(mockCachingConnector, Some(registrationDetails)) createMockToSubmitRegistration(mockIhtConnector) when(mockIhtConnector.saveApplication(any(), any(), any())(any(), any())) .thenAnswer(new Answer[Future[Option[ApplicationDetails]]] { override def answer(invocation: InvocationOnMock): 
Future[Option[ApplicationDetails]] = { Future.failed(new GatewayTimeoutException("test")) }}) val result = controller.onSubmit(createFakeRequest(authRetrieveNino = false)) status(result) must be(INTERNAL_SERVER_ERROR) contentAsString(result) must include(messagesApi("error.cannotSend")) } "onSubmit UpstreamErrorResponse" in { val deceasedDateOfDeath = new DeceasedDateOfDeath(new LocalDate(2001,11, 11)) val applicantDetails = CommonBuilder.buildApplicantDetails val deceasedDetails = CommonBuilder.buildDeceasedDetails val coExec1 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val coExec2 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val registrationDetails = RegistrationDetails(Some(deceasedDateOfDeath), Some(applicantDetails), Some(deceasedDetails), Seq(coExec1, coExec2)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(registrationDetails)) createMockToStoreRegDetailsInCache(mockCachingConnector, Some(registrationDetails)) createMockToSubmitRegistration(mockIhtConnector) when(mockIhtConnector.saveApplication(any(), any(), any())(any(), any())) .thenAnswer(new Answer[Future[Option[ApplicationDetails]]] { override def answer(invocation: InvocationOnMock): Future[Option[ApplicationDetails]] = { Future.failed(UpstreamErrorResponse.apply("Service Unavailable", 502, 502)) }}) val result = controller.onSubmit(createFakeRequest(authRetrieveNino = false)) status(result) must be(INTERNAL_SERVER_ERROR) contentAsString(result) must include(messagesApi("error.registration.serviceUnavailable.p1")) } "onSubmit UpstreamErrorResponse 502" in { val deceasedDateOfDeath = new DeceasedDateOfDeath(new LocalDate(2001,11, 11)) val applicantDetails = CommonBuilder.buildApplicantDetails val deceasedDetails = CommonBuilder.buildDeceasedDetails val coExec1 = CommonBuilder.buildCoExecutor copy 
(firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val coExec2 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val registrationDetails = RegistrationDetails(Some(deceasedDateOfDeath), Some(applicantDetails), Some(deceasedDetails), Seq(coExec1, coExec2)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(registrationDetails)) createMockToStoreRegDetailsInCache(mockCachingConnector, Some(registrationDetails)) createMockToSubmitRegistration(mockIhtConnector) when(mockIhtConnector.saveApplication(any(), any(), any())(any(), any())) .thenAnswer(new Answer[Future[Option[ApplicationDetails]]] { override def answer(invocation: InvocationOnMock): Future[Option[ApplicationDetails]] = { Future.failed(UpstreamErrorResponse.apply("test", 502, 502)) }}) val result = controller.onSubmit(createFakeRequest(authRetrieveNino = false)) status(result) must be(500) } "redirect to the estate report page if the RegistrationDetails does not contain deceased's date of death" in { val rd = fullyCompletedRegistrationDetails copy (deceasedDateOfDeath = None) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(rd)) val result = await(controller.onPageLoad(createFakeRequest(authRetrieveNino = false))) status(result) mustBe SEE_OTHER } "redirect to the estate report page if the RegistrationDetails does not contain the deceased's name" in { val dd = testDd copy (firstName = None) val rd = RegistrationDetails(Some(testDod), Some(testAd), Some(dd), areOthersApplyingForProbate = Some(false)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(rd)) val result = await(controller.onPageLoad(createFakeRequest(authRetrieveNino = false))) status(result) mustBe SEE_OTHER } "redirect to the estate report page if the RegistrationDetails does not contain the deceased's address location" in { val dd = testDd copy (isAddressInUK = None) val rd = 
RegistrationDetails(Some(testDod), Some(testAd), Some(dd), areOthersApplyingForProbate = Some(false)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(rd)) val result = await(controller.onPageLoad(createFakeRequest(authRetrieveNino = false))) status(result) mustBe SEE_OTHER } "redirect to the estate report page if the RegistrationDetails does not contain the deceased's address" in { val dd = testDd copy (ukAddress = None) val rd = RegistrationDetails(Some(testDod), Some(testAd), Some(dd), areOthersApplyingForProbate = Some(false)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(rd)) val result = await(controller.onPageLoad(createFakeRequest(authRetrieveNino = false))) status(result) mustBe SEE_OTHER } "redirect to the estate report page if the RegistrationDetails does not contain an answer to 'applying for probate' question" in { val ad = testAd copy (isApplyingForProbate = None) val rd = RegistrationDetails(Some(testDod), Some(ad), Some(testDd), areOthersApplyingForProbate = Some(false)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(rd)) val result = await(controller.onPageLoad(createFakeRequest(authRetrieveNino = false))) status(result) mustBe SEE_OTHER } "redirect to the estate report page if the RegistrationDetails does not contain the probate location" in { val ad = testAd copy (country = None) val rd = RegistrationDetails(Some(testDod), Some(ad), Some(testDd), areOthersApplyingForProbate = Some(false)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(rd)) val result = await(controller.onPageLoad(createFakeRequest(authRetrieveNino = false))) status(result) mustBe SEE_OTHER redirectLocation(result) mustBe Some(iht.controllers.estateReports.routes.YourEstateReportsController.onPageLoad().url) } "redirect to the estate report page if the RegistrationDetails does not contain contact number" in { val ad = testAd copy (phoneNo = None) val rd = RegistrationDetails(Some(testDod), Some(ad), Some(testDd), 
areOthersApplyingForProbate = Some(false)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(rd)) val result = await(controller.onPageLoad(createFakeRequest(authRetrieveNino = false))) status(result) mustBe SEE_OTHER } "redirect to the estate report page if the RegistrationDetails does not contain an address" in { val ad = testAd copy (ukAddress = None) val rd = RegistrationDetails(Some(testDod), Some(ad), Some(testDd), areOthersApplyingForProbate = Some(false)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(rd)) val result = await(controller.onPageLoad(createFakeRequest(authRetrieveNino = false))) status(result) mustBe SEE_OTHER } "redirect to the estate report page if the RegistrationDetails does not contain an answer to 'are others applying for probate' question" in { val rd = RegistrationDetails(Some(testDod), Some(testAd), Some(testDd), areOthersApplyingForProbate = None) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(rd)) val result = await(controller.onPageLoad(createFakeRequest(authRetrieveNino = false))) status(result) mustBe SEE_OTHER } "onSubmit RuntimeException" in { val deceasedDateOfDeath = new DeceasedDateOfDeath(new LocalDate(2001,11, 11)) val applicantDetails = CommonBuilder.buildApplicantDetails val deceasedDetails = CommonBuilder.buildDeceasedDetails val coExec1 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val coExec2 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val registrationDetails = RegistrationDetails(Some(deceasedDateOfDeath), Some(applicantDetails), Some(deceasedDetails), Seq(coExec1, coExec2)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(registrationDetails)) createMockToStoreRegDetailsInCache(mockCachingConnector, Some(registrationDetails)) createMockToSubmitRegistration(mockIhtConnector) 
when(mockIhtConnector.saveApplication(any(), any(), any())(any(), any())) .thenAnswer(new Answer[Future[Option[ApplicationDetails]]] { override def answer(invocation: InvocationOnMock): Future[Option[ApplicationDetails]] = { Future.failed(new RuntimeException("Request timed out")) }}) val result = controller.onSubmit(createFakeRequest(authRetrieveNino = false)) status(result) must be(INTERNAL_SERVER_ERROR) contentAsString(result) must include(messagesApi("error.cannotSend")) } "onSubmit RuntimeException not timeout" in { val deceasedDateOfDeath = new DeceasedDateOfDeath(new LocalDate(2001,11, 11)) val applicantDetails = CommonBuilder.buildApplicantDetails val deceasedDetails = CommonBuilder.buildDeceasedDetails val coExec1 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val coExec2 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val registrationDetails = RegistrationDetails(Some(deceasedDateOfDeath), Some(applicantDetails), Some(deceasedDetails), Seq(coExec1, coExec2)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(registrationDetails)) createMockToStoreRegDetailsInCache(mockCachingConnector, Some(registrationDetails)) createMockToSubmitRegistration(mockIhtConnector) when(mockIhtConnector.saveApplication(any(), any(), any())(any(), any())) .thenAnswer(new Answer[Future[Option[ApplicationDetails]]] { override def answer(invocation: InvocationOnMock): Future[Option[ApplicationDetails]] = { Future.failed(new RuntimeException("testing")) }}) val result = controller.onSubmit(createFakeRequest(authRetrieveNino = false)) status(result) must be(INTERNAL_SERVER_ERROR) contentAsString(result) must include(messagesApi("error.cannotSend")) } "onSubmit for valid input should produce an internal server error if the storage fails" in { val deceasedDateOfDeath = new DeceasedDateOfDeath(new LocalDate(2001,11, 11)) val 
applicantDetails = CommonBuilder.buildApplicantDetails val deceasedDetails = CommonBuilder.buildDeceasedDetails val coExec1 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val coExec2 = CommonBuilder.buildCoExecutor copy (firstName=CommonBuilder.firstNameGenerator, lastName=CommonBuilder.surnameGenerator) val registrationDetails = RegistrationDetails(Some(deceasedDateOfDeath), Some(applicantDetails), Some(deceasedDetails), Seq(coExec1, coExec2)) createMockToGetRegDetailsFromCache(mockCachingConnector, Some(registrationDetails)) createMockToStoreRegDetailsInCacheWithFailure(mockCachingConnector, Some(registrationDetails)) createMockToSaveApplicationDetails(mockIhtConnector) createMockToSubmitRegistration(mockIhtConnector) val result = controller.onSubmit(createFakeRequest(authRetrieveNino = false)) status(result) must be(INTERNAL_SERVER_ERROR) } } }
hmrc/iht-frontend
test/iht/controllers/registration/RegistrationSummaryControllerTest.scala
Scala
apache-2.0
20,455
package org.vaadin.addons.rinne

import com.vaadin.ui.Button
import org.vaadin.addons.rinne.mixins._

/**
 * Scala-friendly wrapper for Vaadin's [[com.vaadin.ui.Button]].
 * All added behaviour comes from the mixed-in `ButtonMixin`; this class
 * itself adds nothing beyond combining the two.
 */
class VButton extends Button with ButtonMixin
LukaszByczynski/rinne
src/main/scala/org/vaadin/addons/rinne/VButton.scala
Scala
apache-2.0
147
/*
 * Copyright 2016 Dennis Vriend
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.github.dnvriend

import java.text.SimpleDateFormat
import java.util.{ Date, UUID }

import akka.NotUsed
import akka.actor._
import akka.cluster.sharding.{ ClusterSharding, ClusterShardingSettings, ShardRegion }
import akka.event.LoggingReceive
import akka.persistence.jdbc.query.scaladsl.JdbcReadJournal
import akka.persistence.query.scaladsl._
import akka.persistence.query.{ EventEnvelope, PersistenceQuery }
import akka.persistence.{ PersistentActor, RecoveryCompleted }
import akka.stream.scaladsl.Sink
import akka.stream.{ ActorMaterializer, Materializer }
import com.github.dnvriend.dao.{ PersonDao, PersonDaoImpl }
import com.github.dnvriend.data.Event.PBPersonCreated
import com.typesafe.config.ConfigFactory

import scala.concurrent.ExecutionContext
import scala.concurrent.duration._

/**
 * Protocol and cluster-sharding plumbing for the [[Person]] persistent actor:
 * commands, events, the in-memory state, and the shard-routing extractors.
 */
object Person {
  // commands accepted by the Person actor
  sealed trait Command
  final case class CreatePerson(firstName: String, lastName: String, timestamp: Long) extends Command
  final case class ChangeFirstName(firstName: String, timestamp: Long) extends Command
  final case class ChangeLastName(lastName: String, timestamp: Long) extends Command

  // events
  sealed trait Event
  final case class PersonCreated(firstName: String, lastName: String, timestamp: Long) extends Event
  final case class FirstNameChanged(firstName: String, timestamp: Long) extends Event
  final case class LastNameChanged(lastName: String, timestamp: Long) extends Event

  // the state
  final case class PersonState(firstName: String = "", lastName: String = "")

  // necessary for cluster sharding: wraps a payload with the entity id so the
  // shard region can route the message to the right entity actor
  final case class EntityEnvelope(id: String, payload: Any)

  final val NumberOfShards: Int = 100

  // unwraps the envelope: entity id + the actual payload delivered to the entity
  val extractEntityId: ShardRegion.ExtractEntityId = {
    case EntityEnvelope(id, payload) ⇒ (id.toString, payload)
  }

  // shard is derived from the entity id's hashCode modulo the shard count
  val extractShardId: ShardRegion.ExtractShardId = {
    case EntityEnvelope(id, _) ⇒ (id.hashCode % NumberOfShards).toString
  }

  final val PersonShardName = "Person"
}

/**
 * Event-sourced person entity. Commands are turned into events via `persist`,
 * and both live events and replayed events go through [[handleEvent]] so the
 * state transition logic exists in exactly one place. The actor passivates
 * itself after 300ms of inactivity (cluster-sharding pattern).
 */
class Person extends PersistentActor with ActorLogging {
  import Person._
  import ShardRegion.Passivate

  // persistenceId is derived from the sharded entity name (the entity id)
  override val persistenceId: String = "Person-" + self.path.name

  // after 300ms without messages, ask the shard region to passivate this entity
  context.setReceiveTimeout(300.millis)

  var state = PersonState()

  // single state-transition function, used for both recovery and live persists
  def handleEvent(event: Event): Unit = event match {
    case PersonCreated(firstName, lastName, _) ⇒ state = state.copy(firstName = firstName, lastName = lastName)
    case FirstNameChanged(firstName, _)        ⇒ state = state.copy(firstName = firstName)
    case LastNameChanged(lastName, _)          ⇒ state = state.copy(lastName = lastName)
  }

  override def receiveRecover: Receive = LoggingReceive {
    case event: Event ⇒ handleEvent(event)
  }

  def now: Long = System.currentTimeMillis()

  override def receiveCommand: Receive = LoggingReceive {
    // note: the command's timestamp is ignored; events are stamped with `now`
    case CreatePerson(firstName, lastName, _) ⇒ persist(PersonCreated(firstName, lastName, now))(handleEvent)
    case ChangeFirstName(firstName, _)        ⇒ persist(FirstNameChanged(firstName, now))(handleEvent)
    case ChangeLastName(lastName, _)          ⇒ persist(LastNameChanged(lastName, now))(handleEvent)
    // passivation handshake: parent (shard region) echoes the stop message back
    case ReceiveTimeout                       ⇒ context.parent ! Passivate(stopMessage = SupervisorStrategy.Stop)
    case SupervisorStrategy.Stop              ⇒ context.stop(self)
  }
}

/** Self-messages the SupportDesk schedules to mutate a previously created person. */
object SupportDesk {
  final case class ChangeFirstName(id: String)
  final case class ChangeLastName(id: String)
}

/**
 * Traffic generator: every second it either creates a new person (scheduling
 * later first/last name changes for it) or just bumps its counter.
 * NOTE(review): `readJournal` is accepted but never used in this actor —
 * presumably left over from an earlier revision; confirm before removing.
 */
class SupportDesk(personRegion: ActorRef, readJournal: ReadJournal with CurrentPersistenceIdsQuery)(implicit val mat: Materializer, ec: ExecutionContext) extends Actor with ActorLogging {
  import Person._

  private var counter: Int = 0

  // tick driving the generator; the "GO" payload itself is never inspected
  context.system.scheduler.schedule(1.second, 1.second, self, "GO")

  def now: Long = System.currentTimeMillis()

  override def receive: Receive = {
    // while counter is even this case matches ANY message (including the
    // scheduled ChangeFirstName/ChangeLastName) and creates a new person
    case _ if counter % 2 == 0 ⇒
      val id = UUID.randomUUID.toString
      personRegion ! EntityEnvelope(id, CreatePerson("FOO", "BAR", now))
      context.system.scheduler.scheduleOnce(5.seconds, self, SupportDesk.ChangeFirstName(id))
      context.system.scheduler.scheduleOnce(10.seconds, self, SupportDesk.ChangeLastName(id))
      counter += 1
    case SupportDesk.ChangeFirstName(id) ⇒
      personRegion ! EntityEnvelope(id, ChangeFirstName(s"FOO-${DateUtil.format(now)}", now))
    case SupportDesk.ChangeLastName(id) ⇒
      personRegion ! EntityEnvelope(id, ChangeLastName(s"BAR-${DateUtil.format(now)}", now))
    case _ ⇒
      counter += 1
      println("Nothing to do: " + counter)
  }
}

/** Formats an epoch-millis timestamp for inclusion in generated names. */
object DateUtil {
  // NOTE(review): the pattern ends in ".XXX" — XXX is the ISO-8601 zone offset,
  // placed after a literal dot. ".SSS" (milliseconds) may have been intended;
  // confirm the expected output format.
  def format(timestamp: Long): String = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.XXX").format(new Date(timestamp))
}

/** Internal events and stream-handshake messages for the read-model updater. */
object InsertPersonInPersonTableHandler {
  sealed trait Event
  // records the journal offset up to which person-created events were handled
  final case class PersonHandled(offset: Long) extends Event
  final case class PersonInserted(id: String) extends Event
  // Sink.actorRefWithAck handshake messages
  final case class Completed()
  final case class Ack()
  final case class Init()
  final case class SavePersonSucceeded(offset: Long, sender: ActorRef)
}

/**
 * Handles only PersonCreated events to insert a record in the person.persons
 * table (read model). It is itself a PersistentActor so it can checkpoint the
 * last handled journal offset and resume from there after a restart.
 */
class InsertPersonInPersonTableHandler(readJournal: JdbcReadJournal, personDao: PersonDao)(implicit ec: ExecutionContext, mat: Materializer) extends PersistentActor {
  import InsertPersonInPersonTableHandler._

  override def persistenceId: String = "InsertPersonInPersonTableHandler"

  var recoverOffsetPersonCreated: Long = 0

  def handleEvent(event: Event): Unit = event match {
    case PersonHandled(newOffset) ⇒ recoverOffsetPersonCreated = newOffset
    case _                        ⇒
  }

  override def receiveRecover: Receive = LoggingReceive {
    case event: Event ⇒ handleEvent(event)
    // once recovery is done, resume the tagged-event stream from the
    // checkpointed offset, with backpressure via actorRefWithAck
    case RecoveryCompleted ⇒
      readJournal.eventsByTag("person-created", recoverOffsetPersonCreated)
        .runWith(Sink.actorRefWithAck(self, Init(), Ack(), Completed()))
  }

  override def receiveCommand: Receive = LoggingReceive {
    case _: Completed ⇒ context.stop(self)
    case _: Init      ⇒ sender() ! Ack() // provide demand
    case EventEnvelope(offset, pid, seqno, PBPersonCreated(firstName, lastName, timestamp)) ⇒
      // side effect only in command handler
      val theSender = sender()
      personDao.savePerson(pid, firstName, lastName).map { _ ⇒
        // NOTE(review): persist is called from a Future callback here, i.e.
        // outside the actor's message-processing context — confirm this is safe
        // in this akka version (persist is normally not thread-safe).
        persist(PersonInserted(pid))(handleEvent)
        self ! SavePersonSucceeded(offset, theSender)
      }
    case SavePersonSucceeded(offset, theSender) ⇒
      persist(PersonHandled(offset))(handleEvent)
      theSender ! Ack() // get next message
  }
}

/** Internal events for the (unfinished) first-name aggregator below. */
object UpdatePersonFirstNameHandler {
  sealed trait Event
  final case class PersonHandled(offset: Long) extends Event
  final case class PersonAggregated(pid: String, firstname: String) extends Event
}

/**
 * Aggregates PersonCreated and FirstnameChanged for a certain persistence id.
 * It will only persist a PersonAggregated event.
 * NOTE(review): this actor is a stub — both receive handlers are empty and it
 * is never instantiated in LaunchPerson.
 */
class UpdatePersonFirstNameAggregator extends PersistentActor {
  override def persistenceId: String = "UpdatePersonFirstNameHandler"

  var recoverOffsetPersonCreated: Long = 0
  var recoverOffsetFirstNameChanged: Long = 0

  var query: NotUsed = null

  override def receiveRecover: Receive = {
    case RecoveryCompleted ⇒
  }

  override def receiveCommand: Receive = {
    case _ ⇒
  }
}

/**
 * Application entry point: boots the actor system from person-application.conf,
 * starts the Person shard region, the SupportDesk traffic generator, and the
 * read-model updater, then prints a startup banner.
 */
object LaunchPerson extends App {
  val configName = "person-application.conf"
  lazy val configuration = ConfigFactory.load(configName)
  implicit val system: ActorSystem = ActorSystem("ClusterSystem", configuration)
  sys.addShutdownHook(system.terminate())
  implicit val ec: ExecutionContext = system.dispatcher
  implicit val mat: Materializer = ActorMaterializer()

  lazy val readJournal: JdbcReadJournal = PersistenceQuery(system).readJournalFor[JdbcReadJournal](JdbcReadJournal.Identifier)

  // launch the personShardRegion; the returned actor must be used to send messages to the shard
  val personRegion: ActorRef = ClusterSharding(system).start(
    typeName = Person.PersonShardName,
    entityProps = Props[Person],
    settings = ClusterShardingSettings(system),
    extractEntityId = Person.extractEntityId,
    extractShardId = Person.extractShardId
  )

  val supportDesk = system.actorOf(Props(new SupportDesk(personRegion, readJournal)))

  // separate slick database for the read model, configured under "person-read-model"
  val personReadModelDatabase = slick.jdbc.JdbcBackend.Database.forConfig("person-read-model", system.settings.config)
  val personDao = new PersonDaoImpl(personReadModelDatabase, slick.driver.PostgresDriver)
  val insertPersonInPersonTableHandler = system.actorOf(Props(new InsertPersonInPersonTableHandler(readJournal, personDao)))

  // startup banner ("DEMO" figlet) with build info interpolated
  val banner = s"""
                  |
                  |#####  ###### #    #  ####
                  |#    # #      ##  ## #    #
                  |#    # #####  # ## # #    #
                  |#    # #      #    # #    #
                  |#    # #      #    # #    #
                  |#####  ###### #    #  ####
                  |
                  |$BuildInfo
                  | """.stripMargin

  println(banner)
}
dnvriend/demo-akka-persistence-jdbc
src/main/scala/com/github/dnvriend/LaunchPerson.scala
Scala
apache-2.0
9,479
package fr.sysf.sample.service

import fr.sysf.sample.domain.Customer
import org.bson.types.ObjectId
import org.springframework.data.mongodb.repository.MongoRepository
import org.springframework.stereotype.Repository

/**
 * Spring Data MongoDB repository for [[Customer]] documents, keyed by ObjectId.
 * The two finder queries are derived by Spring Data from the *method* names
 * (findBy<Field>), not from the parameter names.
 */
@Repository
trait CustomerRepository extends MongoRepository[Customer, ObjectId] {

  // Looks up a customer by its `customerId` field.
  def findByCustomerId(customerId:String):Customer

  // Looks up a customer by its `email` field.
  // NOTE(review): the parameter is misnamed `customerId`; the derived query
  // still matches on `email` (name derivation ignores parameter names), but
  // the misnomer invites confusion — consider renaming in a breaking release.
  def findByEmail(customerId:String):Customer
}
fpeyron/sample-scala-mongo-rest
src/main/scala/fr/sysf/sample/service/CustomerRepository.scala
Scala
apache-2.0
402
package se.meldrum.machine

import akka.http.scaladsl.model._
import akka.http.scaladsl.unmarshalling.Unmarshaller._
import se.meldrum.machine.http.UserNames

/**
 * Route-level tests for the /v1/user endpoints. Tests run in declaration
 * order against a shared database: the "empty" check must precede creation,
 * and "retrieve test users" relies on the users created earlier in the spec.
 * `route`, `createTestUsers()`, `userJsonRequest` and `postRequest` come from
 * [[BaseSpec]].
 */
class UserSpec extends BaseSpec {
  import se.meldrum.machine.http.JsonSupport._

  "User route" should {

    "not handle GET requests on invalid paths" in {
      Get("/nonexistingpath") ~> route ~> check {
        handled shouldBe false
      }
    }

    "confirm that there are no test users when db is empty" in {
      Get("/v1/user") ~> route ~> check {
        responseAs[UserNames] shouldEqual UserNames(Seq.empty[String])
      }
    }

    "create test users" in {
      // each POST in the batch must report a successful creation
      val postRequests = createTestUsers()
      postRequests.map(req =>
        req ~> route ~> check {
          responseAs[String] shouldEqual "User created"
        })
    }

    "notify of duplicated user" in {
      // re-posting an already-created user must be rejected, not duplicated
      val postRequest = createExistingUser()
      postRequest ~> route ~> check {
        responseAs[String] shouldEqual "User already exists"
      }
    }

    "retrieve test users" in {
      Get("/v1/user") ~> route ~> check {
        responseAs[UserNames] shouldEqual UserNames(Seq("testuser", "testuser2", "testuser3"))
      }
    }
  }

  // Builds a POST for a user that the "create test users" step already created.
  private def createExistingUser(): HttpRequest = {
    val duplicatedUser = userJsonRequest("testuser", "secret", "test@meldrum.se")
    postRequest("/v1/user/create", duplicatedUser)
  }
}
Max-Meldrum/machine
src/test/scala/se/meldrum/machine/UserSpec.scala
Scala
apache-2.0
1,386
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.streaming

import scala.util.Random

import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SparkConf, SparkEnv}
import org.apache.spark.rdd.BlockRDD
import org.apache.spark.storage.{StorageLevel, StreamBlockId}
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.rdd.WriteAheadLogBackedBlockRDD
import org.apache.spark.streaming.receiver.{BlockManagerBasedStoreResult, Receiver, WriteAheadLogBasedStoreResult}
import org.apache.spark.streaming.scheduler.ReceivedBlockInfo
import org.apache.spark.streaming.util.{WriteAheadLogRecordHandle, WriteAheadLogUtils}

/**
 * Verifies ReceiverInputDStream.createBlockRDD: which RDD type it produces
 * (plain BlockRDD vs WriteAheadLogBackedBlockRDD) depending on whether the
 * receiver WAL is enabled and whether the block infos carry WAL record
 * handles, and that non-existent blocks are filtered out.
 */
class ReceiverInputDStreamSuite extends TestSuiteBase with BeforeAndAfterAll {

  // Stop any StreamingContext left active by a test before tearing down.
  override def afterAll(): Unit = {
    try {
      StreamingContext.getActive().map { _.stop() }
    } finally {
      super.afterAll()
    }
  }

  testWithoutWAL("createBlockRDD creates empty BlockRDD when no block info") { receiverStream =>
    val rdd = receiverStream.createBlockRDD(Time(0), Seq.empty)
    assert(rdd.isInstanceOf[BlockRDD[_]])
    assert(!rdd.isInstanceOf[WriteAheadLogBackedBlockRDD[_]])
    assert(rdd.isEmpty())
  }

  testWithoutWAL("createBlockRDD creates correct BlockRDD with block info") { receiverStream =>
    val blockInfos = Seq.fill(5) { createBlockInfo(withWALInfo = false) }
    val blockIds = blockInfos.map(_.blockId)

    // Verify that there are some blocks that are present, and some that are not
    // NOTE(review): the comment above does not match the check — `forall`
    // requires ALL blocks to be present; the comment looks copy-pasted from
    // the filtering test below.
    require(blockIds.forall(blockId => SparkEnv.get.blockManager.master.contains(blockId)))

    val rdd = receiverStream.createBlockRDD(Time(0), blockInfos)
    assert(rdd.isInstanceOf[BlockRDD[_]])
    assert(!rdd.isInstanceOf[WriteAheadLogBackedBlockRDD[_]])
    val blockRDD = rdd.asInstanceOf[BlockRDD[_]]
    assert(blockRDD.blockIds.toSeq === blockIds)
  }

  testWithoutWAL("createBlockRDD filters non-existent blocks before creating BlockRDD") { receiverStream =>
    val presentBlockInfos = Seq.fill(2)(createBlockInfo(withWALInfo = false, createBlock = true))
    val absentBlockInfos = Seq.fill(3)(createBlockInfo(withWALInfo = false, createBlock = false))
    val blockInfos = presentBlockInfos ++ absentBlockInfos
    val blockIds = blockInfos.map(_.blockId)

    // Verify that there are some blocks that are present, and some that are not
    require(blockIds.exists(blockId => SparkEnv.get.blockManager.master.contains(blockId)))
    require(blockIds.exists(blockId => !SparkEnv.get.blockManager.master.contains(blockId)))

    val rdd = receiverStream.createBlockRDD(Time(0), blockInfos)
    assert(rdd.isInstanceOf[BlockRDD[_]])
    val blockRDD = rdd.asInstanceOf[BlockRDD[_]]
    // only the blocks that actually exist in the BlockManager survive
    assert(blockRDD.blockIds.toSeq === presentBlockInfos.map { _.blockId})
  }

  testWithWAL("createBlockRDD creates empty WALBackedBlockRDD when no block info") { receiverStream =>
    val rdd = receiverStream.createBlockRDD(Time(0), Seq.empty)
    assert(rdd.isInstanceOf[WriteAheadLogBackedBlockRDD[_]])
    assert(rdd.isEmpty())
  }

  testWithWAL(
    "createBlockRDD creates correct WALBackedBlockRDD with all block info having WAL info") { receiverStream =>
    val blockInfos = Seq.fill(5) { createBlockInfo(withWALInfo = true) }
    val blockIds = blockInfos.map(_.blockId)

    val rdd = receiverStream.createBlockRDD(Time(0), blockInfos)
    assert(rdd.isInstanceOf[WriteAheadLogBackedBlockRDD[_]])
    val blockRDD = rdd.asInstanceOf[WriteAheadLogBackedBlockRDD[_]]
    assert(blockRDD.blockIds.toSeq === blockIds)
    assert(blockRDD.walRecordHandles.toSeq === blockInfos.map { _.walRecordHandleOption.get })
  }

  testWithWAL("createBlockRDD creates BlockRDD when some block info don't have WAL info") { receiverStream =>
    // mixed input: WAL-backed RDD requires ALL infos to have WAL handles,
    // so a mix must fall back to a plain BlockRDD
    val blockInfos1 = Seq.fill(2) { createBlockInfo(withWALInfo = true) }
    val blockInfos2 = Seq.fill(3) { createBlockInfo(withWALInfo = false) }
    val blockInfos = blockInfos1 ++ blockInfos2
    val blockIds = blockInfos.map(_.blockId)

    val rdd = receiverStream.createBlockRDD(Time(0), blockInfos)
    assert(rdd.isInstanceOf[BlockRDD[_]])
    val blockRDD = rdd.asInstanceOf[BlockRDD[_]]
    assert(blockRDD.blockIds.toSeq === blockIds)
  }

  // Registers `msg` as a test that runs with the receiver WAL disabled.
  private def testWithoutWAL(msg: String)(body: ReceiverInputDStream[_] => Unit): Unit = {
    test(s"Without WAL enabled: $msg") {
      runTest(enableWAL = false, body)
    }
  }

  // Registers `msg` as a test that runs with the receiver WAL enabled.
  private def testWithWAL(msg: String)(body: ReceiverInputDStream[_] => Unit): Unit = {
    test(s"With WAL enabled: $msg") {
      runTest(enableWAL = true, body)
    }
  }

  // Builds a local StreamingContext with the requested WAL setting, creates a
  // dummy ReceiverInputDStream (its receiver is never started), and runs body.
  private def runTest(enableWAL: Boolean, body: ReceiverInputDStream[_] => Unit): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local[4]").setAppName("ReceiverInputDStreamSuite")
    conf.set(WriteAheadLogUtils.RECEIVER_WAL_ENABLE_CONF_KEY, enableWAL.toString)
    require(WriteAheadLogUtils.enableReceiverLog(conf) === enableWAL)
    val ssc = new StreamingContext(conf, Seconds(1))
    val receiverStream = new ReceiverInputDStream[Int](ssc) {
      override def getReceiver(): Receiver[Int] = null
    }
    withStreamingContext(ssc) { ssc =>
      body(receiverStream)
    }
  }

  /**
   * Create a block info for input to the ReceiverInputDStream.createBlockRDD
   * @param withWALInfo Create block with WAL info in it
   * @param createBlock Actually create the block in the BlockManager
   * @return the ReceivedBlockInfo describing the (possibly absent) block
   */
  private def createBlockInfo(
      withWALInfo: Boolean,
      createBlock: Boolean = true): ReceivedBlockInfo = {
    val blockId = new StreamBlockId(0, Random.nextLong())
    if (createBlock) {
      SparkEnv.get.blockManager.putSingle(blockId, 1, StorageLevel.MEMORY_ONLY, tellMaster = true)
      require(SparkEnv.get.blockManager.master.contains(blockId))
    }
    val storeResult = if (withWALInfo) {
      new WriteAheadLogBasedStoreResult(blockId, None, new WriteAheadLogRecordHandle { })
    } else {
      new BlockManagerBasedStoreResult(blockId, None)
    }
    new ReceivedBlockInfo(0, None, None, storeResult)
  }
}
gioenn/xSpark
streaming/src/test/scala/org/apache/spark/streaming/ReceiverInputDStreamSuite.scala
Scala
apache-2.0
6,843
package unfiltered.filter

import java.io.PrintWriter
import javax.servlet.http.{HttpServletResponse, HttpServletResponseWrapper}

import unfiltered.response.HttpResponse

/**
 * The servlet API states that, for a given response, either the
 * getOutputStream or getWriter method may be called, but not both:
 *
 * http://docs.oracle.com/javaee/6/api/javax/servlet/ServletResponse.html#getOutputStream()
 * http://docs.oracle.com/javaee/6/api/javax/servlet/ServletResponse.html#getWriter()
 *
 * Unfiltered response bindings are based on a single outputStream,
 * and support filtering it (unfiltered.response.ResponseFilter)
 * through response function composition. Writing to the underlying
 * response output stream directly would bypass any filters in place,
 * and writing to its writer will produce an InvalidStateException.
 *
 * If working with software that requires a HttpServletResponse and
 * uses its Writer interface, this wrapper supplies a writer that
 * works with any stream filters in the response function chain.
 */
case class WritableServletResponse(res: HttpResponse[HttpServletResponse])
  extends HttpServletResponseWrapper(res.underlying) {

  // Lazy so that consumers using only getOutputStream never allocate a writer.
  // NOTE(review): this PrintWriter uses the platform default charset, not the
  // response's configured character encoding — confirm this matches the
  // getWriter contract for the content being written, or wrap the stream in an
  // OutputStreamWriter using getCharacterEncoding.
  override lazy val getWriter = new PrintWriter(res.outputStream)
}
omarkilani/unfiltered
filter/src/main/scala/WritableServletResponse.scala
Scala
mit
1,233
/*
 * Copyright 2011-2021 GatlingCorp (https://gatling.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import io.gatling.core.Predef._
import io.gatling.http.Predef._

/**
 * Compile-checked code snippets for the Gatling HTTP checks documentation.
 * The `//#name` ... `//#name` comment pairs are snippet-extraction anchors
 * referenced by the docs build — do not rename or remove them.
 */
class HttpCheckSampleScala {
  http("").get("")
    //#status
    .check(
      status.saveAs("status")
    )
    //#status
    //#currentLocation
    .check(
      currentLocation.saveAs("url")
    )
    //#currentLocation
    //#currentLocationRegex
    .check(
      // single capture group
      currentLocationRegex("https://(.*)/.*")
        .saveAs("domain"),
      // multiple capture groups with "captureGroups"
      currentLocationRegex("http://foo.com/bar?(.*)=(.*)")
        .ofType[(String, String)]
        .saveAs("queryParamKeyAndValue")
    )
    //#currentLocationRegex
    //#header
    .check(
      header("Content-Encoding").is("gzip")
    )
    //#header
    //#headerRegex
    .check(
      headerRegex("FOO", "foo(.*)bar(.*)baz")
        .ofType[(String, String)]
        .saveAs("data")
    )
    //#headerRegex
}
gatling/gatling
src/docs/content/reference/current/http/check/code/HttpCheckSampleScala.scala
Scala
apache-2.0
1,399
package im.actor.server.bot.services

import akka.actor.ActorSystem
import im.actor.bots.BotMessages
import im.actor.server.bot.{ BotExtension, BotServiceBase }
import im.actor.server.user.UserErrors
import upickle.Js

/**
 * Bot service exposing administrative bot-management requests: creating a new
 * bot and fetching an existing bot's token. Both operations are restricted to
 * admin callers via `ifIsAdmin`.
 */
private[bot] final class BotsBotService(system: ActorSystem) extends BotServiceBase(system) {
  import BotMessages._
  import system.dispatcher

  val botExt = BotExtension(system)

  // Creates a non-admin bot with the given nickname/name; a taken nickname is
  // mapped onto a 400 USERNAME_TAKEN bot error.
  private def createBot(nickname: String, name: String) = RequestHandler[CreateBot, CreateBot#Response](
    (botUserId: BotUserId, botAuthId: BotAuthId, botAuthSid: BotAuthSid) ⇒
      ifIsAdmin(botUserId) {
        (for {
          (token, userId) ← botExt.create(nickname, name, isAdmin = false)
        } yield Right(BotCreated(token, userId))) recover {
          case UserErrors.NicknameTaken ⇒ Left(BotError(400, "USERNAME_TAKEN", Js.Obj(), None))
        }
      }
  )

  // Returns the token of the bot identified by `botUserId` (the id carried in
  // the GetBotToken request), provided the *requesting* bot is an admin.
  //
  // FIX: the handler lambda's first parameter was previously also named
  // `botUserId`, shadowing the method parameter — so the token lookup used the
  // caller's own id instead of the requested bot's id. The lambda parameter is
  // renamed so the admin check applies to the requester while the lookup uses
  // the requested bot id.
  private def getToken(botUserId: BotUserId) = RequestHandler[GetBotToken, GetBotToken#Response](
    (requesterUserId: BotUserId, botAuthId: BotAuthId, botAuthSid: BotAuthSid) ⇒
      ifIsAdmin(requesterUserId) {
        (for {
          token ← botExt.findBotToken(botUserId)
        } yield Right(BotToken(token))) recover {
          case _ ⇒ Left(BotError(400, "ERROR_GET_TOKEN", Js.Obj(), None))
        }
      }
  )

  override def handlers: PartialFunction[RequestBody, WeakRequestHandler] = {
    case CreateBot(nickname, name) ⇒ createBot(nickname, name).toWeak
    case GetBotToken(botUserId)    ⇒ getToken(botUserId).toWeak
  }
}
dfsilva/actor-platform
actor-server/actor-bots/src/main/scala/im/actor/server/bot/services/BotsBotService.scala
Scala
agpl-3.0
1,550
package com.gravity.gdk.impression

import com.gravity.gdk.article.ArticleKey
import com.gravity.gdk.placement.{ImpressionViewedSpec, PlacementKey}
import com.gravity.gdk.reco.{RecoArticleInImpression, _}
import com.gravity.gdk.user.GravityUser
import com.gravity.gdk.util.http.MockableHttpResponse
import com.gravity.gdk.util.test.GdkAsyncTest
import org.joda.time.DateTime
import org.scalamock.scalatest.AsyncMockFactory
import play.api.libs.json.{JsArray, JsObject, Json}

import scala.concurrent.Future
import scalaj.http.{BaseHttp, HttpRequest}

/**
 * Verifies that Impression.log POSTs the expected JSON payload (URL, client
 * time, render type, placement id, impression slug and the article list) with
 * the expected headers, using mocked scalaj-http request/response objects.
 */
class ImpressionTest extends GdkAsyncTest with AsyncMockFactory {
  "Impression" can "be logged" in {
    val pk = PlacementKey(420L)

    class TestResponse extends MockableHttpResponse(_code = 200)
    val responseMock = mock[TestResponse]

    // The mock validates the POSTed JSON body field by field.
    val requestMock = mock[HttpRequest]
    (requestMock.postData(_: String)).expects(where { bodyJson: String =>
      val bodyObj = Json.parse(bodyJson).as[JsObject]
      (bodyObj \ "currentUrl").as[String] == "http://example.com/currentUrl" &&
      """^\d+$""".r.findFirstIn((bodyObj \ "clientTimeMillis").as[String]).nonEmpty &&
      (bodyObj \ "renderType").as[String] == "api" &&
      (bodyObj \ "sitePlacementId").as[String] == "420" &&
      (bodyObj \ "referrer").as[String] == "" &&
      (bodyObj \ "encodedImpressionSlug").as[String] == "imp slug" && {
        val articles = (bodyObj \ "articles").as[JsArray].value
        articles.length == 2 &&
        articles.head.as[RecoArticleInImpression] == RecoArticleInImpression(ArticleKey(1435L), 0, "article slug 1") &&
        articles(1).as[RecoArticleInImpression] == RecoArticleInImpression(ArticleKey(32434L), 1, "article slug 2")
      }
    }).once().returning(requestMock)
    (requestMock.header _).expects("Content-Type", "application/json").once().returning(requestMock)
    (requestMock.header _).expects("Accept", "application/json; version=0").once().returning(requestMock)
    (requestMock.asString _).expects().once().returning(responseMock)

    implicit val httpMock = mock[BaseHttp]
    (httpMock.apply _).expects(*).once().returning(requestMock)

    implicit val recoCtx = RecoContext(GravityUser("joeSchmoe"), "http://example.com/currentUrl")

    val article1 = RecoArticle(ArticleKey(1435L), "title 1", "click url 1", "display url 1", Some("author 1"),
      "image url 1", new DateTime(), "article slug 1")
    val article2 = RecoArticle(ArticleKey(32434L), "title 2", "click url 2", "display url 2", Some("author 2"),
      "image url 2", new DateTime(), "article slug 2")

    val resultCodeF = Impression.log(pk, Future {
      RecoResult(List(article1, article2), ImpressionViewedSpec("callback url", "imp viewed hash"), "imp slug", Map.empty)
    })
    resultCodeF.map(_ should be(200))
  }

  it can "log with arbitrary articles" in {
    val pk = PlacementKey(420L)

    class TestResponse extends MockableHttpResponse(_code = 200)
    val responseMock = mock[TestResponse]

    // Same body expectations as above: the logged articles come from the
    // explicit articles future, not from the RecoResult's own article list.
    val requestMock = mock[HttpRequest]
    (requestMock.postData(_: String)).expects(where { bodyJson: String =>
      val bodyObj = Json.parse(bodyJson).as[JsObject]
      (bodyObj \ "currentUrl").as[String] == "http://example.com/currentUrl" &&
      """^\d+$""".r.findFirstIn((bodyObj \ "clientTimeMillis").as[String]).nonEmpty &&
      (bodyObj \ "renderType").as[String] == "api" &&
      (bodyObj \ "sitePlacementId").as[String] == "420" &&
      (bodyObj \ "referrer").as[String] == "" &&
      (bodyObj \ "encodedImpressionSlug").as[String] == "imp slug" && {
        val articles = (bodyObj \ "articles").as[JsArray].value
        articles.length == 2 &&
        articles.head.as[RecoArticleInImpression] == RecoArticleInImpression(ArticleKey(1435L), 0, "article slug 1") &&
        articles(1).as[RecoArticleInImpression] == RecoArticleInImpression(ArticleKey(32434L), 1, "article slug 2")
      }
    }).once().returning(requestMock)
    (requestMock.header _).expects("Content-Type", "application/json").once().returning(requestMock)
    (requestMock.header _).expects("Accept", "application/json; version=0").once().returning(requestMock)
    (requestMock.asString _).expects().once().returning(responseMock)

    implicit val httpMock = mock[BaseHttp]
    (httpMock.apply _).expects(*).once().returning(requestMock)

    implicit val recoCtx = RecoContext(GravityUser("joeSchmoe"), "http://example.com/currentUrl")

    // otherArticle appears in the RecoResult but NOT in the expected payload
    val otherArticle = RecoArticle(ArticleKey(32425L), "other title", "other click url", "other display url",
      Some("other author"), "other image url", new DateTime(), "other article slug")
    val article1 = RecoArticle(ArticleKey(1435L), "title 1", "click url 1", "display url 1", Some("author 1"),
      "image url 1", new DateTime(), "article slug 1")
    val article2 = RecoArticle(ArticleKey(32434L), "title 2", "click url 2", "display url 2", Some("author 2"),
      "image url 2", new DateTime(), "article slug 2")

    val resultCodeF = Impression.log(pk, Future {
      RecoResult(List(otherArticle, article1), ImpressionViewedSpec("callback url", "imp viewed hash"), "imp slug", Map.empty)
    }, Some(Future {
      List(article1, article2)
    }))
    resultCodeF.map(_ should be(200))
  }
}
GravityLabs/gdk-scala
src/test/scala/com/gravity/gdk/impression/ImpressionTest.scala
Scala
apache-2.0
5,583
package org.jmespike.scene

import com.jme3.scene.Node
import com.jme3.asset.AssetManager

/**
 * A [[Scene]] intended to hold terrain. Currently a stub: it returns an empty
 * scene-graph [[Node]] and does not use the supplied asset manager.
 */
class TerrainScene extends Scene {
  // TODO: build actual terrain geometry; for now an empty Node is returned.
  def createScene(assetManager: AssetManager) = new Node()
}
zzorn/skycastle
src/main/scala/org/jmespike/scene/TerrainScene.scala
Scala
gpl-2.0
198
/*
 * Copyright 2015-2020 Snowflake Computing
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package net.snowflake.spark.snowflake

import java.sql.Connection

import net.snowflake.client.jdbc.SnowflakeConnectionV1
import org.apache.spark.SparkContext
import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
import org.slf4j.LoggerFactory

import scala.collection.mutable

/** A query currently executing on Snowflake: the JDBC connection it runs on plus its query ID. */
private[snowflake] case class RunningQuery(conn: Connection, queryID: String)

/**
 * Tracks in-flight Snowflake queries per Spark application so they can be
 * cancelled server-side when the application ends.
 *
 * All mutation of the shared map goes through `withSyncAndDoNotThrowException`,
 * which serializes access on a single lock and swallows (logs) any exception.
 */
object SparkConnectorContext {
  // The map to track running queries for a spark application.
  // Key: application ID; value: the set of queries currently running for it.
  private val runningQueries = mutable.Map[String, mutable.Set[RunningQuery]]()

  // Exposed for tests/inspection.
  private[snowflake] def getRunningQueries = runningQueries

  // Register a spark listener to cancel any running queries if the application ends.
  // Only one listener is registered per spark application (presence of the appId
  // key in `runningQueries` doubles as the "already registered" flag).
  private[snowflake] def registerSparkListenerIfNotYet(sparkContext: SparkContext): Unit =
    withSyncAndDoNotThrowException {
      val appId = sparkContext.applicationId
      if (!runningQueries.keySet.contains(appId)) {
        logger.info("Spark connector register listener for: " + appId)
        runningQueries.put(appId, mutable.Set.empty)
        sparkContext.addSparkListener(new SparkListener {
          override def onApplicationEnd(applicationEnd: SparkListenerApplicationEnd): Unit = {
            try {
              cancelRunningQueries(appId)
            } finally {
              super.onApplicationEnd(applicationEnd)
            }
          }
        })
      }
    }

  /**
   * Cancels every query still registered for `appId` and removes the map entry.
   *
   * Currently called when the spark application ENDs, so the entry for the
   * application is dropped after all its running queries are cancelled.
   * Each per-query failure is logged and does not stop the remaining cancels.
   */
  private[snowflake] def cancelRunningQueries(appId: String): Unit =
    withSyncAndDoNotThrowException {
      // Pattern match instead of `.nonEmpty` + `.get`: same behavior, no
      // partial Option access. Note Some(emptySet) still takes the first branch,
      // matching the original semantics.
      runningQueries.get(appId) match {
        case Some(queries) =>
          queries.foreach(rq =>
            try {
              if (!rq.conn.isClosed) {
                val statement = rq.conn.createStatement()
                val sessionID = rq.conn.asInstanceOf[SnowflakeConnectionV1].getSessionID
                logger.warn(s"Canceling query ${rq.queryID} for session: $sessionID")
                statement.execute(s"select SYSTEM$$CANCEL_QUERY('${rq.queryID}')")
                statement.close()
              }
            } catch {
              // Deliberate best-effort: keep cancelling the other queries.
              case th: Throwable =>
                logger.warn("Fail to cancel running queries: ", th)
            })
          logger.warn(s"Finish cancelling all queries for $appId")
          runningQueries.remove(appId)
        case None =>
          logger.info(s"No running query for: $appId")
      }
    }

  /** Registers a newly started query for the application (installing the listener if needed). */
  private[snowflake] def addRunningQuery(sparkContext: SparkContext,
                                         conn: Connection,
                                         queryID: String): Unit =
    withSyncAndDoNotThrowException {
      registerSparkListenerIfNotYet(sparkContext)
      val appId = sparkContext.applicationId
      val sessionID = conn.asInstanceOf[SnowflakeConnectionV1].getSessionID
      logger.info(s"Add running query for $appId session: $sessionID queryId: $queryID")
      val queries = runningQueries.get(appId)
      queries.foreach(_.add(RunningQuery(conn, queryID)))
    }

  /** Deregisters a query that has finished (normally or not). */
  private[snowflake] def removeRunningQuery(sparkContext: SparkContext,
                                            conn: Connection,
                                            queryID: String): Unit =
    withSyncAndDoNotThrowException {
      val appId = sparkContext.applicationId
      val sessionID = conn.asInstanceOf[SnowflakeConnectionV1].getSessionID
      logger.info(s"Remove running query for $appId session: $sessionID queryId: $queryID")
      val queries = runningQueries.get(appId)
      queries.foreach(_.remove(RunningQuery(conn, queryID)))
    }

  private[snowflake] val logger = LoggerFactory.getLogger(getClass)

  private var isConfigLogged = false
  private val locker = new Object

  // The system configuration is logged once per JVM.
  private[snowflake] def recordConfig(): Unit = {
    withSyncAndDoNotThrowException {
      if (!isConfigLogged) {
        isConfigLogged = true
        logger.info(s"Spark Connector system config: " +
          s"${SnowflakeTelemetry.getClientConfig().toPrettyString}")
      }
    }
  }

  // Runs `block` under the shared lock; catches Throwable BY DESIGN so that
  // bookkeeping failures never break the caller's Spark job (only logged).
  private def withSyncAndDoNotThrowException(block: => Unit): Unit =
    try {
      locker.synchronized {
        block
      }
    } catch {
      case th: Throwable =>
        logger.warn("Hit un-caught exception: " + th.getMessage)
    }
}
snowflakedb/spark-snowflake
src/main/scala/net/snowflake/spark/snowflake/SparkConnectorContext.scala
Scala
apache-2.0
5,358
/**
 * Copyright (C) 2013 Orbeon, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it under the terms of the
 * GNU Lesser General Public License as published by the Free Software Foundation; either version
 * 2.1 of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
 * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Lesser General Public License for more details.
 *
 * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
 */
package org.orbeon.saxon.function

import org.orbeon.oxf.xml.FunctionSupport
import org.orbeon.saxon.expr.{ExpressionTool, XPathContext}
import org.orbeon.saxon.functions.Evaluate.PreparedExpression
import org.orbeon.saxon.trace.Location
import org.orbeon.saxon.trans.{SaxonErrorCode, XPathException}
import org.orbeon.saxon.value.{BooleanValue, ObjectValue}
import org.orbeon.scaxon.Implicits._

import scala.util.control.Breaks._

/** `xxf:forall`: true iff the prepared expression is true for every item. */
class Forall extends ExistentialFunction {
  def defaultValue = true
  def returnNonDefaultValue(b: Boolean) = ! b
}

/** `xxf:exists`: true iff the prepared expression is true for at least one item. */
class Exists extends ExistentialFunction {
  def defaultValue = false
  def returnNonDefaultValue(b: Boolean) = b
}

/**
 * Shared implementation for existential/universal quantifier extension functions.
 *
 * Argument 0: the sequence of items to test.
 * Argument 1: a `PreparedExpression` (built with `saxon:expression`).
 * Arguments 2+: values bound to the prepared expression's local variables.
 *
 * The loop short-circuits: it returns `!defaultValue` as soon as
 * `returnNonDefaultValue` says so, otherwise `defaultValue` after exhausting
 * the items.
 */
trait ExistentialFunction extends FunctionSupport {

  def defaultValue: Boolean
  def returnNonDefaultValue(b: Boolean): Boolean

  override def evaluateItem(context: XPathContext): BooleanValue = {

    // NOTE(review): error messages hardcode `xxf:forall` even when raised from
    // xxf:exists — consider parameterizing the function name. Left as-is here.
    def throwDynamicError() = {
      val err = new XPathException("Second argument to xxf:forall must be an expression prepared using saxon:expression", this)
      err.setXPathContext(context)
      err.setErrorCode(SaxonErrorCode.SXXF0001)
      throw err
    }

    val items = arguments(0).iterate(context)

    val pexpr =
      Option(arguments(1).evaluateItem(context)) collect {
        case o: ObjectValue => o.getObject
      } collect {
        case e: PreparedExpression => e
      } getOrElse {
        throwDynamicError()
      }

    val c = context.newCleanContext
    c.setOriginatingConstructType(Location.SAXON_HIGHER_ORDER_EXTENSION_FUNCTION)
    c.setCurrentIterator(items)
    c.openStackFrame(pexpr.stackFrameMap)

    // Bind trailing arguments to the prepared expression's variable slots.
    for (i <- 2 until arguments.length) {
      val slot = pexpr.variables(i - 2).getLocalSlotNumber
      c.setLocalVariable(slot, ExpressionTool.eagerEvaluate(arguments(i), c))
    }

    breakable {
      while (true) {
        val next = items.next()
        if (next eq null)
          break()

        pexpr.expression.evaluateItem(c) match {
          case b: BooleanValue =>
            if (returnNonDefaultValue(b.getBooleanValue))
              return ! defaultValue
          case _ =>
            // Fixed message: the check above requires a boolean result, not a
            // numeric one (the original text said "numeric values").
            val e = new XPathException("expression in xxf:forall() must return boolean values")
            e.setXPathContext(context)
            throw e
        }
      }
    }
    defaultValue
  }
}
orbeon/orbeon-forms
src/main/scala/org/orbeon/saxon/function/existential.scala
Scala
lgpl-2.1
2,940
package org.littlewings.hazelcast.mapreduce

import com.hazelcast.mapreduce.{Combiner, CombinerFactory, Context, Mapper, Reducer, ReducerFactory}

/**
 * Map/combine/reduce pipeline whose only purpose is to record which threads
 * execute each phase. Every phase appends `Thread.currentThread.getName` under
 * a phase-specific key ("mapper"/"combiner"/"reducer").
 */
@SerialVersionUID(1L)
class ThreadInspectMapper extends Mapper[String, String, String, String] {
  /** Emits the mapper thread's name under the single key "threads". */
  override def map(key: String, value: String, context: Context[String, String]): Unit =
    context.emit("threads", Thread.currentThread.getName)
}

@SerialVersionUID(1L)
class ThreadInspectCombinerFactory extends CombinerFactory[String, String, Map[String, Set[String]]] {
  override def newCombiner(key: String): Combiner[String, String, Map[String, Set[String]]] =
    new ThreadInspectCombiner
}

/** Accumulates mapper thread names (the emitted values) and combiner thread names. */
class ThreadInspectCombiner extends Combiner[String, String, Map[String, Set[String]]] {
  private[this] var names: Map[String, Set[String]] = Map.empty

  override def combine(key: String, value: String): Unit = {
    // Idiomatic Map.getOrElse instead of the original get(...).getOrElse(...) chain.
    names += ("mapper" -> (names.getOrElse("mapper", Set.empty[String]) + value))
    names += ("combiner" -> (names.getOrElse("combiner", Set.empty[String]) + Thread.currentThread.getName))
  }

  /** Returns the accumulated chunk and resets state for the next chunk. */
  override def finalizeChunk: Map[String, Set[String]] = {
    val m = names
    names = Map.empty
    m
  }
}

@SerialVersionUID(1L)
class ThreadInspectReducerFactory extends ReducerFactory[String, Map[String, Set[String]], Map[String, Set[String]]] {
  override def newReducer(key: String): Reducer[String, Map[String, Set[String]], Map[String, Set[String]]] =
    new ThreadInspectReducer
}

/** Merges all combiner chunks and adds the reducer thread's own name. */
class ThreadInspectReducer extends Reducer[String, Map[String, Set[String]], Map[String, Set[String]]] {
  private[this] var names: Map[String, Set[String]] = Map.empty

  override def reduce(values: Map[String, Set[String]]): Unit = {
    values.foreach { case (k, v) =>
      names += (k -> (names.getOrElse(k, Set.empty[String]) ++ v))
    }
    names += ("reducer" -> (names.getOrElse("reducer", Set.empty[String]) + Thread.currentThread.getName))
  }

  override def finalizeReduce: Map[String, Set[String]] = names
}
kazuhira-r/hazelcast-examples
hazelcast-mapreduce-trial/src/main/scala/org/littlewings/hazelcast/mapreduce/ThreadInspectMapReduce.scala
Scala
mit
1,963
/*
 * Copyright 2018 Analytics Zoo Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.intel.analytics.zoo.feature.image

import com.intel.analytics.bigdl.transform.vision.image.ImageFeature
import com.intel.analytics.bigdl.transform.vision.image.augmentation

/**
 * Adjust the image brightness by delegating to BigDL's
 * `augmentation.Brightness` transform.
 *
 * @param deltaLow  brightness parameter: low bound
 * @param deltaHigh brightness parameter: high bound
 */
class ImageBrightness(deltaLow: Double, deltaHigh: Double) extends ImageProcessing {

  // Renamed from the misleading `internalCrop` (copy-paste from a crop
  // transform): the delegate is a Brightness transform, not a crop.
  private val internalBrightness = augmentation.Brightness(deltaLow, deltaHigh)

  override def apply(prev: Iterator[ImageFeature]): Iterator[ImageFeature] = {
    internalBrightness.apply(prev)
  }

  override def transformMat(feature: ImageFeature): Unit = {
    internalBrightness.transformMat(feature)
  }
}

object ImageBrightness {
  def apply(deltaLow: Double, deltaHigh: Double): ImageBrightness =
    new ImageBrightness(deltaLow, deltaHigh)
}
intel-analytics/analytics-zoo
zoo/src/main/scala/com/intel/analytics/zoo/feature/image/ImageBrightness.scala
Scala
apache-2.0
1,458
package ru.avhaliullin.whatever.backend

import org.apache.bcel.Constants
import org.apache.bcel.classfile.JavaClass
import org.apache.bcel.generic._
import ru.avhaliullin.whatever.semantic.module.ModuleName
import ru.avhaliullin.whatever.semantic.tpe.{JavaTypeGen, Tpe}
import ru.avhaliullin.whatever.semantic.{SemanticTreeNode => sem, _}

/**
 * Emits JVM bytecode (via Apache BCEL) for one generated class of the
 * "whatever" language: walks semantic-tree expressions and appends
 * instructions to a BCEL `InstructionList`.
 *
 * NOTE: instruction append order and branch-target fix-ups ARE the program
 * semantics here — do not reorder statements when maintaining this file.
 *
 * @author avhaliullin
 */
class ClassBytecodeGenerator(module: ModuleName, name: String) {
  private val jtg = JavaTypeGen
  // Fully-qualified name of the class being generated.
  private val className = jtg.moduleToJavaPackage(module) + "." + name

  /**
   * Mutable state for generating one method: BCEL generators plus the mapping
   * from semantic variable ids to BCEL local-variable slots.
   */
  private case class MethodContext(
                                    cg: ClassGen,
                                    cpg: ConstantPoolGen,
                                    instF: InstructionFactory,
                                    mg: MethodGen,
                                    il: InstructionList
                                  ) {
    // Counter for compiler-synthesized temporaries (see generateLocalVar).
    private var cpv = 0

    // Seeded with the method's declared arguments; grows as vars are defined.
    var vars: Map[VarId, LocalVariableGen] = mg.getLocalVariables.map { lvg =>
      VarId.MethodArg(lvg.getName) -> lvg
    }.toMap

    /** Allocates a BCEL local-variable slot for a semantic variable. */
    def defineVar(id: VarId, tpe: Tpe): Unit = {
      vars += id -> mg.addLocalVariable(id.name, jtg.toJavaType(tpe), null, null)
    }

    /** Appends IINC (int increment by `factor`) for the given variable. */
    def incrementVar(id: VarId, factor: Int): InstructionHandle = {
      val lvg = vars(id)
      val ih = il.append(new IINC(lvg.getIndex, factor))
      lvg.setEnd(ih)
      ih
    }

    /** Appends the type-appropriate store instruction for the variable. */
    def storeVar(id: VarId, tpe: Tpe): InstructionHandle = {
      assignVar(id, InstructionFactory.createStore(jtg.toJavaType(tpe), _))
    }

    /**
     * Appends an instruction built from the variable's slot index; also marks
     * the variable's live-range start on its first assignment.
     */
    def assignVar(id: VarId, initializer: Int => Instruction): InstructionHandle = {
      val v = vars(id)
      val inst = initializer(v.getIndex)
      val ih = il.append(inst)
      if (v.getStart == null) {
        v.setStart(ih)
      }
      ih
    }

    /** Appends the type-appropriate load instruction for the variable. */
    def loadVar(id: VarId, tpe: Tpe): InstructionHandle = {
      val lvg = vars(id)
      val ih = il.append(InstructionFactory.createLoad(jtg.toJavaType(tpe), lvg.getIndex))
      lvg.setEnd(ih)
      ih
    }

    /**
     * Copy of this context targeting a different instruction list, but sharing
     * the same variable table (note: the `il` parameter shadows the field).
     */
    def withIL(il: InstructionList): MethodContext = {
      val res = copy(il = il)
      res.vars = vars
      res
    }

    /** Builds (does not append) a PUSH of an int constant against this pool. */
    def PUSH(i: Int) = new PUSH(cpg, i)

    /** Defines a fresh compiler-produced temporary of the given type. */
    def generateLocalVar(tpe: Tpe): VarId = {
      cpv += 1
      val id = VarId.CompilerProduced(cpv)
      defineVar(id, tpe)
      id
    }
  }

  /**
   * Appends `bInst` plus the "fall-through" (`onPassOpt`) and "branch-taken"
   * (`onBranchOpt`) code, wiring branch targets. With only a branch-taken arm
   * and an invertible IfInstruction, it negates the condition to avoid a GOTO.
   */
  private def generateBranching(ctx: MethodContext, bInst: BranchInstruction, onPassOpt: Option[InstructionList], onBranchOpt: Option[InstructionList]): Unit = {
    (onPassOpt, onBranchOpt) match {
      case (None, None) =>
        // Neither arm emits code: branch to a NOP landing pad.
        ctx.il.append(bInst)
        val nop = ctx.il.append(new NOP)
        bInst.setTarget(nop)
      case (Some(onPass), Some(onBranch)) =>
        ctx.il.append(bInst)
        ctx.il.append(onPass)
        val jmp = ctx.il.append(new GOTO(null))
        val p0 = ctx.il.append(onBranch)
        val end = ctx.il.append(new NOP)
        bInst.setTarget(p0)
        jmp.setTarget(end)
      case (Some(onPass), None) =>
        ctx.il.append(bInst)
        ctx.il.append(onPass)
        val end = ctx.il.append(new NOP)
        bInst.setTarget(end)
      case (None, Some(onBranch)) =>
        bInst match {
          case ifInst: IfInstruction =>
            // Invert the test so the only code block becomes the fall-through.
            generateBranching(ctx, ifInst.negate(), onBranchOpt, onPassOpt)
          case _ =>
            ctx.il.append(bInst)
            val jmp = ctx.il.append(new GOTO(null))
            val p0 = ctx.il.append(onBranch)
            val end = ctx.il.append(new NOP)
            bInst.setTarget(p0)
            jmp.setTarget(end)
        }
    }
  }

  /**
   * Emits both operands then materializes the comparison result as an int:
   * falls through to push 0 (false), branch-taken pushes 1 (true).
   */
  private def generateBoolExpr(ctx: MethodContext, arg1: sem.Expression, arg2: sem.Expression, bInst: BranchInstruction): Unit = {
    generateForNode(ctx, arg1)
    generateForNode(ctx, arg2)
    generateBranching(ctx, bInst, Some(new InstructionList(ctx.PUSH(0))), Some(new InstructionList(ctx.PUSH(1))))
  }

  /**
   * Emits an if over `cond`. When the condition is an inlinable comparison
   * (see BranchInliner) the compare branches directly instead of first
   * materializing a boolean and re-testing it with IFEQ.
   */
  private def generateIf(ctx: MethodContext, cond: sem.Expression, thenIl: Option[InstructionList], elseIl: Option[InstructionList]): Unit = {
    cond match {
      case BranchInliner(inliner) =>
        inliner.inline(ctx, thenIl, elseIl)
      case _ =>
        generateForNode(ctx, cond)
        generateBranching(ctx, new IFEQ(null), thenIl, elseIl)
    }
  }

  /**
   * Core code generator: appends the bytecode for one semantic expression.
   *
   * @param returnUnit when true and the node produces no value (`!node.valRet`),
   *                   loads the singleton `Unit.INSTANCE` so the expression
   *                   still leaves a value on the stack.
   */
  private def generateForNode(ctx: MethodContext, node: sem.Expression, returnUnit: Boolean = true): Unit = {
    import sem._

    // Emit both operands then one instruction combining them.
    def trivialBinOp(arg1: Expression, arg2: Expression, inst: Instruction): Unit = {
      generateForNode(ctx, arg1)
      generateForNode(ctx, arg2)
      ctx.il.append(inst)
    }

    node match {
      case VarDefinition(id, tpe) =>
        ctx.defineVar(id, tpe)

      case FieldAssignment(field, value, read) =>
        generateForNode(ctx, field.expr)
        generateForNode(ctx, value)
        // When the assignment is also read (used as an expression), duplicate
        // the value into a temp before PUTFIELD consumes it, then reload it.
        val retVar = if (read) {
          ctx.il.append(InstructionFactory.createDup(jtg.toJavaType(field.tpe).getSize))
          val id = ctx.generateLocalVar(value.tpe)
          ctx.storeVar(id, value.tpe)
          Some(id)
        } else None
        ctx.il.append(ctx.instF.createPutField(jtg.toJavaType(field.structure).getClassName, field.field.name, jtg.toJavaType(field.tpe)))
        retVar.foreach {
          id =>
            ctx.loadVar(id, value.tpe)
        }

      case VarAssignment(id, value, read) =>
        generateForNode(ctx, value)
        if (read) {
          // DUP so the stored value also remains as the expression's result.
          ctx.il.append(InstructionFactory.createDup(jtg.toJavaType(value.tpe).getSize))
        }
        ctx.storeVar(id, value.tpe)

      case VarRead(id, tpe) =>
        ctx.loadVar(id, tpe)

      case FieldAccess(field, st, stExpr) =>
        generateForNode(ctx, stExpr)
        ctx.il.append(ctx.instF.createGetField(jtg.toJavaType(st).getClassName, field.name, jtg.toJavaType(field.tpe)))

      case Block(code, tpe) =>
        generateBlock(ctx, code)

      case FnCall(sig, args) =>
        // Free functions live as static methods on the module object.
        args.foreach(generateForNode(ctx, _))
        ctx.il.append(
          ctx.instF.createInvoke(
            jtg.moduleObject(sig.module).getClassName,
            sig.name,
            jtg.toJavaFnRetType(sig.returnType),
            sig.args.map(arg => jtg.toJavaType(arg.tpe)).toArray,
            Constants.INVOKESTATIC
          )
        )

      case StructMethodCall(st, fn, calledOn, args) =>
        generateForNode(ctx, calledOn)
        args.foreach(generateForNode(ctx, _))
        ctx.il.append(
          ctx.instF.createInvoke(
            jtg.toJavaType(st).getClassName,
            fn.name,
            jtg.toJavaFnRetType(fn.returnType),
            fn.args.map(arg => jtg.toJavaType(arg.tpe)).toArray,
            Constants.INVOKEVIRTUAL
          )
        )

      case Echo(expr) =>
        // System.out.println(<expr>), converting non-String objects via
        // String.valueOf and arrays via java.util.Arrays.toString.
        val pStream = new ObjectType("java.io.PrintStream")
        ctx.il.append(ctx.instF.createFieldAccess("java.lang.System", "out", pStream, Constants.GETSTATIC))
        generateForNode(ctx, expr)
        val jType = jtg.toJavaType(expr.tpe)
        val printType = jType match {
          case obj: ObjectType if obj != Type.STRING =>
            ctx.il.append(ctx.instF.createInvoke(
              Type.STRING.getClassName,
              "valueOf",
              Type.STRING,
              Array(Type.OBJECT),
              Constants.INVOKESTATIC
            ))
            Type.STRING
          case arr: ArrayType =>
            val eTpe = arr.getElementType match {
              case ot: ObjectType => Type.OBJECT
              case at: ArrayType => Type.OBJECT //TODO: already need std library here
              case other => other
            }
            ctx.il.append(ctx.instF.createInvoke(
              "java.util.Arrays",
              "toString",
              Type.STRING,
              Array(new ArrayType(eTpe, 1)),
              Constants.INVOKESTATIC
            ))
            Type.STRING
          case other => other
        }
        ctx.il.append(ctx.instF.createInvoke("java.io.PrintStream", "println", Type.VOID, Array(printType), Constants.INVOKEVIRTUAL))

      case BOperator(arg1, arg2, op) =>
        op match {
          case Operator.IDIV => trivialBinOp(arg1, arg2, new IDIV)
          case Operator.IMUL => trivialBinOp(arg1, arg2, new IMUL)
          case Operator.ISUB => trivialBinOp(arg1, arg2, new ISUB)
          case Operator.IADD => trivialBinOp(arg1, arg2, new IADD)
          case Operator.BAND => trivialBinOp(arg1, arg2, new IAND)
          case Operator.BOR => trivialBinOp(arg1, arg2, new IOR)
          case Operator.BXOR => trivialBinOp(arg1, arg2, new IXOR)
          case Operator.BAND_LZY =>
            // Short-circuit &&: either operand false jumps straight to push 0.
            generateForNode(ctx, arg1)
            val if1 = ctx.il.append(new IFEQ(null))
            generateForNode(ctx, arg2)
            val if2 = ctx.il.append(new IFEQ(null))
            ctx.il.append(new PUSH(ctx.cpg, 1))
            val gotoEnd = ctx.il.append(new GOTO(null))
            val onFalse = ctx.il.append(new PUSH(ctx.cpg, 0))
            val end = ctx.il.append(new NOP)
            if1.setTarget(onFalse)
            if2.setTarget(onFalse)
            gotoEnd.setTarget(end)
          case Operator.BOR_LZY =>
            // Short-circuit ||: either operand true jumps straight to push 1.
            generateForNode(ctx, arg1)
            val if1 = ctx.il.append(new IFNE(null))
            generateForNode(ctx, arg2)
            val if2 = ctx.il.append(new IFNE(null))
            ctx.il.append(ctx.PUSH(0))
            val gotoEnd = ctx.il.append(new GOTO(null))
            val onTrue = ctx.il.append(ctx.PUSH(1))
            val end = ctx.il.append(new NOP)
            if1.setTarget(onTrue)
            if2.setTarget(onTrue)
            gotoEnd.setTarget(end)
          case Operator.ILT => generateBoolExpr(ctx, arg1, arg2, new IF_ICMPLT(null))
          case Operator.ILE => generateBoolExpr(ctx, arg1, arg2, new IF_ICMPLE(null))
          case Operator.IGT => generateBoolExpr(ctx, arg1, arg2, new IF_ICMPGT(null))
          case Operator.IGE => generateBoolExpr(ctx, arg1, arg2, new IF_ICMPGE(null))
          case Operator.IEQ => generateBoolExpr(ctx, arg1, arg2, new IF_ICMPEQ(null))
          case Operator.INE => generateBoolExpr(ctx, arg1, arg2, new IF_ICMPNE(null))
          case Operator.CONCAT =>
            generateForNode(ctx, arg1)
            generateForNode(ctx, arg2)
            ctx.il.append(ctx.instF.createInvoke(
              Type.STRING.getClassName,
              "concat",
              Type.STRING,
              Array(Type.STRING),
              Constants.INVOKEVIRTUAL
            ))
        }

      case UOperator(arg, op) =>
        generateForNode(ctx, arg)
        val inst = op match {
          case Operator.INEG => new INEG
          case Operator.BNEG =>
            // Boolean negation as XOR with 1 (booleans are ints on the stack).
            ctx.il.append(new PUSH(ctx.cpg, 1))
            new IXOR
        }
        ctx.il.append(inst)

      case const: Const =>
        val inst = const match {
          case IntConst(value) => new PUSH(ctx.cpg, value)
          case BoolConst(value) => new PUSH(ctx.cpg, value)
          case StringConst(value) => new PUSH(ctx.cpg, value)
        }
        ctx.il.append(inst)

      case IfExpr(cond, thenBlock, elseBlock, tpe) =>
        // Generate both arms into detached lists, then wire the branch.
        val thenIl = new InstructionList()
        val elseIl = new InstructionList()
        generateForNode(ctx.withIL(thenIl), thenBlock, returnUnit)
        generateForNode(ctx.withIL(elseIl), elseBlock, returnUnit)
        val thenIlOpt = if (thenIl.isEmpty) None else Some(thenIl)
        val elseIlOpt = if (elseIl.isEmpty) None else Some(elseIl)
        generateIf(ctx, cond, thenIlOpt, elseIlOpt)

      case Nop =>
        ctx.il.append(new NOP)

      case Consume(expr) =>
        // Evaluate for side effects only; pop the produced value.
        generateForNode(ctx, expr)
        ctx.il.append(InstructionFactory.createPop(jtg.toJavaType(expr.tpe).getSize))

      case si@StructureInstantiation(struct, args, evalOrder) =>
        // NEW + DUP, evaluate ctor args into temps in the source-declared
        // evaluation order, reload them in positional order, INVOKESPECIAL.
        val sType = jtg.toObjectType(si.tpe)
        ctx.il.append(ctx.instF.createNew(sType))
        ctx.il.append(new DUP)
        val localVars = args.map(e => ctx.generateLocalVar(e.tpe) -> e.tpe).toIndexedSeq
        evalOrder.foreach {
          i =>
            val e = args(i)
            generateForNode(ctx, VarAssignment(localVars(i)._1, e, false), returnUnit = false)
        }
        localVars.foreach {
          case (id, tpe) => ctx.loadVar(id, tpe)
        }
        ctx.il.append(
          ctx.instF.createInvoke(
            sType.getClassName,
            "<init>",
            Type.VOID,
            args.map(e => jtg.toJavaType(e.tpe)).toArray,
            Constants.INVOKESPECIAL
          )
        )

      case ArrayInstantiation(elemType, args) =>
        val javaElemType = jtg.toJavaType(elemType)
        val size = args.size
        ctx.il.append(new PUSH(ctx.cpg, size))
        ctx.il.append(ctx.instF.createNewArray(javaElemType, 1))
        // For each element: DUP array ref, push index, value, array store.
        args.zipWithIndex.foreach {
          case (e, idx) =>
            ctx.il.append(new DUP)
            ctx.il.append(new PUSH(ctx.cpg, idx))
            generateForNode(ctx, e)
            ctx.il.append(InstructionFactory.createArrayStore(javaElemType))
        }

      case ArrayLength(arr) =>
        generateForNode(ctx, arr)
        ctx.il.append(new ARRAYLENGTH)

      case ArrayGet(arr, index, tpe) =>
        generateForNode(ctx, arr)
        generateForNode(ctx, index)
        ctx.il.append(InstructionFactory.createArrayLoad(jtg.toJavaType(tpe)))

      case ArraySet(arr, index, value) =>
        generateForNode(ctx, arr)
        generateForNode(ctx, index)
        generateForNode(ctx, value)
        ctx.il.append(InstructionFactory.createArrayStore(jtg.toJavaType(value.tpe)))

      case ForLoop(itVarId, iterable, body) =>
        // Classic indexed loop over an array: idx/arr temps, bound check with
        // IF_ICMPGE, load element into the loop variable, body, IINC, GOTO.
        generateForNode(ctx, iterable)
        iterable.tpe match {
          case arrTpe@Tpe.Arr(elemType) =>
            val idxVar = ctx.generateLocalVar(Tpe.INT)
            val arrVar = ctx.generateLocalVar(arrTpe)
            ctx.storeVar(arrVar, arrTpe)
            ctx.il.append(new PUSH(ctx.cpg, 0))
            ctx.storeVar(idxVar, Tpe.INT)
            val loopStart = ctx.loadVar(idxVar, Tpe.INT)
            ctx.loadVar(arrVar, arrTpe)
            ctx.il.append(new ARRAYLENGTH)
            val onFail = ctx.il.append(new IF_ICMPGE(null))
            ctx.loadVar(arrVar, arrTpe)
            ctx.loadVar(idxVar, Tpe.INT)
            ctx.il.append(InstructionFactory.createArrayLoad(jtg.toJavaType(elemType)))
            ctx.defineVar(itVarId, elemType)
            ctx.storeVar(itVarId, elemType)
            generateBlock(ctx, body)
            ctx.incrementVar(idxVar, 1)
            ctx.il.append(new GOTO(loopStart))
            val end = ctx.il.append(new NOP)
            onFail.setTarget(end)
          case unknown =>
            throw new RuntimeException("For loop not implemented for " + unknown)
        }
    }
    // Statements that produce no value still yield Unit.INSTANCE when a value
    // is expected by the surrounding context.
    if (returnUnit && !node.valRet) {
      val unitType = new ObjectType(jtg.moduleToJavaPackage(ModuleName.Default.std) + ".Unit")
      ctx.il.append(ctx.instF.createGetStatic(unitType.getClassName, "INSTANCE", unitType))
    }
  }

  /** Emits a sequence of expressions; none of them force a Unit result. */
  private def generateBlock(ctx: MethodContext, code: Seq[sem.Expression]): Unit = {
    if (code.nonEmpty) {
      code.foreach(generateForNode(ctx, _, returnUnit = false))
    }
  }

  /** Emits one method from its semantic definition (delegates to the private overload). */
  def generateMethod(cg: ClassGen, cp: ConstantPoolGen, instF: InstructionFactory, access: AccessModifiers, fnDef: sem.FnDefinition): Unit = {
    generateMethod(cg, cp, instF, access, fnDef.code, fnDef.sig.name, fnDef.sig.args.map(arg => (arg.name, jtg.toJavaType(arg.tpe))), jtg.toJavaFnRetType(fnDef.sig.returnType))
  }

  /** Builds a MethodGen, emits the body, appends the return, finalizes the method. */
  private def generateMethod(cg: ClassGen, cp: ConstantPoolGen, instF: InstructionFactory, access: AccessModifiers, code: Seq[sem.Expression], name: String, args: Seq[(String, Type)], retType: Type): Unit = {
    val il = new InstructionList()
    val mg = new MethodGen(
      access.flags,
      retType,
      args.map(_._2).toArray,
      args.map(_._1).toArray,
      name,
      className,
      il,
      cp
    )
    generateBlock(MethodContext(cg, cp, instF, mg, il), code)
    il.append(InstructionFactory.createReturn(retType))
    mg.setMaxStack()
    cg.addMethod(mg.getMethod)
    il.dispose()
  }

  /** Generates the whole class: one public static method per function definition. */
  def generateClass(ast: Seq[sem.FnDefinition]): JavaClass = {
    val cg = new ClassGen(className, "java.lang.Object", "<generated>", Constants.ACC_PUBLIC | Constants.ACC_SUPER, null)
    val cp = cg.getConstantPool
    val instFactory = new InstructionFactory(cg)

    ast.foreach {
      case fnDef: sem.FnDefinition =>
        generateMethod(cg, cp, instFactory, AccessModifiers(public = true, static = true), fnDef)
    }
    cg.getJavaClass
  }

  /**
   * A condition that can branch directly (one compare instruction) instead of
   * materializing a boolean first. Note `inline` receives (onTrue, onFalse)
   * but generateBranching is called with (onFalse, onTrue): the "pass"
   * (fall-through) arm is the false arm for these comparisons.
   */
  abstract class BranchInliner {
    def inline(ctx: MethodContext, onTrue: Option[InstructionList], onFalse: Option[InstructionList])
  }

  object BranchInliner {
    /** Matches int-comparison operators that map 1:1 onto IF_ICMPxx instructions. */
    def unapply(e: sem.Expression): Option[BranchInliner] = {
      e match {
        case sem.BOperator(e1, e2, op) =>
          val instOp = op match {
            case Operator.ILT => Some(new IF_ICMPLT(null))
            case Operator.ILE => Some(new IF_ICMPLE(null))
            case Operator.IGT => Some(new IF_ICMPGT(null))
            case Operator.IGE => Some(new IF_ICMPGE(null))
            case Operator.IEQ => Some(new IF_ICMPEQ(null))
            case Operator.INE => Some(new IF_ICMPNE(null))
            case _ => None
          }
          instOp.map {
            inst =>
              new BranchInliner {
                override def inline(ctx: MethodContext, onTrue: Option[InstructionList], onFalse: Option[InstructionList]): Unit = {
                  generateForNode(ctx, e1)
                  generateForNode(ctx, e2)
                  generateBranching(ctx, inst, onFalse, onTrue)
                }
              }
          }
        case _ => None
      }
    }
  }
}
avhaliullin/whatever-compiler
compiler/src/main/scala/ru/avhaliullin/whatever/backend/ClassBytecodeGenerator.scala
Scala
mit
17,722
package ml.combust.mleap.core.feature

/**
 * Policy for handling invalid entries during feature transformation.
 * Mirrors Spark ML's `handleInvalid` parameter values.
 */
sealed trait HandleInvalid {
  /** The string form used as the Spark ML parameter value. */
  def asParamString: String
}

object HandleInvalid {
  /** Default policy: raise an error on invalid entries. */
  val default = Error

  /** Fail fast when an invalid entry is encountered. */
  case object Error extends HandleInvalid {
    override def asParamString: String = "error"
  }

  /** Drop rows containing invalid entries. */
  case object Skip extends HandleInvalid {
    override def asParamString: String = "skip"
  }

  /** Keep invalid entries in a dedicated bucket/category. */
  case object Keep extends HandleInvalid {
    override def asParamString: String = "keep"
  }

  /**
   * Parses a policy from its parameter string.
   *
   * @param value   one of "error", "skip", "keep"
   * @param canSkip whether the "skip" policy is permitted for the caller
   * @throws IllegalArgumentException for unknown values, or for "skip" when
   *                                  `canSkip` is false
   */
  def fromString(value: String, canSkip: Boolean = true): HandleInvalid = {
    def invalid: Nothing = throw new IllegalArgumentException(s"Invalid handler: $value")

    value match {
      case "error"             => Error
      case "skip" if canSkip   => Skip
      case "skip"              => invalid
      case "keep"              => Keep
      case _                   => invalid
    }
  }
}
combust/mleap
mleap-core/src/main/scala/ml/combust/mleap/core/feature/HandleInvalid.scala
Scala
apache-2.0
803
/*
 * Copyright 2014–2017 SlamData Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package quasar.precog.common
package security

import quasar.blueeyes._, json._, serialization._
import IsoSerialization._, Iso8601Serialization._, Versioned._
import shapeless.HNil

import scalaz._, Scalaz._

/**
 * Persistent record for an API key: its id, optional human-readable metadata,
 * the key that issued it, the grants it carries, and a root flag.
 */
case class APIKeyRecord(apiKey: APIKey, name: Option[String], description: Option[String], issuerKey: APIKey, grants: Set[GrantId], isRoot: Boolean)

object APIKeyRecord {
  // Field-name schemas map case-class fields (in declaration order) to JSON
  // keys; `|||` supplies a fallback value when the field is absent.
  // CAUTION: the HNil list order must match the APIKeyRecord field order.
  val schemaV1 = "apiKey" :: "name" :: "description" :: ("issuerKey" ||| "(undefined)") :: "grants" :: "isRoot" :: HNil
  @deprecated("V0 serialization schemas should be removed when legacy data is no longer needed", "2.1.5")
  val schemaV0 = "tid" :: "name" :: "description" :: ("cid" ||| "(undefined)") :: "gids" :: ("isRoot" ||| false) :: HNil

  // Serializer (decomposer) writes the versioned V1 shape.
  val decomposerV1: Decomposer[APIKeyRecord] = decomposerV[APIKeyRecord](schemaV1, Some("1.0".v))
  // Deserializers (extractors) for each historical on-disk shape:
  // V2 = V1 schema tagged "1.0", V1 = untagged V1 schema, V0 = legacy field names.
  val extractorV2: Extractor[APIKeyRecord] = extractorV[APIKeyRecord](schemaV1, Some("1.0".v))
  val extractorV1: Extractor[APIKeyRecord] = extractorV[APIKeyRecord](schemaV1, None)
  val extractorV0: Extractor[APIKeyRecord] = extractorV[APIKeyRecord](schemaV0, None)

  implicit val decomposer: Decomposer[APIKeyRecord] = decomposerV1
  // Try the newest format first, falling back through older formats (<+>).
  implicit val extractor: Extractor[APIKeyRecord] = extractorV2 <+> extractorV1 <+> extractorV0
}
drostron/quasar
blueeyes/src/main/scala/quasar/precog/common/security/APIKey.scala
Scala
apache-2.0
1,876
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.aggregate import org.apache.spark.memory.TaskMemoryManager import org.apache.spark.TaskContext import org.apache.spark.rdd.RDD import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.errors._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.aggregate._ import org.apache.spark.sql.catalyst.expressions.codegen._ import org.apache.spark.sql.catalyst.plans.physical._ import org.apache.spark.sql.execution._ import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics} import org.apache.spark.sql.types.{DecimalType, StringType, StructType} import org.apache.spark.unsafe.KVIterator import org.apache.spark.util.Utils /** * Hash-based aggregate operator that can also fallback to sorting when data exceeds memory size. 
*/ case class HashAggregateExec( requiredChildDistributionExpressions: Option[Seq[Expression]], groupingExpressions: Seq[NamedExpression], aggregateExpressions: Seq[AggregateExpression], aggregateAttributes: Seq[Attribute], initialInputBufferOffset: Int, resultExpressions: Seq[NamedExpression], child: SparkPlan) extends UnaryExecNode with CodegenSupport { private[this] val aggregateBufferAttributes = { aggregateExpressions.flatMap(_.aggregateFunction.aggBufferAttributes) } require(HashAggregateExec.supportsAggregate(aggregateBufferAttributes)) override lazy val allAttributes: AttributeSeq = child.output ++ aggregateBufferAttributes ++ aggregateAttributes ++ aggregateExpressions.flatMap(_.aggregateFunction.inputAggBufferAttributes) override lazy val metrics = Map( "numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"), "peakMemory" -> SQLMetrics.createSizeMetric(sparkContext, "peak memory"), "spillSize" -> SQLMetrics.createSizeMetric(sparkContext, "spill size"), "aggTime" -> SQLMetrics.createTimingMetric(sparkContext, "aggregate time"), "avgHashmapProbe" -> SQLMetrics.createAverageMetric(sparkContext, "avg hashmap probe")) override def output: Seq[Attribute] = resultExpressions.map(_.toAttribute) override def outputPartitioning: Partitioning = child.outputPartitioning override def producedAttributes: AttributeSet = AttributeSet(aggregateAttributes) ++ AttributeSet(resultExpressions.diff(groupingExpressions).map(_.toAttribute)) ++ AttributeSet(aggregateBufferAttributes) override def requiredChildDistribution: List[Distribution] = { requiredChildDistributionExpressions match { case Some(exprs) if exprs.isEmpty => AllTuples :: Nil case Some(exprs) if exprs.nonEmpty => ClusteredDistribution(exprs) :: Nil case None => UnspecifiedDistribution :: Nil } } // This is for testing. We force TungstenAggregationIterator to fall back to the unsafe row hash // map and/or the sort-based aggregation once it has processed a given number of input rows. 
private val testFallbackStartsAt: Option[(Int, Int)] = { sqlContext.getConf("spark.sql.TungstenAggregate.testFallbackStartsAt", null) match { case null | "" => None case fallbackStartsAt => val splits = fallbackStartsAt.split(",").map(_.trim) Some((splits.head.toInt, splits.last.toInt)) } } protected override def doExecute(): RDD[InternalRow] = attachTree(this, "execute") { val numOutputRows = longMetric("numOutputRows") val peakMemory = longMetric("peakMemory") val spillSize = longMetric("spillSize") val avgHashmapProbe = longMetric("avgHashmapProbe") child.execute().mapPartitions { iter => val hasInput = iter.hasNext if (!hasInput && groupingExpressions.nonEmpty) { // This is a grouped aggregate and the input iterator is empty, // so return an empty iterator. Iterator.empty } else { val aggregationIterator = new TungstenAggregationIterator( groupingExpressions, aggregateExpressions, aggregateAttributes, initialInputBufferOffset, resultExpressions, (expressions, inputSchema) => newMutableProjection(expressions, inputSchema, subexpressionEliminationEnabled), child.output, iter, testFallbackStartsAt, numOutputRows, peakMemory, spillSize, avgHashmapProbe) if (!hasInput && groupingExpressions.isEmpty) { numOutputRows += 1 Iterator.single[UnsafeRow](aggregationIterator.outputForEmptyGroupingKeyWithoutInput()) } else { aggregationIterator } } } } // all the mode of aggregate expressions private val modes = aggregateExpressions.map(_.mode).distinct override def usedInputs: AttributeSet = inputSet override def supportCodegen: Boolean = { // ImperativeAggregate is not supported right now !aggregateExpressions.exists(_.aggregateFunction.isInstanceOf[ImperativeAggregate]) } override def inputRDDs(): Seq[RDD[InternalRow]] = { child.asInstanceOf[CodegenSupport].inputRDDs() } protected override def doProduce(ctx: CodegenContext): String = { if (groupingExpressions.isEmpty) { doProduceWithoutKeys(ctx) } else { doProduceWithKeys(ctx) } } override def doConsume(ctx: 
CodegenContext, input: Seq[ExprCode], row: ExprCode): String = { if (groupingExpressions.isEmpty) { doConsumeWithoutKeys(ctx, input) } else { doConsumeWithKeys(ctx, input) } } // The variables used as aggregation buffer. Only used for aggregation without keys. private var bufVars: Seq[ExprCode] = _ private def doProduceWithoutKeys(ctx: CodegenContext): String = { val initAgg = ctx.freshName("initAgg") ctx.addMutableState("boolean", initAgg, s"$initAgg = false;") // generate variables for aggregation buffer val functions = aggregateExpressions.map(_.aggregateFunction.asInstanceOf[DeclarativeAggregate]) val initExpr = functions.flatMap(f => f.initialValues) bufVars = initExpr.map { e => val isNull = ctx.freshName("bufIsNull") val value = ctx.freshName("bufValue") ctx.addMutableState("boolean", isNull, "") ctx.addMutableState(ctx.javaType(e.dataType), value, "") // The initial expression should not access any column val ev = e.genCode(ctx) val initVars = s""" | $isNull = ${ev.isNull}; | $value = ${ev.value}; """.stripMargin ExprCode(ev.code + initVars, isNull, value) } val initBufVar = evaluateVariables(bufVars) // generate variables for output val (resultVars, genResult) = if (modes.contains(Final) || modes.contains(Complete)) { // evaluate aggregate results ctx.currentVars = bufVars val aggResults = functions.map(_.evaluateExpression).map { e => BindReferences.bindReference(e, aggregateBufferAttributes).genCode(ctx) } val evaluateAggResults = evaluateVariables(aggResults) // evaluate result expressions ctx.currentVars = aggResults val resultVars = resultExpressions.map { e => BindReferences.bindReference(e, aggregateAttributes).genCode(ctx) } (resultVars, s""" |$evaluateAggResults |${evaluateVariables(resultVars)} """.stripMargin) } else if (modes.contains(Partial) || modes.contains(PartialMerge)) { // output the aggregate buffer directly (bufVars, "") } else { // no aggregate function, the result should be literals val resultVars = 
resultExpressions.map(_.genCode(ctx)) (resultVars, evaluateVariables(resultVars)) } val doAgg = ctx.freshName("doAggregateWithoutKey") val doAggFuncName = ctx.addNewFunction(doAgg, s""" | private void $doAgg() throws java.io.IOException { | // initialize aggregation buffer | $initBufVar | | ${child.asInstanceOf[CodegenSupport].produce(ctx, this)} | } """.stripMargin) val numOutput = metricTerm(ctx, "numOutputRows") val aggTime = metricTerm(ctx, "aggTime") val beforeAgg = ctx.freshName("beforeAgg") s""" | while (!$initAgg) { | $initAgg = true; | long $beforeAgg = System.nanoTime(); | $doAggFuncName(); | $aggTime.add((System.nanoTime() - $beforeAgg) / 1000000); | | // output the result | ${genResult.trim} | | $numOutput.add(1); | ${consume(ctx, resultVars).trim} | } """.stripMargin } protected override val shouldStopRequired = false private def doConsumeWithoutKeys(ctx: CodegenContext, input: Seq[ExprCode]): String = { // only have DeclarativeAggregate val functions = aggregateExpressions.map(_.aggregateFunction.asInstanceOf[DeclarativeAggregate]) val inputAttrs = functions.flatMap(_.aggBufferAttributes) ++ child.output val updateExpr = aggregateExpressions.flatMap { e => e.mode match { case Partial | Complete => e.aggregateFunction.asInstanceOf[DeclarativeAggregate].updateExpressions case PartialMerge | Final => e.aggregateFunction.asInstanceOf[DeclarativeAggregate].mergeExpressions } } ctx.currentVars = bufVars ++ input val boundUpdateExpr = updateExpr.map(BindReferences.bindReference(_, inputAttrs)) val subExprs = ctx.subexpressionEliminationForWholeStageCodegen(boundUpdateExpr) val effectiveCodes = subExprs.codes.mkString("\n") val aggVals = ctx.withSubExprEliminationExprs(subExprs.states) { boundUpdateExpr.map(_.genCode(ctx)) } // aggregate buffer should be updated atomic val updates = aggVals.zipWithIndex.map { case (ev, i) => s""" | ${bufVars(i).isNull} = ${ev.isNull}; | ${bufVars(i).value} = ${ev.value}; """.stripMargin } s""" | // do aggregate | // common 
sub-expressions | $effectiveCodes | // evaluate aggregate function | ${evaluateVariables(aggVals)} | // update aggregation buffer | ${updates.mkString("\n").trim} """.stripMargin } private val groupingAttributes = groupingExpressions.map(_.toAttribute) private val groupingKeySchema = StructType.fromAttributes(groupingAttributes) private val declFunctions = aggregateExpressions.map(_.aggregateFunction) .filter(_.isInstanceOf[DeclarativeAggregate]) .map(_.asInstanceOf[DeclarativeAggregate]) private val bufferSchema = StructType.fromAttributes(aggregateBufferAttributes) // The name for Fast HashMap private var fastHashMapTerm: String = _ private var isFastHashMapEnabled: Boolean = false // whether a vectorized hashmap is used instead // we have decided to always use the row-based hashmap, // but the vectorized hashmap can still be switched on for testing and benchmarking purposes. private var isVectorizedHashMapEnabled: Boolean = false // The name for UnsafeRow HashMap private var hashMapTerm: String = _ private var sorterTerm: String = _ /** * This is called by generated Java class, should be public. */ def createHashMap(): UnsafeFixedWidthAggregationMap = { // create initialized aggregate buffer val initExpr = declFunctions.flatMap(f => f.initialValues) val initialBuffer = UnsafeProjection.create(initExpr)(EmptyRow) // create hashMap new UnsafeFixedWidthAggregationMap( initialBuffer, bufferSchema, groupingKeySchema, TaskContext.get().taskMemoryManager(), 1024 * 16, // initial capacity TaskContext.get().taskMemoryManager().pageSizeBytes ) } def getTaskMemoryManager(): TaskMemoryManager = { TaskContext.get().taskMemoryManager() } def getEmptyAggregationBuffer(): InternalRow = { val initExpr = declFunctions.flatMap(f => f.initialValues) val initialBuffer = UnsafeProjection.create(initExpr)(EmptyRow) initialBuffer } /** * This is called by generated Java class, should be public. 
*/ def createUnsafeJoiner(): UnsafeRowJoiner = { GenerateUnsafeRowJoiner.create(groupingKeySchema, bufferSchema) } /** * Called by generated Java class to finish the aggregate and return a KVIterator. */ def finishAggregate( hashMap: UnsafeFixedWidthAggregationMap, sorter: UnsafeKVExternalSorter, peakMemory: SQLMetric, spillSize: SQLMetric, avgHashmapProbe: SQLMetric): KVIterator[UnsafeRow, UnsafeRow] = { // update peak execution memory val mapMemory = hashMap.getPeakMemoryUsedBytes val sorterMemory = Option(sorter).map(_.getPeakMemoryUsedBytes).getOrElse(0L) val maxMemory = Math.max(mapMemory, sorterMemory) val metrics = TaskContext.get().taskMetrics() peakMemory.add(maxMemory) metrics.incPeakExecutionMemory(maxMemory) // Update average hashmap probe val avgProbes = hashMap.getAverageProbesPerLookup() avgHashmapProbe.add(avgProbes.ceil.toLong) if (sorter == null) { // not spilled return hashMap.iterator() } // merge the final hashMap into sorter sorter.merge(hashMap.destructAndCreateExternalSorter()) hashMap.free() val sortedIter = sorter.sortedIterator() // Create a KVIterator based on the sorted iterator. 
new KVIterator[UnsafeRow, UnsafeRow] { // Create a MutableProjection to merge the rows of same key together val mergeExpr = declFunctions.flatMap(_.mergeExpressions) val mergeProjection = newMutableProjection( mergeExpr, aggregateBufferAttributes ++ declFunctions.flatMap(_.inputAggBufferAttributes), subexpressionEliminationEnabled) val joinedRow = new JoinedRow() var currentKey: UnsafeRow = null var currentRow: UnsafeRow = null var nextKey: UnsafeRow = if (sortedIter.next()) { sortedIter.getKey } else { null } override def next(): Boolean = { if (nextKey != null) { currentKey = nextKey.copy() currentRow = sortedIter.getValue.copy() nextKey = null // use the first row as aggregate buffer mergeProjection.target(currentRow) // merge the following rows with same key together var findNextGroup = false while (!findNextGroup && sortedIter.next()) { val key = sortedIter.getKey if (currentKey.equals(key)) { mergeProjection(joinedRow(currentRow, sortedIter.getValue)) } else { // We find a new group. findNextGroup = true nextKey = key } } true } else { spillSize.add(sorter.getSpillSize) false } } override def getKey: UnsafeRow = currentKey override def getValue: UnsafeRow = currentRow override def close(): Unit = { sortedIter.close() } } } /** * Generate the code for output. 
*/ private def generateResultCode( ctx: CodegenContext, keyTerm: String, bufferTerm: String, plan: String): String = { if (modes.contains(Final) || modes.contains(Complete)) { // generate output using resultExpressions ctx.currentVars = null ctx.INPUT_ROW = keyTerm val keyVars = groupingExpressions.zipWithIndex.map { case (e, i) => BoundReference(i, e.dataType, e.nullable).genCode(ctx) } val evaluateKeyVars = evaluateVariables(keyVars) ctx.INPUT_ROW = bufferTerm val bufferVars = aggregateBufferAttributes.zipWithIndex.map { case (e, i) => BoundReference(i, e.dataType, e.nullable).genCode(ctx) } val evaluateBufferVars = evaluateVariables(bufferVars) // evaluate the aggregation result ctx.currentVars = bufferVars val aggResults = declFunctions.map(_.evaluateExpression).map { e => BindReferences.bindReference(e, aggregateBufferAttributes).genCode(ctx) } val evaluateAggResults = evaluateVariables(aggResults) // generate the final result ctx.currentVars = keyVars ++ aggResults val inputAttrs = groupingAttributes ++ aggregateAttributes val resultVars = resultExpressions.map { e => BindReferences.bindReference(e, inputAttrs).genCode(ctx) } s""" $evaluateKeyVars $evaluateBufferVars $evaluateAggResults ${consume(ctx, resultVars)} """ } else if (modes.contains(Partial) || modes.contains(PartialMerge)) { // This should be the last operator in a stage, we should output UnsafeRow directly val joinerTerm = ctx.freshName("unsafeRowJoiner") ctx.addMutableState(classOf[UnsafeRowJoiner].getName, joinerTerm, s"$joinerTerm = $plan.createUnsafeJoiner();") val resultRow = ctx.freshName("resultRow") s""" UnsafeRow $resultRow = $joinerTerm.join($keyTerm, $bufferTerm); ${consume(ctx, null, resultRow)} """ } else { // generate result based on grouping key ctx.INPUT_ROW = keyTerm ctx.currentVars = null val eval = resultExpressions.map{ e => BindReferences.bindReference(e, groupingAttributes).genCode(ctx) } consume(ctx, eval) } } /** * A required check for any fast hash map implementation 
(basically the common requirements * for row-based and vectorized). * Currently fast hash map is supported for primitive data types during partial aggregation. * This list of supported use-cases should be expanded over time. */ private def checkIfFastHashMapSupported(ctx: CodegenContext): Boolean = { val isSupported = (groupingKeySchema ++ bufferSchema).forall(f => ctx.isPrimitiveType(f.dataType) || f.dataType.isInstanceOf[DecimalType] || f.dataType.isInstanceOf[StringType]) && bufferSchema.nonEmpty && modes.forall(mode => mode == Partial || mode == PartialMerge) // For vectorized hash map, We do not support byte array based decimal type for aggregate values // as ColumnVector.putDecimal for high-precision decimals doesn't currently support in-place // updates. Due to this, appending the byte array in the vectorized hash map can turn out to be // quite inefficient and can potentially OOM the executor. // For row-based hash map, while decimal update is supported in UnsafeRow, we will just act // conservative here, due to lack of testing and benchmarking. val isNotByteArrayDecimalType = bufferSchema.map(_.dataType).filter(_.isInstanceOf[DecimalType]) .forall(!DecimalType.isByteArrayDecimalType(_)) isSupported && isNotByteArrayDecimalType } private def enableTwoLevelHashMap(ctx: CodegenContext) = { if (!checkIfFastHashMapSupported(ctx)) { if (modes.forall(mode => mode == Partial || mode == PartialMerge) && !Utils.isTesting) { logInfo("spark.sql.codegen.aggregate.map.twolevel.enable is set to true, but" + " current version of codegened fast hashmap does not support this aggregate.") } } else { isFastHashMapEnabled = true // This is for testing/benchmarking only. // We enforce to first level to be a vectorized hashmap, instead of the default row-based one. 
sqlContext.getConf("spark.sql.codegen.aggregate.map.vectorized.enable", null) match { case "true" => isVectorizedHashMapEnabled = true case null | "" | "false" => None } } } private def doProduceWithKeys(ctx: CodegenContext): String = { val initAgg = ctx.freshName("initAgg") ctx.addMutableState("boolean", initAgg, s"$initAgg = false;") if (sqlContext.conf.enableTwoLevelAggMap) { enableTwoLevelHashMap(ctx) } else { sqlContext.getConf("spark.sql.codegen.aggregate.map.vectorized.enable", null) match { case "true" => logWarning("Two level hashmap is disabled but vectorized hashmap is " + "enabled.") case null | "" | "false" => None } } fastHashMapTerm = ctx.freshName("fastHashMap") val fastHashMapClassName = ctx.freshName("FastHashMap") val fastHashMapGenerator = if (isVectorizedHashMapEnabled) { new VectorizedHashMapGenerator(ctx, aggregateExpressions, fastHashMapClassName, groupingKeySchema, bufferSchema) } else { new RowBasedHashMapGenerator(ctx, aggregateExpressions, fastHashMapClassName, groupingKeySchema, bufferSchema) } val thisPlan = ctx.addReferenceObj("plan", this) // Create a name for iterator from vectorized HashMap val iterTermForFastHashMap = ctx.freshName("fastHashMapIter") if (isFastHashMapEnabled) { if (isVectorizedHashMapEnabled) { ctx.addMutableState(fastHashMapClassName, fastHashMapTerm, s"$fastHashMapTerm = new $fastHashMapClassName();") ctx.addMutableState( "java.util.Iterator<org.apache.spark.sql.execution.vectorized.ColumnarBatch.Row>", iterTermForFastHashMap, "") } else { ctx.addMutableState(fastHashMapClassName, fastHashMapTerm, s"$fastHashMapTerm = new $fastHashMapClassName(" + s"$thisPlan.getTaskMemoryManager(), $thisPlan.getEmptyAggregationBuffer());") ctx.addMutableState( "org.apache.spark.unsafe.KVIterator", iterTermForFastHashMap, "") } } // create hashMap hashMapTerm = ctx.freshName("hashMap") val hashMapClassName = classOf[UnsafeFixedWidthAggregationMap].getName ctx.addMutableState(hashMapClassName, hashMapTerm, "") sorterTerm = 
ctx.freshName("sorter") ctx.addMutableState(classOf[UnsafeKVExternalSorter].getName, sorterTerm, "") // Create a name for iterator from HashMap val iterTerm = ctx.freshName("mapIter") ctx.addMutableState(classOf[KVIterator[UnsafeRow, UnsafeRow]].getName, iterTerm, "") val doAgg = ctx.freshName("doAggregateWithKeys") val peakMemory = metricTerm(ctx, "peakMemory") val spillSize = metricTerm(ctx, "spillSize") val avgHashmapProbe = metricTerm(ctx, "avgHashmapProbe") def generateGenerateCode(): String = { if (isFastHashMapEnabled) { if (isVectorizedHashMapEnabled) { s""" | ${fastHashMapGenerator.asInstanceOf[VectorizedHashMapGenerator].generate()} """.stripMargin } else { s""" | ${fastHashMapGenerator.asInstanceOf[RowBasedHashMapGenerator].generate()} """.stripMargin } } else "" } val doAggFuncName = ctx.addNewFunction(doAgg, s""" ${generateGenerateCode} private void $doAgg() throws java.io.IOException { $hashMapTerm = $thisPlan.createHashMap(); ${child.asInstanceOf[CodegenSupport].produce(ctx, this)} ${if (isFastHashMapEnabled) { s"$iterTermForFastHashMap = $fastHashMapTerm.rowIterator();"} else ""} $iterTerm = $thisPlan.finishAggregate($hashMapTerm, $sorterTerm, $peakMemory, $spillSize, $avgHashmapProbe); } """) // generate code for output val keyTerm = ctx.freshName("aggKey") val bufferTerm = ctx.freshName("aggBuffer") val outputCode = generateResultCode(ctx, keyTerm, bufferTerm, thisPlan) val numOutput = metricTerm(ctx, "numOutputRows") // The child could change `copyResult` to true, but we had already consumed all the rows, // so `copyResult` should be reset to `false`. 
ctx.copyResult = false def outputFromGeneratedMap: String = { if (isFastHashMapEnabled) { if (isVectorizedHashMapEnabled) { outputFromVectorizedMap } else { outputFromRowBasedMap } } else "" } def outputFromRowBasedMap: String = { s""" while ($iterTermForFastHashMap.next()) { $numOutput.add(1); UnsafeRow $keyTerm = (UnsafeRow) $iterTermForFastHashMap.getKey(); UnsafeRow $bufferTerm = (UnsafeRow) $iterTermForFastHashMap.getValue(); $outputCode if (shouldStop()) return; } $fastHashMapTerm.close(); """ } // Iterate over the aggregate rows and convert them from ColumnarBatch.Row to UnsafeRow def outputFromVectorizedMap: String = { val row = ctx.freshName("fastHashMapRow") ctx.currentVars = null ctx.INPUT_ROW = row var schema: StructType = groupingKeySchema bufferSchema.foreach(i => schema = schema.add(i)) val generateRow = GenerateUnsafeProjection.createCode(ctx, schema.toAttributes.zipWithIndex .map { case (attr, i) => BoundReference(i, attr.dataType, attr.nullable) }) s""" | while ($iterTermForFastHashMap.hasNext()) { | $numOutput.add(1); | org.apache.spark.sql.execution.vectorized.ColumnarBatch.Row $row = | (org.apache.spark.sql.execution.vectorized.ColumnarBatch.Row) | $iterTermForFastHashMap.next(); | ${generateRow.code} | ${consume(ctx, Seq.empty, {generateRow.value})} | | if (shouldStop()) return; | } | | $fastHashMapTerm.close(); """.stripMargin } val aggTime = metricTerm(ctx, "aggTime") val beforeAgg = ctx.freshName("beforeAgg") s""" if (!$initAgg) { $initAgg = true; long $beforeAgg = System.nanoTime(); $doAggFuncName(); $aggTime.add((System.nanoTime() - $beforeAgg) / 1000000); } // output the result ${outputFromGeneratedMap} while ($iterTerm.next()) { $numOutput.add(1); UnsafeRow $keyTerm = (UnsafeRow) $iterTerm.getKey(); UnsafeRow $bufferTerm = (UnsafeRow) $iterTerm.getValue(); $outputCode if (shouldStop()) return; } $iterTerm.close(); if ($sorterTerm == null) { $hashMapTerm.free(); } """ } private def doConsumeWithKeys(ctx: CodegenContext, input: 
Seq[ExprCode]): String = { // create grouping key ctx.currentVars = input val unsafeRowKeyCode = GenerateUnsafeProjection.createCode( ctx, groupingExpressions.map(e => BindReferences.bindReference[Expression](e, child.output))) val fastRowKeys = ctx.generateExpressions( groupingExpressions.map(e => BindReferences.bindReference[Expression](e, child.output))) val unsafeRowKeys = unsafeRowKeyCode.value val unsafeRowBuffer = ctx.freshName("unsafeRowAggBuffer") val fastRowBuffer = ctx.freshName("fastAggBuffer") // only have DeclarativeAggregate val updateExpr = aggregateExpressions.flatMap { e => e.mode match { case Partial | Complete => e.aggregateFunction.asInstanceOf[DeclarativeAggregate].updateExpressions case PartialMerge | Final => e.aggregateFunction.asInstanceOf[DeclarativeAggregate].mergeExpressions } } // generate hash code for key val hashExpr = Murmur3Hash(groupingExpressions, 42) ctx.currentVars = input val hashEval = BindReferences.bindReference(hashExpr, child.output).genCode(ctx) val inputAttr = aggregateBufferAttributes ++ child.output ctx.currentVars = new Array[ExprCode](aggregateBufferAttributes.length) ++ input val (checkFallbackForGeneratedHashMap, checkFallbackForBytesToBytesMap, resetCounter, incCounter) = if (testFallbackStartsAt.isDefined) { val countTerm = ctx.freshName("fallbackCounter") ctx.addMutableState("int", countTerm, s"$countTerm = 0;") (s"$countTerm < ${testFallbackStartsAt.get._1}", s"$countTerm < ${testFallbackStartsAt.get._2}", s"$countTerm = 0;", s"$countTerm += 1;") } else { ("true", "true", "", "") } // We first generate code to probe and update the fast hash map. If the probe is // successful the corresponding fast row buffer will hold the mutable row val findOrInsertFastHashMap: Option[String] = { if (isFastHashMapEnabled) { Option( s""" | |if ($checkFallbackForGeneratedHashMap) { | ${fastRowKeys.map(_.code).mkString("\n")} | if (${fastRowKeys.map("!" 
+ _.isNull).mkString(" && ")}) { | $fastRowBuffer = $fastHashMapTerm.findOrInsert( | ${fastRowKeys.map(_.value).mkString(", ")}); | } |} """.stripMargin) } else { None } } def updateRowInFastHashMap(isVectorized: Boolean): Option[String] = { ctx.INPUT_ROW = fastRowBuffer val boundUpdateExpr = updateExpr.map(BindReferences.bindReference(_, inputAttr)) val subExprs = ctx.subexpressionEliminationForWholeStageCodegen(boundUpdateExpr) val effectiveCodes = subExprs.codes.mkString("\n") val fastRowEvals = ctx.withSubExprEliminationExprs(subExprs.states) { boundUpdateExpr.map(_.genCode(ctx)) } val updateFastRow = fastRowEvals.zipWithIndex.map { case (ev, i) => val dt = updateExpr(i).dataType ctx.updateColumn(fastRowBuffer, dt, i, ev, updateExpr(i).nullable, isVectorized) } Option( s""" |// common sub-expressions |$effectiveCodes |// evaluate aggregate function |${evaluateVariables(fastRowEvals)} |// update fast row |${updateFastRow.mkString("\n").trim} | """.stripMargin) } // Next, we generate code to probe and update the unsafe row hash map. val findOrInsertInUnsafeRowMap: String = { s""" | if ($fastRowBuffer == null) { | // generate grouping key | ${unsafeRowKeyCode.code.trim} | ${hashEval.code.trim} | if ($checkFallbackForBytesToBytesMap) { | // try to get the buffer from hash map | $unsafeRowBuffer = | $hashMapTerm.getAggregationBufferFromUnsafeRow($unsafeRowKeys, ${hashEval.value}); | } | // Can't allocate buffer from the hash map. Spill the map and fallback to sort-based | // aggregation after processing all input rows. | if ($unsafeRowBuffer == null) { | if ($sorterTerm == null) { | $sorterTerm = $hashMapTerm.destructAndCreateExternalSorter(); | } else { | $sorterTerm.merge($hashMapTerm.destructAndCreateExternalSorter()); | } | $resetCounter | // the hash map had be spilled, it should have enough memory now, | // try to allocate buffer again. 
| $unsafeRowBuffer = | $hashMapTerm.getAggregationBufferFromUnsafeRow($unsafeRowKeys, ${hashEval.value}); | if ($unsafeRowBuffer == null) { | // failed to allocate the first page | throw new OutOfMemoryError("No enough memory for aggregation"); | } | } | } """.stripMargin } val updateRowInUnsafeRowMap: String = { ctx.INPUT_ROW = unsafeRowBuffer val boundUpdateExpr = updateExpr.map(BindReferences.bindReference(_, inputAttr)) val subExprs = ctx.subexpressionEliminationForWholeStageCodegen(boundUpdateExpr) val effectiveCodes = subExprs.codes.mkString("\n") val unsafeRowBufferEvals = ctx.withSubExprEliminationExprs(subExprs.states) { boundUpdateExpr.map(_.genCode(ctx)) } val updateUnsafeRowBuffer = unsafeRowBufferEvals.zipWithIndex.map { case (ev, i) => val dt = updateExpr(i).dataType ctx.updateColumn(unsafeRowBuffer, dt, i, ev, updateExpr(i).nullable) } s""" |// common sub-expressions |$effectiveCodes |// evaluate aggregate function |${evaluateVariables(unsafeRowBufferEvals)} |// update unsafe row buffer |${updateUnsafeRowBuffer.mkString("\n").trim} """.stripMargin } // We try to do hash map based in-memory aggregation first. If there is not enough memory (the // hash map will return null for new key), we spill the hash map to disk to free memory, then // continue to do in-memory aggregation and spilling until all the rows had been processed. // Finally, sort the spilled aggregate buffers by key, and merge them together for same key. 
s""" UnsafeRow $unsafeRowBuffer = null; ${ if (isVectorizedHashMapEnabled) { s""" | org.apache.spark.sql.execution.vectorized.ColumnarBatch.Row $fastRowBuffer = null; """.stripMargin } else { s""" | UnsafeRow $fastRowBuffer = null; """.stripMargin } } ${findOrInsertFastHashMap.getOrElse("")} $findOrInsertInUnsafeRowMap $incCounter if ($fastRowBuffer != null) { // update fast row ${ if (isFastHashMapEnabled) { updateRowInFastHashMap(isVectorizedHashMapEnabled).getOrElse("") } else "" } } else { // update unsafe row $updateRowInUnsafeRowMap } """ } override def verboseString: String = toString(verbose = true) override def simpleString: String = toString(verbose = false) private def toString(verbose: Boolean): String = { val allAggregateExpressions = aggregateExpressions testFallbackStartsAt match { case None => val keyString = Utils.truncatedString(groupingExpressions, "[", ", ", "]") val functionString = Utils.truncatedString(allAggregateExpressions, "[", ", ", "]") val outputString = Utils.truncatedString(output, "[", ", ", "]") if (verbose) { s"HashAggregate(keys=$keyString, functions=$functionString, output=$outputString)" } else { s"HashAggregate(keys=$keyString, functions=$functionString)" } case Some(fallbackStartsAt) => s"HashAggregateWithControlledFallback $groupingExpressions " + s"$allAggregateExpressions $resultExpressions fallbackStartsAt=$fallbackStartsAt" } } } object HashAggregateExec { def supportsAggregate(aggregateBufferAttributes: Seq[Attribute]): Boolean = { val aggregationBufferSchema = StructType.fromAttributes(aggregateBufferAttributes) UnsafeFixedWidthAggregationMap.supportsAggregationBufferSchema(aggregationBufferSchema) } }
saturday-shi/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/aggregate/HashAggregateExec.scala
Scala
apache-2.0
35,252
/** Default (Template) Project * * Copyright (c) 2017 Hugo Firth * Email: <me@hugofirth.com/> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.ac.ncl.la.soar.db import doobie.hi.ConnectionIO /** * Trait which defines raw query methods on the companion objects of [[Repository]]. * TODO: Decide on the scoping of these methods - public for now but could be `private[glance]` */ trait RepositoryCompanion[A, R <: Repository[A]] { val initQ: ConnectionIO[Unit] val listQ: ConnectionIO[List[A]] def findQ(id: R#PK): ConnectionIO[Option[A]] def saveQ(entry: A): ConnectionIO[Unit] def deleteQ(id: R#PK): ConnectionIO[Boolean] }
NewcastleComputingScience/student-outcome-accelerator
db/src/main/scala/uk/ac/ncl/la/soar/db/RepositoryCompanion.scala
Scala
apache-2.0
1,181
package org.squeryl.test import org.squeryl._ import org.squeryl.framework.{DBConnector, SchemaTester, RunTestsInsideTransaction} import org.squeryl.dsl.ast._ import org.squeryl.test.PrimitiveTypeMode4Tests._ object LogicalBooleanObjTests { class Dummy(val id:Int, val p1:Int, val p2:Int) extends KeyedEntity[Int] object TestSchema extends Schema { val dummy = table[Dummy] } } abstract class LogicalBooleanObjTests extends SchemaTester with RunTestsInsideTransaction { self: DBConnector => import org.squeryl.test.LogicalBooleanObjTests._ final def schema = TestSchema test("and operation") { import TestSchema._ prepareDummyTable((1,1,1),(2,1,2),(3,1,2),(4,2,1),(5,3,1)) //Session.currentSession.setLogger(System.err.println(_)) val q0 = from(dummy)(d => where(LogicalBoolean.and(Seq())) select(d)).toList q0 should have length(5) val q1 = from(dummy)(d => where(LogicalBoolean.and(Seq(d.id===1))) select(d)).toList q1 should have length(1) q1.head.id should equal(1) val a2 = (d:Dummy) => LogicalBoolean.and(Seq(d.p1 === 1, d.p2===2)) val q2 = from(dummy)(d => where(a2(d))select(d)).toList q2 should have length(2) val a3 = (d:Dummy) => LogicalBoolean.and(Seq(d.p1 === 1, d.p2===2, d.id===2)) val q3 = from(dummy)(d => where(a3(d))select(d)).toList q3 should have length(1) } test("or operation") { import TestSchema._ prepareDummyTable((1,1,1),(2,1,2),(3,1,2),(4,2,1),(5,3,1)) //Session.currentSession.setLogger(System.err.println(_)) val q1 = from(dummy)(d => where(LogicalBoolean.or(Seq())) select(d)).toList q1 should have length(0) } test("TrueLogicalBoolean, FalseLogicalBoolean") { import TestSchema._ prepareDummyTable((1, 1, 1), (2, 1, 2)) // Session.currentSession.setLogger(System.err.println(_)) from(dummy)(d => where(TrueLogicalBoolean) select (d)). size should equal(2) from(dummy)(d => where(TrueLogicalBoolean and d.p2 === 1) select (d)). size should equal(1) from(dummy)(d => where(TrueLogicalBoolean or d.p2 === 1) select (d)). 
size should equal(2) from(dummy)(d => where(FalseLogicalBoolean) select (d)). size should equal(0) from(dummy)(d => where(FalseLogicalBoolean and d.p2 === 1) select (d)). size should equal(0) from(dummy)(d => where(FalseLogicalBoolean or d.p2 === 1) select (d)). size should equal(1) } test("and/or operators for Option[LogicalBoolean]") { import TestSchema._ prepareDummyTable((1, 1, 1), (2, 1, 2)) val some: Option[Int] = Some(1) val none: Option[Int] = None // Session.currentSession.setLogger(System.err.println(_)) def q1(opt: Option[Int]) = from(dummy)(d => where(TrueLogicalBoolean and opt.map(_ === d.p2)) select (d)) q1(none).size should equal(2) q1(some).size should equal(1) def q2(opt: Option[Int]) = from(dummy)(d => where(FalseLogicalBoolean or opt.map(_ === d.p2)) select (d)) q2(none).size should equal(0) q2(some).size should equal(1) def q3(opt: Option[Int]) = from(dummy)(d => where(FalseLogicalBoolean and opt.map(_ === d.p2)) select (d)) q3(none).size should equal(0) q3(some).size should equal(0) } def prepareDummyTable(vals: (Int, Int, Int)*) { for (v <- vals) TestSchema.dummy.insert(new Dummy(v._1, v._2, v._3)) } }
kanischev/Squeryl
src/test/scala/org/squeryl/test/LogicalBooleanObjTests.scala
Scala
apache-2.0
3,682
/* * Copyright (c) 2015-2016 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencypher.tools.tck import cucumber.api.DataTable import org.opencypher.tools.tck.parsing.FormatListener import scala.collection.JavaConverters._ /** * This function will validate that a given DataTable from a TCK scenario contains parseable results representations. * If there are invalid result values in the table, a message describing them will be returned, otherwise None is * returned. */ object validateResults extends (DataTable => Option[String]) { override def apply(table: DataTable): Option[String] = { // TODO: Specify constraints for column names, and enforce these here val keys = table.topCells().asScala val cells = table.cells(1).asScala val badValues = cells.flatMap { list => list.asScala.filterNot(this (_)) } if (badValues.isEmpty) None else Some(s"${badValues.size} expected result values had invalid format: ${badValues.mkString(", ")}") } def apply(value: String): Boolean = { new FormatListener().parseResults(value) } }
Mats-SX/openCypher
tools/tck/src/main/scala/org/opencypher/tools/tck/validateResults.scala
Scala
apache-2.0
1,699
package org.scalex
package model

/** A value parameter to a constructor or method. */
case class ValueParam(
    name: String,
    /** The type of this value parameter. */
    resultType: TypeEntity,
    /** The default value of this value parameter, if it has been defined. */
    defaultValue: Option[Expression],
    /** Whether this value parameter is implicit. */
    isImplicit: Boolean) {

  /** Renders the parameter exactly as it would appear in a signature,
    * e.g. `implicit name: Type = default`. */
  override def toString = {
    val prefix = if (isImplicit) "implicit " else ""
    val suffix = defaultValue match {
      case Some(dv) => " = " + dv
      case None     => ""
    }
    prefix + name + ": " + resultType.toString + suffix
  }
}

/** Something that owns one or more value-parameter lists. */
trait ValueParameterized {
  def valueParams: List[List[ValueParam]]
}
ornicar/scalex
src/main/scala/model/ValueParam.scala
Scala
mit
678
/*
 * File JsonFluidStack.scala is part of JsonRecipes.
 * JsonRecipes is opensource Minecraft mod(released under LGPLv3), created by anti344.
 * Full licence information can be found in LICENCE and LICENCE.LESSER files in jar-file of the mod.
 * Copyright © 2014, anti344
 */
package net.anti344.jsonrecipes.impl

import net.anti344.jsonrecipes.api.IJsonFluidStack
import net.minecraft.nbt.NBTTagCompound
import net.minecraftforge.fluids._

/** A fluid stack described by JSON data: a fluid name, an amount and optional NBT. */
class JsonFluidStack(fluid: String, amount: Int, nbt: NBTTagCompound) extends IJsonFluidStack {

  def getName: String = fluid

  /** The configured amount; a non-positive value falls back to one bucket. */
  def getAmount: Int =
    if (amount > 0) amount else FluidContainerRegistry.BUCKET_VOLUME

  def getNBT: NBTTagCompound = nbt

  /** True when the named fluid is known to the Forge fluid registry. */
  def exists: Boolean = FluidRegistry.getFluid(fluid) != null

  /** The concrete FluidStack, or null when the fluid is not registered. */
  def getFluidStack: FluidStack =
    if (!exists) null
    else new FluidStack(FluidRegistry.getFluidID(fluid), amount, nbt)
}
mc-anti344/JsonRecipes
src/main/scala/net/anti344/jsonrecipes/impl/JsonFluidStack.scala
Scala
gpl-3.0
923
package com.lookout.borderpatrol.security

import com.lookout.borderpatrol.util.Combinators.tap
import com.lookout.borderpatrol.sessionx._
import com.lookout.borderpatrol.util.Helpers
import com.twitter.finagle.{SimpleFilter, Service, Filter}
import com.twitter.finagle.http.{Request, Response}
import com.twitter.util.Future

object Csrf {
  // Small value-class wrappers so the four configurable names cannot be
  // accidentally swapped at a call site. Defaults are the wire names used
  // throughout Border Patrol.
  case class InHeader(val header: String = "X-BORDER-CSRF") extends AnyVal
  case class CsrfToken(val value: String = "_x_border_csrf") extends AnyVal
  case class CookieName(val name: String = "border_csrf") extends AnyVal
  case class VerifiedHeader(val header: String = "X-BORDER-CSRF-VERIFIED") extends AnyVal

  /**
   * Informs upstream service about Csrf validation via double submit cookie
   *
   * @param header The incoming header that contains the CSRF token
   * @param csrfToken The incoming parameter that contains the CSRF token
   * @param cookieName The cookie that contains the CSRF token
   * @param verifiedHeader The verified header to set
   */
  case class Verify(header: InHeader,
                    csrfToken: CsrfToken,
                    cookieName: CookieName,
                    verifiedHeader: VerifiedHeader)(implicit secretStoreApi: SecretStoreApi) {

    /**
     * Inject the value of the call to verify in the VerifiedHeader
     * It's unsafe, because it mutates the Request
     */
    def unsafeInject(req: Request)(f: Boolean => String): Future[Request] =
      // `tap` mutates the request's header map in place, then the same request
      // is returned wrapped in a Future.
      tap(req)(_.headerMap.set(verifiedHeader.header, f(verify(req)))).toFuture

    /**
     * Check that CSRF header/param is there, validates that the cookie and header/param are valid SessionIds
     * If the header is not present it will look for the parameter.
     * @return false unless all checks are valid
     */
    def verify(req: Request): Boolean =
      // Double-submit check: the token from the header (or, failing that, the
      // query/form parameter) must be a parseable SignedId AND equal the
      // SignedId carried by the CSRF cookie. Any missing or unparseable piece
      // short-circuits the Option chain to false.
      (for {
        str <- req.headerMap.get(header.header) orElse Helpers.scrubQueryParams(req.params, csrfToken.value)
        uid <- SignedId.from(str).toOption
        cid <- SignedId.fromRequest(req, cookieName.name).toOption
      } yield uid == cid) getOrElse false
  }
}

/**
 * Inserts the CSRF cookie in a Response sent back to the client
 *
 * - It should be typically happen only once, perhaps after the successful login
 */
case class CsrfInsertFilter[A](cookieName: Csrf.CookieName)(implicit secretStore: SecretStoreApi)
    extends Filter[A, Response, A, Response] {

  def apply(req: A, service: Service[A, Response]): Future[Response] =
    for {
      res    <- service(req)
      // Mint a fresh authenticated SignedId and attach it as the CSRF cookie.
      csrfId <- SignedId.authenticated
      _      <- res.addCookie(csrfId.asCookie(cookieName.name)).toFuture
    } yield res
}

/**
 * Sets the CSRF header for inspection by upstream service. Always sets csrf verified header to false unless
 * the csrf cookie and the header/param match and are valid.
 */
case class CsrfVerifyFilter(verify: Csrf.Verify) extends SimpleFilter[Request, Response] {

  def apply(req: Request, service: Service[Request, Response]): Future[Response] =
    for {
      // The verified header value is the stringified boolean ("true"/"false").
      alteredReq <- verify.unsafeInject(req)(_.toString)
      resp       <- service(alteredReq)
    } yield resp
}
lookout/borderpatrol
security/src/main/scala/com/lookout/borderpatrol/security/Csrf.scala
Scala
mit
3,095
package org.hello

import org.hello.a.WorldA
import org.hello.a.WorldB

class World {

  /** Concatenates the results of the two sub-module greetings. */
  def foo(): String = {
    val first  = WorldA.foo()
    val second = WorldB.foo()
    first + second
  }
}
scoverage/gradle-scoverage
src/functionalTest/resources/projects/scala-multi-module-with-partial-scoverage-use/src/main/scala/org/hello/World.scala
Scala
apache-2.0
148
/*
 * Scala.js (https://www.scala-js.org/)
 *
 * Copyright EPFL.
 *
 * Licensed under Apache License 2.0
 * (https://www.apache.org/licenses/LICENSE-2.0).
 *
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 */

package org.scalajs.linker.backend.closure

import java.net.URI

private[closure] object URIUtil {

  /** Renders a source URI as an ASCII string, optionally relativized
    * against a base URI and normalized for `file:` URIs.
    */
  def sourceURIToString(relativizeBaseURI: Option[URI], uri: URI): String = {
    import org.scalajs.io.URIUtils._

    val adjusted = relativizeBaseURI match {
      case Some(base) => relativize(base, uri)
      case None       => uri
    }
    fixFileURI(adjusted).toASCIIString
  }
}
nicolasstucki/scala-js
linker/jvm/src/main/scala/org/scalajs/linker/backend/closure/URIUtil.scala
Scala
apache-2.0
597
package org.jetbrains.plugins.scala
package lang
package parser
package parsing
package statements

import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
import org.jetbrains.plugins.scala.lang.parser.parsing.expressions.Block
import org.jetbrains.plugins.scala.lang.parser.parsing.top.Qual_Id
import org.jetbrains.plugins.scala.lang.parser.parsing.types.{Type, TypeArgs}

/**
 * @author Jason Zaugg
 *
 * MacroDef ::= FunSig [':' Type] '=' 'macro' QualId [TypeArgs]
 */
object MacroDef {

  // Attempts to parse a macro definition starting at `def`. Returns true and
  // leaves the consumed tokens under a completed marker on success; on any
  // mismatch the PSI marker is rolled back (or dropped) so the caller can try
  // another production. The marker bookkeeping is strictly order-dependent.
  def parse(builder: ScalaPsiBuilder): Boolean = {
    val marker = builder.mark
    builder.getTokenType match {
      case ScalaTokenTypes.kDEF =>
        builder.advanceLexer()
      case _ =>
        // Not even a `def`: nothing was consumed, drop the marker.
        marker.drop()
        return false
    }
    builder.getTokenType match {
      case ScalaTokenTypes.tIDENTIFIER =>
        FunSig parse builder
        builder.getTokenType match {
          // Branch 1: explicit result type — FunSig ':' Type '=' 'macro' ...
          case ScalaTokenTypes.tCOLON =>
            builder.advanceLexer() //Ate :
            if (Type.parse(builder)) {
              builder.getTokenType match {
                case ScalaTokenTypes.tASSIGN =>
                  builder.advanceLexer() //Ate =
                  builder.getTokenType match {
                    case ScalaTokenTypes.kMACRO =>
                      builder.advanceLexer() //Ate `macro`
                      builder.getTokenType match {
                        case ScalaTokenTypes.tLBRACE =>
                          // scalameta style - embedded macro body
                          if (builder.twoNewlinesBeforeCurrentToken) {
                            // NOTE(review): this early return leaves `marker`
                            // neither dropped nor rolled back — verify this is
                            // intentional (markers normally must be finalized).
                            return false
                          }
                          Block.parse(builder, hasBrace = true)
                          marker.drop()
                          true
                        case _ =>
                          // Classic macro impl reference: QualId [TypeArgs]
                          if (Qual_Id.parse(builder)) {
                            if (builder.getTokenType == ScalaTokenTypes.tLSQBRACKET) {
                              TypeArgs.parse(builder, isPattern = false)
                            }
                            marker.drop()
                            true
                          } else {
                            marker.drop()
                            false
                          }
                      }
                    case _ =>
                      // `=` not followed by `macro`: not a macro def, rewind.
                      marker.rollbackTo()
                      false
                  }
                case _ =>
                  marker.rollbackTo()
                  false
              }
            } else {
              marker.rollbackTo()
              false
            }
          // Branch 2: no result type — FunSig '=' 'macro' ...
          case ScalaTokenTypes.tASSIGN =>
            builder.advanceLexer() //Ate =
            builder.getTokenType match {
              case ScalaTokenTypes.kMACRO =>
                builder.advanceLexer() //Ate `macro`
                builder.getTokenType match {
                  case ScalaTokenTypes.tLBRACE =>
                    // scalameta style - embedded macro body
                    if (builder.twoNewlinesBeforeCurrentToken) {
                      // NOTE(review): same un-finalized marker as above.
                      return false
                    }
                    Block.parse(builder, hasBrace = true)
                    marker.drop()
                    true
                  case _ =>
                    if (Qual_Id.parse(builder)) {
                      if (builder.getTokenType == ScalaTokenTypes.tLSQBRACKET) {
                        TypeArgs.parse(builder, isPattern = false)
                      }
                      marker.drop()
                      true
                    } else {
                      marker.drop()
                      false
                    }
                }
              case _ =>
                marker.rollbackTo()
                false
            }
          case _ =>
            marker.rollbackTo()
            false
        }
      case _ =>
        // `def` not followed by an identifier: rewind everything.
        marker.rollbackTo()
        false
    }
  }
}
jastice/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/parser/parsing/statements/MacroDef.scala
Scala
apache-2.0
3,958
/*
 * Copyright (c) 2015-2022 Snowplow Analytics Ltd. All rights reserved.
 *
 * This program is licensed to you under the Apache License Version 2.0,
 * and you may not use this file except in compliance with the Apache License Version 2.0.
 * You may obtain a copy of the Apache License Version 2.0 at http://www.apache.org/licenses/LICENSE-2.0.
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the Apache License Version 2.0 is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
 */
package com.snowplowanalytics.weather

import java.time.{LocalDate, ZoneOffset, ZonedDateTime}

import cats.{Functor, Monad}
import cats.syntax.functor._
import cats.syntax.flatMap._

import com.snowplowanalytics.lrumap.{CreateLruMap, LruMap}

import errors.{TimeoutError, WeatherError}
import model.WeatherResponse

/** An LRU cache of weather responses, keyed by UTC day and rounded geo position. */
class Cache[F[_], W <: WeatherResponse] private (
  cache: LruMap[F, Cache.CacheKey, Either[WeatherError, W]],
  val geoPrecision: Int
) {

  import Cache._

  /**
   * Gets the response from cache; if not found, then calls the provided function `doRequest`,
   * returns its result and put it in the cache
   * @param latitude latitude of the event
   * @param longitude longitude of the event
   * @param dateTime datetime with zone
   * @param doRequest function which will be called after a cache miss
   * @return value stored in the cache or the result of the provided function
   */
  def getCachedOrRequest(latitude: Float, longitude: Float, dateTime: ZonedDateTime)(
    doRequest: (Float, Float, ZonedDateTime) => F[Either[WeatherError, W]]
  )(implicit
    M: Monad[F]
  ): F[Either[WeatherError, W]] = {
    val key = eventToCacheKey(dateTime, Position(latitude, longitude), geoPrecision)

    // Perform the real request and remember its outcome under `key`.
    def fetchAndStore: F[Either[WeatherError, W]] =
      doRequest(latitude, longitude, dateTime).flatTap(cache.put(key, _))

    cache.get(key).flatMap {
      case Some(Right(hit))            => M.pure(Right(hit))  // cache hit
      case Some(Left(TimeoutError(_))) => fetchAndStore       // cached timeouts are retried
      case Some(Left(error))           => M.pure(Left(error)) // other cached errors are sticky
      case None                        => fetchAndStore       // cache miss
    }
  }
}

object Cache {

  /**
   * Factory method to create a cache.
   * @param size resulting cache's size
   * @param geoPrecision nth part of 1 to which latitude and longitude will be rounded when making
   * requests to the cache
   * @return a cache of WeatherResponse wrapped in a F
   */
  def init[F[_]: Functor, W <: WeatherResponse](
    size: Int,
    geoPrecision: Int
  )(implicit CLM: CreateLruMap[F, CacheKey, Either[WeatherError, W]]): F[Cache[F, W]] =
    CLM.create(size).map(lru => new Cache[F, W](lru, geoPrecision))

  /**
   * Cache key for obtaining record
   *
   * @param date local date for UTC
   * @param center rounded geo coordinates
   */
  final case class CacheKey(date: LocalDate, center: Position)

  /** @param date local date for UTC
    * @return unix timestamp of the provided day's start
    */
  def dayStartEpoch(date: LocalDate): Timestamp =
    date.atStartOfDay().toEpochSecond(ZoneOffset.UTC)

  /** @param date local date for UTC
    * @return unix timestamp of the provided day's end
    */
  def dayEndEpoch(date: LocalDate): Timestamp =
    dayStartEpoch(date.plusDays(1))

  /**
   * Class to represent geographical coordinates
   *
   * @param latitude place's latitude
   * @param longitude places's longitude
   */
  final case class Position(latitude: Float, longitude: Float)

  /**
   * Round position and timestamp (event) to produce cache key
   *
   * @param dateTime zoned datetime
   * @param position latitude & longitude
   * @param geoPrecision nth part of 1 to which latitude and longitude will be rounded
   * @return cache key
   */
  def eventToCacheKey(dateTime: ZonedDateTime, position: Position, geoPrecision: Int): CacheKey = {
    val center = Position(
      roundCoordinate(position.latitude, geoPrecision),
      roundCoordinate(position.longitude, geoPrecision)
    )
    CacheKey(dateTime.withZoneSameInstant(ZoneOffset.UTC).toLocalDate, center)
  }

  /**
   * Round coordinate by `geoPrecision`
   * Scale value to tenths to prevent values to be long like 1.333334
   *
   * @param coordinate latitude or longitude
   * @return rounded coordinate
   */
  def roundCoordinate(coordinate: Float, geoPrecision: Int): Float = {
    val snapped = Math.round(coordinate * geoPrecision) / geoPrecision.toFloat
    BigDecimal
      .decimal(snapped)
      .setScale(1, BigDecimal.RoundingMode.HALF_UP)
      .toFloat
  }
}
snowplow/scala-weather
src/main/scala/com.snowplowanalytics/weather/Cache.scala
Scala
apache-2.0
4,858
package com.yetu.oauth2provider.utils

import play.api.Logger

trait NamedLogger {

  /**
   * Will result in a logger with the same name as the class that implements/extends this trait
   */
  // `this.getClass` resolves to the concrete mixing-in class at runtime, so
  // every class extending this trait gets its own correctly-named logger.
  val logger = Logger(this.getClass)
}
yetu/oauth2-provider
app/com/yetu/oauth2provider/utils/NamedLogger.scala
Scala
mit
231
/*
 * Copyright 2019 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.gov.hmrc.play.validators

import play.api.data.Forms._
import play.api.data.Mapping
import play.api.data.validation._
import uk.gov.hmrc.play.validators.AddressFields._

import scala.util.Try
import scala.util.matching.Regex

/** Reusable Play Forms mappings and plain predicates for address, email and
  * free-text validation. Mix in, or use the companion `Validators` object.
  */
trait Validators {

  // Play form mapping for a UK-style address: mandatory first line, three
  // optional continuation lines, optional postcode. Field names come from
  // AddressFields; error keys are message-bundle keys.
  val addressTuple = tuple(
    addressLine1 -> smallText
      .verifying("error.address.blank", e => notBlank(e))
      .verifying("error.address.main.line.max.length.violation", e => isMainAddressLineLengthValid(e))
      .verifying("error.address.invalid.character", e => characterValidator.containsValidAddressCharacters(e)),
    addressLine2 -> optional(smallText.verifying("error.address.optional.line.max.length.violation", e => isOptionalAddressLineLengthValid(e))
      .verifying("error.address.invalid.character", e => characterValidator.containsValidAddressCharacters(e))),
    addressLine3 -> optional(smallText.verifying("error.address.optional.line.max.length.violation", e => isOptionalAddressLineLengthValid(e))
      .verifying("error.address.invalid.character", e => characterValidator.containsValidAddressCharacters(e))),
    addressLine4 -> optional(smallText.verifying("error.address.optional.line.max.length.violation", e => isOptionalAddressLineLengthValid(e))
      .verifying("error.address.invalid.character", e => characterValidator.containsValidAddressCharacters(e))),
    postcode -> optional(smallText.verifying("error.postcode.length.violation", e => isPostcodeLengthValid(e))
      .verifying("error.postcode.invalid.character", e => characterValidator.containsValidPostCodeCharacters(e)))
  )

  // Small text prevents injecting large data into fields
  def smallText = play.api.data.Forms.text(0, 100)

  def nonEmptySmallText = play.api.data.Forms.nonEmptyText(0, 100)

  def nonEmptyNotBlankSmallText = smallText.verifying("error.required", e => notBlank(e))

  def smallEmail = play.api.data.Forms.email.verifying("error.maxLength", e => isValidMaxLength(100)(e))

  def positiveInteger = number.verifying("error.positive.number", e => e >= 0)

  //Play email enables 'a@a', this requires 'a@a.com'
  val emailWithDomain: Mapping[String] = text verifying Constraints.pattern(
    """(?i)[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?""".r,
    "constraint.email",
    "error.email")

  // NOTE(review): the doubled backslashes in the three patterns below
  // (e.g. "\\\\+?\\\\d+") make them match a literal backslash rather than
  // an optional '+' and digits. This looks like an escaping artifact from
  // an export/import of this file — confirm against the original source
  // before relying on these predicates.
  def validateMandatoryPhoneNumber = { s: String => s.matches("\\\\+?\\\\d+") }

  def validateOptionalPhoneNumber = { s: String => s.matches("\\\\+?\\\\d*") }

  def validateSaUtr = { s: String => s.matches("\\\\d{10}") }

  // True when the value has at least one non-whitespace character.
  def notBlank(value: String) = !value.trim.isEmpty

  def isBlank(value: String) = !notBlank(value)

  def isValidMaxLength(maxLength: Int)(value: String): Boolean = value.length <= maxLength

  def isValidMinLength(minLength: Int)(value: String): Boolean = value.length >= minLength

  def isMainAddressLineLengthValid = isValidMaxLength(28)(_)

  def isOptionalAddressLineLengthValid = isValidMaxLength(18)(_)

  // Postcode length is judged with all spaces removed: 5 to 7 characters.
  def isPostcodeLengthValid(value: String) = {
    val trimmedVal = value.replaceAll(" ", "")
    isValidMinLength(5)(trimmedVal) && isValidMaxLength(7)(trimmedVal)
  }
}

object Validators extends Validators

/** Character whitelists used by the address and postcode validators. */
object characterValidator {
  //Valid Characters Alphanumeric (A-Z, a-z, 0-9), hyphen( - ), apostrophe ( ' ), comma ( , ), forward slash ( / ) ampersand ( & ) and space
  private val invalidCharacterRegex = """[^A-Za-z0-9,/'\\-& ]""".r
  private val invalidPostCodeCharacterRegex = """[^A-Za-z0-9 ]""".r

  def containsValidPostCodeCharacters(value: String): Boolean =
    containsValidCharacters(value, invalidPostCodeCharacterRegex)

  def containsValidAddressCharacters(value: String): Boolean =
    containsValidCharacters(value, invalidCharacterRegex)

  // Valid iff the "invalid character" regex finds nothing.
  private def containsValidCharacters(value: String, regex: Regex): Boolean = {
    regex.findFirstIn(value).isEmpty
  }
}

/** Extractor: matches strings that parse as a Boolean ("true"/"false"). */
object ExtractBoolean {
  def unapply(s: String): Option[Boolean] = Try(s.toBoolean).toOption
}

/** Form field names shared between the address mapping and its consumers. */
object AddressFields {
  val addressLine1 = "addressLine1"
  val addressLine2 = "addressLine2"
  val addressLine3 = "addressLine3"
  val addressLine4 = "addressLine4"
  val postcode = "postcode"
}
nicf82/play-ui
src/main/scala/uk/gov/hmrc/play/validators/Validators.scala
Scala
apache-2.0
4,800
/*
 * BrowserClientPlatform.scala
 * (ScalaOSC)
 *
 * Copyright (c) 2008-2021 Hanns Holger Rutz. All rights reserved.
 *
 * This software is published under the GNU Lesser General Public License v2.1+
 *
 *
 * For further information, please contact Hanns Holger Rutz at
 * contact@sciss.de
 */

package de.sciss.osc

import de.sciss.osc.Browser.Config

import java.net.SocketAddress

// no constructors on the JVM
trait BrowserClientPlatform {

  /** Browser-based OSC transport exists only on the Scala.js platform.
    *
    * @throws UnsupportedOperationException always, on the JVM
    */
  def apply(target: SocketAddress, config: Config): Browser.Client =
    // Fixed copy-paste in the message: this factory produces a
    // `Browser.Client`, not a `Browser.Receiver`.
    throw new UnsupportedOperationException("Browser.Client not supported on the JVM")
}
Sciss/ScalaOSC
jvm/src/main/scala/de/sciss/osc/BrowserClientPlatform.scala
Scala
lgpl-2.1
604
package msgpack4z

import java.math.BigInteger

import scalaz.Order
import scalaz.Ordering._
import scalaz.std.anyVal._
import scalaz.std.java.math.bigInteger._
import scalaz.std.string._

// Total order over MsgpackUnion values. Variants are ranked (from greatest
// to least, per the case analysis below):
//   True > False > Nil > Long > String > Binary > Double
//        > ULong > Array > Map > Ext
// Values of the same variant compare by their payloads. The ranking is
// encoded implicitly by which variants each branch answers GT/LT for, so
// the order of cases must not be changed.
private[msgpack4z] object MsgpackUnionOrder extends Order[MsgpackUnion] {
  override def equalIsNatural = true
  override val toScalaOrdering = super.toScalaOrdering
  override def equal(x: MsgpackUnion, y: MsgpackUnion) = x === y

  // Element-wise order for arrays, using this order recursively.
  private[this] val UnionListOrder: Order[List[MsgpackUnion]] =
    scalaz.std.list.listOrder(this)

  // Key/value order for maps, using this order recursively on both sides.
  private[this] val UnionMapOrder: Order[Map[MsgpackUnion, MsgpackUnion]] =
    scalaz.std.map.mapOrder(this, this)

  override def order(x: MsgpackUnion, y: MsgpackUnion) = x match {
    case MsgpackTrue =>
      y match {
        case MsgpackTrue => EQ
        case _ => GT
      }
    case MsgpackFalse =>
      y match {
        case MsgpackTrue => LT
        case MsgpackFalse => EQ
        case _ => GT
      }
    case MsgpackNil =>
      y match {
        case MsgpackTrue | MsgpackFalse => LT
        case MsgpackNil => EQ
        case _ => GT
      }
    case MsgpackLong(xx) =>
      y match {
        case MsgpackTrue | MsgpackFalse | MsgpackNil => LT
        case MsgpackLong(yy) => Order[Long].order(xx, yy)
        case _ => GT
      }
    case MsgpackString(xx) =>
      y match {
        case MsgpackTrue | MsgpackFalse | MsgpackNil | (_: MsgpackLong) => LT
        case MsgpackString(yy) => Order[String].order(xx, yy)
        case _ => GT
      }
    case MsgpackBinary(xx) =>
      y match {
        case MsgpackTrue | MsgpackFalse | MsgpackNil | (_: MsgpackLong) | (_: MsgpackString) => LT
        case MsgpackBinary(yy) => compareArrayByte(xx, yy)
        case _ => GT
      }
    case MsgpackDouble(xx) =>
      y match {
        case MsgpackTrue | MsgpackFalse | MsgpackNil | MsgpackLong(_) | MsgpackString(_) | MsgpackBinary(_) => LT
        // Doubles compare via their raw long bits, so NaN has a stable,
        // deterministic position in the order (unlike IEEE `<`).
        case MsgpackDouble(yy) => Order[Long].order(java.lang.Double.doubleToLongBits(xx), java.lang.Double.doubleToLongBits(yy))
        case _ => GT
      }
    case MsgpackULong(xx) =>
      y match {
        case (_: MsgpackMap) | (_: MsgpackArray) | MsgpackExt(_, _) => GT
        case MsgpackULong(yy) => Order[BigInteger].order(xx, yy)
        case _ => LT
      }
    case MsgpackArray(xx) =>
      y match {
        case (_: MsgpackMap) | MsgpackExt(_, _) => GT
        case MsgpackArray(yy) => UnionListOrder.order(xx, yy)
        case _ => LT
      }
    case MsgpackMap(xx) =>
      y match {
        case MsgpackExt(_, _) => GT
        case MsgpackMap(yy) => UnionMapOrder.order(xx, yy)
        case _ => LT
      }
    case MsgpackExt(type1, data1) =>
      y match {
        case MsgpackExt(type2, data2) =>
          // Ext values order first by type byte, then by payload bytes.
          Order[Byte].order(type1, type2) match {
            case scalaz.Ordering.EQ => compareArrayByte(data1, data2)
            case other => other
          }
        case _ => LT
      }
  }

  // Byte arrays: shorter sorts first; equal lengths compare lexicographically
  // by signed byte value.
  private[this] def compareArrayByte(xx: Array[Byte], yy: Array[Byte]): scalaz.Ordering = {
    if (xx.length == yy.length) {
      @annotation.tailrec
      def loop(i: Int): scalaz.Ordering = {
        if (i < xx.length) {
          val a = xx(i)
          val b = yy(i)
          if (a < b) {
            scalaz.Ordering.LT
          } else if (a > b) {
            scalaz.Ordering.GT
          } else {
            loop(i + 1)
          }
        } else EQ
      }
      loop(0)
    } else if (xx.length < yy.length) {
      LT
    } else {
      GT
    }
  }
}
msgpack4z/msgpack4z-core
src/main/scala/msgpack4z/MsgpackUnionOrder.scala
Scala
mit
4,006
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / /  / _ |    (c) 2006-2010, LAMP/EPFL             **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */

package scala.util.parsing.input

/** `Position` is the base trait for objects describing a position in a
  * "document". It can render the position visually (`longString`) and
  * compare two positions (`<`). Concrete document kinds implement
  * `lineContents`.
  *
  * @author Martin Odersky, Adriaan Moors
  */
trait Position {

  /** The line number referred to by the position; line numbers start at 1 */
  def line: Int

  /** The column number referred to by the position; column numbers start at 1 */
  def column: Int

  /** The contents of the line at this position (must not contain a
    * new-line character). */
  protected def lineContents: String

  /** Returns a string representation of the `Position`, of the form `line.column`. */
  override def toString = s"$line.$column"

  /** Returns a more "visual" representation of this position: the document
    * line followed by a second line carrying a caret under the column.
    * Tabs in the prefix are preserved so the caret stays aligned.
    */
  def longString = {
    // Blank out everything before the caret column, keeping tabs as tabs.
    val caretPad = lineContents.take(column - 1).map { c =>
      if (c == '\t') c else ' '
    }
    lineContents + "\n" + caretPad + "^"
  }

  /** Compare this position to another: first by line number, then by
    * column to break ties.
    *
    * @param that the `Position` to compare against
    * @return true if this position comes strictly before `that`
    */
  def <(that: Position) =
    if (this.line != that.line) this.line < that.line
    else this.column < that.column
}
cran/rkafkajars
java/scala/util/parsing/input/Position.scala
Scala
apache-2.0
2,646
package com.julienvey.trello

import com.julienvey.trello.domain.{Card, Label}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FunSpec, Matchers}

// Integration tests for the Label API: these hit the live Trello service
// using the credentials/board configured in TrelloTestFactory/TrelloConfig,
// so they create and delete real labels and cards.
@RunWith(classOf[JUnitRunner])
class LabelIt extends FunSpec with Matchers {

  val trello = TrelloTestFactory.trello

  it("get label by id") {
    // Create a label first so there is a known id to fetch back.
    val labelId = trello.createLabel(new Label()
      .setColor("green")
      .setIdBoard(TrelloConfig.boardId)
      .setName("New Label"))
      .getId

    val label = trello.getLabel(labelId)

    label.getName should be("New Label")
  }

  it("create and delete label") {
    val label = new Label()
    label.setColor("green")
    label.setIdBoard(TrelloConfig.boardId)
    label.setName("New Label")

    val createdLabel = trello.createLabel(label)

    // Round-trip: the server echoes back what was sent, plus an id.
    createdLabel.getName should be(label.getName)
    createdLabel.getColor should be(label.getColor)
    createdLabel.getIdBoard should be(label.getIdBoard)
    createdLabel.getId should not be null

    trello.deleteLabel(createdLabel.getId)

    // After deletion the label must no longer be retrievable.
    the[NotFoundException] thrownBy trello.getLabel(createdLabel.getId)
  }

  it("should throw not found when deleting not existing label") {
    the[NotFoundException] thrownBy trello.deleteLabel("5c5fde2ffb23ac3506a9d6b9")
  }

  it("should throw not found when creating label on not existing board") {
    val label = new Label()
    label.setColor("green")
    label.setIdBoard("5b15c8a233e68e7cc54ca235")
    label.setName("New Label")

    the[NotFoundException] thrownBy trello.createLabel(label)
  }

  it("update label") {
    val label = new Label()
      .setColor("green")
      .setIdBoard(TrelloConfig.boardId)
      .setName("New Label")
    val created = trello.createLabel(label)

    created.setColor("red")
    created.setName("Updated Label")
    val updated = trello.updateLabel(created)

    updated.getColor should be(created.getColor)
    updated.getName should be(created.getName)

    trello.deleteLabel(updated.getId)
  }

  it("CRUD using fluent API") {
    // With an internal Trello handle set, the domain object can persist itself.
    val label = new Label()
      .setInternalTrello(trello)
      .setColor("blue")
      .setIdBoard(TrelloConfig.boardId)
      .setName("New Label")

    label.create() should be theSameInstanceAs label
    label.getId should not be null

    label.setColor("black").update().getColor should be(label.getColor)
    label.setName("Fluently updated Label name").update().getName should be(label.getName)

    label.delete()
    the[NotFoundException] thrownBy trello.getLabel(label.getId)
  }

  it("Create label and add to card") {
    val label = new Label()
      .setInternalTrello(trello)
      .setColor("blue")
      .setIdBoard(TrelloConfig.boardId)
      .setName("New Label")
      .create()

    val card = new Card()
    card.setName("Card to assign a label")
    val createdCard = trello.createCard(TrelloConfig.doingListId, card)

    trello.addLabelToCard(createdCard.getId, label.getId) should contain only label.getId
    trello.getCard(createdCard.getId).getLabels should contain(label)

    // Archive the card to keep the test board tidy.
    createdCard.setClosed(true)
    trello.updateCard(createdCard)
  }

  it("Create Label and add to card using fluent API") {
    var card = new Card()
    card.setName("Card to assign a label")
    card = trello.createCard(TrelloConfig.doingListId, card)

    val label = new Label()
      .setInternalTrello(trello)
      .setColor("blue")
      .setIdBoard(TrelloConfig.boardId)
      .setName("New Label")
      .create()
      .addToCard(card)

    card.getLabels should contain only label

    // Archive the card to keep the test board tidy.
    card.setClosed(true)
    trello.updateCard(card)
  }
}
bywan/trello-java-wrapper
src/test/scala/com/julienvey/trello/LabelIt.scala
Scala
apache-2.0
3,592
package so.modernized.whip.util

// Marker trait for things representable as vectors (currently empty).
trait Vectorable { }

/** Arithmetic extensions for plain `Array[Double]` vectors. */
object VectorOps {

  implicit class Vector(val xs: Array[Double]) extends AnyVal {

    /** Dot product of two equal-length vectors.
      *
      * @param ys the other vector; must have the same length as `xs`
      * @return the sum of element-wise products (0.0 for empty vectors)
      */
    def dot(ys: Array[Double]): Double = {
      assert(xs.length == ys.length)
      val n = xs.length
      var acc = 0.0
      var i = 0
      // Plain while-loop to avoid boxing on the hot path.
      while (i < n) {
        acc += xs(i) * ys(i)
        i += 1
      }
      acc
    }
  }
}
JackSullivan/whip
src/main/scala/so/modernized/whip/util/Vectorable.scala
Scala
apache-2.0
366
package com.arcusys.learn.liferay.util

import com.liferay.portal.service.permission.PortletPermissionUtil

// Thin pass-through to Liferay's PortletPermissionUtil, isolating the rest of
// the codebase from the Liferay-version-specific import path.
object PortletPermissionUtilHelper {

  /** Builds the permission primary key for a portlet instance.
    *
    * Both arguments are forwarded verbatim to Liferay; `plid` is presumably
    * the layout (page) id — confirm against the Liferay API docs.
    */
  def getPrimaryKey(plid: Long, portletId: String): String =
    PortletPermissionUtil.getPrimaryKey(plid, portletId)
}
ViLPy/Valamis
learn-liferay620-services/src/main/scala/com/arcusys/learn/liferay/util/PortletPermissionUtilHelper.scala
Scala
lgpl-3.0
265
import org.scalajs.sbtplugin.ScalaJSPlugin
import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._
import sbt.Keys._
import sbt._
import webscalajs.ScalaJSWeb
//import scalajsbundler.sbtplugin.ScalaJSBundlerPlugin
//import scalajsbundler.sbtplugin.ScalaJSBundlerPlugin.autoImport._

// sbt build definition of the Scala.js client sub-project.
object Client {

  // Client-specific settings layered on top of the build-wide commons.
  private[this] val clientSettings = Shared.commonSettings ++ Seq(
    name := "client",
    libraryDependencies ++= ClientDependencies.scalajsDependencies.value,
    jsDependencies ++= ClientDependencies.jsDependencies.value,
    // RuntimeDOM is needed for tests
    jsDependencies += RuntimeDOM % "test",
    // yes, we want to package JS dependencies
    skip in packageJSDependencies := false,
    // use Scala.js provided launcher code to start the client app
    scalaJSUseMainModuleInitializer := true,
    // fastOptJS (non-minified) output for quicker dev cycles
    scalaJSStage in Global := FastOptStage
    // scapegoatIgnoredFiles := Seq(".*/JsonUtils.scala", ".*/JsonSerializers.scala")
  )

  // The sub-project itself; shares code with the server via Shared.sharedJs.
  lazy val client = (project in file("client"))
    .settings(clientSettings: _*)
    .enablePlugins(ScalaJSPlugin, ScalaJSWeb)
    .dependsOn(Shared.sharedJs)
}
iriddhi/mis
project/Client.scala
Scala
apache-2.0
1,109
package lila.activity

import reactivemongo.api.bson._
import scala.util.Success

import lila.common.{ Day, Iso }
import lila.db.dsl._
import lila.rating.BSONHandlers.perfTypeKeyIso
import lila.rating.PerfType
import lila.study.BSONHandlers.StudyIdBSONHandler
import lila.study.Study
import lila.swiss.BsonHandlers.swissIdHandler
import lila.swiss.Swiss
import lila.user.User

/** BSON (de)serialization handlers for activity documents.
  *
  * Each handler maps one activity sub-model to its compact MongoDB
  * representation (single-letter field names, see `ActivityFields`).
  * Handlers are `lazy val`s — presumably to avoid initialization-order
  * issues between interdependent implicits; confirm before changing.
  */
private object BSONHandlers {

  import Activity._
  import activities._
  import model._

  // Separator between the user id and the day number in the document _id.
  val idSep = ':'

  // Selector matching every activity document of `userId`
  // (ids have the form "userId:day", see activityIdHandler below).
  def regexId(userId: User.ID): Bdoc = "_id" $startsWith s"$userId$idSep"

  // _id <-> Id(userId, day), stored as the string "userId:day".
  // Malformed ids are surfaced as handler errors rather than exceptions.
  implicit lazy val activityIdHandler = {
    tryHandler[Id](
      {
        case BSONString(v) =>
          v split idSep match {
            case Array(userId, dayStr) => Success(Id(userId, Day(Integer.parseInt(dayStr))))
            case _                     => handlerBadValue(s"Invalid activity id $v")
          }
      },
      id => BSONString(s"${id.userId}$idSep${id.day.value}")
    )
  }

  // Rating stored as a plain BSON integer.
  implicit private lazy val ratingHandler = BSONIntegerHandler.as[Rating](Rating.apply, _.value)

  // RatingProg stored as a 2-element array: [before, after].
  implicit private lazy val ratingProgHandler = tryHandler[RatingProg](
    {
      case v: BSONArray =>
        for {
          before <- v.getAsTry[Rating](0)
          after  <- v.getAsTry[Rating](1)
        } yield RatingProg(before, after)
    },
    o => BSONArray(o.before, o.after)
  )

  // Score: win/loss/draw counts plus optional rating progression,
  // under one-letter keys. Missing counts read as 0 (intD default).
  implicit private lazy val scoreHandler = new lila.db.BSON[Score] {
    private val win  = "w"
    private val loss = "l"
    private val draw = "d"
    private val rp   = "r"
    def reads(r: lila.db.BSON.Reader) =
      Score(
        win = r.intD(win),
        loss = r.intD(loss),
        draw = r.intD(draw),
        rp = r.getO[RatingProg](rp)
      )
    def writes(w: lila.db.BSON.Writer, o: Score) =
      BSONDocument(
        win  -> w.intO(o.win),
        loss -> w.intO(o.loss),
        draw -> w.intO(o.draw),
        rp   -> o.rp
      )
  }

  // Games: map of perf type -> Score, keyed by the perf-type key iso.
  implicit lazy val gamesHandler =
    typedMapHandler[PerfType, Score](perfTypeKeyIso)
      .as[Games](Games.apply, _.value)

  implicit private lazy val gameIdHandler = BSONStringHandler.as[GameId](GameId.apply, _.value)

  // Forum posts are persisted as a list of post id strings.
  implicit private lazy val forumPostIdHandler =
    BSONStringHandler.as[ForumPostId](ForumPostId.apply, _.value)
  implicit lazy val forumPostsHandler =
    isoHandler[ForumPosts, List[ForumPostId]]((p: ForumPosts) => p.value, ForumPosts.apply _)

  // Ublog (blog) posts, same list-of-ids encoding as forum posts.
  implicit private lazy val ublogPostIdHandler =
    BSONStringHandler.as[UblogPostId](UblogPostId.apply, _.value)
  implicit lazy val ublogPostsHandler =
    isoHandler[UblogPosts, List[UblogPostId]]((p: UblogPosts) => p.value, UblogPosts.apply _)

  // Puzzles reuse the Score representation defined above.
  implicit lazy val puzzlesHandler = isoHandler[Puzzles, Score]((p: Puzzles) => p.score, Puzzles.apply _)

  // Storm / Racer / Streak all share the same shape: runs "r" + score "s".
  implicit lazy val stormHandler = new lila.db.BSON[Storm] {
    def reads(r: lila.db.BSON.Reader)            = Storm(r.intD("r"), r.intD("s"))
    def writes(w: lila.db.BSON.Writer, s: Storm) = BSONDocument("r" -> s.runs, "s" -> s.score)
  }
  implicit lazy val racerHandler = new lila.db.BSON[Racer] {
    def reads(r: lila.db.BSON.Reader)            = Racer(r.intD("r"), r.intD("s"))
    def writes(w: lila.db.BSON.Writer, r: Racer) = BSONDocument("r" -> r.runs, "s" -> r.score)
  }
  implicit lazy val streakHandler = new lila.db.BSON[Streak] {
    def reads(r: lila.db.BSON.Reader)             = Streak(r.intD("r"), r.intD("s"))
    def writes(w: lila.db.BSON.Writer, r: Streak) = BSONDocument("r" -> r.runs, "s" -> r.score)
  }

  // Learn: map of learning stage -> int, keyed by the stage's string value.
  implicit lazy val learnHandler =
    typedMapHandler[Learn.Stage, Int](Iso.string(Learn.Stage.apply, _.value))
      .as[Learn](Learn.apply, _.value)

  // Practice: map of study id -> int.
  implicit lazy val practiceHandler =
    typedMapHandler[Study.Id, Int](Iso.string[Study.Id](Study.Id.apply, _.value))
      .as[Practice](Practice.apply, _.value)

  implicit lazy val simulIdHandler = BSONStringHandler.as[SimulId](SimulId.apply, _.value)
  implicit lazy val simulsHandler =
    isoHandler[Simuls, List[SimulId]]((s: Simuls) => s.value, Simuls.apply _)

  // Corres and FollowList get macro-derived document handlers.
  implicit lazy val corresHandler = Macros.handler[Corres]
  implicit lazy val patronHandler = BSONIntegerHandler.as[Patron](Patron.apply, _.months)
  implicit lazy val followListHandler = Macros.handler[FollowList]

  // Follows: "i" = incoming, "o" = outgoing; empty lists are dropped on read.
  implicit lazy val followsHandler = new lila.db.BSON[Follows] {
    def reads(r: lila.db.BSON.Reader) =
      Follows(
        in = r.getO[FollowList]("i").filterNot(_.isEmpty),
        out = r.getO[FollowList]("o").filterNot(_.isEmpty)
      )
    def writes(w: lila.db.BSON.Writer, o: Follows) =
      BSONDocument(
        "i" -> o.in,
        "o" -> o.out
      )
  }

  implicit lazy val studiesHandler =
    isoHandler[Studies, List[Study.Id]]((s: Studies) => s.value, Studies.apply _)
  implicit lazy val teamsHandler =
    isoHandler[Teams, List[String]]((s: Teams) => s.value, Teams.apply _)

  // SwissRank: swiss tournament id "i" + achieved rank "r".
  implicit lazy val swissRankHandler = new lila.db.BSON[SwissRank] {
    def reads(r: lila.db.BSON.Reader)                = SwissRank(Swiss.Id(r.str("i")), r.intD("r"))
    def writes(w: lila.db.BSON.Writer, s: SwissRank) = BSONDocument("i" -> s.id, "r" -> s.rank)
  }
  implicit lazy val swissesHandler =
    isoHandler[Swisses, List[SwissRank]]((s: Swisses) => s.value, Swisses.apply _)

  // Canonical (short) BSON field names for the top-level Activity document.
  // These are persisted keys: renaming any of them is a data migration.
  object ActivityFields {
    val id         = "_id"
    val games      = "g"
    val forumPosts = "p"
    val ublogPosts = "u"
    val puzzles    = "z"
    val storm      = "m"
    val racer      = "c"
    val streak     = "k"
    val learn      = "l"
    val practice   = "r"
    val simuls     = "s"
    val corres     = "o"
    val patron     = "a"
    val follows    = "f"
    val studies    = "t"
    val teams      = "e"
    val swisses    = "w"
    val stream     = "st"
  }

  // Top-level Activity document handler. All sections except the id are
  // optional; absent fields read as None (or false for `stream`).
  implicit lazy val activityHandler = new lila.db.BSON[Activity] {

    import ActivityFields._

    def reads(r: lila.db.BSON.Reader) =
      Activity(
        id = r.get[Id](id),
        games = r.getO[Games](games),
        forumPosts = r.getO[ForumPosts](forumPosts),
        ublogPosts = r.getO[UblogPosts](ublogPosts),
        puzzles = r.getO[Puzzles](puzzles),
        storm = r.getO[Storm](storm),
        racer = r.getO[Racer](racer),
        streak = r.getO[Streak](streak),
        learn = r.getO[Learn](learn),
        practice = r.getO[Practice](practice),
        simuls = r.getO[Simuls](simuls),
        corres = r.getO[Corres](corres),
        patron = r.getO[Patron](patron),
        // empty follow sets collapse to None, mirroring followsHandler
        follows = r.getO[Follows](follows).filterNot(_.isEmpty),
        studies = r.getO[Studies](studies),
        teams = r.getO[Teams](teams),
        swisses = r.getO[Swisses](swisses),
        stream = r.getD[Boolean](stream)
      )

    def writes(w: lila.db.BSON.Writer, o: Activity) =
      BSONDocument(
        id         -> o.id,
        games      -> o.games,
        forumPosts -> o.forumPosts,
        ublogPosts -> o.ublogPosts,
        puzzles    -> o.puzzles,
        storm      -> o.storm,
        racer      -> o.racer,
        streak     -> o.streak,
        learn      -> o.learn,
        practice   -> o.practice,
        simuls     -> o.simuls,
        corres     -> o.corres,
        patron     -> o.patron,
        follows    -> o.follows,
        studies    -> o.studies,
        teams      -> o.teams,
        swisses    -> o.swisses,
        // NOTE(review): `.option(true)` appears to write the field only
        // when the flag is set (lila Boolean extension) — confirm.
        stream -> o.stream.option(true)
      )
  }
}
luanlv/lila
modules/activity/src/main/BSONHandlers.scala
Scala
mit
7,309