code stringlengths 5 1M | repo_name stringlengths 5 109 | path stringlengths 6 208 | language stringclasses 1 value | license stringclasses 15 values | size int64 5 1M |
|---|---|---|---|---|---|
/*
* Copyright (C) 2011-2013 org.bayswater
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bayswater.musicrest.model
import org.bayswater.musicrest.Util._
import net.liftweb.json._
import spray.http._
import spray.httpx.marshalling._
/** A single user comment, renderable to JSON or XML on demand.
  *
  * @param user    author of the comment
  * @param cid     comment identifier
  * @param subject comment subject line
  * @param text    comment body
  */
case class Comment(user: String, cid: String, subject: String, text: String) {

  /** Field name -> value view of this comment. */
  val toMap =
    Map("user" -> user, "cid" -> cid, "subject" -> subject, "text" -> text)

  /** Render the comment as a JSON object string. */
  def toJSON: String =
    toMap
      .map { case (key, value) => formatJSON(key, value) }
      .mkString("{", ",", "}")

  /** Render the comment as an XML fragment (via an intermediate JSON parse). */
  def toXML: String = {
    val parsed = JsonParser.parse(toJSON)
    "<comment>" + Xml.toXml(parsed).toString + "</comment>"
  }

  /* supports json and xml at the moment */
  def to(mediaType: MediaType): String =
    mediaType.subType match {
      case "json" => toJSON
      case "xml"  => toXML
      case other  => "Unsupported media type: " + other
    }
}
/** Companion: MongoDB deserialisation and spray marshalling support. */
object Comment {

  /** Build a Comment from its MongoDB document.
    * NOTE(review): assumes all four fields are present as Strings — a missing
    * field yields a null member; confirm the collection schema guarantees them.
    */
  def apply(mdbo: com.mongodb.DBObject): Comment =
    Comment(
      mdbo.get("user").asInstanceOf[String],
      mdbo.get("cid").asInstanceOf[String],
      mdbo.get("subject").asInstanceOf[String],
      mdbo.get("text").asInstanceOf[String])

  /** Marshals a Comment to application/json or text/xml responses,
    * delegating the rendering to [[Comment.to]]. */
  implicit val commentMarshaller = {
    val supportedTypes = Array(
      ContentTypes.`application/json`,
      ContentType(MediaTypes.`text/xml`))
    Marshaller.of[Comment](supportedTypes: _*) { (comment, requestedContentType, ctx) =>
      val body: String = comment.to(requestedContentType.mediaType)
      ctx.marshalTo(HttpEntity(requestedContentType, body))
    }
  }
}
| newlandsvalley/musicrest | src/main/scala/org/bayswater/musicrest/model/Comment.scala | Scala | apache-2.0 | 2,281 |
package dbpedia.dataparsers.util
import java.net._
/** Helpers for recognising, normalising and relativizing URIs. */
object UriUtils
{
  // Schemes this parser is prepared to keep.
  private val knownSchemes = Set("http", "https", "ftp")

  private val knownPrefixes = knownSchemes.map(scheme => scheme + "://")

  /** True when the string begins with one of the supported scheme prefixes. */
  def hasKnownScheme(uri: String): Boolean =
    knownPrefixes.exists(prefix => uri.startsWith(prefix))

  /** Normalize a URI that has a supported scheme; None for any other scheme. */
  def cleanLink(uri: URI): Option[String] =
    if (knownSchemes.contains(uri.getScheme)) Some(uri.normalize.toString)
    else None

  /**
   * Relativizes the given parent URI against a child URI.
   *
   * @param parent base URI
   * @param child URI expected to live under `parent`
   * @return path from parent to child
   * @throws IllegalArgumentException if parent is not a parent directory of child.
   */
  def relativize(parent: URI, child: URI): URI = {
    val relative = parent.relativize(child)
    // URI.relativize returns the child unchanged when it cannot relativize.
    if (relative eq child)
      throw new IllegalArgumentException("[" + parent + "] is not a parent directory of [" + child + "]")
    relative
  }
}
| FnOio/dbpedia-parsing-functions-scala | src/main/scala/dbpedia/dataparsers/util/UriUtils.scala | Scala | gpl-2.0 | 974 |
package controllers.beans
/** Immutable credentials bean: email, password and display name. */
case class User (email: String, password: String, fullname: String) | leleueri/easygift | easygift-play/app/controllers/beans/User.scala | Scala | apache-2.0 | 94
package marg.token
/** An immutable lexical token: its text plus its [[TokenKind]].
  * Instances are created only through the companion factories. */
class Token private(string: String, kind: TokenKind) {
  require(string != null)

  override def toString: String = s"$string : $kind"

  /** True for the end-of-input sentinel kind. */
  def isEOF: Boolean = kind eq TokenKind.EOF

  /** The raw text of the token. */
  def String: String = string

  /** The classification of the token. */
  def Kind: TokenKind = kind
}
/** Factory methods and the shared end-of-input token. */
object Token {
  def apply(string: String, kind: TokenKind) = new Token(string, kind)

  /** Single-character convenience factory; delegates to the String overload. */
  def apply(char: Char, kind: TokenKind) = apply(char.toString, kind)

  /** Sentinel marking the end of the input stream. */
  val EOF = Token("", TokenKind.EOF)
}
| 193s/marg | src/main/scala/marg/token/Token.scala | Scala | gpl-2.0 | 439 |
import org.scalatest.FunSuite
import com.ssjskipp.misc._
/** Exercises a two-leaf decision tree end to end: a root split on feature 0
* routes samples to leaves that classify on the sign of feature 1. */
class DecisionTreeSpec extends FunSuite {
test("Trivial DecisionTree should give expected results") {
// Left leaf: classifies on the sign of the second feature.
val leftDecide = ClassifyLeaf(
(x: Seq[Double]) => if (x(1) > 0) "Positive" else "Negative or Zero"
)
// Right leaf: mirror image of the left leaf.
val rightDecide = ClassifyLeaf(
(x: Seq[Double]) => if (x(1) < 0) "Negative" else "Positive or Zero"
)
// Root split on the first feature: x(0) > 5 reaches leftDecide.
val root = SplitNode(
(x: Seq[Double]) => x(0) > 5,
leftDecide,
rightDecide
)
val tree = new DecisionTree[String](root)
// Samples with x(0) <= 5, expected to reach the right leaf.
val sortRight = Seq(
Seq(0d, 15d),
Seq(1d, -15d),
Seq(-10d, -10d),
Seq(5d, 0d)
)
val answerRight = Seq(
"Positive or Zero",
"Negative",
"Negative",
"Positive or Zero"
)
// Samples with x(0) > 5, expected to reach the left leaf.
val sortLeft = Seq(
Seq(15d, 10d),
Seq(5.001d, -15d),
Seq(7d, 0d)
)
val answerLeft = Seq(
"Positive",
"Negative or Zero",
"Negative or Zero"
)
val testData = sortLeft ++ sortRight
val expectation = answerLeft ++ answerRight
val results = testData.map(tree.process)
// NOTE(review): zip silently truncates if the sequences differ in length;
// asserting results.size == expectation.size first would harden this test.
results.zip(expectation).foreach(
x => assert(x._1 == x._2)
)
}
} | ssjskipp/thought | src/test/scala/DecisionTree.spec.scala | Scala | mit | 1,103
package formation.warp10
import java.time.LocalDateTime
import scala.util.{Failure, Success}
import akka.actor.ActorSystem
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}
import kneelnrise.warp10scala.model._
import kneelnrise.warp10scala.services.Warp10Client
/** Thin facade over a Warp10 client: pushes GTS data points and runs fetch
* queries. All methods are fire-and-forget — completion and failures are only
* printed to stdout, never surfaced to the caller. */
class Warp10Api(configuration: Configuration)(implicit warp10configuration: Warp10Configuration, actorSystem: ActorSystem, actorMaterializer: ActorMaterializer) {
implicit val ec = actorSystem.dispatcher
val w10client = Warp10Client(configuration.host, configuration.port)
/** Push every GTS point through the client's push flow. */
def pushAll(gts: Seq[GTS]): Unit = {
Source.fromIterator(() => gts.iterator)
.via(w10client.push)
.runWith(Sink.foreach(println))
.onComplete {
case Success(x) => println("Done: " + x)
case Failure(x) => println("Failure: " + x)
}
}
/** Push a single GTS point. */
def push(gts: GTS): Unit = {
Source.single(gts)
.via(w10client.push)
.runWith(Sink.foreach(println))
.onComplete {
case Success(x) => println("Done: " + x)
case Failure(x) => println("Failure: " + x)
}
}
/** Fetch series matching a hard-coded selector over a 1-unit interval
* anchored at LocalDateTime.now().
* NOTE(review): the `time` parameter is never used — confirm whether it was
* meant to parameterise the FetchInterval. */
def read(time: Long): Unit = {
Source.single(FetchQuery(
selector = Selector("~org.test.plain.*{}"),
interval = FetchInterval(LocalDateTime.now(), 1),
dedup = false
)).via(w10client.fetch)
.runWith(Sink.foreach(println))
.onComplete {
case Success(x) => println("Done: " + x)
case Failure(x) => println("Failure: " + x)
}
}
}
| kneelnrise/formation-warp10 | src/main/scala/formation/warp10/Warp10Api.scala | Scala | mit | 1,500 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.app.nlp
/** A span of Tokens making up a sentence within a Section of a Document.
A Sentence is a special case of a TokenSpan, stored in its Section, and available through the Section.sentences method.
From the Sentence you can get its sequence of Tokens, the Section that contains it, and the Document that contains it.
Sentences can be added (in order) to a Section, but not removed from a Section.
The index of this Sentence into the sequence of Sentences in the Section is available as 'Sentence.indexInSection'.
The annotation ParseTree is stored on a Sentence.
Unlike other TokenSpans, constructing a Sentence automatically add it to its Sections.
@author Andrew McCallum */
class Sentence(sec:Section, initialStart:Int, initialLength:Int) extends TokenSpan(sec, initialStart, initialLength) {
/** Construct a new 0-length Sentence that begins just past the current last token of the Section, and add it to the Section automatically.
This constructor is typically used when reading labeled training data one token at a time, where we need Sentence and Token objects. */
def this(sec:Section) = this(sec, sec.length, 0)
/** Construct a new 0-length Sentence that begins just past the current last token of the doc.asSection, and add it to the Section automatically.
This constructor is typically used when reading labeled training data one token at a time, where we need Sentence and Token objects. */
def this(doc:Document) = this(doc.asSection)
// Initialization: record that Sentence annotation exists on the document
// (attributed to an unknown annotator if nothing else has claimed it), then
// append this sentence to its Section. Order matters: _indexInSection below
// is derived from the section's sentence count after this addition.
if (!sec.document.annotators.contains(classOf[Sentence])) sec.document.annotators(classOf[Sentence]) = UnknownDocumentAnnotator.getClass
sec.addSentence(this)
// Position of this sentence within the Section, fixed at construction time.
private val _indexInSection: Int = sec.sentences.length - 1
/** Returns the number of Sentences before this one in the Section. */
def indexInSection: Int = _indexInSection
/** Returns true if the given Token is inside this Sentence. */
def contains(element:Token) = tokens.contains(element) // TODO Re-implement this to be faster avoiding search using token.stringStart bounds
// Parse attributes
/** If this Sentence has a ParseTree, return it; otherwise return null. */
def parse = attr[cc.factorie.app.nlp.parse.ParseTree]
/** Return the Token at the root of this Sentence's ParseTree. Will throw an exception if there is no ParseTree. */
def parseRootChild: Token = attr[cc.factorie.app.nlp.parse.ParseTree].rootChild
// common labels
/** Returns the sequence of PennPosTags attributed to the sequence of Tokens in this Sentence. */
def posTags: IndexedSeq[pos.PennPosTag] = tokens.map(_.posTag)
/** Returns the sequence of NerTags attributed to the sequence of Tokens in this Sentence. */
def nerTags: IndexedSeq[ner.NerTag] = tokens.map(_.nerTag)
}
// Cubbie storage
/** Cubbie (de)serialization for [[Sentence]] spans. Subclasses hook extra
  * state in via the finishStore/finishFetch methods. */
class SentenceCubbie extends TokenSpanCubbie {
  /** Hook for subclasses to persist additional sentence state; no-op here. */
  def finishStoreSentence(s: Sentence): Unit = {}

  /** Persist the sentence's span data, then any subclass extras. */
  def storeSentence(s: Sentence): this.type = {
    storeTokenSpan(s) // also calls finishStoreTokenSpan(s)
    finishStoreSentence(s)
    this
  }

  /** Hook for subclasses to restore additional sentence state. */
  def finishFetchSentence(s: Sentence): Unit = finishFetchTokenSpan(s)

  /** Reconstruct a Sentence in `section` from the stored start/length. */
  def fetchSentence(section: Section): Sentence = {
    val sentence = new Sentence(section, start.value, length.value)
    finishFetchSentence(sentence)
    sentence
  }
}
// To save the sentence with its parse tree use "new SentenceCubbie with SentenceParseCubbie"
/** Mix-in for SentenceCubbie that additionally persists the sentence's
* ParseTree in a "parse" slot. */
trait SentenceParseCubbie extends SentenceCubbie {
val parse = CubbieSlot("parse", () => new cc.factorie.app.nlp.parse.ParseTreeCubbie)
override def finishStoreSentence(s:Sentence): Unit = {
// Store base sentence state first, then serialize the parse tree.
super.finishStoreSentence(s)
parse := parse.constructor().storeParseTree(s.parse)
}
override def finishFetchSentence(s:Sentence): Unit = {
// Restore base sentence state, then attach the deserialized parse tree.
super.finishFetchSentence(s)
s.attr += parse.value.fetchParseTree(s)
}
}
| iesl/fuse_ttl | src/factorie-factorie_2.11-1.1/src/main/scala/cc/factorie/app/nlp/Sentence.scala | Scala | apache-2.0 | 4,555 |
/*
* Copyright (c) 2016 SnappyData, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package io.snappydata.app
// Simple row case classes used as fixtures by the external-table tests.
case class TestData(key: Int, value: String)
// Three integer columns.
case class Data(col1: Int, col2: Int, col3: Int)
// Integer key with one string column.
case class Data1(col1: Int, col2: String)
// Integer key with two string columns.
case class Data2(col1: Int, col2: String, col3: String)
// Integer key with three string columns.
case class Data3(col1: Int, col2: String, col3: String, col4: String)
| vjr/snappydata | core/src/test/scala/io/snappydata/app/ExternalTableTestData.scala | Scala | apache-2.0 | 944 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn.mkldnn
import com.intel.analytics.bigdl.mkl.{DataType, Memory, MklDnn}
import com.intel.analytics.bigdl.nn.abstractnn.TensorModule
import com.intel.analytics.bigdl.tensor.{DnnTensor, Tensor}
/** MKL-DNN layer that copies a tensor between two memory layouts: the forward
* pass reorders inputFormat -> outputFormat, the backward pass reorders
* gradOutputFormat -> gradInputFormat. Null formats are inferred from the
* runtime-supplied formats inside the init methods below. */
class ReorderMemory(inputFormat: MemoryData, outputFormat: MemoryData,
gradInputFormat: MemoryData, gradOutputFormat: MemoryData
) extends MklDnnLayer {
_outputFormats = Array(outputFormat)
_gradInputFormats = Array(gradInputFormat)
// Build the forward reorder primitive; falls back to the runtime-provided
// input formats when no explicit inputFormat was given.
override private[mkldnn] def initFwdPrimitives(inputs: Array[MemoryData], phase: Phase) = {
_inputFormats = if (inputFormat == null) inputs else Array(inputFormat)
require(_inputFormats.length == 1, "Only accept one tensor as input")
// Layouts may differ, but they must describe the same element count.
require(_inputFormats(0).shape.product == outputFormat.shape.product,
"input output memory not match")
val fwdReorderPrimDesc = MklDnn.ReorderPrimitiveDescCreate(
_inputFormats(0).getPrimitiveDescription(runtime),
outputFormat.getPrimitiveDescription(runtime))
val fwdReorderPrim = MklDnn.PrimitiveCreate2(fwdReorderPrimDesc,
Array(_inputFormats(0).getPrimitive(runtime)), Array(0), 1,
Array(outputFormat.getPrimitive(runtime)), 1)
updateOutputPrimitives = Array(fwdReorderPrim)
output = initTensor(outputFormat)
(_inputFormats, _outputFormats)
}
// Build the backward reorder primitive (gradOutput -> gradInput).
override private[mkldnn] def initBwdPrimitives(grads: Array[MemoryData], phase: Phase) = {
// gradInput layout preference: explicit gradInputFormat, else the explicit
// inputFormat, else whatever was resolved for the forward input.
_gradInputFormats = (gradInputFormat, inputFormat) match {
case (null, null) => inputFormats()
case (null, x) => Array(x)
case (x, _) => Array(x)
}
_gradOutputFormats = if (gradOutputFormat == null) grads else Array(gradOutputFormat)
_gradOutputFormatsForWeight = if (gradOutputFormat == null) grads else Array(gradOutputFormat)
require(_gradOutputFormats.length == 1, "Only accept one tensor as input")
require(_gradOutputFormats(0).shape.product == _gradInputFormats(0).shape.product,
"input output memory not match")
val bwdReorderPrimDesc = MklDnn.ReorderPrimitiveDescCreate(
_gradOutputFormats(0).getPrimitiveDescription(runtime),
_gradInputFormats(0).getPrimitiveDescription(runtime))
val bwdReorderPrim = MklDnn.PrimitiveCreate2(bwdReorderPrimDesc,
_gradOutputFormats.map(_.getPrimitive(runtime)), Array(0), 1,
_gradInputFormats.map(_.getPrimitive(runtime)), 1)
updateGradInputPrimitives = Array(bwdReorderPrim)
gradInput = initTensor(_gradInputFormats(0))
(_gradOutputFormats, _gradInputFormats)
}
// _inputFormats is only populated by initFwdPrimitives, hence the null guard.
override def toString(): String = {
if (_inputFormats != null) {
s"nn.mkl.ReorderMemory(${_inputFormats(0)} -> ${outputFormat})"
} else {
s"nn.mkl.ReorderMemory(_ -> ${outputFormat})"
}
}
}
object ReorderMemory {
/** Fully-specified factory.
* NOTE(review): `gradOutputFomat` is a typo for `gradOutputFormat`, but
* renaming it would break named-argument callers; left as-is. */
def apply(inputFormat: MemoryData, outputFormat: MemoryData, gradInputFormat: MemoryData,
gradOutputFomat: MemoryData): ReorderMemory = {
new ReorderMemory(inputFormat, outputFormat, gradInputFormat, gradOutputFomat)
}
/** Infer the input format from upstream; reorder gradients to gradInputFormat. */
def apply(outputFormat: MemoryData, gradInputFormat: MemoryData): ReorderMemory = {
new ReorderMemory(null, outputFormat, gradInputFormat, null)
}
/** Infer both the input format and the gradient formats (see
* initBwdPrimitives: null formats fall back to the resolved input formats). */
def apply(outputFormat: MemoryData): ReorderMemory = {
new ReorderMemory(null, outputFormat, null, null)
}
}
| yiheng/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/mkldnn/ReorderMemory.scala | Scala | apache-2.0 | 3,868 |
package lila.forum
import play.api.libs.json.Json
import lila.db.api._
import lila.db.Implicits._
import tube.categTube
/** Mongo repository for forum categs (lila query DSL). */
object CategRepo {
/** Fetch a categ by its slug, which doubles as the document id. */
def bySlug(slug: String) = $find byId slug
/** Categs visible given team memberships: every public categ (no "team"
* field) plus the categs of the supplied teams, sorted by ascending "pos". */
def withTeams(teams: List[String]): Fu[List[Categ]] =
$find($query($or(Seq(
Json.obj("team" -> $exists(false)),
Json.obj("team" -> $in(teams))
))) sort $sort.asc("pos"))
/** One past the highest "pos" value — presumably 1 when no categ exists
* (the `~` looks like a zero-default on the Option; confirm). */
def nextPosition: Fu[Int] = $primitive.one(
$select.all,
"pos",
_ sort $sort.desc("pos")
)(_.asOpt[Int]) map (~_ + 1)
/** Post count of the given categ; defaults to 0 when absent. */
def nbPosts(id: String): Fu[Int] =
$primitive.one($select(id), "nbPosts")(_.asOpt[Int]) map (~_)
}
| danilovsergey/i-bur | modules/forum/src/main/CategRepo.scala | Scala | mit | 624 |
package org.scalawiki.wlx
import org.scalawiki.cache.CachedBot
import org.scalawiki.dto.Site
import org.scalawiki.wlx.dto.Contest
import org.scalawiki.wlx.query.MonumentQuery
import org.specs2.mutable.Specification
/** Sanity checks over the 2019 Wiki Loves Monuments Ukraine monument lists,
* using a cached wiki bot so repeated runs reuse fetched pages. */
class WlmUaListsSpec extends Specification {
// NOTE(review): presumably needed for JNA native-library loading in the test
// environment — confirm.
sys.props.put("jna.nosys", "true")
sequential
val campaign = "wlm-ua"
val cacheName = s"$campaign-2019"
val contest = Contest.byCampaign(campaign).get.copy(year = 2019)
val country = contest.country
val bot = new CachedBot(Site.ukWiki, cacheName + "-wiki", true, entries = 1000)
val monumentQuery = MonumentQuery.create(contest)(bot)
val monumentDb = MonumentDB.getMonumentDb(contest, monumentQuery)
val all = monumentDb.allMonuments
println(s"all size: ${all.size}")
"places" should {
"be mostly detected" in {
all must not(beEmpty)
val notFound = monumentDb.unknownPlaces()
println(s"notFound size: ${notFound.size}")
notFound
.sortBy(-_.monuments.size)
.foreach(println)
val percentage = notFound.map(_.monuments.size).sum * 100 / all.size
println(s"percentage: $percentage%")
percentage should be < 8 // less than 8% of monuments in unknown places
}
"not be just high level region" in {
val updater = new RegionFixerUpdater(monumentDb)
updater.raions.size === 490
// Monuments whose "city" is actually a raion (district) name.
val highLevel = all.filter(m => updater.raionNames.contains(m.cityName) && m.place.exists(_.trim.nonEmpty))
println(s"highLevel size: ${highLevel.size}")
highLevel.groupBy(_.page).toSeq.sortBy(-_._2.size).foreach { case (page, monuments) =>
println(s"$page ${monuments.size} (${monuments.head.city.getOrElse("")})")
}
val percentage = highLevel.size * 100 / all.size
println(s"percentage: $percentage%")
percentage should be <= 5 // at most 5% placed only at raion level
}
}
} | intracer/scalawiki | scalawiki-wlx/src/test/scala/org/scalawiki/wlx/WlmUaListsSpec.scala | Scala | apache-2.0 | 1,826
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.featran.transformers
import com.spotify.featran.{FlatReader, FlatWriter}
/**
* Transform an optional 1D feature to an indicator variable indicating presence.
*
* Missing values are mapped to 0.0. Present values are mapped to 1.0.
*/
object Indicator extends SettingsBuilder {
/** Create a new [[Indicator]] instance for the named feature. */
def apply(name: String): Transformer[Double, Unit, Unit] =
new Indicator(name)
/**
* Create a new [[Indicator]] from a settings object.
* Only the name is restored — the transformer itself is stateless.
* @param setting
* Settings object
*/
def fromSettings(setting: Settings): Transformer[Double, Unit, Unit] =
Indicator(setting.name)
}
private[featran] class Indicator(name: String) extends MapOne[Double](name) {
// Flat read/write delegate to the named double field.
override def flatRead[T: FlatReader]: T => Option[Any] = FlatReader[T].readDouble(name)
override def flatWriter[T](implicit fw: FlatWriter[T]): Option[Double] => fw.IF =
fw.writeDouble(name)
// Any present value maps to 1.0; MapOne presumably supplies 0.0 for the
// missing case (per the companion scaladoc) — confirm in MapOne.
override def map(a: Double): Double = 1
}
| spotify/featran | core/src/main/scala/com/spotify/featran/transformers/Indicator.scala | Scala | apache-2.0 | 1,572 |
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package kafka.manager
import java.util.Properties
import akka.actor.{ActorRef, ActorSystem, Kill, Props}
import akka.pattern._
import akka.util.Timeout
import com.typesafe.config.{Config, ConfigFactory}
import kafka.manager.utils.KafkaServerInTest
import ActorModel._
import kafka.test.SeededBroker
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.reflect.ClassTag
import scala.util.Try
/**
* @author hiral
*/
/** Integration test: boots a seeded Kafka broker plus the state and
* broker-view-cache actors, then queries a broker view. */
class TestBrokerViewCacheActor extends KafkaServerInTest {
private[this] val akkaConfig: Properties = new Properties()
// Dedicated single-thread dispatcher, used below for the KafkaStateActor.
akkaConfig.setProperty("pinned-dispatcher.type","PinnedDispatcher")
akkaConfig.setProperty("pinned-dispatcher.executor","thread-pool-executor")
private[this] val config : Config = ConfigFactory.parseProperties(akkaConfig)
private[this] val system = ActorSystem("test-broker-view-cache-actor",config)
private[this] val broker = new SeededBroker("bvc-test",4)
override val kafkaServerZkPath = broker.getZookeeperConnectionString
private[this] var kafkaStateActor : Option[ActorRef] = None
private[this] implicit val timeout: Timeout = 10.seconds
private[this] var brokerViewCacheActor : Option[ActorRef] = None
override protected def beforeAll(): Unit = {
super.beforeAll()
val props = Props(classOf[KafkaStateActor],sharedCurator, true)
kafkaStateActor = Some(system.actorOf(props.withDispatcher("pinned-dispatcher"),"ksa"))
val bvcProps = Props(classOf[BrokerViewCacheActor],kafkaStateActor.get.path, FiniteDuration(10, SECONDS))
brokerViewCacheActor = Some(system.actorOf(bvcProps,"broker-view"))
brokerViewCacheActor.get ! BVForceUpdate
// NOTE(review): fixed sleep gives the cache time to populate after the
// forced update; inherently racy — polling BVGetView would be more robust.
Thread.sleep(10000)
}
override protected def afterAll(): Unit = {
// Tear down in reverse construction order; broker shutdown errors ignored.
brokerViewCacheActor.foreach( _ ! Kill )
kafkaStateActor.foreach( _ ! Kill )
system.shutdown()
Try(broker.shutdown())
super.afterAll()
}
/** Ask `msg` of the cache actor, await the typed response for up to 10s,
* and apply `fn` to it. */
private[this] def withBrokerViewCacheActor[Input,Output,FOutput]
(msg: Input)(fn: Output => FOutput)(implicit tag: ClassTag[Output]) : FOutput = {
require(brokerViewCacheActor.isDefined, "brokerViewCacheActor undefined!")
val future = ask(brokerViewCacheActor.get, msg).mapTo[Output]
val result = Await.result(future,10.seconds)
fn(result)
}
test("get broker view") {
withBrokerViewCacheActor(BVGetView(1)) { optionalBrokerView : Option[BVView] =>
println(optionalBrokerView)
}
}
}
| vvutharkar/kafka-manager | test/kafka/manager/TestBrokerViewCacheActor.scala | Scala | apache-2.0 | 2,532 |
// TODO(low): proper generated API sources caching: doesn't detect output directory change
import sbt._
import Keys._
import Project.Initialize
import Util._
import Common._
import Licensed._
import Scope.ThisScope
import LaunchProguard.{ proguard, Proguard }
object Sbt extends Build {
override lazy val settings = super.settings ++ buildSettings ++ Status.settings ++ nightlySettings
def buildSettings = Seq(
organization := "org.scala-sbt",
version := "0.13.5-SNAPSHOT",
publishArtifact in packageDoc := false,
scalaVersion := "2.10.4",
publishMavenStyle := false,
componentID := None,
crossPaths := false,
resolvers += Resolver.typesafeIvyRepo("releases"),
concurrentRestrictions in Global += Util.testExclusiveRestriction,
testOptions += Tests.Argument(TestFrameworks.ScalaCheck, "-w", "1"),
javacOptions in compile ++= Seq("-target", "6", "-source", "6", "-Xlint", "-Xlint:-serial")
)
lazy val myProvided = config("provided") intransitive;
override def projects = super.projects.map(p => p.copy(configurations = (p.configurations.filter(_ != Provided)) :+ myProvided))
lazy val root: Project = Project("root", file("."), aggregate = nonRoots) settings (rootSettings: _*) configs (Sxr.sxrConf, Proguard)
lazy val nonRoots = projects.filter(_ != root).map(p => LocalProject(p.id))
/* ** Subproject declarations ** */
// defines the Java interfaces through which the launcher and the launched application communicate
lazy val launchInterfaceSub = minProject(launchPath / "interface", "Launcher Interface") settings (javaOnly: _*)
// the launcher. Retrieves, loads, and runs applications based on a configuration file.
lazy val launchSub = testedBaseProject(launchPath, "Launcher") dependsOn (ioSub % "test->test", interfaceSub % "test", launchInterfaceSub) settings (launchSettings: _*)
// used to test the retrieving and loading of an application: sample app is packaged and published to the local repository
lazy val testSamples = noPublish(baseProject(launchPath / "test-sample", "Launch Test")) dependsOn (interfaceSub, launchInterfaceSub) settings (scalaCompiler)
// defines Java structures used across Scala versions, such as the API structures and relationships extracted by
// the analysis compiler phases and passed back to sbt. The API structures are defined in a simple
// format from which Java sources are generated by the datatype generator subproject
lazy val interfaceSub = minProject(file("interface"), "Interface") settings (interfaceSettings: _*)
// defines operations on the API of a source, including determining whether it has changed and converting it to a string
// and discovery of subclasses and annotations
lazy val apiSub = testedBaseProject(compilePath / "api", "API") dependsOn (interfaceSub)
/* **** Utilities **** */
lazy val controlSub = baseProject(utilPath / "control", "Control")
lazy val collectionSub = testedBaseProject(utilPath / "collection", "Collections") settings (Util.keywordsSettings: _*)
lazy val applyMacroSub = testedBaseProject(utilPath / "appmacro", "Apply Macro") dependsOn (collectionSub) settings (scalaCompiler)
// The API for forking, combining, and doing I/O with system processes
lazy val processSub = baseProject(utilPath / "process", "Process") dependsOn (ioSub % "test->test") settings (scalaXml)
// Path, IO (formerly FileUtilities), NameFilter and other I/O utility classes
lazy val ioSub = testedBaseProject(utilPath / "io", "IO") dependsOn (controlSub) settings (ioSettings: _*)
// Utilities related to reflection, managing Scala versions, and custom class loaders
lazy val classpathSub = testedBaseProject(utilPath / "classpath", "Classpath") dependsOn (launchInterfaceSub, interfaceSub, ioSub) settings (scalaCompiler)
// Command line-related utilities.
lazy val completeSub = testedBaseProject(utilPath / "complete", "Completion") dependsOn (collectionSub, controlSub, ioSub) settings (jline)
// logging
lazy val logSub = testedBaseProject(utilPath / "log", "Logging") dependsOn (interfaceSub, processSub) settings (jline)
// Relation
lazy val relationSub = testedBaseProject(utilPath / "relation", "Relation") dependsOn (interfaceSub, processSub)
// class file reader and analyzer
lazy val classfileSub = testedBaseProject(utilPath / "classfile", "Classfile") dependsOn (ioSub, interfaceSub, logSub)
// generates immutable or mutable Java data types according to a simple input format
lazy val datatypeSub = baseProject(utilPath / "datatype", "Datatype Generator") dependsOn (ioSub)
// cross versioning
lazy val crossSub = baseProject(utilPath / "cross", "Cross") settings (inConfig(Compile)(Transform.crossGenSettings): _*)
// A logic with restricted negation as failure for a unique, stable model
lazy val logicSub = testedBaseProject(utilPath / "logic", "Logic").dependsOn(collectionSub, relationSub)
/* **** Intermediate-level Modules **** */
// Apache Ivy integration
lazy val ivySub = baseProject(file("ivy"), "Ivy") dependsOn (interfaceSub, launchInterfaceSub, crossSub, logSub % "compile;test->test", ioSub % "compile;test->test", launchSub % "test->test") settings (ivy, jsch, testExclusive)
// Runner for uniform test interface
lazy val testingSub = baseProject(file("testing"), "Testing") dependsOn (ioSub, classpathSub, logSub, launchInterfaceSub, testAgentSub) settings (testInterface)
// Testing agent for running tests in a separate process.
lazy val testAgentSub = minProject(file("testing/agent"), "Test Agent") settings (testInterface)
// Basic task engine
lazy val taskSub = testedBaseProject(tasksPath, "Tasks") dependsOn (controlSub, collectionSub)
// Standard task system. This provides map, flatMap, join, and more on top of the basic task model.
lazy val stdTaskSub = testedBaseProject(tasksPath / "standard", "Task System") dependsOn (taskSub % "compile;test->test", collectionSub, logSub, ioSub, processSub) settings (testExclusive)
// Persisted caching based on SBinary
lazy val cacheSub = baseProject(cachePath, "Cache") dependsOn (ioSub, collectionSub) settings (sbinary, scalaXml)
// Builds on cache to provide caching for filesystem-related operations
lazy val trackingSub = baseProject(cachePath / "tracking", "Tracking") dependsOn (cacheSub, ioSub)
// Embedded Scala code runner
lazy val runSub = testedBaseProject(file("run"), "Run") dependsOn (ioSub, logSub % "compile;test->test", classpathSub, processSub % "compile;test->test")
// Compiler-side interface to compiler that is compiled against the compiler being used either in advance or on the fly.
// Includes API and Analyzer phases that extract source API and relationships.
lazy val compileInterfaceSub = baseProject(compilePath / "interface", "Compiler Interface") dependsOn (interfaceSub % "compile;test->test", ioSub % "test->test", logSub % "test->test", launchSub % "test->test", apiSub % "test->test") settings (compileInterfaceSettings: _*)
lazy val precompiled282 = precompiled("2.8.2")
lazy val precompiled292 = precompiled("2.9.2")
lazy val precompiled293 = precompiled("2.9.3")
lazy val precompiled2110 = precompiled("2.11.0")
// Implements the core functionality of detecting and propagating changes incrementally.
// Defines the data structures for representing file fingerprints and relationships and the overall source analysis
lazy val compileIncrementalSub = testedBaseProject(compilePath / "inc", "Incremental Compiler") dependsOn (apiSub, ioSub, logSub, classpathSub, relationSub)
// Persists the incremental data structures using SBinary
lazy val compilePersistSub = baseProject(compilePath / "persist", "Persist") dependsOn (compileIncrementalSub, apiSub) settings (sbinary)
  // sbt-side interface to compiler. Calls compiler-side interface reflectively
  lazy val compilerSub = testedBaseProject(compilePath, "Compile") dependsOn (launchInterfaceSub, interfaceSub % "compile;test->test", logSub, ioSub, classpathSub,
    logSub % "test->test", launchSub % "test->test", apiSub % "test") settings (compilerSettings: _*)
  // Glue that ties the incremental compiler, persistence and API extraction together.
  lazy val compilerIntegrationSub = baseProject(compilePath / "integration", "Compiler Integration") dependsOn (
    compileIncrementalSub, compilerSub, compilePersistSub, apiSub, classfileSub)
  // Bridges the compiler subsystem with Ivy-based dependency resolution.
  lazy val compilerIvySub = baseProject(compilePath / "ivy", "Compiler Ivy Integration") dependsOn (ivySub, compilerSub)
  // Framework for the `scripted` integration tests (test harness proper).
  lazy val scriptedBaseSub = baseProject(scriptedPath / "base", "Scripted Framework") dependsOn (ioSub, processSub) settings (scalaParsers)
  // Runs scripted tests against an sbt launcher; launcher interface is "provided" so it is not bundled.
  lazy val scriptedSbtSub = baseProject(scriptedPath / "sbt", "Scripted sbt") dependsOn (ioSub, logSub, processSub, scriptedBaseSub, launchInterfaceSub % "provided")
  // The sbt plugin that exposes scripted to plugin builds.
  lazy val scriptedPluginSub = baseProject(scriptedPath / "plugin", "Scripted Plugin") dependsOn (sbtSub, classpathSub)
  // Implementation and support code for defining actions.
  lazy val actionsSub = testedBaseProject(mainPath / "actions", "Actions") dependsOn (
    classpathSub, completeSub, apiSub, compilerIntegrationSub, compilerIvySub,
    interfaceSub, ioSub, ivySub, logSub, processSub, runSub, relationSub, stdTaskSub, taskSub, trackingSub, testingSub)
  // General command support and core commands not specific to a build system
  lazy val commandSub = testedBaseProject(mainPath / "command", "Command") dependsOn (interfaceSub, ioSub, launchInterfaceSub, logSub, completeSub, classpathSub, crossSub)
  // Fixes scope=Scope for Setting (core defined in collectionSub) to define the settings system used in build definitions
  lazy val mainSettingsSub = testedBaseProject(mainPath / "settings", "Main Settings") dependsOn (applyMacroSub, interfaceSub, ivySub, relationSub, logSub, ioSub, commandSub,
    completeSub, classpathSub, stdTaskSub, processSub) settings (sbinary)
  // The main integration project for sbt. It brings all of the subsystems together, configures them, and provides for overriding conventions.
  lazy val mainSub = testedBaseProject(mainPath, "Main") dependsOn (actionsSub, mainSettingsSub, interfaceSub, ioSub, ivySub, launchInterfaceSub, logSub, logicSub, processSub, runSub, commandSub) settings (scalaXml)
  // Strictly for bringing implicits and aliases from subsystems into the top-level sbt namespace through a single package object
  // technically, we need a dependency on all of mainSub's dependencies, but we don't do that since this is strictly an integration project
  // with the sole purpose of providing certain identifiers without qualification (with a package object)
  lazy val sbtSub = baseProject(sbtPath, "sbt") dependsOn (mainSub, compileInterfaceSub, precompiled282, precompiled292, precompiled293, precompiled2110, scriptedSbtSub % "test->test") settings (sbtSettings: _*)
  /* Nested subproject paths */
  // Each helper names the base directory of a subproject group, relative to the build root.
  def sbtPath = file("sbt")
  def cachePath = file("cache")
  def tasksPath = file("tasks")
  def launchPath = file("launch")
  def utilPath = file("util")
  def compilePath = file("compile")
  def mainPath = file("main")
  def scriptedPath = file("scripted")
  // Settings applied only to the top-level `sbt` integration project.
  def sbtSettings = Seq(
    normalizedName := "sbt"
  )
  // Runs the scripted test suite reflectively: sbt.test.ScriptedTests is loaded through an
  // isolated class loader built from the scripted-sbt test classpath (with jline filtered out
  // to avoid clashing with the host sbt's own jline), and its `run` method is invoked.
  // InvocationTargetException is unwrapped so the real test failure propagates.
  private def doScripted(launcher: File, scriptedSbtClasspath: Seq[Attributed[File]], scriptedSbtInstance: ScalaInstance, sourcePath: File, args: Seq[String]) {
    val noJLine = new classpath.FilteredLoader(scriptedSbtInstance.loader, "jline." :: Nil)
    val loader = classpath.ClasspathUtilities.toLoader(scriptedSbtClasspath.files, noJLine)
    val m = ModuleUtilities.getObject("sbt.test.ScriptedTests", loader)
    val r = m.getClass.getMethod("run", classOf[File], classOf[Boolean], classOf[Array[String]], classOf[File], classOf[Array[String]])
    val launcherVmOptions = Array("-XX:MaxPermSize=256M") // increased after a failure in scripted source-dependencies/macro
    try { r.invoke(m, sourcePath, true: java.lang.Boolean, args.toArray[String], launcher, launcherVmOptions) }
    catch { case ite: java.lang.reflect.InvocationTargetException => throw ite.getCause }
  }
  // `scripted` input task: publishes everything locally first (via publishAll) and then runs
  // the selected scripted tests against the proguarded launcher.
  def scriptedTask: Initialize[InputTask[Unit]] = InputTask(scriptedSource(dir => (s: State) => scriptedParser(dir))) { result =>
    (proguard in Proguard, fullClasspath in scriptedSbtSub in Test, scalaInstance in scriptedSbtSub, publishAll, scriptedSource, result) map {
      (launcher, scriptedSbtClasspath, scriptedSbtInstance, _, sourcePath, args) =>
        doScripted(launcher, scriptedSbtClasspath, scriptedSbtInstance, sourcePath, args)
    }
  }
  // Same as scriptedTask but without the publishAll dependency (see scriptedUnpublished key).
  def scriptedUnpublishedTask: Initialize[InputTask[Unit]] = InputTask(scriptedSource(dir => (s: State) => scriptedParser(dir))) { result =>
    (proguard in Proguard, fullClasspath in scriptedSbtSub in Test, scalaInstance in scriptedSbtSub, scriptedSource, result) map doScripted
  }
  import sbt.complete._
  import DefaultParsers._
  // Builds a tab-completing parser for scripted test selectors of the form `group/name`
  // (or `group/*`), derived from the on-disk layout `<base>/<group>/<name>/test`.
  def scriptedParser(scriptedBase: File): Parser[Seq[String]] =
    {
      // Every `test` file three levels down identifies one (group, name) pair.
      val pairs = (scriptedBase * AllPassFilter * AllPassFilter * "test").get map { (f: File) =>
        val p = f.getParentFile
        (p.getParentFile.getName, p.getName)
      };
      val pairMap = pairs.groupBy(_._1).mapValues(_.map(_._2).toSet);
      // An identifier is any run of non-whitespace characters except '/'.
      val id = charClass(c => !c.isWhitespace && c != '/').+.string
      val groupP = token(id.examples(pairMap.keySet.toSet)) <~ token('/')
      def nameP(group: String) = token("*".id | id.examples(pairMap(group)))
      val testID = for (group <- groupP; name <- nameP(group)) yield (group, name)
      (token(Space) ~> matched(testID)).*
    }
  lazy val scripted = InputKey[Unit]("scripted")
  lazy val scriptedUnpublished = InputKey[Unit]("scripted-unpublished", "Execute scripted without publishing SBT first. Saves you some time when only your test has changed.")
  lazy val scriptedSource = SettingKey[File]("scripted-source")
  lazy val publishAll = TaskKey[Unit]("publish-all")
  lazy val publishLauncher = TaskKey[Unit]("publish-launcher")
  // Aggregates a task across all non-root subprojects and flattens the results.
  def deepTasks[T](scoped: TaskKey[Seq[T]]): Initialize[Task[Seq[T]]] = deep(scoped.task) { _.join.map(_.flatten.distinct) }
  // Evaluates a setting in every project except the root/integration/scripted projects.
  def deep[T](scoped: SettingKey[T]): Initialize[Seq[T]] =
    Util.inAllProjects(projects filterNot Set(root, sbtSub, scriptedBaseSub, scriptedSbtSub, scriptedPluginSub) map { p => LocalProject(p.id) }, scoped)
  // Launcher subproject settings: its tests need the interface, samples and launcher
  // interface published locally first, plus the cross-version source transformation.
  def launchSettings =
    Seq(ivy,
      compile in Test <<= compile in Test dependsOn (publishLocal in interfaceSub, publishLocal in testSamples, publishLocal in launchInterfaceSub)
    ) ++
      inConfig(Compile)(Transform.configSettings) ++
      inConfig(Compile)(Transform.transSourceSettings ++ Seq(
        Transform.inputSourceDirectory <<= (sourceDirectory in crossSub) / "input_sources",
        Transform.sourceProperties := Map("cross.package0" -> "xsbt", "cross.package1" -> "boot")
      ))
  import Sxr.sxr
  def releaseSettings = Release.settings(nonRoots, proguard in Proguard)
  // Settings for the root project: release, aggregated docs, proguarded launcher and
  // conscript support.
  def rootSettings = releaseSettings ++ fullDocSettings ++ LaunchProguard.settings ++ LaunchProguard.specific(launchSub) ++
    Util.publishPomSettings ++ otherRootSettings ++ proguardedLauncherSettings ++ Formatting.sbtFilesSettings ++
    Transform.conscriptSettings(launchSub)
  // Wires the scripted input tasks and the publish-all aggregation into the root project.
  def otherRootSettings = Seq(
    scripted <<= scriptedTask,
    scriptedUnpublished <<= scriptedUnpublishedTask,
    scriptedSource <<= (sourceDirectory in sbtSub) / "sbt-test",
    publishAll <<= inAll(nonRoots, publishLocal.task),
    publishAll <<= (publishAll, publishLocal).map((x, y) => ()) // publish all normal deps as well as the sbt-launch jar
  )
  // Aggregated scaladoc/sxr for the whole build: sources and classpaths are collected
  // from every subproject via deepTasks/deep.
  def fullDocSettings = Util.baseScalacOptions ++ Docs.settings ++ Sxr.settings ++ Seq(
    scalacOptions += "-Ymacro-no-expand", // for both sxr and doc
    sources in sxr <<= deepTasks(sources in Compile), //sxr
    sources in (Compile, doc) <<= sources in sxr, // doc
    Sxr.sourceDirectories <<= deep(sourceDirectories in Compile).map(_.flatten), // to properly relativize the source paths
    fullClasspath in sxr <<= (externalDependencyClasspath in Compile in sbtSub),
    dependencyClasspath in (Compile, doc) <<= fullClasspath in sxr
  )
  // the launcher is published with metadata so that the scripted plugin can pull it in
  // being proguarded, it shouldn't ever be on a classpath with other jars, however
  def proguardedLauncherSettings = Seq(
    publishArtifact in packageSrc := false,
    moduleName := "sbt-launch",
    autoScalaLibrary := false,
    description := "sbt application launcher",
    publishLauncher <<= publish,
    packageBin in Compile <<= (proguard in Proguard, Transform.conscriptConfigs).map((x, y) => x)
  )
  // Settings for the xsbti interface project: pure Java, exported as the "xsbti"
  // component, with sources generated from the datatype definition files and a
  // generated version resource.
  def interfaceSettings = javaOnly ++ Seq(
    projectComponent,
    exportJars := true,
    componentID := Some("xsbti"),
    watchSources <++= apiDefinitions,
    resourceGenerators in Compile <+= (version, resourceManaged, streams, compile in Compile) map generateVersionFile,
    apiDefinitions <<= baseDirectory map { base => (base / "definition") :: (base / "other") :: (base / "type") :: Nil },
    sourceGenerators in Compile <+= (cacheDirectory, apiDefinitions, fullClasspath in Compile in datatypeSub, sourceManaged in Compile, mainClass in datatypeSub in Compile, runner, streams) map generateAPICached
  )
  // Shared settings for the precompiled compiler-interface projects: the binary
  // artifact is tagged with the actual Scala version it was compiled against, and
  // output goes to a per-version target directory.
  def precompiledSettings = Seq(
    artifact in packageBin <<= (appConfiguration, scalaVersion) { (app, sv) =>
      val launcher = app.provider.scalaProvider.launcher
      val bincID = binID + "_" + ScalaInstance(sv, launcher).actualVersion
      Artifact(binID) extra ("e:component" -> bincID)
    },
    target <<= (target, scalaVersion) { (base, sv) => base / ("precompiled_" + sv) },
    scalacOptions := Nil,
    ivyScala ~= { _.map(_.copy(checkExplicit = false, overrideScalaVersion = false)) },
    exportedProducts in Compile := Nil,
    libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ % "provided")
  )
  // Settings for the compiler-interface project built against the primary Scala version.
  def compileInterfaceSettings: Seq[Setting[_]] = precompiledSettings ++ Seq[Setting[_]](
    exportJars := true,
    // we need to fork because in unit tests we set usejavacp = true which means
    // we are expecting all of our dependencies to be on classpath so Scala compiler
    // can use them while constructing its own classpath for compilation
    fork in Test := true,
    // needed because we fork tests and tests are ran in parallel so we have multiple Scala
    // compiler instances that are memory hungry
    javaOptions in Test += "-Xmx1G",
    artifact in (Compile, packageSrc) := Artifact(srcID).copy(configurations = Compile :: Nil).extra("e:component" -> srcID)
  )
  def compilerSettings = Seq(
    libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ % "test"),
    unmanagedJars in Test <<= (packageSrc in compileInterfaceSub in Compile).map(x => Seq(x).classpath)
  )
  // Defines a compiler-interface project precompiled for a Scala version other than sbt's own.
  def precompiled(scalav: String): Project = baseProject(compilePath / "interface", "Precompiled " + scalav.replace('.', '_')) dependsOn (interfaceSub) settings (precompiledSettings: _*) settings (
    scalaHome := None,
    scalaVersion <<= (scalaVersion in ThisBuild) { sbtScalaV =>
      assert(sbtScalaV != scalav, "Precompiled compiler interface cannot have the same Scala version (" + scalav + ") as sbt.")
      scalav
    },
    // we disable compiling and running tests in precompiled subprojects of compiler interface
    // so we do not need to worry about cross-versioning testing dependencies
    sources in Test := Nil
  )
  def ioSettings: Seq[Setting[_]] = Seq(
    libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ % "test")
  )
}
| xeno-by/old-scalameta-sbt | project/Sbt.scala | Scala | bsd-3-clause | 19,487 |
package org.mdoc.rendering.service
import com.typesafe.scalalogging.StrictLogging
import scala.util.Try
/**
 * Application entry point: starts the Blaze HTTP server and blocks until it
 * shuts down. A non-fatal failure while starting or awaiting shutdown is
 * logged and the JVM exits with a non-zero status code.
 */
object Main extends App with StrictLogging {
  try Blaze.server.run.awaitShutdown()
  catch {
    // NonFatal mirrors what Try() would have captured; fatal errors still propagate.
    case scala.util.control.NonFatal(throwable) =>
      logger.error("awaitShutdown()", throwable)
      sys.exit(1)
  }
}
| m-doc/rendering-service | src/main/scala/org/mdoc/rendering/service/Main.scala | Scala | apache-2.0 | 296 |
package io.apicollective.play
import akka.stream.Materializer
import play.api.{Logger, Logging}
import play.api.mvc._
import scala.concurrent.{ExecutionContext, Future}
import play.api.http.HttpFilters
/**
* Add this to your base.conf:
* play.http.filters=io.apicollective.play.LoggingFilter
**/
// Play filter registration: exposes ApibuilderLoggingFilter as the application's
// HTTP filter chain (enabled through play.http.filters, see the note above).
class LoggingFilter @javax.inject.Inject() (loggingFilter: ApibuilderLoggingFilter) extends HttpFilters {
  def filters = Seq(loggingFilter)
}
/**
 * Logs one line per completed request: method, host+URI, response status,
 * elapsed time in milliseconds, and the User-Agent header.
 */
class ApibuilderLoggingFilter @javax.inject.Inject() (
  implicit ec: ExecutionContext,
  m: Materializer
) extends Filter with Logging {
  def apply(f: RequestHeader => Future[Result])(requestHeader: RequestHeader): Future[Result] = {
    // Use the monotonic clock for elapsed time: System.currentTimeMillis can jump
    // (NTP adjustments), producing negative or wildly wrong durations.
    val startNanos = System.nanoTime
    f(requestHeader).map { result =>
      val requestTime = (System.nanoTime - startNanos) / 1000000L
      val line = Seq(
        requestHeader.method,
        s"${requestHeader.host}${requestHeader.uri}",
        result.header.status,
        s"${requestTime}ms",
        // Headers.getAll is case-insensitive (HTTP header names are case-insensitive),
        // unlike the previous exact-key lookup in headers.toMap, which missed e.g.
        // "user-agent". Multiple values are joined with "," as before.
        requestHeader.headers.getAll("User-Agent").mkString(",")
      ).mkString(" ")
      logger.info(line)
      result
    }
  }
  override implicit def mat: Materializer = m
}
| gheine/apidoc | api/app/play/LoggingFilter.scala | Scala | mit | 1,255 |
/*
* Copyright (C) 2014 GRNET S.A.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package gr.grnet.egi.vmcatcher
import gr.grnet.egi.vmcatcher.event.{ImageEvent, ImageEnvField}
import org.junit.Test
/**
 * Checks that a sample AppDB-style `image.list` JSON document (anonymised)
 * can be parsed into image events and that per-event environment fields can
 * be read back from it.
 */
class ImageListConfigTest {
  // Sample image-list document in the vmcatcher JSON format.
  val data =
    """
      |{
      |  "hv:imagelist": {
      |    "dc:date:created": "2014-06-16T12:45:37Z",
      |    "dc:date:expires": "2499-12-31T22:00:00Z",
      |    "dc:description": "Description",
      |    "dc:identifier": "FOOBAR",
      |    "dc:source": "https://appdb.egi.eu/",
      |    "dc:title": "CernVM",
      |    "ad:swid": "810",
      |    "hv:endorser": {
      |      "hv:x509": {
      |        "dc:creator": "EGI Applications Database",
      |        "hv:ca": "",
      |        "hv:dn": "",
      |        "hv:email": "_@_"
      |      }
      |    },
      |    "hv:images": [
      |      {
      |        "hv:image": {
      |          "dc:description": "Description",
      |          "dc:identifier": "FOOBAR_",
      |          "ad:mpuri": "https://FOOBAR_/7f1c5d25-614e-4409-bfa6-625d3ae7b9d4:619/",
      |          "dc:title": "Image [Scientific Linux/6.0/KVM]",
      |          "ad:group": "General group",
      |          "hv:hypervisor": "KVM",
      |          "hv:format": "OVA",
      |          "hv:ram_minimum": "512",
      |          "ad:ram_recommended": "1024",
      |          "hv:core_minimum": "1",
      |          "ad:core_recommended": "4",
      |          "hv:size": "121243136",
      |          "hv:uri": "http://FOO/FOO.ova",
      |          "hv:version": "3.3.0-1",
      |          "sl:arch": "x86_64",
      |          "sl:checksum:sha512": "CHECKSUM",
      |          "sl:comments": "",
      |          "sl:os": "Linux",
      |          "sl:osname": "Scientific Linux",
      |          "sl:osversion": "6.0",
      |          "ad:user:fullname": "FOO BAR",
      |          "ad:user:guid": "FOOBAR__",
      |          "ad:user:uri": "https://FOO_/person/FOO%20BAR"
      |        }
      |      }
      |    ],
      |    "hv:uri": "https://FOO_/image.list",
      |    "hv:version": "3.3.0"
      |  }
      |}
      |
    """.stripMargin
  // Parsing must not throw for a well-formed image list.
  @Test def test(): Unit = {
    ImageEvent.parseImageListJson(data)
  }
  // Each parsed event must expose its dc:identifier and a JSON view of its env fields.
  @Test def test2(): Unit = {
    val eventList = ImageEvent.parseImageListJson(data)
    for {
      event ← eventList
    } {
      val dcIdentifier = event(ImageEnvField.VMCATCHER_EVENT_DC_IDENTIFIER, "")
      println(dcIdentifier)
      println(event.envFieldsView.json)
    }
  }
}
| grnet/snf-vmcatcher | src/test/scala/gr/grnet/egi/vmcatcher/ImageListConfigTest.scala | Scala | gpl-3.0 | 3,313 |
package spark
import akka.actor.ActorSystem
import spark.broadcast.BroadcastManager
import spark.debugger.ActorBasedEventReporter
import spark.debugger.EventReporter
import spark.debugger.NullEventReporter
import spark.network.ConnectionManager
import spark.storage.BlockManager
import spark.storage.BlockManagerMaster
import spark.util.AkkaUtils
/**
 * Holds all the runtime services of a Spark instance: serializers, the cache
 * and map-output trackers, shuffle/broadcast/block managers, the connection
 * manager and the debugger event reporter. An instance is installed per
 * thread via SparkEnv.set and torn down with stop().
 */
class SparkEnv (
    val actorSystem: ActorSystem,
    val cache: Cache,
    val serializer: Serializer,
    val closureSerializer: Serializer,
    val cacheTracker: CacheTracker,
    val mapOutputTracker: MapOutputTracker,
    val shuffleFetcher: ShuffleFetcher,
    val shuffleManager: ShuffleManager,
    val broadcastManager: BroadcastManager,
    val blockManager: BlockManager,
    val connectionManager: ConnectionManager,
    val eventReporter: EventReporter
  ) {
  /** No-parameter constructor for unit tests. */
  def this() = {
    this(null, null, new JavaSerializer, new JavaSerializer, null, null, null, null, null, null,
      null, null)
  }
  // Stops each subsystem in turn, then shuts down the actor system and blocks
  // until it has terminated.
  def stop() {
    mapOutputTracker.stop()
    cacheTracker.stop()
    shuffleFetcher.stop()
    shuffleManager.stop()
    broadcastManager.stop()
    blockManager.stop()
    blockManager.master.stop()
    eventReporter.stop()
    actorSystem.shutdown()
    actorSystem.awaitTermination()
  }
}
object SparkEnv {
  // One SparkEnv per thread: each task thread sees the environment installed
  // for it via set().
  private val env = new ThreadLocal[SparkEnv]
  def set(e: SparkEnv) {
    env.set(e)
  }
  def get: SparkEnv = {
    env.get()
  }
  // Builds a complete SparkEnv, with each subsystem's implementation class
  // chosen from system properties (spark.serializer, spark.cache.class, ...).
  def createFromSystemProperties(
      hostname: String,
      port: Int,
      isMaster: Boolean,
      isLocal: Boolean
    ) : SparkEnv = {
    val (actorSystem, boundPort) = AkkaUtils.createActorSystem("spark", hostname, port)
    // Bit of a hack: If this is the master and our port was 0 (meaning bind to any free port),
    // figure out which port number Akka actually bound to and set spark.master.port to it.
    if (isMaster && port == 0) {
      System.setProperty("spark.master.port", boundPort.toString)
    }
    val serializerClass = System.getProperty("spark.serializer", "spark.KryoSerializer")
    val serializer = Class.forName(serializerClass).newInstance().asInstanceOf[Serializer]
    // Debugger event reporting is opt-in via spark.debugger.enable.
    val eventReporter = if (System.getProperty("spark.debugger.enable", "false").toBoolean) {
      new ActorBasedEventReporter(actorSystem, isMaster)
    } else {
      new NullEventReporter
    }
    val blockManagerMaster = new BlockManagerMaster(actorSystem, isMaster, isLocal)
    val blockManager = new BlockManager(blockManagerMaster, serializer, eventReporter)
    val connectionManager = blockManager.connectionManager
    val shuffleManager = new ShuffleManager()
    val broadcastManager = new BroadcastManager(isMaster)
    val closureSerializerClass =
      System.getProperty("spark.closure.serializer", "spark.JavaSerializer")
    val closureSerializer =
      Class.forName(closureSerializerClass).newInstance().asInstanceOf[Serializer]
    val cacheClass = System.getProperty("spark.cache.class", "spark.BoundedMemoryCache")
    val cache = Class.forName(cacheClass).newInstance().asInstanceOf[Cache]
    val cacheTracker = new CacheTracker(actorSystem, isMaster, blockManager)
    // Cyclic dependency: the block manager needs the tracker created from it.
    blockManager.cacheTracker = cacheTracker
    val mapOutputTracker = new MapOutputTracker(actorSystem, isMaster)
    val shuffleFetcherClass =
      System.getProperty("spark.shuffle.fetcher", "spark.BlockStoreShuffleFetcher")
    val shuffleFetcher =
      Class.forName(shuffleFetcherClass).newInstance().asInstanceOf[ShuffleFetcher]
    /*
    if (System.getProperty("spark.stream.distributed", "false") == "true") {
      val blockManagerClass = classOf[spark.storage.BlockManager].asInstanceOf[Class[_]]
      if (isLocal || !isMaster) {
        (new Thread() {
          override def run() {
            println("Wait started")
            Thread.sleep(60000)
            println("Wait ended")
            val receiverClass = Class.forName("spark.stream.TestStreamReceiver4")
            val constructor = receiverClass.getConstructor(blockManagerClass)
            val receiver = constructor.newInstance(blockManager)
            receiver.asInstanceOf[Thread].start()
          }
        }).start()
      }
    }
    */
    new SparkEnv(
      actorSystem,
      cache,
      serializer,
      closureSerializer,
      cacheTracker,
      mapOutputTracker,
      shuffleFetcher,
      shuffleManager,
      broadcastManager,
      blockManager,
      connectionManager,
      eventReporter)
  }
}
| ankurdave/arthur | core/src/main/scala/spark/SparkEnv.scala | Scala | bsd-3-clause | 4,500 |
/*
* Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.javadsl.broker.kafka
import com.google.inject.AbstractModule
/** Guice module wiring the Kafka broker integration for the Java DSL. */
class KafkaBrokerModule extends AbstractModule {
  override def configure(): Unit = {
    // Eager singleton: instantiated when the injector is created, so topic
    // producer registration happens at startup rather than on first injection.
    bind(classOf[JavadslRegisterTopicProducers]).asEagerSingleton()
  }
}
| edouardKaiser/lagom | service/javadsl/kafka/server/src/main/scala/com/lightbend/lagom/internal/javadsl/broker/kafka/KafkaBrokerModule.scala | Scala | apache-2.0 | 339 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.database.azblob
import org.apache.openwhisk.core.entity.WhiskEntity
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
// Runs the shared attachment-store behavior suite against the Azure Blob
// implementation, storing WhiskEntity documents.
@RunWith(classOf[JUnitRunner])
class AzureBlobAttachmentStoreITTests extends AzureBlobAttachmentStoreBehaviorBase with AzureBlob {
  // Store under test; lazy so it is only created when the suite actually runs.
  override lazy val store = makeAzureStore[WhiskEntity]
  // Backend identifier used by the shared behavior tests.
  override def storeType: String = "Azure"
}
| akrabat/openwhisk | tests/src/test/scala/org/apache/openwhisk/core/database/azblob/AzureBlobAttachmentStoreITTests.scala | Scala | apache-2.0 | 1,211 |
package org.jetbrains.plugins.scala.failed.annotator
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
/**
* @author Nikolay.Tropin
*/
class DefaultArgWithTypeArgsTest extends ScalaLightCodeInsightFixtureTestAdapter {
  // This suite lives under `failed.annotator`: the fixtures below are known
  // not to pass yet, so the harness expects failure.
  override def shouldPass: Boolean = false
  // SCL-8688: default argument whose value refers to the method's own type parameters.
  def testSCL8688(): Unit = {
    checkTextHasNoErrors(
      """class Test {
        | def foo[A, B](f: A => B = (a: A) => a) = ???
        |}
      """.stripMargin)
  }
  // SCL-13810: explicit type application on an infix extension method call.
  def testSCL13810(): Unit = {
    checkTextHasNoErrors(
      """
        |trait Obj[S]
        |implicit class Ops[S](val obj: Obj[S]) extends AnyVal {
        | def bang[R[_]](child: String): R[S] = ???
        |}
        |trait Test[S] {
        | def in: Obj[S]
        | val out = in bang [Obj] "child"
        |}
      """.stripMargin)
  }
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/failed/annotator/DefaultArgWithTypeArgsTest.scala | Scala | apache-2.0 | 824 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import scala.language.implicitConversions
import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.analysis._
import org.apache.spark.sql.catalyst.encoders.{encoderFor, ExpressionEncoder}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.usePrettyExpression
import org.apache.spark.sql.execution.aggregate.TypedAggregateExpression
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.lit
import org.apache.spark.sql.types._
/**
 * Companion helpers for building a [[Column]] from a name or a Catalyst
 * expression, plus alias generation used for typed aggregates.
 */
private[sql] object Column {
  def apply(colName: String): Column = new Column(colName)
  def apply(expr: Expression): Column = new Column(expr)
  def unapply(col: Column): Option[Expression] = Some(col.expr)
  // Display alias for an expression: typed aggregates use the aggregate
  // function's toString; everything else uses its pretty SQL rendering.
  private[sql] def generateAlias(e: Expression): String = {
    e match {
      case a: AggregateExpression if a.aggregateFunction.isInstanceOf[TypedAggregateExpression] =>
        a.aggregateFunction.toString
      case expr => usePrettyExpression(expr).sql
    }
  }
}
/**
 * A [[Column]] where an [[Encoder]] has been given for the expected input and return type.
 * To create a [[TypedColumn]], use the `as` function on a [[Column]].
 *
 * @tparam T The input type expected for this expression. Can be `Any` if the expression is type
 *           checked by the analyzer instead of the compiler (i.e. `expr("sum(...)")`).
 * @tparam U The output type of this column.
 *
 * @since 1.6.0
 */
@InterfaceStability.Stable
class TypedColumn[-T, U](
    expr: Expression,
    private[sql] val encoder: ExpressionEncoder[U])
  extends Column(expr) {
  /**
   * Inserts the specific input type and schema into any expressions that are expected to operate
   * on a decoded object.
   */
  private[sql] def withInputType(
      inputEncoder: ExpressionEncoder[_],
      inputAttributes: Seq[Attribute]): TypedColumn[T, U] = {
    val unresolvedDeserializer = UnresolvedDeserializer(inputEncoder.deserializer, inputAttributes)
    // Only typed aggregates that do not yet have an input deserializer are updated.
    val newExpr = expr transform {
      case ta: TypedAggregateExpression if ta.inputDeserializer.isEmpty =>
        ta.withInputInfo(
          deser = unresolvedDeserializer,
          cls = inputEncoder.clsTag.runtimeClass,
          schema = inputEncoder.schema)
    }
    new TypedColumn[T, U](newExpr, encoder)
  }
  /**
   * Gives the TypedColumn a name (alias).
   * If the current TypedColumn has metadata associated with it, this metadata will be propagated
   * to the new column.
   *
   * @group expr_ops
   * @since 2.0.0
   */
  override def name(alias: String): TypedColumn[T, U] =
    new TypedColumn[T, U](super.name(alias).expr, encoder)
}
/**
* A column that will be computed based on the data in a `DataFrame`.
*
* A new column is constructed based on the input columns present in a dataframe:
*
* {{{
* df("columnName") // On a specific DataFrame.
* col("columnName") // A generic column no yet associated with a DataFrame.
* col("columnName.field") // Extracting a struct field
* col("`a.column.with.dots`") // Escape `.` in column names.
* $"columnName" // Scala short hand for a named column.
* expr("a + 1") // A column that is constructed from a parsed SQL Expression.
* lit("abc") // A column that produces a literal (constant) value.
* }}}
*
* [[Column]] objects can be composed to form complex expressions:
*
* {{{
* $"a" + 1
* $"a" === $"b"
* }}}
*
* @note The internal Catalyst expression can be accessed via "expr", but this method is for
* debugging purposes only and can change in any future Spark releases.
*
* @groupname java_expr_ops Java-specific expression operators
* @groupname expr_ops Expression operators
* @groupname df_ops DataFrame functions
* @groupname Ungrouped Support functions for DataFrames
*
* @since 1.3.0
*/
@InterfaceStability.Stable
class Column(val expr: Expression) extends Logging {
  // Secondary constructor: parses a column name into an expression.
  // "*" becomes an unqualified star, "prefix.*" a qualified star, and anything
  // else a (possibly backtick-quoted) attribute reference.
  def this(name: String) = this(name match {
    case "*" => UnresolvedStar(None)
    case _ if name.endsWith(".*") =>
      val parts = UnresolvedAttribute.parseAttributeName(name.substring(0, name.length - 2))
      UnresolvedStar(Some(parts))
    case _ => UnresolvedAttribute.quotedString(name)
  })
  // Renders the column as its pretty SQL form.
  override def toString: String = usePrettyExpression(expr).sql
  // Two Columns are equal iff their underlying expressions are equal.
  override def equals(that: Any): Boolean = that match {
    case that: Column => that.expr.equals(this.expr)
    case _ => false
  }
  override def hashCode: Int = this.expr.hashCode()
  /** Creates a column based on the given expression. */
  private def withExpr(newExpr: Expression): Column = new Column(newExpr)
  /**
   * Returns the expression for this column either with an existing or auto assigned name.
   */
  private[sql] def named: NamedExpression = expr match {
    // Wrap UnresolvedAttribute with UnresolvedAlias, as when we resolve UnresolvedAttribute, we
    // will remove intermediate Alias for ExtractValue chain, and we need to alias it again to
    // make it a NamedExpression.
    case u: UnresolvedAttribute => UnresolvedAlias(u)
    case u: UnresolvedExtractValue => UnresolvedAlias(u)
    case expr: NamedExpression => expr
    // Leave an unaliased generator with an empty list of names since the analyzer will generate
    // the correct defaults after the nested expression's type has been resolved.
    case g: Generator => MultiAlias(g, Nil)
    case func: UnresolvedFunction => UnresolvedAlias(func, Some(Column.generateAlias))
    // If we have a top level Cast, there is a chance to give it a better alias, if there is a
    // NamedExpression under this Cast.
    case c: Cast =>
      c.transformUp {
        case c @ Cast(_: NamedExpression, _, _) => UnresolvedAlias(c)
      } match {
        case ne: NamedExpression => ne
        case other => Alias(expr, usePrettyExpression(expr).sql)()
      }
    case a: AggregateExpression if a.aggregateFunction.isInstanceOf[TypedAggregateExpression] =>
      UnresolvedAlias(a, Some(Column.generateAlias))
    // Wait until the struct is resolved. This will generate a nicer looking alias.
    case struct: CreateNamedStructLike => UnresolvedAlias(struct)
    // Fallback: alias any other expression with its pretty SQL text.
    case expr: Expression => Alias(expr, usePrettyExpression(expr).sql)()
  }
  /**
   * Provides a type hint about the expected return value of this column. This information can
   * be used by operations such as `select` on a [[Dataset]] to automatically convert the
   * results into the correct JVM types.
   * @since 1.6.0
   */
  def as[U : Encoder]: TypedColumn[Any, U] = new TypedColumn[Any, U](expr, encoderFor[U])
  /**
   * Extracts a value or values from a complex type.
   * The following types of extraction are supported:
   *
   * - Given an Array, an integer ordinal can be used to retrieve a single value.
   * - Given a Map, a key of the correct type can be used to retrieve an individual value.
   * - Given a Struct, a string fieldName can be used to extract that field.
   * - Given an Array of Structs, a string fieldName can be used to extract field
   *   of every struct in that array, and return an Array of fields
   *
   * @group expr_ops
   * @since 1.4.0
   */
  def apply(extraction: Any): Column = withExpr {
    UnresolvedExtractValue(expr, lit(extraction).expr)
  }
  /**
   * Unary minus, i.e. negate the expression.
   * {{{
   *   // Scala: select the amount column and negates all values.
   *   df.select( -df("amount") )
   *
   *   // Java:
   *   import static org.apache.spark.sql.functions.*;
   *   df.select( negate(col("amount") );
   * }}}
   *
   * @group expr_ops
   * @since 1.3.0
   */
  def unary_- : Column = withExpr { UnaryMinus(expr) }
  /**
   * Inversion of boolean expression, i.e. NOT.
   * {{{
   *   // Scala: select rows that are not active (isActive === false)
   *   df.filter( !df("isActive") )
   *
   *   // Java:
   *   import static org.apache.spark.sql.functions.*;
   *   df.filter( not(df.col("isActive")) );
   * }}}
   *
   * @group expr_ops
   * @since 1.3.0
   */
  def unary_! : Column = withExpr { Not(expr) }
  /**
   * Equality test.
   * {{{
   *   // Scala:
   *   df.filter( df("colA") === df("colB") )
   *
   *   // Java
   *   import static org.apache.spark.sql.functions.*;
   *   df.filter( col("colA").equalTo(col("colB")) );
   * }}}
   *
   * @group expr_ops
   * @since 1.3.0
   */
  def === (other: Any): Column = withExpr {
    val right = lit(other).expr
    // Comparing an expression tree with itself is almost always a self-join
    // mistake, so warn the user instead of failing.
    if (this.expr == right) {
      logWarning(
        s"Constructing trivially true equals predicate, '${this.expr} = $right'. " +
        "Perhaps you need to use aliases.")
    }
    EqualTo(expr, right)
  }
  /**
   * Equality test.
   * {{{
   *   // Scala:
   *   df.filter( df("colA") === df("colB") )
   *
   *   // Java
   *   import static org.apache.spark.sql.functions.*;
   *   df.filter( col("colA").equalTo(col("colB")) );
   * }}}
   *
   * @group expr_ops
   * @since 1.3.0
   */
  def equalTo(other: Any): Column = this === other
  /**
   * Inequality test.
   * {{{
   *   // Scala:
   *   df.select( df("colA") =!= df("colB") )
   *   df.select( !(df("colA") === df("colB")) )
   *
   *   // Java:
   *   import static org.apache.spark.sql.functions.*;
   *   df.filter( col("colA").notEqual(col("colB")) );
   * }}}
   *
   * @group expr_ops
   * @since 2.0.0
   */
  def =!= (other: Any): Column = withExpr{ Not(EqualTo(expr, lit(other).expr)) }
  /**
   * Inequality test.
   * {{{
   *   // Scala:
   *   df.select( df("colA") !== df("colB") )
   *   df.select( !(df("colA") === df("colB")) )
   *
   *   // Java:
   *   import static org.apache.spark.sql.functions.*;
   *   df.filter( col("colA").notEqual(col("colB")) );
   * }}}
   *
   * @group expr_ops
   * @since 1.3.0
   */
  @deprecated("!== does not have the same precedence as ===, use =!= instead", "2.0.0")
  def !== (other: Any): Column = this =!= other
  /**
   * Inequality test.
   * {{{
   *   // Scala:
   *   df.select( df("colA") !== df("colB") )
   *   df.select( !(df("colA") === df("colB")) )
   *
   *   // Java:
   *   import static org.apache.spark.sql.functions.*;
   *   df.filter( col("colA").notEqual(col("colB")) );
   * }}}
   *
   * @group java_expr_ops
   * @since 1.3.0
   */
  def notEqual(other: Any): Column = withExpr { Not(EqualTo(expr, lit(other).expr)) }
/**
* Greater than.
* {{{
* // Scala: The following selects people older than 21.
* people.select( people("age") > 21 )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* people.select( people("age").gt(21) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def > (other: Any): Column = withExpr { GreaterThan(expr, lit(other).expr) }
/**
* Greater than.
* {{{
* // Scala: The following selects people older than 21.
* people.select( people("age") > lit(21) )
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* people.select( people("age").gt(21) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def gt(other: Any): Column = this > other
/**
* Less than.
* {{{
* // Scala: The following selects people younger than 21.
* people.select( people("age") < 21 )
*
* // Java:
* people.select( people("age").lt(21) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def < (other: Any): Column = withExpr { LessThan(expr, lit(other).expr) }
/**
* Less than.
* {{{
* // Scala: The following selects people younger than 21.
* people.select( people("age") < 21 )
*
* // Java:
* people.select( people("age").lt(21) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def lt(other: Any): Column = this < other
/**
* Less than or equal to.
* {{{
* // Scala: The following selects people age 21 or younger than 21.
* people.select( people("age") <= 21 )
*
* // Java:
* people.select( people("age").leq(21) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def <= (other: Any): Column = withExpr { LessThanOrEqual(expr, lit(other).expr) }
/**
* Less than or equal to.
* {{{
* // Scala: The following selects people age 21 or younger than 21.
* people.select( people("age") <= 21 )
*
* // Java:
* people.select( people("age").leq(21) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def leq(other: Any): Column = this <= other
/**
* Greater than or equal to an expression.
* {{{
* // Scala: The following selects people age 21 or older than 21.
* people.select( people("age") >= 21 )
*
* // Java:
* people.select( people("age").geq(21) )
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def >= (other: Any): Column = withExpr { GreaterThanOrEqual(expr, lit(other).expr) }
/**
* Greater than or equal to an expression.
* {{{
* // Scala: The following selects people age 21 or older than 21.
* people.select( people("age") >= 21 )
*
* // Java:
* people.select( people("age").geq(21) )
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def geq(other: Any): Column = this >= other
/**
 * Equality test that is safe for null values.
 *
 * Builds an `EqualNullSafe` comparison instead of a plain `EqualTo`, so the
 * result is a usable boolean even when one or both operands are null.
 * NOTE(review): the exact null/null result (true when both sides are null)
 * follows Catalyst's `EqualNullSafe` semantics — confirm against that
 * expression's documentation.
 *
 * @group expr_ops
 * @since 1.3.0
 */
def <=> (other: Any): Column = withExpr { EqualNullSafe(expr, lit(other).expr) }
/**
 * Equality test that is safe for null values.
 *
 * Java-friendly alias for `<=>`; simply delegates to it.
 *
 * @group java_expr_ops
 * @since 1.3.0
 */
def eqNullSafe(other: Any): Column = this <=> other
/**
 * Evaluates a list of conditions and returns one of multiple possible result expressions.
 * If otherwise is not defined at the end, null is returned for unmatched conditions.
 *
 * {{{
 *   // Example: encoding gender string column into integer.
 *
 *   // Scala:
 *   people.select(when(people("gender") === "male", 0)
 *     .when(people("gender") === "female", 1)
 *     .otherwise(2))
 *
 *   // Java:
 *   people.select(when(col("gender").equalTo("male"), 0)
 *     .when(col("gender").equalTo("female"), 1)
 *     .otherwise(2))
 * }}}
 *
 * @group expr_ops
 * @since 1.4.0
 */
def when(condition: Column, value: Any): Column = this.expr match {
  // Another branch may only be appended while no else-expression has been set.
  case CaseWhen(existingBranches, None) =>
    withExpr { CaseWhen(existingBranches :+ ((condition.expr, lit(value).expr))) }
  case CaseWhen(_, Some(_)) =>
    throw new IllegalArgumentException(
      "when() cannot be applied once otherwise() is applied")
  case _ =>
    throw new IllegalArgumentException(
      "when() can only be applied on a Column previously generated by when() function")
}
/**
 * Supplies the default result expression for a chain of `when` branches.
 * Evaluates the accumulated conditions in order and returns this value for any
 * row that matched none of them; without it, unmatched rows yield null.
 *
 * {{{
 *   // Example: encoding gender string column into integer.
 *
 *   // Scala:
 *   people.select(when(people("gender") === "male", 0)
 *     .when(people("gender") === "female", 1)
 *     .otherwise(2))
 *
 *   // Java:
 *   people.select(when(col("gender").equalTo("male"), 0)
 *     .when(col("gender").equalTo("female"), 1)
 *     .otherwise(2))
 * }}}
 *
 * @group expr_ops
 * @since 1.4.0
 */
def otherwise(value: Any): Column = this.expr match {
  // The else-expression can be set exactly once, and only on a when()-chain.
  case CaseWhen(existingBranches, None) =>
    withExpr { CaseWhen(existingBranches, Option(lit(value).expr)) }
  case CaseWhen(_, Some(_)) =>
    throw new IllegalArgumentException(
      "otherwise() can only be applied once on a Column previously generated by when()")
  case _ =>
    throw new IllegalArgumentException(
      "otherwise() can only be applied on a Column previously generated by when()")
}
/**
 * True if the current column is between the lower bound and upper bound, inclusive.
 * Equivalent to `(this >= lowerBound) && (this <= upperBound)`.
 *
 * @group java_expr_ops
 * @since 1.4.0
 */
def between(lowerBound: Any, upperBound: Any): Column =
  this.geq(lowerBound).and(this.leq(upperBound))
/**
* True if the current expression is NaN.
*
* @group expr_ops
* @since 1.5.0
*/
def isNaN: Column = withExpr { IsNaN(expr) }
/**
* True if the current expression is null.
*
* @group expr_ops
* @since 1.3.0
*/
def isNull: Column = withExpr { IsNull(expr) }
/**
* True if the current expression is NOT null.
*
* @group expr_ops
* @since 1.3.0
*/
def isNotNull: Column = withExpr { IsNotNull(expr) }
/**
* Boolean OR.
* {{{
* // Scala: The following selects people that are in school or employed.
* people.filter( people("inSchool") || people("isEmployed") )
*
* // Java:
* people.filter( people("inSchool").or(people("isEmployed")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def || (other: Any): Column = withExpr { Or(expr, lit(other).expr) }
/**
* Boolean OR.
* {{{
* // Scala: The following selects people that are in school or employed.
* people.filter( people("inSchool") || people("isEmployed") )
*
* // Java:
* people.filter( people("inSchool").or(people("isEmployed")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def or(other: Column): Column = this || other
/**
* Boolean AND.
* {{{
* // Scala: The following selects people that are in school and employed at the same time.
* people.select( people("inSchool") && people("isEmployed") )
*
* // Java:
* people.select( people("inSchool").and(people("isEmployed")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def && (other: Any): Column = withExpr { And(expr, lit(other).expr) }
/**
* Boolean AND.
* {{{
* // Scala: The following selects people that are in school and employed at the same time.
* people.select( people("inSchool") && people("isEmployed") )
*
* // Java:
* people.select( people("inSchool").and(people("isEmployed")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def and(other: Column): Column = this && other
/**
* Sum of this expression and another expression.
* {{{
* // Scala: The following selects the sum of a person's height and weight.
* people.select( people("height") + people("weight") )
*
* // Java:
* people.select( people("height").plus(people("weight")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def + (other: Any): Column = withExpr { Add(expr, lit(other).expr) }
/**
* Sum of this expression and another expression.
* {{{
* // Scala: The following selects the sum of a person's height and weight.
* people.select( people("height") + people("weight") )
*
* // Java:
* people.select( people("height").plus(people("weight")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def plus(other: Any): Column = this + other
/**
* Subtraction. Subtract the other expression from this expression.
* {{{
* // Scala: The following selects the difference between people's height and their weight.
* people.select( people("height") - people("weight") )
*
* // Java:
* people.select( people("height").minus(people("weight")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def - (other: Any): Column = withExpr { Subtract(expr, lit(other).expr) }
/**
* Subtraction. Subtract the other expression from this expression.
* {{{
* // Scala: The following selects the difference between people's height and their weight.
* people.select( people("height") - people("weight") )
*
* // Java:
* people.select( people("height").minus(people("weight")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def minus(other: Any): Column = this - other
/**
* Multiplication of this expression and another expression.
* {{{
* // Scala: The following multiplies a person's height by their weight.
* people.select( people("height") * people("weight") )
*
* // Java:
* people.select( people("height").multiply(people("weight")) );
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def * (other: Any): Column = withExpr { Multiply(expr, lit(other).expr) }
/**
* Multiplication of this expression and another expression.
* {{{
* // Scala: The following multiplies a person's height by their weight.
* people.select( people("height") * people("weight") )
*
* // Java:
* people.select( people("height").multiply(people("weight")) );
* }}}
*
* @group java_expr_ops
* @since 1.3.0
*/
def multiply(other: Any): Column = this * other
/**
 * Division of this expression by another expression.
 * {{{
 *   // Scala: The following divides a person's height by their weight.
 *   people.select( people("height") / people("weight") )
 *
 *   // Java:
 *   people.select( people("height").divide(people("weight")) );
 * }}}
 *
 * @group expr_ops
 * @since 1.3.0
 */
def / (other: Any): Column = withExpr { Divide(expr, lit(other).expr) }
/**
 * Division of this expression by another expression.
 * {{{
 *   // Scala: The following divides a person's height by their weight.
 *   people.select( people("height") / people("weight") )
 *
 *   // Java:
 *   people.select( people("height").divide(people("weight")) );
 * }}}
 *
 * @group java_expr_ops
 * @since 1.3.0
 */
def divide(other: Any): Column = this / other
/**
* Modulo (a.k.a. remainder) expression.
*
* @group expr_ops
* @since 1.3.0
*/
def % (other: Any): Column = withExpr { Remainder(expr, lit(other).expr) }
/**
* Modulo (a.k.a. remainder) expression.
*
* @group java_expr_ops
* @since 1.3.0
*/
def mod(other: Any): Column = this % other
/**
* A boolean expression that is evaluated to true if the value of this expression is contained
* by the evaluated values of the arguments.
*
* @group expr_ops
* @since 1.5.0
*/
@scala.annotation.varargs
def isin(list: Any*): Column = withExpr { In(expr, list.map(lit(_).expr)) }
/**
* SQL like expression.
*
* @group expr_ops
* @since 1.3.0
*/
def like(literal: String): Column = withExpr { Like(expr, lit(literal).expr) }
/**
* SQL RLIKE expression (LIKE with Regex).
*
* @group expr_ops
* @since 1.3.0
*/
def rlike(literal: String): Column = withExpr { RLike(expr, lit(literal).expr) }
/**
* An expression that gets an item at position `ordinal` out of an array,
* or gets a value by key `key` in a `MapType`.
*
* @group expr_ops
* @since 1.3.0
*/
def getItem(key: Any): Column = withExpr { UnresolvedExtractValue(expr, Literal(key)) }
/**
* An expression that gets a field by name in a `StructType`.
*
* @group expr_ops
* @since 1.3.0
*/
def getField(fieldName: String): Column = withExpr {
UnresolvedExtractValue(expr, Literal(fieldName))
}
/**
* An expression that returns a substring.
* @param startPos expression for the starting position.
* @param len expression for the length of the substring.
*
* @group expr_ops
* @since 1.3.0
*/
def substr(startPos: Column, len: Column): Column = withExpr {
Substring(expr, startPos.expr, len.expr)
}
/**
* An expression that returns a substring.
* @param startPos starting position.
* @param len length of the substring.
*
* @group expr_ops
* @since 1.3.0
*/
def substr(startPos: Int, len: Int): Column = withExpr {
Substring(expr, lit(startPos).expr, lit(len).expr)
}
/**
* Contains the other element.
*
* @group expr_ops
* @since 1.3.0
*/
def contains(other: Any): Column = withExpr { Contains(expr, lit(other).expr) }
/**
* String starts with.
*
* @group expr_ops
* @since 1.3.0
*/
def startsWith(other: Column): Column = withExpr { StartsWith(expr, lit(other).expr) }
/**
* String starts with another string literal.
*
* @group expr_ops
* @since 1.3.0
*/
def startsWith(literal: String): Column = this.startsWith(lit(literal))
/**
* String ends with.
*
* @group expr_ops
* @since 1.3.0
*/
def endsWith(other: Column): Column = withExpr { EndsWith(expr, lit(other).expr) }
/**
* String ends with another string literal.
*
* @group expr_ops
* @since 1.3.0
*/
def endsWith(literal: String): Column = this.endsWith(lit(literal))
/**
* Gives the column an alias. Same as `as`.
* {{{
* // Renames colA to colB in select output.
* df.select($"colA".alias("colB"))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def alias(alias: String): Column = name(alias)
/**
* Gives the column an alias.
* {{{
* // Renames colA to colB in select output.
* df.select($"colA".as("colB"))
* }}}
*
* If the current column has metadata associated with it, this metadata will be propagated
* to the new column. If this not desired, use `as` with explicitly empty metadata.
*
* @group expr_ops
* @since 1.3.0
*/
def as(alias: String): Column = name(alias)
/**
* (Scala-specific) Assigns the given aliases to the results of a table generating function.
* {{{
* // Renames colA to colB in select output.
* df.select(explode($"myMap").as("key" :: "value" :: Nil))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def as(aliases: Seq[String]): Column = withExpr { MultiAlias(expr, aliases) }
/**
* Assigns the given aliases to the results of a table generating function.
* {{{
* // Renames colA to colB in select output.
* df.select(explode($"myMap").as("key" :: "value" :: Nil))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def as(aliases: Array[String]): Column = withExpr { MultiAlias(expr, aliases) }
/**
* Gives the column an alias.
* {{{
* // Renames colA to colB in select output.
* df.select($"colA".as('colB))
* }}}
*
* If the current column has metadata associated with it, this metadata will be propagated
* to the new column. If this not desired, use `as` with explicitly empty metadata.
*
* @group expr_ops
* @since 1.3.0
*/
def as(alias: Symbol): Column = name(alias.name)
/**
* Gives the column an alias with metadata.
* {{{
* val metadata: Metadata = ...
* df.select($"colA".as("colB", metadata))
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def as(alias: String, metadata: Metadata): Column = withExpr {
Alias(expr, alias)(explicitMetadata = Some(metadata))
}
/**
 * Gives the column a name (alias).
 * {{{
 *   // Renames colA to colB in select output.
 *   df.select($"colA".name("colB"))
 * }}}
 *
 * If the current column has metadata associated with it, this metadata will be propagated
 * to the new column. If this not desired, use `as` with explicitly empty metadata.
 *
 * @group expr_ops
 * @since 2.0.0
 */
def name(alias: String): Column = withExpr {
  expr match {
    // Named expressions carry their metadata over to the alias; any other
    // expression gets a plain alias with default (empty) metadata.
    case named: NamedExpression => Alias(expr, alias)(explicitMetadata = Some(named.metadata))
    case _ => Alias(expr, alias)()
  }
}
/**
* Casts the column to a different data type.
* {{{
* // Casts colA to IntegerType.
* import org.apache.spark.sql.types.IntegerType
* df.select(df("colA").cast(IntegerType))
*
* // equivalent to
* df.select(df("colA").cast("int"))
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def cast(to: DataType): Column = withExpr { Cast(expr, to) }
/**
* Casts the column to a different data type, using the canonical string representation
* of the type. The supported types are: `string`, `boolean`, `byte`, `short`, `int`, `long`,
* `float`, `double`, `decimal`, `date`, `timestamp`.
* {{{
* // Casts colA to integer.
* df.select(df("colA").cast("int"))
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def cast(to: String): Column = cast(CatalystSqlParser.parseDataType(to))
/**
* Returns an ordering used in sorting.
* {{{
* // Scala
* df.sort(df("age").desc)
*
* // Java
* df.sort(df.col("age").desc());
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def desc: Column = withExpr { SortOrder(expr, Descending) }
/**
* Returns a descending ordering used in sorting, where null values appear before non-null values.
* {{{
* // Scala: sort a DataFrame by age column in descending order and null values appearing first.
* df.sort(df("age").desc_nulls_first)
*
* // Java
* df.sort(df.col("age").desc_nulls_first());
* }}}
*
* @group expr_ops
* @since 2.1.0
*/
def desc_nulls_first: Column = withExpr { SortOrder(expr, Descending, NullsFirst) }
/**
* Returns a descending ordering used in sorting, where null values appear after non-null values.
* {{{
* // Scala: sort a DataFrame by age column in descending order and null values appearing last.
* df.sort(df("age").desc_nulls_last)
*
* // Java
* df.sort(df.col("age").desc_nulls_last());
* }}}
*
* @group expr_ops
* @since 2.1.0
*/
def desc_nulls_last: Column = withExpr { SortOrder(expr, Descending, NullsLast) }
/**
* Returns an ascending ordering used in sorting.
* {{{
* // Scala: sort a DataFrame by age column in ascending order.
* df.sort(df("age").asc)
*
* // Java
* df.sort(df.col("age").asc());
* }}}
*
* @group expr_ops
* @since 1.3.0
*/
def asc: Column = withExpr { SortOrder(expr, Ascending) }
/**
 * Returns an ascending ordering used in sorting, where null values appear before non-null
 * values.
 * {{{
 *   // Scala: sort a DataFrame by age column in ascending order and null values appearing first.
 *   df.sort(df("age").asc_nulls_first)
 *
 *   // Java
 *   df.sort(df.col("age").asc_nulls_first());
 * }}}
 *
 * @group expr_ops
 * @since 2.1.0
 */
def asc_nulls_first: Column = withExpr { SortOrder(expr, Ascending, NullsFirst) }
/**
 * Returns an ascending ordering used in sorting, where null values appear after non-null
 * values.
 * {{{
 *   // Scala: sort a DataFrame by age column in ascending order and null values appearing last.
 *   df.sort(df("age").asc_nulls_last)
 *
 *   // Java
 *   df.sort(df.col("age").asc_nulls_last());
 * }}}
 *
 * @group expr_ops
 * @since 2.1.0
 */
def asc_nulls_last: Column = withExpr { SortOrder(expr, Ascending, NullsLast) }
/**
 * Prints the expression to the console for debugging purpose.
 *
 * @param extended if true, print the full expression tree; otherwise print its SQL form
 * @group df_ops
 * @since 1.3.0
 */
def explain(extended: Boolean): Unit = {
  // scalastyle:off println
  val rendering = if (extended) expr.toString else expr.sql
  println(rendering)
  // scalastyle:on println
}
/**
* Compute bitwise OR of this expression with another expression.
* {{{
* df.select($"colA".bitwiseOR($"colB"))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def bitwiseOR(other: Any): Column = withExpr { BitwiseOr(expr, lit(other).expr) }
/**
* Compute bitwise AND of this expression with another expression.
* {{{
* df.select($"colA".bitwiseAND($"colB"))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def bitwiseAND(other: Any): Column = withExpr { BitwiseAnd(expr, lit(other).expr) }
/**
* Compute bitwise XOR of this expression with another expression.
* {{{
* df.select($"colA".bitwiseXOR($"colB"))
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def bitwiseXOR(other: Any): Column = withExpr { BitwiseXor(expr, lit(other).expr) }
/**
* Define a windowing column.
*
* {{{
* val w = Window.partitionBy("name").orderBy("id")
* df.select(
* sum("price").over(w.rangeBetween(Long.MinValue, 2)),
* avg("price").over(w.rowsBetween(0, 4))
* )
* }}}
*
* @group expr_ops
* @since 1.4.0
*/
def over(window: expressions.WindowSpec): Column = window.withAggregate(this)
/**
 * Defines an empty analytic clause. In this case the analytic function is applied
 * and presented for all rows in the result set.
 *
 * {{{
 *   df.select(
 *     sum("price").over(),
 *     avg("price").over()
 *   )
 * }}}
 *
 * @group expr_ops
 * @since 2.0.0
 */
def over(): Column = over(Window.spec)
}
/**
 * A convenient class used for constructing schema.
 *
 * Each method builds a `StructField` whose name is this column's name and whose
 * type matches the method; useful for assembling a `StructType` fluently.
 *
 * @since 1.3.0
 */
@InterfaceStability.Stable
class ColumnName(name: String) extends Column(name) {

  /**
   * Creates a new `StructField` of type boolean.
   * @since 1.3.0
   */
  def boolean: StructField = StructField(name, BooleanType)

  /**
   * Creates a new `StructField` of type byte.
   * @since 1.3.0
   */
  def byte: StructField = StructField(name, ByteType)

  /**
   * Creates a new `StructField` of type short.
   * @since 1.3.0
   */
  def short: StructField = StructField(name, ShortType)

  /**
   * Creates a new `StructField` of type int.
   * @since 1.3.0
   */
  def int: StructField = StructField(name, IntegerType)

  /**
   * Creates a new `StructField` of type long.
   * @since 1.3.0
   */
  def long: StructField = StructField(name, LongType)

  /**
   * Creates a new `StructField` of type float.
   * @since 1.3.0
   */
  def float: StructField = StructField(name, FloatType)

  /**
   * Creates a new `StructField` of type double.
   * @since 1.3.0
   */
  def double: StructField = StructField(name, DoubleType)

  /**
   * Creates a new `StructField` of type string.
   * @since 1.3.0
   */
  def string: StructField = StructField(name, StringType)

  /**
   * Creates a new `StructField` of type date.
   * @since 1.3.0
   */
  def date: StructField = StructField(name, DateType)

  /**
   * Creates a new `StructField` of type decimal (with the default precision/scale).
   * @since 1.3.0
   */
  def decimal: StructField = StructField(name, DecimalType.USER_DEFAULT)

  /**
   * Creates a new `StructField` of type decimal with the given precision and scale.
   * @since 1.3.0
   */
  def decimal(precision: Int, scale: Int): StructField =
    StructField(name, DecimalType(precision, scale))

  /**
   * Creates a new `StructField` of type timestamp.
   * @since 1.3.0
   */
  def timestamp: StructField = StructField(name, TimestampType)

  /**
   * Creates a new `StructField` of type binary.
   * @since 1.3.0
   */
  def binary: StructField = StructField(name, BinaryType)

  /**
   * Creates a new `StructField` of type array.
   * @since 1.3.0
   */
  def array(dataType: DataType): StructField = StructField(name, ArrayType(dataType))

  /**
   * Creates a new `StructField` of type map.
   * @since 1.3.0
   */
  def map(keyType: DataType, valueType: DataType): StructField =
    map(MapType(keyType, valueType))

  /**
   * Creates a new `StructField` with the given pre-built map type.
   * @since 1.3.0
   */
  def map(mapType: MapType): StructField = StructField(name, mapType)

  /**
   * Creates a new `StructField` of type struct.
   * @since 1.3.0
   */
  def struct(fields: StructField*): StructField = struct(StructType(fields))

  /**
   * Creates a new `StructField` of type struct.
   * @since 1.3.0
   */
  def struct(structType: StructType): StructField = StructField(name, structType)
}
| sachintyagi22/spark | sql/core/src/main/scala/org/apache/spark/sql/Column.scala | Scala | apache-2.0 | 36,191 |
package fpinscala.monoids
import fpinscala.parallelism.Nonblocking._
import fpinscala.parallelism.Nonblocking.Par.toParOps // infix syntax for `Par.map`, `Par.flatMap`, etc
/**
 * A monoid: a type `A` with an associative binary operation `op` and an
 * identity element `zero` such that `op(x, zero) == op(zero, x) == x`.
 */
trait Monoid[A] {
  def op(a1: A, a2: A): A // must be associative
  def zero: A             // identity element for `op`
}
/**
 * Partial word-count result for a chunk of text.
 * `Stub` holds characters in which no space (hence no complete word boundary)
 * has been seen; `Part` records a possibly incomplete word on each edge
 * (`lStub`, `rStub`) plus the number of complete words between them.
 */
sealed trait WC
case class Stub(chars: String) extends WC
case class Part(lStub: String, words: Int, rStub: String) extends WC
object Monoid {

  /** Monoid under string concatenation; "" is the identity. */
  val stringMonoid = new Monoid[String] {
    def op(a1: String, a2: String) = a1 + a2
    val zero = ""
  }

  /** Monoid under list concatenation; Nil is the identity. */
  def listMonoid[A] = new Monoid[List[A]] {
    def op(a1: List[A], a2: List[A]) = a1 ++ a2
    val zero = Nil
  }

  // Exercise stubs below intentionally throw until implemented.
  //
  // FIX: these four were `val`s. Because object initialization evaluates every
  // `val` eagerly, touching ANY member of `Monoid` (even `stringMonoid`) made
  // the whole object fail to initialize with the "todo" error. Declaring the
  // stubs as `def`s defers the error to the point of use and is
  // source-compatible for callers.
  def intAddition: Monoid[Int] = sys.error("todo")

  def intMultiplication: Monoid[Int] = sys.error("todo")

  def booleanOr: Monoid[Boolean] = sys.error("todo")

  def booleanAnd: Monoid[Boolean] = sys.error("todo")

  def optionMonoid[A]: Monoid[Option[A]] = sys.error("todo")

  def endoMonoid[A]: Monoid[A => A] = sys.error("todo")

  // TODO: Placeholder for `Prop`. Remove once you have implemented the `Prop`
  // data type from Part 2.
  trait Prop {}

  // TODO: Placeholder for `Gen`. Remove once you have implemented the `Gen`
  // data type from Part 2.
  import fpinscala.testing._
  import Prop._

  def monoidLaws[A](m: Monoid[A], gen: Gen[A]): Prop = sys.error("todo")

  def trimMonoid(s: String): Monoid[String] = sys.error("todo")

  def concatenate[A](as: List[A], m: Monoid[A]): A =
    sys.error("todo")

  def foldMap[A, B](as: List[A], m: Monoid[B])(f: A => B): B =
    sys.error("todo")

  def foldRight[A, B](as: List[A])(z: B)(f: (A, B) => B): B =
    sys.error("todo")

  def foldLeft[A, B](as: List[A])(z: B)(f: (B, A) => B): B =
    sys.error("todo")

  def foldMapV[A, B](as: IndexedSeq[A], m: Monoid[B])(f: A => B): B =
    sys.error("todo")

  def ordered(ints: IndexedSeq[Int]): Boolean =
    sys.error("todo")

  def par[A](m: Monoid[A]): Monoid[Par[A]] =
    sys.error("todo")

  def parFoldMap[A,B](v: IndexedSeq[A], m: Monoid[B])(f: A => B): Par[B] =
    sys.error("todo")

  def count(s: String): Int =
    sys.error("todo")

  def productMonoid[A,B](A: Monoid[A], B: Monoid[B]): Monoid[(A, B)] =
    sys.error("todo")

  def functionMonoid[A,B](B: Monoid[B]): Monoid[A => B] =
    sys.error("todo")

  def mapMergeMonoid[K,V](V: Monoid[V]): Monoid[Map[K, V]] =
    sys.error("todo")

  def bag[A](as: IndexedSeq[A]): Map[A, Int] =
    sys.error("todo")
}
/**
 * Type class for structures `F[_]` that can be folded down to a summary value.
 * All methods are exercise stubs that throw until implemented.
 */
trait Foldable[F[_]] {
  import Monoid._
  def foldRight[A, B](as: F[A])(z: B)(f: (A, B) => B): B =
    sys.error("todo")
  def foldLeft[A, B](as: F[A])(z: B)(f: (B, A) => B): B =
    sys.error("todo")
  def foldMap[A, B](as: F[A])(f: A => B)(mb: Monoid[B]): B =
    sys.error("todo")
  def concatenate[A](as: F[A])(m: Monoid[A]): A =
    sys.error("todo")
  def toList[A](as: F[A]): List[A] =
    sys.error("todo")
}
/** Foldable instance for List (exercise stubs). */
object ListFoldable extends Foldable[List] {
  override def foldRight[A, B](as: List[A])(z: B)(f: (A, B) => B) =
    sys.error("todo")
  override def foldLeft[A, B](as: List[A])(z: B)(f: (B, A) => B) =
    sys.error("todo")
  override def foldMap[A, B](as: List[A])(f: A => B)(mb: Monoid[B]): B =
    sys.error("todo")
}
/** Foldable instance for IndexedSeq (exercise stubs). */
object IndexedSeqFoldable extends Foldable[IndexedSeq] {
  override def foldRight[A, B](as: IndexedSeq[A])(z: B)(f: (A, B) => B) =
    sys.error("todo")
  override def foldLeft[A, B](as: IndexedSeq[A])(z: B)(f: (B, A) => B) =
    sys.error("todo")
  override def foldMap[A, B](as: IndexedSeq[A])(f: A => B)(mb: Monoid[B]): B =
    sys.error("todo")
}
/** Foldable instance for Stream (exercise stubs). */
object StreamFoldable extends Foldable[Stream] {
  override def foldRight[A, B](as: Stream[A])(z: B)(f: (A, B) => B) =
    sys.error("todo")
  override def foldLeft[A, B](as: Stream[A])(z: B)(f: (B, A) => B) =
    sys.error("todo")
}
/** A simple binary tree ADT used by TreeFoldable; values live in the leaves. */
sealed trait Tree[+A]
case class Leaf[A](value: A) extends Tree[A]
case class Branch[A](left: Tree[A], right: Tree[A]) extends Tree[A]
/** Foldable instance for Tree (exercise stubs). */
object TreeFoldable extends Foldable[Tree] {
  override def foldMap[A, B](as: Tree[A])(f: A => B)(mb: Monoid[B]): B =
    sys.error("todo")
  override def foldLeft[A, B](as: Tree[A])(z: B)(f: (B, A) => B) =
    sys.error("todo")
  override def foldRight[A, B](as: Tree[A])(z: B)(f: (A, B) => B) =
    sys.error("todo")
}
/** Foldable instance for Option (exercise stubs). */
object OptionFoldable extends Foldable[Option] {
  override def foldMap[A, B](as: Option[A])(f: A => B)(mb: Monoid[B]): B =
    sys.error("todo")
  override def foldLeft[A, B](as: Option[A])(z: B)(f: (B, A) => B) =
    sys.error("todo")
  override def foldRight[A, B](as: Option[A])(z: B)(f: (A, B) => B) =
    sys.error("todo")
}
object WC {

  /** The empty word count: a stub with no characters. */
  def apply() = Stub("")

  /**
   * Builds a WC for a chunk of text.
   *
   * If the chunk contains no space it is a bare `Stub`. Otherwise the text is
   * split at the first and last space: the prefix becomes `lStub`, the suffix
   * becomes `rStub`, and the complete words strictly between them are counted.
   */
  def apply(str: String) = {
    val firstSpace = str.indexOf(' ')
    if (firstSpace < 0) Stub(str)
    else {
      val (lStub, rest) = str.splitAt(firstSpace)
      val lastSpaceEnd = rest.lastIndexOf(' ') + 1
      val (middle, rStub) = rest.splitAt(lastSpaceEnd)
      // FIX: the previous formula `middle.count(_ == ' ') - 1` counted the
      // empty tokens between consecutive spaces as words (e.g. "a  b" counted
      // 3 words), which also broke agreement with wcMonoid.op over split
      // chunks. Count only non-empty tokens; String.split drops trailing
      // empty substrings, so an all-space middle yields 0.
      Part(lStub, middle.split(' ').count(_.nonEmpty), rStub)
    }
  }

  /** Total words represented by a finished WC; each non-empty stub is a word. */
  def count(wc: WC) = wc match {
    case Stub("") => 0
    case Stub(_) => 1
    case Part("", c, "") => c
    case Part("", c, _) => c + 1
    case Part(_, c, "") => c + 1
    case Part(_, c, _) => c + 2
  }
}
object MonoidTest {

  /**
   * Word-count monoid over WC values: merging two partial counts glues the
   * touching stubs together so text can be counted in arbitrary pieces.
   */
  val wcMonoid: Monoid[WC] = new Monoid[WC] {
    def op(a1: WC, a2: WC): WC = (a1, a2) match {
      case (Stub(s1), Stub(s2)) => Stub(s1 + s2)
      case (Stub(s1), Part(l, w, r)) => Part(s1 + l, w, r)
      case (Part(l, w, r), Stub(s2)) => Part(l, w, r + s2)
      case (Part(l1, w1, r1), Part(l2, w2, r2)) =>
        // The adjoining stubs form one extra complete word unless both are empty.
        val joinedWord = if (r1.isEmpty && l2.isEmpty) 0 else 1
        Part(l1, w1 + w2 + joinedWord, r2)
    }
    def zero: WC = WC()
  }

  /** Counts words by recursively halving the input and merging with wcMonoid. */
  def counter(str: String): WC =
    if (str.length < 10) WC(str)
    else {
      val (left, right) = str.splitAt(str.length / 2)
      wcMonoid.op(counter(left), counter(right))
    }

  def main(args: Array[String]): Unit = {
    val longText = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
    val splitLength = 5
    // Cut the text into fixed-size chunks and count each chunk independently.
    val chunks: List[WC] = longText.sliding(splitLength, splitLength).toList.map(WC(_))
    println(longText)
    // Folding from either side must agree because wcMonoid.op is associative.
    val fromLeft = chunks.foldLeft(wcMonoid.zero)(wcMonoid.op)
    println(fromLeft)
    val fromRight = chunks.foldRight(wcMonoid.zero)(wcMonoid.op)
    println(fromRight)
    // A balanced recursive split must agree as well.
    val halved = counter(longText)
    println(halved)
    println(WC.count(fromLeft))
  }
}
| enpassant/fpinscala | exercises/src/main/scala/fpinscala/monoids/Monoid.scala | Scala | mit | 6,760 |
package ee.cone.c4actor
import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4actor.ConnProtocol.D_Node
import ee.cone.c4actor.Types.SrcId
import ee.cone.c4assemble.Types.{Each, Outs, Values}
import ee.cone.c4assemble._
import ee.cone.c4di.c4
import ee.cone.c4proto.{Id, protocol}
/** Protocol for the connectivity test: a flat tree node referencing its parent by id. */
@protocol("ConnTestApp") object ConnProtocol {
  // `parentId` is the srcId of the parent node; empty for the root (see ConnStart).
  @Id(0x0001) case class D_Node(@Id(0x0003) srcId: String, @Id(0x0005) parentId: String)
}
// Ancestor chain for a node: the node itself first, the root last.
case class ConnNodePath(path: List[D_Node])
/**
 * Assembly rules for the connectivity test: derives root-to-node paths and a
 * recursive RichNode tree from flat D_Node facts. The @c4assemble macro turns
 * these methods into incremental join rules.
 */
@c4assemble("ConnTestApp") class ConnAssembleBase {
  type ParentId = SrcId

  // Re-keys every node under its parent's id so children can be joined to parents.
  def nodesByParentId(
    key: SrcId,
    node: Each[D_Node]
  ): Values[(ParentId,D_Node)] = List(node.parentId -> node)

  // Extends known paths one level down. `@was` feeds the previous iteration's
  // ConnNodePath values back in, so paths grow from the root outward.
  def connect(
    key: SrcId,
    @was paths: Values[ConnNodePath],
    @by[ParentId] node: Each[D_Node]
  ): Values[(SrcId,ConnNodePath)] = {
    for {
      // An empty key means the node has no parent (root): start a fresh path;
      // otherwise prepend the child to each path already known for the parent.
      path <- if(key.nonEmpty) paths else List(ConnNodePath(Nil))
    } yield {
      WithPK(path.copy(path=node::path.path))
    }
  }
  /*
  By[ParentId,D_Node] := for(node <- Is[D_Node] if node.parentId.nonEmpty) yield node.parentId -> node
  Is[List[D_Node]] := for(node <- Is[D_Node] if node.parentId.isEmpty) yield WithPK(node::Nil)
  Is[List[D_Node]] := WithPK(Each(By[ParentId,D_Node])::Each(Was[List[D_Node]]))
  */
  // Builds the RichNode tree bottom-up and emits each node's RichNode twice:
  // keyed by its parent (to become part of the parent's children) and by itself.
  def multiOut(
    key: SrcId,
    orig: Each[D_Node],
    @was @by[ParentId] children: Values[RichNode],
    richToParentOut: OutFactory[ParentId,RichNode],
    richOut: OutFactory[SrcId,RichNode],
  ): Outs = {
    // Children are sorted by primary key to keep the derived tree deterministic.
    val rich = RichNode(orig,children.sortBy(ToPrimaryKey(_)).toList)
    richToParentOut.result(orig.parentId,rich) :: richOut.result(WithPK(rich)) :: Nil
  }
}
// A node joined with its children (sorted by primary key); built recursively in multiOut.
case class RichNode(orig: D_Node, children: List[RichNode])
/**
 * Smoke test runner: feeds a small tree of D_Node facts into the assembler and
 * asserts the derived ConnNodePath and RichNode indexes, then completes.
 */
@c4("ConnTestApp") final class ConnStart(
  execution: Execution, toUpdate: ToUpdate, contextFactory: ContextFactory,
  getConnNodePath: GetByPK[ConnNodePath],
  getRichNode: GetByPK[RichNode],
) extends Executable with LazyLogging {
  def run(): Unit = {
    import LEvent.update
    // Tree under test:  1 -> { 12 -> { 124, 125 }, 13 }
    val recs = update(D_Node("1","")) ++
      update(D_Node("12","1")) ++ update(D_Node("13","1")) ++
      update(D_Node("124","12")) ++ update(D_Node("125","12"))
    val updates = recs.map(rec=>toUpdate.toUpdate(rec)).toList
    val nGlobal = contextFactory.updated(updates)
    //logger.info(s"${nGlobal.assembled}")
    // The path index lists each node's ancestors: the node first, the root last.
    assert(
      getConnNodePath.ofA(nGlobal)("125") ==
      ConnNodePath(List(
        D_Node("125","12"), D_Node("12","1"), D_Node("1","")
      ))
    )
    // The rich index materializes the whole subtree under each node.
    assert(
      getRichNode.ofA(nGlobal)("1") ==
      RichNode(D_Node("1",""),List(
        RichNode(D_Node("12","1"),List(
          RichNode(D_Node("124","12"),List()),
          RichNode(D_Node("125","12"),List())
        )),
        RichNode(D_Node("13","1"),List())
      ))
    )
    execution.complete()
    /*
    Map(
      ByPK(classOf[PCProtocol.D_RawParentNode]) -> Map(
        "1" -> D_RawParentNode("1","P-1")
      ),
      ByPK(classOf[PCProtocol.D_RawChildNode]) -> Map(
        "2" -> D_RawChildNode("2","1","C-2"),
        "3" -> D_RawChildNode("3","1","C-3")
      ),
      /*getParentNodeWithChildren: GetByPK[ParentNodeWithChildren],*/getParentNodeWithChildren -> Map(
        "1" -> ParentNodeWithChildren("1",
          "P-1",
          List(D_RawChildNode("2","1","C-2"), D_RawChildNode("3","1","C-3"))
        )
      )
    ).foreach{
      case (k,v) => assert(k.of(nGlobal).toMap==v)
    }*/
  }
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.graphframes.lib
import org.apache.spark.graphx.{lib => graphxlib}
import org.graphframes.{GraphFrame, Logging}
/**
* Parallel Personalized PageRank algorithm implementation.
*
* This implementation uses the standalone [[GraphFrame]] interface and
* runs personalized PageRank in parallel for a fixed number of iterations.
* This can be run by setting `maxIter`.
* The source vertex Ids are set in `sourceIds`.
* A simple local implementation of this algorithm is as follows.
* {{{
* var oldPR = Array.fill(n)( 1.0 )
* val PR = (0 until n).map(i => if sourceIds.contains(i) alpha else 0.0)
* for( iter <- 0 until maxIter ) {
* swap(oldPR, PR)
* for( i <- 0 until n ) {
* PR[i] = (1 - alpha) * inNbrs[i].map(j => oldPR[j] / outDeg[j]).sum
* if (sourceIds.contains(i)) PR[i] += alpha
* }
* }
* }}}
*
* `alpha` is the random reset probability (typically 0.15), `inNbrs[i]` is the set of
* neighbors which link to `i` and `outDeg[j]` is the out degree of vertex `j`.
*
* Note that this is not the "normalized" PageRank and as a consequence pages that have no
* inlinks will have a PageRank of alpha. In particular, the pageranks may have some values
* greater than 1.
*
* The resulting vertices DataFrame contains one additional column:
* - pageranks (`VectorType`): the pageranks of this vertex from all input source vertices
*
* The resulting edges DataFrame contains one additional column:
* - weight (`DoubleType`): the normalized weight of this edge after running PageRank
*/
class ParallelPersonalizedPageRank private[graphframes] (
    private val graph: GraphFrame) extends Arguments {

  // Random reset probability "alpha"; defaults to the conventional 0.15.
  private var resetProb: Option[Double] = Some(0.15)
  // Number of iterations; must be set via maxIter() before run().
  private var maxIter: Option[Int] = None
  // Source vertex ids; must be non-empty before run().
  private var srcIds: Array[Any] = Array()

  /** Source vertices for a Personalized Page Rank */
  def sourceIds(values: Array[Any]): this.type = {
    this.srcIds = values
    this
  }

  /** Reset probability "alpha" */
  def resetProbability(value: Double): this.type = {
    resetProb = Some(value)
    this
  }

  /** Number of iterations to run */
  def maxIter(value: Int): this.type = {
    this.maxIter = Some(value)
    this
  }

  /**
   * Runs the algorithm with the configured parameters.
   *
   * @throws IllegalArgumentException if maxIter() or sourceIds() were not set
   */
  def run(): GraphFrame = {
    // Idiom fixes: test the Option with isDefined rather than `!= None`, and
    // drop the pointless `s` interpolators from the constant messages.
    require(maxIter.isDefined, "Max number of iterations maxIter() must be provided")
    require(srcIds.nonEmpty, "Source vertices Ids sourceIds() must be provided")
    ParallelPersonalizedPageRank.run(graph, maxIter.get, resetProb.get, srcIds)
  }
}
private object ParallelPersonalizedPageRank {
  /** Default name for the pageranks column. */
  private val PAGERANKS = "pageranks"
  /** Default name for the weight column. */
  private val WEIGHT = "weight"
  /**
   * Run Personalized PageRank for a fixed number of iterations, for a
   * set of starting nodes in parallel. Returns a graph with vertex attributes
   * containing the pageranks relative to all starting nodes (as a vector) and
   * edge attributes the normalized edge weight
   *
   * @param graph The graph on which to compute personalized pagerank
   * @param maxIter The number of iterations to run
   * @param resetProb The random reset probability
   * @param sourceIds The list of sources to compute personalized pagerank from
   * @return the graph with vertex attributes
   *         containing the pageranks relative to all starting nodes as a vector and
   *         edge attributes the normalized edge weight
   */
  def run(
      graph: GraphFrame,
      maxIter: Int,
      resetProb: Double,
      sourceIds: Array[Any]): GraphFrame = {
    // GraphFrame vertex ids may be arbitrary values; GraphX needs Long ids.
    val longSrcIds = sourceIds.map(GraphXConversions.integralId(graph, _))
    // Delegate the actual computation to the GraphX implementation.
    val gx = graphxlib.PageRank.runParallelPersonalizedPageRank(
      graph.cachedTopologyGraphX, maxIter, resetProb, longSrcIds)
    // Map the GraphX result back to a GraphFrame, naming the new columns.
    GraphXConversions.fromGraphX(graph, gx, vertexNames = Seq(PAGERANKS), edgeNames = Seq(WEIGHT))
  }
}
| graphframes/graphframes | src/main/scala/org/graphframes/lib/ParallelPersonalizedPageRank.scala | Scala | apache-2.0 | 4,664 |
package forcomp
object Anagrams {
  /** A word is simply a `String`. */
  type Word = String

  /** A sentence is a `List` of words. */
  type Sentence = List[Word]

  /** `Occurrences` is a `List` of pairs of characters and positive integers saying
   * how often the character appears.
   * This list is sorted alphabetically w.r.t. to the character in each pair.
   * All characters in the occurrence list are lowercase.
   *
   * Any list of pairs of lowercase characters and their frequency which is not sorted
   * is **not** an occurrence list.
   *
   * Note: If the frequency of some character is zero, then that character should not be
   * in the list.
   */
  type Occurrences = List[(Char, Int)]

  /** The dictionary is simply a sequence of words.
   * It is predefined and obtained as a sequence using the utility method `loadDictionary`.
   */
  val dictionary: List[Word] = loadDictionary

  /*
   * count a char in a word
   */
  /*
  def count(ch:Char, w:Word):Int = {
    var count:Int = 0
    w.count(p => ch==p)
    for ( c <- w; ch == c) count += 1
    count
  } */

  /** Converts the word into its character occurence list.
   *
   * Note: the uppercase and lowercase version of the character are treated as the
   * same character, and are represented as a lowercase character in the occurrence list.
   */
  def wordOccurrences(w: Word): Occurrences =
    w.groupBy(_.toLower).mapValues(_.length).toList.sortBy(_._1)

  /** Converts a sentence into its character occurrence list
   * (occurrences of the concatenation of all its words).
   */
  def sentenceOccurrences(sentence: Sentence): Occurrences = sentence match {
    case Nil => Nil
    case head :: tail => wordOccurrences(sentence.reduceLeft(_ + _)).sortBy(_._1)
  }

  /** The `dictionaryByOccurrences` is a `Map` from different occurrences to a sequence of all
   * the words that have that occurrence count.
   * This map serves as an easy way to obtain all the anagrams of a word given its occurrence list.
   *
   * For example, the word "eat" has the following character occurrence list:
   *
   * `List(('a', 1), ('e', 1), ('t', 1))`
   *
   * Incidentally, so do the words "ate" and "tea".
   *
   * This means that the `dictionaryByOccurrences` map will contain an entry:
   *
   * List(('a', 1), ('e', 1), ('t', 1)) -> Seq("ate", "eat", "tea")
   */
  lazy val dictionaryByOccurrences: Map[Occurrences, List[Word]] =
    dictionary.map(w => (wordOccurrences(w), w)).groupBy(_._1).mapValues(words => words.map(_._2))

  /** Returns `word` with the character at `index` removed. */
  def without(index: Int, word: String): String = new StringBuilder(word).deleteCharAt(index).toString

  /*
  def anagram (word: Word): List[Word] = {
    if (word.length == 1) {
      List(word)
    } else {
      var anagrams = ListBuffer[String]()
      0 to word.length-1 foreach { i =>
        anagrams ++= (anagram(without(i, word)) map (word.charAt(i) + _))
      }
      anagrams.toList
    }
  }
  */

  /** Returns all the anagrams of a given word. Filtre by dictionary */
  // Keyed lookup instead of a linear scan over all map entries: the map's key
  // *is* the occurrence list, so `find(_._1 == ...)` was an O(n) re-implementation
  // of `get`.
  def wordAnagrams(word: Word): List[Word] =
    dictionaryByOccurrences.getOrElse(wordOccurrences(word), Nil)

  /** Returns the list of all subsets of the occurrence list.
   * This includes the occurrence itself, i.e. `List(('k', 1), ('o', 1))`
   * is a subset of `List(('k', 1), ('o', 1))`.
   * It also include the empty subset `List()`.
   *
   * Example: the subsets of the occurrence list `List(('a', 2), ('b', 2))` are:
   *
   * List(
   *   List(),
   *   List(('a', 1)),
   *   List(('a', 2)),
   *   List(('b', 1)),
   *   List(('a', 1), ('b', 1)),
   *   List(('a', 2), ('b', 1)),
   *   List(('b', 2)),
   *   List(('a', 1), ('b', 2)),
   *   List(('a', 2), ('b', 2))
   * )
   *
   * Note that the order of the occurrence list subsets does not matter -- the subsets
   * in the example above could have been displayed in some other order.
   */
  def combinations(occurrences: Occurrences): List[Occurrences] = {
    // Expands one (char, count) pair into all (char, 1) .. (char, count) choices.
    def loop0(elt: (Char, Int)): Occurrences = elt match {
      case (char, count) =>
        (for {
          index <- 1 to count
        } yield char -> index).toList
    }
    // Grows partial subsets until they reach the requested size `deep`.
    def loop1(elt: List[Occurrences], rest: List[Occurrences], deep: Int): List[Occurrences] = {
      (elt, rest) match {
        case (head :: tail, _) if (head.length == deep) => elt
        case (acc, tail) => {
          val res = for {
            t <- tail
            x <- t
            y <- elt
            if (!y.contains(x))
          } yield {
            (y :+ x)
          }
          if (tail.length > 1) loop1(res, tail.tail, deep) else loop1(res, tail, deep)
        }
      }
    }
    // Rotates through the occurrence list so every element gets a turn as the
    // "seed" of a subset, accumulating results of every target size.
    def loop2(remain: Occurrences, occ: Occurrences, acc: List[Occurrences]): List[Occurrences] = occ match {
      case Nil => acc
      case (head :: tail) => {
        val res = (for {
          deep <- 1 to occurrences.length
          cb <- loop0(head)
        } yield loop1(List(List(cb)), tail.map(loop0), deep)
        ).reduceLeft(_ ++ _).toList
        if (!remain.contains(head)) {
          loop2(head :: remain, tail :+ head, acc ++ res)
        }
        else acc
      }
    }
    // Deduplicate (the rotation produces repeats) and always include the empty subset.
    loop2(Nil, occurrences, Nil).map(_.sortBy(identity)).distinct ++ List(Nil)
  }

  /** Subtracts occurrence list `y` from occurrence list `x`.
   *
   * The precondition is that the occurrence list `y` is a subset of
   * the occurrence list `x` -- any character appearing in `y` must
   * appear in `x`, and its frequency in `y` must be smaller or equal
   * than its frequency in `x`.
   *
   * Note: the resulting value is an occurrence - meaning it is sorted
   * and has no zero-entries.
   */
  def subtract(xs: Occurrences, ys: Occurrences): Occurrences = {
    def loop(acc: Map[Char, Int], elts: Occurrences): Occurrences = elts match {
      case Nil => {
        acc.toList.sortBy(_._1)
      }
      // Drop the character entirely when its count reaches zero, otherwise decrement.
      case (char, c1) :: tail => acc.get(char).map { c2 =>
        if (c2 - c1 <= 0) loop(acc - char, tail) else loop(acc updated (char, c2 - c1), tail)
      } getOrElse loop(acc, tail)
    }
    loop(xs.toMap, ys)
  }

  /* for {
    xx <- x
    yy <- y
    if xx._1 != yy._1
    if xx._2 <= yy._2 } yield xx
  */

  /** Returns a list of all anagram sentences of the given sentence.
   *
   * An anagram of a sentence is formed by taking the occurrences of all the characters of
   * all the words in the sentence, and producing all possible combinations of words with those characters,
   * such that the words have to be from the dictionary.
   *
   * The number of words in the sentence and its anagrams does not have to correspond.
   * For example, the sentence `List("I", "love", "you")` is an anagram of the sentence `List("You", "olive")`.
   *
   * Also, two sentences with the same words but in a different order are considered two different anagrams.
   *
   * Note: in case that the words of the sentence are in the dictionary, then the sentence is the anagram of itself,
   * so it has to be returned in this list.
   *
   * Note: There is only one anagram of an empty sentence.
   */
  def sentenceAnagrams(sentence: Sentence): List[Sentence] = {
    // `all` tracks the characters still unused; when it is exhausted the
    // accumulated `sentence` is a complete anagram.
    // (Leftover debug println statements removed from this method.)
    def sentenceAnagrams0(all: Occurrences, occurrences: Occurrences, sentence: Sentence): List[Sentence] = {
      combinations(occurrences).flatMap { occ =>
        val words = dictionaryByOccurrences.get(occ) getOrElse Nil
        (words, occ, all) match {
          case (_, _, Nil) => {
            // Every character consumed: the sentence built so far is an anagram.
            List(sentence)
          }
          case (words, occ, _) => {
            // Try each dictionary word for this combination and recurse on the remainder.
            (for {
              word <- words
              s <- sentenceAnagrams0(subtract(all, occ), subtract(all, occ), sentence :+ word)
            } yield s)
          }
        }
      }
    }
    val all = sentenceOccurrences(sentence)
    sentenceAnagrams0(all, all, Nil)
  }
}
| relyah/CourseraFunctionalProgramming | resource/progfun-master/forcomp/src/main/scala/forcomp/Anagrams.scala | Scala | gpl-2.0 | 8,996 |
package pokestats
import com.github.racc.tscg.TypesafeConfigModule
import play.api.ApplicationLoader.Context
import play.api.inject.guice.{GuiceApplicationBuilder, GuiceApplicationLoader}
class CustomApplicationLoader extends GuiceApplicationLoader {

  /**
   * Extends the default Guice builder by binding a TypesafeConfig module
   * for the `pokestats` package, so config values are injectable.
   */
  override protected def builder(context: Context): GuiceApplicationBuilder = {
    val typesafeConfig = context.initialConfiguration.underlying
    super.builder(context)
      .bindings(TypesafeConfigModule.fromConfigWithPackage(typesafeConfig, "pokestats"))
  }
}
| guilgaly/pokemon-stats | server/src/main/scala/pokestats/CustomApplicationLoader.scala | Scala | apache-2.0 | 548 |
/*
* Copyright 2012-2016 Steve Chaloner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package be.objectify.deadbolt.scala.filters
import akka.stream.Materializer
import be.objectify.deadbolt.scala.DeadboltComponents
/**
* Individual components of Deadbolt's filter support. Use this trait if your
* application uses compile-time dependency injection.
*
* @author Steve Chaloner (steve@objectify.be)
* @since 2.5.1
*/
trait DeadboltFilterComponents extends DeadboltComponents {
  /** The route/method combinations Deadbolt should apply constraints to. */
  def authorizedRoutes: AuthorizedRoutes

  // Materializer needed by Play filters to process the request stream.
  implicit def mat: Materializer

  /** Filter that applies the authorization constraints declared in [[authorizedRoutes]]. */
  lazy val deadboltFilter: DeadboltRoutePathFilter = new DeadboltRoutePathFilter(mat,
                                                                                 handlers,
                                                                                 authorizedRoutes)
  /** Ready-made constraints for use when declaring authorized routes. */
  lazy val filterConstraints: FilterConstraints = new FilterConstraints(constraintLogic,
                                                                        ecContextProvider)
}
| schaloner/deadbolt-2-scala | code/app/be/objectify/deadbolt/scala/filters/DeadboltFilterComponents.scala | Scala | apache-2.0 | 1,508 |
/**
* Copyright (C) 2013 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.client.fr
import org.junit.Test
import org.orbeon.oxf.client.FormRunnerOps
import org.scalatest.junit.AssertionsForJUnit
trait ClientCurrencyTest extends AssertionsForJUnit with FormRunnerOps {

  // Regression test: the currency field's displayed value must be reformatted
  // even when the change does not trigger an XForms update.
  // https://github.com/orbeon/orbeon-forms/issues/1026
  @Test def displayUpdateWhenNoXFormsUpdate(): Unit = {

    val currencyInput = cssSelector(".xbl-fr-currency .xbl-fr-number-visible-input")
    val emailInput    = cssSelector(".xforms-type-email input")

    // The for-comprehension here just sequences browser actions (each step yields Unit).
    for {
      _ ← loadOrbeonPage("/fr/orbeon/controls/new")
      _ ← clickOn(LinkTextQuery("Typed Controls"))
    }()

    // Type `input` into the currency field, move focus away (to the email field),
    // then check the currency field now displays `result`.
    def enterCheck(input: String, result: String) = for {
      _ ← clickOn(currencyInput)
      _ ← textField(currencyInput).value = input
      _ ← clickOn(emailInput)
      _ ← assert(textField(currencyInput).value === result)
    }()

    // ".9" is a valid number and gets reformatted; ".9998" is presumably kept
    // as-is by the control — NOTE(review): confirm against the control's rounding rules.
    enterCheck(".9", "0.90")
    enterCheck(".9998", ".9998")
  }
}
| brunobuzzi/orbeon-forms | xforms/jvm/src/test/scala/org/orbeon/oxf/client/fr/ClientCurrencyTest.scala | Scala | lgpl-2.1 | 1,576 |
package mesosphere.marathon.api.v2
import javax.inject.Inject
import javax.servlet.http.{ HttpServletRequest, HttpServletResponse }
import javax.ws.rs._
import javax.ws.rs.core.{ Context, MediaType, Response }
import com.codahale.metrics.annotation.Timed
import mesosphere.marathon.Protos.MarathonTask
import mesosphere.marathon.api.v2.json.Formats._
import mesosphere.marathon.api._
import mesosphere.marathon.core.appinfo.EnrichedTask
import mesosphere.marathon.health.HealthCheckManager
import mesosphere.marathon.plugin.auth._
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.state.{ GroupManager, PathId }
import mesosphere.marathon.tasks.TaskTracker
import mesosphere.marathon.{ MarathonConf, MarathonSchedulerService, UnknownAppException }
import org.slf4j.LoggerFactory
import scala.concurrent.Future
@Consumes(Array(MediaType.APPLICATION_JSON))
@Produces(Array(MarathonMediaType.PREFERRED_APPLICATION_JSON))
class AppTasksResource @Inject() (service: MarathonSchedulerService,
                                  taskTracker: TaskTracker,
                                  taskKiller: TaskKiller,
                                  healthCheckManager: HealthCheckManager,
                                  val config: MarathonConf,
                                  groupManager: GroupManager,
                                  val authorizer: Authorizer,
                                  val authenticator: Authenticator) extends AuthResource {

  val log = LoggerFactory.getLogger(getClass.getName)

  // Matches "<group path>/*" (or a bare "*"), used to request tasks of a whole group.
  val GroupTasks = """^((?:.+/)|)\\*$""".r

  /** Lists the tasks of an app — or of a whole group when `appId` ends in "/*" — as JSON. */
  @GET
  @Timed
  def indexJson(@PathParam("appId") appId: String,
                @Context req: HttpServletRequest, @Context resp: HttpServletResponse): Response = {
    doIfAuthorized(req, resp, ViewAppOrGroup, appId.toRootPath) { implicit principal =>
      // Enrich each running task with its current health-check results.
      def tasks(appIds: Set[PathId]): Set[EnrichedTask] = for {
        id <- appIds
        health = result(healthCheckManager.statuses(id))
        task <- taskTracker.get(id)
      } yield EnrichedTask(id, task, health.getOrElse(task.getId, Nil))
      val matchingApps = appId match {
        // Group request: expand to all apps transitively contained in the group.
        case GroupTasks(gid) =>
          result(groupManager.group(gid.toRootPath))
            .map(_.transitiveApps.map(_.id))
            .getOrElse(Set.empty)
        case _ => Set(appId.toRootPath)
      }
      // Only apps with known tasks are reported; none at all means the app is unknown.
      val running = matchingApps.filter(taskTracker.contains)
      if (running.isEmpty) unknownApp(appId.toRootPath) else ok(jsonObjString("tasks" -> tasks(running)))
    }
  }

  /** Lists the app's tasks in the plain-text endpoints format (tab-separated). */
  @GET
  @Produces(Array(MediaType.TEXT_PLAIN))
  @Timed
  def indexTxt(@PathParam("appId") appId: String,
               @Context req: HttpServletRequest, @Context resp: HttpServletResponse): Response = {
    doIfAuthorized(req, resp, ViewAppOrGroup, appId.toRootPath) { implicit principal =>
      val id = appId.toRootPath
      service.getApp(id).fold(unknownApp(id)) { app =>
        ok(EndpointsHelper.appsToEndpointString(taskTracker, Seq(app), "\\t"))
      }
    }
  }

  /**
   * Kills all tasks of the app, optionally restricted to one agent via `host`
   * ("*" matches every host). With `scale=true` the app is also scaled down,
   * which creates a deployment.
   */
  @DELETE
  @Timed
  def deleteMany(@PathParam("appId") appId: String,
                 @QueryParam("host") host: String,
                 @QueryParam("scale")@DefaultValue("false") scale: Boolean = false,
                 @QueryParam("force")@DefaultValue("false") force: Boolean = false,
                 @Context req: HttpServletRequest, @Context resp: HttpServletResponse): Response = {
    doIfAuthorized(req, resp, KillTask, appId.toRootPath) { implicit principal =>
      val pathId = appId.toRootPath
      // No host given means kill every task of the app.
      def findToKill(appTasks: Set[MarathonTask]): Set[MarathonTask] = Option(host).fold(appTasks) { hostname =>
        appTasks.filter(_.getHost == hostname || hostname == "*")
      }
      if (scale) {
        val deploymentF = taskKiller.killAndScale(pathId, findToKill, force)
        deploymentResult(result(deploymentF))
      }
      else {
        reqToResponse(taskKiller.kill(pathId, findToKill)) {
          tasks => ok(jsonObjString("tasks" -> tasks))
        }
      }
    }
  }

  /** Kills a single task by id; with `scale=true` the app is scaled down accordingly. */
  @DELETE
  @Path("{taskId}")
  @Timed
  def deleteOne(@PathParam("appId") appId: String,
                @PathParam("taskId") id: String,
                @QueryParam("scale")@DefaultValue("false") scale: Boolean = false,
                @QueryParam("force")@DefaultValue("false") force: Boolean = false,
                @Context req: HttpServletRequest, @Context resp: HttpServletResponse): Response = {
    val pathId = appId.toRootPath
    doIfAuthorized(req, resp, KillTask, appId.toRootPath) { implicit principal =>
      def findToKill(appTasks: Set[MarathonTask]): Set[MarathonTask] = appTasks.find(_.getId == id).toSet
      if (scale) {
        val deploymentF = taskKiller.killAndScale(pathId, findToKill, force)
        deploymentResult(result(deploymentF))
      }
      else {
        reqToResponse(taskKiller.kill(pathId, findToKill)) {
          tasks => tasks.headOption.fold(unknownTask(id))(task => ok(jsonObjString("task" -> task)))
        }
      }
    }
  }

  // Awaits the kill result, translating an unknown app into a 404-style response.
  private def reqToResponse(future: Future[Set[MarathonTask]])(toResponse: Set[MarathonTask] => Response): Response = {
    import scala.concurrent.ExecutionContext.Implicits.global
    val response = future.map {
      toResponse
    } recover {
      case UnknownAppException(unknownAppId) => unknownApp(unknownAppId)
    }
    result(response)
  }
}
| Kosta-Github/marathon | src/main/scala/mesosphere/marathon/api/v2/AppTasksResource.scala | Scala | apache-2.0 | 5,332 |
package dotty1.collections
package immutable
import annotation.unchecked.uncheckedVariance
// Like Iter2, but with non-variant types only.
// NOTE: this is a dotty compiler-test fixture; the shape of the code (variance-free
// collection hierarchy, `&` intersection types, implicit value classes) is the point.
object Iter2 {

  // Minimal Iterator with mostly unimplemented (???) transformation stubs.
  trait Iterator[A] extends IterableOnce[A] {
    def hasNext: Boolean
    def next: A
    def iterator = this
    def foreach(f: A => Unit): Unit = ???
    def map[B](f: A => B): Iterator[B] = ???
    def flatMap[B](f: A => IterableOnce[B]): Iterator[B] = ???
    def ++[B >: A](xs: IterableOnce[B]): Iterator[B] = ???
    def drop(n: Int): Iterator[A] = ???
    def indexWhere(p: A => Boolean): Int = {
      var i = 0
      while (hasNext) {
        if (p(next)) return i
        i += 1
      }
      -1
    }
    def zip[B](that: Iterator[B]): Iterator[(A, B)] = ???
  }

  trait IterableOnce[A] {
    def iterator: Iterator[A]
    // Hook allowing a collection to supply a cheaper iterator for building.
    def buildIterator: Iterator[A] = iterator
  }

  // Factory abstraction: rebuild a collection of type C from any iterator.
  trait FromIterator[C[X] <: Iterable[X]] {
    def fromIterator[B](it: Iterator[B]): C[B]
  }

  trait Iterable[IA] extends IterableOnce[IA] with FromIterator[Iterable]

  trait Seq[AA] extends Iterable[AA] with FromIterator[Seq] {
    def apply(i: Int): AA
    def length: Int
  }

  // Invariant cons-list.
  sealed trait List[A] extends Seq[A] with FromIterator[List] {
    def isEmpty: Boolean
    def head: A
    def tail: List[A]
    def iterator = new ListIterator[A](this)
    def fromIterator[B](it: Iterator[B]): List[B] = it match {
      // Short-circuit: a ListIterator already wraps the list we want.
      case ListIterator(xs) => xs
      case _ => if (it.hasNext) Cons(it.next, fromIterator(it)) else Nil.asInstanceOf[List[B]]
    }
    def apply(i: Int): A = {
      require(!isEmpty)
      if (i == 0) head else tail.apply(i - 1)
    }
    def length: Int =
      if (isEmpty) 0 else 1 + tail.length
  }

  case class Cons[A](x: A, xs: List[A]) extends List[A] {
    def isEmpty = false
    def head = x
    def tail = xs
  }

  // Without covariance, Nil must be cast to List[B] at use sites.
  case object Nil extends List[Nothing] {
    def isEmpty = true
    def head = ???
    def tail = ???
  }

  // Array-backed sequence; stores elements as boxed AnyRef.
  class ArrayBuffer[A] private (initElems: Array[AnyRef], initLen: Int) extends Seq[A] with FromIterator[ArrayBuffer] {
    def this() = this(new Array[AnyRef](16), 0)
    def this(it: ArrayIterator[A]) = this(it.elems, it.len)
    private var elems: Array[AnyRef] = initElems
    private var len = 0
    def iterator =
      elems.iterator.take(len).asInstanceOf[Iterator[A]]
    override def buildIterator =
      new ArrayIterator(elems, len).asInstanceOf[Iterator[A]]
    def fromIterator[B](it: Iterator[B]): ArrayBuffer[B] =
      new ArrayBuffer(ArrayIterator.fromIterator(it))
    def apply(i: Int) = elems(i).asInstanceOf[A]
    def length = len
  }

  // Extension-style transforms over any Iterable that can rebuild itself (C).
  implicit class IterableTransforms[A, C[X] <: Iterable[X]](val c: Iterable[A] & FromIterator[C]) extends AnyVal {
    def map[B](f: A => B): C[B] = c.fromIterator(c.buildIterator.map(f))
    def flatMap[B](f: A => IterableOnce[B]): C[B] = c.fromIterator(c.buildIterator.flatMap(f(_).buildIterator))
    def ++[B >: A](xs: IterableOnce[B]): C[B] = c.fromIterator(c.buildIterator ++ xs.buildIterator)
    def drop(n: Int): C[A] = c.fromIterator(c.buildIterator.drop(n))
    def head: A = c.iterator.next
    def zip[B](xs: IterableOnce[B]): C[(A, B)] = c.fromIterator(c.iterator.zip(xs.iterator))
  }

  implicit class SeqTransforms[SA, C[X] <: Seq[X]](val c: Seq[SA] & FromIterator[C]) extends AnyVal {
    // Copy into an array back-to-front, then rebuild the collection.
    def reverse: C[SA] = {
      val elems = new Array[AnyRef](c.length)
      var i = elems.length
      val it = c.iterator
      while (it.hasNext) {
        i -= 1
        elems(i) = it.next.asInstanceOf[AnyRef]
      }
      val xzz = c.fromIterator(ArrayIterator[SA](elems, c.length))
      xzz
    }
    def indexWhere(p: SA => Boolean): Int = c.iterator.indexWhere(p)
  }

  case class ListIterator[A](xs: List[A]) extends Iterator[A] {
    private[this] var current: List[A] = xs
    def hasNext = !current.isEmpty
    def next = { val res = current.head; current = current.tail; res }
  }

  case class ArrayIterator[A](elems: Array[AnyRef], len: Int) extends Iterator[A] {
    import ArrayIterator._
    private def elem(i: Int) = elems(i).asInstanceOf[A]
    private var cur = 0
    def hasNext = cur < len
    def next = { val res = elem(cur); cur += 1; res }
    override def foreach(f: A => Unit): Unit =
      for (i <- 0 until len) f(elem(i))
    // Copy-on-write map: only allocates a new array if f changes some element.
    override def map[B](f: A => B): ArrayIterator[B] = {
      var mapped = elems
      for (i <- 0 until len) {
        val x = elem(i)
        val y = widen(f(x))
        if (widen(x) ne y) {
          if (mapped eq elems) mapped = new Array[AnyRef](len)
          mapped(i) = y
        }
      }
      if (mapped eq elems) this.asInstanceOf[ArrayIterator[B]]
      else new ArrayIterator(mapped, len)
    }
    override def flatMap[B](f: A => IterableOnce[B]): ArrayIterator[B] =
      flatten(map(f(_).buildIterator))
    override def ++[B >: A](that: IterableOnce[B]): ArrayIterator[B] = {
      val thatIterator @ ArrayIterator(elems2, len2) = fromIterator(that.iterator)
      if (len == 0) thatIterator
      else if (len2 == 0) this.asInstanceOf[ArrayIterator[B]]
      else {
        val resLen = len + len2
        val resElems = new Array[AnyRef](resLen)
        Array.copy(elems, 0, resElems, 0, len)
        Array.copy(elems2, 0, resElems, len, len2)
        new ArrayIterator(resElems, resLen)
      }
    }
  }

  object ArrayIterator {
    private def widen(x: Any): AnyRef = x.asInstanceOf[AnyRef]
    // Drain any iterator into a growable array, doubling capacity as needed.
    def fromIterator[A](it: Iterator[A]): ArrayIterator[A] = it match {
      case it: ArrayIterator[A] => it
      case _ =>
        var elems = new Array[AnyRef](32)
        var len = 0
        def ensureCapacity() = {
          while (len > elems.length) {
            val newElems = new Array[AnyRef](elems.length * 2)
            Array.copy(elems, 0, newElems, 0, elems.length)
            elems = newElems
          }
        }
        while (it.hasNext) {
          len += 1
          ensureCapacity()
          elems(len - 1) = widen(it.next)
        }
        ArrayIterator(elems, len)
    }
    // Concatenate a collection of iterators into one flat array iterator.
    def flatten[A](its: ArrayIterator[Iterator[A]]): ArrayIterator[A] = {
      var arrayIts = its.map(fromIterator)
      var totalLen = 0
      arrayIts.foreach(totalLen += _.len)
      val allElems = new Array[AnyRef](totalLen)
      var j = 0
      arrayIts.foreach { it =>
        Array.copy(it.elems, 0, allElems, j, it.len)
        j += it.len
      }
      new ArrayIterator(allElems, totalLen)
    }
  }
}
| VladimirNik/dotty | tests/pos/Iter3.scala | Scala | bsd-3-clause | 6,362 |
import java.io.File
import play.api._
import play.api.mvc.Results._
import com.typesafe.config.ConfigFactory
import mvc.{Result, RequestHeader}
object Global extends GlobalSettings {

  /**
   * Layers a mode-specific configuration file (e.g. application.dev.conf)
   * on top of the base configuration before the application starts.
   */
  override def onLoadConfig(config: Configuration, path: File, classloader: ClassLoader, mode: Mode.Mode): Configuration = {
    val modeFileName = s"application.${mode.toString.toLowerCase}.conf"
    val merged = config ++ Configuration(ConfigFactory.load(modeFileName))
    super.onLoadConfig(merged, path, classloader, mode)
  }

  /** Serves the custom 404 page for unmatched routes. */
  override def onHandlerNotFound(request: RequestHeader): Result =
    NotFound(views.html.notFound())
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.statsEstimation
import org.mockito.Mockito.mock
import org.apache.spark.sql.catalyst.analysis.ResolvedNamespace
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeMap, AttributeReference, Literal}
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.connector.catalog.SupportsNamespaces
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.IntegerType
class BasicStatsEstimationSuite extends PlanTest with StatsEstimationTestBase {
  // Shared fixture: one integer column with 10 distinct values in [1, 10].
  val attribute = attr("key")
  val colStat = ColumnStat(distinctCount = Some(10), min = Some(1), max = Some(10),
    nullCount = Some(0), avgLen = Some(4), maxLen = Some(4))
  val plan = StatsTestPlan(
    outputList = Seq(attribute),
    attributeStats = AttributeMap(Seq(attribute -> colStat)),
    rowCount = 10,
    // row count * (overhead + column size)
    size = Some(10 * (8 + 4)))

  test("range") {
    val range = Range(1, 5, 1, None)
    // 4 rows of 8 bytes each, identical with and without CBO.
    val rangeStats = Statistics(sizeInBytes = 4 * 8)
    checkStats(
      range,
      expectedStatsCboOn = rangeStats,
      expectedStatsCboOff = rangeStats)
  }

  test("windows") {
    val windows = plan.window(Seq(min(attribute).as('sum_attr)), Seq(attribute), Nil)
    // Window adds one output column, scaling the child's size accordingly.
    val windowsStats = Statistics(sizeInBytes = plan.size.get * (4 + 4 + 8) / (4 + 8))
    checkStats(
      windows,
      expectedStatsCboOn = windowsStats,
      expectedStatsCboOff = windowsStats)
  }

  test("limit estimation: limit < child's rowCount") {
    val localLimit = LocalLimit(Literal(2), plan)
    val globalLimit = GlobalLimit(Literal(2), plan)
    // LocalLimit's stats is just its child's stats except column stats
    checkStats(localLimit, plan.stats.copy(attributeStats = AttributeMap(Nil)))
    checkStats(globalLimit, Statistics(sizeInBytes = 24, rowCount = Some(2)))
  }

  test("limit estimation: limit > child's rowCount") {
    val localLimit = LocalLimit(Literal(20), plan)
    val globalLimit = GlobalLimit(Literal(20), plan)
    checkStats(localLimit, plan.stats.copy(attributeStats = AttributeMap(Nil)))
    // Limit is larger than child's rowCount, so GlobalLimit's stats is equal to its child's stats.
    checkStats(globalLimit, plan.stats.copy(attributeStats = AttributeMap(Nil)))
  }

  test("limit estimation: limit = 0") {
    val localLimit = LocalLimit(Literal(0), plan)
    val globalLimit = GlobalLimit(Literal(0), plan)
    // Zero-limit plans produce no rows; size is pinned to 1 byte, not 0.
    val stats = Statistics(sizeInBytes = 1, rowCount = Some(0))
    checkStats(localLimit, stats)
    checkStats(globalLimit, stats)
  }

  test("sample estimation") {
    val sample = Sample(0.0, 0.5, withReplacement = false, (math.random * 1000).toLong, plan)
    checkStats(sample, Statistics(sizeInBytes = 60, rowCount = Some(5)))
    // Child doesn't have rowCount in stats
    val childStats = Statistics(sizeInBytes = 120)
    val childPlan = DummyLogicalPlan(childStats, childStats)
    val sample2 =
      Sample(0.0, 0.11, withReplacement = false, (math.random * 1000).toLong, childPlan)
    checkStats(sample2, Statistics(sizeInBytes = 14))
  }

  test("estimate statistics when the conf changes") {
    val expectedDefaultStats =
      Statistics(
        sizeInBytes = 40,
        rowCount = Some(10),
        attributeStats = AttributeMap(Seq(
          AttributeReference("c1", IntegerType)() -> ColumnStat(distinctCount = Some(10),
            min = Some(1), max = Some(10),
            nullCount = Some(0), avgLen = Some(4), maxLen = Some(4)))))
    val expectedCboStats =
      Statistics(
        sizeInBytes = 4,
        rowCount = Some(1),
        attributeStats = AttributeMap(Seq(
          AttributeReference("c1", IntegerType)() -> ColumnStat(distinctCount = Some(10),
            min = Some(5), max = Some(5),
            nullCount = Some(0), avgLen = Some(4), maxLen = Some(4)))))
    // DummyLogicalPlan returns different stats depending on the CBO flag.
    val plan = DummyLogicalPlan(defaultStats = expectedDefaultStats, cboStats = expectedCboStats)
    checkStats(
      plan, expectedStatsCboOn = expectedCboStats, expectedStatsCboOff = expectedDefaultStats)
  }

  test("command should report a dummy stats") {
    val plan = CommentOnNamespace(
      ResolvedNamespace(mock(classOf[SupportsNamespaces]), Array("ns")), "comment")
    checkStats(
      plan,
      expectedStatsCboOn = Statistics.DUMMY,
      expectedStatsCboOff = Statistics.DUMMY)
  }

  /** Check estimated stats when cbo is turned on/off. */
  private def checkStats(
      plan: LogicalPlan,
      expectedStatsCboOn: Statistics,
      expectedStatsCboOff: Statistics): Unit = {
    withSQLConf(SQLConf.CBO_ENABLED.key -> "true") {
      // Invalidate statistics
      plan.invalidateStatsCache()
      assert(plan.stats == expectedStatsCboOn)
    }
    withSQLConf(SQLConf.CBO_ENABLED.key -> "false") {
      plan.invalidateStatsCache()
      assert(plan.stats == expectedStatsCboOff)
    }
  }

  /** Check estimated stats when it's the same whether cbo is turned on or off. */
  private def checkStats(plan: LogicalPlan, expectedStats: Statistics): Unit =
    checkStats(plan, expectedStats, expectedStats)
}
/**
* This class is used for unit-testing the cbo switch, it mimics a logical plan which computes
* a simple statistics or a cbo estimated statistics based on the conf.
*/
private case class DummyLogicalPlan(
    defaultStats: Statistics,
    cboStats: Statistics)
  extends LeafNode {
  override def output: Seq[Attribute] = Nil
  // Return the CBO estimate only when the cost-based optimizer is enabled.
  override def computeStats(): Statistics = if (conf.cboEnabled) cboStats else defaultStats
}
| dbtsai/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/statsEstimation/BasicStatsEstimationSuite.scala | Scala | apache-2.0 | 6,466 |
package fuel
import fuel.util.TRandom
import scala.annotation.tailrec
object Preamble {

  /** Draws uniformly a single element from the sequence */
  implicit class RndApply[T](s: Seq[T]) {
    assert(s.nonEmpty)
    def apply(rnd: TRandom) = s(rnd.nextInt(s.size))
    // Note: with replacement!
    def apply(rnd: TRandom, n: Int) = IndexedSeq.fill(n)(s(rnd.nextInt(s.size)))
    // Lazy variant: returns a thunk that draws on each invocation.
    def fapply(rnd: TRandom) = (() => s(rnd.nextInt(s.size)))
  }

  /** Draws uniformly a single element from the set. */
  implicit class RndApplyS[T](s: Set[T]) {
    assert(s.nonEmpty)
    // Kept for source compatibility; do not use for drawing (it is a one-shot iterator).
    val it = s.iterator
    // BUG FIX: previously this reused the cached `it`, so each call permanently
    // consumed part of the shared iterator — a second draw would start from an
    // advanced position and could throw NoSuchElementException. A fresh iterator
    // per call makes every draw independent and uniform.
    def apply(rnd: TRandom) = s.iterator.drop(rnd.nextInt(s.size)).next
  }

  /** Iverson's bracket */
  implicit def iverson(b: Boolean) = if (b) 1 else 0
}
/** Histogram is basically a non-normalized Distribution */
class Histogram[T](val d: Seq[T])(implicit num: Numeric[T]) {
  assert(d.nonEmpty, "Histogram should contain at least one element")
  assert(d.forall(e => num.gteq(e, num.zero)), "Histogram elements should be non-negative")
  // Total mass; used to scale the uniform draw.
  val sum = d.sum
  assert(num.gt(sum, num.zero), "At least one histogram element must be non-zero")

  /** Draws a random index according to histogram */
  def apply(rng: TRandom): Int = {
    val r = num.toDouble(sum) * rng.nextDouble
    var theSum: Double = 0
    // Inverse-CDF sampling: first index whose cumulative mass reaches r.
    d.indexWhere(e => { theSum += num.toDouble(e); theSum >= r })
  }

  /** Draws multiple indices *without replacement* */
  def apply(rng: TRandom, n: Int): Seq[Int] = {
    assert(n <= d.size)
    // k draws remain; s is the remaining total mass; `remaining` holds
    // still-eligible indices; `selected` accumulates results.
    @tailrec def draw(k: Int, s: Double, remaining: Set[Int], selected: List[Int]): Seq[Int] = k match {
      case 0 => selected
      case _ => {
        val r = s * rng.nextDouble
        var theSum: Double = 0
        val rem = remaining.toSeq
        val iter = rem.iterator
        var last = -1
        // Walk the cumulative mass until it reaches r; `last` is the chosen position in rem.
        do {
          theSum += num.toDouble(d(iter.next))
          last = last + 1
        } while (iter.hasNext && theSum < r)
        // Remove the chosen index: take(last) + the iterator remainder skips rem(last).
        // NOTE(review): this relies on remaining.toSeq and remaining.take(last)
        // enumerating the Set in the same order — confirm for the Set impl in use.
        draw(k - 1, s - num.toDouble(d(last)), remaining.take(last) ++ iter.toSet,
          rem(last) :: selected)
      }
    }
    draw(n, num.toDouble(sum), 0.until(d.size).toSet, List[Int]())
  }
}
/** Convenience factory mirroring the [[Histogram]] constructor. */
object Histogram {
  def apply[T](d: Seq[T])(implicit num: Numeric[T]): Histogram[T] = new Histogram[T](d)
}
/** A Histogram whose weights are expected to already sum (approximately) to 1. */
class Distribution(d: Seq[Double]) extends Histogram(d) {
  // Exact equality with 1.0 is too strict because of floating-point
  // round-off, so accept any sum within 0.001 of 1.0.
  assert(math.abs(sum - 1.0) < 0.001, "Distribution should sum up to 1.0. And this one is: " + d)
}
object Distribution {
  /** Wraps a sequence that already sums (approximately) to 1. */
  def apply(d: Seq[Double]) = new Distribution(d)
  /** Normalizes an arbitrary sequence of non-negative weights into a Distribution. */
  def fromAnything(d: Seq[Double]) = {
    // Hoisted: the original evaluated d.sum once per element (accidental O(n^2)).
    val total = d.sum
    new Distribution(d.map(_ / total))
  }
}
| iwob/fuel | src/main/scala/fuel/Preamble.scala | Scala | mit | 2,607 |
package models
import formats.CommonJsonFormats
import play.api.libs.json.Json
import scala.io
import scala.language.postfixOps
/** A single shortlist entry mapping an item id to its category. */
case class Shortlist(id: String, category: String)

object Shortlist extends CommonJsonFormats {
  val shortlistJsonFile = "/shortlist/shortlist.json"
  /** Id -> category map, parsed once (lazily) from the bundled JSON resource. */
  lazy val shortlists = {
    val source = io.Source.fromInputStream(getClass.getResourceAsStream(shortlistJsonFile))
    // Fix: close the resource stream after reading; the original leaked it.
    val shortlistsJson = try source.mkString finally source.close()
    val shortlistsJsPath = Json.parse(shortlistsJson)
    shortlistsJsPath.as[List[Shortlist]].map(s => s.id -> s.category).toMap
  }
  def getAll = shortlists
  def get(id: String) = shortlists.get(id)
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.entity.test
import akka.http.scaladsl.model.ContentTypes
import common.StreamLogging
import spray.json._
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}
import org.apache.openwhisk.core.WhiskConfig
import org.apache.openwhisk.core.entity.Attachments.{Attached, Inline}
import org.apache.openwhisk.core.entity.ExecManifest.ImageName
import org.apache.openwhisk.core.entity.{
BlackBoxExec,
CodeExecAsAttachment,
CodeExecAsString,
Exec,
ExecManifest,
WhiskAction
}
import scala.collection.mutable
// Exercises JSON (de)serialization of action Exec descriptors: inline code vs.
// attachments, the legacy "jar" property, old manifests without attachment
// support, and blackbox (docker) actions.
@RunWith(classOf[JUnitRunner])
class ExecTests extends FlatSpec with Matchers with StreamLogging with BeforeAndAfterAll {
  behavior of "exec deserialization"
  // Deserialization depends on the runtimes manifest; initialize it up front.
  val config = new WhiskConfig(ExecManifest.requiredProperties)
  ExecManifest.initialize(config)
  override protected def afterAll(): Unit = {
    // Re-initialize with the default config in case a test swapped manifests.
    ExecManifest.initialize(config)
    super.afterAll()
  }
  it should "read existing code string as attachment" in {
    val json = """{
                 | "name": "action_tests_name2",
                 | "_id": "anon-Yzycx8QnIYDp3Tby0Fnj23KcMtH/action_tests_name2",
                 | "publish": false,
                 | "annotations": [],
                 | "version": "0.0.1",
                 | "updated": 1533623651650,
                 | "entityType": "action",
                 | "exec": {
                 | "kind": "nodejs:6",
                 | "code": "foo",
                 | "binary": false
                 | },
                 | "parameters": [
                 | {
                 | "key": "x",
                 | "value": "b"
                 | }
                 | ],
                 | "limits": {
                 | "timeout": 60000,
                 | "memory": 256,
                 | "logs": 10
                 | },
                 | "namespace": "anon-Yzycx8QnIYDp3Tby0Fnj23KcMtH"
                 |}""".stripMargin.parseJson.asJsObject
    val action = WhiskAction.serdes.read(json)
    action.exec should matchPattern { case CodeExecAsAttachment(_, Inline("foo"), None, false) => }
  }
  it should "properly determine binary property" in {
    // The "binary" flag in the json is not trusted: the expected patterns
    // below show it is recomputed from the code content (valid base64 => true).
    val j1 = """{
               | "kind": "nodejs:6",
               | "code": "SGVsbG8gT3BlbldoaXNr",
               | "binary": false
               |}""".stripMargin.parseJson.asJsObject
    Exec.serdes.read(j1) should matchPattern {
      case CodeExecAsAttachment(_, Inline("SGVsbG8gT3BlbldoaXNr"), None, true) =>
    }
    val j2 = """{
               | "kind": "nodejs:6",
               | "code": "while (true)",
               | "binary": false
               |}""".stripMargin.parseJson.asJsObject
    Exec.serdes.read(j2) should matchPattern {
      case CodeExecAsAttachment(_, Inline("while (true)"), None, false) =>
    }
    // When "binary" is absent it is likewise derived from the code content
    // (non-base64 source here, hence the expected false).
    val j3 = """{
               | "kind": "nodejs:6",
               | "code": "while (true)"
               |}""".stripMargin.parseJson.asJsObject
    Exec.serdes.read(j3) should matchPattern {
      case CodeExecAsAttachment(_, Inline("while (true)"), None, false) =>
    }
  }
  it should "read code stored as attachment" in {
    val json = """{
                 | "kind": "java:8",
                 | "code": {
                 | "attachmentName": "foo:bar",
                 | "attachmentType": "application/java-archive",
                 | "length": 32768,
                 | "digest": "sha256-foo"
                 | },
                 | "binary": true,
                 | "main": "hello"
                 |}""".stripMargin.parseJson.asJsObject
    Exec.serdes.read(json) should matchPattern {
      case CodeExecAsAttachment(_, Attached("foo:bar", _, Some(32768), Some("sha256-foo")), Some("hello"), true) =>
    }
  }
  // Legacy documents stored code under a "jar" property instead of "code".
  it should "read code stored as jar property" in {
    val j1 = """{
               | "kind": "nodejs:6",
               | "jar": "SGVsbG8gT3BlbldoaXNr",
               | "binary": false
               |}""".stripMargin.parseJson.asJsObject
    Exec.serdes.read(j1) should matchPattern {
      case CodeExecAsAttachment(_, Inline("SGVsbG8gT3BlbldoaXNr"), None, true) =>
    }
  }
  it should "read existing code string as string with old manifest" in {
    // A manifest without attachment settings makes code deserialize as a
    // plain string (CodeExecAsString) rather than an attachment.
    val oldManifestJson =
      """{
        | "runtimes": {
        | "nodejs": [
        | {
        | "kind": "nodejs:6",
        | "default": true,
        | "image": {
        | "prefix": "openwhisk",
        | "name": "nodejs6action",
        | "tag": "latest"
        | },
        | "deprecated": false,
        | "stemCells": [{
        | "count": 2,
        | "memory": "256 MB"
        | }]
        | }
        | ]
        | }
        |}""".stripMargin.parseJson.compactPrint
    val oldConfig =
      new TestConfig(Map(WhiskConfig.runtimesManifest -> oldManifestJson), ExecManifest.requiredProperties)
    ExecManifest.initialize(oldConfig)
    val j1 = """{
               | "kind": "nodejs:6",
               | "code": "SGVsbG8gT3BlbldoaXNr",
               | "binary": false
               |}""".stripMargin.parseJson.asJsObject
    Exec.serdes.read(j1) should matchPattern {
      case CodeExecAsString(_, "SGVsbG8gT3BlbldoaXNr", None) =>
    }
    //Reset config back
    ExecManifest.initialize(config)
  }
  behavior of "blackbox exec deserialization"
  it should "read existing code string as attachment" in {
    val json = """{
                 | "name": "action_tests_name2",
                 | "_id": "anon-Yzycx8QnIYDp3Tby0Fnj23KcMtH/action_tests_name2",
                 | "publish": false,
                 | "annotations": [],
                 | "version": "0.0.1",
                 | "updated": 1533623651650,
                 | "entityType": "action",
                 | "exec": {
                 | "kind": "blackbox",
                 | "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
                 | "code": "foo",
                 | "binary": false
                 | },
                 | "parameters": [
                 | {
                 | "key": "x",
                 | "value": "b"
                 | }
                 | ],
                 | "limits": {
                 | "timeout": 60000,
                 | "memory": 256,
                 | "logs": 10
                 | },
                 | "namespace": "anon-Yzycx8QnIYDp3Tby0Fnj23KcMtH"
                 |}""".stripMargin.parseJson.asJsObject
    val action = WhiskAction.serdes.read(json)
    action.exec should matchPattern { case BlackBoxExec(_, Some(Inline("foo")), None, false, false) => }
  }
  it should "properly determine binary property" in {
    val j1 = """{
               | "kind": "blackbox",
               | "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
               | "code": "SGVsbG8gT3BlbldoaXNr",
               | "binary": false
               |}""".stripMargin.parseJson.asJsObject
    Exec.serdes.read(j1) should matchPattern {
      case BlackBoxExec(_, Some(Inline("SGVsbG8gT3BlbldoaXNr")), None, false, true) =>
    }
    val j2 = """{
               | "kind": "blackbox",
               | "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
               | "code": "while (true)",
               | "binary": false
               |}""".stripMargin.parseJson.asJsObject
    Exec.serdes.read(j2) should matchPattern {
      case BlackBoxExec(_, Some(Inline("while (true)")), None, false, false) =>
    }
    //Empty code should resolve as None
    val j3 = """{
               | "kind": "blackbox",
               | "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
               | "code": " "
               |}""".stripMargin.parseJson.asJsObject
    Exec.serdes.read(j3) should matchPattern {
      case BlackBoxExec(_, None, None, false, false) =>
    }
    val j4 = """{
               | "kind": "blackbox",
               | "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
               | "code": {
               | "attachmentName": "foo:bar",
               | "attachmentType": "application/octet-stream",
               | "length": 32768,
               | "digest": "sha256-foo"
               | },
               | "binary": true,
               | "main": "hello"
               |}""".stripMargin.parseJson.asJsObject
    Exec.serdes.read(j4) should matchPattern {
      case BlackBoxExec(_, Some(Attached("foo:bar", _, Some(32768), Some("sha256-foo"))), Some("hello"), false, true) =>
    }
  }
  behavior of "blackbox exec serialization"
  it should "serialize with inline attachment" in {
    val bb = BlackBoxExec(
      ImageName.fromString("docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1").get,
      Some(Inline("foo")),
      None,
      false,
      false)
    val js = Exec.serdes.write(bb)
    val js2 = """{
                | "kind": "blackbox",
                | "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
                | "binary": false,
                | "code": "foo"
                |}""".stripMargin.parseJson.asJsObject
    js shouldBe js2
  }
  it should "serialize with attached attachment" in {
    val bb = BlackBoxExec(
      ImageName.fromString("docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1").get,
      Some(Attached("foo", ContentTypes.`application/octet-stream`, Some(42), Some("sha1-42"))),
      None,
      false,
      true)
    val js = Exec.serdes.write(bb)
    val js2 = """{
                | "kind": "blackbox",
                | "image": "docker-custom.com/openwhisk-runtime/magic/nodejs:0.0.1",
                | "binary": true,
                | "code": {
                | "attachmentName": "foo",
                | "attachmentType": "application/octet-stream",
                | "length": 42,
                | "digest": "sha1-42"
                | }
                |}""".stripMargin.parseJson.asJsObject
    js shouldBe js2
  }
  // Minimal WhiskConfig that serves properties from an in-memory map, used to
  // swap in alternative runtimes manifests for individual tests.
  private class TestConfig(val props: Map[String, String], requiredProperties: Map[String, String])
      extends WhiskConfig(requiredProperties) {
    override protected def getProperties() = mutable.Map(props.toSeq: _*)
  }
}
| jasonpet/openwhisk | tests/src/test/scala/org/apache/openwhisk/core/entity/test/ExecTests.scala | Scala | apache-2.0 | 11,278 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2
import uk.gov.hmrc.ct.CATO04
import uk.gov.hmrc.ct.box.{CtBigDecimal, CtBoxIdentifier, Linked}
/** CT600 (v2) box B64: "Marginal Rate Relief", carried as a decimal amount. */
case class B64(value: BigDecimal) extends CtBoxIdentifier("Marginal Rate Relief") with CtBigDecimal

/** B64 is derived 1:1 from the calculated CATO04 value. */
object B64 extends Linked[CATO04, B64] {
  override def apply(source: CATO04): B64 = B64(source.value)
}
| ahudspith-equalexperts/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v2/B64.scala | Scala | apache-2.0 | 941 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.torch
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.nn.TemporalConvolution
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.RandomGenerator._
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}
import scala.util.Random
// Compares BigDL's TemporalConvolution against Torch's nn.TemporalConvolution
// by running the equivalent Lua code; cancelled when Torch is not installed.
class TemporalConvolutionSpec extends FlatSpec with BeforeAndAfter with Matchers {
  before {
    if (!TH.hasTorch()) {
      cancel("Torch is not installed")
    }
  }
  "A TemporalConvolution with 2d input" should "generate correct output" in {
    val seed = 100
    RNG.setSeed(seed)
    val inputFrameSize = 10
    val outputFrameSize = 8
    val kW = 5
    val dW = 2
    val layer = TemporalConvolution[Double](inputFrameSize, outputFrameSize, kW, dW)
    Random.setSeed(seed)
    // Unbatched input: 100 frames of size 10. The gradOutput shape 48 x 8
    // presumably corresponds to the conv output length — TODO confirm.
    val input = Tensor[Double](100, 10).apply1(e => Random.nextDouble())
    val gradOutput = Tensor[Double](48, 8).apply1(e => Random.nextDouble())
    val output = layer.updateOutput(input)
    val gradInput = layer.updateGradInput(input, gradOutput)
    // Equivalent Torch/Lua program executed for reference values.
    val code = "torch.manualSeed(" + seed + ")\n" +
      s"layer = nn.TemporalConvolution($inputFrameSize, $outputFrameSize, $kW, $dW)\n" +
      "weight = layer.weight\n" +
      "bias = layer.bias \n" +
      "output = layer:forward(input) \n" +
      "gradInput = layer:backward(input, gradOutput) "
    val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
      Array("weight", "bias", "output", "gradInput"))
    val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
    val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
    val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
    val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
    val weight = layer.weight
    val bias = layer.bias
    // NOTE(review): `x should be equals y` does NOT compare x with y in
    // ScalaTest — it calls `.equals` on the result of `x should be`, so the
    // following four lines can never fail. Consider `shouldEqual` or an
    // epsilon-based tensor comparison to make these checks effective.
    weight should be equals luaWeight
    bias should be equals luaBias
    output should be equals luaOutput
    gradInput should be equals luaGradInput
  }
  "A TemporalConvolution" should "generate correct output" in {
    val seed = 100
    RNG.setSeed(seed)
    val inputFrameSize = 10
    val outputFrameSize = 8
    val kW = 5
    val dW = 2
    val layer = TemporalConvolution[Double](inputFrameSize, outputFrameSize, kW, dW)
    Random.setSeed(seed)
    // Batched input: batch of 10, each with 100 frames of size 10.
    val input = Tensor[Double](10, 100, 10).apply1(e => Random.nextDouble())
    val gradOutput = Tensor[Double](10, 48, 8).apply1(e => Random.nextDouble())
    val output = layer.updateOutput(input)
    val gradInput = layer.updateGradInput(input, gradOutput)
    val code = "torch.manualSeed(" + seed + ")\n" +
      s"layer = nn.TemporalConvolution($inputFrameSize, $outputFrameSize, $kW, $dW)\n" +
      "weight = layer.weight\n" +
      "bias = layer.bias \n" +
      "output = layer:forward(input) \n" +
      "gradInput = layer:backward(input, gradOutput) "
    val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
      Array("weight", "bias", "output", "gradInput"))
    val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
    val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
    val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
    val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
    val weight = layer.weight
    val bias = layer.bias
    // NOTE(review): same no-op assertion pattern as above (`should be equals`).
    weight should be equals luaWeight
    bias should be equals luaBias
    output should be equals luaOutput
    gradInput should be equals luaGradInput
  }
  "A TemporalConvolution" should "be good in gradient check for input" in {
    val seed = 100
    RNG.setSeed(seed)
    val layer = TemporalConvolution[Double](10, 8, 5, 2)
    val input = Tensor[Double](10, 100, 10).apply1(e => Random.nextDouble())
    // Numerical gradient check of dLoss/dInput with perturbation 1e-4, tolerance 1e-3.
    val checker = new GradientChecker(1e-4)
    checker.checkLayer(layer, input, 1e-3) should be(true)
  }
  "A TemporalConvolution" should "be good in gradient check for weight" in {
    val seed = 100
    RNG.setSeed(seed)
    val layer = TemporalConvolution[Double](10, 8, 5, 2)
    val input = Tensor[Double](10, 100, 10).apply1(e => Random.nextDouble())
    // Numerical gradient check of dLoss/dWeight with perturbation 1e-4, tolerance 1e-3.
    val checker = new GradientChecker(1e-4)
    checker.checkWeight(layer, input, 1e-3) should be(true)
  }
}
| JerryYanWan/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/torch/TemporalConvolutionSpec.scala | Scala | apache-2.0 | 4,913 |
/* Copyright 2017-19, Emmanouil Antonios Platanios. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.platanios.tensorflow.api.ops.data
import org.platanios.tensorflow.api.core.client.Session
import org.platanios.tensorflow.api.core.{Graph, Shape}
import org.platanios.tensorflow.api.implicits.Implicits._
import org.platanios.tensorflow.api.ops.Op
import org.platanios.tensorflow.api.ops.math.Math
import org.platanios.tensorflow.api.tensors.Tensor
import org.platanios.tensorflow.api.utilities.using
import org.junit.Test
import org.scalatestplus.junit.JUnitSuite
/**
* @author Emmanouil Antonios Platanios
*/
class FilterDatasetSuite extends JUnitSuite {
  @Test def testFilterRange(): Unit = using(Graph()) { graph =>
    Op.createWith(graph) {
      // Keep only x with x % 3 != 2, i.e. drop 2, 5, 8, ... from [0, 100).
      val dataset = Data.datasetFromRange(0, 100).filter(x => {
        Math.notEqual(Math.mod(x, 3L), 2L)
      })
      val iterator = dataset.createInitializableIterator()
      val initOp = iterator.initializer
      val nextOutput = iterator.next()
      // Elements of a range dataset are scalars.
      assert(nextOutput.shape == Shape.scalar())
      val session = Session()
      session.run(targets = initOp)
      // First five surviving values of the filtered range.
      assert(session.run(fetches = nextOutput) == (0L: Tensor[Long]))
      assert(session.run(fetches = nextOutput) == (1L: Tensor[Long]))
      assert(session.run(fetches = nextOutput) == (3L: Tensor[Long]))
      assert(session.run(fetches = nextOutput) == (4L: Tensor[Long]))
      assert(session.run(fetches = nextOutput) == (6L: Tensor[Long]))
    }
  }
}
| eaplatanios/tensorflow_scala | modules/api/src/test/scala/org/platanios/tensorflow/api/ops/data/FilterDatasetSuite.scala | Scala | apache-2.0 | 2,033 |
package eu.pepot.eu.spark.inputsplitter
import java.util.concurrent.{Executors, TimeUnit}
import eu.pepot.eu.spark.inputsplitter.common.config.Config
import eu.pepot.eu.spark.inputsplitter.common.file._
import eu.pepot.eu.spark.inputsplitter.common.file.matcher.FilesMatcher
import eu.pepot.eu.spark.inputsplitter.common.splits.{Arrow, Metadata, SplitDetails, SplitsDir}
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.mapreduce
import org.apache.spark.SparkContext
import org.apache.log4j.Logger
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, Future}
import scala.reflect.ClassTag
class SplitWriter(
  config: Config = Config()
) {

  // Pool on which the per-big-file save jobs are submitted.
  implicit val executionContext = ExecutionContext.fromExecutorService(Executors.newCachedThreadPool())

  val logger = Logger.getLogger(this.getClass)

  /**
   * Reads inputDir, splits every "big" file (per config.splitCondition) into
   * smaller part files under splitsDir using the new Hadoop API, and finally
   * dumps the metadata (big-file -> part-file mappings, bigs and smalls).
   */
  def writeNewAPI[
    K: ClassTag,
    V: ClassTag,
    I <: mapreduce.InputFormat[K, V] : ClassTag,
    O <: mapreduce.OutputFormat[K, V] : ClassTag
  ](
    inputDir: String,
    splitsDir: String
  )(implicit sc: SparkContext): Unit = {
    val splitsDirO = SplitsDir(splitsDir)
    val splitDetails = asRddNew[K, V, I, O](inputDir)
    // One asynchronous Hadoop write per big input file.
    val futureResults = splitDetails.arrows.map { arrow =>
      val outputDirectory = splitsDirO.getDataPathWith(arrow.big.asPath().getName)
      val result = arrow.rdd.repartition(arrow.getNroExpectedSplits(config.bytesPerSplit))
      Future(result.saveAsNewAPIHadoopFile[O](outputDirectory))
    }
    waitForFutures(futureResults)
    // Associate every big file with the part files that were written for it.
    val mappings = splitDetails.arrows.flatMap { arrow =>
      val outputDirectory = splitsDirO.getDataPathWith(arrow.big.asPath().getName)
      val outputPartitionFiles = FileLister.listFiles(outputDirectory).files
      outputPartitionFiles.map(outputPartitionFile => (arrow.big, outputPartitionFile))
    }.toSet
    implicit val fs = FileSystem.get(sc.hadoopConfiguration)
    Metadata.dump(Metadata(Mappings(mappings), splitDetails.metadata.bigs, splitDetails.metadata.smalls), splitsDirO)
  }

  /**
   * Blocks until every write future completes, failing fast with the first
   * error encountered.
   *
   * Fix: Await.result already rethrows a failed future's exception, so the
   * `f.onFailure { case failure => throw failure }` callback the original
   * registered afterwards was dead code — and an exception thrown inside an
   * onFailure callback is swallowed by the execution context anyway, never
   * reaching the caller.
   */
  def waitForFutures(futureResults: Seq[Future[Unit]]): Unit = {
    futureResults.foreach { f =>
      Await.result(f, Duration(config.rddWriteTimeoutSeconds, TimeUnit.SECONDS))
    }
  }

  /** Builds one RDD per big file; small files are only recorded in the metadata. */
  private[inputsplitter] def asRddNew[
    K: ClassTag,
    V: ClassTag,
    I <: mapreduce.InputFormat[K, V] : ClassTag,
    O <: mapreduce.OutputFormat[K, V] : ClassTag
  ](
    inputDir: String
  )(implicit sc: SparkContext): SplitDetails[K, V] = {
    val (bigs, smalls) = determineBigsSmalls[K, V](inputDir)
    val rdds = bigs.files.map(f => Arrow(f, sc.newAPIHadoopFile[K, V, I](f.path)))
    SplitDetails[K, V](rdds.toSeq, Metadata(Mappings(Set()), bigs, smalls))
  }

  /** Partitions the input listing into (bigs, smalls) using the configured split condition. */
  private def determineBigsSmalls[
    K: ClassTag,
    V: ClassTag
  ](
    inputDir: String
  )(implicit sc: SparkContext): (FileDetailsSet, FileDetailsSet) = {
    val input = FileLister.listFiles(inputDir)
    logger.info("Using input: " + inputDir)
    val bigs = FilesMatcher.matches(input, config.splitCondition)
    logger.info("Detected bigs from input: " + bigs)
    val smalls = FileDetailsSetSubstractor.substract(input, bigs)
    logger.info("Detected smalls from input: " + smalls)
    (bigs, smalls)
  }
}
| mauriciojost/spark-input-splitter | src/main/scala/eu/pepot/eu/spark/inputsplitter/SplitWriter.scala | Scala | apache-2.0 | 3,265 |
package hulk.http
import akka.http.scaladsl.model._
import akka.stream.ActorMaterializer
import akka.util.ByteString
import hulk.http.response.{ResponseFormat, HttpResponseBodyWriter, HttpResponseBody}
import scala.collection.immutable.Seq
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
/**
* Created by reweber on 18/12/2015
*/
/** The response half of Hulk's HTTP abstraction: status, headers and body. */
trait HulkHttpResponse {
  val statusCode: StatusCode
  val httpHeader: Seq[HttpHeader]
  val body: HttpResponseBody
  // Present only when this response was converted from an underlying Akka response.
  private[hulk] val rawHttpResponse: Option[HttpResponse]
}
object HulkHttpResponse {
  /** Converts to Akka HTTP's representation (rawHttpResponse is dropped). */
  implicit private[hulk] def toAkkaHttpResponse(httpResponse: HulkHttpResponse): HttpResponse = {
    HttpResponse(httpResponse.statusCode, httpResponse.httpHeader, httpResponse.body)
  }
  /**
   * Converts from Akka HTTP by strictifying (buffering) the entity within
   * `timeout`, which defaults to 1 second when the caller supplies none.
   */
  implicit private[hulk] def fromAkkaHttpResponse(httpResponse: HttpResponse)(implicit actorMaterializer: ActorMaterializer, timeout: FiniteDuration = 1 seconds): Future[HulkHttpResponse] = {
    val contentType = httpResponse.entity.contentType
    val body = httpResponse.entity.toStrict(timeout).map(_.data)
    val httpResponseBody = body.map(new HttpResponseBody(contentType, _))
    val response = httpResponseBody.map(body =>
      Response(httpResponse.status, body, httpResponse.headers, Some(httpResponse))
    )
    response
  }
}
// Private concrete implementation backing all the status-code factory objects below.
private case class Response(statusCode: StatusCode, body: HttpResponseBody,
                            httpHeader: Seq[HttpHeader], rawHttpResponse: Option[HttpResponse] = None) extends HulkHttpResponse
/** Marker response format for responses without a body. */
protected trait Empty extends ResponseFormat
/** Writes an empty body with no content type; the default body writer below. */
protected class EmptyHttpResponseWriter extends HttpResponseBodyWriter[Empty] {
  override def apply(): HttpResponseBody = HttpResponseBody(ContentTypes.NoContentType, ByteString.empty)
}
// Factory objects, one per HTTP status code. Each builds a response with the
// given body writer (default: empty body) and headers; the numeric literal is
// the status code of the produced response.
object Ok {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(200, bodyWriter(), httpHeader)
}
object Created {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(201, bodyWriter(), httpHeader)
}
object Accepted {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(202, bodyWriter(), httpHeader)
}
// NOTE(review): name has a typo — RFC 7231 calls 203 "Non-Authoritative
// Information". Renaming would break callers, so it is kept as-is.
object NonAuthoritiveInformation {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(203, bodyWriter(), httpHeader)
}
object NoContent {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(204, bodyWriter(), httpHeader)
}
object ResetContent {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(205, bodyWriter(), httpHeader)
}
object PartialContent {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(206, bodyWriter(), httpHeader)
}
object MultipleChoices {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(300, bodyWriter(), httpHeader)
}
object MovedPermanently {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(301, bodyWriter(), httpHeader)
}
object Found {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(302, bodyWriter(), httpHeader)
}
object SeeOther {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(303, bodyWriter(), httpHeader)
}
object NotModified {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(304, bodyWriter(), httpHeader)
}
object UseProxy {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(305, bodyWriter(), httpHeader)
}
object TemporaryRedirect {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(307, bodyWriter(), httpHeader)
}
object BadRequest {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(400, bodyWriter(), httpHeader)
}
object Unauthorized {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(401, bodyWriter(), httpHeader)
}
object PaymentRequired {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(402, bodyWriter(), httpHeader)
}
object Forbidden {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(403, bodyWriter(), httpHeader)
}
object NotFound {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(404, bodyWriter(), httpHeader)
}
object MethodNotAllowed {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(405, bodyWriter(), httpHeader)
}
object NotAcceptable {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(406, bodyWriter(), httpHeader)
}
object ProxyAuthenticationRequired {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(407, bodyWriter(), httpHeader)
}
object RequestTimeout {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(408, bodyWriter(), httpHeader)
}
object Conflict {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(409, bodyWriter(), httpHeader)
}
object Gone {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(410, bodyWriter(), httpHeader)
}
object LengthRequired {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(411, bodyWriter(), httpHeader)
}
object PreconditionFailed {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(412, bodyWriter(), httpHeader)
}
// NOTE(review): 413 is "Payload Too Large" (RFC 7231) / "Request Entity Too
// Large"; the name here describes the response body instead. Kept for
// backward compatibility.
object HttpResponseBodyTooLarge {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(413, bodyWriter(), httpHeader)
}
object RequestUriTooLong {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(414, bodyWriter(), httpHeader)
}
object UnsupportedMediaType {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(415, bodyWriter(), httpHeader)
}
object RequestRangeNotSatisfiable {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(416, bodyWriter(), httpHeader)
}
object ExpectationFailed {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(417, bodyWriter(), httpHeader)
}
object PreconditionRequired {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(428, bodyWriter(), httpHeader)
}
object TooManyRequests {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(429, bodyWriter(), httpHeader)
}
object RequestHeaderFieldsTooLarge {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(431, bodyWriter(), httpHeader)
}
object InternalServerError {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(500, bodyWriter(), httpHeader)
}
object NotImplemented {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(501, bodyWriter(), httpHeader)
}
object BadGateway {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(502, bodyWriter(), httpHeader)
}
object ServiceUnavailable {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(503, bodyWriter(), httpHeader)
}
object GatewayTimeout {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(504, bodyWriter(), httpHeader)
}
object HttpVersionNotSupported {
  def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(505, bodyWriter(), httpHeader)
}
object NetworkAuthenticationRequired {
def apply[A <: ResponseFormat](bodyWriter: HttpResponseBodyWriter[A] = new EmptyHttpResponseWriter(), httpHeader: Seq[HttpHeader] = Seq()): HulkHttpResponse = Response(511, bodyWriter(), httpHeader)
} | reneweb/hulk | framework/src/main/scala/hulk/http/HulkHttpResponse.scala | Scala | apache-2.0 | 11,402 |
/*
* Copyright (c) 2013-2014 Telefónica Investigación y Desarrollo S.A.U.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.tid.cosmos.api.auth.oauth2
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import com.typesafe.config.{ConfigException, Config}
import dispatch.url
/** OAuth client for authentication and user profile access. */
/** Base class for OAuth2 clients: authentication and user profile access.
  *
  * Configuration is read from the provider-scoped config subtree; all error
  * messages qualify keys as `auth.<id>.<key>` for the operator's benefit.
  *
  * @param id     provider identifier, used in config-error messages
  * @param config provider-scoped configuration
  */
private[oauth2] abstract class AbstractOAuthProvider(
    override val id: String,
    config: Config
  ) extends OAuthProvider {

  /** Human-readable provider name (mandatory key `name`). */
  override val name = stringConfig("name")

  /** Optional URL of the provider's sign-up page (key `signup.url`). */
  override val newAccountUrl: Option[String] = optionalString("signup.url")

  /** Fetches the profile resource with the given access token and parses it. */
  override def requestUserProfile(token: String): Future[OAuthUserProfile] =
    requestProfileResource(token).map(profileParser.parse)

  /** Password of the external admin, when `externalAdmin.enabled` is true.
    *
    * @throws IllegalArgumentException if external admin is enabled but
    *                                  `externalAdmin.password` is missing
    */
  override lazy val adminPassword: Option[String] = {
    val isEnabled = optionalBoolean("externalAdmin.enabled").getOrElse(false)
    if (!isEnabled) None
    else Some(optionalString("externalAdmin.password").getOrElse(
      throw new IllegalArgumentException(
        s"auth.$id.externalAdmin.password is mandatory when externalAdmin is enabled")))
  }

  /** OAuth client ID */
  protected val clientId = stringConfig("client.id")

  /** OAuth client secret */
  protected val clientSecret = stringConfig("client.secret")

  /** Request the profile resource contents. */
  protected def requestProfileResource(token: String): Future[String]

  /** Parser turning the raw profile resource into an OAuthUserProfile. */
  protected val profileParser: ProfileParser

  /** Get a required configuration key
    * @param key Configuration key (relative to the OAuth provider conf)
    * @return An string
    * @throws IllegalArgumentException If the key is missing
    */
  protected def stringConfig(key: String) = try {
    config.getString(key)
  } catch {
    case ex: ConfigException.Missing =>
      throw new IllegalArgumentException(s"Missing required configuration key auth.$id.$key", ex)
  }

  protected def authorizationUrl = urlFromConfig("auth.url")

  protected def apiUrl = urlFromConfig("api.url")

  /** Reads an optional string key; None when the key is absent. */
  private def optionalString(key: String): Option[String] =
    try Some(config.getString(key)) catch { case _: ConfigException.Missing => None }

  /** Reads an optional boolean key; None when the key is absent. */
  private def optionalBoolean(key: String): Option[Boolean] =
    try Some(config.getBoolean(key)) catch { case _: ConfigException.Missing => None }

  private def urlFromConfig(key: String) = url(stringConfig(key))
}
| telefonicaid/fiware-cosmos-platform | cosmos-api/app/es/tid/cosmos/api/auth/oauth2/AbstractOAuthProvider.scala | Scala | apache-2.0 | 2,808 |
package net.sansa_stack.query.spark.graph.jena.expression
import net.sansa_stack.query.spark.graph.jena.util.Result
import org.apache.jena.graph.Node
import org.apache.jena.sparql.expr._
/**
* Class that evaluate solution based on expression. Support expression with FILTER operators.
* @param e Expression of the filter.
*
* @author Zhe Wang
*/
/** Evaluates a SPARQL FILTER comparison against variable bindings.
  *
  * The left operand is expected to be a variable; the right operand may be a
  * constant, another variable, or an addition/subtraction of one variable and
  * one constant (E_Add / E_Subtract).
  *
  * @param e the binary comparison expression taken from the FILTER clause
  * @author Zhe Wang
  */
class Compare(e: ExprFunction2) extends Filter {

  private val tag = "Filter Comparision"

  private val left = e.getArg1
  private val right = e.getArg2

  /** Evaluates the comparison for one solution mapping.
    *
    * Assumes `left` (and any variable appearing on the right) is bound in
    * `solution` — an unbound variable throws NoSuchElementException;
    * TODO confirm callers guarantee the binding.
    */
  override def evaluate(solution: Map[Node, Node]): Boolean = {
    val leftValue = solution(left.asVar().asNode())
    if (right.isConstant) {
      compare(leftValue, right)
    } else if (right.isFunction) {
      // Only (variable op constant) and (constant op variable) forms of
      // addition and subtraction are supported; any other function type
      // throws a MatchError here (unchanged from the original behaviour).
      right match {
        case f: E_Add =>
          compare(leftValue, NodeValue.makeDouble(arithmetic(f.getArg1, f.getArg2, solution)(_ + _)))
        case f: E_Subtract =>
          compare(leftValue, NodeValue.makeDouble(arithmetic(f.getArg1, f.getArg2, solution)(_ - _)))
      }
    } else if (right.isVariable) {
      // Bug fix: this branch previously looked up the right-hand binding,
      // discarded the value and always returned false. Compare the two bound
      // nodes with the underlying comparison operator instead.
      val rightValue = solution(right.asVar().asNode())
      e.eval(NodeValue.makeNode(leftValue), NodeValue.makeNode(rightValue)).toString.equals("true")
    } else {
      false
    }
  }

  /** Applies `op` to the numeric values of a (variable, constant) or
    * (constant, variable) argument pair, resolving the variable in `solution`. */
  private def arithmetic(a: Expr, b: Expr, solution: Map[Node, Node])(op: (Double, Double) => Double): Double = {
    if (a.isVariable) {
      op(solution(a.asVar.asNode).getLiteralValue.toString.toDouble, b.getConstant.toString.toDouble)
    } else {
      op(a.getConstant.toString.toDouble, solution(b.asVar.asNode).getLiteralValue.toString.toDouble)
    }
  }

  /** Evaluation over a Result row is not implemented yet; kept as an
    * always-true no-op so filtering of Result rows remains a pass-through. */
  override def evaluate(solution: Result[Node]): Boolean = {
    true
  }

  override def getTag: String = { tag }

  def getLeft: Expr = { left }

  def getRight: Expr = { right }

  /** Compares a bound node with a constant, dispatching on the constant's
    * datatype (date, integer, float, double or IRI) and delegating to the
    * comparison operator's own eval. Unknown datatypes evaluate to false. */
  private def compare(leftValue: Node, right: Expr): Boolean = {
    if (right.getConstant.isDate) { // compare date
      e.eval(
        NodeValue.makeDate(leftValue.getLiteralLexicalForm),
        NodeValue.makeDate(right.getConstant.getDateTime)).toString.equals("true")
    } else if (right.getConstant.isInteger) { // compare integer
      e.eval(
        NodeValue.makeInteger(leftValue.getLiteralLexicalForm),
        NodeValue.makeInteger(right.getConstant.getInteger)).toString.equals("true")
    } else if (right.getConstant.isFloat) { // compare float
      e.eval(
        NodeValue.makeFloat(leftValue.getLiteralLexicalForm.toFloat),
        NodeValue.makeFloat(right.getConstant.getFloat)).toString.equals("true")
    } else if (right.getConstant.isDouble) { // compare double
      e.eval(
        NodeValue.makeDouble(leftValue.getLiteralLexicalForm.toDouble),
        NodeValue.makeDouble(right.getConstant.getDouble)).toString.equals("true")
    } else if (right.getConstant.isIRI) { // compare URI
      e.eval(NodeValue.makeNode(leftValue), NodeValue.makeNode(right.getConstant.asNode)).toString.equals("true")
    } else {
      false
    }
  }
}
| SANSA-Stack/SANSA-RDF | sansa-query/sansa-query-spark/src/main/scala/net/sansa_stack/query/spark/graph/jena/expression/Compare.scala | Scala | apache-2.0 | 3,183 |
/*
Copyright 2013 Lance Gatlin
Author: lance.gatlin@gmail.com
This file is part of org.s_mach library.
org.s_mach library is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
org.s_mach library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with org.s_mach library. If not, see <http://www.gnu.org/licenses/>.
*/
package org.s_mach
import scala.collection.immutable.Seq
/** Type aliases and constructors specializing [[StateMachine]] to
  * transformers: state machines whose final value type is Unit, so they exist
  * purely to map a stream of inputs I to a stream of outputs O. */
package object Transformer {

  /** A transformer transition: a state-machine transition with Unit value. */
  type Transition[I,O] = StateMachine.Transition[I,O,Unit]
  object Transition {
    /** Builds the transition matching the kind of `state`.
      * Note: continuations carry no overflow, so `overflow` is dropped there. */
    def apply[I,O](
      state : State[I,O],
      output : Seq[O] = Seq.empty,
      overflow : Seq[I] = Seq.empty,
      metadata : Seq[Any] = Seq.empty
    ) : Transition[I,O] = state.fold(
      ifSuccess = q => new Succeed(state=q, output=output, overflow=overflow, metadata=metadata),
      ifHalted = q => new Halt(state=q, output=output, overflow=overflow, metadata=metadata),
      ifContinuation = q => new Continue(state=q, output=output, metadata=metadata)
    )
  }

  type DoneTransition[I,O] = StateMachine.DoneTransition[I,O,Unit]

  type State[I,O] = StateMachine.State[I,O,Unit]
  object State {
    type Done[I,O] = StateMachine.State.Done[I,O,Unit]
    type Continuation[I,O] = StateMachine.State.Continuation[I,O,Unit]
    type Success[I,O] = StateMachine.State.Success[I,O,Unit]
    val Success = StateMachine.State.Success
    type Halted[I,O] = StateMachine.State.Halted[I,O,Unit]
    val Halted = StateMachine.State.Halted
  }

  type Continue[I,O] = StateMachine.Continue[I,O,Unit]
  object Continue {
    /** Continue in `state`, emitting `output`. */
    def apply[I,O](
      state : State.Continuation[I,O],
      output : Seq[O] = Seq.empty,
      metadata : Seq[Any] = Seq.empty
    ) = StateMachine.Continue[I,O,Unit](state=state, output=output, metadata=metadata)
  }

  type Succeed[I,O] = StateMachine.Succeed[I,O,Unit]
  object Succeed {
    /** Terminate successfully; the final value is always Unit. */
    def apply[I,O](
      output : Seq[O] = Seq.empty,
      overflow : Seq[I] = Seq.empty,
      metadata : Seq[Any] = Seq.empty
    ) = StateMachine.Succeed[I,O,Unit](value=(), output=output, overflow=overflow, metadata=metadata)
  }

  type Halt[I,O] = StateMachine.Halt[I,O,Unit]
  object Halt {
    /** Halt with the given issues, optionally recoverable via `optRecover`. */
    def apply[I,O](
      issues : Seq[Issue],
      optRecover : Option[() => Transition[I,O]] = None,
      output : Seq[O] = Seq.empty,
      overflow : Seq[I] = Seq.empty,
      metadata : Seq[Any] = Seq.empty
    ) = StateMachine.Halt[I,O,Unit](issues=issues, optRecover=optRecover, output=output, overflow=overflow, metadata=metadata)
    /** Halt with a single warning that can be recovered from. */
    def warn[I,O](
      message : String,
      cause : Option[Throwable] = None,
      recover : () => Transition[I,O],
      output : Seq[O] = Seq.empty,
      overflow : Seq[I] = Seq.empty,
      metadata : Seq[Any] = Seq.empty
    ) = StateMachine.Halt.warn[I,O,Unit](message=message, cause=cause, recover=recover, output=output, overflow=overflow, metadata=metadata)
    /** Halt with a single error that can be recovered from. */
    def error[I,O](
      message : String,
      cause : Option[Throwable] = None,
      recover : () => Transition[I,O],
      output : Seq[O] = Seq.empty,
      overflow : Seq[I] = Seq.empty,
      metadata : Seq[Any] = Seq.empty
    ) = StateMachine.Halt.error[I,O,Unit](message=message, cause=cause, recover=recover, output=output, overflow=overflow, metadata=metadata)
    /** Halt with a single fatal (unrecoverable) issue.
      * Bug fix: `output` was previously accepted but silently dropped when
      * forwarding to StateMachine.Halt.fatal; it is now forwarded like in
      * warn/error above. */
    def fatal[I,O](
      message : String,
      cause : Option[Throwable] = None,
      output : Seq[O] = Seq.empty,
      overflow : Seq[I] = Seq.empty,
      metadata : Seq[Any] = Seq.empty
    ) = StateMachine.Halt.fatal[I,O,Unit](message=message, cause=cause, output=output, overflow=overflow, metadata=metadata)
  }

  // TODO: test me
  /** Pass-through transformer that applies the side effect `f` to each input. */
  def tee[A](f: A => Unit) : Transformer[A,A] = new MapTransformer[A,A]({ a => f(a);a })
  /** Pass-through transformer that applies `f` when end-of-input is reached. */
  def teeEOI[A](f: EndOfInput => Unit) : Transformer[A,A] = new EOITransformer[A]({ eoi => f(eoi);Nil })
  /** Pass-through transformer applying the partial side effect `f` to each Input. */
  def collectTee[A](f: PartialFunction[Input[A],Unit]) : Transformer[A,A] = new MapInputTransformer[A,A]({ case i@Chunk(xs) => f(i);xs case i@EndOfInput => f(i);Nil })
  /** Element-wise mapping transformer. */
  def map[A,B](f: A => B) : Transformer[A,B] = new MapTransformer[A,B](f)
  /** Input-wise mapping transformer driven by a partial function. */
  def collect[A,B](f: PartialFunction[Input[A],Seq[B]]) : Transformer[A,B] = new MapInputTransformer[A,B](f)
}
| lancegatlin/s_mach.fsm | src/main/scala/org/s_mach/Transformer/package.scala | Scala | gpl-3.0 | 5,275 |
package sampler.cluster.abc.algorithm
import org.mockito.Matchers.anyObject
import org.mockito.Mockito.when
import org.scalatest.FreeSpec
import org.scalatest.Matchers
import org.scalatest.mock.MockitoSugar
import akka.event.LoggingAdapter
import sampler.cluster.abc.Scored
import sampler.cluster.abc.Weighted
import sampler.cluster.abc.actor.LoggingAdapterComponent
import sampler.cluster.abc.actor.ScoredParticles
import sampler.cluster.abc.actor.Tagged
import sampler.cluster.abc.actor.WeighedParticles
import sampler.cluster.abc.actor.root.Getters
import sampler.cluster.abc.actor.root.GettersComponent
import sampler.cluster.abc.algorithm.component.ToleranceCalculatorComponent
import sampler.cluster.abc.config.ABCConfig
import sampler.cluster.abc.config.ClusterParameters
import sampler.cluster.abc.config.JobParameters
import sampler.math.Statistics
import sampler.math.StatisticsComponent
import scala.collection.immutable.Queue
import sampler.cluster.abc.algorithm.component.ParticleMixerComponent
/** Unit tests for AlgorithmComponentImpl: generation bookkeeping — adding
  * weighted particles, filtering/queueing for weighing, flushing generations,
  * particle-mix delegation and report generation. */
class AlgorithmComponentTest extends FreeSpec with Matchers with MockitoSugar {
  "Algorithm component should" - {
    // Component under test wired with mocks for every collaborator except
    // Getters and AlgorithmImpl, which are exercised for real.
    val instanceComponent = new AlgorithmComponentImpl
        with ToleranceCalculatorComponent
        with StatisticsComponent
        with LoggingAdapterComponent
        with ParticleMixerComponent
        with GettersComponent {
      val statistics = mock[Statistics]
      val getters = new Getters{}
      val toleranceCalculator = mock[ToleranceCalculator]
      val particleMixer = mock[ParticleMixer]
      val logg = mock[LoggingAdapter]
      val algorithm = new AlgorithmImpl{}
    }
    val instance = instanceComponent.algorithm
    // Shared fixtures: tagged scored/weighted particles with distinct ids.
    val (id1, id2, id3, id4) = (111111, 111112, 111113, 111114)
    // NOTE(review): Seq(0,5) yields the two scores 0 and 5; the sibling
    // fixtures all use Seq(0.5) — possibly a typo for Seq(0.5); confirm.
    val scored1 = Tagged(Scored(1, Seq(0,5)), id1)
    val scored2 = Tagged(Scored(2, Seq(0.5)), id2)
    val weighed1 = Tagged(Weighted(Scored(3, Seq(0.25)), 0.25), id3)
    val weighed2 = Tagged(Weighted(Scored(4, Seq(0.25)), 0.25), id4)
    "Add incoming weighted particles to a generation" in {
      val initialSeq = WeighedParticles(Seq(weighed1))
      val addedSeq = WeighedParticles(Seq(weighed2))
      val gen1 = Generation[Int](
        null,
        ScoredParticles(Seq()),
        initialSeq,
        Queue(),
        0.1,
        1,
        null
      )
      val nextGen = instance.addWeighted(addedSeq, gen1)
      val weighedSeq = nextGen.weighted
      assert(weighedSeq.seq.length === 2)
      assert(weighedSeq.seq.contains(weighed1))
      assert(weighedSeq.seq.contains(weighed2))
    }
    "Filters and queues for weighing" in {
      val scoredSeq = ScoredParticles(Seq(scored1))
      val gen1 = Generation[Int](
        null,
        ScoredParticles(Seq()),
        WeighedParticles(Seq()),
        Queue(),
        0.1,
        1,
        null
      )
      val nextGen = instance.filterAndQueueForWeighing(scoredSeq, gen1)
      val observedIds = nextGen.idsObserved
      val dueWeighing = nextGen.dueWeighing
      assert(observedIds.size === 1)
      assert(observedIds.contains(id1))
      assert(dueWeighing.size === 1)
      assert(dueWeighing.seq.contains(scored1))
    }
    "Similar test to above but with some IDs already present and attempted adding of duplicate" in {
      val initialObs: Queue[Long] = Queue(id1)
      val initialDues = ScoredParticles(Seq(scored1))
      val gen1 = Generation[Int](
        null,
        initialDues,
        WeighedParticles(Seq()),
        initialObs,
        0.1,
        1,
        null
      )
      val scoredSeq = ScoredParticles(Seq(scored1, scored2))
      val nextGen = instance.filterAndQueueForWeighing(scoredSeq, gen1)
      val observedIds = nextGen.idsObserved
      val dueWeighing = nextGen.dueWeighing
      assert(observedIds.size === 2)
      assert(observedIds.contains(id1))
      assert(observedIds.contains(id2))
      assert(dueWeighing.size === 2)
      assert(dueWeighing.seq.contains(scored1))
      assert(dueWeighing.seq.contains(scored2))
    }
    "Flushes generation" - {
      "Flushes all elements " in {
        when(instanceComponent.toleranceCalculator.apply(anyObject(), org.mockito.Matchers.eq(0.1))).thenReturn(0.01)
        val gen1 = Generation[Int](
          null,
          ScoredParticles(Seq(scored1)),
          WeighedParticles(Seq(weighed1)),
          Queue(id3),
          0.1,
          1,
          null
        )
        val nextGen = instance.flushGeneration(gen1, 1, 500)
        assert(nextGen.weighted.seq.isEmpty)
        assert(nextGen.currentTolerance === 0.01)
        assert(nextGen.currentIteration === 2)
        assert(nextGen.prevWeightsTable === Map(3 -> 0.25))
        assert(nextGen.dueWeighing.seq.isEmpty)
      }
      val numParticles = 2
      val memoryGenerations = 2
      "causes assertion error if particles haven't exceeded the memory generations limit" in {
        val shortQueue: Queue[Long] = Queue(id1)
        val gen1 = Generation[Int](
          null,
          ScoredParticles(Seq()),
          WeighedParticles(Seq(weighed1)),
          shortQueue,
          0.1,
          1,
          null
        )
        intercept[AssertionError]{
          instance.flushGeneration(gen1, numParticles, memoryGenerations)
        }
      }
      "reduced to n-1 generations memory if memory limit is exceeded" in {
        val longQueue: Queue[Long] = Queue(id1, id2, id3, id4, 111115)
        val gen1 = Generation[Int](
          null,
          ScoredParticles(Seq()),
          WeighedParticles(Seq(weighed1, weighed1, weighed1, weighed1)),
          longQueue,
          0.1,
          1,
          null
        )
        val nextGen = instance.flushGeneration(gen1, numParticles, memoryGenerations)
        val expectedQueue: Queue[Long] = Queue(id4, 111115)
        assert(nextGen.idsObserved === expectedQueue)
      }
      "reduced to n-1 generations memory if memory limit is equalled" in {
        val equalQueue: Queue[Long] = Queue(id1, id2, id3, id4)
        val gen1 = Generation[Int](
          null,
          ScoredParticles(Seq()),
          WeighedParticles(Seq(weighed1, weighed1, weighed1, weighed1)),
          equalQueue,
          0.1,
          1,
          null
        )
        val nextGen = instance.flushGeneration(gen1, numParticles, memoryGenerations)
        val expectedQueue: Queue[Long] = Queue(id3, id4)
        assert(nextGen.idsObserved === expectedQueue)
      }
    }
    "Determine if generation has gathered enough particles" in {
      val config1 = ABCConfig(JobParameters(2,0,0), null, null)
      val config2 = ABCConfig(JobParameters(5,0,0), null, null)
      val config3 = ABCConfig(JobParameters(6,0,0), null, null)
      val config4 = ABCConfig(JobParameters(1000,0,0), null, null)
      val gen1 = Generation[Int](
        null,
        null,
        WeighedParticles(Seq(
          weighed1,
          weighed2,
          Tagged(Weighted(Scored(5, Seq(0.5)), 0.5), 111115),
          Tagged(Weighted(Scored(6, Seq(0.5)), 0.5), 111116),
          Tagged(Weighted(Scored(7, Seq(0.5)), 0.5), 111117)
        )),
        Queue(111113, 111114, 111115, 111116, 111117),
        0.1,
        1,
        null
      )
      assert(instance.isEnoughParticles(gen1, config1))
      assert(instance.isEnoughParticles(gen1, config2))
      assert(!instance.isEnoughParticles(gen1, config3))
      assert(!instance.isEnoughParticles(gen1, config4))
    }
    "Empties weighing buffer" in {
      val gen1 = Generation[Int](
        null,
        ScoredParticles(Seq(scored1)),
        WeighedParticles(Seq()),
        Queue(),
        0.1,
        1,
        null
      )
      val nextGen = instance.emptyWeighingBuffer(gen1)
      assert(nextGen.dueWeighing.seq.isEmpty)
    }
    "Delegates building a mix payload to separate component" - {
      val mixinResponse = Some(ScoredParticles(Seq(scored1, scored2)))
      val gen1 = mock[Generation[Int]]
      val config = mock[ABCConfig]
      when(instanceComponent.particleMixer.apply(gen1, config)).thenReturn(mixinResponse)
      assert(instance.buildMixPayload(gen1, config) === mixinResponse)
    }
    "Generates a report" in {
      val gen1 = Generation[Int](
        null,
        ScoredParticles(Seq()),
        WeighedParticles(Seq()),
        Queue(),
        0.001,
        500,
        Map(1 -> 0.5, 2 -> 0.5)
      )
      val config = ABCConfig(JobParameters(1000,0,0), null, null)
      val report = instance.buildReport(gen1, config)
      val posterior = report.posterior
      assert(report.generationId === 500)
      assert(report.tolerance === 0.001)
      // Posterior is sampled, so counts are checked within a tolerance band.
      assert(posterior.length === 1000)
      posterior.count(_ == 1) should be(500 +- 50)
      posterior.count(_ == 2) should be(500 +- 50)
    }
  }
} | tsaratoon/Sampler | sampler-cluster/src/test/scala/sampler/cluster/abc/algorithm/AlgorithmComponentTest.scala | Scala | apache-2.0 | 9,225 |
package org.jetbrains.plugins.scala
package lang.types.existentialSimplification
import java.io.File
import com.intellij.openapi.util.io.FileUtil
import com.intellij.openapi.util.text.StringUtil
import com.intellij.openapi.vfs.{CharsetToolkit, LocalFileSystem}
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.base.ScalaLightPlatformCodeInsightTestCaseAdapter
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.types.result.{Failure, Success, TypingContext}
import org.jetbrains.plugins.scala.lang.psi.types.{ScExistentialType, ScTypeExt}
/**
* @author Alexander Podkhalyuzin
*/
abstract class ExistentialSimplificationTestBase extends ScalaLightPlatformCodeInsightTestCaseAdapter {
  // Markers delimiting the expression under test inside each fixture file.
  private val startExprMarker = "/*start*/"
  private val endExprMarker = "/*end*/"
  // Root folder containing the .scala fixture files for these tests.
  def folderPath: String = baseRootPath() + "types/existentialSimplification/"
  // Loads <TestName>.scala, finds the /*start*/../*end*/ expression, simplifies
  // its existential type and compares the result against the expected text held
  // in the fixture's last comment.
  protected def doTest() {
    import _root_.junit.framework.Assert._
    val filePath = folderPath + getTestName(false) + ".scala"
    val file = LocalFileSystem.getInstance.findFileByPath(filePath.replace(File.separatorChar, '/'))
    assert(file != null, "file " + filePath + " not found")
    val fileText = StringUtil.convertLineSeparators(FileUtil.loadFile(new File(file.getCanonicalPath), CharsetToolkit.UTF8))
    configureFromFileTextAdapter(getTestName(false) + ".scala", fileText)
    val scalaFile = getFileAdapter.asInstanceOf[ScalaFile]
    val offset = fileText.indexOf(startExprMarker)
    val startOffset = offset + startExprMarker.length
    assert(offset != -1, "Not specified start marker in test case. Use /*start*/ in scala file for this.")
    val endOffset = fileText.indexOf(endExprMarker)
    assert(endOffset != -1, "Not specified end marker in test case. Use /*end*/ in scala file for this.")
    val addOne = if(PsiTreeUtil.getParentOfType(scalaFile.findElementAt(startOffset),classOf[ScExpression]) != null) 0 else 1 //for xml tests
    val expr: ScExpression = PsiTreeUtil.findElementOfClassAtRange(scalaFile, startOffset + addOne, endOffset, classOf[ScExpression])
    assert(expr != null, "Not specified expression in range to infer type.")
    val typez = expr.getType(TypingContext.empty)
    typez match {
      case Success(ttypez: ScExistentialType, _) =>
        val res = ttypez.simplify().presentableText
        // The expected type text lives in the last comment of the fixture file.
        val lastPsi = scalaFile.findElementAt(scalaFile.getText.length - 1)
        val text = lastPsi.getText
        val output = lastPsi.getNode.getElementType match {
          case ScalaTokenTypes.tLINE_COMMENT => text.substring(2).trim
          case ScalaTokenTypes.tBLOCK_COMMENT | ScalaTokenTypes.tDOC_COMMENT =>
            text.substring(2, text.length - 2).trim
          case _ => assertTrue("Test result must be in last comment statement.", false)
        }
        assertEquals(output, res)
      case Success(_, _) =>
        assert(assertion = false, message = "Expression has not existential type")
      case Failure(msg, elem) => assert(assertion = false, message = msg + " :: " + (elem match {
        case Some(x) => x.getText
        case None => "empty element"
      }))
    }
  }
} | ilinum/intellij-scala | test/org/jetbrains/plugins/scala/lang/types/existentialSimplification/ExistentialSimplificationTestBase.scala | Scala | apache-2.0 | 3,316 |
package scala.build
import sbt._, Keys._
/** This object defines keys that should be visible with an unqualified name in all .sbt files and the command line */
object BuildSettings extends AutoPlugin {
  // allRequirements: the plugin activates on every project automatically,
  // so the keys below are available without an explicit enablePlugins call.
  override def trigger = allRequirements
  object autoImport {
    lazy val baseVersion = settingKey[String]("The base version number from which all others are derived")
    lazy val baseVersionSuffix = settingKey[String]("Identifies the kind of version to build")
    lazy val buildDirectory = settingKey[File]("The directory where all build products go. By default ./build")
  }
  import autoImport._
  // Build-wide defaults: place target and build output under the build root.
  override def buildSettings = Def.settings(
    ThisBuild / target := (ThisBuild / baseDirectory).value / "target",
    ThisBuild / buildDirectory := (ThisBuild / baseDirectory).value / "build",
  )
}
| lrytz/scala | project/BuildSettings.scala | Scala | apache-2.0 | 819 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.api.inject.guice
import akka.actor.ActorSystem
import akka.actor.ClassicActorSystemProvider
import com.google.inject.AbstractModule
import com.typesafe.config.Config
import org.specs2.mutable.Specification
import play.api.ApplicationLoader
import play.api.Configuration
import play.api.Environment
import play.api.i18n.I18nModule
import play.api.inject.BuiltinModule
import play.api.inject.DefaultApplicationLifecycle
import play.api.mvc.CookiesModule
import play.{ Environment => JavaEnvironment }
import scala.concurrent.Await
import scala.concurrent.Future
import scala.concurrent.duration._
/** Tests for GuiceApplicationLoader: adding/overriding modules, loading
  * modules declared in configuration, actor-system provisioning, and running
  * registered stop hooks on shutdown. */
class GuiceApplicationLoaderSpec extends Specification {
  "GuiceApplicationLoader" should {
    "allow adding additional modules" in {
      val module = new AbstractModule {
        override def configure() = {
          bind(classOf[Bar]) to classOf[MarsBar]
        }
      }
      val builder = new GuiceApplicationBuilder().bindings(module)
      val loader = new GuiceApplicationLoader(builder)
      val app = loader.load(fakeContext)
      app.injector.instanceOf[Bar] must beAnInstanceOf[MarsBar]
    }
    "allow replacing automatically loaded modules" in {
      val builder =
        new GuiceApplicationBuilder().load(new BuiltinModule, new I18nModule, new CookiesModule, new ManualTestModule)
      val loader = new GuiceApplicationLoader(builder)
      val app = loader.load(fakeContext)
      app.injector.instanceOf[Foo] must beAnInstanceOf[ManualFoo]
    }
    "load static Guice modules from configuration" in {
      val loader = new GuiceApplicationLoader()
      val app = loader.load(fakeContextWithModule(classOf[StaticTestModule]))
      app.injector.instanceOf[Foo] must beAnInstanceOf[StaticFoo]
    }
    "load dynamic Scala Guice modules from configuration" in {
      val loader = new GuiceApplicationLoader()
      val app = loader.load(fakeContextWithModule(classOf[ScalaConfiguredModule]))
      app.injector.instanceOf[Foo] must beAnInstanceOf[ScalaConfiguredFoo]
    }
    "load dynamic Java Guice modules from configuration" in {
      val loader = new GuiceApplicationLoader()
      val app = loader.load(fakeContextWithModule(classOf[JavaConfiguredModule]))
      app.injector.instanceOf[Foo] must beAnInstanceOf[JavaConfiguredFoo]
    }
    "provide an ClassicActorSystem " in {
      val loader = new GuiceApplicationLoader()
      val application = loader.load(fakeContext)
      val system = application.injector.instanceOf[ActorSystem]
      val classicSystemProvider: ClassicActorSystemProvider =
        application.injector.instanceOf[ClassicActorSystemProvider]
      system must_!= null
      system must_== classicSystemProvider.asInstanceOf[ActorSystem]
    }
    "call the stop hooks from the context" in {
      val lifecycle = new DefaultApplicationLifecycle
      var hooksCalled = false
      lifecycle.addStopHook(() => Future.successful { hooksCalled = true })
      val loader = new GuiceApplicationLoader()
      val app = loader.load(ApplicationLoader.Context.create(Environment.simple(), lifecycle = lifecycle))
      Await.ready(app.stop(), 5.minutes)
      hooksCalled must_== true
    }
  }
  // Minimal loader context backed by a simple environment.
  def fakeContext: ApplicationLoader.Context = ApplicationLoader.Context.create(Environment.simple())
  // Context whose configuration additionally enables `module` via the
  // play.modules.enabled list.
  def fakeContextWithModule(module: Class[_ <: AbstractModule]): ApplicationLoader.Context = {
    val f = fakeContext
    val c = f.initialConfiguration
    val newModules: Seq[String] = c.get[Seq[String]]("play.modules.enabled") :+ module.getName
    val modulesConf = Configuration("play.modules.enabled" -> newModules)
    val combinedConf = modulesConf.withFallback(f.initialConfiguration)
    f.copy(initialConfiguration = combinedConf)
  }
}
// Fixture Guice modules and marker types used by GuiceApplicationLoaderSpec.
// Each module binds Foo (or Bar) to a distinct implementation so the tests
// can tell which module the loader actually picked up.
class ManualTestModule extends AbstractModule {
  override def configure(): Unit = {
    bind(classOf[Foo]) to classOf[ManualFoo]
  }
}
class StaticTestModule extends AbstractModule {
  override def configure(): Unit = {
    bind(classOf[Foo]) to classOf[StaticFoo]
  }
}
// Module with the (Environment, Configuration) constructor shape Play supports
// for dynamically instantiated Scala modules.
class ScalaConfiguredModule(environment: Environment, configuration: Configuration) extends AbstractModule {
  override def configure(): Unit = {
    bind(classOf[Foo]) to classOf[ScalaConfiguredFoo]
  }
}
// Module with the (play.Environment, Config) constructor shape supported for
// dynamically instantiated Java modules.
class JavaConfiguredModule(environment: JavaEnvironment, config: Config) extends AbstractModule {
  override def configure(): Unit = {
    bind(classOf[Foo]) to classOf[JavaConfiguredFoo]
  }
}
// Marker types: each Foo subclass identifies the module that bound it.
trait Bar
class MarsBar extends Bar
trait Foo
class ManualFoo extends Foo
class StaticFoo extends Foo
class ScalaConfiguredFoo extends Foo
class JavaConfiguredFoo extends Foo
| wegtam/playframework | core/play-guice/src/test/scala/play/api/inject/guice/GuiceApplicationLoaderSpec.scala | Scala | apache-2.0 | 4,763 |
package mesosphere.marathon
import akka.actor.ActorSystem
import akka.testkit.{ TestKit, TestProbe }
import mesosphere.marathon.Protos.MarathonTask
import mesosphere.marathon.health.HealthCheckManager
import mesosphere.marathon.state._
import mesosphere.marathon.tasks.{ OfferReviver, TaskQueue, TaskTracker }
import org.apache.mesos.Protos.{ TaskID, TaskState, TaskStatus }
import org.apache.mesos.SchedulerDriver
import org.mockito.Mockito.{ times, verify, when }
import org.scalatest.Matchers
import org.scalatest.mock.MockitoSugar
import scala.collection.JavaConverters._
import scala.concurrent.duration._
import scala.concurrent.{ Await, Future }
/** Tests for SchedulerActions: rate-limiter reset when an app is stopped, and
  * task reconciliation messages sent to the Mesos SchedulerDriver. */
class SchedulerActionsTest extends TestKit(ActorSystem("TestSystem")) with MarathonSpec with Matchers with MockitoSugar {
  import system.dispatcher
  test("Reset rate limiter if application is stopped") {
    val queue = new TaskQueue(conf = MarathonTestHelper.defaultConfig(), offerReviver = mock[OfferReviver])
    val repo = mock[AppRepository]
    val taskTracker = mock[TaskTracker]
    val scheduler = new SchedulerActions(
      repo,
      mock[GroupRepository],
      mock[HealthCheckManager],
      taskTracker,
      queue,
      system.eventStream,
      TestProbe().ref,
      mock[MarathonConf]
    )
    val app = AppDefinition(id = PathId("/myapp"))
    when(repo.expunge(app.id)).thenReturn(Future.successful(Seq(true)))
    when(taskTracker.get(app.id)).thenReturn(Set.empty[Protos.MarathonTask])
    // Delay is present before the stop and must be cleared afterwards.
    queue.rateLimiter.addDelay(app)
    queue.rateLimiter.getDelay(app).hasTimeLeft should be(true)
    val res = scheduler.stopApp(mock[SchedulerDriver], app)
    Await.ready(res, 1.second)
    queue.rateLimiter.getDelay(app).hasTimeLeft should be(false)
  }
  test("Task reconciliation sends known running and staged tasks and empty list") {
    val queue = new TaskQueue(conf = MarathonTestHelper.defaultConfig(), offerReviver = mock[OfferReviver])
    val repo = mock[AppRepository]
    val taskTracker = mock[TaskTracker]
    val driver = mock[SchedulerDriver]
    val runningStatus = TaskStatus.newBuilder
      .setTaskId(TaskID.newBuilder.setValue("task_1"))
      .setState(TaskState.TASK_RUNNING)
      .build()
    val runningTask = MarathonTask.newBuilder
      .setId("task_1")
      .setStatus(runningStatus)
      .build()
    // Staged task has no status yet; a synthetic TASK_STAGING status is expected.
    val stagedTask = MarathonTask.newBuilder
      .setId("task_2")
      .build()
    val stagedStatus = TaskStatus.newBuilder
      .setTaskId(TaskID.newBuilder.setValue(stagedTask.getId))
      .setState(TaskState.TASK_STAGING)
      .build()
    val scheduler = new SchedulerActions(
      repo,
      mock[GroupRepository],
      mock[HealthCheckManager],
      taskTracker,
      queue,
      system.eventStream,
      TestProbe().ref,
      mock[MarathonConf]
    )
    val app = AppDefinition(id = PathId("/myapp"))
    when(taskTracker.get(app.id)).thenReturn(Set(runningTask, stagedTask))
    when(repo.allPathIds()).thenReturn(Future.successful(Seq(app.id)))
    when(taskTracker.list).thenReturn(Map(app.id -> TaskTracker.App(app.id, Set(runningTask, stagedTask), shutdown = false)))
    Await.result(scheduler.reconcileTasks(driver), 5.seconds)
    // Explicit reconciliation of known statuses, plus one implicit (empty) call.
    verify(driver).reconcileTasks(Set(runningStatus, stagedStatus).asJava)
    verify(driver).reconcileTasks(java.util.Arrays.asList())
  }
  test("Task reconciliation only one empty list, when no tasks are present in Marathon") {
    val queue = new TaskQueue(conf = MarathonTestHelper.defaultConfig(), offerReviver = mock[OfferReviver])
    val repo = mock[AppRepository]
    val taskTracker = mock[TaskTracker]
    val driver = mock[SchedulerDriver]
    val status = TaskStatus.newBuilder
      .setTaskId(TaskID.newBuilder.setValue("task_1"))
      .setState(TaskState.TASK_RUNNING)
      .build()
    val task = MarathonTask.newBuilder
      .setId("task_1")
      .setStatus(status)
      .build()
    val scheduler = new SchedulerActions(
      repo,
      mock[GroupRepository],
      mock[HealthCheckManager],
      taskTracker,
      queue,
      system.eventStream,
      TestProbe().ref,
      mock[MarathonConf]
    )
    val app = AppDefinition(id = PathId("/myapp"))
    when(taskTracker.get(app.id)).thenReturn(Set.empty[MarathonTask])
    when(repo.allPathIds()).thenReturn(Future.successful(Seq()))
    when(taskTracker.list).thenReturn(Map.empty[PathId, TaskTracker.App])
    Await.result(scheduler.reconcileTasks(driver), 5.seconds)
    verify(driver, times(1)).reconcileTasks(java.util.Arrays.asList())
  }
}
| spacejam/marathon | src/test/scala/mesosphere/marathon/SchedulerActionsTest.scala | Scala | apache-2.0 | 4,525 |
package org.apache.spark.ml.parity.feature
import org.apache.spark.ml.parity.SparkParityBase
import org.apache.spark.ml.feature.{MinMaxScaler, VectorAssembler}
import org.apache.spark.ml.{Pipeline, Transformer}
import org.apache.spark.sql.DataFrame
/**
* Created by hollinwilkins on 10/30/16.
*/
/** Spark/MLeap parity check for MinMaxScaler: assembles two numeric columns
  * into a feature vector and min-max scales it, then relies on
  * SparkParityBase to compare Spark and MLeap transformer output. */
class MinMaxScalerParitySpec extends SparkParityBase {
  override val dataset: DataFrame = baseDataset.select("dti", "loan_amount")

  // Stage 1: pack the two input columns into a single vector column.
  private val assembler = new VectorAssembler()
    .setInputCols(Array("dti", "loan_amount"))
    .setOutputCol("features")

  // Stage 2: min-max scale the assembled vector.
  private val scaler = new MinMaxScaler()
    .setInputCol("features")
    .setOutputCol("scaled_features")

  override val sparkTransformer: Transformer =
    new Pipeline().setStages(Array(assembler, scaler)).fit(dataset)
}
| combust-ml/mleap | mleap-spark/src/test/scala/org/apache/spark/ml/parity/feature/MinMaxScalerParitySpec.scala | Scala | apache-2.0 | 722 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server.checkpoints
import java.io._
import java.util.regex.Pattern
import kafka.server.LogDirFailureChannel
import kafka.server.epoch.EpochEntry
import org.apache.kafka.common.TopicPartition
import scala.collection._
object OffsetCheckpointFile {
  // Splits checkpoint lines on runs of whitespace.
  // Fix: the previous pattern "\\\\s+" compiled to the literal regex \\s+ (a backslash
  // followed by "s"), which never matches the space-separated "<topic> <partition> <offset>"
  // line format, so every line would fail to parse.
  private val WhiteSpacesPattern = Pattern.compile("\\s+")
  private[checkpoints] val CurrentVersion = 0

  /** Serializes/deserializes one `(TopicPartition, offset)` entry per checkpoint-file line. */
  object Formatter extends CheckpointFileFormatter[(TopicPartition, Long)] {
    /** Renders an entry as "<topic> <partition> <offset>". */
    override def toLine(entry: (TopicPartition, Long)): String = {
      s"${entry._1.topic} ${entry._1.partition} ${entry._2}"
    }

    /** Parses a "<topic> <partition> <offset>" line; returns None for malformed lines. */
    override def fromLine(line: String): Option[(TopicPartition, Long)] = {
      WhiteSpacesPattern.split(line) match {
        case Array(topic, partition, offset) =>
          Some(new TopicPartition(topic, partition.toInt), offset.toLong)
        case _ => None
      }
    }
  }
}
/**
 * Checkpoint abstraction for persisting and restoring a sequence of [[EpochEntry]] records.
 *
 * NOTE(review): despite the name, this trait is typed on EpochEntry rather than
 * topic-partition offsets — confirm the naming against its implementors.
 */
trait OffsetCheckpoint {
  /** Persists the given epoch entries. */
  def write(epochs: Seq[EpochEntry])
  /** Reads back the previously persisted epoch entries. */
  def read(): Seq[EpochEntry]
}
/**
 * This class persists a map of (Partition => Offsets) to a file (for a certain replica).
 *
 * Lines are encoded/decoded by [[OffsetCheckpointFile.Formatter]]; file handling (version
 * header, reads/writes) is delegated to the generic [[CheckpointFile]].
 */
class OffsetCheckpointFile(val f: File, logDirFailureChannel: LogDirFailureChannel = null) {
  // Generic checkpoint file parameterized with this file's version and line formatter.
  val checkpoint = new CheckpointFile[(TopicPartition, Long)](f, OffsetCheckpointFile.CurrentVersion,
    OffsetCheckpointFile.Formatter, logDirFailureChannel, f.getParent)

  /** Writes all (partition -> offset) pairs to the checkpoint file. */
  def write(offsets: Map[TopicPartition, Long]): Unit = checkpoint.write(offsets.toSeq)

  /** Reads the checkpoint file back as a (partition -> offset) map. */
  def read(): Map[TopicPartition, Long] = checkpoint.read().toMap
}
| zzwlstarby/mykafka | core/src/main/scala/kafka/server/checkpoints/OffsetCheckpointFile.scala | Scala | apache-2.0 | 2,303 |
package com.rklaehn.interval
import org.scalacheck.Properties
import spire.implicits._
import spire.laws.LogicLaws
/** ScalaCheck suite verifying that IntervalSeq's boolean algebra (over Long bounds)
  * satisfies spire's LogicLaws. */
object IntervalSeqLogicLawsCheck extends Properties("IntervalSeq") with AddProperties {
  // Boolean-algebra instance under test.
  val algebra = IntervalSeqAlgebra.booleanAlgebra[Long]
  // Generator of arbitrary IntervalSeq[Long] values for the law checks.
  val arb = IntervalSeqArbitrary.arbitrary
  addProperties("LogicLaws", LogicLaws(algebra, arb).bool(algebra))
}
| non/intervalset | src/test/scala/com/rklaehn/interval/IntervalSeqLogicLawsCheck.scala | Scala | apache-2.0 | 377 |
package com.github.dronegator.nlp.utils
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, Future, duration}, duration.Duration.Inf
object concurrent {

  /** Adds blocking `await` convenience methods to [[scala.concurrent.Future]]. */
  implicit class Zukunft[A](future: Future[A]) {

    /** Blocks indefinitely until the future completes and returns its value. */
    def await: A =
      await(Inf)

    /** Blocks for at most `atMost`, then returns the future's value (or throws on timeout/failure). */
    def await(atMost: Duration): A =
      Await.result(future, atMost)
  }
}
package com.example
import org.scalatest.FlatSpec
import spray.testkit.ScalatestRouteTest
import org.scalatest.matchers.ShouldMatchers
import spray.http.StatusCodes
/** Route-level test of the [[InlineHtml]] routing trait using spray-testkit. */
class InlineHtmlTest extends FlatSpec with ScalatestRouteTest with ShouldMatchers with InlineHtml {
  // Required by ScalatestRouteTest: run the routes against this test's actor system.
  def actorRefFactory = system
  behavior of "InlineHtml routing trait"
  it should "return an inline page on /" in {
    // GET / through the inlineHtml route must be handled, succeed, and contain the greeting.
    Get("/") ~> inlineHtml ~> check {
      handled should be(true)
      status should be(StatusCodes.OK)
      entityAs[String] should include("Say hello to")
    }
  }
}
| shenbaise/mltoy | src/test/scala/com/example/InlineHtmlTest.scala | Scala | apache-2.0 | 560 |
/**
* Copyright (c) 2007-2011 Eric Torreborre <etorreborre@yahoo.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of
* the Software. Neither the name of specs nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written permission.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
* TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package org.specs.form
import scala.xml._
import org.specs.util.Plural._
import org.specs.execute.Status
/**
* A SeqForm is a TableForm containing a sequence of LineForms
* and using a sequence of values as the actual values
*
* It is used in conjonction with a custom LineForm representing the values on a line: <code>
*
* // @see EntityLineForm for LineForms representing a specific entity, possibly unset
* case class CustomerLine(name: String, age: Int) extends EntityLineForm[Customer] {
* // the prop method accepts a function here, taking the proper attribute on the "Entity"
* prop("Name", (_:Customer).getName)(name)
* prop("Age", (_:Customer).getAge)(age)
* }
* class Customers(actualCustomers: Seq[Customer]) extends SeqForm[T](actualCustomers)
*
* // example usage
* new Customers(listFromTheDatabase) {
* tr(CustomerLine("Eric", 36))
* tr(CustomerLine("Bob", 27))
* }
* </code>
*/
class SeqForm[T](title: Option[String], val seq: Seq[T]) extends TableForm(title) with SeqFormEnabled[T] {
  /** Builds a titled SeqForm over the actual values `seq`. */
  def this(s: String, seq: Seq[T]) = this(Some(s), seq)
  /** Builds an untitled SeqForm over the actual values `seq`. */
  def this(seq: Seq[T]) = this(None, seq)
  /** Builds an untitled SeqForm with no actual values. */
  def this() = this(None, List())
}
trait SeqFormEnabled[T] extends TableFormEnabled {
  /** actual values that declared lines are matched against, by position */
  val seq: Seq[T]
  /** list of declared lines which are expected but not received as actual */
  private var unmatchedLines = new scala.collection.mutable.ListBuffer[LineForm]
  /** number of already expected lines */
  private var expectedLinesNb = 0
  /**
   * add a new line with a function taking an object (which may be None, if the set of actual values doesn't contain enough values),
   * returning a LineForm
   *
   * If this is the first line, a table header is added
   */
  def line(l : Option[T] => LineForm): LineForm = {
    var currentLine: LineForm = null
    if (expectedLinesNb >= seq.size) {
      // more lines declared than actual values: keep the line aside as unmatched.
      // NOTE(review): `comment` presumably marks the line as commented-out and returns it — confirm in LineForm
      currentLine = l(None)
      setHeader(currentLine)
      unmatchedLines.append(currentLine.comment)
      currentLine
    } else {
      // an actual value exists at this position: build the line against it and add its rows
      currentLine = l(Some(seq(expectedLinesNb)))
      setHeader(currentLine)
      trs(currentLine.rows)
      form(currentLine)
    }
    expectedLinesNb = expectedLinesNb + 1
    currentLine
  }
  /** add a line built from an entity line form, bound to the matching actual entity (if any) */
  def tr(l: EntityLineForm[T]): LineForm = line { (actual: Option[T]) =>
    l.entityIs(actual)
  }
  /** add the line's header to the table, but only before any row has been added */
  override def setHeader[F <: LineForm](line: F): F = {
    if (rowsNb == 0) inNewRow(line.header)
    line
  }
  /**
   * upon execution a new row will be added to notify the user of unmatched lines
   */
  override def executeThis = {
    val i = unmatchedLines.size
    if (i > 0) {
      // report the count with proper pluralization, then render the unmatched lines themselves
      th3("There ".bePlural(i) + " " + i + " unmatched line".plural(i), Status.Failure)
      unmatchedLines.foreach { (line: LineForm) => trs(line.rows) }
    }
    super.executeThis
  }
}
| stuhood/specs | src/main/scala/org/specs/form/SeqForm.scala | Scala | mit | 4,063 |
package lemastero.algorithms.trie
/**
* Ternary Search Trie implementation.
*
* Store characters and values in nodes.
* Each node has 3 children:
* smaller (lfet),
* equal (middle),
* larger (right)
*/
class TernarySearchTrie[Value] extends StringSymbolTable[Value] {
  // Root node; its key is undefined until the first put.
  private val root = new TernaryTrieNode[Value]()
  /** Inserts or overwrites the value stored under `key` (key must be non-empty). */
  override def put(key: String, value: Value): Unit
  = root.put(key, value)
  /** Returns the value stored under `key`, or None if absent. */
  override def get(key: String): Option[Value]
  = root.get(key)
}
case class TernaryTrieNode[Value](
    var key: Option[Char] = None,
    var value: Option[Value] = None,
    var left: Option[TernaryTrieNode[Value]] = None,
    var middle: Option[TernaryTrieNode[Value]] = None,
    var right: Option[TernaryTrieNode[Value]] = None) {

  /** Inserts `key` -> `value`, allocating child nodes on demand.
    * A fresh node adopts the key's first character; keys must be non-empty. */
  def put(key: String, value: Value): Unit = {
    val head = key.head
    if (this.key.isEmpty) this.key = Some(head)
    val own = this.key.get
    if (own == head) {
      // On the matching character: either terminate here or descend on the remainder.
      if (key.length == 1) this.value = Some(value)
      else ensureMiddle().put(key.tail, value)
    } else if (own > head) {
      ensureLeft().put(key, value)
    } else {
      ensureRight().put(key, value)
    }
  }

  /** Looks up `key`; returns None when no value was stored under it. */
  def get(key: String): Option[Value] = {
    val head = key.head
    this.key match {
      case Some(own) if own == head =>
        if (key.length == 1) value
        else middle.flatMap(_.get(key.tail))
      case Some(own) if own > head =>
        left.flatMap(_.get(key))
      case _ =>
        // Covers both "head sorts after this node's key" and an uninitialized node.
        right.flatMap(_.get(key))
    }
  }

  // Lazily create and return the middle child.
  private def ensureMiddle(): TernaryTrieNode[Value] = {
    if (middle.isEmpty) middle = Some(TernaryTrieNode[Value]())
    middle.get
  }

  // Lazily create and return the left child.
  private def ensureLeft(): TernaryTrieNode[Value] = {
    if (left.isEmpty) left = Some(TernaryTrieNode[Value]())
    left.get
  }

  // Lazily create and return the right child.
  private def ensureRight(): TernaryTrieNode[Value] = {
    if (right.isEmpty) right = Some(TernaryTrieNode[Value]())
    right.get
  }
}
| lemastero/algorithms | src/main/scala/lemastero/algorithms/trie/TernarySearchTrie.scala | Scala | mit | 2,148 |
package io.github.takayuky.fluent
/** Exposes the fluent-logger encoder instances for refined types via the package object. */
package object refined
extends FluentEncoderRefinedInstances
| takayuky/TypesafeFluentLogger | refined/src/main/scala/io/github/takayuky/fluent/refined/package.scala | Scala | mit | 98 |
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.util
import java.time.Duration
import org.openjdk.jmh.annotations.Benchmark
import org.openjdk.jmh.annotations.Scope
import org.openjdk.jmh.annotations.State
import org.openjdk.jmh.annotations.Threads
import org.openjdk.jmh.infra.Blackhole
/**
* ```
* > run -bm all -wi 10 -i 10 -f1 -t1 .*TimeWaveCalc.*
* ...
* [info] Benchmark Mode Cnt Score Error Units
* [info] TimeWaveCalc.testMathSin thrpt 10 11484308.329 ± 729466.777 ops/s
* [info] TimeWaveCalc.testPrecomputeSin thrpt 10 39806386.423 ± 868184.835 ops/s
* [info] TimeWaveCalc.testMathSin avgt 10 ≈ 10⁻⁷ s/op
* [info] TimeWaveCalc.testPrecomputeSin avgt 10 ≈ 10⁻⁸ s/op
* [info] TimeWaveCalc.testMathSin sample 110059 ≈ 10⁻⁷ s/op
* [info] TimeWaveCalc.testPrecomputeSin sample 163664 ≈ 10⁻⁷ s/op
* [info] TimeWaveCalc.testMathSin ss 10 ≈ 10⁻⁶ s/op
* [info] TimeWaveCalc.testPrecomputeSin ss 10 ≈ 10⁻⁶ s/op
* ```
*/
@State(Scope.Thread)
class TimeWaveCalc {
  // Step size: 60 000 ms (one minute).
  private val step = 60000L
  // Precomputed sine wave with a one-day period at `step` resolution.
  private val dayWave = TimeWave.get(Duration.ofDays(1), step)
  // Angular frequency for a one-day period: 2*pi / periodMillis.
  private val lambda = 2 * scala.math.Pi / Duration.ofDays(1).toMillis
  // Current time aligned down to a step boundary.
  private val timestamp = System.currentTimeMillis() / step * step
  /** Baseline: compute the sine value directly. */
  private def mathSin(t: Long): Double = {
    scala.math.sin(t * lambda)
  }
  /** Benchmarks the direct math.sin computation. */
  @Threads(1)
  @Benchmark
  def testMathSin(bh: Blackhole): Unit = {
    bh.consume(mathSin(timestamp))
  }
  /** Benchmarks the precomputed TimeWave lookup. */
  @Threads(1)
  @Benchmark
  def testPrecomputeSin(bh: Blackhole): Unit = {
    bh.consume(dayWave(timestamp))
  }
}
| copperlight/atlas | atlas-jmh/src/main/scala/com/netflix/atlas/core/util/TimeWaveCalc.scala | Scala | apache-2.0 | 2,413 |
package org.scaladebugger.api.lowlevel.requests.properties.processors
import com.sun.jdi.request._
import org.scalamock.scalatest.MockFactory
import org.scalatest.{FunSpec, Matchers, ParallelTestExecution}
import org.scaladebugger.api.lowlevel.requests.properties.CustomProperty
import org.scaladebugger.test.helpers.ParallelMockFunSpec
/** Verifies that CustomPropertyProcessor copies its key/value pair onto a JDI event request. */
class CustomPropertyProcessorSpec extends ParallelMockFunSpec
{
  private val mockKey = mock[AnyRef]
  private val mockValue = mock[AnyRef]
  private val customProperty = CustomProperty(
    key = mockKey,
    value = mockValue
  )
  private val customPropertyProcessor =
    new CustomPropertyProcessor(customProperty)
  describe("CustomPropertyProcessor") {
    describe("#process") {
      it("should add the property to the event request") {
        val mockEventRequest = mock[EventRequest]
        // Expect exactly one putProperty(key, value) call on the request.
        (mockEventRequest.putProperty _).expects(mockKey, mockValue).once()
        customPropertyProcessor.process(mockEventRequest)
      }
    }
  }
}
| chipsenkbeil/scala-debugger | scala-debugger-api/src/test/scala/org/scaladebugger/api/lowlevel/requests/properties/processors/CustomPropertyProcessorSpec.scala | Scala | apache-2.0 | 992 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.tf.loaders
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.tf.TensorflowSpecHelper
import com.intel.analytics.bigdl.utils.tf.Tensorflow.typeAttr
import org.tensorflow.framework.{DataType, NodeDef}
/** Parity test comparing BigDL's "Inv" op (element-wise reciprocal per TF docs) with TensorFlow. */
class InvSpec extends TensorflowSpecHelper {
  "Inv" should "be correct for float tensor" in {
    compare(
      NodeDef.newBuilder()
        .setName("inv_test")
        .putAttr("T", typeAttr(DataType.DT_FLOAT))
        .setOp("Inv"),
      Seq(Tensor[Float](4, 32, 32, 3).rand()), // random 4x32x32x3 input tensor
      0 // NOTE(review): presumably the output index to compare — see TensorflowSpecHelper.compare
    )
  }
}
| jenniew/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/utils/tf/loaders/InvSpec.scala | Scala | apache-2.0 | 1,184 |
package com.bob.scala.webapi
import java.time.LocalDateTime
import com.bob.java.webapi.handler.MdcPropagatingOnScheduleAction
import com.bob.scala.webapi.controller.User
import com.bob.scala.webapi.utils.CodeInvoke
import com.fasterxml.jackson.databind.ObjectMapper
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.beans.factory.support.{BeanDefinitionBuilder, DefaultListableBeanFactory}
import org.springframework.boot.autoconfigure.SpringBootApplication
import org.springframework.boot.autoconfigure.thymeleaf.ThymeleafAutoConfiguration
import org.springframework.boot.{CommandLineRunner, SpringApplication}
import org.springframework.context.annotation.ComponentScan
import org.springframework.scheduling.annotation.EnableAsync
import org.springframework.stereotype.Controller
import org.springframework.ui.Model
import org.springframework.web.bind.annotation.{GetMapping, ResponseBody}
import rx.plugins.RxJavaHooks
import springfox.documentation.spring.web.json.JsonSerializer
/**
* Created by bob on 16/2/16.
*/
/** Application entry point: installs RxJava hooks and boots the Spring context. */
object ScalaApplication extends App {
  // Propagate the logging MDC onto RxJava-scheduled actions.
  RxJavaHooks.setOnScheduleAction(new MdcPropagatingOnScheduleAction)
  // Prints the result of dynamically evaluating "1 + 1" via CodeInvoke.
  println(CodeInvoke.invoke("1 + 1"))
  /**
   * `args: _ *`: this annotation tells the compiler to pass each element of `args`
   * as a separate argument, rather than passing `args` as one single argument.
   */
  private val cp = SpringApplication.run(classOf[SampleConfig], args: _ *)
  // Manually register ApplicationContextHolder as a bean definition, then resolve it eagerly.
  private val bdb = BeanDefinitionBuilder.rootBeanDefinition(classOf[ApplicationContextHolder])
  cp.getBeanFactory.asInstanceOf[DefaultListableBeanFactory]
    .registerBeanDefinition("applicationContextHolder", bdb.getBeanDefinition)
  cp.getBean(classOf[ApplicationContextHolder])
}
@SpringBootApplication(exclude = Array(classOf[ThymeleafAutoConfiguration]))
@Controller
@ComponentScan(value = Array(
  "com.bob.scala.*", "com.bob.java.webapi.*"
))
@EnableAsync
class SampleConfig extends CommandLineRunner {
  // Field-injected Jackson mapper; `_` leaves it null until Spring injects it.
  @Autowired
  var objectMapper: ObjectMapper = _
  /**
   * This instance can only be injected when Swagger is in use (on the classpath).
   */
  @Autowired
  val jsonSerializer: JsonSerializer = null
  /** Renders the index page with the current timestamp. */
  @GetMapping(value = Array("/"))
  def index(model: Model): String = {
    model.addAttribute("now", LocalDateTime.now())
    "index"
  }
  /** Exposes the JVM system properties as the response body. */
  @GetMapping(value = Array("properties"))
  @ResponseBody
  def properties(): java.util.Properties = {
    System.getProperties()
  }
  /** Startup smoke test: serializes a sample user and a map through both serializers. */
  override def run(args: String*): Unit = {
    val aUser = new User("c", 4, "a44", 4)
    println(objectMapper.writeValueAsString(aUser))
    println(jsonSerializer.toJson(aUser).value())
    val map = Map("message" -> "fucktest")
    println(objectMapper.writeValueAsString(map))
  }
}
package ch.uzh.ifi.pdeboer.pplib.hcomp.ballot.dao
import java.util.UUID
import ch.uzh.ifi.pdeboer.pplib.hcomp.ballot.persistence.{Answer, Permutation, Question}
import org.joda.time.DateTime
import scalikejdbc._
/**
* Created by mattia on 06.07.15.
*/
/**
 * ScalikeJDBC-backed implementation of [[DAO]]: persists batches, questions, answers,
 * binary assets and snippet permutations for the ballot workflow.
 */
class BallotDAO extends DAO {
  /** @return total number of answers stored. */
  override def countAllAnswers(): Int = {
    DB readOnly { implicit session =>
      sql"SELECT count(*) AS count FROM answer".map(rs => rs.int("count")).single().apply().get
    }
  }
  /** @return total number of batches stored. */
  override def countAllBatches(): Int = {
    DB readOnly { implicit session =>
      sql"SELECT count(*) AS count FROM batch".map(rs => rs.int("count")).single().apply().get
    }
  }
  /** @return total number of questions stored. */
  override def countAllQuestions(): Int = {
    DB readOnly { implicit session =>
      sql"SELECT count(*) AS count FROM question".map(rs => rs.int("count")).single().apply().get
    }
  }
  /** Creates a batch and returns its generated id. */
  override def createBatch(allowedAnswersPerTurker: Int, uuid: UUID): Long = {
    DB localTx { implicit session =>
      sql"INSERT INTO batch(allowed_answers_per_turker, uuid) VALUES(${allowedAnswersPerTurker}, ${uuid.toString})".updateAndReturnGeneratedKey().apply()
    }
  }
  /** Returns the raw answer JSON for a question, if one exists. */
  override def getAnswerByQuestionId(questionId: Long): Option[String] = {
    DB readOnly { implicit session =>
      sql"SELECT answer_json FROM answer WHERE question_id = ${questionId}".map(rs => rs.string("answer_json")).single.apply()
    }
  }
  /** Resolves an answer id from the numeric output code entered by the worker. */
  override def getAnswerIdByOutputCode(insertOutput: String): Option[Long] = {
    DB readOnly { implicit session =>
      sql"SELECT id FROM answer WHERE expected_output_code = ${insertOutput.toLong}".map(rs => rs.long("id")).single().apply()
    }
  }
  /** Loads a full answer row by id. */
  override def getAnswerById(id: Long): Option[Answer] = {
    DB readOnly { implicit session =>
      sql"SELECT * FROM answer WHERE id = ${id}".map(rs => Answer(rs.long("id"), rs.jodaDateTime("time"), rs.long("question_id"), rs.string("answer_json"), rs.boolean("accepted"))).single().apply()
    }
  }
  /** Returns the expected output code stored for the given answer. */
  override def getExpectedOutputCodeFromAnswerId(ansId: Long): Option[Long] = {
    DB readOnly { implicit session =>
      sql"SELECT expected_output_code FROM answer WHERE id = ${ansId}".map(rs => rs.long("expected_output_code")).single().apply()
    }
  }
  /** Creates a question for a batch/permutation and returns its generated id. */
  override def createQuestion(html: String, batchId: Long, uuid: UUID = UUID.randomUUID(), dateTime: DateTime = new DateTime(), permutationId: Long, secret: String = ""): Long = {
    DB localTx { implicit session =>
      sql"INSERT INTO question(batch_id, html, create_time, uuid, permutation, secret) VALUES(${batchId}, ${html}, ${dateTime}, ${uuid.toString}, ${permutationId}, ${secret})".updateAndReturnGeneratedKey().apply()
    }
  }
  /** Resolves a question id from its UUID string. */
  override def getQuestionIdByUUID(uuid: String): Option[Long] = {
    DB readOnly { implicit session =>
      sql"SELECT id FROM question WHERE uuid = ${uuid}".map(rs => rs.long("id")).single().apply()
    }
  }
  /** Returns the UUID of a question by id. */
  override def getQuestionUUID(questionId: Long): Option[String] = {
    DB readOnly { implicit session =>
      sql"SELECT uuid FROM question WHERE id = ${questionId}".map(rs => rs.string("uuid")).single().apply()
    }
  }
  /** Resolves a batch id from its UUID. */
  override def getBatchIdByUUID(uuid: UUID): Option[Long] = {
    DB readOnly { implicit session =>
      sql"SELECT id FROM batch WHERE uuid = ${uuid.toString}".map(rs => rs.long("id")).single().apply()
    }
  }
  /**
   * Stores a binary asset, deduplicating by SHA-1 hash and content type: if an asset with
   * the same hash and (case-insensitively) equal content type exists, its id is returned.
   *
   * @return the id of the existing or newly inserted asset row
   */
  override def createAsset(binary: Array[Byte], contentType: String, filename: String): Long = {
    // Renamed from `hashCode` to avoid shadowing AnyRef.hashCode.
    val contentHash = java.security.MessageDigest.getInstance("SHA-1").digest(binary).map("%02x".format(_)).mkString
    val possibleMatch = findAssetsIdByHashCode(contentHash).map(id => id -> getAssetsContentById(id))
      .find(p => p._2.equalsIgnoreCase(contentType))
    val id = if (possibleMatch.nonEmpty) {
      possibleMatch.get._1
    } else {
      DB localTx { implicit session =>
        // Fix: the filename column was previously never bound (malformed interpolation),
        // leaving the otherwise-unused `filename` parameter dangling.
        sql"INSERT INTO assets(hash_code, byte_array, content_type, filename) VALUES(${contentHash}, ${binary}, ${contentType}, ${filename})"
          .updateAndReturnGeneratedKey().apply()
      }
    }
    id
  }
  /** Links a question to an asset and returns the generated mapping id. */
  override def mapQuestionToAssets(qId: Long, assetId: Long): Long = {
    DB localTx { implicit session =>
      sql"INSERT INTO question2assets(question_id, asset_id) VALUES(${qId}, ${assetId})".updateAndReturnGeneratedKey().apply()
    }
  }
  /** Returns the content type of an asset; throws if the asset does not exist. */
  override def getAssetsContentById(id: Long): String = {
    DB readOnly { implicit session =>
      sql"SELECT content_type FROM assets WHERE id = ${id}".map(rs =>
        rs.string("content_type")).single().apply().get
    }
  }
  /** Lists ids of all assets sharing the given SHA-1 hash. */
  override def findAssetsIdByHashCode(hc: String): List[Long] = {
    DB readOnly { implicit session =>
      sql"SELECT id FROM assets WHERE hash_code = ${hc}".map(rs => rs.long("id")).list().apply()
    }
  }
  /** Marks an answer as accepted or rejected. */
  override def updateAnswer(answerId: Long, accepted: Boolean) = {
    DB localTx { implicit session =>
      sql"UPDATE answer SET accepted = ${accepted} WHERE id = ${answerId}"
        .update().apply()
    }
  }
  /** Lists ids of all assets linked to a question. */
  override def getAssetIdsByQuestionId(questionId: Long): List[Long] = {
    DB readOnly { implicit session =>
      sql"SELECT * FROM question2assets WHERE question_id = ${questionId}".map(rs => rs.long("asset_id")).list().apply()
    }
  }
  /** Bulk-loads permutations for a paper from a CSV file via MySQL LOAD DATA. */
  override def loadPermutationsCSV(csv: String, paperId: Long): Boolean = {
    DB localTx { implicit session =>
      val time = new DateTime()
      sql"""LOAD DATA LOCAL INFILE ${csv}
        INTO TABLE permutations
        COLUMNS TERMINATED BY ','
        OPTIONALLY ENCLOSED BY '"'
        ESCAPED BY '"'
        LINES TERMINATED BY '\\n'
        IGNORE 1 LINES
        (group_name, method_index, snippet_filename, pdf_path, method_on_top ,relative_height_top, relative_height_bottom, distanceMinIndexMax)
        SET create_time = $time, paper_id = $paperId""".update().apply()
    }
    true
  }
  /** Inserts a single permutation row for a paper and returns its generated id. */
  override def createPermutation(permutation: Permutation, paperId: Long): Long = {
    DB localTx { implicit session =>
      sql"""INSERT INTO permutations(create_time, group_name, method_index, snippet_filename, pdf_path, method_on_top, relative_height_top, relative_height_bottom, paper_id)
         VALUES(NOW(), ${permutation.groupName}, ${permutation.methodIndex}, ${permutation.snippetFilename}, ${permutation.pdfPath}, ${permutation.methodOnTop},
         ${permutation.relativeHeightTop}, ${permutation.relativeHeightBottom}, ${paperId})"""
        .updateAndReturnGeneratedKey().apply()
    }
  }
  /** Lists every permutation row. */
  override def getAllPermutations(): List[Permutation] = {
    DB readOnly { implicit session =>
      sql"SELECT * FROM permutations".map(rs =>
        Permutation(rs.long("id"), rs.string("group_name"), rs.string("method_index"), rs.string("snippet_filename"), rs.string("pdf_path"), rs.boolean("method_on_top"), rs.long("state"), rs.int("excluded_step"), rs.double("relative_height_top"), rs.double("relative_height_bottom"), rs.long("distanceMinIndexMax"))
      ).list().apply()
    }
  }
  /** Loads one permutation by id. */
  override def getPermutationById(id: Long): Option[Permutation] = {
    DB readOnly { implicit session =>
      sql"SELECT * FROM permutations WHERE id = ${id}".map(rs =>
        Permutation(rs.long("id"), rs.string("group_name"), rs.string("method_index"), rs.string("snippet_filename"), rs.string("pdf_path"), rs.boolean("method_on_top"), rs.long("state"), rs.int("excluded_step"), rs.double("relative_height_top"), rs.double("relative_height_bottom"), rs.long("distanceMinIndexMax"))
      ).single().apply()
    }
  }
  /** Lists open (state = 0) permutations with an exact group name. */
  override def getAllOpenByGroupName(groupName: String): List[Permutation] = {
    DB readOnly { implicit session =>
      sql"SELECT * FROM permutations WHERE group_name = ${groupName} AND state = 0".map(rs =>
        Permutation(rs.long("id"), rs.string("group_name"), rs.string("method_index"), rs.string("snippet_filename"), rs.string("pdf_path"), rs.boolean("method_on_top"), rs.long("state"), rs.int("excluded_step"), rs.double("relative_height_top"), rs.double("relative_height_bottom"), rs.long("distanceMinIndexMax"))
      ).list().apply()
    }
  }
  /** Closes a permutation by recording which answer excluded it and at which step. */
  override def updateStateOfPermutationId(id: Long, becauseOfId: Long, excludedByStep: Int = 0) {
    DB localTx { implicit session =>
      sql"UPDATE permutations SET state = ${becauseOfId}, excluded_step = ${excludedByStep} WHERE id = ${id}"
        .update().apply()
    }
  }
  /** Lists open permutations whose group name starts with the given prefix. */
  override def getAllOpenGroupsStartingWith(partialGroupName: String): List[Permutation] = {
    getAllPermutationsWithStateEquals(0).filter(r => r.groupName.startsWith(partialGroupName))
  }
  /** Lists all questions (id and permutation only). */
  override def getAllQuestions: List[Question] = {
    DB readOnly { implicit session =>
      sql"SELECT * FROM question".map(rs => Question(rs.long("id"), rs.long("permutation"))).list().apply()
    }
  }
  /** Lists all permutations with the given state value. */
  override def getAllPermutationsWithStateEquals(state: Long): List[Permutation] = {
    DB readOnly { implicit session =>
      sql"SELECT * FROM permutations WHERE state = ${state}".map(rs =>
        Permutation(rs.long("id"), rs.string("group_name"), rs.string("method_index"), rs.string("snippet_filename"), rs.string("pdf_path"), rs.boolean("method_on_top"), rs.long("state"), rs.int("excluded_step"), rs.double("relative_height_top"), rs.double("relative_height_bottom"), rs.long("distanceMinIndexMax"))).list().apply()
    }
  }
  /** Lists all accepted answers. */
  override def allAnswers(): List[Answer] = {
    DB readOnly { implicit session =>
      sql"SELECT * FROM answer WHERE accepted = 1".map(rs =>
        Answer(rs.long("id"), rs.jodaDateTime("time"), rs.long("question_id"), rs.string("answer_json"), rs.boolean("accepted"))
      ).list().apply()
    }
  }
  /** Returns the permutation id referenced by a question. */
  override def getPermutationIdByQuestionId(qId: Long): Option[Long] = {
    DB readOnly { implicit session =>
      sql"SELECT permutation FROM question WHERE id = ${qId}".map(rs =>
        rs.long("permutation")).single().apply()
    }
  }
  /** Accepted answers whose JSON mentions the given snippet file name (substring match). */
  override def getAllAnswersForSnippet(fileName: String): List[Answer] = {
    allAnswers.filter(f => f.answerJson.contains(fileName))
  }
  /** Ids of questions that received an answer after the given instant. */
  override def getQuestionIDsAnsweredSince(date: DateTime): List[Long] = DB readOnly { implicit session =>
    sql"SELECT question_id FROM answer WHERE time > ${date}".map(rs => rs.long("question_id")).list().apply()
  }
}
| manuelroesch/PaperValidator | app/helper/questiongenerator/dao/BallotDAO.scala | Scala | mit | 10,073 |
package me.frmr.kafka.detective.matchfinder
import me.frmr.kafka.detective.api._
import org.scalatest._
/**
 * Verifies that KeyEqualsFinder matches a test message against a reference window by
 * key equality, for both String keys and Array[Byte] keys.
 */
class KeyEqualsFinderSpec extends FlatSpec with Matchers {
  "KeyEqualsFinder" should "locate string matches in reference window" in new StringContext {
    val result = new KeyEqualsFinder().find(testMessage, referenceWindowWithMatch)
    result.isInstanceOf[FoundMatch] shouldBe true
    result.asInstanceOf[FoundMatch].matchingReferenceMessage.keyInstance shouldBe testMessage.keyInstance
  }
  it should "not match different strings in reference window" in new StringContext {
    val result = new KeyEqualsFinder().find(testMessage, referenceWindowWithoutMatch)
    result.isInstanceOf[NoMatchFound] shouldBe true
  }
  it should "locate byte array matches in reference window" in new ByteArrayContext {
    val result = new KeyEqualsFinder().find(testMessage, referenceWindowWithMatch)
    result.isInstanceOf[FoundMatch] shouldBe true
    // Arrays don't implement value equality, so assert element-wise instead of with shouldBe.
    val referenceArray = result.asInstanceOf[FoundMatch].matchingReferenceMessage.keyInstance.asInstanceOf[Array[Byte]]
    val testArray = testMessage.keyInstance.asInstanceOf[Array[Byte]]
    referenceArray should contain theSameElementsInOrderAs testArray
  }
  it should "not match different byte arrays in reference window" in new ByteArrayContext {
    val result = new KeyEqualsFinder().find(testMessage, referenceWindowWithoutMatch)
    result.isInstanceOf[NoMatchFound] shouldBe true
  }
  /** Fixture with String-keyed envelopes; `subject` is used as both key and value. */
  trait StringContext {
    // Wraps `subject` into a monitor envelope at offset/partition/timestamp zero.
    def makeObjectEnvelope(subject: String) = {
      MonitorObjectEnvelope(
        0L,
        0,
        0,
        subject.getClass,
        subject,
        subject.getClass,
        subject
      )
    }
    val testMessage = makeObjectEnvelope("abcd")
    // Window containing an envelope whose key equals the test message's key.
    val referenceWindowWithMatch = Seq(
      makeObjectEnvelope("zzzzz"),
      makeObjectEnvelope("12345"),
      makeObjectEnvelope("09999"),
      makeObjectEnvelope("abcd"),
      makeObjectEnvelope("lklklklk")
    )
    // Same window with the matching envelope removed.
    val referenceWindowWithoutMatch = Seq(
      makeObjectEnvelope("zzzzz"),
      makeObjectEnvelope("12345"),
      makeObjectEnvelope("09999"),
      makeObjectEnvelope("lklklklk")
    )
  }
  /** Fixture identical to StringContext, but keys/values are UTF-8 byte arrays. */
  trait ByteArrayContext {
    def makeObjectEnvelope(subject: String) = {
      val byteSubject = subject.getBytes("UTF-8")
      MonitorObjectEnvelope(
        0L,
        0,
        0,
        byteSubject.getClass,
        byteSubject,
        byteSubject.getClass,
        byteSubject
      )
    }
    val testMessage = makeObjectEnvelope("abcd")
    val referenceWindowWithMatch = Seq(
      makeObjectEnvelope("zzzzz"),
      makeObjectEnvelope("12345"),
      makeObjectEnvelope("09999"),
      makeObjectEnvelope("abcd"),
      makeObjectEnvelope("lklklklk")
    )
    val referenceWindowWithoutMatch = Seq(
      makeObjectEnvelope("zzzzz"),
      makeObjectEnvelope("12345"),
      makeObjectEnvelope("09999"),
      makeObjectEnvelope("lklklklk")
    )
  }
}
| farmdawgnation/kafka-detective | api/src/test/scala/me/frmr/kafka/detective/matchfinder/KeyEqualsFinderSpec.scala | Scala | apache-2.0 | 2,933 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Benchmarks **
** / __/ __// _ | / / / _ | __ / // __/ Adam Burmister **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ 2012, Google, Inc **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ 2013, Jonas Fonseca **
** |/____/ **
\\* */
// The ray tracer code in this file is written by Adam Burmister. It
// is available in its original form from:
//
// http://labs.flog.co.nz/raytracer/
//
// Ported from the v8 benchmark suite by Google 2012.
// Ported from the Dart benchmark_harness to Scala.js by Jonas Fonseca 2013
package tracer
/**
 * Result of a ray/shape intersection test. The default instance (all defaults,
 * `isHit = false`) serves as the "no intersection" marker.
 */
case class IntersectionInfo(shape: Shape = null,
                            position: Vector = null,
                            normal: Vector = null,
                            color: Color = new Color(0.0, 0.0, 0.0),
                            distance: Double = 0.0,
                            isHit: Boolean = false,
                            hitCount: Int = 0)
/** Base class for scene shapes, each with a position vector and a surface material. */
abstract class Shape(val position: Vector, val material: Material) {
  /** Tests `ray` against this shape; a miss yields an IntersectionInfo with isHit = false. */
  def intersect(ray: Ray): IntersectionInfo
  override def toString = "Shape"
}
/** An infinite plane defined by its normal vector (`position`) and scalar offset `d`. */
class Plane(position: Vector, val d: Double, material: Material)
  extends Shape(position, material) {

  /** Ray/plane intersection; returns a default (non-hit) IntersectionInfo on a miss. */
  def intersect(ray: Ray): IntersectionInfo = {
    // Denominator of the ray/plane equation: zero means the ray is parallel to the plane.
    val Vd = this.position.dot(ray.direction);
    if (Vd == 0)
      return new IntersectionInfo() // no intersection
    // Parametric distance along the ray to the plane.
    val t = -(this.position.dot(ray.position) + this.d) / Vd;
    if (t <= 0)
      return new IntersectionInfo() // no intersection (plane lies behind the ray origin)
    val intersection = ray.position + ray.direction.multiplyScalar(t)
    val color = if (this.material.hasTexture) {
      // Build two in-plane basis vectors and project the hit point onto them as (u, v).
      val vU = new Vector(this.position.y, this.position.z, -this.position.x)
      val vV = vU.cross(this.position)
      val u: Double = intersection.dot(vU)
      val v: Double = intersection.dot(vV)
      this.material.getColor(u, v)
    } else {
      this.material.getColor(0, 0)
    }
    new IntersectionInfo(
      shape = this,
      isHit = true,
      position = intersection,
      normal = this.position,
      distance = t,
      color = color
    )
  }
  override def toString = s"Plane [$position, d=$d]"
}
/** A sphere centered at `position` with the given `radius`. */
class Sphere(position: Vector, radius: Double, material: Material)
  extends Shape(position, material) {

  /** Ray/sphere intersection via the quadratic formula; returns a non-hit info on a miss. */
  def intersect(ray: Ray): IntersectionInfo = {
    // Vector from the sphere center to the ray origin.
    val dst = ray.position - this.position
    // Quadratic coefficients (the "a" term is taken as 1, assuming a normalized ray direction).
    val B = dst.dot(ray.direction)
    val C = dst.dot(dst) - (this.radius * this.radius)
    // Discriminant: non-positive means the ray misses (or only grazes) the sphere.
    val D = (B * B) - C
    if (D <= 0)
      return new IntersectionInfo(null) // no intersection
    // Nearer of the two roots, i.e. the first surface the ray crosses.
    val distance = (-B) - math.sqrt(D)
    val pos = ray.position + ray.direction.multiplyScalar(distance)
    new IntersectionInfo(
      shape = this,
      isHit = true,
      position = pos,
      normal = (pos - this.position).normalize,
      distance = distance,
      color = this.material.getColor(0, 0)
    )
  }
  override def toString = s"Sphere [position=$position, radius=$radius]"
}
| cedricviaccoz/scala-native | benchmarks/src/main/scala/tracer/Shapes.scala | Scala | bsd-3-clause | 3,217 |
package com.sksamuel.elastic4s
import java.util
import com.sksamuel.elastic4s.ElasticDsl._
import com.sksamuel.elastic4s.mappings.FieldType._
import org.scalatest.{ WordSpec, Matchers }
import org.scalatest.mock.MockitoSugar
/** @author Stephen Samuel */
/**
 * Integration test of index-mapping operations (get, put, delete) against a
 * live Elasticsearch node provided by ElasticSugar, using the elastic4s DSL.
 */
class MappingTest extends WordSpec with MockitoSugar with ElasticSugar with Matchers {
  // Shared fixture: index "q" with type "r" holding a stored, whitespace-analyzed
  // string field "a" and a plain string field "b". All tests below mutate this
  // same index, so their relative order matters.
  client.execute {
    create index "q" mappings {
      "r" as Seq(
        field name "a" withType StringType stored true analyzer WhitespaceAnalyzer,
        field name "b" withType StringType
      )
    } analysis {
      CustomAnalyzerDefinition("my_analyzer", WhitespaceTokenizer, LowercaseTokenFilter)
    }
  }.await
  "mapping get" should {
    "return schema" in {
      val mapping = client.execute {
        get mapping "q" / "r"
      }.await
      // The mapping source is a nested java.util.Map; drill into "properties".
      val map = mapping.mappings().get("q").get("r").sourceAsMap()
      val a = map.get("properties").asInstanceOf[util.Map[String, Any]].get("a").asInstanceOf[util.Map[String, Any]]
      a.get("type") shouldBe "string"
      a.get("store") shouldBe true
      a.get("analyzer") shouldBe "whitespace"
      val b = map.get("properties").asInstanceOf[util.Map[String, Any]].get("b").asInstanceOf[util.Map[String, Any]]
      b.get("type") shouldBe "string"
    }
  }
  "mapping put" should {
    "add new fields" in {
      // Adds fields "c" and "d" to the existing "r" mapping.
      client.execute {
        put mapping "q" / "r" as Seq(
          field name "c" withType FloatType boost 1.2,
          field name "d" withType StringType analyzer FrenchLanguageAnalyzer
        ) ignoreConflicts true
      }.await
      val mapping = client.execute {
        get mapping "q" / "r"
      }.await
      val map = mapping.mappings().get("q").get("r").sourceAsMap()
      val c = map.get("properties").asInstanceOf[util.Map[String, _]].get("c").asInstanceOf[util.Map[String, _]]
      c.get("type") shouldBe "float"
      c.get("boost") shouldBe 1.2
      val d = map.get("properties").asInstanceOf[util.Map[String, _]].get("d").asInstanceOf[util.Map[String, _]]
      d.get("type") shouldBe "string"
      d.get("analyzer") shouldBe "french"
    }
    "update existing fields" in {
      client.execute {
        put mapping "q" / "r" as Seq(
          field name "a" withType StringType boost 1.2,
          field name "b" withType StringType analyzer GermanLanguageAnalyzer
        ) ignoreConflicts true
      }.await
      val mapping = client.execute {
        get mapping "q" / "r"
      }.await
      val map = mapping.mappings().get("q").get("r").sourceAsMap()
      val a = map.get("properties").asInstanceOf[util.Map[String, _]].get("a").asInstanceOf[util.Map[String, _]]
      a.get("boost") shouldBe 1.2
      val b = map.get("properties").asInstanceOf[util.Map[String, _]].get("b").asInstanceOf[util.Map[String, _]]
      // NOTE(review): the put sets `analyzer`, but the assertion reads
      // "search_analyzer" — presumably ES reports the merged value under that
      // key for this version; confirm against the cluster response.
      b.get("search_analyzer") shouldBe "german"
    }
  }
  "mapping delete" should {
    "remove mappings and data" in {
      client.execute {
        delete mapping "q" / "r"
      }.await
      val mapping = client.execute {
        get mapping "q" / "r"
      }.await
      mapping.mappings().isEmpty shouldBe true
    }
  }
}
| alexander-svendsen/elastic4s | elastic4s-core/src/test/scala/com/sksamuel/elastic4s/MappingTest.scala | Scala | apache-2.0 | 3,122 |
package org.deepdive.extraction
import akka.actor._
import akka.routing._
import akka.pattern.{ask, pipe}
import akka.util.Timeout
import org.deepdive.settings._
import org.deepdive.Context
import org.deepdive.extraction._
import org.deepdive.extraction.ExtractorRunner._
import org.deepdive.extraction.datastore._
import org.deepdive.extraction.datastore.ExtractionDataStore._
import org.deepdive.Logging
import scala.util.{Try, Success, Failure}
import scala.concurrent._
import scala.concurrent.duration._
import scala.sys.process._
import rx.lang.scala.subjects._
import play.api.libs.json._
import scala.util.Random
/* Companion object to the ExtractorRunner */
object ExtractorRunner {
  /** Props for creating an ExtractorRunner actor backed by the given JSON data store. */
  def props(dataStore: JsonExtractionDataStore) = Props(classOf[ExtractorRunner], dataStore)
  // Messages
  sealed trait Message
  // Assigns an extraction task to an idle runner.
  case class SetTask(task: ExtractionTask) extends Message
  // Carries a batch of raw input lines to be routed to the worker processes.
  case class RouteData(data: List[String]) extends Message
  // Signals that all input data has been delivered.
  case object AllDataDone extends Message
  // Self-sent while finishing: run the extractor's after-script.
  case object ExecuteAfterScript
  // Self-sent while finishing: acknowledge the task sender and stop.
  case object Shutdown
  // Periodic self-message used to log worker status.
  case object PrintStatus
  // States of the FSM below.
  sealed trait State
  case object Idle extends State
  case object Running extends State
  case object Finishing extends State
  // State data of the FSM below.
  sealed trait Data
  case object Uninitialized extends Data
  // In-flight task: the task itself, the actor that submitted it, and the
  // router over the child worker processes.
  case class Task(task: ExtractionTask, sender: ActorRef, workers: Router) extends Data
}
/* Runs a single extractor by executing its before script, UDF, and after script */
class ExtractorRunner(dataStore: JsonExtractionDataStore) extends Actor
  with ActorLogging with FSM[State, Data] {
  import ExtractorRunner._
  // Execute futures using the current Akka dispatcher
  import context.dispatcher
  // Effectively unbounded ask timeout; data delivery below awaits with the same bound.
  implicit val timeout = Timeout(1337.hours)
  // Properties to start workers
  def workerProps = ProcessExecutor.props
  // Periodically print the status
  val scheduledStatus = context.system.scheduler.schedule(30.seconds, 30.seconds, self, PrintStatus)
  override def preStart() {
    log.info("waiting for tasks")
  }
  override def postStop() {
    // Stop the periodic status logging when the actor dies.
    scheduledStatus.cancel()
  }
  // Start in the idle state
  startWith(Idle, Uninitialized)
  when(Idle) {
    case Event(SetTask(task), Uninitialized) =>
      log.info(s"Received task=${task.extractor.name}. Executing")
      // Execute the before script. Fail if the script fails.
      task.extractor.beforeScript.foreach { beforeScript =>
        log.info("Executing before script.")
        executeScriptOrFail(beforeScript, sender)
      }
      // Start the children workers
      val workers = startWorkers(task)
      // Schedule the input data to be sent to myself.
      // We will then forward the data to our workers
      Future { sendData(task, workers) }
      goto(Running) using Task(task, sender, workers)
  }
  when(Running) {
    case Event(Terminated(actor), Task(task, taskSender, workers)) =>
      // A worker has terminated, remove it from our list
      val newWorkers = workers.removeRoutee(actor)
      log.debug(s"worker=${actor.path.name} has terminated. Waiting for ${newWorkers.routees.size} others.")
      // If we have no workers left, move to the next state
      newWorkers.routees.size match {
        case 0 =>
          log.info(s"All workers are done. Finishing up.")
          self ! ExecuteAfterScript
          self ! Shutdown
          goto(Finishing) using(Task(task, taskSender, newWorkers))
        case _ =>
          stay using(Task(task, taskSender, newWorkers))
      }
    case Event(ProcessExecutor.OutputData(chunk), Task(task, taskSender, workers)) =>
      // Don't close over this
      val _sender = sender
      // We write the data to the data store, asynchronously
      Future {
        log.debug(s"adding chunk of size=${chunk.size} data store.")
        val jsonData = chunk.map(Json.parse).map(_.asInstanceOf[JsObject])
        dataStore.addBatch(jsonData.iterator, task.extractor.outputRelation)
      }.onComplete {
        // Ack the worker so it can continue; on failure, propagate and stop.
        case Success(_) => _sender ! "OK!"
        case Failure(exception) =>
          taskSender ! Status.Failure(exception)
          context.stop(self)
          throw exception
      }
      stay
    case Event(ProcessExecutor.ProcessExited(exitCode), Task(task, taskSender, workers)) =>
      // A worker process has exited. If successful, continue.
      // If the process failed, shutdown and respond with failure
      exitCode match {
        case 0 => stay
        case exitCode =>
          taskSender ! Status.Failure(new RuntimeException(s"process exited with exit_code=${exitCode}"))
          stop
      }
    case Event(PrintStatus, Task(task, taskSender, workers)) =>
      log.info(s"Status: ${workers.routees.size} workers are running.")
      stay
  }
  when(Finishing) {
    case(Event(ExecuteAfterScript, Task(task, taskSender, workers))) =>
      // Execute the after script. Fail if the script fails.
      task.extractor.afterScript.foreach { afterScript =>
        log.info("Executing after script.")
        executeScriptOrFail(afterScript, taskSender)
      }
      stay
    case(Event(Shutdown, Task(task, taskSender, workers))) =>
      // All done, shutting down
      log.info(s"Shutting down")
      taskSender ! "Done!"
      stop
  }
  /* Starts all workers, watches them, and returns a round-robin fashion router */
  private def startWorkers(task: ExtractionTask) : Router = {
    log.info(s"Starting ${task.extractor.parallelism} children process workers")
    // Start workers according to the specified parallelism
    val workers = (1 to task.extractor.parallelism).map { i =>
      val worker = context.actorOf(workerProps, s"processExecutor${i}")
      // Deathwatch
      context.watch(worker)
      ActorRefRoutee(worker)
    }
    val router = Router(RoundRobinRoutingLogic(), workers)
    // Send start broadcast to all workers
    val startMessage = ProcessExecutor.Start(task.extractor.udf, task.extractor.outputBatchSize)
    router.route(Broadcast(startMessage), self)
    router
  }
  /* Queries the data store and gets all the data */
  private def sendData(task: ExtractionTask, workers: Router) {
    log.info(s"Getting data from the data store and sending it to the workers. query='${task.extractor.inputQuery}'")
    // Figure out where to get the input from
    val extractorInput = task.extractor.inputQuery match {
      case CSVInputQuery(filename, seperator) =>
        FileDataUtils.queryAsJson[Unit](filename, seperator)_
      case DatastoreInputQuery(query) =>
        val totalBatchSize = workers.routees.size * task.extractor.inputBatchSize
        dataStore.queryAsJson[Unit](query, Option(totalBatchSize))_
    }
    // Forward output to the workers
    extractorInput { iterator =>
      val batchSize = workers.routees.size * task.extractor.inputBatchSize
      iterator map(_.toString) grouped(batchSize) foreach { chunk =>
        // Split each chunk into one batch per routee, round-robin via the router logic.
        val futures = chunk.grouped(task.extractor.inputBatchSize).map { batch =>
          val msg = ProcessExecutor.Write(batch.mkString("\n"))
          val destinationWorker = workers.logic.select(msg, workers.routees).asInstanceOf[ActorRefRoutee].ref
          destinationWorker ? msg
        }
        val allRouteeAcks = Future.sequence(futures)
        // Wait for all workers to write the data to the output stream to avoid overloading them
        Await.result(allRouteeAcks, 1337.hours)
      }
    }
    // Notify all workers that they don't receive more data
    workers.route(Broadcast(ProcessExecutor.CloseInputStream), self)
    log.debug("all data was sent to workers.")
  }
  // Executes a given command. If it fails, shutdown and respond to the sender with failure.
  private def executeScriptOrFail(script: String, failureReceiver: ActorRef) : Unit = {
    executeCmd(script) match {
      case Success(_) => // All good. We're done
      case Failure(exception) =>
        log.error(exception.toString)
        failureReceiver ! Status.Failure(exception)
        context.stop(self)
        throw new RuntimeException(exception.toString)
    }
  }
  /*
   * Executes a command.
   * Returns Success if the process exits with exit value 0.
   * Returns Failure if the process fails, or exits with a value != 0.
   */
  def executeCmd(cmd: String) : Try[Int] = {
    // Make the file executable, if necessary
    val file = new java.io.File(cmd)
    if (file.isFile) file.setExecutable(true, false)
    log.info(s"""Executing: "$cmd" """)
    val processLogger = ProcessLogger(line => log.info(line))
    Try(cmd!(processLogger)) match {
      case Success(0) => Success(0)
      case Success(errorExitValue) =>
        Failure(new RuntimeException(s"Script exited with exit_value=$errorExitValue"))
      case Failure(ex) => Failure(ex)
    }
  }
}
package com.github.mdr.mash.repl.browser.handler
import com.github.mdr.mash.input.InputAction
import com.github.mdr.mash.repl._
import com.github.mdr.mash.repl.browser.ObjectBrowserActions.{ PreviousPage, _ }
import com.github.mdr.mash.repl.browser.TextLinesBrowserState
trait TextLinesBrowserActionHandler {
  self: ObjectBrowserActionHandler with Repl ⇒

  /**
   * Handles an input action for the text-lines browser. Actions common to all
   * browsers take precedence over text-lines-specific ones; anything else is
   * ignored.
   */
  protected def handleTextLinesBrowserAction(action: InputAction, browserState: TextLinesBrowserState): Unit = {
    val handler = commonBrowserActionHandler(browserState) orElse textLinesBrowserActionHandler(browserState)
    handler.lift(action)
  }

  /** Actions specific to browsing plain text lines: navigation plus tree view. */
  private def textLinesBrowserActionHandler(browserState: TextLinesBrowserState): PartialFunction[InputAction, Unit] = {
    case ViewAsTree   ⇒ viewAsTree(browserState)
    case NextItem     ⇒ updateState(browserState.nextItem(terminalRows))
    case PreviousItem ⇒ updateState(browserState.previousItem(terminalRows))
    case FirstItem    ⇒ updateState(browserState.firstItem(terminalRows))
    case LastItem     ⇒ updateState(browserState.lastItem(terminalRows))
    case NextPage     ⇒ updateState(browserState.nextPage(terminalRows))
    case PreviousPage ⇒ updateState(browserState.previousPage(terminalRows))
  }
}
| mdr/mash | src/main/scala/com/github/mdr/mash/repl/browser/handler/TextLinesBrowserActionHandler.scala | Scala | mit | 1,234 |
package binconcifartests
import chisel3._
import chisel3.iotesters.{PeekPokeTester, Driver, ChiselFlatSpec}
import scala.util.Random
import binconcifar.SSILayer
import scala.collection.mutable.ArrayBuffer
/**
 * Cycle-accurate test for SSILayer: streams an increasing test image through
 * the layer at input throughput tPutIn and checks the output at throughput
 * tPutOut, while randomly inserting valid/stall cycles to exercise rate
 * conversion in both directions.
 */
class SSILayerTests[T <: Bits]( c : SSILayer[T] ) extends PeekPokeTester( c ) {
  val myRand = new Random
  // Stimulus: bufLen*16 monotonically increasing words, wrapped to 16 bits.
  val testImg = ( 0 until c.bufLen*16 ).map( i => {
    BigInt( i % ( 1 << 16 ) )
  }).toList
  // Input cycles needed to push the whole image through at tPutIn words/cycle.
  val noCyc = c.bufLen*16 / c.tPutIn
  var cyc : Int = 0          // input cycles completed
  var outIdx : Int = 0       // which buffer of the image we expect on the output
  var stallCycIn : Int = 0   // remaining stall cycles on the input side
  var stallCycOut : Int = 0  // stall inserted to align with the output rate
  var offsetOut : Int = 0    // offset (in tPutOut groups) within the current buffer
  while ( cyc < noCyc ) {
    // Randomly deassert valid ~25% of the time to exercise back-pressure.
    val vldMask = myRand.nextInt( 4 ) != 0
    val partCyc = cyc % scala.math.max( c.ratioOut, 1 )
    // When widening (tPutOut > tPutIn), only stall at the start of an output group.
    if ( c.tPutOut > c.tPutIn && partCyc == 0 && !vldMask )
      stallCycOut = 1
    else
      stallCycOut = 0
    poke( c.io.dataIn.valid, stallCycIn == 0 && stallCycOut == 0 )
    if ( c.tPutOut > c.tPutIn ) {
      // Widening: index into the image by output group plus partial input offset.
      val wholeCyc = ( cyc - partCyc )/c.ratioOut
      for ( i <- 0 until c.tPutIn )
        poke( c.io.dataIn.bits( i ), testImg( ( wholeCyc)*c.tPutOut + ( partCyc)*c.tPutIn + i ) )
    } else {
      for ( i <- 0 until c.tPutIn )
        poke( c.io.dataIn.bits( i ), testImg( i + cyc*c.tPutIn ) )
    }
    if ( stallCycIn == 0 && stallCycOut == 0 ) {
      // Narrowing: the input must wait ratioIn cycles between word groups.
      if ( c.tPutIn >= c.tPutOut )
        stallCycIn = c.ratioIn
      cyc = cyc + 1
    }
    if ( stallCycIn > 1 || stallCycIn == 1 & vldMask )
      stallCycIn = stallCycIn - 1
    val vldOut = peek( c.io.dataOut.valid ) == 1
    // peek( c.io.dataOut.bits )
    if ( vldOut ) {
      // Compare the output group against the expected slice of the test image.
      for ( j <- 0 until c.tPutOut )
        // expect( c.io.dataOut.bits( j ), testImg( ( outIdx + 1 )*c.bufLen - (offsetOut + 1)*c.tPutOut + j ) )
        expect( c.io.dataOut.bits( j ), testImg( ( outIdx)*c.bufLen + (offsetOut)*c.tPutOut + j ) )
      if ( offsetOut + 1 >= c.ratioIn ) {
        offsetOut = 0
        outIdx = outIdx + 1
      } else
        offsetOut = offsetOut + 1
    }
    step( 1 )
  }
}
class SSILayerSuite extends ChiselFlatSpec {
  behavior of "SSILayer"

  // (input throughput, output throughput) combinations to exercise, covering
  // narrowing, widening and the pass-through case.
  val tPutPairs = List( ( 256, 256 ), ( 256, 64 ), ( 256, 16), (256,4), ( 64, 256 ), ( 16, 256 ), ( 4, 256 ) )
  tPutPairs.foreach { pair =>
    println( "pair = " + pair)
    Driver(() => new SSILayer( UInt( 16.W ), pair._1, pair._2 ), "verilator", true )( c => new SSILayerTests( c ) )
  }
}
| da-steve101/binary_connect_cifar | src/test/scala/SSILayerSuite.scala | Scala | gpl-3.0 | 2,342 |
package me.yingrui.segment.filter.disambiguation
import me.yingrui.segment.core.SegmentResult
import me.yingrui.segment.core.disambiguation.DisambiguationToSerialLabels._
import me.yingrui.segment.crf.CRFClassifier
import me.yingrui.segment.dict.POSUtil._
import me.yingrui.segment.tools.CorpusLoader._
import org.mockito.Matchers._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSuite, Matchers}
/**
 * Tests for CRFDisambiguationFilter: checks that serial disambiguation labels
 * (separate, merge, move-last-character) are applied correctly to a
 * SegmentResult. The CRF classifier is mocked to return a fixed label sequence.
 */
class CRFDisambiguationFilterTest extends FunSuite with Matchers with MockitoSugar {
  test("should separate union words") {
    val segmentResult = convertToSegmentResult("19980101-01-003-002/m 很/d 美的/nt")
    // LABEL_U marks the second word to be split into single characters.
    val labels = Array(LABEL_A, LABEL_U)
    createFilter(segmentResult, labels).filtering()
    segmentResult.map(_.name) should be(Array("很", "美", "的"))
    segmentResult.map(_.pos) should be(Array(POS_D, POS_NT, POS_NT))
  }
  test("should separate union four character words") {
    val segmentResult = convertToSegmentResult("19980101-01-003-002/m 格力电器/nt")
    val labels = Array(LABEL_U)
    createFilter(segmentResult, labels).filtering()
    // A four-character union word is split into two two-character words.
    segmentResult.map(_.name) should be(Array("格力", "电器"))
    segmentResult.map(_.pos) should be(Array(POS_NT, POS_NT))
  }
  test("should merge two separated words") {
    val segmentResult = convertToSegmentResult("19980101-01-003-002/m 保护/v 人/n 体/j 健康/n")
    // SB..SE brackets the words that should be merged into one.
    val labels = Array(LABEL_A, LABEL_SB, LABEL_SE, LABEL_A)
    createFilter(segmentResult, labels).filtering()
    segmentResult.map(_.name) should be(Array("保护", "人体", "健康"))
    segmentResult.map(_.pos) should be(Array(POS_V, POS_N, POS_N))
  }
  test("should merge separated words") {
    val segmentResult = convertToSegmentResult("19980101-01-003-002/m 在/p 半/m 梦/n 半/m 醒/n 之间/f")
    // SB, SM..., SE merges a run of more than two words.
    val labels = Array(LABEL_A, LABEL_SB, LABEL_SM, LABEL_SM, LABEL_SE, LABEL_A)
    createFilter(segmentResult, labels).filtering()
    segmentResult.map(_.name) should be(Array("在", "半梦半醒", "之间"))
    segmentResult.map(_.pos) should be(Array(POS_P, POS_M, POS_F))
  }
  test("should move last character to next words") {
    val segmentResult = convertToSegmentResult("19980101-01-003-002/m 精神病/n 人/n")
    // LC moves the last character of a word onto the following SE word.
    val labels = Array(LABEL_LC, LABEL_SE)
    createFilter(segmentResult, labels).filtering()
    segmentResult.map(_.name) should be(Array("精神", "病人"))
    segmentResult.map(_.pos) should be(Array(POS_N, POS_N))
  }
  test("should do nothing when labels LC LL are wrong") {
    val segmentResult = convertToSegmentResult("19980101-01-003-002/m 似/p 乎/p 精/n 神/n")
    // An LC label on a single-character word is invalid and must be ignored.
    val labels = Array(LABEL_SB, LABEL_SE, LABEL_LC, LABEL_SE)
    createFilter(segmentResult, labels).filtering()
    segmentResult.map(_.name) should be(Array("似乎", "精", "神"))
    segmentResult.map(_.pos) should be(Array(POS_P, POS_N, POS_N))
  }
  // Builds a filter whose mocked classifier always returns `labels` for the
  // words of `segmentResult`; all features are reported as known.
  private def createFilter(segmentResult: SegmentResult, labels: Array[String]): CRFDisambiguationFilter = {
    val classifier = mock[CRFClassifier]
    when(classifier.findBestLabels(any())).thenReturn(labels)
    val words: Seq[Seq[String]] = List(segmentResult.map(_.name))
    when(classifier.isFeatureExists(any())).thenReturn(true)
    val filter = new CRFDisambiguationFilter(classifier)
    filter.setSegmentResult(segmentResult)
    filter
  }
}
| yingrui/mahjong | lib-segment/src/test/scala/me/yingrui/segment/filter/disambiguation/CRFDisambiguationFilterTest.scala | Scala | gpl-3.0 | 3,358 |
/*
* Copyright 2018 Vladimir Konstantinov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.illfaku.korro.internal.client
import com.github.illfaku.korro.dto.{HttpRequest, HttpResponse}
import com.github.illfaku.korro.internal.common.{HttpInstructions, TimedOut}
import akka.actor.{ActorRef, Status}
import io.netty.channel.{ChannelHandlerContext, SimpleChannelInboundHandler}
import java.util.concurrent.{ScheduledFuture, TimeoutException}
import scala.util.control.NoStackTrace
/**
 * One-shot Netty handler: sends `req` when the channel becomes active, replies
 * to `originator` with exactly one message — the HttpResponse, or a
 * Status.Failure on timeout / error / premature close — and then closes the
 * channel.
 */
private[client] class HttpChannelHandler(req: HttpRequest, originator: ActorRef, instructions: HttpInstructions)
  extends SimpleChannelInboundHandler[HttpResponse] {

  private var timeoutTask: ScheduledFuture[_] = _

  // Set once the originator has been answered. Guards against a second message:
  // complete() closes the channel, which fires channelInactive, which would
  // otherwise send a spurious "Channel was closed before response." failure
  // AFTER a successful response. Only touched on the channel's event loop, so a
  // plain var is safe.
  private var completed = false

  override def handlerAdded(ctx: ChannelHandlerContext) = {
    // Arm the request timeout as soon as the handler is installed.
    timeoutTask = ctx.channel.eventLoop.schedule(
      new Runnable { override def run() = ctx.channel.pipeline.fireUserEventTriggered(TimedOut) },
      instructions.requestTimeout.length,
      instructions.requestTimeout.unit
    )
  }

  override def channelActive(ctx: ChannelHandlerContext): Unit = {
    // Fire the request once the connection is established.
    ctx.writeAndFlush(req, ctx.voidPromise)
    super.channelActive(ctx)
  }

  override def channelRead0(ctx: ChannelHandlerContext, msg: HttpResponse): Unit = complete(ctx, msg)

  override def userEventTriggered(ctx: ChannelHandlerContext, evt: Any): Unit = evt match {
    case TimedOut => complete(ctx, Status.Failure(new TimeoutException("Request has timed out.") with NoStackTrace))
    case _ => super.userEventTriggered(ctx, evt)
  }

  override def exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable): Unit = {
    complete(ctx, Status.Failure(cause))
  }

  override def channelInactive(ctx: ChannelHandlerContext): Unit = {
    // Only reports a failure if no reply has been sent yet (see `completed`).
    complete(ctx, Status.Failure(new IllegalStateException("Channel was closed before response.") with NoStackTrace))
    super.channelInactive(ctx)
  }

  /** Sends at most one result to the originator, cancels the timeout and closes the channel. */
  private def complete(ctx: ChannelHandlerContext, result: Any): Unit = {
    if (!completed) {
      completed = true
      if (timeoutTask != null) timeoutTask.cancel(false)
      originator ! result
      ctx.close()
    }
  }
}
| yet-another-cafebabe/korro | src/main/scala/com/github/illfaku/korro/internal/client/HttpChannelHandler.scala | Scala | lgpl-3.0 | 2,579 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import java.io.{ByteArrayInputStream, File, FileInputStream, FileOutputStream}
import java.net.{HttpURLConnection, URI, URL}
import java.nio.charset.StandardCharsets
import java.nio.file.{Files => JavaFiles, Paths}
import java.nio.file.attribute.PosixFilePermission.{OWNER_EXECUTE, OWNER_READ, OWNER_WRITE}
import java.security.SecureRandom
import java.security.cert.X509Certificate
import java.util.{Arrays, EnumSet, Locale, Properties}
import java.util.concurrent.{TimeoutException, TimeUnit}
import java.util.jar.{JarEntry, JarOutputStream, Manifest}
import java.util.regex.Pattern
import javax.net.ssl._
import javax.tools.{JavaFileObject, SimpleJavaFileObject, ToolProvider}
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import scala.reflect.{classTag, ClassTag}
import scala.sys.process.{Process, ProcessLogger}
import scala.util.Try
import com.google.common.io.{ByteStreams, Files}
import org.apache.commons.lang3.StringUtils
import org.apache.log4j.PropertyConfigurator
import org.json4s.JsonAST.JValue
import org.json4s.jackson.JsonMethods.{compact, render}
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.scheduler._
import org.apache.spark.util.Utils
/**
* Utilities for tests. Included in main codebase since it's used by multiple
* projects.
*
* TODO: See if we can move this to the test codebase by specifying
* test dependencies between projects.
*/
private[spark] object TestUtils {
/**
* Create a jar that defines classes with the given names.
*
* Note: if this is used during class loader tests, class names should be unique
* in order to avoid interference between tests.
*/
def createJarWithClasses(
classNames: Seq[String],
toStringValue: String = "",
classNamesWithBase: Seq[(String, String)] = Seq.empty,
classpathUrls: Seq[URL] = Seq.empty): URL = {
val tempDir = Utils.createTempDir()
val files1 = for (name <- classNames) yield {
createCompiledClass(name, tempDir, toStringValue, classpathUrls = classpathUrls)
}
val files2 = for ((childName, baseName) <- classNamesWithBase) yield {
createCompiledClass(childName, tempDir, toStringValue, baseName, classpathUrls)
}
val jarFile = new File(tempDir, "testJar-%s.jar".format(System.currentTimeMillis()))
createJar(files1 ++ files2, jarFile)
}
/**
* Create a jar file containing multiple files. The `files` map contains a mapping of
* file names in the jar file to their contents.
*/
def createJarWithFiles(files: Map[String, String], dir: File = null): URL = {
val tempDir = Option(dir).getOrElse(Utils.createTempDir())
val jarFile = File.createTempFile("testJar", ".jar", tempDir)
val jarStream = new JarOutputStream(new FileOutputStream(jarFile))
files.foreach { case (k, v) =>
val entry = new JarEntry(k)
jarStream.putNextEntry(entry)
ByteStreams.copy(new ByteArrayInputStream(v.getBytes(StandardCharsets.UTF_8)), jarStream)
}
jarStream.close()
jarFile.toURI.toURL
}
/**
* Create a jar file that contains this set of files. All files will be located in the specified
* directory or at the root of the jar.
*/
def createJar(
files: Seq[File],
jarFile: File,
directoryPrefix: Option[String] = None,
mainClass: Option[String] = None): URL = {
val manifest = mainClass match {
case Some(mc) =>
val m = new Manifest()
m.getMainAttributes.putValue("Manifest-Version", "1.0")
m.getMainAttributes.putValue("Main-Class", mc)
m
case None =>
new Manifest()
}
val jarFileStream = new FileOutputStream(jarFile)
val jarStream = new JarOutputStream(jarFileStream, manifest)
for (file <- files) {
// The `name` for the argument in `JarEntry` should use / for its separator. This is
// ZIP specification.
val prefix = directoryPrefix.map(d => s"$d/").getOrElse("")
val jarEntry = new JarEntry(prefix + file.getName)
jarStream.putNextEntry(jarEntry)
val in = new FileInputStream(file)
ByteStreams.copy(in, jarStream)
in.close()
}
jarStream.close()
jarFileStream.close()
jarFile.toURI.toURL
}
  // Adapted from the JavaCompiler.java doc examples
  private val SOURCE = JavaFileObject.Kind.SOURCE

  // Builds the synthetic "string:///pkg/Name.java" URI the compiler API expects.
  private def createURI(name: String) = {
    URI.create(s"string:///${name.replace(".", "/")}${SOURCE.extension}")
  }

  // In-memory Java source "file" whose content is the given code string.
  private[spark] class JavaSourceFromString(val name: String, val code: String)
    extends SimpleJavaFileObject(createURI(name), SOURCE) {
    override def getCharContent(ignoreEncodingErrors: Boolean): String = code
  }
  /** Creates a compiled class with the source file. Class file will be placed in destDir. */
  def createCompiledClass(
      className: String,
      destDir: File,
      sourceFile: JavaSourceFromString,
      classpathUrls: Seq[URL]): File = {
    val compiler = ToolProvider.getSystemJavaCompiler
    // Calling this outputs a class file in pwd. It's easier to just rename the files than
    // build a custom FileManager that controls the output location.
    val options = if (classpathUrls.nonEmpty) {
      Seq("-classpath", classpathUrls.map { _.getFile }.mkString(File.pathSeparator))
    } else {
      Seq.empty
    }
    compiler.getTask(null, null, null, options.asJava, null, Arrays.asList(sourceFile)).call()
    // The compiled class lands in the current working directory (see above).
    val fileName = className + ".class"
    val result = new File(fileName)
    assert(result.exists(), "Compiled file not found: " + result.getAbsolutePath())
    val out = new File(destDir, fileName)
    // renameTo cannot handle in and out files in different filesystems
    // use google's Files.move instead
    Files.move(result, out)
    assert(out.exists(), "Destination file not moved: " + out.getAbsolutePath())
    out
  }
  /** Creates a compiled class with the given name. Class file will be placed in destDir. */
  def createCompiledClass(
      className: String,
      destDir: File,
      toStringValue: String = "",
      baseClass: String = null,
      classpathUrls: Seq[URL] = Seq.empty,
      implementsClasses: Seq[String] = Seq.empty,
      extraCodeBody: String = ""): File = {
    // Optional "extends Base" clause; empty when no base class is given.
    val extendsText = Option(baseClass).map { c => s" extends ${c}" }.getOrElse("")
    // Every generated class implements java.io.Serializable plus any extra interfaces.
    val implementsText =
      "implements " + (implementsClasses :+ "java.io.Serializable").mkString(", ")
    val sourceFile = new JavaSourceFromString(className,
      s"""
         |public class $className $extendsText $implementsText {
         |  @Override public String toString() { return "$toStringValue"; }
         |
         |  $extraCodeBody
         |}
      """.stripMargin)
    createCompiledClass(className, destDir, sourceFile, classpathUrls)
  }
/**
* Run some code involving jobs submitted to the given context and assert that the jobs spilled.
*/
def assertSpilled(sc: SparkContext, identifier: String)(body: => Unit): Unit = {
val listener = new SpillListener
withListener(sc, listener) { _ =>
body
}
assert(listener.numSpilledStages > 0, s"expected $identifier to spill, but did not")
}
/**
* Run some code involving jobs submitted to the given context and assert that the jobs
* did not spill.
*/
def assertNotSpilled(sc: SparkContext, identifier: String)(body: => Unit): Unit = {
val listener = new SpillListener
withListener(sc, listener) { _ =>
body
}
assert(listener.numSpilledStages == 0, s"expected $identifier to not spill, but did")
}
/**
* Asserts that exception message contains the message. Please note this checks all
* exceptions in the tree. If a type parameter `E` is supplied, this will additionally confirm
* that the exception is a subtype of the exception provided in the type parameter.
*/
def assertExceptionMsg[E <: Throwable : ClassTag](
exception: Throwable,
msg: String,
ignoreCase: Boolean = false): Unit = {
val (typeMsg, typeCheck) = if (classTag[E] == classTag[Nothing]) {
("", (_: Throwable) => true)
} else {
val clazz = classTag[E].runtimeClass
(s"of type ${clazz.getName} ", (e: Throwable) => clazz.isAssignableFrom(e.getClass))
}
def contain(e: Throwable, msg: String): Boolean = {
if (ignoreCase) {
e.getMessage.toLowerCase(Locale.ROOT).contains(msg.toLowerCase(Locale.ROOT))
} else {
e.getMessage.contains(msg)
} && typeCheck(e)
}
var e = exception
var contains = contain(e, msg)
while (e.getCause != null && !contains) {
e = e.getCause
contains = contain(e, msg)
}
assert(contains,
s"Exception tree doesn't contain the expected exception ${typeMsg}with message: $msg")
}
/**
* Test if a command is available.
*/
def testCommandAvailable(command: String): Boolean = {
val attempt = if (Utils.isWindows) {
Try(Process(Seq(
"cmd.exe", "/C", s"where $command")).run(ProcessLogger(_ => ())).exitValue())
} else {
Try(Process(Seq(
"sh", "-c", s"command -v $command")).run(ProcessLogger(_ => ())).exitValue())
}
attempt.isSuccess && attempt.get == 0
}
def isPythonVersionAtLeast38(): Boolean = {
val attempt = if (Utils.isWindows) {
Try(Process(Seq("cmd.exe", "/C", "python3 --version"))
.run(ProcessLogger(s => s.startsWith("Python 3.8") || s.startsWith("Python 3.9")))
.exitValue())
} else {
Try(Process(Seq("sh", "-c", "python3 --version"))
.run(ProcessLogger(s => s.startsWith("Python 3.8") || s.startsWith("Python 3.9")))
.exitValue())
}
attempt.isSuccess && attempt.get == 0
}
  /**
   * Get the absolute path from the executable. This implementation was borrowed from
   * `spark/dev/sparktestsupport/shellutils.py`.
   */
  def getAbsolutePathFromExecutable(executable: String): Option[String] = {
    val command = if (Utils.isWindows) s"$executable.exe" else executable
    // A bare command name (no path separator) that is itself a regular,
    // executable file in the current directory is resolved directly.
    if (command.split(File.separator, 2).length == 1 &&
      JavaFiles.isRegularFile(Paths.get(command)) &&
      JavaFiles.isExecutable(Paths.get(command))) {
      Some(Paths.get(command).toAbsolutePath.toString)
    } else {
      // Otherwise scan each PATH entry for the first regular, executable file.
      // NOTE(review): the strip argument renders oddly here ("\\"") — upstream
      // strips surrounding double-quote characters from the PATH entry; confirm
      // the escaping survived whatever produced this copy of the file.
      sys.env("PATH").split(Pattern.quote(File.pathSeparator))
        .map(path => Paths.get(s"${StringUtils.strip(path, "\\"")}${File.separator}$command"))
        .find(p => JavaFiles.isRegularFile(p) && JavaFiles.isExecutable(p))
        .map(_.toString)
    }
  }
/**
* Returns the response code from an HTTP(S) URL.
*/
def httpResponseCode(
url: URL,
method: String = "GET",
headers: Seq[(String, String)] = Nil): Int = {
withHttpConnection(url, method, headers = headers) { connection =>
connection.getResponseCode()
}
}
  /**
   * Opens an HTTP(S) connection to `url`, applies `fn` to the connected
   * HttpURLConnection, and always disconnects afterwards.
   *
   * For HTTPS connections, certificate and hostname verification are disabled
   * with a trust-all manager. This is strictly for tests — never reuse this
   * configuration in production code.
   */
  def withHttpConnection[T](
      url: URL,
      method: String = "GET",
      headers: Seq[(String, String)] = Nil)
      (fn: HttpURLConnection => T): T = {
    val connection = url.openConnection().asInstanceOf[HttpURLConnection]
    connection.setRequestMethod(method)
    headers.foreach { case (k, v) => connection.setRequestProperty(k, v) }

    // Disable cert and host name validation for HTTPS tests.
    if (connection.isInstanceOf[HttpsURLConnection]) {
      val sslCtx = SSLContext.getInstance("SSL")
      // Trust manager that accepts every client and server certificate.
      val trustManager = new X509TrustManager {
        override def getAcceptedIssuers(): Array[X509Certificate] = null
        override def checkClientTrusted(x509Certificates: Array[X509Certificate],
            s: String): Unit = {}
        override def checkServerTrusted(x509Certificates: Array[X509Certificate],
            s: String): Unit = {}
      }
      // Hostname verifier that accepts any host.
      val verifier = new HostnameVerifier() {
        override def verify(hostname: String, session: SSLSession): Boolean = true
      }
      sslCtx.init(null, Array(trustManager), new SecureRandom())
      connection.asInstanceOf[HttpsURLConnection].setSSLSocketFactory(sslCtx.getSocketFactory())
      connection.asInstanceOf[HttpsURLConnection].setHostnameVerifier(verifier)
    }

    try {
      connection.connect()
      fn(connection)
    } finally {
      connection.disconnect()
    }
  }
/**
* Runs some code with the given listener installed in the SparkContext. After the code runs,
* this method will wait until all events posted to the listener bus are processed, and then
* remove the listener from the bus.
*/
def withListener[L <: SparkListener](sc: SparkContext, listener: L) (body: L => Unit): Unit = {
sc.addSparkListener(listener)
try {
body(listener)
} finally {
sc.listenerBus.waitUntilEmpty()
sc.listenerBus.removeListener(listener)
}
}
/**
* Wait until at least `numExecutors` executors are up, or throw `TimeoutException` if the waiting
* time elapsed before `numExecutors` executors up. Exposed for testing.
*
* @param numExecutors the number of executors to wait at least
* @param timeout time to wait in milliseconds
*/
private[spark] def waitUntilExecutorsUp(
sc: SparkContext,
numExecutors: Int,
timeout: Long): Unit = {
val finishTime = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(timeout)
while (System.nanoTime() < finishTime) {
if (sc.statusTracker.getExecutorInfos.length > numExecutors) {
return
}
// Sleep rather than using wait/notify, because this is used only for testing and wait/notify
// add overhead in the general case.
Thread.sleep(10)
}
throw new TimeoutException(
s"Can't find $numExecutors executors before $timeout milliseconds elapsed")
}
/**
* config a log4j properties used for testsuite
*/
def configTestLog4j(level: String): Unit = {
val pro = new Properties()
pro.put("log4j.rootLogger", s"$level, console")
pro.put("log4j.appender.console", "org.apache.log4j.ConsoleAppender")
pro.put("log4j.appender.console.target", "System.err")
pro.put("log4j.appender.console.layout", "org.apache.log4j.PatternLayout")
pro.put("log4j.appender.console.layout.ConversionPattern",
"%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n")
PropertyConfigurator.configure(pro)
}
/**
* Lists files recursively.
*/
def recursiveList(f: File): Array[File] = {
require(f.isDirectory)
val current = f.listFiles
current ++ current.filter(_.isDirectory).flatMap(recursiveList)
}
/** Creates a temp JSON file that contains the input JSON record. */
def createTempJsonFile(dir: File, prefix: String, jsonValue: JValue): String = {
val file = File.createTempFile(prefix, ".json", dir)
JavaFiles.write(file.toPath, compact(render(jsonValue)).getBytes())
file.getPath
}
/** Creates a temp bash script that prints the given output. */
def createTempScriptWithExpectedOutput(dir: File, prefix: String, output: String): String = {
val file = File.createTempFile(prefix, ".sh", dir)
val script = s"cat <<EOF\\n$output\\nEOF\\n"
Files.write(script, file, StandardCharsets.UTF_8)
JavaFiles.setPosixFilePermissions(file.toPath,
EnumSet.of(OWNER_READ, OWNER_EXECUTE, OWNER_WRITE))
file.getPath
}
}
/**
 * A `SparkListener` that detects whether spills have occurred in Spark jobs.
 */
private class SpillListener extends SparkListener {
  // Task metrics collected per stage while the stage is still running.
  private val stageIdToTaskMetrics = new mutable.HashMap[Int, ArrayBuffer[TaskMetrics]]
  // Ids of completed stages in which at least one task spilled memory.
  private val spilledStageIds = new mutable.HashSet[Int]

  def numSpilledStages: Int = synchronized {
    spilledStageIds.size
  }

  override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = synchronized {
    val metricsForStage =
      stageIdToTaskMetrics.getOrElseUpdate(taskEnd.stageId, new ArrayBuffer[TaskMetrics])
    metricsForStage += taskEnd.taskMetrics
  }

  override def onStageCompleted(stageComplete: SparkListenerStageCompleted): Unit = synchronized {
    // A stage counts as spilled when its tasks spilled any memory in total.
    val stageId = stageComplete.stageInfo.stageId
    val taskMetrics = stageIdToTaskMetrics.remove(stageId).toSeq.flatten
    if (taskMetrics.map(_.memoryBytesSpilled).sum > 0) {
      spilledStageIds += stageId
    }
  }
}
| witgo/spark | core/src/main/scala/org/apache/spark/TestUtils.scala | Scala | apache-2.0 | 16,931 |
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.compiler
package util
import org.apache.spark.SparkConf
import org.objectweb.asm.Type
import com.asakusafw.lang.compiler.planning.SubPlan
import com.asakusafw.spark.compiler.planning.PartitionGroupInfo
import com.asakusafw.spark.compiler.util.SparkIdioms._
import com.asakusafw.spark.runtime.Props
import com.asakusafw.spark.tools.asm._
import com.asakusafw.spark.tools.asm.MethodBuilder._
import com.asakusafw.spark.tools.asm4s._
object NumPartitions {

  /**
   * Emits bytecode that computes the partition count for the given sub-plan port,
   * driven by the port's `PartitionGroupInfo` data-size hint.
   *
   * TINY always yields 1; REGULAR yields max(parallelism, 1); SMALL/LARGE/HUGE
   * scale the configured parallelism by a per-size factor before clamping to >= 1.
   */
  def numPartitions(
    jobContext: => Stack)(
    port: SubPlan.Port)(
    implicit mb: MethodBuilder): Stack = {
    // Ports without an explicit attribute are treated as REGULAR.
    val dataSize = Option(port.getAttribute(classOf[PartitionGroupInfo]))
      .map(_.getDataSize).getOrElse(PartitionGroupInfo.DataSize.REGULAR)
    // Partially applied: scale(suffix) emits code that reads the scale factor.
    val scale = getParallelismScale(jobContext) _
    dataSize match {
      case PartitionGroupInfo.DataSize.TINY =>
        ldc(1)
      case PartitionGroupInfo.DataSize.SMALL =>
        // max(parallelism * smallScale, 1)
        invokeStatic(
          classOf[Math].asType,
          "max",
          Type.INT_TYPE,
          getParallelism(jobContext).toDouble.multiply(scale("Small")).toInt,
          ldc(1))
      case PartitionGroupInfo.DataSize.REGULAR =>
        // max(parallelism, 1)
        invokeStatic(
          classOf[Math].asType,
          "max",
          Type.INT_TYPE,
          getParallelism(jobContext),
          ldc(1))
      case PartitionGroupInfo.DataSize.LARGE =>
        // max(parallelism * largeScale, 1)
        invokeStatic(
          classOf[Math].asType,
          "max",
          Type.INT_TYPE,
          getParallelism(jobContext).toDouble.multiply(scale("Large")).toInt,
          ldc(1))
      case PartitionGroupInfo.DataSize.HUGE =>
        // max(parallelism * hugeScale, 1)
        invokeStatic(
          classOf[Math].asType,
          "max",
          Type.INT_TYPE,
          getParallelism(jobContext).toDouble.multiply(scale("Huge")).toInt,
          ldc(1))
    }
  }

  // Emits code reading the configured parallelism, falling back first to
  // "spark.default.parallelism" and then to the compiled-in fallback constant.
  private def getParallelism(jobContext: => Stack)(implicit mb: MethodBuilder): Stack = {
    sparkContext(jobContext)
      .invokeV("getConf", classOf[SparkConf].asType)
      .invokeV("getInt", Type.INT_TYPE,
        pushObject(Props)
          .invokeV("Parallelism", classOf[String].asType),
        sparkContext(jobContext)
          .invokeV("getConf", classOf[SparkConf].asType)
          .invokeV("getInt", Type.INT_TYPE,
            ldc("spark.default.parallelism"),
            pushObject(Props)
              .invokeV("ParallelismFallback", Type.INT_TYPE)))
  }

  // Emits code reading the scale factor for the given size suffix ("Small",
  // "Large" or "Huge"), with a compiled-in default value.
  private def getParallelismScale(
    jobContext: => Stack)(
    suffix: String)(
    implicit mb: MethodBuilder): Stack = {
    sparkContext(jobContext)
      .invokeV("getConf", classOf[SparkConf].asType)
      .invokeV("getDouble", Type.DOUBLE_TYPE,
        pushObject(Props)
          .invokeV(s"ParallelismScale${suffix}", classOf[String].asType),
        pushObject(Props)
          .invokeV(s"DefaultParallelismScale${suffix}", Type.DOUBLE_TYPE))
  }
}
| ueshin/asakusafw-spark | compiler/src/main/scala/com/asakusafw/spark/compiler/util/NumPartitions.scala | Scala | apache-2.0 | 3,473 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.helptosavefrontend.controllers
import org.joda.time.LocalDate
import org.scalamock.scalatest.MockFactory
import uk.gov.hmrc.auth.core.AuthProvider.GovernmentGateway
import uk.gov.hmrc.auth.core._
import uk.gov.hmrc.auth.core.authorise._
import uk.gov.hmrc.auth.core.retrieve._
import uk.gov.hmrc.helptosavefrontend.models.HtsAuth.UserInfoRetrievals
import uk.gov.hmrc.helptosavefrontend.models.userinfo.NSIPayload
import uk.gov.hmrc.helptosavefrontend.models.userinfo.NSIPayload.ContactDetails
import uk.gov.hmrc.helptosavefrontend.util.toJavaDate
import uk.gov.hmrc.http.HeaderCarrier
import scala.concurrent.{ExecutionContext, Future}
object AuthSupport {

  /** Adds an `and` builder so retrieval results can be chained into nested `~` pairs. */
  implicit class ROps[A, B](val r: ~[A, B]) {
    def and[C](c: C): ~[~[A, B], C] = new ~(r, c)
  }
}
trait AuthSupport extends MockFactory {

  import AuthSupport._

  // Shapes of the combined retrieval results the mocked auth connector returns.
  type NameRetrievalType = ~[~[Option[Name], Option[ItmpName]], Option[String]]
  type UserRetrievalType =
    Option[Name] ~ Option[String] ~ Option[LocalDate] ~ Option[ItmpName] ~ Option[LocalDate] ~ Option[ItmpAddress] ~ Option[
      String
    ]

  val mockAuthConnector: AuthConnector = mock[AuthConnector]

  // Canonical test-user details reused across the controller specs.
  val nino = "WM123456C"
  val firstName = "Tyrion"
  val lastName = "Lannister"
  val name = Name(Some(firstName), Some(lastName))
  val emailStr = "tyrion_lannister@gmail.com"
  val email: Option[String] = Some(emailStr)
  val dobStr = "1970-01-01"
  val dob: LocalDate = LocalDate.parse(dobStr)
  val itmpName = ItmpName(Some(firstName), Some(lastName), Some(lastName))
  val itmpDob: Option[LocalDate] = Some(LocalDate.parse(dobStr))
  val line1 = "Casterly Rock"
  val line2 = "The Westerlands"
  val line3 = "Westeros"
  val postCode = "BA148FY"
  val countryCode = "GB"
  val itmpAddress =
    ItmpAddress(Some(line1), Some(line2), Some(line3), None, None, Some(postCode), Some(countryCode), Some(countryCode))

  // Expected NS&I payload assembled from the details above.
  val nsiPayload = NSIPayload(
    firstName,
    lastName,
    toJavaDate(dob),
    nino,
    ContactDetails(
      line1,
      line2,
      Some(line3),
      None,
      None,
      postCode,
      Some(countryCode),
      emailStr
    ),
    "online",
    None,
    "V2.0",
    "MDTP REGISTRATION"
  )

  // Pre-built retrieval results covering success and missing-data cases.
  val mockedNINORetrieval: Option[String] = Some(nino)

  val mockedNINOAndNameRetrieval: ~[~[Option[Name], Option[ItmpName]], Option[String]] = new ~(
    Some(name),
    Some(itmpName)
  ) and mockedNINORetrieval

  val mockedNINOAndNameRetrievalMissingNino: ~[~[Option[Name], Option[ItmpName]], Option[String]] = new ~(
    Some(name),
    Some(itmpName)
  ) and None

  val mockedNINOAndNameRetrievalMissingName: ~[~[Option[Name], Option[ItmpName]], Option[String]] = new ~(
    Some(Name(None, None)),
    Some(ItmpName(None, None, None))
  ) and mockedNINORetrieval

  val mockedRetrievals
    : ~[~[~[~[~[~[Option[Name], Option[String]], Option[LocalDate]], Option[ItmpName]], Option[LocalDate]], Option[
      ItmpAddress
    ]], Option[String]] =
    new ~(Some(name), email) and Option(dob) and Some(itmpName) and itmpDob and Some(itmpAddress) and mockedNINORetrieval

  // Variant of mockedRetrievals with a caller-supplied email value.
  def mockedRetrievalsWithEmail(
    email: Option[String]
  ): ~[~[~[~[~[~[Option[Name], Option[String]], Option[LocalDate]], Option[ItmpName]], Option[LocalDate]], Option[
    ItmpAddress
  ]], Option[String]] =
    new ~(Some(name), email) and Option(dob) and Some(itmpName) and itmpDob and Some(itmpAddress) and mockedNINORetrieval

  val mockedRetrievalsMissingUserInfo
    : ~[~[~[~[~[~[Option[Name], Option[String]], Option[LocalDate]], Option[ItmpName]], Option[LocalDate]], Option[
      ItmpAddress
    ]], Option[String]] =
    new ~(Some(Name(None, None)), email) and Option(dob) and Some(ItmpName(None, None, None)) and itmpDob and Some(
      itmpAddress
    ) and mockedNINORetrieval

  val mockedRetrievalsMissingNinoEnrolment
    : ~[~[~[~[~[~[Option[Name], Option[String]], Option[LocalDate]], Option[ItmpName]], Option[LocalDate]], Option[
      ItmpAddress
    ]], Option[String]] =
    new ~(Some(name), email) and Option(dob) and Some(itmpName) and itmpDob and Some(itmpAddress) and None

  // ScalaMock helpers: each stubs one authorise(...) call shape on the connector.

  def mockAuthResultWithFail(ex: Throwable): Unit =
    (mockAuthConnector
      .authorise(_: Predicate, _: Retrieval[Unit])(_: HeaderCarrier, _: ExecutionContext))
      .expects(AuthProviders(GovernmentGateway), *, *, *)
      .returning(Future.failed(ex))

  def mockAuthWithRetrievalsWithFail(predicate: Predicate)(ex: Throwable): Unit =
    (mockAuthConnector
      .authorise(_: Predicate, _: Retrieval[Enrolments])(_: HeaderCarrier, _: ExecutionContext))
      .expects(predicate, *, *, *)
      .returning(Future.failed(ex))

  def mockAuthWithNINORetrievalWithSuccess(predicate: Predicate)(result: Option[String]): Unit =
    (mockAuthConnector
      .authorise(_: Predicate, _: Retrieval[Option[String]])(_: HeaderCarrier, _: ExecutionContext))
      .expects(predicate, v2.Retrievals.nino, *, *)
      .returning(Future.successful(result))

  def mockAuthWithNINOAndName(predicate: Predicate)(result: NameRetrievalType): Unit =
    (mockAuthConnector
      .authorise(_: Predicate, _: Retrieval[NameRetrievalType])(_: HeaderCarrier, _: ExecutionContext))
      .expects(predicate, v2.Retrievals.name and v2.Retrievals.itmpName and v2.Retrievals.nino, *, *)
      .returning(Future.successful(result))

  def mockAuthWithAllRetrievalsWithSuccess(predicate: Predicate)(result: UserRetrievalType): Unit =
    (mockAuthConnector
      .authorise(_: Predicate, _: Retrieval[UserRetrievalType])(_: HeaderCarrier, _: ExecutionContext))
      .expects(predicate, UserInfoRetrievals and v2.Retrievals.nino, *, *)
      .returning(Future.successful(result))

  def mockAuthWithNoRetrievals(predicate: Predicate): Unit =
    (mockAuthConnector
      .authorise(_: Predicate, _: Retrieval[Unit])(_: HeaderCarrier, _: ExecutionContext))
      .expects(predicate, EmptyRetrieval, *, *)
      .returning(Future.successful(EmptyRetrieval))
}
| hmrc/help-to-save-frontend | test/uk/gov/hmrc/helptosavefrontend/controllers/AuthSupport.scala | Scala | apache-2.0 | 6,530 |
/*
* Copyright 2013 Stephan Rehfeld
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scaladelray.ui.model
import javax.swing.table.TableModel
import scaladelray.sampling.SamplingPattern
import javax.swing.event.TableModelListener
/**
 * Provides a regular (grid) sampling pattern and exposes its x/y resolution as
 * an editable two-row Swing table ("Property" / "Value" columns).
 */
class RegularSamplingPatternProvider extends SamplingPatternProvider with TableModel {

  // Number of sampling points along each axis; edited through the table UI.
  var x = 1
  var y = 1

  /**
   * Creates a regular sampling pattern of x-by-y points.
   *
   * @param l callback invoked before the pattern is created
   */
  override def createSamplingPattern( l : () => Unit ) : SamplingPattern = {
    l()
    SamplingPattern.regularPattern( x, y )
  }

  // Table layout: two rows (x and y), two columns (property name, value).
  override def getRowCount: Int = 2

  override def getColumnCount: Int = 2

  override def getColumnName(column: Int): String = column match {
    case 0 => "Property"
    case 1 => "Value"
  }

  override def getColumnClass(p1: Int): Class[_] = classOf[String]

  // Only the value column may be edited.
  override def isCellEditable(row: Int, column: Int): Boolean = column == 1

  override def getValueAt(row: Int, column: Int): AnyRef = column match {
    case 0 => row match {
      case 0 => "x"
      case 1 => "y"
    }
    case 1 => row match {
      // Int.box replaces the deprecated `new Integer(...)` boxing constructor.
      case 0 => Int.box(x)
      case 1 => Int.box(y)
    }
  }

  override def setValueAt( obj : Any, row: Int, column: Int) {
    row match {
      case 0 => x = obj.asInstanceOf[String].toInt
      case 1 => y = obj.asInstanceOf[String].toInt
    }
  }

  // Values only change through setValueAt, so listener bookkeeping is skipped.
  override def addTableModelListener(p1: TableModelListener) {}
  override def removeTableModelListener(p1: TableModelListener) {}

  override def toString: String = "Regular sampling pattern"

  override def count = 1
}
| stephan-rehfeld/scaladelray | src/main/scala/scaladelray/ui/model/RegularSamplingPatternProvider.scala | Scala | apache-2.0 | 1,998 |
/*
* Copyright (c) 2014 - 2015 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.expression
package proxies.primitives
import org.scalaide.debug.internal.expression.Names.Java
import org.scalaide.debug.internal.expression.Names.Scala
import org.scalaide.debug.internal.expression.context.JdiContext
import com.sun.jdi.DoubleValue
/**
 * JdiProxy implementation for `double`, `scala.Double` and `java.lang.Double`.
 */
case class DoubleJdiProxy(__context: JdiContext, __value: DoubleValue)
  extends PrimitiveJdiProxy[Double, DoubleJdiProxy, DoubleValue](DoubleJdiProxy) {

  // Unwraps the mirrored JDI DoubleValue into the underlying primitive.
  override def __primitiveValue[I]: I = this.__value.value.asInstanceOf[I]
}
/** Companion: mirrors plain `Double` values into JDI `DoubleValue`s. */
object DoubleJdiProxy extends PrimitiveJdiProxyCompanion[Double, DoubleJdiProxy, DoubleValue](TypeNames.Double) {
  protected def mirror(value: Double, context: JdiContext): DoubleValue = context.mirrorOf(value)
}
| Kwestor/scala-ide | org.scala-ide.sdt.debug.expression/src/org/scalaide/debug/internal/expression/proxies/primitives/DoubleJdiProxy.scala | Scala | bsd-3-clause | 895 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.cfclerk.services
import com.normation.cfclerk.domain._
/**
 * A no-op `Cf3PromisesFileWriterService` used as a test stand-in: every
 * operation performs no work and returns a null/unit placeholder.
 */
class DummyPolicyTranslator extends Cf3PromisesFileWriterService {

  // Stub: no templates are prepared.
  def prepareCf3PromisesFileTemplate(policyContainer : Cf3PolicyDraftContainer, extraVariables : Map[String, Variable]) : PreparedTemplates = {
    null
  }

  // Stub: nothing is moved.
  def movePromisesToFinalPosition(folders : Seq[PromisesFinalMoveInfo]) : Seq[PromisesFinalMoveInfo]= {
    null
  }

  // Stub: nothing is written. Returns () explicitly instead of discarding a
  // null into a Unit-typed result.
  def writePromisesFiles(fileSet: Set[Cf3PromisesFileTemplateCopyInfo], variableSet: Seq[STVariable], outPath: String): Unit = {
    ()
  }

  /**
   * Concatenate all the variables for each policy Instances.
   * @param policyContainer
   * @return
   */
  def prepareAllCf3PolicyDraftVariables(policyContainer: Cf3PolicyDraftContainer) = null
}
| fanf/cf-clerk | src/test/scala/com/normation/cfclerk/services/DummyPolicyTranslator.scala | Scala | agpl-3.0 | 2,453 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.mesos.runtime.clusterframework
import org.apache.flink.runtime.clusterframework.types.ResourceID
import org.apache.flink.runtime.highavailability.HighAvailabilityServices
import org.apache.flink.runtime.io.disk.iomanager.IOManager
import org.apache.flink.runtime.io.network.NetworkEnvironment
import org.apache.flink.runtime.memory.MemoryManager
import org.apache.flink.runtime.metrics.groups.TaskManagerMetricGroup
import org.apache.flink.runtime.taskexecutor.TaskManagerConfiguration
import org.apache.flink.runtime.taskmanager.{TaskManager, TaskManagerLocation}
/** An extension of the TaskManager that listens for additional Mesos-related
  * messages.
  */
class MesosTaskManager(
  config: TaskManagerConfiguration,
  resourceID: ResourceID,
  taskManagerLocation: TaskManagerLocation,
  memoryManager: MemoryManager,
  ioManager: IOManager,
  network: NetworkEnvironment,
  numberOfSlots: Int,
  highAvailabilityServices: HighAvailabilityServices,
  taskManagerMetricGroup : TaskManagerMetricGroup)
  extends TaskManager(
    config,
    resourceID,
    taskManagerLocation,
    memoryManager,
    ioManager,
    network,
    numberOfSlots,
    highAvailabilityServices,
    taskManagerMetricGroup) {

  // No Mesos-specific messages are handled yet; this override delegates to the
  // base handler and marks the intended extension point.
  override def handleMessage: Receive = {
    super.handleMessage
  }
}
object MesosTaskManager {

  /** Entry point (main method) to run the TaskManager on Mesos.
    *
    * Delegates to MesosTaskManagerRunner, passing this class so the runner
    * instantiates the Mesos-aware TaskManager variant.
    *
    * @param args The command line arguments.
    */
  def main(args: Array[String]): Unit = {
    MesosTaskManagerRunner.runTaskManager(args, classOf[MesosTaskManager])
  }
}
| zimmermatt/flink | flink-mesos/src/main/scala/org/apache/flink/mesos/runtime/clusterframework/MesosTaskManager.scala | Scala | apache-2.0 | 2,408 |
package com.aidan.chapter4
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class Chapter4Test extends FunSuite {

  import com.aidan.chapter4.Chapter4._

  // NOTE(review): several tests read fixture files via paths relative to
  // "./src/test/..."; they assume the working directory is the project root —
  // confirm in the build configuration.

  test("get discount") {
    val gizmos = Map("GPS" -> 10.00, "Dinghy" -> 10000.00, "House" -> 250000.00)
    assert(applyDiscount(gizmos)("GPS") === 9.00)
  }

  test("word map count") {
    import scala.collection.mutable.HashMap
    val in = new java.util.Scanner(new java.io.File("./src/test/scala/com/aidan/chapter4/myfile.txt"))
    val wordMap = mapFileToWords(in);
    assert(wordMap("the") === 2)
    assert(wordMap("time") === 1)
    // Absent words default to a zero count.
    assert(wordMap.getOrElse("phlogiston", 0) === 0)
  }

  test("word map count immutable") {
    val in = new java.util.Scanner(new java.io.File("./src/test/scala/com/aidan/chapter4/myfile.txt"))
    val wordMap = mapFileToWordsImmutable(in);
    assert(wordMap("the") === 2)
    assert(wordMap("time") === 1)
    assert(wordMap.getOrElse("phlogiston", 0) === 0)
  }

  test("word map count sorted") {
    import scala.collection.immutable.TreeMap
    val in = new java.util.Scanner(new java.io.File("./src/test/scala/com/aidan/chapter4/myfile.txt"))
    val wordMap = mapFileToWordsSorted(in);
    // Sorted map: alphabetically first word comes first.
    assert(wordMap.keys.head === "by")
    assert(wordMap.values.head === 1)
  }

  test("word map count sorted - java implementation") {
    import scala.collection.immutable.TreeMap
    val in = new java.util.Scanner(new java.io.File("./src/test/scala/com/aidan/chapter4/myfile.txt"))
    val wordMap = mapFileToWordsSortedJava(in);
    assert(wordMap.keys.head === "by")
    assert(wordMap.values.head === 1)
  }

  test("order of linked hash map") {
    // Linked hash map preserves insertion order, so Monday stays first.
    val daysOfWeek = getDaysOfWeek()
    assert(daysOfWeek.head === ("Monday", java.util.Calendar.MONDAY))
  }

  test("get java properties") {
    val sysProps = getSysProperties
    assert(sysProps.keySet.contains("java.runtime.name"))
    var maxLength = 0
    for ((k, v) <- sysProps if k.length > maxLength) maxLength = k.length
    // Uncomment to spew loads to the tests sysout!
    // for ((k, v) <- sysProps) println(k + " " * (maxLength - k.length) + "| " + v)
  }

  test("min max") {
    val testArray = Array(4, 1, 5, 9, 3)
    val (min, max) = getMinMax(testArray)
    assert(min === 1)
    assert(max === 9)
  }

  test("lt eq gt") {
    // Counts of elements less than, equal to, and greater than the pivot 5.
    val testArray = Array(4, 1, 5, 9, 3)
    val (lt, eq, gt) = lteqgt(testArray, 5)
    assert(lt === 3)
    assert(eq === 1)
    assert(gt === 1)
  }

  test("hello world zip") {
    val zip = "Hello".zip("world");
    assert(zip.head === ('H', 'w'))
  }
} | aidanwhiteley/scala_impatient | src/test/scala/com/aidan/chapter4/Chapter4Test.scala | Scala | apache-2.0 | 2,664 |
package org.dsa.core.prepare
/**
* Created by xubo on 2016/12/27.
*/
/**
 * Command-line entry point: looks up a sequence of the given length via
 * FindLocal and prints the result.
 *
 * args(0): input location; args(1): sequence length.
 */
object FindByLength {
  def main(args: Array[String]): Unit = {
    val found = FindLocal.findSequnceByLength(args(0), args(1).toInt)
    println("result")
    println(found)
  }
}
| xubo245/CloudSW | src/main/scala/org/dsa/core/prepare/FindByLength.scala | Scala | gpl-2.0 | 247 |
/**
* Created by rzeznik on 14.11.15.
*/
/**
 * Card ranks from 2 through Ace. Ids start at 2, so each numeric rank's id
 * equals its face value (2..10), followed by Jack=11 .. Ace=14.
 */
object Ranks extends Enumeration(2) {
  type Rank = Value

  // Enumeration names default to the val names, so "2".."10" and the court
  // cards keep exactly the same display names as before.
  val `2`, `3`, `4`, `5`, `6`, `7`, `8`, `9`, `10`, Jack, Queen, King, Ace = Value

  val ranks = values.toList
}
| marcin-rzeznicki/Wonderland-Scala-Katas | CardGameWar/src/main/scala/Ranks.scala | Scala | epl-1.0 | 451 |
/*
* Wire
* Copyright (C) 2016 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.waz.service
import com.waz.api.User.ConnectionStatus
import com.waz.content._
import com.waz.log.BasicLogging.LogTag.DerivedLogTag
import com.waz.model.SearchQuery.Recommended
import com.waz.model._
import com.waz.service.conversation.{ConversationsService, ConversationsUiService}
import com.waz.service.teams.TeamsService
import com.waz.specs.AndroidFreeSpec
import com.waz.sync.SyncServiceHandle
import com.waz.testutils.TestUserPreferences
import com.waz.utils.Managed
import com.waz.utils.events.{Signal, SourceSignal}
import com.waz.utils.wrappers.DB
import org.threeten.bp.Instant
import scala.collection.breakOut
import scala.collection.generic.CanBuild
import scala.concurrent.Future
class UserSearchServiceSpec extends AndroidFreeSpec with DerivedLogTag {
  val emptyTeamId = Option.empty[TeamId]
  val teamId = Option(TeamId("59bbc94c-2618-491a-8dba-cf6f94c65873"))

  // Raw permission bitmasks corresponding to the partner/member/admin team roles.
  val partnerPermissions: Long = 1025
  val memberPermissions: Long = 1587
  val adminPermissions: Long = 5951

  // Mocked collaborators wired into the service under test.
  val queryCacheStorage = mock[SearchQueryCacheStorage]
  val userService = mock[UserService]
  val usersStorage = mock[UsersStorage]
  val membersStorage = mock[MembersStorage]
  val teamsService = mock[TeamsService]
  val sync = mock[SyncServiceHandle]
  val messagesStorage = mock[MessagesStorage]
  val convsUi = mock[ConversationsUiService]
  val convsStorage = mock[ConversationStorage]
  val convs = mock[ConversationsService]
  val timeouts = new Timeouts
  val userPrefs = new TestUserPreferences
  // Fixture users keyed by id: a mix of unconnected, connected, team and
  // wireless users exercising the various matching rules in the scenarios.
  lazy val users = Map(
    id('me) -> UserData(id('me), "A user"),
    id('a) -> UserData(id('a), "other user 1"),
    id('b) -> UserData(id('b), "other user 2"),
    id('c) -> UserData(id('c), "some name"),
    id('d) -> UserData(id('d), "related user 1").copy(relation = Relation.Second), // TODO: relation does not exists anymore, can be removed!
    id('e) -> UserData(id('e), "related user 2").copy(relation = Relation.Second),
    id('f) -> UserData(id('f), "other related").copy(relation = Relation.Third),
    id('g) -> UserData(id('g), "friend user 1").copy(connection = ConnectionStatus.ACCEPTED),
    id('h) -> UserData(id('h), "friend user 2").copy(connection = ConnectionStatus.ACCEPTED),
    id('i) -> UserData(id('i), "some other friend").copy(connection = ConnectionStatus.ACCEPTED),
    id('j) -> UserData(id('j), "meep moop").copy(email = Some(EmailAddress("moop@meep.me"))),
    id('k) -> UserData(id('k), "unconnected user").copy(connection = ConnectionStatus.UNCONNECTED),
    id('l) -> UserData(id('l), "Björn-Rodrigo Smith"),
    id('m) -> UserData(id('m), "John Smith"),
    id('n) -> UserData(id('n), "Jason-John Mercier"),
    id('o) -> UserData(id('o), "Captain Crunch").copy(handle = Some(Handle("john"))),
    id('p) -> UserData(id('p), "Peter Pan").copy(handle = Some(Handle("john"))),
    id('q) -> UserData(id('q), "James gjohnjones"),
    id('r) -> UserData(id('r), "Liv Boeree").copy(handle = Some(Handle("testjohntest"))),
    id('s) -> UserData(id('s), "blah").copy(handle = Some(Handle("mores"))),
    id('t) -> UserData(id('t), "test handle").copy(handle = Some(Handle("smoresare"))),
    id('u) -> UserData(id('u), "Wireless").copy(expiresAt = Some(RemoteInstant.ofEpochMilli(12345L))),
    id('v) -> UserData(id('v), "Wireful"),
    // Team users: partners ('pp*), members ('mm*) and admins ('aa*); createdBy
    // links partners/members to the admin who invited them where present.
    id('pp1) -> UserData(id('pp1), "Partner 1").copy(
      permissions = (partnerPermissions, partnerPermissions),
      teamId = teamId,
      handle = Some(Handle("pp1")),
      createdBy = Some(id('aa1))
    ),
    id('pp2) -> UserData(id('pp2), "Partner 2").copy(
      permissions = (partnerPermissions, partnerPermissions),
      teamId = teamId,
      handle = Some(Handle("pp2")),
      createdBy = Some(id('aa2))
    ),
    id('pp3) -> UserData(id('pp3), "Partner 3").copy(
      permissions = (partnerPermissions, partnerPermissions),
      teamId = teamId,
      handle = Some(Handle("pp3"))
    ),
    id('mm1) -> UserData(id('mm1), "Member 1").copy(
      permissions = (memberPermissions, memberPermissions),
      teamId = teamId,
      handle = Some(Handle("mm1")),
      createdBy = Some(id('aa1))
    ),
    id('mm2) -> UserData(id('mm2), "Member 2").copy(
      permissions = (memberPermissions, memberPermissions),
      teamId = teamId,
      handle = Some(Handle("mm2")),
      createdBy = Some(id('aa1))
    ),
    id('mm3) -> UserData(id('mm3), "Member 3").copy(
      permissions = (memberPermissions, memberPermissions),
      teamId = teamId,
      handle = Some(Handle("mm3")),
      createdBy = Some(id('aa1))
    ),
    id('aa1) -> UserData(id('aa1), "Admin 1").copy(
      permissions = (adminPermissions, adminPermissions),
      teamId = teamId,
      handle = Some(Handle("aa1"))
    ),
    id('aa2) -> UserData(id('aa2), "Admin 2").copy(
      permissions = (adminPermissions, adminPermissions),
      teamId = teamId,
      handle = Some(Handle("aa2"))
    )
  )
  // Mock team search: emulates the backend by filtering the fixture users to
  // those in the test team that match the query (optionally handle-only).
  (teamsService.searchTeamMembers _).expects(*, *).anyNumberOfTimes().onCall { (query, handleOnly) =>
    Signal.const(
      users
        .filter(u => u._2.teamId == teamId)
        .filter(_._2.matchesQuery(query, handleOnly))
        .map(_._2).toSet
    )
  }
  // Mention-search scenarios: each wires up a two-member conversation and then
  // expects the query to match exactly one member.

  scenario("search conversation with token starting with query") {
    val convMembers = Set(id('l), id('b))
    (queryCacheStorage.deleteBefore _).expects(*).anyNumberOfTimes().returning(Future.successful[Unit]({}))
    (membersStorage.activeMembers _).expects(*).anyNumberOfTimes().returning(Signal.const(convMembers))
    (usersStorage.listSignal _).expects(*).once().returning(Signal.const(convMembers.map(users).toVector))

    // "rod" matches the "Rodrigo" token of "Björn-Rodrigo Smith".
    val res = getService(false, id('me)).mentionsSearchUsersInConversation(ConvId("123"),"rod")
    result(res.filter(_.size == 1).head)
  }

  scenario("search conversation with name starting with query") {
    val convMembers = Set(id('l), id('b))
    (queryCacheStorage.deleteBefore _).expects(*).anyNumberOfTimes().returning(Future.successful[Unit]({}))
    (membersStorage.activeMembers _).expects(*).anyNumberOfTimes().returning(Signal.const(convMembers))
    (usersStorage.listSignal _).expects(*).once().returning(Signal.const(convMembers.map(users).toVector))

    val res = getService(false, id('me)).mentionsSearchUsersInConversation(ConvId("123"),"bjo")
    result(res.filter(_.size == 1).head)
  }
scenario("search conversation with name containing query") {
val convMembers = Set(id('l), id('m))
(queryCacheStorage.deleteBefore _).expects(*).anyNumberOfTimes().returning(Future.successful[Unit]({}))
(membersStorage.activeMembers _).expects(*).anyNumberOfTimes().returning(Signal.const(convMembers))
(usersStorage.listSignal _).expects(*).once().returning(Signal.const(convMembers.map(users).toVector))
val res = getService(false, id('me)).mentionsSearchUsersInConversation(ConvId("123"),"rn")
result(res.filter{u => println(u.map(_.displayName));u.size == 1}.head)
}
  scenario("search conversation with handle containing query") {
    val convMembers = Set(id('s), id('t))
    (queryCacheStorage.deleteBefore _).expects(*).anyNumberOfTimes().returning(Future.successful[Unit]({}))
    (membersStorage.activeMembers _).expects(*).anyNumberOfTimes().returning(Signal.const(convMembers))
    (usersStorage.listSignal _).expects(*).once().returning(Signal.const(convMembers.map(users).toVector))

    // "mores" is contained in both handles ("mores" and "smoresare").
    val res = getService(false, id('me)).mentionsSearchUsersInConversation(ConvId("123"),"mores")
    result(res.filter(_.size == 2).head)
  }

  scenario("search conversation handle beginning with query") {
    val convMembers = Set(id('s), id('t))
    (queryCacheStorage.deleteBefore _).expects(*).anyNumberOfTimes().returning(Future.successful[Unit]({}))
    (membersStorage.activeMembers _).expects(*).anyNumberOfTimes().returning(Signal.const(convMembers))
    (usersStorage.listSignal _).expects(*).once().returning(Signal.const(convMembers.map(users).toVector))

    // Only "smoresare" starts with "smores".
    val res = getService(false, id('me)).mentionsSearchUsersInConversation(ConvId("123"),"smores")
    result(res.filter(_.size == 1).head)
  }

  scenario("search conversation people ordering") {
    val convMembers = Set(id('q), id('r),id('p), id('n), id('m), id('o))
    // Expected ranking for the query "john" across names and handles.
    val correctOrder = IndexedSeq(ud('m), ud('n), ud('o), ud('p), ud('q), ud('r))
    (queryCacheStorage.deleteBefore _).expects(*).anyNumberOfTimes().returning(Future.successful[Unit]({}))
    (membersStorage.activeMembers _).expects(*).anyNumberOfTimes().returning(Signal.const(convMembers))
    (usersStorage.listSignal _).expects(*).once().returning(Signal.const(convMembers.map(users).toVector))

    val res = getService(false, id('me)).mentionsSearchUsersInConversation(ConvId("123"),"john")
    result(res.filter(_.equals(correctOrder)).head)
  }
// Test helpers for building ids / user data from short symbolic names.

/** Builds a UserId from a symbol, e.g. id('a) yields UserId("'a"). */
def id(s: Symbol) = UserId(s.toString)

/**
 * Builds a set of UserIds from symbols.
 * Fix: the original `s.map(id)(breakOut).toSet` passed `breakOut` as the
 * CanBuildFrom and then immediately converted with `.toSet`, so the breakOut
 * target was discarded — plain `map(...).toSet` is equivalent and clearer.
 */
def ids(s: Symbol*) = s.map(id).toSet

/** Looks up the fixture UserData for a symbol's id (from the spec's `users` map). */
def ud(s: Symbol) = users(id(s))
/**
 * Verifies a full "Recommended" search round trip:
 *  1. the service first sees a cache entry without results (firstQueryCache),
 *  2. a local DB lookup (usersStorage.find) returns `expected`,
 *  3. the triggered backend sync (sync.syncSearchQuery) publishes the cache
 *     entry carrying the result ids (secondQueryCache),
 *  4. the final result signal must contain exactly `matches`.
 */
def verifySearch(prefix: String, matches: Set[UserId]) = {
  val query = Recommended(prefix)
  val expected = users.filterKeys(matches.contains).values.toVector
  // signal standing in for the query-cache storage; the test pushes cache states into it
  val querySignal = Signal[Option[SearchQueryCache]]()
  val firstQueryCache = SearchQueryCache(query, Instant.now, None)
  val secondQueryCache = SearchQueryCache(query, Instant.now, Some(matches.toVector))
  (queryCacheStorage.deleteBefore _).expects(*).anyNumberOfTimes().returning(Future.successful({}))
  (queryCacheStorage.optSignal _).expects(query).once().returning(querySignal)
  (usersStorage.find(_: UserData => Boolean, _: DB => Managed[TraversableOnce[UserData]], _: UserData => UserData)(_: CanBuild[UserData, Vector[UserData]]))
    .expects(*, *, *, *).once().returning(Future.successful(expected))
  (queryCacheStorage.updateOrCreate _).expects(query, *, *).once().returning {
    Future.successful(secondQueryCache)
  }
  // the sync call simulates the backend finishing: it publishes the populated cache entry
  (sync.syncSearchQuery _).expects(query).once().onCall { _: SearchQuery =>
    Future.successful {
      querySignal ! Some(secondQueryCache)
      result(querySignal.filter(_.contains(secondQueryCache)).head)
      SyncId()
    }
  }
  // user data is only fetched when there is something to resolve
  if (matches.nonEmpty)
    (usersStorage.listSignal _).expects(*).once().returning(Signal.const(expected))
  else
    (usersStorage.listSignal _).expects(*).never()
  querySignal ! Some(firstQueryCache)
  result(querySignal.filter(_.contains(firstQueryCache)).head)
  val resSignal = getService(false, id('me)).searchUserData(Recommended(prefix)).map(_.map(_.id)).disableAutowiring()
  result(querySignal.filter(_.contains(secondQueryCache)).head)
  result(resSignal.map(_.toSet).filter(_ == matches).head)
}
// All four scenarios delegate to verifySearch above; they differ only in
// whether the query is a name or a handle ("@"-prefixed) and whether it matches.
feature("Recommended people search") {
  scenario("Return search results for name") {
    verifySearch("rel", ids('d, 'e))
  }
  scenario("Return no search results for name") {
    verifySearch("relt", Set.empty[UserId])
  }
  scenario("Return search results for handle") {
    verifySearch("@rel", ids('d, 'e))
  }
  scenario("Return no search results for handle") {
    verifySearch("@relt", Set.empty[UserId])
  }
}
// Exercises the three buckets of SearchResults returned by service.search:
// `top` (top people), `local` (known/connected users), `dir` (directory/remote).
feature("Search by searchState") {
  scenario("search for top people"){
    val expected = ids('g, 'h, 'i)
    (queryCacheStorage.deleteBefore _).expects(*).anyNumberOfTimes().returning(Future.successful[Unit]({}))
    (usersStorage.find(_: UserData => Boolean, _: DB => Managed[TraversableOnce[UserData]], _: UserData => UserData)(_: CanBuild[UserData, Vector[UserData]]))
      .expects(*, *, *, *).once().returning(Future.successful(expected.map(users).toVector))
    (userService.acceptedOrBlockedUsers _).expects().returns(Signal.const(Map.empty[UserId, UserData]))
    // one unread-count lookup per expected top user
    (messagesStorage.countLaterThan _).expects(*, *).repeated(3).returning(Future.successful(1L))
    // empty query => only the `top` bucket is relevant
    val res = getService(false, id('me)).search("").map(_.top.map(_.id).toSet)
    result(res.filter(_ == expected).head)
  }
  scenario("search for local results"){
    val expected = ids('g, 'h)
    val query = Recommended("fr")
    val querySignal = new SourceSignal[Option[SearchQueryCache]]()
    // remote cache resolves to an empty result list — only local/connected users match
    val queryCache = SearchQueryCache(query, Instant.now, Some(Vector.empty[UserId]))
    (queryCacheStorage.deleteBefore _).expects(*).anyNumberOfTimes().returning(Future.successful[Unit]({}))
    (queryCacheStorage.optSignal _).expects(query).once().returning(querySignal)
    (usersStorage.find(_: UserData => Boolean, _: DB => Managed[TraversableOnce[UserData]], _: UserData => UserData)(_: CanBuild[UserData, Vector[UserData]]))
      .expects(*, *, *, *).once().returning(Future.successful(Vector.empty[UserData]))
    (userService.acceptedOrBlockedUsers _).expects().once().returning(Signal.const(expected.map(key => (key -> users(key))).toMap))
    (convsStorage.findGroupConversations _).expects(*, *, *, *).returns(Future.successful(IndexedSeq.empty[ConversationData]))
    (queryCacheStorage.updateOrCreate _).expects(*, *, *).once().returning(Future.successful(queryCache))
    (sync.syncSearchQuery _).expects(query).once().onCall { _: SearchQuery =>
      Future.successful[SyncId] {
        querySignal ! Some(queryCache)
        result(querySignal.filter(_.contains(queryCache)).head)
        SyncId()
      }
    }
    // no remote ids => user data for the directory bucket must never be fetched
    (usersStorage.listSignal _).expects(*).never()
    val res = getService(false, id('me)).search("fr").map(_.local.map(_.id).toSet)
    result(res.filter(_ == expected).head)
  }
  scenario("search for remote results") {
    val expected = ids('a, 'b)
    val query = Recommended("ot")
    val querySignal = new SourceSignal[Option[SearchQueryCache]]()
    // remote cache resolves to the expected directory ids
    val queryCache = SearchQueryCache(query, Instant.now, Some(expected.toVector))
    (queryCacheStorage.deleteBefore _).expects(*).anyNumberOfTimes().returning(Future.successful[Unit]({}))
    (queryCacheStorage.optSignal _).expects(query).once().returning(querySignal)
    (usersStorage.find(_: UserData => Boolean, _: DB => Managed[TraversableOnce[UserData]], _: UserData => UserData)(_: CanBuild[UserData, Vector[UserData]]))
      .expects(*, *, *, *).once().returning(Future.successful(Vector.empty[UserData]))
    (userService.acceptedOrBlockedUsers _).expects().once().returning(Signal.const(Map.empty[UserId, UserData]))
    (convsStorage.findGroupConversations _).expects(*, *, *, *).returns(Future.successful(IndexedSeq.empty[ConversationData]))
    (queryCacheStorage.updateOrCreate _).expects(*, *, *).once().returning(Future.successful(queryCache))
    (sync.syncSearchQuery _).expects(query).once().onCall { _: SearchQuery =>
      Future.successful[SyncId] {
        querySignal ! Some(queryCache)
        result(querySignal.filter(_.contains(queryCache)).head)
        SyncId()
      }
    }
    (usersStorage.listSignal _).expects(expected.toVector).once().returning(Signal.const(expected.map(users).toVector))
    val res = getService(false, id('me)).search("ot").map(_.dir.map(_.id).toSet)
    result(res.filter(_.nonEmpty).head)
  }
}
feature("search inside the team") {
/**
* Helper class to keep track of mocked query
*/
case class PreparedSearch(inTeam: Boolean, selfId: UserId, query: String) {
  // Runs the stored query against a freshly built service and returns the
  // ids of the `local` result bucket (the bucket under test in this feature).
  def perform() = {
    val service = getService(this.inTeam, this.selfId)
    service.search(query).map(_.local.map(_.id).toSet).head
  }
}
/**
* Will mock all services, instantiate a UserSearchService to test, and store the query to expect
*/
/**
 * Stubs every collaborator of UserSearchService so that:
 *  - the DB and backend return no extra results,
 *  - `conversationMembers` share one conversation via membersStorage.getByUsers,
 *  - `connectedUsers` are reported by userService.acceptedOrBlockedUsers,
 *  - the self user's team permissions are written to userPrefs.
 * Always builds an in-team search (PreparedSearch(true, ...)).
 */
def prepareTestSearch(query: String,
                      selfId: UserId,
                      conversationMembers: Set[UserId] = Set(),
                      connectedUsers: Set[UserId] = Set()
                     ): PreparedSearch = {
  val convId = ConvId("e7969e91-366d-4ec5-9d85-4e8a4f9d53e6")
  val searchQuery = Recommended(query)
  val querySignal = new SourceSignal[Option[SearchQueryCache]]()
  // backend contributes no results; only local/team data drives the scenarios
  val queryCache = SearchQueryCache(searchQuery, Instant.now, Some(Vector.empty[UserId]))
  // Stubs
  (queryCacheStorage.deleteBefore _).stubs(*).returning(Future.successful[Unit]({}))
  (queryCacheStorage.optSignal _).stubs(searchQuery).returning(querySignal)
  (usersStorage.get _).stubs(*).onCall { id: UserId =>
    Future.successful(users.get(id))
  }
  (usersStorage.find(_: UserData => Boolean, _: DB => Managed[TraversableOnce[UserData]], _: UserData => UserData)(_: CanBuild[UserData, Vector[UserData]]))
    .stubs(*, *, *, *).returning(Future.successful(Vector.empty[UserData]))
  (userService.acceptedOrBlockedUsers _).stubs().returning(Signal.const(users.filterKeys(connectedUsers.contains)))
  (userService.getSelfUser _).stubs().onCall(_ => Future.successful(users.get(selfId)))
  (convsStorage.findGroupConversations _).stubs(*, *, *, *).returns(Future.successful(IndexedSeq.empty[ConversationData]))
  (queryCacheStorage.updateOrCreate _).stubs(*, *, *).returning(Future.successful(queryCache))
  // every queried id that is in conversationMembers appears as a member of convId
  (membersStorage.getByUsers _).stubs(*).onCall { ids: Set[UserId] =>
    Future.successful(ids.intersect(conversationMembers).map(ConversationMemberData(_, convId)).toIndexedSeq)
  }
  (sync.syncSearchQuery _).stubs(*).onCall { _: SearchQuery =>
    Future.successful[SyncId] {
      querySignal ! Some(queryCache)
      result(querySignal.filter(_.contains(queryCache)).head)
      SyncId()
    }
  }
  // Set up user permissions according to role
  val user = users(selfId)
  userPrefs.setValue(UserPreferences.SelfPermissions, user.permissions._1)
  PreparedSearch(true, selfId, query)
}
// The scenarios below check team-search visibility rules per role. Fixture
// naming convention (from the spec's `users` map): 'mm* = team members,
// 'pp* = partners, 'aa* = admins, other symbols = guests.
scenario("as a member, search partners that are not in a conversation with me") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "Partner",
    selfId = id('mm1),
    conversationMembers = ids('a, 'mm1)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids()
}
scenario("as a member, search partners that are in a conversation with me") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "Partner",
    selfId = id('mm1),
    conversationMembers = ids('pp1, 'k, 'mm1)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids('pp1)
}
scenario("as a member, search partners that are not in a conversation with me by exact handle") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "pp1",
    selfId = id('mm1),
    conversationMembers = ids('a, 'k, 'mm1)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids('pp1)
}
scenario("as a member, search team members whether they are in a conversation with me or not") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "Member",
    selfId = id('mm3),
    conversationMembers = ids('mm2, 'pp1)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids('mm1, 'mm2)
}
scenario("as a member, search connected guests whether they are in a conversation with me or not") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "related",
    selfId = id('mm1),
    conversationMembers = ids('mm2, 'pp1, 'e),
    connectedUsers = ids('d, 'e)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids('d, 'e)
}
scenario("as a member, search not connected guests") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "related",
    selfId = id('mm1),
    conversationMembers = ids('mm2, 'pp1, 'e)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids()
}
scenario("as a partner, search team members that are not in a conversation with me") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "Member",
    selfId = id('pp1),
    conversationMembers = ids('pp1, 'k)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids()
}
scenario("as a partner, show no team members") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "",
    selfId = id('pp3),
    conversationMembers = ids('pp3, 'k)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids()
}
scenario("as a partner, search team members that are in a conversation with me") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "Member",
    selfId = id('pp1),
    conversationMembers = ids('mm1, 'k)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids('mm1)
}
scenario("as a partner, search partners that are in a conversation with me") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "Partner",
    selfId = id('pp2),
    conversationMembers = ids('pp1, 'pp2)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids('pp1)
}
scenario("as a partner, search partners that are not in a conversation with me") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "Partner",
    selfId = id('pp1),
    conversationMembers = ids('mm1)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids()
}
scenario("as a partner, search connected guests whether they are in a conversation with me or not") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "related",
    selfId = id('pp1),
    conversationMembers = ids('mm2, 'pp1, 'e),
    connectedUsers = ids('d, 'e)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids('e)
}
scenario("as a partner, search not connected guests") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "related",
    selfId = id('pp1),
    conversationMembers = ids('mm2, 'pp1, 'e)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids()
}
scenario("as an admin, search the partners that I invited") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "Partner",
    selfId = id('aa1)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids('pp1)
}
scenario("as an admin, see the partners that I invited") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "",
    selfId = id('aa1)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids(
    'aa2, 'mm1, 'mm2, 'mm3, // all non-partner team members
    'pp1 // partner that I invited
  )
}
scenario("as a partner, see the admin that invited me") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "",
    selfId = id('pp2)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids('aa2)
}
scenario("do not return wireless guests as results") {
  // GIVEN
  val preparedSearch = prepareTestSearch(
    query = "Wire",
    selfId = id('aa1),
    connectedUsers = ids('u, 'v)
  )
  // WHEN
  val res = result(preparedSearch.perform())
  // THEN
  res shouldBe ids('v) // the user 'u also has the username starting with Wire, but is wireless
}
}
// Builds the UserSearchService under test. `inTeam` selects the real teamId
// fixture vs. the empty one; all other collaborators are the spec's mocks.
def getService(inTeam: Boolean, selfId: UserId) = {
  new UserSearchService(
    selfId,
    queryCacheStorage,
    if (inTeam) teamId else emptyTeamId,
    userService,
    usersStorage,
    teamsService,
    membersStorage,
    timeouts,
    sync,
    messagesStorage,
    convsStorage,
    convsUi,
    convs,
    userPrefs
  )
}
}
| wireapp/wire-android-sync-engine | zmessaging/src/test/scala/com/waz/service/UserSearchServiceSpec.scala | Scala | gpl-3.0 | 25,069 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wvlet.airframe.http.codegen
import wvlet.airframe.http.{Endpoint, RPC, Router}
import wvlet.log.LogSupport
import scala.util.{Success, Try}
/**
* Scans Airframe HTTP interfaces from the class loader and build a Router object
*/
object RouteScanner extends LogSupport {

  /**
    * Run the code block with the given class loader installed as the thread's
    * context class loader, restoring the previous one afterwards.
    * @param cl class loader to install for the duration of body
    * @param body code block to run
    * @tparam U result type
    * @return the result of body
    */
  private def withClassLoader[U](cl: ClassLoader)(body: => U): U = {
    val prevCl = Thread.currentThread().getContextClassLoader
    try {
      Thread.currentThread().setContextClassLoader(cl)
      body
    } finally {
      Thread.currentThread().setContextClassLoader(prevCl)
    }
  }

  /**
    * Find Airframe HTTP interfaces in the target packages and build a Router object.
    * @param targetPackages packages to scan
    * @param classLoader class loader used both for scanning and for loading classes
    */
  def buildRouter(targetPackages: Seq[String], classLoader: ClassLoader): Router = {
    trace(s"buildRouter: ${targetPackages}")
    // We need to use our own class loader as sbt's layered classloader cannot find application classes
    withClassLoader(classLoader) {
      val lst = ClassScanner.scanClasses(classLoader, targetPackages)
      trace(s"classes: ${lst.mkString(", ")}")
      // Classes that fail to load (e.g., due to missing optional dependencies) are skipped.
      // Try(...).toOption replaces the original Success/other match-and-accumulate loop.
      val classes = lst.flatMap(name => Try(classLoader.loadClass(name)).toOption)
      buildRouter(classes)
    }
  }

  /**
    * Build a Router from already-loaded classes carrying @RPC or @Endpoint annotations.
    */
  private[codegen] def buildRouter(classes: Seq[Class[_]]): Router = {
    var router = Router.empty
    // Find classes with @RPC or @Endpoint annotations.
    //
    // Note: We need to remove object classes ending with $, because Surface.fullTypeNameOf(...)
    // will not distinguish regular classes and their corresponding objects.
    // This is because we generally cannot call classOf[MyObj$] in Scala other than scanning classes directly from class loaders.
    for (cl <- classes if !cl.getName.endsWith("$")) {
      // s-interpolator for consistency: the original used f"" here with no format
      // specifiers, which behaves identically but diverges from the rest of the file.
      trace(s"Searching ${cl} for HTTP endpoints")
      import wvlet.airframe.surface.reflect._
      lazy val s       = ReflectSurfaceFactory.ofClass(cl)
      lazy val methods = ReflectSurfaceFactory.methodsOfClass(cl)
      val hasRPC = findDeclaredAnnotation[RPC](cl).isDefined
      if (hasRPC) {
        debug(s"Found an Airframe RPC interface: ${s.fullName}")
        router = router.addInternal(s, methods)
      } else if (methods.exists(m => m.findAnnotationOf[Endpoint].isDefined)) {
        debug(s"Found an Airframe HTTP interface: ${s.fullName}")
        router = router.addInternal(s, methods)
      }
    }
    // Check whether the route is valid or not
    router.verifyRoutes
    router
  }
}
| wvlet/airframe | airframe-http-codegen/src/main/scala/wvlet/airframe/http/codegen/RouteScanner.scala | Scala | apache-2.0 | 3,282 |
package pep_095
import common.LongOps
import scala.collection.mutable
/**
 * Work-in-progress solutions for Project Euler problem 95 (amicable chains):
 * find the smallest member of the longest chain of proper-divisor sums with
 * no element exceeding MAX.
 */
object Wip {

  val MAX = 1000000

  object Attempt1 {
    def solve(): String = {
      // Original sketch, kept for reference:
      // val a: Seq[(ChainLength, Long)] = (1 to MAX).map(a => (aaa(a), a * -1))
      // val b: (ChainLength, Long) = a.max
      // val c: Long = b._2 * -1
      // c.toString
      ???
    }
  }

  object Attempt2 {
    type ChainLength = Int

    // Memo: value -> length of the chain through it (0 marks "no chain").
    val map = mutable.Map.empty[Long, ChainLength]

    def aaa(n: Long): Unit = aaa(List(n))

    // Walks the divisor-sum sequence, prepending each new value; terminates when
    // the value exceeds MAX, is already memoized, or a cycle is detected.
    @scala.annotation.tailrec
    def aaa(ls: List[Long]): Unit = {
      //println(s"ls=$ls\tmap=$map")
      ls match {
        case l :: t if l > MAX => map ++= t.map(_ -> 0).toMap
        case l :: t if map.contains(l) => map ++= t.map(_ -> 0).toMap
        case l :: t if t.contains(l) =>
          // cycle found: `chain` is the cycle, `nothing` only leads into it
          val (chain, nothing) = t.splitAt(t.indexOf(l) + 1)
          println(s"l=$l\tt=$t\tchain=$chain\tnothing=$nothing")
          map ++= chain.map(_ -> chain.length).toMap ++ nothing.map(_ -> 0).toMap
        case l :: t => aaa(LongOps.properDivisorsSum(l) :: ls)
        case _ => ??? // unreachable: lists always start non-empty and only grow
      }
    }

    def solve() = {
      val ls = LongOps.streamFrom(1).take(MAX)
      ls foreach aaa
      // longest chain wins; ties broken by smallest member via the -1 trick
      val (len, n) = map.toIterator.map { case (k, v) => (v, -1 * k) }.max
      println(s"(len, n)=($len, $n)")
      n * -1
    }
  }

  // 15065ms
  object Attempt3 {
    type ChainLength = Int

    // Memo: value -> length of the chain through it (0 marks "no chain").
    val map = mutable.Map.empty[Long, ChainLength]

    // Returns (chain, rest): `chain` is the detected cycle, `rest` are values
    // that merely lead into it. `ls` is never empty by construction.
    // Fix: `||` short-circuits, so the memo lookup is skipped once l > MAX
    // (the original non-short-circuiting `|` always evaluated both operands).
    @scala.annotation.tailrec
    def findChain(ls: List[Long]): (List[Long], List[Long]) = ls match {
      case l :: t if l > MAX || map.contains(l) => (List.empty[Long], t)
      case l :: t if t.contains(l) => t.splitAt(t.indexOf(l) + 1)
      case l :: t => findChain(LongOps.properDivisorsSum(l) :: ls)
    }

    def solve() = {
      val ls = LongOps.streamFrom(1).take(MAX / 50)
      ls.foreach { l =>
        val (chain, nothing) = findChain(List(l))
        map ++= chain.map(_ -> chain.length).toMap ++ nothing.map(_ -> 0).toMap
      }
      val (len, n) = map.toIterator.map { case (k, v) => (v, -1 * k) }.max
      println(s"(len, n)=($len, $n)")
      n * -1
    }
  }
}
| filippovitale/pe | pe-solution/src/main/scala/pep_095/Wip.scala | Scala | mit | 2,179 |
package com.airbnb.aerosolve.training
import com.airbnb.aerosolve.core.{FeatureVector, Example, FunctionForm}
import com.airbnb.aerosolve.core.models.MlpModel
import com.airbnb.aerosolve.core.util.FloatVector
import com.airbnb.aerosolve.core.util.Util
import com.typesafe.config.Config
import org.slf4j.{LoggerFactory, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
import scala.collection.JavaConversions._
import scala.util.Try
/**
* A trainer that generates a MLP model.
* TODO (Peng): add maxNorm regularizations
*/
object MlpModelTrainer {
private final val log: Logger = LoggerFactory.getLogger("MlpModelTrainer")
private final val LAYER_PREFIX : String = "$layer:"
private final val NODE_PREFIX : String = "$node:"
private final val BIAS_PREFIX : String = "$bias:"
/** Hyper-parameters for the MLP trainer, parsed from the job config (see parseTrainingOptions). */
case class TrainerOptions(loss: String,
                          margin: Double, // margin in Hinge loss or epsilon in regression
                          iteration: Int, // number of iterations to run
                          subsample : Double, // determine mini-batch size
                          threshold : Double, // threshold for binary classification
                          rankKey: String, // feature family holding the label
                          learningRateInit : Double, // initial learning rate
                          learningRateDecay : Double, // learning rate decay rate
                          momentumInit : Double, // initial momentum value
                          momentumEnd : Double, // ending momentum value
                          momentumT : Int, // iterations over which momentum is annealed; <= 0 disables momentum
                          dropout : Double, // dropout rate
                          maxNorm : Double, // max norm
                          weightDecay : Double, // l2 regularization parameter
                          weightInitStd : Double, // weight initialization std
                          cache : String, // "memory" caches the pointwise RDD during training
                          minCount : Int // min feature occurrence count for inclusion in the model
                         )

/** Network topology: one activation function per layer and the node count of each layer. */
case class NetWorkParams(activationFunctions: java.util.ArrayList[FunctionForm],
                         nodeNumber : java.util.ArrayList[Integer])
/**
 * Trains an MLP model: parses options, converts examples to pointwise form,
 * initializes the model from feature statistics, then runs mini-batch SGD.
 * @return the trained (in-place mutated) MlpModel
 */
def train(sc : SparkContext,
          input : RDD[Example],
          config : Config,
          key : String) : MlpModel = {
  val trainerOptions = parseTrainingOptions(config.getConfig(key))
  val networkOptions = parseNetworkOptions(config.getConfig(key))
  val raw : RDD[Example] =
    LinearRankerUtils
      .makePointwiseFloat(input, config, key)
  // optionally cache the training data in memory for the duration of training
  val pointwise = trainerOptions.cache match {
    case "memory" => raw.cache()
    case _ : String => raw
  }
  val model = setupModel(trainerOptions, networkOptions, pointwise)
  modelIteration(sc, trainerOptions, model, pointwise)
  trainerOptions.cache match {
    case "memory" => pointwise.unpersist()
    case _ => Unit
  }
  model
}
/**
 * Runs mini-batch gradient descent with momentum for options.iteration epochs,
 * mutating `model` in place. Each epoch processes floor(1/subsample) mini-batches,
 * each drawn by sampling `subsample` of the data.
 */
def modelIteration(sc : SparkContext,
                   options : TrainerOptions,
                   model : MlpModel,
                   pointwise : RDD[Example]) = {
  var learningRate = options.learningRateInit
  // initialize previous updates with 0
  val updateContainer = setupUpdateContainer(model)
  val N = Math.floor(1.0 / options.subsample).toInt
  for (iter <- 0 until options.iteration) {
    log.info(s"Iteration $iter")
    // -- update momentum
    val momentum = if (options.momentumT > 0) {
      updateMomentum(
        options.momentumInit,
        options.momentumEnd,
        options.momentumT,
        iter)
    } else {
      // if momentumT <= 0, we don't apply momentum based updating at all.
      0.0
    }
    for (k <- 0 until N) {
      // sample without replacement to form the mini-batch
      val miniBatch = pointwise
        .sample(false, options.subsample)
      // compute gradients
      val gradientContainer = computeGradient(sc, options, model, miniBatch)
      // -- update all layer weights and bias and update container
      updateModel(model, gradientContainer, updateContainer,
        momentum.toFloat, learningRate.toFloat, options.dropout)
    }
    // update learning rate
    learningRate *= options.learningRateDecay
  }
}
/**
 * Computes the gradient of the loss averaged over the mini-batch.
 * Each example is forward-propagated (optionally with dropout), then
 * back-propagated; per-partition partial sums are combined with reduceByKey
 * and normalized by the example count.
 * @return map keyed the same way as the update container: (family, feature)
 *         for input weights, (LAYER_PREFIX+i, NODE_PREFIX+j) for hidden weights,
 *         (LAYER_PREFIX+i, BIAS_PREFIX) for biases
 */
def computeGradient(sc : SparkContext,
                    options : TrainerOptions,
                    model : MlpModel,
                    miniBatch : RDD[Example]) : Map[(String, String), FloatVector] = {
  // compute the sum of gradient of examples in the mini-batch
  val modelBC = sc.broadcast(model)
  miniBatch
    .mapPartitions(partition => {
      val model = modelBC.value
      val gradient = scala.collection.mutable.HashMap[(String, String), FloatVector]()
      partition.foreach(example => {
        val fv = example.example.get(0)
        val flatFeatures: java.util.Map[String, java.util.Map[java.lang.String, java.lang.Double]] = Util.flattenFeature(fv)
        val score = if (options.dropout > 0) {
          model.forwardPropagationWithDropout(flatFeatures, options.dropout)
        } else {
          model.forwardPropagation(flatFeatures)
        }
        // gradient of the loss w.r.t. the output activation (score)
        val grad = options.loss match {
          case "hinge" => computeHingeGradient(score, fv, options)
          case "regression" => computeRegressionGradient(score, fv, options)
          case _ => computeHingeGradient(score, fv, options)
        }
        // back-propagation for updating gradient
        // note: activations have been computed in forwardPropagation
        val outputLayerId = model.getNumHiddenLayers
        val func = model.getActivationFunction.get(outputLayerId)
        // delta: gradient of loss function w.r.t. node input
        // activation: the output of a node
        val outputNodeDelta = computeActivationGradient(score, func) * grad
        backPropagation(model, outputNodeDelta.toFloat, gradient, flatFeatures, options.weightDecay.toFloat)
      })
      gradient.iterator
    })
    // carry a count alongside each partial sum so we can average at the end
    .mapValues(fv => (fv, 1.0))
    .reduceByKey((a, b) => {
      a._1.add(b._1)
      (a._1, a._2 + b._2)
    })
    .mapValues(x => {
      x._1.scale(1.0f / x._2.toFloat)
      x._1
    })
    .collectAsMap
    .toMap
}
/**
 * Back-propagates `outputNodeDelta` (d loss / d output-node-input) through the
 * network, ACCUMULATING gradients into `gradient` (values are added to any
 * entries already present). Assumes forwardPropagation has populated
 * model.getLayerActivations for this example.
 * When weightDecay > 0, an L2 term (weightDecay * weight) is added to each
 * weight gradient.
 */
def backPropagation(model: MlpModel,
                    outputNodeDelta: Float,
                    gradient: scala.collection.mutable.HashMap[(String, String), FloatVector],
                    flatFeatures: java.util.Map[String, java.util.Map[java.lang.String, java.lang.Double]],
                    weightDecay: Float = 0.0f) = {
  // outputNodeDelta: gradient of the loss function w.r.t the input of the output node
  val numHiddenLayers = model.getNumHiddenLayers
  val layerNodeNumber = model.getLayerNodeNumber
  val activationFunctions = model.getActivationFunction
  // set delta for the output layer
  var upperLayerDelta = new FloatVector(1)
  upperLayerDelta.set(0, outputNodeDelta)
  // compute gradient for bias at the output node
  val outputBiasGrad = new FloatVector(1)
  outputBiasGrad.set(0, outputNodeDelta)
  val outputBiasKey = (LAYER_PREFIX + numHiddenLayers.toString, BIAS_PREFIX)
  outputBiasGrad.add(gradient.getOrElse(outputBiasKey, new FloatVector(1)))
  gradient.put(outputBiasKey, outputBiasGrad)
  // update for hidden layers
  for (i <- (0 until numHiddenLayers).reverse) {
    // i decreases from numHiddenLayers-1 to 0
    val numNode = layerNodeNumber.get(i)
    val numNodeUpperLayer = layerNodeNumber.get(i + 1)
    val func = activationFunctions.get(i)
    val thisLayerDelta = new FloatVector(numNode)
    // compute gradient of weights from the i-th layer to the (i+1)-th layer
    val activations = model.getLayerActivations.get(i)
    val hiddenLayerWeights = model.getHiddenLayerWeights.get(i)
    val biasKey = (LAYER_PREFIX + i.toString, BIAS_PREFIX)
    val biasGrad = gradient.getOrElse(biasKey, new FloatVector(numNode))
    for (j <- 0 until numNode) {
      val key = (LAYER_PREFIX + i.toString, NODE_PREFIX + j.toString)
      val gradFv = gradient.getOrElse(key, new FloatVector(numNodeUpperLayer))
      // dL/dW(i,j->upper) = activation(j) * delta(upper)
      gradFv.multiplyAdd(activations.get(j), upperLayerDelta)
      if (weightDecay > 0.0f) {
        val weight = model.getHiddenLayerWeights.get(i).get(j)
        gradFv.multiplyAdd(weightDecay, weight)
      }
      gradient.put(key, gradFv)
      // delta(j) = f'(activation(j)) * sum_k W(j->k) * delta(k)
      val grad = upperLayerDelta.dot(hiddenLayerWeights.get(j))
      val delta = computeActivationGradient(activations.get(j), func) * grad
      thisLayerDelta.set(j, delta.toFloat)
    }
    biasGrad.add(thisLayerDelta)
    if (weightDecay > 0.0f) {
      biasGrad.multiplyAdd(weightDecay, model.getBias.get(i))
    }
    gradient.put(biasKey, biasGrad)
    upperLayerDelta = thisLayerDelta
  }
  val inputLayerWeights = model.getInputLayerWeights
  // update for the input layer
  val numNodeUpperLayer = layerNodeNumber.get(0)
  for (family <- flatFeatures) {
    for (feature <- family._2) {
      val key = (family._1, feature._1)
      // We only care about features in the model.
      if (inputLayerWeights.containsKey(key._1) && inputLayerWeights.get(key._1).containsKey(key._2)) {
        val gradFv = gradient.getOrElse(key, new FloatVector(numNodeUpperLayer))
        // input-weight gradient = feature value * first-hidden-layer delta
        gradFv.multiplyAdd(feature._2.toFloat, upperLayerDelta)
        if (weightDecay > 0.0f) {
          val weight = inputLayerWeights.get(key._1).get(key._2)
          gradFv.multiplyAdd(weightDecay, weight)
        }
        gradient.put(key, gradFv)
      }
    }
  }
}
/**
 * Applies one momentum-SGD step: for every tracked weight vector, combines the
 * previous update (scaled by momentum) with the new gradient (scaled by the
 * learning rate), stores the new update back into `updateContainer`, and adds
 * it to the model weights in place.
 * NOTE(review): the `dropout` parameter is accepted but not read in this body —
 * confirm whether it was intended for update scaling.
 */
def updateModel(model: MlpModel,
                gradientContainer: Map[(String, String), FloatVector],
                updateContainer: scala.collection.mutable.HashMap[(String, String), FloatVector],
                momentum: Float,
                learningRate: Float,
                dropout: Double) = {
  // computing current updates based on previous updates and new gradient
  // then update model weights (also update the prevUpdateContainer)
  val numHiddenLayers = model.getNumHiddenLayers
  for ((key, prevUpdate) <- updateContainer) {
    // resolve the key to the weight vector it addresses (bias, hidden, or input)
    val weightToUpdate : FloatVector = if (key._1.startsWith(LAYER_PREFIX)) {
      val layerId: Int = key._1.substring(LAYER_PREFIX.length).toInt
      assert(layerId >= 0 && layerId <= numHiddenLayers)
      if (key._2.equals(BIAS_PREFIX)) {
        // node bias updates
        model.getBias.get(layerId)
      } else if (key._2.startsWith(NODE_PREFIX)) {
        val nodeId = key._2.substring(NODE_PREFIX.length).toInt
        // hidden layer weight updates
        model.getHiddenLayerWeights.get(layerId).get(nodeId)
      } else {
        // error
        assert(false)
        new FloatVector()
      }
    } else {
      // input layer weight updates
      val inputLayerWeight = model.getInputLayerWeights.get(key._1)
      if (inputLayerWeight != null) {
        inputLayerWeight.get(key._2)
      } else {
        new FloatVector()
      }
    }
    if (weightToUpdate.length() > 0) {
      val gradient: FloatVector = gradientContainer.getOrElse(key, new FloatVector(weightToUpdate.length))
      val update: FloatVector = computeUpdates(prevUpdate, momentum, learningRate, gradient)
      // update the update container
      updateContainer.put(key, update)
      // update weights
      weightToUpdate.add(update)
    }
  }
}
/** Trains a model and writes it to the path configured under key + ".model_output". */
def trainAndSaveToFile(sc : SparkContext,
                       input : RDD[Example],
                       config : Config,
                       key : String) = {
  val model = train(sc, input, config, key)
  TrainingUtils.saveModel(model, config, key + ".model_output")
}

/**
 * Parses trainer hyper-parameters from config. Required keys throw if missing;
 * optional keys fall back to defaults via Try(...).getOrElse.
 */
private def parseTrainingOptions(config : Config) : TrainerOptions = {
  TrainerOptions(
    loss = config.getString("loss"),
    margin = config.getDouble("margin"),
    iteration = config.getInt("iterations"),
    subsample = config.getDouble("subsample"),
    threshold = Try(config.getDouble("rank_threshold")).getOrElse(0.0),
    rankKey = config.getString("rank_key"),
    learningRateInit = config.getDouble("learning_rate_init"),
    learningRateDecay = Try(config.getDouble("learning_rate_decay")).getOrElse(1.0),
    momentumInit = Try(config.getDouble("momentum_init")).getOrElse(0.0),
    momentumEnd = Try(config.getDouble("momentum_end")).getOrElse(0.0),
    momentumT = Try(config.getInt("momentum_t")).getOrElse(0),
    dropout = Try(config.getDouble("dropout")).getOrElse(0.0),
    maxNorm = Try(config.getDouble("max_norm")).getOrElse(0.0),
    weightDecay = Try(config.getDouble("weight_decay")).getOrElse(0.0),
    weightInitStd = config.getDouble("weight_init_std"),
    cache = Try(config.getString("cache")).getOrElse(""),
    minCount = Try(config.getInt("min_count")).getOrElse(0)
  )
}

/**
 * Parses the network topology from config: one activation name per layer
 * ("activations") and the node count of each layer ("node_number").
 */
private def parseNetworkOptions(config : Config) : NetWorkParams = {
  val activationStr = config.getStringList("activations")
  val activations = new java.util.ArrayList[FunctionForm]()
  for (func: String <- activationStr) {
    activations.append(getFunctionForm(func))
  }
  val nodeNumbers = new java.util.ArrayList[Integer]()
  for (num : Integer <- config.getIntList("node_number")) {
    nodeNumbers.append(num)
  }
  NetWorkParams(
    activationFunctions = activations,
    nodeNumber = nodeNumbers
  )
}
/**
 * Builds an MlpModel with Gaussian-initialized weights (std = weightInitStd).
 * Input-layer weights get one row per feature that passes the minCount
 * statistics filter (the rankKey/label family is excluded); biases start at 0.
 */
def setupModel(trainerOptions : TrainerOptions,
               networkOptions: NetWorkParams,
               pointwise : RDD[Example]) : MlpModel = {
  val model = new MlpModel(
    networkOptions.activationFunctions,
    networkOptions.nodeNumber)
  val hiddenLayerWeights = model.getHiddenLayerWeights
  val inputLayerWeights = model.getInputLayerWeights
  val layerNodeNumber = model.getLayerNodeNumber
  val numHiddenLayers = model.getNumHiddenLayers
  val std = trainerOptions.weightInitStd.toFloat
  val stats = TrainingUtils.getFeatureStatistics(trainerOptions.minCount, pointwise)
  // set up input layer weights
  var count : Int = 0
  for (kv <- stats) {
    val (family, feature) = kv._1
    // skip the label family; everything else becomes an input feature
    if (family != trainerOptions.rankKey) {
      if (!inputLayerWeights.containsKey(family)) {
        inputLayerWeights.put(family, new java.util.HashMap[java.lang.String, FloatVector]())
      }
      val familyMap = inputLayerWeights.get(family)
      if (!familyMap.containsKey(feature)) {
        count = count + 1
        familyMap.put(feature, FloatVector.getGaussianVector(layerNodeNumber.get(0), std))
      }
    }
  }
  // set up hidden layer weights
  for (i <- 0 until numHiddenLayers) {
    val arr = new java.util.ArrayList[FloatVector]()
    for (j <- 0 until layerNodeNumber.get(i)) {
      val fv = FloatVector.getGaussianVector(layerNodeNumber.get(i + 1), std)
      arr.add(fv)
    }
    hiddenLayerWeights.put(i, arr)
  }
  // note: bias at each node initialized to zero in this trainer
  log.info(s"Total number of features is $count")
  model
}

/**
 * Allocates a zero FloatVector for every weight vector in the model, keyed
 * exactly like the gradient map: (family, feature) for input weights,
 * (LAYER_PREFIX+i, NODE_PREFIX+j) for hidden weights, (LAYER_PREFIX+i,
 * BIAS_PREFIX) for per-layer biases. Holds the "previous update" for momentum.
 */
private def setupUpdateContainer(model: MlpModel) : scala.collection.mutable.HashMap[(String, String), FloatVector] = {
  val container = scala.collection.mutable.HashMap[(String, String), FloatVector]()
  // set up input layer weights gradient
  val inputLayerWeights = model.getInputLayerWeights
  val n0 = model.getLayerNodeNumber.get(0)
  for (family <- inputLayerWeights) {
    for (feature <- family._2) {
      val key = (family._1, feature._1)
      container.put(key, new FloatVector(n0))
    }
  }
  // set up hidden layer weights gradient
  val numHiddenLayers = model.getNumHiddenLayers
  for (i <- 0 until numHiddenLayers) {
    val thisLayerNodeNum = model.getLayerNodeNumber.get(i)
    val nextLayerNodeNum = model.getLayerNodeNumber.get(i + 1)
    for (j <- 0 until thisLayerNodeNum) {
      val key = (LAYER_PREFIX + i.toString, NODE_PREFIX + j.toString)
      container.put(key, new FloatVector(nextLayerNodeNum))
    }
  }
  // set up bias gradient
  for (i <- 0 to numHiddenLayers) {
    // all bias in the same layer are put to the same FloatVector
    val key = (LAYER_PREFIX + i.toString, BIAS_PREFIX)
    container.put(key, new FloatVector(model.getLayerNodeNumber.get(i)))
  }
  container
}
/**
 * Computes the next weight delta from the previous delta and the gradient:
 * update = momentum * prevUpdate - (1 - momentum) * learningRate * gradient.
 * Returns a fresh vector; neither argument is mutated.
 */
private def computeUpdates(prevUpdate: FloatVector,
                           momentum: Float,
                           learningRate: Float,
                           gradient: FloatVector): FloatVector = {
  // based on hinton's dropout paper: http://arxiv.org/pdf/1207.0580.pdf
  val update: FloatVector = new FloatVector(prevUpdate.length)
  update.multiplyAdd(momentum, prevUpdate)
  update.multiplyAdd(-(1.0f - momentum) * learningRate, gradient)
  update
}
private def getFunctionForm(func: String) : FunctionForm = {
func match {
case "sigmoid" => FunctionForm.SIGMOID
case "relu" => FunctionForm.RELU
case "tanh" => FunctionForm.TANH
case "identity" => FunctionForm.IDENTITY
case _ => assert(false); FunctionForm.SIGMOID
}
}
private def updateMomentum(momentumInit: Double,
momentumEnd: Double,
momentumT: Int,
iter: Int) : Double = {
if (iter >= momentumT)
return momentumEnd
val frac = iter.toDouble / momentumT
frac * momentumInit + (1 - frac) * momentumEnd
}
private def computeHingeGradient(prediction: Double,
fv: FeatureVector,
option: TrainerOptions): Double = {
// Returns d_loss / d_output_activation
// gradient of loss function w.r.t the output node activation
val label = TrainingUtils.getLabel(fv, option.rankKey, option.threshold)
// loss = max(0.0, option.margin - label * prediction)
if (option.margin - label * prediction > 0) {
-label
} else {
0.0
}
}
private def computeRegressionGradient(prediction: Double,
fv: FeatureVector,
option: TrainerOptions): Double = {
// epsilon-insensitive loss for regression (as in SVM regression)
// loss = max(0.0, |prediction - label| - epsilon)
// where epsilon = option.margin
assert(option.margin > 0)
val label = TrainingUtils.getLabel(fv, option.rankKey)
if (prediction - label > option.margin) {
1.0
} else if (prediction - label < - option.margin) {
-1.0
} else {
0.0
}
}
  /**
   * Derivative of the activation function, expressed in terms of the
   * activation's output value rather than its input — cheap for exactly these
   * forms: sigmoid' = a(1-a), relu' = [a > 0], identity' = 1, tanh' = 1 - a^2.
   *
   * @param activation the node's output value a = f(x)
   * @param func       which activation function produced it
   * @return df/dx evaluated at the point that produced `activation`
   */
  private def computeActivationGradient(activation: Double,
                                        func: FunctionForm): Double = {
    // compute the gradient of activation w.r.t input
    func match {
      case FunctionForm.SIGMOID => activation * (1.0 - activation)
      case FunctionForm.RELU => if (activation > 0) 1.0 else 0.0
      case FunctionForm.IDENTITY => 1.0
      case FunctionForm.TANH => 1.0 - activation * activation
    }
  }
}
| ralic/aerosolve | training/src/main/scala/com/airbnb/aerosolve/training/MlpModelTrainer.scala | Scala | apache-2.0 | 19,172 |
package org.jetbrains.sbt
package project.module
import java.awt.event.ActionEvent
import java.util.Collections
import com.intellij.openapi.project.Project
import com.intellij.openapi.roots.ui.configuration.{ModuleConfigurationState, ModuleElementsEditor}
import com.intellij.ui.CollectionListModel
import com.intellij.util.text.DateFormatUtil
import javax.swing.JPanel
import javax.swing.event.ListSelectionEvent
import javax.swing.table.AbstractTableModel
import org.jetbrains.plugins.scala.util.JListCompatibility
import org.jetbrains.sbt.resolvers.indexes.ResolverIndex
import org.jetbrains.sbt.resolvers.{SbtIndexesManager, SbtResolver}
import org.jetbrains.sbt.settings.SbtSettings
import scala.jdk.CollectionConverters._
/**
 * Module settings editor panel for sbt-based modules: shows the detected sbt
 * version, the module's implicit sbt imports, and the module's resolvers with
 * an action to update the selected resolvers' indexes.
 *
 * @author Nikolay Obedin
 * @since 12/1/14.
 */
class SbtModuleSettingsEditor (state: ModuleConfigurationState) extends ModuleElementsEditor(state) {
  import SbtModule._
  private val myForm = new SbtModuleSettingsForm
  // List model wrapper for the implicit-imports list (compatibility shim over CollectionListModel).
  private val modelWrapper = new JListCompatibility.CollectionListModelWrapper(new CollectionListModel[String](Collections.emptyList[String]))
  private val resolvers = Resolvers(getModel.getModule).toSeq
  override def getDisplayName: String = SbtBundle.message("sbt.settings.sbtModuleSettings")
  // Nothing to persist: the editor is read-only apart from the index-update action.
  override def saveData(): Unit = {}
  override def createComponentImpl(): JPanel = {
    myForm.sbtImportsList.setEmptyText(SbtBundle.message("sbt.settings.noImplicitImportsFound"))
    JListCompatibility.setModel(myForm.sbtImportsList, modelWrapper.getModelRaw)
    // Kick off an index update for every resolver row currently selected in the table.
    myForm.updateButton.addActionListener((e: ActionEvent) => {
      val resolversToUpdate: Seq[SbtResolver] = myForm.resolversTable.getSelectedRows.map(resolvers(_)).toSeq
      SbtIndexesManager.getInstance(state.getProject).foreach(_.updateWithProgress(resolversToUpdate))
    })
    myForm.mainPanel
  }
  override def reset(): Unit = {
    val module = getModel.getModule
    val moduleSettings = SbtSettings.getInstance(state.getProject).getLinkedProjectSettings(module)
    myForm.sbtVersionTextField.setText(moduleSettings.map(_.sbtVersion).getOrElse(SbtBundle.message("sbt.settings.sbtVersionNotDetected")))
    modelWrapper.getModel.replaceAll(Imports(module).asJava)
    myForm.resolversTable.setModel(new ResolversModel(resolvers, state.getProject))
    if (myForm.resolversTable.getRowCount > 0)
      myForm.resolversTable.setRowSelectionInterval(0, 0)
    // Preferred widths for the name / url / last-updated columns.
    myForm.resolversTable.getColumnModel.getColumn(0).setPreferredWidth(50)
    myForm.resolversTable.getColumnModel.getColumn(1).setPreferredWidth(400)
    myForm.resolversTable.getColumnModel.getColumn(2).setPreferredWidth(30)
    // Keep the update button's enabled state in sync with the selection.
    myForm.resolversTable.getSelectionModel.addListSelectionListener((_: ListSelectionEvent) => setupUpdateButton())
    setupUpdateButton()
  }
  def setupUpdateButton(): Unit = {
    // use first element in model to do availability checking if no row has yet been selected
    val selectedRow = Option(myForm.resolversTable.getSelectedRow).filter(_ >= 0).getOrElse(0)
    try {
      // Column 2 holds the human-readable update status; disable the button when maven is unavailable.
      val value = myForm.resolversTable.getModel.getValueAt(selectedRow, 2)
      myForm.updateButton.setEnabled(value != SbtBundle.message("sbt.settings.resolvers.mavenUnavailable"))
    } catch {
      case _: IndexOutOfBoundsException => myForm.updateButton.setEnabled(false) // no resolvers in project?
    }
  }
}
/**
 * Read-only table model over a module's resolvers. Columns: resolver name,
 * root URL, and a human-readable "last updated" status for its index.
 */
private class ResolversModel(val resolvers: Seq[SbtResolver], val project:Project) extends AbstractTableModel {
  private val columns = Seq(
    SbtBundle.message("sbt.settings.resolvers.name"),
    SbtBundle.message("sbt.settings.resolvers.url"),
    SbtBundle.message("sbt.settings.resolvers.updated")
  )
  override def getColumnCount: Int = columns.size
  override def getRowCount: Int = resolvers.size
  override def getColumnName(columnIndex: Int): String = columns(columnIndex)
  // NOTE(review): a columnIndex outside 0..2 would throw a MatchError here.
  override def getValueAt(rowIndex: Int, columnIndex: Int): String = {
    val valueOpt = columnIndex match {
      case 0 => resolvers.lift(rowIndex).map(_.name)
      case 1 => resolvers.lift(rowIndex).map(_.root)
      case 2 =>
        // Render the index timestamp, or a sentinel message when the index has
        // never been updated / maven is unreachable.
        for {
          resolver <- resolvers.lift(rowIndex)
          index <- resolver.getIndex(project)
        } yield {
          val ts = index.getUpdateTimeStamp
          if (ts == ResolverIndex.NO_TIMESTAMP)
            SbtBundle.message("sbt.settings.resolvers.neverUpdated")
          else if (ts == ResolverIndex.MAVEN_UNAVALIABLE)
            SbtBundle.message("sbt.settings.resolvers.mavenUnavailable")
          else
            DateFormatUtil.formatDate(ts)
        }
    }
    // "???" covers an out-of-range row or a missing index.
    valueOpt.getOrElse("???")
  }
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/sbt/project/module/SbtModuleSettingsEditor.scala | Scala | apache-2.0 | 4,576 |
/*
* Copyright (C) 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.commons.bean
import org.beangle.commons.lang.reflect.BeanInfos
import org.beangle.commons.lang.testbean.TestBean
import org.scalatest.matchers.should.Matchers
import org.scalatest.funspec.AnyFunSpec
/**
 * Behavioural tests for the Properties bean-access helper: reading and writing
 * simple, nested (Option-wrapped) and indexed (map) properties on both Scala
 * and Java objects.
 */
class PropertiesTest extends AnyFunSpec with Matchers {
  // Pre-populate the BeanInfos cache so property metadata is available to Properties.
  BeanInfos.of(classOf[TestBean])
  describe("Properties") {
    it("Get or Set property") {
      BeanInfos.get(classOf[TestBean]).properties("javaMap")
      val bean = new TestBean
      Properties.set(bean, "intValue", 2)
      bean.intValue should be(2)
      // Indexed write through a java.util.Map property: javaMap(1) = "2".
      bean.javaMap = new java.util.HashMap[Int, String]
      Properties.copy(bean, "javaMap(1)", "2")
      bean.javaMap.get(1) should be("2")
    }
    it("get option nested value") {
      BeanInfos.get(classOf[TestBean]).properties("javaMap")
      val bean = new TestBean
      var parent = new TestBean
      parent.id = 2
      // Navigating through an unset Option ("parent" is None) yields null rather than throwing.
      val a = Properties.get[Object](bean, "parent.id")
      assert(null == a)
      bean.parent = Some(parent)
      val b = Properties.get[Object](bean, "parent.id")
      assert(Integer.valueOf(2) == b)
      Properties.set(bean, "parent.id", 4)
      val c = Properties.get[Object](bean, "parent.id")
      assert(Integer.valueOf(4) == c)
      // Empty string, null and None all clear an Option-typed property to None ...
      Properties.copy(bean, "parent", "")
      assert(Properties.get[Object](bean, "parent") == None)
      Properties.copy(bean, "parent", null)
      assert(Properties.get[Object](bean, "parent") == None)
      Properties.copy(bean, "parent", None)
      assert(Properties.get[Object](bean, "parent") == None)
      // ... while bare values and Some-wrapped values are both stored as Some(value).
      Properties.copy(bean, "parent", parent)
      assert(Properties.get[Object](bean, "parent") == Some(parent))
      Properties.copy(bean, "parent", Some(parent))
      assert(Properties.get[Object](bean, "parent") == Some(parent))
    }
    it("get set option[primitives]") {
      BeanInfos.get(classOf[TestBean]).properties("javaMap")
      val bean = new TestBean
      // NOTE(review): `parent` is unused in this test case.
      var parent = new TestBean
      // Primitive values are auto-wrapped into Option; null clears back to None.
      Properties.set(bean, "age", 4)
      assert(bean.age == Some(4))
      Properties.set(bean, "age", null)
      assert(bean.age == None)
    }
    it("test scala map") {
      val p = new org.beangle.commons.collection.Properties("id" -> 1, "name" -> "mike")
      assert(Properties.get[Any](p, "id") == 1)
      assert(Properties.get[Any](p, "name") == "mike")
      Properties.set(p, "id", 2)
      assert(Properties.get[Any](p, "id") == 2)
    }
    it("test java map") {
      val p = new java.util.HashMap[Any, Any]()
      p.put("id", 1)
      p.put("name", "mike")
      assert(Properties.get[Any](p, "id") == 1)
      assert(Properties.get[Any](p, "name") == "mike")
      Properties.set(p, "id", 2)
      assert(Properties.get[Any](p, "id") == 2)
    }
  }
}
| beangle/commons | core/src/test/scala/org/beangle/commons/bean/PropertiesTest.scala | Scala | lgpl-3.0 | 3,433 |
package org.shapelogic.sc.polygon
/**
 * Factory for creating an empty polygon of the same concrete type as an
 * existing one.
 *
 * @author Sami Badawi
 */
object PolygonFactory {
  /**
   * Builds an empty polygon of the same runtime type as the argument.
   * MultiLinePolygon is matched before Polygon so the more specific type wins;
   * a null argument (type patterns never match null) or any other subtype
   * yields null, exactly as the original isInstanceOf chain did.
   */
  def createSameType(poygon: Polygon): Polygon = poygon match {
    case _: MultiLinePolygon => new MultiLinePolygon(null)
    case _: Polygon => new Polygon(null)
    case _ => null
  }
}
| sami-badawi/shapelogic-scala | src/test/scala/org/shapelogic/sc/polygon/PolygonFactory.scala | Scala | mit | 329 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package mocks.config
import config.AppConfig
import config.AppConfig.RequestMethodAndRoute
import mocks.Mock
import org.mockito.stubbing.OngoingStubbing
import org.scalatest.Suite
import play.api.Configuration
/**
 * Mockito-backed mock of AppConfig for unit tests. Mixing this in provides
 * `mockAppConfig` plus the `MockAppConfig` stubbing DSL, where each method
 * starts an OngoingStubbing so a test can chain a return value, e.g.
 * `MockAppConfig.appName returns "self-assessment-api"`.
 */
trait MockAppConfig extends Mock { _: Suite =>
  val mockAppConfig = mock[AppConfig]
  // One stubbing entry point per AppConfig member.
  object MockAppConfig {
    def appName: OngoingStubbing[String] = when(mockAppConfig.appName)
    def appUrl: OngoingStubbing[String] = when(mockAppConfig.appUrl)
    // The `version` argument is ignored: the stub matches any String.
    def apiStatus(version: String): OngoingStubbing[String] = when(mockAppConfig.apiStatus(any[String]()))
    def featureSwitch: OngoingStubbing[Option[Configuration]] = when(mockAppConfig.featureSwitch)
    def registrationEnabled: OngoingStubbing[Boolean] = when(mockAppConfig.registrationEnabled)
    def saApiUrl: OngoingStubbing[String] = when(mockAppConfig.saApiUrl)
    def cgApiUrl: OngoingStubbing[String] = when(mockAppConfig.cgApiUrl)
    def taxCalcUrl: OngoingStubbing[String] = when(mockAppConfig.taxCalcUrl)
    def propertyUrl: OngoingStubbing[String] = when(mockAppConfig.propertyUrl)
    def selfEmploymentUrl: OngoingStubbing[String] = when(mockAppConfig.selfEmploymentUrl)
    def dividendsApiUrl: OngoingStubbing[String] = when(mockAppConfig.dividendsApiUrl)
    def savingsAccountsApiUrl: OngoingStubbing[String] = when(mockAppConfig.savingsAccountApiUrl)
    def crystallisationApiUrl: OngoingStubbing[String] = when(mockAppConfig.crystallisationApiUrl)
    def deprecatedRoutes[A]: OngoingStubbing[Seq[RequestMethodAndRoute]] = when(mockAppConfig.deprecatedRoutes)
    def environmentHeaders: OngoingStubbing[Option[Seq[String]]] = when(mockAppConfig.environmentHeaders)
    def confidenceLevelDefinitionConfig: OngoingStubbing[Boolean] = when(mockAppConfig.confidenceLevelDefinitionConfig)
  }
  // Reset all stubbing between tests so expectations do not leak across cases.
  override protected def beforeEach(): Unit = {
    super.beforeEach()
    reset(mockAppConfig)
  }
}
| hmrc/self-assessment-api | test/mocks/config/MockAppConfig.scala | Scala | apache-2.0 | 2,498 |
package controllers
import play.api._
import play.api.mvc._
import java.util.Date._
import play.api.i18n._
import play.api.data.Form
import play.api.data.Forms._
import play.api.data.validation.Constraints._
import play.api.libs.json.Json
import java.util.Date
import scala.concurrent.{ ExecutionContext, Future }
import javax.inject._
/**
 * Main application controller: home page, system-administration menu and the
 * application-configuration form (render + submit).
 */
class Application @Inject() (val messagesApi: MessagesApi)
  (implicit ec: ExecutionContext) extends Controller with I18nSupport{
  // Default / Home Page
  def index = Action {
    Ok(views.html.index())
  }//End index
  // System Administration menu
  def sysconfig = Action {
    Ok(views.html.sysconfig())
  }//End sysconfig
  //Application Configuration Actions
  /**
   * The mapping for the App Config form.
   * Field names must match those referenced by the appconfig view template;
   * "modifiedBy" is a nested mapping onto NormalUserForm.
   */
  val AppConfigForm: Form[CreateAppConfigForm] = Form {
    mapping(
      "blogTitle" -> nonEmptyText,
      "blogTagLine" -> nonEmptyText,
      "blogTitleImage" -> nonEmptyText,
      "blogUrl" -> nonEmptyText,
      "accurateAt" -> date,
      "modifiedBy" -> mapping("firstName" -> text,
                              "lastName" -> text
      )(NormalUserForm.apply)(NormalUserForm.unapply)
    )(CreateAppConfigForm.apply)(CreateAppConfigForm.unapply)
  }//End AppConfigForm
  // Configure THIS installation of the ScalaBlog application
  def appconfig = Action {
    Ok(views.html.appconfig(AppConfigForm))
  }//End appconfig
  //SAVE the contents of the AppConfig Form.
  def appconfigsubmit = Action {
    //TODO: verify the contents of the form are valid
    //TODO: save the form
    //Return the SYS config Menu
    Ok(views.html.sysconfig())
  }//End appconfigsubmit
}//End Application controller class
/**
* Case classes
*
* The following case classes are used "here" within the controller only.
* It is quite often the case that the model representation and that which is needed within a view are different.
* So we can use the controller as an appropriate place to hold this interfacing code.
*
* You can also use "nested" values.
* In THIS controller we embed the Normal User case class into the CreateAppConfigForm case class.
*/
// View-layer form model for a user (first/last name only).
case class NormalUserForm(firstName: String, lastName: String)
// Form-backing model for the application-configuration page; nests NormalUserForm as modifiedBy.
case class CreateAppConfigForm(blogTitle: String, blogTagLine: String, blogTitleImage: String, blogUrl: String, accurateAt: java.util.Date, modifiedBy: NormalUserForm) | gavinbaumanis/scribble | app/controllers/Application.scala | Scala | apache-2.0 | 2,353 |
package org.intracer.wmua.cmd
import org.intracer.wmua.{ContestJury, Image, JuryTestHelpers}
import org.scalawiki.dto.{Namespace, Page}
import org.scalawiki.query.SinglePageQuery
import org.specs2.concurrent.ExecutionEnv
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import scala.concurrent.Future
/**
 * Specs for FetchImageInfo: verifies that category members fetched from the
 * commons bot via imageInfoByGenerator are mapped into Image rows.
 */
class FetchImageInfoSpec extends Specification with Mockito with JuryTestHelpers {
  // Expected Image row as FetchImageInfo should produce it.
  // NOTE(review): the `contest` parameter is unused in the body.
  def contestImage(id: Long, contest: Long) =
    Image(id, s"File:Image$id.jpg", Some(s"url$id"), Some(s"pageUrl$id"), 640, 480, None, size = Some(1234))
  // Raw wiki page + image info as the query layer would return it for the same id.
  def imageInfo(id: Long) = new Page(Some(id), Some(Namespace.FILE), s"File:Image$id.jpg", images = Seq(
    new org.scalawiki.dto.Image(s"File:Image$id.jpg", Some(s"url$id"), Some(s"pageUrl$id"), Some(1234), Some(640), Some(480))
  ))
  // NOTE(review): the two cases below duplicate their mock setup; only the
  // images/imageInfos fixtures differ.
  "appendImages" should {
    "get images empty" in {
      implicit ee: ExecutionEnv =>
        val category = "Category:Category Name"
        val contestId = 13
        // NOTE(review): `imageId` is unused in this (empty) case.
        val imageId = 11
        val images = Seq.empty[Image]
        val imageInfos = Seq.empty[Page]
        // Stub the single-page query chain the fetcher is expected to issue.
        val query = mock[SinglePageQuery]
        query.withContext(Map("contestId" -> contestId.toString, "max" -> "0")) returns query
        query.imageInfoByGenerator(
          "categorymembers", "cm", namespaces = Set(Namespace.FILE), props = Set("timestamp", "user", "size", "url"), titlePrefix = None
        ) returns Future.successful(imageInfos)
        val commons = mockBot()
        commons.page(category) returns query
        val contest = ContestJury(Some(contestId), "WLE", 2015, "Ukraine", Some(category), None, None)
        FetchImageInfo(category, Seq.empty, contest, commons).apply() must be_==(images).await
    }
    "get images one image" in {
      implicit ee: ExecutionEnv =>
        val category = "Category:Category Name"
        val contestId = 13
        val imageId = 11
        val images = Seq(contestImage(imageId, contestId))
        val imageInfos = Seq(imageInfo(imageId))
        val query = mock[SinglePageQuery]
        query.withContext(Map("contestId" -> contestId.toString, "max" -> "0")) returns query
        query.imageInfoByGenerator(
          "categorymembers", "cm", namespaces = Set(Namespace.FILE), props = Set("timestamp", "user", "size", "url"), titlePrefix = None
        ) returns Future.successful(imageInfos)
        val commons = mockBot()
        commons.page(category) returns query
        val contest = ContestJury(Some(contestId), "WLE", 2015, "Ukraine", Some(category))
        FetchImageInfo(category, Seq.empty, contest, commons).apply() must be_==(images).await
    }
  }
}
| intracer/wlxjury | test/org/intracer/wmua/cmd/FetchImageInfoSpec.scala | Scala | apache-2.0 | 2,628 |
package com.twitter.finagle.memcached.integration
import com.twitter.finagle.Service
import com.twitter.finagle.builder.ClientBuilder
import com.twitter.finagle.memcached.protocol._
import com.twitter.finagle.memcached.protocol.text.Memcached
import com.twitter.finagle.memcached.util.ChannelBufferUtils._
import com.twitter.io.Buf
import com.twitter.util.TimeConversions._
import com.twitter.util.{Await, Time}
import java.net.{InetAddress, InetSocketAddress}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfter, FunSuite}
/**
 * Integration test driving an in-process memcached server through a finagle
 * client: a set/get round-trip and the quit command.
 */
@RunWith(classOf[JUnitRunner])
class InterpreterServiceTest extends FunSuite with BeforeAndAfter {
  var server: InProcessMemcached = null
  var client: Service[Command, Response] = null
  before {
    // Bind the in-process server to an ephemeral loopback port, then point a
    // single-connection memcached client at whatever port was chosen.
    server = new InProcessMemcached(new InetSocketAddress(InetAddress.getLoopbackAddress, 0))
    val address = server.start().boundAddress.asInstanceOf[InetSocketAddress]
    client = ClientBuilder()
      .hosts(address)
      .codec(new Memcached)
      .hostConnectionLimit(1)
      .build()
  }
  after {
    server.stop()
  }
  test("set & get") {
    val key = Buf.Utf8("key")
    val value = Buf.Utf8("value")
    val zero = "0"
    // Delete first so the Set/Get pair starts from a known-empty key.
    val result = for {
      _ <- client(Delete(key))
      _ <- client(Set(key, 0, Time.epoch, value))
      r <- client(Get(Seq(key)))
    } yield r
    assert(Await.result(result, 1.second) == Values(Seq(Value(key, value, None, Some(Buf.Utf8(zero))))))
    assert(client.isAvailable)
  }
  test("quit") {
    val result = client(Quit())
    assert(Await.result(result) == NoOp())
  }
}
| lukiano/finagle | finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/InterpreterServiceTest.scala | Scala | apache-2.0 | 1,612 |
/**
* Copyright (C) 2010 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.event.events
import org.orbeon.oxf.xforms.control.{XFormsControl, XFormsSingleNodeControl}
import org.orbeon.oxf.xforms.event.XFormsEvent
import XFormsEvent._
import org.orbeon.xforms.analysis.model.ValidationLevel
/**
 * Base class for UI events, that is events only dispatched to controls.
 *
 * Properties such as label/hint/help/alert/binding/level are derived lazily
 * from the target control via XFormsUIEvent.Getters, and legacy property
 * names are mapped to their xxf:-namespaced replacements.
 */
abstract class XFormsUIEvent(
  eventName     : String,
  val targetControl : XFormsControl,
  properties    : PropertyGetter,
  bubbles       : Boolean,
  cancelable    : Boolean
) extends XFormsEvent(
  eventName,
  targetControl,
  properties,
  bubbles,
  cancelable
) {
  // Convenience constructor: UI events bubble and are not cancelable by default.
  def this(eventName: String, target: XFormsControl, properties: PropertyGetter) =
    this(eventName, target, properties, bubbles = true, cancelable = false)
  require(targetControl ne null)
  override def lazyProperties = getters(this, XFormsUIEvent.Getters)
  // Resolve deprecated property names before delegating to the generic lookup.
  override def newPropertyName(name: String) = XFormsUIEvent.Deprecated.get(name) orElse super.newPropertyName(name)
}
/**
 * Companion holding the property getters shared by all UI events, plus the
 * mapping from deprecated property names to their namespaced replacements.
 */
private object XFormsUIEvent {

  // Legacy, non-namespaced property names and their current xxf: equivalents.
  val Deprecated = Map(
    "target-ref" -> "xxf:binding",
    "alert"      -> "xxf:alert",
    "label"      -> "xxf:label",
    "hint"       -> "xxf:hint",
    "help"       -> "xxf:help"
  )

  // Lazily-evaluated event properties, each derived from the target control.
  val Getters = Map[String, XFormsUIEvent => Option[Any]](
    "target-ref"                -> binding,
    xxfName("binding")          -> binding,
    xxfName("control-position") -> controlPosition,
    "label"                     -> label,
    xxfName("label")            -> label,
    "help"                      -> help,
    xxfName("help")             -> help,
    "hint"                      -> hint,
    xxfName("hint")             -> hint,
    "alert"                     -> alert,
    xxfName("alert")            -> alert,
    xxfName("level")            -> level
  )

  // Binding of the target control, even when the control is non-relevant.
  def binding(e: XFormsUIEvent) = Option(e.targetControl.bindingEvenIfNonRelevant)

  // Position of the control as known to the part's static analysis.
  def controlPosition(e: XFormsUIEvent) =
    e.targetControl.container.getPartAnalysis.getControlPosition(e.targetControl.getPrefixedId)

  def label(e: XFormsUIEvent) = Option(e.targetControl.getLabel)
  def help(e: XFormsUIEvent) = Option(e.targetControl.getHelp)
  def hint(e: XFormsUIEvent) = Option(e.targetControl.getHint)
  def alert(e: XFormsUIEvent) = Option(e.targetControl.getAlert)

  // Alert validation level; only single-node controls carry one.
  def level(e: XFormsUIEvent): Option[String] = e.targetControl match {
    case c: XFormsSingleNodeControl => c.alertLevel map (_.entryName)
    case _ => None // was `case c => None`: the binding was unused
  }
}
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa
/**
 * Placeholder object kept solely so that maven publishing generates non-empty
 * -javadoc and -sources artifacts for this module.
 */
object ScalaDocs
| ddseapy/geomesa | geomesa-kudu/geomesa-kudu-spark-runtime/src/main/scala/org/locationtech/geomesa/ScalaDocs.scala | Scala | apache-2.0 | 624 |
package org.jetbrains.sbt
package project.data
import com.intellij.openapi.externalSystem.model.{ProjectKeys, Key, ProjectSystemId}
import com.intellij.openapi.externalSystem.model.project.AbstractExternalEntityData
/**
 * External-system entity describing the Scala facet imported from an sbt
 * project: Scala version, base package, compiler library and compiler options.
 *
 * @author Pavel Fatin
 */
class ScalaFacetData(val owner: ProjectSystemId,
                     val scalaVersion: String,
                     val basePackage: String,
                     val compilerLibraryName: String,
                     val compilerOptions: Seq[String]) extends AbstractExternalEntityData(owner)
object ScalaFacetData {
  // Processing weight is LIBRARY_DEPENDENCY's + 1, presumably so the facet is
  // processed after library dependencies — TODO confirm ordering semantics.
  val Key: Key[ScalaFacetData] = new Key(classOf[ScalaFacetData].getName,
    ProjectKeys.LIBRARY_DEPENDENCY.getProcessingWeight + 1)
} | consulo/consulo-scala | SBT/src/main/scala/org/jetbrains/sbt/project/data/ScalaFacetData.scala | Scala | apache-2.0 | 702 |
package com.github.gdefacci.briscola.web
package modules
import org.obl.raz.Api._
import org.obl.raz.UriTemplate
import com.github.gdefacci.briscola.player.PlayerId
import com.github.gdefacci.briscola.game.GameId
import com.github.gdefacci.briscola.competition.CompetitionId
import javax.inject.Singleton
import com.github.gdefacci.briscola.presentation.player.PlayerRoutes
import com.github.gdefacci.briscola.presentation.player.PlayerWebSocketRoutes
import com.github.gdefacci.briscola.presentation.competition.CompetitionRoutes
import com.github.gdefacci.briscola.presentation.game.GameRoutes
import com.github.gdefacci.briscola.presentation.sitemap.SiteMapRoutes
import com.github.gdefacci.briscola.presentation.Resources
/**
 * DI providers wiring the path codecs exposed by Resources into the route
 * traits consumed by the presentation layer (players, games, competitions,
 * web sockets and the site map).
 */
object RoutesModule {
  // REST routes for players: collection, login and by-id paths.
  @Singleton def playerRoutes(resources:Resources):PlayerRoutes = new PlayerRoutes {
    lazy val Players = resources.Players.pathMatchDecoder
    lazy val PlayerLogin = resources.Players.login.pathMatchDecoder
    lazy val PlayerById = resources.Players.byId.pathCodec
  }
  // Web-socket route for a player, plus its URI template keyed on "playerId".
  @Singleton def playerWebSocketRoutes(resources:Resources):PlayerWebSocketRoutes = new PlayerWebSocketRoutes {
    lazy val PlayerById = resources.WebSockets.Players.byId.pathConverter
    lazy val playerByIdUriTemplate: UriTemplate = this.PlayerById.encodeUriTemplate("playerId")
  }
  // REST routes for games and their players/teams.
  @Singleton def gameRoutes(resources:Resources):GameRoutes = new GameRoutes {
    lazy val Games = resources.Games.pathMatchDecoder
    lazy val GameById = resources.Games.byId.pathCodec
    lazy val Player = resources.Games.player.pathCodec
    lazy val Team = resources.Games.team.pathCodec
  }
  // REST routes for competitions and the accept/decline/create actions.
  @Singleton def competitionRoutes(resources:Resources):CompetitionRoutes = new CompetitionRoutes {
    lazy val Competitions = resources.Competitions.pathMatchDecoder
    lazy val CompetitionById = resources.Competitions.byId.pathCodec
    lazy val PlayerCompetitionById = resources.Competitions.player.pathCodec
    lazy val AcceptCompetition = resources.Competitions.accept.pathCodec
    lazy val DeclineCompetition = resources.Competitions.decline.pathCodec
    lazy val CreateCompetition = resources.Competitions.create.pathCodec
  }
  // Route for the site map entry point.
  @Singleton def siteMapRoutes(resources:Resources):SiteMapRoutes = new SiteMapRoutes {
    def SiteMap:PathMatchDecoder = resources.SiteMap.pathMatchDecoder
  }
} | gdefacci/briscola | ddd-briscola-web/src/main/scala/com/github/gdefacci/briscola/web/modules/RoutesModule.scala | Scala | bsd-3-clause | 2,325 |
package au.id.cxd.math.probability.discrete
import au.id.cxd.math.count.Choose
/**
* ##import MathJax
*
* Created by cd on 7/09/2014.
*
* Hyper geometric
*
*
* The Hypergeometric (class name HyperGeometric) distribution represents the probability of choosing $y$ number of events of the same kind
* from a subset of $r$ like items within a population of all $N$ possible items (of different kinds) for the sample of size $n$ containing
* the mixed items.
*
* The constraints are such that $r \\le n \\le N$ and $y \\le r \\le n$. The parameters are $y,r,n,N$.
*
* The probability distribution is defined as follows.
* $$
* P(y; r,n,N) = \\frac{ {r \\choose y } {{N - r} \\choose {n - y} } } { {N \\choose n} }
* $$
*
* The simple properties of the distribution are:\\\\\\\\
* Mean: $\\mu = \\frac{nr}{N}$\\\\
* Variance: $\\sigma^2 = n \\left( \\frac{r}{N} \\right) \\left( \\frac{N - r}{N} \\right) \\left( \\frac{N - n}{N-1}\\right)$
*
*
* r <= n <= N
* y <= r <= n
*
**/
class HyperGeometric(rSubsetSize: Double, sampleSize: Double, populationSize: Double) extends DiscreteDistribution {

  /**
   * Probability of drawing exactly `selectSize` items of the special kind when
   * sampling `sampleSize` items from a population of `populationSize` that
   * contains `rSubsetSize` special items:
   * C(r, y) * C(N - r, n - y) / C(N, n)
   *
   * @param selectSize y, the number of special items selected
   */
  def pdf(selectSize: Double) = {
    val likeKindWays = Choose(rSubsetSize)(selectSize)
    val otherKindWays = Choose(populationSize - rSubsetSize)(sampleSize - selectSize)
    val totalWays = Choose(populationSize)(sampleSize)
    likeKindWays * otherKindWays / totalWays
  }

  /** Mean: n * r / N */
  def mean() = sampleSize * rSubsetSize / populationSize

  /** Variance: n * (r/N) * ((N - r)/N) * ((N - n)/(N - 1)) */
  def variance() = {
    val successFraction = rSubsetSize / populationSize
    val failureFraction = (populationSize - rSubsetSize) / populationSize
    val finiteCorrection = (populationSize - sampleSize) / (populationSize - 1.0)
    sampleSize * successFraction * failureFraction * finiteCorrection
  }
}
object HyperGeometric {
  // Curried factory: HyperGeometric(r)(n)(N), with r <= n <= N expected.
  def apply(r: Double)(n: Double)(N: Double) = new HyperGeometric(r, n, N)
}
| cxd/scala-au.id.cxd.math | math/src/main/scala/au/id/cxd/math/probability/discrete/HyperGeometric.scala | Scala | mit | 1,766 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.api
import java.io._
import java.util.Properties
import java.util.concurrent.TimeUnit
import com.typesafe.config._
import com.typesafe.config.impl.ConfigImpl
import play.utils.PlayIO
import scala.collection.JavaConverters._
import scala.concurrent.duration.{ Duration, FiniteDuration, _ }
import scala.util.control.NonFatal
/**
* This object provides a set of operations to create `Configuration` values.
*
* For example, to load a `Configuration` in a running application:
* {{{
* val config = Configuration.load()
* val foo = config.getString("foo").getOrElse("boo")
* }}}
*
* The underlying implementation is provided by https://github.com/typesafehub/config.
*/
object Configuration {
private[this] lazy val dontAllowMissingConfigOptions = ConfigParseOptions.defaults().setAllowMissing(false)
private[this] lazy val dontAllowMissingConfig = ConfigFactory.load(dontAllowMissingConfigOptions)
private[play] def load(
classLoader: ClassLoader,
properties: Properties,
directSettings: Map[String, AnyRef],
allowMissingApplicationConf: Boolean): Configuration = {
try {
// Get configuration from the system properties.
// Iterating through the system properties is prone to ConcurrentModificationExceptions (especially in our tests)
// Typesafe config maintains a cache for this purpose. So, if the passed in properties *are* the system
// properties, use the Typesafe config cache, otherwise it should be safe to parse it ourselves.
val systemPropertyConfig = if (properties eq System.getProperties) {
ConfigImpl.systemPropertiesAsConfig()
} else {
ConfigFactory.parseProperties(properties)
}
// Inject our direct settings into the config.
val directConfig: Config = ConfigFactory.parseMap(directSettings.asJava)
// Resolve application.conf ourselves because:
// - we may want to load configuration when application.conf is missing.
// - We also want to delay binding and resolving reference.conf, which
// is usually part of the default application.conf loading behavior.
// - We want to read config.file and config.resource settings from our
// own properties and directConfig rather than system properties.
val applicationConfig: Config = {
def setting(key: String): Option[AnyRef] =
directSettings.get(key).orElse(Option(properties.getProperty(key)))
{
setting("config.resource").map(resource => ConfigFactory.parseResources(classLoader, resource.toString))
} orElse {
setting("config.file").map(fileName => ConfigFactory.parseFileAnySyntax(new File(fileName.toString)))
} getOrElse {
val parseOptions = ConfigParseOptions.defaults
.setClassLoader(classLoader)
.setAllowMissing(allowMissingApplicationConf)
ConfigFactory.defaultApplication(parseOptions)
}
}
// Resolve another .conf file so that we can override values in Akka's
// reference.conf, but still make it possible for users to override
// Play's values in their application.conf.
val playOverridesConfig: Config = ConfigFactory.parseResources(classLoader, "play/reference-overrides.conf")
// Resolve reference.conf ourselves because ConfigFactory.defaultReference resolves
// values, and we won't have a value for `play.server.dir` until all our config is combined.
val referenceConfig: Config = ConfigFactory.parseResources(classLoader, "reference.conf")
// Combine all the config together into one big config
val combinedConfig: Config = Seq(
systemPropertyConfig,
directConfig,
applicationConfig,
playOverridesConfig,
referenceConfig
).reduceLeft(_ withFallback _)
// Resolve settings. Among other things, the `play.server.dir` setting defined in directConfig will
// be substituted into the default settings in referenceConfig.
val resolvedConfig = combinedConfig.resolve
Configuration(resolvedConfig)
} catch {
case e: ConfigException => throw configError(e.getMessage, Option(e.origin), Some(e))
}
}
/**
* Load a new Configuration from the Environment.
*/
def load(environment: Environment, devSettings: Map[String, AnyRef]): Configuration = {
load(environment.classLoader, System.getProperties, devSettings, allowMissingApplicationConf = environment.mode == Mode.Test)
}
/**
* Load a new Configuration from the Environment.
*/
def load(environment: Environment): Configuration = load(environment, Map.empty[String, String])
/**
* Returns an empty Configuration object.
*/
def empty = Configuration(ConfigFactory.empty())
/**
* Returns the reference configuration object.
*/
def reference = Configuration(ConfigFactory.defaultReference())
/**
 * Create a new Configuration from the data passed as a Map.
 *
 * Scala maps and iterables are recursively converted to their Java
 * counterparts so that Typesafe Config's `ConfigFactory.parseMap` can
 * consume them; any other value is passed through unchanged.
 */
def from(data: Map[String, Any]): Configuration = {
  // Use a strict `map` rather than `mapValues`: `mapValues` returns a lazy
  // view that would re-run the conversion on every access (and is deprecated
  // in Scala 2.13+ for exactly this trap).
  def toJava(data: Any): Any = data match {
    case map: Map[_, _] => map.map { case (k, v) => k -> toJava(v) }.asJava
    case iterable: Iterable[_] => iterable.map(toJava).asJava
    case v => v
  }
  Configuration(ConfigFactory.parseMap(toJava(data).asInstanceOf[java.util.Map[String, AnyRef]]))
}
/**
 * Create a new Configuration from the given key-value pairs.
 */
// Note: duplicate keys are resolved by toMap — the last occurrence wins.
def apply(data: (String, Any)*): Configuration = from(data.toMap)
/**
 * Builds a PlayException describing a configuration error, optionally pointing
 * at the config origin (file and line) where the offending value was defined.
 */
private[api] def configError(
  message: String, origin: Option[ConfigOrigin] = None, e: Option[Throwable] = None): PlayException = {
  // Capture the origin details in stable locals so the anonymous ExceptionSource
  // below does not retain a reference to ConfigOrigin: ConfigOrigin is not
  // serializable, and holding it would make the exception unserializable.
  val lineNo: java.lang.Integer = origin.map(_.lineNumber: java.lang.Integer).orNull
  val sourceFile = origin.map(_.filename).orNull
  val sourceUrl = origin.flatMap(o => Option(o.url))
  new PlayException.ExceptionSource("Configuration error", message, e.orNull) {
    def line = lineNo
    def position = null
    def input = sourceUrl.map(PlayIO.readUrlAsString).orNull
    def sourceName = sourceFile
    override def toString = "Configuration error: " + getMessage
  }
}
private[Configuration] def asScalaList[A](l: java.util.List[A]): Seq[A] = asScalaBufferConverter(l).asScala.toList
}
/**
* A full configuration set.
*
* The underlying implementation is provided by https://github.com/typesafehub/config.
*
* @param underlying the underlying Config implementation
*/
case class Configuration(underlying: Config) {
import Configuration.asScalaList
/** Logs a warning that the `deprecated` key is set, directing users to `path` instead. */
private[play] def reportDeprecation(path: String, deprecated: String): Unit = {
  // Look up where the deprecated key was defined so the warning can point at it.
  val origin = underlying.getValue(deprecated).origin
  Logger.warn(s"${origin.description}: $deprecated is deprecated, use $path instead")
}
/**
 * Merge two configurations. The second configuration overrides the first configuration.
 * This is the opposite direction of `Config`'s `withFallback` method.
 */
def ++(other: Configuration): Configuration = {
  val merged = other.underlying.withFallback(underlying)
  Configuration(merged)
}
/**
 * Reads a value from the underlying implementation.
 * If the value is not set this will return None, otherwise returns Some.
 *
 * Does not check for an incorrect type or a null value, but catches and wraps any error thrown.
 */
private def readValue[T](path: String, v: => T): Option[T] = {
  try {
    // hasPathOrNull (unlike hasPath) treats an explicit `null` value as present,
    // so the by-name `v` is still evaluated for null-valued keys.
    if (underlying.hasPathOrNull(path)) Some(v) else None
  } catch {
    // Wrap in a PlayException that points at the config origin. Fatal errors propagate.
    case NonFatal(e) => throw reportError(path, e.getMessage, Some(e))
  }
}
/**
 * Check if the given path exists.
 */
// Note: uses hasPath, so a key explicitly set to null is reported as absent.
def has(path: String): Boolean = underlying.hasPath(path)
/**
 * Get the config at the given path.
 */
// The implicit ConfigLoader determines the result type; throws a ConfigException
// (wrapped by callers such as readValue) when the path is missing or mistyped.
def get[A](path: String)(implicit loader: ConfigLoader[A]): A = {
  loader.load(underlying, path)
}
/**
 * Get the config at the given path and validate against a set of valid values.
 *
 * @throws PlayException (via reportError) if the loaded value is not in `values`.
 */
def getAndValidate[A](path: String, values: Set[A])(implicit loader: ConfigLoader[A]): A = {
  val loaded = get(path)
  if (values.contains(loaded)) loaded
  else throw reportError(path, s"Incorrect value, one of (${values.mkString(", ")}) was expected.")
}
/**
 * Get a value that may either not exist or be null. Note that this is not generally considered idiomatic Config
 * usage. Instead you should define all config keys in a reference.conf file.
 */
def getOptional[A](path: String)(implicit loader: ConfigLoader[A]): Option[A] = {
  // Missing and explicitly-null keys yield None; load errors are wrapped by readValue.
  readValue(path, get[A](path))
}
/**
 * Get a prototyped sequence of objects.
 *
 * Each object in the sequence will fallback to the object loaded from prototype.\\$path.
 */
def getPrototypedSeq(path: String, prototypePath: String = "prototype.$path"): Seq[Configuration] = {
  // The literal token "$path" in prototypePath is substituted with the actual path.
  val prototype = underlying.getConfig(prototypePath.replace("$path", path))
  get[Seq[Config]](path).map { config =>
    Configuration(config.withFallback(prototype))
  }
}
/**
 * Get a prototyped map of objects.
 *
 * Each value in the map will fallback to the object loaded from prototype.\\$path.
 */
def getPrototypedMap(path: String, prototypePath: String = "prototype.$path"): Map[String, Configuration] = {
  // An empty prototypePath means "use the whole config as the prototype";
  // otherwise "$path" in prototypePath is substituted with the actual path.
  val prototype = if (prototypePath.isEmpty) {
    underlying
  } else {
    underlying.getConfig(prototypePath.replace("$path", path))
  }
  get[Map[String, Config]](path).map {
    case (key, config) => key -> Configuration(config.withFallback(prototype))
  }
}
/**
 * Get a deprecated configuration item.
 *
 * If the deprecated configuration item is defined, it will be returned, and a warning will be logged.
 *
 * Otherwise, the configuration from path will be looked up.
 */
def getDeprecated[A: ConfigLoader](path: String, deprecatedPaths: String*): A = {
  // Use the first deprecated path that is actually set; otherwise fall back to the new path.
  deprecatedPaths.find(underlying.hasPath) match {
    case Some(deprecated) =>
      reportDeprecation(path, deprecated)
      get[A](deprecated)
    case None =>
      get[A](path)
  }
}
/**
 * Get a deprecated configuration.
 *
 * If the deprecated configuration is defined, it will be returned, falling back to the new configuration, and a
 * warning will be logged.
 *
 * Otherwise, the configuration from path will be looked up and used as is.
 */
// NOTE(review): the `parent` parameter is accepted but never used in the body;
// it appears to be kept only for source compatibility — confirm before removing.
def getDeprecatedWithFallback(path: String, deprecated: String, parent: String = ""): Configuration = {
  val config = get[Config](path)
  val merged = if (underlying.hasPath(deprecated)) {
    reportDeprecation(path, deprecated)
    get[Config](deprecated).withFallback(config)
  } else config
  Configuration(merged)
}
/**
 * Retrieves a configuration value as a `String`.
 *
 * This method supports an optional set of valid values:
 * {{{
 * val config = Configuration.load()
 * val mode = config.getString("engine.mode", Some(Set("dev","prod")))
 * }}}
 *
 * A configuration error will be thrown if the configuration value does not match any of the required values.
 *
 * @param path the configuration key, relative to configuration root key
 * @param validValues valid values for this configuration
 * @return a configuration value
 */
@deprecated("Use get[String] or getAndValidate[String] with reference config entry", "2.6.0")
def getString(path: String, validValues: Option[Set[String]] = None): Option[String] = readValue(path, underlying.getString(path)).map { value =>
  validValues match {
    case Some(values) if values.contains(value) => value
    // An empty set of valid values means "no restriction": accept anything.
    case Some(values) if values.isEmpty => value
    case Some(values) => throw reportError(path, "Incorrect value, one of " + (values.reduceLeft(_ + ", " + _)) + " was expected.")
    case None => value
  }
}
// -- Deprecated (since 2.6.0) scalar accessors: thin wrappers over the typed get/getOptional API. --
/**
 * Retrieves a configuration value as an `Int`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val poolSize = configuration.getInt("engine.pool.size")
 * }}}
 *
 * A configuration error will be thrown if the configuration value is not a valid `Int`.
 *
 * @param path the configuration key, relative to the configuration root key
 * @return a configuration value
 */
@deprecated("Use get[Int] with reference config entry", "2.6.0")
def getInt(path: String): Option[Int] = readValue(path, underlying.getInt(path))
/**
 * Retrieves a configuration value as a `Boolean`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val isEnabled = configuration.getBoolean("engine.isEnabled")
 * }}}
 *
 * A configuration error will be thrown if the configuration value is not a valid `Boolean`.
 * Authorized values are `yes`/`no` or `true`/`false`.
 *
 * @param path the configuration key, relative to the configuration root key
 * @return a configuration value
 */
@deprecated("Use get[Boolean] with reference config entry", "2.6.0")
def getBoolean(path: String): Option[Boolean] = getOptional[Boolean](path)
/**
 * Retrieves a configuration value as `Milliseconds`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val timeout = configuration.getMilliseconds("engine.timeout")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.timeout = 1 second
 * }}}
 */
@deprecated("Use getMillis with reference config entry", "2.6.0")
def getMilliseconds(path: String): Option[Long] = getOptional[Duration](path).map(_.toMillis)
/**
 * Retrieves a configuration value as `Milliseconds`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val timeout = configuration.getMillis("engine.timeout")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.timeout = 1 second
 * }}}
 */
def getMillis(path: String): Long = get[Duration](path).toMillis
/**
 * Retrieves a configuration value as `Nanoseconds`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val timeout = configuration.getNanoseconds("engine.timeout")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.timeout = 1 second
 * }}}
 */
@deprecated("Use getNanos with reference config entry", "2.6.0")
def getNanoseconds(path: String): Option[Long] = getOptional[Duration](path).map(_.toNanos)
/**
 * Retrieves a configuration value as `Nanoseconds`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val timeout = configuration.getNanos("engine.timeout")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.timeout = 1 second
 * }}}
 */
def getNanos(path: String): Long = get[Duration](path).toNanos
// -- Deprecated (since 2.6.0) accessors: prefer the typed get/getOptional API. --
/**
 * Retrieves a configuration value as `Bytes`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val maxSize = configuration.getBytes("engine.maxSize")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.maxSize = 512k
 * }}}
 */
@deprecated("Use underlying.getBytes with reference config entry", "2.6.0")
def getBytes(path: String): Option[Long] = readValue(path, underlying.getBytes(path))
/**
 * Retrieves a sub-configuration, i.e. a configuration instance containing all keys starting with a given prefix.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val engineConfig = configuration.getConfig("engine")
 * }}}
 *
 * The root key of this new configuration will be ‘engine’, and you can access any sub-keys relatively.
 *
 * @param path the root prefix for this sub-configuration
 * @return a new configuration
 */
@deprecated("Use get[Configuration] with reference config entry", "2.6.0")
def getConfig(path: String): Option[Configuration] = getOptional[Configuration](path)
/**
 * Retrieves a configuration value as a `Double`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val population = configuration.getDouble("world.population")
 * }}}
 *
 * A configuration error will be thrown if the configuration value is not a valid `Double`.
 *
 * @param path the configuration key, relative to the configuration root key
 * @return a configuration value
 */
@deprecated("Use get[Double] with reference config entry", "2.6.0")
def getDouble(path: String): Option[Double] = getOptional[Double](path)
/**
 * Retrieves a configuration value as a `Long`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val duration = configuration.getLong("timeout.duration")
 * }}}
 *
 * A configuration error will be thrown if the configuration value is not a valid `Long`.
 *
 * @param path the configuration key, relative to the configuration root key
 * @return a configuration value
 */
@deprecated("Use get[Long] with reference config entry", "2.6.0")
def getLong(path: String): Option[Long] = getOptional[Long](path)
/**
 * Retrieves a configuration value as a `Number`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val counter = configuration.getNumber("foo.counter")
 * }}}
 *
 * A configuration error will be thrown if the configuration value is not a valid `Number`.
 *
 * @param path the configuration key, relative to the configuration root key
 * @return a configuration value
 */
@deprecated("Use get[Number] with reference config entry", "2.6.0")
def getNumber(path: String): Option[Number] = getOptional[Number](path)
// -- Deprecated (since 2.6.0) list/seq accessors. The *List variants return Java
// lists straight from Typesafe Config; the *Seq variants convert to Scala Seqs. --
/**
 * Retrieves a configuration value as a List of `Boolean`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val switches = configuration.getBooleanList("board.switches")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * board.switches = [true, true, false]
 * }}}
 *
 * A configuration error will be thrown if the configuration value is not a valid `Boolean`.
 * Authorized values are `yes`/`no` or `true`/`false`.
 */
@deprecated("Use underlying.getBooleanList with reference config entry", "2.6.0")
def getBooleanList(path: String): Option[java.util.List[java.lang.Boolean]] = readValue(path, underlying.getBooleanList(path))
/**
 * Retrieves a configuration value as a Seq of `Boolean`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val switches = configuration.getBooleanSeq("board.switches")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * board.switches = [true, true, false]
 * }}}
 *
 * A configuration error will be thrown if the configuration value is not a valid `Boolean`.
 * Authorized values are `yes`/`no` or `true`/`false`.
 */
@deprecated("Use get[Seq[Boolean]] with reference config entry", "2.6.0")
def getBooleanSeq(path: String): Option[Seq[java.lang.Boolean]] = getOptional[Seq[Boolean]](path).map(_.map(new java.lang.Boolean(_)))
/**
 * Retrieves a configuration value as a List of `Bytes`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val maxSizes = configuration.getBytesList("engine.maxSizes")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.maxSizes = [512k, 256k, 256k]
 * }}}
 */
@deprecated("Use underlying.getBytesList with reference config entry", "2.6.0")
def getBytesList(path: String): Option[java.util.List[java.lang.Long]] = readValue(path, underlying.getBytesList(path))
/**
 * Retrieves a configuration value as a Seq of `Bytes`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val maxSizes = configuration.getBytesSeq("engine.maxSizes")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.maxSizes = [512k, 256k, 256k]
 * }}}
 */
@deprecated("Use underlying.getBytesList with reference config entry", "2.6.0")
def getBytesSeq(path: String): Option[Seq[java.lang.Long]] = getBytesList(path).map(asScalaList)
/**
 * Retrieves a List of sub-configurations, i.e. a configuration instance for each key that matches the path.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val engineConfigs = configuration.getConfigList("engine")
 * }}}
 *
 * The root key of this new configuration will be "engine", and you can access any sub-keys relatively.
 */
@deprecated("Use underlying.getConfigList with reference config entry", "2.6.0")
def getConfigList(path: String): Option[java.util.List[Configuration]] = readValue[java.util.List[_ <: Config]](path, underlying.getConfigList(path)).map { configs => configs.asScala.map(Configuration(_)).asJava }
/**
 * Retrieves a Seq of sub-configurations, i.e. a configuration instance for each key that matches the path.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val engineConfigs = configuration.getConfigSeq("engine")
 * }}}
 *
 * The root key of this new configuration will be "engine", and you can access any sub-keys relatively.
 */
@deprecated("Use underlying.getConfigList with reference config entry", "2.6.0")
def getConfigSeq(path: String): Option[Seq[Configuration]] = getConfigList(path).map(asScalaList)
// -- Deprecated (since 2.6.0) numeric and generic list accessors. --
/**
 * Retrieves a configuration value as a List of `Double`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val maxSizes = configuration.getDoubleList("engine.maxSizes")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.maxSizes = [5.0, 3.34, 2.6]
 * }}}
 */
@deprecated("Use underlying.getDoubleList with reference config entry", "2.6.0")
def getDoubleList(path: String): Option[java.util.List[java.lang.Double]] = readValue(path, underlying.getDoubleList(path))
/**
 * Retrieves a configuration value as a Seq of `Double`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val maxSizes = configuration.getDoubleSeq("engine.maxSizes")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.maxSizes = [5.0, 3.34, 2.6]
 * }}}
 */
@deprecated("Use get[Seq[Double]] with reference config entry", "2.6.0")
def getDoubleSeq(path: String): Option[Seq[java.lang.Double]] = getOptional[Seq[Double]](path).map(_.map(new java.lang.Double(_)))
/**
 * Retrieves a configuration value as a List of `Integer`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val maxSizes = configuration.getIntList("engine.maxSizes")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.maxSizes = [100, 500, 2]
 * }}}
 */
@deprecated("Use underlying.getIntList with reference config entry", "2.6.0")
def getIntList(path: String): Option[java.util.List[java.lang.Integer]] = readValue(path, underlying.getIntList(path))
/**
 * Retrieves a configuration value as a Seq of `Integer`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val maxSizes = configuration.getIntSeq("engine.maxSizes")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.maxSizes = [100, 500, 2]
 * }}}
 */
@deprecated("Use get[Seq[Int]] with reference config entry", "2.6.0")
def getIntSeq(path: String): Option[Seq[java.lang.Integer]] = getOptional[Seq[Int]](path).map(_.map(new java.lang.Integer(_)))
/**
 * Gets a list value (with any element type) as a ConfigList, which implements java.util.List<ConfigValue>.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val maxSizes = configuration.getList("engine.maxSizes")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.maxSizes = ["foo", "bar"]
 * }}}
 */
@deprecated("Use get[ConfigList] with reference config entry", "2.6.0")
def getList(path: String): Option[ConfigList] = getOptional[ConfigList](path)
/**
 * Retrieves a configuration value as a List of `Long`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val maxSizes = configuration.getLongList("engine.maxSizes")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.maxSizes = [10000000000000, 500, 2000]
 * }}}
 */
@deprecated("Use underlying.getLongList with reference config entry", "2.6.0")
def getLongList(path: String): Option[java.util.List[java.lang.Long]] = readValue(path, underlying.getLongList(path))
/**
 * Retrieves a configuration value as a Seq of `Long`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val maxSizes = configuration.getLongSeq("engine.maxSizes")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.maxSizes = [10000000000000, 500, 2000]
 * }}}
 */
@deprecated("Use get[Seq[Long]] with reference config entry", "2.6.0")
def getLongSeq(path: String): Option[Seq[java.lang.Long]] =
getOptional[Seq[Long]](path).map(_.map(new java.lang.Long(_)))
// -- Deprecated (since 2.6.0) duration list accessors, converting via TimeUnit. --
/**
 * Retrieves a configuration value as List of `Milliseconds`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val timeouts = configuration.getMillisecondsList("engine.timeouts")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.timeouts = [1 second, 1 second]
 * }}}
 */
@deprecated("Use underlying.getMillisecondsList with reference config entry", "2.6.0")
def getMillisecondsList(path: String): Option[java.util.List[java.lang.Long]] =
readValue(path, underlying.getDurationList(path, TimeUnit.MILLISECONDS))
/**
 * Retrieves a configuration value as Seq of `Milliseconds`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val timeouts = configuration.getMillisecondsSeq("engine.timeouts")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.timeouts = [1 second, 1 second]
 * }}}
 */
@deprecated("Use get[Seq[Duration]].map(_.toMillis) with reference config entry", "2.6.0")
def getMillisecondsSeq(path: String): Option[Seq[java.lang.Long]] =
getOptional[Seq[Duration]](path).map(_.map(duration => new java.lang.Long(duration.toMillis)))
/**
 * Retrieves a configuration value as List of `Nanoseconds`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val timeouts = configuration.getNanosecondsList("engine.timeouts")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.timeouts = [1 second, 1 second]
 * }}}
 */
@deprecated("Use underlying.getNanosecondsList with reference config entry", "2.6.0")
def getNanosecondsList(path: String): Option[java.util.List[java.lang.Long]] =
readValue(path, underlying.getDurationList(path, TimeUnit.NANOSECONDS))
/**
 * Retrieves a configuration value as Seq of `Nanoseconds`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val timeouts = configuration.getNanosecondsSeq("engine.timeouts")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.timeouts = [1 second, 1 second]
 * }}}
 */
// Fixed the deprecation guidance: this accessor returns nanoseconds, so the
// replacement is `.map(_.toNanos)`, not `.map(_.toMillis)` as previously stated.
@deprecated("Use get[Seq[Duration]].map(_.toNanos) with reference config entry", "2.6.0")
def getNanosecondsSeq(path: String): Option[Seq[java.lang.Long]] =
getOptional[Seq[Duration]](path).map(_.map(duration => new java.lang.Long(duration.toNanos)))
// -- Deprecated (since 2.6.0) number/object/string accessors. --
/**
 * Retrieves a configuration value as a List of `Number`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val maxSizes = configuration.getNumberList("engine.maxSizes")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.maxSizes = [50, 500, 5000]
 * }}}
 */
@deprecated("Use underlying.getNumberList with reference config entry", "2.6.0")
def getNumberList(path: String): Option[java.util.List[java.lang.Number]] =
readValue(path, underlying.getNumberList(path))
/**
 * Retrieves a configuration value as a Seq of `Number`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val maxSizes = configuration.getNumberSeq("engine.maxSizes")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.maxSizes = [50, 500, 5000]
 * }}}
 */
@deprecated("Use get[Seq[Number]] with reference config entry", "2.6.0")
def getNumberSeq(path: String): Option[Seq[java.lang.Number]] =
getOptional[Seq[Number]](path)
/**
 * Retrieves a configuration value as a List of `ConfigObject`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val engineProperties = configuration.getObjectList("engine.properties")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.properties = [{id: 5, power: 3}, {id: 6, power: 20}]
 * }}}
 */
@deprecated("Use underlying.getObjectList with reference config entry", "2.6.0")
def getObjectList(path: String): Option[java.util.List[_ <: ConfigObject]] =
readValue[java.util.List[_ <: ConfigObject]](path, underlying.getObjectList(path))
/**
 * Retrieves a configuration value as a List of `String`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val names = configuration.getStringList("names")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * names = ["Jim", "Bob", "Steve"]
 * }}}
 */
@deprecated("Use underlying.getStringList with reference config entry", "2.6.0")
def getStringList(path: String): Option[java.util.List[java.lang.String]] =
readValue(path, underlying.getStringList(path))
/**
 * Retrieves a configuration value as a Seq of `String`.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val names = configuration.getStringSeq("names")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * names = ["Jim", "Bob", "Steve"]
 * }}}
 */
@deprecated("Use get[Seq[String]] with reference config entry", "2.6.0")
def getStringSeq(path: String): Option[Seq[java.lang.String]] =
getOptional[Seq[String]](path)
/**
 * Retrieves a ConfigObject for this path, which implements Map<String,ConfigValue>
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val engineProperties = configuration.getObject("engine.properties")
 * }}}
 *
 * The configuration must be provided as:
 *
 * {{{
 * engine.properties = {id: 1, power: 5}
 * }}}
 */
@deprecated("Use get[ConfigObject] with reference config entry", "2.6.0")
def getObject(path: String): Option[ConfigObject] =
getOptional[ConfigObject](path)
/**
 * Returns available keys.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val keys = configuration.keys
 * }}}
 *
 * @return the set of keys available in this configuration
 */
// Fully-qualified leaf paths, e.g. "a.b.c" — not just top-level keys.
def keys: Set[String] = underlying.entrySet.asScala.map(_.getKey).toSet
/**
 * Returns sub-keys.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * val subKeys = configuration.subKeys
 * }}}
 *
 * @return the set of direct sub-keys available in this configuration
 */
// Only the first path segment of each entry, e.g. "a" for "a.b.c".
def subKeys: Set[String] = underlying.root().keySet().asScala.toSet
/**
 * Returns every path as a set of key to value pairs, by recursively iterating through the
 * config objects.
 */
def entrySet: Set[(String, ConfigValue)] = underlying.entrySet().asScala.map(e => e.getKey -> e.getValue).toSet
/**
 * Creates a configuration error for a specific configuration key.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * throw configuration.reportError("engine.connectionUrl", "Cannot connect!")
 * }}}
 *
 * @param path the configuration key, related to this error
 * @param message the error message
 * @param e the related exception
 * @return a configuration exception
 */
def reportError(path: String, message: String, e: Option[Throwable] = None): PlayException = {
  // Point at the offending key's origin when it exists, otherwise fall back
  // to the origin of the configuration root.
  val origin = Option(if (underlying.hasPath(path)) underlying.getValue(path).origin else underlying.root.origin)
  Configuration.configError(message, origin, e)
}
/**
 * Creates a configuration error for this configuration.
 *
 * For example:
 * {{{
 * val configuration = Configuration.load()
 * throw configuration.globalError("Missing configuration key: [yop.url]")
 * }}}
 *
 * @param message the error message
 * @param e the related exception
 * @return a configuration exception
 */
def globalError(message: String, e: Option[Throwable] = None): PlayException = {
  // Attributed to the configuration root's origin (e.g. the application.conf file).
  Configuration.configError(message, Option(underlying.root.origin), e)
}
}
/**
 * A config loader
 */
// A type class describing how to read a value of type A out of a Typesafe Config
// at a given path. Instances are summoned implicitly by Configuration.get.
trait ConfigLoader[A] { self =>
def load(config: Config, path: String = ""): A
// Derive a loader for B by post-processing this loader's result with f.
def map[B](f: A => B): ConfigLoader[B] = new ConfigLoader[B] {
def load(config: Config, path: String): B = {
f(self.load(config, path))
}
}
}
/**
 * Companion providing the built-in ConfigLoader instances for standard types,
 * plus combinators for optional values and maps.
 */
object ConfigLoader {
  // Build a loader from a curried Config accessor, e.g. ConfigLoader(_.getString).
  def apply[A](f: Config => String => A): ConfigLoader[A] = new ConfigLoader[A] {
    def load(config: Config, path: String): A = f(config)(path)
  }
  import scala.collection.JavaConverters._
  implicit val stringLoader: ConfigLoader[String] = ConfigLoader(_.getString)
  implicit val seqStringLoader: ConfigLoader[Seq[String]] = ConfigLoader(_.getStringList).map(_.asScala)
  implicit val intLoader: ConfigLoader[Int] = ConfigLoader(_.getInt)
  implicit val seqIntLoader: ConfigLoader[Seq[Int]] = ConfigLoader(_.getIntList).map(_.asScala.map(_.toInt))
  implicit val booleanLoader: ConfigLoader[Boolean] = ConfigLoader(_.getBoolean)
  implicit val seqBooleanLoader: ConfigLoader[Seq[Boolean]] =
    ConfigLoader(_.getBooleanList).map(_.asScala.map(_.booleanValue))
  // A null or literal "infinite" value maps to Duration.Inf; anything else must
  // parse as a HOCON duration.
  implicit val durationLoader: ConfigLoader[Duration] = ConfigLoader { config => path =>
    if (config.getIsNull(path)) Duration.Inf
    else if (config.getString(path) == "infinite") Duration.Inf
    else config.getDuration(path).toNanos.nanos
  }
  // Note: this does not support null values but it added for convenience
  implicit val seqDurationLoader: ConfigLoader[Seq[Duration]] =
    ConfigLoader(_.getDurationList).map(_.asScala.map(_.toNanos.nanos))
  implicit val finiteDurationLoader: ConfigLoader[FiniteDuration] =
    ConfigLoader(_.getDuration).map(_.toNanos.nanos)
  implicit val seqFiniteDurationLoader: ConfigLoader[Seq[FiniteDuration]] =
    ConfigLoader(_.getDurationList).map(_.asScala.map(_.toNanos.nanos))
  implicit val doubleLoader: ConfigLoader[Double] = ConfigLoader(_.getDouble)
  implicit val seqDoubleLoader: ConfigLoader[Seq[Double]] =
    ConfigLoader(_.getDoubleList).map(_.asScala.map(_.doubleValue))
  implicit val numberLoader: ConfigLoader[Number] = ConfigLoader(_.getNumber)
  implicit val seqNumberLoader: ConfigLoader[Seq[Number]] = ConfigLoader(_.getNumberList).map(_.asScala)
  implicit val longLoader: ConfigLoader[Long] = ConfigLoader(_.getLong)
  // Bug fix: previously read via getDoubleList, which silently loses precision
  // for longs beyond 2^53 and accepts fractional values; read longs directly.
  implicit val seqLongLoader: ConfigLoader[Seq[Long]] =
    ConfigLoader(_.getLongList).map(_.asScala.map(_.longValue))
  implicit val bytesLoader: ConfigLoader[ConfigMemorySize] = ConfigLoader(_.getMemorySize)
  implicit val seqBytesLoader: ConfigLoader[Seq[ConfigMemorySize]] = ConfigLoader(_.getMemorySizeList).map(_.asScala)
  implicit val configLoader: ConfigLoader[Config] = ConfigLoader(_.getConfig)
  implicit val configListLoader: ConfigLoader[ConfigList] = ConfigLoader(_.getList)
  implicit val configObjectLoader: ConfigLoader[ConfigObject] = ConfigLoader(_.getObject)
  implicit val seqConfigLoader: ConfigLoader[Seq[Config]] = ConfigLoader(_.getConfigList).map(_.asScala)
  implicit val configurationLoader: ConfigLoader[Configuration] = configLoader.map(Configuration(_))
  implicit val seqConfigurationLoader: ConfigLoader[Seq[Configuration]] = seqConfigLoader.map(_.map(Configuration(_)))
  private[play] implicit val playConfigLoader: ConfigLoader[PlayConfig] = configLoader.map(PlayConfig(_))
  private[play] implicit val seqPlayConfigLoader: ConfigLoader[Seq[PlayConfig]] = seqConfigLoader.map(_.map(PlayConfig(_)))
  /**
   * Loads a value, interpreting a null value as None and any other value as Some(value).
   */
  implicit def optionLoader[A](implicit valueLoader: ConfigLoader[A]): ConfigLoader[Option[A]] = new ConfigLoader[Option[A]] {
    def load(config: Config, path: String): Option[A] = {
      if (config.getIsNull(path)) None else {
        val value = valueLoader.load(config, path)
        Some(value)
      }
    }
  }
  // Loads the object at `path` and applies valueLoader to each of its direct keys.
  implicit def mapLoader[A](implicit valueLoader: ConfigLoader[A]): ConfigLoader[Map[String, A]] = new ConfigLoader[Map[String, A]] {
    def load(config: Config, path: String): Map[String, A] = {
      val obj = config.getObject(path)
      val conf = obj.toConfig
      obj.keySet().asScala.map { key =>
        key -> valueLoader.load(conf, key)
      }(scala.collection.breakOut)
    }
  }
}
// TODO: remove when play projects (play-slick et al.) stop depending on PlayConfig
@deprecated("Use play.api.Configuration", "2.6.0")
// Thin compatibility shim: every operation delegates to an equivalent method on
// play.api.Configuration wrapping the same underlying Config.
private[play] class PlayConfig(val underlying: Config) {
def get[A](path: String)(implicit loader: ConfigLoader[A]): A = {
loader.load(underlying, path)
}
def getPrototypedSeq(path: String, prototypePath: String = "prototype.$path"): Seq[PlayConfig] = {
Configuration(underlying).getPrototypedSeq(path, prototypePath).map(c => PlayConfig(c.underlying))
}
def getPrototypedMap(path: String, prototypePath: String = "prototype.$path"): Map[String, PlayConfig] = {
Configuration(underlying).getPrototypedMap(path, prototypePath).mapValues(c => PlayConfig(c.underlying))
}
def getDeprecated[A: ConfigLoader](path: String, deprecatedPaths: String*): A = {
Configuration(underlying).getDeprecated(path, deprecatedPaths: _*)
}
def getDeprecatedWithFallback(path: String, deprecated: String, parent: String = ""): PlayConfig = {
PlayConfig(Configuration(underlying).getDeprecatedWithFallback(path, deprecated, parent).underlying)
}
def reportError(path: String, message: String, e: Option[Throwable] = None): PlayException = {
Configuration(underlying).reportError(path, message, e)
}
def subKeys: Set[String] = Configuration(underlying).subKeys
private[play] def reportDeprecation(path: String, deprecated: String): Unit = {
Configuration(underlying).reportDeprecation(path, deprecated)
}
}
@deprecated("Use play.api.Configuration", "2.6.0")
// Factory methods for the deprecated PlayConfig shim.
private[play] object PlayConfig {
def apply(underlying: Config) = new PlayConfig(underlying)
def apply(configuration: Configuration) = new PlayConfig(configuration.underlying)
}
| Shruti9520/playframework | framework/src/play/src/main/scala/play/api/Configuration.scala | Scala | apache-2.0 | 39,028 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package collection
package immutable
import scala.annotation.tailrec
import scala.collection.Stepper.EfficientSplit
import scala.collection.generic.DefaultSerializable
import scala.collection.immutable.{RedBlackTree => RB}
import scala.collection.mutable.ReusableBuilder
import scala.runtime.AbstractFunction2
/** An immutable SortedMap whose values are stored in a red-black tree.
*
* This class is optimal when range queries will be performed,
* or when traversal in order of an ordering is desired.
* If you only need key lookups, and don't care in which order key-values
  * are traversed in, consider using [[scala.collection.immutable.HashMap]],
* which will generally have better performance. If you need insertion order,
  * consider a [[scala.collection.immutable.SeqMap]], which does not need to
* have an ordering supplied.
*
* @example {{{
* import scala.collection.immutable.TreeMap
*
* // Make a TreeMap via the companion object factory
* val weekdays = TreeMap(
* 2 -> "Monday",
* 3 -> "Tuesday",
* 4 -> "Wednesday",
* 5 -> "Thursday",
* 6 -> "Friday"
* )
* // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday)
*
* val days = weekdays ++ List(1 -> "Sunday", 7 -> "Saturday")
* // TreeMap(1 -> Sunday, 2 -> Monday, 3 -> Tuesday, 4 -> Wednesday, 5 -> Thursday, 6 -> Friday, 7 -> Saturday)
*
* val day3 = days.get(3) // Some("Tuesday")
*
* val rangeOfDays = days.range(2, 5) // TreeMap(2 -> Monday, 3 -> Tuesday, 4 -> Wednesday)
*
* val daysUntil2 = days.rangeUntil(2) // TreeMap(1 -> Sunday)
* val daysTo2 = days.rangeTo(2) // TreeMap(1 -> Sunday, 2 -> Monday)
* val daysAfter5 = days.rangeFrom(5) // TreeMap(5 -> Thursday, 6 -> Friday, 7 -> Saturday)
* }}}
*
* @tparam K the type of the keys contained in this tree map.
* @tparam V the type of the values associated with the keys.
  * @param ordering the implicit ordering used to compare objects of type `K`.
*
* @see [[https://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]]
* section on `Red-Black Trees` for more information.
*
* @define Coll immutable.TreeMap
* @define coll immutable tree map
* @define orderDependent
* @define orderDependentFold
* @define mayNotTerminateInf
* @define willNotTerminateInf
*/
final class TreeMap[K, +V] private (private val tree: RB.Tree[K, V])(implicit val ordering: Ordering[K])
  extends AbstractMap[K, V]
    with SortedMap[K, V]
    with StrictOptimizedSortedMapOps[K, V, TreeMap, TreeMap[K, V]]
    with SortedMapFactoryDefaults[K, V, TreeMap, Iterable, Map]
    with DefaultSerializable {

  // The empty map is represented by a `null` red-black tree.
  def this()(implicit ordering: Ordering[K]) = this(null)(ordering)

  // Exposes the backing tree to other `immutable` collections (builders, TreeSet).
  private[immutable] def tree0: RB.Tree[K, V] = tree

  // Reuses `this` when a tree operation returns the identical root node,
  // so no-op updates do not allocate a fresh wrapper.
  private[this] def newMapOrSelf[V1 >: V](t: RB.Tree[K, V1]): TreeMap[K, V1] = if(t eq tree) this else new TreeMap[K, V1](t)

  override def sortedMapFactory: SortedMapFactory[TreeMap] = TreeMap

  def iterator: Iterator[(K, V)] = RB.iterator(tree)

  def keysIteratorFrom(start: K): Iterator[K] = RB.keysIterator(tree, Some(start))

  // The key set shares this map's tree structurally; no copying takes place.
  override def keySet: TreeSet[K] = new TreeSet(tree)(ordering)

  def iteratorFrom(start: K): Iterator[(K, V)] = RB.iterator(tree, Some(start))

  override def valuesIteratorFrom(start: K): Iterator[V] = RB.valuesIterator(tree, Some(start))

  // Stepper over (key, value) pairs; used for Java Stream interop.
  override def stepper[S <: Stepper[_]](implicit shape: StepperShape[(K, V), S]): S with EfficientSplit =
    shape.parUnbox(
      scala.collection.convert.impl.AnyBinaryTreeStepper.from[(K, V), RB.Tree[K, V]](
        size, tree, _.left, _.right, x => (x.key, x.value)
      )
    )

  // Key stepper specialized per primitive shape to avoid boxing Int/Long/Double keys.
  override def keyStepper[S <: Stepper[_]](implicit shape: StepperShape[K, S]): S with EfficientSplit = {
    import scala.collection.convert.impl._
    type T = RB.Tree[K, V]
    val s = shape.shape match {
      case StepperShape.IntShape    => IntBinaryTreeStepper.from[T]   (size, tree, _.left, _.right, _.key.asInstanceOf[Int])
      case StepperShape.LongShape   => LongBinaryTreeStepper.from[T]  (size, tree, _.left, _.right, _.key.asInstanceOf[Long])
      case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T](size, tree, _.left, _.right, _.key.asInstanceOf[Double])
      case _ => shape.parUnbox(AnyBinaryTreeStepper.from[K, T](size, tree, _.left, _.right, _.key))
    }
    s.asInstanceOf[S with EfficientSplit]
  }

  // Value stepper, specialized like `keyStepper`.
  override def valueStepper[S <: Stepper[_]](implicit shape: StepperShape[V, S]): S with EfficientSplit = {
    import scala.collection.convert.impl._
    type T = RB.Tree[K, V]
    val s = shape.shape match {
      case StepperShape.IntShape    => IntBinaryTreeStepper.from[T]    (size, tree, _.left, _.right, _.value.asInstanceOf[Int])
      case StepperShape.LongShape   => LongBinaryTreeStepper.from[T]   (size, tree, _.left, _.right, _.value.asInstanceOf[Long])
      case StepperShape.DoubleShape => DoubleBinaryTreeStepper.from[T] (size, tree, _.left, _.right, _.value.asInstanceOf[Double])
      case _ => shape.parUnbox(AnyBinaryTreeStepper.from[V, T] (size, tree, _.left, _.right, _.value.asInstanceOf[V]))
    }
    s.asInstanceOf[S with EfficientSplit]
  }

  def get(key: K): Option[V] = RB.get(tree, key)

  // Fast path: `lookup` returns a nullable node, so a hit avoids the
  // Option allocation that `get` would incur.
  override def getOrElse[V1 >: V](key: K, default: => V1): V1 = {
    val resultOrNull = RB.lookup(tree, key)
    if (resultOrNull eq null) default
    else resultOrNull.value
  }

  def removed(key: K): TreeMap[K,V] =
    newMapOrSelf(RB.delete(tree, key))

  def updated[V1 >: V](key: K, value: V1): TreeMap[K, V1] =
    newMapOrSelf(RB.update(tree, key, value, overwrite = true))

  // Three cases: (1) another TreeMap with an equal ordering uses a bulk tree
  // union; (2) a LinearSeq is folded in tail-recursively (skipping the Adder
  // allocation when empty); (3) anything else goes through an Adder, which
  // mutates unpublished tree nodes for speed.  The `@unchecked` element-type
  // cast in case (1) is guarded only by the ordering check.
  override def concat[V1 >: V](that: collection.IterableOnce[(K, V1)]): TreeMap[K, V1] =
    newMapOrSelf(that match {
      case tm: TreeMap[K, V] @unchecked if ordering == tm.ordering =>
        RB.union(tree, tm.tree)
      case ls: LinearSeq[(K,V1)] =>
        if (ls.isEmpty) tree //to avoid the creation of the adder
        else {
          val adder = new Adder[V1]
          adder.addAll(ls)
          adder.finalTree
        }
      case _ =>
        val adder = new Adder[V1]
        val it = that.iterator
        while (it.hasNext) {
          adder.apply(it.next())
        }
        adder.finalTree
    })

  // Bulk removal: a TreeSet with an equal ordering enables a tree-level
  // set difference instead of key-by-key deletion.
  override def removedAll(keys: IterableOnce[K]): TreeMap[K, V] = keys match {
    case ts: TreeSet[K] if ordering == ts.ordering =>
      newMapOrSelf(RB.difference(tree, ts.tree))
    case _ => super.removedAll(keys)
  }

  /** A new TreeMap with the entry added is returned,
   *  assuming that key is <em>not</em> in the TreeMap.
   *
   *  @tparam V1    type of the values of the new bindings, a supertype of `V`
   *  @param key    the key to be inserted
   *  @param value  the value to be associated with `key`
   *  @return       a new $coll with the inserted binding, if it wasn't present in the map
   */
  @deprecated("Use `updated` instead", "2.13.0")
  def insert[V1 >: V](key: K, value: V1): TreeMap[K, V1] = {
    assert(!RB.contains(tree, key))
    updated(key, value)
  }

  def rangeImpl(from: Option[K], until: Option[K]): TreeMap[K, V] = newMapOrSelf(RB.rangeImpl(tree, from, until))

  // Smallest entry with key >= `key`, or None. `null` marks "no such node".
  override def minAfter(key: K): Option[(K, V)] = RB.minAfter(tree, key) match {
    case null => Option.empty
    case x => Some((x.key, x.value))
  }

  // Greatest entry with key < `key`, or None.
  override def maxBefore(key: K): Option[(K, V)] = RB.maxBefore(tree, key) match {
    case null => Option.empty
    case x => Some((x.key, x.value))
  }

  override def range(from: K, until: K): TreeMap[K,V] = newMapOrSelf(RB.range(tree, from, until))

  override def foreach[U](f: ((K, V)) => U): Unit = RB.foreach(tree, f)
  // Tupleless variant: avoids allocating a pair per entry.
  override def foreachEntry[U](f: (K, V) => U): Unit = RB.foreachEntry(tree, f)
  override def size: Int = RB.count(tree)
  override def knownSize: Int = size
  override def isEmpty = size == 0
  override def firstKey: K = RB.smallest(tree).key
  override def lastKey: K = RB.greatest(tree).key

  override def head: (K, V) = {
    val smallest = RB.smallest(tree)
    (smallest.key, smallest.value)
  }

  override def last: (K, V) = {
    val greatest = RB.greatest(tree)
    (greatest.key, greatest.value)
  }

  override def tail: TreeMap[K, V] = new TreeMap(RB.tail(tree))
  override def init: TreeMap[K, V] = new TreeMap(RB.init(tree))

  // drop/take/slice clamp their arguments before delegating to the tree ops.
  override def drop(n: Int): TreeMap[K, V] = {
    if (n <= 0) this
    else if (n >= size) empty
    else new TreeMap(RB.drop(tree, n))
  }

  override def take(n: Int): TreeMap[K, V] = {
    if (n <= 0) empty
    else if (n >= size) this
    else new TreeMap(RB.take(tree, n))
  }

  override def slice(from: Int, until: Int) = {
    if (until <= from) empty
    else if (from <= 0) take(until)
    else if (until >= size) drop(from)
    else new TreeMap(RB.slice(tree, from, until))
  }

  override def dropRight(n: Int): TreeMap[K, V] = take(size - math.max(n, 0))
  override def takeRight(n: Int): TreeMap[K, V] = drop(size - math.max(n, 0))

  // Length of the longest prefix (in iteration order) satisfying `p`.
  private[this] def countWhile(p: ((K, V)) => Boolean): Int = {
    var result = 0
    val it = iterator
    while (it.hasNext && p(it.next())) result += 1
    result
  }
  override def dropWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = drop(countWhile(p))
  override def takeWhile(p: ((K, V)) => Boolean): TreeMap[K, V] = take(countWhile(p))
  override def span(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = splitAt(countWhile(p))

  override def filter(f: ((K, V)) => Boolean): TreeMap[K, V] =
    newMapOrSelf(RB.filterEntries[K, V](tree, (k, v) => f((k, v))))

  override def partition(p: ((K, V)) => Boolean): (TreeMap[K, V], TreeMap[K, V]) = {
    val (l, r) = RB.partitionEntries[K, V](tree, (k, v) => p((k, v)))
    (newMapOrSelf(l), newMapOrSelf(r))
  }

  // If the transform returned the identical tree, no value changed, so the
  // cast of `this` to TreeMap[K, W] is safe at runtime.
  override def transform[W](f: (K, V) => W): TreeMap[K, W] = {
    val t2 = RB.transform[K, V, W](tree, f)
    if(t2 eq tree) this.asInstanceOf[TreeMap[K, W]]
    else new TreeMap(t2)
  }

  // Accumulates entries into an internally *mutable* tree (via RB.MapHelper)
  // for speed; `finalTree`/`beforePublish` freezes it before it escapes.
  private final class Adder[B1 >: V]
    extends RB.MapHelper[K, B1] with Function1[(K, B1), Unit] {
    private var currentMutableTree: RB.Tree[K,B1] = tree0
    def finalTree = beforePublish(currentMutableTree)
    override def apply(kv: (K, B1)): Unit = {
      currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2)
    }
    @tailrec def addAll(ls: LinearSeq[(K, B1)]): Unit = {
      if (!ls.isEmpty) {
        val kv = ls.head
        currentMutableTree = mutableUpd(currentMutableTree, kv._1, kv._2)
        addAll(ls.tail)
      }
    }
  }

  // Fast path: two TreeMaps with equal orderings compare their trees directly
  // instead of the generic element-by-element Map equality.
  override def equals(obj: Any): Boolean = obj match {
    case that: TreeMap[K @unchecked, _] if ordering == that.ordering => RB.entriesEqual(tree, that.tree)
    case _ => super.equals(obj)
  }

  override protected[this] def className = "TreeMap"
}
/** $factoryInfo
 *  @define Coll immutable.TreeMap
 *  @define coll immutable tree map
 */
@SerialVersionUID(3L)
object TreeMap extends SortedMapFactory[TreeMap] {

  def empty[K : Ordering, V]: TreeMap[K, V] = new TreeMap()

  // Three construction paths: reuse a TreeMap with an equal ordering as-is;
  // build in linear time from any SortedMap with an equal ordering (its
  // iterator is already sorted); otherwise insert entry by entry.
  def from[K, V](it: IterableOnce[(K, V)])(implicit ordering: Ordering[K]): TreeMap[K, V] =
    it match {
      case tm: TreeMap[K, V] if ordering == tm.ordering => tm
      case sm: scala.collection.SortedMap[K, V] if ordering == sm.ordering =>
        new TreeMap[K, V](RB.fromOrderedEntries(sm.iterator, sm.size))
      case _ =>
        var t: RB.Tree[K, V] = null
        val i = it.iterator
        while (i.hasNext) {
          val (k, v) = i.next()
          t = RB.update(t, k, v, overwrite = true)
        }
        new TreeMap[K, V](t)
    }

  def newBuilder[K, V](implicit ordering: Ordering[K]): ReusableBuilder[(K, V), TreeMap[K, V]] = new TreeMapBuilder[K, V]

  // Builder that mutates unpublished tree nodes (RB.MapHelper) while building;
  // `beforePublish` freezes the tree before it is handed out in `result()`.
  private class TreeMapBuilder[K, V](implicit ordering: Ordering[K])
    extends RB.MapHelper[K, V]
      with ReusableBuilder[(K, V), TreeMap[K, V]] {
    type Tree = RB.Tree[K, V]
    private var tree:Tree = null

    def addOne(elem: (K, V)): this.type = {
      tree = mutableUpd(tree, elem._1, elem._2)
      this
    }

    // Function2 adapter so that `foreachEntry` can feed entries in without
    // allocating a tuple per entry.
    private object adder extends AbstractFunction2[K, V, Unit] {
      // we cache tree to avoid the outer access to tree
      // in the hot path (apply)
      private[this] var accumulator :Tree = null
      def addForEach(hasForEach: collection.Map[K, V]): Unit = {
        accumulator = tree
        hasForEach.foreachEntry(this)
        tree = accumulator
        // be friendly to GC
        accumulator = null
      }
      override def apply(key: K, value: V): Unit = {
        accumulator = mutableUpd(accumulator, key, value)
      }
    }

    override def addAll(xs: IterableOnce[(K, V)]): this.type = {
      xs match {
        // TODO consider writing a mutable-safe union for TreeSet/TreeMap builder ++=
        // for the moment we have to force immutability before the union
        // which will waste some time and space
        // calling `beforePublish` makes `tree` immutable
        case ts: TreeMap[K, V] if ts.ordering == ordering =>
          if (tree eq null) tree = ts.tree0
          else tree = RB.union(beforePublish(tree), ts.tree0)
        case that: collection.Map[K, V] =>
          //add avoiding creation of tuples
          adder.addForEach(that)
        case _ =>
          super.addAll(xs)
      }
      this
    }

    override def clear(): Unit = {
      tree = null
    }

    override def result(): TreeMap[K, V] = new TreeMap[K, V](beforePublish(tree))
  }
}
| lrytz/scala | src/library/scala/collection/immutable/TreeMap.scala | Scala | apache-2.0 | 13,808 |
import com.typesafe.sbt.osgi.OsgiKeys._
import sbt.Keys._
import sbt._
import blended.sbt.Dependencies
/** sbt project factory for the `blended.security.login.impl` OSGi bundle. */
object BlendedSecurityLoginImpl extends ProjectFactory {

  private[this] val helper = new ProjectSettings(
    projectName = "blended.security.login.impl",
    description = "Implementation of the Login backend.",
    deps = Seq(
      Dependencies.jjwt,
      Dependencies.bouncyCastleBcprov,
      Dependencies.scalatest % "test",
      Dependencies.logbackCore % "test",
      Dependencies.logbackClassic % "test"
    ),
    adaptBundle = b => b.copy(
      bundleActivator = s"${b.bundleSymbolicName}.LoginActivator",
      // android.* imports are marked optional so the bundle still resolves in a
      // plain OSGi container — presumably a transitive reference from jjwt or
      // BouncyCastle; TODO confirm.
      importPackage = Seq("android.*;resolution:=optional"),
      privatePackage = Seq(b.bundleSymbolicName),
      exportPackage = Seq()
    )
  ) {
    override def settings: Seq[sbt.Setting[_]] = defaultSettings ++ Seq(
      // Embed the BouncyCastle ("bcprov*") and JJWT jars inside this bundle
      // instead of requiring them as separate OSGi bundles at runtime.
      embeddedJars := {
        (Compile/externalDependencyClasspath).value.map(_.data).filter { f =>
          f.getName.startsWith("bcprov") || f.getName().startsWith("jjwt")
        }
      }
    )
  }

  override val project = helper.baseProject.dependsOn(
    BlendedSecurityLoginApi.project,
    // NOTE(review): the double `.project.project` looks suspicious — the sibling
    // dependencies use a single `.project`; verify this is intentional.
    BlendedTestsupport.project.project % "test",
    BlendedTestsupportPojosr.project % "test"
  )
}
| lefou/blended | project/BlendedSecurityLoginImpl.scala | Scala | apache-2.0 | 1,253 |
package com.github.tarao
package slickjdbc
package helper
trait TraitSingletonBehavior { self: UnitSpec =>
  import scala.reflect.Manifest
  import java.lang.Class

  /** Render every method declared directly on `clazz` as a
   *  `"returnType name(paramType1, paramType2, ...)"` string.
   */
  def signatures[T](clazz: Class[T]): Set[String] = {
    val rendered = for (m <- clazz.getDeclaredMethods) yield {
      val params = m.getParameterTypes.mkString(", ")
      m.getReturnType.toString + " " + m.getName + "(" + params + ")"
    }
    rendered.toSet
  }

  /**
    Check a singleton object to export methods in a trait.  The object
    should implement the trait and have exactly the same methods as the
    trait.  This ensures that importing the methods by `with TheTrait`
    and by `import TheSingleton._` have the same effect.
  */
  def exportingTheTraitMethods[T: Manifest](singleton: Any) = {
    singleton shouldBe a [T]
    val traitClass = implicitly[Manifest[T]].runtimeClass.asInstanceOf[Class[T]]
    signatures(singleton.getClass).subsetOf(signatures(traitClass)) shouldBe true
  }
}
| tarao/slick-jdbc-extension-scala | src/test/scala/com/github/tarao/slickjdbc/helper/TraitSingletonBehavior.scala | Scala | mit | 921 |
Subsets and Splits
Filtered Scala Code Snippets
The query filters and retrieves a sample of code snippets that meet specific criteria, providing a basic overview of the dataset's content without revealing deeper insights.